pax_global_header00006660000000000000000000000064147724656740014540gustar00rootroot0000000000000052 comment=ac94c79cbe16c5e2cfec1ad9129c15d75f0dc6fc dub-1.40.0/000077500000000000000000000000001477246567400123745ustar00rootroot00000000000000dub-1.40.0/.codecov.yml000066400000000000000000000017001477246567400146150ustar00rootroot00000000000000# Documentation: https://docs.codecov.io/docs/codecov-yaml # Validate with: `curl --data-binary @.codecov.yml https://codecov.io/validate` codecov: notify: # We don't want to wait for the CodeCov report # See https://github.com/codecov/support/issues/312 require_ci_to_pass: false after_n_builds: 1 # send notifications after the first upload wait_for_ci: false bot: dlang-bot # At Travis, the PR is merged into `master` before the testsuite is run. # This allows CodeCov to adjust the resulting coverage diff, s.t. it matches # with the GitHub diff. # https://github.com/codecov/support/issues/363 # https://docs.codecov.io/v4.3.6/docs/comparing-commits allow_coverage_offsets: true coverage: precision: 3 round: down range: "80...100" # Learn more at https://docs.codecov.io/docs/commit-status status: project: off changes: off patch: default: informational: true comment: false dub-1.40.0/.dockerignore000066400000000000000000000002241477246567400150460ustar00rootroot00000000000000# Build files are ignored to avoid collision and large context # in the event someone hasn't run `dub clean` for a while *.a *.o .dub/ test/*/.dub/ dub-1.40.0/.editorconfig000066400000000000000000000002441477246567400150510ustar00rootroot00000000000000root = true [*.{c,h,d,di,dd,json}] end_of_line = lf insert_final_newline = true indent_style = tab indent_size = 4 trim_trailing_whitespace = true charset = utf-8 dub-1.40.0/.github/000077500000000000000000000000001477246567400137345ustar00rootroot00000000000000dub-1.40.0/.github/issue_template.md000066400000000000000000000011111477246567400172730ustar00rootroot00000000000000 ### System information - **dub 
version**: (e.g. dub 1.3.0) - **OS Platform and distribution**: (e.g. Windows 10, Linux Ubuntu 16.04) - **compiler version** (e.g. dmd-2.074.1) ### Bug Description ### How to reproduce? ### Expected Behavior ### Logs dub-1.40.0/.github/workflows/000077500000000000000000000000001477246567400157715ustar00rootroot00000000000000dub-1.40.0/.github/workflows/alpine.yml000066400000000000000000000021701477246567400177640ustar00rootroot00000000000000# Build dub on Alpine Linux, testing compatibility with Musl name: Alpine on: pull_request: branches: - master - stable paths-ignore: - 'changelog/**' push: branches: - master - stable # Use this branch name in your fork to test changes - github-actions jobs: main: name: Run strategy: # Default, disable if you want to debug fail-fast: false matrix: include: # Disabled as we rely on DIP1000 `foreach (scope)` which GDC < 12 doesn't support # - { dc: gdc, dcpkg: gcc-gdc, dcbin: gdc } - { dc: ldc, dcpkg: ldc, dcbin: ldc2 } - { dc: dmd, dcpkg: dmd, dcbin: dmd } # OS doesn't matter, we just need Docker runs-on: ubuntu-latest steps: # Checkout the repository - name: Checkout uses: actions/checkout@v4 - name: Build run: | docker build -t alpine-dub-image \ --build-arg="DCPKG=${{ matrix.dcpkg}}" \ --build-arg="DCBIN=${{ matrix.dcbin}}" \ -f docker/Dockerfile.alpine $(pwd) - name: Test run: docker run alpine-dub-image dub-1.40.0/.github/workflows/main.yml000066400000000000000000000120411477246567400174360ustar00rootroot00000000000000# Cross platform tests for DUB name: Testsuite # Only triggers on pushes to master & stable, as well as PR to master and stable # Sometimes reverts appear in the upstream repository (e.g. when the revert button # is clicked by a contributor with commit access), this should be tested as PR). # # Also note that Github actions does not retrigger on target branch changes, # hence the check on push. 
on: pull_request: branches: - master - stable paths-ignore: - 'changelog/**' push: branches: - master - stable # Use this branch name in your fork to test changes - github-actions jobs: single_checks: name: "Single sanity check" runs-on: ubuntu-latest steps: - name: Install latest DMD uses: dlang-community/setup-dlang@v1 - name: Checkout uses: actions/checkout@v4 - name: Run tests run: | # check for trailing whitespace TRAILING_WS_COUNT=$(find . -type f -name '*.d' -exec grep -Hn "[[:blank:]]$" {} \; | wc -l) if [ $TRAILING_WS_COUNT -ne 0 ]; then echo "========================================" find . -type f -name '*.d' -exec grep -Hn "[[:blank:]]$" {} \; echo "========================================" echo "The files above have trailing whitespace" exit 1 fi # check that the man page generation still works dub --single -v scripts/man/gen_man.d main: name: Run strategy: # Default, disable if you want to debug fail-fast: false matrix: # Latest stable version, update at will os: [ macOS-13, ubuntu-22.04, windows-2019 ] dc: # Always test latest as that is what we use to compile on release - dmd-latest - ldc-latest # Provide some testing for upstream - dmd-master - ldc-master # Test some intermediate versions - ldc-1.29.0 - dmd-2.099.1 - dmd-2.102.2 - dmd-2.105.3 - dmd-2.108.1 include: - { do_test: false } - { dc: dmd-latest, do_test: true } - { dc: ldc-latest, do_test: true } - { dc: dmd-master, do_test: true } - { dc: ldc-master, do_test: true } # Test on ARM64 - { os: macOS-14, dc: ldc-latest, do_test: true } exclude: # Error with those versions: # ld: multiple errors: symbol count from symbol table and dynamic symbol table differ in [.../dub.o]; address=0x0 points to section(2) with no content in '[...]/osx/lib/libphobos2.a[3177](config_a68_4c3.o)' - { os: macOS-13, dc: dmd-2.099.1 } - { os: macOS-13, dc: dmd-2.102.2 } - { os: macOS-13, dc: dmd-2.105.3 } runs-on: ${{ matrix.os }} steps: # Install required dependencies - name: '[OSX] Install dependencies' if: 
runner.os == 'macOS' run: | # We need to install GNU utils as the test-suite scripts expect it. # Without them we may get slightly different behavior in tests and hard-to-track failures brew install coreutils diffutils echo "PKG_CONFIG_PATH=/usr/local/opt/openssl@1.1/lib/pkgconfig/" >> $GITHUB_ENV - name: '[Linux] Install dependencies' if: runner.os == 'Linux' run: | sudo apt-get update && sudo apt-get install -y libcurl4-openssl-dev netcat # Compiler to test with - name: Prepare compiler uses: dlang-community/setup-dlang@v1 with: compiler: ${{ matrix.dc }} # Checkout the repository - name: Checkout uses: actions/checkout@v4 - name: '[POSIX] Test' if: runner.os != 'Windows' env: COVERAGE: true run: | dub build --compiler=${{ env.DC }} if [[ ${{ matrix.do_test }} == 'true' ]]; then dub run --compiler=${{ env.DC }} --single test/issue2051_running_unittests_from_dub_single_file_packages_fails.d ./scripts/ci/ci.sh fi - name: '[Windows] Test' if: runner.os == 'Windows' env: DUB: ${{ github.workspace }}\bin\dub.exe run: | dub build --compiler=${{ env.DC }} if [[ ${{ matrix.do_test }} == 'true' ]]; then dub test --compiler=${{ env.DC }} dub run --compiler=${{ env.DC }} --single test/issue2051_running_unittests_from_dub_single_file_packages_fails.d dub --single test/run-unittest.d # FIXME: DMD fails a few tests on Windows; remove them for now if [[ '${{ matrix.dc }}' = dmd* ]]; then # DLL support is lacking rm -rf test/{1-dynLib-simple,2-dynLib-dep,2-dynLib-with-staticLib-dep} # Unicode in paths too rm -rf test/issue130-unicode-СНА* # ImportC probably requires set-up MSVC environment variables rm -rf test/use-c-sources fi test/run-unittest.sh fi shell: bash - name: Codecov if: matrix.do_test && runner.os != 'Windows' uses: codecov/codecov-action@v4 dub-1.40.0/.github/workflows/pr_info_intro.yml000066400000000000000000000012311477246567400213600ustar00rootroot00000000000000name: PR Info (pre-comment) on: # NOTE: high probability for security vulnerabilities if doing 
ANYTHING in # this file other than commenting something! pull_request_target: branches: - master - stable permissions: pull-requests: write jobs: intro_comment: name: Make intro comment runs-on: ubuntu-latest steps: - name: 'Prepare sticky comment' uses: marocchino/sticky-pull-request-comment@v2.9.0 with: message: | Thanks for your Pull Request and making D better! This comment will automatically be updated to summarize some statistics in a few minutes. only_create: true dub-1.40.0/.github/workflows/pr_info_post.yml000066400000000000000000000030121477246567400212110ustar00rootroot00000000000000name: PR Info (comment) on: workflow_run: workflows: ["PR Info"] types: - completed permissions: pull-requests: write jobs: comment: name: PR Info runs-on: ubuntu-latest if: > github.event.workflow_run.event == 'pull_request' && github.event.workflow_run.conclusion == 'success' steps: # from https://securitylab.github.com/research/github-actions-preventing-pwn-requests/ - name: 'Download artifact' uses: actions/github-script@v7 with: script: | var artifacts = await github.rest.actions.listWorkflowRunArtifacts({ owner: context.repo.owner, repo: context.repo.repo, run_id: ${{github.event.workflow_run.id }}, }); var matchArtifact = artifacts.data.artifacts.filter((artifact) => { return artifact.name == "pr" })[0]; var download = await github.rest.actions.downloadArtifact({ owner: context.repo.owner, repo: context.repo.repo, artifact_id: matchArtifact.id, archive_format: 'zip', }); var fs = require('fs'); fs.writeFileSync('${{github.workspace}}/pr.zip', Buffer.from(download.data)); - run: unzip pr.zip - name: Set variable run: | PR_ID=$(cat ./NR) echo "PR_ID=$PR_ID" >> $GITHUB_ENV - name: Update GitHub comment uses: marocchino/sticky-pull-request-comment@v2.9.0 with: path: ./comment.txt number: ${{ env.PR_ID }} dub-1.40.0/.github/workflows/pr_info_untrusted.yml000066400000000000000000000034421477246567400222700ustar00rootroot00000000000000name: PR Info # This workflow builds the 
whole project once and: # - comments build deprecations/warnings (highlighting new ones since last tested PR) on: pull_request: branches: - master - stable jobs: pr_info: name: PR Info runs-on: ubuntu-latest steps: # we first create a comment thanking the user in pr_info_intro.yml # (separate step due to needing GITHUB_TOKEN access) - name: '[Linux] Install dependencies' if: runner.os == 'Linux' run: | sudo apt-get update && sudo apt-get install -y libcurl4-openssl-dev # Compiler to test with - name: Prepare compiler uses: dlang-community/setup-dlang@v1 with: compiler: ldc-latest - name: Checkout uses: actions/checkout@v4 with: fetch-depth: 0 - name: Checkout old stuff, with new comment script run: | git checkout ${{ github.base_ref }} git checkout ${{ github.sha }} -- ./scripts/ci/summary_comment.sh ./scripts/ci/summary_comment_diff.sh # first dump old info - name: Check pre-PR status run: ./scripts/ci/summary_comment.sh | tee ../OLD_OUTPUT.txt - name: Checkout PR target run: | git checkout ${{ github.sha }} git clean -fd git reset --hard - name: Evaluate PR run: ./scripts/ci/summary_comment.sh | tee ../NEW_OUTPUT.txt - name: Generate comment run: ./scripts/ci/summary_comment_diff.sh ../OLD_OUTPUT.txt ../NEW_OUTPUT.txt | tee comment.txt - name: Prepare comment for upload run: | mkdir -p ./pr mv comment.txt pr echo ${{ github.event.number }} > ./pr/NR - name: upload comment to high-trust action making the comment uses: actions/upload-artifact@v4 with: name: pr path: pr/ dub-1.40.0/.github/workflows/release.yml000066400000000000000000000074541477246567400201460ustar00rootroot00000000000000# When a release is published, build the assets and upload them name: Build release assets on: release: types: - published jobs: # First we define a job with a matrix that will build all relevant assets, # and collect them in a temporary storage using `actions/upload-artifacts` build: name: 'Build artifacts for ${{ github.event.release.tag_name }}' strategy: fail-fast: false 
matrix: os: [ macOS-13, ubuntu-22.04, windows-2019 ] arch: [ x86_64 ] include: - { os: windows-2019, arch: i686 } - { os: macOS-latest, arch: arm64 } runs-on: ${{ matrix.os }} steps: ## Dependencies - name: '[OSX] Install dependencies' if: runner.os == 'macOS' run: | brew install pkg-config coreutils echo "PKG_CONFIG_PATH=/usr/local/opt/openssl@1.1/lib/pkgconfig/" >> $GITHUB_ENV - name: '[Linux] Install dependencies' if: runner.os == 'Linux' run: | sudo apt-get update && sudo apt-get install -y libcurl4-openssl-dev ## Boileterplate (compiler/repo) - name: Install compiler uses: dlang-community/setup-dlang@v1 with: compiler: ldc-latest - name: Checkout repository uses: actions/checkout@v4 with: ref: ${{ github.event.release.tag_name }} ## Actually build the releases - name: '[POSIX] Build release' if: runner.os == 'Linux' || runner.os == 'macOS' env: GITVER: ${{ github.event.release.tag_name }} DMD: "ldmd2" ARCH_TRIPLE: ${{ matrix.arch }}-${{ runner.os == 'linux' && 'pc-linux' || 'apple-darwin' }} run: | ldc2 -run ./build.d -release -mtriple=${ARCH_TRIPLE} pushd bin if [ ${{ runner.os }} == 'Linux' ]; then tar -c -f 'dub-${{ github.event.release.tag_name }}-linux-${{ matrix.arch }}.tar.gz' -v -z --owner=0 --group=0 dub else gtar -c -f 'dub-${{ github.event.release.tag_name }}-osx-${{ matrix.arch }}.tar.gz' -v -z --owner=0 --group=0 dub fi popd - name: '[Windows] Build release' if: runner.os == 'Windows' env: GITVER: ${{ github.event.release.tag_name }} DMD: "ldmd2" run: | ldc2 -run ./build.d -release -mtriple=${{ matrix.arch }}-pc-windows-msvc pushd bin 7z a dub-${{ github.event.release.tag_name }}-windows-${{ matrix.arch }}.zip dub.exe popd - name: 'Upload temporary binaries' uses: actions/upload-artifact@v4 with: name: dub-release-${{ matrix.os }}-${{ matrix.arch }} path: | bin/dub-${{ github.event.release.tag_name }}-* if-no-files-found: error retention-days: 1 # Uploads collected builds to the release release: name: "Update release artifacts" runs-on: 
ubuntu-latest needs: - build steps: - name: Download artifacts to release uses: actions/download-artifact@v4 with: path: ~/artifacts/ - name: List all artifacts included in the release id: list-artifacts shell: bash run: | set -euox pipefail ls -aulR ~/artifacts echo "artifacts_directory=$HOME/artifacts" >> $GITHUB_OUTPUT - name: Update release artifacts uses: ncipollo/release-action@v1 with: token: "${{ secrets.GITHUB_TOKEN }}" tag: ${{ github.event.release.tag_name }} artifacts: ${{ steps.list-artifacts.outputs.artifacts_directory }}/*/* # Keep the existing state of the release allowUpdates: true artifactErrorsFailBuild: true omitNameDuringUpdate: true omitBodyDuringUpdate: true omitPrereleaseDuringUpdate: true dub-1.40.0/.gitignore000066400000000000000000000021101477246567400143560ustar00rootroot00000000000000*.o *.obj *.pdb *~ # Unknown hidden files .* !.gitignore !/.github !/.editorconfig !/.codecov.yml # Unknown script files /*.sh /*.bat /*.cmd !/build.cmd !/build.sh # dub generation files dub.selections.json docs.json __dummy.html # Ignore build files. /bin/dub /bin/__test__library-nonet__ /bin/__test__library__ /bin/dub-test-library /bin/libdub.a /bin/dub-* /bin/dub.* # Ignore files or directories created by the test suite. 
*.exe *.lib *.log /test/*/* /test/*.* !/test/*.d !/test/*.d.min_frontend !/test/*.sh !/test/*.sh.min_frontend !/test/*/.no_* !/test/*/.min_frontend !/test/*/.fail_build !/test/*/dub.json !/test/*/dub.sdl !/test/*/dub.settings.json !/test/*/source/ !/test/*/src/ # Ignore coverage files cov/ # Ignore auto-generated docs /docs scripts/man/dub*.1.gz # Ignore generated files for examples /examples/generated-sources/generated-sources /examples/generated-sources/source/test.d dub_test_root.sh.*/ test-cov-ctfe-test.lst test-unittest-cov-ctfe-source-mod.lst test-unittest-cov-ctfe-.dub-code-test-test-library-unittest-cov-ctfe-*-dub_test_root.lst example.yaml dub-1.40.0/ARCHITECTURE.md000066400000000000000000000024361477246567400146050ustar00rootroot00000000000000## Architecture ![architecture](architecture.png) ## Terminology
Package
A locally available version of a dub package, consisting of sources, binaries, and described by it's dub.sdl/json file.
PackageSupplier
A source to search and fetch package versions (zip bundles) from.
PackageManager
Responsible to manage packages (fetched or add-local packages), and overrides.
PackageRecipe
Abstract description of package sources, targets, configurations, and build settings.
Generator
Responsible for generating a build recipe (e.g. CMakeLists.txt, VS .sln) for a package, config, and build type. Direct builds (dmd, rdmd) are also implemented as generators.
PackageDependency
Unresolved, abstract specification of a dependency, e.g. dependency "vibe-d" version="~>0.8.1".
DependencyResolver
Algorithm to resolve package dependencies to specific package versions (dub.selections.json), searching available package versions in package suppliers.
Target
A build output like a static library or executable.
BuildCache
Caches targets for a specific build id.
dub-1.40.0/CONTRIBUTING.md000066400000000000000000000026741477246567400146360ustar00rootroot00000000000000# Guidelines for Contributing ## Building You can build a development version of dub using `./build.d`. When you already have a working dub binary, you can also just run `dub build`, though that won't update the version string. ## Changelog Every feature addition should come with a changelog entry, see the [changelog/README.md](changelog/README.md) for how to add a new entry. Any `.dd` file is rendered using ddoc and is the same format used across all dlang repos. For bugfixes make sure to automatically [close the issue via commit message](https://blog.github.com/2013-01-22-closing-issues-via-commit-messages/) (e.g. `fixes #123`), so that they can be listed in the changelog. ## Documentation The source of the documentation for DUB lives in a separate repository: https://github.com/dlang/dub-docs. ## Backwards compatiblity DUB is a command line tool, as well as a library that can be embedded into other applications. We aim to stay backwards compatible as long as possible and as required by the SemVer specification. For this reason, any change to the public API, as well as to the command line interface, needs to be carefully reviewed for possible breaking changes. No breaking changes are allowed to enter the master branch at this point. However, to prepare for backwards-incompatible changes that go into the next major release, it is allowed to deprecate symbols, as well as to hide symbols and command line options from the documentation. dub-1.40.0/LICENSE000066400000000000000000000021511477246567400134000ustar00rootroot00000000000000MIT License Copyright (c) 2012-2016 RejectedSoftware e.K. 
Copyright (c) 2016-2018 D Language Foundation Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. dub-1.40.0/README.md000066400000000000000000000124011477246567400136510ustar00rootroot00000000000000# dub package manager Package and build manager for [D](http://dlang.org/) applications and libraries. There is a central [package registry](https://github.com/dlang/dub-registry/) located at . [![GitHub tag](https://img.shields.io/github/tag/dlang/dub.svg?maxAge=86400)](#) [![Coverage Status](https://coveralls.io/repos/dlang/dub/badge.svg)](https://coveralls.io/r/dlang/dub) [![Buildkite](https://badge.buildkite.com/c54d71c42284a042b9d578e28e093dff35f20cc8528319b1b6.svg?branch=master)](https://buildkite.com/dlang/dub) ## Introduction DUB emerged as a more general replacement for [vibe.d's](http://vibed.org/) package manager. 
It does not imply a dependency to vibe.d for packages and was extended to not only directly build projects, but also to generate project files (currently [VisualD](https://github.com/rainers/visuald)). [Mono-D](http://mono-d.alexanderbothe.com/) also supports the use of dub.json (dub's package description) as the project file. The project's philosophy is to keep things as simple as possible. All that is needed to make a project a dub package is to write a short [dub.json](http://code.dlang.org/publish) file and put the source code into a `source` subfolder. It *can* then be registered on the public [package registry](http://code.dlang.org) to be made available for everyone. Any dependencies specified in `dub.json` are automatically downloaded and made available to the project during the build process. ## Key features - Simple package and build description not getting in your way - Integrated with Git, avoiding maintenance tasks such as incrementing version numbers or uploading new project releases - Generates VisualD project/solution files, integrated into MonoD - Support for DMD, GDC and LDC (common DMD flags are translated automatically) - Supports development workflows by optionally using local directories as a package source ## Future direction To make things as flexible as they need to be for certain projects, it is planned to gradually add more options to the [package file format](http://code.dlang.org/package-format) and eventually to add the possibility to specify an external build tool along with the path of it's output files. The idea is that DUB provides a convenient build management that suffices for 99% of projects, but is also usable as a bare package manager that doesn't get in your way if needed. ## Installation DUB comes [precompiled](http://code.dlang.org/download) for Windows, OS X and Linux. It needs to have libcurl with SSL support installed (except on Windows). 
The `dub` executable then just needs to be accessible from `PATH` and can be invoked from the root folder of any DUB enabled project to build and run it. If you want to build for yourself, just install a D compiler such as [DMD](http://dlang.org/download.html) and libcurl development headers and run `./build.d`. ### Arch Linux Михаил Страшун (Dicebot) maintains a dub package of the latest release in `Community`, for [x86_64](https://www.archlinux.org/packages/community/x86_64/dub/) and [i686](https://www.archlinux.org/packages/community/i686/dub/). Moritz Maxeiner has created a PKGBUILD file for GIT master: ### Debian/Ubuntu Linux Jordi Sayol maintains a DEB package as part of his [D APT repository](http://d-apt.sourceforge.net). Run `sudo apt-get install dub` to install. ### OS X Chris Molozian has added DUB to [Homebrew](http://mxcl.github.io/homebrew/). Use `brew install dub` to install the stable version, optionally adding `--HEAD`/`--devel` to install the latest git master or development release respectively. There is also a [MacPorts](https://www.macports.org/) package available. Type `sudo port install dub` to install the latest stable version. ### Windows Daniel Jost maintains a dub package on [chocolatey](https://chocolatey.org/packages/dub). Use `cinst dub` or `cinst dub -version #.#.#` to install stable or a custom version respectively. ## Alpine Linux Mathias (@Geod24) Lang maintains the Alpine Linux packages. It is currently part of 'edge' and can be installed through `apk --no-cache add -X http://dl-cdn.alpinelinux.org/alpine/edge/testing dub`. ## OpenBSD Brian Callahan (bcallah@) maintains the OpenBSD package. Use `pkg_add dub` to install it. ## Using DUB as a library The [DUB package of DUB](http://code.dlang.org/packages/dub) can be used as a library to load or manipulate packages, or to resemble any functionality of the command line tool. 
The former task can be achieved by using the [Package class](https://github.com/dlang/dub/blob/master/source/dub/package_.d#L40). For examples on how to replicate the command line functionality, see [commandline.d](https://github.com/dlang/dub/blob/master/source/dub/commandline.d). ## Minimal D compiler required to build DUB In general it is always recommended to build DUB with the latest version of your D compiler. However, currently [2.076](https://dlang.org/changelog/2.076.0.html) is required to build DUB from source. # Contributing New contributers are always welcome, there's plenty to work on! For an easy start, take a look at issues marked [`bootcamp`](https://github.com/dlang/dub/labels/bootcamp) The contributing guidelines can be found [here](https://github.com/dlang/dub/blob/master/CONTRIBUTING.md) dub-1.40.0/architecture.graphmlz000066400000000000000000000062071477246567400166310ustar00rootroot00000000000000Ks8\e*{vl'UI{v2-7"8mC)y&F!W%"h?4[M&YǴQכw`d~h'_OjB8;|z8T$RZAIz?E_?$ (8߅rCP(kg'~e6NpW aҷNr.TDiGc>XnZU]*Ueyr<ZP6 3ٻdrFE{e}X<}˂ x9۹1? a ?S6y6JéߠHSHF50h !;0ê9M\Q,jDdɺ([QtUɄ|E5,i~Zc4d\LU|#n ^dӪ,#9sn4fk"5iyi(zaGV..b ޓ((Ŧ*X%0/ypA46J~*U,y?HfepEFbGcޏa^%Ϩd]Ez0`TOyM 9A3ԏXw~N4u>x՞)_P]OƼaX<2('S-<WIBtb? #H|u_QWb`Ց٩?r&88+F[LKd|: R4-JPn9h@}J0ey2@YKRpDm_,Y-JDk1%0KL Vf6\A!\ Ys%j\iWK"KAXt0AaОLSHqVōH}h`-]R.]O WelziK;a4Dn5֖*Mb}y$57F#L!p&I}سAklh~s>_p%l?[egxDfXކo øi ?UD:EN)kiF##1@ymV +c]|ͧ< v[DУ&iYߥ%Q &#_(;8 c6? 
͚cR%>r< $naKUSY$F' ;h=c.RBmRTjdf٤;Z^ |둬jrgc|1ΡϪghIĺnAn&* [ >/z|: %t/t45s˙ǰfo)ik:cJ,m xh;OEE@ٰ7ql7U'o^|_~PUol-R=̗:WQdGT|ɕG`ڲÅ1Qxa q!Oi~]*em'"ޔkOF!VoVݛIwRnw̺q0HBR$46wpSe=.EH҅=,׵{<}UqQR!_'_{ncO\$d u0jH#Z02:yM0lS0e;:ywGirKˈdb!ʘQʜ.i>)4eh2Ï2_5z^ 6Z7N^=#n =dՖ)1c!<ЛCnS/a]ބ*ccȵFi3ڌh,3j@lAptMbM D/3i?cl#&WMQˏ~0h8QI;佉j9l1q̐iQt>N|=NP5}w6Ǝ;\bc0m5:nW:q9!z "0˫e1I#Ql,d/MTsq#tĝw:qg;q&DHz&;!Q%+B/ER~'W|1)aNoGI *ӛՊIC^3˗!"B*d>,~ )ZsTh6"27\ZꛆyZrKۼLkU}wT%-چi*õR=-uTC6dM1ٴx-g}_qX 3]mlyWT<\WCcs!9a{^RR 5wBlISRJLho7 FRS\+ly6,zxdub-1.40.0/architecture.png000066400000000000000000002512231477246567400155710ustar00rootroot00000000000000PNG  IHDRmIDATx XT{Y/*J42JJLJ+PQPq  2m,0QQQQQ\we}_TVEHͬ^fܹw繏0❗=39>BGP xBGP xBGP x@_%SE`(;wdK wޥ*G /͋7nܼyΝ;P <``@SMyVTbW9]k2vUݤ*Ŀ@<*Jj>>;;7<5#=e݌pNJg#ތ"0#3Kr>wG?p#˨<0Erg$„OnkԯWOZ%9_ևc;#YtIyUWnzϡgb^ C{y*!Gؼy ryq᲍-Z<`ƚ]?d ܌!*wݻo...3NJ  nF(Ќ_z=[v7#^fcijr+N`7#d+J嘆8\]qR.j:S'$<4 WWWlAg3 AegN"pW6]*D,T$G`"(/q<=.u4JDж3# Bpp fGryDЌD0D3#46##@c<{5#،ЌpYh3Bi4##a ?q+#ӱât6#1fC ``bccY3B5##{zz޽{wGPj ߌoFH@$jjj^#w^#ȻHf8oF8ݻ&L/((݀G4,jӌPn"R6#ތjfxooo++SNV#M:QO3vft1rM!Is3<m۶YXX+0{ r3B7#TЌP.8ݻ޺}6n<`A>Y6#ěl3Ax03f̘:u*<`$mFS3<wvu׮]z96#!IfF`L(bx=f 6#5f4Gތp0 6JG#Th3By&Ҍ,%#ft5#Iڌрfj7#d_ PF%#MmFȗ!Ō@(ZZZV#$kFȖ!AfnFG0a<`7#\J`3#ᰀf u[lxA~ьFkFG >} ps jF{Y7#F3#4$ʪG(nlF{}WEhF,!4Ae3f}GGPaQfLIΘx3<HKEEuUUn<`FR%mF_3<HٳOG5oFy7#$Jڌp(#\zں0 f")7#dH@vx{{cxl̘1cɸ@󤢢:2$((oS-[ ɓ1G3UUU^^^`xxxL:#bA3RΝ;:u w$x<{73p7@S5&@Sܹs׮]322p7%lEOOϪ* w޵ݵknрG"kp/b rh#0?~8#Ь`'999 kiiSM#` (**rqq! 
nZ_Ν{ `ZKPNvuu}GP Ν;lI|Rc꒺v횞J* 4GpΝ;;;;_P.1aaa666fͪHTcԩ^^^207>#[[[7tfOuuI7oތJ-[VTT`//:^{}pezU|_WU||Ք)S^y17 ~6FGGwGWU|_=w/}|yb4//k׮M27TQ ƍnnno[YY-[  6;v`C%pDFF$fx6l(@ 4GjjjfΜIs•+W޾}[ƍ#P.͊QF?٥`/#B@f߾}۷裏hN1gϞ{nnnׁkPx|4GjjjvЕo„ #XZZ 2`~GvvvT?̯zl_&'ܹQ%7KiӦY[[oڴ鯿 }ǕroMM fr`@sbʕ4X`Yn7ll*%yo0ٳ1TWk[݃1ۗG`!Lsnnћ@Q8믿ZZZΛ7͛^8Gfپ[#`<dܹ1M%Q֬Y666aaa _|񅶖mAw͞FGGDPBH#o1#`GP8 4諯*//x++C^ZZПq%!z$m&&MT]]ݠqzoF@uS#< oܸa'={%nnnK.m3<'Nߟ;w.}&==Tz@V ,<59*@<(#:t##<`ϠҸAL6ݽK.'Olܸ TNj<$9CSɓ'߿_uV#ٶmիW^vM6<.h*[d'%%<e =Bxx8ȑ#?O?M6_~evvJu/^{Lz_~&Lf\dGy)cǎyzz~'ϟ]IMF߮@'E3fСC[wE4>=fҥJ/??@C߲EY[[,֭[qtx>HHNN~뭷 RTT$F쬣.dժU*•W[uЛ釜5kVn_/_NLٞK7lJD߳ggzBooo%LG^0JBGGGc/GU%n+lܸ&//oܸqT/QsJ8͊++(I^zyyy$&&Ҹ+8c4m?l7@8W^:u*Mm.džz)..o׮])-7D~'֭7n%dK*ꔙlBuDGrXfi.\8{쬬,*1 iٚR:k,1cZW X E۷KJJ-2@*RXXH#yМ{BB<}<ж ;ؓпޫW/>A9ch`Gs=7x`ڴiW!>hQ6#{֝Р'O}#p5zH`sQ۷ƍX" cƌ{iܣa<;FF{BIǏ ׻H FWnJn3o< .O4 V4>'}P'm0mj,nG`m?ޛooo,pf#MMw۹sgF4j,:""=ʕ+Dٕ%mYcu9@_KWSʖFРz^0GРߢ5nxx!%4J͇~#6;֥Kww&.d|}}5:xjjj[)0A[unzAyU*W7lؠ{U!Y|u K7li w*e {쳨ux-@PX |dee%3:u*,,>޽a_7jē /xNan޼ٸGGG}hMk܏ݠ vvv"u=´iӄxMbcctbC˴%˗/MzJuhb^>e G2  ѣGME9~͛7uԉ۾yngg7{l^πA[GLESNXe G@zqwwo3A㹏Ќ Yn]DDFc@Δ888dddgƎR}WqTTTPll۶ e z 7#ШLAK=ȁs5t1y= (@8QQQŸiZa۷o[YYw[ m1GȇM&L0Z׷GP &<h45553f̰32GPYgӧ GGGy4@]%W.\+w圯:w*V\Ut%w^iZqU\uWJw]N+JbWJ̫rxW|]:]ٗxWlEɼwEӕQIiJW\]'jJU\W9]j]x ޱGZxߢ.ޚ"/w;GKx"~x׋p -ąw^έ x-#5E>)("?!e0xTAy (4Y YPY b"G`nۺ' e' :u ,4$ >Y7Yu:& W^y&{߱IA6 LftVaըX_^K FJּp(_o5RFg~j0T5-**pKԉGtti 4jx djz᭧]ѣ6BU |d% JKK\r-M Z9x] ,?Y)uM:O LŜ,7Y8#_vby=0^i6#D7UWWW=ђ&sbj2isN6WDxӦJ6dǔvً/P3GGZZg-DyFA@ i7#dIҌ+Y3,$&&fffWUUݼySdME6#􋧄źj ޜGV޴i&MBxFHc]VVv8aڴiY YK1GLoFHRo3 ӧO'%%WVV :OxnF(ATR(5#oIkv2$]c7mXddd\vhA!!ru k!k!!M,4lI$mFG{jҋjq DMzQ=/<@B֒75G0x3BqiF7f< &!FE#^3;#oaQ@_נ#hd-d-q<"\"G75Go3B0@3B1f3eyyW#x-l jEx}P:Hbhšޏ#6#dJڌpFf=-#";=B$mFh@3B' M؏C{3BfTw^b'jo DMoQY Y̳"<#D>}8f|IR]3aDR-vJ)*L ),J5JΫ tE!6"A㹏|`YKcQF)kb]cAbZr,tzфSieL<X%Vef&7#>Sx"q2Y#!Q"-v2mfΎylJNOo0m{ܲ믾F^}'2du{CӧOBe#rYl hAnVurv]d17/L@A㹏|^7Q4L֔ZLˀs-jDzmyg_wwvZ>ݱOľS`Fg ;;;W1' 
k,HhFP|~\Z<0tCcēmU~,p"ޞG5Y˙{oLyܨuSW׌0ᳯuy.!y'\a<#/߽'MểBwv{󫺫цcS8R8@IaifG/:(1bͦowuHʭ4b':g6@]v999hтg#rߴY&G{i\ZqJ^e`w:\46l7 D%ieZeMO?W54kO >hz%,kJ.~U =nJڵ+}ɓS׸8Y4QSzc܋Zl1%qe6oi9,p[\D}L/}K.TwOz˙![l 9y4滟֌@Wz8;>W/^gxiF9 ؉jUA,5GYj9z|Nq5K _ׄ8#J7h_8^XmպEڪ7? M[syaĘ"xRos]兯ũ`ng8Aở?u=30l?UcI&LgAnBwby}ԄzNtvu"h}:}C*!==}Μ9666ѹs۷o\f+VZRۛ6Ȁ4 c=O5eO7 [1fbЖ?(Nr6!3`ĘIOgRl?֣>zaԙvܲW6dG?q.W|5#f-Xʚ3oQz!t}\~<<.k}?{ 7~u[g&>c}.JY;prv;ؙ\6^N)xzY?z4.nNo?EiJs#dko~\H+s~ GoO嘲_x}y`mhWbFz}~yjXb漞}?,`.kjQp6k!<< ūk\P]37ArI]]N']XcAY$k$ѱ8qKY]cAҏm~_}F>rl|v%ĺۈ?>T37}].\i&#x4W(xo]ChJnCG`e_D5Ealڹd&mBwfEKW.]Ҫu_Q^p0UwoleՆy;{뗮 @hI@Ȋ 'Ov/+xQz u갽(#~>, җjD%Kh?6o8J``Ȯ 6nǗ:/_J}rlŅ2Cv!=}ߖjG>oѪkVlϵ{b;cNb{t =.bloC L+ :.[$ T D(u 6k=§էZ4X_uc/gi|{hf9p!;'[(e-%WB/ζ=V#lcVJRfGX*R :F!OV >3L"h•W-ӜGxw XvYFo ioVm(;vъɳԩSU*ٳ Ek,NX])N-ZZ$ӹ Q3.N۟j$b?$ ndF.NkeQϮxֽ'o Se30d燣syūi̳hF>C1JIJy*gܺ ~t]lm1ڮ]oݺʋR$pd\ɢ}ǹ,8Yy ;v޳}nwH bcx];lٲefQ5{YRX&~j>MWye n"} HbQc?>rฬrO>.8?!.xR1]_ܰ@]CEJ T'/V/ANVz_ֳ@ycB;q>PjQ~ފ\3s{k'(Jɿ֛3߷ w elO?5bq V>НbxF6 N?r/n85[=MoApa l [B?jKy *@MZ4?xp?Թ{jjn9]:F9y, trNJ0%^ǵPeVg݋a;9uc+RĜ4._ |_|X-!l"񔲐OwNB~s7Max]Z`=jÇծkPWĤ/IN"u[(lelmGYzpyqC<=2@iXsV#ֽ3w\֚mq;VVnڶ20w/BBϦP}2嫧ӟ2ՓZSi^:щ4Zp(&3ύ7eu 0ntJ_ԥ/P[/~=iL-Ӿ.**uenF^D?Ukv=nd9"wd-~KKKdVVHuM'aʃyAқ +D#pAI `qyeRQO̽DՍ&uRxx<@7;*#(ʙm?$>`PQ^^x4Y[TvF{=(5 ḩӾMObi3jefdhLŏ+Hu=iֻ_tzeY79G1k=eyGН)\&WR<'Z-_jC36ϩ y4>TxP !;(| {#Px vܞ@4?CG#MM _eO0evϷ)ۭn% @,kE9FCebr;4E FVyw2cu90X6Q̭Vr=yoO= ]F]*jZ| mJII@; JԺ>V׸  F2NTd9 b~(  '^w(N<󋚎l{x}vg8zbC.6&k7mAM=U)*4T>j9d:~]Bn”zfoM7hyEAG̍\z0@' `ma^a# ePzߍVN:E/=sI4~#";i(/\yꩧWpQwt*}8x w&J:~8lʰP&7e=AlI6qa4K#A(2dxYuc&Lm##O,ذM(~.$g:ZBn DJB@rְ|Gw%rGKZ[]E]pd#4 hFPz-xz}i UBzcrN.>wL0 ӫ=zeػc[ھ`;({||Lߏ'_~{Ͼ>6Gq#z!GNT: ez|mҽ^x=znw~^'+VSjXUAͲaņ]-OW^BDAG_^rzzzZYYUTTۈ\&Ef̦}o)w ywoѫve]VmGZ}@/ uGŃg_\#գs>wv_ x"t/^xf];53Ntx5#4N#dƝ\e^}>ߞ*uvj޲36ϽoۍrV3M)w˦h{mP~l3Ĝ\zMufY * K#uUrj=ka#PrPk˴"m_@ҭG>#~FjCٶn=x}XG)f/ŷ?-ظKx-b(䖮>M=ny#|2wa)8;w]wA=tcZvS-c`VTN4>vDB~jec`c/L*䬶c`/L9w4@üc`?r^1+yTrAFa,N)Roxvtڅ*mx>Pp:fڵUy:t]W|`fI)fY+6(%B=kϣZ] f/ZYjŭ4|*ƬΥItd6g "0kQ17~] 
MuʼnQIQBVauw_lWbjZIȩ<QƏ3Ng5KDp–J+=Pgֵ3C`GhA+/ YEUqJM%g=Y`!v+2Y(9\^,ϏI+QmyIIu)#GSb&ߒ{y $1DgJd5q<)TIb~U#xfMoDKԖZ?y =&@B# B'Bz_d'Gv8Hqᬉ4# ,=z݄GZZyDYEL54Hf4#T~Є87#Tb'x-l j?<@B5k!B3Bz_d'G0x3B)ԌXdʋn !f!r8Q>ْ6#$Hڌp@ ɤ"<65 D%*>˃^<@B5k!B3BddьpNdU' wFDG0/N& Ax :(鹏&<@B:(G@I#p) MAAT& LAx {___x%yBg#4zФ8ٌoޏ#`/v<65 DYxt]CS< @B2G@OGC4#p4#4py6#``HN Ax xykNҽ?j @B# B"BpB6#1fҞ׀QQXd‹#hoaffGZ#@d-d-czDYEW|}}݌*i3B D:xQiJh~b'#hoaQ@`mHx#<@B֒pbLrM!I3Ay #`I/v|uu5fܹsƍJʕiii񒄷ѣE'XUƫhѢh'66[酅B*=´izr <8p=zh9kIˮ]~>U֒O]3('NxI>޾}իeee)xY5bgmm6! E8lYhM,u(֨ (Bh[ۚn߯^\.ι|ܮۘ?uWZzI.((hĉ| J ӧO?իuuuf#Haݻy^x\U^^~4p3f0wN> K_ptt/^_eeeuu5T?Ioܸqҥ Y. $?K3;wNR{Rlc2kr75&KQ+!!k׮v؎9lG-kмy:v옘h=QJ .Dҿ5 H?V,?>>Ap•+WjkkF͂5 G ='D[n\2A7K/9sٿv֭{fDٺUDx AܷJ Ç!Aum|8vojjjݡJ`xw\Һs[NͲi1%%EAj5h1QD-( ?~&/Fe%:r|0a8Qٳg^^yGo?ǽ{f(nݺuUU#fݻ4 PFp7+gDDٳg۵kW>cƌwy\~\!566`㣕* ѣGII $?ǎ~ { ,q&UPPfΜ9Xe8Z#@v-''e <4%%f} |i ? =#q8 #`ʔ۩S';͛7YNګW/DB$ql{YYYstO>dŊ/x69Gм&ըe}ddd!]k,ZVǏs[>>7ZڲJst$D"gZC(9! ݻ5*Z#uRP⦚@)ꉲ;j8H9-*d@.8$?ݿM%8#sssyZ֩,gg稨( 6.( E"Qqq1H&;2޾QxAXmGkQdh٬a|/~~~AAAZmAyyy VsV#зjy9=UWWHZצ-c(//G7ڲrssbqrr2"#@Px{{ 5(YGPc]HPQ||'??? 
nz77H|]\\T*VAAA111L7#XRtvvy_dW("r9.LeԸj+<<<::9+Ax"{544@z.((`_uttdfTT_^1S%LD@]} Pr7!d^Juu5fD`T7fLJH$UUUqY¿P( Rzi+r84&|A0Jvxqqq0#Yxl9}rÕm$r8D`n0wBaI)͂٢BSRRtGb3+EFFD<ͪ%""|DmC`lgarQPVMM#g[ Je``EJC{{B|Jxɯ_9ќ{DƐlAA6244=ljhhg&` 993 b,PRpp0;-V:99K{UcZZ/*r(;p9d@ͣHC3obDBr<7"G`$)[PL͊sD&{Pꎎ\R|O^aIt*'?-r9&MHTTPa칹LP%<892|G'r(~d2T,T*K}JC%Q˲g+9;9&MQd*Jq P S'/⽏H"dffbP#辯?MKDҀNd2HONbJgxN uK*#} (Fr$A6{G`Arx+mmɋLQ9#04ƒŁ .dwO5VuIRA7n=ǨQ% Y̛4Q ƣ$A[X`%; OOOwwwNj`\7E&˨sА# G@RyyyYYY@TTe/D.0*%pO+0.O3;GPjʉe@2Gdw\.j[+!Ë&(& >reu#x)h-usAUU, ԡ(!hrWi8Gk@bJ~~~yD 𢡓nS OP&>,Dbr8Xɖ]`ggg ;f!JfEDD]mШ63#h@x>X999A_amd I"0T>xbm|{{{cQhr]m5px_An@Vr2TR Tnn+W<== Wmi4Q HPPA[.ՋD+&(PQkR|DdA r`c[-{eZް6UUU9;;[y1"##9}K*r9"A75)Vud2777)k0Ǡ)GDX8뙡*--x@ j{TT:^JZZ0*<<"A71i}#>4d'*((*(,, )44=4'ASP)222((_TuֵBg"$ u[| H`#`DVaa+;\ǎ%f4JmM_777l}rQ!roTSS h񯤶 ?(, Z ۄ흛/9Ug!V 95r=!U"޵2X,}4AFA5Qhh\g7ܨѨEY(9zk^&?([a5/8{VUWW{6 Ò]$ޮL*ʐs#faP#hރ% 䝟ocpR*((cMH~D"|ņp>jMLd8Ì%" &#XǠ@'$$D.[mqpp UWWb0?AR]]/Z/G_W555PºYɍ%,dPLI&A:9SjllXA 9X,[k^њZurr’K?O~AAAJߵ^w_7m\'cnbbbm#ǧ}40u J p<Ŷ`l[ۉSEkSߥ/$EEE% ٴ*`GлM߀I۱dgJ*]*꘩F CPl ՝GK8nOǠGP Z+ح5CJKKsttԽ_Q9` G_Eωj5h7mDyyy eX3^;;;[N(f QxZWP9v%(D=Xl{ţ%*JȯG$h8G)N/88R^^~XB0XXX)lk<} dG GPŒru~mZPbI...ےjr򋈈dy9-00. ~HHX,d(GJPg5GG^fkyPAͪ<ɂ#,,v*r?AG Olm G Dž D"Q(ZQjj*r 1p JPV-|ΜfO*"J`V|LN~&/CUUU q? \.00ť? A H$nA3mk*%8׀bCVQC|A()1xzX9ϴl#h4&rK.;99~#XJJL&v7 G0\Pٙ6;AQ(Fa9}nnn-2JUUU6Xv=Mq>9fD U"T*4#XV555mЂ}wråT*R)%(5iZɻⷸ89"g+feefEG$#جrss!EGG 0r`L(&50dQ#tTb“/W6U]]v$(I$Fkll Y? Gл9UWWYZZr+,zK|&jwh̙ĠGP̪P,Ko(mywU(22ۛ koCx19ʺX,4`(yQC`ggߓWyy9ga\ef3 #ཏWDDDttf: lP &6YYY Ï9<<ꮼy◧H$ J9#R9[B/q^ 'ʛc9[Uz$_jXAᧁ0VZqD 566=<<<Q]lEB!(Db68BlllK`9&111xM{`a宮ZM?"k׮;urM_#G@jkk;kgg'8@$J#""H~qִ8{lY4ł+$$/k`wpphժnc swwjr{{x{#G#>Oj#(rQIBm %h~2L"s`$ -Ŗ#QO?z*H4v؅ >|޽{F=,Fw-8N}֯_…KLUGh'xYC.]uV0? ;wb4C|;s̱c"GP+t?9>"D@mQ& l;T_fee ҄lK$;#\. 
8H罏 +㏿ :FEt:i)͞=eϜ9## G`#{QP,}7777VV]] 񸺺 ҇`B{{{>~o\W1t({^7&M2[s { Oy\#?>~੮}\yLMϥ'O E<~*s GuSqzS9yP~sˏKMꩺ_OiTԓӧ]$O'sOc) Q9{S{s3MϯMϭV۬ mt3]AC!-#Z|dIr"(nCGн?M1=j+E/\,Lvvv;i~>_*d2GyŁ LSt4j=Z @K=%=FPRB@5<zaSoIDATVA~wJ1&'9aB;w|/8Bqqr#LH46=-m'ϲ1xZ&p%C![wRSS5Mppmrd???k.# G@Z#ﮰÃF#h=B  6/_|~5"TJh :`E`mc'Y!t:D-1LMIIH$[+00ٻ#0! oΎ Fdg8u/FBAoo˗ܹ"O*!GPےE*o[YD"]":A<& |n#FjI>>> BߣH$#<|[`⊊7n@eF{AmH/G0%0>XfQ>xDDt:Pך5kl*GWUU1{8r(+Ϗؖ8 -m?~ٳׯ_#G 9j fftUDDt:H888VD\.HBYYYN#""yeG0z1 VY(|. |VD"]":Aؖ8£sm>WT|)#<-Ӵqj?Z#`BA &{^(,DADt:G:5xp>>Xg m'-%C!s49=??_ ɩ9ro#o#h] pH<<<#0L惆 mCۄă!t:D3ѓV/RD"J)N#G@=jaڔ9s8YPx  $6?8}KGh)onr>xJU |Pch.z|Azp2] qRkEZbvr­V4`COU+_O_Ǫb8G vvdВ GRy6rD7'''[sI=#pR:  Di%tM9'аo 켣u^^yR|~ێ\`='LGNu|}¶l|PeEt8%jars Z#egQi)cP7z5=nC tC;2vTx!sr,<~\I8šɟL z+r{EjrD{~2=8i%__CxCF2r1eS!o_ɨYy%M5[Cz䐅˽C6<:`#g/H9EᣚAM͍p<&4r9F#bcLZGb%z9BK<L>gІo-)o_w; Eo(d`\C?=pbm?}=oQGxMuql"y'F`Ӷ;:xA^&nܲTO1@?ޘ*߼s֜]DІwͦ~O]q7uWdh/Z.v!u~}ktb;>C%XXֱ"moZ6mek%d"!1&؀ dt"cND&N+Y)S:c}`e[ѱ e `B3 ;buB– _] ڝ8D2j!oNUx76XA31!|0Qu?BI0qP-~#Y+[ fǐ-;?`$)P{|$S9G"Ycy^E,nST#K6ܐJ5=0pI$@-$M}I6ܸǑmx#Q@@YJ5ussHd2ۇ,%]@/F8e Rmf~S5wA{6pw A7E!u׹=|&}hC}nSןN) :`ݙwȸ^xQ;r?ogWv}{?Vil5ڐB wO=dۙ[s)6qBqt>XxvjxDz$ݜ^u/_R>ݯ%5dh/X;l̪ qϷT3]i aV#{S~~Cܓ_V)iZ?w7& GE~&|Hf&ޘBRz wȟ8h5z@(ւ%9YqpUxD{$m ٲ#@}&Nݞs z o.<| c"pe k)Kc׍Nqs:qŚ^=wdkS8SyK6x>n]R/Is^fv /H^7q+v睄WSRyѱFcH<Zy~޾.n^~+nڡXu#87I X' h%CKW+OGX)s΃g%A8BM$8$<)=$tZBM~~~؄9akj${h/M 앒.Fb' _\))0&Hl#i GS ,Z`%}hS[F6mP6|ثڸC;zٚ'0$lkۜ!z OcEKE$|pڌ9N}޶Ez캭|[shCmap@?oam:,[wA9vE _c'Ҧנa0%c&s}E:UhCd)A^)O?o*tc*[E Ok"iۮ<)siGaYO?M6WC_ T?# >NwȔϛlPom9mL,_  8At #&'g7MAC#,[)_w3d#9Wl\.j/ۿ⋳kCn2ؠ̀ۇ*Ǝp#$o~ cMM=f͙?*`8djÊu[lb> `bSJI=Z&/F8b ָAxZXXA+G0v=#|ȠëSluR=\ޖ'q]qcWW7q}UKu'oS%_nm>ׇ/:i'8ªMt>@ކ.)>5 E;,]Hb;rwCt{Պ P+u"FtʮjW3KqS+0o89'G+uu{8j.i盭~3`pHr /ՁC;Km#q!f}rީ ۵lu" ŗlc8_ ?~S G(<GZv&L=z&lu3{d>Gy oj0jB8!i?b8D3۷u| )]#)v+&= 4 ʌ+7O|fpnojCbHȗsvwĐ/T|oبqoQs '+j'Tms_3Y\޾uB/`_ *^iӖpT$y␴Hrub$CF!Tok#]>fT3G]"OC0IbJcWL%{UC޵{oOzz[O_ 6Glޞ۱S>f'@תH9rL5~6O=OR^QG0X8uCKWq |sV@_|8--{!jCu2w_/L 
E_,t9\$fuc(6eu1oĔ_{l}HMLbēKVj!_}wh׾;nYOR‡0'zmu׻<޾w~G8G`YVpH~$hA"$.yq$IA$a!H陝-j555AUUUQ)SSN%rIUꏕ\X<<=7zzJ{柾n ,ୖ&X!:$,":d t<\r]{7t/;t SZk!pAP,)}8r(fT[[ꁨS~~ A}1|+@hG0,/qsG-]?ű A,3ja".bC2~|%+D0 CvHΡq ?,]VDp CD"Mpz8r(f,`ή"KI> r;P#A#> Cʲ|'|DDbީlvt:L>2G{G!%e~GGl\\<*7X(hբ 9PWWgkAǾ$od>H*eVކGx(>xjZ=|.#|p/mc35MHdK4!z ~;j) \|Ku:!9y<), C'Du:Gܠ0 !ZcȹwCJT7_g<h:[CLs+e75cȡS_ᐦ0•CCH؛CGl\r?5y{{+J9ۥ$Kk6/%M]`\ HJ6~='RZ?1[b^>H+uG'~ >K[RIƴx| ͻ͟HdKd ^YWOfҋNuX|p|q:&GPO9O62|>Hl[vG^զ9^6A>v鵎իdZ"t`A8Bvu[`PO{M:hȈ1 \)?wO?w<}h5XCnMU|׫F}3l9 A3}%s-^|c ci2ۿZcM$xp%2H|i3C]:=ts,s |+7Ĭt#\)[A+Aްyҁ%>?xi|j^S|33siBݑ럅~*ED;ۅˉCV)qA>lwYM<}z 9]q}_ 6j%AMz&NObUb7JcY-s W[w#4D8%$xXOgA  1F|=?YtxY_| &79cXrūZ9"C9dܤm9'7"?ݸ-1D !{UJ?m ,,s/ϰ1;9!j&yA!g⒛#W0D]]]q1Vwm #\5|1#8>BIg [LX`G}S 䃪ڴmG6ǵG&*w~/x'OL8xƔxEsgђrΨ"xȖUjCTίGǬA|ꍩ79g~׻Qzў޾P'f~ G؝_|ݣ_>t݊w̞nԸ~^Ϟ p)0}C⒕m;$q b՛Wn}LBb2}h3'Ys^kp˜q4w'~#.%a-x]H]E$kdd֟B 7r_͋EB?7r̄uIsI4 c0x52#eW 1w}W%RlU$qBژuV+2rf;l칋Ղ?!=Iʔy ܕ:R  nS<)sҸ s mY_~z{ aM#40ƨw|=14z8dh0ɷ {uTb/;uR{ ,Oڱtce>R&d"Vv\]L6jʍ0Ͽp \!OQjjA%J{Ae`|b!¬!t Z`~)ibE=Ybf-F -% ;rf9Q|_ܖC ZhFL]pJKܚ=p-;w>Ԟ^&d\/[py]27mo`2,@Ƶ,.wȸ.p5xv%c/ ǟ{%W|%+=nq?w;Tu~Q76¸Vfqw)>]~I _G..u\vqϷ^ROg9C[D"])‹/nݞyQpB=Pa,.oV (,^xz=wߞvv%u$7#\!$H46iǶ|GԐ?hhgk]9PE5ޘ4sda8!/@'^|uqȊ5Z^_tFn)?eww=b>^ G ƸXGb11I偏OC]^-֭NT8cgo̍"Kq({O\Db+O8C"π1)v5{@`6r0¾C>@PWUU C  p6T*s!Hg^4y1<)Ig]C'T ,FX-F6G & >9f4#Oi/5Nm_oj:ҕr2g.q^#5wr+p鿐q} |2'O+(Ci!s::{ ;m; i0W6+Ae /NNAÜ^sgXd.qͽ N"[l;89oz^qh5hڒ1#YԐ=cBc͜HdKd F-S498BuGؒp=MN<:::ppK_)=N -"EG 184?`oaGoE-h!j! nNqUs`i48rȌ9 sR HKK@:oD&!G^6{j?P\`bo9vb? 
\@OFJV3vPؐh8}Ϥχç.O6kА,Ǖ P}F  P19f4#\q6>uѦЮA:9w)8uAm\oܜM܃ףJГ&Q66nrhӎ] |)(w>(߼~OO::8]^F8šM;8uT۬}DT Tn!6g9\Pvίw#a4h)>8D0v”)u/ݐ|F14s"q1G]K;w%l$l.IE~p#@q $UYmڴS[.tu[){PY2zǓ8EӯSAU(Regs:NEaF}*!ërR~p!5@℠.FZP1~FO!;6;>1Gx~SO!۲ڴm!cw>E6ŐEL#@.'Ơ(T%$d{ a$'vvoB0ٺpk:k]G8FCDޠb߈H,_ͩcB1#DFF EP ## Jd2T*A"""BBB8-Fv1$5[V~FU¸6/J{$ǿpyJ^tuէP=@R:u~w7ߡ82_KVà0>5Ͻ'(RsEހ?W2qL׉wu^>/ٷfa 5?ٽ@t۶ MS͜HK4!z rbJG_/' &NB9$>?##}wpElKgGݔY!:F!a,C[)냆|_WEO8dͦv>?K'_ {xO9g #sDQ fn1:C6$5g\46:Ǐ\ьo 8ZR9quQw pȚM3w , F6ҝ:99=+ hr9#<^T*" /Fq49)%F%rHr}r>T0yC=ZoNqp_~-Ku*5`Y]r܉ڒSnZ\Iz{?=Ni9'+nAC1K4cHŕ{Ԯ(z )t^)>S!:Yy; />}Xuws7_zo.+\<aFqt$ŵPl G GlJVbChggFDVj᎕.F8wE#<{ dgGXљ*G8š􎝺Kwqš#GHKKI9BJJ!5]g>;w[nv'@ NN=WCs9o޾➇N] H8eO5%9cׁMs\ˉC;gEWu-F8sK10جCtF=FsbccI1#@\ MJii)r(kTPPPTT~Ņ(iO=2[JY(P  3P8]y}P%s'#U܆__ A /F0]9BCC#=X!|ྎek?{-N5_0Zn+c!B9|>,% !-_˼CN5C!JMM5#T*&bD҅#RaaX,F)77ÃE`݋*/5g~Kٛ+l# ukeHjM`5d\2a".ACrd#VY8??_,s0#G0#hރAx"~ήU`R(Xtբpdooos>jM86>Xk(,ē2C!'%C!t3 ZQLLLhh(r(\ .77 {1 ֽjF8B}}O: r Cʲ|'%N$%C!t#r>fa???<"G@ү4\`BBBD%W-G+ asLr#}`]&kHlj!t:|DOFrooo6.,g[qxbDŻDt:b 1g_Cdddtt4o}}]CCr\aXUU)---b,xjѦ8mm`p=(>HV/c"ѰXO$%C!tZ>o3#D5_7;;"[# G@blPV&/FBzV-Gx$ĭ z95;Dg jNlTpI:#|>z=C6h.A؈C̹1>>>44_`oFCGq_rZ$GaJT*e#0/W-G>9M8',O|".ACPnյ9/9= "[d*,,tqqa#\q¢ |\hkAx[Hyp#>xƢǬo".AظC{W*2 3r(MIRl4\\\,FZ U6Am_VFb^$Xiщ O$fa"EM ]":A~[Vy-xK%r8H$B Apuuŏq1NXYP9_hꕪ*qsA3oH,D"]":;tޡܹgL1r3L49%%%&)%H$SNQDQaQ"hRaab""""(wk>;z_8׼yǴm)..G@>$UUYe m d-z'G %'''&&#c817A&Jزe˦M6Bn{5b!bh}mcccLX8E3VmjjPORm`io2B+9ZfRTĭ,G09}As={ڵa];|J]r566RAst]t8>2@9#Gb L9K8FA%pE(333>>VUTTHU~"Gp8d&9jݑTǁIRBɛ9)_YYY5Gd3g6ô8SO<Kmmm?r}ҝ!)!qqqyC#444 m0KѨd:j<&#QH2Bk(>4 ):%\$A 1N: _iii?tR_ GtOκ1KǿC x+ ,9Dʾ,udjp 8Я"Gh)M(6@- TRw4iҤEsTAܯh6~d[J|eIʇ (`W=\ߒ:ӆϮ}/+ ``05,/ѨhRRR222\߱P<9~8mhI{۷A;#  hڵÇ8xF|V]|9BBĉyq+4MMM 89 JKKSh%3(???>> }Ν;g4iyС }2E`GGGWWӧ':;;W%\mpaÆ=3P|A̡ΝKoR+;~a\[gΜ7n߾~EC (K.E8B]]^礸8vGGpNLTPI6-A 'SN8q eA[I$eUI\%Y֭[{1^}Z 6L>s>(Fí<|p:xcVVVb4u=k&j ]!dgg4#`_F9=WVKjBOE&? 
:¤e\ٳ[o{Ck֬>|8}ںuk@@;@6>644AW W$IA8 2yfTDKLLTPJ9Xݪ@Fq̘1w}wgg'FAq={v@@+0mmm>,MGPGM8RD4e>jW)lpq* GJPy 6`(gy[n1iy-D  gj6 G@>RTYYhx^~a Np%xK /Vpp#ܡaÆ7y4&6ѣBXXqXII R ᪗kѢE!#* vr=Qc\jo&Ie˖r-0Ȩ~!%o/~_QQa@@@hRJUv&??^ۋ]MMMAAAM4IUL>&4W�}Ǐ?y򤢇1w &!,Xj+ LuYWjjjRRRj4+::z{{t ?{JIGABQX)={;wTpqr#͛Gυ:woMlnnhx,jzT~~>?h4MMMXJ#@VEEEPPh1no!6P6eme?̚5k׮uӍ>gXW^!'[Ԗ-[z}RRROOӟw ڭ&I;ή2>kb)B844\ - ?D_Ϋ$8z״> )Yya}@PSSSpp0M&Q7HWvW3+;=6T+ZV~϶هTA+Kmsrr mmmd`́#HЯg%(( CвBstsJ#J u7***<< ᄸ=effJ dF8Rvv6kfpp+'''22 N8@~+11>~ީ`(<0,~EGGp***R'G8!DբE~n'4ph@QQeff&''cMz + !si$'y:A|n ϭBCCqBsH bp5q@@@vv6~Zh?l9ҦӱGBҊL;F^@fHKKC iEEE@@"| GAG]ZPOBO*[(}!w(33_ܧF[xȐVJpp55?*rsscbb0뒷P#dee#@RL#x222Y'DPކRv4* hP1q$+ K85@n5^ڃ^###Q&BBBdQDEp8#ȫVFCCP#'JKK=¹[ʈ`G$ (###11QK;J,#_8(W!!!h!IHHH##@XdB_Ձ#[=O쇧°GG***z=XJ241zx gDBxUEot555p;1^Cqq1RhGK؂*]2T&VVTQQATȲ2oK{{{cbbP9(T[~TqOF$GKVt+ 2$$Dko^QQ?uСtO=6kAཀ?cJ-Oi O8*5i$%#@Vwwwtt4BSRRe? rrrdKٞ/N,V\\\zz:Rhmpܡޠ ($$DA}n pMxp I|???W1G0?f*8KIIIrV8 I`i#%Iiii(99ٌTVV_ZZ1eUZ655U%:;;CBBPWIܳ(L4 K)pX6InF:䐻qܷwkVs FD}Uijz}CC\`Rʋ8¿-Gl€J+$#OC8@~,kG<`v*77WyI3Ksuwwʕr! ) 4VBsܚ \JMMLMp'DmTPP)˯Ԥqco#_qPZ-V8徬}AjʕYf _v dO˓%"'%%)7!88IW9NGZ[[5Mww7B}yu nJl}JrvE˳+o ݴi>HwR J=\(+%"""t# cDI*---;;#x@,+A>yr:_utt|ĈǏWݫ?a֬Yo-!Oj@ 㡆Ԥ$.$h蔛Lu8BV|q<V. 
GDAGh3td9b} w?;nvcǎ$?={W_пhȐ!?}n6x?O}ݓ'OF ,222HLLD,n-Z(44ꖟ\}w pTwwwXXXdd )"===HH_ iwQy'ŽyNgŽZV1ݻwO09oRRRdɄȫdRTTꚁ#((*k kС*k #/`iua#deeY G1g {ܴ[Ǐpwk̙SZl =]]]t KIIP\ " ;Wnnnxx8E͌pth4`If1GPhfi 39`477[nuww3fĈy}ۣM#N|oooq-466r>}:!RPW&5VNNNch?I_#A;+eJJק`| d3@ ZF$&&b|h4-r#X& \H.7o6>׬Y 馛fϞmǎn:ݻǐQF9쀼DJ=k딆#df#I&;̨(ZU\\O{֒׬<Ǯ E!Q Ϝ,w}wuuFٵk}for&L0@N7qDŝ$E؝RpѢE*(FjGQ[__O|u+**ݥf_'v2dCZC=bdL+++}YX#` yLWu ɟ_}'hHb a'7 F,477x8.Z6ުi`_PP8I줵,[;d q Ps@y#>`<>hȐ!̠ϟ?/SֶmxxWO> AH;3&L0YnmmUj +(i8~ @ "P2uW kl㣦BCCm1h ěhrJr8ۣ͛xxK42KNNp@9J0wfޝƪGI&eggc$G0yb#%8*!>B?##c%JMMΰ& M+ `RII }0i`(V``m<^Z A: {K<Ԝ6?&jD 6Hd86iB<kѣ8%*++6@rRIS F}WJJ ,dŬ,eJrŽqff&5ᤳy1#]+::~d#:;;5Mss'Mww7ͦ& rxx8hqHЌ3XQɶ ( d;Z8j9v;%vT2Dv;jd";xNN:Ö\ۦDקү0?4GFWp|OmZaa!mfΜ9~xq&MR>9777..@ee,9sz.gg\o߾vьF7~+(Nc) Do?#: krfc@QC8qٳNrEQ(AAgptntV;:4d:pÇiK3;C{ 97bC'|"lZ`0x#z/IHH # 볖psYsY2$#ر& <쉎GG Nۉ\$#7sQ{ysy#{ϟ?~h,Cm2?tV;j׮]:s[Oinٲ%$$dG6>~ p#܈ vO'b@A믿g8BdddIIZ+++ E]9OdK?3-e;vطoMggϞu>k& :J`5QdjO%#0Gܹ~0G(8Jemvfmjkk;;;O>M3giӦ+V56>p#+8D >NOXj$Sd1sړ`RALFh5NQl.鬥ȑ#gΜ# Ar2F6nX__o0? kwNfDm6o~Iws;::o= 7ޠ. &aĉdu7bC| mqmmm:N=QdQb e d]&#ȓi2[>Q͇#ءA-r rK dn߾}ǎ#RJ訍7644cA(u֗_~Y}3g$K@:thGGI?pn| 'AoV@"44XśdY#n,Y=9ܝ~zn9JrЦC9BVV@e{1ZՍ7nŊ W_}EoRՒpCA ">z#$'''%%{"r4SR2MĨG@ X#+4S{{{VV;o6667̛7Ϝ#p#+8>Oѣ:V&#M6hnrS&E%6Y]МO*8hb&FsUD N6k&paȐ!.>FWp9|l5.@ jWJ,BSSw((=GTPO|<ڜ;&F61괳n_%`^VvPXXh#|yA+::(p#܈ A\Oldҥ vGL r4SE2MĈoIG@#;G@ Xu+\MN ke#&N8ȊÍp#+8>O,2J" vGL111 #9re\r41l#IXt#,GmsALXE!z3FFO}gg UUUT_AV?|2$#lt.#MJF#O]~aܹ| aag_7bCD5k\wus"""u233i_= ?q)9Я~|4t9MHy(SWrW^y6GF'}>;vq鴈r+G@U-ZD L 1GXP|eLA#'#PVBnsGFWp|}rW0pp>%%%]]]}F#mĿL 3G@s>NnG@#71bv#O]_rq,F6'<̼y}} Cppp\\V+44|7$ep41RR RG@ X.N] FF`rA,n1_!2qDU>Ѵi$UUUZ655 ktuuvwwH|Jo#G@#w61L2O%`e=u%C9ɹ!, p#܈ O|s kH׀|$#l&'&FJJF(~8J*,sqB K >{87bC9TZmMMcbb233x^לU=sMH(.8J*ԕKn1_! |OxTTT"GXTTTpp`ڍh##t:8 zxq41r)UoMё|tt#p#+8>O~p#3444&&6޼1...:t(^ݜ w}|s41:bt#BO]9Gp}n|'#>#Я^XUDqqqq/lηMĻ>8yXt#KG,:گn1_! 
| jJJJZm^^v8.h4r.9x1G@#{HF#{O]A¾p#܈ O|"+=h4&$$bKLE9ɃAp@A9r41@2F%#pT[v Kl} t#p#'=}555iii8 (** ypY9:Qs>\+G&F-jhb'G@ XZ9BVV#p#+8>Ob= G =v‚1:t>1؜O&F'#ț3GTi OnÍp#+8c>QOx$68TDDD\\m 69qp#Gp>2pCs.5e\rkbT&FbGqN]5"@% 7>q# x266SAA6 ]h~ n׀|\r41rU\ Xf'9BaapFCs ===IONNFIEajFCGP\2–{h.nlzd.۲sˮCN%#Xm杝<7eroKFذQuwu*QqVr~Gm}Tt9NvRC5}d2? G΍[w7 +66RU6_YtHՎG!g9|bm&XѼ'MKK H Y<:p@jjF JIIILL8#t:8<^xG5}6iCm쪫}YB/ JLاb(e˾cW V{Ï,vC?/9_ܜ7m_}ՃucYzw "͚EGTqMx}qۚ:|/˘^E{6aziRz&km/SW󗔑 9s^KvU#1{s~#c 5*nu㋯>b]F?'N@RYLx5r#]#F=>p 7gR FɢE2338_pHߒikÏ!w[B%]6dkf9_geF>+3 Ų˼'^  ֭7nOdd vUUUxhwoX />/ ~tooĪ/K6is\D#|:";ߺwc>_6[1G৉/({sfgGYOx&FVіo-r#SkExWf*'===UZZjFn>m}dkwoX^q#=#hfL}!#lk5?1y3GE{`?Ox~ƝL~ht8'3c~ޞ-MJI1Qce841i8A3?XaC/Nzsd#c=*s O$Smk>Ҥ|h++Dا_ݟI2E}aYYe?(G#}Bq s W{᪉ASW9B p]Qo?婿&lA?Gp٣OĎE d[ZΞCO]ǖo7/?B[#G?Nכid?OHjF4>#Lpo|CDo=F111>>>lO@W6Nĕ\Jx#WtadtO5[wy>_櫙VSaaEMk3/>8**>̚=9k?[GD>:*j,?>KV>_3T ;!߭aˤ{zLdF?6Z)1租]ϏyI2ܢo ZPfeWU.OqKڸyWvO9/mn0k֬niE}/Goj4[ou]]]>Bbgdd` [=<\voXrkwuUld}#DF#,Ō{O'4 p=?6/_t u#,,^b[Ky :.5ًǐd<>rFW9s$ u̅t \3%l.s.x,{yVv}A}Nޢ?s?|ny\f'7ٟI^k_ưWg[IN>eZ7o(ϋjb$vOAMٰ¯WM9sj]-Xz#dL˝"ہv8¦wss/ 6r,:s_^r ;y+j8B%G--j)>bdWnoEqqfyO]L>ف q:+1o=8a)<񼢯'xMl~`i^WLsꆃw{IQsFVg 9 ٿKCHh\5.n}O,XV8BEԳ"&GxhÆtyK^z¹~7fw3>#+F<>2Ӽ%fϽZsMޢUqMhw7I9zJCf^ϽjHk_Lyw|8ƺ4=k [MՔFq#O~6wɫLO'2 M\3 d5%]hVbGeGRzll,fn id5gaɴY 4}7A9hMk>f)3.v;I>a==8j3.y r7k YSg=rXsisL sӞqْG9wnB3¯G8!s J3_cDٹjRkv~..XDK6mʼnU4Ҝe866VlXY7ޝИ'j+Gnm ~n֦NXsc yEvJ'<>N>gvhڜ[~wEPrM Q)i2|>U!G yK[om0Iتs7.+gon{ý՛5Mlz?2MP2GejਜyKiC}@Aqǎ03'fSW\qEEAEɯY-]&gfSs|>x !ʟ\8|1+ 3ے6R$h8BE'ߟͿt~n[b;}UW 2SII+1G_O<"'|䮪6OAXGfXQ؋_.+. 
86hg 7'.Weee{kkk9 [)}{͵Z6eҷtkwfD25=k1#Lf/X~sPbu U;:I""}zbY X\ sisOYҝwɞ/_|ǎN#d-q#ל\ńՕ)|cϔ#|y?({ɧG`Ŀk xc>geSg.ꯏ-ޞXLMa,5Zv ixh9.\51< 9b 6\HK0;2 B[?w>ˢ䩹8E+)鋬v}m--F.fx~0[#7A8[v_0$$V"G8싯꺀F1*9+(ڹ=ׅEyvnCchÖqGX*p9f'ڿ}+o6>OmʷG{ 2fܘگ葑Qr%) ^64f/!)h 4GȞeI #?򧳋&`5X4"?rI*rGX aV\r~1}8UW?bqW H|WaJlah;{sIOfY5sNG)qĥ&.^23H+qƭ\sid/M7nl4YZ-X2)I ǟ|b][6 j˚4?>zhs6ϥ>-I SxxI<v2/zarmw#\vՃ>8¶lqᄘ#Ь4ga 7 1DZ%QMS'Mv©c#.杇XސGWh3rGH}^Is>W#/2IFkּ⫏<{#8d#d bxq~:{żM&#G6#߸moE۵9j&#OUWvn[X8Ba^G(~M[W)&(9r¹ݸu]а(b 7͞d΂_]gli\>KݿrEC/o 1=ɂgy~y  ?R2%NCYL"w#|:+fysSg_F !CL58ڢs 9sE8-Wj3/OzF;mVs[MUTKjjƜŴo \~O#dծW4<HMMTORҫ1*o AaD&\yujDŽ_s2j<1LpE&o/j# A5dn'p ѻZr=e7 i.eC8B,fjw_,\u[Qė߼ᦛKֲ!5}ܢXKs1Ps暌OU[ZY#<66\xn4k>Lߐ6 nKa'z*/5)8杝𚒓/8ib$|p9O]Yt*[W; .-;Zoo=J|Al&prT[ LT|:s:)6Q\ ;7Էs{scOJG,k`߷ѯ Gp-r¥e>`cdŹ_.0{ƽ46{|6m7.Yr]5jiHS02 $Z_|զ /'O^~Gs%/lv'75HQ̍[9˜XGƎ|\R?̞+qъ*OŋGDD1_ C3p|UrTk2sv.Z횾IOVS?1gі>}uwגX}rHUc5#_.[U2WBEViX(2 ;||Cw~ꌹGk=JǴY @qͤǚǣ DEs͍~ P\[[zq` eI(5 w`_T1Gp_sv}hU8]7}s?/<4;.r˦._<p~/d@Mśn}'tvnQ qyÝG8®KKr6lHoM9xG౉]/'Kto:5`B 9B1@?T5[.[xny#|.41GG^89AŲ&/p#Gdws#76^0̞E%ku7f>cO#4rf']WFaqʪ?qs r9Baa5`70GpA""oް wuoo=s#ٌX[~>%2CF\0$-\7L"fD#=y !;O(H޸a|f6XIoHrEIsO-~~'YqWD#~VIIIwh|u# B2[PY.]E?uǝ%d~{3/YuM7Ӎ CQOMLbÅKW{SJS|E{H~[*'v©~1q߬ݨgmLf(Ѱ!w#{|_}#ǵ.{E>o8i̐Ӄs HF#'>Eך-&#d#x^>>'2vxG8A\_4:@Kw&#.,йQSWrG#ƍߖo(9nLpFi!WmNMiƧs!+ī G5 A|e $#~.9A&F%#4`;Ɲ3qdxhEQmkFWp|8P\sr5s\Vq>9Rtt9arujUȩ+I|_p#܈ O|dpӞp~vɚP#k2+>6q8kbd&#(Q2sN]V衍bG@W87!p|*\$#H]d.ߜCT= əa#9N]5)#Gʒ0n1_! |bOxHFP G0)a#HWOdHFpk 0T*g2gGP٩m^sJ^`\s#p#+8>Ob d9P \ss>mVB=<8uȑA\qUz Xf'~5aщ~ p#܈ O|s HFP GkTߜϓ6TG&Fmb$\qUC9Baa?$n1_! |s HFG9i46Cc}hbdo2_APVd'9ŲAsn|' 'GtYTS2w6#HĨMKF#ԕzK5F'(y2Bvv6Ɓ|8~S;М||rȑdpNuCp1Mp>7bC#أ⠠  B9hg3a5?Ad41SW(#_F'h4t2leOWhΧ|r5gqPV%`VsQ F'(_,МOsOMH(86eAGۊdž_ޜo5Ad41*EG8JN]y_ X6A^y97bC!`hW#8ZAhgq.#x4M\kb$vUЉ,+;Gu9گn1_! |GѠNy,&hΧ|qSNMX2O%`]MP2rBkq's 4osG@s>e7epV7촊c;u%8Jp#! 
|bG(**np}>_A9x_OĈ&F KqPV%`Ŏ#XGFWp|8L"7sn9G0s4Sts>\ GO(Ї6|rVn8>Oh & FM|JoG :vMVqPV%`W%%p#܈ OG9Y,)) %%v~9)8:PVmd65W#p#+8>O^{ȸ8{91cPG@#'#(#"J2,%#G(,,+ 9Fov#' 5XSBBBXXj+*#/lp4Sns>N9)8:rPV%`%#G0"GFWp|8#9BFFFPPPgg' :|m'֭[WSS#;G@#E71;[J)+vԦMdb G܍p'v}MO}*..jMMMϫ3R6lذm۶#G 5;q8&UWWܹh}n|'C>ZP\\LAyur8pq˖- A 8}t CWWٳg=F#G[__yf;(j6mںu]ںiP[N:tݻkkk7n;)QUUU۶mkii<}4AI9!KRFW$#xO}LV 0`t"/4#xi 7b| >NxG?-111((G/pa:;{,m:={@Tkk+>ǎ30G /_Mi{{;HzW5u޽{)4vvvvwwDAޓ'O>|~p_}}NxUGDFF^{xq?!ЌFϣG+}tiw̙~n'&MiA3nEwi0{w J:qӧҝu!++|r#"FW;+;}"h~NM`0šţ9@>ѥ-!q_/JwM4E(618 JM{j1$q{H >Bhڴi*XUUjSSSzhr/G](&dr%rRd rWNp5ٯhrn|J >65#Q<Ǿ]F6?HE p(1G(,,3lp/q# ܈ r|XkPSSjʰiRAA}_s @IKJJpt:8AuA|.  <kp@q#@AXX}2l A jjj  [8XY0޻G G2F6h R1G0A$ GPG[iiFAaErQXAA^L>p Ax$@mrzɡAfe; HxBCCz{{q[  HPd MHJ8:7T  lkEQMp Hxs 8E  ۄXXXh#ОY7  Ar8w˦&kGA񨨨HYg#G n䄄.1GNJJJLL $G&5x8Am9yE!/ Lͥ!jEMM A$!G0I#,88A`##Sdd~ :tE:F #H|rej~ Am>#,)??F  go  o}"\^CdBIP7 AtGʲVg222!%%. 06Wg  hs <Kn___||| A$m<CmN8Mw   8~ b3 AI:@o ==]ח#_ArBk39\VUU1PVV AI9PYY/y \;!!!44#t:p 6ALlpϩQyyy9|x|d rw<#FiM_㾀#XG n68&k;z.={~:<H#"8K umpSXvC.\]gEWQ:s#:wﻌt:/p=tqzjm]-jp5ݢk]ٵ]uq߉^{zumt]LWVUsBW]Gf׮kp5#TUUEEEUTT^# fѯ( E/#2}2D&,p}8%"8 Da-fm @mAc!E MC96 G~quG8|Sz{{A8` .((Ap8iOp k9ɺM@{8}qeMF&s2*9®]u,!rwHXGRSSz}fffoo/n8AfG df,B;?TSd-r$#0P[[yA[9>F >$G] s s;Ϊ,^dM%#0y'O#@n @mwppdSB}eyd Rąj_#8pT!jݑ`#TTa$q7644#@s F1777((Heffvwwc!g8ŕA"p8yE!8d&#ʛG! 
-Τ9B# j1#8x98lݺ5555..nOi/Gp)g^#4A#_P9/}x㍋/~daP |AN\br:hnnOLL4LSN9# s"-nѝwWa_] /ֵY>:[G{]@>x4n;l[Y<AAKQ1ڋh(FN 0٤:''_~ѣFz?3%?ݐ#x9м m@7p"!zή.{Ml׳{Cj+{]7F_`Br%DК#xYpn^# D1 #[19ZsMJJԩɓmvʕ 8<ݐ#9Q.d#akY #vt F >u!EyA_4s)F j1`#^pU19Y,c\p9Q#80p@DQru`b#TX1*rްZyyySWW}TN: Gx9v\Ά{ bá^pbr]r833G111ζX,8_Dn`[Pob:#Tk7n(++NLLNJJ*)){qƑ'Lr #(5#x\pAk15 B1^]544|ᇫV1cFJJJ=z=mڴf8r'G[cL j-F^ ἺXoZ,Xݷoߴ삂j95rNCbu>9'''O<9//NzƑ'3^Ax *F8b)F G—ʸ6r#`A>!4x1#\0M6͝;7##_~[ֹA/Gف9B#|m1BsNuuu'ONNN6Lm۶ݻwZZZVV֪Uۧ95<*FВ#h)Fxӓ40y#22T#pOKK1#\:G;Æ ,Xʕ+(((;wnzzz޽M&MAIII#E#`A\z#sb&= |ퟦ4,݊gP$G8r OXԹY ~(‘q[,?pӦMVTfffFFuֶm[qA=#5_Wuv9gu>#(9 1[>d'xc]2Gh&Աsol`% ×-^dx,FwI%_O?hM/4GL~|PռUĮށ &s_Rͼe-+Z|_myͥ9zA]z.oߋ9v9޽{ΝۥKV*&ʕ+&M:o߾M6';;;333%%%))o߾hCn222~_/_jժR1Qr1GAW1W9Bm<(e܄mQ![/;_^8ldA_]M ~hyBm[W(hȚk:"M^[^ԐgnjXbz9F#9[w44{T#;eԿewQVYgdzkr{J1¶aԨQڵkePw,(k![D>H=z0# OL3dee,_D|ˁ޸q "GPk`G#`A\{#x]E1rǟ?Y9\>*$JQr5ߑOXRV):^/{_|mw''#Wn_dFKt21OxW3Nzb+9Bּܑi-'#fkC65nB0G(,,$G9y]ߜ=?حa~GkB]'n*ڼԱS>?}7;oᠵ17&)yHLLzB oto f1"iPr {}>%2Lw|mٛa]1MhZ'O,(3G r#1BrG9|9prSּ`]'n:rI ^à WKj֯ng8`mEJuVySCx<A=~9QVRإ7~qEw,Fp#HӦMH@r[{1 G9:DP?&G8r_M~2A Ad[9*D|-DP?aQr% 3]Pb9(G`#`:S5_b˫w[MC9Bm1:D9Ksr_YZC1##qA/#x΢!5)#Th+FP^-F G N1 G ls.h-F_T`#f390|p9ߊj-F*^ brr!Gyz<*F85H1a;##?Ggr#`:9,d#brq N^y#_p>U@@Cr #8|>!̊#@rG$#kp#|AKbA-F8b(F G* G r9i/Fh @1Bbu1N#x#L&r?#\ B1)c#s#l%G{Hr #x\.lNA#xZ@@Ay89!]1•#4j-FA#l%Gl6#נYbcA-FobC`3Z'4V95#_1*Gп29@9+F8brG_݃brAC¨bڋ979K:/r_^# D1 #Ti*Fx9>9ߋjZp0Ppm19r#or} -F j1BE##?G`G#9v\*F8b!\@@qA@,F1BU ]1yߋ#fr#`n-FAk-Fs19sk9#Τw9g#X[PkbZsT~rzrlszPpPplJ#_Dhu1ɠ#TSp9>pK:_r#j1# p19r#orx#Zb#4h,FP#y19r#/\ ;9BH#2F1:GпA#ݻ99dr#(vJpsΑ#ø G r/^z'NTUUٳ> cڵk׾}68"#hޭ:G醵r #xT/9Y9L&9@>pGsұyF/9 GIl6#^'} <5GXK GaY\ܺ"^t֯_Or$Gpvaǵ]"55uѢE~ڵkѭoTl۶MPTTty>ܞtq@s|r%z}7n={699بr#9#GHhѢAn:a„ѣGO2ߓsssǶWb?y755M:U>P9f̘q޽ukwܙ.0iҤ3gΈw_>{lNfffMM2>֯]V~/~ Ub&Gc{?f^Ev2łݻW#fb 5l׀MK4bU|2o矷X,K.; "u6uW^|-[KHx" Gr3g>\vKJJm6o޼Ν;+aJKKEW^QrW80++K;xbbbFF車^Xz뭸 gMݼyo4HA3//OԩSrbуyYZw w.>+:GK{~a3:+zzV[^6s_!w 
5`1M׮]ŜvQǭ2b2l]gϞuM:|p||;#ӧO1sH5G0L>#4BN:q@Qs/#wrJѱoΜ9C#G=ztllK۴i3k,>>deٷo~oNfW~tұcǺ98\q)f7M"6mtLm'f.mz 8lz*:Ζ~b?-̙!'mZ AAW޹sgw---a PKtqt2L5X1ngeewQ0a#(=qDmO7C)ޑmhL9ot#8kFQb۶m111Ζ~&GsnDBpBg5k &G5#D` :u*mY[j%❸7ɚNjm߾GF޽/&oŬfܹSݰ/*=9s^vرCGtSj]666޲}pq\ŧ6Co}K>_^e}Xsg,Gp1MFF|GY4pMгgOe8XC~L>{Æ aʔ) G9豐#Df >sW#G8p *0`m^={Vjʕ.>^^^ >"JsonҥKĢ++Vnv6m $_[[+{עc/Kwԩr^zE{\a3D?YA97W+Ŷp#F9i&LC=b;[F[D#[.66VPp-VUQj" 9M9kHtujllt+-DS_477Wt]|f6mԾ8q)mo:W8[^-+S7:cٱ $~lcUTTxz ##rVnny֮]* #t׾@@ObӧO^^Kgr#`A>Ayt69|W[[nݺ+Vlܸu Zz:NDH /Zrva]bN}aǵ_!As 1T N M_-T[QՓlgBH\(I-Gh^Ӻz$_r59{F%#hti(w\uҭWϚ BB{h?BCdp7bгZ"$yl?5k90p /n޼d.z G ˗/O2رc.!š5k^~er#&9͛7]|||#GwUUU[C+M~o-Vo9 V GnrO[T|90dU8G J"d#@!(&Pݰ~ ܽnƮ́[Ձ|oz{hoCjbUqxn?^("i \w{ݞa-Hmnã6\̋)臡v8g[#Dt`(x:Tx@blsmƶͮ[vmsVuzy{hiλCUCY˟#.(!=ƀ\~uЇqYU6;K{tB\0mv{0COq9(5N G(B#0B"GZ#V>yoIENDB`dub-1.40.0/bin/000077500000000000000000000000001477246567400131445ustar00rootroot00000000000000dub-1.40.0/bin/libcurl.dll000066400000000000000000012530001477246567400152760ustar00rootroot00000000000000MZ@ !L!This program cannot be run in DOS mode. $PEL P# 2R@P$k Pc`$e\.textT02`P`.datatP6@`.rdata `8@`@.eh_fram0@0@.bssH@`.edatacP @0@.idata`@0.CRT,&@0.tls (@0.rsrc*@0.reloc$&0@0BS$#$ۣ,E)k(E)kt 1[Ã[ÐVST$$uy@)k)g)k@)k$ӃD$D$$0E)k  uѡ4E)k$O#[^ ud5g)kX9$փD$\$$0E)k u1ۡ4E)k4E)k4E)ku0E)k&)ktT$(D$T$T$ $Ѓ @)k[^ 14,E)k$s tA(E)k$` Ã9wt9v4$@"(E)k,E)k4E)k10E)k[^ fD$)k$)k4E)k!$!D$)k$)k!4E)k',҉\$Ӊ|$$ljl$(͉t$ \P(ku]@)kt|?l$D$<$ ƃ\$\P(kt$ |$$l$(,Ðt&Cw1l$\$<$3 1믍t&l$\$<$ ofl$D$<$h l$D$<$a l$D$<$ !l$\$<$. 
Ɖl$\$<$y D&l$\$<$ #l$D$<$& l$D$<$ zT$$A)ktL$(D$  &T$ T$ אpP(ktCU$`(kHg)ktD$`(k$Lg)k…t $pP(kfU]ÐUSEt4Ct$LP(kCCt$-C1[]Í&'USEt4Ct$LP(kCCt$C1[]Í&'UWVS,U$jlD$ D$$D$$Ft$LP(kFFt$.F/ɉ}t-UE ЃJt/u\ЃJuUD$$U~F tE 1,[^_]Ãu拃D$@`(k$D$ UD$D$%$`%G}FW|t :4WG:U#D$ $U@P(kt <$LP(kUI'UWVSMu  $d` $ M @xxED$ $g)kDEЋUDž\hE܅ɉl1|tt\3M MUD$T$$x}5Hh)kptt0Xˍt&É$օu-u }u⋝XED$D$<$M9M!уtn+ptMA(pQ,tyC\mu}ԉ!MA(A,x;lD$a(k$,$[^_]Ë1D$\$X|]Mۉhl<|%t{tM %d D$T$$x"!tT$D$4$AEEMUT$D$@ $}|p t799)ȋ|1ۉppx\$L$$91]UMU4$L$T$\M $*xT$Tt&S,C(ptp tq\ t/xD$ t$|$ $$11 11D$$up}t?|D$L$x $ ƄEM)ϋ|D$ D$L$M $ФUM$T$L$M $g*MT $IT*Dt&Dž\DžhDžl;hEb\1Ʌ@&lh $D$`(kT$D$ D$@Լ|D$ D$L$M $ϣMD$ D$`(kD$ $袣ED$E܉$EuUD$`(k<(kNlEMT$L(kU|$L$$MD$ T$D$?L$ ML$| $|D$ D$L$M $l$T$hTD$#Tp)dždž0`d$D$T$Bh&v1| ?4$|$^xhl4$T$L$g3 tZMptD$`(k$| hl)ptt&pt$D$T$v@D$$1:pt؃D$V`(k4$D$+ET$T*E7ED$x$g)kuEЋUԉx$0D$p`(k4$D$蚟iUSXg)kMbi)Éi[] UMbEM +E+MSi)ʍ[]ÍvUS UE] M9t))ˉUE]E5a(k [])ˉ]E5a(k []Í&UE]ÐUWV1S,zE1Au sv8tla(kutZ=D9uɉ%$tUBƉ$tUB4$tU,[^_]f몉'UWVSE܋U D$ ET$D$E$膸t{KtD$D$$!U E|$$T$D$ S҉EtD$$ME؅t1/<$G E]܅tUE؉D$$uًE1҅‰&UESEx []t&[]Ð&U]]uu CtD$D$$/nuFt2Ct]uE ]]cv]u]Í$V 4$LP(k빍t&'UE xtÉD$E$KÍt&UD$ &$kD$@&kD$&k$\Ít&UED$0,$kD$$_ US]$$_C[]ÐUWVSLE\ucXt8-tꍕt$T$ \$D$Fb(k$u4$D$ K$‹EHt$D$D$$6T$UD$$]$LP(k$QUJtD$$'Ut$\$D$ D$sb(k$DU1ǂ\L[^_]ËD$Xb(kD$E$eU\$D$ $D$B$딐UXu։}ω]ED$D$ 4$|$5ӉE~~D$QD$ 4$|$ӉEE\$D$b(kD$ D$ E$۪]u}]| rE--:-@-:--@| cwie+MD$<D$ ׉4$|$o\$D$b(kD$ kĉT$D$)D$ET$ D$ E$']u}]| = iˀe+MD$D$ $T$\$D$b(kD$ D$ET$D$ E$謩]u}] U8]ˉuƉ}~] )'\t$ |$$D$b(kD$$؋u]}]Ðt&|pwt$ |$$D$b(kD$ߨ؋u]}]=D=?f(|=?v%D$D$ $D$\t$ |$$D$b(kD$D$T$4؋u]}]Ðt&t$ |$D$b(kD$$؋u]}]Ðt$ |$D$b(kD$$ȧƍ t$ |$$D$b(kD$虧؋u]}]É%?D$fffD$ $D$\t$ |$D$b(kD$$D$T$4/t$ |$D$b(kD$$vUEݐǀpݐ`ǀtݐǀxݘǀ|]Í&U(uu ]]}V w1$c(k$T$t$|$ ݛt&]u}]Ív ]u}]f$T$t$|$ @ݛ]u}]Ë$T$t$|$ ݛ]u}]Ë$T$t$|$ ݛ]u}]Í&$T$t$|$ ݛ]u}]Ë$T$t$|$ mݛ]u}]É]u}]ÐUS]ǃh[]Ít&'UME U]ÍUME U]ÍUUEM ҉tpx @]Ã]fUUEM ҉|xx  ]Ã]fUWVS,E0h+`E+EEE5Dd(kptݖ߭p}E fElm߽PmTPh߭hm߽XmX\19hRغ*h)R)‹l;tLhL¦D `L|D hh1҃~Ⱥ*)R)‹<8ݝ(<$$\$ `L$\$`$݅(DLL\ + x|߭x}E5Hd(k}E fEm߾muLf߮\$߮x\$߮\$ ߮p\$$҅t]؍؁,1[^_]-ptj7A/i|$ݝ(L$ 
ډ$T$݅(#f}E fEm߽PmTPlh߭hD?t&\;TX;Pzt&ÀuG D$lc(k(ݝ($葺ˀ݅( #x|ݝ(T$D$ $|$$$`݅(d $ݝ(D$dD$ |$cD$T$ $T$C8݅(<;pB4$D$(c(k(e(,[^_]Ít&PTuDž`DždDž8Dž<@ptݝ(T$D$ $|$pX݅(pݝ(D$dD$ |$$$D$T$ $T$$@݅(D!J11Dž@DžD}ptE fEdm߽hm9`dptt+pt+hlEp}tlEh Nx|pthl@pt`dptӉtlph`dt1ptD$dD$ $T$$+$\$ $D$T$ hlMދaMԉ`GMʉX-MPMLtMËp`ML$MA$C$Y$G<DEE]8w -Ot G +G ƒ W$ƒ t GRW u 9}kWG G GW y$d(kI q jrI 1uUEuVq4I @uUEu%A4A Au9wAt&uEuQ8\A8uErQwA /$MDP(kD$@$@P(kMp@ QI AA7 wAjfQ M$DP(kMA I AQt?M$DP(kMAAUEu]t&Gtt$LP(kGGtG t$LP(kG GGtGt$LP(kGGw0tG,t$LP(kG,G0<cƒ00$t$t1fO t$G$Ot$GۋD$`d(kG $DžEt G D$e(kD$E$i} MG ftsB,|$D$le(kt$D$ $ 1B}G G,υ vU\ WVS`)čD$ED$ $D$D$u^t>GvPW MT$GD$E $ы9?u $1\ [^_]f}DžDžDžD$E \$$9utK ڍ= v͋t$ $}\ [^_]Ë?$T=USÃ@ t $LP(kCt $LP(kC t $LP(kCt $LP(kCt $LP(kC,t $LP(kC(t $LP(k$LP(k[]Ð&Uf(kWf(kVS\HpXDʋPu܋p ]؋X$UP0Һf(kDf(kEօۉUЋPtEf(k:.EԶf(kuҾf(kEf(kDEԠf(k@8f(k]܉L$(M؉T$ Uԅf(kDE\$ ]Љ|$t$D$$E\$L$T$D$$f(k/\[^_]úf(kU($]ӉuƉ}$9v]1u}])É\$4$H]u}]ÍvUVSE 0E@ t/$gV ҉t'$V9r9҃[^]ËV 1ۅu1t&'UWVSl]$D$@$@P(k}Džf(k ;#tdD$ $tD$ $t$T$D$g(kvtD$:1$Ftn$LP(kDžl[^_]ÃDžD$D$g(k$YOw$g(kDžD$f(k$\A0닾f(ktf(k $f(kDP(kA ЃD$f(k$DžA$1;.É$DP(kBt6Dž&$DP(kAuʍT$D$g(k$ Dž"$DP(kBtDž D$ D$$DžAQ @8($f(kDP(kAPBU Bu RM B4FD$C$tCV^T$$JS /F 4D$$ p4C$LP(kCt $LP(kCt $LP(kC t $LP(kC t $LP(kC(t $LP(kC,t $LP(kދACACA C ACACACACA C A$C$A(C(A,C,A0C0A4C4A8C8A.BEޅt $LP(k$DP(kAA$yT$$g(ktl@8M$DP(kB K|< '< _v f=fEt$D$Tg(k$T^DžU T$$g(kUA(t $LP(k4$DP(kB(euED$?$B9M+ED$/D$ $+]C$sPP(kB pMt$$L$(B Dw11< t&Mt$D$ g(k $$]DžL$$g(k/B,t $LP(k4$DP(kA,1Ҁ8"D$ D$Љ$mBJUD$/$.D$$g(ktjB t $LP(k4$DP(kA D$<$}É ؋@@eHT$DP(kA4$DP(kBR$LP(kDžxHXD$;<@@'UWVS$t&9D9t7U=U؃j tH Htօۉlju2[^_]Ít&ǍUVSut3Ft $LP(kt ۉuuLP(k[^][^]Í&UWVS,E`tZX tStM1'D$<$}4$ELP(kEtDžtủ<$1,[^_]Ív,1[^_]ÍUWVS,E uE䋆|PXt4$ND$D$4$Y`|tC u7}tF`t;Pt$uE ,[^_](Y|$$g(k g)kE EED$D$$ h(kD$ }u'D$D$h(k<$E4$LP(kűED$h(ku؉$E܅|D$h(k4$D$St&EtXt$džXD$D$4$Wu؋E܅E$fD$ h(k<$EaEE$EU#Pu6uuW uBub@@1f@A]@A]Í@ f@A]@멍'U]1ۉutE~= tHutu3tt2<؋u]]ËutԋuÍ&'UE0E ]Ð&US]E D$$D$?t ǃ[]Ít&'U8U]1ۉu}r rtJM ]1ɃzX1 ދBz ~7B\$UD$E$访UZ)rz ؋u]}]ÍvJD$UL$M $qUBTrPZ tBB 
zHB@MrzBD؃BXBPBT]u}]BB ]u}]91ivUWVS,:u :tuÀ=Hh)ku&t $ׅuD$ $EtY9=Hh)kr 9t $ׅu)ލF$~PP(ktU|$\$$E@UD2,[^_]D$ $ֽuD$$w1v'U8uƉ]}E HM܋Q(I,UME11;Mdž}tCt&(EtREtD} t> t9džCCU Uti4$聞]u}]Ít&Љ+EU|!+uvudžD$h(k$Nf]1u}]Ðt1t~tAdž;UE܋PH !ȃEptE!ȃdt$1ɉE!ȃ|e2D$T$ D$h(k$Mt&=t&t&'U8}}]u7<$Mۉ<htoC\t!E$LP(kE$LP(kC\HtktRE u7Eu)uFHNLC S$)ڃ~kE E ]u}]Ít&C S$C(S,F8V<뚋C S$C(S,F8V<$BC8u$wC8av|wD$,i(k4$FE 4y'U1U4A@tMtdttt&]Ë0U ]Ë0U ]Ív'U8]ӉuΉ}NjBX9=v1D$D$Di(k<$ F]u}]Í&@ҋKT9CЋT$U)M$HP(kUMtLȉCTS`t$$T$8CTsXCT]1u}]ËCTˍD$ti(k<$`ERUWVSÃu 2[t+34$蔸|$4$D$ܹtۃ[^_]É|$D$$i(k賹[^_]ÍvB>u UD$L$$i(krɅÐ&U(]]}} u3ǃptjtR8@@t1ptZD$$}u]u}]Ð1]u}]Ë]1u}]É$d,t։} u]}]]7&UWVSu 4$4NjEu +[t$|$t$$bt[^_]Ít&1[^_]ÍUX]ӉuƉ}8BptC1]u}]ËE uC ̋tD$i(k<$#uً HEEEȋMЉD$Eȍ $D$i(kD$@D$ ỦMH\Mԉ $MỦL$ML$ Mԉ$D$L$y)U MЋt$LP(kUUM i(kT$$i(kɹi(kDD$tMЉE$LP(kUиC i(kU 4$T$4i(kE tM i(kDƾi(kD$t$<$L$ D$j(k{FS 1u}C]1]Ëi(kDƾi(k뱋EL$M 4$D$ L$Ri(knD$i(k<$LTEEMЉEȋ &U(uu]}Bcv%<t*u]}Ѓ]1]u}]Ë1t5tt#+ Iu ǃ<1t=tt%+$uǃ<t& t$LP(kǃH$DP(kHtf)D$8j(k$D$?+utHtH$DP(kt+ǃw&adZU8uE u}U]}E䋆 tu$t(u(ǃ0t"ut}M䍓 E ]}u]Nǃ1ǃ0]u}]9aM䍓$D$<$HH‹$T$輱Xǃ뜐&UWVS,E} }EEuE Hh)ku t $ӅuHh)ktyD$|$$i(kѱtmEFN u t <,u<,u t$Ӆuuf,1[^_]ÍD$|$$i(kHtYFED$j(k$GA|f1} |$D$E$-uMEǀ<ED$|$$i(kӰ%EN~FED$`j(k$@Eǀ<E1N} |$D$E$FuuE$' UD$ $@P(kÍvUWVSLM] U 0MԋMIMЋ4}A@MԍE|$ D$T$L$$6ME+E9G)‰E̋)1F(V,U 9M2UȉV@EԋFVDVFPEЉVHV )ȋMVTUǃ`t$kFVF N\FXL[^_]ǃxUt $LP(kM $LP(kL[^_]ÍMԿ@}@F}E$L$|$UuUEEvM̉\$D$$L$ MԉUȉL$7UȋMuAU 9MC7aUFX7;&EẺT$ \$D$D$$l7MUgU8]]uu}S9Ћs.t$LP(k$LP(k]u}]Åt K<9v?xx 6щM 9Mr<tI|$$HP(kt/S{‹E t$$D$ 1s]u}]ËTf<$PP(k빐t&UVS uED$E $Skt.$}\$4$D$$ELP(kE [^]Ët $LP(k4$LP(k [^]Ít&'UWVS} ]<$|$$D$W=Hh)ku t $ׅuD$ $質tWƋE)މ$谪9Ɖs 09w&E|$$D$t߃[^_]f1[^_]ÍD$ $HuD$$4냍v'U(]É}ωuLǀxt 1tM t t=t t4D$j(k$t4D$j(kD$j(k$x1]u}]Í&D$j(k<$uǃxUWVS,UHh)k]D$:$U>x@u$ӅuU`t$UD$$j(kT$Ou{UUHUUlt$UD$ $j(kT$u'UUT$U D$ k(k$ut&v 1,[^_]ËUD$;$@tԍx@tTu} t;$ׅuu맋UD$$j(kT$\U'u;kG:D$AUD$ $j(kT$9U UWVS|]EĉD$D$aU܋M؋ED$$k(kzlҋUċ(kL$D$?T$$UȉD$ENjL(kT$ Ủ4$D$ T$UЉT$7M@t$t;tE D$Gk(k$0|[^_]ËE D$yk(k$|[^_]ËU D$_k(k$|[^_]f$D$k(kE3E|[^_]fUWVSuE EEE}UȋH4$UE u0t $LP(k$DP(kxE@ 
@$@(@,4@<^rD$k(k$ItLt$LP(kdžLUD$ |$4$T$EDEudž\t$LP(kdž\PtD$k(k$# dž\xtD$l(k$x ED$l(k$YD$.l(k$AD$Hl(k$)sD$l(kD$Hl(k$)Ei(k`t$LP(kdž`D$j(k$t4-D$U$萤U8@<t f t fP~ L$i(kk(k$l(kҺk(kDET$ UD$T$b`EEЁĬ[^_]Ë DEEk(k{Ek(kg<s(k[t&4E_<[ $D$:U菢UtdtU$LP(kUddž`M9\ HT$U $M艢U$E UЉ$EUȉ$EUETE)‰$PP(kMEc+Uȉ$UT$UȉT$UEET$$T$ǡEEEЋMU$E)ȍDM+EL$D$虡LtHt $LP(kEǃLHEȋUD$$l(kT$ktwlt-UD$l(k$Z Pt x t6 ҃U,t&dž$LP(k$fE}3D$l(k$/1҅k(kDЋẺPEw( tt'E1D$Am(k$! LEk(kt* t rt eD$ $@P(kE?|$D$m(k$E'EUȉ$pMȉL$D$E$EЋEЅEi(ktP\i(kE‰EȋEi(kt8tPi(kEEli(kU̅DJ}Ћ`DȅDN:EtLE‰EEi(ktXEi(kEEETi(kDЋHL$$M|$ }T$UEi(kEEL$|$EET$ D$m(kD$8ED$4EȉD$0ED$,EЉD$(ED$E߉D$E$:EЋTt $LP(kEdžTX`D$D$$Y34d@K|Kt+KxdClC|Chu {p< @QA< dcC`ChE  u T$)׉4$|$ D$CHddCHCƉCLEC`ESC!T$D$$躊CSC(S, U1L[^_]< ChPShDuH[D$ L$$j(kӉu  Y u9u t5D$fq(kD$m(k$    D$fq(kD$[l(k$} LJCpD$ t$u D$4$CpCH}CpClst>1} 4@<BC`L[^_]ËC#CC`bTu C 64@>&HD$o(k$@džǃ,CCC(C,C(S, |MxE |$ D$Cx$D$D$L[^_]ËE 4 } |$CpD$ D$$D$U $b_ +M JD$p(k$. 8t $LP(kRD$L$$n(kxzt&D$+q(kD$[l(k$u džD$q(k$/E |$D$]p(k $D$ ED$荄Eu u t܆ɉttK~C H}  ~u udž|$0E 6t&SC>t&D$ D$$Ƌ t[9|W~KD$p(k?$5D$+q(kD$m(k$iq9w"sSs(S,t$T$$\&} 9QF@t ǃCCC(C,ǃD$$Hl(kD$r=Hh)ktˆỦ$U̅ua,t\D$t$$l(k$uED$t$$q(kt6ǃt댃뀋E ǃ$裳롐D$t$$q(k訂tǃ7fD$t$$q(kxǃD$E D$wp(k $D$ ED$Eu dž E$LP(k|$D$mp(k$萀} LJ ǃLJ M E HD$0p(k$gU ǂxǃPD$p(k$ LJD$t$$q(kӀǃ^D$$q(kD$蛀 p@uftHh)k$҅uD$t$$q(k?ǃD$t$$q(knǃ} LJt$T$ D$p(k$} LJD$8q(k$k} LJD$lq(k$ID$$Am(kD$j"BRJЀ wJЀ vuD$ D$$z33 ǃf؋UCh{p} ؋4C`Cdnt&u A} 0D$8j(k$D$ `t&D$ $q(kD$~vD$$ r(kD$}@u$}||$E䋃$/҉XMD$t$$q(kt}EǃUC`Cdp(kuuD$ Ủ4${U̅xpD$ 4$|{Ǹ t$D$p(k$EË~tD$ t$$q(k|;ǃ T$D$8j(k$D$$r(kD$4|t(D$$,r(kD$|t =-,c#} 4@><$D$zqMCd9tD$D$$u dT$D$D$ D$ `$D$輡D$$D$t$$q(k{ǃu D$T$4$ǃD$ $@r(kD$zn8$DP(kE +D$t$$q(kzD$t$$q(kytOǃD$ 4$ixU̅\ D$LP(kbD$ t$$q(kyuAU8]Éut,,L$T$$t$ut$ ЉƉ]u]1w틃(D$D$MD$ Rs(k$x(MD$D$ E $D$w]u]É'UVS0EU u]E94t$u$҉t$ uJPT$t$T‰1x0[^]ËU7tQt 0[^]Ð00[^]ÐUED$ D$E D$E$Li)ktU1Í&i)kɉ¸Q3'8EÍvUWVS<]uE8@$E 94)9}F}UT$@}@FUHPT$ D$$T$|$D$E$vRE 1ҹ@94}‹|$T$$D;EOEJPD$ ED$x ><[1^_]ËE<[^_]ËE|$T$$u1ǃ><[^_]Ív'Uuu]E}U  s(ks(ktsHD$L$T$ D$ s(kD$<$!Ӂ tډÀDڍPDۃ1)$7uvEMU $]u}]UWVSLE 
8ED$E$3E$Esuԉ E9td)ƍED$E\$ t$D$E $Nu9tċE t$D$<$D$EMЉD$ iEM9uEԉMЉ$LP(kMЃL[^_]ÍvUVS ]ED$ E D$@4$D$!20t tUt/4$s=?v'D$D$ t$D$$ [^]Ƅ Ƅĉt$ D$+s(kD$$ǃU8uuE}} ]u E$rt98;]u}]uj @iT$ Ut$D$$=9$D$Qs(k]u}]Åt8t$D$D$ E$=9]t$ D$$D$0s(k+]u}]É4$PP(kǸMt$<$L$qǃ]1u}]Ð|$$HP(k¸Mt$UЉL$$qU1gft4$PP(k¸=M$t$UL$pU1 U(Euu]}D$ED$ D$E 0$Li)kËEt؋u]}]Ívi)k=3'tB4$D$%D$gs(kD$$_E8؋u]}]ËEQ뒍t&U(Euu]}D$ED$ D$E 0$Xi)kËEt؋u]}]Ívi)k=3'tB4$D$eD$xs(kD$$E7؋u]}]ËE1QfU8UET$UT$ UT$U 94$҉T$UEÍ'U8]]utVtLED$ E D$4$D$,-4$D$/4$zRkD$$@P(k‰,t)$U蘵D$ t$D$$~U,iǃ0wt >ǃ8Uԋ t8P1ǃDUԍED$ D$D$$81}t$AQ$A)NjP}$&Q$&;E4$LP(kL1[^_]ËUЋ*>!4$P|/UD$ D$t$$>8Eǃ0D$P4$D$bZ=UD$w(k$ǃD,$hw(kDP(k08U-Uȋz RUD$tw(k$ylUD$jw(k$\EG4$LP(kE,EUD$jw(k$v'UVSut teǃD$w(k$O@$Ndž fKL$ D$x(k$D$ $(y(ktFdždždD$t$D$ D$D$ $!8t!t$D$D$ D$D$ $=D$t$D$ D$L$fD$ x(k$:,: : fF@D$ D$t(kD$0$D$L$$:D$x(kD$L$D$ $WD$t(kD$0 $D$ 43D$hx(kD$0 $D$D$D$D$ D$t(kD$0 $D$ }L$4$衾\$ D$x(k$D$\$4$\L1󫋅L$D$4$D$ N,&'U(]ۃuAƉ}:Tt:ÉD$D$Hy(k$mu dT]u}]Éd]ʋu}]&U]utD8t:TɃA9t C ]u]]u]@USÃu @t88t.D$D$Py(k$}u ǃd[]Ã[]-'UWVS, E2 džXtfX~1 t&tO[9utA8* dž\D$D$t(k$迏ud,[^_]à DU1dž\`0,džXD$D$Xy(k$ cdždTXw ($dž\&,[1^_],1[^_]ËUZ u?!ÃtfT$D$ $C,[^_]džd1(džX,[^_]a8D$du(kD$$Ddžd5v'USÃR u`uv,8 T$uZD$~y(k$~u ǃd1[]ǃd [ ][1]ZD$qy(k뤃[]t&tT$D$iy(kydy(k_y(kD؍&'USÃ@ t<8t2D$D$y(k$蠌u ǃd[]Ã[]USÃx t []뀋8tD$D$du(k$<u ǃd[]Ð&U8uƉ]}tJtV t 1t]u}]Í&]u}]v]u}]/s(k1ȋL$ D$T$$y(kqtSD$D$t(k$QtE<$LP(kEE$LP(kE3džd$t <$LP(k R?ID$/<$>1<$DP(kt$E>M|/t $D$/>Mt@ϸy(kdy(kۻ_y(kD&'UWVSL] }E0UЋUrUE^4$օR=LЋt}~ET$ UD$D$$u$?tL*[^_]ËV,E܋ U̍UT$UЉD$ t$Mĉ$褉UMā܆tnE܅uOUԋDЉUԋUi)kUD$y(k$D$s8L[^_]ÍE뼍&D$y(k $ U̸ǂdǃL[^_]Í}t&1ދUD$y(k$fUHu։}lj]0Eԋ4EЉD$y(k$EE̅tGt=83~8D$Tz(k$UED$E|$$ ]u}]ËED$ D$D$Eԉ$tv1ttD$z(k$ED$E|$$*1}D{t&$D$z(k] u}]É$D$z(k] u}]D$z(k$EYEt&'UWVS,D${(kE$D$$*:KE.u_udpu{LωL$ D$D$$yD$z(k$ ,[^_]ÍvEuE,[^_]Ut&~T$$EGED$`$E/E럍v'U]]u}E0Uulut$ ut$L$$UMUƋE=܆MtIutd!vfǃd]u}]É $ň]}Ɖu]Ít&D$y(k$[LJd]u}]f$(k=;ǃh!UdMYD$@(k<$ǃL: }Mt =B u1 BRMI(MMq,9 | 9E   MEqAD$t$ D$ (k $6 EMhtpUW}LMD$@(k $ǃdǃL ‰3= UTD$D$~(k$= X D$~(k $d=^! 
3=uS   u؉7v8 c=tD$D$@~(k $;=EM$PP(kME U"W ׁǑUu x@ <"u" "G=D$$ JG-cU10hu8tD$D$ |(k $[ǃduPC0D$|(k $E‰D$ǃd-cHtX U D$|(k$ǃHS=~\/1҉?,==EM$PP(kME= EU u8 t8UtU}uED$|(kM$=M xt $LP(kExW;=u=&D$x}(k<$1@t-D9 Ɛ=]D$ D${(k$u=N==2`M0B@v=UD$D$D{(k$ J8cv`r҉`9X,ǃdDD$D$Xy(k $V~=K18c\=LXtpD$CD${(k$Q=B ǃdfU@0Q(Xǃ\B;0X, =^nD$~(k$賽D$~(k $~ǃH:D$D$}(k<$ZǃdD$$~(k $6ugD$}(k<$ED$ED$ED$ED$ED$ ED$D$ }(k$U008UĉT$$Eԋu܋MD$D}(k(kVlL$MD$E‹UċL(k $D$?T$$UȉD$ T$ ỦT$UЉT$uD$ D$$t$蚼D$D$|(k$谻M D$D$|(k $菻BRu0uPgǃ`4ǃ\ǃXXD${(k $ă(kD$z:ǃd+}cMt Mt$ uD$ (kEL$4$UyEUE֋EMt$ D$!(kD$ $PFs(kD${(k $D‰D$y{ǃdlD$$hZǃh߿E9+D$D$$)щ!ȃt[uT$ L$D$u~(kƌ4$D$@UMt$D$ D$$UML$MT$ $X.nD$D${(k $xBǃd3D$ ~(k<$追'Ɛ9D$}(k1<$葽MA LJǃduD$D$i(kE4$tE=&8D$t(k $D$w|ǃd mED$ D$D$$EAcceEpt-rEangeEs: bEytesE 蠸D$~(k $衼ǃdt$D$t(k $>wǀpǃd`X\Xă(kD$D${(k $v?uT$L$U4$ME,U؋M(UD$$|(k$>ӼvMt$ uD$ (kEL$4$U觻EUEj\t,ǃ\DD$|(k $D$"vǃd N U0D$D$|{(k$'fE$LP(kUD$g|(k$8$u(MD$$}(kD$@ $EED$ ED$ED$ED$ED$ED$ GUt$$8x(t $LP(kEU(D$D$S|(k$*(Mlpǃ\ǃXD$}(k1<$UB LJǃd,| 1MD$(k $M(Mptr)ȃt:B<(G0 wSB)ʉ(*0 w39ȉuND$D$ $h$D${(kC$$cD$D$|(k $sxt $LP(kEǃd xE8/ID$D$N|(k $NsuY(t $LP(kEU(D$D$S|(k$U(Mlǃd NE$LP(k&'Uh]]uE }E31tBpta u w Lu U ]u}]ÍE܉D$D$$AsM܅tՃpudtU $T$uLutt&L #fp.+fD$D$D$D$D$D$ D$$E-LE&GG MԅUD$T$$!ŰUE t;uЉΉ]ĉӉ Hh)k‰$хu<-u]3u׉ڋuЋ]čED$D$$(!M9M !уM̅7+EŨMԉA(EȉQ,Ủǃ<MA(A,t)G _iF8t͋@~vUFiE|ǃLE1E $D$QM̅MԋEȋỦEȋU؃t&'UH]Å҉u}u+ udǃǃ@1]u}]ÍED$$ t#4tщD$$`ǃ4빋O tD$D$D$D$D$D$ D$$*at&U]] uu\$4$uu ]u]Ë]1ҋu]UWVS,]E ǃL$Eo/u)3 t-}+}u,[^_]Í9u3FFD$D$4$"D$D$4$"D$D$4$"D$D$4$"ǃ@t @ E pNjE}E0,[^_]bf$x(k(t ؆$Ћ̆D$$T$xq̆x Ѓ3pІ}l$BЃ3I`:̆8$‹U܉$U܍D$PP(kĆ$U܉D$U܉D$$U܋t $LP(kǀU܉D$8(kD$$q$t.̆R T$؆<$T$Ѓ6FGdž3ІPH8@@dž$OĂ@ E}NdžD$/} $M5M؅x-@U܉$DP(kȆU~BD$ $@P(kE=@M؅?ІdžԆ$k؉MGM؅iȆtU܉$LP(kdžȆU܋ІU܉$ԆdžԆdžІU܃#pE$D$Odž3/džҸ늋}XI1ҸD$T(kD$$٭dž309t $MDP(kȆMtVȆtƉ$LP(kdžȆU܉M؉$DP(kĆU܋MAǀ0W&k8A8D$(kU܉$U܃3t&ǀȆtM؉$LP(kdžȆM؉ $LP(k Ȇt$LP(kdžȆІ$ԆdžԆdžІ}}B8@tBRb̆D$$T$k̆x džt&'UWVSLuED$ E D$<$D$ !%tDSD)f @Dž9tv)׍D$\$ |$04$D$.uHtt$|$D$T$ $D9ut&L[^_]ÍvUWVSlu}ЋɉMc} N } dž@džHdžPtU$LP(kU T$D$ D$$48/$UUEȉ$UxHҋU+Et|tsMȉP~(t8t؆U $ЋUdždž8t8E_t 
ǂ|$Zs|$D$4$$auȃ,[^_]Ð&USÃ$ׇD$$D$$4t D$$M0t D$$Mt$LP(kǃt$LP(kǃ t$LP(kǃ t$LP(kǃHt$LP(kǃHLt$LP(kǃLTt$LP(kǃTPt$LP(kǃPlt$LP(kǃlXt$LP(kǃX\t$LP(kǃ\`t$LP(kǃ`dt$LP(kǃdht$LP(kǃht$LP(kǃCxt$LP(kCxt$LP(kǃt$LP(kǃD$$?_D$$)_D$$_D$$^ǃǃǃǃt$LP(kǃ$$LP(k[]Ít&'UWVS$(kD$ t]1<.);Mv:_9]v&}9v$(kD$z u녃<1[^_]fE܋U؉L$M)‰T$$U UЋMԅt;Mtz.uE؉$* Ev'UVS1ۃuvpt$LP(kDŽp&uًTtPt $LP(kdžTdžP[^]ÐU庘WVS ] }((}puTuh1tft1&tMpptރ [^_]ËEǁqƀptf닍t&'UVSUE uT~ =ND$ $@P(ktD$4$@P(kts[^]Å 뷉$1LP(kfUVSutRt5N~1f9^$LP(kFLP(ku[^][^]Ð&US]g)kCX4(kP C@SCdP4(kChClC@ǃǃǃǃǃǃCHCLC0ǃ ǃǃǃǃǃǃl<ǃTC 8ǃhC$C(ǃǃ0ǃ4ǃ0ǃ`ǃ<ǃ@ǃǃ$(k2ǃǃǃǃǃ<ǃ<[]Ít&'U(D$]u}$@P(kǀ|@$~$PP(k$($ǃǃǃǃǃǃ̆ǃ,u4E]u}]Ív$LP(k]u}]ÿ@$5~t $LP(k$$LP(k뛍'UEP1t$6iÍUVS]3C8tD$4$C84$At džt$LP(kdž4@0t U $T$ЋC4t*D$D$(k4$dtt S4D$$4$uRdž4$W1[^]Ë$dž0(dž<$踅-v@0 1l&'UWVSÃ,ƋtH1҉]܉EE tU9X~P tQ0u䋁 $4$|$D$T$ ;E~]EU܃tut&}]t,UD$$tUE,[^_]E썴&UWVS,tH01ۋuY&tK9uUUtht@\$D$ (k<$D$ -UZ4B0Z4B0t t:,[^_]tB41B0D$D(k<$UӓUB4B0UWVS|H+D$$@P(kǀ4(kǀ0ǀ4@4ǀǀ(,8ǃv11$(X\<Bt$?dt$!%k1Q$!%kQ$!%k Q$!%kP҉t$DP(kn|xfC(C,H$=PCt$LP(kǀǀ4$PP(k4$PP(kCxT$D$(kt$ H$+Rt$D$(kD$ D$H$Dž. D$@<$D$? 
$Dž<$4$st$D$D$F|$$L$u/t&4@@D$(k4$8t-D$xt$$(k ǃD$@<$T$ L$D$(k<$ƅX t$LP(kǃ D$ D$|$$苜 t$LP(kǃD$ D$T$$8ǃ?[\D$:<$ZD$ D$$<$DP(k)4$LP(k4ǃǃf//t&D$y(k<$Z>/LD$/4$t&1/lj<$x|$4$D$DžDžy(kt&D$ D$L$$訚#$=<$LP(kaT$D$ (k<$/HD$$i(kD$LtHt$LP(kHLJL81?󫍽DžDž?󫍍tD$D$ $yƅtD$D$$HED$ D$T$$$ D$ D$T$$`D$(k$D$GD$ $:]D$(k$oz4$LP(k&1<$+4$t$D$F$/jt$ D$Å(kD$H$tRD$$(kD$DžDž(kfD$$(kD$dDž(k6D$$(kD$4Dž~(kD$$(kD$Dž(kDž(kE&|}:/<D$/$4t4OWׄtOˉ֍&Ë Ph)k$хu:t%t .uЋNjD$(k$'P8D$t$$(kqǃD$|$$+4@<uuǃ4r(kr(kǃ<$D$Ѕ(k}T$$Pǃ4D$|$$P D$q(k$}D$|$$R~/V1T$4$щL$>/~&!$<$T$D$QɋOKpGD$:$ƋH_QftB1Dž tBAuDD$ D$0(kD$ <$=(k(kDƒ(k(k|$ D$t$ L$|$D$T$8$9(kD$衭lLtHt $LP(kHǀLt!D$D$$}ƅt!D$D$$Lƅǃ4@@ t$DP(kf$DP(kZL`dhlptxP|$$记 i3ǃǃ~džx Hu džH4$~IG8WD$D$4$gD$D$4$/<hD$D$$$* N~D$ D$<$9:=D$D$O(k$fw$(k赌} ur40th)k$҈u捍_profGxyG $E >u$(k* t$LP(kD$t$$(kBaǃ4D$ D$D$$MǃǃǃD$]$M@~:D ى]Yǃ$(kDP(kt$(kDP(kqD$D$D$D$D$D$ D$$D$t$$Ɇ(kǃD$l(kD$$y(kl$Jf4S  ЋtxDž1tMY C4tP44B@Q@1ƒtI4$VF6t~4뱀~@0tF4D$ (kD$$w(4D$ D$D$0$749\9u ~08f9n9\NFD$$& tt 99D$$l9gwF4D$(kD$$uU$|$D$(kuD$4$t yA0t $LP(k<$txt$LP(kǁt$LP(kLJǃǃt~ t$LP(kLJ t$LP(kǁ  ǃ ǃBxt$LP(kAxsxx󥋽<$LJt$LP(kǃt$LP(kǃ t$LP(kǃ t$LP(kǃt$LP(kǃD$$3D$$3D$$3D$$3ǃǃǃǃt$LP(kǃ$LP(k K D$ A4D$P(kD$$Qr=džHqD$t$$І(k>$(k膂_|$$(k+D$$D$G $9$(kl<t :t$T$ D$$D$tHt<u%D$(kD$$ lC8닍fD$$%94D$4$u44R=oN 2=,N =XNǃ1niV=jt&=>$p} !kngoC0v]u}]Ð=Q'=('H t&=' U=' =' ='u10늃6N f@ ;:9>11+= = ^= =f =1$V:\)e    1Hf=' x='t&=' ?='61/=o' =c'v=\'m  =V'18=4'k=,' v{=)'; =*']u}];=}'D=v' E=q'3 =t'KK6t\D$D$$Bk{C`9P9HPPD$$^ksD$D$$jCPPCPt{CPt`t $iCP`PtP |@D$1$jM1F=Z= =G =611='='T C=' ='/,f-  *d +y>1 1\=u=N =ND =N1$==  =@ =1[ wxT7 P Q1\='  =' ='R1Kb\`">11=h'E~=f'?=g'D [611===|1u=N=N=NF1?=I' =5'=7'1$ w?qr1='='='GDEr>11d="' = '(=!'51='='='1<\='wy=W=1T3Z 052x11j=up=uG=u;V1+= ==1Kǃ11]u}]ֽ1<1ǃHǃi1O1@1&=?ǃ111d蘻ǃH1X}uEt]]E ]hǃ11hx1^10O1<@1011"11 1Pgǃl1111 11,dǃH1ǃg1@X18I11X311V1 >v;t|;pv9dt1pi1(Cg)k1@({1lV1T611>1/1؆ Tt'Pt$LP(kǃPǃT蒸ƋP&t1ht :(kDЍ1Ltp1,Z11D151&111 611H1311 i111L14m611W1(H1$,fx0t&V1111d-M.1 XǃH1ǃi16g tp!ƒP  ǃHd:14+6ǃH1ǃ ǃ11@2'ǃP10ǃ1x>11(buOǃL1K18<1-1ǃH1 Ȅ t1 t쉓L1Ȅ8t1 
Qt쉓P11y0kǃl1ZǃH1I=t!=/11P!1$)| ='t='O1='t='#6jD$̈(k4$_`1$1/=N't2=O'1M4t51111=&'t=''1x=i't=m'k1(d 8]uE ]}]Z=t!= 611`>11\=t=11 w(k1yt=14>11D='t='l}]u]E ]?='t='6Lt'Ht$LP(kǃHǃL襮ƋH='t=''=ut#=uV1V1x!t"b1D[1@L=t!=2>11$1$# JtK>11611 =Nt=N11cte1611<p=~Nt=NV1O1@D$Ј(k4$ `1$򍃬趬ǃ1ǃ1D$(k+$K1ҍfpt E‰$PP(k3pt tD$t$<$1 C&DD$Ո(k4$$`4$DP(kmD$ D$$ۈ(k蔿D$D$F D$ D$`$D$4$1LP(kǃ1ǃ1ǃP4(kǃ1ǃ4(kdǃ`?ǃ?X$ǃX$$CPfD$1$1蝪ǃd1聪ǃdt$ D$`$D$D$ D$D$$`Kv'UE UT$P$T$A ɃÍ&UM Uu )@t 9vuD$D$ $ 1 UWVS,} ]tEx1t  19‰։D$$StMuh$D$6tt LJx$D$]E ,[^_]t$ 19‰U]vLJt댍t&11]t&U(EtRvtB1ÃuD$D$D$ T$T$ $ ÉD$D$T$ T$T$ $聯D$느t&U(E]ut;p4H@u0t$\$L$ D$D$ (k$K]u]Ëΐ&UUS4@ t[]1[]ÍvUESt4R$t[]t&1[]Ít&'UEU t#4It4]At&1]ÍUEU t#4It4]At&1]ÍU(]] uu}t)t4@tr1]u}]Ð$D$耸4$Xu٥ $4$-u4@t\$4$Ѕt두džsvUWVSL]3D$4$4@@E ǃtFLt$LP(kǃLD$$M(k-wLFFH100EE[F(k{4|$L$T$ D$D$`(k4$HED$ED$ EԉD$C8$D$EC]\$D$(k4$t#\$D$(k$u߃,[^_]ÍvUWVS]E EED$ D$(k$D$(kUEE;UH$D$Ό(kD$j;D$$@P(kEtt :/EE4$>h)kUD$(k$D$6EEEEԅt $h)k}UtEEt E$h)k$D$D$D$D$D$D$ D$wǃČ[^_]þZEZ"UD$(kT$U$%:WD$H$ތ(kD$EU@B$DP(kUBD$?$!P$D$?UUtFUrUB8t puEpUt)UD$ D$t$$KUBtqEp 148W׋UD$ D$D$$M[KUB M uZEZD$(k<$pEPiUB tjPuXEp1ɿu$v48W׋UD$ D$D$$MrJUBMuD$(k<$蛯+D$(k<$胯1҅D$(k<$iUEpt8UD$ D$t$$I4$LP(kExr8@@UuD$ۍ(kD$(k$}6=h)kt$D$$EEEt$D$$׋T$h)kD$EUĉ$҅E5ME&$h)kUD$8(k$D$0EUuD$(kD$(k$5=h)kt$D$$׋D$$h)kEE 11.&UEԉD$D$B D$BD$ BD$BD$E$h)kE$'h)kUD$Z(k$D$0E&}Et$ED$$׋t'D$ET$Uĉ$҉Eu11ߋEԉD$E$h)kE11EEh)kED$E$D$ D$j(kD$$E/ED$ D$$D$/D$ D$o(kD$$/E$J1҃ƋE׍UT$h)kD$E$҅ELfEh)kD$ED$E$҅EEEEvUT$ @D$$D$/U1ҋEUD$ D$o(kD$$.u}UEUUJD$ D$q(kD$$.ED$ D$$D$|.D$ D$s(kD$$\.E$EPUu}Uv(k|UUT$U܉T$ T$U@$D$1ED$ ED$$D$-E܉$LP(kE1EU&Eh)k$D$ D$o(kD$$r-Ut$|$$yEh)k$ҋED$ED$E$h)kEEh)k$ҋEtD$$h)kED$E$h)kEE1&ƋE$h)kEh)k$ҋEh)k$ҋED$$h)kUD$ D$(k$D$+EEE1uD$(k<$訨?E@UH]Éu։}M=$=Ǹtz\$D$ D$<$<D$D$s<$ED$ M<EUԋ@;]ԉ4$Cډ\$D$^<$<M1]u}]Ít&'UE $t<$t<E U8E ]u}ttt}v]u}]Ít&U ?M<`(k#(kt$ L$|$4$D$D$x(kD$CCt$$D$D$D$ 'U1}$D$T$ UD$T$']u}]þ2J$(k(k\(kR(kHg(k>t(k4(k*(k a(kǎ(k E}V(kt'}+(kt}7(kt}G(kj(kEȋU3<$(k(k(k(k(k(kӎ(kvU3`(k렃`(kNfU8 $M܉]u}ljUiU ‰ƉU$UPP(kU؋M܉øtoL$ T$D$(k$_AU 
D3$T$UT$EUDUx\$$腢$LP(kxUt41]u}]Ë$xUщ'U8]]uƉ}׉M$wM$D$]u}] UWVS˃\EEUt@4$PP(kEE\$ D$(kD$ D$Eȉ$.@] EM}؅ɉE~)t$<$D$(kD$ ?;]u׋ED$(kD$ EȉD$E$+EMȋU$EE$LP(k\[^_]Í&Uuu]] }}7IƉD$ED$[PC $.:x]u}]Ðt&D$C $:|gEQō&t1D$D$_(k$U%E7]u}]i)kD$ (kD$Ǎu&7pT$$7D$I(kD$뗍 ~kUWVSLDE10 wR uBXCd1NB4Bp~ vL[^_]ÍvÃL[^_]B<BxEB<BxEB<B x1EB E0< wB qE]u$)UMD$(k $D$UB$:)@~^ M#9 UB$)UBBf$(UD$p(k$D$ ƉD$KEĉt$ D$b(kD$ $/EĀ8$U(kEF@$(MD$(k $D$F@$(D$3(kD$ D$ Eĉ$v/EĹ6(kU$EFPt#$^(PIFPt#$4(PI!FPt#$ (PIFPt#$'PIAFP t#$'PIFP$t#$'PIcFP(t#$b'PIF@,$4'PIEĉD$F@,U$ 'UE(kET$UD$E$](kK[FP t#$&PI FPt#$&PIFPt#$j&PI)FPt#$@&PIF@$&PIEĉD$F@U$%Ub(kET$UD$E$q(k)9t&FPt#$%PIFP t#$t%PIIFPt#$J%PIF@$%PIEĉD$F@U$$Uy(kET$UD$E$q(k3CUD$D$(k$fEĉ@-.MT$$D$*tEEĉD$F@U$I$Uy(kET$UD$E$h(kEĉD$F@ U$$Uy(kET$UD$E$f(kBqEĉD$F@U$#Uy(kET$UD$E$O(kEĉD$F@U$w#UE(kET$UD$E$O(kyEĉD$F@U$1#UE(kET$UD$E$M(kp EĉD$F@U$"UE(kET$UD$E$K(k*EĉD$F@U$"UE(kET$UD$E$I(k)EĉD$F@U$_"Ub(kET$UD$E$h(kEĉD$F@U$"Ub(kET$UD$E$f(kXEĉD$F@U$!Ub(kET$UD$E$Q(k!EĉD$F@ U$!Ub(kET$UD$E$O(kEĉD$F@(U$G!UE(kET$UD$E$X(kEĉD$F@$U$!UE(kET$UD$E$S(k@WEĉD$F@ U$ UE(kET$UD$E$Q(kt&E]u$g }}ąU3MD$8(k $D$eUB$UBt$D$H(k$4LD$V(k$ƀ%D$ D$D$$4$肀UB<$D$EtD$(k$D$ UB$ <$SUBUD$(k$bD$$(k$m EM$ME9UD$E$ʃMpE$UE`(k$E]]ĉD$D$ $y]D$Eĉ$$$EuU EԉT$$bNjEԅZ$~$9tFU3D$e(k$9 E=`(ktet$+v1΋UftD$X(k$D$ USB$)UBM$T$U3MD$(k $D$ EԉD$q EBEtD$(k$D$H USB$$4$}_D$Ԕ(k$D$<$RUB $ƉTXE34$t$ D$(k$D$ EԋUD$(kD$$_EPE1>$@D$D$\(k$DUEt'$|$D$Eԉ$|E8M3D$(k $ED$c(k1$ 4$1t$ D$$(k$D$ 'UVS @)kutpt4`(kD$D‰$dg&苘${$$UE\$D$'$LP(ktƆD$@$tD$$uD$(k4$  @)k 1[^]ÍvUWVS|0RLP {$SBD$D$4$߳KQ8C $x'D$C $8fC=m= bT$$D$`(kDž#eT$ D$D$ĝ(k<$-|[^_]ËEDEljD$ L$D$$3xMDžwt&C C$>$>D$ɝ(k<$D$R SɍB>5DžCuC $uaDž<(k&CfD$ D$(k<$D$$$D$q(kD$D$ `(kDž<!D$D$4$(y$D$L(k4$4Džt&Dž<$KP@\"t&{DžNjSt$h<$hC2D$ D$D$!$Et7PTD$ D$ %- D‰D$C$p҉tS8t4T$$TD$%k $FoD$(k<$\$&Dž:&]Džtg!}CDži)kD$L(k4$D$Dž#uDžDž0C $gD$ǍD$$m;u4;)D$(k4$ D$$<D$|$$=X1ɉpCMCP%kDК%kD$d(k$DžDlDžD$$^D$(k<$D$,DžtɉtCK83*4tD$C$T$D$C$!Xh(kD$@(k<$DхDT$ D$'DžMD$%kD$C$5D$(k<$*+D$B(kDž $8rgD$$$qD$D$ D$D$$}0D$$(D$(k<$D$$Fwt&D$D$D$ C$sDD$̖(k$])D$T$ $xt2D$(k<$CT$ 
$Nu1ҋn]D$(k<$D$"(k$Dž;DD$D$ :(kD$D$ $DžD$L(k<$R'D$ț(k $(kD$(k<$DхDT$ D$bMNjC$|$$<t(D$$T$ D$(k $D$DžRDZ*J+V$h$Tt<$z $6D$$\$N $$ D$(k<$oD$D$D$ D$D$:(kD$ 3D$$D$(k<$$f $S D$$CD$(k<$$< $ D$('k$( mMҋED$L$$ D$̙(k<$(kEL$D$ <$|D$ (k4$D$(k$D$.(k<$C$ D$ $D (kD$K(k<$D‰D$h1D$@%kXD$C$ t$T$S $T$ЅC t$k C$H C $} CT$$D$D$D$$1ED$C $  D$$z D$(k$D$HDž# D$H(k<$\$D$(k<$ D T$ D$T$D$t D$ $< D$*(k<$$TD$t(k<$j?D$(k $/|$C $ t8C1D$(k<$ Dž D$$D$1(k$D$Dž#D$ $8 D$Ș(k<$\$D$Ț(k<$pED$h(k<$T$Q&$1D$(k<$'-D$$%D$(k<$L$D$ $4 $1҉k\$$D$D$$D$ N (\$$ $D$ $Q9|$$cD$('k$T$$D$ D$D$7D$ C $9 D$(k$SD$\(k$X&D$p(ktD$D(kgD$(k<$L$PD$(k<$ D$D(k<$$hvU1҅t xÍ&Uehk$gÍ'U(ED$ED$\$҉Ðt&U(}} ]]u<$Dt $D$ǃD4$tD]1u}]É4$t$D$$t$ |$$D$D(k]Bu}]É|$$D$'(kf]5u}]Ít&U]]u1Dt6D$$%D$~(D$p(k$D$]u]Ív6$D$(kD$]u]ÍUWVStJ1$t,$<$D$ku։<$k1[^_]Ã[^_]1ꍴ&'USE @P]C t%$C $C $dC Ct$C[]ÐUWVS]u vP\8d\B ~L4v`D$ 'D$D$$)s`pD$xt$PB X$XD$B ${vXi)kXt$$|$ D$ː(kD$1ۉ$\B $\B Ĭ[^_]Ít&t{1ud1D$(k$tfi)kdD$L(k$D$KwdD$(k$ t&d1D$˞(k$4v@ $\B E $D$ED$0E\$$)t[]ÐUVSD$$O@)k@@)klibcD@)kurl/H@)k7.28fL@)k.1N@)kf+tBN@)k )ÍO@)kQ\$4$D$ (kD$ @@)k[^]Ðt&N@)kƍt&UD$P$ A)k*P(k A)kP(kP(kÐUS$$^ƅt5D$\$$(g)k t$DP(k]1]Í&UE]뗐UWVS,}] wu $]p4$PP(ktMuEu1DE܉B;]uމ7,[^_]ÃEE9Ev eEU؉4$D$HP(kU؅t&ƍ>T$ D$(kD$$[딉4$1LP(k,[^_]1UE $D$ED$Í&'UWVS밉T$$ (k(E؉t$D$(k$^t?t E8Ct>rut`T$<$U(ƅ됍T$U$1(ӃDXtIukubDžƅ&D$?T$<$'ƅT$$(k't/ƅT$UD$?$Q'ƅT$$(k]'t<ƅ&$&uBL[^_]ÉT$$ (k'DE؉0$LP(kE$LP(k$(k tZD$ "(kD$)(k$+(kD$EtDžDž6$LP(kDž"US]ݓǃ܆ݓǃݓǃݓݓݓݛt $LP(kǃ1ǃǃǃƃ ƃ@ǃ<ǃp[]Í&U+SUt]M tl~P0@uD]t=} @#} "@} @u $d1[]Át2+[]Í&]tE - (w$8(kt&EtNjM (w$ܬ(kt&EtM 0w$(kt&t1q@1b1S 1D15l1&11H4(kDщ1݂1݂1݂1݂1݂1݂1݂1 ߪx@rߪpߪ1Mߪ1>ߪ1/ߪ1 p1<11111D$$_1111w1h1YT1J1;T1,11܆1$G]EK1tRU1V1S&?Dރt utuf[^]Ã1Ít& tXUV1S?Dވ t& ut$%@2@0 u[^]+Í'UXEuu ]}t$D$$L$ =)=N9 (kM݉|$ $(kDt$D$C(kD$ M‹)ЉMЉT$$UԉL$4$wL$t$M$UԋM9fU1:]u}]Ë t&E$D$ (k藫]u}]Ðt&$D$(kpE]*u}]Ðt& t U1]u}]ǃ/ UE]uǀdt 1]u]ÃHt닃htED$ D$D$$ЅtD$D$H(k$zA롍vtVD$$T$D$h(k$ƉD$]t$D$(k$AAvP4(ktD$(k$AD$D$<$뽐USU ]tDt@t+9~ v[]D$Ԯ(k$Z1ǃ9|D$(k$81ǃ뺍v'UWVSu>džE_*oE}5EuU ؉EEEE@WDEUEEE!Ѓt GHbEM܉L$D$ D$@4$D$QS8KMȃM 41@<t1҃xXED$D$@4$UMEUME1EEtEЉ 
4@>]1҉Éuu 90< u  uÃ;E)t$E܉D$ G\D$$D$E܍M܉L$ D$G\4$D$qD!V$|T$D$t(kU$U@V$0uUCT$D$(k$U)dž&S8K$&t2ƃ[^]Í&$D$(kۓ[^]fǃ$$Vu,u2 !$!([^]ÐD$$DpfD$$4pfU1]É'UWVSL]}E׉D$ E؉D$E D$ɳ(k$E D$ $Ut5E !$PP(k[U cU $LP(ku }ǃLtHt $LP(kU ǃLHT$D$ܳ(k$L܆=.=/2=-$D$ 5$L1[^_]ÍvXt ;TNTǃTt'Pt$LP(kǃPǃTH$DP(kPt ǃTWE$LP(kt&L[^_]ÍvH$DP(kEtD$ٳ(k$PE DU0Uĉ/mUD$?$rt E 0?t UD$/$t E 0UD$/$+} EPDU1Uĉ.@Mąt7t3}Ƌ} D$/<$uUċ}ƋE$EDM$PP(kMEL$4$MD$MEtCtÍKluEA)k1ݐt&˹uUS]u []Í&U tMtUtދMt׋A)k0uˋE$ uU LP(kPP(kUDP(kUHP(kU@P(k됍'A)kt A)ktUA)ku%pkA)ku A)ki)kv ԉ'U( A)ku$TuE$uE1Ð&U+UtED$E $D$xÍUS]t%Ct+@t$ttj][]+[]f@t&Ctt]$ftC믍vuC~u멐D$$Lttԋ$JC늍vUEt]]Í&'UE UBt]ǂp]fU]]}{uAuJ1tftC(C,]}]Ív{CtffUED$E D$E$Q UVSuD$$@P(k$PP(kǃt$$ǃtǃ`t1T$ `T$@4$D$&`Xt$XtnHt$DP(kHtQǃLPt$DP(kPt*ǃT@D$@$gtt PX$ǃXt$LP(kǃHt$LP(kǃHPt$LP(kǃP$ $1LP(k[^]É$ǃt^t&$8ǃ|ǍUWVS]t$LP(kǃǃt$LP(kǃ($p 1tft4$hG1tft$4ǃǃ[^_]Ð)ƃ(Rt&f=iƃhkt&fVUWVS,]U Ѓ؃σ ȃ 1 Ш҉UtsǃUU܉UU@@F|$ T$UT$$:zut 9u&U܉M؉$LP(kM؃,[^_]})uېM؉t$$HP(kM؅t'U䉃t$$T$MM뜋$LP(kǃy U+S4]tWPtSED$$)t_UUT$UD$ET$ U $T$BuuUM4[]É$D$(kPx4[]Ðt&D$"(k$0xÉ'U+S4]EtfPtbED$$)UUT$UD$ET$ U $T$sUt?u,u(Qf4[]É$D$(kw4[]ËM4[]Ít&47[]Ðt&D$"(k$Pw멐UWVSÍllj1tft(kމ<*tLE_}vt(   E7E.1} $ % t&  t&uFCDFF;uԋMԋv AuAC<A@AAAA]ԃC   wt& juԋFC<F@FF0]ԃC-& 0 Jt&uԃF X WUԋMԋBBCDST$ |$D$$UYU? C? 
g&PvJPЀ vBPv:C^MDD$D$ $CC SDST$ |$D$$U؉MLXU؋Mԅu<[^_]Ð MytMT$$U؉L$-YU؅uM)C )ЅC ]CQ$D$H(kW M D PD\ǃC fCC@fC uYC ǃvMytMT$$U؉L$WUǃ$PP(kCXt&C 4D$ D$$D$V Evs$UWVS]u uoD$4$tu1[^_]Í&E{8Cu(E8tD$4${tE[^_]ËE띐UWVS,UNƉD$ ED$$D$s<$9u%t'u,,[^_]Ðt&fڍ&UVSÃ0D$EE$g)kED$ED$ D$D$$8i)ku#]H' te[^]i)k֍vU(]Éu։}ft"ftl]1u}]Ít&CD$ .T$D$$wtC$Hi)k]u}]ÍCD$ .T$D$$Awul&U8Muu]}t1~totrt 1t>M tjM |$ t$QT$$Q)ÉD‹]u}]Ë11u뜋9OʲEEUE 냍UMVS XXX XXXXX X$X(fP,@@XXX XXXXX X$X(fP,<p[^]ÍUH]]u}3 E T$Dž|$$0i)k t>i)k$D$Zl|$D$(k4$D$ P]u}]Ðt&hD$E DžT$$4i)k uzbu1Dg)k$D$k|$D$4(k4$D$ P{htRf$]u}]i)k$D$wk|$4$D$ (kD$ O]u}]fDg)k$D$VtS~ttP SPwgS @KT$ $D$td\$D$$Љ‹EtVp0t;te[^_]ÍC fS(e[^_]Ðt&CD$CD$$di)k‹E 땍t&'UWVS @,MDžH4E LD$ XD$@T$$e[^_]ÉL$$Ui)kMԉ$D$ExYD$D$ D$Ǻ(kD$Eԉ$=Uȍe[^_]ËE̍U$ND} MuED$Uȉ$=Uȉ$UẺ$D$UUԉ$i)kb$(D$ET$ U$T$舝019UԉL$D$(k$AE̍U1E؉$D$BXUD$"(k$D$[AUЉ$T$1n&щ‹EЉ0s<$D$UU U$PP(ktU@ @PÍ'UVS]u $ PP(ktQUS t*tQVpPVtVBFC [^]É@BCٍt&1[^]Í&@PB밉C먍U]] uutQN tJ9tUCSPCt_SPD$E$VCC$LP(kn ]u]ËCtC@묐t&CFF UVS]u u*t$C$D$)C u]LP(k[^][^]Ít&'UE]@ Ðt&U U ]M}]u}q 1tL9tpBrpBrpq q C u+B@SC ]u}]ËGzBGt@PWҍvBt7B@q 땋]1u}]ËBq AxS뾍AaU]] uuCt$LP(kCt $V CLP(k]u]]t&'UWVS,EutU t}u,[^_]f]tUFEFV U FV$PP(kM 1ۅ,1[^_]f;] t$&k<)uލSۉUt21D$$;}u׉$LP(kF(fF U8]] }}utWutPtLMtE$PP(kt2E|$\$4$D$ ED$t4$LP(k&1]u}] UWVS,]uU C;t$$D$S8Eu ?t8M t$ L$HL$@$St؋U\$|$$k$ PP(ktt4$EPP(kUtVE $t$M܉D$"UM܋ErJMT$A $UD$xUt*CE,[^_]É$LP(kEE,[^_]ËBU$LP(kU$LP(kE붍vUWVS,]}U C3|$$D$S0Et_&tIU t$ T$HL$@$St؋U\$|$$E1k,[^_]Í&,[^_]ÍvUWVS]C3D$ED$E $S8u 5t.E7D$ E D$FD$F$StՋ[^_]Ã1[^_]ÐUVS1ۃuF~"t$$;9^ދ$LP(kFF[^]Ít&UWVS,utlF~eEfU8Eu=ۉt7U _$UD$҅tU|$߉t$$nuɃEU9V,[^_]ÍUS]t$LP(k][][]Ðt&UUS] 9s19u1u[]Í&'U1SU ;Ut[]ËM] 8 uE]T$$\$n͐UEtLP(k]]ÍUEU]19 ÍUE}]ÍU]Ít&'UE ELP(k] UWVSӃ,$EPP(kǸtt1CGE$3t>Eu3t&Cޅt&ËWD$T$ PT$$~ыE|$t$$>1t,[^_]É<$LP(k,[^_]Ð&U@hD]Ít&9 Ut t]]ݍ'UWVS,UM܋MHhU؋Mu#\$]D$$6]t%St$|$ UPT$$~Mtq] HRM؉Q]D$D$$EM܋]PD$ D$HL$T$M܉,1[^_]Ðt&]1ǃHǃL,[^_] U8]Éu}ƋUм(kD$̼(kT$$覎;p @|U;P}6Ut$T$ P@ T$$]uEЉ}]]u}]ÍH|uDU(]u}p tX{ wX v1]u}]$(kv1D$T$@ $͍t&H t4X(1tD$T$ $렍&D$T$@ $뀍&D$T$@ $y]t&H 0E5D$T$@ $UWV1SÃ|UċEč}ԉEEEEE]EN 
ЅE=]D$EȍEȉD$C\UM$UM1UƒMDЅ 9St:MALt-KL$MIPT$L$ MȉL$MċIU $ЋUSL]EE1ҋH8]Ӎ&MąT$Ut";U1t&;9uUED$D$B\$ tvQ MR ҉UUE҉Utz vUċME;JMɉMt]y vWMċ;QUUă9Z8!EċU|$$T$$hUĉr8|[^_]EUtMQLt+@MD$APMD$D$ EȉD$A$ҋEMЋP\ED$L$$UEUSED$D$$,7EčM̋@EE@\D$L$UE]Eȉ$EUD$$@P(kUME̋MC Ẻ\$ D$D$ $UgU`$LP(kU tG9UUuFI BMtB 9MMuARP]UR MUEUMU8]Éu}@ptcCXtGUCX6Ux1Tԅt9Vt19Tt9u?w T>}As2;1Tԅt9St!19Tt9u?w T;ft9UMUăUk}E?9lj}HE}ĉ8<1[^_]UWVSLUt M9 tL[^_]ËyMu9ω}tK]fEĉ1ȺsystuՋMċ} 9Mu4$1PP(kM}IE9Mtq]ԍE61Ⱥs}č<fB:ys}č<fB:tuM} 9MuEt-E Mċ}f HfJ9uu1ۅu"Eĉ$LP(kEt)U1L[^_]ËEt$D$Eĉ$X1t&'UWVS,]t; t,[^_]ËSd1zR&9r~C<t苈4A@tD$$艓Cd8Sd9rslu<>4$LP(kt+FǀFtՉ$詻ˍ&C\$uCdC\$cCdD$CH${CCHE9u2[vǂtD$W$蓦<$LP(k;ut+W7zuȉ$谐WWBW먋CT$CT$LP(k,1[^_]ÍvU(]]uu }tV; uNCH$>t?CH:T$D$$CH$$]u}]Ë]1u}]ÍUUt: t ]Í&} 'tXw} t?} u%EBh1]Á} !Nt} $NtB} 't/]ËEBL1]ÍvM1B`1]ËEBP1]ËEBt1]ËEBp1]ÐUUt: tÍt&BXtBXU 1ËE 1Í'UWVS,]} st|Hu Lt`FXhD$PD$FX$܃t*vD$G<$D$yG uǃHǃL,[^_]ÍUMb)iM)i}?B}~@BM}䋃HHuJLu@PEU䉛dHL|$ NX$T$L$FX,[^_]ËHLD$ET$ U$T$sh~)U,[^_]ÉD$D$(k$u&PFXD$|$FX$mBD$D$D(k$1&)UWVS,R M܋H ʋ|Uu1,[^_]ËEM19Mv݅u ׅtDH$\$D$L$ D$$覓E䋗9EwM܉ $9mLJxD$$$,[^_]ËEJU}}]uEwUMEEt |t؋u]}]Ë|gW Gv2G Gtz4$NJG G11ۃ-uGUGwG BHWT$P$T$kۃMGA3$h(kG D$4$1G1pG 1ۅti4$D$G 4$D$G }G G D$D$ۉ$GG tG G G1ҋG!dž|G ǀxG ǀtG 4$UD$G 4$D$G 4$D$G U5GWG/G $  G* t;; t;; @ G 1; G @D‰D$D$T$ $T$͆D$G t1ۋ 1҅0;1(ǀt G _ u ^vRtT;2uE@ $T$D$ ED$tǃxD$$_  G rED$G E$JGEEE؉D$G $=GeG 1ۋGD$4$OE؉D$ ED$G D$4$G_ H MEI 4$MEE|t L$4$MM;o G} }G t GG x1ۋ 1҅;1ǀxlGE܉D$G $G+E܅iG у GWG1EP$G ǀtGG@E܉D$G $_G]܅ G E؉D$G $W G0tdžG@GEԉD$D$G $uUԅ҉GG1+E؉D$G $>G4$+D$GD$G $1|0ۉ|$D$p(k4$D$ (Gdž|GG G to$蔍tcG WǀGɃ GfW T$D$(k4$G)ftGGJPD$ED$4$AG)\F8VuO&>t@u믃~w   9u~E)D0E렉uEEt"tE8t>t u뙃Et8)1*&Ut$T$D$Լ(kB=É$D$(k+=Ít&UWVS<E؍M$@MPP(kERELJ@D$$vS$Z>D$ 8D$^(kD$$uVM܉M1냋=@t tiuҋE1$LP(k6E$LP(kM؋$=LJ<[^_]Í@)ƋEt$ D$MԉD$E؉$xMԅdƋE$LP(k$W=LJ뛉$f?=LJ<[^_]ËE$LP(k$=;$1=LJFU8]] uE}uEuu8Mσ1umucEǃ0&kǃP&kD$8D$^(k$uDǃC\ډE]u}]QftMu]}]ƃ7:f6UWVS<] Eԋgt6C\E1tEf<[^_]ÍED$<$HP(kpS\1M+ET$L$$p ~8$LP(kp&}Ժ81ǃ0&kd(kǃP&kD$ 8D$^(k)ШuHUD$/$ǃC\ډEEAED$$kǃ} C\lM $PP(kS\1$T$L$oǃ2ff名t&8MGNjEԉ$Z9ǃ<[^_]ÍMx*Mt 
$LP(kM䋃ǃ))US\<<$LP(kUԉ$8ǃ(xUMctES\ǃ‰U)‰Љfpƃ7PEԿ$ 8ǃ'USEt$7ǃ[]ÐUD$($@P(ktÍUS]St []Ít&} v vE $l(kEC1̍EC 1뼍EC1묍M!uCt$!1CivM fVSkD$ D$D$$aC<1uC C D$T$@P(kC$Ct1fC$=1CKcC^1cC$LP(k1CAt&'UVS ]Ct SD$D$$T$ ЋCCt$CCt$qCt+s t1kTЉ$2C9s w$LP(kC tSD$$T$Љ$LP(k1 [^]ËS tKED$$L$ҋE [^]Í'U(uu]M Vt10t$ZtREL$4$T$ UT$ӋE]u]Í&U(uu]M Vt*0tZ tREL$4$T$ӋE]u]ÐUSÃt $LP(kCt $LP(kCCt $LP(kCCt $LP(kCCt $LP(kCCt $LP(kCC C C[]Ít&UWVS1ۃ4$D$(kD$D$ uՃ[^_]Í'UWVSLEM ]5Hh)kąE‰ut&t $օuD$\$$(kei1{mS5Hh)kDžDžӄoÉ$օ==Ѝt9.=u=G<"DžDž2< $< ʃDžtEҍJt><"~<,<\fuƋu\DžuD$(k$?g$DP(kuE&É$օu担tJ2"L[^_]ËDž1t&1҃D$(k$fftVD$(k$LfGBB .,udX_D$(k$et:$DP(kBL[^_]fKD$(k$etN$DP(kBGDž|D$(k$:e$DP(kCD$D$(k$U-D$D$(k$D$(k<$duD$(k<$dD랋D$½(k$`d[$DP(kBfD$̽(k$"dB  $LP(ku$(k$(kDP(kBL[^_]ËD$ս(k$ctB pƅCt&'UWVSU EDžlDžpҋ8H` d$\`t$LP(k`dٽ(kdDȅDd\A C QCXt$ dC$(kL$D$ uD$4$<$LP(k$!PP(k‰{ \AQED$?$%a6+E$UM$(kT$ L$D$< …"CtD$(k$XaXT$4$X X$LP(kUC=ML$D$CD$C D$ |$$(kD$X<$LP(kXD$4$X$LP(kUhSKCs ;\M D$ ڽ(kT$ \t$ɹٽ(kDML$$MT$|$D$L$d$(kL$`D$(kC$M``1Ct7t$D$$(k`$LP(k`1Ct3t$D$$(kltf`$LP(k`14$ _4$D$HP(kt/$^f @`1<$LP(k&[^_]ÍT`ąd \KCC tKD$(kD$!ЍUlj$D$ X}X$7^XpL$lL$ <$T$D$MBlC.UE$(kT$D$A 1[^_]ÃC CD$ |$$(kD$D$4$XX$LP(k ED$ |$$(kD$} ٽ(kڽ(ks$(kDML$Mt$ T$D$L$dL$3`ƉWt&US]ą[[]KUxuu ]]$f4$\$趿t$$D$NE\$$G]u]Í'U$]uuPP(ktF $PP(kCt3$؋u]]É$1LP(kt&'UEML$M L$@$R1Ív'US]ST$U $PC$LP(k$LP(k1[]ÐiA)kmNAU]90A)kÍt&U$Zi}-!XA)kÐUW1VS*L'm3O'M'N'<(kg9'V('r'''>(k4$WD$$D$WEƀD$ $Vt )ڃ~D$ $Vt )څ~9EtE$xg)ke[^_]Ál'$=*m'u'uq(kD?'2B'$@'A'u23(km(kD$D$$VEƀD$D$\$D$ t$D$$,g)kt$t$ D$(kD$$bv(kkvI'<FG' H'd(k4V'DT'(U'0(k<'):'t;'(k5' 4'};3'(kf7'C(k((k\(kO(k{C(kq(kg|(k](kS(kI(k>(k4(k*(k (k(k (kI(k(k(k(k)(k(k.(kt&*t| *(kX(k=(kD'j(ksV(k(kOaW'tk's(kC(k9't&'Jӿ(kƿ(kJ'tK'" (k(kQ'[(kQ(kl(kO='t>'(k(kU8E ]]u}M;WtD$D$<$1UD$ T$<$D$,FtVGtD$<$UU䉓ǃ]u}]11ۍt&1΍&EfU$SU듉'U]]ut C8ǃE $D$諘tD$$UK]u]É'U]TP(kt UD$D$$di)k t)D$$TP(kTP(k1ɅTP(k1Ít&'UE<t]]dUXu}׉]PET$PT$PT$U؉$D$@(kD$D$ UЉM+UЋMԉ֋!Á tÀD؍FDۃ)9s-t)t$<$T$O]u}]Í&$1xg)k]u}] UWVS|Et't>$?'xg)kEEe[^_]ËMUE EEe[^_]1D r11tM M+MM Tu1ɺ11&$t'tttt9~ƒuكJt9ωоME1ҍ 
7Ӊ}ljEM9]~9t`t9~܅t#:uu}}D<$D$L(kD$D$ ǃu}t +]9]ED$E$NEe[^_]fؿt&}sME ڋ})ك. tB$%Mz:t&:R$xg)kEUWVSMEEEDuEEEEEEEu[t&I 4h)k<$хt<$=8h)kׅ90 } Dg)kыxg)k$UEҍEăD$ D$4$L=Dg)k׋MU9t $҃$Mĉ)1}~} I}EE΅Ht m}} Ѓ}} }1}e[^_]1<$=8h)kׅE1Dr]ȉ\$D$`(k4$Jى]!%tDQE)ڃ}U}^}uO(k (kt=|$$Jtk<}uu}e[^_]Ã}u0}u*}UEEo뭐t&1E-x*9u!~+‰։-t غQ)k؍k&UWVSÃ, 1ƍC$$-t$D$C$vgOt&]Mb)ιgfff){$K >C(C CL$D$C()D$ D$(kD$C,$ɾ$,,1[^_]Í&Mb{$)ιgfff>C()K ~tC>2A¹2C 25C'D$(kC$*,[^_]ùCC ` UD$J(k]ÉuD$`(k@$蓽t ]u]É؋u]]6U8]Ãu}@0/kT$1D$(k4$@]u}]ÐNűCCD$ C0D$(k4$%D$ռC 9C1C]u}]ËPP@C0PfJPB8D$ D$D$D$PD$C$\i)k1]u}]f8D$ D$D$@D$PD$C$\i)kF@1VD4$D$T$+fC0PCP@C0PfJPBDf{0v 9@+@T$D$C$k !8D$ D$D$@D$PD$C$\i)k(@N@^DN@^DL$\$4$*LP@ S09 fu =D$L$ D$d(k4$~C;C C~UC07D$(k4$D$s.i)k7D$C$D$(k4$D$@81D$ D$D$@D$PD$C$\i)ki)kf7D$C$D$(k4$D$´}C,$'fC0&UD$(k]ÉuD$`(k@$St ]u]É؋u]]U(uu]}4$vuD$X$@P(kÉLP1džs0HCCǃDF<@fC8u2F<@D$C8D$C$i)k dž$'E ]1u}]ÍWG.]u}]ÍGD$$@P(kL ]u}]i)k4$D$wD$(kD$$豲]u}]ÍGD$$@P(kP됍t&'USE$3t *[]ËC[]UVS]3CtD$(k$%tB@$%$EK(9S,S9cU )D$ D$D$C$ t_IuVS t5Ft&C,$5C(+EU ~Hu1U ;uI1]u}]fE C<$D$j(k躦]u}]D$D$D$D$D$D$ D$4$X"rvi)k4$D$D$(k<$D$:1C =U]G&UWVSlM] M $]=gM̋ɉM]1ҋEMă9E]̋3p]̍~]EUFPVTFXV\Eԃ8WEЅLUiB  t]ȋ9UԋRۉU]ȉD$ D$D$$RG@G8WDWEԋP EЋEԃ8MD$D$D$D$D$D$ D$ $]]ЅuMċAl[^_]OEЃl[^_]Ít& k]ȋ9T| ;MIEԋD\$ $T$D$EȉD$d E'W|9X7FV NP HVMP1Ƌ]}u]ËtEt$P@ T$$9x  |;Xv}M]u}]ÍU]u}]Ët"D$<$\$bVPU2뮍vFEUS ] Utbt^{ t'T$SC T$$¸9tD []Ã{uӋ tCACt E1Ƀ []fCtS KP HSPM1떋tD$SC T$$~SPӋCΐUWVS[^_]ÐtD$ (k$[^_]ÉtD$D$(k$ĚDžl{t&D$(k$蹕[^_]D$<(k$蕕[^_]D$!(k$q[^_]ËpD$$Np|$ D$D$$tAt!tTD$(k8$D$(k$D$(k$ϔpD$$pL$D$ D$T$$Vu t tD$(k$^Dt tD$(k$5t;<S<+vHtD$(k$lƅƅƅUt$$ƅT$=ET$p~M|$ 5D$T$$5u ;ptD$P(k$+MUt$ $L$T$tvtZt)t#JtZtD$$UD$$(kT$t$角t$$V~tvlƅJ1DutD$$ƅR1D:t>D$D$(k $dduʾ딋tD$(k$ϑ} 0}&}1} уh򮃽hƅэyhthM$T$L$ht'M Љ|$$L$dd׍T$pD$|$ $T$~u;tstD$<(k$ǐ}t M95tD$ (k$蕐{ƅ1ƅƅUpL$D$ D$T$$u t tD$l(k$D$ tD$(k$D$ۏpL$D$ D$T$$xu t@tD$x(k$耏ftD$(k$`Ft tD$(k$7eD$L ȉD$HT$L$D$DD$@D$<D$8D$4D$0D$,D$(D$$D$ D$D$D$ D$tD$L(k$' <<pD$$ D$D$(k ЋUD$ T$t$賍D$D$(k ЋtD$$D$D$D$ D$L2pxL$|$ D$T$$r;f 
UEPw$(k&B]øY]fX]øW]fV]øU]fT]øS]fR]øQ]fP]øO]fN]øM]fL]øK]fJ]øI]fH]øG]fF]øE]fD]øC]fA]øZ]fUWVS]u tuTNtTVt>Ή$UU$8t$8[^_]É$1$F$o뱐&UWVS u] }u:?t&tl$M1M $E"8Eu+t uƅt$E$1:E [^_]Ðt& [^_]ÐUWVSu}] t$tu[^_]ÐUVS]t3Ct $LP(kCt $LP(ks$LP(ku΃[^]Í&UWVSu ,u1[^] US$E ]uGt=00 (kD$D$P(k$D$ D$t8$D(t$LP(kǃ($1[]Ðt&ǃ,뵍'UEE]CUS]4@@t 8t&$<,1ɅҋU  []Í8D$D$$̍&'U(]]uu };${A4ǃǃ@wǃ B'kǃ$pA'k@@tptD$$u<$=ǃ,ǃ4Z(kptu]u}]Éu }]u]]t&U8]]uU}{,R 4U܉ $M1ҋMƍ@9Cr+U܉t$ $T$'u ]܀<3 tb1҃,t]Ћu}]Ë]D$$\(kU\$uʋE *]Ћu}]Í&U D3 U($D$D$D$D$D$D$ D$*Ð&USÃ8D$D$$轷8tǃ4`(kǃ,[]fUh]]u}E,0utML$ ML$D$$Z=uDEt=,,$(k0tPrf]u}]Ð$H@]}Ɖu]ÃD$ D$$MMĉU ‰EEЋUD$(k<$D$T$ M见MąD$D$$UU9UFUT$ UH}MąUEUȋE)EЋŰU9E +ED$ED$$ U)MċUԋED$(k<$T$ D$MU UЋMD$D$PD$D$D$D$ D$$EЋUԉG(W,ǃ,\ǃ,fYBf;*t<{u t <{u<{%느t&OD$(kC${OUD$CD$(k$z7fOtw0D$@D$(k$zO@D$`(k$zID$D$<$ǃ,"pD$$蕳ǃ4`(kx00 (kD$(k$D$ D$bǃ,00 (kD$(k$D$ D$ǃ,WUD$D$D$EЉT$ $D$[EЋUԉ<$D$T$pM`Mĉ$LP(kǃǃMv'U8]]}} u$=:Vt]u}]Ð,(k8D$D$D(L$ D$D$$u3FFD$D$4$aD$D$4$)D$D$4$D$D$4$9t @ 0@(k0 (k(D$2(k$T$ DD$T$%ǃ,ptFƅ@ v|$$U(]] uu}\$4$_ۉu]u}]Ë1H tp]u}]ÐU]Éu0t&82BB1]u]ÍvD$$@P(k…҉uː&U(]]}1E utEuP(t$LP(kǃ(,t$LP(kǃ,F ]u}]ǃ뢍t&'UESt1u'4 (k(k(kǀEˉ41[]USÃP(kD$(kDЍT$$4u ǃH[]Ít&UVSà 4$&2u Hu1[^] UVSE ]u&tD$D$(k4$f4tB4$j8@$D$Dt$LP(kǃD1[^]fǃH/뫍'UEE]<7US]$80H1ɅҋU  []U(]]uu };$;5ul4ǃǃ@wǃ W'kǃ$M'k@@uF$1ǃHpt;>u]u}]Í&D$$蠬t։u }]u]]vUWVS,]{ <$[,]vD$|$$(k twU䋂HtRt}u 1,[^_]À?+t!vD$|$$7(kuЋE +,[^_]Ívv|7>?+uЋE -,1[^_]Ít?.tuD$D$(k<$N3D$D$(k<$."D$D$(k<$U䃊8< < < < v>O1+v< < < < I9uуэ "<$D$ D$(kUJMu U䃊<)΅=< >< *݋U䃊8U䃊8oD$D$(k<$MMuU߃<D$D$(k<$MMuU߃<L1ɉAD$D$(k<$M]MuNU߃< uj<$D$D$"(kU#MU䃊<D$D$)(k<$u[U߹< tɃH<$D$D$2(kUMhU䃊<@Y߹M1Ҁ?ǃHt&]u}]Ð4$+]ulj}]Í&H$\(k+tD$CD$1(k$Sh듋3+ǃH8|+EED$CD$(k$gE+EEu͍ED$ED$ D$D$$wED$D$(k4$''u ǃH E$LP(k+ EED$CD$(k $,gw+EEUU}ǎ< ut&< t< t<=<$:fHMT t t t pȍv΃+D$CD$1(k$zf+EEIED$ ED$$D$|iEwD$D$(k4$%ǃH +0D$@D$(k$eǃH +D$X(k$e+D$CD$(k$qe+{D$CD$(k$Be+EEu͍ED$ED$ D$$D$.g?+EEED$ ED$$D$gED$D$(k4$-$ ǃHt&+u}8lucD$(kC$iEEUD$ED$D$ |$$D$gCW0A(kD$K(k4$D‰D$_#ǃH  t tUT$UT$D$-(kT$ D$ $T$*iED$D$(k4$"ǃH 0tPǃ<ǃ@D$K(k4$^"ǃHEkD$(k4$)"ǃH ED$ED$D$D$ 
$D$D$rsD$$cKǃ4(k8A<ǃ@(kD$D$S(k4$UX!UH$(kxED1Uу $7D$DD$E$xU11эQ$7D$D$E$xEUT$$xD=D$(kD$D$ D}ǃ${zuӍUljT$ D$(k4$D$U ǃH ǃ0ǃ4D$D$D$D$D$ D$D$$:~$LP(kǃǃOD$|(k4$ǃH<t'ǃ@(k 6A@tJǃ@@ 2(kD$$T$>\Dtǃ@(ktǃ@(kjD$(kC$dmU]Éu0t&82BB1]u]ÍvD$$@P(k…҉uː&UESt1u'4(k`(k(kǀEˉ41[]USÃǀ0ǀ4(D$@(kD$$u ǃ8[] USÃ<tM8t!ptD$<$$LJ@wLJ @q'kLJ$@f'k>tq(D$D$D$ D$t$$qu-LJ8pt[uU ]u}]Íve(k $D$3D^U <$T$뮍&'U8E]1ۉu}xp @,Ev>0 w3F0 w'F0 w1ۀ~ QE䃸8t؋u]}]ÍWNv $D$D$o(kU؉M U؋M܅u EǀDvD$D$t(k $u t ]f< < < < v'N1< tx< tt< tp< Iti9uу 4$D$ D$(kU5M܅u E䃈0)υR]럍D$ D$4$x$h/U D$D$z(k4$MM܅uEރ0D$D$(k4$MM܅uEރ0O1ɉDD$D$(k4$MMM܅uMEރ0ui4$D$D$(kUM܅E䃈0D$D$(k4$u[E޹0 tɃJ4$D$D$(kUM܅mE䃈0@^޹R&'UX}lj]uEE)0tYLJ4 (kt$D$(k$#UtẺ$LP(kE̋]u}]è@ hED$Eྀ(kD$ LJ4D$D$$XEuU҉UB4$E=,UЍt$D$(k$T$ %Uԉ8ǀ8]1u}]ÐLJ4 (k&8t&D$(k$~ZCt&E D$Eྪ(kD$ D$LJ4@D$$eE ED$ Ez(kD$LJ4D$$JXE&'U8]]uE uP1tF ]u]ǃ㐍ED$D$ D$(kD$D$gPu}tM$(kDP(kǃ +Eǃ8n]1u]苴ˍ'U($D$D$D$D$D$D$ D$Ð&UH]U ]u}$vt]u}]Ð3FFD$D$4$HD$D$4$D$D$4$hD$D$4$ qU 38U ƅ+U :H  @ }8)D$$(k<$LP(k&vt_L$ |$D$(k$MME]}u]Í$(kd|$D$(k$C<$LP(kvU $T$E|$D$G(kMD$ $<$LP(kM>B$(kDP(k&$(kDP(kP<$LP(kM $LP(ket&USÃ@D$D$$m@u[]ǃ4(k[]G&UX]]u}E80;}uTU܉T$ U؉T$t$$u)8Ut܆t8vEǃ8]u}]É4$]u}]ǃ8ˍ8$(k8E랋 byǃ878 c4<t&@<pM<?D$(k4$ 'ǃ88c$T$D$(kM7ǃ8 T$D$(k $MCNEEf$T$D$(kMCwNEEuʍED$ED$ D$D$$ ]'UT$D$(k4$ u ǃ8 UEЉ$LP(kEN$T$D$(kLCN EET$D$(k $LCuNEEEUԋ}ǐ< < t< t<=z<$芿fHM=T t t t >ȍv΁0a$T$D$(kK@83cT$D$(k4$K {NEEUT$ UT$$T$N7U,T$D$(k4$ u ǃ8Ut&NEEDUT$UT$ T$$T$LUT$D$(k4$3 vǃ8gNEEUT$ UT$$T$Mt,&8c0P$T$D$(kI 8 cT$D$\(k $IEEUԉD$ED$D$ |$$D$?Ml[0tꋻPuD$(k4$ǃ8(p=D$$fǃ4(k<  t tUT$UT$D$(kT$ D$ $T$dNqUfT$D$(k4$@ǃ8 1T$D$(k $2H7ǃ8 (ǃ4D$(k4$D$ǃ8EUT$UT$T$T$ $T$T$kWxUmT$D$(k4$Wǃ8HD$(k4$*ǃ8D$D$D$T$D$D$ D$$Eǃ8E  < vzt&U(]] uu}\$4$ۉu]u}]ËH tb]u}]ÍvUWVS,U2eM 11ۋ,'uN 8,;]  8(kũ,t]uЋ 벋D$D$(kU $MU䋆,< tmǂ, e ..1;] ǂ,Zu<;} tn 1,[^_]ǂ,&D$D$(kU $葸U,;} ǂ,u,1[^_]ÉU$PP(kUzD$h(k4$DaU8uu]}N,9}u^(MѤ}w $$T$t$|$ u)Åut2蝤M䋱 $$T$t$|$ 躤u)9Oމ؋u]}]ÐU8]]u}s,0$E8~TSt8D$ |$D$$Lqte1uD]u}]f뿍&ED$ (k$]C]u}]Éu}C0u]]&ED$8(k$C]u}]Ít&'U8]]u}s,0$MU~dS=Lȸt;L$ |$D$$Sp4$E*tC]Ћu}]É뼍&$D$ (k`B]uЋ}]跢D$ET$$u}tMtu}C0u]]UD$8(k$AaUS]C,CCC 6C 
S$[]Ív'UX]E ]}us,D$$J(k(tE<$D$t<$LP(kts$E蹴NjE$TED$|$ \$04$D$=umu8E9tkUz)ljȉZz]u}]ù]ȋu}]ËUt$\$D$T$ $M>Mf$ELP(kM뷉Mԉ$LP(k]CCCC S$M냍t&'UED$E D$E$ U1WVS\u V,UċU]ȁÌ]]]ȋV+N }ԉ߉ӉMЁ?u^UԉD$\$$?FE$LP(kFEU1]ԋMBHF 9~rUԃ]{ UuUȋL$ UċF D$$D$?U4$T$V4E1ɉ^ 99M{~ Mпt,N $PP(k^ N$\$L$4]1FE^ ]1t&UĉL$ T$UȋF D$M$D$}EDžE}hht$LP(kdžhD$$0(k`lE؉hEED$(k$L$LP(kdžLE\t$LP(kdž\PtD$(k$Jdž\Et EED$(k$#D$(k$8EUE|[^_]ËU$(kfǃED$D$D$ D$D$D$ D$4$TEE@(kEG(kvǃEU(kEc(kEl(kfEr(kt&Ew(kt&E}(kt&ǃEȆ(k{Eȏ(koD$(k$7E+fD$(k$7E+t&}D$}(k$ED$(k$&(kEEEaEPt$LP(kdžPT$$(kiEP?EE|[^_]D$(k$EN@LU2vEEED$(k$#(kEEȉD$a6E+t&D$(k$`;-Xt$LP(kdžXT$$(khEXD$h$@P(kEE䉃4D$(k$t\D$(k$5E+D$@(k${5E+P$(kD$gE\:E[EP`Eȉ|$ D$E(kT$#(kEEȉD$E$E6EЅt%UЋED$a(kT$$E U#(kM}D$o(kҋUDЅɋMUUDЅ}؉UUDЉUUҋUDЅɉUUDЅEE؉UUE؋ET$UD$ET$ D$ED$E؉D$E$Ek}t}tE$D$EEE4$D$E+}Eǃ} }}Eǃ D$D$(k$+D$ED$D$ t$$&1ɃD$EL$D$D$ D$D$ D$4$E䃃H(@, LL$D$$4${*DEE! ǃHEEЉUEU܋EỦUȋU)EŰ}UԋEỦUȋU)EŨ}}Eǃ D$D$(k$t$E؉D$dD$E$D$ED$D$ t$$eU U[M1(SE؋U܃ EȉUt EЋUԉEȉŰEȋUD$(kD$ET$ $8tED$|(k$|tE}uCǃxtpU܃EtMǃHEEOD$(k$uD$(kE$t\udtk1EуMD$(k$ND$(kǃHǃD$(k$^0}EE&vUWVS<} {M$D$HP(k9]St] T$$L$ >$F^F ÍC9sCEEԉ׉ʋEgDUD$4$L$ ML$MыU=9E}D${(k$/MD$(k $ /]t $LP(kLJLJ<[^_]Ët $LP(kLJLJ<[^_]É}D$j(k$.bM)u䃹 u ]~a>$BFMԉ^F ÍC9Mԋ9KM䋇EM}ԉх]U)Bt t $LP(kLJ1LJ<[^_]ËU]rt }ԉыU<$u $MPP(kML$t$$MMt $LP(kMM1t&ωы]넋}ԉrUED$ D$D$0$qZ1t-u&t!UtED$$Ív'UWVSø$c]ȉ$UE\$D$<$D$ &E)ÅUD$ dD$$T$GSUЉt$ \$<$T$UԉT$i!tUD$(kEĉ$_%UȋEątUȉ$LP(kEă\[^_]ÍU1E(kU-t&MȅtӋUȉEĉ$LP(kEă\[^_]ËEȅt Uȉ$LP(kUD$(k|$$$uuD$ D$(kD$<$q%cUD$D$D$8T$D$D$ D$<$ETEUD$(kEĉ$F$Eă\[^_]ÐU6WVS}UEMԋEM ELM MċM (DM MLJEEEUMPMD$(kL$ ML$Mԉ $_(Mԋt$LP(kMǁ M$(kL$ML$U D$ D$D$(k<$ $ELP(kEd(kj(kE`(kDE̅Һh(k$(kE̸l(kDEMT$ D$L$ML$=UEU MD$(k $El(k MԻl(kD$(k $xMD$(k $p(kD[l(kHl(kT$Ủ\$D$(kDD$EȉT$ 4$D$EЉD$UȀ:MЉE $LP(kEt$MD$ (kE $!EČ[^_]Ét$<${uD$(k4$GuM D$ |$4$L$ML$uMDf軁 $L$\$ $T$ہM)^MԃpM11EEEMM̉MMԁM&SEȅH( $L$\$ $T$HM)NыMĉT$ D$D$ $NMED$@+EЉ<$L$MĉD$ L$Q{cEoEЃ}_U UR)EU}|4~,M1ۉ<$EMEt?Č*[^_]Ã}w΋MEEM1ۉ<$u}?vMԁt<$fEMԋMDlDMD$D$(k $#MԋMD,t&MԻD$h(k $%vE1f1MԋP,LJ1EfEMԋMԋ=D$D$$(k $ UtMԋt LJMDČ8[^_]Í&4$ELP(kEČ[^_]ÍMԻD$(k $E]1ɉ}σ9}2{ 
]uMԋqMԃM̉t$ L$MD$ $~M< R< JuD$$y(kMM̉L$賑tMԁMD$$(kL$聑tMԸtnMD$$(kL$NtnED$ D$$茌EUM]̃19}}1fM̉D$L$M $tjMD$(kD$(k $AE늋Mt$ D$L$M̉L$Mԉ $ eMD$@(k $Č8[^_]ËMUT$ D$<$L$E<yEMD$(kD$ $9 xt&MD$(kD$(k $utiMD$(k $M $E&MD$ D$D$ $HČ1[^_]ËMD$(kD$(k $ME܉D$D$(kL$ M̉ $MԋMDHt $LP(kMLJHǁ0 $D$T(kM1ǁ\E$LP(kEMD$(k $MEDEԉ}EEtxM1EDMMMԋDL9E$(kD$KE]Љ$LP(k4$LP(kxE{MԉD$D$((k $_<;U UEE3EԋUǀED$ (k$M)ȉD$E̋Mx EUT$ <$)ȉD$ẼD$7EMD$(kD$ $4EM<$D$MUED$(kMT$ UԉD$$EM)ȉ)EU}|}wE4MD$(k $EE'U1]]u}t u ]u}]ËUǃD$ $D$D$]u}]ÐUWǃt$ g)k$LP(kGt $LP(kGt$[8u.u61tft}Ív7tf뽍vUH]]uuȉ}C 4$D$|(kD$ D$ /CD$ Ct$D$C$u9$$g)kC$`g)k]1u}]t&=i)kׅus׺EЉS륍&'USS T$S$T$6C []U(]Éu։$D$0t$D$(kD$D$ $*]u]U(u։}lj]t $LP(kt!t$C $LP(k]}u]Í&'U1]É'U]Ít&'U1]É'U]Ít&'U1]É'UE])'U(]]uu }1$ uJǃttWt]u}]É&fǃ]u}]Í&t(k댍v(ksv'U8Euu }]EEt^C $$g)kKC M$`g)kEtCE[UEU1]u}]Ð+s}T$$t$|$ HsSHEtK9C҉Sv[CЋU1CL$$ ]u}]ÐCȋE(kN벍v'U1]É'UWVS] Uut$\$$*~#E\$t$$D$ "e[^_]Íut$\$$*E1<E3D1D rUuBtUȉ$D$|(kD$ EED$ *D$D$@P(kƋEUEǂF ǂǂ8E1tftUVEF$EF(EF,EF0EF4EF8EF*U\$t$$T$ e[^_]ËEpUE54$xg)kDg)kU$D$*U\$D$(kD$ $EUȉD$ ET$D$$‹E;i)kD$E$*U\$D$(kT$ UD$$1fF ~ 7g)k0fU]ÐUWVS,UEM u¼Hh)kE‰Eu t $ӅuD$t$$(k΂1t6F~Hh)ku7t&UtzuTU1&,[^_]ÃtΉ$ӅuU|$T$U$-uɋU뾋UD$ (k$Y, [^_]ËUD$(k$7U tt&UWVSL} EEE8H $}ԋ}ԅG I(kDׅDϋ>ED$ED$ t$L$$MuWUt$LP(kUu I(kJ(kT$$Q(kDD$>E$LP(kL[^_]Í8T } t$LP(k}G L1[^_]Í}|$}|$t$ L$T$$oUtt$LP(kUM J(kT$$Q(kɹI(kDD$>E$LP(kUԉB L[^_]fU]ÐUVS HˈU ڈUP XM HU ӈ]X PM]$ ȈEUt$$ [^]Ít&UuEu }}]h8\$|$4$D$ EڃG\$D$4$D$ Eڃ\$|$4$D$ h]u}]ÍvUWVS} ]<$N} )|$Zt$<$TT4$hD$D$~Mt$\$D$ $l(ka$Ct$D$ D$$l(kCCļ[^_]1OvUWVSM $Mn|<Ɖ<$PP(kMøt`0tt&DCC9uu4$"|$\$4$Et$$E@@$LP(k1Č[^_]ÐUVS ED$ED$E EE$!juXuU}BvD$D$(k4$ |t*4$ LP(kED$(k$ [^]ÍFD$D$(k${uFV V V ЋUBFBFB 4$LP(k뜋E D$t(k$J 념&US$D$p(kD$l(kD$hD$dD$`D$\D$XD$TD$PD$LD$HD$DD$@D$<D$8D$4D$0D$,D$(D$$D$ D$D$D$D$D$ D$(kD$ E\$D$ $D$ED$ BiĄ[]É'UWVS ]u DžSƅt 1tftD$\4$y)\T\p\$xlD$<$c<$xhttѥpѥlѥhCUD$D$$EKDžK D$ D$4$L$PDžDžDžEET$LD$E$D$Z(kD$D$ }$uыEh$D$jh !%tDJD+D$i$Ƅh/i$(k s !%tDJD+$<$D$D$ D$D$X(k<$ h !%tDJD+$hT$<$D$U <$1t$w &>D$Z(kD$D$ }&$Fu$(k KD$ D$<$ D$D$X(k<$ ڋ !%tDJD)ډ$<$\$D$ <$D$D$X(kg $Qg$q<$D$D$; 
<$D$D$X(k# $ g$-<$D$D$ <$D$D$X(k $f$<$T$D$ <$D$D$X(k &T$<$D$ { <$1t$ >D$Z(kD$D$ }G$s uыEDžuserDžnamefDž="D$$f!сကtDʍPDɃ)",reD Balm=B"T$$0f !%tDJD)",no@nce=@"D \$$e !%tDJD)",cn@oncef@="D$D $|e !%tDJD)",nc@=D$D$'e !%tDJD),dig@est-@uri=@ "hD$D $d !%tDJD)",re@spon@se=GD$D $edE D$t$D$ED$ E$HSvU]&U8E]]uu}} EE$t$EEE܋E D$st]u}]fE܉}}uu]]EEEEE ]tt&U]ÐU18uu]] }t>t t;u ]u}]Ét$$^ct]u}]D$*4$bEt2D$.4$ibt@D$.$Pbt9}v \$4$bt1hD$D$p(k4$wcuD$.$aEtƋE<$D$btE))9|E\$4$)D$'ctu}))ωL$|$4$c1%e)k%e)k%e)k%e)k% e)k%$e)k%(e)k%,e)k%0e)k%4e)k%8e)k% fL$$Q D$,1l$4\$0fl$4T5F\$$C tXtl$$M@;H s AM@t;t$,uŋl$4\$0D$$@+)t$,9t$$VD$$@@$ D$, 1l$4\$0fL$41F\$$C tX$tl$$M@;H(s AM@t;t$,uƋl$4\$0D$$@t)t$,t$$VD$$$ w;D$,t{t$,|$( v NEEvt$,|$(ˋL$$A%;D$( $F(k\D$$PD$DD$<$G T$8W/L$,OD$(T$$B8Z$E )D$T$$B$tt$,|$(T$,D$$ ֋T$<)~@;|$8v|$8)|$8)L$$A@D$0y D$<9B9D$< D$4D$< Ш D$4L$4@ 1l$4l$< L@9rl$4L$<L$0+t$0T$0;|$0t1l$0ՊT@9ul$0|$v*|$$W1T$$v ,Z$A(k|$$҈l$(+\$Tt$$D$4@$G(kD$TL$Tl$()t$$҉V@D$$l$()Ӌ|$0T$$ftzpG|$0zhD$<$G T$8W/L$,OD$(T$$B8Z<\$@\$<$f |$<$ml$8$+[\$,\$$[8\$(t$$^< džt$$F @1vD$,Mt$,|$(=t$l$@$UL$$Amt$l$@$5L$$A$F(k|$$"f)V4mFDT$0VHt$,D$(D$|D$(D$}D$D$|D$C$CS\$$҉T$\D$TD$dHD$`#D$(L$TЍT$0PT$4fPD$4L$dL$T9swD$,t$Tt$,|$,|$dCNED$(ED$(#D$`D$\T$,T$0PT$4fPD$4 89wt$,t$TL$TL$dl$()l$(D$4)Ë|$Tt$$҉V@|$0+T$0.@M$A(kt$$|D$,t$(%$G@(kD$$K|$$GL(kGT GP)kGX$g$A(kt$$ET$0T$l$L$$A$ t$$FfD$(D$|D$(D$}D$(D$~D$(D$D$D$|D$C$d C\$$N\$$S@1ۃ$D$(t$$D$($F(k|$$D$(D$|D$(D$}D$D$|D$\$$C$ CS19s:t$(ىT$\ v0HUE9rt$(ˋT$\L$,l$()ӋT$0?T$0t$$LVnT$(T$\l$(|$$WHD$,9s3tt$(ىT$\HUE9rt$(ˋT$\|$(L$,t$\|$()1ɋt$\+t$T1D$(t$$D$DV\$$C t T$(D$(\$$1D$(B\$8\$@1D$()lj|$T$$B$ D$$@hD$0‰QD$,|$$?fpp$@<(kD$<$G T$8W/L$,OD$(F8^ t?k {D$DЉC C{k ukF 1,[^_]f> uKD$D$$T$T$T$L$DL$$9FT$t멍v럐둍vD$T$t$@t@tP B01fffUWVST$('Bj8p<X8X8x<ݍL$t<A9t A9uGh8@<@h11۽$f)ȉC9s%t T8t1C9rf$Hhj:1ۅtBw=$)ȉC9t"w8t1C9uދ$Hh)Z߉:ZZr@(@,@0@BBBH@@ @@ @8@<0HlHPHLǀǀZr 1[^_]AfA fx<+׍vJ0nHh[^_]Ív@h1뛍vD$t@t8 t1Ð@<øfUWVSt$4L$0 ^F V$D$D$V($ЉŅC4D$K$D$F($V |$0fSL09s>Sl)0UltK$s4ljE4D$0h1[^_]Ð9w)0ULSP)0UP땍v1i븸뱉l$F($V$fD$t@tǀÐffD$t8Pt7 t t1ÍvJ@f+J@øøUWVS1DŽ$ 
퍜$$39u1v1t$t3>uu򉼬E u1v1t$t3>uf@I!΍TJfr) tt$(΃t.9v|$WЃG|$J!T$()#T$H!‹t$<frJ) uL@Al$4l$< v@+J!֍TJfr) t΋l$4t$P΃9v |$WЍK9G|$˺z!NjT$PT$T)T$+T$L9T$Tt$T)։t$49t$XsT$`T$0T$lT$,T$\+T$4T$0L$49L$()L$(L$`+L$t$\)+t$P)t$8~|$p|$GL$4L$PL$l|$4 D$hL$p  L$hN9D$t|$9|$pL$tL$tL$h1ɉl$hl$f||A9L$Pwl$ht$t$l|$4+|$lT$lL$l9L$4t1ɉl$PōvD DA9ul$Pt$4t$T$+T$T|$(v2L$t$(NJBABAwL$t$(L$(t6J|$O|$(RWt$f|$WG|$T$9T$st$ 9t$fT$))ˉپN!ƍB$D$@G D$)Ѓ$B\$ +D$$GD$p8H<Ą[^_]Ív t7T$ x|$wWKЃ|$$F&)k|$3v$G&)kT$T$+T$Tt$(l$4D$TD$JHJHxjJHw܉D$t$(l$4D$TMJ|$O@RWt${T$49T$,t$,t$\+T$4T$0+L$,L$49L$()L$(L$`+L$$)+t$P)t$8NL$tL$AL$|L$4L$hL$p |$4 D$xL$t L$| L$xN9L$|D$L$9L$tL$L$L$x1ɉl$xl$D$tDDA9L$hwl$xD$tt$t$pL$4+L$pL$hT$pL$p9L$4t1ɉl$pŊD DA9L$hul$pT$T$4T$4t$(9t$,T$,)։t$($+T$,T$PL$ыt$@|$, ‰֋T$8 ʃ!֋T$89D$y9|$8T$T$1ҋ|$844B9T$DwL$4L$@t$,+t$@|$d|$lT$@9T$,t1҉D$DDB9uD$T$4T$,T$+T$TWЃ|$q|$LT$,+T$4T$09L$()L$(L$`+L$t$,)+t$P)t$8~|$p|$GL$4L$PL$l|$4 D$hL$p  L$hN9D$t|$9|$pL$tL$tL$h1ɉl$hl$||A9L$Pwl$ht$t$l|$4+|$lT$lL$l9L$41ɉl$PŊD DA9uG|$O|$C׋T$0|$$F&)k5t$,L$4|$lr|$4t$X|$4t$뀋t$4t$ht$q%i)kffffffS(,E)k$D$$,E)k$D$(E)k$xD$D$D$D$D$D$0$ËD$$Z,E)kD$$I$(E)kp([ÐD$0$g)k(É[Ív'D$ $1ÐD$$tt t&T$(D$D$ T$$  'S=dP(kD$$t dP(ktt;[ ()k()ktftЃ()ku[ D$(D$D$D$ $4 f1ÐD$Ít&'D$ÐSg)kD$\$$D$$&)k@D$ 8D$ \$D$g)k@$ UWVSLL$ A)kT$A)k1ۋB9rzG9 9u4$  [A)k<C T$$E CA)kT$g)kD$D8T$$Ӄ T$RD$8A)kD$T$4$Ӄ RD$8u2D$4$D$D$D$L[^_]fT$$g)k뭍t&@tɋD$0l$ g)kl$ D$@D$D$$$ӃD$4$D$D$D$yD$8@ttD$ l$ D$D$0D$D$$$ӃL[^_]Ív@D$0 A)kT$D$@D$D$$L$ $g)kT$Dg)k$d')kD$t&1?A)kD8D$E$0')kD$yt$$')kit$D$$0')kQA)ktÍUWVS\A)k@ A)k)čD$A)k ()k- ()k R ()k()k=()k()k* ()kC ()ksJs{$k$kM t>t$$')kEOe[^_]û ()k ()ksS$k$kUȍUv ()krԡA)k11}̅뭃 ;5A)k}A)k؋tD$|$@$g)k EȉD$ A)kD$E؉D$Ẻ$g)k뗍v ()k3?K/$kfHM)ց$kUMĹ ()k08H$k)MčUĹ]뿋M$k)UĉMĹ>A)kCD$C@$0')kD$D$$')k`P(kt fС`P(kP@`P(ku Ít&S@@(kt$t@@(kvu$$(k"[1ÍC@@(kuƍ& A)ktÍt&A)k딐LhP(k\$D$(k$D$>uBF=Hi)k$fERF$}Q|$ D$D$(k$ԋ/F<tr <t*<uH6D$ҥ(k$謋4D$֥(k$蚋"D$ܥ(k$舋D$(k$v<#t <'t+<Et$D$(k$F~D$(k$"H7tu&D$(k$&D$(k$T$D$(k$ӊF9uw+7FD$(k$T$貊 9uwՃ}tD$(k$芊e[^_]UWVS<8Eԍ9ƃvƃ9v @ $ED$ D$D$0$Xi)kyi)kD$,(k<$D$÷0D$ 
D$\$$Xi)kyi)kD$,(k<$D$賃e[^_]UWVSL <+$"B<#tL<'<1߃D$$D$ \$D$D$֍~:(1߃D$$D$ \$D$D$#֍~D$D$ D$(kD$$;D$ t$\$$Xi)ky i)kD$,(kD$$S><$uD$D$'D$D$ D$(kD$$菛1э<wlhD$ $D$D$ (kD$)hD$5T$D$D$ D$/(k$[q)D$3D$D$ D$8(k$FD$ D$\$$Xi)ky i)kD$,(kD$$ր>4$ze[^_]ÐffUU$$kÐ @(k)(k0(k(k5(k0(k5(k4(k4(kP@(kN@Dlibgcj-13.dll_Jv_RegisterClassesCouldn't open file %sCan't open %s for writingCan't get the size of %sContent-Length: %lld Accept-ranges: bytes Last-Modified: %s, %02d %s %4d %02d:%02d:%02d GMT Can't get the size of file.failed to resume file:// transferFILE*a(k`$k@$k$k$kP$tI%c%c==%c%c%c=%c%c%c%cABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/%s:%d%255[^:]:%d:%255sResolve %s found illegal! Added %s:%d:%s to DNS cache %2lld:%02lld:%02lld%3lldd %02lldh%7lldd%5lld%4lldk%2lld.%0lldM%4lldM%2lld.%0lldG%4lldG%4lldT%4lldP 4$kP4$k4$k4$k4$k#5$k 4$kV5$k04$k3$kCallback aborted** Resuming transfer from byte position %lld %% Total %% Received %% Xferd Average Speed Time Time Time Current Dload Upload Total Spent Left Speed %3lld %s %3lld %s %3lld %s %s %s %s %s %s %s $tIzDrb-application/octet-stream}F$kI$kI$kpI$kH$kH$kK$kwK$kVK$k}F$kJ$kJ$kXJ$k8J$kI$kI$kJ$k}F$k}F$k@I$kContent-Type: multipart/form-data%s; boundary=%s --%s Content-Disposition: form-data; name="" Content-Type: multipart/mixed, boundary=%s --%s Content-Disposition: attachment; filename="%s"; filename="%s" Content-Type: %s %s couldn't open file "%s" --%s-- --%s-- image/gifimage/jpegtext/plaintext/htmlapplication/xml.gife(k.jpgf(k.jpegf(k.txtf(k.htmlf(k.xml&f(k0123456789abcdefTRUEFALSE/unknown.#HttpOnly_%s%s%s %s %s %s %lld %s %sReplacedAdded%1023[^; =]=%4999[^; ]securehttponlydomainskipped cookie with illegal dotcount domain: %s skipped cookie with bad tailmatch domain: %s versionmax-ageexpires %s cookie %s="%s" for domain %s, path %s, expire %lld path ^$k]$k]$k`]$k_$k^$k`^$knone-rSet-Cookie:w# Netscape HTTP Cookie File # http://curl.haxx.se/docs/http-cookies.html # This file was generated by libcurl! Edit at your own risk. 
# # Fatal libcurl error %s WARNING: failed to save cookies in %s Rewind stream after send NTLM send, close instead of sending %lld bytes Empty reply from serverAvoided giant realloc for header (max is %d)!Failed to alloc memory for big header!HTTP/RTSP/Proxy-BasicProxyServerDigestNTLMProxy-authorization:Authorization:%s:%s%sAuthorization: Basic %s %s auth using %s with user '%s' The requested URL returned error: %dAuthentication problem. Ignoring this. Ignoring duplicate digest auth header. 100-continueExpect:Expect: 100-continue Host:Content-Type:Content-LengthConnection%s Invalid TIMEVALUE%s, %02d %s %4d %02d:%02d:%02d GMTIf-Modified-Since: %s If-Unmodified-Since: %s Last-Modified: %s HEADGETTransfer-Encoding: chunked ][Accept: */* 1.11.0Proxy-Connection: Keep-Alive ; User-Agent:Referer:Referer: %s Cookie:Accept-Encoding:TE:Accept-Encoding: %s Transfer-Encoding:Connection:%s, TE TE: gzip Connection: TE TE: gzip chunkedHost: %s%s%s Host: %s%s%s:%hu ftp://;type=;type=%cAccept:Could not seek streamCould only read %lld bytes from the inputFile already completely uploadedContent-Range:Range:Range: bytes=%s Content-Range: bytes 0-%lld/%lld Content-Range: bytes %s%lld/%lld Content-Range: bytes %s/%lld %s ftp://%s:%s@%sProxy-Connection:%s HTTP/%s %s%s%s%s%s%s%s%s%s%s%sCookie: %s%s=%s%s%s Content-Length: 0 Failed sending POST requestInternal HTTP POST error!Content-Length:Content-Length: %lld Could not get Content-Type header line!Failed sending PUT requestContent-Type: application/x-www-form-urlencoded 0 %x Failed sending HTTP POST requestFailed sending HTTP requestupload completely sent off: %lld out of %lld bytes Chunky upload is not supported by HTTP 1.0no chunk, no close, no size. Assume close to signal end Keep sending data to get tossed away! 
HTTP error before end of send, stop sending HTTP/%d.%d %3d HTTP %3d RTSP/%d.%d %3dHTTPThe requested URL returned error: %sHTTP 1.0, assume close after body Maximum file size exceededNegative content-length: %lld, closing after transfer keep-aliveHTTP/1.0 proxy connection set to keep alive! closeHTTP/1.1 proxy connection set close! HTTP/1.0 connection set to keep alive! identitydeflategzipx-gzipcompressx-compressContent-Encoding:Set-Cookie:Last-Modified:WWW-Authenticate:Proxy-authenticate:Location:HTTPSJr(k$kx$k0|$k t$k z$kt$kp(k$kx$k0|$kt$kPPOSTPUTs(ks(k s(kk(k[%s %s %s]%sFailed writing body (%zu != %zu)Failed writing headerRecv failure: %sSend failure: %sfromtos(ks(ks(ks(kHeaderDatas(ks(ks(ks(k* < > { } { } USER %sPWDPBSZ %dQUITFailure sending QUIT command: %sMaximum file size exceededftp server doesn't support SIZE Offset (%lld) was beyond file size (%lld)File already completely downloaded Instructs server to resume from offset %lld REST %lldRETR %s%sConnect data stream passively got positive EPSV response, but can't connect. 
Disabling EPSV PASVAPPE %sSTOR %sSIZE %sCould not seek streamFailed to read dataFile already completely uploaded getsockname() failed: %sfailed to resolve the address provided to PORT: %sbind(port=%hu) on non-local address failed: %s bind(port=%hu) failed: %sbind() failed, we ran out of ports!socket failure: %s%s |%d|%s|%hu|Failure sending EPRT command: %s,%d,%d%s %sFailure sending PORT command: %sError accept()ing server connectConnection accepted from server Doing the SSL/TLS handshake on the data stream /no memoryUploading to a URL without a file name!Request has same path as previous transfer ;type=%c%c%c%u%cWeirdly formatted EPSV reply%d,%d,%d,%d,%d,%dCouldn't interpret the 227-responseSkips %d.%d.%d.%d for data connection, uses %s instead %d.%d.%d.%ddisabling EPSV usage Bad PASV/EPSV response: %03dCan't resolve proxy host %s:%huCan't resolve new host %s:%huConnecting to %s (%s) port %d unknown proxytype option given$k$kl$kl$k$k`$k$k`$kTYPE %cMDTM %sCWD %sLISTNLSTPRET %sPRET STOR %sPRET RETR %sREST %d %s%s%sFTP response timeoutFTP response aborted due to select/poll error: %dWe got a 421 - timeout! 
Checking for server connect Accept timeout occurred while waiting server connectThere is negative response in cache while serv connect Error while waiting for server connectReady to accept data connection from server Ctrl conn has data while waiting for data conn Preparing for accepting server on data port Got a %03d ftp-server response when 220 was expectedunsupported parameter to CURLOPT_FTPSSLAUTH: %dAUTH %sPASS %sACCT %sACCT requested but none availableAccess denied: %03dACCT rejected by server: %03dPROT %cCCCFailed to clear the command channel (CCC)SYSTEntry path is '%s' Failed to figure out path OS/400SITE NAMEFMT 1QUOT command failed with %03dMKD %sServer denied you to change to the given directoryFailed to MKD dir: %03d%04d%02d%02d%02d%02d%02d%04d%02d%02d %02d:%02d:%02d GMTLast-Modified: %s, %02d %s %4d %02d:%02d:%02d GMT unsupported MDTM reply format Given file does not existThe requested document is not new enough The requested document is not old enough Skipping time comparison Couldn't set desired modeGot a %03d response code instead of the assumed 200 Content-Length: %lld Couldn't use RESTPRET command not accepted: %03ddisabling EPRT usage Failed to do PORTConnect data stream actively bytesMaxdownload = %lld Getting file with size: %lld Data conn was not available immediately RETR response: %03dFailed FTP upload: %0d $k%k`%k!%k!%k5%k.$k$k$k$k$k$k$k$k$k$kr$k%k%k$k$k$k$k$k$k$kq$kq$k0$k$k$k$k$kw$kWildcard - Parsing started Wildcard - START of "%s" Wildcard - "%s" skipped by user %k%k%k6%k%kRemembering we are in dir "%s" ABORFailure sending ABOR command: %scontrol connection looks deadpartial download completed, closing connection server did not report OK, got %dUploaded unaligned file size (%lld out of %lld bytes)Received only partial file: %lld bytesNo data was received!QUOT string not accepted: 
%s@%k(%k(%k(%k(%k(%k(%k(%k(%k@%k@%k(%k@%k@%k(%k(%k(%k@%k@%k@%k(%k(%k(%k@%k(%k@%k(%k(%k(%k(%k@%k(%k(%k(%k(%k(%k@%k(%k(%k(%k(%k(%k(%k(%k(%k(%k(%k(%k(%k(%k(%k(%k(%k(%k(%k(%k(%k(%k(%k(%k(%k(%k(%k@%k(%k(%k(%k(%k(%k(%k(%k(%k(%k(%k(%k(%k(%k(%k@%kFTPS(k$k`%k%k %k$k@$k%k $k $k$k$k gFTPd(k$k`%k%k %k$k@$k%k $k $k$k$kfSSLTLSEPRTPORTEPSVPASVd(k$kx$k(k$kx$k%llu-IDN support not present, can't parse Unicode domains *, CURL_CA_BUNDLEClosing connection #%ld Connection (#%ld) was killed to make room (holds %ld) This connection did not fit in the connection cache fileLDAPDICTftphttpIMAP/][%15[^:]:%[^ ]%15[^ :]://%[^ /?]%[^ ]%[^ /?]%[^ ] malformedFTP.DICT.LDAP.IMAP.%255[^:@]:%255[^@]:%255[^@]%25Invalid IPv6 address format Protocol %s not supported or disabled in libcurl%s://%smemory shortageno_proxyNO_PROXYhttp_proxyall_proxyALL_PROXYsocks5hsocks5socks4asocks4socks[%*45[0123456789abcdefABCDEF:.]%cIPv6 numerical address used in URL without brackets;type=%c%s://%s%s%s:%hu%s%s%sPort number too large: %luCouldn't find host %s in the _netrc file; using defaults anonymousftp@example.comConnection #%ld seems to be dead! Connection #%ld hasn't finished name resolve, can't reuse Connection #%ld isn't open enough, can't reuse Re-using existing connection! (#%ld) with host %s Couldn't resolve host '%s'Couldn't resolve proxy '%s'://deflate, gzipALLSESSFLUSHSet-Cookie:CURLOPT_SSL_VERIFYHOST no longer supports 1 as value!Connected to %s (%s) port %ld (#%ld) proxy User-Agent: %s About to connect() to %s%s port %ld (#%ld) Connection #%ld to host %s left intact  (kr(k`r(k(k (k (k(k(k@a(k(k(k`(k (k(k(k(k@(k(kdefault!./MATCH:/M:/FIND:lookup word is missing CLIENT libcurl 7.28.1 MATCH %s %s %s QUIT Failed sending DICT request/DEFINE:/D:/LOOKUP:CLIENT libcurl 7.28.1 DEFINE %s %s QUIT CLIENT libcurl 7.28.1 %s QUIT DICT(k %kD @Operation too slow. 
Less than %ld bytes/sec transferred the last %ld seconds,Microsoft Corporation.LDAP local: LDAP Vendor = %s ; LDAP Version = %d LDAP local: %s LDAPoneonetreebasesubsubtreeLDAP local: %sLDAP local: Cannot connect to %s:%huLDAP local: ldap_simple_bind_s %sLDAP remote: %sDN: : ;binaryThere are more than %d entries cleartextLDAP local: trying to establish %s connection encryptedތ(kЉ%kErrorClient helloClient keyClient finishedServer helloServer verifyServer finishedRequest CERTClient CERTHello requestCERTServer key exchangeClient key exchangeCERT verifyFinishedUnknownTLS alert, TLS handshake, TLS app data, TLS change cipher, TLS Unknown, SSLv%c, %s%s (%d): ߗ%k%k%k՗%k%k˗%k%k%k%k%k%k%k%k%k%k%k%k%k%k%ku%kk%k%k%ka%kW%k%k%k%kM%k%s:%s(%s) %s: %s %02x:SSL_write() returned SYSCALL, errno = %dSSL_write() error: %sSSL_write() return error %dGMT%04d-%02d-%02d %02d:%02d:%02d %sPEMDERENGP12 Signature: %s SignatureSSL read: %s, errno %d(critical), %s: %s %s %s%c--- Certificate chain %2d Subject: %s Subject Issuer: %s Issuer Version: %lu (0x%lx) %lxVersion Serial Number: %ld (0x%lx) %02x%c Serial Number: %s Serial Number Start date: %s Start date Expire date: %s Expire date Unable to load public key RSA Public Key (%d bits) %dRSA Public Keyrsanedpqdmp1dmq1iqmpdsagpriv_keypub_keydh%s CertSSL: couldn't get peer certificate!Server certificate: SSL: couldn't get X509-subject! subject: %s start date: %s expire date: %s subjectAltName: %s matched subjectAltName does not match %s ssluse.cSSL: illegal cert name fieldSSL: unable to obtain common name from peer certificateSSL: certificate subject name '%s' does not match target host name '%s' common name: %s (matched) SSL: couldn't get X509-issuer name! issuer: %s rSSL: Unable to open issuer cert (%s)SSL: Unable to read issuer cert (%s)SSL: Certificate issuer check failed (%s) SSL certificate issuer check ok (%s) SSL certificate verify result: %s (%ld) SSL certificate verify result: %s (%ld), continuing anyway. SSL certificate verify ok. 
Signature Algorithm: %s Signature Algorithm Public Key Algorithm: %s Public Key Algorithmlibcurl is now using a weak random seed! noneSSL certificate problem, verify that the CA cert is OK.SSL connection timeoutOpenSSL was built without SSLv2 supportSSL: couldn't create a context: %sSSL: couldn't set callback! SSL: couldn't set callback argument! unable to use client certificate (no key found or wrong pass phrase?)LOAD_CERT_CTRLssl engine does not support loading certificatesssl engine cannot load client cert with id '%s' [%s]ssl engine didn't initialized the certificate properly.unable to set client certificatecrypto engine not set, can't load certificaterbcould not open PKCS12 file '%s'error reading PKCS12 file '%s'could not parse PKCS12 file, check password, OpenSSL error %sunable to use private key from PKCS12 file '%s'private key from PKCS12 file '%s' does not match certificate in same filecannot add certificate to certificate chaincannot add certificate to client CA listnot supported file type '%s' for certificateunable to set private key file: '%s' type %sfailed to load private key from crypto engineunable to set private keycrypto engine not set, can't load private keyfile type P12 for private key not supportednot supported file type for private keyunable to create an SSL structurePrivate key does not match the certificate public keyfailed setting cipher listerror setting certificate verify locations: CAfile: %s CApath: %serror setting certificate verify locations, continuing anyway: successfully set certificate verify locations: CAfile: %s CApath: %s error loading CRL file: %ssuccessfully load CRL file: CRLfile: %s error signaled by ssl ctx callbackSSL: couldn't create a context (handle)!WARNING: failed to configure server name indication (SNI) TLS extension SSL: SSL_set_session failed: %sSSL re-using session ID SSL: SSL_set_fd failed: %sselect/poll on SSL socket, errno: %dSSL certificate problem: %sUnknown SSL protocol error in connection to %s:%ld %s%sSSL 
connection using %s old SSL session ID is stale, removing failed to store ssl sessionSSL Engine '%s' not foundFailed to initialise SSL Engine '%s': %sset default crypto engine '%s' set default crypto engine '%s' failedSSL_ERROR_WANT_READ SSL_ERROR_WANT_WRITE SSL shutdown timeoutOpenSSL/%lx.%lx.%lx%s zlib/%s7.28.1i386-pc-win32dictfileftpftpsgopherhttphttpsimapimapsldappop3pop3srtspsmtpsmtpstelnettftp>(kC(kH(kL(kQ(kX(k](kc(kh(kn(ks(kx(k~(k(k(k(k(k%%%02X%k%k%k%k%k%k%k%k%k%k%k%k%k%k%k%k%k%k%k%k%k%k%k%k%k%k%k%k%k%k%k%k%k%k%k%k%k%k%k%k%k%k%k%k%k%k%k%k%k%k%k%k%k%k%k%k%k%k%k%k%k%k%k%k%k%k%k%k%k%k%k%k%k%k%k%k%k%k%k%k%k%k%ld.%ld%k%k%kC%k%k%k%k6%k%k%k%k'%k'%k%k'%k'%k'%k'%k'%k'%k%k%k'%k%k%k'%k8%kU%kU%kU%kU%kU%kU%kU%kU%kU%k'%k'%k'%k'%k'%k'%k'%k'%k'%k'%k'%k'%k'%k'%k'%k'%k'%k'%k%k'%k'%k%k'%k'%k'%k'%k'%k'%k'%k'%k'%k'%k'%k'%k'%k'%k'%k'%k'%k'%k'%k'%k'%k'%k'%k'%k%k'%k'%k'%k%k'%k'%k'%k'%k%k'%k'%k'%k'%k'%k'%k'%k'%k %kc%k%k%k%k%k%k%k%k%k%k%k%k%k%k%k%k%k%k%k%k%k%k%k%k%k%k%k%k%k%k%k%k%k%k0%k%k%k%k%k%k%kP%ke%k%k%k%k%k%k%k%k%kD%k%k%kP%k%k%k%k%k%k0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789abcdefghijklmnopqrstuvwxyz(nil)(nil)EXOPLDONTDOWONTWILL%s IAC %s %s IAC %d %s %s %s %s %s %d %s %d %d Sending data failed (%d)SENTRCVD%s IAC SB (terminated by %s %u %s%d, not IAC SE!) 
(Empty suboption?)%s (unsupported)%d (unknown)Width: %hu ; Height: %hu IS SEND INFO/REPLY NAME "%s" , = %c %.2x %c%c%c%c%s%c%c%c%c%c%c%127[^,],%127s%c%s%c%s%c%cUSER,%s%127[^= ]%*[ =]%255sTTYPEXDISPLOCNEW_ENVWS%hu%*[xX]%huSyntax error in telnet option: %sBINARYUnknown telnet option %sWSAStartup failed (%d)insufficient winsock version to support telnetfailed to load WS2_32.DLL (%d)WSACreateEventfailed to find WSACreateEvent function (%d)WSACloseEventfailed to find WSACloseEvent function (%d)WSAEventSelectfailed to find WSAEventSelect function (%d)WSAEnumNetworkEventsfailed to find WSAEnumNetworkEvents function (%d)WSACreateEvent failed (%d)WSAEnumNetworkEvents failed (%d)In SUBOPTION processing, RCVDTime-outWSACloseEvent failed (%d)FreeLibrary(wsock2) failed (%d)WS2_32.DLL&kp&k<&k&k.&k&kt&kG&k&k&k&k&k&k&k&kTELNET(k%k%k@@ECHORCPSUPPRESS GO AHEADNAMESTATUSTIMING MARKRCTENAOLNAOPNAOCRDNAOHTSNAOHTDNAOFFDNAOVTSNAOVTDNAOLFDEXTEND ASCIILOGOUTBYTE MACRODE TERMINALSUPDUPSUPDUP OUTPUTSEND LOCATIONTERM TYPEEND OF RECORDTACACS UIDOUTPUT MARKINGTTYLOC3270 REGIMEX3 PADNAWSTERM SPEEDLFLOWLINEMODEOLD-ENVIRONAUTHENTICATIONENCRYPTNEW-ENVIRON(kd(ki(km(k(k(k(k(k(k(k(k(k(k(k©(kɩ(kЩ(kש(k(k(k(k(k (k(k%(k/(k=(kH(kW(k^(kj(kq(kv(k(k(k`(k(k(k(k(kEOFSUSPABORTEORSENOPDMARKBRKIPAOAYTECELGASBIAC`(kd(ki(ko(ks(kv(kz(k(k(k(k(k(k(k(k(k(k(k(k(k(kHOMEr machineloginpassword_netrc\%s%s%s&k&k&k&k&k&k&k&k&k&k&k&k&k&k&k&k&k&kw&k&kh&kY&kJ&k;&k,&k&k&k &k&k&k&k&k&k&k&k&k&k&k&k&k&k&k&k&k&k&k&k&k&k&k&k&k&k&k&k&k&k&k&k&k&k&k&k&k&k&k&k&k&k&k&k&k}&k&k&k&kn&k&k&k&k&k_&k&k&k&k&k&k&k&kt&k&k&k&k&k_&kJ&k;&k&k,&k&k&k&k&k&k&k&k&k&k&k&k&k&k&k operation aborted by callbackread function returned funny value%x%sseek callback returned error %dthe ioctl callback returned %d ioctl callback returned error %dnecessary data rewind wasn't possibleThe requested document is not new enough The requested document is not old enough select/poll returned errorRewinding stream by : %zd bytes on url %s (zero-length body) Excess found in a non 
pipelined read: excess = %zd url = %s (zero-length body) Ignoring the response-body HTTP server doesn't seem to support byte ranges. Cannot resume.Failed writing dataProblem (%d) in the Chunked-Encoded dataLeftovers after chunking: %zu bytes Rewinding %zu bytes Rewinding stream by : %zu bytes on url %s (size = %lld, maxdownload = %lld, bytecount = %lld, nread = %zd) Excess found in a non pipelined read: excess = %zu, size = %lld, maxdownload = %lld, bytecount = %lld Unrecognized content encoding type. libcurl understands `identity', `deflate' and `gzip' content encodings.we are done reading and this is set to close, stop send Failed to alloc scratch buffer!We are completely uploaded and fine Done waiting for 100-continue Operation timed out after %ld milliseconds with %lld out of %lld bytes receivedOperation timed out after %ld milliseconds with %lld bytes receivedtransfer closed with %lld bytes remaining to readtransfer closed with outstanding read data remainingNo URL set!HEADGETMaximum (%ld) redirects followed%15[^?&/:]://%c//Issue another request to this URL: '%s' Violate RFC 2616/10.3.2 and switch from POST to GET Violate RFC 2616/10.3.3 and switch from POST to GET Disables POST, goes with %s Re-used connection seems dead, get a new one Connection died, retrying a fresh connect unspecified error %d%sCONNECT_ONLY is required!Failed to get recent socketalnumalphaxdigitprintgraphspaceblankupperlowerdigitFL&kK&kK&kVK&kL&k -> total rwx-tTsS0123456789-APM0123456789:\&kZ&k\&kZ&k6[&k[&k\&kE\&k`\&kZ&kY&kE_&k_&k^&k_&k^&kS^&kp_&k]&k7^&k]&kb]&k0]&k]&kt`&kZ&kZ&kZ&kZ&kZ&kZ&kZ&kZ&kZ&kZ&kZ&kZ&kZ&kZ&kZ&kZ&kZ&kZ&kZ&kZ&kZ&kZ&kk`&kZ&kZ&kZ&kZ&kZ&kZ&kZ&kZ&kZ&kZ&kZ&kZ&kZ&kZ&kZ&kZ&kZ&kZ&kZ&kZ&kZ&kZ&kZ&kZ&kZ&kZ&kZ&kZ&kZ&kb`&kY`&kP`&kZ&kZ&kZ&kZ&kZ&kZ&kZ&kG`&kZ&kZ&kZ&k>`&kZ&kZ&k`&kUnrecognized content encoding type. 
libcurl understands `identity', `deflate' and `gzip' content encodings.f&kh&kh&kh&kh&kg&kg&kg&kg&k0g&kg&kg&kgetpeername() failed with errno %d: %sgetsockname() failed with errno %d: %sssrem inet_ntop() failed with errno %d: %sssloc inet_ntop() failed with errno %d: %ssa_addr inet_ntop() failed with errno %d: %s Trying %s... Could not set TCP_NODELAY: %s TCP_NODELAY set Failed to set SO_KEEPALIVE on fd %d if!host!Local Interface %s is ip %s using address family %i Name '%s' family %i resolved to '%s' family %i Local port: %hu Bind to local port %hu failed, trying next bind failed with errno %d: %sFailed to connect to %s: %sconnected Timeout %s Couldn't bind to '%s'proxyhostConnection time-outconnect() timed out!couldn't connect to %s at %s:%dAfter %ldms connect time, move on! Connection failed Failed connect to %s:%ld; %s&k&kԑ&k&k&k&k&kp&kp&k@&k#&k#&k#&kInternal error clearing splay node = %d Internal error removing splay node = %d Pipe broke: handle 0x%p, url = %s In state %d with no easy_conn, bail out! 
Resolving timed out after %ld millisecondsConnection timed out after %ld millisecondsOperation timed out after %ld milliseconds with %lld out of %lld bytes received1&kq&k &k&k&k &kc&k&k&k?&k|&k,&k&k%&kۡ&k&ka&kDelayed kill of easy handle %p Error while processing content unencoding: %sError while processing content unencoding: Unknown failure within decompression software.1.2.71.2.0.46&k&k&kp&k`&kP&k%02xDigestnoncestaletruerealmopaqueqop,authauth-intalgorithmMD5-sessMD5Proxy-%32ld%s:%s:%s%s:%.*s%s:%s%s:%s:%08x:%s:%s:%s%sAuthorization: Digest username="%s", realm="%s", nonce="%s", uri="%s", cnonce="%s", nc=%08x, qop=%s, response="%s"%sAuthorization: Digest username="%s", realm="%s", nonce="%s", uri="%s", response="%s"%s, opaque="%s"%s, algorithm="%s"'k'k'k\'k'k'k\@01234567890123456789abcdef0123456789ABCDEFUnknown errorCURLSHcode unknownCall interruptedBad fileBad accessBad argumentInvalid argumentsOut of file descriptorsCall would blockBlocking call in progressDescriptor is not a socketNeed destination addressBad message sizeBad protocolProtocol option is unsupportedProtocol is unsupportedSocket is unsupportedOperation not supportedAddress family not supportedProtocol family not supportedAddress already in useAddress not availableNetwork downNetwork unreachableNetwork has been resetConnection was abortedConnection was resetNo buffer spaceSocket is already connectedSocket is not connectedSocket has been shut downToo many referencesTimed outConnection refusedLoop??Name too longHost downHost unreachableNot emptyProcess limit reachedToo many usersBad quotaSomething is staleRemote errorDisconnectedWinsock library is not readyWinsock library not initialisedWinsock version not supportedHost not foundHost not found, try againUnrecoverable error in call to nameserverNo data record of requested typeUnknown error %d (%#x)No errorUnsupported protocolFailed initializationURL using bad/illegal format or missing URLA requested feature, protocol or option was not found built-in in 
this libcurl due to a build-time decision.Couldn't resolve proxy nameCouldn't resolve host nameCouldn't connect to serverFTP: weird server replyAccess denied to remote resourceFTP: The server failed to connect to data portFTP: unknown PASS replyFTP: Accepting server connect has timed outFTP: unknown PASV replyFTP: unknown 227 response formatFTP: can't figure out the host in the PASV responseFTP: couldn't set file typeTransferred a partial fileFTP: couldn't retrieve (RETR failed) the specified fileQuote command returned errorHTTP response code said errorFailed writing received data to disk/applicationUpload failed (at start/before it took off)Failed to open/read local data from file/applicationOut of memoryTimeout was reachedFTP: command PORT failedFTP: command REST failedRequested range was not delivered by the serverInternal problem setting up the POSTSSL connect errorCouldn't resume downloadCouldn't read a file:// fileLDAP: cannot bindLDAP: search failedA required function in the library was not foundOperation was aborted by an application callbackA libcurl function was given a bad argumentFailed binding local connection endNumber of redirects hit maximum amountAn unknown option was passed in to libcurlMalformed telnet optionSSL peer certificate or SSH remote key was not OKServer returned nothing (no headers, no data)SSL crypto engine not foundCan not set SSL crypto engine as defaultFailed sending data to the peerFailure when receiving data from the peerProblem with the local SSL certificateCouldn't use specified SSL cipherPeer certificate cannot be authenticated with given CA certificatesUnrecognized or bad HTTP Content or Transfer-EncodingInvalid LDAP URLMaximum file size exceededRequested SSL level failedSend failed since rewinding of the data stream failedFailed to initialise SSL crypto engineLogin deniedTFTP: File Not FoundTFTP: Access ViolationDisk full or allocation exceededTFTP: Illegal operationTFTP: Unknown transfer IDRemote file already existsTFTP: No 
such userConversion failedCaller must register CURLOPT_CONV_ callback optionsProblem with the SSL CA cert (path? access rights?)Remote file not foundError in the SSH layerFailed to shut down the SSL connectionSocket not ready for send/recvFailed to load CRL file (path? access rights?, format?)Issuer check against peer certificate failedFTP: The server did not accept the PRET command.RTSP CSeq mismatch or invalid CSeqRTSP session errorUnable to parse FTP file listChunk callback failed(k(k(k(k(k(k(k(k(k(k(kG(k`(k(k(k(k(k(k(k4(k(kl(k(k(k(k(k(k=(kK(k(k_(kx(k(k(k(k(k(k(k1(kC(k(kX(k(k(k(k(k(k(k8(kc(k(k|(k(k(k(k((kH(k(kt(k(k(k(k:(kK(kf(k(k(k(k(k(k(k=(kU(ko(k(k(k(k(k(k.(kH(kp(k(k(k(k,(kO(kb(k(kPlease call curl_multi_perform() soonInvalid multi handleInvalid easy handleInternal errorInvalid socket argumentUnknown option(k(k*(k?(k=(kS(kb(kz(kUnknown share optionShare currently in useInvalid share handleFeature not enabled in this library(k(k(k(k=(k(k%d.%d.%d.%d%lx%31[ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz]%02d:%02d:%02d%02d:%02dJanFebMarAprMayJunJulAugSepOctNovDec(k(k(k(k(k(k(k(k(k(k(k(kMonTueWedThuFriSatSun0(k4(k8(k<(k@(kD(kH(kMondayTuesdayWednesdayThursdayFridaySaturdaySundayh(ko(kw(k(k(k(k(kGMTUTCWETBSTWAT<ASTADTEST,EDTCSThCDT,MSTMDThPSTPDTYSTYDTHSTXHDTCATXAHSTXNTIDLWCETMETMEWTMESTCESTMESZFWTFSTEETWAST\WADT CCT JSTEASTEADTlGSTNZT0NZST0NZDTIDLE0A<BxCDE,FhGHIKXLMNOPLQRST\U VWXlY0Z;Zx0NReceived last DATA packet block %d again. Received unexpected DATA packet block %d, expecting block %d %sTimeout waiting for block %d ACK. 
Retries = %d tftp_rx: internal errorConnection time-outset timeouts for state %d; Total %ld, retry %d maxtry %d Connected for receive%s Received ACK for block %d, expecting %d tftp_tx: giving up waiting for block %d acktftp_tx: internal error, event: %iConnected for transmitbind() failed; %s;mode=octetnetascii%s%c%s%c%lldtsize%dblksizetimeouttftp_send_first: internal errorTFTP finishedInternal state machine error'k'k'k'k'k'k'k'kReceived too short packetMalformed ACK packet, rejectinggot option=(%s) value=(%s) invalid blocksize value in OACK packetblksize is larger than max supported%s (%d)blksize is smaller than min supportedserver requested blksize larger than allocated%s (%ld)requestedblksize parsed from OACK%s (%d) %s (%d) tsize parsed from OACK%s (%ld) invalid tsize -:%s:- value in OACK packetInternal error: Unexpected packetTFTP response timeoutTFTP(k'k'k`'k 'k'k'kP'kP'k'kE@aConnection time-out%hu.%hu.%hu.%huFailed to resolve "%s" for SOCKS4 connect.Failed to send SOCKS4 connect request.Failed to receive SOCKS4 connect request ack.SOCKS4 reply has wrong version, version should be 4.SOCKS4%s request granted. Can't complete SOCKS4 connection to %d.%d.%d.%d:%d. (%d), request rejected or failed.Can't complete SOCKS4 connection to %d.%d.%d.%d:%d. (%d), request rejected because SOCKS server cannot connect to identd on the client.Can't complete SOCKS4 connection to %d.%d.%d.%d:%d. (%d), request rejected because the client program and identd report different user-ids.Can't complete SOCKS4 connection to %d.%d.%d.%d:%d. 
(%d), Unknown.SOCKS5: server resolving disabled for hostnames of length > 255 [actual len=%zu] SOCKS5: no connection hereSOCKS5: connection timeoutSOCKS5: error occurred during connectionUnable to send initial SOCKS5 request.SOCKS5 nothing to readSOCKS5 read timeoutSOCKS5 read error occurredUnable to receive initial SOCKS5 response.Received invalid version in initial SOCKS5 response.Failed to send SOCKS5 sub-negotiation request.Unable to receive SOCKS5 sub-negotiation response.User was rejected by the SOCKS5 server (%d %d).SOCKS5 GSSAPI per-message authentication is not supported.No authentication method was acceptable. (It is quite likely that the SOCKS5 server wanted a username/password, since none was supplied to the server on this connection.)No authentication method was acceptable.Undocumented SOCKS5 mode attempted to be used by server.%d Failed to resolve "%s" for SOCKS5 connect.Failed to send SOCKS5 connect request.Failed to receive SOCKS5 connect request ack.SOCKS5 reply has wrong version, version should be 5.Can't complete SOCKS5 connection to %d.%d.%d.%d:%d. (%d)Can't complete SOCKS5 connection to %s:%d. (%d)Can't complete SOCKS5 connection to %02x%02x:%02x%02x:%02x%02x:%02x%02x:%02x%02x:%02x%02x:%02x%02x:%02x%02x:%d. (%d)4'k4'k4'k4'k4'k4'k4'k4'k4'k4'k4'k4'kw4'kp4'kg4'k`4'kW4'kP4'kG4'k@4'k74'k04'k'4'k 4'k4'k4'k%s LOGIN %s %s%s LOGOUT** Got unexpected imap-server response%s STARTTLSAccess denied. %cSTARTTLS denied. %cFound %llu bytes to download Filesize left: %lld Select failed%s FETCH 1 BODY[TEXT]%E'k"F'kE'kE'k%E'kE'kPE'kINBOX%s SELECT %sIMAPS?(k='kG'k='k@'k@'kI'k?'k?'k@?'k0eIMAP(k='kG'k='k@'k@'kI'k?'k?'k@?'kdABCD(k(k(k (k(k$kx$k?(k$kx$kUSER %sQUIT-ERRUSERAPOPSASLLOGINPLAINCRAM-MD5DIGEST-MD5GSSAPIEXTERNALNTLM+OKLISTRETR%s %sCAPA . Got unexpected pop3-server responseSTLSSTARTTLS denied. %cNo known SASL authentication mechanisms supported! %02xAPOP %s %sNo known authentication types supported! Access denied. 
%c%sAccess denied: %dpopAuthentication failed: %dPASS %sAUTH %sX'k['kZ'k`\'k['k['k[Z'kY'krY'k2Z'kX'kX'k]X'k]X'kd['k;['kX'kPOP3S(kJ'k@R'k`J'kL'kL'kT'kpL'kpL'kK'kEPOP3(kJ'k@R'k`J'kL'kL'kT'kpL'kpL'kK'kn@D(k$kx$kn(k$kx$kEHLO %sRCPT TO:%sRCPT TO:<%s>QUITlocalhostSIZEAUTH LOGINPLAINCRAM-MD5DIGEST-MD5GSSAPIEXTERNALNTLMNo known authentication mechanisms supported! AUTH %s %sAUTH %s . <>%s<%s>%lldMAIL FROM:%sMAIL FROM:%s AUTH=%sMAIL FROM:%s AUTH=%s SIZE=%sMAIL FROM:%s SIZE=%sGot unexpected smtp-server response: %dHELO %sRemote access denied: %dSTARTTLSSTARTTLS denied. %cAccess denied: %dsmtpAuthentication failed: %dMAIL failed: %dRCPT failed: %dDATAq'kv'kv'ku'kt'kq'ku'kJu'kTv'kPt'kt'ks'kNs'ks'kr'kr'kEr'kr'k r'kFailed to alloc scratch buffer!SMTPS(k0b'k m'kk'kd'k@d'k z'k d'k d'kc'kESMTP(k0b'k m'kk'kd'k@d'k z'k d'k d'kc'kD(k$kx$k(k$kx$kserver response timeoutselect/poll error%s response reading failedExcessive server response line length received, %zd bytes. Stripping The CSeq of this request %ld did not match the response %ldGot an RTP Receive with a CSeq of %ld *Accept: application/sdp RECORDSET_PARAMETERGET_PARAMETERTEARDOWNPAUSEPLAYSETUPANNOUNCEDESCRIBEOPTIONSGot invalid RTSP request: RTSPREQ_NONEGot invalid RTSP request: RTSPREQ_LASTRefusing to issue an RTSP request [%s] without a session ID.Transport:Transport: %s Refusing to issue an RTSP SETUP without a Transport: header.Accept:Accept-Encoding:Accept-Encoding: %s User-Agent:Referer:Referer: %s Range:Range: %s CSeq:CSeq cannot be set as a custom header.Session:Session ID cannot be set as a custom header.%s %s RTSP/1.0 CSeq: %ld Session: %s %s%s%s%s%s%sContent-Length:Content-Length: %lld Content-Type:Content-Type: text/parameters Content-Type: application/sdp Failed sending RTSP request'k'k'k'kЈ'k'k'k'k'k'ku'k 'k0'kCannot write a 0 size RTP packet.Cannot pause RTPFailed writing RTP dataGot an error writing an RTP packet: %ldUnable to read the CSeq header: [%s]Got RTSP Session ID Line [%s], but wanted ID [%s]Got a blank 
Session IDRTSP1(k'k`'k'k'kЄ'k'k*6\Failed sending Gopher request GOPHER(k0'kF1.01.1][Proxy-Connection: Keep-Alive Establish HTTP proxy tunnel to %s:%hu %s:%huCONNECT%s%s%s:%huHost:Host: %s Proxy-Connection:User-Agent:CONNECT %s HTTP/%s %s%s%s%s Failed sending CONNECT to proxyProxy CONNECT aborted due to timeoutProxy CONNECT aborted due to select/poll errorProxy CONNECT abortedchunk reading DONE Read %zd bytes of chunk, continue Ignore %lld bytes of response-body %zd bytes of chunk left Proxy CONNECT followed by %zd bytes of opaque data. Data ignored (known bug #39)WWW-Authenticate:Proxy-authenticate:Content-Length:closeConnection:chunkedTransfer-Encoding:CONNECT responded chunked HTTP/1.%d %dTUNNEL_STATE switched to: %d Received HTTP code %d from proxy after CONNECTProxy replied OK to CONNECT request %dCould not resolve %s: %s; %sproxyhostinit_resolve_thread() failed for %s; %s getaddrinfo() failed for %s:%d; %s NTLMNTLM handshake rejected NTLM handshake failure (internal error) Proxy-%sAuthorization: NTLM %s KGS!@#$%NTLM handshake failure (unhandled condition) NTLMSSPNTLM handshake failure (bad type-2 message) NTLMSSP%c%c%c%c%c%c%c%c%c%c%c%c%c%c%c%c%c%c%c%c%c%c%c%c%s%sgethostname() failed, continuing without! 
NTLMSSP%c%c%c%c%c%c%c%c%c%c%c%c%c%c%c%c%c%c%c%c%c%c%c%c%c%c%c%c%c%c%c%c%c%c%c%c%c%c%c%c%c%c%c%c%c%c%c%c%c%c%c%c%c%c%c%cuser + domain + host name too big=%s %02x%02x%02x%02x%02x%02x%02x%02x%02x%02x%02x%02x%02x%02x%02x%02xnonce="realm="algorithm=md5-sess:%02x0123456789abcdefxn--1.2.7need dictionarystream endfile errorstream errordata errorinsufficient memorybuffer errorincompatible version(k(k(k(k(k(k(k(k(k(kincorrect header checkunknown compression methodinvalid window sizeunknown header flags setheader crc mismatchinvalid block typeinvalid stored block lengthstoo many length or distance symbolsinvalid code lengths setinvalid bit length repeatinvalid code -- missing end-of-blockinvalid literal/lengths setinvalid distances setinvalid literal/length codeinvalid distance codeinvalid distance too far backincorrect data checkincorrect length check|'k\'k'k'kD'k'k'kD'k'k'k<'k~'k'k'kL'k'k'kh'k'k$'k.'kD'kh'k8'k'k'k'k$'k'kP'k'k`Psp0  ` @ X ;x8 h( H T+t4  d$ D \ S|< l,  L R#r2  b" B Z Cz: j*  J V@3v6 f& F  ^ c~> n. N `Qq1  a! A Y ;y9 i)  I U+u5  e% E ] S}= m-  M S#s3  c# C [ C{; k+  K W@3w7 g' G  _ c? o/ O `Psp0  ` @ X ;x8 h( H T+t4  d$ D \ S|< l,  L R#r2  b" B Z Cz: j*  J V@3v6 f& F  ^ c~> n. N `Qq1  a! A Y ;y9 i)  I U+u5  e% E ] S}= m-  M S#s3  c# C [ C{; k+  K W@3w7 g' G  _ c? o/ O A@!  @a`10  @     0w,aQ mjp5c飕d2yҗ+L |~-d jHqA}mQDžӃVlkdzbeO\lcc=  n;^iLA`rqgjm Zjz  ' }Dңhi]Wbgeq6lknv+ӉZzJgo߹ホCՎ`~ѡ8ROggW?K6H+ L J6`zA`Ugn1yiFafo%6hRw G "/&U;( Z+j\1е,[d&c윣ju m ?6grWJz+{8 Ғ |! 
ӆBhn[&wowGZpj;f\ eibkaElx TN³9a&g`MGiIwn>JjѮZf @;7SŞϲG0򽽊º0S$6к)WTg#.zfJah]+o*7 Z-A1b62S-+ldEw}ZVǖAOIъ OM~-QJ#SpxAaU׮.7׵Y-6]]wll?AԞZ͢$ Faw$eڪ]]FD(koipvk19Z* ,  m86F߲]qTp0ek*1u4yީ%8S1bSWĔՖk1**ykʬHpo].*F6fcTT"eM©g0&):{ϼkZ> 8$,52F*sw1pHkQ6Fzw]cN̵J #pAF]#l8?1(BOgT~yUbL8^#ܖTZ1ObbSyOIV~P-{b-R4٠~^eGnHl/Su6: #jT$+e?yHf'*b#ٽЧ ?&~?$pi;FBzw[keZ~7 Sv8H 3?r$7jnԄYFܨ |OQ;օ U d S - =G\ p&Gw)` /a߫i5&LsZ<#0zMzFM8,9; :R:(q-v,.7/pXqYs3r%w+OQvrtEux܉~OK }!b|tyBxʠz{.lD~m8onlk[wjR1h58ib?mcf+aQ`צedd"fig HINSKyuJcO NZLݘMFGN@E$DD2AsX@*IBCPhTg3U>uW ַVS:R|P~Q9ZS [fYX4])\ZEo^m/_5qϱ٥s\ۼqދ!K7 kfֶԁ-b3Πjp]$^'~*I@VW<âM˟ŏ{ tDCm-@wm.B+(铜>Td"ŀǼϭ~8y$owJ1}05_K^ iϏ은BI#ƈdX܁T̓cQ: rՆ⩗ fn|xK)o%ƭ/3vUuA?)C:|sĵ@͂ Ͳ;bIUeh"׻_HS1޼^Z4eg Wb27_k%8ם(ŊO}do׸Jj3wVcXWP0qB{߭gCru&op-?'Bs ưGz>2[Ȏg; i8P/ _Y=чe:ZO?(3wwXR @hQ+ğH*0"ZOWoI}@mNП5+#*'G| AH=XX?#1jvʬ`p^Y<L~i/{kHwâ hs)aLoD~Pf7VM'(@ﰤ ہg9x+n&;f?/X)T`D1 ߨMߒ.FgTp'Hq/L0UEc?kǃh6ry7]P\@TN%s7@'>$!AxUʰ\3;Y^U~PGl!;b F2ȂpԞ(Q_V:1X: n3 m:@/)IJNv"2x+ٗ Kx.HҥfAj^y9*O]#kM`~b_R 7zFh!1߈Vc0a"j6nS Nr)Υ{t*F8#vufz`rs"WG9^EMvc΍&DAdQy/4Aڱ&S֚E biLQ<6'5P..T&q]w4.6IE? v\[YI>U!lDa>Ԫ΋ϩ7~8A]&nv|oY yKiw\¹9~$ 66nQfq>,o,IӔ 渱{I .H>C-Yn馑gQz tafw0a, Qmpjc5dۈ2yܸو L+~|-dj qHA}mԵQӅlVdkbze\Ocl=c ;n Li^`Agqr<KG k5Blۻ֬@2lE\u ϫ=Y&0Q:Qa!V#Ϻ(_ ٲ $/o|XhLaf-=vAq *q3xɢ4 j m=-dlc\kkQlabe0bNl{WeP|b-I|LeMaX:QΣtԻ0JߥA=ؕפmCij4ngF`D-s3 L_ |Pq<'A  Wh% of a^)ɘИ"רY=. 
\;l  tҚG9w&sc d; mj>zjZ '}DhibW]egl6qnkv+zZgJoC`֣ѓ~8ORѻgWg?H6K +گ L6JAz``ègU1nFiyaf%oҠRh6 w G"U&/ź; (+Z\j1,ٞ[ޮd°c&ujm 6?rgWJz{+ 8Ҏվ | !Bhݳڃn&[owGwZjpf; \ebiaklE x NT9§g&a`IiGM>nwۮjJZ@ f7;𩼮S޻G0齽ʺŠS0$6TW)#gfz.aJ]h*o+ 7 Z-1A26b+-Sdl}wEVZOAي»IˬO ~M-JQS#xpaA.U7Y-۩6˚w]]llA?Z$㧲F waރ$Ųe]]DFok(vpi91k *Z  ,8mF6]pTqke0*1¶u4%y<8syjHA}X*ݹ1SbSW§ٖծ1k**kypH]oF*.f6TTcMe"¤0g)&Ůޟ:{kZ >8,$5*F21wsHpQkzF6c]wN̵ׄJ# pȄA#]F8l1?(gOB~TUyLbˁ8#^TO1ZbbySIOP~V{-b-4R^~Ge­lHnuS/:6# $Tj?e+y䏼Hf*'˼Ѝb# &??~p$iBF;[wzek~ZS 7H8v ?3$rj7nFY |OQ; U dؓS - \G=&pGw`)/ ai5&sL <:R=Pe6^X7}o5641W0ճ2k3$k%'1&-[#bML"'{ "!$*x(+)`F(> q-q,v.Ț/7pqXsYr3w%vQO+tru՛E~xKO} |b!ytxBz{l.m~Do8nkljw[h1Ri85bcm?a+f`Qeddf"giH IKSNJuyOcN LZMݥFĚGE@ND$A2D@XsBI*CThPU3gWu>V SR:P|Q~Z9[ SYfX]4\)^oEZ_/m5qs<\kg2z &J8 좞V`a/6i\lU,zB\uHƒ=&FW A+Ox]`غ7W>9q߳!7Kk ֩fض-bѠ3pj$]^Ĝ'*~@IWVÕ<ӂMʏş{ Dt͆mC-@mw+B.(>dT"ş~Ϝ8yo$w1J}50K_ ^ϋiBۉI#dXфTQc:r Р fΫnx|)Ko%3/uUv?A)ġ:C|sд@͉ ;IbeU"hH_S1ފZ^ھ4ge ȋbW72%k_ܝ8Ŵ(}OodJֿjw3XcVPW0Bq{gǧurCo&p-?О'sB zGɠ2>[ ;g/P8i_ Y=嗇e:ϏOZw3(?RXw@ Qh+HZ"0*WOIo@}m5N#+'*GA |􏒨HX=#?X1vjʨ`^pYL'!$UxAׯ3\Y;U^GP~b;!lڇF 2p(ԐQV_:X1: 3n :m@I)/NJ2"v+x xKH.jAf^O*9y]#Mk~`bю_޶ Rz7hFм!10cV"ajحn6 SrN){t*8Fv#fu`zrϮsɛW"G9E^vMcD&dA/yQA4S&ֿ EbLil!>aDƋΪ~7A8n&]|vYoᡱ Kyi׫w¡\~9$66 Qnf>q,o,ӹI 散 I{.C>HnY-Qg̰t zfa inflate 1.2.7 Copyright 1995-2012 Mark Adler  #+3;CScs !1Aa  0@`ND@@invalid distance too far backinvalid distance codeinvalid literal/length code(kMingw-w64 runtime failure: Address %p has no image-section VirtualQuery failed for %d bytes at address %p VirtualProtect failed with code 0x%x Unknown pseudo relocation protocol version %d. Unknown pseudo relocation bit size %d. 
D)kA)kmsvcrt.dllzR| 0|DpXplp zR| TPAA AAC@k AA AAB E AA AAC M AA AAC p AA AAD  <TWAA AAC0 AA AAB HlAA C0  AAD _  AAC H  AAB ,TZAC  AA H AC 0H_AA @ AA Q AB lt AA AAF AA AAC  AA AAD i AA AAC (WAC G AD <LAA AAC@Z AA AAC 6TAAA AAC( AA AAA @ AA AAD *< NAA AAC0 AA AAB L.`LzR| < NAA AAF?AA AA\0Pp$AA AAC( AA AAB EAA AAzR| LxLAA AA A AAA p A AAA HltAA AAh A AAA ZA AAHAA AAh A AAA ZA AAzR|  AA AAFm AA AAA  AA AAA [ AA AAA Q AA AAA zR| @PX AA AAF AA AAD PvR;;(PQR0B C@B@F`AHPEH0A 0F`O`X`p0@`?@pppЗ жP@P`@ `;@<>@>0RRRRRRRR SS*S;SNSaSmSzSSSSSSSSSTTT*T8TNT`TsTTTTTTTTU(UkJkZkxkkkkkkkkll$l,l4l>lJlVl`lnl|llllllllllllmmmm&m.m6m>mFmNmVm`mjmtm~mmmmmmmmmmmmmn nnn(n2nkJkZkxkkkkkkkkll$l,l4l>lJlVl`lnl|llllllllllllmmmm&m.m6m>mFmNmVm`mjmtm~mmmmmmmmmmmmmn nnn(n2n.h(Licensehttp://curl.haxx.se/docs/copyright.htmlDVarFileInfo$Translation p0"0N0_0d000011%121F1K1q111111122!2H2O2Z2222I44444445b566-7G77}89==!>(>I>? t00&1I111:2&344444495D5W5i55555T6s666667 77,757B7Y7d7O88$9I9Y9b99g:>z?0( 00;11#2\222 3T336}8G:\;c?@d0V2 3T3h333^4~445555-66667-7789::;$;;>A>>?1?O??????P11122m2222$3C3333/4e4445H5c555]6666y77799::%:5:E:N:b:j:::::::::;8;J;Bj>>>X?^?`xC0S0c0s00001111I223 444255556666P7777 88888L9k9t999!:::: ;;;;<c>>> ??pX0[0?1Q1x11111t255678'9399:;d;;<= >z>>>>>>> ??,?X?b???t00911}333?4]444w5m66677@88888E9L:`:h:::4;;;'H>>>$?@?l???????0000001 1311]223:3333#4@4444 555I5\555556h67)7Y7q77778&88888A999 ::: :T::;;>>6>C>{>>>>O??d+0F111!2A2Y2q2y22333445X5v55M6666?7T7i77888^99;;;"<*>?B?J?u??d191A12.2c2223C333=4444,5H5w55556;7k778Z88909O99x::;;s;;;;{k>>?6?m??`@0-1:112z2J3 55555627q77777(8G8899F9R9l9999:;;H;< <(>?`0D1Y22:3b3r3y33(4445+5:5a55555B7M7778O889)9H99:D;s;;n<<=[=>5> ?F??t0x000O112s2 3&3j3334w4~4455555!6P6r6z697778M8~89&9u9999>:::::b;z;;;<<<=7=O==?D345-5U666677'8R8}8889$9[;:>8?_? 
x0S001Z111222i33334505M5j555555656R6o66666z777^88@9p9:::;";K;R;g;<%=^==*>=> ?@?0ds1182L2v33334{4444556777888888F9p9:.:A:{::";Z;~;;;R<="=z==:>Q>}>>?@`50P0001121O1b1111E222332335T5e5555566^9r999%:E:@;F;W;];;; <=??PH=0x001/1111 2)2F222I3333 44677!8>8m88889{999`a88[9=>p0$2245U5555<6h669<:<<<=??0i0H222X34)4Z4t44445595l555(656E6l6667e88888899{999::9:W::::;x;;;=i>>>>?'?5?????? 080000K1j1112%222 3_333 4<4[4o4444445'5\56677777777777888$8)8I8N8X8b8l8v888889R9 :':n:::m;;;;<<>>??x000:1|1122$3B3Y333434g4{4444`555 66X6l66677778H89O:p:::::<<==>>>C?_?????010[0w00001-1I1s111112E2a22222;3p334B4W44555H6x66"7A7777898c889,9q::N;; <5>???|#0+0D00}111122G3333:4r4444@5*6a666,777777+838T8x9997:Z:u:::;;T;o;;;,?N?\????????L0G112;33N444444515?5V5d55"6o;;<2<+=z==>>>7>?>F>>?40(0H00]4n4}405W5J66666J7l799;>>2? @0J22!445K7 8D888U9w999 :<<==>9>X>>"?G?k?z?0L000 1Y111112=4X4777;6;;?=?b?j????????????@l>0\0b0k0t0}00000000<3S34<44445k55u777w88=9t99::;u;;;Tf>>???P`00 1.1[1s111111223222'33333+445)56667i7,8d8}8h99R:::X<=}===>`(0003556L66r79:<<<\=pd00.0p0000121L111|2235_666Q7d777Z88i999:T:t::v;;;p>K?L000<1\1222 334E4455r677M8{88D9u9:+::::<<= >7>>,Q0Z014I56666!77p9::;;?u?(11>9:X:;;Z>???`B000-1m122 2N3333444556F7e8899::;;/;F;;;">>>2?M????040H0t0000111Q12(22223I3~3333,4A4k44444575e55J666666 77 8/88888819Q9:::g;<"<3>}>>>>?K???????000"0,060A0K0U0_0i0s0}00000000000 111<1F1e1o11111111Q33333*444n6V778+838b8>9p9997;b;vt><&181"556T8o8888F9]9t999999 :<=Z>>g???`p0$1202C2222394444 555677 888p99:&;M;d;;;;;:A>>???`00070?0012B2n2222[3h333/444l5566Q7]7e7777*8O8o8w888888 :r::R>k> H0044m556R7778!808m88:::;(;L;;;;f<<<=>>>?0l/000D1d1122=346666]7778Q8u888<9Q9m99::^:t::: ;;;;4<^>>>>>p???@X00)112i33455616666 7778888A:::;;;U;];;7<===>>>>?PX 0=0|00011133R444Z5t6v7Y8q88=9i99J:::%;S;;7<<<;>>> ?3??`p 0000#1q1112S2X2]2232333 55f5p556778N8888@9999@::;&;P;;Ch>>>>>s???pd0!040a00000000)1 2222;3334:45455166777(8g888`99:;; <&>??h?(090>12Y33P44455666737Z7~77778x888888888 
9949^9u9999:T:^:::::;-;I;j;;;;<<=>A?h???+0C0f001112@23R33w44444I5556666678899b;;;;;; <{<7=c========>>E>T>d>l>>>>>>> ?=?h1 22233454445g555566966666o77778l88999::::;;;;;=Q===>??`000000#111112C2}222I3k33?4P4U4`4|44#545?5F5`5V777@88889K9Z999W:a?LG111)4/455566}7778B8Q8{888I999:j: ;;<2Q>r>X}55566646;6E6L6k6r6|666666666677!7(7G7N7X7_7|77777777%8<223389<>=U={=====>>>>>5?P?m???? (00T000?1b1w1~11111112.23282O2\2j2o2|2222222333#3<3C3c3z3333334:4V4j4~444444445"545b55555555666'6,6;6@6H6Q6[6a6j6{666666667E7T7Y7_7l7r77777778 88Q8Z8e8p8v888888[9i9p9u9999:&:-:G:Z:a:g::::::);;;C;R;c;;;:<@R>_>g>>?d?????@ .0D0P0 0 0@0D0H0L0P0`0`h@1H1L1T1p1333 33333 3$344444444444444444444H6T6`6l6x667777777p`2h2l2t2x22222222333333333333(9,9094989<9@9D9???????????????????????????`000 0000x0|0000111111111222 22222 2$2(2,2024282<2@2D2H2L2P2T2X2\2`2d2h2l2p2t2x2|222222222222222222222222222222222333 333 3$3(3,3034383<3@3D3H3P3333333333333333@4H4L4:`:d:h:l:p:t:x:|::::::::::;;>>?????????????????????????????,0?????????????????l0 00000 0$0(0,0004080<0@0D0H0L0P0T0X0\0`0d0h0l0p0t0x0|000000000000000000000000000000000111 11111 1$1(1,1014181<1@1D1H1L1l1p1t1x1|111111111111111111111111111111111222 22222 2$2(2,2024282<2@2D2H2L2P2T2X2\2`2d2h2l2p2t2x2|222222222222222222222222222222222333 33333 3$3(3,3034383<3@3D3H3L3P3T3X3\3`3d3h3l3p3t3x3|33333333333333333333333333333888888888999 999 9(9,9::::::::::::::::;;; ;;;;; ;$;(;,;0;4;8;<;@;D;H;L;P;T;X;\;;;;;;;;;;;;;;;;;;;;;8<<<@>>>>>>>>>>>>>>>>>>>>>>>??? ????? ?$?(?,?0?4?8?ljWRڪ 3hKhd jj tFhOhd PlV^_h5hd jj v_VW|$ 3t+NjhZhd jj Jlt%P;} lWQthahd jj tF_^_3^̡l̡l̡l̋D$l̋D$l̋D$l̡l̡l 1D$l̋D$l̋L$3D$AËD$L$H̃=lt3ËD$l̡l̡ltltЋT$3ɉ B T$3ɉ B̋L$T$V2;1us3^1+u"Bq+uBq+u BI+^̋D$L$@A̡l̋D$l̡lu% ̋D$} Ã)} VpסlP趧;~x ^Ë lVQ諧^ø =lVh l tb3ɀ8~WT$Rh 4V  uPPV  3҉D$T$ T$ D$_t /L$t$##. 
^ËD$u2BL$MZf9uA<8PEu;H4t l  U l3ʼnElSVWP th P lll  ؅EPjjjS  zE=pFu MQVWjS tZE@E3h WfG ؍e_^[M3 ]ÃJЍe_^[M3 ]Ãe_^[M3 ]̸F l3ĉ$j` tCP t8$ $PQ @P  $3 Ë$ $RPL$ hQ Ƅ$ =sX~OVh j jD$PjjjjjjT$(VT$( V ^$3 jh L$ Qj  $3g ËD$ L$T$PQRh j j  @Vt$ }K=lt`Vxt!L$T$QRPD$PlV^h hShd lltL$T$QL$ RVQЃ^̡ltS\$Ul$VW|$$WSUj [D$$0t$(WSUj 0E _^][̃=luhhd jdjgjR3álthhd jj Сl=luEllu2thhd jj Ѓhhd jAjgj3Åthhd jj ЃVhhd j$ uhhd jAjgj3^hhd lFu"Vh hd jAjgjT3^álthhd jj ЃlWjPVu lQ謠OlWRǡ lthhd jj Ѓu$Fh!hd PlV _^G_^̋D$Pp̋D$L$PQx̋D$P(=htXL$tPT$tHD$ t@ ptpx| 339hu3ËL$V;tEt$ ;t=D$;t55|p tx ^3^̃=ht+L$t#D$t 3̃=ht+L$t#D$t 3̃=lu3_'D$L$T$ lD$ lL$ll l̋D$t3Ɂ=tpI# pD$t3ҁ=|J#xD$ t ̋L$ttЁp#ЉL$t|Ё#ЉD$ t ̋D$t3Ɂ=I# D$t̋L$tЁ#ЉD$t ̋D$t lD$tlD$ t lD$tlD$t lS\$3;3[álUVt$W|$ h;tQVWSQ lЃVWSl t jVWSUЃ_^][̡lVt$tjVЃVl^t jjЃS\$3;3[álUVt$W|$ h;tQVWSQ lЃVWStl t jVWSUЃ_^][VW|$ ǍPI@uL$+‹T$QR@Pf +Au_^Ul$uD$L$T$ PQR3 ]S\$[3]álVt$W|$t jVWSjUЃVWSU| lD$tjVWSPUыD$,_^[]S\$uD$L$T$PQR [W|$~;|$}_3[álUl$ VtL$ jUQWjSЃT$ URWt t!D$PSV L$$QS&SltT$ jURWVSЃ^]_[̡lVt$tjVЃVl^t jjЃVt$t-ltjVЃVlt jjЃD$ hh, P ^̡lt̡lt3Vt$NFFt PV+^̸v V5mhh< jj lD$ $'mm3mmmD$P=mtL$Qhl9tRhh< jj hh< jj hh< jj %mT$4RhlC8m8mt/=mt&-mu mhh< jj chh< jj M^ÍI&{&8'& 3mta$VPh&h< jjmuL$Qhl#u3h+h< jj^̋D$m̡m̋D$iiɻEV4+^̋D$VPE~iiɻE4+ƃ^̸ 39mte$VPmL$QRQ t@W~tmGWP衞F~FtGV._^̸v QShh< jj emD$PG=mtL$QhlltRhh< jj hh< jj hh< jj %mT$4Rhlv8mVhh< jj hh< jXtj=mu&hp(h(藜mu V;VjD$L$ T$FmVPN V^F tFhh< jj "^mt.=mt%)mu mhh< jj hh< jj [3̸ V3~.Shh< jj mD$Pt95mtL$QhltRhh< jj @hh< jj -hh< jj %mT$8Rhl8mhh< jj hh< jj mt.=mt%)mu mhh< jj hh< jj |[^̸V V3.8Shh< jj BmD$P$95mtL$QhlJtRhh< jj hh< jj hh< jj %mT$8RhlT8mhh< jj t uuhh< jj nmt.=mt%)mu mhh< jj 8hh< jj "[^̸ D$0W|$$VjDhh< j$su!WDhh< jj = mu/h@h@(葘 muWVmD$0L$4T$,>FN VtFP83FFm;muFF@mmtj FFL$QmF tT$RP誙tF @ mVP葘tx tH IP(hh< jj mt/=mt&-mu mhh< jj Jhh< jj 4^_̸$ |$,Vt$,= mjM mD$PQt$8 tF t PV9hh< jj mt.=mt%)mu mhh< jj Xhh< jj B^$̸$ D$<W|$0Vt$0u#D$@L$RF F N PQVsPhT +RD,0P&]D$4P@u~+YAu}++;эT}}+QWR }#+QWRD$( P@u+¹hP +QT$R\L$D$(PRvtD$PV:]$_^[3 ̋T$L$̸ W39= mu 9=mShh< jj 
mD$Pm9=mtL$QhltRhh< jj 9hh< jj &hh< jj %mT$8Rhl8mVhh< jj mt$4t$|$|$;t.L$Qh4PDD$$ ;tT$PRh VZ}hh< jj m5m=m;tP= mm;t!PMumPۍ=mh4h< jj 5mhh< jj ^mt-9=mt%)mu mhh< jj hh< jj [_̸ = mSSVhh< jj tmD$PV=mtL$Qhl{tRhh< jj !hh< jj hh< jj %mT$8Rhl8mhh< jj 'Phh< jj $mt.=mt%)mu mhh< jj shh< jj ]tD$PjjjVVV^[̋D$PQH RP@QL$RP҃̃= mt?hfh< jj mD$Ph7Q^hih< jj ,̋D$u Ãuht hd j$h4mX4mÃu"hP h@ hhXmWXmÃuh4 h$ jh mW mà t ̸?̡xouDhh jj =xou xohh jj xoVhh jj 395xouD$xohh jj ~^Vhh jj W=|ouh@h qL|ou3hh jj ^Vt$FhP!PMV ^̸ ֿ =|ou hu Wh0h jj \$ |oD$PQ臎uIVh4h j < t-~虇FVu |oR;^hEh jj BuhGh jAjijf_ SVhSh j uhVh jAjhj^ [ËD$ L$T$h^h D$ NL$$jj VFN WRv;G#GjPctCOQS;G~݋_GVSSPWg hkh jj A^[hch jAjhjdVVhuh jj hwh @jj  ^̃=|ou Rt3|oh9P |oQB |oxoS\$[uËL$T$WQL$RT$QL$RQ<_̃=xou?hh jj $=xou xohh jj xõ=xou?hh jj =xou xohh jj xoH̃=xou?hh jj d=xou xohh jj ;xoH̃=xou?hh jj =xou xohh jj xoH ̃=xou?hh jj =xou xohh jj {xoH̃=xou?hh jj D=xou xohh jj xoHW|$?u'uh]h jAjfj3_ËSVP襁\$;jQ莀tF;~T$RSP褁 ^[_hgh jAjfj賺^[3_Vt$u3^P+L$;}QP'^S\$W33؅u_[ËD$UVhh jj8KQր~Bhh R t"3~$CVP覀F;|hh jj~#uhh jAjlj踹^]_3[3~StGxtA\$tP+;|3 VQ' QRIRVSPD$,PуF;|t W^]_[̸ָ UVt$39.u^E]YS\$؅u[^]YWhh jjCPQw;}~@hh R# t 3~d$CVPFDF;|hh jj7~$u hh jAjjjW_[^3]Y3~r\$ tP~;|3 VQ~D$Dt!xtPRQL$$VT$RPSQ҃D$L$PVQF ;|t U_[^]YS\$W3s؅UVhh jjDCP~~;h h Q t3~SVR}F;|hh jj~!uhh jAjkj^]_[Ë\$3~QtCx t=tPw};|3 VPs} QRI RVSPD$,PуF;|t WtP|^]_[̡PŭuhP3hj'̋D$P uVWD$ _^VN򸫪*ʍI+io*‹9$Q@+t$9i0^́ k9SVWi:ȍqi⻷ڋi+ʍ=iҏ gfff‹T$+ȉ 颋.ϋkd‹T$ @+D$28_^[SUVW\$E.iQl$+؋t$FV+ȍ+ȍ؁Q| EQ }MÀQF ~NlAP|$ L$D$L$QT$RL$$QvD$( wdFD$HFų L$‰FN ʋ<G_V++ډ^][_^]3[% % ̋T$ L$3tVt$ +W<+uJAu_^SVt$ \$W= umhD u _^3[hDjP  PSX @@u0P _^3[Ë@PR\ tH,hQDP  _ƂC^D[ ^3[Vt$t,t&@PT Q ^ 3^;3+|$uj\h jejmj謱3̃=ou oUSVW1ҜX5 PX111GenuŁineI Łntel AuthƁenti ƁcAMD g=UƸ ́8F9￁@* ,1%== _^[]Ð11ҍ !1Ð !0X%!X 1RP1+$T$11ÐX 2D$L$ؐ1@;11Ð11ҍ  !F ffffffff㐍D$ÐT$L$S[ÐUM $UT$ED$ED$ ED$E D$E$D$U]ËT$L$1*RÐ IRIRÐ=D̸H |$TL$LD$PSYUiVq T$\WP@ P@ P@ @P@ P@ P@ PL$0@3͉|$P@L$d#38L$` x@ x@ ׉T$ 3#3T$0@x0@ x@ x@ t$@3#3@t$ 8h @h @h@ |$ @h3#3|$@@X h@ h@ ݉\$83#3\$@h@ h@ h@ ͉L$(3#3L$8@h@ h@ h@ 
@hT$H3#3T$(@p h@ h@ t$ @h3#3t$H@x h@ h@ |$43#3|$@h@ h@ h@ ݉\$$3#3\$4@h@ @( h @L$D3@h#3L$$@P h @h @T$ @h3#3T$D0 @h @h@ t$<3#3t$@h8@ h@ h@ 3#3l$<@|$,8X @X @X @|$L3#3|$,@ 3#3L$L#ً # T$Pٍ2yZ#ދ ։T$T# T$\$`*yZT$T# T$l$:yZ #Ջ# T$ߍ yZ ## T$0ٍ2yZ #މ\$`# l$8\$+yZ# T$`\$T$4:yZ # # |$<yZ #ˋ# L$ ߍ1yZ#ދ Ή\$`# l$(\$+yZ# L$`\$L$$yZ #Ӌ# T$,:yZ #Ӌ# T$@ߍ2yZ #ދ# l$H\$`\$+yZ# T$`T$D yZ #Ӌ# T$L:yZ 33T$P2n33T$n 33T$ n 3ы3l$/n3T$ 2n33t$$n 33t$(n 3͋3t$,>n3L$0n33T$4n 33T$8*n 3Ӌ3l$<.n3T$@ n33L$D9n 33L$Hn 33L$L1nL$\iy iq|$dYq T$`h_^][HËT$UW|$Vt$F ;sFFFXSNtjn@s" 8@sWRUk ~X[^_]û@+SR(RI jUVD$0j@ jUL$<+FX( l$<$vSUV| +tW~XUV [^_]̋D$L$jPQ? SVW|$_Xw3C8v @+PjS蛥 jVW38+QjS{ WV8G8FOFNWFVGFFOFNWFFGFj?VWj@jVGX D$4@@@OH@@@@O@@@у$@O H@@@_^H[̸\ Vt$luoD$P~u^\ËL$hT$dQRD$ PL$QVT$j\R^\̋T$UW|$Vt$F ;sFFFXSNtjn@s" 8@sWRUˣ ~X[^_]û@+SR(R詣 jUVbD$0j@ jUL$<+FX舣 l$<$vSUV, +tW~XUVN [^_]̋D$L$jPQ SVW|$_Xw3C8v @+PjS jVW38+QjSۢ WV8G8FOFNWFVGFFOFNWFFGFj?VWKj@jVGX~ D$4@@@OH@@@@O@@@у$@O H@@@_^H[Vt$j\jV #EgFFܺF vT2^̸\& Vt$luoD$Pu^\ËL$hT$dQRD$ PNL$QVCT$j\R'^\VW|$ t$L$US@Q_OW ϋ.1!ߍ(xj1n1!Ǎ*V1 n1!׍)p $1n 1!ύ+ν1n1!ߍ(|1n1!Ǎ**ƇG1 n1!׍)F01n1!ύ+F1n 1!ߍ(ؘi1n$1!Ǎ*D1 n(1!׍)[1n,1!ύ+\1n01!ߍ("k1n41!Ǎ*q1 n81!׍)Cy1n<1!ύ+!I1nˍ(b%1!׋n1؍*@@1!ϋn,1 )QZ^&1!ߋ.1э+Ƕ1!Njn1ˍ(]/1!׋n(1؍*SD1!ϋn<1 )1!ߋn1э+1!Njn$1ˍ(!1!׋n81؍*71!ϋn 1 ) 1!ߋn 1э+ZE1!Njn41ˍ(1!׋n1؍*1!ϋn1 )og1!ߋn01э+L*1!Njn111ߍ(B9n ߍ*q11Njn, 11׍)"amn8׍+ 811ϋn11ߍ(D꾤nߍ*K11Njn 11׍)`Kn(׍+p11ϋn411ߍ(~(.ߍ*'11Njn  11׍)0n׍+11ϋn$11ߍ(9n0ߍ*11Njn< 11׍)|n׍+eV11ϋ.1 ߍ(D")1ϋn1 Ǎ**C1ߋn8 1 ׍)#1Njn1 ύ+91׋n01 ߍ(Y[e1ϋn 1 Ǎ* 1ߋn( 1 ׍)}1Njn1 ύ+]1׋n 1 ߍ(O~o1ϋn<1 Ǎ*,1ߋn 1 ׍)C1Njn41 ύ+N1׋n1 ߍ(~S1ϋn,1 Ǎ*5:1ߋn 1 ׍)*1Njn$1 ύ+ӆ1׋l$@}}}} E]<$MU 9X[]_^̸T L$XD$\SYUVW9iq IL$H @H @H @T$TP@H @H @H @T$X@3#3΋T$T@ʋT$t$lyZH P@ P @L$$@3#׋L$X3ыL$l yZ@l$( ͉|$@( h @L$,@L$$3#3L$)yZh@ h@@ ( L$4L$t$3#3H@l$,@ P@ P@ ʍyZL$+@,h@0kA@4ك@8y!~@<[H@HDHHHLǀ@ASVt$W~P;Cpv +PjS8x jWV3)9p+QjSx V@WF@NDG~V@FDW}VDN@O|F@VD w G{F@VD(w GzFFGyNGOxVHWwFHNLGvVHFLWuVLNHOtFHVL xw GsFHVL(hw GrFNGqNOjWVOp>8D$ u_^3[Ë0c@uyUNV@X@@XX@وX@@@XV@N@@@و@X@X@@N@@@@X@وX@@@XV@N 
@@@و@@@@@ ]G_^[ËN@X@@XX@وX@@@XV@N @@@و@X@X@@VN@@@@X@وX@@@XV@N@X@X@Xو@@@PV @N$@@@@و@ڋ@@P@@N(v,@@@ֈP@P@@_^H[[̋D$UVt$ W|$nPD$NDǺS^@;wr;sFHVLF@VDtX+;sL$WQPAs [_^]ËT$SRP"s D$$jU+Vdž 4\$rQSV3߃ +߅tWSUr [_^] ̋D$L$jPQ3 ̸r l3ĉ$$V$uHpD$P$QRT$RD$PVL$hQ$ ^3q ̸vq l3ĉ$$V$upD$P$QRT$RD$PVHL$hQi$ ^35q USVW]57-V)%@ =P6Al$t$D$LD$h} ^NV ʉ$\$L$T$ F^NVʉD$\$L$T$F ^$N(V,ʉD$ \$$L$(T$,F0^4N8V<ʉD$0\$4L$8T$yZ|$ 3|$13|$,!3<$1|$ yZT$3T$13T$013T$T$ nL$3L$13L$413L$L$n\$3\$ 13\$813\$ \$nD$3D$$13D$<13D$D$0nt$ 3t$(134$13t$t$ >n|$$3|$,13|$13|$|$$nT$(3T$013T$13T$T$( nL$,3L$413L$ 13L$ L$,n\$03\$813\$13\$$\$0nD$43D$<13D$13D$(D$40nt$834$13t$13t$,t$8>n|$<3|$13|$13|$0|$<n$3T$13T$ 13T$4$ nL$3L$ 13L$$13L$8L$n\$3\$13\$(13\$<\$nD$ 3D$13D$,13$D$ 0nt$3t$13t$013t$t$>n|$3|$13|$413|$|$nT$3T$ 13T$813T$ T$ nL$3L$$13L$<13L$L$n\$ 3\$(13$!3\$\$ +ܼ!D$$3D$,13D$!3D$D$$(ܼ!t$(3t$013t$!3t$t$(.ܼ!Ջ|$,3|$413|$ !3|$ |$,/ܼ!͋T$03T$813T$!3T$$T$0*ܼ!݋L$43L$<13L$!3L$(L$4)ܼ!ŋ\$83$13\$!3\$,\$8+ܼ!D$<3D$13D$!3D$0D$<(ܼ!4$3t$13t$ !3t$44$.ܼ!Ջ|$3|$ 13|$$!3|$8|$/ܼ!͋T$3T$13T$(!3T$<T$*ܼ!݋L$ 3L$13L$,!3 $L$ )ܼ!ŋ\$3\$13\$0!3\$\$+ܼ!D$3D$13D$4!3D$D$(ܼ!t$3t$ 13t$8!3t$ t$.ܼ!Ջ|$3|$$13|$bʋ|$43|$<13|$13|$(|$4bʋT$83$13T$13T$,T$8 bʋL$<3L$13L$13L$0L$<bʋ$3\$13\$ 13\$4$bʋD$3D$ 13D$$13D$8D$0bʋt$3t$13t$(13t$<t$>bʋ|$ 3|$13|$,13<$|$ bʋT$3T$13T$013T$T$ bʋL$3L$13L$413L$L$bʋ\$3\$ 13\$813\$ \$bʋD$3D$$13D$<13D$D$0bʋt$ 3t$(134$13t$t$ >bʋ|$$3|$,13|$13|$|$$bʋT$(3T$013T$13T$T$( bʋL$,3L$413L$ 13L$ L$,bʋ\$03\$813\$13\$$\$0bʋD$43D$<13D$13D$(0bʋt$834$13t$13t$,>bʋ|$<3|$13|$13|$0bl$`T$d}uE] M}@u;T$hEω] ։ML_^[]ÐUSVW]fo}foEfoM foU0fou@|$l$T$fD$pf$f$f$f$@$$$$_OW oEoMoUo]f8f8f8f|$`f8ffff$ffL$ffT$ ffo <$1f:foffD$@!1fs1ffT$1ˉf!1f|$01fofoL$1fs f!1fr1fofrf\$ 1ljfrf!1foD$`1ffoD$1f:foffL$P!1fs1ff|$1щf!1f$1fofoT$1fs f!1fr1fofrfL$1؉frf!1foL$p1ffo\$ 1f:foʼnffT$`!1fs1ffD$$1f!1fL$1fofo|$(1fs f!1fr1fofrfT$,1foD$@frf!1foT$p1ffoL$01f:foΉff\$@!1fs1ff\$41ljf!1fT$ 1fofoD$81fs f!1fr1fofrf|$<1foL$Pfrf!1fo\$p1ffo$ff:1ˉffd$P!1fof1fL$1fof\$0!1fr1fr\$1ljf!1foT$`1D$ 
fo1!11|$ff:1։ffl$`1foffT$1͉fof$$1frL$1fr1f\$1fo\$@1foD$ ff:1fft$@1fo$ff|$$1Չfofl$1frT$(1fr1fL$,1fod$P1fo\$0ff:1Ɖff|$P1foffD$41foft$ 1fr|$81fr1fT$<1fol$`1fo $ff:1މffD$`1foff\$1ʼnfof|$01frD$1fr1f|$ 1fot$@1foT$ff:1ΉffL$@1foffL$1݉fof$1fr\$1fr1fD$1fo|$P1foff:1|$ !ffT$P!foff1fofL$1T$$!fr!fr1f1foD$`L$(!!foΉ1щ1\$,!!1ˉff:1D$0!ff\$`!fo$ff1fofT$ 1|$4!fr!fr1f1foL$@T$8!!fo׉11L$USVWt$|$D$]4$|$D$\$ _OW PSQRG_OWPSQRG _$O(W,PSQRG0_4O8W<PSQR@ |$d^N~ \$L$|$ V^N~\$L$|$\$\t$11|$1T$!֋T$ 1 \$1 t$1ڋ|$ˉ$ !!u ȁt$ދL$d 111 $1\$xt$1\$\1|$1T$!֋T$ 1 \$1 t$1ڋ|$ˉ$ !!u ȁ؋$xqN$`\$L$|$ ^N~ ^N~ D$\$L$$dVF^NVF^N`;|$d$ _^[]Ð/BD7q۵[V9Y?^[1$} Ut]rހܛtiGƝ̡ $o,-tJܩ\ڈvRQ>m1'Y GQcg)) '8!.m,M 8STs e jv.,r迢KfpK£Ql$օ5pjl7LwH'4 9JNOʜ[o.htocxxȄnjlPxqSHA256 block transform for x86, CRYPTOGAMS by USVWt$|$D$]i 4$|$D$\$ "ooNoVo^of on(ov0o~8PL$T$\$l$(t$0|$8WʉL$LT$H_ˉD$D\$@ol$(ot$0o|$8oosd$ sossss}oL$oT$od$$oo\$HssossssooÊUǁ5ol$(ot$0o|$8oosd$ sossss}oL$oT$od$$oo\$Hssossssoot$Xo$oÊUǁosososos ss*o$s8sԔ$ss*\$Hol$(ot$0o|$8oosd$ sossss}oL$oT$od$$oo\$Hssossssoot$Xo$oÊUǁoL$oT$o\$ol$(ot$0o|$8NV^f n(v0~8NV^f n(v0~8Ā;|$XHwd$\_^[]Ð_OW PSQRG_OWPSQRG _$O(W,PSQRG0_4O8W<PSQRG@_DOHWLPSQRGP_TOXW\PSQRG`_dOhWlPSQRGp_tOxW|PSQRǀH$|$󥐋L$(T$, 1111111111L$0T$4t$8|$<D$@\$D11#L$(#T$,$$11u}ӋL$ T$$$\$ӋL$T$ D$ \$$1111111111L$T$ t$|$$\$ #L$#T$#t$#|$ Ӊ$\$Um$8$<111111111$\$$$1 1 1 1 1 1 1 11$@$D$\$$$$$L$(T$, 1111111111L$0T$4t$8|$<D$@\$D11#L$(#T$,$$11u}ӋL$ T$$$\$ӋL$T$ D$ \$$1111111111L$T$ t$|$$\$ #L$#T$#t$#|$ Ӊ$\$Um$H$L^NV D$\$ ^L$T$NV F^NVD$\$F^L$ T$$NVF ^$N(V,D$(\$,F ^$L$0T$4N(V,F0^4N8V<D$8\$2-m1?!'Y= % GoQcpn g))/F '&&\8!.*Zm,M߳ 8ScTs ew< jvG.;5,rdL迢0BKfpK0TQlReU$* qW5ѻ2pjҸSAQl7LwH'Hᵼ4cZų 9ˊAJNscwOʜ[o.h]t`/CocxrxȄ9dnj(c#齂lPyƲ+SrxqƜa&>'!Ǹ}xnO}orgȢ}c ?G5 q}#w($@{2 ˾L*~e)Y:o_XGJDlSHA512 block transform for x86, CRYPTOGAMS by ̋D$@RRRRH H%%%%HH̸1 l3ĉ$$U$VbSHW} @D$FFNFV FFF NF VF NF Š D$D$ D$(E$@ W\$ \$(FE-L$0QW.jT$?Suj}h QZR 3FRPQ( [t_^VWjNh4 h R3 ;u_3^ÍFP>~ ~~FBD$x$p_^W|$Wu_ËD$ SVp_SVRS tF tNQPW- u^[3_^[_̋D$VpNQ?N t$Ft PQyV RRF VR^̋D$@x u3ÃP-tL$PhWQ` 
̋D$HAT$ L$RQPC ̋L$QVt$ F%PRhV7F^̋L$D$WxQ^P}3_Vt$D$tT$ RPW u^_ËD$ ^_̋D$T$Jt_t0tËBR@RR@RPQSu1ËT$D$ u|PRQK u 3ËT$W|$u3_Vt$X ƍ$:utP:Quu3uD$ @jWP ^_ùP Ɛ:utP:Quu3u[L$QW^_ùl ƍd$:utP:Quu3u/W!tgL$ QIQPjjR^_ùP Ɛ:utP:Quu3u^T$RWgu^3_ËL$D$ @|jjQVPcuV3G^_VG^_^_̋T$UW|$Vt$F ;sFFF\SNtjn@s" 8@sWRUK ~\[^_]û@+SR(R) jUVbD$0j@ jUL$<+F\ l$<$vSUV, +tW~\UV [^_]̋D$L$jPQ SVW|$_\w3C8v @+PjS{ jVW38+QjS[ WV8G8FOFNWFVGFFOFNWFFGFj?VWKj@jVG\ D$4@@@OH@@@@O@@@у$@O H@@@@OH@@@_^H[̸` Vt$pupD$PNu^`ËL$lT$hQRD$ PnL$QVcT$j`Rk^`̋T$D$V WrUzSlh$l$Xh \$l$ Xh\$l$Xh\$l$X h$\$ l$$X(h,\$(l$,X0h4\$0l$4X8h<\$8l$pm) T$ 1Ⱥ >pm)D$ 1 >pm)΋T$ 1غ >pm) D$ 1 >pm)݋T$8 1 >pm)D$ 1ʸ >pm)T$$ 1 >pm)΋D$, 1ڸ >pm) T$ 1 >pm) ݋D$0 1 >pm)T$ 1Ⱥ >pm)D$( 1 >pm) ΋$ 1غ >pm) D$ 1 >pm)݋T$4 1T$ >pm׉)!! ‹D$ vmzƉ)!! ЋT$ vmzщ)!! ‹D$ vmzʼn)!! ЋT$ vmz Ӊ)!! ‹D$, vmzlj)!! ЋT$< vmz։)!! ‹$ vmz)!! ЋT$ vmzՉ)!! ‹D$0 vmzÉ)!! ЋT$ vmz ׉)!! ‹D$4 vmz Ɖ)!! ЋT$$ vmz щ)!! ‹D$ vmz ʼn)!! ЋT$( vmzӉ)!! ‹D$8 vmzlj)!! ‰ vmz1T$01 Ɖ1؋T$<1 1T$(1 ʼn 1T$1É 1 T$1 lj 1T$1Ɖ 1΋T$ 1 1T$1ʼn 1݋T$1 É1ȋT$1lj 1 T$41 Ɖ1؋T$81 1$1 ʼn1T$ 1É 1 T$$1 lj 1T$,1 ֋$ ΋BËD$HËBŋD$LŋB D$PBƋD$@ƋNjD$DNj$jJ-r z%$ϋ$@$+l[]_^̸p̋D$ H HP HP HP HP HP HP P̋T$3V$: u Ar^3^SVW|$ 3+׹ǃr;uF|_^3[_^[̸& T$ SUVrFFF VFF VW~ V ~6  ׋333֋3̋333ϋ33׋3UUUU33֋D$333΋3UUUU33֋   ,( T$zt  ʁ  ϋ (# T$ ("   (! փ? ( T$ Ӂ ('  (% ? (& ? ($  8 L$9t  t$ \$ʁ  ΋ (# T$߁ ("   (! ׃? ( T$݁ Ձ ڋ('  (% ? (& ? ($  0 L$yt  |$ l$݁ʁ  ϋ (# T$ ("   (! փ? ( T$ Ӂ ('  (% ? (& ? ($  8 L$yt΋  t$ \$΋ցف ف Ӌށ(#  (" ف  (! ? (  4('   4(% ? 4(& ߃? 
4($ ڋ  ։T$l( T$@_^][YVt$Vu^Vt^ËD$ PV3^̃=pt=Vt$V\u^Vzt^ËD$ PV3^ËL$T$QR3̃=$t2h( h( h( h( h( j hq"$q̸f D$ P@ P@ P@ @P $@ P@  ыL$T$T$QRD$PF8L$ D$@@@L$@@@H̸ |$$D$ SUVWH@ H@ H\$(@ht$$|$ @ H@ H@  CD$(؍ ÉL$0OG OG OG OG_G _G _G3‹T$,j ˉD$RD$3PGL$$7D$ FFFD$$FFFF Fl$(R\$0|33wL$OOGO GO GO GO_O _O  3͉L$L$,3jQT$RD$ E6D$ FFFD$$FFȃ F_F^][ H@P@ H|$$t$ @h@ P@ H@ D$(  \$4l$D$0HL$(ٍȉD$0.NFVFFF^F NF VF FF ًL$, jQT$ Rl$ F\$$-5D$@3D$ L$3L$$GGGGGGG Gl$(l$4\$JD$0݋l$NFV FNF FVD$FNF   ȋD$,jL$PL$Qh4D$<3l$$3\$ | ̓w=$OOOOOOO__^][ Ë|ukaY̸v D$ P@ P@ P@ P@@V0L$H@  ʃ|$$L$^tL$T$D$QRPL$ Q4T$D$L$RPQT$ R5L$D$ @@@L$@@@H̸ |$$D$ SUl$V0W|$,M_ P@ P@ @PL$@ P@  ыL$(jT$QT$RJ2L$È@@L$ @@@ HL$2>AL$L$ >FAL$ 5D$0_0^][Å|$,M_ P@ P@ @PL$@ P@  ыL$(jT$QT$Re1L$È@@L$ @@@ HL$AL$ >>2L$ FAL$ 3D$0_0^][ø |$,D$(L$SUV0W|$4IL$4_ P@ P@ P@h@L$H@ ȋD$( ʋT$,L$L$0QRPL$Q1L$ È@@L$$H@@@HL$4>T$2BT$T$ >FBT$ #D$8_0^][Å|$4IL$4_ P@ P@ P@h@L$H@ ȋD$( ʋT$,L$L$0QRPL$Q0L$ È@@L$$H@@@HL$4T$BT$>>2‹T$ FBT$ !D$8_0^][̸@ l3ĉD$T$\$?T$@\$AT$CT$EЈD$DD$GT$FD$ID$\\$BшL$H؁T$JL$KyKCT<QRD$DP   T$=d$<*Êd$=T$<T$>d$>T$=T$?d$?T$>T$@d$@T$?T$Ad$AT$@T$Bd$BT$AT$Cˊ\$DT$BT$C ӈT$CD$=\$<L$>T$? D$@ L$A T$C L$B  ‰D$D$9l$V9l$`$T$,L$0)l$RD$(D$,PQT$,R\$0,33UL$D$w_$ FND$FN VN FN ȉL$FND$VN VN VN ‰D$T$\ u \$D$@u ؉L$T$=\$T$\$?T$@\$AT$CT$EЈD$DD$GT$FD$ID$\\$BшL$H؁T$JL$KyKCT<QRD$DP   T$=d$<*Êd$=T$<T$>d$>T$=T$?d$?T$>T$@d$@T$?T$Ad$AT$@T$Bd$BT$AT$Cˊ\$DT$BT$C ӈT$CD$=\$<L$>T$? D$@ L$A T$C L$B ыL$ ‰D$D$3D$ 3L$$Uw=$,OOOOOOOOD$9l$0T$8t$4FFFFFF[L$H_^]3) @ÐlbXPI?5-B 8 . 
"     m e ] U R J B : ̸0f L$<$AUVt$H%t$yH@ID$ ?gD$PH@PSXD$ @ H@ @P @ P@  ʃ|$XW\$L$PO;|$Ht$D)l$L$,L$TjQT$0R\$4&U 33wM$NNFN VN FN FNVN VN VN 3D$(3L$,Uw=$0OOOOOOOOT$L u L$PD$P?@u D$L$P1T$PL$l$D$L$T$ L$8T$D$?D<8\$H2l$$D$HGEE|$l$$d|$tNt$,FFVFFL$FV9_^][L$03 4ËT$:_^][L$03j 4ËD$L$@8_^][3Q 4̸(UV339-0qu#jmh( h@ 0q;u^]9- qujrh( h@v q;tա$q;ujwh( h@P $q;tSW=,q;t`t$;}* (qT$WQR _[^-,q-(q]Ë(qL$VPQ +=,q_(q[^]ÿ@9|$~|$\$I q+RPS  u 8t _[^3];~|ċ qQ1A P@@ ;z}F‹3~<+RQS  u 8r h; q|ċ|$;j}h(tD$(T$$P$qRVPQ( T$(D$$R$qPVRQ.$qL$0WPQX +5,q=(q_[^];}Y(tT$(D$$R0qPVRQ D$(T$$P0qRVPQ 0qT$0VQR _[^](t D$(T$$PD$ RVPQd _[^]ËT$(D$$RT$ PVRQd_[^]_[^]̸ l3ĉD$D$,S\$(Ul$0VD$ 4q3W|$0l$;u3jeh) h@ 4q;u_^][L$3 95,t5,@~]t$~>l$,+=@~@L$T$QRP7PUJD$;|ƋD$_^][L$3b Ë@@X}*St$ WR +PL,QoC‹t ;|ËT$jRUP4qPV |ËL$4qjQUPRV3~8l$, 4q+PQU  u 83;|̋ËL$$_^][3[ ̸( l3ĉD$$D$8L$4SD$D$DD$UV0W|$LWL$ ʍGPD$(@@ @X @l$@@ X ؋ ӈD$-وL$,D$.L$/ʋ‰T$ T$0|$DD$\$L$1D$2T$3L$u\L$$jQT$$Ro\$(D$9D$,L$:T$;ȋЈD$< D$\$,L$1T$2D$3L4,D$<2FM@E|$D$<z|$tRD$(@L$ @H@@@HD$_0^][L$$3 (ËL$_1^][L$$3 (ËT$L$4_2^][3k (̸ & Ul$0E@D$D$4D$o ~#@D$ |D$3MHD$"u D$ HD$ D$T$Rl$hD$$L$(1!!ʉ11ʋ]1؋M111%1ہ1ɈÈl$3|3 33 %1ߋ 1ߋ1ߋ1ߋl$ D$$L$(1!!ʉ11ʋ]1؋M 111%1ہ1ɈÈl$3t3 33 %1ދ 1ދ1ދ1ދl$ D$$L$(1!!ʉ11ʋ]1؋M111%1ہ1ɈÈl$3|3 33 %1ߋ 1ߋ1ߋ1ߋl$ D$$L$(1!!ʉ11ʋ]1؋M111%1ہ1ɈÈl$3t3 33 %1ދ 1ދ1ދ1ދl$ D$$L$(1!!ʉ11ʋ] 1؋M$111%1ہ1ɈÈl$3|3 33 %1ߋ 1ߋ1ߋ1ߋl$ D$$L$(1!!ʉ11ʋ](1؋M,111%1ہ1ɈÈl$3t3 33 %1ދ 1ދ1ދ1ދl$ D$$L$(1!!ʉ11ʋ]01؋M4111%1ہ1ɈÈl$3|3 33 %1ߋ 1ߋ1ߋ1ߋl$ D$$L$(1!!ʉ11ʋ]81؋M<111%1ہ1ɈÈl$3t3 33 %1ދ 1ދ1ދ1ދl$ D$$L$(1!!ʉ11ʋ]@1؋MD111%1ہ1ɈÈl$3|3 33 %1ߋ 1ߋ1ߋ1ߋl$ D$$L$(1!!ʉ11ʋ]H1؋ML111%1ہ1ɈÈl$3t3 33 %1ދ 1ދ1ދ1ދl$ D$$L$(1!!ʉ11ʋ]P1؋MT111%1ہ1ɈÈl$3|3 33 %1ߋ 1ߋ1ߋ1ߋl$ D$$L$(1!!ʉ11ʋ]X1؋M\111%1ہ1ɈÈl$3t3 33 %1ދ 1ދ1ދ1ދl$ D$$L$(1!!ʉ11ʋ]`1؋Md111%1ہ1ɈÈl$3|3 33 %1ߋ 1ߋ1ߋ1ߋl$ D$$L$(1!!ʉ11ʋ]h1؋Ml111%1ہ1ɈÈl$3t3 33 %1ދ 1ދ1ދ1ދl$ D$$L$(1!!ʉ11ʋ]p1؋Mt111%1ہ1ɈÈl$3|3 33 %1ߋ 1ߋ1ߋ1ߋl$ D$$L$(1!!ʉ11ʋ]x1؋M|111%1ہ1ɈÈl$3t3 33 %1ދ 1ދ1ދ1ދl$ $KƉ$T$ω1檪111%11 13333111ǁ11 1Ɓ11z_^[]̸覨 l3ĉ$$U$VuAEW) AuA$ȈE) 3ɊBt Ars+R jP茨 D$P$QäVWT$$RD$ 
PbD$$ȋЈ$$D$($$ȋ$$$$Ƅ$$_2҄tuA҄tuA҄tuA҄tuA҄tuA҄tuAҊ) .F r$^E ]3 ĘËD$L$h8qPQ ̸趦 SUVWD$DP@ P@ P@ @P@L$H P@ ЋD$H P@L$H P@ P@@ @0 p@  փ|$LL$HT$JL$@Q  JT$B JBl$8|$0Br  B rB  ΃t$4UT$DڍTT$8_G _G _G _GoG oG oG 3ЋD$<3T$33\$jPL$,QGT$0\$4oD$03D$TFFFVL$43L$FFFFF Fl$D@l$8|/33҃wL$tX_OWO WO WO WOoO oO  3ЋD$<3T$33\$jPL$,QT$0\$4D$03D$TFFFL$43L$FFFFу VT$@t$FFFFF_N^][ËD$@P@D$ P@@ |$4@p @ p@ D$8 ֋t$0T$DD$8PBT$ ڍЉT$8$.FFV FFF FFF3T$$VF NF ‹T$< D$L3D$jD$,RD$,PFL$P3L$43D$03L$$3D$ GGGGOGGD$XGGL$T Gl$ ݉D$D1ЋD$8.VFF FVF  3D$$FFNFV NF  ‹T$AL$L$ >FAL$ 7D$0_0^][Å|$,M_ P@ P@ @PL$@ P@  ыL$(T$QT$R L$È@@L$@@@HL$AL$ >>2L$ FAL$ 5D$0_0^][̸(薊 l3ĉD$$D$8L$4SD$D$DD$UV0W|$LWL$ ʍGPD$(@@ @X @l$@@ X ؋ ӈD$-وL$,D$.L$/ʋ‰T$ T$0|$DD$\$L$1D$2T$3L$uZL$$QT$ R\$$D$5D$(L$6T$7ȋЈD$8D$\$,L$1T$2D$3L4,D$<2FM@E|$D$k̸覃 D$SU(VpD$W8ыt+ы+hX+ыHΉT$t++ +8+p3ʁхt+ы++ыH3΅t+΋++33Ջh3L$3ځхt+͋+ʉL$ ++щT$pXPω\$t++ ++x3L$ׁхt+ы++ыH3υt+ϋ \$++ϋx33֋3L$3ځхt+ϋ+ʉL$ ++щT$pXP͉\$t+ڋ+ ++݋L$3L$xхt+ы+׋h+ы3υt+ϋ++ϋx33֋3L$3t$хt+ϋ+ʉL$ ++щT$phPˉl$t++ ++L$3L$xхt+ы+׋X+ы3υt+ϋ++ϋx33֋3L$3t$ځхt+ϋ+ʉL$ ++щT$pXP͉\$t+ڋ+ ++݋L$3L$xхt+ы+׋h+ы3υt+ϋ++ϋx33֋3L$3t$хt+ϋ+ʉL$ ++щT$phPˉl$t++ ++L$3L$xхt+ы+׋X+ы3υt+ϋ++ϋx33֋3L$3t$ځхt+ϋ+ʉL$ ++щT$pXP͉\$t+ڋ+ ++݋L$3L$xхt+ы+׋h+ы3υt+ϋ++ϋx33֋3L$3t$хt+ϋ+ʉL$ ++щT$phPˉl$t++ ++X3L$Ӂхt+ы++ыH3ރځ˅t+ˋ l$++ˋX33֋3L$T$3t++ ++X\$H΅t+Ћ+ ++׋D$ _҉0^ ]H[Y̸F{ |$(SUl$,VW%UEEMEE\$, E ыt$(|$$EM EE MEE M D$8CD$,؍ÉD$OG OG OG OG_G _G _G3‹T$0 3L$8D$RD$PGL$$T$ FNFFD$$FNFFFFFl$,D$8K\$|3D$,wV$OOL$,OO L$,OO L$,OO L$,OO_O _O  3ʋT$0L$L$,3RD$PL$$T$ FND$$FFFFFȃFMEMEMEU>]EEMEU|$(t$$EE E MEE U ME L$,\$D$8l$4L$AD$,؍T$.FFNFVF^F FF NF V ؋D$0F PL$ Ql$ F\$$D$3D$ L$@3L$$GGGGGGGGGl$,l$\$8Gl$4\$D$8L$NFVF NF D$D$FFVFNF V D$,D$D$0PL$QL$D$@3\$ 3D$$|w9$8OOOOOOO_L$T$,L$T$8ً‹MEUEME]EUEME_U^E][Ë& ̸Vv |$$D$ SUl$V0W|$,M_ P@ P @@PL$@ P@@ ыL$(T$QT$RL$È@HL$@@@@уHL$2>AL$L$ >FAL$ 1D$0_0^][Å|$,$M_ P@ P @@PL$@ P@@ ыL$(T$QT$R+L$È@HL$@@@@уHL$AL$ >>2L$ FAL$ /D$0_0^][̸(Vt l3ĉD$$D$8L$4D$D$@D$SUV0W|$LWL$GD$(@@ P@X@ P P@ @ ؋l$@D$,D$- ӋD$.D$0D$1|$DD$L$T$ L$/D$2T$3\$AL$L$ >FAL$ 1D$0_0^][Å|$,$M_ P@ P @@PL$@ 
P@@ ыL$(T$QT$RL$È@HL$@@@@уHL$AL$ >>2L$ FAL$ /D$0_0^][̸(6` l3ĉD$$D$8L$4D$D$@D$SUV0W|$LWL$GD$(@@ P@X@ P P@ @ ؋l$@D$,D$- ӋD$.D$0D$1|$DD$L$T$ L$/D$2T$3\$ |$$D$ SUl$V0W|$,M_ P@ P @@PL$@ P@@ ыL$(T$QT$RL$È@HL$@@@@уHL$2>AL$L$ >FAL$ 1D$0_0^][Å|$,$M_ P@ P @@PL$@ P@@ ыL$(T$QT$RL$È@HL$@@@@уHL$AL$ >>2L$ FAL$ /D$0_0^][̸(6< l3ĉD$$D$8L$4D$D$@D$SUV0W|$LWL$GD$(@@ P@X@ P P@ @ ؋l$@D$,D$- ӋD$.D$0D$1|$DD$L$T$ L$/D$2T$3\$>^//q^//qSSSShh,,@ `@ `yyȶ[[[[jjjjFFggr99Kr99KJJޔJJޘLLԘLL԰XXXXJJkk**OOCCņCCŚMMךMMf33Uf33UEEϊEEPPPPx<!>!KKݖKKaa pppp|>>B|>>BqqffffHHؐHHaaaaj55_j55_WWWWiiXX:':'''88++"3"3iiiipp33--<"<" IIUUUUP((xP((xzzYY   ee11BBƄBBhhhhAAÂAA))Z--wZ--w{{˨TTTTmm,:,:c|w{ko0g+׫vʂ}YGԢr&6?4q1#'u ,nZR;ֳ)/S [j˾9JLXCM3EPfHa5WiَU(ߌ BhA-Tc|w{ko0g+׫vʂ}YGԢr&6?4q1#'u ,nZR;ֳ)/S [j˾9JLXCM3EPfHa5WiَU(ߌ BhA-Tc|w{ko0g+׫vʂ}YGԢr&6?4q1#'u ,nZR;ֳ)/S [j˾9JLXCM3EPfHa5WiَU(ߌ BhA-Tc|w{ko0g+׫vʂ}YGԢr&6?4q1#'u ,nZR;ֳ)/S [j˾9JLXCM3EPfHa5WiَU(ߌ BhA-T @6USVWt$|$$_)ہ)܁D$]$) +oofd$t$fw_^[]Ðl$^NV \d$t$^NV _^[]Ð|$33_3O3W t64t$}u}u}u }@u`Ɓt5|=1|=1|=1t$ށt5|=1|=1|=1t$΁t5|=1|=1|=1TL 1ʉ\1D1‰΁怀 )%1Ɖ怀)11މ怀,)111111111ًD$1L$ ց怀)1މ怀 )11Ή怀, )111111111ʋ\$1T$Ɓ怀 )1Ή怀 )11։怀,)1111111111ށ怀 )1Ή怀 )11։怀,)111111111ӋL$ 1T$|$l$33_3O3W ;|$|$Ɓt5|=1|=1|=1t$ށt5|=1|=1|=1t$΁t5|=1|=1|=1|$TL 1ʉ\1ڋ\$D1‹D$3G3_3O3WÐgt64t$D$D$ E]MUE] M@U`p ~p L5~Tpt5 t5 pt5 t5 n~L5~t5 t5 t5 nT5t5 bt5 \ t5 nD nb;|$wooppoopprrrrrrrro\$ddoooorrrroT$ddpٱpddoopѱprrrroowErr]rrMU@gÐ|$33_3O3W t64t$Ɓt3t3t3tt$ށt3t3t3tt$΁t3t3t3t|$T3T3T\$3TD$33_3O3W ;|$|$%}u}u}u }@u`mƁt5|=1|=1|=1t$ށt5|=1|=1|=1t$΁t5|=1|=1|=1|$TL 1ʉ\1ڋ\$D1‹D$33_3O3W ÐQPQP~AeS~AeS:'^:'^;k;kEEXXKK 0U 0UvmvmvvL%L%OO**&5D&5DbbޱZIޱZI%g%gEE]]/u/uLLFFkk__mzmzRYڕRYԾ-Ծ-Xt!Xt!Ii)Ii)DDu‰ju‰jyxyxX>kX>k'q'qݾOO f f}:}:cJcJ11Q3`Q3`bSEbSEdwdwkk++pHhXpHhXEEllR{R{s#s#rKrKWWfU*fU*((//{{770(0(##jj\\++yyNiNiee4b4bĦĦ4.S4.SUU22uu 9 9@`@`^q^qnQnQ>!>!==>>MFMFTTq]q]oo`P`P$$ֽֽ闉@C̉@CgٞwgٞwBB[8[8yyۡ| G| G|B|B 2+H2+HpplZrNlZrN8V8V==6-9'6-9' d dh\!h\![Tћ[T$6.:$6.: g gWWҴOOa a ZwKiZwKi⓺ ⓺ **<"C<"C  ǭǭ--WWuLuLݻݻ``&&\r\rDf;Df;[~4[~4C)vC)v##ܶhhcc11BcBc"@"@  
J$}J$}һ=һ=22)m)m/K/Kܲ0ܲ0 R Rww+l+lppHHGd"Gd"ĨĠ??V},V},"3"3INLJIN88ʢʢ 6 6Ϧϥz(z(ڷ&ڷ&??,:,:Px Px j_̛j_̛T~FbT~Fbظظ.9^.9^ïï]]iГ|iГ|o-o-%%Ȭ;Ȭ;}}cncn;{;{&x &x nYnY욷욷OOnene~~!!ٺJo6Jo6 )|)|11*?#1*?#1ƥ0ƥ05f5ftN7tN7ʦʦаа33JJAAPP//vM֍vM֍CMCM̪MT̪MTߞѵѵLjLj,,FeQFeQ^^5]5]tsts A. A.gZgZRRV3V3mGmGaa7 z7 zYY<<Ω'Ω'a5a5zGh>8$4,8$4,£@_£@_rr% % (Ky xZݨ31Y'_`QJ -zɜ;M*Ky xZݨ31Y'_`QJ -zɜ;M*Ky xZݨ31Y'_`QJ -zɜ;M*Ky xZݨ31Y'_`QJ -zɜ;M*t$ |$$_OW L$(vt$ WT$$L$('t$0OW ^NV d$_^[]Üw׻)9 <1|$0֋_D$(8d$4oof|$,t$ D$<\$$L$(|$0oongo/c[\$$vt$ L$(wd$_^[]Ü`w߉Ɛd$_^[]Ü^NV |$<_OW |$,}|$0t$(33_3O3W ^t$(t$$^NV vt$$t$<^NV _OW t$ vt$ yd$_^[]Üt$<^NV t$ ^NV _OW L$(|$$t$̀       p*Zx"RM|1}0L>PˏᛱD*nzߥ#6. ;$q Ɠz/U^) @iJ#cǽomxzj_t5+Aѐ                                    }|M*p[[[[[[[[[[[[[[[[`)Ih!APQ \] G@]Ziܵ6_wA(G]Z@6wA(i_POLSJ[Ȃ4~o%ՆPOLS{O1 j^~o[%4J3'bQvE鬟_T [PENAJe`㆔rwֆSLOPI;e,^rW}9D*n$< 1)Bd"F&`gYͦkU2> &Fd`B"ͦU 2>Ykg@~S->ԹmDVYKVector Permutation AES for x86/SSSE3, Mike Hamburg (Stanford University),$fo}fouÐfofoUfo*frff8foEf8ffxfoe f8ffoE0f8ffom@f8foL foUPf8ffo$ fof8ff8܁ff80f-foffrffomf8ffof8ffof8ffof8ffoo*f8f.foe`foEpf8ff8foL @ff8Ð`fofoSfȉfro*ff8foCЁ0f80ffof focf8ffoCf8fāf8fo#f8ffoCf8f-f8foc f8ffoC0f8ff8foc@f8ffoCPf8ff: foffrffoUf8ffof8ffof8ffof8ffof8fofoc`f8ffoCpfof8ff8Ð,$ofo@fo؍]fT$Ofo fo f80=w! 
7H+oFfoff:H`oFfoH,fpf|$fofo|$fo f8` fPfffffffffpƀffpffofÐfoT$ff:f:ffpf:fT$fofsffofsffPfoefomfoffrffoUf8ffof8ffof8ffof8ffof8ffoe f8foE0f8fffoÐfoUfoffrffof8foCf8fÐfofo<fPf8fof8ff8ffoUfoffrffof8fo^f8ff8foV f8ffo^0f8ff8foV@f8ffo^Pf8ff8foV`f8ffo^pf8fځfo f8ف0ÐUSVWt$\$ȋD$T$܉\$00- d$01_^[]ÐUSVWt$\$ȋD$T$܉\$0T -Xd$01_^[]ÐUSVW-1t$\$ȋ|$T$܉\$0old$0_^[]ÐUSVW-1t$\$ȋ|$T$܉\$0old$0_^[]ÐUSVWt$|$D$T$ -\$ȋl$$L$(oM)\$0<$T$l$Ǎ-JN of$T$fo3vKofL$fD$ $T$fD$foL$ 3v\$d$0 _^[]̋D$T$ D$JR Wf8I Rf8ÐD$T$ D$JR Wf8I Rf8ÐJR Wfff8f8If8Jf8f8؍R f8f8f8f8f8f8f8ÐJR Wfff8f8If8Jf8f8؍R f8f8f8f8f8f8f8ÐJR Wffff8f8If8f8Jf8f8؍R f8f8f8f8f8f8f8f8f8f8JR Wffff8f8If8f8Jf8f8؍R f8f8f8f8f8f8f8f8f8f8JR Wff8ff8fIf8ff8ff8f8&f8f8If8f8f8f8Jf8f8؍R f8f8f8f8f8f8f8f8f8f8f8f8f8f8f8f8ÐJR Wff8ff8fIf8ff8ff8f8&f8f8If8f8f8f8Jf8f8؍R f8f8f8f8f8f8f8f8f8f8f8f8f8f8f8f8ÐUSVWt$|$D$T$ \$$%AՉ=`oo^of on0ov@o~Pv`-`@o_o^g of o0on0w@ov@P`o~Pv`-`_g o0w@P``= O^uf =@n0v@W _g o0w@,JR Wf8I Rf8W__g _g o0Չ=`oo^of on0ov@o~Pv`-`Ao_o^g of o0on0w@ov@P`o~Pv`-`_g o0w@P``= O^uf =@n0v@W`_g o0w@JR Wf8I Rf8XW_@_g k_g o0_^[]ÐUSVWt$|$D$T$ \$$L$(<l$0o;o$ D$ D$D$ 1\$l$l$l$*fo,$fo׉f8E6WMWƍU Wf8If8Jf8ЍR f8f8f8f|$f8f8HvWfo7f8d$0|$(_^[]ÐUSVWt$|$D$T$ \$$L$(<l$0o;o$ D$ D$D$ 1\$l$l$l$fo,$fo׉Չf8JR Wf8I Rf86f|$vWfo׉7f8-qEMWU WWf8If8Jf8ЍR f86f|$f8f8ٍvf8f8~JWR Wf8I Rf8ًd$0|$(_^[]ÐUSVWt$|$D$T$ \$$Xl$P={o;$ D$ D$D$ 1L$L$L$l$f:f:"fffo$f:"kf:"Cf:"Ef:"Cf:"Ef:"fL$0f8fD$@f8fpfpـ=;f|$ Չ-fp@foL$ fpffpffp@ffffEMU Ifff8ff8ff8ff8ff8f8$FWN WfoD$WfoL$0_g ffD$@fo$^0f@W^Pv`fL$0f8Wo0WfD$@f8w@fpP`fpـ-ZL fo|$ f=fp@ffpf=fpffFWN WF0WN@WW_g o0w@JR Wf8I Rf86W7[.vWW_a;.vW~ WW_g 66~N WF0WW_Wg o0d$P_^[]ÐUSVWT$$t$(JR Wf8I Rf8ыt$|$D$T$ xD$`D$dD$hD$lD$pl$tfoffo\$`ff%Չ-` fpff $ffffffpffL$ffffffpffL$ ffffffpffL$0ffffffpfL$@fEffo^Wof fon0fov@foNPfv`f$f|$PfMU f\$f8fd$ f8fl$0If8ft$@f8ff8f8?foL$PfW$ffW\$Wd$ _Wl$0g Wt$@o0Ww@fpP`fo\$`ffffff-`pL `=fo= fpffffff/fpffofffff=@Gfpffoffffff,$ft$Yf|$ fpfL$0fffoo^of f$on0f\$ov@fd$ vPfl$0f|$@fk(L$@W$W\$Wd$ Wl$0_Wg o0w@PEvWJR Wf8I Rf8Wfo(^v WWWWW_ fo(^f v0WWWWWW_g 0fo(^f 
W$n0v@W\$WWGW$W\$WW_g o0@fo-D$p%foD$p2D$pf%rffD$pfpffl$`fWvO-+|$pWWJR Wf8I Rf8WWd$t_^[]ÐUSVWT$$t$(JR Wf8I Rf8ыt$|$D$T$ x1۩)D$`D$dD$hD$lD$pl$tՉfoffo\$`ff%-` fpff $ffffffpffL$ffffffpffL$ ffffffpffL$0ffffffpfL$@fEffo^Wof fon0fov@foNPfv`f$f|$PfMU f\$f8fd$ f8fl$0If8ft$@f8ff8f8foL$PfW$ffW\$Wd$ _Wl$0g Wt$@o0Ww@fpP`fo\$`ffffff-`pL `=fo= fpffffff/fpffofffff=@Gfpffoffffff,$ft$Yf|$ fpfL$0fffoo^of f$on0f\$ov@fd$ vPfl$0f|$@f(L$@W$W\$Wd$ Wl$0_Wg o0w@PEvWJR Wf8I Rf8Wfo(^v WWWW_ fo(^f v0WWWWWW_g 0fo(^f W$n0v@W\$WWgW$W\$WW_g o0@fo-D$p%D$pBD$pf%ffD$pfpffo\$`ffffffpfofffWJR Wf8I Rf8WNvW-+|$pWJR Wf8I Rf8Wd$t_^[]ÐUSVWt$|$D$T$ l$$|$(}Չ\$(=g->vJWR Wf8I Rf8щى-(|)1ى=P)<$-P)$?oo^of on0ov@o~PDFW$WN WF0WN@WFPW_v`g o0w@P-`((Pt(=^(= f =0n0=@v@)<$WFW$WN WF0W~@W_g o0@(-PJR Wf8I Rf8W(-WWW(Ӎ(- o{WWW(_ ~ -0:;NF W~0WW_Wg 0(-@%)$)d$l$$}_^[]ÐWRw Bf:rf:af:Vf:Kf:@f: 5f:@*f:Ȁf:f:6 JP1RWWWÐ~P Bf:gf:f:Af:f:+f: pf:@f:ʀZJ01ÐR(foWfsWfpUfffpfÐ(D*NZR P RBRf:f:f:{f:f:ef:zf:Of:df:9f:Nf: #f: 8f:@ J1ÐRWWWÐRWWɪWøÐÐD$L$T$ /ÐD$L$T$ T$ ID  R@f8f8ɍR@@J9f81AES for Intel AES-NI, CRYPTOGAMS by ̃|$D$ L$T$PQRu % l ̋D$L$T$h3PD$QL$RT$PQR(d̋D$L$T$h3PD$QL$RT$PD$QRP_ ̋D$L$T$h3PD$QL$RT$PD$QRPb ̋D$L$T$h3PD$QL$RT$PD$QRP#c ̋D$L$T$h3PD$QL$RT$PD$QRPR KUSVWD$t$|$O)ف)́\$D$]^NȋV d$ȋt$ ʉ^NV _^[]Ð+D$D$TUSVWt$|$O)ف)́\$D$]^NȋV )d$ȋt$ʉ^NV _^[]Ð33_3O3W wD$\$L$ T$13_3TL3T3L3D$13\$ 13w1ʉT$1ىL$ 13W3\D3\3D3L$13T$13w 1É\$1ЉD$13_$3TL3T3L3D$13\$ 13w(1ʉT$1ىL$ 13W,3\D3\3D3L$13T$13w01É\$1ЉD$13_43TL3T3L3D$13\$ 13w81ʉT$1ىL$ 13W<3\D3\3D3L$13T$13w@1É\$1ЉD$@;|$:!ƋT$Ɖ1 O \$3L$ wL$ #O1D$1ʋwT$)ڋD$ \$13_3O3W USVWD$t$|$O)ف)́|$<\$]j^NȋV d$ȋt$ ʉ^NV _^[]Ð+D$D$TUSVWt$|$O)ف)́|$<\$] ^NȋV )d$ȋt$ʉ^NV _^[]Ð33_3O3W wD$\$L$ T$13_3TL3T3L3D$13\$ 13w1ʉT$1ىL$ 13W3\D3\3D3L$13T$13w1É\$1ЉD$13_3TL3T3L3D$13\$ 13w1ʉT$1ىL$ 13W3\D3\3D3L$13T$13w1É\$1ЉD$13_3TL3T3L3D$13\$ 13w1ʉT$1ىL$ 13W3\D3\3D3L$13T$13w1É\$1ЉD$@;|$:!ƋT$Ɖ1 O\$3L$ w L$ #1D$1ʋwT$*ڋD$ \$13W 33_ÐUSVWl$$t$(|$,^NV ʉ_OW HF^NV ʉG _$O(W,33_3O3W ] }7$\$L$T$ 13_3TL3T3L3D$ 13\$13w1ʉT$ 1ىL$13W 3\D3\3D3L$13$13w1É\$1Љ$L$T$ t$,33^3N3V 
w$\$L$T$ 13_3TL3T3L3D$ 13\$13w1ʉT$ 1ىL$13W3\D3\3D3L$13$13w 1É\$1Љ$L$T$ t$$|$,G_OW G _ OW G _ OȉW̉ G _  G _ OW _  O$ W(G, _@ OD WHGL_OWG _ O WG _ O W؉G܉   WG  O WG  _ O WG _0 O4 W8G<Dt$,F0^4N8V<3F 3^$3N(3V,w $\$L$T$ 13_$3TL3T3L3D$ 13\$13w(1ʉT$ 1ىL$13W,3\D3\3D3L$13$13w01É\$1Љ$L$T$ |$,G_OW G _ O؉W܉ G  _$ O(W,    艗_OWG _ O WG _ O WȉG̉ _ O WG OP WT GX_\OWG_ O W G_ O W G_W@GD_HOL W` Gd _hOlWG_O  G _ OW  _ OW  G0 _4 O8W< _p Ot WxG|_^[]ÐSL$\$ T$F> RQS, 1[Ðf;XzsL/7ƾOSTo'-hˆVpppppp,,,,,,'''WWW###kkkWWWEEE555 OOOAAA###kkk|||EEE>>>^^^!!! 999OOONNN]]]eeeZZZQQQlll|||ttt+++΄>>>000444___vvv^^^mmmũ :::999222GGG]]]SSS===ZZZzzzQQQ$$$VVVlll```MMMiii fffbbbTTT---dddttt+++ uuu ߇LLL444sss~~~vvvmmmRRR111сooocccXXX:::aaa)))///222xxxqqqSSS"""rrrDDD666***zzz<<<$$$@@@CCC```iiiwwwPPP'''}}}555bbb AAATTT[[[!!!NNNdddeeeҽďHHH000___uuuGGG=== ???VVVMMM \\\fff---JJJ 333sssLLLggg~~~111RRRXXXaaa&&&777;;;"""DDDoooKKKccc...PPPyyy}}}[[[nnn)))HHH///???YYYxxx\\\JJJjjj333gggFFFqqqԛ%%%&&&777BBB;;;KKK...rrryyynnnUUU YYY666IIIjjj***FFFhhh<<<%%%888BBB@@@(((UUU{{{ IIIhhhCCC888((({{{www888AAAXXXvvvgggNNN```rrr ®jjjuuu]]]WWWFFF'''Ɋ222KKKBBB''':::%%%III{{{ CCCqqq\\\___>>>>>>ggg|||```nnn///444 MMMSSSrrreeeꎎzzz---kkk+++666555MMM333fffaaaXXXZZZ:::$$$ VVV@@@xxxccc BBB333̿&&&充aaahhh???;;; oooSSSbbb...((( ,,,tttooo666"""888ddd999NNN,,, 000 yyyDDD"""eeeYYYkkk===###HHHtttQQQTTT000~~~(((UUUUUUhhhAAAPPPCCC111///˨***<<<+++ppp222iii bbb$$$GGGEEEsssmmm~~~)))JJJø...攔%%%!!!HHHfff{{{;;;qqq)))777ͱlllLLLdddnnncccvvv---KKK&&&}}}___\\\OOOFFF???777yyy^^^GGGRRRmmm[[[^^^iiiZZZ<<<111LLL 555󌌌###uuu]]]jjjJJJWWWՄ!!!DDDEEEQQQ}}}999sssܪ|||wwwYYYVVVlllTTT444xxxpppIIIRRR PPPwwwݓ䆆***[[[zzz@@@===OOOUSVWL$GD$\$T$$l$(t$z)߁)|$,t$D$\$L$ T$$l$(] E] M@U`ND$$t$L$ |$(T$_OW 33^3N3V ˋ|$$Nt$|$ɉʉ_OW L$ vt$WT$L$ #t$(OW ^NV d$_^[]Ü|$P)9 <1_t$_D$ )D$T$$;t$|$(|$,^NȋV ˋ|$$|$,t$ 33_3O3W Ut$ t$|$_OW t$,vt$|$|$,t$(_OW ^NV |$,_OW N|$|$|$,^NV _OȉW ˋ|$$|$(t$33_3O3W ^NV vt$t$,^NV _OW t$vt$L$ L$ i|$t$,<tى󤐐d$_^[]Camellia for x86 by ̸ D$HPSUhVW8p  H P P H H P  P  P  P  P@  
9Gah D$3d D$3` D$3\ D$+ցGaډT$h 3d T$3` T$3\ X3׍ É|$3݋\$ۋ+sn<|$|$l$2MD$@EFKD$l$uۃT$L$+ljD$ D$+ЉL$T$T$$D$(RWPT$@T$4 Es%+>AT$ 3T(uD$ 3l$utDD$$l$(PWUT$@ l$L$>++(2K@uD$0_0^][YËL$0_1^][Y̸s D$ SUl$VWD$t,t(L$,t$2L$AF@ML$t$D$uԋ|$(w ΃rc;s+3D$$L$T$WPSQRT$HƃȉG u D9I9uu\$\$+sD$tp\$,L$$WQ3jCSCSC T$HΉw u D9I9uu|$L$t$++2M@uD$08_^][YËL$0_^][YUl$w3]S݃VWuD$(t$$L$ T$|$PVQ+URWl$0v+ˊ0@uD$ PVVT$4|$$L$$SWQq FGNOV W _^+[]S\$s3[ËD$L$T$Ul$VW|$$PWQ+SURu_^][ËL$v+ωt$$0@l$uD$ PWWT$4L.WT.GD.O _L.^][̸ p l3ĉD$D$0L$4T$8S\$,Ul$,Vt$8D$ L$T$w^]3[L$3op WujQP+VSU3WD$8D$_^][3o ̸ o l3ĉD$D$0L$4T$8S\$(Ul$0W|$8D$ L$T$s_]3[L$3o VjQP+WUS҃u^_][L$3ro 3V߉D$ D$$D$(D$,D$ SPo L$$T$jQRjD.PL$_^][3h 0̸vh |$ D$SUV0W|$(\$ t,It#T$ 0 >>L$ABFKL$T$uكr~T$L$+ljD$0D$+ЉL$T$T$$RWWT$@ s++>A$T$01(uD$03l$u6D$$PWWT$@l$$L$( >++(0K@uD$,_0^][YËl$ \$t/t>L$2Ј>AFCML$u߉\$l$ r{D$+ljD$0D$D$$PWWT$@ s3+\$+>AT$0(Ћ43*0ul$ \$D$D$0+3l$l$ \$ut;D$$PWWT$@L$(++σ >L$0L$0< 2шM@uD$,_0^][Y̸4Ff l3ĉD$0D$8SVt$DڍVW|$PD$D$LT$WUT$ WPT$(W WWT$4эF‹ |$Xt4~wD$׍l$,+++ËˉD$I 2\$) AuG3~AD$,+؋D$++T$D$ $T$L,,  2ЈD$E;|D$LȁyIAL$u0LT DHWP ]OW _^[L$03Be 4ò*шT$GtVL$L$L$ ӈWVL$L$ ÈFL$ ӈWVL$ ЃWu]L$<_^[3d 4̸vd U39l$vSVWD$݃+ϸL$0QL$"ЋD$(Ҁ T$T$,RPjQL$HT$&D$'ϋ|$0$">E Ѓ>;l$ r_^[]YS\$vAUl$(Vt$W|$+D$,L$$T$ PQR7jP͋RFu_^][̸c D$S\$UV0W|$(t%t! 
>l$2MD$@EFKD$l$uۃrvT$L$+ljD$ D$+ЉL$T$T$$RWWT$< s#+>AT$ 3T(uD$ 3l$ut9D$$PWWT$++(2K@uD$,_0^][YËL$,_1^][Y̸@vb D$D3SHHH Z UjVrL$$3l<3\3L 33l͋|$h_ OW/T_^[]ÐUSVWT\$ht$l|$pL$tL$t+SK[ D$D$ D$@8D$`$D$ pD$$lD$(HD$,TD$0D$4 D$8@D$<`D$@D$DD$HD$L൐3_ 3O3W3/\$ L$T$,$lL \1$3l<$3\3L 33lO>$3l<3\3L 33l͋|$p;|$t|$p3|$h_ OW/T_^[]ÐUSVW|$t$X_1ɉڈѽoDo ~sosD /s< M~ <sosDs< ~ sosDs< ~ sosDs< ~ s ~s ~~w_ WO/_^[]ÐUSVWD$\$L$T$ ^ ʉ$ $($,À$$SoCo[$SoSokGsosT$SoKs<ocWsooEs]T$SoCs<o[OsgoUsmT$SoSs<okGs_oMseT$SoKs<ocWso oEs]T$SoCs<o[Osg(oUsm T$SoSs<okGs_0oMse(T$SoKs<o#Wso8oEs]0T$SoCs<o[Osg@oUsm8T$ S(oS(s<ok Gs_HoMse@T$ S8oK8s<oc0WsoPoEs]HT$ SHoCHs<o[@OsgXoUsmPT$ SXoSXs<okPGs_`oMseXT$ ShoKhs<oc`WsohoEs]`T$SxoCxs<o[pOsgpoUsmhT$s<Gs_xoMseps<E]xo0XP 3Q 3Y1I$$$$1Љ$o|oĐ~soމs$s8|Đ2,~soމs$s8^|Đ2 <Ћ$~soމs$s8 N|Đ2,~soމs$s8^|Đ2 <~soމs$s8N|Đ2,~soމs$s8 ^|Đ2 <Ћ$~soމs$s8N|Đ2,~soމs$s8^|Đ2 <~soމs$s8 N|Đ2,~soމs$s8^|Đ2 <Ћ$~soމs$s8N|Đ2,~soމs$s8 ^|Đ2 <~soމs$s8N|Đ2,~soމs$s8^|Đ2 <Ћ$ ~soމs$s8 N|Đ2,s~sos|s<^~Ns s $$~oqqp;$($ P X0$,w_^[]ÐT$D$YofpNfpfofsffs?fffsffiffofofpNfpNfff:Df:Df:DWWfofsfsfffofsffsffs9fofsfsfffofsffsfffsfBËD$T$Yofo)f8fofpNfpNfff:Df:Df:DWWfofsfsfffofsffsffs9fofsfsfffofsffsfffsff8ÐUSVWD$T$t$\$ Y'ofo)of8Ł2oovf8f8ffofpNfpNfff:Df:Df:DWWfofsfsffRv fofpNfpNfff:Df:Df:DWWfofsfsffoffovf8f8fofoffofsffsff:Dfs9fofsfsffpNfffpNff:Dfofsffsfffsff:DRWWfofsfsfffo)v fofpNfpNfff:Df:Df:DWWfofsfsfffffofsffsffs9fofsfsfffofsffsfffsfąof8ffofpNfpNfff:Df:Df:DWWfofsfsfffofsffsffs9fofsfsfffofsffsfffsff8_^[]Ð  @8`$plHT @`FN V   ^ f(n0v8~@89;:H?><=P6754X1023`$%'&&h#" .!p*+)6(x-,.>/pBqsrwJv tu~R}|yZxz{lbm$onkjj,hibrc4a`ezd΀BƓJ ΔR֝Zޚb$揨j,r4z<DȮ LЧTؠ\"d*l2t:|GHASH for x86, CRYPTOGAMS by ̋L$QD$ ;P}D$L$ L$SUhVt$WyߍG+;FPV!L$ u_^][ËD$~96UPWVg~ t(t@KuuD$@D$t;tKu_^]@ [ø, S\$Ul$EVW{+D$y hhl jdjsj-_^]3[YËL$;APQ6 L$ tًE+ 3҉|$tU]8\$ t;҃+\$ B;+9l$uɅttz9Nuu;t:t6NNt*PQNtPQNtP Q uʋD$T$BB ~ L1uHB_^][YS\$ { Vt$Wt,~ tËދCN;|$;G8>~ t3Vt$SV u_^3[É~ _^[PWtVS1"}SVW" tG _^[VSW tG _^[SVt$^ W|$G 3tdtƋWV!})Vt$WV u_^3[_F ^[WVt$V~ t_F ^[WVt$V _^ ^[̸L6* Vt$\FD$ ~(|uhhl 
jkjkjT*3^LFD$`u@tD$ xuhhl jgjkj*3^LÃ|$ uAPV }3D$XtVPytD$Tt jP^LS\$hUWS'SR(S|$D$$PSS~l t3L$ VQtT$$RSWW(-t F;|D$l$;t WUD$$P"D$_][^Y̸# $@D$ t"hhl jBj}j#P$u$jPSU$WU!U0"U(" |$PL$VQ$$~ t"VSrUST$(C RUVD$(P膕 K$UVQW.0u$jRD$ D$=~ D$5=~ D$$O~ D$3ɃL L$~lUD$$PWWSL$I;~:IU DT{TPUL$$QSRP虗^F;|ɋD$$xjVD$|$ ,$$|$I$WPu.9D$uUL$$QVVVO|$뽽39t$~-O|($WR+t +ރFO;t$|ԃ|${u/3~)$P$L$$QPPP蔖tVF;|؋$RD$$P$L\QPPft($+T$D$D$$^U)D$ PD$ _][̋G;Ul$ V~ȍ43vL$ ʋ@;r^]Ul$;o UWu]V43vL$T$SL$@;r[o~t0uMo^]UP l3ʼnESVuFW3}ȉ}ĉ}Eu.hWhl jfj|j3e_^[M3q ]ËUR؃u"EjPpe_^[M3= ]ËMQ`EtE&EURVP6 r~ E.2~ EY~ E3DEMU ;ȉE< E}}G@ u#hO@hl QG Eփ?+W@jVu 3Ɂ }MEuE̸MMЉMM؋MEE܋EQxPWUuuPUR虙LE x u6MQP}"UEM RPWQUR^uGE MUQRPEPjMUQREWPQURUj΍}EUPj΍}}MUQRPQUR賘fEUPj΍}eH9EE~OMUQREPMQReEMUPQ΍}E@;EE|KÙ}3|USR5NK^_^[Vt$ W|$ ;F;G PWu_^ËV~.SUqyY )(pxX Jߋt$|$][Vtt uQPQP VWF G _^̋D$L$QSXUhl$ h VpW8l$)(ihihi h 9|$y|$y Yރ x_ ^]Q[Vt$W3;tNQWPg ~ ~_^̋L$A~u3Vt$~W})jV<t9t P'6>FD$3҅F _V^_3^̸F S\$W3|$;uD$;u_3[YËD$ȉL$;u {_[YUHVF;sVSL$(uD$t P^]_3[YËD$s{  I@Mu <3ouދC~ LuHC^]_[YS\$CUVHtRd‹t9W|$INƙ‹yIAGGu_^][̋T$JVt$ +Fu%AW:| +1;u H}3_^_H^W|$Vt$tqG ;F t3Ʌ^_L Uu hOV;/}]^_ÍQS|6 +ދ4 9;w rJ}3[]^_[]^_^_39D$ _̋L$}3ËSU‹؋VWyMEt$9^={;~ WVYu_^][ËF;}@;|~ͺ_^][ ̋L$}3Ë‹T$9B~V2yIA!0B~ L1uHB^̋L$}3Ë‹T$9B~yIA̋L$}3ËyIAT$;B}VuBpr2!0B~ L1uHB^̃|$D$txt@ @ ̋D$ T$VtW|$ L;t _H^ÍH|+0;uI}_3^̋D$S\$ Ul$Vu}+Ћȍ:u?A|W|$~ I9u&HDL;t_^]H[^][_^][ÍM|+0;uI}_^]3[W|$u3_VmtWVuV^3_Ë^_Ul$ VW|$3;o~DSUWv؃ttGFnO N [_^]SD0[_^]tWVhuVk_^3]̋FSW9Fudt@ hRhhn P- u_[ËFt QRW ~t P/>^ND$ F_[̃?tGSV$7>t VNuDPOB/Wu^[SUl$ ]t,VW>t VnuꋛDu_^EEE ][̋N ;Nu}Whhhn hH, u_SU߽S9uFD@>][uFF >~~_ËNDFF ~~_Åuu FDVVAN ̋N A+ʃN tJuN@NHuVt$V3FF F$F(^hhhn j,+3Ƀ ;uhhhn jAjjjQ3ÉHHHH HHHH H$H(W|$t t GPX-WJ-_W|$G$u:9G(u5G VPw^u$hhhn jmhjG$_@G$_Vt$F$tHF$^NFNV W<;s+~ _F(^Vt$~$uI~(uCWu$h*hhn jmjtjF('_3^jWF _^3^UVt$W|$;}L$NjD$D$L$D$l$jWPU#N_^]ËL$RWPU NNDD$S\D$$L$QD$RWPMQYJNC~pT$L$PWQU;JN~ST$BL$PWQURJNC~1D$HT$QWREPID$,CNn[_^]̸L$0SUVW|$8Ǚ+‹D$<,u'u(u$D$4L$0T$,PQRZ _^][Ã}RT$4t$,QL$4RPQVlL$PT$TPy jQs _^][ËL$0+PSRQD$0T$(L$D3D$,D$ +PUQRD$DT$4|$dD 
$\L$0T$+PSQRW\SL$(T$H+D$T$0D$+QSRPW(SL$8T$HD$(QURP QSD$DhT$ D$L$0RSPQWRD$(L$H+RUPQRRD$D/D$ L$T$0PSQRWRD$8L$HT$(PUQRPR(|$< |$@\$8ۃ|$\$uOQWS] 3CCC CCCCT$4D$0l$,RPU]L$T$@)L$<RQPD$(]u||$<uu|$@un\$8ۃ|$\$uW RWS[X j@jSQD$@L$ereeee6S\$0}+D$8T$, D$4QL$,RT$,SQRY[ËL$(UVt$tF>u;|$t1WR+hn LQVu>tF>u;|$uы|$D$jzhn jAjhjt WDD$t P|$utS_^]3[ _^][ ̸ VS\$U3l$ ;lb<-u CD$ \$VW= P3׃tLFQ׃uT$\$ D$;t;u2|$;u3_^][ UPD$|$;G PW<;u9+uW{_^]3[ Ëޅ~g}+3D$$qЃ wqwqw3O @ʋD$EoŅ~LuHGD$L$ G D$9_^][ ]3[ S\$ U37-<-uCVW= P3׃tILFQ׃uL$.D$t9uu3_^][jW;G PWuT$9uW_^]3[ø98T+ƃ u3 3t0@CtQЃ uhʚ;W] VW 33 uЋGo ~LuHGD$8D$_^][]3[Vt$ >-WuF80u%HXtxu|$ PWu_^Ë|$ PWku_^À>-uA _^Ul$ VW339u tD$jhn P; ur9uuL$jhn Q: uTS]xAUutL$jn PQ: uyɃy[_^]_^][_^3]̃=hqu"j j@hn jhXqhqXqVJNP?u^ËD$WPjjjV<L$ QVVf8_^̸WD$uD$QP_ÍASU‹YVyKCKhhn UD$( u$hhn jAjjD$(^][_ÍL$Q  $T$ jRUVtEluT3~NID$jPDD$~-U, )C;^|]t D$@_^[S\$ VsuD$ VPxF^[Ul$3W;|+ʉL$;t&;MQUL$ u_]^[ËC E D]Nt~" +D NЅL$_M]^[̸L$ UVt$F A W|$ Ǚ‹VD*;Al$  PQu_^]ÁSyOGD$F +߉L$$T$uFxW<(<H}Bvx6.T L$$N *}l$T$t$ QjRVL$(l* ʼni[~T2uHA_^]̸FSUVW|$ Ǚ‹yKCl$ +ÉD$E;UZ+‹T$D$ ;t';BM J PRcT$ u_^][YÅEM:+΍t$ rutlu_^]A[YË0L$ t"֋0L$ l$ ouދt7_^][YËD$jPu_^][Y̸S33\$9\$u[ËL$Vqx, UW<3 ыL$ S3Q PR$NT$}_]^[øUl$D$u]YS\${u[3]YVU +VSSt$(u^[]YW{x64L$UVQ/1 + Dt$y֋t$C_~ |uHCD$ ^[]YW|$ uG_Vt$ Fu WV^_Ã~ t%WVF ~tm39N N ^_ËLu@;F PV"u^_3ɍ;N|Nj;vAߋF;|@F^_̋D$uVt$NWu PV^t jV_^Ã~ tPVF _F ^Ãu9s+_F ^Ë39s3ҋ>):׋>A9:r)<u FH;uF_^Vt$NWtWT$uRV_^ËRQPP+t)F@;F PVu_^ËFu#~uF |$tFt PWF~D$$tF(D$(tF$N QR|$ F VWPQRuQ%lKtN QR|ubhahn jqhjEV(t%N$tQN WQNQPP҃t _^][ËV NWRQPPuut V33_^][UVt$ 3W;tr9ntm~unF~ u9ntF uD$UUPUUV1F u/|$N WQPPPt6FV WRPPPthhn jkjgj~ uF_^]SVt$>tg~ta~W|$uDFD$tNQPu3ۋV WRPD$PPju3_^[WVu_^[hhn jkjdj$^3[̋D$ L$T$PQjRJ̸S\$UVWSD$3ShS`3ۃ ;D$PW6F;L$QUF;E;u!9G9D$9_~u;UjUuCSUtSUUi Ut+9wu3 o L$ o L$D$} tE  t T$ډT$3SW?uCSW0tSWW t!9uu3E o L$L$ tu3u3}u3M #t T$ډT$D$ PWUUu4Njp 9w,}uM9u} tD$\$T$ R_^][u$Y̸&S\$$Ul$4Vt$4FW|$ u;xD$4H u9 u x U!UUD$,UD$,UD$(UUD$T{؃\$(|$0uD$D$4L$ UVPQQjVD$,6u D$$@PVD$,tD$$uL|$jVW ZjWG @T$ D$UVWRP{VPL$$QW T$jVRF D$UVP@ D$(WP"L$UVQS:UVSWWjWT$D$ 
L$8UVRPQqtmD$8UVWPP\tXT$8D$RPtBL$UVt$@QVt*T$ RVt7hxh4o jojyjD$t;D$0t PD$U(D$_^][ËL$Q?tT$B D$I|$}D$PXjjVPS>vVS| ~ u VSSЃ H{uL$QS$,T$URS t(u4D$@RD$Vhh4o jqhh4o jpthh4o jqD$$PD$PP L$UVQSS{u:u{ uhh4o jp]D$PWSL$ UVQW1u$T$jR UcD$_^][ËD$8jPT$8D$UVRP\L$ D$UVQPPT$ D$8UVRPPD$xu9u x 'T$UVRW\$(^u 8u tnC\$;\$$UVWWW/u&T$@WQRgD$8xL$jQU9D$_^][ËL$(QWO\$$+\$K~ $UVWW'KT$(UVWWRD$8UVWPPphD$(UVPD$(PPSKL$L$$hZ!uD$6D$4|$xu9u x u3PWuP;tkWp_^]3[Ãu>:u7uD$4jPHPWt_^][j^h4o jpjyjn_^]3[SVF3W~'t ~tVWW< tlVV ~+t$WW}tMVW}%NjWWYt)VVKtCFutSWW u_^3[Ë_^[̸DFUV3WSD$D$(SSS SD$(SD$8SD$4Sl$0 )D$Tu+D$ D$jPjU L$hQWT$\RVzF  uVW|ID$(OL$,WT$0G T$8D$4OSVD$0 PWL$H6lD$L$+D$;G PWtUMPSQRQF E\;u T$JVD$F$;t VPt$$Q軴_^][Ĉ̍ @D$T$L$ nD$nnnbnrnz nXnhn` ~nRs nbnr~Hs nZ nh~Hs nh~H s nh~Hs nh~Hs ~Hs ~H@ s 6nnЍR~s @~wÐUSVW1|$L$\$l$ Q֋CGG֋CGG֋C G G ֋CGG֋CGG֋CGG֋CGGց[  L$ ICGIGCGIGC G IG _CGIG=CGIGCGG։Y_^[]Ð PD$T$L$ nD$ɐnЍR~s @~wÐUSVW1|$\$l$L$ ֋CG֋CG֋C G ֋CG֋CG֋CG֋CGց Ul$MCGMvCGM]C G MDCGM+CGMCG։_^[]Ð 0D$T$L$ nR@wÐUSVWt$|$\$nVGFV GFVG FVGF V$GF(V,GF0V4GF8V< @\$uKVeGFKV SGFKVAG FKV/GF KV$GF(KV, GF0V4_^[]ÐT$D$L$ ÐUSVW\$t$|$l$ 1 NWKNWKN W K NWKNWKNWKNWK l$ M NWMKNWMKyN W MK YNWMK9NWMKNWK_^[]USVW\$t$|$l$ 1)) NW))KNW))KN W ))K NW))KNW))KNW))KNW))K l$ ))M NW))MKNW))MKyN W ))MK YNW))MK9NW))MKNW))K_^[]USVW\$t$|$l$ 1)) NW))KNW))KN W ))K NW))KNW))KNW))KNW))K l$ B)) M)) M)) M)) M)) MW)) M()) |$$l$$)Ձ)) W))KW))KW ))K W))KW))KW))KW))K T$$)Ձ))M W))MKW))MKiW ))MK GW))MK%W))MKW))K) "N)KN)KN )K  N)KN)KN)KN)K zl$$A) M.N)KMN)KMN )K MN)KMN)KMN)K NKNKN K NKNKNKNK l$$R MGNKM:NKM-N K M NKMNKMNK_^[]Vt$ W|$US1ۋ1ɋ1ËD$ыF1ՋWD$ՋHF1ŋFӋWŋӋWŋD$ӋhF 1ËFыWËFыWËыW ËD$ыX F1F ՋWFՋWFՋW ՋWD$ՋHF1ŋFӋWŋF ӋWŋFӋW ŋFӋWŋӋWŋD$ӋhF1ËFыWËFыWËF ыW ËFыWËFыWËыWËD$ыXF1FՋWFՋWFՋW F ՋWFՋWFՋWՋWD$ՋWHF1ŋFӋWŋFӋW ŋFӋWŋF ӋWŋFӋWŋFӋWŋD$ӋWh F1ËFыW ËFыWËFыWËF ыWËFыWËD$ыW X$F1FՋWFՋWFՋWF ՋWD$ՋWH(F1ŋFӋWŋFӋWŋFӋWŋD$ӋWh,F1ËFыWËFыWËD$ыWX0F1FՋWD$ՋWH4F1ŋD$Ӂh8X<[]_^ÐVt$ W|$US1ۋ1ɋ1ËD$ыF1ՋWD$ՋHF1ŋFӋWŋӋWŋD$ӋhF 1ËFыWËFыWËыW ËD$ыWX F 1FՋWFՋW D$ՋWHF 1ŋFӋW ŋD$ӋW hF 1ËD$сXH[]_^ÐVWUS|$t$11ɋ1ыF1ҁՋFO1ҁӋFӋoF 1ҁыFVҁыF_ 
1ҁՋF VҁՋFՋOF1ҁӋFVҁӋF VҁӋFo1ҁыFVҁыFVҁыF ы_F1ҁՋFVҁՋFVҁՋFV ҁՋFOV1ҁӋFVҁӋFV ҁӋFӋVo F1ҁыFV ҁыFVҁыF_$V 1ҁՋFVҁՋFՋVO(F1ҁӋFVҁӋFo,V1ҁыFыV_0F1ҁՋFO41Ӂo8_<[]_^ÐVWUS|$t$11ɋ1ыF1ҁՋFO1ҁӋFӋoF 1ҁыFVҁыF _ V1ҁՋFՋVOF 1ҁӋF o1с_O[]_^USVW1|$(t$T$ߍd߉)%)1)ԁ^NV v6D$\$L$ T$t$_l$ nt$|$ l$11n'n.n]ool$nMnFs s AonLnDs ~\s I9o~\s s \ B1n$n.nt$ n]ool$nt$$nMnFs s AKont$nLnDs ~\s KIo~\s s nt$\ R9=wt$k|$ 1ɉ)D Ջ?kD$1ҐōI9ىl|$t$|$ D 1ɉT$L(D$ FAl Iŋ9ىlD |$t$Ł|$ 1T$l T$L(D$ Fl Iŋ9ىll Łl1L$ T$D(IT ;L$D$ 9t$L$ 11ҋ$L$ D$ AI,C; $Él|$t$,C|$ l ,Pl$T(D$ ˁFl ŋDll$Iŋ9ىll ŁlL$ 1t$T$D(T 9ىD$|IL$ D D 19ٍIZ,l I; $lځ|$t$|$ T T l$D$ YFl$|$t$ 1ҐDIDR!Љ! L Kd$_^[]Montgomery Multiplication for x86, CRYPTOGAMS by ́$?,$nnۉL$1T$1L$ f1щl$1fL$1sT$l$!s!n!ns!n s!ns !n s !ns!n s!ns!n s!nsn s$Ð$,?<$L$1щT$1L$ 1щl$1ՉL$1!؉T$!߉l$!1!13!,!11ʋ !11, !11ʋ  !11,!11ʋ !11,! 11ʋ !11,<1111$ÐR/#D$f:DD$ÐUSVWD$\$ oD$\$$oD$\$ 3D$3\$$NjD$os _s ^[]PwÐUSVWD$,\$4D$T$ D$0\$8$T$D$,\$43D$03\$8xl$($L$|$t$ 111؉]1u 11_1^U[E]GF(2^m) Multiplication for x86, CRYPTOGAMS by Vt$VFP 3F(F0^VjKh@ j4q u^VFPF(F0^Vt$t!VaFPXF0t V^Vt$W|$WVu_^ÍFjPzW~ F(_F,^SVW|$WPWڋt0Ul$UVՄtD$L$WPVjQ{bt]W[_^[̸薌SUVW|$,WD$WjWt$^D$( u WK\$ D$u W6؅|$t$(D$$VP}^[3]W|$u 'st/3;u G G_^[]ÃtWVSYv u_^[3]Éo tWqHPWy _^[]̸,f$DW3|$|$u"hh jfjvj舀3_,Ë$<VR}q$LPnqD$;u;u$8jQ@u^_,;t$D$S$TUS)~S~Sl$~S|$0~S$~틬$\D$< $tl$-[l$$TSRU ~ |$,5~ |$,"O~ |$,3DD$,D$=~ D$0=~ D$O~ D$3ɃL L$$D~ u $TRVu} Ƌ$&$TSPV$Vj Tƃxu$@jQsSMUQPVL$0~iT$SUVVRO;~=S|[L$SUQRP;F;|Ë$L~ u$TPVt}%$TT$uD$jPn^ Ul$W|$ ;t?G;E PUlu_]^ 39G~ U @;G|WU}S]K;ىL$+D$,u KD$N9D$tW+yNF+ȍՋ1t +΋1PD$L$(t$(@< D$uyJBʋȋD$1t +1hL$;El$ t9E[~ULuHE_]^ Ël$ L$IyJB؋t +ʅt T$ D$1~Ft‹yJBʋ +14t t1tDL$(E<uN̸6wS\$UV3Sl$uS vu^][YW|$G;F PVjO,  ,  ,  , l        Tal$~~DuO~D$ L$PVQx tSt_^][YS\$K3u[VqUW$ <+Lt;D$}l$\\\$@+Lt;D$}l$\\\$@+Lt;D$}l$\\$@+Lt;D$}l$\\\$@tN R_];D$^} T$ @[S\$ VWjS!j|$3?tǐPSltF<u_^[_^3[̸tL$$jD$PQ tD$ L$$RPQ hvh jjhjt3̸ tS\$0Ul$0D$ ;uD$WSVVWtFT$ URdtD$WSPVVPtyċL$VQ9_tD$[WiD$_^]YS\$UVWS3\h`Fh PƔtoVWS؃ t3;/L$$T$D$QL$WRPQWc_^][hdh jjhjjW8_^][_^][S\$U39+uD$ UP_E][VW|$ WhW>it( 
IQV;btT$D$WSVRPWh_^][S\$UVWS3P[hFh P薓tjVWS؃ t.;*L$ T$D$QWRP#W8_^][hh jjhjiW _^][_^][̸Fi3Vt$(D$D$9uPD$$Px^^SUW|$8WngWgWl$$gWgD$4L$0VQUI }uT$,jR ^zUS\gH+½;|LWVSS?BWVSS+.D$PSS$ H+E;~WVt$"h< V[ VhT [uPD$LPY_^]8~D$H;tVPMX_^]8S\$L;tC{| jSWu[_^]8Ë3ϋƺ+΍uL$WQU3T$ |$033ۃŋVÉL$L$,l$‹T$ N׋ȋ^L$ÉNL$$L$,L$ˋ^ L$L$D$L$Ë^N ʋVljN3L$D$N~@j PVVjh L$8VQ#NjэT$@## ȃ+΋1uT$L B1uH[_^B]8ËD$TL$HPhT VQn_^]8̸@fbSVt$P~ ^W>hP VUY|Vh ?YuPD$TPmW_^[@~D$P;tVPU_^[@Ul$T;tC}| jUrUu]_^[@Ëuϋƺ+I,(uL$WQSy3\$ T$033+ŋl$\$,+lj++ˋË^3ŋl$ \$0N++ˋË^3ˋ\$$ʼnN+ˋ^ L$ljN +ʋVL$‹T$,ljN+ʉ|$|$NjVL$ ‹T$0ljN+ʋVL$$‰NL~!+jT PVVr@}9@ +jp +RVVZ؋ׁJ%L jhp D$hd VuVVh _VuPD$dPT_^[P~D$`;tVPR_^[PUl$d;tC}| jURu]_^[PËuϋƺ+I,(u L$ WQS3\$<T$@33+++L$,+L$0L$$L$ Nj|$<++ʋ++L$0‹V3L$$L$(‰N++L$8‹V+L$<L$(‹T$,T$0D$PjWRN^L$+D$$+D$hx VRVh lRuP$PP_^[p~"$;tVPO_^[pU$;t>} | j UNu]_^[pËuϋƺ +΋,(u0L$ WQS 3l$TL$L3ҋ|$(\$H+‰D$3T$D$ӉL$D$V+L$ +L$$L$H‹VN+L$$+L$DL$(‹V N+L$(׋|$+L$HL$,L$ ‹T$jjWRD$,N ZL$N3+D$,D$0RD$(jL$PD$(F3PQ9ZNjڋV3+D$0QD$8D$,jD$,WD$PU߉FY+D$4ыN3L$8D$,D$(D$HӉF+ыNӋL$<D$0D$,׉F+ыN D$4D$0D$@ыN$F +D$@D$8D$4D$DыN(F$+D$DD$<D$8D$HыN,F(+D$HD$<D$@׉F,L~ Rj RVV9}2DRj +QVV؋ׁJ%L j h T$XVR#ËэT$`## ȃ +΋1u$ B ,1uH]_^B[pË$$Ph VQGc_^[p̸D&WVt$P~ FW>D$Tmh VNWVh( MuL$PPQ*L_^D~D$P;tVPJ_^DS\$TU;tA{| jS0Ju][_^DË3ϋƺ+΍d$,(uT$\@D$WP5L$3ыL  |@| L$jL$QVVjh T$(VR؋эl$0## ȃ ֿ+΋u C@uH]C[_^DËD$\L$PPh( VQa_^D̸ vUD$(L$$SVW|$3D$ D$L$u GtOD$,L$(T$ RT$(PD$(QRPVCt_#^[ Åu t VF_#^[ ̸ TD$ L$$RT$D$D$ L$ L$PQRD$F̸ TD$ L$D$D$$$RT$PD$L$L$$QRPD$術 ̋D$Pj`h I ̋D$Phh I ̋D$Phh I ̋D$Phh@ I ̋D$Phh@ lI ̋D$Phh LI ̋D$Phh ,I ̋D$Phh  I ̋D$Ul$ PS Gu3]Ã{~ ujS0stWWjVVWUjS? 
u,jSst1GWjV迍VjUjS tWjV裍 _]_3]̸RD$,xD$<1SUVW|$@WPl$,u WVQ|$,u WDQD$,W7QWD$-QWD$#QL$@t$PWQ݉D$0T$8\$,WRD$WVUP [v\$$WUVS^WVSSZJWVt$UV趆2WUVVZVSS%  { tL$QSS& T$t$0WRVSS%^VSSb& l$DjjUt$(SVDIjVqD$T$D$ jjRPVU$,'L$T$ QRVU'D$L$ PQVU)T$(RVUB GL$(PQ+D|hhؕ hG<tWSRG@j PJW@GOIRW|$SPD$4RPWуthW>+T$(RW(C+ȃ ~D$$QjPM t$&hhؕ jv hhؕ jAjhjLSJSuJtVU臞Uw ^]D$ [_Ë$Qd7LSUl$Whhؕ j j3n9~PuEh hؕ j jnh hؕ j j n ~PuD$PV^FP^PL$ QiSbnPT$Riu EYE~TuIu.h*hؕ j jSnh+hؕ j j @n ~TuD$PVw]FT^Tth7hؕ j j n_][h9hؕ j jm_][Ul$VuWUQSKq^]hIhؕ jj mWUVS)qhKhؕ jj m0^]̸,VJSV3ۃWt$\$\$\$ H;;UWHWIW It$XD$FPi;‹hqhؕ UsȉL$$|$D$PtAt)thhؕ jvpT$@D$DRPUQ28$T$@D$DRPUQ%T$@D$DRPUQ :L$$SUQ? #VRSP@|hhؕ hFtt$V8\$L+QV=+ȃ ~ QjSG l$hthؕ jAjfjzGWEWKEt$,tUVYVr D$]_^[,_^[,̸,FSV3ۃWt$ \$\$\$D;UWEWEWEt$XD$ FP7‹hhؕ U+pD$$Z|$OGD$@;~h hؕ jl:L$DSPQ; /VRS=|hhؕ hF~u -F(~u,F~u,F~ u,F ~$u,F$~(u,F(~,u,F,D$hNPQ-VUjjjSR+} t~d$*|$PFPWL t_L$VQRW|$(WAntDu 8u to|$WjUtG |$tNUjjjSQ|u\$hh jhj93tS 8S7_^][TjjUt t$3T$ F UjjjRP9|tN VQRe0uGrʃts)|$PF PW? NL$V\$QRWS0m/{u 8u{ tH|$WjUusG |$Uhh jxhj38\$$jjU0s N VQR/} FN NF T$F NRVPQR/A~(PFPST c(N PQW9 HT$RW|$,SW@,n~~~~~~ ~$~(~,F8~@~D~H~P~T~LQ$SVjVujV~(>tCF;|T$URFV' [_^][_^3]̋L$A9D$D$ 8PVq3~I Bt@;|;uhh jqjqjk&^Ã}hh jgjqjI&^Ã+;t$ ~hh jmjqj&^VRT$R9& ^hh jkjqj%̋D$Ul$H;~jIh jnjnj%3]ËL$V+Wq FWVi_^3]S3ۅ~$I>ujVC~5>tCF;|T$FURFVr% [_^][_^3]ËD$ }jrh jojrj%ÍH9L$L$ 9QVp3Ʌ~$BtA;|;t{|v<u@|uhh jsjrj$^Ã+;t$ ~hh jmjrjp$^ËL$VRQ$ ^hh jqjrj@$^jwh jkjrj&$̋D$L$;~jFh jnjkj#3}jLh jzjkj#3ËL$PD$PQ# S\$ W|$;~jZh jmjoj#_[Vt$+PjV#L$$W+QV#^_[̸t"l3ĉD$pD$x$U$VT$ RD$L$ l$3D$PUD$SW$3ۅl$jD$ˋӍD$,UPL$T$\$P $T$$QRD$0P;R jL$QT$0RR tkD$ ,0;jL$QT$0R R tF1D$DPL$0QQ t,L$+RD$DPQA" C;(D$_[T$ R1RL$|D$^]3!t L$ T$PD$PD$ QRPa̸ F!l3ĉD$L$,T$4D$$S\$,KW|$8L$K;T$ ~(j-h4 jnjyjS!_3[L$3!! 
Ã)}(j3h4 jxjyj&!_3[L$3  UVjph>T$LPD$jURPStm+σ)QUjR!L$+W@QP jV ~2jIh4 S J |$u*jLh4 jAjyjz ^]_3[L$3F  jVSW|څ~+ˍ$0@upPSUD$$jP|L$|$l$T$++++֍\$+ދƉ\$00X(0X0X\$0XuҋL$QKL$0F^]_[3 ̸<6l3ĉD$8D$@L$HT$TSUD$D$XVW3H)L$T$|$+\$\yD$3ۉD$\hj|h4 PH ujh4 jAjzjS4/jV(L$hT$QRP^\$(PUSL$DjQv,֍D$ 3+АD 0\0X\0X\0X\0X|蹜PjL$(QUW*~L$+ϋ0@uj~L$lT$PjD$@PQRPD$4׋+֋4;0s|$;}I<8u@;|;tw<8uq@+9l$T}9hh4 jmjzjWH_^][L$83m<UPD$$P WH_^][L$83>h WT$ h@؃ u hh jAhjD$L$T$PQRWSi~+ύd$0@ut+͸ 3;uG$;}F<t Ft#hh hhjA6$|++;t#hh hhj D$ jPL$0QC jT$,h RE l$D$$UPL$0QgE +tWT$,VRJE jD$DPL$0Q>E ttt$ō|$@r;u2stE:uv:V:Wu v-F:Gt%hh jhhjD$D$t SI@L$(Q,E$D$_^][3t̸,fSW3|$ |$9|$HuD$DD$HL$DQ؃;߉\$[Vt$P|$\$L$$+Q_AT$PjRD$0P?AjL$(h QB T$D$HRPL$,QB tT$VRD$,PB j+PL$,QB t|T$$RBD$TL$PQ+PSUTuV+t/D/@~3Ʌ~$T$0A@;|D$ t+ȸ EL$D)D$\$S=D$]^_[,Ë_[,̋D$L$T$ PD$ QL$ jRPQ̋D$L$T$ PD$ jQL$RPQS\$W|$+yjPh jnjj_[ËD$ Vpuj"k~GPhV t>FL$SQVs ^_[Vt$9t$L$u!FHL$VPD$ Pj@ ^_Y3Y̸Ul$ D$uh\ VZX3Ƀ]YhX V@XhSVY hD VXt RVY h4 VWchX VWMhSVY 6h VW GtAPVh VxWt MQVh h VKWhX V5WhSVXh VWGt PV hԥ VV~hhX VVhSV/XtJhĥ VV~8G t PVh h VV~hX VV]YËD$]Y̸vVW|$Pt$=uHSU\$l$\$$U؅t WNt UQ>][u(_3^YËD$u"hX QU!_3^YËD$tT$RPVLs _^Y_^Y̸D$ H$$|$L$$PjjQxS|$uqD$$RPx<|$uWT$ $QjjjR6 |$u7L$$PjjQh<$tjjjT$ PR=YËD$@YøY&E_̸ Vt$ PD$!=t&hh hhj^ S\$ 5u#hh hhjFUQ=t#hh hhjSD$u#hh hhj(RHPBP t u*hh hhjMt@PPPs؃ u*hh hhj؋FWt2PN}(hh hhjZF t1Pt#hh hhjL$4D$ QjST$RP^tcL$jjhjjQ~ET$jWhjjR~(D$UjhhjP~D$_]V\D$[t PZ:D$ ^ ̸S\$UVsD$P3UhjjV ^]3[ËD$:1WVl$XSl$$L$QjhhjV T$(RjhjjVD$(uS責D$(0u+S螬W+qWD$4EH uL$($_|$(t&讲G?L$(QP)S7v@t8 SP T$R v@tfl$8D$PUL$ QA8PUj3 8GT$Rjh>PGP8L$QPW3 tet$8t+T$R$tLPjhPV8D$PjhL$@PQ8D$D$t U7WD$_t P,D$^][^][V誅t5D$ PVt#L$T$WjQVRχVNj_^3^WjbPTujHhx jjtjY3_ËD$SVPjjjWPt:L$QVXt(T$jRVWCV;WBL^[_W31L^[_jch j$B&3Ƀ ;u3ÉHHHH L$@@AA A$Vt$Vu^ËD$ vWxGtPFu_^ËWVGF_^̃~ tËD$Hhh QPd%3҃ ‰F ̋D$Vpt)Ft PgF t P'V&^SUl$ EVuWxF_P莨\$(;t&hh hhjX_^][ËNQTr_u2~uD$L$ WT$RPSQj_D$FuLUtT$ F SRPNQqPV jD$0N WPQCS~ (u4D$L$ WT$RVPSQRqPD$tUNFNVPD$$QN RPQWT$F jWRPW PNT$D$ QL$(WRPQ | T$_^][VW|$ wFNWu_^ËGHV D$jQL$$RPQ |9VROpPGN 9;t h1h jdhja_3^ËFP`;t#h7h 
hhj._3^ËD$t|N WQPA lBWJT$QL$RQL$T$RQjjPoPw n|$ $VGHD$RT$QL$$RPQ }_^ËT$:_^̋L$AVqVWxFu*D$L$WPD$$QL$$PQRSoP_^Ãu'T$D$RPT$RjQ(_3^Ãu Qu_^ËF L$T$jWPQR~ƋFN VPFQL$ RPQWH3҅_^Ã~ u#hh P~P F tFN T$PD$WQRPHu_^ËD$ ;D$Cv L$rd$;)st,: vV:Qv F:A_^ËD$HPIBT$QL$ PD$QRPz| L$ ̋D$HPIBT$QL$ PD$QRPZ| L$ ̋D$t`u!hh hhjH3Ãu5QGmPu!hh hhj3ø̋D$=S\$VsW3H $lN|$QJ~_^[ËD$3|$|$;tGT$RWWPtD$ ;tPWD$WP\0D$;tP=uF_^[hJh hhj&_^[Ë|$OwINWu_^3[ÃuC uCt~usF~_^[hh h낍;w$VD$_^[9Vthh hA=uNT$_^ [ËD$*_F^[ËD$=}hh h_^[ËD$_F^[9Vth%h h=u(FtL$_^[ËVD$_^[ËL$_N^[ÍIGBmrW|$u"hXh hhj<3_Vt$x Ɗ:utP:Quu3p I:utP:Quu3u +h Ǎ:utP:Quu3u ` Ǎ:utP:Quu3u X Ǎ:utP:Quu3unP NJ:utP:Quu3u7L Nj:utP:Quu3u!jPD$hjjP\^_hmh jvhj^^_ù< Ɗ:utP:Quu3u#W$ L$jPhjjQ^_ù, Ɗ:utP:Quu3u#W$ T$jPhjjR蓾^_ù ƍ:utP:Quu3D$D$WPu^_ËL$T$ QjhjjR  D$P^_̸ Ul$Vu~u&FthPu^3] S[؅u[^] Ã}Wt|$UP3NWQRS_S~D$$jP| [^] [^] ̋D$HQsƒ̋D$HD$ID$HD$I D$HD$ID$HD$ID$uË@@$Vt$FPtPFPF< F<^̸v|$$|$ |$SUWVV/V'Vtc!PD$PUu tK L$PQW] t3VWUS t#T$VSRj"V}_][YË|$ Vi_][Y3Y̸UVt$$3uUu^]SWVV[|$0؃u hh jAhj _u=G OWPQR؃ u#hh hhjRu/Gt(9(t$ȸA$ PQuG<u:GT$PT$PT$P T$@T$  ƒL$D$ OG@WPBPVQSjguhh jhjUPE V|$,u VLu S_[^]Vt$FPW3;tP F< ~PF SgSD$0t P]$D$4[_^]3W|$G<@(t&L$ T$QL$RT$QL$RT$QRWЃ_S\$V| 3c bL$(T$$QL$$RT$$QL$$jRQPVSWc(^[_̸UV3t$wuGPV~tS_u؅G(uDL$QNT$VL$N T$VL$ L$$ уD$T$ ƋW URPGPStw_D$ tu S[tu VUD$ ^]Ë^]W|$G<@,tWЃ__̋D$pq̡pqu pqVt$F<@tVЃF@tPF@D$ F<@tVЃ^VWhḩ jD 3 ;uhḩ jAjgj M_3^ápq;u pqS\$F<;t6Su$hḩ j&jgj VN [_3^É^@;F@F@;t=P諱F<;u-hḩ j&jgj F@PV [_3^ËN<^4>~F~ ~~~~~ ~$~,F0Q SVjV(%F<@ ;t.VЃu$F@;t P]SVj%V{ 3[_^Vt$hḩ jF0jP<N_3^_^j)̃|$uFjHhP j ujKhP jAjrj r3ËL$@ø̋D$ L$T$hP PQR̋D$L$hP PQLv ̋D$uCL$ÃuVt$ R^ø̋D$ L$T$h PQRw̋D$L$h PQu ̋D$ L$T$hl PQR7̋D$L$hl PQ|u ̸ة ̋D$ L$T$h4 PQR̋D$L$h4 PQ,u ̋D$Phl AVt$W|$VW迼D$(PVWu L$_^hP T$RVtL$(V_^̸VD$u ^YËD$hP PL$ QT$RtD$ L$T$PD$QRPL$Qh^YËD$HD$%h)hܬ jjkj t W|$Du UT$RD$,PD$][_^,hhܬ jejkj p_3^,̸@SVt$X3D$9^ F9^UP2t2t*t"hDhܬ jfjqj ]^[@ËF P='~hJhܬ jgɍL$WQT$,RD$DP L$\9X9X VRP/vD$\@9Xf9X ]NQPHVD$\HWRQT$4R *L$X;~ȍD$PQL$\QU VWRD$0PL$ QRFl$\UWPL$0QRPF(tN WQV,jRQ؃F<@SWt#N QNT$4RQNT$(RQT$XRVЃ$#F VPL$4QNRD$(PQT$XRqS 
t@FWPL$DQT$ Rj脩t#EPL$QFt"t$hhܬ jjqj t WT$RWD$,PML$DQC _]^[@h<hܬ jejqj ^[@̋D$H(̋D$@,t PL̸ VtSD$,L$(D$D$$T$RT$ PD$ L$L$(QL$ RPQVD$ t^ V3^ øvD$ PL$QT$RD$ PjuË $VWQT$RD$Pjy D$u=D$HL$PT$Rj ucj^h jhjuj Ht"tjmh jijuj $_3^dujgh jAjuj YD$PL$ Qj u jsh jhjWFu7jyh jljuj t Wt Vt_3^WT$(VjtRv^_^̸D$ S3VpW\$ 9Xto9^ tj9^te9^t`mGPV=hh jAjvj D$ t PBt S%_^3[YË߿L$ QVFh뜋T$ PRSWjt@PD$(PBt_^[Y̸ &SUVW|$8WD$(PL$QT$3RV3ۉt$43D$$PL$ QT$ RU D$ 80L$uvD$PQU" U蟖SU蜖jU蒖?uT$8OL$|$T$88uxXPD$(D$PQj跂؃ tY{u&ST$RD$0Pj؃t*|$u#D$HL$,PT$0Rj uh%h jryjS/FuhY Fuh h jAFo|$ uhh jA)F NVWPFQRP!uPhh jmjsj L$4Qxt Sh U荗VW _^]3[ ËT$4VjtR][W7th UU_^][ S蟁_^][ ̸SVW3|$ 8uh:h jAjtj \$KFPQthAFSBjPuhKh jmjtj =L$ QW豀WـT$ SRVjjjtPD$,P& u2D$ t Pt Vdt W臀_^3[Y_^[Y̋D$HL$D$HQ T$̋D$@x txt xt3øS\$ CH WQu_[Vt$VB t P Fx KQRt.F@t P߽NySBPu^_3[ËNAt P諽V^z_[Vt$ FH W|$ WB QPu3_^Vt$W|$FPz}hh jijvj%_3^ËNQzVHPRz uh"h jhjvj%踳_3^ËWP uIh(h jjjvj%荳h Whص j{$_3^hh jCjvj%^3_^UWu hBh johj%._3]VhGh j( u!hKh jAhj%^_3]3FFF FFFFF F$\u8Ot /u8Ou3>$ʃ\H$pH+GnF ~nu+GN~VG+̓unN=N6+ͅuN)~%tu nF+N n N$GN39Fu9F uF9FuF9F$uF ^_]hdh jshj%诱V^_3]GGGHS\$ UVPd$@u5, W+h Sփu+h Sփuh Sփu hh3hGh W1 _uhh jmj}j%袰^]3[Sth V(  ^][h V(  ^][̸0l3ĉ$,$4$8$u DŽ$4IVh8  u4hh jlhj%^$,3ʯ0SW= h V׋؅u=VP hh jlhj%讯_[^$,3w0Uh Vh VjjD$ӋuVP hh jl.L$QWD$ $Յu3WH VP hh jrhj%<ID$,$D;r T$0;r7D$PWT$uWH VP 3$<]_[^3觮0WH VP $8P@u+‹$L~!;|p|$V$<QW覮 7F̸0l3ĉ$,$4Vh8 D$  u3hh jlhj%3^$,3߭0S Wh VӋutEfH fE J MVRPMQ ~ ~u ~u~ t$/\FtV RPU ~ /:GNVS+;~hʉL$+D$ tt/T$ BCT$ ;rL$L$ \$ +SP/R D$/\NV@+ك G;NV+;~iIʉL$+D$ tt/tuT$ BCT$ ;rL$L$ \$ +SP/R D$/\NV@+ك G;N$V QR/P N$ [͋]9_É\$ (\$ SUl$V33Wu*l$u~h+h jChj%P_^]3[Ë|$ōPI@u+h0@h P uh4h jA룋׋+Պ @uōPI@u+h;@h P6 uh?h jAP׋+Ս @u3ۋ8uhJh jA؅u(hQh jAhj%GV_^]3[Ã>u~uKNSVC F FuKNSV<\t~~~ ~~~~$~(~,~0~4~ F8QSVjVFD@ ;t.VЃu$FH;t PSVj"V 3[_^Vt$hh@ jF8jPNDAtVЃFHt P脂Vhh\ jAjmjFD$t Pt Sh_^]3[YËWjR@tčD$PWtFT$L$ W|$I9Hu 9x u9Pt >u_^ËHP 8QҋP¶>_^S\$t*V3tWFN >PV莶u_^[S\$t*V3tWFN>PVNu_^[W|$uhh jCjyjŠ3_Ëx(uhh jBjyj螊3_Vhh jD uhh jAjyjg^3_ËH(VуuV蘵^3_Ë^_Vt$t@,tVЃVb^Vt$t,H0tV @,tVЃjDVV ^̋T$Vp4uhh jBjrj蝉3^ËL$ ;thh 
jejrjy3^;u^QRփ^W|$u3_ËD$ VPXt&WVhuA,tVЃV\^3_Ë^_̋D$Vt$H8uhh jBjj轈3^ËT$ ;th"h jejj虈3^RVу^Vt$H3W|$(;uwD$ ;u _^3[UWHxWx;L$ t$Q^HSmjS`sT$(D$,WRPU蟅tWUVtRVЃ FtUPPmtfL$ T$(WQRSRtHt WSSVЃt/jU:t FHPUyp@D$WwD$]t PwD$_^[hhĻ jghjy^3[̸xD$Vt$W3|$ |$;tNHQPdlu_^S\$ Ul$(;u;9t\|$,uvD$u][_^ÅtWNtQSV҃tVtJWQUV҃t7-;tFtPSkt1;tƈVUktD$D$t PuD$][_^̋D$HD$h wSVt$W|$3ۍGH\$ D$\$;u/1uD$;u h+hĻ jAhjw`UVuVvVvVD$0vVD$8uVut=VWtRSWЃT$ VQRWЃ|$ 1OtQSjLjW|$$Wlj{u t|$T$$VWSR;toD$$VWSPU覃tZL$$jUQ袒 tFT$ VWRUt2jU诗t#D$$VWUPSJt{tD$]t VtD$_^[t Ps$ Vt$FPgNQgV,Rg F@^Vt$ W|$ FPOQ9iu_3^ÍVRGP itN,QW,R itӋF@G@_^̋D$@@,jPJj̸tV3W|$(t$t$ ;urD$;u_3^ËD$SUl$ ;Ƌt$t7WNHQP]S蚀tWSSVЃD$(t3WNHQP]S[t WSSVЃtzD$,tjWNHQP],S t\}0u:u }8u3t%tt WSVЃ WSSVуtD$ h@D$D$][t PqD$ _^øsSUVt$3Wl$9|$,u=qu_^][YËD$ \$tWSRPVЃD$$tWSRPVЃtyD$(tiW,SPV҃tYOD$ t$tNQPftGD$$tVRPft.D$(t,VPftD$t UpD$_^][YVW|$t*t$t"D$PdcL$T$PVWQR"_^hhĻ jChjr_3^̸rD$SW|$P3W\$\$t#hhĻ jjhj,r_3[Vt$0;uoD$;u ^_3[UV pVpVD$ pVpVypD$T$(t"L$V,RQWЃD$T$(B,x9x tEL$,tVRQWЃXT$(D$0@VRPW҃#D$,tRPdT$(D$0RP~dVOHQPS蚥u hUhĻ jhjpVu SUW GHPSU~D$,tT$(VURPWЃti|$0tZVuL$SUQWGHPD$SUP:}t-T$D$(VRT$8PRWЃtD$VPnD$]t PmD$^_[ø(voUl$KtqSOWQIPUtXT$(D$$L$ SRPQV3[D$u)R3PpJ3D$;t PV;t WzD$_^][̋D$uh h| johjjX3ËL$T$QL$ PD$RPQ̋D$uhh| johjX3ËL$T$QPD$PPR̋D$uh%h| johjW3ËL$T$ QPPD$RP6̋D$uh1h| johjzW3ËL$T$ QPD$RP̋D$uh=h| johj*W3ËL$PQJ̸ ̋D$L$D$L$UVt$W33u,Tu_^]VTV4US\$SP8MuD$ǀSPMuD$ǀPgSPLuD$ǀ0DSPLuD$ǀ !SPLu$D$ǀL$ T$VQRSP"hh4 hhjU[VSt WlS_^]̸&USUV33Wt$9t$t`|$tX9t$ tR\$$tJt$(u RtqD$ VSPW]tDD$VHHQWW҃t*'hh4 jChjT_^][YËt$tUR_^][YËD$_^][Y̸VTSUl$V33Wt$tS|$tK9\$ tEt$$u QޅtoD$ VPWc tCVMHQWW҃t-*hh4 hhj$T_^][YËt$tSQ_^][YËD$_^][Y̸ VPu^ËD$S\$Ul$W|$ WSUPVul ZȁuG%=t=u4hTVmPGtT$WSURV@uVc_][3^_][^VzNPt*D$L$T$ PD$ QRPVau V3^Ë^̅t-jihL j>| ujlhL jAhjR3É8@@@ @@@Vt$jhL j$FjPu^W|$tUhhL j$GjPpu7Vwt%tPFuOQO}WF}^_W|$tehhL j$GjPuGVwt-tPjV譣F uOQ|jW茣W| ^_ø&QS\${UVD$ l$u;L$3t?˅tD-T>+;T$rL-#ˋ;~h;}dt_+t D-;t;uHD$L$F>9D$$RPH؍D-;th.';t$vWh5h"h hhhL jDhjOL$ Q{^D$D$][ËT$(D$ 2^][̸\6OL$dS\$dVW33|$\|$X|$D|$1SPT$\2;\$$v\$$D$@D$;D$,4\$4|$(t$$L$HRPQL$xRPQz 
3IG;r‹|$hE;l$,M$L$LT$0PD$tQRP!\$$t$@4u#$D$tT$pQPPRtk|$D$D$ l$|$++ljD$hD$h9(43t;D$Dt6|$@u"$T$tD$pQRPb 39L$DL$D|$@t%L$tPQaD$@)$ D$tRT$tQPPRD$@D$;D$-t$@tD$tL$pPQͽ!|$Dt$D$tL$pRPQ t%D$dhhL jAhjaFD$`t P0DD$Ht PD$4t P~qD$t Pmq|$ t$tPTqFuWAq|$Lt$tPػFuWqD$t PqD$d]_^[\̸(FESUVW|$;uhh jA,D$W3D$t P4t U7<D$t P/D$t P/D$ t P/D$(t P/D$t P/D$,t P/_^][$VW|$ ~m3ɸ 98t ACr9 ]=ĉQPQI PH u#hh hhjl=3_^WVm_^̋T$Vt3t$ t+CrC3v Wy<‹9|@;r_C^̸<UVt$3D$D$ u,U:D$ ujGh jAhj<.SV:VA;؃W|$VW訯ujQh jvhjl<GujXh jqhjH<VPW uj]h jkhj<W.tvVSW譬 tg{ujfh hhj;DVjjSUW膹t0UW8ujmh jzhj;D$_[t V9D$ t PR9t U5D$^]̋D$L$S\$Ul$VSjjUPQpnu^][WjIh Vd t#T$D$SVWURP9nuW~dt8Fht1~lu+L$ tD$tNdD$tV`^jqh$ jBhj73^̸ ̸ ̸ ̸ ̸X ̸ ̸ ̸ ̋D$L$h PQ ̸v6U3Wl$;;;t P%G;t PVS薭P@VL u&h<h$ jhjW6D$ ^_]Á8(u hDh$ jAhj6jjjUS%u hJh$ jhj5jU;GthQh$ j hj5\h wu&heh$ jAhjz5D$ ^_]SFSu&hoh$ jhj=5D$ ^_]WJFu huuVD$PSRFuhh$ jAwL$QP8uthh$ j Tu{T$RD$PL$QStTh Fuhh$ jA T$L$FHVD$BD$ t U%D$ ^_]Fuhh$ jA_3]̸3Ul$(Vt$(W3|$$|$|$|$D$;;9}9}S%؉\$$;%D$;V藪PAW=u4D$PSWV艥u;hh$ jhjJ3L$QSWVեuhS1$‹T$R$‹uD$D$^hh$ VD\ t$uhh$ jAVS(؃umhh$ jhj2D$t P]D$ t P]\$$3;t S#D$[;t P#D$$_^]Ét$uj|$T$ESRP R MVWQQ t$0~<}umBEuahh$ jA-hh$ W*[ |$ uhh$ jAL$WQ'YhE` EH V@F.3^ËuFt PuFth PW/tW1t'PCjFuh Vɹ3^Ë^̸F-SU3ًCVWl$l$l$;J9(B9h9C;9(9i@;9hSPUQR" |$;u hh$ jhj,C@PUQRB" t$uh 뾋CQG=#Sr=D$~#h h$ hhj,"zu h&h$ jAhj\,FP{G=uwFu h3h$ jshj",P06;~~VU#Wh;h$ hhj+m=~u hNh$ jshj+;6G;O;;~~~zVU#QU #WRU"GPU"jU"L$T$jQRU%hTh$ hhj*|=u hah$ j~hj*Uhfh$ jshj*5=C@u hth$ jshje*jPe0u hzh$ j hj4*} }U"=D$~#hh$ hhj)jVWUu hh$ jhj)QKytdF3Ƀ̸fSVt$3W3\$ ;9^U D$t~~ um tlD$PFUP tVUW4tG9_t^uNQM؃t)T$FRjjWSPЗt~ ^D$U t~u S貏t~ u Wo D$t PD$]_^[Yhh< jChj_^3[Y̸&Vt$ D$NFPQĔt"h?h< jjhj%D$^YUqFWP!CNVUQRƔ u hKh< jkhjFx SXu hRh< jzhjNUSQjWPCuhWpVWRuh\F SP[| hfh< hhj)rF NUjjPWQѕuhlh< jhj;VFURWPthrh< j{hjD$[Ut Wn_D$]^Yh9h< jChjz3^YVt$Ft PD$ P3ɃF^Vt$F t PnD$ P3ɃF ^Vt$Ft PΌFL$ PQ蝍3҃‰F^̋D$L$H@t L$D$ËD$H L$頊SUVWhh< j!j I9|$4t$$\$0l$,F WS UPj uL$WSUQV赉hh< j!j 8_^][̋D$@t D$<̋D$@uÉD$;̋D$L$ H̋L$D$!HVztD$PFu V3^Ë^̸6U3W|$l$ 
l$;x9oo9l$e9l$[V;@GSPW؃;OQ1P=uD$VV|$D$Vt2T$(D$$ORPSQ訍T$GVRUSPl0L$(T$$GQRSPL$WVQUSRʍtkD$ UP u;L$T$$QRr u%SWDt5Wt(D$hh< hhjVt SՉ[D$^_]hh< jChj_3]Vt$FHPNtQƈV ^Vt$FHP2NtQ)ƈV ^Vt$FHPNtQR3 F\F`FdFhFlFp^Vt$W|$GHPNHQu_^SWtR^tSu[_^UPUrtsO\N\W`V`GdFdOhNhWlVlGpFpF\;F| PSt)F\; PUu][_3^ËFx;F|}$ @;F||񋆌;}IU@;|][_^̋D$UVt$ WP~HW3Sj^\SW豛H t*t%hh hhj[_^]ËL$SQ~tW迗 t;F| PWtFx;F|} ,@;F||D$ SPWn t; PWvf;},@;|[_^]_^]̋D$Vt$W3tNHQPVu_^ËD$tVtRP:t!D$tƈVPt_^̋D$HPsHUVt$W33u* uhh jAhjISV V؃t%D$H\QPS t 9kt[t V1t W _^]Vt$FPNQy,Vp ^Vt$FPNQ ,V ^Vt$FPNQyV,Rp F@^̋D$ SU3tm\$teVt$WP~WtGUWN~SWtt/UW6~,~PWWtUWn@_^][hlh jChj]3[̋D$SW|$WP3t hh jjhjN_3[PO,QGt hh jBhj_3[Vt$tWRVt4jVNt$tWVitjV*^_[̸ V SVt$4W|$03VW\$0\$( tD$tVt$PhVyuGPVsu^_ËNQ^_V*pt5D$ PVut#L$T$WjQVROrVGv_^3^Vot2D$ PVut L$WjjVQrrVu_^3^̸Vl3ĉ$U$Vu^C]$3VĈÅtF~UD$j PUL$QW%8^3]$3 ĈVh W9} ~39$+ÉD$+‹+u/URD$j PD$ MQT$RW7lL$3;$ t QRh W|5F;$ujhX W873Ƀ ^]$3ĈWKP<uj^h j hj3_ËD$SVPjjjW>9mt:L$QVst(T$jRVWoVsW4^[_W34^[_WzJP<ujnh j hjf3_ËD$SVPjjjW8&mt7L$QVrt%jjVWoVKsWR4^[_W3A4^[_̸4S\$@U3VWl$,D$ l$4l$l$l$l$ l$$l$(l$0;u D$CF|$4;+SUtpD$Pt$HhPVk7 S PyPh8 V]zhX VGzD$,Sl$P[P运D$8=uD$Lt$uD$dD$SD$$BD$(1W9l$PtL$T$QRVSVD$L$PQVSUu D$S u D$T$$jRSS t6D$(jPSS t"SYTWjPVSD$T誧|$ u D$L$Q‹T$R;sD$P;sW;sL$$Qu;sT$(RV;sSSD$@t SSD$0h h  Vp t$P|$HhVW4 {D$8PPh( WwZ|$LtUS؃@hVW4 )SPh Ww L$VUQh   T$VURh WU`D$VUPh W8`L$VUQh W`D$D$t PD$t PD$t PD$ t PD$$t PD$(t PxD$4t Pt U D$,_^][4VDP$6ujNh jhjv3^ËD$WPjjjV2L$$T$ QRV_V. 
_^̸S3V\$ \$t$,;uD$ ;u^3[39\$(WVD$0V|VD$L$UQW 2T$URWW1D$4UWPP[Vt#CtPWSуL$VUQWWT$RKtQWS҃D$4UWPPt1VQWSЃUW|$uD$L$,t$$T$ SPQVRwD$(ULWQe D$ HPU|h됃u%}~ UtF39D$t hfD$,t$$L$ SUPVQNt5T$ SVRkR uhhH jkjgjoD$SD$t P+D$_^][ ̸ VW3|$ |$t$(;uD$ ;u_3^ 39|$$SUVD$0V{VsVkVD$DaD$;|$ L$(G\PQU[ r}VuG\PD$8RPj)USW҃6VSQSW҃SGtPSZ SUSZ VG\PS\$S_kuasȁu/%tu%hh` jnhjhh` jhju{~D$uD$L$0VSUQW҃t>D$;D$,tD$0UPPY t!L$0T$$VQURWMtD$V|D$][t PD$_^ ̸ SUVt$$WD$t/t*t%hh` jhhj_^]3[ ËD$$|$ PWOtDD$,t/|$0s%hh` jdhje_^]3[ _^][ W"F‹u ML$T-T$|$,D$9D$0s%hh` jdhj_^]3[ Ët$4uTD$D$4bVVVVNVD$$DWL$T$$D$ VQSRP1L6D$(L$,t;{t5L$D$ VSQWP҃~ tD$,S1+;v hvD$,V@jP" ~L$,RSE;thmD$(tuT\$S+;vh7vL$,VjP T$,PS;|$tChh` jDhjGt$HVD$t P _^]3[ ËL$4QrD$t PD$_^][ ̸U3Vt$(l$ l$;u#h5h` jdhj^3]ËD$$W8ǃD$ tttth?u ;thD;uBt$hLh` jfhjR_^3]ËL$$T$ QRWG_^]ËD$ SPC‹CtD;t%hXh` jfhj[_^3]Ët$4;uOD$;tVVYVQVD$@GD$JL$,UASQ 2D$$HPU2| hlh` jfhjSuT$|$(D$$VRUWPL$0T$,QDSPw D$$L$0HPQ|hy놋\$$uDD$0|$VUPWSуtw~t39D$t h9D$0|$(VPUWS]Gt8L$$VWQZJ uhh` jkhj[D$VD$t PD$[_^]Vt$HLuujMhx jBj}j3^ËT$ ;tjRhx jej}j3^t?xuD$L$PD$QPRV^ËL$D$QL$PQRVd^ËD$PD$PD$PRVу^Vt$HLu!ujohx jBhjH3^ËT$ ;tjthx jehj$3^t?xuD$L$PD$QPRV^ËL$D$QL$PQRV^ËD$PD$PD$PRVу^Vt$HPu!uhhx jBj{j3^ËT$ ;thhx jej{jt3^t3xD$L$PD$QL$PQRVu ^J^ËD$PD$PD$PD$PRVу^Vt$HTu!uhhx jBjzj3^ËT$ ;thhx jejzj3^t?xuD$L$PD$QPRV^ËL$D$QL$PQRV^ËD$PD$PD$PRVу^̋D$|q̡|qu B|qVhh j uhh jAjej+3^á|qu |qF ~uQFFt;P"F u+hh j&jej+FP蹬V3^ËN QFPVj V ^Ã|$u3W3._Vt$Ft P^FPVj jV'Vq^S\$Vh/h/h/SEu2W3_u^3[h/h/h/VS$^[Ë^[̋D$L$T$ PD$ QL$ RPQj ̋D$PfuÃD$"̋D$PFuÃD$̋D$VP%u^ËFtP=FL$ N ^̸@"̸|$ D$$vj{h jAjdj+U+`VWUU3U|$,'t$@VD$(ID$uhh jdjdj+YSV uVb=؃\$uhh jAjdj+ D$ L$4UPQjSVEuhh jejdj+oV>PtU=uT$ RWSV@u"h붋D$ PWSVQAuhVm:‹W艸‹;vhh jDjdj+Ohh V D$u hL$+SjQET$D$0SP@;thh jjdj+jD$W7PmSj=uL$QUW9uhiT$RUW>:D$$L$l$SPQU}l$$hh jhjgj*;hh jjgj*!l$$t VPD$t P?|$(u SϽt U肱D$]t P5D$t PD$[_^hvhh jlT$$SRVVIuhh jGl$0Et P蠰|$,t P荰D$El$$7D$4jyh jAjgj*jdh jCjgj*_3^̸薾SUl$4V3WU\$ \$\$ZUkU芎 D$ ;;;&D$;uhh jA~@D$$b؅SD$BD$USV. 
uhh jS |$4 ;~F‹T$D$,RWP> uhh jA ;~.D$+RPP uhh jt$4t&D$8tPD$Qu*h)T$L$_^Á_vjth jAjdj_3^ÍOFSۅujyh S& j{h SPuj~h jAjdj芴3[_^Ë+RjQF^蜴 [>_^Vt$ W|$ ;r+PGjPd 7_^ËO;rW+QjRA 7_^Á_vhh jAjij_3^ÍVGSۅuhh S$ hh SQP=u hh jAjij聳[3_^Ë+RjQG_蓳 [7_^̋L$T$ Vt D$Dv7IAHu^ËL$DtSd$HAu[^W|$u3_Vt$j[h VA uj^h jAjgj踲^3_VWPֲ ^_̋L$ D$Vt$W3vt IF@GwtP@u+_^̋D$ L$S3Wv 9tHCAw3Vt$vt HAFGwtƍP^@u+_[S\$u3[VW|$jKGh WO ujNh jAjhjı_^3[WSV _^[̋T$u3ËVp@u+PR^̡(#PŨuh(#P3hqj'SUl$Vt$W3^8SVW.~~~ F~~~~ ~(~$F,~0~4E ;tVЃuSVW= _^]3[_^][Vt$u3^juhT jF,jPCFtjjjjjVЃ~-N8QVj t@ t VV^{̋L$D$!H̋D$@#D$̋D$L$ H̋D$L$A̋D$Vt$tx tyS\$Ul$W~tjjSUjV׃~#~ u"hhT jxjoj 舯_][^ËH SUVу ~F0tPjSUhV׃_][^hhT jyjoj 9^Vt$u3^ËW~t{xtuS\$Ul$tjjSUjV׃~#~ u"hhT jxjqj Ȯ][_^ËHSUVу ~F4tPjSUhV׃][_^hhT jyjqj y_^Vt$ttyxtsS\$W~tjjjSjV׃~#~ u!hhT jxjnj _[^ËHSVу~F4tPjjShV׃_[^hhT jyjnj í^Vt$t~txxtrS\$Ul$W~tjjSUjV׃~#~ u"h5hT jxjhj \_][^ËHSUVу tPjSUhV׃_][^h)hT jyjhj ^Vt$ W}3D$;~t|$ hl WNtu u_^_3^Vt$u3^Ët[xtUS\$Ul$W~tD$jUPSjV׃~)T$ASURVЃtL$PUQShV׃_][^hhhT jyjgj -^Vt$u3^ËtVx$tPS\$W~tjjSD$$PjV׃~)T$A$RSVЃ tPjSL$$QhV׃_[^hhT jyhj 蟫^̋D$jjj P̋D$jjj PVt$uD$ ^Ã~$N$tx$H$uL$ H$tA(PjjV\^Vt$u3^W~$VjjV4F(tN$H$F$tV(P(_F$F(^̋L$QtII$tQuL$tP̋D$uVt$ ցt uu ;t@$u^̋D$uË@$Vt$t"W~,Ƌv$PRu_^̋D$H$Q PIH̋D$L$T$ PD$ QL$ RPQj̋T$8T$`L$8L$D$t@03̋D$t@43VjFhT j@ ujIhT jAjlj F3^ËD$PVu Vw3^̋D$T$ L$QL$D$D$ RPQ?̸膨L$T$ $PD$ QRPD$3ɅI#L$̸FSUVW|$3l$;jFhT j@ SVVtyGFONW V GFOjNWj WVvtbG8PN8Qj tU|$ut$ VU$eD$_^][Y&jIhT jAjlj § VD$t P_^]3[Y̸6l3ĉ$S$UV$W$,yVh D$hP=C= U$TBPh hL$*Q}PN$$t QRPQhl L$2hQ=NRPQhX T$.hR</$$t&N@QPRQh< D$2hP<HFQRPh( L$.hQ<BPh hL$*QB$$PQh T$*hRX<H$$QRh D$*hP.<hWh hL$*QNWh hT$*R;wD$t@~ Ct ~_^[ËvP>RV   _^[Ã>u݋st j S_^[̋D$Vp W|$uhh jsjuj x_^@thh j~juj R_^SUjPl$$<+WV;uD$NUPQF ][_^][_^̋D$SVW|$w H=0Y$YFGNt+_V^[QjP֡ _^[39_^[ËT$W_^[ËD$tXN_^[WD$ T$G W_^[ËD$t(_0^[Ë__^[ËL$O_^[Ë_^[3_^[Ð=X}XXXXXXXXXXX   VW|$ w jWD$H;}ȅ D$_3^Ëv3~<0 t@;|@t$PVW ~0_^̋T$Vp@u+PD$ RP ^̸%̋D$3@ HH A̋D$ ̋D$H wZ$Z3ÍIZZ̋D$uÍPd$@u+̸%Vh%u^ËD$L$ PQjhV^̋D$3ɉH @H @AVt$u3^Ã~t!~ t FPXF F^̋D$HV=\$h\3D$L$QjPRT ^ËD$HVjQT ^W|$ WYT$L$Gw O_^ËD$x 
tL$tPp^Ã^ËD$p^ËL$T$ƉJ^3^Ð[[G\R\a\c\["\[a\     ̋D$t t3ø̋D$tt3 t uVt$ 3tRWPD D$|$ OPVQLjW?"tu t u j W9_^VWjD D$L$|$ WPQRPjW"tu t u j W_^̋T$Vp@u+PD$ RP{ ^̋D$ Vt$ DD$;sfS\$ U-D WtRjՋKjVQLjS?~> t+F;t$r"tu t u j S+_][D$ 8^tPI@u+3̸%̋D$3ɉH HH @AVt$u3^Ã~t=~ t0F t)FPt(4 F FF ^Vt$39F L$ FF T$WPRjQt< uFt F P< N Q8 t;hh  Pj jhh jhj ߙ(_^Ë_^̋L$39A t>T$t6AVt$tA PjVRI QjVR@ t^̸&L$ SUVt$F IWl$v,d$dFjtT$$RP0 _^]D$[YËL$$QP\  _^]D$[YFPt$_^]D$[YX _^]D$[YFPt4_^]D$[YT _^]D$[YV\$$|$(#Չn - V~ ;r;shV6Ft(N ۀbPQ D$_^][YWP th@PL D$_^][YhPL D$_^][YV \$$#ՃVtjt h( D$ PHh <Ãtt!jh$ T$ R'tjh  D$ Pjh L$ Q|$$ OuGGuf f$GGuf f|$$L$QWH u_hh  PUjh T$0Rh Wh  jhh jjtj ޖ@D$D$_^][YhVF n D$_^][Yh{h jejtj 薖_^D$D$][YËL$$t_^]D$[YËF_^]D$[YËL$ D$_N^][YFPt8D$_^][YD D$_^][Y_^l$][YÍI` a:accccjacb5c    ̋D$Vt$ W3@t@ L$PQVP D$RPV`  t>tƍPI@u_+^Ë_^̋T$Vp@uL$+Ƌ39A t4t0AtA PjVRI QjVR@ t^U$#l3ʼnEE SVuEWuHI@u@ jj+xWVjhEӋJ =uMjjWQjhEӋ =YUERP6MUVPWQRhEӅtquƍP@uj+MQ@PVjhӅtJE܍URPd = ׃8t׃8 uMUQRH hh  Pjj*EMh Ph Qh  j , 8u1hh hjmj 3e_^[M3豒]hh jjmj 踒3e_^[M3胒]h%%u V4 3e_^[M3P]hW1VjjjWǍe_^[M3!]Vh%u^hVD$L$PQjjVM^̸&Vh&eu^ËD$L$ PQjhV^̋D$3ɉH HH HAVt$u3^Ã~t*~ tFjPx NQ| F F^̋D$Wawmli$TiVt$ VL$D$F~ V^_ËD$x tL$tP@_Ã_ËD$@_ËL$T$J_3_Ð7i@iKihiOi̋D$t t =3't3ø̋D$tt3t t t=3'uVt$ 3t[WPl D$|$ OjPVQp jW)tu"t t t=3'u j W_^VWjl D$L$|$ WjPQRh jW")tu"t t t=3'u j W_^̋T$Vp@u+PD$ RPk ^̸膎F(UW3|$ tD$ H$o~?t<:t t ~Mt$tCjl D$KjPVQp jSWt j S_^[S\$Vs >t ~Ijl D$L$SjPQRh jS!V$t j S%^[̸Ƈl3ĉD$D$S\$UVs HW|$4zlu$8u3 _^k][L$3蘇Ã>_^][L$3uÅD$0uN#uVu7 uF{ ^ _^][L$3Å;D$0k u1Ft PbWi_F^][L$3҆Ãu1Ft P,W3_F^][L$3蜆ÃuaOWGQRPQhh T$$jRFt PϱD$PF_N^][L$36ÃZRhd D$j PuFt PuL$QxFf_fV^][L$3څËD$0_F ^][L$3辅Ã{ t!tKk_^][L$3藅_^][L$3Ëk_^][L$3gËT$0_^]S[L$3KËFtPjjdWFtPjjdWF jPjfWN(QjW_^][L$3ËV(_^][L$3Մ3L$ _^][3辄Ë5ruttu!utur\rt6tru   ̃|$t3ËL$Q L$ J(̋T$Vp@u+PD$ RPK ^Vh(&Ut#D$PjjdVt^V3^̸P&̋D$3@ H HA39D$̋L$u3Vt$F$u^ËT$WRQPJjVV_^̋T$t7L$ ~/Vt$F$u^WQRPjVV_^3Vt$F$u^ËL$ W tKetT$RT$RQP_^jVRD$ L$V$PQjeRVe_^3_^̸x&VWjah j 蠫3 ;t$jch h脫 F;uVT_3^jeh h[ F;uFP(V"_3^ËD$ ~ ~~~Fxp _@ ^Vt$u3^W~ Gt P¬t W責F P覬_F F F^̸ր|$ 
u3YUl$ Vu }$SjUD$\$$W$~ t3;~FFL$WPQ~)~ |$ ;+|$;2VPE$RP U|UtcF~ 뎋L$U$SQR UP|t0t$;t(t$+ɋD$_[^]YËD$_[^]YËD$_[^]Y^3]Y̸|$ SD$T\$HUl$Vu 0}$&WjU~+~+~;Ftd~$VVL$WQR|$(|$ +~NFNU$PQR; UL~)~u;^F|?$D$M$SPQ U|LtZ|$|$+tN;^}ȋ~+~+~;9FFT$FSRP~L$ ^_^][YËD$_^][YËD$_^][Y^]3[Y3[YSUl$VW|$w Ey,$3FF FF$;L$ T$QRUWx_^][Ë^_^][ËN39^ ~rFv 8 uC@u_^][Ë^uN$`D$ L$PQUW_^][Ë^ u!$3T$ D$RPUW؃_^][Ë|$;>~0hAh W؃ Ft P诨^L$ VWQRF~ n} _^][ËD$ t8u Fl$D$L$.L$l$l$V~T$~!;.tD$hch PI t=D$=~Q;FtLL$hhh Q D$u-;~t Whh jAjrj ~|_^]3[ËF;tP跧3~FF .F;D$P蔧T$3FFD$_FV^][Ã$tjWL$(T$$G$QRUPRW_^][ËG$j~L$ T$QRUP_^][jW~~AINFNW$PQR|W^)^jWK~‹L$3FFD$ W$PQUR_^][Ë|$ jPjuW+tNjQjuW_^3][Ë{{P| ~~#|}{||W|     ̸zD$S\$UVp WjPD$Kl$$V N~LN33~;}UE< t@;F |@D$)F F+؅uuED$_^][YËT$PB$QP L$QE|tʼn~ FjD$_^][Y̋T$Vp@u+PD$ RP[ ^Ul$tqE;rj?u>>uhh Q莢M v QRP6y  hh PPM;sE;s T$ tE]ËE]̃|$ S\$Ul$VWD$$uD$ L$38t@<u+y3D$ t݅~'ID$$;D$(}L$j QD$,MދT$t2d$L$(9L$$}$T$PRD$$D$,@D$$u҅}"D$(9D$$}L$j QD$,E|_^][̸@vwl3ĉD$$%u D$$!Ë\$8PD$Pt$|$TG|$L|$$uL$@T$<t$HH;D$؉tHD$\$8D$jPL$ T$L_^I] [$ÃwO $L$G$L$GL$G L$GL$GD$$R tD$(ˊTAЉT$(G*uEGt$D$(|$LD$$.uhD$$GyQ t*D$} D$D$ÊLPЉL$G>*uUGt$T$|$LD$$Ã%H$4GD$ |$LD$$?lu_GGD$ |$LD$$GD$ |$LD$$GD$ |$LD$$GD$ |$LD$$hÉD$$ۃS$pD$ t'tt E^ F\$0ET$0T$<\$S\$ S\$0S\$rIxӊ,BV(Ќ        ̸jl3ĉ$S$(V$(W$(hD$$h h D$$D$ D$VL$,QT$$RD$,PL$(QT$8Rt$0$tD$PVW\VĕL$QT$$RW? 
U$ _^[3"j̸iD$PL$QT$ RD$PL$jQL$0"<$u D$=v̋L$T$D$ PQR ̸fiD$L$ T$RD$L$D$PL$ QT$RD$jPL$0<$u D$=v̸il3ĉ$$$S$UL$$3V$D$L$D$ l$~!d$D< tuKEl$$}3"t ~V$`j Rh Ƅ4\΋+W+Ù;}@3D$$h!$dRD$DPD$H贵Uh L$XL$4h0 Qh j([h-hd jwjij X8|$T$$RP|$ut U| $[_^]3Wt̸&̅t7Ft PFt P߂Ft P迦VƂYVw Ft!jPx FP| FG^W|$u3_ÃVw tCFt!jPx FP| FGKG GG ^_Ul$W$)tu9E$ڃ}$t _3]SjUNFENPQ؃uj UHE[_]Å|jS[UPWU+PW~t)jSuhh jijdj U4FtP5؃tWSutWUeut W[_3][_]ËFuhh jojdj pU_]ËVRPMt~ t4jWu%W| hh jjjdj "U_]É~}_]VW|$ jW¤w $uW ~&$tD$L$W$PQRڤW貪_^VW|$ jWbw $uW~&$tD$L$W$PQR:WR_^̋D$SUVW|$w H=At$<3ۉ__^][W'_^][Ël$ D$u&O Ft PU _F^][;u _N ^][ÃFt PϢ_n^][ËL$_N^][ËT$ O GFD$GO _^][à tD$ tN^_^][_^][à tD$ tV_^][Ë__^][ËD$G_^][ËL$_N^][Ë^_^][3_^][ 35¦#)3    ̋T$Vp@u+PD$ RPK ^Vh&et#D$PjjvV t^V3^Vt$hh j F FFz t43ɉHH HHH@HF F^3^̸&jjh  jz uËL$@A A AVt$u3^ËF t P|F F F^Vt$ u3^W|$ $u_3^jWKD$jP1D$ L$$;~ȅuj W;_^QO$VQX } W)_^UW|$l$Vt$~$u^_3]S^ jV蹟C~ CD$jP0D$ ;~uj V蚟[^_]ËN$UWQu } V膥k[^_]_3]̋D$@$uÉD$;̋D$@$uÉD$鋡̋D$@$uÉD$黠̸&hh0 j_x uËL$@D@A S\$W|$ jW\$z u_3[ËG Ul$Vq FFu19Fj WOF;w ^^]_[ÉF^]_[;sD$\$IN F;w+FWPUthh0 j{jgj F_^3[Ë|$uh h0 j}jgj F_^3[9~tFtPrF~_^[ËF_^[ËL$QSY_^[ËE_^[Ã>t~jF+F_^[ËF_^[_F^[ø_F^[Ë|$Q_^[ËL$|$_^[ËT$R_^[ËD$L$PK_^[Ã~_FF ^3[ËC_^[ËL$_^K[Ë6V B_^[Ã~F_^[ËD$VH _^Q[ËD$t@ xpxf_^[,JQaluu|سȳ! 
̸'Vh'՚u^ËD$L$ PQjhV^Vt$WhhX j8F Fkm u_^j8jWD ~ _F^Åu3Ã~t*~ tFjPx NQ| F F̸6CN y(y,~PQ4I0MbijT$RhD$ FhP` }ht l Y̋D$SVt$ HW~ =8$L$D$_F^ V^[Ã~ D$tN^_^[Ë^_^[ËT$_V^[Ët$t_^[ËVWFGN O _^[tu _^[_$^[ËG$_^[Ë\$_$_^[Ët$t0_tFGNOV W _^[3GGGG GGG_^[t t\$t;~؋L$SWQA _^[Ët$mNOVWF G _^[ËD$O(PW,_^[ËL$QMbjT$Rh iD$FhP` ht l _^[ÍL$QT$RhFhPD$$H }h l _^[ËL$Mb‹T$i+i_^ÉJ[ËL$QMbjT$Rh#L$QT$RhZ L'u G _^[3_^[[dqʹҷٷ{A;   Vt$u3^Ã~t*~ tFjPx NQ| F Fv t VVj^̸>Vw D$~( ~,WSD$ PL$ QhhRH l } h Ӄ)L$Mb‰F0i+iN4D$Pp D$N(iV,+L$+yI@B}3ɍQF0u9F4t ;uK9V4|FMbi‹WjL$ QhhRD$` } ht Ӄ[^̸$v=l3ĉD$ S\$0UVt$4n 3D$ WPl 3D$D$D$D$ D$$D$(D$,~T$@D$PFL$QjRSP$ }u|L$Qjj,V臐jV}.u)t Ӄt t=3'uj VӉE b_L$,^][3<$SVW|$_ jl {tD$L$WjPQRh + fuL$WPD$SjPQR  jW;2tu+U-t Ճt t=3'uj W+ՉC ]_^[̋T$Vp@u+PD$ RP+ ^̋L$AT$Q;tAVWj}h j0e3 ;t!jh je F;uVf_3^É8NyVL$ zFx >~_NF ^Vt$u3^ËF A;-hFh RPNetЋN ɉFN S\$;}=|9;ËVWz| ++@< 9uVD$_[F^ËNT$[F^W|$t4L$ |,;}&GVr;}SW\A;Ή|[^_3_̋D$T$QRP ̋D$L$jPQ ̋T$t4~.JVpW9t3~SJ\ @;Ɖ|[ _^3̋L$u3Ë~HPQVt$t~NPjQ9 ^Vt$tFt PdVd^̋D$uË̋L$tD$| ;}I3̋D$tL$|;} PD$ 3Vt$t#~uFVPjQRt F^̋D$uË@VW|$ GPt8O Vjch QRbuFt PcVc_3^ÉFORQPq8WVG F O _N^jI̋T$SV23W~z\$9t@;|_^3[Å|;}+ iȉ dw`w'D$\$L`w=} ;~ `wgfff ɋ+3Ʌ lwuhh! 
jj (OT$$R ZUNBT$~jjPD$0PZjL$PQT$8sT$(~!+PQR[SD$8h8sP[ VQR}[ \$VD$,SPj[jL$0QT$VjL$HQT$,RWjD$LPL$8QWWT$@SRW+ 08sT$~$ +PQRWWD$(h8sPW j QR{W jD$t*P@u+@;sWVt$VFo ;rt_^3[dbu h0" Ӄu," >tBƍP@u+ƒ;s-WVt$VnWh V'oWh$" Vo$_^[Ët$_^[̡wt"PqD$wwËL$ w̡wu2VtVuwuVw^É5w^VW|$ 3t*W^tW!uW_3^áwt P=w_5w^̡wu6VtVwuHVcw^t @tСwtP9wwÉ5w̡wu6VqtVEwu Vkw^tt 5wáwu6V!tVwu@Vw^t'H t D$D$$T$ PRуÉ5w̡wu6VtVwu!V#w^t@t 5w̡wu6VQtV%wu!VKw^t@t 5w̡wu6VtVwu!Vcw^t@t 5w3̡X'PeuhX'Phx'j̸̃ v='t=D$PL u''M$L$ jQ='u'< $$T$jR Utl3ĉD$pSVW =s @=Ijjjh" 4 W 5 jW\$Hj WD$L։D$8D$DjPW VSt$P L$PQjVD$X  t$`t$Xt$\hh" VF D$<L$83ۉL$8~xT$DD$@h SjWjRjjP$ L$3T$RVhh* jj3(^̃=u?h'h* jj 2=u ) h*h* jj 2Vjыu^hh* jj 2T$RVshh* jj ~2(^̃=u?h'h* jj T2=u ) h*h* jj +2Vjыu^hh* jj 1T$RVchh* jj 1(^̋D$Pƻk Vhh* jj 31u?9t$tAhh* h@* =hp(hu?thh* jj 41^Vt$t*>t%hh* jjhz1^̸ =u?h'h* jj 0=u ) h*h* jj 0Hjу$uYVhh* jjb0T$D$RP3hh* jj>0BL$,QЃ,^Y̸ =u?h'h* jj /=u ) h*h* jj /Hjу$uYVhh* jj /T$D$RPshh* jj ~/BL$,QЃ,^YVh'h* jj L/*h)h* @jj *,/ ^hDh* jj /=*uhGh* jj.hKh* jj.hLh* jj . =*uhOh* jj .S UVWw>nu(U tj PWӃ G>>uX* E t~hlh* jj *,._^][Vt$t=WƈtF@tP6F@u_^j6^V>t#t  Q V҃>u^Vt$ >t*S\$ t  QV҃>u[^̃=u?h'h* jj D-=u ) h*h* jj -H̃=u?h'h* jj ,=u ) h*h* jj ,jу̃=u?h'h* jj ,=u ) h*h* jj [,Hjу̃=u?h'h* jj $,=u ) h*h* jj + Q̸=u?h'h* jj +=u ) h*h* jj +D$  $$BQЃuË@øF=u?h'h* jj :+=u ) h*h* jj +D$   ȡ$ $HRуuË@̸=u?h'h* jj *=u ) h*h* jj *D$ V ƉD$QD$P҃u D$t$QP҃u^Ë@^̸$tPD$P% $Q$=u?h'h* jj )=u ) h*h* jj )H$$RсĔ̸v$P}$=u?h'h* jj ^)=u ) h*h* jj 5)B$ $QЁĔ̸SVW39=u>h'h* jj (9=u ) h*h* jj (D$ P#L$QT$RB$ QD$ Pҋ;hh* h!/ ;t^D$ PV#Ix8u Q VҋءHVу;tV_^[Ę;t S_^[Ę̃=u?h'h* jj '=u ) h*h* jj 'H(VWiut7tFP/D$L$DŽ_^ËT$ D$_^ËL$ T$_^̸SVWh8h* jQP}-3ۃ ;9\$U\$~qD$ hD$tMōP$@u+;~ hGsh* FPW-t6NQUW6QD$ L$A;L$L$|jW]_^[W.]_^[;u3ËLDV;UW3DD8|H9t'tP.H9u dž;X_]9u3^Ãd^̸=u?h'h* jj $=u ) h*h* jj $Hjу$Vhh* jj p$T$D$RP=u/t&PDu Qhh* jj $HT$Rуt V^Ỹ=u?h'h* jj #=u ) h*h* jj #='V'tH Vу>u=((tIB VЃ>u=((t( $ Q V҃>u=tH Vу>u^SL$ \$[VW@%yH@;uAyIAL$ T$3|D$%   ‰DHD$T$H9t7t7P*_^É_^Ëȉ_^øUVWȉL$ 3~~;tF@tP#*L$>~@džuĉ_^]Y̸6l3ĉ$SU$V$W V-VD$VD$) |$D$uPh* 
$j@QQ|$uWh* T$$j@R5T$uSh|* D$dj@PT$\L$uL$D$u$$RQPVhd* WUߓŃP$@u+O;u3v.3=h j:P׃tT.;vD.:F@|ދ$_^][3Vt$ uD$hVP ^̋L$D$PQW;u3_SVpyNF\HDHttP'DŽDŽ^[_W ;u3_UVpyNFlHL$DHt(T$t u `*  HttP''DŽDŽ^]_WZ;u3_UVpyNFlHL$DHt(T$t u `*  HL$u3ttPo&DŽDŽ^]_ÅuD$ t^]_ÉD$t ^]_k;u3AyIADH;;u3ASVyIAt$ \Ht.W|$t$u`* _^[ÉH_^[;u3ASVyIAt$ \Ht*W|$t u `*  H_t$t3uD$ t^[ÉT$t ^[+9u3ËDH 9u3S\HVt$ t.W|$t$u`* _^[ÉH_^[9u3S\HVt$ t*W|$t u `*  H_t$t3uD$ t^[ÉT$t ^[K=QZלF-nt4A@B+2SpN8x^EE̸vl3ĉ$U$(VW$,D$P]L$ QST$RD$$PL$$QT$ RhL$(QPD$$ D$u T$PD$RPL$0QVh* $<hRb$D PI@u+UP$,P׃ ~$L$QT$RD$PL$Q+c$$_^]3̸@V[PD$PVEL$TQjT$jjRID$\PL$h Q菌d̋D$Ph`̋D$L$T$ PQR G ̋D$PhA̸ u&j"j " uYË *VWAPt$ *L;= *}cj!jVh* j j!ttjF F! VPj|!G;= *|t$ VQ߻L$tL$tHL$tH_^YjZh* jAjjj_3^Y̋+uc Wt->Ph;~ WPeKV@QRЃ_ËKF:utP:Quu3__̸(uËL$T$L$VL$QPT$ gtM t1W>P輺;~  WR踺N QNRPQ҃_V>^3^̋L$D$;uP@T$L$̸ VD$L$T$$$PL$ (h@QT$ʽ̋L$D$;uPVp @^̋D$QD$I:utA:Bu u3̸(Vt$ hAh* Pt$訿PKT$RD$(L$h@PD$ t$(D$,L$0L$ T$$hjQRt 3(9t$~!S\$(W|$0D$ WQF;t$|_[T$ R^̋D$t $|;uPQR̡(tiVt$Wx hP5$@ }6(PD hP!Q _( ^Ë(z _^̡ Vt$Wt/>P);~ WP&NQ҃ ȋ_3^ËFPɽȋ_3^S\$ Vt$ Q^[̃=(tj:hhp˽j(39(øSW|$3ۅu_3[9(u tD$ (V%D$ D$ PQ|$&t/xt2u.C P (D$ PQT$u^_3[Ë@ ^_[̃=(u7jPhhpj(539(u3hh* jO tL$HL$сVP(L$PRH EtI t0W>P*;~ WP'N QNRPQ҃_V^á(x\t3^̸S\$ CVW$ H @3L$D$ 3UL$,ÙF3;t$|ދ\$ ]_%?^ [ËR& _%?^ [Ë@P _%?^ [Ëx _%?^ [_^3[ R r  V+vR$ N +B RvWr;ustD:+u1v5Fz+u v$Fz+uvFJ+_^_3^Ëu^Ë u^Í$:utP:Quu3^^ËFtJt:ut݊P:Quكu3^ËF+B^ÍI    T$L$̋D$HA@H ̋D$@@Vt$FHFxu QV^̃=0u|$| 0̃=0t 0á,tLh P@ Q,h@ PA ,hP Q0,Rt,̋ $*L$ $*̸S3ۉ\$\$\$ \$9,u+h h |3Ƀ;,;u3[Vt$WVD;hh$j D$;tg9_ t!9^thh$j D$ ;tA9thh$j D$;t!9_tchh$jo D$;uGhh$jAjij3D ;t PF~;t W _^3[3D ;t",PR0x1;t PF~ЃgG_^[̸ D$$=w&t @< uh*I@  Ë ,tDD$$PT$ QD$T$ 蝸t@ h;h$jejgj3 ̸ vD$$=w&t @< uhJK@  Ë ,tFD$$PT$ QD$T$ t H h[h$jejhjH3 ̸ D$$=w&t @< uhjL@  Ë ,tGD$$PT$ QD$T$ mt HA h{h$jejfj3 ̋D$V0N @ +B urRvWr$;ustD:+u1v5Fz+u v$Fz+uvFJ+_^3_^Ul$VW33u_^3]S3ۅ~Kd$++‹D$|$ |$WPT$,}~^;|ͅtD$(uG3[_^]D$(t7l$ ~&~|$L$WQT$,uNt$[_^]̸SUl$Wj3USS\$/;_]3[YVhh$Wn u hh$jAjdj^_]3[YjUWVt7 $*A $*L$ QL$ QPVRs؃t S|$SV^_][Y̋D$L$T$ jPD$QL$RPQ`̸ l3ĉ$SU$VWhD$P3U|$W8 ; 
L$IL$ $L$L$Q t$R u>.uF>twFFPӃtNFQӃu>tQ$RӃuG8u?t-GGPӃt$OGQӃu?u33|$t2WT$VR tD$hD$PUH7  $D$_^][3 øvL$ W3;u3_ËA;,;t%L$L$QP|$ -;t PB_SUVYI;+‹D$, P}~~;|υuu ^][3_Ëm^Lm ][_̸8l3ĉD$4D$@Ul$@Vt$LD$ D$~|$PSWubV؃tSSuSt5tL$QWU0 ǍP@u_[^+]L$43H8ËF V3ɉD$T$$D$ L$L$33D$$@l$D$$utÃPQL$Ӄ ycu:vOuD$VQL$jQQ ~L$]U|$ taD$ Pr'tjPQ=L$Pkt|$~ 0]EL$D$tpQPI@u+‹t1\$~E.EKSWU;/ ;~ D$+މ\$L$T1WT$ ^Vh@D$0jPwD$8P@u+‹t/|$~'WL$,QU. ;~ D$+|$t$|$D$t PD$_[^]L$438ËL$t Q_[^]L$438ËL$<^E]338̸ vD$$D$ ,t+$RL$ PD$L$ ;t @@ SUVW3L$4)+‹ x ^_]̋D$3@HXH\̸T$  U3W|$l$8u;~JBO8t~"D:uO%yH@t_]Y;SVt$ BBZBB\$ B\$xNxJD$ uCx?D$  L$ F ˋFFF;vD$^[_]Y^[_]YVt$W|$t$L$PFPQ }_^_^̸fD$S\$D$W|$P~hhh Vw ;}T$ SRD8P ^_[YUl$$t=+VL8UQeWRG+ދt$,PV F@D$t$;_|/OQUVOT$ F+ك ;_DD$}хtSOUQ T$ D$]^_[YW|$3Ʌt+Vt$PGPV3L$ 0 @0^_ËT$ _øSUl$ EXMD$ E\V3҉D$ D$4W3D$T$ L$;;ut$46;‰T$j|$(P`D$4@D$4Ãƃ=t1P|hhh(L$ 3҈\)AD$(L$ =u |$uL$GuCT$(9T$D$@T$;D$8u>t;t53|)=u|)=u#G uÉT$(9T$tT$qut@|_tD$;~.QL$0EPQ D$L$|H+T$ 33;E};u,9T$E}#D$,D$@;D$8D$3T$0t$ 2T$_ML$$^MXU\][̋L$3AAA AAhhj t3ɉHHH HHSVt$ WjVAF|$tt/;%t Pٳ\$t1SHu1hhhhj_^3[ËP؅QS/u*hhhhj=SW_^3[Ë^;t}txDt V R`F>u\DtVGhFODhQS F u3hhjA6F떃>uhhhFt"VjjhjPm Ft _^[ËBVЃ_^[̋D$D$PVt$x@W~hhh T$ARVЋD$tQ@ tVjV&} QDF RjP _^Vt$t\x tjV}u H Vуt6xDt0~ t*jV|uBDN PQe!V R Ft P̜Ft P\3FFF FF^̋L$3AAA APAD$ PQ ̋D$ L$Vt$WPQVV_^W|$ /?&Gt)P-uh.hj&jnjq3_SVt$;u^ jV{3VWVGFO N WVGF t&ADttE^ BDO V PQR& GFGtWPFuGV^[3_hBhP) F uhEhjAjnj^[3_ËAt WVЃ^[_^[_h'hjojnj_3_̸3VD$D$D$ D$D$D$D$jP_zL$3[Ul$ VWU:#{`tX@ tOhhdPm E`u"hhdjAhj_^]3[Ë Q K`RQP @tP,UjjS҃_^][_^][hhdjohj~3[̋D$SUV3Wu t$FD$(;t D$(D$(t$F|$9nt;t;+;!;9.t^\VT$,V^\\$;t/S茥u/hhdhj{j_^]3[ËP{;tQS;uh뻋^n>G ;t9hhdP F`;u"hhdjAj{jY_^]3[Én`WVXn\@@t@UUUVu0hA9.u"hhdhj{j_^]3[ËAt!tthhhdS Vo|$(Vo%$$8nPVg~hhhd ;tVfPVWRVfPFPN QnP;tVfPWV RbD$ ;uA tL$(RQWPV҃=n ndH_INh^][Ë77v7s7s77̋D$xt D$D$$̋D$L$T$ jPD$QL$RPQ ̋D$L$T$ jPD$QL$RPQVt$tV1Vk^̋D$QyT$jRjPËL$9HXt ~t HXh^hdhjzj3Vt$ W|$ thjW D$L$T$PQRjVW _^̋D$xt D$D$Vt$ W|$ thjWi D$L$jPQjVW_^Vt$ W|$ thjW) D$L$jPQjVWm_^̋D$u8jOPh8  8%8̸&l3ĉ$SU$VW$$u=8t8$ =|$PSUjWVg$(t'$ =|UPSD$PjWVV]V L$hQ$ 
_^][3菾̸xFl3ĉD$t$$$SU$D$ $VD$$X Wx3@L$(T$ t$t$~hj~h| ~h`jh| 9t$ u_^][L$t3ԽxÍL$,QT$jRD$8Plt$$Ft$$tT$RD$HPL$4QU _$D$ RPL$4Q3 =D$(tjPT$4R D$PL$HQT$4R 9$vdD$jPL$4Q T$RD$HPL$4Q T$RD$HPL$4Q F;$rt$3t;tL$t TDAL$O@ut;t;ttLDMEK@uut1T$jRD$4Pi _^]3[L$t3-xË$QT$D$,PqL$Hj@Q%$D$$ _^][3x̋D$L$ T$PD$QRjPe`jY̋D$Wy9|$r;)|$SUl$Vt$3+D$P@`RP .QVվ߃;\$ v^][_S\$Ul$Vt$W|$@r<CPPC PC`Ph@WVl$8@@@űl$ tC`KPQS RPUWV_^][S\$ Ul$Vt$W|$@r=D$ FN`PF PQh@SWc@@@l$ u̅tVN`RF PQUSW/_^][S\$ Ul$Vt$W|$@r@FN`PFPPF PQh@SWDl$<@@@uȋl$ tVRV`FPPN QRUSW_^][̸膹L$D$;sL$SUl$VW;3D$߃+˺L$$ "Ҁ T$PRH @`QPjjL$'QT$*RT$/˸ ".G ˆ.D$0;rD$L$(D$$+L$(;sL$<_^][S\$ Ul$Vt$W|$@r?D$ FN`PF PQh@jSW@@@l$ uʅtVN`RF PQUjSW_^][̸̸̸̸L̸̸̋D$H`T$QRÑ|$tVt$jVx3^V^S\$ Ul$Vt$W|$@r=D$ FN`PF PQh@SWsR@@@l$ u̅tVN`RF PQUSW?R_^][VW|$@;stBS\$Ul$;r1CPC`KPQL$ S RPWQU0Tt$4+;su][_^̋D$Wy9|$r;)|$SUl$Vt$3+D$P@`RP .QVUH߃;\$ v^][_S\$Ul$Vt$W|$@r<CPPC PC`Ph@WVxUl$8@@@űl$ tC`KPQS RPUWV@U_^][̸̸̸P̸̋D$Vt$PVpbN`PQE ^̋D$Wy9|$r5)|$SUl$Vt$3+T$B`P .QVkB߃ ;\$ v^][_S\$ Ul$Vt$W|$@r=D$ FN`PF PQh@SW#9@@@l$ u̅tVN`RF PQUSW8_^][S\$Ul$Vt$W|$@r<CPPC PC`Ph@WV?l$8@@@űl$ tC`KPQS RPUWV`?_^][VW|$@;stBS\$Ul$;r1CPC`KPQL$ S RPWQU=t$4+;su][_^̸̸̸ ̸T̸$V$u&V.`%tV`%u!F`$PQ A^Ë$T$RP@N`QT$RBD$hP^Ul$EWx9|$rPL$)|$SVt$3+ΉL$ L$E`URRRPQV荺߃;\$ v^[_]S\$ Ul$Vt$W|$@r;F`NPQV RQRPh@SW~ @@@t%F`NPQ VRQPUSWC _^][S\$ Ul$Vt$W|$@r;NF`QV RQRPh@SW^ @@@t%F`NQ VRQPUSW# _^][S\$ Ul$Vt$W|$@r?NF`QVPRN QRQPh@SWڻ$@@@t)VF`RNPQ VRQPUSW蛻$_^][̸&W39|$SUl$VT$߃+˸L$  "JQ$D$B` RRQPjjT$/RD$2P$D$7˺$".G Ѓ$.;|$$y^][_YS\$ Ul$Vt$W|$@rMD$ NF`QN QRQPh@jSW蓼$@@@l$ ut'F`VR VQRPUjSWO$_^][̸̸̸̸X ̸ ̸ ̸ ̸(!̸\!Vt$F`W|$PWN`QWv` _^Vt$F`W|$PW輪N`QWR謪F`PW蚪_^̃|$tVt$W|$ GXPV豎_3^VOX| NQ=X| V+_^ø$ S\$ Ul$Vt$W|$@r=D$ FN`PF PQh@SW @@@l$ u̅tVN`RF PQUSWO _^][VW|$@;stBS\$Ul$;r1CPC`KPQL$ S RPWQUt$4+;su][_^̋D$Wy9|$r;)|$SUl$Vt$3+D$P@`RP .QV߃;\$ v^][_̸!̸!̸!̸,"̸`"̸"̸"̸"S\$Ul$Vt$W|$@r<CPPC PC`Ph@WVhl$8@@@űl$ tC`KPQS RPUWV0_^][̸0#̸d#̸#̸#̸$̸4$VW|$;stTS\$Ul$;rCC\ tNjKQSPRS`K QRPD$,PUt$4+;su][_^̸h$̸$̸$VW|$@;stBS\$Ul$;r1CPC`KPQL$ S RPWQUt$4+;su][_^̸%̋D$PXH`D$QRP 
}jvh8%hhj蕩3ø̸T%̸%̋D$Vt$PVPVN`PQ3% ^̋D$L$ T$PD$QH`RQD ̋D$VqWx`tu:|$u3PXD$WRP3Ƀ LJ@I*BPXD$WRPc LJu LJ*3ɃI0_^}!hh1hhj?3ø̋D$HQL$P @`RT$PD$QRP̋D$L$;JrP@`RT$PQL$QRËL$D$Vq`W|$u u _^ÅtuIXSVQPihVSGdž0u9t%xt|RWSH dž[_dž^Ãt|PWQiH|xRWP1 _dždž^S\$ Ul$VW|$w`u u_^E][Å|$ Vt$GXPSodždž"OXQSkdž@dž%GX R+QP džt$EG MO$UW(E G,_^][ËD$S\$VW|$w`u u _^C[ÅtUOXVQPhVRPQJT |$u@džt#+ RSW蚥 dž_^[̋D$Vp`WytuW|$uPPXD$VRPt#躺3Ƀ džIqgjdžNPXD$VRPt# 3Ƀ dž0I!jdž@3Ƀ I_^}!h7h1hhjT3ø̋L$A`t%VqV QL$PD$ PD$ QP҃^ËRT$ QL$PD$PQR*̋D$Uh`Wy9|$r VW|$ G\ w`t3OPD$QL$WPRT$ WVPQR= _^S\$rEUOT$PQGPPG PD$,VhRPh= u]t.WQRT$GPPD$$ WV QRP*= [_^Vt$FPD$F`t4QL$ QV0RT$ N QL$ PD$ RPQD.T$( VP^ËQT$ RN0QL$V RT$ PD$(PQR,D$(FP ^%u%$&uX&&u&&u('\'u''u',(u`((u((u0)d)u))u*4*uh**u*+u8+l+u++u,<,up,,u, -u@-t-u--u.VW|$ w`hxPx ;t V_^̋D$SVW|$w`._$^dždžQ x_|dž^[Ë\$_^3[Ã~;;|~3x ;t P#hh1S0 xt_|^[Ë\$CwuL$SQ0W豝 _^[ËD$Ph^QPD$ 0WPi _^[Ë\$u.|T$xQRP< _dž^[Ã|+˃tT$xSRP t|x+QRx}_^3[Ã|xPQR=L$$ ~ |;~|+xQPD$$Pd|x TLH uu_dž^[ÃD$|L$P+xQR|xPQR<_dž^[ù 9L$D$W0PW4PW8@ G<G;O< uGuT$jRSFT$ 7 +Ћ<;8usvVU^k̋L$Vq`W39ti9|D$T$PQL$ b_^9t@Q;u9|1D$;L$;u#|$WPV: _^;׋t;;׋|$tRWQPV|@tQ_^WQPVF;t;_^Á;׋|$tRWQPVABt_^WQPV=u'_^Ã0;u!RQPD t"_^jQQE dž_3^D.ux..u./uH/̋D$@`39L$tÉ̋D$ S\$ VW|$w`u u_^C[ÅdžVtg|$ tGXPSkdž0OXQS豫džGX R+QP%dž0e|$ tWXRS$[dž@GXPS[džGX QR+PZdž@D$$ t#W HO$PW(@ G,_^[̋D$H`UW}txT$ tjS\$VtYT$tQt$ rHt PD$PWVSRу^[_]ËHQVSR PUZMu ^[_]^[_3]_3]|/u//u0̋D$T$Vr`Wg$g3G_dždž ^ø+D$D$Hw_^_3^S\$u!C wJD$3;t;u;u ;u [_3^;tSP0Rdž袓 [_^39zt9tL$T$QRPK o_^_^Ëe(ffRf3ff̋D$S\$VW|$w`u u_^C[ÅVt2OXQPh0VRPQA:WXRPWh@VPQR@dž@dž t#+ PSWE dž_^[̸ 覑l3ĉD$D$$L$(S\$0Vp`WD$L$ uu_^[L$3舑 Ãxu t܅utu@\$<+SQ PWb@u_dž^[L$31 Ãt$ trD$P譪7F t{jU LtJVP~=QjjhjVH~"T$$RT$D$4PQRVPVG_^][L$`3yd3ҍN,t9Et4B|jmh9jnjljy_^][L$`3ydËF(u*jrh9jijljy_^]3[L$`3TydËM$QL$RT$QRL$@QRЋ$_^][3yd̋L$tA t @4tL$3̋L$tA t @0tL$3̋L$tuT$A| Q=t3̋L$A t @@tL$3̋L$T$;tËA t@Ht T$L$Vt$W|$;t_^ËF t&@Ht WVЃ~N At WVЃ_^ø_^hh9j 3Ƀ ;uhh9jAjjjw3úHPH HHHPVt$>th h9jjyjw3^ËFPQDF^Vt$>tth4h9hjxj^w3^ËFPF^Vt$>t"hJh9hhjw3^ËFP\F^Vt$>th`h9hjwjv3^ËFPF^̸fvD$VPL$Q?t03D$t PX^Y̋D$VPL$ Q>t03D$t 
PsX^Vt$ F t@ tL$T$QL$ RVQЃ^ËT$W|$ hRWPɌPh9h9W _^Vt$ F t@,tL$T$QL$ RVQЃ^ËT$W|$ hRWPYPh:h9W _^Vt$ F t@LtL$T$QL$ RVQЃ^ËT$W|$ hRW!PPh:h9W( _^̋L$A t@XtT$RjjQЃøVt$ W|$ ;thh9jejgjt_3^ËF t.@@t'VЃthh9jgjgjft_3^ËF t@DtVWЃ_^̸sSUl$3W\$ ;tP9^t*F ;t@T;t VЃ^F;t PV^;nu 9^ F;t PU^;tD$PL$WQ< T$ UR<;uD$ ;t PU;u%hh9hhjvs_]3[Y;tD$ ~ Fn_][Y̋D$Vt$jP3^̋D$ L$Vt$Pj^̋D$Vt$jP3u^ËD$3ɅF^Vt$jj3u^ËD$ 3ɅFt P]?^Vt$jjt3@u^ËD$ 3ɅFt P^Vt$jh3u^ËD$ 3ɅFt P*W^Vt$jj3u^ËD$ 3ɅFt P^Vt$tnhh9j FjPPF t@Tt VЃFFtPSFFth>Pu:V謜^̋D$V38tjNh :jjhj/q^Ë@L$T$ jPD$QRP辁^̋D$V8tjNh8:jjhjp^Ë@L$T$ jPD$QRP荁^̸*k3;u3ËL$A Q QVt$u3^ËF P躣F F F^S\$ u3[UVt$n tRF$tKL$WQSP" ~ t~WSU蛟 _^][jV褿V _^][^]3[S\$ W3trL$~jUVt$n tF$t QSP` ~ t%~!WSU ujV*^]_3[Ã~$tjVV< ^]_[_3[S\$ Vt$ F KWw($$~ tGjRP ~D$L$V$PQSR_^[Ã~ tL$_^[3_^[ËT$~ _^[Ã~ tD$F _^[jVEL$$T$ F$QRSPVY_^[ËL$jQPC u_F ^[ËT$J PQmy_^[ËUq6̋D$@ T$ ;Q}3ËT$L$QRP ËD$̸*jthp:h\ _3҃ ;u3ËL$PP P@@PA A QQVt$u3^ËF PڗF F F^̸lU3l$9l$u3]ËD$Vp ;9h$jP9FtFFP.nn蠔;SW;F}h:hhp:X >+~\$,;~Nρ|h:hhp:% VL$(WD2|PQk~F|$4+߃ |$\$,9u .n\$,;݉l$9nFL$$+RQ$0ZPRT ;,D$$H$jQ|$ K~9n?3~^9n<T$$jRĺtn+9nZD$EE< uI~ t F 8+QWT$$RN|FQP!*|$u#~tFPD$@;ÉD$|>Z;t!+׍Z3~I8@;|FP輒F9\$uMZ;u'uF F39l$,;t+3~ 8@;|nF3 }9nD$$jP{F|tiWZUP; D$~7Y=uH7X=D$uHD$;t+S /QU  D$ ^~3SZRVPFP貔n;ʼnn|=>\$,;~L$(WF|PQi|$ ~;>u.n+|$(\$,l$.T$$R踾D$_[;u D$^]3^]̸6hSUl$Vu Wj3U\$=9FtFFP^^܏~|h<;hhp:謊 >~h;hhp:荊 ;N}h:hhp:o >+~;'l$^;t \$ 3_^][YËl$VM$WD|PQ芸؃ ~m;~h ;hhp: ^~~h:hhp: ;V}h:hhp:ȉ +{3QD$P*_^][YË\$ L$jQF~h:hhp:N +~;~VW2ZUPf~F|$ PZQV|R4 =~h;hhp: ;F}h:hhp:ƈ FjVUUU<@WN|UQ =~h;hhp:r ;V}\h:hCWUVF|PNQ >~h;hhp:- ;V}h:hhp: |$+߉\$ FFT$L|B$SQP9 ;~h ;hhp:贇 ~+߁~~h:hhp:菇 ;N}h:hhp:q w|$ FD$_^][YWZURd |$D$~_^][YËD$P蘺D$_^][Y̋L$S\$Vs AWdwd$D~~FT$D$RPQK$Q膷_^[Ã~~ً;V}h:hhp:舆L$ >+~u<9~t;9~t6_^[Ë;V}h:h hp:JL$ >+~D$T$PC$RQP_^[3;Ntd$WWS ;N;VujSNt%F;t>PZPN|Q ~~9~t9~FtVV|RP~脎 넋D$L$T$PC$QRPW_^[jSƲL$$T$ D$QK$RPQ.Sָ_^[Ð+sߕƕ̋T$Vp@u+PD$ RP ^̸*VWjphd;h]3 ;u_3^ÍFPtL$>~~ FFy y_q ^Vt$u3^ËF P藚N hQyV RF F F^̸`D$ $uYS\$ Vs `{$VUl$W>~C+~;~NWRP!a~F|$(+ |$l$ 9u FI~C$hUP 9K$jQmVVSR~ FF"WUVFSP|>Ft4>l$ ;~~CWS\$$SO`|$+ ߉~l$ 
\$l$ \$9|$u |$\$jS讯SصD$ _]u F^[Y3^[YS\$Ul$ Vu WjU\$e>+~~/FU$W0QRT؃ ^+Ջ\$|$FD$WPVPFPJ|$,+ߋ>\$F~+NE$W1RP˯؃ ~@^+ً\$FU辴D$_^][U謴_^][U蜴D$L$ ;t_^]+[_^]3[S\$Ul$Vs EW=$tNQjjj~F jV豙T$8D$4K$RPUQ](_^][Ã~ST$ D$K$RPUQ5_^][Ë>+~(T$ D$K$RPUQ _^][Ë;VtjjS ;Fu~ u0VQVRF F ~_^][ËD$ L$S$PQUR蓰_^][Ë~_^][jSD$(L$$S$PQURcS _^][ËD$ 0{ _^][Ë\$ C xW~VWT t%_^]C [ËL$ T$C$QRUP_^][ÍIЛ 5`"לQVt$t^FS\$W|$ tjWjSjVЃ~9D$L$V WPQjSRF 3FtjWjSjVЃ_[^̡*PRuh*PC[h`-j7[̸F̋D$ L$;tT$RPQV[ +zaˢPPPʫPPPP莢P裫P}ȫPrPghx hhGCjhx hhG/jDP1h} hhG jh} hhGiP)Ph| hhGih| hhGi֡P@8P PBPGPPPPPh u hhhGgh u hh`Ggh u hhTGgh u hhHGwg@oPyDPn虶Pc讶PXöPMXPBͶP7RP,P!hX hh@GfhX hh8GfĶP@薶PPPP誶PPPhTX hh0GzfhTX hh(Gff!PkP`KPU@]PGrP<P1|P&聿PPhW hh GehW hhGe蓖P舖PͩP蒩P@蔩P蹪P^P胩PhL> hh GiehL> hhGUe蠩PZePOjPD_P9P.YP#@h$> hhFdh$> hhFdP踩P轩P肪PP謩Ph= hhFdh= hhF~dH;PPh$ hhHFdh$ hhG2dPPPh0z hhGchz hhxt cDPKht hhy cht hhGcht hhGcPPPhPt hhGjc@hPt hhGScP#PxP}PP ̋D$@̋D$A̋D$D$A̋D$A̋D$@X̋D$u"hhHhhjRË@̋D$L$ H̋L$D$!H̋D$@#D$̋D$L$ H\̋L$D$!H\̋D$@\#D$SUVW39|$tUl$Ep vh(HjbhHt L$V]SQ貁 ;t_^][Å~VS U0R _^][S\$ 3t4VW|$p vh~~~ F~~0F4B|D$x xp _^Vt$u3^ËF P~N hQIV RyF F F^̸Mv\FVD$FD$G@SIيʊ\$\$\$\$\$\$uڈL$VT$NV[Y̋D$SX CHUk Jv][jPU{ tnUBM VWPQ.UBM PQD8P~MMyt8EH j4htHU| _^tjD8PU| uL$ jQϜ]3[ËUB]C0C4[̸HvLl3ĉD$DSUV^ C P+Kk;ʉt$ }^][L$D3hLHWjPUzz t[EHSQM D8PQLUzu -EHKj4htHU{tjT$RU{ u$t$jV_^]3[L$D3KHËEhs͍T$t8r;ustC:+u1v4Fz+u v#Fz+uvFJ+3K3k;u*͉s4;t+PT8RC8P   K) ssL$T_^][3̸KHVs WN8V9PV<r];u3st*:]uvY:]u vA:Et FL:N VF>F0L$P_^][3̸jID̸&I|$ $u3YUl$ Vu }$} S\$W~0t`>+~;~FT$WL08QR:I~F|$|$( +9u+N F3+~tPD18PN8Q   >~0tS+PE$T18RPΘ ~4~4u t<~4u Ut)~~CjU;UeD$ _[^]YjU _[^3]Y^3]Y̸GD$Ul$ Vu D$4}$*}  ~4tU5 SWjU蠗>+~~0t0~*FU$WL08QR芘؃ ^+~0uҋ3F0;Nu F\$|$ 8v+WL08SQ{G>)|$,߃ \$=r tF|$ GjUUD$ _[^]YUjU֖ uF_[^]YjU謖_[^3]Y^3]YS\$Ul$Vs EWow'$3FFF ~FF0~4L$ T$C$QRUP蹙_^][Ã~~؃~0tҋ>+~ǃ~0u u_^]3[Ã~0t F0Iu~F~jSϕL$(T$$C$QRUP;S_^][Ë~_^][ËL$ jQVs t{ _^][Ã{ tVD$ _^][3_^][ÍIz7̳ Ab̸XDl3ĉD$TW|$`WjjD$jPEu_L$T3DXVu(jNhHjAjojD^3_L$T3^DXËL$Q_PV) u9jThHjvjojPDT$RD$$jPP1L$,QhHj.G,NV B$t,WVЃuj_hHh^_L$T3CXjehHhjojCVL$`^_33CXVZuj|hHjAjqjC3^ËD$ L$A 
t\@(tDQVЃuhhHhEN IB$ARP#^hhHh hhHjvjqj CV3^̋D$t4thhHjujpjB3ËD$H ËD$̋D$HL$-T$BD$p0T$BD$'L$QT$'L$QT$'D$L$PQn-̋D$L$T$ PD$ QL$ RPQ1̋D$L$T$ PD$ QL$ RPQ3̋D$L$T$ PD$ QL$ RPQ1̋D$jPËL$T$+uA+B̋D$T$+uA+B̃=uhmhhtJjj uhhtJjAhjA3ËT$L$PT$L$ P HL$PRHp̋D$Vt PD$t P詷L$T$QPVRj.^̸@D$u3ËL$D$ $t%$RP tPPuhjjL$ hHQYtL$ tPL$$tP L$(t@áhP!P̸dV?l3ĉD$`D$xS\$pUl$|Vt$tL$QT$D$RD$PVZPjughhtJjyjtjH?ujPL$ h Q萌 VT$ jPR, D$PhHj B ^]3[L$`3>dWu3$uÍP@u+‹D$u_3D$3$T$QPVUWSRT$4hhtJjxjtj>_^]3[L$`3M>dPTPMuhhtJhjtj<>_^]3[L$`3>dPNTPVhhtJhjtj=_^]3[L$`3=dËL$p_^][3̸=d̸f=l3ĉ$$$$S$V$D$WD$(3PL$(T$|$k;>9~vNL$RD$ PWr ;u/jahJjrjuj=_^3[$3<ËF;u D$ PAD$UhL$$;t$uÍP@u+‹T$jRD$4Pj WL$0SQ7l T$$RD$0UPl V[rjL$\QT$8R ll$UE}3$]_^[3;Ë\$;~Xd$jD$0UPi 3WL$XQT$4Rk jD$XPL$4Q|k F;|t$(V貓@~hJhhJ^ V荓PT$\R$P;V4 ~hJhhJ] V PV L$l+QT$LRO;$PD$(L$TQ$RjVPv$tCL$Tj@Q$j@R݌D$TjPьL$DQjt$L$,Qjsj[p̸06:l3ĉ$,$<S$8V$XW$TWD$(|$$D$SD$}_^3[$,3:0ÍL$(UQ $`ƉD$u $H&$HuÍPI@u+‰$Hƅu/L$,Q0$<]_^[390ËD$t$ ;D$jT$ȋЈD$$LWPL$!L$8SQT$*h $PD$(RPL$4QF jT$RD$4P* j$QT$4R V$PU9$` ~PHD$T$ j$QR$TPQSW ~$+͋0@u|$$l$uD$D$+D$D$,P 3̋D$L$PQ聶T$L$PD$RT$PD$QRP ̸\7l3ĉD$XD$dS\$pUVW|$p3W|$$D$t$3uhhKh~W@vhKhhKZ C;:CHL$$PT$(Rjo u hWPFtP<;thhKj{F t PGRjL$ QjPjuhhKj}T$R MPԨD$uh#hKj}8th)hKj~j@xFPe;T$L$,QL$ URP$SWPQ$tA$L$ RjD$0PjjQqD$hhKjrhj06T$(UR"VnL$tD$ _^][35\̋D$SUV33W89p@HL$ PT$$RUm u hQPEthhKj|VPPPKPe uhhKjk\$,|$SjjjPWptpNQRW}hhKjzBD$(L$$SPBT$ QL$(PQRWV"m_^][hhKjrjvj4Vl_^][øpf4L$tD$$L$t%T$RPc|PP0ph3 jjL$ h/QMtu3pVWhh0Kjl]3 ;u_3^jlWVH4D$L$ ~~ ~~~~~ ~$~(~,~0~4~8~<~@~D~H~L~P~T~X~\~`~d~h_N^̋D$L$ tD$tI̋D$PL$QP Q PQPQPQPQP Q P$Q$P(Q(P,Q,P0Q0P4Q4P8Q8P1^VW|$ phS\$LKÐ:utP:Quu3D$uNt&PɢtPjjhjW[_^hh0Khhj0[_3^PSWփ [_^hh0Khhj^0_^̋D$L$T$ H P$̋D$@ ̋D$L$A̋D$@̋D$L$A ̋D$L$A̋D$L$T$ HP̋D$L$T$ HP ̋D$L$T$ H$P(̋D$L$T$ H,P0̋D$L$T$ HF^F F;thh0Kj jSP3F;tV׃V_^]3[_^][S\$ Ul$ tEE t>V0Et؅t3Su1hh0Kj&hj-^]3[]3[VR.WVt S/'uhh0Kh5hh0Kj(,V3 ;u/;t Shh0KjAhj,_^]3[hh0Kj njU^>FF FOFtV׃V_^]3[_^][̋D$L$P3pVt$t5x t/Ft,P uh<h0Kj&hj+3^Wh@h0Kj(U NOFthJh0Kj jPNVWF thOh0Kj jPNF G GGNOB VWЃ~_^W_3^Vt$t4t.x(t(F@$u^VЃ)F^jXhTKhhj*^Vt$x(~t 
jphTKhhja*^@S\$W|$t@FP0u ˉG_[^9s!jshTKhhj*_[3^ËD$ L$R(PQSWV҃_[^jkhTKhhj)^Vt$t4t.x0t(F@,u^VЃ)F^j}hTKhhjk)^̋L$t;t5@0t.yt"hhTKhhj*)ÉL$hhTKhhj)Vt$t4t.x8t(F @4u^VЃ,F^hhTKhhj(^Vt$x8~ t#hhTKhhj>(^@S\$W|$tCFP u ˉG_[^9s$hhTKhhj'_[3^ËD$ L$R8PQSWV҃_[^hhTKhhj'^Vt$t4t.xPt(F@Lu^VЃ,F^hhTKhhjH'^Vt$xP~t hhTKhjij&^@S\$W|$t@FP轭u ˉG_[^9s!hhTKhjij&_[3^ËD$ L$RPPQSWV҃_[^hhTKhjijc&^Vt$t4t.xXt(F@Tu^VЃ,F^hhTKhhj%^Vt$xX~t hhTKhjhj%^@S\$W|$t@FPmu ˉG_[^9s!hhTKhjhjO%_[3^ËD$ L$RXPQSWV҃_[^hhTKhjhj%^Vt$t4t.x`t(F@\u^VЃ,F^hhTKhhj$^Vt$G=x`uxPu xX'@dNt3t+t#h)hTKhhj"$^W|$WjjVЃ~tFuh7hTKhA;th>hTKje-W u5NWQu$hKhTKhhj#_^ËF t P貱Wj~ BdjVЃ _F ^h[hTKj jWF_^h#hTKhhj!#^Vt$x`~t#hjhTKhhj"^@S\$W|$tCFP蚩u ˉG_[^9s$hmhTKhhjy"_[3^ËB`SWVЃ _[^hehTKhhjD"^Vt$t4t.xt(F@u^VЃ)F^jIhlKhhj!^W|$twtqxtkt jahlKhhj!_Vt$u^_Ã>u\PQWҋP莯^_j[hlKhhj8!_Vt$t4t.x t(F@u^VЃ)F^jzhlKhhj ^W|$tzttx tnt#hhlKhhj _Vt$u^_Ã>uIP QWҋP{^_hhlKhhj" _̋D$L$A̋D$ @H T$H T$QP@Ѓ̋D$L$H@̋D$uD$@$Å|L$;A$I 3̸&D$ L$VPQD$ >u^YV,~+T$D$RPjjjV~L$QV]VD$^Y̸~SUl$uUWFtJuLD$PWة~L$Q4P豐؃u!jThKhhj]3[YÃ|$Ft3JDtVPу~܋F@@P3]3[YËI} ( t&T$QR4HD$T$X3T$Rȃ$I;|$ ~5D$,;t U?hG hKPD$,> td$L$hQOp3.FuT$XD$L$(t?<;|$PN~I.ˀN@]@ˀ .FuƉD$T$,;t UJ?D$t PD$[^_]L$438j}hKhjdj D$,;t U>D$t P8[^_3]L$43d8hhKhhhKhhhKjkjmhKjzjdj >L$X[^_]33 8̋D$ L$T$jPQR.̸Xl3ĉD$TUl$`VW|$ll$ t$SjWjPP.؃O~ChkhKU < u[_^]L$T3`XjWUVb.l$ #j hKU:c [_^]L$T3(XSVUcL$ ;t V{=[_^]L$T3Xjh UbL$l _^]3Xh_hKjO;3Ƀ ;uhbhKjAj{j 3ÉH HHH@Vt$teFt,t P<Ft P<FFtFt P<FF Ft V<^øT$ D$(L$,T$$D$L$L$$$RD$ L$D$ 3S\$ Ul$W33;~J<u ;tWtR@A;|D$V;t 0;tFurh_hKj9 ;uChbhKjAj{j q^_]3[h,hKhhj L_]3[Éx xx8x@~3F;t9n }oF ;t W\;ŅuhChKP^9 u7hUhKjAhj D$(t90t V^_]3[ÃNUSWD$ ݉~n Ft0D$^_][̸ L$Vt$QT$RD$ D$ PL$QT$(R1yf |$t&thhKPhj 3^ ËD$T$PL$QR tL$^ .̋T$u3SW:~xZ tmJD98uOHD9uPtGt<t1t& t@t$ۃ3Ul$GD$t-VuBWPFV? ~ VD$u^]_[SUVW|$3}dD$t0uj.tyD$(EN D$ ׃EOF ~shhKW6؃ u?AhhKPhj _ tD$t90t V._^]3[WUSb L$( D;3ۋF>t P`8D$^Ft0L$_^)][SUl$Vt$ƙW+θ؉D$Ӆul$ŋt$tqf O;| ~FuhhKQ5 hhKQRP6u"hhKjAhj . 
_^]3[Ë+A~QjPB  GD$nV": : ؈>~N|uH_^][ËL$Vt$ ƙWt/P9|(yt!+κ_^и_3^̋T$VtCrt<3ɸ~4Ul$W|$t;}9 A;|_]^ø^̋D$xt3SVWx3 3u 8Ztz+tu-tp 809@0;809@;Í LJ; K; LFy 8Zu@3;_^[À+t-u@H;T3 80|J9ET8@00|590ҍ LJ;K|;L@|3;_^[_^3[̸ SW|$ D$ PI@u+‹؍D$P\$ |$t2Vt$ tSWV) u^_[F^_[_3[̸$ S\$,ujF*؃u[$VD$PL$8QRD$8L$ut;t>{uKPQW! #_^[ËRjW! u_^3[Ë3CUo85VrhMhMUNK VQUO ]_^[̸T$Vp@u+ƉD$D$PT$D$D$ ^u $QD$8u3ËD$t$RP%t̸$vD$, $QT$0RD$4pJu!jshLhhj 3$Ë@2|!=}D$,L$(jjPQ$ËT$,D$(jjRPl$̋D$L$QVp;t 3^DQP}!t^SVW|$32ۅWUou D$AL$u ~-2$=u ;~<u@;|t$T$u D$]_^[Ë tA7uD$]_^[ËGuVPQxT$$D$ ]_^[À|0D0L1u HIN8tÈNIH~ ӈNIHD$]_^[_^3[̸UVt$t.uju^]ED$SW|$$hW ;hMRL$ ( D$u:hhMjAhj ptt9.t U_[^3]Åu EPE;utCO|;T$D;L:utHIN8tu@:Gt$Et P4)D$E}t.L$T$ _[^ʼn ]ʈNIH~҈NIHE;utCOWSP ̸ Ul$Vtuuj>u^] FD$T$ WRD$PL$$L$QT$RD$,Pyf1|$ts#L$ h1AhMQN& u>AhIhMPhj tt9ut V_^3] FD$ t+L$9ut AHL$D$ PQWT$ T$Ft P'~D$ tuL$T$ _^ ] ̸ l3ĉD$Vt$>F}2Ft P='h[hMjI% Ft3ɉH~u*h`hMjAjvj 3^L$3| ËL$} F3It L@r3ɃxS$\VAy[L$ ^3̸ ̋L$W33u_ËQuVqu^3_Ãt_3Ʌ~SA ;|[t^_S\$ VWujuhhMj:[|$G %WFt @9}LNhhMPQ{$u.hhMjAhj <;t Vo_^3[ÉFVRWYu F_^[̋D$VW|$ WPQR9 uhhMjijwj _^Áu jV_^+̋T$VW33L$ SJtrAa|z~UA|Z~K tF0|9~<'t7(t2)t-+t(,t#-t.t/t:t=t ?tyu[t_^øu_^Vt$~t3^W>%yH@u-N3~ Q<uzu:u ;|;}_3^ø;~IVA;|PFPF_^̋D$xu8t3Ë̋D$xtL$jQzD$T$uD$ L$%AËT$ D$PVt$W|$tWtRVu)Bu_3^PD$jPy _^t׋L$ PWQV _^ËD$ D$xtT$jRD$8uL$_q^ËT$_r^̋L$tAD$t9;u3t+t@D$IL$PT$AD$3Ã̸6SVt$3ۅu^3[UWV l$0xjWVPՃ؃y|$0WSjvL$4 D$L$4QWST$0jRD$8|$8iV薼WD$$j{h0MVD$uP6؃ uj~V3N~9$L$$T$$RWV =PՋD$,+VDG ;|΋D$$L$(+D$h0jV|$(PSt hh0MW|$u%hh0MjAhj _]^3[V|$(3蔻~/{GL$$RPQT$0VEi;|؋|$D$L$PWQWSD$(_]^[V3+~$d$T$$RWV$PVG ;|D$$L$(D$_]^[̸,D$0SUVWtt؋l$Dt$HML$u433؅uhh0MjAhj _^]3[,Í1D$,+PD$(PL$(QT$4RD$ P8D$FL$X;L$$t hh0Mjehj T$T;T$ t hh0Mjhhj ]L$(|$9;T$,v#hh0Mhhj )!u M+ΉL$(9T$,;sat$LL$(D$u 3QD$P@L$0|$D$u(QL$QPփ t/PSVtL|$;|$,rD$@t}_^][,hh0Mjqhj vET$$+RPft1D$@t9t%D$PtPST_^]3[,S衸_^]3[,̸W|$u3_YS\$VjWjK hHMP]ujMhHMjAjoj ^[3_YÍD$PWt$$PL$Qjt$ T$,V^[_Y̸6D$ $uYVt$ VL$QP褉L$ ujhhHMjAhj D3^YVPT$RjL$ D$Pn^Y̸4SV3t$ 3G;D$;u!hh`MjAjkj ^[4UW$+w\+>;PD$PJ<L$QD$HWRP@ } m~ ;sL$y+RD$0PL$0QT$;l$PU;t|v%$EL$HWPQO? 
~j+u|$0L$l$;rm؅C?hh`MhFh2hh`MjA+hh`MjAhh`MhhhD$L_(]^[4̸&L$VWD$PQD$3K|$|L$ WPD$PQT$T$$ t W8_^øL$VWD$PQD$3|$|!WL$QPD$$T$T$RP贝t WU8_^VRP4Duhh`Mjhj 3^ËD$ WPjjjV@L$$T$QVRL$$T$QVR"V*: _^VOPdAujh|Mjhj 3^ËD$ WPjjjV=L$$T$QVRV9 _^̸ l3ĉD$Vt$>F }/Ft PjQhMj Ft3ɉH~u'jVhMjAjpj 3^L$3 ËL$} F 3t L@r3ɃxS\VAy[L$ ^3̸| ̋L$W33u_ËQ uVqu^3_à t_3Ʌ~SA ;|[t^_S\$ VWuj  uhhMj:[|$G % WFt @9}LNhhMPQu.hhMjAhj ;t V _^3[ÉFVRW_^[̋D$VW|$ WPQR uhhMjijqj +_^Á u jVr_^Vt$ 3^ËT$ xT$Ƀ ^Ê$ɋ?  P@@?? ʁ&T$ ^Ê$<Br$<B$<B$<R:ɋ? P@? P@@?? ʁ T$ ^Ê$<3Br$<5B$<(B$<B$<R:ɋ? P@? P@? P@@?? ʁT$ ^ø^̋L$uQD$ =stËT$=s#|t$? A=s1|ƅt" ?Aʀ$? A= s@|t1 ?ʀQA?Aʀ$? A=sSCt@?ʀQA ?ʀQA?Aʀ$? AÃtO?ʀQA?ʀQA ?ʀQA?Aʀ$? A̸,VSUVW3D$$P3|$|$|$|$ 43ۋt$D;tt$H;to|$XquNQln+Ft8tPR:FvRGP9h 3Cwt$PW|$DVhhMPD$,F \$hSihhMPD$4D$@" $D$L$ QVl$(׋T$`3WRD$4P}L$QT$(UR tmS\$D$PL$,SQetQt$LF;t P~ F T$^|$F WhhMh7hhMh&hhM|$jhhMD$jAhj L$$QtT$RU3U t$tD$PV3V D$_^][,̸SUVt$<3WVl$l$l$$؋FP\$,;9;F;>O Ah\$0;T$8L$4RT$@QL$4SRQVЋuYT$8D$݋D$@P D$tSP2L$Q tT$ RU2U D$_^][ÅhhMjhj m~3L$$A t9G ;tRQPWPD$ P uhhMhNIL$W B$NtL$jVQPSB\$4tT$jVRjPS D$,T$sD$ hhMjAhj 'hhMhhj PPLu#hhMhhj zD$PcL$LQ ;t#hhMhhj ?QjWD$(jP臼hPWPT$$R uh6D$WP+L$Q6D$L@RPL$0Q豽hhMjhj 3]T$[R _^$̸&Ul$ VWIu FD$ O{uFD$  FD$ Yu6D$ VF ‰D$ NF FD$  FD$ D$ PWV |-+D$ tL$QPӃ~ W_^]Y_^]Y̋D$̋D$Pjj2L$ ̋D$T$̋T$D$SQ[̋L$Vt$ @@H^̋D$Vt$ PhQ ^̋D$v3Ãa|z~,A|Z~"0|9~ tPhNh tǸSVt$ W|$tVutvt vtvu_^[É_^[̸@l3ĉD$_^̸&D$$u3YPD$PtL$ Vt$WhPQV{YhPV>M_^Y̋L$ S\$ VWhPQT$RjD$$ptVhPVL u_^3[WIWNt|$D$t QI7_^[W|$u3_V]Hu hhPjAhj _^3_WVMD$PVVmN^_̋L$ S\$ VWhPQT$RjD$$otVhPVL u_^3[W^HWNt|$D$t Q7_^[W|$u3_VmGu h,hPjAhj o^3_WVLD$PVV}M^_̋L$ S\$ VWhPQT$RjD$$ntVhPVK u_^3[WGWMt|$D$t Q١7_^[W|$u3_V}Fu hRhPjAhj ^3_WV LD$PVVL^_̋D$L$ T$Vt$PQRPu^W|$t0NAt PbVL$ zFF` vN _^̋L$D$tL$tPRHL$ L$t̸ Q̋D$ L$T$h QPQRWm̋D$L$h QPQU h QE̋D$h QPAĨ|$D$V0uFu^ø^̸R̋D$ L$T$hRPQRl̋D$L$hRPQT hRD̋D$hRPH̸pR̋D$ L$T$hpRPQR'l̋D$L$hpRPQlT hpRvD̋D$hpRPH̋D$PhpRQ̸R̸,S̋D$ L$T$h,SPQRwk̋D$L$h,SPQS h,SC̋D$h,SPaG̋D$Ph,SVWh,S3rCtUD$ P~҂FtutNWQvu h,SVFt Wu_3^ËT$D$RPW _^̸PS̸lSkL$3҅‰Vt$t)L$ APtu^ū^̋D$V0u^WV D$tt@PVVڪƒ_^Vt$>u衫L$ 
PD$PQ u5t+T$BPt谪3^3^ø^̸S̸S̋D$HT$ ̋D$HT$ ̋D$L$W8;yu_SUV}+S腩t$ht0t ʈFE|I0}҈0y^)][_W|$~"hhShhj K3_UVt$t t333~ d$t 00@ ;|^]t+L$;Auh놋T$_̋D$T$QhSRM ̸tT̋D$ L$T$htTPQRgg̋D$L$htTPQO htT?̋D$htTPQC̋D$PhtT̸T̸ ŰD$ L$T$h UPQRf̋D$L$h UPQN h U?̋D$h UPB̋D$Ph UVhh(Uj t ~t Fu;hh(UjAhj tt Ps|VJ3^ËL$F FF1^W|$t@V7t8FPGhBQ~F t PV^_{̋D$hBPq~̸ SU3W\$ \$}D$ ;P{~]SQ{9ot"|D$t_T$ PRztMoD$WPpzt8QC`{;|jjhTT$jRFFWPhu4D$ hDP}h"h(UjAhj j_][ ËNQjjhTD$ PL$QT$({FT$ hDR<}_]F[ ̋D$HT$ D$QRT$ QR3ɃIWGP4LV)uL$WQ_S\$ VsWVC u^[3_Ë6U~#d$x RӃt MF |.~IxPӃtMOD$x3ۅ~Y$xCQ t% GIFFCx%R uP GFC;|D$+xL$]^[9_̸VSUl$VW335y~6VW7yjjhTD$ D$ UPD|WFx;|ʋ^][YW|$t=D$ t59t(VPh Uth UP)>7^39_3_̸覱C U3l$ D$l$;t P&k Phxu k]Wy|$; VQ7xUR3xD$9Ft.PyD$L$ PQvVT$htT9POQNE tjT$WRvtXPEw;l|$ 3UNLQ5^v(t>V_^3VRVj ~ ~~(F ~8~<~d~D _^̸V̋D$ L$T$hVPQR\̋D$L$hVPQE hV&5̋D$hVP8̋D$PhV̋D$L$T$ PD$ QL$ RPQj ̋T$T$L$L$D$SUl$Vt$WhVUVP[t&+t)UOdVQw uhVW7_^]3[Ë_^][S\$ VW|$hVSWC tGdSPF_^[̸W̋D$ L$T$hWPQRG[̋D$L$hWPQC hW3̋D$hWP17Vt$W|$u)tFdt@tP`Fdx_^Åt!~duhW%3Fdtvdu_3^Ã~u `FtL$VQWR~ _^Vt$W|$u)tFdt@ tPJ_Fdx _^Åt!~duhW2Fdtvdu_3^Ã~ u ^F tL$V QWR _^̋T$Bdt@tL$tJdQB3̋T$Bdt@ tL$tJdQ B3̋D$WPEu_Vt$ t1~duhW1Fdtvdt>u&qu^3_ËWQn^_̋D$WPu_Vt$ t3~duhWQ1Fdtvdt~upFu^3_ËNWQ_n^_Vt$Fdt8thpPqNd^Vt$Fdt@thpPrqFd@^̸X̋D$ L$T$hXPQRX̋D$L$hXPQL@ hXV0̋D$hXP3̸X̸|Y̸ VUVpW3Vt$l$.n~= It$UV%njL$QhVau|$ tBK_^] Ã{@|$uoC@S@WRlD$jL$QhVF uf9D$ uFvVD$pmL$;}YIQVimx~R=u+D$ V4mL$;|WWFhZ뒁KD$PEm;_^] _^3] ̋D$HH39V~ H~~BH~~BH~H~~H ~ t1H@N 9~ IP N 9~ IQ P ` HQR1̸Y̋D$ L$T$hXPQRgŰD$L$hXPQ= hX-̋D$hXPQ1̋D$ L$T$h|YPQRT̋D$L$h|YPQ<= h|YF-̋D$h|YP0̋D$ L$T$hYPQRT̋D$L$hYPQ< hY,̋D$hYPq0̋D$PhY豲̋D$T$D$T$̋D$V0~uhU)hFFtL$ QPit F$^hzhZjAhj 踣3^̋L$AD@tL$3̋L$AD@ tT$ jRT$RQЃ3S\$VsD~ t+W|$WGPW PN PD$PSу_^[^3[̋D$PD$P@QRPh|Y2̋G Sڅu!u[ËHQSG@[ÅuZVP3Si~4G VPRi8uHQSGtW RFi;|^3[^[̸D$$SVt$$D$QRmiu4hhZjj HQihhZjj $HT$RQj؃}^3[ËBUWPth;}Fl$8QSRmhD$<PQnu ՋuBPC.h;|_]^3[ËD$0t83_]^[@̋D$80$0t80hhZj uËL$T$HL$ PT$H P̋D$t D$>̋D$L$AH̋D$t@t|$uhUPd̋D$SD$yUVWPK,QPSFjjhS' Ct SjjjZSjjjXSCjjhSC$0C(t {$uKjU3f~MWUfP =YuK~UG[f;|=t=tK_^]tKDASЃ3[ËSDBt SЃtCt PJCt 
P(C$PRK(QRS@hR R-h[Ë 803CCCCC KDCHC@C$C([jEh Zj,B3Ƀ ;ujHh ZjAhj 贞3ÉH H H$@(HHVt$tcj]h ZjF(jPHt PFt PFt P~F$t PVw^̸Z̋D$ L$T$hZPQRM̋D$L$hZPQ6 hZ&&̋D$hZP)̸Z̋D$ L$T$hZPQRWM̋D$L$hZPQ5 hZ%̋D$hZPA)̃|$uD$V0jO諲^̸t[̋D$ L$T$ht[PQRL̋D$L$ht[PQ4 ht[$̋D$ht[P(̸[̋D$ L$T$h[PQR'L̋D$L$h[PQl4 h[v$̋D$h[P(VW|$t7u(#ujTh[jhj _3^ËD$ PVm(uj\h[jVtYtt6=thh[h`L$T$QRFP:| uoj{;L$T$QRFP uQjqL$T$QRjP Fu2jgh[j hj tt97t V)_3^Åt7_^Ul$ VtCutzAD$邻AD$Ɖ̋T$B tnt;jhDOSm u>t!Fh h]jjuj ht$$L$Q蠵_^][̋L$Au L$u L$T$jhD^R 3̸ 覉SVW|$$3D$D$ D$u D$ D$ ;uD$D$(\$ t$T$ u0j hh_Vu j hx]V] D$(u(Q1P@Ph hL_VnrD$(Ujh4_V MS5}S\$pUh CPh Sh_WD$+Q3QK~R|$uhDOW8D$jUR3KPD$jPPcL$$QWREJ;|hX WSh_W\~h CPh Sh_WD$+0NQ3J~T|$uhDOWD$VjURJPD$jPP趟L$$QW[VREQJ;|hX W?Sh_WFt@Ph Sh_W~ tZh Sh_WuF 3ۃ9~- u PPQhx_WGF C;|jhX W ][L$\_^3̸؂XSUVt$Wh`V|$ Q蟇P@Ph h`VWPOjQVBjjP,Sh h`VLSʭh h`VQ RVh h`Vn@$t PV h`VNhX V@QjjRhx`V;hUH ~h``hD`VU3|H~jISUyHh0`VQVOh`VWRV^hX VGjjPh_VDUCH;||$OWQRV= _^][V:PujKhajhj &3^ËD$WPjjjV_L$ QVV<_^W|$uG_à Vn u D$S\$hPS u[^_Ãu L$Qh8aS 3҅[^_UWq*L$PVPVQh$aSx][^3_Ël$ E>-_t T$PRhaS>uVWIvtG3+ʼnD$I‹+ȋ+u,hX SU~aD$$hPS tGL$.;Ϲ t QRh S~F;|jhX S4 ][^3_][^_S\$UVWhaS=t$ P; ua PǕPhaSRλuh|aSjjWSzW @8tHQhdaS VP躙ua PFPhHaSF8h 3~_+ʼnD$98 ɋ+ujh@aS L$.;Ϲ t QRh SF;|jhX S _^][̋D$SVW|$h PhaWt$(>t?Ul$NQUqt uhDOWRW2ۃ >u]hX W_^[̋D$xSVpWts\$GI:utP:Quu3tCˍI:utP:Quu3t wu_^[Ë_^[̋D$L$PQQ}3ËL$tT$ RPQo tVt$ t%Ft@t L$VQЃ^À>tFH w|~$~j貑L$ ^ËVD$^ËL$^hehaj@ u^ËL$@39^P蕜>utH @L$39^Ë/~~~}e~̋D$t'HtItPRуÀ8t xu@̸zSUVF3;t@;t؉\$ l$ 9l$u/w$$F;t@;tVWЃ^][YËv;t;tЉ;ۋF;t PWBVW);tUVWUӃ;t9l$u.FhhaPģ ; NQUPpz VjW* ;fUVWjӃ^][Y;tUVWUӃ;,9l$u=VhhaRB ;NQUPyVUW2*VW{* 9n ^~SW+SPBtNE;n |D$ jVWjЃhhajdjyj NyVWw^]3[YhhajAjyj (y^]3[YËp[4D$wZd$$xHt,u:AvËHt%I tPVуÉD$KD$@Ð'pppDep̋L$W|$j@_Vt$ FЋt2tL$^ËVҋt$P^tD$^t5?uh%hajAhj w3^ËL$^W|$ R_^̸FwL$Wj|$D$^3Ʌ_I# $Y̋T$VtBt@ t^T$D$q:t$uBt>tn wO|$hR\e^ÅtB^^jVvQ ^ËR^Ë2^ M̸&vFt ?St Xt\$ D$\$ɃrU$XFtPW][VW][ÅtjVWjӃ*VW$&;F N,UW(UP)@][L$@VWЃ][VjW% tjVWjӃVWN&F NlD$ ~:jUW'؃ tSWW'SPpD$ @;F D$ |̋\$t jVWjӃ|$uR][ÍI _̸6tVt$WteD$SU(U3 ;~5t$SU;NjD$ы|$UC:;|U:T$][_^Y%PFЋ|$_^YVt$ 
Wj|$__^Vt$ W|$ j?_^̸$VsT$0 D$4VWL$ t"t _^$Ër%t$D$ 6t|$@D$t$%D$ D$t$D$ D$ |$@?U|$Dt uSgT$83;~tD$tD$|$tu t$ D$$39|$|$$D$ S3F9~;VSH9L$PT$HQD$0BjPL$8jQSF 9;|ŋT$ RWU蘑L$4 D$0L$(tL$QPUx t$T$D$RPVUSL$TQWjPD$DSPfu S8D$[]_^$ËJ PVыT$DPD$DRP0[]_^$̋D$L$SYVWx;| Ur;ustC)+u1v4Bi+u v#Bi+uvBI+3]u+_^[̋T$Bt@tL$ RT$ QL$ RQЃÊL$uzt 9uVW<u @|$9ru |$D$ yHwy$( Aq 33t9rtt z~ _^ÃztȈL$D$XT$ ҍD$#RQ`_^-tPtKz uA tD$t A_^ËA1L$tt VPQt!VNnFT$V l$T$L$H}SD$PL$QT$$RD$XP芆l$,tTL$T$NL$HV +׉FnVu2;~,hhbhjhj c_^]3[ ËL$Hy)hhbjfjhj ct_^]3[ ËT$<|;T$uoT$@;T$uett+t$,tЀT$0t$ D$ t(D$(tT$D$$tT$D$4_^][ À|$Dt _^][ Åth'hbhjhj b_^]3[ ̸ VbS\$Ul$u]3[ ËW|$ ?S|$Q%PFPRL$$QT$4RD$+PjL$8jQ|$<,uh3Cu _] [ À|$uh;hbjx'S\$jVST$ RUu$hChbj:hj a_]3[ +|$߀|$t"SL$&uChNhbht.hYhbjwhj aVU_]3[ ËD$ L$_][ VRT$0RU)_][ ̸`|$D$V0Wu0_^SU~t|>u~uKWD$PL$$QT$ RD$ Pމt$$JxTL$t+t$$tET$4 +tbhlhbhhj |`][_3^hhbjfjhj Z`hahbj:hj B`(][_3^ËD$$][0_^̸_d$(S\$ Vt$(D$u8\$0uL$,^C[Ul$0WD$$|8u x(UT$ RD$4D$ PL$ QT$QS詪T$CVRP _D$ ƉD$L$$+|$8hhbhjjj ^_]^3[Ã|$8D$hhbhjjj f^_]^3[hhbjfjhj D^hhbj:jjj ,^_]^3[h'hbhhhbjAhj ]_]^3[hhbhjjj ]_]^3[ËL$0_]^[̸V]L$ T$Vt$,D$PQRT$42D$ ^̸ ]|$$Ul$0VD$ u h hbj}jlj =]^3] ËD$88SuރX\$W|"hhbjjlj \_[^3] À|$Ht"h%hbj~jlj \_[^3] ËL$L|$8QjjjUT$(RjD$0jD$3PL$hbj:T$8w3;o D$}1VS L$XQT$ RT$LPu*E;o |;o uI|$Lt.WSJ_^][ ÅhShbt$,j:0hbhbhWUS7D$ L$DD$, tjWSjЃ_^][ ËT$8T$It7At1T$AT$u BËI juhbQRPxr3̋D$t2t,D$@t!@t@t@@̋D$tCt=D$@t2@t,Vpt#t P zFF^̋D$VttD$@t @tpu^Ët PyW|$hhbWw u_^ËL$WQPdN ~_F^̋D$ VtYtSD$@tH@tBpt;~u5W|$tNQRP NN D$ _tV^3^̋D$tD$Ë@L$Vt$ u^ËFWЋL$ Gu Gt?_^t RhPRwW 3Ʌ~ 9t6A;|Gu$|$thhbhjnj M3_^_B^̸d̋D$ L$T$hdPQR̋D$L$hdPQ hd̋D$hdP̸d̋D$ L$T$hdPQR'̋D$L$hdPQl hdv̋D$hdP̸d̋D$ L$T$hdPQR̋D$L$hdPQ hd̋D$hdP̸e̋D$ L$T$hePQR'̋D$L$hePQl hev̋D$heP̸ e̋D$ L$T$h ePQR̋D$L$h ePQ h e̋D$h eP̸T$(t$6uWETD$(t W1T3uh,hS躐3Ʌ_^][YÅt(PS蝐>h S臐(`$@D$uL$APSiVSVSVS uT$:hD$ PD$OhX SL$ FjQRPSW\3D$L$(QRVSv D|$thX S膏'_^][YÿNo~̸6=Ul$Vt$(FWtxtT$(|$T$\$@D$ D$ }uIt8uL$4T$(SQL$8hhW賎_^]r$FtT$(SPRUWZD$0L$(T$4SPQVUu_^]ÅuT$4L$0SRT$0|vt4Ft-L$(Sh QUWЃHhX -D$42PhxhWN _^3]VUS|5;F }0F4VUGL$0SVQPW_^]PhXhW 
3҅_^]ƒÅuD$4L$0T$(SP'|$0u|$4t6thThW_^3]hX WD$ tL$QVUjЃ#~ VD$T$$~MD$$jPUsVUJL$0D$u ~0un+ËȁyIA+‹;;D$D$ ~P|$4_uhhhVY T$D$hhhVRPZt$t$4D$3~[I3.HЃ wHwWP7L$L$ʊ F|G;|t$4|$$D$ D$D$8L$,PVQ \hhhhjfj /_^]3[hhhhhhhjAjfj `/D$$tPZ_^]3[hhhh땋L$0T$_^]Q[SW|$3ۅu_3[ËUVu&D$jhn P ^]_[3~l$$t3k#+ujhhUs uK؋G0 0 T$jT$ RUL$%6 uF;7|^]_[^]_[̸-D$$L$SUl$(VWP3UQt$t$ t$$P t$|( uH(k|( uH(W3Ҁ|(\‹P|$ ~* *0|9~a|f~ A| FJƒ( +NjȁyIA+‹3;D$D$~K|$4^uhhiVV T$hhiVR7Wl$0D$t$3~^3.HЃ wHwWP7L$L$ʊ F|G;|l$0|$ D$D$L$4T$(QUR~ }hhihjgj U,_^]3[hhihhhijAjgj ",D$$tPaW_^]3[hhih땋t$D$,L$_0^]H[̋D$L$hiPQ ̋D$hiP豷̋D$ L$T$hiPQRW̋D$L$hiPQ ̋D$hiPQ̸*l3ĉ$$$U$VT$$RD$L$D$3^u^3]$3*SWhi)hij(@O{WBG@jUiOhijWVJjD$< ?NNBFy|$ ЋHu jVWV*thhLjS0S D$u hhLjjAhj )WBD$PUl$,hhLjURD$uh뮋jh@jQ?I uh둋VBL$T$RWHGHSQ{|$uD$>jh$j$hRT$(D$t hhLjjghj (D$P@u+ƒ$ D$t[j菦L$Pj$PQR7[j jf j$$f$D$j$QjP$RjP~Pi $hPzNQBj$QjD$e~PT$L$ WD$SZQP[tL$MhiW_^]hhLjhhj #hiW˯_^3]̋D$L$ T$jPD$ QRPSW|$3ۅu_3[ËUVu&D$jhhPFt ^]_[3~l$$t3k#+ujhhUs uK؋G0 0 T$jT$ RUL$%s uF;7|^]_[^]_[̸V"L$(T$D$ SUVt$0WQ3VR|$|$ |$(D$$@ t |$| uH0| uH0v3ۀ|\3҉\$$~* 20|9~a|f~ A| FB;|ƒ0+|$t>0D$u ~0un+ËȁyIA+‹;;D$D$ ~K|$4_uhhdjVJ T$hhdjVRKt$t$4D$3~^3.HЃ wHwWP7L$L$ʊ F|G;|t$4|$$D$ D$D$8L$,PVQs ^hhdjhjej _^]3[hhdjhhhdjjAjej r D$$tPK_^]3[hhdjh땋L$0T$_^]Q[VWjph|jj4I3 ;ujph|jjAhj _3^É>YF;tj@F;t܉~ ~~~~3F F$F(F,_F0^Vt$tmhh|jjF0jPBOFt PLYFt P@F t P|Ft~t PJV}J^̸,S\$8Ul$8VW\$4D$0D$:tuuuFK3L$HD$uL$,D$,L$HQT$R!Cu NL$(D$QT$R~WD$<X u OD$T$0L$(+QD$4D$L$,PNQ u PT$0+T$T$(QV9P4P FuD$WZB8u,@~D$`7@QPN Q 3F F$F(F,T$R=uRhPD$uF(jjP tC>uN,jQW ~L>uT$D$O$RPSQ;l[_^jWh[_3^Ë$tT$D$RPSW l[_^Sk≠D$L$$RjD$D$hPL$!k̸fL$ $PjhQj~T$ $ T$L$ ̸&D$L$$RjD$D$hPL$j̸L$ $PjhQqj~T$ $ T$L$ VWhhhkj@]@3 ;u_3^hhhkj<@ F;tD$ ~ ~~~~0~8~4>FFxp _@ ^̸&|$UWl$|$$|Vw u ^_3]ËSD$B$HF jjP .N$jQW ؃ jWeWkD$ [^_]ËVRUj4+Ń ;FF~hkhhhk8 FD$NVQRUD$ jP9n}VNV G$QRP_3_̸ D$u V0FN@L$V T$NL$ NT$RPQVj ҃3^ ËFWPjQ.hhkPn<|$4D$@Ht7L$ QjjjЃ~T$ RWaD$Po; {0t't@tu,hkFh(z ?h  8hk1t$)tL|$t3hX* WSal$ hkWBaL$QC4;^][hkWa^]3[VWt= <"tP׃tFFu_3^FN_#^_^̅u3ËǍPd$@uS+Vt8;r( $<"tPӃtN;s^3[ÍN;t^[̋D$SVtAP]؃tK;t-U- W>WՃt W 
F>u_]3ۋD$tP\u^3[3hVhkj7 tpPD$HQ^[̋D$Vt$ t69t/ d$:utP:Quu3^^Ë39^3Ʌ+ȋ̸ F D$ $QVD$T}3 PV  ̸ VD$ $QRD$}3PFPVt$t P_8Ft PO8VF8^Ul$VWuÍP@u+‹Ѓuōp@u+ƋG;B5 jhlSփ u+WKUQփ ujT;hlRփ _^]_^3]̸ l3ĉ$UV$W$Pbu_^]$3 VU`$$yChD$PW\ PL$QV\hT$RW[thlV¡hD$PW] ~pS$3ۅ~%T$JI  u uHtPL$QV'\ tjhlV\ hT$RWp] [jjj V\^V`U@Z$$_^]3̸ ø l3ĉ$SUV$,W3lj\$ |$$l$H@u+D$T$D$eD$hD$,PS\ 3$(_^][32 Ë|$$L$W֍\$,G:u \$\$<|$3~L$(+ˊ1< u< u+|$t8D$tL$UQoePQ`jjhU]|$tjhlUwZ |$tVT$,RU`Z L$ hD$,PQ[ T$UR̋D$SUVWمtGPWu_^]3[À}t+d$>W t W F>u3t;S?W؃t;t'>W t W F>u3h9hkj 2 gh.^F_#^][Vt$t P3Ft P3FthP:Vq3^SVW|$tvD$ L$\$PQSM uj}hkjAhj _^3[ËT$WVRZjjj VZV\VV;u_^[ËD$L$T$ PQRw _^[WJP]uhhkjAhj 33_ËD$SVPW[L$(T$$QL$ D$$RPQVjjj VEZV[W)V4^[_SVt$ W|$ Wh;w"}&hhdshhj _3^ËT$+ wAP$<_^_@^_^hhdshhj L$D$L$Qh\sj _3^_^Ë)̋D$t(|$u!hhdshhj s3Vu"h hdshhj F3^ÍLF t*PQT$@D$AQ ^ËD$ T$D$QT$AQ ^SUl$ VWuōP@u+‹ q3;xuWUPӃ tF 1r_^][Ë A_^][Vt$tJj D$ PV L$ t9t T$ ;u"}"hLhdshhj 3^jPD$P, uhQhdsjAhj 3^ø^S\$UVW|$333ɋÅ~8:tA@;| p++ËSu3h/hdshhj sShsjf _^][éuAL$Aqu,<;t&h<hdshhj %_^][_^]3[w5$D$8thIhdsh뷍XP͋t_^][ÍD$P\$͋JT$:t&hhdshhj _^][Ëuh hdsh͍D_AL$T$^]P@@ [jjjjjD$0P{<_^][ËL$jjjjjQ׋T$jjjjjRjjjjj뵋= jhsV׃ uL$_^]A [jh\pV׃ uT$_^]B [jhsV׃ uD$_^]@ [jhsV׃ uL$_^]A [hwhdshF<*6̸Ul$VWu$hhdsjAhj _^3]Åu L$$A $d ?hhdshhj kVr_^3]Ãt0hhdshhj 6V=_^3]ÍNQT$RD$D$|$ o9hhdsht0hhdshhj Vǭ_^3]Wj7F hhdsht0hhdshhj aVh_^3]jWTFhhdshBt0hhdshhj V _^3](FuhhdsjAjWP uhhdsjAFL$$HVR<hhdshu u5QPUjFWPchhdsjAWhhdshhj V#_^3]BFu-hhdsjAhj V_^3]ÃucL$ QW;uhhdshVBFL$ VD$$B|$$uF` FH L$$_^]ÃujWPH Ƀu/|$$u(Phjj,WHuhhdsh?hhdshhj V _^3]h.hdshhj Whsj V˪_^3]ÍI0 y! 
̸$SUD$0Ph@j3ۃj,Q\$ \$$l$Dl$HD$P$Gt ]3[ËD$8VttT$@L$DPRD$;u*hhdshhj ^]3[QL$HQP ;t9l$4u9$u ^][WT$RVQV|$0uD$ 3 t$(D$ 9l$8taWL$4QT$3@?3øUl$ W|$}u_3]ËōPd$@u+‹SVt$9>|Fub^uhGhvP* hhvOQS Fu%hhvjAhj ^^[_3]É>tWUPV ^[_]Vt$FtP D$L$F^ËT$ D$V^hhvjo3Ƀ ;uhhvjAhj 3ËT$PHH Vt$tFtF u P V ^S\$ W|$ +uwSUVwr;ustC*+u1v4Fj+u v#Fj+uvFJ+3^]uG+C_[̸$l3ĉD$ D$(PhLvD$j PtL$QVRnFUPVF y_F<^]3[Ã}t_F=^]3[+>;~t}_F>^]3[Ã~ !u F$+ˉN_F^][Vt$ u3^ËFW|$ GVQRW u_^ËF G _^hhvj uhhvjAhj 3@@@ W|$u3_VtWV@uVs^3_Ë^_̡0Puuh0Ph4j̸D$S\$ UVWST$ RD$ L$PL$QT$$R336l$8D$ |pPqD$0uZ|$uD$(SPQ茍 _^][Åtuu tQD$txjh@h$P uSWjl$( D$tMML$t t3T$$RSWPD$(PVD$,QRP!D$0 L$M_][^Y̸<vD$@U3Wt8uu_]<ËD$LSVt$XVT$ RD$ L$PL$QT$$Ry f\$\9\$t tSL$TT$L$DL$D$,D$`L$ T$8\$0D$4ul$<ΉL$D$L;~NjL$H VPRQ ̋D$L$T$ VjjQRD$D$PL$QjujMh jnjj -^̸S\$Ul$WjjjSjUluj\h jpj~j _]3[YVj_h W9 uj`h jAj~j ^_]3[YjjjSD$ PUt$(D$<t8D$ t0^_][Y̋D$HVRD$ PjL$T$ ujth jnhj (^VW|$t7u.Kuhh jAj|j _3^Åt7|$ S\$jWӃuhh jpj|j [_3^hh P D$uhh jAj|j }[_3^ÉFD$PWӃ[_^Vt$Wt>u1u hh jAhj -_3^Åt>GwtP_D$L$ PVQ6c u hh jphj _3^Ã>uh끋_^̋D$HT$VRL$ PL$Qjpzuhh jnhj o^̸̋D$ L$T$hPQRz̋D$L$hPQ\b hfR̋D$hPV̸VWhD$ "RujWhjAhj DD$PFPhu=j^hjAhj lhVnUD$t P_3^YË|$ uWjQ uje뢋R)!L$ Wt QP4 P+|D$PhV ujpWhVTL$T$QjRWPD$P C_^YVZuhhjAhj l3^ËD$L$T$ PD$ QRPVkt^V3^̸̋D$ L$T$hPQRw̋D$L$hPQ,` h6P̋D$hPS̸4̋D$ L$T$h4PQRgw̋D$L$h4PQ_ h4ŐD$h4PQSSUVWh43O-j؃|$XuhhWE CL$;Wt QP PڧD$VPRh|$$~j^Ft~WP=tp|$ ~.t&F tTjjWPF Pt/jEEEtPh4V u3hhjAhj h4VQU _^]3[ËMVT_^][̸6l3ĉ$$$SUVW$WD$ L$33s؃ujghjlShD$ MFT$D$L$AW辔tD$+P2X_^][Y̋D$WPL$PQZ ujPhdhhj 詿3_VW39~+VW9P@RP|tWF;|^_jXhdhhj G^3_h9h0:h  ̋D$L$jPQ ̸膾S\$VWjUhS3EP$ ujXhjAjpj 虾_^[YËL$SD$PVQA~T$ D$RVPL$( V_^[Y̸\l3ĉD$XD$dL$hT$pS\$dUVt$xWD$$L$ $339$T$D$$L$D$~(T$8uP@PW;~G;$|jYL-hQ u4j\hjAjoj 舽jShjnjoj nS{`WT$(RW<{xW$D$0L$8T$$SPD$4QRVPW>0D$~f~^|$\$$++|$L$d$|$BPnPQU@PURl$uʋ$\$t UD$(j@PL$pD$_^][3`\̸Pl3ĉ$L$T$XS$dU$dW$pWD$`SPL$E ~^VVST$RD$(PD$(xPL$(QL$(T$4RT$4D$0PQR D$<ED$8+(ޅ^$X_][3肻P̸6D$ V38W|$t$ thhjnjnj \@PD$ds dD$h hQ uhhjAjnj SUT$RGxVPlL$(l$4QVT$$RUWD$0\$LL$0QPWD$\T$<PL$LQW`VR=<][t!D$L$PVQ;T$( D$ G`PxWxt VD$ 
_^̸Pl3ĉD$LD$TL$\T$`S3UD$,D$`VWD$@L$DT$<\$\$\$\$0\$ ;u'ȁD$;ujlh,jAjtj ιD$;D$$PL$ QL$_$_ Gp D$D$(Pd$@:u+ƒ ~D$,l$9\$ iD$9\$,4T$HRPL$WFg|;|;|$@t W$|3W RP`tL$O$T$$W \$l$;t U9\$t D$PD$;t PL$$QT$ RT$W$T$TPjhu$P* $Rу2hhjhjjj _^3]$3輮Sj$QjP$R^S,PP L$ PQ3tdj $hdR  tYL$QV$PRVD$$Hh$R9UW;ZhhjAD$ t$hV\$uhhjAF9\$ 6h$QU ;*I4 yFƄ4 F4AtD$$j $hdR  AD$|$L QWtRGD$V$RP(L$G t$h9\$(u.$QU ;@ghhjA$RU$ ;~: y@Ƅ D$|$t$D$|$l$EP@:u +j $hdPӃ UV$QRӃ j4hpPӃ L$4uF>,uj FhLV׃ thhjjjkj 誚_^3]Ê< t t FF< u>uhhjpjkj u_^3]j FhXV׃ thhjijkj E_^3]à SA|Z~-t ˀ0 wFP EFt$[uhhjrjkj _^3]ËH T$UR_^]^]VP4uhAhjjqj 膙3^ËD$WPjjjVL$,T$(D$$QL$$RPQVUV(_^V*Puhhjjlj 3^ËD$WPjjjVOL$,T$(D$$QL$$RPQVUV(_^̸<vl3ĉD$8D$@L$DT$TS\$LUl$TVt$\D$,D$dWL$$D$ 3L$QT$,T$RD$ D$D$D$4D$PL$$QVtR|$%uoWT$RD$PwL$(QT$$RD$$PL$0QV} uA%luUh<j _^]3[L$83趗<ËD$T$4RPl$tBL$ T$(QRD$$PL$@UQt!T$0D$L$$*t;ut$,WT$Ru UL$H_^][3_^̋D$L$ T$VPQjRu^WVV u_3^Ët$t PC^>_^̋D$L$T$PD$QL$RT$PD$QRPh܌h $̋D$L$T$PD$QL$RT$PD$QRPh܌h $̋D$L$ T$PD$QRPhHh@ ̋D$L$ T$PD$QRPhHh@̋D$L$jjjjjPQhHh`=$̋D$L$jjjjjPQhHh`-$̋D$L$ T$PD$QRPhXh8 ̋D$L$ T$PD$QRPhXh8 ̋D$L$jjjjjPQhXh9}$̋D$L$jjjjjPQhXh9m$̋D$L$ T$VPQjR9u^WVVu_3^Ët$t P#>_^̋D$L$T$PD$QL$RT$PD$QRPȟh$̋D$L$T$PD$QL$RT$PD$QRPȟh$̋D$L$ T$PD$QRPhXh9m̋D$L$ T$PD$QRPhXh9m̋D$L$jjjjjPQhXhp:$̋D$L$jjjjjPQhXhp:$̋D$L$ T$VPQjRu^WVGVu_3^Ët$t P胭>_^̋D$L$ T$PD$QRPhdhM̋D$L$ T$PD$QRPhdhM̋D$L$jjjjjPQhdh$̋D$L$jjjjjPQhdh$̋D$L$ T$VPQjRyu^WVgVu_3^Ët$t Pp>_^̋D$L$ T$PD$QRPhth-̋D$L$ T$PD$QRPhth-̋D$L$jjjjjPQhth$̋D$L$jjjjjPQhth$̋D$L$T$PD$QL$RT$PD$QRPhh`.$̋D$L$T$PD$QL$RT$PD$QRPhh`$̋D$L$ T$PD$QRPhXh:̋D$L$ T$PD$QRPhXh:̋D$L$jjjjjPQhXh`;]$̋D$L$jjjjjPQhXh`;M$̋D$L$ T$VPQjR9u^WVVou_3^Ët$t P3n>_^̋D$L$ T$PD$QRPhhQ̋D$L$ T$PD$QRPhhQ̋D$L$jjjjjPQhhQ=$̋D$L$jjjjjPQhhQ-$̋D$L$ T$PD$QRPhXh08 ̋D$L$ T$PD$QRPhXh08 ̋D$L$jjjjjPQhXh8}$̋D$L$jjjjjPQhXh8m$̡8Puh8PSh9jG̋D$L$ T$PD$QRPh hM̋D$L$ T$PD$QRPh hM̋D$L$jjjjjPQh h M$̋D$L$jjjjjPQh h M}$̋D$L$ T$PD$QRPhhM]̋D$L$ T$PD$QRPhhM]̋D$L$jjjjjPQhh@N$̋D$L$jjjjjPQhh@N$̋D$L$ T$PD$QRPh8hQ̋D$L$ T$PD$QRPh8hQ̋D$L$jjjjjPQh8hQ $̋D$L$jjjjjPQh8hQ$̸ ƆD$$L$ T$PD$QRPjL$QT$RD$ D$u Ë$L$VQT$ 
D$ D$$RPT$ ujShLj jgj 蠆L$Q^ ̸&l3ĉ$$S$U$VW$ jP tDSjht D$P L$Q("hhdjhjxj V3IPT$RVXVtW@Wt̅tEt PuƋ$_^][3t̋D$VjPCuhhdjjyj U3^ËL$T$D$ WQRPVVp_^̋D$L$ T$PD$QRPhh<̋D$L$ T$PD$QRPhh<̋D$L$jjjjjPQhh<M$̋D$L$jjjjjPQhh<=$̋D$L$ T$PD$QRPh܎h7̋D$L$ T$PD$QRPh܎h7̋D$L$jjjjjPQh܎h7$̋D$L$jjjjjPQh܎h7}$̸ Fl3ĉ$$SU$0VD$ $ W$0PډL$Aujzhdjsj~j =3@uL$,uBV9$,tT$RQV D$PVucuL$QjT$ hRD$PjL$ hQՋ%hhdjoj~j 蒂VL3|$$4$,VjjjUWSRnU؍D$8 ;u UQHV$(tT$SR'"D$jjjjjSPhh<$Sƃ$_^][3 ̋D$VjPuhhdjj}j 襁3^ËL$$T$ D$WQL$RT$PD$QL$D$L$( _^][hhjAhj 9rt W=D$t PK_^]3[Ul$V3WUc‹VUhǙ+ L7~FIu;t$~+~VjPq 3_^]̋F SPb؃uaNQb=uNVRpb;>|$tFPYb;'DSS1[ËNQ?b= DSS2[h<hj~hj q3[̸pFSUl$WPa NQaVRD$a‹؋FPa |$tERSA1D$ _][YERSA2NQ{a;V,R[a;tFP?a;XN Q#a;PT$ RCvNPQT$R߈jD$8PL$$QވT$(RTD$GL$FT$E L$D ( L$,^3X,S\$UVWS3`~3l$IVSY@UPtSF-;|_^]3[Ë_^][̋D$tt @D$F3̋D$uËHA̋D$VWtDt>@Pt.L$QVttKu0hKh`jt3hQh`juhj Wt V3_^hNh`js̋L$D$V0FW9WRPxuO V QRq_^̋D$Q T$D$Q T$A̋D$QT$D$QT$!̋D$QT$D$QT$̋D$Q T$̋D$Q T$.̋D$QT$~̋D$QT$Ul$u3]SVWU3~DSU7D$$NPQwuT$F RP2tUC;|_^[3]Ë_^[]̸Ul3ĉD$|$S$U3VW\$4D$$l$u8|$hWiG$3;u4;tMUL$(豀|$$VhW  D7+R$D$4D$,"PuUXC\$4D$( PQRt$D p tPjD$,uPL$@jPQAT$H T$,PI@u+‹؋Fx(puz%yH@uj333;|$|$L$~<0t ȃD@;|L$ L$ L$tD$ D$D$D$(|$|$|$D$ |$ |$|$|$333;~$Iȃ9|t 0B r~v@;|ߋ|$0L$ӋǍDD$0t@PQL$q;$T$$4:D$,S/PFVSL$D=Qz F3Ʌ~B|t19P^v"\Fx|FF|FA;|L$4t$(FRt$,;l$|$hhjAjtj Rt WX3t$$|$(uƋ$_^][3RĀVWiu#jPhjAj~j qRWX_3^Ë7F jWhj{N AV zt[t$ ‹HVPWl/ t;SVVWu/V-[tD$t!T$RPWU uWĕ_3^Ë_^̋D$tt @D$閎3̋D$SV3Wtt@Pt3|$WVw|$8V_^[hhjtDhhjs6=uhhjuhhjr hhjuhj PV_^[ËԨƨ̋ :tT$;t Au3ø̡:̋D$:SVW3f uF4L$PFPQoV 貔teGtYVPtKD$3Pwe\$ AuB@tWPgt _^[WQV _^3[̋D$L$hPQ ̋D$QT$:̋T$HL$>=̋L$D$PR\:̋D$L$T$ PD$ QL$ RPR>̋D$L$T$ PD$ QL$ RPRm@̋D$L$T$ PD$ QL$ RPR>VW|$ t6?t15:t'>t"jPHQP< u >u_3^ËPBP3~tv NQtϋVjRt>uFHL$ 1VPPL$Qj_^̋D$tt D$ 3̋D$tt D$释3S\$VW|$ÍP@u+‹jUGhؕPv ujVhؕjAhj CM_^3[WSVu }$j\hؕjvhj MV`x_^3[PL$Qjt$ fV>x_^[̸LSUl$VWjUVjnhؕW'vjo?hؕPv ؅t,t(L$QUt$WVS1tVw_^][YjqhؕjAhj [L_^]3[YËD$Ul$VWS3~*VSM(D$WPUуuSF;|_^3]Ë_^]Vt$ W|$VWou_^ËL$ QB u_3^QAdA jqhylЃ_^̋D$ SPD$XPML$[t hh jjPn3̸JS^ U3Wl$l$D$ 9n|tl$D$4F@ h<D$ tD$NQT$9nPFTUPNA 
u&G(tVjFd"n`~hӃy|$u&G(tVjFd(n`~hӃLWL$t#tt-VB tEtuD$ u_]3^ËO D$!_]^Vt$W|$Gt$F@ uZN VjFd$уu_3^ËT$RD$PW t(L$yu_^ËV VjFd҃t_^Vt$W39~|FH PFTQRPN\QVXR;u hh jAhj A_3^Ãu_xFTP{;NTWQv@(tV VjFhFd*҃tFTPG7;|_^ÃuN VW~hFd+у_^ËVB tF Vj~h~dЃV_^̋D$tT$D$P T$D$t=@ @u7HuL$ QL$ QRPw7ÃuL$ QL$ QRP;ËL$ QL$ QRP=̋D$tPuUl$ VWU3&~7WU(P t?VtPVUG;|h h jkjnj 3@_^3]hh jljnj @_^3]S_|(d$SUPVW*Wy܋D$[t VP V_^]̋D$L$T$ PD$ QL$ RPQjЂ̋T$ꀉT$0L$选L$D$@d̋D$L$Ad̋D$@`̋D$@h̋D$@TWtUP^tFVW3~1IVWhRh jjPbWFk ;|^_3_̋D$@l̋D$@p̋D$@|Vt$ W|$uthW6u hh jyhj F>_3^P(xuVuhPt$u pt$t.VNu hh jxhj =_3^ËL$ t Axuxt Axup_^Vhh hf uhh jAhj j=3^hjV= ^̋D$L$H@$pVt$FHWtVЃFt~|u P_FFXtPCFXFTthPMPFTWVj3 G_^̋L$QT$p L$QT$_D$ L$QPR`̋D$L$A ̋D$@\̋D$PauËL$QD$T$jbVt$FtP^D$F^ËL$ N^̋D$ L$T$PQjRVW|$W|u hh jyhj ;_3^Pnxuj>uhPIpt.VLu hh jxhj );_3^ËL$ Axuxt Axup_^Vt$ t=VKuhh jxhj :3^ËD$@xup^Vt$VuVe^̸D&:l3ĉD$@Ul$PVt$PFW~u;wPL$fHfL$$T$ L$& 4_^3]L$@39Dà |HT$PL$ T$$L$( Zt3-t.+t)P@A@A8.u!P@0|9P@0}0A0AZAZu3=+t -jHSX KX@ I K H0[-u؋+URD$L$^FN ^^P^^L^T^d^\^`^h^l^p^t^x^X^|.JF;u!hh jAhj '_^3[;t?W RPMON W8VH;thKMPFPM ;uh롃H^H;t G;tF(F( ;t G;tF$F$;t G;tF F ;t G;tFF;t G ;tF,F,p;t G$;tF0^0;t G(;tF4F4;t G,;tF8F8P;t G0;tF@F@;t 4;t~DFDPQVjF<зj u'VQhEh jAhj M&_^3[_^[Vt$u3^Ë8ujG8t^D$)W|$u3_ËVpD$;t"PoKtQRFp3^_̋D$tt D$6s3̋D$tt D$s3W|$tD@t;V0D$;t&PJtQPgFQ23^_3_W|$tG@t>VpD$;t(P|JtQBPFQr3^_3_̋D$tt D$`3̋D$tt D$r3W|$u3_ËVp D$;t"PItQ RkEp 3^_W|$u3_ËVpD$;t"PItQREp3^_VW|$ HQBP3B~"QVR?pHQF ;|ދ_B$^W|$u3_ËD$ Vw;tPHtGPoDw3^_W|$u3_ËD$ V7;tPHt P!D73^_̋D$uËB D$&̋D$tt D$Fp3̋D$tt D$^3̡:Puh:P3"h8<j'"̋D$uËD$̋D$uVt$}SUW8WF;} l$VWUP/Ft F;|_][^_][^Vt$t%POL$;~|QQG^3^Ul$VEP t$;z|vSW}VWWD$ ؃ E;tGtNQWhT$jMVWE;h};}VWHF;|D$_[^]^3]Vt$u3^SWSY|$;}|$Unu&u3l$RGPS2pD$8;|tOQSp3 WSp3҅‰T$D$Pit0WUSu u%hh<jAjqj Uh]_[3^Ã|$t1Sw;}$NQSv@F;|]_[^Vt$Wt'|$tP W@B3Ʌ_^hUh<jCjsj Z_3^S\$VW|$t$uUl$~+t#PD:PUVWSwM]_^[Å}ǍP@u+‹KVWQ> u]_^[Ãt#uVWM%SBE]_^[ËCh]_^[_^3[̋D$VW|$ jPW] }_^ÅtQ;~VRu3Ht$N;|$tAVPWR >_^̋D$P3uÉD$Vt$Wt>u {ft?D$PW*tL$T$D$QRPW^ut;>t WIf_3^Åt>u>_^̋D$P3uÉD$Vet7D$ PVtL$T$D$QRPVu Ve3^ËL$ T$D$WQRVP3Ve_^VW|$jW=u-hh<jwhj WhXj 
_3^ËD$L$T$PD$QRVPV _^̋D$VP2uh/h<jmjrj W3^ËL$T$D$WQL$RPVQEV _^̋D$L$T$ VPD$QRPju^ËL$ T$D$WQRVPVd_^̋D$L$T$ VPD$QRPju^ËL$ T$D$WQRVPV;d_^̋D$uÉD$S\$u[Vt$y3UWS;}!l$VSUPp?t F;|_]^[_]^[W|$u_Vt$y3SUW~؃;}'l$VWy@~ut F;|][^_][^_Vt$t!V!L$;~| QV^3^Vt$t!VL$;~| QV^3^SUl$ V3Wuhh`jCjhj _^]3[Ëuu t9V|$ ;}D$P`؃t4WSV_ u9hh`jAjhj t St V_^]3[Ã}uu_^][̋D$uËL$ɁIH̋L$u3ËD$@IRPQ9 ̋D$u39HVW|$ t7u(uhh`jAjmj _3^S\$t=PSQ;L$ ɁD$$IN@NRPQY8ut;7t V[_3^Åt?u7[_^̋D$VP-uhh`jmjlj 3^ËL$T$D$WQRVP u V;_^̋D$QT$>̋T$HL$>̋T$HL$~̋L$BD$̋L$BD$̋D$QT$nQ̋L$T$>R̋D$ T$L$PQP ̋D$Q$T$.̋T$H$L$~̋T$H$L$̋T$H$L$N̋L$B$D$̋L$B$D$̋D$ T$L$PQ$P ̋D$Q$T$P̋L$$T$P̋D$HL$T$BD$`T$BD$T$BD$PL$QT$L$QT$D$ L$T$PQR ̋D$HL$@OL$L$ P{S\$VW3u!hhxjChj _^3[Ë3u wt!D$PGYt6WVu;hhxjAhj Xt WXt V_^3[Ã;u3_^[Vt$Wt#|$tPfW6_^_3^̸S\$V3t$;u^3[UW|$ %D$t>P.l$,L$(PWUQjBuh'hxj VT$/l$(t&WN4t~D$$UPV93 tk|$Ct[Ctu"؅tEu|$uL$$QWSd t&T$VRS D$HSQ~u%hChxjAhj _]^3[_]^[̋D$xu @D$*39H̋D$uËVt$u3^Ã~uFP39FL$ ;}؃~uQNQ^ËF^̋D$P'uÉD$VW|$ t7u+kVu hhxjAhj _3^S\$t,PS^4L$(T$$QD$$RPVut;7t VV[_3^Åt?u7[_^VW|$jWR2u-h hxjwhj !WhXj _3^ËD$L$T$PD$QRVPVJ_^Vt$tl~uFPh39FL$ ;}H~uQNQSvt*V9D$th[hxjzhj n3^ËF^VWTu hhxjAhj ;_3^Ë|$t,PW2L$$T$ QD$ RPV&uVT_3^ËL$ VQV~T _^̋D$L$T$ VPD$QRPjdu^ËL$WVQV5T _^S\$ Ul$VW|$USW u_^]3[ÃVSW utW;~|VW3~u(FPuL$ jQjV_^][Ã~u_^]3[̋D$VPe$uhhxjmhj 3^ËL$T$D$WQL$RPVQu V_^̋D$L$T$ VPD$QRPjtu^ËL$WVQ]VR _^SVt$ 3ۅu^3[Ul$Wu Vu_]^3[Ëttt u0X(+1$=tw PXFQ(D$tKT$RjP2 t7D$Ct&tu"tt=uW!=u WG_]^[VjEhja63Ƀ ;t,D$NFN N@;tVЃu V 83^Ë^Vt$tFt @tVЃV7^̋L$AuË@ tL$̋L$AuË@tL$̋L$AuË@tL$̋L$At@t yuL$3̋L$At @tL$3̋L$At @ tL$3̋L$At @$tL$3̋D$T$V1+u tt^ËB^D$IL$;RA^T$D$VWhhjH43 ;tph8FF~~9-F ;t@F~^~ ~[;t VӃt/nEVPt_^][ËF;t @;tVЃVB43_^][̋L$t#u;Ahhj jP+ËIhhjjQ+̋L$tuAD$dIL$[̸ l3ĉ$$$$t/t$3 ÍT$ D$hT$hL$T$4$$L$HD$UD$PW|otkVWug;}SVWiу+T$u?tu%L$ PQR脰L$ PQRTuWF;|^$ ]3; ̋D$L$ SW|$ P3_[̋D$L$ SW|$ P3u_3[PW_[øUl$ VW|$WUu_^3]YËttVUp_^]YSUO;}~SUQȃ+ujtu#WFRPm OVQR>u=uGNPQu*WFRPOtUC;|[_^3]Y[_^]Y̋L$Q T$@)D$L$Q PR̋L$Q T$`)L$Q T$`)L$Q T$,D$L$A̸FD$ SUV0Wh)hj j 9(l$0~L$4U3t PWh+hj j 't VL$yR;}^FWPHt$ItxuT$RT$(RUPу|u"FPGv;|L$_^]A[Í\$T$Bu _^]3[Ë 
D$(KHtu+hhj hhjjQ'_^][SW|$u_3[VhThj- uhWhjAj|j ^_3[h]hj j ~&V|$$GVPVt'*V/hehjej|j 3 OQ/hjhj j 9&^_[SW|$u_3[Vhuhj, uhxhjAj}j ,^_3[h~hj j ~%V!|$$GVPVt'VV0.hhjej}j 3 OQ[hhj j e%^_[̸ 6SUVW]hhj j D$ &%l$0EL$4xj\$4hhj j $t$4L$$QVjU{ uT$Rz_^]3[ ËD$tuD$Pu] L$QUhhj j $Uzj\$4P}*hhj j T$D$ P_^]3[ Ã|$ D$~|MQVRxh hjGjP$\$,WS$tD$@F;D$ D$|5hhj j #WJThPMS_^]3[ Ë\$hhj j #_^][ ̸ fSUVWh hj j D$ V#l$0Et$4xj\$4h&hj j (#L$8QVjU4uT$R_^]3[ ËD$tuD$P[ L$QXSh-hj j "Uzj\$4}*h1hj j "D$ P9_^]3[ Ã|$ D$~IMQWRPph:hjF jP"\$,VS$t2D$@G;D$ D$|hChj j "D$ _^][ h=hj j !VZhUS._^]3[ ̸UVW|$ Wit$ D$PUjVEuLD$ tuL$QLZ T$RQh|hjjhj _^]ÅtP_̋D$T$ RT$ A$RP@QIRPQSP̋D$T$RP@QIRPQSP̋D$ L$PD$QHRPQjR]Pj̋D$L$PQMPK ̋D$L$PQMP ̋D$L$PQMP ̋D$L$PQMP ̋D$L$PQTP ̋D$L$PQTP{ ̋D$L$PQTP ̋D$L$PQqTP ̋D$L$PQ`PK ̋D$L$PQ`P ̋D$L$PQq`P ̋D$L$PQQ`P ̋D$L$PQ=P ̋D$L$PQVhhj t/u Vt3^ËD$Fp ^̋D$T$ ;vVt$t PFthP!PʭV^̋D$Vp Fth0 P薭t P/V^̸D$Ul$8SVȋWT$ <;t,ً+B\$D$MQ3衪~PUVR蠪ƒX@u+;uD$WPR  tMQFU;|\$URA;}ukEhhj D$$h F聧hOhQF ~tyWSP EVP诨t9L$ 9tCT$ L$BhhjAjdj _^[3]V_^[3]_^[]P FthP!P褫V _^[3]hhjqjdj i3]̸l3ĉ$$V$W$|$ D$4D$u_3^$3S$\$(u$$T$`L$`$T$,D$  +^$L$`D$`$L$,D$ U+uBh?hjjgj st U+]$D$[_^3-Ë V|$,WRD$D$ ħ~ $|$($ D$OPQ觧P@u+ƒPU+Kuf~t`T$hUhj jT$DiND$DPQk|VPR:_33hahj j)33D$$L$SPEQMj/Rh(PQ~vET$`RP| (|A$ uNUD$QRPuNUD$QRPA tChhj j T$$BL$~~~~ F;thpPe~_^Vt$W3;t-F>~~~~ F;thpP~VQ_^Vt$t P/D$ P2+3Ƀ^̋D$T$ P H €t ɀH ̋L$D$!H ̋L$L$p~L$L$pD$L$H HVt$~uFu^ËD$ NPQ蹡^Ul$W3;u_3]ËE;thpPS\$;u [}_]蠣E;u[_3]VSL~:IWSIPS t9EVP tSG;|ˁM ^[_]V^[_3]̋D$L$ d$:utA:Bu u3̋D$D$ :utA:Bu u3̡W|$uh ؞u7_VWPbt"VP,P6 VQ蹟^WR_̸ D$$$t& $QPtPR跠 h j jD$ h`Px ̡th Pܢ̋L$uËD$Q Pt@uSVWt39qtu9pu yxuytu9Xu yxu9ytu9xu yxu@ u y` xt@ Q P u9qtu9XuIQPu_^[_^[̋L$Vt$W~QVF~_^̸̋D$ L$T$hPQRW̋D$L$hPQp h`̋D$hPAdVt$ D$PQh FT$RPh0D$(^̸Vh+`t$ujhhئjAjfj"3^YSUl$WU35SU3wƍd$:utP:Quu3u D$PWA0ƍI:utP:Quu3u3L$QWY tTUC苝;Vt$_][^Yjrhئjjjfj"WGRhЦPhȦQhj0T$hRb_][3^Y̋D$Vp4~t2W|$QWtFT$RjP ~u_D$^̸Wj|$ujqhjAjej"3_SUVt$$V3膜UV脜D$$p4F|$N׊:utY:Zuu3Ƀt7ϊ:utP:Quu3t F uT$jQR\ t.|$~tMt$$VE;:|$^][_j{hjAjej"D$$P+^][3_hhjojej"OWQhЦRhȦPhjL$@Q4^][3_̸6VFWt!D$PD$ PSl 
|m7NjShh,WD$tFT$ RD$FSЃj t$T$D$L$VRPjN>Ȼu#hh,jAhj"_3^V_^Vt$V u"hh,hhj"3^ËL$ S\$QV[^V3ƍP@u+ƒ rFj hHV  u1F W= P׃tNFQ׃u_3^3^SUl$ VuWP@u= +ƒrjh\V׃ uX*ƍP@u+ƒrIjhTV׃ u8X= P׃tNFQ׃u_u^][_^]3[øD$VPQD$ u^YÍT$RV L$V/D$ ^Y̋D$Ht@t 8tL$hh,hhj"3ËD$Ht@t @tL$hh,hhj"3ËT$tL$A@t IT$L$̋T$tL$A@ t IT$L$̋D$L$@?H̋D$L$T$ HL$PT$HL$P K̋D$L$@ ?HSUl$Wڅu!j|h,hhj"Y_]3[VUY u%hh,hhj"%^_]3[Ã~(?@uL$GPQ  WU舖Ehh,jihj"T$,WhdRPhXj^_]3[ËF(USVЃ ?@th= U菘l$L$QUFt@PS^[^_][ËF uF0tT{t){t#WSVЋ؃ u^_][ËVS҃^_][hh,hhj"^_]3[hh,jghj"UPhXj$^_]3[̸FSUl$VW3WU3|$؃u*hh,jsjtj"aUhXjT D$(uL$ D$PQuD$,L$ T$RPu+hh,jtjtj"L$4Qhpj Ijuh"h,jAjtj"D$$WPwT$3SVĴD$SWt VD$_^][SV\$P[tL$T$QPD$VRP^[W|$VWLL$\$$T$PQ u.j]h,hhj"ShxWhXj(_^[SV\$ePt%L$T$QPD$VRPP^[ËL$T$ VQL$ RT$ ^[̋D$ L$SPQ ؃u[UVWS3~Ol$WSP@L$RPUQt2D$ t jVP۱ V2SG蛒;|_^][_^]3[̋L$3t$D$X̋L$3tD$8̸FVW|$3D$tt$D$L$T$VPQRt%t!D$PW{L$h@QR_^Y̸ D$PL$QT$$D$ L$RPQT$R$̸ D$PL$QGT$$D$ L$RPQT$R$̸ VD$PL$QL$$3t$T$PD$RPL$ Q̸ D$PL$QL$$3tT$PD$RPL$ Q̸ D$PL$QgT$$D$ L$RPQT$R>$̸̋D$ L$T$hPQRy̋D$L$hPQa hQ̋D$hPqU̸Xl3ĉD$TD$dVW|$hWD$ 3ŏ~3VWǏPL$jPQT$RD$$Pj WF蒏$;|͋L$\D$_^3XU誐ujh8jAjgj"3]S\$VWS3<~?IVS9GuGjPtPUSF ;|_^[]hpUThh8jnjgj"/GOPhЦQhȦRhj8_^[3]W|$ tVtPVjU@hPR ujVhjAhj"^3_ËOPQV ^_3_W|$ujchjkjdj"u3_VjVt&ǍP@u+PWV7 u#VzjqhjAjdj"%^3_Ë^_̋D$T$ +̸<D$$D$@}3t; uh3 - tVP芈t8>u˸^jLjPh4jAjhj"3^̋D$VPuhh4jfjjj"3^hh4j8 uhh4jAjjj"S3^WL$ HP_^̡ h3 P  S\$ |$Ul$Vt$t S|u,h UhaVWS^uhPVLWS3@I|$ th UhaVW~hDOVWWS H@u PV u QVPQh| VDW|$ thX VSG襇;j_^][̋D$%=w\tCt<=uV|$h QthpRVh\RV3ËFjQ@QPR=tËFQ@QPRfZ̸ UVt$W3Vl$D$;uD$$L$(T$UP/_^] ËFHL$OS;tыVPPL$QUoORD$PUу ؅uT$(L$,jRT$([_^] ËGt.SWЃD$tsL$ PD$0h PhQUXG$t1jSWЋ uD$-D$uF3>0uFhhjhjnj"GOPhЦQhȦRhjѹ0_3^ËD$_^ËL$_^Vt$FPju(NVQhЦRhȦPhjh3^ËL$ ^S V>tPӃt F>u^3[À>u^3[ËƍP@u+W|0;t.$QӃtO;u_#^[;tG_#^[S\$Vt$zWhrDvhP uhshjAjoj"s_^3[ËϋӅ~,AA:ABu֋_^A[^3[̸ƴVt$ uhhjkjqj"3^YËƍP@u+hhP@ȃ L$uhhjAjqj"讴3^YÀ>SUWG:GD$V t V t$V t V D$ˀ0 w ˀawuéȀ0 wL$ ЀawVL$ ؈]E?VD$t+(_][^Yhhjpjqj"轳D$$P_][3^YËL$Qhhjqjqj"艳_][3^Y̋T$VWpI@u|$ +ƋVRW  u>t <.t_^3_^̋D$hP!P!|̸覲D$ PL$ QL$T$ RD$ Phh Q t3S\$w8T$w,L$ w D$=wFVN[3[Ul$V3ɃW~_^3]ÅtEd$Ѐ0NG w0"ЀAw7ЀawW 
ȅu_^UM]̃|$u3S\$ UVWS3vx~wl$WStxpքt':t,t.t JBuB:t>+uF3ɋ@QL$jjPUVQ蝕tSGw;|_^][_^]3[̸D$ SUVW3P\$D$k< < ̓t3us<,uoiL$QPS 3ۍ~E<:uϽ]؅td~'<,u#D؍~tOT$RjSN FFeu>u*h<hjmhFhjljmj" T$(ROD$h= Py _^]3[ÍL$QjPT$ RD$_^][̃ukte?t`>uh stEGPQKxu4WRtPPtuQ+3øW|$Gu3_ËL$ u#Ou GG_;uG_Ã~, NjT$< uVR48^tG_ËT$PGtG_̸FD$SU3UUhPl$識؃;u]3[YSu~EVWUSuQ~ =uv>u~t$tSEt;|_^SRD$ ][Y̸覭SUl$VWjj0UD$̏؃ |4ISU P t$t\Sj0U蘏؃ }ϋl$U33t~-SU5t8uxt$tUCt;|ӋD$_^][Y_^]3[Y̸ l3ĉD$D$$VL$QhJ jj:PD$(D$,D$0u^L$3Ŭ SW|$ u|$_[3^L$3蛬 Ë\$tD$$u~=_[3^L$3o Ãut$;t_[3^L$3N Åt;t|^WT$RV耬+P >jQsL$4D$8;tK+RT+RDPH _[^L$3 ËL$ T$D$L$VFN L$(_[^3̸貫 VW|$ jjjUWޕVW"PPhPR Vt_^S\$VWS\jjjUVHWQWhPR WZth@VOt,_^[Vt$ j:Vh VtL$ ^Ët$ ^ø薪l3ĉD$Vt$Wj:Vh VtL$ {tCt$ t,s_tWD$ PV豛 uVd__3^L$3BËL$_^3/̸$l3ĉD$ S\$,Wj/Sh u_[L$ 3$VS[t+ÍL$ VQ؃tSGT WRV tI;uE^t:WD$PVϚ t^_[L$ 3h$VVf^L$,^_[33C$̡?P襠uh?PhAj̸и̋D$ L$T$hиPQRX̋D$L$hиPQA hи&1̋D$hиP4̸̋D$ L$T$hPQRWX̋D$L$hPQ@ h0̋D$hPA4̸̋D$ L$T$hPQRW̋D$L$hPQ@ h&0̋D$hP3̸̋D$ L$T$hPQRWW̋D$L$hPQ? h/̋D$hPA3̋D$PhR h R  VW|$ t-t$t%PQ3uVGRP_^Ã_^̋L$D$w$pS T$ PËdS dS dS dS dS dS dS dS dS ̋L$D$tw $S @3ÍIS S S S S S S S S hиv.uËL$T$ L$PA̋L$9t3ËD$tQD$ tIQ̋L$VT$ 1;2$$U B^D$IL$vR^T$AD$BR^T$AD$R^T$AD$*JR^T$AD$醫R^T$AD$B^ÐT T T T T T T T  U ̸fl3ĉ$ $$ $S$W T$Rh<h0@ D$Ph<h$$ L$Qh<h @HT$RQh @HT$RQh j@HT$RQh ~ L@hT$RPML$QT$ RhH9Yu/CKSPQRPhh L$$hQ9у:UVWD$3K PhT$jR9D$$ȍ@u|$+OOGuȃt|$OGGuf fE|_^]L$QT$Rh CD$Phh *HQT$hR聏D$PL$ Qhغ]$D$[3WÌU U U U :V U V aV \W W|$ V$Y D$ hܻP98^_ËL$ hĻQ8^_ËT$ hR8^_ËGHT$ QhR7 ^_ËGHT$ QhR7 ^_ËGHT$ QhR7 ^_Ët$ hV7GhjPVg^_ËG8pu-NVFQRT$PQhhR?7^_Ã8uY|$ ShW7$V QhdW6 uhX Wb[^_ËD$ hLP6^_Ët$ hغV6OQV胍^_W 6X WX X X X xX X tY VW>t{FjjUP芉 |_PFPىtsPtdV3f~'d$WVfPSetVGtf;|V7f_^h'hjAj{j"觟_3^h hj~j{j"芟_3^hhjj{j"m_3^̋D$SUVW338u _^][ËHu 9X tQCD$ @ QT$D$jj0P |$L$VQ%}U vPw|$tT$VR@}Uj NtVtKD$W_P31dt L$Vj0Q3k }_^]C[h|hjAhuhjA hahj}jzj"9WpSj_^]3[Ul$Vj;Uh u^3].Ct@PXL$QVR耮KAS ztWhE+FhP(VUW jW>豾KWS$39_^]SUl$Vu^][WSUu8hYhhhj"F u 9Ft~tQFQruh_jPD$PFL$QRPtML$QT$RWF uhhjA_^L$D3Hjwhjrjsj"WS3L$L_^3HVW|$t'PQRD$PVh4WVGtL$QPj D$Gt'@RPL$QVhܼMVǾD$_^̸D$ SUV3WPD$2ۉl$l$l$$l$ 
Yd$L$0UQYw4Wƍ:utP:Quu3uGGD$:utP:Quu3utD$mUƊ:utP:Quu3Gt0:utP:Quu3uT$0REX;3D$,;p;t|$t|jjRVC{ ;|PV{;t PD$|$uI9l$uChhj{@hhjxjwj"莑GPhXj~ _^]3[Ät9l$tu;V5PV|$$>PD$;t;uhhjzp<;tq9l$t5X؉\$ ;tD2t9WSVt+L$OD$T$ L$_FV^][hhjAjwj"讐T$RD$ PױL$Qͱ _^]3[Ã8u _^][;hhjyjwj"Z_^]3[̸|̋D$ L$T$h|PQR?̋D$L$h|PQ<( h|F̋D$h|P̋D$Vt$Wh PhaV%|$ ?t/j hVlQVt0jhDOVM tj hV7WRV_^̋D$ L$PQAVt$ V腥T$ J4ytI;t yuVR<^ËAP>^̸l̋D$ L$T$hlPQR>̋D$L$hlPQ& hl̋D$hlPq̸̋D$ L$T$hPQR>̋D$L$hPQL& hV̋D$hPUl$ EVP葒L$T$P@Ph QhR#EP3:T ~\SWMVQ6TRj*L$,؋D$0Sh PhQ~#S訸WD$DRP MQFS4;|_[^]S\$CUVWP3S~3l$KVQSUR[tCPFzS;|_^]3[ËG_^][SUl$ 3ۅ^9\$TD$HW|$uP@u+‹@~!hhhj~j"e_]3[Vuu/hjPuL$QVt"hhhj~j"^_]3[hlG؃t@uD$P@u+‹T$CWRP覫 tNSQEQu@u\FVUݪT$RWUWU+uQ|$h= WEL$ hq QvE_][3^jVahd>D$VPA|$t$FWt$B;h= WED$ _][^hhhhj"{KSQhЦRhȦPhj~0hhjnVMhhjAhj"u{hhhhj"R{KSQhЦRhȦPhj1~0̀?@VuGPS@ WujhhHhhj"z3^UVSj# ?@uVSb]^h= VC]^̸FzD$ SU3Pl$ D؃;u]3[YVt$9.t^]3[YWS@t$US@x>u.=$ tQFϊ:utP:Quu3t ~uT$jQPl t ~tSE_@;dD$h= SBD$_^][Y̋D$SUl$ h HQRh PhdW=$tJV$d$FPUvmt"t3hDOWQW >u^thPW][hX W][̋D$D$tuItQA̸̋D$ L$T$hPQRW(̋D$L$hPQ h̋D$hPA̸0̋D$ L$T$h0PQR'̋D$L$h0PQ h0&̋D$h0P̸`̋D$ L$T$h`PQRW'̋D$L$h`PQ h`̋D$h`PA̸|̋D$ L$T$h|PQR&̋D$L$h|PQ h|&̋D$h|PVS37=~IUl$ h UhaW VS=PW!hX WSF<,;|]^øuSWك;h u#VhW: [Vq_[ËCNQh VhpWD$  hjT$(RWhX Wj0_[S\$ Vt$W|$tT{~h VhW {~h VhW {~h VhWi C tVP{~h VhW9 ;u2{,{&{ { u{h VhhW _^[̋D$UP3c;SVt$W|$hX WBL$ UQG;؋ tECtVP({th VhWx [VT$RE:;|_^[]W|$?D$ S_P茽Gu[_VS3:~0VS:W3ɅQjPRSt9SFf:;|ЋOjQ}WR^[G3_ËGP^[_ø_̸sSUVWGj h@PD$3  u\$DG0:utP:Quu3V9t5Ol$QUu'hhHhhj"r_^][YhWVWU0.VU9~tU9HPU 9 xt hhHhhj"8r=|$t$>tKhhHhhj" rthPR W ;thBU:_^][Yh1ttz_^][Y_^h][Y_^]3[Yø6qSUVWh0t$3V\$7SV7D$ PU|Jw`ƍI:utP:Quu3u*OQURJuch0U_^]3[YùTƍ:utP:Quu3u\$kEt\$t$CV\$7;_^][Y̸ oSUVW3|$38D$;ED$(P|$6 \$$L$(WQ6xuQPRSVSJVSqD$WP[5|$L$ PSQK 9D$UR5h03tpD$VP4tPhtKL$H*l$D$(GP|$5;D$_^][ h0Wh0VhmhHjAhj"nUL$QyT$hP R7_^]3[ ̸6nSUVWh|؃u hhHjAhj"WnD$ PD$4L$T$ QR4D$$woPSKƍd$:utP:Quu3uKQW- Ɛ:utP:Quu3uSRWLƐ:utP:Quu3uCPW譳{Ɛ:utP:Quu3u KQWm>ƍd$:utP:Quu3u7S URtbt$D$ FPt$C3;h_^][YhhHjjhj"xlOWQhЦRhȦPhjWo0h|SY_^]3[Y̡u P2 ̋D$}3à } +ȍDËPR2SUl$ VW=3DuG WL2= 
;}b}3 }FPW12=@͍I:utP:Quu3tF_^][_^][̸jD$ HwHÉ$uÍ$RP3t ̸jS\$,Ul$(EVWw},l$uL$QPj3u x u-hhpj u hF3}3+ }4+4DWRP0FtNQgVR^D$@PaL$HQFT~FtstoVD$4L$< ӉVT$H.FN Vu;uh3 -uh'VP0/uh_^][hhpjAhj"]i_^]3[Vt$t)Ft"tFP胔NQzVq^̡VWh P2D Nt+t%tVR&F PNQuƉ=_^̸FhD$PVP趃$uYh@jj L$ hDQY̋Ft)8~ HV 8~ @H N f F t^8uYVWR3.~,FWP.8tNQG.;|@u T$R5 PP_SjjjgWQPGD38.~%VGDSP6.W.ODQC.;|^[̸gWG(`SUV3ۍGPSPwPWqW PW P' uO( RkuO(@SSjWWQ;tJ9tO(F;t'xt9tPkG O(_ G VaO(SShWP;tXG(u SjUWO }SjVWO |O(;tPkG$G$V O(SSjSWRP;t29~H O,8~PR щW,_, o(PWSSj~W_0PO(VD$S,L$;}nQVQ,Pˀ =w=T $0 O0) o0$O0 _0O0O0 O0@O0D$V+L$;|hpV?.jjjGWPO3;t9(~ HW4o4 _(PlUUjRWOUUjZWG8OUUjUWG<OGHUD$DPhWN@GL;u|$tO(Wl$ML$;QW9NVqK tOVQP=Yu _(VQPuD$;tCh@jj L$ hDQ}t$D$WQML$;|O(^][_ÁO(O(^][_Ð         ̋F(ȃtF,t8t ËЃ``uÅtèt F4t3Vt$F(u0hhpjj _8hhpjj E F(ȃtF,t<t ^ËЃ``u^Åt^èt F4t^3^̋F(ȃtF,t>t t6ËЃ``uÅtètN4t u3Vt$ F(t F0u3^Ã|$t~^ñtN,ttN4tڸ^Vt$ F(t F0u3^Ã|$t>^ètF4@ttF,tڸ^Vt$ F(tF0t-|$t^ètF4@ttF,t t F, u3^ø^Vt$ N(tF0tB|$tt2u/F4%tF4 u ytF,u3^Vt$ N(tF0tB|$tqt2u/F4%tF4 u ytF, u3^̃|$ tVt$ ȃ#^ËD$H(tH,u3ø̃|$ t Vt$ ^ø̃|$ Vt$ t^ËN(tF,?u;t7t2~0@u,jj~VI |PVTIPF t^3^Vt$ u3^ËS\$ tK8tQPet[^ËFtPS P^t[^W~t,W3%~VW%8tWF%;|_[3^ËptS>PV t_[^Vt$F(u*jrhpjj R+jthpjj ; D$ u^P2u ^Å}L$Q3P VP҃ ^à }% +ȋDDL$QVP҃ ^ËPR$L$P QVP҃ ^̸]Vt$(FwF#t$tL$QP&t u%hhphhj"]3^ËT$$2^VW|$Wt$PVP]t_^ËGhV謦~,CPV軲~MQG;|_^][_^]3[̸̋D$ L$T$hPQR̋D$L$hPQ h̋D$hPq̸̸h̋D$hP!̸Sl3ĉ$$S$VSD$ 3l~SWd$VSiQT$ljPR@GPL$(jPQx@T$,RD$4P$QQSF0;|_$D$^[3>SĨ̸R!$uj|h jAhj"%S3YËD$SUVWP3}IL$ SQ~FjPsVjRststoht,P8hD$P8L$(QC- ;|D$_^][YËT$h R}hh jAhj"UR_^]3[YËD$h PKhh jnhj"#RNVQhЦRhȦPushhhhj"P=j}hjjhj"OWGRhЦPhȦQhjR0t$hV_][3^Y_][^Y̸̸Hh̋D$hPqhH̋D$hHPA̸NSUVWhH3f|$D$,P3L$,UQVj hR uFx t /FjhP  Fx D$NhL$ L$(j؋D$0T$RPQR̳?u t6SPt&L$,Q3E|$;'_^][Ë|$hhdjAhj"M$hhdhhj"M|$thHWthS_^]3[̋D$VpW8hS u;FNVPFQNRVPFQRPQhSP(G_^à uM3F RhS( uh  th S膞G|_^hLS_^̃VtjWϕ}^Ã~tjV賕|F;G~/^ËO v r;ust :u΃vQ:VuvA:Fu3^̋D$PVq>u3^Ë ;~+Ѐz.t/^RV /^̋D$L$Sh UVpWyj@Vj@WӋ؃u_^]C5[Åu7>.uST$L$ ;~+WV u_^][_^]/[;t++;uPWV  u؍uCSV _^]/[̋D$@UkW=h j:P׃~x/uxx/urVpj:V׃uj/V׃uƍP@u++ƅu ^_5]À}.u ;~!Q+UP9uPUV  
u^_]^_/]_5]̸JSUl$VWU~D$Ph VhhSWU3~`t$WUL$h QhaS 8uPRm PS衧hX SsUGl ;|_^][Y̋D$IwG$ HWz_ËJL$H`@JPQËRSXRB[ø3ÍI   s   S\$Vt$W|$hPϋNhQϋ_^[̋UVP33~jVQ;u3xuHxuBt"uQuh/u}PF+;|u^E.]^1]ËOQ3~FWVR;u xu˃xuQt/uGPF;|3^]^0]̸GSUVt$WVhU%|$(~w\$D$l$jj0UD$) t:VU%P xD$ui\$uTVj0U\) uƋt$FHP3 ~)NHUQ HuVHRE ;|3_^][_^]5[̸\̋D$ L$T$h\PQR̋D$L$h\PQ h\̋D$h\P̸̋D$ L$T$hPQR7̋D$L$hPQ| h̋D$hP!S\$Vt$Wh Sh,V|$$t PV0 h VhX VUh ShVGQV2hX V-WB(t@tPh ShV_^[̸Dl3ĉ$ $UkWD$ D$|ō$:utP:Quu39tUjUh`hhj"DKSQhЦRhȦPhjG0_3]$ 3qDËKjQeNj[h`jnhj"]DSC RhЦPhȦQ딿0Ŋ:utH:Ouu3u:D$ 8tjdh`hPSXjjh`hsI:utH:Ouu339>u!xujxh`jAD$S- jhXRՃ KD$PQ-uhh`jqL$@hh`TRP3mt9yT$HRUQ CT$HU)nUnhh`jA@hj"BSC RhЦPhȦQhjE44SjhPRՃ Ch PЭD$ u5hh`j hj"-BKSQhЦRhȦPIT$ hL$QR ;uD$ jPőu΋L$ Q褗hh`j C@hh`T)RPktyHUT$RQA (PTSjhHRՃ CPI@u+‰D$Ihh`DPQ7ktkzL$SQHRQAT$ Hhh`jAhj"@CKPhЦQhȦROh@h`jAhh`hhj"i@SC RhЦPhȦQhjHC0|$R"$_]3̸?̸?D$$SUVW3P|$(|$|$|$薎Ul$(|$$fWUd؋C#@t{@uuL$0@PQm3W;}(T$RUWt$L$EuˋD$0WPml$(7|$L$Qt$L$tVGU|$ ;/t$h+h`hhj">hh`h8SC RhЦPhȦQhjAhh`hhj"f>SC RhЦPhȦQhjEA0XVjY|$=t=uth1h`hD$$uXh8h`jAhj"=D$t P:,D$tP9D$D$t$PP2HyT$D$h= UD$,_^][Vt$t-t P;Fth PVVh^̸<D$Vt$VL$QRD$u3^PFP^̋D$QT$D$QT$a̋D$txu3PvAL$̸6<D$Uh@VW3S|$h0 E;S~lWSL$ QjPLtuVRJW=u }u@u#EVPu)MVQt+SG{;|D$ D$H(D$ Vhq St$Uh RE_^]Y̸ ;UVhh3jl$d ;u^3] SFD$UD$P^h.nF Ww@5%u:9t$ tj\$O(t UNt SA[^] ËEu 9EtÅtxtPs?F MQStjT$RjYW$u9t$ v[^] ËL$ QWD$ xjT$RhW_$u 9t$ +PWD$ jD$PhW$$؃u9t$ L$SQW|$@u.hhjj b\-hhjj J\ G@_̋D$BD$L$HL$]h v̸8D$$Vt$$T$RL$VD$L$ u3^PV^S\$CUVWP3}~Bl$KWQv9nuT$HRQ\tSRG;;|_^]3[_^][̋L$A D$W8uUtPW SVR3~-\$G VPSP\tO QF;|^[3_^[_ËT$GRPY\@_Wjvhj Qa u_S\$Ul$Vt$oGtICPR=u ~u[~+~uh Fvt9WVt+t$ t3~u;FFtSPuWb^][3_ÅtE^][_Vt$FPr%uNh`r Q V hpRV5b ^ËD$S\$Wuu_[ÅtPYu_[3Vjdhj` t"vF uVat W$^_3[ËD$t~ NtSV^_C[F^_[̸5SUVt$W|$_@3Vl$u+D$O(D$h PD$_^][YVcD$UP]OQP =RP=PSEuLthQRVw eHNSVRuV;GN PQ GT$RE;0D$|$D$̸f4SUl$Vt$FW3P\$8~TNSQ6URWVtD$jWUPttGD$NQC;||$uvtT$jVUR8t _^][Y_^]3[Y̋GVP3~)OVQPStWRFk;|׸^3^̋D$ u@VRPj t0D$ D$QL$PWVQVyu V<3^ø^̸2CUVW~p Vt$9GtV3_^]YËt$ UVVWSuD$L$PVQStnT$ REL;|_^]YÃuHPQj t0L$L$BQWVSFm]V,_^3]YSUl$ ًCVP3~0IKVQUWt4SRF;|ՋCtjPPSt 
^][^]3[SUVC W{tthhhhj.&3^ËF@ ^      ̋PA=4t+w1$ NAËVB ËF@ËNAËVB=t!hhhhj.8&3ËF@ ÍI     Vt$f^t3Vt$WEu_3^ËD$tPHtP7_^̋D$P6u39̋D$VPt[|$ ttPI^Ã>u t 6N ^hThjAhj.%3^̋L$A $jPQP{:L$PQ-_̸f$D$VWPjL$jQ_T$R?P:P苖u!hshhjtj.j$_3^Y謳PztWjjoVwt_^Yhzhjwjtj. $t VSs_3^Y̸#D$VWPjL$jQ^T$R?D$(hPFytWL$QjjxVvT$$R4P讚;tRD$PPG;t;V:yhPx uhhhjsj.N#_3^YËL$T$QRT_^YVt$PC>t/t"hhhhj."3^ËNqv u3^Ã>u tWxPtWR@uTPW蝮_3^Ë_^Ul$EVP=t0t#hhhhj.I"^3]ËMqu u^3]ËSWR3\$~5IWP8uHSQt0RG;|Ut1_X[^]hhhhj.!_[^3]̋D$VW|$WP~hhjjWD_^Vt$Ps<t2t"hhhhj.!3^ËNqvu3^Ã>u tW5P?tWRmuPWʬ_3^Ë_^̋D$PFuËL$H̋D$PuËL$Vh/hjH VQuC^UVt$ WP3?;t1t$hhhhj._^3]ËNy~ u_^3]ËSR3[~dd$SPW>u=u ttSNQU$t4VhMhjjRBPC;|[_^]hPMUN[_^3]UVt$ WP3O:t4t$hhhhj._^3]ËNy~u_^3]ËSR3h~aSPg>u=u tSNQU4t4Vhnhj jRAPC;|[_^]hUU^[_^3]WP蔦tRVj3Wt6>u t 6N ^_hThjAhj.^_̋D$PfuËu~D$:tx tPQRxy_yD$tS\$ Vt$ Wt Vujrhjjoj.L_^3[ËPk8wS$T _^[V=&V2VGVBtWPq_^[Åu*W&l_^3[hhhjoj._^3[   3   ̸FUl$ WU*u_]YËV@ t{D$hPquhhjijnj.?^_3]YSL$QjjVuohVljjhVYoT$8SRPu tH ` ߋT$ ^jLh0jAhj.y3^̋D$L$PQP;' ̋D$L$PQP;) ̋D$L$ T$PD$QRPhh ̋D$L$ T$PD$QRPhh d̋D$L$jjjjjPQhh0 ]u$̋D$L$jjjjjPQhh0 M}$L$PD$ PQ L$ T$PD$PD$ QRP! L$ T$PD$hPD$QRP SUVt$WP/VP/ uNY3iT$ L$PD$ SUWRPVQ _^][;L$PD$ PQ ̸l3ĉ$UV3t$usPi-$t!nPijjhUdfujPhHjAjkj.hD$PWb ~2VL$QUc ;uphT$RWb Wb= ujjjqWet4|0$tPSUuBjlhHhjkj.Pt$;t Ua$ ^]3 þVtWVgV:a;u_^Vg^Vt$VP,thhHhjmj.3^WjVu_^ËD$S\$PWLg[_^Ul$UuP,thhHhjvj.?3]Vt$u+Ut90ujhHjjcj. ^3]WVU|u_^]ËD$S\$PptjWUn& \$[_^]Vt$V赇P+thhHhjyj.3^Ul$u+V t9(ujhHjjcj.L]3^ËD$L$PQjVB:~WUVu_]^ËT$$S\$$R݋ϋ[_]^̸VW3VVD$PVQr$D$$RPQT$,R$u h$hHhhj.vD$ hPP=tL$ WQ{T$ R+L$ VQS.jjV'+ <|$ u? UC=;|hhHhjqj.~ _^]3[_^][hhHjpjqj.U _^]3[̋D$SUVWP U3~TSUV! 
u.L$T$QRV<D$ VP~-jjV< 3UCt;|hhHhhj._^]3[_^][Vt$VP#thhHhjpj._3^Ul$u+Vt9(ujhHjjcj.,]3^D$ tFH A VB @D$S\$ u|$u&u"uC[]^ËL$QPV u[]^WUV<u_[]^ËT$(R4݋ϋ_[]^S\$UVW|$SW3uhhHjAjj.M_^]3[ËD$ L$PVQjjj VwZVW$u&h hHjgjj._^][Ëν_^][hHhHhhj.3hNhHhjhj.3VJt)D$ u"PD$ jPVu V3^Ë^̋D$u胄VPLu^S\$@u jVu#D$ SjPVuVI[3^[^W|$ uhhHj~jzj.3_VtD$L$PQWV/u^3_S\$ @u jVPu$T$SjRVuV[^3_[^_SUVWwWD$\$$t$SjPD$ PWAuhhHjcvl$U3~#VUPW;tBUF];|@u jW)PuL$ SjQWt"_^][hhHjAhj.jt W_^]3[̸S\$UVt$$@D$ l$D$ WVjPjjtGhPW t/VjSUWu,hhHjchj.W_^]3[YËL$Qf6؃t܋CRPa_D$(tŋL$QU1t(T$(VjRWtWD$D$(PrR|$s_^][YhhHhhj.^]3[Y̋D$ SUVWPuhZhHjAjwj._^]3[Ë|$W3N\$$~'IVWISPUtKWF#;|@u jUPuPL$SjQUu;U_^]3[hKhHhjwj.'Un_^]3[_^][̡EPUuhEPhGj̃~uj%PFujThjAhj.3j-NQFHARj  FËPtjGhjlhj.;3Vt$V^̋F WP3~TN WQu >}*"u >}u >}V RGP;|FP3>~5$NWQ68u >}VRG ;|ҋFQt >}VR3~FFWPH9u8}>}VRG;|>_}S\$ÃVWtdthhhD$@8u$hhhhj._^3[P%t$FtY_^[P資t$Ft5|$W諣NPQL tWVPBP$ uhhjAm̋L$u%D$ tQD$t IQÃuD$tI3Vt$Wu6|$WPFQ莣 uIWAVPBP$ _^Ãu(|$jjW蛟G8 tNPQ_^Ã_^Vt$PCtjGhjlhj.3^ËFu^Ë@^SUVt$WP3tjGhjlhj.3Fu3XS34~@VS6t u Rt/OQUtSF;|_^][U_^]3[Vt$W|$t6hhjGjPK F t P+W肤F Ft PP~_^̋D$@u%L$ tPL$t @PÃuL$t@3̋L$QT$ Vt$W3P|$ t!jGhjlhj.5_^ Ëvt$;u _^ ËNS^ Q|$UVWR~FD$$P3sL$$UQmVWRtD$$PE?;|Qt6hhjOjQF t P蛉WF Ft PND$~~D$(S3US8uxVWRatSE;|Qt6hhjGjPF t PWFF Ft PMD$~|$t$NGQ|$#;]D$ [_^ ̋D$L$tQ D$ tQD$tQD$tI̸ SUVt$0F3ۉ\$ Q.P(Pk ;T$R'jj4V}MWSSBtGjWPj4VZ~WX_uhhjAjgN QjUT$RD$$PL$Vjj jjQ) hPF T$RPdL$ PQL$Q( T$0RD$jP} ~uL$ Q,$T$4hhR8"D$ t]L$0QPT$R= ~5D$Vjj jjPsBhhjnhj.tD$ t P#L$Q(^]3[ ÍT$R(D$4L$VPQR^][ ̸Vt$$W~ D$u&h hhhj. 
_^ËFSQP Pi؃ u [_^ÍT$RD%F PjSL$jjj3x PVu hYhjrhj.NQT$UR D$PL$0QT$R/% #hjhhhj.MD$;t hphjxhj.%L$,r;u2st':uvW:Qu vG:Athw-AFN @QRPL$ Qy؃ hhhhj.3ۍT$R$L$p_^][3K`̸L$ $PQD$`.3YË$VPD$RjhPL$Qb^YVW|$3~OWVts-utdV2_3^ËD$VNVPF PW-t$>u ktWQt_^WG-_3^Vz P4etD$L$PVQ- øVWhD Pd|$tjhW h) Pdtjh)W h-Pdtjh-W hPmdtjhWi t~jWtjjW,tVhW%t?j@Wt0jWtj(W%t_^_3^SUl$ VWP tjGhjlhj.3vu3^S3$~CWS&;t%Vw|NERQt0SG;|hhhjlj."_^]3[jjj3PVuhhjrjlj._^]3[jPjj3U_^][̸ fS\$W|$SWuu#h7hhjfj._3[ UVt$ t$PxjjW萑hBhj CjPihChjOjQR4D$0] }tE>} 3EUPWR t$,uCD$PSzL$QPauh`hhUVRrD$HQ3 ~OT$BWPPjL$(jQ/*T$0Re V=f ;tD$HQG踵;|\$(T$BP蠵;ue(t%VWt$NWQyuBW(hhjAjfj.tPUz^]_3[ Ët$C t7@Xt0UjjSЃuhhj}맅hhjo땋\$0} uE cuET$RD$|$t WUh3Wt$t.L$ U@uUuD$$L$ PQ~u_FFUP ^]_[ ̸`l3ĉD$\UWD$ P39n u1huhhhj._3]L$\3`ËNQT$WR V|tCH9T$RD$(PL$Qo T$RD$(Pjj3V@jWjj2V(VGN hhQ4sP uhhjAhj.XV RD$ PL$WQAou&hhhhj.W)T$FRWPu L$ QL$h_]3`S\$UVWPtjGhjlhj.c3Cu3hU3~'WUL$ t%UG迱;|ًKQ_^]B[_^]3[W*u_VP6suW^3_jXwjFNT$FRP^_̋D$@HL$̸`l3ĉD$\UVt$lW|$tD$P3vNQT$WR"D$ PL$,QT$R 9l$xv D$ ;tj~hjyjuj.Nt$(r;u2stW:uvLQ:Vu v?A:Ft7hhhjuj.mL$ F QT$,RPE tL$QL$l_^]3 `Vt$PCtjOh jkhj.3^ËF^̃~uZP/qFuj]h jAj~j.3j@NQ Pj) FËQtjOh jkhj.[3Vt$PstjOh jkhj.&3^ËFu^Ë@^VJt+tL$P jjQRu%V,hh jAj|j.3^Ë^̸VSUVt$WPt jOh jkhj.y_^]3[YËFD$7PoPou|$jjWEgW衎؃uhh jqhh jOjQ D$ ^~t 3VPWR tkC t7@Xt0UjjSЃuhh j}3hh jo!D$HUQu3hh jAjej.>tPU;r_^]3[Y_^][Y̋D$8thh j|hj.3ËL$@tPL$ tPL$t@̋D$8th h j|hj.3Ë@PT$̋D$8t!hh j|hj.OËPBD$̋D$8th#h j|hj.3Ë@L$H̸Ul$W3|$9}t#h:h j|hj._3]ËEL$QSZ VD$@WP5u^[_]V޼Ujj hjVA hJh jnhj.BKSQRD$(PjV~uL$ hQh Qz uhVh jAhj.;SCRPL$(QWV蠼~ T$ D$H RWQf 3D$V#t WD$^[_]̸&D$ QSUj VpF3ۉ\$;u'h{h hhj.>^]3[WSPʵu_^][WüD$Pjj hjWҰ hh jnhj.F PQRD$PjWԼL$hh Q ؃ uhh jAhj.tiv FRPL$QSWyhh jhhj.:/ED$tURP4EPc ]L$MW耯t$u t S; _^][̸vD$8t$hh j{hj.ËL$T$@$D$D$ L$@QT$RøSUVt$WPvt jOh jkhj.)_^]3[YËvt$D$l$$ t hh jvjdj._^]3[YD${Pit[PiwtD\$4t"PhNAVztD$HWQhGh jAjdj.<tPW9l_^]3[YD$QD$D-t,t u) l$$;"hh h hjdj._^]3[YËT$ D$,L$(VVPnQP2NT$ESP$uPhhhjAhj.eL$Qht PSUUW3Ë$_^][3Ġ̸SUSuD-D$ ;s]3[3ҋuVhhhW4L$,D-+PQ΍T$ +RQSU+RD$4PVST$T+QRD$HPVSHjjjjSWVL$(QVSN2NV2VF"N2("рu&Q;rT$QPD$ P I 
\$WVVP ^][̸D$SUl$VPtȍD3;s^]3[YÁwtgU҈WEЈGMSWUROC ;v+˃QTR藨\$VWD$PWSBVWL$(QWS4(T$2^][Y̸l3ĉ$$$@SUVq3ۍL$\$h Q\$9^u8hWhhhhj.^]3[$3ĠW~R=}G8u,ȋQBD$OQPD$ Pj D$uhlhhhMQhPbP8 u#huhhhhj. $WjjjPT$4RyD$ jPRL$ QRD$,Pv} hhhjfhj.dFPWL$$QNRVQRP̅} hhhjhj.] }L$ Qr3ҍD9 ;hhhPD$u؃ MD$ PQMT$R F T$XN F hhhQ؃ uhhhjAhj.Qv V>RD$PSL$,^ u hhhhhj.LL$]MD$T$ R;t$u t SfD$P h_hhhhj.3$_^][3Ġ̡`JPuh`JPhKjw̡u  @Vt$VЋL$N^̋D$̋D$u P@ЃuhhDjAjoj!3̋D$t Q D$̋D$t QD$̋D$uh hDjijqj3ËD$A$̋D$uh(hDjijnj3ËD$A̋L$uh3hDjijljS3ËD$uh9hDjkjlj03ÉD$L$]S\$ Vt$ W|$WSV uOuhKhDjjjmj_^3[hOhDjljmjWhhSh`j(3_^[S\$uh[hDjCjpjr3[ËD$L$ Vt$ WPQVE u_^[JPVуt+J PVы PWAGRVЃu_^[̋D$uhhDjijij3ËD$A̸ fu 8 P $QҋL$D$T$QL$D$RQD$ P#D$̸ Vt$u3^ áu PL$QҋD$PL$ Qt$ ^ ̸ 覿Vt$u6D$L$PQV uWhKhDjjjmj3^ áu 9@T$RЋL$ T$QRD$Pt$r^ ̸Vt$D$uL$ T$D$PQRV=u PL$QҋL$$T$ D$PQRD$Pt$$q^u$̸ 膾u XP $QҋD$T$L$D$ BQЃø Fu P $QҋD$L$D$ D$QHT$RӳD$WjP,uhhDjjrj%3_ËD$Vu(h(hDjijnjW35 ^_ËT$RWPAЃ W ^_̋D$WjP+uhshDjjjj蕽3_ËD$Vu(hhDjijijpW3 ^_ËQWP҃W ^_̋D$Vh" P(ujghDjjdj3^ËL$T$WQVR=V5 _^̋D$VjP*ujxhDjjgj踼3^ËL$T$WQVRV _^ø 6D$VjPI*uhhDjjhj[3^ áu PL$WQҋD$T$ L$ D$BVQVT _^ ̸ 覻L$t'D$tI$$PQD$ n 3 ̸ fL$t.D$t&I$$PQD$ .t@ 3 ̋D$Vt$WxVWu_^ËL$ QVRt"VW FPwNQnVh_^̸ 覺W|$u3_ S\$Vt$ t[KD$ PQ|$t$Zu_pƊ:utP:Quu3uW u$CT$ RPD$|$t @^[_ ^[3_ W [_ Vt$FWP葆Q臆3_^̋;t):utP:Quu3uFGt-Nt&:utP:Qu u3;Ft3ɅL Vt$ W|$ q_^Vt$u3^Ã~uh@ h? 
Ftݸ^Ul$}uaS]WSx+VWSFP=NQ4V.y^t S;URU _[]SUVW臀؅hhj tsl$ōP@u+hxhW tEWUP{D$ F^HVQՅthth!h _^][Sz~t VM_^]3[̋D$xtPD$ P Vt$t=~t7F@ FPh B PNh@B Q蚁V^T$ڀ^̸O̸OVhhj t D$HVуu Vn3^Ë^̋D$uO@K@̋D$uO@M@̋D$uP̸6SV3W\$|$ ;u _^3[ËƍP@u+U@PW|$0OQ@tWFQu;;t4 t FFQu"L$QL$0FIFQCt;uFtNFЊiu.;u F;uFL$A FoFCDMt;DF> tjFFFQ=FVEe؋GHuȋVFQtN;tHtN;uhxhj F t$t|$uT$T$D$,P@u+h@hPVF .L$,iAu+AQL$0QPT$$VRWt$L$Ɗ:utP:Quu3t-VW_u#VWuhhjgD$ L$QPW \$0D$D$RP[hhjZL$Qnt U!_^][L$@3蚫Dhjhjeh{ hhhjAjyj腫T$RD$t PD$4tL$$T$$Rh$ND$Hj PAL$PQhj/G9D$8ttP9Gt$t)Ft PJFt P:V1L$P_^][33譪D̋D$Vh" P0uI"%=uhhjrjxj聪3^hhjjxje3^ËL$T$WQVRV_^̃=uruVh+hjv tEL$ D$QM T$FD$VQVF Fho u V 3^Ë^SUl$ VWj.U t+ōP@u+‹ءP3p~5 WQpVSUR tPGo;|_^]3[_^][SUVWhchjٿ3r D$PIL$QF<Fn9ntvtrCtT$RVЋ~Dupuh~VP(nu^hhjAjsjl[ t tVӃt)Ft PFt PV~_^][C_^][̋D$ L$T$PQRjËD$@̋D$@h@ t PHP@uVW+h1x hW u_^WLHPVWh V Wh4V$_^W|$uhLhjsjwj23_U- SV|$ttIPՃtGGuL$QWh ؃;tR?tMtsǍPI@u+t8|$tRՃtFNPՃuL$$+QFVW T$$RjjT$, ~t{T^[]_̸D$ SUl$VWhH\ PQD$& ujjWj*uFnFhpV؃u D$ph`VPSUVu9VD$hhPjuj貥WhXUhPj蟨(3_^][Y̡P%l~RV QkB tVЃHFPNQVRk^Pk̸趤l3ĉD$S\$ Ul$ V;uSuWVvu>u%hhjqjvj赤VhPj訧 ][L$3uUWVg LuGhhjmjvjdUhD$$j P:L$,QhWhxVhPj5@L$][3WzPjx_S\$ V WQj~>ut1WRit PFPVy^[ Q*j_uRijDYVt$t8D$ tPjV& u'D$ thjV u ^UPVu]^SWU3i~;t$SUiL$$xQVpD$tUCOi;|ȸ_[]^SUVW3S32l$tZu tOjVW "D$t0ƨ%ru!'D$L$PQW ؅u VaW_^][f̃= ug 踢D$j0Pj* CV=j @PlthV7VbVj ^ ̸SUV33D$l$D$v؉\$$;.hSWjWhlj c D$;t$47oo hG;j^<hlW"L$ A;j`hlWT$ B ;3;~L$Q,I ,@;|S|$$Ƃ3ItD$PSD$ >CL$SD$0+QRP_KD$(9 8u9#tp@u+|9 ut$$j~T9 >hlQ3 Ɖs3o|$ t' u|$ tbH3ۀ\È@F\$ u݋\$(L$4;uW>uR@RT$BPdhX @P  ;@F;l$4}ED$(VUQL$(Qh @P  D$t$_StD$tcuh @P  t9Ft PeFt PF t PV^]3[Ë^][̋D$Vt$;| F3^ËNu F^ËT$RPlF^̸趝S\$Vt$;| F^3[YËD$ L$PQjD$u F^[YËVUWR[dl$$3D$~:FWPPd؅t SՃtS\$S"ku=G;|$|ʋ\$V<t PfNT$F _,]^[YËNPQFeSF~f _]^3[Y̸趜VW3|$t$BD$;t$$FSUPwc.;ljD$ |$t$,L$VQR^c33ҋ؅~!tp@u+B;|L$D}PQ!tzT$B3~, tt u\@A@u F@;|ԋL$T$(@ I+VQR ;uD$t$@;D$ D$8D$D$L$QvD$ ][_^_^S\$ Vt$ W39>~:F<t,N <t S҃tF SQ!ju!G;>|ƋVSR au_F^[É~_FF^3[39>~6F<t(N <t S҃tF SQhG;>|_^[W|$Vt47x!IG<t Q,dyWR G t PGP7aD$ SUID$PGP"a؋,3u~;t PF;7|$~ I;r;v t P{F;7|t$OVQ`P_ 
t$y][WRh`W?^_̸|̋D$VWwSt$|$$` PNQ~RPFu_3^Ë΋QNjRQ)~_^ÍI]` ` p` ` ̸h̋D$ L$T$hhPQRI̋D$L$hhPQL1 hhV!̋D$hhP$̋D$L$hhPQ0 ̋D$Phh̸̋D$ L$T$hPQRGH̋D$L$hPQ0 h ̋D$hP1$̃|$uD$QR*&̸̋D$ L$T$hPQRG̋D$L$hPQ/ h̋D$hP#̸̋D$ L$T$hPQR'G̋D$L$hPQl/ hv̋D$hP#̸X̋D$ L$T$hXPQRF̋D$L$hXPQ. hX̋D$hXP"̃|$uD$QR̸̋D$ L$T$hPQRF̋D$L$hPQL. hV̋D$hP!̸4̋D$ L$T$h4PQRE̋D$L$h4PQ- h4̋D$h4Pq!̸̋D$ L$T$hPQRE̋D$L$hPQL- hV̋D$hP ̸ ̋D$ L$T$h PQRD̋D$L$h PQ, h ̋D$h Pq ̸<̋D$ L$T$h<PQRD̋D$L$h<PQL, h<V̋D$h<P̸̸̋D$L$ T$PD$hhQRP[Vt$FP袮L$t]tjmhjnjhj!O3^Ãu!FtHyt3F ^øF ^jghjhjhj!3^ÃuGW|$~ t6VBHQuVBHQGVB@_^jXVt$FPҭtEthhjpjmj!耒3^ËNAt PyVD$ B^ËNAt PUVD$ B^Vt$ VŧNb$xi t$FF\jPΕ(NQF3^Ët$jF讲Ft$FFjRrFjQejVJ^Ët$FFjR jΦNQ^Ët$FFtmjPߔt\j葦NQ^Ët$FFt0jP袔t^hhjpjnj!蟐3^Dh h h ]h 1i h D$PL$T$AQUVt$ FPqt"thhjqjgj!^3]ËFL$QphSWP-V3V~$WVVQ ;tyVGoV;|t;JGt/SKWWV9U u5Wl_[^3]W\hhjAjgj!g_[^3]ËD$PUT_[^]Vt$FPbt!th6hjqjdj!3^Ëv>u'Vuh>hjAjdj!ގ3^W|$hAhjOjQWRRTuW_3^_^Vt$FP販t!thYhjqjej!`3^Ëv >u')VuhahjAjej!.3^W|$hehjO jQKWRSuW_3^_^Vt$WjP谑|$W+2NPQa VBPڮWd:P.NAV z|$hhj GjP蝰L$,jjQ~YPVPRG tF@Xt?VjjWЃ~_^Ãt#hhhhj!_3^hhhhj!ˌ_3^S\$VW|$uOD$PS~wL$QŢPu$hhhhj!k_^3[t/T$WSRVjtD$VPuV_^3[_^[Vt$FP2ufFNQBFHyuhhjAj~j!ˋ3^ËЋBT$ HRPFNQ^hhjqj~j!^Vt$FP蒦uNA^ËVRyuF@^3^̋D$L$tQD$ tQD$tI̋D$t L$QVt$FPt)thhjqjfj!谊3^ËNAVBL$ QP0P^W|$jPQu_S\$S.OPQ t*WBP耫S 7PԯOAW zu[3_VS1tfF t_@XtXWjjVЃuh.Fh4hh;Vh:hjCjP^_[_h%hhhj!茉t V^[3_Vt$FP蒤uL$ APNRQPR2 ^3^Vt$FPRt)th^hjqjlj!3^ËNqVrW|$W@7u hehhjlj!_3^É~ _^Vt$ FP£w]$r FMNQBuHjWNQB+FHAu&j5VJA VB@u3^ÃH L$^Ëq q q q Vt-D$ PVtL$VQ'u V3^ø^Vt-D$ PVtL$VQu V3^Ë^̡OP~uhOP#h8Qj̋FPWuFËNQBvFt 8u@3V:Ptujoh j j}j!Ɇ3^ËD$QPP u jvh jm4PjjoVu 7^VP$ujh j j}j![V3^̸SUl$EV3Pt$-؃;u^]3[YWVSXu_^][YWS^UjjhjWS#hh hhj!贅L$ T$QRD$ PjWe^~sL$hh Q uhh jAhj!Y9T$ D$RPL$ QVW^~T$E RVPإ 3D$SEWRt VRD$_^][Y̸薄D$SUVW3WPكpWu_^ ][YVi^SjjhjV|R#hh hhj!zC PQRD$PjV{^L$hh Q謭 uhh jAhj!oC @RPL$QWV ^hh jhj!3߃3\$tT$ QPR D$L$;V!Qu t W_^][Y̸t&l3ĉD$pD$|SUVW$O3Q|$$D$t$t$ t$33t$,t$(t$0]w$z GhpzGPh@X HT$(L$$;ugh$h jtjij!ւ_^]3[L$p3袂tËGP@X HT$(L$$;uh/붋GPpT$Z3D$,UI~1$VUIP|$o teUFH;|ڋ|$ D$tP|$Dt:|$  PD$uQhNh j jij!t$t VD$t 
P_3^]3[L$p3螁tÍL$QjhPSSD$8hQS/P*t$D ~T$0URbtD$jjjjSPVT$L$@QRz Px D$4MQPP l$4PjjoUD$4ul$$ UPBVD$8C D;yl$D39\$0PPD$ ;u[hh j jpj!:}hh j jpj!}3ahh jmjpj!|3Bl$PWtw3C~JSWCUB NPR!uFHUBQPtzWC:C;|hh jsjpj!~|33 C~e\$HVW CSL$(QT$$RWP'}WFB;|&D$HPL$ QT$R! |L$ D$P3WhQ|$ 6T$@D$ WWWWRPc(L$8QD$RP,L$Q(h@h PD$8D$;ЋD$RP豵zt$;uL$T$(L$T$|$D$P>(\$ ;t:L$SQu(SVVdT$$D$4 T$D$|${L$WWVWWQu;tSVV |$t$;tT$(RVV |$D$$;u D$ D$$L$ QP5l$D|$ 3ۋUR蟕t$PuSSjUu;tFL$<;~PAPD$,PSShPD$@a9\$,tD$,t$$PV_^][03ۋt$;tL$QVV! t$;tT$(RVV D$$;t PtD$4;t PcD$ ;t PRD$,;t PA_^]3[0̋D$VhPtH$WjjxVtIPfP0;tRVhP uhh jljj!x3^hh jDjj!x3^Ë^̸ fxVt$(FD$WQ͓PǎP 7T$R*FPjWL$QT$$RZD$ VjjjjP&F h`V PL$ QRaL$ PQD$P趧 L$QAT$0RD$jPpZ~nL$,hh Q5 D$t`T$,RPD$P:Z ~8L$ VjjjjQpEDhh hhj!nwD$t P譢T$R萧_3^ ÍD$P{L$0T$ FQRPؗ_^ Vt$FP2t3^ËFtptjV=Vz=L$ ;~QVx=@^ËD$UPUtqtmVWS3>=~.$VS9=UQΚtSF=;|_^3]ÃuWR<tGjP<_^]3]S\$j3d[uË@S\$C Wth>P?|$W;=C u_[VW3u<~-VWwP>|$W<Cu_[VW3;~-VW;PPCVP<tWF;;|^_[^_3[̋Ul$VWu<uB_^3]P3;~-IVP;Q ;tHRF];;|֋D$L$PQU膹 tVR4:uRV'_^3]WD$L$PQUHcWVR!; uW_^3]_^]̸psl3ĉD$l$U$Vt$|W$L$ QD$ D$虡WRt.GPthh jrjqj!sMSRΎ؋ƅthIhPtQD$PjjxVD$$tsP`Pf;tnL$Q`P;tWVuhh jljqj!s[T$ RHL$|D$_^]3rphh jD‹T$RD$(P"t} AWZ90L$QT$@RD$,PD$$S uWRu h:KL$9 pT$<r!$;st,:vB:Fv J:NjS0PPT$,R PD$PW "hUh j jqj!qD$$L$PQT$,RG yD$PҜL$ }QuD$IhIh je둋GVRPL$0Q\Vdhlh ji_D$̋D$L$SY P.[̋D$L$SYP[̋D$L$ T$S\$PQR [̋D$L$ T$S\$PQRd [̸Hpl3ĉD$DS^ j4[u:PV1u,hh jAhj!p3L$D3oHÍ$PL$QW诟 uhh j뼋$RD$PV uhVL$H3oH̸x6ol3ĉD$t$SU$VL$(W3QD$ \$,UR|G]w$ ED$uaMQPGL$ Q؍|$ VL$T$RD$HPQ訞 7T$MRQ D$HPR跎 EPujjjU'L$A T$hRihsh jkEHPBL$ D$;u,j2D$;uhh jAqMQB\$ 3St$46VS4~NR L$ Q؍|$ iT$RD$0PF P;4|$,'@EHAD$;;jND$;uhh jAUJA EPpT$ D$EHQR?9] D$P0MQZEpMQD$BP9] L$QUBXgNQxhQh PD$4VRD$(PL$4WQt5T$$FRWP t$\$ FSt$2;"ShYh jTD$PjjVJhVݻjjhV.L$,D$,P; D$L$,Qj"T$RG&;+l$(Ug=T$RjjU芲PD$,PD$u}hnh jAhFh j juj!^Vh%hLhVh jujuj!^D$,PlPhdjaL$8Q#%(l$L$QSA$tENPuh{h jAjuj!Z^jjhV蓱t$ h$RUG ~0tP$PV h$QU Ѕt>T$ RVAbu#hh hjuj!]ViV$uND$P38$~;L$VQ5$T$VR($PWSU? ~JD$PF#;|D$$D$(9D$u t U:UL$Q#D$,4hh jijuj! 
]h5h j{juj!\3$_^][3\ĨWujLh jAjtj!\3_SUVjWptIjWt:D$\$$t>SjPD$ PWu(jXh hjtj!S\WZ^][3_u6l$U3"~#VU"PWtUF";|@tjjjWtPuL$ SQW t^][_VujIh jAjvj![3^L$ PFPQT$VjhR^̸[D$VhPFt->u(NQT$詔PFQT$ Rj ^Y3^YVʔujfh jAjwj!Z3^ËSWP1IL$Qgp\$ ~WFujnujrSWt^ujvh jAjwj!{Z_[3^ËVzFL$VQ_[^Vt$j2V3t3^ËD$ u joPjj2VK^̋D$u,PPq!uhh jAhj!Y3PD$jj4PVt=D$L$ PQVy tT$Vjj3Rt^VZ3^ L$ T$PD$PD$ QRP1QۿL$ T$PD$hPD$QRP,RSVW|$GP t؃uOq3臿T$L$PD$VjSRPWQU _^[[L$PD$ PQX ;L$PD$ PQE WujGh jAjuj#RX3_ËD$PmT$OQD$RP̌ujLVujPh jAjuj#X^3_ËL$Q~m^_Vuj^h jAjpj#W3^hQmD$ F^Vzujqh jAjqj#W3^Whm|$WmPN tL$$T$ QL$ RT$ QL$ RT$ QRPW?* F_uj뎋^V uhh jAjrj#V3^jljFwNuhQPD$PS uhh jdjrj#V3^Ë^Vt$FPqthh jyhj#RV3^VNPQ茋^V*uhh jAjsj#V3^jVuhh jxjsj#U3^W|$ WlPtL$T$QL$$RQP谒T$D$L$ RPQW uhh jAjsj#{U_3^ËVBHQDVBxNQBPvL$,T$D$jQRPCPW|NQBFHyuhh jgjsj#T_3^_^Vt$FPpt3^ËNQBL$T$ jPQRPFHQR;^̋T$BD$p'D$HQT$ PR Vt$FHQothh jyhj#/T3^CVPBPf^̸$ ̋D$ L$T$h$ PQR̋D$L$h$ PQ h$ ̋D$h$ P̸| ̋D$ L$T$h| PQRG̋D$L$h| PQ h| ̋D$h| P1̸ ̸8 ̋D$ L$T$h8 PQR̋D$L$h8 PQ h8 ̋D$h8 P̸ ̸P̋D$ L$T$hPPQR'̋D$L$hPPQl hPv̋D$hPP̸̸̋D$ L$T$PQjhRRC̋T$D$jL$ QjjSRD$ C̋D$ L$T$PQhhRB̋D$ L$T$PQhhRB̋D$ L$T$PQhhRBS\$u3[UVWS3h~*l$VSfPk ;tSF>;|_^]3[ËOQ'tWjR%_^][̋D$HhQnu3Ã8u@@RP@̸hOl3ĉD$dD$lL$pT$|D$D$xL$$$L$8wxm@PST$PL$Qj`؃ u'jWhjejxj#O3[L$d3dOhËCUuh P=TT$ L$VpW8RD$8PQ[$PD$$UjWVRP`$u0jahjkjxj#OS_^]3[L$d3NhËL$D$QT$(RP$T$$PUjWVQR$u jghjjS貄$PD$L$,QL$,T$@RjPQT$Pj@R聠D$HjPu,_^][L$d3ANhjQhjejxj#KNL$x33NhSW|$ ?uu_[û3ۋD$VuaL$T$QL$$RjjjQPtVRsuWuMt23D$PQutR3_ø_̋D$VP\tD$PVhu V;3^Ë^øLVt$WVD$ D$tdD$PV転tL$QPW( t6T$ RVƢtL$ QPW tt$WuWE_3^Ë_^̋D$VWP tkD$tPV|tUD$Vt#L$T$$QjjjRPUV證 tt$ WPuW_3^Ë_^̸XVKl3ĉD$TD$\L$`S\$hUl$tV3W|$xD$ L$t$t$t$9$u DŽ$9$u DŽ$9$u DŽ$9$u DŽ$;u6;u];jghjhjij#J_^]3[L$T3JX;tvSWtٍT$RD$(PPWkUL$WQD$$tjPV' ED$tPT$(RV &t$U3~/WUPD$PhUG;|Ջt$l$ t'$$UQRD$ VPh V3t$$$$UQRPL$ SQ|LjhS |PS$PVR4jhXS |PSPFP4D$tjPV D$tPL$(QVO t$tjjjT$ VRh Vj3, tt\$SW8u W Wh a S$|jPjjjUWueW_^]3[L$T3GHXËt$D$th a P\t$th VD_^]3[L$T3HXËL$d_^][3GX̸Gl3ĉ$$S$U$V$W$L$QD$S{$NR$D$ PQRWPujShjsjwj#xG3jWL$hQ-$ǃPp ujXhjAjwj#0GWST$RD$$VPFuVYrj`hjjwj#3FUD$L$QPT$ Rh 
uVrjhhjtjwj#3Ft D$}D$t0L$QƋ$_^][3ZFĘ̸FjD$PD$$@L$$QL$$RT$$PD$ QRP; uhhjujjj#F3ËL$S\$Vt$$WQST$Rjt$ |$,t SVuhhjejjj#EV q_^[̸VEVjD$fuhhjAjlj#xE3^YËD$T$WPL$ QRD$ uhhjfjlj#9E_3^YËT$jVNQL$WPD$,RPQ u(hhjgjlj#DT$R=p_3^YÃ|$$tD$WPҖL$Qp_^YVujEh jAjmj#D3^ËWjP`H|$WZN A_t jTh jw"jEeVBFxu"jNh jAjmj#1DVx3^Ë^̸<CSVD$,W3P\$0q|$pWɵW ;|$(} _^3[6 L$QPKV N AVAuhhtjAjzj#5_3^_^̸HF5l3ĉD$DD$dS\$TVt$TW|$du诳L$lPD$lPWQVu,hhtjnj{j#A5_^3[L$D35HËL$`T$ RD$PQSVTu,hhtjmj{j#4_^3[L$D34HËT$ NRD$PBPT u,hhtjoj{j#4_^3[L$D3z4HËL$P_^[3̸c4H̋D$Ul$WuōP@u+jG|hW] u_]Vw3Ʌ~*T;|L$D8D8^t9L$t_]Ul$Vt$ƙ+Wt|.tGj\hWE2] u_^]3Ʌ~S);Έ|[D8_^]̋D$L$PQqP{G ̋D$L$PQQP{H ̋D$L$PQ1PE ̋D$L$PQPkE hhPD$Phh1PD$PfVt$PM=t3^ËNRM=u{PFHQg^Vt$PM=t3^ËNRM=u蛌PFHQg^̸1GHQ$@VQT$Rjg u^YËP#ML$VR6L$L$Vg ^Y̸ f1PL=t  ËL$SVjQRj tKWD$ PC8L$ QT$R7 _t)L$T$D$VQL$RjjPjQO u3^ ËSRss^ SUl$VW3~,D$UPVW#tWFu;|^][^]3[̸lV0l3ĉD$hD$pSUV3Vt$0D$,L$l$(l$$l$l$؃;u^]3[L$h350l,D$;tWSl$T$RSGPHK u W@L$,jQWT$(RWD$4PBxL$ML$0T$ D$jQRPiPWFu#jXhjgj}j#9,Vn_3^_^Vj hjU 3t6FFF FFFD$@tVЃu V'W3^Vt$t@ tVЃVV^Vt$@u^ËL$T$W|$WQL$RQVЃ~~F_^Vt$@u^ËL$T$W|$WQL$RQVЃ~~ F_^̡TPe"uhTP*hTj*̸TVt$t%F9D$rD$ L$VQ@P* F^Ã^Vt$N9t$s^ËD$8u@VPD$P* ^̸U̡ UP!uh UP*h`Vj*VjDhjpS ujGhjAjzj&)3^jpjV*F`PVj FXrm^̋D$3ɉHHH HHH H$H(H4H8HtCNt1׊:utY:Zuu3Ƀt@Ã>t~u^[̋D$SUl$W u@Pt8t xt_][_]3[à t tu#uj{hljChj&_][V u1pPt(| 0^_][hhlh]PPC31tyt ;s@;K$ @ Qt^_]3[Ë^_][DP@u^_]+[DȍQAu+RDtP$@u^_]+[áWP@u^_]+[Dt$ȍqAu+Ph AQW^_][ËWp@uR+h @PW^_][^_D ][hhljnhhlhhj&l^_][Ð {     o Vt$uhhljChj&3^Whhljj <39FXhhljj tFTuWL$QL$V^hhljxhj&m^Åuhhljxhj&I3^ËD$PD$PD$PRVу^̋D$L$jjPjQ}!hhlhhj&3ètVt$t{D$ ts~@t8jPjj Vl~$L$T$QL$RQPVO3҃^Ã|$t e^hhlhhj&S3^hhljChj&43^W|$D$ @VFjPjj W,VWuhBhlh-jjVjWs}hJhljnt7|$thThlhjjjVW/3Ʌ^_S\$uhdhlh{tjSjVW3҅[^_èuhvhljnIj D$PS L$ ;t!9ujjPVW3Ʌ[^_h}hlhhj&[^3_Ã|$t ^_h<hlhhj&^3_h)hljChj&g3_á4̋D$4̸SVhhj3j \$8t$09u)h@h q;D$$P Ul$4W;|$4It$(T$RPML$X;uLhhj? 
;F aFV;T$,^PL$0VQRED$8NPQtL^ 9\$~~~ ~~~F F F$bF(;u'hh jAhj&cV_^3]hh jj WPU;u WVQU^ 33hh jj ;t VG_^]̸=WV}zh jjjju#hh hhj&3^Yhh jj :=W}5Whh jj WPWD$uSW\$ 0[tD$^Ỹ~$tF jjPQ7tÃ~$UttV(R荵|aS3W~BF(SP聵N PQR4t"jjWP6WuC;|_[3]_[]3]̸U$W39;uR69{ u!C;u _3]ĜPP4C Vu4hh hhj& Q/^;_3]ĜËS RP~1C;u Q{/hh jhI9{uaSRPJ1C;thЃ=s: Q{{Q/hh hhj&;d^_3]Ĝù|$8D$ D$+T$ D$RD$ PL$ QYD D$0K D$4 D$8 D$< UD$D&CKT$RPUуuF3Rss.h h jmhj&3t$8^_3]ĜÃ{~?Uu2{~'hh jghj&Y^_3]Ĝ5^_]ĜSVW|$D3;uhBh jp9thJh jdD$8$ |$;tǍP@:u+ƒs3F ;t P;tW:F 39^ _^[_^ 3;^[39\$_F^[Ë|$;tǍPd$@:u+ƒs3F;t P;tW9F39^_^[_^3;^[ËD$w _F^[hlh hWu_^[ËD$w _F$^[hwh hk|$;tFǍP@:u+ƒr3W8;uhh jA5N(jPQ _^[hh h hh jwhj&A_^3[3    2 X # VWPVTto WQVt[h:VtIh:Vt7h Vt%jVthP V t^V3^VZtVVh^?^Vt$ W|$ |;t5}t;D$tVD$PW _^_3^̡WPV$t> WQVPt*hVthWV,t3@tV>tVu Y^oVI^ $$̋D$ L$T$h$$PQR̋D$L$h$$PQ, h$$6p̋D$h$$Ps̸$̋D$ L$T$h$PQRg̋D$L$h$PQ h$őD$h$PQs̸$̋D$ L$T$h$PQR̋D$L$h$PQ, h$6őD$h$Pr̸@%̋D$ L$T$h@%PQRg̋D$L$h@%PQ~ h@%n̋D$h@%PQr̸%̋D$ L$T$h%PQR̋D$L$h%PQ,~ h%6n̋D$h%Pq̸%̋D$ L$T$h%PQRg̋D$L$h%PQ} h%m̋D$h%PQq̸ &̋D$ L$T$h &PQR̋D$L$h &PQ,} h &6m̋D$h &Pp̸P&̋D$ L$T$hP&PQRg̋D$L$hP&PQ| hP&l̋D$hP&PQp̸&̋D$ L$T$h&PQR̋D$L$h&PQ,| h&6l̋D$h&Po̸&̋D$ L$T$h&PQRg̋D$L$h&PQ{ h&k̋D$h&PQo̸l'̋D$ L$T$hl'PQR̋D$L$hl'PQ,{ hl'6k̋D$hl'Pn̸'̋D$ L$T$h'PQRg̋D$L$h'PQz h'j̋D$h'PQn̸X(̋D$ L$T$hX(PQR̋D$L$hX(PQ,z hX(6j̋D$hX(Pm̸(̋D$ L$T$h(PQRg̋D$L$h(PQy h(i̋D$h(PQm̸(̋D$ L$T$h(PQR̋D$L$h(PQ,y h(6i̋D$h(Pl̋D$Q T$̋T$H L$n̋T$H L$̋T$H L$>̋L$B D$̋L$B D$̋D$Q T$.̋L$ T$̋D$ T$L$PQ Pg ̋D$HL$T$BD$PT$BD$T$BD$@L$QT$L$QT$D$HL$`L$L$@D$ L$T$PQR ̋D$QT$>̋T$HL$̋T$HL$̋T$HL$^̋L$BD$̋L$BD$̋D$QT$N̋L$T$̋D$ T$L$PQP ̋D$HL$T$BD$pT$BD$T$BD$`L$QT$L$QT$D$HL$L$L$`D$ L$T$PQR ̸SV3ۅjVj hJh)PD$$)D$tojjVD$D$jP9VtL$ WQ T$ R觽L$ jjD$PhnQtD$t P^[̋D$ W|$ QL$(_̋D$W|$ QL$_S\$H UVWjhnQl$$UjBhnP}}_^][Å}_^][_^][Ë Q VREHWQPFRP_^]@[Vt$ H jhnQ) }^ËPB PL$jPR^SV3W|$tOthjWP tX|$tFtDQPt4|$t舓Ft WP*tVjho V_^[VW37tTt$ t/t)PtPt PWŸuWjhp]hpW_^VW3גt,D$ PVvtVWhr V踒_^øVSV3t$؅D$P#Ul$W9ut@CI}tm|tvhtcW1FtW苐tLMjQW t9VFxKVQ艞tuSjhs  D$_]SD$ ^[YË^[YVt$Ft P'Ft PV^Vt$W|$jWnPFh,)Pn_3^ËNWQh t 
~VjjjRx+F_^̋D$uVt$PFP)3^W|$t.NjhgQ( uVWRb)_3^ËFjhlP( 3Ƀ_^Vhh)jA3P-FD$F D$$F~FFNhh)Q Fu3^ËD$ u VPhl)R&m ~ًD$tPVptË^̸S W?t$PӃuG8uuhh)jsjvj'_3[YQӃtGGuhˀ?uh뿀?VtRӃuF8u>uhh)jsjvj',^_3[YUj D$PWF L$ 9td$RӃt FFuF>tAƍP@u+D0D$PӃtL$D$HD$RӃutShh)jrjvj'v>uWh)ja ]^_3[YVh)Wh)jC]^_3[Y]^_[YU- Vt$WIu7FNV PQR% FNWPQ% ;=nHd$' FT$RjjP'DL$Wj QՃ /VFNRPQ& ;F>Nug늉L$t< t< AL$uND$PjjQN'L$AL$0҃tmhAL$F~!f)F AnL$u- F=w-DFN9VVjRA#F_^3];~1_^3]ʃNFT$RjjPd&;FNQT$RjL$ t_^]-tt[t_^3]ËVL$QjjR&NT$$+QЋF RP}#~X)F}NjjjQ%V jjj R%~)DF jP)"_^]ËV jRF jP "_^]Ðc% c% & & ̸D$L$ S\$ VWjPQSD$ET$ VRujS!u܋Ft P Ft PV_#D$^[Y̸Pl3ĉD$LD$XL$`S\$XUl$dVWD$L$K>t P肿S)Hu3jth)jxjej'V_^]3[L$L3PPyNjGL$T$RD$ PSQu6hh)jfjej'V,_^]3[L$L3HPËT$NRD$ PQ t\MjST$RUD$(PQRt;D$VPL$ QRF t!|$t7F PnWhF uV_^]3[L$L3PËL$\_^][3PVt$ W|$ QPu$NWQRuFOPQ_^Vt$ W|$ QPu8NWQRu$FOPQuV G RP_^̋D$L$ T$SUD$ VWP445h j:Uփ?@X ͍:utY:Zuu3Ƀu\$$ >@ ͊:utY:Zuu3Ƀ\$$8/x/j/P֋uh aL$$VST$$D$$8tKj:Wh t@;)u)PL$ tWT$uhh)jAU_^][hh)jyjrj']t UD$ t PL$t PzT$t Pg_^]3[̋D$PFP`SUl$ VWuK|$\$tW+qWy S&q3SItWPVU _^][Vt0t PD$ D$t QVRu Vn3^Ë^VZ$tD$ PNQu VI$3^W|$ Bt P($p_^Vt$~uFvu3^W|$tB~u ~FtFWP-u_3^hh)jjW_^S\$ VWSo|$PW t:LGt.t$tfVSru1hh)jnjnj'GPG_^3[ËL$GQHVRQjRPtD$$uDSWt\$ S36~#VS8PWtSF;|_^[̋D$L$̋D$Vpuhh)jljoj'/3^ËQP=mthh)jhjoj'3^VPR;^̋D$uËH L$f̋D$uËB D$Y̋D$uVt$}3FSWx W;})\$VWQStWF;|_[^_[^W|$u_ËGSu3L$@tVt$t@t Pb^D$tOD$tW [_̋D$Vt$jPV }3^Åu3Q PR?L$ T$QL$RT$QRPBL$$t^̸SUVD$WP \$ SXl$,uh@h)j{fL$T$RSL$z~hHh)j~jsj'3D$(|6L$T$R+SL$<}hRh)jjsj'3|$ tyWuh^h)jz(D$L$Q+WD$}hfh)j}jsj'G3SW}hnh)j|jsj'3_^][ËD$QT$鞍̋L$BD$鎍̋D$uËL$tL$tPL$ tPL$t@ ̋L$39AVWt]D$ PQt@|$tFFt,hmVFPPW uV9_3^_^SUVt$W39x u躍A 9z L$(GPQ}D$,tW RP`P-L$QsD$_ttzCsl$$u(hh)jmjgj'#WJ_^]3[+stHVUt:\$ txFt%SPhtt$B WPauW_^]3[Ë_^][Vt$~ upF u^ËF W|$WPu_^hh)jjW_^̸ l3ĉD$D$,L$0S\$,Ul$uNQ[VRRFPI V@^Vt$t#FhG PzN QVj V^U3uhh2jCjmj(蔱]Ãt tu uhh2jijmj(e]hh2j tT$ 39L$pP8X H]̋D$ UVt$WPQYtJ|$u xGt+T$D$ L$$VFNWVRkv_^H]V_^]Ul$Vuhh2jCjlj(莰^]W|$ uhh2jCjlj(h_^]À}St2h PWӃthh2jhjlj(-F>uԋL$0T$(\$4|$,t$QRY[tF|$u wGt'D$L$ FnNWVRnu_^H]V_^]̋D$L$T$ 
S\$jPD$QL$RPQ3d[̋L$3t(Quhh2jAjgj(D3ËL$T$S\$jQL$RT$QPR[̋D$L$T$S\$PD$QL$RT$PQR3[̋L$3t)Q uhh2jAjjj(褮ËL$T$S\$QL$RT$QRPD$PJQ[̋D$L$T$PD$QL$jjRT$PD$QRP$ËD$SUVW3333ۅtPvuh>XD$tPXuhH:D$ tP:uhRD$$tVP؃uGh\h2jAjnj(螭t Vt Wt U_^][ËD$,L$(T$PQjjSUWVR$_^][S\$ UW3;uhh2jCjmj(_][Vhh2j p ;tP^\$~~~ 9{u tC;t ~~~CVP]r;^_]H[Vz^_][ËL$3t)Quh{h2jAjfj(tSjjjjPD$3PSK1[S\$ UW3;uhh2jCjmj(_][Vhh2j p ;tP^\$~~~ 9{u sC;t ~~~CVP]q;^_]H[Vz^_][ËL$3t)Quhh2jAjej(tSjjjjPD$3PSK1[̸l3ĉD$D$IS\$(Ul$(WtSUPу _][L$3۪22 2fD$2T$2L$:T$fD$fL$ u_]3[L$3胪ËōP@u+VptÍPd$@u+thh2VVT$$RWVUW$tVD$ PWVSWVL$QWL$0 ^_][3̋L$T$AQ̸ 薩L$,3$D$D$D$ D$D$D$D$D$$$D$Bt$RQЃu 3 UVt$ @3tVЃu^]FtVh P {NSWQ3o~4d$9ktVWRoPCVЃt{NQGo;|ЋB tVЃt[tR3FP3{o~Hd$>tNSQnoWPV҃tt3FPC?o;| A_[tVЃ^]̋T$uh)h2jCjoj(CËD$t(th<h2jjjoj(ËBËJ|$ t JÁJ̋D$L$T$ PD$ QL$ RPQj ̋T$ T$`L$ L$D$h̡hu hVhmh2j t'3FFF FFFD$P^Vt$PSVG^̋D$t L$H3Ã̋D$t L$H3Ã̋D$t L$H 3Ã̋D$t L$H3Ã̋D$t L$H3Ã̋D$t L$H3Ã̋D$t@3̋D$t@3̋D$t@ 3̋D$t@3̋D$t@3̋D$t@3̋D$uË@̋L$u3ËtuA̋L$t HwA 3̋L$u3ËuA̋L$t HwAÃ̋L$t HwAÃ̸$Ƥl3ĉD$ S\$,UVt$8W|$@ǍP@uc+‹%F u hP?eh NPQӃu)FRPӃu7G8u_^][L$ 3D$ËNV _^]3[L$ 3%$ËNV _^]3[L$ 3$ËNQhd T$(j RM:FPhd L$(j Q8: ;n}RKheh2jejij(ףh2T$(Rh2D$@Ph2j跦,_^][L$ 3肣$ËF;~/Khmh2jdjij(~h2L$(Qh2T$@R륋v uhvh2jijij(L@PWV L$0_^][33 $VjPh2j ujSh2jAjhj(3^ËD$uhu )hF PVj FFFQ ^Vt$ }hh2jgjkj(肢3^W|$ GP i;|hh2jfjkj(Q_3^ËOVQhtIw@ _^VjPh2j ujSh2jAjhj(3^áhu 1hF PVj FFFY ^̸@ZVt$ V%w&xPVxP0  xQD  ^̸ l3ĉD$Sh V5` WD$ jPփ t&L$j QӃt^[L$3ڠ ËL$^[33Ǡ VWhh3jj 5H h h3p֋= |uף|h  h3փxu ׃@x_^V5 W֋ |=4 ;tQ׃֋ȡx@;tP׃h@h3jj _^̡V5 Pj֋ Qj֋Rj֡Pj֋ Qj ֋ďRjփ0^̋D$SUl$ VWt OӃ u F tuj` Pp _^]P[@u+V5 h`Z jh`Z jh`Z jh`Z jh`Z j h`Z jփ0ď^ø膞l3ĉ$S3U$\$tht\$ 9$uD$ P |QT$hR`  ;tt|PX ua |Q8 uMT$ j Rh ;t 9$tW=|_tD$ PUV |D$ uD$9$][uxhX R  =t|D$hP$ D$3fVW|$WSUthxPWlt50 P֋ xQW`P֋xRD  jW2t$PW ][_^Wt  Pxh@3PӋ x-D QՃjWt$PW ~WW:utH:Nuu3t xh03RӡxPՃ ][_3^][_^Ë xQW\sP0 xRD  jW:t$PW _^S\$V3}^[WtaD$Ul$KSjUjPW |1|$$tL$T$ USjQjRW| WW]~3_^[̸l3ĉ$$ =$$~V$VRPD$PQ!L$hQ$ ^3̚{̸<5̋D$ L$T$h<5PQRwJ̋D$L$h<5PQ2 h<5"̋D$h<5Pa&̸5̋D$ L$T$h5PQRI̋D$L$h5PQ<2 h5F"̋D$h5P%̸5̋D$ L$T$h5PQRwI̋D$L$h5PQ1 
h5!̋D$h5Pa%̸6̋D$ L$T$h6PQRH̋D$L$h6PQ<1 h6F!̋D$h6P$̸6̋D$ L$T$h6PQRwH̋D$L$h6PQ0 h6 ̋D$h6Pa$̸6̋D$ L$T$h6PQRG̋D$L$h6PQ<0 h6F ̋D$h6P#̸7̋D$ L$T$h7PQRwG̋D$L$h7PQ/ h7̋D$h7Pa#̸T7̋D$ L$T$hT7PQRF̋D$L$hT7PQ>>|>>>|( -( -gggxgggxsS՗sS՗'''%N'''%NAAA2XsAAA2Xs, , QSQS}}}ϔ}}}ϔn7In7IG؎VG؎V0p0p#q#q|||Ǒ|||ǑfffqfffqSݦ{Sݦ{\K.\K.GGGFEGGGFEB!B!ʼnʼn---uZX---uZXƿyc.ƿyc.8?8?#G#GZZuZ/ZZuZ/6l6l333f333fccc?\ccc?\  98I98Iqqqqqqύύd}2d}2II9Irp;II9Irp;Cن_Cن_11KHۨKHۨ[[q[*[[q[*4 4 R)>R)>&&&-L &&&-L 222d222dJ}YJ}Yjj<x3w<x3ws榷3s榷3:t:t¾|a'¾|a'&އ&އ444h444hHH=Hzu2HH=Hzu2$T$Tzzzzzz􍐐z=dz=d__a_>__a_> @= @=hhhghhhghr4hr4ʮ,A,A^u}^u}TTMTTTMTΓv;v;""" D/""" D/dddcdddc**ssssssHZ$HZ$@@@:]z@@@:]z @(H @(H+V蛕+V蛕3{3{KۖMKۖMa_a_===z===zȗf3[f3[6ԃ6ԃ+++EVn+++EVnvvvvvvႂ2d悂2d((lw6lw6õ[wt[wt)C)CjjjwjjjwPP]P PP]P EE ELWEE ELW88000`000`+t+t???~???~UUIUUUIUǢyYۢyYeeeeejeeejҺhiҺhi///e^J///e^J'N睎'N睎_޾`_޾`pl8pl8.F.FMM)MRdMM)MRdr9vr9vuuuuuu0 60 6$ $ @yK@yKcYхcYх8p6~8p6~|c>|c>bbb7Ubbb7Uw:w:)2M)2Mb1Rb1R:b:b3f3f%%%5J%%%5JYYyY YYyY *TЄ*Trrrrrr999r999rLL-LZaLL-LZa^^e^;^^e^;xxxxxx888p888p匌  cƲcƲA W䥥A WCM١CM١aaa/Naaa/NE{BE{B!!!B4!!!B4J%J%xf<xfс>сDU"DU"  NN%NJkNN%NJkQsfQsf ` `<<|>|j5@j5@  ޹go޹goL_&L_&,,,}XQ,,,}XQkָkָk\ӌk\ӌnnnW9nnnW97n7n  VVEVVVEVDD DI^DD DI^ߞߞ!7O!7O***MTg***MTgֻmk ֻmk #F⟇#F⟇SSQSSSQSWܮrWܮr , X'S , X'SN'N'lllG+lllG+111b111btttttt  FFF CLFFF CL &E &E<<PD(PD([Bߺ[BߺXN,XN,:::t:::tiiioiiio $ H-A $ H-Apppppp׶TqoTqogηgη;~;~.ۅ.ۅBBB*WhBBB*WhZ-,Z-,IUIU(((]Pu(((]Pu\\m\1\\m\1?k?k"D†"D#臸O6yoR` {5.KWw7JX) k]>g'A}|fG-Z3`ZPYuh`ZPCbh\j7b̋D$L$g̋D$W|$9Gu_VP@ujThDHjAjwj/ b^3_ËGP@w^_̋D$W|$9u_VPujjhDHjAjvj/a^3_ËPz7^_̋T$BD$RD$W|$9Gu_VPuhhDHjAjyj/6a^3_ËGPOw^_̋D$W|$9G u_VPdguhhDHjAjxj/`^3_ËG P5w ^_̋D$L$%A̋L$39AVt$tFh@Py)F^̋D$HL$DT$BD$ NT$BD$DT$BD$EL$QT$pEL$QT$ED$ L$T$PQRE ̋D$HL$W|$ u3_VWLt$PhHVWF PV1hHVW~(u hHV|PV0h|HVeW/ u hVMPVD/jhX VW~ЬuPh`HVW PV0^_̋D$W|$9u_VPUAujLhHjAhj/^^3_ËPA7^_Vt$FPrVL$RN?D$F^̋L$T$b̋D$W|$9Gu_VPԀu hhHjAhj/]^3_ËGPBLw^_̋D$W|$9Gu_VPt<u hhHjAhj/]^3_ËGP"<w^_̋D$W|$9G u_VPcu hhHjAhj/3]^3_ËG Pw ^_̋D$W|$9Gu_VP蔂u 
hhHjAhj/\^3_ËGPw^_̋D$@̋D$W|$9Gu_VPt=u hhHjAhj/c\^3_ËGP"=w^_̋D$W|$9u_VPbuhhHjAjuj/\^3_ËPg7^_̋D$VW|$ 39Gt?t,P5buhhHjAjtj/[_3^ËGPw_^̋D$VW|$ 39Gt?t,Pauh"hHjAjsj/G[_3^ËGPw_^̋D$L$%A̋L$39A̋D$W|$9Gu_VPDau hDhHjAhj/Z^3_ËGPw^_̋D$@̋D$W|$9G u_VPDu hZhHjAhj/CZ^3_ËG Prw ^_̋D$@$Vt$tF$h@P #F$^̋D$H$L$P>T$B$D$GT$B$D$P>T$B$D$>L$Q$T$?L$Q$T$ ?D$ L$T$PQ$R9? ̋D$H$L$鐓Ul$VW3|>PUXMt&G~hDOS0FPh S F}‹_^]SUVVVVհ t SW( hWhJWt UW( hWhJWt VW( hWvhJWh^][S\$Wh0KS?|$P\ w ]Qh S h KSVhKSWR3n ~O$~hKSQGjVPRPSHhX S-OQF#;|uhHShJS^tW8] uhSWhX SI_[Vt$ u3^WV|$PhHW hHWVPW'VޮPW)hpKWV&0t>.tFZF^uj>j؃tD$ PSCu7S-hhKjshj/I_3[L$3H ËL$$_[3H V:tjVLu*jfhKjAjnj/HD$hLjP 3^Ë^̸6H$P u7VhhKjzjoj/^Ht$ hLjVmjVu(3^YËL$$D$YVt$hLjV.jV63^VhhKjPNq uhhKjAjj/G3^jPjVG F,0 F4 F< ^̸6GSU3W\$\$ 3(jW D$ PWNDQRWbjF0N,PVуD$QPW?N8T$RV4D$PQV҃ D$L$PQN$3D$ PW9^u 9^u9^ tT'؅FtPS<FtPSF tPStSWtpF(tjWt[VDRtPWt=F(t|t,EQRUEtUWDuEW'hhKj{j{j/3EFHP觼Q_J uhLjV U貝S<&T$RbD$P_][SUl$ VW*tb?u  tRjjSa )tjPSP%6 uJj'V)h{hKjAjqj/DW#*hPhKjAjrj/D_^]3[ËVQ; tU3, ~:IVU) j@ tPR tUF ;|ˋ_^][̸CFSUWP3Q\$33\$uhhKjxz;uhhKjA^jW譱ZWjPGBNDQ!tBRWaF;t/P1 ~"FSP0 PW9NQC ;|VPRPW蕶؃uhhKjvhXPjj2SfuhhKjwF(tF3PtxUSuhhKjtMW~tOjW?؃\$ uhhKjANLQSZ$ughhKj|hj/{BNHQR7Guh MjVs D$ PU'WJD$ _][SWuh뉋VLFHRWP 3~LD$UVt$ 3WnDnHnL$FH;uhhKjAjzj/AzD$UP!FD;uhTMjVjVKUUV t<3t1;t&PFL;tS[t uWhhKjyjzj/)AFH;t8P)QEu#h0MjV ;uVHR#nHFD~HPnH NHQnD#VLRnH " _nL^]̸V@$VPQRD$PVL$j QT$R ;D$PhhTNjdjmj/H@VbNPhdj5C(3L$QT$R^Ĉ̸?SUVW33~!VWPW\Fh;|h%hTNS!i l$u!h'hTNjAjij/?^]3[W3~JVWPD$o ~/CD$ UPWPS WF;|l$^][̋D$PL$PQKc th?hTNjljfj/>3ø̸ v>l3ĉ$$$U$,P賖P譵Px PYPTPj D$u hZhTNj~hj/G>PIhchTNPEg uhehTNjAhj/=qL$QT$ Rnt[hD$$PS ~.PL$$QT$Rhm t-hD$$PS ҋjQT$Rn u2P\wQhE3]$39= Ë$ ]3̸= ̸<D$ SUVWP$WD$tEMQR?a[tSDmtUDumGP轳;uYOQ譔L$r;u=st :u%vP:Quv@:Au _^][Y3t$hhTNjgjdj/0<_^][Y̋D$PuhhTNjijej/;3ËL$PQ;thhTNjhjej/;3øUl$VW33ISJ;}$WSLUPFGt֍G_^]ÅtG_^]_^]Ul$ Vt$ SWjjUV3$SV0>uyvMP+<;9ucspto}tJPxu:jQv8u(W @RPuOVQR]:tt$VC+;`_[^]_[^]^]̸:l3ĉ$ $UVPIEP>hL$!jQD$D$(B:WNw ]T$ D$ NEWPv~}D$ }tzSD$]$KUQR-tE|$u ,fD$D$Њ@u|$+OOGuȃ]|[|$u L$T$D$ h hTNjkjhj/9t$ ƅuT$L$QhNPhNRhpNj;Vd 
_^3]$ 38Ë$^]3̸w8Ul$V3}D$ uHUQR{uF^]SD$PjjUW~"؃tKIUz3Ƀu#ST$RVjUWF"؃u[^]Åt S轐[^]̸7D$ SUVhPu3!HQT$@QT$Rj D$ t{]jWAPS*ubS~8W ;~&IVW PS|*WF;|UD$D$^][h#hTNjejgj/ 7UcD$^][̸ 6l3ĉ$$,$0SU$03V$0D$$L$ \$ \$\$\$;uhhTNjfjjj/6zVRQthhTNjrjjj/X6LWV٪;W SWSSjVD$8蕢thhTNjjSUVe D$;PL$SQyT$0U|$  l$ |$UjV |$hT$0RW萅 SUVW0hhTNjm:D$$thhTNjjSjXD$hhTNjnjjj/5_D$PʊL$hPMQT$RQ$8D$$^][34 ̸f4SUl$ Vt$,W3V\$,>WE|$ \$\$\$\$t"MUD$(PD$0QRPxQEt.Vt hhTNjqhj/14EtM VQ EtUMVRUEtM]D$PV|$t$L$0T$QL$RT$t$0|$ 3E tE VPjtwE@t2;t.W|$,uhhTNjohj/P3?Et1M$|$(QuhhTNjphj/3D$T$(RBD$PlL$Q>^D$( _^][SUVt$WVVʊV3P tD$SWP Eu_^][VW|$W3tD$ VWPV_^VjBhNj([ t#3FFF FFFFF F$^jFhNjAhj/1^Vt$uhNjLhNFT 3FFF FFFFF F$^Vt$toFP>'NhPMQV RFPNkNQ\VR,F PN$Q芉$3FFF FFFFF F$^Vt$tVqV[\^S\$Ul$VuhNjvhN^S t3U1WSnшt%PtSF _u^][Ã&SLW蔧Pj Ft_W.WvhhNPFYFt4FPWONPQ`0St:P6F u-tUk_^]3[V[VE[_^]3[Ã&_^][̸/VWD$P3!T$(L$ QR5D$P_?tG|$ jh Wu ~$ƍP@u+PVWX ~3VZL$Q _^̸.l3ĉ$$V$jPD$ hPJPL$QVjhX V$(^3̸.ĄSUVt$hNVD$P4 3ۅWd$L$SQWPVW\u Ph\VjjWVmuh hNVaWRVjhX VC ;{_^][̋D$Q4IuT$aPhNR PDT$PhNR VW|$WĤPHua PhDt$ PhNVhOVW4PjQRVe(_^S\$VWh S3輘tWWWV芤uSh O @P   VB|_^[̸,S\$ UVWh S33Ot$t[tPWWWVnrW3D~OSWFtPU$WC;|Ћt$Sh@O @P   h[WSV}{ _^][YS\$VWh S3蜗tD$PWWVƮuShdO @P   V{_^[̋D$uCD$VhOhOPi u$hOhOhO @P  ^̋D$VW|$hܼWP3Xi u#hܼWhO @P  _^ËL$T$PQR _^SW|$ 3۹Pǐ:utP:Quu3u_[VWtXONJ:utP:Quu3uPPjjdVShVt(hhOjhj/_*WhOjR- t VU^_[̋D$ SV33WuH|$D$h PWPg u-h PWhO @P  V"}_^[PtL$VQ_tV|_^[̋D$ VW33uD$L$ h,[PQng t!PtT$VRthPMVV_^̋D$ SV33WuH|$D$hPWPf u-hPWhO @P  V_^[ËL$QPtT$ VRtVض_^[̋D$ SV33W|$uDD$h$PWP]f u-h$PWhO @P  V_^[jPAIu h$PWhOL$VQ`tV^_^[̸'D$SVt$Wh4PV3P\$3e ;t/Pzv;u h4PVhO @P  fUW*~Ol$ SW(Ht@jPoHtBVUtVWC;|D$]h= W0D$_^[YËL$h4PQhO @P  ĸ&D$S\$VWhDPS3P|$3d ;u#hDPShO @P  PWu;u hDPShOVtUV ~Gl$ WV Ht@PBtjD$TPL$@QBjT$`RD$LPBL$P@QCjT$0jR*^_][L$,30ËL$8_][330Ã|$SUWtWl$tO\$tGtV.C}P%FK}Q FT$jVR0 }C D$|$PuD$tYR1V1FFE NMD$0t,FP NQR1FP1V1L$ Q>      t h d ` T H @ 4 (                  x t l h \ T L D <  .\crypto\cryptlib.cERRORdynamic%I64iOPENSSL_ia32capService-0x_OPENSSL_isserviceOpenSSL: FATALOPENSSL%s(%d): OpenSSL internal error, 
assertion failed: %s pointer != NULL.\crypto\mem.c.\crypto\mem_dbg.c" thread=%lu, file=%s, line=%d, info="number=%d, address=%08lX thread=%lu, %5lu file=%s, line=%d, [%02d:%02d:%02d] %ld bytes leaked in %d chunks not availableOPENSSLDIR: "/usr/local/ssl"platform: %sVC-WIN32compiler: %scl -D_USE_32BIT_TIME_T /MD /Ox /O2 /Ob2 -DOPENSSL_THREADS -DDSO_WIN32 -D_USE_32BIT_TIME_T -W3 -Gs0 -GF -Gy -nologo -DOPENSSL_SYSNAME_WIN32 -DWIN32_LEAN_AND_MEAN -DL_ENDIAN -D_CRT_SECURE_NO_DEPRECATE -DOPENSSL_BN_ASM_PART_WORDS -DOPENSSL_IA32_SSE2 -DOPENSSL_BN_ASM_MONT -DOPENSSL_BN_ASM_GF2m -DSHA1_ASM -DSHA256_ASM -DSHA512_ASM -DMD5_ASM -DRMD160_ASM -DAES_ASM -DVPAES_ASM -DWHIRLPOOL_ASM -DGHASH_ASM -DOPENSSL_USE_APPLINK -I. -DOPENSSL_NO_RC5 -DOPENSSL_NO_MD2 -DOPENSSL_NO_KRB5 -DOPENSSL_NO_JPAKE -DOPENSSL_NO_STATIC_ENGINE built on: %sFri May 11 04:08:14 2012OpenSSL 1.0.1c 10 May 2012.\crypto\ex_data.cno dynlock create callbackfips mode not supportedINT_NEW_EX_DATAINT_FREE_EX_DATAINT_DUP_EX_DATAFIPS_mode_setDEF_GET_CLASSDEF_ADD_INDEXCRYPTO_set_ex_dataCRYPTO_get_new_lockidCRYPTO_get_new_dynlockidCRYPTO_get_ex_new_index.\crypto\o_fips.cMD4 part of OpenSSL 1.0.1c 10 May 2012MD5 part of OpenSSL 1.0.1c 10 May 2012SHA part of OpenSSL 1.0.1c 10 May 2012SHA1 part of OpenSSL 1.0.1c 10 May 2012SHA-256 part of OpenSSL 1.0.1c 10 May 2012SHA-512 part of OpenSSL 1.0.1c 10 May 2012len>=0 && len<=(int)sizeof(ctx->key).\crypto\hmac\hmac.cj <= (int)sizeof(ctx->key)OpenSSL HMAC methodHMACWW4   P`.\crypto\hmac\hm_ameth.cW`00.\crypto\hmac\hm_pmeth.chexkeykey.\crypto\cmac\cmac.cOpenSSL CMAC methodCMAC~~ t  0~Pp@p cipherRIPE-MD160 part of OpenSSL 1.0.1c 10 May 2012  ##%%&&))**,,//1122447788;;==>>@@CCEEFFIIJJLLOOQQRRTTWWXX[[]]^^aabbddgghhkkmmnnppssuuvvyyzz||        0 0 0  0 0  0 0  0           0  0 0 0 0  0  0  0          $$  $ $ $$  $ $             $$  $ $ $$  $ $                  0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0                            ((  ((  ((  ((  ((  ((  ((  ((          """"   " " " " ((((((((    """" ( 
( ( ("("("("(libdes part of OpenSSL 1.0.1c 10 May 2012DES part of OpenSSL 1.0.1c 10 May 2012des(%s,%s,%s,%s)idxcisc2long.\crypto\des\enc_read.c.\crypto\des\enc_writ.c   !"#$% !"#$%&'()*+,-./0123456789:;<=>?@ABCD./0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyzRC2 part of OpenSSL 1.0.1c 10 May 2012xݵ(yJ؝~7+vSbLdDYOaEm }2@놷{ !"\kNTe`sVu;B=0<&oFiW'C>/fހRr5Mj*qZItK^AnQ$Pp9|:#z6[%U1-]㊒)glឨ,c?X≩ 843H _.G奜w hIDEA part of OpenSSL 1.0.1c 10 May 2012idea(int)j?$.Dsp"8 1).lN!(Ew8fTl 4)P|ɵՄ? Gy 1Ѭߘr/Ḗ~&jE|,G$l iciNWqX~=t XrX͋qJT{YZ90`*#`(yA8۸y:`l>w'K1/x`\`U%U攫UbHW@cj9U*4\̴ATr|*oc]ũ+1t>\3֯\$lS2zw(H;Kkē!(f a!`|H2]]]u#&܈e>#Ŭom9BD . Ji^Bh!la gӫҠQjh/T(3Ql n;zP;*~ev9>YfCoEå}^;uos D@jVbNw?6r=B$7H ۛIrS{y%P;LylO`@ž\^c$johSl>9oR;Qm,0DE ^J3(fK.WtE9_ ӹyU 2`yr,@%g̣饎"2u<kaP/R=2`#H{1S>W\o.ViB~(2gsUO'[iXʻ]=!lJ[-ySeEIҐK3~ˤAb Lw6~д+MەqՓkю%ǯ/[{AI~-%^q h"W6d$ cUYCxSZ٢[} Źv&ϕbhAJsN-GJ{RQ)S?Wƛv`+t恵oWk *!ec.4dV]-SGjnpzKD). u#&İn}ߧI`fqilRdVឱ¥6)L u@Y>:䚘T?eB[k?ҡ08-M]% L&pc^?kh >\D}W7: P tAu8/;21>8TNmO Bo ,y|$ryVw.?rU$qk.P̈́GXzt}K:zfC cdG27;C$CMQe*P:qUN1w_V5kǣ;< $Y,n<pEㆱo ^*>Zw=Ne)։>%fRxL.jxS<- N=+6&9`y#RnfE{7(2åZl!Xeh;/ۭ}*/n[(!pa)uGa0a4c\s9pL ު˼,b`\ndi#PZe2Zh@*<1! T_~}=b7w-_h)5ǡޖXxWcr"ÃF T0.SHُ(1mX4a(s<|J]d]B> EꫪOlOBBǵj;Oe!AyMjGKPb=bF&[$ti GV[ Htb#*BXU >ap?#r3A~_;"lY7|`t˧@n2w΄PU5ai Z .zD4Egɞs͈Uy_g@Cge48>q(= m!>J=+hZ=@&L4)i Av.khq$j 3ԷCaP.9FE$tO!@MpE/f m1'A9UG%ښ ʫ%xP()Sچ, mbhiHפh'?Oz|Ϊ_7әxB*k@5 ٫9N;VmK1f&tn:2C[Ah xN جV@E'H::SU kKмgUXc)3VJ*%1?~^|1)p/'\,(H"m?H܆AyG@n]Q_2Տd5A4x{%`*`lc´2Of#k>3b $;" r(-Exb}doITH}'>AcG t.no:7`L knU{7,gm;e' )̒9 i{f} ϑ^و/$[Qy{;v.97yY̗&-1.Bh;+jLu.x7BjQ满PcKkؽ%=YBD n *Ngd_ڈ鿾dW{x`M``FѰ8Ew63kBqA_^;Z4ٷ,Q+:Ֆ}}>(-}|%rZLZq)GW;()f(.y_xU`uD^mm%adâW<'*:m?!cf&(3uU4V<wQ( gQ̫_QM08bX7 z{>d!Q2Ow~㶨F=)iSHd$m-if! 
FEdlX [@X̻k~jEY:D 5>ʹrdfGof,ҏ"W##v215VbuZ6ns҈bIPLVq z2E{Sb%ҽ5iq"|˶+v>S@`8G% 8vFšw``u N˅؍芰z~L\HjiԐ\-% ?2aN[wߏWr:Blowfish part of OpenSSL 1.0.1c 10 May 2012blowfish(idx)@0 /kz%?/?!M@`Iɟ'Կ@uИcnafŽ"o;h(Yy#P_w@CV/|-ҢɡHl4ma/T"2*kT:V"AӢ@f/9/Mҹ-?JDvR7,YQoz Zh{.T"5Y/mKdPIf-徐"3;䎴]4K@E?1.eUʱծ-mԢP @"8On ׿r[O/LVSTIiFXEŅc]ՊSW9j7=T*_}x:vbj Bz)^'rgP8ƌDŽZ*'JkѢ*V`C u\B&Ʉq-ls5&5 M{n0f&%HVV^c ϲc|E pPr(pH# yM;-AB G&LjMG\^ |Y#јr8S#/nqFnEV 9q +A|iCHy}B{BIZݿe\ [ ~iC?9zaz UQry吝5rNcZպ60ę _}֡{/6Y DYD̃Aѱ* ;{ B[A%zzӤ0XޘN?wiy{$[Ĭ%5P_aT1bcKU !h]Yfsc=4·~!+g\ab906;Wd/`:5F- 'zy㠌l0^7%oo;j tE'4:NiOM>dY58EfCr3c%N? (Υ'R¦սTUdpfM w&ۄgC!`X0TroSUGڿ]bVhkʃ;n-Ӧ\= wL3{9+^S_aodC x^c"&gI{ڷ"%-U^7rRyL H[k0 cq/޹ S E!5(T<)c)A|-nRPf4,0P1`sY&D\dwR3A+ٺ|o!PaH?ek«dv&4{^% ;MM1$~I,;jx`]sVz\/1Co0TyXR^/2zj0>՚1B°I #ڸ(0q_`ɣaM/Ǚ."płNؼ40y;'ƸaiH?;(ZC/v7ܱӧßn>ƼB7Q(heRwj-K'5.n\ )^XOX{iT̨g&H`K`8# ~8lI `s5GƱVL>#8d^BFzk Od^AE#\;]>rC|m~ll9`qpsv#E@]%=`GK6DήPQH<p}}d^(O= &g.yr?U+HԼ?^!@WNFRWs3NɷdŸW1O g_?@k{C=K۵cUȗn-J(JqoCCl< _P/~׿PZG.Q>pX.0_|r *,]I鎸P'WIoyR}}Yr@EEt]>uOiVA#. 
'`vteyvvwHNma}/ 4H<0(Ow Vܒ M"7)}V'|@|硴f^aÝ <є`AFv\;x,WGl"}NC~8<=Q8d{h'فI]j~vh]9K9 ;#Im<Eub'z\aBΒ~Brrp }ȡ[(Ob<51)BSjO[}m(Ki\j#MŌ?,-ҖXRg[HJI \EkӉ7`:SRqDIK @sg4|~q6UO/Т`?mPьGnU?墼38Wg} =3{r3O̫]ňv{{uWBdBc+ri/n+_mazgaq%9̸L!gуb>ܷν8\ =Dmn`I:T`H'W+8$ ږ%Eh;}E `P/(b4 ٠m+1+dZ0RXBA\1>26F3S75 {s{OJWdCQz(~c5_ yCdcdJ$_(͸O@C " 0 O7-{$MgQLq_-_d !S^>'_aB%rqK=;&o~~TLmDl߫I&Ǡ36~?Pa w8Pr.PwWFgOT3ɏ1 i5M\=fȦ[]oڑo/"F}F9mOCCN!и  ?X(>nH&p׋wt|% - y#;(8itb߷@!{7Ȋ@ YVvO@/{U MVi35'#WȯVeka^u˅nwU2?-ɿ[%;з$;mc fÀ(82 Tɪס2Zb,gTzuw11&o6F HjyZVLj~CRv/ t,t* M|k .TA5B=!&|,aRe1i%& !\1cr ^Iy p1 d>>̶Ոî 0rlqn/kءDVÈ9/ű1(x⤣2}o~X$Ū{s0a!-!)۳*)e\,0?Б\,ԩT _w:^VxV޾\!uQųåҶw#)Ei/z毲p[v F98/gsD)k)/Ifingӛ/~p%Q^$Slj>!DYܷ)eyCy9A+MWN )zkS< ~U3rŅ?~  ~tn,fy3jXDD1Zs"*ˁc8:$zi HIĀ@8HުL H_%A@N$A UeQr% 9jyMc@hV 틕Z Vqק)N-fcqw*7 5Wa"ɠBր[t!h!hl iwW~@PճMװx QV$AV ʔUWnྵa$ XK'aU{whkldMDfd~i/IO70j5,s@IvM;B(HDLns) _ɋ}oaOw.+rץ<}+FYYEEهTNoHm| ǥcs_DVj͈prͳ]ny` E`1©\1BN"r\,rN@%/N2g@#x\n܃"ukMxnXO DH?{vw#Vu*F_( +8=6 JRft;QZylu eX&hJpFS(l\v0k)h76*g$ k%ֿh,DUueI40" WIbuU~bڨv^FESGml,gHL|3ەCh\SU2` ߝWc9^28aI37~^b<#NygCHKJf-ۄoH Ja)Yfc(` 0qt&@3/C~A^ l( ؒ0~of˜os*`ژ46K-%#= %IH6Jo8C@TzWOpA:Z„TU|5YŗZ:G:%Sj~=eI&TwQ[P]vlDȨ!帊iX`[ŗY)L;]J5ULkD$5鰽bעT/I18T(q)9H[/u@g+3-fVo*ɛ`( ')dMP˲,\26K_A !PNhذy\CPIMAC8wn\e0Rԋ@+>`xT7}02m-yyS"wuXȃoxkcZ\3]ùzv٣Byqj3ƚ`'PC=+mvN%ύHf6AN( aϩI=߹_d: }+p?PO+Zbbyj.H@,Z@"ғ-ES4n)olIIBr~V>olbfLTq*+9)XLVRf.S9v.i硦>iFt+LVvuOx39]O#2]2=&K/~~<O^?vf)o=E4ӷ+4grN=U"g`k8=ü0}8QcÐӝXyTGָaYwSW-XVcNx.F~eyUڑ0@5㶼P?!@=LXI6QpӱڍyKoqK 0ݻ맕d5w$,ϩ/ tцB*v:-7ޚ, )p@ :$7ѴyN] h1 HZ޻Bf1畏?r 3uQB}\cmd!@ WS1zݨ]3CoFq"8ԚέiGb[UgfN G[oLĎnrWxzdD]Ջ` l_9 ]c2֋~ pIPӻߘ)* mS~H~X.t;/RG'~[!<8zvOB9`55'{ɸ6g {pqT3^-ZId, =4pBwbO&Ҹ$d%FN`7>͕xE_{ ۫v"{.1ן%$r_m L=mPO%os#(ĴyI%4aĘnzn|l6AT޾'VAJJ{` ,8I%vK$%Gv XY f 0NndQ& #PhꃢCAST part of OpenSSL 1.0.1c 10 May 2012AES part of OpenSSL 1.0.1c 10 May 2012aes(partial)(length%AES_BLOCK_SIZE) == 0(AES_ENCRYPT == enc)||(AES_DECRYPT == enc).\crypto\aes\aes_ige.cin && out && key && ivec)TPD ,$!%\QM@CCPQA< `cC#( (D@D  "%xsK;;.pp@0 <3?(0226tpD4, TSG\PLXSK=$ $ psC3 2, 
,'prB2 ``@ PP@#+  6LCO7XRJxpH8&/`aA!4@AAPRB|qM=  PSC7!=trF6,#/$#'0 +"lbN.LAM haI)|pL<  ?/3>d`D$,".HCK   !!hcK+dbF&5 3|rN>xrJ:DCG%$"&-!000437.426 ""8084'DAEL@L )415 <0 <2><*:LBNTQE83 ;h`H(|sO?HBJ TRFtsG7 -DBF5(# +daE%:#91\RN9&2011*laM-\SO$0 82 :XPH`bB"(! )033( xqI9hbJ*(" *088(! -,&ܳ38/`@ `QET@DDcO/lcK+hSKXbB"`3305! )( "ࣇ'Б <264CK H/쀈`L,l(4AED!3?<1 =< ((BNL62><%9 # +(bF&drJ:x#'$#/,1rB2pBB@AA@sC3pcG'd, 7- ,,*044 .)QM\8SGT. ̂9?qM=|1105 bJ*h1 "" `H(hqA1pؑaA!`>&QIXQAPܒ#+Ё CGD #,쁍 ?sK;xPL\"!cC#`## AM LȒ2 :8 ".,:bN.lRJX23AI HpH8x ;p@0pqE5tsO?|154`D$daM-lpD4t԰4* rF6t >@@@ ్=:0" *(RN\)RFTCC@ 0%@HHqI9x<!!  SO\sG7tPDT2 !%$COLBFD-PHXRBP+rN>| =000aE%d0 <<6$䳋;pL<| P@P1 98"&$220aI)h374' $$$ SCP @L L3 ;8BJ H7)DTP ,%$!M\QC@CAPQ< C#`c(( DD@  "%K;xs;.@0pp ?<3(2026D4tp, GTSL\PKXS=$$ C3ps 2 ,, 'B2prц @ ``@PP#+ 6OLC7JXRH8xp&/A!`aÄ4A@ABPRM=|q  CPS7!=F6tr/,#'$#0  +"N.lbM LAI)haL<|p  ?/3>D$d`.,"K HC ! !K+hcF&db5 3N>|rJ:xrGDC%&$"-Ӂ!000743.642" "8804'EDAL L@)541  <<0A1pqÉ E5tq;8IXQ?I HA 981G'dcӈ8 B@B# #L,l`ӄ$4401HH@O/lc =<1 -,!@@@>><2<*:NLBETQ ;83H(h`O?|sJ HBFTRG7ts -FDB5 +(#E%da:#91N\R9&2101*M-laO\S$0  :82HXPB"`b )(!303( I9xqJ*hb *("880(-,! &38/ ``@TQED@D/lcO+hcKXSK"`bB3035)(! "' <642 HCK/,l`L(4DAE!?<3=<1 (( LBN6><2%9  +(# &dbF:xrJ'$#/,#12prB@BB@AA3psC'dcG, 7- ,, *440  .)\QM8TSG. 9?=|qM1015 *hbJ1 " "(h`H1pqA!`aA>&XQIPQA#+ DCG #, ?;xsK\PL"!#`cC# # LAM:82 .,":.lbNXRJ23 HAI8xpH ;0pp@5tqE?|sO541$d`D-laM4tpD4*  6trF >@@@ =:0*(" \RN)TRF@CC 0%H@H9xqI<! !  
\SO7tsGTPD2 %$!LCODBF-XPHPRB+>|rN =000%daE<<0 6$;<|pL PP@981 &$"202)haI743'$$ $ PSC   L@L;83 HBJ7.\crypto\modes\gcm128.c.\crypto\bn\bn_add.c.\crypto\bn\bn_div.c.\crypto\bn\bn_exp.cBig Number part of OpenSSL 1.0.1c 10 May 20128m .\crypto\bn\bn_lib.c.\crypto\bn\bn_ctx.c.\crypto\bn\bn_mod.c0123456789ABCDEF.\crypto\bn\bn_print.c%09u%u0-bn(%d,%d).\crypto\bn\bn_rand.c.\crypto\bn\bn_blind.c.\crypto\bn\bn_sqrt.c.\crypto\bn\bn_gcd.c %)+/5;=CGIOSYaegkmq %379=KQ[]agou{  #-39;AKQWY_eikw)+57;=GUY[_mqsw %'-?CEIOU]ci  ')/QW]ew #+/=AGIMSU[ey '7EKOQUWamsy!#')3?AQSY]_iq   # % + / 5 C I M O U Y _ k q w   ! 1 9 = I W a c g o u {      # ) - ? G Q W ] e o {   % / 1 A [ _ a m s w      ! + - = ? O U i y !'/5;KWY]kqu}  %)1CGMOSY[gk!%+9=?Qisy{ '-9EGY_cio #)+17AGS_qsy} '-7CEIOW]gim{!/3;EMYkoqu %)+7=ACI_egk} %39=EOUimou #'3A]cw{57;CIMUgqw}13EIQ[y!#-/5?MQik{}#%/17;AGOUYeks '+-3=EKOUs !#59?AKS]ciqu{} %+/=IMOmq 9IKQgu{   ' ) - 3 G M Q _ c e i w } !!5!A!I!O!Y![!_!s!}!!!!!!!!!!!!!!!!!" """!"%"+"1"9"K"O"c"g"s"u"""""""""""""""# # #'#)#/#3#5#E#Q#S#Y#c#k################$ $$$)$=$A$C$M$_$g$k$y$}$$$$$$$$$$$$$$$$$$%%%%'%1%=%C%K%O%s%%%%%%%%%%%%%%%%& &&&'&)&5&;&?&K&S&Y&e&i&o&{&&&&&&&&&&&&&&&''5'7'M'S'U'_'k'm's'w''''''''''''''(( ((((!(1(=(?(I(Q([(](a(g(u((((((((((((()))!)#)?)G)])e)i)o)u))))))))))))))))***%*/*O*U*_*e*k*m*s***************+'+1+3+=+?+K+O+U+i+m+o+{++++++++++++++ ,,,#,/,5,9,A,W,Y,i,w,,,,,,,,,,,,,,,,---;-C-I-M-a-e-q-----------... 
...%.-.3.7.9.?.W.[.o.y................/ / //'/)/A/E/K/M/Q/W/o/u/}///////////////0 0#0)070;0U0Y0[0g0q0y0}000000000000000001 11!1'1-191C1E1K1]1a1g1m1s11111111111111 2222)252Y2]2c2k2o2u2w2{22222222222222223%3+3/353A3G3[3_3g3k3s3y33333333333334444474E4U4W4c4i4m44444444444444 555-535;5A5Q5e5o5q5w5{5}555555555555555666#6165676;6M6O6S6Y6a6k6m6666666666667777?7E7I7O7]7a7u7777777777778 8!83858A8G8K8S8W8_8e8o8q8}8888888888888899#9%9)9/9=9A9M9[9k9y9}999999999999999999::::':+:1:K:Q:[:c:g:m:y::::::::::::;;;!;#;-;9;E;S;Y;_;q;{;;;;;;;;;;;;;;;;;;< <<<<)<5 >>>>#>)>/>3>A>W>c>e>w>>>>>>>>>>>>>>>> ? ?7?;?=?A?Y?_?e?g?y?}????????????@!@%@+@1@?@C@E@]@a@g@m@@@@@@@@@@@@@ A AAA!A3A5A;A?AYAeAkAwA{AAAAAAAAAAABBBB#B)B/BCBSBUB[BaBsB}BBBBBBBBBBBBBBCCC%C'C3C7C9COCWCiCCCCCCCCCCCCCCCCC D DD#D)D;D?DEDKDQDSDYDeDoDDDDDDDDDDDDDDEEE+E1EAEIESEUEaEwE}EEEEEEEEtoo many temporary variablestoo many iterationsp is not primeno solutionno inversenot initializednot a squareinvalid rangeinvalid lengthinput not reducedexpand on static bignum dataencoding errordiv by zerocalled with even modulusbignum too longbad reciprocalarg2 lt arg3BN_usubBN_rand_rangeBN_randBN_newBN_mpi2bnBN_mod_sqrtBN_mod_mul_reciprocalBN_mod_lshift_quickBN_mod_inverse_no_branchBN_mod_inverseBN_mod_exp_simpleBN_mod_exp_recpBN_mod_exp_mont_wordBN_mod_exp_mont_consttimeBN_mod_exp_montBN_mod_exp2_montBN_GF2m_mod_sqrtBN_GF2m_mod_sqrBN_GF2m_mod_solve_quad_arrBN_GF2m_mod_solve_quadBN_GF2m_mod_mulBN_GF2m_mod_expBN_GF2m_modBN_EXPAND_INTERNALbn_expand2BN_expBN_div_recpBN_div_no_branchBN_divBN_CTX_startBN_CTX_newBN_CTX_getBN_bn2hexBN_bn2decBN_BLINDING_updateBN_BLINDING_newBN_BLINDING_invert_exBN_BLINDING_create_paramBN_BLINDING_convert_exBNRAND.\crypto\bn\bn_recp.c.\crypto\bn\bn_mont.c.\crypto\bn\bn_mpi.c.\crypto\bn\bn_exp2.c@ADEPQTU.\crypto\bn\bn_gf2m.c p     @     h !!ڢ!h4b)Ngt ;"QJy4:C0+ m_7O5mmQE䅵vb^~LB:6 ڢ!h4b)Ngt ;"QJy4:C0+ m_7O5mmQE䅵vb^~LB7k \8kZ$|KI(fQSڢ!h4b)Ngt ;"QJy4:C0+ m_7O5mmQE䅵vb^~LB7k \8kZ$|KI(fQ[=|cH6UӚi?$_e]#ܣbV R)pmg 5NJtl#s'ڢ!h4b)Ngt 
;"QJy4:C0+ m_7O5mmQE䅵vb^~LB7k \8kZ$|KI(fQ[=|cH6UӚi?$_e]#ܣbV R)pmg 5NJtl!|2^F.6;w,']oLR+X9I|j&rZhڢ!h4b)Ngt ;"QJy4:C0+ m_7O5mmQE䅵vb^~LB7k \8kZ$|KI(fQ[=|cH6UӚi?$_e]#ܣbV R)pmg 5NJtl!|2^F.6;w,']oLR+X9I|j&rZ-3 Pz3U!dX qW] }ǫ 3J%a&k/يdvs>jdR+{ Wza]lw FOt1C[K :ڢ!h4b)Ngt ;"QJy4:C0+ m_7O5mmQE䅵vb^~LB7k \8kZ$|KI(fQ[=|cH6UӚi?$_e]#ܣbV R)pmg 5NJtl!|2^F.6;w,']oLR+X9I|j&rZ-3 Pz3U!dX qW] }ǫ 3J%a&k/يdvs>jdR+{ Wza]lw FOt1C[K !r<׈q[&'j<h4 %*Lۻގ.ʦ(|YGNk]Oâ#;Q[a)pׯv!pH'հZ꘍ܐM541ڢ!h4b)Ngt ;"QJy4:C0+ m_7O5mmQE䅵vb^~LB7k \8kZ$|KI(fQ[=|cH6UӚi?$_e]#ܣbV R)pmg 5NJtl!|2^F.6;w,']oLR+X9I|j&rZ-3 Pz3U!dX qW] }ǫ 3J%a&k/يdvs>jdR+{ Wza]lw FOt1C[K !r<׈q[&'j<h4 %*Lۻގ.ʦ(|YGNk]Oâ#;Q[a)pׯv!pH'հZ꘍ܐM546|p&ܲ`&Fuv=7S8/A0jS'1'Z>ϛDlԻG%K3 QQ+ׯBo7ҿYK2rnt^p/F @1 Y#z~6̈EXZK+AT̏m~H^7ৗ(ՋvP=̱\V.28n<h>f?H`-[ ttm@$ڢ!h4b)Ngt ;"QJy4:C0+ m_7O5mmQE䅵vb^~LB7k \8kZ$|KI(fQ[=|cH6UӚi?$_e]#ܣbV R)pmg 5NJtl!|2^F.6;w,']oLR+X9I|j&rZ-3 Pz3U!dX qW] }ǫ 3J%a&k/يdvs>jdR+{ Wza]lw FOt1C[K !r<׈q[&'j<h4 %*Lۻގ.ʦ(|YGNk]Oâ#;Q[a)pׯv!pH'հZ꘍ܐM546|p&ܲ`&Fuv=7S8/A0jS'1'Z>ϛDlԻG%K3 QQ+ׯBo7ҿYK2rnt^p/F @1 Y#z~6̈EXZK+AT̏m~H^7ৗ(ՋvP=̱\V.28n<h>f?H`-[ ttmYto8w|2ߌؾs1;2tG%vk$f:cZh4#t+x#e-"".|W#4sdl0kKȆ/Kyh3[:+)E\\*=05jzjgszͬs1/>䘎k-nAPZV9.ѝ**慎"7 tn;bYAT*8U]U)l:T^8rv 6J&,o])(|1 `~zC|_cM7-X Hzj)sBО)Sg92dQ>a!@r[󸴉 V9Q~{R;5s߈=,4EkP?ƅ͞>f#Bd9?!(`kM=K^wY('3HjB~~1f9)jx;\_,}٘DIWDh'>f,r^&@P?a5ƶ"߹\k% cd 9:hSIxBwiɉrioHJetizg+*=}p_$܀AH1ihD5޳xĶ\Y*Wc."=9ZʧB=G`b%ki})wZx7X+J"8"cs73KIܶjmvHCzb1?B@;` uy`2}kl;ATuq/}7ya”, <ܨl3d+XW=?';<} ]n@TS FTh"k9{^]qR&  S;+˚ah2Wl $?XKzL/,8 ه'pQ !^&)jgh\HS`TP_Z扏9 U+"o;onl]A|'`Kk,BGc@w}-39Eؘ–OBJ|+3Wk1^˶@h7Qc%Q MinghuaQu)rx?|*b^fv |*b^fv e9މp+" Hr9Z^kU𘨜寇$>u|*b^v(߬eaŖ'WMinghuaQuS^ |*b^fv a'L \,Q]tL K 蒴dВCF.7Gnn6 ظY| K MinghuaQu :Ds6yuyy=$<,^R- (`|,[Z9[-zu 8MinghuaQu41;ѳY̛^),eXm]{j^W)2@'jM:q_KD?$rs;L8,zv06M~51k‚S3)E\\*=#֖vVVI0% dX &D#sao55b(0'yX1^9ov`֖vV]h~Z U){Կ6YOzj{&^/͸GNgL敺x$MinghuaQu[ӭ!:  kV!DA%pVKc)xAߘ3Ccn~sKNP1#:dM[ӭ֖vVZ!: 娉A_es&jFVWsL!VP1e-#fHmy@6m&]$iT3S{דNm^\]8X2.68ڣ ,OT(Fx*a-ܴ k)ʑ:XiCwVgVxzxvTC^BﯲQ Hœ' `SQ/xtJ2b~hԙF74>6lq OE\ ys$w 
#L3?MinghuaQuwű0zQiq{@ ȩ{Ièz[|1G_JtloaybSr%7: jJwD:̒I֖vVQ7ȡo"cZQ7>g >iwpɻ'L7~*j[Cٶ}.g~Q5 a~Δ3V)'"LlZVTr2:~s)/ck Lna&S}UZg'͛VV[nst՟kA=K4K Pfd~l3, #X!;3; B_}ˬ!9u_e96sqUj5x( g6qo~Rt/i"&ח@)s '&L{*eU05v1.T05u1*]ƷeS Zy|x$!?xD?;bS&_#Vzi¬$XI(681]Bo~EFYb6N4aw"Y.uw&]E1;y9%ݐOp.%U~;͆ SghT$Բ Qo5ݰgyE9`[*|ﭳ4`_eI:q! L'ϸ|`b".U@7Fi |NB)4`x/'2Qe_lBRzXcH(k_ @U}^[K\_ϗ4@W֟y!= LKMBb! !\Ku;{GkB.gGa֬'ȩr/lzUOP1{T_]H`ЈݳIk `dub`DJwM[4%ZQV`yNTyaϫk+$jv6ŧXOTЏ8QOKO@6ds֦3_|</d7٢sH%nY?1?䬜`$HCpG0MΰѺ9IGvGMʈRrI܀O7OJ;ʕ1MՌ0zTm,IyDOrܶ {20YL>ǣPc䳑ۑA80K98޴\wc|H*X:3Hka:#%~"!)q/\jˌkJY3+gVn)JZxRM9 )Urz4V)l CWbFS/Qn#<H'[aUsh#ݜ=Qtn/NGv`֖vVT%FCR4"xu2Ȕ5RBQ}$ <8K MoiF=y2=t3#!Yf?XHʟȂMSL D֖vVX wıف|Qrg8NH@t3O8 $&nNm MĆ&qۛlhMf%8#џM۷P\MinghuaQuj6&=>%ViTG*V8oHT%+{|WM ϊ"RGu޽5Y ^H?q=  n)\@}9Ж{pK ]G dގ3( †g`)JWoS-ֺ}a6+jV,SsW+ &NT'D֖vVU/'(fS{ggRcjheT&@'kdRbg.EWxogTb 62ײ67J v[43^3)2E$@Ðgȓq/$֖vVX\@(wMwwǷfmfC q'O ($+|їbj8 (Zj(xCCbm`!:>zC}f PPe($ksSQ-Ƅ֖vVPgxmlGV ""Vw~wwq&τyYc2?7]LOCDއFP n8&TZ9aW]Y6njL|{UUUUUUUUUUUa h(>HaZb!.`4+D9*?cߕYzxUN'ѥzU玚Ê_ ذa}\#{l!.-^Ոq~!/KMinghuaQuq ]2W|T1:FgVCB>'uxWxvy9/02| Ã"Wp.| [pnVk~|RaPw?k&itNqdǩac MBI*IfG*/ߤ֖vV]&g''}B0wWg#Vtc%Sf&dyVT_P7eA͂J/.?wREr/ͷK(NiG4vҹ1p LVg3LE;^,qbV}TSn"~3UUUUUUUUUUUUUUL# ;_JN"(q-Bu+,@ M͵ qg+|4*Up:o?̄@a,l b-w# qݍiWk.aBA0^ʰ^]ޝ#THn1rE{{b!'. 
i1IQ 7~_-Q Dt(x6ZۗU CyڦxyNqf@9`Us@{^j_<,z>\&(ZdOt &៾n qQ~@P(2RzAj^& T;VW*VW*-\?ahDC-L+5I $֖vVX[3-ƀVggjeK uO5n FV|FgUVVFg#VVTfVW$r|I6?]R}]E]94V&t+c)#<%wgy8A6j.:$ߜkޘz"9S)TpHy9HO{N#exhW-0Z@q(k򆼡(k򆼡(kkV, Oqtu;/p % R"%nyU [Gxv/ Q*j 2 E,v X0<4E87N I-dDGjuSUz‚VFųJ9Lk"'!oj_{>ܺbزRWs,Yb:E8C|хZڨ*P#Q-rIي]ǰS.Q;zy @1[g6`'~ #Llj]9[#lĭ,* ЕloaThM͏ٴ}CYTL놂7ژ*gZbmNPszb8&]j˰L/z}ᨛb]jY7`@4@4@4@4@4@4@4@4@4#p;^ƍ5 IJfyy pxKJcx\ms@3{LjІl  |D۟$ ^ ( 0    0    H     \ 8 4       (  X T  ,  , H               \  ( 0    P     x  x p P 8 $  $   h      `      X |  T  T h T  ,       `  H  H   d  <    (   `   (     8   x  x   x (   .\crypto\ec\ec_curve.c.\crypto\ec\ec_check.c0123456789ABCDEF.\crypto\ec\ec_print.cEC_PRIVATEKEYpublicKeyparametersprivateKeyECPKPARAMETERSvalue.implicitlyCAvalue.parametersvalue.named_curveECPARAMETERScofactororderbasecurvefieldIDX9_62_CURVEseedbaX9_62_FIELDIDfieldTypep.char_twop.primeX9_62_CHARACTERISTIC_TWOtypemp.ppBasisp.tpBasisp.onBasisp.otherX9_62_PENTANOMIALk3k2k1 @ @ @   0  t  h  \ X @P 4 x  4  ,       0      t    @ @         p  ` ` @ @T H  <   , .\crypto\ec\ec_asn1.c.\crypto\ec\ec_key.c@p0`P`0@@P0.\crypto\ec\ec2_smpl.c.\crypto\ec\ec2_mult.cOpenSSL EC algorithmEC< $ 0 P  p P @` p0   .\crypto\ec\ec_ameth.cECDSA-Parameters  0 P` `0.\crypto\ec\ec_pmeth.cec_paramgen_curveGenerator (hybrid):Generator (uncompressed):Generator (compressed):%02x%s.\crypto\ec\eck_prn.cSeed:Cofactor: Order: B: A: Prime:Polynomial:Basis Type: %s Field Type: %s ASN1 OID: %s.\crypto\ec\ecp_oct.c.\crypto\ec\ec2_oct.c.\crypto\ec\ec_oct.cECDH part of OpenSSL 1.0.1c 10 May 2012.\crypto\ecdh\ech_lib.cOpenSSL ECDH method.\crypto\ecdh\ech_ossl.cpoint arithmetic failureKDF failedECDH_DATA_new_methodECDH_compute_keyECDH_CHECKECDSA part of OpenSSL 1.0.1c 10 May 2012.\crypto\ecdsa\ecs_lib.cECDSA_SIG ? ?  
OpenSSL ECDSA method.\crypto\ecdsa\ecs_ossl.csignature malloc failedrandom number generation failederr ec libECDSA_sign_setupECDSA_do_verifyECDSA_do_signECDSA_DATA_NEW_METHODECDSA_CHECK.\crypto\buffer\buffer.c.\crypto\buffer\buf_str.cBUF_strndupBUF_strdupBUF_MEM_newBUF_MEM_grow_cleanBUF_MEM_growBUF_memdup.\crypto\bio\bio_lib.c bio callback - unknown type (%d) ctrl return %ld puts return %ld gets return %ld write return %ld read return %ld ctrl(%lu) - %s gets(%lu) - %s puts() - %s write(%d,%lu) - %s write(%d,%lu) - %s fd=%d read(%d,%lu) - %s read(%d,%lu) - %s fd=%d Free - %s BIO[%08lX]:WSAStartupwrite to read only BIOunsupported methoduninitializedunable to listen socketunable to create socketunable to bind sockettag mismatchnull parameterno such fileno port specifiedno port definedno hostname specifiedno accept port specifiednbio connect errorkeepalivein useinvalid ip addressgethostbyname addr is not af ineterror setting nbio on accept socketerror setting nbio on accepted socketerror setting nbioEOF on memory BIOconnect errorbroken pipebad hostname lookupbad fopen modeaccept errorWSASTARTUPSSL_newMEM_WRITEMEM_READLINEBUFFER_CTRLFILE_READFILE_CTRLDGRAM_SCTP_READCONN_STATECONN_CTRLBUFFER_CTRLBIO_writeBIO_sock_initBIO_readBIO_putsBIO_nwrite0BIO_nwriteBIO_nread0BIO_nreadBIO_new_mem_bufBIO_new_fileBIO_newBIO_MAKE_PAIRBIO_get_portBIO_get_host_ipBIO_get_accept_socketBIO_getsBIO_gethostbynameBIO_ctrlBIO_callback_ctrlBIO_BER_GET_HEADERBIO_acceptACPT_STATEmemory buffer.\crypto\bio\bss_mem.cNULLfile descriptorFILE pointer.\crypto\bio\bss_file.cfopen('','')twr+a+socketsocket connecthost=.\crypto\bio\bss_conn.c%d%d.%d.%d.%dNULL filterbuffer.\crypto\bio\bf_buff.c$@?.\crypto\bio\b_print.c0123456789abcdef0xdoapr()%s%04x - %c %02x%c %04x - service=''gopherftphttpssockstelnethttptcp.\crypto\bio\b_sock.c%s:%s%d.%d.%d.%d:%dgetnameinfoport='*freeaddrinfogetaddrinfosocket accept.\crypto\bio\bss_acpt.cnon-blocking IO test filter.\crypto\bio\bf_nbio.cBIO pair.\crypto\bio\bss_bio.cdatagram 
socket.\crypto\bio\bss_dgram.csetsockoptgetsockoptStack part of OpenSSL 1.0.1c 10 May 2012.\crypto\stack\stack.clhash part of OpenSSL 1.0.1c 10 May 2012.\crypto\lhash\lhash.cnum_hash_comps = %lu num_retrieve_miss = %lu num_retrieve = %lu num_no_delete = %lu num_delete = %lu num_replace = %lu num_insert = %lu num_comp_calls = %lu num_hash_calls = %lu num_contract_reallocs = %lu num_contracts = %lu num_expand_reallocs = %lu num_expands = %lu num_alloc_nodes = %u num_nodes = %u num_items = %lu node %6u -> %3u load %d.%02d actual load %d.%02d %lu items %lu nodes used out of %u RAND part of OpenSSL 1.0.1c 10 May 2012@@.\crypto\rand\md_rand.cYou need to read the OpenSSL FAQ, http://www.openssl.org/support/faq.html....................rbwb.rndC:HOMERANDFILEPRNG not seededno fips random method seterror instantiating drbgerror initialising drbgSSLEAY_RAND_BYTESRAND_init_fipsRAND_get_rand_method.\crypto\rand\rand_win.cDISPLAY@"@@@Thread32NextThread32FirstProcess32NextProcess32FirstHeap32ListNextHeap32ListFirstHeap32NextHeap32FirstCloseToolhelp32Snapshot@GetQueueStatusGetCursorInfoGetForegroundWindowUSER32.DLLP@Intel Hardware Cryptographic Service ProviderCryptReleaseContextCryptGenRandomCryptAcquireContextW1@LanmanServerF@LanmanWorkstationNetApiBufferFreeNetStatisticsGetNETAPI32.DLLADVAPI32.DLL??called a function that was disabled at compile-timeinternal errorpassed a null parametercalled a function you should not callmalloc failurefatalmissing asn1 eosasn1 length mismatchexpecting an asn1 sequencebad get asn1 object callbad asn1 object headernested asn1 errorTS libOCSP libENGINE libDSO libRAND libPKCS12 libX509V3 libPKCS7 libBIO libSSL libEC libCRYPTO libCONF libASN1 libX509 libDSA libPEM libOBJ libBUF libEVP libDH libRSA libBN libsystem libfreadopendirWSAstartupacceptlistenbindioctlsocketconnectfopenHMAC routinesCMS routinesFIPS routinesOCSP routinesengine routinestime stamp routinesDSO support routinesrandom number generatorPKCS12 routinesX509 V3 routinesPKCS7 routinesBIO 
routinesSSL routineselliptic curve routinescommon libcrypto routinesconfiguration file routinesasn1 encoding routinesx509 certificate routinesdsa routinesPEM routinesobject identifier routinesmemory buffer routinesdigital envelope routinesDiffie-Hellman routinesrsa routinesbignum routinessystem libraryunknown libraryP ` p.\crypto\err\err.cint_err_get (err.c)int_thread_get (err.c)unknownNAerror:%08lX:%s:%s:%sreason(%lu)func(%lu)lib(%lu)%lu:%s:%s:%d:%s .\crypto\objects\o_names.crsaesOaepRSAES-OAEPaes-256-cbc-hmac-sha1AES-256-CBC-HMAC-SHA1aes-192-cbc-hmac-sha1AES-192-CBC-HMAC-SHA1aes-128-cbc-hmac-sha1AES-128-CBC-HMAC-SHA1rc4-hmac-md5RC4-HMAC-MD5aes-256-xtsAES-256-XTSaes-128-xtsAES-128-XTSrsassaPssRSASSA-PSSmgf1MGF1Any Extended Key UsageanyExtendedKeyUsageid-camellia256-wrapid-camellia192-wrapid-camellia128-wrapaes-256-ctrAES-256-CTRaes-192-ctrAES-192-CTRaes-128-ctrAES-128-CTRid-aes256-wrap-padaes-256-ccmid-aes256-CCMaes-256-gcmid-aes256-GCMid-aes192-wrap-padaes-192-ccmid-aes192-CCMaes-192-gcmid-aes192-GCMid-aes128-wrap-padaes-128-ccmid-aes128-CCMaes-128-gcmid-aes128-GCMcmacid-alg-PWRI-KEKdmdNamedeltaRevocationListsupportedAlgorithmshouseIdentifieruniqueMemberdistinguishedNameprotocolInformationenhancedSearchGuidecrossCertificatePaircertificateRevocationListauthorityRevocationListcACertificateuserCertificateuserPasswordseeAlsoroleOccupantownermembersupportedApplicationContextpresentationAddresspreferredDeliveryMethoddestinationIndicatorregisteredAddressinternationaliSDNNumberx121AddressfacsimileTelephoneNumberteletexTerminalIdentifiertelexNumbertelephoneNumberphysicalDeliveryOfficeNamepostOfficeBoxpostalAddressbusinessCategorysearchGuidePermanent Identifierid-on-permanentIdentifierX509v3 Freshest CRLfreshestCRLMicrosoft Local Key setLocalKeySethmacGOST R 3410-2001 Parameter Set Cryptocomid-GostR3410-2001-ParamSet-ccGOST R 34.11-94 with GOST R 34.10-2001 Cryptocomid-GostR3411-94-with-GostR3410-2001-ccGOST R 34.11-94 with GOST R 34.10-94 
Cryptocomid-GostR3411-94-with-GostR3410-94-ccGOST 34.10-2001 Cryptocomgost2001ccGOST 34.10-94 Cryptocomgost94ccGOST 28147-89 Cryptocom ParamSetid-Gost28147-89-ccid-GostR3410-94-bBisid-GostR3410-94-bid-GostR3410-94-aBisid-GostR3410-94-aid-GostR3410-2001-CryptoPro-XchB-ParamSetid-GostR3410-2001-CryptoPro-XchA-ParamSetid-GostR3410-2001-CryptoPro-C-ParamSetid-GostR3410-2001-CryptoPro-B-ParamSetid-GostR3410-2001-CryptoPro-A-ParamSetid-GostR3410-2001-TestParamSetid-GostR3410-94-CryptoPro-XchC-ParamSetid-GostR3410-94-CryptoPro-XchB-ParamSetid-GostR3410-94-CryptoPro-XchA-ParamSetid-GostR3410-94-CryptoPro-D-ParamSetid-GostR3410-94-CryptoPro-C-ParamSetid-GostR3410-94-CryptoPro-B-ParamSetid-GostR3410-94-CryptoPro-A-ParamSetid-GostR3410-94-TestParamSetid-Gost28147-89-CryptoPro-RIC-1-ParamSetid-Gost28147-89-CryptoPro-Oscar-1-0-ParamSetid-Gost28147-89-CryptoPro-Oscar-1-1-ParamSetid-Gost28147-89-CryptoPro-D-ParamSetid-Gost28147-89-CryptoPro-C-ParamSetid-Gost28147-89-CryptoPro-B-ParamSetid-Gost28147-89-CryptoPro-A-ParamSetid-Gost28147-89-TestParamSetid-GostR3411-94-CryptoProParamSetid-GostR3411-94-TestParamSetid-Gost28147-89-None-KeyMeshingid-Gost28147-89-CryptoPro-KeyMeshingGOST R 34.10-94 DHid-GostR3410-94DHGOST R 34.10-2001 DHid-GostR3410-2001DHGOST R 34.11-94 PRFprf-gostr3411-94GOST 28147-89 MACgost-macgost89-cntGOST 28147-89gost89GOST R 34.10-94gost94GOST R 34.10-2001gost2001HMAC GOST 34.11-94id-HMACGostR3411-94GOST R 34.11-94md_gost94GOST R 34.11-94 with GOST R 34.10-94id-GostR3411-94-with-GostR3410-94GOST R 34.11-94 with GOST R 34.10-2001id-GostR3411-94-with-GostR3410-2001cryptocomcryptoprowhirlpooldsa_with_SHA256dsa_with_SHA224hmacWithSHA512hmacWithSHA384hmacWithSHA256hmacWithSHA224hmacWithMD5ecdsa-with-SHA512ecdsa-with-SHA384ecdsa-with-SHA256ecdsa-with-SHA224ecdsa-with-Specifiedecdsa-with-Recommendedid-aes256-wrapid-aes192-wrapid-aes128-wrapid-ct-asciiTextWithCRLFid-smime-ct-compressedDataCA RepositorycaRepositoryid-it-suppLangTagsDiffie-Hellman based 
MACid-DHBasedMacpassword based MACid-PasswordBasedMAChmac-sha1HMAC-SHA1hmac-md5HMAC-MD5seed-cfbSEED-CFBseed-ofbSEED-OFBseed-cbcSEED-CBCseed-ecbSEED-ECBkisaKISAX509v3 Certificate IssuercertificateIssuerX509v3 Issuing Distrubution PointissuingDistributionPointX509v3 Subject Directory AttributessubjectDirectoryAttributescamellia-256-ofbCAMELLIA-256-OFBcamellia-192-ofbCAMELLIA-192-OFBcamellia-128-ofbCAMELLIA-128-OFBcamellia-256-cfb8CAMELLIA-256-CFB8camellia-192-cfb8CAMELLIA-192-CFB8camellia-128-cfb8CAMELLIA-128-CFB8camellia-256-cfb1CAMELLIA-256-CFB1camellia-192-cfb1CAMELLIA-192-CFB1camellia-128-cfb1CAMELLIA-128-CFB1camellia-256-cfbCAMELLIA-256-CFBcamellia-192-cfbCAMELLIA-192-CFBcamellia-128-cfbCAMELLIA-128-CFBcamellia-256-ecbCAMELLIA-256-ECBcamellia-192-ecbCAMELLIA-192-ECBcamellia-128-ecbCAMELLIA-128-ECBcamellia-256-cbcCAMELLIA-256-CBCcamellia-192-cbcCAMELLIA-192-CBCcamellia-128-cbcCAMELLIA-128-CBCipsec4Oakley-EC2N-4ipsec3Oakley-EC2N-3X509v3 Inhibit Any PolicyinhibitAnyPolicyX509v3 Policy MappingspolicyMappingsX509v3 Any 
PolicyanyPolicywap-wsg-idm-ecid-wtls12wap-wsg-idm-ecid-wtls11wap-wsg-idm-ecid-wtls10wap-wsg-idm-ecid-wtls9wap-wsg-idm-ecid-wtls8wap-wsg-idm-ecid-wtls7wap-wsg-idm-ecid-wtls6wap-wsg-idm-ecid-wtls5wap-wsg-idm-ecid-wtls4wap-wsg-idm-ecid-wtls3wap-wsg-idm-ecid-wtls1sect571r1sect571k1sect409r1sect409k1sect283r1sect283k1sect239k1sect233r1sect233k1sect193r2sect193r1sect163r2sect163r1sect163k1sect131r2sect131r1sect113r2sect113r1secp521r1secp384r1secp256k1secp224r1secp224k1secp192k1secp160r2secp160r1secp160k1secp128r2secp128r1secp112r2secp112r1c2tnb431r1c2pnb368w1c2tnb359v1c2pnb304w1c2pnb272w1c2onb239v5c2onb239v4c2tnb239v3c2tnb239v2c2tnb239v1c2pnb208w1c2onb191v5c2onb191v4c2tnb191v3c2tnb191v2c2tnb191v1c2pnb176v1c2pnb163v3c2pnb163v2c2pnb163v1ppBasistpBasisonBasisid-characteristic-two-basiswap-wsgwapcerticom-arcidentified-organizationsha224SHA224sha512SHA512sha384SHA384sha256SHA256sha224WithRSAEncryptionRSA-SHA224sha512WithRSAEncryptionRSA-SHA512sha384WithRSAEncryptionRSA-SHA384sha256WithRSAEncryptionRSA-SHA256Independentid-ppl-independentX509v3 Name ConstraintsnameConstraintsInherit allid-ppl-inheritAllAny languageid-ppl-anyLanguageProxy Certificate InformationproxyCertInfoid-pplpostalCodestreetAddressstreetdes-ede3-cfb8DES-EDE3-CFB8des-ede3-cfb1DES-EDE3-CFB1des-cfb8DES-CFB8des-cfb1DES-CFB1aes-256-cfb8AES-256-CFB8aes-192-cfb8AES-192-CFB8aes-128-cfb8AES-128-CFB8aes-256-cfb1AES-256-CFB1aes-192-cfb1AES-192-CFB1aes-128-cfb1AES-128-CFB1Microsoft Universal Principal NamemsUPNMicrosoft SmartcardloginmsSmartcardLoginInternational Organizationsinternational-organizationsjoint-iso-itu-tJOINT-ISO-ITU-Titu-tITU-TrsaOAEPEncryptionSETdes-cdmfDES-CDMFset-brand-Novusset-brand-MasterCardset-brand-Visaset-brand-JCBset-brand-AmericanExpressset-brand-Dinersset-brand-IATA-ATAsecure device signaturesetAttr-SecDevSigICC or token signaturesetAttr-TokICCsigcleartext track 2setAttr-T2cleartxtencrypted track 2setAttr-T2Encgenerate 
cryptogramsetAttr-GenCryptgrmsetAttr-IssCap-SigsetAttr-IssCap-T2setAttr-IssCap-CVMsetAttr-Token-B0PrimesetAttr-Token-EMVset-addPolicyset-rootKeyThumbissuer capabilitiessetAttr-IssCapsetAttr-TokenTypepayment gateway capabilitiessetAttr-PGWYcapsetAttr-CertsetCext-IssuerCapabilitiessetCext-TokenTypesetCext-Track2DatasetCext-TokenIdentifiersetCext-PGWYcapabilitiessetCext-setQualfsetCext-setExtsetCext-tunnelingsetCext-cCertRequiredsetCext-merchDatasetCext-certTypesetCext-hashedRootset-policy-rootadditional verificationsetext-cvsetext-track2setext-pinAnysetext-pinSecuremerchant initiated authsetext-miAuthgeneric cryptogramsetext-genCryptsetct-BCIDistributionTBSsetct-CRLNotificationResTBSsetct-CRLNotificationTBSsetct-CertResTBEsetct-CertReqTBEXsetct-CertReqTBEsetct-RegFormReqTBEsetct-BatchAdminResTBEsetct-BatchAdminReqTBEsetct-CredRevResTBEsetct-CredRevReqTBEXsetct-CredRevReqTBEsetct-CredResTBEsetct-CredReqTBEXsetct-CredReqTBEsetct-CapRevResTBEsetct-CapRevReqTBEXsetct-CapRevReqTBEsetct-CapResTBEsetct-CapReqTBEXsetct-CapReqTBEsetct-AuthRevResTBEBsetct-AuthRevResTBEsetct-AuthRevReqTBEsetct-AcqCardCodeMsgTBEsetct-CapTokenTBEXsetct-CapTokenTBEsetct-AuthTokenTBEsetct-AuthResTBEXsetct-AuthResTBEsetct-AuthReqTBEsetct-PIUnsignedTBEsetct-PIDualSignedTBEsetct-ErrorTBSsetct-CertInqReqTBSsetct-CertResDatasetct-CertReqTBSsetct-CertReqDatasetct-RegFormResTBSsetct-MeAqCInitResTBSsetct-CardCInitResTBSsetct-BatchAdminResDatasetct-BatchAdminReqDatasetct-PCertResTBSsetct-PCertReqDatasetct-CredRevResDatasetct-CredRevReqTBSXsetct-CredRevReqTBSsetct-CredResDatasetct-CredReqTBSXsetct-CredReqTBSsetct-CapRevResDatasetct-CapRevReqTBSXsetct-CapRevReqTBSsetct-CapResDatasetct-CapReqTBSXsetct-CapReqTBSsetct-AuthRevResTBSsetct-AuthRevResDatasetct-AuthRevReqTBSsetct-AcqCardCodeMsgsetct-CapTokenTBSsetct-CapTokenDatasetct-AuthTokenTBSsetct-AuthResTBSXsetct-AuthResTBSsetct-AuthReqTBSsetct-PResDatasetct-PI-TBSsetct-PInitResDatasetct-CapTokenSeqsetct-AuthRevResBaggagesetct-AuthRevReqBaggagesetct-AuthResBaggag
esetct-HODInputsetct-PIDataUnsignedsetct-PIDatasetct-PIsetct-OIDatasetct-PANOnlysetct-PANTokensetct-PANDataset-brandcertificate extensionsset-certExtset-policyset-attrmessage extensionsset-msgExtcontent typesset-ctypeSecure Electronic Transactionsid-setpseudonymgenerationQualifierid-hex-multipart-messageid-hex-partial-messagemime-mhs-bodiesmime-mhs-headingsMIME MHSmime-mhsx500UniqueIdentifierdocumentPublisheraudiodITRedirectpersonalSignaturesubtreeMaximumQualitysubtreeMinimumQualitysingleLevelQualitydSAQualitybuildingNamemailPreferenceOptionjanetMailboxorganizationalStatusfriendlyCountryNamepagerTelephoneNumbermobileTelephoneNumberpersonalTitlehomePostalAddressassociatedNameassociatedDomaincNAMERecordsOARecordnSRecordmXRecordpilotAttributeType27aRecordlastModifiedBylastModifiedTimeotherMailboxsecretaryhomeTelephoneNumberdocumentLocationdocumentAuthordocumentVersiondocumentTitledocumentIdentifiermanagerhostuserClassphotoroomNumberfavouriteDrinkinforfc822MailboxmailtextEncodedORAddressuserIdUIDqualityLabelledDatapilotDSApilotOrganizationsimpleSecurityObjectfriendlyCountrydomainRelatedObjectdNSDomainrFC822localPartdocumentSeriesroomdocumentaccountpilotPersonpilotObjectcaseIgnoreIA5StringSyntaxiA5StringSyntaxpilotGroupspilotObjectClasspilotAttributeSyntaxpilotAttributeTypepilotucldataHold Instruction RejectholdInstructionRejectHold Instruction Call IssuerholdInstructionCallIssuerHold Instruction NoneholdInstructionNoneHold Instruction CodeholdInstructionCodeaes-256-cfbAES-256-CFBaes-256-ofbAES-256-OFBaes-256-cbcAES-256-CBCaes-256-ecbAES-256-ECBaes-192-cfbAES-192-CFBaes-192-ofbAES-192-OFBaes-192-cbcAES-192-CBCaes-192-ecbAES-192-ECBaes-128-cfbAES-128-CFBaes-128-ofbAES-128-OFBaes-128-cbcAES-128-CBCaes-128-ecbAES-128-ECBMicrosoft CSP NameCSPNameecdsa-with-SHA1prime256v1prime239v3prime239v2prime239v1prime192v3prime192v2prime192v1id-ecPublicKeycharacteristic-two-fieldprime-fieldANSI X9.62ansi-X9-62X509v3 No Revocation AvailablenoRevAvailX509v3 AC 
TargetingtargetInformationX509v3 Policy ConstraintspolicyConstraintsroleid-aca-encAttrsSubject Information AccesssubjectInfoAccessac-proxyingmd4WithRSAEncryptionRSA-MD4clearanceSelected Attribute Typesselected-attribute-typesDomaindomaindomainComponentDCdcObjectdcobjectEnterprisesenterprisesMailSNMPv2snmpv2SecuritysecurityPrivateprivateExperimentalexperimentalManagementmgmtDirectorydirectoryianaIANAdodDODorgORGdirectory services - algorithmsX500algorithmsrsaSignaturealgorithmTrust RoottrustRootpathvalidExtended OCSP StatusextendedStatusOCSP Service LocatorserviceLocatorOCSP Archive CutoffarchiveCutoffOCSP No ChecknoCheckAcceptable OCSP ResponsesacceptableResponsesOCSP CRL IDCrlIDOCSP NonceNonceBasic OCSP ResponsebasicOCSPResponsead dvcsAD_DVCSAD Time Stampingad_timestampingid-cct-PKIResponseid-cct-PKIDataid-cct-crsid-qcs-pkixQCSyntax-v1id-aca-roleid-aca-groupid-aca-chargingIdentityid-aca-accessIdentityid-aca-authenticationInfoid-pda-countryOfResidenceid-pda-countryOfCitizenshipid-pda-genderid-pda-placeOfBirthid-pda-dateOfBirthid-on-personalDataid-cmc-confirmCertAcceptanceid-cmc-popLinkWitnessid-cmc-popLinkRandomid-cmc-queryPendingid-cmc-responseInfoid-cmc-regInfoid-cmc-revokeRequestid-cmc-getCRLid-cmc-getCertid-cmc-lraPOPWitnessid-cmc-decryptedPOPid-cmc-encryptedPOPid-cmc-addExtensionsid-cmc-recipientNonceid-cmc-senderNonceid-cmc-transactionIdid-cmc-dataReturnid-cmc-identityProofid-cmc-identificationid-cmc-statusInfoid-alg-dh-popid-alg-dh-sig-hmac-sha1id-alg-noSignatureid-alg-des40id-regInfo-certReqid-regInfo-utf8Pairsid-regCtrl-protocolEncrKeyid-regCtrl-oldCertIDid-regCtrl-pkiArchiveOptionsid-regCtrl-pkiPublicationInfoid-regCtrl-authenticatorid-regCtrl-regTokenid-regInfoid-regCtrlid-it-origPKIMessageid-it-confirmWaitTimeid-it-implicitConfirmid-it-revPassphraseid-it-keyPairParamRepid-it-keyPairParamReqid-it-subscriptionResponseid-it-subscriptionRequestid-it-unsupportedOIDsid-it-currentCRLid-it-caKeyUpdateInfoid-it-preferredSymmAlgid-it-encKeyPairTypesid-it-signKeyPa
irTypesid-it-caProtEncCertdvcsDVCSIPSec UseripsecUserIPSec TunnelipsecTunnelIPSec End SystemipsecEndSystemtextNoticesbgp-routerIdentifiersbgp-autonomousSysNumsbgp-ipAddrBlockaaControlsac-targetingac-auditEntityqcStatementsBiometric InfobiometricInfoid-mod-cmp2000id-mod-dvcsid-mod-ocspid-mod-timestamp-protocolid-mod-attribute-certid-mod-qualified-cert-93id-mod-qualified-cert-88id-mod-cmpid-mod-kea-profile-93id-mod-kea-profile-88id-mod-cmcid-mod-crmfid-pkix1-implicit-93id-pkix1-explicit-93id-pkix1-implicit-88id-pkix1-explicit-88id-cctid-qcsid-acaid-pdaid-onid-cmcid-algid-pkipid-itid-qtid-pkix-modmd4MD4id-smime-cti-ets-proofOfCreationid-smime-cti-ets-proofOfApprovalid-smime-cti-ets-proofOfSenderid-smime-cti-ets-proofOfDeliveryid-smime-cti-ets-proofOfReceiptid-smime-cti-ets-proofOfOriginid-smime-spq-ets-sqt-unoticeid-smime-spq-ets-sqt-uriid-smime-cd-ldapid-smime-alg-CMSRC2wrapid-smime-alg-CMS3DESwrapid-smime-alg-ESDHid-smime-alg-RC2wrapid-smime-alg-3DESwrapid-smime-alg-ESDHwithRC2id-smime-alg-ESDHwith3DESid-smime-aa-dvcs-dvcid-smime-aa-signatureTypeid-smime-aa-ets-archiveTimeStampid-smime-aa-ets-certCRLTimestampid-smime-aa-ets-escTimeStampid-smime-aa-ets-revocationValuesid-smime-aa-ets-certValuesid-smime-aa-ets-RevocationRefsid-smime-aa-ets-CertificateRefsid-smime-aa-ets-contentTimestampid-smime-aa-ets-otherSigCertid-smime-aa-ets-signerAttrid-smime-aa-ets-signerLocationid-smime-aa-ets-commitmentTypeid-smime-aa-ets-sigPolicyIdid-smime-aa-timeStampTokenid-smime-aa-smimeEncryptCertsid-smime-aa-signingCertificateid-smime-aa-encrypKeyPrefid-smime-aa-contentReferenceid-smime-aa-equivalentLabelsid-smime-aa-macValueid-smime-aa-contentIdentifierid-smime-aa-encapContentTypeid-smime-aa-msgSigDigestid-smime-aa-contentHintid-smime-aa-mlExpandHistoryid-smime-aa-securityLabelid-smime-aa-receiptRequestid-smime-ct-DVCSResponseDataid-smime-ct-DVCSRequestDataid-smime-ct-contentInfoid-smime-ct-TDTInfoid-smime-ct-TSTInfoid-smime-ct-publishCertid-smime-ct-authDataid-smime-ct-receiptid-smime-
mod-ets-eSigPolicy-97id-smime-mod-ets-eSigPolicy-88id-smime-mod-ets-eSignature-97id-smime-mod-ets-eSignature-88id-smime-mod-msg-v3id-smime-mod-oidid-smime-mod-essid-smime-mod-cmsid-smime-ctiid-smime-spqid-smime-cdid-smime-algid-smime-aaid-smime-ctid-smime-modS/MIMESMIMEpkcs5X9.57 CM ?X9cmX9.57X9-57ISO US Member BodyISO-USISO Member Bodymember-bodyisoISOOCSP SigningOCSPSigningCA IssuerscaIssuersOCSPAuthority Information AccessauthorityInfoAccessid-adid-pednQualifiernameExtension RequestextReqMicrosoft Extension RequestmsExtReqpbeWithSHA1AndDES-CBCPBE-SHA1-DESpbeWithMD5AndRC2-CBCPBE-MD5-RC2-64pbeWithMD2AndRC2-CBCPBE-MD2-RC2-64S/MIME CapabilitiesSMIME-CAPSrc2-64-cbcRC2-64-CBCPolicy Qualifier User Noticeid-qt-unoticePolicy Qualifier CPSid-qt-cpshmacWithSHA1PBMAC1PBES2x509CrlsdsiCertificatex509CertificatelocalKeyIDfriendlyNamesafeContentsBagsecretBagcrlBagcertBagpkcs8ShroudedKeyBagkeyBagpbeWithSHA1And40BitRC2-CBCPBE-SHA1-RC2-40pbeWithSHA1And128BitRC2-CBCPBE-SHA1-RC2-128pbeWithSHA1And2-KeyTripleDES-CBCPBE-SHA1-2DESpbeWithSHA1And3-KeyTripleDES-CBCPBE-SHA1-3DESpbeWithSHA1And40BitRC4PBE-SHA1-RC4-40pbeWithSHA1And128BitRC4PBE-SHA1-RC4-128Strong Extranet IDSXNetIDInvalidity DateinvalidityDateX509v3 CRL Reason CodeCRLReasonX509v3 Delta CRL IndicatordeltaCRLNetscape Server Gated CryptonsSGCMicrosoft Encrypted File SystemmsEFSMicrosoft Server Gated CryptomsSGCMicrosoft Trust List SigningmsCTLSignMicrosoft Commercial Code SigningmsCodeComMicrosoft Individual Code SigningmsCodeIndTime StampingtimeStampingE-mail ProtectionemailProtectionCode SigningcodeSigningTLS Web Client AuthenticationclientAuthTLS Web Server AuthenticationserverAuthid-kpPKIXX509v3 Extended Key UsageextendedKeyUsagezlib compressionZLIBrun length 
compressionRLErc5-ofbRC5-OFBrc5-cfbRC5-CFBrc5-ecbRC5-ECBrc5-cbcRC5-CBCripemd160WithRSARSA-RIPEMD160ripemd160RIPEMD160dsaEncryptionsha1WithRSARSA-SHA1-2md5-sha1MD5-SHA1dsaWithSHA1DSA-SHA1pbeWithMD5AndCast5CBCcast5-ofbCAST5-OFBcast5-cfbCAST5-CFBcast5-ecbCAST5-ECBcast5-cbcCAST5-CBCdescriptiontitleserialNumbermd5WithRSARSA-NP-MD5X509v3 CRL Distribution PointscrlDistributionPointsinitialssurnameSNgivenNameGNrc2-40-cbcRC2-40-CBCrc4-40RC4-40mdc2WithRSARSA-MDC2mdc2MDC2bf-ofbBF-OFBbf-cfbBF-CFBbf-ecbBF-ECBbf-cbcBF-CBCX509v3 Authority Key IdentifierauthorityKeyIdentifierX509v3 Certificate PoliciescertificatePoliciesX509v3 CRL NumbercrlNumberX509v3 Basic ConstraintsbasicConstraintsX509v3 Issuer Alternative NameissuerAltNameX509v3 Subject Alternative NamesubjectAltNameX509v3 Private Key Usage PeriodprivateKeyUsagePeriodX509v3 Key UsagekeyUsageX509v3 Subject Key IdentifiersubjectKeyIdentifierid-cedesx-cbcDESX-CBCNetscape Certificate SequencensCertSequenceNetscape CommentnsCommentNetscape SSL Server NamensSslServerNameNetscape CA Policy UrlnsCaPolicyUrlNetscape Renewal UrlnsRenewalUrlNetscape CA Revocation UrlnsCaRevocationUrlNetscape Revocation UrlnsRevocationUrlNetscape Base UrlnsBaseUrlNetscape Cert TypensCertTypedsaWithSHA1-oldDSA-SHA1-oldPBKDF2pbeWithSHA1AndRC2-CBCPBE-SHA1-RC2-64dsaEncryption-oldDSA-olddsaWithSHADSA-SHAsha1WithRSAEncryptionRSA-SHA1sha1SHA1des-ede3-ofbDES-EDE3-OFBdes-ede-ofbDES-EDE-OFBdes-ede3-cfbDES-EDE3-CFBdes-ede-cfbDES-EDE-CFBNetscape Data TypensDataTypeNetscape Certificate ExtensionnsCertExtNetscape Communications 
Corp.NetscapeextendedCertificateAttributesunstructuredAddresschallengePasswordcountersignaturesigningTimemessageDigestcontentTypeunstructuredNameemailAddresspkcs9idea-ofbIDEA-OFBdes-ofbDES-OFBdes-ede3-cbcDES-EDE3-CBCdes-ede-cbcDES-EDE-CBCshaWithRSAEncryptionRSA-SHAshaSHArc2-ofbRC2-OFBrc2-cfbRC2-CFBrc2-ecbRC2-ECBrc2-cbcRC2-CBCidea-ecbIDEA-ECBidea-cfbIDEA-CFBidea-cbcIDEA-CBCdes-ede3DES-EDE3des-edeDES-EDEdes-cbcDES-CBCdes-cfbDES-CFBdes-ecbDES-ECBdhKeyAgreementpkcs3pkcs7-encryptedDatapkcs7-digestDatapkcs7-signedAndEnvelopedDatapkcs7-envelopedDatapkcs7-signedDatapkcs7-datapkcs7organizationalUnitNameOUorganizationNameOstateOrProvinceNameSTlocalityNameLcountryNameCcommonNameCNX509directory services (X.500)X500pbeWithMD5AndDES-CBCPBE-MD5-DESpbeWithMD2AndDES-CBCPBE-MD2-DESmd5WithRSAEncryptionRSA-MD5md2WithRSAEncryptionRSA-MD2rsaEncryptionrc4RC4md5MD5md2MD2RSA Data Security, Inc. PKCSpkcsRSA Data Security, Inc.rsadsiundefinedUNDEF*H *H *H *H *H *H *H *H *H *H UUUUUUU U U*H *H *H *H *H *H *H *H *H ++ +++<*H ++*H +*H  *H  *H  *H  *H  *H  *H  *H  *H  *H  `HB`HB`HB+*H + + *H  *H  +`HB`HB`HB`HB`HB`HB`HB `HB `HBUUUUUUUUU U#+UUeUdU*UU+U+UU U *H}B *H}B *H8+*H8+$+$*H )*H  U%++++++++7+7+7 +7 +7 `HBUUU+e*H  *H  *H  *H  *H  *H  *H  *H  *H  *H  *H  *H  *H  *H  *H  *H  *H  *H  *H *H ++*H  *H *H *H  +7*H  U)U.++0++0+0+ (**H*H8*H8*H *H *H  *H  *H  *H  *H  *H  *H  *H  *H  *H  *H  *H  *H  *H  *H  *H  *H  *H  *H  *H  *H  *H  *H  *H  *H  *H  *H  *H  *H  *H  *H  *H  *H   *H   *H   *H   *H   *H  *H  *H  *H  *H  *H  *H  *H  *H  *H  *H  *H  *H  *H  *H  *H  *H  *H  *H  *H  *H  *H  *H  *H  *H  *H  *H  *H  *H  *H  *H  *H  *H ++++++++ + + + +++++++++ + + + + +++++++++++ +++++ +++++++++ + + + + +++++++++++++++++++++++++ + + +++++++++++ + + + + + + + + + + + + + +0+0+0+0+0+0+0+0+0+0+0 +0 +0 ++ U++++++++++++:X &,d &,d UU7*H + + + UHU$U7U8*H=*H=*H=*H=*H=*H=*H=*H=*H=*H=*H=*H=+7`He`He`He`He`He`He`He`He`He)`He*`He+`He,U*H8*H8*H8 & &, &,d &,d &,d &,d &,d &,d &,d &,d &,d &,d &,d &,d &,d &,d &,d &,d &,d &,d &,d 
&,d &,d &,d &,d &,d &,d &,d &,d &,d &,d &,d &,d &,d &,d &,d &,d &,d &,d &,d &,d &,d &,d &,d &,d &,d &,d &,d &,d &,d% &,d& &,d' &,d( &,d) &,d* &,d+ &,d- &,d. &,d/ &,d0 &,d1 &,d2 &,d3 &,d4 &,d5 &,d6 &,d7 &,d8U-+++++U,UAg*g*g*g*g*g*g*g*g*g*g*g*g*g*g*g*g* g* g* g* g* g*g*g*g*g*g*g*g*g*g*g*g*g*g*g*g*g*g* g*!g*"g*#g*$g*%g*&g*'g*(g*)g**g*+g*,g*-g*.g*/g*0g*1g*2g*3g*4g*5g*6g*7g*8g*9g*:g*;g*g*?g*@g*Ag*Bg*Cg*Dg*Eg*Fg*Gg*Hg*Ig*Jg*Kg*Lg*Mg*Ng*Og*Pg*Qg*Rg*g*g*g*g*g*g*g*g*g*g*g*g*g*g*g*g* g* g* g*g*g*g*g*g*g*g*g*g*g*g*g*g*g*g*g*g*g*"g*#g*g*g*{*H  *H Pg+7+7U U++++U+*H  *H  *H  *H `He`He`He`He++g+g+*H=*H=*H=*H=*H=*H=*H=*H=*H=*H=*H=*H=*H= *H= *H= *H= *H= *H=*H=*H=*H=*H=*H=*H=+++++ ++++ +!+ +"+#+++++++++++++++$+%+&+'g+g+g+g+g+g+g+g+ g+ g+ g+ U U!U6*K=*K=*K=1 1 1 )1 1 1 ,1 1 1 +U UU*D*D*D*D*D++*H}B *H}B++0*H   *H  `He`He`He-*H=*H=*H=*H=*H=*H=*H *H *H  *H  *H  `He`He(7** *** * ******b*c************* * * * * *!*!*!*#*#*#*#*$*$***** * * * * * +7U.+UUUUUUUUUUUUUUUUUU U!U"U#U$U%U&U'U(U/U0U1U2U3U4U5U6*H   `He`He`He`He`He`He`He.`He/`He0*K=*K=*K=U%*H *H  *H  x  p X  P 0  , (  $            ~   ~ ~   ~ ~   ~ ~   x~ \~   T~ T~   P~ D~   @~ 4~  0~  ~  ~ ~  ~ }  } }   `  } }  } }   } }  } }   x} x}   d} d}  ( P} P}  1 H} H} : 8} 8}  B 0} (} K  } } P } } U } |  Z | | !| | " _ | | #| | $| | %j | x| &p| h| '`| X| (T| P| )r H| 0| *w $| | +| { ,| { { - { { .{ { / { { 0 { { 1 { { 2 { { 3 |{ |{ 4 h{ h{ 5 T{ T{ 6 ǀ @{ @{ 7 Ѐ  {  { 8 ـ { z 9 z z : z z ; z z <z pz =dz Xz >Hz 8z ?0z (z @ z z A y y B y y C y y D  y y E  y y F# xy dy G ( Xy Dy H 1 4y y I : y x J C x x K L x x L U x px M ^ dx Px N g @x  x O p x x Px x Qy w w R{ w w S~ w pw T `w @w U 0w w V v v W v v X v v Y xv Xv Z Pv Hv [ @v 8v \0v (v ] v v ^v v _ u u ` u u au u bu u c u u d u u e u hu g \u Pu h @u @u i 8u 8u j ,u ,u k  u u l u t mt t nt t ot t p ʁ t t qӁ t t rxt lt sځ  \t t߁ Pt Dt u 4t  t w t t x t t ys s zs s {s s | s s } s xs ~ ps ps  hs hs  \s  a a ? ň ` ` @ Έ ` ` A ׈ ` ` B ` ` C ` ` D ` ` E ` ` F l` l` G T` T` H <` <` I (` (` J! 
` ` K) _ _ L1 _ _ M9 _ _ NA _ _ OI _ _ PQ _ _ QY |_ |_ Ra l_ l_ Si T_ T_ Tq D_ D_ Uy 0_ 0_ V _ _ W _ _ X ^ ^ Y ^ ^ Z ^ ^ [ ^ ^ \ ^ ^ ] ^ ^ _ d^ d^ `ɉ H^ H^ aщ ,^ ,^ bى ^ ^ c ] ] d ] ] e ] ] f ] ] g ] ] h ] ] i ] ] j ] t] k! l] d] l) P] <] m 1 4] (] n :  ] ] o C ] \ p L \ \ q U \ \ r ^ \ \ s g p\ X\ t p P\ P\ u y H\ H\ v <\ 0\ w $\ $\ x \ \ y \ [ z [ [ { [ [ | [ [ } [ [ ~ [ [  [ x[  p[ h[  \[ P[  H[ @[  8[ 8[  ,[  [  [ [  Ɗ [ Z  ϊ Z Z  ي    Z Z  Z Z  Z Z  tZ tZ  `Z DZ  4Z 4Z  ,Z ,Z  Z Y  Y Y  Y Y     Y Y  Y Y  hY hY % XY XY , LY LY 3 @Y @Y ; 4Y 4Y C (Y (Y K Y Y S Y Y [ Y Y c X X k X X  r X X  { X X  X X  X xX  lX `X  TX HX   0S 0S  H S S  R S S  \ R R  f R R  p R R  z R R  R R  R R  R R  lR lR  \R \R  DR DR  (R (R  R R  R R  R Q  Q Q Ž Q Q Ŏ Q Q Ȏ Q Q ˎ Q hQ Ύ \Q \Q ю LQ LQ Ԏ  L L ? L L @ L L A L L B L L Cď |L |L Dȏ hL hL Ȅ XL XL FЏ DL DL Gԏ 0L 0L H؏ L L I܏ L L J K K K K K L K K M K K N K K O K K P pK pK Q \K \K R HK HK S 4K 4K T  K  K U K K V J J W J J X J J Y J J Z lJ lJ [$ \J \J \( LJ LJ ], @J (J ^0 J J _4 J J `8 I I a< I I b@ I I cD I I dH I I eL I I fP pI pI gT XI XI hX DI DI i\ 0I 0I j` I I kd I I lh H H ml H H np H H ot H H px xH xH q} dH dH r LH LH s 8H 8H t $H $H u H H v G G w G G x G G y G pG z \G DG { 0G 0G | G G } G G ~ F F Ő F F ɐ F F ͐ F F ѐ F F ֐ F F  ސ F xF  hF XF   >  > >  > >  > x> h> `> L> 8>   $> >  ) = =  4 = = ? = = G = p= O \= H= W 4=  = _  = < g < < < < < < l< X< D< 0< < < ; ; o ; ; w ; ;  t; P;  4; ;  : :  : :  : :  : :   : :   |: p:   d: X:   L: @:   ,: :  Ɠ : 9  ϓ 9 9 ؓ 9 9  9 9  9 9  t9 t9  d9 d9   T9 T9   <9 <9  $9 $9  9 9 ' 8 8 / 8 8 7 8 8 ? 8 8 G 8 8 O 8 8 W 8 8  _ 8 8 !g x8 x8 " o h8 h8 # x \8 \8 $ P8 P8 % D8 D8 &  8 7 ' 7 7 ( 7 7 ) |7 h7 * \7 H7 + @7 07 , (7 7 -  7  7 .7 6 / 6 6 0 6 6 1ǔ 6 p6 2͔ H6 H6 3Ӕ (6 (6 4ڔ 6 6 5 5 5 6 5 5 7 5 5 8 t5 t5 9 L5 L5 : $5 $5 ; 4 4 < 4 4 = 4 4 > x4 x4 ?' P4 P4 @. 
(4 (4 A5 4 4 B< 3 3 CC 3 3 DJ 3 3 EQ `3 `3 FX @3 @3 G_ 3 3 Hf 2 2 Im 2 2 Jt 2 2 K{ p2 p2 L \2 \2 M D2 D2 N 02 02 O 2 2 P 2 1 Q 1 1 R 1 1 S l1 <1 T 1 0 Uŕ 0 0 V͕ 4 0 W0 h0 X Օ \0 H0 Yޕ ,0 0 Z 0 0 [ / / \ / / ] / / ^ / / _ / / ` / / a / / b d/ d/ c X/ X/ d @/ @/ e ,/ ,/ f / / g . . h . . i . . j . . k . . l . . m . . n" . . o% . . p( t. t. q+ \. \. r. @. @. s1 (. (. t4 . . u7 . . v: - - w= - - x@ - - yC - - zF - - {I - - |L - - } O  - ~t- h-  Z X- L-  c 8- 8-  l (- -  u  - -  ~ , ,  , ,  , ,  , ,  , , |, p, d, X, D, D,  0, 0,  , ,  , + ̖ + +  Ж + +  ٖ + + + + + x+ `+ H+ 0+ + + * * *  l[]\^lnmo~ o +<>!,=?-P|tBqFC)cW  }"#$.Xr_9n{ DEb%'&(axzy{u|`hw*As   )@d z }! pkxrZrWm\qYs625gXt&%{kg~w|"#0u8~tcY/+S-.,R !y89:;3=<>47QHIJKLVG1@ABCDEF?MNOP265'U(T* cbdef}CFED ijhQNZJPOSRHIQXYWMUVTLGK.*7/,6438-5+120Z[ `a\_]  g9<?>=@;:BAee&'(VS)k3qHLJ:OGN;KIMlvp_/^]hi0Tvfmy#"$[nisq~}|_plwotvum{xyzsrnkghjica`befd@:;<ABC=X.P/QWVDE !"FGH#$%I>?063ST45UJK&'(LMN)*+O71  ,-   89R2^YZ\[]4URzj`ba%jwx71pou$dkpm~t-Q/SR+1,2)0'U(TV*z&'(XHLJG:ON9;KIMroqnsEZw ZWgXY~YVSTUR! 
l^xr[]\^\qlnmos6y~ 25t&%{ +<>!,=?-kgP zw||tCBqF"#)0xu8cwYc.W   !y}89:;3=<>47HIJKLG@ABCDEF?MNOP65 cbdef}CFED ijhQNZJPOSRHIQXYWMUVTLGK.*7/,6438-5+120[ `a\_]  g9<?>=@;:BA"#$.eeorh_`kZ3{lvm p D_/^]hivb%'&(axzy{fuwmy|#"$[{n   iq~}|_pltvusrnkghjica`befd@:;<ABC=X.P/QWVDE !"FGH#$%I>?063ST45UJK&'(LMN)*+O71  ,-   89R2\[])@sA*4zjd`ba%jx71pou$d}{ | zQ} dijk[\]^_`abcdefghijklmnopqrstceuvwxyz{|RSTUVWXgYZ~Y~x`_      !"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^lmno_`abcdefghijk|}~%&h-yCB* )@Fsupqrstuv$|'()*+,-/012wwxyz{43MNOP56789:;<=>?@ABCDEFGHIJKLtq    9   TURSQV/ !%,x  !"#$%&'()*+,-./0123456789:CDEFGHIJKLMNOPQRSTUVWXYZ[Z\]_`abcdefghijkl  :;lpA DE012345678X[;<=>?@ABmnopqrstuvw"#GHIJKLMNO}}".\crypto\objects\obj_dat.c.%lu.\crypto\objects\obj_lib.cunknown nidOBJ_nid2snOBJ_nid2objOBJ_nid2lnOBJ_NAME_new_indexOBJ_dupOBJ_createOBJ_add_object*)A@B)tF@C`_hq@ts@wu@"t#t')+(),T)RU)S,$< 0`HTlx .\crypto\objects\obj_xref.cABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/>?456789:;<=  !"#$%&'()*+,-./0123.\crypto\evp\encode.cctx->length <= (int)sizeof(ctx->enc_data)n < (int)sizeof(ctx->enc_data).\crypto\evp\digest.cctx->digest->md_size <= EVP_MAX_MD_SIZEEVP part of OpenSSL 1.0.1c 10 May 2012.\crypto\evp\evp_enc.cbl <= (int)sizeof(ctx->buf)b <= sizeof ctx->bufb <= sizeof ctx->finalEVP_CIPHER_CTX_iv_length(ctx) <= (int)sizeof(ctx->iv)ctx->cipher->block_size == 1 || ctx->cipher->block_size == 8 || ctx->cipher->block_size == 16niv <= EVP_MAX_IV_LENGTH.\crypto\evp\evp_key.cnkey <= EVP_MAX_KEY_LENGTH0B ? PB0B? 
PB-0B> PB0B@> PB0B0@ PB0B@A PB[ DBH ] DCH ^ DCH \ DpCH "FE #FF .FE $FD +K`H L<KH L>KG L K`G L,K`H L=KH L?KG L!K`G LKI LKPJ L@PL @PM @P@N @P`M @PL @PM @P@N @P`M  @PL  @PM  @P@N  @P`M @P O @P O  @P O @PO @PO  @PO .\crypto\evp\e_camellia.cPPaPPPQQPTUPQRPT0VPQVPTVPQVPTV@QPW@TPW@QW@TW@Q0X@T0XPQQPTUPQRPT0VPQVPTVPQVPTV@QPW@TPW@QW@TW@Q0X@T0X PQQ PTU PQR PT0V PQV PTV PQV PTV @QPW @TPW @QW @TW @Q0X @T0X vP0@Ra`[[ vP00_a`[[ vP0@Ra`[[ vP00_a`[[ vP0@Ra`[[ vP00_a`[[ qP0Sdpc qPcdpc@qP0Sdpc@qPcdpc wPThHe wP0ghHe wPThHe wP0ghHe wPThHe wP0ghHe.\crypto\evp\e_aes.c `o`m `om `on`o@nPoo %Jrptst'Krqtst(Lrqtst&IrqtstJrptstbJrptst.\crypto\evp\e_rc2.cl <= sizeof(iv)l @w@u n @wu o @wv m @w v www`@`www`@`)*yx x`@d@A yy `@d `xxx`@t pxxx`@t0 xxy`@ xxy`$@@yPy`y@BByy PtBqF@dtqyy PtBqF@d_`yyy$uwy z0z`@dyy  CC@d.\crypto\evp\p_open.c.\crypto\evp\p_sign.c.\crypto\evp\p_verify.c.\crypto\evp\p_lib.c%s algorithm "%s" unsupported Public KeyPrivate KeyParameters.\crypto\evp\p_enc.c.\crypto\evp\p_dec.cmessage digestbase64 encoding.\crypto\evp\bio_b64.cctx->buf_off+i < (int)sizeof(ctx->buf)ctx->buf_len >= ctx->buf_offctx->tmp_len <= 3ctx->buf_off <= (int)sizeof(ctx->buf)i <= nctx->buf_len <= (int)sizeof(ctx->buf)ctx->buf_off < (int)sizeof(ctx->buf).\crypto\evp\bio_enc.cwrong public key typewrong final block lengthunsupported salt typeunsupported private key algorithmunsupported prfunsupported key sizeunsupported key derivation functionunsupported keylengthunsupported cipherunsupported algorithmunsuported number of roundsunknown pbe algorithmunknown digestunknown ciphertoo largepublic key not rsaprivate key encode errorprivate key decode errorpkcs8 unknown broken typeoperaton not initializedno verify function configuredno sign function configuredno operation setno key setno dsa parametersno digest setno default digestno cipher setmethod not supportedmessage digest is nullkeygen failureiv too largeinvalid operationinvalid key lengthinvalid digestinput not initializedinitialization errorexpecting a ec 
keyexpecting a ecdsa keyexpecting a dsa keyexpecting a dh keyexpecting an rsa keyevp pbe cipherinit errorencode errordisabled for fipsdifferent parametersdifferent key typesdata not multiple of block lengthctrl operation not implementedctrl not implementedcommand not supportedcipher parameter errorcamellia key setup failedbn pubkey errorbad key lengthbad decryptbad block lengthasn1 libaes key setup failedaes iv setup failedRC5_CTRLRC2_MAGIC_TO_METHPKEY_SET_TYPEPKCS8_set_brokenPKCS5_V2_PBKDF2_KEYIVGENPKCS5_v2_PBE_keyivgenPKCS5_PBE_keyivgenINT_CTX_NEWHMAC_Init_exFIPS_MD_CTX_COPYFIPS_DIGESTINITFIPS_CIPHER_CTX_SET_KEY_LENGTHFIPS_CIPHER_CTX_CTRLFIPS_CIPHER_CTX_COPYFIPS_CIPHERINITEVP_VerifyFinalEVP_SignFinalEVP_RIJNDAELEVP_PKEY_verify_recover_initEVP_PKEY_verify_recoverEVP_PKEY_verify_initEVP_PKEY_verifyEVP_PKEY_sign_initEVP_PKEY_signEVP_PKEY_paramgen_initEVP_PKEY_paramgenEVP_PKEY_newEVP_PKEY_keygen_initEVP_PKEY_keygenEVP_PKEY_get1_RSAEVP_PKEY_get1_EC_KEYEVP_PKEY_GET1_ECDSAEVP_PKEY_get1_DSAEVP_PKEY_get1_DHEVP_PKEY_encrypt_oldEVP_PKEY_encrypt_initEVP_PKEY_encryptEVP_PKEY_derive_set_peerEVP_PKEY_derive_initEVP_PKEY_deriveEVP_PKEY_decrypt_oldEVP_PKEY_decrypt_initEVP_PKEY_decryptEVP_PKEY_CTX_dupEVP_PKEY_CTX_ctrl_strEVP_PKEY_CTX_ctrlEVP_PKEY_copy_parametersEVP_PKEY2PKCS8_brokenEVP_PKCS82PKEY_BROKENEVP_PKCS82PKEYEVP_PBE_CipherInitEVP_PBE_alg_add_typeEVP_PBE_alg_addEVP_OpenInitEVP_MD_sizeEVP_MD_CTX_copy_exEVP_EncryptFinal_exEVP_DigestInit_exEVP_DecryptFinal_exEVP_CIPHER_CTX_set_key_lengthEVP_CIPHER_CTX_ctrlEVP_CIPHER_CTX_copyEVP_CipherInit_exECKEY_PKEY2PKCS8ECDSA_PKEY2PKCS8DSA_PKEY2PKCS8DSAPKEY2PKCS8DO_SIGVER_INITD2I_PKEYCMAC_INITCAMELLIA_INIT_KEYAES_XTS_CIPHERAES_XTSAES_INIT_KEYAESNI_XTS_CIPHERAESNI_INIT_KEYОcamellia256CAMELLIA256camellia192CAMELLIA192camellia128CAMELLIA128aes256AES256aes192AES192aes128AES128cast-cbcCAST-cbccastCASTblowfishbfBFrc2RC2SEEDideaIDEAdes3DES3desDESdesxDESXrmd160ripemddss1DSS1ssl3-sha1ssl3-md5ssl2-md5.\crypto\evp\evp_lib.cl <= sizeof(c->iv)j <= 
sizeof(c->iv)reliable.\crypto\evp\bio_ok.cThe quick brown fox jumped over the lazy dog's back.TYPE=.\crypto\evp\evp_pkey.c P PD@PE@ a@ ,@ +@ %@ b@ PP@P@ !*).\crypto\evp\evp_pbe.cEVP_CIPHER_iv_length(cipher) <= 16EVP_CIPHER_key_length(cipher) <= (int)sizeof(md_tmp).\crypto\evp\p5_crpt.ckeylen <= sizeof key.\crypto\evp\p5_crpt2.c.\crypto\evp\pmeth_lib.cdigest.\crypto\evp\pmeth_fn.c.\crypto\evp\pmeth_gn.c.\crypto\evp\m_sigver.c.\crypto\asn1\a_object.c.\crypto\asn1\a_bitstr.cc ;; ;%02d%02d%02d%02d%02d%02dZ.\crypto\asn1\a_utctm.ccc ;; ;%04d%02d%02d%02d%02d%02dZ.\crypto\asn1\a_gentm.cASN1_TIMEL.\crypto\asn1\a_time.c2019.\crypto\asn1\a_int.c.\crypto\asn1\a_set.c.\crypto\asn1\a_dup.c.\crypto\asn1\a_d2i_fp.c.\crypto\asn1\a_i2d_fp.c.\crypto\asn1\a_enum.c.\crypto\asn1\a_sign.c.\crypto\asn1\a_digest.c.\crypto\asn1\a_verify.c'()+,-./:=?maxsize=minsize=%ld.\crypto\asn1\a_mbstr.cx(  0123456789ABCDEF\%02X\U%04lX\W%08lX.\crypto\asn1\a_strex.c#"= = + ; +,, X509_ALGORSalgorithmsX509_ALGORparameter$\ lOxO`OTO3OHOX509_VALnotAfternotBeforePpOpPOX509_PUBKEYpublic_keyalgor5lP3`P PtP TP.\crypto\asn1\x_pubkey.cX509_SIGlP3LKPPX509_REQsignaturesig_algreq_infoX509_REQ_INFOattributespubkeysubject< @  QCQ5xQP>QQhQ \Q =TQ3HQ 4RRS RBIGNUM/HS/HSZLONGLONG0S0S.\crypto\asn1\x_long.c%ld X509_NAMEX509_NAME_INTERNALNameX509_NAME_ENTRIESRDNSX509_NAME_ENTRYvalueRDT@LT4T,T`BTTTCTSC@DI0KFTS.\crypto\asn1\x_name.ccert_infoX509_CINFextensionssubjectUIDissuerUIDvalidityissuer(@  @u  HQ3 UC|U@5QCX 5pU  dU $XUU U4LUL@UKTQ3HQ VVhT~ X509_CERT_PAIRreverseforwardX509_CERT_AUXotherkeyidaliasrejecttrustLWDW %*s %*s%s: %*s%s OF %s { SEQUENCESET.\crypto\asn1\ameth_lib.c\ 00.\crypto\asn1\f_int.c.\crypto\asn1\f_string.cNETSCAPE_PKEYprivate_keyNETSCAPE_ENCRYPTED_PKEYenckeyos`iXip<|idi@i@ @lP34ii $iSGCKEYSALTEnter Private Key password:private-key.\crypto\asn1\n_pkey.c.\crypto\asn1\f_enum.c.\crypto\asn1\x_pkey.c.\crypto\asn1\a_bool.cX509_EXTENSIONSExtensionX509_EXTENSIONR\DTj jj0kjasn1.\crypto\asn1\bio_asn1.cctx->buflen <= 
ctx->bufsize.\crypto\asn1\bio_ndef.c.\crypto\asn1\asn_mime.cgostr3411-94sha-512sha-384sha-256-- Content-Type: text/plain -----END %s----- -----BEGIN %s----- Content-Transfer-Encoding: base64%s%s name="%s"%s smime-type=%s;Content-Type: %smime; filename="%s"%ssmime.p7zcompressed-datacerts-onlysigned-datasigned-receiptenveloped-data%s------%s--%s%s filename="smime.p7s"%s%sContent-Disposition: attachment;Content-Transfer-Encoding: base64%s name="smime.p7s"%sContent-Type: %ssignature;%s------%s%s------%s%sThis is an S/MIME signed message%s%s"; boundary="----%s"%s%s micalg=" protocol="%ssignature";Content-Type: multipart/signed;MIME-Version: 1.0%sapplication/pkcs7-application/x-pkcs7-smime.p7mapplication/pkcs7-mimeapplication/x-pkcs7-mimetype: application/pkcs7-signatureapplication/x-pkcs7-signatureboundarymultipart/signedcontent-typetext/plainFORMATFORMBITWRAPSETWRAPSEQWRAPOCTWRAPIMPLICITIMPEXPLICITEXPSEQNUMERICSTRINGNUMERICGENSTRGeneralStringTELETEXSTRINGT61STRINGT61PRINTABLEPRINTABLESTRINGVISIBLEVISIBLESTRINGBMPSTRINGBMPUTF8StringUTF8IA5STRINGIA5UNIVUNIVERSALSTRINGBITSTRINGBITSTROCTETSTRINGOCTGENTIMEGENERALIZEDTIMEUTCUTCTIMEOBJECTOIDENUMERATEDENUMINTEGERINTBOOLEANBOOLqq ppp p pppppppp pp |ptpppdp \p Pp Lp@p 0p (pp p po o o ooo hohoooooooxopohoChar=.\crypto\asn1\asn1_gen.cBITLISTHEXASCIItag=string=GENERALSTRINGGRAPHICSTRINGVIDEOTEXSTRINGUTF8STRINGREALEXTERNALOBJECT DESCRIPTOROCTET STRINGBIT STRINGEOC%-18s`tqpTtDt p0t$ttpttssshhoposdppps0ps|ps@p(unknown)appl [ %d ]cont [ %d ]priv [ %d ] prim: cons: length is greater than %ld Error in encoding BAD ENUMERATEDBAD INTEGER[HEX DUMP]::%dBad boolean :BAD OBJECTd=%-2d hl=%ld l=inf d=%-2d hl=%ld l=%4ld %5ld:ASN.1 part of OpenSSL 1.0.1c 10 May 2012.\crypto\asn1\asn1_lib.caddress= offset=%luwrong typewrong tagunsupported typeunsupported public key typeunsupported encryption algorithmunsupported any defined by typeunkown formatunknown tagunknown signature algorithmunknown public key typeunknown object typeunknown message digest 
algorithmunknown formatuniversalstring is wrong lengthunexpected eocunable to decode rsa private keyunable to decode rsa keytype not constructedtoo longtime not ascii formattag value too highstring too shortstring too longstreaming not supportedsig invalid mime typeshort linesequence or set needs configsequence not constructedsequence length mismatchsecond number too largeprivate key header missingodd number of charsobject not ascii formatnull is wrong lengthno sig content typeno multipart boundaryno multipart body failureno matching choice typeno content typenot enough datanot ascii formatnon hex charactersnested asn1 stringmstring wrong tagmstring not universalmissing valuemissing second numbermissing eocmime sig parse errormime parse errormime no content typelist errorlength errorinvalid utf8stringinvalid universalstring lengthinvalid time formatinvalid separatorinvalid object encodinginvalid numberinvalid modifierinvalid mime typeinvalid digitinvalid bmpstring lengthinteger too large for longinteger not ascii formatillegal time valueillegal tagged anyillegal options on item templateillegal optional anyillegal objectillegal null valueillegal nullillegal nested taggingillegal integerillegal implicit tagillegal hexillegal formatillegal charactersillegal booleanillegal bitstring formatheader too longfirst num too largefield missingexplicit tag not constructedexplicit length mismatchexpecting a timeexpecting a booleanexpecting an objectexpecting an integererror setting cipher paramserror parsing set elementerror loading sectionerror getting timedigest and key type not supporteddepth exceededdecoding errordata is wrongcontext not initialisedcipher has no object identifierboolean is wrong lengthbn libbmpstring is wrong lengthbad tagbad password readbad object headerbad classaux errorasn1 sig parse errorasn1 parse erroradding 
objectX509_PKEY_newX509_NEWX509_NAME_EX_NEWX509_NAME_EX_D2IX509_NAME_ENCODEX509_INFO_newX509_CRL_add0_revokedX509_CINF_NEWSMIME_textSMIME_read_ASN1PKCS5_pbkdf2_setPKCS5_pbe_set0_algorPKCS5_pbe_setPKCS5_pbe2_set_ivPARSE_TAGGINGOID_MODULE_INITLONG_C2Ii2d_RSA_PUBKEYi2d_RSA_NETi2d_PublicKeyi2d_PrivateKeyi2d_EC_PUBKEYi2d_DSA_PUBKEYI2D_ASN1_TIMEi2d_ASN1_SETi2d_ASN1_bio_streamd2i_X509_PKEYD2I_X509_CINFD2I_X509D2I_RSA_NET_2d2i_RSA_NETd2i_PublicKeyd2i_PrivateKeyD2I_NETSCAPE_RSA_2d2i_Netscape_RSAd2i_AutoPrivateKeyD2I_ASN1_UTCTIMEd2i_ASN1_UINTEGERd2i_ASN1_type_bytesd2i_ASN1_SETd2i_ASN1_OBJECTD2I_ASN1_INTEGERD2I_ASN1_HEADERD2I_ASN1_GENERALIZEDTIMEd2i_ASN1_bytesd2i_ASN1_BOOLEAND2I_ASN1_BIT_STRINGCOLLECT_DATAc2i_ASN1_OBJECTc2i_ASN1_INTEGERc2i_ASN1_BIT_STRINGBN_to_ASN1_INTEGERBN_to_ASN1_ENUMERATEDBITSTR_CBBIO_new_NDEFB64_WRITE_ASN1B64_READ_ASN1ASN1_verifyASN1_UTCTIME_setASN1_UTCTIME_adjASN1_unpack_stringASN1_TYPE_get_octetstringASN1_TYPE_get_int_octetstringASN1_TIME_setASN1_TIME_adjASN1_TEMPLATE_NOEXP_D2IASN1_TEMPLATE_NEWASN1_TEMPLATE_EX_D2IASN1_STRING_type_newASN1_STRING_TABLE_addASN1_STRING_setASN1_STR2TYPEASN1_signASN1_seq_unpackASN1_seq_packASN1_PKCS5_PBE_SETASN1_PCTX_newASN1_pack_stringASN1_OUTPUT_DATAASN1_OBJECT_newASN1_mbstring_ncopyASN1_item_verifyASN1_item_unpackASN1_item_sign_ctxASN1_item_signASN1_item_packASN1_item_i2d_fpASN1_item_i2d_bioASN1_ITEM_EX_D2IASN1_ITEM_EX_COMBINE_NEWASN1_item_dupASN1_item_d2i_fpASN1_INTEGER_to_BNASN1_INTEGER_setASN1_i2d_fpASN1_i2d_bioASN1_HEADER_NEWASN1_get_objectASN1_generate_v3ASN1_GENERALIZEDTIME_setASN1_GENERALIZEDTIME_adjASN1_FIND_ENDASN1_EX_C2IASN1_ENUMERATED_to_BNASN1_ENUMERATED_setASN1_dupASN1_DO_ADBASN1_digestASN1_D2I_READ_BIOASN1_d2i_fpASN1_D2I_EX_PRIMITIVEASN1_COLLECTASN1_COLLATE_PRIMITIVEASN1_CHECK_TLENASN1_CBASN1_BIT_STRING_set_bitAPPEND_EXPa2i_ASN1_STRINGa2i_ASN1_INTEGERa2i_ASN1_ENUMERATEDa2d_ASN1_OBJECT.\crypto\asn1\a_bytes.c 
@(((@(@(01(6(7(c(d(e(i@(defaultutf8onlypkixnombstrMASK:.\crypto\asn1\a_strnid.c.\crypto\asn1\evp_asn1.c.\crypto\asn1\asn_pack.cPBEPARAMitersaltPH X<.\crypto\asn1\p5_pbe.cPBKDF2PARAMprfkeylengthPBE2PARAMencryptionkeyfunc3܊3ЊPPH Ċ  3.\crypto\asn1\p5_pbev2.cPKCS8_PRIV_KEY_INFOpkeypkeyalg@7@  ؋3 ЋxQP>.\crypto\asn1\asn_moid.coid_section.\crypto\pem\pem_sign.c.\crypto\pem\pem_seal.cEC PRIVATE KEYDSA PRIVATE KEYRSA PRIVATE KEYX509 CRLTRUSTED CERTIFICATEX509 CERTIFICATECERTIFICATE.\crypto\pem\pem_info.cstrlen(objstr)+23+2*enc->iv_len+13 <= sizeof bufPEM part of OpenSSL 1.0.1c 10 May 20120123456789ABCDEF.\crypto\pem\pem_lib.cphrase is too short, needs to be at least %d chars Enter PEM pass phrase:Proc-Type: 4,BAD-TYPEMIC-ONLYMIC-CLEARENCRYPTEDDEK-Info: -----END ----- -----BEGIN CMSPKCS #7 SIGNED DATAPKCS7CERTIFICATE REQUESTNEW CERTIFICATE REQUESTPARAMETERSPRIVATE KEYENCRYPTED PRIVATE KEYANY PRIVATE KEYenc->iv_len <= (int)sizeof(iv)Proc-Type: Expecting: RSA PUBLIC KEYPUBLIC KEYDSA PARAMETERSEC PARAMETERSDH PARAMETERSunsupported key componentsunsupported encryptionshort headerread keypvk too shortpvk data too shortpublic key no rsaproblems getting passwordno start linenot proc typenot encryptednot dek infokeyblob too shortkeyblob header parse errorinconsistent headerexpecting public key blobexpecting private key bloberror converting private keycipher is nullbio write failurebad version numberbad magic numberbad iv charsbad end linebad base64 
decodePEM_X509_INFO_write_bioPEM_X509_INFO_read_bioPEM_X509_INFO_readPEM_WRITE_PRIVATEKEYPEM_write_bioPEM_writePEM_SignFinalPEM_SealInitPEM_SealFinalPEM_READ_PRIVATEKEYPEM_READ_BIO_PRIVATEKEYPEM_read_bio_ParametersPEM_read_bioPEM_readPEM_PK8PKEYPEM_get_EVP_CIPHER_INFOPEM_F_PEM_WRITE_PKCS8PRIVATEKEYPEM_do_headerPEM_def_callbackPEM_ASN1_write_bioPEM_ASN1_writePEM_ASN1_read_bioPEM_ASN1_readLOAD_IVi2b_PVK_bioI2B_PVKDO_PVK_HEADERDO_PVK_BODYDO_PK8PKEY_FPDO_PK8PKEYDO_BLOB_HEADERDO_B2I_BIODO_B2Id2i_PKCS8PrivateKey_fpd2i_PKCS8PrivateKey_bioCHECK_BITLEN_RSACHECK_BITLEN_DSAB2I_RSAb2i_PVK_bioB2I_DSSCERTIFICATE PAIR.\crypto\pem\pem_oth.c.\crypto\pem\pem_pk8.c.\crypto\pem\pem_pkey.c%s PRIVATE KEY%s PARAMETERS.\crypto\pem\pvkfmt.c/usr/local/ssl/private/usr/local/ssl/usr/local/ssl/certs/usr/local/ssl/cert.pemSSL_CERT_DIRSSL_CERT_FILE.\crypto\x509\x509_r2x.c.\crypto\x509\x509_cmp.c0123456789ABCDEF.\crypto\x509\x509_obj.cNO X509_NAME.\crypto\x509\x509_req.c.\crypto\x509\x509spki.cX.509 part of OpenSSL 1.0.1c 10 May 2012.\crypto\x509\x509_vfy.cOPENSSL_ALLOW_PROXY_CERTSwrong lookup typeunknown trust idunknown purpose idunknown key typeunable to get certs public keyunable to find parameters in chainshould retrypublic key encode errorpublic key decode errorno cert set for us to verifyloading defaultsloading cert dirkey values mismatchkey type mismatchinvalid trustinvalid field nameinvalid directoryerr asn1 libcert already in hash tablecant check dh keybase64 decode errorbad x509 
filetypeX509_verify_certX509_TRUST_setX509_TRUST_addX509_to_X509_REQX509_STORE_CTX_purpose_inheritX509_STORE_CTX_newX509_STORE_CTX_initX509_STORE_CTX_get1_issuerX509_STORE_add_crlX509_STORE_add_certX509_REQ_to_X509X509_REQ_print_fpX509_REQ_print_exX509_REQ_check_private_keyX509_PUBKEY_setX509_PUBKEY_getX509_print_ex_fpX509_NAME_printX509_NAME_onelineX509_NAME_ENTRY_set_objectX509_NAME_ENTRY_create_by_txtX509_NAME_ENTRY_create_by_NIDX509_NAME_add_entryX509_load_crl_fileX509_load_cert_fileX509_load_cert_crl_fileX509_get_pubkey_parametersX509_EXTENSION_create_by_OBJX509_EXTENSION_create_by_NIDX509_CRL_print_fpX509_check_private_keyX509_ATTRIBUTE_set1_dataX509_ATTRIBUTE_get0_dataX509_ATTRIBUTE_create_by_txtX509_ATTRIBUTE_create_by_OBJX509_ATTRIBUTE_create_by_NIDX509v3_add_extX509at_add1_attrNETSCAPE_SPKI_b64_encodeNETSCAPE_SPKI_b64_decodeGET_CERT_BY_SUBJECTDIR_CTRLCHECK_POLICYBY_FILE_CTRLADD_CERT_DIR.\crypto\x509\x509name.cname=.\crypto\x509\x509_v3.c.\crypto\x509\x509_att.c.\crypto\x509\x509_lu.cerror number %ldCRL path validation errorunsupported or invalid name syntaxunsupported or invalid name constraint syntaxunsupported name constraint typename constraints minimum and maximum not supportedexcluded subtree violationpermitted subtree violationRFC 3779 resource not subset of parent's resourcesUnsupported extension featureDifferent CRL scopeno explicit policyinvalid or inconsistent certificate policy extensioninvalid or inconsistent certificate extensionunhandled critical CRL extensionkey usage does not include digital signaturekey usage does not include CRL signingunhandled critical extensionunable to get CRL issuer certificatekey usage does not include certificate signingauthority and issuer serial number mismatchauthority and subject key identifier mismatchsubject issuer mismatchapplication verification failurecertificate rejectedcertificate not trustedunsupported certificate purposeproxy certificates not allowed, please set the appropriate flagproxy path length 
constraint exceededpath length constraint exceededinvalid non-CA certificate (has CA markings)invalid CA certificatecertificate revokedcertificate chain too longunable to verify the first certificateunable to get local issuer certificateself signed certificate in certificate chainself signed certificateout of memoryformat error in CRL's nextUpdate fieldformat error in CRL's lastUpdate fieldformat error in certificate's notAfter fieldformat error in certificate's notBefore fieldCRL has expiredcertificate has expiredCRL is not yet validcertificate is not yet validCRL signature failurecertificate signature failureunable to decode issuer public keyunable to decrypt CRL's signatureunable to decrypt certificate's signatureunable to get certificate CRLunable to get issuer certificateokTSA serverOCSP requestOCSP responderObject SignerS/MIME emailSSL ServerSSL Clientcompatible.\crypto\x509\x509_trs.cLoad file into cache.\crypto\x509\by_file.cLoad certs from files in a directory.\crypto\x509\by_dir.c%s%c%08lx.%s%dssl_serverssl_clientsmime_signd} PD8.\crypto\x509\x509_vpm.cBASIC_CONSTRAINTSpathlencaWp! ! 
0" 80 tCAsection:,name:,value:.\crypto\x509v3\v3_bcons.cdecipherOnlyDecipher OnlyencipherOnlyEncipher OnlycRLSignCRL SignkeyCertSignCertificate SignkeyAgreementKey AgreementdataEnciphermentData EnciphermentkeyEnciphermentKey EnciphermentnonRepudiationNon RepudiationdigitalSignatureDigital SignatureobjCAObject Signing CAemailCAS/MIME CAsslCASSL CAreservedUnusedobjsignObject SigningemailserverclientG # # (>S # # >.\crypto\x509v3\v3_bitst.c.\crypto\x509v3\v3_conf.ccritical,ASN1:DER:,section=value=, value=EXTENDED_KEY_USAGE~@0 0 @1 p@0 0 @1 .\crypto\x509v3\v3_extku.cH02 2 I02 2 J02 2 K02 2 L02 2 M02 2 N02 2 .\crypto\x509v3\v3_ia5.c.\crypto\x509v3\v3_lib.c %*s%*s%*s%s0123456789ABCDEF.\crypto\x509v3\v3_utl.cnoNONfalseyesYESyYtrueuser too longunsupported optionunknown optionunknown extension nameunknown extensionunknown bit string argumentunable to get issuer keyidunable to get issuer detailssection not foundpolicy when proxy language requires no policypolicy syntax not currently supportedpolicy path length already definedpolicy path lengthpolicy language already definedothername erroroperation not definedodd number of digitsno subject detailsno public keyno proxy cert policy language definedno policy identifierno issuer detailsno issuer certificateno config databaseneed organization and numbersissuer decode errorinvalid syntaxinvalid sectioninvalid safiinvalid purposeinvalid proxy policy settinginvalid policy identifierinvalid optioninvalid object identifierinvalid numbersinvalid null valueinvalid null nameinvalid null argumentinvalid nameinvalid ipaddressinvalid inheritanceinvalid extension stringinvalid boolean stringinvalid asrangeinvalid asnumberinvalid multiple rdnsincorrect policy syntax tagillegal hex digitillegal empty extensionextension value errorextension setting not supportedextension not foundextension name errorextension existsexpected a section nameerror in extensionerror creating extensionerror converting zoneduplicate zone iddistpoint already setdirname 
errorbn to asn1 integer errorbn dec2bn errorbad objectbad ip addressX509_PURPOSE_setX509_PURPOSE_addX509V3_parse_listX509V3_get_value_boolX509V3_get_stringX509V3_get_sectionX509V3_EXT_nconfX509V3_EXT_i2dX509V3_EXT_confX509V3_EXT_add_aliasX509V3_EXT_addX509V3_add_valueX509V3_add1_i2dV3_GENERIC_EXTENSIONV3_ADDR_VALIDATE_PATH_INTERNALV2I_SUBJECT_ALTV2I_POLICY_MAPPINGSV2I_POLICY_CONSTRAINTSV2I_NAME_CONSTRAINTSV2I_ISSUER_ALTV2I_IPADDRBLOCKSV2I_IDPv2i_GENERAL_NAME_exv2i_GENERAL_NAMESV2I_EXTENDED_KEY_USAGEV2I_CRLDV2I_BASIC_CONSTRAINTSV2I_AUTHORITY_KEYIDV2I_AUTHORITY_INFO_ACCESSv2i_ASN1_BIT_STRINGV2I_ASIDENTIFIERSSXNET_get_id_ulongSXNET_get_id_ascSXNET_add_id_ulongSXNET_add_id_INTEGERSXNET_add_id_ascstring_to_hexSET_DIST_POINT_NAMES2I_SKEY_IDS2I_ASN1_SKEY_IDs2i_ASN1_OCTET_STRINGs2i_ASN1_INTEGERS2I_ASN1_IA5STRINGR2I_PCIR2I_CERTPOLPROCESS_PCI_VALUEPOLICY_SECTIONNREF_NOSNOTICE_SECTIONI2V_AUTHORITY_INFO_ACCESSi2s_ASN1_INTEGERI2S_ASN1_IA5STRINGi2s_ASN1_ENUMERATEDhex_to_stringGNAMES_FROM_SECTNAMEDO_I2V_NAME_CONSTRAINTSDO_EXT_NCONFDO_EXT_I2DDO_EXT_CONFDO_DIRNAMECOPY_ISSUERCOPY_EMAILASIDENTIFIERCHOICE_IS_CANONICALASIDENTIFIERCHOICE_CANONIZEA2I_GENERAL_NAMEGENERAL_NAMESGeneralNamesGENERAL_NAMEd.registeredIDd.iPAddressd.uniformResourceIdentifierd.ediPartyNamed.directoryNamed.x400Addressd.dNSNamed.rfc822Named.otherNameEDIPARTYNAMEpartyNamenameAssignerOTHERNAMEtype_idDT@t@dXP H0<0,C pQ 0Է0 ķQ UpR 0] b VpR 0] ` pR 0] Registered IDIP Address%XDirNameURIDNSEdiPartyNameX400NameothernameIP Address::%XIP Address:%d.%d.%d.%dDirName: URI:%sDNS:%semail:%sEdiPartyName:X400Name:othername:.\crypto\x509v3\v3_alt.csection=otherNamedirNameIPRIDcopymoveRd d .\crypto\x509v3\v3_skey.chashZ@  f f serial.\crypto\x509v3\v3_akey.calwaysPKEY_USAGE_PERIODTi Pj POTNot After: Not Before: X >  >  > j AACompromiseAA CompromiseprivilegeWithdrawnPrivilege WithdrawnremoveFromCRLRemove From CRLcertificateHoldCertificate HoldcessationOfOperationCessation Of OperationsupersededSupersededaffiliationChangedAffiliation 
ChangedCACompromiseCA CompromisekeyCompromiseKey CompromiseunspecifiedUnspecifiedj DSXNETidsSXNETIDuserzonek Pp @l  D@  @k  %*sZone: %s, User: %*sVersion: %ld (0x%lX).\crypto\x509v3\v3_sxnet.cNOTICEREFnoticenosorganizationUSERNOTICEexptextnoticerefPOLICYQUALINFOpqualidd.usernoticed.cpsurid.otherPOLICYINFOqualifierspolicyidCERTIFICATEPOLICIESYp { `| pq  r <0xr p`q `Ts L$@0$ h.\crypto\x509v3\v3_cpols.c%*sExplicit Text: %s %*sNumber%s: %*sOrganization: %s noticeNumbersexplicitText%*sCPS: %s %*sUser Notice: %*sUnknown Qualifier: %*sNo Qualifiers %*s%s Non CriticalCritical%*sPolicy: userNoticeCPSpolicyIdentifieria5orgISSUING_DIST_POINTonlyattrindirectCRLonlysomereasonsonlyCAonlyuserCRL_DIST_POINTSCRLDistributionPointsDIST_POINTCRLissuerreasonsdistpointDIST_POINT_NAMEname.relativenamename.fullnameunusedgp  @ Yp  @ hxhXD0$Ծľ|l\0 Q `B thp ` TQ H0 L hp      ` .\crypto\x509v3\v3_crld.c%*s%s: %*s%*sRelative Name: %*s%*sFull Name: %*sOnly Attribute Certificates Only Some Reasons%*sIndirect CRL %*sOnly CA Certificates %*sOnly User Certificates %*sCRL Issuer: ReasonsrelativenamefullnameonlyAAtimestampsignTime Stamp signingocsphelperOCSP helperanyAny PurposecrlsignCRL signingsmimeencryptS/MIME encryptionsmimesignS/MIME signingnssslserverNetscape SSL serversslserverSSL serversslclientSSL clientGSUWY~.\crypto\x509v3\v3_purp.cAUTHORITY_INFO_ACCESSACCESS_DESCRIPTIONlocationmethod 0   0  Q <0  - .\crypto\x509v3\v3_info.co`  r   n0   @  q P s ` %*scrlTime: %*scrlNum: %*scrlUrl: .\crypto\x509v3\v3_ocsp.c %*s%*sIssuer: AUTHORITY_KEYID4WUQ ܼ  POLICY_MAPPINGSPOLICY_MAPPINGsubjectDomainPolicyissuerDomainPolicyХ   L8( .\crypto\x509v3\v3_pmaps.cPOLICY_CONSTRAINTSinhibitPolicyMappingrequireExplicitPolicy`   T < (Inhibit Policy MappingRequire Explicit Policy.\crypto\x509v3\v3_pcons.cNAME_CONSTRAINTSexcludedSubtreespermittedSubtreesGENERAL_SUBTREEmaximumminimum    Q    p\p Hp  
4.\crypto\x509v3\v3_ncons.cexcludedpermitted%d.%d.%d.%d/%d.%d.%d.%dIP:ExcludedPermittedPROXY_CERT_INFO_EXTENSIONproxyPolicypcPathLengthConstraintPROXY_POLICYpolicypolicyLanguage$4   x   %*sPolicy Text: %s %*sPolicy Language: infinite%*sPath Length Constraint: text:file:hex:.\crypto\x509v3\v3_pci.clanguage.\crypto\x509v3\pcy_cache.c.\crypto\x509v3\pcy_node.c.\crypto\x509v3\pcy_data.c.\crypto\x509v3\pcy_tree.c.\crypto\cms\cms_lib.cCMS_ReceiptoriginatorSignatureValueCMS_ReceiptRequestreceiptsToreceiptsFromsignedContentIdentifierCMS_ReceiptsFromd.receiptListd.allOrFirstTierCMS_Attributes_VerifyCMS_Attributes_SignCMS_ATTRIBUTESCMS_ContentInfod.compressedDatad.authenticatedDatad.encryptedDatad.digestedDatad.envelopedDatad.signedDatad.dataCMS_CompressedDatacompressionAlgorithmCMS_AuthenticatedDataunauthAttrsmacauthAttrsmacAlgorithmCMS_EncryptedDataCMS_DigestedDataCMS_EnvelopedDataunprotectedAttrsencryptedContentInforecipientInfosoriginatorInfoCMS_RecipientInfod.orid.pwrid.kekrid.karid.ktriCMS_OtherRecipientInfooriValueoriTypeCMS_PasswordRecipientInfokeyDerivationAlgorithmCMS_KEKRecipientInfokekidCMS_KEKIdentifierkeyIdentifierCMS_KeyAgreeRecipientInforecipientEncryptedKeysukmoriginatorCMS_OriginatorIdentifierOrKeyd.originatorKeyCMS_OriginatorPublicKeyCMS_RecipientEncryptedKeyCMS_KeyAgreeRecipientIdentifierd.rKeyIdCMS_RecipientKeyIdentifierdateCMS_OtherKeyAttributekeyAttrkeyAttrIdCMS_KeyTransRecipientInfoencryptedKeykeyEncryptionAlgorithmridCMS_EncryptedContentInfoencryptedContentcontentEncryptionAlgorithmCMS_OriginatorInfoCMS_SignedDatasignerInfoscrlscertificatesencapContentInfodigestAlgorithmsCMS_RevocationInfoChoiced.crlCMS_OtherRevocationInfoFormatotherRevInfootherRevInfoFormatCMS_SignerInfounsignedAttrssignatureAlgorithmsignedAttrsdigestAlgorithmsidCMS_EncapsulatedContentInfoeContenteContentTypeCMS_SignerIdentifierd.subjectKeyIdentifierd.issuerAndSerialNumberCMS_CertificateChoicesd.v2AttrCertd.v1AttrCertd.extendedCertificated.certificateCMS_OtherCertificateFormatotherCe
rtotherCertFormatCMS_IssuerAndSerialNumberUC@u  hH8,L  phXL 0 @ @, 3 P>3HQP>$`TP t@ @`3L  < 4` (@ @< 4` { 3@ @ 3 ppd\Dw <,W     x $\ 3<    DL@ @  3h <<,W  (@ @  3 @ @33 L`   0 @ P l@ @\ L  4  P>( @ @3L  LK@ @4  P>  $@ @\ L  33L 3 3@ @3 L <tl\p L < ,   @ { 0 lP> P>@pR (hX LpR l 8@ @{ h .\crypto\cms\cms_io.c.\crypto\cms\cms_smime.cVerify error:wrap errorverification failureunwrap failureunwrap errorunsupported recpientinfo typeunsupported recipient typeunsupported key encryption algorithmunsupported kek algorithmunsupported content typeunsupported compression algorithmunknown idunknown digest algorihmunable to finalize contexttype not enveloped datatype not encrypted datatype not digested datatype not datatype not compressed datastore init errorsmime text errorsignfinal errorsigner certificate not foundrecipient errorreceipt decode errorprivate key does not match certificateno signersno receipt requestno private keyno passwordno msgsigdigestno matching signatureno matching recipientno matching digestno key or certno keyno contentno ciphernot supported for this key typenot pwrinot key transportnot keknot encrypted datanot a signed receiptneed one signermsgsigdigest wrong lengthmsgsigdigest verification failuremsgsigdigest errormessagedigest wrong lengthmessagedigest attribute wrong lengthmd bio init errorinvalid key encryption parameterinvalid encrypted key lengtherror setting recipientinfoerror setting keyerror reading messagedigest attributeerror getting public keydigest errordecrypt errorctrl failurectrl errorcontent verify errorcontent type not signed datacontent type not enveloped datacontent type not compressed datacontent type mismatchcontent not foundcontentidentifier mismatchcms libcms datafinal errorcipher parameter initialisation errorcipher initialisation errorcertificate verify errorcertificate has no keyidcertificate already presentadd signer 
errorCMS_verifyCMS_uncompressCMS_streamCMS_sign_receiptCMS_SignerInfo_verify_contentCMS_SIGNERINFO_VERIFY_CERTCMS_SignerInfo_verifyCMS_SignerInfo_signCMS_SIGNERINFO_CONTENT_SIGNCMS_SIGNED_DATA_INITCMS_signCMS_set_detachedcms_set1_SignerIdentifierCMS_RecipientInfo_set0_pkeyCMS_RecipientInfo_set0_passwordCMS_RecipientInfo_set0_keycms_RecipientInfo_pwri_cryptCMS_RecipientInfo_ktri_get0_signer_idCMS_RecipientInfo_ktri_get0_algsCMS_RECIPIENTINFO_KTRI_ENCRYPTCMS_RECIPIENTINFO_KTRI_DECRYPTCMS_RecipientInfo_ktri_cert_cmpCMS_RecipientInfo_kekri_id_cmpCMS_RecipientInfo_kekri_get0_idCMS_RECIPIENTINFO_KEKRI_ENCRYPTCMS_RECIPIENTINFO_KEKRI_DECRYPTCMS_RecipientInfo_decryptcms_Receipt_verifyCMS_ReceiptRequest_create0cms_msgSigDigest_add1CMS_GET0_SIGNEDCMS_GET0_REVOCATION_CHOICEScms_get0_envelopedCMS_GET0_ECONTENT_TYPECMS_get0_contentCMS_GET0_CERTIFICATE_CHOICESCMS_finalCMS_ENVELOPED_DATA_INITcms_EnvelopedData_init_bioCMS_EnvelopedData_createCMS_EncryptedData_set1_keyCMS_EncryptedData_encryptCMS_EncryptedData_decryptcms_EncryptedContent_init_bioCMS_encryptcms_encode_ReceiptCMS_digest_verifycms_DigestedData_do_finalcms_DigestAlgorithm_init_biocms_DigestAlgorithm_find_ctxCMS_decrypt_set1_pkeyCMS_decrypt_set1_passwordCMS_decrypt_set1_keyCMS_decryptCMS_dataInitCMS_dataFinalCMS_dataCMS_COPY_MESSAGEDIGESTCMS_COPY_CONTENTcms_CompressedData_init_biocms_CompressedData_createCMS_compressCMS_ADD1_SIGNINGTIMECMS_add1_signerCMS_add1_recipient_certCMS_add1_ReceiptRequestCMS_add0_recipient_passwordCMS_add0_recipient_keyCMS_add0_certCHECK_CONTENT.\crypto\cms\cms_sd.c.\crypto\cms\cms_dd.c.\crypto\cms\cms_env.c.\crypto\cms\cms_enc.c.\crypto\cms\cms_ess.c.\crypto\cms\cms_pwri.cvariable has no valueunknown module nameunable to create new sectionno valueno sectionno conf or environment variableno confno close bracemodule initialization errormissing init functionmissing finish functionmissing equal signmissing close square bracketlist cannot be nullerror loading 
dsoSTR_COPYNCONF_newNCONF_load_fpNCONF_load_bioNCONF_loadNCONF_get_stringNCONF_get_sectionNCONF_get_number_eNCONF_get_numberNCONF_dump_fpNCONF_dump_bioMODULE_RUNMODULE_LOAD_DSOMODULE_INITDEF_LOAD_BIODEF_LOADCONF_parse_listCONF_modules_loadCONF_load_fpCONF_load_bioCONF_loadCONF_dump_fpCONF part of OpenSSL 1.0.1c 10 May 2012.\crypto\conf\conf_lib.cgroup= name=ENVvv == NULL.\crypto\conf\conf_api.cWIN32OpenSSL defaultCONF_def part of OpenSSL 1.0.1c 10 May 2012.\crypto\conf\conf_def.c[[%s]] [%s] %s=%s line .\crypto\conf\conf_mod.copenssl.cnfOPENSSL_CONFmodule=, path=OPENSSL_finishOPENSSL_init, retcode=%-8dopenssl_confAuto configuration failed TXT_DB part of OpenSSL 1.0.1c 10 May 2012OPENSSL_malloc failure wrong number of fields on line %ld (looking for field %d, got %d, '%s' left) failure in sk_push .\crypto\txt_db\txt_db.cPKCS7_ATTR_VERIFYPKCS7_ATTR_SIGNPKCS7_ATTRIBUTESPKCS7_DIGESTmdPKCS7_ENCRYPTPKCS7_SIGN_ENVELOPEPKCS7_ENC_CONTENTcontent_typePKCS7_RECIP_INFOenc_keykey_enc_algorPKCS7_ENVELOPEenc_datarecipientinfoPKCS7_ISSUER_AND_SERIALPKCS7_SIGNER_INFOunauth_attrenc_digestdigest_enc_algauth_attrdigest_algissuer_and_serialPKCS7_SIGNEDsigner_infocontentsmd_algsd.encryptedd.digestd.signed_and_envelopedd.envelopedd.signhl`a T c <d 0e $@e @` P 0` @(@  3` [L (X`T b b @  b 3 P>3P>0 UCܼ t@  dc X@d  Hc t@  b 83 0 $\ 3X@  dc 3X@d [L (X`T b P@  X@d X@  3`  LKP> P>.\crypto\pkcs7\pk7_lib.cwrong pkcs7 typewrong content typeunsupported cipher typeunknown operationunknown digest typeunable to find message digestunable to find mem biounable to find certificatesigning not supported for this key typesigning ctrl failuresignature failurepkcs7 sig parse errorpkcs7 parse errorpkcs7 datasignpkcs7 datafinal errorpkcs7 datafinalpkcs7 add signer errorpkcs7 add signature erroroperation not supported on this typeno signatures on datano recipient matches keyno recipient matches certificateno matching digest type foundmissing ceripend infoinvalid null pointererror setting ciphererror adding 
recipientencryption not supported for this key typeencryption ctrl failuredigest failuredecrypted key is wrong lengthcontent and data presentcipher not initializedSMIME_read_PKCS7PKCS7_verifyPKCS7_simple_smimecapPKCS7_sign_add_signerPKCS7_SIGNER_INFO_signPKCS7_SIGNER_INFO_setPKCS7_signatureVerifyPKCS7_signPKCS7_set_typePKCS7_set_digestPKCS7_set_contentPKCS7_set_cipherPKCS7_RECIP_INFO_setPKCS7_get0_signersPKCS7_FIND_DIGESTPKCS7_finalPKCS7_encryptPKCS7_ENCODE_RINFOPKCS7_DECRYPT_RINFOPKCS7_decryptPKCS7_dataVerifyPKCS7_DATASIGNPKCS7_dataInitPKCS7_dataFinalPKCS7_dataDecodePKCS7_ctrlPKCS7_COPY_EXISTING_DIGESTPKCS7_BIO_ADD_DIGESTPKCS7_add_signerPKCS7_add_signaturePKCS7_add_recipient_infoPKCS7_add_crlPKCS7_add_certificatePKCS7_add_attrib_smimecapPKCS7_add0_attrib_signing_timei2d_PKCS7_bio_streamDO_PKCS7_SIGNED_ATTRIBB64_WRITE_PKCS7B64_READ_PKCS7.\crypto\pkcs7\pk7_doit.c.\crypto\pkcs7\pk7_smime.c.\crypto\pkcs7\pk7_attr.c.\crypto\pkcs12\p12_add.cPKCS12_AUTHSAFESPKCS12_SAFEBAGSattribPKCS12_SAFEBAGvalue.bagvalue.safesvalue.shkeybagvalue.keybagPKCS12_BAGSvalue.sdsicertvalue.x509crlvalue.x509certvalue.otherPKCS12_MAC_DATAdinfoPKCS12authsafes@   `     p<PH @       | 0P p   p  ` p7P p<D  8  8  8  h T P (   P> (   l  `  .\crypto\pkcs12\p12_crpt.c.\crypto\pkcs12\p12_crt.c.\crypto\pkcs12\p12_decr.c.\crypto\pkcs12\p12_init.c.\crypto\pkcs12\p12_key.c.\crypto\pkcs12\p12_kiss.c.\crypto\pkcs12\p12_mutl.c.\crypto\pkcs12\p12_utl.c.\crypto\pkcs12\p12_npas.cunsupported pkcs12 modeunknown digest algorithmpkcs12 pbe crypt errorpkcs12 cipherfinal errorpkcs12 algor cipherinit errorparse errormac verify failuremac verify errormac string set errormac setup errormac generation errormac absentkey gen erroriv gen errorinvalid null pkcs12 pointererror setting encrypted data typeencrypt errorcontent type not datacant pack 
structurePKCS8_encryptPKCS8_add_keyusagePKCS12_verify_macPKCS12_unpack_p7dataPKCS12_unpack_authsafesPKCS12_set_macPKCS12_setup_macPKCS12_PBE_keyivgenPKCS12_pbe_cryptPKCS12_parsePKCS12_pack_p7encdataPKCS12_pack_p7dataPKCS12_newpassPKCS12_MAKE_SHKEYBAGPKCS12_MAKE_KEYBAGPKCS12_key_gen_uniPKCS12_key_gen_ascPKCS12_item_pack_safebagPKCS12_item_i2d_encryptPKCS12_item_decrypt_d2iPKCS12_initPKCS12_gen_macPKCS12_createPKCS12_add_localkeyidPKCS12_add_friendlyname_uniPKCS12_add_friendlyname_ascPKCS12_ADD_FRIENDLYNAMEPARSE_BAGSPARSE_BAG.\crypto\pkcs12\p12_p8e.c.\crypto\comp\comp_lib.czlib not supportedzlib inflate errorzlib deflate errorBIO_ZLIB_WRITEBIO_ZLIB_READBIO_ZLIB_NEWBIO_ZLIB_FLUSH(undef)version incompatibilityunimplemented public key methodunimplemented digestunimplemented cipherrsa not implementedprovide parametersno unload functionno such engineno referenceno load functionno indexno control functionnot loadednot initialisedinvalid stringinvalid init valueinvalid cmd numberinvalid cmd nameinternal list errorinit failed'id' or 'name' missingcould not obtain hardware handlefinish failedfailed loading public keyfailed loading private keyengine section errorengine is not in the listengine configuration errorengines section errordso not foundDSO failuredsa not implementeddh not implementedctrl command not implementedconflicting engine idcommand takes no inputcommand takes inputcmd not executableargument is not a numberalready 
loadedLOG_MESSAGEINT_ENGINE_MODULE_INITINT_ENGINE_CONFIGUREINT_CTRL_HELPERENGINE_up_refENGINE_UNLOCKED_FINISHENGINE_UNLOAD_KEYENGINE_TABLE_REGISTERENGINE_set_nameENGINE_set_idENGINE_SET_DEFAULT_TYPEENGINE_set_default_stringENGINE_removeENGINE_newENGINE_load_ssl_client_certENGINE_load_public_keyENGINE_load_private_keyENGINE_LIST_REMOVEENGINE_LIST_ADDENGINE_initENGINE_get_prevENGINE_get_pkey_methENGINE_get_pkey_asn1_methENGINE_get_nextENGINE_get_digestENGINE_GET_DEFAULT_TYPEENGINE_get_cipherENGINE_FREE_UTILENGINE_finishENGINE_ctrl_cmd_stringENGINE_ctrl_cmdENGINE_ctrlENGINE_cmd_is_executableENGINE_by_idENGINE_addDYNAMIC_SET_DATA_CTXDYNAMIC_LOADDYNAMIC_GET_DATA_CTXDYNAMIC_CTRL.\crypto\engine\eng_lib.c.\crypto\engine\eng_list.cid=LOADDIR_ADDDIR_LOADID/usr/local/ssl/lib/enginesOPENSSL_ENGINES.\crypto\engine\eng_init.c.\crypto\engine\eng_ctrl.c.\crypto\engine\eng_table.c.\crypto\engine\eng_pkey.cPKEY_ASN1PKEY_CRYPTOPKEYDIGESTSCIPHERSRANDECDSAECDHALLstr=.\crypto\engine\eng_fat.c.\crypto\engine\tb_cipher.c.\crypto\engine\tb_digest.c.\crypto\engine\tb_pkmeth.c.\crypto\engine\tb_asnmth.cSoftware engine supportopenssl  a  @Ayy `@d(TEST_ENG_OPENSSL_RC4) test_init_key() called (TEST_ENG_OPENSSL_PKEY)Loading Private key %s default_algorithms, name=initEMPTYLIST_ADDSO_PATHdynamic_pathsoft_loadengine_id.\crypto\engine\eng_cnf.cenginesLoad up the ENGINE specified by other settingsAdds a directory from which ENGINEs can be loadedSpecifies whether to load from 'DIR_ADD' directories (0=no,1=yes,2=mandatory)Whether to add a loaded ENGINE to the internal list (0=no,1=yes,2=mandatory)Specifies an ENGINE id name for loadingSpecifies to continue even if version checking fails (boolean)NO_VCHECKSpecifies the path to the new ENGINE shared libraryDynamic engine loading support,  8 bind_enginev_check.\crypto\engine\eng_dyn.cIntel RDRAND 
enginerdrandOCSP_SERVICELOClocatorOCSP_CRLIDcrlTimecrlNumcrlUrlOCSP_BASICRESPtbsResponseDataOCSP_RESPDATAresponseExtensionsresponsesproducedAtresponderIdOCSP_SINGLERESPsingleExtensionsthisUpdatecertStatuscertIdOCSP_CERTSTATUSvalue.unknownvalue.revokedvalue.goodOCSP_REVOKEDINFOrevocationReasonrevocationTimeOCSP_RESPIDvalue.byKeyvalue.byNameOCSP_RESPONSEresponseBytesresponseStatusOCSP_RESPBYTESresponseresponseTypeOCSP_REQUESToptionalSignaturetbsRequestOCSP_REQINFOrequestExtensionsrequestListrequestorNameOCSP_ONEREQsingleRequestExtensionsreqCertOCSP_CERTIDissuerKeyHashissuerNameHashOCSP_SIGNATURE3HQ ,[L# # 3## @u  @$##` #$|#@  l#Q `#  L#$<#0#` # \% #""%"""` %""C"(&"x"d"l&P"D" 4" $" &" "` "` ! DX!'!@  !` ! ! !'!x!` 3HQ  ,[L(h!`!0X! P!t( D!UC>>|( -gggxsS՗'''%NAAA2Xs, QS}}}ϔn7IG؎V0p#q|||ǑfffqSݦ{\K.GGGFEB!ʼn---uZXƿyc.8?#GZZuZ/6l333fccc?\ 98Iqqqύd}2II9Irp;Cن_1KHۨ[[q[*4 R)>&&&-L 222dJ}Yj<x3ws榷3:t¾|a'&އ444hHH=Hzu2$Tzzz􍐐z=d__a_> @=hhhghr4ʮ,A^u}TTMTΓv;""" D/dddc*sssHZ$@@@:]z @(H+V蛕3{KۖMa_===zȗf3[6ԃ+++EVnvvvႂ2d(lw6õ[wt)CjjjwPP]P EE ELW8000`+t???~UUIUǢyYeeeejҺhi///e^J'N睎_޾`pl8.FMM)MRdr9vuuu0 6$ @yKcYх8p6~|c>bbb7Uw:)2Mb1R:b3f%%%5JYYyY *Trrr999rLL-LZa^^e^;xxx888p匌 cƲA WCM١aaa/NE{B!!!B4J%xfсDU" NN%NJkQsf `<<|j5@ ޹goL_&,,,}XQkָk\ӌnnnW97n VVEVDD DI^ߞ!7O***MTgֻmk #F⟇SSQSWܮr , X'SN'lllG+111bttt FFF CL &E<PD([BߺXN,:::tiiio $ H-Appp׶Tqogη;~.ۅBBB*WhZ-,IU(((]Pu\\m\1?k"D#臸O6yoR` {5.KWw7JX) k]>g'A}|fG-Z3unsupported versionunsupported md algorithmunacceptable policyts datasigntst info setup errortsa untrustedtsa name mismatchtoken presenttoken not presenttime syscall errorthere must be one signerresponse setup errorpolicy mismatchpkcs7 to ts tst info failedpkcs7 add signed attr errorno time stamp tokennonce not returnednonce mismatchmessage imprint mismatchinvalid signer certificate purposeess signing certificate erroress add signing cert errordetached contentd2i ts resp int failedcould not set timecould not set enginebad typebad pkcs7 
typeTS_VERIFY_CTX_newTS_VERIFY_CERTTS_VERIFYTS_TST_INFO_set_tsaTS_TST_INFO_set_timeTS_TST_INFO_set_serialTS_TST_INFO_set_policy_idTS_TST_INFO_set_nonceTS_TST_INFO_set_msg_imprintTS_TST_INFO_set_accuracyTS_RESP_verify_tokenTS_RESP_verify_signatureTS_RESP_SIGNTS_RESP_set_tst_infoTS_RESP_set_status_infoTS_RESP_SET_GENTIME_WITH_PRECISIONTS_RESP_GET_POLICYTS_RESP_CTX_set_status_infoTS_RESP_CTX_set_signer_certTS_RESP_CTX_set_def_policyTS_RESP_CTX_set_certsTS_RESP_CTX_set_accuracyTS_RESP_CTX_newTS_RESP_CTX_add_policyTS_RESP_CTX_add_mdTS_RESP_CTX_add_failure_infoTS_RESP_CREATE_TST_INFOTS_RESP_create_responseTS_REQ_set_policy_idTS_REQ_set_nonceTS_REQ_set_msg_imprintTS_MSG_IMPRINT_set_algoTS_GET_STATUS_TEXTTS_CONF_set_default_engineTS_COMPUTE_IMPRINTTS_CHECK_STATUS_INFOTS_CHECK_SIGNING_CERTSTS_CHECK_POLICYTS_CHECK_NONCESTS_CHECK_IMPRINTSTS_ACCURACY_set_secondsTS_ACCURACY_set_millisTS_ACCURACY_set_microsPKCS7_to_TS_TST_INFOINT_TS_RESP_VERIFY_TOKENESS_SIGNING_CERT_NEW_INITESS_CERT_ID_NEW_INITESS_ADD_SIGNING_CERTDEF_TIME_CBDEF_SERIAL_CBd2i_TS_RESP.\crypto\ts\ts_req_utils.cCertificate required: %s Nonce: unspecified Policy OID: Version: %d .\crypto\ts\ts_rsp_utils.cthe request cannot be handled due to system failurethe additional information requested could not be understood or is not availablethe requested extension is not supported by the TSAthe requested TSA policy is not supported by the TSAthe TSA's time source is not availablethe data submitted has the wrong formattransaction not permitted or supportedunrecognized or unsupported algorithm identifierRevoked.Revocation warning.Waiting.Rejected.Granted with modifications.Granted. micros millis, seconds, Failure info: Status description: out of bounds Status: TSA: Ordering: %s Accuracy: Time stamp: Serial number: Not included. 
TST info: Status info: .\crypto\ts\ts_rsp_sign.cBad message digest.Message digest algorithm is not supported.Superfluous message digest parameter.Bad request version.Requested policy is not supported..%ld%04d%02d%02d%02d%02d%02dError during serial number generation.Time is not available.Unsupported extension.Error during TSTInfo generation.Error during signature generation.Error during response generation.Bad request format or system error.systemFailureaddInfoNotAvailableunacceptedExtensionunacceptedPolicytimeNotAvailablebadDataFormatbadRequestbadAlgrevocationNotificationrevocationWarningwaitingrejectiongrantedWithModsgranted.\crypto\ts\ts_rsp_verify.cstatus code: , status text: , failure codes: unknown code.\crypto\ts\ts_verify_ctx.cctx != NULLreq != NULL%4sExtensions: Hash Algorithm: %s Message data: unable to load certificate: %s unable to load certificates: %s unable to load private key: %s variable lookup failed for %s::%s invalid variable value for %s::%s tsadefault_tsaengine:.\crypto\ts\ts_conf.cchilbuiltinsigner_certsigner_keydefault_policyother_policiesdigestsmicrosecsmillisecssecsaccuracyclock_precision_digitsorderingtsa_nameess_cert_id_chaincrypto_deviceESS_SIGNING_CERTpolicy_infocert_idsESS_CERT_IDissuer_serialESS_ISSUER_SERIALTS_RESPtokenstatus_infoTS_STATUS_INFOfailure_infotextstatusTS_TST_INFOtimeTS_ACCURACYmicrosmillissecondsTS_REQcert_reqnoncepolicy_idmsg_imprintTS_MSG_IMPRINThashed_msghash_algoQ3QRQhRQ@  QP Q Q QXUQ Q Q R Q@  QQP  ܼ QlP PQ  OQ $XU8S (tQlQ dQTQ T DQP 8Q0 0Q` TtT (QUQ ܼ TQQ UPP Ppq XUP.\crypto\ts\ts_asn1.c1024153620483072409661448192.\crypto\srp\srp_lib.c.\crypto\srp\srp_vfy.cPHlVRSDS/f;ApD:\CFILES\Projects\WinSSL\openssl-1.0.1c\out32dll\libeay32.pdb  ] q   2   8ZZ  (XH[ X[ $Z[  X^ TX b< j[R[z[&[[ZZZZZZZ<[`````n`b`P`@`0``aa&a@aVa^a|aaaaaa|`"```____\$\.\8\F\R\`\h\p\x\\\\\\\\\\\\\]]]$].]6]@]J]R]\]f]n]x]]]]]]]]]]]]]]]^^^"^.^8^B^L^V^`^j^r^|^^^^^^^^^ __&_4_D_V_l_____\\[[[[  ts47 
poWS2_32.dllDeleteDCDeleteObjectGetBitmapBitsBitBltGetObjectAwSelectObject/CreateCompatibleBitmapGetDeviceCaps0CreateCompatibleDC1CreateDCAGDI32.dllDeregisterEventSourceReportEventARegisterEventSourceAADVAPI32.dllGetUserObjectInformationWhGetProcessWindowStation#GetDesktopWindowMessageBoxAUSER32.dlli_snprintffstrtoulJsscanfgetenvwcsstr _vsnprintfxvfprintf__iob_func|_exit7raisefree:reallocmalloc&memcpyr_localtime32_time32*memset_gmtime32_strnicmp_stricmp[strncpyp_errno(memmove5_read_writeisxdigitisdigitfprintfatoiFsprintfastrstrfputsfcloseferrorfreadfwritefflushfopenf_setmode_filenoftellfeoffseekfgetsl_wfopenNstrchr,perror_ftime325qsortOstrcmp_stat325_chmod_fdopen_openTstrerrorisalnumisspacertolowerZstrncmpisupper_strrchrexitestrtol$memchrCsignal_getch.printfMSVCR90.dlls_except_handler4_commonj_encode_pointer_malloc_crtk_encoded_null`_decode_pointer_initterm_initterm_e_amsg_exit _adjust_fdivj__CppXcptFilterK_crt_debugger_hook__clean_type_info_names_internal_unlock__dllonexitv_lock_onexitExitProcessEGetProcAddressGetModuleHandleAGetCurrentThreadIdGetLastErrorGetVersionGetFileTypedGetStdHandleCFindNextFileA2FindFirstFileA.FindClosebFreeLibrary<LoadLibraryARCloseHandlesSetLastErrorgMultiByteToWideCharGetTickCountQueryPerformanceCounterGetCurrentProcessIdGlobalMemoryStatusGetVersionExAVFlushConsoleInputBufferInterlockedExchangeSleepInterlockedCompareExchangeTerminateProcessGetCurrentProcessUnhandledExceptionFilterSetUnhandledExceptionFilterIsDebuggerPresentyGetSystemTimeAsFileTimeKERNEL32.dll;O^DHbX\8 8( h(  p PPj `pm p  @ P`@0`MQRpQw`v ]@]OPIO`O`LP0@v[eg0hO0NK0JPp[_U`Z hH 0ii0Jrv s` p PPP @r0.@0ppP. 
u plpB @DD`DpH0; : 9 : < P/-0`%P@P!#@&47060$%p`[S`YS\ZZ@fi&`8`9P@9:pp00U?HP6 \pP@p; 39`9@8"$"%8:00++)!p%!P$89.{pzP@P P }{P}0jPkPD`DDpD lAABpLPK K0K@KJKKAoyyllp:pFFFFxpwprrrrP>@:@xPxX WPX VpP\@\\\`0 `@@@0 JrbZ>;=0;; ;G?EpIJ^I`0o kkpe gpdcOoik@e|fpd`c``o jle`g@qecM`oikegqdc f f e e d d c pc  c b d d a a b pb e e `a a a PP Ph0mce od@q@uv@     `yzx`ywu`ytp\ _ [ P^ X ] 33>>KK0U U UU`@U``V0@@T 0[0[pP0 `  BB@ Bp@ P CpCP`C nG0p@6706`6=p= >>P=0gPg`@0TT<<pp055 pMPM КpP`   @Mttpp `0  pJ`DpG`Ia00@0P@д@@" Q`  \\`` e Pe Pd 0c b c a 0b d @ @_ ^  @M3`>KPUT CpB5=0= pT<P5@X P8@P<67`7Q@ b9"`&P'baa@_ 0_ ! SpP`PЮp`!Q    \\ ` e pe pd Pc b c a Pb d ` a@a   ` M3>KpUU@CB:6=P= T<p5` p~``v`P@ `p@@``@0  пP r @f g `g 0l i j k r v Pm p n zh`h @ 0hh@  P ` rP-@ 0p0Q!  ww0w w p0Pf` p>p?>CWwu кJ>=0@00P`JP=`ywpx@0pP0P`P0o `r 0o p +)*P) P @` P ` r0 0uPLkl@lplpo mpr P 0  0 >>@kPo! ! P ]p] 0fff`f6 `7 `/ 7 . / 3 3 9 < < > 0@ = @ pC H ! 0]! P]-Э` R R @R PR R R R R 0] d 0d PD D Zpx wPU ` j i j 0j `c > 3 F `> j  z / Pz@p P ,,--0 p @ Ш  ` @ P P p @ @ ` p 0  P ` Ц Ч  p 0 0 ` /0/07777 p P1@k k l l pk Pk k k P0 `@pPе22p222021@2o o Pm n @o l p) q @q Pq q q q q q 0r Pr r `r r r r r 0s Ps s `s ( @( ( ( ) ? ""@#Py` P4 Ђ @ P Ё = `& 8 6 p`@>  Pu u 0 v u0  uut@vP P P 7x|NNP9 0=  R 0P@pUZWZZXS`WW0 z@0` M@ q Pp N@ `Q O ' @MЦ@ p@pq% i` jp  9hMp:uj` |8 o( q @Q N` ` o` Q p}M ) Ц q`4 N p%pPr@ 8  9`   08 qЦ @Pq % P R ` hP `j0 '@ PiQ j0R @ PQ  N p #p@N@|P@  ` ` `  p @ P (`,p"'!+@)%p|xs0ppz|P@|t{p p     Q P`00@p @0pPP@ `ЪP@ p `p'0( &R dPdM0PB;;<:: q; B0=C C:0C::@7 ; 6 8 7 = p7 P< 7 9 p; 7 6 P0>I   930N PN F @=0 ` P` @0`22 22U`TTSPXw   3 м0 OO:: p kP 0 ` `k 0 Ц R 0 ` p  ` p P q@ kP    Pb 0tP` p P5 c K p , p p  `20 0y( `  `T@ b S 0/  l a qb ` `d qb .p Q @ N @J a `+3  0c P0 Q + 8  p5 `9 <p0 P 0 prp`k K u @r `0? ` @ a p.  
'0 @f M b  i `/[@RXd  P8  pM   aP10 P /@  0bd @k  Q @ pbP0 @ `a T 5`a@e  j `c  c  @ pR a ЦR ) ` ` b ` jp `C 8 ai@ P  `  'P1 3p-  pW 0 Z 0@T {Pb Np 0a@`AP> p Pd pN p  `  @: P` 0 b 1 a   0  - p @a@p u `b Lc `@ r@@" R @a0p`S gP_ P  q@ 0   ]pR `R @b _ ` K` Q  b ` P P* @ @ @5 ww0. 0 a@  c 9_ <8 pT p<PT  qa `!` S  }@Y0 @0 p ` p W    @ x =`0d Q  0    0H V `8 u5   @ b +    ` Q Ps  P 5  ( p! P0op- X oPZ`B0R  . ) `* `w @- R 0S `X H  `` Pv0   @3 #y_ ` 0-  ` p 1  `  c p c S P 8 ? a b 0l c `a0 Ppq ` iZ` aap7` / d VpQ p\T Pp !PaT @ T a Pt3 0pa0 00 b q s P.  @0 <`` pT  T `_ 3 `E 0d p0  pa  Pf Pa  K b_ 8 tPb p* 0  a @k6 ` p @; `b a` G c @ 00` c @0 Px 3 @S d PR>e (  @d - v?` @. @ @K 0 Pc p 4  ``J ` d ` Cr   8._ = S @ R  p L T  0 ` Y0a W 3 ``  p `/ , жp? @ S 0qG P-0b  W  0R :T 0X V R `R R @R  qV Ц`  0cb ^ ^ 0> X пbZY YZYZV B X B pLPKb E$GMpGL`Y@Y@Z@H [0pKP Z B[B`K0H@P pt  @ p ` w y0yxx `0  0Ъ `   . @ ``rPPd `@ 0m O p` < Ђ 0R  q7G0P`0B`kl@   Bp  P `m:f x0r ` 0B 1@p  @` pf 7m q ` p p4 z R @n Gpk pl`?0R `r7 H0pkm`;`400  Ц  kQ pl@ p .P@ p@00 @a l @ Ц RP  i@k`0# нk  pn@! 0_ 0P Pkn77` pP4,p|zn@p0D`# Q0m3 f `   p F  s{p  | C@APC PmlR `@ H Х p rQ 0P e 6 <0@|S@l0R `79 C `z0 4P4 7O Px `6n 6 0k i 0rf  0e ` Э @@@{Pe e P P \Х0p-8@V2P222@81313MMOOMMNNOO0N NNNO0PNNN ЦJJ J J q pIЦ`` ЦI R  qP q k R Prd@@op oPoY0opK `! $ " `35p4p04 4% p  0 ` @  0 p  0 @ `   `   0 0% ` $ p 0   @   P q qp p P `  0 ' 0 P    0p% % @ & @ 0 p ` P% `  & P  00pH#g P Ѓ@ :  @ `   P ` 0g @ P@0 R `k P` p 0 P @ P pp R @ @ ``  ] `0 q`   P P R P A Pg q@    Ц; 0R  ` S PS o  З  ` PЙ 00 YЦp^@@S ` 0m Н ` 8 P p`!J @   q@   0   F  0p  y p Ц 0Y7  P R p @   Ц  `  Ф 4 `  p @ @  qlP- 0< `f y  j X` ] n  R p ` V @ ` ЦЦ XЦ P 0 ` `0R VЛ  P S 0 ]` 0l 0 ` ` Л 0R ` mp П @ k @   @  0R @  P q ] P  0 0 pq 0] R P ` R `   p P!0  p @ pT ` ` 0T `P@0 @ 0 AІ  Jp `  @ 0Q" ! `p`@40T S :0@0 @FFFCpE P  P p p 0 p `Z`Y@[ Pe0ec0 j j0c Pcj0@p а Ќ`:Ю М P`` :2 . - 00i0pk'7CSfr3G_s(<Pfw*Jbx .?Ran|1G]l2Jb| 0F`v 2G_o # 5 H Y g w           : T b p       ! 
2 E ] o          6 M a o           ! / A R c q          &6EXiz"0@JWdv/BVm-AUi{ 1KT`l|,@NWdz $/<L\dp} )9LWckv!6EUcv&4>Ri,BU`lw*:N^r!2BRiz#-7APYft{1H_v'2CN_o 6LWcn%09ETiq " * 2 : G T a t           !!(!5!@!Y!p!!!!!!!!!!""0"D"^"x""""""#.#E#_#####$ $@$\$s$$$$$$ % %>%Z%x%%%%%%%%&!&9&J&d&q&z&&&&&&&&''&'0'H']'n'''''''''(&(/(@(U(q(((((( )&)@)K)Z)q)))))**5*R*n*y*********+ ++5+E+V+f+}++++++ ,,%,2,B,[,t,,,,,,,,-#-6-J-^-r------.!.7.M._.u......./,/B/]/s//////00,0?0V0o000000001(1D1Y1l1~1111112?2X2k222222303O3l333333344&474K4_4r444444 55/5>5S5c5z55555646K6n6666677)7:7P7e7}77777778!8/8?8Q8a8v8888888899)979D9Q9^9i9y999999999 ::':;:Q:d:x::::::::::;.;=;Q;X;f;|;;;;;;;;;<<&<5<E<Z<m<<<<<<<<<<===(=@=[=r==========>>>'>2>@>P>c>s>>>>>>>>>???!?8?I?a?p?????????? @@!@2@J@[@q@@@@@@@@@AA/AFA_AqAAAAAAAAABB0BCBVBkBzBBBBBBBB C"C:CPChCCCCCCDD3DQDdDqDDDDDDEE2EIElE~EEEEEEEEFF/FDFUFpFFFFFFFFG%G6GGG^GtGGGGGGGHHH,H:HcHHHHHHHI$I1I>IQIcIvIIIII$JLJeJ{JJJJJJJJKK)K5KEK\KjKvKKKKKKKKKL"L7LPLfL}LLLLLL M$M7MJMbM}MMMMMMMN'N7NQNrNNNNNNO&O>OUOaO}OOOOOOOP*P5POPbPvPPPPPPPQ(QBQ[QsQQQQQR%R=RVRnRRRRRRRRSS%S8SMSfSySSSSSST!TZSZgZ{ZZZZZZZ["[;[Y[i[z[[[[[[[ \ \7\D\W\g\x\\\\\\\ ]]4]J]a]|]]]]]]]^0^C^V^n^^^^^^_%_7_P_a_x________ ` `/`A`R`b`r`````````aa)a;aOabasaaaaaaaabb&b8bKbbbrbbbbbbbbcc(c5cFcRc^clcyccccccccdd*d;dMdgd~dddddd ee=eWekeeeeeee f f3fJf]fpfffffffg+g;gLgZggggggggggh'h8hNhchqhhhhhhhhi i8iXiditiiiiiiiij0jGjajzjjjjjjk)kJkfkkkkkkkkl)l:lLlalslllllllllm2m@mMm[mkmzmmmmmmmmnn(n8nHnXnhnynnnnnnnnoo(o;oLo\olo|ooooooooo p#p8pMpbpxpppppppq)q>qSqaqqqqqqqqqqqqqrr(r:rKr\rlr~rrrrrrrrrr ss(s7sDsQsYsasmsvssssssssssst"t/t?tLtYtktst|tttttttttuu&u7uIuZuku}uuuuuuvv.vAvUvZvkvyvvvvvvvvv w!w3wFwVwdwswwwwwwww xx.x?xQxcxsxxxxxxxxx yy/y?yPyXy\yfyoy}yyyyyyyyyyyy zz4z@zNz]zkzvzzzzzzzzz {${?{S{e{x{{{{{{{ |&|9|N|a|r|||||||||}}!}1}@}N}[}k}w}}}}}}}}}}} ~~!~0~;~G~S~o~~~~~~#Fcu"1J^u΀ #2BTdu΁5VqЂ 2CUfuσ#6J^pӄ<[ąޅ !8HZkņԆ-<Qhʇ&<K`yʈۈ(Hjzω1@Rdsϊ݊*BQ^lzËы /=JYľڌ (?Sf~Ǎ֍&=SguÎЎ*A]wŏ؏ %=Xrΐ%=Rfˑۑ#ARg͒ #1K]v“ړ*Oc~Ҕ ";Wrҕ/@O_tÖҖ 2IYuƗח$0?Skwʘݘ  3IVgv̙ޙ 
"5G^mɚܚ$7HZq›՛,H^xȜ؜1G]t۝,@Vdxў +:KYv̟,5?Oezʠ٠&3HUn̡ܡ)?O]lӢ#6Urãϣأ"7FPbwäӤ/;?KWaqȥ٥ 3DUes}¦Ҧ-Hbҧ8Wpɨݨ*9LUpy۩ +?LPU`jyǪӪ '4@Q_ir˫֫ 3GR]lŬެ #2?JVk|ӭܭ5M]tծ  2Uoӯ(?Tg{ְ%7BR^rȱݱ(=P[lв.EVnƳ%?[v˴&;GYq̵ߵ3JZoȶ5Jf|Ʒ۷-Fbx¸׸,?Qizȹչ.CWh}к !)?UdzŻٻ&9Ndyļټ%:OVdoνݽ.?Wpžپ-=Q`oȿܿ7H^w+=Qgv,B_| 2Kfy&7I^r):Rj/H`w/BVr&4Id4Rp (<Wr %@Zt $?_z.EVk}0CTfy+;H_m%1>M_q+@Xj -Mky9Ys:]q";To"5K[q&=Rkz,:FSq.D[w$DVn"3<R`p| %?]x)<Mbr+EM]m/Lm )7J\m  5Kb}#;Sk~ 0Fav%<Wq.EZt'@Zx4Oey$6BO]k|#5BO]hy 0D]t,?M[m&<Th0APcu ,=Mcx#5FViy2AQbs &3CN^r):Phv2ATr*;Pdp(7EQap#6DQct +<IVco&4K_p$5Iby 1DR`y4H_k~#4CVht0@Vk(9I\l}%4DUfy &6AQex+<Rjx7Ok"0?Rdr*<FRbu,?NWfv 0>R`p}2CYi},6?LT^es   % 1 ? O Z f r }         ! ( 0 ; C M Z a m t {               b   ! XY #= U<$ E   s" M   A>a RwM y k < ; QEg{ b'  , 3~ x* W } \  ^   ` K34[56   7+ E0 q &  ! 3 D > ^ )  _ ?  >O J"#z + $% p   & @ i '()*+,-a 2$iLW3 q56) y p 789:;<>?@ABD=%&FGHIJK 49MN8N O PQRYS SWQUTUVWXYZ [\^_ac>d'efghijkM l x  R mnn f [    % DX D z Toopq ighj rs^uwv! xyz{|}m~} , {tF  j  ` l/ N < k  t " u8-  2          {0rayz3n~uct1o4}m2q|pfhsdvxjkHGwxyz_y z { }  u n  x p v r | m t w  4 OD?/>('# &+ ll  "*,-.)%s q p r >  n& i o$! qr mn k o  ps       ! '_b()*+ac]^`eZ[q  X r   v\5467Nffb_d  h 9 * b=   q ; m  #~ B o VW (J W yQ| g  + * L< 4z n P L  d  ~5 D     ( ]  ; %I N - e G r P aY E s p K\ O 7 ) #  k  2 Ri   2 )$ g { v B n S C  S a : M   ' ^ c 6+(9H @ F 8 ` +    0 g dZ e =I L -; | H     s :  d `o x Y b  8 d h p  7 } 8( e 3 h   r \9  @ j|&3c &/_ 20:^ :L< $;=p!) b _  \ w $ / m c M   l E 7*1Y q)T+!%&ZMpBy(QuUi Q !}J7QJh -'#{.CBVRO8 n B [u.V]tk_& IMn:y 'yT !#$n  S  R  -  /   %&'()* + ,-./ 0 123 456789:;<=>?@ BCNDEFGHIIMKFJKL F   2! 5 D mt    2N A j  0 ~ T  I    S Q ?  / RSTUVWXY    ZP [lj\k ]hi^_ LOz PQRS`bcdeqfghijkMlm  X =  U   \ + * W 6 ] H u ] " * f _ P C < E J   X 8 Pm   3 d & ; 1  . 1 6 o V  [  ' 2 P h V R } l   y U ( , 9   v , U [ % RS C H  s B   CA?  
{ y  w unopqrstuvwxyz {|}~b/!| 'O   b4:^t.owq _ \uD9g    7  6  o  Z F  \ N { # *0G " x '    /  A "5[kq01rfsaetuviW 2j4h`$ " 'M  :  @ lZ  Xa pv   YY  ;:6 Le76Zo      5   EGCJDHL . E O T 0Z cl^g@]3l 2/4 .,*0?GaxSz)c~Xs]:hl1PjClps_rY)Unf6F=m AW1+i->kAz%7:h=Er@Z<v%^<  H K 5  G  ( X 5  . K ! -  s L c u  C e  T L `  ; nm4f,` gB o m } nh r   UV W   c      V "; .    7. Nj !"#$%&'()*+,-./0123456r 789:yw;<=>?@ABCDEF: GHI~ | JK L MNOPQv RSt)Fz8A TB UXV 9 VRWb TOX> YZ [P\]^_5`a bcdefghitQ j/ k lm  K n  0    opqP5rst[u #  = v wCv 1 Jl T S F \ U  ![xz {"V| }~d e c O "   i * w 4 G  N u<8  eZ  ? ] % Z w   i p  3  "#$%& V I  w. Bs J G f 4 1 Kg [t w { C xs =;}i?A ,8  X` z Rq  t |  T  S 2 O J W & 8 m /zv v~#  g 9-1aIOd~*g|`BqT% ./0u x 1D2 45|678dH9df : ;<:< go=>@>?@A9B"  { F t[ F w $ k 7 @  CDGE  D Q y I  xt FGHIJK LMNOPQz{}!  RSTUVWe =@,0$oX;#XY]ZS9[\]^_`Qabcdefghijklm, 6nopqrstuvwxyz{|}~' $ } wA  >c    j uvtLIBEAY32.dllACCESS_DESCRIPTION_freeACCESS_DESCRIPTION_itACCESS_DESCRIPTION_newAES_bi_ige_encryptAES_cbc_encryptAES_cfb128_encryptAES_cfb1_encryptAES_cfb8_encryptAES_ctr128_encryptAES_decryptAES_ecb_encryptAES_encryptAES_ige_encryptAES_ofb128_encryptAES_optionsAES_set_decrypt_keyAES_set_encrypt_keyAES_unwrap_keyAES_wrap_keyASN1_ANY_itASN1_BIT_STRING_checkASN1_BIT_STRING_freeASN1_BIT_STRING_get_bitASN1_BIT_STRING_itASN1_BIT_STRING_name_printASN1_BIT_STRING_newASN1_BIT_STRING_num_ascASN1_BIT_STRING_setASN1_BIT_STRING_set_ascASN1_BIT_STRING_set_bitASN1_BMPSTRING_freeASN1_BMPSTRING_itASN1_BMPSTRING_newASN1_BOOLEAN_itASN1_ENUMERATED_freeASN1_ENUMERATED_getASN1_ENUMERATED_itASN1_ENUMERATED_newASN1_ENUMERATED_setASN1_ENUMERATED_to_BNASN1_FBOOLEAN_itASN1_GENERALIZEDTIME_adjASN1_GENERALIZEDTIME_checkASN1_GENERALIZEDTIME_freeASN1_GENERALIZEDTIME_itASN1_GENERALIZEDTIME_newASN1_GENERALIZEDTIME_printASN1_GENERALIZEDTIME_setASN1_GENERALIZEDTIME_set_stringASN1_GENERALSTRING_freeASN1_GENERALSTRING_itASN1_GENERALSTRING_newASN1_IA5STRING_freeASN1_IA5STRING_itASN1_IA5STRING_newASN1_INTEGER_cmpASN1_I
NTEGER_dupASN1_INTEGER_freeASN1_INTEGER_getASN1_INTEGER_itASN1_INTEGER_newASN1_INTEGER_setASN1_INTEGER_to_BNASN1_NULL_freeASN1_NULL_itASN1_NULL_newASN1_OBJECT_createASN1_OBJECT_freeASN1_OBJECT_itASN1_OBJECT_newASN1_OCTET_STRING_NDEF_itASN1_OCTET_STRING_cmpASN1_OCTET_STRING_dupASN1_OCTET_STRING_freeASN1_OCTET_STRING_itASN1_OCTET_STRING_newASN1_OCTET_STRING_setASN1_PCTX_freeASN1_PCTX_get_cert_flagsASN1_PCTX_get_flagsASN1_PCTX_get_nm_flagsASN1_PCTX_get_oid_flagsASN1_PCTX_get_str_flagsASN1_PCTX_newASN1_PCTX_set_cert_flagsASN1_PCTX_set_flagsASN1_PCTX_set_nm_flagsASN1_PCTX_set_oid_flagsASN1_PCTX_set_str_flagsASN1_PRINTABLESTRING_freeASN1_PRINTABLESTRING_itASN1_PRINTABLESTRING_newASN1_PRINTABLE_freeASN1_PRINTABLE_itASN1_PRINTABLE_newASN1_PRINTABLE_typeASN1_SEQUENCE_ANY_itASN1_SEQUENCE_itASN1_SET_ANY_itASN1_STRING_TABLE_addASN1_STRING_TABLE_cleanupASN1_STRING_TABLE_getASN1_STRING_cmpASN1_STRING_copyASN1_STRING_dataASN1_STRING_dupASN1_STRING_freeASN1_STRING_get_default_maskASN1_STRING_lengthASN1_STRING_length_setASN1_STRING_newASN1_STRING_printASN1_STRING_print_exASN1_STRING_print_ex_fpASN1_STRING_setASN1_STRING_set0ASN1_STRING_set_by_NIDASN1_STRING_set_default_maskASN1_STRING_set_default_mask_ascASN1_STRING_to_UTF8ASN1_STRING_typeASN1_STRING_type_newASN1_T61STRING_freeASN1_T61STRING_itASN1_T61STRING_newASN1_TBOOLEAN_itASN1_TIME_adjASN1_TIME_checkASN1_TIME_freeASN1_TIME_itASN1_TIME_newASN1_TIME_printASN1_TIME_setASN1_TIME_set_stringASN1_TIME_to_generalizedtimeASN1_TYPE_cmpASN1_TYPE_freeASN1_TYPE_getASN1_TYPE_get_int_octetstringASN1_TYPE_get_octetstringASN1_TYPE_newASN1_TYPE_setASN1_TYPE_set1ASN1_TYPE_set_int_octetstringASN1_TYPE_set_octetstringASN1_UNIVERSALSTRING_freeASN1_UNIVERSALSTRING_itASN1_UNIVERSALSTRING_newASN1_UNIVERSALSTRING_to_stringASN1_UTCTIME_adjASN1_UTCTIME_checkASN1_UTCTIME_cmp_time_tASN1_UTCTIME_freeASN1_UTCTIME_itASN1_UTCTIME_newASN1_UTCTIME_printASN1_UTCTIME_setASN1_UTCTIME_set_stringASN1_UTF8STRING_freeASN1_UTF8STRING_itASN1_UTF8STRING_newASN1_VISIBLESTRI
NG_freeASN1_VISIBLESTRING_itASN1_VISIBLESTRING_newASN1_add_oid_moduleASN1_bn_printASN1_check_infinite_endASN1_const_check_infinite_endASN1_d2i_bioASN1_d2i_fpASN1_digestASN1_dupASN1_generate_nconfASN1_generate_v3ASN1_get_objectASN1_i2d_bioASN1_i2d_fpASN1_item_d2iASN1_item_d2i_bioASN1_item_d2i_fpASN1_item_digestASN1_item_dupASN1_item_ex_d2iASN1_item_ex_freeASN1_item_ex_i2dASN1_item_ex_newASN1_item_freeASN1_item_i2dASN1_item_i2d_bioASN1_item_i2d_fpASN1_item_ndef_i2dASN1_item_newASN1_item_packASN1_item_printASN1_item_signASN1_item_sign_ctxASN1_item_unpackASN1_item_verifyASN1_mbstring_copyASN1_mbstring_ncopyASN1_object_sizeASN1_pack_stringASN1_parseASN1_parse_dumpASN1_primitive_freeASN1_primitive_newASN1_put_eocASN1_put_objectASN1_seq_packASN1_seq_unpackASN1_signASN1_tag2bitASN1_tag2strASN1_template_d2iASN1_template_freeASN1_template_i2dASN1_template_newASN1_unpack_stringASN1_verifyAUTHORITY_INFO_ACCESS_freeAUTHORITY_INFO_ACCESS_itAUTHORITY_INFO_ACCESS_newAUTHORITY_KEYID_freeAUTHORITY_KEYID_itAUTHORITY_KEYID_newBASIC_CONSTRAINTS_freeBASIC_CONSTRAINTS_itBASIC_CONSTRAINTS_newBF_cbc_encryptBF_cfb64_encryptBF_decryptBF_ecb_encryptBF_encryptBF_ofb64_encryptBF_optionsBF_set_keyBIGNUM_itBIO_acceptBIO_asn1_get_prefixBIO_asn1_get_suffixBIO_asn1_set_prefixBIO_asn1_set_suffixBIO_callback_ctrlBIO_clear_flagsBIO_copy_next_retryBIO_ctrlBIO_ctrl_get_read_requestBIO_ctrl_get_write_guaranteeBIO_ctrl_pendingBIO_ctrl_reset_read_requestBIO_ctrl_wpendingBIO_debug_callbackBIO_dgram_non_fatal_errorBIO_dumpBIO_dump_cbBIO_dump_fpBIO_dump_indentBIO_dump_indent_cbBIO_dump_indent_fpBIO_dup_chainBIO_f_asn1BIO_f_base64BIO_f_bufferBIO_f_cipherBIO_f_mdBIO_f_nbio_testBIO_f_nullBIO_f_reliableBIO_fd_non_fatal_errorBIO_fd_should_retryBIO_find_typeBIO_freeBIO_free_allBIO_get_accept_socketBIO_get_callbackBIO_get_callback_argBIO_get_ex_dataBIO_get_ex_new_indexBIO_get_host_ipBIO_get_portBIO_get_retry_BIOBIO_get_retry_reasonBIO_gethostbynameBIO_getsBIO_indentBIO_int_ctrlBIO_method_nameBIO_method_typeBIO_newBIO_
new_CMSBIO_new_NDEFBIO_new_PKCS7BIO_new_acceptBIO_new_bio_pairBIO_new_connectBIO_new_dgramBIO_new_fdBIO_new_fileBIO_new_fpBIO_new_mem_bufBIO_new_socketBIO_nextBIO_nreadBIO_nread0BIO_number_readBIO_number_writtenBIO_nwriteBIO_nwrite0BIO_popBIO_printfBIO_ptr_ctrlBIO_pushBIO_putsBIO_readBIO_s_acceptBIO_s_bioBIO_s_connectBIO_s_datagramBIO_s_fdBIO_s_fileBIO_s_memBIO_s_nullBIO_s_socketBIO_setBIO_set_callbackBIO_set_callback_argBIO_set_cipherBIO_set_ex_dataBIO_set_flagsBIO_set_tcp_ndelayBIO_snprintfBIO_sock_cleanupBIO_sock_errorBIO_sock_initBIO_sock_non_fatal_errorBIO_sock_should_retryBIO_socket_ioctlBIO_socket_nbioBIO_test_flagsBIO_vfreeBIO_vprintfBIO_vsnprintfBIO_writeBN_BLINDING_convertBN_BLINDING_convert_exBN_BLINDING_create_paramBN_BLINDING_freeBN_BLINDING_get_flagsBN_BLINDING_get_thread_idBN_BLINDING_invertBN_BLINDING_invert_exBN_BLINDING_newBN_BLINDING_set_flagsBN_BLINDING_set_thread_idBN_BLINDING_thread_idBN_BLINDING_updateBN_CTX_endBN_CTX_freeBN_CTX_getBN_CTX_initBN_CTX_newBN_CTX_startBN_GENCB_callBN_GF2m_addBN_GF2m_arr2polyBN_GF2m_modBN_GF2m_mod_arrBN_GF2m_mod_divBN_GF2m_mod_div_arrBN_GF2m_mod_expBN_GF2m_mod_exp_arrBN_GF2m_mod_invBN_GF2m_mod_inv_arrBN_GF2m_mod_mulBN_GF2m_mod_mul_arrBN_GF2m_mod_solve_quadBN_GF2m_mod_solve_quad_arrBN_GF2m_mod_sqrBN_GF2m_mod_sqr_arrBN_GF2m_mod_sqrtBN_GF2m_mod_sqrt_arrBN_GF2m_poly2arrBN_MONT_CTX_copyBN_MONT_CTX_freeBN_MONT_CTX_initBN_MONT_CTX_newBN_MONT_CTX_setBN_MONT_CTX_set_lockedBN_RECP_CTX_freeBN_RECP_CTX_initBN_RECP_CTX_newBN_RECP_CTX_setBN_X931_derive_prime_exBN_X931_generate_XpqBN_X931_generate_prime_exBN_addBN_add_wordBN_asc2bnBN_bin2bnBN_bn2binBN_bn2decBN_bn2hexBN_bn2mpiBN_bntest_randBN_clearBN_clear_bitBN_clear_freeBN_cmpBN_copyBN_dec2bnBN_divBN_div_recpBN_div_wordBN_dupBN_expBN_freeBN_from_montgomeryBN_gcdBN_generate_primeBN_generate_prime_exBN_get0_nist_prime_192BN_get0_nist_prime_224BN_get0_nist_prime_256BN_get0_nist_prime_384BN_get0_nist_prime_521BN_get_paramsBN_get_wordBN_hex2bnBN_initBN_is_bit_setBN_is_primeBN_is_prim
e_exBN_is_prime_fasttestBN_is_prime_fasttest_exBN_kroneckerBN_lshiftBN_lshift1BN_mask_bitsBN_mod_addBN_mod_add_quickBN_mod_expBN_mod_exp2_montBN_mod_exp_montBN_mod_exp_mont_consttimeBN_mod_exp_mont_wordBN_mod_exp_recpBN_mod_exp_simpleBN_mod_inverseBN_mod_lshiftBN_mod_lshift1BN_mod_lshift1_quickBN_mod_lshift_quickBN_mod_mulBN_mod_mul_montgomeryBN_mod_mul_reciprocalBN_mod_sqrBN_mod_sqrtBN_mod_subBN_mod_sub_quickBN_mod_wordBN_mpi2bnBN_mulBN_mul_wordBN_newBN_nist_mod_192BN_nist_mod_224BN_nist_mod_256BN_nist_mod_384BN_nist_mod_521BN_nnmodBN_num_bitsBN_num_bits_wordBN_optionsBN_printBN_print_fpBN_pseudo_randBN_pseudo_rand_rangeBN_randBN_rand_rangeBN_reciprocalBN_rshiftBN_rshift1BN_set_bitBN_set_negativeBN_set_paramsBN_set_wordBN_sqrBN_subBN_sub_wordBN_swapBN_to_ASN1_ENUMERATEDBN_to_ASN1_INTEGERBN_uaddBN_ucmpBN_usubBN_value_oneBUF_MEM_freeBUF_MEM_growBUF_MEM_grow_cleanBUF_MEM_newBUF_memdupBUF_reverseBUF_strdupBUF_strlcatBUF_strlcpyBUF_strndupCAST_cbc_encryptCAST_cfb64_encryptCAST_decryptCAST_ecb_encryptCAST_encryptCAST_ofb64_encryptCAST_set_keyCBIGNUM_itCERTIFICATEPOLICIES_freeCERTIFICATEPOLICIES_itCERTIFICATEPOLICIES_newCMAC_CTX_cleanupCMAC_CTX_copyCMAC_CTX_freeCMAC_CTX_get0_cipher_ctxCMAC_CTX_newCMAC_FinalCMAC_InitCMAC_UpdateCMAC_resumeCMS_ContentInfo_freeCMS_ContentInfo_itCMS_ContentInfo_newCMS_ContentInfo_print_ctxCMS_EncryptedData_decryptCMS_EncryptedData_encryptCMS_EncryptedData_set1_keyCMS_EnvelopedData_createCMS_ReceiptRequest_create0CMS_ReceiptRequest_freeCMS_ReceiptRequest_get0_valuesCMS_ReceiptRequest_itCMS_ReceiptRequest_newCMS_RecipientInfo_decryptCMS_RecipientInfo_kekri_get0_idCMS_RecipientInfo_kekri_id_cmpCMS_RecipientInfo_ktri_cert_cmpCMS_RecipientInfo_ktri_get0_algsCMS_RecipientInfo_ktri_get0_signer_idCMS_RecipientInfo_set0_keyCMS_RecipientInfo_set0_passwordCMS_RecipientInfo_set0_pkeyCMS_RecipientInfo_typeCMS_SignedData_initCMS_SignerInfo_cert_cmpCMS_SignerInfo_get0_algsCMS_SignerInfo_get0_signer_idCMS_SignerInfo_set1_signer_certCMS_SignerInfo_signCMS_Sign
erInfo_verifyCMS_SignerInfo_verify_contentCMS_add0_CertificateChoicesCMS_add0_RevocationInfoChoiceCMS_add0_certCMS_add0_crlCMS_add0_recipient_keyCMS_add0_recipient_passwordCMS_add1_ReceiptRequestCMS_add1_certCMS_add1_crlCMS_add1_recipient_certCMS_add1_signerCMS_add_simple_smimecapCMS_add_smimecapCMS_add_standard_smimecapCMS_compressCMS_dataCMS_dataFinalCMS_dataInitCMS_data_createCMS_decryptCMS_decrypt_set1_keyCMS_decrypt_set1_passwordCMS_decrypt_set1_pkeyCMS_digest_createCMS_digest_verifyCMS_encryptCMS_finalCMS_get0_RecipientInfosCMS_get0_SignerInfosCMS_get0_contentCMS_get0_eContentTypeCMS_get0_signersCMS_get0_typeCMS_get1_ReceiptRequestCMS_get1_certsCMS_get1_crlsCMS_is_detachedCMS_set1_eContentTypeCMS_set1_signers_certsCMS_set_detachedCMS_signCMS_sign_receiptCMS_signed_add1_attrCMS_signed_add1_attr_by_NIDCMS_signed_add1_attr_by_OBJCMS_signed_add1_attr_by_txtCMS_signed_delete_attrCMS_signed_get0_data_by_OBJCMS_signed_get_attrCMS_signed_get_attr_by_NIDCMS_signed_get_attr_by_OBJCMS_signed_get_attr_countCMS_streamCMS_uncompressCMS_unsigned_add1_attrCMS_unsigned_add1_attr_by_NIDCMS_unsigned_add1_attr_by_OBJCMS_unsigned_add1_attr_by_txtCMS_unsigned_delete_attrCMS_unsigned_get0_data_by_OBJCMS_unsigned_get_attrCMS_unsigned_get_attr_by_NIDCMS_unsigned_get_attr_by_OBJCMS_unsigned_get_attr_countCMS_verifyCMS_verify_receiptCOMP_CTX_freeCOMP_CTX_newCOMP_compress_blockCOMP_expand_blockCOMP_rleCOMP_zlibCOMP_zlib_cleanupCONF_dump_bioCONF_dump_fpCONF_freeCONF_get1_default_config_fileCONF_get_numberCONF_get_sectionCONF_get_stringCONF_imodule_get_flagsCONF_imodule_get_moduleCONF_imodule_get_nameCONF_imodule_get_usr_dataCONF_imodule_get_valueCONF_imodule_set_flagsCONF_imodule_set_usr_dataCONF_loadCONF_load_bioCONF_load_fpCONF_module_addCONF_module_get_usr_dataCONF_module_set_usr_dataCONF_modules_finishCONF_modules_freeCONF_modules_loadCONF_modules_load_fileCONF_modules_unloadCONF_parse_listCONF_set_default_methodCONF_set_nconfCRL_DIST_POINTS_freeCRL_DIST_POINTS_itCRL_DIST_POINTS_newCR
YPTO_THREADID_cmpCRYPTO_THREADID_cpyCRYPTO_THREADID_currentCRYPTO_THREADID_get_callbackCRYPTO_THREADID_hashCRYPTO_THREADID_set_callbackCRYPTO_THREADID_set_numericCRYPTO_THREADID_set_pointerCRYPTO_add_lockCRYPTO_cbc128_decryptCRYPTO_cbc128_encryptCRYPTO_ccm128_aadCRYPTO_ccm128_decryptCRYPTO_ccm128_decrypt_ccm64CRYPTO_ccm128_encryptCRYPTO_ccm128_encrypt_ccm64CRYPTO_ccm128_initCRYPTO_ccm128_setivCRYPTO_ccm128_tagCRYPTO_cfb128_1_encryptCRYPTO_cfb128_8_encryptCRYPTO_cfb128_encryptCRYPTO_cleanup_all_ex_dataCRYPTO_ctr128_encryptCRYPTO_ctr128_encrypt_ctr32CRYPTO_cts128_decryptCRYPTO_cts128_decrypt_blockCRYPTO_cts128_encryptCRYPTO_cts128_encrypt_blockCRYPTO_dbg_freeCRYPTO_dbg_get_optionsCRYPTO_dbg_mallocCRYPTO_dbg_reallocCRYPTO_dbg_set_optionsCRYPTO_destroy_dynlockidCRYPTO_dup_ex_dataCRYPTO_ex_data_new_classCRYPTO_freeCRYPTO_free_ex_dataCRYPTO_free_lockedCRYPTO_gcm128_aadCRYPTO_gcm128_decryptCRYPTO_gcm128_decrypt_ctr32CRYPTO_gcm128_encryptCRYPTO_gcm128_encrypt_ctr32CRYPTO_gcm128_finishCRYPTO_gcm128_initCRYPTO_gcm128_newCRYPTO_gcm128_releaseCRYPTO_gcm128_setivCRYPTO_gcm128_tagCRYPTO_get_add_lock_callbackCRYPTO_get_dynlock_create_callbackCRYPTO_get_dynlock_destroy_callbackCRYPTO_get_dynlock_lock_callbackCRYPTO_get_dynlock_valueCRYPTO_get_ex_dataCRYPTO_get_ex_data_implementationCRYPTO_get_ex_new_indexCRYPTO_get_id_callbackCRYPTO_get_lock_nameCRYPTO_get_locked_mem_ex_functionsCRYPTO_get_locked_mem_functionsCRYPTO_get_locking_callbackCRYPTO_get_mem_debug_functionsCRYPTO_get_mem_debug_optionsCRYPTO_get_mem_ex_functionsCRYPTO_get_mem_functionsCRYPTO_get_new_dynlockidCRYPTO_get_new_lockidCRYPTO_is_mem_check_onCRYPTO_lockCRYPTO_mallocCRYPTO_malloc_lockedCRYPTO_mem_ctrlCRYPTO_mem_leaksCRYPTO_mem_leaks_cbCRYPTO_mem_leaks_fpCRYPTO_new_ex_dataCRYPTO_nistcts128_decryptCRYPTO_nistcts128_decrypt_blockCRYPTO_nistcts128_encryptCRYPTO_nistcts128_encrypt_blockCRYPTO_num_locksCRYPTO_ofb128_encryptCRYPTO_pop_infoCRYPTO_push_info_CRYPTO_reallocCRYPTO_realloc_cleanCRYPTO_remallocCRYPTO_remove_all_i
nfoCRYPTO_set_add_lock_callbackCRYPTO_set_dynlock_create_callbackCRYPTO_set_dynlock_destroy_callbackCRYPTO_set_dynlock_lock_callbackCRYPTO_set_ex_dataCRYPTO_set_ex_data_implementationCRYPTO_set_id_callbackCRYPTO_set_locked_mem_ex_functionsCRYPTO_set_locked_mem_functionsCRYPTO_set_locking_callbackCRYPTO_set_mem_debug_functionsCRYPTO_set_mem_debug_optionsCRYPTO_set_mem_ex_functionsCRYPTO_set_mem_functionsCRYPTO_strdupCRYPTO_thread_idCRYPTO_xts128_encryptCamellia_cbc_encryptCamellia_cfb128_encryptCamellia_cfb1_encryptCamellia_cfb8_encryptCamellia_ctr128_encryptCamellia_decryptCamellia_ecb_encryptCamellia_encryptCamellia_ofb128_encryptCamellia_set_keyDES_cbc_cksumDES_cbc_encryptDES_cfb64_encryptDES_cfb_encryptDES_check_key_parityDES_cryptDES_decrypt3DES_ecb3_encryptDES_ecb_encryptDES_ede3_cbc_encryptDES_ede3_cbcm_encryptDES_ede3_cfb64_encryptDES_ede3_cfb_encryptDES_ede3_ofb64_encryptDES_enc_readDES_enc_writeDES_encrypt1DES_encrypt2DES_encrypt3DES_fcryptDES_is_weak_keyDES_key_schedDES_ncbc_encryptDES_ofb64_encryptDES_ofb_encryptDES_optionsDES_pcbc_encryptDES_quad_cksumDES_random_keyDES_read_2passwordsDES_read_passwordDES_set_keyDES_set_key_checkedDES_set_key_uncheckedDES_set_odd_parityDES_string_to_2keysDES_string_to_keyDES_xcbc_encryptDH_OpenSSLDH_checkDH_check_pub_keyDH_compute_keyDH_freeDH_generate_keyDH_generate_parametersDH_generate_parameters_exDH_get_default_methodDH_get_ex_dataDH_get_ex_new_indexDH_newDH_new_methodDH_set_default_methodDH_set_ex_dataDH_set_methodDH_sizeDH_up_refDHparams_dupDHparams_printDHparams_print_fpDIRECTORYSTRING_freeDIRECTORYSTRING_itDIRECTORYSTRING_newDISPLAYTEXT_freeDISPLAYTEXT_itDISPLAYTEXT_newDIST_POINT_NAME_freeDIST_POINT_NAME_itDIST_POINT_NAME_newDIST_POINT_freeDIST_POINT_itDIST_POINT_newDIST_POINT_set_dpnameDSA_OpenSSLDSA_SIG_freeDSA_SIG_newDSA_do_signDSA_do_verifyDSA_dup_DHDSA_freeDSA_generate_keyDSA_generate_parametersDSA_generate_parameters_exDSA_get_default_methodDSA_get_ex_dataDSA_get_ex_new_indexDSA_newDSA_new_methodDSA_printDS
A_print_fpDSA_set_default_methodDSA_set_ex_dataDSA_set_methodDSA_signDSA_sign_setupDSA_sizeDSA_up_refDSA_verifyDSAparams_dupDSAparams_printDSAparams_print_fpDSO_METHOD_beosDSO_METHOD_dlDSO_METHOD_dlfcnDSO_METHOD_nullDSO_METHOD_opensslDSO_METHOD_vmsDSO_METHOD_win32DSO_bind_funcDSO_bind_varDSO_convert_filenameDSO_ctrlDSO_flagsDSO_freeDSO_get_default_methodDSO_get_filenameDSO_get_loaded_filenameDSO_get_methodDSO_global_lookupDSO_loadDSO_mergeDSO_newDSO_new_methodDSO_pathbyaddrDSO_set_default_methodDSO_set_filenameDSO_set_methodDSO_set_name_converterDSO_up_refECDH_OpenSSLECDH_compute_keyECDH_get_default_methodECDH_get_ex_dataECDH_get_ex_new_indexECDH_set_default_methodECDH_set_ex_dataECDH_set_methodECDSA_OpenSSLECDSA_SIG_freeECDSA_SIG_newECDSA_do_signECDSA_do_sign_exECDSA_do_verifyECDSA_get_default_methodECDSA_get_ex_dataECDSA_get_ex_new_indexECDSA_set_default_methodECDSA_set_ex_dataECDSA_set_methodECDSA_signECDSA_sign_exECDSA_sign_setupECDSA_sizeECDSA_verifyECPKParameters_printECPKParameters_print_fpECParameters_printECParameters_print_fpEC_GF2m_simple_methodEC_GFp_mont_methodEC_GFp_nist_methodEC_GFp_simple_methodEC_GROUP_checkEC_GROUP_check_discriminantEC_GROUP_clear_freeEC_GROUP_cmpEC_GROUP_copyEC_GROUP_dupEC_GROUP_freeEC_GROUP_get0_generatorEC_GROUP_get0_seedEC_GROUP_get_asn1_flagEC_GROUP_get_basis_typeEC_GROUP_get_cofactorEC_GROUP_get_curve_GF2mEC_GROUP_get_curve_GFpEC_GROUP_get_curve_nameEC_GROUP_get_degreeEC_GROUP_get_orderEC_GROUP_get_pentanomial_basisEC_GROUP_get_point_conversion_formEC_GROUP_get_seed_lenEC_GROUP_get_trinomial_basisEC_GROUP_have_precompute_multEC_GROUP_method_ofEC_GROUP_newEC_GROUP_new_by_curve_nameEC_GROUP_new_curve_GF2mEC_GROUP_new_curve_GFpEC_GROUP_precompute_multEC_GROUP_set_asn1_flagEC_GROUP_set_curve_GF2mEC_GROUP_set_curve_GFpEC_GROUP_set_curve_nameEC_GROUP_set_generatorEC_GROUP_set_point_conversion_formEC_GROUP_set_seedEC_KEY_check_keyEC_KEY_clear_flagsEC_KEY_copyEC_KEY_dupEC_KEY_freeEC_KEY_generate_keyEC_KEY_get0_groupEC_KEY_get0_privat
e_keyEC_KEY_get0_public_keyEC_KEY_get_conv_formEC_KEY_get_enc_flagsEC_KEY_get_flagsEC_KEY_get_key_method_dataEC_KEY_insert_key_method_dataEC_KEY_newEC_KEY_new_by_curve_nameEC_KEY_precompute_multEC_KEY_printEC_KEY_print_fpEC_KEY_set_asn1_flagEC_KEY_set_conv_formEC_KEY_set_enc_flagsEC_KEY_set_flagsEC_KEY_set_groupEC_KEY_set_private_keyEC_KEY_set_public_keyEC_KEY_set_public_key_affine_coordinatesEC_KEY_up_refEC_METHOD_get_field_typeEC_POINT_addEC_POINT_bn2pointEC_POINT_clear_freeEC_POINT_cmpEC_POINT_copyEC_POINT_dblEC_POINT_dupEC_POINT_freeEC_POINT_get_Jprojective_coordinates_GFpEC_POINT_get_affine_coordinates_GF2mEC_POINT_get_affine_coordinates_GFpEC_POINT_hex2pointEC_POINT_invertEC_POINT_is_at_infinityEC_POINT_is_on_curveEC_POINT_make_affineEC_POINT_method_ofEC_POINT_mulEC_POINT_newEC_POINT_oct2pointEC_POINT_point2bnEC_POINT_point2hexEC_POINT_point2octEC_POINT_set_Jprojective_coordinates_GFpEC_POINT_set_affine_coordinates_GF2mEC_POINT_set_affine_coordinates_GFpEC_POINT_set_compressed_coordinates_GF2mEC_POINT_set_compressed_coordinates_GFpEC_POINT_set_to_infinityEC_POINTs_make_affineEC_POINTs_mulEC_get_builtin_curvesEDIPARTYNAME_freeEDIPARTYNAME_itEDIPARTYNAME_newENGINE_addENGINE_add_conf_moduleENGINE_by_idENGINE_cleanupENGINE_cmd_is_executableENGINE_ctrlENGINE_ctrl_cmdENGINE_ctrl_cmd_stringENGINE_finishENGINE_freeENGINE_get_DHENGINE_get_DSAENGINE_get_ECDHENGINE_get_ECDSAENGINE_get_RANDENGINE_get_RSAENGINE_get_STOREENGINE_get_cipherENGINE_get_cipher_engineENGINE_get_ciphersENGINE_get_cmd_defnsENGINE_get_ctrl_functionENGINE_get_default_DHENGINE_get_default_DSAENGINE_get_default_ECDHENGINE_get_default_ECDSAENGINE_get_default_RANDENGINE_get_default_RSAENGINE_get_destroy_functionENGINE_get_digestENGINE_get_digest_engineENGINE_get_digestsENGINE_get_ex_dataENGINE_get_ex_new_indexENGINE_get_finish_functionENGINE_get_firstENGINE_get_flagsENGINE_get_idENGINE_get_init_functionENGINE_get_lastENGINE_get_load_privkey_functionENGINE_get_load_pubkey_functionENGINE_get_nameENGINE_get
_nextENGINE_get_pkey_asn1_methENGINE_get_pkey_asn1_meth_engineENGINE_get_pkey_asn1_meth_strENGINE_get_pkey_asn1_methsENGINE_get_pkey_methENGINE_get_pkey_meth_engineENGINE_get_pkey_methsENGINE_get_prevENGINE_get_ssl_client_cert_functionENGINE_get_static_stateENGINE_get_table_flagsENGINE_initENGINE_load_builtin_enginesENGINE_load_cryptodevENGINE_load_dynamicENGINE_load_opensslENGINE_load_private_keyENGINE_load_public_keyENGINE_load_rdrandENGINE_load_rsaxENGINE_load_ssl_client_certENGINE_newENGINE_pkey_asn1_find_strENGINE_register_DHENGINE_register_DSAENGINE_register_ECDHENGINE_register_ECDSAENGINE_register_RANDENGINE_register_RSAENGINE_register_STOREENGINE_register_all_DHENGINE_register_all_DSAENGINE_register_all_ECDHENGINE_register_all_ECDSAENGINE_register_all_RANDENGINE_register_all_RSAENGINE_register_all_STOREENGINE_register_all_ciphersENGINE_register_all_completeENGINE_register_all_digestsENGINE_register_all_pkey_asn1_methsENGINE_register_all_pkey_methsENGINE_register_ciphersENGINE_register_completeENGINE_register_digestsENGINE_register_pkey_asn1_methsENGINE_register_pkey_methsENGINE_removeENGINE_set_DHENGINE_set_DSAENGINE_set_ECDHENGINE_set_ECDSAENGINE_set_RANDENGINE_set_RSAENGINE_set_STOREENGINE_set_ciphersENGINE_set_cmd_defnsENGINE_set_ctrl_functionENGINE_set_defaultENGINE_set_default_DHENGINE_set_default_DSAENGINE_set_default_ECDHENGINE_set_default_ECDSAENGINE_set_default_RANDENGINE_set_default_RSAENGINE_set_default_ciphersENGINE_set_default_digestsENGINE_set_default_pkey_asn1_methsENGINE_set_default_pkey_methsENGINE_set_default_stringENGINE_set_destroy_functionENGINE_set_digestsENGINE_set_ex_dataENGINE_set_finish_functionENGINE_set_flagsENGINE_set_idENGINE_set_init_functionENGINE_set_load_privkey_functionENGINE_set_load_pubkey_functionENGINE_set_load_ssl_client_cert_functionENGINE_set_nameENGINE_set_pkey_asn1_methsENGINE_set_pkey_methsENGINE_set_table_flagsENGINE_unregister_DHENGINE_unregister_DSAENGINE_unregister_ECDHENGINE_unregister_ECDSAENGINE_unregister_
RANDENGINE_unregister_RSAENGINE_unregister_STOREENGINE_unregister_ciphersENGINE_unregister_digestsENGINE_unregister_pkey_asn1_methsENGINE_unregister_pkey_methsENGINE_up_refERR_add_error_dataERR_add_error_vdataERR_clear_errorERR_error_stringERR_error_string_nERR_free_stringsERR_func_error_stringERR_get_err_state_tableERR_get_errorERR_get_error_lineERR_get_error_line_dataERR_get_implementationERR_get_next_error_libraryERR_get_stateERR_get_string_tableERR_lib_error_stringERR_load_ASN1_stringsERR_load_BIO_stringsERR_load_BN_stringsERR_load_BUF_stringsERR_load_CMS_stringsERR_load_COMP_stringsERR_load_CONF_stringsERR_load_CRYPTO_stringsERR_load_DH_stringsERR_load_DSA_stringsERR_load_DSO_stringsERR_load_ECDH_stringsERR_load_ECDSA_stringsERR_load_EC_stringsERR_load_ENGINE_stringsERR_load_ERR_stringsERR_load_EVP_stringsERR_load_OBJ_stringsERR_load_OCSP_stringsERR_load_PEM_stringsERR_load_PKCS12_stringsERR_load_PKCS7_stringsERR_load_RAND_stringsERR_load_RSA_stringsERR_load_TS_stringsERR_load_UI_stringsERR_load_X509V3_stringsERR_load_X509_stringsERR_load_crypto_stringsERR_load_stringsERR_peek_errorERR_peek_error_lineERR_peek_error_line_dataERR_peek_last_errorERR_peek_last_error_lineERR_peek_last_error_line_dataERR_pop_to_markERR_print_errorsERR_print_errors_cbERR_print_errors_fpERR_put_errorERR_reason_error_stringERR_release_err_state_tableERR_remove_stateERR_remove_thread_stateERR_set_error_dataERR_set_implementationERR_set_markERR_unload_stringsESS_CERT_ID_dupESS_CERT_ID_freeESS_CERT_ID_newESS_ISSUER_SERIAL_dupESS_ISSUER_SERIAL_freeESS_ISSUER_SERIAL_newESS_SIGNING_CERT_dupESS_SIGNING_CERT_freeESS_SIGNING_CERT_newEVP_BytesToKeyEVP_CIPHER_CTX_block_sizeEVP_CIPHER_CTX_cipherEVP_CIPHER_CTX_cleanupEVP_CIPHER_CTX_clear_flagsEVP_CIPHER_CTX_copyEVP_CIPHER_CTX_ctrlEVP_CIPHER_CTX_flagsEVP_CIPHER_CTX_freeEVP_CIPHER_CTX_get_app_dataEVP_CIPHER_CTX_initEVP_CIPHER_CTX_iv_lengthEVP_CIPHER_CTX_key_lengthEVP_CIPHER_CTX_newEVP_CIPHER_CTX_nidEVP_CIPHER_CTX_rand_keyEVP_CIPHER_CTX_set_app_dataEVP
_CIPHER_CTX_set_flagsEVP_CIPHER_CTX_set_key_lengthEVP_CIPHER_CTX_set_paddingEVP_CIPHER_CTX_test_flagsEVP_CIPHER_asn1_to_paramEVP_CIPHER_block_sizeEVP_CIPHER_do_allEVP_CIPHER_do_all_sortedEVP_CIPHER_flagsEVP_CIPHER_get_asn1_ivEVP_CIPHER_iv_lengthEVP_CIPHER_key_lengthEVP_CIPHER_nidEVP_CIPHER_param_to_asn1EVP_CIPHER_set_asn1_ivEVP_CIPHER_typeEVP_CipherEVP_CipherFinalEVP_CipherFinal_exEVP_CipherInitEVP_CipherInit_exEVP_CipherUpdateEVP_DecodeBlockEVP_DecodeFinalEVP_DecodeInitEVP_DecodeUpdateEVP_DecryptFinalEVP_DecryptFinal_exEVP_DecryptInitEVP_DecryptInit_exEVP_DecryptUpdateEVP_DigestEVP_DigestFinalEVP_DigestFinal_exEVP_DigestInitEVP_DigestInit_exEVP_DigestSignFinalEVP_DigestSignInitEVP_DigestUpdateEVP_DigestVerifyFinalEVP_DigestVerifyInitEVP_EncodeBlockEVP_EncodeFinalEVP_EncodeInitEVP_EncodeUpdateEVP_EncryptFinalEVP_EncryptFinal_exEVP_EncryptInitEVP_EncryptInit_exEVP_EncryptUpdateEVP_MD_CTX_cleanupEVP_MD_CTX_clear_flagsEVP_MD_CTX_copyEVP_MD_CTX_copy_exEVP_MD_CTX_createEVP_MD_CTX_destroyEVP_MD_CTX_initEVP_MD_CTX_mdEVP_MD_CTX_set_flagsEVP_MD_CTX_test_flagsEVP_MD_block_sizeEVP_MD_do_allEVP_MD_do_all_sortedEVP_MD_flagsEVP_MD_pkey_typeEVP_MD_sizeEVP_MD_typeEVP_OpenFinalEVP_OpenInitEVP_PBE_CipherInitEVP_PBE_alg_addEVP_PBE_alg_add_typeEVP_PBE_cleanupEVP_PBE_findEVP_PKCS82PKEYEVP_PKEY2PKCS8EVP_PKEY2PKCS8_brokenEVP_PKEY_CTX_ctrlEVP_PKEY_CTX_ctrl_strEVP_PKEY_CTX_dupEVP_PKEY_CTX_freeEVP_PKEY_CTX_get0_peerkeyEVP_PKEY_CTX_get0_pkeyEVP_PKEY_CTX_get_app_dataEVP_PKEY_CTX_get_cbEVP_PKEY_CTX_get_dataEVP_PKEY_CTX_get_keygen_infoEVP_PKEY_CTX_get_operationEVP_PKEY_CTX_newEVP_PKEY_CTX_new_idEVP_PKEY_CTX_set0_keygen_infoEVP_PKEY_CTX_set_app_dataEVP_PKEY_CTX_set_cbEVP_PKEY_CTX_set_dataEVP_PKEY_add1_attrEVP_PKEY_add1_attr_by_NIDEVP_PKEY_add1_attr_by_OBJEVP_PKEY_add1_attr_by_txtEVP_PKEY_asn1_add0EVP_PKEY_asn1_add_aliasEVP_PKEY_asn1_copyEVP_PKEY_asn1_findEVP_PKEY_asn1_find_strEVP_PKEY_asn1_freeEVP_PKEY_asn1_get0EVP_PKEY_asn1_get0_infoEVP_PKEY_asn1_get_countEVP_PKEY_asn1_newEVP_PKEY_asn1_set_ctrlE
VP_PKEY_asn1_set_freeEVP_PKEY_asn1_set_paramEVP_PKEY_asn1_set_privateEVP_PKEY_asn1_set_publicEVP_PKEY_assignEVP_PKEY_base_idEVP_PKEY_bitsEVP_PKEY_cmpEVP_PKEY_cmp_parametersEVP_PKEY_copy_parametersEVP_PKEY_decryptEVP_PKEY_decrypt_initEVP_PKEY_decrypt_oldEVP_PKEY_delete_attrEVP_PKEY_deriveEVP_PKEY_derive_initEVP_PKEY_derive_set_peerEVP_PKEY_encryptEVP_PKEY_encrypt_initEVP_PKEY_encrypt_oldEVP_PKEY_freeEVP_PKEY_get0EVP_PKEY_get0_asn1EVP_PKEY_get1_DHEVP_PKEY_get1_DSAEVP_PKEY_get1_EC_KEYEVP_PKEY_get1_RSAEVP_PKEY_get_attrEVP_PKEY_get_attr_by_NIDEVP_PKEY_get_attr_by_OBJEVP_PKEY_get_attr_countEVP_PKEY_get_default_digest_nidEVP_PKEY_idEVP_PKEY_keygenEVP_PKEY_keygen_initEVP_PKEY_meth_add0EVP_PKEY_meth_copyEVP_PKEY_meth_findEVP_PKEY_meth_freeEVP_PKEY_meth_get0_infoEVP_PKEY_meth_newEVP_PKEY_meth_set_cleanupEVP_PKEY_meth_set_copyEVP_PKEY_meth_set_ctrlEVP_PKEY_meth_set_decryptEVP_PKEY_meth_set_deriveEVP_PKEY_meth_set_encryptEVP_PKEY_meth_set_initEVP_PKEY_meth_set_keygenEVP_PKEY_meth_set_paramgenEVP_PKEY_meth_set_signEVP_PKEY_meth_set_signctxEVP_PKEY_meth_set_verifyEVP_PKEY_meth_set_verify_recoverEVP_PKEY_meth_set_verifyctxEVP_PKEY_missing_parametersEVP_PKEY_newEVP_PKEY_new_mac_keyEVP_PKEY_paramgenEVP_PKEY_paramgen_initEVP_PKEY_print_paramsEVP_PKEY_print_privateEVP_PKEY_print_publicEVP_PKEY_save_parametersEVP_PKEY_set1_DHEVP_PKEY_set1_DSAEVP_PKEY_set1_EC_KEYEVP_PKEY_set1_RSAEVP_PKEY_set_typeEVP_PKEY_set_type_strEVP_PKEY_signEVP_PKEY_sign_initEVP_PKEY_sizeEVP_PKEY_typeEVP_PKEY_verifyEVP_PKEY_verify_initEVP_PKEY_verify_recoverEVP_PKEY_verify_recover_initEVP_SealFinalEVP_SealInitEVP_SignFinalEVP_VerifyFinalEVP_add_cipherEVP_add_digestEVP_aes_128_cbcEVP_aes_128_cbc_hmac_sha1EVP_aes_128_ccmEVP_aes_128_cfb1EVP_aes_128_cfb128EVP_aes_128_cfb8EVP_aes_128_ctrEVP_aes_128_ecbEVP_aes_128_gcmEVP_aes_128_ofbEVP_aes_128_xtsEVP_aes_192_cbcEVP_aes_192_ccmEVP_aes_192_cfb1EVP_aes_192_cfb128EVP_aes_192_cfb8EVP_aes_192_ctrEVP_aes_192_ecbEVP_aes_192_gcmEVP_aes_192_ofbEVP_aes_256_cbcEVP_aes_256_cbc_hmac_s
ha1EVP_aes_256_ccmEVP_aes_256_cfb1EVP_aes_256_cfb128EVP_aes_256_cfb8EVP_aes_256_ctrEVP_aes_256_ecbEVP_aes_256_gcmEVP_aes_256_ofbEVP_aes_256_xtsEVP_bf_cbcEVP_bf_cfb64EVP_bf_ecbEVP_bf_ofbEVP_camellia_128_cbcEVP_camellia_128_cfb1EVP_camellia_128_cfb128EVP_camellia_128_cfb8EVP_camellia_128_ecbEVP_camellia_128_ofbEVP_camellia_192_cbcEVP_camellia_192_cfb1EVP_camellia_192_cfb128EVP_camellia_192_cfb8EVP_camellia_192_ecbEVP_camellia_192_ofbEVP_camellia_256_cbcEVP_camellia_256_cfb1EVP_camellia_256_cfb128EVP_camellia_256_cfb8EVP_camellia_256_ecbEVP_camellia_256_ofbEVP_cast5_cbcEVP_cast5_cfb64EVP_cast5_ecbEVP_cast5_ofbEVP_cleanupEVP_des_cbcEVP_des_cfb1EVP_des_cfb64EVP_des_cfb8EVP_des_ecbEVP_des_edeEVP_des_ede3EVP_des_ede3_cbcEVP_des_ede3_cfb1EVP_des_ede3_cfb64EVP_des_ede3_cfb8EVP_des_ede3_ecbEVP_des_ede3_ofbEVP_des_ede_cbcEVP_des_ede_cfb64EVP_des_ede_ecbEVP_des_ede_ofbEVP_des_ofbEVP_desx_cbcEVP_dssEVP_dss1EVP_ecdsaEVP_enc_nullEVP_get_cipherbynameEVP_get_digestbynameEVP_get_pw_promptEVP_idea_cbcEVP_idea_cfb64EVP_idea_ecbEVP_idea_ofbEVP_md4EVP_md5EVP_md_nullEVP_mdc2EVP_rc2_40_cbcEVP_rc2_64_cbcEVP_rc2_cbcEVP_rc2_cfb64EVP_rc2_ecbEVP_rc2_ofbEVP_rc4EVP_rc4_40EVP_rc4_hmac_md5EVP_read_pw_stringEVP_read_pw_string_minEVP_ripemd160EVP_seed_cbcEVP_seed_cfb128EVP_seed_ecbEVP_seed_ofbEVP_set_pw_promptEVP_shaEVP_sha1EVP_sha224EVP_sha256EVP_sha384EVP_sha512EVP_whirlpoolEXTENDED_KEY_USAGE_freeEXTENDED_KEY_USAGE_itEXTENDED_KEY_USAGE_newFIPS_modeFIPS_mode_setGENERAL_NAMES_freeGENERAL_NAMES_itGENERAL_NAMES_newGENERAL_NAME_cmpGENERAL_NAME_dupGENERAL_NAME_freeGENERAL_NAME_get0_otherNameGENERAL_NAME_get0_valueGENERAL_NAME_itGENERAL_NAME_newGENERAL_NAME_printGENERAL_NAME_set0_othernameGENERAL_NAME_set0_valueGENERAL_SUBTREE_freeGENERAL_SUBTREE_itGENERAL_SUBTREE_newHMACHMAC_CTX_cleanupHMAC_CTX_copyHMAC_CTX_initHMAC_CTX_set_flagsHMAC_FinalHMAC_InitHMAC_Init_exHMAC_UpdateISSUING_DIST_POINT_freeISSUING_DIST_POINT_itISSUING_DIST_POINT_newKRB5_APREQBODY_freeKRB5_APREQBODY_itKRB5_APREQBODY_newKRB5_APREQ_freeK
RB5_APREQ_itKRB5_APREQ_newKRB5_AUTHDATA_freeKRB5_AUTHDATA_itKRB5_AUTHDATA_newKRB5_AUTHENTBODY_freeKRB5_AUTHENTBODY_itKRB5_AUTHENTBODY_newKRB5_AUTHENT_freeKRB5_AUTHENT_itKRB5_AUTHENT_newKRB5_CHECKSUM_freeKRB5_CHECKSUM_itKRB5_CHECKSUM_newKRB5_ENCDATA_freeKRB5_ENCDATA_itKRB5_ENCDATA_newKRB5_ENCKEY_freeKRB5_ENCKEY_itKRB5_ENCKEY_newKRB5_PRINCNAME_freeKRB5_PRINCNAME_itKRB5_PRINCNAME_newKRB5_TICKET_freeKRB5_TICKET_itKRB5_TICKET_newKRB5_TKTBODY_freeKRB5_TKTBODY_itKRB5_TKTBODY_newLONG_itMD4MD4_FinalMD4_InitMD4_TransformMD4_UpdateMD5MD5_FinalMD5_InitMD5_TransformMD5_UpdateMDC2MDC2_FinalMDC2_InitMDC2_UpdateNAME_CONSTRAINTS_checkNAME_CONSTRAINTS_freeNAME_CONSTRAINTS_itNAME_CONSTRAINTS_newNCONF_WIN32NCONF_defaultNCONF_dump_bioNCONF_dump_fpNCONF_freeNCONF_free_dataNCONF_get_number_eNCONF_get_sectionNCONF_get_stringNCONF_loadNCONF_load_bioNCONF_load_fpNCONF_newNETSCAPE_CERT_SEQUENCE_freeNETSCAPE_CERT_SEQUENCE_itNETSCAPE_CERT_SEQUENCE_newNETSCAPE_SPKAC_freeNETSCAPE_SPKAC_itNETSCAPE_SPKAC_newNETSCAPE_SPKI_b64_decodeNETSCAPE_SPKI_b64_encodeNETSCAPE_SPKI_freeNETSCAPE_SPKI_get_pubkeyNETSCAPE_SPKI_itNETSCAPE_SPKI_newNETSCAPE_SPKI_printNETSCAPE_SPKI_set_pubkeyNETSCAPE_SPKI_signNETSCAPE_SPKI_verifyNETSCAPE_X509_freeNETSCAPE_X509_itNETSCAPE_X509_newNOTICEREF_freeNOTICEREF_itNOTICEREF_newOBJ_NAME_addOBJ_NAME_cleanupOBJ_NAME_do_allOBJ_NAME_do_all_sortedOBJ_NAME_getOBJ_NAME_initOBJ_NAME_new_indexOBJ_NAME_removeOBJ_add_objectOBJ_add_sigidOBJ_bsearch_OBJ_bsearch_ex_OBJ_cleanupOBJ_cmpOBJ_createOBJ_create_objectsOBJ_dupOBJ_find_sigid_algsOBJ_find_sigid_by_algsOBJ_ln2nidOBJ_new_nidOBJ_nid2lnOBJ_nid2objOBJ_nid2snOBJ_obj2nidOBJ_obj2txtOBJ_sigid_freeOBJ_sn2nidOBJ_txt2nidOBJ_txt2objOCSP_BASICRESP_add1_ext_i2dOCSP_BASICRESP_add_extOCSP_BASICRESP_delete_extOCSP_BASICRESP_freeOCSP_BASICRESP_get1_ext_d2iOCSP_BASICRESP_get_extOCSP_BASICRESP_get_ext_by_NIDOCSP_BASICRESP_get_ext_by_OBJOCSP_BASICRESP_get_ext_by_criticalOCSP_BASICRESP_get_ext_countOCSP_BASICRESP_itOCSP_BASICRESP_newOCSP_CERTID_dupOCSP_CERTID_f
reeOCSP_CERTID_itOCSP_CERTID_newOCSP_CERTSTATUS_freeOCSP_CERTSTATUS_itOCSP_CERTSTATUS_newOCSP_CRLID_freeOCSP_CRLID_itOCSP_CRLID_newOCSP_ONEREQ_add1_ext_i2dOCSP_ONEREQ_add_extOCSP_ONEREQ_delete_extOCSP_ONEREQ_freeOCSP_ONEREQ_get1_ext_d2iOCSP_ONEREQ_get_extOCSP_ONEREQ_get_ext_by_NIDOCSP_ONEREQ_get_ext_by_OBJOCSP_ONEREQ_get_ext_by_criticalOCSP_ONEREQ_get_ext_countOCSP_ONEREQ_itOCSP_ONEREQ_newOCSP_REQINFO_freeOCSP_REQINFO_itOCSP_REQINFO_newOCSP_REQUEST_add1_ext_i2dOCSP_REQUEST_add_extOCSP_REQUEST_delete_extOCSP_REQUEST_freeOCSP_REQUEST_get1_ext_d2iOCSP_REQUEST_get_extOCSP_REQUEST_get_ext_by_NIDOCSP_REQUEST_get_ext_by_OBJOCSP_REQUEST_get_ext_by_criticalOCSP_REQUEST_get_ext_countOCSP_REQUEST_itOCSP_REQUEST_newOCSP_REQUEST_printOCSP_REQ_CTX_add1_headerOCSP_REQ_CTX_freeOCSP_REQ_CTX_set1_reqOCSP_RESPBYTES_freeOCSP_RESPBYTES_itOCSP_RESPBYTES_newOCSP_RESPDATA_freeOCSP_RESPDATA_itOCSP_RESPDATA_newOCSP_RESPID_freeOCSP_RESPID_itOCSP_RESPID_newOCSP_RESPONSE_freeOCSP_RESPONSE_itOCSP_RESPONSE_newOCSP_RESPONSE_printOCSP_REVOKEDINFO_freeOCSP_REVOKEDINFO_itOCSP_REVOKEDINFO_newOCSP_SERVICELOC_freeOCSP_SERVICELOC_itOCSP_SERVICELOC_newOCSP_SIGNATURE_freeOCSP_SIGNATURE_itOCSP_SIGNATURE_newOCSP_SINGLERESP_add1_ext_i2dOCSP_SINGLERESP_add_extOCSP_SINGLERESP_delete_extOCSP_SINGLERESP_freeOCSP_SINGLERESP_get1_ext_d2iOCSP_SINGLERESP_get_extOCSP_SINGLERESP_get_ext_by_NIDOCSP_SINGLERESP_get_ext_by_OBJOCSP_SINGLERESP_get_ext_by_criticalOCSP_SINGLERESP_get_ext_countOCSP_SINGLERESP_itOCSP_SINGLERESP_newOCSP_accept_responses_newOCSP_archive_cutoff_newOCSP_basic_add1_certOCSP_basic_add1_nonceOCSP_basic_add1_statusOCSP_basic_signOCSP_basic_verifyOCSP_cert_id_newOCSP_cert_status_strOCSP_cert_to_idOCSP_check_nonceOCSP_check_validityOCSP_copy_nonceOCSP_crlID_newOCSP_crl_reason_strOCSP_id_cmpOCSP_id_get0_infoOCSP_id_issuer_cmpOCSP_onereq_get0_idOCSP_parse_urlOCSP_request_add0_idOCSP_request_add1_certOCSP_request_add1_nonceOCSP_request_is_signedOCSP_request_onereq_countOCSP_request_onereq_get0OCSP_request_se
t1_nameOCSP_request_signOCSP_request_verifyOCSP_resp_countOCSP_resp_findOCSP_resp_find_statusOCSP_resp_get0OCSP_response_createOCSP_response_get1_basicOCSP_response_statusOCSP_response_status_strOCSP_sendreq_bioOCSP_sendreq_nbioOCSP_sendreq_newOCSP_single_get0_statusOCSP_url_svcloc_newOPENSSL_DIR_endOPENSSL_DIR_readOPENSSL_add_all_algorithms_confOPENSSL_add_all_algorithms_noconfOPENSSL_asc2uniOPENSSL_cleanseOPENSSL_configOPENSSL_cpuid_setupOPENSSL_gmtimeOPENSSL_gmtime_adjOPENSSL_ia32cap_locOPENSSL_initOPENSSL_isserviceOPENSSL_issetugidOPENSSL_load_builtin_modulesOPENSSL_memcmpOPENSSL_no_configOPENSSL_showfatalOPENSSL_stderrOPENSSL_strcasecmpOPENSSL_strncasecmpOPENSSL_uni2ascOSSL_DES_versionOSSL_libdes_versionOTHERNAME_cmpOTHERNAME_freeOTHERNAME_itOTHERNAME_newOpenSSLDieOpenSSL_add_all_ciphersOpenSSL_add_all_digestsPBE2PARAM_freePBE2PARAM_itPBE2PARAM_newPBEPARAM_freePBEPARAM_itPBEPARAM_newPBKDF2PARAM_freePBKDF2PARAM_itPBKDF2PARAM_newPEM_ASN1_readPEM_ASN1_read_bioPEM_ASN1_writePEM_ASN1_write_bioPEM_SealFinalPEM_SealInitPEM_SealUpdatePEM_SignFinalPEM_SignInitPEM_SignUpdatePEM_X509_INFO_readPEM_X509_INFO_read_bioPEM_X509_INFO_write_bioPEM_bytes_read_bioPEM_def_callbackPEM_dek_infoPEM_do_headerPEM_get_EVP_CIPHER_INFOPEM_proc_typePEM_readPEM_read_CMSPEM_read_DHparamsPEM_read_DSAPrivateKeyPEM_read_DSA_PUBKEYPEM_read_DSAparamsPEM_read_ECPKParametersPEM_read_ECPrivateKeyPEM_read_EC_PUBKEYPEM_read_NETSCAPE_CERT_SEQUENCEPEM_read_PKCS7PEM_read_PKCS8PEM_read_PKCS8_PRIV_KEY_INFOPEM_read_PUBKEYPEM_read_PrivateKeyPEM_read_RSAPrivateKeyPEM_read_RSAPublicKeyPEM_read_RSA_PUBKEYPEM_read_X509PEM_read_X509_AUXPEM_read_X509_CERT_PAIRPEM_read_X509_CRLPEM_read_X509_REQPEM_read_bioPEM_read_bio_CMSPEM_read_bio_DHparamsPEM_read_bio_DSAPrivateKeyPEM_read_bio_DSA_PUBKEYPEM_read_bio_DSAparamsPEM_read_bio_ECPKParametersPEM_read_bio_ECPrivateKeyPEM_read_bio_EC_PUBKEYPEM_read_bio_NETSCAPE_CERT_SEQUENCEPEM_read_bio_PKCS7PEM_read_bio_PKCS8PEM_read_bio_PKCS8_PRIV_KEY_INFOPEM_read_bio_PUBKEYPEM_read_bio
_ParametersPEM_read_bio_PrivateKeyPEM_read_bio_RSAPrivateKeyPEM_read_bio_RSAPublicKeyPEM_read_bio_RSA_PUBKEYPEM_read_bio_X509PEM_read_bio_X509_AUXPEM_read_bio_X509_CERT_PAIRPEM_read_bio_X509_CRLPEM_read_bio_X509_REQPEM_writePEM_write_CMSPEM_write_DHparamsPEM_write_DSAPrivateKeyPEM_write_DSA_PUBKEYPEM_write_DSAparamsPEM_write_ECPKParametersPEM_write_ECPrivateKeyPEM_write_EC_PUBKEYPEM_write_NETSCAPE_CERT_SEQUENCEPEM_write_PKCS7PEM_write_PKCS8PEM_write_PKCS8PrivateKeyPEM_write_PKCS8PrivateKey_nidPEM_write_PKCS8_PRIV_KEY_INFOPEM_write_PUBKEYPEM_write_PrivateKeyPEM_write_RSAPrivateKeyPEM_write_RSAPublicKeyPEM_write_RSA_PUBKEYPEM_write_X509PEM_write_X509_AUXPEM_write_X509_CERT_PAIRPEM_write_X509_CRLPEM_write_X509_REQPEM_write_X509_REQ_NEWPEM_write_bioPEM_write_bio_ASN1_streamPEM_write_bio_CMSPEM_write_bio_CMS_streamPEM_write_bio_DHparamsPEM_write_bio_DSAPrivateKeyPEM_write_bio_DSA_PUBKEYPEM_write_bio_DSAparamsPEM_write_bio_ECPKParametersPEM_write_bio_ECPrivateKeyPEM_write_bio_EC_PUBKEYPEM_write_bio_NETSCAPE_CERT_SEQUENCEPEM_write_bio_PKCS7PEM_write_bio_PKCS7_streamPEM_write_bio_PKCS8PEM_write_bio_PKCS8PrivateKeyPEM_write_bio_PKCS8PrivateKey_nidPEM_write_bio_PKCS8_PRIV_KEY_INFOPEM_write_bio_PUBKEYPEM_write_bio_ParametersPEM_write_bio_PrivateKeyPEM_write_bio_RSAPrivateKeyPEM_write_bio_RSAPublicKeyPEM_write_bio_RSA_PUBKEYPEM_write_bio_X509PEM_write_bio_X509_AUXPEM_write_bio_X509_CERT_PAIRPEM_write_bio_X509_CRLPEM_write_bio_X509_REQPEM_write_bio_X509_REQ_NEWPKCS12_AUTHSAFES_itPKCS12_BAGS_freePKCS12_BAGS_itPKCS12_BAGS_newPKCS12_MAC_DATA_freePKCS12_MAC_DATA_itPKCS12_MAC_DATA_newPKCS12_MAKE_KEYBAGPKCS12_MAKE_SHKEYBAGPKCS12_PBE_addPKCS12_PBE_keyivgenPKCS12_SAFEBAGS_itPKCS12_SAFEBAG_freePKCS12_SAFEBAG_itPKCS12_SAFEBAG_newPKCS12_add_CSPName_ascPKCS12_add_certPKCS12_add_friendlyname_ascPKCS12_add_friendlyname_uniPKCS12_add_keyPKCS12_add_localkeyidPKCS12_add_safePKCS12_add_safesPKCS12_certbag2x509PKCS12_certbag2x509crlPKCS12_createPKCS12_decrypt_skeyPKCS12_freePKCS12_gen_macPKCS12_ge
t_attr_genPKCS12_get_friendlynamePKCS12_initPKCS12_itPKCS12_item_decrypt_d2iPKCS12_item_i2d_encryptPKCS12_item_pack_safebagPKCS12_key_gen_ascPKCS12_key_gen_uniPKCS12_newPKCS12_newpassPKCS12_pack_authsafesPKCS12_pack_p7dataPKCS12_pack_p7encdataPKCS12_parsePKCS12_pbe_cryptPKCS12_set_macPKCS12_setup_macPKCS12_unpack_authsafesPKCS12_unpack_p7dataPKCS12_unpack_p7encdataPKCS12_verify_macPKCS12_x5092certbagPKCS12_x509crl2certbagPKCS1_MGF1PKCS5_PBE_addPKCS5_PBE_keyivgenPKCS5_PBKDF2_HMACPKCS5_PBKDF2_HMAC_SHA1PKCS5_pbe2_setPKCS5_pbe2_set_ivPKCS5_pbe_setPKCS5_pbe_set0_algorPKCS5_pbkdf2_setPKCS5_v2_PBE_keyivgenPKCS7_ATTR_SIGN_itPKCS7_ATTR_VERIFY_itPKCS7_DIGEST_freePKCS7_DIGEST_itPKCS7_DIGEST_newPKCS7_ENCRYPT_freePKCS7_ENCRYPT_itPKCS7_ENCRYPT_newPKCS7_ENC_CONTENT_freePKCS7_ENC_CONTENT_itPKCS7_ENC_CONTENT_newPKCS7_ENVELOPE_freePKCS7_ENVELOPE_itPKCS7_ENVELOPE_newPKCS7_ISSUER_AND_SERIAL_digestPKCS7_ISSUER_AND_SERIAL_freePKCS7_ISSUER_AND_SERIAL_itPKCS7_ISSUER_AND_SERIAL_newPKCS7_RECIP_INFO_freePKCS7_RECIP_INFO_get0_algPKCS7_RECIP_INFO_itPKCS7_RECIP_INFO_newPKCS7_RECIP_INFO_setPKCS7_SIGNED_freePKCS7_SIGNED_itPKCS7_SIGNED_newPKCS7_SIGNER_INFO_freePKCS7_SIGNER_INFO_get0_algsPKCS7_SIGNER_INFO_itPKCS7_SIGNER_INFO_newPKCS7_SIGNER_INFO_setPKCS7_SIGNER_INFO_signPKCS7_SIGN_ENVELOPE_freePKCS7_SIGN_ENVELOPE_itPKCS7_SIGN_ENVELOPE_newPKCS7_add0_attrib_signing_timePKCS7_add1_attrib_digestPKCS7_add_attrib_content_typePKCS7_add_attrib_smimecapPKCS7_add_attributePKCS7_add_certificatePKCS7_add_crlPKCS7_add_recipientPKCS7_add_recipient_infoPKCS7_add_signaturePKCS7_add_signed_attributePKCS7_add_signerPKCS7_cert_from_signer_infoPKCS7_content_newPKCS7_ctrlPKCS7_dataDecodePKCS7_dataFinalPKCS7_dataInitPKCS7_dataVerifyPKCS7_decryptPKCS7_digest_from_attributesPKCS7_dupPKCS7_encryptPKCS7_finalPKCS7_freePKCS7_get0_signersPKCS7_get_attributePKCS7_get_issuer_and_serialPKCS7_get_signed_attributePKCS7_get_signer_infoPKCS7_get_smimecapPKCS7_itPKCS7_newPKCS7_print_ctxPKCS7_set0_type_otherPKCS7_set_attributesPKCS7_se
t_cipherPKCS7_set_contentPKCS7_set_digestPKCS7_set_signed_attributesPKCS7_set_typePKCS7_signPKCS7_sign_add_signerPKCS7_signatureVerifyPKCS7_simple_smimecapPKCS7_streamPKCS7_to_TS_TST_INFOPKCS7_verifyPKCS8_PRIV_KEY_INFO_freePKCS8_PRIV_KEY_INFO_itPKCS8_PRIV_KEY_INFO_newPKCS8_add_keyusagePKCS8_decryptPKCS8_encryptPKCS8_pkey_get0PKCS8_pkey_set0PKCS8_set_brokenPKEY_USAGE_PERIOD_freePKEY_USAGE_PERIOD_itPKEY_USAGE_PERIOD_newPOLICYINFO_freePOLICYINFO_itPOLICYINFO_newPOLICYQUALINFO_freePOLICYQUALINFO_itPOLICYQUALINFO_newPOLICY_CONSTRAINTS_freePOLICY_CONSTRAINTS_itPOLICY_CONSTRAINTS_newPOLICY_MAPPINGS_itPOLICY_MAPPING_freePOLICY_MAPPING_itPOLICY_MAPPING_newPROXY_CERT_INFO_EXTENSION_freePROXY_CERT_INFO_EXTENSION_itPROXY_CERT_INFO_EXTENSION_newPROXY_POLICY_freePROXY_POLICY_itPROXY_POLICY_newRAND_SSLeayRAND_addRAND_bytesRAND_cleanupRAND_egdRAND_egd_bytesRAND_eventRAND_file_nameRAND_get_rand_methodRAND_load_fileRAND_pollRAND_pseudo_bytesRAND_query_egd_bytesRAND_screenRAND_seedRAND_set_rand_engineRAND_set_rand_methodRAND_statusRAND_write_fileRC2_cbc_encryptRC2_cfb64_encryptRC2_decryptRC2_ecb_encryptRC2_encryptRC2_ofb64_encryptRC2_set_keyRC4RC4_optionsRC4_set_keyRIPEMD160RIPEMD160_FinalRIPEMD160_InitRIPEMD160_TransformRIPEMD160_UpdateRSAPrivateKey_dupRSAPrivateKey_itRSAPublicKey_dupRSAPublicKey_itRSA_PKCS1_SSLeayRSA_PSS_PARAMS_freeRSA_PSS_PARAMS_itRSA_PSS_PARAMS_newRSA_X931_hash_idRSA_blinding_offRSA_blinding_onRSA_check_keyRSA_flagsRSA_freeRSA_generate_keyRSA_generate_key_exRSA_get_default_methodRSA_get_ex_dataRSA_get_ex_new_indexRSA_get_methodRSA_memory_lockRSA_newRSA_new_methodRSA_null_methodRSA_padding_add_PKCS1_OAEPRSA_padding_add_PKCS1_PSSRSA_padding_add_PKCS1_PSS_mgf1RSA_padding_add_PKCS1_type_1RSA_padding_add_PKCS1_type_2RSA_padding_add_SSLv23RSA_padding_add_X931RSA_padding_add_noneRSA_padding_check_PKCS1_OAEPRSA_padding_check_PKCS1_type_1RSA_padding_check_PKCS1_type_2RSA_padding_check_SSLv23RSA_padding_check_X931RSA_padding_check_noneRSA_printRSA_print_fpRSA_private_decryp
tRSA_private_encryptRSA_public_decryptRSA_public_encryptRSA_set_default_methodRSA_set_ex_dataRSA_set_methodRSA_setup_blindingRSA_signRSA_sign_ASN1_OCTET_STRINGRSA_sizeRSA_up_refRSA_verifyRSA_verify_ASN1_OCTET_STRINGRSA_verify_PKCS1_PSSRSA_verify_PKCS1_PSS_mgf1SEED_cbc_encryptSEED_cfb128_encryptSEED_decryptSEED_ecb_encryptSEED_encryptSEED_ofb128_encryptSEED_set_keySHASHA1SHA1_FinalSHA1_InitSHA1_TransformSHA1_UpdateSHA224SHA224_FinalSHA224_InitSHA224_UpdateSHA256SHA256_FinalSHA256_InitSHA256_TransformSHA256_UpdateSHA384SHA384_FinalSHA384_InitSHA384_UpdateSHA512SHA512_FinalSHA512_InitSHA512_TransformSHA512_UpdateSHA_FinalSHA_InitSHA_TransformSHA_UpdateSMIME_crlf_copySMIME_read_ASN1SMIME_read_CMSSMIME_read_PKCS7SMIME_textSMIME_write_ASN1SMIME_write_CMSSMIME_write_PKCS7SRP_Calc_ASRP_Calc_BSRP_Calc_client_keySRP_Calc_server_keySRP_Calc_uSRP_Calc_xSRP_VBASE_freeSRP_VBASE_get_by_userSRP_VBASE_initSRP_VBASE_newSRP_Verify_A_mod_NSRP_Verify_B_mod_NSRP_check_known_gN_paramSRP_create_verifierSRP_create_verifier_BNSRP_get_default_gNSSLeaySSLeay_versionSXNETID_freeSXNETID_itSXNETID_newSXNET_add_id_INTEGERSXNET_add_id_ascSXNET_add_id_ulongSXNET_freeSXNET_get_id_INTEGERSXNET_get_id_ascSXNET_get_id_ulongSXNET_itSXNET_newTS_ACCURACY_dupTS_ACCURACY_freeTS_ACCURACY_get_microsTS_ACCURACY_get_millisTS_ACCURACY_get_secondsTS_ACCURACY_newTS_ACCURACY_set_microsTS_ACCURACY_set_millisTS_ACCURACY_set_secondsTS_ASN1_INTEGER_print_bioTS_CONF_get_tsa_sectionTS_CONF_load_certTS_CONF_load_certsTS_CONF_load_keyTS_CONF_set_accuracyTS_CONF_set_certsTS_CONF_set_clock_precision_digitsTS_CONF_set_crypto_deviceTS_CONF_set_def_policyTS_CONF_set_default_engineTS_CONF_set_digestsTS_CONF_set_ess_cert_id_chainTS_CONF_set_orderingTS_CONF_set_policiesTS_CONF_set_serialTS_CONF_set_signer_certTS_CONF_set_signer_keyTS_CONF_set_tsa_nameTS_MSG_IMPRINT_dupTS_MSG_IMPRINT_freeTS_MSG_IMPRINT_get_algoTS_MSG_IMPRINT_get_msgTS_MSG_IMPRINT_newTS_MSG_IMPRINT_print_bioTS_MSG_IMPRINT_set_algoTS_MSG_IMPRINT_set_msgTS_OBJ_print_bi
oTS_REQ_add_extTS_REQ_delete_extTS_REQ_dupTS_REQ_ext_freeTS_REQ_freeTS_REQ_get_cert_reqTS_REQ_get_extTS_REQ_get_ext_by_NIDTS_REQ_get_ext_by_OBJTS_REQ_get_ext_by_criticalTS_REQ_get_ext_countTS_REQ_get_ext_d2iTS_REQ_get_extsTS_REQ_get_msg_imprintTS_REQ_get_nonceTS_REQ_get_policy_idTS_REQ_get_versionTS_REQ_newTS_REQ_print_bioTS_REQ_set_cert_reqTS_REQ_set_msg_imprintTS_REQ_set_nonceTS_REQ_set_policy_idTS_REQ_set_versionTS_REQ_to_TS_VERIFY_CTXTS_RESP_CTX_add_failure_infoTS_RESP_CTX_add_flagsTS_RESP_CTX_add_mdTS_RESP_CTX_add_policyTS_RESP_CTX_freeTS_RESP_CTX_get_requestTS_RESP_CTX_get_tst_infoTS_RESP_CTX_newTS_RESP_CTX_set_accuracyTS_RESP_CTX_set_certsTS_RESP_CTX_set_clock_precision_digitsTS_RESP_CTX_set_def_policyTS_RESP_CTX_set_extension_cbTS_RESP_CTX_set_serial_cbTS_RESP_CTX_set_signer_certTS_RESP_CTX_set_signer_keyTS_RESP_CTX_set_status_infoTS_RESP_CTX_set_status_info_condTS_RESP_CTX_set_time_cbTS_RESP_create_responseTS_RESP_dupTS_RESP_freeTS_RESP_get_status_infoTS_RESP_get_tokenTS_RESP_get_tst_infoTS_RESP_newTS_RESP_print_bioTS_RESP_set_status_infoTS_RESP_set_tst_infoTS_RESP_verify_responseTS_RESP_verify_signatureTS_RESP_verify_tokenTS_STATUS_INFO_dupTS_STATUS_INFO_freeTS_STATUS_INFO_newTS_STATUS_INFO_print_bioTS_TST_INFO_add_extTS_TST_INFO_delete_extTS_TST_INFO_dupTS_TST_INFO_ext_freeTS_TST_INFO_freeTS_TST_INFO_get_accuracyTS_TST_INFO_get_extTS_TST_INFO_get_ext_by_NIDTS_TST_INFO_get_ext_by_OBJTS_TST_INFO_get_ext_by_criticalTS_TST_INFO_get_ext_countTS_TST_INFO_get_ext_d2iTS_TST_INFO_get_extsTS_TST_INFO_get_msg_imprintTS_TST_INFO_get_nonceTS_TST_INFO_get_orderingTS_TST_INFO_get_policy_idTS_TST_INFO_get_serialTS_TST_INFO_get_timeTS_TST_INFO_get_tsaTS_TST_INFO_get_versionTS_TST_INFO_newTS_TST_INFO_print_bioTS_TST_INFO_set_accuracyTS_TST_INFO_set_msg_imprintTS_TST_INFO_set_nonceTS_TST_INFO_set_orderingTS_TST_INFO_set_policy_idTS_TST_INFO_set_serialTS_TST_INFO_set_timeTS_TST_INFO_set_tsaTS_TST_INFO_set_versionTS_VERIFY_CTX_cleanupTS_VERIFY_CTX_freeTS_VERIFY_CTX_initTS_VER
IFY_CTX_newTS_X509_ALGOR_print_bioTS_ext_print_bioTXT_DB_create_indexTXT_DB_freeTXT_DB_get_by_indexTXT_DB_insertTXT_DB_readTXT_DB_writeUI_OpenSSLUI_UTIL_read_pwUI_UTIL_read_pw_stringUI_add_error_stringUI_add_info_stringUI_add_input_booleanUI_add_input_stringUI_add_user_dataUI_add_verify_stringUI_construct_promptUI_create_methodUI_ctrlUI_destroy_methodUI_dup_error_stringUI_dup_info_stringUI_dup_input_booleanUI_dup_input_stringUI_dup_verify_stringUI_freeUI_get0_action_stringUI_get0_output_stringUI_get0_resultUI_get0_result_stringUI_get0_test_stringUI_get0_user_dataUI_get_default_methodUI_get_ex_dataUI_get_ex_new_indexUI_get_input_flagsUI_get_methodUI_get_result_maxsizeUI_get_result_minsizeUI_get_string_typeUI_method_get_closerUI_method_get_flusherUI_method_get_openerUI_method_get_prompt_constructorUI_method_get_readerUI_method_get_writerUI_method_set_closerUI_method_set_flusherUI_method_set_openerUI_method_set_prompt_constructorUI_method_set_readerUI_method_set_writerUI_newUI_new_methodUI_processUI_set_default_methodUI_set_ex_dataUI_set_methodUI_set_resultUSERNOTICE_freeUSERNOTICE_itUSERNOTICE_newUTF8_getcUTF8_putcWHIRLPOOLWHIRLPOOL_BitUpdateWHIRLPOOL_FinalWHIRLPOOL_InitWHIRLPOOL_UpdateX509V3_EXT_CRL_add_confX509V3_EXT_CRL_add_nconfX509V3_EXT_REQ_add_confX509V3_EXT_REQ_add_nconfX509V3_EXT_addX509V3_EXT_add_aliasX509V3_EXT_add_confX509V3_EXT_add_listX509V3_EXT_add_nconfX509V3_EXT_add_nconf_skX509V3_EXT_cleanupX509V3_EXT_confX509V3_EXT_conf_nidX509V3_EXT_d2iX509V3_EXT_getX509V3_EXT_get_nidX509V3_EXT_i2dX509V3_EXT_nconfX509V3_EXT_nconf_nidX509V3_EXT_printX509V3_EXT_print_fpX509V3_EXT_val_prnX509V3_NAME_from_sectionX509V3_add1_i2dX509V3_add_standard_extensionsX509V3_add_valueX509V3_add_value_boolX509V3_add_value_bool_nfX509V3_add_value_intX509V3_add_value_ucharX509V3_conf_freeX509V3_extensions_printX509V3_get_d2iX509V3_get_sectionX509V3_get_stringX509V3_get_value_boolX509V3_get_value_intX509V3_parse_listX509V3_section_freeX509V3_set_conf_lhashX509V3_set_ctxX509V3_set_ncon
fX509V3_string_freeX509_ALGORS_itX509_ALGOR_dupX509_ALGOR_freeX509_ALGOR_get0X509_ALGOR_itX509_ALGOR_newX509_ALGOR_set0X509_ALGOR_set_mdX509_ATTRIBUTE_countX509_ATTRIBUTE_createX509_ATTRIBUTE_create_by_NIDX509_ATTRIBUTE_create_by_OBJX509_ATTRIBUTE_create_by_txtX509_ATTRIBUTE_dupX509_ATTRIBUTE_freeX509_ATTRIBUTE_get0_dataX509_ATTRIBUTE_get0_objectX509_ATTRIBUTE_get0_typeX509_ATTRIBUTE_itX509_ATTRIBUTE_newX509_ATTRIBUTE_set1_dataX509_ATTRIBUTE_set1_objectX509_CERT_AUX_freeX509_CERT_AUX_itX509_CERT_AUX_newX509_CERT_AUX_printX509_CERT_PAIR_freeX509_CERT_PAIR_itX509_CERT_PAIR_newX509_CINF_freeX509_CINF_itX509_CINF_newX509_CRL_INFO_freeX509_CRL_INFO_itX509_CRL_INFO_newX509_CRL_METHOD_freeX509_CRL_METHOD_newX509_CRL_add0_revokedX509_CRL_add1_ext_i2dX509_CRL_add_extX509_CRL_cmpX509_CRL_delete_extX509_CRL_digestX509_CRL_dupX509_CRL_freeX509_CRL_get0_by_certX509_CRL_get0_by_serialX509_CRL_get_extX509_CRL_get_ext_by_NIDX509_CRL_get_ext_by_OBJX509_CRL_get_ext_by_criticalX509_CRL_get_ext_countX509_CRL_get_ext_d2iX509_CRL_get_meth_dataX509_CRL_itX509_CRL_matchX509_CRL_newX509_CRL_printX509_CRL_print_fpX509_CRL_set_default_methodX509_CRL_set_issuer_nameX509_CRL_set_lastUpdateX509_CRL_set_meth_dataX509_CRL_set_nextUpdateX509_CRL_set_versionX509_CRL_signX509_CRL_sign_ctxX509_CRL_sortX509_CRL_verifyX509_EXTENSIONS_itX509_EXTENSION_create_by_NIDX509_EXTENSION_create_by_OBJX509_EXTENSION_dupX509_EXTENSION_freeX509_EXTENSION_get_criticalX509_EXTENSION_get_dataX509_EXTENSION_get_objectX509_EXTENSION_itX509_EXTENSION_newX509_EXTENSION_set_criticalX509_EXTENSION_set_dataX509_EXTENSION_set_objectX509_INFO_freeX509_INFO_newX509_LOOKUP_by_aliasX509_LOOKUP_by_fingerprintX509_LOOKUP_by_issuer_serialX509_LOOKUP_by_subjectX509_LOOKUP_ctrlX509_LOOKUP_fileX509_LOOKUP_freeX509_LOOKUP_hash_dirX509_LOOKUP_initX509_LOOKUP_newX509_LOOKUP_shutdownX509_NAME_ENTRY_create_by_NIDX509_NAME_ENTRY_create_by_OBJX509_NAME_ENTRY_create_by_txtX509_NAME_ENTRY_dupX509_NAME_ENTRY_freeX509_NAME_ENTRY_get_dataX509_NAME_
ENTRY_get_objectX509_NAME_ENTRY_itX509_NAME_ENTRY_newX509_NAME_ENTRY_set_dataX509_NAME_ENTRY_set_objectX509_NAME_add_entryX509_NAME_add_entry_by_NIDX509_NAME_add_entry_by_OBJX509_NAME_add_entry_by_txtX509_NAME_cmpX509_NAME_delete_entryX509_NAME_digestX509_NAME_dupX509_NAME_entry_countX509_NAME_freeX509_NAME_get_entryX509_NAME_get_index_by_NIDX509_NAME_get_index_by_OBJX509_NAME_get_text_by_NIDX509_NAME_get_text_by_OBJX509_NAME_hashX509_NAME_hash_oldX509_NAME_itX509_NAME_newX509_NAME_onelineX509_NAME_printX509_NAME_print_exX509_NAME_print_ex_fpX509_NAME_setX509_OBJECT_free_contentsX509_OBJECT_idx_by_subjectX509_OBJECT_retrieve_by_subjectX509_OBJECT_retrieve_matchX509_OBJECT_up_ref_countX509_PKEY_freeX509_PKEY_newX509_POLICY_NODE_printX509_PUBKEY_freeX509_PUBKEY_getX509_PUBKEY_get0_paramX509_PUBKEY_itX509_PUBKEY_newX509_PUBKEY_setX509_PUBKEY_set0_paramX509_PURPOSE_addX509_PURPOSE_cleanupX509_PURPOSE_get0X509_PURPOSE_get0_nameX509_PURPOSE_get0_snameX509_PURPOSE_get_by_idX509_PURPOSE_get_by_snameX509_PURPOSE_get_countX509_PURPOSE_get_idX509_PURPOSE_get_trustX509_PURPOSE_setX509_REQ_INFO_freeX509_REQ_INFO_itX509_REQ_INFO_newX509_REQ_add1_attrX509_REQ_add1_attr_by_NIDX509_REQ_add1_attr_by_OBJX509_REQ_add1_attr_by_txtX509_REQ_add_extensionsX509_REQ_add_extensions_nidX509_REQ_check_private_keyX509_REQ_delete_attrX509_REQ_digestX509_REQ_dupX509_REQ_extension_nidX509_REQ_freeX509_REQ_get1_emailX509_REQ_get_attrX509_REQ_get_attr_by_NIDX509_REQ_get_attr_by_OBJX509_REQ_get_attr_countX509_REQ_get_extension_nidsX509_REQ_get_extensionsX509_REQ_get_pubkeyX509_REQ_itX509_REQ_newX509_REQ_printX509_REQ_print_exX509_REQ_print_fpX509_REQ_set_extension_nidsX509_REQ_set_pubkeyX509_REQ_set_subject_nameX509_REQ_set_versionX509_REQ_signX509_REQ_sign_ctxX509_REQ_to_X509X509_REQ_verifyX509_REVOKED_add1_ext_i2dX509_REVOKED_add_extX509_REVOKED_delete_extX509_REVOKED_freeX509_REVOKED_get_extX509_REVOKED_get_ext_by_NIDX509_REVOKED_get_ext_by_OBJX509_REVOKED_get_ext_by_criticalX509_REVOKED_get_ext_co
untX509_REVOKED_get_ext_d2iX509_REVOKED_itX509_REVOKED_newX509_REVOKED_set_revocationDateX509_REVOKED_set_serialNumberX509_SIG_freeX509_SIG_itX509_SIG_newX509_STORE_CTX_cleanupX509_STORE_CTX_freeX509_STORE_CTX_get0_current_crlX509_STORE_CTX_get0_current_issuerX509_STORE_CTX_get0_paramX509_STORE_CTX_get0_parent_ctxX509_STORE_CTX_get0_policy_treeX509_STORE_CTX_get1_chainX509_STORE_CTX_get1_issuerX509_STORE_CTX_get_chainX509_STORE_CTX_get_current_certX509_STORE_CTX_get_errorX509_STORE_CTX_get_error_depthX509_STORE_CTX_get_ex_dataX509_STORE_CTX_get_ex_new_indexX509_STORE_CTX_get_explicit_policyX509_STORE_CTX_initX509_STORE_CTX_newX509_STORE_CTX_purpose_inheritX509_STORE_CTX_set0_crlsX509_STORE_CTX_set0_paramX509_STORE_CTX_set_certX509_STORE_CTX_set_chainX509_STORE_CTX_set_defaultX509_STORE_CTX_set_depthX509_STORE_CTX_set_errorX509_STORE_CTX_set_ex_dataX509_STORE_CTX_set_flagsX509_STORE_CTX_set_purposeX509_STORE_CTX_set_timeX509_STORE_CTX_set_trustX509_STORE_CTX_set_verify_cbX509_STORE_CTX_trusted_stackX509_STORE_add_certX509_STORE_add_crlX509_STORE_add_lookupX509_STORE_freeX509_STORE_get1_certsX509_STORE_get1_crlsX509_STORE_get_by_subjectX509_STORE_load_locationsX509_STORE_newX509_STORE_set1_paramX509_STORE_set_default_pathsX509_STORE_set_depthX509_STORE_set_flagsX509_STORE_set_purposeX509_STORE_set_trustX509_STORE_set_verify_cbX509_TRUST_addX509_TRUST_cleanupX509_TRUST_get0X509_TRUST_get0_nameX509_TRUST_get_by_idX509_TRUST_get_countX509_TRUST_get_flagsX509_TRUST_get_trustX509_TRUST_setX509_TRUST_set_defaultX509_VAL_freeX509_VAL_itX509_VAL_newX509_VERIFY_PARAM_add0_policyX509_VERIFY_PARAM_add0_tableX509_VERIFY_PARAM_clear_flagsX509_VERIFY_PARAM_freeX509_VERIFY_PARAM_get_depthX509_VERIFY_PARAM_get_flagsX509_VERIFY_PARAM_inheritX509_VERIFY_PARAM_lookupX509_VERIFY_PARAM_newX509_VERIFY_PARAM_set1X509_VERIFY_PARAM_set1_nameX509_VERIFY_PARAM_set1_policiesX509_VERIFY_PARAM_set_depthX509_VERIFY_PARAM_set_flagsX509_VERIFY_PARAM_set_purposeX509_VERIFY_PARAM_set_timeX509_VERIFY_PA
RAM_set_trustX509_VERIFY_PARAM_table_cleanupX509_add1_ext_i2dX509_add1_reject_objectX509_add1_trust_objectX509_add_extX509_alias_get0X509_alias_set1X509_certificate_typeX509_check_akidX509_check_caX509_check_issuedX509_check_private_keyX509_check_purposeX509_check_trustX509_cmpX509_cmp_current_timeX509_cmp_timeX509_delete_extX509_digestX509_dupX509_email_freeX509_find_by_issuer_and_serialX509_find_by_subjectX509_freeX509_get0_pubkey_bitstrX509_get1_emailX509_get1_ocspX509_get_default_cert_areaX509_get_default_cert_dirX509_get_default_cert_dir_envX509_get_default_cert_fileX509_get_default_cert_file_envX509_get_default_private_dirX509_get_ex_dataX509_get_ex_new_indexX509_get_extX509_get_ext_by_NIDX509_get_ext_by_OBJX509_get_ext_by_criticalX509_get_ext_countX509_get_ext_d2iX509_get_issuer_nameX509_get_pubkeyX509_get_pubkey_parametersX509_get_serialNumberX509_get_subject_nameX509_gmtime_adjX509_issuer_and_serial_cmpX509_issuer_and_serial_hashX509_issuer_name_cmpX509_issuer_name_hashX509_issuer_name_hash_oldX509_itX509_keyid_get0X509_keyid_set1X509_load_cert_crl_fileX509_load_cert_fileX509_load_crl_fileX509_newX509_ocspid_printX509_policy_checkX509_policy_level_get0_nodeX509_policy_level_node_countX509_policy_node_get0_parentX509_policy_node_get0_policyX509_policy_node_get0_qualifiersX509_policy_tree_freeX509_policy_tree_get0_levelX509_policy_tree_get0_policiesX509_policy_tree_get0_user_policiesX509_policy_tree_level_countX509_printX509_print_exX509_print_ex_fpX509_print_fpX509_pubkey_digestX509_reject_clearX509_set_ex_dataX509_set_issuer_nameX509_set_notAfterX509_set_notBeforeX509_set_pubkeyX509_set_serialNumberX509_set_subject_nameX509_set_versionX509_signX509_sign_ctxX509_signature_dumpX509_signature_printX509_subject_name_cmpX509_subject_name_hashX509_subject_name_hash_oldX509_supported_extensionX509_time_adjX509_time_adj_exX509_to_X509_REQX509_trust_clearX509_verifyX509_verify_certX509_verify_cert_error_stringX509at_add1_attrX509at_add1_attr_by_NIDX509at_add1_attr_b
y_OBJX509at_add1_attr_by_txtX509at_delete_attrX509at_get0_data_by_OBJX509at_get_attrX509at_get_attr_by_NIDX509at_get_attr_by_OBJX509at_get_attr_countX509v3_add_extX509v3_delete_extX509v3_get_extX509v3_get_ext_by_NIDX509v3_get_ext_by_OBJX509v3_get_ext_by_criticalX509v3_get_ext_countZLONG_it_ossl_096_des_random_seed_ossl_old_crypt_ossl_old_des_cbc_cksum_ossl_old_des_cbc_encrypt_ossl_old_des_cfb64_encrypt_ossl_old_des_cfb_encrypt_ossl_old_des_crypt_ossl_old_des_decrypt3_ossl_old_des_ecb3_encrypt_ossl_old_des_ecb_encrypt_ossl_old_des_ede3_cbc_encrypt_ossl_old_des_ede3_cfb64_encrypt_ossl_old_des_ede3_ofb64_encrypt_ossl_old_des_enc_read_ossl_old_des_enc_write_ossl_old_des_encrypt_ossl_old_des_encrypt2_ossl_old_des_encrypt3_ossl_old_des_fcrypt_ossl_old_des_is_weak_key_ossl_old_des_key_sched_ossl_old_des_ncbc_encrypt_ossl_old_des_ofb64_encrypt_ossl_old_des_ofb_encrypt_ossl_old_des_options_ossl_old_des_pcbc_encrypt_ossl_old_des_quad_cksum_ossl_old_des_random_key_ossl_old_des_random_seed_ossl_old_des_read_2passwords_ossl_old_des_read_password_ossl_old_des_read_pw_ossl_old_des_read_pw_string_ossl_old_des_set_key_ossl_old_des_set_odd_parity_ossl_old_des_string_to_2keys_ossl_old_des_string_to_key_ossl_old_des_xcbc_encrypt_shadow_DES_check_key_shadow_DES_rw_modea2d_ASN1_OBJECTa2i_ASN1_ENUMERATEDa2i_ASN1_INTEGERa2i_ASN1_STRINGa2i_GENERAL_NAMEa2i_IPADDRESSa2i_IPADDRESS_NCa2i_ipaddasn1_Finishasn1_GetSequenceasn1_add_errorasn1_const_Finishasn1_do_adbasn1_do_lockasn1_enc_freeasn1_enc_initasn1_enc_restoreasn1_enc_saveasn1_ex_c2iasn1_ex_i2casn1_get_choice_selectorasn1_get_field_ptrasn1_set_choice_selectorb2i_PVK_biob2i_PrivateKeyb2i_PrivateKey_biob2i_PublicKeyb2i_PublicKey_biobn_add_wordsbn_div_wordsbn_dup_expandbn_expand2bn_mul_add_wordsbn_mul_wordsbn_sqr_wordsbn_sub_wordsc2i_ASN1_BIT_STRINGc2i_ASN1_INTEGERc2i_ASN1_OBJECTcheck_deferd2i_ACCESS_DESCRIPTIONd2i_ASN1_BIT_STRINGd2i_ASN1_BMPSTRINGd2i_ASN1_BOOLEANd2i_ASN1_ENUMERATEDd2i_ASN1_GENERALIZEDTIMEd2i_ASN1_GENERALSTRINGd2i_ASN1_IA5STRI
NGd2i_ASN1_INTEGERd2i_ASN1_NULLd2i_ASN1_OBJECTd2i_ASN1_OCTET_STRINGd2i_ASN1_PRINTABLEd2i_ASN1_PRINTABLESTRINGd2i_ASN1_SEQUENCE_ANYd2i_ASN1_SETd2i_ASN1_SET_ANYd2i_ASN1_T61STRINGd2i_ASN1_TIMEd2i_ASN1_TYPEd2i_ASN1_UINTEGERd2i_ASN1_UNIVERSALSTRINGd2i_ASN1_UTCTIMEd2i_ASN1_UTF8STRINGd2i_ASN1_VISIBLESTRINGd2i_ASN1_bytesd2i_ASN1_type_bytesd2i_AUTHORITY_INFO_ACCESSd2i_AUTHORITY_KEYIDd2i_AutoPrivateKeyd2i_BASIC_CONSTRAINTSd2i_CERTIFICATEPOLICIESd2i_CMS_ContentInfod2i_CMS_ReceiptRequestd2i_CMS_biod2i_CRL_DIST_POINTSd2i_DHparamsd2i_DIRECTORYSTRINGd2i_DISPLAYTEXTd2i_DIST_POINTd2i_DIST_POINT_NAMEd2i_DSAPrivateKeyd2i_DSAPrivateKey_biod2i_DSAPrivateKey_fpd2i_DSAPublicKeyd2i_DSA_PUBKEYd2i_DSA_PUBKEY_biod2i_DSA_PUBKEY_fpd2i_DSA_SIGd2i_DSAparamsd2i_ECDSA_SIGd2i_ECPKParametersd2i_ECParametersd2i_ECPrivateKeyd2i_ECPrivateKey_biod2i_ECPrivateKey_fpd2i_EC_PUBKEYd2i_EC_PUBKEY_biod2i_EC_PUBKEY_fpd2i_EDIPARTYNAMEd2i_ESS_CERT_IDd2i_ESS_ISSUER_SERIALd2i_ESS_SIGNING_CERTd2i_EXTENDED_KEY_USAGEd2i_GENERAL_NAMEd2i_GENERAL_NAMESd2i_ISSUING_DIST_POINTd2i_KRB5_APREQd2i_KRB5_APREQBODYd2i_KRB5_AUTHDATAd2i_KRB5_AUTHENTd2i_KRB5_AUTHENTBODYd2i_KRB5_CHECKSUMd2i_KRB5_ENCDATAd2i_KRB5_ENCKEYd2i_KRB5_PRINCNAMEd2i_KRB5_TICKETd2i_KRB5_TKTBODYd2i_NETSCAPE_CERT_SEQUENCEd2i_NETSCAPE_SPKACd2i_NETSCAPE_SPKId2i_NETSCAPE_X509d2i_NOTICEREFd2i_Netscape_RSAd2i_OCSP_BASICRESPd2i_OCSP_CERTIDd2i_OCSP_CERTSTATUSd2i_OCSP_CRLIDd2i_OCSP_ONEREQd2i_OCSP_REQINFOd2i_OCSP_REQUESTd2i_OCSP_RESPBYTESd2i_OCSP_RESPDATAd2i_OCSP_RESPIDd2i_OCSP_RESPONSEd2i_OCSP_REVOKEDINFOd2i_OCSP_SERVICELOCd2i_OCSP_SIGNATUREd2i_OCSP_SINGLERESPd2i_OTHERNAMEd2i_PBE2PARAMd2i_PBEPARAMd2i_PBKDF2PARAMd2i_PKCS12d2i_PKCS12_BAGSd2i_PKCS12_MAC_DATAd2i_PKCS12_SAFEBAGd2i_PKCS12_biod2i_PKCS12_fpd2i_PKCS7d2i_PKCS7_DIGESTd2i_PKCS7_ENCRYPTd2i_PKCS7_ENC_CONTENTd2i_PKCS7_ENVELOPEd2i_PKCS7_ISSUER_AND_SERIALd2i_PKCS7_RECIP_INFOd2i_PKCS7_SIGNEDd2i_PKCS7_SIGNER_INFOd2i_PKCS7_SIGN_ENVELOPEd2i_PKCS7_biod2i_PKCS7_fpd2i_PKCS8PrivateKey_biod2i_PKCS8PrivateKey_fpd2i_PKCS8_PRIV_KEY_INF
Od2i_PKCS8_PRIV_KEY_INFO_biod2i_PKCS8_PRIV_KEY_INFO_fpd2i_PKCS8_biod2i_PKCS8_fpd2i_PKEY_USAGE_PERIODd2i_POLICYINFOd2i_POLICYQUALINFOd2i_PROXY_CERT_INFO_EXTENSIONd2i_PROXY_POLICYd2i_PUBKEYd2i_PUBKEY_biod2i_PUBKEY_fpd2i_PrivateKeyd2i_PrivateKey_biod2i_PrivateKey_fpd2i_PublicKeyd2i_RSAPrivateKeyd2i_RSAPrivateKey_biod2i_RSAPrivateKey_fpd2i_RSAPublicKeyd2i_RSAPublicKey_biod2i_RSAPublicKey_fpd2i_RSA_NETd2i_RSA_PSS_PARAMSd2i_RSA_PUBKEYd2i_RSA_PUBKEY_biod2i_RSA_PUBKEY_fpd2i_SXNETd2i_SXNETIDd2i_TS_ACCURACYd2i_TS_MSG_IMPRINTd2i_TS_MSG_IMPRINT_biod2i_TS_MSG_IMPRINT_fpd2i_TS_REQd2i_TS_REQ_biod2i_TS_REQ_fpd2i_TS_RESPd2i_TS_RESP_biod2i_TS_RESP_fpd2i_TS_STATUS_INFOd2i_TS_TST_INFOd2i_TS_TST_INFO_biod2i_TS_TST_INFO_fpd2i_USERNOTICEd2i_X509d2i_X509_ALGORd2i_X509_ALGORSd2i_X509_ATTRIBUTEd2i_X509_AUXd2i_X509_CERT_AUXd2i_X509_CERT_PAIRd2i_X509_CINFd2i_X509_CRLd2i_X509_CRL_INFOd2i_X509_CRL_biod2i_X509_CRL_fpd2i_X509_EXTENSIONd2i_X509_EXTENSIONSd2i_X509_NAMEd2i_X509_NAME_ENTRYd2i_X509_PKEYd2i_X509_PUBKEYd2i_X509_REQd2i_X509_REQ_INFOd2i_X509_REQ_biod2i_X509_REQ_fpd2i_X509_REVOKEDd2i_X509_SIGd2i_X509_VALd2i_X509_biod2i_X509_fpget_rfc2409_prime_1024get_rfc2409_prime_768get_rfc3526_prime_1536get_rfc3526_prime_2048get_rfc3526_prime_3072get_rfc3526_prime_4096get_rfc3526_prime_6144get_rfc3526_prime_8192hex_to_stringi2a_ACCESS_DESCRIPTIONi2a_ASN1_ENUMERATEDi2a_ASN1_INTEGERi2a_ASN1_OBJECTi2a_ASN1_STRINGi2b_PVK_bioi2b_PrivateKey_bioi2b_PublicKey_bioi2c_ASN1_BIT_STRINGi2c_ASN1_INTEGERi2d_ACCESS_DESCRIPTIONi2d_ASN1_BIT_STRINGi2d_ASN1_BMPSTRINGi2d_ASN1_BOOLEANi2d_ASN1_ENUMERATEDi2d_ASN1_GENERALIZEDTIMEi2d_ASN1_GENERALSTRINGi2d_ASN1_IA5STRINGi2d_ASN1_INTEGERi2d_ASN1_NULLi2d_ASN1_OBJECTi2d_ASN1_OCTET_STRINGi2d_ASN1_PRINTABLEi2d_ASN1_PRINTABLESTRINGi2d_ASN1_SEQUENCE_ANYi2d_ASN1_SETi2d_ASN1_SET_ANYi2d_ASN1_T61STRINGi2d_ASN1_TIMEi2d_ASN1_TYPEi2d_ASN1_UNIVERSALSTRINGi2d_ASN1_UTCTIMEi2d_ASN1_UTF8STRINGi2d_ASN1_VISIBLESTRINGi2d_ASN1_bio_streami2d_ASN1_bytesi2d_AUTHORITY_INFO_ACCESSi2d_AUTHORITY_KEYIDi2d_BASIC
_CONSTRAINTSi2d_CERTIFICATEPOLICIESi2d_CMS_ContentInfoi2d_CMS_ReceiptRequesti2d_CMS_bioi2d_CMS_bio_streami2d_CRL_DIST_POINTSi2d_DHparamsi2d_DIRECTORYSTRINGi2d_DISPLAYTEXTi2d_DIST_POINTi2d_DIST_POINT_NAMEi2d_DSAPrivateKeyi2d_DSAPrivateKey_bioi2d_DSAPrivateKey_fpi2d_DSAPublicKeyi2d_DSA_PUBKEYi2d_DSA_PUBKEY_bioi2d_DSA_PUBKEY_fpi2d_DSA_SIGi2d_DSAparamsi2d_ECDSA_SIGi2d_ECPKParametersi2d_ECParametersi2d_ECPrivateKeyi2d_ECPrivateKey_bioi2d_ECPrivateKey_fpi2d_EC_PUBKEYi2d_EC_PUBKEY_bioi2d_EC_PUBKEY_fpi2d_EDIPARTYNAMEi2d_ESS_CERT_IDi2d_ESS_ISSUER_SERIALi2d_ESS_SIGNING_CERTi2d_EXTENDED_KEY_USAGEi2d_GENERAL_NAMEi2d_GENERAL_NAMESi2d_ISSUING_DIST_POINTi2d_KRB5_APREQi2d_KRB5_APREQBODYi2d_KRB5_AUTHDATAi2d_KRB5_AUTHENTi2d_KRB5_AUTHENTBODYi2d_KRB5_CHECKSUMi2d_KRB5_ENCDATAi2d_KRB5_ENCKEYi2d_KRB5_PRINCNAMEi2d_KRB5_TICKETi2d_KRB5_TKTBODYi2d_NETSCAPE_CERT_SEQUENCEi2d_NETSCAPE_SPKACi2d_NETSCAPE_SPKIi2d_NETSCAPE_X509i2d_NOTICEREFi2d_Netscape_RSAi2d_OCSP_BASICRESPi2d_OCSP_CERTIDi2d_OCSP_CERTSTATUSi2d_OCSP_CRLIDi2d_OCSP_ONEREQi2d_OCSP_REQINFOi2d_OCSP_REQUESTi2d_OCSP_RESPBYTESi2d_OCSP_RESPDATAi2d_OCSP_RESPIDi2d_OCSP_RESPONSEi2d_OCSP_REVOKEDINFOi2d_OCSP_SERVICELOCi2d_OCSP_SIGNATUREi2d_OCSP_SINGLERESPi2d_OTHERNAMEi2d_PBE2PARAMi2d_PBEPARAMi2d_PBKDF2PARAMi2d_PKCS12i2d_PKCS12_BAGSi2d_PKCS12_MAC_DATAi2d_PKCS12_SAFEBAGi2d_PKCS12_bioi2d_PKCS12_fpi2d_PKCS7i2d_PKCS7_DIGESTi2d_PKCS7_ENCRYPTi2d_PKCS7_ENC_CONTENTi2d_PKCS7_ENVELOPEi2d_PKCS7_ISSUER_AND_SERIALi2d_PKCS7_NDEFi2d_PKCS7_RECIP_INFOi2d_PKCS7_SIGNEDi2d_PKCS7_SIGNER_INFOi2d_PKCS7_SIGN_ENVELOPEi2d_PKCS7_bioi2d_PKCS7_bio_streami2d_PKCS7_fpi2d_PKCS8PrivateKeyInfo_bioi2d_PKCS8PrivateKeyInfo_fpi2d_PKCS8PrivateKey_bioi2d_PKCS8PrivateKey_fpi2d_PKCS8PrivateKey_nid_bioi2d_PKCS8PrivateKey_nid_fpi2d_PKCS8_PRIV_KEY_INFOi2d_PKCS8_PRIV_KEY_INFO_bioi2d_PKCS8_PRIV_KEY_INFO_fpi2d_PKCS8_bioi2d_PKCS8_fpi2d_PKEY_USAGE_PERIODi2d_POLICYINFOi2d_POLICYQUALINFOi2d_PROXY_CERT_INFO_EXTENSIONi2d_PROXY_POLICYi2d_PUBKEYi2d_PUBKEY_bioi2d_PUBKEY_fpi2d_PrivateKeyi2d_PrivateKey_bio
i2d_PrivateKey_fpi2d_PublicKeyi2d_RSAPrivateKeyi2d_RSAPrivateKey_bioi2d_RSAPrivateKey_fpi2d_RSAPublicKeyi2d_RSAPublicKey_bioi2d_RSAPublicKey_fpi2d_RSA_NETi2d_RSA_PSS_PARAMSi2d_RSA_PUBKEYi2d_RSA_PUBKEY_bioi2d_RSA_PUBKEY_fpi2d_SXNETi2d_SXNETIDi2d_TS_ACCURACYi2d_TS_MSG_IMPRINTi2d_TS_MSG_IMPRINT_bioi2d_TS_MSG_IMPRINT_fpi2d_TS_REQi2d_TS_REQ_bioi2d_TS_REQ_fpi2d_TS_RESPi2d_TS_RESP_bioi2d_TS_RESP_fpi2d_TS_STATUS_INFOi2d_TS_TST_INFOi2d_TS_TST_INFO_bioi2d_TS_TST_INFO_fpi2d_USERNOTICEi2d_X509i2d_X509_ALGORi2d_X509_ALGORSi2d_X509_ATTRIBUTEi2d_X509_AUXi2d_X509_CERT_AUXi2d_X509_CERT_PAIRi2d_X509_CINFi2d_X509_CRLi2d_X509_CRL_INFOi2d_X509_CRL_bioi2d_X509_CRL_fpi2d_X509_EXTENSIONi2d_X509_EXTENSIONSi2d_X509_NAMEi2d_X509_NAME_ENTRYi2d_X509_PKEYi2d_X509_PUBKEYi2d_X509_REQi2d_X509_REQ_INFOi2d_X509_REQ_bioi2d_X509_REQ_fpi2d_X509_REVOKEDi2d_X509_SIGi2d_X509_VALi2d_X509_bioi2d_X509_fpi2o_ECPublicKeyi2s_ASN1_ENUMERATEDi2s_ASN1_ENUMERATED_TABLEi2s_ASN1_INTEGERi2s_ASN1_OCTET_STRINGi2t_ASN1_OBJECTi2v_ASN1_BIT_STRINGi2v_GENERAL_NAMEi2v_GENERAL_NAMESidea_cbc_encryptidea_cfb64_encryptidea_ecb_encryptidea_encryptidea_ofb64_encryptidea_optionsidea_set_decrypt_keyidea_set_encrypt_keylh_deletelh_doalllh_doall_arglh_freelh_insertlh_newlh_node_statslh_node_stats_biolh_node_usage_statslh_node_usage_stats_biolh_num_itemslh_retrievelh_statslh_stats_biolh_strhashname_cmpo2i_ECPublicKeypitem_freepitem_newpqueue_findpqueue_freepqueue_insertpqueue_iteratorpqueue_newpqueue_nextpqueue_peekpqueue_poppqueue_printpqueue_sizeprivate_AES_set_decrypt_keyprivate_AES_set_encrypt_keyprivate_RC4_set_keys2i_ASN1_INTEGERs2i_ASN1_OCTET_STRINGsk_deletesk_delete_ptrsk_dupsk_findsk_find_exsk_freesk_insertsk_is_sortedsk_newsk_new_nullsk_numsk_popsk_pop_freesk_pushsk_setsk_set_cmp_funcsk_shiftsk_sortsk_unshiftsk_valuesk_zerostring_to_hexv2i_ASN1_BIT_STRINGv2i_GENERAL_NAMEv2i_GENERAL_NAMESv2i_GENERAL_NAME_ex @` @` @`8 p2 , 8 , ;<P<?@`Bd@ p Pl `X H 8 (    e d  8 @  P `܂ pȂ   @    l  ` X L 8 0, @ P ` p؁ ȁ  `  t P\ L 8 (  p  Ԁ Ȁ    
 0 d e rp fT gH h8 i n j s o k l t p q mh  0`p 0ؠ  Ƞ   @ 0    t d  T < (  P ` pП        p ` T 0@ @$ P ` p؞    @ h H (        d L 0@ @0 `  p     Ԝ     P `t ph H 0 P  d e f؛ gě h j k lh mX n<  o z pؚ | } { | X @ 0     Й     l ~T x@ , i   qԘ  y  X r@ 4 (   З s t ul X v@ ,   ؖ Ȗ w d ` @  @ P  |  l  X  < p , p    0 @ P ث ` ȫ       x  d  T  D   < f 0 l m  d < h  j e ܪ g  n Ī o k i Ȭ `!#&& %@  %0  %$ 0 % @ % @% P% `%ش  %ȴ % p% % % %| 0%p %\ %P %@ %0 % % % % %Գ % % @% % % %t %\  %P %< %0 0% @% P% P% `% p%ܲ %Ȳ % p% % `% %l %X %H %8 d% n% q% r% e%ر f% s% g%| m%d o%X t%@ h% p% i% j% k% l%x h  @C@DEEHOIK SUVPW`W`ȸ P    0| @` P @ 0    @ p з   e m j h f l| n@ g ol k dX i P     l  X  H  4 @  P  `  p      0  @  P x ` `  L  8  $              t 0 `  <   0        0 d @ 8 P ` 0   @   pt T @8   ` p  | ` 0       P  `  @x PT `8 p   p    \ ,         | d  L  4             l  T < $      0  `  @  P  P h X H  8 0( p  @ P   ` pp X  H 8 $       p D (   @            p  0 h P  @  , P  `    p      s t  d| ` uD  v, w   x̿ y e p nh mP @  f, g h z ܾ {Ⱦ  | |ܪ }  l ~\ o L  4  j k l q̽   r  p d    p 0"4"8"<"  1`+T @+@ P+( f+ g+l d+X e+   ;70>* @* P* `* p*p d* e*< f*d g*ܪ j*Ī k*l h*D i*, pH @8 $ P `   @  P  ` x 0 d p X  D  8                            p  d   X  L 0 @ @ 0 @ $        P  `  p  d  e  f  |  g   x h d i < j  k  } l  {  m  n  o  p  q t r `  P s @ t 0 u  v  w  x  y  ~  z   pWV0ZYX`VV Z:Z:ZpZv ]p]0^`^[@[`[ @`_0ed`_0_, j jkh`hh 4 qqvq pput wvPwvv  zx{wx  pХpТ pPw0$ PH @@ 0P$" `$" @$" f$" g$p" e$T" d$D" ) ) ) ) ) x) `) D)  4)  $)  )  ( ( ( ( (  ( !p( "\( #L( $4( %( /( &' '' -' .' 
0' '  ' 0 @, P' `' p' ' x' p' h' \' T' L' D' <' 4' ,'  $'  '  '  ' & & & &  & !& "& #& $& %& && '& /|& :h& ;P& <4& =& >& ?% @% A% B% C% D% EP% @P`p|pd% edP:`p `:Зp l `pP F xFPhF `F PF >>>>o>|>h>T>fD>x4>>>gܪ ===t===h=i`=X D=u(= =<j<<<<y<|<d<kP<{8<|<l;};v;~;m;n|; PHP0       `  ??`@@@AAA A@BWV$0]]]]]]]]]]]]  |   d Э <  @ @     `k` @ P ` p ܇ Їp     x ` T @ 4 (     Ԇ     x0 h@ XP L` 8p $    Ѕ  ` 0  tp `P L 8 (`    ܄ ̄  0   x  `@ HP 40   ` ܃p     x h X H@ < $   @ ؂ Ȃ    t d P0 @@ 0P ` p   ́     | p ` P0 < ,  P 0 @ ܀ ЀP `  ` p  pp X D@ 4P (          t \d Pe Df 0g h  ~i ~j ~k | l ~ ~m ~n  o ~ p~ L~p D? 8~ ~q ~r }s }t }u }v }w x}x X}y H}z 4}{ $} } || | | | | | |} x| d| T|~ <| | | { { { { { x{ d{ T{ <{ ({ { z z D> z z z z z tz \z Lz 4z z z y y y y = y y xy dy Ly 4y y y x x x x x px Xx Hx 4x x  x w w w w w tw dw @w ,w w v v v v P< v |v hv |; \v Pv 0 $   0  ܓ ē@ P `   p x h ` TP L` <p (  @  `  p    h P < ,      ؑ0 đ@ P d e \@f pg `t Lh u 8v $ s w ؐx y z { xi hj Xk Hl 8m n | } o ؏p ȏq P<r ~ :@ ,P  ` p  Л  p   p P 0   0 ̚   p@ X D 0   0 ܙ0 @ P ` p x h L 8 $    ̘  `  pP ` P <d (v r e f ԗq w { s t tg `h L| >i ,} ~ j k Ȗl u m dy x lo d<p Xz Pv  0 0 x0 h0 X H 80 ,  p   xm xph\TL@8$ԧxhTH<4$( ( P) `) XȪ8pHب0h<@ Щ lx`$@ " "t "T"H"< "0"$p"p "@ " "ض"ȶ"P """p@"`P"T0"D` "0 "$ "@"""ܵ "ȵ0" """"l"X"D"00 "P""p"ش`"`"p"`"P"t "l "X "H0 "0 " " " "Գ@"""""p"`"P "<"(""""Բ"v"w"d"e"x"h"P"<"$"""ܱ"ȱs"f"g"|t"d"Lq"8""""h"Ȱi"""j"tk"\l"Hm"4"T{"$n"""ܯ"""""|~"h|"Lz"H"4y""""Ȯr"}"p""t"d"D"0" """z"{"do"H"4"x" u""hv"xhXD0$Ծľ | l\@ 8,    p Й  x 0 dT0.@ .@.P . .P.l`.\p.D.4.........` .d .L0.,@. P.`. .p....d.H.,..... .0.@.pP.` .H ., .`.p....|.\.<...p . . .t .X .<0 .(@ .P .` .p . . . .0 .l .` .P .Dc.0..d.e.f.g.h.|.`i.L.4j.k.l.m.n.o.p..|q.`r.8s.$t.u..v.h>w.x.y.l.X.4...z.{.|..}.~..|.y.=.=.t.d.P.8. ..........d.T.@.,.......<..x.T.8.....hv....t @`p@p0P`xhXD0 Pnsd|ehoPp8mf ijkrP lgqh@@ @ B C H `C I `H H H PO B 0C H `C I `H H H PO !| !l !T !< p! `! @! P! `! 0! p! !| !` !T !@ !0 ! ! ! !P!@!0!`!!! 
!h!T!@!,!@!!!!!p!P!!t!(u!!~t!\v!@!! d! w!e!!!x!y!!x{!!z!z!zg!lz!|!y!=!L!y!xys!(! {!!!dyh!|!!~!}!p!`!L!4!i! !d!!!px!@j!k!l!m!xn!do!Lp!8q!8r!$#p#@##l`#P#8#(## ##P#####l#\ #H0#0`# p# ### #0###|#ld#Xy#@e# f#D?g#0x# h#\i#j#k#l#m#n#o#p#pq#\r#Ps#0t#u#v#w#0)\@)LP)<`),c)d)e)|s @  l@ &P &` &p &x&l&\ &@&4 &$ & && & & &0& &| &d@&Tp&H&8&$` & p & &&& &&&x &h &P &< &$ & & & &&d&&&&t&\g&Dw&$&&h&&&f&i&&x&\&@j&0k& l&m&n&& &&&&u&xp&lx&X&L}&8&(t&~&q&&&&e&&t a@ 0   $!!@'1`'0p'0'0'0P'0'0'0'l00'X0'D0 '40' 0@' 0'/'/P'/`'/'/d'/e'f'/z'p/{'T/y'@/g'$/|'/h'.i'.j'|k'l'.m'.n''.o't.p'`.q'L.r'4.s'.t'.u' v'd}'-~'-'-w'-x'd'-(2(l2@(T2(L2P(82`($2( 2p(1(1(1(1(1h(1f(1g(p1i(\1d(H1e(41j(120Y pX \ Y 0 /8H/(H/H/H/G /GP /G@ /G0/G@/lGP/TG@/@GP/0G`/ Gp/G/F /F /F/F`/Fp/|F/hF/PF/8F/ F/F/E/E/E/E/E /tE0/XE@/SK?NvہTG= `yȱ4g|"UCР;X)z`KQ'nU_O_MKaLۻ{*z'9AYst t ^yU /)f:`ϰgi9PکH 1K#=gwu홠)sP`=1er/X^fJ2Ak@_@@h```: тK[C1tOFٺ wl]azW {+Rdj>svd/k&Νa%J3 یᦅ} ]Wq Xd߫!U3zP 3-ĪZr&j|I9X+RLo]ŵ좃',w;6.F^2|!ltJN5 gmp)՞R Vb#]e_$?iU6Hژc|=[Qf(IK|$Zk8\ k7BL~^bvEQmm5O7_m +0C:ͳ4yJQ"; tgN)܀b4h!145Mܷݏ괓Z'Hp!vp)a[Q;#ÐO]kNGY|(ʾ.»L*% 4hsvd/k&Νa%J3 یᦅ} ]Wq Xd߫!U3zP 3-ĪZr&j|I9X+RLo]ŵ좃',w;6.F^2|!ltJN5 gmp)՞R Vb#]e_$?iU6Hژc|=[Qf(IK|$Zk8\ k7BL~^bvEQmm5O7_m +0C:ͳ4yJQ"; tgN)܀b4h!ad$@mtt [-`H?f>hZ'1'Sj0A/8S7=vuF&`&p|Ҵ645Mܷݏ괓Z'Hp!vp)a[Q;#ÐO]kNGY|(ʾ.»L*% 4hsvd/k&Νa%J3 یᦅ} ]Wq Xd߫!U3zP 3-ĪZr&j|I9X+RLo]ŵ좃',w;6.F^2|!ltJN5 gmp)՞R Vb#]e_$?iU6Hژc|=[Qf(IK|$Zk8\ k7BL~^bvEQmm5O7_m +0C:ͳ4yJQ"; tgN)܀b4h!݀`qnVߔVvP0wVGXGn +8.lHC @F5hksM~ձ~]HFd]%E6jt1-D?*mx<+:[3hyK݅/Kk0lds4#W|.""ي-e#x+t#4hZc:f$kv%Gtӝ2;1sľ،2|w8otYmtt [-`H?f>hZ'1'Sj0A/8S7=vuF&`&p|Ҵ645Mܷݏ괓Z'Hp!vp)a[Q;#ÐO]kNGY|(ʾ.»L*% 4hsvd/k&Νa%J3 یᦅ} ]Wq Xd߫!U3zP 3-ĪZr&j|I9X+RLo]ŵ좃',w;6.F^2|!ltJN5 gmp)՞R Vb#]e_$?iU6Hژc|=[Qf(IK|$Zk8\ k7BL~^bvEQmm5O7_m +0C:ͳ4yJQ"; tgN)܀b4h!g$k 
PAPADDINGXXPADDINGPADDINGXXPADDINGPADDINGXXPADDINGPADDINGXXPADDINGPADDINGXXPADDINGPADDINGXXPADDINGPADDINGXXPADDINGPADDINGXXPADDINGPADDINGXXPADDINGPADDINGXXPADDINGPADDINGXXPADDINGPADDINGXXPADDINGPADDINGXXPADDINGPADDINGXXPADDINGPADDINGXXPADDINGPADDINGXXPADDINGPADDINGXXPADDINGPADDINGXXPADDINGPADDINGX00090C0K0Q0W0\0f0{000000000000001 11151L1Q1W1_1t111111122#212C2Q2c2q22222222233#313C3Q3c3q33333333344#414C4Q4c4q4444444455!5O55555560676M6y66666677%757A7Q7j7u777777788888889 9"9+9<9I9O9Y9s99999:>:H:Z:c:j:o:v::::::: ;;{;;;;;;<<">=>Q>k>>>>>>>>??+?F?M?w???????????? 00 0$0H0M0R0X0^0d0j0p000000000011(11171=1\1`1j1|111111111 222*2I2Q2g2z2222222333A3Y3^333333a4444565>5a5y5~5555556M6W6w6}666666666 77$727:7C7L7U7`7v7777777778%818889^9u999999999 ::3:;:@:M:v::::::::1;H;a;m;;;;;;;;; <<<&<1">9>A>F>S>q>>>>>>>?A?S?\?e?n?y????0x00G0O0X0`0k001+1>1P1Y1b1k1v111112A2c2|22334'484O4h4t4444444445.5@5N5T5]5p5u55555555555 6<6O6f66666666667,747=7E7P7f777778)84898@8M8X8]8g8t888888888888899+969E9g9~9999999::9:k:::::;;;;;;;<<<$<3<=<<<<<<<<<<===(=9=R=_=q=z=~==========>>1>:>>>H>Y>r>>>>>>>>N???@H+0I01S1112222333366 7&7W7q7777788U8r8{8::F;P 58p9$:7??77K8l8 9 ; 7::+;;;<<;=K??$1e13#4y4t5 7777V;3<< = 0x555555555(6H6778&888n8x888#9A9U9g99999S:q:::::::;;;;;; <>> >>>>> >$>(>,>0>4>8><>@>D>H>A??401G234444 4$4(4,4044484<4@4D4H4L4P4T4X4\4`4d4h4l4p4t4x4|44444466666677"747@7R7n7t7|7777777]8k888888889919@9_9999:!:):;-;R;a;o;;>&? P4080<0@0D0H0L0P0T0X0\0`0d0h0l0p0013566 6$6(6,6064686<6@6D6H6L6P6T6@ J6PL1R1{12257t8x8|888888888888889h:l:p:t:x:|:::;?`X(0,0004080<0@0D0H0L0P0T0X0\0`0d02;35R5k59d<<<<<<=== ===== =$=(=+?p14K5?4233 3$3(3,3034383<3@3D3H3L3P3T3k5Q7:;l9J7>E>P>W>u>>>>>>>>?? 
?'?.?E?X?h?s???????0!0,030`0o0v00000000000111$1/1A1H1S1}1111111222_2f2t2222222223 3313?3M3d3r3y33333334 44!4(4G4W4^4r4444445#5-585?5[5s5~55555555555!6,636C6Q6|66666677)7a7z77777777788&848B8[8b8i8p88888888 99:9U9e9p9w999999999::#:*:;:Q:X:_:f:q:::::::::&;1;I;V;];;;;;;;<"<0<7<>>3>B>L>_>~>>>>>>>>>>>@0G0P0Y00000000011%1.1\1c1l1u1111111121282A2J2x222222223 333M3T3]3f333333333+424;4D4r4y4444455>5E5N5W55555555566#6,6Z6a6j6s666666667/767?7H7v7}77777778 888K8R8[8d888888888 9'90999=q??40]000.151O1V1l1s155566,666::;< {1 78012]222P\:m:~::::::;%;6;B;];d;u;;;;;;<<< >I>Z>k>w>>>>>>>??7?>?M?[????????`0<0C0T0`0000000 110171F1P11111111132D2U2a2|222222223$333A3w33333333$4+4:4D4a4h4w4455%5/5M5T5c5m555556!606:6Z6a6n6x666666667%7,7;7E7_7f7u7777777788*818@8J8{8888888888 99F9M9\9f999999999::%:/:M:T:c:m:::::::::;!;.;8;O;Y;k;r;;;;;;;;<<"<1<;=M=W=u=|======== >>">@>G>V>`>z>>>>>>>>???)?=?J?Y?c?????????p00$0.0\0f0u0000000000&1-1<1L1^1r1|111111222233$3.3@3Q3^3h333334"4/494T4^4m4w444444445 5'565@5s5z5555555556 6<6C6R6\6|66666666777'7E7L7[7e77777777788$8.8H8O8^8h88888888899,939^9k9z9999999999,:6:E:O:j:t:::::::::;;6;=;L;V;m;w;;;;;;;<<'>6>@>^>e>t>~>>>>>>>> ??2? 02</2u2R99::%:D:I:h:n:::::::: ;;<0`>d>h>l>p>t>x>|>'?@22222222256 9\9f999R:]::;;;<<+>??020r0~0000b111G3N3b44f<<<===3? $700+2?2266607A7W77:=0$E455577t8z8::???@4r7b9`|1B:<?p6-7L7^77788y9(233#3=3O3a3n3w577P88> ??P801J33(4a4q44445566,7q77,9B9h9999 :Q:h:/=>=Z=r=w===D>[>\111111n2223*4x555556;6[6: ;T;; <>>>!?=??80002K3333N5555666 6`66 7Z778888TD0F1333444L4Z4}445677 9W9{99:,::E;b;;z<<,=B=H=R==>?#?9???\00111C112&2P2}223334C44g55556)6q66{888m99:::T<<=='>??d0a0n001#1C1f11S2222 3'33304456G6z66t8-9P9|99a:q:::::: ;);M;i;;;3==P>>p00 1#1(101F1K1111122L3f33333.4T4[4q44444 555"66666666/7g7778`888<3= >>`00s233<4C44Q5n5556l6p6t6x6|666666666667T7778H88889C9d9h<D7!7/777777E899z;;;;m<<<<<<=6===S>>>>> <09334F406677868S88:;1;\;;;$<<? 
?-???0G0d000001182223,3D333)556|666666666767V7a8x9::::::;;';>;;;; <@>>>>?V????@]0001!1n11111E2t22222313`3|3333333"4O444415i5555556I6f6t666666 777O8p8t8x8|8899!9E9_9}9999999991:6:F:P::::::::*;1;S;Z;;;;; >>S>>3?b???PP 0@0Y000111w223:456667778868D8g88s9Z:<<<==>">>?`\200 113 33 3c3333334c66v77788p999:9:S:;`01132223333484X455!677L777/88809O9+::;;;;;LT>>U???\ 050034444G557778888888M991:Z:|::;;b;~;=b>>z?O00011%2x222222233)3334N4i444+5N5~555627O7m7;8L8i8889[9~99999-:>:Y::::5;V;;;;M<=t===)>^>?D?d???0H00081a1142]2x222223[33334)4P4c444455(5K5l55555P66677E77778*8M8v888829Z999::::0;s;;;(*?,e0071d11 2?22z335 6C89o;;;a5~:?P 0b00V111p223*444N55556~8888989U99::<<=>)?0???X0005000a1k22w3~33334558H888888 939A9\9u999:F=== >>>]? L0d112223B5b5O6678D99::;<'<<=>>>>>??,?_?00 0070<0A01F1112223X3!434>4U4a4o444444555555m77777,8899o::::;;?>>?@T|000'1{1114C4N4c4z45P5b5w56R6g6657I7'8=8888`9y::9;;;><<<1==_>P001?222222&3E3|33334)4;4M4U4g4y4444444555 5555555555,6@7772898999 99999 9$9(9,9a:::::;;;;;;;h}>>>?\?d?????`0 00k0{000001)1F1Y111112@2L2a2t22222223 33#3k333444 44444 4$4(444m5}5555666666666 77I7r777!8288888T9X9\9`9d9h9:/:E:c::::c;a<<<<==-=3=D=r==>3>P>V>l>r>>>>>>>>> ?/?E?K?\??????????pp0e0o00081J1111*2122q3385<5@5D5H5L5P5T5X5\5`5d5h5B667778;;<-=Z==??? ????? ?$?(?x0!11K2{22233455,6~6839:999m:t:!;(;=====>>> >>>>>4>8><>@>D>p>t>x>|>>>>>>>>> ?:???012/2m222A333%4r44444455$5:5z555:6z666777\777777777788%8B8R8X8;99999: ::":k:x::::;7;];;;;<>:>>>>e????00H00000011"151;1L1i1|111111111"2Q22222444455<7@7D7H7L7P7T7X7\7`7d7h7l7p728{888;;O=>?,?Y?s??0/0P111A223353444444444444444555 555555[6e66667788979@9F9999999::: ::::: :$:(:,:0:4:::a;g;p;;&^>>.112556687H7788 99+9:9L9[9j9y9999999::::;;;;;<< <<+<7<[,>>>>? 
??#?1?7?L?_??D0*080G0^0d0q0w0000000 111 1*141>1H1X1c1n1t1~11 2-2g22222N3b3v3333K4a4t4444444445 5"5,5a55556]6666666677777A8Q888888889&9.9w99999999::!:?:V:i:q::::::;*;A;_;v;;;;;;<<1>7>u>z>>>>>>> ???>?W???????Xh00000011111111222(232>2I2T22,333/4Y444445V5|555556w666666666666a7o7777777777W8g8~88888888889"9/9A9J9N9X9i999999999 :%:C:b:o::::::::';7;N;S;];b;o;y;;;;;; <<$<(<2>2>?>D>S>t>>>>?N?r????????????0020?0Q0Z0^0h0y000000000 11$1(121H1|1111111 22+24282B2X22223 333*3X3e3w3333333333404Q444444555(5955X677 888"838S8t8~8888888999(9:9A9H9_9e9r9999999;; <(>??80e00J1w1K222333424<4K4[4k4{444444 575P555566 77?7H7]7s7}7778 8"8.838C8M8Y8q8889 99(9P9e99999::&:W:u::::;;;;<=== ============>>>O>V>[>o>>>>?"?o????h'0_0s0}00011Z113!3N3W334@4Y4h555 6i8S9999:C::::;;I< =+====>??r???? | 0.0[0h0000000001%101a1j1y11122)262Z2k2223*3t3333446?6U6_67u88#9=9\999;M>??0d0112]2g22233-44 5#5516666*747o777$8(8,8084888A9I:R:X:a:s:|::::{;;;;;@\11122!2Q4a4q44q6666:;;!;1;A;Q;a;q;q<====>>!>1>>>>>?????P10f000[1b11111x22w33333S4o4v4455/5E5g5n55558888889 99"9+929B9K9R9b9k9r9999999999999: ::":+:2:B:K:R:b:k:r:::::::::::::; ;;";+;2;B;K;R;;;F<>>>>>??? ?[?x??`t33"323;3B3R3[3b3334A4b4|4455555555777 777[7778:::::::::?>;?TG0y012`222 3444444f556O888 9a9;;<< <<<<< <==%>/>X>b>0111122t2~22222/3933333 44)4P4Z4n4x44444 55555536=6D7H7L7P7T7X7\7`788;;t=x=|=======>>>>?????????0000000001 1X1b1o1y11111112$2.282B2L2V2`22222S3]3g3q33333d4n4x444444;5E5O5Y555555555666$6.686c6m6z66.777<8C88888999 999999!;8;k?l81o3v3444444444+5q5555 6o66664788888+99999A:F:U:k::::;; <[<>>>?d 2H2g2q22!3M3b33y444.5f55555A7J7W7v7,8\888 9m99;; <6[>>>P???T%0000c112N223_334`444S55 676c6660777 8^8809q99T:k;<=k?H+12+33m5D666667k778&8@8x999:1::<<===x????D21L1133~44556q666667z88j9o;;(=F==,>T>e???HF0-444446w66678/88'9":c:;6;G;X;{<>H????@1J1b11,7L788(8:899u::;>;;;=R=h= >>)?I??? 
L0q2w2 3r3}333344.4S4g4}444}555556636B6l666666777 777777-8v8889{99<;@;D;H;L;|;;;5<<===#>[> ?-?9?R?z????00L2222$3333333494V4A5]5y55555616E6o666667P7y77777 8:8Z8888#999:::;;q<<<<&>A>Q>m>>>>>>B???@001L2a2}2222233-3I3a3u3333\44\555556667277778*9a9s99::;;;;;< =)=A=U=v==!>L>>>>>>B??P01t111112%2A2Q2a4}44444 5!555]5y55556F67778888Y9::3;J;;; <)>>m>>>??`0001Z11n2222223;3^3t3y3333 44C4H444444445595]5566[6w666h7777W8y888u99999:::<=(=4=X={>>>>>y???pq0000011'1F1M1S1X1112;2S2222353K3P33333h4 515@5F5n5555556g6x666666666)7E7J7~77777777788H8^8s88849;9k99999Q:::::: ;;<;n;;;;;;<>>>>>0??p4000000111 1#1x1|1111110233h3l3p3t3x33X5\5`5d5h5l5p5I:(;,;0;4;8;<;,?!0b0015112]223#334@4U5p55G6s6y6666666666l77v88899:;!>=>Y>q>>>>>>?!?=?Y?q??????0010M0i0000001111M1i1111112212M2i2222223313M3i3333334414M4i44444445%5A5]5y5555556%6A6]6y666666677-7I7m77778-8~88888[9|9999:;C;J;; <@>>Q?W?????????|#0-0;0A0_0e001 1$1*11111"2B2T2a2b3w333334&466778899k:::: ;s;;<<===>I>e>>>>>N?e??tF0a0001 1/11112223)3333444O5t55555656K666788Y9~9999::s<====>1>E>m>>>>lH0O0\1`1d1h1l1p1t1x13933`4j4H5L5P5T5X557777778899:#:*:1:S:d:::%;;;;\=c===d>>;?o011G1U1n111/22=3c3R4444?5>66074787<7@7D7[7777777 88'8C8S8d88888889,9B9V9]9k9p9v99999999h::::*;y;;;;;/2>B>{>>>?P?j?v????>0c00001<1@1D1H1L111Q2V2[2u2~22223333 44r4485?5e55555555556646W6n6u666"7;7777758V88889_9999':A:d:h:l:p:t:x:|:::A;`<>/?5?K?a?h?????`001411z23>333d4444D555616:666667Z777L:a::; ;<+K>i>>>)?????0x00=0Z000"1;1111212E2a2}2222213404C44445666q77777'9-99R:::::D;h;;c>>?@l3000151l111112m222356]66677e88888 999)979S9}9999;::;;<=~====?>!?U??PQ0`000091112Q222X3334V4449526f666 7:77778R88889N9999:N:~::Y;;;;A<<<)=3=L=k==>??C?W????`0{00K122u3z333334 454:4e4j4444444%5*5U5Z555555566E6J6u6z666777788E8J8u8z888889 959:9e9j9: :D:I:u:z:::::; ;;;;;;;%<*<<<<<==E=J=======%>*>U>Z>>>??E?J?u?z?????p0 050:0e0j00000000%1*1U1Z111111122E2J2u2z22222a33445 555:5e5j5555555%6*6U6Z6{66o778\88K99/:4:{::Z;< =f==e>>(????<e00*1k11 2|2 
3333444578R:<-={==Z>s>>@101F2r22444I5556Y66779::::;;!;"<+=?P0&2;2m2+44466,77788889989<9@9D9R999;<<? ?( 8899-;;(>=??126$7d77:;d=4?2|222223?3V3m3333777::==Q==?(^0Q11S556?7A88899Z;=>>T500I1g11445556#6b6666667^777@8o8889(9i99:0:y::::i;<< k1p11+202k2222222 33+3K3P3k3333333 44+4K4P4k4555566;6@6[6666667 7;7@7[7777778 8@8E8K8Q8W8]8c8i8o8u8{888888888888888888888889 9999#9)9/959;9A9G9M9S9Y9_9e9k9q9w9}9999999999999999999999999999::: ::::: :$:(:,:0:4:8:<:@:D:H:L:P:T:X:\:`:d:h:l:p:t:x:|:::::::M;z;;<*>>? x001<1S2222353q334T444L55F66666%7r777K8x88V9999/:::: ;N;;;<,Q?_?l?????? 0010:0I0q1111112<2U222?3X3^3d3w34 585Q5W5]55'6x6666)777c7|7%8e8 9{9995:P:_::;4;N;;;<<@<==%=>0 A0]0y0000O111111$2:222@3M3i335j666667 77C7z777778<8B8Z888888C9K9_9|:::x;~;;;;< <3<9(>K>>>A??@ t000021f1112>2r2223(3.3433333u4444 565S55555D6x66637:7999:w:<>>>+?@???P $0000 1)1A1U1q111111 2)2A2U2q2222222`3p3t3x3|333333333333333334$5(5,5054585<5@5D5[555555555 6)6X666757I7N7z7777777777778!8B8c88889.9D9_9y9999999999Z:w::;;;;N<_<<<===>.>q>>>>&?4?`?d?h?l?p?t?x?|??????` 0%0>0W0001E1^112a2z222 333N444"555A667J777b8s888S9999 :!:5:[:a:z:::A;];y;;;;;;<%<[ >x>>>>>??p 3000 1)1A1U1q1111112292Q2e2222223393Q3e33330464P4W4]4c444445D555!6{666 7[7x77777738>8R8c88888889 9969<9U999:k: ;%;?;[;a;g;};;;;<<<<<====>1>P>V>\>u>>>>>>?W??? e000000011q111111 2)2A2U2q222222 3)3A3U33333344<44444444445$5d5555666J777 886888.9D9::;;2;\;;;#>J>Q>f>>>Z???? hT0]0j00111\11144455064686<6@6D6H6L6P6d7~7q;;;;;=>;>?"?e?? 0V0001'1;1W1q111122>2V2\22222334 4m4s4444A5]5y555555566X7777777 8858;8A8a8q888889P999:.:4:::M:q:::::::E;L;k;r;;<<<_K>>>>>x???? 030o0000000013-3I3a3u3333334"4C4Q4\4b4y4444445S5Y5_555556&65666666M7x7~7777777M8e888D9`9f9~999998;[;p;v;|;;;;;;<<<=c>|>> (a0y00$2~2 33a304<<g>? #0111 1$1(1,1012)3P3T3X3\3`3d33333333455677X8889:::;;;<<<9=T=X=\=`=d=h==?>{>>>>>>>>>>>>A?Q?a?q????????? x00!010A0Q000011!111_11111111122444%5*5U5Z55566 77O88899j::::a;;/<\=!>J>`>?? D00I1112263f3?455J66=778#8.8^88Y:u:;0? D0>11'22K33Q444m5]889x999:M;;;<<<==>>? 
h0001N111=22333334o444A55566+7q777Y88.99L::;;2">>>? `0K0001"2;2q2 3=333c444R66L7K888+9z99:v::;-;;; >>>@?y?0 |0B124333]444Z5s556Q6c6n66666N7~777(8F8W8]88.9[9i999@:Z:h:::;;I;{;;;`>|>>>V??????P %0m000000d1r11A2G2x22222334#4P4`4f444444 5L5]5555556#6D6|666628g8~8889T9s9C::::;;;=;C;M;` 10Y000000001%1I1f111111!2=2Y2q2222223!3=3Y3q333334%4A4]4y4444445%5A5]5y5555556%6A6Q6r66677b9x9|999999::#;D;;;;< =0==6>>Q?p X20I0o01=11222 222283p33G444555666+7 8::::::.;;<<==u> L131A22445O66'7M88899.:::;;<==U>>>>>>?F?z??? XG0/1111-2223q334R4444)5^5~66'7A7]88949Z9 ::;-;H;;<<<=O==_>?? d,0011?223j33344556696Q6e66666677-7I7a7u7777778!8189l::P;;k>? `0!1[1112222 3H33;444 5o555K6[6r669:;=== =====>>M>>>>??? \-0[00*11[2w223B4{44 5T55C6k9::;;;;L<}<<<<<0====???1?????? 00021F1V1r1111111 2~22334'4`4f4q44444445<5\55556"6C6]67,7:7777888.8B8G8\8r888889'9I9`999999 : :U:}::::C;f;;;;>>/?E?[???????????? 0A000111B2j22223<3t3333344g4$5N5566g6666h77/8889!9=9W99999 :,:I::::::;9;cH>_>v???????? /050:0S000000011?1E1J1c111111 222O2U2Z2s2222223!3&3_3e3j3333333+41464o4u4z444445 5;5A5F5555555666K6Q6V66666Z7_777798f888899i9n99 :6:Q:::::9;>;f;I?? 00=00000N1S1c1r1111112 2A2Y2223~3334i4{444*5D5\5a5{5556F6d66666C7Q77A8Z8m8|88888999w9~999999<:E:f::::::;M</>?M??????????? 0%080J0\0}0A1U1h1z1111212E2a2}222222313E3a3}333333414E4a4}444444515E5a5}555555616E6a6}666666717E7a7}777777818E8a8}888888919E9I= X01122`22223=3p33344444_56778889|9K;d;;<7??0 011#2a222245567777777777 8%888?8U8h8o88888897999\::::::;O;e;;;UY??@ hG012~33R4445A556g666<7Z7777777=8m888s9999:];c<<===>>>?"?(?4?>??P 01122224r556*6:6D6N666666 7*787777 8"808a8888888889G9L9Q9W9c9k9v9{999999999::#:.:8:C:N:e:::::::::::; ;;; ;*;;;U;_;j;;;;;;;;;'Q?m??????` 0!050Q0m000000 1!151Q1m111111 2!252Q2m222222 3!353Q3m333333 4!454Q4m444443566k;;;;<<<==-=4=Q=X=n=u===>>>>>>>> ??Z?a???????p 00000000.151g1n11111n2w222222222!3(3\3c333f4o4~44444444455$5A5H5^6g6v6}666666666C7J7w7~788&8-8<8C8Y8y8888889'9.999999:::;:B:::::::;;<<<$>H> @7777W88+99z::::::;;;g; >K?? 
001^1l111111222-2U2y2222223%3J3b3{333333333 4"4;4F4^44444#5^5l556?6666%77789::;;;(<<=*=o=>c?? d 030a000N1b11111'2A2g2n2u2_4455S56 66647d777D8n88999U:r::";K;;;<=>4>? 0,0e0000001*101612+3{334444556678:8A88"9)99q::;H;;;;;;;<<;>%>N>S>g>l>q>w>>>>>>>>B???? 4 000!0000 111!1111111162`2f2l2v2233 3363Q3W3]3g3333444(4V444444 55S5^555566696666667C7I7S7s7778%8+818;8Q8m88888899;9@9[9}99999 ::+:K:P:k::::::;-;I;a;u;;;;;;<<11>E>f>>>>>>?-?I?a?u??? L0K0t0000+1{1112 35669:6:Z:h::;;(;R?K?`? 2 446[7 88<99.:4:::@:F:;;<<==$=+=0=F=R=p=x==========>>(>8>>>E>\>b>>>>>>>>?? ?(?S?Z?_?d?k?x????? 0K000000000001 111"1*121>1G1L1R1\1e1p1|111111111}2222222222 33V3[3|3384=4O4m4444445 5&5j555555555566 66 222444444444444444444555 55555 5$5(5,5054585<5@5D5H5L5P5T5X5L >> ><>@>d>h>` <= T7788(8<8P8d8x88 4X2l2p222222222222222 333$3,3@3T3X3h3l3x33333333333334 4$4(4,4044484<4@4D4`4d4h4l4p4t4x466666666666 84888H8L8X8`8h8|8888888888888999(9<9@9P9T9d9h9t9|999999999999:::0:<:D:L:>>>>>>>>>>>>>>>>>>>?????????? 00h6|6666666668999 99999 9$9(9,9094989<9D9 :$:(:0:8:x:|::0;4;8;<;@;D;H;L;P;T;X;\;`;d;h;l;p;;;;;;;;;;;;;;;;;<<< <<<<< <$<(<8<<<@> >(><>@>P>T>`>p>>>>>>>>>>>>>? ?? ?0?4?D?H?X?\?l?p????????????? 000 0`0d0h0l0p0t0x0|0000000000000000000000L1P1T1X1\1`1d1h1l1p1t1x1|111111111111112282<2@2444444 99999::: ::: ,6677 77 7$70787<7H7P7T7`7h7l7x777777777777777777888 8(8,888@8D8P8X8\8h8p8t8888888888888888899999(90949@9H9L9X9`9d9p9x9|9999999999999999:: : :$:0:8:<:P:T:h:l:x:::::::::::::::;; ;(;,;8;@;D;X;\;h;p;t;;;;;;;;;;;;;;;;;<<<<<(<0<4<@>> >(>,>8>@>D>P>X>\>h>p>t>>>>>>>>>>>>>>>>?????(?0?4?@?H?L?X?`?d?p?x?|????????????? 
00 0 0$080<0H0P0T0`0h0l0x000000000000000111 1(1,1@1D1X1\1p1t111111111111111122(20242@2H2L2`2d2x2|22222222222222233 33 3$30383<3H3P3T3`3h3l3x333333333333333333444 4(4,484@4D4P4X4\4h4p4t4444444444444444455555(50545@5H5L5X5`5d5p5x5|555555555555555566 66 6$60686<6H6P6T6`6h6l6x66666666666666666777 7(7,787@7D7P7X7\7h7p7t7777777777777777788888(80848@8H8L8X8`8d8p8x8|888888888888888899 99 9$90989<9H9P9T9`9h9l9x999999999999999999::: :(:,:8:@:D:P:X:\:h:p:t:::::::::::::::::;;;;;(;0;4;@;H;L;X;`;d;p;x;|;;;;;;;;;;;;;;;;<< << <$<0<8<<>>>>(>0>4>@>H>L>X>`>d>p>x>|>>>>>>>>>>>>>>>>?? ?? ?$?0?8?> >> >$>0>8><>H>P>T>`>h>l>x>>>>>>>>>>>>>>>>>>??? ?(?,?8?@?D?P?X?\?h?p?t????????????????? 00000(00040@0H0L0X0`0d0p0x0|000000000000000011 11 1$10181<1H1P1T1`1h1l1x111111111111111111222 2(2,282@2D2P2X2\2h2p2t2222222222222222233333(30343@3H3L3X3`3d3p3x3|333333333333333344 44 4$40484<4H4P4T4`4h4l4x444444444444444444555 5(5,585@5D5P5X5\5h5p5t5555555555555555566666(60646@6H6L6X6`6d6p6x6|666666666666677 77 7$70787<7H7P7T7`7h7l7x777777777777777777888 8(8,888@8D8P8X8\8h8p8t8888888888888888899999(90949@9H9L9X9`9d9p9x9|9999999999999999:: :: :$:0:8:<:H:P:T:`:h:l:x::::::::::::::::::;;; ;(;,;8;@;D;P;X;\;h;p;t;;;;;;;;;;;;;;;;;<<<<<(<0<4<@>> >(>,>8>@>D>P>X>\>h>p>t>>>>>>>>>>>>>>>>>?????(?0?4?@?H?L?X?`?d?p?x?|???????????????? 00 00 0$00080<0H0P0T0`0h0l0x000000000000000000111 1(1,181@1D1P1X1\1h1p1t1111111111111111122222(20242@2H2L2X2`2d2p2x2|222222222222222233 33 3$30383<3H3P3T3`3h3l3x3333333333333333344(4,4@4D4X4\4p4t444444444444455555(50545@5H5L5X5`5d5p5x5|555555555555555566 66 6$60686<6H6P6T6`6h6l6x666666666666666666777 7(7,787@7D7P7X7\7h7p7t7777777777777777788888(80848@8H8L8X8`8d8p8x8|888888888888888899 99 9$90989<9H9P9T9`9h9l9x999999999999999999::: :(:,:8:@:D:P:X:\:h:p:t:::::::::::::::::;;;;;(;0;4;@;H;L;X;`;d;p;x;|;;;;;;;;;;;;;;;;<< << <$<0<8<<>>>>(>0>4>H>L>`>d>x>|>>>>>>>>>>>>>?? ?? ?$?0?8? >>0>4>@>D>d>h>t>x>>>>>>>>>????4?8?D?H?h?l?x?|??????????? 
0000080<0H0L0P0l0p0|00000000000001 111 1<1@1L1P1T1p1t111111111111 222 2@2D2P2T2t2x2222222222233 3$3D3H3T3X3x3|3333333333344$4(4H4L4X4\4|44444444444455(5,5h5l555556686<6l6p666667 7<7@7p7t77777 88@8D8t8x8888899D9H9x9|99999::H:L:|::::::;;L;P;;;;;;;< (>X>\>`>p>>>>>>>>>>>> ?(?,?0?@?\?`?d?t?????????080,000D0`0d0x00000000110141H1d1h1|1111122L2P2222222222222233 3,30343P3T3`3d3h3333333333344$4(4H4L4X4\4|44444444444054585D5H55555555555 6$6(64686p6t6x66666666777$7(7`7d7h7t7x777788888P8T8X8d8h88888888899@9D9H9T9X9@@66889909D9X9l9999999,>@>D>???p04080P0T0d0h0t0000000012222(282H2L2\2`2l2|22222222222333 30343D3H3T3d3t3x33333555566$6(60646<6@6H6L6T6X6`6d6l6p6x6|666666666667777(7,787H7X7\7h7x77777777777778 8888<8::: ;(;,;H;L;\;`;l;|;;;;;;;<4>h>>>>>>>>>>>>?t????????? 000000001 11,101@1D1P1`1@2D2T2X2d2t2222222222t;x;;;;;;;;;;;;<<<$<(<8<< >> >,><>L>P>`>d>t>x>>>>>>>>>>>>>??$?(?8?x>>>>>>>>>>>? ? ?$?8?L?P?`?d?p?x?????????????H000(0<0@0P0T0d0h0x0|0000000000000011(1,1<1@1P1T1`1p11111111111112222,202<2L2\2`2p2t22222222222223333(383D3T3d3h3x3|3333333333334;;< << <,<< > >$>4>8>H>L>X>h>x>|>>>>>>><<(=,=X=\=`=l=p= 8T0X0d0h0t0x000000000334 44 4,4<4L4P4`4d4t4x444444444444555$5(585<5H5X5h5l5|555555555555666$64686H6L6X6h6x6|666666666666777(7,7<7@7P7T7d7h7t7777777777777888(8,8<8@8P8T8`8p88888888888888 9|:::::::::::::::::0 55 5$54585D5T5d5h5x5|555555555556666$646D6H6X6\6l6p66666666666667 77(787<7L7P7\7l7|7777777777778888,808@8D8T8X8h8l8|888888P22(2,282H2T2d2t2x22222222222223333$343D3H3X3\3l3p33333333333333344(4,4<4@4P4T4`4p444444444444455 5$54585D5T5d5h5x5|555 6$6d6h667 7(7,7D7H7h77700 00000 0$0(0,0004080<0@0D0H0L0P0T0X0\0`0d0p0t0x0|000000000000000000000 11 1D1L1T1\1d1l1t1|111111111111111112 222$2,242<2D2L2T2\2d2l2t2|22222222222222223 333 3$3(3,3034383<3\3d3l3t3|333333333333333334 444$4,444<4D4L4T4\4d4l4t4|444444444444444445 555,545<5D5L5T5\5d5l5t5|555555555555555556 666$6,646<6D6L6T6\6d6l6t6|666666666666666667 7777$7(7D7L7T7\7d7l7t7|77777777777777777888$8,848<8D8L8T8\8d8l8x8|88888888888888889 
999$9,949<9D9L9T9\9d9l9t9|99999999999999999: :::$:,:4:D:L:T:\:d:l:t:|:::::::::::;;; ;;;;(;,;0;4;8;<;@;D;\;d;l;t;|;;;;;;;;;;;;;;;;< <<<$<,<4<< >>>$>,>4><>D>L>T>\>d>l>t>|>>>>>>>>>>>>>>>>>? ???$?,?4? >>>$>,>4><>D>L>T>\>d>l>t>|>>>>>>>>>>>>>>>>>? ???$?,?4?>>>>,>0>8><>D>H>P>T>\>`>h>l>t>x>>>>>>>>>>>>>>>>>>>>??? ?$?0?4?8?l>`D@0T033;(;@;X;p;t;x;|;;;;;;;;;;;;;;;;;;dub-1.40.0/bin/ssleay32.dll000066400000000000000000010060001477246567400153030ustar00rootroot00000000000000MZ@ !L!This program cannot be run in DOS mode. $G/?AlAlAl lAl lAl lAl@luAl l?Al lAl lAl lAlRichAlPELKO!  Pz-`p$ P #@.text `.rdataа@@.data1.@.rsrc@@.reloc"$ &@B3|$H% ̸ 3|$H%̸f~4  u dž F4N +ND ;}PjjV? _YFD GtPt)jVL2h(h hjjj4_Yh+h hjjj_YO ;}WNT ‰GWNT ‰GWNT ‰AdwXF4 FTV<SZPdUl ?vTjVt1hCh h(jjj\][_YjVI1h7h hjjj1_YËFD+WPVG= ;tPjjVg> ][_YËFdtNhQVUSjRjЃNT t(jV0hTh j}jjj][_YÅjPPVZu{Fl~lGVTQPSV7t(V@P)u~hh jAjjj][3_YÃtGu+jV/hrh hjjj][_YjVfSh hjijJ[hBh hjij*FD+QPVF8PjnVm9,[F4Q VTv%jV+hPh jDjij[UWhFD+WPV7 ;}PjiV9 _][ËFdtNhQVUSjRjЃNT{r;u2stJ:uv?Q:Wu v2A:Gt*jV0+h`h hjij_][_][́~4@ uZF<@NTQd vhph jDhjRhQ@PNTF4A Qd BVDFHVw8́~4` u`F<@QD vhh jDhjXRHQ@PF4a QD BVDFHV8̸0SUnT3~4p WD$ D$D$q uOFL$ Q|$\jPL$,Q=FTRPD$8PSGPL$DQT$4RQP0~PL$$WQ |$ WT$D$ @D$ D$ D$PPsL$D$T$D$B\$BT$ PL$ QRD$4PL$8Q:T$0D$(L$(T$0QD$4D$0\$(D$+F4SFDFHV$[_],ÅtQT$t Rh?hjjjdj)́~4`FS赸؃FFTU39$+Ѝ:`0L$jQT$ R@ |0|hhh8 HQPT$ R虷jD$#PL$,Q舷FTD$+PdRhPD$8PoFTQPT$DRUjD$LWPƷFT<;0L$Q]_^[ hhjDhj辶]_^3[ hhjDhj虶_^3[ ̸Vt$ WD$D$ L$ džvhhh T$ W+RV } _^Y+uFdtNhQVjT$RjQjЃ_^Y̋D$@0VhChh ׵ trh jVehKhh譵F0thOhh菵 F4u-F4t PrF0t PbVY3^ËD$PpT^̋D$uL$PǀYY̸ fVt$jjjD$PL$$QR1u'jVjJhhj|j聴3^ ËD$PQ T$ RP ujRhht t:PD$u:j\hh: ujrhjAj|j߳3^ ËUPD$訴L$iFTT-NT0vhTjdhV VG]3^ ËT$z ~h$jih# NTSW|$$ۃPD$$#ݍL$RjPQFT׃RT$8#8D$0QjRPгFT(H\vT_3[]V`^ ̃|$D$tPTRHTPTR@Tt'V1~^u ҋ@XRPPQW̸|$$SUVt$(NTWty`YLiTy\YDiPL$,T$-ȍT$RL$2D$3IPL$QRβPD$$WPSL$0UQjT$XRD$@PL$djQT$LRdD$P@P赱_^][VFTWx(;|2|$t^P)X(vT^,_^ËH0H,^PNL)X(vT^,_^Ã~`u\$ 9L$ vL$ U3u9nPt9l$u 3G9l$tNL@0nP;t*/RQP迸H,t@0WQP詸VTj, FT@(;NTQ0VL}=j<F tBL$VT+QFJ0QP英 
~:;|;~:VTj,FT+x(6hhhjpjTvT~(]_^ËNT3A,VTB(|$t^P]_F^]_^PF^̋FTH;L$P;T$t W=<Ij׋Nt FTFP RP8PRQ謰 !hhhhj芯NT;A t~!ANT)A 띋@ NTFA$_hhjhj≠ƶSVFTW3ى\$9x tD$SP_^[9xt!|$ FTU9xt;v8QدP萯;ljD$ }_^[ËR諯D$ ?v/nT}u&v3E++L$3BwFTxu 3*3ҋu3+FTxt 3ɅVTl$ZHFTxȋAXIH?VTzt@NTQHPNTQ E@D$, HS؃D$0E W\$(=u4;4VVj Ut$D tpV |2!ue_[^] ;}hhh hj ȁuED$FD$PjU c}XD$4 D$4@}(L$ D$4Elt u%T$$SQPUuEljUD$4D$ MD$4t 18@D$4v;D$2hhh4hjD$$PjU" }jjj U T$4E\WRP耪A| t'U\WRUЃ uyD$(h eM\Qr; u2st2 :uv'J:Nu vR:VtD$(h*D$D$4ljD$4p D$4u"9t$ th9hD$/hT$(L$$ ;rh@hD$2h~L$QVPUlD$4ƃ}lD$4tk~gL$xQ3F~%T$VR.9xt.D$PF!;|hxhD$/hD$48@D$4;vhhD$2h33;D$ ~ <tA;|;ωT$4|hhD$2h}|5T$$D$(L$QRPL$@QU-uhhhTUl#hhhhj:VMXAAAAjQ D$PD$PjU n9ul&}9h t$$@0lQL$T$(RQPRPhUЃT$ElD$$t$;uAUPRU蜍D$$;uhhD$(hEt;t PȞEx;t P踞PEtR謞ExMX3;thhD$PhTWQ3E~Q$VPD$;tRF;| EXL$UXuhhD$PhU3~"L$ $;t@;|;hhD$/hV9EltD$xult\PT؃3\$(~?VR+Ћ 3T$~\$ ;t-@;|\$(F;|D$39ulD$;u3 EXދ;t P蠜L$9t$uh?hD$/hUt$EPPU;hIhD$(h<3@t$(teW~LVW@ t@ tD$(WF;|օt EXD(D$(t MXDEXDEЁu=| uUEuD$PjU豫 |$}D$D$t P;D$_[^] øS\${40!UVWDCS$QRlSD$$$$jP$L$,Xt PIW̅u h hjAhjԃt PLPiLtuh 댋NQdR U$PQVҋAD$$hR|$O P;~h hD$2h[.jPWӄ u h hjhj誂t P‚Q?u h PVxAzh hjDhj?D$ @33ҩt Q|t jR胃SuQ݁D$tRSI?0t h Ou OG skGQPD$PL$L$QVT$,WRo}h hj{hj3}h hjD$uj@MXPVWj$Qjr }"h hjvhj3|hh hjzhj3|ptuHPMXRPVWjQR~h hjphj3J|#uKHUX@QVWjRP>~h hh1hj3{,t*+t"h; hjDhj{+jPt}S} @tVh@P 3L${8@I@|EXj Pj@L$HQSu}S| h5 :D$/h hh hj'{2VjUF UXx^tP }EXǀxmXeߍL$ QzT$RzD$L$x_][3襂p̸FS\$C@0VWL$QRjh!3h!SD$,|$(|$$Ѓ9|$CX@Ut9t4hj hhhjz(VjS> S;~<9t4hq hhhjy VjS ]_^ǀT[à t4h| hhhj yVjS誉 K@L$(yl$;u h hjAhjDyD$(xH  WD$(;t4h hhhj2xVjS .3Ʌw_Ux;Vh hhhj(xVjSˆ $D$(p P lD$(;wYD$ VD$,Pjx D$tnL$ 9L$(T$PRuxD$;rl$$h hhhj2wVjS h hj hjwD$t PwD$th8PyD$]_^[h |h hjA릋tt h CXxtnSwuaPVjSY nUS=QWh hhhjvVjS #t PvUxu*Ru h 8th8Ryx)]_D$D$^[Vt$~4@!u\V蜰u4NXDy uA th hjDhju3^PVJF4A!FDFHjV讞^̸&}3ĉ$SUV$~4!WpjP\7|$h* hWcu؃ L$QR\$7WD$0Pj\$8D@jWGD6;D$D$L$QW\$6WBT$$F<ŠRPutpN@<P<H<8RQPx< VDF4"FHjV裙^Vt$FXu#h hhdh2jTp^ËNA0T$RhjCh"h"VЃ|$NXu#h hhch2jo^Ã}3^ËFDSW~@S;EL;T;u8h hSo tu!h hjAh2jo_[3^SGWPMw _x[^̸vSUVW3SD$ 3C|$\$v$D$$jPooS<t$$;t|$;tD$nVV0tVF@t 
V薴9u)hhhhjn_^][9t F4=!D$$ut@ -h,hhhj&nVSSj RFnFXnPN4:="=="-!=w4$V^0;SFXǀP"!VF4!^DuF4~^0tVe;T$;Z`džF40!^DsV;9^lt@Ƀ !N4^D=F4@!^D.VXD@$u;@ u2V;b,%P!F4^Dl$F4P!^DFXDI #ՉpVX9pt9LusukuftNA4;tTDB t5PGmNXDJ уҁ;l$F4`!^DV7;hF4`!^Dtl9tuYFXDOttC u>G u5V;NXF4!ǁP!^DVXFXF4p!l$9x`VksJV%;NXǁP!F4!^DV;gu F4!VX9tV;8^DF4!Vu;Ёu:=|3F4!^D9yFX9xu NXF4!^D9xtVj`3 $FX|<* *tMV zdPQjP1lPVҋFX|)RjPZj؃|3V7;FXɃP!N4^Dh!h!V9 ;9^ltF4^DW@҃ !V4^D9VT;F4!^DVb;cF4P!^DFXDFHdQV҃%h!h!V ; FF4!^DHdQj"V҃F@dH,P(QRh!h!V跑;F4!9^lt$FXɃP!P^D)VXǂP^DV?;`^DF4!=0t=@t=`In$;t UjV׃ %=F 9^hhRhjbej(jVu D$N;t Wh VЃ _^][ÐeAu%Tz6X߉     ̸H3|$H%̸VkSUl$}4Et$;18F(t$,D$, D$,0@t$D$, ;|qhthD$@0lQT$RhjHQPS҃t'D$uL$,C@HRЃ;uD|$,ƍuHr;st,:vV:Wv F:G;EhuS}lr;u?st :u'vN:OuvV:Wu t$Clhh\hhjD$$/^t$ClxDvjS?u D$PqDT$,VRHP9f D$,KƉD$,QHPҋu+hh\hhjD$$/+^$FtDȁu=}+hh\hhjD$$/]SBLjjD$4S賉VP]}+hh\hhjD$$/]t I{lt<;Ft+hh\hhjD$$/7]0KXDЁu=}S]L$,A{lL$,t9;t+hh\hXhjD$$/\u]KX;D$ L$T$RPQT$8RSnhh\hhjX\Qt+hh\hWhjD$$/!\PPFNhh\hhjD$$/[S)hh\hhj[_^][ËD$L$ 9D$,h&h\jshjD$$2f[bhxh\h,hjD$$/>[:hfh\h hj[T$@B  D$FT$RjS"k _^][̸vbS\$ C@0UVL$QRjh13h0SD$0l$$l$(l$,Ѓ9l$CX@ DB t W thQh\ jrW{@ZD$;u hXh\jAhjZ@/OW  M;th`h\2h3Ʌd$7WG  DD$$;VL$0Qj|$8Y D$9t$,T$PRYL$$|$,D$;rD$PSGPshh\hhjYVjS*i hih\2hhrh\*j hxh\2hh}oYft P覵D$jPGXUD$ yXCXD @ |$tb@ t\D$WU܊3Ƀ9L$;}jhh\L$hhjWVjSh D$t WEZthh\D$hhh\jEjP~WL<t PW/~t PfWhh\jEjPFW3-~N;t PW3ɋL$D$L$QVT$RVD$h8PXD$,_^][ǀT^][̸X^3ĉ$TSUW$hG@0L$TQRj3hAh@W\$,\$D\$<\$H\$@\$8Ѓl$9\$T GX@ tkDA t6L;t PVLWX_]T[$T3]XËVw@t$8;tw@l;tP&XZlAp;tPcVXpBt;t+P2VYtձGXDHX T$VR`PPu hgh\jhj.NUS Pu hlh\jhjNUPSNO؋GXDA \$Tt)S*O=~hwh\<h6SOD$$ND$,kF;l$l$ L$TЋD$$RSVPQ-OD$)l$tH @tHHQFMD$T$$\$4RSOT$4Yt3R\$@%ND$0P\$~jSWD$~]MVBd@ j0L$4QRVЋT$@j0RAdDD$,@ˋӈXPF4^DFHjVl$_^][3BKxhh\jDhj=Cn \;uj jVCSh h\h) XpSzDu h h\jhjBU?Du&h h\jhjBUCKUQWD؃ &h h\jhjfBUzCVBdP SWQVҋSjWAJURcC‹_MWQ,CUC,Hj(jVRh h\hgXtu/@HPAD$$28&XSBSB؅QCD$u h h\jAhj?ApUPCu h h\jhjABL$QgBu h/ h\j+hj@U.B h< h\j+hj@T$jRSPW^B؃ hC h\j+hjo@FHdA SWRVЋSjWAHT$,jjjjRAPUnAha h\SH@$D$JA|$D$t_t[L$T$PSQjRRAPU$Al$0SUGWGD$8PC@U?L$FXj 
PS>L$PQT$\RS3?S=AD$hPjjhjU?P}#h h\hhj5>fj L$dQT$$R$P0UGD$0@}#h h\hhj=D$=r GXXP$QGWEjjjjjUH?$~FXU/?VBd@ j L$hQRVЋT$4RA=;P{>‹_WQA> t P/=P>u h h\jAhj<PV3Bxh ,h "{3D$ u#h h\hhj8<ih$Qh$RLRVЃD$=v h h\jDhj;u#h h\hhj;P$QRlL$(QD$8$$jP$KCL$4T$@ Ht Pt;LR<9Lt.9u h- h\jAhj:t P:$Rz<9uh7 뛋VJdU$RPA VЋ$AKICu+ًS$_PWB D$ $hQ:$hR:|$ Tj(jV!J %j(jVJhS h\jDhj9 D$P;D$t P9D$t P:L$$Q9̸A3ĉ$SUV$WD$(3P|$9~4vNDBt2j jUF@hqh\hhj/D$4 ][VW}@h0 3D$;u hzh\jAhj/L$(w v D$( L$(3;vI0UX`@;rEЁ=F ؋˃+ύT1;T$vJj2jUc?hh\hhj/ D$thDP<1_^D$][uNSVU& t?L$(F ؋Ӄ+;T$thh\h|j2jU>hh\hh^3vi>N DD$;WT$Rjt$$j1 9t$PD$P~.D$;rL$(UXXEX\MXltыlhDP0MXT$_t$D$^]l[ u*j2jU=hh\ho t -fj2jU=hh\j >j2jU=hh\h hk̸4UVt$3~4pWl$l$ u%;t9(t9ht F4rF4q~4qD$ PL$QVZ ;}_F^]FuV9l$ t3D$;t+PVF4qHtT$ RVu!3h4 h\jjhj3Q,D$;t PX,D$ ;t PS,;u7>u"FXj)jVX=< _^]ËNXǁXF4r~4ru1VXF4sXu3 PVIYFDnHjVT_^]̸&3UV3WUl$D$3E|$ l$2$D$ jP++U<t$ ;t|$ ;tD$ S^Vl0tVrl@t Vp9t $F4=Ut-VUUj RF +sFXFPN4$=0gI-=$TVn0g;FNF4 nD;QP+-FV;9nltF49@F4{F40oV;u!@FlF46NXD@u:@ u1Vv;H,ҁ@V4D$F4@V;VF4PnDdV;F4`V/;FXDA tV!;3VXXF42V:;\F4V+;=FXXu F4 F4FXF4Vx;NXF4nDhhVsT ;FX҃P°V4nDDFX;uNQdBVЃDNQdBjVЃ+V;F4NAdP$@ RPhhVP;FXF4 9nlt+VXPFXvN4FXhe9@tNXǁPKVXǂP9V;cF4Vb;DF4@hhV9Q ;Fl%F4F4H8=@t =Pn$;t jjV׃ F9n|tD$ @^ËL$ t A>ut @@@@`t>|A@B@>|@@^Vt$~,~4@F0u$jjVF0FX tD^ËNXtVB_^3]L$`3dÍL$QkT$jR7D$WPlL$$Q Pȃ|0tT$tRD$SP HQPT$RWD$ hxPL$$QT$HRD$0PM$L$jQPT$RFPL$ VQWT$,hRD$0PL$TQT$uOVXǂt2y u VXǂzu NXǁ_[^ Vt$FXxt PNX|t VSPVXjxFXxjj Q^̋D$ L$T$SPj3I[̋T$S\$ Ul$VW|$ WU T$>PU@ _^][̸$v|$0L$(AXUVt0P`T PUD$T$PXȃ} ^]$3Ҹ0SWL$؍D$PL$ UQT$D$ RPL$0QST$8hxRoD$u FuX|_|$utPQadVXL$ǂFXVXFXuNQ|)P-%uhQ؃3ۋT$,L$(+G WOtAVuEhh|(hjhjB_]^[ 3ۃKRQPW W|$ t/WFHdAjRVЃ } _]^[ ËL$ Ooo t_VBdjVъWD$OHD$,G|$0t _]^[ ËT$L$y VXL$,h~XT$(t~XQRl~XPVpC_]^[ ̸$3ĉD$ D$0SVt$0NX3ۉD$\$9u Vt8D$4;t tu 9\$@t4t/hh|(jDhj^[L$ 3$UWFX9`|$D\3;~/VX9`v\$VX`O3A@;ڋVX39`$VX\VXGA;`r_]^[L$ 3$9^uOV'0t?F VЃ;Wu,hh|(hhj/$ $~X n9_t ~8u a;FX9t ?UF0_9L$<n\T$L$`o+;s;v%_W (\$+GOI3;wߋT$9@V$;FX`x8\l;^9R`FX8]^8^R8_FNd;tVhRVj\PjPSуV%FX9VV(}N Vу;YVX9JVFjVj Vq9uTT$L$XsVRj_VFmP j 
VYP;tkV$u]FX9uR>~J`rA\u8;t.9t&ujdUV_1 IFXXTUXFd;tNhVXQVjTRjQSЃ;u;tD$9\$t Ph@VT$ XNX;.dhh|(}'hSF0ue9o9_O 99D_Fd;tVhRVUQjQSЃVXV)/`ri9^udN4u$uV$ҁV4n(F VЃ;>NX9W|r=j=__phh|(hR__n]^3[L$ 3$Vk"0t |$<u9uh h|(jdD$D;O;wW WD$URPd 9\$Hu))oGo;uF8_t V_]^[L$ 3;$h`hmh|(2ji|N0_]^3[L$ 3$Ã}NXhh|(RhFjWh(D$|)P~%ubPE؃3ۋL$+G GOt;VRu:hh|(hjhjN3ۃKf fW W|$ t(WFHdAjRVЃ  L$ Ooo t_VBdjVъWD$OT$ HGFXhVXD$tVXQPǂlVXjVpFXǀ_^][ ËNXuVjjj RFdtNhVXQVjRjQjЃut"FX Rh@Vу _^][ ̸UVt$(FXFdVl$,ǀd\0t@~u:F VЃu'hPh|(hhj^]ËL$4+SL$W;v|$L$^XL$8;l$ D$\$(uV{ tT$4WURVFXtNQ|)P;%uuP؃3ۋT$L$ +G WOtNVuRhh|(hjhjPNXT$0_[^d]3ۃKRQPO O|$t$OVBdPjQV҃ |D$Goo t_NQdjVЊOD$,WL$@:Et6h h(3hhjWjV& _][3^Á~ uH@~h(hh( FXSQP}VX _][^Ã@~h(hh(H FXSQP5VX _][^̋D$H4;L$uP+$+0ø*ø3ø-ø,øPø(ø+ø.Ê+++++++++Vhh(j j 39t$t`\t;uHt@Puhh(j j Ruhh(S ^Ë^Vhh(j j |$t`\t);tuH;XsrP8@p3hh(j j t V+^SVt$ V =tVt =u FXuy CAtǀ@uWju_u hh(jAhjU^3[ËNXVXFXNL^[SVt$ V =tV =uFXup\ Suu\ SWj_u h*h(jAh#j^3[ËNXVX^[Vt$Veu^V&^Vt$FXt-SWjNX_ǁ[^Vt$FXt-SWjDNX_ǁ[^̋D$u=u!=u=u=u3̸(̋D$u"=u&d=u =u=u3̸$v3ĉD$ SUVW|$8334"WLj WI؃\$ wLD$$NL$(fVfT$,F D$.~Fu~u<k4<,~rfF<ruG4"<ruG4"uG4"uG4"<~~Fu~r ~ u~rX~ vF |z|u|uuujVh*Ӄ tXjVh)Ӄ tGjVh)Ӄ t6jVh)Ӄ t%jVh)Ӄ u(hh)hhh)h\$4"OLQI =D$T$ D$~hh)hPWQWPGLRPWGd tOhWPQOLWRQjjjЃoLU]ME E U ‹W*D$?T$@D$AD$BD$C t$ t$T|$$t*EEEEtEEECXǀt|tm‹QT$RL$$QS҃uJD$++PT$E3EtȃPMERU.CX l$ǀ+ǃu^]_[L$438Ë^GL$@]_[38̸覦D$T$ SUV03W|$$OXD$D$D$,ЍJT$;{ffNff ;tL$42_^]3[ÍB;$Nff Nff .L$0D$;tRUVQjWЋL$Hfu# fD$uf ?_MF;ǂt Phh /Sם SVP+ f#uV`tdQUVWЃwjjj W@qf|$0eLJ@yfu1?g(KfBLJ,Bt3f;GX$3v 0ɍD;r;RUVD$0PL$Ht P踙hh /j誙 Hu#hh /jAhjI_^][džDH@HAPt PChh /j2džL2+ PuLhv3ҋPAdr_^][Vt$FXD@ u @@trHt P虘hh /j苘 Hu hh /jAhj*^džDHBH@^̸膟Vt$ WD$pt upttPT$ RVы (t0t&<t@PVуtCt/u dž,ǃtRt6uX$_^YÿD$P398,뾋T$RjV5 _^YËD$PjV _^Y̸vVt$ HD$ptzDvqtavWFXD@ u@@tB3vAt3@;rhh /hhjO^YËW3t upttPT$ RVы (ty,uptf<t]8tPdž8dž<@<QV҃u D$q } D$Pǃ_t.tu4$^YËD$PjV舥 ^YËL$QjVr ^Y̸Ɯ3ĉ$$SUV$0ڋpW$D$ L$$D$y$RMD$,P諕8t7j$QT$0ROQWSЃ-utD$j׹+Ѝd$;sjÖPjR$P視OQ(Uj蝖PT$8R$PؔD$}L$(QI+V$WR'j$P$Q $RD$0 >$rU;0st-:UvQ:Uv A:EL$(Q谕T8D$,PT$ 螕+hh /V]T$(uR`D$VPL$QWR轖L$$D$(PQT$DR衖 
~oD$D$L$(QT$RD$ Pj|$(]Wےt4$tL$ WQVHRP D$$~D039D$ ђ$_^][34ĸ̋L$ 39 <t @rË@D$Hw%$Df˒邔͕Z齕鲕3Ð"f'f,f1f6f;f̸VD$Sȁu=}[YV3;u^3[YËD$;ƉsPsDs8st~qUl$HE@WD$EUT$tt u:p v|8|8uL$Qt uC8l$u_3]9sPuؔCP9sDuoCDgC89stu谔Ct^[YøVSUVt$NXWf8X@x ߍxFdl$|$tnhUVQRjRjЋl$4fh Ch /PD$$zL$}ES_QWjW\$8SUjV0|FdtVhRVSUjQjЃU}:_^][Yfu,u'O ;u@dž_^]3[YW|$ t#h h /hnh;jQ_W0SVh h /j%TCCsNjVjVj%SjWó,|%GdtWhRWj%SjQjЃLJSՎ^[_h h /hh;j{_h h /hmh;jX_øD$T$ SUVt$ NXW83$dž(L$,D$D$XD$;9ffoff +˃;H;d$Off Off ˃9\$$L$,;tT$,QRWQjVЋL$DffW ƒ;D$,\$$ZGfCfKCff L$,;L$,$9Vlf;4hUh /RɌ USP.) P@u+;dž$Gt/HL$@u+D$;uUSR u3$D$,+ŃD$,T$4_^]23[f fvfI;u΃hCh /P請 SSOQP P@u+;JL$42_^]3[f > Io;~ltP ǁh!h /Sǂ܊ SUP0 zf >dWڃ;n~lChEh /Sǀ4 %SWRQ腑 f#u.`dL$,RQWVЃf;u)L$4T$,QRWV25D$hf u]|$xfnOT$ \$$;PWV7 f><f;(\$$;D/W ;LO +й+مKPT$4R|$8j- D$$;|$00u^0D$$0PQ9bf/W \$$;|$04thP轉UL$4Qji 46;l$0*rdž(ffu +t+H @t3f;uFXu*"fuL$4T$,QRWVMD$$D$H; D$(8|$2%L$4hHh /hRh.j(y_^]3[ËT$4_^]p3[ËD$4_^]P3[ËRhL$8_^ǀ]p3[_^][ËT$$RD$4_^]23[ËL$$QʼnT$4_^]P3[ËD$$P褉L$4_^]P3[ËT$4_^]/3[_^][̋L$D$ S\$Ul$Vjjj S4(ǃ@@t^]3[Ë=~L$t;r^][=t=u t;sffF ЍD;sD;w;sPW;wFpff pff ;wf#tƍP;v_^]3[fu_^]ǃ@[ÃhuNL$QL$(URPt&tt_^][ǃ@_^][ǃ@_^][̋D$ u3Pa39 t Ar3ËtԋD$Prt‹L$A̸&3ĉ$$SU$W$D$0$L$8$VT$@D$0L$D$8؃}h@/hh0/ك T$RTD$HPJL$jQT$TjR $PWjhW(WjVL$ jQWjVT$PjR襆t$PL$UQ跂 D$0t$RPD$P葂 kD$8t$QPT$Rk ED$t FX3ɃHFXHL$A$tN|f|9t D$;hh0/h| ;uh=h0/jA9P|UPT$D$@;tP~;t OQ~;u h>t FX3ɃTHFXP`T$\D$ 9l$t D$Ps}FXxT$L$ RD$m{t?NXDz t JIɃ;|tzO\$S~%uX SE{؋$ t1!t,D$ (G(T$$;D$0|/D$4'D$(xT$$XD$0h/D$4VX;t~hh0/jDD$PQL$$QȁT$$R~ u=D$ T$D$,QRjP}L$LT$HPjQjRD$P}D$PPy(|$(T$NXRL$,zT$(P$PD$0$QWRT$Djjjjj Qj PjRVnP@tY$~lFX QT$dRL$HQjh//jjjjj Rj Pjh\/VRnPY@u3|$4tl$@ l@|$$t$V@|%$ u#PjW|$ jVW{USjW|(L$PUWjVQ{|$,Vv| tT$ tL$QPjWN|$j@Rx$j@PxL$Pj QxT$xj Rx $_^][3̸fU3Vt$ FXl$ l$l$9tt ^E]ÍL$QT$ RD$PL$ QT$0RPyu&hUh0/hhjWw^3]ËNXT$ |FXL$VXD$ NXT$SD$$WP~wL$,Qw\$Vthbh0/Ww؃;u'hdh0/jAhjvD$,_[^]ËVXhktFXh0/Wxv uhmh0/jAhjmvuWUSV_teuPV:EFX3;t*x u FXxu FXL$tWUvUv D$_[^]̸8f}3ĉD$4SU3VW|$L\$$9\$PPvt0QvPJv}h/hh0/)v wX0l$;u \$ Ux?؉\$ S?y%SuN9N t'hh0/h/@PPQtu_^][L$43|8ËRut0PuPLu}h/hh0/+u wX l$;u\$ 
Uw؉D$ VNFM^Q\$cwURD$x T$PGXtTt.t&L$(PT$,$ufy_O\t  |$PT$8t  fPl$L$9HL$:D$8fT$>L$pL$VQKsD$WX| QT$,R8rD$,PL$4VQoD$8;tD$D$T$RD$PEU o[T$j L$@QHRQ$PjjjjjjT$t+V‹T$LPQRWcP@uD$ D$$P?nD$$_^]3̃ u̸<u3ĉD$8|$HD$DSUVt$LD$F|WtD$FX0TD$FX WD$nPIn}h/hh0/&n l$ L$8T$9D$:K|$L$;ST$;9us :t* L/d$>;9u6s:u"hh0/hoh:ji3JL$$QL$QL$(QHQJXDPD$$jjjjjjjjPB$UP@|$t Wbit UUi_^][̋D$sX$܏3ø øøøøø(ø*ø+ø,ø-ø.ø/ø0ø1ø2ø3ø<øFøGøPøZødønøoøpøqørøsÃË+.4:@FL֏RX^djpv|ďʏЏ֏ 3|$H%/̸/3|$H%`0Vt$~4 !u-Fh;h0h3 %u ;uGm$;t,A ЀɁQPWՋ ;uhOh0hWX9Ht&SchVh0jDh j[bVc;u h\h0jh j,bGXHF;t99^t4u(PcENQwcE9]t;uhsUUcuhh눋]U E\$0T$4D$8E(;uEm,;t*A ȀҁRPWՃ ;uhh0h7OXL;tQZbh;u hh0j+h j aPb;u hh0j+h j`sWXVLNbtV;bt t-Vbu hh0j+h j`Va؃yVahVaWGXDA t3Sa=~#hh0h6h j`SfaP*D$u#hh0h;h j_fjjjjVBaPSahh0U_$D$(`|$(D$$tztvЋD$(RUPjV`PS`t$@u hh0j+h jW_L$$Q`33ۉl$$t$\$0l$4l$8l$j$QT$$R$Q$RVЃSUa$jPU$Q(RjWPD$,P>VjWPjQ$RW$ $$$D$D$@D$D$@D$D$@D$D$$xD$$|D$PT$$JL$$T$A D$RVL$P$PQ\T$,RVL$$USD$0PQL$4L$@QVD$8D$ PHRSN \$E\v hHE\QPCScNU\ jCPU?rPUetuhSh\1h Ctu3 P&F؊F3v WREGF;r|$@QFVUj u&hih\1jDhjE_^][Ë+D$PjPjWU8Sj+UE4uDEHB jUH_^][̸MF $QRjh'D$@0h&VЃ<$NX@tV\FXǀTYËSW~@;uY8GuT_v/PjVU _[YËF\SGWPLN\ V\_[Yhh\1h hjDG FPjVT _[Y̸K3ĉ$SU3V$~4Wl$l$l$$l$ gF~jSWD$~]MVBd@ j0L$4QRVЋT$@j0RACD$,SjSjPVVPj VF4^DFH @ jVE$_^][3pJhh\1jDhjkB Yp;SCu hh\1jhjBUCuhыSURWC؃ h붋FHdA SWRVЋSjWAIURB‹_MWQBUB,j(jVQhh\1hhhtu1@HPkAD$38'@UoBU\$uBD$Bl$u hh\1jAhj@cSUBu hh\1jhj@5UAu h1h\1j+hjh@SA h>h\1j+hj;@L$jUQPWA؃ hEh\1j+hj?VBdP SWQVҋSjWAGjjjjU(AT$$PR@hch\1S?$l$$@D$ t[tWPD$SUjP@L$$PQ@SUGWGT$DRC@Uu?D$@PI@L$LQO?4Lhhh\1jAhj ?h {hqg3D$u#hh\1hhj>Uh$RhL$lQLQVЃD$=v hh\1jDhj`>u#hh\1hhj9>vP$lRPL$Q$$jQD$@$EL$(T$@ Ht P=LRk?9Lt.9u hh\1jAhjf=t P~=T$`R>9uh랋VJdU$RPA VЋ\$pAKCu+ًSD$d_PWD D$L$`hQf=$hRT=|$j(jVL %j(jVLhh\1jDhj[< L$ Q=D$(t Ps<D$t P@=T$RC<x̸0C3ĉD$,SVt$<~4WD$\Fuh-h\1j L$ O T$ W N=CPL$QKWRjD$4PQ=uh>h\1j*mT$ W D$ G \$ SjSjWV2Hj V^DFH7 F4jV=_^[L$,3PB0hGh\1jDhjN:L$L_^[3̃ B0̸AUVt$3~4pWl$l$ u%;t9(t9ht F4rF4q~4qD$ PL$QVZ ;}_F^]FuV9l$ t3D$;t+PVF4qHtT$ RVu!3hh\1jjhj3Q9D$;t PX9D$ ;t PS9;u7>u"FXj)jVX=I _^]ËNXǁXF4r~4ru8VXF4sXu3 
PVIUVFDnH 6jV;_^]̸&@UV3WUl$D$3E|$ l$?$D$ jP88U<t$ ;t|$ ;tD$ S^Vy0tVry@t V}9tV1 F4=D$$Qt[-%VUUj RF 8FX^PN4=0-=$Vn0?V V ;F\9(tNXF4ǁP F4 FNnD;(QP:FVs;7Vlҁ&V4nD#; V F\Ƀ0N4nDVm;u!@҃^lV4nDwFXD@u=@ u4V,;,Ɂ@N4nD+\$F4@nDVE;9VF4PnDV;F4`nDV;V FXX҃€PFXnDPN4wV V;F4nDOV| V;jFX9XuF4nDF4nDV4 V;"VXF4nD9nlu V hhV> ;FXF4nDDFX;uNQdBVЃ}NQdBjVЃdjV#9nlu VK NAdP$@ RPhhVJC;FXF4 9nlt*VXPFXt>N4FXhnD9@tNXǁPnDVXǂPnD|VY;F4nDZV;{F4@nD8F\hhV] ;FVNlɁN4nDF4X8=@t =Pn$;t SjV׃ %=t =F9n3 ~$t V\ǂjjj Vwt F\L$XV\8F\@N\D$DV\HN\T$_]P[V7 %%^̋D$PFtxu3Vt$F\3ɉHHF\HF\fV\Rjj-V5KP*V)^Vt$F\F\v!jjj/V SP*N\XV\ ^v"hh1h8h<j)3Vt$jh j V|vF\VǀT)~ L$ Qjj.VsJP*^ø 1VW|$G\uu fw\T$RL$D$ iNG\G\Pjj-WIP)_^ ̸ v0Vt$F\uu3^ ÍD$PT$ N\it$;|Ou9~EWD$8)0H)PP_y I@BP8u x:}3ɉH^ ËD$3ɉH^ ̸/L$ $PQ(t<$|$ 3̸v/Vt$D$PVt|$|$~3^ËF\fffF\fhh1hhj][_^YRQP!< @|$t54D$NQdJjPVу } ][_^YËT$4D$@<tPU|D$ 4NQdjVЋN\ F\ KFXPVSVf@fC5K4S\$$4 0FXTP4D$0L$,LJVXhVXtVXPQlVXSVp&][_^YSًC\8VQC\f4f;8QtwC\84RztVFPXVT Tt[KXS\(Q<RC\8Q> uC\ff<[\ff@f4^[^3[SUVW|$oX3 G\<@P؃t,SKQS _^][ÉUWP9_8u P WXjPj W!3;~ƒP uċwL_8MFFNF^ FF ÉE^GX f^fXF^ ÉE9 u ;S3E}@E8OP] ;v jSSW9!3҃;;t 3ҋG\G8]T$;u (A;Mt D$;9Tt }u>tW4u EGP39U|$tCWJW0ut G\TuUR4P3EGP=EGP#̸P3ĉD$LS\$`W|$\GX\$D$u WKt9D$`t tu |$lt4t/hh1jDhj_[L$L3PËL$hP3ۃ;$9_u4W=V0t$O Wу;uhh1hUVwX 4Gu79^u2W\PP;tUMQU{ WZ9^t 8u2Wo;%PW;y^]_[L$L3PËWX9t+>t&O\FPLQF30G0hl$h;yu)HG\D$9NsrG8^uHG\ЃtbWXWǂ4GjVj Vk3;vV^ (FN@;r3ۋD$9_$G\ 8;9G\8h8\8POd;tWhRWjPjPSуWSGX9WG(WO Wу;ekWX9\WG2G\9Gd;tOhQWRW\RjQSЃ;tD$;tD$D$;t Qh@WЃ 0WX;O0^]_3[L$L3PG0D$iN PQs?u9n9^|N 9p^Gd;tWhRWjQjQSЃG\9 WXWǂjW?G\f( 9_N D$ PQW\9Ft^|$ u"WPWC#^[O4u*WXu"G$%G4G(O Wу;hh1hhj~|*?{^^^]G_3[L$L3PWP0tu9uhch1jd5D$p;LN;wF FL$UPQ 9\$tu)nFn;u G8^^]_[L$L3PWJj^WGe/Pj WQ/Phhh1hhj;hh1jihjj}GXh}h1QhGj Vh(T$hjR[D$pPh(UE o0QR腂8^]_3[L$L3wPhh1/hG^^]_3[L$L3?Phh1jghj= OX9t=9t5G4t=|= ~ t= !=!|DhGh*h1jDh!h1h hj VjW ^]L$T_[3}PVt$VN0t8~u2F VЃ|gu#hgh1hh jK ^ËD$=@~#hnh1hNh j ^ËL$T$ PQRVt^SUVWhh1j433  t[thh1W  t5u,9\$tHhh1W ؃ ut V U _^]3[WjSA _^]0][̋F,t Py F0t Pi V` Y̋F NW~;v%hh1hh j /_;D$vhӋC\uqKF,t P'F0t PV_^]3[L$3 ËO\HPRL$ _^][3̸ 
S\$C\HVQtJW~G,t PG0t PW}VyS\HPp u_^[̡Vt$ j,jVD$PH H@ щVfPfHff fVPH H щV PH@  Ѓ V^̋D$L$k̸SUVt$NXXh@Wfx ݍhFdl$tnhUVQRjRjЋl$4fhCh1PD$ L$$}ES_QW{jW\$0SUjV0|FdtVhRVSUjQjЃU}A_^][Yfu3u.UE ;uVdž_^]3[YW|$ t#hh1hnh1j_WC?0SVhh1j%CCsNjVojVdj%SjW,|.GdtWhRWj%SjQjЃWLJSL^[_hh1hh1j_hh1hmh1j_̸VSUVt$F\X3W; SSj VMISSj(Ve%PV\XF\X; sYXV\X;u3I; @rXV\XSPj*V$PN\9Xsh3h h1 9^Hu/|$u(F\` 9NDth`3hh1c ;t#PPPl$R\$닆;t*RutQ|$\$~Du3_^][YËl$|$jjj V#PЋF\X+++ƒ  ,jjj V#P[~N\X++Ń ~D;~|$FHt, h83h8h1L FHFD w F\h\WPN\F<@FH\b\PQPQ@PQ PQPQPQ PQ PQQPIH sh3hFh1 Vtq\bPaP`@PePdPXXXbaP`H  MQPV FD;t'nH+ʼnFD\+~Db_^]3[Y_^][YËFdt"VhNHNDRVFDu'N\f&N\%N\$PFDF\3҉VH$jVƀ\`fdhlVD$F4jV$^̸,f3ĉD$(3S\$8D$ D$Vt$8V\W|$HL$$D$*\$+HQPu6Qh3@P _^3[L$(3.,x_WG,N<ۃU RQPRGÉFDoOWF\fd\`ǀhl\D$(F\f L$ ǀT$$WGF\L$,O W$fO(f W(M ;u&FXHTPXL$0N\T$4*PT.HX39WTRVbL$ T$$D$D$D$(N\f W(;]u)FXPTN\*@XT$0.FXL$,HTPXF\jjj VǀPL$DD$_^[3f,̋D$Vt$u$H\Tuf&f$H\f&@\$T$\T$fdL$h`lD$ ^W|$GXTtxǀTD$|BOX9@t7hh1hhjj jWT$< _ËD$OGIHAT$ R+AjPV|$ uD$PuǸ^Yh3@P^Y̸|$ SUVWl$PtQP|}uX0u3SV ;Vt'hh4h/@PP~x QVPR __^][YËt$P?tQ,P|NjvX u3 STl$ld\~Rl$tI|$tIǙD$+Qt @XtB,9;} ~8@;|Nl$|$u3ҋNV WQRS*|$F T8\$T$BtP[X/+؉\$ I\$;usD$u L$AXD$HXtJN*;d+;}^ ;\$E@;|n n++_N^][YËNVF QRPN N_^][Ỹ=UVWt4-x@u+;uD$SPRՃ t>u_^]ËL$_1^3]̸vUV'u%hhT4hjh5j^E]YSWj:Vt+ƍPI@u+L$QV*u%T$RUtw묋D$_[(^3]YhhT4hlh5j#_[^]Y̋D$L$P̋D$L$|P̋L$t|ut u3̋D$̋D$SUVt"|utt3UOt$؅u%hhT4hbh3j+^]C[ÍH;L$~'hhT4hkh3j^][ÊWV3~WUHPVG;|_L$D^]3[Ul$}/h>hT4hah6j{D$(2]Vt$NW> ׃yJBt1hJhT4hah6j#D$0_^2]ÍO;}1hRhT4hah6jT$0_^2]S)؅tI$N =t9At 9u QSuM;t2hphT4h`h6j]D$4[_^2]ËL$QlVt$$3"~o It$UVS3 ~$d$VSP;Wt"SF;|D$PE;|L$T$ t S[_^3]̋D$|$}$hhT4hkh4jrËL$u$hhT4hqh4jA@RQP@D$ 3̃|$ t.hhT4hah7jD$$2ËD$P ʃthHS ـxt/hhT4h`h7j~T$(/[Ul$ WUu/hhT4hgh7j:D$02G_][VW3~#VW9Xt@WF;|hhT4hah7jT$4^_]2[ËL$ ^_]3[̋L$D$94t4RPRPQotP_~h h4hhjC3̋D$ v!hh4hhj 3ËL$PD$ PQ ̋D$ v!hh4hhj3ËL$PD$ PQf hh4j j 1D$L$hh4j j  hh4jj D$L$hh4jj ̸3ĉ$$V$W$ v_3^$3ËVPT$XRL$t$Xy |$u$s+PL4TjQj D$Lhh4j j HT$RQhh4j j$$(3_^3̋L$T$̋L$QpT$dL$T$W̋L$QpT$:L$T$-̋L$QpT$SVt$ FtN;u I$N@$F \$t ;t PFt!W|$;t9F t P~_^ ^[ËT$^ V^[̋D$@ ̸D$@ hPD$xt 
$QjjiPa$Y̸VD$@hPD$(t $QjjiP$YVW3Pu hh4jhj@_^ËD$PjjhVL$VVQ_^S\$ UVt$F 3Wt:P=u*F UUjiPh;uF PPV< _^E][TP"u"hh4jhj_^][SjjhWN WQV_^][S\$ UVt$F3Wt:P=u*FUUjiP;uFPPV _^E][Pru"hh4jhj_^][SjjhWbNQWV5_^][̋L$IX3t)D$ V;vƋT$ P4QRL ^̋L$IX3t)D$ V8;vƋT$ PQR ^̋D$̋D$HpL$D$̋D$L$̋D$̋L$D$L$ t̋L$QpT$JD$L$A`̋D$@`̋D$HD$QPD$Vtt u3^hEh4jFjPJ^̋D$tt t3VW|$Wd@t$PVCF ;Gt@ VЋGFHVуSt*hnh4j jP džt SJ6[ v!hh4hhjP_^PǠWƠV _^̋L$tPtF8t?xu!hh4hhj3ËHQR|h~h4hhj3̋L$uhh4jChj3Ëu!hh4hhjU3Ë8u!hh4hhj-3Ãxu!hh4hhj3ËHQR̋D$HQ`̋D$x u"hh4hhj@0t @3ËHD$A̋D$x u"hh4hhj[@0t3ËHD$A̋D$x u"hh4hhj ùH0t%hh4hhjHËHD$A ̋D$uH(HD$Q(̋D$u ǀ@(HD$Q(̋L$39ËL$tD$PVt$VQPBlЃ ^ËL$ T$Jd̋D$@̋L$A>v0#$"L$ËL$T$ ËD$L$ËT$ËL$T$ ËL$T$ AQËD$@ËL$T$ A Q ËD$@ ËL$QR ËD$@4ËL$A<ËT$B8ËD$@@ËL$AHËT$BDËD$@XËL$A\ËT$BLËD$@PËL$ATËD$T$ ËL$ D$!ËD$T$ ËL$ D$!ËD$ >wL$3ËD$Vt$Vt$VQPBDЃ^Ð^!!!!!!!!" """!")"U">!I!!!!!r!}!">"j"" ̋L$tD$Vt$VQPBpЃ ^ËL$ T$̋D$T$A+Bu3ɅL ̋L$tAtut@u3̋L$tAxut@u3̋D$uVptut1@t*t$VL$;~QVt@^3^ËD$PD$HQPRP`t+P^u!h h4hh jH3ø̋D$PD$HxQPtRQ1`t+Pu!h2h4hhj3ø̋D$U|$SVW|$U3~MSUuPƒp@u+ƍH;L$6 BFu+T$:UGC<;|D$G_^[]ËD$;tO_^[]3]S\$ u3[UVt$St$3W|$US@tсu;|3@ u@t tL$ VPtOQL҃SEl;|;t$t6u-D$ VhtЃ_+D$^][ËGHLу_+D$^][SUl$ EX3W;tEHLSSыD$ t$hh4hhj_]3[Vt$ ;t;tPD$o D$9\$~jt$EXt/u>u%|7u|7uuQǀ$UBHVЃtL$PQtI;\$|t$ tD$D$^_][hh4hYhjj(jU- hh4jAhjD$ t8u T$R^_]3[̃|$t3ËD$t uË ̋D$t u t3Ã̋L$SU3VW9l$ D$(t$d$3vo.$:uLLt.r;u2ste:uvZY:^u vMA:FtEL$$t$|D$(;r.l*;l$ yt$Q T$_^] [ËD$t$ (Q T$_^] [̋L$tT$u D$ xT$ ̋D$L$T$ ̋D$L$T$ ̋D$8}ËHQdR4D$̋D$V;t^ËND;HDuHWPHr;ustD:+u1v5Fz+u v$Fz+uvFJ+_^_3^̋L$AKQJ QIIH  ̋D$L$P2̋GtV0Pƅu^WYVt$Phoh4j F`jP.t P[~t jV?QVj2F t PF t PFt P Ft Pt P'thDP-th8Pdžt PLt PVt PQW\t`t}V_^̋D$L$Al̋D$L$Ap̋D$L$T$ HdPh̋D$L$T$ ̋L$T$̸<vS3Ul$H\$8\$@;D$LH EVW;u9]u 3ɉL${ ω|$9]u;tP;~\$$|$$E ;u 9]$u3ɉL$,ω|$,9]$u;tP:;~\$(|$(9](u \$D9],t|$D9]0E4;t~P|$#;Ɖ|$ ~\$ 9]tRL$DQT$PRP|$<@tHD$@WЍwctu L$P 9t$ L$ T$=u |$P 9t$ |$ T$|$8t |$|$DD$PL$_^t ɀ ED$ ˀ ] MEE][<ø SVt$FXD$ D$D@ HX WL$t4T$RVVvt$ |$jjWvG t 8tPSL$QT$RP=Ã@u G(t-G,u'hh4h=hj#_^3[ ËL$ t2Ёu=}thh4hC t7Ёu=}tthh4hBrD$@t G(tG,uhh4h>K_^[ Vt$FXDWQWdVXDH @`u@tD@ _^tD@ 
_^tD@ _^ètD@ _^èt39G0D@ _^è uGtD@ _^étD@ _^h) h4jDhj_3^̋D$@L$t&yLt T$ t V4@t82^@D4èty@tԃy4t3ʨ@t ypthI h4jDhj3UVt$ }DpSX W#|$tq~lukuUP27tUpx(tIh_ h4jjQpJ(PVуuR*xTF$ FL$A`ËL$T$ A`Q`ËD$L$AhËD$T$ ËL$ D$!ËD$T$ ËL$ D$!ËT$ËL$T$ ]T$ ;}3ËL$=t=uA\XËD$ >wˋT$ËD$@XtËD$PVt$Vt$VQPB@Ѓ^EE&EREDE~EEEE;EgEF    S\$VW33;u$hh4hhjV_^3[8 }/hh4h hj)hPh4jA@hh4h= ;hWVȸ~ F FP~~C`j,N4WQF$~(~,~0茸F`~d~hdž;6h+h*~l~p~t~x~|kF; RF ;;4t4PVR^SP=;Sԯ;h5蔲;uhh4hh5i;uhh4hh5>;uhh4hl薯;QVjAIj@R~*jP肯~(jQm V<@LPTBh%h4j džX 裮`;tA8`h+z`h4j xq \;u`QRhPh4jA68\z\x_^[hh4hhjɭ;t V_^3[Ul$ ~3]t%3=] Vt$ W~u0~ jW臱uNjWxu'jWiuH~uj~jWQt _^]jW9t _^]jW!t(W.u _^]Ã_^]ø9Ft.u%>u _^]F0tVXt_^]Vt$~ Wu$h h4hhjd_^ËFH,VуF4pt V V҃_^Vt$FW3F$~0F4`HVN (;t PŮ;t P謮_^Vt$FW3~$~0F4PHVN ;t Pi;t PP_^SVW|$P3ۃ;u_^3[ËONGFU9t WV,dP VҋGF@VЃ9t0;t PQ ;RPV G`F`OdNdWhVh;tWpR PFpPQRj}<G ;tN QSj P萪G;t%;G tVRSj PlF FONWVG F O$N$G(F(O,N,W0V0G4F4O8N8Np^DWlVlGpPQ蟮Gt;tPaFt;tGx;tPJFx;th;tiW0;tOW~GSWU]PSWOtWC;|]_^[UV3]_^[Vt$t P艫L$ tjQP֨ ^Vt$~ u V?FHVу^Vt$~ u VoFHVу^yVhh@5j j豫,}ihh@5j j蒫hh@5j j  , }jjjh(5j,hh@5j j E,hh@5j j),Vhh@5h臧 uhh@5jAhj*3^hjVF0 džFP腩FD}F8˪Ft^Vhh@5h uhh@5jAhj誦3^WhjVu|$+σ0*D @GFONW V GFONG tPԧWVGFG twP貧F uhh@5jO At!P聧uhh@5jbV BG @t!PVuhh@5j7N AW$V$G(PF(h h@5jhjxFt PF t PrF(t P\4Ft PHt PE u_3^ËG,SF,3ۍD[  8thh@5j 0jQ[L84tOh h@5j L04jQĤwS$Sh<h@5hhj聤CldžOFPFDF81[Ft_^âSS\$hoh@5j jPlCt P触C t PC(t PVWs4Ft Pӣt PУ uSѣ_^[Vt$uhh@5jChjo3^Ã>u*uhh@5jAhj@3^ø^Vhh@5j|Z uhh@5jAhj3^j|jV̪F F Fx^W|$hh@5jGxjP辢wth8PΤSVw $t P茢 uGl^[t P Gpt PdGtt PNWg_̋D$L$A̸薩W$WԡVjW躡$WPH QT$R]u&h h@5j hj萡^3_ĈVlPD$P ~$`5uT5PL$ QɦVpRD$ P貦P蹥tPL$ Q苦HdtPhRD$ Pу L$Q蟤T$lD$P耤^_Ĉ3_ĈSVW|$W3螠~3IVW臠P t"PS٠tWFk;|_^[hDS虢_^3[Vt$thDPpD$^ËL$ ^Vt$thDP0D$^ËL$ ^̋D$̋L$yu%=uIXtl3Ëu Vt>?u 蹟t.VդP蟤tVP菟u VR3^ø^ËL$W|$_̋L$W|$Ǡ|_̸vSUVWh0D$3EP D$PjjlUWWL$QUu 軞taT$RӣtkP薣t\VSV| KSpVWijjD$PU苣u/hh@5jAhjthDW 3t S蝝t U辟D$t P軝t_^][Yhh@5jAhjw̸S\$UVWh0SD$D$ˢP裠 uShh@5jAhj3t WD$t PL$QS萝_^][ËD$ PjjlWVtjjL$QW>t\T$R'tPxVSlV| 蛟 SjjD$PWu躜8̸ ֣3ĉ$S$U$VWh,h@53jj t$ t$$譟D$ SP舡;IˍqAu+΋ȍyAu+ύL1PSh5T$$hR,=D$PU L$SQz8tihHh@5Pj 
j5h5Shl5j譞hJh@5jhj 8(h7h@5hhjD$|$_^][t $R^hRh@5jj h$D$3腢 ̋D$Vhh5jj D$thh5jj ^̋D$L$T$ PD$ QL$ RPQj̋T$T$̋L$L$VWhh5h3 ;u hh5jAhj臙_3^hWVSWdž0PVj._^̋L$D$tPDH̋D$S\$ Ul$ VW|$3PS諘~QSU脷 t F r_^]3[à s_^][Vt$>hh5jjP[QVjVjRޘFj0PӘNHj QȘ$t P+t Pt P豗t Pt Pdžt Pȗdžt P諗t P蘗t P腗t PrhVV^ ^Vt$W|$A\RЃu5NA\RЃu#hh5hhjʖ_3^;FtPVth3h5jjQ觖t P_^ËtPdž;FtPV4b_^̋D$uËL$̋D$uËËD$uËËL$u3ËD$̋D$ v!hrh5hh8j蜕3ËL$PAhD$ PlQL ̋L$u3ËT$A$Q$̋D$uË@$̋D$uËL$T$ hl̋D$uËL$T$ `dVt$>\tPdž\W|$hGh5P֔ \u hh5jAh&ju_3^ËL$t+f8\PP\WQHQ _^3f\_P^3^ËD$t{tqVw;u%W;u?/w;uǀǀ^̃Wtt VUOGuO01_É0_̋D$L$A(̋D$@(̋D$L$A,̋D$L$̋D$̋D$L$At̋D$@tVt$ V[uhmh5j&h"j薒3^V)u(hrh5hKh"jgV#3^ËD$^̋D$L$Ax̋D$L$A|̋D$L$ T$PD$QRPh5hp詗̋D$L$ T$PD$QRPh5hp̋D$L$jjjjjPQh5h0U$̋D$L$jjjjjPQh5h0+$̸SUV`_3;u^]3[YW|$p@$;u WB;tP9\$mu FDK=u9=u.=u#=u=u =FD 9@t^DhZh5j j*;up;th_h5j jVDD$ P^HSWT$,Ճuhfh5h-L$_FD;Ts?u+PL1HjQɗ NDVDRSWƮ th~h5h.L3ۋ ;tPk;u h9Htj;t P谏Dhh5P蝏 ;uhh5jADDHRQP 9P;t P:Lhh5R' ;uhh5jAaLLPRQP| %hph5h/+hMh5h^D v)hh5jDhjcV_^]3[YPRFlP Nh_^][YU3W9nD9l$ thh5j j 蔑GSVP@؃;uOVQ~V |$thh5j j It!ǃG,tSWЃS[_]_3]SًCt;~:SWVR;VdžC, _tVSЃV[̋L$Vt$^ø 膔D$$@D$t\D$Vhh5j j D$lD$p L$Q@ T$ h kR`D$(hh5j j p 0,^ SUVt$Whh5jUP\hh5j j |$8OVQ؃,t;tSS3ۋttS3jjj+Wjjj+WʹjjjW迴 ;~qwtj~DtdWVR;uSGVPTVhG, tSWЃSoTjjj+W\jjjWN$;hh5j j _^][̋L$Vt$ j^W|$tEG0u?Wf0u/WV@uVj^_3_̸ 63ĉ$$SU$V$$3Ƀ W$ L$L$\$;uL$L$QPVUW@w!$q\$L$9D$X|$GL$GpB D$t$`VL$hUQ芑hh5j j_pHT$8RQ$D$thh5jSP膉hh5j jL$ pXLL$px0p$RV$@0UWЃD$JpX\$t L$hh5jSQp@ uT$RPGL$Ah;ilr'$];st6:]n;v"^:]Yv F:EHt/u+h>h5hhjL$$\$$$$$%=|OAH$RЋL$WJH$PыT$L$tjL$+9p}'XP|$ t$pSL$5XXt PD$-tQf|$u@|$t3$_^][3茎 ÍImnnnn̸D$ jPL$ QD$D$ 1 tPjjjT$jRD$t P$hFhFh6ڋhFˋhF輋hF譋hF螋h|F菋hhF耋h\FqhPFbh@FSh0FDh`6/P <$}hFhhE hX6P܅(}hEhhE超 hE过t)P蝅,}hEhhEw hE耇hE<t 0 h86QP6h0647P 8̋D$T$ +hhEjj=hhEjj螇hhEjj 苇 =j贉hstcVhhEj ԃ t8xFt8u V讃 @VQF讃R3^j4hhEjj ކhhEjjdž̸ 薊S\$Wu_3[ Vt$0tJD$ t,L$ QP蔂|PR膂t$ Ul$(G=H?%x$x3        =wWtE=t.=t=uw m ] M ==t= u)  -GH@way$x3! 
t$, Mt 0L$0t5$*t$,EtD$0t9_u3L$$}uP薄 t >T$ ȁ=uuhGuV9_u1u+hFtL$$E]^_[ Áu$uhF謅t T$$E]^_[ ]^_3[ ÍIuuuv#vv6vvww,w3ww%wfwL$w4HT$t L$ ËT$ 3ËD$;t8V;up1H tpqHtp q A H @^̋D$Ul$EM M h Gu hGu  uM39   Ƀ ȉ  Ƀ ȉ@  Ƀဃ ȉ%  Ɂ ȉ%  Ɂ ȉ%  Ɂ ȉ39 ]   Ƀ ȉ   Ƀ ȉ=t=<t3 ȉø ȉSUl$(W339\$Vu$D$HXS3Ƀ;t@9tVj"khhEj Ye~tiVPd|4V'ejjh#hEh5hjd^[_átVPetjj^[3_Vdjjh+hEjAhhEh3hjfd[3_ø_̋D$t@3̋D$@4= =M=tylVøLV-=]$,Vø VøUøUøUøUø|Uø\UøDJޕPV   ǖKQ͖Ӗٖߖ !'-39?E!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! !!!!!!!!!!!!!! !!!!!!!!!!!!!! !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! ̋D$@8-ttt4øVøVøVËD$@4= =)=t YøY-=$PYøYøYøYøYøYøYøYøYøYøYøxYøpYøhYø`YøXYøPYøHY=-=i$ @Yø8Yø0Yø(Yø YøYøYøYøYøXøXøXøXøXøXøXøXøXøXøXøXøXøX-tt XøXøxX=!- =$`pXøhXø`XøXXøPXøHXø@Xø8Xø0Xø(Xø XøXøXøXøXøWøWøWøWøWøW="-!=($WøWøWøWøWøWøWøWøWøWøxWøpWøhWø`WøXWøPWøHWø@Wø8Wø0Wø(Wø WøWøWøWøWøVøVøVøVøVøV="t=@tVøVøVÍIKQW]ciou{E;  IX^ "(.4:@FLRmsy;   ƞ̞Ҟ؞ޞ &,2;   I!ǟ͟ӟٟߟ msy;   ̋D$uYÃYt4̋D$uZÃYtY̋D$%s$<|ZøxZøtZøpZølZøhZødZø`Zø\ZøXZøTZøPZøLZøHZøDZø@Zø+]C^YW|$ uh-h\jChjM3_Vt$ ƘV'u h2h\jAhjM^3_S[^_̸UV3Wt$RPPu!hCh\jhjBM_^YËD$PjjlWM,hIh\jhj MW OD$ _^YËD$SUl$u HpPlQRjW RuHjWX Ru h^h\S3VUVD$L ][WND$ _^YhYh\j|hjcL][WbND$ _^Y̋L$D$ VQT$RD$D$jP9Ruhrh\j hjL3^ËL$ WVQV L _^SUl$ Vt$VK؃u$hh\h hjK^]3[SVK~}*hh\hhjgKSK^]3[ÍvW|4tPPSIQK8uHQ+Qu*D$RPOuQ,KfKSKTv Dtt PJ|$hh\jGjPJ>_u^E][Vt$ uhh\jChjJ3^S\$ ðSצu hh\jAhjHJ[3^W^Pu!hh\jhjJ_[3^VKVjW"P WJ_[^̸vQV3Wt$WOP/Mu!h0h\jhjI_^YËD$PjjlWJ,h6h\jhjkIWlKD$ _^YËD$S\$UujW kOuPKpSlQRjWhHOu hKh\U3VSaVD$K ][WJD$ _^YhFh\j|hjH][WJD$ _^Y̋L$ D$VQT$RjD$N uh_h\j hjuH3^ËD$WVPV K _^W|$ uhmh\jChj/H3_Vt$ ưV臤u hrh\jAhjG^3_S[^_̸vOV3Wt$WMP/Ku!hh\jhjG_^YËD$PjjlWH,hh\jhjkGWlID$ _^YËD$S\$UuKpSlQRjW EMuHjWh *Mu hh\U3VSVD$G ][WHD$ _^Yhh\j|hjF][WHD$ _^Y̋L$D$ VQT$RD$D$jPLuhh\j hjpF3^ËL$ WVQ VyF _^W|$ ujIh\jChj"F3_Vt$ ƘVzujNh\jAhjE^3_ËWP ^_̸fMV3Wt$GKPIuj_h\jhjE_^YËD$PjjlWF)jeh\jhjaEWbGD$ _^YËD$SUl$ujW mK!uvXHpPlQRjWJu*jzh\ShjD][WFD$ _^YVUVD$D][WFD$ _^Yjth\j|hjD][WFD$ _^Y̋D$ VPL$QjD uhh\j 
hjMD3^ËT$WVRVJD _^Vt$ uhh\jChjC3^S\$ ØSWu hh\jAhjC[3^WIu!hh\jhjC_[3^VEVjWIWC_[^̸JV3Wt$HPFu!hh\jhj"C_^YËD$PjjlWC,hh\jhjBWDD$ _^YËD$SUl$ujW H!uVXHpPlQRjWHu h h\S3VU[VD$#E ][WpDD$ _^Yhh\j|hjCB][WBDD$ _^Y̋L$ D$VQT$RjD$WV Dth8P@džFpNlPQjS Dt+WjjVngtCVpFlRPjSCuDȁ u%lu>|$W6>|$3t W#>_^S @][Y̸E3ĉ$|$U3V$D$l$0l$ l$8l$(l$4l$l$@l$uT$D$lT$D$|$lT$D$9t$T$D$D$|$$F$FDV$Fh$VH$F$Vl$$V$$$$$;t2P$$pR$$$YB;t2P$$R$$$B;t2P$$hR$$$A;t3xd$@u+ǿ$DŽ$$O;t$$ $$;~-P$$xR$$$CA;t'x@u+lj$,DŽ$0$4;t'x@u+lj$<DŽ$@$D;t'x@u+lj$LDŽ$P$TD$LUP@L$dUQ@T$|UR|@$UPl@$UQ\@(9nv$URD@9t!$UP/@jPjD$L@9t!$UQ@jPjD$j PjD$8>9t!$,UR>jPjD$P>9t!$<UP>jPjD$4>9t!$LUQx>j PjD$@d>jWjU> |$T$jjWL$jQD$$$>T$$RD$dP'>L$,QT$|R>D$4P$Q>T$D$\L$D;9PL$hL$$QT$$RD$$PL$TQ4؃yD$L;D$l !uT$\+T$DD$dLL$T$RD$HPV. u D$lp !u7D$\+D$DL$DD$dPQD$ v4uD$L?D$l4 T$dL$DD$\+‰D$\|$$|$,;9PD$$PL$lL$$QT$$RD$TP4؃yD$L;D$l !uL$\+L$DT$dDD$L$QT$HRD$P3 u D$l !u7D$\+D$DL$DD$dPQD$ 3uD$L?D$lN T$d+T$DT$\9|$,tCD$$ ~ D$L& EhD$$L$,PQUlR4D$8P,|$,|$$}hD$\|$4;L$D9PL$hL$$QT$$RD$$PL$TQ2؃yD$L;D$l!uT$\+T$DD$dLL$T$RD$HPL$Q2 u D$l\!u7D$\+D$DT$DD$dPRD$ b2uD$L?D$l D$d+D$DD$\9|$%D$`L$H|$$|$,;9PT$$RD$$L$lPL$$QT$TRJ+؃yD$L;D$lu!uD$\+D$DL$dTT$D$PL$HQT$R+ u D$lu!u7D$\+D$DD$dPD$D$HP*uD$L?D$luL$d+L$DD$,L$\;t(T$$RP*D$4P2$ |$,|$$L$DQQ*u D$l&D$xT$DD$t;t(_^][`D$LL$lT$LQh\Rjgj #$L$x+QP)tD$tt9(t U_^]3[`SVt$W|$h,_W)u4W=u4I=u4;=u4-=u4=u$_=_t4Ph_Wd) lu.ȁu %Ph^Rh^ @Ph^W) h^W)39^Dv"DHPh^W( C;^Drh^W(39^hv"LlQh^W( C;^hrhx^W(39^v"TRh^W[( cC;^rhd^WE(GFuh4HW((*)3ۅv#DPh^W' C;^rhP^W'u4HPhL^W' h0^W'u4HPhL^W' h^Wo'qu4HPhL^WC' KtPh]W"' *t7h]W ' jQRW&t]D$PjjjjVD$,D$,uQh]W& PJQRh]W|&tPh|]W[& ~gtPh`]W>& ~Jh\]W2&~8h@]W &~&P%PQh4]W%_^3[_^[V$P"ujah<_jhj3^ËD$WPjjjVyL$ QVV _^%P%%P%%P%%P%%P%%P% P%t%P%c%P%R%P|%A%Pq%0%Pf%%P[%%PP%$PE%$P:%@P$h`6hh5$h`6hh5$s P$hX6hh5$h_hhx_q$!Pl$ Pa$@q!PS$`!PH$m!P=$hl_hh\_#$hl_hhT_$hl_hhL_# !P#4辷 ̸djlh_j>3Ƀ ;ujoh_jAjvj 3ËT$HHH HHB J JVt$u3^W~ t Pi~t#~ t?t WfF Fv _t V^̸#3S\$D$D$;u[Ul$Vu W>jUD$,PSW.CSWEn$d Fv"^ 9F vFWF CD$~PNN;FWFCT$_^U][j UT$_^U][j UT$_^U][j U_D$ T$ ^U][j U_D$ T$ ^U][j 
UqD$T$_^U][ÍI>PPP≠&"3S\$D$D$;u[Ul$Vu W>jUD$,PSWBSWl$Fv"^ 9F vFWF KBD$~PNN;FWFBT$_^U][j UAT$_^U][j U%T$_^U][j U _D$ T$ ^U][j UD$T$_^U][ÍImD$SUVW|$o uu mt_^]3[ÍH~t$$V9eFN ;HuVl ;Hu VxlV^$tT$ D$L$RPQW _^][Ëv gT$ D$L$RPQV_^][_^3][Ã|$Vtfl_^][k_^][ËD$<]}jE_^E][ËD$=]_^E][Ë]_^][ÅtWWD$ T$O WP}8t.G$t PVhmh_jw$j,V{_ _^][ËD$ _0^][Ë__^][ËT$W_^][ËL$ T$QRPFP_^][V;؃N PPj Q{_^][jW{T$(D$$L$ RVPQRQW_^][ËG$;F PPV 7G$hh_j,jP _^][;|$ gF9F t P$thh_jj,WN3_FF ^][jWVGiSV\ht;t$j WO$QW_^][j Wi_^][j WW_^][Ë\$ C 8t P_V*jK W JC HW C J H W C JHW C JHS 39_^][ËL$ T$QRPF P_^][VVL$$_^][ËT$ L$RV QPR؃_^][ÐQk9iR,H     ̋D$H L$Vt^L$@ D$#L$QPLU^̋T$Vp@u+PD$ RP ^Whdu_ËD$VPZuW^3_Ã|$Vtg4gVjjmW^_̋D$VhPL$hQt)t%v >t@ 8tRP7^3^̋D$t9t@$uËP D$ _SP؃u[VWhdtJD$PYuWuSl_^3[VfVjjmWSWqu S>3_^[WPu_ËD$VPNtVWuWt V3^_̡PuhPh`jVt$W3;u_3^ËtPxQ|RPQR{PoQcRW$hdlpx|t_dž^Vt$W3;u_3^ËPQRPQRPQsRg$_dž^SVt$ 3W;=;/dhlpx;tP;|;tPh;;tPG;;tP&;tt;tP ;tW;tP;t:;tP;t;t!P;uhhHj1t;P;hhHjDh9j P QRPQRPQR$_^3[Ë_^[̋D$3;u3Édhlpx|tǀ̸4V3ĉD$0Vt$0$+[}" Yvu9 ,:HXjĚΚ֙™vlbXNKOW00pB>>":POpOOPB:: ?@?P??P> >0>0E5@?@?pB>>"5POpOOPB:: ?@?P??P> >0>00E5@?@?.\ssl\s2_srvr.cpB>>5:POpOOPB:: ?@?P??P> >0>0'0E5@?@?.\ssl\s2_clnt.cDES-CBC3-MD5DES-CBC-MD5IDEA-CBC-MD5EXP-RC2-CBC-MD5RC2-CBC-MD5EXP-RC4-MD5RC4-MD5SSLv2 part of OpenSSL 1.0.1c 10 May 2012A (A (A@!88.\ssl\s2_lib.cs->session->master_key_length >= 0 && s->session->master_key_length < (int)sizeof(s->session->master_key)error >= 0 && error <= (int)sizeof(buf)c->iv_len <= (int)sizeof(s->session->key_arg)s->s2->key_material_length <= sizeof s->s2->key_material.\ssl\s2_enc.c.\ssl\s2_pkt.cmac_size <= MAX_MAC_SIZEpp 0(0PpQ@5pp5 0(0PQ@5.\ssl\s3_srvr.cGOST signature length is %dpp5 
0(0P@@5.\ssl\s3_clnt.cSRVRCLNTECDH-RSA-AES256-GCM-SHA384ECDH-RSA-AES128-GCM-SHA256ECDHE-RSA-AES256-GCM-SHA384ECDHE-RSA-AES128-GCM-SHA256ECDH-ECDSA-AES256-GCM-SHA384ECDH-ECDSA-AES128-GCM-SHA256ECDHE-ECDSA-AES256-GCM-SHA384ECDHE-ECDSA-AES128-GCM-SHA256ECDH-RSA-AES256-SHA384ECDH-RSA-AES128-SHA256ECDHE-RSA-AES256-SHA384ECDHE-RSA-AES128-SHA256ECDH-ECDSA-AES256-SHA384ECDH-ECDSA-AES128-SHA256ECDHE-ECDSA-AES256-SHA384ECDHE-ECDSA-AES128-SHA256SRP-DSS-AES-256-CBC-SHASRP-RSA-AES-256-CBC-SHASRP-AES-256-CBC-SHASRP-DSS-AES-128-CBC-SHASRP-RSA-AES-128-CBC-SHASRP-AES-128-CBC-SHASRP-DSS-3DES-EDE-CBC-SHASRP-RSA-3DES-EDE-CBC-SHASRP-3DES-EDE-CBC-SHAAECDH-AES256-SHAAECDH-AES128-SHAAECDH-DES-CBC3-SHAAECDH-RC4-SHAAECDH-NULL-SHAECDHE-RSA-AES256-SHAECDHE-RSA-AES128-SHAECDHE-RSA-DES-CBC3-SHAECDHE-RSA-RC4-SHAECDHE-RSA-NULL-SHAECDH-RSA-AES256-SHAECDH-RSA-AES128-SHAECDH-RSA-DES-CBC3-SHAECDH-RSA-RC4-SHAECDH-RSA-NULL-SHAECDHE-ECDSA-AES256-SHAECDHE-ECDSA-AES128-SHAECDHE-ECDSA-DES-CBC3-SHAECDHE-ECDSA-RC4-SHAECDHE-ECDSA-NULL-SHAECDH-ECDSA-AES256-SHAECDH-ECDSA-AES128-SHAECDH-ECDSA-DES-CBC3-SHAECDH-ECDSA-RC4-SHAECDH-ECDSA-NULL-SHAADH-AES256-GCM-SHA384ADH-AES128-GCM-SHA256DH-DSS-AES256-GCM-SHA384DH-DSS-AES128-GCM-SHA256DHE-DSS-AES256-GCM-SHA384DHE-DSS-AES128-GCM-SHA256DH-RSA-AES256-GCM-SHA384DH-RSA-AES128-GCM-SHA256DHE-RSA-AES256-GCM-SHA384DHE-RSA-AES128-GCM-SHA256AES256-GCM-SHA384AES128-GCM-SHA256ADH-SEED-SHADHE-RSA-SEED-SHADHE-DSS-SEED-SHADH-RSA-SEED-SHADH-DSS-SEED-SHASEED-SHAPSK-AES256-CBC-SHAPSK-AES128-CBC-SHAPSK-3DES-EDE-CBC-SHAPSK-RC4-SHAADH-CAMELLIA256-SHADHE-RSA-CAMELLIA256-SHADHE-DSS-CAMELLIA256-SHADH-RSA-CAMELLIA256-SHADH-DSS-CAMELLIA256-SHACAMELLIA256-SHAGOST2001-NULL-GOST94GOST94-NULL-GOST94GOST2001-GOST89-GOST89GOST94-GOST89-GOST89ADH-AES256-SHA256ADH-AES128-SHA256DHE-RSA-AES256-SHA256DHE-DSS-AES256-SHA256DH-RSA-AES256-SHA256DH-DSS-AES256-SHA256DHE-RSA-AES128-SHA256ADH-CAMELLIA128-SHADHE-RSA-CAMELLIA128-SHADHE-DSS-CAMELLIA128-SHADH-RSA-CAMELLIA128-SHADH-DSS-CAMELLIA128-SHACAMELLIA128-SHADHE
-DSS-AES128-SHA256DH-RSA-AES128-SHA256DH-DSS-AES128-SHA256AES256-SHA256AES128-SHA256NULL-SHA256ADH-AES256-SHADHE-RSA-AES256-SHADHE-DSS-AES256-SHADH-RSA-AES256-SHADH-DSS-AES256-SHAAES256-SHAADH-AES128-SHADHE-RSA-AES128-SHADHE-DSS-AES128-SHADH-RSA-AES128-SHADH-DSS-AES128-SHAAES128-SHAADH-DES-CBC3-SHAADH-DES-CBC-SHAEXP-ADH-DES-CBC-SHAADH-RC4-MD5EXP-ADH-RC4-MD5EDH-RSA-DES-CBC3-SHAEDH-RSA-DES-CBC-SHAEXP-EDH-RSA-DES-CBC-SHAEDH-DSS-DES-CBC3-SHAEDH-DSS-DES-CBC-SHAEXP-EDH-DSS-DES-CBC-SHADH-RSA-DES-CBC3-SHADH-RSA-DES-CBC-SHAEXP-DH-RSA-DES-CBC-SHADH-DSS-DES-CBC3-SHADH-DSS-DES-CBC-SHAEXP-DH-DSS-DES-CBC-SHADES-CBC3-SHADES-CBC-SHAEXP-DES-CBC-SHAIDEA-CBC-SHARC4-SHANULL-SHANULL-MD5SSLv3 part of OpenSSL 1.0.1c 10 May 2012.\ssl\s3_lib.cCCCBBA.\ssl\s3_enc.cm.\ssl\s3_pkt.cmac_size >= 0SSL alert number %d.\ssl\s3_both.ci <= EVP_MAX_MD_SIZEIJIP7BE`FF55:(0EE5@E`E/0E5IJIP75E`FF55:(0EE5@E`E/0E5.\ssl\s23_srvr.cCONNECTPUT HEAD POST GET IJI5BE`FF55:(0EE5@E`E90E5.\ssl\s23_clnt.c.\ssl\s23_lib.cIJI 0(0PpH5IJI 0(0PpH5IJI 0(0PpH5IJI5 0(0PH5IJI5 0(0PH5IJI5 0(0PH5IJI5 0(0PPI5IJI5 0(0PPI5IJI5 0(0PPI5server finishedclient finishedTLSv1 part of OpenSSL 1.0.1c 10 May 2012.\ssl\t1_lib.c.\ssl\t1_enc.cchunk >= 0key expansionIV blockclient write keyserver write key%s:%d: rec->data != rec->input n >= 0t > 0t >= 0master secret `  @ P`АT5 `5  @ P`T5.\ssl\d1_srvr.c `5  @ P`T5.\ssl\d1_clnt.cDTLSv1 part of OpenSSL 1.0.1c 10 May 2012.\ssl\d1_lib.c.\ssl\d1_pkt.c0len <= SSL3_RT_MAX_PLAIN_LENGTH.\ssl\d1_both.c((long)msg_hdr->msg_len) > 0invalid state reached %s:%ds->d1->w_msg_hdr.msg_len + DTLS1_HM_HEADER_LENGTH == (unsigned int)s->init_nums->d1->w_msg_hdr.msg_len + ((s->version==DTLS1_VERSION)?DTLS1_CCS_HEADER_LENGTH:3) == (unsigned int)s->init_nums->init_off == 0len == (unsigned int)retlen >= DTLS1_HM_HEADER_LENGTHs->init_off > DTLS1_HM_HEADER_LENGTHs->init_num == (int)s->d1->w_msg_hdr.msg_len + DTLS1_HM_HEADER_LENGTHs->d1->mtu >= dtls1_min_mtu()retransmit: message %d non-existant dtls1_retransmit_message() failed 
.\ssl\d1_enc.cSRTP_AES128_CM_SHA1_32SRTP_AES128_CM_SHA1_80.\ssl\d1_srtp.cOpenSSL 1.0.1c 10 May 2012.\ssl\ssl_lib.cALL:!aNULL:!eNULL:!SSLv2SSLv2unknownSSLv3TLSv1TLSv1.1TLSv1.2s->sid_ctx_length <= sizeof s->sid_ctxssl3-sha1ssl3-md5ssl2-md5SSL for verify callback.\ssl\ssl_cert.cssl_serverssl_clientOPENSSL_DIR_read(&ctx, '')%s/%s.\ssl\ssl_sess.cSSL SESSION PARAMETERS%-23s %s Kx=%-8s Au=%-4s Enc=%-9s Mac=%-4s%s FIPSHIGHMEDIUMLOWEXPORT56EXPORT40EXPORTEXPSHA384SHA256GOST89MACGOST94SHASHA1MD5CAMELLIACAMELLIA256CAMELLIA128AESGCMAESAES256AES128eNULLSEEDIDEARC2RC43DESDESSRPPSKAECDHADHRSAKRB5NULLEECDHEDHaGOSTaGOST01aGOST94aPSKECDSAaECDSAaECDHaDHaNULLaKRB5DSSaDSSaRSAkGOSTkSRPkPSKECDHkEECDHkECDHkECDHekECDHrkKRB5DHkEDHkDHkDHdkDHrkRSACOMPLEMENTOFDEFAULTCOMPLEMENTOFALLALL77 777777777 7@7`7|7t7l7d7\7T7P7H7 @7<747,7@$7@77 77766 6 666666666666 6@6 6060|6p6d6`6X6T6L6@68606 444,6$66 66 6@55gost-macssl_mac_secret_size[SSL_MD_GOST94_IDX] >= 0md_gost94ssl_mac_secret_size[SSL_MD_SHA1_IDX] >= 0.\ssl\ssl_ciph.cssl_mac_secret_size[SSL_MD_MD5_IDX] >= 0id-aes256-GCMid-aes128-GCMSEED-CBCgost89-cntCAMELLIA-256-CBCCAMELLIA-128-CBCAES-256-CBCAES-128-CBCIDEA-CBCRC2-CBCDES-EDE3-CBCDES-CBCAES-256-CBC-HMAC-SHA1AES-128-CBC-HMAC-SHA1RC4-HMAC-MD5gost2001gost94STRENGTHDEFAULTBuffer too smallOPENSSL_malloc ErrorAEADAESGCM(128)AESGCM(256)SEED(128)AES(256)Camellia(128)Camellia(256)AES(128)IDEA(128)RC2(128)RC2(56)RC2(40)RC4(128)RC4(64)RC4(56)RC4(40)3DES(168)DES(56)DES(40)NoneECDH/ECDSAECDH/RSADH(1024)DH(512)DH/DSSDH/RSARSA(1024)RSA(512) exportTLSv1/SSLv3(NONE)before/connect initializationbefore/accept initializationunknown statebefore SSL initializationSSLv2/v3 read client hello BSSL renegotiate ciphersSSLv2/v3 read client hello ADTLS1 write hello verify request BDTLS1 write hello verify request ASSLv3 read certificate verify BSSLv3 read certificate verify ASSLv3 read client key exchange BSSLv3 read client key exchange ASSLv3 read client certificate BSSLv3 read client certificate ASSLv3 write server done BSSLv3 
write server done ASSLv3 write session ticket BSSLv3 write session ticket ASSLv3 write certificate request BSSLv3 write certificate request ASSLv3 write key exchange BSSLv3 write key exchange ASSLv3 write certificate BSSLv3 write certificate ASSLv3 write server hello BSSLv3 write server hello ASSLv3 write hello request CSSLv3 write hello request BSSLv3 write hello request ASSLv3 read client hello CSSLv3 read client hello BSSLv3 read finished BSSLv3 read finished ASSLv3 read change cipher spec BSSLv3 read change cipher spec ASSLv3 write finished BSSLv3 write finished ASSLv3 write change cipher spec BSSLv3 write change cipher spec ASSLv3 read client hello ASSLv3 flush dataSSLv2 X509 read server certificateSSLv2 write request certificate DSSLv2 write request certificate CSSLv2 write request certificate BSSLv2 write request certificate ASSLv2 write server finished BSSLv2 write server finished ASSLv2 read client finished BSSLv2 read client finished ASSLv2 write server verify CSSLv2 write server verify BSSLv2 write server verify ASSLv2 read client master key BSSLv2 read client master key ASSLv2 write server hello BSSLv2 write server hello ASSLv2 read client hello CSSLv2 read client hello BSSLv2 read client hello ASSLv2 server start encryptionok/accept SSL initializationbefore accept initializationSSLv2/v3 write client hello BSSLv2/v3 read server hello ASSLv2/v3 read server hello BSSLv2/v3 write client hello ADTLS1 read hello verify request BDTLS1 read hello verify request ASSLv3 write certificate verify BSSLv3 write certificate verify ASSLv3 write client key exchange BSSLv3 write client key exchange ASSLv3 write client certificate DSSLv3 write client certificate CSSLv3 write client certificate BSSLv3 write client certificate ASSLv3 read server done BSSLv3 read server done ASSLv3 read server session ticket BSSLv3 read server session ticket ASSLv3 read server certificate request BSSLv3 read server certificate request ASSLv3 read server key exchange BSSLv3 read server key 
exchange ASSLv3 read server certificate BSSLv3 read server certificate ASSLv3 read server hello BSSLv3 read server hello ASSLv3 write client hello BSSLv3 write client hello ASSLv2 X509 read client certificateSSLv2 read server finished BSSLv2 read server finished ASSLv2 read server verify BSSLv2 read server verify ASSLv2 write client certificate DSSLv2 write client certificate CSSLv2 write client certificate BSSLv2 write client certificate ASSLv2 write client finished BSSLv2 write client finished ASSLv2 write client master key BSSLv2 write client master key ASSLv2 read server hello BSSLv2 read server hello ASSLv2 write client hello BSSLv2 write client hello ASSLv2 client start encryptionok/connect SSL initializationbefore connect initializationSSL negotiation finished successfullyread headerread bodyread done23RCHBPINIT UNKWN 23RCHADWCHVBDWCHVA3RCV_B3RCV_A3RCKEB3RCKEA3RCC_B3RCC_A3WSD_B3WSD_A3WCR_B3WCR_A3WSKEB3WSKEA3WSC_B3WSC_A3WSH_B3WSH_A3RCH_C3RCH_B3WHR_C3WHR_B3WHR_A3RFINB3RFINA3RCCSB3RCCSA3WFINB3WFINA3WCCSB3WCCSA3RCH_A3FLUSH2X9GSC2SRC_D2SRC_C2SRC_B2SRC_A2SSF_B2SSF_A2GCF_B2GCF_A2SSV_C2SSV_B2SSV_A2GCMKA2SSH_B2SSH_A2GCH_C2GCH_B2GCH_A2SSENCAINIT 23WCHB23RSHA23WCHADRCHVBDRCHVA3WCV_B3WCV_A3WCKEB3WCKEA3WCC_D3WCC_C3WCC_B3WCC_A3RSD_B3RSD_A3RCR_B3RCR_A3RSKEB3RSKEA3RSC_B3RSC_A3RSH_B3RSH_A3WCH_B3WCH_A2X9GCC2GSF_B2GSF_A2GSV_B2GSV_A2SCC_D2SCC_C2SCC_B2SCC_A2SCF_B2SCF_A2SCMKB2SCMKA2GSH_B2GSH_A2SCH_B2SCH_A2CSENCCINIT SSLOK fatalwarningUFWUKUPBHBRUNCOUENRUSIEISPVERCYDEADCARODCIPCUCECRUCBCNCHFDFBMUMCNunknown PSK identitybad certificate hash valuebad certificate status responseunrecognized namecertificate unobtainableunsupported extensionno renegotiationuser canceledinternal errorinsufficient securityprotocol versionexport restrictiondecrypt errordecode erroraccess deniedunknown CArecord overflowdecryption failedillegal parametercertificate unknowncertificate expiredcertificate revokedunsupported certificatebad certificateno certificatehandshake failuredecompression failurebad record 
macunexpected_messageclose notifyRHRBRD.\ssl\ssl_rsa.c.\ssl\ssl_asn1.cos.length <= (int)sizeof(ret->session_id)%ld (%s) Verify return code: Timeout : %ld (sec) Start Time: %ld Compression: %d (%s) Compression: %d TLS session ticket: TLS session ticket lifetime hint: %ld (seconds) SRP username: PSK identity hint: %s PSK identity: Key-Arg : Master-Key: Session-ID-ctx: %02X Session-ID: Cipher : %s Cipher : %04lX Cipher : %06lX Protocol : %s DTLSv1-badDTLSv1SSL-Session: .\ssl\ssl_txt.cdss1DSS1DSA-SHA1-oldDSA-SHA1RSA-SHA1-2RSA-SHA1ssl.\ssl\bio_ssl.cx509 verification setup problemsx509 libwrong version numberwrong ssl versionwrong signature typewrong signature sizewrong signature lengthwrong number of key bitswrong message typewrong cipher returnedwrite bio not setuse srtp not negotiatedunsupported status typeunsupported ssl versionunsupported protocolunsupported elliptic curveunsupported digest typeunsupported compression algorithmunsupported cipherunsafe legacy renegotiation disabledunknown ssl versionunknown remote error typeunknown protocolunknown pkey typeunknown key exchange typeunknown digestunknown cipher typeunknown cipher returnedunknown certificate typeunknown alert typeuninitializedunexpected recordunexpected messageunable to load ssl3 sha1 routinesunable to load ssl3 md5 routinesunable to load ssl2 md5 routinesunable to find ssl methodunable to find public key parametersunable to find ecdh parametersunable to find dh parametersunable to extract public keyunable to decode ecdh certsunable to decode dh certstried to use unsupported ciphertls rsa encrypted value length is wrongtls peer did not respond with certificate listtls invalid ecpointformat listtls illegal exporter labelheartbeat request already pendingpeer does not accept heartbeartstls client cert req with anon ciphertlsv1 unsupported extensiontlsv1 unrecognized nametlsv1 certificate unobtainabletlsv1 bad certificate status responsetlsv1 bad certificate hash valuetlsv1 alert user cancelledtlsv1 alert 
unknown catlsv1 alert record overflowtlsv1 alert protocol versiontlsv1 alert no renegotiationtlsv1 alert internal errortlsv1 alert insufficient securitytlsv1 alert export restrictiontlsv1 alert decrypt errortlsv1 alert decryption failedtlsv1 alert decode errortlsv1 alert access deniedssl session id is differentssl session id has bad lengthssl session id context too longssl session id conflictssl session id callback failedssl library has no ciphersssl handshake failuressl ctx has no default ssl versionsslv3 alert unsupported certificatesslv3 alert unexpected messagesslv3 alert no certificatesslv3 alert illegal parametersslv3 alert handshake failuresslv3 alert decompression failuresslv3 alert certificate unknownsslv3 alert certificate revokedsslv3 alert certificate expiredsslv3 alert bad record macsslv3 alert bad certificatessl3 session id too shortssl3 session id too longssl3 ext invalid servername typessl3 ext invalid servernamessl3 ext invalid ecpointformatssl2 connection id too longssl23 doing session id reusesrtp unknown protection profilesrtp protection profile list too longsrtp could not allocate profileserror with the srp paramssignature for non signing certificatesignature algorithms errorshort readsession id context uninitializedserverhello tlsextscsv received when renegotiatingreuse cipher list not zeroreuse cert type not zeroreuse cert length not zerorequired compresssion algorithm missingrequired cipher missingrenegotiation mismatchrenegotiation encoding errrenegotiate ext too longrecord too smallrecord too largerecord length mismatchread wrong packet typeread timeout expiredread bio not setpublic key not rsapublic key is not rsapublic key encrypt errorpsk no server cbpsk no client cbpsk identity not foundprotocol is shutdownproblems mapping cipher functionspre mac length too longpeer error unsupported certificate typepeer error no cipherpeer error no certificatepeer error certificatepeer errorpeer did not return a certificatepath too longparse 
tlsextpacket length too longopaque PRF input too longonly tls allowed in fips modeold session compression algorithm not returnedold session cipher not returnednull ssl method passednull ssl ctxno verify callbackno srtp profilesno shared cipherdigest requred for handshake isn't computedno publickeyno protocols availableno private key assignedno privatekeyno method specifiedPeer haven't sent GOST certificate, required for selected ciphersuiteno compression specifiedno client cert receivedno client cert methodno cipher matchno cipher listno ciphers specifiedno ciphers passedno ciphers availableno certificate specifiedno certificate setno certificate returnedno certificate assignedno certificates returnednon sslv2 initial packetmultiple sgc restartsmissing verify messagemissing tmp rsa pkeymissing tmp rsa keymissing tmp ecdh keymissing tmp dh keycan't find SRP server parammissing rsa signing certmissing rsa encrypting certmissing rsa certificatemissing export tmp rsa keymissing export tmp dh keymissing dsa signing certmissing dh rsa certmissing dh keymissing dh dsa certmessage too longlibrary has no cipherslibrary buglength too shortlength mismatchkrb5 server tkt skewkrb5 server tkt not yet validkrb5 server tkt expiredkrb5 server rd_req (keytab perms?)krb5 server initkrb5 server bad ticketkrb5 client mk_req (expired tkt?)krb5 client initkrb5 client get credkrb5 client cc principal (no tkt?)krb5key arg too longinvalid trustinvalid ticket keys lengthinvalid status responseinvalid srp usernameinvalid purposeinvalid compression algorithminvalid commandinvalid challenge lengthinconsistent compressionillegal paddinghttp requesthttps proxy requestgot next proto without seeing extensiongot next proto before a ccsgot a fin before a ccsextra data in messageexcessive message sizeerror in received cipher listerror generating tmp rsa keyencrypted length too longempty srtp protection profile listecgroup too large for cipherecc cert should have sha1 signatureecc cert should have rsa 
signatureecc cert not for signingecc cert not for key agreementduplicate compression iddtls message too bigdigest check faileddh public value length is wrongdecryption failed or bad record macdata length too longdata between ccs and finishedcookie mismatchconnection type not setconnection id is differentcompression library errorcompression id not within private rangecompression failurecompression disabledcompressed length too longclienthello tlsextcipher table src errorcipher or hash unavailablecipher code wrong lengthchallenge is differentcert length mismatchcertificate verify failedccs received earlyca dn too longca dn length mismatchbn libblock cipher pad is wrongbio not setbad write retrybad statebad ssl session id lengthbad ssl filetypebad srtp protection profile listbad srtp mki valuebad srp s lengthbad srp n lengthbad srp g lengthbad srp b lengthbad srp a lengthbad signaturebad rsa signaturebad rsa modulus lengthbad rsa e lengthbad rsa encryptbad rsa decryptbad response argumentbad psk identity hint lengthbad protocol version numberbad packet lengthbad message typebad mac lengthbad mac decodebad lengthbad hello requestbad handshake lengthbad ecpointbad ecdsa signaturebad ecc certbad dsa signaturebad digest lengthbad dh p lengthbad dh pub key lengthbad dh g lengthbad decompressionbad data returned by callbackbad checksumbad change cipher specbad authentication typebad alert recordattempt to reuse session in different contextapp data in 
handshakeWRITE_PENDINGTLS1_SETUP_KEY_BLOCKtls1_prfTLS1_PREPARE_SERVERHELLO_TLSEXTTLS1_PREPARE_CLIENTHELLO_TLSEXTSSL_F_TLS1_HEARTBEATTLS1_EXPORT_KEYING_MATERIALTLS1_ENCTLS1_CHECK_SERVERHELLO_TLSEXTTLS1_CHANGE_CIPHER_STATEtls1_cert_verify_macSSL_writeSSL_VERIFY_CERT_CHAINSSL_use_RSAPrivateKey_fileSSL_use_RSAPrivateKey_ASN1SSL_use_RSAPrivateKeySSL_use_psk_identity_hintSSL_use_PrivateKey_fileSSL_use_PrivateKey_ASN1SSL_use_PrivateKeySSL_use_certificate_fileSSL_use_certificate_ASN1SSL_use_certificateSSL_UNDEFINED_VOID_FUNCTIONSSL_UNDEFINED_FUNCTIONSSL_UNDEFINED_CONST_FUNCTIONSSL_SRP_CTX_initSSL_shutdownSSL_set_wfdSSL_set_trustSSL_set_session_ticket_extSSL_set_session_id_contextSSL_set_sessionSSL_set_rfdSSL_set_purposeSSL_SET_PKEYSSL_set_fdSSL_set_cipher_listSSL_SET_CERTSSL_SESS_CERT_NEWSSL_SESSION_set1_id_contextSSL_SESSION_print_fpSSL_SESSION_newSSL_RSA_PUBLIC_ENCRYPTSSL_RSA_PRIVATE_DECRYPTSSL_readSSL_PREPARE_SERVERHELLO_TLSEXTSSL_PREPARE_CLIENTHELLO_TLSEXTSSL_peekSSL_PARSE_SERVERHELLO_USE_SRTP_EXTSSL_PARSE_SERVERHELLO_TLSEXTSSL_PARSE_SERVERHELLO_RENEGOTIATE_EXTSSL_PARSE_CLIENTHELLO_USE_SRTP_EXTSSL_PARSE_CLIENTHELLO_TLSEXTSSL_PARSE_CLIENTHELLO_RENEGOTIATE_EXTSSL_newSSL_load_client_CA_fileSSL_INIT_WBIO_BUFFERSSL_GET_SIGN_PKEYSSL_GET_SERVER_SEND_CERTSSL_GET_PREV_SESSIONSSL_GET_NEW_SESSIONSSL_do_handshakeSSL_CTX_use_RSAPrivateKey_fileSSL_CTX_use_RSAPrivateKey_ASN1SSL_CTX_use_RSAPrivateKeySSL_CTX_use_psk_identity_hintSSL_CTX_use_PrivateKey_fileSSL_CTX_use_PrivateKey_ASN1SSL_CTX_use_PrivateKeySSL_CTX_use_certificate_fileSSL_CTX_use_certificate_chain_fileSSL_CTX_use_certificate_ASN1SSL_CTX_use_certificateSSL_CTX_set_trustSSL_CTX_set_ssl_versionSSL_CTX_set_session_id_contextSSL_CTX_set_purposeSSL_CTX_set_client_cert_engineSSL_CTX_set_cipher_listSSL_CTX_newSSL_CTX_MAKE_PROFILESSSL_CTX_check_private_keySSL_ctrlSSL_CREATE_CIPHER_LISTSSL_COMP_add_compression_methodSSL_clearSSL_CIPHER_STRENGTH_SORTSSL_CIPHER_PROCESS_RULESTRSSL_CHECK_SRVR_ECC_CERT_AND_ALGSSL_CHECK_SERVERHELLO_TLSEXTS
SL_check_private_keySSL_CERT_NEWSSL_CERT_INSTANTIATESSL_CERT_INSTSSL_CERT_DUPSSL_BYTES_TO_CIPHER_LISTSSL_BAD_METHODSSL_ADD_SERVERHELLO_USE_SRTP_EXTSSL_ADD_SERVERHELLO_TLSEXTSSL_ADD_SERVERHELLO_RENEGOTIATE_EXTSSL_add_file_cert_subjects_to_stackSSL_add_dir_cert_subjects_to_stackSSL_ADD_CLIENTHELLO_USE_SRTP_EXTSSL_ADD_CLIENTHELLO_TLSEXTSSL_ADD_CLIENTHELLO_RENEGOTIATE_EXTSSL3_WRITE_PENDINGSSL3_WRITE_BYTESSSL3_SETUP_WRITE_BUFFERSSL3_SETUP_READ_BUFFERSSL3_SETUP_KEY_BLOCKSSL3_SEND_SERVER_KEY_EXCHANGESSL3_SEND_SERVER_HELLOSSL3_SEND_SERVER_CERTIFICATESSL3_SEND_CLIENT_VERIFYSSL3_SEND_CLIENT_KEY_EXCHANGESSL3_SEND_CLIENT_CERTIFICATESSL3_SEND_CERTIFICATE_REQUESTSSL3_READ_NSSL3_READ_BYTESSSL3_PEEKSSL3_OUTPUT_CERT_CHAINSSL3_NEW_SESSION_TICKETssl3_handshake_macSSL3_GET_SERVER_HELLOSSL3_GET_SERVER_DONESSL3_GET_SERVER_CERTIFICATESSL3_GET_RECORDSSL3_GET_NEXT_PROTOSSL3_GET_NEW_SESSION_TICKETSSL3_GET_MESSAGESSL3_GET_KEY_EXCHANGESSL3_GET_FINISHEDSSL3_GET_CLIENT_KEY_EXCHANGESSL3_GET_CLIENT_HELLOSSL3_GET_CLIENT_CERTIFICATESSL3_GET_CERT_VERIFYSSL3_GET_CERT_STATUSSSL3_GET_CERTIFICATE_REQUESTSSL3_GENERATE_KEY_BLOCKSSL3_ENCSSL3_DO_CHANGE_CIPHER_SPECSSL3_DIGEST_CACHED_RECORDSSSL3_CTX_CTRLSSL3_CTRLSSL3_CONNECTSSL3_CLIENT_HELLOSSL3_CHECK_CLIENT_HELLOSSL3_CHECK_CERT_AND_ALGORITHMSSL3_CHANGE_CIPHER_STATESSL3_CALLBACK_CTRLSSL3_ADD_CERT_TO_BUFSSL3_ACCEPTSSL2_WRITESSL2_SET_CERTIFICATESSL2_READ_INTERNALSSL2_READSSL2_PEEKSSL2_GENERATE_KEY_MATERIALSSL2_ENC_INITSSL2_CONNECTSSL2_ACCEPTSSL23_WRITESSL23_READSSL23_PEEKSSL23_GET_SERVER_HELLOSSL23_GET_CLIENT_HELLOSSL23_CONNECTSSL23_CLIENT_HELLOSSL23_ACCEPTSERVER_VERIFYSERVER_HELLOSERVER_FINISHREQUEST_CERTIFICATEREAD_Ni2d_SSL_SESSIONGET_SERVER_VERIFYGET_SERVER_HELLOGET_SERVER_FINISHEDGET_CLIENT_MASTER_KEYGET_CLIENT_HELLOGET_CLIENT_FINISHEDDTLS1_WRITE_APP_DATA_BYTESDTLS1_SEND_SERVER_KEY_EXCHANGEDTLS1_SEND_SERVER_HELLODTLS1_SEND_SERVER_CERTIFICATEDTLS1_SEND_HELLO_VERIFY_REQUESTDTLS1_SEND_CLIENT_VERIFYDTLS1_SEND_CLIENT_KEY_EXCHANGEDTLS1_SEND_CLIENT_CERTIFICATEDTLS1
_SEND_CERTIFICATE_REQUESTDTLS1_READ_FAILEDDTLS1_READ_BYTESDTLS1_PROCESS_RECORDDTLS1_PROCESS_OUT_OF_SEQ_MESSAGEDTLS1_PREPROCESS_FRAGMENTDTLS1_OUTPUT_CERT_CHAINDTLS1_HEARTBEATDTLS1_HANDLE_TIMEOUTDTLS1_GET_RECORDDTLS1_GET_MESSAGE_FRAGMENTDTLS1_GET_MESSAGEDTLS1_GET_HELLO_VERIFYDTLS1_ENCDTLS1_CONNECTDTLS1_CLIENT_HELLODTLS1_CHECK_TIMEOUT_NUMDTLS1_BUFFER_RECORDDTLS1_ADD_CERT_TO_BUFDTLS1_ACCEPTDO_SSL3_WRITEDO_DTLS1_WRITEd2i_SSL_SESSIONCLIENT_MASTER_KEYCLIENT_HELLOCLIENT_FINISHEDCLIENT_CERTIFICATE.\ssl\tls_srp.c.\ssl\t1_reneg.c!expected_len || s->s3->previous_server_finished_len!expected_len || s->s3->previous_client_finished_len@HRSDSk|RB0ͱrD:\CFILES\Projects\WinSSL\openssl-1.0.1c\out32dll\ssleay32.pdbauq@DȘʙl\F&DXl֛, % F k !U4! x C N "Ym'8^ +x noj o   bN.q < l O{vBzR{ ; o Mb= <] /m <k DAWZ  c HI0 N_yx39noRR   4 U 0N` x?%  ~ B V CA5b G | rz l:vt2 m u|Jw?L ]D I porqwy{;{: %#86Ct ' z VXM%J0 S EI>0$+[}" Yvu9 ,:HXjĚΚ֙™vlbXNLIBEAY32.dll_time32&memcpy*memset(memmove[strncpyfprintf__iob_funcZstrncmp_ftime32Nstrchrp_errnoabortMSVCR90.dllj_encode_pointer_malloc_crtfreek_encoded_null`_decode_pointer_initterm_initterm_e_amsg_exit _adjust_fdivj__CppXcptFilterK_crt_debugger_hook__clean_type_info_names_internal_unlock__dllonexitv_lock_onexits_except_handler4_commonsSetLastErrorGetLastErrorInterlockedExchangeSleepInterlockedCompareExchangeTerminateProcessGetCurrentProcessUnhandledExceptionFilterSetUnhandledExceptionFilterIsDebuggerPresentDisableThreadLibraryCallsQueryPerformanceCounterGetTickCountGetCurrentThreadIdGetCurrentProcessIdyGetSystemTimeAsFileTimeKERNEL32.dllKOƨj0` Yk !0k`+PX@]Fm0$X,-Яp@p@` ж_`^OY`00>O DL`W0B6@$#`X@pJP%P5!@YPp?P` K$WPLa 5PD0P E/9>  'Q0p0pK670@0b@b bb:8p9`9:p8@^ ^80:8]89:9e89899P:]9H IIpP0;:bb::PZ;P;[@ -p ,, 0Pp]p# !` ==p;;Г07P70_d88e0ee@ee eePe88p((`e;P=`f=`=@<@=0ffp= =f@cbc` `H@IHP0p`0Pp`p`=0e0I`bH=9PPH0*0))(P*P_Өݨ0AVdyҩAXl~ɪ&<Pfȫ+E`t߬2Me}ӭ 'Ba7Qjѯ ?VzŰ %9Uqѱ9SrƲ$5Kgijܳ/G\tִ+A\j|ŵֵ޵ 1BRfzԶ$G\gxշ.;NZxʸ۸ !7Jbwʹ&.7CL\xϺ޺#:P^nyܻ2>Niͼ#;Smνڽ %;Nf~ʾ޾*6?I^lʿ׿&<Kauxy{ 
,-0'KNL\MI  5$ hb&.G;CDH=Ee %Si !UQJ"#$%&'W()*+,|-.`/c0367~89:;<}/(>?d"#@A<>BgADEFGHIJKLMN7OPf4!QRSTURV +)WXY213?@Z[a ]^_O`abcdefghij*kmnopqrstu98:T]Vvw SSLEAY32.dllBIO_f_sslBIO_new_buffer_ssl_connectBIO_new_sslBIO_new_ssl_connectBIO_ssl_copy_session_idBIO_ssl_shutdownDTLSv1_client_methodDTLSv1_methodDTLSv1_server_methodERR_load_SSL_stringsPEM_read_SSL_SESSIONPEM_read_bio_SSL_SESSIONPEM_write_SSL_SESSIONPEM_write_bio_SSL_SESSIONSRP_Calc_A_paramSRP_generate_client_master_secretSRP_generate_server_master_secretSSL_CIPHER_descriptionSSL_CIPHER_get_bitsSSL_CIPHER_get_idSSL_CIPHER_get_nameSSL_CIPHER_get_versionSSL_COMP_add_compression_methodSSL_COMP_get_compression_methodsSSL_COMP_get_nameSSL_CTX_SRP_CTX_freeSSL_CTX_SRP_CTX_initSSL_CTX_add_client_CASSL_CTX_add_sessionSSL_CTX_callback_ctrlSSL_CTX_check_private_keySSL_CTX_ctrlSSL_CTX_flush_sessionsSSL_CTX_freeSSL_CTX_get_cert_storeSSL_CTX_get_client_CA_listSSL_CTX_get_client_cert_cbSSL_CTX_get_ex_dataSSL_CTX_get_ex_new_indexSSL_CTX_get_info_callbackSSL_CTX_get_quiet_shutdownSSL_CTX_get_timeoutSSL_CTX_get_verify_callbackSSL_CTX_get_verify_depthSSL_CTX_get_verify_modeSSL_CTX_load_verify_locationsSSL_CTX_newSSL_CTX_remove_sessionSSL_CTX_sess_get_get_cbSSL_CTX_sess_get_new_cbSSL_CTX_sess_get_remove_cbSSL_CTX_sess_set_get_cbSSL_CTX_sess_set_new_cbSSL_CTX_sess_set_remove_cbSSL_CTX_sessionsSSL_CTX_set1_paramSSL_CTX_set_cert_storeSSL_CTX_set_cert_verify_callbackSSL_CTX_set_cipher_listSSL_CTX_set_client_CA_listSSL_CTX_set_client_cert_cbSSL_CTX_set_client_cert_engineSSL_CTX_set_cookie_generate_cbSSL_CTX_set_cookie_verify_cbSSL_CTX_set_default_passwd_cbSSL_CTX_set_default_passwd_cb_userdataSSL_CTX_set_default_verify_pathsSSL_CTX_set_ex_dataSSL_CTX_set_generate_session_idSSL_CTX_set_info_callbackSSL_CTX_set_msg_callbackSSL_CTX_set_next_proto_select_cbSSL_CTX_set_next_protos_advertised_cbSSL_CTX_set_psk_client_callbackSSL_CTX_set_psk_server_callbackSSL_CTX_set_purposeSSL_CTX_set_quiet_shutdownSSL_CTX_set_session_id_contextSSL_CTX_set_s
rp_cb_argSSL_CTX_set_srp_client_pwd_callbackSSL_CTX_set_srp_passwordSSL_CTX_set_srp_strengthSSL_CTX_set_srp_usernameSSL_CTX_set_srp_username_callbackSSL_CTX_set_srp_verify_param_callbackSSL_CTX_set_ssl_versionSSL_CTX_set_timeoutSSL_CTX_set_tlsext_use_srtpSSL_CTX_set_tmp_dh_callbackSSL_CTX_set_tmp_ecdh_callbackSSL_CTX_set_tmp_rsa_callbackSSL_CTX_set_trustSSL_CTX_set_verifySSL_CTX_set_verify_depthSSL_CTX_use_PrivateKeySSL_CTX_use_PrivateKey_ASN1SSL_CTX_use_PrivateKey_fileSSL_CTX_use_RSAPrivateKeySSL_CTX_use_RSAPrivateKey_ASN1SSL_CTX_use_RSAPrivateKey_fileSSL_CTX_use_certificateSSL_CTX_use_certificate_ASN1SSL_CTX_use_certificate_chain_fileSSL_CTX_use_certificate_fileSSL_CTX_use_psk_identity_hintSSL_SESSION_freeSSL_SESSION_get0_peerSSL_SESSION_get_compress_idSSL_SESSION_get_ex_dataSSL_SESSION_get_ex_new_indexSSL_SESSION_get_idSSL_SESSION_get_timeSSL_SESSION_get_timeoutSSL_SESSION_newSSL_SESSION_printSSL_SESSION_print_fpSSL_SESSION_set1_id_contextSSL_SESSION_set_ex_dataSSL_SESSION_set_timeSSL_SESSION_set_timeoutSSL_SRP_CTX_freeSSL_SRP_CTX_initSSL_acceptSSL_add_client_CASSL_add_dir_cert_subjects_to_stackSSL_add_file_cert_subjects_to_stackSSL_alert_desc_stringSSL_alert_desc_string_longSSL_alert_type_stringSSL_alert_type_string_longSSL_cache_hitSSL_callback_ctrlSSL_check_private_keySSL_clearSSL_connectSSL_copy_session_idSSL_ctrlSSL_do_handshakeSSL_dupSSL_dup_CA_listSSL_export_keying_materialSSL_freeSSL_get0_next_proto_negotiatedSSL_get1_sessionSSL_get_SSL_CTXSSL_get_certificateSSL_get_cipher_listSSL_get_ciphersSSL_get_client_CA_listSSL_get_current_cipherSSL_get_current_compressionSSL_get_current_expansionSSL_get_default_timeoutSSL_get_errorSSL_get_ex_dataSSL_get_ex_data_X509_STORE_CTX_idxSSL_get_ex_new_indexSSL_get_fdSSL_get_finishedSSL_get_info_callbackSSL_get_peer_cert_chainSSL_get_peer_certificateSSL_get_peer_finishedSSL_get_privatekeySSL_get_psk_identitySSL_get_psk_identity_hintSSL_get_quiet_shutdownSSL_get_rbioSSL_get_read_aheadSSL_get_rfdSSL_get_selected_srtp_profileS
SL_get_servernameSSL_get_servername_typeSSL_get_sessionSSL_get_shared_ciphersSSL_get_shutdownSSL_get_srp_NSSL_get_srp_gSSL_get_srp_userinfoSSL_get_srp_usernameSSL_get_srtp_profilesSSL_get_ssl_methodSSL_get_verify_callbackSSL_get_verify_depthSSL_get_verify_modeSSL_get_verify_resultSSL_get_versionSSL_get_wbioSSL_get_wfdSSL_has_matching_session_idSSL_library_initSSL_load_client_CA_fileSSL_load_error_stringsSSL_newSSL_peekSSL_pendingSSL_readSSL_renegotiateSSL_renegotiate_abbreviatedSSL_renegotiate_pendingSSL_rstate_stringSSL_rstate_string_longSSL_select_next_protoSSL_set1_paramSSL_set_SSL_CTXSSL_set_accept_stateSSL_set_bioSSL_set_cipher_listSSL_set_client_CA_listSSL_set_connect_stateSSL_set_debugSSL_set_ex_dataSSL_set_fdSSL_set_generate_session_idSSL_set_info_callbackSSL_set_msg_callbackSSL_set_psk_client_callbackSSL_set_psk_server_callbackSSL_set_purposeSSL_set_quiet_shutdownSSL_set_read_aheadSSL_set_rfdSSL_set_sessionSSL_set_session_id_contextSSL_set_session_secret_cbSSL_set_session_ticket_extSSL_set_session_ticket_ext_cbSSL_set_shutdownSSL_set_srp_server_paramSSL_set_srp_server_param_pwSSL_set_ssl_methodSSL_set_stateSSL_set_tlsext_use_srtpSSL_set_tmp_dh_callbackSSL_set_tmp_ecdh_callbackSSL_set_tmp_rsa_callbackSSL_set_trustSSL_set_verifySSL_set_verify_depthSSL_set_verify_resultSSL_set_wfdSSL_shutdownSSL_srp_server_param_with_usernameSSL_stateSSL_state_stringSSL_state_string_longSSL_use_PrivateKeySSL_use_PrivateKey_ASN1SSL_use_PrivateKey_fileSSL_use_RSAPrivateKeySSL_use_RSAPrivateKey_ASN1SSL_use_RSAPrivateKey_fileSSL_use_certificateSSL_use_certificate_ASN1SSL_use_certificate_fileSSL_use_psk_identity_hintSSL_versionSSL_wantSSL_writeSSLv23_client_methodSSLv23_methodSSLv23_server_methodSSLv2_client_methodSSLv2_methodSSLv2_server_methodSSLv3_client_methodSSLv3_methodSSLv3_server_methodTLSv1_1_client_methodTLSv1_1_methodTLSv1_1_server_methodTLSv1_2_client_methodTLSv1_2_methodTLSv1_2_server_methodTLSv1_client_methodTLSv1_methodTLSv1_server_methodd2i_SSL_SESSIONi2d_SSL_SESSIO
Nssl2_ciphersssl3_ciphers( 0( 0 0(A0(A0 0('A0' 0(8' !088' 0'  0(8' !088' 0l' 0(8X'!088D'0,' 0(8'!088'0& 0(8&!088&0& 0(&A0& 0(|&!088h&0\&/@0H&0@04&1@0 &2@0 &3@0%4@0%50%60%70%80%90%:0%; 0t%<@0d%=0L%>@04%?@0%@@0 %A0$B0$C0$D0$E0$F0$g@0h$h0P$i08$j0 $k0 $l@0#m0#D#D# @# @#0t#0\#0D#0,#0#0 #A0"0"@0"0"A0"A0"A0"A0x"A0h"A0T"@@" @$"@" @!@! @!@! @|!@`! @H!@0! @!@ 0!@A0 @0 @@0 @0 @ 0 @A0x @0`  @@0H  @04    0   A0   0 @0 0 0A00@0p0` 0PA0<0(@00000@0@0@0l 0T!0<"0 #@@$@ %@@&@ '@( )@@l*@ L+@@,,@ @ -@@.@ @/@0 @1@@|2@ @p@PP$tl5666666666666666666666666666666666666666666666666\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\d(`(\(P0P0zp ..@t 0P0zp ..?<4$4d4555555555WWWWW @5_@p@4p $P`pPА`pp|hXL4 ؏ȏpX D00@P`Ўpt\<  ̍xh XH08@$P`p،̌xl`L4(ԋ 0@xP lP\P@@$`pȊ|\H0 ܉   |d0 L@@ 0P $` p  Ĉ   t T < $0   P0p \ 8`@Ԇ Ć   ` p `0 H(p`pЅ@ ąP `  dPL @(   Ԅ P  p L ,   ܃  0 `@ LP 8  ` p  ؂  p`L$pԁ  p X H 0 ܀ Ѐ 0 0  x`\@L@ @00P @` p  | h P 8  ~ ~ ~ ~~ t~ T~ H~,~~0}@}}0 }@ }d}X}eD}f,}g}h}j|k|l|m|n|o|pp|0`|1L|2@|L(|i||q{M{r{s{t{<{ul{v\{wL{x8{y {z {{z[z\z]z^z_z`za`z|Lz}0z~$zzzyyyyyyty\y@y$y yxxWxx3xlxPx8x4(xxw[wwwNw5dw=Dw>(wBwCv6vbvv`v@v(vvucuduuuuTduHu8uUuuetHtEttttltTt @t!t"t#s$s%s&s'|slsXsLs4s( s srrrrr|r`rDrf(rr7qqqqZqqhqPq8q$qqpppppKptpXpJpoooooS[DologXoDo4oonXn)nGnxnhnXn4n(nnmmmmxm`mHm4m mmlll8ll|lhl*TlO8lPlQlkVkkkpkYLk8kkkhjjijjjk\jl PAPADDINGXXPADDINGPADDINGXXPADDINGPADDINGXXPADDINGPADDINGXXPADDINGPADDINGXXPADDINGPADDINGXXPADDINGPADDINGXXPADDINGPADDINGXXPADDINGPADDINGXXPADDINGPADDINGXXPADDINGPADDINGXXPADDINGPADDINGXXPADDINGPADDINGXXPADDINGPADDINGXXPADDINGPADDINGXXPADDINGPADDINGXXPADDINGPADDINGXXPADDINGPADDINGXXPADDINGPADDINGXP 0!0<00011b235344466;77C88i:::E;m;;b>>?O?? 
L0h011h222T34 4536h6l6p6t6x6|666666!7<7789v<<==>>0d 0f00D1[111(2Q2233J444L5555>667^77@88t9:H:8;?;<B>?@\00P1u111w222334244444g77I888];;;;;;;;;f??%???P@0011124 5?6m67/7j7777999:}:{;;[==>^???`H0>01?1n11>2x223U3u346X7388#949T9;+<<(==:>f>>>>y?pd80000(1_11;2222Y3}33:4/5d55)7b77889.9:1:Y:::;J;r;;>>?h?0'0X00011F22278^8{88 9{99Z:a:l2`22222222333 33333 3$3(3,30314O44566a6d7=99":q::J;;;+?4?M?c?\0p000c11334K4444I55K6667777$8z8899:b:_;x;;;;<==>2>O>>?P4000o11162L22 3445A5n55A666.7[77 8889&:Z;;.>LS000N112<3333434\445057888R889r999::::;<<==3334T4X4\4`4d4h4l4p4t4x4|44444455677j9997:W:::::F;_;};;;N>> >>>>> >$>(>,>0>4>8>>>>D?H?L?P?T????h90Z00000-1j11"333R334 44444 4$4(4,4044484<4@4D4H4L4445555555555*6=>[?\$1[1111i2222b34]778C9K::u;;[<<===(>,>0>4>8><>@>D>H>L>P>T>X>\>`>1???<12m34,4t44U66L778B9::9<<<<<< ==>?,0D1:257T778@8V8888O9]9{;m=? T1+252h2r2t334C4M444D5 6678&999;;;;;;;;;;;'?0<K022222233g347a7789:;<===> ?|?@ 01z222344!556e6667Q8a8|88888888 9!919A9\9i9{991:: ;;;;; ;$;(;,;0;4;8;<;@;D;H;L;P;T;X;\;`;d;h;l;p;;P$0w14k46{:;=>>?T???`4124566D6H6L6P6T6X678899;*<p001I4[44459b::;;;9?$?d061O111[2222373=3G33356[78{8;:::;;;<<<====0>V>> ?'??????????p000 00000 0$0(0,0004080<0@0D0H0L0P0T0001"2v22;4485555667y77'8d889%9:1;F_>p>H06111#5Z5566>>>>>>>>>>>>>>>>>>>>>?H001E2334556K7y777889V999::;<<??<007x7777777777777888 88989.>>r?,j3l44667k:: ;;;<<<<<<>42338999:E:[:::@;N;;;:<^<<=h=>\t0~00E112445 5555555J6g6q68A9::}:;<;;;;;/<<D>>>?@z01[3333T6+858h8r8X99;;<<<<<>>>>>_???X0111 2w2}2233-444355555O666667779y:);^>>>>?`?? 
X0,031:1222222222222222333 33333 3$3(3,34)56678.8s;#<;<0@182q2234455555 66%6.6567#9;<=>l>???@d000111C233'4444 6$6(6,6064686<6@6D6H6L6P666777788_8{8888899 :B:;P0$050H0W0i0x0000000G1d1 222]22-3Y3y333344445c55~66677 8O9,:F::::;>>?`\A1w1o2|33x5566E6J6u6z6666777W888+9H9{9993:~:W;;;;;<==>h>>>9?p 00111112 2222)2.282=2G2L2V2[2e2j2t2y2222222222222222333'343A3S3\3f3s3}333333333344)4<4N4a4n4}44445a5}5555566+6>6Q666666677?7M7^778M8888888888888888,9=999/:9:O:b:v:::::::;*;F;Q;g;z;;;;#=50R0u222S4}455::@;L;c;`<<<<===-=M=T=g=t==========>!>*>6>=>E>O>Y>c>m>}>>>>>>>>> ??(?/?6?=?D?m?t?{?????????????0&0?0e0000000000000000L1P1T1X1\1`1d111111111222223<3^3334"4:4A4F4L4R4X4^4d4j4p4v4|444444444444444445 5555#5)5/555;5A5G5M5S5Y5_5e5k5q5w555555555555556 6666!6'6-63696?6E6K6Q6W6]66666666666666666666667 7777"7(7.747:7@7F7L7R7X7u7{777777777777777777777777788888888888888888888888888888899999999999::: ::::: :$:(:,:0:4:8:<;@;D;H;L;P;T;X;\;`;d;h;l;p;t;x;|;;;;;;;;;;;;;;;;;;<<<<="=:=A=F=L=R=X=^=d=j=p=v=|=================> >>>>#>)>/>5>;>A>G>M>S>Y>_>e>>>>>>>>>>>>>>>>>? ????!?'?-?3?9?b?i?n?t?z???????????????????????(0 0000"0(0<0B0H0P0T0X0\0`0d0h0l0p0t0x0|00000000 1$1(1,1014181<1@1D1H1L1P1T1X1\1`1d1h1l1p1t1x1|11111111`2d2h2l2p2t2x2|222222222222223333333333333333333333444 44444 4$4 555=5F5M5u5|555555555555555555555556 6666#6)6/656<6@6D6H6L6P6T6X6\6`6d6h6l6p6t6x6|666666666666666E7L7Q7W7]7c7i7o7u7{777777777777777777777778 88888 8$8(8,8084888<8@8D8H8L8P8T8X8\8`8d8h8l8p8t8x8|88899%9+9R99?:v::;w;;>?E???L0l001E111 2y223r334F4r44#555)6o6663777A88819w9;:557844555+595G5S5Z5`5555556*6F6i666667 77?7E7[7{7777.8H8i888889::#:-:B:L:V:`:::::::;;*;:<>>>>??? 
?F?M?L0011$3(3,3034383<3@3D3H3L3P3T3X3\3`3d3h3l3p3r45a6s6~6:::k;?u013394x44c5(626K6U6p6657778 8888"8(8.848:8@8F8L8R8X8^8d8j8p8v8|888888888888888888888899 9999$9*90969<9B9H9N9T9Z9`9f9l9r9x9~9999999999999999999999::::: :&:,:2:8:>:D:J:P:V:\:b:h:n:t:z:::::::::::::::::::::::; ;;;;";(;.;4;:;@;F;L;R;X;^;d;j;p;v;|;;;;;;;;;;;;;;;;;;;;;;<< <<<<$<*<0<6<<=D=J=P=V=\=b=h=n=t=z=======================> >>>>">(>.>4>:>@>F>L>R>X>^>d>j>p>v>|>>>>>>>>>>>>>>>>>>>>>>?? ????$?*?0?6?>> >>>>> >$>(>,>0>4>8><>@>D>H>L>P>T>X>d>h>l>p>t>x>|>>>>>>>>>>>>>>>>>>>>>>?????0000 00000 0$0(0,0004080<0@0D0H0L0P0T0X0d0h0l0p0t0x0|000000000000000000000000000111 11111 1$1(1,1014181<1@1D1H1L1P1T1X1848d8888$9T9999:D:t:::;4;d;;;;$4>d>>>>$?T????@D0D0t000141d1111$2T22223D3t333444d4444$5T511$2(2222233040d0000$1T11112D2t222343d3333$4T44445D5t555646d6666$7T77778D8t888949d9999$:T::::;D;t;;;<4D>t>>>?4?d????$0T00001D1t111242d2222$3T33334D4t444545d5555$6T66667D7t777848d8888$9T9999:@:D:H:L:P:T:\:`:h:p:t::::::::::;;;;;T >>>$>,>4><>D>L>T>\>d>l>t>|>>>>>>>>>>>>>>>>>? ???$?,?4? 1 ? args[1 .. $] : DefaultDFLAGS; // Compiler says no to immutable (because it can't handle the appending) const command = [ dmd, OutputFlag, IncludeFlag, "-version=DubUseCurl", "-version=DubApplication", ] ~ dflags ~ [ "@build-files.txt" ]; writeln("Building dub using ", dmd, " (dflags: ", dflags, "), this may take a while..."); auto proc = execute(command); if (proc.status != 0) { writeln("Command `", command, "` failed, output was:"); writeln(proc.output); return 1; } writeln("DUB has been built as: ", DubBinPath); version (Posix) writeln("You may want to run `sudo ln -s ", DubBinPath, " /usr/local/bin` now"); else version (Windows) writeln("You may want to add the following entry to your PATH " ~ "environment variable: ", DubBinPath); return 0; } /** Generate the version file describing DUB's version / commit Params: dubVersion = User provided version file. 
Can be `null` / empty, in which case the existing file (if any) takes precedence, or the version is infered with `git describe`. A non-empty parameter will always override the existing file. */ bool writeVersionFile(string dubVersion) { if (!dubVersion.length) { if (std.file.exists(VersionFilePath)) { writeln("Using pre-existing version file. To force a rebuild, " ~ "provide an explicit version (first argument) or remove: ", VersionFilePath); return true; } auto pid = execute(["git", "describe"]); if (pid.status != 0) { writeln("Could not determine version with `git describe`. " ~ "Make sure 'git' is installed and this is a git repository. " ~ "Alternatively, you can provide a version explicitly via the " ~ "`GITVER environment variable or pass it as the first " ~ "argument to this script"); return false; } dubVersion = pid.output.strip(); } try { std.file.write(VersionFilePath, q{ /** DUB version file This file is auto-generated by 'build.d'. DO NOT EDIT MANUALLY! */ module dub.version_; enum dubVersion = "%s"; }.format(dubVersion)); writeln("Wrote version_.d` file with version: ", dubVersion); return true; } catch (Exception e) { writeln("Writing version file to '", VersionFilePath, "' failed: ", e.msg); return false; } } /** Detect which compiler is available Default to DMD, then LDC (ldmd2), then GDC (gdmd). If none is in the PATH, an error will be thrown. Note: It would be optimal if we could get the path of the compiler invoking this script, but AFAIK this isn't possible. */ string getCompiler () { auto env = environment.get("DMD", ""); // If the user asked for a compiler explicitly, respect it if (env.length) return env; static immutable Compilers = [ "dmd", "ldmd2", "gdmd" ]; foreach (bin; Compilers) { try { auto pid = execute([bin, "--version"]); if (pid.status == 0) return bin; } catch (Exception e) continue; } writeln("No compiler has been found in the PATH. 
Attempted values: ", Compilers); writeln("Make sure one of those is in the PATH, or set the `DMD` variable"); return null; } dub-1.40.0/build.sh000077500000000000000000000010771477246567400140370ustar00rootroot00000000000000#!/usr/bin/env bash set -e echo "@@@@@ WARNING @@@@@" echo "@ This script is DEPRECATED. Use build.d directly instead @" echo "@@@@@@@@@@@@@@@@@@@@" if [ "$DMD" = "" ]; then if [ ! "$DC" = "" ]; then # backwards compatibility with DC DMD=$DC else command -v gdmd >/dev/null 2>&1 && DMD=gdmd || true command -v ldmd2 >/dev/null 2>&1 && DMD=ldmd2 || true command -v dmd >/dev/null 2>&1 && DMD=dmd || true fi fi if [ "$DMD" = "" ]; then echo >&2 "Failed to detect D compiler. Use DMD=... to set a dmd compatible binary manually." exit 1 fi $DMD -run build.d $* dub-1.40.0/changelog/000077500000000000000000000000001477246567400143235ustar00rootroot00000000000000dub-1.40.0/changelog/README.md000066400000000000000000000027201477246567400156030ustar00rootroot00000000000000This directory will get copied to dlang.org and cleared when master gets merged into stable prior to a new release. How to add a new changelog entry to the pending changelog? ========================================================== Create a new file in the `changelog` folder. It should end with `.dd` and look similar to a git commit message. The first line represents the title of the change. After an empty line follows the long description: ``` My fancy title of the new feature A long description of the new feature in `std.range`. It can be followed by an example: ------- import std.range : padLeft, padRight; import std.algorithm.comparison : equal; assert([1, 2, 3, 4, 5].padLeft(0, 7).equal([0, 0, 1, 2, 3, 4, 5])); assert("Hello World!".padRight('!', 15).equal("Hello World!!!!")); ------- and links to the documentation, e.g. $(REF drop, std, range) or $(REF_ALTTEXT a custom name for the function, drop, std, range). 
Links to the spec can look like this $(LINK2 $(ROOT_DIR)spec/module.html, this) and of course you can link to other $(LINK2 https://forum.dlang.org/, external resources). ``` The title can't contain links (it's already one). For more infos, see the [Ddoc spec](https://dlang.org/spec/ddoc.html). Preview changes --------------- If you have cloned the [tools](https://github.com/dlang/tools) and [dlang.org](https://github.com/dlang/dlang.org) repo, you can preview the changelog with: ``` make -C ../dlang.org -f posix.mak pending_changelog ``` dub-1.40.0/changelog/fix-cimport-paths.dd000066400000000000000000000002351477246567400202120ustar00rootroot00000000000000Fix issue where cImportPaths wasn't working with dmd and ldc dub was passing -I instead of -P-I as is required by those compilers dub-1.40.0/docker/000077500000000000000000000000001477246567400136435ustar00rootroot00000000000000dub-1.40.0/docker/Dockerfile.alpine000066400000000000000000000012521477246567400171040ustar00rootroot00000000000000FROM alpine:edge AS Builder # DCPKG is the name of the package, DCBIN the name of the binary # We need this because of the ldc / ldc2 disparity ARG DCPKG ARG DCBIN # Build dub (and install tests dependencies in the process) WORKDIR /root/build/ RUN apk add --no-cache bash build-base curl curl-dev dtools dub git grep rsync $DCPKG ADD . 
/root/build/ RUN dub test --compiler=$DCBIN && dub build --compiler=$DCBIN # Remove dub to avoid the risk of using the wrong binary RUN apk del dub # Used by the `run-unittest.sh` script ENV DUB=/root/build/bin/dub ENV DC=$DCBIN # Finally, just run the test-suite WORKDIR /root/build/test/ ENTRYPOINT [ "/root/build/test/run-unittest.sh" ] dub-1.40.0/dub.sdl000066400000000000000000000024451477246567400136570ustar00rootroot00000000000000name "dub" description "Package manager for D packages" authors "Sönke Ludwig" "Martin Nowak" "Matthias Dondorff" "Sebastian Wilzbach" \ "more than 80 contributors total" copyright "Copyright © 2012-2016 rejectedsoftware e.K., Copyright © 2012-2014 Matthias Dondorff" license "MIT" targetPath "bin" // Deprecated module(s) excludedSourceFiles "source/dub/packagesupplier.d" configuration "application" { targetType "executable" mainSourceFile "source/app.d" versions "DubUseCurl" "DubApplication" // Uncomment to get rich output about the file parsing and json <-> YAML // integrity checks //debugVersions "ConfigFillerDebug" dflags "-preview=in" platform="dmd" dflags "-preview=in" platform="ldc" //Disabled due to ICEs in gdc. 
//dflags "-fpreview=in" platform="gdc" } configuration "library" { targetType "library" excludedSourceFiles "source/app.d" copyFiles "bin/libcurl.dll" "bin/libeay32.dll" "bin/ssleay32.dll" platform="windows" versions "DubUseCurl" } configuration "library-nonet" { dependency "vibe-d:http" version=">=0.9.0 <0.11.0" optional=true targetType "library" excludedSourceFiles "source/app.d" } configuration "dynamic-library-nonet" { dependency "vibe-d:http" version=">=0.9.0 <0.11.0" optional=true targetType "dynamicLibrary" excludedSourceFiles "source/app.d" } dub-1.40.0/dub.selections.json000066400000000000000000000006431477246567400162130ustar00rootroot00000000000000{ "fileVersion": 1, "versions": { "diet-ng": "1.8.2", "eventcore": "0.9.35", "mir-linux-kernel": "1.2.1", "openssl": "3.3.4", "openssl-static": "1.0.5+3.0.8", "stdx-allocator": "2.77.5", "taggedalgebraic": "0.11.23", "vibe-container": "1.4.0", "vibe-core": "2.9.6", "vibe-d": "0.10.1", "vibe-http": "1.2.1", "vibe-inet": "1.1.0", "vibe-serialization": "1.0.7", "vibe-stream": "1.1.1" } } dub-1.40.0/examples/000077500000000000000000000000001477246567400142125ustar00rootroot00000000000000dub-1.40.0/examples/app-sdl/000077500000000000000000000000001477246567400155525ustar00rootroot00000000000000dub-1.40.0/examples/app-sdl/dub.sdl000066400000000000000000000000721477246567400170270ustar00rootroot00000000000000name "app-example"; description "A simple D application"; dub-1.40.0/examples/app-sdl/source/000077500000000000000000000000001477246567400170525ustar00rootroot00000000000000dub-1.40.0/examples/app-sdl/source/app.d000066400000000000000000000000761477246567400200020ustar00rootroot00000000000000import std.stdio; void main() { writeln("Hello, World."); } dub-1.40.0/examples/app/000077500000000000000000000000001477246567400147725ustar00rootroot00000000000000dub-1.40.0/examples/app/dub.json000066400000000000000000000001051477246567400164330ustar00rootroot00000000000000{ "name": "app-example", "description": "A simple D 
application" } dub-1.40.0/examples/app/source/000077500000000000000000000000001477246567400162725ustar00rootroot00000000000000dub-1.40.0/examples/app/source/app.d000066400000000000000000000000761477246567400172220ustar00rootroot00000000000000import std.stdio; void main() { writeln("Hello, World."); } dub-1.40.0/examples/generated-sources/000077500000000000000000000000001477246567400176315ustar00rootroot00000000000000dub-1.40.0/examples/generated-sources/dub.json000066400000000000000000000003061477246567400212750ustar00rootroot00000000000000{ "name": "generated-sources", "description": "Example of using pre generate commands to generate source code.", "preGenerateCommands": [ "echo 'int fun() { return 42; }' > source/test.d" ] } dub-1.40.0/examples/generated-sources/source/000077500000000000000000000000001477246567400211315ustar00rootroot00000000000000dub-1.40.0/examples/generated-sources/source/app.d000066400000000000000000000001201477246567400220470ustar00rootroot00000000000000import std.stdio; import test; void main() { writefln("Result: %s", fun()); } dub-1.40.0/examples/header-lib/000077500000000000000000000000001477246567400162065ustar00rootroot00000000000000dub-1.40.0/examples/header-lib/dub.json000066400000000000000000000002361477246567400176540ustar00rootroot00000000000000{ "name": "header-lib-example", "description": "A simple D header library (C binding to libmylib.so)", "targetType": "sourceLibrary", "libs": ["mylib"] } dub-1.40.0/examples/header-lib/import/000077500000000000000000000000001477246567400175205ustar00rootroot00000000000000dub-1.40.0/examples/header-lib/import/mylib.d000066400000000000000000000000461477246567400210010ustar00rootroot00000000000000module mylib; extern(C) void test(); 
dub-1.40.0/examples/injected-from-dependency/000077500000000000000000000000001477246567400210545ustar00rootroot00000000000000dub-1.40.0/examples/injected-from-dependency/dependency/000077500000000000000000000000001477246567400231725ustar00rootroot00000000000000dub-1.40.0/examples/injected-from-dependency/dependency/ahook.d000066400000000000000000000003651477246567400244440ustar00rootroot00000000000000module ahook; version(D_BetterC) { pragma(crt_constructor) extern(C) void someInitializer() { import core.stdc.stdio; printf("Hook ran!\n"); } } else { shared static this() { import std.stdio; writeln("We have a runtime!!!!"); } } dub-1.40.0/examples/injected-from-dependency/dependency/dub.json000066400000000000000000000004361477246567400246420ustar00rootroot00000000000000{ "name": "toload", "description": "Example to showcase injection of a source file from a dependency dependency.", "targetType": "library", "buildOptions": ["betterC"], "sourcePaths": ["source"], "importPaths": ["source"], "injectSourceFiles": ["ahook.d"] } dub-1.40.0/examples/injected-from-dependency/dependency/source/000077500000000000000000000000001477246567400244725ustar00rootroot00000000000000dub-1.40.0/examples/injected-from-dependency/dependency/source/something.d000066400000000000000000000003051477246567400266320ustar00rootroot00000000000000module something; void doSomething() { import core.stdc.stdio; version(D_BetterC) { printf("druntime is not in the executable :(\n"); } else { printf("druntime is in executable!\n"); } } dub-1.40.0/examples/injected-from-dependency/usage/000077500000000000000000000000001477246567400221605ustar00rootroot00000000000000dub-1.40.0/examples/injected-from-dependency/usage/dub.json000066400000000000000000000003401477246567400236220ustar00rootroot00000000000000{ "name": "runner", "description": "Example to showcase injection of a source file from a dependency runner.", "targetType": "executable", "dependencies": { "toload": {"path": "../dependency"} } 
}dub-1.40.0/examples/injected-from-dependency/usage/source/000077500000000000000000000000001477246567400234605ustar00rootroot00000000000000dub-1.40.0/examples/injected-from-dependency/usage/source/entry.d000066400000000000000000000000631477246567400247650ustar00rootroot00000000000000void main() { import something; doSomething(); } dub-1.40.0/examples/lib-user/000077500000000000000000000000001477246567400157345ustar00rootroot00000000000000dub-1.40.0/examples/lib-user/dub.json000066400000000000000000000002641477246567400174030ustar00rootroot00000000000000{ "name": "lib-user-example", "description": "An application using a local library dependency", "dependencies": { "lib-example": {"version": "~master", "path": "../lib"} } } dub-1.40.0/examples/lib-user/source/000077500000000000000000000000001477246567400172345ustar00rootroot00000000000000dub-1.40.0/examples/lib-user/source/app.d000066400000000000000000000000461477246567400201610ustar00rootroot00000000000000import lib; void main() { test(); } dub-1.40.0/examples/lib/000077500000000000000000000000001477246567400147605ustar00rootroot00000000000000dub-1.40.0/examples/lib/dub.json000066400000000000000000000001331477246567400164220ustar00rootroot00000000000000{ "name": "lib-example", "description": "A simple D library", "targetType": "library" } dub-1.40.0/examples/lib/source/000077500000000000000000000000001477246567400162605ustar00rootroot00000000000000dub-1.40.0/examples/lib/source/lib.d000066400000000000000000000001131477246567400171660ustar00rootroot00000000000000module lib; import std.stdio; void test() { writeln("Hello, World."); } dub-1.40.0/examples/mixed/000077500000000000000000000000001477246567400153205ustar00rootroot00000000000000dub-1.40.0/examples/mixed/dub.json000066400000000000000000000001471477246567400167670ustar00rootroot00000000000000{ "name": "mixed-example", "description": "A package usable as both, an application and a library" } 
dub-1.40.0/examples/mixed/source/000077500000000000000000000000001477246567400166205ustar00rootroot00000000000000dub-1.40.0/examples/mixed/source/app.d000066400000000000000000000000631477246567400175440ustar00rootroot00000000000000module app; import lib; void main() { test(); } dub-1.40.0/examples/mixed/source/lib.d000066400000000000000000000001131477246567400175260ustar00rootroot00000000000000module lib; import std.stdio; void test() { writeln("Hello, World!"); } dub-1.40.0/examples/vibed-main/000077500000000000000000000000001477246567400162255ustar00rootroot00000000000000dub-1.40.0/examples/vibed-main/dub.json000066400000000000000000000002261477246567400176720ustar00rootroot00000000000000{ "name": "vibed-main-example", "description": "A project using vibe.d and a custom main() function", "dependencies": { "vibe-d": "~master" } } dub-1.40.0/examples/vibed-main/source/000077500000000000000000000000001477246567400175255ustar00rootroot00000000000000dub-1.40.0/examples/vibed-main/source/app.d000066400000000000000000000003451477246567400204540ustar00rootroot00000000000000import vibe.vibe; void main() { listenHTTP(new HTTPServerSettings, &handleRequest); lowerPrivileges(); runEventLoop(); } void handleRequest(HTTPServerRequest req, HTTPServerResponse res) { res.writeBody("Hello, World!"); } dub-1.40.0/examples/vibed/000077500000000000000000000000001477246567400153035ustar00rootroot00000000000000dub-1.40.0/examples/vibed/dub.json000066400000000000000000000001641477246567400167510ustar00rootroot00000000000000{ "name": "vibed-example", "description": "A project using vibe.d", "dependencies": { "vibe-d": "~master" } } dub-1.40.0/examples/vibed/source/000077500000000000000000000000001477246567400166035ustar00rootroot00000000000000dub-1.40.0/examples/vibed/source/app.d000066400000000000000000000003061477246567400175270ustar00rootroot00000000000000import vibe.d; shared static this() { listenHTTP(new HTTPServerSettings, &handleRequest); } void handleRequest(HTTPServerRequest 
req, HTTPServerResponse res) { res.writeBody("Hello, World!"); } dub-1.40.0/scripts/000077500000000000000000000000001477246567400140635ustar00rootroot00000000000000dub-1.40.0/scripts/bash-completion/000077500000000000000000000000001477246567400171475ustar00rootroot00000000000000dub-1.40.0/scripts/bash-completion/dub.bash000066400000000000000000000036661477246567400205730ustar00rootroot00000000000000# dub(1) completion -*- shell-script -*- _dub() { local cur prev words cword split _init_completion -s || return local creation_commands creation_commands='init run build test generate describe clean dustmite' local management_commands management_commands='fetch remove upgrade add-path remove-path add-local remove-local list add-override remove-override list-overrides clean-caches' case "$prev" in -h|--help) return 0 ;; esac $split && return 0 # Use -h -v -q because lack of comma separation between -h and --help local common_options common_options='-h -v -q'; local packages packages=$(dub list| awk '/^[[:space:]]+/ { print $1 }') if [[ $cword -eq 1 ]] ; then # if one argument given if [[ "$cur" == -* ]]; then COMPREPLY=( $( compgen -W '$common_options $( _parse_help "$1" )' -- "$cur" ) ) else COMPREPLY=( $( compgen -W "$creation_commands $management_commands" -- "$cur" ) ) fi else local command=${words[1]}; # use $prev instead? 
local specific_options specific_options=$( "$1" $command --help 2>/dev/null | _parse_help - ) case $command in init | add-path | remove-path | add-local | remove-local | dustmite ) COMPREPLY=( $( compgen -d -W '$common_options $specific_options' -- "$cur" ) ) ;; run | build | test | generate | describe | clean | upgrade | add-override | remove-override ) COMPREPLY=( $( compgen -W '$packages $common_options $specific_options' -- "$cur" ) ) ;; *) COMPREPLY=( $( compgen -W '$common_options $specific_options' -- "$cur" ) ) ;; esac fi [[ $COMPREPLY == *= ]] && compopt -o nospace return # NOTE: Disabled for now # _filedir } && complete -F _dub dub dub-1.40.0/scripts/ci/000077500000000000000000000000001477246567400144565ustar00rootroot00000000000000dub-1.40.0/scripts/ci/ci.sh000077500000000000000000000011571477246567400154140ustar00rootroot00000000000000#!/bin/bash set -v -e -o pipefail vibe_ver=$(jq -r '.versions | .["vibe-d"]' < dub.selections.json) dub fetch vibe-d@$vibe_ver # get optional dependency dub test --compiler=${DC} -c library-nonet export DMD="$(command -v $DMD)" ./build.d -preview=dip1000 -preview=in -w -g -debug if [ "$COVERAGE" = true ]; then # library-nonet fails to build with coverage (Issue 13742) dub test --compiler=${DC} -b unittest-cov ./build.d -cov else dub test --compiler=${DC} -b unittest-cov ./build.d fi DUB=`pwd`/bin/dub DC=${DC} dub --single ./test/run-unittest.d DUB=`pwd`/bin/dub DC=${DC} test/run-unittest.sh dub-1.40.0/scripts/ci/summary_comment.sh000077500000000000000000000013511477246567400202340ustar00rootroot00000000000000#!/usr/bin/env bash set -u # Output from this script is piped to a file by CI, being run from before a # change has been made and after a change has been made. 
Then both outputs are # compared using summary_comment_diff.sh # cd to git folder, just in case this is manually run: ROOT_DIR="$( cd "$(dirname "${BASH_SOURCE[0]}")/../../" && pwd )" cd ${ROOT_DIR} dub --version ldc2 --version # fetch missing packages before timing dub upgrade --missing-only start=`date +%s` dub build --build=release --force 2>&1 || echo "BUILD FAILED" end=`date +%s` build_time=$( echo "$end - $start" | bc -l ) strip bin/dub echo "STAT:statistics (-before, +after)" echo "STAT:executable size=$(wc -c bin/dub)" echo "STAT:rough build time=${build_time}s" dub-1.40.0/scripts/ci/summary_comment_diff.sh000077500000000000000000000044641477246567400212340ustar00rootroot00000000000000#!/usr/bin/env bash set -u EMPTY=1 ADDED=$(diff --new-line-format='%L' --old-line-format='' --unchanged-line-format='' "$1" "$2") REMOVED=$(diff --new-line-format='' --old-line-format='%L' --unchanged-line-format='' "$1" "$2") TOTAL=$(cat "$2") STATS_OLD=$(grep -E '^STAT:' "$1" | sed -E 's/^STAT://') STATS_NEW=$(grep -E '^STAT:' "$2" | sed -E 's/^STAT://') STATS_DIFFED=$(diff --new-line-format='+%L' --old-line-format='-%L' --unchanged-line-format=' %L' <(echo "$STATS_OLD") <(echo "$STATS_NEW")) ADDED_DEPRECATIONS=$(grep -Pi '\b(deprecation|deprecated)\b' <<< "$ADDED") REMOVED_DEPRECATIONS=$(grep -Pi '\b(deprecation|deprecated)\b' <<< "$REMOVED") ADDED_WARNINGS=$(grep -Pi '\b(warn|warning)\b' <<< "$ADDED") REMOVED_WARNINGS=$(grep -Pi '\b(warn|warning)\b' <<< "$REMOVED") DEPRECATION_COUNT=$(grep -Pi '\b(deprecation|deprecated)\b' <<< "$TOTAL" | wc -l) WARNING_COUNT=$(grep -Pi '\b(warn|warning)\b' <<< "$TOTAL" | wc -l) if [ -z "$ADDED_DEPRECATIONS" ]; then # no new deprecations true else echo "⚠️ This PR introduces new deprecations:" echo echo '```' echo "$ADDED_DEPRECATIONS" echo '```' echo EMPTY=0 fi if [ -z "$ADDED_WARNINGS" ]; then # no new deprecations true else echo "⚠️ This PR introduces new warnings:" echo echo '```' echo "$ADDED_WARNINGS" echo '```' echo EMPTY=0 fi if 
grep "BUILD FAILED" <<< "$TOTAL"; then echo '❌ Basic `dub build` failed! Please check your changes again.' echo else if [ -z "$REMOVED_DEPRECATIONS" ]; then # no removed deprecations true else echo "✅ This PR fixes following deprecations:" echo echo '```' echo "$REMOVED_DEPRECATIONS" echo '```' echo EMPTY=0 fi if [ -z "$REMOVED_WARNINGS" ]; then # no removed warnings true else echo "✅ This PR fixes following warnings:" echo echo '```' echo "$REMOVED_WARNINGS" echo '```' echo EMPTY=0 fi if [ $EMPTY == 1 ]; then echo "✅ PR OK, no changes in deprecations or warnings" echo fi echo "Total deprecations: $DEPRECATION_COUNT" echo echo "Total warnings: $WARNING_COUNT" echo fi if [ -z "$STATS_DIFFED" ]; then # no statistics? true else echo "Build statistics:" echo echo '```diff' echo "$STATS_DIFFED" echo '```' echo fi echo '
' echo echo 'Full build output' echo echo '```' echo "$TOTAL" echo '```' echo echo '
' dub-1.40.0/scripts/fish-completion/000077500000000000000000000000001477246567400171635ustar00rootroot00000000000000dub-1.40.0/scripts/fish-completion/dub.fish000066400000000000000000000165011477246567400206130ustar00rootroot00000000000000# # Completions for the dub command # # # Subcommands # # Package creation complete -c dub -n '__fish_use_subcommand' -x -a init -d 'Initializes an empty package skeleton' # Build, test, and run complete -c dub -n '__fish_use_subcommand' -x -a run -d 'Builds and runs a package' complete -c dub -n '__fish_use_subcommand' -x -a build -d 'Builds a package' complete -c dub -n '__fish_use_subcommand' -x -a test -d 'Executes the tests of the selected package' complete -c dub -n '__fish_use_subcommand' -x -a generate -d 'Generates project files using the specified generator' complete -c dub -n '__fish_use_subcommand' -x -a describe -d 'Prints a JSON description of the project and its dependencies' complete -c dub -n '__fish_use_subcommand' -x -a clean -d 'Removes intermediate build files and cached build results' complete -c dub -n '__fish_use_subcommand' -x -a dustmite -d 'Create reduced test cases for build errors' # Package management complete -c dub -n '__fish_use_subcommand' -x -a fetch -d 'Manually retrieves and caches a package' complete -c dub -n '__fish_use_subcommand' -x -a remove -d 'Removes a cached package' complete -c dub -n '__fish_use_subcommand' -x -a upgrade -d 'Forces an upgrade of all dependencies' complete -c dub -n '__fish_use_subcommand' -x -a add-path -d 'Adds a default package search path' complete -c dub -n '__fish_use_subcommand' -x -a remove-path -d 'Removes a package search path' complete -c dub -n '__fish_use_subcommand' -x -a add-local -d 'Adds a local package directory' complete -c dub -n '__fish_use_subcommand' -x -a remove-local -d 'Removes a local package directory' complete -c dub -n '__fish_use_subcommand' -x -a list -d 'Prints a list of all local packages dub is aware of' complete -c dub -n 
'__fish_use_subcommand' -x -a add-override -d 'Adds a new package override' complete -c dub -n '__fish_use_subcommand' -x -a remove-override -d 'Removes an existing package override' complete -c dub -n '__fish_use_subcommand' -x -a list-overrides -d 'Prints a list of all local package overrides' complete -c dub -n '__fish_use_subcommand' -x -a clean-caches -d 'Removes cached metadata' # # Subcommand options # for cmd in run build complete -c dub -n "contains '$cmd' (commandline -poc)" -l rdmd -d "Use rdmd" end for cmd in run build test complete -c dub -n "contains '$cmd' (commandline -poc)" -s f -l force -d "Force recompilation" end for cmd in run complete -c dub -n "contains '$cmd' (commandline -poc)" -l temp-build -d "Build in temp folder" end for cmd in run build test generate describe dustmite complete -c dub -n "contains '$cmd' (commandline -poc)" -s c -l config -r -d "Build configuration" complete -c dub -n "contains '$cmd' (commandline -poc)" -s a -l arch -r -d "Force architecture" complete -c dub -n "contains '$cmd' (commandline -poc)" -s d -l debug -r -d "Debug identifier" complete -c dub -n "contains '$cmd' (commandline -poc)" -s d -l d-version -r -d "Version identifier" complete -c dub -n "contains '$cmd' (commandline -poc)" -l nodeps -d "No dependency check" complete -c dub -n "contains '$cmd' (commandline -poc)" -s b -l build -u -x -d "Build type" -a "debug plain release release-debug release-nobounds unittest profile profile-gc docs ddox cov cov-ctfe unittest-cov unittest-cov-ctfe syntax" complete -c dub -n "contains '$cmd' (commandline -poc)" -l build-mode -x -d "How compiler & linker are invoked" -a "separate allAtOnce singleFile" complete -c dub -n "contains '$cmd' (commandline -poc)" -l compiler -x -d "Compiler binary" -a "dmd gdc ldc gdmd ldmd" end for cmd in run build test generate describe dustmite fetch remove upgrade complete -c dub -n "contains '$cmd' (commandline -poc)" -l force-remove -x -d "Force deletion" end for cmd in run build 
dustmite complete -c dub -n "contains '$cmd' (commandline -poc)" -l combined -d "Build project in single compiler run" end for cmd in run build test generate complete -c dub -n "contains '$cmd' (commandline -poc)" -l print-builds -d "Print list of build types" end for cmd in run build generate complete -c dub -n "contains '$cmd' (commandline -poc)" -l print-configs -d "Print list of configurations" complete -c dub -n "contains '$cmd' (commandline -poc)" -l print-platform -d "Print build platform identifiers" end for cmd in build dustmite fetch remove complete -c dub -n "contains '$cmd' (commandline -poc)" -x -d "Package" -a '(dub list | awk \'/^[[:space:]]+/ { print $1 }\' | cut -f 3 -d " ")' end for cmd in clean complete -c dub -n "contains '$cmd' (commandline -poc)" -l all-packages -d "Clean all known packages" end for cmd in dustmite complete -c dub -n "contains '$cmd' (commandline -poc)" -l compiler-status -x -d "Expected compiler status code" complete -c dub -n "contains '$cmd' (commandline -poc)" -l compiler-regex -x -d "Compiler output regular expression" complete -c dub -n "contains '$cmd' (commandline -poc)" -l linker-status -x -d "Expected linker status code" complete -c dub -n "contains '$cmd' (commandline -poc)" -l linker-regex -x -d "Linker output regular expression" complete -c dub -n "contains '$cmd' (commandline -poc)" -l program-status -x -d "Expected program status code" complete -c dub -n "contains '$cmd' (commandline -poc)" -l program-regex -x -d "Program output regular expression" complete -c dub -n "contains '$cmd' (commandline -poc)" -l test-package -x -d "Perform a test run" end for cmd in fetch remove complete -c dub -n "contains '$cmd' (commandline -poc)" -l version -r -d "Version to use" complete -c dub -n "contains '$cmd' (commandline -poc)" -l system -d "Deprecated" complete -c dub -n "contains '$cmd' (commandline -poc)" -l local -d "Deprecated" end for cmd in upgrade complete -c dub -n "contains '$cmd' (commandline -poc)" -l prerelease 
-d "Use latest pre-release version" complete -c dub -n "contains '$cmd' (commandline -poc)" -l verify -d "Update if successful build" complete -c dub -n "contains '$cmd' (commandline -poc)" -l missing-only -d "Update dependencies without a selected version" end for cmd in add-path remove-path add-local remove-local add-override remove-override complete -c dub -n "contains '$cmd' (commandline -poc)" -l system -d "System-wide" end # Common options complete -c dub -s h -l help -d "Display help" complete -c dub -l root -r -d "Path to operate in" complete -c dub -l registry -r -d "Use DUB registry URL" complete -c dub -l annotate -d "Just print actions" complete -c dub -s v -l verbose -d "Print diagnostic output" complete -c dub -l vverbose -d "Print debug output" complete -c dub -s q -l quiet -d "Only print warnings and errors" complete -c dub -l vquiet -d "Print no messages" complete -c dub -l cache -x -d "Use cache location" -a "local system user" dub-1.40.0/scripts/man/000077500000000000000000000000001477246567400146365ustar00rootroot00000000000000dub-1.40.0/scripts/man/.gitignore000066400000000000000000000000221477246567400166200ustar00rootroot00000000000000*.1 *.md /gen_man dub-1.40.0/scripts/man/README.md000066400000000000000000000002451477246567400161160ustar00rootroot000000000000001) Build -------- ```shell ./gen_man.d ``` 2) Preview ---------- On Linux: ```shell man -l dub.1 ``` On OSX: ```shell mkdir -p man1 mv *.1 man1 man -M . dub ``` dub-1.40.0/scripts/man/gen_man.d000077500000000000000000000277111477246567400164220ustar00rootroot00000000000000#!/usr/bin/env dub /+dub.sdl: dependency "dub" path="../.." 
+/ import std.algorithm, std.conv, std.format, std.path, std.range; import std.stdio : File; import dub.internal.dyaml.stdsumtype; import dub.commandline; static struct Config { import std.datetime; SysTime date; string[] relatedSubCommands; static Config init(){ import std.process : environment; Config config; config.date = Clock.currTime; auto diffable = environment.get("DIFFABLE", "0"); if (diffable == "1") config.date = SysTime(DateTime(2018, 01, 01)); config.cwd = __FILE_FULL_PATH__.dirName; return config; } string cwd; } struct ManWriter { enum Mode { man, markdown } File output; Mode mode; string escapeWord(string s) { final switch (mode) { case Mode.man: return s.replace(`\`, `\\`).replace(`-`, `\-`).replace(`.`, `\&.`); case Mode.markdown: return s.replace(`<`, `<`).replace(`>`, `>`); } } string escapeFulltext(string s) { final switch (mode) { case Mode.man: return s; case Mode.markdown: return s.replace(`<`, `<`).replace(`>`, `>`); } } string italic(string w) { final switch (mode) { case Mode.man: return `\fI` ~ w ~ `\fR`; case Mode.markdown: return `` ~ w ~ ``; } } string bold(string w) { final switch (mode) { case Mode.man: return `\fB` ~ w ~ `\fR`; case Mode.markdown: return `` ~ w ~ ``; } } string header(string heading) { final switch (mode) { case Mode.man: return ".SH " ~ heading; case Mode.markdown: return "## " ~ heading; } } string subheader(string heading) { final switch (mode) { case Mode.man: return ".SS " ~ heading; case Mode.markdown: return "### " ~ heading; } } string url(string urlAndText) { return url(urlAndText, urlAndText); } string url(string url, string text) { final switch (mode) { case Mode.man: return ".UR" ~ url ~ "\n" ~ text ~ "\n.UE"; case Mode.markdown: return format!"[%s](%s)"(text, url); } } string autolink(string s) { final switch (mode) { case Mode.man: return s; case Mode.markdown: auto sanitized = s .replace("", "") .replace("", "") .replace("", "") .replace("", "") .replace("*", ""); if (sanitized.startsWith("dub") && 
sanitized.endsWith("(1)")) { sanitized = sanitized[0 .. $ - 3]; return url(sanitized ~ ".md", s); } return s; } } /// Links subcommands in the main dub.md file (converts the subcommand name /// like `init` into a link to `dub-init.md`) string specialLinkMainCmd(string s) { final switch (mode) { case Mode.man: return s; case Mode.markdown: return url("dub-" ~ s ~ ".md", s); } } void write(T...)(T args) { output.write(args); } void writeln(T...)(T args) { output.writeln(args); } void writefln(T...)(T args) { output.writefln(args); } void writeHeader(string manName, const Config config) { import std.uni : toLower; final switch (mode) { case Mode.man: static immutable manHeader = `.TH %s 1 "%s" "The D Language Foundation" "The D Language Foundation" .SH NAME`; writefln(manHeader, manName, config.date.toISOExtString.take(10)); break; case Mode.markdown: writefln("# %s(1)", manName.toLower); break; } } void writeFooter(string seeAlso, const Config config) { const manFooter = header("FILES") ~ '\n' ~ italic(escapeWord("dub.sdl")) ~ ", " ~ italic(escapeWord("dub.json")) ~ '\n' ~ header("AUTHOR") ~ '\n' ~ `Copyright (c) 1999-%s by The D Language Foundation` ~ '\n' ~ header("ONLINE DOCUMENTATION") ~ '\n' ~ url(`http://code.dlang.org/docs/commandline`) ~ '\n' ~ header("SEE ALSO"); writefln(manFooter, config.date.year); writeln(seeAlso); } string highlightArguments(string args) { import std.regex : regex, replaceAll; static auto re = regex("<([^>]*)>"); const reReplacement = escapeWord("<%s>").format(italic(escapeWord(`$1`))); auto ret = args.replaceAll(re, reReplacement); if (ret.length) ret ~= ' '; return ret; } void beginArgs(string cmd) { if (mode == Mode.markdown) writeln("\n
\n"); } void endArgs() { if (mode == Mode.markdown) writeln("\n
\n"); } void writeArgName(string cmd, string name) { import std.regex : regex, replaceAll; final switch ( mode ) { case Mode.man: writeln(".PP"); writeln(name); break; case Mode.markdown: string nameEscape = name.replaceAll(regex("[^a-zA-Z0-9_-]+"), "-"); writeln(); writefln(`
`); writeln(); break; } } void beginArgDescription() { final switch ( mode ) { case Mode.man: writeln(".RS 4"); break; case Mode.markdown: writeln(); writefln(`
`); writeln(); break; } } void endArgDescription() { final switch ( mode ) { case Mode.man: writeln(".RE"); break; case Mode.markdown: writeln(); writefln(`
`); writeln(); break; } } void writeArgs(string cmdName, CommandArgs args) { beginArgs(cmdName); foreach (arg; args.recognizedArgs) { auto names = arg.names.split("|"); assert(names.length == 1 || names.length == 2); string sarg = names[0].length == 1 ? names[0] : null; string larg = names[0].length > 1 ? names[0] : names.length > 1 ? names[1] : null; string name; if (sarg !is null) { name ~= bold(escapeWord("-%s".format(sarg))); if (larg !is null) name ~= ", "; } if (larg !is null) { name ~= bold(escapeWord("--%s".format(larg))); if (arg.defaultValue.match!((bool b) => false, _ => true)) name ~= escapeWord("=") ~ italic("VALUE"); } writeArgName(cmdName, name); beginArgDescription(); writeln(arg.helpText.join(mode == Mode.man ? "\n" : "\n\n")); endArgDescription(); } endArgs(); } void writeDefinition(string key, string definition) { final switch (mode) { case Mode.man: writeln(".TP"); writeln(bold(key)); writeln(definition); break; case Mode.markdown: writeln(`
`); writeln(); writeln(bold(key)); writeln(); writeln("
"); writeln(`
`); writeln(); writeln(definition); writeln(); writeln("
"); break; } } void beginDefinitionList() { final switch (mode) { case Mode.man: break; case Mode.markdown: writeln(); writeln(`
`); writeln(); break; } } void endDefinitionList() { final switch (mode) { case Mode.man: break; case Mode.markdown: writeln("\n
\n"); break; } } void writeDefaultExitCodes() { string[2][] exitCodes = [ ["0", "DUB succeeded"], ["1", "usage errors, unknown command line flags"], ["2", "package not found, package failed to load, miscellaneous error"] ]; final switch (mode) { case Mode.man: foreach (cm; exitCodes) { writeln(".TP"); writeln(".BR ", cm[0]); writeln(cm[1]); } break; case Mode.markdown: beginDefinitionList(); foreach (cm; exitCodes) { writeDefinition(cm[0], cm[1]); } endDefinitionList(); break; } } } void writeMainManFile(CommandArgs args, CommandGroup[] commands, string fileName, const Config config) { auto manFile = ManWriter( File(config.cwd.buildPath(fileName), "w"), fileName.endsWith(".md") ? ManWriter.Mode.markdown : ManWriter.Mode.man ); manFile.writeHeader("DUB", config); auto seeAlso = [ manFile.autolink(manFile.bold("dmd") ~ "(1)"), manFile.autolink(manFile.bold("rdmd") ~ "(1)") ] .chain(commands .map!(a => a.commands) .joiner .map!(cmd => manFile.autolink(manFile.bold("dub-" ~ cmd.name) ~ "(1)"))) .joiner(", ") .to!string; scope(exit) manFile.writeFooter(seeAlso, config); alias writeln = (m) => manFile.writeln(m); writeln(`dub \- Package and build management system for D`); writeln(manFile.header("SYNOPSIS")); writeln(manFile.bold("dub") ~ text( " [", manFile.escapeWord("--version"), "] [", manFile.italic("COMMAND"), "] [", manFile.italic(manFile.escapeWord("OPTIONS...")), "] ", manFile.escapeWord("--"), " [", manFile.italic(manFile.escapeWord("APPLICATION ARGUMENTS...")), "]" )); writeln(manFile.header("DESCRIPTION")); writeln(`Manages the DUB project in the current directory. 
DUB can serve as a build system and a package manager, automatically keeping track of project's dependencies \- both downloading them and linking them into the application.`); writeln(manFile.header("COMMANDS")); manFile.beginDefinitionList(); foreach (grp; commands) { foreach (cmd; grp.commands) { manFile.writeDefinition(manFile.specialLinkMainCmd(cmd.name), cmd.helpText.join( manFile.mode == ManWriter.Mode.markdown ? "\n\n" : "\n" )); } } writeln(manFile.header("COMMON OPTIONS")); manFile.writeArgs("-", args); } void writeManFile(Command command, const Config config, ManWriter.Mode mode) { import std.uni : toUpper; auto args = new CommandArgs(null); command.prepare(args); string fileName = format(mode == ManWriter.Mode.markdown ? "dub-%s.md" : "dub-%s.1", command.name); auto manFile = ManWriter(File(config.cwd.buildPath(fileName), "w"), mode); auto manName = format("DUB-%s", command.name).toUpper; manFile.writeHeader(manName, config); string[] extraRelated; foreach (arg; args.recognizedArgs) { if (arg.names.canFind("rdmd")) extraRelated ~= manFile.autolink(manFile.bold("rdmd") ~ "(1)"); } if (command.name == "dustmite") extraRelated ~= manFile.autolink(manFile.bold("dustmite") ~ "(1)"); const seeAlso = [manFile.autolink(manFile.bold("dub") ~ "(1)")] .chain(config.relatedSubCommands.map!(s => manFile.autolink(manFile.bold("dub-" ~ s) ~ "(1)"))) .chain(extraRelated) .joiner(", ") .to!string; scope(exit) manFile.writeFooter(seeAlso, config); alias writeln = (m) => manFile.writeln(m); manFile.writefln(`dub-%s \- %s`, command.name, manFile.escapeFulltext(command.description)); writeln(manFile.header("SYNOPSIS")); manFile.write(manFile.bold("dub %s ".format(command.name))); manFile.write(manFile.highlightArguments(command.argumentsPattern)); writeln(manFile.italic(manFile.escapeWord(`OPTIONS...`))); if (command.acceptsAppArgs) { writeln("[-- <%s>]".format(manFile.italic(manFile.escapeWord("application arguments...")))); } writeln(manFile.header("DESCRIPTION")); 
writeln(manFile.escapeFulltext(command.helpText.join("\n\n"))); writeln(manFile.header("OPTIONS")); manFile.writeArgs(command.name, args); writeln(manFile.subheader("COMMON OPTIONS")); manFile.writeln("See ", manFile.autolink(manFile.bold("dub") ~ "(1)")); manFile.writeln(manFile.header("EXIT STATUS")); if (command.name == "dustmite") { manFile.writeln("Forwards the exit code from " ~ manFile.autolink(manFile.bold(`dustmite`) ~ `(1)`)); } else { manFile.writeDefaultExitCodes(); } } void main() { Config config = Config.init; auto commands = getCommands(); // main dub.1 { CommonOptions options; auto args = new CommandArgs(null); options.prepare(args); args.writeMainManFile(commands, "dub.1", config); args.writeMainManFile(commands, "dub.md", config); } string[][] relatedSubCommands = [ ["run", "build", "test"], ["test", "dustmite", "lint"], ["describe", "generate"], ["add", "fetch"], ["init", "add", "convert"], ["add-path", "remove-path"], ["add-local", "remove-local"], ["list", "search"], ["add-override", "remove-override", "list-overrides"], ["clean-caches", "clean", "remove"], ]; // options for each specific command foreach (cmd; commands.map!(a => a.commands).joiner) { string[] related; foreach (relatedList; relatedSubCommands) { if (relatedList.canFind(cmd.name)) related ~= relatedList; } related = related.sort!"a c == cmd.name); config.relatedSubCommands = related; cmd.writeManFile(config, ManWriter.Mode.man); cmd.writeManFile(config, ManWriter.Mode.markdown); } } dub-1.40.0/scripts/rpm-package/000077500000000000000000000000001477246567400162525ustar00rootroot00000000000000dub-1.40.0/scripts/rpm-package/dub.spec000066400000000000000000000017601477246567400177040ustar00rootroot00000000000000## command is: # rpmbuild -ba dub.spec --define 'ver 0.9.21' --define 'rel 0.rc.3' # rpm file will be in ./dub*.rpm # if built on a i386 platform, rpm file will be in ~/rpmbuild/RPMS/i386/dub*.rpm Name: dub Summary: Package manager and meta build tool for the D programming 
language Vendor: rejectedsoftware e.K. Version: %{ver} Release: %{rel} License: MIT Group: Applications/Programming #Source: dub.tar.gz BuildRoot: %{_tmppath}/%{name}-%{version}-%{release}-root-%(%{__id} -u -n) URL: http://code.dlang.org BuildRequires: tar %description Package Manager for the D Programming language %prep #echo prep #tar -xf %{_sourcedir}/dub.tar.gz %build echo build cd %{srcpath} && ./build.d %install echo install rm -rf $RPM_BUILD_ROOT mkdir -p $RPM_BUILD_ROOT%{_bindir}/ cp %{srcpath}/bin/dub $RPM_BUILD_ROOT%{_bindir}/ %files # # list all files that need to be copied here # %defattr(755,root,root,-) /usr/bin/dub %clean cp $RPM_BUILD_ROOT/../../RPMS/*/dub*.rpm . rm -rf $RPM_BUILD_ROOT/../../RPMS/* dub-1.40.0/scripts/rpm-package/make_installer.sh000077500000000000000000000010761477246567400216070ustar00rootroot00000000000000#!/usr/bin/env bash set -e cd ../../ DUB_PATH=`pwd` #rm -f ~/rpmbuild/SOURCES/dub.tar.gz #tar -pczf ~/rpmbuild/SOURCES/dub.tar.gz source build-files.txt build.d LICENSE* cd scripts/rpm-package/ for i in $(git describe | tr "-" "\n"); do if [ "$VER" == "" ]; then VER=${i:1} elif [ "$REL" == "" ]; then REL=0.$i else REL=$REL.$i fi done if [ "$REL" == "" ]; then REL=1 fi ARCH=$(uname -i) echo Building RPM FOR $VER-$REL-$ARCH rpmbuild -ba dub.spec --define "ver $VER" --define "rel $REL" --define="srcpath $DUB_PATH" cp ~/rpmbuild/BUILD/dub-$VER-$REL.$ARCH.rpm . dub-1.40.0/scripts/win-installer/000077500000000000000000000000001477246567400166535ustar00rootroot00000000000000dub-1.40.0/scripts/win-installer/EnvVarUpdate.nsh000066400000000000000000000251571477246567400217430ustar00rootroot00000000000000/** * EnvVarUpdate.nsh * : Environmental Variables: append, prepend, and remove entries * * WARNING: If you use StrFunc.nsh header then include it before this file * with all required definitions. 
This is to avoid conflicts * * Usage: * ${EnvVarUpdate} "ResultVar" "EnvVarName" "Action" "RegLoc" "PathString" * * Credits: * Version 1.0 * * Cal Turney (turnec2) * * Amir Szekely (KiCHiK) and e-circ for developing the forerunners of this * function: AddToPath, un.RemoveFromPath, AddToEnvVar, un.RemoveFromEnvVar, * WriteEnvStr, and un.DeleteEnvStr * * Diego Pedroso (deguix) for StrTok * * Kevin English (kenglish_hi) for StrContains * * Hendri Adriaens (Smile2Me), Diego Pedroso (deguix), and Dan Fuhry * (dandaman32) for StrReplace * * Version 1.1 (compatibility with StrFunc.nsh) * * techtonik * * http://nsis.sourceforge.net/Environmental_Variables:_append%2C_prepend%2C_and_remove_entries * */ !ifndef ENVVARUPDATE_FUNCTION !define ENVVARUPDATE_FUNCTION !verbose push !verbose 3 !include "LogicLib.nsh" !include "WinMessages.NSH" !include "StrFunc.nsh" ; ---- Fix for conflict if StrFunc.nsh is already includes in main file ----------------------- !macro _IncludeStrFunction StrFuncName !ifndef ${StrFuncName}_INCLUDED ${${StrFuncName}} !endif !ifndef Un${StrFuncName}_INCLUDED ${Un${StrFuncName}} !endif !define un.${StrFuncName} "${Un${StrFuncName}}" !macroend !insertmacro _IncludeStrFunction StrTok !insertmacro _IncludeStrFunction StrStr !insertmacro _IncludeStrFunction StrRep ; ---------------------------------- Macro Definitions ---------------------------------------- !macro _EnvVarUpdateConstructor ResultVar EnvVarName Action Regloc PathString Push "${EnvVarName}" Push "${Action}" Push "${RegLoc}" Push "${PathString}" Call EnvVarUpdate Pop "${ResultVar}" !macroend !define EnvVarUpdate '!insertmacro "_EnvVarUpdateConstructor"' !macro _unEnvVarUpdateConstructor ResultVar EnvVarName Action Regloc PathString Push "${EnvVarName}" Push "${Action}" Push "${RegLoc}" Push "${PathString}" Call un.EnvVarUpdate Pop "${ResultVar}" !macroend !define un.EnvVarUpdate '!insertmacro "_unEnvVarUpdateConstructor"' ; ---------------------------------- Macro Definitions 
end------------------------------------- ;----------------------------------- EnvVarUpdate start---------------------------------------- !define hklm_all_users 'HKLM "SYSTEM\CurrentControlSet\Control\Session Manager\Environment"' !define hkcu_current_user 'HKCU "Environment"' !macro EnvVarUpdate UN Function ${UN}EnvVarUpdate Push $0 Exch 4 Exch $1 Exch 3 Exch $2 Exch 2 Exch $3 Exch Exch $4 Push $5 Push $6 Push $7 Push $8 Push $9 Push $R0 /* After this point: ------------------------- $0 = ResultVar (returned) $1 = EnvVarName (input) $2 = Action (input) $3 = RegLoc (input) $4 = PathString (input) $5 = Orig EnvVar (read from registry) $6 = Len of $0 (temp) $7 = tempstr1 (temp) $8 = Entry counter (temp) $9 = tempstr2 (temp) $R0 = tempChar (temp) */ ; Step 1: Read contents of EnvVarName from RegLoc ; ; Check for empty EnvVarName ${If} $1 == "" SetErrors DetailPrint "ERROR: EnvVarName is blank" Goto EnvVarUpdate_Restore_Vars ${EndIf} ; Check for valid Action ${If} $2 != "A" ${AndIf} $2 != "P" ${AndIf} $2 != "R" SetErrors DetailPrint "ERROR: Invalid Action - must be A, P, or R" Goto EnvVarUpdate_Restore_Vars ${EndIf} ${If} $3 == HKLM ReadRegStr $5 ${hklm_all_users} $1 ; Get EnvVarName from all users into $5 ${ElseIf} $3 == HKCU ReadRegStr $5 ${hkcu_current_user} $1 ; Read EnvVarName from current user into $5 ${Else} SetErrors DetailPrint 'ERROR: Action is [$3] but must be "HKLM" or HKCU"' Goto EnvVarUpdate_Restore_Vars ${EndIf} ; Check for empty PathString ${If} $4 == "" SetErrors DetailPrint "ERROR: PathString is blank" Goto EnvVarUpdate_Restore_Vars ${EndIf} ;;khc - here check if length is going to be greater than max string length ;; and abort if so - also abort if original path empty - may mean ;; it was too long as well- write message to say set it by hand Push $6 Push $7 Push $8 StrLen $7 $4 StrLen $6 $5 IntOp $8 $6 + $7 ${If} $5 == "" ${OrIf} $8 >= ${NSIS_MAX_STRLEN} SetErrors DetailPrint "Current $1 length ($6) too long to modify in NSIS; set manually if needed" 
Pop $8 Pop $7 Pop $6 Goto EnvVarUpdate_Restore_Vars ${EndIf} Pop $8 Pop $7 Pop $6 ;;khc ; Make sure we've got some work to do ${If} $5 == "" ${AndIf} $2 == "R" SetErrors DetailPrint "$1 is empty - Nothing to remove" Goto EnvVarUpdate_Restore_Vars ${EndIf} ; Step 2: Scrub EnvVar ; StrCpy $0 $5 ; Copy the contents to $0 ; Remove spaces around semicolons (NOTE: spaces before the 1st entry or ; after the last one are not removed here but instead in Step 3) ${If} $0 != "" ; If EnvVar is not empty ... ${Do} ${${UN}StrStr} $7 $0 " ;" ${If} $7 == "" ${ExitDo} ${EndIf} ${${UN}StrRep} $0 $0 " ;" ";" ; Remove ';' ${Loop} ${Do} ${${UN}StrStr} $7 $0 "; " ${If} $7 == "" ${ExitDo} ${EndIf} ${${UN}StrRep} $0 $0 "; " ";" ; Remove ';' ${Loop} ${Do} ${${UN}StrStr} $7 $0 ";;" ${If} $7 == "" ${ExitDo} ${EndIf} ${${UN}StrRep} $0 $0 ";;" ";" ${Loop} ; Remove a leading or trailing semicolon from EnvVar StrCpy $7 $0 1 0 ${If} $7 == ";" StrCpy $0 $0 "" 1 ; Change ';' to '' ${EndIf} StrLen $6 $0 IntOp $6 $6 - 1 StrCpy $7 $0 1 $6 ${If} $7 == ";" StrCpy $0 $0 $6 ; Change ';' to '' ${EndIf} ; DetailPrint "Scrubbed $1: [$0]" ; Uncomment to debug ${EndIf} /* Step 3. Remove all instances of the target path/string (even if "A" or "P") $6 = bool flag (1 = found and removed PathString) $7 = a string (e.g. path) delimited by semicolon(s) $8 = entry counter starting at 0 $9 = copy of $0 $R0 = tempChar */ ${If} $5 != "" ; If EnvVar is not empty ... 
StrCpy $9 $0 StrCpy $0 "" StrCpy $8 0 StrCpy $6 0 ${Do} ${${UN}StrTok} $7 $9 ";" $8 "0" ; $7 = next entry, $8 = entry counter ${If} $7 == "" ; If we've run out of entries, ${ExitDo} ; were done ${EndIf} ; ; Remove leading and trailing spaces from this entry (critical step for Action=Remove) ${Do} StrCpy $R0 $7 1 ${If} $R0 != " " ${ExitDo} ${EndIf} StrCpy $7 $7 "" 1 ; Remove leading space ${Loop} ${Do} StrCpy $R0 $7 1 -1 ${If} $R0 != " " ${ExitDo} ${EndIf} StrCpy $7 $7 -1 ; Remove trailing space ${Loop} ${If} $7 == $4 ; If string matches, remove it by not appending it StrCpy $6 1 ; Set 'found' flag ${ElseIf} $7 != $4 ; If string does NOT match ${AndIf} $0 == "" ; and the 1st string being added to $0, StrCpy $0 $7 ; copy it to $0 without a prepended semicolon ${ElseIf} $7 != $4 ; If string does NOT match ${AndIf} $0 != "" ; and this is NOT the 1st string to be added to $0, StrCpy $0 $0;$7 ; append path to $0 with a prepended semicolon ${EndIf} ; IntOp $8 $8 + 1 ; Bump counter ${Loop} ; Check for duplicates until we run out of paths ${EndIf} ; Step 4: Perform the requested Action ; ${If} $2 != "R" ; If Append or Prepend ${If} $6 == 1 ; And if we found the target DetailPrint "Target is already present in $1. 
It will be removed and" ${EndIf} ${If} $0 == "" ; If EnvVar is (now) empty StrCpy $0 $4 ; just copy PathString to EnvVar ${If} $6 == 0 ; If found flag is either 0 ${OrIf} $6 == "" ; or blank (if EnvVarName is empty) DetailPrint "$1 was empty and has been updated with the target" ${EndIf} ${ElseIf} $2 == "A" ; If Append (and EnvVar is not empty), StrCpy $0 $0;$4 ; append PathString ${If} $6 == 1 DetailPrint "appended to $1" ${Else} DetailPrint "Target was appended to $1" ${EndIf} ${Else} ; If Prepend (and EnvVar is not empty), StrCpy $0 $4;$0 ; prepend PathString ${If} $6 == 1 DetailPrint "prepended to $1" ${Else} DetailPrint "Target was prepended to $1" ${EndIf} ${EndIf} ${Else} ; If Action = Remove ${If} $6 == 1 ; and we found the target DetailPrint "Target was found and removed from $1" ${Else} DetailPrint "Target was NOT found in $1 (nothing to remove)" ${EndIf} ${If} $0 == "" DetailPrint "$1 is now empty" ${EndIf} ${EndIf} ; Step 5: Update the registry at RegLoc with the updated EnvVar and announce the change ; ClearErrors ${If} $3 == HKLM WriteRegExpandStr ${hklm_all_users} $1 $0 ; Write it in all users section ${ElseIf} $3 == HKCU WriteRegExpandStr ${hkcu_current_user} $1 $0 ; Write it to current user section ${EndIf} IfErrors 0 +4 MessageBox MB_OK|MB_ICONEXCLAMATION "Could not write updated $1 to $3" DetailPrint "Could not write updated $1 to $3" Goto EnvVarUpdate_Restore_Vars ; "Export" our change SendMessage ${HWND_BROADCAST} ${WM_WININICHANGE} 0 "STR:Environment" /TIMEOUT=1 EnvVarUpdate_Restore_Vars: ; ; Restore the user's variables and return ResultVar Pop $R0 Pop $9 Pop $8 Pop $7 Pop $6 Pop $5 Pop $4 Pop $3 Pop $2 Pop $1 Push $0 ; Push my $0 (ResultVar) Exch Pop $0 ; Restore his $0 FunctionEnd !macroend ; EnvVarUpdate UN !insertmacro EnvVarUpdate "" !insertmacro EnvVarUpdate "un." 
;----------------------------------- EnvVarUpdate end---------------------------------------- !verbose pop !endif dub-1.40.0/scripts/win-installer/banner.bmp000066400000000000000000004556561477246567400206450ustar00rootroot00000000000000BM[6(:x[}~uh|~uh|}tg|}tg{|sg{|sg{|sg{|sgz|sg~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~z|sg~~}}}}|||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||}}}}~~y{re~~}}}||{{zzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz{{||}}}~~y{re~}}}||{zzyyyxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxyyyzz{||}}}~y{re~~}}||{zyyxxxwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwxxxyyz{||}}~~yzqe~~}||{zyyxxwvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvwxxyyz{||}~~yyqe~~}||{{zyywwvuuttsssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssttuuvwwyyz{{||}~~wyqd~~}|~{}|y{zw{zw{zw{zw{yw{yvzyvzyvzyvzyvzyvzxvzxvywvywvywuywuywuywuywuywuywuywuywuywuywuywuywuywuywuywuywuywuywuywuywuywuywuywuywuywuywuywuywuywuywuywuywuywuywuywuxvuxvuxvuxvuxvuxvuxvuxvuxvtxvtxvtxvtxvtxvtxvtxvtxvtxvtxvtxvtxvtxvtxvtxvtxvtxvtxvtxvtxvtxvtxvtwvtwvtwvtwvtwvtwvtwvtwvtwvtwvtwvtwvtwvtwvtwvtwvtwutwutwutwutwutwutwutwutwuswuswuswuswuswuswuswuswusvuswuswuswuswuswutwutwvtwvtwvtxvtxvtxwtxwtywt{zw}}~~wyqd~}||[YWxxxiiiggggggggggggggggggggggggggggggggggggggggggfffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeedddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddccccccccccccccccccccccccccccccccccccccc
cccffftttni`|}~wypd~}}|{WVSPPPxxxiiigggggggggggggggggggggggggggggggggggggggfffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeedddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddcccccccccccccccccccccccccccccccccccccccfffttt>>>lf^|}}~~wypd~~}|{zWUSKKKQQQ{{{iiiggggggggggggggggggggggggggggggggggggfffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeedddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddccccccccccccccccccccccccccccccccccccgggqqq>>>:::kf]{|}~~~wypd~}|{zzyWUSKKKKKKQQQ{{{iiigggggggggggggggggggggggggggggggggfffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeedddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddccccccccccccccccccccccccccccccccchhhsss===::::::ke]zz{|}~}wwoc~}|{{zyxWUSKKKKKKKKKQQQ{{{iiiggggggggggggggggggggggggggggggfffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeedddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddcccccccccccccccccccccccccccccchhhsss===;;;::::::je\yz{{|}~}vwoc}}|{zyxwWURKKKKKKKKKKKKQQQzzziiigggggggggggggggggggggggggggfffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeddddddddddddddddddddddddddddddddddddddddddddd
ddddddddddddddddddddddddddddddddddddddddddddddddddddddccccccccccccccccccccccccccchhhsss===;;;;;;::::::id\xyz{|}}}vwoc~}|{{zxxvVURKKKKKKKKKKKKKKKPPPzzziiiggggggggggggggggggggggggfffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeedddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddccccccccccccccccccccccccjjjppp<<<;;;;;;;;;::::::hc[xxz{{|}~}vvoc~}|{zyxwvVURKKKKKKKKKKKKKKKJJJPPPzzziiigggggggggggggggggggggfffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeedddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddcccccccccccccccccccccjjjrrr<<<;;;;;;;;;;;;::::::hc[wxyz{|}~|vvoc~~}{zzywvutVTRKKKKKKKKKKKKKKKJJJJJJPPP{{{iiiggggggggggggggggggfffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeedddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddccccccccccccccccccjjjqqq<<<;;;;;;;;;;;;;;;::::::gbZuvwyzz{}~~{uvnb~}|{zyxwvtsVTRKKKKKKKKKKKKKKKJJJJJJJJJPPPwwwuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttsssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssrrrrrrrrrrrrrrrrrrmmm<<<;;;;;;;;;;;;;;;;;;::::::gbZtvwxyz{|}~{uvnb~}|{zyxvutsqUTQKKKKKKKKKKKKKKKJJJJJJJJJJJJJJJJJJJJJJJJIIIIIIIIIIIIIIIIIIIIIIIIHHHHHHHHHHHHHHHHHHHHHHHHGGGGGGGGGGGGGGGGGGGGGFFFFFFFFFFFFFFFFFFFFFFFFEEEEEEEEEEEEEEEEEEEEEEEEDDDDDDDD
DDDDDDDDDDDDDDDDCCCCCCCCCCCCCCCCCCCCCCCCBBBBBBBBBBBBBBBBBBBBBAAAAAAAAAAAAAAAAAAAAAAAA@@@@@@@@@@@@@@@@@@@@@@@@????????????????????????>>>>>>>>>>>>>>>>>>>>>>>>=====================<<<<<<<<<<<<<<<<<<<<<<<<;;;;;;;;;;;;;;;;;;;;;;;;::::::faXstuvxyz{|}~~ztvna~~}|{zxwvutrqUTQKKKKKKKKKKKKKKKJJJJJJJJJJJJJJJJJJJJJJJJIIIIIIIIIIIIIIIIIIIIIIIIHHHHHHHHHHHHHHHHHHHHHHHHGGGGGGGGGGGGGGGGGGGGGFFFFFFFFFFFFFFFFFFFFFFFFEEEEEEEEEEEEEEEEEEEEEEEEDDDDDDDDDDDDDDDDDDDDDDDDCCCCCCCCCCCCCCCCCCCCCCCCBBBBBBBBBBBBBBBBBBBBBAAAAAAAAAAAAAAAAAAAAAAAA@@@@@@@@@@@@@@@@@@@@@@@@????????????????????????>>>>>>>>>>>>>>>>>>>>>>>>=====================<<<<<<<<<<<<<<<<<<<<<<<<;;;;;;;;;;;;;;;;;;;;;;;;::::::eaXrtuvwxz{|}~~~ztvna~~}|{zxwvusqpUTQKKKKKKKKKKKKKKKJJJJJJJJJJJJJJJJJJJJJJJJIIIIIIIIIIIIIIIIIIIIIIIIHHHHHHHHHHHHHHHHHHHHHHHHGGGGGGGGGGGGGGGGGGGGGFFFFFFFFFFFFFFFFFFFFFFFFEEEEEEEEEEEEEEEEEEEEEEEEDDDDDDDDDDDDDDDDDDDDDDDDCCCCCCCCCCCCCCCCCCCCCCCCBBBBBBBBBBBBBBBBBBBBBAAAAAAAAAAAAAAAAAAAAAAAA@@@@@@@@@@@@@@@@@@@@@@@@????????????????????????>>>>>>>>>>>>>>>>>>>>>>>>=====================<<<<<<<<<<<<<<<<<<<<<<<<;;;;;;;;;;;;;;;;;;;;;;;;::::::e`Xqsuvwxz{|}~~~ztvna~~}|{zxwutsq~pUSQKKKKKKKKKKKKKKKJJJJJJJJJJJJJJJJJJJJJJJJIIIIIIIIIIIIIIIIIIIIIIIIHHHHHHHHHHHHHHHHHHHHHHHHGGGGGGGGGGGGGGGGGGGGGFFFFFFFFFFFFFFFFFFFFFFFFEEEEEEEEEEEEEEEEEEEEEEEEDDDDDDDDDDDDDDDDDDDDDDDDCCCCCCCCCCCCCCCCCCCCCCCCBBBBBBBBBBBBBBBBBBBBBAAAAAAAAAAAAAAAAAAAAAAAA@@@@@@@@@@@@@@@@@@@@@@@@????????????????????????>>>>>>>>>>>>>>>>>>>>>>>>=====================<<<<<<<<<<<<<<<<<<<<<<<<;;;;;;;;;;;;;;;;;;;;;;;;::::::d`Xqstuwxz{|}~~~ztuma~~}{zyxvutrq}oUSQKKKKKKKKKKKKKKKJJJJJJJJJJJJJJJJJJJJJJJJIIIIIIIIIIIIIIIIIIIIIIIIHHHHHHHHHHHHHHHHHHHHHHHHGGGGGGGGGGGGGGGGGGGGGFFFFFFFFFFFFFFFFFFFFFFFFEEEEEEEEEEEEEEEEEEEEEEEEDDDDDDDDDDDDDDDDDDDDDDDDCCCCCCCCCCCCCCCCCCCCCCCCBBBBBBBBBBBBBBBBBBBBBAAAAAAAAAAAAAAAAAAAAAAAA@@@@@@@@@@@@@@@@@@@@@@@@????????????????????????>>>>>>>>>>>>>>>>>>>>>>>>=====================<<<<<<<<<<<<<<<<<<<<<<<<;;;;;;;;;;;;;;;;;;;;;;;;::::::d_Wqrtuvxyz{}~~~ztuma~}}|zyxwvtsq~p
|nUSQKKKKKKKKKKKKKKKJJJJJJJJJJJJJJJJJJJJJJJJIIIIIIIIIIIIIIIIIIIIIIIIHHHHHHHHHHHHHHHHHHHHHHHHGGGGGGGGGGGGGGGGGGGGGFFFFFFFFFFFFFFFFFFFFFFFFEEEEEEEEEEEEEEEEEEEEEEEEDDDDDDDDDDDDDDDDDDDDDDDDCCCCCCCCCCCCCCCCCCCCCCCCBBBBBBBBBBBBBBBBBBBBBAAAAAAAAAAAAAAAAAAAAAAAA@@@@@@@@@@@@@@@@@@@@@@@@????????????????????????>>>>>>>>>>>>>>>>>>>>>>>>=====================<<<<<<<<<<<<<<<<<<<<<<<<;;;;;;;;;;;;;;;;;;;;;;;;::::::c^W~pqstvwxyz|}}}zsum`~}||zyxwvtrq~o|nUSQKKKKKKKKKKKKKKKJJJJJJJJJJJJJJJJJJJJJJJJIIIIIIIIIIIIIIIIIIIIIIIIHHHHHHHHHHHHHHHHHHHHHHHHGGGGGGGGGGGGGGGGGGGGGFFFFFFFFFFFFFFFFFFFFFFFFEEEEEEEEEEEEEEEEEEEEEEEEDDDDDDDDDDDDDDDDDDDDDDDDCCCCCCCCCCCCCCCCCCCCCCCCBBBBBBBBBBBBBBBBBBBBBAAAAAAAAAAAAAAAAAAAAAAAA@@@@@@@@@@@@@@@@@@@@@@@@????????????????????????>>>>>>>>>>>>>>>>>>>>>>>>=====================<<<<<<<<<<<<<<<<<<<<<<<<;;;;;;;;;;;;;;;;;;;;;;;;::::::c^W~oqrtvwxyz||}}zsum`~}||zyxwvtrq~o|nTSQKKKKKKKKKKKKKKKJJJJJJJJJJJJJJJJJJJJJJJJIIIIIIIIIIIIIIIIIIIIIIIIHHHHHHHHHHHHHHHHHHHHHHHHGGGGGGGGGGGGGGGGGGGGGFFFFFFFFFFFFFFFFFFFFFFFFEEEEEEEEEEEEEEEEEEEEEEEEDDDDDDDDDDDDDDDDDDDDDDDDCCCCCCCCCCCCCCCCCCCCCCCCBBBBBBBBBBBBBBBBBBBBBAAAAAAAAAAAAAAAAAAAAAAAA@@@@@@@@@@@@@@@@@@@@@@@@????????????????????????>>>>>>>>>>>>>>>>>>>>>>>>=====================<<<<<<<<<<<<<<<<<<<<<<<<;;;;;;;;;;;;;;;;;;;;;;;;::::::c^W~oqrtvwxyz||}}zstm`~}|{zyxwvtrq~o|nTSQKKKKKKKKKKKKKKKJJJJJJJJJJJJJJJJJJJJJJJJIIIIIIIIIIIIIIIIIIIIIIIIHHHHHHHHHHHHHHHHHHHHHHHHGGGGGGGGGGGGGGGGGGGGGFFFFFFFFFFFFFFFFFFFFFFFFEEEEEEEEEEEEEEEEEEEEEEEEDDDDDDDDDDDDDDDDDDDDDDDDCCCCCCCCCCCCCCCCCCCCCCCCBBBBBBBBBBBBBBBBBBBBBAAAAAAAAAAAAAAAAAAAAAAAA@@@@@@@@@@@@@@@@@@@@@@@@????????????????????????>>>>>>>>>>>>>>>>>>>>>>>>=====================<<<<<<<<<<<<<<<<<<<<<<<<;;;;;;;;;;;;;;;;;;;;;;;;::::::c^W~oqrtvwxyz{|}}zstm`}|{zyxwvusrp}n{mTSQKKKKKKKKKKKKKKKJJJJJJJJJJJJJJJJJJJJJJJJIIIIIIIIIIIIIIIIIIIIIIIIHHHHHHHHHHHHHHHHHHHHHHHHGGGGGGGGGGGGGGGGGGGGGFFFFFFFFFFFFFFFFFFFFFFFFEEEEEEEEEEEEEEEEEEEEEEEEDDDDDDDDDDDDDDDDDDDDDDDDCCCCCCCCCCCCCCCCCCCCCCCCBBBBBBBBBBBBBBBBBBBBBAAAAAAAAAAAAAAAAAAAAAA
AA@@@@@@@@@@@@@@@@@@@@@@@@????????????????????????>>>>>>>>>>>>>>>>>>>>>>>>=====================<<<<<<<<<<<<<<<<<<<<<<<<;;;;;;;;;;;;;;;;;;;;;;;;::::::b^V}nprsuvwxyz{||yrsl_}|{zyxwvusrp}n{mTSQKKKKKKKKKKKKKKKJJJJJJJJJJJJJJJJJJJJJJJJIIIIIIIIIIIIIIIIIIIIIIIIHHHHHHHHHHHHFNT?m?=99799<>?~BUaFFFFFFFFFFFFFFFFFFEEEEEEEEEEEEEEEEEEEEEEEEDDDDDDDDDDDDDDDDDDDDDCFH>h><:9788;>=>YiBBBBBBBBBAAAAAAAAAAAAAAAAAAAAAAAA@@@@@@@@@@@@@@@@@@@@@@@@???????@A;\p<<:88789;;:Zn>@A=====================<<<<<<<<<<<<<<<<<<<<<<<<;;;;;;;;;;;;;;;;;;;;;;;;::::::b^V}nprsuvwxyz{||yrsl_}|{zyxwvusr~p|nzmTSQKKKKKKKKKKKKKKKJJJJJJJJJJJJJJJJJJJJJJJJIIIIIIIIIIIIIIIIIIIIIIIIHHHGKN?z<73*((((((((.6:@DV`FFFFFFFFFEEEEEEEEEEEEEEEEEEEEEEEEDDDDDDDDDDDDDEE@u=73*((((((((-58>@\mAAAAAAAAAAAAAAAAAAAAAAAA@@@@@@@@@@@@@@@@@@@@@@@@=dz=84,((((((((-57<:bz===============<<<<<<<<<<<<<<<<<<<<<<<<;;;;;;;;;;;;;;;;;;;;;;;;::::::b]V|n~prsuvwxyz{||yrsk_}|{zyxwvusr~p|nzmTSQKKKKKKKKKKKKKKKJJJJJJJJJJJJJJJJJJJJJJJJIIIIIIIIIIIIIIIIIIIIIHLO@90((((((((((((((()5>EapFFFEEEEEEEEEEEEEEEEEEEEEEEEDDDDDDDFFA:/((((((((((((((((3;@lAAAAAAAAAAAAAAAAAA@@@@@@@@@@@@@@@@@@@w:1((((((((((((((()3:BS\AAAAAAAAAAAA@@@@@@@@@@@@A^l<-((((((((((((((((((((0<<<<<<<<<<<<<<<<<<<<<<<<<;;;;;;;;;;;;;;;;;;;;;;;;::::::b]V|n~oqrtuvwxyz{{xqsk^|{zyxwvutrq}o{nylTSPKKKKKKKKKKKKKKKJJJJJJJJJJJJJJJJJJJJJJJJIIIIIIIIIGW_=*((((((((((((((((((((((((((2DEEEEEEEEEEQW@)((((((((((((((((((((((((((.BABBAAA@@@@CDA+((((((((((((((((((((((((((->=@C<<<<<<<<<<<<<<<<<<;;;;;;;;;;;;;;;;;;;;;;;;::::::b]V{n}oqrtuvwxyz{{xqrj^|{zyxwvutrq}o{nylTSPKKKKKKKKKKKKKKKJJJJJJJJJJJJJJJJJJJJJJJJIIIIIIG[g<(((((((((((((((((((((((((((((.DEEEFW^=(((((((((((((((((((((((((((((*AABBAGI?)((((((((((((((((((((((((((((*==DG<<<<<<<<<<<<<<<;;;;;;;;;;;;;;;;;;;;;;;;::::::b]V{n}oqrtuvwxyz{{xqrj^{zyxwvutsqp}n{mykTSPKKKKKKKKKKKKKKKJJJJJJJJJJJJJJJJJJJJJJJJIIIHT[<(((((((((((((((((((((((((((((((.F=(((((((((((((((((((((((((((((((*B@)(((((((((((((((((((((((((((((((>=?A<<<<<<<<<<<<;;;;;;;;;;;;;;;;;;;;;;;;::::::b]U{m}npqstuvwxyzzwprj^{zyxwvutsqp}n{my
kTSPKKKKKKKKKKKKKKKJJJJJJJJJJJJJJJJJJJJJJJJIKK?(((((((((((((((((((((((((((((((((+(((((((((((((((((((((((((((((((((*)(((((((((((((((((((((((((((((((()@<<<<<<<<<<<<;;;;;;;;;;;;;;;;;;;;;;;;::::::b]U{m}npqstuvwxyzzwprj^zyxwvutsrp~o|mzlyjTRPKKKKKKKKKKKKKKKJJJJJJJJJJJJJJJJJJJJJJJJB*((((((((((((((((((((((((((((((((((((((((((((((((((((((((((((((((((((((((((((((((((((((((((((((((((((((,A|<<<<<<<<<;;;;;;;;;;;;;;;;;;;;;;;;::::::a]Uzl|m~oprstuvwxyyv~oqj]zyxwvutsrp~o|mzlyjTRPKKKKKKKKKKKKKKKJJJJJJJJJJJJJJJJJJJJJGn1((((((((((((((6?BCB@;+((((((((((((((((((((((((((2>@AB@<-((((((((((((((((((((((((((.<>??>=.((((((((((((((4?T]<<<<<<;;;;;;;;;;;;;;;;;;;;;;;;::::::a]Uzl|m~oprstuvwxyyv~oqj]zyxwvutsr~p}o{mylxjTRPKKKKKKKKKKKKKKKJJJJJJJJJJJJJJJJJJJKK>((((((((((((,AI{IY`GGGGGGGGGGGGGGGHNQJsD1((((((((((((((((((((()=F~F\eCCCCCCCCCCCCCCCBEFFjyC5((((((((((((((((((((((7CDcq?AB???>>>>>>>>>>@AC`lB7(((((((((((((?<<<<<<;;;;;;;;;;;;;;;;;;;;;;;;::::::a\Uyl{m}o~prstuvwxyyv}opi]zyxwvutsr~p}o{mylxjTRPKKKKKKKKKKKKKKKJJJJJJJJJJJJJJJJJJG,(((((((((((:IwHIIHHHHHHGGGGGGGGGGGGGGGGGGGGGFFFHftA(((((((((((((((((((6E~CGICCCCCCCCCCCCCCCCCCCCCBBBBBBBBBE\eB+((((((((((((((((((-CBRY????????????>>>>>>>>>>>>>>>>>>APWC-(((((((((((.Adt<<<;;;;;;;;;;;;;;;;;;;;;;;;::::::a\Uyl{m}o~prstuvwxyyv}opi]zyxwvutsq~p}n{mykxjTRPKKKKKKKKKKKKKKKJJJJJJJJJJJJJJJJJJ>(((((((((()AHX_HHHHHHHHHHHHGGGGGGGGGGGGGGGGGGGGGFFFFFFGLOE+((((((((((((((((=D^iDDDCCCCCCCCCCCCCCCCCCCCCCCCBBBBBBBBBBBBBDEE/((((((((((((((((0D|?@A???????????????>>>>>>>>>>>>>>>>>>>>>>?@Dz0(((((((((((?<<<;;;;;;;;;;;;;;;;;;;;;;;;::::::a\Uyk{m}n~pqstuvwxyyv}npi]zyxwvutsq~p|nzmykwjTRPKKKKKKKKKKKKKKKJJJJJJJJJJJJJJJHl~/((((((((((@HLNHHHHHHHHHHHHHHHGGGGGGGGGGGGGGGGGGGGGFFFFFFFFFFGGE(((((((((((((((>>>>>>>>>>>>>>>>>>>>>>>===Dy,((((((((((2@Wa;;;;;;;;;;;;;;;;;;;;;;;;::::::`\Uykzm|n~pqstuvwxyyv}nph]zyxwvutsq~p|nzmykwjTRPKKKKKKKKKKKKKKKJJJJJJJJJJJJJJJB((((((((((:HU\HHHHHHHHHHHHHHHHHHGGGGGGGGGGGGGGGGGGGGGFFFFFFFFFFFFGLOA(((((((((((((6C]jDDDDDDDDDCCCCCCCCCCCCCCCCCCCCCCCCBBBBBBBBBBBBBBBBBBBEFB(((((((
(((((((D?@A?????????????????????>>>>>>>>>>>>>>>>>>>>>>>>====>?B~(((((((((((B{;;;;;;;;;;;;;;;;;;;;;;;;::::::`\Uykzm|n~pqstuvwxyyv}nph]yxwvutsrp~o|mzlyj~wiSRPKKKKKKKKKKKKKKKJJJJJJJJJJJJJJJ<(((((((((,E~HHHHHHHHHHHHHHHHHHHHHGGGGGGGGGGGGGGGGGGGGGFFFFFFFFFFFFFFFHhw0(((((((((((,@DDDDDDDDDDDDCCCCCCCCCCCCCCCCCCCCCCCCBBBBBBBBBBBBBBBBBBBBBDZc4((((((((((((9CV^????????????????????????>>>>>>>>>>>>>>>>>>>>>>>>======@S[7((((((((((>;;;;;;;;;;;;;;;;;;;;;;;;::::::`\Tyjzl|m~oprstuvwxxu}noh\zyxwvutsq~p|nzmyk~wjSRPKKKKKKKKKKKKKKKJJJJJJJJJJJJJY`2(((((((((=HHHHHHHHHHHHHHHHHHHHHHHHGGGGGGGGGGGGGGGGGGGGGFFFFFFFFFFFFFFFFFFC(((((((((((:DGHDDDDDDDDDDDDCCCCCCCCCCCCCCCCCCCCCCCCBBBBBBBBBBBBBBBBBBBBBAAAD((((((((((()F~@@@????????????????????????>>>>>>>>>>>>>>>>>>>>>>>>=========Dw)(((((((((5>KQ;;;;;;;;;;;;;;;;;;;;;::::::`\Uykzm|n~pqstuvwxyyv}noh]zyxwvuts~q}p{nymxk~vjSRPKKKKKKKKKKKKKKKJJJJJJJJJJJJG}*(((((((()CHHHHHHHHHHHHHHHHHHHHHHHHGGGGGGGGGGGGGGGGGGGGGFFFFFFFFFFFFFFFFFFHt*((((((((()?DDDDDDDDDDDDDDDCCCCCCCCCCCCCCCCCCCCCCCCBBBBBBBBBBBBBBBBBBBBBAAAFgu-((((((((((0Ees@@@????????????????????????>>>>>>>>>>>>>>>>>>>>>>>>=========Cfu0(((((((((+Bet;;;;;;;;;;;;;;;;;;;;;::::::`[Uxkym{n}p~qstuvwxyyv|nog]yxwvutsr~p|ozmylwj~uiSRPKKKKKKKKKKKKKKKJJJJJJJJJJJJE(((((((((4HYbHHHHHHHHHHHHHHHHHHHHHHHHGGGGGGGGGGGGGGGGGGGGGFFFFFFFFFFFFFFFFFFGOS9(((((((((3>dzDDDDDDDDDDDDDDDCCCCCCCCCCCCCCCCCCCCCCCCBBBBBBBBBBBBBBBBBBBBBAAABFH=((((((((((?AEG@@@????????????????????????>>>>>>>>>>>>>>>>>>>>>>>>=========>EH=((((((((((Bo;;;;;;;;;;;;;;;;;;;;;::::::`[Twjylzm|o~prstuvwxxu{nng\yxwvutsr~p|ozmyl~wj}uiSRPKKKKKKKKKKKKKKKJJJJJJJJJJJJC(((((((((:IIIHHHHHHHHHHHHHHHHHHHHHHHHGGGGGGGGGGGGGGGGGGGGGFFFFFFFFFFFFFFFFFFFFFE(((((((((6BJPDDDDDDDDDDDDDDDCCCCCCCCCCCCCCCCCCCCCCCCBBBBBBBBBBBBBBBBBBBBBAAAAAAK((((((((((M@@@@@@????????????????????????>>>>>>>>>>>>>>>>>>>>>>>>============Az((((((((((Ax;;;;;;;;;;;;;;;;;;;;;::::::_[T~wjylzm|o~prstuvwxxu{nmg\yxwvutsr~p|ozmyl~wj}uiSRPKKKKKKKKKKKKKKKJJJJJJJJJJJJB(((((((((>>>>>>>>>>>>>>>>>>>>>>>============Cv((((((((((B
~;;;;;;;;;;;;;;;;;;;;;::::::_[T~wjylzm|o~prstuvwxxu{nmg\xwvutsr~q}o{nylxk~vi}thSRPKKKKKKKKKKKKKKKJJJJJJJJJJJJ@(((((((((>>>>>>>>>>>>>>>>>>>>>>>============Dr((((((((((@{;;;;;;;;;;;;;;;;;;;;;::::::_ZS~vixkyl{n}o~qrstuvwwtzmmf[xwvutsr~q}o{nylxk~vi}thSRPKKKKKKKKKKKKKKKJJJJJJJJJJJJC(((((((((;IIIHHHHHHHHHHHHHHHHHHHHHHHHGGGGGGGGGGGGGGGGGGGGGFFFFFFFFFFFFFFFFFFFFFM(((((((((:DDDDDDDDDDDDDDDDDDCCCCCCCCCCCCCCCCCCCCCCCCBBBBBBBBBBBBBBBBBBBBBAAAAAAZ((((((((((V@@@@@@????????????????????????>>>>>>>>>>>>>>>>>>>>>>>>============Cy((((((((((A{;;;;;;;;;;;;;;;;;;;;;::::::_ZS~vixkyl{n}o~qrstuvwwtzmmf[wvutsr~q}p|nzmxkwj~ui|tgSROKKKKKKKKKKKKKKKJJJJJJJJJJJJD(((((((((7IIIHHHHHHHHHHHHHHHHHHHHHHHHGGGGGGGGGGGGGGGGGGGGGFFFFFFFFFFFFFFFFFFFFFG(((((((((:IIIDDDDDDDDDDDDDDDCCCCCCCCCCCCCCCCCCCCCCCCBBBBBBBBBBBBBBBBBBBBBAAABBBZ((((((((((Q@@@@@@????????????????????????>>>>>>>>>>>>>>>>>>>>>>>>============A((((((((((Bu;;;;;;;;;;;;;;;;;;;;;::::::^ZS~uiwjxkzm|n}p~qrstuvvsylme[wvutsr~q}p|nzmxkwj}ui{tgSROKKKKKKKKKKKKKKKJJJJJJJJJJJJF(((((((((/FoHHHHHHHHHHHHHHHHHHHHHHHHGGGGGGGGGGGGGGGGGGGGGFFFFFFFFFFFFFFFFFFGUZ8(((((((((9^^^DDDDDDDDDDDDDDDCCCCCCCCCCCCCCCCCCCCCCCCBBBBBBBBBBBBBBBBBBBBBAAAOOOY((((((((((DBQX@@@????????????????????????>>>>>>>>>>>>>>>>>>>>>>>>=========?IN:((((((((()Ck};;;;;;;;;;;;;;;;;;;;;::::::^ZS}uiwjxkzm|n}p~qrstuvvsylle[wvutsr~q}p|nzmxkwj}ui{tgSROKKKKKKKKKKKKKKKJJJJJJJJJJJJIu*(((((((((AHHHHHHHHHHHHHHHHHHHHHHHHGGGGGGGGGGGGGGGGGGGGGFFFFFFFFFFFFFFFFFFG((((((((((9qqqDDDDDDDDDDDDDDDCCCCCCCCCCCCCCCCCCCCCCCCBBBBBBBBBBBBBBBBBBBBBAAAbbbY((((((((((.Fo@@@????????????????????????>>>>>>>>>>>>>>>>>>>>>>>>=========Dj{-(((((((((/@]i;;;;;;;;;;;;;;;;;;;;;::::::^ZS}uiwjxkzm|n}p~qrstuvvsylle[vutsrq~p|o{mylxkvi}th{sfSROKKKKKKKKKKKKKKKJJJJJJJJJJJJKSW5(((((((((8GSYHHHHHHHHHHHHHHHHHHHHHGGGGGGGGGGGGGGGGGGGGGFFFFFFFFFFFFFFFFGG?((((((((((9sssOOODDDDDDDDDDDDCCCCCCCCCCCCCCCCCCCCCCCCBBBBBBBBBBBBBBBBBBBBBCCCkkkZ(((((((((((C@AA????????????????????????>>>>>>>>>>>>>>>>>>>>>>>>=========B((((((((((:<@A;;;;;;;;;;;;;;;;;;;;;::::::^ZR}t
hvixkyl{m|o~pqrstuurxkldZvutsrq~p|o{mylxk~vi|thzsfSROKKKKKKKKKKKKKKKJJJJJJJJJJJJJJJ?((((((((()AHHHHHHHHHHHHHHHHHHHHHGGGGGGGGGGGGGGGGGGGGGFFFFFFFFFFFFFFFF{+((((((((((9iiiqqqDDDDDDDDDDDDCCCCCCCCCCCCCCCCCCCCCCCCBBBBBBBBBBBBBBBBBBBBB]]]iiiZ(((((((((((0Ek{????????????????????????>>>>>>>>>>>>>>>>>>>>>>>>======Cds1((((((((((@;;;;;;;;;;;;;;;;;;;;;;;;::::::]ZR|th~vixkyl{m|o~pqrstuurxkkdZvutsrq~p|o{mylxk~vi|thzsfSROKKKKKKKKKKKKKKKJJJJJJJJJJJJJJJF((((((((((2DqHHHHHHHHHHHHHHHHHHGGGGGGGGGGGGGGGGGGGGGFFFFFFFFFFFFFYb9(((((((((((9hhhpppcccDDDDDDDDDCCCCCCCCCCCCCCCCCCCCCCCCBBBBBBBBBBBBBBBBBBLLLnnnhhhZ((((((((((((>@IM?????????????????????>>>>>>>>>>>>>>>>>>>>>>>>===>EH>((((((((((*Cp;;;;;;;;;;;;;;;;;;;;;;;;::::::]ZR|th~vixkyl{m|o~pqrstuurxkkdZutsrq~p}o|nzmxkwj}uh{tgyreSQOKKKKKKKKKKKKKKKJJJJJJJJJJJJJJJJ\d3((((((((((7FaqHHHHHHHHHHHHHHHGGGGGGGGGGGGGGGGGGGGGFFFFFFFFFGNR>((((((((((((9hhhhhhrrrZZZDDDDDDCCCCCCCCCCCCCCCCCCCCCCCCBBBBBBBBBBBBBBBFFFppphhhhhhZ(((((((((((((B?AB??????????????????>>>>>>>>>>>>>>>>>>>>>>>>=>?A(((((((((((9>FJ;;;;;;;;;;;;;;;;;;;;;;;;::::::]YR{tg}uhwjxkzm|n}o~pqrstt~qwjjdYutsr~q}p|o{nymwkvj}th{sgyqeSQOKKKKKKKKKKKKKKKJJJJJJJJJJJJJJJJJJB(((((((((((7DsHHHHHHHHHHHHGGGGGGGGGGGGGGGGGGGGGFFFFFFEXb=)((((((((((((9hhhhhhhhhrrrcccDDDCCCCCCCCCCCCCCCCCCCCCCCCBBBBBBBBBBBBJJJpppiiihhhhhhZ(((((((((((((*@@FH???????????????>>>>>>>>>>>>>>>>>>>>>>AC@*(((((((((((B<<<;;;;;;;;;;;;;;;;;;;;;;;;::::::]XR{sg}thvjwkym{n|o}p~qrstt}qwjjcYtsrq~p}o|n{mylwj~vi|tgzsfyqeRQOKKKKKKKKKKKKKKKJJJJJJJJJJJJJJJJJJIk{/(((((((((((2AFRXHHHHHHGGGGGGGGGGGGGGGGGGGGGFFFC8((((((((((((((9hhhhhhhhhhhhoooqqqOOOCCCCCCCCCCCCCCCCCCCCCBBBBBBCCC___rrriiihhhhhhhhhZ(((((((((((((();@k????????????>>>>>>>>>>>>>>>>>>?g|=)(((((((((((5?OV<<<;;;;;;;;;;;;;;;;;;;;;;;;::::::]XRzsf|tg~viwjyl{m|n}o~pqrss}p~wijcXtsrq~p}o|n{mylwj~vi|tgzsfyqeRQOKKKKKKKKKKKKKKKJJJJJJJJJJJJJJJJJJJJJC(((((((((((()8@CrGHHGGGGGGGGGGGGD]kB<,(((((((((((((((9hhhhhhhhhhhhhhhiiisssooo^^^FFFCCCCCCCCCCCCKKKeeeqqqnnnhhhhhhhhhhhhhhhZ((((((((((((((((0HN???>>>>>>>>>=FK?=0((((((((
(((()C<<<<<<;;;;;;;;;;;;;;;;;;;;;;;;::::::]XRzsf|tg~viwjyl{m|n}o~pqrss}p~wijcXtsr~q}p|o{nzmxl~wj}ui{sgyrfxpeRQOKKKKKKKKKKKKKKKJJJJJJJJJJJJJJJJJJJJJJZa8((((((((((((((07:::83)(((((((((((((((((9hhhhhhhhhhhhhhhhhhhhhhhhnnnttttttqqqssssssssskkkhhhhhhhhhhhhhhhhhhhhhZ((((((((((((((((((+579986+((((((((((((((==CE<<<<<<;;;;;;;;;;;;;;;;;;;;;;;;::::::\XRyrf{sg}ui~wjxlzm{n|o}p~qrss|p}viibXsr~q}p|o{nzmylwk~vi}th{sgyqexodRQOKKKKKKKKKKKKKKKJJJJJJJJJJJJJJJJJJJJJJJJH|.((((((((((((((((((((((((((((((((((((((9hhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhZ(((((((((((((((((((((((((((((((((((((((3@[g<<<<<<<<<;;;;;;;;;;;;;;;;;;;;;;;;::::::\WQyqe{sg}th~viwkylzm{n|o}p~qr~r{o}uhiaXsr~q}p|o{nzmylwk~vi}th{sgyqexodRQOKKKKKKKKKKKKKKKJJJJJJJJJJJJJJJJJJJJJJJJIIIE*(((((((((((((((((((((((((((((((((((((9hhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhZ((((((((((((((((((((((((((((((((((((((/Au<<<<<<<<<<<<;;;;;;;;;;;;;;;;;;;;;;;;::::::\WQyqe{sg}th~viwkylzm{n|o}p~qr~r{o}uhiaXr~q}p|o{nzmylxk~vj}ui{sgyrfxpdvocRQOKKKKKKKKKKKKKKKJJJJJJJJJJJJJJJJJJJJJJJJIIIIJJC)((((((((((((((((((((((((((((((((((((9hhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhZ(((((((((((((((((((((((((((((((((((((,B<<<<<<<<<<<<<<<;;;;;;;;;;;;;;;;;;;;;;;;::::::[WQxpdyrf{sg}ui~vjxkylzm{n|o}p~q~qzn|tggaWr~q}p|o{nzmylxk~vj}ui{sgyrfxpdvocRQOKKKKKKKKKKKKKKKJJJJJJJJJJJJJJJJJJJJJJJJIIIIIIILMC*(((((((((((((((((((((((((((((((((((9hhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhZ((((((((((((((((((((((((((((((((((((.A<<<<<<<<<<<<<<<<<<;;;;;;;;;;;;;;;;;;;;;;;;::::::[WQxpdyrf{sg}ui~vjxkylzm{n|o}p~q~qzn|tggaWq~p}o|n{mzlykxj~vi}uh{sfyrexpcvobRQOKKKKKKKKKKKKKKKJJJJJJJJJJJJJJJJJJJJJJJJIIIIIIIIIIJJE.((((((((((((((((((((((((((((((((((9hhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhZ(((((((((((((((((((((((((((((((((((3Av<<<<<<<<<<<<<<<<<<<<<;;;;;;;;;;;;;;;;;;;;;;;;::::::[WPxpcyre{sf}uh~vixjykzl{m|n}o~p~pzm|tfgaV~q}p|o{nzmylxkwj~vi|thzrfyqewocunbRQOKKKKKKK
KKKKKKKKJJJJJJJJJJJJJJJJJJJJJJJJIIIIIIIIIIIIIIIHz8((((((((((((((((((((((()(((((((((9iiihhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhh[(((((((((()(((((((((((((((((((((()<@^l<<<<<<<<<<<<<<<<<<<<<<<<;;;;;;;;;;;;;;;;;;;;;;;;::::::[WPwocyqezrf|th~viwjxkylzm{n|o}p}pzm{sfg`V}p|o{nzmylxkwjvj~uh|sgzreypdwncumaRQNKKKKKKKKKKKKKKKJJJJJJJJJJJJJJJJJJJJJJJJIIIIIIIIIIIIIIIIIIIY_C1(((((((((((((((((((,?C(((((((((7lllnnnhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhiiinnnX((((((((((D;+(((((((((((((((((((5A>AC===<<<<<<<<<<<<<<<<<<<<<<<<;;;;;;;;;;;;;;;;;;;;;;;;::::::[VPwncypdzre|sg~uhvjwjxkylzm{n|o|oyl{reg_U}p|o{nzmylxkwjvj~uh|sgzreypdwncumaRQNKKKKKKKKKKKKKKKJJJJJJJJJJJJJJJJJJJJJJJJIIIIIIIIIIIIIIIIIIIIIIIIHgvB5(((((((((((((((0>FxFFFC(((((((((6DDDTTTllloooiiihhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhkkkooogggIIIDx((((((((((Dq@AA@;,((((((((((((((*8@?LR=========<<<<<<<<<<<<<<<<<<<<<<<<;;;;;;;;;;;;;;;;;;;;;;;;::::::[VPwncypdzre|sg~uhvjwjxkylzm{n|o|oyl{reg_U|o{nzmylxkwj~vj}ui|tgzrfxqewocunbtl`RPNKKKKKKKKKKKKKKKJJJJJJJJJJJJJJJJJJJJJJJJIIIIIIIIIIIIIIIIIIIIIIIIHHHHHHHY_F?8+(((((((*5=CGbpFFFFFFFFFC(((((((((6DDDDDDDDDMMMfffnnnooommmiiihhhhhhhhhhhhhhhhhhjjjnnnppplll^^^FFFAAAAAADx((((((((((Dq@@@@@@?@ABn>:1)(((((((.9>Aw>DF===============<<<<<<<<<<<<<<<<<<<<<<<<;;;;;;;;;;;;;;;;;;;;;;;;::::::ZVOunbwocxqezrf|tg}ui~vjwjxkylzm{n{nxkyqee^U|o{nzmylxkwj~vj}ui|tgzrfxqewocunbtl`RPNKKKKKKKKKKKKKKKJJJJJJJJJJJJJJJJJJJJJJJJIIIIIIIIIIIIIIIIIIIIIIIIHHHHHHHHHHHHHHHHNQHqGCBABDEGuGQUFFFFFFFFFFFFFFFFFFC(((((((((6DDDDDDDDDDDDDDDDDDHHHXXXeeeiiikkkkkkllliiiiiiaaaTTTDDDBBBBBBBBBAAAAAADx((((((((((Dq@@@@@@????????????BT]C{A?>>>@B{A^l>@A>>>=====================<<<<<<<<<<<<<<<<<<<<<<<<;;;;;;;;;;;;;;;;;;;;;;;;::::::ZVOunbwocxqezrf|tg}ui~vjwjxkylzm{n{nxkyqee^U{oznymxlwkvj~uj}ti|sgzqfxpewncumbtk`RPNKKKKKKKKKKKKKKKJJJJJJJJJJJJJJJJJJJJJJJJIIIIIIIIIIIIIIIIIIIIIIIIHHHHHHHHHHHHHHHHHHHHHHHHGGGGGGGGGGGGGGGGGGGGGFFFFFFFFFFFFFFFFFFFFFC(((((((((6DDDDDDDDDDDDDDDDDDCCCCCCCCCCCCCCCCCCCCCCCCBBBBBBBBBBBBBBBBBBBBBA
AAAAADx((((((((((Dq@@@@@@????????????????????????>>>>>>>>>>>>>>>>>>>>>>>>=====================<<<<<<<<<<<<<<<<<<<<<<<<;;;;;;;;;;;;;;;;;;;;;;;;::::::ZUOumbwncxpezqf|sg}ti~ujvjwkxlymznznwkypee^U{nzmylxkwjvj~ui|th{sfyqexpdvnbtmask`RPNKKKKKKKKKKKKKKKJJJJJJJJJJJJJJJJJJJJJJJJIIIIIIIIIIIIIIIIIIIIIIIIHHHHHHHHHHHHHHHHHHHHHHHHGGGGGGGGGGGGGGGGGGGGGFFFFFFFFFFFFFFFFFFFFFC(((((((((6DDDDDDDDDDDDDDDDDDCCCCCCCCCCCCCCCCCCCCCCCCBBBBBBBBBBBBBBBBBBBBBAAAAAADx((((((((((Dq@@@@@@????????????????????????>>>>>>>>>>>>>>>>>>>>>>>>=====================<<<<<<<<<<<<<<<<<<<<<<<<;;;;;;;;;;;;;;;;;;;;;;;;::::::ZUOtmavnbxpdyqe{sf|th~uivjwjxkylzmzmwjxpdd^Tzmylxkxkvj~ui}th|sgzrexpdwocumatl`rj_QPNKKKKKKKKKKKKKKKJJJJJJJJJJJJJJJJJJJJJJJJIIIIIIIIIIIIIIIIIIIIIIIIHHHHHHHHHHHHHHHHHHHHHHHHGGGGGGGGGGGGGGGGGGGGGFFFFFFFFFFFFFFFFFFFFFC(((((((((6DDDDDDDDDDDDDDDDDDCCCCCCCCCCCCCCCCCCCCCCCCBBBBBBBBBBBBBBBBBBBBBAAAAAADx((((((((((Dq@@@@@@????????????????????????>>>>>>>>>>>>>>>>>>>>>>>>=====================<<<<<<<<<<<<<<<<<<<<<<<<;;;;;;;;;;;;;;;;;;;;;;;;::::::YUNtl`umawocxpdzre|sg}th~uivjxkxkylyl~viwocd]Szmylxkxkvj~ui}th|sgzrexpdwocumatl`rj_QPNKKKKKKKKKKKKKKKJJJJJJJJJJJJJJJJJJJJJJJJIIIIIIIIIIIIIIIIIIIIIIIIHHHHHHHHHHHHHHHHHHHHHHHHGGGGGGGGGGGGGGGGGGGGGFFFFFFFFFFFFFFFFFFFFFC(((((((((6DDDDDDDDDDDDDDDDDDCCCCCCCCCCCCCCCCCCCCCCCCBBBBBBBBBBBBBBBBBBBBBAAAAAADx((((((((((Dq@@@@@@????????????????????????>>>>>>>>>>>>>>>>>>>>>>>>=====================<<<<<<<<<<<<<<<<<<<<<<<<;;;;;;;;;;;;;;;;;;;;;;;;::::::YUNtl`umawocxpdzre|sg}th~uivjxkxkylyl~viwocd]Szmylylxkvj~ui}th|sgzrexpdwocvnbtlarj_QPNKKKKKKKKKKKKKKKJJJJJJJJJJJJJJJJJJJJJJJJIIIIIIIIIIIIIIIIIIIIIIIIHHHHHHHHHHHHHHHHHHHHHHHHGGGGGGGGGGGGGGGGGGGGGFFFFFFFFFFFFFFFFFFFFFC(((((((((8DDDDDDDDDDDDDDDDDDCCCCCCCCCCCCCCCCCCCCCCCCBBBBBBBBBBBBBBBBBBBBBAAAAAAD{((((((((((Es@@@@@@????????????????????????>>>>>>>>>>>>>>>>>>>>>>>>=====================<<<<<<<<<<<<<<<<<<<<<<<<;;;;;;;;;;;;;;;;;;;;;;;;::::::YUNtlavnbwocxpdzre|sg}th~uivjxkylylyl~viwocd]Sylxkwkvj}ui|th{sgzrfxpewocvnbtlark`pi^QPNKKKKKKKKKKKKKKKJJJ
JJJJJJJJJJJJJJJJJJJJJIIIIIIIIIIIIIIIIIIIIIIIIHHHHHHHHHHHHHHHHHHHHHHHHGGGGGGGGGGGGGGGGGGGGGFFFFFFFFFFFFFFFFFFFFFE(((((((((;DDDDDDDDDDDDDDDDDDCCCCCCCCCCCCCCCCCCCCCCCCBBBBBBBBBBBBBBBBBBBBBAAAAAAFy((((((((()Gs@@@@@@????????????????????????>>>>>>>>>>>>>>>>>>>>>>>>=====================<<<<<<<<<<<<<<<<<<<<<<<<;;;;;;;;;;;;;;;;;;;;;;;;::::::XTNrk`tlavnbwocxpezrf{sg|th}uivjwkxkxk|uhvnbb\Rykxjwjvi}uh|tg{sfzreyqdwocvnbtmark_qj^QPNKKKKKKKKKKKKKKKJJJJJJJJJJJJJJJJJJJJJJJJIIIIIIIIIIIIIIIIIIIIIIIIHHHHHHHHHHHHHHHHHHHHHHHHGGGGGGGGGGGGGGGGGGGGGFFFFFFFFFFFFFFFFFFFFFHhw.(((((((*>DDDDDDDDDDDDDDDDDDCCCCCCCCCCCCCCCCCCCCCCCCBBBBBBBBBBBBBBBBBBBBBAAAAAAE_k2((((((((5D\g@@@@@@????????????????????????>>>>>>>>>>>>>>>>>>>>>>>>=====================<<<<<<<<<<<<<<<<<<<<<<<<;;;;;;;;;;;;;;;;;;;;;;;;::::::XUNrk_tmavnbwocyqdzre{sf|tg}uhviwjxjxj|ugvnab\Rxkwjvj~ui}th{sgzrfyqexpdwocumbtlarj_pi^QPNKKKKKKKKKKKKKKKJJJJJJJJJJJJJJJJJJJJJJJJIIIIIIIIIIIIIIIIIIIIIIIIHHHHHHHHHHHHHHHHHHHHHHHHGGGGGGGGGGGGGGGGGGGGGFFFFFFFFFFFFFFFFFFFFFFFFC)((((((:BMTDDDDDDDDDDDDDDDDDDCCCCCCCCCCCCCCCCCCCCCCCCBBBBBBBBBBBBBBBBBBBBBAAAAAAAAAC*((((((+E@@@@@@@@@????????????????????????>>>>>>>>>>>>>>>>>>>>>>>>=====================<<<<<<<<<<<<<<<<<<<<<<<<;;;;;;;;;;;;;;;;;;;;;;;;::::::XTNrj_tlaumbwocxpdyqezrf{sg}th~uivjwj~wj{tgumaa[Rxj~wi~vi}uh|tg{sfzrexqewpdvocunbsl`rk_qj^QPNKKKKKKKKKKKKKKKJJJJJJJJJJJJJJJJJJJJJJJJIIIIIIIIIIIIIIIIIIIIIIIIHHHHHHHHHHHHHHHHHHHHHHHHGGGGGGGGGGGGGGGGGGGGGFFFFFFFFFFFFFFFFFFFFFFFFFJL@/(((*;AdwDDDDDDDDDDDDDDDDDDDDDCCCCCCCCCCCCCCCCCCCCCCCCBBBBBBBBBBBBBBBBBBBBBAAAAAAAAAADEA2((((4B@AA@@@@@@@@@????????????????????????>>>>>>>>>>>>>>>>>>>>>>>>=====================<<<<<<<<<<<<<<<<<<<<<<<<;;;;;;;;;;;;;;;;;;;;;;;;::::::XUNrk_sl`unbvocwpdxqezre{sf|tg}uh~vi~wi~wiztftmaa[Qxj~wi~vi}uh|uh{tgzsfyrexqdvocunbtmasl`qj^QPNKKKKKKKKKKKKKKKJJJJJJJJJJJJJJJJJJJJJJJJIIIIIIIIIIIIIIIIIIIIIIIIHHHHHHHHHHHHHHHHHHHHHHHHGGGGGGGGGGGGGGGGGGGGGFFFFFFFFFFFFFFFFFFFFFFFFEEEEEECo?;;;>@Vb@@@@@@@@@@@@@@@????????????????????????>>>>>>>>>>>>>>>>>>>>>>>>==========
===========<<<<<<<<<<<<<<<<<<<<<<<<;;;;;;;;;;;;;;;;;;;;;;;;::::::XUNsl`tmaunbvocxqdyrezsf{tg|uh}uh~vi~wi~wiztftmaa[Qwkvj~vi}uh|th{sgzrfzqexpdwocvnbumbsk`rj_RPNKKKKKKKKKKKKKKKJJJJJJJJJJJJJJJJJJJJJJJJIIIIIIIIIIIIIIIIIIIIIIIIHHHHHHHHHHHHHHHHHHHHHHHHGGGGGGGGGGGGGGGGGGGGGFFFFFFFFFFFFFFFFFFFFFFFFEEEEEEEEEEEEEEEEEEEEEEEEDDDDDDDDDDDDDDDDDDDDDDDDCCCCCCCCCCCCCCCCCCCCCCCCBBBBBBBBBBBBBBBBBBBBBAAAAAAAAAAAAAAAAAAAAAAAA@@@@@@@@@@@@@@@@@@@@@@@@????????????????????????>>>>>>>>>>>>>>>>>>>>>>>>=====================<<<<<<<<<<<<<<<<<<<<<<<<;;;;;;;;;;;;;;;;;;;;;;;;::::::YUNsk`umbvnbwocxpdzqezrf{sg|th}uh~vivj~vjzsftlaa[Q~vi}uh|ug{tg{sfzreyqdxpdwocvnbumatm`sl_qj^QPNKKKKKKKKKKKKKKKJJJJJJJJJJJJJJJJJJJJJJJJIIIIIIIIIIIIIIIIIIIIIIIIHHHHHHHHHHHHHHHHHHHHHHHHGGGGGGGGGGGGGGGGGGGGGFFFFFFFFFFFFFFFFFFFFFFFFEEEEEEEEEEEEEEEEEEEEEEEEDDDDDDDDDDDDDDDDDDDDDDDDCCCCCCCCCCCCCCCCCCCCCCCCBBBBBBBBBBBBBBBBBBBBBAAAAAAAAAAAAAAAAAAAAAAAA@@@@@@@@@@@@@@@@@@@@@@@@????????????????????????>>>>>>>>>>>>>>>>>>>>>>>>=====================<<<<<<<<<<<<<<<<<<<<<<<<;;;;;;;;;;;;;;;;;;;;;;;;::::::XUNsl_tm`umavnbwocxpdyqdzre{sf{tg|ug}uh}uhyrdrk_`ZO~vi}vh|ug|tg{sfzsfyrexqdwpcvobunatmasl`rk^RPNKKKKKKKKKKKKKKKJJJJJJJJJJJJJJJJJJJJJJJJIIIIIIIIIIIIIIIIIIIIIIIIHHHHHHHHHHHHHHHHHHHHHHHHGGGGGGGGGGGGGGGGGGGGGFFFFFFFFFFFFFFFFFFFFFFFFEEEEEEEEEEEEEEEEEEEEEEEEDDDDDDDDDDDDDDDDDDDDDDDDCCCCCCCCCCCCCCCCCCCCCCCCBBBBBBBBBBBBBBBBBBBBBAAAAAAAAAAAAAAAAAAAAAAAA@@@@@@@@@@@@@@@@@@@@@@@@????????????????????????>>>>>>>>>>>>>>>>>>>>>>>>=====================<<<<<<<<<<<<<<<<<<<<<<<<;;;;;;;;;;;;;;;;;;;;;;;;::::::YUNsl`tmaunavobwpcxqdyrezsf{sf|tg|ug}vh}uhyrdrl_`ZO~ui}uh|tg|sg{rfzrfyqeypdxodwocvnbumatl`sk_RPNKKKKKKKKKKKKKKKJJJJJJJJJJJJJJJJJJJJJJJJIIIIIIIIIIIIIIIIIIIIIIIIHHHHHHHHHHHHHHHHHHHHHHHHGGGGGGGGGGGGGGGGGGGGGFFFFFFFFFFFFFFFFFFFFFFFFEEEEEEEEEEEEEEEEEEEEEEEEDDDDDDDDDDDDDDDDDDDDDDDDCCCCCCCCCCCCCCCCCCCCCCCCBBBBBBBBBBBBBBBBBBBBBAAAAAAAAAAAAAAAAAAAAAAAA@@@@@@@@@@@@@@@@@@@@@@@@????????????????????????>>>>>>>>>>>>>>>>>>>>>>>>=====================<<<<<<<<<<<<<<<<<<<<<<<<
;;;;;;;;;;;;;;;;;;;;;;;;::::::ZUNtl`umavnbwocxodypdyqezrf{rf|sg|tg}uh}thyqdrk_`YO}uh|tg|sg{sfzrfzqeypdxpdwocvnbumaumatl`rj_RPNKKKKKKKKKKKKKKKJJJJJJJJJJJJJJJJJJJJJJJJIIIIIIIIIIIIIIIIIIIIIIIIHHHHHHHHHHHHHHHHHHHHHHHHGGGGGGGGGGGGGGGGGGGGGFFFFFFFFFFFFFFFFFFFFFFFFEEEEEEEEEEEEEEEEEEEEEEEEDDDDDDDDDDDDDDDDDDDDDDDDCCCCCCCCCCCCCCCCCCCCCCCCBBBBBBBBBBBBBBBBBBBBBAAAAAAAAAAAAAAAAAAAAAAAA@@@@@@@@@@@@@@@@@@@@@@@@????????????????????????>>>>>>>>>>>>>>>>>>>>>>>>=====================<<<<<<<<<<<<<<<<<<<<<<<<;;;;;;;;;;;;;;;;;;;;;;;;::::::YUNtl`umaumavnbwocxpdypdzqezrf{sf|sg|tg|tgypdqj^_XO}uh}th|sg{sf{rfzqeyqexpdxpdwocvnbumatlask`RPNKKKKKKKKKKKKKKKJJJJJJJJJJJJJJJJJJJJJJJJIIIIIIIIIIIIIIIIIIIIIIIIHHHHHHHHHHHHHHHHHHHHHHHHGGGGGGGGGGGGGGGGGGGGGFFFFFFFFFFFFFFFFFFFFFFFFEEEEEEEEEEEEEEEEEEEEEEEEDDDDDDDDDDDDDDDDDDDDDDDDCCCCCCCCCCCCCCCCCCCCCCCCBBBBBBBBBBBBBBBBBBBBBAAAAAAAAAAAAAAAAAAAAAAAA@@@@@@@@@@@@@@@@@@@@@@@@????????????????????????>>>>>>>>>>>>>>>>>>>>>>>>=====================<<<<<<<<<<<<<<<<<<<<<<<<;;;;;;;;;;;;;;;;;;;;;;;;::::::ZUOtlaumavnbwocxpdxpdyqezqe{rf{sf|sg}th|tgypdqj^_XO|uh|th{tg{sgzsfyrfyqexpdwpdvocvocunbtmasl`RQOKKKKKKKKKKKKKKKJJJJJJJJJJJJJJJJJJJJJJJJIIIIIIIIIIIIIIIIIIIIIIIIHHHHHHHHHHHHHHHHHHHHHHHHGGGGGGGGGGGGGGGGGGGGGFFFFFFFFFFFFFFFFFFFFFFFFEEEEEEEEEEEEEEEEEEEEEEEEDDDDDDDDDDDDDDDDDDDDDDDDCCCCCCCCCCCCCCCCCCCCCCCCBBBBBBBBBBBBBBBBBBBBBAAAAAAAAAAAAAAAAAAAAAAAA@@@@@@@@@@@@@@@@@@@@@@@@????????????????????????>>>>>>>>>>>>>>>>>>>>>>>>=====================<<<<<<<<<<<<<<<<<<<<<<<<;;;;;;;;;;;;;;;;;;;;;;;;::::::ZVOtmaunbvocvocwpdxpdyqeyrfzsf{sg{tg|th{tgxqdqj^^XO}th|tg|sg{rfzrfzqeypdxpdxpcwocvnbumaumatl`RQNKKKKKKKKKKKKKKKJJJJJJJJJJJJJJJJJJJJJJJJIIIIIIIIIIIIIIIIIIIIIIIIHHHHHHHHHHHHHHHHHHHHHHHHGGGGGGGGGGGGGGGGGGGGGFFFFFFFFFFFFFFFFFFFFFFFFEEEEEEEEEEEEEEEEEEEEEEEEDDDDDDDDDDDDDDDDDDDDDDDDCCCCCCCCCCCCCCCCCCCCCCCCBBBBBBBBBBBBBBBBBBBBBAAAAAAAAAAAAAAAAAAAAAAAA@@@@@@@@@@@@@@@@@@@@@@@@????????????????????????>>>>>>>>>>>>>>>>>>>>>>>>=====================<<<<<<<<<<<<<<<<<<<<<<<<;;;;;;;;;;;;;;;;;;;;;;;;::::::ZV
Oumaumavnbwocxpcxpdypdzqezrf{rf|sg|tg|sgxpdqi]^XO|th{tg{sgzsfzrfyrfxqexpdwpdvocvocunbtmaslaRQOKKKKKKKKKKKKKKKJJJJJJJJJJJJJJJJJJJJJJJJIIIIIIIIIIIIIIIIIIIIIIIIHHHHHHHHHHHHHHHHHHHHHHHHGGGGGGGGGGGGGGGGGGGGGFFFFFFFFFFFFFFFFFFFFFFFFEEEEEEEEEEEEEEEEEEEEEEEEDDDDDDDDDDDDDDDDDDDDDDDDCCCCCCCCCCCCCCCCCCCCCCCCBBBBBBBBBBBBBBBBBBBBBAAAAAAAAAAAAAAAAAAAAAAAA@@@@@@@@@@@@@@@@@@@@@@@@????????????????????????>>>>>>>>>>>>>>>>>>>>>>>>=====================<<<<<<<<<<<<<<<<<<<<<<<<;;;;;;;;;;;;;;;;;;;;;;;;::::::ZVPtmaunbvocvocwpdxpdxqeyrfzrfzsf{sg{tg{sgwpdpi]^XO|tg{sg{sfzrfyqeyqexpdxpdwpcvocvnbunbtmatl`RQOKKKKKKKKKKKKKKKJJJJJJJJJJJJJJJJJJJJJJJJIIIIIIIIIIIIIIIIIIIIIIIIHHHHHHHHHHHHHHHHHHHHHHHHGGGGGGGGGGGGGGGGGGGGGFFFFFFFFFFFFFFFFFFFFFFFFEEEEEEEEEEEEEEEEEEEEEEEEDDDDDDDDDDDDDDDDDDDDDDDDCCCCCCCCCCCCCCCCCCCCCCCCBBBBBBBBBBBBBBBBBBBBBAAAAAAAAAAAAAAAAAAAAAAAA@@@@@@@@@@@@@@@@@@@@@@@@????????????????????????>>>>>>>>>>>>>>>>>>>>>>>>=====================<<<<<<<<<<<<<<<<<<<<<<<<;;;;;;;;;;;;;;;;;;;;;;;;::::::ZVOtmaunbvnbvocwpcxpdxpdyqeyqezrf{sf{sg{sfwodoh]^XN|tg{sg{sfzrfzreyqeyqexpdwpcwocvocvnbumatmatl`sk`rk_qj^qi^pi]oh]ng\ng[mf[leZkdYkdYjcYjcXibXibXibXibXibXibXibXibXibXibXtogwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwtogibYibXibXibXibXibXibXibXibXibXibXibXibXibXibXibXibXibXibXibXibXibXibXibXibXibXibXibXibXibXibXibXibXibXibXibXibXibXibXibXibXibXibXjcXjcYkdYkdYleZmf[ng[ng\oh]pi]qi^qj^rk_sk`tl`tmaumavnbvocwocwpcxpdyqeyqezrezrf{sf{sg{sfwodpi]^XN|sh|sg{rg{rfzqfzqeypeypexpdwocwncvncvmbulatlatk`sj`rj_rj_qi^ph^ph]og\nf\nf[me[ldZldZkcYkcYkcYkcYkcYkcYkcYkcYkcYkcYvpiyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyvpikcZkcYkcYkcYkcYkcYkcYkcYkcYkcYkcYkcYkcYkcYkcYkcYkcYkcYkcYkcYkcYkcYkcYkcYkcYkcYkcYkcYkcYkcYkcYkcYkcYkcYkcYkcYkcYkcYkcYkcYkcYkcYkcYldZldZme[nf[nf\og\ph]ph^qi^rj_rj_sj`tk`tlaulavmbvncwncwocxpdypeypezqezqf{rf{rg|sg{rgwndph]^WN{sg{sfzrfzrezreyqexpdxpdxpdwocvobvnbunbumatmasl`sk`rk_rj_qj^pi]pi]oh\oh\ng[mf[mf[leZleZleZleZleZleZleZleZleZleZwrjyzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzwrjle[leZleZl
eZleZleZleZleZleZleZleZleZleZleZleZleZleZleZleZleZleZleZleZleZleZleZleZleZleZleZleZleZleZleZleZleZleZleZleZleZleZleZleZmf[mf[ng[oh\oh\pi]pi]qj^rj_rk_sk`sl`tmaumaunbvnbvobwocxpdxpdxpdyqezrezrezrf{sfzrfwocoh\]WMzrgzrgzrfyqfyqexpexpdwodwodvocvncunbumbtmbtlaslask`rk`rj`qj_pi^pi^oh]oh]ng\ng\mf\mf[le[le[le[le[le[le[le[le[xsk{|||||||||||||||||||||||||||||||||||||||||||||||||||||xskle\le[le[le[le[le[le[le[le[le[le[le[le[le[le[le[le[le[le[le[le[le[le[le[le[le[le[le[le[le[le[le[le[le[le[le[le[le[le[le[le[mf[mf\ng\ng\oh]oh]pi^pi^qj_rj`rk`sk`slatlatmbumbunbvncvocwodwodxpdxpeyqeyqfzrfzrgyqfvncng\]WN{sgzrgzrfzrfyqfyqexpexpdwodwodvocvncunbumbtmbtlaslask`rk`rj`qj_qj_pi^pi^oh]oh]oh]ng]ng\ng\ng\ng\ng\ng\ng\zum}~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~zumng]ng\ng\ng\ng\ng\ng\ng\ng\ng\ng\ng\ng\ng\ng\ng\ng\ng\ng\ng\ng\ng\ng\ng\ng\ng\ng\ng\ng\ng\ng\ng\ng\ng\ng\ng\ng\ng\ng\ng\ng]oh]oh]oh]pi^pi^qj_qj_rj`rk`sk`slatlatmbumbunbvncvocwodwodxpdxpeyqeyqfzrfzrfzrgzrfvncng\]WN{rfzqfzqfzqeypeypdypdxodxodwocwncwncvnbumaumaulatk`tk`sk`sj_rj_rj_qi^qi^ph]ph]ph]og]og]og]og]og]og]og]|vm~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~|vmog]og]og]og]og]og]og]og]og]og]og]og]og]og]og]og]og]og]og]og]og]og]og]og]og]og]og]og]og]og]og]og]og]og]og]og]og]og]og]og]ph]ph]ph]qi^qi^rj_rj_sj_sk`tk`tk`ulaumaumavnbwncwncwocxodxodypdypdypezqezqfzqfzqevmbog\]VMzrezreyqeyqeyqdxpdxpcxpcwocwocvobvnbvnbunaumatmatl`sl`sk_sk_rk_rj_qj^qj^qj^pi]pi]pi]pi]oh\oh\oh\oh\|wn|wnoh]oh\oh\oh\oh\oh\oh\oh\oh\oh\oh\oh\oh\oh\oh\oh\oh\oh\oh\oh\oh\oh\oh\oh\oh\oh\oh\oh\oh\oh\oh\oh\oh\oh\oh\oh\oh\pi]pi]pi]pi]qj^qj^qj^rj_rk_sk_sk_sl`tl`tmaumaunavnbvnbvobwocwocxpcxpcxpdyqdyqeyqezreyqdumang\\VLzrezrezreyqeyqeyqdxpdxpcxpcwocwocvobvnbvnbunaumatmatmatl`sl`sk_sk_rk_rj_qj^qj^qj^qj^qj^qj^qj^qj^~xo~xoqj_qj^qj^qj^qj^qj^qj^qj^qj^qj^qj^qj^qj^qj^qj^qj^qj^qj^qj^qj^qj^qj^qj^qj^qj^qj^qj^qj^qj^qj^qj^qj^qj^qj^qj^qj^qj^qj^qj^qj^qj^rj_rk_sk_sk_sl`tl`tmatmaumaunavnbvnbvobwocwocxpcxpcxpdyqdyqeyqezrezreyqdvnbng\\VLzrfyqeyqeyqexpexpexpdwodwocwocvnc
vncvncunbumbumbtmatlaslaslask`rk`rj_rj_rj_qj_qj_qj_qj_qj_qj_xpxpqj_qj_qj_qj_qj_qj_qj_qj_qj_qj_qj_qj_qj_qj_qj_qj_qj_qj_qj_qj_qj_qj_qj_qj_qj_qj_qj_qj_qj_qj_qj_qj_qj_qj_qj_qj_qj_qj_qj_rj_rj_rj_rk`sk`slaslatlatmaumbumbunbvncvncvncwocwocwodxpdxpexpeyqeyqeyqeyqeumbnf\[ULzreyqdyqdyqdyqdxpdxpdxpcwocwocwobwobvnbvnbunaumaumaumatm`tl`sl`sl`sk_sk_rk_rk_rj^rj^rj^rj^ypyprj_rj^rj^rj^rj^rj^rj^rj^rj^rj^rj^rj^rj^rj^rj^rj^rj^rj^rj^rj^rj^rj^rj^rj^rj^rj^rj^rj^rj^rj^rj^rj^rj^rj^rj^rj^rj^rk_rk_sk_sk_sl`sl`tl`tm`umaumaumaunavnbvnbwobwobwocwocxpcxpdxpdyqdyqdyqdyqdyqdumanf[[ULzrezreyqdyqdyqdyqdxpdxpdxpcxpcwocwobwobvnbvnbvnbunaumaumaumatm`tm`tl`sl`sl`sl`sl`sk_sk_zqzqsk`sk_sk_sk_sk_sk_sk_sk_sk_sk_sk_sk_sk_sk_sk_sk_sk_sk_sk_sk_sk_sk_sk_sk_sk_sk_sk_sk_sk_sk_sk_sk_sk_sk_sk_sl`sl`sl`sl`tl`tm`tm`umaumaumaunavnbvnbvnbwobwobwocxpcxpcxpdxpdyqdyqdyqdyqdzreyqdumanf[[ULzqezqezqeypdypdypdypdxodxodxocxocwncwncwnbwnbvnbvnbvmbumaumaulaulaulatl`tl`tl`tl`tk`{r{rtlatk`tk`tk`tk`tk`tk`tk`tk`tk`tk`tk`tk`tk`tk`tk`tk`tk`tk`tk`tk`tk`tk`tk`tk`tk`tk`tk`tk`tk`tk`tk`tk`tk`tl`tl`tl`tl`ulaulaulaumaumavmbvnbvnbwnbwnbwncwncxocxocxodxodypdypdypdypdzqezqeypdulane[\ULzqezqezqezqeypdypdypdypdxodxodxodxocxocwncwncwnbwnbvnbvnbvmbvmbumaumaumaulaulaula|s|sumbulaulaulaulaulaulaulaulaulaulaulaulaulaulaulaulaulaulaulaulaulaulaulaulaulaulaulaulaulaulaulaulaulaulaulaumaumaumavmbvmbvnbvnbwnbwnbwncwncxocxocxodxodxodypdypdypdypdzqezqezqeypdulane[\ULypdypdypdypdypdypdxocxocxocxocwncwncwncwnbwnbvmbvmbvmavmavmaumaumaumaulaulaula|s|rtmatl`tl`tl`tl`tl`tl`tl`tl`tl`tl`tl`tl`tl`tl`tl`tl`tl`tl`tl`tl`tl`tl`tl`tl`tl`tl`tl`tl`tl`tl`ulaulaulaulaulaumaumaumavmavmavmavmbvmbwnbwnbwncwncwncxocxocxocxocypdypdypdypdypdxoctk`ne[[ULypdypdypdypdypdypdypdxocxocxocxocxocwncwncwncwnbwnbvmbvmbvmbvmavmavmavmavma}s}tunbumaumaumaumaumaumaumaumaumaumaumaumaumaumaumaumaumaumaumaumaumaumaumaumaumaumaumaumaumaumaumaumavmavmavmavmavmavmbvmbvmbwnbwnbwncwncwncxocxocxocxocxocypdypdypdypdypdypdxoculane[[ULyqexpdxpdxpdxpdxpdxpdxpdxpdwocwocwocwocwocvncvncvncvncvnbvnbvnbumbumbumb~
t}uunbumaumaumaumaumaumaumaumaumaumaumaumaumaumaumaumaumaumaumaumaumaumaumaumaumaumaumaumaumbumbumbumbumbumbumbvnbvnbvnbvncvncvncvncwocwocwocwocwocxpdxpdxpdxpdxpdxpdxpdxpdxpdtlame[[ULypdypdypdxocxocxocxocxocxocxocxocwnbwnbwnbwnbwnbwnbvmbvmbvmbvmbvmbvmb}u}uvnbvmavmavmavmavmavmavmavmavmavmavmavmavmavmavmavmavmavmavmavmavmavmavmavmavmavmavmavmavmavmavmavmavmbvmbvmbvmbvmbvmbwnbwnbwnbwnbwnbwnbxocxocxocxocxocxocxocxocypdypdxoctk`meZ[TKypdypdypdypdxocxocxocxocxocxocxocxocxocwnbwnbwnbwnbwnbwnbwnbwnbwnb~u~uvncvmbvmbvmbvmbvmbvmbvmbvmbvmbvmbvmbvmbvmbvmbvmbvmbvmbvmbvmbvmbvmbvmbvmbvmbvmbvmbvmbvmbvmbvmbvmbvmbwnbwnbwnbwnbwnbwnbwnbwnbwnbxocxocxocxocxocxocxocxocxocypdypdypdxoctk`meZ[TKypdypdypdypdypdypdxocxocxocxocxocxocxocxocxocxocwnbwnbwnbwnbwnbv~vwocwnbwnbwnbwnbwnbwnbwnbwnbwnbwnbwnbwnbwnbwnbwnbwnbwnbwnbwnbwnbwnbwnbwnbwnbwnbwnbwnbwnbwnbwnbwnbwnbwnbwnbwnbwnbwnbxocxocxocxocxocxocxocxocxocxocypdypdypdypdypdxoctk`meZ[TKxpdxpdxpdxpdxpdxpdxpdxpdwocwocwocwocwocwocwocwocwocwocwocwocwwwpdwocwocwocwocwocwocwocwocwocwocwocwocwocwocwocwocwocwocwocwocwocwocwocwocwocwocwocwocwocwocwocwocwocwocwocwocwocwocwocwocwocwocwocwocxpdxpdxpdxpdxpdxpdxpdwocsk`leZZTKxpdxpdxpdxpdxpdxpdxpdxpdxpdxpdxpdwocwocwocwocwocwocwocwocwwwpdwocwocwocwocwocwocwocwocwocwocwocwocwocwocwocwocwocwocwocwocwocwocwocwocwocwocwocwocwocwocwocwocwocwocwocwocwocwocwocwocxpdxpdxpdxpdxpdxpdxpdxpdxpdxpdwocsk`leZZTKwocwocwocwocwocwocwocwocwocwocwocwocwocwocwocvnbvnbvnbvvvocvnbvnbvnbvnbvnbvnbvnbvnbvnbvnbvnbvnbvnbvnbvnbvnbvnbvnbvnbvnbvnbvnbvnbvnbvnbvnbvnbvnbvnbvnbvnbvnbvnbvnbvnbwocwocwocwocwocwocwocwocwocwocwocwocwocwocvnbrj_kdYYSJwnbwnbwnbwnbwnbwnbwnbwnbwnbwnbwnbwnbwnbwnbwnbwnbwnbuvwocwnbwnbwnbwnbwnbwnbwnbwnbwnbwnbwnbwnbwnbwnbwnbwnbwnbwnbwnbwnbwnbwnbwnbwnbwnbwnbwnbwnbwnbwnbwnbwnbwnbwnbwnbwnbwnbwnbwnbwnbwnbwnbwnbwnbwnbwnbwnbwnbvmari^kcXYRIwnbwnbwnbwnbwnbwnbwnbwnbwnbwnbwnbwnbwnbwnbwnbwnbuvwocwnbwnbwnbwnbwnbwnbwnbwnbwnbwnbwnbwnbwnbwnbwnbwnbwnbwnbwnbwnbwnbwnbwnbwnbwnbwnbwnbwnbwnbwnbwnbwnbwnbwnbwnbwnbwnbwnbwnbwnbwnbwnbwnbwnbwnbwnbwnbvmari^kc
XYRIwnbwnbwnbwnbwnbwnbwnbwnbwnbwnbwnbwnbwnbwnbwnbvvwocwnbwnbwnbwnbwnbwnbwnbwnbwnbwnbwnbwnbwnbwnbwnbwnbwnbwnbwnbwnbwnbwnbwnbwnbwnbwnbwnbwnbwnbwnbwnbwnbwnbwnbwnbwnbwnbwnbwnbwnbwnbwnbwnbwnbwnbwnbvmari^kcXYRIvnbvnbvnbvnbvnbvnbvnbvnbvnbvnbvnbvnbvnbvnbvvwocvnbvnbvnbvnbvnbvnbvnbvnbvnbvnbvnbvnbvnbvnbvnbvnbvnbvnbvnbvnbvnbvnbvnbvnbvnbvnbvnbvnbvnbvnbvnbvnbvnbvnbvnbvnbvnbvnbvnbvnbvnbvnbvnbvnbvnbumaqi^jcXXRIvnbvnbvnbvnbvnbvnbvnbvnbvnbvnbvnbvnbvnbvvwocvnbvnbvnbvnbvnbvnbvnbvnbvnbvnbvnbvnbvnbvnbvnbvnbvnbvnbvnbvnbvnbvnbvnbvnbvnbvnbvnbvnbvnbvnbvnbvnbvnbvnbvnbvnbvnbvnbvnbvnbvnbvnbvnbvnbumaqi^jcXXRIumaumaumaumaumaumaumaumaumaumaumauma~vvvnbumaumaumaumaumaumaumaumaumaumaumaumaumaumaumaumaumaumaumaumaumaumaumaumaumaumaumaumaumaumaumaumaumaumaumaumaumaumaumaumaumaumaumatl`ph]ibWXRIumaumaumaumaumaumaumaumaumaumauma~v~vvnbumaumaumaumaumaumaumaumaumaumaumaumaumaumaumaumaumaumaumaumaumaumaumaumaumaumaumaumaumaumaumaumaumaumaumaumaumaumaumaumaumaumatl`ph]ibWXRIulaulaulaulaulaulaulaulaulaula~v~vvmbulaulaulaulaulaulaulaulaulaulaulaulaulaulaulaulaulaulaulaulaulaulaulaulaulaulaulaulaulaulaulaulaulaulaulaulaulaulaulaulaulatk`pg]iaWXQIulaulaulaulaulaulaulaulaula~v~vvmbulaulaulaulaulaulaulaulaulaulaulaulaulaulaulaulaulaulaulaulaulaulaulaulaulaulaulaulaulaulaulaulaulaulaulaulaulaulaulaulatk`pg]iaWXQItl`tl`tl`tl`tl`tl`tl`tl`~u~uumbtl`tl`tl`tl`tl`tl`tl`tl`tl`tl`tl`tl`tl`tl`tl`tl`tl`tl`tl`tl`tl`tl`tl`tl`tl`tl`tl`tl`tl`tl`tl`tl`tl`tl`tl`tl`tl`tl`tl`sk_og\haVWQHtl`tl`tl`tl`tl`tl`tl`~u~uumbtl`tl`tl`tl`tl`tl`tl`tl`tl`tl`tl`tl`tl`tl`tl`tl`tl`tl`tl`tl`tl`tl`tl`tl`tl`tl`tl`tl`tl`tl`tl`tl`tl`tl`tl`tl`tl`tl`sk_og\haVWQHsk`sk`sk`sk`sk`sk`}u}utlbsk`sk`sk`sk`sk`sk`sk`sk`sk`sk`sk`sk`sk`sk`sk`sk`sk`sk`sk`sk`sk`sk`sk`sk`sk`sk`sk`sk`sk`sk`sk`sk`sk`sk`sk`sk`sk`rj_nf\g`VVPHsk`sk`sk`sk`sk`}t}utlbsk`sk`sk`sk`sk`sk`sk`sk`sk`sk`sk`sk`sk`sk`sk`sk`sk`sk`sk`sk`sk`sk`sk`sk`sk`sk`sk`sk`sk`sk`sk`sk`sk`sk`sk`sk`rj_nf\g`VVPHsk`sk`sk`sk`}t}utlbsk`sk`sk`sk`sk`sk`sk`sk`sk`sk`sk`sk`sk`sk`sk`sk`sk`sk`sk`sk`sk`sk`sk`sk`sk`sk`sk`sk`sk`sk`sk
`sk`sk`sk`sk`rj_nf\g`VVPHsk_sk_sk_}t}ttlask_sk_sk_sk_sk_sk_sk_sk_sk_sk_sk_sk_sk_sk_sk_sk_sk_sk_sk_sk_sk_sk_sk_sk_sk_sk_sk_sk_sk_sk_sk_sk_sk_sk_rj^nf[g`UVPGsk_sk_}t}ttlask_sk_sk_sk_sk_sk_sk_sk_sk_sk_sk_sk_sk_sk_sk_sk_sk_sk_sk_sk_sk_sk_sk_sk_sk_sk_sk_sk_sk_sk_sk_sk_sk_rj^nf[g`UVPGsk_}t}ttlask_sk_sk_sk_sk_sk_sk_sk_sk_sk_sk_sk_sk_sk_sk_sk_sk_sk_sk_sk_sk_sk_sk_sk_sk_sk_sk_sk_sk_sk_sk_sk_rj^nf[g`UVPG}s}tskarj_rj_rj_rj_rj_rj_rj_rj_rj_rj_rj_rj_rj_rj_rj_rj_rj_rj_rj_rj_rj_rj_rj_rj_rj_rj_rj_rj_rj_rj_rj_qi^me[f_UUOG}tskarj_rj_rj_rj_rj_rj_rj_rj_rj_rj_rj_rj_rj_rj_rj_rj_rj_rj_rj_rj_rj_rj_rj_rj_rj_rj_rj_rj_rj_rj_qi^me[f_UUOG|srj`qi^qi^qi^qi^qi^qi^qi^qi^qi^qi^qi^qi^qi^qi^qi^qi^qi^qi^qi^qi^qi^qi^qi^qi^qi^qi^qi^qi^qi^ph]ldZe^TUOF|srj`qi^qi^qi^qi^qi^qi^qi^qi^qi^qi^qi^qi^qi^qi^qi^qi^qi^qi^qi^qi^qi^qi^qi^qi^qi^qi^qi^qi^ph]ldZe^TUOF|srj`qi^qi^qi^qi^qi^qi^qi^qi^qi^qi^qi^qi^qi^qi^qi^qi^qi^qi^qi^qi^qi^qi^qi^qi^qi^qi^qi^ph]ldZe^TUOF|rrj_qi]qi]qi]qi]qi]qi]qi]qi]qi]qi]qi]qi]qi]qi]qi]qi]qi]qi]qi]qi]qi]qi]qi]qi]qi]qi]ph\ldYe^TUOF|rrj_qi]qi]qi]qi]qi]qi]qi]qi]qi]qi]qi]qi]qi]qi]qi]qi]qi]qi]qi]qi]qi]qi]qi]qi]qi]ph\ldYe^TUOF|rrj_qi]qi]qi]qi]qi]qi]qi]qi]qi]qi]qi]qi]qi]qi]qi]qi]qi]qi]qi]qi]qi]qi]qi]qi]ph\ldYe^TUOF{rqj_ph]ph]ph]ph]ph]ph]ph]ph]ph]ph]ph]ph]ph]ph]ph]ph]ph]ph]ph]ph]ph]ph]ph]og\kdYe]TTNF{rqj_ph]ph]ph]ph]ph]ph]ph]ph]ph]ph]ph]ph]ph]ph]ph]ph]ph]ph]ph]ph]ph]ph]og\kdYe]TTNF{rqj`ph]ph]ph]ph]ph]ph]ph]ph]ph]ph]ph]ph]ph]ph]ph]ph]ph]ph]ph]ph]ph]og\kdYe]TTNFzqpi_og\og\og\og\og\og\og\og\og\og\og\og\og\og\og\og\og\og\og\og\nf[jcXd\SSMEzqpi_og\og\og\og\og\og\og\og\og\og\og\og\og\og\og\og\og\og\og\nf[jcXd\SSMEzqpi_og\og\og\og\og\og\og\og\og\og\og\og\og\og\og\og\og\og\nf[jcXd\SSMEzqpi_og\og\og\og\og\og\og\og\og\og\og\og\og\og\og\og\og\nf[jcXd\SSMEzqpi_og\og\og\og\og\og\og\og\og\og\og\og\og\og\og\og\nf[jcXd\SSMEzqpi_og\og\og\og\og\og\og\og\og\og\og\og\og\og\og\nf[jcXd\SSMEzqpi_og\og\og\og\og\og\og\og\og\og\og\og\og\og\nf[jcXd\SSMEypoh^nf[nf[nf[nf[nf[nf[nf[nf[nf[nf[nf[nf[nf[meZibWc\RRLDypoh^nf[nf[nf[nf[nf[nf[n
f[nf[nf[nf[nf[nf[meZibWc\RRLD~ypoh^mf[mf[mf[mf[mf[mf[mf[mf[mf[mf[mf[leZhbWb\RRLD~ypoh^mf[mf[mf[mf[mf[mf[mf[mf[mf[mf[leZhbWb\RRLD~ypoh^mf[mf[mf[mf[mf[mf[mf[mf[mf[leZhbWb\RRLD~ypoh^mf[mf[mf[mf[mf[mf[mf[mf[leZhbWb\RRLD~ypoh^mf[mf[mf[mf[mf[mf[mf[leZhbWb\RRLD~xoog]meZmeZmeZmeZmeZmeZldYhaVb[QRLC~xoog]meZmeZmeZmeZmeZldYhaVb[QRLC~xoog]meZmeZmeZmeZldYhaVb[QRLC}wong]ldZldZldZkcYg`VaZQQKC}wong^ldZldZkcYg`VaZQQKC}wong^ldZkcYg`VaZQQKC}womg^jcYf`V`ZQPKC}wolf]f`V`ZQPKC|vnhcZ`ZQPKCxrjb\TPKC~pkdRMF}|v^YTkdYkdYkdYkdYkdYkdYkdYkdYkdYkdYkdYkdYkdYkdYkdYkdYkdYkdYkdYnh^kdYkdYkdYkdYkdYkdYkdYkdYkdYkdYkdYkdYkdYkdYkdYkdYkdYkdYkdYkdYkdYkdYkdYkdYkdYkdYkdYkdYkdYkdYkdYkdYkdYkdYkdYkdYkdYkdYkdYkdYkdYkdYkdYkdYkdYkdYkdYkdYkdYkdYkdYkdYjcXf`U`ZPPKCkdYkdYkdYkdYkdYkdYkdYkdYkdYkdYkdYkdYkdYkdYkdYkdYkdYkdYkdYnh^kdYkdYkdYkdYkdYkdYkdYkdYkdYkdYkdYkdYkdYkdYkdYkdYkdYkdYkdYkdYkdYkdYkdYkdYkdYkdYkdYkdYkdYkdYkdYkdYkdYkdYkdYkdYkdYkdYkdYkdYkdYkdYkdYkdYkdYkdYkdYkdYkdYkdYkdYkdYjcXf`U`ZPPKCkdYkdYkdYkdYkdYkdYkdYkdYkdYkdYkdYkdYkdYkdYkdYkdYkdYkdYkdYnh^kdYkdYkdYkdYkdYkdYkdYkdYkdYkdYkdYkdYkdYkdYkdYkdYkdYkdYkdYkdYkdYkdYkdYkdYkdYkdYkdYkdYkdYkdYkdYkdYkdYkdYkdYkdYkdYkdYkdYkdYkdYkdYkdYkdYkdYkdYkdYkdYkdYkdYkdYkdYjcXf`U`ZPPKCjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYmg^jcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYibXe_U_YPOJCjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYmg^jcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYibXe_U_YPOJCjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYmg^jcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYibXe_U_YPOJCjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYmg^jcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjc
YjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYibXe_U_YPOJCicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXlg]icXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXhbWd_T^YOOJBicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXlg]icXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXhbWd_T^YOOJBicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXlg]icXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXhbWd_T^YOOJBicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXlg]icXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXhbWd_T^YOOJBicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXlg]icXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXicXhbWd_T^YOOJBibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWlf\ibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWhaVd^S^XNOIAibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWlf\ibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWhaVd^S^XNOIAibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWlf\ibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWhaVd^S^XNOIAibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWlf\ibWibWibWibWibWibWibWibWibWi
bWibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWhaVd^S^XNOIAibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWlf\ibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWibWhaVd^S^XNOIAhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWke\haWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWg`Vd]S]WNNIAhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWke\haWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWg`Vd]S]WNNIAhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWke\haWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWg`Vd]S]WNNIAhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWke\haWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWg`Vd]S]WNNIAhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWke\haWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWg`Vd]S]WNNIAgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVje[gaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVf`Uc]R\WMMI@gaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVje[gaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVf`Uc]R\WMMI@gaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaV
gaVgaVgaVgaVgaVgaVgaVje[gaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVf`Uc]R\WMMI@gaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVje[gaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVf`Uc]R\WMMI@gaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVje[gaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVf`Uc]R\WMMI@gaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVje[gaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVgaVf`Uc]R\WMMI@g`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vjd[g`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vf_Uc\R\VMMH@g`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vjd[g`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vf_Uc\R\VMMH@g`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vjd[g`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vf_Uc\R\VMMH@g`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vjd[g`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vf_Uc\R\VMMH@g`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vjd[g`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`
Vg`Vf_Uc\R\VMMH@g_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_UjcZg_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Uf^Tc[Q\ULMG@g_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_UjcZg_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Uf^Tc[Q\ULMG@g_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_UjcZg_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Uf^Tc[Q\ULMG@g_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_UjcZg_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Uf^Tc[Q\ULMG@g_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_UjcZg_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Ug_Uf^Tc[Q\ULMG@g_Vg_Vg_Vg_Vg_Vg_Vg_Vg_Vg_Vg_Vg_Vg_Vg_Vg_Vg_Vg_Vg_Vg_Vg_Vjc[g_Vg_Vg_Vg_Vg_Vg_Vg_Vg_Vg_Vg_Vg_Vg_Vg_Vg_Vg_Vg_Vg_Vg_Vg_Vg_Vg_Vg_Vg_Vg_Vg_Vg_Vg_Vg_Vg_Vg_Vg_Vg_Vg_Vg_Vg_Vg_Vg_Vg_Vg_Vg_Vg_Vg_Vg_Vg_Vg_Vg_Vg_Vg_Vg_Vg_Vg_Vg_Vf^Uc[R\UMMG@g_Vg_Vg_Vg_Vg_Vg_Vg_Vg_Vg_Vg_Vg_Vg_Vg_Vg_Vg_Vg_Vg_Vg_Vg_Vjc[g_Vg_Vg_Vg_Vg_Vg_Vg_Vg_Vg_Vg_Vg_Vg_Vg_Vg_Vg_Vg_Vg_Vg_Vg_Vg_Vg_Vg_Vg_Vg_Vg_Vg_Vg_Vg_Vg_Vg_Vg_Vg_Vg_Vg_Vg_Vg_Vg_Vg_Vg_Vg_Vg_Vg_Vg_Vg_Vg_Vg_Vg_Vg_Vg_Vg_Vg_Vg_Vf^Uc[R\UMMG@g_Vg_Vg_Vg_Vg_Vg_Vg_Vg_Vg_Vg_Vg_Vg_Vg_Vg_Vg_Vg_Vg_Vg_Vg_Vjc[g_Vg_Vg_Vg_Vg_Vg_Vg_Vg_Vg_Vg_Vg_Vg_Vg_Vg_Vg_Vg_Vg_Vg_Vg_Vg_Vg_Vg_Vg_Vg_Vg_Vg_Vg_Vg_Vg_Vg_Vg_Vg_Vg_Vg_Vg_Vg_Vg_Vg_Vg_Vg_Vg_Vg_Vg_Vg_Vg_Vg_Vg_Vg_Vg_Vg_Vg_Vg_Vf^Uc[R\UMMG@g`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vjd[g`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg
`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vf_Uc\R\VMMH@g`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vjd[g`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vg`Vf_Uc\R\VMMH@haWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWke\haWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWhaWg`Vd]S]WNNIAiaWiaWiaWiaWiaWiaWiaWiaWiaWiaWiaWiaWiaWiaWiaWiaWiaWiaWiaWle\iaWiaWiaWiaWiaWiaWiaWiaWiaWiaWiaWiaWiaWiaWiaWiaWiaWiaWiaWiaWiaWiaWiaWiaWiaWiaWiaWiaWiaWiaWiaWiaWiaWiaWiaWiaWiaWiaWiaWiaWiaWiaWiaWiaWiaWiaWiaWiaWiaWiaWiaWiaWh`Vd]S^WNOIAibXibXibXibXibXibXibXibXibXibXibXibXibXibXibXibXibXibXibXlf]ibXibXibXibXibXibXibXibXibXibXibXibXibXibXibXibXibXibXibXibXibXibXibXibXibXibXibXibXibXibXibXibXibXibXibXibXibXibXibXibXibXibXibXibXibXibXibXibXibXibXibXibXhaWd^T^XOOIBjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYmg^jcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYjcYibXe_U_YPOJCkcYkcYkcYkcYkcYkcYkcYkcYkcYkcYkcYkcYkcYkcYkcYkcYkcYkcYkcYng^kcYkcYkcYkcYkcYkcYkcYkcYkcYkcYkcYkcYkcYkcYkcYkcYkcYkcYkcYkcYkcYkcYkcYkcYkcYkcYkcYkcYkcYkcYkcYkcYkcYkcYkcYkcYkcYkcYkcYkcYkcYkcYkcYkcYkcYkcYkcYkcYkcYkcYkcYkcYjbXf_U`YPPJCkdZkdZkdZkdZkdZkdZkdZkdZkdZkdZkdZkdZkdZkdZkdZkdZkdZkdZkdZnh_kdZkdZkdZkdZkdZkdZkdZkdZkdZkdZkdZkdZkdZkdZkdZkdZkdZkdZkdZkdZkdZkdZkdZkdZkdZkdZkdZkdZkdZkdZkdZkdZkdZkdZkdZkdZkdZkdZkdZkdZkdZkdZkdZkdZkdZkdZkdZkdZkdZkdZkdZkdZjcYf`V`ZQPKCleZleZleZleZleZleZleZleZleZleZleZleZleZleZleZleZleZleZleZoi_leZleZleZleZleZleZleZleZleZleZleZleZleZleZleZleZleZleZleZleZleZleZleZleZleZleZleZleZleZleZleZleZleZleZleZleZleZleZleZleZleZleZleZleZleZleZleZleZleZleZleZleZkdYgaVa[QQLCmf[mf[mf[mf[mf[mf[mf[mf[mf[mf[mf[mf[mf[mf[mf[mf[mf[mf[mf[pi`mf[mf[mf[mf[mf[mf[mf[mf[mf[mf[mf[mf[mf[mf[mf[mf[
mf[mf[mf[mf[mf[mf[mf[mf[mf[mf[mf[mf[mf[mf[mf[mf[mf[mf[mf[mf[mf[mf[mf[mf[mf[mf[mf[mf[mf[mf[mf[mf[mf[mf[mf[mf[leZhbWb\RRLDmf[mf[mf[mf[mf[mf[mf[mf[mf[mf[mf[mf[mf[mf[mf[mf[mf[mf[mf[pi`mf[mf[mf[mf[mf[mf[mf[mf[mf[mf[mf[mf[mf[mf[mf[mf[mf[mf[mf[mf[mf[mf[mf[mf[mf[mf[mf[mf[mf[mf[mf[mf[mf[mf[mf[mf[mf[mf[mf[mf[mf[mf[mf[mf[mf[mf[mf[mf[mf[mf[mf[mf[leZhbWb\RRLDmf[mf[mf[mf[mf[mf[mf[mf[mf[mf[mf[mf[mf[mf[mf[mf[mf[mf[mf[pi`mf[mf[mf[mf[mf[mf[mf[mf[mf[mf[mf[mf[mf[mf[mf[mf[mf[mf[mf[mf[mf[mf[mf[mf[mf[mf[mf[mf[mf[mf[mf[mf[mf[mf[mf[mf[mf[mf[mf[mf[mf[mf[mf[mf[mf[mf[mf[mf[mf[mf[mf[mf[leZhbWb\RRLDng\ng\ng\ng\ng\ng\ng\ng\ng\ng\ng\ng\ng\ng\ng\ng\ng\ng\ng\pjang\ng\ng\ng\ng\ng\ng\ng\ng\ng\ng\ng\ng\ng\ng\ng\ng\ng\ng\ng\ng\ng\ng\ng\ng\ng\ng\ng\ng\ng\ng\ng\ng\ng\ng\ng\ng\ng\ng\ng\ng\ng\ng\ng\ng\ng\ng\ng\ng\ng\ng\ng\mf[icXc\SRMEoh]oh]oh]oh]oh]oh]oh]oh]oh]oh]oh]oh]oh]oh]oh]oh]oh]oh]oh]qkaoh]oh]oh]oh]oh]oh]oh]oh]oh]oh]oh]oh]oh]oh]oh]oh]oh]oh]oh]oh]oh]oh]oh]oh]oh]oh]oh]oh]oh]oh]oh]oh]oh]oh]oh]oh]oh]oh]oh]oh]oh]oh]oh]oh]oh]oh]oh]oh]oh]oh]oh]oh]ng\jdYd]TSNFoh]oh]oh]oh]oh]oh]oh]oh]oh]oh]oh]oh]oh]oh]oh]oh]oh]oh]oh]qkaoh]oh]oh]oh]oh]oh]oh]oh]oh]oh]oh]oh]oh]oh]oh]oh]oh]oh]oh]oh]oh]oh]oh]oh]oh]oh]oh]oh]oh]oh]oh]oh]oh]oh]oh]oh]oh]oh]oh]oh]oh]oh]oh]oh]oh]oh]oh]oh]oh]oh]oh]oh]ng\jdYd]TSNFpi^pi^pi^pi^pi^pi^pi^pi^pi^pi^pi^pi^pi^pi^pi^pi^pi^pi^pi^rlbpi^pi^pi^pi^pi^pi^pi^pi^pi^pi^pi^pi^pi^pi^pi^pi^pi^pi^pi^pi^pi^pi^pi^pi^pi^pi^pi^pi^pi^pi^pi^pi^pi^pi^pi^pi^pi^pi^pi^pi^pi^pi^pi^pi^pi^pi^pi^pi^pi^pi^pi^pi^oh]kdZe^TTOFqi^qi^qi^qi^qi^qi^qi^qi^qi^qi^qi^qi^qi^qi^qi^qi^qi^qi^qi^slbqi^qi^qi^qi^qi^qi^qi^qi^qi^qi^qi^qi^qi^qi^qi^qi^qi^qi^qi^qi^qi^qi^qi^qi^qi^qi^qi^qi^qi^qi^qi^qi^qi^qi^qi^qi^qi^qi^qi^qi^qi^qi^qi^qi^qi^qi^qi^qi^qi^qi^qi^qi^ph]ldZe^TUOFqj_qj_qj_qj_qj_qj_qj_qj_qj_qj_qj_qj_qj_qj_qj_qj_qj_qj_qj_smcqj_qj_qj_qj_qj_qj_qj_qj_qj_qj_qj_qj_qj_qj_qj_qj_qj_qj_qj_qj_qj_qj_qj_qj_qj_qj_qj_qj_qj_qj_qj_qj_qj_qj_qj_qj_qj_qj_qj_qj_qj_qj_qj_qj_qj_qj_qj_qj_qj_qj_qj_qj_pi^le[e_UUOGrk_rk_rk_rk_rk_rk_rk_rk_rk_rk_rk_rk_rk_rk_rk_rk_rk_rk_rk
_tncrk_rk_rk_rk_rk_rk_rk_rk_rk_rk_rk_rk_rk_rk_rk_rk_rk_rk_rk_rk_rk_rk_rk_rk_rk_rk_rk_rk_rk_rk_rk_rk_rk_rk_rk_rk_rk_rk_rk_rk_rk_rk_rk_rk_rk_rk_rk_rk_rk_rk_rk_rk_qj^mf[f`UUPGrk_rk_rk_rk_rk_rk_rk_rk_rk_rk_rk_rk_rk_rk_rk_rk_rk_rk_rk_tncrk_rk_rk_rk_rk_rk_rk_rk_rk_rk_rk_rk_rk_rk_rk_rk_rk_rk_rk_rk_rk_rk_rk_rk_rk_rk_rk_rk_rk_rk_rk_rk_rk_rk_rk_rk_rk_rk_rk_rk_rk_rk_rk_rk_rk_rk_rk_rk_rk_rk_rk_rk_qj^mf[f`UUPGsk`sk`sk`sk`sk`sk`sk`sk`sk`sk`sk`sk`sk`sk`sk`sk`sk`sk`sk`undsk`sk`sk`sk`sk`sk`sk`sk`sk`sk`sk`sk`sk`sk`sk`sk`sk`sk`sk`sk`sk`sk`sk`sk`sk`sk`sk`sk`sk`sk`sk`sk`sk`sk`sk`sk`sk`sk`sk`sk`sk`sk`sk`sk`sk`sk`sk`sk`sk`sk`sk`sk`rj_nf\g`VVPHsl`sl`sl`sl`sl`sl`sl`sl`sl`sl`sl`sl`sl`sl`sl`sl`sl`sl`sl`uodsl`sl`sl`sl`sl`sl`sl`sl`sl`sl`sl`sl`sl`sl`sl`sl`sl`sl`sl`sl`sl`sl`sl`sl`sl`sl`sl`sl`sl`sl`sl`sl`sl`sl`sl`sl`sl`sl`sl`sl`sl`sl`sl`sl`sl`sl`sl`sl`sl`sl`sl`sl`rk_ng\gaVVQHtmbtmbtmbtmbtmbtmbtmbtmbtmbtmbtmbtmbtmbtmbtmbtmbtmbtmbtmbvpftmbtmbtmbtmbtmbtmbtmbtmbtmbtmbtmbtmbtmbtmbtmbtmbtmbtmbtmbtmbtmbtmbtmbtmbtmbtmbtmbtmbtmbtmbtmbtmbtmbtmbtmbtmbtmbtmbtmbtmbtmbtmbtmbtmbtmbtmbtmbtmbtmbtmbtmbtmbslaoh^hbXWRIumbumbumbumbumbumbumbumbumbumbumbumbumbumbumbumbumbumbumbwpfumbumbumbumbumbumbumbumbumbumbumbumbumbumbumbumbumbumbumbumbumbumbumbumbumbumbumbumbumbumbumbumbumbumbumbumbumbumbumbumbumbumbumbumbumbumbumbumbumbumbumbumbtlaph^ibXXRIvncvncvncvncvncvncvncvncvncvncvncvncvncvncvncvncvncvncvncwpgvncvncvncvncvncvncvncvncvncvncvncvncvncvncvncvncvncvncvncvncvncvncvncvncvncvncvncvncvncvncvncvncvncvncvncvncvncvncvncvncvncvncvncvncvncvncvncvncvncvncvncvncumbqi_jcYXRJwncwncwncwncwncwncwncwncwncwncwncwncwncwncwncwncwncwncwncxpgwncwncwncwncwncwncwncwncwncwncwncwncwncwncwncwncwncwncwncwncwncwncwncwncwncwncwncwncwncwncwncwncwncwncwncwncwncwncwncwncwncwncwncwncwncwncwncwncwncwncwncwncvmbri_kcYYRJwodwodwodwodwodwodwodwodwodwodwodwodwodwodwodwodwodwodwodxqhwodwodwodwodwodwodwodwodwodwodwodwodwodwodwodwodwodwodwodwodwodwodwodwodwodwodwodwodwodwodwodwodwodwodwodwodwodwodwodwodwodwodwodwodwodwodwodwodwodwodwodwodvncrj`kdZYSKxpex
pexpexpexpexpexpexpexpexpexpexpexpexpexpexpexpexpexpeyrixpexpexpexpexpexpexpexpexpexpexpexpexpexpexpexpexpexpexpexpexpexpexpexpexpexpexpexpexpexpexpexpexpexpexpexpexpexpexpexpexpexpexpexpexpexpexpexpexpexpexpexpewodskale[ZTLxpexpexpexpexpexpexpexpexpexpexpexpexpexpexpexpexpexpexpeyrixpexpexpexpexpexpexpexpexpexpexpexpexpexpexpexpexpexpexpexpexpexpexpexpexpexpexpexpexpexpexpexpexpexpexpexpexpexpexpexpexpexpexpexpexpexpexpexpexpexpexpexpewodskale[ZTLypeypeypeypeypeypeypeypeypeypeypeypeypeypeypeypeypeypeypezriypeypeypeypeypeypeypeypeypeypeypeypeypeypeypeypeypeypeypeypeypeypeypeypeypeypeypeypeypeypeypeypeypeypeypeypeypeypeypeypeypeypeypeypeypeypeypeypeypeypeypeypexodtkame[[TLyqfyqfyqfyqfyqfyqfyqfyqfyqfyqfyqfyqfyqfyqfyqfyqfyqfyqfyqfzsiyqfyqfyqfyqfyqfyqfyqfyqfyqfyqfyqfyqfyqfyqfyqfyqfyqfyqfyqfyqfyqfyqfyqfyqfyqfyqfyqfyqfyqfyqfyqfyqfyqfyqfyqfyqfyqfyqfyqfyqfyqfyqfyqfyqfyqfyqfyqfyqfyqfyqfyqfyqfxpetlbme\[ULzrfzrfzrfzrfzrfzrfzrfzrfzrfzrfzrfzrfzrfzrfzrfzrfzrfzrfzrf{tizrfzrfzrfzrfzrfzrfzrfzrfzrfzrfzrfzrfzrfzrfzrfzrfzrfzrfzrfzrfzrfzrfzrfzrfzrfzrfzrfzrfzrfzrfzrfzrfzrfzrfzrfzrfzrfzrfzrfzrfzrfzrfzrfzrfzrfzrfzrfzrfzrfzrfzrfzrfyqeumbnf\[UL{sg{sg{sg{sg{sg{sg{sg{sg{sg{sg{sg{sg{sg{sg{sg{sg{sg{sg{sg|uj{sg{sg{sg{sg{sg{sg{sg{sg{sg{sg{sg{sg{sg{sg{sg{sg{sg{sg{sg{sg{sg{sg{sg{sg{sg{sg{sg{sg{sg{sg{sg{sg{sg{sg{sg{sg{sg{sg{sg{sg{sg{sg{sg{sg{sg{sg{sg{sg{sg{sg{sg{sgzrfvncng\\VM{tg{tg{tg{tg{tg{tg{tg{tg{tg{tg{tg{tg{tg{tg{tg{tg{tg{tg{tg|vj{tg{tg{tg{tg{tg{tg{tg{tg{tg{tg{tg{tg{tg{tg{tg{tg{tg{tg{tg{tg{tg{tg{tg{tg{tg{tg{tg{tg{tg{tg{tg{tg{tg{tg{tg{tg{tg{tg{tg{tg{tg{tg{tg{tg{tg{tg{tg{tg{tg{tg{tg{tgzsfvocnh\\WM|th|th|th|th|th|th|th|th|th|th|th|th|th|th|th|th|th|th|th}vk|th|th|th|th|th|th|th|th|th|th|th|th|th|th|th|th|th|th|th|th|th|th|th|th|th|th|th|th|th|th|th|th|th|th|th|th|th|th|th|th|th|th|th|th|th|th|th|th|th|th|th|th{sgwodoh]]WN}uh}uh}uh}uh}uh}uh}uh}uh}uh}uh}uh}uh}uh}uh}uh}uh}uh}uh}uh~wk}uh}uh}uh}uh}uh}uh}uh}uh}uh}uh}uh}uh}uh}uh}uh}uh}uh}uh}uh}uh}uh}uh}uh}uh}uh}uh}uh}uh}uh}uh}uh}uh}uh}uh}uh}uh}uh}uh}uh}uh
}uh}uh}uh}uh}uh}uh}uh}uh}uh}uh}uh}uh|tgxpdpi]^XN}ui}ui}ui}ui}ui}ui}ui}ui}ui}ui}ui}ui}ui}ui}ui}ui}ui}ui}ui~wl}ui}ui}ui}ui}ui}ui}ui}ui}ui}ui}ui}ui}ui}ui}ui}ui}ui}ui}ui}ui}ui}ui}ui}ui}ui}ui}ui}ui}ui}ui}ui}ui}ui}ui}ui}ui}ui}ui}ui}ui}ui}ui}ui}ui}ui}ui}ui}ui}ui}ui}ui}ui|thxpdpi^^XO}uj}uj}uj}uj}uj}uj}uj}uj}uj}uj}uj}uj}uj}uj}uj}uj}uj}uj}uj~wm}uj}uj}uj}uj}uj}uj}uj}uj}uj}uj}uj}uj}uj}uj}uj}uj}uj}uj}uj}uj}uj}uj}uj}uj}uj}uj}uj}uj}uj}uj}uj}uj}uj}uj}uj}uj}uj}uj}uj}uj}uj}uj}uj}uj}uj}uj}uj}uj}uj}uj}uj}uj|tixpepi_^XO~vj~vj~vj~vj~vj~vj~vj~vj~vj~vj~vj~vj~vj~vj~vj~vj~vj~vj~vj~wm~vj~vj~vj~vj~vj~vj~vj~vj~vj~vj~vj~vj~vj~vj~vj~vj~vj~vj~vj~vj~vj~vj~vj~vj~vj~vj~vj~vj~vj~vj~vj~vj~vj~vj~vj~vj~vj~vj~vj~vj~vj~vj~vj~vj~vj~vj~vj~vj~vj~vj~vj~vj}uiyqeqj_^XOwkwkwkwkwkwkwkwkwkwkwkwkwkwkwkwkwkwkwkxnwkwkwkwkwkwkwkwkwkwkwkwkwkwkwkwkwkwkwkwkwkwkwkwkwkwkwkwkwkwkwkwkwkwkwkwkwkwkwkwkwkwkwkwkwkwkwkwkwkwkwkwk~vjzrfrk`_YPwkwkwkwkwkwkwkwkwkwkwkwkwkwkwkwkwkwkwkxnwkwkwkwkwkwkwkwkwkwkwkwkwkwkwkwkwkwkwkwkwkwkwkwkwkwkwkwkwkwkwkwkwkwkwkwkwkwkwkwkwkwkwkwkwkwkwkwkwkwkwkwk~vjzrfrk`_YPxlxlxlxlxlxlxlxlxlxlxlxlxlxlxlxlxlxlxlyoxlxlxlxlxlxlxlxlxlxlxlxlxlxlxlxlxlxlxlxlxlxlxlxlxlxlxlxlxlxlxlxlxlxlxlxlxlxlxlxlxlxlxlxlxlxlxlxlxlxlxlxl~wkzsgsla`ZQymymymymymymymymymymymymymymymymymymymzpymymymymymymymymymymymymymymymymymymymymymymymymymymymymymymymymymymymymymymymymymymymymymymymymymymymymxl|thumba[Rymymymymymymymymymymymymymymymymymymymzpymymymymymymymymymymymymymymymymymymymymymymymymymymymymymymymymymymymymymymymymymymymymymymymymymymymymxl|thumba[Rznznznznznznznznznznznznznznznznznznzn{pznznznznznznznznznznznznznznznznznznznznznznznznznznznznznznznznznznznznznznznznznznznznznznznznznznznznym}uivncb[Rznznznznznznznznznznznznznznznznznznzn{pznznznznznznznznznznznznznznznznznznznznznznznznznznznznznznznznznznznznznznznznznznznznznznznznznznznznym~uiwncc[R{n{n{n{n{n{n{n{n{n{n{n{n{n{n{n{n{n{n{n|p{n{n{n{n{n{n{n{n{n{n{n{n{n{n{n{n{n{n{n{n{n{n{n{n{n{n{n{n{n{n{n{n{n{n{n{n{n{n{n{n{n{n{n{n{n{n{n{n{n{n{n{nzm~viwncc\R|p|p|p|p|p|p|p|p|p|p|p|p|
p|p|p|p|p|p|p}r|p|p|p|p|p|p|p|p|p|p|p|p|p|p|p|p|p|p|p|p|p|p|p|p|p|p|p|p|p|p|p|p|p|p|p|p|p|p|p|p|p|p|p|p|p|p|p|p|p|p|p|p{o~wkwoec]T}p}p}p}p}p}p}p}p}p}p}p}p}p}p}p}p}p}p}p~r}p}p}p}p}p}p}p}p}p}p}p}p}p}p}p}p}p}p}p}p}p}p}p}p}p}p}p}p}p}p}p}p}p}p}p}p}p}p}p}p}p}p}p}p}p}p}p}p}p}p}p}p|oxkwped^T}q}q}q}q}q}q}q}q}q}q}q}q}q}q}q}q}q}q}q~s}q}q}q}q}q}q}q}q}q}q}q}q}q}q}q}q}q}q}q}q}q}q}q}q}q}q}q}q}q}q}q}q}q}q}q}q}q}q}q}q}q}q}q}q}q}q}q}q}q}q}q}q|pxlxped^U~q~q~q~q~q~q~q~q~q~q~q~q~q~q~q~q~q~q~q~s~q~q~q~q~q~q~q~q~q~q~q~q~q~q~q~q~q~q~q~q~q~q~q~q~q~q~q~q~q~q~q~q~q~q~q~q~q~q~q~q~q~q~q~q~q~q~q~q~q~q~q~q}pylxqed^U~r~r~r~r~r~r~r~r~r~r~r~r~r~r~r~r~r~r~r~t~r~r~r~r~r~r~r~r~r~r~r~r~r~r~r~r~r~r~r~r~r~r~r~r~r~r~r~r~r~r~r~r~r~r~r~r~r~r~r~r~r~r~r~r~r~r~r~r~r~r~r~r}qymyqfe^Usssssssssssssssssssussssssssssssssssssssssssssssssssssssssssssssssssssss~rznzrgf_Vsssssssssssssssssssussssssssssssssssssssssssssssssssssssssssssssssssssss~rzn{sgg`Vtttttttttttttttttttvtttttttttttttttttttttttttttttttttttttttttttttttttttt~szo|shg`Wtttttttttttttttttttvtttttttttttttttttttttttttttttttttttttttttttttttttttt~szo|shg`Wvvvvvvvvvvvvvvvvvvvwvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvu|q}ujhaXvvvvvvvvvvvvvvvvvvvwvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvu}q~vjibXvvvvvvvvvvvvvvvvvvvwvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvu}q~vjibXwwwwwwwwwwwwwwwwwwwxwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwv~rwkjcYxxxxxxxxxxxxxxxxxxxyxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxw~swljcZyyyyyyyyyyyyyyyyyyyzyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyxtwmkd[yyyyyyyyyyyyyyyyyyyzyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyxtxmkd[zzzzzzzzzzzzzzzzzzz{zzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzyuynle[{{{{{{{{{{{{{{{{{{{|{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{zvznmf\|||||||||||||||||||}||||||||||||||||||||||||||||||||||||||||||||||||||||{wzomf]}}}}}}}}}}}}}}}}}}}~}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}|x|png^}}}}}}}}}}}}}}}}}}}~}}}}}}}}}}}}}}}}}}}}}}}}}}}}}
}}}}}}}}}}}}}}}}}}}}}}}|x|pog^~z~rpi_~z~spi`~zsqj`{trka|uska}vslbwumdxundyvoezwpf|xqg}yrh{sj|uk|uk~wmypºººººººººººººººººººººººººººººººººººººººººººººººººººººººººººººººººººººººººººººººººººººººypýýýýýýýýýýýýýýýýýýýýýýýýýýýýýýýýýýýýýýýýýýýýýýýýýýýýýýýýýýýýýýýýýýýýýýýýýýýýýýýýýýýýýýý|sŽŽŽŽŽŽŽŽŽŽŽŽŽŽŽŽŽŽŽŽŽŽŽŽŽŽŽŽŽŽŽŽŽŽŽŽŽŽŽŽŽŽŽŽŽŽŽŽŽŽŽŽŽŽŽŽŽŽŽŽŽŽŽŽŽŽŽŽŽŽŽŽŽŽŽŽŽŽŽŽŽŽŽŽŽŽŽ}tv¹¹¹¹¹¹¹¹¹¹¹¹¹¹¹¹¹¹¹¹¹¹¹¹¹¹¹¹¹¹¹¹¹¹¹¹¹¹¹¹¹¹¹¹¹¹¹¹¹¹¹¹¹¹¹¹¹¹¹¹¹¹¹¹¹¹¹¹¹¹¹¹¹¹¹¹¹¹¹¹¹¹¹¹¹¹¹yŽŽŽŽŽŽŽŽŽŽŽŽŽŽŽŽŽŽŽŽŽŽŽŽŽŽŽŽŽŽŽŽŽŽŽŽŽŽŽŽŽŽŽŽŽŽŽŽŽŽŽŽŽŽŽŽŽŽŽŽŽŽŽŽŽŽŽŽŽŽŽŽŽŽŽŽŽŽŽŽŽŽŽŽŽŽŽý}ƿƽƽƽƽƽƽƽƽƽƽƽƽƽƽƽƽƽƽƽſƽƽƽƽƽƽƽƽƽƽƽƽƽƽƽƽƽƽƽƽƽƽƽƽƽƽƽƽƽƽƽƽƽƽƽƽƽƽƽƽƽƽƽƽƽƽƽƽƽƽƽƽĻŽŽŽŽŽŽŽŽŽŽŽŽŽŽŽŽŽŽŽĽŽŽŽŽŽŽŽŽŽŽŽŽŽŽŽŽŽŽŽŽŽŽŽŽŽŽŽŽŽŽŽŽŽŽŽŽŽŽŽŽŽŽŽŽŽŽŽŽŽŽŽŽûĽdub-1.40.0/scripts/win-installer/header.bmp000066400000000000000000000623321477246567400206110ustar00rootroot00000000000000BMd6(9d¼½ýýľľſ»»üĽĽŽƾƿǿ]]¼ýýþĿĿºûüüýſ]]½½Ŀſ¼Ľ]]¾½ž]]ÿ¼ž]]]]yyyhhhggggggggggggggggggffffffffffffffffffffffffffffffffffffffffffffffffffffffeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeedddddddddddddddddddddddddddddddddddddddddddddddddddcccccccccccccccccccccxxxwvtý]]PPPwwwhhhgggggggggggggggffffffffffffffffffffffffffffffffffffffffffffffffffffffeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeedddddddddddddddddddddddddddddddddddddddddddddddddddccccccccccccccccccuuuLLLvurſ]]KKKSSSwwwgggggggggggggggffffffffffffffffffffffffffffffffffffffffffffffffffffffeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeedddddddddddddddddddddddddddddddddddddddddddddddddddcccccccccccccccpppTTT:::usqÿþ]]KKKKKKTTTwwwggggggggggggffffffffffffffffffffffffffffffffffffffffffffffffffffffeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeedddddddddddddddddddddddddddddddddddddddddddddddddddccccccccccccmmmZZZ;;;:::trp½]]KKKKKKKKKUUUtttgggggggggffffffffffffffffffffffffffffffffffffffffffffffffffffffeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeedddddddddddddddddddddddddddddddddddddddddddddddddddccccccccchhhccc;;;;;;:::tro]]KKKKKKKKKJJJYYYooommmllllllllllllllllllllllllkkkkkkkkkkkkkkkkkkkkkkkkjjj
jjjjjjjjjjjjjjjjjjjjjiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiihhhhhhhhhhhhhhhhhhhhhhhhggggggggggggggggggggggggggggggggggggbbb;;;;;;;;;:::sqo]]KKKKKKKKKJJJJJJJJJJJJIIIIIIIIIIIIHHHHHHHHHHHHGGGGGGGGGGGGFFFFFFFFFFFFEEEEEEEEEEEEDDDDDDDDDDDDCCCCCCCCCCCCBBBBBBBBBBBBAAAAAAAAAAAA@@@@@@@@@@@@????????????>>>>>>>>>>>>============<<<<<<<<<<<<;;;;;;;;;;;;:::sqo]]KKKKKKKKKJJJJJJJJJJJJIIIIIIIIIIIIHHHHHHHHHHHHGGGGGGGGGGGGFFFFFFFFFFFFEEEEEEEEEEEEDDDDDDDDDDDDCCCCCCCCCCCCBBBBBBBBBBBBAAAAAAAAAAAA@@@@@@@@@@@@????????????>>>>>>>>>>>>============<<<<<<<<<<<<;;;;;;;;;;;;:::tro]]KKKKKKKKKJJJJJJJJJJJJIIIIIIIIIIIIHHHHHHHHHHHHGGGGGGGGGGGGFFFFFFFFFFFFEEEEEEEEEEEEDDDDDDDDDDDDCCCCCCCCCCCCBBBBBBBBBBBBAAAAAAAAAAAA@@@@@@@@@@@@????????????>>>>>>>>>>>>============<<<<<<<<<<<<;;;;;;;;;;;;:::tro]]KKKKKKKKKJJJJJJJJJJJJIIIIIIIIIIIIHHHHHHHHHHHHGGGGGGGGGGGGFFFFFFFFFFFFEEEEEEEEEEEEDDDDDDDDDDDDCCCCCCCCCCCCBBBBBBBBBBBBAAAAAAAAAAAA@@@@@@@@@@@@????????????>>>>>>>>>>>>============<<<<<<<<<<<<;;;;;;;;;;;;:::tro]]KKKKKKKKKJJJJJJJJJJJJIIIIIIIIIIIIHHHHHHIakGDBCEHpFIJFFFFFFEEEEEEEEEEEEDDDDDDDDDEKNEqCB@BD~EYbBBBAAAAAAAAAAAA@@@@@@@@@@@@???BU]C{@>@ACp>CE=========<<<<<<<<<<<<;;;;;;;;;;;;:::tro]]KKKKKKKKKJJJJJJJJJJJJIIIIIIIIIIJJG>1(((((+;DGPTEEEEEEEEEEEEDDDEW`B8*(((((2>CuABBAAAAAA@@@@@@@@@Cq>2(((((*8?@U^===<<<<<<<<<<<<;;;;;;;;;;;;:::tro]]KKKKKKKKKJJJJJJJJJJJJIIIIIIIal@+((((((((((:EvEEEEEEEEED5((((((((((+?BRYAAA@@@AOV@-((((((((((4A<>?<<<<<<<<<;;;;;;;;;;;;:::tro]]KKKKKKKKKJJJJJJJJJJJJIIIGo8(((((((((((((/EEFFD+(((((((((((((:CYdARZ;(((((((((((((*@>FJ<<<<<<;;;;;;;;;;;;:::tro]]KKKKKKKKKJJJJJJJJJJJJHbo7(((((((((((((((.B+(((((((((((((((;:((((((((((((((((@=?A<<<;;;;;;;;;;;;:::tro]]KKKKKKKKKJJJJJJJJJJMN=((((((28974)((((((((((()5787/((((((((((((/8875*((((()B<<<;;;;;;;;;;;;:::tro]]KKKKKKKKKJJJJJJJJJE*((((+?BoGHHGGGGGGC\m?/(((((((((1@AT_CCCCCCADF?w;*((((((((*:=v>BE>>>>>>=MW>2(((((4@U^;;;;;;;;;;;;:::tro]]KKKKKKKKKJJJJJJJKK:((((+CHHHHHHGGGGGGGGGGGGFFFD/(((((((1DtCCCCCCCCCCCCBBBBBBBEF?*((((((*>>BE??????>>>>>>>>>>>>@n2(((((A;;;;;;;;;;;;:::tro]]K
KKKKKKKKJJJJJJFr/((((AHIIHHHHHHGGGGGGGGGGGGFFFFFFD(((((((FDDDCCCCCCCCCCCCBBBBBBBBBBEF=((((((>>>>>>>>>>>===C}(((((9>>>>>>>>>>>===>GK:((((*Bhx;;;;;;;;;:::tro]]KKKKKKKKKJJJJJJ>((((>HHHHHHHHHHHHGGGGGGGGGGGGFFFFFFFFFH(((((IDDDDDDCCCCCCCCCCCCBBBBBBBBBBBBAAA<((((;@@@????????????>>>>>>>>>>>>======B~(((((Bw;;;;;;;;;:::tro]]KKKKKKKKKJJJJJJ<((((AHHHHHHHHHHHHGGGGGGGGGGGGFFFFFFFFFU(((((ZDDDDDDCCCCCCCCCCCCBBBBBBBBBBBBAAAE((((C@@@????????????>>>>>>>>>>>>======Dt(((((@{;;;;;;;;;:::tro]]KKKKKKKKKJJJJJJ<((((AHHHHHHHHHHHHGGGGGGGGGGGGFFFFFFFFFW(((((\DDDDDDCCCCCCCCCCCCBBBBBBBBBBBBAAAG((((D@@@????????????>>>>>>>>>>>>======Dt(((((@|;;;;;;;;;:::tro]]KKKKKKKKKJJJJJJ=((((>HHHHHHHHHHHHGGGGGGGGGGGGFFFFFFFFFN(((((]DDDDDDCCCCCCCCCCCCBBBBBBBBBBBBCCCH((((<@@@????????????>>>>>>>>>>>>======A~(((((Bu;;;;;;;;;:::tro]]KKKKKKKKKJJJJJJ?*(((0HgvHHHHHHHHHGGGGGGGGGGGGFFFFFFIdp7(((((\RRRDDDCCCCCCCCCCCCBBBBBBBBBBBBZZZF((((.@q????????????>>>>>>>>>>>>===AXb6((((*Bhx;;;;;;;;;:::tro]]KKKKKKKKKJJJJJJDbv4((((AHIIHHHHHHGGGGGGGGGGGGFFFFFFD((((((]kkkDDDCCCCCCCCCCCCBBBBBBBBBEEErrrE(((((>>>>>>>>>>>===C~(((((9>BE??????>>>>>>>>>>>>Bv/(((((A;;;;;;;;;;;;:::tro]]KKKKKKKKKJJJJJJJJJC~/((((+?BoGHHGGGGGGC`q?-(((((((]hhhooolllMMMCCCCCCCCC\\\tttjjjhhhE((((((*:=v>AD>>>>>>=MX>1(((((4@U^;;;;;;;;;;;;:::tro]]KKKKKKKKKJJJJJJJJJJKK>((((((28974)((((((((]hhhhhhjjjssstttsssuuuooohhhhhhhhhE((((((((/8875)((((()B<<<;;;;;;;;;;;;:::tro]]KKKKKKKKKJJJJJJJJJJJJGYb9(((((((((((((((((((]hhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhE(((((((((((((((((((@=?A<<<;;;;;;;;;;;;:::tro]]KKKKKKKKKJJJJJJJJJJJJIIIFi{9((((((((((((((((((]hhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhE(((((((((((((((((*@>FJ<<<<<<;;;;;;;;;;;;:::tro]]KKKKKKKKKJJJJJJJJJJJJIIIIIIH\e@+(((((((((*6(((((`hhhhhhhhhhhhhhhhhhhhhhhhhhhhhhlllG((((4,((((((((((4A<>?<<<<<<<<<;;;;;;;;;;;;:::tro]]KKKKKKKKKJJJJJJJJJJJJIIIIIIIIIIJJG}>1(((((,2(((((*8?@U^===<<<<<<<<<<<<;;;;;;;;;;;;:::tro]]KKKKKKKKKJJJJJJJJJJJJIIIIIIIIIIIIHHHHHHIakGDBCEHl}FHIFFFF|(((((GtDDDJJJ^^^hhhjjjjjjhhheeePPPBBBAAA;((((;@@@???BT\Cy@?@ACp>CE=========<<<<<<<<<<
<<;;;;;;;;;;;;:::tro]]KKKKKKKKKJJJJJJJJJJJJIIIIIIIIIIIIHHHHHHHHHHHHGGGGGGGGGGGGFFFFFFFFFF|(((((GtDDDDDDCCCCCCCCCCCCBBBBBBBBBBBBAAA;((((;@@@????????????>>>>>>>>>>>>============<<<<<<<<<<<<;;;;;;;;;;;;:::tro]]KKKKKKKKKJJJJJJJJJJJJIIIIIIIIIIIIHHHHHHHHHHHHGGGGGGGGGGGGFFFFFFFFFF|(((((GtDDDDDDCCCCCCCCCCCCBBBBBBBBBBBBAAA;((((;@@@????????????>>>>>>>>>>>>============<<<<<<<<<<<<;;;;;;;;;;;;:::tro]]KKKKKKKKKJJJJJJJJJJJJIIIIIIIIIIIIHHHHHHHHHHHHGGGGGGGGGGGGFFFFFFFFFH~(((()IuDDDDDDCCCCCCCCCCCCBBBBBBBBBBBBAAA>((((>@@@????????????>>>>>>>>>>>>============<<<<<<<<<<<<;;;;;;;;;;;;:::tro]]KKKKKKKKKJJJJJJJJJJJJIIIIIIIIIIIIHHHHHHHHHHHHGGGGGGGGGGGGFFFFFFFFFHY`:(((?FW^DDDDDDCCCCCCCCCCCCBBBBBBBBBBBBAAAAi~3((1@n@@@????????????>>>>>>>>>>>>============<<<<<<<<<<<<;;;;;;;;;;;;:::tro]]KKKKKKKKKJJJJJJJJJJJJIIIIIIIIIIIIHHHHHHHHHHHHGGGGGGGGGGGGFFFFFFFFFFFFEV^A=AEOTDDDDDDDDDCCCCCCCCCCCCBBBBBBBBBBBBAAAAAA@n=<@o@@@@@@????????????>>>>>>>>>>>>============<<<<<<<<<<<<;;;;;;;;;;;;:::trp]]KKKKKKKKKJJJJJJJJJJJJIIIIIIIIIIIIHHHHHHHHHHHHGGGGGGGGGGGGFFFFFFFFFFFFEEEEEEEEEEEEDDDDDDDDDDDDCCCCCCCCCCCCBBBBBBBBBBBBAAAAAAAAAAAA@@@@@@@@@@@@????????????>>>>>>>>>>>>============<<<<<<<<<<<<;;;;;;;;;;;;:::usp¾þ]]KKKKKKKKKJJJJJJJJJJJJIIIIIIIIIIIIHHHHHHHHHHHHGGGGGGGGGGGGFFFFFFFFFFFFEEEEEEEEEEEEDDDDDDDDDDDDCCCCCCCCCCCCBBBBBBBBBBBBAAAAAAAAAAAA@@@@@@@@@@@@????????????>>>>>>>>>>>>============<<<<<<<<<<<<;;;;;;;;;;;;:::vtqĿľ]]KKKKKKKKKJJJJJJJJJJJJIIIIIIIIIIIIHHHHHHHHHHHHGGGGGGGGGGGGFFFFFFFFFFFFEEEEEEEEEEEEDDDDDDDDDDDDCCCCCCCCCCCCBBBBBBBBBBBBAAAAAAAAAAAA@@@@@@@@@@@@????????????>>>>>>>>>>>>============<<<<<<<<<<<<;;;;;;;;;;;;:::wur]]KKKKKKKKKJJJJJJJJJJJJIIIIIIIIIIIIHHHHHHHHHHHHGGGGGGGGGGGGFFFFFFFFFFFFEEEEEEEEEEEEDDDDDDDDDDDDCCCCCCCCCCCCBBBBBBBBBBBBAAAAAAAAAAAA@@@@@@@@@@@@????????????>>>>>>>>>>>>============<<<<<<<<<<<<;;;;;;;;;;;;:::xvsľ]]KKKKKKKKKJJJJJJJJJJJJIIIIIIIIIIIIHHHHHHHHHHHHGGGGGGGGGGGGFFFFFFFFFFFFEEEEEEEEEEEEDDDDDDDDDDDDCCCCCCCCCCCCBBBBBBBBBBBBAAAAAAAAAAAA@@@@@@@@@@@@????????????>>>>>>>>>>>>============<<<<<<<<<<<<;;;;;;
;;;;;;:::ywu]]¾Ŀý]]½Ŀýſ]]½½ýþĿſüľ]]¼ýýþĿĿºûýĽľ]]¼½ýýľľſ»»üĽĽŽƾƿ]]»»üýýþľſ¹¹úûüĽžžƿ]]»»üĽĽžſƿ¸¹úĻĻżƽƾǿ]]dub-1.40.0/scripts/win-installer/installer.nsi000066400000000000000000000111771477246567400213720ustar00rootroot00000000000000SetCompressor /SOLID lzma ;-------------------------------------------------------- ; Defines ;-------------------------------------------------------- ; Options !ifndef Version !define /ifndef Version "0.9.21" !endif !define DubExecPath "..\..\bin" ;-------------------------------------------------------- ; Includes ;-------------------------------------------------------- !include "MUI.nsh" !include "EnvVarUpdate.nsh" ;-------------------------------------------------------- ; General definitions ;-------------------------------------------------------- ; Name of the installer Name "dub Package Manager ${Version}" ; Name of the output file of the installer OutFile "dub-${Version}-setup.exe" ; Where the program will be installed InstallDir "$PROGRAMFILES\dub" ; Take the installation directory from the registry, if possible InstallDirRegKey HKLM "Software\dub" "" ; Prevent installation of a corrupt installer CRCCheck force RequestExecutionLevel admin ;-------------------------------------------------------- ; Interface settings ;-------------------------------------------------------- ;!define MUI_ICON "installer-icon.ico" ;!define MUI_UNICON "uninstaller-icon.ico" ;-------------------------------------------------------- ; Installer pages ;-------------------------------------------------------- !define MUI_WELCOMEFINISHPAGE_BITMAP "banner.bmp" !define MUI_HEADERIMAGE !define MUI_HEADERIMAGE_BITMAP "header.bmp" !insertmacro MUI_PAGE_WELCOME !insertmacro MUI_PAGE_COMPONENTS !insertmacro MUI_PAGE_DIRECTORY !insertmacro MUI_PAGE_INSTFILES !insertmacro MUI_PAGE_FINISH !insertmacro MUI_UNPAGE_WELCOME !insertmacro MUI_UNPAGE_CONFIRM !insertmacro MUI_UNPAGE_INSTFILES !insertmacro MUI_UNPAGE_FINISH 
;-------------------------------------------------------- ; The languages ;-------------------------------------------------------- !insertmacro MUI_LANGUAGE "English" ;-------------------------------------------------------- ; Required section: main program files, ; registry entries, etc. ;-------------------------------------------------------- ; Section "dub" DubFiles ; This section is mandatory SectionIn RO SetOutPath $INSTDIR ; Create installation directory CreateDirectory "$INSTDIR" File "${DubExecPath}\dub.exe" File "${DubExecPath}\libcurl.dll" File "${DubExecPath}\libeay32.dll" File "${DubExecPath}\ssleay32.dll" ; Create command line batch file FileOpen $0 "$INSTDIR\dubvars.bat" w FileWrite $0 "@echo.$\n" FileWrite $0 "@echo Setting up environment for using dub from %~dp0$\n" FileWrite $0 "@set PATH=%~dp0;%PATH%$\n" FileClose $0 ; Write installation dir in the registry WriteRegStr HKLM SOFTWARE\dub "Install_Dir" "$INSTDIR" ; Write registry keys to make uninstall from Windows WriteRegStr HKLM "Software\Microsoft\Windows\CurrentVersion\Uninstall\dub" "DisplayName" "dub package manager" WriteRegStr HKLM "Software\Microsoft\Windows\CurrentVersion\Uninstall\dub" "UninstallString" '"$INSTDIR\uninstall.exe"' WriteRegDWORD HKLM "Software\Microsoft\Windows\CurrentVersion\Uninstall\dub" "NoModify" 1 WriteRegDWORD HKLM "Software\Microsoft\Windows\CurrentVersion\Uninstall\dub" "NoRepair" 1 WriteUninstaller "uninstall.exe" SectionEnd Section "Add to PATH" AddDubToPath ; Add dub directory to path (for all users) ${EnvVarUpdate} $0 "PATH" "A" "HKLM" "$INSTDIR" SectionEnd Section /o "Start menu shortcuts" StartMenuShortcuts CreateDirectory "$SMPROGRAMS\DUB" ; install dub command prompt CreateShortCut "$SMPROGRAMS\DUB\DUB Command Prompt.lnk" '%comspec%' '/k ""$INSTDIR\dubvars.bat""' "" "" SW_SHOWNORMAL "" "Open DUB Command Prompt" CreateShortCut "$SMPROGRAMS\DUB\Uninstall.lnk" "$INSTDIR\uninstall.exe" "" "$INSTDIR\uninstall.exe" 0 SectionEnd 
;-------------------------------------------------------- ; Uninstaller ;-------------------------------------------------------- Section "Uninstall" ; Remove directories to path (for all users) ; (if for the current user, use HKCU) ${un.EnvVarUpdate} $0 "PATH" "R" "HKLM" "$INSTDIR" ; Remove stuff from registry DeleteRegKey HKLM "Software\Microsoft\Windows\CurrentVersion\Uninstall\dub" DeleteRegKey HKLM SOFTWARE\dub DeleteRegKey /ifempty HKLM SOFTWARE\dub ; This is for deleting the remembered language of the installation DeleteRegKey HKCU Software\dub DeleteRegKey /ifempty HKCU Software\dub ; Remove the uninstaller Delete $INSTDIR\uninstall.exe ; Remove shortcuts Delete "$SMPROGRAMS\dub\dub Command Prompt.lnk" ; Remove used directories RMDir /r /REBOOTOK "$INSTDIR" RMDir /r /REBOOTOK "$SMPROGRAMS\dub" SectionEnd dub-1.40.0/scripts/win-installer/make_installer.cmd000066400000000000000000000002311477246567400223260ustar00rootroot00000000000000set GITVER=unknown for /f %%i in ('git describe --tags') do set GITVER=%%i "%ProgramFiles(x86)%\NSIS\makensis.exe" "/DVersion=%GITVER:~1%" installer.nsi dub-1.40.0/scripts/zsh-completion/000077500000000000000000000000001477246567400170365ustar00rootroot00000000000000dub-1.40.0/scripts/zsh-completion/_dub000066400000000000000000000266071477246567400177050ustar00rootroot00000000000000#compdef dub # Useful help: # https://github.com/zsh-users/zsh-completions/blob/master/zsh-completions-howto.org # http://zsh.sourceforge.net/Doc/Release/Completion-System.html # http://zdharma.org/Zsh-100-Commits-Club/Zsh-Native-Scripting-Handbook.html # # Completions installed on your system, e.g. for MacOSX + Homebrew users: # /usr/local/Cellar/zsh/$VERSION/share/zsh/functions # The GIT completion is quite amazing (and equally complex) # The CVS completion is much easier to grok (e.g. for function dispatch) # Entry point _dub() { # TODO: # - Handle registry URLs # - Handle multiple dub (e.g. 
./bin/dub add [TAB]) # - Interactively query configuration (for -c and --override-config) # => Dub does not currently support this # - Add ability to provide version, e.g. vibe-d@0.8.6 (see dub add) # - Get registry packages if it doesn't make us lag # - Query compilers # Note that global arguments won't show up when completing commands # This is on purpose, to reduce the amount of options being shown during completion, # as users are much more likely to be looking for command-specific options. _arguments -S -C \ '(* : -)'{-h,--help}'[Display general or command specific help and exit]' \ '(* : -)--version[Print version information and exit]' \ \ '--root=[Run as if dub was started in given path]: :_directories' \ '--skip-registry=[Skips searching packages on certain repositories]:mode:(none standard configured all)' \ '--registry=[Search the given registry URL first when resolving dependencies]:registry URL:_urls' \ '--bare[Read only packages contained in the current directory]' \ '--cache=[Puts any fetched packages in the specified location]:cache location:(local|system|user)' \ '--annotate[Do not perform any action, just print what would be done]' \ \ + '(verbosity)' \ '--vquiet[Print no messages]' \ '--verror[Only print errors]' \ {-q,--quiet}'[Only print warnings and errors]' \ {-v,--verbose}'[Print diagnostic output]' \ '--vverbose[Print debug output]' \ \ '--[End of dub arguments, the following will be sent to the program]' \ '*::dub command:_command_dispatch' } # Command dispatch function _command_dispatch() { declare -a commands=( init:'Initialize a new dub package' run:'Build and run a dub package (default action)' build:'Build a dub package (by name, or in the working directory by default)' test:'Execute the tests of a dub package' generate:'Generates project files using the specified generator' describe:'Prints a JSON description of the project and its dependencies' clean:'Removes intermediate build files and cached build results' dustmite:'Create reduced 
test cases for build errors' fetch:'Manually retrieves and caches a package' add:'Adds dependencies to the package file' remove:'Removes a cached package' upgrade:'Forces an upgrade of the dependencies' add-path:'Adds a default package search path' remove-path:'Removes a package search path' add-local:'Adds a local package directory (e.g. a git repository)' remove-local:'Removes a local package directory' list:'Prints a list of all local packages dub is aware of' search:'Search for available packages' add-override:'Adds a new package override' remove-override:'Removes an existing package override' list-overrides:'Prints a list of all local package overrides' clean-caches:'Removes cached metadata' convert:'Converts the file format of the package recipe' ) if (( CURRENT == 1 )); then _alternative \ 'files:filename:_files -g "*.d"' \ "commands:dub command: _describe -t commands command commands" else integer ret=0 local cmd=${${(k)commands}[(r)$words[1]:*]%%:*} if [ ! -z "$cmd" ]; then _call_function ret _dub_$cmd else # Assume single file, it takes program arguments _message "Arguments for single file package $words[1]" fi return 0 fi } (( $+functions[_dub_add] )) || _dub_add() { # TODO: Make dub list more machine-readable local -a dubList=("${(@f)$(dub list)}") # First element is 'Packages present in the system...' # Last element is an empty line dubList=(${dubList[2,$#dubList-1]}) local -A pkgs # Collect versions and names for ((i = 1; i <= ${#dubList}; i++)); do pkg_name=${${=${dubList[i]}}[1]} pkg_version=${${${=${dubList[i]}}[2]}%:} # Subpackages are currently not supported by 'dub add' (see dlang/dub#1846) if [ ! 
-z "${pkg_name:#*:*}" ]; then pkgs[${pkg_name}]+="${pkg_version}, " fi done # Merge versions local -a packages for name ver in ${(kv)pkgs}; do packages+=${name}:"${ver%, }" done # Package list includes ':' which is used as description #_values 'local packages' ${pkgs//:/\\:} # Use the unique property to get rid of subpkgs _describe -t packages package packages } (( $+functions[_dub_init] )) || _dub_init() { _arguments -S -C \ ':package directory:_directories' \ '*:package dependency:_dub_add' \ '(-t --type)'{-t,--type}'[Set the type of project to generate]:project type:((minimal\:"simple hello world project (default)" vibe.d\:"minimal HTTP server based on vibe.d" deimos\:"skeleton for C header bindings" custom\:"custom project provided by dub package"))' \ '(-f --format)'{-f,--format}'[Sets the format to use for the manifest file]:format:(json sdl)' \ '(-n --non-iteractive)'{-n,--non-iteractive}'[Do not prompt for values and use only defaults]' \ '(* : -)'{-h,--help}'[Display general or command specific help and exit]' } (( $+functions[_dub_list] )) || _dub_list() { _arguments -S -C \ '(* : -)'{-h,--help}'[Display general or command specific help and exit]' } # dub generate, dub build, dub run... 
(( $+functions[_dub_generate_generic] )) || _dub_generate_generic() { _arguments -S -C \ $@ \ '::package:_dub_add' \ '(* : -)'{-h,--help}'[Display general or command specific help and exit]' \ '(-b --build)'{-b,--build=}'[Specifies the type of build to perform]:build type:("debug (default)" plain release release-debug release-nobounds unittest profile profile-gc docs ddox cov cov-ctfe unittest-cov unittest-cov-ctfe syntax)' \ '(-c --config)'{-c,--config=}'[Builds the specified configuration]:package configuration: ' \ '*--override-config=[ Uses the specified configuration for a certain dependency]:dependency/config: ' \ '--compiler=[Specifies the compiler binary to use (can be a path)]:compiler:(dmd gdc ldc gdmd ldmd)' \ '(-a --arch)'{-a,--arch=}'[Force a different architecture (e.g. x86 or x86_64)]:architecture: ' \ '(-d --debug)*'{-d,--debug=}'[Define the specified debug version identifier when building]:Debug version: ' \ '--d-version=[Define the specified version identifier when building]:Version identifier: ' \ '--nodeps[Do not resolve missing dependencies before building]' \ '--build-mode=[Specifies the way the compiler and linker are invoked]:build mode:("separate (default)" allAtOnce singleFile)' \ '--single[Treats the package name as a filename. 
The file must contain a package recipe comment]:file:_files -g "*.d"' \ '--filter-versions[Experimental: Filter version identifiers and debug version identifiers to improve build cache efficiency]' \ '--combined[Tries to build the whole project in a single compiler run]' \ '--print-builds[Prints the list of available build types]' \ '--print-configs[Prints the list of available configurations]' \ '--print-platform[Prints the identifiers for the current build platform as used for the manifests build field]' \ '--parallel[Runs multiple compiler instances in parallel, if possible]' } (( $+functions[_dub_generate] )) || _dub_generate() { local curcontext="$curcontext" declare -a generators=( visuald:'VisualD project files', sublimetext:'SublimeText project file' cmake:'CMake build scripts' build:'Builds the package directly (use "dub build" instead)' ) local localArgs=( ':generator: _describe -t generators generator generators' ) integer ret=0 _call_function ret _dub_generate_generic ${(@)localArgs} return ret } (( $+functions[_dub_build] )) || _dub_build() { local localArgs=( $@ '--rdmd[Use rdmd instead of directly invoking the compiler]' '(-f --force)'{-f,--force}'[Forces a recompilation even if the target is up to date]' '(-y--yes)'{-y,--yes}'[Assume "yes" as answer to all interactive prompts]' '(-n--non-interactive)'{-n,--non-interactive}'[Do not enter interactive mode]' ) integer ret=0 _call_function ret _dub_generate_generic ${(@)localArgs} return ret } (( $+functions[_dub_run] )) || _dub_run() { local localArgs=( '--[End of dub arguments, the following will be sent to the program]' '--temp-build[Builds the project in the temp folder if possible]' ) integer ret=0 _call_function ret _dub_build ${(@)localArgs} return ret } (( $+functions[_dub_test] )) || _dub_test() { local localArgs=( '--main-file=[Specifies a custom file containing the main() function to use for running the tests]:main file:_files -g "*.d"' ) integer ret=0 _call_function ret _dub_build 
${(@)localArgs} return ret } (( $+functions[_dub_describe] )) || _dub_describe() { local localArgs=( '--import-paths[Shortcut for --data=import-paths --data-list]' '--string-import-paths[Shortcut for --data=string-import-paths --data-list]' '--data=[List the values of a particular build setting]:listing options: _values -s , argument main-source-file dflags lflags libs linker-files source-files versions debug-versions import-paths string-import-paths import-files options' '--data-list[Output --data information in list format (line-by-line)]' '--data-0[Output --data information using null-delimiters, rather than spaces or newlines]' ) integer ret=0 _call_function ret _dub_build ${(@)localArgs} return ret } (( $+functions[_dub_clean] )) || _dub_clean() { _arguments -S -C \ '(* : -)'{-h,--help}'[Display general or command specific help and exit]' \ + '(toclean)' \ '--all-packages[Cleans up *all* known packages (dub list)]' \ ':package:_dub_add' } (( $+functions[_dub_dustmite] )) || _dub_dustmite() { local localArgs=( ':target directory:_directories' '--compiler-status=[The expected status code of the compiler run]:status code: ' '--compiler-regex=[A regular expression used to match against the compiler output]:regex: ' '--linker-status=[The expected status code of the linker run]:status code: ' '--linker-regex=[A regular expression used to match against the linker output]:regex: ' '--program-status=[The expected status code of the built executable]:status code: ' '--program-regex=[A regular expression used to match against the program output]:regex: ' '--[End of dub arguments, the following will be sent to the program]' ) integer ret=0 _call_function ret _dub_generate_generic ${(@)localArgs} return ret } _dub dub-1.40.0/source/000077500000000000000000000000001477246567400136745ustar00rootroot00000000000000dub-1.40.0/source/app.d000066400000000000000000000017671477246567400146340ustar00rootroot00000000000000/** Application entry point. 
Copyright: © 2013 rejectedsoftware e.K. License: Subject to the terms of the MIT license, as written in the included LICENSE.txt file. Authors: Sönke Ludwig */ module app; import dub.commandline; // Set output path and options for coverage reports version (DigitalMars) version (D_Coverage) { shared static this() { import core.runtime, std.file, std.path, std.stdio; dmd_coverSetMerge(true); auto path = buildPath(dirName(thisExePath()), "../cov"); if (!path.exists) mkdir(path); dmd_coverDestPath(path); } } /** * Workaround https://github.com/dlang/dub/issues/1812 * * On Linux, a segmentation fault happens when dub is compiled with a recent * compiler. While not confirmed, the logs seem to point to parallel marking * done by the GC. Hence this disables it. * * https://dlang.org/changelog/2.087.0.html#gc_parallel */ extern(C) __gshared string[] rt_options = [ "gcopt=parallel:0" ]; int main(string[] args) { return runDubCommandLine(args); } dub-1.40.0/source/dub/000077500000000000000000000000001477246567400144465ustar00rootroot00000000000000dub-1.40.0/source/dub/commandline.d000066400000000000000000003257211477246567400171130ustar00rootroot00000000000000/** Defines the behavior of the DUB command line client. Copyright: © 2012-2013 Matthias Dondorff, Copyright © 2012-2016 Sönke Ludwig License: Subject to the terms of the MIT license, as written in the included LICENSE.txt file. 
Authors: Matthias Dondorff, Sönke Ludwig */ module dub.commandline; import dub.compilers.compiler; import dub.dependency; import dub.dub; import dub.generators.generator; import dub.internal.vibecompat.core.file; import dub.internal.vibecompat.data.json; import dub.internal.vibecompat.inet.path; import dub.internal.logging; import dub.package_; import dub.packagemanager; import dub.packagesuppliers; import dub.project; import dub.internal.utils : getDUBVersion, getClosestMatch, getTempFile; import dub.internal.dyaml.stdsumtype; import std.algorithm; import std.array; import std.conv; import std.encoding; import std.exception; static import std.file; import std.getopt; import std.path : absolutePath, buildNormalizedPath, expandTilde, setExtension; import std.process : environment, spawnProcess, wait; import std.stdio; import std.string; import std.typecons : Tuple, tuple; /** Retrieves a list of all available commands. Commands are grouped by category. */ CommandGroup[] getCommands() @safe pure nothrow { return [ CommandGroup("Package creation", new InitCommand ), CommandGroup("Build, test and run", new RunCommand, new BuildCommand, new TestCommand, new LintCommand, new GenerateCommand, new DescribeCommand, new CleanCommand, new DustmiteCommand ), CommandGroup("Package management", new FetchCommand, new AddCommand, new RemoveCommand, new UpgradeCommand, new AddPathCommand, new RemovePathCommand, new AddLocalCommand, new RemoveLocalCommand, new ListCommand, new SearchCommand, new AddOverrideCommand, new RemoveOverrideCommand, new ListOverridesCommand, new CleanCachesCommand, new ConvertCommand, ) ]; } /** Extract the command name from the argument list Params: args = a list of string arguments that will be processed Returns: The command name that was found (may be null). */ string commandNameArgument(ref string[] args) { if (args.length >= 1 && !args[0].startsWith("-") && !args[0].canFind(":")) { const result = args[0]; args = args[1 .. 
$]; return result; } return null; } /// test extractCommandNameArgument usage unittest { { string[] args; /// It returns an empty string on when there are no args assert(commandNameArgument(args) is null); assert(!args.length); } { string[] args = [ "test" ]; /// It returns the first argument when it does not start with `-` assert(commandNameArgument(args) == "test"); /// There is nothing to extract when the arguments only contain the `test` cmd assert(!args.length); } { string[] args = [ "-a", "-b" ]; /// It extracts two arguments when they are not a command assert(commandNameArgument(args) is null); assert(args == ["-a", "-b"]); } { string[] args = [ "-test" ]; /// It returns the an empty string when it starts with `-` assert(commandNameArgument(args) is null); assert(args.length == 1); } { string[] args = [ "foo:bar" ]; // Sub package names are ignored as command names assert(commandNameArgument(args) is null); assert(args.length == 1); args[0] = ":foo"; assert(commandNameArgument(args) is null); assert(args.length == 1); } } /** Handles the Command Line options and commands. 
*/ struct CommandLineHandler
{
	/// The list of commands that can be handled
	CommandGroup[] commandGroups;

	/// General options parser
	CommonOptions options;

	/** Create the list of all supported commands

	Returns:
		Returns the list of the supported command names
	*/
	string[] commandNames()
	{
		return commandGroups.map!(g => g.commands).joiner.map!(c => c.name).array;
	}

	/** Parses the general options and sets up the log level
		and the root_path
	*/
	string[] prepareOptions(CommandArgs args) {
		LogLevel loglevel = LogLevel.info;

		options.prepare(args);

		// Map the verbosity flags onto a single log level. The ordering makes
		// the most verbose matching flag win when several are given.
		if (options.vverbose) loglevel = LogLevel.debug_;
		else if (options.verbose) loglevel = LogLevel.diagnostic;
		else if (options.vquiet) loglevel = LogLevel.none;
		else if (options.quiet) loglevel = LogLevel.warn;
		else if (options.verror) loglevel = LogLevel.error;
		setLogLevel(loglevel);

		// Default to the current working directory when no --root was given;
		// otherwise normalize the user-supplied path (incl. ~ expansion).
		if (options.root_path.empty)
		{
			options.root_path = std.file.getcwd();
		}
		else
		{
			options.root_path = options.root_path.expandTilde.absolutePath.buildNormalizedPath;
		}

		final switch (options.colorMode) with (options.Color)
		{
			case automatic:
				// Use default determined in internal.logging.initLogging().
				break;
			case on:
				// Forward the forced-color choice to every build command so
				// compiler output is colorized as well.
				foreach (ref grp; commandGroups)
					foreach (ref cmd; grp.commands)
						if (auto pc = cast(PackageBuildCommand)cmd)
							pc.baseSettings.buildSettings.options |= BuildOption.color;
				setLoggingColorsEnabled(true); // enable colors, no matter what
				break;
			case off:
				foreach (ref grp; commandGroups)
					foreach (ref cmd; grp.commands)
						if (auto pc = cast(PackageBuildCommand)cmd)
							pc.baseSettings.buildSettings.options &= ~BuildOption.color;
				setLoggingColorsEnabled(false); // disable colors, no matter what
				break;
		}
		return args.extractAllRemainingArgs();
	}

	/** Get an instance of the requested command.

	If there is no command in the argument list, the `run` command is returned
	by default.

	If the `--help` argument previously handled by `prepareOptions`,
	`this.options.help` is already `true`, with this returning the requested
	command. If no command was requested (just dub --help) this returns the
	help command.

	Params:
		name = the command name

	Returns:
		Returns the command instance if it exists, null otherwise
	*/
	Command getCommand(string name) {
		if (name == "help" || (name == "" && options.help))
		{
			return new HelpCommand();
		}

		if (name == "")
		{
			name = "run";
		}

		foreach (grp; commandGroups)
			foreach (c; grp.commands)
				if (c.name == name) {
					return c;
				}

		return null;
	}

	/** Get an instance of the requested command after the args are sent.

	It uses getCommand to get the command instance and then calls prepare.

	Params:
		name = the command name
		args = the command arguments

	Returns:
		Returns the command instance if it exists, null otherwise
	*/
	Command prepareCommand(string name, CommandArgs args) {
		auto cmd = getCommand(name);

		if (cmd !is null && !(cast(HelpCommand)cmd))
		{
			// process command line options for the selected command
			cmd.prepare(args);
			enforceUsage(cmd.acceptsAppArgs || !args.hasAppArgs, name ~ " doesn't accept application arguments.");
		}

		return cmd;
	}
}

/// Can get the command names
unittest {
	CommandLineHandler handler;
	handler.commandGroups = getCommands();

	assert(handler.commandNames == ["init", "run", "build", "test", "lint", "generate",
		"describe", "clean", "dustmite", "fetch", "add", "remove", "upgrade",
		"add-path", "remove-path", "add-local", "remove-local", "list", "search",
		"add-override", "remove-override", "list-overrides", "clean-caches", "convert"]);
}

/// It sets the cwd as root_path by default
unittest {
	CommandLineHandler handler;

	auto args = new CommandArgs([]);
	handler.prepareOptions(args);
	assert(handler.options.root_path == std.file.getcwd());
}

/// It can set a custom root_path
unittest {
	CommandLineHandler handler;

	auto args = new CommandArgs(["--root=/tmp/test"]);
	handler.prepareOptions(args);
	assert(handler.options.root_path == "/tmp/test".absolutePath.buildNormalizedPath);

	args = new CommandArgs(["--root=./test"]);
	handler.prepareOptions(args);
	assert(handler.options.root_path == "./test".absolutePath.buildNormalizedPath);
}

/// It sets the info log level by default
unittest {
	scope(exit) setLogLevel(LogLevel.info);

	CommandLineHandler handler;

	auto args = new CommandArgs([]);
	handler.prepareOptions(args);
	assert(getLogLevel() == LogLevel.info);
}

/// It can set a custom error level
unittest {
	scope(exit) setLogLevel(LogLevel.info);

	CommandLineHandler handler;

	auto args = new CommandArgs(["--vverbose"]);
	handler.prepareOptions(args);
	assert(getLogLevel() == LogLevel.debug_);

	handler = CommandLineHandler();
	args = new CommandArgs(["--verbose"]);
	handler.prepareOptions(args);
	assert(getLogLevel() == LogLevel.diagnostic);

	handler = CommandLineHandler();
	args = new CommandArgs(["--vquiet"]);
	handler.prepareOptions(args);
	assert(getLogLevel() == LogLevel.none);

	handler = CommandLineHandler();
	args = new CommandArgs(["--quiet"]);
	handler.prepareOptions(args);
	assert(getLogLevel() == LogLevel.warn);

	handler = CommandLineHandler();
	args = new CommandArgs(["--verror"]);
	handler.prepareOptions(args);
	assert(getLogLevel() == LogLevel.error);
}

/// It returns the `run` command by default
unittest {
	CommandLineHandler handler;
	handler.commandGroups = getCommands();
	assert(handler.getCommand("").name == "run");
}

/// It returns the `help` command when there is none set and the --help arg
/// was set
unittest {
	CommandLineHandler handler;
	auto args = new CommandArgs(["--help"]);
	handler.prepareOptions(args);
	handler.commandGroups = getCommands();
	assert(cast(HelpCommand)handler.getCommand("") !is null);
}

/// It returns the `help` command when the `help` command is sent
unittest {
	CommandLineHandler handler;
	handler.commandGroups = getCommands();
	assert(cast(HelpCommand) handler.getCommand("help") !is null);
}

/// It returns the `init` command when the `init` command is sent
unittest {
	CommandLineHandler handler;
	handler.commandGroups = getCommands();
	assert(handler.getCommand("init").name == "init");
}

/// It returns null when a missing command is sent
unittest {
	CommandLineHandler handler;
	handler.commandGroups = getCommands();
	assert(handler.getCommand("missing") is null);
}

/** Processes the given command line and executes the appropriate actions.

	Params:
		args = This command line argument array as received in `main`. The first
			entry is considered to be the name of the binary invoked.

	Returns:
		Returns the exit code that is supposed to be returned to the system.
*/
int runDubCommandLine(string[] args)
{
	// Rewrites an invocation on a single source file into the equivalent
	// `dub run -q --temp-build --single <file> -- <trailing>` invocation.
	static string[] toSinglePackageArgs (string args0, string file, string[] trailing)
	{
		return [args0, "run", "-q", "--temp-build", "--single", file, "--"] ~ trailing;
	}

	// Initialize the logging module, ensure that whether stdout/stderr are a TTY
	// or not is detected in order to disable colors if the output isn't a console
	initLogging();

	logDiagnostic("DUB version %s", getDUBVersion());

	{
		version(Windows) {
			// Guarantee that this environment variable is set
			// this is specifically needed because of the Windows fix that follows this statement.
			// While it probably isn't needed for all targets, it does simplify things a bit.
			// Question is can it be more generic? Probably not due to $TMP
			if ("TEMP" !in environment)
				environment["TEMP"] = std.file.tempDir();

			// rdmd uses $TEMP to compute a temporary path. since cygwin substitutes backslashes
			// with slashes, this causes OPTLINK to fail (it thinks path segments are options)
			// we substitute the other way around here to fix this.

			// In case the environment variable TEMP is empty (it should never be), we'll swap out
			// opIndex in favor of get with the fallback.
			environment["TEMP"] = environment.get("TEMP", null).replace("/", "\\");
		}
	}

	auto handler = CommandLineHandler(getCommands());

	// Special syntaxes need to be handled before regular argument parsing
	if (args.length >= 2)
	{
		// Read input source code from stdin
		if (args[1] == "-")
		{
			auto path = getTempFile("app", ".d");
			stdin.byChunk(4096).joiner.toFile(path.toNativeString());
			args = toSinglePackageArgs(args[0], path.toNativeString(), args[2 .. $]);
		}

		// Dub has a shebang syntax to be able to use it as script, e.g.
		// #!/usr/bin/env dub
		// With this approach, we need to support the file having
		// both the `.d` extension, or having none at all.
		// We also need to make sure arguments passed to the script
		// are passed to the program, not `dub`, e.g.:
		// ./my_dub_script foo bar
		// Gives us `args = [ "dub", "./my_dub_script" "foo", "bar" ]`,
		// which we need to interpret as:
		// `args = [ "dub", "./my_dub_script", "--", "foo", "bar" ]`
		else if (args[1].endsWith(".d"))
			args = toSinglePackageArgs(args[0], args[1], args[2 .. $]);

		// Here we have a problem: What if the script name is a command name ?
		// We have to assume it isn't, and to reduce the risk of false positive
		// we only consider the case where the file name is the first argument,
		// as the shell invocation cannot be controlled.
		else if (handler.getCommand(args[1]) is null && !args[1].startsWith("-")) {
			if (std.file.exists(args[1])) {
				auto path = getTempFile("app", ".d");
				std.file.copy(args[1], path.toNativeString());
				args = toSinglePackageArgs(args[0], path.toNativeString(), args[2 .. $]);
			} else if (std.file.exists(args[1].setExtension(".d"))) {
				args = toSinglePackageArgs(args[0], args[1].setExtension(".d"), args[2 .. $]);
			}
		}
	}

	auto common_args = new CommandArgs(args[1..$]);

	try args = handler.prepareOptions(common_args);
	catch (Exception e) {
		logError("Error processing arguments: %s", e.msg);
		logDiagnostic("Full exception: %s", e.toString().sanitize);
		logInfo("Run 'dub help' for usage information.");
		return 1;
	}

	if (handler.options.version_)
	{
		showVersion();
		return 0;
	}

	const command_name = commandNameArgument(args);
	auto command_args = new CommandArgs(args);
	Command cmd;

	try {
		cmd = handler.prepareCommand(command_name, command_args);
	} catch (Exception e) {
		logError("Error processing arguments: %s", e.msg);
		logDiagnostic("Full exception: %s", e.toString().sanitize);
		logInfo("Run 'dub help' for usage information.");
		return 1;
	}

	if (cmd is null) {
		// FIX: the <...> placeholders had been stripped from this usage line,
		// leaving "dub [--version] [] [] [-- []]" which is meaningless to users.
		logInfoNoTag("USAGE: dub [--version] [<command>] [<options...>] [-- [<application arguments...>]]");
		logInfoNoTag("");
		logError("Unknown command: %s", command_name);
		import std.algorithm.iteration : filter;
		import std.uni : toUpper;
		foreach (CommandGroup key; handler.commandGroups)
		{
			// Suggest similarly spelled commands (edit distance < 4), closest first.
			auto similarCommands = key.commands.filter!(cmd => levenshteinDistance(command_name, cmd.name) < 4).array();
			if (similarCommands) {
				sort!((a, b) => levenshteinDistance(command_name, a.name) <
					levenshteinDistance(command_name, b.name))(similarCommands);
				foreach (command; similarCommands) {
					logInfo("Did you mean '%s'?", command.name);
				}
			}
		}

		logInfoNoTag("");
		return 1;
	}

	if (cast(HelpCommand)cmd !is null) {
		showHelp(handler.commandGroups, common_args);
		return 0;
	}

	if (handler.options.help) {
		showCommandHelp(cmd, command_args, common_args);
		return 0;
	}

	auto remaining_args = command_args.extractRemainingArgs();
	if (remaining_args.any!(a => a.startsWith("-"))) {
		logError("Unknown command line flags: %s", remaining_args
			.filter!(a => a.startsWith("-")).array.join(" ").color(Mode.bold));
		logInfo(`Type "%s" to get a list of all supported flags.`,
			text("dub ", cmd.name, " -h").color(Mode.bold));
		return 1;
	}

	try {
		// initialize the root package
		Dub dub = cmd.prepareDub(handler.options);

		// execute the command
return cmd.execute(dub, remaining_args, command_args.appArgs); } catch (UsageException e) { // usage exceptions get thrown before any logging, so we are // making the errors more narrow to better fit on small screens. tagWidth.push(5); logError("%s", e.msg); logDebug("Full exception: %s", e.toString().sanitize); logInfo(`Run "%s" for more information about the "%s" command.`, text("dub ", cmd.name, " -h").color(Mode.bold), cmd.name.color(Mode.bold)); return 1; } catch (Exception e) { // most exceptions get thrown before logging, so same thing here as // above. However this might be subject to change if it results in // weird behavior anywhere. tagWidth.push(5); logError("%s", e.msg); logDebug("Full exception: %s", e.toString().sanitize); return 2; } } /** Contains and parses options common to all commands. */ struct CommonOptions { bool verbose, vverbose, quiet, vquiet, verror, version_; bool help, annotate, bare; string[] registry_urls; string root_path, recipeFile; enum Color { automatic, on, off } Color colorMode = Color.automatic; SkipPackageSuppliers skipRegistry = SkipPackageSuppliers.default_; PlacementLocation placementLocation = PlacementLocation.user; private void parseColor(string option, string value) @safe { // `automatic`, `on`, `off` are there for backwards compatibility // `auto`, `always`, `never` is being used for compatibility with most // other development and linux tools, after evaluating what other tools // are doing, to help users intuitively pick correct values. 
// See https://github.com/dlang/dub/issues/2410 for discussion if (!value.length || value == "auto" || value == "automatic") colorMode = Color.automatic; else if (value == "always" || value == "on") colorMode = Color.on; else if (value == "never" || value == "off") colorMode = Color.off; else throw new ConvException("Unable to parse argument '--" ~ option ~ "=" ~ value ~ "', supported values: --color[=auto], --color=always, --color=never"); } private void parseSkipRegistry(string option, string value) @safe { // We only want to support `none`, `standard`, `configured`, and `all`. // We use a separate function to prevent getopt from parsing SkipPackageSuppliers.default_. assert(option == "skip-registry", "parseSkipRegistry called with unknown option '" ~ option ~ "'"); switch (value) with (SkipPackageSuppliers) { case "none": skipRegistry = none; break; case "standard": skipRegistry = standard; break; case "configured": skipRegistry = configured; break; case "all": skipRegistry = all; break; default: throw new GetOptException("skip-registry only accepts 'none', 'standard', 'configured', and 'all', not '" ~ value ~ "'"); } } /// Parses all common options and stores the result in the struct instance. void prepare(CommandArgs args) { args.getopt("h|help", &help, ["Display general or command specific help"]); args.getopt("root", &root_path, ["Path to operate in instead of the current working dir"]); args.getopt("recipe", &recipeFile, ["Loads a custom recipe path instead of dub.json/dub.sdl"]); args.getopt("registry", ®istry_urls, [ "Search the given registry URL first when resolving dependencies. Can be specified multiple times. Available registry types:", " DUB: URL to DUB registry (default)", " Maven: URL to Maven repository + group id containing dub packages as artifacts. E.g. 
mvn+http://localhost:8040/maven/libs-release/dubpackages", ]); args.getopt("skip-registry", &skipRegistry, &parseSkipRegistry, [ "Sets a mode for skipping the search on certain package registry types:", " none: Search all configured or default registries (default)", " standard: Don't search the main registry (e.g. "~defaultRegistryURLs[0]~")", " configured: Skip all default and user configured registries", " all: Only search registries specified with --registry", ]); args.getopt("annotate", &annotate, ["Do not perform any action, just print what would be done"]); args.getopt("bare", &bare, ["Read only packages contained in the current directory"]); args.getopt("v|verbose", &verbose, ["Print diagnostic output"]); args.getopt("vverbose", &vverbose, ["Print debug output"]); args.getopt("q|quiet", &quiet, ["Only print warnings and errors"]); args.getopt("verror", &verror, ["Only print errors"]); args.getopt("vquiet", &vquiet, ["Print no messages"]); args.getopt("color", &colorMode, &parseColor, [ "Configure colored output. Accepted values:", " auto: Colored output on console/terminal,", " unless NO_COLOR is set and non-empty (default)", " always: Force colors enabled", " never: Force colors disabled" ]); args.getopt("cache", &placementLocation, ["Puts any fetched packages in the specified location [local|system|user]."]); version_ = args.hasAppVersion; } } /** Encapsulates a set of application arguments. This class serves two purposes. The first is to provide an API for parsing command line arguments (`getopt`). At the same time it records all calls to `getopt` and provides a list of all possible options using the `recognizedArgs` property. */ class CommandArgs { struct Arg { alias Value = SumType!(string[], string, bool, int, uint); Value defaultValue; Value value; string names; string[] helpText; bool hidden; } private { string[] m_args; Arg[] m_recognizedArgs; string[] m_appArgs; } /** Initializes the list of source arguments. 
Note that all array entries are considered application arguments (i.e. no application name entry is present as the first entry) */ this(string[] args) @safe pure nothrow { auto app_args_idx = args.countUntil("--"); m_appArgs = app_args_idx >= 0 ? args[app_args_idx+1 .. $] : []; m_args = "dummy" ~ (app_args_idx >= 0 ? args[0..app_args_idx] : args); } /** Checks if the app arguments are present. Returns: true if an -- argument is given with arguments after it, otherwise false */ @property bool hasAppArgs() { return m_appArgs.length > 0; } /** Checks if the `--version` argument is present on the first position in the list. Returns: true if the application version argument was found on the first position */ @property bool hasAppVersion() { return m_args.length > 1 && m_args[1] == "--version"; } /** Returns the list of app args. The app args are provided after the `--` argument. */ @property string[] appArgs() { return m_appArgs; } /** Returns the list of all options recognized. This list is created by recording all calls to `getopt`. 
*/ @property const(Arg)[] recognizedArgs() { return m_recognizedArgs; } void getopt(T)(string names, T* var, string[] help_text = null, bool hidden=false) { getopt!T(names, var, null, help_text, hidden); } void getopt(T)(string names, T* var, void delegate(string, string) @safe parseValue, string[] help_text = null, bool hidden=false) { import std.traits : OriginalType; foreach (ref arg; m_recognizedArgs) if (names == arg.names) { assert(help_text is null, format!("Duplicated argument '%s' must not change helptext, consider to remove the duplication")(names)); *var = arg.value.match!( (OriginalType!T v) => cast(T)v, (_) { if (false) return T.init; assert(false, "value from previous getopt has different type than the current getopt call"); } ); return; } assert(help_text.length > 0); Arg arg; arg.defaultValue = cast(OriginalType!T)*var; arg.names = names; arg.helpText = help_text; arg.hidden = hidden; if (parseValue is null) m_args.getopt(config.passThrough, names, var); else m_args.getopt(config.passThrough, names, parseValue); arg.value = cast(OriginalType!T)*var; m_recognizedArgs ~= arg; } /** Resets the list of available source arguments. */ void dropAllArgs() { m_args = null; } /** Returns the list of unprocessed arguments, ignoring the app arguments, and resets the list of available source arguments. */ string[] extractRemainingArgs() { assert(m_args !is null, "extractRemainingArgs must be called only once."); auto ret = m_args[1 .. $]; m_args = null; return ret; } /** Returns the list of unprocessed arguments, including the app arguments and resets the list of available source arguments. 
*/ string[] extractAllRemainingArgs() { auto ret = extractRemainingArgs(); if (this.hasAppArgs) { ret ~= "--" ~ m_appArgs; } return ret; } } /// Using CommandArgs unittest { /// It should not find the app version for an empty arg list assert(new CommandArgs([]).hasAppVersion == false); /// It should find the app version when `--version` is the first arg assert(new CommandArgs(["--version"]).hasAppVersion == true); /// It should not find the app version when `--version` is the second arg assert(new CommandArgs(["a", "--version"]).hasAppVersion == false); /// It returns an empty app arg list when `--` arg is missing assert(new CommandArgs(["1", "2"]).appArgs == []); /// It returns an empty app arg list when `--` arg is missing assert(new CommandArgs(["1", "2"]).appArgs == []); /// It returns app args set after "--" assert(new CommandArgs(["1", "2", "--", "a"]).appArgs == ["a"]); assert(new CommandArgs(["1", "2", "--"]).appArgs == []); assert(new CommandArgs(["--"]).appArgs == []); assert(new CommandArgs(["--", "a"]).appArgs == ["a"]); /// It returns the list of all args when no args are processed assert(new CommandArgs(["1", "2", "--", "a"]).extractAllRemainingArgs == ["1", "2", "--", "a"]); } /// It removes the extracted args unittest { auto args = new CommandArgs(["-a", "-b", "--", "-c"]); bool value; args.getopt("b", &value, [""]); assert(args.extractAllRemainingArgs == ["-a", "--", "-c"]); } /// It should not be able to remove app args unittest { auto args = new CommandArgs(["-a", "-b", "--", "-c"]); bool value; args.getopt("-c", &value, [""]); assert(!value); assert(args.extractAllRemainingArgs == ["-a", "-b", "--", "-c"]); } /** Base class for all commands. This cass contains a high-level description of the command, including brief and full descriptions and a human readable command line pattern. On top of that it defines the two main entry functions for command execution. 
*/
class Command {
	string name;
	string argumentsPattern;
	string description;
	string[] helpText;
	bool acceptsAppArgs;
	bool hidden = false; // used for deprecated commands

	/** Parses all known command line options without executing any actions.

		This function will be called prior to execute, or may be called as the
		only method when collecting the list of recognized command line
		options.

		Only `args.getopt` should be called within this method.
	*/
	abstract void prepare(scope CommandArgs args);

	/**
	 * Initialize the dub instance used by `execute`
	 */
	public Dub prepareDub(CommonOptions options) {
		Dub dub;

		if (options.bare) {
			dub = new Dub(NativePath(options.root_path), getWorkingDirectory());
			dub.defaultPlacementLocation = options.placementLocation;
			return dub;
		}

		// initialize DUB
		auto package_suppliers = options.registry_urls
			.map!((url) {
				// Allow to specify fallback mirrors as space separated urls. Undocumented as we
				// should simply retry over all registries instead of using a special
				// FallbackPackageSupplier.
				auto urls = url.splitter(' ');
				PackageSupplier ps = _getRegistryPackageSupplier(urls.front);
				urls.popFront;
				if (!urls.empty)
					ps = new FallbackPackageSupplier(ps ~ urls.map!_getRegistryPackageSupplier.array);
				return ps;
			})
			.array;

		dub = new Dub(options.root_path, package_suppliers, options.skipRegistry);
		dub.dryRun = options.annotate;
		dub.defaultPlacementLocation = options.placementLocation;
		dub.mainRecipePath = options.recipeFile;
		// make the CWD package available so that for example sub packages can reference their
		// parent package.
		try dub.packageManager.getOrLoadPackage(NativePath(options.root_path), NativePath(options.recipeFile), false, StrictMode.Warn);
		catch (Exception e) {
			// by default we ignore CWD package load fails in prepareDUB, since
			// they will fail again later when they are actually requested. This
			// is done to provide custom options to the loading logic and should
			// ideally be moved elsewhere. (This catch has been around for ~10
			// years, since it was first introduced in _app.d_.)
			logDiagnostic("No valid package found in current working directory: %s", e.msg);

			// for now, we work around not knowing if the package is needed or
			// not, simply by trusting the user to only use `--recipe` when the
			// recipe file actually exists, otherwise we throw the error.
			bool loadMustSucceed = options.recipeFile.length > 0;
			if (loadMustSucceed)
				throw e;
		}

		return dub;
	}

	/** Executes the actual action.

		Note that `prepare` will be called before any call to `execute`.
	*/
	abstract int execute(Dub dub, string[] free_args, string[] app_args);

	/// Loads the package found in dub's root path; optionally prints a hint
	/// when no package manifest is present. Returns true on success.
	private bool loadCwdPackage(Dub dub, bool warn_missing_package)
	{
		auto filePath = dub.packageManager.findPackageFile(dub.rootPath);

		if (filePath.empty) {
			if (warn_missing_package) {
				logInfoNoTag("");
				logInfoNoTag("No package manifest (dub.json or dub.sdl) was found in");
				logInfoNoTag(dub.rootPath.toNativeString());
				logInfoNoTag("Please run DUB from the root directory of an existing package, or run");
				logInfoNoTag("\"%s\" to get information on creating a new package.", "dub init --help".color(Mode.bold));
				logInfoNoTag("");
			}
			return false;
		}

		dub.loadPackage();

		return true;
	}
}

/** Encapsulates a group of commands that fit into a common category.
*/
struct CommandGroup {
	/// Caption of the command category
	string caption;

	/// List of commands contained in this group
	Command[] commands;

	this(string caption, Command[] commands...) @safe pure nothrow
	{
		this.caption = caption;
		this.commands = commands.dup;
	}
}

/******************************************************************************/
/* HELP                                                                       */
/******************************************************************************/

class HelpCommand : Command {

	this() @safe pure nothrow
	{
		this.name = "help";
		this.description = "Shows the help message";
		this.helpText = [
			"Shows the help message and the supported command options."
		];
	}

	/// HelpCommand.prepare is not supposed to be called, use
	/// cast(HelpCommand)this to check if help was requested before execution.
	override void prepare(scope CommandArgs args)
	{
		assert(false, "HelpCommand.prepare is not supposed to be called, use cast(HelpCommand)this to check if help was requested before execution.");
	}

	/// HelpCommand.execute is not supposed to be called, use
	/// cast(HelpCommand)this to check if help was requested before execution.
	override int execute(Dub dub, string[] free_args, string[] app_args) {
		assert(false, "HelpCommand.execute is not supposed to be called, use cast(HelpCommand)this to check if help was requested before execution.");
	}
}

/******************************************************************************/
/* INIT                                                                       */
/******************************************************************************/

class InitCommand : Command {
	private{
		string m_templateType = "minimal";
		PackageFormat m_format = PackageFormat.json;
		bool m_nonInteractive;
	}

	this() @safe pure nothrow
	{
		this.name = "init";
		// FIX: the <...> placeholders had been stripped from this pattern,
		// leaving the meaningless "[ [...]]".
		this.argumentsPattern = "[<directory> [<dependency>...]]";
		this.description = "Initializes an empty package skeleton";
		this.helpText = [
			"Initializes an empty package of the specified type in the given directory.",
			"By default, the current working directory is used.",
			"",
			"Custom templates can be defined by packages by providing a sub-package called \"init-exec\". No default source files are added in this case.",
			"The \"init-exec\" sub-package is compiled and executed inside the destination folder after the base project directory has been created.",
			"Free arguments \"dub init -t custom -- free args\" are passed into the \"init-exec\" sub-package as app arguments."
		];
		this.acceptsAppArgs = true;
	}

	override void prepare(scope CommandArgs args)
	{
		args.getopt("t|type", &m_templateType, [
			"Set the type of project to generate. Available types:",
			"",
			"minimal - simple \"hello world\" project (default)",
			"vibe.d - minimal HTTP server based on vibe.d",
			"deimos - skeleton for C header bindings",
			"custom - custom project provided by dub package",
		]);
		args.getopt("f|format", &m_format, [
			"Sets the format to use for the package description file. Possible values:",
			" " ~ [__traits(allMembers, PackageFormat)].map!(f => f == m_format.init.to!string ? f ~ " (default)" : f).join(", ")
		]);
		args.getopt("n|non-interactive", &m_nonInteractive, ["Don't enter interactive mode."]);
	}

	override int execute(Dub dub, string[] free_args, string[] app_args)
	{
		// An optional first free argument names the target directory.
		string dir;
		if (free_args.length)
		{
			dir = free_args[0];
			free_args = free_args[1 .. $];
		}

		// Prompts for a single line of input; an empty answer (just a
		// newline) yields default_value.
		static string input(string caption, string default_value)
		{
			import dub.internal.colorize;
			cwritef("%s [%s]: ", caption.color(Mode.bold), default_value);
			auto inp = readln();
			return inp.length > 1 ? inp[0 .. $-1] : default_value;
		}

		// Presents a numbered, column-formatted menu of options and returns
		// the chosen one. With free_choice, arbitrary text is also accepted.
		static string select(string caption, bool free_choice, string default_value, const string[] options...)
		{
			import dub.internal.colorize.cwrite;
			assert(options.length);
			import std.math : floor, log10;
			auto ndigits = (size_t val) => log10(cast(double) val).floor.to!uint + 1;

			immutable default_idx = options.countUntil(default_value);
			immutable max_width = options.map!(s => s.length).reduce!max + ndigits(options.length) + " ".length;
			immutable num_columns = max(1, 82 / max_width);
			immutable num_rows = (options.length + num_columns - 1) / num_columns;

			// Transpose the options so they are listed column-major on screen.
			string[] options_matrix;
			options_matrix.length = num_rows * num_columns;
			foreach (i, option; options)
			{
				size_t y = i % num_rows;
				size_t x = i / num_rows;
				options_matrix[x + y * num_columns] = option;
			}

			auto idx_to_user = (string option) => cast(uint)options.countUntil(option) + 1;
			auto user_to_idx = (size_t i) => cast(uint)i - 1;

			assert(default_idx >= 0);
			cwriteln((free_choice ? "Select or enter " : "Select ").color(Mode.bold), caption.color(Mode.bold), ":".color(Mode.bold));
			foreach (i, option; options_matrix)
			{
				if (i != 0 && (i % num_columns) == 0) cwriteln();
				if (!option.length) continue;
				auto user_id = idx_to_user(option);
				cwritef("%*u)".color(Color.cyan, Mode.bold) ~ " %s", ndigits(options.length), user_id,
					leftJustifier(option, max_width));
			}
			cwriteln();
			immutable default_choice = (default_idx + 1).to!string;
			while (true)
			{
				auto choice = input(free_choice ? "?" : "#?", default_choice);
				// NOTE: identity (`is`) is intentional here — input() returns
				// the very same default string object on empty input.
				if (choice is default_choice) return default_value;

				choice = choice.strip;
				uint option_idx = uint.max;
				try
					option_idx = cast(uint)user_to_idx(to!uint(choice));
				catch (ConvException)
				{}
				if (option_idx != uint.max)
				{
					if (option_idx < options.length)
						return options[option_idx];
				}
				else if (free_choice || options.canFind(choice))
					return choice;
				logError("Select an option between 1 and %u%s.", options.length,
					free_choice ? " or enter a custom value" : null);
			}
		}

		// Interactive SPDX license picker. Entries starting with "-" are
		// suffix variants of the preceding base entry (e.g. "GPL-" +
		// "-3.0-only" -> "GPL-3.0-only").
		static string license_select(string def)
		{
			static immutable licenses = [
				"BSL-1.0 (Boost)",
				"MIT",
				"Unlicense (public domain)",
				"Apache-",
				"-1.0",
				"-1.1",
				"-2.0",
				"AGPL-",
				"-1.0-only",
				"-1.0-or-later",
				"-3.0-only",
				"-3.0-or-later",
				"GPL-",
				"-2.0-only",
				"-2.0-or-later",
				"-3.0-only",
				"-3.0-or-later",
				"LGPL-",
				"-2.0-only",
				"-2.0-or-later",
				"-2.1-only",
				"-2.1-or-later",
				"-3.0-only",
				"-3.0-or-later",
				"BSD-",
				"-1-Clause",
				"-2-Clause",
				"-3-Clause",
				"-4-Clause",
				"MPL- (Mozilla)",
				"-1.0",
				"-1.1",
				"-2.0",
				"-2.0-no-copyleft-exception",
				"EUPL-",
				"-1.0",
				"-1.1",
				"-2.0",
				"CC- (Creative Commons)",
				"-BY-4.0 (Attribution 4.0 International)",
				"-BY-SA-4.0 (Attribution Share Alike 4.0 International)",
				"Zlib",
				"ISC",
				"proprietary",
			];

			// Strips the parenthesized human-readable annotation, keeping
			// only the SPDX identifier part.
			static string sanitize(string license)
			{
				auto desc = license.countUntil(" (");
				if (desc != -1)
					license = license[0 .. desc];
				return license;
			}

			// Top-level menu entries: everything that is not a "-" variant.
			string[] root;
			foreach (l; licenses)
				if (!l.startsWith("-"))
					root ~= l;

			string result;
			while (true)
			{
				string picked;
				if (result.length)
				{
					// A base entry ending in "-" was picked; offer its
					// variants (plus "(back)" to return to the top level).
					auto start = licenses.countUntil!(a => a == result || a.startsWith(result ~ " (")) + 1;
					auto end = start;
					while (end < licenses.length && licenses[end].startsWith("-"))
						end++;
					picked = select(
						"variant of " ~ result[0 .. $ - 1],
						false,
						"(back)",
						// https://dub.pm/package-format-json.html#licenses
						licenses[start .. end].map!"a[1..$]".array ~ "(back)"
					);
					if (picked == "(back)")
					{
						result = null;
						continue;
					}
					picked = sanitize(picked);
				}
				else
				{
					picked = select(
						"an SPDX license-identifier (" ~ "https://spdx.org/licenses/".color(Color.light_blue, Mode.underline) ~ ")".color(Mode.bold),
						true,
						def,
						// https://dub.pm/package-format-json.html#licenses
						root
					);
					picked = sanitize(picked);
				}
				if (picked == def)
					return def;

				if (result.length)
					result ~= picked;
				else
					result = picked;

				if (!result.endsWith("-"))
					return result;
			}
		}

		// Interactively fills in the recipe fields; invoked by
		// createEmptyPackage below. Skipped entirely with --non-interactive.
		void depCallback(ref PackageRecipe p, ref PackageFormat fmt) {
			import std.datetime: Clock;

			if (m_nonInteractive) return;

			enum free_choice = true;
			fmt = select("a package recipe format", !free_choice, fmt.to!string, "sdl", "json").to!PackageFormat;
			auto author = p.authors.join(", ");
			while (true) {
				// Tries getting the name until a valid one is given.
				import std.regex;
				auto nameRegex = regex(`^[a-z0-9\-_]+$`);
				string triedName = input("Name", p.name);
				if (triedName.matchFirst(nameRegex).empty) {
					logError(`Invalid name '%s', names should consist only of lowercase alphanumeric characters, dashes ('-') and underscores ('_').`, triedName);
				} else {
					p.name = triedName;
					break;
				}
			}
			p.description = input("Description", p.description);
			p.authors = input("Author name", author).split(",").map!(a => a.strip).array;
			p.license = license_select(p.license);
			string copyrightString = .format("Copyright © %s, %-(%s, %)", Clock.currTime().year, p.authors);
			p.copyright = input("Copyright string", copyrightString);

			while (true) {
				auto depspec = input("Add dependency (leave empty to skip)", null);
				if (!depspec.length) break;
				addDependency(dub, p, depspec);
			}
		}

		// Unknown template types are forwarded as a free argument so a
		// matching "init-exec" sub-package can handle them.
		if (!["vibe.d", "deimos", "minimal"].canFind(m_templateType)) {
			free_args ~= m_templateType;
		}
		dub.createEmptyPackage(NativePath(dir), free_args, m_templateType, m_format, &depCallback, app_args);

		logInfo("Package successfully created in %s", dir.length ? dir : ".");
		return 0;
	}
}

/******************************************************************************/
/* GENERATE / BUILD / RUN / TEST / DESCRIBE                                   */
/******************************************************************************/

abstract class PackageBuildCommand : Command {
	protected {
		string m_compilerName;
		string m_arch;
		string[] m_debugVersions;
		string[] m_dVersions;
		string[] m_overrideConfigs;
		GeneratorSettings baseSettings;
		string m_defaultConfig;
		bool m_nodeps;
		bool m_forceRemove = false;
	}

	override void prepare(scope CommandArgs args)
	{
		args.getopt("b|build", &this.baseSettings.buildType, [
			"Specifies the type of build to perform. Note that setting the DFLAGS environment variable will override the build type with custom flags.",
			"Possible names:",
			" "~builtinBuildTypes.join(", ")~" and custom types"
		]);
		args.getopt("c|config", &this.baseSettings.config, [
			"Builds the specified configuration.
Configurations can be defined in dub.json" ]); args.getopt("override-config", &m_overrideConfigs, [ "Uses the specified configuration for a certain dependency. Can be specified multiple times.", "Format: --override-config=/" ]); args.getopt("compiler", &m_compilerName, [ "Specifies the compiler binary to use (can be a path).", "Arbitrary pre- and suffixes to the identifiers below are recognized (e.g. ldc2 or dmd-2.063) and matched to the proper compiler type:", " "~["dmd", "gdc", "ldc", "gdmd", "ldmd"].join(", ") ]); args.getopt("a|arch", &m_arch, [ "Force a different architecture (e.g. x86 or x86_64)" ]); args.getopt("d|debug", &m_debugVersions, [ "Define the specified `debug` version identifier when building - can be used multiple times" ]); args.getopt("d-version", &m_dVersions, [ "Define the specified `version` identifier when building - can be used multiple times.", "Use sparingly, with great power comes great responsibility! For commonly used or combined versions " ~ "and versions that dependees should be able to use, create configurations in your package." ]); args.getopt("nodeps", &m_nodeps, [ "Do not resolve missing dependencies before building" ]); args.getopt("build-mode", &this.baseSettings.buildMode, [ "Specifies the way the compiler and linker are invoked. Valid values:", " separate (default), allAtOnce, singleFile" ]); args.getopt("single", &this.baseSettings.single, [ "Treats the package name as a filename. The file must contain a package recipe comment." ]); args.getopt("force-remove", &m_forceRemove, [ "Deprecated option that does nothing." ]); args.getopt("filter-versions", &this.baseSettings.filterVersions, [ "[Experimental] Filter version identifiers and debug version identifiers to improve build cache efficiency." 
]); } protected void setupVersionPackage(Dub dub, string str_package_info, string default_build_type = "debug") { UserPackageDesc udesc = UserPackageDesc.fromString(str_package_info); setupPackage(dub, udesc, default_build_type); } protected void setupPackage(Dub dub, UserPackageDesc udesc, string default_build_type = "debug") { if (!m_compilerName.length) m_compilerName = dub.defaultCompiler; if (!m_arch.length) m_arch = dub.defaultArchitecture; if (dub.defaultLowMemory) this.baseSettings.buildSettings.options |= BuildOption.lowmem; if (dub.defaultEnvironments) this.baseSettings.buildSettings.addEnvironments(dub.defaultEnvironments); if (dub.defaultBuildEnvironments) this.baseSettings.buildSettings.addBuildEnvironments(dub.defaultBuildEnvironments); if (dub.defaultRunEnvironments) this.baseSettings.buildSettings.addRunEnvironments(dub.defaultRunEnvironments); if (dub.defaultPreGenerateEnvironments) this.baseSettings.buildSettings.addPreGenerateEnvironments(dub.defaultPreGenerateEnvironments); if (dub.defaultPostGenerateEnvironments) this.baseSettings.buildSettings.addPostGenerateEnvironments(dub.defaultPostGenerateEnvironments); if (dub.defaultPreBuildEnvironments) this.baseSettings.buildSettings.addPreBuildEnvironments(dub.defaultPreBuildEnvironments); if (dub.defaultPostBuildEnvironments) this.baseSettings.buildSettings.addPostBuildEnvironments(dub.defaultPostBuildEnvironments); if (dub.defaultPreRunEnvironments) this.baseSettings.buildSettings.addPreRunEnvironments(dub.defaultPreRunEnvironments); if (dub.defaultPostRunEnvironments) this.baseSettings.buildSettings.addPostRunEnvironments(dub.defaultPostRunEnvironments); this.baseSettings.compiler = getCompiler(m_compilerName); this.baseSettings.platform = this.baseSettings.compiler.determinePlatform(this.baseSettings.buildSettings, m_compilerName, m_arch); this.baseSettings.buildSettings.addDebugVersions(m_debugVersions); this.baseSettings.buildSettings.addVersions(m_dVersions); m_defaultConfig = null; 
enforce(loadSpecificPackage(dub, udesc), "Failed to load package."); if (this.baseSettings.config.length != 0 && !dub.configurations.canFind(this.baseSettings.config) && this.baseSettings.config != "unittest") { string msg = "Unknown build configuration: " ~ this.baseSettings.config; enum distance = 3; auto match = dub.configurations.getClosestMatch(this.baseSettings.config, distance); if (match !is null) msg ~= ". Did you mean '" ~ match ~ "'?"; enforce(0, msg); } if (this.baseSettings.buildType.length == 0) { if (environment.get("DFLAGS") !is null) this.baseSettings.buildType = "$DFLAGS"; else this.baseSettings.buildType = default_build_type; } if (!m_nodeps) { // retrieve missing packages if (!dub.project.hasAllDependencies) { logDiagnostic("Checking for missing dependencies."); if (this.baseSettings.single) dub.upgrade(UpgradeOptions.select | UpgradeOptions.noSaveSelections); else dub.upgrade(UpgradeOptions.select); } } dub.project.validate(); foreach (sc; m_overrideConfigs) { auto idx = sc.indexOf('/'); enforceUsage(idx >= 0, "Expected \"/\" as argument to --override-config."); dub.project.overrideConfiguration(sc[0 .. idx], sc[idx+1 .. $]); } } private bool loadSpecificPackage(Dub dub, UserPackageDesc udesc) { if (this.baseSettings.single) { enforce(udesc.name.length, "Missing file name of single-file package."); dub.loadSingleFilePackage(udesc.name); return true; } bool from_cwd = udesc.name.length == 0 || udesc.name.startsWith(":"); // load package in root_path to enable searching for sub packages if (loadCwdPackage(dub, from_cwd)) { if (udesc.name.startsWith(":")) { auto pack = dub.packageManager.getSubPackage( dub.project.rootPackage, udesc.name[1 .. 
$], false); dub.loadPackage(pack); return true; } if (from_cwd) return true; } enforce(udesc.name.length, "No valid root package found - aborting."); auto pack = dub.packageManager.getBestPackage( PackageName(udesc.name), udesc.range); enforce(pack, format!"Failed to find package '%s' locally."(udesc)); logInfo("Building package %s in %s", pack.name, pack.path.toNativeString()); dub.loadPackage(pack); return true; } } class GenerateCommand : PackageBuildCommand { protected { string m_generator; bool m_printPlatform, m_printBuilds, m_printConfigs; bool m_deep; // only set in BuildCommand } this() @safe pure nothrow { this.name = "generate"; this.argumentsPattern = " [[@]]"; this.description = "Generates project files using the specified generator"; this.helpText = [ "Generates project files using one of the supported generators:", "", "visuald - VisualD project files", "sublimetext - SublimeText project file", "cmake - CMake build scripts", "build - Builds the package directly", "", "An optional package name can be given to generate a different package than the root/CWD package." ]; } override void prepare(scope CommandArgs args) { super.prepare(args); args.getopt("combined", &this.baseSettings.combined, [ "Tries to build the whole project in a single compiler run." ]); args.getopt("print-builds", &m_printBuilds, [ "Prints the list of available build types" ]); args.getopt("print-configs", &m_printConfigs, [ "Prints the list of available configurations" ]); args.getopt("print-platform", &m_printPlatform, [ "Prints the identifiers for the current build platform as used for the build fields in dub.json" ]); args.getopt("parallel", &this.baseSettings.parallelBuild, [ "Runs multiple compiler instances in parallel, if possible." 
]); } override int execute(Dub dub, string[] free_args, string[] app_args) { string str_package_info; if (!m_generator.length) { enforceUsage(free_args.length >= 1 && free_args.length <= 2, "Expected one or two arguments."); m_generator = free_args[0]; if (free_args.length >= 2) str_package_info = free_args[1]; } else { enforceUsage(free_args.length <= 1, "Expected one or zero arguments."); if (free_args.length >= 1) str_package_info = free_args[0]; } setupVersionPackage(dub, str_package_info, "debug"); if (m_printBuilds) { logInfo("Available build types:"); foreach (i, tp; dub.project.builds) logInfo(" %s%s", tp, i == 0 ? " [default]" : null); logInfo(""); } m_defaultConfig = dub.project.getDefaultConfiguration(this.baseSettings.platform); if (m_printConfigs) { logInfo("Available configurations:"); foreach (tp; dub.configurations) logInfo(" %s%s", tp, tp == m_defaultConfig ? " [default]" : null); logInfo(""); } GeneratorSettings gensettings = this.baseSettings; if (!gensettings.config.length) gensettings.config = m_defaultConfig; gensettings.runArgs = app_args; gensettings.recipeName = dub.mainRecipePath; // legacy compatibility, default working directory is always CWD gensettings.overrideToolWorkingDirectory = getWorkingDirectory(); gensettings.buildDeep = m_deep; logDiagnostic("Generating using %s", m_generator); dub.generateProject(m_generator, gensettings); if (this.baseSettings.buildType == "ddox") dub.runDdox(gensettings.run, app_args); return 0; } } class BuildCommand : GenerateCommand { protected { bool m_yes; // automatic yes to prompts; bool m_nonInteractive; } this() @safe pure nothrow { this.name = "build"; this.argumentsPattern = "[[@]]"; this.description = "Builds a package (uses the main package in the current working directory by default)"; this.helpText = [ "Builds a package (uses the main package in the current working directory by default)" ]; } override void prepare(scope CommandArgs args) { args.getopt("temp-build", 
&this.baseSettings.tempBuild, [ "Builds the project in the temp folder if possible." ]); args.getopt("rdmd", &this.baseSettings.rdmd, [ "Use rdmd instead of directly invoking the compiler" ]); args.getopt("f|force", &this.baseSettings.force, [ "Forces a recompilation even if the target is up to date" ]); args.getopt("y|yes", &m_yes, [ `Automatic yes to prompts. Assume "yes" as answer to all interactive prompts.` ]); args.getopt("n|non-interactive", &m_nonInteractive, [ "Don't enter interactive mode." ]); args.getopt("d|deep", &m_deep, [ "Build all dependencies, even when main target is a static library." ]); super.prepare(args); m_generator = "build"; } override int execute(Dub dub, string[] free_args, string[] app_args) { // single package files don't need to be downloaded, they are on the disk. if (free_args.length < 1 || this.baseSettings.single) return super.execute(dub, free_args, app_args); if (!m_nonInteractive) { const packageParts = UserPackageDesc.fromString(free_args[0]); if (auto rc = fetchMissingPackages(dub, packageParts)) return rc; } return super.execute(dub, free_args, app_args); } private int fetchMissingPackages(Dub dub, in UserPackageDesc packageParts) { static bool input(string caption, bool default_value = true) { writef("%s [%s]: ", caption, default_value ? "Y/n" : "y/N"); auto inp = readln(); string userInput = "y"; if (inp.length > 1) userInput = inp[0 .. 
$ - 1].toLower; switch (userInput) { case "no", "n", "0": return false; case "yes", "y", "1": default: return true; } } // Local subpackages are always assumed to be present if (packageParts.name.startsWith(":")) return 0; const baseName = PackageName(packageParts.name).main; // Found locally if (dub.packageManager.getBestPackage(baseName, packageParts.range)) return 0; // Non-interactive, either via flag, or because a version was provided if (m_yes || !packageParts.range.matchesAny()) { dub.fetch(baseName, packageParts.range); return 0; } // Otherwise we go the long way of asking the user. // search for the package and filter versions for exact matches auto search = dub.searchPackages(baseName.toString()) .map!(tup => tup[1].find!(p => p.name == baseName.toString())) .filter!(ps => !ps.empty); if (search.empty) { logWarn("Package '%s' was neither found locally nor online.", packageParts); return 2; } const p = search.front.front; logInfo("Package '%s' was not found locally but is available online:", packageParts); logInfo("---"); logInfo("Description: %s", p.description); logInfo("Version: %s", p.version_); logInfo("---"); if (input("Do you want to fetch '%s@%s' now?".format(packageParts, p.version_))) dub.fetch(baseName, VersionRange.fromString(p.version_)); return 0; } } class RunCommand : BuildCommand { this() @safe pure nothrow { this.name = "run"; this.argumentsPattern = "[[@]]"; this.description = "Builds and runs a package (default command)"; this.helpText = [ "Builds and runs a package (uses the main package in the current working directory by default)" ]; this.acceptsAppArgs = true; } override void prepare(scope CommandArgs args) { super.prepare(args); this.baseSettings.run = true; } override int execute(Dub dub, string[] free_args, string[] app_args) { return super.execute(dub, free_args, app_args); } } class TestCommand : PackageBuildCommand { private { string m_mainFile; } this() @safe pure nothrow { this.name = "test"; this.argumentsPattern = "[[@]]"; 
this.description = "Executes the tests of the selected package"; this.helpText = [ `Builds the package and executes all contained unit tests.`, ``, `If no explicit configuration is given, an existing "unittest" ` ~ `configuration will be preferred for testing. If none exists, the ` ~ `first library type configuration will be used, and if that doesn't ` ~ `exist either, the first executable configuration is chosen.`, ``, `When a custom main file (--main-file) is specified, only library ` ~ `configurations can be used. Otherwise, depending on the type of ` ~ `the selected configuration, either an existing main file will be ` ~ `used (and needs to be properly adjusted to just run the unit ` ~ `tests for 'version(unittest)'), or DUB will generate one for ` ~ `library type configurations.`, ``, `Finally, if the package contains a dependency to the "tested" ` ~ `package, the automatically generated main file will use it to ` ~ `run the unit tests.` ]; this.acceptsAppArgs = true; } override void prepare(scope CommandArgs args) { args.getopt("temp-build", &this.baseSettings.tempBuild, [ "Builds the project in the temp folder if possible." ]); args.getopt("main-file", &m_mainFile, [ "Specifies a custom file containing the main() function to use for running the tests." ]); args.getopt("combined", &this.baseSettings.combined, [ "Tries to build the whole project in a single compiler run." ]); args.getopt("parallel", &this.baseSettings.parallelBuild, [ "Runs multiple compiler instances in parallel, if possible." ]); args.getopt("f|force", &this.baseSettings.force, [ "Forces a recompilation even if the target is up to date" ]); bool coverage = false; args.getopt("coverage", &coverage, [ "Enables code coverage statistics to be generated." ]); if (coverage) this.baseSettings.buildType = "unittest-cov"; bool coverageCTFE = false; args.getopt("coverage-ctfe", &coverageCTFE, [ "Enables code coverage (including CTFE) statistics to be generated." 
]); if (coverageCTFE) this.baseSettings.buildType = "unittest-cov-ctfe"; super.prepare(args); } override int execute(Dub dub, string[] free_args, string[] app_args) { string str_package_info; enforceUsage(free_args.length <= 1, "Expected one or zero arguments."); if (free_args.length >= 1) str_package_info = free_args[0]; setupVersionPackage(dub, str_package_info, "unittest"); GeneratorSettings settings = this.baseSettings; settings.compiler = getCompiler(this.baseSettings.platform.compilerBinary); settings.run = true; settings.runArgs = app_args; dub.testProject(settings, this.baseSettings.config, NativePath(m_mainFile)); return 0; } } class LintCommand : PackageBuildCommand { private { bool m_syntaxCheck = false; bool m_styleCheck = false; string m_errorFormat; bool m_report = false; string m_reportFormat; string m_reportFile; string[] m_importPaths; string m_config; } this() @safe pure nothrow { this.name = "lint"; this.argumentsPattern = "[[@]]"; this.description = "Executes the linter tests of the selected package"; this.helpText = [ `Builds the package and executes D-Scanner linter tests.` ]; this.acceptsAppArgs = true; } override void prepare(scope CommandArgs args) { args.getopt("syntax-check", &m_syntaxCheck, [ "Lexes and parses sourceFile, printing the line and column number of " ~ "any syntax errors to stdout." ]); args.getopt("style-check", &m_styleCheck, [ "Lexes and parses sourceFiles, printing the line and column number of " ~ "any static analysis check failures stdout." ]); args.getopt("error-format", &m_errorFormat, [ "Format errors produced by the style/syntax checkers." ]); args.getopt("report", &m_report, [ "Generate a static analysis report in JSON format." ]); args.getopt("report-format", &m_reportFormat, [ "Specifies the format of the generated report." ]); args.getopt("report-file", &m_reportFile, [ "Write report to file." 
]); if (m_reportFormat || m_reportFile) m_report = true; args.getopt("import-paths", &m_importPaths, [ "Import paths" ]); args.getopt("dscanner-config", &m_config, [ "Use the given d-scanner configuration file." ]); super.prepare(args); } override int execute(Dub dub, string[] free_args, string[] app_args) { string str_package_info; enforceUsage(free_args.length <= 1, "Expected one or zero arguments."); if (free_args.length >= 1) str_package_info = free_args[0]; string[] args; if (!m_syntaxCheck && !m_styleCheck && !m_report && app_args.length == 0) { m_styleCheck = true; } if (m_syntaxCheck) args ~= "--syntaxCheck"; if (m_styleCheck) args ~= "--styleCheck"; if (m_errorFormat) args ~= ["--errorFormat", m_errorFormat]; if (m_report) args ~= "--report"; if (m_reportFormat) args ~= ["--reportFormat", m_reportFormat]; if (m_reportFile) args ~= ["--reportFile", m_reportFile]; foreach (import_path; m_importPaths) args ~= ["-I", import_path]; if (m_config) args ~= ["--config", m_config]; setupVersionPackage(dub, str_package_info); dub.lintProject(args ~ app_args); return 0; } } class DescribeCommand : PackageBuildCommand { private { bool m_importPaths = false; bool m_stringImportPaths = false; bool m_dataList = false; bool m_dataNullDelim = false; string[] m_data; } this() @safe pure nothrow { this.name = "describe"; this.argumentsPattern = "[[@]]"; this.description = "Prints a JSON description of the project and its dependencies"; this.helpText = [ "Prints a JSON build description for the root package an all of " ~ "their dependencies in a format similar to a JSON package " ~ "description file. This is useful mostly for IDEs.", "", "All usual options that are also used for build/run/generate apply.", "", "When --data=VALUE is supplied, specific build settings for a project " ~ "will be printed instead (by default, formatted for the current compiler).", "", "The --data=VALUE option can be specified multiple times to retrieve " ~ "several pieces of information at once. 
A comma-separated list is " ~ "also acceptable (ex: --data=dflags,libs). The data will be output in " ~ "the same order requested on the command line.", "", "The accepted values for --data=VALUE are:", "", "main-source-file, dflags, lflags, libs, linker-files, " ~ "source-files, versions, debug-versions, import-paths, " ~ "string-import-paths, import-files, options", "", "The following are also accepted by --data if --data-list is used:", "", "target-type, target-path, target-name, working-directory, " ~ "copy-files, string-import-files, pre-generate-commands, " ~ "post-generate-commands, pre-build-commands, post-build-commands, " ~ "pre-run-commands, post-run-commands, requirements", ]; } override void prepare(scope CommandArgs args) { super.prepare(args); args.getopt("import-paths", &m_importPaths, [ "Shortcut for --data=import-paths --data-list" ]); args.getopt("string-import-paths", &m_stringImportPaths, [ "Shortcut for --data=string-import-paths --data-list" ]); args.getopt("data", &m_data, [ "Just list the values of a particular build setting, either for this "~ "package alone or recursively including all dependencies. Accepts a "~ "comma-separated list. See above for more details and accepted "~ "possibilities for VALUE." ]); args.getopt("data-list", &m_dataList, [ "Output --data information in list format (line-by-line), instead "~ "of formatting for a compiler command line.", ]); args.getopt("data-0", &m_dataNullDelim, [ "Output --data information using null-delimiters, rather than "~ "spaces or newlines. Result is usable with, ex., xargs -0.", ]); } override int execute(Dub dub, string[] free_args, string[] app_args) { enforceUsage( !(m_importPaths && m_stringImportPaths), "--import-paths and --string-import-paths may not be used together." ); enforceUsage( !(m_data && (m_importPaths || m_stringImportPaths)), "--data may not be used together with --import-paths or --string-import-paths." 
); // disable all log output to stdout and use "writeln" to output the JSON description auto ll = getLogLevel(); setLogLevel(max(ll, LogLevel.warn)); scope (exit) setLogLevel(ll); string str_package_info; enforceUsage(free_args.length <= 1, "Expected one or zero arguments."); if (free_args.length >= 1) str_package_info = free_args[0]; setupVersionPackage(dub, str_package_info); m_defaultConfig = dub.project.getDefaultConfiguration(this.baseSettings.platform); GeneratorSettings settings = this.baseSettings; if (!settings.config.length) settings.config = m_defaultConfig; settings.cache = dub.cachePathDontUse(); // See function's description // Ignore other options settings.buildSettings.options = this.baseSettings.buildSettings.options & BuildOption.lowmem; // With a requested `unittest` config, switch to the special test runner // config (which doesn't require an existing `unittest` configuration). if (this.baseSettings.config == "unittest") { const test_config = dub.project.addTestRunnerConfiguration(settings, !dub.dryRun); if (test_config) settings.config = test_config; } if (m_importPaths) { m_data = ["import-paths"]; m_dataList = true; } else if (m_stringImportPaths) { m_data = ["string-import-paths"]; m_dataList = true; } if (m_data.length) { ListBuildSettingsFormat lt; with (ListBuildSettingsFormat) lt = m_dataList ? (m_dataNullDelim ? listNul : list) : (m_dataNullDelim ? commandLineNul : commandLine); dub.listProjectData(settings, m_data, lt); } else { auto desc = dub.project.describe(settings); writeln(desc.serializeToPrettyJson()); } return 0; } } class CleanCommand : Command { private { bool m_allPackages; } this() @safe pure nothrow { this.name = "clean"; this.argumentsPattern = "[]"; this.description = "Removes intermediate build files and cached build results"; this.helpText = [ "This command removes any cached build files of the given package(s). 
The final target file, as well as any copyFiles are currently not removed.", "Without arguments, the package in the current working directory will be cleaned." ]; } override void prepare(scope CommandArgs args) { args.getopt("all-packages", &m_allPackages, [ "Cleans up *all* known packages (dub list)" ]); } override int execute(Dub dub, string[] free_args, string[] app_args) { enforceUsage(free_args.length <= 1, "Expected one or zero arguments."); enforceUsage(app_args.length == 0, "Application arguments are not supported for the clean command."); enforceUsage(!m_allPackages || !free_args.length, "The --all-packages flag may not be used together with an explicit package name."); enforce(free_args.length == 0, "Cleaning a specific package isn't possible right now."); if (m_allPackages) { dub.clean(); } else { dub.loadPackage(); dub.clean(dub.project.rootPackage); } return 0; } } /******************************************************************************/ /* FETCH / ADD / REMOVE / UPGRADE */ /******************************************************************************/ class AddCommand : Command { this() @safe pure nothrow { this.name = "add"; this.argumentsPattern = "[@] []"; this.description = "Adds dependencies to the package file."; this.helpText = [ "Adds as dependencies.", "", "Running \"dub add \" is the same as adding to the \"dependencies\" section in dub.json/dub.sdl.", "If no version is specified for one of the packages, dub will query the registry for the latest version." 
]; } override void prepare(scope CommandArgs args) {} override int execute(Dub dub, string[] free_args, string[] app_args) { import dub.recipe.io : readPackageRecipe, writePackageRecipe; enforceUsage(free_args.length != 0, "Expected one or more arguments."); enforceUsage(app_args.length == 0, "Unexpected application arguments."); if (!loadCwdPackage(dub, true)) return 2; auto recipe = dub.project.rootPackage.rawRecipe.clone; foreach (depspec; free_args) { if (!addDependency(dub, recipe, depspec)) return 2; } writePackageRecipe(dub.project.rootPackage.recipePath, recipe); return 0; } } class UpgradeCommand : Command { private { bool m_prerelease = false; bool m_includeSubPackages = false; bool m_forceRemove = false; bool m_missingOnly = false; bool m_verify = false; bool m_dryRun = false; } this() @safe pure nothrow { this.name = "upgrade"; this.argumentsPattern = "[]"; this.description = "Forces an upgrade of the dependencies"; this.helpText = [ "Upgrades all dependencies of the package by querying the package registry(ies) for new versions.", "", "This will update the versions stored in the selections file ("~SelectedVersions.defaultFile~") accordingly.", "", "If one or more package names are specified, only those dependencies will be upgraded. Otherwise all direct and indirect dependencies of the root package will get upgraded." ]; } override void prepare(scope CommandArgs args) { args.getopt("prerelease", &m_prerelease, [ "Uses the latest pre-release version, even if release versions are available" ]); args.getopt("s|sub-packages", &m_includeSubPackages, [ "Also upgrades dependencies of all directory based sub packages" ]); args.getopt("verify", &m_verify, [ "Updates the project and performs a build. If successful, rewrites the selected versions file ." ]); args.getopt("dry-run", &m_dryRun, [ "Only print what would be upgraded, but don't actually upgrade anything." 
]); args.getopt("missing-only", &m_missingOnly, [ "Performs an upgrade only for dependencies that don't yet have a version selected. This is also done automatically before each build." ]); args.getopt("force-remove", &m_forceRemove, [ "Deprecated option that does nothing." ]); } override int execute(Dub dub, string[] free_args, string[] app_args) { enforceUsage(free_args.length <= 1, "Unexpected arguments."); enforceUsage(app_args.length == 0, "Unexpected application arguments."); enforceUsage(!m_verify, "--verify is not yet implemented."); enforce(loadCwdPackage(dub, true), "Failed to load package."); logInfo("Upgrading", Color.cyan, "project in %s", dub.projectPath.toNativeString().color(Mode.bold)); auto options = UpgradeOptions.upgrade|UpgradeOptions.select; if (m_missingOnly) options &= ~UpgradeOptions.upgrade; if (m_prerelease) options |= UpgradeOptions.preRelease; if (m_dryRun) options |= UpgradeOptions.dryRun; dub.upgrade(options, free_args); auto spacks = dub.project.rootPackage .subPackages .filter!(sp => sp.path.length); if (m_includeSubPackages) { bool any_error = false; // Go through each path based sub package, load it as a new instance // and perform an upgrade as if the upgrade had been run from within // the sub package folder. Note that we have to use separate Dub // instances, because the upgrade always works on the root package // of a project, which in this case are the individual sub packages. 
foreach (sp; spacks) { try { auto fullpath = (dub.projectPath ~ sp.path).toNativeString(); logInfo("Upgrading", Color.cyan, "sub package in %s", fullpath); auto sdub = new Dub(fullpath, dub.packageSuppliers, SkipPackageSuppliers.all); sdub.defaultPlacementLocation = dub.defaultPlacementLocation; sdub.loadPackage(); sdub.upgrade(options, free_args); } catch (Exception e) { logError("Failed to update sub package at %s: %s", sp.path, e.msg); any_error = true; } } if (any_error) return 1; } else if (!spacks.empty) { foreach (sp; spacks) logInfo("Not upgrading sub package in %s", sp.path); logInfo("\nNote: specify -s to also upgrade sub packages."); } return 0; } } class FetchRemoveCommand : Command { protected { string m_version; bool m_forceRemove = false; } override void prepare(scope CommandArgs args) { args.getopt("version", &m_version, [ "Use the specified version/branch instead of the latest available match", "The remove command also accepts \"*\" here as a wildcard to remove all versions of the package from the specified location" ], true); // hide --version from help args.getopt("force-remove", &m_forceRemove, [ "Deprecated option that does nothing" ]); } abstract override int execute(Dub dub, string[] free_args, string[] app_args); } class FetchCommand : FetchRemoveCommand { private enum FetchStatus { /// Package is already present and on the right version Present = 0, /// Package was fetched from the registry Fetched = 1, /// Attempts at fetching the package failed Failed = 2, } protected bool recursive; protected size_t[FetchStatus.max + 1] result; this() @safe pure nothrow { this.name = "fetch"; this.argumentsPattern = "[@]"; this.description = "Explicitly retrieves and caches packages"; this.helpText = [ "When run with one or more arguments, regardless of the location it is run in,", "it will fetch the packages matching the argument(s).", "Examples:", "$ dub fetch vibe-d", "$ dub fetch vibe-d@v0.9.0 --cache=local --recursive", "", "When run in a project 
with no arguments, it will fetch all dependencies for that project.", "If the project doesn't have set dependencies (no 'dub.selections.json'), it will also perform dependency resolution.", "Example:", "$ cd myProject && dub fetch", "", "Note that the 'build', 'run', and any other command that need packages will automatically perform fetch,", "hence it is not generally necessary to run this command before any other." ]; } override void prepare(scope CommandArgs args) { args.getopt("r|recursive", &this.recursive, [ "Also fetches dependencies of specified packages", ]); super.prepare(args); } override int execute(Dub dub, string[] free_args, string[] app_args) { enforceUsage(app_args.length == 0, "Unexpected application arguments."); // remove then --version removed if (m_version.length) { enforceUsage(free_args.length == 1, "Expecting exactly one argument when using --version."); const name = free_args[0]; logWarn("The '--version' parameter was deprecated, use %s@%s. Please update your scripts.", name, m_version); enforceUsage(!name.canFindVersionSplitter, "Double version spec not allowed."); this.fetchPackage(dub, UserPackageDesc(name, VersionRange.fromString(m_version))); return this.result[FetchStatus.Failed] ? 1 : 0; } // Fetches dependencies of the project // This is obviously mutually exclusive with the below foreach if (!free_args.length) { if (!this.loadCwdPackage(dub, true)) return 1; // retrieve missing packages if (!dub.project.hasAllDependencies) { logInfo("Resolving", Color.green, "missing dependencies for project"); dub.upgrade(UpgradeOptions.select); } else logInfo("All %s dependencies are already present locally", dub.project.dependencies.length); return 0; } // Fetches packages named explicitly foreach (name; free_args) { const udesc = UserPackageDesc.fromString(name); this.fetchPackage(dub, udesc); } // Note that this does not include packages indirectly fetched. 
// Hence it is not currently displayed in the no-argument version, // and will only include directly mentioned packages in the arg version. logInfoNoTag("%s packages fetched, %s already present, %s failed", this.result[FetchStatus.Fetched], this.result[FetchStatus.Present], this.result[FetchStatus.Failed]); return this.result[FetchStatus.Failed] ? 1 : 0; } /// Shell around `fetchSinglePackage` with logs and recursion support private void fetchPackage(Dub dub, UserPackageDesc udesc) { auto r = this.fetchSinglePackage(dub, udesc); this.result[r] += 1; final switch (r) { case FetchStatus.Failed: // Error displayed in `fetchPackage` as it has more information // However we need to return here as we can't recurse. return; case FetchStatus.Present: logInfo("Existing", Color.green, "package %s found locally", udesc); break; case FetchStatus.Fetched: logInfo("Fetched", Color.green, "package %s successfully", udesc); break; } if (this.recursive) { auto pack = dub.packageManager.getBestPackage( PackageName(udesc.name), udesc.range); auto proj = new Project(dub.packageManager, pack); if (!proj.hasAllDependencies) { logInfo("Resolving", Color.green, "missing dependencies for project"); dub.loadPackage(pack); dub.upgrade(UpgradeOptions.select); } } } /// Implementation for argument version private FetchStatus fetchSinglePackage(Dub dub, UserPackageDesc udesc) { auto fspkg = dub.packageManager.getBestPackage( PackageName(udesc.name), udesc.range); // Avoid dub fetch if the package is present on the filesystem. if (fspkg !is null && udesc.range.isExactVersion()) return FetchStatus.Present; try { auto pkg = dub.fetch(PackageName(udesc.name), udesc.range, FetchOptions.forceBranchUpgrade); assert(pkg !is null, "dub.fetch returned a null Package"); return pkg is fspkg ? 
FetchStatus.Present : FetchStatus.Fetched; } catch (Exception e) { logError("Fetching %s failed: %s", udesc, e.msg); return FetchStatus.Failed; } } } class RemoveCommand : FetchRemoveCommand { private { bool m_nonInteractive; } this() @safe pure nothrow { this.name = "remove"; this.argumentsPattern = "[@]"; this.description = "Removes a cached package"; this.helpText = [ "Removes a package that is cached on the local system." ]; } override void prepare(scope CommandArgs args) { super.prepare(args); args.getopt("n|non-interactive", &m_nonInteractive, ["Don't enter interactive mode."]); } override int execute(Dub dub, string[] free_args, string[] app_args) { enforceUsage(free_args.length == 1, "Expecting exactly one argument."); enforceUsage(app_args.length == 0, "Unexpected application arguments."); auto package_id = free_args[0]; auto location = dub.defaultPlacementLocation; size_t resolveVersion(in Package[] packages) { // just remove only package version if (packages.length == 1) return 0; writeln("Select version of '", package_id, "' to remove from location '", location, "':"); foreach (i, pack; packages) writefln("%s) %s", i + 1, pack.version_); writeln(packages.length + 1, ") ", "all versions"); while (true) { writef("> "); auto inp = readln(); if (!inp.length) // Ctrl+D return size_t.max; inp = inp.stripRight; if (!inp.length) // newline or space continue; try { immutable selection = inp.to!size_t - 1; if (selection <= packages.length) return selection; } catch (ConvException e) { } logError("Please enter a number between 1 and %s.", packages.length + 1); } } if (!m_version.empty) { // remove then --version removed enforceUsage(!package_id.canFindVersionSplitter, "Double version spec not allowed."); logWarn("The '--version' parameter was deprecated, use %s@%s. 
Please update your scripts.", package_id, m_version); dub.remove(PackageName(package_id), m_version, location); } else { const parts = UserPackageDesc.fromString(package_id); const explicit = package_id.canFindVersionSplitter; if (m_nonInteractive || explicit || parts.range != VersionRange.Any) { const str = parts.range.matchesAny() ? "*" : parts.range.toString(); dub.remove(PackageName(parts.name), str, location); } else { dub.remove(PackageName(package_id), location, &resolveVersion); } } return 0; } } /******************************************************************************/ /* ADD/REMOVE PATH/LOCAL */ /******************************************************************************/ abstract class RegistrationCommand : Command { private { bool m_system; } override void prepare(scope CommandArgs args) { args.getopt("system", &m_system, [ "DEPRECATED: Use --cache=system instead" ], true); } abstract override int execute(Dub dub, string[] free_args, string[] app_args); } class AddPathCommand : RegistrationCommand { this() @safe pure nothrow { this.name = "add-path"; this.argumentsPattern = ""; this.description = "Adds a default package search path"; this.helpText = [ "Adds a default package search path. All direct sub folders of this path will be searched for package descriptions and will be made available as packages. 
Using this command has the equivalent effect as calling 'dub add-local' on each of the sub folders manually.", "", "Any packages registered using add-path will be preferred over packages downloaded from the package registry when searching for dependencies during a build operation.", "", "The version of the packages will be determined by one of the following:", " - For GIT working copies, the last tag (git describe) is used to determine the version", " - If the package contains a \"version\" field in the package description, this is used", " - If neither of those apply, \"~master\" is assumed" ]; } override int execute(Dub dub, string[] free_args, string[] app_args) { enforceUsage(free_args.length == 1, "Missing search path."); enforceUsage(!this.m_system || dub.defaultPlacementLocation == PlacementLocation.user, "Cannot use both --system and --cache, prefer --cache"); if (this.m_system) dub.addSearchPath(free_args[0], PlacementLocation.system); else dub.addSearchPath(free_args[0], dub.defaultPlacementLocation); return 0; } } class RemovePathCommand : RegistrationCommand { this() @safe pure nothrow { this.name = "remove-path"; this.argumentsPattern = ""; this.description = "Removes a package search path"; this.helpText = ["Removes a package search path previously added with add-path."]; } override int execute(Dub dub, string[] free_args, string[] app_args) { enforceUsage(free_args.length == 1, "Expected one argument."); enforceUsage(!this.m_system || dub.defaultPlacementLocation == PlacementLocation.user, "Cannot use both --system and --cache, prefer --cache"); if (this.m_system) dub.removeSearchPath(free_args[0], PlacementLocation.system); else dub.removeSearchPath(free_args[0], dub.defaultPlacementLocation); return 0; } } class AddLocalCommand : RegistrationCommand { this() @safe pure nothrow { this.name = "add-local"; this.argumentsPattern = " []"; this.description = "Adds a local package directory (e.g. 
a git repository)"; this.helpText = [ "Adds a local package directory to be used during dependency resolution. This command is useful for registering local packages, such as GIT working copies, that are either not available in the package registry, or are supposed to be overwritten.", "", "The version of the package is either determined automatically (see the \"add-path\" command, or can be explicitly overwritten by passing a version on the command line.", "", "See 'dub add-path -h' for a way to register multiple local packages at once." ]; } override int execute(Dub dub, string[] free_args, string[] app_args) { enforceUsage(free_args.length == 1 || free_args.length == 2, "Expecting one or two arguments."); enforceUsage(!this.m_system || dub.defaultPlacementLocation == PlacementLocation.user, "Cannot use both --system and --cache, prefer --cache"); string ver = free_args.length == 2 ? free_args[1] : null; if (this.m_system) dub.addLocalPackage(free_args[0], ver, PlacementLocation.system); else dub.addLocalPackage(free_args[0], ver, dub.defaultPlacementLocation); return 0; } } class RemoveLocalCommand : RegistrationCommand { this() @safe pure nothrow { this.name = "remove-local"; this.argumentsPattern = ""; this.description = "Removes a local package directory"; this.helpText = ["Removes a local package directory"]; } override int execute(Dub dub, string[] free_args, string[] app_args) { enforceUsage(free_args.length >= 1, "Missing package path argument."); enforceUsage(free_args.length <= 1, "Expected the package path to be the only argument."); enforceUsage(!this.m_system || dub.defaultPlacementLocation == PlacementLocation.user, "Cannot use both --system and --cache, prefer --cache"); if (this.m_system) dub.removeLocalPackage(free_args[0], PlacementLocation.system); else dub.removeLocalPackage(free_args[0], dub.defaultPlacementLocation); return 0; } } class ListCommand : Command { this() @safe pure nothrow { this.name = "list"; this.argumentsPattern = "[[@]]"; 
this.description = "Prints a list of all or selected local packages dub is aware of"; this.helpText = [ "Prints a list of all or selected local packages. This includes all cached "~ "packages (user or system wide), all packages in the package search paths "~ "(\"dub add-path\") and all manually registered packages (\"dub add-local\"). "~ "If a package (and optionally a version spec) is specified, only matching packages are shown." ]; } override void prepare(scope CommandArgs args) {} override int execute(Dub dub, string[] free_args, string[] app_args) { enforceUsage(free_args.length <= 1, "Expecting zero or one extra arguments."); const pinfo = free_args.length ? UserPackageDesc.fromString(free_args[0]) : UserPackageDesc("",VersionRange.Any); const pname = pinfo.name; enforceUsage(app_args.length == 0, "The list command supports no application arguments."); logInfoNoTag("Packages present in the system and known to dub:"); foreach (p; dub.packageManager.getPackageIterator()) { if ((pname == "" || pname == p.name) && pinfo.range.matches(p.version_)) logInfoNoTag(" %s %s: %s", p.name.color(Mode.bold), p.version_, p.path.toNativeString()); } logInfo(""); return 0; } } class SearchCommand : Command { this() @safe pure nothrow { this.name = "search"; this.argumentsPattern = ""; this.description = "Search for available packages."; this.helpText = [ "Search all specified providers for matching packages." 
]; } override void prepare(scope CommandArgs args) {} override int execute(Dub dub, string[] free_args, string[] app_args) { enforce(free_args.length == 1, "Expected one argument."); auto res = dub.searchPackages(free_args[0]); if (res.empty) { logError("No matches found."); return 2; } auto justify = res .map!((descNmatches) => descNmatches[1]) .joiner .map!(m => m.name.length + m.version_.length) .reduce!max + " ()".length; justify += (~justify & 3) + 1; // round to next multiple of 4 int colorDifference = cast(int)"a".color(Mode.bold).length - 1; justify += colorDifference; foreach (desc, matches; res) { logInfoNoTag("==== %s ====", desc); foreach (m; matches) logInfoNoTag(" %s%s", leftJustify(m.name.color(Mode.bold) ~ " (" ~ m.version_ ~ ")", justify), m.description); } return 0; } } /******************************************************************************/ /* OVERRIDES */ /******************************************************************************/ class AddOverrideCommand : Command { private { bool m_system = false; } static immutable string DeprecationMessage = "This command is deprecated. Use path based dependency, custom cache path, " ~ "or edit `dub.selections.json` to achieve the same results."; this() @safe pure nothrow { this.name = "add-override"; this.argumentsPattern = " "; this.description = "Adds a new package override."; this.hidden = true; this.helpText = [ DeprecationMessage ]; } override void prepare(scope CommandArgs args) { args.getopt("system", &m_system, [ "Register system-wide instead of user-wide" ]); } override int execute(Dub dub, string[] free_args, string[] app_args) { logWarn(DeprecationMessage); enforceUsage(app_args.length == 0, "Unexpected application arguments."); enforceUsage(free_args.length == 3, "Expected three arguments, not "~free_args.length.to!string); auto scope_ = m_system ? 
PlacementLocation.system : PlacementLocation.user; auto pack = free_args[0]; auto source = VersionRange.fromString(free_args[1]); if (existsFile(NativePath(free_args[2]))) { auto target = NativePath(free_args[2]); if (!target.absolute) target = getWorkingDirectory() ~ target; dub.packageManager.addOverride_(scope_, pack, source, target); logInfo("Added override %s %s => %s", pack, source, target); } else { auto target = Version(free_args[2]); dub.packageManager.addOverride_(scope_, pack, source, target); logInfo("Added override %s %s => %s", pack, source, target); } return 0; } } class RemoveOverrideCommand : Command { private { bool m_system = false; } this() @safe pure nothrow { this.name = "remove-override"; this.argumentsPattern = " "; this.description = "Removes an existing package override."; this.hidden = true; this.helpText = [ AddOverrideCommand.DeprecationMessage ]; } override void prepare(scope CommandArgs args) { args.getopt("system", &m_system, [ "Register system-wide instead of user-wide" ]); } override int execute(Dub dub, string[] free_args, string[] app_args) { logWarn(AddOverrideCommand.DeprecationMessage); enforceUsage(app_args.length == 0, "Unexpected application arguments."); enforceUsage(free_args.length == 2, "Expected two arguments, not "~free_args.length.to!string); auto scope_ = m_system ? 
PlacementLocation.system : PlacementLocation.user; auto source = VersionRange.fromString(free_args[1]); dub.packageManager.removeOverride_(scope_, free_args[0], source); return 0; } } class ListOverridesCommand : Command { this() @safe pure nothrow { this.name = "list-overrides"; this.argumentsPattern = ""; this.description = "Prints a list of all local package overrides"; this.hidden = true; this.helpText = [ AddOverrideCommand.DeprecationMessage ]; } override void prepare(scope CommandArgs args) {} override int execute(Dub dub, string[] free_args, string[] app_args) { logWarn(AddOverrideCommand.DeprecationMessage); void printList(in PackageOverride_[] overrides, string caption) { if (overrides.length == 0) return; logInfoNoTag("# %s", caption); foreach (ovr; overrides) ovr.target.match!( t => logInfoNoTag("%s %s => %s", ovr.package_.color(Mode.bold), ovr.source, t)); } printList(dub.packageManager.getOverrides_(PlacementLocation.user), "User wide overrides"); printList(dub.packageManager.getOverrides_(PlacementLocation.system), "System wide overrides"); return 0; } } /******************************************************************************/ /* Cache cleanup */ /******************************************************************************/ class CleanCachesCommand : Command { this() @safe pure nothrow { this.name = "clean-caches"; this.argumentsPattern = ""; this.description = "Removes cached metadata"; this.helpText = [ "This command removes any cached metadata like the list of available packages and their latest version." 
]; } override void prepare(scope CommandArgs args) {} override int execute(Dub dub, string[] free_args, string[] app_args) { return 0; } } /******************************************************************************/ /* DUSTMITE */ /******************************************************************************/ class DustmiteCommand : PackageBuildCommand { private { int m_compilerStatusCode = int.min; int m_linkerStatusCode = int.min; int m_programStatusCode = int.min; string m_compilerRegex; string m_linkerRegex; string m_programRegex; string m_testPackage; bool m_noRedirect; string m_strategy; uint m_jobCount; // zero means not specified bool m_trace; } this() @safe pure nothrow { this.name = "dustmite"; this.argumentsPattern = ""; this.acceptsAppArgs = true; this.description = "Create reduced test cases for build errors"; this.helpText = [ "This command uses the Dustmite utility to isolate the cause of build errors in a DUB project.", "", "It will create a copy of all involved packages and run dustmite on this copy, leaving a reduced test case.", "", "Determining the desired error condition is done by checking the compiler/linker status code, as well as their output (stdout and stderr combined). If --program-status or --program-regex is given and the generated binary is an executable, it will be executed and its output will also be incorporated into the final decision." 
]; } override void prepare(scope CommandArgs args) { args.getopt("compiler-status", &m_compilerStatusCode, ["The expected status code of the compiler run"]); args.getopt("compiler-regex", &m_compilerRegex, ["A regular expression used to match against the compiler output"]); args.getopt("linker-status", &m_linkerStatusCode, ["The expected status code of the linker run"]); args.getopt("linker-regex", &m_linkerRegex, ["A regular expression used to match against the linker output"]); args.getopt("program-status", &m_programStatusCode, ["The expected status code of the built executable"]); args.getopt("program-regex", &m_programRegex, ["A regular expression used to match against the program output"]); args.getopt("test-package", &m_testPackage, ["Perform a test run - usually only used internally"]); args.getopt("combined", &this.baseSettings.combined, ["Builds multiple packages with one compiler run"]); args.getopt("no-redirect", &m_noRedirect, ["Don't redirect stdout/stderr streams of the test command"]); args.getopt("strategy", &m_strategy, ["Set strategy (careful/lookback/pingpong/indepth/inbreadth)"]); args.getopt("j", &m_jobCount, ["Set number of look-ahead processes"]); args.getopt("trace", &m_trace, ["Save all attempted reductions to DIR.trace"]); super.prepare(args); // speed up loading when in test mode if (m_testPackage.length) { m_nodeps = true; } } /// Returns: A minimally-initialized dub instance in test mode override Dub prepareDub(CommonOptions options) { if (!m_testPackage.length) return super.prepareDub(options); return new Dub(NativePath(options.root_path), getWorkingDirectory()); } override int execute(Dub dub, string[] free_args, string[] app_args) { import std.format : formattedWrite; if (m_testPackage.length) { setupPackage(dub, UserPackageDesc(m_testPackage)); m_defaultConfig = dub.project.getDefaultConfiguration(this.baseSettings.platform); GeneratorSettings gensettings = this.baseSettings; if (!gensettings.config.length) gensettings.config = 
m_defaultConfig; gensettings.run = m_programStatusCode != int.min || m_programRegex.length; gensettings.runArgs = app_args; gensettings.force = true; gensettings.compileCallback = check(m_compilerStatusCode, m_compilerRegex); gensettings.linkCallback = check(m_linkerStatusCode, m_linkerRegex); gensettings.runCallback = check(m_programStatusCode, m_programRegex); try dub.generateProject("build", gensettings); catch (DustmiteMismatchException) { logInfoNoTag("Dustmite test doesn't match."); return 3; } catch (DustmiteMatchException) { logInfoNoTag("Dustmite test matches."); return 0; } } else { enforceUsage(free_args.length == 1, "Expected destination path."); auto path = NativePath(free_args[0]); path.normalize(); enforceUsage(!path.empty, "Destination path must not be empty."); if (!path.absolute) path = getWorkingDirectory() ~ path; enforceUsage(!path.startsWith(dub.rootPath), "Destination path must not be a sub directory of the tested package!"); setupPackage(dub, UserPackageDesc.init); auto prj = dub.project; if (this.baseSettings.config.empty) this.baseSettings.config = prj.getDefaultConfiguration(this.baseSettings.platform); void copyFolderRec(NativePath folder, NativePath dstfolder) { ensureDirectory(dstfolder); foreach (de; iterateDirectory(folder)) { if (de.name.startsWith(".")) continue; if (de.isDirectory) { copyFolderRec(folder ~ de.name, dstfolder ~ de.name); } else { if (de.name.endsWith(".o") || de.name.endsWith(".obj")) continue; if (de.name.endsWith(".exe")) continue; try copyFile(folder ~ de.name, dstfolder ~ de.name); catch (Exception e) { logWarn("Failed to copy file %s: %s", (folder ~ de.name).toNativeString(), e.msg); } } } } static void fixPathDependency(in PackageName name, ref Dependency dep) { dep.visit!( (NativePath path) { dep = Dependency(NativePath("../") ~ name.main.toString()); }, (any) { /* Nothing to do */ }, ); } void fixPathDependencies(ref PackageRecipe recipe, NativePath base_path) { foreach (name, ref dep; 
recipe.buildSettings.dependencies) fixPathDependency(PackageName(name), dep); foreach (ref cfg; recipe.configurations) foreach (name, ref dep; cfg.buildSettings.dependencies) fixPathDependency(PackageName(name), dep); foreach (ref subp; recipe.subPackages) if (subp.path.length) { auto sub_path = base_path ~ NativePath(subp.path); auto pack = dub.packageManager.getOrLoadPackage(sub_path); fixPathDependencies(pack.recipe, sub_path); pack.storeInfo(sub_path); } else fixPathDependencies(subp.recipe, base_path); } bool[string] visited; foreach (pack_; prj.getTopologicalPackageList()) { auto pack = pack_.basePackage; if (pack.name in visited) continue; visited[pack.name] = true; auto dst_path = path ~ pack.name; logInfo("Prepare", Color.light_blue, "Copy package %s to destination folder...", pack.name.color(Mode.bold)); copyFolderRec(pack.path, dst_path); // adjust all path based dependencies fixPathDependencies(pack.recipe, dst_path); // overwrite package description file with additional version information pack.storeInfo(dst_path); } logInfo("Starting", Color.light_green, "Executing dustmite..."); auto testcmd = appender!string(); testcmd.formattedWrite("%s dustmite --test-package=%s --build=%s --config=%s", std.file.thisExePath, prj.name, this.baseSettings.buildType, this.baseSettings.config); if (m_compilerName.length) testcmd.formattedWrite(" \"--compiler=%s\"", m_compilerName); if (m_arch.length) testcmd.formattedWrite(" --arch=%s", m_arch); if (m_compilerStatusCode != int.min) testcmd.formattedWrite(" --compiler-status=%s", m_compilerStatusCode); if (m_compilerRegex.length) testcmd.formattedWrite(" \"--compiler-regex=%s\"", m_compilerRegex); if (m_linkerStatusCode != int.min) testcmd.formattedWrite(" --linker-status=%s", m_linkerStatusCode); if (m_linkerRegex.length) testcmd.formattedWrite(" \"--linker-regex=%s\"", m_linkerRegex); if (m_programStatusCode != int.min) testcmd.formattedWrite(" --program-status=%s", m_programStatusCode); if (m_programRegex.length) 
testcmd.formattedWrite(" \"--program-regex=%s\"", m_programRegex); if (this.baseSettings.combined) testcmd ~= " --combined"; // --vquiet swallows dustmite's output ... if (!m_noRedirect) testcmd ~= " --vquiet"; // TODO: pass *all* original parameters logDiagnostic("Running dustmite: %s", testcmd); string[] extraArgs; if (m_noRedirect) extraArgs ~= "--no-redirect"; if (m_strategy.length) extraArgs ~= "--strategy=" ~ m_strategy; if (m_jobCount) extraArgs ~= "-j" ~ m_jobCount.to!string; if (m_trace) extraArgs ~= "--trace"; const cmd = "dustmite" ~ extraArgs ~ [path.toNativeString(), testcmd.data]; auto dmpid = spawnProcess(cmd); return dmpid.wait(); } return 0; } void delegate(int, string) check(int code_match, string regex_match) { return (code, output) { import std.encoding; import std.regex; logInfo("%s", output); if (code_match != int.min && code != code_match) { logInfo("Exit code %s doesn't match expected value %s", code, code_match); throw new DustmiteMismatchException; } if (regex_match.length > 0 && !match(output.sanitize, regex_match)) { logInfo("Output doesn't match regex:"); logInfo("%s", output); throw new DustmiteMismatchException; } if (code != 0 && code_match != int.min || regex_match.length > 0) { logInfo("Tool failed, but matched either exit code or output - counting as match."); throw new DustmiteMatchException; } }; } static class DustmiteMismatchException : Exception { this(string message = "", string file = __FILE__, int line = __LINE__, Throwable next = null) { super(message, file, line, next); } } static class DustmiteMatchException : Exception { this(string message = "", string file = __FILE__, int line = __LINE__, Throwable next = null) { super(message, file, line, next); } } } /******************************************************************************/ /* CONVERT command */ /******************************************************************************/ class ConvertCommand : Command { private { string m_format; bool m_stdout; } this() 
@safe pure nothrow { this.name = "convert"; this.argumentsPattern = ""; this.description = "Converts the file format of the package recipe."; this.helpText = [ "This command will convert between JSON and SDLang formatted package recipe files.", "", "Warning: Beware that any formatting and comments within the package recipe will get lost in the conversion process." ]; } override void prepare(scope CommandArgs args) { args.getopt("f|format", &m_format, ["Specifies the target package recipe format. Possible values:", " json, sdl"]); args.getopt("s|stdout", &m_stdout, ["Outputs the converted package recipe to stdout instead of writing to disk."]); } override int execute(Dub dub, string[] free_args, string[] app_args) { enforceUsage(app_args.length == 0, "Unexpected application arguments."); enforceUsage(free_args.length == 0, "Unexpected arguments: "~free_args.join(" ")); enforceUsage(m_format.length > 0, "Missing target format file extension (--format=...)."); if (!loadCwdPackage(dub, true)) return 2; dub.convertRecipe(m_format, m_stdout); return 0; } } /******************************************************************************/ /* HELP */ /******************************************************************************/ private { enum shortArgColumn = 2; enum longArgColumn = 6; enum descColumn = 24; enum lineWidth = 80 - 1; } private void showHelp(in CommandGroup[] commands, CommandArgs common_args) { writeln( `USAGE: dub [--version] [] [] [-- []] Manages the DUB project in the current directory. If the command is omitted, DUB will default to "run". When running an application, "--" can be used to separate DUB options from options passed to the application. Run "dub --help" to get help for a specific command. You can use the "http_proxy" environment variable to configure a proxy server to be used for fetching packages. 
Available commands
==================`);
	foreach (grp; commands) {
		writeln();
		writeWS(shortArgColumn);
		writeln(grp.caption);
		writeWS(shortArgColumn);
		writerep!'-'(grp.caption.length);
		writeln();
		foreach (cmd; grp.commands) {
			if (cmd.hidden) continue;
			writeWS(shortArgColumn);
			writef("%s %s", cmd.name, cmd.argumentsPattern);
			auto chars_output = cmd.name.length + cmd.argumentsPattern.length + shortArgColumn + 1;
			// if the name + argument pattern overflow the description column,
			// start the description on a fresh line instead
			if (chars_output < descColumn) {
				writeWS(descColumn - chars_output);
			} else {
				writeln();
				writeWS(descColumn);
			}
			writeWrapped(cmd.description, descColumn, descColumn);
		}
	}
	writeln();
	writeln();
	writeln(`Common options`);
	writeln(`==============`);
	writeln();
	writeOptions(common_args);
	writeln();
	showVersion();
}

/// Prints the DUB version string (version number and build date) to stdout.
private void showVersion()
{
	writefln("DUB version %s, built on %s", getDUBVersion(), __DATE__);
}

/// Prints usage line, help text, command-specific options and common options
/// for a single command (as shown by `dub <command> -h`).
private void showCommandHelp(Command cmd, CommandArgs args, CommandArgs common_args)
{
	writefln(`USAGE: dub %s %s []%s`, cmd.name, cmd.argumentsPattern, cmd.acceptsAppArgs ? " [-- ]": null);
	writeln();
	foreach (ln; cmd.helpText)
		ln.writeWrapped();

	if (args.recognizedArgs.length) {
		writeln();
		writeln();
		writeln("Command specific options");
		writeln("========================");
		writeln();
		writeOptions(args);
	}

	writeln();
	writeln();
	writeln("Common options");
	writeln("==============");
	writeln();
	writeOptions(common_args);
	writeln();
	writefln("DUB version %s, built on %s", getDUBVersion(), __DATE__);
}

/// Prints the (non-hidden) options recognized by `args` to stdout,
/// aligned into short-option, long-option and description columns.
private void writeOptions(CommandArgs args)
{
	foreach (arg; args.recognizedArgs) {
		if (arg.hidden) continue;
		auto names = arg.names.split("|");
		assert(names.length == 1 || names.length == 2);
		string sarg = names[0].length == 1 ? names[0] : null;
		string larg = names[0].length > 1 ? names[0] : names.length > 1 ?
names[1] : null;

		if (sarg !is null) {
			writeWS(shortArgColumn);
			writef("-%s", sarg);
			writeWS(longArgColumn - shortArgColumn - 2);
		} else writeWS(longArgColumn);
		size_t col = longArgColumn;
		if (larg !is null) {
			arg.defaultValue.match!(
				(bool b) {
					writef("--%s", larg);
					col += larg.length + 2;
				},
				(_) {
					writef("--%s=VALUE", larg);
					col += larg.length + 8;
				}
			);
		}
		// break onto a fresh line when the option name overflows the description column
		if (col < descColumn) {
			writeWS(descColumn - col);
		} else {
			writeln();
			writeWS(descColumn);
		}
		foreach (i, ln; arg.helpText) {
			if (i > 0) writeWS(descColumn);
			ln.writeWrapped(descColumn, descColumn);
		}
	}
}

/** Writes `text` to stdout, word-wrapped to `lineWidth` columns.

	Pre-indented text (leading spaces) keeps its indentation on continuation
	lines, and bullet list items ("- ") get their continuation lines indented
	past the bullet marker.

	Params:
		text = the text to print
		indent = number of columns continuation lines are indented by
		first_line_pos = number of columns already written on the current
			output line; that many characters of the wrapped result are
			assumed to be present already and are not printed again
*/
private void writeWrapped(string text, size_t indent = 0, size_t first_line_pos = 0)
{
	// handle pre-indented strings and bullet lists
	size_t first_line_indent = 0;
	while (text.startsWith(" ")) {
		text = text[1 .. $];
		indent++;
		first_line_indent++;
	}
	if (text.startsWith("- ")) indent += 2;
	auto wrapped = text.wrap(lineWidth, getRepString!' '(first_line_pos+first_line_indent), getRepString!' '(indent));
	wrapped = wrapped[first_line_pos .. $];
	foreach (ln; wrapped.splitLines())
		writeln(ln);
}

/// Writes `num` spaces to stdout.
private void writeWS(size_t num) { writerep!' '(num); }

/// Writes `num` repetitions of `ch` to stdout.
private void writerep(char ch)(size_t num) { write(getRepString!ch(num)); }

/// Returns a string consisting of `len` times `ch`, served from a growing
/// thread-local cache so repeated calls do not reallocate.
private string getRepString(char ch)(size_t len)
{
	static string buf;
	if (len > buf.length) buf ~= [ch].replicate(len-buf.length);
	return buf[0 .. len];
}

/// Throws a `UsageException` with the given message when `cond` is false.
private void enforceUsage(bool cond, string text)
{
	if (!cond) throw new UsageException(text);
}

/// Signals incorrect command line usage; carries the message to show the user
/// alongside the usage information.
private class UsageException : Exception {
	this(string message, string file = __FILE__, int line = __LINE__, Throwable next = null)
	{
		super(message, file, line, next);
	}
}

/** Resolves `depspec` and adds it to `recipe`'s dependency list.

	When no version specifier is given, the latest version known to the
	registry is looked up and turned into a dependency (branch or "~>" range).

	Returns: `true` on success, `false` if the package could not be found.
*/
private bool addDependency(Dub dub, ref PackageRecipe recipe, string depspec)
{
	Dependency dep;
	const parts = UserPackageDesc.fromString(depspec);
	const depname = PackageName(parts.name);
	if (parts.range == VersionRange.Any) {
		try {
			const ver = dub.getLatestVersion(depname);
			dep = ver.isBranch ?
Dependency(ver) : Dependency("~>" ~ ver.toString());
		} catch (Exception e) {
			logError("Could not find package '%s'.", depname);
			logDebug("Full error: %s", e.toString().sanitize);
			return false;
		}
	} else dep = Dependency(parts.range);
	recipe.buildSettings.dependencies[depname.toString()] = dep;
	logInfo("Adding dependency %s %s", depname, dep.toString());
	return true;
}

/**
 * A user-provided package description
 *
 * User provided package description currently only covers packages
 * referenced by their name with an associated version.
 * Hence there is an implicit assumption that they are in the registry.
 * Future improvements could support `Dependency` instead of `VersionRange`.
 */
private struct UserPackageDesc
{
	// Package name without any version part
	string name;
	// Version range the user asked for; `Any` when none was specified
	VersionRange range = VersionRange.Any;

	/// Provides a string representation for the user
	public string toString() const
	{
		if (this.range.matchesAny())
			return this.name;
		return format("%s@%s", this.name, range);
	}

	/**
	 * Breaks down a user-provided string into its name and version range
	 *
	 * User-provided strings (via the command line) are either in the form
	 * `<name>=<version>` or `<name>@<version>`.
	 * As it is more explicit, we recommend the latter (the `@` character
	 * is not used by names or `VersionRange`, but `=` is).
	 *
	 * If no version range is provided, the returned struct has its `range`
	 * property set to `VersionRange.Any` as this is the most usual usage
	 * in the command line. Some callers may want to distinguish between
	 * user-provided version and implicit version, but this is discouraged.
	 *
	 * Params:
	 *   packageName = User-provided string
	 *
	 * Returns:
	 *   A populated struct.
	 */
	static UserPackageDesc fromString(string packageName)
	{
		// split on '@' first (the recommended separator)
		auto parts = packageName.findSplit("@");
		if (parts[1].empty) {
			// fall back to splitting on '='
			parts = packageName.findSplit("=");
		}
		UserPackageDesc p;
		p.name = parts[0];
		p.range = !parts[1].empty ?
VersionRange.fromString(parts[2]) : VersionRange.Any; return p; } } unittest { // https://github.com/dlang/dub/issues/1681 assert(UserPackageDesc.fromString("") == UserPackageDesc("", VersionRange.Any)); assert(UserPackageDesc.fromString("foo") == UserPackageDesc("foo", VersionRange.Any)); assert(UserPackageDesc.fromString("foo=1.0.1") == UserPackageDesc("foo", VersionRange.fromString("1.0.1"))); assert(UserPackageDesc.fromString("foo@1.0.1") == UserPackageDesc("foo", VersionRange.fromString("1.0.1"))); assert(UserPackageDesc.fromString("foo@==1.0.1") == UserPackageDesc("foo", VersionRange.fromString("==1.0.1"))); assert(UserPackageDesc.fromString("foo@>=1.0.1") == UserPackageDesc("foo", VersionRange.fromString(">=1.0.1"))); assert(UserPackageDesc.fromString("foo@~>1.0.1") == UserPackageDesc("foo", VersionRange.fromString("~>1.0.1"))); assert(UserPackageDesc.fromString("foo@<1.0.1") == UserPackageDesc("foo", VersionRange.fromString("<1.0.1"))); } private ulong canFindVersionSplitter(string packageName) { // see UserPackageDesc.fromString return packageName.canFind("@", "="); } unittest { assert(!canFindVersionSplitter("foo")); assert(canFindVersionSplitter("foo=1.0.1")); assert(canFindVersionSplitter("foo@1.0.1")); assert(canFindVersionSplitter("foo@==1.0.1")); assert(canFindVersionSplitter("foo@>=1.0.1")); assert(canFindVersionSplitter("foo@~>1.0.1")); assert(canFindVersionSplitter("foo@<1.0.1")); } dub-1.40.0/source/dub/compilers/000077500000000000000000000000001477246567400164435ustar00rootroot00000000000000dub-1.40.0/source/dub/compilers/buildsettings.d000066400000000000000000000503141477246567400214730ustar00rootroot00000000000000/** Build settings definitions. Copyright: © 2013-2014 rejectedsoftware e.K. License: Subject to the terms of the MIT license, as written in the included LICENSE.txt file. 
Authors: Sönke Ludwig */ module dub.compilers.buildsettings; import dub.internal.vibecompat.inet.path; import dub.internal.configy.Attributes; import std.array : array; import std.algorithm : filter, any; import std.path : globMatch; import std.typecons : BitFlags; import std.algorithm.iteration : uniq; import std.range : chain; /// BuildPlatform specific settings, like needed libraries or additional /// include paths. struct BuildSettings { import dub.internal.vibecompat.data.serialization : byName; TargetType targetType; string targetPath; string targetName; string workingDirectory; string mainSourceFile; string[] dflags; string[] lflags; string[] libs; string[] linkerFiles; string[] sourceFiles; string[] injectSourceFiles; string[] copyFiles; string[] extraDependencyFiles; string[] versions; string[] debugVersions; string[] versionFilters; string[] debugVersionFilters; string[] importPaths; string[] cImportPaths; string[] stringImportPaths; string[] importFiles; string[] stringImportFiles; string[] preGenerateCommands; string[] postGenerateCommands; string[] preBuildCommands; string[] postBuildCommands; string[] preRunCommands; string[] postRunCommands; string[string] environments; string[string] buildEnvironments; string[string] runEnvironments; string[string] preGenerateEnvironments; string[string] postGenerateEnvironments; string[string] preBuildEnvironments; string[string] postBuildEnvironments; string[string] preRunEnvironments; string[string] postRunEnvironments; @byName Flags!BuildRequirement requirements; @byName Flags!BuildOption options; BuildSettings dup() const { import std.traits: FieldNameTuple; import std.algorithm: map; import std.typecons: tuple; import std.array: assocArray; BuildSettings ret; foreach (m; FieldNameTuple!BuildSettings) { static if (is(typeof(__traits(getMember, ret, m) = __traits(getMember, this, m).dup))) __traits(getMember, ret, m) = __traits(getMember, this, m).dup; else static if (is(typeof(add(__traits(getMember, ret, m), 
__traits(getMember, this, m))))) add(__traits(getMember, ret, m), __traits(getMember, this, m)); else static if (is(typeof(__traits(getMember, ret, m) = __traits(getMember, this, m)))) __traits(getMember, ret, m) = __traits(getMember, this, m); else static assert(0, "Cannot duplicate BuildSettings." ~ m); } assert(ret.targetType == targetType); assert(ret.targetName == targetName); assert(ret.importPaths == importPaths); assert(ret.cImportPaths == cImportPaths); return ret; } /** * Merges $(LREF bs) onto `this` BuildSettings instance. This is called for * sourceLibrary dependencies when they are included in the build to be * merged into the root package build settings as well as configuring * targets for different build types such as `release` or `unittest-cov`. */ void add(in BuildSettings bs) { addDFlags(bs.dflags); addLFlags(bs.lflags); addLibs(bs.libs); addLinkerFiles(bs.linkerFiles); addSourceFiles(bs.sourceFiles); addInjectSourceFiles(bs.injectSourceFiles); addCopyFiles(bs.copyFiles); addExtraDependencyFiles(bs.extraDependencyFiles); addVersions(bs.versions); addDebugVersions(bs.debugVersions); addVersionFilters(bs.versionFilters); addDebugVersionFilters(bs.debugVersionFilters); addImportPaths(bs.importPaths); addCImportPaths(bs.cImportPaths); addStringImportPaths(bs.stringImportPaths); addImportFiles(bs.importFiles); addStringImportFiles(bs.stringImportFiles); addPreGenerateCommands(bs.preGenerateCommands); addPostGenerateCommands(bs.postGenerateCommands); addPreBuildCommands(bs.preBuildCommands); addPostBuildCommands(bs.postBuildCommands); addPreRunCommands(bs.preRunCommands); addPostRunCommands(bs.postRunCommands); addEnvironments(bs.environments); addBuildEnvironments(bs.buildEnvironments); addRunEnvironments(bs.runEnvironments); addPreGenerateEnvironments(bs.preGenerateEnvironments); addPostGenerateEnvironments(bs.postGenerateEnvironments); addPreBuildEnvironments(bs.preBuildEnvironments); addPostBuildEnvironments(bs.postBuildEnvironments); 
addPreRunEnvironments(bs.preRunEnvironments); addPostRunEnvironments(bs.postRunEnvironments); addRequirements(bs.requirements); addOptions(bs.options); } void addDFlags(in string[] value...) { dflags = chain(dflags, value.dup).uniq.array; } void prependDFlags(in string[] value...) { prepend(dflags, value); } void removeDFlags(in string[] value...) { remove(dflags, value); } void addLFlags(in string[] value...) { lflags ~= value; } void prependLFlags(in string[] value...) { prepend(lflags, value, false); } void addLibs(in string[] value...) { add(libs, value); } void addLinkerFiles(in string[] value...) { add(linkerFiles, value); } void addSourceFiles(in string[] value...) { add(sourceFiles, value); } void prependSourceFiles(in string[] value...) { prepend(sourceFiles, value); } void removeSourceFiles(in string[] value...) { removePaths(sourceFiles, value); } void addInjectSourceFiles(in string[] value...) { add(injectSourceFiles, value); } void addCopyFiles(in string[] value...) { add(copyFiles, value); } void addExtraDependencyFiles(in string[] value...) { add(extraDependencyFiles, value); } void addVersions(in string[] value...) { add(versions, value); } void addDebugVersions(in string[] value...) { add(debugVersions, value); } void addVersionFilters(in string[] value...) { add(versionFilters, value); } void addDebugVersionFilters(in string[] value...) { add(debugVersionFilters, value); } void addImportPaths(in string[] value...) { add(importPaths, value); } void addCImportPaths(in string[] value...) { add(cImportPaths, value); } void addStringImportPaths(in string[] value...) { add(stringImportPaths, value); } void prependStringImportPaths(in string[] value...) { prepend(stringImportPaths, value); } void addImportFiles(in string[] value...) { add(importFiles, value); } void addStringImportFiles(in string[] value...) { addSI(stringImportFiles, value); } void addPreGenerateCommands(in string[] value...) 
{ add(preGenerateCommands, value, false); } void addPostGenerateCommands(in string[] value...) { add(postGenerateCommands, value, false); } void addPreBuildCommands(in string[] value...) { add(preBuildCommands, value, false); } void addPostBuildCommands(in string[] value...) { add(postBuildCommands, value, false); } void addPreRunCommands(in string[] value...) { add(preRunCommands, value, false); } void addPostRunCommands(in string[] value...) { add(postRunCommands, value, false); } void addEnvironments(in string[string] value) { add(environments, value); } void updateEnvironments(in string[string] value) { update(environments, value); } void addBuildEnvironments(in string[string] value) { add(buildEnvironments, value); } void updateBuildEnvironments(in string[string] value) { update(buildEnvironments, value); } void addRunEnvironments(in string[string] value) { add(runEnvironments, value); } void updateRunEnvironments(in string[string] value) { update(runEnvironments, value); } void addPreGenerateEnvironments(in string[string] value) { add(preGenerateEnvironments, value); } void updatePreGenerateEnvironments(in string[string] value) { update(preGenerateEnvironments, value); } void addPostGenerateEnvironments(in string[string] value) { add(postGenerateEnvironments, value); } void updatePostGenerateEnvironments(in string[string] value) { update(postGenerateEnvironments, value); } void addPreBuildEnvironments(in string[string] value) { add(preBuildEnvironments, value); } void updatePreBuildEnvironments(in string[string] value) { update(preBuildEnvironments, value); } void addPostBuildEnvironments(in string[string] value) { add(postBuildEnvironments, value); } void updatePostBuildEnvironments(in string[string] value) { update(postBuildEnvironments, value); } void addPreRunEnvironments(in string[string] value) { add(preRunEnvironments, value); } void updatePreRunEnvironments(in string[string] value) { update(preRunEnvironments, value); } void addPostRunEnvironments(in 
string[string] value) { add(postRunEnvironments, value); } void updatePostRunEnvironments(in string[string] value) { update(postRunEnvironments, value); } void addRequirements(in BuildRequirement[] value...) { foreach (v; value) this.requirements |= v; } void addRequirements(in Flags!BuildRequirement value) { this.requirements |= value; } void addOptions(in BuildOption[] value...) { foreach (v; value) this.options |= v; } void addOptions(in Flags!BuildOption value) { this.options |= value; } void removeOptions(in BuildOption[] value...) { foreach (v; value) this.options &= ~v; } void removeOptions(in Flags!BuildOption value) { this.options &= ~value; } private: static auto filterDuplicates(T)(ref string[] arr, in T vals, bool noDuplicates = true) { return noDuplicates ? vals.filter!(filtered => !arr.any!(item => item == filtered)).array : vals; } // Append `vals` to `arr` without adding duplicates. static void add(ref string[] arr, in string[] vals, bool noDuplicates = true) { // vals might contain duplicates, add each val individually foreach (val; vals) arr ~= filterDuplicates(arr, [val], noDuplicates); } // Append `vals` to `aa` static void add(ref string[string] aa, in string[string] vals) { // vals might contain duplicated keys, add each val individually foreach (key, val; vals) if (key !in aa) aa[key] = val; } // Update `vals` to `aa` static void update(ref string[string] aa, in string[string] vals) { // If there are duplicate keys, they will be ignored and overwritten. foreach (key, val; vals) aa[key] = val; } unittest { auto ary = ["-dip1000", "-vgc"]; BuildSettings.add(ary, ["-dip1000", "-vgc"]); assert(ary == ["-dip1000", "-vgc"]); BuildSettings.add(ary, ["-dip1001", "-vgc"], false); assert(ary == ["-dip1000", "-vgc", "-dip1001", "-vgc"]); BuildSettings.add(ary, ["-dupflag", "-notdupflag", "-dupflag"]); assert(ary == ["-dip1000", "-vgc", "-dip1001", "-vgc", "-dupflag", "-notdupflag"]); } // Prepend `arr` by `vals` without adding duplicates. 
static void prepend(ref string[] arr, in string[] vals, bool noDuplicates = true) { import std.range : retro; // vals might contain duplicates, add each val individually foreach (val; vals.retro) arr = filterDuplicates(arr, [val], noDuplicates) ~ arr; } unittest { auto ary = ["-dip1000", "-vgc"]; BuildSettings.prepend(ary, ["-dip1000", "-vgc"]); assert(ary == ["-dip1000", "-vgc"]); BuildSettings.prepend(ary, ["-dip1001", "-vgc"], false); assert(ary == ["-dip1001", "-vgc", "-dip1000", "-vgc"]); BuildSettings.prepend(ary, ["-dupflag", "-notdupflag", "-dupflag"]); assert(ary == ["-notdupflag", "-dupflag", "-dip1001", "-vgc", "-dip1000", "-vgc"]); } // add string import files (avoids file name duplicates in addition to path duplicates) static void addSI(ref string[] arr, in string[] vals) { bool[string] existing; foreach (v; arr) existing[NativePath(v).head.name] = true; foreach (v; vals) { auto s = NativePath(v).head.name; if (s !in existing) { existing[s] = true; arr ~= v; } } } unittest { auto ary = ["path/foo.txt"]; BuildSettings.addSI(ary, ["path2/foo2.txt"]); assert(ary == ["path/foo.txt", "path2/foo2.txt"]); BuildSettings.addSI(ary, ["path2/foo.txt"]); // no duplicate basenames assert(ary == ["path/foo.txt", "path2/foo2.txt"]); } static bool pathMatch(string path, string pattern) { import std.functional : memoize; alias nativePath = memoize!((string stringPath) => NativePath(stringPath)); return nativePath(path) == nativePath(pattern) || globMatch(path, pattern); } static void removeValuesFromArray(alias Match)(ref string[] arr, in string[] vals) { bool matches(string s) { return vals.any!(item => Match(s, item)); } arr = arr.filter!(s => !matches(s)).array; } static void removePaths(ref string[] arr, in string[] vals) { removeValuesFromArray!(pathMatch)(arr, vals); } unittest { auto ary = ["path1", "root/path1", "root/path2", "root2/path1"]; BuildSettings.removePaths(ary, ["path1"]); assert(ary == ["root/path1", "root/path2", "root2/path1"]); 
BuildSettings.removePaths(ary, ["*/path1"]); assert(ary == ["root/path2"]); BuildSettings.removePaths(ary, ["foo", "bar", "root/path2"]); assert(ary == []); } static void remove(ref string[] arr, in string[] vals) { removeValuesFromArray!((a, b) => a == b)(arr, vals); } unittest { import std.string : join; auto ary = ["path1", "root/path1", "root/path2", "root2/path1"]; BuildSettings.remove(ary, ["path1"]); assert(ary == ["root/path1", "root/path2", "root2/path1"]); BuildSettings.remove(ary, ["root/path*"]); assert(ary == ["root/path1", "root/path2", "root2/path1"]); BuildSettings.removePaths(ary, ["foo", "root/path2", "bar", "root2/path1"]); assert(ary == ["root/path1"]); BuildSettings.remove(ary, ["root/path1", "foo"]); assert(ary == []); } } enum BuildSetting { dflags = 1<<0, lflags = 1<<1, libs = 1<<2, sourceFiles = 1<<3, copyFiles = 1<<4, versions = 1<<5, debugVersions = 1<<6, importPaths = 1<<7, cImportPaths = 1<<8, stringImportPaths = 1<<9, options = 1<<10, none = 0, commandLine = dflags|copyFiles, commandLineSeparate = commandLine|lflags, all = dflags|lflags|libs|sourceFiles|copyFiles|versions|debugVersions|importPaths|cImportPaths|stringImportPaths|options, noOptions = all & ~options } enum TargetType { autodetect, none, executable, library, sourceLibrary, dynamicLibrary, staticLibrary, object } enum BuildRequirement { none = 0, /// No special requirements allowWarnings = 1<<0, /// Warnings do not abort compilation silenceWarnings = 1<<1, /// Don't show warnings disallowDeprecations = 1<<2, /// Using deprecated features aborts compilation silenceDeprecations = 1<<3, /// Don't show deprecation warnings disallowInlining = 1<<4, /// Avoid function inlining, even in release builds disallowOptimization = 1<<5, /// Avoid optimizations, even in release builds requireBoundsCheck = 1<<6, /// Always perform bounds checks requireContracts = 1<<7, /// Leave assertions and contracts enabled in release builds relaxProperties = 1<<8, /// DEPRECATED: Do not enforce strict 
property handling (-property) noDefaultFlags = 1<<9, /// Do not issue any of the default build flags (e.g. -debug, -w, -property etc.) - use only for development purposes } enum BuildOption { none = 0, /// Use compiler defaults debugMode = 1<<0, /// Compile in debug mode (enables contracts, -debug) releaseMode = 1<<1, /// Compile in release mode (disables assertions and bounds checks, -release) coverage = 1<<2, /// Enable code coverage analysis (-cov) debugInfo = 1<<3, /// Enable symbolic debug information (-g) debugInfoC = 1<<4, /// Enable symbolic debug information in C compatible form (-gc) alwaysStackFrame = 1<<5, /// Always generate a stack frame (-gs) stackStomping = 1<<6, /// Perform stack stomping (-gx) inline = 1<<7, /// Perform function inlining (-inline) noBoundsCheck = 1<<8, /// Disable all bounds checking (-noboundscheck) optimize = 1<<9, /// Enable optimizations (-O) profile = 1<<10, /// Emit profiling code (-profile) unittests = 1<<11, /// Compile unit tests (-unittest) verbose = 1<<12, /// Verbose compiler output (-v) ignoreUnknownPragmas = 1<<13, /// Ignores unknown pragmas during compilation (-ignore) syntaxOnly = 1<<14, /// Don't generate object files (-o-) warnings = 1<<15, /// Enable warnings (-wi) warningsAsErrors = 1<<16, /// Treat warnings as errors (-w) ignoreDeprecations = 1<<17, /// Do not warn about using deprecated features (-d) deprecationWarnings = 1<<18, /// Warn about using deprecated features (-dw) deprecationErrors = 1<<19, /// Stop compilation upon usage of deprecated features (-de) property = 1<<20, /// DEPRECATED: Enforce property syntax (-property) profileGC = 1<<21, /// Profile runtime allocations pic = 1<<22, /// Generate position independent code betterC = 1<<23, /// Compile in betterC mode (-betterC) lowmem = 1<<24, /// Compile in low-memory mode (-lowmem) coverageCTFE = 1<<25, /// Enable code coverage analysis including at compile-time (-cov=ctfe) color = 1<<26, /// Colorize output (-color) // for internal usage _docs = 
1<<27, // Write ddoc to docs _ddox = 1<<28, // Compile docs.json } struct Flags (T) { import dub.internal.vibecompat.data.serialization : ignore; import dub.internal.vibecompat.data.json : Json; @ignore BitFlags!T values; public this(T opt) @safe pure nothrow @nogc { this.values = opt; } public this(BitFlags!T v) @safe pure nothrow @nogc { this.values = v; } alias values this; public Json toJson() const { import std.conv : to; import std.traits : EnumMembers; auto json = Json.emptyArray; static foreach (em; EnumMembers!T) { static if (em != 0) { if (values & em) { json ~= em.to!string; } } } return json; } public static Flags!T fromJson(Json json) { import std.conv : to; import std.exception : enforce; BitFlags!T flags; enforce(json.type == Json.Type.array, "Should be an array"); foreach (jval; json) { flags |= jval.get!string.to!T; } return Flags!T(flags); } /** * Reads a list of flags from a JSON/YAML document and converts them * to our internal representation. * * Flags inside of dub code are stored as a `BitFlags`, * but they are specified in the recipe using an array of their name. * This routine handles the conversion from `string[]` to `BitFlags!T`. 
*/ public static Flags!T fromYAML (scope ConfigParser!(Flags!T) p) { import dub.internal.dyaml.node; import std.exception; import std.conv; enforce(p.node.nodeID == NodeID.sequence, "Should be a sequence"); typeof(return) res; foreach (str; p.node.sequence) res |= str.as!string.to!T; return res; } } unittest { import dub.internal.vibecompat.data.json; auto opts = Flags!BuildOption(BuildOption.debugMode | BuildOption.debugInfo | BuildOption.warningsAsErrors); const str = serializeToJsonString(opts); assert(str == `["debugMode","debugInfo","warningsAsErrors"]`); assert(deserializeJson!(typeof(opts))(str) == opts); } unittest { import dub.internal.configy.Read; static struct Config { Flags!BuildRequirement flags; } auto c = parseConfigString!Config(` { "flags": [ "allowWarnings", "noDefaultFlags", "disallowInlining" ] } `, __FILE__); assert(c.flags.allowWarnings); c.flags.allowWarnings = false; assert(c.flags.noDefaultFlags); c.flags.noDefaultFlags = false; assert(c.flags.disallowInlining); c.flags.disallowInlining = false; assert(c.flags == c.flags.init); } /** All build options that will be inherited upwards in the dependency graph Build options in this category fulfill one of the following properties: $(UL $(LI The option affects the semantics of the generated code) $(LI The option affects if a certain piece of code is valid or not) $(LI The option enabled meta information in dependent projects that are useful for the dependee (e.g. 
debug information)) ) */ enum Flags!BuildOption inheritedBuildOptions = BuildOption.debugMode | BuildOption.releaseMode | BuildOption.coverage | BuildOption.coverageCTFE | BuildOption.debugInfo | BuildOption.debugInfoC | BuildOption.alwaysStackFrame | BuildOption.stackStomping | BuildOption.inline | BuildOption.noBoundsCheck | BuildOption.profile | BuildOption.ignoreUnknownPragmas | BuildOption.syntaxOnly | BuildOption.warnings | BuildOption.warningsAsErrors | BuildOption.ignoreDeprecations | BuildOption.deprecationWarnings | BuildOption.deprecationErrors | BuildOption.property | BuildOption.profileGC | BuildOption.pic; deprecated("Use `Flags!BuildOption` instead") public alias BuildOptions = Flags!BuildOption; deprecated("Use `Flags!BuildRequirement` instead") public alias BuildRequirements = Flags!BuildRequirement; dub-1.40.0/source/dub/compilers/compiler.d000066400000000000000000000175411477246567400204320ustar00rootroot00000000000000/** Compiler settings and abstraction. Copyright: © 2013-2016 rejectedsoftware e.K. License: Subject to the terms of the MIT license, as written in the included LICENSE.txt file. Authors: Sönke Ludwig */ module dub.compilers.compiler; public import dub.compilers.buildsettings; deprecated("Please `import dub.dependency : Dependency` instead") public import dub.dependency : Dependency; public import dub.platform : BuildPlatform, matchesSpecification; import dub.internal.vibecompat.inet.path; import dub.internal.vibecompat.core.file; import dub.internal.logging; import std.algorithm; import std.array; import std.exception; import std.process; /// Exception thrown in Compiler.determinePlatform if the given architecture is /// not supported. 
class UnsupportedArchitectureException : Exception { this(string architecture, string file = __FILE__, size_t line = __LINE__, Throwable nextInChain = null) pure nothrow @safe { super("Unsupported architecture: "~architecture, file, line, nextInChain); } } /// Exception thrown in getCompiler if no compiler matches the given name. class UnknownCompilerException : Exception { this(string compilerName, string file = __FILE__, size_t line = __LINE__, Throwable nextInChain = null) pure nothrow @safe { super("Unknown compiler: "~compilerName, file, line, nextInChain); } } /// Exception thrown in invokeTool and probePlatform if running the compiler /// returned non-zero exit code. class CompilerInvocationException : Exception { this(string msg, string file = __FILE__, size_t line = __LINE__, Throwable nextInChain = null) pure nothrow @safe { super(msg, file, line, nextInChain); } } /** Returns a compiler handler for a given binary name. The name will be compared against the canonical name of each registered compiler handler. If no match is found, the sub strings "dmd", "gdc" and "ldc", in this order, will be searched within the name. If this doesn't yield a match either, an $(LREF UnknownCompilerException) will be thrown. */ Compiler getCompiler(string name) { foreach (c; s_compilers) if (c.name == name) return c; // try to match names like gdmd or gdc-2.61 if (name.canFind("dmd")) return getCompiler("dmd"); if (name.canFind("gdc")) return getCompiler("gdc"); if (name.canFind("ldc")) return getCompiler("ldc"); throw new UnknownCompilerException(name); } /** Registers a new compiler handler. Note that by default `DMDCompiler`, `GDCCompiler` and `LDCCompiler` are already registered at startup. */ void registerCompiler(Compiler c) { s_compilers ~= c; } interface Compiler { /// Returns the canonical name of the compiler (e.g. "dmd"). @property string name() const; /** Determines the build platform properties given a set of build settings. 
This will invoke the compiler to build a platform probe file, which determines the target build platform's properties during compile-time. See_Also: `dub.compilers.utils.generatePlatformProbeFile` */ BuildPlatform determinePlatform(ref BuildSettings settings, string compiler_binary, string arch_override = null); /// Replaces high level fields with low level fields and converts /// dmd flags to compiler-specific flags void prepareBuildSettings(ref BuildSettings settings, const scope ref BuildPlatform platform, BuildSetting supported_fields = BuildSetting.all) const; /// Removes any dflags that match one of the BuildOptions values and populates the BuildSettings.options field. void extractBuildOptions(ref BuildSettings settings) const; /// Computes the full file name of the generated binary. string getTargetFileName(in BuildSettings settings, in BuildPlatform platform) const; /// Adds the appropriate flag to set a target path void setTarget(ref BuildSettings settings, in BuildPlatform platform, string targetPath = null) const; /// Invokes the compiler using the given flags deprecated("specify the working directory") final void invoke(in BuildSettings settings, in BuildPlatform platform, void delegate(int, string) output_callback) { invoke(settings, platform, output_callback, getWorkingDirectory()); } /// ditto void invoke(in BuildSettings settings, in BuildPlatform platform, void delegate(int, string) output_callback, NativePath cwd); /// Invokes the underlying linker directly deprecated("specify the working directory") final void invokeLinker(in BuildSettings settings, in BuildPlatform platform, string[] objects, void delegate(int, string) output_callback) { invokeLinker(settings, platform, objects, output_callback, getWorkingDirectory()); } /// ditto void invokeLinker(in BuildSettings settings, in BuildPlatform platform, string[] objects, void delegate(int, string) output_callback, NativePath cwd); /// Convert linker flags to compiler format string[] 
lflagsToDFlags(const string[] lflags) const; /// Determines compiler version string determineVersion(string compiler_binary, string verboseOutput); /** Runs a tool and provides common boilerplate code. This method should be used by `Compiler` implementations to invoke the compiler or linker binary. */ deprecated("specify the working directory") protected final void invokeTool(string[] args, void delegate(int, string) output_callback, string[string] env = null) { invokeTool(args, output_callback, getWorkingDirectory(), env); } /// ditto protected final void invokeTool(string[] args, void delegate(int, string) output_callback, NativePath cwd, string[string] env = null) { import std.string; int status; if (output_callback) { auto result = execute(args, env, Config.none, size_t.max, cwd.toNativeString()); output_callback(result.status, result.output); status = result.status; } else { auto compiler_pid = spawnProcess(args, env, Config.none, cwd.toNativeString()); status = compiler_pid.wait(); } version (Posix) if (status == -9) { throw new CompilerInvocationException( format("%s failed with exit code %s. This may indicate that the process has run out of memory.", args[0], status)); } enforce!CompilerInvocationException(status == 0, format("%s failed with exit code %s.", args[0], status)); } /** Default compiler arguments for performing a probe. They should be the D compiler equivalent of "don't output executables" */ protected string[] defaultProbeArgs() const; /** Compiles platform probe file with the specified compiler and parses its output. 
Params: compiler_binary = binary to invoke compiler with arch_flags = compiler specific flags derived from the user's arch override */ protected final BuildPlatform probePlatform(string compiler_binary, string[] arch_flags) { import dub.compilers.utils : generatePlatformProbeFile, readPlatformSDLProbe; import std.string : format, strip; immutable fileArg = generatePlatformProbeFile().toNativeString; auto result = execute(compiler_binary ~ defaultProbeArgs ~ arch_flags ~ fileArg); enforce!CompilerInvocationException(result.status == 0, format("Failed to invoke the compiler %s to determine the build platform: %s", compiler_binary, result.output)); BuildPlatform build_platform = readPlatformSDLProbe(result.output); string ver = determineVersion(compiler_binary, result.output).strip; build_platform.compilerBinary = compiler_binary; if (ver.empty) { logWarn(`Could not probe the compiler version for "%s". ` ~ `Toolchain requirements might be ineffective`, build_platform.compiler); } else { build_platform.compilerVersion = ver; } return build_platform; } } private { Compiler[] s_compilers; } /// Adds the given flags to the build settings if desired, otherwise informs the user package void maybeAddArchFlags(ref BuildSettings settings, bool keep_arch, string[] arch_flags, string arch_override) { if (keep_arch) settings.addDFlags(arch_flags); else if (arch_override.length) { logDebug("Ignoring arch_override '%s' for better caching because it doesn't affect the build", arch_override); } } dub-1.40.0/source/dub/compilers/dmd.d000066400000000000000000000336621477246567400173660ustar00rootroot00000000000000/** DMD compiler support. Copyright: © 2013-2013 rejectedsoftware e.K. License: Subject to the terms of the MIT license, as written in the included LICENSE.txt file. 
Authors: Sönke Ludwig */ module dub.compilers.dmd; import dub.compilers.compiler; import dub.compilers.utils; import dub.internal.utils; import dub.internal.vibecompat.core.file; import dub.internal.vibecompat.inet.path; import dub.internal.logging; import std.algorithm; import std.array; import std.exception; import std.typecons; // Determines whether the specified process is running under WOW64 or an Intel64 of x64 processor. version (Windows) private Nullable!bool isWow64() { // See also: https://docs.microsoft.com/de-de/windows/desktop/api/sysinfoapi/nf-sysinfoapi-getnativesysteminfo import core.sys.windows.windows : GetNativeSystemInfo, SYSTEM_INFO, PROCESSOR_ARCHITECTURE_AMD64; static Nullable!bool result; // A process's architecture won't change over while the process is in memory // Return the cached result if (!result.isNull) return result; SYSTEM_INFO systemInfo; GetNativeSystemInfo(&systemInfo); result = systemInfo.wProcessorArchitecture == PROCESSOR_ARCHITECTURE_AMD64; return result; } class DMDCompiler : Compiler { private static immutable s_options = [ tuple(BuildOption.debugMode, ["-debug"]), tuple(BuildOption.releaseMode, ["-release"]), tuple(BuildOption.coverage, ["-cov"]), tuple(BuildOption.coverageCTFE, ["-cov=ctfe"]), tuple(BuildOption.debugInfo, ["-g"]), tuple(BuildOption.debugInfoC, ["-g"]), tuple(BuildOption.alwaysStackFrame, ["-gs"]), tuple(BuildOption.stackStomping, ["-gx"]), tuple(BuildOption.inline, ["-inline"]), tuple(BuildOption.noBoundsCheck, ["-noboundscheck"]), tuple(BuildOption.optimize, ["-O"]), tuple(BuildOption.profile, ["-profile"]), tuple(BuildOption.unittests, ["-unittest"]), tuple(BuildOption.verbose, ["-v"]), tuple(BuildOption.ignoreUnknownPragmas, ["-ignore"]), tuple(BuildOption.syntaxOnly, ["-o-"]), tuple(BuildOption.warnings, ["-wi"]), tuple(BuildOption.warningsAsErrors, ["-w"]), tuple(BuildOption.ignoreDeprecations, ["-d"]), tuple(BuildOption.deprecationWarnings, ["-dw"]), tuple(BuildOption.deprecationErrors, ["-de"]), 
tuple(BuildOption.property, ["-property"]), tuple(BuildOption.profileGC, ["-profile=gc"]), tuple(BuildOption.betterC, ["-betterC"]), tuple(BuildOption.lowmem, ["-lowmem"]), tuple(BuildOption.color, ["-color"]), tuple(BuildOption._docs, ["-Dddocs"]), tuple(BuildOption._ddox, ["-Xfdocs.json", "-Df__dummy.html"]), ]; @property string name() const { return "dmd"; } enum dmdVersionRe = `^version\s+v?(\d+\.\d+\.\d+[A-Za-z0-9.+-]*)`; unittest { import std.regex : matchFirst, regex; auto probe = ` binary dmd version v2.082.0 config /etc/dmd.conf `; auto re = regex(dmdVersionRe, "m"); auto c = matchFirst(probe, re); assert(c && c.length > 1 && c[1] == "2.082.0"); } unittest { import std.regex : matchFirst, regex; auto probe = ` binary dmd version v2.084.0-beta.1 config /etc/dmd.conf `; auto re = regex(dmdVersionRe, "m"); auto c = matchFirst(probe, re); assert(c && c.length > 1 && c[1] == "2.084.0-beta.1"); } string determineVersion(string compiler_binary, string verboseOutput) { import std.regex : matchFirst, regex; auto ver = matchFirst(verboseOutput, regex(dmdVersionRe, "m")); return ver && ver.length > 1 ? ver[1] : null; } BuildPlatform determinePlatform(ref BuildSettings settings, string compiler_binary, string arch_override) { // Set basic arch flags for the probe - might be revised based on the exact value + compiler version string[] arch_flags; switch (arch_override) { default: throw new UnsupportedArchitectureException(arch_override); case "": // Don't use Optlink by default on Windows version (Windows) { const is64bit = isWow64(); if (!is64bit.isNull) arch_flags = [ is64bit.get ? "-m64" : "-m32" ]; } break; // DMD 2.099 made MsCOFF the default, and DMD v2.109 removed OMF // support. Default everything to MsCOFF, people wanting to use OMF // should use an older DMD / dub. 
case "x86", "x86_omf", "x86_mscoff": arch_flags = ["-m32"]; break; case "x86_64": arch_flags = ["-m64"]; break; } auto bp = probePlatform(compiler_binary, arch_flags); bool keep_arch; if (arch_flags.length) keep_arch = bp.architecture != probePlatform(compiler_binary, []).architecture; settings.maybeAddArchFlags(keep_arch, arch_flags, arch_override); if (arch_override.length && !bp.architecture.canFind(arch_override) && !arch_override.among("x86_omf", "x86_mscoff") ) { logWarn(`Failed to apply the selected architecture %s. Got %s.`, arch_override, bp.architecture); } return bp; } version (Windows) version (DigitalMars) unittest { BuildSettings settings; auto compiler = new DMDCompiler; auto bp = compiler.determinePlatform(settings, "dmd", "x86"); assert(bp.isWindows()); assert(bp.architecture.canFind("x86")); settings = BuildSettings.init; bp = compiler.determinePlatform(settings, "dmd", "x86_omf"); assert(bp.isWindows()); assert(bp.architecture.canFind("x86")); settings = BuildSettings.init; bp = compiler.determinePlatform(settings, "dmd", "x86_mscoff"); assert(bp.isWindows()); assert(bp.architecture.canFind("x86")); settings = BuildSettings.init; bp = compiler.determinePlatform(settings, "dmd", "x86_64"); assert(bp.isWindows()); assert(bp.architecture.canFind("x86_64")); assert(!bp.architecture.canFind("x86")); settings = BuildSettings.init; bp = compiler.determinePlatform(settings, "dmd", ""); if (!isWow64.isNull && !isWow64.get) assert(bp.architecture.canFind("x86")); if (!isWow64.isNull && isWow64.get) assert(bp.architecture.canFind("x86_64")); } version (LDC) unittest { import std.conv : to; version (ARM) enum isARM = true; version (AArch64) enum isARM = true; else enum isARM = false; BuildSettings settings; auto compiler = new DMDCompiler; auto bp = compiler.determinePlatform(settings, "ldmd2", "x86"); static if (isARM) assert(bp.architecture.canFind("arm"), bp.architecture.to!string); else assert(bp.architecture.canFind("x86"), bp.architecture.to!string); 
bp = compiler.determinePlatform(settings, "ldmd2", ""); version (X86) assert(bp.architecture.canFind("x86"), bp.architecture.to!string); version (X86_64) assert(bp.architecture.canFind("x86_64"), bp.architecture.to!string); } void prepareBuildSettings(ref BuildSettings settings, const scope ref BuildPlatform platform, BuildSetting fields = BuildSetting.all) const { enforceBuildRequirements(settings); // Keep the current dflags at the end of the array so that they will overwrite other flags. // This allows user $DFLAGS to modify flags added by us. const dflagsTail = settings.dflags; settings.dflags = []; if (!(fields & BuildSetting.options)) { foreach (t; s_options) if (settings.options & t[0]) settings.addDFlags(t[1]); } if (!(fields & BuildSetting.versions)) { settings.addDFlags(settings.versions.map!(s => "-version="~s)().array()); settings.versions = null; } if (!(fields & BuildSetting.debugVersions)) { settings.addDFlags(settings.debugVersions.map!(s => "-debug="~s)().array()); settings.debugVersions = null; } if (!(fields & BuildSetting.importPaths)) { settings.addDFlags(settings.importPaths.map!(s => "-I"~s)().array()); settings.importPaths = null; } if (!(fields & BuildSetting.cImportPaths)) { settings.addDFlags(settings.cImportPaths.map!(s => "-P-I"~s)().array()); settings.cImportPaths = null; } if (!(fields & BuildSetting.stringImportPaths)) { settings.addDFlags(settings.stringImportPaths.map!(s => "-J"~s)().array()); settings.stringImportPaths = null; } if (!(fields & BuildSetting.libs)) { resolveLibs(settings, platform); if (platform.isWindows()) settings.addSourceFiles(settings.libs.map!(l => l~".lib")().array()); else settings.addLFlags(settings.libs.map!(l => "-l"~l)().array()); } if (!(fields & BuildSetting.sourceFiles)) { settings.addDFlags(settings.sourceFiles); settings.sourceFiles = null; } if (!(fields & BuildSetting.lflags)) { settings.addDFlags(lflagsToDFlags(settings.lflags)); settings.lflags = null; } if (platform.platform.canFind("posix") 
&& (settings.options & BuildOption.pic)) settings.addDFlags("-fPIC"); settings.addDFlags(dflagsTail); assert(fields & BuildSetting.dflags); assert(fields & BuildSetting.copyFiles); } void extractBuildOptions(ref BuildSettings settings) const { Appender!(string[]) newflags; next_flag: foreach (f; settings.dflags) { foreach (t; s_options) if (t[1].canFind(f)) { settings.options |= t[0]; continue next_flag; } if (f.startsWith("-version=")) settings.addVersions(f[9 .. $]); else if (f.startsWith("-debug=")) settings.addDebugVersions(f[7 .. $]); else newflags ~= f; } settings.dflags = newflags.data; } string getTargetFileName(in BuildSettings settings, in BuildPlatform platform) const { import std.conv: text; assert(settings.targetName.length > 0, "No target name set."); final switch (settings.targetType) { case TargetType.autodetect: assert(false, text("Configurations must have a concrete target type, ", settings.targetName, " has ", settings.targetType)); case TargetType.none: return null; case TargetType.sourceLibrary: return null; case TargetType.executable: if (platform.isWindows()) return settings.targetName ~ ".exe"; else return settings.targetName.idup; case TargetType.library: case TargetType.staticLibrary: if (platform.isWindows()) return settings.targetName ~ ".lib"; else return "lib" ~ settings.targetName ~ ".a"; case TargetType.dynamicLibrary: if (platform.isWindows()) return settings.targetName ~ ".dll"; else if (platform.platform.canFind("darwin")) return "lib" ~ settings.targetName ~ ".dylib"; else return "lib" ~ settings.targetName ~ ".so"; case TargetType.object: if (platform.isWindows()) return settings.targetName ~ ".obj"; else return settings.targetName ~ ".o"; } } void setTarget(ref BuildSettings settings, in BuildPlatform platform, string tpath = null) const { const targetFileName = getTargetFileName(settings, platform); final switch (settings.targetType) { case TargetType.autodetect: assert(false, "Invalid target type: autodetect"); case 
TargetType.none: assert(false, "Invalid target type: none"); case TargetType.sourceLibrary: assert(false, "Invalid target type: sourceLibrary"); case TargetType.executable: break; case TargetType.library: case TargetType.staticLibrary: settings.addDFlags("-lib"); break; case TargetType.dynamicLibrary: if (platform.compiler != "dmd" || platform.isWindows() || platform.platform.canFind("osx")) settings.addDFlags("-shared"); else settings.prependDFlags("-shared", "-defaultlib=libphobos2.so"); addDynamicLibName(settings, platform, targetFileName); break; case TargetType.object: settings.addDFlags("-c"); break; } if (tpath is null) tpath = (NativePath(settings.targetPath) ~ targetFileName).toNativeString(); settings.addDFlags("-of"~tpath); } void invoke(in BuildSettings settings, in BuildPlatform platform, void delegate(int, string) output_callback, NativePath cwd) { auto res_file = getTempFile("dub-build", ".rsp"); // clean-up early to avoid build-up of temporaries when invoke is called // many times in one DUB session. (e.g. 
when using DUB as a library) scope (exit) removeFile(res_file); const(string)[] args = settings.dflags; if (platform.frontendVersion >= 2066) args ~= "-vcolumns"; writeFile(res_file, escapeArgs(args).join("\n")); logDiagnostic("[cwd=%s] %s %s", cwd, platform.compilerBinary, escapeArgs(args).join(" ")); string[string] env; foreach (aa; [settings.environments, settings.buildEnvironments]) foreach (k, v; aa) env[k] = v; invokeTool([platform.compilerBinary, "@"~res_file.toNativeString()], output_callback, cwd, env); } void invokeLinker(in BuildSettings settings, in BuildPlatform platform, string[] objects, void delegate(int, string) output_callback, NativePath cwd) { import std.string; auto tpath = NativePath(settings.targetPath) ~ getTargetFileName(settings, platform); auto args = ["-of"~tpath.toNativeString()]; args ~= objects; args ~= settings.sourceFiles; if (platform.platform.canFind("linux")) args ~= "-L--no-as-needed"; // avoids linker errors due to libraries being specified in the wrong order by DMD args ~= lflagsToDFlags(settings.lflags); if (platform.compiler == "ldc") { // ldmd2: support the full LDC-specific list + extra "-m32mscoff", a superset of the DMD list import dub.compilers.ldc : LDCCompiler; args ~= settings.dflags.filter!(f => f == "-m32mscoff" || LDCCompiler.isLinkerDFlag(f)).array; } else { args ~= settings.dflags.filter!(f => isLinkerDFlag(f)).array; } auto res_file = getTempFile("dub-build", ".lnk"); writeFile(res_file, escapeArgs(args).join("\n")); logDiagnostic("[cwd=%s] %s %s", cwd, platform.compilerBinary, escapeArgs(args).join(" ")); string[string] env; foreach (aa; [settings.environments, settings.buildEnvironments]) foreach (k, v; aa) env[k] = v; invokeTool([platform.compilerBinary, "@"~res_file.toNativeString()], output_callback, cwd, env); } string[] lflagsToDFlags(const string[] lflags) const { return map!(f => "-L"~f)(lflags.filter!(f => f != "")()).array(); } private auto escapeArgs(in string[] args) { return args.map!(s => 
s.canFind(' ') ? "\""~s~"\"" : s); } static bool isLinkerDFlag(string arg) { switch (arg) { case "-g", "-gc", "-m32", "-m64", "-shared", "-lib", "-betterC": return true; default: return arg.startsWith("-L") || arg.startsWith("-Xcc=") || arg.startsWith("-defaultlib="); } } protected string[] defaultProbeArgs () const { return ["-quiet", "-c", "-o-", "-v"]; } } dub-1.40.0/source/dub/compilers/gdc.d000066400000000000000000000230771477246567400173560ustar00rootroot00000000000000/** GDC compiler support. Copyright: © 2013-2013 rejectedsoftware e.K. License: Subject to the terms of the MIT license, as written in the included LICENSE.txt file. Authors: Sönke Ludwig */ module dub.compilers.gdc; import dub.compilers.compiler; import dub.compilers.utils; import dub.internal.utils; import dub.internal.vibecompat.core.file; import dub.internal.vibecompat.inet.path; import dub.internal.logging; import std.algorithm; import std.array; import std.exception; import std.process : execute; import std.typecons; class GDCCompiler : Compiler { private static immutable s_options = [ tuple(BuildOption.debugMode, ["-fdebug"]), tuple(BuildOption.releaseMode, ["-frelease"]), tuple(BuildOption.coverage, ["-fprofile-arcs", "-ftest-coverage"]), tuple(BuildOption.debugInfo, ["-g"]), tuple(BuildOption.debugInfoC, ["-g"]), //tuple(BuildOption.alwaysStackFrame, ["-X"]), //tuple(BuildOption.stackStomping, ["-X"]), tuple(BuildOption.inline, ["-finline-functions"]), tuple(BuildOption.noBoundsCheck, ["-fno-bounds-check"]), tuple(BuildOption.optimize, ["-O2"]), tuple(BuildOption.profile, ["-pg"]), tuple(BuildOption.unittests, ["-funittest"]), tuple(BuildOption.verbose, ["-v"]), tuple(BuildOption.ignoreUnknownPragmas, ["-fignore-unknown-pragmas"]), tuple(BuildOption.syntaxOnly, ["-fsyntax-only"]), tuple(BuildOption.warnings, ["-Wall"]), tuple(BuildOption.warningsAsErrors, ["-Werror", "-Wall"]), tuple(BuildOption.ignoreDeprecations, ["-Wno-deprecated"]), tuple(BuildOption.deprecationWarnings, 
["-Wdeprecated"]), tuple(BuildOption.deprecationErrors, ["-Werror", "-Wdeprecated"]), tuple(BuildOption.property, ["-fproperty"]), //tuple(BuildOption.profileGC, ["-?"]), tuple(BuildOption.betterC, ["-fno-druntime"]), tuple(BuildOption.color, ["-fdiagnostics-color=always"]), tuple(BuildOption._docs, ["-fdoc-dir=docs"]), tuple(BuildOption._ddox, ["-Xfdocs.json", "-fdoc-file=__dummy.html"]), ]; @property string name() const { return "gdc"; } string determineVersion(string compiler_binary, string verboseOutput) { const result = execute([ compiler_binary, "-dumpfullversion", "-dumpversion" ]); return result.status == 0 ? result.output : null; } BuildPlatform determinePlatform(ref BuildSettings settings, string compiler_binary, string arch_override) { string[] arch_flags; switch (arch_override) { default: throw new UnsupportedArchitectureException(arch_override); case "": break; case "arm": arch_flags = ["-marm"]; break; case "arm_thumb": arch_flags = ["-mthumb"]; break; case "x86": arch_flags = ["-m32"]; break; case "x86_64": arch_flags = ["-m64"]; break; } auto bp = probePlatform(compiler_binary, arch_flags); bool keep_arch; if (arch_flags.length) keep_arch = bp.architecture != probePlatform(compiler_binary, []).architecture; settings.maybeAddArchFlags(keep_arch, arch_flags, arch_override); return bp; } void prepareBuildSettings(ref BuildSettings settings, const scope ref BuildPlatform platform, BuildSetting fields = BuildSetting.all) const { enforceBuildRequirements(settings); // Keep the current dflags at the end of the array so that they will overwrite other flags. // This allows user $DFLAGS to modify flags added by us. 
const dflagsTail = settings.dflags; settings.dflags = []; if (!(fields & BuildSetting.options)) { foreach (t; s_options) if (settings.options & t[0]) settings.addDFlags(t[1]); } if (!(fields & BuildSetting.versions)) { settings.addDFlags(settings.versions.map!(s => "-fversion="~s)().array()); settings.versions = null; } if (!(fields & BuildSetting.debugVersions)) { settings.addDFlags(settings.debugVersions.map!(s => "-fdebug="~s)().array()); settings.debugVersions = null; } if (!(fields & BuildSetting.importPaths)) { settings.addDFlags(settings.importPaths.map!(s => "-I"~s)().array()); settings.importPaths = null; } if (!(fields & BuildSetting.cImportPaths)) { settings.addDFlags(settings.cImportPaths.map!(s => "-I"~s)().array()); settings.cImportPaths = null; } if (!(fields & BuildSetting.stringImportPaths)) { settings.addDFlags(settings.stringImportPaths.map!(s => "-J"~s)().array()); settings.stringImportPaths = null; } if (!(fields & BuildSetting.sourceFiles)) { settings.addDFlags(settings.sourceFiles); settings.sourceFiles = null; } if (!(fields & BuildSetting.libs)) { resolveLibs(settings, platform); settings.addDFlags(settings.libs.map!(l => "-l"~l)().array()); } if (!(fields & BuildSetting.lflags)) { settings.addDFlags(lflagsToDFlags(settings.lflags)); settings.lflags = null; } if (settings.options & BuildOption.pic) settings.addDFlags("-fPIC"); settings.addDFlags(dflagsTail); assert(fields & BuildSetting.dflags); assert(fields & BuildSetting.copyFiles); } void extractBuildOptions(ref BuildSettings settings) const { Appender!(string[]) newflags; next_flag: foreach (f; settings.dflags) { foreach (t; s_options) if (t[1].canFind(f)) { settings.options |= t[0]; continue next_flag; } if (f.startsWith("-fversion=")) settings.addVersions(f[10 .. $]); else if (f.startsWith("-fdebug=")) settings.addDebugVersions(f[8 .. 
$]); else newflags ~= f; } settings.dflags = newflags.data; } string getTargetFileName(in BuildSettings settings, in BuildPlatform platform) const { assert(settings.targetName.length > 0, "No target name set."); final switch (settings.targetType) { case TargetType.autodetect: assert(false, "Configurations must have a concrete target type."); case TargetType.none: return null; case TargetType.sourceLibrary: return null; case TargetType.executable: if (platform.isWindows()) return settings.targetName ~ ".exe"; else return settings.targetName.idup; case TargetType.library: case TargetType.staticLibrary: return "lib" ~ settings.targetName ~ ".a"; case TargetType.dynamicLibrary: if (platform.isWindows()) return settings.targetName ~ ".dll"; else if (platform.platform.canFind("darwin")) return "lib" ~ settings.targetName ~ ".dylib"; else return "lib" ~ settings.targetName ~ ".so"; case TargetType.object: if (platform.isWindows()) return settings.targetName ~ ".obj"; else return settings.targetName ~ ".o"; } } void setTarget(ref BuildSettings settings, in BuildPlatform platform, string tpath = null) const { const targetFileName = getTargetFileName(settings, platform); final switch (settings.targetType) { case TargetType.autodetect: assert(false, "Invalid target type: autodetect"); case TargetType.none: assert(false, "Invalid target type: none"); case TargetType.sourceLibrary: assert(false, "Invalid target type: sourceLibrary"); case TargetType.executable: break; case TargetType.library: case TargetType.staticLibrary: case TargetType.object: settings.addDFlags("-c"); break; case TargetType.dynamicLibrary: settings.addDFlags("-shared", "-fPIC"); addDynamicLibName(settings, platform, targetFileName); break; } if (tpath is null) tpath = (NativePath(settings.targetPath) ~ targetFileName).toNativeString(); settings.addDFlags("-o", tpath); } void invoke(in BuildSettings settings, in BuildPlatform platform, void delegate(int, string) output_callback, NativePath cwd) { auto 
res_file = getTempFile("dub-build", ".rsp"); writeFile(res_file, join(settings.dflags.map!(s => escape(s)), "\n")); logDiagnostic("%s %s", platform.compilerBinary, join(cast(string[])settings.dflags, " ")); string[string] env; foreach (aa; [settings.environments, settings.buildEnvironments]) foreach (k, v; aa) env[k] = v; invokeTool([platform.compilerBinary, "@"~res_file.toNativeString()], output_callback, cwd, env); } void invokeLinker(in BuildSettings settings, in BuildPlatform platform, string[] objects, void delegate(int, string) output_callback, NativePath cwd) { import std.string; string[] args; // As the user is supposed to call setTarget prior to invoke, -o target is already set. if (settings.targetType == TargetType.staticLibrary || settings.targetType == TargetType.staticLibrary) { auto tpath = extractTarget(settings.dflags); assert(tpath !is null, "setTarget should be called before invoke"); args = [ "ar", "rcs", tpath ] ~ objects; } else { args = platform.compilerBinary ~ objects ~ settings.sourceFiles ~ settings.lflags ~ settings.dflags.filter!(f => isLinkageFlag(f)).array; if (platform.platform.canFind("linux")) args ~= "-L--no-as-needed"; // avoids linker errors due to libraries being specified in the wrong order } logDiagnostic("%s", args.join(" ")); string[string] env; foreach (aa; [settings.environments, settings.buildEnvironments]) foreach (k, v; aa) env[k] = v; invokeTool(args, output_callback, cwd, env); } string[] lflagsToDFlags(const string[] lflags) const { string[] dflags; foreach( f; lflags ) { if ( f == "") { continue; } dflags ~= "-Xlinker"; dflags ~= f; } return dflags; } protected string[] defaultProbeArgs () const { return ["-fsyntax-only", "-v"]; } } private string extractTarget(const string[] args) { auto i = args.countUntil("-o"); return i >= 0 ? 
args[i+1] : null; } private bool isLinkageFlag(string flag) { switch (flag) { case "-c": return false; default: return true; } } private string escape(string str) { auto ret = appender!string(); foreach (char ch; str) { switch (ch) { default: ret.put(ch); break; case '\\': ret.put(`\\`); break; case ' ': ret.put(`\ `); break; } } return ret.data; } dub-1.40.0/source/dub/compilers/ldc.d000066400000000000000000000273621477246567400173640ustar00rootroot00000000000000/** LDC compiler support. Copyright: © 2013-2013 rejectedsoftware e.K. License: Subject to the terms of the MIT license, as written in the included LICENSE.txt file. Authors: Sönke Ludwig */ module dub.compilers.ldc; import dub.compilers.compiler; import dub.compilers.utils; import dub.internal.utils; import dub.internal.vibecompat.core.file; import dub.internal.vibecompat.inet.path; import dub.internal.logging; import std.algorithm; import std.array; import std.exception; import std.typecons; class LDCCompiler : Compiler { private static immutable s_options = [ tuple(BuildOption.debugMode, ["-d-debug"]), tuple(BuildOption.releaseMode, ["-release"]), tuple(BuildOption.coverage, ["-cov"]), tuple(BuildOption.coverageCTFE, ["-cov=ctfe"]), tuple(BuildOption.debugInfo, ["-g"]), tuple(BuildOption.debugInfoC, ["-gc"]), tuple(BuildOption.alwaysStackFrame, ["-disable-fp-elim"]), //tuple(BuildOption.stackStomping, ["-?"]), tuple(BuildOption.inline, ["-enable-inlining", "-Hkeep-all-bodies"]), tuple(BuildOption.noBoundsCheck, ["-boundscheck=off"]), tuple(BuildOption.optimize, ["-O3"]), tuple(BuildOption.profile, ["-fdmd-trace-functions"]), tuple(BuildOption.unittests, ["-unittest"]), tuple(BuildOption.verbose, ["-v"]), tuple(BuildOption.ignoreUnknownPragmas, ["-ignore"]), tuple(BuildOption.syntaxOnly, ["-o-"]), tuple(BuildOption.warnings, ["-wi"]), tuple(BuildOption.warningsAsErrors, ["-w"]), tuple(BuildOption.ignoreDeprecations, ["-d"]), tuple(BuildOption.deprecationWarnings, ["-dw"]), 
tuple(BuildOption.deprecationErrors, ["-de"]), tuple(BuildOption.property, ["-property"]), //tuple(BuildOption.profileGC, ["-?"]), tuple(BuildOption.betterC, ["-betterC"]), tuple(BuildOption.lowmem, ["-lowmem"]), tuple(BuildOption.color, ["-enable-color"]), tuple(BuildOption._docs, ["-Dd=docs"]), tuple(BuildOption._ddox, ["-Xf=docs.json", "-Dd=__dummy_docs", "-oq"]), ]; @property string name() const { return "ldc"; } enum ldcVersionRe = `^version\s+v?(\d+\.\d+\.\d+[A-Za-z0-9.+-]*)`; unittest { import std.regex : matchFirst, regex; auto probe = ` binary /usr/bin/ldc2 version 1.11.0 (DMD v2.081.2, LLVM 6.0.1) config /etc/ldc2.conf (x86_64-pc-linux-gnu) `; auto re = regex(ldcVersionRe, "m"); auto c = matchFirst(probe, re); assert(c && c.length > 1 && c[1] == "1.11.0"); } string determineVersion(string compiler_binary, string verboseOutput) { import std.regex : matchFirst, regex; auto ver = matchFirst(verboseOutput, regex(ldcVersionRe, "m")); return ver && ver.length > 1 ? ver[1] : null; } BuildPlatform determinePlatform(ref BuildSettings settings, string compiler_binary, string arch_override) { string[] arch_flags; bool arch_override_is_triple = false; switch (arch_override) { case "": break; case "x86": arch_flags = ["-march=x86"]; break; case "x86_mscoff": arch_flags = ["-march=x86"]; break; case "x86_64": arch_flags = ["-march=x86-64"]; break; case "aarch64": arch_flags = ["-march=aarch64"]; break; case "powerpc64": arch_flags = ["-march=powerpc64"]; break; default: if (arch_override.canFind('-')) { arch_override_is_triple = true; arch_flags = ["-mtriple="~arch_override]; } else throw new UnsupportedArchitectureException(arch_override); break; } auto bp = probePlatform(compiler_binary, arch_flags); bool keep_arch = arch_override_is_triple; if (!keep_arch && arch_flags.length) keep_arch = bp.architecture != probePlatform(compiler_binary, []).architecture; settings.maybeAddArchFlags(keep_arch, arch_flags, arch_override); return bp; } void prepareBuildSettings(ref 
BuildSettings settings, const scope ref BuildPlatform platform, BuildSetting fields = BuildSetting.all) const { enforceBuildRequirements(settings); // Keep the current dflags at the end of the array so that they will overwrite other flags. // This allows user $DFLAGS to modify flags added by us. const dflagsTail = settings.dflags; settings.dflags = []; if (!(fields & BuildSetting.options)) { foreach (t; s_options) if (settings.options & t[0]) settings.addDFlags(t[1]); } if (!(fields & BuildSetting.versions)) { settings.addDFlags(settings.versions.map!(s => "-d-version="~s)().array()); settings.versions = null; } if (!(fields & BuildSetting.debugVersions)) { settings.addDFlags(settings.debugVersions.map!(s => "-d-debug="~s)().array()); settings.debugVersions = null; } if (!(fields & BuildSetting.importPaths)) { settings.addDFlags(settings.importPaths.map!(s => "-I"~s)().array()); settings.importPaths = null; } if (!(fields & BuildSetting.cImportPaths)) { settings.addDFlags(settings.cImportPaths.map!(s => "-P-I"~s)().array()); settings.cImportPaths = null; } if (!(fields & BuildSetting.stringImportPaths)) { settings.addDFlags(settings.stringImportPaths.map!(s => "-J"~s)().array()); settings.stringImportPaths = null; } if (!(fields & BuildSetting.sourceFiles)) { settings.addDFlags(settings.sourceFiles); settings.sourceFiles = null; } if (!(fields & BuildSetting.libs)) { resolveLibs(settings, platform); settings.addLFlags(settings.libs.map!(l => "-l"~l)().array()); } if (!(fields & BuildSetting.lflags)) { settings.addDFlags(lflagsToDFlags(settings.lflags)); settings.lflags = null; } if (settings.options & BuildOption.pic) { if (platform.isWindows()) { /* This has nothing to do with PIC, but as the PIC option is exclusively * set internally for code that ends up in a dynamic library, explicitly * specify what `-shared` defaults to (`-shared` can't be used when * compiling only, without linking). * *Pre*pending the flags enables the user to override them. 
*/ settings.prependDFlags("-fvisibility=public", "-dllimport=all"); } else { settings.addDFlags("-relocation-model=pic"); } } settings.addDFlags(dflagsTail); assert(fields & BuildSetting.dflags); assert(fields & BuildSetting.copyFiles); } void extractBuildOptions(ref BuildSettings settings) const { Appender!(string[]) newflags; next_flag: foreach (f; settings.dflags) { foreach (t; s_options) if (t[1].canFind(f)) { settings.options |= t[0]; continue next_flag; } if (f.startsWith("-d-version=")) settings.addVersions(f[11 .. $]); else if (f.startsWith("-d-debug=")) settings.addDebugVersions(f[9 .. $]); else newflags ~= f; } settings.dflags = newflags.data; } string getTargetFileName(in BuildSettings settings, in BuildPlatform platform) const { assert(settings.targetName.length > 0, "No target name set."); const p = platform.platform; final switch (settings.targetType) { case TargetType.autodetect: assert(false, "Configurations must have a concrete target type."); case TargetType.none: return null; case TargetType.sourceLibrary: return null; case TargetType.executable: if (p.canFind("windows")) return settings.targetName ~ ".exe"; else if (p.canFind("wasm")) return settings.targetName ~ ".wasm"; else return settings.targetName.idup; case TargetType.library: case TargetType.staticLibrary: if (p.canFind("windows") && !p.canFind("mingw")) return settings.targetName ~ ".lib"; else return "lib" ~ settings.targetName ~ ".a"; case TargetType.dynamicLibrary: if (p.canFind("windows")) return settings.targetName ~ ".dll"; else if (p.canFind("darwin")) return "lib" ~ settings.targetName ~ ".dylib"; else return "lib" ~ settings.targetName ~ ".so"; case TargetType.object: if (p.canFind("windows")) return settings.targetName ~ ".obj"; else return settings.targetName ~ ".o"; } } void setTarget(ref BuildSettings settings, in BuildPlatform platform, string tpath = null) const { const targetFileName = getTargetFileName(settings, platform); final switch (settings.targetType) { case 
TargetType.autodetect: assert(false, "Invalid target type: autodetect"); case TargetType.none: assert(false, "Invalid target type: none"); case TargetType.sourceLibrary: assert(false, "Invalid target type: sourceLibrary"); case TargetType.executable: break; case TargetType.library: case TargetType.staticLibrary: // -oq: name object files uniquely (so the files don't collide) settings.addDFlags("-lib", "-oq"); // -cleanup-obj (supported since LDC v1.1): remove object files after archiving to static lib if (platform.frontendVersion >= 2071) { settings.addDFlags("-cleanup-obj"); } if (platform.frontendVersion < 2095) { // Since LDC v1.25, -cleanup-obj defaults to a unique temp -od directory // We need to resort to a unique-ish -od directory before that settings.addDFlags("-od=" ~ settings.targetPath ~ "/obj"); } break; case TargetType.dynamicLibrary: settings.addDFlags("-shared"); addDynamicLibName(settings, platform, targetFileName); break; case TargetType.object: settings.addDFlags("-c"); break; } if (tpath is null) tpath = (NativePath(settings.targetPath) ~ targetFileName).toNativeString(); settings.addDFlags("-of"~tpath); } void invoke(in BuildSettings settings, in BuildPlatform platform, void delegate(int, string) output_callback, NativePath cwd) { auto res_file = getTempFile("dub-build", ".rsp"); const(string)[] args = settings.dflags; if (platform.frontendVersion >= 2066) args ~= "-vcolumns"; writeFile(res_file, escapeArgs(args).join("\n")); logDiagnostic("%s %s", platform.compilerBinary, escapeArgs(args).join(" ")); string[string] env; foreach (aa; [settings.environments, settings.buildEnvironments]) foreach (k, v; aa) env[k] = v; invokeTool([platform.compilerBinary, "@"~res_file.toNativeString()], output_callback, cwd, env); } void invokeLinker(in BuildSettings settings, in BuildPlatform platform, string[] objects, void delegate(int, string) output_callback, NativePath cwd) { import std.string; auto tpath = NativePath(settings.targetPath) ~ 
getTargetFileName(settings, platform); auto args = ["-of"~tpath.toNativeString()]; args ~= objects; args ~= settings.sourceFiles; if (platform.platform.canFind("linux")) args ~= "-L--no-as-needed"; // avoids linker errors due to libraries being specified in the wrong order args ~= lflagsToDFlags(settings.lflags); args ~= settings.dflags.filter!(f => isLinkerDFlag(f)).array; auto res_file = getTempFile("dub-build", ".lnk"); writeFile(res_file, escapeArgs(args).join("\n")); logDiagnostic("%s %s", platform.compilerBinary, escapeArgs(args).join(" ")); string[string] env; foreach (aa; [settings.environments, settings.buildEnvironments]) foreach (k, v; aa) env[k] = v; invokeTool([platform.compilerBinary, "@"~res_file.toNativeString()], output_callback, cwd, env); } string[] lflagsToDFlags(const string[] lflags) const { return map!(f => "-L"~f)(lflags.filter!(f => f != "")()).array(); } private auto escapeArgs(in string[] args) { return args.map!(s => s.canFind(' ') ? "\""~s~"\"" : s); } static bool isLinkerDFlag(string arg) { if (arg.length > 2 && arg.startsWith("--")) arg = arg[1 .. $]; // normalize to 1 leading hyphen switch (arg) { case "-g", "-gc", "-m32", "-m64", "-shared", "-lib", "-betterC", "-disable-linker-strip-dead", "-static": return true; default: return arg.startsWith("-L") || arg.startsWith("-Xcc=") || arg.startsWith("-defaultlib=") || arg.startsWith("-platformlib=") || arg.startsWith("-flto") || arg.startsWith("-fsanitize=") || arg.startsWith("-gcc=") || arg.startsWith("-link-") || arg.startsWith("-linker=") || arg.startsWith("-march=") || arg.startsWith("-mscrtlib=") || arg.startsWith("-mtriple="); } } protected string[] defaultProbeArgs () const { return ["-c", "-o-", "-v"]; } } dub-1.40.0/source/dub/compilers/utils.d000066400000000000000000000330151477246567400177520ustar00rootroot00000000000000/** Utility functionality for compiler class implementations. Copyright: © 2013-2016 rejectedsoftware e.K. 
License: Subject to the terms of the MIT license, as written in the included LICENSE.txt file. Authors: Sönke Ludwig */ module dub.compilers.utils; import dub.compilers.buildsettings; import dub.platform : BuildPlatform, archCheck, compilerCheckPragmas, platformCheck, pragmaGen; import dub.internal.vibecompat.inet.path; import dub.internal.logging; import std.algorithm : canFind, endsWith, filter; /** Alters the build options to comply with the specified build requirements. And enabled options that do not comply will get disabled. */ void enforceBuildRequirements(ref BuildSettings settings) { settings.addOptions(BuildOption.warningsAsErrors); if (settings.requirements & BuildRequirement.allowWarnings) { settings.options &= ~BuildOption.warningsAsErrors; settings.options |= BuildOption.warnings; } if (settings.requirements & BuildRequirement.silenceWarnings) settings.options &= ~(BuildOption.warningsAsErrors|BuildOption.warnings); if (settings.requirements & BuildRequirement.disallowDeprecations) { settings.options &= ~(BuildOption.ignoreDeprecations|BuildOption.deprecationWarnings); settings.options |= BuildOption.deprecationErrors; } if (settings.requirements & BuildRequirement.silenceDeprecations) { settings.options &= ~(BuildOption.deprecationErrors|BuildOption.deprecationWarnings); settings.options |= BuildOption.ignoreDeprecations; } if (settings.requirements & BuildRequirement.disallowInlining) settings.options &= ~BuildOption.inline; if (settings.requirements & BuildRequirement.disallowOptimization) settings.options &= ~BuildOption.optimize; if (settings.requirements & BuildRequirement.requireBoundsCheck) settings.options &= ~BuildOption.noBoundsCheck; if (settings.requirements & BuildRequirement.requireContracts) settings.options &= ~BuildOption.releaseMode; if (settings.requirements & BuildRequirement.relaxProperties) settings.options &= ~BuildOption.property; } /** Determines if a specific file name has the extension of a linker file. 
Linker files include static/dynamic libraries, resource files, object files and DLL definition files. */ bool isLinkerFile(const scope ref BuildPlatform platform, string f) { import std.path; switch (extension(f)) { default: return false; case ".lib", ".obj", ".res", ".def": return platform.isWindows(); case ".a", ".o", ".so", ".dylib": return !platform.isWindows(); } } unittest { BuildPlatform p; p.platform = ["windows"]; assert(isLinkerFile(p, "test.obj")); assert(isLinkerFile(p, "test.lib")); assert(isLinkerFile(p, "test.res")); assert(!isLinkerFile(p, "test.o")); assert(!isLinkerFile(p, "test.d")); p.platform = ["something else"]; assert(isLinkerFile(p, "test.o")); assert(isLinkerFile(p, "test.a")); assert(isLinkerFile(p, "test.so")); assert(isLinkerFile(p, "test.dylib")); assert(!isLinkerFile(p, "test.obj")); assert(!isLinkerFile(p, "test.d")); } /** Adds a default DT_SONAME (ELF) / 'install name' (Mach-O) when linking a dynamic library. This makes dependees reference their dynamic-lib deps by filename only (DT_NEEDED etc.) instead of by the path used in the dependee linker cmdline, and enables loading the deps from the dependee's output directory - either by setting the LD_LIBRARY_PATH environment variable, or baking an rpath into the executable. */ package void addDynamicLibName(ref BuildSettings settings, in BuildPlatform platform, string fileName) { if (!platform.isWindows()) { // *pre*pend to allow the user to override it if (platform.platform.canFind("darwin")) settings.prependLFlags("-install_name", "@rpath/" ~ fileName); else settings.prependLFlags("-soname", fileName); } } /** Replaces each referenced import library by the appropriate linker flags. This function tries to invoke "pkg-config" if possible and falls back to direct flag translation if that fails. 
*/ void resolveLibs(ref BuildSettings settings, const scope ref BuildPlatform platform) { import std.string : format; import std.array : array; if (settings.libs.length == 0) return; if (settings.targetType == TargetType.library || settings.targetType == TargetType.staticLibrary) { logDiagnostic("Ignoring all import libraries for static library build."); settings.libs = null; if (platform.isWindows()) settings.sourceFiles = settings.sourceFiles.filter!(f => !f.endsWith(".lib")).array; } version (Posix) { import std.algorithm : any, map, partition, startsWith; import std.array : array, join, split; import std.exception : enforce; import std.process : execute; try { enum pkgconfig_bin = "pkg-config"; bool exists(string lib) { return execute([pkgconfig_bin, "--exists", lib]).status == 0; } auto pkgconfig_libs = settings.libs.partition!(l => !exists(l)); pkgconfig_libs ~= settings.libs[0 .. $ - pkgconfig_libs.length] .partition!(l => !exists("lib"~l)).map!(l => "lib"~l).array; settings.libs = settings.libs[0 .. $ - pkgconfig_libs.length]; if (pkgconfig_libs.length) { logDiagnostic("Using pkg-config to resolve library flags for %s.", pkgconfig_libs.join(", ")); auto libflags = execute([pkgconfig_bin, "--libs"] ~ pkgconfig_libs); enforce(libflags.status == 0, format("pkg-config exited with error code %s: %s", libflags.status, libflags.output)); foreach (f; libflags.output.split()) { if (f.startsWith("-L-L")) { settings.addLFlags(f[2 .. $]); } else if (f.startsWith("-defaultlib")) { settings.addDFlags(f); } else if (f.startsWith("-L-defaultlib")) { settings.addDFlags(f[2 .. $]); } else if (f.startsWith("-pthread")) { settings.addLFlags("-lpthread"); } else if (f.startsWith("-L-l")) { settings.addLFlags(f[2 .. $].split(",")); } else if (f.startsWith("-Wl,")) settings.addLFlags(f[4 .. $].split(",")); else settings.addLFlags(f); } } if (settings.libs.length) logDiagnostic("Using direct -l... 
flags for %s.", settings.libs.array.join(", ")); } catch (Exception e) { logDiagnostic("pkg-config failed: %s", e.msg); logDiagnostic("Falling back to direct -l... flags."); } } } /** Searches the given list of compiler flags for ones that have a generic equivalent. Certain compiler flags should, instead of using compiler-specific syntax, be specified as build options (`BuildOption`) or built requirements (`BuildRequirements`). This function will output warning messages to assist the user in making the best choice. */ void warnOnSpecialCompilerFlags(string[] compiler_flags, Flags!BuildOption options, string package_name, string config_name) { import std.algorithm : any, endsWith, startsWith; import std.range : empty; struct SpecialFlag { string[] flags; string alternative; } static immutable SpecialFlag[] s_specialFlags = [ {["-c", "-o-"], "Automatically issued by DUB, do not specify in dub.json"}, {["-w", "-Wall", "-Werr"], `Use "buildRequirements" to control warning behavior`}, {["-property", "-fproperty"], "Using this flag may break building of dependencies and it will probably be removed from DMD in the future"}, {["-wi"], `Use the "buildRequirements" field to control warning behavior`}, {["-d", "-de", "-dw"], `Use the "buildRequirements" field to control deprecation behavior`}, {["-of"], `Use "targetPath" and "targetName" to customize the output file`}, {["-debug", "-fdebug", "-g"], "Call dub with --build=debug"}, {["-release", "-frelease", "-O", "-inline"], "Call dub with --build=release"}, {["-unittest", "-funittest"], "Call dub with --build=unittest"}, {["-lib"], `Use {"targetType": "staticLibrary"} or let dub manage this`}, {["-D"], "Call dub with --build=docs or --build=ddox"}, {["-X"], "Call dub with --build=ddox"}, {["-cov"], "Call dub with --build=cov or --build=unittest-cov"}, {["-cov=ctfe"], "Call dub with --build=cov-ctfe or --build=unittest-cov-ctfe"}, {["-profile"], "Call dub with --build=profile"}, {["-version="], `Use "versions" to specify 
version constants in a compiler independent way`}, {["-debug="], `Use "debugVersions" to specify version constants in a compiler independent way`}, {["-I"], `Use "importPaths" to specify import paths in a compiler independent way`}, {["-J"], `Use "stringImportPaths" to specify import paths in a compiler independent way`}, {["-m32", "-m64", "-m32mscoff"], `Use --arch=x86/--arch=x86_64/--arch=x86_mscoff to specify the target architecture, e.g. 'dub build --arch=x86_64'`} ]; struct SpecialOption { BuildOption[] flags; string alternative; } static immutable SpecialOption[] s_specialOptions = [ {[BuildOption.debugMode], "Call DUB with --build=debug"}, {[BuildOption.releaseMode], "Call DUB with --build=release"}, {[BuildOption.coverage], "Call DUB with --build=cov or --build=unittest-cov"}, {[BuildOption.coverageCTFE], "Call DUB with --build=cov-ctfe or --build=unittest-cov-ctfe"}, {[BuildOption.debugInfo], "Call DUB with --build=debug"}, {[BuildOption.inline], "Call DUB with --build=release"}, {[BuildOption.noBoundsCheck], "Call DUB with --build=release-nobounds"}, {[BuildOption.optimize], "Call DUB with --build=release"}, {[BuildOption.profile], "Call DUB with --build=profile"}, {[BuildOption.unittests], "Call DUB with --build=unittest"}, {[BuildOption.syntaxOnly], "Call DUB with --build=syntax"}, {[BuildOption.warnings, BuildOption.warningsAsErrors], "Use \"buildRequirements\" to control the warning level"}, {[BuildOption.ignoreDeprecations, BuildOption.deprecationWarnings, BuildOption.deprecationErrors], "Use \"buildRequirements\" to control the deprecation warning level"}, {[BuildOption.property], "This flag is deprecated and has no effect"} ]; bool got_preamble = false; void outputPreamble() { if (got_preamble) return; got_preamble = true; logWarn(""); if (config_name.empty) logWarn("## Warning for package %s ##", package_name); else logWarn("## Warning for package %s, configuration %s ##", package_name, config_name); logWarn(""); logWarn("The following compiler 
flags have been specified in the package description"); logWarn("file. They are handled by DUB and direct use in packages is discouraged."); logWarn("Alternatively, you can set the DFLAGS environment variable to pass custom flags"); logWarn("to the compiler, or use one of the suggestions below:"); logWarn(""); } foreach (f; compiler_flags) { foreach (sf; s_specialFlags) { if (sf.flags.any!(sff => f == sff || (sff.endsWith("=") && f.startsWith(sff)))) { outputPreamble(); logWarn("%s: %s", f, sf.alternative); break; } } } foreach (sf; s_specialOptions) { foreach (f; sf.flags) { if (options & f) { outputPreamble(); logWarn("%s: %s", f, sf.alternative); break; } } } if (got_preamble) logWarn(""); } private enum probeBeginMark = "__dub_probe_begin__"; private enum probeEndMark = "__dub_probe_end__"; /** Generate a file that will give, at compile time, information about the compiler (architecture, frontend version...) See_Also: `readPlatformProbe` */ NativePath generatePlatformProbeFile() { import dub.internal.vibecompat.core.file; import dub.internal.utils; import std.string : format; enum moduleInfo = q{ module object; alias string = const(char)[]; }; // avoid druntime so that this compiles without a compiler's builtin object.d enum probe = q{ %1$s pragma(msg, `%2$s`); pragma(msg, `\n`); pragma(msg, `compiler`); %6$s pragma(msg, `\n`); pragma(msg, `frontendVersion "`); pragma(msg, __VERSION__.stringof); pragma(msg, `"\n`); pragma(msg, `compilerVendor "`); pragma(msg, __VENDOR__); pragma(msg, `"\n`); pragma(msg, `platform`); %4$s pragma(msg, `\n`); pragma(msg, `architecture `); %5$s pragma(msg, `\n`); pragma(msg, `%3$s`); }.format(moduleInfo, probeBeginMark, probeEndMark, pragmaGen(platformCheck), pragmaGen(archCheck), compilerCheckPragmas); auto path = getTempFile("dub_platform_probe", ".d"); writeFile(path, probe); return path; } /** Processes the SDL output generated by compiling the platform probe file. See_Also: `generatePlatformProbeFile`. 
*/ BuildPlatform readPlatformSDLProbe(string output) { import std.algorithm : map, max, splitter, joiner, count, filter; import std.array : array; import std.exception : enforce; import std.range : front; import std.ascii : newline; import std.string; import dub.internal.sdlang.parser; import dub.internal.sdlang.ast; import std.conv; // work around possible additional output of the compiler auto idx1 = output.indexOf(probeBeginMark ~ newline ~ "\\n"); auto idx2 = output[max(0, idx1) .. $].indexOf(probeEndMark) + idx1; enforce(idx1 >= 0 && idx1 < idx2, "Unexpected platform information output - does not contain a JSON object."); output = output[idx1 + probeBeginMark.length .. idx2].replace(newline, "").replace("\\n", "\n"); output = output.splitter("\n").filter!((e) => e.length > 0) .map!((e) { if (e.count("\"") == 0) { return e ~ ` ""`; } return e; }) .joiner("\n").array().to!string; BuildPlatform build_platform; Tag sdl = parseSource(output); foreach (n; sdl.all.tags) { switch (n.name) { default: break; case "platform": build_platform.platform = n.values.map!(e => e.toString()).array(); break; case "architecture": build_platform.architecture = n.values.map!(e => e.toString()).array(); break; case "compiler": build_platform.compiler = n.values.front.toString(); break; case "frontendVersion": build_platform.frontendVersion = n.values.front.toString() .filter!((e) => e >= '0' && e <= '9').array().to!string .to!int; break; } } return build_platform; } dub-1.40.0/source/dub/data/000077500000000000000000000000001477246567400153575ustar00rootroot00000000000000dub-1.40.0/source/dub/data/settings.d000066400000000000000000000170161477246567400173710ustar00rootroot00000000000000/******************************************************************************* Contains struct definition for settings.json files User settings are file that allow to configure dub default behavior. 
*******************************************************************************/ module dub.data.settings; import dub.internal.configy.Attributes; import dub.internal.vibecompat.inet.path; /// Determines which of the default package suppliers are queried for packages. public enum SkipPackageSuppliers { none, /// Uses all configured package suppliers. standard, /// Does not use the default package suppliers (`defaultPackageSuppliers`). configured, /// Does not use default suppliers or suppliers configured in DUB's configuration file all, /// Uses only manually specified package suppliers. default_, /// The value wasn't specified. It is provided in order to know when it is safe to ignore it } /** * User-provided settings (configuration) * * All fields in this struct should be optional. * Fields that are *not* optional should be mandatory from the POV * of the application, not the POV of file parsing. * For example, git's `core.author` and `core.email` are required to commit, * but the error happens on the commit, not when the gitconfig is parsed. * * We have multiple configuration locations, and two kinds of fields: * additive and non-additive. Additive fields are fields which are the union * of all configuration files (e.g. `registryURLs`). Non-additive fields * will ignore values set in lower priorities configuration, although parsing * must still succeed. Additive fields are marked as `@Optional`, * non-additive are marked as `SetInfo`. */ package(dub) struct Settings { @Optional string[] registryUrls; @Optional NativePath[] customCachePaths; private struct SkipRegistry { SkipPackageSuppliers skipRegistry; static SkipRegistry fromString (string value) { import std.conv : to; auto result = value.to!SkipPackageSuppliers; if (result == SkipPackageSuppliers.default_) { throw new Exception( "skipRegistry value `default_` is only meant for interal use." ~ " Instead, use one of `none`, `standard`, `configured`, or `all`." 
); } return SkipRegistry(result); } alias skipRegistry this; } SetInfo!(SkipRegistry) skipRegistry; SetInfo!(string) defaultCompiler; SetInfo!(string) defaultArchitecture; SetInfo!(bool) defaultLowMemory; SetInfo!(string[string]) defaultEnvironments; SetInfo!(string[string]) defaultBuildEnvironments; SetInfo!(string[string]) defaultRunEnvironments; SetInfo!(string[string]) defaultPreGenerateEnvironments; SetInfo!(string[string]) defaultPostGenerateEnvironments; SetInfo!(string[string]) defaultPreBuildEnvironments; SetInfo!(string[string]) defaultPostBuildEnvironments; SetInfo!(string[string]) defaultPreRunEnvironments; SetInfo!(string[string]) defaultPostRunEnvironments; SetInfo!(string) dubHome; /// Merge a lower priority config (`this`) with a `higher` priority config public Settings merge(Settings higher) return @safe pure nothrow { import std.traits : hasUDA; Settings result; static foreach (idx, _; Settings.tupleof) { static if (hasUDA!(Settings.tupleof[idx], Optional)) result.tupleof[idx] = higher.tupleof[idx] ~ this.tupleof[idx]; else static if (IsSetInfo!(typeof(this.tupleof[idx]))) { if (higher.tupleof[idx].set) result.tupleof[idx] = higher.tupleof[idx]; else result.tupleof[idx] = this.tupleof[idx]; } else static assert(false, "Expect `@Optional` or `SetInfo` on: `" ~ __traits(identifier, this.tupleof[idx]) ~ "` of type : `" ~ typeof(this.tupleof[idx]).stringof ~ "`"); } return result; } /// Workaround multiple `E` declaration in `static foreach` when inline private template IsSetInfo(T) { enum bool IsSetInfo = is(T : SetInfo!E, E); } } unittest { import dub.internal.configy.Read; const str1 = `{ "registryUrls": [ "http://foo.bar\/optional\/escape" ], "customCachePaths": [ "foo/bar", "foo/foo" ], "skipRegistry": "all", "defaultCompiler": "dmd", "defaultArchitecture": "fooarch", "defaultLowMemory": false, "defaultEnvironments": { "VAR2": "settings.VAR2", "VAR3": "settings.VAR3", "VAR4": "settings.VAR4" } }`; const str2 = `{ "registryUrls": [ 
"http://bar.foo" ], "customCachePaths": [ "bar/foo", "bar/bar" ], "skipRegistry": "none", "defaultCompiler": "ldc", "defaultArchitecture": "bararch", "defaultLowMemory": true, "defaultEnvironments": { "VAR": "Hi", } }`; auto c1 = parseConfigString!Settings(str1, "/dev/null"); assert(c1.registryUrls == [ "http://foo.bar/optional/escape" ]); assert(c1.customCachePaths == [ NativePath("foo/bar"), NativePath("foo/foo") ]); assert(c1.skipRegistry == SkipPackageSuppliers.all); assert(c1.defaultCompiler == "dmd"); assert(c1.defaultArchitecture == "fooarch"); assert(c1.defaultLowMemory == false); assert(c1.defaultEnvironments.length == 3); assert(c1.defaultEnvironments["VAR2"] == "settings.VAR2"); assert(c1.defaultEnvironments["VAR3"] == "settings.VAR3"); assert(c1.defaultEnvironments["VAR4"] == "settings.VAR4"); auto c2 = parseConfigString!Settings(str2, "/dev/null"); assert(c2.registryUrls == [ "http://bar.foo" ]); assert(c2.customCachePaths == [ NativePath("bar/foo"), NativePath("bar/bar") ]); assert(c2.skipRegistry == SkipPackageSuppliers.none); assert(c2.defaultCompiler == "ldc"); assert(c2.defaultArchitecture == "bararch"); assert(c2.defaultLowMemory == true); assert(c2.defaultEnvironments.length == 1); assert(c2.defaultEnvironments["VAR"] == "Hi"); auto m1 = c2.merge(c1); // c1 takes priority, so its registryUrls is first assert(m1.registryUrls == [ "http://foo.bar/optional/escape", "http://bar.foo" ]); // Same with CCP assert(m1.customCachePaths == [ NativePath("foo/bar"), NativePath("foo/foo"), NativePath("bar/foo"), NativePath("bar/bar"), ]); // c1 fields only assert(m1.skipRegistry == c1.skipRegistry); assert(m1.defaultCompiler == c1.defaultCompiler); assert(m1.defaultArchitecture == c1.defaultArchitecture); assert(m1.defaultLowMemory == c1.defaultLowMemory); assert(m1.defaultEnvironments == c1.defaultEnvironments); auto m2 = c1.merge(c2); assert(m2.registryUrls == [ "http://bar.foo", "http://foo.bar/optional/escape" ]); assert(m2.customCachePaths == [ 
NativePath("bar/foo"), NativePath("bar/bar"), NativePath("foo/bar"), NativePath("foo/foo"), ]); assert(m2.skipRegistry == c2.skipRegistry); assert(m2.defaultCompiler == c2.defaultCompiler); assert(m2.defaultArchitecture == c2.defaultArchitecture); assert(m2.defaultLowMemory == c2.defaultLowMemory); assert(m2.defaultEnvironments == c2.defaultEnvironments); auto m3 = Settings.init.merge(c1); assert(m3 == c1); } unittest { // Test that SkipPackageRegistry.default_ is not allowed import dub.internal.configy.Read; import std.exception : assertThrown; const str1 = `{ "skipRegistry": "default_" }`; assertThrown!Exception(parseConfigString!Settings(str1, "/dev/null")); } dub-1.40.0/source/dub/dependency.d000066400000000000000000001150471477246567400167410ustar00rootroot00000000000000/** Dependency specification functionality. Copyright: © 2012-2013 Matthias Dondorff, © 2012-2016 Sönke Ludwig License: Subject to the terms of the MIT license, as written in the included LICENSE.txt file. Authors: Matthias Dondorff, Sönke Ludwig */ module dub.dependency; import dub.internal.vibecompat.data.json; import dub.internal.vibecompat.inet.path; import dub.semver; import dub.internal.dyaml.stdsumtype; import std.algorithm; import std.array; import std.exception; import std.string; /// Represents a fully-qualified package name public struct PackageName { /// The underlying full name of the package private string fullName; /// Where the separator lies, if any private size_t separator; /// Creates a new instance of this struct public this(string fn) @safe pure { this.fullName = fn; if (auto idx = fn.indexOf(':')) this.separator = idx > 0 ? 
idx : fn.length; else // We were given `:foo` assert(0, "Argument to PackageName constructor needs to be " ~ "a fully qualified string"); } /// Private constructor to have nothrow / @nogc private this(string fn, size_t sep) @safe pure nothrow @nogc { this.fullName = fn; this.separator = sep; } /// The base package name in which the subpackages may live public PackageName main () const return @safe pure nothrow @nogc { return PackageName(this.fullName[0 .. this.separator], this.separator); } /// The subpackage name, or an empty string if there isn't public string sub () const return @safe pure nothrow @nogc { // Return `null` instead of an empty string so that // it can be used in a boolean context, e.g. // `if (name.sub)` would be true with empty string return this.separator < this.fullName.length ? this.fullName[this.separator + 1 .. $] : null; } /// Human readable representation public string toString () const return scope @safe pure nothrow @nogc { return this.fullName; } /// public int opCmp (in PackageName other) const scope @safe pure nothrow @nogc { import core.internal.string : dstrcmp; return dstrcmp(this.toString(), other.toString()); } /// public bool opEquals (in PackageName other) const scope @safe pure nothrow @nogc { return this.toString() == other.toString(); } } /** Encapsulates the name of a package along with its dependency specification. */ struct PackageDependency { /// Backward compatibility deprecated("Use the constructor that accepts a `PackageName` as first argument") this(string n, Dependency s = Dependency.init) @safe pure { this.name = PackageName(n); this.spec = s; } // Remove once deprecated overload is gone this(PackageName n, Dependency s = Dependency.init) @safe pure nothrow @nogc { this.name = n; this.spec = s; } int opCmp(in typeof(this) other) @safe const { return name == other.name ? spec.opCmp(other.spec) : name.opCmp(other.name); } /// Name of the referenced package. 
PackageName name; /// Dependency specification used to select a particular version of the package. Dependency spec; } /** Represents a dependency specification. A dependency specification either represents a specific version or version range, or a path to a package. In addition to that it has `optional` and `default_` flags to control how non-mandatory dependencies are handled. The package name is notably not part of the dependency specification. */ struct Dependency { /// We currently support 3 'types' private alias Value = SumType!(VersionRange, NativePath, Repository); /// Used by `toString` private static immutable string[] BooleanOptions = [ "optional", "default" ]; // Shortcut to create >=0.0.0 private enum ANY_IDENT = "*"; private Value m_value = Value(VersionRange.Invalid); private bool m_optional; private bool m_default; /// A Dependency, which matches every valid version. public static immutable Dependency Any = Dependency(VersionRange.Any); /// An invalid dependency (with no possible version matches). public static immutable Dependency Invalid = Dependency(VersionRange.Invalid); deprecated("Use `Dependency.Any` instead") static @property Dependency any() @safe { return Dependency(VersionRange.Any); } deprecated("Use `Dependency.Invalid` instead") static @property Dependency invalid() @safe { return Dependency(VersionRange.Invalid); } /** Constructs a new dependency specification that matches a specific path. */ this(NativePath path) @safe { this.m_value = path; } /** Constructs a new dependency specification that matches a specific Git reference. */ this(Repository repository) @safe { this.m_value = repository; } /** Constructs a new dependency specification from a string See the `versionSpec` property for a description of the accepted contents of that string. */ this(string spec) @safe { this(VersionRange.fromString(spec)); } /** Constructs a new dependency specification that matches a specific version. 
*/ this(const Version ver) @safe { this(VersionRange(ver, ver)); } /// Construct a version from a range of possible values this (VersionRange rng) @safe { this.m_value = rng; } deprecated("Instantiate the `Repository` struct with the string directly") this(Repository repository, string spec) @safe { assert(repository.m_ref is null); repository.m_ref = spec; this(repository); } /// If set, overrides any version based dependency selection. deprecated("Construct a new `Dependency` object instead") @property void path(NativePath value) @trusted { this.m_value = value; } /// ditto @property NativePath path() const @safe { return this.m_value.match!( (const NativePath p) => p, ( any ) => NativePath.init, ); } /// If set, overrides any version based dependency selection. deprecated("Construct a new `Dependency` object instead") @property void repository(Repository value) @trusted { this.m_value = value; } /// ditto @property Repository repository() const @safe { return this.m_value.match!( (const Repository p) => p, ( any ) => Repository.init, ); } /// Determines if the dependency is required or optional. @property bool optional() const scope @safe pure nothrow @nogc { return m_optional; } /// ditto @property void optional(bool optional) scope @safe pure nothrow @nogc { m_optional = optional; } /// Determines if an optional dependency should be chosen by default. @property bool default_() const scope @safe pure nothrow @nogc { return m_default; } /// ditto @property void default_(bool value) scope @safe pure nothrow @nogc { m_default = value; } /// Returns true $(I iff) the version range only matches a specific version. @property bool isExactVersion() const scope @safe { return this.m_value.match!( (NativePath v) => false, (Repository v) => false, (VersionRange v) => v.isExactVersion(), ); } /// Returns the exact version matched by the version range. 
@property Version version_() const @safe { auto range = this.m_value.match!( // Can be simplified to `=> assert(0)` once we drop support for v2.096 (NativePath p) { int dummy; if (dummy) return VersionRange.init; assert(0); }, (Repository r) { int dummy; if (dummy) return VersionRange.init; assert(0); }, (VersionRange v) => v, ); enforce(range.isExactVersion(), "Dependency "~range.toString()~" is no exact version."); return range.m_versA; } /// Sets/gets the matching version range as a specification string. deprecated("Create a new `Dependency` instead and provide a `VersionRange`") @property void versionSpec(string ves) @trusted { this.m_value = VersionRange.fromString(ves); } /// ditto deprecated("Use `Dependency.visit` and match `VersionRange`instead") @property string versionSpec() const @safe { return this.m_value.match!( (const NativePath p) => ANY_IDENT, (const Repository r) => r.m_ref, (const VersionRange p) => p.toString(), ); } /** Returns a modified dependency that gets mapped to a given path. This function will return an unmodified `Dependency` if it is not path based. Otherwise, the given `path` will be prefixed to the existing path. */ Dependency mapToPath(NativePath path) const @trusted { // NOTE Path is @system in vibe.d 0.7.x and in the compatibility layer return this.m_value.match!( (NativePath v) { if (v.empty || v.absolute) return this; auto ret = Dependency(path ~ v); ret.m_default = m_default; ret.m_optional = m_optional; return ret; }, (Repository v) => this, (VersionRange v) => this, ); } /** Returns a human-readable string representation of the dependency specification. */ string toString() const scope @trusted { // Trusted because `SumType.match` doesn't seem to support `scope` string Stringifier (T, string pre = null) (const T v) { const bool extra = this.optional || this.default_; return format("%s%s%s%-(%s, %)%s", pre, v, extra ? " (" : "", BooleanOptions[!this.optional .. 1 + this.default_], extra ? 
")" : ""); } return this.m_value.match!( Stringifier!Repository, Stringifier!(NativePath, "@"), Stringifier!VersionRange ); } /** Returns a JSON representation of the dependency specification. Simple specifications will be represented as a single specification string (`versionSpec`), while more complex specifications will be represented as a JSON object with optional "version", "path", "optional" and "default" fields. Params: selections = We are serializing `dub.selections.json`, don't write out `optional` and `default`. */ Json toJson(bool selections = false) const @safe { // NOTE Path and Json is @system in vibe.d 0.7.x and in the compatibility layer static void initJson(ref Json j, bool opt, bool def, bool s = selections) { j = Json.emptyObject; if (!s && opt) j["optional"] = true; if (!s && def) j["default"] = true; } Json json; this.m_value.match!( (const NativePath v) @trusted { initJson(json, optional, default_); json["path"] = v.toString(); }, (const Repository v) @trusted { initJson(json, optional, default_); json["repository"] = v.toString(); json["version"] = v.m_ref; }, (const VersionRange v) @trusted { if (!selections && (optional || default_)) { initJson(json, optional, default_); json["version"] = v.toString(); } else json = Json(v.toString()); }, ); return json; } @trusted unittest { Dependency d = Dependency("==1.0.0"); assert(d.toJson() == Json("1.0.0"), "Failed: " ~ d.toJson().toPrettyString()); d = fromJson((fromJson(d.toJson())).toJson()); assert(d == Dependency("1.0.0")); assert(d.toJson() == Json("1.0.0"), "Failed: " ~ d.toJson().toPrettyString()); } @trusted unittest { Dependency dependency = Dependency(Repository("git+http://localhost", "1.0.0")); Json expected = Json([ "repository": Json("git+http://localhost"), "version": Json("1.0.0") ]); assert(dependency.toJson() == expected, "Failed: " ~ dependency.toJson().toPrettyString()); } @trusted unittest { Dependency d = Dependency(NativePath("dir")); Json expected = Json([ "path": Json("dir") 
]); assert(d.toJson() == expected, "Failed: " ~ d.toJson().toPrettyString()); } /** Constructs a new `Dependency` from its JSON representation. See `toJson` for a description of the JSON format. */ static Dependency fromJson(Json verspec) @trusted { // NOTE Path and Json is @system in vibe.d 0.7.x and in the compatibility layer Dependency dep; if( verspec.type == Json.Type.object ){ if( auto pp = "path" in verspec ) { dep = Dependency(NativePath(verspec["path"].get!string)); } else if (auto repository = "repository" in verspec) { enforce("version" in verspec, "No version field specified!"); enforce(repository.length > 0, "No repository field specified!"); dep = Dependency(Repository( repository.get!string, verspec["version"].get!string)); } else { enforce("version" in verspec, "No version field specified!"); auto ver = verspec["version"].get!string; // Using the string to be able to specify a range of versions. dep = Dependency(ver); } if (auto po = "optional" in verspec) dep.optional = po.get!bool; if (auto po = "default" in verspec) dep.default_ = po.get!bool; } else { // canonical "package-id": "version" dep = Dependency(verspec.get!string); } return dep; } @trusted unittest { assert(fromJson(parseJsonString("\">=1.0.0 <2.0.0\"")) == Dependency(">=1.0.0 <2.0.0")); Dependency parsed = fromJson(parseJsonString(` { "version": "2.0.0", "optional": true, "default": true, "path": "path/to/package" } `)); Dependency d = NativePath("path/to/package"); // supposed to ignore the version spec d.optional = true; d.default_ = true; assert(d == parsed); } /** Compares dependency specifications. These methods are suitable for equality comparisons, as well as for using `Dependency` as a key in hash or tree maps. 
*/ bool opEquals(in Dependency o) const scope @safe { if (o.m_optional != this.m_optional) return false; if (o.m_default != this.m_default) return false; return this.m_value == o.m_value; } /// ditto int opCmp(in Dependency o) const @safe { alias ResultMatch = match!( (VersionRange r1, VersionRange r2) => r1.opCmp(r2), (_1, _2) => 0, ); if (auto result = ResultMatch(this.m_value, o.m_value)) return result; if (m_optional != o.m_optional) return m_optional ? -1 : 1; return 0; } /** Determines if this dependency specification is valid. A specification is valid if it can match at least one version. */ bool valid() const @safe { return this.m_value.match!( (NativePath v) => true, (Repository v) => true, (VersionRange v) => v.isValid(), ); } /** Determines if this dependency specification matches arbitrary versions. This is true in particular for the `any` constant. */ deprecated("Use `VersionRange.matchesAny` directly") bool matchesAny() const scope @safe { return this.m_value.match!( (NativePath v) => true, (Repository v) => true, (VersionRange v) => v.matchesAny(), ); } /** Tests if the specification matches a specific version. */ bool matches(string vers, VersionMatchMode mode = VersionMatchMode.standard) const @safe { return matches(Version(vers), mode); } /// ditto bool matches(in Version v, VersionMatchMode mode = VersionMatchMode.standard) const @safe { return this.m_value.match!( (NativePath i) => true, (Repository i) => true, (VersionRange i) => i.matchesAny() || i.matches(v, mode), ); } /** Merges two dependency specifications. The result is a specification that matches the intersection of the set of versions matched by the individual specifications. Note that this result can be invalid (i.e. not match any version). */ Dependency merge(ref const(Dependency) o) const @trusted { alias Merger = match!( (const NativePath a, const NativePath b) => a == b ? 
this : Invalid, (const NativePath a, any ) => o, ( any , const NativePath b) => this, (const Repository a, const Repository b) => a.m_ref == b.m_ref ? this : Invalid, (const Repository a, any ) => this, ( any , const Repository b) => o, (const VersionRange a, const VersionRange b) { if (a.matchesAny()) return o; if (b.matchesAny()) return this; VersionRange copy = a; copy.merge(b); if (!copy.isValid()) return Invalid; return Dependency(copy); } ); Dependency ret = Merger(this.m_value, o.m_value); ret.m_optional = m_optional && o.m_optional; return ret; } } /// Allow direct access to the underlying dependency public auto visit (Handlers...) (const auto ref Dependency dep) { return dep.m_value.match!(Handlers); } //// Ditto public auto visit (Handlers...) (auto ref Dependency dep) { return dep.m_value.match!(Handlers); } unittest { Dependency a = Dependency(">=1.1.0"), b = Dependency(">=1.3.0"); assert (a.merge(b).valid() && a.merge(b).toString() == ">=1.3.0", a.merge(b).toString()); assertThrown(Dependency("<=2.0.0 >=1.0.0")); assertThrown(Dependency(">=2.0.0 <=1.0.0")); a = Dependency(">=1.0.0 <=5.0.0"); b = Dependency(">=2.0.0"); assert (a.merge(b).valid() && a.merge(b).toString() == ">=2.0.0 <=5.0.0", a.merge(b).toString()); assertThrown(a = Dependency(">1.0.0 ==5.0.0"), "Construction is invalid"); a = Dependency(">1.0.0"); b = Dependency("<2.0.0"); assert (a.merge(b).valid(), a.merge(b).toString()); assert (a.merge(b).toString() == ">1.0.0 <2.0.0", a.merge(b).toString()); a = Dependency(">2.0.0"); b = Dependency("<1.0.0"); assert (!(a.merge(b)).valid(), a.merge(b).toString()); a = Dependency(">=2.0.0"); b = Dependency("<=1.0.0"); assert (!(a.merge(b)).valid(), a.merge(b).toString()); a = Dependency("==2.0.0"); b = Dependency("==1.0.0"); assert (!(a.merge(b)).valid(), a.merge(b).toString()); a = Dependency("1.0.0"); b = Dependency("==1.0.0"); assert (a == b); a = Dependency("<=2.0.0"); b = Dependency("==1.0.0"); Dependency m = a.merge(b); assert (m.valid(), 
m.toString()); assert (m.matches(Version("1.0.0"))); assert (!m.matches(Version("1.1.0"))); assert (!m.matches(Version("0.0.1"))); // branches / head revisions a = Dependency(Version.masterBranch); assert(a.valid()); assert(a.matches(Version.masterBranch)); b = Dependency(Version.masterBranch); m = a.merge(b); assert(m.matches(Version.masterBranch)); //assertThrown(a = Dependency(Version.MASTER_STRING ~ " <=1.0.0"), "Construction invalid"); assertThrown(a = Dependency(">=1.0.0 " ~ Version.masterBranch.toString()), "Construction invalid"); immutable string branch1 = Version.branchPrefix ~ "Branch1"; immutable string branch2 = Version.branchPrefix ~ "Branch2"; //assertThrown(a = Dependency(branch1 ~ " " ~ branch2), "Error: '" ~ branch1 ~ " " ~ branch2 ~ "' succeeded"); //assertThrown(a = Dependency(Version.MASTER_STRING ~ " " ~ branch1), "Error: '" ~ Version.MASTER_STRING ~ " " ~ branch1 ~ "' succeeded"); a = Dependency(branch1); b = Dependency(branch2); assert(!a.merge(b).valid, "Shouldn't be able to merge to different branches"); b = a.merge(a); assert(b.valid, "Should be able to merge the same branches. (?)"); assert(a == b); a = Dependency(branch1); assert(a.matches(branch1), "Dependency(branch1) does not match 'branch1'"); assert(a.matches(Version(branch1)), "Dependency(branch1) does not match Version('branch1')"); assert(!a.matches(Version.masterBranch), "Dependency(branch1) matches Version.masterBranch"); assert(!a.matches(branch2), "Dependency(branch1) matches 'branch2'"); assert(!a.matches(Version("1.0.0")), "Dependency(branch1) matches '1.0.0'"); a = Dependency(">=1.0.0"); assert(!a.matches(Version(branch1)), "Dependency(1.0.0) matches 'branch1'"); // Testing optional dependencies. 
a = Dependency(">=1.0.0"); assert(!a.optional, "Default is not optional."); b = a; assert(!a.merge(b).optional, "Merging two not optional dependencies wrong."); a.optional = true; assert(!a.merge(b).optional, "Merging optional with not optional wrong."); b.optional = true; assert(a.merge(b).optional, "Merging two optional dependencies wrong."); // SemVer's sub identifiers. a = Dependency(">=1.0.0-beta"); assert(!a.matches(Version("1.0.0-alpha")), "Failed: match 1.0.0-alpha with >=1.0.0-beta"); assert(a.matches(Version("1.0.0-beta")), "Failed: match 1.0.0-beta with >=1.0.0-beta"); assert(a.matches(Version("1.0.0")), "Failed: match 1.0.0 with >=1.0.0-beta"); assert(a.matches(Version("1.0.0-rc")), "Failed: match 1.0.0-rc with >=1.0.0-beta"); // Approximate versions. a = Dependency("~>3.0"); b = Dependency(">=3.0.0 <4.0.0-0"); assert(a == b, "Testing failed: " ~ a.toString()); assert(a.matches(Version("3.1.146")), "Failed: Match 3.1.146 with ~>0.1.2"); assert(!a.matches(Version("0.2.0")), "Failed: Match 0.2.0 with ~>0.1.2"); assert(!a.matches(Version("4.0.0-beta.1"))); a = Dependency("~>3.0.0"); assert(a == Dependency(">=3.0.0 <3.1.0-0"), "Testing failed: " ~ a.toString()); a = Dependency("~>3.5"); assert(a == Dependency(">=3.5.0 <4.0.0-0"), "Testing failed: " ~ a.toString()); a = Dependency("~>3.5.0"); assert(a == Dependency(">=3.5.0 <3.6.0-0"), "Testing failed: " ~ a.toString()); assert(!Dependency("~>3.0.0").matches(Version("3.1.0-beta"))); a = Dependency("^0.1.2"); assert(a == Dependency(">=0.1.2 <0.1.3-0")); a = Dependency("^1.2.3"); assert(a == Dependency(">=1.2.3 <2.0.0-0"), "Testing failed: " ~ a.toString()); a = Dependency("^1.2"); assert(a == Dependency(">=1.2.0 <2.0.0-0"), "Testing failed: " ~ a.toString()); a = Dependency("~>0.1.1"); b = Dependency("==0.1.0"); assert(!a.merge(b).valid); b = Dependency("==0.1.9999"); assert(a.merge(b).valid); b = Dependency("==0.2.0"); assert(!a.merge(b).valid); b = Dependency("==0.2.0-beta.1"); assert(!a.merge(b).valid); a 
= Dependency("~>1.0.1-beta"); b = Dependency(">=1.0.1-beta <1.1.0-0"); assert(a == b, "Testing failed: " ~ a.toString()); assert(a.matches(Version("1.0.1-beta"))); assert(a.matches(Version("1.0.1-beta.6"))); a = Dependency("~d2test"); assert(!a.optional); assert(a.valid); assert(a.version_ == Version("~d2test")); a = Dependency("==~d2test"); assert(!a.optional); assert(a.valid); assert(a.version_ == Version("~d2test")); a = Dependency.Any; assert(!a.optional); assert(a.valid); assertThrown(a.version_); assert(a.matches(Version.masterBranch)); assert(a.matches(Version("1.0.0"))); assert(a.matches(Version("0.0.1-pre"))); b = Dependency(">=1.0.1"); assert(b == a.merge(b)); assert(b == b.merge(a)); b = Dependency(Version.masterBranch); assert(a.merge(b) == b); assert(b.merge(a) == b); a.optional = true; assert(a.matches(Version.masterBranch)); assert(a.matches(Version("1.0.0"))); assert(a.matches(Version("0.0.1-pre"))); b = Dependency(">=1.0.1"); assert(b == a.merge(b)); assert(b == b.merge(a)); b = Dependency(Version.masterBranch); assert(a.merge(b) == b); assert(b.merge(a) == b); assert(Dependency("1.0.0").matches(Version("1.0.0+foo"))); assert(Dependency("1.0.0").matches(Version("1.0.0+foo"), VersionMatchMode.standard)); assert(!Dependency("1.0.0").matches(Version("1.0.0+foo"), VersionMatchMode.strict)); assert(Dependency("1.0.0+foo").matches(Version("1.0.0+foo"), VersionMatchMode.strict)); assert(Dependency("~>1.0.0+foo").matches(Version("1.0.0+foo"), VersionMatchMode.strict)); assert(Dependency("~>1.0.0").matches(Version("1.0.0+foo"), VersionMatchMode.strict)); } unittest { assert(VersionRange.fromString("~>1.0.4").toString() == "~>1.0.4"); assert(VersionRange.fromString("~>1.4").toString() == "~>1.4"); // https://github.com/dlang/dub/issues/2830 assert(VersionRange.fromString("~>2").toString() == "~>2.0"); assert(VersionRange.fromString("~>5.0").toString() == "~>5.0"); assert(VersionRange.fromString("~>1.0.4+1.2.3").toString() == "~>1.0.4"); 
assert(VersionRange.fromString("^0.1.2").toString() == "^0.1.2"); assert(VersionRange.fromString("^1.2.3").toString() == "^1.2.3"); assert(VersionRange.fromString("^1.2").toString() == "~>1.2"); // equivalent; prefer ~> } /** Represents an SCM repository. */ struct Repository { private string m_remote; private string m_ref; private Kind m_kind; enum Kind { git, } /** Params: remote = Repository remote. ref_ = Reference to use (SHA1, tag, branch name...) */ this(string remote, string ref_) { enforce(remote.startsWith("git+"), "Unsupported repository type (supports: git+URL)"); m_remote = remote["git+".length .. $]; m_kind = Kind.git; m_ref = ref_; assert(m_remote.length); assert(m_ref.length); } /// Ditto deprecated("Use the constructor accepting a second parameter named `ref_`") this(string remote) { enforce(remote.startsWith("git+"), "Unsupported repository type (supports: git+URL)"); m_remote = remote["git+".length .. $]; m_kind = Kind.git; assert(m_remote.length); } string toString() const nothrow pure @safe { if (empty) return null; string kindRepresentation; final switch (kind) { case Kind.git: kindRepresentation = "git"; } return kindRepresentation~"+"~remote; } /** Returns: Repository URL or path. */ @property string remote() const @nogc nothrow pure @safe in { assert(m_remote !is null); } do { return m_remote; } /** Returns: The reference (commit hash, branch name, tag) we are targeting */ @property string ref_() const @nogc nothrow pure @safe in { assert(m_remote !is null); } in { assert(m_ref !is null); } do { return m_ref; } /** Returns: Repository type. */ @property Kind kind() const @nogc nothrow pure @safe { return m_kind; } /** Returns: Whether the repository was initialized with an URL or path. */ @property bool empty() const @nogc nothrow pure @safe { return m_remote.empty; } } /** Represents a version in semantic version format, or a branch identifier. 
This can either have the form "~master", where "master" is a branch name, or the form "major.update.bugfix-prerelease+buildmetadata" (see the Semantic Versioning Specification v2.0.0 at http://semver.org/). */ struct Version { private { static immutable MAX_VERS = "99999.0.0"; static immutable masterString = "~master"; enum branchPrefix = '~'; string m_version; } static immutable Version minRelease = Version("0.0.0"); static immutable Version maxRelease = Version(MAX_VERS); static immutable Version masterBranch = Version(masterString); /** Constructs a new `Version` from its string representation. */ this(string vers) @safe pure { enforce(vers.length > 1, "Version strings must not be empty."); if (vers[0] != branchPrefix) enforce(vers.isValidVersion(), "Invalid SemVer format: " ~ vers); m_version = vers; } /** Constructs a new `Version` from its string representation. This method is equivalent to calling the constructor and is used as an endpoint for the serialization framework. */ static Version fromString(string vers) @safe pure { return Version(vers); } bool opEquals(in Version oth) const scope @safe pure { return opCmp(oth) == 0; } /// Tests if this represents a branch instead of a version. @property bool isBranch() const scope @safe pure nothrow @nogc { return m_version.length > 0 && m_version[0] == branchPrefix; } /// Tests if this represents the master branch "~master". @property bool isMaster() const scope @safe pure nothrow @nogc { return m_version == masterString; } /** Tests if this represents a pre-release version. Note that branches are always considered pre-release versions. */ @property bool isPreRelease() const scope @safe pure nothrow @nogc { if (isBranch) return true; return isPreReleaseVersion(m_version); } /** Tests two versions for equality, according to the selected match mode. 
*/ bool matches(in Version other, VersionMatchMode mode = VersionMatchMode.standard) const scope @safe pure { if (mode == VersionMatchMode.strict) return this.toString() == other.toString(); return this == other; } /** Compares two versions/branches for precedence. Versions generally have precedence over branches and the master branch has precedence over other branches. Apart from that, versions are compared using SemVer semantics, while branches are compared lexicographically. */ int opCmp(in Version other) const scope @safe pure { if (isBranch || other.isBranch) { if(m_version == other.m_version) return 0; if (!isBranch) return 1; else if (!other.isBranch) return -1; if (isMaster) return 1; else if (other.isMaster) return -1; return this.m_version < other.m_version ? -1 : 1; } return compareVersions(m_version, other.m_version); } /// Returns the string representation of the version/branch. string toString() const return scope @safe pure nothrow @nogc { return m_version; } } /** * A range of versions that are acceptable * * While not directly described in SemVer v2.0.0, a common set * of range operators have appeared among package managers. * We mostly NPM's: https://semver.npmjs.com/ * * Hence the acceptable forms for this string are as follows: * * $(UL * $(LI `"1.0.0"` - a single version in SemVer format) * $(LI `"==1.0.0"` - alternative single version notation) * $(LI `">1.0.0"` - version range with a single bound) * $(LI `">1.0.0 <2.0.0"` - version range with two bounds) * $(LI `"~>1.0.0"` - a fuzzy version range) * $(LI `"~>1.0"` - a fuzzy version range with partial version) * $(LI `"^1.0.0"` - semver compatible version range (same version if 0.x.y, ==major >=minor.patch if x.y.z)) * $(LI `"^1.0"` - same as ^1.0.0) * $(LI `"~master"` - a branch name) * $(LI `"*"` - match any version (see also `VersionRange.Any`)) * ) * * Apart from "$(LT)" and "$(GT)", "$(GT)=" and "$(LT)=" are also valid * comparators. 
*/ public struct VersionRange { private Version m_versA; private Version m_versB; private bool m_inclusiveA = true; // A comparison > (true) or >= (false) private bool m_inclusiveB = true; // B comparison < (true) or <= (false) /// Matches any version public static immutable Any = VersionRange(Version.minRelease, Version.maxRelease); /// Doesn't match any version public static immutable Invalid = VersionRange(Version.maxRelease, Version.minRelease); /// public int opCmp (in VersionRange o) const scope @safe { if (m_inclusiveA != o.m_inclusiveA) return m_inclusiveA < o.m_inclusiveA ? -1 : 1; if (m_inclusiveB != o.m_inclusiveB) return m_inclusiveB < o.m_inclusiveB ? -1 : 1; if (m_versA != o.m_versA) return m_versA < o.m_versA ? -1 : 1; if (m_versB != o.m_versB) return m_versB < o.m_versB ? -1 : 1; return 0; } public bool matches (in Version v, VersionMatchMode mode = VersionMatchMode.standard) const scope @safe { if (m_versA.isBranch) { enforce(this.isExactVersion()); return m_versA == v; } if (v.isBranch) return m_versA == v; if (m_versA == m_versB) return this.m_versA.matches(v, mode); return doCmp(m_inclusiveA, m_versA, v) && doCmp(m_inclusiveB, v, m_versB); } /// Modify in place public void merge (const VersionRange o) @safe { int acmp = m_versA.opCmp(o.m_versA); int bcmp = m_versB.opCmp(o.m_versB); this.m_inclusiveA = !m_inclusiveA && acmp >= 0 ? false : o.m_inclusiveA; this.m_versA = acmp > 0 ? m_versA : o.m_versA; this.m_inclusiveB = !m_inclusiveB && bcmp <= 0 ? false : o.m_inclusiveB; this.m_versB = bcmp < 0 ? m_versB : o.m_versB; } /// Returns true $(I iff) the version range only matches a specific version. @property bool isExactVersion() const scope @safe { return this.m_versA == this.m_versB; } /// Determines if this dependency specification matches arbitrary versions. /// This is true in particular for the `any` constant. 
public bool matchesAny() const scope @safe { return this.m_inclusiveA && this.m_inclusiveB && this.m_versA == Version.minRelease && this.m_versB == Version.maxRelease; } unittest { assert(VersionRange.fromString("*").matchesAny); assert(!VersionRange.fromString(">0.0.0").matchesAny); assert(!VersionRange.fromString(">=1.0.0").matchesAny); assert(!VersionRange.fromString("<1.0.0").matchesAny); } public static VersionRange fromString (string ves) @safe { static import std.string; enforce(ves.length > 0); if (ves == Dependency.ANY_IDENT) { // Any version is good. ves = ">=0.0.0"; } if (ves.startsWith("~>")) { // Shortcut: "~>x.y.z" variant. Last non-zero number will indicate // the base for this so something like this: ">=x.y.z =x.y.z <(x+1).0.0-0 // ^x.y is equivalent to ^x.y.0. ves = ves[1..$].expandVersion; return VersionRange( Version(ves), Version(bumpIncompatibleVersion(ves) ~ "-0"), true, false); } if (ves[0] == Version.branchPrefix) { auto ver = Version(ves); return VersionRange(ver, ver, true, true); } if (std.string.indexOf("><=", ves[0]) == -1) { auto ver = Version(ves); return VersionRange(ver, ver, true, true); } auto cmpa = skipComp(ves); size_t idx2 = std.string.indexOf(ves, " "); if (idx2 == -1) { if (cmpa == "<=" || cmpa == "<") return VersionRange(Version.minRelease, Version(ves), true, (cmpa == "<=")); if (cmpa == ">=" || cmpa == ">") return VersionRange(Version(ves), Version.maxRelease, (cmpa == ">="), true); // Converts "==" to ">=a&&<=a", which makes merging easier return VersionRange(Version(ves), Version(ves), true, true); } enforce(cmpa == ">" || cmpa == ">=", "First comparison operator expected to be either > or >=, not " ~ cmpa); assert(ves[idx2] == ' '); VersionRange ret; ret.m_versA = Version(ves[0..idx2]); ret.m_inclusiveA = cmpa == ">="; string v2 = ves[idx2+1..$]; auto cmpb = skipComp(v2); enforce(cmpb == "<" || cmpb == "<=", "Second comparison operator expected to be either < or <=, not " ~ cmpb); ret.m_versB = Version(v2); 
ret.m_inclusiveB = cmpb == "<="; enforce(!ret.m_versA.isBranch && !ret.m_versB.isBranch, format("Cannot compare branches: %s", ves)); enforce(ret.m_versA <= ret.m_versB, "First version must not be greater than the second one."); return ret; } /// Returns a string representation of this range string toString() const @safe { static import std.string; string r; if (this == Invalid) return "no"; if (this.matchesAny()) return "*"; if (this.isExactVersion() && m_inclusiveA && m_inclusiveB) { // Special "==" case if (m_versA == Version.masterBranch) return "~master"; else return m_versA.toString(); } // "~>", "^" case if (m_inclusiveA && !m_inclusiveB && !m_versA.isBranch) { auto vs = m_versA.toString(); auto i1 = std.string.indexOf(vs, '-'), i2 = std.string.indexOf(vs, '+'); auto i12 = i1 >= 0 ? i2 >= 0 ? i1 < i2 ? i1 : i2 : i1 : i2; auto va = i12 >= 0 ? vs[0 .. i12] : vs; auto parts = va.splitter('.').array; assert(parts.length == 3, "Version string with a digit group count != 3: "~va); // Start at 1 because the notation `~>1` and `^1` are equivalent // to `~>1.0` and `^1.0`, and the latter are better understood // and recognized by users. See for example issue 2830. foreach (i; 1 .. 3) { auto vp = parts[0 .. i+1].join("."); auto ve = Version(expandVersion(vp)); auto veb = Version(bumpVersion(vp) ~ "-0"); if (ve == m_versA && veb == m_versB) return "~>" ~ vp; auto veb2 = Version(bumpIncompatibleVersion(expandVersion(vp)) ~ "-0"); if (ve == m_versA && veb2 == m_versB) return "^" ~ vp; } } if (m_versA != Version.minRelease || !m_inclusiveA) r = (m_inclusiveA ? ">=" : ">") ~ m_versA.toString(); if (m_versB != Version.maxRelease || !m_inclusiveB) r ~= (r.length == 0 ? "" : " ") ~ (m_inclusiveB ? "<=" : "<") ~ m_versB.toString(); return r; } public bool isValid() const @safe { return m_versA <= m_versB && doCmp(m_inclusiveA && m_inclusiveB, m_versA, m_versB); } private static bool doCmp(bool inclusive, in Version a, in Version b) @safe { return inclusive ? 
a <= b : a < b; } private static bool isDigit(char ch) @safe { return ch >= '0' && ch <= '9'; } private static string skipComp(ref string c) @safe { size_t idx = 0; while (idx < c.length && !isDigit(c[idx]) && c[idx] != Version.branchPrefix) idx++; enforce(idx < c.length, "Expected version number in version spec: "~c); string cmp = idx==c.length-1||idx==0? ">=" : c[0..idx]; c = c[idx..$]; switch(cmp) { default: enforce(false, "No/Unknown comparison specified: '"~cmp~"'"); return ">="; case ">=": goto case; case ">": goto case; case "<=": goto case; case "<": goto case; case "==": return cmp; } } } enum VersionMatchMode { standard, /// Match according to SemVer rules strict /// Also include build metadata suffix in the comparison } unittest { Version a, b; assertNotThrown(a = Version("1.0.0"), "Constructing Version('1.0.0') failed"); assert(!a.isBranch, "Error: '1.0.0' treated as branch"); assert(a == a, "a == a failed"); assertNotThrown(a = Version(Version.masterString), "Constructing Version("~Version.masterString~"') failed"); assert(a.isBranch, "Error: '"~Version.masterString~"' treated as branch"); assert(a.isMaster); assert(a == Version.masterBranch, "Constructed master version != default master version."); assertNotThrown(a = Version("~BRANCH"), "Construction of branch Version failed."); assert(a.isBranch, "Error: '~BRANCH' not treated as branch'"); assert(!a.isMaster); assert(a == a, "a == a with branch failed"); // opCmp a = Version("1.0.0"); b = Version("1.0.0"); assert(a == b, "a == b with a:'1.0.0', b:'1.0.0' failed"); b = Version("2.0.0"); assert(a != b, "a != b with a:'1.0.0', b:'2.0.0' failed"); a = Version.masterBranch; b = Version("~BRANCH"); assert(a != b, "a != b with a:MASTER, b:'~branch' failed"); assert(a > b); assert(a < Version("0.0.0")); assert(b < Version("0.0.0")); assert(a > Version("~Z")); assert(b < Version("~Z")); // SemVer 2.0.0-rc.2 a = Version("2.0.0-rc.2"); b = Version("2.0.0-rc.3"); assert(a < b, "Failed: 2.0.0-rc.2 < 
2.0.0-rc.3"); a = Version("2.0.0-rc.2+build-metadata"); b = Version("2.0.0+build-metadata"); assert(a < b, "Failed: "~a.toString()~"<"~b.toString()); // 1.0.0-alpha < 1.0.0-alpha.1 < 1.0.0-beta.2 < 1.0.0-beta.11 < 1.0.0-rc.1 < 1.0.0 Version[] versions; versions ~= Version("1.0.0-alpha"); versions ~= Version("1.0.0-alpha.1"); versions ~= Version("1.0.0-beta.2"); versions ~= Version("1.0.0-beta.11"); versions ~= Version("1.0.0-rc.1"); versions ~= Version("1.0.0"); for(int i=1; i=0; --j) assert(versions[j] < versions[i], "Failed: " ~ versions[j].toString() ~ "<" ~ versions[i].toString()); assert(Version("1.0.0+a") == Version("1.0.0+b")); assert(Version("1.0.0").matches(Version("1.0.0+foo"))); assert(Version("1.0.0").matches(Version("1.0.0+foo"), VersionMatchMode.standard)); assert(!Version("1.0.0").matches(Version("1.0.0+foo"), VersionMatchMode.strict)); assert(Version("1.0.0+foo").matches(Version("1.0.0+foo"), VersionMatchMode.strict)); } // Erased version specification for dependency, converted to "" instead of ">0.0.0" // https://github.com/dlang/dub/issues/2901 unittest { assert(VersionRange.fromString(">0.0.0").toString() == ">0.0.0"); } /// Determines whether the given string is a Git hash. bool isGitHash(string hash) @nogc nothrow pure @safe { import std.ascii : isHexDigit; import std.utf : byCodeUnit; return hash.length >= 7 && hash.length <= 40 && hash.byCodeUnit.all!isHexDigit; } @nogc nothrow pure @safe unittest { assert(isGitHash("73535568b79a0b124bc1653002637a830ce0fcb8")); assert(!isGitHash("735")); assert(!isGitHash("73535568b79a0b124bc1-53002637a830ce0fcb8")); assert(!isGitHash("73535568b79a0b124bg1")); } dub-1.40.0/source/dub/dependencyresolver.d000066400000000000000000000416421477246567400205220ustar00rootroot00000000000000/** Dependency configuration/version resolution algorithm. Copyright: © 2014-2018 Sönke Ludwig License: Subject to the terms of the MIT license, as written in the included LICENSE.txt file. 
Authors: Sönke Ludwig */ module dub.dependencyresolver; import dub.dependency; import dub.internal.logging; import std.algorithm : all, canFind, filter, map, sort; import std.array : appender, array, join; import std.conv : to; import std.exception : enforce; import std.string : format, lastIndexOf; /** Resolves dependency graph with multiple configurations per package. The term "configuration" can mean any kind of alternative dependency configuration of a package. In particular, it can mean different versions of a package. `CONFIG` is an abstract type denoting a single configuration of a certain package, whereas `CONFIGS` denotes a set of configurations. The representation of both can be freely chosen, so that `CONFIGS` for example can be defined in terms of a version range. */ class DependencyResolver(CONFIGS, CONFIG) { /// Maximum number of loop rounds to do protected ulong loop_limit; /** * Construct an instance of this class * * Params: * limit = Maximum number of loop rounds to do */ public this (ulong limit) inout scope @safe pure nothrow @nogc { this.loop_limit = limit; } /// Compatibility overload deprecated("Use the overload that accepts a `ulong limit` argument") public this () scope @safe { // Leave the possibility to opt-out from the loop limit import std.process : environment; if (environment.get("DUB_NO_RESOLVE_LIMIT") !is null) this(ulong.max); else this(1_000_000); } /** Encapsulates a list of outgoing edges in the dependency graph. A value of this type represents a single dependency with multiple possible configurations for the target package. 
*/ static struct TreeNodes { PackageName pack; CONFIGS configs; DependencyType depType = DependencyType.required; size_t toHash() const nothrow @trusted { size_t ret = typeid(string).getHash(&pack); ret ^= typeid(CONFIGS).getHash(&configs); return ret; } bool opEqual(const scope ref TreeNodes other) const { return pack == other.pack && configs == other.configs; } int opCmp(const scope ref TreeNodes other) const { if (pack != other.pack) return pack < other.pack ? -1 : 1; if (configs != other.configs) return configs < other.configs ? -1 : 1; return 0; } } /** A single node in the dependency graph. Nodes are a combination of a package and a single package configuration. */ static struct TreeNode { PackageName pack; CONFIG config; size_t toHash() const nothrow @trusted { size_t ret = pack.hashOf(); ret ^= typeid(CONFIG).getHash(&config); return ret; } bool opEqual(const scope ref TreeNode other) const { return pack == other.pack && config == other.config; } int opCmp(const scope ref TreeNode other) const { if (pack != other.pack) return pack < other.pack ? -1 : 1; if (config != other.config) return config < other.config ? 
-1 : 1; return 0; } } CONFIG[PackageName] resolve(TreeNode root, bool throw_on_failure = true) { auto rootbase = root.pack.main; // build up the dependency graph, eliminating as many configurations/ // versions as possible ResolveContext context; context.configs[rootbase] = [ResolveConfig(root.config, true)]; ulong loop_counter = this.loop_limit; constrain(root, context, loop_counter); // Get best available results foreach (base; context.configs.keys) foreach (j, ref sc; context.configs[base]) if (sc.included){ context.result[base] = sc.config; break; } // remove any non-default optional dependencies purgeOptionalDependencies(root, context.result); // the root package is implied by the `root` argument and will not be // returned explicitly context.result.remove(rootbase); logDiagnostic("Dependency resolution result:"); foreach (d; context.result.keys.sort()) logDiagnostic(" %s: %s", d, context.result[d]); return context.result; } protected abstract CONFIG[] getAllConfigs(in PackageName pack); protected abstract CONFIG[] getSpecificConfigs(in PackageName pack, TreeNodes nodes); protected abstract TreeNodes[] getChildren(TreeNode node); protected abstract bool matches(CONFIGS configs, CONFIG config); private static struct ResolveConfig { CONFIG config; bool included; } private static struct ResolveContext { /** Contains all packages visited by the resolution process so far. 
The key is the qualified name of the package (base + sub) */ void[0][PackageName] visited; /// The finally chosen configurations for each package CONFIG[PackageName] result; /// The set of available configurations for each package ResolveConfig[][PackageName] configs; /// Determines if a certain package has already been processed bool isVisited(in PackageName package_) const { return (package_ in visited) !is null; } /// Marks a package as processed void setVisited(in PackageName package_) { visited[package_] = (void[0]).init; } /// Returns a deep clone ResolveContext clone() { ResolveContext ret; ret.visited = this.visited.dup; ret.result = this.result.dup; foreach (pack, cfgs; this.configs) { ret.configs[pack] = cfgs.dup; } return ret; } } /** Starting with a single node, fills `context` with a minimized set of configurations that form valid solutions. */ private void constrain(TreeNode n, ref ResolveContext context, ref ulong max_iterations) { auto base = n.pack.main; assert(base in context.configs); if (context.isVisited(n.pack)) return; context.setVisited(n.pack); auto dependencies = getChildren(n); foreach (dep; dependencies) { // lazily load all dependency configurations auto depbase = dep.pack.main; auto di = depbase in context.configs; if (!di) { context.configs[depbase] = getAllConfigs(depbase) .map!(c => ResolveConfig(c, true)) .array; di = depbase in context.configs; } // add any dependee defined dependency configurations foreach (sc; getSpecificConfigs(n.pack, dep)) if (!(*di).canFind!(c => c.config == sc)) *di = ResolveConfig(sc, true) ~ *di; // restrain the configurations to the current dependency spec bool any_config = false; foreach (i, ref c; *di) if (c.included) { if (!matches(dep.configs, c.config)) c.included = false; else any_config = true; } if (!any_config && dep.depType == DependencyType.required) { if ((*di).length) throw new ResolveException(n, dep, context); else throw new DependencyLoadException(n, dep); } } constrainDependencies(n, 
dependencies, 0, context, max_iterations); } /** Recurses back into `constrain` while recursively going through `n`'s dependencies. This attempts to constrain each dependency, while keeping each of them in a nested stack frame. This allows any errors to properly back propagate. */ private void constrainDependencies(TreeNode n, TreeNodes[] dependencies, size_t depidx, ref ResolveContext context, ref ulong max_iterations) { if (depidx >= dependencies.length) return; assert (--max_iterations > 0, "The dependency resolution process is taking too long. The" ~ " dependency graph is likely hitting a pathological case in" ~ " the resolution algorithm. Please file a bug report at" ~ " https://github.com/dlang/dub/issues and mention the package" ~ " recipe that reproduces this error."); auto dep = &dependencies[depidx]; auto depbase = dep.pack.main; auto depconfigs = context.configs[depbase]; Exception first_err; // try each configuration/version of the current dependency foreach (i, c; depconfigs) { if (c.included) { try { // try the configuration on a cloned context auto subcontext = context.clone; constrain(TreeNode(dep.pack, c.config), subcontext, max_iterations); constrainDependencies(n, dependencies, depidx+1, subcontext, max_iterations); // if a branch succeeded, replace the current context // with the one from the branch and return context = subcontext; return; } catch (Exception e) { if (!first_err) first_err = e; } } } // ignore unsatisfiable optional dependencies if (dep.depType != DependencyType.required) { auto subcontext = context.clone; constrainDependencies(n, dependencies, depidx+1, subcontext, max_iterations); context = subcontext; return; } // report the first error encountered to the user if (first_err) throw first_err; // should have thrown in constrainRec before reaching this assert(false, format("Got no configuration for dependency %s %s of %s %s!?", dep.pack, dep.configs, n.pack, n.config)); } private void purgeOptionalDependencies(TreeNode root, ref 
CONFIG[PackageName] configs) { bool[PackageName] required; bool[PackageName] visited; void markRecursively(TreeNode node) { if (node.pack in visited) return; visited[node.pack] = true; required[node.pack.main] = true; foreach (dep; getChildren(node).filter!(dep => dep.depType != DependencyType.optional)) if (auto dp = dep.pack.main in configs) markRecursively(TreeNode(dep.pack, *dp)); } // recursively mark all required dependencies of the concrete dependency tree markRecursively(root); // remove all unmarked configurations foreach (p; configs.keys.dup) if (p !in required) configs.remove(p); } final class ResolveException : Exception { import std.range : chain, only; import std.typecons : tuple; PackageName failedNode; this(TreeNode parent, TreeNodes dep, const scope ref ResolveContext context, string file = __FILE__, size_t line = __LINE__) { auto m = format("Unresolvable dependencies to package %s:", dep.pack.main); super(m, file, line); this.failedNode = dep.pack; auto failbase = failedNode.main; // Get partial results CONFIG[PackageName] partial_result; foreach (base; context.configs.keys) foreach (j, ref sc; context.configs[base]) if (sc.included){ partial_result[base] = sc.config; break; } // get the list of all dependencies to the failed package auto deps = context.visited.byKey .filter!(p => !!(p.main in partial_result)) .map!(p => TreeNode(p, partial_result[p.main])) .map!(n => getChildren(n) .filter!(d => d.pack.main == failbase) .map!(d => tuple(n, d)) ) .join .sort!((a, b) => a[0].pack < b[0].pack); foreach (d; deps) { // filter out trivial self-dependencies if (d[0].pack.main == failbase && matches(d[1].configs, d[0].config)) continue; msg ~= format("\n %s %s depends on %s %s", d[0].pack, d[0].config, d[1].pack, d[1].configs); } } } final class DependencyLoadException : Exception { TreeNode parent; TreeNodes dependency; this(TreeNode parent, TreeNodes dep) { auto m = format("Failed to find any versions for package %s, referenced by %s %s", dep.pack, 
parent.pack, parent.config); super(m, file, line); this.parent = parent; this.dependency = dep; } } } enum DependencyType { required, optionalDefault, optional } unittest { static struct IntConfig { int value; alias value this; enum invalid = IntConfig(-1); } static IntConfig ic(int v) { return IntConfig(v); } static struct IntConfigs { IntConfig[] configs; alias configs this; } static IntConfigs ics(IntConfig[] cfgs) { return IntConfigs(cfgs); } static PackageName pn(string name) { return PackageName(name); } static class TestResolver : DependencyResolver!(IntConfigs, IntConfig) { private TreeNodes[][string] m_children; this(TreeNodes[][string] children) { super(ulong.max); m_children = children; } protected override IntConfig[] getAllConfigs(in PackageName pack) { auto ret = appender!(IntConfig[]); foreach (p_; m_children.byKey) { // Note: We abuse subpackage notation to store configs const p = PackageName(p_); if (p.main != pack.main) continue; ret ~= ic(p.sub.to!uint); } ret.data.sort!"a>b"(); return ret.data; } protected override IntConfig[] getSpecificConfigs(in PackageName pack, TreeNodes nodes) { return null; } protected override TreeNodes[] getChildren(TreeNode node) { assert(node.pack.sub.length == 0); return m_children.get(node.pack.toString() ~ ":" ~ node.config.to!string(), null); } protected override bool matches(IntConfigs configs, IntConfig config) { return configs.canFind(config); } } // properly back up if conflicts are detected along the way (d:2 vs d:1) with (TestResolver) { auto res = new TestResolver([ "a:0": [TreeNodes(pn("b"), ics([ic(2), ic(1)])), TreeNodes(pn("d"), ics([ic(1)])), TreeNodes(pn("e"), ics([ic(2), ic(1)]))], "b:1": [TreeNodes(pn("c"), ics([ic(2), ic(1)])), TreeNodes(pn("d"), ics([ic(1)]))], "b:2": [TreeNodes(pn("c"), ics([ic(3), ic(2)])), TreeNodes(pn("d"), ics([ic(2), ic(1)]))], "c:1": [], "c:2": [], "c:3": [], "d:1": [], "d:2": [], "e:1": [], "e:2": [], ]); assert(res.resolve(TreeNode(pn("a"), ic(0))) == [pn("b"):ic(2), 
pn("c"):ic(3), pn("d"):ic(1), pn("e"):ic(2)], format("%s", res.resolve(TreeNode(pn("a"), ic(0))))); } // handle cyclic dependencies gracefully with (TestResolver) { auto res = new TestResolver([ "a:0": [TreeNodes(pn("b"), ics([ic(1)]))], "b:1": [TreeNodes(pn("b"), ics([ic(1)]))] ]); assert(res.resolve(TreeNode(pn("a"), ic(0))) == [pn("b"):ic(1)]); } // don't choose optional dependencies by default with (TestResolver) { auto res = new TestResolver([ "a:0": [TreeNodes(pn("b"), ics([ic(1)]), DependencyType.optional)], "b:1": [] ]); assert(res.resolve(TreeNode(pn("a"), ic(0))).length == 0, to!string(res.resolve(TreeNode(pn("a"), ic(0))))); } // choose default optional dependencies by default with (TestResolver) { auto res = new TestResolver([ "a:0": [TreeNodes(pn("b"), ics([ic(1)]), DependencyType.optionalDefault)], "b:1": [] ]); assert(res.resolve(TreeNode(pn("a"), ic(0))) == [pn("b"):ic(1)], to!string(res.resolve(TreeNode(pn("a"), ic(0))))); } // choose optional dependency if non-optional within the dependency tree with (TestResolver) { auto res = new TestResolver([ "a:0": [TreeNodes(pn("b"), ics([ic(1)]), DependencyType.optional), TreeNodes(pn("c"), ics([ic(1)]))], "b:1": [], "c:1": [TreeNodes(pn("b"), ics([ic(1)]))] ]); assert(res.resolve(TreeNode(pn("a"), ic(0))) == [pn("b"):ic(1), pn("c"):ic(1)], to!string(res.resolve(TreeNode(pn("a"), ic(0))))); } // don't choose optional dependency if non-optional outside of final dependency tree with (TestResolver) { auto res = new TestResolver([ "a:0": [TreeNodes(pn("b"), ics([ic(1)]), DependencyType.optional)], "b:1": [], "preset:0": [TreeNodes(pn("b"), ics([ic(1)]))] ]); assert(res.resolve(TreeNode(pn("a"), ic(0))).length == 0, to!string(res.resolve(TreeNode(pn("a"), ic(0))))); } // don't choose optional dependency if non-optional in a non-selected version with (TestResolver) { auto res = new TestResolver([ "a:0": [TreeNodes(pn("b"), ics([ic(1), ic(2)]))], "b:1": [TreeNodes(pn("c"), ics([ic(1)]))], "b:2": 
[TreeNodes(pn("c"), ics([ic(1)]), DependencyType.optional)], "c:1": [] ]); assert(res.resolve(TreeNode(pn("a"), ic(0))) == [pn("b"):ic(2)], to!string(res.resolve(TreeNode(pn("a"), ic(0))))); } // make sure non-satisfiable dependencies are not a problem, even if non-optional in some dependencies with (TestResolver) { auto res = new TestResolver([ "a:0": [TreeNodes(pn("b"), ics([ic(1), ic(2)]))], "b:1": [TreeNodes(pn("c"), ics([ic(2)]))], "b:2": [TreeNodes(pn("c"), ics([ic(2)]), DependencyType.optional)], "c:1": [] ]); assert(res.resolve(TreeNode(pn("a"), ic(0))) == [pn("b"):ic(2)], to!string(res.resolve(TreeNode(pn("a"), ic(0))))); } // check error message for multiple conflicting dependencies with (TestResolver) { auto res = new TestResolver([ "a:0": [TreeNodes(pn("b"), ics([ic(1)])), TreeNodes(pn("c"), ics([ic(1)]))], "b:1": [TreeNodes(pn("d"), ics([ic(1)]))], "c:1": [TreeNodes(pn("d"), ics([ic(2)]))], "d:1": [], "d:2": [] ]); try { res.resolve(TreeNode(pn("a"), ic(0))); assert(false, "Expected resolve to throw."); } catch (ResolveException e) { assert(e.msg == "Unresolvable dependencies to package d:" ~ "\n b 1 depends on d [1]" ~ "\n c 1 depends on d [2]"); } } // check error message for invalid dependency with (TestResolver) { auto res = new TestResolver([ "a:0": [TreeNodes(pn("b"), ics([ic(1)]))] ]); try { res.resolve(TreeNode(pn("a"), ic(0))); assert(false, "Expected resolve to throw."); } catch (DependencyLoadException e) { assert(e.msg == "Failed to find any versions for package b, referenced by a 0"); } } // regression: unresolvable optional dependency skips the remaining dependencies with (TestResolver) { auto res = new TestResolver([ "a:0": [ TreeNodes(pn("b"), ics([ic(2)]), DependencyType.optional), TreeNodes(pn("c"), ics([ic(1)])) ], "b:1": [], "c:1": [] ]); assert(res.resolve(TreeNode(pn("a"), ic(0))) == [pn("c"):ic(1)]); } } dub-1.40.0/source/dub/description.d000066400000000000000000000140521477246567400171400ustar00rootroot00000000000000/** Types 
for project descriptions (dub describe). Copyright: © 2015-2016 rejectedsoftware e.K. License: Subject to the terms of the MIT license, as written in the included LICENSE.txt file. Authors: Sönke Ludwig */ module dub.description; import dub.compilers.buildsettings; import dub.dependency; import dub.internal.vibecompat.data.serialization; /** Describes a complete project for use in IDEs or build tools. The build settings will be specific to the compiler, platform and configuration that has been selected. */ struct ProjectDescription { string rootPackage; /// Name of the root package being built string configuration; /// Name of the selected build configuration string buildType; /// Name of the selected build type string compiler; /// Canonical name of the compiler used (e.g. "dmd", "gdc" or "ldc") string[] architecture; /// Architecture constants for the selected platform (e.g. `["x86_64"]`) string[] platform; /// Platform constants for the selected platform (e.g. `["posix", "osx"]`) PackageDescription[] packages; /// All packages in the dependency tree TargetDescription[] targets; /// Build targets @ignore size_t[string] targetLookup; /// Target index by package name name /// Targets by name ref inout(TargetDescription) lookupTarget(string name) inout { import std.exception : enforce; auto pti = name in targetLookup; enforce(pti !is null, "Target '"~name~"' doesn't exist. Is the target type set to \"none\" in the package recipe?"); return targets[*pti]; } /// Projects by name ref inout(PackageDescription) lookupPackage(string name) inout { foreach (ref p; packages) if (p.name == name) { return p; } throw new Exception("Package '"~name~"' not found in dependency tree."); } /// Root package ref inout(PackageDescription) lookupRootPackage() inout { return lookupPackage(rootPackage); } } /** Describes the build settings and meta data of a single package. This structure contains the effective build settings and dependencies for the selected build platform. 
This structure is most useful for displaying information about a package in an IDE. Use `TargetDescription` instead when writing a build-tool. */ struct PackageDescription { string path; /// Path to the package string name; /// Qualified name of the package Version version_; /// Version of the package string description; string homepage; string[] authors; string copyright; string license; string[] dependencies; bool active; /// Does this package take part in the build? string configuration; /// The configuration that is built @byName TargetType targetType; string targetPath; string targetName; string targetFileName; string workingDirectory; string mainSourceFile; string[] dflags; /// Flags passed to the D compiler string[] lflags; /// Flags passed to the linker string[] libs; /// Library names to link against (typically using "-l") string[] injectSourceFiles; /// Files that should be injected when this package is dependent upon by a binary image. string[] copyFiles; /// Files to copy to the target directory string[] extraDependencyFiles; /// Files to check for rebuild dub project string[] versions; /// D version identifiers to set string[] debugVersions; /// D debug version identifiers to set string[] importPaths; string[] cImportPaths; string[] stringImportPaths; string[] preGenerateCommands; /// Commands executed before creating the description, with variables not substituted. string[] postGenerateCommands; /// Commands executed after creating the description, with variables not substituted. string[] preBuildCommands; /// Commands to execute prior to every build, with variables not substituted. string[] postBuildCommands; /// Commands to execute after every build, with variables not substituted. string[] preRunCommands; /// Commands to execute prior to every run, with variables not substituted. string[] postRunCommands; /// Commands to execute after every run, with variables not substituted. 
string[string] environments; string[string] buildEnvironments; string[string] runEnvironments; string[string] preGenerateEnvironments; string[string] postGenerateEnvironments; string[string] preBuildEnvironments; string[string] postBuildEnvironments; string[string] preRunEnvironments; string[string] postRunEnvironments; @byName BuildRequirement[] buildRequirements; @byName BuildOption[] options; SourceFileDescription[] files; /// A list of all source/import files possibly used by the package } /** Describes the settings necessary to build a certain binary target. */ struct TargetDescription { string rootPackage; /// Main package associated with this target, this is also the name of the target. string[] packages; /// All packages contained in this target (e.g. for target type "sourceLibrary") string rootConfiguration; /// Build configuration of the target's root package used for building BuildSettings buildSettings; /// Final build settings to use when building the target string cacheArtifactPath; /// The full path of the built target in the cache string[] dependencies; /// List of all dependencies of this target (package names) string[] linkDependencies; /// List of all link-dependencies of this target (target names) } /** Description for a single source file known to the package. */ struct SourceFileDescription { @byName SourceFileRole role; /// Main role this file plays in the build process string path; /// Full path to the file } /** Determines the role that a file plays in the build process. If a file has multiple roles, higher enum values will have precedence, i.e. if a file is used both, as a source file and as an import file, it will be classified as a source file. 
*/ enum SourceFileRole { unusedStringImport, /// Used as a string import for another configuration/platform unusedImport, /// Used as an import for another configuration/platform unusedSource, /// Used as a source file for another configuration/platform stringImport, /// Used as a string import file import_, /// Used as an import file source /// Used as a source file } dub-1.40.0/source/dub/dub.d000066400000000000000000002213141477246567400153700ustar00rootroot00000000000000/** A package manager. Copyright: © 2012-2013 Matthias Dondorff, 2012-2016 Sönke Ludwig License: Subject to the terms of the MIT license, as written in the included LICENSE.txt file. Authors: Matthias Dondorff, Sönke Ludwig */ module dub.dub; import dub.compilers.compiler; import dub.data.settings : SPS = SkipPackageSuppliers, Settings; import dub.dependency; import dub.dependencyresolver; import dub.internal.utils; import dub.internal.vibecompat.core.file; import dub.internal.vibecompat.data.json; import dub.internal.vibecompat.inet.url; import dub.internal.logging; import dub.package_; import dub.packagemanager; import dub.packagesuppliers; import dub.project; import dub.generators.generator; import dub.init; import std.algorithm; import std.array : array, replace; import std.conv : text, to; import std.encoding : sanitize; import std.exception : enforce; import std.file; import std.process : environment; import std.range : assumeSorted, empty; import std.string; static this() { import dub.compilers.dmd : DMDCompiler; import dub.compilers.gdc : GDCCompiler; import dub.compilers.ldc : LDCCompiler; registerCompiler(new DMDCompiler); registerCompiler(new GDCCompiler); registerCompiler(new LDCCompiler); } deprecated("use defaultRegistryURLs") enum defaultRegistryURL = defaultRegistryURLs[0]; /// The URL to the official package registry and it's default fallback registries. 
static immutable string[] defaultRegistryURLs = [ "https://code.dlang.org/", "https://codemirror.dlang.org/" ]; /** Returns a default list of package suppliers. This will contain a single package supplier that points to the official package registry. See_Also: `defaultRegistryURLs` */ deprecated("This function wasn't intended for public use - open an issue with Dub if you need it") PackageSupplier[] defaultPackageSuppliers() { logDiagnostic("Using dub registry url '%s'", defaultRegistryURLs[0]); return [new FallbackPackageSupplier(defaultRegistryURLs.map!_getRegistryPackageSupplier.array)]; } /** Returns a registry package supplier according to protocol. Allowed protocols are dub+http(s):// and maven+http(s)://. */ deprecated("This function wasn't intended for public use - open an issue with Dub if you need it") PackageSupplier getRegistryPackageSupplier(string url) { return _getRegistryPackageSupplier(url); } // Private to avoid a bug in `defaultPackageSuppliers` with `map` triggering a deprecation // even though the context is deprecated. // Also used from `commandline`. Note that this is replaced by a method // in the `Dub` class, to allow for proper dependency injection, // but `commandline` is currently completely excluded. 
package(dub) PackageSupplier _getRegistryPackageSupplier(string url) { switch (url.startsWith("dub+", "mvn+", "file://")) { case 1: return new RegistryPackageSupplier(URL(url[4..$])); case 2: return new MavenRegistryPackageSupplier(URL(url[4..$])); case 3: return new FileSystemPackageSupplier(NativePath(url[7..$])); default: return new RegistryPackageSupplier(URL(url)); } } deprecated unittest { auto dubRegistryPackageSupplier = getRegistryPackageSupplier("dub+https://code.dlang.org"); assert(dubRegistryPackageSupplier.description.canFind(" https://code.dlang.org")); dubRegistryPackageSupplier = getRegistryPackageSupplier("https://code.dlang.org"); assert(dubRegistryPackageSupplier.description.canFind(" https://code.dlang.org")); auto mavenRegistryPackageSupplier = getRegistryPackageSupplier("mvn+http://localhost:8040/maven/libs-release/dubpackages"); assert(mavenRegistryPackageSupplier.description.canFind(" http://localhost:8040/maven/libs-release/dubpackages")); auto fileSystemPackageSupplier = getRegistryPackageSupplier("file:///etc/dubpackages"); assert(fileSystemPackageSupplier.description.canFind(" " ~ NativePath("/etc/dubpackages").toNativeString)); } /** Provides a high-level entry point for DUB's functionality. This class provides means to load a certain project (a root package with all of its dependencies) and to perform high-level operations as found in the command line interface. */ class Dub { protected { bool m_dryRun = false; PackageManager m_packageManager; PackageSupplier[] m_packageSuppliers; NativePath m_rootPath; string m_mainRecipePath; SpecialDirs m_dirs; Settings m_config; Project m_project; string m_defaultCompiler; } /** The default placement location of fetched packages. This property can be altered, so that packages which are downloaded as part of the normal upgrade process are stored in a certain location. This is how the "--local" and "--system" command line switches operate. 
*/ PlacementLocation defaultPlacementLocation = PlacementLocation.user; /** Initializes the instance for use with a specific root package. Note that a package still has to be loaded using one of the `loadPackage` overloads. Params: root_path = Path to the root package base = A list of package suppliers that are always present (regardless of `skip`) and take precedence over the default and configured `PackageSupplier`. This setting is currently not used by the dub application but useful for libraries. skip = Can be used to skip using the configured package suppliers, as well as the default suppliers. */ this(string root_path = ".", PackageSupplier[] base = null, SkipPackageSuppliers skip = SkipPackageSuppliers.none) { m_rootPath = NativePath(root_path); if (!m_rootPath.absolute) m_rootPath = getWorkingDirectory() ~ m_rootPath; init(); if (skip == SkipPackageSuppliers.default_) { // If unspecified on the command line, take // the value from the configuration files, or // default to none. skip = m_config.skipRegistry.set ? m_config.skipRegistry.value : SkipPackageSuppliers.none; } const registry_var = environment.get("DUB_REGISTRY", null); m_packageSuppliers = this.makePackageSuppliers(base, skip, registry_var); m_packageManager = this.makePackageManager(); auto ccps = m_config.customCachePaths; if (ccps.length) m_packageManager.customCachePaths = ccps; // TODO: Move this environment read out of the ctor if (auto p = environment.get("DUBPATH")) { version(Windows) enum pathsep = ";"; else enum pathsep = ":"; NativePath[] paths = p.split(pathsep) .map!(p => NativePath(p))().array(); m_packageManager.searchPath = paths; } } /** Initializes the instance with a single package search path, without loading a package. This constructor corresponds to the "--bare" option of the command line interface. Params: root = The root path of the Dub instance itself. pkg_root = The root of the location where packages are located Only packages under this location will be accessible. 
Note that packages at the top levels will be ignored. */ this(NativePath root, NativePath pkg_root) { // Note: We're doing `init()` before setting the `rootPath`, // to prevent `init` from reading the project's settings. init(); this.m_rootPath = root; m_packageManager = new PackageManager(pkg_root); } deprecated("Use the overload that takes `(NativePath pkg_root, NativePath root)`") this(NativePath pkg_root) { this(pkg_root, pkg_root); } /** * Get the `PackageManager` instance to use for this `Dub` instance * * The `PackageManager` is a central component of `Dub` as it allows to * store and retrieve packages from the file system. In unittests, or more * generally in a library setup, one may wish to provide a custom * implementation, which can be done by overriding this method. */ protected PackageManager makePackageManager() { return new PackageManager(m_rootPath, m_dirs.userPackages, m_dirs.systemSettings, false); } protected void init() { this.m_dirs = SpecialDirs.make(); this.m_config = this.loadConfig(this.m_dirs); this.m_defaultCompiler = this.determineDefaultCompiler(); } /** * Load user configuration for this instance * * This can be overloaded in child classes to prevent library / unittest * dub from doing any kind of file IO. * As this routine is used during initialization, the only assumption made * in the base implementation is that `m_rootPath` has been populated. * Child implementation should not rely on any other field in the base * class having been populated. * * Params: * dirs = An instance of `SpecialDirs` to read from and write to, * as the configurations being read might set a `dubHome`. * * Returns: * A populated `Settings` instance. 
*/ protected Settings loadConfig(ref SpecialDirs dirs) const { import dub.internal.configy.Read; static void readSettingsFile (NativePath path_, ref Settings current) { // TODO: Remove `StrictMode.Warn` after v1.40 release // The default is to error, but as the previous parser wasn't // complaining, we should first warn the user. const path = path_.toNativeString(); if (path.exists) { auto newConf = parseConfigFileSimple!Settings(path, StrictMode.Warn); if (!newConf.isNull()) current = current.merge(newConf.get()); } } Settings result; const dubFolderPath = NativePath(thisExePath).parentPath; // override default userSettings + userPackages if a $DPATH or // $DUB_HOME environment variable is set. bool overrideDubHomeFromEnv; { string dubHome = environment.get("DUB_HOME"); if (!dubHome.length) { auto dpath = environment.get("DPATH"); if (dpath.length) dubHome = (NativePath(dpath) ~ "dub/").toNativeString(); } if (dubHome.length) { overrideDubHomeFromEnv = true; dirs.userSettings = NativePath(dubHome); dirs.userPackages = dirs.userSettings; dirs.cache = dirs.userPackages ~ "cache"; } } readSettingsFile(dirs.systemSettings ~ "settings.json", result); readSettingsFile(dubFolderPath ~ "../etc/dub/settings.json", result); version (Posix) { if (dubFolderPath.absolute && dubFolderPath.startsWith(NativePath("usr"))) readSettingsFile(NativePath("/etc/dub/settings.json"), result); } // Override user + local package path from system / binary settings // Then continues loading local settings from these folders. (keeping // global /etc/dub/settings.json settings intact) // // Don't use it if either $DPATH or $DUB_HOME are set, as environment // variables usually take precedence over configuration. 
if (!overrideDubHomeFromEnv && result.dubHome.set) { dirs.userSettings = NativePath(result.dubHome.expandEnvironmentVariables); } // load user config: readSettingsFile(dirs.userSettings ~ "settings.json", result); // load per-package config: if (!this.m_rootPath.empty) readSettingsFile(this.m_rootPath ~ "dub.settings.json", result); // same as userSettings above, but taking into account the // config loaded from user settings and per-package config as well. if (!overrideDubHomeFromEnv && result.dubHome.set) { dirs.userPackages = NativePath(result.dubHome.expandEnvironmentVariables); dirs.cache = dirs.userPackages ~ "cache"; } return result; } /** Get the list of package suppliers. Params: additional_package_suppliers = A list of package suppliers to try before the suppliers found in the configurations files and the `defaultPackageSuppliers`. skip = Can be used to skip using the configured package suppliers, as well as the default suppliers. */ deprecated("This is an implementation detail. " ~ "Use `packageSuppliers` to get the computed list of package " ~ "suppliers once a `Dub` instance has been constructed.") public PackageSupplier[] getPackageSuppliers(PackageSupplier[] base, SkipPackageSuppliers skip) { return this.makePackageSuppliers(base, skip, environment.get("DUB_REGISTRY", null)); } /// Ditto protected PackageSupplier[] makePackageSuppliers(PackageSupplier[] base, SkipPackageSuppliers skip, string registry_var) { PackageSupplier[] ps = base; if (skip < SkipPackageSuppliers.all) { ps ~= registry_var .splitter(";") .map!(url => this.makePackageSupplier(url)) .array; } if (skip < SkipPackageSuppliers.configured) { ps ~= m_config.registryUrls .map!(url => this.makePackageSupplier(url)) .array; } if (skip < SkipPackageSuppliers.standard) ps ~= new FallbackPackageSupplier( defaultRegistryURLs.map!(url => this.makePackageSupplier(url)) .array); return ps; } // Note: This test rely on the environment, which is not how unittests should work. 
// This should be removed / refactored to keep coverage without affecting the env. unittest { import dub.test.base : TestDub; scope (exit) environment.remove("DUB_REGISTRY"); auto dub = new TestDub(null, "/dub/project/", null, SkipPackageSuppliers.configured); assert(dub.packageSuppliers.length == 0); environment["DUB_REGISTRY"] = "http://example.com/"; dub = new TestDub(null, "/dub/project/", null, SkipPackageSuppliers.configured); assert(dub.packageSuppliers.length == 1); environment["DUB_REGISTRY"] = "http://example.com/;http://foo.com/"; dub = new TestDub(null, "/dub/project/", null, SkipPackageSuppliers.configured); assert(dub.packageSuppliers.length == 2); dub = new TestDub(null, "/dub/project/", [new RegistryPackageSupplier(URL("http://bar.com/"))], SkipPackageSuppliers.configured); assert(dub.packageSuppliers.length == 3); dub = new TestDub(); assert(dub.makePackageSuppliers(null, SkipPackageSuppliers.none, null).length == 1); assert(dub.makePackageSuppliers(null, SkipPackageSuppliers.configured, null).length == 0); assert(dub.makePackageSuppliers(null, SkipPackageSuppliers.standard, null).length == 0); assert(dub.makePackageSuppliers(null, SkipPackageSuppliers.standard, "http://example.com/") .length == 1); } /** * Instantiate a `PackageSupplier` according to a given URL * * This is a factory function for `PackageSupplier`. Child classes may * wish to override this to implement their own `PackageSupplier` logic, * be it by extending this method's ability or replacing it. * * Params: * url = The URL of the `PackageSupplier`. * * Returns: * A new instance of a `PackageSupplier`. 
*/ protected PackageSupplier makePackageSupplier(string url) { switch (url.startsWith("dub+", "mvn+", "file://")) { case 1: return new RegistryPackageSupplier(URL(url[4..$])); case 2: return new MavenRegistryPackageSupplier(URL(url[4..$])); case 3: return new FileSystemPackageSupplier(NativePath(url[7..$])); default: return new RegistryPackageSupplier(URL(url)); } } /// ditto deprecated("This is an implementation detail. " ~ "Use `packageSuppliers` to get the computed list of package " ~ "suppliers once a `Dub` instance has been constructed.") public PackageSupplier[] getPackageSuppliers(PackageSupplier[] additional_package_suppliers) { return getPackageSuppliers(additional_package_suppliers, m_config.skipRegistry); } @property bool dryRun() const { return m_dryRun; } @property void dryRun(bool v) { m_dryRun = v; } /** Returns the root path (usually the current working directory). */ @property NativePath rootPath() const { return m_rootPath; } /// ditto deprecated("Changing the root path is deprecated as it has non-obvious pitfalls " ~ "(e.g. settings aren't reloaded). Instantiate a new `Dub` instead") @property void rootPath(NativePath root_path) { m_rootPath = root_path; if (!m_rootPath.absolute) m_rootPath = getWorkingDirectory() ~ m_rootPath; } /// Returns the name listed in the dub.json of the current /// application. @property string projectName() const { return m_project.name; } @property string mainRecipePath() const { return m_mainRecipePath; } /// Whenever the switch --recipe= is supplied, this member will be populated. 
@property string mainRecipePath(string recipePath) { return m_mainRecipePath = recipePath; } @property NativePath projectPath() const { return this.m_project.rootPackage.path; } @property string[] configurations() const { return m_project.configurations; } @property inout(PackageManager) packageManager() inout { return m_packageManager; } @property inout(Project) project() inout { return m_project; } @property inout(PackageSupplier)[] packageSuppliers() inout { return m_packageSuppliers; } /** Returns the default compiler binary to use for building D code. If set, the "defaultCompiler" field of the DUB user or system configuration file will be used. Otherwise the PATH environment variable will be searched for files named "dmd", "gdc", "gdmd", "ldc2", "ldmd2" (in that order, taking into account operating system specific file extensions) and the first match is returned. If no match is found, "dmd" will be used. */ @property string defaultCompiler() const { return m_defaultCompiler; } /** Returns the default architecture to use for building D code. If set, the "defaultArchitecture" field of the DUB user or system configuration file will be used. Otherwise null will be returned. */ @property string defaultArchitecture() const { return this.m_config.defaultArchitecture; } /** Returns the default low memory option to use for building D code. If set, the "defaultLowMemory" field of the DUB user or system configuration file will be used. Otherwise false will be returned. 
*/ @property bool defaultLowMemory() const { return this.m_config.defaultLowMemory; } @property const(string[string]) defaultEnvironments() const { return this.m_config.defaultEnvironments; } @property const(string[string]) defaultBuildEnvironments() const { return this.m_config.defaultBuildEnvironments; } @property const(string[string]) defaultRunEnvironments() const { return this.m_config.defaultRunEnvironments; } @property const(string[string]) defaultPreGenerateEnvironments() const { return this.m_config.defaultPreGenerateEnvironments; } @property const(string[string]) defaultPostGenerateEnvironments() const { return this.m_config.defaultPostGenerateEnvironments; } @property const(string[string]) defaultPreBuildEnvironments() const { return this.m_config.defaultPreBuildEnvironments; } @property const(string[string]) defaultPostBuildEnvironments() const { return this.m_config.defaultPostBuildEnvironments; } @property const(string[string]) defaultPreRunEnvironments() const { return this.m_config.defaultPreRunEnvironments; } @property const(string[string]) defaultPostRunEnvironments() const { return this.m_config.defaultPostRunEnvironments; } /** Loads the package that resides within the configured `rootPath`. */ void loadPackage() { loadPackage(m_rootPath); } /// Loads the package from the specified path as the main project package. void loadPackage(NativePath path) { auto pack = this.m_packageManager.getOrLoadPackage( path, NativePath.init, false, StrictMode.Warn); this.loadPackage(pack); } /// Loads a specific package as the main project package (can be a sub package) void loadPackage(Package pack) { auto selections = Project.loadSelections(pack.path, m_packageManager); m_project = new Project(m_packageManager, pack, selections); } /** Loads a single file package. Single-file packages are D files that contain a package recipe comment at their top. A recipe comment must be a nested `/+ ... 
+/` style comment, containing the virtual recipe file name and a colon, followed by the recipe contents (what would normally be in dub.sdl/dub.json). Example: --- /+ dub.sdl: name "test" dependency "vibe-d" version="~>0.7.29" +/ import vibe.http.server; void main() { auto settings = new HTTPServerSettings; settings.port = 8080; listenHTTP(settings, &hello); } void hello(HTTPServerRequest req, HTTPServerResponse res) { res.writeBody("Hello, World!"); } --- The script above can be invoked with "dub --single test.d". */ void loadSingleFilePackage(NativePath path) { import dub.recipe.io : parsePackageRecipe; import std.file : readText; import std.path : baseName, stripExtension; path = makeAbsolute(path); string file_content = readText(path.toNativeString()); if (file_content.startsWith("#!")) { auto idx = file_content.indexOf('\n'); enforce(idx > 0, "The source fine doesn't contain anything but a shebang line."); file_content = file_content[idx+1 .. $]; } file_content = file_content.strip(); string recipe_content; if (file_content.startsWith("/+")) { file_content = file_content[2 .. $]; auto idx = file_content.indexOf("+/"); enforce(idx >= 0, "Missing \"+/\" to close comment."); recipe_content = file_content[0 .. idx].strip(); } else throw new Exception("The source file must start with a recipe comment."); auto nidx = recipe_content.indexOf('\n'); auto idx = recipe_content.indexOf(':'); enforce(idx > 0 && (nidx < 0 || nidx > idx), "The first line of the recipe comment must list the recipe file name followed by a colon (e.g. \"/+ dub.sdl:\")."); auto recipe_filename = recipe_content[0 .. idx]; recipe_content = recipe_content[idx+1 .. 
$]; auto recipe_default_package_name = path.toString.baseName.stripExtension.strip; const PackageName empty; auto recipe = parsePackageRecipe(recipe_content, recipe_filename, empty, recipe_default_package_name); enforce(recipe.buildSettings.sourceFiles.length == 0, "Single-file packages are not allowed to specify source files."); enforce(recipe.buildSettings.sourcePaths.length == 0, "Single-file packages are not allowed to specify source paths."); enforce(recipe.buildSettings.cSourcePaths.length == 0, "Single-file packages are not allowed to specify C source paths."); enforce(recipe.buildSettings.importPaths.length == 0, "Single-file packages are not allowed to specify import paths."); enforce(recipe.buildSettings.cImportPaths.length == 0, "Single-file packages are not allowed to specify C import paths."); recipe.buildSettings.sourceFiles[""] = [path.toNativeString()]; recipe.buildSettings.sourcePaths[""] = []; recipe.buildSettings.cSourcePaths[""] = []; recipe.buildSettings.importPaths[""] = []; recipe.buildSettings.cImportPaths[""] = []; recipe.buildSettings.mainSourceFile = path.toNativeString(); if (recipe.buildSettings.targetType == TargetType.autodetect) recipe.buildSettings.targetType = TargetType.executable; auto pack = new Package(recipe, path.parentPath, null, "~master"); loadPackage(pack); } /// ditto void loadSingleFilePackage(string path) { loadSingleFilePackage(NativePath(path)); } /** Gets the default configuration for a particular build platform. This forwards to `Project.getDefaultConfiguration` and requires a project to be loaded. */ string getDefaultConfiguration(in BuildPlatform platform, bool allow_non_library_configs = true) const { return m_project.getDefaultConfiguration(platform, allow_non_library_configs); } /** Attempts to upgrade the dependency selection of the loaded project. Params: options = Flags that control how the upgrade is carried out packages_to_upgrade = Optional list of packages. 
	If this list contains one or more packages, only those packages will
	be upgraded. Otherwise, all packages will be upgraded at once.
	*/
	void upgrade(UpgradeOptions options, string[] packages_to_upgrade = null)
	{
		// clear non-existent version selections
		// (skipped when a full `--upgrade` was requested, since everything
		// gets re-resolved anyway)
		if (!(options & UpgradeOptions.upgrade)) {
			next_pack:
			foreach (p; m_project.selections.selectedPackages) {
				const name = PackageName(p); // Always a main package name
				auto dep = m_project.selections.getSelectedVersion(name);

				if (!dep.path.empty) {
					// Path-based selection: valid if the package still loads from disk.
					auto path = dep.path;
					if (!path.absolute) path = this.rootPath ~ path;
					try if (m_packageManager.getOrLoadPackage(path)) continue;
					catch (Exception e) { logDebug("Failed to load path based selection: %s", e.toString().sanitize); }
				} else if (!dep.repository.empty) {
					// SCM (git) based selection: valid if the repository checkout loads.
					if (m_packageManager.loadSCMPackage(name, dep.repository))
						continue;
				} else {
					// Version-based selection: valid if present locally or
					// obtainable from any configured package supplier.
					if (m_packageManager.getPackage(name, dep.version_)) continue;
					foreach (ps; m_packageSuppliers) {
						try {
							auto versions = ps.getVersions(name);
							if (versions.canFind!(v => dep.matches(v, VersionMatchMode.strict)))
								continue next_pack;
						} catch (Exception e) {
							logWarn("Error querying versions for %s, %s: %s", p, ps.description, e.msg);
							logDebug("Full error: %s", e.toString().sanitize());
						}
					}
				}

				logWarn("Selected package %s %s doesn't exist. Using latest matching version instead.", p, dep);
				m_project.selections.deselectVersion(name);
			}
		}

		// Re-run dependency resolution for the requested subset (or all packages).
		auto resolver = new DependencyVersionResolver(
			this, options, m_project.rootPackage, m_project.selections);
		Dependency[PackageName] versions = resolver.resolve(packages_to_upgrade);

		if (options & UpgradeOptions.dryRun) {
			// Only report what *would* change; never touch disk or selections.
			bool any = false;
			string rootbasename = PackageName(m_project.rootPackage.name).main.toString();

			foreach (p, ver; versions) {
				if (!ver.path.empty || !ver.repository.empty) continue;
				auto basename = p.main;
				if (basename.toString() == rootbasename) continue;

				if (!m_project.selections.hasSelectedVersion(basename)) {
					logInfo("Upgrade", Color.cyan,
						"Package %s would be selected with version %s", basename, ver);
					any = true;
					continue;
				}
				auto sver = m_project.selections.getSelectedVersion(basename);
				if (!sver.path.empty || !sver.repository.empty) continue;
				if (ver.version_ <= sver.version_) continue;
				logInfo("Upgrade", Color.cyan,
					"%s would be upgraded from %s to %s.",
					basename.toString().color(Mode.bold), sver, ver);
				any = true;
			}
			if (any) logInfo("Use \"%s\" to perform those changes", "dub upgrade".color(Mode.bold));
			return;
		}

		// Materialize the resolved versions: load path/SCM packages, fetch
		// registry packages that are missing, and update the selections.
		foreach (name, ver; versions) {
			assert(!name.sub, "Resolved packages contain a sub package!?: " ~ name.toString());
			Package pack;
			if (!ver.path.empty) {
				try pack = m_packageManager.getOrLoadPackage(ver.path);
				catch (Exception e) {
					logDebug("Failed to load path based selection: %s", e.toString().sanitize);
					continue;
				}
			} else if (!ver.repository.empty) {
				pack = m_packageManager.loadSCMPackage(name, ver.repository);
			} else {
				assert(ver.isExactVersion, "Resolved dependency is neither path, nor repository, nor exact version based!?");
				pack = m_packageManager.getPackage(name, ver.version_);
				if (pack && m_packageManager.isManagedPackage(pack)
					&& ver.version_.isBranch && (options & UpgradeOptions.upgrade) != 0)
				{
					// TODO: only re-install if there is actually a new commit available
					logInfo("Re-installing branch based dependency %s %s", name, ver.toString());
					m_packageManager.remove(pack);
					pack = null;
				}
			}

			FetchOptions fetchOpts;
			fetchOpts |= (options & UpgradeOptions.preRelease) != 0 ? FetchOptions.usePrerelease : FetchOptions.none;
			if (!pack)
				this.fetch(name, ver.version_, fetchOpts, defaultPlacementLocation, "getting selected version");
			if ((options & UpgradeOptions.select) && name.toString() != m_project.rootPackage.name) {
				if (!ver.repository.empty) {
					m_project.selections.selectVersion(name, ver.repository);
				} else if (ver.path.empty) {
					m_project.selections.selectVersion(name, ver.version_);
				} else {
					// Store path selections relative to the root package when possible.
					NativePath relpath = ver.path;
					if (relpath.absolute) relpath = relpath.relativeTo(m_project.rootPackage.path);
					m_project.selections.selectVersion(name, relpath);
				}
			}
		}

		string[] missingDependenciesBeforeReinit = m_project.missingDependencies;
		m_project.reinit();

		// If this pass resolved at least one previously-missing dependency,
		// recurse to resolve any dependencies newly uncovered by reinit().
		// The progress check avoids infinite recursion when nothing improves.
		if (!m_project.hasAllDependencies) {
			auto resolvedDependencies = setDifference(
					assumeSorted(missingDependenciesBeforeReinit),
					assumeSorted(m_project.missingDependencies)
				);
			if (!resolvedDependencies.empty)
				upgrade(options, m_project.missingDependencies);
		}

		if ((options & UpgradeOptions.select) && !(options & (UpgradeOptions.noSaveSelections | UpgradeOptions.dryRun)))
			m_project.saveSelections();
	}

	/** Generate project files for a specified generator.

		Any existing project files will be overridden.
	*/
	void generateProject(string ide, GeneratorSettings settings)
	{
		settings.cache = this.m_dirs.cache;
		if (settings.overrideToolWorkingDirectory is NativePath.init)
			settings.overrideToolWorkingDirectory = m_rootPath;
		// With a requested `unittest` config, switch to the special test runner
		// config (which doesn't require an existing `unittest` configuration).
		if (settings.config == "unittest") {
			const test_config = m_project.addTestRunnerConfiguration(settings, !m_dryRun);
			if (test_config) settings.config = test_config;
		}

		auto generator = createProjectGenerator(ide, m_project);
		if (m_dryRun) return; // TODO: pass m_dryRun to the generator
		generator.generate(settings);
	}

	/** Generate project files using the special test runner (`dub test`) configuration.

		Any existing project files will be overridden.
	*/
	void testProject(GeneratorSettings settings, string config, NativePath custom_main_file)
	{
		settings.cache = this.m_dirs.cache;
		if (settings.overrideToolWorkingDirectory is NativePath.init)
			settings.overrideToolWorkingDirectory = m_rootPath;
		// Resolve a relative custom main file against the project root.
		if (!custom_main_file.empty && !custom_main_file.absolute) custom_main_file = m_rootPath ~ custom_main_file;

		const test_config = m_project.addTestRunnerConfiguration(settings, !m_dryRun, config, custom_main_file);
		if (!test_config) return; // target type "none"

		settings.config = test_config;

		auto generator = createProjectGenerator("build", m_project);
		generator.generate(settings);
	}

	/** Executes D-Scanner tests on the current project.
	*/
	void listProjectData(GeneratorSettings settings, string[] requestedData, ListBuildSettingsFormat list_type)
	{
		import std.stdio;
		import std.ascii : newline;

		if (settings.overrideToolWorkingDirectory is NativePath.init)
			settings.overrideToolWorkingDirectory = m_rootPath;

		// Split comma-separated lists
		string[] requestedDataSplit =
			requestedData
			.map!(a => a.splitter(",").map!strip)
			.joiner()
			.array();

		auto data = m_project.listBuildSettings(settings, requestedDataSplit, list_type);

		// Pick the record separator matching the requested output format;
		// NUL-separated variants are meant for machine consumption.
		string delimiter;
		final switch (list_type) with (ListBuildSettingsFormat) {
			case list: delimiter = newline ~ newline; break;
			case listNul: delimiter = "\0\0"; break;
			case commandLine: delimiter = " "; break;
			case commandLineNul: delimiter = "\0\0"; break;
		}

		write(data.joiner(delimiter));
		// Only terminate with a newline for the human-readable formats.
		if (delimiter != "\0\0") writeln();
	}

	/// Cleans intermediate/cache files of the given package (or all packages)
	deprecated("Use `clean(Package)` instead")
	void cleanPackage(NativePath path)
	{
		this.clean(Package.load(path));
	}

	/// Ditto
	void clean()
	{
		// Removes the whole shared artifact cache directory.
		const cache = this.m_dirs.cache;
		logInfo("Cleaning", Color.green, "all artifacts at %s",
			cache.toNativeString().color(Mode.bold));
		if (existsFile(cache))
			rmdirRecurse(cache.toNativeString());
	}

	/// Ditto
	void clean(Package pack)
	{
		// Removes only the cache subdirectory belonging to `pack`.
		const cache = this.packageCache(pack);
		logInfo("Cleaning", Color.green, "artifacts for package %s at %s",
			pack.name.color(Mode.bold),
			cache.toNativeString().color(Mode.bold));

		// TODO: clear target files and copy files

		if (existsFile(cache))
			rmdirRecurse(cache.toNativeString());
	}

	deprecated("Use the overload that accepts either a `Version` or a `VersionRange` as second argument")
	Package fetch(string packageId, const Dependency dep, PlacementLocation location, FetchOptions options, string reason = "")
	{
		// Only version-range dependencies can be fetched from a registry;
		// path/repository dependencies are rejected here.
		const vrange = dep.visit!(
			(VersionRange range) => range,
			function VersionRange (any) {
				throw new Exception("Cannot call `dub.fetch` with a " ~ typeof(any).stringof ~ " dependency");
			}
		);
		return this.fetch(packageId, vrange, location, options, reason);
	}

	deprecated("Use `fetch(PackageName, Version, [FetchOptions, PlacementLocation, string])`")
	Package fetch(string name, in Version vers, PlacementLocation location, FetchOptions options, string reason = "")
	{
		const n = PackageName(name);
		return this.fetch(n, VersionRange(vers, vers), options, location, reason);
	}

	deprecated("Use `fetch(PackageName, VersionRange, [FetchOptions, PlacementLocation, string])`")
	Package fetch(string name, in VersionRange range, PlacementLocation location, FetchOptions options, string reason = "")
	{
		const n = PackageName(name);
		return this.fetch(n, range, options, location, reason);
	}

	/**
	 * Fetches a missing package and stores it locally
	 *
	 * This will query the configured PackageSuppliers for a package
	 * matching the `range` specification, store it locally, and load
	 * it in the `PackageManager`. Note that unlike the command line
	 * version, this function is not idempotent and will remove an
	 * existing package and re-download it.
	 *
	 * Params:
	 *   name = Name of the package to retrieve. Subpackages will lead
	 *          to the main package being retrieved and the subpackage
	 *          being returned (if it exists).
	 *   vers = For `Version` overloads, the exact version to return.
	 *   range = The `VersionRange` to match. Default to `Any` to fetch
	 *           the latest version.
	 *   options = A set of options used for fetching / matching versions.
	 *   location = Where to store the retrieved package. Default to the
	 *              configured `defaultPlacementLocation`.
	 *   reason = Optionally, the reason for retriving this package.
	 *            This is used only for logging.
	 *
	 * Returns:
	 *   The fetched or loaded `Package`, or `null` in dry-run mode.
	 *
	 * Throws:
	 *   If the package cannot be fetched or loaded.
*/ Package fetch(in PackageName name, in Version vers, FetchOptions options = FetchOptions.none, string reason = "") { return this.fetch(name, VersionRange(vers, vers), options, this.defaultPlacementLocation, reason); } /// Ditto Package fetch(in PackageName name, in Version vers, FetchOptions options, PlacementLocation location, string reason = "") { return this.fetch(name, VersionRange(vers, vers), options, this.defaultPlacementLocation, reason); } /// Ditto Package fetch(in PackageName name, in VersionRange range = VersionRange.Any, FetchOptions options = FetchOptions.none, string reason = "") { return this.fetch(name, range, options, this.defaultPlacementLocation, reason); } /// Ditto Package fetch(in PackageName name, in VersionRange range, FetchOptions options, PlacementLocation location, string reason = "") { Json pinfo; PackageSupplier supplier; foreach(ps; m_packageSuppliers){ try { pinfo = ps.fetchPackageRecipe(name.main, range, (options & FetchOptions.usePrerelease) != 0); if (pinfo.type == Json.Type.null_) continue; supplier = ps; break; } catch(Exception e) { logWarn("Package %s not found for %s: %s", name, ps.description, e.msg); logDebug("Full error: %s", e.toString().sanitize()); } } enforce(!pinfo.type.among(Json.Type.undefined, Json.Type.null_), "No package %s was found matching the dependency %s" .format(name, range)); Version ver = Version(pinfo["version"].get!string); // always upgrade branch based versions - TODO: actually check if there is a new commit available Package existing = m_packageManager.getPackage(name, ver, location); if (options & FetchOptions.printOnly) { if (existing && existing.version_ != ver) logInfo("A new version for %s is available (%s -> %s). 
Run \"%s\" to switch.", name.toString().color(Mode.bold), existing, ver, text("dub upgrade ", name.main).color(Mode.bold)); return null; } if (existing) { if (!ver.isBranch() || !(options & FetchOptions.forceBranchUpgrade) || location == PlacementLocation.local) { // TODO: support git working trees by performing a "git pull" instead of this logDiagnostic("Package %s %s (in %s packages) is already present with the latest version, skipping upgrade.", name, ver, location.toString); return existing; } else { logInfo("Removing", Color.yellow, "%s %s to prepare " ~ "replacement with a new version", name.toString().color(Mode.bold), ver); if (!m_dryRun) m_packageManager.remove(existing); } } if (reason.length) logInfo("Fetching", Color.yellow, "%s %s (%s)", name.toString().color(Mode.bold), ver, reason); else logInfo("Fetching", Color.yellow, "%s %s", name.toString().color(Mode.bold), ver); if (m_dryRun) return null; logDebug("Acquiring package zip file"); // repeat download on corrupted zips, see #1336 foreach_reverse (i; 0..3) { import std.zip : ZipException; auto data = supplier.fetchPackage(name.main, range, (options & FetchOptions.usePrerelease) != 0); // Q: continue on fail? logDiagnostic("Placing to %s...", location.toString()); try { return m_packageManager.store(data, location, name.main, ver); } catch (ZipException e) { logInfo("Failed to extract zip archive for %s@%s...", name, ver); // re-throw the exception at the end of the loop if (i == 0) throw e; } } assert(0, "Should throw a ZipException instead."); } /** Removes a specific locally cached package. This will delete the package files from disk and removes the corresponding entry from the list of known packages. Params: pack = Package instance to remove */ void remove(in Package pack) { logInfo("Removing", Color.yellow, "%s (in %s)", pack.name.color(Mode.bold), pack.path.toNativeString()); if (!m_dryRun) m_packageManager.remove(pack); } /// Compatibility overload. 
Use the version without a `force_remove` argument instead.
	deprecated("Use `remove(pack)` directly instead, the boolean has no effect")
	void remove(in Package pack, bool force_remove)
	{
		remove(pack);
	}

	/// @see remove(string, string, RemoveLocation)
	enum RemoveVersionWildcard = "*";

	/** Removes one or more versions of a locally cached package.

		This will remove a given package with a specified version from the
		given location. It will remove at most one package, unless `version_`
		is set to `RemoveVersionWildcard`.

		Params:
			name = Name of the package to be removed
			location = Specifies the location to look for the given package
				name/version.
			resolve_version = Callback to select package version.
	*/
	void remove(in PackageName name, PlacementLocation location,
		scope size_t delegate(in Package[] packages) resolve_version)
	{
		enforce(name.main.toString().length);
		enforce(!name.sub.length, "Cannot remove subpackage %s, remove %s instead"
			.format(name, name.main));
		// `local` placements are user-managed directories; dub refuses to
		// delete them and instructs the user to do it manually.
		if (location == PlacementLocation.local) {
			logInfo("To remove a locally placed package, make sure you don't have any data"
					~ "\nleft in it's directory and then simply remove the whole directory.");
			throw new Exception("dub cannot remove locally installed packages.");
		}

		Package[] packages;

		// Retrieve packages to be removed.
		foreach(pack; m_packageManager.getPackageIterator(name.toString()))
			if (m_packageManager.isManagedPackage(pack))
				packages ~= pack;

		// Check validity of packages to be removed.
		enforce(!packages.empty, "Cannot find package '%s' to remove at %s location"
			.format(name, location.toString()));

		// Sort package list in ascending version order
		packages.sort!((a, b) => a.version_ < b.version_);

		// Callback contract: size_t.max = abort, packages.length = remove all,
		// any other index = remove exactly that one version.
		immutable idx = resolve_version(packages);
		if (idx == size_t.max)
			return;
		else if (idx != packages.length)
			packages = packages[idx .. idx + 1];

		logDebug("Removing %s packages.", packages.length);
		foreach(pack; packages) {
			try {
				remove(pack);
			} catch (Exception e) {
				// Best-effort: a failure on one version must not stop the rest.
				logError("Failed to remove %s %s: %s", name, pack, e.msg);
				logInfo("Continuing with other packages (if any).");
			}
		}
	}

	deprecated("Use `remove(PackageName, PlacementLocation, delegate)`")
	void remove(string name, PlacementLocation location,
		scope size_t delegate(in Package[] packages) resolve_version)
	{
		this.remove(PackageName(name), location, resolve_version);
	}

	/// Compatibility overload. Use the version without a `force_remove` argument instead.
	deprecated("Use the overload without the 3rd argument (`force_remove`) instead")
	void remove(string package_id, PlacementLocation location, bool force_remove,
		scope size_t delegate(in Package[] packages) resolve_version)
	{
		remove(package_id, location, resolve_version);
	}

	/** Removes a specific version of a package.

		Params:
			name = Name of the package to be removed
			version_ = Identifying a version or a wild card. If an empty string
				is passed, the package will be removed from the location, if
				there is only one version retrieved. This will throw an
				exception, if there are multiple versions retrieved.
			location = Specifies the location to look for the given package
				name/version.
	 */
	void remove(in PackageName name, string version_, PlacementLocation location)
	{
		remove(name, location, (in packages) {
			// Wildcard/empty selects every version (signalled by returning
			// packages.length to the delegate-based overload above).
			if (version_ == RemoveVersionWildcard || version_.empty)
				return packages.length;

			foreach (i, p; packages) {
				if (p.version_ == Version(version_))
					return i;
			}
			throw new Exception("Cannot find package '%s@%s' to remove at %s location"
				.format(name, version_, location.toString()));
		});
	}

	deprecated("Use `remove(PackageName, string, PlacementLocation)`")
	void remove(string name, string version_, PlacementLocation location)
	{
		this.remove(PackageName(name), version_, location);
	}

	/// Compatibility overload. Use the version without a `force_remove` argument instead.
	deprecated("Use the overload without force_remove instead")
	void remove(string package_id, string version_, PlacementLocation location, bool force_remove)
	{
		remove(package_id, version_, location);
	}

	/** Adds a directory to the list of locally known packages.

		Forwards to `PackageManager.addLocalPackage`.

		Params:
			path = Path to the package
			ver = Optional version to associate with the package (can be left
				empty)
			system = Make the package known system wide instead of user wide
				(requires administrator privileges).

		See_Also: `removeLocalPackage`
	*/
	deprecated("Use `addLocalPackage(string, string, PlacementLocation)` instead")
	void addLocalPackage(string path, string ver, bool system)
	{
		this.addLocalPackage(path, ver, system ? PlacementLocation.system : PlacementLocation.user);
	}

	/// Ditto
	void addLocalPackage(string path, string ver, PlacementLocation loc)
	{
		// No-op in dry-run mode, like all mutating operations on this class.
		if (m_dryRun) return;
		this.m_packageManager.addLocalPackage(makeAbsolute(path), ver, loc);
	}

	/** Removes a directory from the list of locally known packages.

		Forwards to `PackageManager.removeLocalPackage`.

		Params:
			path = Path to the package
			system = Make the package known system wide instead of user wide
				(requires administrator privileges).

		See_Also: `addLocalPackage`
	*/
	deprecated("Use `removeLocalPackage(string, string, PlacementLocation)` instead")
	void removeLocalPackage(string path, bool system)
	{
		this.removeLocalPackage(path, system ? PlacementLocation.system : PlacementLocation.user);
	}

	/// Ditto
	void removeLocalPackage(string path, PlacementLocation loc)
	{
		if (m_dryRun) return;
		this.m_packageManager.removeLocalPackage(makeAbsolute(path), loc);
	}

	/** Registers a local directory to search for packages to use for satisfying
		dependencies.

		Params:
			path = Path to a directory containing package directories
			system = Make the package known system wide instead of user wide
				(requires administrator privileges).

		See_Also: `removeSearchPath`
	*/
	deprecated("Use `addSearchPath(string, PlacementLocation)` instead")
	void addSearchPath(string path, bool system)
	{
		this.addSearchPath(path, system ? PlacementLocation.system : PlacementLocation.user);
	}

	/// Ditto
	void addSearchPath(string path, PlacementLocation loc)
	{
		if (m_dryRun) return;
		this.m_packageManager.addSearchPath(makeAbsolute(path), loc);
	}

	/** Deregisters a local directory search path.

		Params:
			path = Path to a directory containing package directories
			system = Make the package known system wide instead of user wide
				(requires administrator privileges).

		See_Also: `addSearchPath`
	*/
	deprecated("Use `removeSearchPath(string, PlacementLocation)` instead")
	void removeSearchPath(string path, bool system)
	{
		this.removeSearchPath(path, system ? PlacementLocation.system : PlacementLocation.user);
	}

	/// Ditto
	void removeSearchPath(string path, PlacementLocation loc)
	{
		if (m_dryRun) return;
		this.m_packageManager.removeSearchPath(makeAbsolute(path), loc);
	}

	/** Queries all package suppliers with the given query string.

		Returns a list of tuples, where the first entry is the human readable
		name of the package supplier and the second entry is the list of
		matched packages.

		Params:
		  query = the search term to match packages on

		See_Also: `PackageSupplier.searchPackages`
	*/
	auto searchPackages(string query)
	{
		import std.typecons : Tuple, tuple;
		Tuple!(string, PackageSupplier.SearchResult[])[] results;
		// A failing supplier is logged and skipped so other suppliers
		// can still contribute results.
		foreach (ps; this.m_packageSuppliers) {
			try
				results ~= tuple(ps.description, ps.searchPackages(query));
			catch (Exception e) {
				logWarn("Searching %s for '%s' failed: %s", ps.description, query, e.msg);
			}
		}
		// Drop suppliers that returned no matches.
		return results.filter!(tup => tup[1].length);
	}

	/** Returns a list of all available versions (including branches) for a
		particular package.

		The list returned is based on the registered package suppliers. Local
		packages are not queried in the search for versions.
		See_also: `getLatestVersion`
	*/
	Version[] listPackageVersions(in PackageName name)
	{
		Version[] versions;
		// Collect from every supplier; individual failures only warn.
		foreach (ps; this.m_packageSuppliers) {
			try versions ~= ps.getVersions(name);
			catch (Exception e) {
				logWarn("Failed to get versions for package %s on provider %s: %s",
					name, ps.description, e.msg);
			}
		}
		// Sorted, de-duplicated union of all suppliers' results.
		return versions.sort().uniq.array;
	}

	deprecated("Use `listPackageVersions(PackageName)`")
	Version[] listPackageVersions(string name)
	{
		const n = PackageName(name);
		return this.listPackageVersions(n);
	}

	/** Returns the latest available version for a particular package.

		This function returns the latest numbered version of a package. If no
		numbered versions are available, it will return an available branch,
		preferring "~master".

		Params:
			name = The name of the package in question.
			prefer_stable = If set to `true` (the default), returns the latest
				stable version, even if there are newer pre-release versions.

		See_also: `listPackageVersions`
	*/
	Version getLatestVersion(in PackageName name, bool prefer_stable = true)
	{
		auto vers = this.listPackageVersions(name);
		enforce(!vers.empty,
			"Failed to find any valid versions for a package name of '%s'."
			.format(name));
		// Non-branch, non-pre-release versions, still in ascending order.
		auto final_versions = vers.filter!(v => !v.isBranch && !v.isPreRelease).array;
		if (prefer_stable && final_versions.length) return final_versions[$-1];
		else return vers[$-1];
	}

	deprecated("Use `getLatestVersion(PackageName, bool)`")
	Version getLatestVersion(string name, bool prefer_stable = true)
	{
		const n = PackageName(name);
		return this.getLatestVersion(n, prefer_stable);
	}

	/** Initializes a directory with a package skeleton.

		Params:
			path = Path of the directory to create the new package in. The
				directory will be created if it doesn't exist.
			deps = List of dependencies to add to the package recipe.
			type = Specifies the type of the application skeleton to use.
			format = Determines the package recipe format to use.
			recipe_callback = Optional callback that can be used to
				customize the recipe before it gets written.
			app_args = Arguments to provide to the custom initialization routine.
	*/
	void createEmptyPackage(NativePath path, string[] deps, string type,
		PackageFormat format = PackageFormat.sdl,
		scope void delegate(ref PackageRecipe, ref PackageFormat) recipe_callback = null,
		string[] app_args = [])
	{
		if (!path.absolute) path = m_rootPath ~ path;
		path.normalize();

		// Resolve each requested dependency to a "~>latest" range up front,
		// so a typo'd dependency fails before any files are written.
		VersionRange[string] depVers;
		string[] notFound; // keep track of any failed packages in here
		foreach (dep; deps) {
			const name = PackageName(dep);
			try {
				Version ver = this.getLatestVersion(name);
				if (ver.isBranch())
					depVers[dep] = VersionRange(ver);
				else
					depVers[dep] = VersionRange.fromString("~>" ~ ver.toString());
			} catch (Exception e) {
				notFound ~= dep;
			}
		}

		if(notFound.length > 1){
			throw new Exception(.format("Couldn't find packages: %-(%s, %).", notFound));
		}
		else if(notFound.length == 1){
			throw new Exception(.format("Couldn't find package: %-(%s, %).", notFound));
		}

		if (m_dryRun) return;

		initPackage(path, depVers, type, format, recipe_callback);

		// Skeleton types other than the built-in ones run a template
		// package's `init-exec` subpackage for custom setup.
		if (!["vibe.d", "deimos", "minimal"].canFind(type)) {
			runCustomInitialization(path, type, app_args);
		}

		//Act smug to the user.
		logInfo("Success", Color.green, "created empty project in %s", path.toNativeString().color(Mode.bold))
;
	}

	/**
	 * Run initialization code from a template project
	 *
	 * Looks up a project, then get its `init-exec` subpackage,
	 * and run this to initialize the repository with a default structure.
	 */
	private void runCustomInitialization(NativePath path, string name, string[] runArgs)
	{
		auto name_ = PackageName(name);
		auto template_pack = m_packageManager.getBestPackage(name_);
		if (!template_pack) {
			logInfo("%s is not present, getting and storing it locally", name);
			template_pack = fetch(name_);
		}

		// The template's `init-exec` subpackage is built and run with a
		// private Dub instance so the current project state stays untouched.
		Package initSubPackage = m_packageManager.getSubPackage(template_pack, "init-exec", false);
		auto template_dub = new Dub(null, m_packageSuppliers);
		template_dub.loadPackage(initSubPackage);

		GeneratorSettings settings = this.makeAppSettings();
		settings.runArgs = runArgs;

		// Run the init executable inside the directory being initialized.
		initSubPackage.recipe.buildSettings.workingDirectory = path.toNativeString();
		template_dub.generateProject("build", settings);
	}

	/** Converts the package recipe of the loaded root package to the given format.

		Params:
			destination_file_ext = The file extension matching the desired
				format. Possible values are "json" or "sdl".
			print_only = Print the converted recipe instead of writing to disk
	*/
	void convertRecipe(string destination_file_ext, bool print_only = false)
	{
		import std.path : extension;
		import std.stdio : stdout;
		import dub.recipe.io : serializePackageRecipe, writePackageRecipe;

		if (print_only) {
			auto dst = stdout.lockingTextWriter;
			serializePackageRecipe(dst, m_project.rootPackage.rawRecipe, "dub."~destination_file_ext);
			return;
		}

		auto srcfile = m_project.rootPackage.recipePath;
		auto srcext = srcfile.head.name.extension;
		if (srcext == "."~destination_file_ext) {
			// no logging before this point
			tagWidth.push(5);
			logError("Package format is already %s.", destination_file_ext);
			return;
		}

		// Write the converted recipe first, then delete the old one.
		writePackageRecipe(srcfile.parentPath ~ ("dub."~destination_file_ext), m_project.rootPackage.rawRecipe);
		removeFile(srcfile);
	}

	/** Runs DDOX to generate or serve documentation.

		Params:
			run = If set to true, serves documentation on a local web server.
				Otherwise generates actual HTML files.
			generate_args = Additional command line arguments to pass to
				"ddox generate-html" or "ddox serve-html".
	*/
	void runDdox(bool run, string[] generate_args = null)
	{
		import std.process : browse;

		if (m_dryRun) return;

		// allow to choose a custom ddox tool
		auto tool = m_project.rootPackage.recipe.ddoxTool.empty
			? PackageName("ddox")
			: PackageName(m_project.rootPackage.recipe.ddoxTool);

		auto tool_pack = m_packageManager.getBestPackage(tool);
		if (!tool_pack) {
			logInfo("%s is not present, getting and storing it user wide", tool);
			tool_pack = this.fetch(tool);
		}

		auto ddox_dub = new Dub(null, m_packageSuppliers);
		ddox_dub.loadPackage(tool_pack);
		ddox_dub.upgrade(UpgradeOptions.select);

		GeneratorSettings settings = this.makeAppSettings();

		auto filterargs = m_project.rootPackage.recipe.ddoxFilterArgs.dup;
		if (filterargs.empty) filterargs = ["--min-protection=Protected", "--only-documented"];

		// First invocation: filter docs.json in place.
		settings.runArgs = "filter" ~ filterargs ~ "docs.json";
		ddox_dub.generateProject("build", settings);

		auto p = tool_pack.path;
		p.endsWithSlash = true;
		auto tool_path = p.toNativeString();

		// Second invocation: either serve the docs locally or render HTML files.
		if (run) {
			settings.runArgs = ["serve-html", "--navigation-type=ModuleTree", "docs.json", "--web-file-dir="~tool_path~"public"] ~ generate_args;
			browse("http://127.0.0.1:8080/");
		} else {
			settings.runArgs = ["generate-html", "--navigation-type=ModuleTree", "docs.json", "docs"] ~ generate_args;
		}
		ddox_dub.generateProject("build", settings);

		if (!run) {
			// TODO: ddox should copy those files itself
			version(Windows) runCommand(`xcopy /S /D "`~tool_path~`public\*" docs\`, null, m_rootPath.toNativeString());
			else runCommand("rsync -ru '"~tool_path~"public/' docs/", null, m_rootPath.toNativeString());
		}
	}

	/**
	 * Compute and returns the path were artifacts are stored
	 *
	 * Expose `dub.generator.generator : packageCache` with this instance's
	 * configured cache.
	 */
	protected NativePath packageCache (Package pkg) const
	{
		return .packageCache(this.m_dirs.cache, pkg);
	}

	/// Exposed because `commandLine` replicates `generateProject` for `dub describe`
	/// instead of treating it like a regular generator...
Remove this once the
	/// flaw is fixed, and don't add more calls to this function!
	package(dub) NativePath cachePathDontUse () const @safe pure nothrow @nogc
	{
		return this.m_dirs.cache;
	}

	/// Make a `GeneratorSettings` suitable to generate tools (DDOC, DScanner, etc...)
	private GeneratorSettings makeAppSettings () const
	{
		GeneratorSettings settings;
		auto compiler_binary = this.defaultCompiler;

		// Tools are always built as a debug application with this instance's
		// default compiler/architecture and environment settings applied.
		settings.config = "application";
		settings.buildType = "debug";
		settings.compiler = getCompiler(compiler_binary);
		settings.platform = settings.compiler.determinePlatform(
			settings.buildSettings, compiler_binary, this.defaultArchitecture);
		if (this.defaultLowMemory)
			settings.buildSettings.options |= BuildOption.lowmem;
		if (this.defaultEnvironments)
			settings.buildSettings.addEnvironments(this.defaultEnvironments);
		if (this.defaultBuildEnvironments)
			settings.buildSettings.addBuildEnvironments(this.defaultBuildEnvironments);
		if (this.defaultRunEnvironments)
			settings.buildSettings.addRunEnvironments(this.defaultRunEnvironments);
		if (this.defaultPreGenerateEnvironments)
			settings.buildSettings.addPreGenerateEnvironments(this.defaultPreGenerateEnvironments);
		if (this.defaultPostGenerateEnvironments)
			settings.buildSettings.addPostGenerateEnvironments(this.defaultPostGenerateEnvironments);
		if (this.defaultPreBuildEnvironments)
			settings.buildSettings.addPreBuildEnvironments(this.defaultPreBuildEnvironments);
		if (this.defaultPostBuildEnvironments)
			settings.buildSettings.addPostBuildEnvironments(this.defaultPostBuildEnvironments);
		if (this.defaultPreRunEnvironments)
			settings.buildSettings.addPreRunEnvironments(this.defaultPreRunEnvironments);
		if (this.defaultPostRunEnvironments)
			settings.buildSettings.addPostRunEnvironments(this.defaultPostRunEnvironments);
		settings.run = true;
		settings.overrideToolWorkingDirectory = m_rootPath;

		return settings;
	}

	/**
	 * Determine the default compiler to use for this instance
	 *
	 * The default compiler will be used unless --compiler is specified.
	 * The environment variable `DC` will take precedence over anything,
	 * followed by the configuration. If nothing is found, the folder in
	 * which `dub` is installed will be searched, and if nothing is found,
	 * the $PATH will be searched.
	 * In the majority of cases, as we distribute `dub` alongside the compiler,
	 * it will be found once the directory in which dub reside is searched.
	 *
	 * Returns: The value to use for the default compiler.
	 */
	protected string determineDefaultCompiler() const
	{
		import std.file : thisExePath;
		import std.path : buildPath, dirName, expandTilde, isAbsolute, isDirSeparator;
		import std.range : front;

		// Env takes precedence
		string result;
		if (auto envCompiler = environment.get("DC"))
			result = envCompiler;
		else
			result = this.m_config.defaultCompiler.expandTilde;
		if (result.length && result.isAbsolute)
			return result;

		// `$DUB_BINARY_PATH` expands to the directory containing the dub executable.
		static immutable BinaryPrefix = `$DUB_BINARY_PATH`;
		if (result.startsWith(BinaryPrefix))
			return thisExePath().dirName() ~ result[BinaryPrefix.length .. $];

		// Unqualified relative paths are ambiguous (relative to what?) and rejected.
		if (!find!isDirSeparator(result).empty)
			throw new Exception("defaultCompiler specified in a DUB config file cannot use an unqualified relative path:\n\n" ~ result ~
			"\n\nUse \"$DUB_BINARY_PATH/../path/you/want\" instead.");

		version (Windows) enum sep = ";", exe = ".exe";
		version (Posix) enum sep = ":", exe = "";

		auto compilers = ["dmd", "gdc", "gdmd", "ldc2", "ldmd2"];

		// If a compiler name is specified, look for it next to dub.
		// Otherwise, look for any of the common compilers adjacent to dub.
		if (result.length)
		{
			string compilerPath = buildPath(thisExePath().dirName(), result ~ exe);
			if (existsFile(compilerPath))
				return compilerPath;
		}
		else
		{
			auto nextFound = compilers.find!(bin => existsFile(buildPath(thisExePath().dirName(), bin ~ exe)));
			if (!nextFound.empty)
				return buildPath(thisExePath().dirName(), nextFound.front ~ exe);
		}

		// If nothing found next to dub, search the user's PATH, starting
		// with the compiler name from their DUB config file, if specified.
auto paths = environment.get("PATH", "").splitter(sep).map!NativePath; if (result.length && paths.canFind!(p => existsFile(p ~ (result ~ exe)))) return result; foreach (p; paths) { auto res = compilers.find!(bin => existsFile(p ~ (bin~exe))); if (!res.empty) return res.front; } return compilers[0]; } // This test also relies on the environment and the filesystem, // as the `makePackageSuppliers` does, and should be refactored. unittest { import dub.test.base : TestDub; auto dub = new TestDub(null, ".", null, SkipPackageSuppliers.configured); immutable testdir = getWorkingDirectory() ~ "test-determineDefaultCompiler"; immutable olddc = environment.get("DC", null); immutable oldpath = environment.get("PATH", null); void repairenv(string name, string var) { if (var !is null) environment[name] = var; else if (name in environment) environment.remove(name); } scope (exit) repairenv("DC", olddc); scope (exit) repairenv("PATH", oldpath); scope (exit) std.file.rmdirRecurse(testdir.toNativeString()); version (Windows) enum sep = ";", exe = ".exe"; version (Posix) enum sep = ":", exe = ""; immutable dmdpath = testdir ~ "dmd" ~ "bin"; immutable ldcpath = testdir ~ "ldc" ~ "bin"; ensureDirectory(dmdpath); ensureDirectory(ldcpath); immutable dmdbin = dmdpath ~ ("dmd" ~ exe); immutable ldcbin = ldcpath ~ ("ldc2" ~ exe); writeFile(dmdbin, null); writeFile(ldcbin, null); environment["DC"] = dmdbin.toNativeString(); assert(dub.determineDefaultCompiler() == dmdbin.toNativeString()); environment["DC"] = "dmd"; environment["PATH"] = dmdpath.toNativeString() ~ sep ~ ldcpath.toNativeString(); assert(dub.determineDefaultCompiler() == "dmd"); environment["DC"] = "ldc2"; environment["PATH"] = dmdpath.toNativeString() ~ sep ~ ldcpath.toNativeString(); assert(dub.determineDefaultCompiler() == "ldc2"); environment.remove("DC"); environment["PATH"] = ldcpath.toNativeString() ~ sep ~ dmdpath.toNativeString(); assert(dub.determineDefaultCompiler() == "ldc2"); } private NativePath 
makeAbsolute(NativePath p) const { return p.absolute ? p : m_rootPath ~ p; } private NativePath makeAbsolute(string p) const { return makeAbsolute(NativePath(p)); } } /// Option flags for `Dub.fetch` enum FetchOptions { none = 0, forceBranchUpgrade = 1<<0, usePrerelease = 1<<1, forceRemove = 1<<2, /// Deprecated, does nothing. printOnly = 1<<3, } /// Option flags for `Dub.upgrade` enum UpgradeOptions { none = 0, upgrade = 1<<1, /// Upgrade existing packages preRelease = 1<<2, /// include pre-release versions in upgrade forceRemove = 1<<3, /// Deprecated, does nothing. select = 1<<4, /// Update the dub.selections.json file with the upgraded versions dryRun = 1<<5, /// Instead of downloading new packages, just print a message to notify the user of their existence /*deprecated*/ printUpgradesOnly = dryRun, /// deprecated, use dryRun instead /*deprecated*/ useCachedResult = 1<<6, /// deprecated, has no effect noSaveSelections = 1<<7, /// Don't store updated selections on disk } /// Determines which of the default package suppliers are queried for packages. 
public alias SkipPackageSuppliers = SPS; private class DependencyVersionResolver : DependencyResolver!(Dependency, Dependency) { protected { Dub m_dub; UpgradeOptions m_options; Dependency[][PackageName] m_packageVersions; Package[string] m_remotePackages; SelectedVersions m_selectedVersions; Package m_rootPackage; bool[PackageName] m_packagesToUpgrade; Package[PackageDependency] m_packages; TreeNodes[][TreeNode] m_children; } this(Dub dub, UpgradeOptions options, Package root, SelectedVersions selected_versions) { assert(dub !is null); assert(root !is null); assert(selected_versions !is null); if (environment.get("DUB_NO_RESOLVE_LIMIT") !is null) super(ulong.max); else super(1_000_000); m_dub = dub; m_options = options; m_rootPackage = root; m_selectedVersions = selected_versions; } Dependency[PackageName] resolve(string[] filter) { foreach (name; filter) m_packagesToUpgrade[PackageName(name)] = true; return super.resolve(TreeNode(PackageName(m_rootPackage.name), Dependency(m_rootPackage.version_)), (m_options & UpgradeOptions.dryRun) == 0); } protected bool isFixedPackage(in PackageName pack) { return m_packagesToUpgrade !is null && pack !in m_packagesToUpgrade; } protected override Dependency[] getAllConfigs(in PackageName pack) { if (auto pvers = pack in m_packageVersions) return *pvers; if ((!(m_options & UpgradeOptions.upgrade) || isFixedPackage(pack)) && m_selectedVersions.hasSelectedVersion(pack)) { auto ret = [m_selectedVersions.getSelectedVersion(pack)]; logDiagnostic("Using fixed selection %s %s", pack, ret[0]); m_packageVersions[pack] = ret; return ret; } logDiagnostic("Search for versions of %s (%s package suppliers)", pack, m_dub.m_packageSuppliers.length); Version[] versions; foreach (p; m_dub.packageManager.getPackageIterator(pack.toString())) versions ~= p.version_; foreach (ps; m_dub.m_packageSuppliers) { try { auto vers = ps.getVersions(pack); vers.reverse(); if (!vers.length) { logDiagnostic("No versions for %s for %s", pack, ps.description); 
continue; } versions ~= vers; break; } catch (Exception e) { logWarn("Package %s not found in %s: %s", pack, ps.description, e.msg); logDebug("Full error: %s", e.toString().sanitize); } } // sort by version, descending, and remove duplicates versions = versions.sort!"a>b".uniq.array; // move pre-release versions to the back of the list if no preRelease flag is given if (!(m_options & UpgradeOptions.preRelease)) versions = versions.filter!(v => !v.isPreRelease).array ~ versions.filter!(v => v.isPreRelease).array; // filter out invalid/unreachable dependency specs versions = versions.filter!((v) { bool valid = getPackage(pack, Dependency(v)) !is null; if (!valid) logDiagnostic("Excluding invalid dependency specification %s %s from dependency resolution process.", pack, v); return valid; }).array; if (!versions.length) logDiagnostic("Nothing found for %s", pack); else logDiagnostic("Return for %s: %s", pack, versions); auto ret = versions.map!(v => Dependency(v)).array; m_packageVersions[pack] = ret; return ret; } protected override Dependency[] getSpecificConfigs(in PackageName pack, TreeNodes nodes) { if (!nodes.configs.path.empty || !nodes.configs.repository.empty) { if (getPackage(nodes.pack, nodes.configs)) return [nodes.configs]; else return null; } else return null; } protected override TreeNodes[] getChildren(TreeNode node) { if (auto pc = node in m_children) return *pc; auto ret = getChildrenRaw(node); m_children[node] = ret; return ret; } private final TreeNodes[] getChildrenRaw(TreeNode node) { import std.array : appender; auto ret = appender!(TreeNodes[]); auto pack = getPackage(node.pack, node.config); if (!pack) { // this can happen when the package description contains syntax errors logDebug("Invalid package in dependency tree: %s %s", node.pack, node.config); return null; } auto basepack = pack.basePackage; foreach (d; pack.getAllDependenciesRange()) { auto dbasename = d.name.main.toString(); // detect dependencies to the root package (or sub packages 
thereof) if (dbasename == basepack.name) { auto absdeppath = d.spec.mapToPath(pack.path).path; absdeppath.endsWithSlash = true; auto subpack = m_dub.m_packageManager.getSubPackage(basepack, d.name.sub, true); if (subpack) { auto desireddeppath = basepack.path; desireddeppath.endsWithSlash = true; auto altdeppath = d.name == d.name.main ? basepack.path : subpack.path; altdeppath.endsWithSlash = true; if (!d.spec.path.empty && absdeppath != desireddeppath) logWarn("Sub package %s, referenced by %s %s must be referenced using the path to its base package", subpack.name, pack.name, pack); enforce(d.spec.path.empty || absdeppath == desireddeppath || absdeppath == altdeppath, format("Dependency from %s to %s uses wrong path: %s vs. %s", node.pack, subpack.name, absdeppath.toNativeString(), desireddeppath.toNativeString())); } ret ~= TreeNodes(d.name, node.config); continue; } DependencyType dt; if (d.spec.optional) { if (d.spec.default_) dt = DependencyType.optionalDefault; else dt = DependencyType.optional; } else dt = DependencyType.required; Dependency dspec = d.spec.mapToPath(pack.path); // if not upgrading, use the selected version if (!(m_options & UpgradeOptions.upgrade) && m_selectedVersions.hasSelectedVersion(d.name.main)) dspec = m_selectedVersions.getSelectedVersion(d.name.main); // keep selected optional dependencies and avoid non-selected optional-default dependencies by default if (!m_selectedVersions.bare) { if (dt == DependencyType.optionalDefault && !m_selectedVersions.hasSelectedVersion(d.name.main)) dt = DependencyType.optional; else if (dt == DependencyType.optional && m_selectedVersions.hasSelectedVersion(d.name.main)) dt = DependencyType.optionalDefault; } ret ~= TreeNodes(d.name, dspec, dt); } return ret.data; } protected override bool matches(Dependency configs, Dependency config) { if (!configs.path.empty) return configs.path == config.path; return configs.merge(config).valid; } private Package getPackage(PackageName name, Dependency dep) { auto 
key = PackageDependency(name, dep); if (auto pp = key in m_packages) return *pp; auto p = getPackageRaw(name, dep); m_packages[key] = p; return p; } private Package getPackageRaw(in PackageName name, Dependency dep) { import dub.recipe.json; // for sub packages, first try to get them from the base package if (name.main != name) { auto subname = name.sub; auto basepack = getPackage(name.main, dep); if (!basepack) return null; if (auto sp = m_dub.m_packageManager.getSubPackage(basepack, subname, true)) return sp; logDiagnostic("Subpackage %s@%s not found.", name, dep); return null; } // shortcut if the referenced package is the root package if (name.main.toString() == m_rootPackage.basePackage.name) return m_rootPackage.basePackage; if (!dep.repository.empty) { auto ret = m_dub.packageManager.loadSCMPackage(name, dep.repository); return ret !is null && dep.matches(ret.version_) ? ret : null; } if (!dep.path.empty) { try { return m_dub.packageManager.getOrLoadPackage(dep.path); } catch (Exception e) { logDiagnostic("Failed to load path based dependency %s: %s", name, e.msg); logDebug("Full error: %s", e.toString().sanitize); return null; } } const vers = dep.version_; if (auto ret = m_dub.m_packageManager.getBestPackage(name, vers)) return ret; auto key = name.toString() ~ ":" ~ vers.toString(); if (auto ret = key in m_remotePackages) return *ret; auto prerelease = (m_options & UpgradeOptions.preRelease) != 0; foreach (ps; m_dub.m_packageSuppliers) { if (name.main == name) { try { auto desc = ps.fetchPackageRecipe(name, VersionRange(vers, vers), prerelease); if (desc.type == Json.Type.null_) continue; PackageRecipe recipe; parseJson(recipe, desc); auto ret = new Package(recipe); m_remotePackages[key] = ret; return ret; } catch (Exception e) { logDiagnostic("Metadata for %s %s could not be downloaded from %s: %s", name, vers, ps.description, e.msg); logDebug("Full error: %s", e.toString().sanitize); } } else { logDiagnostic("Package %s not found in base package 
description (%s). Downloading whole package.", name, vers.toString()); try { FetchOptions fetchOpts; fetchOpts |= prerelease ? FetchOptions.usePrerelease : FetchOptions.none; m_dub.fetch(name.main, vers, fetchOpts, m_dub.defaultPlacementLocation, "need sub package description"); auto ret = m_dub.m_packageManager.getBestPackage(name, vers); if (!ret) { logWarn("Package %s %s doesn't have a sub package %s", name.main, dep, name); return null; } m_remotePackages[key] = ret; return ret; } catch (Exception e) { logDiagnostic("Package %s could not be downloaded from %s: %s", name.main, ps.description, e.msg); logDebug("Full error: %s", e.toString().sanitize); } } } m_remotePackages[key] = null; logWarn("Package %s %s could not be loaded either locally, or from the configured package registries.", name, dep); return null; } } package struct SpecialDirs { /// The path where to store temporary files and directory NativePath temp; /// The system-wide dub-specific folder NativePath systemSettings; /// The dub-specific folder in the user home directory NativePath userSettings; /** * User location where to install packages * * On Windows, this folder, unlike `userSettings`, does not roam, * so an account on a company network will not save the content of this data, * unlike `userSettings`. * * On Posix, this is currently equivalent to `userSettings`. * * See_Also: https://docs.microsoft.com/en-us/windows/win32/shell/knownfolderid */ NativePath userPackages; /** * Location at which build/generation artifact will be written * * All build artifacts are stored under a single build cache, * which is usually located under `$HOME/.dub/cache/` on POSIX, * and `%LOCALAPPDATA%/dub/cache` on Windows. * * Versions of dub prior to v1.31.0 used to store artifact under the * project directory, but this led to issues with packages stored on * read-only file system / location, and lingering artifacts scattered * through the file system. 
* * Dub writes in the cache directory some Json description files * of the available artifacts. These files are intended to be read by * 3rd party software (e.g. Meson). The default cache location specified * in this function should therefore not change across future Dub versions. */ NativePath cache; /// Returns: An instance of `SpecialDirs` initialized from the environment public static SpecialDirs make () { import std.file : tempDir; SpecialDirs result; result.temp = NativePath(tempDir); version(Windows) { result.systemSettings = NativePath(environment.get("ProgramData")) ~ "dub/"; immutable appDataDir = environment.get("APPDATA"); result.userSettings = NativePath(appDataDir) ~ "dub/"; // LOCALAPPDATA is not defined before Windows Vista result.userPackages = NativePath(environment.get("LOCALAPPDATA", appDataDir)) ~ "dub"; } else version(Posix) { result.systemSettings = NativePath("/var/lib/dub/"); result.userSettings = NativePath(environment.get("HOME")) ~ ".dub/"; if (!result.userSettings.absolute) result.userSettings = getWorkingDirectory() ~ result.userSettings; result.userPackages = result.userSettings; } result.cache = result.userPackages ~ "cache"; return result; } } dub-1.40.0/source/dub/generators/000077500000000000000000000000001477246567400166175ustar00rootroot00000000000000dub-1.40.0/source/dub/generators/build.d000066400000000000000000000770211477246567400200720ustar00rootroot00000000000000/** Generator for direct compiler builds. Copyright: © 2013-2013 rejectedsoftware e.K. License: Subject to the terms of the MIT license, as written in the included LICENSE.txt file. 
Authors: Sönke Ludwig */ module dub.generators.build; import dub.compilers.compiler; import dub.compilers.utils; import dub.generators.generator; import dub.internal.utils; import dub.internal.vibecompat.core.file; import dub.internal.vibecompat.inet.path; import dub.internal.logging; import dub.package_; import dub.packagemanager; import dub.project; import std.algorithm; import std.array; import std.conv; import std.exception; import std.file; import std.process; import std.string; import std.encoding : sanitize; string getObjSuffix(const scope ref BuildPlatform platform) { return platform.isWindows() ? ".obj" : ".o"; } string computeBuildName(string config, in GeneratorSettings settings, const string[][] hashing...) { import std.digest.sha : SHA256; import std.base64 : Base64URL; SHA256 hash; hash.start(); void addHash(in string[] strings...) { foreach (s; strings) { hash.put(cast(ubyte[])s); hash.put(0); } hash.put(0); } foreach(strings; hashing) addHash(strings); addHash(settings.platform.platform); addHash(settings.platform.architecture); addHash(settings.platform.compiler); addHash(settings.platform.compilerVersion); if(settings.recipeName != "") addHash(settings.recipeName); const hashstr = Base64URL.encode(hash.finish()[0 .. 
$ / 2]).stripRight("="); if(settings.recipeName != "") { import std.path:stripExtension, baseName; string recipeName = settings.recipeName.baseName.stripExtension; return format("%s-%s-%s-%s", config, settings.buildType, recipeName, hashstr); } return format("%s-%s-%s", config, settings.buildType, hashstr); } class BuildGenerator : ProjectGenerator { private { NativePath[] m_temporaryFiles; } this(Project project) { super(project); } override void generateTargets(GeneratorSettings settings, in TargetInfo[string] targets) { import std.path : setExtension; scope (exit) cleanupTemporaries(); void checkPkgRequirements(const(Package) pkg) { const tr = pkg.recipe.toolchainRequirements; tr.checkPlatform(settings.platform, pkg.name); } checkPkgRequirements(m_project.rootPackage); foreach (pkg; m_project.dependencies) checkPkgRequirements(pkg); auto root_ti = targets[m_project.rootPackage.name]; const rootTT = root_ti.buildSettings.targetType; enforce(!(settings.rdmd && rootTT == TargetType.none), "Building package with target type \"none\" with rdmd is not supported yet."); logInfo("Starting", Color.light_green, "Performing \"%s\" build using %s for %-(%s, %).", settings.buildType.color(Color.magenta), settings.platform.compilerBinary, settings.platform.architecture); if (settings.rdmd || (rootTT == TargetType.staticLibrary && !settings.buildDeep)) { // Only build the main target. 
// RDMD always builds everything at once and static libraries don't need their // dependencies to be built, unless --deep flag is specified NativePath tpath; buildTarget(settings, root_ti.buildSettings.dup, m_project.rootPackage, root_ti.config, root_ti.packages, null, tpath); return; } // Recursive build starts here bool any_cached = false; NativePath[string] target_paths; NativePath[] dynamicLibDepsFilesToCopy; // to the root package output dir const copyDynamicLibDepsLinkerFiles = rootTT == TargetType.dynamicLibrary || rootTT == TargetType.none; const copyDynamicLibDepsRuntimeFiles = copyDynamicLibDepsLinkerFiles || rootTT == TargetType.executable; bool[string] visited; void buildTargetRec(string target) { if (target in visited) return; visited[target] = true; auto ti = targets[target]; foreach (dep; ti.dependencies) buildTargetRec(dep); NativePath[] additional_dep_files; auto bs = ti.buildSettings.dup; const tt = bs.targetType; foreach (ldep; ti.linkDependencies) { const ldepPath = target_paths[ldep].toNativeString(); const doLink = tt != TargetType.staticLibrary && !(bs.options & BuildOption.syntaxOnly); if (doLink && isLinkerFile(settings.platform, ldepPath)) bs.addSourceFiles(ldepPath); else additional_dep_files ~= target_paths[ldep]; if (targets[ldep].buildSettings.targetType == TargetType.dynamicLibrary) { // copy the .{dll,so,dylib} if (copyDynamicLibDepsRuntimeFiles) dynamicLibDepsFilesToCopy ~= NativePath(ldepPath); if (settings.platform.isWindows()) { // copy the accompanying .pdb if found if (copyDynamicLibDepsRuntimeFiles) { const pdb = ldepPath.setExtension(".pdb"); if (existsFile(pdb)) dynamicLibDepsFilesToCopy ~= NativePath(pdb); } const importLib = ldepPath.setExtension(".lib"); if (existsFile(importLib)) { // link dependee against the import lib if (doLink) bs.addSourceFiles(importLib); // and copy if (copyDynamicLibDepsLinkerFiles) dynamicLibDepsFilesToCopy ~= NativePath(importLib); } // copy the .exp file if found const exp = 
ldepPath.setExtension(".exp"); if (copyDynamicLibDepsLinkerFiles && existsFile(exp)) dynamicLibDepsFilesToCopy ~= NativePath(exp); } } } NativePath tpath; if (tt != TargetType.none) { if (buildTarget(settings, bs, ti.pack, ti.config, ti.packages, additional_dep_files, tpath)) any_cached = true; } target_paths[target] = tpath; } buildTargetRec(m_project.rootPackage.name); if (dynamicLibDepsFilesToCopy.length) { const rootTargetPath = NativePath(root_ti.buildSettings.targetPath); ensureDirectory(rootTargetPath); foreach (src; dynamicLibDepsFilesToCopy) { logDiagnostic("Copying target from %s to %s", src.toNativeString(), rootTargetPath.toNativeString()); hardLinkFile(src, rootTargetPath ~ src.head, true); } } if (any_cached) { logInfo("Finished", Color.green, "To force a rebuild of up-to-date targets, run again with --force" ); } } override void performPostGenerateActions(GeneratorSettings settings, in TargetInfo[string] targets) { // run the generated executable auto buildsettings = targets[m_project.rootPackage.name].buildSettings.dup; if (settings.run && !(buildsettings.options & BuildOption.syntaxOnly)) { NativePath exe_file_path; if (m_tempTargetExecutablePath.empty) exe_file_path = getTargetPath(buildsettings, settings); else exe_file_path = m_tempTargetExecutablePath ~ settings.compiler.getTargetFileName(buildsettings, settings.platform); runTarget(exe_file_path, buildsettings, settings.runArgs, settings); } } private bool buildTarget(GeneratorSettings settings, BuildSettings buildsettings, in Package pack, string config, in Package[] packages, in NativePath[] additional_dep_files, out NativePath target_path) { import std.path : absolutePath; auto cwd = settings.toolWorkingDirectory; bool generate_binary = !(buildsettings.options & BuildOption.syntaxOnly); auto build_id = buildsettings.computeBuildID(pack.path, config, settings); // make all paths relative to shrink the command line string makeRelative(string path) { return shrinkPath(NativePath(path), cwd); } 
foreach (ref f; buildsettings.sourceFiles) f = makeRelative(f); foreach (ref p; buildsettings.importPaths) p = makeRelative(p); foreach (ref p; buildsettings.cImportPaths) p = makeRelative(p); foreach (ref p; buildsettings.stringImportPaths) p = makeRelative(p); // perform the actual build bool cached = false; if (settings.rdmd) performRDMDBuild(settings, buildsettings, pack, config, target_path); else if (!generate_binary) performDirectBuild(settings, buildsettings, pack, config, target_path); else cached = performCachedBuild(settings, buildsettings, pack, config, build_id, packages, additional_dep_files, target_path); // HACK: cleanup dummy doc files, we shouldn't specialize on buildType // here and the compiler shouldn't need dummy doc output. if (settings.buildType == "ddox") { if ("__dummy.html".exists) removeFile("__dummy.html"); if ("__dummy_docs".exists) rmdirRecurse("__dummy_docs"); } // run post-build commands if (!cached && buildsettings.postBuildCommands.length) { logInfo("Post-build", Color.light_green, "Running commands"); runBuildCommands(CommandType.postBuild, buildsettings.postBuildCommands, pack, m_project, settings, buildsettings, [["DUB_BUILD_PATH" : target_path is NativePath.init ? "" : target_path.parentPath.toNativeString.absolutePath(settings.toolWorkingDirectory.toNativeString)]]); } return cached; } private bool performCachedBuild(GeneratorSettings settings, BuildSettings buildsettings, in Package pack, string config, string build_id, in Package[] packages, in NativePath[] additional_dep_files, out NativePath target_binary_path) { NativePath target_path; if (settings.tempBuild) { string packageName = pack.basePackage is null ? 
pack.name : pack.basePackage.name; m_tempTargetExecutablePath = target_path = getTempDir() ~ format(".dub/build/%s-%s/%s/", packageName, pack.version_, build_id); } else target_path = targetCacheDir(settings.cache, pack, build_id); if (!settings.force && isUpToDate(target_path, buildsettings, settings, pack, packages, additional_dep_files)) { logInfo("Up-to-date", Color.green, "%s %s: target for configuration [%s] is up to date.", pack.name.color(Mode.bold), pack.version_, config.color(Color.blue)); logDiagnostic("Using existing build in %s.", target_path.toNativeString()); target_binary_path = target_path ~ settings.compiler.getTargetFileName(buildsettings, settings.platform); if (!settings.tempBuild) copyTargetFile(target_path, buildsettings, settings); return true; } if (!isWritableDir(target_path, true)) { if (!settings.tempBuild) logInfo("Build directory %s is not writable. Falling back to direct build in the system's temp folder.", target_path); performDirectBuild(settings, buildsettings, pack, config, target_path); return false; } logInfo("Building", Color.light_green, "%s %s: building configuration [%s]", pack.name.color(Mode.bold), pack.version_, config.color(Color.blue)); if( buildsettings.preBuildCommands.length ){ logInfo("Pre-build", Color.light_green, "Running commands"); runBuildCommands(CommandType.preBuild, buildsettings.preBuildCommands, pack, m_project, settings, buildsettings); } // override target path auto cbuildsettings = buildsettings; cbuildsettings.targetPath = target_path.toNativeString(); buildWithCompiler(settings, cbuildsettings); target_binary_path = getTargetPath(cbuildsettings, settings); if (!settings.tempBuild) { copyTargetFile(target_path, buildsettings, settings); updateCacheDatabase(settings, cbuildsettings, pack, config, build_id, target_binary_path.toNativeString()); } return false; } private void updateCacheDatabase(GeneratorSettings settings, BuildSettings buildsettings, in Package pack, string config, string build_id, 
string target_binary_path) { import dub.internal.vibecompat.data.json; import core.time : seconds; // Generate a `db.json` in the package version cache directory. // This is read by 3rd party software (e.g. Meson) in order to find // relevant build artifacts in Dub's cache. enum jsonFileName = "db.json"; enum lockFileName = "db.lock"; const pkgCacheDir = packageCache(settings.cache, pack); auto lock = lockFile((pkgCacheDir ~ lockFileName).toNativeString(), 3.seconds); const dbPath = pkgCacheDir ~ jsonFileName; const dbPathStr = dbPath.toNativeString(); Json db; if (exists(dbPathStr)) { const text = readText(dbPath); db = parseJsonString(text, dbPathStr); enforce(db.type == Json.Type.array, "Expected a JSON array in " ~ dbPathStr); } else { db = Json.emptyArray; } foreach_reverse (entry; db) { if (entry["buildId"].get!string == build_id) { // duplicate return; } } Json entry = Json.emptyObject; entry["architecture"] = serializeToJson(settings.platform.architecture); entry["buildId"] = build_id; entry["buildType"] = settings.buildType; entry["compiler"] = settings.platform.compiler; entry["compilerBinary"] = settings.platform.compilerBinary; entry["compilerVersion"] = settings.platform.compilerVersion; entry["configuration"] = config; entry["package"] = pack.name; entry["platform"] = serializeToJson(settings.platform.platform); entry["targetBinaryPath"] = target_binary_path; entry["version"] = pack.version_.toString(); db ~= entry; writeFile(dbPath, representation(db.toPrettyString())); } private void performRDMDBuild(GeneratorSettings settings, ref BuildSettings buildsettings, in Package pack, string config, out NativePath target_path) { auto cwd = settings.toolWorkingDirectory; //Added check for existence of [AppNameInPackagejson].d //If exists, use that as the starting file. 
NativePath mainsrc; if (buildsettings.mainSourceFile.length) { mainsrc = NativePath(buildsettings.mainSourceFile); if (!mainsrc.absolute) mainsrc = pack.path ~ mainsrc; } else { mainsrc = getMainSourceFile(pack); logWarn(`Package has no "mainSourceFile" defined. Using best guess: %s`, mainsrc.relativeTo(pack.path).toNativeString()); } // do not pass all source files to RDMD, only the main source file buildsettings.sourceFiles = buildsettings.sourceFiles.filter!(s => !s.endsWith(".d"))().array(); settings.compiler.prepareBuildSettings(buildsettings, settings.platform, BuildSetting.commandLine); auto generate_binary = !buildsettings.dflags.canFind("-o-"); // Create start script, which will be used by the calling bash/cmd script. // build "rdmd --force %DFLAGS% -I%~dp0..\source -Jviews -Isource @deps.txt %LIBS% source\app.d" ~ application arguments // or with "/" instead of "\" bool tmp_target = false; if (generate_binary) { if (settings.tempBuild || (settings.run && !isWritableDir(NativePath(buildsettings.targetPath), true))) { import std.random; auto rnd = to!string(uniform(uint.min, uint.max)) ~ "-"; auto tmpdir = getTempDir()~".rdmd/source/"; buildsettings.targetPath = tmpdir.toNativeString(); buildsettings.targetName = rnd ~ buildsettings.targetName; m_temporaryFiles ~= tmpdir; tmp_target = true; } target_path = getTargetPath(buildsettings, settings); settings.compiler.setTarget(buildsettings, settings.platform); } logDiagnostic("Application output name is '%s'", settings.compiler.getTargetFileName(buildsettings, settings.platform)); string[] flags = ["--build-only", "--compiler="~settings.platform.compilerBinary]; if (settings.force) flags ~= "--force"; flags ~= buildsettings.dflags; flags ~= mainsrc.relativeTo(cwd).toNativeString(); if (buildsettings.preBuildCommands.length){ logInfo("Pre-build", Color.light_green, "Running commands"); runCommands(buildsettings.preBuildCommands, null, cwd.toNativeString()); } logInfo("Building", Color.light_green, "%s %s [%s]", 
pack.name.color(Mode.bold), pack.version_, config.color(Color.blue)); logInfo("Running rdmd..."); logDiagnostic("rdmd %s", join(flags, " ")); auto rdmd_pid = spawnProcess("rdmd" ~ flags, null, Config.none, cwd.toNativeString()); auto result = rdmd_pid.wait(); enforce(result == 0, "Build command failed with exit code "~to!string(result)); if (tmp_target) { m_temporaryFiles ~= target_path; foreach (f; buildsettings.copyFiles) m_temporaryFiles ~= NativePath(buildsettings.targetPath).parentPath ~ NativePath(f).head; } } private void performDirectBuild(GeneratorSettings settings, ref BuildSettings buildsettings, in Package pack, string config, out NativePath target_path) { auto cwd = settings.toolWorkingDirectory; auto generate_binary = !(buildsettings.options & BuildOption.syntaxOnly); // make file paths relative to shrink the command line foreach (ref f; buildsettings.sourceFiles) { auto fp = NativePath(f); if( fp.absolute ) fp = fp.relativeTo(cwd); f = fp.toNativeString(); } logInfo("Building", Color.light_green, "%s %s [%s]", pack.name.color(Mode.bold), pack.version_, config.color(Color.blue)); // make all target/import paths relative string makeRelative(string path) { auto p = NativePath(path); // storing in a separate temprary to work around #601 auto prel = p.absolute ? 
p.relativeTo(cwd) : p; return prel.toNativeString(); } buildsettings.targetPath = makeRelative(buildsettings.targetPath); foreach (ref p; buildsettings.importPaths) p = makeRelative(p); foreach (ref p; buildsettings.cImportPaths) p = makeRelative(p); foreach (ref p; buildsettings.stringImportPaths) p = makeRelative(p); bool is_temp_target = false; if (generate_binary) { if (settings.tempBuild || (settings.run && !isWritableDir(NativePath(buildsettings.targetPath), true))) { import std.random; auto rnd = to!string(uniform(uint.min, uint.max)); auto tmppath = getTempDir()~("dub/"~rnd~"/"); buildsettings.targetPath = tmppath.toNativeString(); m_temporaryFiles ~= tmppath; is_temp_target = true; } target_path = getTargetPath(buildsettings, settings); } if( buildsettings.preBuildCommands.length ){ logInfo("Pre-build", Color.light_green, "Running commands"); runBuildCommands(CommandType.preBuild, buildsettings.preBuildCommands, pack, m_project, settings, buildsettings); } buildWithCompiler(settings, buildsettings); if (is_temp_target) { m_temporaryFiles ~= target_path; foreach (f; buildsettings.copyFiles) m_temporaryFiles ~= NativePath(buildsettings.targetPath).parentPath ~ NativePath(f).head; } } private void copyTargetFile(in NativePath build_path, in BuildSettings buildsettings, in GeneratorSettings settings) { ensureDirectory(NativePath(buildsettings.targetPath)); string[] filenames = [ settings.compiler.getTargetFileName(buildsettings, settings.platform) ]; // Windows: add .pdb (for executables and DLLs) and/or import .lib & .exp (for DLLs) if found if (settings.platform.isWindows()) { void addIfFound(string extension) { import std.path : setExtension; const candidate = filenames[0].setExtension(extension); if (existsFile(build_path ~ candidate)) filenames ~= candidate; } const tt = buildsettings.targetType; if (tt == TargetType.executable || tt == TargetType.dynamicLibrary) addIfFound(".pdb"); if (tt == TargetType.dynamicLibrary) { addIfFound(".lib"); 
addIfFound(".exp"); } } foreach (filename; filenames) { auto src = build_path ~ filename; logDiagnostic("Copying target from %s to %s", src.toNativeString(), buildsettings.targetPath); hardLinkFile(src, NativePath(buildsettings.targetPath) ~ filename, true); } } private bool isUpToDate(NativePath target_path, BuildSettings buildsettings, GeneratorSettings settings, in Package main_pack, in Package[] packages, in NativePath[] additional_dep_files) { import std.datetime; auto targetfile = target_path ~ settings.compiler.getTargetFileName(buildsettings, settings.platform); if (!existsFile(targetfile)) { logDiagnostic("Target '%s' doesn't exist, need rebuild.", targetfile.toNativeString()); return false; } auto targettime = getFileInfo(targetfile).timeModified; auto allfiles = appender!(string[]); allfiles ~= buildsettings.sourceFiles; allfiles ~= buildsettings.importFiles; allfiles ~= buildsettings.stringImportFiles; allfiles ~= buildsettings.extraDependencyFiles; // TODO: add library files foreach (p; packages) { if (p.recipePath != NativePath.init) allfiles ~= p.recipePath.toNativeString(); else if (p.basePackage.recipePath != NativePath.init) allfiles ~= p.basePackage.recipePath.toNativeString(); } foreach (f; additional_dep_files) allfiles ~= f.toNativeString(); bool checkSelectedVersions = !settings.single; if (checkSelectedVersions && main_pack is m_project.rootPackage && m_project.rootPackage.getAllDependencies().length > 0) allfiles ~= (main_pack.path ~ SelectedVersions.defaultFile).toNativeString(); foreach (file; allfiles.data) { if (!existsFile(file)) { logDiagnostic("File %s doesn't exist, triggering rebuild.", file); return false; } auto ftime = getFileInfo(file).timeModified; if (ftime > Clock.currTime) logWarn("File '%s' was modified in the future. Please re-save.", file); if (ftime > targettime) { logDiagnostic("File '%s' modified, need rebuild.", file); return false; } } return true; } /// Output an unique name to represent the source file. 
/// Calls with path that resolve to the same file on the filesystem will return the same,
	/// unless they include different symbolic links (which are not resolved).
	deprecated("Use the overload taking in the current working directory")
	static string pathToObjName(const scope ref BuildPlatform platform, string path)
	{
		return pathToObjName(platform, path, getWorkingDirectory);
	}

	/// ditto
	///
	/// The name is derived from the normalized absolute path with directory
	/// separators replaced by dots. Everything up to the last dot (i.e. the
	/// directory portion) is folded into a CRC32 hex digest, so the object
	/// file name stays short while remaining unique per source location.
	static string pathToObjName(const scope ref BuildPlatform platform, string path, NativePath cwd)
	{
		import std.digest.crc : crc32Of;
		// FIX: dropped `relativePath` and `stripDrive` from this import —
		// neither symbol was used anywhere in the function body.
		import std.path : buildNormalizedPath, dirSeparator;

		// strip a trailing ".d" so the extension does not end up as a
		// spurious path component after the separator substitution below
		if (path.endsWith(".d")) path = path[0 .. $-2];
		auto ret = buildNormalizedPath(cwd.toNativeString(), path).replace(dirSeparator, ".");
		auto idx = ret.lastIndexOf('.');
		const objSuffix = getObjSuffix(platform);
		return idx < 0
			? ret ~ objSuffix
			: format("%s_%(%02x%)%s", ret[idx+1 .. $], crc32Of(ret[0 .. idx]), objSuffix);
	}

	/// Compile a single source file (srcFile), and write the object to objName.
static string compileUnit(string srcFile, string objName, BuildSettings bs, GeneratorSettings gs)
	{
		NativePath tempobj = NativePath(bs.targetPath)~objName;
		string objPath = tempobj.toNativeString();
		// strip link-related settings; only an object file is produced here
		bs.libs = null;
		bs.lflags = null;
		bs.sourceFiles = [ srcFile ];
		bs.targetType = TargetType.object;
		gs.compiler.prepareBuildSettings(bs, gs.platform, BuildSetting.commandLine);
		gs.compiler.setTarget(bs, gs.platform, objPath);
		gs.compiler.invoke(bs, gs.platform, gs.compileCallback, gs.toolWorkingDirectory);
		return objPath;
	}

	/// Performs the actual compiler/linker invocations for one target,
	/// dispatching on the configured build mode: per-source-file compilation
	/// (singleFile), a combined compile+link run (allAtOnce / GDC / static
	/// libraries), or a separate compile step followed by a link step.
	private void buildWithCompiler(GeneratorSettings settings, BuildSettings buildsettings)
	{
		auto generate_binary = !(buildsettings.options & BuildOption.syntaxOnly);
		auto is_static_library = buildsettings.targetType == TargetType.staticLibrary || buildsettings.targetType == TargetType.library;

		// on failure, delete a possibly partially written target binary so it
		// is not mistaken for a valid cached build later
		scope (failure) {
			logDiagnostic("FAIL %s %s %s" , buildsettings.targetPath, buildsettings.targetName, buildsettings.targetType);
			auto tpath = getTargetPath(buildsettings, settings);
			if (generate_binary && existsFile(tpath))
				removeFile(tpath);
		}
		if (settings.buildMode == BuildMode.singleFile && generate_binary) {
			import std.parallelism, std.range : walkLength;

			auto lbuildsettings = buildsettings;
			// compile everything that is not a pre-built linker input
			auto srcs = buildsettings.sourceFiles.filter!(f => !isLinkerFile(settings.platform, f));
			auto objs = new string[](srcs.walkLength);

			void compileSource(size_t i, string src) {
				logInfo("Compiling", Color.light_green, "%s", src);
				const objPath = pathToObjName(settings.platform, src, settings.toolWorkingDirectory);
				objs[i] = compileUnit(src, objPath, buildsettings, settings);
			}

			if (settings.parallelBuild) {
				// work unit size 1: each source file is compiled independently
				foreach (i, src; srcs.parallel(1)) compileSource(i, src);
			} else {
				foreach (i, src; srcs.array) compileSource(i, src);
			}

			logInfo("Linking", Color.light_green, "%s", buildsettings.targetName.color(Mode.bold));
			lbuildsettings.sourceFiles = is_static_library ? [] : lbuildsettings.sourceFiles.filter!(f => isLinkerFile(settings.platform, f)).array;
			settings.compiler.setTarget(lbuildsettings, settings.platform);
			settings.compiler.prepareBuildSettings(lbuildsettings, settings.platform, BuildSetting.commandLineSeparate|BuildSetting.sourceFiles);
			settings.compiler.invokeLinker(lbuildsettings, settings.platform, objs, settings.linkCallback, settings.toolWorkingDirectory);

		// NOTE: separate compile/link is not yet enabled for GDC.
		} else if (generate_binary && (settings.buildMode == BuildMode.allAtOnce || settings.compiler.name == "gdc" || is_static_library)) {
			// don't include symbols of dependencies (will be included by the top level target)
			if (is_static_library) buildsettings.sourceFiles = buildsettings.sourceFiles.filter!(f => !isLinkerFile(settings.platform, f)).array;

			// setup for command line
			settings.compiler.setTarget(buildsettings, settings.platform);
			settings.compiler.prepareBuildSettings(buildsettings, settings.platform, BuildSetting.commandLine);

			// invoke the compiler
			settings.compiler.invoke(buildsettings, settings.platform, settings.compileCallback, settings.toolWorkingDirectory);
		} else {
			// determine path for the temporary object file
			string tempobjname = buildsettings.targetName ~ getObjSuffix(settings.platform);
			NativePath tempobj = NativePath(buildsettings.targetPath) ~ tempobjname;

			// setup linker command line
			auto lbuildsettings = buildsettings;
			lbuildsettings.sourceFiles = lbuildsettings.sourceFiles.filter!(f => isLinkerFile(settings.platform, f)).array;
			if (generate_binary) settings.compiler.setTarget(lbuildsettings, settings.platform);
			settings.compiler.prepareBuildSettings(lbuildsettings, settings.platform, BuildSetting.commandLineSeparate|BuildSetting.sourceFiles);

			// setup compiler command line
			buildsettings.libs = null;
			buildsettings.lflags = null;
			if (generate_binary) buildsettings.addDFlags("-c", "-of"~tempobj.toNativeString());
			buildsettings.sourceFiles = buildsettings.sourceFiles.filter!(f => !isLinkerFile(settings.platform, f)).array;
			settings.compiler.prepareBuildSettings(buildsettings, settings.platform, BuildSetting.commandLine);

			settings.compiler.invoke(buildsettings, settings.platform, settings.compileCallback, settings.toolWorkingDirectory);

			if (generate_binary) {
				if (settings.tempBuild) {
					logInfo("Linking", Color.light_green, "%s => %s", buildsettings.targetName.color(Mode.bold), buildsettings.getTargetPath(settings));
				} else {
					logInfo("Linking", Color.light_green, "%s", buildsettings.targetName.color(Mode.bold));
				}
				settings.compiler.invokeLinker(lbuildsettings, settings.platform, [tempobj.toNativeString()], settings.linkCallback, settings.toolWorkingDirectory);
			}
		}
	}

	/// Executes the freshly built binary (executables only), honoring the
	/// configured working directory, environment/runEnvironment variables
	/// and pre/post run commands. Non-executable targets raise an error.
	private void runTarget(NativePath exe_file_path, in BuildSettings buildsettings, string[] run_args, GeneratorSettings settings)
	{
		if (buildsettings.targetType == TargetType.executable) {
			auto cwd = settings.toolWorkingDirectory;
			auto runcwd = cwd;
			if (buildsettings.workingDirectory.length) {
				runcwd = NativePath(buildsettings.workingDirectory);
				if (!runcwd.absolute) runcwd = cwd ~ runcwd;
			}
			if (!exe_file_path.absolute) exe_file_path = cwd ~ exe_file_path;
			runPreRunCommands(m_project.rootPackage, m_project, settings, buildsettings);
			logInfo("Running", Color.green, "%s %s", exe_file_path.relativeTo(runcwd), run_args.join(" "));
			// runEnvironments take precedence over plain environments
			string[string] env;
			foreach (aa; [buildsettings.environments, buildsettings.runEnvironments])
				foreach (k, v; aa)
					env[k] = v;
			if (settings.runCallback) {
				// captured execution: output and exit status are handed to the callback
				auto res = execute([ exe_file_path.toNativeString() ] ~ run_args, env, Config.none, size_t.max, runcwd.toNativeString());
				settings.runCallback(res.status, res.output);
				settings.targetExitStatus = res.status;
				runPostRunCommands(m_project.rootPackage, m_project, settings, buildsettings);
			} else {
				// interactive execution: inherit stdio and wait for completion
				auto prg_pid = spawnProcess([ exe_file_path.toNativeString() ] ~ run_args, env, Config.none, runcwd.toNativeString());
				auto result = prg_pid.wait();
				settings.targetExitStatus = result;
runPostRunCommands(m_project.rootPackage, m_project, settings, buildsettings);
				enforce(result == 0, "Program exited with code "~to!string(result));
			}
		} else
			enforce(false, "Target is a library. Skipping execution.");
	}

	/// Runs the package's preRunCommands, if any are configured.
	private void runPreRunCommands(in Package pack, in Project proj, in GeneratorSettings settings,
		in BuildSettings buildsettings)
	{
		if (buildsettings.preRunCommands.length) {
			logInfo("Pre-run", Color.light_green, "Running commands...");
			runBuildCommands(CommandType.preRun, buildsettings.preRunCommands, pack, proj, settings, buildsettings);
		}
	}

	/// Runs the package's postRunCommands, if any are configured.
	private void runPostRunCommands(in Package pack, in Project proj, in GeneratorSettings settings,
		in BuildSettings buildsettings)
	{
		if (buildsettings.postRunCommands.length) {
			logInfo("Post-run", Color.light_green, "Running commands...");
			runBuildCommands(CommandType.postRun, buildsettings.postRunCommands, pack, proj, settings, buildsettings);
		}
	}

	/// Deletes all registered temporary files/directories in reverse
	/// registration order. Failures are logged but never fatal.
	private void cleanupTemporaries()
	{
		foreach_reverse (f; m_temporaryFiles) {
			try {
				// paths ending in a slash denote directories
				if (f.endsWithSlash) rmdir(f.toNativeString());
				else remove(f.toNativeString());
			} catch (Exception e) {
				logWarn("Failed to remove temporary file '%s': %s", f.toNativeString(), e.msg);
				logDiagnostic("Full error: %s", e.toString().sanitize);
			}
		}
		m_temporaryFiles = null;
	}
}

/// Looks up the package's main source file in the conventional locations,
/// falling back to "source/app.d" when none of them exists.
private NativePath getMainSourceFile(in Package prj)
{
	foreach (f; ["source/app.d", "src/app.d", "source/"~prj.name~".d", "src/"~prj.name~".d"])
		if (existsFile(prj.path ~ f))
			return prj.path ~ f;
	return prj.path ~ "source/app.d";
}

/// Full path of the produced binary: targetPath plus the platform specific
/// target file name determined by the compiler.
private NativePath getTargetPath(const scope ref BuildSettings bs, const scope ref GeneratorSettings settings)
{
	return NativePath(bs.targetPath) ~ settings.compiler.getTargetFileName(bs, settings.platform);
}

/// Returns the path relative to `base` when that representation is shorter
/// than the absolute one, otherwise the original path string.
private string shrinkPath(NativePath path, NativePath base)
{
	auto orig = path.toNativeString();
	if (!path.absolute) return orig;

	version (Windows)
	{
		// avoid relative paths starting with `..\`: https://github.com/dlang/dub/issues/2143
		if (!path.startsWith(base)) return orig;
	}

	auto rel = path.relativeTo(base).toNativeString();
	return rel.length < orig.length ? rel : orig;
}

unittest {
	assert(shrinkPath(NativePath("/foo/bar/baz"), NativePath("/foo")) == NativePath("bar/baz").toNativeString());
	version (Windows)
		assert(shrinkPath(NativePath("/foo/bar/baz"), NativePath("/foo/baz")) == NativePath("/foo/bar/baz").toNativeString());
	else
		assert(shrinkPath(NativePath("/foo/bar/baz"), NativePath("/foo/baz")) == NativePath("../bar/baz").toNativeString());
	assert(shrinkPath(NativePath("/foo/bar/baz"), NativePath("/bar/")) == NativePath("/foo/bar/baz").toNativeString());
	assert(shrinkPath(NativePath("/foo/bar/baz"), NativePath("/bar/baz")) == NativePath("/foo/bar/baz").toNativeString());
}

unittest { // issue #1235 - pass no library files to compiler command line when building a static lib
	import dub.recipe.io : parsePackageRecipe;
	import dub.compilers.gdc : GDCCompiler;
	import dub.platform : determinePlatform;

	version (Windows) auto libfile = "bar.lib";
	else auto libfile = "bar.a";

	auto recipe = parsePackageRecipe(
		`{"name":"test", "targetType":"library", "sourceFiles":["foo.d", "`~libfile~`"]}`,
		`/tmp/fooproject/dub.json`);
	auto pack = new Package(recipe, NativePath("/tmp/fooproject"));
	auto pman = new PackageManager(pack.path, NativePath("/tmp/foo/"), NativePath("/tmp/foo/"), false);
	auto prj = new Project(pman, pack);

	// compiler stub: asserts that no library file reaches the compiler
	// command line and that the linker is never invoked
	final static class TestCompiler : GDCCompiler {
		override void invoke(in BuildSettings settings, in BuildPlatform platform, void delegate(int, string) output_callback, NativePath cwd) {
			assert(!settings.dflags[].any!(f => f.canFind("bar")));
		}
		override void invokeLinker(in BuildSettings settings, in BuildPlatform platform, string[] objects, void delegate(int, string) output_callback, NativePath cwd) {
			assert(false);
		}
	}

	GeneratorSettings settings;
	settings.platform = BuildPlatform(determinePlatform(), ["x86"], "gdc", "test", 2075);
	settings.compiler = new TestCompiler;
	settings.config = "library";
	settings.buildType = "debug";
settings.tempBuild = true; auto gen = new BuildGenerator(prj); gen.generate(settings); } dub-1.40.0/source/dub/generators/cmake.d000066400000000000000000000117741477246567400200560ustar00rootroot00000000000000/** Generator for CMake build scripts Copyright: © 2015 Steven Dwy License: Subject to the terms of the MIT license, as written in the included LICENSE.txt file. Authors: Steven Dwy */ module dub.generators.cmake; import dub.compilers.buildsettings; import dub.generators.generator; import dub.internal.vibecompat.core.file; import dub.internal.vibecompat.inet.path; import dub.internal.logging; import dub.project; import std.algorithm: map, uniq; import std.algorithm : stdsort = sort; // to avoid clashing with built-in sort import std.array: appender, join, replace; import std.stdio: File, write; import std.string: format; class CMakeGenerator: ProjectGenerator { this(Project project) { super(project); } override void generateTargets(GeneratorSettings settings, in TargetInfo[string] targets) { auto script = appender!(char[]); auto scripts = appender!(string[]); bool[string] visited; NativePath projectRoot = m_project.rootPackage.path; NativePath cmakeListsPath = projectRoot ~ "CMakeLists.txt"; foreach(name, info; targets) { if(visited.get(name, false)) continue; visited[name] = true; name = name.sanitize; string targetType; string libType; bool addTarget = true; switch(info.buildSettings.targetType) with(TargetType) { case autodetect: throw new Exception("Don't know what to do about autodetect target type"); case executable: targetType = "executable"; break; case dynamicLibrary: libType = "SHARED"; goto case; case library: case staticLibrary: targetType = "library"; break; case sourceLibrary: addTarget = false; break; case none: continue; default: assert(false); } script.put("include(UseD)\n"); script.put( "add_d_conditions(VERSION %s DEBUG %s)\n".format( info.buildSettings.versions.join(" "), info.buildSettings.debugVersions.join(" "), ) ); foreach(directory; 
info.buildSettings.importPaths)
				script.put("include_directories(%s)\n".format(directory.sanitizeSlashes));

			foreach(directory; info.buildSettings.cImportPaths)
				script.put("c_include_directories(%s)\n".format(directory.sanitizeSlashes));

			if(addTarget)
			{
				// emit the add_executable/add_library call listing all sources
				script.put("add_%s(%s %s\n".format(targetType, name, libType));

				foreach(file; info.buildSettings.sourceFiles)
					script.put(" %s\n".format(file.sanitizeSlashes));

				script.put(")\n");

				// link against the deduplicated dependency targets plus any
				// external libraries from the build settings
				script.put(
					"target_link_libraries(%s %s %s)\n".format(
						name,
						(info.dependencies ~ info.linkDependencies).dup.stdsort.uniq.map!(s => sanitize(s)).join(" "),
						info.buildSettings.libs.join(" ")
					)
				);

				script.put(
					`set_target_properties(%s PROPERTIES TEXT_INCLUDE_DIRECTORIES "%s")`.format(
						name,
						info.buildSettings.stringImportPaths.map!(s => sanitizeSlashes(s)).join(";")
					) ~ "\n"
				);
			}

			// one <target>.cmake script per target; the buffer is reused
			string filename = (projectRoot ~ "%s.cmake".format(name)).toNativeString;
			File file = File(filename, "w");
			file.write(script.data);
			file.close;
			script.shrinkTo(0);
			scripts.put(filename);

			logInfo("Generated", Color.green, "%s.cmake", name);
		}

		if(!cmakeListsPath.existsFile)
		{
			logWarn("You must use a fork of CMake which has D support for these scripts to function properly.");
			logWarn("It is available at https://github.com/trentforkert/cmake");
			logDiagnostic("Generating default CMakeLists.txt");

			// write a minimal top-level CMakeLists.txt that includes every
			// per-target script generated above
			script.put("cmake_minimum_required(VERSION 3.0)\n");
			script.put("project(%s D)\n".format(m_project.rootPackage.name));

			foreach(path; scripts.data)
				script.put("include(%s)\n".format(path));

			File file = File(cmakeListsPath.toNativeString, "w");
			file.write(script.data);
			file.close;

			logInfo("Generated", Color.green, "CMakeLists.txt (default)");
		}
	}
}

///Transform a package name into a valid CMake target name.
private string sanitize(string name) { return name.replace(":", "_"); } private string sanitizeSlashes(string path) { version(Windows) return path.replace("\\", "/"); else return path; } dub-1.40.0/source/dub/generators/generator.d000066400000000000000000001326041477246567400207600ustar00rootroot00000000000000/** Generator for project files Copyright: © 2012-2013 Matthias Dondorff, © 2013-2016 Sönke Ludwig License: Subject to the terms of the MIT license, as written in the included LICENSE.txt file. Authors: Matthias Dondorff */ module dub.generators.generator; import dub.compilers.compiler; import dub.generators.cmake; import dub.generators.build; import dub.generators.sublimetext; import dub.generators.visuald; import dub.internal.utils; import dub.internal.vibecompat.core.file; import dub.internal.vibecompat.data.json; import dub.internal.vibecompat.inet.path; import dub.internal.logging; import dub.package_; import dub.packagemanager; import dub.project; import std.algorithm : map, filter, canFind, balancedParens; import std.array : array, appender, join; import std.exception; import std.file; import std.string; /** Common interface for project generators/builders. */ class ProjectGenerator { /** Information about a single binary target. A binary target can either be an executable or a static/dynamic library. It consists of one or more packages. */ struct TargetInfo { /// The root package of this target Package pack; /// All packages compiled into this target Package[] packages; /// The configuration used for building the root package string config; /** Build settings used to build the target. The build settings include all sources of all contained packages. Depending on the specific generator implementation, it may be necessary to add any static or dynamic libraries generated for child targets ($(D linkDependencies)). */ BuildSettings buildSettings; /** List of all dependencies. This list includes dependencies that are not the root of a binary target. 
*/ string[] dependencies; /** List of all binary dependencies. This list includes all dependencies that are the root of a binary target. */ string[] linkDependencies; } private struct EnvironmentVariables { string[string] environments; string[string] buildEnvironments; string[string] runEnvironments; string[string] preGenerateEnvironments; string[string] postGenerateEnvironments; string[string] preBuildEnvironments; string[string] postBuildEnvironments; string[string] preRunEnvironments; string[string] postRunEnvironments; this(const scope ref BuildSettings bs) { update(bs); } void update(Envs)(const scope auto ref Envs envs) { import std.algorithm: each; envs.environments.byKeyValue.each!(pair => environments[pair.key] = pair.value); envs.buildEnvironments.byKeyValue.each!(pair => buildEnvironments[pair.key] = pair.value); envs.runEnvironments.byKeyValue.each!(pair => runEnvironments[pair.key] = pair.value); envs.preGenerateEnvironments.byKeyValue.each!(pair => preGenerateEnvironments[pair.key] = pair.value); envs.postGenerateEnvironments.byKeyValue.each!(pair => postGenerateEnvironments[pair.key] = pair.value); envs.preBuildEnvironments.byKeyValue.each!(pair => preBuildEnvironments[pair.key] = pair.value); envs.postBuildEnvironments.byKeyValue.each!(pair => postBuildEnvironments[pair.key] = pair.value); envs.preRunEnvironments.byKeyValue.each!(pair => preRunEnvironments[pair.key] = pair.value); envs.postRunEnvironments.byKeyValue.each!(pair => postRunEnvironments[pair.key] = pair.value); } void updateBuildSettings(ref BuildSettings bs) { bs.updateEnvironments(environments); bs.updateBuildEnvironments(buildEnvironments); bs.updateRunEnvironments(runEnvironments); bs.updatePreGenerateEnvironments(preGenerateEnvironments); bs.updatePostGenerateEnvironments(postGenerateEnvironments); bs.updatePreBuildEnvironments(preBuildEnvironments); bs.updatePostBuildEnvironments(postBuildEnvironments); bs.updatePreRunEnvironments(preRunEnvironments); 
bs.updatePostRunEnvironments(postRunEnvironments); } } protected { Project m_project; NativePath m_tempTargetExecutablePath; } this(Project project) { m_project = project; } /** Performs the full generator process. */ final void generate(GeneratorSettings settings) { import dub.compilers.utils : enforceBuildRequirements; if (!settings.config.length) settings.config = m_project.getDefaultConfiguration(settings.platform); string[string] configs = m_project.getPackageConfigs(settings.platform, settings.config); TargetInfo[string] targets; EnvironmentVariables[string] envs; foreach (pack; m_project.getTopologicalPackageList(true, null, configs)) { auto config = configs[pack.name]; auto bs = pack.getBuildSettings(settings.platform, config); targets[pack.name] = TargetInfo(pack, [pack], config, bs); envs[pack.name] = EnvironmentVariables(bs); } foreach (pack; m_project.getTopologicalPackageList(false, null, configs)) { auto ti = pack.name in targets; auto parentEnvs = ti.pack.name in envs; foreach (deppkgName, depInfo; pack.getDependencies(ti.config)) { if (auto childEnvs = deppkgName in envs) { childEnvs.update(ti.buildSettings); parentEnvs.update(childEnvs); } } } BuildSettings makeBuildSettings(in Package pack, ref BuildSettings src) { BuildSettings bs; if (settings.buildSettings.options & BuildOption.lowmem) bs.options |= BuildOption.lowmem; BuildSettings srcbs = src.dup; envs[pack.name].updateBuildSettings(srcbs); bs.processVars(m_project, pack, srcbs, settings, true); return bs; } foreach (pack; m_project.getTopologicalPackageList(true, null, configs)) { BuildSettings bs = makeBuildSettings(pack, targets[pack.name].buildSettings); prepareGeneration(pack, m_project, settings, bs); // Regenerate buildSettings.sourceFiles if (bs.preGenerateCommands.length) { auto newSettings = pack.getBuildSettings(settings.platform, configs[pack.name]); bs = makeBuildSettings(pack, newSettings); } targets[pack.name].buildSettings = bs; } configurePackages(m_project.rootPackage, 
targets, settings); addBuildTypeSettings(targets, settings); foreach (ref t; targets.byValue) enforceBuildRequirements(t.buildSettings); generateTargets(settings, targets); foreach (pack; m_project.getTopologicalPackageList(true, null, configs)) { auto config = configs[pack.name]; auto pkgbs = pack.getBuildSettings(settings.platform, config); BuildSettings buildsettings = makeBuildSettings(pack, pkgbs); bool generate_binary = !(buildsettings.options & BuildOption.syntaxOnly); auto bs = &targets[m_project.rootPackage.name].buildSettings; auto targetPath = !m_tempTargetExecutablePath.empty ? m_tempTargetExecutablePath : !bs.targetPath.empty ? NativePath(bs.targetPath) : NativePath(buildsettings.targetPath); finalizeGeneration(pack, m_project, settings, buildsettings, targetPath, generate_binary); } performPostGenerateActions(settings, targets); } /** Overridden in derived classes to implement the actual generator functionality. The function should go through all targets recursively. The first target (which is guaranteed to be there) is $(D targets[m_project.rootPackage.name]). The recursive descent is then done using the $(D TargetInfo.linkDependencies) list. This method is also potentially responsible for running the pre and post build commands, while pre and post generate commands are already taken care of by the $(D generate) method. Params: settings = The generator settings used for this run targets = A map from package name to TargetInfo that contains all binary targets to be built. */ protected abstract void generateTargets(GeneratorSettings settings, in TargetInfo[string] targets); /** Overridable method to be invoked after the generator process has finished. An examples of functionality placed here is to run the application that has just been built. */ protected void performPostGenerateActions(GeneratorSettings settings, in TargetInfo[string] targets) {} /** Configure `rootPackage` and all of it's dependencies. 1. 
Merge versions, debugVersions, and inheritable build settings from dependents to their dependencies. 2. Define version identifiers Have_dependency_xyz for all direct dependencies of all packages. 3. Merge versions, debugVersions, and inheritable build settings from dependencies to their dependents, so that importer and importee are ABI compatible. This also transports all Have_dependency_xyz version identifiers to `rootPackage`. 4. Merge injectSourceFiles from dependencies into their dependents. This is based upon binary images and will transcend direct relationships including shared libraries. 5. Filter unused versions and debugVersions from all targets. The filters have previously been upwards inherited (3. and 4.) so that versions used in a dependency are also applied to all dependents. Note: The upwards inheritance is done at last so that siblings do not influence each other, also see https://github.com/dlang/dub/pull/1128. Note: Targets without output are integrated into their dependents and removed from `targets`. */ private void configurePackages(Package rootPackage, TargetInfo[string] targets, GeneratorSettings genSettings) { import std.algorithm : remove, sort; import std.range : repeat; auto roottarget = &targets[rootPackage.name]; // 0. do shallow configuration (not including dependencies) of all packages TargetType determineTargetType(const ref TargetInfo ti, const ref GeneratorSettings genSettings) { TargetType tt = ti.buildSettings.targetType; if (ti.pack is rootPackage) { if (tt == TargetType.autodetect || tt == TargetType.library) tt = TargetType.staticLibrary; } else { if (tt == TargetType.autodetect || tt == TargetType.library) tt = genSettings.combined ? 
TargetType.sourceLibrary : TargetType.staticLibrary; else if (genSettings.platform.architecture.canFind("x86_omf") && tt == TargetType.dynamicLibrary) { // Unfortunately we cannot remove this check for OMF targets, // due to Optlink not producing shared libraries without a lot of user intervention. // For other targets, say MSVC it'll do the right thing for the most part, // export is still a problem as of this writing, which means static libraries cannot have linking to them removed. // But that is for the most part up to the developer, to get it working correctly. logWarn("Dynamic libraries are not yet supported as dependencies for Windows target OMF - building as static library."); tt = TargetType.staticLibrary; } } if (tt != TargetType.none && tt != TargetType.sourceLibrary && ti.buildSettings.sourceFiles.empty) { logWarn(`Configuration [%s] of package %s contains no source files. Please add %s to its package description to avoid building it.`, ti.config.color(Color.blue), ti.pack.name.color(Mode.bold), `{"targetType": "none"}`.color(Mode.bold)); tt = TargetType.none; } return tt; } string[] mainSourceFiles; bool[string] hasOutput; foreach (ref ti; targets.byValue) { auto bs = &ti.buildSettings; // determine the actual target type bs.targetType = determineTargetType(ti, genSettings); switch (bs.targetType) { case TargetType.none: // ignore any build settings for targetType none (only dependencies will be processed) *bs = BuildSettings.init; bs.targetType = TargetType.none; break; case TargetType.executable: break; case TargetType.dynamicLibrary: // set -fPIC for dynamic library builds ti.buildSettings.addOptions(BuildOption.pic); goto default; default: // remove any mainSourceFile from non-executable builds if (bs.mainSourceFile.length) { bs.sourceFiles = bs.sourceFiles.remove!(f => f == bs.mainSourceFile); mainSourceFiles ~= bs.mainSourceFile; } break; } bool generatesBinary = bs.targetType != TargetType.sourceLibrary && bs.targetType != TargetType.none; 
hasOutput[ti.pack.name] = generatesBinary || ti.pack is rootPackage; } // add main source files to root executable { auto bs = &roottarget.buildSettings; if (bs.targetType == TargetType.executable || genSettings.single) bs.addSourceFiles(mainSourceFiles); } if (genSettings.filterVersions) foreach (ref ti; targets.byValue) inferVersionFilters(ti); // mark packages as visited (only used during upwards propagation) void[0][Package] visited; // collect all dependencies void collectDependencies(Package pack, ref TargetInfo ti, TargetInfo[string] targets, size_t level = 0) { // use `visited` here as pkgs cannot depend on themselves if (pack in visited) return; // transitive dependencies must be visited multiple times, see #1350 immutable transitive = !hasOutput[pack.name]; if (!transitive) visited[pack] = typeof(visited[pack]).init; auto bs = &ti.buildSettings; if (hasOutput[pack.name]) logDebug("%sConfiguring target %s (%s %s %s)", ' '.repeat(2 * level), pack.name, bs.targetType, bs.targetPath, bs.targetName); else logDebug("%sConfiguring target without output %s", ' '.repeat(2 * level), pack.name); // get specified dependencies, e.g. vibe-d ~0.8.1 auto deps = pack.getDependencies(targets[pack.name].config); logDebug("deps: %s -> %(%s, %)", pack.name, deps.byKey); foreach (depname; deps.keys.sort()) { auto depspec = deps[depname]; // get selected package for that dependency, e.g. 
vibe-d 0.8.2-beta.2 auto deppack = m_project.getDependency(depname, depspec.optional); if (deppack is null) continue; // optional and not selected // if dependency has no output if (!hasOutput[depname]) { // add itself ti.packages ~= deppack; // and it's transitive dependencies to current target collectDependencies(deppack, ti, targets, level + 1); continue; } auto depti = &targets[depname]; const depbs = &depti.buildSettings; if (depbs.targetType == TargetType.executable && ti.buildSettings.targetType != TargetType.none) continue; // add to (link) dependencies ti.dependencies ~= depname; ti.linkDependencies ~= depname; // recurse collectDependencies(deppack, *depti, targets, level + 1); // also recursively add all link dependencies of static *and* dynamic libraries // preserve topological sorting of dependencies for correct link order if (depbs.targetType == TargetType.staticLibrary || depbs.targetType == TargetType.dynamicLibrary) ti.linkDependencies = ti.linkDependencies.filter!(d => !depti.linkDependencies.canFind(d)).array ~ depti.linkDependencies; } enforce(!(ti.buildSettings.targetType == TargetType.none && ti.dependencies.empty), "Package with target type \"none\" must have dependencies to build."); } collectDependencies(rootPackage, *roottarget, targets); visited.clear(); // 1. downwards inherits versions, debugVersions, and inheritable build settings static void configureDependencies(const scope ref TargetInfo ti, TargetInfo[string] targets, size_t level = 0) { // do not use `visited` here as dependencies must inherit // configurations from *all* of their parents logDebug("%sConfigure dependencies of %s, deps:%(%s, %)", ' '.repeat(2 * level), ti.pack.name, ti.dependencies); foreach (depname; ti.dependencies) { auto pti = &targets[depname]; mergeFromDependent(ti.buildSettings, pti.buildSettings); configureDependencies(*pti, targets, level + 1); } } configureDependencies(*roottarget, targets); // 2. 
add Have_dependency_xyz for all direct dependencies of a target // (includes incorporated non-target dependencies and their dependencies) foreach (ref ti; targets.byValue) { import std.range : chain; import dub.internal.utils : stripDlangSpecialChars; auto bs = &ti.buildSettings; auto pkgnames = ti.packages.map!(p => p.name).chain(ti.dependencies); bs.addVersions(pkgnames.map!(pn => "Have_" ~ stripDlangSpecialChars(pn)).array); } // 3. upwards inherit full build configurations (import paths, versions, debugVersions, versionFilters, importPaths, ...) // We do a check for if any dependency uses final binary injection source files, // otherwise can ignore that bit of workload entirely bool skipFinalBinaryMerging = true; void configureDependents(ref TargetInfo ti, TargetInfo[string] targets, size_t level = 0) { // use `visited` here as pkgs cannot depend on themselves if (ti.pack in visited) return; visited[ti.pack] = typeof(visited[ti.pack]).init; logDiagnostic("%sConfiguring dependent %s, deps:%(%s, %)", ' '.repeat(2 * level), ti.pack.name, ti.dependencies); // embedded non-binary dependencies foreach (deppack; ti.packages[1 .. $]) ti.buildSettings.add(targets[deppack.name].buildSettings); // binary dependencies foreach (depname; ti.dependencies) { auto pdepti = &targets[depname]; configureDependents(*pdepti, targets, level + 1); mergeFromDependency(pdepti.buildSettings, ti.buildSettings, genSettings.platform); if (!pdepti.buildSettings.injectSourceFiles.empty) skipFinalBinaryMerging = false; } } configureDependents(*roottarget, targets); visited.clear(); // 4. 
As an extension to configureDependents we need to copy any injectSourceFiles // in our dependencies (ignoring targetType) void configureDependentsFinalImages(ref TargetInfo ti, TargetInfo[string] targets, ref TargetInfo finalBinaryTarget, size_t level = 0) { // use `visited` here as pkgs cannot depend on themselves if (ti.pack in visited) return; visited[ti.pack] = typeof(visited[ti.pack]).init; logDiagnostic("%sConfiguring dependent %s, deps:%(%s, %) for injectSourceFiles", ' '.repeat(2 * level), ti.pack.name, ti.dependencies); foreach (depname; ti.dependencies) { auto pdepti = &targets[depname]; if (!pdepti.buildSettings.injectSourceFiles.empty) finalBinaryTarget.buildSettings.addSourceFiles(pdepti.buildSettings.injectSourceFiles); configureDependentsFinalImages(*pdepti, targets, finalBinaryTarget, level + 1); } } if (!skipFinalBinaryMerging) { foreach (ref target; targets.byValue) { switch (target.buildSettings.targetType) { case TargetType.executable: case TargetType.dynamicLibrary: configureDependentsFinalImages(target, targets, target); // We need to clear visited for each target that is executable dynamicLibrary // due to this process needing to be recursive based upon the final binary targets. visited.clear(); break; default: break; } } } // 5. Filter applicable version and debug version identifiers if (genSettings.filterVersions) { foreach (name, ref ti; targets) { import std.algorithm.sorting : partition; auto bs = &ti.buildSettings; auto filtered = bs.versions.partition!(v => bs.versionFilters.canFind(v)); logDebug("Filtering out unused versions for %s: %s", name, filtered); bs.versions = bs.versions[0 .. $ - filtered.length]; filtered = bs.debugVersions.partition!(v => bs.debugVersionFilters.canFind(v)); logDebug("Filtering out unused debug versions for %s: %s", name, filtered); bs.debugVersions = bs.debugVersions[0 .. $ - filtered.length]; } } // 6. 
override string import files in dependencies static void overrideStringImports(ref TargetInfo target, ref TargetInfo parent, TargetInfo[string] targets, string[] overrides) { // Since string import paths are inherited from dependencies in the // inheritance step above (step 3), it is guaranteed that all // following dependencies will not have string import paths either, // so we can skip the recursion here if (!target.buildSettings.stringImportPaths.length) return; // do not use visited here as string imports can be overridden by *any* parent // // special support for overriding string imports in parent packages // this is a candidate for deprecation, once an alternative approach // has been found bool any_override = false; // override string import files (used for up to date checking) foreach (ref f; target.buildSettings.stringImportFiles) { foreach (o; overrides) { NativePath op; if (f != o && NativePath(f).head == (op = NativePath(o)).head) { logDebug("string import %s overridden by %s", f, o); f = o; any_override = true; } } } // override string import paths by prepending to the list, in // case there is any overlapping file if (any_override) target.buildSettings.prependStringImportPaths(parent.buildSettings.stringImportPaths); // add all files to overrides for recursion overrides ~= target.buildSettings.stringImportFiles; // recursively override all dependencies with the accumulated files/paths foreach (depname; target.dependencies) overrideStringImports(targets[depname], target, targets, overrides); } // push string import paths/files down to all direct and indirect // dependencies, overriding their own foreach (depname; roottarget.dependencies) overrideStringImports(targets[depname], *roottarget, targets, roottarget.buildSettings.stringImportFiles); // 7. 
downwards inherits dependency build settings static void applyForcedSettings(const scope ref TargetInfo ti, TargetInfo[string] targets, BuildSettings[string] dependBS, size_t level = 0) { static void apply(const scope ref BuildSettings forced, ref BuildSettings child) { child.addDFlags(forced.dflags); } // apply to all dependencies foreach (depname; ti.dependencies) { BuildSettings forcedSettings; auto pti = &targets[depname]; // fetch the forced dependency build settings if (auto matchedSettings = depname in dependBS) forcedSettings = *matchedSettings; else if (auto matchedSettings = "*" in dependBS) forcedSettings = *matchedSettings; apply(forcedSettings, pti.buildSettings); // recursively apply forced settings to all dependencies of his dependency applyForcedSettings(*pti, targets, ["*" : forcedSettings], level + 1); } } // apply both top level and configuration level forced dependency build settings void applyDependencyBuildSettings (const RecipeDependency[string] configured_dbs) { BuildSettings[string] dependencyBuildSettings; foreach (key, value; configured_dbs) { BuildSettings buildSettings; if (auto target = key in targets) { // get platform specific build settings and process dub variables (BuildSettingsTemplate => BuildSettings) value.settings.getPlatformSettings(buildSettings, genSettings.platform, target.pack.path); buildSettings.processVars(m_project, target.pack, buildSettings, genSettings, true); dependencyBuildSettings[key] = buildSettings; } } applyForcedSettings(*roottarget, targets, dependencyBuildSettings); } applyDependencyBuildSettings(rootPackage.recipe.buildSettings.dependencies); applyDependencyBuildSettings(rootPackage.getBuildSettings(genSettings.config).dependencies); // remove targets without output foreach (name; targets.keys) { if (!hasOutput[name]) targets.remove(name); } } // infer applicable version identifiers private static void inferVersionFilters(ref TargetInfo ti) { import std.algorithm.searching : any; import std.file : 
timeLastModified; import std.path : extension; import std.range : chain; import std.regex : ctRegex, matchAll; import std.stdio : File; import std.datetime : Clock, SysTime, UTC; import dub.compilers.utils : isLinkerFile; import dub.internal.vibecompat.data.json : Json, JSONException; auto bs = &ti.buildSettings; // only infer if neither version filters are specified explicitly if (bs.versionFilters.length || bs.debugVersionFilters.length) { logDebug("Using specified versionFilters for %s: %s %s", ti.pack.name, bs.versionFilters, bs.debugVersionFilters); return; } // check all existing source files for version identifiers static immutable dexts = [".d", ".di"]; auto srcs = chain(bs.sourceFiles, bs.importFiles, bs.stringImportFiles) .filter!(f => dexts.canFind(f.extension)).filter!exists; // try to load cached filters first const cacheFilePath = packageCache(NativePath(ti.buildSettings.targetPath), ti.pack) ~ "metadata_cache.json"; enum silent_fail = true; auto cache = jsonFromFile(cacheFilePath, silent_fail); try { auto cachedFilters = cache["versionFilters"]; if (cachedFilters.type != Json.Type.undefined) cachedFilters = cachedFilters[ti.config]; if (cachedFilters.type != Json.Type.undefined) { immutable mtime = SysTime.fromISOExtString(cachedFilters["mtime"].get!string); if (!srcs.any!(src => src.timeLastModified > mtime)) { auto versionFilters = cachedFilters["versions"][].map!(j => j.get!string).array; auto debugVersionFilters = cachedFilters["debugVersions"][].map!(j => j.get!string).array; logDebug("Using cached versionFilters for %s: %s %s", ti.pack.name, versionFilters, debugVersionFilters); bs.addVersionFilters(versionFilters); bs.addDebugVersionFilters(debugVersionFilters); return; } } } catch (JSONException e) { logWarn("Exception during loading invalid package cache %s.\n%s", ti.pack.path ~ ".dub/metadata_cache.json", e); } // use ctRegex for performance reasons, only small compile time increase enum verRE = 
ctRegex!`(?:^|\s)version\s*\(\s*([^\s]*?)\s*\)`; enum debVerRE = ctRegex!`(?:^|\s)debug\s*\(\s*([^\s]*?)\s*\)`; auto versionFilters = appender!(string[]); auto debugVersionFilters = appender!(string[]); foreach (file; srcs) { foreach (line; File(file).byLine) { foreach (m; line.matchAll(verRE)) if (!versionFilters.data.canFind(m[1])) versionFilters.put(m[1].idup); foreach (m; line.matchAll(debVerRE)) if (!debugVersionFilters.data.canFind(m[1])) debugVersionFilters.put(m[1].idup); } } logDebug("Using inferred versionFilters for %s: %s %s", ti.pack.name, versionFilters.data, debugVersionFilters.data); bs.addVersionFilters(versionFilters.data); bs.addDebugVersionFilters(debugVersionFilters.data); auto cachedFilters = cache["versionFilters"]; if (cachedFilters.type == Json.Type.undefined) cachedFilters = cache["versionFilters"] = [ti.config: Json.emptyObject]; cachedFilters[ti.config] = [ "mtime": Json(Clock.currTime(UTC()).toISOExtString), "versions": Json(versionFilters.data.map!Json.array), "debugVersions": Json(debugVersionFilters.data.map!Json.array), ]; enum create_if_missing = true; if (isWritableDir(cacheFilePath.parentPath, create_if_missing)) writeJsonFile(cacheFilePath, cache); } private static void mergeFromDependent(const scope ref BuildSettings parent, ref BuildSettings child) { child.addVersions(parent.versions); child.addDebugVersions(parent.debugVersions); child.addOptions(Flags!BuildOption(parent.options & inheritedBuildOptions)); } private static void mergeFromDependency(const scope ref BuildSettings child, ref BuildSettings parent, const scope ref BuildPlatform platform) { import dub.compilers.utils : isLinkerFile; parent.addDFlags(child.dflags); parent.addVersions(child.versions); parent.addDebugVersions(child.debugVersions); parent.addVersionFilters(child.versionFilters); parent.addDebugVersionFilters(child.debugVersionFilters); parent.addImportPaths(child.importPaths); parent.addCImportPaths(child.cImportPaths); 
parent.addStringImportPaths(child.stringImportPaths); parent.addInjectSourceFiles(child.injectSourceFiles); // linker stuff propagates up from static *and* dynamic library deps if (child.targetType == TargetType.staticLibrary || child.targetType == TargetType.dynamicLibrary) { parent.addSourceFiles(child.sourceFiles.filter!(f => isLinkerFile(platform, f)).array); parent.addLibs(child.libs); parent.addLFlags(child.lflags); } } // configure targets for build types such as release, or unittest-cov private void addBuildTypeSettings(TargetInfo[string] targets, in GeneratorSettings settings) { foreach (ref ti; targets.byValue) { ti.buildSettings.add(settings.buildSettings); // add build type settings and convert plain DFLAGS to build options m_project.addBuildTypeSettings(ti.buildSettings, settings, ti.pack is m_project.rootPackage); settings.compiler.extractBuildOptions(ti.buildSettings); auto tt = ti.buildSettings.targetType; enforce (tt != TargetType.sourceLibrary || ti.pack !is m_project.rootPackage || (ti.buildSettings.options & BuildOption.syntaxOnly), format("Main package must not have target type \"%s\". Cannot build.", tt)); } } } /** * Compute and returns the path were artifacts are stored for a given package * * Artifacts are stored in: * `$DUB_HOME/cache/$PKG_NAME/$PKG_VERSION[/+$SUB_PKG_NAME]/` * Note that the leading `+` in the sub-package name is to avoid any ambiguity. * * Dub writes in the returned path a Json description file of the available * artifacts in this cache location. This Json file is read by 3rd party * software (e.g. Meson). Returned path should therefore not change across * future Dub versions. * * Build artifacts are usually stored in a sub-folder named "build", * as their names are based on user-supplied values. * * Params: * cachePath = Base path at which the build cache is located, * e.g. `$HOME/.dub/cache/` * pkg = The package. Cannot be `null`. 
*/ package(dub) NativePath packageCache(NativePath cachePath, in Package pkg) { import std.algorithm.searching : findSplit; assert(pkg !is null); assert(!cachePath.empty); // For subpackages if (const names = pkg.name.findSplit(":")) return cachePath ~ names[0] ~ pkg.version_.toString() ~ ("+" ~ names[2]); // For regular packages return cachePath ~ pkg.name ~ pkg.version_.toString(); } /** * Compute and return the directory where a target should be cached. * * Params: * cachePath = Base path at which the build cache is located, * e.g. `$HOME/.dub/cache/` * pkg = The package. Cannot be `null`. * buildId = The build identifier of the target. */ package(dub) NativePath targetCacheDir(NativePath cachePath, in Package pkg, string buildId) { return packageCache(cachePath, pkg) ~ "build" ~ buildId; } /** * Provides a unique (per build) identifier * * When building a package, it is important to have a unique but stable * identifier to differentiate builds and allow their caching. * This function provides such an identifier. 
* Example: * ``` * library-debug-Z7qINYX4IxM8muBSlyNGrw * ``` */ package(dub) string computeBuildID(in BuildSettings buildsettings, in NativePath packagePath, string config, GeneratorSettings settings) { import std.conv : to; const(string[])[] hashing = [ buildsettings.versions, buildsettings.debugVersions, buildsettings.dflags, buildsettings.lflags, buildsettings.stringImportPaths, buildsettings.importPaths, buildsettings.cImportPaths, settings.platform.architecture, [ (cast(uint)(buildsettings.options & ~BuildOption.color)).to!string, // exclude color option from id // Needed for things such as `__FULL_FILE_PATH__` packagePath.toNativeString(), settings.platform.compilerBinary, settings.platform.compiler, settings.platform.compilerVersion, ], ]; return computeBuildName(config, settings, hashing); } struct GeneratorSettings { NativePath cache; BuildPlatform platform; Compiler compiler; string config; string recipeName; string buildType; BuildSettings buildSettings; BuildMode buildMode = BuildMode.separate; int targetExitStatus; NativePath overrideToolWorkingDirectory; bool combined; // compile all in one go instead of each dependency separately bool filterVersions; // only used for generator "build" bool run, force, rdmd, tempBuild, parallelBuild; /// single file dub package bool single; /// build all dependencies for static libraries bool buildDeep; string[] runArgs; void delegate(int status, string output) compileCallback; void delegate(int status, string output) linkCallback; void delegate(int status, string output) runCallback; /// Returns `overrideToolWorkingDirectory` or if that's not set, just the /// current working directory of the application. This may differ if dub is /// called with the `--root` parameter or when using DUB as a library. NativePath toolWorkingDirectory() const { return overrideToolWorkingDirectory is NativePath.init ? 
getWorkingDirectory() : overrideToolWorkingDirectory; } } /** Determines the mode in which the compiler and linker are invoked. */ enum BuildMode { separate, /// Compile and link separately allAtOnce, /// Perform compile and link with a single compiler invocation singleFile, /// Compile each file separately //multipleObjects, /// Generate an object file per module //multipleObjectsPerModule, /// Use the -multiobj switch to generate multiple object files per module //compileOnly /// Do not invoke the linker (can be done using a post build command) } /** Creates a project generator of the given type for the specified project. */ ProjectGenerator createProjectGenerator(string generator_type, Project project) { assert(project !is null, "Project instance needed to create a generator."); generator_type = generator_type.toLower(); switch(generator_type) { default: throw new Exception("Unknown project generator: "~generator_type); case "build": logDebug("Creating build generator."); return new BuildGenerator(project); case "mono-d": throw new Exception("The Mono-D generator has been removed. Use Mono-D's built in DUB support instead."); case "visuald": logDebug("Creating VisualD generator."); return new VisualDGenerator(project); case "sublimetext": logDebug("Creating SublimeText generator."); return new SublimeTextGenerator(project); case "cmake": logDebug("Creating CMake generator."); return new CMakeGenerator(project); } } /** Calls delegates on files and directories in the given path that match any globs. 
*/ void findFilesMatchingGlobs(in NativePath path, in string[] globList, void delegate(string file) addFile, void delegate(string dir) addDir) { import std.path : globMatch; string[] globs; foreach (f; globList) { if (f.canFind("*", "?") || (f.canFind("{") && f.balancedParens('{', '}')) || (f.canFind("[") && f.balancedParens('[', ']'))) { globs ~= f; } else { if (f.isDir) addDir(f); else addFile(f); } } if (globs.length) // Search all files for glob matches foreach (f; dirEntries(path.toNativeString(), SpanMode.breadth)) foreach (glob; globs) if (f.name().globMatch(glob)) { if (f.isDir) addDir(f); else addFile(f); break; } } /** Calls delegates on files in the given path that match any globs. If a directory matches a glob, the delegate is called on all existing files inside it recursively in depth-first pre-order. */ void findFilesMatchingGlobs(in NativePath path, in string[] globList, void delegate(string file) addFile) { void addDir(string dir) { foreach (f; dirEntries(dir, SpanMode.breadth)) addFile(f); } findFilesMatchingGlobs(path, globList, addFile, &addDir); } /** Runs pre-build commands and performs other required setup before project files are generated. */ private void prepareGeneration(in Package pack, in Project proj, in GeneratorSettings settings, in BuildSettings buildsettings) { if (buildsettings.preGenerateCommands.length && !isRecursiveInvocation(pack.name)) { logInfo("Pre-gen", Color.light_green, "Running commands for %s", pack.name); runBuildCommands(CommandType.preGenerate, buildsettings.preGenerateCommands, pack, proj, settings, buildsettings); } } /** Runs post-build commands and copies required files to the binary directory. 
*/ private void finalizeGeneration(in Package pack, in Project proj, in GeneratorSettings settings, in BuildSettings buildsettings, NativePath target_path, bool generate_binary) { if (buildsettings.postGenerateCommands.length && !isRecursiveInvocation(pack.name)) { logInfo("Post-gen", Color.light_green, "Running commands for %s", pack.name); runBuildCommands(CommandType.postGenerate, buildsettings.postGenerateCommands, pack, proj, settings, buildsettings); } if (generate_binary) { if (!settings.tempBuild) ensureDirectory(NativePath(buildsettings.targetPath)); if (buildsettings.copyFiles.length) { void copyFolderRec(NativePath folder, NativePath dstfolder) { ensureDirectory(dstfolder); foreach (de; iterateDirectory(folder)) { if (de.isDirectory) { copyFolderRec(folder ~ de.name, dstfolder ~ de.name); } else { try hardLinkFile(folder ~ de.name, dstfolder ~ de.name, true); catch (Exception e) { logWarn("Failed to copy file %s: %s", (folder ~ de.name).toNativeString(), e.msg); } } } } void tryCopyDir(string file) { auto src = NativePath(file); if (!src.absolute) src = pack.path ~ src; auto dst = target_path ~ NativePath(file).head; if (src == dst) { logDiagnostic("Skipping copy of %s (same source and destination)", file); return; } logDiagnostic(" %s to %s", src.toNativeString(), dst.toNativeString()); try { copyFolderRec(src, dst); } catch(Exception e) logWarn("Failed to copy %s to %s: %s", src.toNativeString(), dst.toNativeString(), e.msg); } void tryCopyFile(string file) { auto src = NativePath(file); if (!src.absolute) src = pack.path ~ src; auto dst = target_path ~ NativePath(file).head; if (src == dst) { logDiagnostic("Skipping copy of %s (same source and destination)", file); return; } logDiagnostic(" %s to %s", src.toNativeString(), dst.toNativeString()); try { hardLinkFile(src, dst, true); } catch(Exception e) logWarn("Failed to copy %s to %s: %s", src.toNativeString(), dst.toNativeString(), e.msg); } logInfo("Copying files for %s...", pack.name); 
findFilesMatchingGlobs(pack.path, buildsettings.copyFiles, &tryCopyFile, &tryCopyDir); } } } /** Runs a list of build commands for a particular package. This function sets all DUB specific environment variables and makes sure that recursive dub invocations are detected and don't result in infinite command execution loops. The latter could otherwise happen when a command runs "dub describe" or similar functionality. */ void runBuildCommands(CommandType type, in string[] commands, in Package pack, in Project proj, in GeneratorSettings settings, in BuildSettings build_settings, in string[string][] extraVars = null) { import dub.internal.utils : runCommands; auto env = makeCommandEnvironmentVariables(type, pack, proj, settings, build_settings, extraVars); auto sub_commands = processVars(proj, pack, settings, commands, false, env); auto depNames = proj.dependencies.map!((a) => a.name).array(); storeRecursiveInvokations(env, proj.rootPackage.name ~ depNames); runCommands(sub_commands, env.collapseEnv, pack.path().toString()); } const(string[string])[] makeCommandEnvironmentVariables(CommandType type, in Package pack, in Project proj, in GeneratorSettings settings, in BuildSettings build_settings, in string[string][] extraVars = null) { import dub.internal.utils : getDUBExePath; import std.conv : to, text; import std.process : environment, escapeShellFileName; string[string] env; // TODO: do more elaborate things here // TODO: escape/quote individual items appropriately env["VERSIONS"] = join(build_settings.versions, " "); env["LIBS"] = join(build_settings.libs, " "); env["SOURCE_FILES"] = join(build_settings.sourceFiles, " "); env["IMPORT_PATHS"] = join(build_settings.importPaths, " "); env["C_IMPORT_PATHS"] = join(build_settings.cImportPaths, " "); env["STRING_IMPORT_PATHS"] = join(build_settings.stringImportPaths, " "); env["DC"] = settings.platform.compilerBinary; env["DC_BASE"] = settings.platform.compiler; env["D_FRONTEND_VER"] = 
to!string(settings.platform.frontendVersion); env["DUB_EXE"] = getDUBExePath(settings.platform.compilerBinary).toNativeString(); env["DUB_PLATFORM"] = join(settings.platform.platform, " "); env["DUB_ARCH"] = join(settings.platform.architecture, " "); env["DUB_TARGET_TYPE"] = to!string(build_settings.targetType); env["DUB_TARGET_PATH"] = build_settings.targetPath; env["DUB_TARGET_NAME"] = build_settings.targetName; env["DUB_TARGET_EXIT_STATUS"] = settings.targetExitStatus.text; env["DUB_WORKING_DIRECTORY"] = build_settings.workingDirectory; env["DUB_MAIN_SOURCE_FILE"] = build_settings.mainSourceFile; env["DUB_CONFIG"] = settings.config; env["DUB_BUILD_TYPE"] = settings.buildType; env["DUB_BUILD_MODE"] = to!string(settings.buildMode); env["DUB_PACKAGE"] = pack.name; env["DUB_PACKAGE_DIR"] = pack.path.toNativeString(); env["DUB_ROOT_PACKAGE"] = proj.rootPackage.name; env["DUB_ROOT_PACKAGE_DIR"] = proj.rootPackage.path.toNativeString(); env["DUB_PACKAGE_VERSION"] = pack.version_.toString(); env["DUB_COMBINED"] = settings.combined? "TRUE" : ""; env["DUB_RUN"] = settings.run? "TRUE" : ""; env["DUB_FORCE"] = settings.force? "TRUE" : ""; env["DUB_RDMD"] = settings.rdmd? "TRUE" : ""; env["DUB_TEMP_BUILD"] = settings.tempBuild? "TRUE" : ""; env["DUB_PARALLEL_BUILD"] = settings.parallelBuild? 
"TRUE" : ""; env["DUB_RUN_ARGS"] = (cast(string[])settings.runArgs).map!(escapeShellFileName).join(" "); auto cfgs = proj.getPackageConfigs(settings.platform, settings.config, true); auto rootPackageBuildSettings = proj.rootPackage.getBuildSettings(settings.platform, cfgs[proj.rootPackage.name]); env["DUB_ROOT_PACKAGE_TARGET_TYPE"] = to!string(rootPackageBuildSettings.targetType); env["DUB_ROOT_PACKAGE_TARGET_PATH"] = rootPackageBuildSettings.targetPath; env["DUB_ROOT_PACKAGE_TARGET_NAME"] = rootPackageBuildSettings.targetName; const(string[string])[] typeEnvVars; with (build_settings) final switch (type) { // pre/postGenerate don't have generateEnvironments, but reuse buildEnvironments case CommandType.preGenerate: typeEnvVars = [environments, buildEnvironments, preGenerateEnvironments]; break; case CommandType.postGenerate: typeEnvVars = [environments, buildEnvironments, postGenerateEnvironments]; break; case CommandType.preBuild: typeEnvVars = [environments, buildEnvironments, preBuildEnvironments]; break; case CommandType.postBuild: typeEnvVars = [environments, buildEnvironments, postBuildEnvironments]; break; case CommandType.preRun: typeEnvVars = [environments, runEnvironments, preRunEnvironments]; break; case CommandType.postRun: typeEnvVars = [environments, runEnvironments, postRunEnvironments]; break; } return [environment.toAA()] ~ env ~ typeEnvVars ~ extraVars; } string[string] collapseEnv(in string[string][] envs) { string[string] ret; foreach (subEnv; envs) { foreach (k, v; subEnv) ret[k] = v; } return ret; } /// Type to specify where CLI commands that need to be run came from. Needed for /// proper substitution with support for the different environments. 
enum CommandType { /// Defined in the preGenerateCommands setting preGenerate, /// Defined in the postGenerateCommands setting postGenerate, /// Defined in the preBuildCommands setting preBuild, /// Defined in the postBuildCommands setting postBuild, /// Defined in the preRunCommands setting preRun, /// Defined in the postRunCommands setting postRun } private bool isRecursiveInvocation(string pack) { import std.algorithm : canFind, splitter; import std.process : environment; return environment .get("DUB_PACKAGES_USED", "") .splitter(",") .canFind(pack); } private void storeRecursiveInvokations(ref const(string[string])[] env, string[] packs) { import std.algorithm : canFind, splitter; import std.range : chain; import std.process : environment; env ~= [ "DUB_PACKAGES_USED": environment .get("DUB_PACKAGES_USED", "") .splitter(",") .chain(packs) .join(",") ]; } version(Posix) { // https://github.com/dlang/dub/issues/2238 unittest { import dub.recipe.io : parsePackageRecipe; import dub.compilers.gdc : GDCCompiler; import std.algorithm : canFind; import std.path : absolutePath; import std.file : rmdirRecurse, write; mkdirRecurse("dubtest/preGen/source"); write("dubtest/preGen/source/foo.d", ""); scope(exit) rmdirRecurse("dubtest"); auto recipe = parsePackageRecipe( `{"name":"test", "targetType":"library", "preGenerateCommands":["touch $PACKAGE_DIR/source/bar.d"]}`, `dubtest/preGen/dub.json`); auto pack = new Package(recipe, NativePath("dubtest/preGen".absolutePath)); auto pman = new PackageManager(pack.path, NativePath("/tmp/foo/"), NativePath("/tmp/foo/"), false); auto prj = new Project(pman, pack); final static class TestCompiler : GDCCompiler { override void invoke(in BuildSettings settings, in BuildPlatform platform, void delegate(int, string) output_callback, NativePath cwd) { assert(false); } override void invokeLinker(in BuildSettings settings, in BuildPlatform platform, string[] objects, void delegate(int, string) output_callback, NativePath cwd) { 
assert(false); } } GeneratorSettings settings; settings.compiler = new TestCompiler; settings.buildType = "debug"; final static class TestGenerator : ProjectGenerator { this(Project project) { super(project); } override void generateTargets(GeneratorSettings settings, in TargetInfo[string] targets) { import std.conv : text; const sourceFiles = targets["test"].buildSettings.sourceFiles; assert(sourceFiles.canFind("dubtest/preGen/source/bar.d".absolutePath), sourceFiles.text); } } auto gen = new TestGenerator(prj); gen.generate(settings); } } dub-1.40.0/source/dub/generators/sublimetext.d000066400000000000000000000063641477246567400213420ustar00rootroot00000000000000/** Generator for SublimeText project files Copyright: © 2014 Nicholas Londey License: Subject to the terms of the MIT license, as written in the included LICENSE.txt file. Authors: Nicholas Londey */ module dub.generators.sublimetext; import dub.compilers.compiler; import dub.generators.generator; import dub.internal.vibecompat.data.json; import dub.internal.vibecompat.inet.path; import dub.internal.logging; import dub.packagemanager; import dub.project; import std.algorithm; import std.array; import std.file; class SublimeTextGenerator : ProjectGenerator { this(Project project) { super(project); } override void generateTargets(GeneratorSettings settings, in TargetInfo[string] targets) { auto buildSettings = targets[m_project.name].buildSettings; logDebug("About to generate sublime project for %s.", m_project.rootPackage.name); auto root = Json([ "folders": targets.byValue.map!(f => targetFolderJson(f)).array.Json, "build_systems": buildSystems(settings.platform, settings.toolWorkingDirectory.toNativeString()), "settings": [ "include_paths": buildSettings.importPaths.map!Json.array.Json ].Json, ]); auto jsonString = appender!string(); writePrettyJsonString(jsonString, root); string projectPath = m_project.name ~ ".sublime-project"; write(projectPath, jsonString.data); logInfo("Generated", Color.green, 
"%s", projectPath); } } private Json targetFolderJson(in ProjectGenerator.TargetInfo target) { return [ "name": target.pack.basePackage.name.Json, "path": target.pack.basePackage.path.toNativeString.Json, "follow_symlinks": true.Json, "folder_exclude_patterns": [".dub"].map!Json.array.Json, ].Json; } private Json buildSystems(BuildPlatform buildPlatform, string workingDiretory) { static immutable BUILD_TYPES = [ //"plain", "debug", "release", "release-debug", "release-nobounds", //"unittest", "docs", "ddox", "profile", "profile-gc", "cov", "cov-ctfe", "unittest-cov", "unittest-cov-ctfe", "syntax" ]; string fileRegex; if (buildPlatform.frontendVersion >= 2066 && buildPlatform.compiler == "dmd") fileRegex = r"^(.+)\(([0-9]+)\,([0-9]+)\)\: (.*)$"; else fileRegex = r"^(.+)\(([0-9]+)\)\:() (.*)$"; auto arch = buildPlatform.architecture[0]; Json makeBuildSystem(string buildType) { return Json([ "name": "DUB build " ~ buildType.Json, "cmd": ["dub", "build", "--build=" ~ buildType, "--arch=" ~ arch, "--compiler="~buildPlatform.compilerBinary].map!Json.array.Json, "file_regex": fileRegex.Json, "working_dir": workingDiretory.Json, "variants": [ [ "name": "Run".Json, "cmd": ["dub", "run", "--build=" ~ buildType, "--arch=" ~ arch, "--compiler="~buildPlatform.compilerBinary].map!Json.array.Json, ].Json ].array.Json, ]); } auto buildSystems = BUILD_TYPES.map!makeBuildSystem.array; buildSystems ~= [ "name": "DUB test".Json, "cmd": ["dub", "test", "--arch=" ~ arch, "--compiler="~buildPlatform.compilerBinary].map!Json.array.Json, "file_regex": r"^(.+)\(([0-9]+)\)\:() (.*)$".Json, "working_dir": workingDiretory.Json, ].Json; return buildSystems.array.Json; } unittest { auto buildPlatform = BuildPlatform(); buildPlatform.architecture ~= "x86_64"; auto result = buildPlatform.buildSystems(getcwd()).toString; } dub-1.40.0/source/dub/generators/targetdescription.d000066400000000000000000000042151477246567400225200ustar00rootroot00000000000000/** Pseudo generator to output build 
descriptions. Copyright: © 2015 rejectedsoftware e.K. License: Subject to the terms of the MIT license, as written in the included LICENSE.txt file. Authors: Sönke Ludwig */ module dub.generators.targetdescription; import dub.compilers.buildsettings; import dub.compilers.compiler; import dub.description; import dub.generators.generator; import dub.internal.vibecompat.inet.path; import dub.project; class TargetDescriptionGenerator : ProjectGenerator { TargetDescription[] targetDescriptions; size_t[string] targetDescriptionLookup; this(Project project) { super(project); } protected override void generateTargets(GeneratorSettings settings, in TargetInfo[string] targets) { import std.algorithm : map; import std.array : array; auto configs = m_project.getPackageConfigs(settings.platform, settings.config); targetDescriptions.length = targets.length; size_t i = 0; bool[string] visited; void visitTargetRec(string target) { if (target in visited) return; visited[target] = true; auto ti = targets[target]; TargetDescription d; d.rootPackage = ti.pack.name; d.packages = ti.packages.map!(p => p.name).array; d.rootConfiguration = ti.config; d.buildSettings = ti.buildSettings.dup; const buildId = computeBuildID(d.buildSettings, ti.pack.path, ti.config, settings); const filename = settings.compiler.getTargetFileName(d.buildSettings, settings.platform); d.cacheArtifactPath = (targetCacheDir(settings.cache, ti.pack, buildId) ~ filename).toNativeString(); d.dependencies = ti.dependencies.dup; d.linkDependencies = ti.linkDependencies.dup; // Add static library dependencies foreach (ld; ti.linkDependencies) { auto ltarget = targets[ld]; auto ltbs = ltarget.buildSettings; auto targetfil = (NativePath(ltbs.targetPath) ~ settings.compiler.getTargetFileName(ltbs, settings.platform)).toNativeString(); d.buildSettings.addLinkerFiles(targetfil); } targetDescriptionLookup[d.rootPackage] = i; targetDescriptions[i++] = d; foreach (dep; ti.dependencies) visitTargetRec(dep); } 
visitTargetRec(m_project.rootPackage.name); } } dub-1.40.0/source/dub/generators/visuald.d000066400000000000000000000527221477246567400204430ustar00rootroot00000000000000/** Generator for VisualD project files Copyright: © 2012-2013 Matthias Dondorff License: Subject to the terms of the MIT license, as written in the included LICENSE.txt file. Authors: Matthias Dondorff */ module dub.generators.visuald; import dub.compilers.compiler; import dub.generators.generator; import dub.internal.utils; import dub.internal.vibecompat.core.file; import dub.internal.logging; import dub.package_; import dub.packagemanager; import dub.project; import std.algorithm; import std.array; import std.conv; import std.exception; import std.format; import std.string : format; import std.uuid; // Dubbing is developing dub... //version = DUBBING; // TODO: handle pre/post build commands class VisualDGenerator : ProjectGenerator { private { string[string] m_projectUuids; } this(Project project) { super(project); } override void generateTargets(GeneratorSettings settings, in TargetInfo[string] targets) { logDebug("About to generate projects for %s, with %s direct dependencies.", m_project.rootPackage.name, m_project.rootPackage.getAllDependencies().length); generateProjectFiles(settings, targets); generateSolutionFile(settings, targets); } private { void generateSolutionFile(GeneratorSettings settings, in TargetInfo[string] targets) { auto ret = appender!(char[])(); auto configs = m_project.getPackageConfigs(settings.platform, settings.config); auto some_uuid = generateUUID(); // Solution header ret.put("Microsoft Visual Studio Solution File, Format Version 11.00\n"); ret.put("# Visual Studio 2010\n"); bool[string] visited; void generateSolutionEntry(string pack) { if (pack in visited) return; visited[pack] = true; auto ti = targets[pack]; auto uuid = guid(pack); ret.formattedWrite("Project(\"%s\") = \"%s\", \"%s\", \"%s\"\n", some_uuid, pack, projFileName(pack), uuid); if 
(ti.linkDependencies.length && ti.buildSettings.targetType != TargetType.staticLibrary) { ret.put("\tProjectSection(ProjectDependencies) = postProject\n"); foreach (d; ti.linkDependencies) if (!isHeaderOnlyPackage(d, targets)) { // TODO: clarify what "uuid = uuid" should mean ret.formattedWrite("\t\t%s = %s\n", guid(d), guid(d)); } ret.put("\tEndProjectSection\n"); } ret.put("EndProject\n"); foreach (d; ti.dependencies) generateSolutionEntry(d); } auto mainpack = m_project.rootPackage.name; generateSolutionEntry(mainpack); // Global section contains configurations ret.put("Global\n"); ret.put("\tGlobalSection(SolutionConfigurationPlatforms) = preSolution\n"); ret.formattedWrite("\t\t%s|%s = %s|%s\n", settings.buildType, settings.platform.architecture[0].vsArchitecture, settings.buildType, settings.platform.architecture[0].vsArchitecture); ret.put("\tEndGlobalSection\n"); ret.put("\tGlobalSection(ProjectConfigurationPlatforms) = postSolution\n"); const string[] sub = ["ActiveCfg", "Build.0"]; const string[] conf = [settings.buildType~"|"~settings.platform.architecture[0].vsArchitecture]; foreach (t; targets.byKey) foreach (c; conf) foreach (s; sub) formattedWrite(ret, "\t\t%s.%s.%s = %s\n", guid(t), c, s, c); // TODO: for all dependencies ret.put("\tEndGlobalSection\n"); ret.put("\tGlobalSection(SolutionProperties) = preSolution\n"); ret.put("\t\tHideSolutionNode = FALSE\n"); ret.put("\tEndGlobalSection\n"); ret.put("EndGlobal\n"); // Writing solution file logDebug("About to write to .sln file with %s bytes", to!string(ret.data.length)); NativePath(solutionFileName()).writeFile(ret.data); logInfo("Generated", Color.green, "%s (solution)", solutionFileName()); } void generateProjectFiles(GeneratorSettings settings, in TargetInfo[string] targets) { bool[string] visited; void performRec(string name) { if (name in visited) return; visited[name] = true; generateProjectFile(name, settings, targets); foreach (d; targets[name].dependencies) performRec(d); } 
performRec(m_project.rootPackage.name); } bool isHeaderOnlyPackage(string pack, in TargetInfo[string] targets) const { auto buildsettings = targets[pack].buildSettings; if (!buildsettings.sourceFiles.any!(f => f.endsWith(".d"))()) return true; return false; } void generateProjectFile(string packname, GeneratorSettings settings, in TargetInfo[string] targets) { import dub.compilers.utils : isLinkerFile; auto ret = appender!(char[])(); auto root_package_path = m_project.rootPackage.path; auto basepath = NativePath(".dub/"); if (!isWritableDir(basepath, true)) throw new Exception(".dub is not writeable"); ret.put("\n"); ret.formattedWrite(" %s\n", guid(packname)); // Several configurations (debug, release, unittest) generateProjectConfiguration(ret, packname, settings.buildType, settings, targets); //generateProjectConfiguration(ret, packname, "release", settings, targets); //generateProjectConfiguration(ret, packname, "unittest", settings, targets); // Add all files auto files = targets[packname].buildSettings; SourceFile[string] sourceFiles; void addSourceFile(NativePath file_path, NativePath structure_path, SourceFile.Action action) { auto key = file_path.toString(); auto sf = sourceFiles.get(key, SourceFile.init); sf.filePath = file_path; if (sf.action == SourceFile.Action.none) { sf.action = action; sf.structurePath = structure_path; } sourceFiles[key] = sf; } void addFile(string s, SourceFile.Action action) { auto sp = NativePath(s); assert(sp.absolute, format("Source path in %s expected to be absolute: %s", packname, s)); //if( !sp.absolute ) sp = pack.path ~ sp; addSourceFile(sp.relativeTo(settings.toolWorkingDirectory ~ basepath), determineStructurePath(sp, targets[packname]), action); } foreach (p; targets[packname].packages) if (!p.recipePath.empty) addFile(p.recipePath.toNativeString(), SourceFile.Action.none); if (files.targetType == TargetType.staticLibrary) foreach(s; files.sourceFiles.filter!(s => !isLinkerFile(settings.platform, s))) addFile(s, 
SourceFile.Action.build); else foreach(s; files.sourceFiles.filter!(s => !s.endsWith(".lib"))) addFile(s, SourceFile.Action.build); foreach(s; files.importFiles) addFile(s, SourceFile.Action.none); foreach(s; files.stringImportFiles) addFile(s, SourceFile.Action.none); findFilesMatchingGlobs(root_package_path, files.copyFiles, s => addFile(s, SourceFile.Action.copy)); findFilesMatchingGlobs(root_package_path, files.extraDependencyFiles, s => addFile(s, SourceFile.Action.none)); // Create folders and files ret.formattedWrite(" ", getPackageFileName(packname)); typeof(NativePath.init.head)[] lastFolder; foreach(source; sortedSources(sourceFiles.values)) { logDebug("source looking at %s", source.structurePath); auto cur = source.structurePath.parentPath.bySegment.array; if(lastFolder != cur) { size_t same = 0; foreach(idx; 0..min(lastFolder.length, cur.length)) if(lastFolder[idx] != cur[idx]) break; else same = idx+1; const decrease = lastFolder.length - min(lastFolder.length, same); const increase = cur.length - min(cur.length, same); foreach(unused; 0..decrease) ret.put("\n "); foreach(idx; 0..increase) ret.formattedWrite("\n ", cur[same + idx].name); lastFolder = cur; } final switch (source.action) with (SourceFile.Action) { case none: ret.formattedWrite("\n ", source.filePath.toNativeString()); break; case build: ret.formattedWrite("\n ", source.filePath.toNativeString()); break; case copy: ret.formattedWrite("\n ", source.filePath.toNativeString()); break; } } // Finalize all open folders foreach(unused; 0..lastFolder.length) ret.put("\n "); ret.put("\n \n"); logDebug("About to write to '%s.visualdproj' file %s bytes", getPackageFileName(packname), ret.data.length); projFileName(packname).writeFile(ret.data); } void generateProjectConfiguration(Appender!(char[]) ret, string pack, string type, GeneratorSettings settings, in TargetInfo[string] targets) { auto cwd = settings.toolWorkingDirectory; auto buildsettings = targets[pack].buildSettings.dup; auto basepath = 
NativePath(".dub/"); string[] getSettings(string setting)(){ return __traits(getMember, buildsettings, setting); } string[] getPathSettings(string setting)() { auto settings = getSettings!setting(); auto ret = new string[settings.length]; foreach (i; 0 .. settings.length) { // \" is interpreted as an escaped " by cmd.exe, so we need to avoid that auto p = NativePath(settings[i]).relativeTo(cwd ~ basepath); p.endsWithSlash = false; ret[i] = '"' ~ p.toNativeString() ~ '"'; } return ret; } if (buildsettings.targetType == TargetType.none) return; foreach(architecture; settings.platform.architecture) { auto arch = architecture.vsArchitecture; ret.formattedWrite(" \n", to!string(type), arch); // debug and optimize setting ret.formattedWrite(" %s\n", buildsettings.options & BuildOption.debugInfo ? "1" : "0"); ret.formattedWrite(" %s\n", buildsettings.options & BuildOption.optimize ? "1" : "0"); ret.formattedWrite(" %s\n", buildsettings.options & BuildOption.inline ? "1" : "0"); ret.formattedWrite(" %s\n", buildsettings.options & BuildOption.releaseMode ? "1" : "0"); // Lib or exe? enum { Executable = 0, StaticLib = 1, DynamicLib = 2 } int output_type = StaticLib; // library string output_ext = "lib"; if (buildsettings.targetType == TargetType.executable) { output_type = Executable; output_ext = "exe"; } else if (buildsettings.targetType == TargetType.dynamicLibrary) { output_type = DynamicLib; output_ext = "dll"; } auto bin_path = pack == m_project.rootPackage.name ? 
NativePath(buildsettings.targetPath) : NativePath("lib/"); bin_path.endsWithSlash = true; ret.formattedWrite(" %s\n", output_type); ret.formattedWrite(" %s%s.%s\n", bin_path.toNativeString(), buildsettings.targetName, output_ext); // include paths and string imports string imports = join(getPathSettings!"importPaths"(), " "); string cimports = join(getPathSettings!"cImportPaths"(), " "); string stringImports = join(getPathSettings!"stringImportPaths"(), " "); string combinedImports = join([imports, cimports], " "); ret.formattedWrite(" %s\n", combinedImports); ret.formattedWrite(" %s\n", stringImports); ret.formattedWrite(" %s\n", "$(DMDInstallDir)windows\\bin\\dmd.exe"); // FIXME: use the actually selected compiler! ret.formattedWrite(" %s\n", getSettings!"dflags"().join(" ")); // Add version identifiers string versions = join(getSettings!"versions"(), " "); ret.formattedWrite(" %s\n", versions); // Add libraries, system libs need to be suffixed by ".lib". string linkLibs = join(map!(a => a~".lib")(getSettings!"libs"()), " "); string addLinkFiles = join(getSettings!"sourceFiles"().filter!(s => s.endsWith(".lib"))(), " "); if (arch == "x86") addLinkFiles ~= " phobos.lib"; if (output_type != StaticLib) ret.formattedWrite(" %s %s\n", linkLibs, addLinkFiles); // Unittests ret.formattedWrite(" %s\n", buildsettings.options & BuildOption.unittests ? "1" : "0"); // Better C ret.formattedWrite(" %s\n", buildsettings.options & BuildOption.betterC ? 
"1" : "0"); // compute directory for intermediate files (need dummy/ because of how -op determines the resulting path) size_t ndummy = 0; foreach (f; buildsettings.sourceFiles) { auto rpath = NativePath(f).relativeTo(cwd ~ basepath); size_t nd = 0; foreach (s; rpath.bySegment) if (s == "..") nd++; if (nd > ndummy) ndummy = nd; } string intersubdir = replicate("dummy/", ndummy) ~ getPackageFileName(pack); ret.put(" 0\n"); ret.put(" 0\n"); ret.put(" 0\n"); ret.put(" 0\n"); int singlefilemode; final switch (settings.buildMode) with (BuildMode) { case separate: singlefilemode = 2; break; case allAtOnce: singlefilemode = 0; break; case singleFile: singlefilemode = 1; break; //case compileOnly: singlefilemode = 3; break; } ret.formattedWrite(" %s\n", singlefilemode); ret.formattedWrite(" %s", buildsettings.dflags.canFind("-m32mscoff") ? "1" : "0"); ret.put(" 0\n"); ret.put(" 0\n"); ret.put(" 0\n"); ret.formattedWrite(" %s\n", buildsettings.options & BuildOption.verbose ? "1" : "0"); ret.put(" 0\n"); ret.put(" 0\n"); ret.formattedWrite(" %s\n", arch == "x64" ? 1 : 0); ret.put(" 0\n"); ret.put(" 0\n"); ret.put(" 0\n"); ret.put(" 0\n"); ret.put(" 0\n"); ret.put(" 0\n"); ret.put(" 0\n"); ret.put(" 0\n"); ret.put(" 0\n"); ret.put(" 0\n"); ret.put(" 0\n"); ret.put(" 0\n"); ret.put(" 0\n"); ret.formattedWrite(" %s\n", buildsettings.options & BuildOption.noBoundsCheck ? "1" : "0"); ret.put(" 0\n"); ret.put(" 1\n"); ret.formattedWrite(" %s\n", buildsettings.options & BuildOption.warningsAsErrors ? "1" : "0"); ret.formattedWrite(" %s\n", buildsettings.options & BuildOption.warnings ? "1" : "0"); ret.formattedWrite(" %s\n", buildsettings.options & BuildOption.property ? "1" : "0"); ret.formattedWrite(" %s\n", buildsettings.options & BuildOption.alwaysStackFrame ? "1" : "0"); ret.put(" 0\n"); ret.formattedWrite(" %s\n", buildsettings.options & BuildOption.coverage ? 
"1" : "0"); ret.put(" 0\n"); ret.put(" 2\n"); ret.formattedWrite(" %s\n", buildsettings.options & BuildOption.ignoreUnknownPragmas ? "1" : "0"); ret.formattedWrite(" %s\n", settings.compiler.name == "ldc" ? 2 : settings.compiler.name == "gdc" ? 1 : 0); ret.formattedWrite(" 0\n"); ret.formattedWrite(" %s\n", bin_path.toNativeString()); ret.formattedWrite(" obj/%s/%s\n", to!string(type), intersubdir); ret.put(" \n"); ret.put(" \n"); ret.put(" 0\n"); ret.put(" \n"); ret.put(" \n"); ret.put(" \n"); ret.put(" \n"); ret.put(" 0\n"); ret.put(" \n"); ret.put(" \n"); ret.put(" 1\n"); ret.put(" $(IntDir)\\$(TargetName).json\n"); ret.put(" 0\n"); ret.put(" 0\n"); ret.put(" \n"); ret.put(" 0\n"); ret.put(" 0\n"); ret.put(" 0\n"); ret.put(" \n"); ret.put(" \n"); ret.put(" \n"); ret.put(" 0\n"); ret.put(" \n"); ret.put(" 1\n"); ret.put(" $(VisualDInstallDir)cv2pdb\\cv2pdb.exe\n"); ret.put(" 0\n"); ret.put(" 0\n"); ret.put(" 0\n"); ret.put(" \n"); ret.put(" \n"); ret.put(" \n"); ret.put(" \n"); ret.put(" \n"); ret.put(" \n"); auto wdir = NativePath(buildsettings.workingDirectory); if (!wdir.absolute) wdir = m_project.rootPackage.path ~ wdir; ret.formattedWrite(" %s\n", wdir.relativeTo(cwd ~ basepath).toNativeString()); ret.put(" \n"); ret.put(" \n"); ret.put(" *.obj;*.cmd;*.build;*.dep\n"); ret.put(" \n"); } // foreach(architecture) } void performOnDependencies(const Package main, string[string] configs, void delegate(const Package pack) op) { foreach (p; m_project.getTopologicalPackageList(false, main, configs)) { if (p is main) continue; op(p); } } string generateUUID() const { import std.string; return "{" ~ toUpper(randomUUID().toString()) ~ "}"; } string guid(string projectName) { if(projectName !in m_projectUuids) m_projectUuids[projectName] = generateUUID(); return m_projectUuids[projectName]; } auto solutionFileName() const { version(DUBBING) return getPackageFileName(m_project.rootPackage) ~ ".dubbed.sln"; else return getPackageFileName(m_project.rootPackage.name) ~ 
".sln"; } NativePath projFileName(string pack) const { auto basepath = NativePath(".dub/"); version(DUBBING) return basepath ~ (getPackageFileName(pack) ~ ".dubbed.visualdproj"); else return basepath ~ (getPackageFileName(pack) ~ ".visualdproj"); } } // TODO: nice folders private struct SourceFile { NativePath structurePath; NativePath filePath; enum Action { none, build, copy }; Action action = Action.none; size_t toHash() const nothrow @trusted { return structurePath.toHash() ^ filePath.toHash() ^ (action * 0x1f3e7b2c); } int opCmp(ref const SourceFile rhs) const { return sortOrder(this, rhs); } // "a < b" for folder structures (deepest folder first, else lexical) private final static int sortOrder(ref const SourceFile a, ref const SourceFile b) { assert(!a.structurePath.empty); assert(!b.structurePath.empty); static if (is(typeof(a.structurePath.nodes))) { // vibe.d < 0.8.2 auto as = a.structurePath.nodes; auto bs = b.structurePath.nodes; } else { auto as = a.structurePath.bySegment.array; auto bs = b.structurePath.bySegment.array; } // Check for different folders, compare folders only (omit last one). for(uint idx=0; idx bs.length? -1 : 1; } else { // Both paths indicate files in the same directory, use lexical // ordering for those. 
return as[$-1].name.cmp(bs[$-1].name); } } } private auto sortedSources(SourceFile[] sources) { return sort(sources); } unittest { SourceFile[] sfs = [ { NativePath("b/file.d"), NativePath("") }, { NativePath("b/b/fileA.d"), NativePath("") }, { NativePath("a/file.d"), NativePath("") }, { NativePath("b/b/fileB.d"), NativePath("") }, { NativePath("b/b/b/fileA.d"), NativePath("") }, { NativePath("b/c/fileA.d"), NativePath("") }, ]; auto sorted = sort(sfs); SourceFile[] sortedSfs; foreach(sr; sorted) sortedSfs ~= sr; assert(sortedSfs[0].structurePath == NativePath("a/file.d"), "1"); assert(sortedSfs[1].structurePath == NativePath("b/b/b/fileA.d"), "2"); assert(sortedSfs[2].structurePath == NativePath("b/b/fileA.d"), "3"); assert(sortedSfs[3].structurePath == NativePath("b/b/fileB.d"), "4"); assert(sortedSfs[4].structurePath == NativePath("b/c/fileA.d"), "5"); assert(sortedSfs[5].structurePath == NativePath("b/file.d"), "6"); } } private NativePath determineStructurePath(NativePath file_path, in ProjectGenerator.TargetInfo target) { foreach (p; target.packages) { if (file_path.startsWith(p.path)) return NativePath(getPackageFileName(p.name)) ~ file_path.relativeTo(p.path); } return NativePath("misc/") ~ file_path.head; } private string getPackageFileName(string pack) { return pack.replace(":", "_"); } private @property string vsArchitecture(string architecture) { switch(architecture) { default: logWarn("Unsupported platform('%s'), defaulting to x86", architecture); goto case; case "x86", "x86_mscoff": return "Win32"; case "x86_64": return "x64"; } } dub-1.40.0/source/dub/init.d000066400000000000000000000145101477246567400155570ustar00rootroot00000000000000/** Package skeleton initialization code. Copyright: © 2013-2016 rejectedsoftware e.K. License: Subject to the terms of the MIT license, as written in the included LICENSE.txt file. 
Authors: Sönke Ludwig */ module dub.init; import dub.internal.vibecompat.core.file; import dub.internal.logging; import dub.package_ : PackageFormat, packageInfoFiles, defaultPackageFilename; import dub.recipe.packagerecipe; import dub.dependency; import std.exception; import std.file; import std.format; import std.process : environment; import std.string; /** Initializes a new package in the given directory. The given `root_path` will be checked for any of the files that will be created by this function. If any exist, an exception will be thrown before altering the directory. Params: root_path = Directory in which to create the new package. If the directory doesn't exist, a new one will be created. deps = A set of extra dependencies to add to the package recipe. The associative array is expected to map from package name to package version. type = The type of package skeleton to create. Can currently be "minimal", "vibe.d" or "deimos" format = Format in which the recipe will be written (SDL / JSON) recipe_callback = Optional callback that can be used to customize the package recipe and the file format used to store it prior to writing it to disk. */ void initPackage(NativePath root_path, VersionRange[string] deps, string type, PackageFormat format, scope RecipeCallback recipe_callback = null) { import std.conv : to; import dub.recipe.io : writePackageRecipe; void enforceDoesNotExist(string filename) { enforce(!existsFile(root_path ~ filename), "The target directory already contains a '%s' %s. Aborting." .format(filename, filename.isDir ? "directory" : "file")); } string username = getUserName(); PackageRecipe p; p.name = root_path.head.name.toLower(); p.authors ~= username; // Use proprietary as conservative default, so that we don't announce a more // permissive license than actually chosen in case the dub.json wasn't updated. 
p.license = "proprietary"; foreach (pack, v; deps) { p.buildSettings.dependencies[pack] = Dependency(v); } //Check to see if a target directory needs to be created if (!root_path.empty) { ensureDirectory(root_path); } //Make sure we do not overwrite anything accidentally foreach (fil; packageInfoFiles) enforceDoesNotExist(fil.filename); auto files = ["source/", "views/", "public/", "dub.json"]; foreach (fil; files) enforceDoesNotExist(fil); void processRecipe() { if (recipe_callback) recipe_callback(p, format); } switch (type) { default: break; case "minimal": initMinimalPackage(root_path, p, &processRecipe); break; case "vibe.d": initVibeDPackage(root_path, p, &processRecipe); break; case "deimos": initDeimosPackage(root_path, p, &processRecipe); break; } writePackageRecipe(root_path ~ ("dub."~format.to!string), p); writeGitignore(root_path, p.name); } alias RecipeCallback = void delegate(ref PackageRecipe, ref PackageFormat); private void initMinimalPackage(NativePath root_path, ref PackageRecipe p, scope void delegate() pre_write_callback) { p.description = "A minimal D application."; pre_write_callback(); ensureDirectory(root_path ~ "source"); write((root_path ~ "source/app.d").toNativeString(), q{import std.stdio; void main() { writeln("Edit source/app.d to start your project."); } }); } private void initVibeDPackage(NativePath root_path, ref PackageRecipe p, scope void delegate() pre_write_callback) { if ("vibe-d" !in p.buildSettings.dependencies) p.buildSettings.dependencies["vibe-d"] = Dependency("~>0.9"); p.description = "A simple vibe.d server application."; pre_write_callback(); ensureDirectory(root_path ~ "source"); ensureDirectory(root_path ~ "views"); ensureDirectory(root_path ~ "public"); write((root_path ~ "source/app.d").toNativeString(), q{import vibe.vibe; void main() { auto settings = new HTTPServerSettings; settings.port = 8080; settings.bindAddresses = ["::1", "127.0.0.1"]; auto listener = listenHTTP(settings, &hello); scope (exit) { 
listener.stopListening(); } logInfo("Please open http://127.0.0.1:8080/ in your browser."); runApplication(); } void hello(HTTPServerRequest req, HTTPServerResponse res) { res.writeBody("Hello, World!"); } }); } private void initDeimosPackage(NativePath root_path, ref PackageRecipe p, scope void delegate() pre_write_callback) { import dub.compilers.buildsettings : TargetType; p.description = format("Deimos Bindings for "~p.name~"."); p.buildSettings.importPaths[""] ~= "."; p.buildSettings.targetType = TargetType.sourceLibrary; pre_write_callback(); ensureDirectory(root_path ~ "C"); ensureDirectory(root_path ~ "deimos"); } /** * Write the `.gitignore` file to the directory, if it does not already exists * * As `dub` is often used with `git`, adding a `.gitignore` is a nice touch for * most users. However, this file is not mandatory for `dub` to do its job, * so we do not depend on the content. * One important use case we need to support is people running `dub init` on * a GitHub-initialized repository. Those might already contain a `.gitignore` * (and a README and a LICENSE), thus we should not bail out if the file already * exists, just ignore it. 
* * Params: * root_path = The path to the directory hosting the project * pkg_name = Name of the package, to generate a list of binaries to ignore */ private void writeGitignore(NativePath root_path, const(char)[] pkg_name) { auto full_path = (root_path ~ ".gitignore").toNativeString(); if (existsFile(full_path)) return; write(full_path, q"{.dub docs.json __dummy.html docs/ /%1$s %1$s.so %1$s.dylib %1$s.dll %1$s.a %1$s.lib %1$s-test-* *.exe *.pdb *.o *.obj *.lst }".format(pkg_name)); } private string getUserName() { version (Windows) return environment.get("USERNAME", "Peter Parker"); else version (Posix) { import core.sys.posix.pwd, core.sys.posix.unistd, core.stdc.string : strlen; import std.algorithm : splitter; // Bionic doesn't have pw_gecos on ARM version(CRuntime_Bionic) {} else if (auto pw = getpwuid(getuid)) { auto uinfo = pw.pw_gecos[0 .. strlen(pw.pw_gecos)].splitter(','); if (!uinfo.empty && uinfo.front.length) return uinfo.front.idup; } return environment.get("USER", "Peter Parker"); } else static assert(0); } dub-1.40.0/source/dub/internal/000077500000000000000000000000001477246567400162625ustar00rootroot00000000000000dub-1.40.0/source/dub/internal/colorize/000077500000000000000000000000001477246567400201105ustar00rootroot00000000000000dub-1.40.0/source/dub/internal/colorize/colors.d000066400000000000000000000054411477246567400215620ustar00rootroot00000000000000/** * Authors: Pedro Tacla Yamada * Date: June 9, 2014 * License: Licensed under the MIT license. 
See LICENSE for more information * Version: 1.0.2 */ module dub.internal.colorize.colors; import std.string : format; private template color_type(int offset) { enum type : int { init = 39 + offset, black = 30 + offset, red = 31 + offset, green = 32 + offset, yellow = 33 + offset, blue = 34 + offset, magenta = 35 + offset, cyan = 36 + offset, white = 37 + offset, light_black = 90 + offset, light_red = 91 + offset, light_green = 92 + offset, light_yellow = 93 + offset, light_blue = 94 + offset, light_magenta = 95 + offset, light_cyan = 96 + offset, light_white = 97 + offset } } alias color_type!0 .type fg; alias color_type!10 .type bg; // Text modes enum mode : int { init = 0, bold = 1, underline = 4, blink = 5, swap = 7, hide = 8 } /** * Wraps a string around color escape sequences. * * Params: * str = The string to wrap with colors and modes * c = The foreground color (see the fg enum type) * b = The background color (see the bg enum type) * m = The text mode (see the mode enum type) * Example: * --- * writeln("This is blue".color(fg.blue)); * writeln( * color("This is red over green blinking", fg.blue, bg.green, mode.blink) * ); * --- */ string color( const string str, const fg c=fg.init, const bg b=bg.init, const mode m=mode.init ) pure { return format("\033[%d;%d;%dm%s\033[0m", m, c, b, str); } unittest { import std.string : representation; string ret; ret = "This is yellow".color(fg.yellow); assert(ret.representation == "\033[0;33;49mThis is yellow\033[0m".representation); ret = "This is light green".color(fg.light_green); assert(ret.representation == "\033[0;92;49mThis is light green\033[0m".representation); ret = "This is light blue with red background".color(fg.light_blue, bg.red); assert(ret.representation == "\033[0;94;41mThis is light blue with red background\033[0m".representation); ret = "This is red on blue blinking".color(fg.red, bg.blue, mode.blink); assert(ret.representation == "\033[5;31;44mThis is red on blue blinking\033[0m".representation); } 
string colorHelper(T)(const string str, const T t=T.init) pure if(is(T : fg) || is(T : bg) || is(T : mode)) { return format("\033[%dm%s\033[0m", t, str); } alias background = colorHelper!bg; alias foreground = colorHelper!fg; alias style = colorHelper!mode; alias color = colorHelper; unittest { import std.string : representation; string ret; ret = "This is red on blue blinking" .foreground(fg.red) .background(bg.blue) .style(mode.blink); assert(ret.representation == "\033[5m\033[44m\033[31mThis is red on blue blinking\033[0m\033[0m\033[0m".representation); } dub-1.40.0/source/dub/internal/colorize/cwrite.d000066400000000000000000000025031477246567400215520ustar00rootroot00000000000000/** * Authors: ponce * Date: July 28, 2014 * License: Licensed under the MIT license. See LICENSE for more information * Version: 1.0.2 */ module dub.internal.colorize.cwrite; import std.stdio : File, stdout; import dub.internal.colorize.winterm; /// Coloured write. void cwrite(T...)(T args) if (!is(T[0] : File)) { stdout.cwrite(args); } /// Coloured writef. void cwritef(Char, T...)(in Char[] fmt, T args) if (!is(T[0] : File)) { stdout.cwritef(fmt, args); } /// Coloured writefln. void cwritefln(Char, T...)(in Char[] fmt, T args) { stdout.cwritef(fmt ~ "\n", args); } /// Coloured writeln. void cwriteln(T...)(T args) { // Most general instance stdout.cwrite(args, '\n'); } /// Coloured writef to a File. void cwritef(Char, A...)(File f, in Char[] fmt, A args) { import std.string : format; auto s = format(fmt, args); f.cwrite(s); } /// Coloured writef to a File. 
void cwrite(S...)(File f, S args) { import std.conv : to; string s = ""; foreach(arg; args) s ~= to!string(arg); version(Windows) { WinTermEmulation winterm; winterm.initialize(); foreach(dchar c ; s) { auto charAction = winterm.feed(c); final switch(charAction) with (WinTermEmulation.CharAction) { case drop: break; case write: f.write(c); break; case flush: f.flush(); break; } } } else { f.write(s); } } dub-1.40.0/source/dub/internal/colorize/package.d000066400000000000000000000004071477246567400216510ustar00rootroot00000000000000/** * Authors: ponce * Date: July 28, 2014 * License: Licensed under the MIT license. See LICENSE for more information * Version: 1.0.2 */ module dub.internal.colorize; public import dub.internal.colorize.colors; public import dub.internal.colorize.cwrite; dub-1.40.0/source/dub/internal/colorize/winterm.d000066400000000000000000000115151477246567400217450ustar00rootroot00000000000000/** * Authors: ponce * Date: July 28, 2014 * License: Licensed under the MIT license. See LICENSE for more information * Version: 1.0.2 */ module dub.internal.colorize.winterm; version(Windows) { import core.sys.windows.windows; // Patch for DMD 2.065 compatibility static if( __VERSION__ < 2066 ) private enum nogc = 1; // This is a state machine to enable terminal colors on Windows. // Parses and interpret ANSI/VT100 Terminal Control Escape Sequences. // Only supports colour sequences, will output char incorrectly on invalid input. struct WinTermEmulation { public: @nogc void initialize() nothrow { // saves console attributes _console = GetStdHandle(STD_OUTPUT_HANDLE); _savedInitialColor = (0 != GetConsoleScreenBufferInfo(_console, &consoleInfo)); _state = State.initial; } @nogc ~this() nothrow { // Restore initial text attributes on release if (_savedInitialColor) { SetConsoleTextAttribute(_console, consoleInfo.wAttributes); _savedInitialColor = false; } } enum CharAction { write, drop, flush } // Eat one character and update color state accordingly. 
// Returns what to do with the fed character. @nogc CharAction feed(dchar d) nothrow { final switch(_state) with (State) { case initial: if (d == '\x1B') { _state = escaped; return CharAction.flush; } break; case escaped: if (d == '[') { _state = readingAttribute; _parsedAttr = 0; return CharAction.drop; } break; case readingAttribute: if (d >= '0' && d <= '9') { _parsedAttr = _parsedAttr * 10 + (d - '0'); return CharAction.drop; } else if (d == ';') { executeAttribute(_parsedAttr); _parsedAttr = 0; return CharAction.drop; } else if (d == 'm') { executeAttribute(_parsedAttr); _state = State.initial; return CharAction.drop; } break; } return CharAction.write; } private: HANDLE _console; bool _savedInitialColor; CONSOLE_SCREEN_BUFFER_INFO consoleInfo; State _state; WORD _currentAttr; int _parsedAttr; enum State { initial, escaped, readingAttribute } @nogc void setForegroundColor(WORD fgFlags) nothrow { _currentAttr = _currentAttr & ~(FOREGROUND_BLUE | FOREGROUND_GREEN | FOREGROUND_RED | FOREGROUND_INTENSITY); _currentAttr = _currentAttr | fgFlags; SetConsoleTextAttribute(_console, _currentAttr); } @nogc void setBackgroundColor(WORD bgFlags) nothrow { _currentAttr = _currentAttr & ~(BACKGROUND_BLUE | BACKGROUND_GREEN | BACKGROUND_RED | BACKGROUND_INTENSITY); _currentAttr = _currentAttr | bgFlags; SetConsoleTextAttribute(_console, _currentAttr); } // resets to the same foreground color that was set on initialize() @nogc void resetForegroundColor() nothrow { if (!_savedInitialColor) return; _currentAttr = _currentAttr & ~(FOREGROUND_BLUE | FOREGROUND_GREEN | FOREGROUND_RED | FOREGROUND_INTENSITY); _currentAttr = _currentAttr | (consoleInfo.wAttributes & (FOREGROUND_BLUE | FOREGROUND_GREEN | FOREGROUND_RED | FOREGROUND_INTENSITY)); SetConsoleTextAttribute(_console, _currentAttr); } // resets to the same background color that was set on initialize() @nogc void resetBackgroundColor() nothrow { if (!_savedInitialColor) return; _currentAttr = _currentAttr & ~(BACKGROUND_BLUE 
| BACKGROUND_GREEN | BACKGROUND_RED | BACKGROUND_INTENSITY); _currentAttr = _currentAttr | (consoleInfo.wAttributes & (BACKGROUND_BLUE | BACKGROUND_GREEN | BACKGROUND_RED | BACKGROUND_INTENSITY)); SetConsoleTextAttribute(_console, _currentAttr); } @nogc void executeAttribute(int attr) nothrow { switch (attr) { case 0: // reset all attributes SetConsoleTextAttribute(_console, consoleInfo.wAttributes); break; default: if ( (30 <= attr && attr <= 37) || (90 <= attr && attr <= 97) ) { WORD color = 0; if (90 <= attr && attr <= 97) { color = FOREGROUND_INTENSITY; attr -= 60; } attr -= 30; color |= (attr & 1 ? FOREGROUND_RED : 0) | (attr & 2 ? FOREGROUND_GREEN : 0) | (attr & 4 ? FOREGROUND_BLUE : 0); setForegroundColor(color); } else if (attr == 39) // fg.init { resetForegroundColor(); } if ( (40 <= attr && attr <= 47) || (100 <= attr && attr <= 107) ) { WORD color = 0; if (100 <= attr && attr <= 107) { color = BACKGROUND_INTENSITY; attr -= 60; } attr -= 40; color |= (attr & 1 ? BACKGROUND_RED : 0) | (attr & 2 ? BACKGROUND_GREEN : 0) | (attr & 4 ? BACKGROUND_BLUE : 0); setBackgroundColor(color); } else if (attr == 49) // bg.init { resetBackgroundColor(); } } } } } dub-1.40.0/source/dub/internal/configy/000077500000000000000000000000001477246567400177205ustar00rootroot00000000000000dub-1.40.0/source/dub/internal/configy/Attributes.d000066400000000000000000000250651477246567400222230ustar00rootroot00000000000000/******************************************************************************* Define UDAs that can be applied to a configuration struct This module is stand alone (a leaf module) to allow importing the UDAs without importing the whole configuration parsing code. Copyright: Copyright (c) 2019-2022 BOSAGORA Foundation All rights reserved. License: MIT License. See LICENSE for details. 
*******************************************************************************/ module dub.internal.configy.Attributes; import std.traits; /******************************************************************************* An optional parameter with an initial value of `T.init` The config parser automatically recognize non-default initializer, so that the following: ``` public struct Config { public string greeting = "Welcome home"; } ``` Will not error out if `greeting` is not defined in the config file. However, this relies on the initializer of the field (`greeting`) being different from the type initializer (`string.init` is `null`). In some cases, the default value is also the desired initializer, e.g.: ``` public struct Config { /// Maximum number of connections. 0 means unlimited. public uint connections_limit = 0; } ``` In this case, one can add `@Optional` to the field to inform the parser. *******************************************************************************/ public struct Optional {} /******************************************************************************* Inform the config filler that this sequence is to be read as a mapping On some occasions, one might want to read a mapping as an array. One reason to do so may be to provide a better experience to the user, e.g. having to type: ``` interfaces: eth0: ip: "192.168.0.1" private: true wlan0: ip: "1.2.3.4" ``` Instead of the slightly more verbose: ``` interfaces: - name: eth0 ip: "192.168.0.1" private: true - name: wlan0 ip: "1.2.3.4" ``` The former would require to be expressed as an associative arrays. However, one major drawback of associative arrays is that they can't have an initializer, which makes them cumbersome to use in the context of the config filler. To remediate this issue, one may use `@Key("name")` on a field (here, `interfaces`) so that the mapping is flattened to an array. If `name` is `null`, the key will be discarded. 
*******************************************************************************/ public struct Key { /// public string name; } /******************************************************************************* Look up the provided name in the YAML node, instead of the field name. By default, the config filler will look up the field name of a mapping in the YAML node. If this is not desired, an explicit `Name` attribute can be given. This is especially useful for names which are keyword. ``` public struct Config { public @Name("delete") bool remove; } ``` *******************************************************************************/ public struct Name { /// public string name; /// public bool startsWith; } /// Short hand syntax public Name StartsWith(string name) @safe pure nothrow @nogc { return Name(name, true); } /******************************************************************************* A field which carries information about whether it was set or not Some configurations may need to know which fields were set explicitly while keeping defaults. An example of this is a `struct` where at least one field needs to be set, such as the following: ``` public struct ProtoDuration { public @Optional long weeks; public @Optional long days; public @Optional long hours; public @Optional long minutes; public long seconds = 42; public @Optional long msecs; public @Optional long usecs; public @Optional long hnsecs; public @Optional long nsecs; } ``` In this case, it would be impossible to know if any field was explicitly provided. Hence, the struct should be written as: ``` public struct ProtoDuration { public SetInfo!long weeks; public SetInfo!long days; public SetInfo!long hours; public SetInfo!long minutes; public SetInfo!long seconds = 42; public SetInfo!long msecs; public SetInfo!long usecs; public SetInfo!long hnsecs; public SetInfo!long nsecs; } ``` Note that `SetInfo` implies `Optional`, and supports default values. 
*******************************************************************************/ public struct SetInfo (T) { /*************************************************************************** Allow initialization as a field This sets the field as having been set, so that: ``` struct Config { SetInfo!Duration timeout; } Config myConf = { timeout: 10.minutes } ``` Will behave as if set explicitly. If this behavior is not wanted, pass `false` as second argument: ``` Config myConf = { timeout: SetInfo!Duration(10.minutes, false) } ``` ***************************************************************************/ public this (T initVal, bool isSet = true) @safe pure nothrow @nogc { this.value = initVal; this.set = isSet; } /// Underlying data public T value; /// alias value this; /// Whether this field was set or not public bool set; } /******************************************************************************* Provides a means to convert a field from a `Node` to a complex type When filling the config, it might be useful to store types which are not only simple `string` and integer, such as `URL`, `BigInt`, or any other library type not directly under the user's control. To allow reading those values from the config file, a `Converter` may be used. The converter will tell the `ConfigFiller` how to convert from `Node` to the desired type `T`. If the type is under the user's control, one can also add a constructor accepting a single string, or define the `fromString` method, both of which are tried if no `Converter` is found. For types not under the user's control, there might be different ways to parse the same type within the same struct, or neither the ctor nor the `fromString` method may be defined under that name. The exmaple below uses `parse` in place of `fromString`, for example. 
``` /// Complex structure representing the age of a person based on its birthday public struct Age { /// public uint birth_year; /// public uint birth_month; /// public uint birth_day; /// Note that this will be picked up automatically if named `fromString` /// but this struct might be a library type. public static Age parse (string value) { /+ Magic +/ } } public struct Person { /// @Converter!Age((Node value) => Age.parse(value.as!string)) public Age age; } ``` Note that some fields may also be of multiple YAML types, such as DUB's `dependencies`, which is either a simple string (`"vibe-d": "~>1.0 "`), or an in its complex form (`"vibe-d": { "version": "~>1.0" }`). For those use cases, a `Converter` is the best approach. To avoid repeating the field type, a convenience function is provided: ``` public struct Age { public uint birth_year; public uint birth_month; public uint birth_day; public static Age parse (string value) { /+ Magic +/ } } public struct Person { /// Here `converter` will deduct the type from the delegate argument, /// and return an instance of `Converter`. Mind the case. 
@converter((Node value) => Age.parse(value.as!string)) public Age age; } ``` *******************************************************************************/ public struct Converter (T) { /// public alias ConverterFunc = T function (scope ConfigParser!T context); /// public ConverterFunc converter; } /// Ditto public auto converter (FT) (FT func) { static assert(isFunctionPointer!FT, "Error: Argument to `converter` should be a function pointer, not: " ~ FT.stringof); alias RType = ReturnType!FT; static assert(!is(RType == void), "Error: Converter needs to be of the return type of the field, not `void`"); return Converter!RType(func); } /******************************************************************************* Interface that is passed to `fromYAML` hook The `ConfigParser` exposes the raw YAML node (`see `node` method), the path within the file (`path` method), and a simple ability to recurse via `parseAs`. Params: T = The type of the structure which defines a `fromYAML` hook *******************************************************************************/ public interface ConfigParser (T) { import dub.internal.dyaml.node; import dub.internal.configy.FieldRef : StructFieldRef; import dub.internal.configy.Read : Context, parseField; /// Returns: the node being processed public inout(Node) node () inout @safe pure nothrow @nogc; /// Returns: current location we are parsing public string path () const @safe pure nothrow @nogc; /*************************************************************************** Parse this struct as another type This allows implementing union-like behavior, where a `struct` which implements `fromYAML` can parse a simple representation as one type, and one more advanced as another type. 
Params: OtherType = The type to parse as defaultValue = The instance to use as a default value for fields ***************************************************************************/ public final auto parseAs (OtherType) (auto ref OtherType defaultValue = OtherType.init) { alias TypeFieldRef = StructFieldRef!OtherType; return this.node().parseField!(TypeFieldRef)( this.path(), defaultValue, this.context()); } /// Internal use only protected const(Context) context () const @safe pure nothrow @nogc; } dub-1.40.0/source/dub/internal/configy/DubTest.d000066400000000000000000000040521477246567400214400ustar00rootroot00000000000000/******************************************************************************* Contains tests for dub-specific extensions Whenever integrating changes from upstream configy, most conflicts tend to be on `configy.Test`, and as the structure is very similar, the default diff algorithms are useless. Having a separate module simplify this greatly. License: MIT License. See LICENSE for details. 
*******************************************************************************/ module dub.internal.configy.DubTest; import dub.internal.configy.Attributes; import dub.internal.configy.Read; import dub.internal.dyaml.node; /// Test name pattern matching unittest { static struct Config { @StartsWith("names") string[][string] names_; } auto c = parseConfigString!Config("names-x86:\n - John\n - Luca\nnames:\n - Marie", "/dev/null"); assert(c.names_[null] == [ "Marie" ]); assert(c.names_["x86"] == [ "John", "Luca" ]); } /// Test our `fromYAML` extension unittest { static struct PackageDef { string name; @Optional string target; int build = 42; } static struct Package { string path; PackageDef def; public static Package fromYAML (scope ConfigParser!Package parser) { if (parser.node.nodeID == NodeID.mapping) return Package(null, parser.parseAs!PackageDef); else return Package(parser.parseAs!string); } } static struct Config { string name; Package[] deps; } auto c = parseConfigString!Config( ` name: myPkg deps: - /foo/bar - name: foo target: bar build: 24 - name: fur - /one/last/path `, "/dev/null"); assert(c.name == "myPkg"); assert(c.deps.length == 4); assert(c.deps[0] == Package("/foo/bar")); assert(c.deps[1] == Package(null, PackageDef("foo", "bar", 24))); assert(c.deps[2] == Package(null, PackageDef("fur", null, 42))); assert(c.deps[3] == Package("/one/last/path")); } dub-1.40.0/source/dub/internal/configy/Exceptions.d000066400000000000000000000317001477246567400222070ustar00rootroot00000000000000/******************************************************************************* Definitions for Exceptions used by the config module. Copyright: Copyright (c) 2019-2022 BOSAGORA Foundation All rights reserved. License: MIT License. See LICENSE for details. 
*******************************************************************************/ module dub.internal.configy.Exceptions; import dub.internal.configy.Utils; import dub.internal.dyaml.exception; import dub.internal.dyaml.node; import std.algorithm : filter, map; import std.format; import std.string : soundexer; /******************************************************************************* Base exception type thrown by the config parser Whenever dealing with Exceptions thrown by the config parser, catching this type will allow to optionally format with colors: ``` try { auto conf = parseConfigFile!Config(cmdln); // ... } catch (ConfigException exc) { writeln("Parsing the config file failed:"); writelfln(isOutputATTY() ? "%S" : "%s", exc); } ``` *******************************************************************************/ public abstract class ConfigException : Exception { /// Position at which the error happened public Mark yamlPosition; /// The path at which the key resides public string path; /// If non-empty, the key under 'path' which triggered the error /// If empty, the key should be considered part of 'path' public string key; /// Constructor public this (string path, string key, Mark position, string file = __FILE__, size_t line = __LINE__) @safe pure nothrow @nogc { super(null, file, line); this.path = path; this.key = key; this.yamlPosition = position; } /// Ditto public this (string path, Mark position, string file = __FILE__, size_t line = __LINE__) @safe pure nothrow @nogc { this(path, null, position, file, line); } /*************************************************************************** Overrides `Throwable.toString` and its sink overload It is quite likely that errors from this module may be printed directly to the end user, who might not have technical knowledge. This format the error in a nicer format (e.g. with colors), and will additionally provide a stack-trace if the `ConfigFillerDebug` `debug` version was provided. 
Format_chars: The default format char ("%s") will print a regular message. If an uppercase 's' is used ("%S"), colors will be used. Params: sink = The sink to send the piece-meal string to spec = See https://dlang.org/phobos/std_format_spec.html ***************************************************************************/ public override string toString () scope { // Need to be overridden, otherwise the overload is shadowed return super.toString(); } /// Ditto public override void toString (scope void delegate(in char[]) sink) const scope @trusted { // This breaks the type system, as it blindly trusts a delegate // However, the type system lacks a way to sanely build an utility // which accepts a delegate with different qualifiers, so this is the // less evil approach. this.toString(cast(SinkType) sink, FormatSpec!char("%s")); } /// Ditto public void toString (scope SinkType sink, in FormatSpec!char spec) const scope @safe { import core.internal.string : unsignedToTempString; const useColors = spec.spec == 'S'; char[20] buffer = void; if (useColors) sink(Yellow); sink(this.yamlPosition.name); if (useColors) sink(Reset); sink("("); if (useColors) sink(Cyan); sink(unsignedToTempString(this.yamlPosition.line, buffer)); if (useColors) sink(Reset); sink(":"); if (useColors) sink(Cyan); sink(unsignedToTempString(this.yamlPosition.column, buffer)); if (useColors) sink(Reset); sink("): "); if (this.path.length || this.key.length) { if (useColors) sink(Yellow); sink(this.path); if (this.path.length && this.key.length) sink("."); sink(this.key); if (useColors) sink(Reset); sink(": "); } this.formatMessage(sink, spec); debug (ConfigFillerDebug) { sink("\n\tError originated from: "); sink(this.file); sink("("); sink(unsignedToTempString(line, buffer)); sink(")"); if (!this.info) return; () @trusted nothrow { try { sink("\n----------------"); foreach (t; info) { sink("\n"); sink(t); } } // ignore more errors catch (Throwable) {} }(); } } /// Hook called by `toString` to simplify 
coloring protected abstract void formatMessage ( scope SinkType sink, in FormatSpec!char spec) const scope @safe; } /// A configuration exception that is only a single message package final class ConfigExceptionImpl : ConfigException { public this (string msg, Mark position, string file = __FILE__, size_t line = __LINE__) @safe pure nothrow @nogc { this(msg, null, null, position, file, line); } public this (string msg, string path, string key, Mark position, string file = __FILE__, size_t line = __LINE__) @safe pure nothrow @nogc { super(path, key, position, file, line); this.msg = msg; } protected override void formatMessage ( scope SinkType sink, in FormatSpec!char spec) const scope @safe { sink(this.msg); } } /// Exception thrown when the type of the YAML node does not match the D type package final class TypeConfigException : ConfigException { /// The actual (in the YAML document) type of the node public string actual; /// The expected (as specified in the D type) type public string expected; /// Constructor public this (Node node, string expected, string path, string key = null, string file = __FILE__, size_t line = __LINE__) @safe nothrow { this(node.nodeTypeString(), expected, path, key, node.startMark(), file, line); } /// Ditto public this (string actual, string expected, string path, string key, Mark position, string file = __FILE__, size_t line = __LINE__) @safe pure nothrow @nogc { super(path, key, position, file, line); this.actual = actual; this.expected = expected; } /// Format the message with or without colors protected override void formatMessage ( scope SinkType sink, in FormatSpec!char spec) const scope @safe { const useColors = spec.spec == 'S'; const fmt = "Expected to be of type %s, but is a %s"; if (useColors) formattedWrite(sink, fmt, this.expected.paint(Green), this.actual.paint(Red)); else formattedWrite(sink, fmt, this.expected, this.actual); } } /// Similar to a `TypeConfigException`, but specific to `Duration` package final class 
DurationTypeConfigException : ConfigException { /// The list of valid fields public immutable string[] DurationSuffixes = [ "weeks", "days", "hours", "minutes", "seconds", "msecs", "usecs", "hnsecs", "nsecs", ]; /// Actual type of the node public string actual; /// Constructor public this (Node node, string path, string file = __FILE__, size_t line = __LINE__) @safe nothrow { super(path, null, node.startMark(), file, line); this.actual = node.nodeTypeString(); } /// Format the message with or without colors protected override void formatMessage ( scope SinkType sink, in FormatSpec!char spec) const scope @safe { const useColors = spec.spec == 'S'; const fmt = "Field is of type %s, but expected a mapping with at least one of: %-(%s, %)"; if (useColors) formattedWrite(sink, fmt, this.actual.paint(Red), this.DurationSuffixes.map!(s => s.paint(Green))); else formattedWrite(sink, fmt, this.actual, this.DurationSuffixes); } } /// Exception thrown when an unknown key is found in strict mode public class UnknownKeyConfigException : ConfigException { /// The list of valid field names public immutable string[] fieldNames; /// Constructor public this (string path, string key, immutable string[] fieldNames, Mark position, string file = __FILE__, size_t line = __LINE__) @safe pure nothrow @nogc { super(path, key, position, file, line); this.fieldNames = fieldNames; } /// Format the message with or without colors protected override void formatMessage ( scope SinkType sink, in FormatSpec!char spec) const scope @safe { const useColors = spec.spec == 'S'; // Try to find a close match, as the error is likely a typo // This is especially important when the config file has a large // number of fields, where the message is otherwise near-useless. const origSound = soundexer(this.key); auto matches = this.fieldNames.filter!(f => f.soundexer == origSound); const hasMatch = !matches.save.empty; if (hasMatch) { const fmt = "Key is not a valid member of this section. 
Did you mean: %-(%s, %)"; if (useColors) formattedWrite(sink, fmt, matches.map!(f => f.paint(Green))); else formattedWrite(sink, fmt, matches); } else { // No match, just print everything const fmt = "Key is not a valid member of this section. There are %s valid keys: %-(%s, %)"; if (useColors) formattedWrite(sink, fmt, this.fieldNames.length.paint(Yellow), this.fieldNames.map!(f => f.paint(Green))); else formattedWrite(sink, fmt, this.fieldNames.length, this.fieldNames); } } } /// Exception thrown when a required key is missing public class MissingKeyException : ConfigException { /// Constructor public this (string path, string key, Mark position, string file = __FILE__, size_t line = __LINE__) @safe pure nothrow @nogc { super(path, key, position, file, line); } /// Format the message with or without colors protected override void formatMessage ( scope SinkType sink, in FormatSpec!char spec) const scope @safe { sink("Required key was not found in configuration or command line arguments"); } } /// Wrap an user-thrown Exception that happened in a Converter/ctor/fromString public class ConstructionException : ConfigException { /// Constructor public this (Exception next, string path, Mark position, string file = __FILE__, size_t line = __LINE__) @safe pure nothrow @nogc { super(path, position, file, line); this.next = next; } /// Format the message with or without colors protected override void formatMessage ( scope SinkType sink, in FormatSpec!char spec) const scope @trusted { if (auto dyn = cast(ConfigException) this.next) dyn.toString(sink, spec); else sink(this.next.message); } } /// Thrown when an array read from config does not match a static array size public class ArrayLengthException : ConfigException { private size_t actual; private size_t expected; /// Constructor public this (size_t actual, size_t expected, string path, string key, Mark position, string file = __FILE__, size_t line = __LINE__) @safe pure nothrow @nogc { assert(actual != expected); 
this.actual = actual; this.expected = expected; super(path, key, position, file, line); } /// Format the message with or without colors protected override void formatMessage ( scope SinkType sink, in FormatSpec!char spec) const scope @trusted { import core.internal.string : unsignedToTempString; char[20] buffer = void; sink("Too "); sink((this.actual > this.expected) ? "many" : "few"); sink(" entries for sequence: Expected "); sink(unsignedToTempString(this.expected, buffer)); sink(", got "); sink(unsignedToTempString(this.actual, buffer)); } } dub-1.40.0/source/dub/internal/configy/FieldRef.d000066400000000000000000000162451477246567400215550ustar00rootroot00000000000000/******************************************************************************* Implement a template to keep track of a field references Passing field references by `alias` template parameter creates many problem, and is extremely cumbersome to work with. Instead, we pass an instance of a `FieldRef` around, which also contains structured information. Copyright: Copyright (c) 2019-2022 BOSAGORA Foundation All rights reserved. License: MIT License. See LICENSE for details. *******************************************************************************/ module dub.internal.configy.FieldRef; // Renamed imports as the names exposed by `FieldRef` shadow the imported ones. import dub.internal.configy.Attributes : CAName = Name, CAOptional = Optional, SetInfo; import std.meta; import std.traits; /******************************************************************************* A reference to a field in a `struct` The compiler sometimes rejects passing fields by `alias`, or complains about missing `this` (meaning it tries to evaluate the value). Sometimes, it also discards the UDAs. To prevent this from happening, we always pass around a `FieldRef`, which wraps the parent struct type (`T`), the name of the field as `FieldName`, and other information. 
To avoid any issue, eponymous usage is also avoided, hence the reference needs to be accessed using `Ref`. *******************************************************************************/ package template FieldRef (alias T, string name, bool forceOptional = false) { /// The reference to the field public alias Ref = __traits(getMember, T, name); /// Type of the field public alias Type = typeof(Ref); /// The name of the field in the struct itself public alias FieldName = name; /// The name used in the configuration field (taking `@Name` into account) static if (hasUDA!(Ref, CAName)) { static assert (getUDAs!(Ref, CAName).length == 1, "Field `" ~ fullyQualifiedName!(Ref) ~ "` cannot have more than one `Name` attribute"); public immutable Name = getUDAs!(Ref, CAName)[0].name; public immutable Pattern = getUDAs!(Ref, CAName)[0].startsWith; } else { public immutable Name = FieldName; public immutable Pattern = false; } /// Default value of the field (may or may not be `Type.init`) public enum Default = __traits(getMember, T.init, name); /// Evaluates to `true` if this field is to be considered optional /// (does not need to be present in the YAML document) static if (forceOptional || hasUDA!(Ref, CAOptional)) public enum Optional = true; // Booleans are always optional else static if (is(immutable(Type) == immutable(bool))) public enum Optional = true; // A mandatory SetInfo would not make sense else static if (is(Type : SetInfo!FT, FT)) public enum Optional = true; // Use `is` to avoid calling `opEquals` which might not be CTFEable, // except for static arrays as that triggers a deprecation warning. 
else static if (is(Type : E[k], E, size_t k)) public enum Optional = (Default[] !is Type.init[]); else public enum Optional = (Default !is Type.init); } unittest { import dub.internal.configy.Attributes : Name; static struct Config1 { int integer2 = 42; @Name("notStr2") @(42) string str2; } static struct Config2 { Config1 c1dup = { 42, "Hello World" }; string message = "Something"; } static struct Config3 { Config1 c1; int integer; string str; Config2 c2 = { c1dup: { integer2: 69 } }; } static assert(is(FieldRef!(Config3, "c2").Type == Config2)); static assert(FieldRef!(Config3, "c2").Default != Config2.init); static assert(FieldRef!(Config2, "message").Default == Config2.init.message); alias NFR1 = FieldRef!(Config3, "c2"); alias NFR2 = FieldRef!(NFR1.Ref, "c1dup"); alias NFR3 = FieldRef!(NFR2.Ref, "integer2"); alias NFR4 = FieldRef!(NFR2.Ref, "str2"); static assert(hasUDA!(NFR4.Ref, int)); static assert(FieldRefTuple!(Config3)[1].Name == "integer"); static assert(FieldRefTuple!(FieldRefTuple!(Config3)[0].Type)[1].Name == "notStr2"); } /** * A pseudo `FieldRef` used for structs which are not fields (top-level) * * Params: * ST = Type for which this pseudo-FieldRef is * DefaultName = A name to give to this FieldRef, default to `null`, * but required to prevent forward references in `parseAs`. */ package template StructFieldRef (ST, string DefaultName = null) { /// public enum Ref = ST.init; /// public alias Type = ST; /// public enum Default = ST.init; /// public enum Optional = false; /// Some places reference their parent's Name / FieldName public enum Name = DefaultName; /// Ditto public enum FieldName = DefaultName; } /// A pseudo `FieldRef` for nested types (e.g. 
arrays / associative arrays) package template NestedFieldRef (ElemT, alias FR) { /// public enum Ref = ElemT.init; /// public alias Type = ElemT; /// public enum Name = FR.Name; /// public enum FieldName = FR.FieldName; /// Element or keys are never optional public enum Optional = false; } /// Get a tuple of `FieldRef` from a `struct` package template FieldRefTuple (T) { static assert(is(T == struct), "Argument " ~ T.stringof ~ " to `FieldRefTuple` should be a `struct`"); /// static if (__traits(getAliasThis, T).length == 0) public alias FieldRefTuple = staticMap!(Pred, FieldNameTuple!T); else { /// Tuple of strings of aliased fields /// As of DMD v2.100.0, only a single alias this is supported in D. private immutable AliasedFieldNames = __traits(getAliasThis, T); static assert(AliasedFieldNames.length == 1, "Multiple `alias this` are not supported"); // Ignore alias to functions (if it's a property we can't do anything) static if (isSomeFunction!(__traits(getMember, T, AliasedFieldNames))) public alias FieldRefTuple = staticMap!(Pred, FieldNameTuple!T); else { /// "Base" field names minus aliased ones private immutable BaseFields = Erase!(AliasedFieldNames, FieldNameTuple!T); static assert(BaseFields.length == FieldNameTuple!(T).length - 1); public alias FieldRefTuple = AliasSeq!( staticMap!(Pred, BaseFields), FieldRefTuple!(typeof(__traits(getMember, T, AliasedFieldNames)))); } } private alias Pred (string name) = FieldRef!(T, name); } /// Returns: An alias sequence of field names, taking UDAs (`@Name` et al) into account package alias FieldsName (T) = staticMap!(FieldRefToName, FieldRefTuple!T); /// Helper template for `staticMap` used for strict mode private enum FieldRefToName (alias FR) = FR.Name; /// Dub extension package enum IsPattern (alias FR) = FR.Pattern; /// Dub extension package alias Patterns (T) = staticMap!(FieldRefToName, Filter!(IsPattern, FieldRefTuple!T)); 
dub-1.40.0/source/dub/internal/configy/Read.d000066400000000000000000001224271477246567400207500ustar00rootroot00000000000000/*******************************************************************************

    Utilities to fill a struct representing the configuration with the content
    of a YAML document.

    The main function of this module is `parseConfig`. Convenience functions
    `parseConfigString` and `parseConfigFile` are also available.

    The type parameter to those three functions must be a struct and is used
    to drive the processing of the YAML node. When an error is encountered,
    an `Exception` will be thrown, with a descriptive message.
    The rules by which the struct is filled are designed to be
    as intuitive as possible, and are described below.

    Optional_Fields:
      One of the major conveniences offered by this utility is its handling
      of optional fields. A field is detected as optional if it has
      an initializer that is different from its type `init` value,
      for example `string field = "Something";` is an optional field,
      but `int count = 0;` is not.
      To mark a field as optional even with its default value,
      use the `Optional` UDA: `@Optional int count = 0;`.

    fromYAML:
      Because config structs may contain complex types outside of the project's
      control (e.g. a Phobos type, Vibe.d's `URL`, etc...) or one may want
      the config format to be more dynamic (e.g. by exposing union-like
      behavior), one may need to apply more custom logic than what Configy
      does. For this use case, one can define a `fromYAML` static method
      in the type: `static S fromYAML(scope ConfigParser!S parser)`,
      where `S` is the type of the enclosing structure.
      Structs with `fromYAML` will have this method called instead of going
      through the normal parsing rules. The `ConfigParser` exposes the current
      path of the field, as well as the raw YAML `Node` itself, allowing for
      maximum flexibility.

    Composite_Types:
      Processing starts from a `struct` at the top level, and recurses into
      every field individually.
      If a field is itself a struct, the filler will attempt the following,
      in order:
      - If the field has no value and is not optional, an Exception will
        be thrown with an error message detailing where the issue happened.
      - If the field has no value and is optional, the default value will
        be used.
      - If the field has a value, the filler will first check for a converter
        and use it if present.
      - If the type has a `static` method named `fromString` whose sole
        argument is a `string`, it will be used.
      - If the type has a constructor whose sole argument is a `string`,
        it will be used;
      - Finally, the filler will attempt to deserialize all struct members
        one by one and pass them to the default constructor, if there is any.
      - If none of the above succeeded, a `static assert` will trigger.

    Alias_this:
      If a `struct` contains an `alias this`, the field that is aliased will
      be ignored; instead, the config parser will parse nested fields as if
      they were part of the enclosing structure. This allows re-using a single
      `struct` in multiple places without having to resort to a
      `mixin template`.
      Having an initializer will make all fields in the aliased struct
      optional.
      The aliased field cannot have attributes other than `@Optional`,
      which will then apply to all fields it exposes.

    Duration_parsing:
      If the config field is of type `core.time.Duration`, special parsing
      rules will apply. There are two possible forms in which a Duration
      field may be expressed. In the first form, the YAML node should be
      a mapping, and it will be checked for fields matching the supported
      units in `core.time`: `weeks`, `days`, `hours`, `minutes`, `seconds`,
      `msecs`, `usecs`, `hnsecs`, `nsecs`. The strict parsing option will
      be respected.
      The values of the fields will then be added together, so the following
      YAML usages are equivalent:
      ---
      // sleepFor:
      //   hours: 8
      //   minutes: 30
      ---
      and:
      ---
      // sleepFor:
      //   minutes: 510
      ---
      Provided that the definition of the field is:
      ---
      public Duration sleepFor;
      ---

      In the second form, the field should have a suffix composed of an
      underscore ('_'), followed by a unit name as defined in `core.time`.
      This can be either the field name directly, or a name override.
      The latter is recommended to avoid confusion when using the field in code.
      In this form, the YAML node is expected to be a scalar.
      So the previous example, using this form, would be expressed as:
      ---
      sleepFor_minutes: 510
      ---
      and the field definition should be one of those two:
      ---
      public @Name("sleepFor_minutes") Duration sleepFor; /// Prefer this
      public Duration sleepFor_minutes; /// This works too
      ---

      Those forms are mutually exclusive, so a field with a unit suffix
      will error out if a mapping is used. This prevents surprises and ensures
      that the error message, if any, is consistent across user input.

      To disable or change this behavior, one may use a `Converter` instead.

    Strict_Parsing:
      When strict parsing is enabled, the config filler will also validate
      that the YAML nodes do not contain entries which are not present in the
      mapping (struct) being processed.
      This can be useful to catch typos or outdated configuration options.

    Post_Validation:
      Some configurations will require validation across multiple sections.
      For example, two sections may be mutually exclusive as a whole,
      or may have fields which are mutually exclusive with another section's
      field(s). This kind of dependence is hard to account for declaratively,
      and does not affect parsing. For this reason, the preferred way to
      handle those cases is to define a `validate` member method on the
      affected config struct(s), which will be called once parsing for that
      mapping is completed.
      If an error is detected, this method should throw an Exception.

    Enabled_or_disabled_field:
      While most complex logic validation should be handled post-parsing,
      some sections may be optional by default, but if provided, will have
      required fields. To support this use case, if a field with the name
      `enabled` is present in a struct, the parser will first process it.
      If it is `false`, the parser will not attempt to process the struct
      further, and the other fields will have their default value.
      Likewise, if a field named `disabled` exists, the struct will not
      be processed if it is set to `true`.

    Copyright:
        Copyright (c) 2019-2022 BOSAGORA Foundation
        All rights reserved.

    License:
        MIT License. See LICENSE for details.

*******************************************************************************/

module dub.internal.configy.Read;

public import dub.internal.configy.Attributes;
public import dub.internal.configy.Exceptions : ConfigException;
import dub.internal.configy.Exceptions;
import dub.internal.configy.FieldRef;
import dub.internal.configy.Utils;

import dub.internal.dyaml.exception;
import dub.internal.dyaml.node;
import dub.internal.dyaml.loader;

import std.algorithm;
import std.conv;
import std.datetime;
import std.format;
import std.getopt;
import std.meta;
import std.range;
import std.traits;
import std.typecons : Nullable, nullable, tuple;

static import core.time;

// Dub-specific adjustments for output
import dub.internal.logging;

/// Command-line arguments
public struct CLIArgs
{
    /// Path to the config file
    public string config_path = "config.yaml";

    /// Overrides for config options
    public string[][string] overrides;

    /// Helper to add items to `overrides`
    /// Values are `key=value` pairs; entries with no '=' are silently ignored.
    public void overridesHandler (string, string value)
    {
        import std.string;
        const idx = value.indexOf('=');
        if (idx < 0)
            return;
        string k = value[0 .. idx], v = value[idx + 1 .. $];
        // Repeated overrides for the same key accumulate (arrays are additive)
        if (auto val = k in this.overrides)
            (*val) ~= v;
        else
            this.overrides[k] = [ v ];
    }

    /***************************************************************************

        Parses the base command line arguments

        This can be composed with the program argument.
        For example, consider a program which wants to expose a `--version`
        switch, the definition could look like this:
        ---
        public struct ProgramCLIArgs
        {
            public CLIArgs base; // This struct
            public alias base this; // For convenience
            public bool version_; // Program-specific part
        }
        ---
        Then, an application-specific configuration routine would be:
        ---
        public GetoptResult parse (ref ProgramCLIArgs clargs, ref string[] args)
        {
            auto r = clargs.base.parse(args);
            if (r.helpWanted) return r;
            return getopt(
                args,
                "version", "Print the application version", &clargs.version_);
        }
        ---

        Params:
          args = The command line args to parse (parsed options will be removed)
          passThrough = Whether to enable `config.passThrough` and
                        `config.keepEndOfOptions`. `true` by default, to allow
                        composability. If your program doesn't have other
                        arguments, pass `false`.

        Returns:
          The result of calling `getopt`

    ***************************************************************************/

    public GetoptResult parse (ref string[] args, bool passThrough = true)
    {
        return getopt(
            args,
            // `caseInsensitive` is the default, but we need something
            // with the same type for the ternary
            passThrough ? config.keepEndOfOptions : config.caseInsensitive,
            // Also the default, same reasoning
            passThrough ? config.passThrough : config.noPassThrough,
            "config|c",
                "Path to the config file. Defaults to: " ~ this.config_path,
                &this.config_path,
            "override|O",
                "Override a config file value\n" ~
                "Example: -O foo.bar=true -o dns=1.1.1.1 -o dns=2.2.2.2\n" ~
                "Array values are additive, other items are set to the last override",
                &this.overridesHandler,
        );
    }
}

/*******************************************************************************

    Attempt to read and process the config file at `path`, print any error

    This 'simple' overload of the more detailed `parseConfigFile` will attempt
    to read the file at `path`, and return a `Nullable` instance of it.
    If an error happens, either because the file isn't readable or
    the configuration has an issue, a message will be printed to `stderr`,
    with colors if the output is a TTY, and a `null` instance will be returned.

    The calling code can hence just read a config file via:
    ```
    int main ()
    {
        auto configN = parseConfigFileSimple!Config("config.yaml");
        if (configN.isNull()) return 1; // Error path
        auto config = configN.get();
        // Rest of the program ...
    }
    ```
    An overload accepting `CLIArgs args` also exists.

    Params:
      path = Path of the file to read from
      args = Command line arguments on which `parse` has been called
      strict = Whether the parsing should reject unknown keys in the
               document, warn, or ignore them (default: `StrictMode.Error`)

    Returns:
      An initialized `Config` instance if reading/parsing was successful;
      a `null` instance otherwise.
*******************************************************************************/

public Nullable!T parseConfigFileSimple (T) (string path, StrictMode strict = StrictMode.Error)
{
    return parseConfigFileSimple!(T)(CLIArgs(path), strict);
}

/// Ditto
public Nullable!T parseConfigFileSimple (T) (in CLIArgs args, StrictMode strict = StrictMode.Error)
{
    return wrapException(parseConfigFile!T(args, strict));
}

/// Ditto
/// Evaluates `parseCall` lazily, converting any thrown `Exception` into a
/// printed message and a null `Nullable` instead of propagating it.
public Nullable!T wrapException (T) (lazy T parseCall)
{
    try
        return nullable(parseCall);
    catch (ConfigException exc)
    {
        exc.printException();
        return typeof(return).init;
    }
    catch (Exception exc)
    {
        // Other Exception type may be thrown by D-YAML,
        // they won't include rich information.
        logWarn("%s", exc.message());
        return typeof(return).init;
    }
}

/*******************************************************************************

    Print an Exception, potentially with colors on

    Trusted because of `stderr` usage.

*******************************************************************************/

private void printException (scope ConfigException exc) @trusted
{
    import dub.internal.logging;

    // `%S` triggers the exception's rich (colored) formatting
    if (hasColors)
        logWarn("%S", exc);
    else
        logWarn("%s", exc.message());
}

/*******************************************************************************

    Parses the config file or string and returns a `Config` instance.

    Params:
      cmdln = command-line arguments (containing the path to the config)
      path = When parsing a string, the path corresponding to it
      strict = Whether the parsing should reject unknown keys in the
               document, warn, or ignore them (default: `StrictMode.Error`)

    Throws:
      `Exception` if parsing the config file failed.
    Returns:
      `Config` instance

*******************************************************************************/

public T parseConfigFile (T) (in CLIArgs cmdln, StrictMode strict = StrictMode.Error)
{
    Node root = Loader.fromFile(cmdln.config_path).load();
    return parseConfig!T(cmdln, root, strict);
}

/// ditto
public T parseConfigString (T) (string data, string path, StrictMode strict = StrictMode.Error)
{
    // `path` is only used for error reporting, no file is read here
    CLIArgs cmdln = { config_path: path };
    auto loader = Loader.fromString(data, path);
    Node root = loader.load();
    return parseConfig!T(cmdln, root, strict);
}

/*******************************************************************************

    Process the content of the YAML document described by `node` into an
    instance of the struct `T`.

    See the module description for a complete overview of this function.

    Params:
      T = Type of the config struct to fill
      cmdln = Command line arguments
      node = The root node matching `T`
      strict = Action to take when encountering unknown keys in the document

    Returns:
      An instance of `T` filled with the content of `node`

    Throws:
      If the content of `node` cannot satisfy the requirements set by `T`,
      or if `node` contains extra fields and `strict` is `true`.

*******************************************************************************/

public T parseConfig (T) (
    in CLIArgs cmdln, Node node, StrictMode strict = StrictMode.Error)
{
    static assert(is(T == struct), "`" ~ __FUNCTION__ ~
                  "` should only be called with a `struct` type as argument, not: `" ~
                  fullyQualifiedName!T ~ "`");

    // Only a mapping can be a document root for a struct-typed config
    final switch (node.nodeID)
    {
    case NodeID.mapping:
        dbgWrite("Parsing config '%s', strict: %s",
                 fullyQualifiedName!T,
                 strict == StrictMode.Warn ?
                     strict.paint(Yellow) : strict.paintIf(!!strict, Green, Red));
        return node.parseField!(StructFieldRef!T)(
            null, T.init, const(Context)(cmdln, strict));

    case NodeID.sequence:
    case NodeID.scalar:
    case NodeID.invalid:
        throw new TypeConfigException(node, "mapping (object)", "document root");
    }
}

/*******************************************************************************

    The behavior to have when encountering a field in YAML not present
    in the config definition.

*******************************************************************************/

public enum StrictMode
{
    /// Issue an error by throwing an `UnknownKeyConfigException`
    Error = 0,
    /// Write a message to `stderr`, but continue processing the file
    Warn = 1,
    /// Be silent and do nothing
    Ignore = 2,
}

/// Used to pass around configuration
package struct Context
{
    /// Command-line arguments the parse was started with (path & overrides)
    private CLIArgs cmdln;

    /// How to react to keys present in YAML but absent from the struct
    private StrictMode strict;
}

/*******************************************************************************

    Parse a mapping from `node` into an instance of `T`

    Params:
      TLFR = Top level field reference for this mapping
      node = The YAML node object matching the struct being read
      path = The runtime path to this mapping, used for nested types
      defaultValue = The default value to use for `T`, which can be different
                     from `T.init` when recursing into fields with initializers.
      ctx = A context where properties that need to be conserved during
            recursion are stored
      fieldDefaults = Default value for some fields, used for `Key` recursion

*******************************************************************************/

private TLFR.Type parseMapping (alias TLFR)
    (Node node, string path, auto ref TLFR.Type defaultValue,
     in Context ctx, in Node[string] fieldDefaults)
{
    static assert(is(TLFR.Type == struct), "`parseMapping` called with wrong type (should be a `struct`)");
    assert(node.nodeID == NodeID.mapping, "Internal error: parseMapping shouldn't have been called");

    dbgWrite("%s: `parseMapping` called for '%s' (node entries: %s)",
             TLFR.Type.stringof.paint(Cyan), path.paint(Cyan),
             node.length.paintIf(!!node.length, Green, Red));

    // Reject a `@Name` that re-uses the name of another (non-function) field:
    // such shadowing would make it ambiguous which field a YAML key targets
    static foreach (FR; FieldRefTuple!(TLFR.Type))
    {
        static if (FR.Name != FR.FieldName && hasMember!(TLFR.Type, FR.Name) &&
                   !is(typeof(mixin("TLFR.Type.", FR.Name)) == function))
            static assert (FieldRef!(TLFR.Type, FR.Name).Name != FR.Name,
                           "Field `" ~ FR.FieldName ~ "` `@Name` attribute shadows field `" ~
                           FR.Name ~ "` in `" ~ TLFR.Type.stringof ~
                           "`: Add a `@Name` attribute to `" ~ FR.Name ~
                           "` or change that of `" ~ FR.FieldName ~ "`");
    }

    if (ctx.strict != StrictMode.Ignore)
    {
        /// First, check that all the sections found in the mapping are present in the type
        /// If not, the user might have made a typo.
        immutable string[] fieldNames = [ FieldsName!(TLFR.Type) ];
        immutable string[] patterns = [ Patterns!(TLFR.Type) ];
        FIELD: foreach (const ref Node key, const ref Node value; node)
        {
            const k = key.as!string;
            if (!fieldNames.canFind(k))
            {
                foreach (p; patterns)
                    if (k.startsWith(p))
                        // Require length because `0` would match `canFind`
                        // and we don't want to allow `$PATTERN-`
                        if (k[p.length .. $].length > 1 && k[p.length] == '-')
                            continue FIELD;

                if (ctx.strict == StrictMode.Warn)
                {
                    scope exc = new UnknownKeyConfigException(
                        path, key.as!string, fieldNames, key.startMark());
                    exc.printException();
                }
                else
                    throw new UnknownKeyConfigException(
                        path, key.as!string, fieldNames, key.startMark());
            }
        }
    }

    const enabledState = node.isMappingEnabled!(TLFR.Type)(defaultValue);

    if (enabledState.field != EnabledState.Field.None)
        dbgWrite("%s: Mapping is enabled: %s", TLFR.Type.stringof.paint(Cyan), (!!enabledState).paintBool());

    // Produces the value for a single (non-`alias this`) field:
    // handles disabled mappings, `Key` defaults, `@Pattern` expansion,
    // explicit YAML values, optional defaults, and missing-key errors.
    auto convertField (alias FR) ()
    {
        static if (FR.Name != FR.FieldName)
            dbgWrite("Field name `%s` will use YAML field `%s`",
                     FR.FieldName.paint(Yellow), FR.Name.paint(Green));
        // Using exact type here matters: we could get a qualified type
        // (e.g. `immutable(string)`) if the field is qualified,
        // which causes problems.
        FR.Type default_ = __traits(getMember, defaultValue, FR.FieldName);

        // If this struct is disabled, do not attempt to parse anything besides
        // the `enabled` / `disabled` field.
        if (!enabledState)
        {
            // Even this is too noisy
            version (none) dbgWrite("%s: %s field of disabled struct, default: %s",
                                    path.paint(Cyan), "Ignoring".paint(Yellow), default_);

            static if (FR.Name == "enabled")
                return false;
            else static if (FR.Name == "disabled")
                return true;
            else
                return default_;
        }

        if (auto ptr = FR.FieldName in fieldDefaults)
        {
            dbgWrite("Found %s (%s.%s) in `fieldDefaults`",
                     FR.Name.paint(Cyan), path.paint(Cyan), FR.FieldName.paint(Cyan));

            if (ctx.strict && FR.FieldName in node)
                throw new ConfigExceptionImpl("'Key' field is specified twice", path, FR.FieldName, node.startMark());
            return (*ptr).parseField!(FR)(path.addPath(FR.FieldName), default_, ctx)
                .dbgWriteRet("Using value '%s' from fieldDefaults for field '%s'",
                             FR.FieldName.paint(Cyan));
        }

        // This, `FR.Pattern`, and the field in `@Name` are special support for `dub`
        static if (FR.Pattern)
        {
            static if (is(FR.Type : V[K], K, V))
            {
                alias AAFieldRef = NestedFieldRef!(V, FR);
                static assert(is(K : string), "Key type should be string-like");
            }
            else
                static assert(0, "Cannot have pattern on non-AA field");

            // Collect every YAML key matching `NAME` or `NAME-<suffix>`
            // into an AA keyed by the suffix (empty for the bare name)
            AAFieldRef.Type[string] result;
            foreach (pair; node.mapping)
            {
                const key = pair.key.as!string;
                if (!key.startsWith(FR.Name))
                    continue;
                string suffix = key[FR.Name.length .. $];
                if (suffix.length)
                {
                    if (suffix[0] == '-')
                        suffix = suffix[1 .. $];
                    else
                        continue;
                }

                result[suffix] = pair.value.parseField!(AAFieldRef)(
                    path.addPath(key), default_.get(key, AAFieldRef.Type.init), ctx);
            }
            // NOTE(review): runtime-conditional return presumably avoids an
            // unreachable-code diagnostic for the code below — confirm
            bool hack = true;
            if (hack) return result;
        }

        if (auto ptr = FR.Name in node)
        {
            dbgWrite("%s: YAML field is %s in node%s",
                     FR.Name.paint(Cyan), "present".paint(Green),
                     (FR.Name == FR.FieldName ? "" : " (note that field name is overriden)").paint(Yellow));
            return (*ptr).parseField!(FR)(path.addPath(FR.Name), default_, ctx)
                .dbgWriteRet("Using value '%s' from YAML document for field '%s'",
                             FR.FieldName.paint(Cyan));
        }

        dbgWrite("%s: Field is %s from node%s",
                 FR.Name.paint(Cyan), "missing".paint(Red),
                 (FR.Name == FR.FieldName ? "" : " (note that field name is overriden)").paint(Yellow));

        // A field is considered optional if it has an initializer that is different
        // from its default value, or if it has the `Optional` UDA.
        // In that case, just return this value.
        static if (FR.Optional)
            return default_
                .dbgWriteRet("Using default value '%s' for optional field '%s'", FR.FieldName.paint(Cyan));

        // The field is not present, but it could be because it is an optional section.
        // For example, the section could be defined as:
        // ---
        // struct RequestLimit { size_t reqs = 100; }
        // struct Config { RequestLimit limits; }
        // ---
        // In this case we need to recurse into `RequestLimit` to check if any
        // of its field is required.
        else static if (mightBeOptional!FR)
        {
            // Recurse on an empty mapping: throws if any nested field is required
            const npath = path.addPath(FR.Name);
            string[string] aa;
            return Node(aa).parseMapping!(FR)(npath, default_, ctx, null);
        }
        else
            throw new MissingKeyException(path, FR.Name, node.startMark());
    }

    // Per-field entry point: expands an `alias this` target field-wise
    // before delegating to `convertField` for regular fields
    FR.Type convert (alias FR) ()
    {
        static if (__traits(getAliasThis, TLFR.Type).length == 1 &&
                   __traits(getAliasThis, TLFR.Type)[0] == FR.FieldName)
        {
            static assert(FR.Name == FR.FieldName,
                          "Field `" ~ fullyQualifiedName!(FR.Ref) ~
                          "` is the target of an `alias this` and cannot have a `@Name` attribute");
            static assert(!hasConverter!(FR.Ref),
                          "Field `" ~ fullyQualifiedName!(FR.Ref) ~
                          "` is the target of an `alias this` and cannot have a `@Converter` attribute");

            alias convertW(string FieldName) = convert!(FieldRef!(FR.Type, FieldName, FR.Optional));
            return FR.Type(staticMap!(convertW, FieldNameTuple!(FR.Type)));
        }
        else
            return convertField!(FR)();
    }

    debug (ConfigFillerDebug)
    {
        indent++;
        scope (exit) indent--;
    }

    // Runs the user-provided `validate()` hook, if any, on the built result;
    // skipped when the mapping is disabled
    TLFR.Type doValidation (TLFR.Type result)
    {
        static if (is(typeof(result.validate())))
        {
            if (enabledState)
            {
                dbgWrite("%s: Calling `%s` method", TLFR.Type.stringof.paint(Cyan),
                         "validate()".paint(Green));
                result.validate();
            }
            else
            {
                dbgWrite("%s: Ignoring `%s` method on disabled mapping",
                         TLFR.Type.stringof.paint(Cyan), "validate()".paint(Green));
            }
        }
        else if (enabledState)
            dbgWrite("%s: No `%s` method found", TLFR.Type.stringof.paint(Cyan),
                     "validate()".paint(Yellow));

        return result;
    }

    // This might trigger things like "`this` is not accessible".
    // In this case, the user most likely needs to provide a converter.
    alias convertWrapper(string FieldName) = convert!(FieldRef!(TLFR.Type, FieldName));
    return doValidation(TLFR.Type(staticMap!(convertWrapper, FieldNameTuple!(TLFR.Type))));
}

/*******************************************************************************

    Parse a field, trying to match up the compile-time expectation with
    the run time value of the Node (`nodeID`).

    This is the central point which does "type conversion", from the YAML node
    to the field type. Whenever adding support for a new type, things should
    happen here.

    Because a `struct` can be filled from either a mapping or a scalar,
    this function will first try the converter / fromString / string ctor
    methods before defaulting to field-wise construction.

    Note that optional fields are checked before recursion happens,
    so this method does not do this check.
*******************************************************************************/

package FR.Type parseField (alias FR)
    (Node node, string path, auto ref FR.Type defaultValue, in Context ctx)
{
    if (node.nodeID == NodeID.invalid)
        throw new TypeConfigException(node, "valid", path);

    // If we reached this, it means the field is set, so just recurse
    // to peel the type
    static if (is(FR.Type : SetInfo!FT, FT))
        return FR.Type(
            parseField!(FieldRef!(FR.Type, "value"))(node, path, defaultValue, ctx),
            true);

    // User-provided hooks take precedence over structural parsing
    else static if (hasConverter!(FR.Ref))
        return wrapException(node.viaConverter!(FR)(path, ctx), path, node.startMark());

    else static if (hasFromYAML!(FR.Type))
    {
        scope impl = new ConfigParserImpl!(FR.Type)(node, path, ctx);
        return wrapException(FR.Type.fromYAML(impl), path, node.startMark());
    }

    else static if (hasFromString!(FR.Type))
        return wrapException(FR.Type.fromString(node.as!string), path, node.startMark());

    else static if (hasStringCtor!(FR.Type))
        return wrapException(FR.Type(node.as!string), path, node.startMark());

    else static if (is(immutable(FR.Type) == immutable(core.time.Duration)))
    {
        if (node.nodeID != NodeID.mapping)
            throw new DurationTypeConfigException(node, path);
        return node.parseMapping!(StructFieldRef!DurationMapping)(
            path, DurationMapping.make(defaultValue), ctx, null).opCast!Duration;
    }

    else static if (is(FR.Type == struct))
    {
        if (node.nodeID != NodeID.mapping)
            throw new TypeConfigException(node, "mapping (object)", path);
        return node.parseMapping!(FR)(path, defaultValue, ctx, null);
    }

    // Handle string early as they match the sequence rule too
    else static if (isSomeString!(FR.Type))
        // Use `string` type explicitly because `Variant` thinks
        // `immutable(char)[]` (aka `string`) and `immutable(char[])`
        // (aka `immutable(string)`) are not compatible.
        return node.parseScalar!(string)(path);
    // Enum too, as their base type might be an array (including strings)
    else static if (is(FR.Type == enum))
        return node.parseScalar!(FR.Type)(path);

    else static if (is(FR.Type : E[K], E, K))
    {
        if (node.nodeID != NodeID.mapping)
            throw new TypeConfigException(node, "mapping (associative array)", path);
        // Note: As of June 2022 (DMD v2.100.0), associative arrays cannot
        // have initializers, hence their UX for config is less optimal.
        return node.mapping().map!(
                (Node.Pair pair) {
                    return tuple(
                        pair.key.get!K,
                        pair.value.parseField!(NestedFieldRef!(E, FR))(
                            format("%s[%s]", path, pair.key.as!string), E.init, ctx));
                }).assocArray();
    }
    else static if (is(FR.Type : E[], E))
    {
        static if (hasUDA!(FR.Ref, Key))
        {
            static assert(getUDAs!(FR.Ref, Key).length == 1,
                "`" ~ fullyQualifiedName!(FR.Ref) ~
                "` field shouldn't have more than one `Key` attribute");
            static assert(is(E == struct),
                "Field `" ~ fullyQualifiedName!(FR.Ref) ~
                "` has a `Key` attribute, but is a sequence of `" ~
                fullyQualifiedName!E ~ "`, not a sequence of `struct`");

            string key = getUDAs!(FR.Ref, Key)[0].name;

            if (node.nodeID != NodeID.mapping && node.nodeID != NodeID.sequence)
                throw new TypeConfigException(node, "mapping (object) or sequence", path);

            // `@Key` allows a mapping form: each mapping key becomes the
            // value of the named field in the resulting element
            if (node.nodeID == NodeID.mapping) return node.mapping().map!(
                (Node.Pair pair) {
                    if (pair.value.nodeID != NodeID.mapping)
                        throw new TypeConfigException(
                            "sequence of " ~ pair.value.nodeTypeString(),
                            "sequence of mapping (array of objects)",
                            path, null, node.startMark());

                    return pair.value.parseMapping!(StructFieldRef!E)(
                        path.addPath(pair.key.as!string),
                        E.init, ctx, key.length ? [ key: pair.key ] : null);
                }).array();
        }

        if (node.nodeID != NodeID.sequence)
            throw new TypeConfigException(node, "sequence (array)", path);

        // Enforces the exact length when `FR.Type` is a static array
        typeof(return) validateLength (E[] res)
        {
            static if (is(FR.Type : E_[k], E_, size_t k))
            {
                if (res.length != k)
                    throw new ArrayLengthException(
                        res.length, k, path, null, node.startMark());
                return res[0 .. k];
            }
            else
                return res;
        }

        // We pass `E.init` as default value as it is not going to be used:
        // Either there is something in the YAML document, and that will be
        // converted, or `sequence` will not iterate.
        return validateLength(
            node.sequence.enumerate.map!(
                kv => kv.value.parseField!(NestedFieldRef!(E, FR))(
                    format("%s[%s]", path, kv.index), E.init, ctx))
            .array()
        );
    }
    else
    {
        static assert (!is(FR.Type == union),
            "`union` are not supported. Use a converter instead");
        return node.parseScalar!(FR.Type)(path);
    }
}

/// Parse a node as a scalar
private T parseScalar (T) (Node node, string path)
{
    if (node.nodeID != NodeID.scalar)
        throw new TypeConfigException(node, "scalar (value)", path);

    // Enums are parsed from their string representation, not their base type
    static if (is(T == enum))
        return node.as!string.to!(T);
    else
        return node.as!(T);
}

/*******************************************************************************

    Write a potentially throwing user-provided expression in ConfigException

    The user-provided hooks may throw (e.g. `fromString / the constructor),
    and the error may or may not be clear. We can't do anything about a bad
    message but we can wrap the thrown exception in a `ConfigException`
    to provide the location in the yaml file where the error happened.

    Params:
      exp = The expression that may throw
      path = Path within the config file of the field
      position = Position of the node in the YAML file
      file = Call site file (otherwise the message would point to this function)
      line = Call site line (see `file` reasoning)

    Returns:
      The result of `exp` evaluation.
*******************************************************************************/

private T wrapException (T) (lazy T exp, string path, Mark position,
    string file = __FILE__, size_t line = __LINE__)
{
    try
        return exp;
    // `ConfigException` already carries location info: re-throw unchanged
    catch (ConfigException exc)
        throw exc;
    catch (Exception exc)
        throw new ConstructionException(exc, path, position, file, line);
}

/// Allows us to reuse parseMapping and strict parsing
private struct DurationMapping
{
    // Each unit is a `SetInfo` so `validate` can tell "explicitly set"
    // apart from "left at default"
    public SetInfo!long weeks;
    public SetInfo!long days;
    public SetInfo!long hours;
    public SetInfo!long minutes;
    public SetInfo!long seconds;
    public SetInfo!long msecs;
    public SetInfo!long usecs;
    public SetInfo!long hnsecs;
    public SetInfo!long nsecs;

    /// Build a mapping from an existing `Duration`, marking as "set"
    /// only the units that are non-zero after `split`
    private static DurationMapping make (Duration def) @safe pure nothrow @nogc
    {
        typeof(return) result;
        auto fullSplit = def.split();
        result.weeks = SetInfo!long(fullSplit.weeks, fullSplit.weeks != 0);
        result.days = SetInfo!long(fullSplit.days, fullSplit.days != 0);
        result.hours = SetInfo!long(fullSplit.hours, fullSplit.hours != 0);
        result.minutes = SetInfo!long(fullSplit.minutes, fullSplit.minutes != 0);
        result.seconds = SetInfo!long(fullSplit.seconds, fullSplit.seconds != 0);
        result.msecs = SetInfo!long(fullSplit.msecs, fullSplit.msecs != 0);
        result.usecs = SetInfo!long(fullSplit.usecs, fullSplit.usecs != 0);
        result.hnsecs = SetInfo!long(fullSplit.hnsecs, fullSplit.hnsecs != 0);
        // nsecs is ignored by split as it's not representable in `Duration`
        return result;
    }

    /// Require at least one unit to be explicitly set
    public void validate () const @safe
    {
        // That check should never fail, as the YAML parser would error out,
        // but better be safe than sorry.
        foreach (field; this.tupleof)
            if (field.set)
                return;

        throw new Exception(
            "Expected at least one of the components (weeks, days, hours, " ~
            "minutes, seconds, msecs, usecs, hnsecs, nsecs) to be set");
    }

    /// Allow conversion to a `Duration`
    public Duration opCast (T : Duration) () const scope @safe pure nothrow @nogc
    {
        return core.time.weeks(this.weeks) + core.time.days(this.days) +
            core.time.hours(this.hours) + core.time.minutes(this.minutes) +
            core.time.seconds(this.seconds) + core.time.msecs(this.msecs) +
            core.time.usecs(this.usecs) + core.time.hnsecs(this.hnsecs) +
            core.time.nsecs(this.nsecs);
    }
}

/// Evaluates to `true` if we should recurse into the struct via `parseMapping`
private enum mightBeOptional (alias FR) = is(FR.Type == struct) &&
    !is(immutable(FR.Type) == immutable(core.time.Duration)) &&
    !hasConverter!(FR.Ref) && !hasFromString!(FR.Type) &&
    !hasStringCtor!(FR.Type) && !hasFromYAML!(FR.Type);

/// Convenience template to check for the presence of converter(s)
private enum hasConverter (alias Field) = hasUDA!(Field, Converter);

/// Provided a field reference `FR` which is known to have at least one converter,
/// perform basic checks and return the value after applying the converter.
private auto viaConverter (alias FR) (Node node, string path, in Context context)
{
    enum Converters = getUDAs!(FR.Ref, Converter);
    static assert (Converters.length,
        "Internal error: `viaConverter` called on field `" ~
        FR.FieldName ~ "` with no converter");

    static assert(Converters.length == 1,
        "Field `" ~ FR.FieldName ~ "` cannot have more than one `Converter`");

    // The converter receives a `ConfigParser` so it can inspect the raw
    // node and the current path
    scope impl = new ConfigParserImpl!(FR.Type)(node, path, context);
    return Converters[0].converter(impl);
}

/// Concrete `ConfigParser` handed to `fromYAML` hooks and `@Converter`
/// functions; simply carries the node, path, and parsing context.
private final class ConfigParserImpl (T) : ConfigParser!T
{
    private Node node_;
    private string path_;
    private const(Context) context_;

    /// Ctor
    public this (Node n, string p, const Context c) scope @safe pure nothrow @nogc
    {
        this.node_ = n;
        this.path_ = p;
        this.context_ = c;
    }

    /// Raw YAML node for the field being parsed
    public final override inout(Node) node () inout @safe pure nothrow @nogc
    {
        return this.node_;
    }

    /// Path of the field within the config document
    public final override string path () const @safe pure nothrow @nogc
    {
        return this.path_;
    }

    /// Parsing context (CLI args, strictness)
    protected final override const(Context) context () const @safe pure nothrow @nogc
    {
        return this.context_;
    }
}

/// Helper predicate
private template NameIs (string searching)
{
    enum bool Pred (alias FR) = (searching == FR.Name);
}

/// Returns whether or not the field has a `enabled` / `disabled` field,
/// and its value. If it does not, returns `true`.
private EnabledState isMappingEnabled (M) (Node node, auto ref M default_) { import std.meta : Filter; alias EMT = Filter!(NameIs!("enabled").Pred, FieldRefTuple!M); alias DMT = Filter!(NameIs!("disabled").Pred, FieldRefTuple!M); static if (EMT.length) { static assert (DMT.length == 0, "`enabled` field `" ~ EMT[0].FieldName ~ "` conflicts with `disabled` field `" ~ DMT[0].FieldName ~ "`"); if (auto ptr = "enabled" in node) return EnabledState(EnabledState.Field.Enabled, (*ptr).as!bool); return EnabledState(EnabledState.Field.Enabled, __traits(getMember, default_, EMT[0].FieldName)); } else static if (DMT.length) { if (auto ptr = "disabled" in node) return EnabledState(EnabledState.Field.Disabled, (*ptr).as!bool); return EnabledState(EnabledState.Field.Disabled, __traits(getMember, default_, DMT[0].FieldName)); } else { return EnabledState(EnabledState.Field.None); } } /// Return value of `isMappingEnabled` private struct EnabledState { /// Used to determine which field controls a mapping enabled state private enum Field { /// No such field, the mapping is considered enabled None, /// The field is named 'enabled' Enabled, /// The field is named 'disabled' Disabled, } /// Check if the mapping is considered enabled public bool opCast () const scope @safe pure @nogc nothrow { return this.field == Field.None || (this.field == Field.Enabled && this.fieldValue) || (this.field == Field.Disabled && !this.fieldValue); } /// Type of field found private Field field; /// Value of the field, interpretation depends on `field` private bool fieldValue; } /// Evaluates to `true` if `T` is a `struct` with a default ctor private enum hasFieldwiseCtor (T) = (is(T == struct) && is(typeof(() => T(T.init.tupleof)))); /// Evaluates to `true` if `T` has a static method that is designed to work with this library private enum hasFromYAML (T) = is(typeof(T.fromYAML(ConfigParser!(T).init)) : T); /// Evaluates to `true` if `T` has a static method that accepts a `string` and returns a `T` private 
enum hasFromString (T) = is(typeof(T.fromString(string.init)) : T); /// Evaluates to `true` if `T` is a `struct` which accepts a single string as argument private enum hasStringCtor (T) = (is(T == struct) && is(typeof(T.__ctor)) && Parameters!(T.__ctor).length == 1 && is(typeof(() => T(string.init)))); unittest { static struct Simple { int value; string otherValue; } static assert( hasFieldwiseCtor!Simple); static assert(!hasStringCtor!Simple); static struct PubKey { ubyte[] data; this (string hex) @safe pure nothrow @nogc{} } static assert(!hasFieldwiseCtor!PubKey); static assert( hasStringCtor!PubKey); static assert(!hasFieldwiseCtor!string); static assert(!hasFieldwiseCtor!int); static assert(!hasStringCtor!string); static assert(!hasStringCtor!int); } /// Convenience function to extend a YAML path private string addPath (string opath, string newPart) in(newPart.length) do { return opath.length ? format("%s.%s", opath, newPart) : newPart; } dub-1.40.0/source/dub/internal/configy/Test.d000066400000000000000000000531151477246567400210110ustar00rootroot00000000000000/******************************************************************************* Contains all the tests for this library. Copyright: Copyright (c) 2019-2022 BOSAGORA Foundation All rights reserved. License: MIT License. See LICENSE for details. 
*******************************************************************************/ module dub.internal.configy.Test; import dub.internal.configy.Attributes; import dub.internal.configy.Exceptions; import dub.internal.configy.Read; import dub.internal.configy.Utils; import dub.internal.dyaml.node; import std.format; import core.time; /// Basic usage tests unittest { static struct Address { string address; string city; bool accessible; } static struct Nested { Address address; } static struct Config { bool enabled = true; string name = "Jessie"; int age = 42; double ratio = 24.42; Address address = { address: "Yeoksam-dong", city: "Seoul", accessible: true }; Nested nested = { address: { address: "Gangnam-gu", city: "Also Seoul", accessible: false } }; } auto c1 = parseConfigString!Config("enabled: false", "/dev/null"); assert(!c1.enabled); assert(c1.name == "Jessie"); assert(c1.age == 42); assert(c1.ratio == 24.42); assert(c1.address.address == "Yeoksam-dong"); assert(c1.address.city == "Seoul"); assert(c1.address.accessible); assert(c1.nested.address.address == "Gangnam-gu"); assert(c1.nested.address.city == "Also Seoul"); assert(!c1.nested.address.accessible); } // Tests for SetInfo unittest { static struct Address { string address; string city; bool accessible; } static struct Config { SetInfo!int value; SetInfo!int answer = 42; SetInfo!string name = SetInfo!string("Lorene", false); SetInfo!Address address; } auto c1 = parseConfigString!Config("value: 24", "/dev/null"); assert(c1.value == 24); assert(c1.value.set); assert(c1.answer.set); assert(c1.answer == 42); assert(!c1.name.set); assert(c1.name == "Lorene"); assert(!c1.address.set); auto c2 = parseConfigString!Config(` name: Lorene address: address: Somewhere city: Over the rainbow `, "/dev/null"); assert(!c2.value.set); assert(c2.name == "Lorene"); assert(c2.name.set); assert(c2.address.set); assert(c2.address.address == "Somewhere"); assert(c2.address.city == "Over the rainbow"); } unittest { static struct 
Nested { core.time.Duration timeout; } static struct Config { Nested node; } try { auto result = parseConfigString!Config("node:\n timeout:", "/dev/null"); assert(0); } catch (Exception exc) { assert(exc.toString() == "/dev/null(1:10): node.timeout: Field is of type scalar, " ~ "but expected a mapping with at least one of: weeks, days, hours, minutes, " ~ "seconds, msecs, usecs, hnsecs, nsecs"); } { auto result = parseConfigString!Nested("timeout:\n days: 10\n minutes: 100\n hours: 3\n", "/dev/null"); assert(result.timeout == 10.days + 4.hours + 40.minutes); } } unittest { static struct Config { string required; } try auto result = parseConfigString!Config("value: 24", "/dev/null"); catch (ConfigException e) { assert(format("%s", e) == "/dev/null(0:0): value: Key is not a valid member of this section. There are 1 valid keys: required"); assert(format("%S", e) == format("%s/dev/null%s(%s0%s:%s0%s): %svalue%s: Key is not a valid member of this section. " ~ "There are %s1%s valid keys: %srequired%s", Yellow, Reset, Cyan, Reset, Cyan, Reset, Yellow, Reset, Yellow, Reset, Green, Reset)); } } // Test for various type errors unittest { static struct Mapping { string value; } static struct Config { @Optional Mapping map; @Optional Mapping[] array; int scalar; } try { auto result = parseConfigString!Config("map: Hello World", "/dev/null"); assert(0); } catch (ConfigException exc) { assert(exc.toString() == "/dev/null(0:5): map: Expected to be of type mapping (object), but is a scalar"); } try { auto result = parseConfigString!Config("map:\n - Hello\n - World", "/dev/null"); assert(0); } catch (ConfigException exc) { assert(exc.toString() == "/dev/null(1:2): map: Expected to be of type mapping (object), but is a sequence"); } try { auto result = parseConfigString!Config("scalar:\n - Hello\n - World", "/dev/null"); assert(0); } catch (ConfigException exc) { assert(exc.toString() == "/dev/null(1:2): scalar: Expected to be of type scalar (value), but is a sequence"); } try { 
auto result = parseConfigString!Config("scalar:\n hello:\n World", "/dev/null"); assert(0); } catch (ConfigException exc) { assert(exc.toString() == "/dev/null(1:2): scalar: Expected to be of type scalar (value), but is a mapping"); } } // Test for strict mode unittest { static struct Config { string value; string valhu; string halvue; } try { auto result = parseConfigString!Config("valeu: This is a typo", "/dev/null"); assert(0); } catch (ConfigException exc) { assert(exc.toString() == "/dev/null(0:0): valeu: Key is not a valid member of this section. Did you mean: value, valhu"); } } // Test for required key unittest { static struct Nested { string required; string optional = "Default"; } static struct Config { Nested inner; } try { auto result = parseConfigString!Config("inner:\n optional: Not the default value", "/dev/null"); assert(0); } catch (ConfigException exc) { assert(exc.toString() == "/dev/null(1:2): inner.required: Required key was not found in configuration or command line arguments"); } } // Testing 'validate()' on nested structures unittest { __gshared int validateCalls0 = 0; __gshared int validateCalls1 = 1; __gshared int validateCalls2 = 2; static struct SecondLayer { string value = "default"; public void validate () const { validateCalls2++; } } static struct FirstLayer { bool enabled = true; SecondLayer ltwo; public void validate () const { validateCalls1++; } } static struct Config { FirstLayer lone; public void validate () const { validateCalls0++; } } auto r1 = parseConfigString!Config("lone:\n ltwo:\n value: Something\n", "/dev/null"); assert(r1.lone.ltwo.value == "Something"); // `validateCalls` are given different value to avoid false-positive // if they are set to 0 / mixed up assert(validateCalls0 == 1); assert(validateCalls1 == 2); assert(validateCalls2 == 3); auto r2 = parseConfigString!Config("lone:\n enabled: false\n", "/dev/null"); assert(validateCalls0 == 2); // + 1 assert(validateCalls1 == 2); // Other are disabled 
assert(validateCalls2 == 3); } // Test the throwing ctor / fromString unittest { static struct ThrowingFromString { public static ThrowingFromString fromString (scope const(char)[] value) @safe pure { throw new Exception("Some meaningful error message"); } public int value; } static struct ThrowingCtor { public this (scope const(char)[] value) @safe pure { throw new Exception("Something went wrong... Obviously"); } public int value; } static struct InnerConfig { public int value; @Optional ThrowingCtor ctor; @Optional ThrowingFromString fromString; @Converter!int( (scope ConfigParser!int parser) { // We have to trick DMD a bit so that it infers an `int` return // type but doesn't emit a "Statement is not reachable" warning if (parser.node is Node.init || parser.node !is Node.init ) throw new Exception("You shall not pass"); return 42; }) @Optional int converter; } static struct Config { public InnerConfig config; } try { auto result = parseConfigString!Config("config:\n value: 42\n ctor: 42", "/dev/null"); assert(0); } catch (ConfigException exc) { assert(exc.toString() == "/dev/null(2:8): config.ctor: Something went wrong... 
Obviously"); } try { auto result = parseConfigString!Config("config:\n value: 42\n fromString: 42", "/dev/null"); assert(0); } catch (ConfigException exc) { assert(exc.toString() == "/dev/null(2:14): config.fromString: Some meaningful error message"); } try { auto result = parseConfigString!Config("config:\n value: 42\n converter: 42", "/dev/null"); assert(0); } catch (ConfigException exc) { assert(exc.toString() == "/dev/null(2:13): config.converter: You shall not pass"); } // We also need to test with arrays, to ensure they are correctly called static struct InnerArrayConfig { @Optional int value; @Optional ThrowingCtor ctor; @Optional ThrowingFromString fromString; } static struct ArrayConfig { public InnerArrayConfig[] configs; } try { auto result = parseConfigString!ArrayConfig("configs:\n - ctor: something", "/dev/null"); assert(0); } catch (ConfigException exc) { assert(exc.toString() == "/dev/null(1:10): configs[0].ctor: Something went wrong... Obviously"); } try { auto result = parseConfigString!ArrayConfig( "configs:\n - value: 42\n - fromString: something", "/dev/null"); assert(0); } catch (ConfigException exc) { assert(exc.toString() == "/dev/null(2:16): configs[1].fromString: Some meaningful error message"); } } // Test duplicate fields detection unittest { static struct Config { @Name("shadow") int value; @Name("value") int shadow; } auto result = parseConfigString!Config("shadow: 42\nvalue: 84\n", "/dev/null"); assert(result.value == 42); assert(result.shadow == 84); static struct BadConfig { int value; @Name("value") int something; } // Cannot test the error message, so this is as good as it gets static assert(!is(typeof(() { auto r = parseConfigString!BadConfig("shadow: 42\nvalue: 84\n", "/dev/null"); }))); } // Test a renamed `enabled` / `disabled` unittest { static struct ConfigA { @Name("enabled") bool shouldIStay; int value; } static struct ConfigB { @Name("disabled") bool orShouldIGo; int value; } { auto c = parseConfigString!ConfigA("enabled: 
true\nvalue: 42", "/dev/null"); assert(c.shouldIStay == true); assert(c.value == 42); } { auto c = parseConfigString!ConfigB("disabled: false\nvalue: 42", "/dev/null"); assert(c.orShouldIGo == false); assert(c.value == 42); } } // Test for 'mightBeOptional' & missing key unittest { static struct RequestLimit { size_t reqs = 100; } static struct Nested { @Name("jay") int value; } static struct Config { @Name("chris") Nested value; RequestLimit limits; } auto r = parseConfigString!Config("chris:\n jay: 42", "/dev/null"); assert(r.limits.reqs == 100); try { auto _ = parseConfigString!Config("limits:\n reqs: 42", "/dev/null"); } catch (ConfigException exc) { assert(exc.toString() == "(0:0): chris.jay: Required key was not found in configuration or command line arguments"); } } // Support for associative arrays unittest { static struct Nested { int[string] answers; } static struct Parent { Nested[string] questions; string[int] names; } auto c = parseConfigString!Parent( `names: 42: "Forty two" 97: "Quatre vingt dix sept" questions: first: answers: # Need to use quotes here otherwise it gets interpreted as # true / false, perhaps a dyaml issue ? 
'yes': 42 'no': 24 second: answers: maybe: 69 whynot: 20 `, "/dev/null"); assert(c.names == [42: "Forty two", 97: "Quatre vingt dix sept"]); assert(c.questions.length == 2); assert(c.questions["first"] == Nested(["yes": 42, "no": 24])); assert(c.questions["second"] == Nested(["maybe": 69, "whynot": 20])); } unittest { static struct FlattenMe { int value; string name; } static struct Config { FlattenMe flat = FlattenMe(24, "Four twenty"); alias flat this; FlattenMe not_flat; } auto c = parseConfigString!Config( "value: 42\nname: John\nnot_flat:\n value: 69\n name: Henry", "/dev/null"); assert(c.flat.value == 42); assert(c.flat.name == "John"); assert(c.not_flat.value == 69); assert(c.not_flat.name == "Henry"); auto c2 = parseConfigString!Config( "not_flat:\n value: 69\n name: Henry", "/dev/null"); assert(c2.flat.value == 24); assert(c2.flat.name == "Four twenty"); static struct OptConfig { @Optional FlattenMe flat; alias flat this; int value; } auto c3 = parseConfigString!OptConfig("value: 69\n", "/dev/null"); assert(c3.value == 69); } unittest { static struct Config { @Name("names") string[] names_; size_t names () const scope @safe pure nothrow @nogc { return this.names_.length; } } auto c = parseConfigString!Config("names:\n - John\n - Luca\n", "/dev/null"); assert(c.names_ == [ "John", "Luca" ]); assert(c.names == 2); } unittest { static struct BuildTemplate { string targetName; string platform; } static struct BuildConfig { BuildTemplate config; alias config this; } static struct Config { string name; @Optional BuildConfig config; alias config this; } auto c = parseConfigString!Config("name: dummy\n", "/dev/null"); assert(c.name == "dummy"); auto c2 = parseConfigString!Config("name: dummy\nplatform: windows\n", "/dev/null"); assert(c2.name == "dummy"); assert(c2.config.platform == "windows"); } // Make sure unions don't compile unittest { static union MyUnion { string value; int number; } static struct Config { MyUnion hello; } static 
assert(!is(typeof(parseConfigString!Config("hello: world\n", "/dev/null")))); static assert(!is(typeof(parseConfigString!MyUnion("hello: world\n", "/dev/null")))); } // Test the `@Key` attribute unittest { static struct Interface { string name; string static_ip; } static struct Config { string profile; @Key("name") immutable(Interface)[] ifaces = [ Interface("lo", "127.0.0.1"), ]; } auto c = parseConfigString!Config(`profile: default ifaces: eth0: static_ip: "192.168.1.42" lo: static_ip: "127.0.0.42" `, "/dev/null"); assert(c.ifaces.length == 2); assert(c.ifaces == [ Interface("eth0", "192.168.1.42"), Interface("lo", "127.0.0.42")]); } // Nested ConstructionException unittest { static struct WillFail { string name; this (string value) @safe pure { throw new Exception("Parsing failed!"); } } static struct Container { WillFail[] array; } static struct Config { Container data; } try auto c = parseConfigString!Config(`data: array: - Not - Working `, "/dev/null"); catch (Exception exc) assert(exc.toString() == `/dev/null(2:6): data.array[0]: Parsing failed!`); } /// Test for error message: Has to be versioned out, uncomment to check manually unittest { static struct Nested { int field1; private this (string arg) {} } static struct Config { Nested nested; } static struct Config2 { Nested nested; alias nested this; } version(none) auto c1 = parseConfigString!Config(null, null); version(none) auto c2 = parseConfigString!Config2(null, null); } /// Test support for `fromYAML` hook unittest { static struct PackageDef { string name; @Optional string target; int build = 42; } static struct Package { string path; PackageDef def; public static Package fromYAML (scope ConfigParser!Package parser) { if (parser.node.nodeID == NodeID.mapping) return Package(null, parser.parseAs!PackageDef); else return Package(parser.parseAs!string); } } static struct Config { string name; Package[] deps; } auto c = parseConfigString!Config( ` name: myPkg deps: - /foo/bar - name: foo target: bar 
build: 24 - name: fur - /one/last/path `, "/dev/null"); assert(c.name == "myPkg"); assert(c.deps.length == 4); assert(c.deps[0] == Package("/foo/bar")); assert(c.deps[1] == Package(null, PackageDef("foo", "bar", 24))); assert(c.deps[2] == Package(null, PackageDef("fur", null, 42))); assert(c.deps[3] == Package("/one/last/path")); } /// Test top level hook (fromYAML / fromString) unittest { static struct Version1 { uint fileVersion; uint value; } static struct Version2 { uint fileVersion; string str; } static struct Config { uint fileVersion; union { Version1 v1; Version2 v2; } static Config fromYAML (scope ConfigParser!Config parser) { static struct OnlyVersion { uint fileVersion; } auto vers = parseConfig!OnlyVersion( CLIArgs.init, parser.node, StrictMode.Ignore); switch (vers.fileVersion) { case 1: return Config(1, parser.parseAs!Version1); case 2: Config conf = Config(2); conf.v2 = parser.parseAs!Version2; return conf; default: assert(0); } } } auto v1 = parseConfigString!Config("fileVersion: 1\nvalue: 42", "/dev/null"); auto v2 = parseConfigString!Config("fileVersion: 2\nstr: hello world", "/dev/null"); assert(v1.fileVersion == 1); assert(v1.v1.fileVersion == 1); assert(v1.v1.value == 42); assert(v2.fileVersion == 2); assert(v2.v2.fileVersion == 2); assert(v2.v2.str == "hello world"); } /// Don't call `opCmp` / `opEquals` as they might not be CTFEable /// Also various tests around static arrays unittest { static struct NonCTFEAble { int value; public bool opEquals (const NonCTFEAble other) const scope { assert(0); } public bool opEquals (const ref NonCTFEAble other) const scope { assert(0); } public int opCmp (const NonCTFEAble other) const scope { assert(0); } public int opCmp (const ref NonCTFEAble other) const scope { assert(0); } } static struct Config { NonCTFEAble fixed; @Name("static") NonCTFEAble[3] static_; NonCTFEAble[] dynamic; } auto c = parseConfigString!Config(`fixed: value: 42 static: - value: 84 - value: 126 - value: 168 dynamic: - value: 420 - 
value: 840 `, "/dev/null"); assert(c.fixed.value == 42); assert(c.static_[0].value == 84); assert(c.static_[1].value == 126); assert(c.static_[2].value == 168); assert(c.dynamic.length == 2); assert(c.dynamic[0].value == 420); assert(c.dynamic[1].value == 840); try parseConfigString!Config(`fixed: value: 42 dynamic: - value: 420 - value: 840 `, "/dev/null"); catch (ConfigException e) assert(e.toString() == "/dev/null(0:0): static: Required key was not found in configuration or command line arguments"); try parseConfigString!Config(`fixed: value: 42 static: - value: 1 - value: 2 dynamic: - value: 420 - value: 840 `, "/dev/null"); catch (ConfigException e) assert(e.toString() == "/dev/null(3:2): static: Too few entries for sequence: Expected 3, got 2"); try parseConfigString!Config(`fixed: value: 42 static: - value: 1 - value: 2 - value: 3 - value: 4 dynamic: - value: 420 - value: 840 `, "/dev/null"); catch (ConfigException e) assert(e.toString() == "/dev/null(3:2): static: Too many entries for sequence: Expected 3, got 4"); // Check that optional static array work static struct ConfigOpt { NonCTFEAble fixed; @Name("static") NonCTFEAble[3] static_ = [ NonCTFEAble(69), NonCTFEAble(70), NonCTFEAble(71), ]; } auto c1 = parseConfigString!ConfigOpt(`fixed: value: 1100 `, "/dev/null"); assert(c1.fixed.value == 1100); assert(c1.static_[0].value == 69); assert(c1.static_[1].value == 70); assert(c1.static_[2].value == 71); } dub-1.40.0/source/dub/internal/configy/Utils.d000066400000000000000000000077161477246567400212000ustar00rootroot00000000000000/******************************************************************************* Utilities used internally by the config parser. Compile this library with `-debug=ConfigFillerDebug` to get verbose output. This can be achieved with `debugVersions` in dub, or by depending on the `debug` configuration provided by `dub.json`. Copyright: Copyright (c) 2019-2022 BOSAGORA Foundation All rights reserved. License: MIT License. 
See LICENSE for details. *******************************************************************************/ module dub.internal.configy.Utils; import std.format; /// Type of sink used by the `toString` package alias SinkType = void delegate (in char[]) @safe; /******************************************************************************* Debugging utility for config filler Since this module does a lot of meta-programming, some things can easily go wrong. For example, a condition being false might happen because it is genuinely false or because the condition is buggy. To make figuring out if a config is properly parsed or not, a little utility (config-dumper) exists, which will provide a verbose output of what the config filler does. To do this, `config-dumper` is compiled with the below `debug` version. *******************************************************************************/ debug (ConfigFillerDebug) { /// A thin wrapper around `stderr.writefln` with indentation package void dbgWrite (Args...) (string fmt, Args args) { import std.stdio; stderr.write(IndentChars[0 .. indent >= IndentChars.length ? $ : indent]); stderr.writefln(fmt, args); } /// Log a value that is to be returned /// The value will be the first argument and painted yellow package T dbgWriteRet (T, Args...) (auto ref T return_, string fmt, Args args) { dbgWrite(fmt, return_.paint(Yellow), args); return return_; } /// The current indentation package size_t indent; /// Helper for indentation (who needs more than 16 levels of indent?) private immutable IndentChars = "\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t"; } else { /// No-op package void dbgWrite (Args...) (string fmt, lazy Args args) {} /// Ditto package T dbgWriteRet (T, Args...) 
(auto ref T return_, string fmt, lazy Args args) { return return_; } } /// Thin wrapper to simplify colorization package struct Colored (T) { /// Color used private string color; /// Value to print private T value; /// Hook for `formattedWrite` public void toString (scope SinkType sink) { static if (is(typeof(T.init.length) : size_t)) if (this.value.length == 0) return; formattedWrite(sink, "%s%s%s", this.color, this.value, Reset); } } /// Ditto package Colored!T paint (T) (T arg, string color) { return Colored!T(color, arg); } /// Paint `arg` in color `ifTrue` if `cond` evaluates to `true`, use color `ifFalse` otherwise package Colored!T paintIf (T) (T arg, bool cond, string ifTrue, string ifFalse) { return Colored!T(cond ? ifTrue : ifFalse, arg); } /// Paint a boolean in green if `true`, red otherwise, unless `reverse` is set to `true`, /// in which case the colors are swapped package Colored!bool paintBool (bool value, bool reverse = false) { return value.paintIf(reverse ^ value, Green, Red); } /// Reset the foreground color used package immutable Reset = "\u001b[0m"; /// Set the foreground color to red, used for `false`, missing, errors, etc... package immutable Red = "\u001b[31m"; /// Set the foreground color to red, used for warnings and other things /// that should draw attention but do not pose an immediate issue package immutable Yellow = "\u001b[33m"; /// Set the foreground color to green, used for `true`, present, etc... package immutable Green = "\u001b[32m"; /// Set the foreground color to green, used field names / path package immutable Cyan = "\u001b[36m"; dub-1.40.0/source/dub/internal/dyaml/000077500000000000000000000000001477246567400173705ustar00rootroot00000000000000dub-1.40.0/source/dub/internal/dyaml/composer.d000066400000000000000000000406701477246567400213730ustar00rootroot00000000000000 // Copyright Ferdinand Majerech 2011. // Distributed under the Boost Software License, Version 1.0. 
// (See accompanying file LICENSE_1_0.txt or copy at // http://www.boost.org/LICENSE_1_0.txt) /** * Composes nodes from YAML events provided by parser. * Code based on PyYAML: http://www.pyyaml.org */ module dub.internal.dyaml.composer; import core.memory; import std.algorithm; import std.array; import std.conv; import std.exception; import std.format; import std.range; import std.typecons; import dub.internal.dyaml.constructor; import dub.internal.dyaml.event; import dub.internal.dyaml.exception; import dub.internal.dyaml.node; import dub.internal.dyaml.parser; import dub.internal.dyaml.resolver; package: ///Composes YAML documents from events provided by a Parser. struct Composer { private: ///Parser providing YAML events. Parser parser_; ///Resolver resolving tags (data types). Resolver resolver_; ///Nodes associated with anchors. Used by YAML aliases. Node[string] anchors_; ///Used to reduce allocations when creating pair arrays. /// ///We need one appender for each nesting level that involves ///a pair array, as the inner levels are processed as a ///part of the outer levels. Used as a stack. Appender!(Node.Pair[])[] pairAppenders_; ///Used to reduce allocations when creating node arrays. /// ///We need one appender for each nesting level that involves ///a node array, as the inner levels are processed as a ///part of the outer levels. Used as a stack. Appender!(Node[])[] nodeAppenders_; public: /** * Construct a composer. * * Params: parser = Parser to provide YAML events. * resolver = Resolver to resolve tags (data types). */ this(Parser parser, Resolver resolver) @safe nothrow { parser_ = parser; resolver_ = resolver; } /** * Determine if there are any nodes left. * * Must be called before loading as it handles the stream start event. */ bool checkNode() @safe { // If next event is stream start, skip it parser_.skipOver!"a.id == b"(EventID.streamStart); //True if there are more documents available. 
return parser_.front.id != EventID.streamEnd; } ///Get a YAML document as a node (the root of the document). Node getNode() @safe { //Get the root node of the next document. assert(parser_.front.id != EventID.streamEnd, "Trying to get a node from Composer when there is no node to " ~ "get. use checkNode() to determine if there is a node."); return composeDocument(); } /// Set file name. ref inout(string) name() inout @safe return pure nothrow @nogc { return parser_.name; } /// Get a mark from the current reader position Mark mark() const @safe pure nothrow @nogc { return parser_.mark; } /// Get resolver ref Resolver resolver() @safe return pure nothrow @nogc { return resolver_; } private: void skipExpected(const EventID id) @safe { const foundExpected = parser_.skipOver!"a.id == b"(id); assert(foundExpected, text("Expected ", id, " not found.")); } ///Ensure that appenders for specified nesting levels exist. /// ///Params: pairAppenderLevel = Current level in the pair appender stack. /// nodeAppenderLevel = Current level the node appender stack. void ensureAppendersExist(const uint pairAppenderLevel, const uint nodeAppenderLevel) @safe { while(pairAppenders_.length <= pairAppenderLevel) { pairAppenders_ ~= appender!(Node.Pair[])(); } while(nodeAppenders_.length <= nodeAppenderLevel) { nodeAppenders_ ~= appender!(Node[])(); } } ///Compose a YAML document and return its root node. Node composeDocument() @safe { skipExpected(EventID.documentStart); //Compose the root node. Node node = composeNode(0, 0); skipExpected(EventID.documentEnd); anchors_.destroy(); return node; } /// Compose a node. /// /// Params: pairAppenderLevel = Current level of the pair appender stack. /// nodeAppenderLevel = Current level of the node appender stack. 
Node composeNode(const uint pairAppenderLevel, const uint nodeAppenderLevel) @safe { if(parser_.front.id == EventID.alias_) { const event = parser_.front; parser_.popFront(); const anchor = event.anchor; enforce((anchor in anchors_) !is null, new ComposerException("Found undefined alias: " ~ anchor, event.startMark)); //If the node referenced by the anchor is uninitialized, //it's not finished, i.e. we're currently composing it //and trying to use it recursively here. enforce(anchors_[anchor] != Node(), new ComposerException(text("Found recursive alias: ", anchor), event.startMark, "defined here", anchors_[anchor].startMark)); return anchors_[anchor]; } const event = parser_.front; const anchor = event.anchor; if((anchor !is null) && (anchor in anchors_) !is null) { throw new ComposerException(text("Found duplicate anchor: ", anchor), event.startMark, "defined here", anchors_[anchor].startMark); } Node result; //Associate the anchor, if any, with an uninitialized node. //used to detect duplicate and recursive anchors. if(anchor !is null) { Node tempNode; tempNode.startMark_ = event.startMark; anchors_[anchor] = tempNode; } switch (parser_.front.id) { case EventID.scalar: result = composeScalarNode(); break; case EventID.sequenceStart: result = composeSequenceNode(pairAppenderLevel, nodeAppenderLevel); break; case EventID.mappingStart: result = composeMappingNode(pairAppenderLevel, nodeAppenderLevel); break; default: assert(false, "This code should never be reached"); } if(anchor !is null) { anchors_[anchor] = result; } return result; } ///Compose a scalar node. Node composeScalarNode() @safe { const event = parser_.front; parser_.popFront(); const tag = resolver_.resolve(NodeID.scalar, event.tag, event.value, event.implicit); Node node = constructNode(event.startMark, event.endMark, tag, event.value); node.scalarStyle = event.scalarStyle; return node; } /// Compose a sequence node. /// /// Params: pairAppenderLevel = Current level of the pair appender stack. 
/// nodeAppenderLevel = Current level of the node appender stack. Node composeSequenceNode(const uint pairAppenderLevel, const uint nodeAppenderLevel) @safe { ensureAppendersExist(pairAppenderLevel, nodeAppenderLevel); auto nodeAppender = &(nodeAppenders_[nodeAppenderLevel]); const startEvent = parser_.front; parser_.popFront(); const tag = resolver_.resolve(NodeID.sequence, startEvent.tag, null, startEvent.implicit); while(parser_.front.id != EventID.sequenceEnd) { nodeAppender.put(composeNode(pairAppenderLevel, nodeAppenderLevel + 1)); } Node node = constructNode(startEvent.startMark, parser_.front.endMark, tag, nodeAppender.data.dup); node.collectionStyle = startEvent.collectionStyle; parser_.popFront(); nodeAppender.clear(); return node; } /** * Flatten a node, merging it with nodes referenced through YAMLMerge data type. * * Node must be a mapping or a sequence of mappings. * * Params: root = Node to flatten. * startMark = Start position of the node. * endMark = End position of the node. * pairAppenderLevel = Current level of the pair appender stack. * nodeAppenderLevel = Current level of the node appender stack. * * Returns: Flattened mapping as pairs. */ Node.Pair[] flatten(ref Node root, const Mark startMark, const Mark endMark, const uint pairAppenderLevel, const uint nodeAppenderLevel) @safe { void error(Node node) { //this is Composer, but the code is related to Constructor. 
throw new ConstructorException("While constructing a mapping, " ~ "expected a mapping or a list of " ~ "mappings for merging, but found: " ~ text(node.type), endMark, "mapping started here", startMark); } ensureAppendersExist(pairAppenderLevel, nodeAppenderLevel); auto pairAppender = &(pairAppenders_[pairAppenderLevel]); final switch (root.nodeID) { case NodeID.mapping: Node[] toMerge; toMerge.reserve(root.length); foreach (ref Node key, ref Node value; root) { if(key.type == NodeType.merge) { toMerge ~= value; } else { auto temp = Node.Pair(key, value); pairAppender.put(temp); } } foreach (node; toMerge) { pairAppender.put(flatten(node, startMark, endMark, pairAppenderLevel + 1, nodeAppenderLevel)); } break; case NodeID.sequence: foreach (ref Node node; root) { if (node.nodeID != NodeID.mapping) { error(node); } pairAppender.put(flatten(node, startMark, endMark, pairAppenderLevel + 1, nodeAppenderLevel)); } break; case NodeID.scalar: case NodeID.invalid: error(root); break; } auto flattened = pairAppender.data.dup; pairAppender.clear(); return flattened; } /// Compose a mapping node. /// /// Params: pairAppenderLevel = Current level of the pair appender stack. /// nodeAppenderLevel = Current level of the node appender stack. Node composeMappingNode(const uint pairAppenderLevel, const uint nodeAppenderLevel) @safe { ensureAppendersExist(pairAppenderLevel, nodeAppenderLevel); const startEvent = parser_.front; parser_.popFront(); const tag = resolver_.resolve(NodeID.mapping, startEvent.tag, null, startEvent.implicit); auto pairAppender = &(pairAppenders_[pairAppenderLevel]); Tuple!(Node, Mark)[] toMerge; while(parser_.front.id != EventID.mappingEnd) { auto pair = Node.Pair(composeNode(pairAppenderLevel + 1, nodeAppenderLevel), composeNode(pairAppenderLevel + 1, nodeAppenderLevel)); //Need to flatten and merge the node referred by YAMLMerge. 
if(pair.key.type == NodeType.merge) { toMerge ~= tuple(pair.value, cast(Mark)parser_.front.endMark); } //Not YAMLMerge, just add the pair. else { pairAppender.put(pair); } } foreach(node; toMerge) { merge(*pairAppender, flatten(node[0], startEvent.startMark, node[1], pairAppenderLevel + 1, nodeAppenderLevel)); } auto sorted = pairAppender.data.dup.sort!((x,y) => x.key > y.key); if (sorted.length) { foreach (index, const ref value; sorted[0 .. $ - 1].enumerate) if (value.key == sorted[index + 1].key) { throw new ComposerException( text("Key '", value.key.get!string, "' appears multiple times in mapping"), sorted[index + 1].key.startMark, "defined here", value.key.startMark); } } Node node = constructNode(startEvent.startMark, parser_.front.endMark, tag, pairAppender.data.dup); node.collectionStyle = startEvent.collectionStyle; parser_.popFront(); pairAppender.clear(); return node; } } // Provide good error message on multiple keys (which JSON supports) @safe unittest { import dub.internal.dyaml.loader : Loader; const str = `{ "comment": "This is a common technique", "name": "foobar", "comment": "To write down comments pre-JSON5" }`; const exc = collectException!LoaderException(Loader.fromString(str).load()); assert(exc); assert(exc.message() == "Unable to load : Key 'comment' appears multiple times in mapping\n" ~ ":4,5\ndefined here: :2,5"); } // Provide good error message on duplicate anchors @safe unittest { import dub.internal.dyaml.loader : Loader; const str = `{ a: &anchor b, b: &anchor c, }`; const exc = collectException!LoaderException(Loader.fromString(str).load()); assert(exc); assert(exc.message() == "Unable to load : Found duplicate anchor: anchor\n" ~ ":3,8\ndefined here: :2,8"); } // Provide good error message on missing alias @safe unittest { import dub.internal.dyaml.loader : Loader; const str = `{ a: *anchor, }`; const exc = collectException!LoaderException(Loader.fromString(str).load()); assert(exc); assert(exc.message() == "Unable to load : Found 
undefined alias: anchor\n" ~ ":2,8"); } // Provide good error message on recursive alias @safe unittest { import dub.internal.dyaml.loader : Loader; const str = `a: &anchor { b: *anchor }`; const exc = collectException!LoaderException(Loader.fromString(str).load()); assert(exc); assert(exc.message() == "Unable to load : Found recursive alias: anchor\n" ~ ":2,8\ndefined here: :1,4"); } // Provide good error message on failed merges @safe unittest { import dub.internal.dyaml.loader : Loader; const str = `a: &anchor 3 b: { <<: *anchor }`; const exc = collectException!LoaderException(Loader.fromString(str).load()); assert(exc); assert(exc.message() == "Unable to load : While constructing a mapping, expected a mapping or a list of mappings for merging, but found: integer\n" ~ ":2,19\nmapping started here: :2,4"); } dub-1.40.0/source/dub/internal/dyaml/constructor.d000066400000000000000000000515101477246567400221240ustar00rootroot00000000000000 // Copyright Ferdinand Majerech 2011. // Distributed under the Boost Software License, Version 1.0. // (See accompanying file LICENSE_1_0.txt or copy at // http://www.boost.org/LICENSE_1_0.txt) /** * Class that processes YAML mappings, sequences and scalars into nodes. * This can be used to add custom data types. A tutorial can be found * $(LINK2 https://dlang-community.github.io/D-YAML/, here). */ module dub.internal.dyaml.constructor; import std.array; import std.algorithm; import std.base64; import std.container; import std.conv; import std.datetime; import std.exception; import std.regex; import std.string; import std.typecons; import std.utf; import dub.internal.dyaml.node; import dub.internal.dyaml.exception; import dub.internal.dyaml.style; package: /** Constructs YAML values. * * Each YAML scalar, sequence or mapping has a tag specifying its data type. * Constructor uses user-specifyable functions to create a node of desired * data type from a scalar, sequence or mapping. 
* * * Each of these functions is associated with a tag, and can process either * a scalar, a sequence, or a mapping. The constructor passes each value to * the function with corresponding tag, which then returns the resulting value * that can be stored in a node. * * If a tag is detected with no known constructor function, it is considered an error. */ /* * Construct a node. * * Params: start = Start position of the node. * end = End position of the node. * tag = Tag (data type) of the node. * value = Value to construct node from (string, nodes or pairs). * style = Style of the node (scalar or collection style). * * Returns: Constructed node. */ Node constructNode(T)(const Mark start, const Mark end, const string tag, T value) @safe if((is(T : string) || is(T == Node[]) || is(T == Node.Pair[]))) { Node newNode; noreturn error(string a, string b)() { enum msg = "Error constructing " ~ T.stringof ~ ": Only " ~ a ~ " can be " ~ b; throw new ConstructorException(msg, start, "end", end); } switch(tag) { case "tag:yaml.org,2002:null": newNode = Node(YAMLNull(), tag); break; case "tag:yaml.org,2002:bool": static if(is(T == string)) { newNode = Node(constructBool(value, start, end), tag); break; } else error!("scalars", "bools"); case "tag:yaml.org,2002:int": static if(is(T == string)) { newNode = Node(constructLong(value, start, end), tag); break; } else error!("scalars", "ints"); case "tag:yaml.org,2002:float": static if(is(T == string)) { newNode = Node(constructReal(value, start, end), tag); break; } else error!("scalars", "floats"); case "tag:yaml.org,2002:binary": static if(is(T == string)) { newNode = Node(constructBinary(value, start, end), tag); break; } else error!("scalars", "binary data"); case "tag:yaml.org,2002:timestamp": static if(is(T == string)) { newNode = Node(constructTimestamp(value, start, end), tag); break; } else error!("scalars", "timestamps"); case "tag:yaml.org,2002:str": static if(is(T == string)) { newNode = Node(constructString(value, start, 
end), tag); break; } else error!("scalars", "strings"); case "tag:yaml.org,2002:value": static if(is(T == string)) { newNode = Node(constructString(value, start, end), tag); break; } else error!("scalars", "values"); case "tag:yaml.org,2002:omap": static if(is(T == Node[])) { newNode = Node(constructOrderedMap(value, start, end), tag); break; } else error!("sequences", "ordered maps"); case "tag:yaml.org,2002:pairs": static if(is(T == Node[])) { newNode = Node(constructPairs(value, start, end), tag); break; } else error!("sequences", "pairs"); case "tag:yaml.org,2002:set": static if(is(T == Node.Pair[])) { newNode = Node(constructSet(value, start, end), tag); break; } else error!("mappings", "sets"); case "tag:yaml.org,2002:seq": static if(is(T == Node[])) { newNode = Node(constructSequence(value, start, end), tag); break; } else error!("sequences", "sequences"); case "tag:yaml.org,2002:map": static if(is(T == Node.Pair[])) { newNode = Node(constructMap(value, start, end), tag); break; } else error!("mappings", "maps"); case "tag:yaml.org,2002:merge": newNode = Node(YAMLMerge(), tag); break; default: newNode = Node(value, tag); break; } newNode.startMark_ = start; return newNode; } private: // Construct a boolean _node. bool constructBool(const string str, const Mark start, const Mark end) @safe { string value = str.toLower(); if(value.among!("yes", "true", "on")){return true;} if(value.among!("no", "false", "off")){return false;} throw new ConstructorException("Invalid boolean value: " ~ str, start, "ending at", end); } @safe unittest { assert(collectException!ConstructorException(constructBool("foo", Mark("unittest", 1, 0), Mark("unittest", 1, 3))).msg == "Invalid boolean value: foo"); } // Construct an integer (long) _node. long constructLong(const string str, const Mark start, const Mark end) @safe { string value = str.replace("_", ""); const char c = value[0]; const long sign = c != '-' ? 1 : -1; if(c == '-' || c == '+') { value = value[1 .. 
$]; } enforce(value != "", new ConstructorException("Unable to parse integer value: " ~ str, start, "ending at", end)); long result; try { //Zero. if(value == "0") {result = cast(long)0;} //Binary. else if(value.startsWith("0b")){result = sign * to!int(value[2 .. $], 2);} //Hexadecimal. else if(value.startsWith("0x")){result = sign * to!int(value[2 .. $], 16);} //Octal. else if(value[0] == '0') {result = sign * to!int(value, 8);} //Sexagesimal. else if(value.canFind(":")) { long val; long base = 1; foreach_reverse(digit; value.split(":")) { val += to!long(digit) * base; base *= 60; } result = sign * val; } //Decimal. else{result = sign * to!long(value);} } catch(ConvException e) { throw new ConstructorException("Unable to parse integer value: " ~ str, start, "ending at", end); } return result; } @safe unittest { string canonical = "685230"; string decimal = "+685_230"; string octal = "02472256"; string hexadecimal = "0x_0A_74_AE"; string binary = "0b1010_0111_0100_1010_1110"; string sexagesimal = "190:20:30"; assert(685230 == constructLong(canonical, Mark.init, Mark.init)); assert(685230 == constructLong(decimal, Mark.init, Mark.init)); assert(685230 == constructLong(octal, Mark.init, Mark.init)); assert(685230 == constructLong(hexadecimal, Mark.init, Mark.init)); assert(685230 == constructLong(binary, Mark.init, Mark.init)); assert(685230 == constructLong(sexagesimal, Mark.init, Mark.init)); assert(collectException!ConstructorException(constructLong("+", Mark.init, Mark.init)).msg == "Unable to parse integer value: +"); assert(collectException!ConstructorException(constructLong("0xINVALID", Mark.init, Mark.init)).msg == "Unable to parse integer value: 0xINVALID"); } // Construct a floating point (real) _node. real constructReal(const string str, const Mark start, const Mark end) @safe { string value = str.replace("_", "").toLower(); const char c = value[0]; const real sign = c != '-' ? 1.0 : -1.0; if(c == '-' || c == '+') { value = value[1 .. 
$]; } enforce(value != "" && value != "nan" && value != "inf" && value != "-inf", new ConstructorException("Unable to parse float value: \"" ~ str ~ "\"", start, "ending at", end)); real result; try { //Infinity. if (value == ".inf"){result = sign * real.infinity;} //Not a Number. else if(value == ".nan"){result = real.nan;} //Sexagesimal. else if(value.canFind(":")) { real val = 0.0; real base = 1.0; foreach_reverse(digit; value.split(":")) { val += to!real(digit) * base; base *= 60.0; } result = sign * val; } //Plain floating point. else{result = sign * to!real(value);} } catch(ConvException e) { throw new ConstructorException("Unable to parse float value: \"" ~ str ~ "\"", start, "ending at", end); } return result; } @safe unittest { bool eq(real a, real b, real epsilon = 0.2) @safe { return a >= (b - epsilon) && a <= (b + epsilon); } string canonical = "6.8523015e+5"; string exponential = "685.230_15e+03"; string fixed = "685_230.15"; string sexagesimal = "190:20:30.15"; string negativeInf = "-.inf"; string NaN = ".NaN"; assert(eq(685230.15, constructReal(canonical, Mark.init, Mark.init))); assert(eq(685230.15, constructReal(exponential, Mark.init, Mark.init))); assert(eq(685230.15, constructReal(fixed, Mark.init, Mark.init))); assert(eq(685230.15, constructReal(sexagesimal, Mark.init, Mark.init))); assert(eq(-real.infinity, constructReal(negativeInf, Mark.init, Mark.init))); assert(to!string(constructReal(NaN, Mark.init, Mark.init)) == "nan"); assert(collectException!ConstructorException(constructReal("+", Mark.init, Mark.init)).msg == "Unable to parse float value: \"+\""); assert(collectException!ConstructorException(constructReal("74.invalid", Mark.init, Mark.init)).msg == "Unable to parse float value: \"74.invalid\""); } // Construct a binary (base64) _node. 
ubyte[] constructBinary(const string value, const Mark start, const Mark end) @safe { import std.ascii : newline; import std.array : array; // For an unknown reason, this must be nested to work (compiler bug?). try { return Base64.decode(value.representation.filter!(c => !newline.canFind(c)).array); } catch(Base64Exception e) { throw new ConstructorException("Unable to decode base64 value: " ~ e.msg, start, "ending at", end); } } @safe unittest { auto test = "The Answer: 42".representation; char[] buffer; buffer.length = 256; string input = Base64.encode(test, buffer).idup; const value = constructBinary(input, Mark.init, Mark.init); assert(value == test); assert(value == [84, 104, 101, 32, 65, 110, 115, 119, 101, 114, 58, 32, 52, 50]); } // Construct a timestamp (SysTime) _node. SysTime constructTimestamp(const string str, const Mark start, const Mark end) @safe { string value = str; auto YMDRegexp = regex("^([0-9][0-9][0-9][0-9])-([0-9][0-9]?)-([0-9][0-9]?)"); auto HMSRegexp = regex("^[Tt \t]+([0-9][0-9]?):([0-9][0-9]):([0-9][0-9])(\\.[0-9]*)?"); auto TZRegexp = regex("^[ \t]*Z|([-+][0-9][0-9]?)(:[0-9][0-9])?"); try { // First, get year, month and day. auto matches = match(value, YMDRegexp); enforce(!matches.empty, new Exception("Unable to parse timestamp value: " ~ value)); auto captures = matches.front.captures; const year = to!int(captures[1]); const month = to!int(captures[2]); const day = to!int(captures[3]); // If available, get hour, minute, second and fraction, if present. value = matches.front.post; matches = match(value, HMSRegexp); if(matches.empty) { return SysTime(DateTime(year, month, day), UTC()); } captures = matches.front.captures; const hour = to!int(captures[1]); const minute = to!int(captures[2]); const second = to!int(captures[3]); const hectonanosecond = cast(int)(to!real("0" ~ captures[4]) * 10_000_000); // If available, get timezone. 
value = matches.front.post; matches = match(value, TZRegexp); if(matches.empty || matches.front.captures[0] == "Z") { // No timezone. return SysTime(DateTime(year, month, day, hour, minute, second), hectonanosecond.dur!"hnsecs", UTC()); } // We have a timezone, so parse it. captures = matches.front.captures; int sign = 1; int tzHours; if(!captures[1].empty) { if(captures[1][0] == '-') {sign = -1;} tzHours = to!int(captures[1][1 .. $]); } const tzMinutes = (!captures[2].empty) ? to!int(captures[2][1 .. $]) : 0; const tzOffset = dur!"minutes"(sign * (60 * tzHours + tzMinutes)); return SysTime(DateTime(year, month, day, hour, minute, second), hectonanosecond.dur!"hnsecs", new immutable SimpleTimeZone(tzOffset)); } catch(ConvException e) { throw new Exception("Unable to parse timestamp value " ~ value ~ " : " ~ e.msg); } catch(DateTimeException e) { throw new Exception("Invalid timestamp value " ~ value ~ " : " ~ e.msg); } assert(false, "This code should never be reached"); } @safe unittest { string timestamp(string value) { return constructTimestamp(value, Mark.init, Mark.init).toISOString(); } string canonical = "2001-12-15T02:59:43.1Z"; string iso8601 = "2001-12-14t21:59:43.10-05:00"; string spaceSeparated = "2001-12-14 21:59:43.10 -5"; string noTZ = "2001-12-15 2:59:43.10"; string noFraction = "2001-12-15 2:59:43"; string ymd = "2002-12-14"; assert(timestamp(canonical) == "20011215T025943.1Z"); //avoiding float conversion errors assert(timestamp(iso8601) == "20011214T215943.0999999-05:00" || timestamp(iso8601) == "20011214T215943.1-05:00"); assert(timestamp(spaceSeparated) == "20011214T215943.0999999-05:00" || timestamp(spaceSeparated) == "20011214T215943.1-05:00"); assert(timestamp(noTZ) == "20011215T025943.0999999Z" || timestamp(noTZ) == "20011215T025943.1Z"); assert(timestamp(noFraction) == "20011215T025943Z"); assert(timestamp(ymd) == "20021214T000000Z"); } // Construct a string _node. 
string constructString(const string str, const Mark start, const Mark end) @safe { return str; } // Convert a sequence of single-element mappings into a sequence of pairs. Node.Pair[] getPairs(string type)(const Node[] nodes) @safe { enum msg = "While constructing " ~ type ~ ", expected a mapping with single element"; Node.Pair[] pairs; pairs.reserve(nodes.length); foreach(node; nodes) { enforce(node.nodeID == NodeID.mapping && node.length == 1, new ConstructorException(msg, node.startMark)); pairs ~= node.as!(Node.Pair[]); } return pairs; } // Construct an ordered map (ordered sequence of key:value pairs without duplicates) _node. Node.Pair[] constructOrderedMap(const Node[] nodes, const Mark start, const Mark end) @safe { auto pairs = getPairs!"an ordered map"(nodes); //Detect duplicates. //TODO this should be replaced by something with deterministic memory allocation. auto keys = new RedBlackTree!Node(); foreach(ref pair; pairs) { auto foundMatch = keys.equalRange(pair.key); enforce(foundMatch.empty, new ConstructorException( "Duplicate entry in an ordered map", pair.key.startMark, "first occurrence here", foundMatch.front.startMark)); keys.insert(pair.key); } return pairs; } @safe unittest { uint lines; Node[] alternateTypes(uint length) @safe { Node[] pairs; foreach(long i; 0 .. length) { auto pair = (i % 2) ? Node.Pair(i.to!string, i) : Node.Pair(i, i.to!string); pair.key.startMark_ = Mark("unittest", lines++, 0); pairs ~= Node([pair]); } return pairs; } Node[] sameType(uint length) @safe { Node[] pairs; foreach(long i; 0 .. 
length) { auto pair = Node.Pair(i.to!string, i); pair.key.startMark_ = Mark("unittest", lines++, 0); pairs ~= Node([pair]); } return pairs; } assert(collectException!ConstructorException(constructOrderedMap(alternateTypes(8) ~ alternateTypes(2), Mark.init, Mark.init)).message == "Duplicate entry in an ordered map\nunittest:9,1\nfirst occurrence here: unittest:1,1"); assertNotThrown(constructOrderedMap(alternateTypes(8), Mark.init, Mark.init)); assert(collectException!ConstructorException(constructOrderedMap(sameType(64) ~ sameType(16), Mark.init, Mark.init)).message == "Duplicate entry in an ordered map\nunittest:83,1\nfirst occurrence here: unittest:19,1"); assert(collectException!ConstructorException(constructOrderedMap(alternateTypes(64) ~ alternateTypes(16), Mark.init, Mark.init)).message == "Duplicate entry in an ordered map\nunittest:163,1\nfirst occurrence here: unittest:99,1"); assertNotThrown(constructOrderedMap(sameType(64), Mark.init, Mark.init)); assertNotThrown(constructOrderedMap(alternateTypes(64), Mark.init, Mark.init)); assert(collectException!ConstructorException(constructOrderedMap([Node([Node(1), Node(2)])], Mark.init, Mark.init)).message == "While constructing an ordered map, expected a mapping with single element\n:1,1"); } // Construct a pairs (ordered sequence of key: value pairs allowing duplicates) _node. Node.Pair[] constructPairs(const Node[] nodes, const Mark start, const Mark end) @safe { return getPairs!"pairs"(nodes); } // Construct a set _node. Node[] constructSet(const Node.Pair[] pairs, const Mark start, const Mark end) @safe { // In future, the map here should be replaced with something with deterministic // memory allocation if possible. // Detect duplicates. 
ubyte[Node] map; Node[] nodes; nodes.reserve(pairs.length); foreach(pair; pairs) { enforce((pair.key in map) is null, new Exception("Duplicate entry in a set")); map[pair.key] = 0; nodes ~= pair.key; } return nodes; } @safe unittest { Node.Pair[] set(uint length) @safe { Node.Pair[] pairs; foreach(long i; 0 .. length) { pairs ~= Node.Pair(i.to!string, YAMLNull()); } return pairs; } auto DuplicatesShort = set(8) ~ set(2); auto noDuplicatesShort = set(8); auto DuplicatesLong = set(64) ~ set(4); auto noDuplicatesLong = set(64); bool eq(Node.Pair[] a, Node[] b) { if(a.length != b.length){return false;} foreach(i; 0 .. a.length) { if(a[i].key != b[i]) { return false; } } return true; } auto nodeDuplicatesShort = DuplicatesShort.dup; auto nodeNoDuplicatesShort = noDuplicatesShort.dup; auto nodeDuplicatesLong = DuplicatesLong.dup; auto nodeNoDuplicatesLong = noDuplicatesLong.dup; assertThrown(constructSet(nodeDuplicatesShort, Mark.init, Mark.init)); assertNotThrown(constructSet(nodeNoDuplicatesShort, Mark.init, Mark.init)); assertThrown(constructSet(nodeDuplicatesLong, Mark.init, Mark.init)); assertNotThrown(constructSet(nodeNoDuplicatesLong, Mark.init, Mark.init)); } // Construct a sequence (array) _node. Node[] constructSequence(Node[] nodes, const Mark start, const Mark end) @safe { return nodes; } // Construct an unordered map (unordered set of key:value _pairs without duplicates) _node. Node.Pair[] constructMap(Node.Pair[] pairs, const Mark start, const Mark end) @safe { //Detect duplicates. //TODO this should be replaced by something with deterministic memory allocation. auto keys = new RedBlackTree!Node(); foreach(ref pair; pairs) { enforce(!(pair.key in keys), new Exception("Duplicate entry in a map: " ~ pair.key.debugString())); keys.insert(pair.key); } return pairs; } dub-1.40.0/source/dub/internal/dyaml/dumper.d000066400000000000000000000233711477246567400210370ustar00rootroot00000000000000 // Copyright Ferdinand Majerech 2011. 
// Distributed under the Boost Software License, Version 1.0. // (See accompanying file LICENSE_1_0.txt or copy at // http://www.boost.org/LICENSE_1_0.txt) /** * YAML dumper. * * Code based on $(LINK2 http://www.pyyaml.org, PyYAML). */ module dub.internal.dyaml.dumper; import std.array; import std.range.primitives; import std.typecons; import dub.internal.dyaml.emitter; import dub.internal.dyaml.event; import dub.internal.dyaml.exception; import dub.internal.dyaml.linebreak; import dub.internal.dyaml.node; import dub.internal.dyaml.representer; import dub.internal.dyaml.resolver; import dub.internal.dyaml.serializer; import dub.internal.dyaml.style; import dub.internal.dyaml.tagdirective; /** * Dumps YAML documents to files or streams. * * User specified Representer and/or Resolver can be used to support new * tags / data types. * * Setters are provided to affect output details (style, etc.). */ auto dumper() { auto dumper = Dumper(); dumper.resolver = Resolver.withDefaultResolvers; return dumper; } struct Dumper { private: //Indentation width. int indent_ = 2; //Tag directives to use. TagDirective[] tags_; public: //Resolver to resolve tags. Resolver resolver; //Write scalars in canonical form? bool canonical; //Preferred text width. uint textWidth = 80; //Line break to use. Unix by default. LineBreak lineBreak = LineBreak.unix; //YAML version string. Default is 1.1. string YAMLVersion = "1.1"; //Always explicitly write document start? Default is no explicit start. bool explicitStart = false; //Always explicitly write document end? Default is no explicit end. bool explicitEnd = false; //Name of the output file or stream, used in error messages. string name = ""; // Default style for scalar nodes. If style is $(D ScalarStyle.invalid), the _style is chosen automatically. ScalarStyle defaultScalarStyle = ScalarStyle.invalid; // Default style for collection nodes. If style is $(D CollectionStyle.invalid), the _style is chosen automatically. 
CollectionStyle defaultCollectionStyle = CollectionStyle.invalid; @disable bool opEquals(ref Dumper); @disable int opCmp(ref Dumper); ///Set indentation width. 2 by default. Must not be zero. @property void indent(uint indent) pure @safe nothrow in { assert(indent != 0, "Can't use zero YAML indent width"); } do { indent_ = indent; } /** * Specify tag directives. * * A tag directive specifies a shorthand notation for specifying _tags. * Each tag directive associates a handle with a prefix. This allows for * compact tag notation. * * Each handle specified MUST start and end with a '!' character * (a single character "!" handle is allowed as well). * * Only alphanumeric characters, '-', and '__' may be used in handles. * * Each prefix MUST not be empty. * * The "!!" handle is used for default YAML _tags with prefix * "tag:yaml.org,2002:". This can be overridden. * * Params: tags = Tag directives (keys are handles, values are prefixes). */ @property void tagDirectives(string[string] tags) pure @safe { TagDirective[] t; foreach(handle, prefix; tags) { assert(handle.length >= 1 && handle[0] == '!' && handle[$ - 1] == '!', "A tag handle is empty or does not start and end with a " ~ "'!' character : " ~ handle); assert(prefix.length >= 1, "A tag prefix is empty"); t ~= TagDirective(handle, prefix); } tags_ = t; } /// @safe unittest { auto dumper = dumper(); string[string] directives; directives["!short!"] = "tag:long.org,2011:"; //This will emit tags starting with "tag:long.org,2011" //with a "!short!" prefix instead. dumper.tagDirectives(directives); dumper.dump(new Appender!string(), Node("foo")); } /** * Dump one or more YAML _documents to the file/stream. * * Note that while you can call dump() multiple times on the same * dumper, you will end up writing multiple YAML "files" to the same * file/stream. * * Params: documents = Documents to _dump (root nodes of the _documents). * * Throws: YAMLException on error (e.g. invalid nodes, * unable to write to file/stream). 
*/ void dump(CharacterType = char, Range)(Range range, Node[] documents ...) if (isOutputRange!(Range, CharacterType) && isOutputRange!(Range, char) || isOutputRange!(Range, wchar) || isOutputRange!(Range, dchar)) { try { auto emitter = new Emitter!(Range, CharacterType)(range, canonical, indent_, textWidth, lineBreak); auto serializer = Serializer(resolver, explicitStart ? Yes.explicitStart : No.explicitStart, explicitEnd ? Yes.explicitEnd : No.explicitEnd, YAMLVersion, tags_); serializer.startStream(emitter); foreach(ref document; documents) { auto data = representData(document, defaultScalarStyle, defaultCollectionStyle); serializer.serialize(emitter, data); } serializer.endStream(emitter); } catch(YAMLException e) { throw new YAMLException("Unable to dump YAML to stream " ~ name ~ " : " ~ e.msg, e.file, e.line); } } } ///Write to a file @safe unittest { auto node = Node([1, 2, 3, 4, 5]); dumper().dump(new Appender!string(), node); } ///Write multiple YAML documents to a file @safe unittest { auto node1 = Node([1, 2, 3, 4, 5]); auto node2 = Node("This document contains only one string"); dumper().dump(new Appender!string(), node1, node2); //Or with an array: dumper().dump(new Appender!string(), [node1, node2]); } ///Write to memory @safe unittest { auto stream = new Appender!string(); auto node = Node([1, 2, 3, 4, 5]); dumper().dump(stream, node); } ///Use a custom resolver to support custom data types and/or implicit tags @safe unittest { import std.regex : regex; auto node = Node([1, 2, 3, 4, 5]); auto dumper = dumper(); dumper.resolver.addImplicitResolver("!tag", regex("A.*"), "A"); dumper.dump(new Appender!string(), node); } /// Set default scalar style @safe unittest { auto stream = new Appender!string(); auto node = Node("Hello world!"); auto dumper = dumper(); dumper.defaultScalarStyle = ScalarStyle.singleQuoted; dumper.dump(stream, node); } /// Set default collection style @safe unittest { auto stream = new Appender!string(); auto node = Node(["Hello", 
"world!"]); auto dumper = dumper(); dumper.defaultCollectionStyle = CollectionStyle.flow; dumper.dump(stream, node); } // Make sure the styles are actually used @safe unittest { auto stream = new Appender!string(); auto node = Node([Node("Hello world!"), Node(["Hello", "world!"])]); auto dumper = dumper(); dumper.defaultScalarStyle = ScalarStyle.singleQuoted; dumper.defaultCollectionStyle = CollectionStyle.flow; dumper.explicitEnd = false; dumper.explicitStart = false; dumper.YAMLVersion = null; dumper.dump(stream, node); assert(stream.data == "['Hello world!', ['Hello', 'world!']]\n"); } // Explicit document start/end markers @safe unittest { auto stream = new Appender!string(); auto node = Node([1, 2, 3, 4, 5]); auto dumper = dumper(); dumper.explicitEnd = true; dumper.explicitStart = true; dumper.YAMLVersion = null; dumper.dump(stream, node); //Skip version string assert(stream.data[0..3] == "---"); //account for newline at end assert(stream.data[$-4..$-1] == "..."); } @safe unittest { auto stream = new Appender!string(); auto node = Node([Node("Te, st2")]); auto dumper = dumper(); dumper.explicitStart = true; dumper.explicitEnd = false; dumper.YAMLVersion = null; dumper.dump(stream, node); assert(stream.data == "--- ['Te, st2']\n"); } // No explicit document start/end markers @safe unittest { auto stream = new Appender!string(); auto node = Node([1, 2, 3, 4, 5]); auto dumper = dumper(); dumper.explicitEnd = false; dumper.explicitStart = false; dumper.YAMLVersion = null; dumper.dump(stream, node); //Skip version string assert(stream.data[0..3] != "---"); //account for newline at end assert(stream.data[$-4..$-1] != "..."); } // Windows, macOS line breaks @safe unittest { auto node = Node(0); { auto stream = new Appender!string(); auto dumper = dumper(); dumper.explicitEnd = true; dumper.explicitStart = true; dumper.YAMLVersion = null; dumper.lineBreak = LineBreak.windows; dumper.dump(stream, node); assert(stream.data == "--- 0\r\n...\r\n"); } { auto stream = new 
Appender!string(); auto dumper = dumper(); dumper.explicitEnd = true; dumper.explicitStart = true; dumper.YAMLVersion = null; dumper.lineBreak = LineBreak.macintosh; dumper.dump(stream, node); assert(stream.data == "--- 0\r...\r"); } } dub-1.40.0/source/dub/internal/dyaml/emitter.d000066400000000000000000001615471477246567400212240ustar00rootroot00000000000000// Copyright Ferdinand Majerech 2011. // Distributed under the Boost Software License, Version 1.0. // (See accompanying file LICENSE_1_0.txt or copy at // http://www.boost.org/LICENSE_1_0.txt) /** * YAML emitter. * Code based on PyYAML: http://www.pyyaml.org */ module dub.internal.dyaml.emitter; import std.algorithm; import std.array; import std.ascii; import std.conv; import std.encoding; import std.exception; import std.format; import std.range; import std.string; import std.system; import std.typecons; import std.utf; import dub.internal.dyaml.encoding; import dub.internal.dyaml.escapes; import dub.internal.dyaml.event; import dub.internal.dyaml.exception; import dub.internal.dyaml.linebreak; import dub.internal.dyaml.queue; import dub.internal.dyaml.scanner; import dub.internal.dyaml.style; import dub.internal.dyaml.tagdirective; package: //Stores results of analysis of a scalar, determining e.g. what scalar style to use. struct ScalarAnalysis { //Scalar itself. string scalar; enum AnalysisFlags { empty = 1<<0, multiline = 1<<1, allowFlowPlain = 1<<2, allowBlockPlain = 1<<3, allowSingleQuoted = 1<<4, allowDoubleQuoted = 1<<5, allowBlock = 1<<6, isNull = 1<<7 } ///Analysis results. BitFlags!AnalysisFlags flags; } private alias isNewLine = among!('\n', '\u0085', '\u2028', '\u2029'); private alias isSpecialChar = among!('#', ',', '[', ']', '{', '}', '&', '*', '!', '|', '>', '\\', '\'', '"', '%', '@', '`'); private alias isFlowIndicator = among!(',', '?', '[', ']', '{', '}'); private alias isSpace = among!('\0', '\n', '\r', '\u0085', '\u2028', '\u2029', ' ', '\t'); //Emits YAML events into a file/stream. 
struct Emitter(Range, CharType) if (isOutputRange!(Range, CharType)) { private: ///Default tag handle shortcuts and replacements. static TagDirective[] defaultTagDirectives_ = [TagDirective("!", "!"), TagDirective("!!", "tag:yaml.org,2002:")]; ///Stream to write to. Range stream_; /// Type used for upcoming emitter steps alias EmitterFunction = void function(scope typeof(this)*) @safe; ///Stack of states. Appender!(EmitterFunction[]) states_; ///Current state. EmitterFunction state_; ///Event queue. Queue!Event events_; ///Event we're currently emitting. Event event_; ///Stack of previous indentation levels. Appender!(int[]) indents_; ///Current indentation level. int indent_ = -1; ///Level of nesting in flow context. If 0, we're in block context. uint flowLevel_ = 0; /// Describes context (where we are in the document). enum Context { /// Root node of a document. root, /// Sequence. sequence, /// Mapping. mappingNoSimpleKey, /// Mapping, in a simple key. mappingSimpleKey, } /// Current context. Context context_; ///Characteristics of the last emitted character: ///Line. uint line_ = 0; ///Column. uint column_ = 0; ///Whitespace character? bool whitespace_ = true; ///indentation space, '-', '?', or ':'? bool indentation_ = true; ///Does the document require an explicit document indicator? bool openEnded_; ///Formatting details. ///Canonical scalar format? bool canonical_; ///Best indentation width. uint bestIndent_ = 2; ///Best text width. uint bestWidth_ = 80; ///Best line break character/s. LineBreak bestLineBreak_; ///Tag directive handle - prefix pairs. TagDirective[] tagDirectives_; ///Anchor/alias to process. string preparedAnchor_ = null; ///Tag to process. string preparedTag_ = null; ///Analysis result of the current scalar. ScalarAnalysis analysis_; ///Style of the current scalar. ScalarStyle style_ = ScalarStyle.invalid; public: @disable int opCmp(ref Emitter); @disable bool opEquals(ref Emitter); /** * Construct an emitter. 
* * Params: stream = Output range to write to. * canonical = Write scalars in canonical form? * indent = Indentation width. * lineBreak = Line break character/s. */ this(Range stream, const bool canonical, const int indent, const int width, const LineBreak lineBreak) @safe { states_.reserve(32); indents_.reserve(32); stream_ = stream; canonical_ = canonical; nextExpected!"expectStreamStart"(); if(indent > 1 && indent < 10){bestIndent_ = indent;} if(width > bestIndent_ * 2) {bestWidth_ = width;} bestLineBreak_ = lineBreak; analysis_.flags.isNull = true; } ///Emit an event. void emit(Event event) @safe { events_.push(event); while(!needMoreEvents()) { event_ = events_.pop(); callNext(); event_.destroy(); } } private: ///Pop and return the newest state in states_. EmitterFunction popState() @safe in(states_.data.length > 0, "Emitter: Need to pop a state but there are no states left") { const result = states_.data[$-1]; states_.shrinkTo(states_.data.length - 1); return result; } void pushState(string D)() @safe { states_ ~= mixin("function(typeof(this)* self) { self."~D~"(); }"); } ///Pop and return the newest indent in indents_. int popIndent() @safe in(indents_.data.length > 0, "Emitter: Need to pop an indent level but there" ~ " are no indent levels left") { const result = indents_.data[$-1]; indents_.shrinkTo(indents_.data.length - 1); return result; } ///Write a string to the file/stream. void writeString(const scope char[] str) @safe { static if(is(CharType == char)) { copy(str, stream_); } static if(is(CharType == wchar)) { const buffer = to!wstring(str); copy(buffer, stream_); } static if(is(CharType == dchar)) { const buffer = to!dstring(str); copy(buffer, stream_); } } ///In some cases, we wait for a few next events before emitting. 
bool needMoreEvents() @safe nothrow { if(events_.length == 0){return true;} const event = events_.peek(); if(event.id == EventID.documentStart){return needEvents(1);} if(event.id == EventID.sequenceStart){return needEvents(2);} if(event.id == EventID.mappingStart) {return needEvents(3);} return false; } ///Determines if we need specified number of more events. bool needEvents(in uint count) @safe nothrow { int level; foreach(const event; events_.range) { if(event.id.among!(EventID.documentStart, EventID.sequenceStart, EventID.mappingStart)) {++level;} else if(event.id.among!(EventID.documentEnd, EventID.sequenceEnd, EventID.mappingEnd)) {--level;} else if(event.id == EventID.streamStart){level = -1;} if(level < 0) { return false; } } return events_.length < (count + 1); } ///Increase indentation level. void increaseIndent(const Flag!"flow" flow = No.flow, const bool indentless = false) @safe { indents_ ~= indent_; if(indent_ == -1) { indent_ = flow ? bestIndent_ : 0; } else if(!indentless) { indent_ += bestIndent_; } } ///Determines if the type of current event is as specified. Throws if no event. bool eventTypeIs(in EventID id) const pure @safe in(!event_.isNull, "Expected an event, but no event is available.") { return event_.id == id; } //States. //Stream handlers. ///Handle start of a file/stream. void expectStreamStart() @safe in(eventTypeIs(EventID.streamStart), "Expected streamStart, but got " ~ event_.idString) { writeStreamStart(); nextExpected!"expectDocumentStart!(Yes.first)"(); } ///Expect nothing, throwing if we still have something. void expectNothing() @safe { assert(0, "Expected nothing, but got " ~ event_.idString); } //Document handlers. ///Handle start of a document. 
void expectDocumentStart(Flag!"first" first)() @safe in(eventTypeIs(EventID.documentStart) || eventTypeIs(EventID.streamEnd), "Expected documentStart or streamEnd, but got " ~ event_.idString) { if(event_.id == EventID.documentStart) { const YAMLVersion = event_.value; auto tagDirectives = event_.tagDirectives; if(openEnded_ && (YAMLVersion !is null || tagDirectives !is null)) { writeIndicator("...", Yes.needWhitespace); writeIndent(); } if(YAMLVersion !is null) { writeVersionDirective(prepareVersion(YAMLVersion)); } if(tagDirectives !is null) { tagDirectives_ = tagDirectives; sort!"icmp(a.handle, b.handle) < 0"(tagDirectives_); foreach(ref pair; tagDirectives_) { writeTagDirective(prepareTagHandle(pair.handle), prepareTagPrefix(pair.prefix)); } } bool eq(ref TagDirective a, ref TagDirective b){return a.handle == b.handle;} //Add any default tag directives that have not been overriden. foreach(ref def; defaultTagDirectives_) { if(!std.algorithm.canFind!eq(tagDirectives_, def)) { tagDirectives_ ~= def; } } const implicit = first && !event_.explicitDocument && !canonical_ && YAMLVersion is null && tagDirectives is null && !checkEmptyDocument(); if(!implicit) { writeIndent(); writeIndicator("---", Yes.needWhitespace); if(canonical_){writeIndent();} } nextExpected!"expectRootNode"(); } else if(event_.id == EventID.streamEnd) { if(openEnded_) { writeIndicator("...", Yes.needWhitespace); writeIndent(); } writeStreamEnd(); nextExpected!"expectNothing"(); } } ///Handle end of a document. void expectDocumentEnd() @safe in(eventTypeIs(EventID.documentEnd), "Expected DocumentEnd, but got " ~ event_.idString) { writeIndent(); if(event_.explicitDocument) { writeIndicator("...", Yes.needWhitespace); writeIndent(); } nextExpected!"expectDocumentStart!(No.first)"(); } ///Handle the root node of a document. void expectRootNode() @safe { pushState!"expectDocumentEnd"(); expectNode(Context.root); } ///Handle a mapping node. // //Params: simpleKey = Are we in a simple key? 
void expectMappingNode(const bool simpleKey = false) @safe { expectNode(simpleKey ? Context.mappingSimpleKey : Context.mappingNoSimpleKey); } ///Handle a sequence node. void expectSequenceNode() @safe { expectNode(Context.sequence); } ///Handle a new node. Context specifies where in the document we are. void expectNode(const Context context) @safe { context_ = context; const flowCollection = event_.collectionStyle == CollectionStyle.flow; switch(event_.id) { case EventID.alias_: expectAlias(); break; case EventID.scalar: processAnchor("&"); processTag(); expectScalar(); break; case EventID.sequenceStart: processAnchor("&"); processTag(); if(flowLevel_ > 0 || canonical_ || flowCollection || checkEmptySequence()) { expectFlowSequence(); } else { expectBlockSequence(); } break; case EventID.mappingStart: processAnchor("&"); processTag(); if(flowLevel_ > 0 || canonical_ || flowCollection || checkEmptyMapping()) { expectFlowMapping(); } else { expectBlockMapping(); } break; default: assert(0, "Expected alias_, scalar, sequenceStart or " ~ "mappingStart, but got: " ~ event_.idString); } } ///Handle an alias. void expectAlias() @safe in(event_.anchor != "", "Anchor is not specified for alias") { processAnchor("*"); nextExpected(popState()); } ///Handle a scalar. void expectScalar() @safe { increaseIndent(Yes.flow); processScalar(); indent_ = popIndent(); nextExpected(popState()); } //Flow sequence handlers. ///Handle a flow sequence. void expectFlowSequence() @safe { writeIndicator("[", Yes.needWhitespace, Yes.whitespace); ++flowLevel_; increaseIndent(Yes.flow); nextExpected!"expectFlowSequenceItem!(Yes.first)"(); } ///Handle a flow sequence item. 
void expectFlowSequenceItem(Flag!"first" first)() @safe { if(event_.id == EventID.sequenceEnd) { indent_ = popIndent(); --flowLevel_; static if(!first) if(canonical_) { writeIndicator(",", No.needWhitespace); writeIndent(); } writeIndicator("]", No.needWhitespace); nextExpected(popState()); return; } static if(!first){writeIndicator(",", No.needWhitespace);} if(canonical_ || column_ > bestWidth_){writeIndent();} pushState!"expectFlowSequenceItem!(No.first)"(); expectSequenceNode(); } //Flow mapping handlers. ///Handle a flow mapping. void expectFlowMapping() @safe { writeIndicator("{", Yes.needWhitespace, Yes.whitespace); ++flowLevel_; increaseIndent(Yes.flow); nextExpected!"expectFlowMappingKey!(Yes.first)"(); } ///Handle a key in a flow mapping. void expectFlowMappingKey(Flag!"first" first)() @safe { if(event_.id == EventID.mappingEnd) { indent_ = popIndent(); --flowLevel_; static if (!first) if(canonical_) { writeIndicator(",", No.needWhitespace); writeIndent(); } writeIndicator("}", No.needWhitespace); nextExpected(popState()); return; } static if(!first){writeIndicator(",", No.needWhitespace);} if(canonical_ || column_ > bestWidth_){writeIndent();} if(!canonical_ && checkSimpleKey()) { pushState!"expectFlowMappingSimpleValue"(); expectMappingNode(true); return; } writeIndicator("?", Yes.needWhitespace); pushState!"expectFlowMappingValue"(); expectMappingNode(); } ///Handle a simple value in a flow mapping. void expectFlowMappingSimpleValue() @safe { writeIndicator(":", No.needWhitespace); pushState!"expectFlowMappingKey!(No.first)"(); expectMappingNode(); } ///Handle a complex value in a flow mapping. void expectFlowMappingValue() @safe { if(canonical_ || column_ > bestWidth_){writeIndent();} writeIndicator(":", Yes.needWhitespace); pushState!"expectFlowMappingKey!(No.first)"(); expectMappingNode(); } //Block sequence handlers. ///Handle a block sequence. 
void expectBlockSequence() @safe { const indentless = (context_ == Context.mappingNoSimpleKey || context_ == Context.mappingSimpleKey) && !indentation_; increaseIndent(No.flow, indentless); nextExpected!"expectBlockSequenceItem!(Yes.first)"(); } ///Handle a block sequence item. void expectBlockSequenceItem(Flag!"first" first)() @safe { static if(!first) if(event_.id == EventID.sequenceEnd) { indent_ = popIndent(); nextExpected(popState()); return; } writeIndent(); writeIndicator("-", Yes.needWhitespace, No.whitespace, Yes.indentation); pushState!"expectBlockSequenceItem!(No.first)"(); expectSequenceNode(); } //Block mapping handlers. ///Handle a block mapping. void expectBlockMapping() @safe { increaseIndent(No.flow); nextExpected!"expectBlockMappingKey!(Yes.first)"(); } ///Handle a key in a block mapping. void expectBlockMappingKey(Flag!"first" first)() @safe { static if(!first) if(event_.id == EventID.mappingEnd) { indent_ = popIndent(); nextExpected(popState()); return; } writeIndent(); if(checkSimpleKey()) { pushState!"expectBlockMappingSimpleValue"(); expectMappingNode(true); return; } writeIndicator("?", Yes.needWhitespace, No.whitespace, Yes.indentation); pushState!"expectBlockMappingValue"(); expectMappingNode(); } ///Handle a simple value in a block mapping. void expectBlockMappingSimpleValue() @safe { writeIndicator(":", No.needWhitespace); pushState!"expectBlockMappingKey!(No.first)"(); expectMappingNode(); } ///Handle a complex value in a block mapping. void expectBlockMappingValue() @safe { writeIndent(); writeIndicator(":", Yes.needWhitespace, No.whitespace, Yes.indentation); pushState!"expectBlockMappingKey!(No.first)"(); expectMappingNode(); } //Checkers. ///Check if an empty sequence is next. bool checkEmptySequence() const @safe pure nothrow { return event_.id == EventID.sequenceStart && events_.length > 0 && events_.peek().id == EventID.sequenceEnd; } ///Check if an empty mapping is next. 
bool checkEmptyMapping() const @safe pure nothrow { return event_.id == EventID.mappingStart && events_.length > 0 && events_.peek().id == EventID.mappingEnd; } ///Check if an empty document is next. bool checkEmptyDocument() const @safe pure nothrow { if(event_.id != EventID.documentStart || events_.length == 0) { return false; } const event = events_.peek(); const emptyScalar = event.id == EventID.scalar && (event.anchor is null) && (event.tag is null) && event.implicit && event.value == ""; return emptyScalar; } ///Check if a simple key is next. bool checkSimpleKey() @safe { uint length; const id = event_.id; const scalar = id == EventID.scalar; const collectionStart = id == EventID.mappingStart || id == EventID.sequenceStart; if((id == EventID.alias_ || scalar || collectionStart) && (event_.anchor !is null)) { if(preparedAnchor_ is null) { preparedAnchor_ = prepareAnchor(event_.anchor); } length += preparedAnchor_.length; } if((scalar || collectionStart) && (event_.tag !is null)) { if(preparedTag_ is null){preparedTag_ = prepareTag(event_.tag);} length += preparedTag_.length; } if(scalar) { if(analysis_.flags.isNull){analysis_ = analyzeScalar(event_.value);} length += analysis_.scalar.length; } if(length >= 128){return false;} return id == EventID.alias_ || (scalar && !analysis_.flags.empty && !analysis_.flags.multiline) || checkEmptySequence() || checkEmptyMapping(); } ///Process and write a scalar. 
void processScalar() @safe { if(analysis_.flags.isNull){analysis_ = analyzeScalar(event_.value);} if(style_ == ScalarStyle.invalid) { style_ = chooseScalarStyle(); } //if(analysis_.flags.multiline && (context_ != Context.mappingSimpleKey) && // ([ScalarStyle.invalid, ScalarStyle.plain, ScalarStyle.singleQuoted, ScalarStyle.doubleQuoted) // .canFind(style_)) //{ // writeIndent(); //} auto writer = ScalarWriter!(Range, CharType)(&this, analysis_.scalar, context_ != Context.mappingSimpleKey); final switch(style_) { case ScalarStyle.invalid: assert(false); case ScalarStyle.doubleQuoted: writer.writeDoubleQuoted(); break; case ScalarStyle.singleQuoted: writer.writeSingleQuoted(); break; case ScalarStyle.folded: writer.writeFolded(); break; case ScalarStyle.literal: writer.writeLiteral(); break; case ScalarStyle.plain: writer.writePlain(); break; } analysis_.flags.isNull = true; style_ = ScalarStyle.invalid; } ///Process and write an anchor/alias. void processAnchor(const string indicator) @safe { if(event_.anchor is null) { preparedAnchor_ = null; return; } if(preparedAnchor_ is null) { preparedAnchor_ = prepareAnchor(event_.anchor); } if(preparedAnchor_ !is null && preparedAnchor_ != "") { writeIndicator(indicator, Yes.needWhitespace); writeString(preparedAnchor_); } preparedAnchor_ = null; } ///Process and write a tag. void processTag() @safe { string tag = event_.tag; if(event_.id == EventID.scalar) { if(style_ == ScalarStyle.invalid){style_ = chooseScalarStyle();} if((!canonical_ || (tag is null)) && (((tag == "tag:yaml.org,2002:str") && event_.implicit) || (style_ == ScalarStyle.plain ? 
event_.implicit : !event_.implicit && (tag is null)))) { preparedTag_ = null; return; } if(event_.implicit && (tag is null)) { tag = "!"; preparedTag_ = null; } } else if((!canonical_ || (tag is null)) && event_.implicit) { preparedTag_ = null; return; } assert(tag != "", "Tag is not specified"); if(preparedTag_ is null){preparedTag_ = prepareTag(tag);} if(preparedTag_ !is null && preparedTag_ != "") { writeIndicator(preparedTag_, Yes.needWhitespace); } preparedTag_ = null; } ///Determine style to write the current scalar in. ScalarStyle chooseScalarStyle() @safe { if(analysis_.flags.isNull){analysis_ = analyzeScalar(event_.value);} const style = event_.scalarStyle; const invalidOrPlain = style == ScalarStyle.invalid || style == ScalarStyle.plain; const block = style == ScalarStyle.literal || style == ScalarStyle.folded; const singleQuoted = style == ScalarStyle.singleQuoted; const doubleQuoted = style == ScalarStyle.doubleQuoted; const allowPlain = flowLevel_ > 0 ? analysis_.flags.allowFlowPlain : analysis_.flags.allowBlockPlain; //simple empty or multiline scalars can't be written in plain style const simpleNonPlain = (context_ == Context.mappingSimpleKey) && (analysis_.flags.empty || analysis_.flags.multiline); if(doubleQuoted || canonical_) { return ScalarStyle.doubleQuoted; } if(invalidOrPlain && event_.implicit && !simpleNonPlain && allowPlain) { return ScalarStyle.plain; } if(block && flowLevel_ == 0 && context_ != Context.mappingSimpleKey && analysis_.flags.allowBlock) { return style; } if((invalidOrPlain || singleQuoted) && analysis_.flags.allowSingleQuoted && !(context_ == Context.mappingSimpleKey && analysis_.flags.multiline)) { return ScalarStyle.singleQuoted; } return ScalarStyle.doubleQuoted; } ///Prepare YAML version string for output. 
static string prepareVersion(const string YAMLVersion) @safe in(YAMLVersion.split(".")[0] == "1", "Unsupported YAML version: " ~ YAMLVersion) { return YAMLVersion; } ///Encode an Unicode character for tag directive and write it to writer. static void encodeChar(Writer)(ref Writer writer, in dchar c) @safe { char[4] data; const bytes = encode(data, c); //For each byte add string in format %AB , where AB are hex digits of the byte. foreach(const char b; data[0 .. bytes]) { formattedWrite(writer, "%%%02X", cast(ubyte)b); } } ///Prepare tag directive handle for output. static string prepareTagHandle(const string handle) @safe in(handle != "", "Tag handle must not be empty") in(handle.drop(1).dropBack(1).all!(c => isAlphaNum(c) || c.among!('-', '_')), "Tag handle contains invalid characters") { return handle; } ///Prepare tag directive prefix for output. static string prepareTagPrefix(const string prefix) @safe in(prefix != "", "Tag prefix must not be empty") { auto appender = appender!string(); const int offset = prefix[0] == '!'; size_t start, end; foreach(const size_t i, const dchar c; prefix) { const size_t idx = i + offset; if(isAlphaNum(c) || c.among!('-', ';', '/', '?', ':', '@', '&', '=', '+', '$', ',', '_', '.', '!', '~', '*', '\\', '\'', '(', ')', '[', ']', '%')) { end = idx + 1; continue; } if(start < idx){appender.put(prefix[start .. idx]);} start = end = idx + 1; encodeChar(appender, c); } end = min(end, prefix.length); if(start < end){appender.put(prefix[start .. end]);} return appender.data; } ///Prepare tag for output. string prepareTag(in string tag) @safe in(tag != "", "Tag must not be empty") { string tagString = tag; if (tagString == "!") return "!"; string handle; string suffix = tagString; //Sort lexicographically by prefix. sort!"icmp(a.prefix, b.prefix) < 0"(tagDirectives_); foreach(ref pair; tagDirectives_) { auto prefix = pair.prefix; if(tagString.startsWith(prefix) && (prefix != "!" 
|| prefix.length < tagString.length)) { handle = pair.handle; suffix = tagString[prefix.length .. $]; } } auto appender = appender!string(); appender.put(handle !is null && handle != "" ? handle : "!<"); size_t start, end; foreach(const dchar c; suffix) { if(isAlphaNum(c) || c.among!('-', ';', '/', '?', ':', '@', '&', '=', '+', '$', ',', '_', '.', '~', '*', '\\', '\'', '(', ')', '[', ']') || (c == '!' && handle != "!")) { ++end; continue; } if(start < end){appender.put(suffix[start .. end]);} start = end = end + 1; encodeChar(appender, c); } if(start < end){appender.put(suffix[start .. end]);} if(handle is null || handle == ""){appender.put(">");} return appender.data; } ///Prepare anchor for output. static string prepareAnchor(const string anchor) @safe in(anchor != "", "Anchor must not be empty") in(anchor.all!isNSAnchorName, "Anchor contains invalid characters") { return anchor; } ///Analyze specifed scalar and return the analysis result. static ScalarAnalysis analyzeScalar(string scalar) @safe { ScalarAnalysis analysis; analysis.flags.isNull = false; analysis.scalar = scalar; //Empty scalar is a special case. if(scalar is null || scalar == "") { with(ScalarAnalysis.AnalysisFlags) analysis.flags = empty | allowBlockPlain | allowSingleQuoted | allowDoubleQuoted; return analysis; } //Indicators and special characters (All false by default). bool blockIndicators, flowIndicators, lineBreaks, specialCharacters; //Important whitespace combinations (All false by default). bool leadingSpace, leadingBreak, trailingSpace, trailingBreak, breakSpace, spaceBreak; //Check document indicators. if(scalar.startsWith("---", "...")) { blockIndicators = flowIndicators = true; } //First character or preceded by a whitespace. bool preceededByWhitespace = true; //Last character or followed by a whitespace. 
bool followedByWhitespace = scalar.length == 1 || scalar[1].among!(' ', '\t', '\0', '\n', '\r', '\u0085', '\u2028', '\u2029'); //The previous character is a space/break (false by default). bool previousSpace, previousBreak; foreach(const size_t index, const dchar c; scalar) { //Check for indicators. if(index == 0) { //Leading indicators are special characters. if(c.isSpecialChar) { flowIndicators = blockIndicators = true; } if(':' == c || '?' == c) { flowIndicators = true; if(followedByWhitespace){blockIndicators = true;} } if(c == '-' && followedByWhitespace) { flowIndicators = blockIndicators = true; } } else { //Some indicators cannot appear within a scalar as well. if(c.isFlowIndicator){flowIndicators = true;} if(c == ':') { flowIndicators = true; if(followedByWhitespace){blockIndicators = true;} } if(c == '#' && preceededByWhitespace) { flowIndicators = blockIndicators = true; } } //Check for line breaks, special, and unicode characters. if(c.isNewLine){lineBreaks = true;} if(!(c == '\n' || (c >= '\x20' && c <= '\x7E')) && !((c == '\u0085' || (c >= '\xA0' && c <= '\uD7FF') || (c >= '\uE000' && c <= '\uFFFD')) && c != '\uFEFF')) { specialCharacters = true; } //Detect important whitespace combinations. if(c == ' ') { if(index == 0){leadingSpace = true;} if(index == scalar.length - 1){trailingSpace = true;} if(previousBreak){breakSpace = true;} previousSpace = true; previousBreak = false; } else if(c.isNewLine) { if(index == 0){leadingBreak = true;} if(index == scalar.length - 1){trailingBreak = true;} if(previousSpace){spaceBreak = true;} previousSpace = false; previousBreak = true; } else { previousSpace = previousBreak = false; } //Prepare for the next character. preceededByWhitespace = c.isSpace != 0; followedByWhitespace = index + 2 >= scalar.length || scalar[index + 2].isSpace; } with(ScalarAnalysis.AnalysisFlags) { //Let's decide what styles are allowed. 
analysis.flags |= allowFlowPlain | allowBlockPlain | allowSingleQuoted | allowDoubleQuoted | allowBlock; //Leading and trailing whitespaces are bad for plain scalars. if(leadingSpace || leadingBreak || trailingSpace || trailingBreak) { analysis.flags &= ~(allowFlowPlain | allowBlockPlain); } //We do not permit trailing spaces for block scalars. if(trailingSpace) { analysis.flags &= ~allowBlock; } //Spaces at the beginning of a new line are only acceptable for block //scalars. if(breakSpace) { analysis.flags &= ~(allowFlowPlain | allowBlockPlain | allowSingleQuoted); } //Spaces followed by breaks, as well as special character are only //allowed for double quoted scalars. if(spaceBreak || specialCharacters) { analysis.flags &= ~(allowFlowPlain | allowBlockPlain | allowSingleQuoted | allowBlock); } //Although the plain scalar writer supports breaks, we never emit //multiline plain scalars. if(lineBreaks) { analysis.flags &= ~(allowFlowPlain | allowBlockPlain); analysis.flags |= multiline; } //Flow indicators are forbidden for flow plain scalars. if(flowIndicators) { analysis.flags &= ~allowFlowPlain; } //Block indicators are forbidden for block plain scalars. 
if(blockIndicators) { analysis.flags &= ~allowBlockPlain; } } return analysis; } @safe unittest { with(analyzeScalar("").flags) { // workaround for empty being std.range.primitives.empty here alias empty = ScalarAnalysis.AnalysisFlags.empty; assert(empty && allowBlockPlain && allowSingleQuoted && allowDoubleQuoted); } with(analyzeScalar("a").flags) { assert(allowFlowPlain && allowBlockPlain && allowSingleQuoted && allowDoubleQuoted && allowBlock); } with(analyzeScalar(" ").flags) { assert(allowSingleQuoted && allowDoubleQuoted); } with(analyzeScalar(" a").flags) { assert(allowSingleQuoted && allowDoubleQuoted); } with(analyzeScalar("a ").flags) { assert(allowSingleQuoted && allowDoubleQuoted); } with(analyzeScalar("\na").flags) { assert(allowSingleQuoted && allowDoubleQuoted); } with(analyzeScalar("a\n").flags) { assert(allowSingleQuoted && allowDoubleQuoted); } with(analyzeScalar("\n").flags) { assert(multiline && allowSingleQuoted && allowDoubleQuoted && allowBlock); } with(analyzeScalar(" \n").flags) { assert(multiline && allowDoubleQuoted); } with(analyzeScalar("\n a").flags) { assert(multiline && allowDoubleQuoted && allowBlock); } } //Writers. ///Start the YAML stream (write the unicode byte order mark). void writeStreamStart() @safe { //Write BOM (except for UTF-8) static if(is(CharType == wchar) || is(CharType == dchar)) { stream_.put(cast(CharType)'\uFEFF'); } } ///End the YAML stream. void writeStreamEnd() @safe {} ///Write an indicator (e.g. ":", "[", ">", etc.). void writeIndicator(const scope char[] indicator, const Flag!"needWhitespace" needWhitespace, const Flag!"whitespace" whitespace = No.whitespace, const Flag!"indentation" indentation = No.indentation) @safe { const bool prefixSpace = !whitespace_ && needWhitespace; whitespace_ = whitespace; indentation_ = indentation_ && indentation; openEnded_ = false; column_ += indicator.length; if(prefixSpace) { ++column_; writeString(" "); } writeString(indicator); } ///Write indentation. 
void writeIndent() @safe { const indent = indent_ == -1 ? 0 : indent_; if(!indentation_ || column_ > indent || (column_ == indent && !whitespace_)) { writeLineBreak(); } if(column_ < indent) { whitespace_ = true; //Used to avoid allocation of arbitrary length strings. static immutable spaces = " "; size_t numSpaces = indent - column_; column_ = indent; while(numSpaces >= spaces.length) { writeString(spaces); numSpaces -= spaces.length; } writeString(spaces[0 .. numSpaces]); } } ///Start new line. void writeLineBreak(const scope char[] data = null) @safe { whitespace_ = indentation_ = true; ++line_; column_ = 0; writeString(data is null ? lineBreak(bestLineBreak_) : data); } ///Write a YAML version directive. void writeVersionDirective(const string versionText) @safe { writeString("%YAML "); writeString(versionText); writeLineBreak(); } ///Write a tag directive. void writeTagDirective(const string handle, const string prefix) @safe { writeString("%TAG "); writeString(handle); writeString(" "); writeString(prefix); writeLineBreak(); } void nextExpected(string D)() @safe { state_ = mixin("function(typeof(this)* self) { self."~D~"(); }"); } void nextExpected(EmitterFunction f) @safe { state_ = f; } void callNext() @safe { state_(&this); } } private: ///RAII struct used to write out scalar values. struct ScalarWriter(Range, CharType) { invariant() { assert(emitter_.bestIndent_ > 0 && emitter_.bestIndent_ < 10, "Emitter bestIndent must be 1 to 9 for one-character indent hint"); } private: @disable int opCmp(ref Emitter!(Range, CharType)); @disable bool opEquals(ref Emitter!(Range, CharType)); ///Used as "null" UTF-32 character. static immutable dcharNone = dchar.max; ///Emitter used to emit the scalar. Emitter!(Range, CharType)* emitter_; ///UTF-8 encoded text of the scalar to write. string text_; ///Can we split the scalar into multiple lines? bool split_; ///Are we currently going over spaces in the text? 
bool spaces_; ///Are we currently going over line breaks in the text? bool breaks_; ///Start and end byte of the text range we're currently working with. size_t startByte_, endByte_; ///End byte of the text range including the currently processed character. size_t nextEndByte_; ///Start and end character of the text range we're currently working with. long startChar_, endChar_; public: ///Construct a ScalarWriter using emitter to output text. this(Emitter!(Range, CharType)* emitter, string text, const bool split = true) @safe nothrow { emitter_ = emitter; text_ = text; split_ = split; } ///Write text as single quoted scalar. void writeSingleQuoted() @safe { emitter_.writeIndicator("\'", Yes.needWhitespace); spaces_ = breaks_ = false; resetTextPosition(); do { const dchar c = nextChar(); if(spaces_) { if(c != ' ' && tooWide() && split_ && startByte_ != 0 && endByte_ != text_.length) { writeIndent(Flag!"ResetSpace".no); updateRangeStart(); } else if(c != ' ') { writeCurrentRange(Flag!"UpdateColumn".yes); } } else if(breaks_) { if(!c.isNewLine) { writeStartLineBreak(); writeLineBreaks(); emitter_.writeIndent(); } } else if((c == dcharNone || c == '\'' || c == ' ' || c.isNewLine) && startChar_ < endChar_) { writeCurrentRange(Flag!"UpdateColumn".yes); } if(c == '\'') { emitter_.column_ += 2; emitter_.writeString("\'\'"); startByte_ = endByte_ + 1; startChar_ = endChar_ + 1; } updateBreaks(c, Flag!"UpdateSpaces".yes); }while(endByte_ < text_.length); emitter_.writeIndicator("\'", No.needWhitespace); } ///Write text as double quoted scalar. 
void writeDoubleQuoted() @safe { resetTextPosition(); emitter_.writeIndicator("\"", Yes.needWhitespace); do { const dchar c = nextChar(); //handle special characters if(c == dcharNone || c.among!('\"', '\\', '\u0085', '\u2028', '\u2029', '\uFEFF') || !((c >= '\x20' && c <= '\x7E') || ((c >= '\xA0' && c <= '\uD7FF') || (c >= '\uE000' && c <= '\uFFFD')))) { if(startChar_ < endChar_) { writeCurrentRange(Flag!"UpdateColumn".yes); } if(c != dcharNone) { auto appender = appender!string(); if(const dchar es = toEscape(c)) { appender.put('\\'); appender.put(es); } else { //Write an escaped Unicode character. const format = c <= 255 ? "\\x%02X": c <= 65535 ? "\\u%04X": "\\U%08X"; formattedWrite(appender, format, cast(uint)c); } emitter_.column_ += appender.data.length; emitter_.writeString(appender.data); startChar_ = endChar_ + 1; startByte_ = nextEndByte_; } } if((endByte_ > 0 && endByte_ < text_.length - strideBack(text_, text_.length)) && (c == ' ' || startChar_ >= endChar_) && (emitter_.column_ + endChar_ - startChar_ > emitter_.bestWidth_) && split_) { //text_[2:1] is ok in Python but not in D, so we have to use min() emitter_.writeString(text_[min(startByte_, endByte_) .. endByte_]); emitter_.writeString("\\"); emitter_.column_ += startChar_ - endChar_ + 1; startChar_ = max(startChar_, endChar_); startByte_ = max(startByte_, endByte_); writeIndent(Flag!"ResetSpace".yes); if(charAtStart() == ' ') { emitter_.writeString("\\"); ++emitter_.column_; } } }while(endByte_ < text_.length); emitter_.writeIndicator("\"", No.needWhitespace); } ///Write text as folded block scalar. 
void writeFolded() @safe { initBlock('>'); bool leadingSpace = true; spaces_ = false; breaks_ = true; resetTextPosition(); do { const dchar c = nextChar(); if(breaks_) { if(!c.isNewLine) { if(!leadingSpace && c != dcharNone && c != ' ') { writeStartLineBreak(); } leadingSpace = (c == ' '); writeLineBreaks(); if(c != dcharNone){emitter_.writeIndent();} } } else if(spaces_) { if(c != ' ' && tooWide()) { writeIndent(Flag!"ResetSpace".no); updateRangeStart(); } else if(c != ' ') { writeCurrentRange(Flag!"UpdateColumn".yes); } } else if(c == dcharNone || c.isNewLine || c == ' ') { writeCurrentRange(Flag!"UpdateColumn".yes); if(c == dcharNone){emitter_.writeLineBreak();} } updateBreaks(c, Flag!"UpdateSpaces".yes); }while(endByte_ < text_.length); } ///Write text as literal block scalar. void writeLiteral() @safe { initBlock('|'); breaks_ = true; resetTextPosition(); do { const dchar c = nextChar(); if(breaks_) { if(!c.isNewLine) { writeLineBreaks(); if(c != dcharNone){emitter_.writeIndent();} } } else if(c == dcharNone || c.isNewLine) { writeCurrentRange(Flag!"UpdateColumn".no); if(c == dcharNone){emitter_.writeLineBreak();} } updateBreaks(c, Flag!"UpdateSpaces".no); }while(endByte_ < text_.length); } ///Write text as plain scalar. 
void writePlain() @safe { if(emitter_.context_ == Emitter!(Range, CharType).Context.root){emitter_.openEnded_ = true;} if(text_ == ""){return;} if(!emitter_.whitespace_) { ++emitter_.column_; emitter_.writeString(" "); } emitter_.whitespace_ = emitter_.indentation_ = false; spaces_ = breaks_ = false; resetTextPosition(); do { const dchar c = nextChar(); if(spaces_) { if(c != ' ' && tooWide() && split_) { writeIndent(Flag!"ResetSpace".yes); updateRangeStart(); } else if(c != ' ') { writeCurrentRange(Flag!"UpdateColumn".yes); } } else if(breaks_) { if(!c.isNewLine) { writeStartLineBreak(); writeLineBreaks(); writeIndent(Flag!"ResetSpace".yes); } } else if(c == dcharNone || c.isNewLine || c == ' ') { writeCurrentRange(Flag!"UpdateColumn".yes); } updateBreaks(c, Flag!"UpdateSpaces".yes); }while(endByte_ < text_.length); } private: ///Get next character and move end of the text range to it. @property dchar nextChar() pure @safe { ++endChar_; endByte_ = nextEndByte_; if(endByte_ >= text_.length){return dcharNone;} const c = text_[nextEndByte_]; //c is ascii, no need to decode. if(c < 0x80) { ++nextEndByte_; return c; } return decode(text_, nextEndByte_); } ///Get character at start of the text range. @property dchar charAtStart() const pure @safe { size_t idx = startByte_; return decode(text_, idx); } ///Is the current line too wide? @property bool tooWide() const pure @safe nothrow { return startChar_ + 1 == endChar_ && emitter_.column_ > emitter_.bestWidth_; } ///Determine hints (indicators) for block scalar. size_t determineBlockHints(char[] hints, uint bestIndent) const pure @safe { size_t hintsIdx; if(text_.length == 0) return hintsIdx; dchar lastChar(const string str, ref size_t end) { size_t idx = end = end - strideBack(str, end); return decode(text_, idx); } size_t end = text_.length; const last = lastChar(text_, end); const secondLast = end > 0 ? 
lastChar(text_, end) : 0; if(text_[0].isNewLine || text_[0] == ' ') { hints[hintsIdx++] = cast(char)('0' + bestIndent); } if(!last.isNewLine) { hints[hintsIdx++] = '-'; } else if(std.utf.count(text_) == 1 || secondLast.isNewLine) { hints[hintsIdx++] = '+'; } return hintsIdx; } ///Initialize for block scalar writing with specified indicator. void initBlock(const char indicator) @safe { char[4] hints; hints[0] = indicator; const hintsLength = 1 + determineBlockHints(hints[1 .. $], emitter_.bestIndent_); emitter_.writeIndicator(hints[0 .. hintsLength], Yes.needWhitespace); if(hints.length > 0 && hints[$ - 1] == '+') { emitter_.openEnded_ = true; } emitter_.writeLineBreak(); } ///Write out the current text range. void writeCurrentRange(const Flag!"UpdateColumn" updateColumn) @safe { emitter_.writeString(text_[startByte_ .. endByte_]); if(updateColumn){emitter_.column_ += endChar_ - startChar_;} updateRangeStart(); } ///Write line breaks in the text range. void writeLineBreaks() @safe { foreach(const dchar br; text_[startByte_ .. endByte_]) { if(br == '\n'){emitter_.writeLineBreak();} else { char[4] brString; const bytes = encode(brString, br); emitter_.writeLineBreak(brString[0 .. bytes]); } } updateRangeStart(); } ///Write line break if start of the text range is a newline. void writeStartLineBreak() @safe { if(charAtStart == '\n'){emitter_.writeLineBreak();} } ///Write indentation, optionally resetting whitespace/indentation flags. void writeIndent(const Flag!"ResetSpace" resetSpace) @safe { emitter_.writeIndent(); if(resetSpace) { emitter_.whitespace_ = emitter_.indentation_ = false; } } ///Move start of text range to its end. void updateRangeStart() pure @safe nothrow { startByte_ = endByte_; startChar_ = endChar_; } ///Update the line breaks_ flag, optionally updating the spaces_ flag. 
void updateBreaks(in dchar c, const Flag!"UpdateSpaces" updateSpaces) pure @safe { if(c == dcharNone){return;} breaks_ = (c.isNewLine != 0); if(updateSpaces){spaces_ = c == ' ';} } ///Move to the beginning of text. void resetTextPosition() pure @safe nothrow { startByte_ = endByte_ = nextEndByte_ = 0; startChar_ = endChar_ = -1; } } dub-1.40.0/source/dub/internal/dyaml/encoding.d000066400000000000000000000005251477246567400213250ustar00rootroot00000000000000// Copyright Ferdinand Majerech 2014. // Distributed under the Boost Software License, Version 1.0. // (See accompanying file LICENSE_1_0.txt or copy at // http://www.boost.org/LICENSE_1_0.txt) module dub.internal.dyaml.encoding; import dub.internal.tinyendian; alias Encoding = dub.internal.tinyendian.UTFEncoding; dub-1.40.0/source/dub/internal/dyaml/escapes.d000066400000000000000000000057131477246567400211660ustar00rootroot00000000000000 // Copyright Ferdinand Majerech 2011. // Distributed under the Boost Software License, Version 1.0. // (See accompanying file LICENSE_1_0.txt or copy at // http://www.boost.org/LICENSE_1_0.txt) module dub.internal.dyaml.escapes; package: import std.meta : AliasSeq; alias escapes = AliasSeq!('0', 'a', 'b', 't', '\t', 'n', 'v', 'f', 'r', 'e', ' ', '/', '\"', '\\', 'N', '_', 'L', 'P'); /// YAML hex codes specifying the length of the hex number. alias escapeHexCodeList = AliasSeq!('x', 'u', 'U'); /// Convert a YAML escape to a dchar. 
dchar fromEscape(dchar escape) @safe pure nothrow @nogc { switch(escape) { case '0': return '\0'; case 'a': return '\x07'; case 'b': return '\x08'; case 't': return '\x09'; case '\t': return '\x09'; case 'n': return '\x0A'; case 'v': return '\x0B'; case 'f': return '\x0C'; case 'r': return '\x0D'; case 'e': return '\x1B'; case '/': return '/'; case ' ': return '\x20'; case '\"': return '\"'; case '\\': return '\\'; case 'N': return '\x85'; //'\u0085'; case '_': return '\xA0'; case 'L': return '\u2028'; case 'P': return '\u2029'; default: assert(false, "No such YAML escape"); } } /** * Convert a dchar to a YAML escape. * * Params: * value = The possibly escapable character. * * Returns: * If the character passed as parameter can be escaped, returns the matching * escape, otherwise returns a null character. */ dchar toEscape(dchar value) @safe pure nothrow @nogc { switch(value) { case '\0': return '0'; case '\x07': return 'a'; case '\x08': return 'b'; case '\x09': return 't'; case '\x0A': return 'n'; case '\x0B': return 'v'; case '\x0C': return 'f'; case '\x0D': return 'r'; case '\x1B': return 'e'; case '\"': return '\"'; case '\\': return '\\'; case '\xA0': return '_'; case '\x85': return 'N'; case '\u2028': return 'L'; case '\u2029': return 'P'; default: return 0; } } /// Get the length of a hexadecimal number determined by its hex code. /// /// Need a function as associative arrays don't work with @nogc. /// (And this may be even faster with a function.) 
uint escapeHexLength(dchar hexCode) @safe pure nothrow @nogc { switch(hexCode) { case 'x': return 2; case 'u': return 4; case 'U': return 8; default: assert(false, "No such YAML hex code"); } } // Issue #302: Support optional escaping of forward slashes in string // for JSON compatibility @safe unittest { import dub.internal.dyaml.loader : Loader; const str = `{ "forward/slashes": "can\/be\/optionally\/escaped" }`; auto node = Loader.fromString(str).load(); assert(node["forward/slashes"] == "can/be/optionally/escaped"); } dub-1.40.0/source/dub/internal/dyaml/event.d000066400000000000000000000262661477246567400206720ustar00rootroot00000000000000 // Copyright Ferdinand Majerech 2011. // Distributed under the Boost Software License, Version 1.0. // (See accompanying file LICENSE_1_0.txt or copy at // http://www.boost.org/LICENSE_1_0.txt) /** * YAML events. * Code based on PyYAML: http://www.pyyaml.org */ module dub.internal.dyaml.event; import std.array; import std.conv; import dub.internal.dyaml.exception; import dub.internal.dyaml.reader; import dub.internal.dyaml.tagdirective; import dub.internal.dyaml.style; package: ///Event types. enum EventID : ubyte { invalid = 0, /// Invalid (uninitialized) event. streamStart, /// Stream start streamEnd, /// Stream end documentStart, /// Document start documentEnd, /// Document end alias_, /// Alias scalar, /// Scalar sequenceStart, /// Sequence start sequenceEnd, /// Sequence end mappingStart, /// Mapping start mappingEnd /// Mapping end } /** * YAML event produced by parser. * * 48 bytes on 64bit. */ struct Event { @disable int opCmp(ref Event); ///Value of the event, if any. string value; ///Start position of the event in file/stream. Mark startMark; ///End position of the event in file/stream. Mark endMark; union { struct { ///Anchor of the event, if any. string _anchor; ///Tag of the event, if any. string _tag; } ///Tag directives, if this is a DocumentStart. 
//TagDirectives tagDirectives; TagDirective[] _tagDirectives; } ///Event type. EventID id = EventID.invalid; ///Style of scalar event, if this is a scalar event. ScalarStyle scalarStyle = ScalarStyle.invalid; ///Should the tag be implicitly resolved? bool implicit; /** * Is this document event explicit? * * Used if this is a DocumentStart or DocumentEnd. */ alias explicitDocument = implicit; ///Collection style, if this is a SequenceStart or MappingStart. CollectionStyle collectionStyle = CollectionStyle.invalid; ///Is this a null (uninitialized) event? @property bool isNull() const pure @safe nothrow {return id == EventID.invalid;} ///Get string representation of the token ID. @property string idString() const @safe {return to!string(id);} auto ref anchor() inout @trusted pure { assert(id != EventID.documentStart, "DocumentStart events cannot have anchors."); return _anchor; } auto ref tag() inout @trusted pure { assert(id != EventID.documentStart, "DocumentStart events cannot have tags."); return _tag; } auto ref tagDirectives() inout @trusted pure { assert(id == EventID.documentStart, "Only DocumentStart events have tag directives."); return _tagDirectives; } void toString(W)(ref W writer) const { import std.algorithm.iteration : substitute; import std.format : formattedWrite; import std.range : put; final switch (id) { case EventID.scalar: put(writer, "=VAL "); if (anchor != "") { writer.formattedWrite!"&%s " (anchor); } if (tag != "") { writer.formattedWrite!"<%s> " (tag); } final switch(scalarStyle) { case ScalarStyle.singleQuoted: put(writer, "'"); break; case ScalarStyle.doubleQuoted: put(writer, "\""); break; case ScalarStyle.literal: put(writer, "|"); break; case ScalarStyle.folded: put(writer, ">"); break; case ScalarStyle.invalid: //default to plain case ScalarStyle.plain: put(writer, ":"); break; } if (value != "") { writer.formattedWrite!"%s"(value.substitute("\n", "\\n", `\`, `\\`, "\r", "\\r", "\t", "\\t", "\b", "\\b")); } break; case 
EventID.streamStart: put(writer, "+STR"); break; case EventID.documentStart: put(writer, "+DOC"); if (explicitDocument) { put(writer, " ---"); } break; case EventID.mappingStart: put(writer, "+MAP"); if (collectionStyle == CollectionStyle.flow) { put(writer, " {}"); } if (anchor != "") { put(writer, " &"); put(writer, anchor); } if (tag != "") { put(writer, " <"); put(writer, tag); put(writer, ">"); } break; case EventID.sequenceStart: put(writer, "+SEQ"); if (collectionStyle == CollectionStyle.flow) { put(writer, " []"); } if (anchor != "") { put(writer, " &"); put(writer, anchor); } if (tag != "") { put(writer, " <"); put(writer, tag); put(writer, ">"); } break; case EventID.streamEnd: put(writer, "-STR"); break; case EventID.documentEnd: put(writer, "-DOC"); if (explicitDocument) { put(writer, " ..."); } break; case EventID.mappingEnd: put(writer, "-MAP"); break; case EventID.sequenceEnd: put(writer, "-SEQ"); break; case EventID.alias_: put(writer, "=ALI *"); put(writer, anchor); break; case EventID.invalid: assert(0, "Invalid EventID produced"); } } } /** * Construct a simple event. * * Params: start = Start position of the event in the file/stream. * end = End position of the event in the file/stream. * anchor = Anchor, if this is an alias event. */ Event event(EventID id)(const Mark start, const Mark end, const string anchor = null) @safe in(!(id == EventID.alias_ && anchor == ""), "Missing anchor for alias event") { Event result; result.startMark = start; result.endMark = end; result.anchor = anchor; result.id = id; return result; } /** * Construct a collection (mapping or sequence) start event. * * Params: start = Start position of the event in the file/stream. * end = End position of the event in the file/stream. * anchor = Anchor of the sequence, if any. * tag = Tag of the sequence, if specified. * implicit = Should the tag be implicitly resolved? * style = Style to use when outputting document. 
*/ Event collectionStartEvent(EventID id) (const Mark start, const Mark end, const string anchor, const string tag, const bool implicit, const CollectionStyle style) pure @safe nothrow { static assert(id == EventID.sequenceStart || id == EventID.sequenceEnd || id == EventID.mappingStart || id == EventID.mappingEnd); Event result; result.startMark = start; result.endMark = end; result.anchor = anchor; result.tag = tag; result.id = id; result.implicit = implicit; result.collectionStyle = style; return result; } /** * Construct a stream start event. * * Params: start = Start position of the event in the file/stream. * end = End position of the event in the file/stream. */ Event streamStartEvent(const Mark start, const Mark end) pure @safe nothrow { Event result; result.startMark = start; result.endMark = end; result.id = EventID.streamStart; return result; } ///Aliases for simple events. alias streamEndEvent = event!(EventID.streamEnd); alias aliasEvent = event!(EventID.alias_); alias sequenceEndEvent = event!(EventID.sequenceEnd); alias mappingEndEvent = event!(EventID.mappingEnd); ///Aliases for collection start events. alias sequenceStartEvent = collectionStartEvent!(EventID.sequenceStart); alias mappingStartEvent = collectionStartEvent!(EventID.mappingStart); /** * Construct a document start event. * * Params: start = Start position of the event in the file/stream. * end = End position of the event in the file/stream. * explicit = Is this an explicit document start? * YAMLVersion = YAML version string of the document. * tagDirectives = Tag directives of the document. 
*/ Event documentStartEvent(const Mark start, const Mark end, const bool explicit, string YAMLVersion, TagDirective[] tagDirectives) pure @safe nothrow { Event result; result.value = YAMLVersion; result.startMark = start; result.endMark = end; result.id = EventID.documentStart; result.explicitDocument = explicit; result.tagDirectives = tagDirectives; return result; } /** * Construct a document end event. * * Params: start = Start position of the event in the file/stream. * end = End position of the event in the file/stream. * explicit = Is this an explicit document end? */ Event documentEndEvent(const Mark start, const Mark end, const bool explicit) pure @safe nothrow { Event result; result.startMark = start; result.endMark = end; result.id = EventID.documentEnd; result.explicitDocument = explicit; return result; } /// Construct a scalar event. /// /// Params: start = Start position of the event in the file/stream. /// end = End position of the event in the file/stream. /// anchor = Anchor of the scalar, if any. /// tag = Tag of the scalar, if specified. /// implicit = Should the tag be implicitly resolved? /// value = String value of the scalar. /// style = Scalar style. Event scalarEvent(const Mark start, const Mark end, const string anchor, const string tag, const bool implicit, const string value, const ScalarStyle style = ScalarStyle.invalid) @safe pure nothrow @nogc { Event result; result.value = value; result.startMark = start; result.endMark = end; result.anchor = anchor; result.tag = tag; result.id = EventID.scalar; result.scalarStyle = style; result.implicit = implicit; return result; } dub-1.40.0/source/dub/internal/dyaml/exception.d000066400000000000000000000130621477246567400215350ustar00rootroot00000000000000 // Copyright Ferdinand Majerech 2011. // Distributed under the Boost Software License, Version 1.0. 
// (See accompanying file LICENSE_1_0.txt or copy at // http://www.boost.org/LICENSE_1_0.txt) ///Exceptions thrown by D:YAML and _exception related code. module dub.internal.dyaml.exception; import std.algorithm; import std.array; import std.conv; import std.exception; import std.format; import std.range; import std.string; import std.typecons; /// Base class for all exceptions thrown by D:YAML. class YAMLException : Exception { mixin basicExceptionCtors; } /// Position in a YAML stream, used for error messages. struct Mark { /// File name. string name = ""; /// Line number. ushort line; /// Column number. ushort column; public: /// Construct a Mark with specified line and column in the file. this(string name, const uint line, const uint column) @safe pure nothrow @nogc { this.name = name; this.line = cast(ushort)min(ushort.max, line); // This *will* overflow on extremely wide files but saves CPU time // (mark ctor takes ~5% of time) this.column = cast(ushort)column; } /// Get a string representation of the mark. void toString(W)(ref W writer) const scope { // Line/column numbers start at zero internally, make them start at 1. void writeClamped(ushort v) { writer.formattedWrite!"%s"(v + 1); if (v == ushort.max) { put(writer, "or higher"); } } put(writer, name); put(writer, ":"); writeClamped(line); put(writer, ","); writeClamped(column); } } /// Base class of YAML exceptions with marked positions of the problem. abstract class MarkedYAMLException : YAMLException { /// Position of the error. 
Mark mark; /// Additional position information, usually the start of a token or scalar Nullable!Mark mark2; /// A label for the extra information string mark2Label; // Construct a MarkedYAMLException with two marks this(string context, const Mark mark, string mark2Label, const Nullable!Mark mark2, string file = __FILE__, size_t line = __LINE__) @safe pure nothrow { super(context, file, line); this.mark = mark; this.mark2 = mark2; this.mark2Label = mark2Label; } // Construct a MarkedYAMLException with specified problem. this(string msg, const Mark mark, string file = __FILE__, size_t line = __LINE__) @safe pure nothrow { super(msg, file, line); this.mark = mark; } /// Custom toString to add context without requiring allocation up-front void toString(W)(ref W sink) const { sink.formattedWrite!"%s@%s(%s): "(typeid(this).name, file, line); put(sink, msg); put(sink, "\n"); mark.toString(sink); if (!mark2.isNull) { put(sink, "\n"); put(sink, mark2Label); put(sink, ":"); mark2.get.toString(sink); } put(sink, "\n"); put(sink, info.toString()); } /// Ditto override void toString(scope void delegate(in char[]) sink) const { toString!(typeof(sink))(sink); } /// An override of message override const(char)[] message() const @safe nothrow { if (mark2.isNull) { return assertNotThrown(text(msg, "\n", mark)); } else { return assertNotThrown(text(msg, "\n", mark, "\n", mark2Label, ": ", mark2.get)); } } } /// Exception thrown on composer errors. class ComposerException : MarkedYAMLException { mixin MarkedExceptionCtors; } /// Exception thrown on constructor errors. class ConstructorException : MarkedYAMLException { mixin MarkedExceptionCtors; } /// Exception thrown on loader errors. class LoaderException : MarkedYAMLException { mixin MarkedExceptionCtors; } /// Exception thrown on node related errors. class NodeException : MarkedYAMLException { mixin MarkedExceptionCtors; } /// Exception thrown on parser errors. 
class ParserException : MarkedYAMLException { mixin MarkedExceptionCtors; } /// Exception thrown on Reader errors. class ReaderException : MarkedYAMLException { mixin MarkedExceptionCtors; } /// Exception thrown on Representer errors. class RepresenterException : YAMLException { mixin basicExceptionCtors; } /// Exception thrown on scanner errors. class ScannerException : MarkedYAMLException { mixin MarkedExceptionCtors; } private: /// Constructors of marked YAML exceptions are identical, so we use a mixin. /// /// See_Also: MarkedYAMLException template MarkedExceptionCtors() { public: this(string msg, const Mark mark1, string mark2Label, const Mark mark2, string file = __FILE__, size_t line = __LINE__) @safe pure nothrow { super(msg, mark1, mark2Label, Nullable!Mark(mark2), file, line); } this(string msg, const Mark mark, string file = __FILE__, size_t line = __LINE__) @safe pure nothrow { super(msg, mark, file, line); } this(string msg, const Mark mark1, string mark2Label, const Nullable!Mark mark2, string file = __FILE__, size_t line = __LINE__) @safe pure nothrow { super(msg, mark1, mark2Label, mark2, file, line); } } dub-1.40.0/source/dub/internal/dyaml/linebreak.d000066400000000000000000000013601477246567400214710ustar00rootroot00000000000000 // Copyright Ferdinand Majerech 2011. // Distributed under the Boost Software License, Version 1.0. // (See accompanying file LICENSE_1_0.txt or copy at // http://www.boost.org/LICENSE_1_0.txt) module dub.internal.dyaml.linebreak; ///Enumerates platform specific line breaks. enum LineBreak { ///Unix line break ("\n"). unix, ///Windows line break ("\r\n"). windows, ///Macintosh line break ("\r"). macintosh } package: //Get line break string for specified line break. 
string lineBreak(in LineBreak b) pure @safe nothrow { final switch(b) { case LineBreak.unix: return "\n"; case LineBreak.windows: return "\r\n"; case LineBreak.macintosh: return "\r"; } } dub-1.40.0/source/dub/internal/dyaml/loader.d000066400000000000000000000306271477246567400210130ustar00rootroot00000000000000 // Copyright Ferdinand Majerech 2011. // Distributed under the Boost Software License, Version 1.0. // (See accompanying file LICENSE_1_0.txt or copy at // http://www.boost.org/LICENSE_1_0.txt) /// Class used to load YAML documents. module dub.internal.dyaml.loader; import std.exception; import std.file; import std.stdio : File; import std.string; import dub.internal.dyaml.composer; import dub.internal.dyaml.constructor; import dub.internal.dyaml.event; import dub.internal.dyaml.exception; import dub.internal.dyaml.node; import dub.internal.dyaml.parser; import dub.internal.dyaml.reader; import dub.internal.dyaml.resolver; import dub.internal.dyaml.scanner; import dub.internal.dyaml.token; /** Loads YAML documents from files or char[]. * * User specified Constructor and/or Resolver can be used to support new * tags / data types. */ struct Loader { private: // Assembles YAML documents Composer composer_; // Are we done loading? bool done_; // Last node read from stream Node currentNode; // Has the range interface been initialized yet? bool rangeInitialized; public: @disable int opCmp(ref Loader); @disable bool opEquals(ref Loader); /** Construct a Loader to load YAML from a file. * * Params: filename = Name of the file to load from. * file = Already-opened file to load from. * * Throws: YAMLException if the file could not be opened or read. 
*/ static Loader fromFile(string filename) @trusted { try { auto loader = Loader(std.file.read(filename), filename); return loader; } catch(FileException e) { throw new YAMLException("Unable to open file %s for YAML loading: %s" .format(filename, e.msg), e.file, e.line); } } /// ditto static Loader fromFile(File file) @system { auto loader = Loader(file.byChunk(4096).join, file.name); return loader; } /** Construct a Loader to load YAML from a string. * * Params: * data = String to load YAML from. The char[] version $(B will) * overwrite its input during parsing as D:YAML reuses memory. * filename = The filename to give to the Loader, defaults to `""` * * Returns: Loader loading YAML from given string. * * Throws: * * YAMLException if data could not be read (e.g. a decoding error) */ static Loader fromString(char[] data, string filename = "") @safe { return Loader(cast(ubyte[])data, filename); } /// Ditto static Loader fromString(string data, string filename = "") @safe { return fromString(data.dup, filename); } /// Load a char[]. @safe unittest { assert(Loader.fromString("42".dup).load().as!int == 42); } /// Load a string. @safe unittest { assert(Loader.fromString("42").load().as!int == 42); } /** Construct a Loader to load YAML from a buffer. * * Params: yamlData = Buffer with YAML data to load. This may be e.g. a file * loaded to memory or a string with YAML data. Note that * buffer $(B will) be overwritten, as D:YAML minimizes * memory allocations by reusing the input _buffer. * $(B Must not be deleted or modified by the user as long * as nodes loaded by this Loader are in use!) - Nodes may * refer to data in this buffer. * * Note that D:YAML looks for byte-order-marks YAML files encoded in * UTF-16/UTF-32 (and sometimes UTF-8) use to specify the encoding and * endianness, so it should be enough to load an entire file to a buffer and * pass it to D:YAML, regardless of Unicode encoding. * * Throws: YAMLException if yamlData contains data illegal in YAML. 
*/ static Loader fromBuffer(ubyte[] yamlData) @safe { return Loader(yamlData); } /// Ditto static Loader fromBuffer(void[] yamlData) @system { return Loader(yamlData); } /// Ditto private this(void[] yamlData, string name = "") @system { this(cast(ubyte[])yamlData, name); } /// Ditto private this(ubyte[] yamlData, string name = "") @safe { try { auto reader = Reader(yamlData, name); auto parser = new Parser(Scanner(reader)); composer_ = Composer(parser, Resolver.withDefaultResolvers); } catch(MarkedYAMLException e) { throw new LoaderException("Unable to open %s for YAML loading: %s" .format(name, e.msg), e.mark, e.file, e.line); } } /// Set stream _name. Used in debugging messages. ref inout(string) name() inout @safe return pure nothrow @nogc { return composer_.name; } /// Specify custom Resolver to use. auto ref resolver() pure @safe nothrow @nogc { return composer_.resolver; } /** Load single YAML document. * * If none or more than one YAML document is found, this throws a YAMLException. * * This can only be called once; this is enforced by contract. * * Returns: Root node of the document. * * Throws: YAMLException if there wasn't exactly one document * or on a YAML parsing error. */ Node load() @safe { enforce(!empty, new LoaderException("Zero documents in stream", composer_.mark)); auto output = front; popFront(); enforce(empty, new LoaderException("More than one document in stream", composer_.mark)); return output; } /** Implements the empty range primitive. * * If there's no more documents left in the stream, this will be true. * * Returns: `true` if no more documents left, `false` otherwise. */ bool empty() @safe { // currentNode and done_ are both invalid until popFront is called once if (!rangeInitialized) { popFront(); } return done_; } /** Implements the popFront range primitive. * * Reads the next document from the stream, if possible. 
*/ void popFront() @safe { scope(success) rangeInitialized = true; assert(!done_, "Loader.popFront called on empty range"); try { if (composer_.checkNode()) { currentNode = composer_.getNode(); } else { done_ = true; } } catch(MarkedYAMLException e) { throw new LoaderException("Unable to load %s: %s" .format(name, e.msg), e.mark, e.mark2Label, e.mark2, e.file, e.line); } } /** Implements the front range primitive. * * Returns: the current document as a Node. */ Node front() @safe { // currentNode and done_ are both invalid until popFront is called once if (!rangeInitialized) { popFront(); } return currentNode; } } /// Load single YAML document from a file: @safe unittest { write("example.yaml", "Hello world!"); auto rootNode = Loader.fromFile("example.yaml").load(); assert(rootNode == "Hello world!"); } /// Load single YAML document from an already-opened file: @system unittest { // Open a temporary file auto file = File.tmpfile; // Write valid YAML file.write("Hello world!"); // Return to the beginning file.seek(0); // Load document auto rootNode = Loader.fromFile(file).load(); assert(rootNode == "Hello world!"); } /// Load all YAML documents from a file: @safe unittest { import std.array : array; import std.file : write; write("example.yaml", "---\n"~ "Hello world!\n"~ "...\n"~ "---\n"~ "Hello world 2!\n"~ "...\n" ); auto nodes = Loader.fromFile("example.yaml").array; assert(nodes.length == 2); } /// Iterate over YAML documents in a file, lazily loading them: @safe unittest { import std.file : write; write("example.yaml", "---\n"~ "Hello world!\n"~ "...\n"~ "---\n"~ "Hello world 2!\n"~ "...\n" ); auto loader = Loader.fromFile("example.yaml"); foreach(ref node; loader) { //Do something } } /// Load YAML from a string: @safe unittest { string yaml_input = ("red: '#ff0000'\n" ~ "green: '#00ff00'\n" ~ "blue: '#0000ff'"); auto colors = Loader.fromString(yaml_input).load(); foreach(string color, string value; colors) { // Do something with the color and its value... 
} } /// Load a file into a buffer in memory and then load YAML from that buffer: @safe unittest { import std.file : read, write; import std.stdio : writeln; // Create a yaml document write("example.yaml", "---\n"~ "Hello world!\n"~ "...\n"~ "---\n"~ "Hello world 2!\n"~ "...\n" ); try { string buffer = readText("example.yaml"); auto yamlNode = Loader.fromString(buffer); // Read data from yamlNode here... } catch(FileException e) { writeln("Failed to read file 'example.yaml'"); } } /// Use a custom resolver to support custom data types and/or implicit tags: @safe unittest { import std.file : write; // Create a yaml document write("example.yaml", "---\n"~ "Hello world!\n"~ "...\n" ); auto loader = Loader.fromFile("example.yaml"); // Add resolver expressions here... // loader.resolver.addImplicitResolver(...); auto rootNode = loader.load(); } //Issue #258 - https://github.com/dlang-community/D-YAML/issues/258 @safe unittest { auto yaml = "{\n\"root\": {\n\t\"key\": \"value\"\n }\n}"; auto doc = Loader.fromString(yaml).load(); assert(doc.isValid); } @safe unittest { import std.exception : collectException; auto yaml = q"EOS value: invalid: string EOS"; auto filename = "invalid.yml"; auto loader = Loader.fromString(yaml); loader.name = filename; Node unused; auto e = loader.load().collectException!LoaderException(unused); assert(e.mark.name == filename); } /// https://github.com/dlang-community/D-YAML/issues/325 @safe unittest { assert(Loader.fromString("--- {x: a}").load()["x"] == "a"); } // Ensure exceptions are thrown as appropriate @safe unittest { LoaderException e; // No documents e = collectException!LoaderException(Loader.fromString("", "filename.yaml").load()); assert(e); with(e) { assert(mark.name == "filename.yaml"); assert(mark.line == 0); assert(mark.column == 0); } // Too many documents e = collectException!LoaderException(Loader.fromString("--- 4\n--- 6\n--- 5", "filename.yaml").load()); assert(e, "No exception thrown"); with(e) { assert(mark.name == 
"filename.yaml"); // FIXME: should be position of second document, not end of file //assert(mark.line == 1); //assert(mark.column == 0); } // Invalid document e = collectException!LoaderException(Loader.fromString("[", "filename.yaml").load()); assert(e, "No exception thrown"); with(e) { assert(mark.name == "filename.yaml"); // FIXME: should be position of second document, not end of file assert(mark.line == 0); assert(mark.column == 1); } } dub-1.40.0/source/dub/internal/dyaml/node.d000066400000000000000000002641061477246567400204730ustar00rootroot00000000000000 // Copyright Ferdinand Majerech 2011. // Distributed under the Boost Software License, Version 1.0. // (See accompanying file LICENSE_1_0.txt or copy at // http://www.boost.org/LICENSE_1_0.txt) /// Node of a YAML document. Used to read YAML data once it's loaded, /// and to prepare data to emit. module dub.internal.dyaml.node; import std.algorithm; import std.array; import std.conv; import std.datetime; import std.exception; import std.format; import std.math; import std.meta : AliasSeq; import std.range; import std.string; import std.traits; import std.typecons; // FIXME: Switch back to upstream's when v2.101 is the oldest // supported version (recommended: after v2.111 release). import dub.internal.dyaml.stdsumtype; import dub.internal.dyaml.event; import dub.internal.dyaml.exception; import dub.internal.dyaml.style; // Node kinds. enum NodeID : ubyte { scalar, sequence, mapping, invalid } /// Null YAML type. Used in nodes with _null values. struct YAMLNull { /// Used for string conversion. string toString() const pure @safe nothrow {return "null";} } /// Invalid YAML type, used internally by SumType private struct YAMLInvalid {} // Merge YAML type, used to support "tag:yaml.org,2002:merge". package struct YAMLMerge{} // Key-value pair of YAML nodes, used in mappings. private struct Pair { public: /// Key node. Node key; /// Value node. Node value; /// Construct a Pair from two values. 
Will be converted to Nodes if needed. this(K, V)(K key, V value) { static if(is(Unqual!K == Node)){this.key = key;} else {this.key = Node(key);} static if(is(Unqual!V == Node)){this.value = value;} else {this.value = Node(value);} } /// Equality test with another Pair. bool opEquals(const ref Pair rhs) const scope @safe { return key == rhs.key && value == rhs.value; } // Comparison with another Pair. int opCmp(const scope ref Pair rhs) const scope @safe { const keyCmp = key.opCmp(rhs.key); return keyCmp != 0 ? keyCmp : value.opCmp(rhs.value); } /// public void toString (scope void delegate(scope const(char)[]) @safe sink) const scope @safe { // formattedWrite does not accept `scope` parameters () @trusted { formattedWrite(sink, "%s: %s", this.key, this.value); }(); } } enum NodeType { null_, merge, boolean, integer, decimal, binary, timestamp, string, mapping, sequence, invalid } /** YAML node. * * This is a pseudo-dynamic type that can store any YAML value, including a * sequence or mapping of nodes. You can get data from a Node directly or * iterate over it if it's a collection. */ struct Node { public: alias Pair = .Pair; package: // YAML value type. alias Value = SumType!( YAMLInvalid, YAMLNull, YAMLMerge, bool, long, real, ubyte[], SysTime, string, Node.Pair[], Node[]); // Can Value hold this type naturally? enum allowed(T) = isIntegral!T || isFloatingPoint!T || isSomeString!T || is(typeof({ Value i = T.init; })); // Stored value. Value value_; // Start position of the node. Mark startMark_; // Tag of the node. string tag_; // Node scalar style. Used to remember style this node was loaded with. ScalarStyle scalarStyle = ScalarStyle.invalid; // Node collection style. Used to remember style this node was loaded with. CollectionStyle collectionStyle = CollectionStyle.invalid; public: /** Construct a Node from a value. * * Any type except for Node can be stored in a Node, but default YAML * types (integers, floats, strings, timestamps, etc.) 
will be stored * more efficiently. To create a node representing a null value, * construct it from YAMLNull. * * If value is a node, its value will be copied directly. The tag and * other information attached to the original node will be discarded. * * If value is an array of nodes or pairs, it is stored directly. * Otherwise, every value in the array is converted to a node, and * those nodes are stored. * * Note that to emit any non-default types you store * in a node, you need a Representer to represent them in YAML - * otherwise emitting will fail. * * Params: value = Value to store in the node. * tag = Overrides tag of the node when emitted, regardless * of tag determined by Representer. Representer uses * this to determine YAML data type when a D data type * maps to multiple different YAML data types. Tag must * be in full form, e.g. "tag:yaml.org,2002:int", not * a shortcut, like "!!int". */ this(T)(T value, const string tag = null) @safe if (allowed!T || isArray!T || isAssociativeArray!T || is(Unqual!T == Node) || castableToNode!T) { tag_ = tag; //Unlike with assignment, we're just copying the value. static if (is(Unqual!T == Node)) { setValue(value.value_); } else static if(isSomeString!T) { setValue(value.to!string); } else static if(is(Unqual!T == bool)) { setValue(cast(bool)value); } else static if(isIntegral!T) { setValue(cast(long)value); } else static if(isFloatingPoint!T) { setValue(cast(real)value); } else static if (isArray!T) { alias ElementT = Unqual!(ElementType!T); // Construction from raw node or pair array. static if(is(ElementT == Node) || is(ElementT == Node.Pair)) { setValue(value); } // Need to handle byte buffers separately. 
else static if(is(ElementT == byte) || is(ElementT == ubyte)) { setValue(cast(ubyte[]) value); } else { Node[] nodes; foreach(ref v; value) { nodes ~= Node(v); } setValue(nodes); } } else static if (isAssociativeArray!T) { Node.Pair[] pairs; foreach(k, ref v; value) { pairs ~= Pair(k, v); } setValue(pairs); } // User defined type. else { setValue(value); } } /// Construct a scalar node @safe unittest { // Integer { auto node = Node(5); } // String { auto node = Node("Hello world!"); } // Floating point { auto node = Node(5.0f); } // Boolean { auto node = Node(true); } // Time { auto node = Node(SysTime(DateTime(2005, 6, 15, 20, 0, 0), UTC())); } // Integer, dumped as a string { auto node = Node(5, "tag:yaml.org,2002:str"); } } /// Construct a sequence node @safe unittest { // Will be emitted as a sequence (default for arrays) { auto seq = Node([1, 2, 3, 4, 5]); } // Will be emitted as a set (overridden tag) { auto set = Node([1, 2, 3, 4, 5], "tag:yaml.org,2002:set"); } // Can also store arrays of arrays { auto node = Node([[1,2], [3,4]]); } } /// Construct a mapping node @safe unittest { // Will be emitted as an unordered mapping (default for mappings) auto map = Node([1 : "a", 2 : "b"]); // Will be emitted as an ordered map (overridden tag) auto omap = Node([1 : "a", 2 : "b"], "tag:yaml.org,2002:omap"); // Will be emitted as pairs (overridden tag) auto pairs = Node([1 : "a", 2 : "b"], "tag:yaml.org,2002:pairs"); } @safe unittest { { auto node = Node(42); assert(node.nodeID == NodeID.scalar); assert(node.as!int == 42 && node.as!float == 42.0f && node.as!string == "42"); } { auto node = Node("string"); assert(node.as!string == "string"); } } @safe unittest { with(Node([1, 2, 3])) { assert(nodeID == NodeID.sequence); assert(length == 3); assert(opIndex(2).as!int == 3); } } @safe unittest { int[string] aa; aa["1"] = 1; aa["2"] = 2; with(Node(aa)) { assert(nodeID == NodeID.mapping); assert(length == 2); assert(opIndex("2").as!int == 2); } } @safe unittest { auto node = 
Node(Node(4, "tag:yaml.org,2002:str")); assert(node == 4); assert(node.tag_ == ""); } /** Construct a node from arrays of _keys and _values. * * Constructs a mapping node with key-value pairs from * _keys and _values, keeping their order. Useful when order * is important (ordered maps, pairs). * * * keys and values must have equal length. * * * If _keys and/or _values are nodes, they are stored directly/ * Otherwise they are converted to nodes and then stored. * * Params: keys = Keys of the mapping, from first to last pair. * values = Values of the mapping, from first to last pair. * tag = Overrides tag of the node when emitted, regardless * of tag determined by Representer. Representer uses * this to determine YAML data type when a D data type * maps to multiple different YAML data types. * This is used to differentiate between YAML unordered * mappings ("!!map"), ordered mappings ("!!omap"), and * pairs ("!!pairs") which are all internally * represented as an array of node pairs. Tag must be * in full form, e.g. "tag:yaml.org,2002:omap", not a * shortcut, like "!!omap". * */ this(K, V)(K[] keys, V[] values, const string tag = null) if(!(isSomeString!(K[]) || isSomeString!(V[]))) in(keys.length == values.length, "Lengths of keys and values arrays to construct " ~ "a YAML node from don't match") { tag_ = tag; Node.Pair[] pairs; foreach(i; 0 .. keys.length){pairs ~= Pair(keys[i], values[i]);} setValue(pairs); } /// @safe unittest { // Will be emitted as an unordered mapping (default for mappings) auto map = Node([1, 2], ["a", "b"]); // Will be emitted as an ordered map (overridden tag) auto omap = Node([1, 2], ["a", "b"], "tag:yaml.org,2002:omap"); // Will be emitted as pairs (overriden tag) auto pairs = Node([1, 2], ["a", "b"], "tag:yaml.org,2002:pairs"); } @safe unittest { with(Node(["1", "2"], [1, 2])) { assert(nodeID == NodeID.mapping); assert(length == 2); assert(opIndex("2").as!int == 2); } } /// Is this node valid (initialized)? 
@property bool isValid() const scope @safe pure nothrow @nogc { return value_.match!((const YAMLInvalid _) => false, _ => true); } /// Return tag of the node. @property string tag() const return scope @safe pure nothrow @nogc { return tag_; } /// Return the start position of the node. @property Mark startMark() const return scope @safe pure nothrow @nogc { return startMark_; } /** Equality test. * * If T is Node, recursively compares all subnodes. * This might be quite expensive if testing entire documents. * * If T is not Node, gets a value of type T from the node and tests * equality with that. * * To test equality with a null YAML value, use YAMLNull. * * Params: rhs = Variable to test equality with. * * Returns: true if equal, false otherwise. */ bool opEquals(const scope Node rhs) const scope @safe { return opCmp(rhs) == 0; } bool opEquals(T)(const scope auto ref T rhs) const @safe { try { auto stored = get!(T, No.stringConversion); // NaNs aren't normally equal to each other, but we'll pretend they are. static if(isFloatingPoint!T) { return rhs == stored || (isNaN(rhs) && isNaN(stored)); } else { return rhs == stored; } } catch(NodeException e) { return false; } } /// @safe unittest { auto node = Node(42); assert(node == 42); assert(node != "42"); assert(node != "43"); auto node2 = Node(YAMLNull()); assert(node2 == YAMLNull()); const node3 = Node(42); assert(node3 == 42); } /// Shortcut for get(). alias as = get; /** Get the value of the node as specified type. * * If the specifed type does not match type in the node, * conversion is attempted. The stringConversion template * parameter can be used to disable conversion from non-string * types to strings. * * Numeric values are range checked, throwing if out of range of * requested type. * * Timestamps are stored as std.datetime.SysTime. * Binary values are decoded and stored as ubyte[]. * * To get a null value, use get!YAMLNull . This is to * prevent getting null values for types such as strings or classes. 
* * $(BR)$(B Mapping default values:) * * $(PBR * The '=' key can be used to denote the default value of a mapping. * This can be used when a node is scalar in early versions of a program, * but is replaced by a mapping later. Even if the node is a mapping, the * get method can be used as if it was a scalar if it has a default value. * This way, new YAML files where the node is a mapping can still be read * by old versions of the program, which expect the node to be a scalar. * ) * * Returns: Value of the node as specified type. * * Throws: NodeException if unable to convert to specified type, or if * the value is out of range of requested type. */ inout(T) get(T, Flag!"stringConversion" stringConversion = Yes.stringConversion)() inout @safe return scope { static assert (allowed!(Unqual!T) || hasNodeConstructor!(inout(Unqual!T)) || (!hasIndirections!(Unqual!T) && hasNodeConstructor!(Unqual!T))); static if(!allowed!(Unqual!T)) { static if (hasSimpleNodeConstructor!(Unqual!T) || hasSimpleNodeConstructor!(inout(Unqual!T))) { alias params = AliasSeq!(this); } else static if (hasExpandedNodeConstructor!(Unqual!T) || hasExpandedNodeConstructor!(inout(Unqual!T))) { alias params = AliasSeq!(this, tag_); } else { static assert(0, "Unknown Node constructor?"); } static if (is(T == class)) { return new inout T(params); } else static if (is(T == struct)) { return T(params); } else { static assert(0, "Unhandled user type"); } } else { static if (canBeType!T) if (isType!(Unqual!T)) { return getValue!T; } // If we're getting from a mapping and we're not getting Node.Pair[], // we're getting the default value. if(nodeID == NodeID.mapping){return this["="].get!( T, stringConversion);} static if(isSomeString!T) { static if(!stringConversion) { enforce(type == NodeType.string, new NodeException( "Node stores unexpected type: " ~ text(type) ~ ". Expected: " ~ typeid(T).toString(), startMark_)); return to!T(getValue!string); } else { // Try to convert to string. 
try { return coerceValue!T().dup; } catch (MatchException e) { throw new NodeException("Unable to convert node value to string", startMark_); } } } else static if(isFloatingPoint!T) { final switch (type) { case NodeType.integer: return to!T(getValue!long); case NodeType.decimal: return to!T(getValue!real); case NodeType.binary: case NodeType.string: case NodeType.boolean: case NodeType.null_: case NodeType.merge: case NodeType.invalid: case NodeType.timestamp: case NodeType.mapping: case NodeType.sequence: throw new NodeException("Node stores unexpected type: " ~ text(type) ~ ". Expected: " ~ typeid(T).toString, startMark_); } } else static if(isIntegral!T) { enforce(type == NodeType.integer, new NodeException("Node stores unexpected type: " ~ text(type) ~ ". Expected: " ~ typeid(T).toString, startMark_)); immutable temp = getValue!long; enforce(temp >= T.min && temp <= T.max, new NodeException("Integer value of type " ~ typeid(T).toString() ~ " out of range. Value: " ~ to!string(temp), startMark_)); return temp.to!T; } else throw new NodeException("Node stores unexpected type: " ~ text(type) ~ ". 
Expected: " ~ typeid(T).toString, startMark_); } } /// ditto T get(T)() const if (hasIndirections!(Unqual!T) && hasNodeConstructor!(Unqual!T) && (!hasNodeConstructor!(inout(Unqual!T)))) { static if (hasSimpleNodeConstructor!T) { alias params = AliasSeq!(this); } else static if (hasExpandedNodeConstructor!T) { alias params = AliasSeq!(this, tag_); } else { static assert(0, "Unknown Node constructor?"); } static if (is(T == class)) { return new T(params); } else static if (is(T == struct)) { return T(params); } else { static assert(0, "Unhandled user type"); } } /// Automatic type conversion @safe unittest { auto node = Node(42); assert(node.get!int == 42); assert(node.get!string == "42"); assert(node.get!double == 42.0); } /// Scalar node to struct and vice versa @safe unittest { import dub.internal.dyaml.dumper : dumper; import dub.internal.dyaml.loader : Loader; static struct MyStruct { int x, y, z; this(int x, int y, int z) @safe { this.x = x; this.y = y; this.z = z; } this(scope const Node node) @safe { // `std.array.split` is not marked as taking a `scope` range, // but we don't escape a reference. scope parts = () @trusted { return node.as!string().split(":"); }(); x = parts[0].to!int; y = parts[1].to!int; z = parts[2].to!int; } Node opCast(T: Node)() @safe { //Using custom scalar format, x:y:z. auto scalar = format("%s:%s:%s", x, y, z); //Representing as a scalar, with custom tag to specify this data type. 
return Node(scalar, "!mystruct.tag"); } } auto appender = new Appender!string; // Dump struct to yaml document dumper().dump(appender, Node(MyStruct(1,2,3))); // Read yaml document back as a MyStruct auto loader = Loader.fromString(appender.data); Node node = loader.load(); assert(node.as!MyStruct == MyStruct(1,2,3)); } /// Sequence node to struct and vice versa @safe unittest { import dub.internal.dyaml.dumper : dumper; import dub.internal.dyaml.loader : Loader; static struct MyStruct { int x, y, z; this(int x, int y, int z) @safe { this.x = x; this.y = y; this.z = z; } this(Node node) @safe { x = node[0].as!int; y = node[1].as!int; z = node[2].as!int; } Node opCast(T: Node)() { return Node([x, y, z], "!mystruct.tag"); } } auto appender = new Appender!string; // Dump struct to yaml document dumper().dump(appender, Node(MyStruct(1,2,3))); // Read yaml document back as a MyStruct auto loader = Loader.fromString(appender.data); Node node = loader.load(); assert(node.as!MyStruct == MyStruct(1,2,3)); } /// Mapping node to struct and vice versa @safe unittest { import dub.internal.dyaml.dumper : dumper; import dub.internal.dyaml.loader : Loader; static struct MyStruct { int x, y, z; Node opCast(T: Node)() { auto pairs = [Node.Pair("x", x), Node.Pair("y", y), Node.Pair("z", z)]; return Node(pairs, "!mystruct.tag"); } this(int x, int y, int z) { this.x = x; this.y = y; this.z = z; } this(Node node) @safe { x = node["x"].as!int; y = node["y"].as!int; z = node["z"].as!int; } } auto appender = new Appender!string; // Dump struct to yaml document dumper().dump(appender, Node(MyStruct(1,2,3))); // Read yaml document back as a MyStruct auto loader = Loader.fromString(appender.data); Node node = loader.load(); assert(node.as!MyStruct == MyStruct(1,2,3)); } /// Classes can be used too @system unittest { import dub.internal.dyaml.dumper : dumper; import dub.internal.dyaml.loader : Loader; static class MyClass { int x, y, z; this(int x, int y, int z) { this.x = x; this.y = y; 
this.z = z; } this(scope const Node node) @safe inout { // `std.array.split` is not marked as taking a `scope` range, // but we don't escape a reference. scope parts = () @trusted { return node.as!string().split(":"); }(); x = parts[0].to!int; y = parts[1].to!int; z = parts[2].to!int; } ///Useful for Node.as!string. override string toString() { return format("MyClass(%s, %s, %s)", x, y, z); } Node opCast(T: Node)() @safe { //Using custom scalar format, x:y:z. auto scalar = format("%s:%s:%s", x, y, z); //Representing as a scalar, with custom tag to specify this data type. return Node(scalar, "!myclass.tag"); } override bool opEquals(Object o) { if (auto other = cast(MyClass)o) { return (other.x == x) && (other.y == y) && (other.z == z); } return false; } } auto appender = new Appender!string; // Dump class to yaml document dumper().dump(appender, Node(new MyClass(1,2,3))); // Read yaml document back as a MyClass auto loader = Loader.fromString(appender.data); Node node = loader.load(); assert(node.as!MyClass == new MyClass(1,2,3)); } // Make sure custom tags and styles are kept. 
@safe unittest { static struct MyStruct { Node opCast(T: Node)() { auto node = Node("hi", "!mystruct.tag"); node.setStyle(ScalarStyle.doubleQuoted); return node; } } auto node = Node(MyStruct.init); assert(node.tag == "!mystruct.tag"); assert(node.scalarStyle == ScalarStyle.doubleQuoted); } // ditto, but for collection style @safe unittest { static struct MyStruct { Node opCast(T: Node)() { auto node = Node(["hi"], "!mystruct.tag"); node.setStyle(CollectionStyle.flow); return node; } } auto node = Node(MyStruct.init); assert(node.tag == "!mystruct.tag"); assert(node.collectionStyle == CollectionStyle.flow); } @safe unittest { assertThrown!NodeException(Node("42").get!int); assertThrown!NodeException(Node("42").get!double); assertThrown!NodeException(Node(long.max).get!ushort); Node(YAMLNull()).get!YAMLNull; } @safe unittest { const node = Node(42); assert(node.get!int == 42); assert(node.get!string == "42"); assert(node.get!double == 42.0); immutable node2 = Node(42); assert(node2.get!int == 42); assert(node2.get!(const int) == 42); assert(node2.get!(immutable int) == 42); assert(node2.get!string == "42"); assert(node2.get!(const string) == "42"); assert(node2.get!(immutable string) == "42"); assert(node2.get!double == 42.0); assert(node2.get!(const double) == 42.0); assert(node2.get!(immutable double) == 42.0); } /** If this is a collection, return its _length. * * Otherwise, throw NodeException. * * Returns: Number of elements in a sequence or key-value pairs in a mapping. * * Throws: NodeException if this is not a sequence nor a mapping. 
*/ @property size_t length() const @safe { final switch(nodeID) { case NodeID.sequence: return getValue!(Node[]).length; case NodeID.mapping: return getValue!(Pair[]).length; case NodeID.scalar: case NodeID.invalid: throw new NodeException("Trying to get length of a " ~ nodeTypeString ~ " node", startMark_); } } @safe unittest { auto node = Node([1,2,3]); assert(node.length == 3); const cNode = Node([1,2,3]); assert(cNode.length == 3); immutable iNode = Node([1,2,3]); assert(iNode.length == 3); } /** Get the element at specified index. * * If the node is a sequence, index must be integral. * * * If the node is a mapping, return the value corresponding to the first * key equal to index. containsKey() can be used to determine if a mapping * has a specific key. * * To get element at a null index, use YAMLNull for index. * * Params: index = Index to use. * * Returns: Value corresponding to the index. * * Throws: NodeException if the index could not be found, * non-integral index is used with a sequence or the node is * not a collection. */ ref inout(Node) opIndex(T)(T index) inout return scope @safe { final switch (nodeID) { case NodeID.sequence: checkSequenceIndex(index); static if(isIntegral!T) { return getValue!(Node[])[index]; } else { assert(false, "Only integers may index sequence nodes"); } case NodeID.mapping: auto idx = findPair(index); if(idx >= 0) { return getValue!(Pair[])[idx].value; } string msg = "Mapping index not found" ~ (isSomeString!T ? 
": " ~ to!string(index) : ""); throw new NodeException(msg, startMark_); case NodeID.scalar: case NodeID.invalid: throw new NodeException("Trying to index a " ~ nodeTypeString ~ " node", startMark_); } } /// @safe unittest { Node narray = Node([11, 12, 13, 14]); Node nmap = Node(["11", "12", "13", "14"], [11, 12, 13, 14]); assert(narray[0].as!int == 11); assert(null !is collectException(narray[42])); assert(nmap["11"].as!int == 11); assert(nmap["14"].as!int == 14); } @safe unittest { Node narray = Node([11, 12, 13, 14]); Node nmap = Node(["11", "12", "13", "14"], [11, 12, 13, 14]); assert(narray[0].as!int == 11); assert(null !is collectException(narray[42])); assert(nmap["11"].as!int == 11); assert(nmap["14"].as!int == 14); assert(null !is collectException(nmap["42"])); narray.add(YAMLNull()); nmap.add(YAMLNull(), "Nothing"); assert(narray[4].as!YAMLNull == YAMLNull()); assert(nmap[YAMLNull()].as!string == "Nothing"); assertThrown!NodeException(nmap[11]); assertThrown!NodeException(nmap[14]); } /** Determine if a collection contains specified value. * * If the node is a sequence, check if it contains the specified value. * If it's a mapping, check if it has a value that matches specified value. * * Params: rhs = Item to look for. Use YAMLNull to check for a null value. * * Returns: true if rhs was found, false otherwise. * * Throws: NodeException if the node is not a collection. */ bool contains(T)(T rhs) const { return contains_!(T, No.key, "contains")(rhs); } @safe unittest { auto mNode = Node(["1", "2", "3"]); assert(mNode.contains("2")); const cNode = Node(["1", "2", "3"]); assert(cNode.contains("2")); immutable iNode = Node(["1", "2", "3"]); assert(iNode.contains("2")); } /** Determine if a mapping contains specified key. * * Params: rhs = Key to look for. Use YAMLNull to check for a null key. * * Returns: true if rhs was found, false otherwise. * * Throws: NodeException if the node is not a mapping. 
*/ bool containsKey(T)(T rhs) const { return contains_!(T, Yes.key, "containsKey")(rhs); } // Unittest for contains() and containsKey(). @safe unittest { auto seq = Node([1, 2, 3, 4, 5]); assert(seq.contains(3)); assert(seq.contains(5)); assert(!seq.contains("5")); assert(!seq.contains(6)); assert(!seq.contains(float.nan)); assertThrown!NodeException(seq.containsKey(5)); auto seq2 = Node(["1", "2"]); assert(seq2.contains("1")); assert(!seq2.contains(1)); auto map = Node(["1", "2", "3", "4"], [1, 2, 3, 4]); assert(map.contains(1)); assert(!map.contains("1")); assert(!map.contains(5)); assert(!map.contains(float.nan)); assert(map.containsKey("1")); assert(map.containsKey("4")); assert(!map.containsKey(1)); assert(!map.containsKey("5")); assert(!seq.contains(YAMLNull())); assert(!map.contains(YAMLNull())); assert(!map.containsKey(YAMLNull())); seq.add(YAMLNull()); map.add("Nothing", YAMLNull()); assert(seq.contains(YAMLNull())); assert(map.contains(YAMLNull())); assert(!map.containsKey(YAMLNull())); map.add(YAMLNull(), "Nothing"); assert(map.containsKey(YAMLNull())); auto map2 = Node([1, 2, 3, 4], [1, 2, 3, 4]); assert(!map2.contains("1")); assert(map2.contains(1)); assert(!map2.containsKey("1")); assert(map2.containsKey(1)); // scalar assertThrown!NodeException(Node(1).contains(4)); assertThrown!NodeException(Node(1).containsKey(4)); auto mapNan = Node([1.0, 2, double.nan], [1, double.nan, 5]); assert(mapNan.contains(double.nan)); assert(mapNan.containsKey(double.nan)); } /// Assignment (shallow copy) by value. void opAssign()(auto ref Node rhs) { assumeWontThrow(setValue(rhs.value_)); startMark_ = rhs.startMark_; tag_ = rhs.tag_; scalarStyle = rhs.scalarStyle; collectionStyle = rhs.collectionStyle; } // Unittest for opAssign(). @safe unittest { auto seq = Node([1, 2, 3, 4, 5]); auto assigned = seq; assert(seq == assigned, "Node.opAssign() doesn't produce an equivalent copy"); } /** Set element at specified index in a collection. 
* * This method can only be called on collection nodes. * * If the node is a sequence, index must be integral. * * If the node is a mapping, sets the _value corresponding to the first * key matching index (including conversion, so e.g. "42" matches 42). * * If the node is a mapping and no key matches index, a new key-value * pair is added to the mapping. In sequences the index must be in * range. This ensures behavior siilar to D arrays and associative * arrays. * * To set element at a null index, use YAMLNull for index. * * Params: * value = Value to assign. * index = Index of the value to set. * * Throws: NodeException if the node is not a collection, index is out * of range or if a non-integral index is used on a sequence node. */ void opIndexAssign(K, V)(V value, K index) { final switch (nodeID) { case NodeID.sequence: checkSequenceIndex(index); static if(isIntegral!K || is(Unqual!K == bool)) { auto nodes = getValue!(Node[]); static if(is(Unqual!V == Node)){nodes[index] = value;} else {nodes[index] = Node(value);} setValue(nodes); return; } assert(false, "Only integers may index sequence nodes"); case NodeID.mapping: const idx = findPair(index); if(idx < 0){add(index, value);} else { auto pairs = as!(Node.Pair[])(); static if(is(Unqual!V == Node)){pairs[idx].value = value;} else {pairs[idx].value = Node(value);} setValue(pairs); } return; case NodeID.scalar: case NodeID.invalid: throw new NodeException("Trying to index a " ~ nodeTypeString ~ " node", startMark_); } } @safe unittest { with(Node([1, 2, 3, 4, 3])) { opIndexAssign(42, 3); assert(length == 5); assert(opIndex(3).as!int == 42); opIndexAssign(YAMLNull(), 0); assert(opIndex(0) == YAMLNull()); } with(Node(["1", "2", "3"], [4, 5, 6])) { opIndexAssign(42, "3"); opIndexAssign(123, 456); assert(length == 4); assert(opIndex("3").as!int == 42); assert(opIndex(456).as!int == 123); opIndexAssign(43, 3); //3 and "3" should be different assert(length == 5); assert(opIndex("3").as!int == 42); 
assert(opIndex(3).as!int == 43); opIndexAssign(YAMLNull(), "2"); assert(opIndex("2") == YAMLNull()); } } /** Return a range object iterating over a sequence, getting each * element as T. * * If T is Node, simply iterate over the nodes in the sequence. * Otherwise, convert each node to T during iteration. * * Throws: NodeException if the node is not a sequence or an element * could not be converted to specified type. */ template sequence(T = Node) { struct Range(N) { N subnodes; size_t position; this(N nodes) { subnodes = nodes; position = 0; } /* Input range functionality. */ bool empty() const @property { return position >= subnodes.length; } void popFront() { enforce(!empty, "Attempted to popFront an empty sequence"); position++; } T front() const @property { enforce(!empty, "Attempted to take the front of an empty sequence"); static if (is(Unqual!T == Node)) return subnodes[position]; else return subnodes[position].as!T; } /* Forward range functionality. */ Range save() { return this; } /* Bidirectional range functionality. */ void popBack() { enforce(!empty, "Attempted to popBack an empty sequence"); subnodes = subnodes[0 .. $ - 1]; } T back() { enforce(!empty, "Attempted to take the back of an empty sequence"); static if (is(Unqual!T == Node)) return subnodes[$ - 1]; else return subnodes[$ - 1].as!T; } /* Random-access range functionality. 
*/ size_t length() const @property { return subnodes.length; } T opIndex(size_t index) { static if (is(Unqual!T == Node)) return subnodes[index]; else return subnodes[index].as!T; } static assert(isInputRange!Range); static assert(isForwardRange!Range); static assert(isBidirectionalRange!Range); static assert(isRandomAccessRange!Range); } auto sequence() { enforce(nodeID == NodeID.sequence, new NodeException("Trying to 'sequence'-iterate over a " ~ nodeTypeString ~ " node", startMark_)); return Range!(Node[])(get!(Node[])); } auto sequence() const { enforce(nodeID == NodeID.sequence, new NodeException("Trying to 'sequence'-iterate over a " ~ nodeTypeString ~ " node", startMark_)); return Range!(const(Node)[])(get!(Node[])); } } @safe unittest { Node n1 = Node([1, 2, 3, 4]); int[int] array; Node n2 = Node(array); const n3 = Node([1, 2, 3, 4]); auto r = n1.sequence!int.map!(x => x * 10); assert(r.equal([10, 20, 30, 40])); assertThrown(n2.sequence); auto r2 = n3.sequence!int.map!(x => x * 10); assert(r2.equal([10, 20, 30, 40])); } /** Return a range object iterating over mapping's pairs. * * Throws: NodeException if the node is not a mapping. * */ template mapping() { struct Range(T) { T pairs; size_t position; this(T pairs) @safe { this.pairs = pairs; position = 0; } /* Input range functionality. */ bool empty() @safe { return position >= pairs.length; } void popFront() @safe { enforce(!empty, "Attempted to popFront an empty mapping"); position++; } auto front() @safe { enforce(!empty, "Attempted to take the front of an empty mapping"); return pairs[position]; } /* Forward range functionality. */ Range save() @safe { return this; } /* Bidirectional range functionality. */ void popBack() @safe { enforce(!empty, "Attempted to popBack an empty mapping"); pairs = pairs[0 .. $ - 1]; } auto back() @safe { enforce(!empty, "Attempted to take the back of an empty mapping"); return pairs[$ - 1]; } /* Random-access range functionality. 
*/ size_t length() const @property @safe { return pairs.length; } auto opIndex(size_t index) @safe { return pairs[index]; } static assert(isInputRange!Range); static assert(isForwardRange!Range); static assert(isBidirectionalRange!Range); static assert(isRandomAccessRange!Range); } auto mapping() { enforce(nodeID == NodeID.mapping, new NodeException("Trying to 'mapping'-iterate over a " ~ nodeTypeString ~ " node", startMark_)); return Range!(Node.Pair[])(get!(Node.Pair[])); } auto mapping() const { enforce(nodeID == NodeID.mapping, new NodeException("Trying to 'mapping'-iterate over a " ~ nodeTypeString ~ " node", startMark_)); return Range!(const(Node.Pair)[])(get!(Node.Pair[])); } } @safe unittest { int[int] array; Node n = Node(array); n[1] = "foo"; n[2] = "bar"; n[3] = "baz"; string[int] test; foreach (pair; n.mapping) test[pair.key.as!int] = pair.value.as!string.idup; assert(test[1] == "foo"); assert(test[2] == "bar"); assert(test[3] == "baz"); int[int] constArray = [1: 2, 3: 4]; const x = Node(constArray); foreach (pair; x.mapping) assert(pair.value == constArray[pair.key.as!int]); } /** Return a range object iterating over mapping's keys. * * If K is Node, simply iterate over the keys in the mapping. * Otherwise, convert each key to T during iteration. * * Throws: NodeException if the nodes is not a mapping or an element * could not be converted to specified type. 
*/ auto mappingKeys(K = Node)() const { enforce(nodeID == NodeID.mapping, new NodeException("Trying to 'mappingKeys'-iterate over a " ~ nodeTypeString ~ " node", startMark_)); static if (is(Unqual!K == Node)) return mapping.map!(pair => pair.key); else return mapping.map!(pair => pair.key.as!K); } @safe unittest { int[int] array; Node m1 = Node(array); m1["foo"] = 2; m1["bar"] = 3; assert(m1.mappingKeys.equal(["foo", "bar"]) || m1.mappingKeys.equal(["bar", "foo"])); const cm1 = Node(["foo": 2, "bar": 3]); assert(cm1.mappingKeys.equal(["foo", "bar"]) || cm1.mappingKeys.equal(["bar", "foo"])); } /** Return a range object iterating over mapping's values. * * If V is Node, simply iterate over the values in the mapping. * Otherwise, convert each key to V during iteration. * * Throws: NodeException if the nodes is not a mapping or an element * could not be converted to specified type. */ auto mappingValues(V = Node)() const { enforce(nodeID == NodeID.mapping, new NodeException("Trying to 'mappingValues'-iterate over a " ~ nodeTypeString ~ " node", startMark_)); static if (is(Unqual!V == Node)) return mapping.map!(pair => pair.value); else return mapping.map!(pair => pair.value.as!V); } @safe unittest { int[int] array; Node m1 = Node(array); m1["foo"] = 2; m1["bar"] = 3; assert(m1.mappingValues.equal([2, 3]) || m1.mappingValues.equal([3, 2])); const cm1 = Node(["foo": 2, "bar": 3]); assert(cm1.mappingValues.equal([2, 3]) || cm1.mappingValues.equal([3, 2])); } /** Foreach over a sequence, getting each element as T. * * If T is Node, simply iterate over the nodes in the sequence. * Otherwise, convert each node to T during iteration. * * Throws: NodeException if the node is not a sequence or an * element could not be converted to specified type. 
*/ int opApply(D)(D dg) if (isDelegate!D && (Parameters!D.length == 1)) { enforce(nodeID == NodeID.sequence, new NodeException("Trying to sequence-foreach over a " ~ nodeTypeString ~ " node", startMark_)); int result; foreach(ref node; get!(Node[])) { static if(is(Unqual!(Parameters!D[0]) == Node)) { result = dg(node); } else { Parameters!D[0] temp = node.as!(Parameters!D[0]); result = dg(temp); } if(result){break;} } return result; } /// ditto int opApply(D)(D dg) const if (isDelegate!D && (Parameters!D.length == 1)) { enforce(nodeID == NodeID.sequence, new NodeException("Trying to sequence-foreach over a " ~ nodeTypeString ~ " node", startMark_)); int result; foreach(ref node; get!(Node[])) { static if(is(Unqual!(Parameters!D[0]) == Node)) { result = dg(node); } else { Parameters!D[0] temp = node.as!(Parameters!D[0]); result = dg(temp); } if(result){break;} } return result; } @safe unittest { Node n1 = Node(11); Node n2 = Node(12); Node n3 = Node(13); Node n4 = Node(14); Node narray = Node([n1, n2, n3, n4]); const cNArray = narray; int[] array, array2, array3; foreach(int value; narray) { array ~= value; } foreach(Node node; narray) { array2 ~= node.as!int; } foreach (const Node node; cNArray) { array3 ~= node.as!int; } assert(array == [11, 12, 13, 14]); assert(array2 == [11, 12, 13, 14]); assert(array3 == [11, 12, 13, 14]); } @safe unittest { string[] testStrs = ["1", "2", "3"]; auto node1 = Node(testStrs); int i = 0; foreach (string elem; node1) { assert(elem == testStrs[i]); i++; } const node2 = Node(testStrs); i = 0; foreach (string elem; node2) { assert(elem == testStrs[i]); i++; } immutable node3 = Node(testStrs); i = 0; foreach (string elem; node3) { assert(elem == testStrs[i]); i++; } } @safe unittest { auto node = Node(["a":1, "b":2, "c":3]); const cNode = node; assertThrown({foreach (Node n; node) {}}()); assertThrown({foreach (const Node n; cNode) {}}()); } /** Foreach over a mapping, getting each key/value as K/V. 
*
 * If the K and/or V is Node, simply iterate over the nodes in the mapping.
 * Otherwise, convert each key/value to T during iteration.
 *
 * Throws: NodeException if the node is not a mapping or an
 * element could not be converted to specified type.
 */
int opApply(DG)(DG dg) if (isDelegate!DG && (Parameters!DG.length == 2))
{
    alias K = Parameters!DG[0];
    alias V = Parameters!DG[1];
    enforce(nodeID == NodeID.mapping,
        new NodeException("Trying to mapping-foreach over a " ~ nodeTypeString ~ " node",
            startMark_));

    int result;
    foreach(ref pair; get!(Node.Pair[]))
    {
        // Four cases: each of key/value is either passed through as a
        // Node or converted to the delegate's parameter type.
        static if(is(Unqual!K == Node) && is(Unqual!V == Node))
        {
            result = dg(pair.key, pair.value);
        }
        else static if(is(Unqual!K == Node))
        {
            V tempValue = pair.value.as!V;
            result = dg(pair.key, tempValue);
        }
        else static if(is(Unqual!V == Node))
        {
            K tempKey = pair.key.as!K;
            result = dg(tempKey, pair.value);
        }
        else
        {
            K tempKey = pair.key.as!K;
            V tempValue = pair.value.as!V;
            result = dg(tempKey, tempValue);
        }
        // Nonzero result means the foreach body executed break/return.
        if(result){break;}
    }
    return result;
}
/// ditto
int opApply(DG)(DG dg) const if (isDelegate!DG && (Parameters!DG.length == 2))
{
    alias K = Parameters!DG[0];
    alias V = Parameters!DG[1];
    enforce(nodeID == NodeID.mapping,
        new NodeException("Trying to mapping-foreach over a " ~ nodeTypeString ~ " node",
            startMark_));

    int result;
    foreach(ref pair; get!(Node.Pair[]))
    {
        static if(is(Unqual!K == Node) && is(Unqual!V == Node))
        {
            result = dg(pair.key, pair.value);
        }
        else static if(is(Unqual!K == Node))
        {
            V tempValue = pair.value.as!V;
            result = dg(pair.key, tempValue);
        }
        else static if(is(Unqual!V == Node))
        {
            K tempKey = pair.key.as!K;
            result = dg(tempKey, pair.value);
        }
        else
        {
            K tempKey = pair.key.as!K;
            V tempValue = pair.value.as!V;
            result = dg(tempKey, tempValue);
        }
        if(result){break;}
    }
    return result;
}

// Mapping foreach with converted keys/values and heterogeneous value types.
@safe unittest
{
    Node n1 = Node(cast(long)11);
    Node n2 = Node(cast(long)12);
    Node n3 = Node(cast(long)13);
    Node n4 = Node(cast(long)14);
    Node k1 = Node("11");
    Node k2 = Node("12");
    Node k3 = Node("13");
    Node k4 = Node("14");
    Node nmap1 = Node([Pair(k1, n1),
                       Pair(k2, n2),
                       Pair(k3, n3),
                       Pair(k4, n4)]);

    int[string] expected = ["11" : 11,
                            "12" : 12,
                            "13" : 13,
                            "14" : 14];
    int[string] array;
    foreach(string key, int value; nmap1)
    {
        array[key] = value;
    }
    assert(array == expected);

    Node nmap2 = Node([Pair(k1, Node(cast(long)5)),
                       Pair(k2, Node(true)),
                       Pair(k3, Node(cast(real)1.0)),
                       Pair(k4, Node("yarly"))]);

    foreach(scope string key, scope Node value; nmap2)
    {
        switch(key)
        {
            case "11": assert(value.as!int    == 5      ); break;
            case "12": assert(value.as!bool   == true   ); break;
            case "13": assert(value.as!float  == 1.0    ); break;
            case "14": assert(value.as!string == "yarly"); break;
            default:   assert(false);
        }
    }
    const nmap3 = nmap2;
    foreach(const Node key, const Node value; nmap3)
    {
        switch(key.as!string)
        {
            case "11": assert(value.as!int    == 5      ); break;
            case "12": assert(value.as!bool   == true   ); break;
            case "13": assert(value.as!float  == 1.0    ); break;
            case "14": assert(value.as!string == "yarly"); break;
            default:   assert(false);
        }
    }
}

// Key/value foreach for mutable, const and immutable mapping nodes.
@safe unittest
{
    string[int] testStrs = [0: "1", 1: "2", 2: "3"];
    auto node1 = Node(testStrs);
    foreach (const int i, string elem; node1)
    {
        assert(elem == testStrs[i]);
    }
    const node2 = Node(testStrs);
    foreach (const int i, string elem; node2)
    {
        assert(elem == testStrs[i]);
    }
    immutable node3 = Node(testStrs);
    foreach (const int i, string elem; node3)
    {
        assert(elem == testStrs[i]);
    }
}

// Mapping-foreach over a sequence node must throw.
@safe unittest
{
    auto node = Node(["a", "b", "c"]);
    const cNode = node;
    assertThrown({foreach (Node a, Node b; node) {}}());
    assertThrown({foreach (const Node a, const Node b; cNode) {}}());
}

/** Add an element to a sequence.
 *
 * This method can only be called on sequence nodes.
 *
 * If value is a node, it is copied to the sequence directly. Otherwise
 * value is converted to a node and then stored in the sequence.
 *
 * $(P When emitting, all values in the sequence will be emitted.
When
 * using the !!set tag, the user needs to ensure that all elements in
 * the sequence are unique, otherwise $(B invalid) YAML code will be
 * emitted.)
 *
 * Params: value = Value to _add to the sequence.
 */
void add(T)(T value)
{
    // An invalid (default-constructed) node becomes an empty sequence first.
    if (!isValid)
    {
        setValue(Node[].init);
    }
    enforce(nodeID == NodeID.sequence,
        new NodeException("Trying to add an element to a " ~ nodeTypeString ~ " node",
            startMark_));

    auto nodes = get!(Node[])();
    static if(is(Unqual!T == Node)){nodes ~= value;}
    else                           {nodes ~= Node(value);}
    setValue(nodes);
}

// add() appends to sequences, initializes invalid nodes, throws otherwise.
@safe unittest
{
    with(Node([1, 2, 3, 4]))
    {
        add(5.0f);
        assert(opIndex(4).as!float == 5.0f);
    }
    with(Node())
    {
        add(5.0f);
        assert(opIndex(0).as!float == 5.0f);
    }
    with(Node(5.0f))
    {
        assertThrown!NodeException(add(5.0f));
    }
    with(Node([5.0f : true]))
    {
        assertThrown!NodeException(add(5.0f));
    }
}

/** Add a key-value pair to a mapping.
 *
 * This method can only be called on mapping nodes.
 *
 * If key and/or value is a node, it is copied to the mapping directly.
 * Otherwise it is converted to a node and then stored in the mapping.
 *
 * $(P It is possible for the same key to be present more than once in a
 * mapping. When emitting, all key-value pairs will be emitted.
 * This is useful with the "!!pairs" tag, but will result in
 * $(B invalid) YAML with "!!map" and "!!omap" tags.)
 *
 * Params: key   = Key to _add.
 *         value = Value to _add.
 */
void add(K, V)(K key, V value)
{
    // An invalid (default-constructed) node becomes an empty mapping first.
    if (!isValid)
    {
        setValue(Node.Pair[].init);
    }
    enforce(nodeID == NodeID.mapping,
        new NodeException("Trying to add a key-value pair to a " ~ nodeTypeString ~ " node",
            startMark_));

    auto pairs = get!(Node.Pair[])();
    pairs ~= Pair(key, value);
    setValue(pairs);
}

// add(key, value) extends mappings, initializes invalid nodes, throws otherwise.
@safe unittest
{
    with(Node([1, 2], [3, 4]))
    {
        add(5, "6");
        assert(opIndex(5).as!string == "6");
    }
    with(Node())
    {
        add(5, "6");
        assert(opIndex(5).as!string == "6");
    }
    with(Node(5.0f))
    {
        assertThrown!NodeException(add(5, "6"));
    }
    with(Node([5.0f]))
    {
        assertThrown!NodeException(add(5, "6"));
    }
}

/** Determine whether a key is in a mapping, and access its value.
 *
 * This method can only be called on mapping nodes.
 *
 * Params: key = Key to search for.
 *
 * Returns: A pointer to the value (as a Node) corresponding to key,
 *          or null if not found.
 *
 * Note: Any modification to the node can invalidate the returned
 *       pointer.
 *
 * See_Also: contains
 */
inout(Node*) opBinaryRight(string op, K)(K key) inout if (op == "in")
{
    enforce(nodeID == NodeID.mapping,
        new NodeException("Trying to use 'in' on a " ~ nodeTypeString ~ " node",
            startMark_));

    auto idx = findPair(key);
    if(idx < 0)
    {
        return null;
    }
    else
    {
        return &(get!(Node.Pair[])[idx].value);
    }
}

// 'in' yields a writable pointer into the mapping.
@safe unittest
{
    auto mapping = Node(["foo", "baz"], ["bar", "qux"]);
    assert("bad" !in mapping && ("bad" in mapping) is null);
    Node* foo = "foo" in mapping;
    assert(foo !is null);
    assert(*foo == Node("bar"));
    assert(foo.get!string == "bar");
    *foo = Node("newfoo");
    assert(mapping["foo"] == Node("newfoo"));
}

// 'in' works on mutable, const and immutable mapping nodes.
@safe unittest
{
    auto mNode = Node(["a": 2]);
    assert("a" in mNode);
    const cNode = Node(["a": 2]);
    assert("a" in cNode);
    immutable iNode = Node(["a": 2]);
    assert("a" in iNode);
}

/** Remove first (if any) occurrence of a value in a collection.
 *
 * This method can only be called on collection nodes.
 *
 * If the node is a sequence, the first node matching value is removed.
* If the node is a mapping, the first key-value pair where _value
 * matches specified value is removed.
 *
 * Params: rhs = Value to _remove.
 *
 * Throws: NodeException if the node is not a collection.
 */
void remove(T)(T rhs)
{
    remove_!(T, No.key, "remove")(rhs);
}

// remove() deletes the first match only; YAMLNull values are removable too.
@safe unittest
{
    with(Node([1, 2, 3, 4, 3]))
    {
        remove(3);
        assert(length == 4);
        assert(opIndex(2).as!int == 4);
        assert(opIndex(3).as!int == 3);

        add(YAMLNull());
        assert(length == 5);
        remove(YAMLNull());
        assert(length == 4);
    }
    with(Node(["1", "2", "3"], [4, 5, 6]))
    {
        remove(4);
        assert(length == 2);
        add("nullkey", YAMLNull());
        assert(length == 3);
        remove(YAMLNull());
        assert(length == 2);
    }
}

/** Remove element at the specified index of a collection.
 *
 * This method can only be called on collection nodes.
 *
 * If the node is a sequence, index must be integral.
 *
 * If the node is a mapping, remove the first key-value pair where
 * key matches index.
 *
 * If the node is a mapping and no key matches index, nothing is removed
 * and no exception is thrown. This ensures behavior similar to D arrays
 * and associative arrays.
 *
 * Params: index = Index to remove at.
 *
 * Throws: NodeException if the node is not a collection, index is out
 *         of range or if a non-integral index is used on a sequence node.
 */
void removeAt(T)(T index)
{
    remove_!(T, Yes.key, "removeAt")(index);
}

// removeAt() checks index type for sequences and matches keys for mappings.
@safe unittest
{
    with(Node([1, 2, 3, 4, 3]))
    {
        removeAt(3);
        assertThrown!NodeException(removeAt("3"));
        assert(length == 4);
        assert(opIndex(3).as!int == 3);
    }
    with(Node(["1", "2", "3"], [4, 5, 6]))
    {
        // no integer 2 key, so don't remove anything
        removeAt(2);
        assert(length == 3);
        removeAt("2");
        assert(length == 2);
        add(YAMLNull(), "nullval");
        assert(length == 3);
        removeAt(YAMLNull());
        assert(length == 2);
    }
}

/// Compare with another _node.
int opCmp(const scope ref Node rhs) const scope @safe
{
    const bool hasNullTag = this.tag_ is null;
    // Only one of them is null: we can order nodes
    if ((hasNullTag) ^ (rhs.tag is null))
        return hasNullTag ? -1 : 1;
    // Either both `null` or both have a value
    if (!hasNullTag)
        if (int result = std.algorithm.comparison.cmp(tag_, rhs.tag_))
            return result;

    static int cmp(T1, T2)(T1 a, T2 b)
    {
        return a > b ? 1  :
               a < b ? -1 : 0;
    }

    // Compare validity: if both valid, we have to compare further.
    if (!this.isValid())
        return rhs.isValid() ? -1 : 0;
    if (!rhs.isValid())
        return 1;
    if (const typeCmp = cmp(type, rhs.type))
        return typeCmp;

    // Lexicographic comparison of two node arrays / pair arrays:
    // identity first, then length, then element-wise.
    static int compareCollections(T)(const scope ref Node lhs, const scope ref Node rhs)
    {
        const c1 = lhs.getValue!T;
        const c2 = rhs.getValue!T;
        if(c1 is c2){return 0;}
        if(c1.length != c2.length)
        {
            return cmp(c1.length, c2.length);
        }
        // Equal lengths, compare items.
        foreach(i; 0 .. c1.length)
        {
            const itemCmp = c1[i].opCmp(c2[i]);
            if(itemCmp != 0){return itemCmp;}
        }
        return 0;
    }

    final switch(type)
    {
        case NodeType.string:
            return std.algorithm.cmp(getValue!string, rhs.getValue!string);
        case NodeType.integer:
            return cmp(getValue!long, rhs.getValue!long);
        case NodeType.boolean:
            const b1 = getValue!bool;
            const b2 = rhs.getValue!bool;
            return b1 ? b2 ? 0 : 1
                      : b2 ? -1 : 0;
        case NodeType.binary:
            const b1 = getValue!(ubyte[]);
            const b2 = rhs.getValue!(ubyte[]);
            return std.algorithm.cmp(b1, b2);
        case NodeType.null_:
            return 0;
        case NodeType.decimal:
            const r1 = getValue!real;
            const r2 = rhs.getValue!real;
            // NaN sorts before any non-NaN value.
            if(isNaN(r1))
            {
                return isNaN(r2) ? 0 : -1;
            }
            if(isNaN(r2))
            {
                return 1;
            }
            // Fuzzy equality.
            if(r1 <= r2 + real.epsilon && r1 >= r2 - real.epsilon)
            {
                return 0;
            }
            return cmp(r1, r2);
        case NodeType.timestamp:
            const t1 = getValue!SysTime;
            const t2 = rhs.getValue!SysTime;
            return cmp(t1, t2);
        case NodeType.mapping:
            return compareCollections!(Pair[])(this, rhs);
        case NodeType.sequence:
            return compareCollections!(Node[])(this, rhs);
        case NodeType.merge:
            assert(false, "Cannot compare merge nodes");
        case NodeType.invalid:
            assert(false, "Cannot compare invalid nodes");
    }
}

// Ensure opCmp is symmetric for collections
@safe unittest
{
    auto node1 = Node(
        [
            Node("New York Yankees", "tag:yaml.org,2002:str"),
            Node("Atlanta Braves", "tag:yaml.org,2002:str")
        ],
        "tag:yaml.org,2002:seq"
    );
    auto node2 = Node(
        [
            Node("Detroit Tigers", "tag:yaml.org,2002:str"),
            Node("Chicago cubs", "tag:yaml.org,2002:str")
        ],
        "tag:yaml.org,2002:seq"
    );
    assert(node1 > node2);
    assert(node2 < node1);
}

// Compute hash of the node.
// The tag (when present) is folded into the value hash so that nodes
// differing only in tag hash differently.
hash_t toHash() nothrow const @trusted
{
    const valueHash = value_.match!(v => hashOf(v));

    return tag_ is null ? valueHash : tag_.hashOf(valueHash);
}

@safe unittest
{
    assert(Node(42).toHash() != Node(41).toHash());
    assert(Node(42).toHash() != Node(42, "some-tag").toHash());
}

/// Get type of the node value.
@property NodeType type() const scope @safe pure nothrow @nogc
{
    return this.value_.match!(
        (const bool _)        => NodeType.boolean,
        (const long _)        => NodeType.integer,
        (const Node[] _)      => NodeType.sequence,
        (const ubyte[] _)     => NodeType.binary,
        (const string _)      => NodeType.string,
        (const Node.Pair[] _) => NodeType.mapping,
        (const SysTime _)     => NodeType.timestamp,
        (const YAMLNull _)    => NodeType.null_,
        (const YAMLMerge _)   => NodeType.merge,
        (const real _)        => NodeType.decimal,
        (const YAMLInvalid _) => NodeType.invalid,
    );
}

/// Get the kind of node this is.
@property NodeID nodeID() const scope @safe pure nothrow @nogc
{
    // Collapse the fine-grained NodeType into the three structural kinds
    // (sequence / mapping / scalar) plus invalid.
    final switch (type)
    {
        case NodeType.sequence:
            return NodeID.sequence;
        case NodeType.mapping:
            return NodeID.mapping;
        case NodeType.boolean:
        case NodeType.integer:
        case NodeType.binary:
        case NodeType.string:
        case NodeType.timestamp:
        case NodeType.null_:
        case NodeType.merge:
        case NodeType.decimal:
            return NodeID.scalar;
        case NodeType.invalid:
            return NodeID.invalid;
    }
}

package:

// Get a string representation of the node tree. Used for debugging.
//
// Params:  level = Level of the node in the tree.
//
// Returns: String representing the node tree.
@property string debugString(uint level = 0) const scope @safe
{
    string indent;
    foreach(i; 0 .. level){indent ~= " ";}

    final switch (nodeID)
    {
        case NodeID.invalid:
            return indent ~ "invalid";
        case NodeID.sequence:
            string result = indent ~ "sequence:\n";
            foreach(ref node; get!(Node[]))
            {
                result ~= node.debugString(level + 1);
            }
            return result;
        case NodeID.mapping:
            string result = indent ~ "mapping:\n";
            foreach(ref pair; get!(Node.Pair[]))
            {
                result ~= indent ~ " pair\n";
                result ~= pair.key.debugString(level + 2);
                result ~= pair.value.debugString(level + 2);
            }
            return result;
        case NodeID.scalar:
            // Fall back to the type name when the scalar cannot be
            // converted to string.
            return indent ~ "scalar(" ~
                (convertsTo!string ? get!string : text(type)) ~ ")\n";
    }
}

public:

/// Human-readable name of this node's structural kind (for error messages).
@property string nodeTypeString() const scope @safe pure nothrow @nogc
{
    final switch (nodeID)
    {
        case NodeID.mapping:
            return "mapping";
        case NodeID.sequence:
            return "sequence";
        case NodeID.scalar:
            return "scalar";
        case NodeID.invalid:
            return "invalid";
    }
}

// Determine if the value can be converted to specified type.
@property bool convertsTo(T)() const
{
    if(isType!T){return true;}

    // Every type allowed in Value should be convertible to string.
    static if(isSomeString!T)        {return true;}
    else static if(isFloatingPoint!T){return type.among!(NodeType.integer, NodeType.decimal);}
    else static if(isIntegral!T)     {return type == NodeType.integer;}
    else static if(is(Unqual!T==bool)){return type == NodeType.boolean;}
    else                             {return false;}
}

/**
 * Sets the style of this node when dumped.
 *
 * Params: style = Any valid style.
 */
void setStyle(CollectionStyle style) @safe
{
    // Invalid nodes are allowed here: the style is kept for when a value
    // is assigned later.
    enforce(!isValid || (nodeID.among(NodeID.mapping, NodeID.sequence)),
        new NodeException("Cannot set collection style for non-collection nodes",
            startMark_));
    collectionStyle = style;
}

/// Ditto
void setStyle(ScalarStyle style) @safe
{
    enforce(!isValid || (nodeID == NodeID.scalar),
        new NodeException("Cannot set scalar style for non-scalar nodes",
            startMark_));
    scalarStyle = style;
}

///
@safe unittest
{
    import dub.internal.dyaml.dumper;
    auto stream = new Appender!string();
    auto node = Node([1, 2, 3, 4, 5]);
    node.setStyle(CollectionStyle.block);

    auto dumper = dumper();
    dumper.dump(stream, node);
}

///
@safe unittest
{
    import dub.internal.dyaml.dumper;
    auto stream = new Appender!string();
    auto node = Node(4);
    node.setStyle(ScalarStyle.literal);

    auto dumper = dumper();
    dumper.dump(stream, node);
}

// Style setters reject mismatched node kinds.
@safe unittest
{
    assertThrown!NodeException(Node(4).setStyle(CollectionStyle.block));
    assertThrown!NodeException(Node([4]).setStyle(ScalarStyle.literal));
}

// Styles set on a node are honored by the dumper output.
@safe unittest
{
    import dub.internal.dyaml.dumper;
    {
        auto stream = new Appender!string();
        auto node = Node([1, 2, 3, 4, 5]);
        node.setStyle(CollectionStyle.block);
        auto dumper = dumper();
        dumper.explicitEnd = false;
        dumper.explicitStart = false;
        dumper.YAMLVersion = null;
        dumper.dump(stream, node);

        //Block style should start with a hyphen.
        assert(stream.data[0] == '-');
    }
    {
        auto stream = new Appender!string();
        auto node = Node([1, 2, 3, 4, 5]);
        node.setStyle(CollectionStyle.flow);
        auto dumper = dumper();
        dumper.explicitEnd = false;
        dumper.explicitStart = false;
        dumper.YAMLVersion = null;
        dumper.dump(stream, node);

        //Flow style should start with a bracket.
        assert(stream.data[0] == '[');
    }
    {
        auto stream = new Appender!string();
        auto node = Node(1);
        node.setStyle(ScalarStyle.singleQuoted);
        auto dumper = dumper();
        dumper.explicitEnd = false;
        dumper.explicitStart = false;
        dumper.YAMLVersion = null;
        dumper.dump(stream, node);

        assert(stream.data == "!!int '1'\n");
    }
    {
        auto stream = new Appender!string();
        auto node = Node(1);
        node.setStyle(ScalarStyle.doubleQuoted);
        auto dumper = dumper();
        dumper.explicitEnd = false;
        dumper.explicitStart = false;
        dumper.YAMLVersion = null;
        dumper.dump(stream, node);

        assert(stream.data == "!!int \"1\"\n");
    }
}

private:

// Determine if the value stored by the node is of specified type.
//
// This only works for default YAML types, not for user defined types.
@property bool isType(T)() const
{
    return value_.match!(
        (const T _) => true,
        _ => false,
    );
}

/// Check at compile time if a type is stored natively
enum canBeType (T) = is(typeof({ value_.match!((const T _) => true, _ => false); }));

// Implementation of contains() and containsKey().
bool contains_(T, Flag!"key" key, string func)(T rhs) const
{
    final switch (nodeID)
    {
        case NodeID.mapping:
            return findPair!(T, key)(rhs) >= 0;
        case NodeID.sequence:
            // Sequences can only be searched by value, never by key.
            static if(!key)
            {
                foreach(ref node; getValue!(Node[]))
                {
                    if(node == rhs){return true;}
                }
                return false;
            }
            else
            {
                throw new NodeException("Trying to use " ~ func ~ "() on a " ~ nodeTypeString ~ " node",
                    startMark_);
            }
        case NodeID.scalar:
        case NodeID.invalid:
            throw new NodeException("Trying to use " ~ func ~ "() on a " ~ nodeTypeString ~ " node",
                startMark_);
    }
}

// Implementation of remove() and removeAt()
void remove_(T, Flag!"key" key, string func)(T rhs)
{
    // Delete element at index by shifting the tail left and shrinking.
    static void removeElem(E, I)(ref Node node, I index)
    {
        auto elems = node.getValue!(E[]);
        moveAll(elems[cast(size_t)index + 1 .. $], elems[cast(size_t)index .. $ - 1]);
        elems.length = elems.length - 1;
        node.setValue(elems);
    }

    final switch (nodeID)
    {
        case NodeID.mapping:
            const index = findPair!(T, key)(rhs);
            if(index >= 0){removeElem!Pair(this, index);}
            break;
        case NodeID.sequence:
            // Linear search for the first element equal to rhs.
            static long getIndex(ref Node node, ref T rhs)
            {
                foreach(idx, ref elem; node.get!(Node[]))
                {
                    if(elem.convertsTo!T &&
                       elem.as!(T, No.stringConversion) == rhs)
                    {
                        return idx;
                    }
                }
                return -1;
            }
            // For removeAt(), rhs itself is the index; for remove(), search.
            const index = select!key(rhs, getIndex(this, rhs));

            // This throws if the index is not integral.
            checkSequenceIndex(index);

            static if(isIntegral!(typeof(index))){removeElem!Node(this, index); break; }
            else                                 {assert(false, "Non-integral sequence index");}
        case NodeID.scalar:
        case NodeID.invalid:
            throw new NodeException("Trying to " ~ func ~ "() from a " ~ nodeTypeString ~ " node",
                startMark_);
    }
}

// Get index of pair with key (or value, if key is false) matching index.
// Cannot be inferred @safe due to https://issues.dlang.org/show_bug.cgi?id=16528
sizediff_t findPair(T, Flag!"key" key = Yes.key)(const scope ref T index) const scope @safe
{
    const pairs = getValue!(Pair[])();
    const(Node)* node;
    foreach(idx, ref const(Pair) pair; pairs)
    {
        static if(key){node = &pair.key;}
        else          {node = &pair.value;}

        // Only compare when the stored node's type is compatible with T,
        // so e.g. an int index never matches a string key.
        const bool typeMatch = (isFloatingPoint!T && (node.type.among!(NodeType.integer, NodeType.decimal))) ||
                               (isIntegral!T && node.type == NodeType.integer) ||
                               (is(Unqual!T==bool) && node.type == NodeType.boolean) ||
                               (isSomeString!T && node.type == NodeType.string) ||
                               (node.isType!T);
        if(typeMatch && *node == index)
        {
            return idx;
        }
    }
    return -1;
}

// Check if index is integral and in range.
void checkSequenceIndex(T)(T index) const scope @safe
{
    assert(nodeID == NodeID.sequence,
        "checkSequenceIndex() called on a " ~ nodeTypeString ~ " node");

    static if(!isIntegral!T)
    {
        throw new NodeException("Indexing a sequence with a non-integral type.",
            startMark_);
    }
    else
    {
        enforce(index >= 0 && index < getValue!(Node[]).length,
            new NodeException("Sequence index out of range: " ~ to!string(index),
                startMark_));
    }
}

// Safe wrapper for getting a value out of the variant.
// tryMatch throws when the stored value is not of type T.
inout(T) getValue(T)() @safe return scope inout
{
    alias RType = typeof(return);
    return value_.tryMatch!((RType r) => r);
}

// Safe wrapper for coercing a value out of the variant.
inout(T) coerceValue(T)() @trusted scope return inout
{
    alias RType = typeof(return);
    static if (is(typeof({ RType rt = T.init; T t = RType.init; })))
        alias TType = T;
    else // `inout` matters (indirection)
        alias TType = RType;

    // `inout(Node[]).to!string` apparently is not safe:
    // struct SumTypeBug {
    //     import std.conv;
    //     Node[] data;
    //
    //     string bug () inout @safe
    //     {
    //         return this.data.to!string;
    //     }
    // }
    // Doesn't compile with DMD v2.100.0
    return this.value_.tryMatch!(
        (inout bool v) @safe => v.to!TType,
        (inout long v) @safe => v.to!TType,
        (inout Node[] v) @trusted => v.to!TType,
        (inout ubyte[] v) @safe => v.to!TType,
        (inout string v) @safe => v.to!TType,
        (inout Node.Pair[] v) @trusted => v.to!TType,
        (inout SysTime v) @trusted => v.to!TType,
        (inout real v) @safe => v.to!TType,
        (inout YAMLNull v) @safe => null.to!TType,
    );
}

// Safe wrapper for setting a value for the variant.
void setValue(T)(T value) @trusted
{
    static if (allowed!T)
    {
        value_ = value;
    }
    else
    {
        // Not a natively-stored type: convert through Node and copy over
        // its tag and styles as well.
        auto tmpNode = cast(Node)value;
        tag_ = tmpNode.tag;
        scalarStyle = tmpNode.scalarStyle;
        collectionStyle = tmpNode.collectionStyle;
        value_ = tmpNode.value_;
    }
}

/// Write a debug-ish textual representation of the stored value to `sink`.
public void toString (DGT) (scope DGT sink)
    const scope @safe
{
    this.value_.match!(
        (const bool v) => formattedWrite(sink, v ? "true" : "false"),
        (const long v) => formattedWrite(sink, "%s", v),
        (const Node[] v) => formattedWrite(sink, "[%(%s, %)]", v),
        (const ubyte[] v) => formattedWrite(sink, "%s", v),
        (const string v) => formattedWrite(sink, `"%s"`, v),
        (const Node.Pair[] v) => formattedWrite(sink, "{%(%s, %)}", v),
        (const SysTime v) => formattedWrite(sink, "%s", v),
        (const YAMLNull v) => formattedWrite(sink, "%s", v),
        (const YAMLMerge v) => formattedWrite(sink, "%s", v),
        (const real v) => formattedWrite(sink, "%s", v),
        (const YAMLInvalid v) => formattedWrite(sink, "%s", v),
    );
}
}

package:
// Merge pairs into an array of pairs based on merge rules in the YAML spec.
// // Any new pair will only be added if there is not already a pair // with the same key. // // Params: pairs = Appender managing the array of pairs to merge into. // toMerge = Pairs to merge. void merge(ref Appender!(Node.Pair[]) pairs, Node.Pair[] toMerge) @safe { bool eq(ref Node.Pair a, ref Node.Pair b) @safe { return a.key == b.key; } foreach(ref pair; toMerge) if(!canFind!eq(pairs.data, pair)) { pairs.put(pair); } } enum hasNodeConstructor(T) = hasSimpleNodeConstructor!T || hasExpandedNodeConstructor!T; template hasSimpleNodeConstructor(T) { static if (is(T == struct)) { enum hasSimpleNodeConstructor = is(typeof(T(Node.init))); } else static if (is(T == class)) { enum hasSimpleNodeConstructor = is(typeof(new T(Node.init))); } else enum hasSimpleNodeConstructor = false; } template hasExpandedNodeConstructor(T) { static if (is(T == struct)) { enum hasExpandedNodeConstructor = is(typeof(T(Node.init, ""))); } else static if (is(T == class)) { enum hasExpandedNodeConstructor = is(typeof(new T(Node.init, ""))); } else enum hasExpandedNodeConstructor = false; } enum castableToNode(T) = (is(T == struct) || is(T == class)) && is(typeof(T.opCast!Node()) : Node); @safe unittest { import dub.internal.dyaml : Loader, Node; static struct Foo { string[] bars; this(const Node node) { foreach(value; node["bars"].sequence) { bars ~= value.as!string.idup; } } } Loader.fromString(`{ bars: ["a", "b"] }`) .load .as!(Foo); } @safe unittest { import dub.internal.dyaml : Loader, Node; import std : split, to; static class MyClass { int x, y, z; this(Node node) { auto parts = node.as!string().split(":"); x = parts[0].to!int; y = parts[1].to!int; z = parts[2].to!int; } } auto loader = Loader.fromString(`"1:2:3"`); Node node = loader.load(); auto mc = node.get!MyClass; } @safe unittest { import dub.internal.dyaml : Loader, Node; import std : split, to; static class MyClass { int x, y, z; this(Node node) { auto parts = node.as!string().split(":"); x = parts[0].to!int; y = 
parts[1].to!int; z = parts[2].to!int; } } auto loader = Loader.fromString(`"1:2:3"`); const node = loader.load(); auto mc = node.get!MyClass; } dub-1.40.0/source/dub/internal/dyaml/package.d000066400000000000000000000011031477246567400211230ustar00rootroot00000000000000// Copyright Ferdinand Majerech 2011. // Distributed under the Boost Software License, Version 1.0. // (See accompanying file LICENSE_1_0.txt or copy at // http://www.boost.org/LICENSE_1_0.txt) module dub.internal.dyaml; public import dub.internal.dyaml.dumper; public import dub.internal.dyaml.encoding; public import dub.internal.dyaml.exception; public import dub.internal.dyaml.linebreak; public import dub.internal.dyaml.loader; public import dub.internal.dyaml.resolver; public import dub.internal.dyaml.style; public import dub.internal.dyaml.node; dub-1.40.0/source/dub/internal/dyaml/parser.d000066400000000000000000001137371477246567400210450ustar00rootroot00000000000000 // Copyright Ferdinand Majerech 2011-2014. // Distributed under the Boost Software License, Version 1.0. // (See accompanying file LICENSE_1_0.txt or copy at // http://www.boost.org/LICENSE_1_0.txt) /** * YAML parser. * Code based on PyYAML: http://www.pyyaml.org */ module dub.internal.dyaml.parser; import std.algorithm; import std.array; import std.conv; import std.exception; import std.typecons; import dub.internal.dyaml.event; import dub.internal.dyaml.exception; import dub.internal.dyaml.scanner; import dub.internal.dyaml.style; import dub.internal.dyaml.token; import dub.internal.dyaml.tagdirective; /** * The following YAML grammar is LL(1) and is parsed by a recursive descent * parser. * * stream ::= STREAM-START implicit_document? explicit_document* STREAM-END * implicit_document ::= block_node DOCUMENT-END* * explicit_document ::= DIRECTIVE* DOCUMENT-START block_node? DOCUMENT-END* * block_node_or_indentless_sequence ::= * ALIAS * | properties (block_content | indentless_block_sequence)? 
* | block_content * | indentless_block_sequence * block_node ::= ALIAS * | properties block_content? * | block_content * flow_node ::= ALIAS * | properties flow_content? * | flow_content * properties ::= TAG ANCHOR? | ANCHOR TAG? * block_content ::= block_collection | flow_collection | SCALAR * flow_content ::= flow_collection | SCALAR * block_collection ::= block_sequence | block_mapping * flow_collection ::= flow_sequence | flow_mapping * block_sequence ::= BLOCK-SEQUENCE-START (BLOCK-ENTRY block_node?)* BLOCK-END * indentless_sequence ::= (BLOCK-ENTRY block_node?)+ * block_mapping ::= BLOCK-MAPPING_START * ((KEY block_node_or_indentless_sequence?)? * (VALUE block_node_or_indentless_sequence?)?)* * BLOCK-END * flow_sequence ::= FLOW-SEQUENCE-START * (flow_sequence_entry FLOW-ENTRY)* * flow_sequence_entry? * FLOW-SEQUENCE-END * flow_sequence_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)? * flow_mapping ::= FLOW-MAPPING-START * (flow_mapping_entry FLOW-ENTRY)* * flow_mapping_entry? * FLOW-MAPPING-END * flow_mapping_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)? 
* * FIRST sets: * * stream: { STREAM-START } * explicit_document: { DIRECTIVE DOCUMENT-START } * implicit_document: FIRST(block_node) * block_node: { ALIAS TAG ANCHOR SCALAR BLOCK-SEQUENCE-START BLOCK-MAPPING-START FLOW-SEQUENCE-START FLOW-MAPPING-START } * flow_node: { ALIAS ANCHOR TAG SCALAR FLOW-SEQUENCE-START FLOW-MAPPING-START } * block_content: { BLOCK-SEQUENCE-START BLOCK-MAPPING-START FLOW-SEQUENCE-START FLOW-MAPPING-START SCALAR } * flow_content: { FLOW-SEQUENCE-START FLOW-MAPPING-START SCALAR } * block_collection: { BLOCK-SEQUENCE-START BLOCK-MAPPING-START } * flow_collection: { FLOW-SEQUENCE-START FLOW-MAPPING-START } * block_sequence: { BLOCK-SEQUENCE-START } * block_mapping: { BLOCK-MAPPING-START } * block_node_or_indentless_sequence: { ALIAS ANCHOR TAG SCALAR BLOCK-SEQUENCE-START BLOCK-MAPPING-START FLOW-SEQUENCE-START FLOW-MAPPING-START BLOCK-ENTRY } * indentless_sequence: { ENTRY } * flow_collection: { FLOW-SEQUENCE-START FLOW-MAPPING-START } * flow_sequence: { FLOW-SEQUENCE-START } * flow_mapping: { FLOW-MAPPING-START } * flow_sequence_entry: { ALIAS ANCHOR TAG SCALAR FLOW-SEQUENCE-START FLOW-MAPPING-START KEY } * flow_mapping_entry: { ALIAS ANCHOR TAG SCALAR FLOW-SEQUENCE-START FLOW-MAPPING-START KEY } */ package: /// Generates events from tokens provided by a Scanner. /// /// While Parser receives tokens with non-const character slices, the events it /// produces are immutable strings, which are usually the same slices, cast to string. /// Parser is the last layer of D:YAML that may possibly do any modifications to these /// slices. final class Parser { private: ///Default tag handle shortcuts and replacements. static TagDirective[] defaultTagDirectives_ = [TagDirective("!", "!"), TagDirective("!!", "tag:yaml.org,2002:")]; ///Scanner providing YAML tokens. Scanner scanner_; ///Event produced by the most recent state. Event currentEvent_; ///YAML version string. string YAMLVersion_ = null; ///Tag handle shortcuts and replacements. 
TagDirective[] tagDirectives_; ///Stack of states. Appender!(Event delegate() @safe[]) states_; ///Stack of marks used to keep track of extents of e.g. YAML collections. Appender!(Mark[]) marks_; ///Current state. Event delegate() @safe state_; public: ///Construct a Parser using specified Scanner. this(Scanner scanner) @safe { state_ = &parseStreamStart; scanner_ = scanner; states_.reserve(32); marks_.reserve(32); } /** * Check if any events are left. May have side effects in some cases. */ bool empty() @safe { ensureState(); return currentEvent_.isNull; } /** * Return the current event. * * Must not be called if there are no events left. */ Event front() @safe { ensureState(); assert(!currentEvent_.isNull, "No event left to peek"); return currentEvent_; } /** * Skip to the next event. * * Must not be called if there are no events left. */ void popFront() @safe { currentEvent_.id = EventID.invalid; ensureState(); } /// Set file name. ref inout(string) name() inout @safe return pure nothrow @nogc { return scanner_.name; } /// Get a mark from the current reader position Mark mark() const @safe pure nothrow @nogc { return scanner_.mark; } private: /// If current event is invalid, load the next valid one if possible. void ensureState() @safe { if(currentEvent_.isNull && state_ !is null) { currentEvent_ = state_(); } } ///Pop and return the newest state in states_. Event delegate() @safe popState() @safe { enforce(states_.data.length > 0, new YAMLException("Parser: Need to pop state but no states left to pop")); const result = states_.data.back; states_.shrinkTo(states_.data.length - 1); return result; } ///Pop and return the newest mark in marks_. 
Mark popMark() @safe { enforce(marks_.data.length > 0, new YAMLException("Parser: Need to pop mark but no marks left to pop")); const result = marks_.data.back; marks_.shrinkTo(marks_.data.length - 1); return result; } /// Push a state on the stack void pushState(Event delegate() @safe state) @safe { states_ ~= state; } /// Push a mark on the stack void pushMark(Mark mark) @safe { marks_ ~= mark; } /** * stream ::= STREAM-START implicit_document? explicit_document* STREAM-END * implicit_document ::= block_node DOCUMENT-END* * explicit_document ::= DIRECTIVE* DOCUMENT-START block_node? DOCUMENT-END* */ ///Parse stream start. Event parseStreamStart() @safe { const token = scanner_.front; scanner_.popFront(); state_ = &parseImplicitDocumentStart; return streamStartEvent(token.startMark, token.endMark); } /// Parse implicit document start, unless explicit detected: if so, parse explicit. Event parseImplicitDocumentStart() @safe { // Parse an implicit document. if(!scanner_.front.id.among!(TokenID.directive, TokenID.documentStart, TokenID.streamEnd)) { tagDirectives_ = defaultTagDirectives_; const token = scanner_.front; pushState(&parseDocumentEnd); state_ = &parseBlockNode; return documentStartEvent(token.startMark, token.endMark, false, null, null); } return parseDocumentStart(); } ///Parse explicit document start. Event parseDocumentStart() @safe { //Parse any extra document end indicators. while(scanner_.front.id == TokenID.documentEnd) { scanner_.popFront(); } //Parse an explicit document. 
if(scanner_.front.id != TokenID.streamEnd) { const startMark = scanner_.front.startMark; auto tagDirectives = processDirectives(); enforce(scanner_.front.id == TokenID.documentStart, new ParserException("Expected document start but found " ~ scanner_.front.idString, scanner_.front.startMark)); const endMark = scanner_.front.endMark; scanner_.popFront(); pushState(&parseDocumentEnd); state_ = &parseDocumentContent; return documentStartEvent(startMark, endMark, true, YAMLVersion_, tagDirectives); } else { //Parse the end of the stream. const token = scanner_.front; scanner_.popFront(); assert(states_.data.length == 0); assert(marks_.data.length == 0); state_ = null; return streamEndEvent(token.startMark, token.endMark); } } ///Parse document end (explicit or implicit). Event parseDocumentEnd() @safe { Mark startMark = scanner_.front.startMark; const bool explicit = scanner_.front.id == TokenID.documentEnd; Mark endMark = startMark; if (explicit) { endMark = scanner_.front.endMark; scanner_.popFront(); } state_ = &parseDocumentStart; return documentEndEvent(startMark, endMark, explicit); } ///Parse document content. Event parseDocumentContent() @safe { if(scanner_.front.id.among!(TokenID.directive, TokenID.documentStart, TokenID.documentEnd, TokenID.streamEnd)) { state_ = popState(); return processEmptyScalar(scanner_.front.startMark); } return parseBlockNode(); } /// Process directives at the beginning of a document. TagDirective[] processDirectives() @safe { // Destroy version and tag handles from previous document. YAMLVersion_ = null; tagDirectives_.length = 0; // Process directives. 
while(scanner_.front.id == TokenID.directive) { const token = scanner_.front; scanner_.popFront(); string value = token.value.idup; if(token.directive == DirectiveType.yaml) { enforce(YAMLVersion_ is null, new ParserException("Duplicate YAML directive", token.startMark)); const minor = value.split(".")[0]; enforce(minor == "1", new ParserException("Incompatible document (version 1.x is required)", token.startMark)); YAMLVersion_ = value; } else if(token.directive == DirectiveType.tag) { auto handle = value[0 .. token.valueDivider]; foreach(ref pair; tagDirectives_) { // handle const h = pair.handle; enforce(h != handle, new ParserException("Duplicate tag handle: " ~ handle, token.startMark)); } tagDirectives_ ~= TagDirective(handle, value[token.valueDivider .. $]); } // Any other directive type is ignored (only YAML and TAG are in YAML // 1.1/1.2, any other directives are "reserved") } TagDirective[] value = tagDirectives_; //Add any default tag handles that haven't been overridden. foreach(ref defaultPair; defaultTagDirectives_) { bool found; foreach(ref pair; tagDirectives_) if(defaultPair.handle == pair.handle) { found = true; break; } if(!found) {tagDirectives_ ~= defaultPair; } } return value; } /** * block_node_or_indentless_sequence ::= ALIAS * | properties (block_content | indentless_block_sequence)? * | block_content * | indentless_block_sequence * block_node ::= ALIAS * | properties block_content? * | block_content * flow_node ::= ALIAS * | properties flow_content? * | flow_content * properties ::= TAG ANCHOR? | ANCHOR TAG? * block_content ::= block_collection | flow_collection | SCALAR * flow_content ::= flow_collection | SCALAR * block_collection ::= block_sequence | block_mapping * flow_collection ::= flow_sequence | flow_mapping */ ///Parse a node. 
        /// Parse a single node (alias, scalar, or collection start).
        ///
        /// Params: block              = Are block collections (not just flow) allowed here?
        ///         indentlessSequence = May the node be an indentless block sequence
        ///                              (a '-' entry at the current indent, with no
        ///                              BLOCK-SEQUENCE-START token)?
        ///
        /// Throws: ParserException if no node content is found where required.
        Event parseNode(const Flag!"block" block,
                        const Flag!"indentlessSequence" indentlessSequence = No.indentlessSequence)
            @trusted
        {
            // An alias node is complete by itself.
            if(scanner_.front.id == TokenID.alias_)
            {
                const token = scanner_.front;
                scanner_.popFront();
                state_ = popState();
                return aliasEvent(token.startMark, token.endMark,
                                  cast(string)token.value);
            }

            string anchor;
            string tag;
            Mark startMark, endMark, tagMark;
            bool invalidMarks = true;
            // The index in the tag string where tag handle ends and tag suffix starts.
            uint tagHandleEnd;

            //Get anchor/tag if detected. Return false otherwise.
            bool get(const TokenID id, const Flag!"first" first, ref string target) @safe
            {
                if(scanner_.front.id != id){return false;}
                invalidMarks = false;
                const token = scanner_.front;
                scanner_.popFront();
                if(first){startMark = token.startMark;}
                if(id == TokenID.tag)
                {
                    tagMark = token.startMark;
                    tagHandleEnd = token.valueDivider;
                }
                endMark = token.endMark;
                target = token.value.idup;
                return true;
            }

            //Anchor and/or tag can be in any order.
            if(get(TokenID.anchor, Yes.first, anchor)){get(TokenID.tag, No.first, tag);}
            else if(get(TokenID.tag, Yes.first, tag)) {get(TokenID.anchor, No.first, anchor);}

            // Resolve %TAG handle shortcuts into the full tag.
            if(tag !is null){tag = processTag(tag, tagHandleEnd, startMark, tagMark);}

            // Neither anchor nor tag seen: marks come from the upcoming content token.
            if(invalidMarks)
            {
                startMark = endMark = scanner_.front.startMark;
            }

            bool implicit = (tag is null || tag == "!");

            if(indentlessSequence && scanner_.front.id == TokenID.blockEntry)
            {
                state_ = &parseIndentlessSequenceEntry;
                return sequenceStartEvent
                    (startMark, scanner_.front.endMark, anchor, tag, implicit,
                     CollectionStyle.block);
            }

            if(scanner_.front.id == TokenID.scalar)
            {
                auto token = scanner_.front;
                scanner_.popFront();
                // Double quoted scalars may still contain unexpanded escape
                // sequences; expand them here.
                auto value = token.style == ScalarStyle.doubleQuoted
                           ? handleDoubleQuotedScalarEscapes(token.value)
                           : cast(string)token.value;

                implicit = (token.style == ScalarStyle.plain && tag is null) || tag == "!";
                state_ = popState();
                return scalarEvent(startMark, token.endMark, anchor, tag, implicit, value, token.style);
            }

            if(scanner_.front.id == TokenID.flowSequenceStart)
            {
                endMark = scanner_.front.endMark;
                state_ = &parseFlowSequenceEntry!(Yes.first);
                return sequenceStartEvent(startMark, endMark, anchor, tag, implicit,
                                          CollectionStyle.flow);
            }

            if(scanner_.front.id == TokenID.flowMappingStart)
            {
                endMark = scanner_.front.endMark;
                state_ = &parseFlowMappingKey!(Yes.first);
                return mappingStartEvent(startMark, endMark, anchor, tag, implicit,
                                         CollectionStyle.flow);
            }

            if(block && scanner_.front.id == TokenID.blockSequenceStart)
            {
                endMark = scanner_.front.endMark;
                state_ = &parseBlockSequenceEntry!(Yes.first);
                return sequenceStartEvent(startMark, endMark, anchor, tag, implicit,
                                          CollectionStyle.block);
            }

            if(block && scanner_.front.id == TokenID.blockMappingStart)
            {
                endMark = scanner_.front.endMark;
                state_ = &parseBlockMappingKey!(Yes.first);
                return mappingStartEvent(startMark, endMark, anchor, tag, implicit,
                                         CollectionStyle.block);
            }

            if(anchor !is null || tag !is null)
            {
                state_ = popState();

                //PyYAML uses a tuple(implicit, false) for the second last arg here,
                //but the second bool is never used after that - so we don't use it.

                //Empty scalars are allowed even if a tag or an anchor is specified.
                return scalarEvent(startMark, endMark, anchor, tag, implicit , "");
            }

            const token = scanner_.front;
            throw new ParserException("While parsing a " ~ (block ? "block" : "flow") ~ " node, expected node content, but found: "
                ~ token.idString, token.startMark, "node started here", startMark);
        }

        /// Handle escape sequences in a double quoted scalar.
        ///
        /// Moved here from scanner as it can't always be done in-place with slices.
        string handleDoubleQuotedScalarEscapes(const(char)[] tokenValue) const @safe
        {
            // notInPlace stays null while the expansion fits back into the
            // token's own buffer; it becomes a fresh copy as soon as an escape
            // expands into more bytes than it occupied.
            string notInPlace;
            bool inEscape;
            auto appender = appender!(string)();
            for(const(char)[] oldValue = tokenValue; !oldValue.empty();)
            {
                const dchar c = oldValue.front();
                oldValue.popFront();
                if(!inEscape)
                {
                    if(c != '\\')
                    {
                        if(notInPlace is null) { appender.put(c); }
                        else                   { notInPlace ~= c; }
                        continue;
                    }
                    // Escape sequence starts with a '\'
                    inEscape = true;
                    continue;
                }

                import dub.internal.dyaml.escapes;
                scope(exit) { inEscape = false; }

                // 'Normal' escape sequence.
                if(c.among!(escapes))
                {
                    if(notInPlace is null)
                    {
                        // \L and \P can't be handled in place as they expand into
                        // many-byte unicode chars
                        if(c != 'L' && c != 'P')
                        {
                            appender.put(dub.internal.dyaml.escapes.fromEscape(c));
                            continue;
                        }
                        // Need to duplicate as we won't fit into
                        // token.value - which is what appender uses
                        notInPlace = appender.data.dup;
                        notInPlace ~= dub.internal.dyaml.escapes.fromEscape(c);
                        continue;
                    }
                    notInPlace ~= dub.internal.dyaml.escapes.fromEscape(c);
                    continue;
                }

                // Unicode char written in hexadecimal in an escape sequence.
                if(c.among!(escapeHexCodeList))
                {
                    // Scanner has already checked that the hex string is valid.
                    const hexLength = dub.internal.dyaml.escapes.escapeHexLength(c);
                    // Any hex digits are 1-byte so this works.
                    const(char)[] hex = oldValue[0 .. hexLength];
                    oldValue = oldValue[hexLength .. $];

                    import std.ascii : isHexDigit;
                    assert(!hex.canFind!(d => !d.isHexDigit),
                           "Scanner must ensure the hex string is valid");

                    const decoded = cast(dchar)parse!int(hex, 16u);
                    if(notInPlace is null) { appender.put(decoded); }
                    else                   { notInPlace ~= decoded; }
                    continue;
                }

                assert(false, "Scanner must handle unsupported escapes");
            }

            return notInPlace is null ? appender.data : notInPlace;
        }

        /**
         * Process a tag string retrieved from a tag token.
         *
         * Params:  tag       = Tag before processing.
         *          handleEnd = Index in tag where tag handle ends and tag suffix
         *                      starts.
         *          startMark = Position of the node the tag belongs to.
         *          tagMark   = Position of the tag.
         */
        string processTag(const string tag, const uint handleEnd,
                          const Mark startMark, const Mark tagMark)
            const @safe
        {
            const handle = tag[0 .. handleEnd];
            const suffix = tag[handleEnd .. $];

            if(handle.length > 0)
            {
                string replacement;
                foreach(ref pair; tagDirectives_)
                {
                    if(pair.handle == handle)
                    {
                        replacement = pair.prefix;
                        break;
                    }
                }
                //handle must be in tagDirectives_
                enforce(replacement !is null,
                        new ParserException("While parsing a node, found undefined tag handle: "
                            ~ handle, tagMark, "node started here", startMark));
                return replacement ~ suffix;
            }
            return suffix;
        }

        ///Wrappers to parse nodes.
        Event parseBlockNode() @safe {return parseNode(Yes.block);}
        Event parseFlowNode() @safe {return parseNode(No.block);}
        Event parseBlockNodeOrIndentlessSequence() @safe {return parseNode(Yes.block, Yes.indentlessSequence);}

        ///block_sequence ::= BLOCK-SEQUENCE-START (BLOCK-ENTRY block_node?)* BLOCK-END

        ///Parse an entry of a block sequence. If first is true, this is the first entry.
        Event parseBlockSequenceEntry(Flag!"first" first)() @safe
        {
            static if(first)
            {
                // Remember where the sequence started, for error reporting.
                pushMark(scanner_.front.startMark);
                scanner_.popFront();
            }

            if(scanner_.front.id == TokenID.blockEntry)
            {
                const token = scanner_.front;
                scanner_.popFront();
                if(!scanner_.front.id.among!(TokenID.blockEntry, TokenID.blockEnd))
                {
                    pushState(&parseBlockSequenceEntry!(No.first));
                    return parseBlockNode();
                }

                // '-' immediately followed by another '-' or block end: empty entry.
                state_ = &parseBlockSequenceEntry!(No.first);
                return processEmptyScalar(token.endMark);
            }

            if(scanner_.front.id != TokenID.blockEnd)
            {
                const token = scanner_.front;
                throw new ParserException("While parsing a block sequence, expected block end, but found: "
                    ~ token.idString, token.startMark, "sequence started here", marks_.data.back);
            }

            state_ = popState();
            popMark();
            const token = scanner_.front;
            scanner_.popFront();
            return sequenceEndEvent(token.startMark, token.endMark);
        }

        ///indentless_sequence ::= (BLOCK-ENTRY block_node?)+

        ///Parse an entry of an indentless sequence.
Event parseIndentlessSequenceEntry() @safe { if(scanner_.front.id == TokenID.blockEntry) { const token = scanner_.front; scanner_.popFront(); if(!scanner_.front.id.among!(TokenID.blockEntry, TokenID.key, TokenID.value, TokenID.blockEnd)) { pushState(&parseIndentlessSequenceEntry); return parseBlockNode(); } state_ = &parseIndentlessSequenceEntry; return processEmptyScalar(token.endMark); } state_ = popState(); const token = scanner_.front; return sequenceEndEvent(token.startMark, token.endMark); } /** * block_mapping ::= BLOCK-MAPPING_START * ((KEY block_node_or_indentless_sequence?)? * (VALUE block_node_or_indentless_sequence?)?)* * BLOCK-END */ ///Parse a key in a block mapping. If first is true, this is the first key. Event parseBlockMappingKey(Flag!"first" first)() @safe { static if(first) { pushMark(scanner_.front.startMark); scanner_.popFront(); } if(scanner_.front.id == TokenID.key) { const token = scanner_.front; scanner_.popFront(); if(!scanner_.front.id.among!(TokenID.key, TokenID.value, TokenID.blockEnd)) { pushState(&parseBlockMappingValue); return parseBlockNodeOrIndentlessSequence(); } state_ = &parseBlockMappingValue; return processEmptyScalar(token.endMark); } if(scanner_.front.id != TokenID.blockEnd) { const token = scanner_.front; throw new ParserException("While parsing a block mapping, expected block end, but found: " ~ token.idString, token.startMark, "mapping started here", marks_.data.back); } state_ = popState(); popMark(); const token = scanner_.front; scanner_.popFront(); return mappingEndEvent(token.startMark, token.endMark); } ///Parse a value in a block mapping. 
Event parseBlockMappingValue() @safe { if(scanner_.front.id == TokenID.value) { const token = scanner_.front; scanner_.popFront(); if(!scanner_.front.id.among!(TokenID.key, TokenID.value, TokenID.blockEnd)) { pushState(&parseBlockMappingKey!(No.first)); return parseBlockNodeOrIndentlessSequence(); } state_ = &parseBlockMappingKey!(No.first); return processEmptyScalar(token.endMark); } state_= &parseBlockMappingKey!(No.first); return processEmptyScalar(scanner_.front.startMark); } /** * flow_sequence ::= FLOW-SEQUENCE-START * (flow_sequence_entry FLOW-ENTRY)* * flow_sequence_entry? * FLOW-SEQUENCE-END * flow_sequence_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)? * * Note that while production rules for both flow_sequence_entry and * flow_mapping_entry are equal, their interpretations are different. * For `flow_sequence_entry`, the part `KEY flow_node? (VALUE flow_node?)?` * generate an inline mapping (set syntax). */ ///Parse an entry in a flow sequence. If first is true, this is the first entry. 
Event parseFlowSequenceEntry(Flag!"first" first)() @safe { static if(first) { pushMark(scanner_.front.startMark); scanner_.popFront(); } if(scanner_.front.id != TokenID.flowSequenceEnd) { static if(!first) { if(scanner_.front.id == TokenID.flowEntry) { scanner_.popFront(); } else { const token = scanner_.front; throw new ParserException("While parsing a flow sequence, expected ',' or ']', but got: " ~ token.idString, token.startMark, "sequence started here", marks_.data.back); } } if(scanner_.front.id == TokenID.key) { const token = scanner_.front; state_ = &parseFlowSequenceEntryMappingKey; return mappingStartEvent(token.startMark, token.endMark, null, null, true, CollectionStyle.flow); } else if(scanner_.front.id != TokenID.flowSequenceEnd) { pushState(&parseFlowSequenceEntry!(No.first)); return parseFlowNode(); } } const token = scanner_.front; scanner_.popFront(); state_ = popState(); popMark(); return sequenceEndEvent(token.startMark, token.endMark); } ///Parse a key in flow context. Event parseFlowKey(Event delegate() @safe nextState) @safe { const token = scanner_.front; scanner_.popFront(); if(!scanner_.front.id.among!(TokenID.value, TokenID.flowEntry, TokenID.flowSequenceEnd)) { pushState(nextState); return parseFlowNode(); } state_ = nextState; return processEmptyScalar(token.endMark); } ///Parse a mapping key in an entry in a flow sequence. Event parseFlowSequenceEntryMappingKey() @safe { return parseFlowKey(&parseFlowSequenceEntryMappingValue); } ///Parse a mapping value in a flow context. 
Event parseFlowValue(TokenID checkId, Event delegate() @safe nextState) @safe { if(scanner_.front.id == TokenID.value) { const token = scanner_.front; scanner_.popFront(); if(!scanner_.front.id.among(TokenID.flowEntry, checkId)) { pushState(nextState); return parseFlowNode(); } state_ = nextState; return processEmptyScalar(token.endMark); } state_ = nextState; return processEmptyScalar(scanner_.front.startMark); } ///Parse a mapping value in an entry in a flow sequence. Event parseFlowSequenceEntryMappingValue() @safe { return parseFlowValue(TokenID.flowSequenceEnd, &parseFlowSequenceEntryMappingEnd); } ///Parse end of a mapping in a flow sequence entry. Event parseFlowSequenceEntryMappingEnd() @safe { state_ = &parseFlowSequenceEntry!(No.first); const token = scanner_.front; return mappingEndEvent(token.startMark, token.startMark); } /** * flow_mapping ::= FLOW-MAPPING-START * (flow_mapping_entry FLOW-ENTRY)* * flow_mapping_entry? * FLOW-MAPPING-END * flow_mapping_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)? */ ///Parse a key in a flow mapping. Event parseFlowMappingKey(Flag!"first" first)() @safe { static if(first) { pushMark(scanner_.front.startMark); scanner_.popFront(); } if(scanner_.front.id != TokenID.flowMappingEnd) { static if(!first) { if(scanner_.front.id == TokenID.flowEntry) { scanner_.popFront(); } else { const token = scanner_.front; throw new ParserException("While parsing a flow mapping, expected ',' or '}', but got: " ~ token.idString, token.startMark, "mapping started here", marks_.data.back); } } if(scanner_.front.id == TokenID.key) { return parseFlowKey(&parseFlowMappingValue); } if(scanner_.front.id != TokenID.flowMappingEnd) { pushState(&parseFlowMappingEmptyValue); return parseFlowNode(); } } const token = scanner_.front; scanner_.popFront(); state_ = popState(); popMark(); return mappingEndEvent(token.startMark, token.endMark); } ///Parse a value in a flow mapping. 
Event parseFlowMappingValue() @safe { return parseFlowValue(TokenID.flowMappingEnd, &parseFlowMappingKey!(No.first)); } ///Parse an empty value in a flow mapping. Event parseFlowMappingEmptyValue() @safe { state_ = &parseFlowMappingKey!(No.first); return processEmptyScalar(scanner_.front.startMark); } ///Return an empty scalar. Event processEmptyScalar(const Mark mark) @safe pure nothrow const @nogc { return scalarEvent(mark, mark, null, null, true, ""); } } // Provide good error message for bad block mapping @safe unittest { import dub.internal.dyaml.loader : Loader; const str = `[`; const exc = collectException!LoaderException(Loader.fromString(str).load()); assert(exc); assert(exc.message() == "Unable to load : While parsing a flow node, expected node content, but found: streamEnd\n" ~ ":1,2\nnode started here: :1,2"); } // Provide good error message for bad block mapping @safe unittest { import dub.internal.dyaml.loader : Loader; const str = `&anchor !foo!bar value`; const exc = collectException!LoaderException(Loader.fromString(str).load()); assert(exc); assert(exc.message() == "Unable to load : While parsing a node, found undefined tag handle: !foo!\n" ~ ":1,9\nnode started here: :1,1"); } // Provide good error message for bad block mapping @safe unittest { import dub.internal.dyaml.loader : Loader; const str = `- a ,`; const exc = collectException!LoaderException(Loader.fromString(str).load()); assert(exc); assert(exc.message() == "Unable to load : While parsing a block sequence, expected block end, but found: flowEntry\n" ~ ":2,1\nsequence started here: :1,1"); } // Provide good error message for bad block mapping @safe unittest { import dub.internal.dyaml.loader : Loader; const str = `a: b ,`; const exc = collectException!LoaderException(Loader.fromString(str).load()); assert(exc); assert(exc.message() == "Unable to load : While parsing a block mapping, expected block end, but found: flowEntry\n" ~ ":2,1\nmapping started here: :1,1"); } // Provide good 
error message for bad flow sequence @safe unittest { import dub.internal.dyaml.loader : Loader; const str = `[a,b,c`; const exc = collectException!LoaderException(Loader.fromString(str).load()); assert(exc); assert(exc.message() == "Unable to load : While parsing a flow sequence, expected ',' or ']', but got: streamEnd\n" ~ ":1,7\nsequence started here: :1,1"); } // Provide good error message for bad flow mapping @safe unittest { import dub.internal.dyaml.loader : Loader; const str = `{a,b,c`; const exc = collectException!LoaderException(Loader.fromString(str).load()); assert(exc); assert(exc.message() == "Unable to load : While parsing a flow mapping, expected ',' or '}', but got: streamEnd\n" ~ ":1,7\nmapping started here: :1,1"); } dub-1.40.0/source/dub/internal/dyaml/queue.d000066400000000000000000000153051477246567400206650ustar00rootroot00000000000000 // Copyright Ferdinand Majerech 2011-2014. // Distributed under the Boost Software License, Version 1.0. // (See accompanying file LICENSE_1_0.txt or copy at // http://www.boost.org/LICENSE_1_0.txt) module dub.internal.dyaml.queue; import std.traits : hasMember, hasIndirections; package: /// Simple queue implemented as a singly linked list with a tail pointer. /// /// Needed in some D:YAML code that needs a queue-like structure without too much /// reallocation that goes with an array. /// /// Allocations are non-GC and are damped by a free-list based on the nodes /// that are removed. Note that elements lifetime must be managed /// outside. struct Queue(T) if (!hasMember!(T, "__xdtor")) { private: // Linked list node containing one element and pointer to the next node. struct Node { T payload_; Node* next_; } // Start of the linked list - first element added in time (end of the queue). Node* first_; // Last element of the linked list - last element added in time (start of the queue). Node* last_; // free-list Node* stock; // Length of the queue. 
size_t length_; // allocate a new node or recycle one from the stock. Node* makeNewNode(T thePayload, Node* theNext = null) @trusted nothrow @nogc { import std.experimental.allocator : make; import std.experimental.allocator.mallocator : Mallocator; Node* result; if (stock !is null) { result = stock; stock = result.next_; result.payload_ = thePayload; result.next_ = theNext; } else { result = Mallocator.instance.make!(Node)(thePayload, theNext); // GC can dispose T managed member if it thinks they are no used... static if (hasIndirections!T) { import core.memory : GC; GC.addRange(result, Node.sizeof); } } return result; } // free the stock of available free nodes. void freeStock() @trusted @nogc nothrow { import std.experimental.allocator.mallocator : Mallocator; while (stock !is null) { Node* toFree = stock; stock = stock.next_; static if (hasIndirections!T) { import core.memory : GC; GC.removeRange(toFree); } Mallocator.instance.deallocate((cast(ubyte*) toFree)[0 .. Node.sizeof]); } } public: @disable void opAssign(ref Queue); @disable bool opEquals(ref Queue); @disable int opCmp(ref Queue); this(this) @safe nothrow @nogc { auto node = first_; first_ = null; last_ = null; while (node !is null) { Node* newLast = makeNewNode(node.payload_); if (last_ !is null) last_.next_ = newLast; if (first_ is null) first_ = newLast; last_ = newLast; node = node.next_; } } ~this() @safe nothrow @nogc { freeStock(); stock = first_; freeStock(); } /// Returns a forward range iterating over this queue. auto range() @safe pure nothrow @nogc { static struct Result { private Node* cursor; void popFront() @safe pure nothrow @nogc { cursor = cursor.next_; } ref T front() @safe pure nothrow @nogc in(cursor !is null) { return cursor.payload_; } bool empty() @safe pure nothrow @nogc const { return cursor is null; } } return Result(first_); } /// Push a new item to the queue. 
void push(T item) @nogc @safe nothrow { Node* newLast = makeNewNode(item); if (last_ !is null) last_.next_ = newLast; if (first_ is null) first_ = newLast; last_ = newLast; ++length_; } /// Insert a new item putting it to specified index in the linked list. void insert(T item, const size_t idx) @safe nothrow in { assert(idx <= length_); } do { if (idx == 0) { first_ = makeNewNode(item, first_); ++length_; } // Adding before last added element, so we can just push. else if (idx == length_) { push(item); } else { // Get the element before one we're inserting. Node* current = first_; foreach (i; 1 .. idx) current = current.next_; assert(current); // Insert a new node after current, and put current.next_ behind it. current.next_ = makeNewNode(item, current.next_); ++length_; } } /// Returns: The next element in the queue and remove it. T pop() @safe nothrow in { assert(!empty, "Trying to pop an element from an empty queue"); } do { T result = peek(); Node* oldStock = stock; Node* old = first_; first_ = first_.next_; // start the stock from the popped element stock = old; old.next_ = null; // add the existing "old" stock to the new first stock element if (oldStock !is null) stock.next_ = oldStock; if (--length_ == 0) { assert(first_ is null); last_ = null; } return result; } /// Returns: The next element in the queue. ref inout(T) peek() @safe pure nothrow inout @nogc in { assert(!empty, "Trying to peek at an element in an empty queue"); } do { return first_.payload_; } /// Returns: true of the queue empty, false otherwise. bool empty() @safe pure nothrow const @nogc { return first_ is null; } /// Returns: The number of elements in the queue. size_t length() @safe pure nothrow const @nogc { return length_; } } @safe nothrow unittest { auto queue = Queue!int(); assert(queue.empty); foreach (i; 0 .. 
65) { queue.push(5); assert(queue.pop() == 5); assert(queue.empty); assert(queue.length_ == 0); } int[] array = [1, -1, 2, -2, 3, -3, 4, -4, 5, -5]; foreach (i; array) { queue.push(i); } array = 42 ~ array[0 .. 3] ~ 42 ~ array[3 .. $] ~ 42; queue.insert(42, 3); queue.insert(42, 0); queue.insert(42, queue.length); int[] array2; while (!queue.empty) { array2 ~= queue.pop(); } assert(array == array2); } dub-1.40.0/source/dub/internal/dyaml/reader.d000066400000000000000000000553151477246567400210100ustar00rootroot00000000000000 // Copyright Ferdinand Majerech 2011-2014. // Distributed under the Boost Software License, Version 1.0. // (See accompanying file LICENSE_1_0.txt or copy at // http://www.boost.org/LICENSE_1_0.txt) module dub.internal.dyaml.reader; import core.stdc.stdlib; import core.stdc.string; import core.thread; import std.algorithm; import std.array; import std.conv; import std.exception; import std.range; import std.string; import std.system; import std.typecons; import std.utf; import dub.internal.tinyendian; import dub.internal.dyaml.encoding; import dub.internal.dyaml.exception; alias isBreak = among!('\n', '\u0085', '\u2028', '\u2029'); package: /// Provides an API to read characters from a UTF-8 buffer. struct Reader { private: // Buffer of currently loaded characters. char[] buffer_; // Current position within buffer. Only data after this position can be read. size_t bufferOffset_; // Index of the current character in the buffer. size_t charIndex_; // Number of characters (code points) in buffer_. size_t characterCount_; // File name string name_; // Current line in file. uint line_; // Current column in file. uint column_; // Original Unicode encoding of the data. Encoding encoding_; version(unittest) { // Endianness of the input before it was converted (for testing) Endian endian_; } // The number of consecutive ASCII characters starting at bufferOffset_. // // Used to minimize UTF-8 decoding. 
    size_t upcomingASCII_;

    // Index to buffer_ where the last decoded character starts.
    size_t lastDecodedBufferOffset_;
    // Offset, relative to charIndex_, of the last decoded character,
    // in code points, not chars.
    size_t lastDecodedCharOffset_;

public:
    /// Construct a Reader.
    ///
    /// Params:  buffer = Buffer with YAML data. This may be e.g. the entire
    ///                   contents of a file or a string. $(B will) be modified by
    ///                   the Reader and other parts of D:YAML (D:YAML tries to
    ///                   reuse the buffer to minimize memory allocations)
    ///          name   = File name if the buffer is the contents of a file or
    ///                   `"<unknown>"` if the buffer is the contents of a string.
    ///
    /// Throws:  ReaderException on a UTF decoding error or if there are
    ///          nonprintable Unicode characters illegal in YAML.
    this(ubyte[] buffer, string name = "<unknown>") @safe pure
    {
        name_ = name;
        // Detect the BOM/byte order and normalize the buffer's endianness.
        auto endianResult = fixUTFByteOrder(buffer);
        if(endianResult.bytesStripped > 0)
        {
            // TODO: add line and column
            throw new ReaderException("Size of UTF-16 or UTF-32 input not aligned "
                                      ~ "to 2 or 4 bytes, respectively", Mark(name, 0, 0));
        }
        version(unittest) { endian_ = endianResult.endian; }
        encoding_ = endianResult.encoding;

        auto utf8Result = toUTF8(endianResult.array, endianResult.encoding);
        const msg = utf8Result.errorMessage;
        if(msg !is null)
        {
            // TODO: add line and column
            throw new ReaderException("Error when converting to UTF-8: " ~ msg,
                                      Mark(name, 0, 0));
        }

        buffer_ = utf8Result.utf8;

        characterCount_ = utf8Result.characterCount;
        // Check that all characters in buffer are printable.
        // TODO: add line and column
        enforce(isPrintableValidUTF8(buffer_),
                new ReaderException("Special unicode characters are not allowed",
                                    Mark(name, 0, 0)));

        checkASCII();
    }

    /// Get character at specified index relative to current position.
    ///
    /// Params:  index = Index of the character to get relative to current position
    ///                  in the buffer. Can point outside of the buffer; In that
    ///                  case, '\0' will be returned.
    ///
    /// Returns: Character at specified position or '\0' if outside of the buffer.
    ///
    // XXX removed; search for 'risky' to find why.
    // Throws:  ReaderException if trying to read past the end of the buffer.
    dchar peek(const size_t index) @safe pure
    {
        // Fast path: the requested character is within the known-ASCII run.
        if(index < upcomingASCII_) { return buffer_[bufferOffset_ + index]; }
        if(characterCount_ <= charIndex_ + index)
        {
            // XXX This is risky; revert this if bugs are introduced. We rely on
            // the assumption that Reader only uses peek() to detect end of buffer.
            // The test suite passes.
            // Revert this case here and in other peek() versions if this causes
            // errors.
            // throw new ReaderException("Trying to read past the end of the buffer");
            return '\0';
        }

        // Optimized path for Scanner code that peeks chars in linear order to
        // determine the length of some sequence.
        if(index == lastDecodedCharOffset_)
        {
            ++lastDecodedCharOffset_;
            const char b = buffer_[lastDecodedBufferOffset_];
            // ASCII
            if(b < 0x80)
            {
                ++lastDecodedBufferOffset_;
                return b;
            }
            // Multi-byte sequence: std.utf.decode advances lastDecodedBufferOffset_.
            return decode(buffer_, lastDecodedBufferOffset_);
        }

        // 'Slow' path where we decode everything up to the requested character.
        const asciiToTake = min(upcomingASCII_, index);
        lastDecodedCharOffset_   = asciiToTake;
        lastDecodedBufferOffset_ = bufferOffset_ + asciiToTake;
        dchar d;
        while(lastDecodedCharOffset_ <= index)
        {
            d = decodeNext();
        }

        return d;
    }

    /// Optimized version of peek() for the case where peek index is 0.
    dchar peek() @safe pure
    {
        if(upcomingASCII_ > 0)            { return buffer_[bufferOffset_]; }
        if(characterCount_ <= charIndex_) { return '\0'; }

        lastDecodedCharOffset_   = 0;
        lastDecodedBufferOffset_ = bufferOffset_;
        return decodeNext();
    }

    /// Get byte at specified index relative to current position.
    ///
    /// Params:  index = Index of the byte to get relative to current position
    ///                  in the buffer. Can point outside of the buffer; In that
    ///                  case, '\0' will be returned.
    ///
    /// Returns: Byte at specified position or '\0' if outside of the buffer.
    char peekByte(const size_t index) @safe pure nothrow @nogc
    {
        return characterCount_ > (charIndex_ + index) ? buffer_[bufferOffset_ + index] : '\0';
    }

    /// Optimized version of peekByte() for the case where peek byte index is 0.
    char peekByte() @safe pure nothrow @nogc
    {
        return characterCount_ > charIndex_ ? buffer_[bufferOffset_] : '\0';
    }

    /// Get specified number of characters starting at current position.
    ///
    /// Note: This gets only a "view" into the internal buffer, which will be
    ///       invalidated after other Reader calls.
    ///
    /// Params: length = Number of characters (code points, not bytes) to get. May
    ///                  reach past the end of the buffer; in that case the returned
    ///                  slice will be shorter.
    ///
    /// Returns: Characters starting at current position or an empty slice if out of bounds.
    char[] prefix(const size_t length) @safe pure
    {
        return slice(length);
    }

    /// Get specified number of bytes, not code points, starting at current position.
    ///
    /// Note: This gets only a "view" into the internal buffer, which will be
    ///       invalidated after other Reader calls.
    ///
    /// Params: length = Number bytes (not code points) to get. May NOT reach past
    ///                  the end of the buffer; should be used with peek() to avoid
    ///                  this.
    ///
    /// Returns: Bytes starting at current position.
    char[] prefixBytes(const size_t length) @safe pure nothrow @nogc
    in(length == 0 || bufferOffset_ + length <= buffer_.length, "prefixBytes out of bounds")
    {
        return buffer_[bufferOffset_ .. bufferOffset_ + length];
    }

    /// Get a slice view of the internal buffer, starting at the current position.
    ///
    /// Note: This gets only a "view" into the internal buffer,
    ///       which get invalidated after other Reader calls.
    ///
    /// Params: end = End of the slice relative to current position. May reach past
    ///               the end of the buffer; in that case the returned slice will
    ///               be shorter.
    ///
    /// Returns: Slice into the internal buffer or an empty slice if out of bounds.
char[] slice(const size_t end) @safe pure { // Fast path in case the caller has already peek()ed all the way to end. if(end == lastDecodedCharOffset_) { return buffer_[bufferOffset_ .. lastDecodedBufferOffset_]; } const asciiToTake = min(upcomingASCII_, end, buffer_.length); lastDecodedCharOffset_ = asciiToTake; lastDecodedBufferOffset_ = bufferOffset_ + asciiToTake; // 'Slow' path - decode everything up to end. while(lastDecodedCharOffset_ < end && lastDecodedBufferOffset_ < buffer_.length) { decodeNext(); } return buffer_[bufferOffset_ .. lastDecodedBufferOffset_]; } /// Get the next character, moving buffer position beyond it. /// /// Returns: Next character. /// /// Throws: ReaderException if trying to read past the end of the buffer /// or if invalid data is read. dchar get() @safe pure { const result = peek(); forward(); return result; } /// Get specified number of characters, moving buffer position beyond them. /// /// Params: length = Number or characters (code points, not bytes) to get. /// /// Returns: Characters starting at current position. char[] get(const size_t length) @safe pure { auto result = slice(length); forward(length); return result; } /// Move current position forward. /// /// Params: length = Number of characters to move position forward. void forward(size_t length) @safe pure { while(length > 0) { auto asciiToTake = min(upcomingASCII_, length); charIndex_ += asciiToTake; length -= asciiToTake; upcomingASCII_ -= asciiToTake; for(; asciiToTake > 0; --asciiToTake) { const c = buffer_[bufferOffset_++]; // c is ASCII, do we only need to check for ASCII line breaks. if(c == '\n' || (c == '\r' && buffer_[bufferOffset_] != '\n')) { ++line_; column_ = 0; continue; } ++column_; } // If we have used up all upcoming ASCII chars, the next char is // non-ASCII even after this returns, so upcomingASCII_ doesn't need to // be updated - it's zero. 
if(length == 0) { break; } assert(upcomingASCII_ == 0, "Running unicode handling code but we haven't run out of ASCII chars"); assert(bufferOffset_ < buffer_.length, "Attempted to decode past the end of YAML buffer"); assert(buffer_[bufferOffset_] >= 0x80, "ASCII must be handled by preceding code"); ++charIndex_; const c = decode(buffer_, bufferOffset_); // New line. (can compare with '\n' without decoding since it's ASCII) if(c.isBreak || (c == '\r' && buffer_[bufferOffset_] != '\n')) { ++line_; column_ = 0; } else if(c != '\uFEFF') { ++column_; } --length; checkASCII(); } lastDecodedBufferOffset_ = bufferOffset_; lastDecodedCharOffset_ = 0; } /// Move current position forward by one character. void forward() @safe pure { ++charIndex_; lastDecodedBufferOffset_ = bufferOffset_; lastDecodedCharOffset_ = 0; // ASCII if(upcomingASCII_ > 0) { --upcomingASCII_; const c = buffer_[bufferOffset_++]; if(c == '\n' || (c == '\r' && buffer_[bufferOffset_] != '\n')) { ++line_; column_ = 0; return; } ++column_; return; } // UTF-8 assert(bufferOffset_ < buffer_.length, "Attempted to decode past the end of YAML buffer"); assert(buffer_[bufferOffset_] >= 0x80, "ASCII must be handled by preceding code"); const c = decode(buffer_, bufferOffset_); // New line. (can compare with '\n' without decoding since it's ASCII) if(c.isBreak || (c == '\r' && buffer_[bufferOffset_] != '\n')) { ++line_; column_ = 0; } else if(c != '\uFEFF') { ++column_; } checkASCII(); } /// Get filename, line and column of current position. 
Mark mark() const pure nothrow @nogc @safe { return Mark(name_, line_, column_); } /// Get filename, line and column of current position + some number of chars Mark mark(size_t advance) const pure @safe { auto lineTemp = cast()line_; auto columnTemp = cast()column_; auto bufferOffsetTemp = cast()bufferOffset_; for (size_t pos = 0; pos < advance; pos++) { if (bufferOffsetTemp >= buffer_.length) { break; } const c = decode(buffer_, bufferOffsetTemp); if (c.isBreak || (c == '\r' && buffer_[bufferOffsetTemp] == '\n')) { lineTemp++; columnTemp = 0; } columnTemp++; } return Mark(name_, lineTemp, columnTemp); } /// Get file name. ref inout(string) name() inout @safe return pure nothrow @nogc { return name_; } /// Get current line number. uint line() const @safe pure nothrow @nogc { return line_; } /// Get current column number. uint column() const @safe pure nothrow @nogc { return column_; } /// Get index of the current character in the buffer. size_t charIndex() const @safe pure nothrow @nogc { return charIndex_; } /// Get encoding of the input buffer. Encoding encoding() const @safe pure nothrow @nogc { return encoding_; } private: // Update upcomingASCII_ (should be called forward()ing over a UTF-8 sequence) void checkASCII() @safe pure nothrow @nogc { upcomingASCII_ = countASCII(buffer_[bufferOffset_ .. $]); } // Decode the next character relative to // lastDecodedCharOffset_/lastDecodedBufferOffset_ and update them. // // Does not advance the buffer position. Used in peek() and slice(). dchar decodeNext() @safe pure { assert(lastDecodedBufferOffset_ < buffer_.length, "Attempted to decode past the end of YAML buffer"); const char b = buffer_[lastDecodedBufferOffset_]; ++lastDecodedCharOffset_; // ASCII if(b < 0x80) { ++lastDecodedBufferOffset_; return b; } return decode(buffer_, lastDecodedBufferOffset_); } } private: // Convert a UTF-8/16/32 buffer to UTF-8, in-place if possible. // // Params: // // input = Buffer with UTF-8/16/32 data to decode. 
May be overwritten by the // conversion, in which case the result will be a slice of this buffer. // encoding = Encoding of input. // // Returns: // // A struct with the following members: // // $(D string errorMessage) In case of an error, the error message is stored here. If // there was no error, errorMessage is NULL. Always check // this first. // $(D char[] utf8) input converted to UTF-8. May be a slice of input. // $(D size_t characterCount) Number of characters (code points) in input. auto toUTF8(ubyte[] input, const UTFEncoding encoding) @safe pure nothrow { // Documented in function ddoc. struct Result { string errorMessage; char[] utf8; size_t characterCount; } Result result; // Encode input_ into UTF-8 if it's encoded as UTF-16 or UTF-32. // // Params: // // buffer = The input buffer to encode. // result = A Result struct to put encoded result and any error messages to. // // On error, result.errorMessage will be set. static void encode(C)(C[] input, ref Result result) @safe pure { // We can do UTF-32->UTF-8 in place because all UTF-8 sequences are 4 or // less bytes. static if(is(C == dchar)) { char[4] encodeBuf; auto utf8 = cast(char[])input; auto length = 0; foreach(dchar c; input) { ++result.characterCount; // ASCII if(c < 0x80) { utf8[length++] = cast(char)c; continue; } std.utf.encode(encodeBuf, c); const bytes = codeLength!char(c); utf8[length .. length + bytes] = encodeBuf[0 .. bytes]; length += bytes; } result.utf8 = utf8[0 .. 
length]; } // Unfortunately we can't do UTF-16 in place so we just use std.conv.to else { result.characterCount = std.utf.count(input); result.utf8 = input.to!(char[]); } } try final switch(encoding) { case UTFEncoding.UTF_8: result.utf8 = cast(char[])input; result.utf8.validate(); result.characterCount = std.utf.count(result.utf8); break; case UTFEncoding.UTF_16: assert(input.length % 2 == 0, "UTF-16 buffer size must be even"); encode(cast(wchar[])input, result); break; case UTFEncoding.UTF_32: assert(input.length % 4 == 0, "UTF-32 buffer size must be a multiple of 4"); encode(cast(dchar[])input, result); break; } catch(ConvException e) { result.errorMessage = e.msg; } catch(UTFException e) { result.errorMessage = e.msg; } catch(Exception e) { assert(false, "Unexpected exception in encode(): " ~ e.msg); } return result; } /// Determine if all characters (code points, not bytes) in a string are printable. bool isPrintableValidUTF8(const char[] chars) @safe pure { import std.uni : isControl, isWhite; foreach (dchar chr; chars) { if (!chr.isValidDchar || (chr.isControl && !chr.isWhite)) { return false; } } return true; } /// Counts the number of ASCII characters in buffer until the first UTF-8 sequence. /// /// Used to determine how many characters we can process without decoding. size_t countASCII(const(char)[] buffer) @safe pure nothrow @nogc { return buffer.byCodeUnit.until!(x => x > 0x7F).walkLength; } // Unittests. 
void testEndian(R)() { void endian_test(ubyte[] data, Encoding encoding_expected, Endian endian_expected) { auto reader = new R(data); assert(reader.encoding == encoding_expected); assert(reader.endian_ == endian_expected); } ubyte[] little_endian_utf_16 = [0xFF, 0xFE, 0x7A, 0x00]; ubyte[] big_endian_utf_16 = [0xFE, 0xFF, 0x00, 0x7A]; endian_test(little_endian_utf_16, Encoding.UTF_16, Endian.littleEndian); endian_test(big_endian_utf_16, Encoding.UTF_16, Endian.bigEndian); } void testPeekPrefixForward(R)() { import std.encoding; ubyte[] data = bomTable[BOM.utf8].sequence ~ cast(ubyte[])"data"; auto reader = new R(data); assert(reader.peek() == 'd'); assert(reader.peek(1) == 'a'); assert(reader.peek(2) == 't'); assert(reader.peek(3) == 'a'); assert(reader.peek(4) == '\0'); assert(reader.prefix(4) == "data"); // assert(reader.prefix(6) == "data\0"); reader.forward(2); assert(reader.peek(1) == 'a'); // assert(collectException(reader.peek(3))); } void testUTF(R)() { import std.encoding; dchar[] data = cast(dchar[])"data"; void utf_test(T)(T[] data, BOM bom) { ubyte[] bytes = bomTable[bom].sequence ~ (cast(ubyte[])data)[0 .. data.length * T.sizeof]; auto reader = new R(bytes); assert(reader.peek() == 'd'); assert(reader.peek(1) == 'a'); assert(reader.peek(2) == 't'); assert(reader.peek(3) == 'a'); } utf_test!char(to!(char[])(data), BOM.utf8); utf_test!wchar(to!(wchar[])(data), endian == Endian.bigEndian ? BOM.utf16be : BOM.utf16le); utf_test(data, endian == Endian.bigEndian ? 
BOM.utf32be : BOM.utf32le); } void test1Byte(R)() { ubyte[] data = [97]; auto reader = new R(data); assert(reader.peek() == 'a'); assert(reader.peek(1) == '\0'); // assert(collectException(reader.peek(2))); } @system unittest { testEndian!Reader(); testPeekPrefixForward!Reader(); testUTF!Reader(); test1Byte!Reader(); } //Issue 257 - https://github.com/dlang-community/D-YAML/issues/257 @safe unittest { import dub.internal.dyaml.loader : Loader; auto yaml = "hello "; auto root = Loader.fromString(yaml).load(); assert(root.isValid); } dub-1.40.0/source/dub/internal/dyaml/representer.d000066400000000000000000000414251477246567400221010ustar00rootroot00000000000000 // Copyright Ferdinand Majerech 2011. // Distributed under the Boost Software License, Version 1.0. // (See accompanying file LICENSE_1_0.txt or copy at // http://www.boost.org/LICENSE_1_0.txt) /** * YAML node _representer. Prepares YAML nodes for output. A tutorial can be * found $(LINK2 ../tutorials/custom_types.html, here). * * Code based on $(LINK2 http://www.pyyaml.org, PyYAML). */ module dub.internal.dyaml.representer; import std.algorithm; import std.array; import std.base64; import std.container; import std.conv; import std.datetime; import std.exception; import std.format; import std.math; import std.typecons; import std.string; import dub.internal.dyaml.exception; import dub.internal.dyaml.node; import dub.internal.dyaml.serializer; import dub.internal.dyaml.style; package: /** * Represents YAML nodes as scalar, sequence and mapping nodes ready for output. 
*/ Node representData(const Node data, ScalarStyle defaultScalarStyle, CollectionStyle defaultCollectionStyle) @safe { Node result; final switch(data.type) { case NodeType.null_: result = representNull(); break; case NodeType.merge: break; case NodeType.boolean: result = representBool(data); break; case NodeType.integer: result = representLong(data); break; case NodeType.decimal: result = representReal(data); break; case NodeType.binary: result = representBytes(data); break; case NodeType.timestamp: result = representSysTime(data); break; case NodeType.string: result = representString(data); break; case NodeType.mapping: result = representPairs(data, defaultScalarStyle, defaultCollectionStyle); break; case NodeType.sequence: result = representNodes(data, defaultScalarStyle, defaultCollectionStyle); break; case NodeType.invalid: assert(0); } final switch (result.nodeID) { case NodeID.scalar: if (result.scalarStyle == ScalarStyle.invalid) { result.scalarStyle = defaultScalarStyle; } break; case NodeID.sequence, NodeID.mapping: if (defaultCollectionStyle != CollectionStyle.invalid) { result.collectionStyle = defaultCollectionStyle; } break; case NodeID.invalid: break; } //Override tag if specified. if(data.tag_ !is null){result.tag_ = data.tag_;} //Remember style if this was loaded before. if(data.scalarStyle != ScalarStyle.invalid) { result.scalarStyle = data.scalarStyle; } if(data.collectionStyle != CollectionStyle.invalid) { result.collectionStyle = data.collectionStyle; } return result; } @safe unittest { // We don't emit yaml merge nodes. 
assert(representData(Node(YAMLMerge()), ScalarStyle.invalid, CollectionStyle.invalid) == Node.init); } @safe unittest { assert(representData(Node(YAMLNull()), ScalarStyle.invalid, CollectionStyle.invalid) == Node("null", "tag:yaml.org,2002:null")); } @safe unittest { assert(representData(Node(cast(string)null), ScalarStyle.invalid, CollectionStyle.invalid) == Node("", "tag:yaml.org,2002:str")); assert(representData(Node("Hello world!"), ScalarStyle.invalid, CollectionStyle.invalid) == Node("Hello world!", "tag:yaml.org,2002:str")); } @safe unittest { assert(representData(Node(64), ScalarStyle.invalid, CollectionStyle.invalid) == Node("64", "tag:yaml.org,2002:int")); } @safe unittest { assert(representData(Node(true), ScalarStyle.invalid, CollectionStyle.invalid) == Node("true", "tag:yaml.org,2002:bool")); assert(representData(Node(false), ScalarStyle.invalid, CollectionStyle.invalid) == Node("false", "tag:yaml.org,2002:bool")); } @safe unittest { // Float comparison is pretty unreliable... 
auto result = representData(Node(1.0), ScalarStyle.invalid, CollectionStyle.invalid); assert(isClose(result.as!string.to!real, 1.0)); assert(result.tag == "tag:yaml.org,2002:float"); assert(representData(Node(real.nan), ScalarStyle.invalid, CollectionStyle.invalid) == Node(".nan", "tag:yaml.org,2002:float")); assert(representData(Node(real.infinity), ScalarStyle.invalid, CollectionStyle.invalid) == Node(".inf", "tag:yaml.org,2002:float")); assert(representData(Node(-real.infinity), ScalarStyle.invalid, CollectionStyle.invalid) == Node("-.inf", "tag:yaml.org,2002:float")); } @safe unittest { assert(representData(Node(SysTime(DateTime(2000, 3, 14, 12, 34, 56), UTC())), ScalarStyle.invalid, CollectionStyle.invalid) == Node("2000-03-14T12:34:56Z", "tag:yaml.org,2002:timestamp")); } @safe unittest { assert(representData(Node(Node[].init, "tag:yaml.org,2002:set"), ScalarStyle.invalid, CollectionStyle.invalid) == Node(Node.Pair[].init, "tag:yaml.org,2002:set")); assert(representData(Node(Node[].init, "tag:yaml.org,2002:seq"), ScalarStyle.invalid, CollectionStyle.invalid) == Node(Node[].init, "tag:yaml.org,2002:seq")); { auto nodes = [ Node("a"), Node("b"), Node("c"), ]; assert(representData(Node(nodes, "tag:yaml.org,2002:set"), ScalarStyle.invalid, CollectionStyle.invalid) == Node([ Node.Pair( Node("a", "tag:yaml.org,2002:str"), Node("null", "tag:yaml.org,2002:null") ), Node.Pair( Node("b", "tag:yaml.org,2002:str"), Node("null", "tag:yaml.org,2002:null") ), Node.Pair( Node("c", "tag:yaml.org,2002:str"), Node("null", "tag:yaml.org,2002:null") ) ], "tag:yaml.org,2002:set")); } { auto nodes = [ Node("a"), Node("b"), Node("c"), ]; assert(representData(Node(nodes, "tag:yaml.org,2002:seq"), ScalarStyle.invalid, CollectionStyle.invalid) == Node([ Node("a", "tag:yaml.org,2002:str"), Node("b", "tag:yaml.org,2002:str"), Node("c", "tag:yaml.org,2002:str") ], "tag:yaml.org,2002:seq")); } } @safe unittest { assert(representData(Node(Node.Pair[].init, "tag:yaml.org,2002:omap"), 
ScalarStyle.invalid, CollectionStyle.invalid) == Node(Node[].init, "tag:yaml.org,2002:omap")); assert(representData(Node(Node.Pair[].init, "tag:yaml.org,2002:pairs"), ScalarStyle.invalid, CollectionStyle.invalid) == Node(Node[].init, "tag:yaml.org,2002:pairs")); assert(representData(Node(Node.Pair[].init, "tag:yaml.org,2002:map"), ScalarStyle.invalid, CollectionStyle.invalid) == Node(Node.Pair[].init, "tag:yaml.org,2002:map")); { auto nodes = [ Node.Pair("a", "b"), Node.Pair("a", "c") ]; assertThrown(representData(Node(nodes, "tag:yaml.org,2002:omap"), ScalarStyle.invalid, CollectionStyle.invalid)); } // Yeah, this gets ugly really fast. { auto nodes = [ Node.Pair("a", "b"), Node.Pair("a", "c") ]; assert(representData(Node(nodes, "tag:yaml.org,2002:pairs"), ScalarStyle.invalid, CollectionStyle.invalid) == Node([ Node( [Node.Pair( Node("a", "tag:yaml.org,2002:str"), Node("b", "tag:yaml.org,2002:str") )], "tag:yaml.org,2002:map"), Node( [Node.Pair( Node("a", "tag:yaml.org,2002:str"), Node("c", "tag:yaml.org,2002:str") )], "tag:yaml.org,2002:map"), ], "tag:yaml.org,2002:pairs")); } { auto nodes = [ Node.Pair("a", "b"), Node.Pair("a", "c") ]; assertThrown(representData(Node(nodes, "tag:yaml.org,2002:map"), ScalarStyle.invalid, CollectionStyle.invalid)); } { auto nodes = [ Node.Pair("a", "b"), Node.Pair("c", "d") ]; assert(representData(Node(nodes, "tag:yaml.org,2002:omap"), ScalarStyle.invalid, CollectionStyle.invalid) == Node([ Node([ Node.Pair( Node("a", "tag:yaml.org,2002:str"), Node("b", "tag:yaml.org,2002:str") ) ], "tag:yaml.org,2002:map"), Node([ Node.Pair( Node("c", "tag:yaml.org,2002:str"), Node("d", "tag:yaml.org,2002:str") ) ], "tag:yaml.org,2002:map" )], "tag:yaml.org,2002:omap")); } { auto nodes = [ Node.Pair("a", "b"), Node.Pair("c", "d") ]; assert(representData(Node(nodes, "tag:yaml.org,2002:map"), ScalarStyle.invalid, CollectionStyle.invalid) == Node([ Node.Pair( Node("a", "tag:yaml.org,2002:str"), Node("b", "tag:yaml.org,2002:str") ), Node.Pair( 
Node("c", "tag:yaml.org,2002:str"), Node("d", "tag:yaml.org,2002:str") ), ], "tag:yaml.org,2002:map")); } } private: //Represent a _null _node as a _null YAML value. Node representNull() @safe { return Node("null", "tag:yaml.org,2002:null"); } //Represent a string _node as a string scalar. Node representString(const Node node) @safe { string value = node.as!string; return Node(value, "tag:yaml.org,2002:str"); } //Represent a bytes _node as a binary scalar. Node representBytes(const Node node) @safe { const ubyte[] value = node.as!(ubyte[]); if(value is null){return Node("null", "tag:yaml.org,2002:null");} auto newNode = Node(Base64.encode(value).idup, "tag:yaml.org,2002:binary"); newNode.scalarStyle = ScalarStyle.literal; return newNode; } //Represent a bool _node as a bool scalar. Node representBool(const Node node) @safe { return Node(node.as!bool ? "true" : "false", "tag:yaml.org,2002:bool"); } //Represent a long _node as an integer scalar. Node representLong(const Node node) @safe { return Node(node.as!long.to!string, "tag:yaml.org,2002:int"); } //Represent a real _node as a floating point scalar. Node representReal(const Node node) @safe { real f = node.as!real; string value = isNaN(f) ? ".nan": f == real.infinity ? ".inf": f == -1.0 * real.infinity ? "-.inf": {auto a = appender!string(); formattedWrite(a, "%12f", f); return a.data.strip();}(); return Node(value, "tag:yaml.org,2002:float"); } //Represent a SysTime _node as a timestamp. Node representSysTime(const Node node) @safe { return Node(node.as!SysTime.toISOExtString(), "tag:yaml.org,2002:timestamp"); } //Represent a sequence _node as sequence/set. Node representNodes(const Node node, ScalarStyle defaultScalarStyle, CollectionStyle defaultCollectionStyle) @safe { auto nodes = node.as!(Node[]); if(node.tag_ == "tag:yaml.org,2002:set") { //YAML sets are mapping with null values. 
Node.Pair[] pairs; pairs.length = nodes.length; foreach(idx, key; nodes) { pairs[idx] = Node.Pair(key, Node("null", "tag:yaml.org,2002:null")); } Node.Pair[] value; value.length = pairs.length; auto bestStyle = CollectionStyle.flow; foreach(idx, pair; pairs) { value[idx] = Node.Pair(representData(pair.key, defaultScalarStyle, defaultCollectionStyle), representData(pair.value, defaultScalarStyle, defaultCollectionStyle)); if(value[idx].shouldUseBlockStyle) { bestStyle = CollectionStyle.block; } } auto newNode = Node(value, node.tag_); newNode.collectionStyle = bestStyle; return newNode; } else { Node[] value; value.length = nodes.length; auto bestStyle = CollectionStyle.flow; foreach(idx, item; nodes) { value[idx] = representData(item, defaultScalarStyle, defaultCollectionStyle); const isScalar = value[idx].nodeID == NodeID.scalar; const s = value[idx].scalarStyle; if(!isScalar || (s != ScalarStyle.invalid && s != ScalarStyle.plain)) { bestStyle = CollectionStyle.block; } } auto newNode = Node(value, "tag:yaml.org,2002:seq"); newNode.collectionStyle = bestStyle; return newNode; } } bool shouldUseBlockStyle(const Node value) @safe { const isScalar = value.nodeID == NodeID.scalar; const s = value.scalarStyle; return (!isScalar || (s != ScalarStyle.invalid && s != ScalarStyle.plain)); } bool shouldUseBlockStyle(const Node.Pair value) @safe { const keyScalar = value.key.nodeID == NodeID.scalar; const valScalar = value.value.nodeID == NodeID.scalar; const keyStyle = value.key.scalarStyle; const valStyle = value.value.scalarStyle; if(!keyScalar || (keyStyle != ScalarStyle.invalid && keyStyle != ScalarStyle.plain)) { return true; } if(!valScalar || (valStyle != ScalarStyle.invalid && valStyle != ScalarStyle.plain)) { return true; } return false; } //Represent a mapping _node as map/ordered map/pairs. 
Node representPairs(const Node node, ScalarStyle defaultScalarStyle, CollectionStyle defaultCollectionStyle) @safe { auto pairs = node.as!(Node.Pair[]); bool hasDuplicates(const Node.Pair[] pairs) @safe { //TODO this should be replaced by something with deterministic memory allocation. auto keys = redBlackTree!Node(); foreach(pair; pairs) { if(pair.key in keys){return true;} keys.insert(pair.key); } return false; } Node[] mapToSequence(const Node.Pair[] pairs) @safe { Node[] nodes; nodes.length = pairs.length; foreach(idx, pair; pairs) { Node.Pair value; auto bestStyle = value.shouldUseBlockStyle ? CollectionStyle.block : CollectionStyle.flow; value = Node.Pair(representData(pair.key, defaultScalarStyle, defaultCollectionStyle), representData(pair.value, defaultScalarStyle, defaultCollectionStyle)); auto newNode = Node([value], "tag:yaml.org,2002:map"); newNode.collectionStyle = bestStyle; nodes[idx] = newNode; } return nodes; } if(node.tag_ == "tag:yaml.org,2002:omap") { enforce(!hasDuplicates(pairs), new RepresenterException("Duplicate entry in an ordered map")); auto sequence = mapToSequence(pairs); Node[] value; value.length = sequence.length; auto bestStyle = CollectionStyle.flow; foreach(idx, item; sequence) { value[idx] = representData(item, defaultScalarStyle, defaultCollectionStyle); if(value[idx].shouldUseBlockStyle) { bestStyle = CollectionStyle.block; } } auto newNode = Node(value, node.tag_); newNode.collectionStyle = bestStyle; return newNode; } else if(node.tag_ == "tag:yaml.org,2002:pairs") { auto sequence = mapToSequence(pairs); Node[] value; value.length = sequence.length; auto bestStyle = CollectionStyle.flow; foreach(idx, item; sequence) { value[idx] = representData(item, defaultScalarStyle, defaultCollectionStyle); if(value[idx].shouldUseBlockStyle) { bestStyle = CollectionStyle.block; } } auto newNode = Node(value, node.tag_); newNode.collectionStyle = bestStyle; return newNode; } else { enforce(!hasDuplicates(pairs), new 
RepresenterException("Duplicate entry in an unordered map")); Node.Pair[] value; value.length = pairs.length; auto bestStyle = CollectionStyle.flow; foreach(idx, pair; pairs) { value[idx] = Node.Pair(representData(pair.key, defaultScalarStyle, defaultCollectionStyle), representData(pair.value, defaultScalarStyle, defaultCollectionStyle)); if(value[idx].shouldUseBlockStyle) { bestStyle = CollectionStyle.block; } } auto newNode = Node(value, "tag:yaml.org,2002:map"); newNode.collectionStyle = bestStyle; return newNode; } } dub-1.40.0/source/dub/internal/dyaml/resolver.d000066400000000000000000000234241477246567400214030ustar00rootroot00000000000000 // Copyright Ferdinand Majerech 2011. // Distributed under the Boost Software License, Version 1.0. // (See accompanying file LICENSE_1_0.txt or copy at // http://www.boost.org/LICENSE_1_0.txt) /** * Implements a class that resolves YAML tags. This can be used to implicitly * resolve tags for custom data types, removing the need to explicitly * specify tags in YAML. A tutorial can be found * $(LINK2 ../tutorials/custom_types.html, here). * * Code based on $(LINK2 http://www.pyyaml.org, PyYAML). */ module dub.internal.dyaml.resolver; import std.conv; import std.regex; import std.typecons; import std.utf; import dub.internal.dyaml.node; import dub.internal.dyaml.exception; /// Type of `regexes` private alias RegexType = Tuple!(string, "tag", const Regex!char, "regexp", string, "chars"); private immutable RegexType[] regexes = [ RegexType("tag:yaml.org,2002:bool", regex(r"^(?:yes|Yes|YES|no|No|NO|true|True|TRUE" ~ "|false|False|FALSE|on|On|ON|off|Off|OFF)$"), "yYnNtTfFoO"), RegexType("tag:yaml.org,2002:float", regex(r"^(?:[-+]?([0-9][0-9_]*)\\.[0-9_]*" ~ "(?:[eE][-+][0-9]+)?|[-+]?(?:[0-9][0-9_]" ~ "*)?\\.[0-9_]+(?:[eE][-+][0-9]+)?|[-+]?" ~ "[0-9][0-9_]*(?::[0-5]?[0-9])+\\.[0-9_]" ~ "*|[-+]?\\.(?:inf|Inf|INF)|\\." 
~ "(?:nan|NaN|NAN))$"), "-+0123456789."), RegexType("tag:yaml.org,2002:int", regex(r"^(?:[-+]?0b[0-1_]+" ~ "|[-+]?0[0-7_]+" ~ "|[-+]?(?:0|[1-9][0-9_]*)" ~ "|[-+]?0x[0-9a-fA-F_]+" ~ "|[-+]?[1-9][0-9_]*(?::[0-5]?[0-9])+)$"), "-+0123456789"), RegexType("tag:yaml.org,2002:merge", regex(r"^<<$"), "<"), RegexType("tag:yaml.org,2002:null", regex(r"^$|^(?:~|null|Null|NULL)$"), "~nN\0"), RegexType("tag:yaml.org,2002:timestamp", regex(r"^[0-9][0-9][0-9][0-9]-[0-9][0-9]-" ~ "[0-9][0-9]|[0-9][0-9][0-9][0-9]-[0-9]" ~ "[0-9]?-[0-9][0-9]?[Tt]|[ \t]+[0-9]" ~ "[0-9]?:[0-9][0-9]:[0-9][0-9]" ~ "(?:\\.[0-9]*)?(?:[ \t]*Z|[-+][0-9]" ~ "[0-9]?(?::[0-9][0-9])?)?$"), "0123456789"), RegexType("tag:yaml.org,2002:value", regex(r"^=$"), "="), //The following resolver is only for documentation purposes. It cannot work //because plain scalars cannot start with '!', '&', or '*'. RegexType("tag:yaml.org,2002:yaml", regex(r"^(?:!|&|\*)$"), "!&*"), ]; /** * Resolves YAML tags (data types). * * Can be used to implicitly resolve custom data types of scalar values. */ struct Resolver { private: // Default tag to use for scalars. string defaultScalarTag_ = "tag:yaml.org,2002:str"; // Default tag to use for sequences. string defaultSequenceTag_ = "tag:yaml.org,2002:seq"; // Default tag to use for mappings. string defaultMappingTag_ = "tag:yaml.org,2002:map"; /* * Arrays of scalar resolver tuples indexed by starting character of a scalar. * * Each tuple stores regular expression the scalar must match, * and tag to assign to it if it matches. */ Tuple!(string, const Regex!char)[][dchar] yamlImplicitResolvers_; package: static auto withDefaultResolvers() @safe { Resolver resolver; foreach(pair; regexes) { resolver.addImplicitResolver(pair.tag, pair.regexp, pair.chars); } return resolver; } public: @disable bool opEquals(ref Resolver); @disable int opCmp(ref Resolver); /** * Add an implicit scalar resolver. * * If a scalar matches regexp and starts with any character in first, * its _tag is set to tag. 
If it matches more than one resolver _regexp * resolvers added _first override ones added later. Default resolvers * override any user specified resolvers, but they can be disabled in * Resolver constructor. * * If a scalar is not resolved to anything, it is assigned the default * YAML _tag for strings. * * Params: tag = Tag to resolve to. * regexp = Regular expression the scalar must match to have this _tag. * first = String of possible starting characters of the scalar. * */ void addImplicitResolver(string tag, const Regex!char regexp, string first) pure @safe { foreach(const dchar c; first) { if((c in yamlImplicitResolvers_) is null) { yamlImplicitResolvers_[c] = []; } yamlImplicitResolvers_[c] ~= tuple(tag, regexp); } } /// Resolve scalars starting with 'A' to !_tag @safe unittest { import std.file : write; import std.regex : regex; import dub.internal.dyaml.loader : Loader; import dub.internal.dyaml.resolver : Resolver; write("example.yaml", "A"); auto loader = Loader.fromFile("example.yaml"); loader.resolver.addImplicitResolver("!tag", regex("A.*"), "A"); auto node = loader.load(); assert(node.tag == "!tag"); } package: /** * Resolve tag of a node. * * Params: kind = Type of the node. * tag = Explicit tag of the node, if any. * value = Value of the node, if any. * implicit = Should the node be implicitly resolved? * * If the tag is already specified and not non-specific, that tag will * be returned. * * Returns: Resolved tag. */ string resolve(const NodeID kind, const string tag, scope string value, const bool implicit) @safe { import std.array : empty, front; if((tag !is null) && (tag != "!")) { return tag; } final switch (kind) { case NodeID.scalar: if(!implicit) { return defaultScalarTag_; } //Get the first char of the value. const dchar first = value.empty ? '\0' : value.front; auto resolvers = (first in yamlImplicitResolvers_) is null ? [] : yamlImplicitResolvers_[first]; //If regexp matches, return tag. 
foreach(resolver; resolvers) { // source/dyaml/resolver.d(192,35): Error: scope variable `__tmpfordtorXXX` // assigned to non-scope parameter `this` calling // `std.regex.RegexMatch!string.RegexMatch.~this` bool isEmpty = () @trusted { return match(value, resolver[1]).empty; }(); if(!isEmpty) { return resolver[0]; } } return defaultScalarTag_; case NodeID.sequence: return defaultSequenceTag_; case NodeID.mapping: return defaultMappingTag_; case NodeID.invalid: assert(false, "Cannot resolve an invalid node"); } } @safe unittest { auto resolver = Resolver.withDefaultResolvers; bool tagMatch(string tag, string[] values) @safe { const string expected = tag; foreach(value; values) { const string resolved = resolver.resolve(NodeID.scalar, null, value, true); if(expected != resolved) { return false; } } return true; } assert(tagMatch("tag:yaml.org,2002:bool", ["yes", "NO", "True", "on"])); assert(tagMatch("tag:yaml.org,2002:float", ["6.8523015e+5", "685.230_15e+03", "685_230.15", "190:20:30.15", "-.inf", ".NaN"])); assert(tagMatch("tag:yaml.org,2002:int", ["685230", "+685_230", "02472256", "0x_0A_74_AE", "0b1010_0111_0100_1010_1110", "190:20:30"])); assert(tagMatch("tag:yaml.org,2002:merge", ["<<"])); assert(tagMatch("tag:yaml.org,2002:null", ["~", "null", ""])); assert(tagMatch("tag:yaml.org,2002:str", ["abcd", "9a8b", "9.1adsf"])); assert(tagMatch("tag:yaml.org,2002:timestamp", ["2001-12-15T02:59:43.1Z", "2001-12-14t21:59:43.10-05:00", "2001-12-14 21:59:43.10 -5", "2001-12-15 2:59:43.10", "2002-12-14"])); assert(tagMatch("tag:yaml.org,2002:value", ["="])); assert(tagMatch("tag:yaml.org,2002:yaml", ["!", "&", "*"])); } ///Returns: Default scalar tag. @property string defaultScalarTag() const pure @safe nothrow {return defaultScalarTag_;} ///Returns: Default sequence tag. @property string defaultSequenceTag() const pure @safe nothrow {return defaultSequenceTag_;} ///Returns: Default mapping tag. 
@property string defaultMappingTag() const pure @safe nothrow {return defaultMappingTag_;} } dub-1.40.0/source/dub/internal/dyaml/scanner.d000066400000000000000000002313461477246567400211770ustar00rootroot00000000000000 // Copyright Ferdinand Majerech 2011-2014. // Distributed under the Boost Software License, Version 1.0. // (See accompanying file LICENSE_1_0.txt or copy at // http://www.boost.org/LICENSE_1_0.txt) /// YAML scanner. /// Code based on PyYAML: http://www.pyyaml.org module dub.internal.dyaml.scanner; import core.stdc.string; import std.algorithm; import std.array; import std.conv; import std.ascii : isAlphaNum, isDigit, isHexDigit; import std.exception; import std.string; import std.typecons; import std.traits : Unqual; import std.utf; import dub.internal.dyaml.escapes; import dub.internal.dyaml.exception; import dub.internal.dyaml.queue; import dub.internal.dyaml.reader; import dub.internal.dyaml.style; import dub.internal.dyaml.token; package: /// Scanner produces tokens of the following types: /// STREAM-START /// STREAM-END /// DIRECTIVE(name, value) /// DOCUMENT-START /// DOCUMENT-END /// BLOCK-SEQUENCE-START /// BLOCK-MAPPING-START /// BLOCK-END /// FLOW-SEQUENCE-START /// FLOW-MAPPING-START /// FLOW-SEQUENCE-END /// FLOW-MAPPING-END /// BLOCK-ENTRY /// FLOW-ENTRY /// KEY /// VALUE /// ALIAS(value) /// ANCHOR(value) /// TAG(value) /// SCALAR(value, plain, style) alias isBreak = among!('\0', '\n', '\r', '\u0085', '\u2028', '\u2029'); alias isBreakOrSpace = among!(' ', '\0', '\n', '\r', '\u0085', '\u2028', '\u2029'); alias isWhiteSpace = among!(' ', '\t', '\0', '\n', '\r', '\u0085', '\u2028', '\u2029'); alias isNonLinebreakWhitespace = among!(' ', '\t'); alias isNonScalarStartCharacter = among!('-', '?', ':', ',', '[', ']', '{', '}', '#', '&', '*', '!', '|', '>', '\'', '"', '%', '@', '`', ' ', '\t', '\0', '\n', '\r', '\u0085', '\u2028', '\u2029'); alias isURIChar = among!('-', ';', '/', '?', ':', '@', '&', '=', '+', '$', ',', '_', '.', '!', '~', 
'*', '\'', '(', ')', '[', ']', '%'); alias isNSChar = among!(' ', '\n', '\r', '\u0085', '\u2028', '\u2029'); alias isBChar = among!('\n', '\r', '\u0085', '\u2028', '\u2029'); alias isFlowScalarBreakSpace = among!(' ', '\t', '\0', '\n', '\r', '\u0085', '\u2028', '\u2029', '\'', '"', '\\'); alias isNSAnchorName = c => !c.isWhiteSpace && !c.among!('[', ']', '{', '}', ',', '\uFEFF'); /// Generates tokens from data provided by a Reader. struct Scanner { private: /// A simple key is a key that is not denoted by the '?' indicator. /// For example: /// --- /// block simple key: value /// ? not a simple key: /// : { flow simple key: value } /// We emit the KEY token before all keys, so when we find a potential simple /// key, we try to locate the corresponding ':' indicator. Simple keys should be /// limited to a single line and 1024 characters. static struct SimpleKey { /// Position of the key Mark mark; /// Index of the key token from start (first token scanned being 0). uint tokenIndex; /// Is this required to be a simple key? bool required; /// Is this struct "null" (invalid)?. bool isNull; } /// Block chomping types. enum Chomping { /// Strip all trailing line breaks. '-' indicator. strip, /// Line break of the last line is preserved, others discarded. Default. clip, /// All trailing line breaks are preserved. '+' indicator. keep } /// Reader used to read from a file/stream. Reader reader_; /// Are we done scanning? bool done_; /// Level of nesting in flow context. If 0, we're in block context. uint flowLevel_; /// Current indentation level. int indent_ = -1; /// Past indentation levels. Used as a stack. Appender!(int[]) indents_; /// Processed tokens not yet emitted. Used as a queue. Queue!Token tokens_; /// Number of tokens emitted through the getToken method. uint tokensTaken_; /// Can a simple key start at the current position? 
A simple key may start: /// - at the beginning of the line, not counting indentation spaces /// (in block context), /// - after '{', '[', ',' (in the flow context), /// - after '?', ':', '-' (in the block context). /// In the block context, this flag also signifies if a block collection /// may start at the current position. bool allowSimpleKey_ = true; /// Possible simple keys indexed by flow levels. SimpleKey[] possibleSimpleKeys_; public: /// Construct a Scanner using specified Reader. this(Reader reader) @safe nothrow { // Return the next token, but do not delete it from the queue reader_ = reader; fetchStreamStart(); } /// Advance to the next token void popFront() @safe { ++tokensTaken_; tokens_.pop(); } /// Return the current token const(Token) front() @safe { enforce(!empty, "No token left to peek"); return tokens_.peek(); } /// Return whether there are any more tokens left. bool empty() @safe { while (needMoreTokens()) { fetchToken(); } return tokens_.empty; } /// Set file name. ref inout(string) name() inout @safe return pure nothrow @nogc { return reader_.name; } /// Get a mark from the current reader position Mark mark() const @safe pure nothrow @nogc { return reader_.mark; } private: /// Most scanning error messages have the same format; so build them with this /// function. string expected(T)(string expected, T found) { return text(expected, ", but found ", found); } /// Determine whether or not we need to fetch more tokens before peeking/getting a token. bool needMoreTokens() @safe pure { if(done_) { return false; } if(tokens_.empty) { return true; } /// The current token may be a potential simple key, so we need to look further. stalePossibleSimpleKeys(); return nextPossibleSimpleKey() == tokensTaken_; } /// Fetch at token, adding it to tokens_. void fetchToken() @safe { // Eat whitespaces and comments until we reach the next token. scanToNextToken(); // Remove obsolete possible simple keys. 
stalePossibleSimpleKeys(); // Compare current indentation and column. It may add some tokens // and decrease the current indentation level. unwindIndent(reader_.column); // Get the next character. const dchar c = reader_.peekByte(); // Fetch the token. if(c == '\0') { return fetchStreamEnd(); } if(checkDirective()) { return fetchDirective(); } if(checkDocumentStart()) { return fetchDocumentStart(); } if(checkDocumentEnd()) { return fetchDocumentEnd(); } // Order of the following checks is NOT significant. switch(c) { case '[': return fetchFlowSequenceStart(); case '{': return fetchFlowMappingStart(); case ']': return fetchFlowSequenceEnd(); case '}': return fetchFlowMappingEnd(); case ',': return fetchFlowEntry(); case '!': return fetchTag(); case '\'': return fetchSingle(); case '\"': return fetchDouble(); case '*': return fetchAlias(); case '&': return fetchAnchor(); case '?': if(checkKey()) { return fetchKey(); } goto default; case ':': if(checkValue()) { return fetchValue(); } goto default; case '-': if(checkBlockEntry()) { return fetchBlockEntry(); } goto default; case '|': if(flowLevel_ == 0) { return fetchLiteral(); } break; case '>': if(flowLevel_ == 0) { return fetchFolded(); } break; default: if(checkPlain()) { return fetchPlain(); } } throw new ScannerException("While scanning for the next token, found character " ~ "\'%s\', index %s that cannot start any token" .format(c, to!int(c)), reader_.mark); } /// Return the token number of the nearest possible simple key. uint nextPossibleSimpleKey() @safe pure nothrow @nogc { uint minTokenNumber = uint.max; foreach(k, ref simpleKey; possibleSimpleKeys_) { if(simpleKey.isNull) { continue; } minTokenNumber = min(minTokenNumber, simpleKey.tokenIndex); } return minTokenNumber; } /// Remove entries that are no longer possible simple keys. /// /// According to the YAML specification, simple keys /// - should be limited to a single line, /// - should be no longer than 1024 characters. 
/// Disabling this will allow simple keys of any length and /// height (may cause problems if indentation is broken though). void stalePossibleSimpleKeys() @safe pure { foreach(level, ref key; possibleSimpleKeys_) { if(key.isNull) { continue; } if(key.mark.line != reader_.mark.line || reader_.mark.column - key.mark.column > 1024) { enforce(!key.required, new ScannerException("While scanning a simple key, could not find expected ':'", reader_.mark, "key started here", key.mark)); key.isNull = true; } } } /// Check if the next token starts a possible simple key and if so, save its position. /// /// This function is called for ALIAS, ANCHOR, TAG, SCALAR(flow), '[', and '{'. void savePossibleSimpleKey() @safe pure { // Check if a simple key is required at the current position. const required = (flowLevel_ == 0 && indent_ == reader_.column); assert(allowSimpleKey_ || !required, "A simple key is required only if it is " ~ "the first token in the current line. Therefore it is always allowed."); if(!allowSimpleKey_) { return; } // The next token might be a simple key, so save its number and position. removePossibleSimpleKey(); const tokenCount = tokensTaken_ + cast(uint)tokens_.length; const line = reader_.line; const column = reader_.column; const key = SimpleKey(reader_.mark, tokenCount, required); if(possibleSimpleKeys_.length <= flowLevel_) { const oldLength = possibleSimpleKeys_.length; possibleSimpleKeys_.length = flowLevel_ + 1; // Make sure all the empty keys are null foreach (ref emptyKey; possibleSimpleKeys_[oldLength .. flowLevel_]) { emptyKey.isNull = true; } } possibleSimpleKeys_[flowLevel_] = key; } /// Remove the saved possible key position at the current flow level. 
void removePossibleSimpleKey() @safe pure { if(possibleSimpleKeys_.length <= flowLevel_) { return; } if(!possibleSimpleKeys_[flowLevel_].isNull) { const key = possibleSimpleKeys_[flowLevel_]; enforce(!key.required, new ScannerException("While scanning a simple key, could not find expected ':'", reader_.mark, "key started here", key.mark)); possibleSimpleKeys_[flowLevel_].isNull = true; } } /// Decrease indentation, removing entries in indents_. /// /// Params: column = Current column in the file/stream. void unwindIndent(const int column) @safe { if(flowLevel_ > 0) { // In flow context, tokens should respect indentation. // The condition should be `indent >= column` according to the spec. // But this condition will prohibit intuitively correct // constructions such as // key : { // } // In the flow context, indentation is ignored. We make the scanner less // restrictive than what the specification requires. // if(pedantic_ && flowLevel_ > 0 && indent_ > column) // { // throw new ScannerException("Invalid intendation or unclosed '[' or '{'", // reader_.mark) // } return; } // In block context, we may need to issue the BLOCK-END tokens. while(indent_ > column) { indent_ = indents_.data.back; assert(indents_.data.length); indents_.shrinkTo(indents_.data.length - 1); tokens_.push(blockEndToken(reader_.mark, reader_.mark)); } } /// Increase indentation if needed. /// /// Params: column = Current column in the file/stream. /// /// Returns: true if the indentation was increased, false otherwise. bool addIndent(int column) @safe { if(indent_ >= column){return false;} indents_ ~= indent_; indent_ = column; return true; } /// Add STREAM-START token. void fetchStreamStart() @safe nothrow { tokens_.push(streamStartToken(reader_.mark, reader_.mark, reader_.encoding)); } ///Add STREAM-END token. void fetchStreamEnd() @safe { //Set intendation to -1 . 
unwindIndent(-1); removePossibleSimpleKey(); allowSimpleKey_ = false; possibleSimpleKeys_.destroy; tokens_.push(streamEndToken(reader_.mark, reader_.mark)); done_ = true; } /// Add DIRECTIVE token. void fetchDirective() @safe { // Set intendation to -1 . unwindIndent(-1); // Reset simple keys. removePossibleSimpleKey(); allowSimpleKey_ = false; auto directive = scanDirective(); tokens_.push(directive); } /// Add DOCUMENT-START or DOCUMENT-END token. void fetchDocumentIndicator(TokenID id)() if(id == TokenID.documentStart || id == TokenID.documentEnd) { // Set indentation to -1 . unwindIndent(-1); // Reset simple keys. Note that there can't be a block collection after '---'. removePossibleSimpleKey(); allowSimpleKey_ = false; Mark startMark = reader_.mark; reader_.forward(3); tokens_.push(simpleToken!id(startMark, reader_.mark)); } /// Aliases to add DOCUMENT-START or DOCUMENT-END token. alias fetchDocumentStart = fetchDocumentIndicator!(TokenID.documentStart); alias fetchDocumentEnd = fetchDocumentIndicator!(TokenID.documentEnd); /// Add FLOW-SEQUENCE-START or FLOW-MAPPING-START token. void fetchFlowCollectionStart(TokenID id)() @safe { // '[' and '{' may start a simple key. savePossibleSimpleKey(); // Simple keys are allowed after '[' and '{'. allowSimpleKey_ = true; ++flowLevel_; Mark startMark = reader_.mark; reader_.forward(); tokens_.push(simpleToken!id(startMark, reader_.mark)); } /// Aliases to add FLOW-SEQUENCE-START or FLOW-MAPPING-START token. alias fetchFlowSequenceStart = fetchFlowCollectionStart!(TokenID.flowSequenceStart); alias fetchFlowMappingStart = fetchFlowCollectionStart!(TokenID.flowMappingStart); /// Add FLOW-SEQUENCE-START or FLOW-MAPPING-START token. void fetchFlowCollectionEnd(TokenID id)() { // Reset possible simple key on the current level. removePossibleSimpleKey(); // No simple keys after ']' and '}'. 
allowSimpleKey_ = false; --flowLevel_; Mark startMark = reader_.mark; reader_.forward(); tokens_.push(simpleToken!id(startMark, reader_.mark)); } /// Aliases to add FLOW-SEQUENCE-START or FLOW-MAPPING-START token/ alias fetchFlowSequenceEnd = fetchFlowCollectionEnd!(TokenID.flowSequenceEnd); alias fetchFlowMappingEnd = fetchFlowCollectionEnd!(TokenID.flowMappingEnd); /// Add FLOW-ENTRY token; void fetchFlowEntry() @safe { // Reset possible simple key on the current level. removePossibleSimpleKey(); // Simple keys are allowed after ','. allowSimpleKey_ = true; Mark startMark = reader_.mark; reader_.forward(); tokens_.push(flowEntryToken(startMark, reader_.mark)); } /// Additional checks used in block context in fetchBlockEntry and fetchKey. /// /// Params: type = String representing the token type we might need to add. /// id = Token type we might need to add. void blockChecks(string type, TokenID id)() { enum context = type ~ " keys are not allowed here"; // Are we allowed to start a key (not neccesarily a simple one)? enforce(allowSimpleKey_, new ScannerException(context, reader_.mark)); if(addIndent(reader_.column)) { tokens_.push(simpleToken!id(reader_.mark, reader_.mark)); } } /// Add BLOCK-ENTRY token. Might add BLOCK-SEQUENCE-START in the process. void fetchBlockEntry() @safe { if(flowLevel_ == 0) { blockChecks!("Sequence", TokenID.blockSequenceStart)(); } // It's an error for the block entry to occur in the flow context, // but we let the parser detect this. // Reset possible simple key on the current level. removePossibleSimpleKey(); // Simple keys are allowed after '-'. allowSimpleKey_ = true; Mark startMark = reader_.mark; reader_.forward(); tokens_.push(blockEntryToken(startMark, reader_.mark)); } /// Add KEY token. Might add BLOCK-MAPPING-START in the process. void fetchKey() @safe { if(flowLevel_ == 0) { blockChecks!("Mapping", TokenID.blockMappingStart)(); } // Reset possible simple key on the current level. 
removePossibleSimpleKey(); // Simple keys are allowed after '?' in the block context. allowSimpleKey_ = (flowLevel_ == 0); Mark startMark = reader_.mark; reader_.forward(); tokens_.push(keyToken(startMark, reader_.mark)); } /// Add VALUE token. Might add KEY and/or BLOCK-MAPPING-START in the process. void fetchValue() @safe { //Do we determine a simple key? if(possibleSimpleKeys_.length > flowLevel_ && !possibleSimpleKeys_[flowLevel_].isNull) { const key = possibleSimpleKeys_[flowLevel_]; assert(key.tokenIndex >= tokensTaken_); possibleSimpleKeys_[flowLevel_].isNull = true; Mark keyMark = key.mark; const idx = key.tokenIndex - tokensTaken_; // Add KEY. // Manually inserting since tokens are immutable (need linked list). tokens_.insert(keyToken(keyMark, keyMark), idx); // If this key starts a new block mapping, we need to add BLOCK-MAPPING-START. if(flowLevel_ == 0 && addIndent(key.mark.column)) { tokens_.insert(blockMappingStartToken(keyMark, keyMark), idx); } // There cannot be two simple keys in a row. allowSimpleKey_ = false; } // Part of a complex key else { // We can start a complex value if and only if we can start a simple key. enforce(flowLevel_ > 0 || allowSimpleKey_, new ScannerException("Mapping values are not allowed here", reader_.mark)); // If this value starts a new block mapping, we need to add // BLOCK-MAPPING-START. It'll be detected as an error later by the parser. if(flowLevel_ == 0 && addIndent(reader_.column)) { tokens_.push(blockMappingStartToken(reader_.mark, reader_.mark)); } // Reset possible simple key on the current level. removePossibleSimpleKey(); // Simple keys are allowed after ':' in the block context. allowSimpleKey_ = (flowLevel_ == 0); } // Add VALUE. Mark startMark = reader_.mark; reader_.forward(); tokens_.push(valueToken(startMark, reader_.mark)); } /// Add ALIAS or ANCHOR token. void fetchAnchor_(TokenID id)() @safe if(id == TokenID.alias_ || id == TokenID.anchor) { // ALIAS/ANCHOR could be a simple key. 
savePossibleSimpleKey(); // No simple keys after ALIAS/ANCHOR. allowSimpleKey_ = false; auto anchor = scanAnchor(id); tokens_.push(anchor); } /// Aliases to add ALIAS or ANCHOR token. alias fetchAlias = fetchAnchor_!(TokenID.alias_); alias fetchAnchor = fetchAnchor_!(TokenID.anchor); /// Add TAG token. void fetchTag() @safe { //TAG could start a simple key. savePossibleSimpleKey(); //No simple keys after TAG. allowSimpleKey_ = false; tokens_.push(scanTag()); } /// Add block SCALAR token. void fetchBlockScalar(ScalarStyle style)() @safe if(style == ScalarStyle.literal || style == ScalarStyle.folded) { // Reset possible simple key on the current level. removePossibleSimpleKey(); // A simple key may follow a block scalar. allowSimpleKey_ = true; auto blockScalar = scanBlockScalar(style); tokens_.push(blockScalar); } /// Aliases to add literal or folded block scalar. alias fetchLiteral = fetchBlockScalar!(ScalarStyle.literal); alias fetchFolded = fetchBlockScalar!(ScalarStyle.folded); /// Add quoted flow SCALAR token. void fetchFlowScalar(ScalarStyle quotes)() { // A flow scalar could be a simple key. savePossibleSimpleKey(); // No simple keys after flow scalars. allowSimpleKey_ = false; // Scan and add SCALAR. auto scalar = scanFlowScalar(quotes); tokens_.push(scalar); } /// Aliases to add single or double quoted block scalar. alias fetchSingle = fetchFlowScalar!(ScalarStyle.singleQuoted); alias fetchDouble = fetchFlowScalar!(ScalarStyle.doubleQuoted); /// Add plain SCALAR token. void fetchPlain() @safe { // A plain scalar could be a simple key savePossibleSimpleKey(); // No simple keys after plain scalars. But note that scanPlain() will // change this flag if the scan is finished at the beginning of the line. allowSimpleKey_ = false; auto plain = scanPlain(); // Scan and add SCALAR. May change allowSimpleKey_ tokens_.push(plain); } pure: ///Check if the next token is DIRECTIVE: ^ '%' ... 
bool checkDirective() @safe { return reader_.peekByte() == '%' && reader_.column == 0; } /// Check if the next token is DOCUMENT-START: ^ '---' (' '|'\n') bool checkDocumentStart() @safe { // Check one char first, then all 3, to prevent reading outside the buffer. return reader_.column == 0 && reader_.peekByte() == '-' && reader_.prefix(3) == "---" && reader_.peek(3).isWhiteSpace; } /// Check if the next token is DOCUMENT-END: ^ '...' (' '|'\n') bool checkDocumentEnd() @safe { // Check one char first, then all 3, to prevent reading outside the buffer. return reader_.column == 0 && reader_.peekByte() == '.' && reader_.prefix(3) == "..." && reader_.peek(3).isWhiteSpace; } /// Check if the next token is BLOCK-ENTRY: '-' (' '|'\n') bool checkBlockEntry() @safe { return !!reader_.peek(1).isWhiteSpace; } /// Check if the next token is KEY(flow context): '?' /// /// or KEY(block context): '?' (' '|'\n') bool checkKey() @safe { return (flowLevel_ > 0 || reader_.peek(1).isWhiteSpace); } /// Check if the next token is VALUE(flow context): ':' /// /// or VALUE(block context): ':' (' '|'\n') bool checkValue() @safe { return flowLevel_ > 0 || reader_.peek(1).isWhiteSpace; } /// Check if the next token is a plain scalar. /// /// A plain scalar may start with any non-space character except: /// '-', '?', ':', ',', '[', ']', '{', '}', /// '#', '&', '*', '!', '|', '>', '\'', '\"', /// '%', '@', '`'. /// /// It may also start with /// '-', '?', ':' /// if it is followed by a non-space character. /// /// Note that we limit the last rule to the block context (except the /// '-' character) because we want the flow context to be space /// independent. bool checkPlain() @safe { const c = reader_.peek(); if(!c.isNonScalarStartCharacter) { return true; } return !reader_.peek(1).isWhiteSpace && (c == '-' || (flowLevel_ == 0 && (c == '?' || c == ':'))); } /// Move to the next non-space character. 
void findNextNonSpace() @safe { while(reader_.peekByte() == ' ') { reader_.forward(); } } /// Scan a string of alphanumeric or "-_" characters. /// /// Assumes that the caller is building a slice in Reader, and puts the scanned /// characters into that slice. void scanAlphaNumericToSlice(string name)(ref char[] slice, const Mark startMark) { size_t length; dchar c = reader_.peek(); while(c.isAlphaNum || c.among!('-', '_')) { c = reader_.peek(++length); } enforce(length > 0, new ScannerException(expected("While scanning a " ~ name ~ ", expected alphanumeric, '-' or '_'", c), reader_.mark, name~" started here", startMark)); slice ~= reader_.get(length); } /// Scan a string. /// /// Assumes that the caller is building a slice in Reader, and puts the scanned /// characters into that slice. char[] readAnchorAlias(const Mark startMark) @safe { size_t length; dchar c = reader_.peek(); while (c.isNSAnchorName) { c = reader_.peek(++length); } enforce(length > 0, new ScannerException( expected("While scanning an anchor or alias, expected a printable character besides '[', ']', '{', '}' and ','", c), reader_.mark, "started here", startMark)); return reader_.get(length); } /// Scan and throw away all characters until next line break. void scanToNextBreak() @safe { while(!reader_.peek().isBreak) { reader_.forward(); } } /// Scan all characters until next line break. /// /// Assumes that the caller is building a slice in Reader, and puts the scanned /// characters into that slice. void scanToNextBreakToSlice(ref char[] slice) @safe { uint length; while(!reader_.peek(length).isBreak) { ++length; } slice ~= reader_.get(length); } /// Move to next token in the file/stream. /// /// We ignore spaces, line breaks and comments. /// If we find a line break in the block context, we set /// allowSimpleKey` on. /// /// We do not yet support BOM inside the stream as the /// specification requires. Any such mark will be considered as a part /// of the document. 
void scanToNextToken() @safe { // TODO(PyYAML): We need to make tab handling rules more sane. A good rule is: // Tabs cannot precede tokens // BLOCK-SEQUENCE-START, BLOCK-MAPPING-START, BLOCK-END, // KEY(block), VALUE(block), BLOCK-ENTRY // So the checking code is // if : // allowSimpleKey_ = false // We also need to add the check for `allowSimpleKey_ == true` to // `unwindIndent` before issuing BLOCK-END. // Scanners for block, flow, and plain scalars need to be modified. for(;;) { //All whitespace in flow context is ignored, even whitespace // not allowed in other contexts if (flowLevel_ > 0) { while(reader_.peekByte().isNonLinebreakWhitespace) { reader_.forward(); } } else { findNextNonSpace(); } if(reader_.peekByte() == '#') { scanToNextBreak(); } if(scanLineBreak() != '\0') { if(flowLevel_ == 0) { allowSimpleKey_ = true; } } else { break; } } } /// Scan directive token. Token scanDirective() @safe { Mark startMark = reader_.mark; // Skip the '%'. reader_.forward(); // Scan directive name char[] name; scanDirectiveNameToSlice(name, startMark); char[] value; // Index where tag handle ends and suffix starts in a tag directive value. uint tagHandleEnd = uint.max; if(name == "YAML") { scanYAMLDirectiveValueToSlice(value, startMark); } else if(name == "TAG") { tagHandleEnd = scanTagDirectiveValueToSlice(value, startMark); } Mark endMark = reader_.mark; DirectiveType directive; if(name == "YAML") { directive = DirectiveType.yaml; } else if(name == "TAG") { directive = DirectiveType.tag; } else { directive = DirectiveType.reserved; scanToNextBreak(); } scanDirectiveIgnoredLine(startMark); return directiveToken(startMark, endMark, value, directive, tagHandleEnd); } /// Scan name of a directive token. /// /// Assumes that the caller is building a slice in Reader, and puts the scanned /// characters into that slice. void scanDirectiveNameToSlice(ref char[] slice, const Mark startMark) @safe { // Scan directive name. 
scanAlphaNumericToSlice!"directive"(slice, startMark); enforce(reader_.peek().among!(' ', '\0', '\n', '\r', '\u0085', '\u2028', '\u2029'), new ScannerException(expected("While scanning a directive, expected alphanumeric, '-' or '_'", reader_.peek()), reader_.mark, "directive started here", startMark)); } /// Scan value of a YAML directive token. Returns major, minor version separated by '.'. /// /// Assumes that the caller is building a slice in Reader, and puts the scanned /// characters into that slice. void scanYAMLDirectiveValueToSlice(ref char[] slice, const Mark startMark) @safe { findNextNonSpace(); scanYAMLDirectiveNumberToSlice(slice, startMark); enforce(reader_.peekByte() == '.', new ScannerException(expected("While scanning a directive, expected digit or '.'", reader_.peek()), reader_.mark, "directive started here", startMark)); // Skip the '.'. reader_.forward(); slice ~= '.'; scanYAMLDirectiveNumberToSlice(slice, startMark); enforce(reader_.peek().among!(' ', '\0', '\n', '\r', '\u0085', '\u2028', '\u2029'), new ScannerException(expected("While scanning a directive, expected digit or '.'", reader_.peek()), reader_.mark, "directive started here", startMark)); } /// Scan a number from a YAML directive. /// /// Assumes that the caller is building a slice in Reader, and puts the scanned /// characters into that slice. void scanYAMLDirectiveNumberToSlice(ref char[] slice, const Mark startMark) @safe { enforce(isDigit(reader_.peek()), new ScannerException(expected("While scanning a directive, expected a digit", reader_.peek()), reader_.mark, "directive started here", startMark)); // Already found the first digit in the enforce(), so set length to 1. uint length = 1; while(reader_.peek(length).isDigit) { ++length; } slice ~= reader_.get(length); } /// Scan value of a tag directive. /// /// Assumes that the caller is building a slice in Reader, and puts the scanned /// characters into that slice. 
/// /// Returns: Length of tag handle (which is before tag prefix) in scanned data uint scanTagDirectiveValueToSlice(ref char[] slice, const Mark startMark) @safe { findNextNonSpace(); const startLength = slice.length; scanTagDirectiveHandleToSlice(slice, startMark); const handleLength = cast(uint)(slice.length - startLength); findNextNonSpace(); scanTagDirectivePrefixToSlice(slice, startMark); return handleLength; } /// Scan handle of a tag directive. /// /// Assumes that the caller is building a slice in Reader, and puts the scanned /// characters into that slice. void scanTagDirectiveHandleToSlice(ref char[] slice, const Mark startMark) @safe { scanTagHandleToSlice!"directive"(slice, startMark); enforce(reader_.peekByte() == ' ', new ScannerException(expected("While scanning a directive handle, expected ' '", reader_.peek()), reader_.mark, "directive started here", startMark)); } /// Scan prefix of a tag directive. /// /// Assumes that the caller is building a slice in Reader, and puts the scanned /// characters into that slice. void scanTagDirectivePrefixToSlice(ref char[] slice, const Mark startMark) @safe { scanTagURIToSlice!"directive"(slice, startMark); enforce(reader_.peek().among!(' ', '\0', '\n', '\r', '\u0085', '\u2028', '\u2029'), new ScannerException(expected("While scanning a directive prefix, expected ' '", reader_.peek()), reader_.mark, "directive started here", startMark)); } /// Scan (and ignore) ignored line after a directive. void scanDirectiveIgnoredLine(const Mark startMark) @safe { findNextNonSpace(); if(reader_.peekByte() == '#') { scanToNextBreak(); } enforce(reader_.peek().isBreak, new ScannerException(expected("While scanning a directive, expected a comment or a line break", reader_.peek()), reader_.mark, "directive started here", startMark)); scanLineBreak(); } /// Scan an alias or an anchor. /// /// The specification does not restrict characters for anchors and /// aliases. 
This may lead to problems, for instance, the document: /// [ *alias, value ] /// can be interpteted in two ways, as /// [ "value" ] /// and /// [ *alias , "value" ] /// Therefore we restrict aliases to ASCII alphanumeric characters. Token scanAnchor(const TokenID id) @safe { const startMark = reader_.mark; reader_.forward(); // The */& character was only peeked, so we drop it now char[] value = readAnchorAlias(startMark); assert(!reader_.peek().isNSAnchorName, "Anchor/alias name not fully scanned"); if(id == TokenID.alias_) { return aliasToken(startMark, reader_.mark, value); } if(id == TokenID.anchor) { return anchorToken(startMark, reader_.mark, value); } assert(false, "This code should never be reached"); } /// Scan a tag token. Token scanTag() @safe { const startMark = reader_.mark; dchar c = reader_.peek(1); char[] slice; // Index where tag handle ends and tag suffix starts in the tag value // (slice) we will produce. uint handleEnd; if(c == '<') { reader_.forward(2); handleEnd = 0; scanTagURIToSlice!"tag"(slice, startMark); enforce(reader_.peekByte() == '>', new ScannerException(expected("While scanning a tag, expected a '>'", reader_.peek()), reader_.mark, "tag started here", startMark)); reader_.forward(); } else if(c.isWhiteSpace) { reader_.forward(); handleEnd = 0; slice ~= '!'; } else { uint length = 1; bool useHandle; while(!c.isBreakOrSpace) { if(c == '!') { useHandle = true; break; } ++length; c = reader_.peek(length); } if(useHandle) { scanTagHandleToSlice!"tag"(slice, startMark); handleEnd = cast(uint)slice.length; } else { reader_.forward(); slice ~= '!'; handleEnd = cast(uint)slice.length; } scanTagURIToSlice!"tag"(slice, startMark); } enforce(reader_.peek().isBreakOrSpace, new ScannerException(expected("While scanning a tag, expected a ' '", reader_.peek()), reader_.mark, "tag started here", startMark)); return tagToken(startMark, reader_.mark, slice, handleEnd); } /// Scan a block scalar token with specified style. 
Token scanBlockScalar(const ScalarStyle style) @safe
    {
        const startMark = reader_.mark;

        // Scan the header.
        reader_.forward();

        const indicators = scanBlockScalarIndicators(startMark);

        const chomping   = indicators[0];
        const increment  = indicators[1];
        scanBlockScalarIgnoredLine(startMark);

        // Determine the indentation level and go to the first non-empty line.
        Mark endMark;
        uint indent = max(1, indent_ + 1);

        char[] slice;
        // Used to strip the last line breaks written to the slice at the end of the
        // scalar, which may be needed based on chomping.
        char[] newBreakSlice;

        // Read the first indentation/line breaks before the scalar.
        size_t startLen = newBreakSlice.length;
        if(increment == int.min)
        {
            // No explicit indentation indicator: auto-detect the indent.
            auto indentation = scanBlockScalarIndentationToSlice(newBreakSlice);
            endMark = indentation[1];
            indent  = max(indent, indentation[0]);
        }
        else
        {
            // Explicit indentation indicator relative to the parent indent.
            indent += increment - 1;
            endMark = scanBlockScalarBreaksToSlice(newBreakSlice, indent);
        }

        // int.max means there's no line break (int.max is outside UTF-32).
        dchar lineBreak = cast(dchar)int.max;

        // Scan the inner part of the block scalar.
        while(reader_.column == indent && reader_.peekByte() != '\0')
        {
            slice ~= newBreakSlice;
            const bool leadingNonSpace = !reader_.peekByte().among!(' ', '\t');
            // This is where the 'interesting' non-whitespace data gets read.
            scanToNextBreakToSlice(slice);
            lineBreak = scanLineBreak();

            // This transaction serves to rollback data read in the
            // scanBlockScalarBreaksToSlice() call.
            newBreakSlice = [];
            startLen = slice.length;
            // The line breaks should actually be written _after_ the if() block
            // below. We work around that by inserting them into newBreakSlice
            // and only committing that buffer on the next iteration.
            endMark = scanBlockScalarBreaksToSlice(newBreakSlice, indent);

            // This will not run during the last iteration
            if(reader_.column == indent && reader_.peekByte() != '\0')
            {
                // Unfortunately, folding rules are ambiguous.

                // This is the folding according to the specification:
                if(style == ScalarStyle.folded && lineBreak == '\n' &&
                   leadingNonSpace && !reader_.peekByte().among!(' ', '\t'))
                {
                    // No breaks were scanned; no need to insert the space in the
                    // middle of slice.
                    if(startLen == slice.length + newBreakSlice.length)
                    {
                        newBreakSlice ~= ' ';
                    }
                }
                else
                {
                    // We need to insert in the middle of the slice in case any line
                    // breaks were scanned.
                    newBreakSlice.insert(lineBreak, 0);
                }

                ////this is Clark Evans's interpretation (also in the spec
                ////examples):
                //
                //if(style == ScalarStyle.folded && lineBreak == '\n')
                //{
                //    if(startLen == endLen)
                //    {
                //        if(!" \t"d.canFind(reader_.peekByte()))
                //        {
                //            reader_.sliceBuilder.write(' ');
                //        }
                //        else
                //        {
                //            chunks ~= lineBreak;
                //        }
                //    }
                //}
                //else
                //{
                //    reader_.sliceBuilder.insertBack(lineBreak, endLen - startLen);
                //}
            }
            else
            {
                break;
            }
        }

        // If chomping is Keep, we keep (commit) the last scanned line breaks
        // (which are at the end of the scalar). Otherwise we remove them (end the
        // transaction).
        if(chomping == Chomping.keep)
        {
            slice ~= newBreakSlice;
        }
        if(chomping != Chomping.strip && lineBreak != int.max)
        {
            // If chomping is Keep, we keep the line break but the first line break
            // that isn't stripped (since chomping isn't Strip in this branch) must
            // be inserted _before_ the other line breaks.
            if(chomping == Chomping.keep)
            {
                slice.insert(lineBreak, startLen);
            }
            // Clip chomping: keep exactly one trailing line break at the end
            // of the scalar (append it, unless there was no break at all).
            else
            {
                if (lineBreak != '\0')
                {
                    slice ~= lineBreak;
                }
            }
        }

        return scalarToken(startMark, endMark, slice, style);
    }

    /// Scan chomping and indentation indicators of a scalar token.
    ///
    /// Returns a tuple of the chomping mode (clip by default) and the
    /// indentation increment (int.min when not specified).
    Tuple!(Chomping, int) scanBlockScalarIndicators(const Mark startMark) @safe
    {
        auto chomping = Chomping.clip;
        int increment = int.min;
        dchar c       = reader_.peek();

        /// Indicators can be in any order.
        if(getChomping(c, chomping))
        {
            getIncrement(c, increment, startMark);
        }
        else
        {
            const gotIncrement = getIncrement(c, increment, startMark);
            if(gotIncrement) { getChomping(c, chomping); }
        }

        enforce(c.among!(' ', '\0', '\n', '\r', '\u0085', '\u2028', '\u2029'),
            new ScannerException(expected("While scanning a block scalar, expected a chomping or indentation indicator", c),
                reader_.mark, "scalar started here", startMark));

        return tuple(chomping, increment);
    }

    /// Get chomping indicator, if detected. Return false otherwise.
    ///
    /// Used in scanBlockScalarIndicators.
    ///
    /// Params:
    ///
    /// c        = The character that may be a chomping indicator.
    ///            If one is detected, this is updated to the next character
    ///            in the Reader.
    /// chomping = Write the chomping value here, if detected
    ///            ('+' => keep, '-' => strip).
    bool getChomping(ref dchar c, ref Chomping chomping) @safe
    {
        if(!c.among!('+', '-')) { return false; }
        chomping = c == '+' ? Chomping.keep : Chomping.strip;
        reader_.forward();
        c = reader_.peek();
        return true;
    }

    /// Get increment indicator, if detected. Return false otherwise.
    ///
    /// Used in scanBlockScalarIndicators.
    ///
    /// Params:
    ///
    /// c         = The character that may be an increment indicator.
    ///             If an increment indicator is detected, this will be updated to
    ///             the next character in the Reader.
    /// increment = Write the increment value here, if detected (range 1-9).
    /// startMark = Mark for error messages.
    bool getIncrement(ref dchar c, ref int increment, const Mark startMark) @safe
    {
        if(!c.isDigit) { return false; }
        // Convert a digit to integer.
        increment = c - '0';
        assert(increment < 10 && increment >= 0, "Digit has invalid value");

        // '0' is a digit but not a valid indentation indicator.
        enforce(increment > 0,
            new ScannerException(expected("While scanning a block scalar, expected an indentation indicator in range 1-9", "0"),
                reader_.mark, "scalar started here", startMark));

        reader_.forward();
        c = reader_.peek();
        return true;
    }

    /// Scan (and ignore) ignored line in a block scalar.
void scanBlockScalarIgnoredLine(const Mark startMark) @safe
    {
        // After the block scalar header only spaces, an optional comment and a
        // line break may follow; everything up to the break is discarded.
        findNextNonSpace();
        if(reader_.peekByte()== '#') { scanToNextBreak(); }

        enforce(reader_.peek().isBreak,
            new ScannerException(expected("While scanning a block scalar, expected a comment or line break", reader_.peek()),
                reader_.mark, "scalar started here", startMark));

        scanLineBreak();
    }

    /// Scan indentation in a block scalar, returning line breaks, max indent and end mark.
    ///
    /// Assumes that the caller is building a slice in Reader, and puts the scanned
    /// characters into that slice.
    Tuple!(uint, Mark) scanBlockScalarIndentationToSlice(ref char[] slice) @safe
    {
        uint maxIndent;
        Mark endMark = reader_.mark;

        // Consume leading spaces and line breaks; the widest column reached on
        // any line determines the auto-detected indentation level.
        while(reader_.peek().among!(' ', '\n', '\r', '\u0085', '\u2028', '\u2029'))
        {
            if(reader_.peekByte() != ' ')
            {
                slice ~= scanLineBreak();
                endMark = reader_.mark;
                continue;
            }
            reader_.forward();
            maxIndent = max(reader_.column, maxIndent);
        }

        return tuple(maxIndent, endMark);
    }

    /// Scan line breaks at lower or specified indentation in a block scalar.
    ///
    /// Assumes that the caller is building a slice in Reader, and puts the scanned
    /// characters into that slice.
    Mark scanBlockScalarBreaksToSlice(ref char[] slice, const uint indent) @safe
    {
        Mark endMark = reader_.mark;

        for(;;)
        {
            // Skip indentation spaces up to (but not past) the scalar indent.
            while(reader_.column < indent && reader_.peekByte() == ' ')
            {
                reader_.forward();
            }
            // Stop at the first character that is not a line break.
            if(!reader_.peek().among!('\n', '\r', '\u0085', '\u2028', '\u2029'))
            {
                break;
            }
            slice ~= scanLineBreak();
            endMark = reader_.mark;
        }

        return endMark;
    }

    /// Scan a quoted flow scalar token with specified quotes.
Token scanFlowScalar(const ScalarStyle quotes) @safe { const startMark = reader_.mark; const quote = reader_.get(); char[] slice; scanFlowScalarNonSpacesToSlice(slice, quotes, startMark); while(reader_.peek() != quote) { scanFlowScalarSpacesToSlice(slice, startMark); scanFlowScalarNonSpacesToSlice(slice, quotes, startMark); } reader_.forward(); return scalarToken(startMark, reader_.mark, slice, quotes); } /// Scan nonspace characters in a flow scalar. /// /// Assumes that the caller is building a slice in Reader, and puts the scanned /// characters into that slice. void scanFlowScalarNonSpacesToSlice(ref char[] slice, const ScalarStyle quotes, const Mark startMark) @safe { for(;;) { dchar c = reader_.peek(); size_t numCodePoints; while(!reader_.peek(numCodePoints).isFlowScalarBreakSpace) { ++numCodePoints; } if (numCodePoints > 0) { slice ~= reader_.get(numCodePoints); } c = reader_.peek(); if(quotes == ScalarStyle.singleQuoted && c == '\'' && reader_.peek(1) == '\'') { reader_.forward(2); slice ~= '\''; } else if((quotes == ScalarStyle.doubleQuoted && c == '\'') || (quotes == ScalarStyle.singleQuoted && c.among!('"', '\\'))) { reader_.forward(); slice ~= c; } else if(quotes == ScalarStyle.doubleQuoted && c == '\\') { reader_.forward(); c = reader_.peek(); if(c.among!(escapes)) { reader_.forward(); // Escaping has been moved to Parser as it can't be done in // place (in a slice) in case of '\P' and '\L' (very uncommon, // but we don't want to break the spec) char[2] escapeSequence = ['\\', cast(char)c]; slice ~= escapeSequence; } else if(c.among!(escapeHexCodeList)) { const hexLength = dub.internal.dyaml.escapes.escapeHexLength(c); reader_.forward(); foreach(i; 0 .. 
hexLength) { enforce(reader_.peek(i).isHexDigit, new ScannerException(expected("While scanning a double quoted scalar, expected an escape sequence of hexadecimal numbers", reader_.peek(i)), reader_.mark, "scalar started here", startMark)); } char[] hex = reader_.get(hexLength); assert((hex.length > 0) && (hex.length <= 8), "Hex escape overflow"); char[2] escapeStart = ['\\', cast(char) c]; slice ~= escapeStart; slice ~= hex; } else if(c.among!('\n', '\r', '\u0085', '\u2028', '\u2029')) { scanLineBreak(); scanFlowScalarBreaksToSlice(slice, startMark); } else { throw new ScannerException(text("While scanning a double quoted scalar, found unsupported escape character ", c), reader_.mark, "scalar started here", startMark); } } else { return; } } } /// Scan space characters in a flow scalar. /// /// Assumes that the caller is building a slice in Reader, and puts the scanned /// spaces into that slice. void scanFlowScalarSpacesToSlice(ref char[] slice, const Mark startMark) @safe { // Increase length as long as we see whitespace. size_t length; while(reader_.peekByte(length).among!(' ', '\t')) { ++length; } auto whitespaces = reader_.prefixBytes(length); // Can check the last byte without striding because '\0' is ASCII const c = reader_.peek(length); enforce(c != '\0', new ScannerException("While scanning a quoted scalar, found unexpected end of buffer", reader_.mark, "scalar started here", startMark)); // Spaces not followed by a line break. if(!c.among!('\n', '\r', '\u0085', '\u2028', '\u2029')) { reader_.forward(length); slice ~= whitespaces; return; } // There's a line break after the spaces. reader_.forward(length); const lineBreak = scanLineBreak(); if(lineBreak != '\n') { slice ~= lineBreak; } // If we have extra line breaks after the first, scan them into the // slice. const bool extraBreaks = scanFlowScalarBreaksToSlice(slice, startMark); // No extra breaks, one normal line break. Replace it with a space. 
if(lineBreak == '\n' && !extraBreaks) { slice ~= ' '; } } /// Scan line breaks in a flow scalar. /// /// Assumes that the caller is building a slice in Reader, and puts the scanned /// line breaks into that slice. bool scanFlowScalarBreaksToSlice(ref char[] slice, const Mark startMark) @safe { // True if at least one line break was found. bool anyBreaks; for(;;) { // Instead of checking indentation, we check for document separators. const prefix = reader_.prefix(3); enforce(!(prefix == "---" || prefix == "...") || !reader_.peek(3).isWhiteSpace, new ScannerException("While scanning a quoted scalar, found unexpected document separator", reader_.mark, "scalar started here", startMark)); // Skip any whitespaces. while(reader_.peekByte().among!(' ', '\t')) { reader_.forward(); } // Encountered a non-whitespace non-linebreak character, so we're done. if(!reader_.peek().among!(' ', '\n', '\r', '\u0085', '\u2028', '\u2029')) { break; } const lineBreak = scanLineBreak(); anyBreaks = true; slice ~= lineBreak; } return anyBreaks; } /// Scan plain scalar token (no block, no quotes). Token scanPlain() @safe { // We keep track of the allowSimpleKey_ flag here. // Indentation rules are loosed for the flow context const startMark = reader_.mark; Mark endMark = startMark; const indent = indent_ + 1; // We allow zero indentation for scalars, but then we need to check for // document separators at the beginning of the line. // if(indent == 0) { indent = 1; } char[] slice; char[] newSpacesSlice; // Stop at a comment. while(reader_.peekByte() != '#') { // Scan the entire plain scalar. 
size_t length; dchar c = reader_.peek(length); for(;;) { const cNext = reader_.peek(length + 1); if(c.isWhiteSpace || (flowLevel_ == 0 && c == ':' && cNext.isWhiteSpace) || (flowLevel_ > 0 && c == ':' && (cNext.isWhiteSpace || cNext.among!(',', '[', ']', '{', '}'))) || (flowLevel_ > 0 && c.among!(',', '[', ']', '{', '}'))) { break; } ++length; c = cNext; } if(length == 0) { break; } allowSimpleKey_ = false; newSpacesSlice ~= reader_.get(length); endMark = reader_.mark; slice ~= newSpacesSlice; newSpacesSlice = []; const startLength = slice.length; scanPlainSpacesToSlice(newSpacesSlice); if(startLength == slice.length + newSpacesSlice.length || (flowLevel_ == 0 && reader_.column < indent)) { break; } } return scalarToken(startMark, endMark, slice, ScalarStyle.plain); } /// Scan spaces in a plain scalar. /// /// Assumes that the caller is building a slice in Reader, and puts the spaces /// into that slice. void scanPlainSpacesToSlice(ref char[] slice) @trusted { // The specification is really confusing about tabs in plain scalars. // We just forbid them completely. Do not use tabs in YAML! // Get as many plain spaces as there are. size_t length; while(reader_.peekByte(length) == ' ') { ++length; } char[] whitespaces = reader_.prefixBytes(length); reader_.forward(length); const dchar c = reader_.peek(); if(!c.isNSChar) { // We have spaces, but no newline. if(whitespaces.length > 0) { slice ~= whitespaces; } return; } // Newline after the spaces (if any) const lineBreak = scanLineBreak(); allowSimpleKey_ = true; static bool end(Reader reader_) @safe pure { const prefix = reader_.prefix(3); return ("---" == prefix || "..." 
== prefix) && reader_.peek(3).among!(' ', '\t', '\0', '\n', '\r', '\u0085', '\u2028', '\u2029'); } if(end(reader_)) { return; } bool extraBreaks; char[] newSlice; if(lineBreak != '\n') { newSlice ~= lineBreak; } while(reader_.peek().isNSChar) { if(reader_.peekByte() == ' ') { reader_.forward(); } else { const lBreak = scanLineBreak(); extraBreaks = true; newSlice ~= lBreak; if(end(reader_)) { return; } } } slice ~= newSlice; // No line breaks, only a space. if(lineBreak == '\n' && !extraBreaks) { slice ~= ' '; } } /// Scan handle of a tag token. /// /// Assumes that the caller is building a slice in Reader, and puts the scanned /// characters into that slice. void scanTagHandleToSlice(string name)(ref char[] slice, const Mark startMark) { dchar c = reader_.peek(); enum contextMsg = "While scanning a " ~ name ~ ", expected a !"; // should this be an assert? enforce(c == '!', new ScannerException(expected(contextMsg, c), reader_.mark, "tag started here", startMark)); uint length = 1; c = reader_.peek(length); if(c != ' ') { while(c.isAlphaNum || c.among!('-', '_')) { ++length; c = reader_.peek(length); } enforce(c == '!', new ScannerException(expected(contextMsg, c), reader_.mark(length), "tag started here", startMark)); ++length; } slice ~= reader_.get(length); } /// Scan URI in a tag token. /// /// Assumes that the caller is building a slice in Reader, and puts the scanned /// characters into that slice. void scanTagURIToSlice(string name)(ref char[] slice, const Mark startMark) { // Note: we do not check if URI is well-formed. dchar c = reader_.peek(); const startLen = slice.length; { uint length; while(c.isAlphaNum || c.isURIChar) { if(c == '%') { auto chars = reader_.get(length); slice ~= chars; length = 0; scanURIEscapesToSlice!name(slice, startMark); } else { ++length; } c = reader_.peek(length); } if(length > 0) { auto chars = reader_.get(length); slice ~= chars; length = 0; } } // OK if we scanned something, error otherwise. 
enum contextMsg = "While parsing a " ~ name ~ ", expected a URI"; enforce(slice.length > startLen, new ScannerException(expected(contextMsg, c), reader_.mark, "tag started here", startMark)); } // Not @nogc yet because std.utf.decode is not @nogc /// Scan URI escape sequences. /// /// Assumes that the caller is building a slice in Reader, and puts the scanned /// characters into that slice. void scanURIEscapesToSlice(string name)(ref char[] slice, const Mark startMark) { import core.exception : UnicodeException; // URI escapes encode a UTF-8 string. We store UTF-8 code units here for // decoding into UTF-32. Appender!string buffer; enum contextMsg = "While scanning a " ~ name; while(reader_.peekByte() == '%') { reader_.forward(); char[2] nextByte = [reader_.peekByte(), reader_.peekByte(1)]; enforce(nextByte[0].isHexDigit && nextByte[1].isHexDigit, new ScannerException(expected(contextMsg ~ ", expected a URI escape sequence of 2 hexadecimal numbers", nextByte), reader_.mark, "tag started here", startMark)); buffer ~= nextByte[].to!ubyte(16); reader_.forward(2); } try { foreach (dchar chr; buffer.data) { slice ~= chr; } } catch (UnicodeException) { throw new ScannerException(contextMsg ~ ", found invalid UTF-8 data encoded in URI escape sequence", reader_.mark, "tag started here", startMark); } } /// Scan a line break, if any. /// /// Transforms: /// '\r\n' : '\n' /// '\r' : '\n' /// '\n' : '\n' /// '\u0085' : '\n' /// '\u2028' : '\u2028' /// '\u2029 : '\u2029' /// no break : '\0' dchar scanLineBreak() @safe { // Fast path for ASCII line breaks. 
const b = reader_.peekByte(); if(b < 0x80) { if(b == '\n' || b == '\r') { if(reader_.prefix(2) == "\r\n") { reader_.forward(2); } else { reader_.forward(); } return '\n'; } return '\0'; } const c = reader_.peek(); if(c == '\x85') { reader_.forward(); return '\n'; } if(c == '\u2028' || c == '\u2029') { reader_.forward(); return c; } return '\0'; } } // Issue 309 - https://github.com/dlang-community/D-YAML/issues/309 @safe unittest { enum str = q"EOS exp: | foobar EOS".chomp; auto r = Reader(cast(ubyte[])str.dup); auto s = Scanner(r); auto elems = s.map!"a.value".filter!"a.length > 0".array; assert(elems[1] == "foobar"); } @safe unittest { import dub.internal.dyaml.loader : Loader; const str = `test: key: value`; const exc = collectException!LoaderException(Loader.fromString(str).load()); assert(exc); assert(exc.message() == "Unable to load : Mapping values are not allowed here\n" ~ ":1,10"); } @safe unittest { import dub.internal.dyaml.loader : Loader; const str = `test: ? foo : bar`; const exc = collectException!LoaderException(Loader.fromString(str).load()); assert(exc); assert(exc.message() == "Unable to load : Mapping keys are not allowed here\n" ~ ":1,7"); } @safe unittest { import dub.internal.dyaml.loader : Loader; const str = `@`; const exc = collectException!LoaderException(Loader.fromString(str).load()); assert(exc); assert(exc.message() == "Unable to load : While scanning for the next token, found character '@', index 64 that cannot start any token\n" ~ ":1,1"); } @safe unittest { import dub.internal.dyaml.loader : Loader; const str = `foo: bar meh`; const exc = collectException!LoaderException(Loader.fromString(str).load()); assert(exc); assert(exc.message() == "Unable to load : While scanning a simple key, could not find expected ':'\n" ~ ":2,4\nkey started here: :2,1"); } @safe unittest { import dub.internal.dyaml.loader : Loader; const str = `foo: &A bar *A ]`; const exc = collectException!LoaderException(Loader.fromString(str).load()); assert(exc); 
assert(exc.message() == "Unable to load : While scanning a simple key, could not find expected ':'\n" ~ ":2,4\nkey started here: :2,1"); } @safe unittest { import dub.internal.dyaml.loader : Loader; const str = `foo: &[`; const exc = collectException!LoaderException(Loader.fromString(str).load()); assert(exc); assert(exc.message() == "Unable to load : While scanning an anchor or alias, expected a printable character besides '[', ']', '{', '}' and ',', but found [\n" ~ ":1,7\nstarted here: :1,6"); } @safe unittest { import dub.internal.dyaml.loader : Loader; const str = `%?`; const exc = collectException!LoaderException(Loader.fromString(str).load()); assert(exc); assert(exc.message() == "Unable to load : While scanning a directive, expected alphanumeric, '-' or '_', but found ?\n" ~ ":1,2\ndirective started here: :1,1"); } @safe unittest { import dub.internal.dyaml.loader : Loader; const str = `%b?`; const exc = collectException!LoaderException(Loader.fromString(str).load()); assert(exc); assert(exc.message() == "Unable to load : While scanning a directive, expected alphanumeric, '-' or '_', but found ?\n" ~ ":1,3\ndirective started here: :1,1"); } @safe unittest { import dub.internal.dyaml.loader : Loader; const str = `%YAML 1?`; const exc = collectException!LoaderException(Loader.fromString(str).load()); assert(exc); assert(exc.message() == "Unable to load : While scanning a directive, expected digit or '.', but found ?\n" ~ ":1,8\ndirective started here: :1,1"); } @safe unittest { import dub.internal.dyaml.loader : Loader; const str = `%YAML 1.1?`; const exc = collectException!LoaderException(Loader.fromString(str).load()); assert(exc); assert(exc.message() == "Unable to load : While scanning a directive, expected digit or '.', but found ?\n" ~ ":1,10\ndirective started here: :1,1"); } @safe unittest { import dub.internal.dyaml.loader : Loader; const str = `%YAML ?`; const exc = collectException!LoaderException(Loader.fromString(str).load()); assert(exc); 
assert(exc.message() == "Unable to load : While scanning a directive, expected a digit, but found ?\n" ~ ":1,7\ndirective started here: :1,1"); } @safe unittest { import dub.internal.dyaml.loader : Loader; const str = `%TAG !a!<`; const exc = collectException!LoaderException(Loader.fromString(str).load()); assert(exc); assert(exc.message() == "Unable to load : While scanning a directive handle, expected ' ', but found <\n" ~ ":1,9\ndirective started here: :1,1"); } @safe unittest { import dub.internal.dyaml.loader : Loader; const str = `%TAG !a! !>`; const exc = collectException!LoaderException(Loader.fromString(str).load()); assert(exc); assert(exc.message() == "Unable to load : While scanning a directive prefix, expected ' ', but found >\n" ~ ":1,11\ndirective started here: :1,1"); } @safe unittest { import dub.internal.dyaml.loader : Loader; const str = `%YAML 1.0 ?`; const exc = collectException!LoaderException(Loader.fromString(str).load()); assert(exc); assert(exc.message() == "Unable to load : While scanning a directive, expected a comment or a line break, but found ?\n" ~ ":1,11\ndirective started here: :1,1"); } @safe unittest { import dub.internal.dyaml.loader : Loader; const str = `foo: !: While scanning a tag, expected a '>', but found #\n" ~ ":1,9\ntag started here: :1,6"); } @safe unittest { import dub.internal.dyaml.loader : Loader; const str = `foo: !#`; const exc = collectException!LoaderException(Loader.fromString(str).load()); assert(exc); assert(exc.message() == "Unable to load : While scanning a tag, expected a ' ', but found #\n" ~ ":1,10\ntag started here: :1,6"); } @safe unittest { import dub.internal.dyaml.loader : Loader; const str = `foo: !<#`; const exc = collectException!LoaderException(Loader.fromString(str).load()); assert(exc); assert(exc.message() == "Unable to load : While parsing a tag, expected a URI, but found #\n" ~ ":1,8\ntag started here: :1,6"); } @safe unittest { import dub.internal.dyaml.loader : Loader; const str = `foo: 
|b`; const exc = collectException!LoaderException(Loader.fromString(str).load()); assert(exc); assert(exc.message() == "Unable to load : While scanning a block scalar, expected a chomping or indentation indicator, but found b\n" ~ ":1,7\nscalar started here: :1,6"); } @safe unittest { import dub.internal.dyaml.loader : Loader; const str = `foo: |0`; const exc = collectException!LoaderException(Loader.fromString(str).load()); assert(exc); assert(exc.message() == "Unable to load : While scanning a block scalar, expected an indentation indicator in range 1-9, but found 0\n" ~ ":1,7\nscalar started here: :1,6"); } @safe unittest { import dub.internal.dyaml.loader : Loader; const str = `"\x"`; const exc = collectException!LoaderException(Loader.fromString(str).load()); assert(exc); assert(exc.message() == "Unable to load : While scanning a double quoted scalar, expected an escape sequence of hexadecimal numbers, but found \"\n" ~ ":1,4\nscalar started here: :1,1"); } @safe unittest { import dub.internal.dyaml.loader : Loader; const str = `"\:"`; const exc = collectException!LoaderException(Loader.fromString(str).load()); assert(exc); assert(exc.message() == "Unable to load : While scanning a double quoted scalar, found unsupported escape character :\n" ~ ":1,3\nscalar started here: :1,1"); } @safe unittest { import dub.internal.dyaml.loader : Loader; const str = `"an unfinished scal`; const exc = collectException!LoaderException(Loader.fromString(str).load()); assert(exc); assert(exc.message() == "Unable to load : While scanning a quoted scalar, found unexpected end of buffer\n" ~ ":1,20\nscalar started here: :1,1"); } @safe unittest { import dub.internal.dyaml.loader : Loader; const str = `"an unfinished scal ---`; const exc = collectException!LoaderException(Loader.fromString(str).load()); assert(exc); assert(exc.message() == "Unable to load : While scanning a quoted scalar, found unexpected document separator\n" ~ ":2,1\nscalar started here: :1,1"); } @safe unittest 
{ import dub.internal.dyaml.loader : Loader; const str = `Error: !a:!`; const exc = collectException!LoaderException(Loader.fromString(str).load()); assert(exc); assert(exc.message() == "Unable to load : While scanning a tag, expected a !, but found :\n" ~ ":1,10\ntag started here: :1,8"); } @safe unittest { import dub.internal.dyaml.loader : Loader; const str = `Error: !e!tag%:)`; const exc = collectException!LoaderException(Loader.fromString(str).load()); assert(exc); assert(exc.message() == "Unable to load : While scanning a tag, expected a URI escape sequence of 2 hexadecimal numbers, but found :)\n" ~ ":1,15\ntag started here: :1,8"); } @safe unittest { import dub.internal.dyaml.loader : Loader; const str = `Error: !e!tag%99%99`; const exc = collectException!LoaderException(Loader.fromString(str).load()); assert(exc); assert(exc.message() == "Unable to load : While scanning a tag, found invalid UTF-8 data encoded in URI escape sequence\n" ~ ":1,20\ntag started here: :1,8"); } private void insert(ref char[] slice, const dchar c, const size_t position) @safe pure in(position <= slice.length, text("Trying to insert after the end of the slice (", position, " > ", slice.length, ")")) { const point = position; const movedLength = slice.length - point; // Encode c into UTF-8 char[4] encodeBuf; if(c < 0x80) { encodeBuf[0] = cast(char)c; } const size_t bytes = c < 0x80 ? 1 : encode(encodeBuf, c); slice.length += bytes; if(movedLength > 0) { copy(slice[point..point + movedLength * char.sizeof], slice[point + bytes .. point + bytes + movedLength * char.sizeof]); } slice[point .. point + bytes] = encodeBuf[0 .. bytes]; } dub-1.40.0/source/dub/internal/dyaml/serializer.d000066400000000000000000000261651477246567400217200ustar00rootroot00000000000000 // Copyright Ferdinand Majerech 2011. // Distributed under the Boost Software License, Version 1.0. // (See accompanying file LICENSE_1_0.txt or copy at // http://www.boost.org/LICENSE_1_0.txt) /** * YAML serializer. 
 * Code based on PyYAML: http://www.pyyaml.org
 */
module dub.internal.dyaml.serializer;

import std.array;
import std.format;
import std.typecons;

import dub.internal.dyaml.emitter;
import dub.internal.dyaml.event;
import dub.internal.dyaml.exception;
import dub.internal.dyaml.node;
import dub.internal.dyaml.resolver;
import dub.internal.dyaml.tagdirective;
import dub.internal.dyaml.token;

package:

///Serializes represented YAML nodes, generating events which are then emitted by Emitter.
struct Serializer
{
    private:
        ///Resolver used to determine which tags are automatically resolvable.
        Resolver resolver_;

        ///Do all document starts have to be specified explicitly?
        Flag!"explicitStart" explicitStart_;
        ///Do all document ends have to be specified explicitly?
        Flag!"explicitEnd" explicitEnd_;
        ///YAML version string.
        string YAMLVersion_;

        ///Tag directives to emit.
        TagDirective[] tagDirectives_;

        //TODO Use something with more deterministic memory usage.
        ///Nodes with assigned anchors.
        string[Node] anchors_;
        ///Nodes with assigned anchors that are already serialized.
        bool[Node] serializedNodes_;
        ///ID of the last anchor generated.
        uint lastAnchorID_ = 0;

    public:
        /**
         * Construct a Serializer.
         *
         * Params:
         *     resolver      = Resolver used to determine which tags are automatically resolvable.
         *     explicitStart = Do all document starts have to be specified explicitly?
         *     explicitEnd   = Do all document ends have to be specified explicitly?
         *     YAMLVersion   = YAML version string.
         *     tagDirectives = Tag directives to emit.
         */
        this(Resolver resolver, const Flag!"explicitStart" explicitStart,
             const Flag!"explicitEnd" explicitEnd, string YAMLVersion,
             TagDirective[] tagDirectives) @safe
        {
            resolver_      = resolver;
            explicitStart_ = explicitStart;
            explicitEnd_   = explicitEnd;
            YAMLVersion_   = YAMLVersion;
            tagDirectives_ = tagDirectives;
        }

        ///Begin the stream.
        void startStream(EmitterT)(ref EmitterT emitter) @safe
        {
            emitter.emit(streamStartEvent(Mark(), Mark()));
        }

        ///End the stream.
void endStream(EmitterT)(ref EmitterT emitter) @safe
        {
            emitter.emit(streamEndEvent(Mark(), Mark()));
        }

        ///Serialize a node, emitting it in the process.
        ///
        ///Emits a full document (document start, node events, document end)
        ///and then resets the anchor bookkeeping so the Serializer can be
        ///reused for the next document in the stream.
        void serialize(EmitterT)(ref EmitterT emitter, ref Node node) @safe
        {
            emitter.emit(documentStartEvent(Mark(), Mark(), explicitStart_,
                                            YAMLVersion_, tagDirectives_));
            // First pass: decide which nodes get anchors; second pass: emit.
            anchorNode(node);
            serializeNode(emitter, node);
            emitter.emit(documentEndEvent(Mark(), Mark(), explicitEnd_));
            // Reset per-document state.
            serializedNodes_.destroy();
            anchors_.destroy();
            string[Node] emptyAnchors;
            anchors_ = emptyAnchors;
            lastAnchorID_ = 0;
        }

    private:
        /**
         * Determine if it's a good idea to add an anchor to a node.
         *
         * Used to prevent associating every single repeating scalar with an
         * anchor/alias - only nodes long enough can use anchors.
         *
         * Params:  node = Node to check for anchorability.
         *
         * Returns: True if the node is anchorable, false otherwise.
         */
        static bool anchorable(ref Node node) @safe
        {
            if(node.nodeID == NodeID.scalar)
            {
                // Only long scalars (strings/binary over 64 elements) qualify.
                return (node.type == NodeType.string) ? node.as!string.length > 64 :
                       (node.type == NodeType.binary) ? node.as!(ubyte[]).length > 64 :
                                                        false;
            }
            // Collections qualify once they have more than 2 entries.
            return node.length > 2;
        }

        @safe unittest
        {
            import std.string : representation;
            auto shortString = "not much";
            auto longString = "A fairly long string that would be a good idea to add an anchor to";
            auto node1 = Node(shortString);
            auto node2 = Node(shortString.representation.dup);
            auto node3 = Node(longString);
            auto node4 = Node(longString.representation.dup);
            auto node5 = Node([node1]);
            auto node6 = Node([node1, node2, node3, node4]);
            assert(!anchorable(node1));
            assert(!anchorable(node2));
            assert(anchorable(node3));
            assert(anchorable(node4));
            assert(!anchorable(node5));
            assert(anchorable(node6));
        }

        ///Add an anchor to the node if it's anchorable and not anchored yet.
void anchorNode(ref Node node) @safe { if(!anchorable(node)){return;} if((node in anchors_) !is null) { if(anchors_[node] is null) { anchors_[node] = generateAnchor(); } return; } anchors_.remove(node); final switch (node.nodeID) { case NodeID.mapping: foreach(ref Node key, ref Node value; node) { anchorNode(key); anchorNode(value); } break; case NodeID.sequence: foreach(ref Node item; node) { anchorNode(item); } break; case NodeID.invalid: assert(0); case NodeID.scalar: } } ///Generate and return a new anchor. string generateAnchor() @safe { ++lastAnchorID_; auto appender = appender!string(); formattedWrite(appender, "id%03d", lastAnchorID_); return appender.data; } ///Serialize a node and all its subnodes. void serializeNode(EmitterT)(ref EmitterT emitter, ref Node node) @safe { //If the node has an anchor, emit an anchor (as aliasEvent) on the //first occurrence, save it in serializedNodes_, and emit an alias //if it reappears. string aliased; if(anchorable(node) && (node in anchors_) !is null) { aliased = anchors_[node]; if((node in serializedNodes_) !is null) { emitter.emit(aliasEvent(Mark(), Mark(), aliased)); return; } serializedNodes_[node] = true; } final switch (node.nodeID) { case NodeID.mapping: const defaultTag = resolver_.defaultMappingTag; const implicit = node.tag_ == defaultTag; emitter.emit(mappingStartEvent(Mark(), Mark(), aliased, node.tag_, implicit, node.collectionStyle)); foreach(ref Node key, ref Node value; node) { serializeNode(emitter, key); serializeNode(emitter, value); } emitter.emit(mappingEndEvent(Mark(), Mark())); return; case NodeID.sequence: const defaultTag = resolver_.defaultSequenceTag; const implicit = node.tag_ == defaultTag; emitter.emit(sequenceStartEvent(Mark(), Mark(), aliased, node.tag_, implicit, node.collectionStyle)); foreach(ref Node item; node) { serializeNode(emitter, item); } emitter.emit(sequenceEndEvent(Mark(), Mark())); return; case NodeID.scalar: assert(node.type == NodeType.string, "Scalar node type must be 
string before serialized"); auto value = node.as!string; const detectedTag = resolver_.resolve(NodeID.scalar, null, value, true); const bool isDetected = node.tag_ == detectedTag; emitter.emit(scalarEvent(Mark(), Mark(), aliased, node.tag_, isDetected, value.idup, node.scalarStyle)); return; case NodeID.invalid: assert(0); } } } // Issue #244 @safe unittest { import dub.internal.dyaml.dumper : dumper; auto node = Node([ Node.Pair( Node(""), Node([ Node([ Node.Pair( Node("d"), Node([ Node([ Node.Pair( Node("c"), Node("") ), Node.Pair( Node("b"), Node("") ), Node.Pair( Node(""), Node("") ) ]) ]) ), ]), Node([ Node.Pair( Node("d"), Node([ Node(""), Node(""), Node([ Node.Pair( Node("c"), Node("") ), Node.Pair( Node("b"), Node("") ), Node.Pair( Node(""), Node("") ) ]) ]) ), Node.Pair( Node("z"), Node("") ), Node.Pair( Node(""), Node("") ) ]), Node("") ]) ), Node.Pair( Node("g"), Node("") ), Node.Pair( Node("h"), Node("") ), ]); auto stream = appender!string(); dumper().dump(stream, node); } dub-1.40.0/source/dub/internal/dyaml/stdsumtype.d000066400000000000000000002023231477246567400217600ustar00rootroot00000000000000/++ This module was copied from Phobos at commit 87c6e7e35 (2022-07-06). This is necessary to include https://github.com/dlang/phobos/pull/8501 which is a fix needed for DIP1000 compatibility. A couple minor changes where also required to deal with `package(std)` imports. [SumType] is a generic discriminated union implementation that uses design-by-introspection to generate safe and efficient code. Its features include: * [Pattern matching.][match] * Support for self-referential types. * Full attribute correctness (`pure`, `@safe`, `@nogc`, and `nothrow` are inferred whenever possible). * A type-safe and memory-safe API compatible with DIP 1000 (`scope`). * No dependency on runtime type information (`TypeInfo`). * Compatibility with BetterC. 
License: Boost License 1.0 Authors: Paul Backus Source: $(PHOBOSSRC std/sumtype.d) +/ module dub.internal.dyaml.stdsumtype; /// $(DIVID basic-usage,$(H3 Basic usage)) version (D_BetterC) {} else @safe unittest { import std.math.operations : isClose; struct Fahrenheit { double degrees; } struct Celsius { double degrees; } struct Kelvin { double degrees; } alias Temperature = SumType!(Fahrenheit, Celsius, Kelvin); // Construct from any of the member types. Temperature t1 = Fahrenheit(98.6); Temperature t2 = Celsius(100); Temperature t3 = Kelvin(273); // Use pattern matching to access the value. Fahrenheit toFahrenheit(Temperature t) { return Fahrenheit( t.match!( (Fahrenheit f) => f.degrees, (Celsius c) => c.degrees * 9.0/5 + 32, (Kelvin k) => k.degrees * 9.0/5 - 459.4 ) ); } assert(toFahrenheit(t1).degrees.isClose(98.6)); assert(toFahrenheit(t2).degrees.isClose(212)); assert(toFahrenheit(t3).degrees.isClose(32)); // Use ref to modify the value in place. void freeze(ref Temperature t) { t.match!( (ref Fahrenheit f) => f.degrees = 32, (ref Celsius c) => c.degrees = 0, (ref Kelvin k) => k.degrees = 273 ); } freeze(t1); assert(toFahrenheit(t1).degrees.isClose(32)); // Use a catch-all handler to give a default result. bool isFahrenheit(Temperature t) { return t.match!( (Fahrenheit f) => true, _ => false ); } assert(isFahrenheit(t1)); assert(!isFahrenheit(t2)); assert(!isFahrenheit(t3)); } /** $(DIVID introspection-based-matching, $(H3 Introspection-based matching)) * * In the `length` and `horiz` functions below, the handlers for `match` do not * specify the types of their arguments. Instead, matching is done based on how * the argument is used in the body of the handler: any type with `x` and `y` * properties will be matched by the `rect` handlers, and any type with `r` and * `theta` properties will be matched by the `polar` handlers. 
*/ version (D_BetterC) {} else @safe unittest { import std.math.operations : isClose; import std.math.trigonometry : cos; import std.math.constants : PI; import std.math.algebraic : sqrt; struct Rectangular { double x, y; } struct Polar { double r, theta; } alias Vector = SumType!(Rectangular, Polar); double length(Vector v) { return v.match!( rect => sqrt(rect.x^^2 + rect.y^^2), polar => polar.r ); } double horiz(Vector v) { return v.match!( rect => rect.x, polar => polar.r * cos(polar.theta) ); } Vector u = Rectangular(1, 1); Vector v = Polar(1, PI/4); assert(length(u).isClose(sqrt(2.0))); assert(length(v).isClose(1)); assert(horiz(u).isClose(1)); assert(horiz(v).isClose(sqrt(0.5))); } /** $(DIVID arithmetic-expression-evaluator, $(H3 Arithmetic expression evaluator)) * * This example makes use of the special placeholder type `This` to define a * [recursive data type](https://en.wikipedia.org/wiki/Recursive_data_type): an * [abstract syntax tree](https://en.wikipedia.org/wiki/Abstract_syntax_tree) for * representing simple arithmetic expressions. */ version (D_BetterC) {} else @system unittest { import std.functional : partial; import std.traits : EnumMembers; import std.typecons : Tuple; enum Op : string { Plus = "+", Minus = "-", Times = "*", Div = "/" } // An expression is either // - a number, // - a variable, or // - a binary operation combining two sub-expressions. alias Expr = SumType!( double, string, Tuple!(Op, "op", This*, "lhs", This*, "rhs") ); // Shorthand for Tuple!(Op, "op", Expr*, "lhs", Expr*, "rhs"), // the Tuple type above with Expr substituted for This. 
alias BinOp = Expr.Types[2]; // Factory function for number expressions Expr* num(double value) { return new Expr(value); } // Factory function for variable expressions Expr* var(string name) { return new Expr(name); } // Factory function for binary operation expressions Expr* binOp(Op op, Expr* lhs, Expr* rhs) { return new Expr(BinOp(op, lhs, rhs)); } // Convenience wrappers for creating BinOp expressions alias sum = partial!(binOp, Op.Plus); alias diff = partial!(binOp, Op.Minus); alias prod = partial!(binOp, Op.Times); alias quot = partial!(binOp, Op.Div); // Evaluate expr, looking up variables in env double eval(Expr expr, double[string] env) { return expr.match!( (double num) => num, (string var) => env[var], (BinOp bop) { double lhs = eval(*bop.lhs, env); double rhs = eval(*bop.rhs, env); final switch (bop.op) { static foreach (op; EnumMembers!Op) { case op: return mixin("lhs" ~ op ~ "rhs"); } } } ); } // Return a "pretty-printed" representation of expr string pprint(Expr expr) { import std.format : format; return expr.match!( (double num) => "%g".format(num), (string var) => var, (BinOp bop) => "(%s %s %s)".format( pprint(*bop.lhs), cast(string) bop.op, pprint(*bop.rhs) ) ); } Expr* myExpr = sum(var("a"), prod(num(2), var("b"))); double[string] myEnv = ["a":3, "b":4, "c":7]; assert(eval(*myExpr, myEnv) == 11); assert(pprint(*myExpr) == "(a + (2 * b))"); } import std.format.spec : FormatSpec, singleSpec; import std.meta : AliasSeq, Filter, IndexOf = staticIndexOf, Map = staticMap; import std.meta : NoDuplicates; import std.meta : anySatisfy, allSatisfy; import std.traits : hasElaborateCopyConstructor, hasElaborateDestructor; import std.traits : isAssignable, isCopyable, isStaticArray, isRvalueAssignable; import std.traits : ConstOf, ImmutableOf, InoutOf, TemplateArgsOf; // FIXME: std.sumtype : `std.traits : DeducedParameterType` and `std.conv : toCtString` // are `package(std)` but trivial, hence copied below import std.traits : CommonType, 
/*DeducatedParameterType*/ Unqual; private template DeducedParameterType(T) { static if (is(T == U*, U) || is(T == U[], U)) alias DeducedParameterType = Unqual!T; else alias DeducedParameterType = T; } import std.typecons : ReplaceTypeUnless; import std.typecons : Flag; //import std.conv : toCtString; private enum toCtString(ulong n) = n.stringof[0 .. $ - "LU".length]; /// Placeholder used to refer to the enclosing [SumType]. struct This {} // True if a variable of type T can appear on the lhs of an assignment private enum isAssignableTo(T) = isAssignable!T || (!isCopyable!T && isRvalueAssignable!T); // toHash is required by the language spec to be nothrow and @safe private enum isHashable(T) = __traits(compiles, () nothrow @safe { hashOf(T.init); } ); private enum hasPostblit(T) = __traits(hasPostblit, T); private enum isInout(T) = is(T == inout); /** * A [tagged union](https://en.wikipedia.org/wiki/Tagged_union) that can hold a * single value from any of a specified set of types. * * The value in a `SumType` can be operated on using [pattern matching][match]. * * To avoid ambiguity, duplicate types are not allowed (but see the * ["basic usage" example](#basic-usage) for a workaround). * * The special type `This` can be used as a placeholder to create * self-referential types, just like with `Algebraic`. See the * ["Arithmetic expression evaluator" example](#arithmetic-expression-evaluator) for * usage. * * A `SumType` is initialized by default to hold the `.init` value of its * first member type, just like a regular union. The version identifier * `SumTypeNoDefaultCtor` can be used to disable this behavior. * * See_Also: $(REF Algebraic, std,variant) */ struct SumType(Types...) if (is(NoDuplicates!Types == Types) && Types.length > 0) { /// The types a `SumType` can hold. 
alias Types = AliasSeq!( ReplaceTypeUnless!(isSumTypeInstance, This, typeof(this), TemplateArgsOf!SumType) ); private: enum bool canHoldTag(T) = Types.length <= T.max; alias unsignedInts = AliasSeq!(ubyte, ushort, uint, ulong); alias Tag = Filter!(canHoldTag, unsignedInts)[0]; union Storage { // Workaround for https://issues.dlang.org/show_bug.cgi?id=20068 template memberName(T) if (IndexOf!(T, Types) >= 0) { enum tid = IndexOf!(T, Types); mixin("enum memberName = `values_", toCtString!tid, "`;"); } static foreach (T; Types) { mixin("T ", memberName!T, ";"); } } Storage storage; Tag tag; /* Accesses the value stored in a SumType. * * This method is memory-safe, provided that: * * 1. A SumType's tag is always accurate. * 2. A SumType cannot be assigned to in @safe code if that assignment * could cause unsafe aliasing. * * All code that accesses a SumType's tag or storage directly, including * @safe code in this module, must be manually checked to ensure that it * does not violate either of the above requirements. */ @trusted ref inout(T) get(T)() inout if (IndexOf!(T, Types) >= 0) { enum tid = IndexOf!(T, Types); assert(tag == tid, "This `" ~ SumType.stringof ~ "` does not contain a(n) `" ~ T.stringof ~ "`" ); return __traits(getMember, storage, Storage.memberName!T); } public: // Workaround for https://issues.dlang.org/show_bug.cgi?id=21399 version (StdDdoc) { // Dummy type to stand in for loop variable private struct T; /// Constructs a `SumType` holding a specific value. this(T value); /// ditto this(const(T) value) const; /// ditto this(immutable(T) value) immutable; /// ditto this(Value)(Value value) inout if (is(Value == DeducedParameterType!(inout(T)))); } static foreach (tid, T; Types) { /// Constructs a `SumType` holding a specific value. this(T value) { import core.lifetime : forward; static if (isCopyable!T) { // Workaround for https://issues.dlang.org/show_bug.cgi?id=21542 __traits(getMember, storage, Storage.memberName!T) = __ctfe ? 
value : forward!value; } else { __traits(getMember, storage, Storage.memberName!T) = forward!value; } tag = tid; } static if (isCopyable!(const(T))) { static if (IndexOf!(const(T), Map!(ConstOf, Types)) == tid) { /// ditto this(const(T) value) const { __traits(getMember, storage, Storage.memberName!T) = value; tag = tid; } } } else { @disable this(const(T) value) const; } static if (isCopyable!(immutable(T))) { static if (IndexOf!(immutable(T), Map!(ImmutableOf, Types)) == tid) { /// ditto this(immutable(T) value) immutable { __traits(getMember, storage, Storage.memberName!T) = value; tag = tid; } } } else { @disable this(immutable(T) value) immutable; } static if (isCopyable!(inout(T))) { static if (IndexOf!(inout(T), Map!(InoutOf, Types)) == tid) { /// ditto this(Value)(Value value) inout if (is(Value == DeducedParameterType!(inout(T)))) { __traits(getMember, storage, Storage.memberName!T) = value; tag = tid; } } } else { @disable this(Value)(Value value) inout if (is(Value == DeducedParameterType!(inout(T)))); } } static if (anySatisfy!(hasElaborateCopyConstructor, Types)) { static if ( allSatisfy!(isCopyable, Map!(InoutOf, Types)) && !anySatisfy!(hasPostblit, Map!(InoutOf, Types)) && allSatisfy!(isInout, Map!(InoutOf, Types)) ) { /// Constructs a `SumType` that's a copy of another `SumType`. 
this(ref inout(SumType) other) inout { storage = other.match!((ref value) { alias OtherTypes = Map!(InoutOf, Types); enum tid = IndexOf!(typeof(value), OtherTypes); alias T = Types[tid]; mixin("inout(Storage) newStorage = { ", Storage.memberName!T, ": value", " };"); return newStorage; }); tag = other.tag; } } else { static if (allSatisfy!(isCopyable, Types)) { /// ditto this(ref SumType other) { storage = other.match!((ref value) { alias T = typeof(value); mixin("Storage newStorage = { ", Storage.memberName!T, ": value", " };"); return newStorage; }); tag = other.tag; } } else { @disable this(ref SumType other); } static if (allSatisfy!(isCopyable, Map!(ConstOf, Types))) { /// ditto this(ref const(SumType) other) const { storage = other.match!((ref value) { alias OtherTypes = Map!(ConstOf, Types); enum tid = IndexOf!(typeof(value), OtherTypes); alias T = Types[tid]; mixin("const(Storage) newStorage = { ", Storage.memberName!T, ": value", " };"); return newStorage; }); tag = other.tag; } } else { @disable this(ref const(SumType) other) const; } static if (allSatisfy!(isCopyable, Map!(ImmutableOf, Types))) { /// ditto this(ref immutable(SumType) other) immutable { storage = other.match!((ref value) { alias OtherTypes = Map!(ImmutableOf, Types); enum tid = IndexOf!(typeof(value), OtherTypes); alias T = Types[tid]; mixin("immutable(Storage) newStorage = { ", Storage.memberName!T, ": value", " };"); return newStorage; }); tag = other.tag; } } else { @disable this(ref immutable(SumType) other) immutable; } } } version (SumTypeNoDefaultCtor) { @disable this(); } // Workaround for https://issues.dlang.org/show_bug.cgi?id=21399 version (StdDdoc) { // Dummy type to stand in for loop variable private struct T; /** * Assigns a value to a `SumType`. 
* * If any of the `SumType`'s members other than the one being assigned * to contain pointers or references, it is possible for the assignment * to cause memory corruption (see the * ["Memory corruption" example](#memory-corruption) below for an * illustration of how). Therefore, such assignments are considered * `@system`. * * An individual assignment can be `@trusted` if the caller can * guarantee that there are no outstanding references to any `SumType` * members that contain pointers or references at the time the * assignment occurs. * * Examples: * * $(DIVID memory-corruption, $(H3 Memory corruption)) * * This example shows how assignment to a `SumType` can be used to * cause memory corruption in `@system` code. In `@safe` code, the * assignment `s = 123` would not be allowed. * * --- * SumType!(int*, int) s = new int; * s.tryMatch!( * (ref int* p) { * s = 123; // overwrites `p` * return *p; // undefined behavior * } * ); * --- */ ref SumType opAssign(T rhs); } static foreach (tid, T; Types) { static if (isAssignableTo!T) { /** * Assigns a value to a `SumType`. * * If any of the `SumType`'s members other than the one being assigned * to contain pointers or references, it is possible for the assignment * to cause memory corruption (see the * ["Memory corruption" example](#memory-corruption) below for an * illustration of how). Therefore, such assignments are considered * `@system`. * * An individual assignment can be `@trusted` if the caller can * guarantee that there are no outstanding references to any `SumType` * members that contain pointers or references at the time the * assignment occurs. * * Examples: * * $(DIVID memory-corruption, $(H3 Memory corruption)) * * This example shows how assignment to a `SumType` can be used to * cause memory corruption in `@system` code. In `@safe` code, the * assignment `s = 123` would not be allowed. 
* * --- * SumType!(int*, int) s = new int; * s.tryMatch!( * (ref int* p) { * s = 123; // overwrites `p` * return *p; // undefined behavior * } * ); * --- */ ref SumType opAssign(T rhs) { import core.lifetime : forward; import std.traits : hasIndirections, hasNested; import std.meta : AliasSeq, Or = templateOr; alias OtherTypes = AliasSeq!(Types[0 .. tid], Types[tid + 1 .. $]); enum unsafeToOverwrite = anySatisfy!(Or!(hasIndirections, hasNested), OtherTypes); static if (unsafeToOverwrite) { cast(void) () @system {}(); } this.match!destroyIfOwner; static if (isCopyable!T) { // Workaround for https://issues.dlang.org/show_bug.cgi?id=21542 mixin("Storage newStorage = { ", Storage.memberName!T, ": __ctfe ? rhs : forward!rhs", " };"); } else { mixin("Storage newStorage = { ", Storage.memberName!T, ": forward!rhs", " };"); } storage = newStorage; tag = tid; return this; } } } static if (allSatisfy!(isAssignableTo, Types)) { static if (allSatisfy!(isCopyable, Types)) { /** * Copies the value from another `SumType` into this one. * * See the value-assignment overload for details on `@safe`ty. * * Copy assignment is `@disable`d if any of `Types` is non-copyable. */ ref SumType opAssign(ref SumType rhs) { rhs.match!((ref value) { this = value; }); return this; } } else { @disable ref SumType opAssign(ref SumType rhs); } /** * Moves the value from another `SumType` into this one. * * See the value-assignment overload for details on `@safe`ty. */ ref SumType opAssign(SumType rhs) { import core.lifetime : move; rhs.match!((ref value) { static if (isCopyable!(typeof(value))) { // Workaround for https://issues.dlang.org/show_bug.cgi?id=21542 this = __ctfe ? value : move(value); } else { this = move(value); } }); return this; } } /** * Compares two `SumType`s for equality. * * Two `SumType`s are equal if they are the same kind of `SumType`, they * contain values of the same type, and those values are equal. 
*/ bool opEquals(this This, Rhs)(auto ref Rhs rhs) if (!is(CommonType!(This, Rhs) == void)) { static if (is(This == Rhs)) { return AliasSeq!(this, rhs).match!((ref value, ref rhsValue) { static if (is(typeof(value) == typeof(rhsValue))) { return value == rhsValue; } else { return false; } }); } else { alias CommonSumType = CommonType!(This, Rhs); return cast(CommonSumType) this == cast(CommonSumType) rhs; } } // Workaround for https://issues.dlang.org/show_bug.cgi?id=19407 static if (__traits(compiles, anySatisfy!(hasElaborateDestructor, Types))) { // If possible, include the destructor only when it's needed private enum includeDtor = anySatisfy!(hasElaborateDestructor, Types); } else { // If we can't tell, always include it, even when it does nothing private enum includeDtor = true; } static if (includeDtor) { /// Calls the destructor of the `SumType`'s current value. ~this() { this.match!destroyIfOwner; } } invariant { this.match!((ref value) { static if (is(typeof(value) == class)) { if (value !is null) { assert(value); } } else static if (is(typeof(value) == struct)) { assert(&value); } }); } // Workaround for https://issues.dlang.org/show_bug.cgi?id=21400 version (StdDdoc) { /** * Returns a string representation of the `SumType`'s current value. * * Not available when compiled with `-betterC`. */ string toString(this This)(); /** * Handles formatted writing of the `SumType`'s current value. * * Not available when compiled with `-betterC`. * * Params: * sink = Output range to write to. * fmt = Format specifier to use. * * See_Also: $(REF formatValue, std,format) */ void toString(this This, Sink, Char)(ref Sink sink, const ref FormatSpec!Char fmt); } version (D_BetterC) {} else /** * Returns a string representation of the `SumType`'s current value. * * Not available when compiled with `-betterC`. 
*/ string toString(this This)() { import std.conv : to; return this.match!(to!string); } version (D_BetterC) {} else /** * Handles formatted writing of the `SumType`'s current value. * * Not available when compiled with `-betterC`. * * Params: * sink = Output range to write to. * fmt = Format specifier to use. * * See_Also: $(REF formatValue, std,format) */ void toString(this This, Sink, Char)(ref Sink sink, const ref FormatSpec!Char fmt) { import std.format.write : formatValue; this.match!((ref value) { formatValue(sink, value, fmt); }); } static if (allSatisfy!(isHashable, Map!(ConstOf, Types))) { // Workaround for https://issues.dlang.org/show_bug.cgi?id=21400 version (StdDdoc) { /** * Returns the hash of the `SumType`'s current value. * * Not available when compiled with `-betterC`. */ size_t toHash() const; } // Workaround for https://issues.dlang.org/show_bug.cgi?id=20095 version (D_BetterC) {} else /** * Returns the hash of the `SumType`'s current value. * * Not available when compiled with `-betterC`. 
*/ size_t toHash() const { return this.match!hashOf; } } } // Construction @safe unittest { alias MySum = SumType!(int, float); MySum x = MySum(42); MySum y = MySum(3.14); } // Assignment @safe unittest { alias MySum = SumType!(int, float); MySum x = MySum(42); x = 3.14; } // Self assignment @safe unittest { alias MySum = SumType!(int, float); MySum x = MySum(42); MySum y = MySum(3.14); y = x; } // Equality @safe unittest { alias MySum = SumType!(int, float); assert(MySum(123) == MySum(123)); assert(MySum(123) != MySum(456)); assert(MySum(123) != MySum(123.0)); assert(MySum(123) != MySum(456.0)); } // Equality of differently-qualified SumTypes // Disabled in BetterC due to use of dynamic arrays version (D_BetterC) {} else @safe unittest { alias SumA = SumType!(int, float); alias SumB = SumType!(const(int[]), int[]); alias SumC = SumType!(int[], const(int[])); int[] ma = [1, 2, 3]; const(int[]) ca = [1, 2, 3]; assert(const(SumA)(123) == SumA(123)); assert(const(SumB)(ma[]) == SumB(ca[])); assert(const(SumC)(ma[]) == SumC(ca[])); } // Imported types @safe unittest { import std.typecons : Tuple; alias MySum = SumType!(Tuple!(int, int)); } // const and immutable types @safe unittest { alias MySum = SumType!(const(int[]), immutable(float[])); } // Recursive types @safe unittest { alias MySum = SumType!(This*); assert(is(MySum.Types[0] == MySum*)); } // Allowed types @safe unittest { import std.meta : AliasSeq; alias MySum = SumType!(int, float, This*); assert(is(MySum.Types == AliasSeq!(int, float, MySum*))); } // Types with destructors and postblits @system unittest { int copies; static struct Test { bool initialized = false; int* copiesPtr; this(this) { (*copiesPtr)++; } ~this() { if (initialized) (*copiesPtr)--; } } alias MySum = SumType!(int, Test); Test t = Test(true, &copies); { MySum x = t; assert(copies == 1); } assert(copies == 0); { MySum x = 456; assert(copies == 0); } assert(copies == 0); { MySum x = t; assert(copies == 1); x = 456; assert(copies == 0); } { 
MySum x = 456; assert(copies == 0); x = t; assert(copies == 1); } { MySum x = t; MySum y = x; assert(copies == 2); } { MySum x = t; MySum y; y = x; assert(copies == 2); } } // Doesn't destroy reference types // Disabled in BetterC due to use of classes version (D_BetterC) {} else @system unittest { bool destroyed; class C { ~this() { destroyed = true; } } struct S { ~this() {} } alias MySum = SumType!(S, C); C c = new C(); { MySum x = c; destroyed = false; } assert(!destroyed); { MySum x = c; destroyed = false; x = S(); assert(!destroyed); } } // Types with @disable this() @safe unittest { static struct NoInit { @disable this(); } alias MySum = SumType!(NoInit, int); assert(!__traits(compiles, MySum())); auto _ = MySum(42); } // const SumTypes version (D_BetterC) {} else // not @nogc, https://issues.dlang.org/show_bug.cgi?id=22117 @safe unittest { auto _ = const(SumType!(int[]))([1, 2, 3]); } // Equality of const SumTypes @safe unittest { alias MySum = SumType!int; auto _ = const(MySum)(123) == const(MySum)(456); } // Compares reference types using value equality @safe unittest { import std.array : staticArray; static struct Field {} static struct Struct { Field[] fields; } alias MySum = SumType!Struct; static arr1 = staticArray([Field()]); static arr2 = staticArray([Field()]); auto a = MySum(Struct(arr1[])); auto b = MySum(Struct(arr2[])); assert(a == b); } // toString // Disabled in BetterC due to use of std.conv.text version (D_BetterC) {} else @safe unittest { import std.conv : text; static struct Int { int i; } static struct Double { double d; } alias Sum = SumType!(Int, Double); assert(Sum(Int(42)).text == Int(42).text, Sum(Int(42)).text); assert(Sum(Double(33.3)).text == Double(33.3).text, Sum(Double(33.3)).text); assert((const(Sum)(Int(42))).text == (const(Int)(42)).text, (const(Sum)(Int(42))).text); } // string formatting // Disabled in BetterC due to use of std.format.format version (D_BetterC) {} else @safe unittest { import std.format : format; 
SumType!int x = 123; assert(format!"%s"(x) == format!"%s"(123)); assert(format!"%x"(x) == format!"%x"(123)); } // string formatting of qualified SumTypes // Disabled in BetterC due to use of std.format.format and dynamic arrays version (D_BetterC) {} else @safe unittest { import std.format : format; int[] a = [1, 2, 3]; const(SumType!(int[])) x = a; assert(format!"%(%d, %)"(x) == format!"%(%s, %)"(a)); } // Github issue #16 // Disabled in BetterC due to use of dynamic arrays version (D_BetterC) {} else @safe unittest { alias Node = SumType!(This[], string); // override inference of @system attribute for cyclic functions assert((() @trusted => Node([Node([Node("x")])]) == Node([Node([Node("x")])]) )()); } // Github issue #16 with const // Disabled in BetterC due to use of dynamic arrays version (D_BetterC) {} else @safe unittest { alias Node = SumType!(const(This)[], string); // override inference of @system attribute for cyclic functions assert((() @trusted => Node([Node([Node("x")])]) == Node([Node([Node("x")])]) )()); } // Stale pointers // Disabled in BetterC due to use of dynamic arrays version (D_BetterC) {} else @system unittest { alias MySum = SumType!(ubyte, void*[2]); MySum x = [null, cast(void*) 0x12345678]; void** p = &x.get!(void*[2])[1]; x = ubyte(123); assert(*p != cast(void*) 0x12345678); } // Exception-safe assignment // Disabled in BetterC due to use of exceptions version (D_BetterC) {} else @safe unittest { static struct A { int value = 123; } static struct B { int value = 456; this(this) { throw new Exception("oops"); } } alias MySum = SumType!(A, B); MySum x; try { x = B(); } catch (Exception e) {} assert( (x.tag == 0 && x.get!A.value == 123) || (x.tag == 1 && x.get!B.value == 456) ); } // Types with @disable this(this) @safe unittest { import core.lifetime : move; static struct NoCopy { @disable this(this); } alias MySum = SumType!NoCopy; NoCopy lval = NoCopy(); MySum x = NoCopy(); MySum y = NoCopy(); assert(!__traits(compiles, 
SumType!NoCopy(lval))); y = NoCopy(); y = move(x); assert(!__traits(compiles, y = lval)); assert(!__traits(compiles, y = x)); bool b = x == y; } // Github issue #22 // Disabled in BetterC due to use of std.typecons.Nullable version (D_BetterC) {} else @safe unittest { import std.typecons; static struct A { SumType!(Nullable!int) a = Nullable!int.init; } } // Static arrays of structs with postblits // Disabled in BetterC due to use of dynamic arrays version (D_BetterC) {} else @safe unittest { static struct S { int n; this(this) { n++; } } SumType!(S[1]) x = [S(0)]; SumType!(S[1]) y = x; auto xval = x.get!(S[1])[0].n; auto yval = y.get!(S[1])[0].n; assert(xval != yval); } // Replacement does not happen inside SumType // Disabled in BetterC due to use of associative arrays version (D_BetterC) {} else @safe unittest { import std.typecons : Tuple, ReplaceTypeUnless; alias A = Tuple!(This*,SumType!(This*))[SumType!(This*,string)[This]]; alias TR = ReplaceTypeUnless!(isSumTypeInstance, This, int, A); static assert(is(TR == Tuple!(int*,SumType!(This*))[SumType!(This*, string)[int]])); } // Supports nested self-referential SumTypes @safe unittest { import std.typecons : Tuple, Flag; alias Nat = SumType!(Flag!"0", Tuple!(This*)); alias Inner = SumType!Nat; alias Outer = SumType!(Nat*, Tuple!(This*, This*)); } // Self-referential SumTypes inside Algebraic // Disabled in BetterC due to use of std.variant.Algebraic version (D_BetterC) {} else @safe unittest { import std.variant : Algebraic; alias T = Algebraic!(SumType!(This*)); assert(is(T.AllowedTypes[0].Types[0] == T.AllowedTypes[0]*)); } // Doesn't call @system postblits in @safe code @safe unittest { static struct SystemCopy { @system this(this) {} } SystemCopy original; assert(!__traits(compiles, () @safe { SumType!SystemCopy copy = original; })); assert(!__traits(compiles, () @safe { SumType!SystemCopy copy; copy = original; })); } // Doesn't overwrite pointers in @safe code @safe unittest { alias MySum = SumType!(int*, 
int); MySum x; assert(!__traits(compiles, () @safe { x = 123; })); assert(!__traits(compiles, () @safe { x = MySum(123); })); } // Types with invariants // Disabled in BetterC due to use of exceptions version (D_BetterC) {} else version (D_Invariants) @system unittest { import std.exception : assertThrown; import core.exception : AssertError; struct S { int i; invariant { assert(i >= 0); } } class C { int i; invariant { assert(i >= 0); } } SumType!S x; x.match!((ref v) { v.i = -1; }); assertThrown!AssertError(assert(&x)); SumType!C y = new C(); y.match!((ref v) { v.i = -1; }); assertThrown!AssertError(assert(&y)); } // Calls value postblit on self-assignment @safe unittest { static struct S { int n; this(this) { n++; } } SumType!S x = S(); SumType!S y; y = x; auto xval = x.get!S.n; auto yval = y.get!S.n; assert(xval != yval); } // Github issue #29 @safe unittest { alias A = SumType!string; @safe A createA(string arg) { return A(arg); } @safe void test() { A a = createA(""); } } // SumTypes as associative array keys // Disabled in BetterC due to use of associative arrays version (D_BetterC) {} else @safe unittest { int[SumType!(int, string)] aa; } // toString with non-copyable types // Disabled in BetterC due to use of std.conv.to (in toString) version (D_BetterC) {} else @safe unittest { struct NoCopy { @disable this(this); } SumType!NoCopy x; auto _ = x.toString(); } // Can use the result of assignment @safe unittest { alias MySum = SumType!(int, float); MySum a = MySum(123); MySum b = MySum(3.14); assert((a = b) == b); assert((a = MySum(123)) == MySum(123)); assert((a = 3.14) == MySum(3.14)); assert(((a = b) = MySum(123)) == MySum(123)); } // Types with copy constructors @safe unittest { static struct S { int n; this(ref return scope inout S other) inout { n = other.n + 1; } } SumType!S x = S(); SumType!S y = x; auto xval = x.get!S.n; auto yval = y.get!S.n; assert(xval != yval); } // Copyable by generated copy constructors @safe unittest { static struct Inner { 
ref this(ref inout Inner other) {} } static struct Outer { SumType!Inner inner; } Outer x; Outer y = x; } // Types with qualified copy constructors @safe unittest { static struct ConstCopy { int n; this(inout int n) inout { this.n = n; } this(ref const typeof(this) other) const { this.n = other.n; } } static struct ImmutableCopy { int n; this(inout int n) inout { this.n = n; } this(ref immutable typeof(this) other) immutable { this.n = other.n; } } const SumType!ConstCopy x = const(ConstCopy)(1); immutable SumType!ImmutableCopy y = immutable(ImmutableCopy)(1); } // Types with disabled opEquals @safe unittest { static struct S { @disable bool opEquals(const S rhs) const; } auto _ = SumType!S(S()); } // Types with non-const opEquals @safe unittest { static struct S { int i; bool opEquals(S rhs) { return i == rhs.i; } } auto _ = SumType!S(S(123)); } // Incomparability of different SumTypes @safe unittest { SumType!(int, string) x = 123; SumType!(string, int) y = 123; assert(!__traits(compiles, x != y)); } // Self-reference in return/parameter type of function pointer member // Disabled in BetterC due to use of delegates version (D_BetterC) {} else @safe unittest { alias T = SumType!(int, This delegate(This)); } // Construction and assignment from implicitly-convertible lvalue @safe unittest { alias MySum = SumType!bool; const(bool) b = true; MySum x = b; MySum y; y = b; } // @safe assignment to the only pointer type in a SumType @safe unittest { SumType!(string, int) sm = 123; sm = "this should be @safe"; } // Immutable member type with copy constructor // https://issues.dlang.org/show_bug.cgi?id=22572 @safe unittest { static struct CopyConstruct { this(ref inout CopyConstruct other) inout {} } static immutable struct Value { CopyConstruct c; } SumType!Value s; } // Construction of inout-qualified SumTypes // https://issues.dlang.org/show_bug.cgi?id=22901 @safe unittest { static inout(SumType!(int[])) example(inout(int[]) arr) { return inout(SumType!(int[]))(arr); } } 
// Assignment of struct with overloaded opAssign in CTFE // https://issues.dlang.org/show_bug.cgi?id=23182 @safe unittest { static struct HasOpAssign { void opAssign(HasOpAssign rhs) {} } static SumType!HasOpAssign test() { SumType!HasOpAssign s; // Test both overloads s = HasOpAssign(); s = SumType!HasOpAssign(); return s; } // Force CTFE enum result = test(); } /// True if `T` is an instance of the `SumType` template, otherwise false. private enum bool isSumTypeInstance(T) = is(T == SumType!Args, Args...); @safe unittest { static struct Wrapper { SumType!int s; alias s this; } assert(isSumTypeInstance!(SumType!int)); assert(!isSumTypeInstance!Wrapper); } /// True if `T` is a [SumType] or implicitly converts to one, otherwise false. enum bool isSumType(T) = is(T : SumType!Args, Args...); /// @safe unittest { static struct ConvertsToSumType { SumType!int payload; alias payload this; } static struct ContainsSumType { SumType!int payload; } assert(isSumType!(SumType!int)); assert(isSumType!ConvertsToSumType); assert(!isSumType!ContainsSumType); } /** * Calls a type-appropriate function with the value held in a [SumType]. * * For each possible type the [SumType] can hold, the given handlers are * checked, in order, to see whether they accept a single argument of that type. * The first one that does is chosen as the match for that type. (Note that the * first match may not always be the most exact match. * See ["Avoiding unintentional matches"](#avoiding-unintentional-matches) for * one common pitfall.) * * Every type must have a matching handler, and every handler must match at * least one type. This is enforced at compile time. * * Handlers may be functions, delegates, or objects with `opCall` overloads. If * a function with more than one overload is given as a handler, all of the * overloads are considered as potential matches. 
* * Templated handlers are also accepted, and will match any type for which they * can be [implicitly instantiated](https://dlang.org/glossary.html#ifti). See * ["Introspection-based matching"](#introspection-based-matching) for an * example of templated handler usage. * * If multiple [SumType]s are passed to match, their values are passed to the * handlers as separate arguments, and matching is done for each possible * combination of value types. See ["Multiple dispatch"](#multiple-dispatch) for * an example. * * Returns: * The value returned from the handler that matches the currently-held type. * * See_Also: $(REF visit, std,variant) */ template match(handlers...) { import std.typecons : Yes; /** * The actual `match` function. * * Params: * args = One or more [SumType] objects. */ auto ref match(SumTypes...)(auto ref SumTypes args) if (allSatisfy!(isSumType, SumTypes) && args.length > 0) { return matchImpl!(Yes.exhaustive, handlers)(args); } } /** $(DIVID avoiding-unintentional-matches, $(H3 Avoiding unintentional matches)) * * Sometimes, implicit conversions may cause a handler to match more types than * intended. The example below shows two solutions to this problem. */ @safe unittest { alias Number = SumType!(double, int); Number x; // Problem: because int implicitly converts to double, the double // handler is used for both types, and the int handler never matches. assert(!__traits(compiles, x.match!( (double d) => "got double", (int n) => "got int" ) )); // Solution 1: put the handler for the "more specialized" type (in this // case, int) before the handler for the type it converts to. assert(__traits(compiles, x.match!( (int n) => "got int", (double d) => "got double" ) )); // Solution 2: use a template that only accepts the exact type it's // supposed to match, instead of any type that implicitly converts to it. 
alias exactly(T, alias fun) = function (arg) { static assert(is(typeof(arg) == T)); return fun(arg); }; // Now, even if we put the double handler first, it will only be used for // doubles, not ints. assert(__traits(compiles, x.match!( exactly!(double, d => "got double"), exactly!(int, n => "got int") ) )); } /** $(DIVID multiple-dispatch, $(H3 Multiple dispatch)) * * Pattern matching can be performed on multiple `SumType`s at once by passing * handlers with multiple arguments. This usually leads to more concise code * than using nested calls to `match`, as show below. */ @safe unittest { struct Point2D { double x, y; } struct Point3D { double x, y, z; } alias Point = SumType!(Point2D, Point3D); version (none) { // This function works, but the code is ugly and repetitive. // It uses three separate calls to match! @safe pure nothrow @nogc bool sameDimensions(Point p1, Point p2) { return p1.match!( (Point2D _) => p2.match!( (Point2D _) => true, _ => false ), (Point3D _) => p2.match!( (Point3D _) => true, _ => false ) ); } } // This version is much nicer. @safe pure nothrow @nogc bool sameDimensions(Point p1, Point p2) { alias doMatch = match!( (Point2D _1, Point2D _2) => true, (Point3D _1, Point3D _2) => true, (_1, _2) => false ); return doMatch(p1, p2); } Point a = Point2D(1, 2); Point b = Point2D(3, 4); Point c = Point3D(5, 6, 7); Point d = Point3D(8, 9, 0); assert( sameDimensions(a, b)); assert( sameDimensions(c, d)); assert(!sameDimensions(a, c)); assert(!sameDimensions(d, b)); } /** * Attempts to call a type-appropriate function with the value held in a * [SumType], and throws on failure. * * Matches are chosen using the same rules as [match], but are not required to * be exhaustive—in other words, a type (or combination of types) is allowed to * have no matching handler. If a type without a handler is encountered at * runtime, a [MatchException] is thrown. * * Not available when compiled with `-betterC`. 
* * Returns: * The value returned from the handler that matches the currently-held type, * if a handler was given for that type. * * Throws: * [MatchException], if the currently-held type has no matching handler. * * See_Also: $(REF tryVisit, std,variant) */ version (D_Exceptions) template tryMatch(handlers...) { import std.typecons : No; /** * The actual `tryMatch` function. * * Params: * args = One or more [SumType] objects. */ auto ref tryMatch(SumTypes...)(auto ref SumTypes args) if (allSatisfy!(isSumType, SumTypes) && args.length > 0) { return matchImpl!(No.exhaustive, handlers)(args); } } /** * Thrown by [tryMatch] when an unhandled type is encountered. * * Not available when compiled with `-betterC`. */ version (D_Exceptions) class MatchException : Exception { /// pure @safe @nogc nothrow this(string msg, string file = __FILE__, size_t line = __LINE__) { super(msg, file, line); } } /** * True if `handler` is a potential match for `Ts`, otherwise false. * * See the documentation for [match] for a full explanation of how matches are * chosen. */ template canMatch(alias handler, Ts...) 
if (Ts.length > 0) { enum canMatch = is(typeof((ref Ts args) => handler(args))); } /// @safe unittest { alias handleInt = (int i) => "got an int"; assert( canMatch!(handleInt, int)); assert(!canMatch!(handleInt, string)); } // Includes all overloads of the given handler @safe unittest { static struct OverloadSet { static void fun(int n) {} static void fun(double d) {} } assert(canMatch!(OverloadSet.fun, int)); assert(canMatch!(OverloadSet.fun, double)); } // Like aliasSeqOf!(iota(n)), but works in BetterC private template Iota(size_t n) { static if (n == 0) { alias Iota = AliasSeq!(); } else { alias Iota = AliasSeq!(Iota!(n - 1), n - 1); } } @safe unittest { assert(is(Iota!0 == AliasSeq!())); assert(Iota!1 == AliasSeq!(0)); assert(Iota!3 == AliasSeq!(0, 1, 2)); } /* The number that the dim-th argument's tag is multiplied by when * converting TagTuples to and from case indices ("caseIds"). * * Named by analogy to the stride that the dim-th index into a * multidimensional static array is multiplied by to calculate the * offset of a specific element. */ private size_t stride(size_t dim, lengths...)() { import core.checkedint : mulu; size_t result = 1; bool overflow = false; static foreach (i; 0 .. dim) { result = mulu(result, lengths[i], overflow); } /* The largest number matchImpl uses, numCases, is calculated with * stride!(SumTypes.length), so as long as this overflow check * passes, we don't need to check for overflow anywhere else. */ assert(!overflow, "Integer overflow"); return result; } private template matchImpl(Flag!"exhaustive" exhaustive, handlers...) { auto ref matchImpl(SumTypes...)(auto ref SumTypes args) if (allSatisfy!(isSumType, SumTypes) && args.length > 0) { alias stride(size_t i) = .stride!(i, Map!(typeCount, SumTypes)); alias TagTuple = .TagTuple!(SumTypes); /* * A list of arguments to be passed to a handler needed for the case * labeled with `caseId`. 
*/ template handlerArgs(size_t caseId) { enum tags = TagTuple.fromCaseId(caseId); enum argsFrom(size_t i : tags.length) = ""; enum argsFrom(size_t i) = "args[" ~ toCtString!i ~ "].get!(SumTypes[" ~ toCtString!i ~ "]" ~ ".Types[" ~ toCtString!(tags[i]) ~ "])(), " ~ argsFrom!(i + 1); enum handlerArgs = argsFrom!0; } /* An AliasSeq of the types of the member values in the argument list * returned by `handlerArgs!caseId`. * * Note that these are the actual (that is, qualified) types of the * member values, which may not be the same as the types listed in * the arguments' `.Types` properties. */ template valueTypes(size_t caseId) { enum tags = TagTuple.fromCaseId(caseId); template getType(size_t i) { enum tid = tags[i]; alias T = SumTypes[i].Types[tid]; alias getType = typeof(args[i].get!T()); } alias valueTypes = Map!(getType, Iota!(tags.length)); } /* The total number of cases is * * Π SumTypes[i].Types.length for 0 ≤ i < SumTypes.length * * Or, equivalently, * * ubyte[SumTypes[0].Types.length]...[SumTypes[$-1].Types.length].sizeof * * Conveniently, this is equal to stride!(SumTypes.length), so we can * use that function to compute it. */ enum numCases = stride!(SumTypes.length); /* Guaranteed to never be a valid handler index, since * handlers.length <= size_t.max. */ enum noMatch = size_t.max; // An array that maps caseIds to handler indices ("hids"). enum matches = () { size_t[numCases] matches; // Workaround for https://issues.dlang.org/show_bug.cgi?id=19561 foreach (ref match; matches) { match = noMatch; } static foreach (caseId; 0 .. numCases) { static foreach (hid, handler; handlers) { static if (canMatch!(handler, valueTypes!caseId)) { if (matches[caseId] == noMatch) { matches[caseId] = hid; } } } } return matches; }(); import std.algorithm.searching : canFind; // Check for unreachable handlers static foreach (hid, handler; handlers) { static assert(matches[].canFind(hid), "`handlers[" ~ toCtString!hid ~ "]` " ~ "of type `" ~ ( __traits(isTemplate, handler) ? 
"template" : typeof(handler).stringof ) ~ "` " ~ "never matches" ); } // Workaround for https://issues.dlang.org/show_bug.cgi?id=19993 enum handlerName(size_t hid) = "handler" ~ toCtString!hid; static foreach (size_t hid, handler; handlers) { mixin("alias ", handlerName!hid, " = handler;"); } immutable argsId = TagTuple(args).toCaseId; final switch (argsId) { static foreach (caseId; 0 .. numCases) { case caseId: static if (matches[caseId] != noMatch) { return mixin(handlerName!(matches[caseId]), "(", handlerArgs!caseId, ")"); } else { static if (exhaustive) { static assert(false, "No matching handler for types `" ~ valueTypes!caseId.stringof ~ "`"); } else { throw new MatchException( "No matching handler for types `" ~ valueTypes!caseId.stringof ~ "`"); } } } } assert(false, "unreachable"); } } private enum typeCount(SumType) = SumType.Types.length; /* A TagTuple represents a single possible set of tags that `args` * could have at runtime. * * Because D does not allow a struct to be the controlling expression * of a switch statement, we cannot dispatch on the TagTuple directly. * Instead, we must map each TagTuple to a unique integer and generate * a case label for each of those integers. * * This mapping is implemented in `fromCaseId` and `toCaseId`. It uses * the same technique that's used to map index tuples to memory offsets * in a multidimensional static array. * * For example, when `args` consists of two SumTypes with two member * types each, the TagTuples corresponding to each case label are: * * case 0: TagTuple([0, 0]) * case 1: TagTuple([1, 0]) * case 2: TagTuple([0, 1]) * case 3: TagTuple([1, 1]) * * When there is only one argument, the caseId is equal to that * argument's tag. */ private struct TagTuple(SumTypes...) { size_t[SumTypes.length] tags; alias tags this; alias stride(size_t i) = .stride!(i, Map!(typeCount, SumTypes)); invariant { static foreach (i; 0 .. 
tags.length) { assert(tags[i] < SumTypes[i].Types.length, "Invalid tag"); } } this(ref const(SumTypes) args) { static foreach (i; 0 .. tags.length) { tags[i] = args[i].tag; } } static TagTuple fromCaseId(size_t caseId) { TagTuple result; // Most-significant to least-significant static foreach_reverse (i; 0 .. result.length) { result[i] = caseId / stride!i; caseId %= stride!i; } return result; } size_t toCaseId() { size_t result; static foreach (i; 0 .. tags.length) { result += tags[i] * stride!i; } return result; } } // Matching @safe unittest { alias MySum = SumType!(int, float); MySum x = MySum(42); MySum y = MySum(3.14); assert(x.match!((int v) => true, (float v) => false)); assert(y.match!((int v) => false, (float v) => true)); } // Missing handlers @safe unittest { alias MySum = SumType!(int, float); MySum x = MySum(42); assert(!__traits(compiles, x.match!((int x) => true))); assert(!__traits(compiles, x.match!())); } // Handlers with qualified parameters // Disabled in BetterC due to use of dynamic arrays version (D_BetterC) {} else @safe unittest { alias MySum = SumType!(int[], float[]); MySum x = MySum([1, 2, 3]); MySum y = MySum([1.0, 2.0, 3.0]); assert(x.match!((const(int[]) v) => true, (const(float[]) v) => false)); assert(y.match!((const(int[]) v) => false, (const(float[]) v) => true)); } // Handlers for qualified types // Disabled in BetterC due to use of dynamic arrays version (D_BetterC) {} else @safe unittest { alias MySum = SumType!(immutable(int[]), immutable(float[])); MySum x = MySum([1, 2, 3]); assert(x.match!((immutable(int[]) v) => true, (immutable(float[]) v) => false)); assert(x.match!((const(int[]) v) => true, (const(float[]) v) => false)); // Tail-qualified parameters assert(x.match!((immutable(int)[] v) => true, (immutable(float)[] v) => false)); assert(x.match!((const(int)[] v) => true, (const(float)[] v) => false)); // Generic parameters assert(x.match!((immutable v) => true)); assert(x.match!((const v) => true)); // Unqualified 
parameters assert(!__traits(compiles, x.match!((int[] v) => true, (float[] v) => false) )); } // Delegate handlers // Disabled in BetterC due to use of closures version (D_BetterC) {} else @safe unittest { alias MySum = SumType!(int, float); int answer = 42; MySum x = MySum(42); MySum y = MySum(3.14); assert(x.match!((int v) => v == answer, (float v) => v == answer)); assert(!y.match!((int v) => v == answer, (float v) => v == answer)); } version (unittest) { version (D_BetterC) { // std.math.isClose depends on core.runtime.math, so use a // libc-based version for testing with -betterC @safe pure @nogc nothrow private bool isClose(double lhs, double rhs) { import core.stdc.math : fabs; return fabs(lhs - rhs) < 1e-5; } } else { import std.math.operations : isClose; } } // Generic handler @safe unittest { alias MySum = SumType!(int, float); MySum x = MySum(42); MySum y = MySum(3.14); assert(x.match!(v => v*2) == 84); assert(y.match!(v => v*2).isClose(6.28)); } // Fallback to generic handler // Disabled in BetterC due to use of std.conv.to version (D_BetterC) {} else @safe unittest { import std.conv : to; alias MySum = SumType!(int, float, string); MySum x = MySum(42); MySum y = MySum("42"); assert(x.match!((string v) => v.to!int, v => v*2) == 84); assert(y.match!((string v) => v.to!int, v => v*2) == 42); } // Multiple non-overlapping generic handlers @safe unittest { import std.array : staticArray; alias MySum = SumType!(int, float, int[], char[]); static ints = staticArray([1, 2, 3]); static chars = staticArray(['a', 'b', 'c']); MySum x = MySum(42); MySum y = MySum(3.14); MySum z = MySum(ints[]); MySum w = MySum(chars[]); assert(x.match!(v => v*2, v => v.length) == 84); assert(y.match!(v => v*2, v => v.length).isClose(6.28)); assert(w.match!(v => v*2, v => v.length) == 3); assert(z.match!(v => v*2, v => v.length) == 3); } // Structural matching @safe unittest { static struct S1 { int x; } static struct S2 { int y; } alias MySum = SumType!(S1, S2); MySum a = 
MySum(S1(0)); MySum b = MySum(S2(0)); assert(a.match!(s1 => s1.x + 1, s2 => s2.y - 1) == 1); assert(b.match!(s1 => s1.x + 1, s2 => s2.y - 1) == -1); } // Separate opCall handlers @safe unittest { static struct IntHandler { bool opCall(int arg) { return true; } } static struct FloatHandler { bool opCall(float arg) { return false; } } alias MySum = SumType!(int, float); MySum x = MySum(42); MySum y = MySum(3.14); assert(x.match!(IntHandler.init, FloatHandler.init)); assert(!y.match!(IntHandler.init, FloatHandler.init)); } // Compound opCall handler @safe unittest { static struct CompoundHandler { bool opCall(int arg) { return true; } bool opCall(float arg) { return false; } } alias MySum = SumType!(int, float); MySum x = MySum(42); MySum y = MySum(3.14); assert(x.match!(CompoundHandler.init)); assert(!y.match!(CompoundHandler.init)); } // Ordered matching @safe unittest { alias MySum = SumType!(int, float); MySum x = MySum(42); assert(x.match!((int v) => true, v => false)); } // Non-exhaustive matching version (D_Exceptions) @system unittest { import std.exception : assertThrown, assertNotThrown; alias MySum = SumType!(int, float); MySum x = MySum(42); MySum y = MySum(3.14); assertNotThrown!MatchException(x.tryMatch!((int n) => true)); assertThrown!MatchException(y.tryMatch!((int n) => true)); } // Non-exhaustive matching in @safe code version (D_Exceptions) @safe unittest { SumType!(int, float) x; auto _ = x.tryMatch!( (int n) => n + 1, ); } // Handlers with ref parameters @safe unittest { alias Value = SumType!(long, double); auto value = Value(3.14); value.match!( (long) {}, (ref double d) { d *= 2; } ); assert(value.get!double.isClose(6.28)); } // Unreachable handlers @safe unittest { alias MySum = SumType!(int, string); MySum s; assert(!__traits(compiles, s.match!( (int _) => 0, (string _) => 1, (double _) => 2 ) )); assert(!__traits(compiles, s.match!( _ => 0, (int _) => 1 ) )); } // Unsafe handlers @system unittest { SumType!int x; alias unsafeHandler = (int 
x) @system { return; }; assert(!__traits(compiles, () @safe { x.match!unsafeHandler; })); auto test() @system { return x.match!unsafeHandler; } } // Overloaded handlers @safe unittest { static struct OverloadSet { static string fun(int i) { return "int"; } static string fun(double d) { return "double"; } } alias MySum = SumType!(int, double); MySum a = 42; MySum b = 3.14; assert(a.match!(OverloadSet.fun) == "int"); assert(b.match!(OverloadSet.fun) == "double"); } // Overload sets that include SumType arguments @safe unittest { alias Inner = SumType!(int, double); alias Outer = SumType!(Inner, string); static struct OverloadSet { @safe: static string fun(int i) { return "int"; } static string fun(double d) { return "double"; } static string fun(string s) { return "string"; } static string fun(Inner i) { return i.match!fun; } static string fun(Outer o) { return o.match!fun; } } Outer a = Inner(42); Outer b = Inner(3.14); Outer c = "foo"; assert(OverloadSet.fun(a) == "int"); assert(OverloadSet.fun(b) == "double"); assert(OverloadSet.fun(c) == "string"); } // Overload sets with ref arguments @safe unittest { static struct OverloadSet { static void fun(ref int i) { i = 42; } static void fun(ref double d) { d = 3.14; } } alias MySum = SumType!(int, double); MySum x = 0; MySum y = 0.0; x.match!(OverloadSet.fun); y.match!(OverloadSet.fun); assert(x.match!((value) => is(typeof(value) == int) && value == 42)); assert(y.match!((value) => is(typeof(value) == double) && value == 3.14)); } // Overload sets with templates @safe unittest { import std.traits : isNumeric; static struct OverloadSet { static string fun(string arg) { return "string"; } static string fun(T)(T arg) if (isNumeric!T) { return "numeric"; } } alias MySum = SumType!(int, string); MySum x = 123; MySum y = "hello"; assert(x.match!(OverloadSet.fun) == "numeric"); assert(y.match!(OverloadSet.fun) == "string"); } // Github issue #24 @safe unittest { void test() @nogc { int acc = 0; SumType!int(1).match!((int x) => 
acc += x); } } // Github issue #31 @safe unittest { void test() @nogc { int acc = 0; SumType!(int, string)(1).match!( (int x) => acc += x, (string _) => 0, ); } } // Types that `alias this` a SumType @safe unittest { static struct A {} static struct B {} static struct D { SumType!(A, B) value; alias value this; } auto _ = D().match!(_ => true); } // Multiple dispatch @safe unittest { alias MySum = SumType!(int, string); static int fun(MySum x, MySum y) { import std.meta : Args = AliasSeq; return Args!(x, y).match!( (int xv, int yv) => 0, (string xv, int yv) => 1, (int xv, string yv) => 2, (string xv, string yv) => 3 ); } assert(fun(MySum(0), MySum(0)) == 0); assert(fun(MySum(""), MySum(0)) == 1); assert(fun(MySum(0), MySum("")) == 2); assert(fun(MySum(""), MySum("")) == 3); } // inout SumTypes @safe unittest { inout(int[]) fun(inout(SumType!(int[])) x) { return x.match!((inout(int[]) a) => a); } } private void destroyIfOwner(T)(ref T value) { static if (hasElaborateDestructor!T) { destroy(value); } } dub-1.40.0/source/dub/internal/dyaml/style.d000066400000000000000000000014071477246567400206770ustar00rootroot00000000000000 // Copyright Ferdinand Majerech 2011. // Distributed under the Boost Software License, Version 1.0. // (See accompanying file LICENSE_1_0.txt or copy at // http://www.boost.org/LICENSE_1_0.txt) ///YAML node formatting styles. module dub.internal.dyaml.style; ///Scalar styles. enum ScalarStyle : ubyte { /// Invalid (uninitialized) style invalid = 0, /// `|` (Literal block style) literal, /// `>` (Folded block style) folded, /// Plain scalar plain, /// Single quoted scalar singleQuoted, /// Double quoted scalar doubleQuoted } ///Collection styles. enum CollectionStyle : ubyte { /// Invalid (uninitialized) style invalid = 0, /// Block style. block, /// Flow style. flow } dub-1.40.0/source/dub/internal/dyaml/tagdirective.d000066400000000000000000000006531477246567400222130ustar00rootroot00000000000000 // Copyright Ferdinand Majerech 2011. 
// Distributed under the Boost Software License, Version 1.0. // (See accompanying file LICENSE_1_0.txt or copy at // http://www.boost.org/LICENSE_1_0.txt) ///Tag directives. module dub.internal.dyaml.tagdirective; ///Single tag directive. handle is the shortcut, prefix is the prefix that replaces it. struct TagDirective { string handle; string prefix; } dub-1.40.0/source/dub/internal/dyaml/token.d000066400000000000000000000123311477246567400206550ustar00rootroot00000000000000 // Copyright Ferdinand Majerech 2011-2014. // Distributed under the Boost Software License, Version 1.0. // (See accompanying file LICENSE_1_0.txt or copy at // http://www.boost.org/LICENSE_1_0.txt) /// YAML tokens. /// Code based on PyYAML: http://www.pyyaml.org module dub.internal.dyaml.token; import std.conv; import dub.internal.dyaml.encoding; import dub.internal.dyaml.exception; import dub.internal.dyaml.reader; import dub.internal.dyaml.style; package: /// Token types. enum TokenID : ubyte { // Invalid (uninitialized) token invalid = 0, directive, documentStart, documentEnd, streamStart, streamEnd, blockSequenceStart, blockMappingStart, blockEnd, flowSequenceStart, flowMappingStart, flowSequenceEnd, flowMappingEnd, key, value, blockEntry, flowEntry, alias_, anchor, tag, scalar } /// Specifies the type of a tag directive token. enum DirectiveType : ubyte { // YAML version directive. yaml, // Tag directive. tag, // Any other directive is "reserved" for future YAML versions. reserved } /// Token produced by scanner. /// /// 32 bytes on 64-bit. struct Token { @disable int opCmp(ref Token); // 16B /// Value of the token, if any. /// /// Values are char[] instead of string, as Parser may still change them in a few /// cases. Parser casts values to strings when producing Events. char[] value; // 4B /// Start position of the token in file/stream. Mark startMark; // 4B /// End position of the token in file/stream. Mark endMark; // 1B /// Token type. 
TokenID id; // 1B /// Style of scalar token, if this is a scalar token. ScalarStyle style; // 1B /// Encoding, if this is a stream start token. Encoding encoding; // 1B /// Type of directive for directiveToken. DirectiveType directive; // 4B /// Used to split value into 2 substrings for tokens that need 2 values (tagToken) uint valueDivider; /// Get string representation of the token ID. @property string idString() @safe pure const {return id.to!string;} } /// Construct a directive token. /// /// Params: start = Start position of the token. /// end = End position of the token. /// value = Value of the token. /// directive = Directive type (YAML or TAG in YAML 1.1). /// nameEnd = Position of the end of the name Token directiveToken(const Mark start, const Mark end, char[] value, DirectiveType directive, const uint nameEnd) @safe pure nothrow @nogc { return Token(value, start, end, TokenID.directive, ScalarStyle.init, Encoding.init, directive, nameEnd); } /// Construct a simple (no value) token with specified type. /// /// Params: id = Type of the token. /// start = Start position of the token. /// end = End position of the token. Token simpleToken(TokenID id)(const Mark start, const Mark end) { return Token(null, start, end, id); } /// Construct a stream start token. /// /// Params: start = Start position of the token. /// end = End position of the token. /// encoding = Encoding of the stream. Token streamStartToken(const Mark start, const Mark end, const Encoding encoding) @safe pure nothrow @nogc { return Token(null, start, end, TokenID.streamStart, ScalarStyle.invalid, encoding); } /// Aliases for construction of simple token types. 
alias streamEndToken = simpleToken!(TokenID.streamEnd); alias blockSequenceStartToken = simpleToken!(TokenID.blockSequenceStart); alias blockMappingStartToken = simpleToken!(TokenID.blockMappingStart); alias blockEndToken = simpleToken!(TokenID.blockEnd); alias keyToken = simpleToken!(TokenID.key); alias valueToken = simpleToken!(TokenID.value); alias blockEntryToken = simpleToken!(TokenID.blockEntry); alias flowEntryToken = simpleToken!(TokenID.flowEntry); /// Construct a simple token with value with specified type. /// /// Params: id = Type of the token. /// start = Start position of the token. /// end = End position of the token. /// value = Value of the token. /// valueDivider = A hack for TagToken to store 2 values in value; the first /// value goes up to valueDivider, the second after it. Token simpleValueToken(TokenID id)(const Mark start, const Mark end, char[] value, const uint valueDivider = uint.max) { return Token(value, start, end, id, ScalarStyle.invalid, Encoding.init, DirectiveType.init, valueDivider); } /// Alias for construction of tag token. alias tagToken = simpleValueToken!(TokenID.tag); alias aliasToken = simpleValueToken!(TokenID.alias_); alias anchorToken = simpleValueToken!(TokenID.anchor); /// Construct a scalar token. /// /// Params: start = Start position of the token. /// end = End position of the token. /// value = Value of the token. /// style = Style of the token. 
Token scalarToken(const Mark start, const Mark end, char[] value, const ScalarStyle style) @safe pure nothrow @nogc { return Token(value, start, end, TokenID.scalar, style); } dub-1.40.0/source/dub/internal/git.d000066400000000000000000000123021477246567400172100ustar00rootroot00000000000000module dub.internal.git; import dub.internal.vibecompat.core.file; import dub.internal.logging; import std.file; import std.string; version (Windows) { import dub.internal.vibecompat.data.json; string determineVersionWithGit(NativePath path) { // On Windows, which is slow at running external processes, // cache the version numbers that are determined using // git to speed up the initialization phase. import dub.internal.utils : jsonFromFile; // quickly determine head commit without invoking git string head_commit; auto hpath = (path ~ ".git/HEAD").toNativeString(); if (exists(hpath)) { auto head_ref = readText(hpath).strip(); if (head_ref.startsWith("ref: ")) { auto rpath = (path ~ (".git/"~head_ref[5 .. 
$])).toNativeString(); if (exists(rpath)) head_commit = readText(rpath).strip(); } } // return the last determined version for that commit // not that this is not always correct, most notably when // a tag gets added/removed/changed and changes the outcome // of the full version detection computation auto vcachepath = path ~ ".dub/version.json"; if (existsFile(vcachepath)) { auto ver = jsonFromFile(vcachepath); if (head_commit == ver["commit"].opt!string) return ver["version"].get!string; } // if no cache file or the HEAD commit changed, perform full detection auto ret = determineVersionWithGitTool(path); // update version cache file if (head_commit.length) { import dub.internal.utils : atomicWriteJsonFile; ensureDirectory(path ~ ".dub"); atomicWriteJsonFile(vcachepath, Json(["commit": Json(head_commit), "version": Json(ret)])); } return ret; } } else { string determineVersionWithGit(NativePath path) { return determineVersionWithGitTool(path); } } // determines the version of a package that is stored in a Git working copy // by invoking the "git" executable private string determineVersionWithGitTool(NativePath path) { import std.process; auto git_dir = path ~ ".git"; if (!existsFile(git_dir) || !isDir(git_dir.toNativeString)) return null; auto git_dir_param = "--git-dir=" ~ git_dir.toNativeString(); static string exec(scope string[] params...) 
{ auto ret = execute(params); if (ret.status == 0) return ret.output.strip; logDebug("'%s' failed with exit code %s: %s", params.join(" "), ret.status, ret.output.strip); return null; } if (const describeOutput = exec("git", git_dir_param, "describe", "--long", "--tags")) { if (const ver = determineVersionFromGitDescribe(describeOutput)) return ver; } auto branch = exec("git", git_dir_param, "rev-parse", "--abbrev-ref", "HEAD"); if (branch !is null) { if (branch != "HEAD") return "~" ~ branch; } return null; } private string determineVersionFromGitDescribe(string describeOutput) { import dub.semver : isValidVersion; import std.conv : to; const parts = describeOutput.split("-"); const commit = parts[$-1]; const num = parts[$-2].to!int; const tag = parts[0 .. $-2].join("-"); if (tag.startsWith("v") && isValidVersion(tag[1 .. $])) { if (num == 0) return tag[1 .. $]; const i = tag.indexOf('+'); return format("%s%scommit.%s.%s", tag[1 .. $], i >= 0 ? '.' : '+', num, commit); } return null; } unittest { // tag v1.0.0 assert(determineVersionFromGitDescribe("v1.0.0-0-deadbeef") == "1.0.0"); // 1 commit after v1.0.0 assert(determineVersionFromGitDescribe("v1.0.0-1-deadbeef") == "1.0.0+commit.1.deadbeef"); // tag v1.0.0+2.0.0 assert(determineVersionFromGitDescribe("v1.0.0+2.0.0-0-deadbeef") == "1.0.0+2.0.0"); // 12 commits after tag v1.0.0+2.0.0 assert(determineVersionFromGitDescribe("v1.0.0+2.0.0-12-deadbeef") == "1.0.0+2.0.0.commit.12.deadbeef"); // tag v1.0.0-beta.1 assert(determineVersionFromGitDescribe("v1.0.0-beta.1-0-deadbeef") == "1.0.0-beta.1"); // 2 commits after tag v1.0.0-beta.1 assert(determineVersionFromGitDescribe("v1.0.0-beta.1-2-deadbeef") == "1.0.0-beta.1+commit.2.deadbeef"); // tag v1.0.0-beta.2+2.0.0 assert(determineVersionFromGitDescribe("v1.0.0-beta.2+2.0.0-0-deadbeef") == "1.0.0-beta.2+2.0.0"); // 3 commits after tag v1.0.0-beta.2+2.0.0 assert(determineVersionFromGitDescribe("v1.0.0-beta.2+2.0.0-3-deadbeef") == "1.0.0-beta.2+2.0.0.commit.3.deadbeef"); 
// invalid tags assert(determineVersionFromGitDescribe("1.0.0-0-deadbeef") is null); assert(determineVersionFromGitDescribe("v1.0-0-deadbeef") is null); } /** Clones a repository into a new directory. Params: remote = The (possibly remote) repository to clone from reference = The branch to check out after cloning destination = Repository destination directory Returns: Whether the cloning succeeded. */ bool cloneRepository(string remote, string reference, string destination) { import std.process : Pid, spawnProcess, wait; Pid command; if (!exists(destination)) { string[] args = ["git", "clone", "--no-checkout"]; if (getLogLevel > LogLevel.diagnostic) args ~= "-q"; command = spawnProcess(args~[remote, destination]); if (wait(command) != 0) { return false; } } string[] args = ["git", "-C", destination, "checkout", "--detach"]; if (getLogLevel > LogLevel.diagnostic) args ~= "-q"; command = spawnProcess(args~[reference]); if (wait(command) != 0) { rmdirRecurse(destination); return false; } return true; } dub-1.40.0/source/dub/internal/io/000077500000000000000000000000001477246567400166715ustar00rootroot00000000000000dub-1.40.0/source/dub/internal/io/filesystem.d000066400000000000000000000061601477246567400212250ustar00rootroot00000000000000/** * An abstract filesystem representation * * This interface allows to represent the file system to various part of Dub. * Instead of direct use of `std.file`, an implementation of this interface can * be used, allowing to mock all I/O in unittest on a thread-local basis. 
*/ module dub.internal.io.filesystem; public import std.datetime.systime; public import dub.internal.vibecompat.inet.path; /// Ditto public interface Filesystem { static import dub.internal.vibecompat.core.file; /// TODO: Remove, the API should be improved public alias IterateDirDg = int delegate( scope int delegate(ref dub.internal.vibecompat.core.file.FileInfo)); /// Ditto public IterateDirDg iterateDirectory (in NativePath path) scope; /// Returns: The `path` of this FSEntry public abstract NativePath getcwd () const scope; /** * Implements `mkdir -p`: Create a directory and every intermediary * * There is no way to error out on intermediate directory, * like standard mkdir does. If you want this behavior, * simply check (`existsDirectory`) if the parent directory exists. * * Params: * path = The path of the directory to be created. */ public abstract void mkdir (in NativePath path) scope; /// Checks the existence of a file public abstract bool existsFile (in NativePath path) const scope; /// Checks the existence of a directory public abstract bool existsDirectory (in NativePath path) const scope; /// Reads a file, returns the content as `ubyte[]` public abstract ubyte[] readFile (in NativePath path) const scope; /// Reads a file, returns the content as text public abstract string readText (in NativePath path) const scope; /// Write to this file public final void writeFile (in NativePath path, const(char)[] data) scope { import std.string : representation; this.writeFile(path, data.representation); } /// Ditto public abstract void writeFile (in NativePath path, const(ubyte)[] data) scope; /** Remove a file * * Always error if the target is a directory. * Does not error if the target does not exists * and `force` is set to `true`. * * Params: * path = Path to the file to remove * force = Whether to ignore non-existing file, * default to `false`. 
*/ public void removeFile (in NativePath path, bool force = false); /** Remove a directory * * Remove an existing empty directory. * If `force` is set to `true`, no error will be thrown * if the directory is empty or non-existing. * * Params: * path = Path to the directory to remove * force = Whether to ignore non-existing / non-empty directories, * default to `false`. */ public void removeDir (in NativePath path, bool force = false); /// Implement `std.file.setTimes` public void setTimes (in NativePath path, in SysTime accessTime, in SysTime modificationTime); /// Implement `std.file.setAttributes` public void setAttributes (in NativePath path, uint attributes); } dub-1.40.0/source/dub/internal/io/mockfs.d000066400000000000000000000374441477246567400203340ustar00rootroot00000000000000/******************************************************************************* An unittest implementation of `Filesystem` *******************************************************************************/ module dub.internal.io.mockfs; public import dub.internal.io.filesystem; static import dub.internal.vibecompat.core.file; import std.algorithm; import std.exception; import std.range; import std.string; /// Ditto public final class MockFS : Filesystem { /// private FSEntry cwd; /// private FSEntry root; /// public this () scope { this.root = this.cwd = new FSEntry(); } public override NativePath getcwd () const scope { return this.cwd.path(); } /// public override bool existsDirectory (in NativePath path) const scope { auto entry = this.lookup(path); return entry !is null && entry.isDirectory(); } /// Ditto public override void mkdir (in NativePath path) scope { import std.algorithm.iteration : reduce; const abs = path.absolute(); auto segments = path.bySegment; // `library-nonet` (using vibe.d) has an empty front for absolute path, // while our built-in module (in vibecompat) does not. 
if (abs && segments.front.name.length == 0) segments.popFront(); reduce!((FSEntry dir, segment) => dir.mkdir(segment.name))( (abs ? this.root : this.cwd), segments); } /// Ditto public override bool existsFile (in NativePath path) const scope { auto entry = this.lookup(path); return entry !is null && entry.isFile(); } /// Ditto public override void writeFile (in NativePath path, const(ubyte)[] data) scope { enforce(!path.endsWithSlash(), "Cannot write to directory: " ~ path.toNativeString()); if (auto file = this.lookup(path)) { // If the file already exists, override it enforce(file.isFile(), "Trying to write to directory: " ~ path.toNativeString()); file.content = data.dup; } else { auto p = this.getParent(path); auto file = new FSEntry(p, FSEntry.Type.File, path.head.name()); file.content = data.dup; p.children ~= file; } } /// Reads a file, returns the content as `ubyte[]` public override ubyte[] readFile (in NativePath path) const scope { auto entry = this.lookup(path); enforce(entry !is null, "No such file: " ~ path.toNativeString()); enforce(entry.isFile(), "Trying to read a directory"); // This is a hack to make poisoning a file possible. // However, it is rather crude and doesn't allow to poison directory. // Consider introducing a derived type to allow it. assert(entry.content != "poison".representation, "Trying to access poisoned path: " ~ path.toNativeString()); return entry.content.dup; } /// Reads a file, returns the content as text public override string readText (in NativePath path) const scope { import std.utf : validate; const content = this.readFile(path); // Ignore BOM: If it's needed for a test, add support for it. validate(cast(const(char[])) content); // `readFile` just `dup` the content, so it's safe to cast. 
return cast(string) content; } /// Ditto public override IterateDirDg iterateDirectory (in NativePath path) scope { enforce(this.existsDirectory(path), path.toNativeString() ~ " does not exists or is not a directory"); auto dir = this.lookup(path); int iterator(scope int delegate(ref dub.internal.vibecompat.core.file.FileInfo) del) { foreach (c; dir.children) { dub.internal.vibecompat.core.file.FileInfo fi; fi.name = c.name; fi.timeModified = c.attributes.modification; final switch (c.attributes.type) { case FSEntry.Type.File: fi.size = c.content.length; break; case FSEntry.Type.Directory: fi.isDirectory = true; break; } if (auto res = del(fi)) return res; } return 0; } return &iterator; } /** Remove a file * * Always error if the target is a directory. * Does not error if the target does not exists * and `force` is set to `true`. * * Params: * path = Path to the file to remove * force = Whether to ignore non-existing file, * default to `false`. */ public override void removeFile (in NativePath path, bool force = false) { import std.algorithm.searching : countUntil; assert(!path.empty, "Empty path provided to `removeFile`"); enforce(!path.endsWithSlash(), "Cannot remove file with directory path: " ~ path.toNativeString()); auto p = this.getParent(path, force); const idx = p.children.countUntil!(e => e.name == path.head.name()); if (idx < 0) { enforce(force, "removeFile: No such file: " ~ path.toNativeString()); } else { enforce(p.children[idx].attributes.type == FSEntry.Type.File, "removeFile called on a directory: " ~ path.toNativeString()); p.children = p.children[0 .. idx] ~ p.children[idx + 1 .. $]; } } /** Remove a directory * * Remove an existing empty directory. * If `force` is set to `true`, no error will be thrown * if the directory is empty or non-existing. * * Params: * path = Path to the directory to remove * force = Whether to ignore non-existing / non-empty directories, * default to `false`. 
*/ public override void removeDir (in NativePath path, bool force = false) { import std.algorithm.searching : countUntil; assert(!path.empty, "Empty path provided to `removeFile`"); auto p = this.getParent(path, force); const idx = p.children.countUntil!(e => e.name == path.head.name()); if (idx < 0) { enforce(force, "removeDir: No such directory: " ~ path.toNativeString()); } else { enforce(p.children[idx].attributes.type == FSEntry.Type.Directory, "removeDir called on a file: " ~ path.toNativeString()); enforce(force || p.children[idx].children.length == 0, "removeDir called on non-empty directory: " ~ path.toNativeString()); p.children = p.children[0 .. idx] ~ p.children[idx + 1 .. $]; } } /// Ditto public override void setTimes (in NativePath path, in SysTime accessTime, in SysTime modificationTime) { auto e = this.lookup(path); enforce(e !is null, "setTimes: No such file or directory: " ~ path.toNativeString()); e.setTimes(accessTime, modificationTime); } /// Ditto public override void setAttributes (in NativePath path, uint attributes) { auto e = this.lookup(path); enforce(e !is null, "setAttributes: No such file or directory: " ~ path.toNativeString()); e.setAttributes(attributes); } /** * Converts an `Filesystem` and its children to a `ZipFile` */ public ubyte[] serializeToZip (string rootPath) { import std.path; import std.zip; scope z = new ZipArchive(); void addToZip(scope string dir, scope FSEntry e) { auto m = new ArchiveMember(); m.name = dir.buildPath(e.name); m.fileAttributes = e.attributes.attrs; m.time = e.attributes.modification; final switch (e.attributes.type) { case FSEntry.Type.Directory: // We need to ensure the directory entry ends with a slash // otherwise it will be considered as a file. 
if (m.name[$-1] != '/') m.name ~= '/'; z.addMember(m); foreach (c; e.children) addToZip(m.name, c); break; case FSEntry.Type.File: m.expandedData = e.content; z.addMember(m); } } addToZip(rootPath, this.cwd); return cast(ubyte[]) z.build(); } /** Get the parent `FSEntry` of a `NativePath` * * If the parent doesn't exist, an `Exception` will be thrown * unless `silent` is provided. If the parent path is a file, * an `Exception` will be thrown regardless of `silent`. * * Params: * path = The path to look up the parent for * silent = Whether to error on non-existing parent, * default to `false`. */ protected inout(FSEntry) getParent(NativePath path, bool silent = false) inout return scope { // Relative path in the current directory if (!path.hasParentPath()) return this.cwd; // If we're not in the right `FSEntry`, recurse const parentPath = path.parentPath(); auto p = this.lookup(parentPath); enforce(silent || p !is null, "No such directory: " ~ parentPath.toNativeString()); enforce(p is null || p.attributes.type == FSEntry.Type.Directory, "Parent path is not a directory: " ~ parentPath.toNativeString()); return p; } /// Get an arbitrarily nested children node protected inout(FSEntry) lookup(NativePath path) inout return scope { import std.algorithm.iteration : reduce; const abs = path.absolute(); auto segments = path.bySegment; // `library-nonet` (using vibe.d) has an empty front for absolute path, // while our built-in module (in vibecompat) does not. if (abs && segments.front.name.length == 0) segments.popFront(); // Casting away constness because no good way to do this with `inout`, // but `FSEntry.lookup` is `inout` too. return cast(inout(FSEntry)) reduce!( (FSEntry dir, segment) => dir ? dir.lookup(segment.name) : null) (cast() (abs ? 
this.root : this.cwd), segments); } } /******************************************************************************* Represents a node on the filesystem This class encapsulates operations which are node specific, such as looking up a child node, adding one, or setting properties. *******************************************************************************/ public class FSEntry { /// Type of file system entry public enum Type : ubyte { Directory, File, } /// List FSEntry attributes protected struct Attributes { /// The type of FSEntry, see `FSEntry.Type` public Type type; /// System-specific attributes for this `FSEntry` public uint attrs; /// Last access time public SysTime access; /// Last modification time public SysTime modification; } /// Ditto protected Attributes attributes; /// The name of this node protected string name; /// The parent of this entry (can be null for the root) protected FSEntry parent; union { /// Children for this FSEntry (with type == Directory) protected FSEntry[] children; /// Content for this FDEntry (with type == File) protected ubyte[] content; } /// Creates a new FSEntry package(dub) this (FSEntry p, Type t, string n) { // Avoid 'DOS File Times cannot hold dates prior to 1980.' 
exception import std.datetime.date; SysTime DefaultTime = SysTime(DateTime(2020, 01, 01)); assert(n.length > 0, "FSentry.this(%s, %s, %s) called with empty name" .format(p.path(), t, n)); this.attributes.type = t; this.parent = p; this.name = n; this.attributes.access = DefaultTime; this.attributes.modification = DefaultTime; } /// Create the root of the filesystem, only usable from this module package(dub) this () { import std.datetime.date; SysTime DefaultTime = SysTime(DateTime(2020, 01, 01)); this.attributes.type = Type.Directory; this.attributes.access = DefaultTime; this.attributes.modification = DefaultTime; } /// Get a direct children node, returns `null` if it can't be found protected inout(FSEntry) lookup(string name) inout return scope { assert(!name.canFind('/')); if (name == ".") return this; if (name == "..") return this.parent; foreach (c; this.children) if (c.name == name) return c; return null; } /*+************************************************************************* Utility function Below this banners are functions that are provided for the convenience of writing tests for `Dub`. 
***************************************************************************/ /// Prints a visual representation of the filesystem to stdout for debugging public void print(bool content = false) const scope { import std.range : repeat; static import std.stdio; size_t indent; for (auto p = &this.parent; (*p) !is null; p = &p.parent) indent++; // Don't print anything (even a newline) for root if (this.parent is null) std.stdio.write('/'); else std.stdio.write('|', '-'.repeat(indent), ' ', this.name, ' '); final switch (this.attributes.type) { case Type.Directory: std.stdio.writeln('(', this.children.length, " entries):"); foreach (c; this.children) c.print(content); break; case Type.File: if (!content) std.stdio.writeln('(', this.content.length, " bytes)"); else if (this.name.endsWith(".json") || this.name.endsWith(".sdl")) std.stdio.writeln('(', this.content.length, " bytes): ", cast(string) this.content); else std.stdio.writeln('(', this.content.length, " bytes): ", this.content); break; } } /*+************************************************************************* Public filesystem functions Below this banners are functions which mimic the behavior of a file system. 
***************************************************************************/ /// Returns: The `path` of this FSEntry public NativePath path () const scope { if (this.parent is null) return NativePath("/"); auto thisPath = this.parent.path ~ this.name; thisPath.endsWithSlash = (this.attributes.type == Type.Directory); return thisPath; } /// Implements `mkdir -p`, returns the created directory public FSEntry mkdir (string name) scope { // Check if the child already exists if (auto child = this.lookup(name)) return child; this.children ~= new FSEntry(this, Type.Directory, name); return this.children[$-1]; } /// public bool isFile () const scope { return this.attributes.type == Type.File; } /// public bool isDirectory () const scope { return this.attributes.type == Type.Directory; } /// Implement `std.file.setTimes` public void setTimes (in SysTime accessTime, in SysTime modificationTime) { this.attributes.access = accessTime; this.attributes.modification = modificationTime; } /// Implement `std.file.setAttributes` public void setAttributes (uint attributes) { this.attributes.attrs = attributes; } } dub-1.40.0/source/dub/internal/io/realfs.d000066400000000000000000000050441477246567400203150ustar00rootroot00000000000000/******************************************************************************* An implementation of `Filesystem` using vibe.d functions *******************************************************************************/ module dub.internal.io.realfs; public import dub.internal.io.filesystem; /// Ditto public final class RealFS : Filesystem { static import dub.internal.vibecompat.core.file; static import std.file; /// private NativePath path_; /// public this (NativePath cwd = NativePath(std.file.getcwd())) scope @safe pure nothrow @nogc { this.path_ = cwd; } public override NativePath getcwd () const scope { return this.path_; } /// protected override bool existsDirectory (in NativePath path) const scope { return 
dub.internal.vibecompat.core.file.existsDirectory(path); } /// Ditto protected override void mkdir (in NativePath path) scope { dub.internal.vibecompat.core.file.ensureDirectory(path); } /// Ditto protected override bool existsFile (in NativePath path) const scope { return dub.internal.vibecompat.core.file.existsFile(path); } /// Ditto protected override void writeFile (in NativePath path, const(ubyte)[] data) scope { return dub.internal.vibecompat.core.file.writeFile(path, data); } /// Reads a file, returns the content as `ubyte[]` public override ubyte[] readFile (in NativePath path) const scope { return cast(ubyte[]) std.file.read(path.toNativeString()); } /// Ditto protected override string readText (in NativePath path) const scope { return dub.internal.vibecompat.core.file.readText(path); } /// Ditto protected override IterateDirDg iterateDirectory (in NativePath path) scope { return dub.internal.vibecompat.core.file.iterateDirectory(path); } /// Ditto protected override void removeFile (in NativePath path, bool force = false) scope { return std.file.remove(path.toNativeString()); } /// public override void removeDir (in NativePath path, bool force = false) { if (force) std.file.rmdirRecurse(path.toNativeString()); else std.file.rmdir(path.toNativeString()); } /// Ditto protected override void setTimes (in NativePath path, in SysTime accessTime, in SysTime modificationTime) { std.file.setTimes( path.toNativeString(), accessTime, modificationTime); } /// Ditto protected override void setAttributes (in NativePath path, uint attributes) { std.file.setAttributes(path.toNativeString(), attributes); } } dub-1.40.0/source/dub/internal/libInputVisitor.d000066400000000000000000000035311477246567400215770ustar00rootroot00000000000000module dub.internal.libInputVisitor; version (Have_libInputVisitor) public import libInputVisitor; else: /++ Copyright (C) 2012 Nick Sabalausky This program is free software. 
It comes without any warranty, to the extent permitted by applicable law. You can redistribute it and/or modify it under the terms of the Do What The Fuck You Want To Public License, Version 2, as published by Sam Hocevar. See http://www.wtfpl.net/ for more details. DO WHAT THE FUCK YOU WANT TO PUBLIC LICENSE Version 2, December 2004 Copyright (C) 2004 Sam Hocevar Everyone is permitted to copy and distribute verbatim or modified copies of this license document, and changing it is allowed as long as the name is changed. DO WHAT THE FUCK YOU WANT TO PUBLIC LICENSE TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION 0. You just DO WHAT THE FUCK YOU WANT TO. +/ /++ Should work with DMD 2.059 and up For more info on this, see: http://semitwist.com/articles/article/view/combine-coroutines-and-input-ranges-for-dead-simple-d-iteration +/ import core.thread; class InputVisitor(Obj, Elem) : Fiber { bool started = false; Obj obj; this(Obj obj) { this.obj = obj; super(&run); } private void run() { obj.visit(this); } private void ensureStarted() { if(!started) { call(); started = true; } } // Member 'front' must be a function due to DMD Issue #5403 private Elem _front; @property Elem front() { ensureStarted(); return _front; } void popFront() { ensureStarted(); call(); } @property bool empty() { ensureStarted(); return state == Fiber.State.TERM; } void yield(Elem elem) { _front = elem; Fiber.yield(); } } template inputVisitor(Elem) { @property InputVisitor!(Obj, Elem) inputVisitor(Obj)(Obj obj) { return new InputVisitor!(Obj, Elem)(obj); } } dub-1.40.0/source/dub/internal/logging.d000066400000000000000000000276131477246567400200660ustar00rootroot00000000000000/** Handles all the console output of the Dub package manager, by providing useful methods for handling colored text. The module also disables colors when stdout and stderr are not a TTY in order to avoid ASCII escape sequences in piped output. 
The module can auto-detect and configure itself in this regard by calling initLogging() at the beginning of the program. But, whether to color text or not can also be set manually with setLoggingColorsEnabled(bool). The output for the log levels error, warn and info is formatted like this: " " '----------' fixed width the "tag" part can be colored (most often will be) and always has a fixed width, which is defined as a constant at the beginning of this module. The output for the log levels debug and diagnostic will be just the plain string. There are some default tag string and color values for some logging levels: - warn: "Warning", yellow bold - error: "Error", red bold Actually, for error and warn levels, the tag color is fixed to the ones listed above. Also, the default tag string for the info level is "" (the empty string) and the default color is white (usually it's manually set when calling logInfo with the wanted tag string, but this allows to just logInfo("text") without having to worry about the tag if it's not needed). Usage: After initializing the logging module with initLogging(), the functions logDebug(..), logDiagnostic(..), logInfo(..), logWarning(..) and logError(..) can be used to print log messages. Whether the messages are printed on stdout or stderr depends on the log level (warning and error go to stderr). The log(..) function can also be used. Check the signature and documentation of the functions for more information. The minimum log level to print can be configured using setLogLevel(..), and whether to color outputted text or not can be set with setLoggingColorsEnabled(..) The color(str, color) function can be used to color text within a log message, for instance like this: logInfo("Tag", Color.green, "My %s message", "colored".color(Color.red)) Copyright: © 2018 Giacomo De Lazzari License: Subject to the terms of the MIT license, as written in the included LICENSE file. 
Authors: Giacomo De Lazzari */ module dub.internal.logging; import std.stdio; import std.array; import std.format; import std.string; import dub.internal.colorize : fg, mode; /** An enum listing possible colors for terminal output, useful to set the color of a tag. Re-exported from d-colorize in dub.internal.colorize. See the enum definition there for a list of possible values. */ public alias Color = fg; /** An enum listing possible text "modes" for terminal output, useful to set the text to bold, underline, blinking, etc... Re-exported from d-colorize in dub.internal.colorize. See the enum definition there for a list of possible values. */ public alias Mode = mode; /// Defines the current width of logging tags for justifying in chars. /// Can be manipulated through push and pop. struct TagWidth { import core.atomic; private shared int value = 12; private shared int index; private shared int[16] stack; /// Gets the tag width in chars public int get() { return value; } /// Changes the tag width for all following logging calls, until $(LREF pop) is called. public void push(int width) { int currentIndex = index; index.atomicOp!"+="(1); stack[currentIndex] = value; assert(index < stack.length, "too many TagWidth.push without pop"); value = width; } /// Reverts the last $(LREF push) call. public void pop() { assert(index > 0); value = stack[index.atomicOp!"-="(1)]; } } /// The global tag width instance used for logging. 
public __gshared TagWidth tagWidth; /// Possible log levels supported enum LogLevel { debug_, diagnostic, info, warn, error, none } // The current minimum log level to be printed private shared LogLevel _minLevel = LogLevel.info; /* Whether to print text with colors or not, defaults to true but will be set to false in initLogging() if stdout or stderr are not a TTY (which means the output is probably being piped and we don't want ASCII escape chars in it) */ private shared bool _printColors = true; /// Ditto public bool hasColors () @trusted nothrow @nogc { return _printColors; } // isatty() is used in initLogging() to detect whether or not we are on a TTY extern (C) int isatty(int); /** This function must be called at the beginning for the program, before any logging occurs. It will detect whether or not stdout/stderr are a console/TTY and will consequently disable colored output if needed. Also, if a NO_COLOR environment variable is defined, colors are disabled (https://no-color.org/). Forgetting to call the function will result in ASCII escape sequences in the piped output, probably an undesirable thing. 
*/ void initLogging() { import std.process : environment; import core.stdc.stdio; _printColors = environment.get("NO_COLOR") == ""; version (Windows) { version (CRuntime_DigitalMars) { if (!isatty(core.stdc.stdio.stdout._file) || !isatty(core.stdc.stdio.stderr._file)) _printColors = false; } else version (CRuntime_Microsoft) { if (!isatty(fileno(core.stdc.stdio.stdout)) || !isatty(fileno(core.stdc.stdio.stderr))) _printColors = false; } else _printColors = false; } else version (Posix) { import core.sys.posix.unistd; if (!isatty(STDERR_FILENO) || !isatty(STDOUT_FILENO)) _printColors = false; } } /// Sets the minimum log level to be printed void setLogLevel(LogLevel level) nothrow { _minLevel = level; } /// Gets the minimum log level to be printed LogLevel getLogLevel() { return _minLevel; } /// Set whether to print colors or not void setLoggingColorsEnabled(bool enabled) { _printColors = enabled; } /** Shorthand function to log a message with debug/diagnostic level, no tag string or tag color required (since there will be no tag). Params: fmt = See http://dlang.org/phobos/std_format.html#format-string args = Arguments matching the format string */ void logDebug(T...)(string fmt, lazy T args) nothrow { log(LogLevel.debug_, false, "", Color.init, fmt, args); } /// ditto void logDiagnostic(T...)(string fmt, lazy T args) nothrow { log(LogLevel.diagnostic, false, "", Color.init, fmt, args); } /** Shorthand function to log a message with info level, with custom tag string and tag color. 
Params: tag = The string the tag at the beginning of the line should contain tagColor = The color the tag string should have fmt = See http://dlang.org/phobos/std_format.html#format-string args = Arguments matching the format string */ void logInfo(T...)(string tag, Color tagColor, string fmt, lazy T args) nothrow { log(LogLevel.info, false, tag, tagColor, fmt, args); } /** Shorthand function to log a message with info level, this version prints an empty tag automatically (which is different from not having a tag - in this case there will be an indentation of tagWidth chars on the left anyway). Params: fmt = See http://dlang.org/phobos/std_format.html#format-string args = Arguments matching the format string */ void logInfo(T...)(string fmt, lazy T args) nothrow if (!is(T[0] : Color)) { log(LogLevel.info, false, "", Color.init, fmt, args); } /** Shorthand function to log a message with info level, this version doesn't print a tag at all, it effectively just prints the given string. Params: fmt = See http://dlang.org/phobos/std_format.html#format-string args = Arguments matching the format string */ void logInfoNoTag(T...)(string fmt, lazy T args) nothrow if (!is(T[0] : Color)) { log(LogLevel.info, true, "", Color.init, fmt, args); } /** Shorthand function to log a message with warning level, with custom tag string. The tag color is fixed to yellow. Params: tag = The string the tag at the beginning of the line should contain fmt = See http://dlang.org/phobos/std_format.html#format-string args = Arguments matching the format string */ void logWarnTag(T...)(string tag, string fmt, lazy T args) nothrow { log(LogLevel.warn, false, tag, Color.yellow, fmt, args); } /** Shorthand function to log a message with warning level, using the default tag "Warning". The tag color is also fixed to yellow. 
Params: fmt = See http://dlang.org/phobos/std_format.html#format-string args = Arguments matching the format string */ void logWarn(T...)(string fmt, lazy T args) nothrow { log(LogLevel.warn, false, "Warning", Color.yellow, fmt, args); } /** Shorthand function to log a message with error level, with custom tag string. The tag color is fixed to red. Params: tag = The string the tag at the beginning of the line should contain fmt = See http://dlang.org/phobos/std_format.html#format-string args = Arguments matching the format string */ void logErrorTag(T...)(string tag, string fmt, lazy T args) nothrow { log(LogLevel.error, false, tag, Color.red, fmt, args); } /** Shorthand function to log a message with error level, using the default tag "Error". The tag color is also fixed to red. Params: fmt = See http://dlang.org/phobos/std_format.html#format-string args = Arguments matching the format string */ void logError(T...)(string fmt, lazy T args) nothrow { log(LogLevel.error, false, "Error", Color.red, fmt, args); } /** Log a message with the specified log level and with the specified tag string and color. If the log level is debug or diagnostic, the tag is not printed thus the tag string and tag color will be ignored. If the log level is error or warning, the tag will be in bold text. Also the tag can be disabled (for any log level) by passing true as the second argument. 
Params: level = The log level for the logged message disableTag = Setting this to true disables the tag, no matter what tag = The string the tag at the beginning of the line should contain tagColor = The color the tag string should have fmt = See http://dlang.org/phobos/std_format.html#format-string args = Arguments matching the format string */ void log(T...)( LogLevel level, bool disableTag, string tag, Color tagColor, string fmt, lazy T args ) nothrow { if (level < _minLevel) return; auto hasTag = true; if (level <= LogLevel.diagnostic) hasTag = false; if (disableTag) hasTag = false; auto boldTag = false; if (level >= LogLevel.warn) boldTag = true; try { string result = format(fmt, args); if (hasTag) result = tag.rightJustify(tagWidth.get, ' ').color(tagColor, boldTag ? Mode.bold : Mode.init) ~ " " ~ result; import dub.internal.colorize : cwrite; File output = (level <= LogLevel.info) ? stdout : stderr; if (output.isOpen) { output.cwrite(result, "\n"); output.flush(); } } catch (Exception e) { debug assert(false, e.msg); } } /** Colors the specified string with the specified color. The function is used to print colored text within a log message. The function also checks whether color output is enabled or disabled (when not outputting to a TTY) and, in the last case, just returns the plain string. This allows to use it like so: logInfo("Tag", Color.green, "My %s log message", "colored".color(Color.red)); without worrying whether or not colored output is enabled or not. Also a mode can be specified, such as bold/underline/etc... Params: str = The string to color c = The color to apply m = An optional mode, such as bold/underline/etc... */ string color(const string str, const Color c, const Mode m = Mode.init) { import dub.internal.colorize; if (_printColors) return dub.internal.colorize.color(str, c, bg.init, m); else return str; } /** This function is the same as the above one, but just accepts a mode. 
It's useful, for instance, when outputting bold text without changing the color. Params: str = The string to color m = The mode, such as bold/underline/etc... */ string color(const string str, const Mode m = Mode.init) { import dub.internal.colorize; if (_printColors) return dub.internal.colorize.color(str, fg.init, bg.init, m); else return str; } dub-1.40.0/source/dub/internal/sdlang/000077500000000000000000000000001477246567400175325ustar00rootroot00000000000000dub-1.40.0/source/dub/internal/sdlang/ast.d000066400000000000000000001470551477246567400205020ustar00rootroot00000000000000// SDLang-D // Written in the D programming language. module dub.internal.sdlang.ast; version (Have_sdlang_d) public import sdlang.ast; else: import std.algorithm; import std.array; import std.conv; import std.range; import std.string; version(sdlangUnittest) version(unittest) { import std.stdio; import std.exception; } import dub.internal.sdlang.exception; import dub.internal.sdlang.token; import dub.internal.sdlang.util; import dub.internal.dyaml.stdsumtype; class Attribute { Value value; Location location; private Tag _parent; /// Get parent tag. To set a parent, attach this Attribute to its intended /// parent tag by calling 'Tag.add(...)', or by passing it to /// the parent tag's constructor. @property Tag parent() { return _parent; } private string _namespace; @property string namespace() { return _namespace; } /// Not particularly efficient, but it works. @property void namespace(string value) { if(_parent && _namespace != value) { // Remove auto saveParent = _parent; if(_parent) this.remove(); // Change namespace _namespace = value; // Re-add if(saveParent) saveParent.add(this); } else _namespace = value; } private string _name; /// Not including namespace. Use 'fullName' if you want the namespace included. @property string name() { return _name; } /// Not the most efficient, but it works. 
@property void name(string value) { if(_parent && _name != value) { _parent.updateId++; void removeFromGroupedLookup(string ns) { // Remove from _parent._attributes[ns] auto sameNameAttrs = _parent._attributes[ns][_name]; auto targetIndex = sameNameAttrs.countUntil(this); _parent._attributes[ns][_name].removeIndex(targetIndex); } // Remove from _parent._tags removeFromGroupedLookup(_namespace); removeFromGroupedLookup("*"); // Change name _name = value; // Add to new locations in _parent._attributes _parent._attributes[_namespace][_name] ~= this; _parent._attributes["*"][_name] ~= this; } else _name = value; } @property string fullName() { return _namespace==""? _name : text(_namespace, ":", _name); } this(string namespace, string name, Value value, Location location = Location(0, 0, 0)) { this._namespace = namespace; this._name = name; this.location = location; this.value = value; } this(string name, Value value, Location location = Location(0, 0, 0)) { this._namespace = ""; this._name = name; this.location = location; this.value = value; } /// Removes 'this' from its parent, if any. Returns 'this' for chaining. /// Inefficient ATM, but it works. 
Attribute remove() { if(!_parent) return this; void removeFromGroupedLookup(string ns) { // Remove from _parent._attributes[ns] auto sameNameAttrs = _parent._attributes[ns][_name]; auto targetIndex = sameNameAttrs.countUntil(this); _parent._attributes[ns][_name].removeIndex(targetIndex); } // Remove from _parent._attributes removeFromGroupedLookup(_namespace); removeFromGroupedLookup("*"); // Remove from _parent.allAttributes auto allAttrsIndex = _parent.allAttributes.countUntil(this); _parent.allAttributes.removeIndex(allAttrsIndex); // Remove from _parent.attributeIndices auto sameNamespaceAttrs = _parent.attributeIndices[_namespace]; auto attrIndicesIndex = sameNamespaceAttrs.countUntil(allAttrsIndex); _parent.attributeIndices[_namespace].removeIndex(attrIndicesIndex); // Fixup other indices foreach(ns, ref nsAttrIndices; _parent.attributeIndices) foreach(k, ref v; nsAttrIndices) if(v > allAttrsIndex) v--; _parent.removeNamespaceIfEmpty(_namespace); _parent.updateId++; _parent = null; return this; } override bool opEquals(Object o) { auto a = cast(Attribute)o; if(!a) return false; return _namespace == a._namespace && _name == a._name && value == a.value; } string toSDLString()() { Appender!string sink; this.toSDLString(sink); return sink.data; } void toSDLString(Sink)(ref Sink sink) if(isOutputRange!(Sink,char)) { if(_namespace != "") { sink.put(_namespace); sink.put(':'); } sink.put(_name); sink.put('='); value.toSDLString(sink); } } class Tag { Location location; Value[] values; private Tag _parent; /// Get parent tag. To set a parent, attach this Tag to its intended /// parent tag by calling 'Tag.add(...)', or by passing it to /// the parent tag's constructor. @property Tag parent() { return _parent; } private string _namespace; @property string namespace() { return _namespace; } /// Not particularly efficient, but it works. 
@property void namespace(string value) { if(_parent && _namespace != value) { // Remove auto saveParent = _parent; if(_parent) this.remove(); // Change namespace _namespace = value; // Re-add if(saveParent) saveParent.add(this); } else _namespace = value; } private string _name; /// Not including namespace. Use 'fullName' if you want the namespace included. @property string name() { return _name; } /// Not the most efficient, but it works. @property void name(string value) { if(_parent && _name != value) { _parent.updateId++; void removeFromGroupedLookup(string ns) { // Remove from _parent._tags[ns] auto sameNameTags = _parent._tags[ns][_name]; auto targetIndex = sameNameTags.countUntil(this); _parent._tags[ns][_name].removeIndex(targetIndex); } // Remove from _parent._tags removeFromGroupedLookup(_namespace); removeFromGroupedLookup("*"); // Change name _name = value; // Add to new locations in _parent._tags _parent._tags[_namespace][_name] ~= this; _parent._tags["*"][_name] ~= this; } else _name = value; } /// This tag's name, including namespace if one exists. @property string fullName() { return _namespace==""? _name : text(_namespace, ":", _name); } // Tracks dirtiness. This is incremented every time a change is made which // could invalidate existing ranges. This way, the ranges can detect when // they've been invalidated. private size_t updateId=0; this(Tag parent = null) { if(parent) parent.add(this); } this( string namespace, string name, Value[] values=null, Attribute[] attributes=null, Tag[] children=null ) { this(null, namespace, name, values, attributes, children); } this( Tag parent, string namespace, string name, Value[] values=null, Attribute[] attributes=null, Tag[] children=null ) { this._namespace = namespace; this._name = name; if(parent) parent.add(this); this.values = values; this.add(attributes); this.add(children); } private Attribute[] allAttributes; // In same order as specified in SDL file. 
private Tag[] allTags; // In same order as specified in SDL file. private string[] allNamespaces; // In same order as specified in SDL file. private size_t[][string] attributeIndices; // allAttributes[ attributes[namespace][i] ] private size_t[][string] tagIndices; // allTags[ tags[namespace][i] ] private Attribute[][string][string] _attributes; // attributes[namespace or "*"][name][i] private Tag[][string][string] _tags; // tags[namespace or "*"][name][i] /// Adds a Value, Attribute, Tag (or array of such) as a member/child of this Tag. /// Returns 'this' for chaining. /// Throws 'SDLangValidationException' if trying to add an Attribute or Tag /// that already has a parent. Tag add(Value val) { values ~= val; updateId++; return this; } ///ditto Tag add(Value[] vals) { foreach(val; vals) add(val); return this; } ///ditto Tag add(Attribute attr) { if(attr._parent) { throw new SDLangValidationException( "Attribute is already attached to a parent tag. "~ "Use Attribute.remove() before adding it to another tag." ); } if(!allNamespaces.canFind(attr._namespace)) allNamespaces ~= attr._namespace; attr._parent = this; allAttributes ~= attr; attributeIndices[attr._namespace] ~= allAttributes.length-1; _attributes[attr._namespace][attr._name] ~= attr; _attributes["*"] [attr._name] ~= attr; updateId++; return this; } ///ditto Tag add(Attribute[] attrs) { foreach(attr; attrs) add(attr); return this; } ///ditto Tag add(Tag tag) { if(tag._parent) { throw new SDLangValidationException( "Tag is already attached to a parent tag. "~ "Use Tag.remove() before adding it to another tag." ); } if(!allNamespaces.canFind(tag._namespace)) allNamespaces ~= tag._namespace; tag._parent = this; allTags ~= tag; tagIndices[tag._namespace] ~= allTags.length-1; _tags[tag._namespace][tag._name] ~= tag; _tags["*"] [tag._name] ~= tag; updateId++; return this; } ///ditto Tag add(Tag[] tags) { foreach(tag; tags) add(tag); return this; } /// Removes 'this' from its parent, if any. 
Returns 'this' for chaining. /// Inefficient ATM, but it works. Tag remove() { if(!_parent) return this; void removeFromGroupedLookup(string ns) { // Remove from _parent._tags[ns] auto sameNameTags = _parent._tags[ns][_name]; auto targetIndex = sameNameTags.countUntil(this); _parent._tags[ns][_name].removeIndex(targetIndex); } // Remove from _parent._tags removeFromGroupedLookup(_namespace); removeFromGroupedLookup("*"); // Remove from _parent.allTags auto allTagsIndex = _parent.allTags.countUntil(this); _parent.allTags.removeIndex(allTagsIndex); // Remove from _parent.tagIndices auto sameNamespaceTags = _parent.tagIndices[_namespace]; auto tagIndicesIndex = sameNamespaceTags.countUntil(allTagsIndex); _parent.tagIndices[_namespace].removeIndex(tagIndicesIndex); // Fixup other indices foreach(ns, ref nsTagIndices; _parent.tagIndices) foreach(k, ref v; nsTagIndices) if(v > allTagsIndex) v--; _parent.removeNamespaceIfEmpty(_namespace); _parent.updateId++; _parent = null; return this; } private void removeNamespaceIfEmpty(string namespace) { // If namespace has no attributes, remove it from attributeIndices/_attributes if(namespace in attributeIndices && attributeIndices[namespace].length == 0) { attributeIndices.remove(namespace); _attributes.remove(namespace); } // If namespace has no tags, remove it from tagIndices/_tags if(namespace in tagIndices && tagIndices[namespace].length == 0) { tagIndices.remove(namespace); _tags.remove(namespace); } // If namespace is now empty, remove it from allNamespaces if( namespace !in tagIndices && namespace !in attributeIndices ) { auto allNamespacesIndex = allNamespaces.length - allNamespaces.find(namespace).length; allNamespaces = allNamespaces[0..allNamespacesIndex] ~ allNamespaces[allNamespacesIndex+1..$]; } } struct NamedMemberRange(T, string membersGrouped) { private Tag tag; private string namespace; // "*" indicates "all namespaces" (ok since it's not a valid namespace name) private string name; private size_t updateId; // 
Tag's updateId when this range was created. this(Tag tag, string namespace, string name, size_t updateId) { this.tag = tag; this.namespace = namespace; this.name = name; this.updateId = updateId; frontIndex = 0; if( namespace in mixin("tag."~membersGrouped) && name in mixin("tag."~membersGrouped~"[namespace]") ) endIndex = mixin("tag."~membersGrouped~"[namespace][name].length"); else endIndex = 0; } invariant() { assert( this.updateId == tag.updateId, "This range has been invalidated by a change to the tag." ); } @property bool empty() { return frontIndex == endIndex; } private size_t frontIndex; @property T front() { return this[0]; } void popFront() { if(empty) throw new SDLangRangeException("Range is empty"); frontIndex++; } private size_t endIndex; // One past the last element @property T back() { return this[$-1]; } void popBack() { if(empty) throw new SDLangRangeException("Range is empty"); endIndex--; } alias length opDollar; @property size_t length() { return endIndex - frontIndex; } @property typeof(this) save() { auto r = typeof(this)(this.tag, this.namespace, this.name, this.updateId); r.frontIndex = this.frontIndex; r.endIndex = this.endIndex; return r; } typeof(this) opSlice() { return save(); } typeof(this) opSlice(size_t start, size_t end) { auto r = save(); r.frontIndex = this.frontIndex + start; r.endIndex = this.frontIndex + end; if( r.frontIndex > this.endIndex || r.endIndex > this.endIndex || r.frontIndex > r.endIndex ) throw new SDLangRangeException("Slice out of range"); return r; } T opIndex(size_t index) { if(empty) throw new SDLangRangeException("Range is empty"); return mixin("tag."~membersGrouped~"[namespace][name][frontIndex+index]"); } } struct MemberRange(T, string allMembers, string memberIndices, string membersGrouped) { private Tag tag; private string namespace; // "*" indicates "all namespaces" (ok since it's not a valid namespace name) private bool isMaybe; private size_t updateId; // Tag's updateId when this range was created. 
private size_t initialEndIndex; this(Tag tag, string namespace, bool isMaybe) { this.tag = tag; this.namespace = namespace; this.updateId = tag.updateId; this.isMaybe = isMaybe; frontIndex = 0; if(namespace == "*") initialEndIndex = mixin("tag."~allMembers~".length"); else if(namespace in mixin("tag."~memberIndices)) initialEndIndex = mixin("tag."~memberIndices~"[namespace].length"); else initialEndIndex = 0; endIndex = initialEndIndex; } invariant() { assert( this.updateId == tag.updateId, "This range has been invalidated by a change to the tag." ); } @property bool empty() { return frontIndex == endIndex; } private size_t frontIndex; @property T front() { return this[0]; } void popFront() { if(empty) throw new SDLangRangeException("Range is empty"); frontIndex++; } private size_t endIndex; // One past the last element @property T back() { return this[$-1]; } void popBack() { if(empty) throw new SDLangRangeException("Range is empty"); endIndex--; } alias length opDollar; @property size_t length() { return endIndex - frontIndex; } @property typeof(this) save() { auto r = typeof(this)(this.tag, this.namespace, this.isMaybe); r.frontIndex = this.frontIndex; r.endIndex = this.endIndex; r.initialEndIndex = this.initialEndIndex; r.updateId = this.updateId; return r; } typeof(this) opSlice() { return save(); } typeof(this) opSlice(size_t start, size_t end) { auto r = save(); r.frontIndex = this.frontIndex + start; r.endIndex = this.frontIndex + end; if( r.frontIndex > this.endIndex || r.endIndex > this.endIndex || r.frontIndex > r.endIndex ) throw new SDLangRangeException("Slice out of range"); return r; } T opIndex(size_t index) { if(empty) throw new SDLangRangeException("Range is empty"); if(namespace == "*") return mixin("tag."~allMembers~"[ frontIndex+index ]"); else return mixin("tag."~allMembers~"[ tag."~memberIndices~"[namespace][frontIndex+index] ]"); } alias NamedMemberRange!(T,membersGrouped) ThisNamedMemberRange; ThisNamedMemberRange opIndex(string name) { 
if(frontIndex != 0 || endIndex != initialEndIndex) { throw new SDLangRangeException( "Cannot lookup tags/attributes by name on a subset of a range, "~ "only across the entire tag. "~ "Please make sure you haven't called popFront or popBack on this "~ "range and that you aren't using a slice of the range." ); } if(!isMaybe && empty) throw new SDLangRangeException("Range is empty"); if(!isMaybe && name !in this) throw new SDLangRangeException(`No such `~T.stringof~` named: "`~name~`"`); return ThisNamedMemberRange(tag, namespace, name, updateId); } bool opBinaryRight(string op)(string name) if(op=="in") { if(frontIndex != 0 || endIndex != initialEndIndex) { throw new SDLangRangeException( "Cannot lookup tags/attributes by name on a subset of a range, "~ "only across the entire tag. "~ "Please make sure you haven't called popFront or popBack on this "~ "range and that you aren't using a slice of the range." ); } return namespace in mixin("tag."~membersGrouped) && name in mixin("tag."~membersGrouped~"[namespace]") && mixin("tag."~membersGrouped~"[namespace][name].length") > 0; } } struct NamespaceRange { private Tag tag; private bool isMaybe; private size_t updateId; // Tag's updateId when this range was created. this(Tag tag, bool isMaybe) { this.tag = tag; this.isMaybe = isMaybe; this.updateId = tag.updateId; frontIndex = 0; endIndex = tag.allNamespaces.length; } invariant() { assert( this.updateId == tag.updateId, "This range has been invalidated by a change to the tag." 
); } @property bool empty() { return frontIndex == endIndex; } private size_t frontIndex; @property NamespaceAccess front() { return this[0]; } void popFront() { if(empty) throw new SDLangRangeException("Range is empty"); frontIndex++; } private size_t endIndex; // One past the last element @property NamespaceAccess back() { return this[$-1]; } void popBack() { if(empty) throw new SDLangRangeException("Range is empty"); endIndex--; } alias length opDollar; @property size_t length() { return endIndex - frontIndex; } @property NamespaceRange save() { auto r = NamespaceRange(this.tag, this.isMaybe); r.frontIndex = this.frontIndex; r.endIndex = this.endIndex; r.updateId = this.updateId; return r; } typeof(this) opSlice() { return save(); } typeof(this) opSlice(size_t start, size_t end) { auto r = save(); r.frontIndex = this.frontIndex + start; r.endIndex = this.frontIndex + end; if( r.frontIndex > this.endIndex || r.endIndex > this.endIndex || r.frontIndex > r.endIndex ) throw new SDLangRangeException("Slice out of range"); return r; } NamespaceAccess opIndex(size_t index) { if(empty) throw new SDLangRangeException("Range is empty"); auto namespace = tag.allNamespaces[frontIndex+index]; return NamespaceAccess( namespace, AttributeRange(tag, namespace, isMaybe), TagRange(tag, namespace, isMaybe) ); } NamespaceAccess opIndex(string namespace) { if(!isMaybe && empty) throw new SDLangRangeException("Range is empty"); if(!isMaybe && namespace !in this) throw new SDLangRangeException(`No such namespace: "`~namespace~`"`); return NamespaceAccess( namespace, AttributeRange(tag, namespace, isMaybe), TagRange(tag, namespace, isMaybe) ); } /// Inefficient when range is a slice or has used popFront/popBack, but it works. 
bool opBinaryRight(string op)(string namespace) if(op=="in") { if(frontIndex == 0 && endIndex == tag.allNamespaces.length) { return namespace in tag.attributeIndices || namespace in tag.tagIndices; } else // Slower fallback method return tag.allNamespaces[frontIndex..endIndex].canFind(namespace); } } struct NamespaceAccess { string name; AttributeRange attributes; TagRange tags; } alias MemberRange!(Attribute, "allAttributes", "attributeIndices", "_attributes") AttributeRange; alias MemberRange!(Tag, "allTags", "tagIndices", "_tags" ) TagRange; static assert(isRandomAccessRange!AttributeRange); static assert(isRandomAccessRange!TagRange); static assert(isRandomAccessRange!NamespaceRange); /// Access all attributes that don't have a namespace @property AttributeRange attributes() { return AttributeRange(this, "", false); } /// Access all direct-child tags that don't have a namespace @property TagRange tags() { return TagRange(this, "", false); } /// Access all namespaces in this tag, and the attributes/tags within them. @property NamespaceRange namespaces() { return NamespaceRange(this, false); } /// Access all attributes and tags regardless of namespace. @property NamespaceAccess all() { // "*" isn't a valid namespace name, so we can use it to indicate "all namespaces" return NamespaceAccess( "*", AttributeRange(this, "*", false), TagRange(this, "*", false) ); } struct MaybeAccess { Tag tag; /// Access all attributes that don't have a namespace @property AttributeRange attributes() { return AttributeRange(tag, "", true); } /// Access all direct-child tags that don't have a namespace @property TagRange tags() { return TagRange(tag, "", true); } /// Access all namespaces in this tag, and the attributes/tags within them. @property NamespaceRange namespaces() { return NamespaceRange(tag, true); } /// Access all attributes and tags regardless of namespace. 
@property NamespaceAccess all() { // "*" isn't a valid namespace name, so we can use it to indicate "all namespaces" return NamespaceAccess( "*", AttributeRange(tag, "*", true), TagRange(tag, "*", true) ); } } /// Access 'attributes', 'tags', 'namespaces' and 'all' like normal, /// except that looking up a non-existent name/namespace with /// opIndex(string) results in an empty array instead of a thrown SDLangRangeException. @property MaybeAccess maybe() { return MaybeAccess(this); } override bool opEquals(Object o) { auto t = cast(Tag)o; if(!t) return false; if(_namespace != t._namespace || _name != t._name) return false; if( values .length != t.values .length || allAttributes .length != t.allAttributes.length || allNamespaces .length != t.allNamespaces.length || allTags .length != t.allTags .length ) return false; if(values != t.values) return false; if(allNamespaces != t.allNamespaces) return false; if(allAttributes != t.allAttributes) return false; // Ok because cycles are not allowed //TODO: Actually check for or prevent cycles. return allTags == t.allTags; } /// Treats 'this' as the root tag. Note that root tags cannot have /// values or attributes, and cannot be part of a namespace. /// If this isn't a valid root tag, 'SDLangValidationException' will be thrown. string toSDLDocument()(string indent="\t", int indentLevel=0) { Appender!string sink; toSDLDocument(sink, indent, indentLevel); return sink.data; } ///ditto void toSDLDocument(Sink)(ref Sink sink, string indent="\t", int indentLevel=0) if(isOutputRange!(Sink,char)) { if(values.length > 0) throw new SDLangValidationException("Root tags cannot have any values, only child tags."); if(allAttributes.length > 0) throw new SDLangValidationException("Root tags cannot have any attributes, only child tags."); if(_namespace != "") throw new SDLangValidationException("Root tags cannot have a namespace."); foreach(tag; allTags) tag.toSDLString(sink, indent, indentLevel); } /// Output this entire tag in SDL format. 
Does *not* treat 'this' as /// a root tag. If you intend this to be the root of a standard SDL /// document, use 'toSDLDocument' instead. string toSDLString()(string indent="\t", int indentLevel=0) { Appender!string sink; toSDLString(sink, indent, indentLevel); return sink.data; } ///ditto void toSDLString(Sink)(ref Sink sink, string indent="\t", int indentLevel=0) if(isOutputRange!(Sink,char)) { if(_name == "" && values.length == 0) throw new SDLangValidationException("Anonymous tags must have at least one value."); if(_name == "" && _namespace != "") throw new SDLangValidationException("Anonymous tags cannot have a namespace."); // Indent foreach(i; 0..indentLevel) sink.put(indent); // Name if(_namespace != "") { sink.put(_namespace); sink.put(':'); } sink.put(_name); // Values foreach(i, v; values) { // Omit the first space for anonymous tags if(_name != "" || i > 0) sink.put(' '); v.toSDLString(sink); } // Attributes foreach(attr; allAttributes) { sink.put(' '); attr.toSDLString(sink); } // Child tags bool foundChild=false; foreach(tag; allTags) { if(!foundChild) { sink.put(" {\n"); foundChild = true; } tag.toSDLString(sink, indent, indentLevel+1); } if(foundChild) { foreach(i; 0..indentLevel) sink.put(indent); sink.put("}\n"); } else sink.put("\n"); } /// Not the most efficient, but it works. 
string toDebugString() { import std.algorithm : sort; Appender!string buf; buf.put("\n"); buf.put("Tag "); if(_namespace != "") { buf.put("["); buf.put(_namespace); buf.put("]"); } buf.put("'%s':\n".format(_name)); // Values foreach(val; values) buf.put(" (%s): %s\n".format( val.match!(v => typeof(v).stringof), val )); // Attributes foreach(attrNamespace; _attributes.keys.sort()) if(attrNamespace != "*") foreach(attrName; _attributes[attrNamespace].keys.sort()) foreach(attr; _attributes[attrNamespace][attrName]) { string namespaceStr; if(attr._namespace != "") namespaceStr = "["~attr._namespace~"]"; buf.put( " %s%s(%s): %s\n".format( namespaceStr, attr._name, attr.value.match!(v => typeof(v).stringof), attr.value ) ); } // Children foreach(tagNamespace; _tags.keys.sort()) if(tagNamespace != "*") foreach(tagName; _tags[tagNamespace].keys.sort()) foreach(tag; _tags[tagNamespace][tagName]) buf.put( tag.toDebugString().replace("\n", "\n ") ); return buf.data; } } version(sdlangUnittest) { private void testRandomAccessRange(R, E)(R range, E[] expected, bool function(E, E) equals=null) { static assert(isRandomAccessRange!R); static assert(is(ElementType!R == E)); static assert(hasLength!R); static assert(!isInfinite!R); assert(range.length == expected.length); if(range.length == 0) { assert(range.empty); return; } static bool defaultEquals(E e1, E e2) { return e1 == e2; } if(equals is null) equals = &defaultEquals; assert(equals(range.front, expected[0])); assert(equals(range.front, expected[0])); // Ensure consistent result from '.front' assert(equals(range.front, expected[0])); // Ensure consistent result from '.front' assert(equals(range.back, expected[$-1])); assert(equals(range.back, expected[$-1])); // Ensure consistent result from '.back' assert(equals(range.back, expected[$-1])); // Ensure consistent result from '.back' // Forward iteration auto original = range.save; auto r2 = range.save; foreach(i; 0..expected.length) { //trace("Forward iteration: ", i); // 
Test length/empty assert(range.length == expected.length - i); assert(range.length == r2.length); assert(!range.empty); assert(!r2.empty); // Test front assert(equals(range.front, expected[i])); assert(equals(range.front, r2.front)); // Test back assert(equals(range.back, expected[$-1])); assert(equals(range.back, r2.back)); // Test opIndex(0) assert(equals(range[0], expected[i])); assert(equals(range[0], r2[0])); // Test opIndex($-1) assert(equals(range[$-1], expected[$-1])); assert(equals(range[$-1], r2[$-1])); // Test popFront range.popFront(); assert(range.length == r2.length - 1); r2.popFront(); assert(range.length == r2.length); } assert(range.empty); assert(r2.empty); assert(original.length == expected.length); // Backwards iteration range = original.save; r2 = original.save; foreach(i; iota(0, expected.length).retro()) { //trace("Backwards iteration: ", i); // Test length/empty assert(range.length == i+1); assert(range.length == r2.length); assert(!range.empty); assert(!r2.empty); // Test front assert(equals(range.front, expected[0])); assert(equals(range.front, r2.front)); // Test back assert(equals(range.back, expected[i])); assert(equals(range.back, r2.back)); // Test opIndex(0) assert(equals(range[0], expected[0])); assert(equals(range[0], r2[0])); // Test opIndex($-1) assert(equals(range[$-1], expected[i])); assert(equals(range[$-1], r2[$-1])); // Test popBack range.popBack(); assert(range.length == r2.length - 1); r2.popBack(); assert(range.length == r2.length); } assert(range.empty); assert(r2.empty); assert(original.length == expected.length); // Random access range = original.save; r2 = original.save; foreach(i; 0..expected.length) { //trace("Random access: ", i); // Test length/empty assert(range.length == expected.length); assert(range.length == r2.length); assert(!range.empty); assert(!r2.empty); // Test front assert(equals(range.front, expected[0])); assert(equals(range.front, r2.front)); // Test back assert(equals(range.back, expected[$-1])); 
assert(equals(range.back, r2.back)); // Test opIndex(i) assert(equals(range[i], expected[i])); assert(equals(range[i], r2[i])); } assert(!range.empty); assert(!r2.empty); assert(original.length == expected.length); } } version(sdlangUnittest) unittest { import sdlang.parser; writeln("Unittesting sdlang ast..."); stdout.flush(); Tag root; root = parseSource(""); testRandomAccessRange(root.attributes, cast( Attribute[])[]); testRandomAccessRange(root.tags, cast( Tag[])[]); testRandomAccessRange(root.namespaces, cast(Tag.NamespaceAccess[])[]); root = parseSource(` blue 3 "Lee" isThree=true blue 5 "Chan" 12345 isThree=false stuff:orange 1 2 3 2 1 stuff:square points=4 dimensions=2 points="Still four" stuff:triangle data:points=3 data:dimensions=2 nothing namespaces small:A=1 med:A=2 big:A=3 small:B=10 big:B=30 people visitor:a=1 b=2 { chiyo "Small" "Flies?" nemesis="Car" score=100 yukari visitor:sana tomo visitor:hayama } `); auto blue3 = new Tag( null, "", "blue", [ Value(3), Value("Lee") ], [ new Attribute("isThree", Value(true)) ], null ); auto blue5 = new Tag( null, "", "blue", [ Value(5), Value("Chan"), Value(12345) ], [ new Attribute("isThree", Value(false)) ], null ); auto orange = new Tag( null, "stuff", "orange", [ Value(1), Value(2), Value(3), Value(2), Value(1) ], null, null ); auto square = new Tag( null, "stuff", "square", null, [ new Attribute("points", Value(4)), new Attribute("dimensions", Value(2)), new Attribute("points", Value("Still four")), ], null ); auto triangle = new Tag( null, "stuff", "triangle", null, [ new Attribute("data", "points", Value(3)), new Attribute("data", "dimensions", Value(2)), ], null ); auto nothing = new Tag( null, "", "nothing", null, null, null ); auto namespaces = new Tag( null, "", "namespaces", null, [ new Attribute("small", "A", Value(1)), new Attribute("med", "A", Value(2)), new Attribute("big", "A", Value(3)), new Attribute("small", "B", Value(10)), new Attribute("big", "B", Value(30)), ], null ); auto chiyo = new 
Tag( null, "", "chiyo", [ Value("Small"), Value("Flies?") ], [ new Attribute("nemesis", Value("Car")), new Attribute("score", Value(100)), ], null ); auto chiyo_ = new Tag( null, "", "chiyo_", [ Value("Small"), Value("Flies?") ], [ new Attribute("nemesis", Value("Car")), new Attribute("score", Value(100)), ], null ); auto yukari = new Tag( null, "", "yukari", null, null, null ); auto sana = new Tag( null, "visitor", "sana", null, null, null ); auto sana_ = new Tag( null, "visitor", "sana_", null, null, null ); auto sanaVisitor_ = new Tag( null, "visitor_", "sana_", null, null, null ); auto tomo = new Tag( null, "", "tomo", null, null, null ); auto hayama = new Tag( null, "visitor", "hayama", null, null, null ); auto people = new Tag( null, "", "people", null, [ new Attribute("visitor", "a", Value(1)), new Attribute("b", Value(2)), ], [chiyo, yukari, sana, tomo, hayama] ); assert(blue3 .opEquals( blue3 )); assert(blue5 .opEquals( blue5 )); assert(orange .opEquals( orange )); assert(square .opEquals( square )); assert(triangle .opEquals( triangle )); assert(nothing .opEquals( nothing )); assert(namespaces .opEquals( namespaces )); assert(people .opEquals( people )); assert(chiyo .opEquals( chiyo )); assert(yukari .opEquals( yukari )); assert(sana .opEquals( sana )); assert(tomo .opEquals( tomo )); assert(hayama .opEquals( hayama )); assert(!blue3.opEquals(orange)); assert(!blue3.opEquals(people)); assert(!blue3.opEquals(sana)); assert(!blue3.opEquals(blue5)); assert(!blue5.opEquals(blue3)); alias Tag.NamespaceAccess NSA; static bool namespaceEquals(NSA n1, NSA n2) { return n1.name == n2.name; } testRandomAccessRange(root.attributes, cast(Attribute[])[]); testRandomAccessRange(root.tags, [blue3, blue5, nothing, namespaces, people]); testRandomAccessRange(root.namespaces, [NSA(""), NSA("stuff")], &namespaceEquals); testRandomAccessRange(root.namespaces[0].tags, [blue3, blue5, nothing, namespaces, people]); testRandomAccessRange(root.namespaces[1].tags, [orange, square, 
triangle]); assert("" in root.namespaces); assert("stuff" in root.namespaces); assert("foobar" !in root.namespaces); testRandomAccessRange(root.namespaces[ ""].tags, [blue3, blue5, nothing, namespaces, people]); testRandomAccessRange(root.namespaces["stuff"].tags, [orange, square, triangle]); testRandomAccessRange(root.all.attributes, cast(Attribute[])[]); testRandomAccessRange(root.all.tags, [blue3, blue5, orange, square, triangle, nothing, namespaces, people]); testRandomAccessRange(root.all.tags[], [blue3, blue5, orange, square, triangle, nothing, namespaces, people]); testRandomAccessRange(root.all.tags[3..6], [square, triangle, nothing]); assert("blue" in root.tags); assert("nothing" in root.tags); assert("people" in root.tags); assert("orange" !in root.tags); assert("square" !in root.tags); assert("foobar" !in root.tags); assert("blue" in root.all.tags); assert("nothing" in root.all.tags); assert("people" in root.all.tags); assert("orange" in root.all.tags); assert("square" in root.all.tags); assert("foobar" !in root.all.tags); assert("orange" in root.namespaces["stuff"].tags); assert("square" in root.namespaces["stuff"].tags); assert("square" in root.namespaces["stuff"].tags); assert("foobar" !in root.attributes); assert("foobar" !in root.all.attributes); assert("foobar" !in root.namespaces["stuff"].attributes); assert("blue" !in root.attributes); assert("blue" !in root.all.attributes); assert("blue" !in root.namespaces["stuff"].attributes); testRandomAccessRange(root.tags["nothing"], [nothing]); testRandomAccessRange(root.tags["blue"], [blue3, blue5]); testRandomAccessRange(root.namespaces["stuff"].tags["orange"], [orange]); testRandomAccessRange(root.all.tags["nothing"], [nothing]); testRandomAccessRange(root.all.tags["blue"], [blue3, blue5]); testRandomAccessRange(root.all.tags["orange"], [orange]); assertThrown!SDLangRangeException(root.tags["foobar"]); assertThrown!SDLangRangeException(root.all.tags["foobar"]); 
assertThrown!SDLangRangeException(root.attributes["foobar"]); assertThrown!SDLangRangeException(root.all.attributes["foobar"]); // DMD Issue #12585 causes a segfault in these two tests when using 2.064 or 2.065, // so work around it. //assertThrown!SDLangRangeException(root.namespaces["foobar"].tags["foobar"]); //assertThrown!SDLangRangeException(root.namespaces["foobar"].attributes["foobar"]); bool didCatch = false; try auto x = root.namespaces["foobar"].tags["foobar"]; catch(SDLangRangeException e) didCatch = true; assert(didCatch); didCatch = false; try auto x = root.namespaces["foobar"].attributes["foobar"]; catch(SDLangRangeException e) didCatch = true; assert(didCatch); testRandomAccessRange(root.maybe.tags["nothing"], [nothing]); testRandomAccessRange(root.maybe.tags["blue"], [blue3, blue5]); testRandomAccessRange(root.maybe.namespaces["stuff"].tags["orange"], [orange]); testRandomAccessRange(root.maybe.all.tags["nothing"], [nothing]); testRandomAccessRange(root.maybe.all.tags["blue"], [blue3, blue5]); testRandomAccessRange(root.maybe.all.tags["blue"][], [blue3, blue5]); testRandomAccessRange(root.maybe.all.tags["blue"][0..1], [blue3]); testRandomAccessRange(root.maybe.all.tags["blue"][1..2], [blue5]); testRandomAccessRange(root.maybe.all.tags["orange"], [orange]); testRandomAccessRange(root.maybe.tags["foobar"], cast(Tag[])[]); testRandomAccessRange(root.maybe.all.tags["foobar"], cast(Tag[])[]); testRandomAccessRange(root.maybe.namespaces["foobar"].tags["foobar"], cast(Tag[])[]); testRandomAccessRange(root.maybe.attributes["foobar"], cast(Attribute[])[]); testRandomAccessRange(root.maybe.all.attributes["foobar"], cast(Attribute[])[]); testRandomAccessRange(root.maybe.namespaces["foobar"].attributes["foobar"], cast(Attribute[])[]); testRandomAccessRange(blue3.attributes, [ new Attribute("isThree", Value(true)) ]); testRandomAccessRange(blue3.tags, cast(Tag[])[]); testRandomAccessRange(blue3.namespaces, [NSA("")], &namespaceEquals); 
testRandomAccessRange(blue3.all.attributes, [ new Attribute("isThree", Value(true)) ]); testRandomAccessRange(blue3.all.tags, cast(Tag[])[]); testRandomAccessRange(blue5.attributes, [ new Attribute("isThree", Value(false)) ]); testRandomAccessRange(blue5.tags, cast(Tag[])[]); testRandomAccessRange(blue5.namespaces, [NSA("")], &namespaceEquals); testRandomAccessRange(blue5.all.attributes, [ new Attribute("isThree", Value(false)) ]); testRandomAccessRange(blue5.all.tags, cast(Tag[])[]); testRandomAccessRange(orange.attributes, cast(Attribute[])[]); testRandomAccessRange(orange.tags, cast(Tag[])[]); testRandomAccessRange(orange.namespaces, cast(NSA[])[], &namespaceEquals); testRandomAccessRange(orange.all.attributes, cast(Attribute[])[]); testRandomAccessRange(orange.all.tags, cast(Tag[])[]); testRandomAccessRange(square.attributes, [ new Attribute("points", Value(4)), new Attribute("dimensions", Value(2)), new Attribute("points", Value("Still four")), ]); testRandomAccessRange(square.tags, cast(Tag[])[]); testRandomAccessRange(square.namespaces, [NSA("")], &namespaceEquals); testRandomAccessRange(square.all.attributes, [ new Attribute("points", Value(4)), new Attribute("dimensions", Value(2)), new Attribute("points", Value("Still four")), ]); testRandomAccessRange(square.all.tags, cast(Tag[])[]); testRandomAccessRange(triangle.attributes, cast(Attribute[])[]); testRandomAccessRange(triangle.tags, cast(Tag[])[]); testRandomAccessRange(triangle.namespaces, [NSA("data")], &namespaceEquals); testRandomAccessRange(triangle.namespaces[0].attributes, [ new Attribute("data", "points", Value(3)), new Attribute("data", "dimensions", Value(2)), ]); assert("data" in triangle.namespaces); assert("foobar" !in triangle.namespaces); testRandomAccessRange(triangle.namespaces["data"].attributes, [ new Attribute("data", "points", Value(3)), new Attribute("data", "dimensions", Value(2)), ]); testRandomAccessRange(triangle.all.attributes, [ new Attribute("data", "points", Value(3)), new 
Attribute("data", "dimensions", Value(2)), ]); testRandomAccessRange(triangle.all.tags, cast(Tag[])[]); testRandomAccessRange(nothing.attributes, cast(Attribute[])[]); testRandomAccessRange(nothing.tags, cast(Tag[])[]); testRandomAccessRange(nothing.namespaces, cast(NSA[])[], &namespaceEquals); testRandomAccessRange(nothing.all.attributes, cast(Attribute[])[]); testRandomAccessRange(nothing.all.tags, cast(Tag[])[]); testRandomAccessRange(namespaces.attributes, cast(Attribute[])[]); testRandomAccessRange(namespaces.tags, cast(Tag[])[]); testRandomAccessRange(namespaces.namespaces, [NSA("small"), NSA("med"), NSA("big")], &namespaceEquals); testRandomAccessRange(namespaces.namespaces[], [NSA("small"), NSA("med"), NSA("big")], &namespaceEquals); testRandomAccessRange(namespaces.namespaces[1..2], [NSA("med")], &namespaceEquals); testRandomAccessRange(namespaces.namespaces[0].attributes, [ new Attribute("small", "A", Value(1)), new Attribute("small", "B", Value(10)), ]); testRandomAccessRange(namespaces.namespaces[1].attributes, [ new Attribute("med", "A", Value(2)), ]); testRandomAccessRange(namespaces.namespaces[2].attributes, [ new Attribute("big", "A", Value(3)), new Attribute("big", "B", Value(30)), ]); testRandomAccessRange(namespaces.namespaces[1..2][0].attributes, [ new Attribute("med", "A", Value(2)), ]); assert("small" in namespaces.namespaces); assert("med" in namespaces.namespaces); assert("big" in namespaces.namespaces); assert("foobar" !in namespaces.namespaces); assert("small" !in namespaces.namespaces[1..2]); assert("med" in namespaces.namespaces[1..2]); assert("big" !in namespaces.namespaces[1..2]); assert("foobar" !in namespaces.namespaces[1..2]); testRandomAccessRange(namespaces.namespaces["small"].attributes, [ new Attribute("small", "A", Value(1)), new Attribute("small", "B", Value(10)), ]); testRandomAccessRange(namespaces.namespaces["med"].attributes, [ new Attribute("med", "A", Value(2)), ]); 
testRandomAccessRange(namespaces.namespaces["big"].attributes, [ new Attribute("big", "A", Value(3)), new Attribute("big", "B", Value(30)), ]); testRandomAccessRange(namespaces.all.attributes, [ new Attribute("small", "A", Value(1)), new Attribute("med", "A", Value(2)), new Attribute("big", "A", Value(3)), new Attribute("small", "B", Value(10)), new Attribute("big", "B", Value(30)), ]); testRandomAccessRange(namespaces.all.attributes[], [ new Attribute("small", "A", Value(1)), new Attribute("med", "A", Value(2)), new Attribute("big", "A", Value(3)), new Attribute("small", "B", Value(10)), new Attribute("big", "B", Value(30)), ]); testRandomAccessRange(namespaces.all.attributes[2..4], [ new Attribute("big", "A", Value(3)), new Attribute("small", "B", Value(10)), ]); testRandomAccessRange(namespaces.all.tags, cast(Tag[])[]); assert("A" !in namespaces.attributes); assert("B" !in namespaces.attributes); assert("foobar" !in namespaces.attributes); assert("A" in namespaces.all.attributes); assert("B" in namespaces.all.attributes); assert("foobar" !in namespaces.all.attributes); assert("A" in namespaces.namespaces["small"].attributes); assert("B" in namespaces.namespaces["small"].attributes); assert("foobar" !in namespaces.namespaces["small"].attributes); assert("A" in namespaces.namespaces["med"].attributes); assert("B" !in namespaces.namespaces["med"].attributes); assert("foobar" !in namespaces.namespaces["med"].attributes); assert("A" in namespaces.namespaces["big"].attributes); assert("B" in namespaces.namespaces["big"].attributes); assert("foobar" !in namespaces.namespaces["big"].attributes); assert("foobar" !in namespaces.tags); assert("foobar" !in namespaces.all.tags); assert("foobar" !in namespaces.namespaces["small"].tags); assert("A" !in namespaces.tags); assert("A" !in namespaces.all.tags); assert("A" !in namespaces.namespaces["small"].tags); testRandomAccessRange(namespaces.namespaces["small"].attributes["A"], [ new Attribute("small", "A", Value(1)), ]); 
testRandomAccessRange(namespaces.namespaces["med"].attributes["A"], [ new Attribute("med", "A", Value(2)), ]); testRandomAccessRange(namespaces.namespaces["big"].attributes["A"], [ new Attribute("big", "A", Value(3)), ]); testRandomAccessRange(namespaces.all.attributes["A"], [ new Attribute("small", "A", Value(1)), new Attribute("med", "A", Value(2)), new Attribute("big", "A", Value(3)), ]); testRandomAccessRange(namespaces.all.attributes["B"], [ new Attribute("small", "B", Value(10)), new Attribute("big", "B", Value(30)), ]); testRandomAccessRange(chiyo.attributes, [ new Attribute("nemesis", Value("Car")), new Attribute("score", Value(100)), ]); testRandomAccessRange(chiyo.tags, cast(Tag[])[]); testRandomAccessRange(chiyo.namespaces, [NSA("")], &namespaceEquals); testRandomAccessRange(chiyo.all.attributes, [ new Attribute("nemesis", Value("Car")), new Attribute("score", Value(100)), ]); testRandomAccessRange(chiyo.all.tags, cast(Tag[])[]); testRandomAccessRange(yukari.attributes, cast(Attribute[])[]); testRandomAccessRange(yukari.tags, cast(Tag[])[]); testRandomAccessRange(yukari.namespaces, cast(NSA[])[], &namespaceEquals); testRandomAccessRange(yukari.all.attributes, cast(Attribute[])[]); testRandomAccessRange(yukari.all.tags, cast(Tag[])[]); testRandomAccessRange(sana.attributes, cast(Attribute[])[]); testRandomAccessRange(sana.tags, cast(Tag[])[]); testRandomAccessRange(sana.namespaces, cast(NSA[])[], &namespaceEquals); testRandomAccessRange(sana.all.attributes, cast(Attribute[])[]); testRandomAccessRange(sana.all.tags, cast(Tag[])[]); testRandomAccessRange(people.attributes, [new Attribute("b", Value(2))]); testRandomAccessRange(people.tags, [chiyo, yukari, tomo]); testRandomAccessRange(people.namespaces, [NSA("visitor"), NSA("")], &namespaceEquals); testRandomAccessRange(people.namespaces[0].attributes, [new Attribute("visitor", "a", Value(1))]); testRandomAccessRange(people.namespaces[1].attributes, [new Attribute("b", Value(2))]); 
testRandomAccessRange(people.namespaces[0].tags, [sana, hayama]); testRandomAccessRange(people.namespaces[1].tags, [chiyo, yukari, tomo]); assert("visitor" in people.namespaces); assert("" in people.namespaces); assert("foobar" !in people.namespaces); testRandomAccessRange(people.namespaces["visitor"].attributes, [new Attribute("visitor", "a", Value(1))]); testRandomAccessRange(people.namespaces[ ""].attributes, [new Attribute("b", Value(2))]); testRandomAccessRange(people.namespaces["visitor"].tags, [sana, hayama]); testRandomAccessRange(people.namespaces[ ""].tags, [chiyo, yukari, tomo]); testRandomAccessRange(people.all.attributes, [ new Attribute("visitor", "a", Value(1)), new Attribute("b", Value(2)), ]); testRandomAccessRange(people.all.tags, [chiyo, yukari, sana, tomo, hayama]); people.attributes["b"][0].name = "b_"; people.namespaces["visitor"].attributes["a"][0].name = "a_"; people.tags["chiyo"][0].name = "chiyo_"; people.namespaces["visitor"].tags["sana"][0].name = "sana_"; assert("b_" in people.attributes); assert("a_" in people.namespaces["visitor"].attributes); assert("chiyo_" in people.tags); assert("sana_" in people.namespaces["visitor"].tags); assert(people.attributes["b_"][0] == new Attribute("b_", Value(2))); assert(people.namespaces["visitor"].attributes["a_"][0] == new Attribute("visitor", "a_", Value(1))); assert(people.tags["chiyo_"][0] == chiyo_); assert(people.namespaces["visitor"].tags["sana_"][0] == sana_); assert("b" !in people.attributes); assert("a" !in people.namespaces["visitor"].attributes); assert("chiyo" !in people.tags); assert("sana" !in people.namespaces["visitor"].tags); assert(people.maybe.attributes["b"].length == 0); assert(people.maybe.namespaces["visitor"].attributes["a"].length == 0); assert(people.maybe.tags["chiyo"].length == 0); assert(people.maybe.namespaces["visitor"].tags["sana"].length == 0); people.tags["tomo"][0].remove(); people.namespaces["visitor"].tags["hayama"][0].remove(); people.tags["chiyo_"][0].remove(); 
testRandomAccessRange(people.tags, [yukari]); testRandomAccessRange(people.namespaces, [NSA("visitor"), NSA("")], &namespaceEquals); testRandomAccessRange(people.namespaces[0].tags, [sana_]); testRandomAccessRange(people.namespaces[1].tags, [yukari]); assert("visitor" in people.namespaces); assert("" in people.namespaces); assert("foobar" !in people.namespaces); testRandomAccessRange(people.namespaces["visitor"].tags, [sana_]); testRandomAccessRange(people.namespaces[ ""].tags, [yukari]); testRandomAccessRange(people.all.tags, [yukari, sana_]); people.attributes["b_"][0].namespace = "_"; people.namespaces["visitor"].attributes["a_"][0].namespace = "visitor_"; assert("_" in people.namespaces); assert("visitor_" in people.namespaces); assert("" in people.namespaces); assert("visitor" in people.namespaces); people.namespaces["visitor"].tags["sana_"][0].namespace = "visitor_"; assert("_" in people.namespaces); assert("visitor_" in people.namespaces); assert("" in people.namespaces); assert("visitor" !in people.namespaces); assert(people.namespaces["_" ].attributes["b_"][0] == new Attribute("_", "b_", Value(2))); assert(people.namespaces["visitor_"].attributes["a_"][0] == new Attribute("visitor_", "a_", Value(1))); assert(people.namespaces["visitor_"].tags["sana_"][0] == sanaVisitor_); people.tags["yukari"][0].remove(); people.namespaces["visitor_"].tags["sana_"][0].remove(); people.namespaces["visitor_"].attributes["a_"][0].namespace = "visitor"; people.namespaces["_"].attributes["b_"][0].namespace = ""; testRandomAccessRange(people.tags, cast(Tag[])[]); testRandomAccessRange(people.namespaces, [NSA("visitor"), NSA("")], &namespaceEquals); testRandomAccessRange(people.namespaces[0].tags, cast(Tag[])[]); testRandomAccessRange(people.namespaces[1].tags, cast(Tag[])[]); assert("visitor" in people.namespaces); assert("" in people.namespaces); assert("foobar" !in people.namespaces); testRandomAccessRange(people.namespaces["visitor"].tags, cast(Tag[])[]); 
testRandomAccessRange(people.namespaces[ ""].tags, cast(Tag[])[]); testRandomAccessRange(people.all.tags, cast(Tag[])[]); people.namespaces["visitor"].attributes["a_"][0].remove(); testRandomAccessRange(people.attributes, [new Attribute("b_", Value(2))]); testRandomAccessRange(people.namespaces, [NSA("")], &namespaceEquals); testRandomAccessRange(people.namespaces[0].attributes, [new Attribute("b_", Value(2))]); assert("visitor" !in people.namespaces); assert("" in people.namespaces); assert("foobar" !in people.namespaces); testRandomAccessRange(people.namespaces[""].attributes, [new Attribute("b_", Value(2))]); testRandomAccessRange(people.all.attributes, [ new Attribute("b_", Value(2)), ]); people.attributes["b_"][0].remove(); testRandomAccessRange(people.attributes, cast(Attribute[])[]); testRandomAccessRange(people.namespaces, cast(NSA[])[], &namespaceEquals); assert("visitor" !in people.namespaces); assert("" !in people.namespaces); assert("foobar" !in people.namespaces); testRandomAccessRange(people.all.attributes, cast(Attribute[])[]); } // Regression test, issue #11: https://github.com/Abscissa/SDLang-D/issues/11 version(sdlangUnittest) unittest { import sdlang.parser; writeln("ast: Regression test issue #11..."); stdout.flush(); auto root = parseSource( `// a`); assert("a" in root.tags); root = parseSource( `// parent { child } `); auto child = new Tag( null, "", "child", null, null, null ); assert("parent" in root.tags); assert("child" !in root.tags); testRandomAccessRange(root.tags["parent"][0].tags, [child]); assert("child" in root.tags["parent"][0].tags); } dub-1.40.0/source/dub/internal/sdlang/exception.d000066400000000000000000000013641477246567400217010ustar00rootroot00000000000000// SDLang-D // Written in the D programming language. 
module dub.internal.sdlang.exception; version (Have_sdlang_d) public import sdlang.exception; else: import std.exception; import std.string; import dub.internal.sdlang.util; abstract class SDLangException : Exception { this(string msg) { super(msg); } } class SDLangParseException : SDLangException { Location location; bool hasLocation; this(string msg) { hasLocation = false; super(msg); } this(Location location, string msg) { hasLocation = true; super("%s: %s".format(location.toString(), msg)); } } class SDLangValidationException : SDLangException { this(string msg) { super(msg); } } class SDLangRangeException : SDLangException { this(string msg) { super(msg); } } dub-1.40.0/source/dub/internal/sdlang/lexer.d000066400000000000000000001726641477246567400210360ustar00rootroot00000000000000// SDLang-D // Written in the D programming language. module dub.internal.sdlang.lexer; version (Have_sdlang_d) public import sdlang.lexer; else: import std.algorithm; import std.array; import std.base64; import std.bigint; import std.conv; import std.datetime; import std.file; import std.traits; import std.typecons; import std.uni; import std.utf; import dub.internal.sdlang.exception; import dub.internal.sdlang.symbol; import dub.internal.sdlang.token; import dub.internal.sdlang.util; alias dub.internal.sdlang.util.startsWith startsWith; Token[] lexFile(string filename) { auto source = cast(string)read(filename); return lexSource(source, filename); } Token[] lexSource(string source, string filename=null) { auto lexer = scoped!Lexer(source, filename); // Can't use 'std.array.array(Range)' because 'lexer' is scoped // and therefore cannot have its reference copied. Appender!(Token[]) tokens; foreach(tok; lexer) tokens.put(tok); return tokens.data; } // Kind of a poor-man's yield, but fast. // Only to be used inside Lexer.popFront (and Lexer.this). 
private template accept(string symbolName) { static assert(symbolName != "Value", "Value symbols must also take a value."); enum accept = acceptImpl!(symbolName, "null"); } private template accept(string symbolName, string value) { static assert(symbolName == "Value", "Only a Value symbol can take a value."); enum accept = acceptImpl!(symbolName, value); } private template accept(string symbolName, string value, string startLocation, string endLocation) { static assert(symbolName == "Value", "Only a Value symbol can take a value."); enum accept = (" { _front = makeToken!"~symbolName.stringof~"; _front.value = "~value~"; _front.location = "~(startLocation==""? "tokenStart" : startLocation)~"; _front.data = source[ "~(startLocation==""? "tokenStart.index" : startLocation)~" .. "~(endLocation==""? "location.index" : endLocation)~" ]; return; } ").replace("\n", ""); } private template acceptImpl(string symbolName, string value) { enum acceptImpl = (" { _front = makeToken!"~symbolName.stringof~"; _front.value = "~value~"; return; } ").replace("\n", ""); } class Lexer { string source; string filename; Location location; /// Location of current character in source private dchar ch; // Current character private dchar nextCh; // Lookahead character private size_t nextPos; // Position of lookahead character (an index into source) private bool hasNextCh; // If false, then there's no more lookahead, just EOF private size_t posAfterLookahead; // Position after lookahead character (an index into source) private Location tokenStart; // The starting location of the token being lexed // Length so far of the token being lexed, not including current char private size_t tokenLength; // Length in UTF-8 code units private size_t tokenLength32; // Length in UTF-32 code units // Slight kludge: // If a numeric fragment is found after a Date (separated by arbitrary // whitespace), it could be the "hours" part of a DateTime, or it could // be a separate numeric literal that simply follows a 
plain Date. If the // latter, then the Date must be emitted, but numeric fragment that was // found after it needs to be saved for the the lexer's next iteration. // // It's a slight kludge, and could instead be implemented as a slightly // kludgey parser hack, but it's the only situation where SDL's lexing // needs to lookahead more than one character, so this is good enough. private struct LookaheadTokenInfo { bool exists = false; string numericFragment = ""; bool isNegative = false; Location tokenStart; } private LookaheadTokenInfo lookaheadTokenInfo; this(string source=null, string filename=null) { this.filename = filename; this.source = source; _front = Token(symbol!"Error", Location()); lookaheadTokenInfo = LookaheadTokenInfo.init; if( source.startsWith( ByteOrderMarks[BOM.UTF8] ) ) { source = source[ ByteOrderMarks[BOM.UTF8].length .. $ ]; this.source = source; } foreach(bom; ByteOrderMarks) if( source.startsWith(bom) ) error(Location(filename,0,0,0), "SDL spec only supports UTF-8, not UTF-16 or UTF-32"); if(source == "") mixin(accept!"EOF"); // Prime everything hasNextCh = true; nextCh = source.decode(posAfterLookahead); advanceChar(ErrorOnEOF.Yes); location = Location(filename, 0, 0, 0); popFront(); } @property bool empty() { return _front.symbol == symbol!"EOF"; } Token _front; @property Token front() { return _front; } @property bool isEOF() { return location.index == source.length && !lookaheadTokenInfo.exists; } private void error(string msg) { error(location, msg); } private void error(Location loc, string msg) { throw new SDLangParseException(loc, "Error: "~msg); } private Token makeToken(string symbolName)() { auto tok = Token(symbol!symbolName, tokenStart); tok.data = tokenData; return tok; } private @property string tokenData() { return source[ tokenStart.index .. 
location.index ]; } /// Check the lookahead character private bool lookahead(dchar ch) { return hasNextCh && nextCh == ch; } private bool lookahead(bool function(dchar) condition) { return hasNextCh && condition(nextCh); } private static bool isNewline(dchar ch) { return ch == '\n' || ch == '\r' || ch == lineSep || ch == paraSep; } /// Returns the length of the newline sequence, or zero if the current /// character is not a newline /// /// Note that there are only single character sequences and the two /// character sequence `\r\n` as used on Windows. private size_t isAtNewline() { if(ch == '\n' || ch == lineSep || ch == paraSep) return 1; else if(ch == '\r') return lookahead('\n') ? 2 : 1; else return 0; } /// Is 'ch' a valid base 64 character? private bool isBase64(dchar ch) { if(ch >= 'A' && ch <= 'Z') return true; if(ch >= 'a' && ch <= 'z') return true; if(ch >= '0' && ch <= '9') return true; return ch == '+' || ch == '/' || ch == '='; } /// Is the current character one that's allowed /// immediately *after* an int/float literal? private bool isEndOfNumber() { if(isEOF) return true; return !isDigit(ch) && ch != ':' && ch != '_' && !isAlpha(ch); } /// Is current character the last one in an ident? private bool isEndOfIdentCached = false; private bool _isEndOfIdent; private bool isEndOfIdent() { if(!isEndOfIdentCached) { if(!hasNextCh) _isEndOfIdent = true; else _isEndOfIdent = !isIdentChar(nextCh); isEndOfIdentCached = true; } return _isEndOfIdent; } /// Is 'ch' a character that's allowed *somewhere* in an identifier? private bool isIdentChar(dchar ch) { if(isAlpha(ch)) return true; else if(isNumber(ch)) return true; else return ch == '-' || ch == '_' || ch == '.' 
|| ch == '$'; } private bool isDigit(dchar ch) { return ch >= '0' && ch <= '9'; } private enum KeywordResult { Accept, // Keyword is matched Continue, // Keyword is not matched *yet* Failed, // Keyword doesn't match } private KeywordResult checkKeyword(dstring keyword32) { // Still within length of keyword if(tokenLength32 < keyword32.length) { if(ch == keyword32[tokenLength32]) return KeywordResult.Continue; else return KeywordResult.Failed; } // At position after keyword else if(tokenLength32 == keyword32.length) { if(isEOF || !isIdentChar(ch)) { debug assert(tokenData == to!string(keyword32)); return KeywordResult.Accept; } else return KeywordResult.Failed; } assert(0, "Fell off end of keyword to check"); } enum ErrorOnEOF { No, Yes } /// Advance one code point. private void advanceChar(ErrorOnEOF errorOnEOF) { if(auto cnt = isAtNewline()) { if (cnt == 1) location.line++; location.col = 0; } else location.col++; location.index = nextPos; nextPos = posAfterLookahead; ch = nextCh; if(!hasNextCh) { if(errorOnEOF == ErrorOnEOF.Yes) error("Unexpected end of file"); return; } tokenLength32++; tokenLength = location.index - tokenStart.index; if(nextPos == source.length) { nextCh = dchar.init; hasNextCh = false; return; } nextCh = source.decode(posAfterLookahead); isEndOfIdentCached = false; } /// Advances the specified amount of characters private void advanceChar(size_t count, ErrorOnEOF errorOnEOF) { while(count-- > 0) advanceChar(errorOnEOF); } void popFront() { // -- Main Lexer ------------- eatWhite(); if(isEOF) mixin(accept!"EOF"); tokenStart = location; tokenLength = 0; tokenLength32 = 0; isEndOfIdentCached = false; if(lookaheadTokenInfo.exists) { tokenStart = lookaheadTokenInfo.tokenStart; auto prevLATokenInfo = lookaheadTokenInfo; lookaheadTokenInfo = LookaheadTokenInfo.init; lexNumeric(prevLATokenInfo); return; } if(ch == '=') { advanceChar(ErrorOnEOF.No); mixin(accept!"="); } else if(ch == '{') { advanceChar(ErrorOnEOF.No); mixin(accept!"{"); } else if(ch == 
'}') { advanceChar(ErrorOnEOF.No); mixin(accept!"}"); } else if(ch == ':') { advanceChar(ErrorOnEOF.No); mixin(accept!":"); } else if(ch == ';') { advanceChar(ErrorOnEOF.No); mixin(accept!"EOL"); } else if(auto cnt = isAtNewline()) { advanceChar(cnt, ErrorOnEOF.No); mixin(accept!"EOL"); } else if(isAlpha(ch) || ch == '_') lexIdentKeyword(); else if(ch == '"') lexRegularString(); else if(ch == '`') lexRawString(); else if(ch == '\'') lexCharacter(); else if(ch == '[') lexBinary(); else if(ch == '-' || ch == '.' || isDigit(ch)) lexNumeric(); else { advanceChar(ErrorOnEOF.No); error("Syntax error"); } } /// Lex Ident or Keyword private void lexIdentKeyword() { assert(isAlpha(ch) || ch == '_'); // Keyword struct Key { dstring name; Value value; bool failed = false; } static Key[5] keywords; static keywordsInited = false; if(!keywordsInited) { // Value (as a std.variant-based type) can't be statically initialized keywords[0] = Key("true", Value(true )); keywords[1] = Key("false", Value(false)); keywords[2] = Key("on", Value(true )); keywords[3] = Key("off", Value(false)); keywords[4] = Key("null", Value(null )); keywordsInited = true; } foreach(ref key; keywords) key.failed = false; auto numKeys = keywords.length; do { foreach(ref key; keywords) if(!key.failed) { final switch(checkKeyword(key.name)) { case KeywordResult.Accept: mixin(accept!("Value", "key.value")); case KeywordResult.Continue: break; case KeywordResult.Failed: key.failed = true; numKeys--; break; } } if(numKeys == 0) { lexIdent(); return; } advanceChar(ErrorOnEOF.No); } while(!isEOF); foreach(ref key; keywords) if(!key.failed) if(key.name.length == tokenLength32+1) mixin(accept!("Value", "key.value")); mixin(accept!"Ident"); } /// Lex Ident private void lexIdent() { if(tokenLength == 0) assert(isAlpha(ch) || ch == '_'); while(!isEOF && isIdentChar(ch)) advanceChar(ErrorOnEOF.No); mixin(accept!"Ident"); } /// Lex regular string private void lexRegularString() { assert(ch == '"'); Appender!string buf; 
// NOTE(review): this chunk begins inside the double-quoted string lexer;
// the function's opening (and the `buf` appender setup) is above this chunk.
size_t spanStart = nextPos; // Doesn't include current character

// Flush the pending run of unescaped characters [spanStart..current) into `buf`.
void updateBuf()
{
	if(location.index == spanStart)
		return;

	buf.put( source[spanStart..location.index] );
}

advanceChar(ErrorOnEOF.Yes);
while(ch != '"')
{
	if(ch == '\\')
	{
		updateBuf();

		// Try to match a single-character escape sequence after the backslash.
		bool wasEscSequence = true;
		if(hasNextCh)
		{
			switch(nextCh)
			{
			case 'n':  buf.put('\n'); break;
			case 'r':  buf.put('\r'); break;
			case 't':  buf.put('\t'); break;
			case '"':  buf.put('\"'); break;
			case '\\': buf.put('\\'); break;
			default: wasEscSequence = false; break;
			}
		}

		if(wasEscSequence)
		{
			// Recognized escape: skip the backslash here; the escaped
			// character itself is consumed by the advanceChar at the
			// bottom of the loop.
			advanceChar(ErrorOnEOF.Yes);
			spanStart = nextPos;
		}
		else
		{
			// Not a recognized escape: treat as a line continuation —
			// consume whitespace/newlines after the backslash
			// (comments disallowed inside a string literal).
			eatWhite(false);
			spanStart = location.index;
		}
	}
	else if(isNewline(ch))
		error("Unescaped newlines are only allowed in raw strings, not regular strings.");

	advanceChar(ErrorOnEOF.Yes);
}

updateBuf();
advanceChar(ErrorOnEOF.No); // Skip closing double-quote
mixin(accept!("Value", "buf.data"));
}

/// Lex raw string
private void lexRawString()
{
	assert(ch == '`');

	// Raw strings have no escapes: scan straight to the closing back-tick.
	do
		advanceChar(ErrorOnEOF.Yes);
	while(ch != '`');

	advanceChar(ErrorOnEOF.No); // Skip closing back-tick
	mixin(accept!("Value", "tokenData[1..$-1]")); // Strip surrounding back-ticks
}

/// Lex character literal
private void lexCharacter()
{
	assert(ch == '\'');
	advanceChar(ErrorOnEOF.Yes); // Skip opening single-quote

	dchar value;
	if(ch == '\\')
	{
		advanceChar(ErrorOnEOF.Yes); // Skip escape backslash
		switch(ch)
		{
		case 'n':  value = '\n'; break;
		case 'r':  value = '\r'; break;
		case 't':  value = '\t'; break;
		case '\'': value = '\''; break;
		case '\\': value = '\\'; break;
		default: error("Invalid escape sequence.");
		}
	}
	else if(isNewline(ch))
		error("Newline not allowed in character literal.");
	else
		value = ch;
	advanceChar(ErrorOnEOF.Yes); // Skip the character itself

	if(ch == '\'')
		advanceChar(ErrorOnEOF.No); // Skip closing single-quote
	else
		error("Expected closing single-quote.");

	mixin(accept!("Value", "value"));
}

/// Lex base64 binary literal
private void lexBinary()
{
	assert(ch == '[');
	advanceChar(ErrorOnEOF.Yes);

	// Skip whitespace (including newlines) between base64 characters.
	void eatBase64Whitespace()
	{
		while(!isEOF && isWhite(ch))
		{
			if(isNewline(ch))
				advanceChar(ErrorOnEOF.Yes);

			if(!isEOF && isWhite(ch))
				eatWhite();
		}
	}

	eatBase64Whitespace();

	// Iterates all valid base64 characters, ending at ']'.
	// Skips all whitespace. Throws on invalid chars.
	struct Base64InputRange
	{
		Lexer lexer;
		private bool isInited = false;
		// Tracks input length mod 4 so the "multiple of 4" rule can be
		// enforced when the closing ']' is reached.
		private int numInputCharsMod4 = 0;

		@property bool empty()
		{
			if(lexer.ch == ']')
			{
				if(numInputCharsMod4 != 0)
					lexer.error("Length of Base64 encoding must be a multiple of 4. ("~to!string(numInputCharsMod4)~")");

				return true;
			}

			return false;
		}

		@property dchar front() { return lexer.ch; }

		void popFront()
		{
			auto lex = lexer;

			if(!isInited)
			{
				// Count the very first character on the first pop.
				if(lexer.isBase64(lexer.ch))
				{
					numInputCharsMod4++;
					numInputCharsMod4 %= 4;
				}

				isInited = true;
			}

			lex.advanceChar(lex.ErrorOnEOF.Yes);
			eatBase64Whitespace();

			if(lex.isEOF)
				lex.error("Unexpected end of file.");

			if(lex.ch != ']')
			{
				if(!lex.isBase64(lex.ch))
					lex.error("Invalid character in base64 binary literal.");

				numInputCharsMod4++;
				numInputCharsMod4 %= 4;
			}
		}
	}

	// This is a slow ugly hack. It's necessary because Base64.decode
	// currently requires the source to have known length.
	//TODO: Remove this when DMD issue #9543 is fixed.
	dchar[] tmpBuf = array(Base64InputRange(this));

	Appender!(ubyte[]) outputBuf;
	// Ugly workaround for DMD issue #9102
	//TODO: Remove this when DMD #9102 is fixed
	struct OutputBuf
	{
		void put(ubyte ch)
		{
			outputBuf.put(ch);
		}
	}

	try
		//Base64.decode(Base64InputRange(this), OutputBuf());
		Base64.decode(tmpBuf, OutputBuf());

	//TODO: Starting with dmd 2.062, this should be a Base64Exception
	catch(Exception e)
		error("Invalid character in base64 binary literal.");

	advanceChar(ErrorOnEOF.No); // Skip ']'
	mixin(accept!("Value", "outputBuf.data"));
}

// Convert an unsigned decimal fragment to a BigInt, applying the sign.
private BigInt toBigInt(bool isNegative, string absValue)
{
	auto num = BigInt(absValue);
	assert(num >= 0);

	if(isNegative)
		num = -num;

	return num;
}

/// Lex [0-9]+, but without emitting a token.
/// This is used by the other numeric parsing functions.
private string lexNumericFragment()
{
	if(!isDigit(ch))
		error("Expected a digit 0-9.");

	auto spanStart = location.index;

	do
	{
		advanceChar(ErrorOnEOF.No);
	} while(!isEOF && isDigit(ch));

	return source[spanStart..location.index];
}

/// Lex anything that starts with 0-9 or '-'. Ints, floats, dates, etc.
private void lexNumeric(LookaheadTokenInfo laTokenInfo = LookaheadTokenInfo.init)
{
	bool isNegative;
	string firstFragment;
	if(laTokenInfo.exists)
	{
		// A previous lexDate already consumed this numeric fragment
		// (plain Date followed by a number); reuse it.
		firstFragment = laTokenInfo.numericFragment;
		isNegative    = laTokenInfo.isNegative;
	}
	else
	{
		assert(ch == '-' || ch == '.' || isDigit(ch));

		// Check for negative
		isNegative = ch == '-';
		if(isNegative)
			advanceChar(ErrorOnEOF.Yes);

		// Some floating point with omitted leading zero?
		if(ch == '.')
		{
			lexFloatingPoint("");
			return;
		}

		firstFragment = lexNumericFragment();
	}

	// The suffix character (or following punctuation) decides the type:
	// Long integer (64-bit signed)?
	if(ch == 'L' || ch == 'l')
	{
		advanceChar(ErrorOnEOF.No);

		// BigInt(long.min) is a workaround for DMD issue #9548
		auto num = toBigInt(isNegative, firstFragment);
		if(num < BigInt(long.min) || num > long.max)
			error(tokenStart, "Value doesn't fit in 64-bit signed long integer: "~to!string(num));

		mixin(accept!("Value", "num.toLong()"));
	}
	// Float (32-bit signed)?
	else if(ch == 'F' || ch == 'f')
	{
		auto value = to!float(tokenData);
		advanceChar(ErrorOnEOF.No);
		mixin(accept!("Value", "value"));
	}
	// Double float (64-bit signed) with suffix?
	// (A 'd' followed by ':' is a time span like "2d:...", not a suffix.)
	else if((ch == 'D' || ch == 'd') && !lookahead(':') )
	{
		auto value = to!double(tokenData);
		advanceChar(ErrorOnEOF.No);
		mixin(accept!("Value", "value"));
	}
	// Decimal (128+ bits signed)?
	else if(
		(ch == 'B' || ch == 'b') &&
		(lookahead('D') || lookahead('d'))
	)
	{
		auto value = to!real(tokenData);
		advanceChar(ErrorOnEOF.No);
		advanceChar(ErrorOnEOF.No);
		mixin(accept!("Value", "value"));
	}
	// Some floating point?
	else if(ch == '.')
		lexFloatingPoint(firstFragment);
	// Some date?
	else if(ch == '/' && hasNextCh && isDigit(nextCh))
		lexDate(isNegative, firstFragment);
	// Some time span?
else if(ch == ':' || ch == 'd')
	lexTimeSpan(isNegative, firstFragment);
// Integer (32-bit signed)?
else if(isEndOfNumber())
{
	auto num = toBigInt(isNegative, firstFragment);
	if(num < int.min || num > int.max)
		error(tokenStart, "Value doesn't fit in 32-bit signed integer: "~to!string(num));

	mixin(accept!("Value", "num.toInt()"));
}
// Invalid suffix
else
	error("Invalid integer suffix.");
}

/// Lex any floating-point literal (after the initial numeric fragment was lexed)
private void lexFloatingPoint(string firstPart)
{
	assert(ch == '.');
	advanceChar(ErrorOnEOF.No);

	auto secondPart = lexNumericFragment();

	try
	{
		// Double float (64-bit signed) with suffix?
		if(ch == 'D' || ch == 'd')
		{
			auto value = to!double(tokenData);
			advanceChar(ErrorOnEOF.No);
			mixin(accept!("Value", "value"));
		}
		// Float (32-bit signed)?
		else if(ch == 'F' || ch == 'f')
		{
			auto value = to!float(tokenData);
			advanceChar(ErrorOnEOF.No);
			mixin(accept!("Value", "value"));
		}
		// Decimal (128+ bits signed)?
		// Suffix must be exactly "BD" (any case) followed by end-of-number.
		else if(ch == 'B' || ch == 'b')
		{
			auto value = to!real(tokenData);
			advanceChar(ErrorOnEOF.Yes);

			if(!isEOF && (ch == 'D' || ch == 'd'))
			{
				advanceChar(ErrorOnEOF.No);
				if(isEndOfNumber())
					mixin(accept!("Value", "value"));
			}

			error("Invalid floating point suffix.");
		}
		// Double float (64-bit signed) without suffix?
		else if(isEOF || !isIdentChar(ch))
		{
			auto value = to!double(tokenData);
			mixin(accept!("Value", "value"));
		}
		// Invalid suffix
		else
			error("Invalid floating point suffix.");
	}
	catch(ConvException e)
		error("Invalid floating point literal.");
}

/// Build a Date from the lexed year/month/day fragments, range-checking
/// each component. Errors are reported at the token's start location.
private Date makeDate(bool isNegative, string yearStr, string monthStr, string dayStr)
{
	BigInt biTmp;

	biTmp = BigInt(yearStr);
	if(isNegative)
		biTmp = -biTmp;
	if(biTmp < int.min || biTmp > int.max)
		error(tokenStart, "Date's year is out of range. (Must fit within a 32-bit signed int.)");
	auto year = biTmp.toInt();

	biTmp = BigInt(monthStr);
	if(biTmp < 1 || biTmp > 12)
		error(tokenStart, "Date's month is out of range.");
	auto month = biTmp.toInt();

	biTmp = BigInt(dayStr);
	if(biTmp < 1 || biTmp > 31)
		// Fixed: this previously said "Date's month is out of range."
		// even though it is the *day* component being validated here.
		error(tokenStart, "Date's day is out of range.");
	auto day = biTmp.toInt();

	return Date(year, month, day);
}

/// Build a DateTimeFrac (DateTime + fractional seconds) from the lexed
/// time-of-day fragments. `secondStr`/`millisecondStr` may be "" when absent.
/// A negative time is applied as a negative offset from midnight of `date`.
private DateTimeFrac makeDateTimeFrac(
	bool isNegative, Date date,
	string hourStr, string minuteStr,
	string secondStr, string millisecondStr
)
{
	BigInt biTmp;

	biTmp = BigInt(hourStr);
	if(biTmp < int.min || biTmp > int.max)
		error(tokenStart, "Datetime's hour is out of range.");
	auto numHours = biTmp.toInt();

	biTmp = BigInt(minuteStr);
	if(biTmp < 0 || biTmp > int.max)
		error(tokenStart, "Datetime's minute is out of range.");
	auto numMinutes = biTmp.toInt();

	int numSeconds = 0;
	if(secondStr != "")
	{
		biTmp = BigInt(secondStr);
		if(biTmp < 0 || biTmp > int.max)
			error(tokenStart, "Datetime's second is out of range.");
		numSeconds = biTmp.toInt();
	}

	int millisecond = 0;
	if(millisecondStr != "")
	{
		biTmp = BigInt(millisecondStr);
		if(biTmp < 0 || biTmp > int.max)
			error(tokenStart, "Datetime's millisecond is out of range.");
		millisecond = biTmp.toInt();

		// The fragment is a fraction of a second, not a raw millisecond
		// count: ".1" means 100ms and ".12" means 120ms.
		if(millisecondStr.length == 1)
			millisecond *= 100;
		else if(millisecondStr.length == 2)
			millisecond *= 10;
	}

	Duration fracSecs = millisecond.msecs;
	auto offset = hours(numHours) + minutes(numMinutes) + seconds(numSeconds);

	if(isNegative)
	{
		offset   = -offset;
		fracSecs = -fracSecs;
	}

	return DateTimeFrac(DateTime(date) + offset, fracSecs);
}

/// Build a Duration from the lexed time-span fragments.
/// `dayStr` and `millisecondStr` may be "" when absent.
private Duration makeDuration(
	bool isNegative, string dayStr,
	string hourStr, string minuteStr,
	string secondStr, string millisecondStr
)
{
	BigInt biTmp;

	long day = 0;
	if(dayStr != "")
	{
		biTmp = BigInt(dayStr);
		if(biTmp < long.min || biTmp > long.max)
			error(tokenStart, "Time span's day is out of range.");
		day = biTmp.toLong();
	}

	biTmp = BigInt(hourStr);
	if(biTmp < long.min || biTmp > long.max)
		error(tokenStart, "Time span's hour is out 
$-numMinutesStr.length];
	}

	long numHours = 0;
	long numMinutes = 0;
	bool isUnknown = false;
	try
	{
		switch(numHoursStr.length)
		{
		case 0:
			// Form "+:MM" — minutes only.
			if(numMinutesStr.length == 3)
			{
				numHours   = 0;
				numMinutes = to!long(numMinutesStr[1..$]);
			}
			else
				isUnknown = true;
			break;

		case 1:
		case 2:
			// Forms "+H"/"+HH" and "+H:MM"/"+HH:MM".
			if(numMinutesStr.length == 0)
			{
				numHours   = to!long(numHoursStr);
				numMinutes = 0;
			}
			else if(numMinutesStr.length == 3)
			{
				numHours   = to!long(numHoursStr);
				numMinutes = to!long(numMinutesStr[1..$]);
			}
			else
				isUnknown = true;
			break;

		default:
			if(numMinutesStr.length == 0)
			{
				// Yes, this is correct (deliberate Java-SDL compatibility
				// quirk: a 3+-digit run is treated as minutes, dropping
				// its first character — see the "GMT+004" unittests).
				numHours   = 0;
				numMinutes = to!long(numHoursStr[1..$]);
			}
			else
				isUnknown = true;
			break;
		}
	}
	catch(ConvException e)
		isUnknown = true;

	if(isUnknown)
		return Nullable!Duration(); // Unknown timezone

	auto timeZoneOffset = hours(numHours) + minutes(numMinutes);
	if(isNegative)
		timeZoneOffset = -timeZoneOffset;

	// Timezone valid
	return Nullable!Duration(timeZoneOffset);
}

/// Lex date or datetime (after the initial numeric fragment was lexed)
private void lexDate(bool isDateNegative, string yearStr)
{
	assert(ch == '/');

	// Lex months
	advanceChar(ErrorOnEOF.Yes); // Skip '/'
	auto monthStr = lexNumericFragment();

	// Lex days
	if(ch != '/')
		error("Invalid date format: Missing days.");
	advanceChar(ErrorOnEOF.Yes); // Skip '/'
	auto dayStr = lexNumericFragment();

	auto date = makeDate(isDateNegative, yearStr, monthStr, dayStr);

	if(!isEndOfNumber() && ch != '/')
		error("Dates cannot have suffixes.");

	// Date?
	if(isEOF)
		mixin(accept!("Value", "date"));

	auto endOfDate = location;

	// Skip non-newline whitespace, comments, and line continuations between
	// the date and a possible time-of-day part (e.g. "2013/2/22 /*c*/ 07:53").
	while(
		!isEOF &&
		( ch == '\\' || ch == '/' || (isWhite(ch) && !isNewline(ch)) )
	)
	{
		if(ch == '\\' && hasNextCh && isNewline(nextCh))
		{
			advanceChar(ErrorOnEOF.Yes);
			if(isAtNewline())
				advanceChar(ErrorOnEOF.Yes);
			advanceChar(ErrorOnEOF.No);
		}

		eatWhite();
	}

	// Date?
	if(isEOF || (!isDigit(ch) && ch != '-'))
		mixin(accept!("Value", "date", "", "endOfDate.index"));

	auto startOfTime = location;

	// Is time negative?
	bool isTimeNegative = ch == '-';
	if(isTimeNegative)
		advanceChar(ErrorOnEOF.Yes);

	// Lex hours
	auto hourStr = ch == '.'? "" : lexNumericFragment();

	// Lex minutes
	if(ch != ':')
	{
		// No minutes found. Therefore we had a plain Date followed
		// by a numeric literal, not a DateTime.
		// Stash the already-consumed fragment so lexNumeric can reuse it.
		lookaheadTokenInfo.exists          = true;
		lookaheadTokenInfo.numericFragment = hourStr;
		lookaheadTokenInfo.isNegative      = isTimeNegative;
		lookaheadTokenInfo.tokenStart      = startOfTime;
		mixin(accept!("Value", "date", "", "endOfDate.index"));
	}
	advanceChar(ErrorOnEOF.Yes); // Skip ':'
	auto minuteStr = lexNumericFragment();

	// Lex seconds, if exists
	string secondStr;
	if(ch == ':')
	{
		advanceChar(ErrorOnEOF.Yes); // Skip ':'
		secondStr = lexNumericFragment();
	}

	// Lex milliseconds, if exists
	string millisecondStr;
	if(ch == '.')
	{
		advanceChar(ErrorOnEOF.Yes); // Skip '.'
		millisecondStr = lexNumericFragment();
	}

	auto dateTimeFrac = makeDateTimeFrac(isTimeNegative, date, hourStr, minuteStr, secondStr, millisecondStr);

	// Lex zone, if exists
	if(ch == '-')
	{
		advanceChar(ErrorOnEOF.Yes); // Skip '-'
		auto timezoneStart = location;

		if(!isAlpha(ch))
			error("Invalid timezone format.");

		while(!isEOF && !isWhite(ch))
			advanceChar(ErrorOnEOF.No);

		auto timezoneStr = source[timezoneStart.index..location.index];
		if(timezoneStr.startsWith("GMT"))
		{
			auto isoPart = timezoneStr["GMT".length..$];
			auto offset = getTimeZoneOffset(isoPart);

			if(offset.isNull())
			{
				// Unknown time zone
				mixin(accept!("Value", "DateTimeFracUnknownZone(dateTimeFrac.dateTime, dateTimeFrac.fracSecs, timezoneStr)"));
			}
			else
			{
				auto timezone = new immutable SimpleTimeZone(offset.get());
				auto fsecs = dateTimeFrac.fracSecs;
				mixin(accept!("Value", "SysTime(dateTimeFrac.dateTime, fsecs, timezone)"));
			}
		}

		// Not a GMT offset: try to resolve a named (POSIX) timezone.
		try
		{
			auto timezone = PosixTimeZone.getTimeZone(timezoneStr);
			if (timezone)
			{
				auto fsecs = dateTimeFrac.fracSecs;
				mixin(accept!("Value", "SysTime(dateTimeFrac.dateTime, fsecs, timezone)"));
			}
		}
		catch(TimeException e)
		{
			// Time zone not found. So just move along to "Unknown time zone" below.
		}

		// Unknown time zone
		mixin(accept!("Value", "DateTimeFracUnknownZone(dateTimeFrac.dateTime, dateTimeFrac.fracSecs, timezoneStr)"));
	}

	if(!isEndOfNumber())
		error("Date-Times cannot have suffixes.");

	mixin(accept!("Value", "dateTimeFrac"));
}

/// Lex time span (after the initial numeric fragment was lexed)
private void lexTimeSpan(bool isNegative, string firstPart)
{
	assert(ch == ':' || ch == 'd');

	string dayStr = "";
	string hourStr;

	// Lexed days?
	bool hasDays = ch == 'd';
	if(hasDays)
	{
		dayStr = firstPart;
		advanceChar(ErrorOnEOF.Yes); // Skip 'd'

		// Lex hours
		if(ch != ':')
			error("Invalid time span format: Missing hours.");
		advanceChar(ErrorOnEOF.Yes); // Skip ':'
		hourStr = lexNumericFragment();
	}
	else
		hourStr = firstPart;

	// Lex minutes
	if(ch != ':')
		error("Invalid time span format: Missing minutes.");
	advanceChar(ErrorOnEOF.Yes); // Skip ':'
	auto minuteStr = lexNumericFragment();

	// Lex seconds
	if(ch != ':')
		error("Invalid time span format: Missing seconds.");
	advanceChar(ErrorOnEOF.Yes); // Skip ':'
	auto secondStr = lexNumericFragment();

	// Lex milliseconds, if exists
	string millisecondStr = "";
	if(ch == '.')
	{
		advanceChar(ErrorOnEOF.Yes); // Skip '.'
		millisecondStr = lexNumericFragment();
	}

	if(!isEndOfNumber())
		error("Time spans cannot have suffixes.");

	auto duration = makeDuration(isNegative, dayStr, hourStr, minuteStr, secondStr, millisecondStr);
	mixin(accept!("Value", "duration"));
}

/// Advances past whitespace and comments
private void eatWhite(bool allowComments=true)
{
	// -- Comment/Whitespace Lexer -------------

	enum State
	{
		normal,
		lineComment,  // Got "#" or "//" or "--", Eating everything until newline
		blockComment, // Got "/*", Eating everything until "*/"
	}

	if(isEOF)
		return;

	Location commentStart;
	State state = State.normal;

	// Line-continuation handling: after a '\' we must see exactly one
	// newline (with only whitespace in between) before any other content.
	bool consumeNewlines = false;
	bool hasConsumedNewline = false;
	while(true)
	{
		final switch(state)
		{
		case State.normal:

			if(ch == '\\')
			{
				commentStart = location;
				consumeNewlines = true;
				hasConsumedNewline = false;
			}

			else if(ch == '#')
			{
				if(!allowComments)
					return;

				commentStart = location;
				state = State.lineComment;
				continue;
			}

			else if(ch == '/' || ch == '-')
			{
				commentStart = location;
				if(lookahead(ch))
				{
					// "//" or "--" line comment
					if(!allowComments)
						return;

					advanceChar(ErrorOnEOF.No);
					state = State.lineComment;
					continue;
				}
				else if(ch == '/' && lookahead('*'))
				{
					// "/*" block comment
					if(!allowComments)
						return;

					advanceChar(ErrorOnEOF.No);
					state = State.blockComment;
					continue;
				}
				else
					return; // Done
			}
			else if(isAtNewline())
			{
				if(consumeNewlines)
					hasConsumedNewline = true;
				else
					return; // Done
			}
			else if(!isWhite(ch))
			{
				if(consumeNewlines)
				{
					if(hasConsumedNewline)
						return; // Done
					else
						error("Only whitespace can come between a line-continuation backslash and the following newline.");
				}
				else
					return; // Done
			}

			break;

		case State.lineComment:
			if(lookahead(&isNewline))
				state = State.normal;
			break;

		case State.blockComment:
			if(ch == '*' && lookahead('/'))
			{
				advanceChar(ErrorOnEOF.No);
				state = State.normal;
			}
			break;
		}

		advanceChar(ErrorOnEOF.No);
		if(isEOF)
		{
			// Reached EOF

			if(consumeNewlines && !hasConsumedNewline)
				error("Missing newline after line-continuation backslash.");
			else if(state == State.blockComment)
				error(commentStart, "Unterminated block comment.");
			else
				return; // Done, reached EOF
		}
	}
}
}

version(sdlangUnittest)
{
	import std.stdio;

	private auto loc = Location("filename", 0, 0, 0);
	private auto loc2 = Location("a", 1, 1, 1);

	unittest
	{
		// Token equality deliberately ignores location and extra data.
		assert([Token(symbol!"EOL",loc) ] == [Token(symbol!"EOL",loc) ] );
		assert([Token(symbol!"EOL",loc,Value(7),"A")] == [Token(symbol!"EOL",loc2,Value(7),"B")] );
	}

	private int numErrors = 0;
	// Lex `source` and report (to stderr) any mismatch against `expected`.
	private void testLex(string source, Token[] expected, bool test_locations = false, string file=__FILE__, size_t line=__LINE__)
	{
		Token[] actual;
		try
			actual = lexSource(source, "filename");
		catch(SDLangParseException e)
		{
			numErrors++;
			stderr.writeln(file, "(", line, "): testLex failed on: ", source);
			stderr.writeln(" Expected:");
			stderr.writeln(" ", expected);
			stderr.writeln(" Actual: SDLangParseException thrown:");
			stderr.writeln(" ", e.msg);
			return;
		}

		bool is_same = actual == expected;
		if (is_same && test_locations) {
			// Token opEquals ignores location, so compare them explicitly.
			is_same = actual.map!(t => t.location).equal(expected.map!(t => t.location));
		}

		if(!is_same)
		{
			numErrors++;
			stderr.writeln(file, "(", line, "): testLex failed on: ", source);
			stderr.writeln(" Expected:");
			stderr.writeln(" ", expected);
			stderr.writeln(" Actual:");
			stderr.writeln(" ", actual);

			if(expected.length > 1 || actual.length > 1)
			{
				stderr.writeln(" expected.length: ", expected.length);
				stderr.writeln(" actual.length: ", actual.length);

				if(actual.length == expected.length)
				foreach(i; 0..actual.length)
				if(actual[i] != expected[i])
				{
					stderr.writeln(" Unequal at index #", i, ":");
					stderr.writeln(" Expected:");
					stderr.writeln(" ", expected[i]);
					stderr.writeln(" Actual:");
					stderr.writeln(" ", actual[i]);
				}
			}
		}
	}

	// Lex `source` and report (to stderr) if it does NOT throw.
	private void testLexThrows(string file=__FILE__, size_t line=__LINE__)(string source)
	{
		bool hadException = false;
		Token[] actual;
		try
			actual = lexSource(source, "filename");
		catch(SDLangParseException e)
			hadException = true;

		if(!hadException)
		{
			numErrors++;
			stderr.writeln(file, "(", line, "): testLex failed on: ", source);
			stderr.writeln(" Expected 
SDLangParseException"); stderr.writeln(" Actual:"); stderr.writeln(" ", actual); } } } version(sdlangUnittest) unittest { writeln("Unittesting sdlang lexer..."); stdout.flush(); testLex("", []); testLex(" ", []); testLex("\\\n", []); testLex("/*foo*/", []); testLex("/* multiline \n comment */", []); testLex("/* * */", []); testLexThrows("/* "); testLex(":", [ Token(symbol!":", loc) ]); testLex("=", [ Token(symbol!"=", loc) ]); testLex("{", [ Token(symbol!"{", loc) ]); testLex("}", [ Token(symbol!"}", loc) ]); testLex(";", [ Token(symbol!"EOL",loc) ]); testLex("\n", [ Token(symbol!"EOL",loc) ]); testLex("foo", [ Token(symbol!"Ident",loc,Value(null),"foo") ]); testLex("_foo", [ Token(symbol!"Ident",loc,Value(null),"_foo") ]); testLex("foo.bar", [ Token(symbol!"Ident",loc,Value(null),"foo.bar") ]); testLex("foo-bar", [ Token(symbol!"Ident",loc,Value(null),"foo-bar") ]); testLex("foo.", [ Token(symbol!"Ident",loc,Value(null),"foo.") ]); testLex("foo-", [ Token(symbol!"Ident",loc,Value(null),"foo-") ]); testLexThrows(".foo"); testLex("foo bar", [ Token(symbol!"Ident",loc,Value(null),"foo"), Token(symbol!"Ident",loc,Value(null),"bar"), ]); testLex("foo \\ \n \n bar", [ Token(symbol!"Ident",loc,Value(null),"foo"), Token(symbol!"Ident",loc,Value(null),"bar"), ]); testLex("foo \\ \n \\ \n bar", [ Token(symbol!"Ident",loc,Value(null),"foo"), Token(symbol!"Ident",loc,Value(null),"bar"), ]); testLexThrows("foo \\ "); testLexThrows("foo \\ bar"); testLexThrows("foo \\ \n \\ "); testLexThrows("foo \\ \n \\ bar"); testLex("foo : = { } ; \n bar \n", [ Token(symbol!"Ident",loc,Value(null),"foo"), Token(symbol!":",loc), Token(symbol!"=",loc), Token(symbol!"{",loc), Token(symbol!"}",loc), Token(symbol!"EOL",loc), Token(symbol!"EOL",loc), Token(symbol!"Ident",loc,Value(null),"bar"), Token(symbol!"EOL",loc), ]); testLexThrows("<"); testLexThrows("*"); testLexThrows(`\`); // Integers testLex( "7", [ Token(symbol!"Value",loc,Value(cast( int) 7)) ]); testLex( "-7", [ 
Token(symbol!"Value",loc,Value(cast( int)-7)) ]); testLex( "7L", [ Token(symbol!"Value",loc,Value(cast(long) 7)) ]); testLex( "7l", [ Token(symbol!"Value",loc,Value(cast(long) 7)) ]); testLex("-7L", [ Token(symbol!"Value",loc,Value(cast(long)-7)) ]); testLex( "0", [ Token(symbol!"Value",loc,Value(cast( int) 0)) ]); testLex( "-0", [ Token(symbol!"Value",loc,Value(cast( int) 0)) ]); testLex("7/**/", [ Token(symbol!"Value",loc,Value(cast( int) 7)) ]); testLex("7#", [ Token(symbol!"Value",loc,Value(cast( int) 7)) ]); testLex("7 A", [ Token(symbol!"Value",loc,Value(cast(int)7)), Token(symbol!"Ident",loc,Value( null),"A"), ]); testLexThrows("7A"); testLexThrows("-A"); testLexThrows(`-""`); testLex("7;", [ Token(symbol!"Value",loc,Value(cast(int)7)), Token(symbol!"EOL",loc), ]); // Floats testLex("1.2F" , [ Token(symbol!"Value",loc,Value(cast( float)1.2)) ]); testLex("1.2f" , [ Token(symbol!"Value",loc,Value(cast( float)1.2)) ]); testLex("1.2" , [ Token(symbol!"Value",loc,Value(cast(double)1.2)) ]); testLex("1.2D" , [ Token(symbol!"Value",loc,Value(cast(double)1.2)) ]); testLex("1.2d" , [ Token(symbol!"Value",loc,Value(cast(double)1.2)) ]); testLex("1.2BD", [ Token(symbol!"Value",loc,Value(cast( real)1.2)) ]); testLex("1.2bd", [ Token(symbol!"Value",loc,Value(cast( real)1.2)) ]); testLex("1.2Bd", [ Token(symbol!"Value",loc,Value(cast( real)1.2)) ]); testLex("1.2bD", [ Token(symbol!"Value",loc,Value(cast( real)1.2)) ]); testLex(".2F" , [ Token(symbol!"Value",loc,Value(cast( float)0.2)) ]); testLex(".2" , [ Token(symbol!"Value",loc,Value(cast(double)0.2)) ]); testLex(".2D" , [ Token(symbol!"Value",loc,Value(cast(double)0.2)) ]); testLex(".2BD", [ Token(symbol!"Value",loc,Value(cast( real)0.2)) ]); testLex("-1.2F" , [ Token(symbol!"Value",loc,Value(cast( float)-1.2)) ]); testLex("-1.2" , [ Token(symbol!"Value",loc,Value(cast(double)-1.2)) ]); testLex("-1.2D" , [ Token(symbol!"Value",loc,Value(cast(double)-1.2)) ]); testLex("-1.2BD", [ Token(symbol!"Value",loc,Value(cast( 
real)-1.2)) ]); testLex("-.2F" , [ Token(symbol!"Value",loc,Value(cast( float)-0.2)) ]); testLex("-.2" , [ Token(symbol!"Value",loc,Value(cast(double)-0.2)) ]); testLex("-.2D" , [ Token(symbol!"Value",loc,Value(cast(double)-0.2)) ]); testLex("-.2BD", [ Token(symbol!"Value",loc,Value(cast( real)-0.2)) ]); testLex( "0.0" , [ Token(symbol!"Value",loc,Value(cast(double)0.0)) ]); testLex( "0.0F" , [ Token(symbol!"Value",loc,Value(cast( float)0.0)) ]); testLex( "0.0BD", [ Token(symbol!"Value",loc,Value(cast( real)0.0)) ]); testLex("-0.0" , [ Token(symbol!"Value",loc,Value(cast(double)0.0)) ]); testLex("-0.0F" , [ Token(symbol!"Value",loc,Value(cast( float)0.0)) ]); testLex("-0.0BD", [ Token(symbol!"Value",loc,Value(cast( real)0.0)) ]); testLex( "7F" , [ Token(symbol!"Value",loc,Value(cast( float)7.0)) ]); testLex( "7D" , [ Token(symbol!"Value",loc,Value(cast(double)7.0)) ]); testLex( "7BD" , [ Token(symbol!"Value",loc,Value(cast( real)7.0)) ]); testLex( "0F" , [ Token(symbol!"Value",loc,Value(cast( float)0.0)) ]); testLex( "0D" , [ Token(symbol!"Value",loc,Value(cast(double)0.0)) ]); testLex( "0BD" , [ Token(symbol!"Value",loc,Value(cast( real)0.0)) ]); testLex("-0F" , [ Token(symbol!"Value",loc,Value(cast( float)0.0)) ]); testLex("-0D" , [ Token(symbol!"Value",loc,Value(cast(double)0.0)) ]); testLex("-0BD" , [ Token(symbol!"Value",loc,Value(cast( real)0.0)) ]); testLex("1.2 F", [ Token(symbol!"Value",loc,Value(cast(double)1.2)), Token(symbol!"Ident",loc,Value( null),"F"), ]); testLexThrows("1.2A"); testLexThrows("1.2B"); testLexThrows("1.2BDF"); testLex("1.2;", [ Token(symbol!"Value",loc,Value(cast(double)1.2)), Token(symbol!"EOL",loc), ]); testLex("1.2F;", [ Token(symbol!"Value",loc,Value(cast(float)1.2)), Token(symbol!"EOL",loc), ]); testLex("1.2BD;", [ Token(symbol!"Value",loc,Value(cast(real)1.2)), Token(symbol!"EOL",loc), ]); // Booleans and null testLex("true", [ Token(symbol!"Value",loc,Value( true)) ]); testLex("false", [ Token(symbol!"Value",loc,Value(false)) 
]); testLex("on", [ Token(symbol!"Value",loc,Value( true)) ]); testLex("off", [ Token(symbol!"Value",loc,Value(false)) ]); testLex("null", [ Token(symbol!"Value",loc,Value( null)) ]); testLex("TRUE", [ Token(symbol!"Ident",loc,Value(null),"TRUE") ]); testLex("true ", [ Token(symbol!"Value",loc,Value(true)) ]); testLex("true ", [ Token(symbol!"Value",loc,Value(true)) ]); testLex("tru", [ Token(symbol!"Ident",loc,Value(null),"tru") ]); testLex("truX", [ Token(symbol!"Ident",loc,Value(null),"truX") ]); testLex("trueX", [ Token(symbol!"Ident",loc,Value(null),"trueX") ]); // Raw Backtick Strings testLex("`hello world`", [ Token(symbol!"Value",loc,Value(`hello world` )) ]); testLex("` hello world `", [ Token(symbol!"Value",loc,Value(` hello world ` )) ]); testLex("`hello \\t world`", [ Token(symbol!"Value",loc,Value(`hello \t world`)) ]); testLex("`hello \\n world`", [ Token(symbol!"Value",loc,Value(`hello \n world`)) ]); testLex("`hello \n world`", [ Token(symbol!"Value",loc,Value("hello \n world")) ]); testLex("`hello \r\n world`", [ Token(symbol!"Value",loc,Value("hello \r\n world")) ]); testLex("`hello \"world\"`", [ Token(symbol!"Value",loc,Value(`hello "world"` )) ]); testLexThrows("`foo"); testLexThrows("`"); // Double-Quote Strings testLex(`"hello world"`, [ Token(symbol!"Value",loc,Value("hello world" )) ]); testLex(`" hello world "`, [ Token(symbol!"Value",loc,Value(" hello world " )) ]); testLex(`"hello \t world"`, [ Token(symbol!"Value",loc,Value("hello \t world")) ]); testLex(`"hello \n world"`, [ Token(symbol!"Value",loc,Value("hello \n world")) ]); testLex("\"hello \\\n world\"", [ Token(symbol!"Value",loc,Value("hello world" )) ]); testLex("\"hello \\ \n world\"", [ Token(symbol!"Value",loc,Value("hello world" )) ]); testLex("\"hello \\ \n\n world\"", [ Token(symbol!"Value",loc,Value("hello world" )) ]); testLex(`"\"hello world\""`, [ Token(symbol!"Value",loc,Value(`"hello world"` )) ]); testLexThrows("\"hello \n world\""); testLexThrows(`"foo`); 
testLexThrows(`"`); // Characters testLex("'a'", [ Token(symbol!"Value",loc,Value(cast(dchar) 'a')) ]); testLex("'\\n'", [ Token(symbol!"Value",loc,Value(cast(dchar)'\n')) ]); testLex("'\\t'", [ Token(symbol!"Value",loc,Value(cast(dchar)'\t')) ]); testLex("'\t'", [ Token(symbol!"Value",loc,Value(cast(dchar)'\t')) ]); testLex("'\\''", [ Token(symbol!"Value",loc,Value(cast(dchar)'\'')) ]); testLex(`'\\'`, [ Token(symbol!"Value",loc,Value(cast(dchar)'\\')) ]); testLexThrows("'a"); testLexThrows("'aa'"); testLexThrows("''"); testLexThrows("'\\\n'"); testLexThrows("'\n'"); testLexThrows(`'\`); testLexThrows(`'\'`); testLexThrows("'"); // Unicode testLex("日本語", [ Token(symbol!"Ident",loc,Value(null), "日本語") ]); testLex("`おはよう、日本。`", [ Token(symbol!"Value",loc,Value(`おはよう、日本。`)) ]); testLex(`"おはよう、日本。"`, [ Token(symbol!"Value",loc,Value(`おはよう、日本。`)) ]); testLex("'月'", [ Token(symbol!"Value",loc,Value("月"d.dup[0])) ]); // Base64 Binary testLex("[aGVsbG8gd29ybGQ=]", [ Token(symbol!"Value",loc,Value(cast(ubyte[])"hello world".dup))]); testLex("[ aGVsbG8gd29ybGQ= ]", [ Token(symbol!"Value",loc,Value(cast(ubyte[])"hello world".dup))]); testLex("[\n aGVsbG8g \n \n d29ybGQ= \n]", [ Token(symbol!"Value",loc,Value(cast(ubyte[])"hello world".dup))]); testLexThrows("[aGVsbG8gd29ybGQ]"); // Ie: Not multiple of 4 testLexThrows("[ aGVsbG8gd29ybGQ ]"); // Date testLex( "1999/12/5", [ Token(symbol!"Value",loc,Value(Date( 1999, 12, 5))) ]); testLex( "2013/2/22", [ Token(symbol!"Value",loc,Value(Date( 2013, 2, 22))) ]); testLex("-2013/2/22", [ Token(symbol!"Value",loc,Value(Date(-2013, 2, 22))) ]); testLexThrows("7/"); testLexThrows("2013/2/22a"); testLexThrows("2013/2/22f"); testLex("1999/12/5\n", [ Token(symbol!"Value",loc,Value(Date(1999, 12, 5))), Token(symbol!"EOL",loc), ]); // DateTime, no timezone testLex( "2013/2/22 07:53", [ Token(symbol!"Value",loc,Value(DateTimeFrac(DateTime( 2013, 2, 22, 7, 53, 0)))) ]); testLex( "2013/2/22 \t 07:53", [ 
Token(symbol!"Value",loc,Value(DateTimeFrac(DateTime( 2013, 2, 22, 7, 53, 0)))) ]); testLex( "2013/2/22/*foo*/07:53", [ Token(symbol!"Value",loc,Value(DateTimeFrac(DateTime( 2013, 2, 22, 7, 53, 0)))) ]); testLex( "2013/2/22 /*foo*/ \\\n /*bar*/ 07:53", [ Token(symbol!"Value",loc,Value(DateTimeFrac(DateTime( 2013, 2, 22, 7, 53, 0)))) ]); testLex( "2013/2/22 /*foo*/ \\\n\n \n /*bar*/ 07:53", [ Token(symbol!"Value",loc,Value(DateTimeFrac(DateTime( 2013, 2, 22, 7, 53, 0)))) ]); testLex( "2013/2/22 /*foo*/ \\\n\\\n \\\n /*bar*/ 07:53", [ Token(symbol!"Value",loc,Value(DateTimeFrac(DateTime( 2013, 2, 22, 7, 53, 0)))) ]); testLex( "2013/2/22/*foo*/\\\n/*bar*/07:53", [ Token(symbol!"Value",loc,Value(DateTimeFrac(DateTime( 2013, 2, 22, 7, 53, 0)))) ]); testLex("-2013/2/22 07:53", [ Token(symbol!"Value",loc,Value(DateTimeFrac(DateTime(-2013, 2, 22, 7, 53, 0)))) ]); testLex( "2013/2/22 -07:53", [ Token(symbol!"Value",loc,Value(DateTimeFrac(DateTime( 2013, 2, 22, 0, 0, 0) - hours(7) - minutes(53)))) ]); testLex("-2013/2/22 -07:53", [ Token(symbol!"Value",loc,Value(DateTimeFrac(DateTime(-2013, 2, 22, 0, 0, 0) - hours(7) - minutes(53)))) ]); testLex( "2013/2/22 07:53:34", [ Token(symbol!"Value",loc,Value(DateTimeFrac(DateTime( 2013, 2, 22, 7, 53, 34)))) ]); testLex( "2013/2/22 07:53:34.123", [ Token(symbol!"Value",loc,Value(DateTimeFrac(DateTime( 2013, 2, 22, 7, 53, 34), 123.msecs))) ]); testLex( "2013/2/22 07:53:34.12", [ Token(symbol!"Value",loc,Value(DateTimeFrac(DateTime( 2013, 2, 22, 7, 53, 34), 120.msecs))) ]); testLex( "2013/2/22 07:53:34.1", [ Token(symbol!"Value",loc,Value(DateTimeFrac(DateTime( 2013, 2, 22, 7, 53, 34), 100.msecs))) ]); testLex( "2013/2/22 07:53.123", [ Token(symbol!"Value",loc,Value(DateTimeFrac(DateTime( 2013, 2, 22, 7, 53, 0), 123.msecs))) ]); testLex( "2013/2/22 34:65", [ Token(symbol!"Value",loc,Value(DateTimeFrac(DateTime( 2013, 2, 22, 0, 0, 0) + hours(34) + minutes(65) + seconds( 0)))) ]); testLex( "2013/2/22 34:65:77.123", [ 
Token(symbol!"Value",loc,Value(DateTimeFrac(DateTime( 2013, 2, 22, 0, 0, 0) + hours(34) + minutes(65) + seconds(77), 123.msecs))) ]); testLex( "2013/2/22 34:65.123", [ Token(symbol!"Value",loc,Value(DateTimeFrac(DateTime( 2013, 2, 22, 0, 0, 0) + hours(34) + minutes(65) + seconds( 0), 123.msecs))) ]); testLex( "2013/2/22 -34:65", [ Token(symbol!"Value",loc,Value(DateTimeFrac(DateTime( 2013, 2, 22, 0, 0, 0) - hours(34) - minutes(65) - seconds( 0)))) ]); testLex( "2013/2/22 -34:65:77.123", [ Token(symbol!"Value",loc,Value(DateTimeFrac(DateTime( 2013, 2, 22, 0, 0, 0) - hours(34) - minutes(65) - seconds(77), -123.msecs))) ]); testLex( "2013/2/22 -34:65.123", [ Token(symbol!"Value",loc,Value(DateTimeFrac(DateTime( 2013, 2, 22, 0, 0, 0) - hours(34) - minutes(65) - seconds( 0), -123.msecs))) ]); testLexThrows("2013/2/22 07:53a"); testLexThrows("2013/2/22 07:53f"); testLexThrows("2013/2/22 07:53:34.123a"); testLexThrows("2013/2/22 07:53:34.123f"); testLexThrows("2013/2/22a 07:53"); testLex(`2013/2/22 "foo"`, [ Token(symbol!"Value",loc,Value(Date(2013, 2, 22))), Token(symbol!"Value",loc,Value("foo")), ]); testLex("2013/2/22 07", [ Token(symbol!"Value",loc,Value(Date(2013, 2, 22))), Token(symbol!"Value",loc,Value(cast(int)7)), ]); testLex("2013/2/22 1.2F", [ Token(symbol!"Value",loc,Value(Date(2013, 2, 22))), Token(symbol!"Value",loc,Value(cast(float)1.2)), ]); testLex("2013/2/22 .2F", [ Token(symbol!"Value",loc,Value(Date(2013, 2, 22))), Token(symbol!"Value",loc,Value(cast(float)0.2)), ]); testLex("2013/2/22 -1.2F", [ Token(symbol!"Value",loc,Value(Date(2013, 2, 22))), Token(symbol!"Value",loc,Value(cast(float)-1.2)), ]); testLex("2013/2/22 -.2F", [ Token(symbol!"Value",loc,Value(Date(2013, 2, 22))), Token(symbol!"Value",loc,Value(cast(float)-0.2)), ]); // DateTime, with known timezone testLex( "2013/2/22 07:53-GMT+00:00", [ Token(symbol!"Value",loc,Value(SysTime(DateTime( 2013, 2, 22, 7, 53, 0), new immutable SimpleTimeZone( hours(0) )))) ]); testLex("-2013/2/22 
07:53-GMT+00:00", [ Token(symbol!"Value",loc,Value(SysTime(DateTime(-2013, 2, 22, 7, 53, 0), new immutable SimpleTimeZone( hours(0) )))) ]); testLex( "2013/2/22 -07:53-GMT+00:00", [ Token(symbol!"Value",loc,Value(SysTime(DateTime( 2013, 2, 22, 0, 0, 0) - hours(7) - minutes(53), new immutable SimpleTimeZone( hours(0) )))) ]); testLex("-2013/2/22 -07:53-GMT+00:00", [ Token(symbol!"Value",loc,Value(SysTime(DateTime(-2013, 2, 22, 0, 0, 0) - hours(7) - minutes(53), new immutable SimpleTimeZone( hours(0) )))) ]); testLex( "2013/2/22 07:53-GMT+02:10", [ Token(symbol!"Value",loc,Value(SysTime(DateTime( 2013, 2, 22, 7, 53, 0), new immutable SimpleTimeZone( hours(2)+minutes(10))))) ]); testLex( "2013/2/22 07:53-GMT-05:30", [ Token(symbol!"Value",loc,Value(SysTime(DateTime( 2013, 2, 22, 7, 53, 0), new immutable SimpleTimeZone(-hours(5)-minutes(30))))) ]); testLex( "2013/2/22 07:53:34-GMT+00:00", [ Token(symbol!"Value",loc,Value(SysTime(DateTime( 2013, 2, 22, 7, 53, 34), new immutable SimpleTimeZone( hours(0) )))) ]); testLex( "2013/2/22 07:53:34-GMT+02:10", [ Token(symbol!"Value",loc,Value(SysTime(DateTime( 2013, 2, 22, 7, 53, 34), new immutable SimpleTimeZone( hours(2)+minutes(10))))) ]); testLex( "2013/2/22 07:53:34-GMT-05:30", [ Token(symbol!"Value",loc,Value(SysTime(DateTime( 2013, 2, 22, 7, 53, 34), new immutable SimpleTimeZone(-hours(5)-minutes(30))))) ]); testLex( "2013/2/22 07:53:34.123-GMT+00:00", [ Token(symbol!"Value",loc,Value(SysTime(DateTime( 2013, 2, 22, 7, 53, 34), 123.msecs, new immutable SimpleTimeZone( hours(0) )))) ]); testLex( "2013/2/22 07:53:34.123-GMT+02:10", [ Token(symbol!"Value",loc,Value(SysTime(DateTime( 2013, 2, 22, 7, 53, 34), 123.msecs, new immutable SimpleTimeZone( hours(2)+minutes(10))))) ]); testLex( "2013/2/22 07:53:34.123-GMT-05:30", [ Token(symbol!"Value",loc,Value(SysTime(DateTime( 2013, 2, 22, 7, 53, 34), 123.msecs, new immutable SimpleTimeZone(-hours(5)-minutes(30))))) ]); testLex( "2013/2/22 07:53.123-GMT+00:00", [ 
Token(symbol!"Value",loc,Value(SysTime(DateTime( 2013, 2, 22, 7, 53, 0), 123.msecs, new immutable SimpleTimeZone( hours(0) )))) ]); testLex( "2013/2/22 07:53.123-GMT+02:10", [ Token(symbol!"Value",loc,Value(SysTime(DateTime( 2013, 2, 22, 7, 53, 0), 123.msecs, new immutable SimpleTimeZone( hours(2)+minutes(10))))) ]); testLex( "2013/2/22 07:53.123-GMT-05:30", [ Token(symbol!"Value",loc,Value(SysTime(DateTime( 2013, 2, 22, 7, 53, 0), 123.msecs, new immutable SimpleTimeZone(-hours(5)-minutes(30))))) ]); testLex( "2013/2/22 -34:65-GMT-05:30", [ Token(symbol!"Value",loc,Value(SysTime(DateTime( 2013, 2, 22, 0, 0, 0) - hours(34) - minutes(65) - seconds( 0), new immutable SimpleTimeZone(-hours(5)-minutes(30))))) ]); // DateTime, with Java SDL's occasionally weird interpretation of some // "not quite ISO" variations of the "GMT with offset" timezone strings. Token testTokenSimpleTimeZone(Duration d) { auto dateTime = DateTime(2013, 2, 22, 7, 53, 0); auto tz = new immutable SimpleTimeZone(d); return Token( symbol!"Value", loc, Value(SysTime(dateTime,tz)) ); } Token testTokenUnknownTimeZone(string tzName) { auto dateTime = DateTime(2013, 2, 22, 7, 53, 0); auto frac = 0.msecs; return Token( symbol!"Value", loc, Value(DateTimeFracUnknownZone(dateTime,frac,tzName)) ); } testLex("2013/2/22 07:53-GMT+", [ testTokenUnknownTimeZone("GMT+") ]); testLex("2013/2/22 07:53-GMT+:", [ testTokenUnknownTimeZone("GMT+:") ]); testLex("2013/2/22 07:53-GMT+:3", [ testTokenUnknownTimeZone("GMT+:3") ]); testLex("2013/2/22 07:53-GMT+:03", [ testTokenSimpleTimeZone(minutes(3)) ]); testLex("2013/2/22 07:53-GMT+:003", [ testTokenUnknownTimeZone("GMT+:003") ]); testLex("2013/2/22 07:53-GMT+4", [ testTokenSimpleTimeZone(hours(4)) ]); testLex("2013/2/22 07:53-GMT+4:", [ testTokenUnknownTimeZone("GMT+4:") ]); testLex("2013/2/22 07:53-GMT+4:3", [ testTokenUnknownTimeZone("GMT+4:3") ]); testLex("2013/2/22 07:53-GMT+4:03", [ testTokenSimpleTimeZone(hours(4)+minutes(3)) ]); testLex("2013/2/22 
07:53-GMT+4:003", [ testTokenUnknownTimeZone("GMT+4:003") ]); testLex("2013/2/22 07:53-GMT+04", [ testTokenSimpleTimeZone(hours(4)) ]); testLex("2013/2/22 07:53-GMT+04:", [ testTokenUnknownTimeZone("GMT+04:") ]); testLex("2013/2/22 07:53-GMT+04:3", [ testTokenUnknownTimeZone("GMT+04:3") ]); testLex("2013/2/22 07:53-GMT+04:03", [ testTokenSimpleTimeZone(hours(4)+minutes(3)) ]); testLex("2013/2/22 07:53-GMT+04:03abc", [ testTokenUnknownTimeZone("GMT+04:03abc") ]); testLex("2013/2/22 07:53-GMT+04:003", [ testTokenUnknownTimeZone("GMT+04:003") ]); testLex("2013/2/22 07:53-GMT+004", [ testTokenSimpleTimeZone(minutes(4)) ]); testLex("2013/2/22 07:53-GMT+004:", [ testTokenUnknownTimeZone("GMT+004:") ]); testLex("2013/2/22 07:53-GMT+004:3", [ testTokenUnknownTimeZone("GMT+004:3") ]); testLex("2013/2/22 07:53-GMT+004:03", [ testTokenUnknownTimeZone("GMT+004:03") ]); testLex("2013/2/22 07:53-GMT+004:003", [ testTokenUnknownTimeZone("GMT+004:003") ]); testLex("2013/2/22 07:53-GMT+0004", [ testTokenSimpleTimeZone(minutes(4)) ]); testLex("2013/2/22 07:53-GMT+0004:", [ testTokenUnknownTimeZone("GMT+0004:") ]); testLex("2013/2/22 07:53-GMT+0004:3", [ testTokenUnknownTimeZone("GMT+0004:3") ]); testLex("2013/2/22 07:53-GMT+0004:03", [ testTokenUnknownTimeZone("GMT+0004:03") ]); testLex("2013/2/22 07:53-GMT+0004:003", [ testTokenUnknownTimeZone("GMT+0004:003") ]); testLex("2013/2/22 07:53-GMT+00004", [ testTokenSimpleTimeZone(minutes(4)) ]); testLex("2013/2/22 07:53-GMT+00004:", [ testTokenUnknownTimeZone("GMT+00004:") ]); testLex("2013/2/22 07:53-GMT+00004:3", [ testTokenUnknownTimeZone("GMT+00004:3") ]); testLex("2013/2/22 07:53-GMT+00004:03", [ testTokenUnknownTimeZone("GMT+00004:03") ]); testLex("2013/2/22 07:53-GMT+00004:003", [ testTokenUnknownTimeZone("GMT+00004:003") ]); // DateTime, with unknown timezone testLex( "2013/2/22 07:53-Bogus/Foo", [ Token(symbol!"Value",loc,Value(DateTimeFracUnknownZone(DateTime( 2013, 2, 22, 7, 53, 0), 0.msecs, "Bogus/Foo")), "2013/2/22 
07:53-Bogus/Foo") ]); testLex("-2013/2/22 07:53-Bogus/Foo", [ Token(symbol!"Value",loc,Value(DateTimeFracUnknownZone(DateTime(-2013, 2, 22, 7, 53, 0), 0.msecs, "Bogus/Foo"))) ]); testLex( "2013/2/22 -07:53-Bogus/Foo", [ Token(symbol!"Value",loc,Value(DateTimeFracUnknownZone(DateTime( 2013, 2, 22, 0, 0, 0) - hours(7) - minutes(53), 0.msecs, "Bogus/Foo"))) ]); testLex("-2013/2/22 -07:53-Bogus/Foo", [ Token(symbol!"Value",loc,Value(DateTimeFracUnknownZone(DateTime(-2013, 2, 22, 0, 0, 0) - hours(7) - minutes(53), 0.msecs, "Bogus/Foo"))) ]); testLex( "2013/2/22 07:53:34-Bogus/Foo", [ Token(symbol!"Value",loc,Value(DateTimeFracUnknownZone(DateTime( 2013, 2, 22, 7, 53, 34), 0.msecs, "Bogus/Foo"))) ]); testLex( "2013/2/22 07:53:34.123-Bogus/Foo", [ Token(symbol!"Value",loc,Value(DateTimeFracUnknownZone(DateTime( 2013, 2, 22, 7, 53, 34), 123.msecs, "Bogus/Foo"))) ]); testLex( "2013/2/22 07:53.123-Bogus/Foo", [ Token(symbol!"Value",loc,Value(DateTimeFracUnknownZone(DateTime( 2013, 2, 22, 7, 53, 0), 123.msecs, "Bogus/Foo"))) ]); // Time Span testLex( "12:14:42", [ Token(symbol!"Value",loc,Value( days( 0)+hours(12)+minutes(14)+seconds(42)+msecs( 0))) ]); testLex("-12:14:42", [ Token(symbol!"Value",loc,Value(-days( 0)-hours(12)-minutes(14)-seconds(42)-msecs( 0))) ]); testLex( "00:09:12", [ Token(symbol!"Value",loc,Value( days( 0)+hours( 0)+minutes( 9)+seconds(12)+msecs( 0))) ]); testLex( "00:00:01.023", [ Token(symbol!"Value",loc,Value( days( 0)+hours( 0)+minutes( 0)+seconds( 1)+msecs( 23))) ]); testLex( "23d:05:21:23.532", [ Token(symbol!"Value",loc,Value( days(23)+hours( 5)+minutes(21)+seconds(23)+msecs(532))) ]); testLex( "23d:05:21:23.53", [ Token(symbol!"Value",loc,Value( days(23)+hours( 5)+minutes(21)+seconds(23)+msecs(530))) ]); testLex( "23d:05:21:23.5", [ Token(symbol!"Value",loc,Value( days(23)+hours( 5)+minutes(21)+seconds(23)+msecs(500))) ]); testLex("-23d:05:21:23.532", [ Token(symbol!"Value",loc,Value(-days(23)-hours( 5)-minutes(21)-seconds(23)-msecs(532))) ]); 
testLex("-23d:05:21:23.5", [ Token(symbol!"Value",loc,Value(-days(23)-hours( 5)-minutes(21)-seconds(23)-msecs(500))) ]); testLex( "23d:05:21:23", [ Token(symbol!"Value",loc,Value( days(23)+hours( 5)+minutes(21)+seconds(23)+msecs( 0))) ]); testLexThrows("12:14:42a"); testLexThrows("23d:05:21:23.532a"); testLexThrows("23d:05:21:23.532f"); // Combination testLex("foo. 7", [ Token(symbol!"Ident",loc,Value( null),"foo."), Token(symbol!"Value",loc,Value(cast(int)7)) ]); testLex(` namespace:person "foo" "bar" 1 23L name.first="ひとみ" name.last="Smith" { namespace:age 37; namespace:favorite_color "blue" // comment somedate 2013/2/22 07:53 -- comment inventory /* comment */ { socks } } `, [ Token(symbol!"EOL",loc,Value(null),"\n"), Token(symbol!"Ident", loc, Value( null ), "namespace"), Token(symbol!":", loc, Value( null ), ":"), Token(symbol!"Ident", loc, Value( null ), "person"), Token(symbol!"Value", loc, Value( "foo" ), `"foo"`), Token(symbol!"Value", loc, Value( "bar" ), `"bar"`), Token(symbol!"Value", loc, Value( cast( int) 1 ), "1"), Token(symbol!"Value", loc, Value( cast(long)23 ), "23L"), Token(symbol!"Ident", loc, Value( null ), "name.first"), Token(symbol!"=", loc, Value( null ), "="), Token(symbol!"Value", loc, Value( "ひとみ" ), `"ひとみ"`), Token(symbol!"Ident", loc, Value( null ), "name.last"), Token(symbol!"=", loc, Value( null ), "="), Token(symbol!"Value", loc, Value( "Smith" ), `"Smith"`), Token(symbol!"{", loc, Value( null ), "{"), Token(symbol!"EOL", loc, Value( null ), "\n"), Token(symbol!"Ident", loc, Value( null ), "namespace"), Token(symbol!":", loc, Value( null ), ":"), Token(symbol!"Ident", loc, Value( null ), "age"), Token(symbol!"Value", loc, Value( cast(int)37 ), "37"), Token(symbol!"EOL", loc, Value( null ), ";"), Token(symbol!"Ident", loc, Value( null ), "namespace"), Token(symbol!":", loc, Value( null ), ":"), Token(symbol!"Ident", loc, Value( null ), "favorite_color"), Token(symbol!"Value", loc, Value( "blue" ), `"blue"`), Token(symbol!"EOL", loc, 
Value( null ), "\n"), Token(symbol!"Ident", loc, Value( null ), "somedate"), Token(symbol!"Value", loc, Value( DateTimeFrac(DateTime(2013, 2, 22, 7, 53, 0)) ), "2013/2/22 07:53"), Token(symbol!"EOL", loc, Value( null ), "\n"), Token(symbol!"EOL", loc, Value( null ), "\n"), Token(symbol!"Ident", loc, Value(null), "inventory"), Token(symbol!"{", loc, Value(null), "{"), Token(symbol!"EOL", loc, Value(null), "\n"), Token(symbol!"Ident", loc, Value(null), "socks"), Token(symbol!"EOL", loc, Value(null), "\n"), Token(symbol!"}", loc, Value(null), "}"), Token(symbol!"EOL", loc, Value(null), "\n"), Token(symbol!"}", loc, Value(null), "}"), Token(symbol!"EOL", loc, Value(null), "\n"), ]); if(numErrors > 0) stderr.writeln(numErrors, " failed test(s)"); } version(sdlangUnittest) unittest { writeln("lexer: Regression test issue #8..."); stdout.flush(); testLex(`"\n \n"`, [ Token(symbol!"Value",loc,Value("\n \n"),`"\n \n"`) ]); testLex(`"\t\t"`, [ Token(symbol!"Value",loc,Value("\t\t"),`"\t\t"`) ]); testLex(`"\n\n"`, [ Token(symbol!"Value",loc,Value("\n\n"),`"\n\n"`) ]); } version(sdlangUnittest) unittest { writeln("lexer: Regression test issue #11..."); stdout.flush(); void test(string input) { testLex( input, [ Token(symbol!"EOL", loc, Value(null), "\n"), Token(symbol!"Ident",loc,Value(null), "a") ] ); } test("//X\na"); test("//\na"); test("--\na"); test("#\na"); } version(sdlangUnittest) unittest { writeln("lexer: Regression test issue #28..."); stdout.flush(); enum offset = 1; // workaround for an of-by-one error for line numbers testLex("test", [ Token(symbol!"Ident", Location("filename", 0, 0, 0), Value(null), "test") ], true); testLex("\ntest", [ Token(symbol!"EOL", Location("filename", 0, 0, 0), Value(null), "\n"), Token(symbol!"Ident", Location("filename", 1, 0, 1), Value(null), "test") ], true); testLex("\rtest", [ Token(symbol!"EOL", Location("filename", 0, 0, 0), Value(null), "\r"), Token(symbol!"Ident", Location("filename", 1, 0, 1), Value(null), "test") ], true); 
testLex("\r\ntest", [ Token(symbol!"EOL", Location("filename", 0, 0, 0), Value(null), "\r\n"), Token(symbol!"Ident", Location("filename", 1, 0, 2), Value(null), "test") ], true); testLex("\r\n\ntest", [ Token(symbol!"EOL", Location("filename", 0, 0, 0), Value(null), "\r\n"), Token(symbol!"EOL", Location("filename", 1, 0, 2), Value(null), "\n"), Token(symbol!"Ident", Location("filename", 2, 0, 3), Value(null), "test") ], true); testLex("\r\r\ntest", [ Token(symbol!"EOL", Location("filename", 0, 0, 0), Value(null), "\r"), Token(symbol!"EOL", Location("filename", 1, 0, 1), Value(null), "\r\n"), Token(symbol!"Ident", Location("filename", 2, 0, 3), Value(null), "test") ], true); } dub-1.40.0/source/dub/internal/sdlang/package.d000066400000000000000000000063301477246567400212740ustar00rootroot00000000000000// SDLang-D // Written in the D programming language. /++ $(H2 SDLang-D v0.9.3) Library for parsing and generating SDL (Simple Declarative Language). Import this module to use SDLang-D as a library. For the list of officially supported compiler versions, see the $(LINK2 https://github.com/Abscissa/SDLang-D/blob/master/.travis.yml, .travis.yml) file included with your version of SDLang-D. Links: $(UL $(LI $(LINK2 https://github.com/Abscissa/SDLang-D, SDLang-D Homepage) ) $(LI $(LINK2 http://semitwist.com/sdlang-d, SDLang-D API Reference (latest version) ) ) $(LI $(LINK2 http://semitwist.com/sdlang-d-docs, SDLang-D API Reference (earlier versions) ) ) $(LI $(LINK2 http://sdl.ikayzo.org/display/SDL/Language+Guide, Official SDL Site) [$(LINK2 http://semitwist.com/sdl-mirror/Language+Guide.html, mirror)] ) ) Authors: Nick Sabalausky ("Abscissa") http://semitwist.com/contact Copyright: Copyright (C) 2012-2015 Nick Sabalausky. 
License: $(LINK2 https://github.com/Abscissa/SDLang-D/blob/master/LICENSE.txt, zlib/libpng) +/ module dub.internal.sdlang; version (Have_sdlang_d) public import sdlang; else: import std.array; import std.datetime; import std.file; import std.stdio; import dub.internal.sdlang.ast; import dub.internal.sdlang.exception; import dub.internal.sdlang.lexer; import dub.internal.sdlang.parser; import dub.internal.sdlang.symbol; import dub.internal.sdlang.token; import dub.internal.sdlang.util; // Expose main public API public import dub.internal.sdlang.ast : Attribute, Tag; public import dub.internal.sdlang.exception; public import dub.internal.sdlang.parser : parseFile, parseSource; public import dub.internal.sdlang.token : Value, Token, DateTimeFrac, DateTimeFracUnknownZone; public import dub.internal.sdlang.util : sdlangVersion, Location; version(sdlangUnittest) void main() {} version(sdlangTestApp) { int main(string[] args) { if( args.length != 3 || (args[1] != "lex" && args[1] != "parse" && args[1] != "to-sdl") ) { stderr.writeln("SDLang-D v", sdlangVersion); stderr.writeln("Usage: sdlang [lex|parse|to-sdl] filename.sdl"); return 1; } auto filename = args[2]; try { if(args[1] == "lex") doLex(filename); else if(args[1] == "parse") doParse(filename); else doToSDL(filename); } catch(SDLangParseException e) { stderr.writeln(e.msg); return 1; } return 0; } void doLex(string filename) { auto source = cast(string)read(filename); auto lexer = new Lexer(source, filename); foreach(tok; lexer) { // Value string value; if(tok.symbol == symbol!"Value") value = tok.value.hasValue? toString(tok.value.type) : "{null}"; value = value==""? 
"\t" : "("~value~":"~tok.value.toString()~") "; // Data auto data = tok.data.replace("\n", "").replace("\r", ""); if(data != "") data = "\t|"~tok.data~"|"; // Display writeln( tok.location.toString, ":\t", tok.symbol.name, value, data ); if(tok.symbol.name == "Error") break; } } void doParse(string filename) { auto root = parseFile(filename); stdout.rawWrite(root.toDebugString()); writeln(); } void doToSDL(string filename) { auto root = parseFile(filename); stdout.rawWrite(root.toSDLDocument()); } } dub-1.40.0/source/dub/internal/sdlang/parser.d000066400000000000000000000315311477246567400211760ustar00rootroot00000000000000// SDLang-D // Written in the D programming language. module dub.internal.sdlang.parser; version (Have_sdlang_d) public import sdlang.parser; else: import std.file; import dub.internal.libInputVisitor; import dub.internal.sdlang.ast; import dub.internal.sdlang.exception; import dub.internal.sdlang.lexer; import dub.internal.sdlang.symbol; import dub.internal.sdlang.token; import dub.internal.sdlang.util; import dub.internal.dyaml.stdsumtype; /// Returns root tag. Tag parseFile(string filename) { auto source = cast(string)read(filename); return parseSource(source, filename); } /// Returns root tag. The optional 'filename' parameter can be included /// so that the SDL document's filename (if any) can be displayed with /// any syntax error messages. Tag parseSource(string source, string filename=null) { auto lexer = new Lexer(source, filename); auto parser = DOMParser(lexer); return parser.parseRoot(); } /++ Parses an SDL document using StAX/Pull-style. Returns an InputRange with element type ParserEvent. The pullParseFile version reads a file and parses it, while pullParseSource parses a string passed in. The optional 'filename' parameter in pullParseSource can be included so that the SDL document's filename (if any) can be displayed with any syntax error messages. Warning! The FileStartEvent and FileEndEvent events *might* be removed later. 
See $(LINK https://github.com/Abscissa/SDLang-D/issues/17) Example: ------------------ parent 12 attr="q" { childA 34 childB 56 } lastTag ------------------ The ParserEvent sequence emitted for that SDL document would be as follows (indented for readability): ------------------ FileStartEvent TagStartEvent (parent) ValueEvent (12) AttributeEvent (attr, "q") TagStartEvent (childA) ValueEvent (34) TagEndEvent TagStartEvent (childB) ValueEvent (56) TagEndEvent TagEndEvent TagStartEvent (lastTag) TagEndEvent FileEndEvent ------------------ Example: ------------------ foreach(event; pullParseFile("stuff.sdl")) { import std.stdio; if(event.peek!FileStartEvent()) writeln("FileStartEvent, starting! "); else if(event.peek!FileEndEvent()) writeln("FileEndEvent, done! "); else if(auto e = event.peek!TagStartEvent()) writeln("TagStartEvent: ", e.namespace, ":", e.name, " @ ", e.location); else if(event.peek!TagEndEvent()) writeln("TagEndEvent"); else if(auto e = event.peek!ValueEvent()) writeln("ValueEvent: ", e.value); else if(auto e = event.peek!AttributeEvent()) writeln("AttributeEvent: ", e.namespace, ":", e.name, "=", e.value); else // Shouldn't happen throw new Exception("Received unknown parser event"); } ------------------ +/ auto pullParseFile(string filename) { auto source = cast(string)read(filename); return parseSource(source, filename); } ///ditto auto pullParseSource(string source, string filename=null) { auto lexer = new Lexer(source, filename); auto parser = PullParser(lexer); return inputVisitor!ParserEvent( parser ); } /// The element of the InputRange returned by pullParseFile and pullParseSource: alias ParserEvent = SumType!( FileStartEvent, FileEndEvent, TagStartEvent, TagEndEvent, ValueEvent, AttributeEvent, ); /// Event: Start of file struct FileStartEvent { Location location; } /// Event: End of file struct FileEndEvent { Location location; } /// Event: Start of tag struct TagStartEvent { Location location; string namespace; string name; } /// Event: 
End of tag struct TagEndEvent { //Location location; } /// Event: Found a Value in the current tag struct ValueEvent { Location location; Value value; } /// Event: Found an Attribute in the current tag struct AttributeEvent { Location location; string namespace; string name; Value value; } // The actual pull parser private struct PullParser { private Lexer lexer; private struct IDFull { string namespace; string name; } private void error(string msg) { error(lexer.front.location, msg); } private void error(Location loc, string msg) { throw new SDLangParseException(loc, "Error: "~msg); } private InputVisitor!(PullParser, ParserEvent) v; void visit(InputVisitor!(PullParser, ParserEvent) v) { this.v = v; parseRoot(); } private void emit(Event)(Event event) { v.yield( ParserEvent(event) ); } /// ::= EOF (Lookaheads: Anything) private void parseRoot() { //trace("Starting parse of file: ", lexer.filename); //trace(__FUNCTION__, ": ::= EOF (Lookaheads: Anything)"); auto startLocation = Location(lexer.filename, 0, 0, 0); emit( FileStartEvent(startLocation) ); parseTags(); auto token = lexer.front; if(!token.matches!"EOF"()) error("Expected end-of-file, not " ~ token.symbol.name); emit( FileEndEvent(token.location) ); } /// ::= (Lookaheads: Ident Value) /// | EOL (Lookaheads: EOL) /// | {empty} (Lookaheads: Anything else, except '{') void parseTags() { //trace("Enter ", __FUNCTION__); while(true) { auto token = lexer.front; if(token.matches!"Ident"() || token.matches!"Value"()) { //trace(__FUNCTION__, ": ::= (Lookaheads: Ident Value)"); parseTag(); continue; } else if(token.matches!"EOL"()) { //trace(__FUNCTION__, ": ::= EOL (Lookaheads: EOL)"); lexer.popFront(); continue; } else if(token.matches!"{"()) { error("Anonymous tags must have at least one value. 
They cannot just have children and attributes only."); } else { //trace(__FUNCTION__, ": ::= {empty} (Lookaheads: Anything else, except '{')"); break; } } } /// /// ::= (Lookaheads: Ident) /// | (Lookaheads: Value) void parseTag() { auto token = lexer.front; if(token.matches!"Ident"()) { //trace(__FUNCTION__, ": ::= (Lookaheads: Ident)"); //trace("Found tag named: ", tag.fullName); auto id = parseIDFull(); emit( TagStartEvent(token.location, id.namespace, id.name) ); } else if(token.matches!"Value"()) { //trace(__FUNCTION__, ": ::= (Lookaheads: Value)"); //trace("Found anonymous tag."); emit( TagStartEvent(token.location, null, null) ); } else error("Expected tag name or value, not " ~ token.symbol.name); if(lexer.front.matches!"="()) error("Anonymous tags must have at least one value. They cannot just have attributes and children only."); parseValues(); parseAttributes(); parseOptChild(); parseTagTerminator(); emit( TagEndEvent() ); } /// ::= Ident (Lookaheads: Ident) IDFull parseIDFull() { auto token = lexer.front; if(token.matches!"Ident"()) { //trace(__FUNCTION__, ": ::= Ident (Lookaheads: Ident)"); lexer.popFront(); return parseIDSuffix(token.data); } else { error("Expected namespace or identifier, not " ~ token.symbol.name); assert(0); } } /// /// ::= ':' Ident (Lookaheads: ':') /// ::= {empty} (Lookaheads: Anything else) IDFull parseIDSuffix(string firstIdent) { auto token = lexer.front; if(token.matches!":"()) { //trace(__FUNCTION__, ": ::= ':' Ident (Lookaheads: ':')"); lexer.popFront(); token = lexer.front; if(token.matches!"Ident"()) { lexer.popFront(); return IDFull(firstIdent, token.data); } else { error("Expected name, not " ~ token.symbol.name); assert(0); } } else { //trace(__FUNCTION__, ": ::= {empty} (Lookaheads: Anything else)"); return IDFull("", firstIdent); } } /// /// ::= Value (Lookaheads: Value) /// | {empty} (Lookaheads: Anything else) void parseValues() { while(true) { auto token = lexer.front; if(token.matches!"Value"()) { 
//trace(__FUNCTION__, ": ::= Value (Lookaheads: Value)"); parseValue(); continue; } else { //trace(__FUNCTION__, ": ::= {empty} (Lookaheads: Anything else)"); break; } } } /// Handle Value terminals that aren't part of an attribute void parseValue() { auto token = lexer.front; if(token.matches!"Value"()) { //trace(__FUNCTION__, ": (Handle Value terminals that aren't part of an attribute)"); auto value = token.value; //trace("In tag '", parent.fullName, "', found value: ", value); emit( ValueEvent(token.location, value) ); lexer.popFront(); } else error("Expected value, not "~token.symbol.name); } /// /// ::= (Lookaheads: Ident) /// | {empty} (Lookaheads: Anything else) void parseAttributes() { while(true) { auto token = lexer.front; if(token.matches!"Ident"()) { //trace(__FUNCTION__, ": ::= (Lookaheads: Ident)"); parseAttribute(); continue; } else { //trace(__FUNCTION__, ": ::= {empty} (Lookaheads: Anything else)"); break; } } } /// ::= '=' Value (Lookaheads: Ident) void parseAttribute() { //trace(__FUNCTION__, ": ::= '=' Value (Lookaheads: Ident)"); auto token = lexer.front; if(!token.matches!"Ident"()) error("Expected attribute name, not "~token.symbol.name); auto id = parseIDFull(); token = lexer.front; if(!token.matches!"="()) error("Expected '=' after attribute name, not "~token.symbol.name); lexer.popFront(); token = lexer.front; if(!token.matches!"Value"()) error("Expected attribute value, not "~token.symbol.name); //trace("In tag '", parent.fullName, "', found attribute '", attr.fullName, "'"); emit( AttributeEvent(token.location, id.namespace, id.name, token.value) ); lexer.popFront(); } /// /// ::= '{' EOL '}' (Lookaheads: '{') /// | {empty} (Lookaheads: Anything else) void parseOptChild() { auto token = lexer.front; if(token.matches!"{") { //trace(__FUNCTION__, ": ::= '{' EOL '}' (Lookaheads: '{')"); lexer.popFront(); token = lexer.front; if(!token.matches!"EOL"()) error("Expected newline or semicolon after '{', not "~token.symbol.name); 
lexer.popFront(); parseTags(); token = lexer.front; if(!token.matches!"}"()) error("Expected '}' after child tags, not "~token.symbol.name); lexer.popFront(); } else { //trace(__FUNCTION__, ": ::= {empty} (Lookaheads: Anything else)"); // Do nothing, no error. } } /// /// ::= EOL (Lookahead: EOL) /// | {empty} (Lookahead: EOF) void parseTagTerminator() { auto token = lexer.front; if(token.matches!"EOL") { //trace(__FUNCTION__, ": ::= EOL (Lookahead: EOL)"); lexer.popFront(); } else if(token.matches!"EOF") { //trace(__FUNCTION__, ": ::= {empty} (Lookahead: EOF)"); // Do nothing } else error("Expected end of tag (newline, semicolon or end-of-file), not " ~ token.symbol.name); } } private struct DOMParser { Lexer lexer; Tag parseRoot() { auto currTag = new Tag(null, null, "root"); currTag.location = Location(lexer.filename, 0, 0, 0); auto parser = PullParser(lexer); auto eventRange = inputVisitor!ParserEvent( parser ); foreach(event; eventRange) { event.match!( (TagStartEvent e) { auto newTag = new Tag(currTag, e.namespace, e.name); newTag.location = e.location; currTag = newTag; }, (TagEndEvent _) { currTag = currTag.parent; if(!currTag) parser.error("Internal Error: Received an extra TagEndEvent"); }, (ValueEvent e) { currTag.add(e.value); }, (AttributeEvent e) { auto attr = new Attribute(e.namespace, e.name, e.value, e.location); currTag.add(attr); }, (FileStartEvent _) { // Do nothing }, (FileEndEvent _) { // There shouldn't be another parent. if(currTag.parent) parser.error("Internal Error: Unexpected end of file, not enough TagEndEvent"); } ); } return currTag; } } // Other parser tests are part of the AST's tests over in the ast module. 
// Regression test, issue #16: https://github.com/Abscissa/SDLang-D/issues/16 version(sdlangUnittest) unittest { import std.stdio; writeln("parser: Regression test issue #16..."); stdout.flush(); // Shouldn't crash foreach(event; pullParseSource(`tag "data"`)) { event.peek!FileStartEvent(); } } // Regression test, issue #31: https://github.com/Abscissa/SDLang-D/issues/31 // "Escape sequence results in range violation error" version(sdlangUnittest) unittest { import std.stdio; writeln("parser: Regression test issue #31..."); stdout.flush(); // Shouldn't get a Range violation parseSource(`test "\"foo\""`); } dub-1.40.0/source/dub/internal/sdlang/symbol.d000066400000000000000000000021711477246567400212050ustar00rootroot00000000000000// SDLang-D // Written in the D programming language. module dub.internal.sdlang.symbol; version (Have_sdlang_d) public import sdlang.symbol; else: import std.algorithm; static immutable validSymbolNames = [ "Error", "EOF", "EOL", ":", "=", "{", "}", "Ident", "Value", ]; /// Use this to create a Symbol. Ex: symbol!"Value" or symbol!"=" /// Invalid names (such as symbol!"FooBar") are rejected at compile-time. template symbol(string name) { static assert(validSymbolNames.find(name), "Invalid Symbol: '"~name~"'"); immutable symbol = _symbol(name); } private Symbol _symbol(string name) { return Symbol(name); } /// Symbol is essentially the "type" of a Token. /// Token is like an instance of a Symbol. /// /// This only represents terminals. Non-terminal tokens aren't /// constructed since the AST is built directly during parsing. /// /// You can't create a Symbol directly. Instead, use the 'symbol' /// template. 
struct Symbol { private string _name; @property string name() { return _name; } @disable this(); private this(string name) { this._name = name; } string toString() { return _name; } } dub-1.40.0/source/dub/internal/sdlang/token.d000066400000000000000000000400111477246567400210130ustar00rootroot00000000000000// SDLang-D // Written in the D programming language. module dub.internal.sdlang.token; version (Have_sdlang_d) public import sdlang.token; else: import std.array; import std.base64; import std.conv; import std.datetime; import std.range; import std.string; import std.typetuple; import dub.internal.dyaml.stdsumtype; import dub.internal.sdlang.symbol; import dub.internal.sdlang.util; /// DateTime doesn't support milliseconds, but SDL's "Date Time" type does. /// So this is needed for any SDL "Date Time" that doesn't include a time zone. struct DateTimeFrac { this(DateTime dt, Duration fs) { this.dateTime = dt; this.fracSecs = fs; } DateTime dateTime; Duration fracSecs; } /++ If a "Date Time" literal in the SDL file has a time zone that's not found in your system, you get one of these instead of a SysTime. (Because it's impossible to indicate "unknown time zone" with 'std.datetime.TimeZone'.) The difference between this and 'DateTimeFrac' is that 'DateTimeFrac' indicates that no time zone was specified in the SDL at all, whereas 'DateTimeFracUnknownZone' indicates that a time zone was specified but data for it could not be found on your system. +/ struct DateTimeFracUnknownZone { DateTime dateTime; Duration fracSecs; string timeZone; bool opEquals(const DateTimeFracUnknownZone b) const { return opEquals(b); } bool opEquals(ref const DateTimeFracUnknownZone b) const { return this.dateTime == b.dateTime && this.fracSecs == b.fracSecs && this.timeZone == b.timeZone; } } /++ SDL's data-types map to D's datatypes as described below. Most are straightforward, but take special note of the date/time-related types. 
Boolean: bool Null: typeof(null) Unicode Character: dchar Double-Quote Unicode String: string Raw Backtick Unicode String: string Integer (32 bits signed): int Long Integer (64 bits signed): long Float (32 bits signed): float Double Float (64 bits signed): double Decimal (128+ bits signed): real Binary (standard Base64): ubyte[] Time Span: Duration Date (with no time at all): Date Date Time (no timezone): DateTimeFrac Date Time (with a known timezone): SysTime Date Time (with an unknown timezone): DateTimeFracUnknownZone +/ alias TypeTuple!( typeof(null), bool, string, dchar, int, long, float, double, real, Date, DateTimeFrac, SysTime, DateTimeFracUnknownZone, Duration, ubyte[], ) ValueTypes; alias SumType!ValueTypes Value; /// ditto template isSDLSink(T) { enum isSink = isOutputRange!T && is(ElementType!(T)[] == string); } string toSDLString(T)(T value) if( is( T : Value ) || is( T : bool ) || is( T : string ) || is( T : dchar ) || is( T : int ) || is( T : long ) || is( T : float ) || is( T : double ) || is( T : real ) || is( T : Date ) || is( T : DateTimeFrac ) || is( T : SysTime ) || is( T : DateTimeFracUnknownZone ) || is( T : Duration ) || is( T : ubyte[] ) || is( T : typeof(null) ) ) { Appender!string sink; toSDLString(value, sink); return sink.data; } void toSDLString(Sink)(Value value, ref Sink sink) if(isOutputRange!(Sink,char)) { value.match!(v => toSDLString(v, sink)); } void toSDLString(Sink)(typeof(null) value, ref Sink sink) if(isOutputRange!(Sink,char)) { sink.put("null"); } void toSDLString(Sink)(bool value, ref Sink sink) if(isOutputRange!(Sink,char)) { sink.put(value? 
"true" : "false"); } //TODO: Figure out how to properly handle strings/chars containing lineSep or paraSep void toSDLString(Sink)(string value, ref Sink sink) if(isOutputRange!(Sink,char)) { sink.put('"'); // This loop is UTF-safe foreach(char ch; value) { if (ch == '\n') sink.put(`\n`); else if(ch == '\r') sink.put(`\r`); else if(ch == '\t') sink.put(`\t`); else if(ch == '\"') sink.put(`\"`); else if(ch == '\\') sink.put(`\\`); else sink.put(ch); } sink.put('"'); } void toSDLString(Sink)(dchar value, ref Sink sink) if(isOutputRange!(Sink,char)) { sink.put('\''); if (value == '\n') sink.put(`\n`); else if(value == '\r') sink.put(`\r`); else if(value == '\t') sink.put(`\t`); else if(value == '\'') sink.put(`\'`); else if(value == '\\') sink.put(`\\`); else sink.put(value); sink.put('\''); } void toSDLString(Sink)(int value, ref Sink sink) if(isOutputRange!(Sink,char)) { sink.put( "%s".format(value) ); } void toSDLString(Sink)(long value, ref Sink sink) if(isOutputRange!(Sink,char)) { sink.put( "%sL".format(value) ); } void toSDLString(Sink)(float value, ref Sink sink) if(isOutputRange!(Sink,char)) { sink.put( "%.10sF".format(value) ); } void toSDLString(Sink)(double value, ref Sink sink) if(isOutputRange!(Sink,char)) { sink.put( "%.30sD".format(value) ); } void toSDLString(Sink)(real value, ref Sink sink) if(isOutputRange!(Sink,char)) { sink.put( "%.30sBD".format(value) ); } void toSDLString(Sink)(Date value, ref Sink sink) if(isOutputRange!(Sink,char)) { sink.put(to!string(value.year)); sink.put('/'); sink.put(to!string(cast(int)value.month)); sink.put('/'); sink.put(to!string(value.day)); } void toSDLString(Sink)(DateTimeFrac value, ref Sink sink) if(isOutputRange!(Sink,char)) { toSDLString(value.dateTime.date, sink); sink.put(' '); sink.put("%.2s".format(value.dateTime.hour)); sink.put(':'); sink.put("%.2s".format(value.dateTime.minute)); if(value.dateTime.second != 0) { sink.put(':'); sink.put("%.2s".format(value.dateTime.second)); } 
if(value.fracSecs.total!"msecs" != 0) { sink.put('.'); sink.put("%.3s".format(value.fracSecs.total!"msecs")); } } void toSDLString(Sink)(SysTime value, ref Sink sink) if(isOutputRange!(Sink,char)) { auto dateTimeFrac = DateTimeFrac(cast(DateTime)value, value.fracSecs); toSDLString(dateTimeFrac, sink); sink.put("-"); auto tzString = value.timezone.name; // If name didn't exist, try abbreviation. // Note that according to std.datetime docs, on Windows the // stdName/dstName may not be properly abbreviated. version(Windows) {} else if(tzString == "") { auto tz = value.timezone; auto stdTime = value.stdTime; if(tz.hasDST()) tzString = tz.dstInEffect(stdTime)? tz.dstName : tz.stdName; else tzString = tz.stdName; } if(tzString == "") { auto offset = value.timezone.utcOffsetAt(value.stdTime); sink.put("GMT"); if(offset < seconds(0)) { sink.put("-"); offset = -offset; } else sink.put("+"); long hours, minutes; offset.split!("hours", "minutes")(hours, minutes); sink.put("%.2s".format(hours)); sink.put(":"); sink.put("%.2s".format(minutes)); } else sink.put(tzString); } void toSDLString(Sink)(DateTimeFracUnknownZone value, ref Sink sink) if(isOutputRange!(Sink,char)) { auto dateTimeFrac = DateTimeFrac(value.dateTime, value.fracSecs); toSDLString(dateTimeFrac, sink); sink.put("-"); sink.put(value.timeZone); } void toSDLString(Sink)(Duration value, ref Sink sink) if(isOutputRange!(Sink,char)) { if(value < seconds(0)) { sink.put("-"); value = -value; } auto days = value.total!"days"(); if(days != 0) { sink.put("%s".format(days)); sink.put("d:"); } long hours, minutes, seconds, msecs; value.split!("hours", "minutes", "seconds", "msecs")(hours, minutes, seconds, msecs); sink.put("%.2s".format(hours)); sink.put(':'); sink.put("%.2s".format(minutes)); sink.put(':'); sink.put("%.2s".format(seconds)); if(msecs != 0) { sink.put('.'); sink.put("%.3s".format(msecs)); } } void toSDLString(Sink)(ubyte[] value, ref Sink sink) if(isOutputRange!(Sink,char)) { sink.put('['); sink.put( 
Base64.encode(value) ); sink.put(']'); } /// This only represents terminals. Non-terminals aren't /// constructed since the AST is directly built during parsing. struct Token { Symbol symbol = dub.internal.sdlang.symbol.symbol!"Error"; /// The "type" of this token Location location; Value value; /// Only valid when 'symbol' is symbol!"Value", otherwise null string data; /// Original text from source @disable this(); this(Symbol symbol, Location location, Value value=Value(null), string data=null) { this.symbol = symbol; this.location = location; this.value = value; this.data = data; } /// Tokens with differing symbols are always unequal. /// Tokens with differing values are always unequal. /// Tokens with differing Value types are always unequal. /// Member 'location' is always ignored for comparison. /// Member 'data' is ignored for comparison *EXCEPT* when the symbol is Ident. bool opEquals(Token b) { return opEquals(b); } bool opEquals(ref Token b) ///ditto { if( this.symbol != b.symbol || this.value != b.value ) return false; if(this.symbol == .symbol!"Ident") return this.data == b.data; return true; } bool matches(string symbolName)() { return this.symbol == .symbol!symbolName; } } version(sdlangUnittest) unittest { import std.stdio; writeln("Unittesting sdlang token..."); stdout.flush(); auto loc = Location("", 0, 0, 0); auto loc2 = Location("a", 1, 1, 1); assert(Token(symbol!"EOL",loc) == Token(symbol!"EOL",loc )); assert(Token(symbol!"EOL",loc) == Token(symbol!"EOL",loc2)); assert(Token(symbol!":", loc) == Token(symbol!":", loc )); assert(Token(symbol!"EOL",loc) != Token(symbol!":", loc )); assert(Token(symbol!"EOL",loc,Value(null),"\n") == Token(symbol!"EOL",loc,Value(null),"\n")); assert(Token(symbol!"EOL",loc,Value(null),"\n") == Token(symbol!"EOL",loc,Value(null),";" )); assert(Token(symbol!"EOL",loc,Value(null),"A" ) == Token(symbol!"EOL",loc,Value(null),"B" )); assert(Token(symbol!":", loc,Value(null),"A" ) == Token(symbol!":", loc,Value(null),"BB")); 
assert(Token(symbol!"EOL",loc,Value(null),"A" ) != Token(symbol!":", loc,Value(null),"A" )); assert(Token(symbol!"Ident",loc,Value(null),"foo") == Token(symbol!"Ident",loc,Value(null),"foo")); assert(Token(symbol!"Ident",loc,Value(null),"foo") != Token(symbol!"Ident",loc,Value(null),"BAR")); assert(Token(symbol!"Value",loc,Value(null),"foo") == Token(symbol!"Value",loc, Value(null),"foo")); assert(Token(symbol!"Value",loc,Value(null),"foo") == Token(symbol!"Value",loc2,Value(null),"foo")); assert(Token(symbol!"Value",loc,Value(null),"foo") == Token(symbol!"Value",loc, Value(null),"BAR")); assert(Token(symbol!"Value",loc,Value( 7),"foo") == Token(symbol!"Value",loc, Value( 7),"BAR")); assert(Token(symbol!"Value",loc,Value( 7),"foo") != Token(symbol!"Value",loc, Value( "A"),"foo")); assert(Token(symbol!"Value",loc,Value( 7),"foo") != Token(symbol!"Value",loc, Value( 2),"foo")); assert(Token(symbol!"Value",loc,Value(cast(int)7)) != Token(symbol!"Value",loc, Value(cast(long)7))); assert(Token(symbol!"Value",loc,Value(cast(float)1.2)) != Token(symbol!"Value",loc, Value(cast(double)1.2))); } version(sdlangUnittest) unittest { import std.stdio; writeln("Unittesting sdlang Value.toSDLString()..."); stdout.flush(); // Bool and null assert(Value(null ).toSDLString() == "null"); assert(Value(true ).toSDLString() == "true"); assert(Value(false).toSDLString() == "false"); // Base64 Binary assert(Value(cast(ubyte[])"hello world".dup).toSDLString() == "[aGVsbG8gd29ybGQ=]"); // Integer assert(Value(cast( int) 7).toSDLString() == "7"); assert(Value(cast( int)-7).toSDLString() == "-7"); assert(Value(cast( int) 0).toSDLString() == "0"); assert(Value(cast(long) 7).toSDLString() == "7L"); assert(Value(cast(long)-7).toSDLString() == "-7L"); assert(Value(cast(long) 0).toSDLString() == "0L"); // Floating point assert(Value(cast(float) 1.5).toSDLString() == "1.5F"); assert(Value(cast(float)-1.5).toSDLString() == "-1.5F"); assert(Value(cast(float) 0).toSDLString() == "0F"); 
assert(Value(cast(double) 1.5).toSDLString() == "1.5D"); assert(Value(cast(double)-1.5).toSDLString() == "-1.5D"); assert(Value(cast(double) 0).toSDLString() == "0D"); assert(Value(cast(real) 1.5).toSDLString() == "1.5BD"); assert(Value(cast(real)-1.5).toSDLString() == "-1.5BD"); assert(Value(cast(real) 0).toSDLString() == "0BD"); // String assert(Value("hello" ).toSDLString() == `"hello"`); assert(Value(" hello ").toSDLString() == `" hello "`); assert(Value("" ).toSDLString() == `""`); assert(Value("hello \r\n\t\"\\ world").toSDLString() == `"hello \r\n\t\"\\ world"`); assert(Value("日本語").toSDLString() == `"日本語"`); // Chars assert(Value(cast(dchar) 'A').toSDLString() == `'A'`); assert(Value(cast(dchar)'\r').toSDLString() == `'\r'`); assert(Value(cast(dchar)'\n').toSDLString() == `'\n'`); assert(Value(cast(dchar)'\t').toSDLString() == `'\t'`); assert(Value(cast(dchar)'\'').toSDLString() == `'\''`); assert(Value(cast(dchar)'\\').toSDLString() == `'\\'`); assert(Value(cast(dchar) '月').toSDLString() == `'月'`); // Date assert(Value(Date( 2004,10,31)).toSDLString() == "2004/10/31"); assert(Value(Date(-2004,10,31)).toSDLString() == "-2004/10/31"); // DateTimeFrac w/o Frac assert(Value(DateTimeFrac(DateTime(2004,10,31, 14,30,15))).toSDLString() == "2004/10/31 14:30:15"); assert(Value(DateTimeFrac(DateTime(2004,10,31, 1, 2, 3))).toSDLString() == "2004/10/31 01:02:03"); assert(Value(DateTimeFrac(DateTime(-2004,10,31, 14,30,15))).toSDLString() == "-2004/10/31 14:30:15"); // DateTimeFrac w/ Frac assert(Value(DateTimeFrac(DateTime(2004,10,31, 14,30,15), 123.msecs)).toSDLString() == "2004/10/31 14:30:15.123"); assert(Value(DateTimeFrac(DateTime(2004,10,31, 14,30,15), 120.msecs)).toSDLString() == "2004/10/31 14:30:15.120"); assert(Value(DateTimeFrac(DateTime(2004,10,31, 14,30,15), 100.msecs)).toSDLString() == "2004/10/31 14:30:15.100"); assert(Value(DateTimeFrac(DateTime(2004,10,31, 14,30,15), 12.msecs)).toSDLString() == "2004/10/31 14:30:15.012"); 
assert(Value(DateTimeFrac(DateTime(2004,10,31, 14,30,15), 1.msecs)).toSDLString() == "2004/10/31 14:30:15.001"); assert(Value(DateTimeFrac(DateTime(-2004,10,31, 14,30,15), 123.msecs)).toSDLString() == "-2004/10/31 14:30:15.123"); // DateTimeFracUnknownZone assert(Value(DateTimeFracUnknownZone(DateTime(2004,10,31, 14,30,15), 123.msecs, "Foo/Bar")).toSDLString() == "2004/10/31 14:30:15.123-Foo/Bar"); // SysTime assert(Value(SysTime(DateTime(2004,10,31, 14,30,15), new immutable SimpleTimeZone( hours(0) ))).toSDLString() == "2004/10/31 14:30:15-GMT+00:00"); assert(Value(SysTime(DateTime(2004,10,31, 1, 2, 3), new immutable SimpleTimeZone( hours(0) ))).toSDLString() == "2004/10/31 01:02:03-GMT+00:00"); assert(Value(SysTime(DateTime(2004,10,31, 14,30,15), new immutable SimpleTimeZone( hours(2)+minutes(10) ))).toSDLString() == "2004/10/31 14:30:15-GMT+02:10"); assert(Value(SysTime(DateTime(2004,10,31, 14,30,15), new immutable SimpleTimeZone(-hours(5)-minutes(30) ))).toSDLString() == "2004/10/31 14:30:15-GMT-05:30"); assert(Value(SysTime(DateTime(2004,10,31, 14,30,15), new immutable SimpleTimeZone( hours(2)+minutes( 3) ))).toSDLString() == "2004/10/31 14:30:15-GMT+02:03"); assert(Value(SysTime(DateTime(2004,10,31, 14,30,15), 123.msecs, new immutable SimpleTimeZone( hours(0) ))).toSDLString() == "2004/10/31 14:30:15.123-GMT+00:00"); // Duration assert( "12:14:42" == Value( days( 0)+hours(12)+minutes(14)+seconds(42)+msecs( 0)).toSDLString()); assert("-12:14:42" == Value(-days( 0)-hours(12)-minutes(14)-seconds(42)-msecs( 0)).toSDLString()); assert( "00:09:12" == Value( days( 0)+hours( 0)+minutes( 9)+seconds(12)+msecs( 0)).toSDLString()); assert( "00:00:01.023" == Value( days( 0)+hours( 0)+minutes( 0)+seconds( 1)+msecs( 23)).toSDLString()); assert( "23d:05:21:23.532" == Value( days(23)+hours( 5)+minutes(21)+seconds(23)+msecs(532)).toSDLString()); assert( "23d:05:21:23.530" == Value( days(23)+hours( 5)+minutes(21)+seconds(23)+msecs(530)).toSDLString()); assert( 
"23d:05:21:23.500" == Value( days(23)+hours( 5)+minutes(21)+seconds(23)+msecs(500)).toSDLString()); assert("-23d:05:21:23.532" == Value(-days(23)-hours( 5)-minutes(21)-seconds(23)-msecs(532)).toSDLString()); assert("-23d:05:21:23.500" == Value(-days(23)-hours( 5)-minutes(21)-seconds(23)-msecs(500)).toSDLString()); assert( "23d:05:21:23" == Value( days(23)+hours( 5)+minutes(21)+seconds(23)+msecs( 0)).toSDLString()); } dub-1.40.0/source/dub/internal/sdlang/util.d000066400000000000000000000052001477246567400206510ustar00rootroot00000000000000// SDLang-D // Written in the D programming language. module dub.internal.sdlang.util; version (Have_sdlang_d) public import sdlang.util; else: import std.algorithm; import std.datetime; import std.stdio; import std.string; import dub.internal.sdlang.token; enum sdlangVersion = "0.9.1"; alias immutable(ubyte)[] ByteString; auto startsWith(T)(string haystack, T needle) if( is(T:ByteString) || is(T:string) ) { return std.algorithm.startsWith( cast(ByteString)haystack, cast(ByteString)needle ); } struct Location { string file; /// Filename (including path) int line; /// Zero-indexed int col; /// Zero-indexed, Tab counts as 1 size_t index; /// Index into the source this(int line, int col, int index) { this.line = line; this.col = col; this.index = index; } this(string file, int line, int col, int index) { this.file = file; this.line = line; this.col = col; this.index = index; } string toString() { return "%s(%s:%s)".format(file, line+1, col+1); } } void removeIndex(E)(ref E[] arr, ptrdiff_t index) { arr = arr[0..index] ~ arr[index+1..$]; } void trace(string file=__FILE__, size_t line=__LINE__, TArgs...)(TArgs args) { version(sdlangTrace) { writeln(file, "(", line, "): ", args); stdout.flush(); } } string toString(TypeInfo ti) { if (ti == typeid( bool )) return "bool"; else if(ti == typeid( string )) return "string"; else if(ti == typeid( dchar )) return "dchar"; else if(ti == typeid( int )) return "int"; else if(ti == typeid( long )) 
return "long"; else if(ti == typeid( float )) return "float"; else if(ti == typeid( double )) return "double"; else if(ti == typeid( real )) return "real"; else if(ti == typeid( Date )) return "Date"; else if(ti == typeid( DateTimeFrac )) return "DateTimeFrac"; else if(ti == typeid( DateTimeFracUnknownZone )) return "DateTimeFracUnknownZone"; else if(ti == typeid( SysTime )) return "SysTime"; else if(ti == typeid( Duration )) return "Duration"; else if(ti == typeid( ubyte[] )) return "ubyte[]"; else if(ti == typeid( typeof(null) )) return "null"; return "{unknown}"; } enum BOM { UTF8, /// UTF-8 UTF16LE, /// UTF-16 (little-endian) UTF16BE, /// UTF-16 (big-endian) UTF32LE, /// UTF-32 (little-endian) UTF32BE, /// UTF-32 (big-endian) } enum NBOM = __traits(allMembers, BOM).length; immutable ubyte[][NBOM] ByteOrderMarks = [ [0xEF, 0xBB, 0xBF], //UTF8 [0xFF, 0xFE], //UTF16LE [0xFE, 0xFF], //UTF16BE [0xFF, 0xFE, 0x00, 0x00], //UTF32LE [0x00, 0x00, 0xFE, 0xFF] //UTF32BE ]; dub-1.40.0/source/dub/internal/temp_files.d000066400000000000000000000020311477246567400205520ustar00rootroot00000000000000/** Provides methods to generate temporary file names and folders and automatically clean them up on program exit. Copyright: © 2012 Matthias Dondorff, © 2012-2023 Sönke Ludwig License: Subject to the terms of the MIT license, as written in the included LICENSE.txt file. 
Authors: Matthias Dondorff, Sönke Ludwig, Jan Jurzitza */ module dub.internal.temp_files; import std.file; import dub.internal.vibecompat.core.file; NativePath getTempDir() { return NativePath(std.file.tempDir()); } NativePath getTempFile(string prefix, string extension = null) { import std.uuid : randomUUID; import std.array: replace; string fileName = prefix ~ "-" ~ randomUUID.toString() ~ extension; if (extension !is null && extension == ".d") fileName = fileName.replace("-", "_"); auto path = getTempDir() ~ fileName; temporary_files ~= path; return path; } private NativePath[] temporary_files; static ~this() { foreach (path; temporary_files) { auto spath = path.toNativeString(); if (spath.exists) std.file.remove(spath); } } dub-1.40.0/source/dub/internal/tinyendian.d000066400000000000000000000157131477246567400206000ustar00rootroot00000000000000// Copyright Ferdinand Majerech 2014. // Distributed under the Boost Software License, Version 1.0. // (See accompanying file LICENSE_1_0.txt or copy at // http://www.boost.org/LICENSE_1_0.txt) /// A minimal library providing functionality for changing the endianness of data. module dub.internal.tinyendian; import std.system : Endian, endian; /// Unicode UTF encodings. enum UTFEncoding : ubyte { UTF_8, UTF_16, UTF_32 } /// @safe unittest { const ints = [314, -101]; int[2] intsSwapBuffer = ints; swapByteOrder(intsSwapBuffer[]); swapByteOrder(intsSwapBuffer[]); assert(ints == intsSwapBuffer, "Lost information when swapping byte order"); const floats = [3.14f, 10.1f]; float[2] floatsSwapBuffer = floats; swapByteOrder(floatsSwapBuffer[]); swapByteOrder(floatsSwapBuffer[]); assert(floats == floatsSwapBuffer, "Lost information when swapping byte order"); } /** Swap byte order of items in an array in place. * * Params: * * T = Item type. Must be either 2 or 4 bytes long. * array = Buffer with values to fix byte order of. 
*/ void swapByteOrder(T)(T[] array) @trusted @nogc pure nothrow if (T.sizeof == 2 || T.sizeof == 4) { // Swap the byte order of all read characters. foreach (ref item; array) { static if (T.sizeof == 2) { import std.algorithm.mutation : swap; swap(*cast(ubyte*)&item, *(cast(ubyte*)&item + 1)); } else static if (T.sizeof == 4) { import core.bitop : bswap; const swapped = bswap(*cast(uint*)&item); item = *cast(const(T)*)&swapped; } else static assert(false, "Unsupported T: " ~ T.stringof); } } /// See fixUTFByteOrder. struct FixUTFByteOrderResult { ubyte[] array; UTFEncoding encoding; Endian endian; uint bytesStripped = 0; } /** Convert byte order of an array encoded in UTF(8/16/32) to system endianness in place. * * Uses the UTF byte-order-mark (BOM) to determine UTF encoding. If there is no BOM * at the beginning of array, UTF-8 is assumed (this is compatible with ASCII). The * BOM, if any, will be removed from the buffer. * * If the encoding is determined to be UTF-16 or UTF-32 and there aren't enough bytes * for the last code unit (i.e. if array.length is odd for UTF-16 or not divisible by * 4 for UTF-32), the extra bytes (1 for UTF-16, 1-3 for UTF-32) are stripped. * * Note that this function does $(B not) check if the array is a valid UTF string. It * only works with the BOM and 1,2 or 4-byte items. * * Params: * * array = The array with UTF-data. * * Returns: * * A struct with the following members: * * $(D ubyte[] array) A slice of the input array containing data in correct * byte order, without BOM and in case of UTF-16/UTF-32, * without stripped bytes, if any. * $(D UTFEncoding encoding) Encoding of the result (UTF-8, UTF-16 or UTF-32) * $(D std.system.Endian endian) Endianness of the original array. * $(D uint bytesStripped) Number of bytes stripped from a UTF-16/UTF-32 array, if * any. This is non-zero only if array.length was not * divisible by 2 or 4 for UTF-16 and UTF-32, respectively. 
* * Complexity: (BIGOH array.length) */ auto fixUTFByteOrder(ubyte[] array) @safe @nogc pure nothrow { // Enumerates UTF BOMs, matching indices to byteOrderMarks/bomEndian. enum BOM: ubyte { UTF_8 = 0, UTF_16_LE = 1, UTF_16_BE = 2, UTF_32_LE = 3, UTF_32_BE = 4, None = ubyte.max } // These 2 are from std.stream static immutable ubyte[][5] byteOrderMarks = [ [0xEF, 0xBB, 0xBF], [0xFF, 0xFE], [0xFE, 0xFF], [0xFF, 0xFE, 0x00, 0x00], [0x00, 0x00, 0xFE, 0xFF] ]; static immutable Endian[5] bomEndian = [ endian, Endian.littleEndian, Endian.bigEndian, Endian.littleEndian, Endian.bigEndian ]; // Documented in function ddoc. FixUTFByteOrderResult result; // Detect BOM, if any, in the bytes we've read. -1 means no BOM. // Need the last match: First 2 bytes of UTF-32LE BOM match the UTF-16LE BOM. If we // used the first match, UTF-16LE would be detected when we have a UTF-32LE BOM. import std.algorithm.searching : startsWith; BOM bomId = BOM.None; foreach (i, bom; byteOrderMarks) if (array.startsWith(bom)) bomId = cast(BOM)i; result.endian = (bomId != BOM.None) ? bomEndian[bomId] : Endian.init; // Start of UTF data (after BOM, if any) size_t start = 0; // If we've read more than just the BOM, put the rest into the array. with(BOM) final switch(bomId) { case None: result.encoding = UTFEncoding.UTF_8; break; case UTF_8: start = 3; result.encoding = UTFEncoding.UTF_8; break; case UTF_16_LE, UTF_16_BE: result.bytesStripped = array.length % 2; start = 2; result.encoding = UTFEncoding.UTF_16; break; case UTF_32_LE, UTF_32_BE: result.bytesStripped = array.length % 4; start = 4; result.encoding = UTFEncoding.UTF_32; break; } // If there's a BOM, we need to move data back to ensure it starts at array[0] if (start != 0) { array = array[start .. 
$ - result.bytesStripped]; } // We enforce above that array.length is divisible by 2/4 for UTF-16/32 if (endian != result.endian) { if (result.encoding == UTFEncoding.UTF_16) swapByteOrder(cast(wchar[])array); else if (result.encoding == UTFEncoding.UTF_32) swapByteOrder(cast(dchar[])array); } result.array = array; return result; } /// @safe unittest { { ubyte[] s = [0xEF, 0xBB, 0xBF, 'a']; FixUTFByteOrderResult r = fixUTFByteOrder(s); assert(r.encoding == UTFEncoding.UTF_8); assert(r.array.length == 1); assert(r.array == ['a']); assert(r.endian == Endian.littleEndian); } { ubyte[] s = ['a']; FixUTFByteOrderResult r = fixUTFByteOrder(s); assert(r.encoding == UTFEncoding.UTF_8); assert(r.array.length == 1); assert(r.array == ['a']); assert(r.endian == Endian.bigEndian); } { // strip 'a' b/c not complete unit ubyte[] s = [0xFE, 0xFF, 'a']; FixUTFByteOrderResult r = fixUTFByteOrder(s); assert(r.encoding == UTFEncoding.UTF_16); assert(r.array.length == 0); assert(r.endian == Endian.bigEndian); } } dub-1.40.0/source/dub/internal/undead/000077500000000000000000000000001477246567400175225ustar00rootroot00000000000000dub-1.40.0/source/dub/internal/undead/xml.d000066400000000000000000002622731477246567400205030ustar00rootroot00000000000000// Written in the D programming language. /** $(RED Warning: This module is considered out-dated and not up to Phobos' current standards. It will remain until we have a suitable replacement, but be aware that it will not remain long term.) Classes and functions for creating and parsing XML The basic architecture of this module is that there are standalone functions, classes for constructing an XML document from scratch (Tag, Element and Document), and also classes for parsing a pre-existing XML file (ElementParser and DocumentParser). The parsing classes may be used to build a Document, but that is not their primary purpose. 
The handling capabilities of DocumentParser and ElementParser are sufficiently customizable that you can make them do pretty much whatever you want. Example: This example creates a DOM (Document Object Model) tree from an XML file. ------------------------------------------------------------------------------ import dub.internal.undead.xml; import std.stdio; import std.string; import std.file; // books.xml is used in various samples throughout the Microsoft XML Core // Services (MSXML) SDK. // // See http://msdn2.microsoft.com/en-us/library/ms762271(VS.85).aspx void main() { string s = cast(string) std.file.read("books.xml"); // Check for well-formedness check(s); // Make a DOM tree auto doc = new Document(s); // Plain-print it writeln(doc); } ------------------------------------------------------------------------------ Example: This example does much the same thing, except that the file is deconstructed and reconstructed by hand. This is more work, but the techniques involved offer vastly more power. 
------------------------------------------------------------------------------ import dub.internal.undead.xml; import std.stdio; import std.string; struct Book { string id; string author; string title; string genre; string price; string pubDate; string description; } void main() { string s = cast(string) std.file.read("books.xml"); // Check for well-formedness check(s); // Take it apart Book[] books; auto xml = new DocumentParser(s); xml.onStartTag["book"] = (ElementParser xml) { Book book; book.id = xml.tag.attr["id"]; xml.onEndTag["author"] = (in Element e) { book.author = e.text(); }; xml.onEndTag["title"] = (in Element e) { book.title = e.text(); }; xml.onEndTag["genre"] = (in Element e) { book.genre = e.text(); }; xml.onEndTag["price"] = (in Element e) { book.price = e.text(); }; xml.onEndTag["publish-date"] = (in Element e) { book.pubDate = e.text(); }; xml.onEndTag["description"] = (in Element e) { book.description = e.text(); }; xml.parse(); books ~= book; }; xml.parse(); // Put it back together again; auto doc = new Document(new Tag("catalog")); foreach (book;books) { auto element = new Element("book"); element.tag.attr["id"] = book.id; element ~= new Element("author", book.author); element ~= new Element("title", book.title); element ~= new Element("genre", book.genre); element ~= new Element("price", book.price); element ~= new Element("publish-date",book.pubDate); element ~= new Element("description", book.description); doc ~= element; } // Pretty-print it writefln(join(doc.pretty(3),"\n")); } ------------------------------------------------------------------------------- Copyright: Copyright Janice Caron 2008 - 2009. License: $(HTTP www.boost.org/LICENSE_1_0.txt, Boost License 1.0). Authors: Janice Caron Source: $(PHOBOSSRC std/xml.d) */ /* Copyright Janice Caron 2008 - 2009. Distributed under the Boost Software License, Version 1.0. 
(See accompanying file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt) */ module dub.internal.undead.xml; enum cdata = "= 0x20) return true; switch (c) { case 0xA: case 0x9: case 0xD: return true; default: return false; } } else if (0xE000 <= c && c <= 0x10FFFF) { if ((c & 0x1FFFFE) != 0xFFFE) // U+FFFE and U+FFFF return true; } return false; } @safe @nogc nothrow pure unittest { assert(!isChar(cast(dchar) 0x8)); assert( isChar(cast(dchar) 0x9)); assert( isChar(cast(dchar) 0xA)); assert(!isChar(cast(dchar) 0xB)); assert(!isChar(cast(dchar) 0xC)); assert( isChar(cast(dchar) 0xD)); assert(!isChar(cast(dchar) 0xE)); assert(!isChar(cast(dchar) 0x1F)); assert( isChar(cast(dchar) 0x20)); assert( isChar('J')); assert( isChar(cast(dchar) 0xD7FF)); assert(!isChar(cast(dchar) 0xD800)); assert(!isChar(cast(dchar) 0xDFFF)); assert( isChar(cast(dchar) 0xE000)); assert( isChar(cast(dchar) 0xFFFD)); assert(!isChar(cast(dchar) 0xFFFE)); assert(!isChar(cast(dchar) 0xFFFF)); assert( isChar(cast(dchar) 0x10000)); assert( isChar(cast(dchar) 0x10FFFF)); assert(!isChar(cast(dchar) 0x110000)); debug (stdxml_TestHardcodedChecks) { foreach (c; 0 .. 
dchar.max + 1) assert(isChar(c) == lookup(CharTable, c)); } } /** * Returns true if the character is whitespace according to the XML standard * * Only the following characters are considered whitespace in XML - space, tab, * carriage return and linefeed * * Standards: $(LINK2 http://www.w3.org/TR/1998/REC-xml-19980210, XML 1.0) * * Params: * c = the character to be tested */ bool isSpace(dchar c) @safe @nogc pure nothrow { return c == '\u0020' || c == '\u0009' || c == '\u000A' || c == '\u000D'; } /** * Returns true if the character is a digit according to the XML standard * * Standards: $(LINK2 http://www.w3.org/TR/1998/REC-xml-19980210, XML 1.0) * * Params: * c = the character to be tested */ bool isDigit(dchar c) @safe @nogc pure nothrow { if (c <= 0x0039 && c >= 0x0030) return true; else return lookup(DigitTable,c); } @safe @nogc nothrow pure unittest { debug (stdxml_TestHardcodedChecks) { foreach (c; 0 .. dchar.max + 1) assert(isDigit(c) == lookup(DigitTable, c)); } } /** * Returns true if the character is a letter according to the XML standard * * Standards: $(LINK2 http://www.w3.org/TR/1998/REC-xml-19980210, XML 1.0) * * Params: * c = the character to be tested */ bool isLetter(dchar c) @safe @nogc nothrow pure // rule 84 { return isIdeographic(c) || isBaseChar(c); } /** * Returns true if the character is an ideographic character according to the * XML standard * * Standards: $(LINK2 http://www.w3.org/TR/1998/REC-xml-19980210, XML 1.0) * * Params: * c = the character to be tested */ bool isIdeographic(dchar c) @safe @nogc nothrow pure { if (c == 0x3007) return true; if (c <= 0x3029 && c >= 0x3021 ) return true; if (c <= 0x9FA5 && c >= 0x4E00) return true; return false; } @safe @nogc nothrow pure unittest { assert(isIdeographic('\u4E00')); assert(isIdeographic('\u9FA5')); assert(isIdeographic('\u3007')); assert(isIdeographic('\u3021')); assert(isIdeographic('\u3029')); debug (stdxml_TestHardcodedChecks) { foreach (c; 0 .. 
dchar.max + 1) assert(isIdeographic(c) == lookup(IdeographicTable, c)); } } /** * Returns true if the character is a base character according to the XML * standard * * Standards: $(LINK2 http://www.w3.org/TR/1998/REC-xml-19980210, XML 1.0) * * Params: * c = the character to be tested */ bool isBaseChar(dchar c) @safe @nogc nothrow pure { return lookup(BaseCharTable,c); } /** * Returns true if the character is a combining character according to the * XML standard * * Standards: $(LINK2 http://www.w3.org/TR/1998/REC-xml-19980210, XML 1.0) * * Params: * c = the character to be tested */ bool isCombiningChar(dchar c) @safe @nogc nothrow pure { return lookup(CombiningCharTable,c); } /** * Returns true if the character is an extender according to the XML standard * * Standards: $(LINK2 http://www.w3.org/TR/1998/REC-xml-19980210, XML 1.0) * * Params: * c = the character to be tested */ bool isExtender(dchar c) @safe @nogc nothrow pure { return lookup(ExtenderTable,c); } /** * Encodes a string by replacing all characters which need to be escaped with * appropriate predefined XML entities. * * encode() escapes certain characters (ampersand, quote, apostrophe, less-than * and greater-than), and similarly, decode() unescapes them. These functions * are provided for convenience only. You do not need to use them when using * the undead.xml classes, because then all the encoding and decoding will be done * for you automatically. * * If the string is not modified, the original will be returned. 
* * Standards: $(LINK2 http://www.w3.org/TR/1998/REC-xml-19980210, XML 1.0) * * Params: * s = The string to be encoded * * Returns: The encoded string * * Example: * -------------- * writefln(encode("a > b")); // writes "a > b" * -------------- */ S encode(S)(S s) { import std.array : appender; string r; size_t lastI; auto result = appender!S(); foreach (i, c; s) { switch (c) { case '&': r = "&"; break; case '"': r = """; break; case '\'': r = "'"; break; case '<': r = "<"; break; case '>': r = ">"; break; default: continue; } // Replace with r result.put(s[lastI .. i]); result.put(r); lastI = i + 1; } if (!result.data.ptr) return s; result.put(s[lastI .. $]); return result.data; } @safe pure unittest { auto s = "hello"; assert(encode(s) is s); assert(encode("a > b") == "a > b", encode("a > b")); assert(encode("a < b") == "a < b"); assert(encode("don't") == "don't"); assert(encode("\"hi\"") == ""hi"", encode("\"hi\"")); assert(encode("cat & dog") == "cat & dog"); } /** * Mode to use for decoding. * * $(DDOC_ENUM_MEMBERS NONE) Do not decode * $(DDOC_ENUM_MEMBERS LOOSE) Decode, but ignore errors * $(DDOC_ENUM_MEMBERS STRICT) Decode, and throw exception on error */ enum DecodeMode { NONE, LOOSE, STRICT } /** * Decodes a string by unescaping all predefined XML entities. * * encode() escapes certain characters (ampersand, quote, apostrophe, less-than * and greater-than), and similarly, decode() unescapes them. These functions * are provided for convenience only. You do not need to use them when using * the undead.xml classes, because then all the encoding and decoding will be done * for you automatically. * * This function decodes the entities &amp;, &quot;, &apos;, * &lt; and &gt, * as well as decimal and hexadecimal entities such as &#x20AC; * * If the string does not contain an ampersand, the original will be returned. 
* * Note that the "mode" parameter can be one of DecodeMode.NONE (do not * decode), DecodeMode.LOOSE (decode, but ignore errors), or DecodeMode.STRICT * (decode, and throw a DecodeException in the event of an error). * * Standards: $(LINK2 http://www.w3.org/TR/1998/REC-xml-19980210, XML 1.0) * * Params: * s = The string to be decoded * mode = (optional) Mode to use for decoding. (Defaults to LOOSE). * * Throws: DecodeException if mode == DecodeMode.STRICT and decode fails * * Returns: The decoded string * * Example: * -------------- * writefln(decode("a > b")); // writes "a > b" * -------------- */ string decode(string s, DecodeMode mode=DecodeMode.LOOSE) @safe pure { import std.algorithm.searching : startsWith; if (mode == DecodeMode.NONE) return s; string buffer; foreach (ref i; 0 .. s.length) { char c = s[i]; if (c != '&') { if (buffer.length != 0) buffer ~= c; } else { if (buffer.length == 0) { buffer = s[0 .. i].dup; } if (startsWith(s[i..$],"&#")) { try { dchar d; string t = s[i..$]; checkCharRef(t, d); char[4] temp; import std.utf : encode; buffer ~= temp[0 .. encode(temp, d)]; i = s.length - t.length - 1; } catch (Err e) { if (mode == DecodeMode.STRICT) throw new DecodeException("Unescaped &"); buffer ~= '&'; } } else if (startsWith(s[i..$],"&" )) { buffer ~= '&'; i += 4; } else if (startsWith(s[i..$],""")) { buffer ~= '"'; i += 5; } else if (startsWith(s[i..$],"'")) { buffer ~= '\''; i += 5; } else if (startsWith(s[i..$],"<" )) { buffer ~= '<'; i += 3; } else if (startsWith(s[i..$],">" )) { buffer ~= '>'; i += 3; } else { if (mode == DecodeMode.STRICT) throw new DecodeException("Unescaped &"); buffer ~= '&'; } } } return (buffer.length == 0) ? 
s : buffer; } @safe pure unittest { void assertNot(string s) pure { bool b = false; try { decode(s,DecodeMode.STRICT); } catch (DecodeException e) { b = true; } assert(b,s); } // Assert that things that should work, do auto s = "hello"; assert(decode(s, DecodeMode.STRICT) is s); assert(decode("a > b", DecodeMode.STRICT) == "a > b"); assert(decode("a < b", DecodeMode.STRICT) == "a < b"); assert(decode("don't", DecodeMode.STRICT) == "don't"); assert(decode(""hi"", DecodeMode.STRICT) == "\"hi\""); assert(decode("cat & dog", DecodeMode.STRICT) == "cat & dog"); assert(decode("*", DecodeMode.STRICT) == "*"); assert(decode("*", DecodeMode.STRICT) == "*"); assert(decode("cat & dog", DecodeMode.LOOSE) == "cat & dog"); assert(decode("a > b", DecodeMode.LOOSE) == "a > b"); assert(decode("&#;", DecodeMode.LOOSE) == "&#;"); assert(decode("&#x;", DecodeMode.LOOSE) == "&#x;"); assert(decode("G;", DecodeMode.LOOSE) == "G;"); assert(decode("G;", DecodeMode.LOOSE) == "G;"); // Assert that things that shouldn't work, don't assertNot("cat & dog"); assertNot("a > b"); assertNot("&#;"); assertNot("&#x;"); assertNot("G;"); assertNot("G;"); } /** * Class representing an XML document. * * Standards: $(LINK2 http://www.w3.org/TR/1998/REC-xml-19980210, XML 1.0) * */ class Document : Element { /** * Contains all text which occurs before the root element. * Defaults to <?xml version="1.0"?> */ string prolog = ""; /** * Contains all text which occurs after the root element. * Defaults to the empty string */ string epilog; /** * Constructs a Document by parsing XML text. * * This function creates a complete DOM (Document Object Model) tree. * * The input to this function MUST be valid XML. * This is enforced by DocumentParser's in contract. * * Params: * s = the complete XML text. */ this(string s) in { assert(s.length != 0); } do { auto xml = new DocumentParser(s); string tagString = xml.tag.tagString; this(xml.tag); prolog = s[0 .. 
tagString.ptr - s.ptr]; parse(xml); epilog = *xml.s; } /** * Constructs a Document from a Tag. * * Params: * tag = the start tag of the document. */ this(const(Tag) tag) { super(tag); } const { /** * Compares two Documents for equality * * Example: * -------------- * Document d1,d2; * if (d1 == d2) { } * -------------- */ override bool opEquals(scope const Object o) const { const scope doc = toType!(const Document)(o); return prolog == doc.prolog && (cast(const) this).Element.opEquals(cast(const) doc) && epilog == doc.epilog; } /** * Compares two Documents * * You should rarely need to call this function. It exists so that * Documents can be used as associative array keys. * * Example: * -------------- * Document d1,d2; * if (d1 < d2) { } * -------------- */ override int opCmp(scope const Object o) scope const { const scope doc = toType!(const Document)(o); if (prolog != doc.prolog) return prolog < doc.prolog ? -1 : 1; if (int cmp = this.Element.opCmp(doc)) return cmp; if (epilog != doc.epilog) return epilog < doc.epilog ? -1 : 1; return 0; } /** * Returns the hash of a Document * * You should rarely need to call this function. It exists so that * Documents can be used as associative array keys. */ override size_t toHash() scope const @trusted { return hash(prolog, hash(epilog, (cast() this).Element.toHash())); } /** * Returns the string representation of a Document. (That is, the * complete XML of a document). */ override string toString() scope const @safe { return prolog ~ super.toString() ~ epilog; } } } @system unittest { // https://issues.dlang.org/show_bug.cgi?id=14966 auto xml = ``; auto a = new Document(xml); auto b = new Document(xml); assert(a == b); assert(!(a < b)); int[Document] aa; aa[a] = 1; assert(aa[b] == 1); b ~= new Element("b"); assert(a < b); assert(b > a); } /** * Class representing an XML element. 
* * Standards: $(LINK2 http://www.w3.org/TR/1998/REC-xml-19980210, XML 1.0) */ class Element : Item { Tag tag; /// The start tag of the element Item[] items; /// The element's items Text[] texts; /// The element's text items CData[] cdatas; /// The element's CData items Comment[] comments; /// The element's comments ProcessingInstruction[] pis; /// The element's processing instructions Element[] elements; /// The element's child elements /** * Constructs an Element given a name and a string to be used as a Text * interior. * * Params: * name = the name of the element. * interior = (optional) the string interior. * * Example: * ------------------------------------------------------- * auto element = new Element("title","Serenity") * // constructs the element Serenity * ------------------------------------------------------- */ this(string name, string interior=null) @safe pure { this(new Tag(name)); if (interior.length != 0) opOpAssign!("~")(new Text(interior)); } /** * Constructs an Element from a Tag. * * Params: * tag_ = the start or empty tag of the element. */ this(const(Tag) tag_) @safe pure { this.tag = new Tag(tag_.name); tag.type = TagType.EMPTY; foreach (k,v;tag_.attr) tag.attr[k] = v; tag.tagString = tag_.tagString; } /** * Append a text item to the interior of this element * * Params: * item = the item you wish to append. * * Example: * -------------- * Element element; * element ~= new Text("hello"); * -------------- */ void opOpAssign(string op)(Text item) @safe pure if (op == "~") { texts ~= item; appendItem(item); } /** * Append a CData item to the interior of this element * * Params: * item = the item you wish to append. * * Example: * -------------- * Element element; * element ~= new CData("hello"); * -------------- */ void opOpAssign(string op)(CData item) @safe pure if (op == "~") { cdatas ~= item; appendItem(item); } /** * Append a comment to the interior of this element * * Params: * item = the item you wish to append. 
* * Example: * -------------- * Element element; * element ~= new Comment("hello"); * -------------- */ void opOpAssign(string op)(Comment item) @safe pure if (op == "~") { comments ~= item; appendItem(item); } /** * Append a processing instruction to the interior of this element * * Params: * item = the item you wish to append. * * Example: * -------------- * Element element; * element ~= new ProcessingInstruction("hello"); * -------------- */ void opOpAssign(string op)(ProcessingInstruction item) @safe pure if (op == "~") { pis ~= item; appendItem(item); } /** * Append a complete element to the interior of this element * * Params: * item = the item you wish to append. * * Example: * -------------- * Element element; * Element other = new Element("br"); * element ~= other; * // appends element representing
* -------------- */ void opOpAssign(string op)(Element item) @safe pure if (op == "~") { elements ~= item; appendItem(item); } private void appendItem(Item item) @safe pure { items ~= item; if (tag.type == TagType.EMPTY && !item.isEmptyXML) tag.type = TagType.START; } private void parse(ElementParser xml) { xml.onText = (string s) { opOpAssign!("~")(new Text(s)); }; xml.onCData = (string s) { opOpAssign!("~")(new CData(s)); }; xml.onComment = (string s) { opOpAssign!("~")(new Comment(s)); }; xml.onPI = (string s) { opOpAssign!("~")(new ProcessingInstruction(s)); }; xml.onStartTag[null] = (ElementParser xml) { auto e = new Element(xml.tag); e.parse(xml); opOpAssign!("~")(e); }; xml.parse(); } /** * Compares two Elements for equality * * Example: * -------------- * Element e1,e2; * if (e1 == e2) { } * -------------- */ override bool opEquals(scope const Object o) const { const scope element = toType!(const Element)(o); immutable len = items.length; if (len != element.items.length) return false; foreach (i; 0 .. len) { if (!items[i].opEquals(element.items[i])) return false; } return true; } /** * Compares two Elements * * You should rarely need to call this function. It exists so that Elements * can be used as associative array keys. * * Example: * -------------- * Element e1,e2; * if (e1 < e2) { } * -------------- */ override int opCmp(scope const Object o) @safe const { const scope element = toType!(const Element)(o); for (uint i=0; ; ++i) { if (i == items.length && i == element.items.length) return 0; if (i == items.length) return -1; if (i == element.items.length) return 1; if (!items[i].opEquals(element.items[i])) return items[i].opCmp(element.items[i]); } } /** * Returns the hash of an Element * * You should rarely need to call this function. It exists so that Elements * can be used as associative array keys. 
*/ override size_t toHash() scope const @safe { size_t hash = tag.toHash(); foreach (item;items) hash += item.toHash(); return hash; } const { /** * Returns the decoded interior of an element. * * The element is assumed to contain text only. So, for * example, given XML such as "<title>Good &amp; * Bad</title>", will return "Good & Bad". * * Params: * mode = (optional) Mode to use for decoding. (Defaults to LOOSE). * * Throws: DecodeException if decode fails */ string text(DecodeMode mode=DecodeMode.LOOSE) { string buffer; foreach (item;items) { Text t = cast(Text) item; if (t is null) throw new DecodeException(item.toString()); buffer ~= decode(t.toString(),mode); } return buffer; } /** * Returns an indented string representation of this item * * Params: * indent = (optional) number of spaces by which to indent this * element. Defaults to 2. */ override string[] pretty(uint indent=2) scope { import std.algorithm.searching : count; import std.string : rightJustify; if (isEmptyXML) return [ tag.toEmptyString() ]; if (items.length == 1) { auto t = cast(const(Text))(items[0]); if (t !is null) { return [tag.toStartString() ~ t.toString() ~ tag.toEndString()]; } } string[] a = [ tag.toStartString() ]; foreach (item;items) { string[] b = item.pretty(indent); foreach (s;b) { a ~= rightJustify(s,count(s) + indent); } } a ~= tag.toEndString(); return a; } /** * Returns the string representation of an Element * * Example: * -------------- * auto element = new Element("br"); * writefln(element.toString()); // writes "
" * -------------- */ override string toString() scope @safe { if (isEmptyXML) return tag.toEmptyString(); string buffer = tag.toStartString(); foreach (item;items) { buffer ~= item.toString(); } buffer ~= tag.toEndString(); return buffer; } override @property @safe pure @nogc nothrow bool isEmptyXML() const scope { return items.length == 0; } } } /** * Tag types. * * $(DDOC_ENUM_MEMBERS START) Used for start tags * $(DDOC_ENUM_MEMBERS END) Used for end tags * $(DDOC_ENUM_MEMBERS EMPTY) Used for empty tags * */ enum TagType { START, END, EMPTY } /** * Class representing an XML tag. * * Standards: $(LINK2 http://www.w3.org/TR/1998/REC-xml-19980210, XML 1.0) * * The class invariant guarantees *
     * $(UL
     * $(LI that $(B type) is a valid enum TagType value)
     * $(LI that $(B name) consists of valid characters)
     * $(LI that each attribute name consists of valid characters)
     * )
*/ class Tag { TagType type = TagType.START; /// Type of tag string name; /// Tag name string[string] attr; /// Associative array of attributes private string tagString; invariant() { string s; string t; assert(type == TagType.START || type == TagType.END || type == TagType.EMPTY); s = name; try { checkName(s,t); } catch (Err e) { assert(false,"Invalid tag name:" ~ e.toString()); } foreach (k,v;attr) { s = k; try { checkName(s,t); } catch (Err e) { assert(false,"Invalid attribute name:" ~ e.toString()); } } } /** * Constructs an instance of Tag with a specified name and type * * The constructor does not initialize the attributes. To initialize the * attributes, you access the $(B attr) member variable. * * Params: * name = the Tag's name * type = (optional) the Tag's type. If omitted, defaults to * TagType.START. * * Example: * -------------- * auto tag = new Tag("img",Tag.EMPTY); * tag.attr["src"] = "http://example.com/example.jpg"; * -------------- */ this(string name, TagType type=TagType.START) @safe pure { this.name = name; this.type = type; } /* Private constructor (so don't ddoc this!) * * Constructs a Tag by parsing the string representation, e.g. "". * * The string is passed by reference, and is advanced over all characters * consumed. * * The second parameter is a dummy parameter only, required solely to * distinguish this constructor from the public one. */ private this(ref string s, bool dummy) @safe pure { import std.algorithm.searching : countUntil; import std.ascii : isWhite; import std.utf : byCodeUnit; tagString = s; try { reqc(s,'<'); if (optc(s,'/')) type = TagType.END; ptrdiff_t i = s.byCodeUnit.countUntil(">", "/>", " ", "\t", "\v", "\r", "\n", "\f"); name = s[0 .. i]; s = s[i .. $]; i = s.byCodeUnit.countUntil!(a => !isWhite(a)); s = s[i .. $]; while (s.length > 0 && s[0] != '>' && s[0] != '/') { i = s.byCodeUnit.countUntil("=", " ", "\t", "\v", "\r", "\n", "\f"); string key = s[0 .. i]; s = s[i .. 
$]; i = s.byCodeUnit.countUntil!(a => !isWhite(a)); s = s[i .. $]; reqc(s,'='); i = s.byCodeUnit.countUntil!(a => !isWhite(a)); s = s[i .. $]; immutable char quote = requireOneOf(s,"'\""); i = s.byCodeUnit.countUntil(quote); string val = decode(s[0 .. i], DecodeMode.LOOSE); s = s[i .. $]; reqc(s,quote); i = s.byCodeUnit.countUntil!(a => !isWhite(a)); s = s[i .. $]; attr[key] = val; } if (optc(s,'/')) { if (type == TagType.END) throw new TagException(""); type = TagType.EMPTY; } reqc(s,'>'); tagString.length = tagString.length - s.length; } catch (XMLException e) { tagString.length = tagString.length - s.length; throw new TagException(tagString); } } const { /** * Compares two Tags for equality * * You should rarely need to call this function. It exists so that Tags * can be used as associative array keys. * * Example: * -------------- * Tag tag1,tag2 * if (tag1 == tag2) { } * -------------- */ override bool opEquals(scope Object o) { const tag = toType!(const Tag)(o); return (name != tag.name) ? false : ( (attr != tag.attr) ? false : ( (type != tag.type) ? false : ( true ))); } /** * Compares two Tags * * Example: * -------------- * Tag tag1,tag2 * if (tag1 < tag2) { } * -------------- */ override int opCmp(Object o) { const tag = toType!(const Tag)(o); // Note that attr is an AA, so the comparison is nonsensical (bug 10381) return ((name != tag.name) ? ( name < tag.name ? -1 : 1 ) : ((attr != tag.attr) ? ( cast(void *) attr < cast(void*) tag.attr ? -1 : 1 ) : ((type != tag.type) ? ( type < tag.type ? -1 : 1 ) : 0 ))); } /** * Returns the hash of a Tag * * You should rarely need to call this function. It exists so that Tags * can be used as associative array keys. 
*/ override size_t toHash() { return .hashOf(name); } /** * Returns the string representation of a Tag * * Example: * -------------- * auto tag = new Tag("book",TagType.START); * writefln(tag.toString()); // writes "" * -------------- */ override string toString() @safe { if (isEmpty) return toEmptyString(); return (isEnd) ? toEndString() : toStartString(); } private { string toNonEndString() @safe { import std.format : format; string s = "<" ~ name; foreach (key,val;attr) s ~= format(" %s=\"%s\"",key,encode(val)); return s; } string toStartString() @safe { return toNonEndString() ~ ">"; } string toEndString() @safe { return ""; } string toEmptyString() @safe { return toNonEndString() ~ " />"; } } /** * Returns true if the Tag is a start tag * * Example: * -------------- * if (tag.isStart) { } * -------------- */ @property bool isStart() @safe @nogc pure nothrow { return type == TagType.START; } /** * Returns true if the Tag is an end tag * * Example: * -------------- * if (tag.isEnd) { } * -------------- */ @property bool isEnd() @safe @nogc pure nothrow { return type == TagType.END; } /** * Returns true if the Tag is an empty tag * * Example: * -------------- * if (tag.isEmpty) { } * -------------- */ @property bool isEmpty() @safe @nogc pure nothrow { return type == TagType.EMPTY; } } } /** * Class representing a comment */ class Comment : Item { private string content; /** * Construct a comment * * Params: * content = the body of the comment * * Throws: CommentException if the comment body is illegal (contains "--" * or exactly equals "-") * * Example: * -------------- * auto item = new Comment("This is a comment"); * // constructs * -------------- */ this(string content) @safe pure { import std.string : indexOf; if (content == "-" || content.indexOf("--") != -1) throw new CommentException(content); this.content = content; } /** * Compares two comments for equality * * Example: * -------------- * Comment item1,item2; * if (item1 == item2) { } * -------------- 
*/ override bool opEquals(scope const Object o) const { const scope item = toType!(const Item)(o); const t = cast(const Comment) item; return t !is null && content == t.content; } /** * Compares two comments * * You should rarely need to call this function. It exists so that Comments * can be used as associative array keys. * * Example: * -------------- * Comment item1,item2; * if (item1 < item2) { } * -------------- */ override int opCmp(scope const Object o) scope const { const scope item = toType!(const Item)(o); const t = cast(const Comment) item; return t !is null && (content != t.content ? (content < t.content ? -1 : 1 ) : 0 ); } /** * Returns the hash of a Comment * * You should rarely need to call this function. It exists so that Comments * can be used as associative array keys. */ override size_t toHash() scope const nothrow { return hash(content); } /** * Returns a string representation of this comment */ override string toString() scope const @safe pure nothrow { return ""; } override @property @safe @nogc pure nothrow scope bool isEmptyXML() const { return false; } /// Returns false always } @safe unittest // issue 16241 { import std.exception : assertThrown; auto c = new Comment("=="); assert(c.content == "=="); assertThrown!CommentException(new Comment("--")); } /** * Class representing a Character Data section */ class CData : Item { private string content; /** * Construct a character data section * * Params: * content = the body of the character data segment * * Throws: CDataException if the segment body is illegal (contains "]]>") * * Example: * -------------- * auto item = new CData("hello"); * // constructs hello]]> * -------------- */ this(string content) @safe pure { import std.string : indexOf; if (content.indexOf("]]>") != -1) throw new CDataException(content); this.content = content; } /** * Compares two CDatas for equality * * Example: * -------------- * CData item1,item2; * if (item1 == item2) { } * -------------- */ override bool 
opEquals(scope const Object o) const { const scope item = toType!(const Item)(o); const t = cast(const CData) item; return t !is null && content == t.content; } /** * Compares two CDatas * * You should rarely need to call this function. It exists so that CDatas * can be used as associative array keys. * * Example: * -------------- * CData item1,item2; * if (item1 < item2) { } * -------------- */ override int opCmp(scope const Object o) scope const { const scope item = toType!(const Item)(o); const t = cast(const CData) item; return t !is null && (content != t.content ? (content < t.content ? -1 : 1 ) : 0 ); } /** * Returns the hash of a CData * * You should rarely need to call this function. It exists so that CDatas * can be used as associative array keys. */ override size_t toHash() scope const nothrow { return hash(content); } /** * Returns a string representation of this CData section */ override string toString() scope const @safe pure nothrow { return cdata ~ content ~ "]]>"; } override @property @safe @nogc pure nothrow scope bool isEmptyXML() const { return false; } /// Returns false always } /** * Class representing a text (aka Parsed Character Data) section */ class Text : Item { private string content; /** * Construct a text (aka PCData) section * * Params: * content = the text. This function encodes the text before * insertion, so it is safe to insert any text * * Example: * -------------- * auto Text = new CData("a < b"); * // constructs a < b * -------------- */ this(string content) @safe pure { this.content = encode(content); } /** * Compares two text sections for equality * * Example: * -------------- * Text item1,item2; * if (item1 == item2) { } * -------------- */ override bool opEquals(scope const Object o) const { const scope item = toType!(const Item)(o); const t = cast(const Text) item; return t !is null && content == t.content; } /** * Compares two text sections * * You should rarely need to call this function. 
It exists so that Texts * can be used as associative array keys. * * Example: * -------------- * Text item1,item2; * if (item1 < item2) { } * -------------- */ override int opCmp(scope const Object o) scope const { const scope item = toType!(const Item)(o); const t = cast(const Text) item; return t !is null && (content != t.content ? (content < t.content ? -1 : 1 ) : 0 ); } /** * Returns the hash of a text section * * You should rarely need to call this function. It exists so that Texts * can be used as associative array keys. */ override size_t toHash() scope const nothrow { return hash(content); } /** * Returns a string representation of this Text section */ override string toString() scope const @safe @nogc pure nothrow { return content; } /** * Returns true if the content is the empty string */ override @property @safe @nogc pure nothrow scope bool isEmptyXML() const { return content.length == 0; } } /** * Class representing an XML Instruction section */ class XMLInstruction : Item { private string content; /** * Construct an XML Instruction section * * Params: * content = the body of the instruction segment * * Throws: XIException if the segment body is illegal (contains ">") * * Example: * -------------- * auto item = new XMLInstruction("ATTLIST"); * // constructs * -------------- */ this(string content) @safe pure { import std.string : indexOf; if (content.indexOf(">") != -1) throw new XIException(content); this.content = content; } /** * Compares two XML instructions for equality * * Example: * -------------- * XMLInstruction item1,item2; * if (item1 == item2) { } * -------------- */ override bool opEquals(scope const Object o) const { const scope item = toType!(const Item)(o); const t = cast(const XMLInstruction) item; return t !is null && content == t.content; } /** * Compares two XML instructions * * You should rarely need to call this function. It exists so that * XmlInstructions can be used as associative array keys. 
* * Example: * -------------- * XMLInstruction item1,item2; * if (item1 < item2) { } * -------------- */ override int opCmp(scope const Object o) scope const { const scope item = toType!(const Item)(o); const t = cast(const XMLInstruction) item; return t !is null && (content != t.content ? (content < t.content ? -1 : 1 ) : 0 ); } /** * Returns the hash of an XMLInstruction * * You should rarely need to call this function. It exists so that * XmlInstructions can be used as associative array keys. */ override size_t toHash() scope const nothrow { return hash(content); } /** * Returns a string representation of this XmlInstruction */ override string toString() scope const @safe pure nothrow { return ""; } override @property @safe @nogc pure nothrow scope bool isEmptyXML() const { return false; } /// Returns false always } /** * Class representing a Processing Instruction section */ class ProcessingInstruction : Item { private string content; /** * Construct a Processing Instruction section * * Params: * content = the body of the instruction segment * * Throws: PIException if the segment body is illegal (contains "?>") * * Example: * -------------- * auto item = new ProcessingInstruction("php"); * // constructs * -------------- */ this(string content) @safe pure { import std.string : indexOf; if (content.indexOf("?>") != -1) throw new PIException(content); this.content = content; } /** * Compares two processing instructions for equality * * Example: * -------------- * ProcessingInstruction item1,item2; * if (item1 == item2) { } * -------------- */ override bool opEquals(scope const Object o) const { const scope item = toType!(const Item)(o); const t = cast(const ProcessingInstruction) item; return t !is null && content == t.content; } /** * Compares two processing instructions * * You should rarely need to call this function. It exists so that * ProcessingInstructions can be used as associative array keys. 
* * Example: * -------------- * ProcessingInstruction item1,item2; * if (item1 < item2) { } * -------------- */ override int opCmp(scope const Object o) scope const { const scope item = toType!(const Item)(o); const t = cast(const ProcessingInstruction) item; return t !is null && (content != t.content ? (content < t.content ? -1 : 1 ) : 0 ); } /** * Returns the hash of a ProcessingInstruction * * You should rarely need to call this function. It exists so that * ProcessingInstructions can be used as associative array keys. */ override size_t toHash() scope const nothrow { return hash(content); } /** * Returns a string representation of this ProcessingInstruction */ override string toString() scope const @safe pure nothrow { return ""; } override @property @safe @nogc pure nothrow bool isEmptyXML() scope const { return false; } /// Returns false always } /** * Abstract base class for XML items */ abstract class Item { /// Compares with another Item of same type for equality abstract override bool opEquals(scope const Object o) @safe const; /// Compares with another Item of same type abstract override int opCmp(scope const Object o) @safe const; /// Returns the hash of this item abstract override size_t toHash() @safe scope const; /// Returns a string representation of this item abstract override string toString() @safe scope const; /** * Returns an indented string representation of this item * * Params: * indent = number of spaces by which to indent child elements */ string[] pretty(uint indent) @safe scope const { import std.string : strip; string s = strip(toString()); return s.length == 0 ? [] : [ s ]; } /// Returns true if the item represents empty XML text abstract @property @safe @nogc pure nothrow bool isEmptyXML() scope const; } /** * Class for parsing an XML Document. * * This is a subclass of ElementParser. Most of the useful functions are * documented there. 
* * Standards: $(LINK2 http://www.w3.org/TR/1998/REC-xml-19980210, XML 1.0) * * Bugs: * Currently only supports UTF documents. * * If there is an encoding attribute in the prolog, it is ignored. * */ class DocumentParser : ElementParser { string xmlText; /** * Constructs a DocumentParser. * * The input to this function MUST be valid XML. * This is enforced by the function's in contract. * * Params: * xmlText_ = the entire XML document as text * */ this(string xmlText_) in { assert(xmlText_.length != 0); try { // Confirm that the input is valid XML check(xmlText_); } catch (CheckException e) { // And if it's not, tell the user why not assert(false, "\n" ~ e.toString()); } } do { xmlText = xmlText_; s = &xmlText; super(); // Initialize everything parse(); // Parse through the root tag (but not beyond) } } @system unittest { auto doc = new Document(""); assert(doc.elements.length == 1); assert(doc.elements[0].tag.name == "child"); assert(doc.items == doc.elements); } /** * Class for parsing an XML element. * * Standards: $(LINK2 http://www.w3.org/TR/1998/REC-xml-19980210, XML 1.0) * * Note that you cannot construct instances of this class directly. You can * construct a DocumentParser (which is a subclass of ElementParser), but * otherwise, Instances of ElementParser will be created for you by the * library, and passed your way via onStartTag handlers. 
* */ class ElementParser { alias Handler = void delegate(string); alias ElementHandler = void delegate(in Element element); alias ParserHandler = void delegate(ElementParser parser); private { Tag tag_; string elementStart; string* s; Handler commentHandler = null; Handler cdataHandler = null; Handler xiHandler = null; Handler piHandler = null; Handler rawTextHandler = null; Handler textHandler = null; // Private constructor for start tags this(ElementParser parent) @safe @nogc pure nothrow { s = parent.s; this(); tag_ = parent.tag_; } // Private constructor for empty tags this(Tag tag, string* t) @safe @nogc pure nothrow { s = t; this(); tag_ = tag; } } /** * The Tag at the start of the element being parsed. You can read this to * determine the tag's name and attributes. */ @property @safe @nogc pure nothrow const(Tag) tag() const { return tag_; } /** * Register a handler which will be called whenever a start tag is * encountered which matches the specified name. You can also pass null as * the name, in which case the handler will be called for any unmatched * start tag. * * Example: * -------------- * // Call this function whenever a start tag is encountered * onStartTag["podcast"] = (ElementParser xml) * { * // Your code here * // * // This is a a closure, so code here may reference * // variables which are outside of this scope * }; * * // call myEpisodeStartHandler (defined elsewhere) whenever an * // start tag is encountered * onStartTag["episode"] = &myEpisodeStartHandler; * * // call delegate dg for all other start tags * onStartTag[null] = dg; * -------------- * * This library will supply your function with a new instance of * ElementHandler, which may be used to parse inside the element whose * start tag was just found, or to identify the tag attributes of the * element, etc. * * Note that your function will be called for both start tags and empty * tags. That is, we make no distinction between <br></br> * and <br/>. 
*/ ParserHandler[string] onStartTag; /** * Register a handler which will be called whenever an end tag is * encountered which matches the specified name. You can also pass null as * the name, in which case the handler will be called for any unmatched * end tag. * * Example: * -------------- * // Call this function whenever a end tag is encountered * onEndTag["podcast"] = (in Element e) * { * // Your code here * // * // This is a a closure, so code here may reference * // variables which are outside of this scope * }; * * // call myEpisodeEndHandler (defined elsewhere) whenever an * // end tag is encountered * onEndTag["episode"] = &myEpisodeEndHandler; * * // call delegate dg for all other end tags * onEndTag[null] = dg; * -------------- * * Note that your function will be called for both start tags and empty * tags. That is, we make no distinction between <br></br> * and <br/>. */ ElementHandler[string] onEndTag; protected this() @safe @nogc pure nothrow { elementStart = *s; } /** * Register a handler which will be called whenever text is encountered. * * Example: * -------------- * // Call this function whenever text is encountered * onText = (string s) * { * // Your code here * * // The passed parameter s will have been decoded by the time you see * // it, and so may contain any character. * // * // This is a a closure, so code here may reference * // variables which are outside of this scope * }; * -------------- */ @property @safe @nogc pure nothrow void onText(Handler handler) { textHandler = handler; } /** * Register an alternative handler which will be called whenever text * is encountered. This differs from onText in that onText will decode * the text, whereas onTextRaw will not. This allows you to make design * choices, since onText will be more accurate, but slower, while * onTextRaw will be faster, but less accurate. 
Of course, you can * still call decode() within your handler, if you want, but you'd * probably want to use onTextRaw only in circumstances where you * know that decoding is unnecessary. * * Example: * -------------- * // Call this function whenever text is encountered * onText = (string s) * { * // Your code here * * // The passed parameter s will NOT have been decoded. * // * // This is a a closure, so code here may reference * // variables which are outside of this scope * }; * -------------- */ @safe @nogc pure nothrow void onTextRaw(Handler handler) { rawTextHandler = handler; } /** * Register a handler which will be called whenever a character data * segment is encountered. * * Example: * -------------- * // Call this function whenever a CData section is encountered * onCData = (string s) * { * // Your code here * * // The passed parameter s does not include the opening * // * // This is a a closure, so code here may reference * // variables which are outside of this scope * }; * -------------- */ @property @safe @nogc pure nothrow void onCData(Handler handler) { cdataHandler = handler; } /** * Register a handler which will be called whenever a comment is * encountered. * * Example: * -------------- * // Call this function whenever a comment is encountered * onComment = (string s) * { * // Your code here * * // The passed parameter s does not include the opening * // * // This is a a closure, so code here may reference * // variables which are outside of this scope * }; * -------------- */ @property @safe @nogc pure nothrow void onComment(Handler handler) { commentHandler = handler; } /** * Register a handler which will be called whenever a processing * instruction is encountered. 
* * Example: * -------------- * // Call this function whenever a processing instruction is encountered * onPI = (string s) * { * // Your code here * * // The passed parameter s does not include the opening * // * // This is a a closure, so code here may reference * // variables which are outside of this scope * }; * -------------- */ @property @safe @nogc pure nothrow void onPI(Handler handler) { piHandler = handler; } /** * Register a handler which will be called whenever an XML instruction is * encountered. * * Example: * -------------- * // Call this function whenever an XML instruction is encountered * // (Note: XML instructions may only occur preceding the root tag of a * // document). * onPI = (string s) * { * // Your code here * * // The passed parameter s does not include the opening * // * // This is a a closure, so code here may reference * // variables which are outside of this scope * }; * -------------- */ @property @safe @nogc pure nothrow void onXI(Handler handler) { xiHandler = handler; } /** * Parse an XML element. * * Parsing will continue until the end of the current element. Any items * encountered for which a handler has been registered will invoke that * handler. 
* * Throws: various kinds of XMLException */ void parse() { import std.algorithm.searching : startsWith; import std.string : indexOf; string t; const Tag root = tag_; Tag[string] startTags; if (tag_ !is null) startTags[tag_.name] = tag_; while (s.length != 0) { if (startsWith(*s,"")); if (commentHandler.funcptr !is null) commentHandler(t); chop(*s,3); } else if (startsWith(*s,"")); if (cdataHandler.funcptr !is null) cdataHandler(t); chop(*s,3); } else if (startsWith(*s,"")); if (xiHandler.funcptr !is null) xiHandler(t); chop(*s,1); } else if (startsWith(*s,"")); if (piHandler.funcptr !is null) piHandler(t); chop(*s,2); } else if (startsWith(*s,"<")) { tag_ = new Tag(*s,true); if (root is null) return; // Return to constructor of derived class if (tag_.isStart) { startTags[tag_.name] = tag_; auto parser = new ElementParser(this); auto handler = tag_.name in onStartTag; if (handler !is null) (*handler)(parser); else { handler = null in onStartTag; if (handler !is null) (*handler)(parser); } } else if (tag_.isEnd) { const startTag = startTags[tag_.name]; string text; if (startTag.tagString.length == 0) assert(0); immutable(char)* p = startTag.tagString.ptr + startTag.tagString.length; immutable(char)* q = &tag_.tagString[0]; text = decode(p[0..(q-p)], DecodeMode.LOOSE); auto element = new Element(startTag); if (text.length != 0) element ~= new Text(text); auto handler = tag_.name in onEndTag; if (handler !is null) (*handler)(element); else { handler = null in onEndTag; if (handler !is null) (*handler)(element); } if (tag_.name == root.name) return; } else if (tag_.isEmpty) { Tag startTag = new Tag(tag_.name); // FIX by hed010gy, for bug 2979 // http://d.puremagic.com/issues/show_bug.cgi?id=2979 if (tag_.attr.length > 0) foreach (tn,tv; tag_.attr) startTag.attr[tn]=tv; // END FIX // Handle the pretend start tag string s2; auto parser = new ElementParser(startTag,&s2); auto handler1 = startTag.name in onStartTag; if (handler1 !is null) (*handler1)(parser); else { 
handler1 = null in onStartTag; if (handler1 !is null) (*handler1)(parser); } // Handle the pretend end tag auto element = new Element(startTag); auto handler2 = tag_.name in onEndTag; if (handler2 !is null) (*handler2)(element); else { handler2 = null in onEndTag; if (handler2 !is null) (*handler2)(element); } } } else { t = chop(*s,indexOf(*s,"<")); if (rawTextHandler.funcptr !is null) rawTextHandler(t); else if (textHandler.funcptr !is null) textHandler(decode(t,DecodeMode.LOOSE)); } } } /** * Returns that part of the element which has already been parsed */ override string toString() const @nogc @safe pure nothrow { assert(elementStart.length >= s.length); return elementStart[0 .. elementStart.length - s.length]; } } private { template Check(string msg) { string old = s; void fail() @safe pure { s = old; throw new Err(s,msg); } void fail(Err e) @safe pure { s = old; throw new Err(s,msg,e); } void fail(string msg2) @safe pure { fail(new Err(s,msg2)); } } void checkMisc(ref string s) @safe pure // rule 27 { import std.algorithm.searching : startsWith; mixin Check!("Misc"); try { if (s.startsWith("",s); } catch (Err e) { fail(e); } } void checkPI(ref string s) @safe pure // rule 16 { mixin Check!("PI"); try { checkLiteral("",s); } catch (Err e) { fail(e); } } void checkCDSect(ref string s) @safe pure // rule 18 { mixin Check!("CDSect"); try { checkLiteral(cdata,s); checkEnd("]]>",s); } catch (Err e) { fail(e); } } void checkProlog(ref string s) @safe pure // rule 22 { mixin Check!("Prolog"); try { /* The XML declaration is optional * http://www.w3.org/TR/2008/REC-xml-20081126/#NT-prolog */ opt!(checkXMLDecl)(s); star!(checkMisc)(s); opt!(seq!(checkDocTypeDecl,star!(checkMisc)))(s); } catch (Err e) { fail(e); } } void checkXMLDecl(ref string s) @safe pure // rule 23 { mixin Check!("XMLDecl"); try { checkLiteral("",s); } catch (Err e) { fail(e); } } void checkVersionInfo(ref string s) @safe pure // rule 24 { mixin Check!("VersionInfo"); try { checkSpace(s); 
checkLiteral("version",s); checkEq(s); quoted!(checkVersionNum)(s); } catch (Err e) { fail(e); } } void checkEq(ref string s) @safe pure // rule 25 { mixin Check!("Eq"); try { opt!(checkSpace)(s); checkLiteral("=",s); opt!(checkSpace)(s); } catch (Err e) { fail(e); } } void checkVersionNum(ref string s) @safe pure // rule 26 { import std.algorithm.searching : countUntil; import std.utf : byCodeUnit; mixin Check!("VersionNum"); s = s[s.byCodeUnit.countUntil('\"') .. $]; if (s is old) fail(); } void checkDocTypeDecl(ref string s) @safe pure // rule 28 { mixin Check!("DocTypeDecl"); try { checkLiteral("",s); } catch (Err e) { fail(e); } } void checkSDDecl(ref string s) @safe pure // rule 32 { import std.algorithm.searching : startsWith; mixin Check!("SDDecl"); try { checkSpace(s); checkLiteral("standalone",s); checkEq(s); } catch (Err e) { fail(e); } int n = 0; if (s.startsWith("'yes'") || s.startsWith("\"yes\"")) n = 5; else if (s.startsWith("'no'" ) || s.startsWith("\"no\"" )) n = 4; else fail("standalone attribute value must be 'yes', \"yes\","~ " 'no' or \"no\""); s = s[n..$]; } void checkElement(ref string s) @safe pure // rule 39 { mixin Check!("Element"); string sname,ename,t; try { checkTag(s,t,sname); } catch (Err e) { fail(e); } if (t == "STag") { try { checkContent(s); t = s; checkETag(s,ename); } catch (Err e) { fail(e); } if (sname != ename) { s = t; fail("end tag name \"" ~ ename ~ "\" differs from start tag name \""~sname~"\""); } } } // rules 40 and 44 void checkTag(ref string s, out string type, out string name) @safe pure { mixin Check!("Tag"); try { type = "STag"; checkLiteral("<",s); checkName(s,name); star!(seq!(checkSpace,checkAttribute))(s); opt!(checkSpace)(s); if (s.length != 0 && s[0] == '/') { s = s[1..$]; type = "ETag"; } checkLiteral(">",s); } catch (Err e) { fail(e); } } void checkAttribute(ref string s) @safe pure // rule 41 { mixin Check!("Attribute"); try { string name; checkName(s,name); checkEq(s); checkAttValue(s); } catch (Err e) { 
fail(e); } } void checkETag(ref string s, out string name) @safe pure // rule 42 { mixin Check!("ETag"); try { checkLiteral("",s); } catch (Err e) { fail(e); } } void checkContent(ref string s) @safe pure // rule 43 { import std.algorithm.searching : startsWith; mixin Check!("Content"); try { while (s.length != 0) { old = s; if (s.startsWith("&")) { checkReference(s); } else if (s.startsWith(" B EOS"; try { check(s); } catch (CheckException e) { assert(0, e.toString()); } } @system unittest { string test_xml = ` `; DocumentParser parser = new DocumentParser(test_xml); bool tested = false; parser.onStartTag["stream:stream"] = (ElementParser p) { assert(p.tag.attr["xmlns"] == "jabber:'client'"); assert(p.tag.attr["from"] == "jid.pl"); assert(p.tag.attr["attr"] == "a\"b\"c"); tested = true; }; parser.parse(); assert(tested); } @system unittest { string s = q"EOS What & Up Second EOS"; auto xml = new DocumentParser(s); xml.onStartTag["Test"] = (ElementParser xml) { assert(xml.tag.attr["thing"] == "What & Up"); }; xml.onEndTag["Test"] = (in Element e) { assert(e.text() == "What & Up Second"); }; xml.parse(); } @system unittest { string s = ``; auto doc = new Document(s); assert(doc.toString() == s); } /** The base class for exceptions thrown by this module */ class XMLException : Exception { this(string msg) @safe pure { super(msg); } } // Other exceptions /// Thrown during Comment constructor class CommentException : XMLException { private this(string msg) @safe pure { super(msg); } } /// Thrown during CData constructor class CDataException : XMLException { private this(string msg) @safe pure { super(msg); } } /// Thrown during XMLInstruction constructor class XIException : XMLException { private this(string msg) @safe pure { super(msg); } } /// Thrown during ProcessingInstruction constructor class PIException : XMLException { private this(string msg) @safe pure { super(msg); } } /// Thrown during Text constructor class TextException : XMLException { private 
this(string msg) @safe pure { super(msg); } } /// Thrown during decode() class DecodeException : XMLException { private this(string msg) @safe pure { super(msg); } } /// Thrown if comparing with wrong type class InvalidTypeException : XMLException { private this(string msg) @safe pure { super(msg); } } /// Thrown when parsing for Tags class TagException : XMLException { private this(string msg) @safe pure { super(msg); } } /** * Thrown during check() */ class CheckException : XMLException { CheckException err; /// Parent in hierarchy private string tail; /** * Name of production rule which failed to parse, * or specific error message */ string msg; size_t line = 0; /// Line number at which parse failure occurred size_t column = 0; /// Column number at which parse failure occurred private this(string tail,string msg,Err err=null) @safe pure { super(null); this.tail = tail; this.msg = msg; this.err = err; } private void complete(string entire) @safe pure { import std.string : count, lastIndexOf; import std.utf : toUTF32; string head = entire[0..$-tail.length]; ptrdiff_t n = head.lastIndexOf('\n') + 1; line = head.count("\n") + 1; dstring t = toUTF32(head[n..$]); column = t.length + 1; if (err !is null) err.complete(entire); } override string toString() const @safe pure { import std.format : format; string s; if (line != 0) s = format("Line %d, column %d: ",line,column); s ~= msg; s ~= '\n'; if (err !is null) s = err.toString() ~ s; return s; } } private alias Err = CheckException; // Private helper functions private { inout(T) toType(T)(scope return inout Object o) { T t = cast(T)(o); if (t is null) { throw new InvalidTypeException("Attempt to compare a " ~ T.stringof ~ " with an instance of another type"); } return t; } string chop(ref string s, size_t n) @safe pure nothrow { if (n == -1) n = s.length; string t = s[0 .. 
n]; s = s[n..$]; return t; } bool optc(ref string s, char c) @safe pure nothrow { immutable bool b = s.length != 0 && s[0] == c; if (b) s = s[1..$]; return b; } void reqc(ref string s, char c) @safe pure { if (s.length == 0 || s[0] != c) throw new TagException(""); s = s[1..$]; } char requireOneOf(ref string s, string chars) @safe pure { import std.string : indexOf; if (s.length == 0 || indexOf(chars,s[0]) == -1) throw new TagException(""); immutable char ch = s[0]; s = s[1..$]; return ch; } alias hash = .hashOf; // Definitions from the XML specification immutable CharTable=[0x9,0x9,0xA,0xA,0xD,0xD,0x20,0xD7FF,0xE000,0xFFFD, 0x10000,0x10FFFF]; immutable BaseCharTable=[0x0041,0x005A,0x0061,0x007A,0x00C0,0x00D6,0x00D8, 0x00F6,0x00F8,0x00FF,0x0100,0x0131,0x0134,0x013E,0x0141,0x0148,0x014A, 0x017E,0x0180,0x01C3,0x01CD,0x01F0,0x01F4,0x01F5,0x01FA,0x0217,0x0250, 0x02A8,0x02BB,0x02C1,0x0386,0x0386,0x0388,0x038A,0x038C,0x038C,0x038E, 0x03A1,0x03A3,0x03CE,0x03D0,0x03D6,0x03DA,0x03DA,0x03DC,0x03DC,0x03DE, 0x03DE,0x03E0,0x03E0,0x03E2,0x03F3,0x0401,0x040C,0x040E,0x044F,0x0451, 0x045C,0x045E,0x0481,0x0490,0x04C4,0x04C7,0x04C8,0x04CB,0x04CC,0x04D0, 0x04EB,0x04EE,0x04F5,0x04F8,0x04F9,0x0531,0x0556,0x0559,0x0559,0x0561, 0x0586,0x05D0,0x05EA,0x05F0,0x05F2,0x0621,0x063A,0x0641,0x064A,0x0671, 0x06B7,0x06BA,0x06BE,0x06C0,0x06CE,0x06D0,0x06D3,0x06D5,0x06D5,0x06E5, 0x06E6,0x0905,0x0939,0x093D,0x093D,0x0958,0x0961,0x0985,0x098C,0x098F, 0x0990,0x0993,0x09A8,0x09AA,0x09B0,0x09B2,0x09B2,0x09B6,0x09B9,0x09DC, 0x09DD,0x09DF,0x09E1,0x09F0,0x09F1,0x0A05,0x0A0A,0x0A0F,0x0A10,0x0A13, 0x0A28,0x0A2A,0x0A30,0x0A32,0x0A33,0x0A35,0x0A36,0x0A38,0x0A39,0x0A59, 0x0A5C,0x0A5E,0x0A5E,0x0A72,0x0A74,0x0A85,0x0A8B,0x0A8D,0x0A8D,0x0A8F, 0x0A91,0x0A93,0x0AA8,0x0AAA,0x0AB0,0x0AB2,0x0AB3,0x0AB5,0x0AB9,0x0ABD, 0x0ABD,0x0AE0,0x0AE0,0x0B05,0x0B0C,0x0B0F,0x0B10,0x0B13,0x0B28,0x0B2A, 0x0B30,0x0B32,0x0B33,0x0B36,0x0B39,0x0B3D,0x0B3D,0x0B5C,0x0B5D,0x0B5F, 
0x0B61,0x0B85,0x0B8A,0x0B8E,0x0B90,0x0B92,0x0B95,0x0B99,0x0B9A,0x0B9C, 0x0B9C,0x0B9E,0x0B9F,0x0BA3,0x0BA4,0x0BA8,0x0BAA,0x0BAE,0x0BB5,0x0BB7, 0x0BB9,0x0C05,0x0C0C,0x0C0E,0x0C10,0x0C12,0x0C28,0x0C2A,0x0C33,0x0C35, 0x0C39,0x0C60,0x0C61,0x0C85,0x0C8C,0x0C8E,0x0C90,0x0C92,0x0CA8,0x0CAA, 0x0CB3,0x0CB5,0x0CB9,0x0CDE,0x0CDE,0x0CE0,0x0CE1,0x0D05,0x0D0C,0x0D0E, 0x0D10,0x0D12,0x0D28,0x0D2A,0x0D39,0x0D60,0x0D61,0x0E01,0x0E2E,0x0E30, 0x0E30,0x0E32,0x0E33,0x0E40,0x0E45,0x0E81,0x0E82,0x0E84,0x0E84,0x0E87, 0x0E88,0x0E8A,0x0E8A,0x0E8D,0x0E8D,0x0E94,0x0E97,0x0E99,0x0E9F,0x0EA1, 0x0EA3,0x0EA5,0x0EA5,0x0EA7,0x0EA7,0x0EAA,0x0EAB,0x0EAD,0x0EAE,0x0EB0, 0x0EB0,0x0EB2,0x0EB3,0x0EBD,0x0EBD,0x0EC0,0x0EC4,0x0F40,0x0F47,0x0F49, 0x0F69,0x10A0,0x10C5,0x10D0,0x10F6,0x1100,0x1100,0x1102,0x1103,0x1105, 0x1107,0x1109,0x1109,0x110B,0x110C,0x110E,0x1112,0x113C,0x113C,0x113E, 0x113E,0x1140,0x1140,0x114C,0x114C,0x114E,0x114E,0x1150,0x1150,0x1154, 0x1155,0x1159,0x1159,0x115F,0x1161,0x1163,0x1163,0x1165,0x1165,0x1167, 0x1167,0x1169,0x1169,0x116D,0x116E,0x1172,0x1173,0x1175,0x1175,0x119E, 0x119E,0x11A8,0x11A8,0x11AB,0x11AB,0x11AE,0x11AF,0x11B7,0x11B8,0x11BA, 0x11BA,0x11BC,0x11C2,0x11EB,0x11EB,0x11F0,0x11F0,0x11F9,0x11F9,0x1E00, 0x1E9B,0x1EA0,0x1EF9,0x1F00,0x1F15,0x1F18,0x1F1D,0x1F20,0x1F45,0x1F48, 0x1F4D,0x1F50,0x1F57,0x1F59,0x1F59,0x1F5B,0x1F5B,0x1F5D,0x1F5D,0x1F5F, 0x1F7D,0x1F80,0x1FB4,0x1FB6,0x1FBC,0x1FBE,0x1FBE,0x1FC2,0x1FC4,0x1FC6, 0x1FCC,0x1FD0,0x1FD3,0x1FD6,0x1FDB,0x1FE0,0x1FEC,0x1FF2,0x1FF4,0x1FF6, 0x1FFC,0x2126,0x2126,0x212A,0x212B,0x212E,0x212E,0x2180,0x2182,0x3041, 0x3094,0x30A1,0x30FA,0x3105,0x312C,0xAC00,0xD7A3]; immutable IdeographicTable=[0x3007,0x3007,0x3021,0x3029,0x4E00,0x9FA5]; immutable CombiningCharTable=[0x0300,0x0345,0x0360,0x0361,0x0483,0x0486, 0x0591,0x05A1,0x05A3,0x05B9,0x05BB,0x05BD,0x05BF,0x05BF,0x05C1,0x05C2, 0x05C4,0x05C4,0x064B,0x0652,0x0670,0x0670,0x06D6,0x06DC,0x06DD,0x06DF, 0x06E0,0x06E4,0x06E7,0x06E8,0x06EA,0x06ED,0x0901,0x0903,0x093C,0x093C, 
0x093E,0x094C,0x094D,0x094D,0x0951,0x0954,0x0962,0x0963,0x0981,0x0983, 0x09BC,0x09BC,0x09BE,0x09BE,0x09BF,0x09BF,0x09C0,0x09C4,0x09C7,0x09C8, 0x09CB,0x09CD,0x09D7,0x09D7,0x09E2,0x09E3,0x0A02,0x0A02,0x0A3C,0x0A3C, 0x0A3E,0x0A3E,0x0A3F,0x0A3F,0x0A40,0x0A42,0x0A47,0x0A48,0x0A4B,0x0A4D, 0x0A70,0x0A71,0x0A81,0x0A83,0x0ABC,0x0ABC,0x0ABE,0x0AC5,0x0AC7,0x0AC9, 0x0ACB,0x0ACD,0x0B01,0x0B03,0x0B3C,0x0B3C,0x0B3E,0x0B43,0x0B47,0x0B48, 0x0B4B,0x0B4D,0x0B56,0x0B57,0x0B82,0x0B83,0x0BBE,0x0BC2,0x0BC6,0x0BC8, 0x0BCA,0x0BCD,0x0BD7,0x0BD7,0x0C01,0x0C03,0x0C3E,0x0C44,0x0C46,0x0C48, 0x0C4A,0x0C4D,0x0C55,0x0C56,0x0C82,0x0C83,0x0CBE,0x0CC4,0x0CC6,0x0CC8, 0x0CCA,0x0CCD,0x0CD5,0x0CD6,0x0D02,0x0D03,0x0D3E,0x0D43,0x0D46,0x0D48, 0x0D4A,0x0D4D,0x0D57,0x0D57,0x0E31,0x0E31,0x0E34,0x0E3A,0x0E47,0x0E4E, 0x0EB1,0x0EB1,0x0EB4,0x0EB9,0x0EBB,0x0EBC,0x0EC8,0x0ECD,0x0F18,0x0F19, 0x0F35,0x0F35,0x0F37,0x0F37,0x0F39,0x0F39,0x0F3E,0x0F3E,0x0F3F,0x0F3F, 0x0F71,0x0F84,0x0F86,0x0F8B,0x0F90,0x0F95,0x0F97,0x0F97,0x0F99,0x0FAD, 0x0FB1,0x0FB7,0x0FB9,0x0FB9,0x20D0,0x20DC,0x20E1,0x20E1,0x302A,0x302F, 0x3099,0x3099,0x309A,0x309A]; immutable DigitTable=[0x0030,0x0039,0x0660,0x0669,0x06F0,0x06F9,0x0966, 0x096F,0x09E6,0x09EF,0x0A66,0x0A6F,0x0AE6,0x0AEF,0x0B66,0x0B6F,0x0BE7, 0x0BEF,0x0C66,0x0C6F,0x0CE6,0x0CEF,0x0D66,0x0D6F,0x0E50,0x0E59,0x0ED0, 0x0ED9,0x0F20,0x0F29]; immutable ExtenderTable=[0x00B7,0x00B7,0x02D0,0x02D0,0x02D1,0x02D1,0x0387, 0x0387,0x0640,0x0640,0x0E46,0x0E46,0x0EC6,0x0EC6,0x3005,0x3005,0x3031, 0x3035,0x309D,0x309E,0x30FC,0x30FE]; bool lookup(const(int)[] table, int c) @safe @nogc nothrow pure { while (table.length != 0) { auto m = (table.length >> 1) & ~1; if (c < table[m]) { table = table[0 .. m]; } else if (c > table[m+1]) { table = table[m+2..$]; } else return true; } return false; } string startOf(string s) @safe nothrow pure { string r; foreach (char c;s) { r ~= (c < 0x20 || c > 0x7F) ? '.' 
: c; if (r.length >= 40) { r ~= "___"; break; } } return r; } void exit(string s=null) { throw new XMLException(s); } } dub-1.40.0/source/dub/internal/utils.d000066400000000000000000000567721477246567400176100ustar00rootroot00000000000000/** ... Copyright: © 2012 Matthias Dondorff License: Subject to the terms of the MIT license, as written in the included LICENSE.txt file. Authors: Matthias Dondorff */ module dub.internal.utils; import dub.internal.vibecompat.core.file; import dub.internal.vibecompat.data.json; import dub.internal.vibecompat.inet.url; import dub.compilers.buildsettings : BuildSettings; import dub.version_; import dub.internal.logging; import core.time : Duration; import std.algorithm : canFind, startsWith; import std.array : appender, array; import std.conv : to; import std.exception : enforce; import std.file; import std.format; import std.range; import std.string : format; import std.process; import std.traits : isIntegral; version(DubUseCurl) { import std.net.curl; public import std.net.curl : HTTPStatusException; } public import dub.internal.temp_files; /** * Obtain a lock for a file at the given path. * * If the file cannot be locked within the given duration, * an exception is thrown. The file will be created if it does not yet exist. * Deleting the file is not safe as another process could create a new file * with the same name. * The returned lock will get unlocked upon destruction. * * Params: * path = path to file that gets locked * timeout = duration after which locking failed * * Returns: * The locked file or an Exception on timeout. */ auto lockFile(string path, Duration timeout) { import core.thread : Thread; import std.datetime, std.stdio : File; import std.algorithm : move; // Just a wrapper to hide (and destruct) the locked File. static struct LockFile { // The Lock can't be unlinked as someone could try to lock an already // opened fd while a new file with the same name gets created. 
// Exclusive file system locks (O_EXCL, mkdir) could be deleted but // aren't automatically freed when a process terminates, see #1149. private File f; } auto file = File(path, "w"); auto t0 = Clock.currTime(); auto dur = 1.msecs; while (true) { if (file.tryLock()) return LockFile(move(file)); enforce(Clock.currTime() - t0 < timeout, "Failed to lock '"~path~"'."); if (dur < 1024.msecs) // exponentially increase sleep time dur *= 2; Thread.sleep(dur); } } bool isWritableDir(NativePath p, bool create_if_missing = false) { import std.random; auto fname = p ~ format("__dub_write_test_%08X", uniform(0, uint.max)); if (create_if_missing) ensureDirectory(p); try writeFile(fname, "Canary"); catch (Exception) return false; remove(fname.toNativeString()); return true; } Json jsonFromFile(NativePath file, bool silent_fail = false) { if( silent_fail && !existsFile(file) ) return Json.emptyObject; auto text = readText(file); return parseJsonString(text, file.toNativeString()); } /** Read package info file content from archive. File needs to be in root folder or in first sub folder. Params: zip = path to archive file fileName = Package file name Returns: package file content. 
*/ string packageInfoFileFromZip(NativePath zip, out string fileName) { import std.zip : ZipArchive, ArchiveMember; import dub.package_ : packageInfoFiles; auto b = readFile(zip); auto archive = new ZipArchive(b); alias PSegment = typeof (NativePath.init.head); foreach (ArchiveMember am; archive.directory) { auto path = NativePath(am.name).bySegment.array; foreach (fil; packageInfoFiles) { if ((path.length == 1 && path[0] == fil.filename) || (path.length == 2 && path[$-1].name == fil.filename)) { fileName = fil.filename; return stripUTF8Bom(cast(string) archive.expand(archive.directory[am.name])); } } } throw new Exception("No package descriptor found"); } void writeJsonFile(NativePath path, Json json) { auto app = appender!string(); app.writePrettyJsonString(json); writeFile(path, app.data); } /// Performs a write->delete->rename sequence to atomically "overwrite" the destination file void atomicWriteJsonFile(NativePath path, Json json) { import std.random : uniform; auto tmppath = path.parentPath ~ format("%s.%s.tmp", path.head, uniform(0, int.max)); auto app = appender!string(); app.writePrettyJsonString(json); writeFile(tmppath, app.data); if (existsFile(path)) removeFile(path); moveFile(tmppath, path); } void runCommand(string command, string[string] env, string workDir) { runCommands((&command)[0 .. 
1], env, workDir); } void runCommands(in string[] commands, string[string] env, string workDir) { import std.stdio : stdin, stdout, stderr, File; version(Windows) enum nullFile = "NUL"; else version(Posix) enum nullFile = "/dev/null"; else static assert(0); auto childStdout = stdout; auto childStderr = stderr; auto config = Config.retainStdout | Config.retainStderr; // Disable child's stdout/stderr depending on LogLevel auto logLevel = getLogLevel(); if(logLevel >= LogLevel.warn) childStdout = File(nullFile, "w"); if(logLevel >= LogLevel.none) childStderr = File(nullFile, "w"); foreach(cmd; commands){ logDiagnostic("Running %s", cmd); Pid pid; pid = spawnShell(cmd, stdin, childStdout, childStderr, env, config, workDir); auto exitcode = pid.wait(); enforce(exitcode == 0, "Command failed with exit code " ~ to!string(exitcode) ~ ": " ~ cmd); } } version (Have_vibe_d_http) public import vibe.http.common : HTTPStatusException; /** Downloads a file from the specified URL. Any redirects will be followed until the actual file resource is reached or if the redirection limit of 10 is reached. Note that only HTTP(S) is currently supported. The download times out if a connection cannot be established within `timeout` ms, or if the average transfer rate drops below 10 bytes / s for more than `timeout` seconds. Pass `0` as `timeout` to disable both timeout mechanisms. Note: Timeouts are only implemented when curl is used (DubUseCurl). 
*/ private void download(string url, string filename, uint timeout = 8) { version(DubUseCurl) { auto conn = HTTP(); setupHTTPClient(conn, timeout); logDebug("Storing %s...", url); std.net.curl.download(url, filename, conn); // workaround https://issues.dlang.org/show_bug.cgi?id=18318 auto sl = conn.statusLine; logDebug("Download %s %s", url, sl); if (sl.code / 100 != 2) throw new HTTPStatusException(sl.code, "Downloading %s failed with %d (%s).".format(url, sl.code, sl.reason)); } else version (Have_vibe_d_http) { import vibe.inet.urltransfer; vibe.inet.urltransfer.download(url, filename); } else assert(false); } /// ditto private void download(URL url, NativePath filename, uint timeout = 8) { download(url.toString(), filename.toNativeString(), timeout); } /// ditto private ubyte[] download(string url, uint timeout = 8) { version(DubUseCurl) { auto conn = HTTP(); setupHTTPClient(conn, timeout); logDebug("Getting %s...", url); return get!(HTTP, ubyte)(url, conn); } else version (Have_vibe_d_http) { import vibe.inet.urltransfer; import vibe.stream.operations; ubyte[] ret; vibe.inet.urltransfer.download(url, (scope input) { ret = input.readAll(); }); return ret; } else assert(false); } /// ditto private ubyte[] download(URL url, uint timeout = 8) { return download(url.toString(), timeout); } /** Downloads a file from the specified URL with retry logic. Downloads a file from the specified URL with up to n tries on failure Throws: `Exception` if the download failed or `HTTPStatusException` after the nth retry or on "unrecoverable failures" such as 404 not found Otherwise might throw anything else that `download` throws. See_Also: download The download times out if a connection cannot be established within `timeout` ms, or if the average transfer rate drops below 10 bytes / s for more than `timeout` seconds. Pass `0` as `timeout` to disable both timeout mechanisms. Note: Timeouts are only implemented when curl is used (DubUseCurl). 
**/ void retryDownload(URL url, NativePath filename, size_t retryCount = 3, uint timeout = 8) { foreach(i; 0..retryCount) { version(DubUseCurl) { try { download(url, filename, timeout); return; } catch(HTTPStatusException e) { if (e.status == 404) throw e; else { logDebug("Failed to download %s (Attempt %s of %s)", url, i + 1, retryCount); if (i == retryCount - 1) throw e; else continue; } } catch(CurlException e) { logDebug("Failed to download %s (Attempt %s of %s)", url, i + 1, retryCount); continue; } } else { try { download(url, filename); return; } catch(HTTPStatusException e) { if (e.status == 404) throw e; else { logDebug("Failed to download %s (Attempt %s of %s)", url, i + 1, retryCount); if (i == retryCount - 1) throw e; else continue; } } } } throw new Exception("Failed to download %s".format(url)); } ///ditto ubyte[] retryDownload(URL url, size_t retryCount = 3, uint timeout = 8) { foreach(i; 0..retryCount) { version(DubUseCurl) { try { return download(url, timeout); } catch(HTTPStatusException e) { if (e.status == 404) throw e; else { logDebug("Failed to download %s (Attempt %s of %s): %s", url, i + 1, retryCount, e.message); if (i == retryCount - 1) throw e; else continue; } } catch(CurlException e) { logDebug("Failed to download %s (Attempt %s of %s): %s", url, i + 1, retryCount, e.message); continue; } } else { try { return download(url); } catch(HTTPStatusException e) { if (e.status == 404) throw e; else { logDebug("Failed to download %s (Attempt %s of %s)", url, i + 1, retryCount); if (i == retryCount - 1) throw e; else continue; } } } } throw new Exception("Failed to download %s".format(url)); } /// Returns the current DUB version in semantic version format string getDUBVersion() { import dub.version_; import std.array : split, join; // convert version string to valid SemVer format auto verstr = dubVersion; if (verstr.startsWith("v")) verstr = verstr[1 .. 
$]; auto parts = verstr.split("-"); if (parts.length >= 3) { // detect GIT commit suffix if (parts[$-1].length == 8 && parts[$-1][1 .. $].isHexNumber() && parts[$-2].isNumber()) verstr = parts[0 .. $-2].join("-") ~ "+" ~ parts[$-2 .. $].join("-"); } return verstr; } /** Get current executable's path if running as DUB executable, or find a DUB executable if DUB is used as a library. For the latter, the following locations are checked in order: $(UL $(LI current working directory) $(LI same directory as `compilerBinary` (if supplied)) $(LI all components of the `$PATH` variable) ) Params: compilerBinary = optional path to a D compiler executable, used to locate DUB executable Returns: The path to a valid DUB executable Throws: an Exception if no valid DUB executable is found */ public NativePath getDUBExePath(in string compilerBinary=null) { version(DubApplication) { import std.file : thisExePath; return NativePath(thisExePath()); } else { // this must be dub as a library import std.algorithm : filter, map, splitter; import std.array : array; import std.file : exists, getcwd; import std.path : chainPath, dirName; import std.range : chain, only, take; import std.process : environment; version(Windows) { enum exeName = "dub.exe"; enum pathSep = ';'; } else { enum exeName = "dub"; enum pathSep = ':'; } auto dubLocs = only( getcwd().chainPath(exeName), compilerBinary.dirName.chainPath(exeName), ) .take(compilerBinary.length ? 
2 : 1) .chain( environment.get("PATH", "") .splitter(pathSep) .map!(p => p.chainPath(exeName)) ) .filter!exists; enforce(!dubLocs.empty, "Could not find DUB executable"); return NativePath(dubLocs.front.array); } } version(DubUseCurl) { void setupHTTPClient(ref HTTP conn, uint timeout) { static if( is(typeof(&conn.verifyPeer)) ) conn.verifyPeer = false; auto proxy = environment.get("http_proxy", null); if (proxy.length) conn.proxy = proxy; auto noProxy = environment.get("no_proxy", null); if (noProxy.length) conn.handle.set(CurlOption.noproxy, noProxy); conn.handle.set(CurlOption.encoding, ""); if (timeout) { // connection (TLS+TCP) times out after 8s conn.handle.set(CurlOption.connecttimeout, timeout); // transfers time out after 8s below 10 byte/s conn.handle.set(CurlOption.low_speed_limit, 10); conn.handle.set(CurlOption.low_speed_time, timeout); } conn.addRequestHeader("User-Agent", "dub/"~getDUBVersion()~" (std.net.curl; +https://github.com/rejectedsoftware/dub)"); enum CURL_NETRC_OPTIONAL = 1; conn.handle.set(CurlOption.netrc, CURL_NETRC_OPTIONAL); } } private string stripUTF8Bom(string str) { if( str.length >= 3 && str[0 .. 3] == [0xEF, 0xBB, 0xBF] ) return str[3 ..$]; return str; } private bool isNumber(string str) { foreach (ch; str) switch (ch) { case '0': .. case '9': break; default: return false; } return true; } private bool isHexNumber(string str) { foreach (ch; str) switch (ch) { case '0': .. case '9': break; case 'a': .. case 'f': break; case 'A': .. case 'F': break; default: return false; } return true; } /** Get the closest match of $(D input) in the $(D array), where $(D distance) is the maximum levenshtein distance allowed between the compared strings. Returns $(D null) if no closest match is found. 
*/ string getClosestMatch(string[] array, string input, size_t distance) { import std.algorithm : countUntil, map, levenshteinDistance; import std.uni : toUpper; auto distMap = array.map!(elem => levenshteinDistance!((a, b) => toUpper(a) == toUpper(b))(elem, input)); auto idx = distMap.countUntil!(a => a <= distance); return (idx == -1) ? null : array[idx]; } /** Searches for close matches to input in range. R must be a range of strings Note: Sorts the strings range. Use std.range.indexed to avoid this... */ auto fuzzySearch(R)(R strings, string input){ import std.algorithm : levenshteinDistance, schwartzSort, partition3; import std.traits : isSomeString; import std.range : ElementType; static assert(isSomeString!(ElementType!R), "Cannot call fuzzy search on non string rang"); immutable threshold = input.length / 4; return strings.partition3!((a, b) => a.length + threshold < b.length)(input)[1] .schwartzSort!(p => levenshteinDistance(input.toUpper, p.toUpper)); } /** If T is a bitfield-style enum, this function returns a string range listing the names of all members included in the given value. Example: --------- enum Bits { none = 0, a = 1<<0, b = 1<<1, c = 1<<2, a_c = a | c, } assert( bitFieldNames(Bits.none).equals(["none"]) ); assert( bitFieldNames(Bits.a).equals(["a"]) ); assert( bitFieldNames(Bits.a_c).equals(["a", "c", "a_c"]) ); --------- */ auto bitFieldNames(T)(T value) if(is(T==enum) && isIntegral!T) { import std.algorithm : filter, map; import std.conv : to; import std.traits : EnumMembers; return [ EnumMembers!(T) ] .filter!(member => member==0? value==0 : (value & member) == member) .map!(member => to!string(member)); } bool isIdentChar(dchar ch) { import std.ascii : isAlphaNum; return isAlphaNum(ch) || ch == '_'; } string stripDlangSpecialChars(string s) { import std.array : appender; auto ret = appender!string(); foreach(ch; s) ret.put(isIdentChar(ch) ? 
ch : '_'); return ret.data; } string determineModuleName(BuildSettings settings, NativePath file, NativePath base_path) { import std.algorithm : map; import std.array : array; import std.range : walkLength, chain; assert(base_path.absolute); if (!file.absolute) file = base_path ~ file; size_t path_skip = 0; foreach (ipath; chain(settings.importPaths, settings.cImportPaths).map!(p => NativePath(p))) { if (!ipath.absolute) ipath = base_path ~ ipath; assert(!ipath.empty); if (file.startsWith(ipath) && ipath.bySegment.walkLength > path_skip) path_skip = ipath.bySegment.walkLength; } auto mpath = file.bySegment.array[path_skip .. $]; auto ret = appender!string; //search for module keyword in file string moduleName = getModuleNameFromFile(file.to!string); if(moduleName.length) { assert(moduleName.length > 0, "Wasn't this module name already checked? what"); return moduleName; } //create module name from path if (path_skip == 0) { import std.path; ret ~= mpath[$-1].name.baseName(".d"); } else { foreach (i; 0 .. mpath.length) { import std.path; auto p = mpath[i].name; if (p == "package.d") break ; if (ret.data.length > 0) ret ~= "."; if (i+1 < mpath.length) ret ~= p; else ret ~= p.baseName(".d"); } } assert(ret.data.length > 0, "A module name was expected to be computed, and none was."); return ret.data; } /** * Search for module keyword in D Code * A primitive parser to skip comments and whitespace to get * the module's name from the module declaration. */ string getModuleNameFromContent(string content) { import std.ascii: isAlpha, isAlphaNum, isWhite; import std.algorithm: among; import core.exception: RangeError; enum keyword = "module"; size_t i = 0; size_t startIndex = 0, endIndex = 0; auto foundKeyword = false; auto ch() { return content[i]; } static bool isIdentChar(in char c) { return !isWhite(c) && c != '/' && c != ';'; } try { while(i < content.length) { if(!foundKeyword && ch == keyword[0] && content[i .. 
i + keyword.length] == keyword) { // -1 because the end of the loop will advance by 1 i += keyword.length - 1; foundKeyword = true; } else if(ch == '/') { ++i; // line comment? if(ch == '/') { while(ch != '\n') ++i; } // block comment? else if(ch == '*') { ++i; while(ch != '*' || content[i + 1] != '/') ++i; ++i; // skip over closing '/' } // nested comment? else if(ch == '+') { ++i; size_t level = 1; while(level > 0) { if(ch == '/') { ++i; if(ch == '+') { ++i; ++level; } } if(ch == '+') { ++i; if(ch == '/') { --level; } else continue; } ++i; } } } else if(isIdentChar(ch) && foundKeyword) { if(startIndex == 0) startIndex = i; ++i; // skip the first char of the name while(isIdentChar(ch)) { ++i; } // when we get here, either we're at the end of the module's identifier, // or there are comments afterwards if(endIndex == 0) { endIndex = i; } if(!isIdentChar(ch)) return content[startIndex .. endIndex]; else continue; } else if(!isIdentChar(ch) && foundKeyword && startIndex != 0) { return content[startIndex .. 
endIndex]; } ++i; } return ""; } catch(RangeError) { return ""; } } unittest { assert(getModuleNameFromContent("") == ""); assert(getModuleNameFromContent("module myPackage.myModule;") == "myPackage.myModule", getModuleNameFromContent("module myPackage.myModule;")); assert(getModuleNameFromContent("module \t\n myPackage.myModule \t\r\n;") == "myPackage.myModule"); assert(getModuleNameFromContent("// foo\nmodule bar;") == "bar"); assert(getModuleNameFromContent("/*\nfoo\n*/\nmodule bar;") == "bar"); assert(getModuleNameFromContent("/+\nfoo\n+/\nmodule bar;") == "bar"); assert(getModuleNameFromContent("/***\nfoo\n***/\nmodule bar;") == "bar"); assert(getModuleNameFromContent("/+++\nfoo\n+++/\nmodule bar;") == "bar"); assert(getModuleNameFromContent("// module foo;\nmodule bar;") == "bar"); assert(getModuleNameFromContent("/* module foo; */\nmodule bar;") == "bar"); assert(getModuleNameFromContent("/+ module foo; +/\nmodule bar;") == "bar"); assert(getModuleNameFromContent("/+ /+ module foo; +/ +/\nmodule bar;") == "bar"); assert(getModuleNameFromContent("// module foo;\nmodule bar; // module foo;") == "bar"); assert(getModuleNameFromContent("// module foo;\nmodule// module foo;\nbar//module foo;\n;// module foo;") == "bar"); assert(getModuleNameFromContent("/* module foo; */\nmodule/*module foo;*/bar/*module foo;*/;") == "bar", getModuleNameFromContent("/* module foo; */\nmodule/*module foo;*/bar/*module foo;*/;")); assert(getModuleNameFromContent("/+ /+ module foo; +/ module foo; +/ module bar;") == "bar"); assert(getModuleNameFromContent("/+ /+ module foo; +/ module foo; +/ module bar/++/;") == "bar"); assert(getModuleNameFromContent("/*\nmodule sometest;\n*/\n\nmodule fakemath;\n") == "fakemath"); assert(getModuleNameFromContent("module foo_bar;") == "foo_bar"); assert(getModuleNameFromContent("module _foo_bar;") == "_foo_bar"); assert(getModuleNameFromContent("/++ ++/\nmodule foo;") == "foo"); assert(getModuleNameFromContent("module pokémon;") == "pokémon"); 
assert(getModuleNameFromContent("module éclair;") == "éclair"); assert(getModuleNameFromContent("/** module foo*/ module bar;") == "bar"); assert(getModuleNameFromContent("/* / module foo*/ module bar;") == "bar"); assert(getModuleNameFromContent("module modules.foo;") == "modules.foo"); } /** * Search for module keyword in file */ string getModuleNameFromFile(string filePath) { if (!filePath.exists) { return null; } string fileContent = filePath.readText; logDiagnostic("Get module name from path: %s", filePath); return getModuleNameFromContent(fileContent); } /** * Compare two instances of the same type for equality, * providing a rich error message on failure. * * This function will recurse into composite types (struct, AA, arrays) * and compare element / member wise, taking opEquals into account, * to provide the most accurate reason why comparison failed. */ void deepCompare (T) ( in T result, in T expected, string file = __FILE__, size_t line = __LINE__) { deepCompareImpl!T(result, expected, T.stringof, file, line); } void deepCompareImpl (T) ( in T result, in T expected, string path, string file, size_t line) { static if (is(T == struct) && !is(typeof(T.init.opEquals(T.init)) : bool)) { static foreach (idx; 0 .. T.tupleof.length) deepCompareImpl(result.tupleof[idx], expected.tupleof[idx], format("%s.%s", path, __traits(identifier, T.tupleof[idx])), file, line); } else static if (is(T : KeyT[ValueT], KeyT, ValueT)) { if (result.length != expected.length) throw new Exception( format("%s: AA has different number of entries (%s != %s): %s != %s", path, result.length, expected.length, result, expected), file, line); foreach (key, value; expected) { if (auto ptr = key in result) deepCompareImpl(*ptr, value, format("%s[%s]", path, key), file, line); else throw new Exception( format("Expected key %s[%s] not present in result. 
%s != %s", path, key, result, expected), file, line); } } else if (result != expected) { static if (is(T == struct) && is(typeof(T.init.opEquals(T.init)) : bool)) path ~= ".opEquals"; throw new Exception( format("%s: result != expected: %s != %s", path, result, expected), file, line); } } /** Filters a forward range with the given predicate and returns a prefix range. This function filters elements in-place, as opposed to returning a new range. This can be particularly useful when working with arrays, as this does not require any memory allocations. This function guarantees that `pred` is called exactly once per element and deterministically destroys any elements for which `pred` returns `false`. */ auto filterInPlace(alias pred, R)(R elems) if (isForwardRange!R) { import std.algorithm.mutation : move; R telems = elems.save; bool any_removed = false; size_t nret = 0; foreach (ref el; elems.save) { if (pred(el)) { if (any_removed) move(el, telems.front); telems.popFront(); nret++; } else any_removed = true; } return elems.takeExactly(nret); } /// unittest { int[] arr = [1, 2, 3, 4, 5, 6, 7, 8]; arr = arr.filterInPlace!(e => e % 2 == 0); assert(arr == [2, 4, 6, 8]); arr = arr.filterInPlace!(e => e < 5); assert(arr == [2, 4]); } dub-1.40.0/source/dub/internal/vibecompat/000077500000000000000000000000001477246567400204135ustar00rootroot00000000000000dub-1.40.0/source/dub/internal/vibecompat/core/000077500000000000000000000000001477246567400213435ustar00rootroot00000000000000dub-1.40.0/source/dub/internal/vibecompat/core/file.d000066400000000000000000000171671477246567400224430ustar00rootroot00000000000000/** File handling. Copyright: © 2012 rejectedsoftware e.K. License: Subject to the terms of the MIT license, as written in the included LICENSE.txt file. 
Authors: Sönke Ludwig */ module dub.internal.vibecompat.core.file; public import dub.internal.vibecompat.inet.path; import dub.internal.logging; import std.conv; import core.stdc.stdio; import std.datetime; import std.exception; import std.file; import std.path; import std.stdio; import std.string; import std.utf; /// Writes `buffer` to a file public void writeFile(NativePath path, const void[] buffer) { std.file.write(path.toNativeString(), buffer); } /// Returns the content of a file public ubyte[] readFile(NativePath path) { return cast(ubyte[]) std.file.read(path.toNativeString()); } /// Returns the content of a file as text public string readText(NativePath path) { return std.file.readText(path.toNativeString()); } /** Moves or renames a file. */ void moveFile(NativePath from, NativePath to) { moveFile(from.toNativeString(), to.toNativeString()); } /// ditto void moveFile(string from, string to) { std.file.rename(from, to); } /** Copies a file. Note that attributes and time stamps are currently not retained. Params: from = NativePath of the source file to = NativePath for the destination file overwrite = If true, any file existing at the destination path will be overwritten. If this is false, an exception will be thrown should a file already exist at the destination path. Throws: An Exception if the copy operation fails for some reason. 
*/ void copyFile(NativePath from, NativePath to, bool overwrite = false) { enforce(existsFile(from), "Source file does not exist."); if (existsFile(to)) { enforce(overwrite, "Destination file already exists."); // remove file before copy to allow "overwriting" files that are in // use on Linux removeFile(to); } static if (is(PreserveAttributes)) { .copy(from.toNativeString(), to.toNativeString(), PreserveAttributes.yes); } else { .copy(from.toNativeString(), to.toNativeString()); // try to preserve ownership/permissions in Posix version (Posix) { import core.sys.posix.sys.stat; import core.sys.posix.unistd; import std.utf; auto cspath = toUTFz!(const(char)*)(from.toNativeString()); auto cdpath = toUTFz!(const(char)*)(to.toNativeString()); stat_t st; enforce(stat(cspath, &st) == 0, "Failed to get attributes of source file."); if (chown(cdpath, st.st_uid, st.st_gid) != 0) st.st_mode &= ~(S_ISUID | S_ISGID); chmod(cdpath, st.st_mode); } } } /// ditto void copyFile(string from, string to) { copyFile(NativePath(from), NativePath(to)); } version (Windows) extern(Windows) int CreateHardLinkW(const(wchar)* to, const(wchar)* from, void* attr=null); // guess whether 2 files are identical, ignores filename and content private bool sameFile(NativePath a, NativePath b) { version (Posix) { auto st_a = std.file.DirEntry(a.toNativeString).statBuf; auto st_b = std.file.DirEntry(b.toNativeString).statBuf; return st_a == st_b; } else { static assert(__traits(allMembers, FileInfo)[0] == "name"); return getFileInfo(a).tupleof[1 .. $] == getFileInfo(b).tupleof[1 .. 
$]; } } private bool isWritable(NativePath name) { version (Windows) { import core.sys.windows.windows; return (name.toNativeString.getAttributes & FILE_ATTRIBUTE_READONLY) == 0; } else version (Posix) { import core.sys.posix.sys.stat; return (name.toNativeString.getAttributes & S_IWUSR) != 0; } else static assert(false, "Needs implementation."); } private void makeWritable(NativePath name) { makeWritable(name.toNativeString); } private void makeWritable(string name) { version (Windows) { import core.sys.windows.windows; name.setAttributes(name.getAttributes & ~FILE_ATTRIBUTE_READONLY); } else version (Posix) { import core.sys.posix.sys.stat; name.setAttributes(name.getAttributes | S_IWUSR); } else static assert(false, "Needs implementation."); } /** Creates a hardlink if possible, a copy otherwise. If `from` is read-only and `overwrite` is true, then a copy is made instead and `to` is made writable; so that repeating the command will not fail. */ void hardLinkFile(NativePath from, NativePath to, bool overwrite = false) { if (existsFile(to)) { enforce(overwrite, "Destination file already exists."); if (auto fe = collectException!FileException(removeFile(to))) { if (sameFile(from, to)) return; throw fe; } } const writeAccessChangeRequired = overwrite && !isWritable(from); if (!writeAccessChangeRequired) { version (Windows) { alias cstr = toUTFz!(const(wchar)*); if (CreateHardLinkW(cstr(to.toNativeString), cstr(from.toNativeString))) return; } else { import core.sys.posix.unistd : link; alias cstr = toUTFz!(const(char)*); if (!link(cstr(from.toNativeString), cstr(to.toNativeString))) return; } } // fallback to copy copyFile(from, to, overwrite); if (writeAccessChangeRequired) to.makeWritable; } /** Removes a file */ void removeFile(NativePath path) { removeFile(path.toNativeString()); } /// ditto void removeFile(string path) { std.file.remove(path); } /** Checks if a file exists */ bool existsFile(NativePath path) { return existsFile(path.toNativeString()); } /// 
ditto bool existsFile(string path) { return std.file.exists(path); } /// Checks if a directory exists bool existsDirectory(NativePath path) { if( !existsFile(path) ) return false; auto fi = getFileInfo(path); return fi.isDirectory; } /** Stores information about the specified file/directory into 'info' Returns false if the file does not exist. */ FileInfo getFileInfo(NativePath path) { auto ent = std.file.DirEntry(path.toNativeString()); return makeFileInfo(ent); } /// ditto FileInfo getFileInfo(string path) { return getFileInfo(NativePath(path)); } /** Creates a new directory. */ void ensureDirectory(NativePath path) { mkdirRecurse(path.toNativeString()); } /** Enumerates all files in the specified directory. */ int delegate(scope int delegate(ref FileInfo)) iterateDirectory(NativePath path) { int iterator(scope int delegate(ref FileInfo) del){ foreach (DirEntry ent; dirEntries(path.toNativeString(), SpanMode.shallow)) { auto fi = makeFileInfo(ent); if (auto res = del(fi)) return res; } return 0; } return &iterator; } /** Returns the current working directory. */ NativePath getWorkingDirectory() { return NativePath(std.file.getcwd()); } /** Contains general information about a file. */ struct FileInfo { /// Name of the file (not including the path) string name; /// Size of the file (zero for directories) ulong size; /// Time of the last modification SysTime timeModified; /// True if this is a symlink to an actual file bool isSymlink; /// True if this is a directory or a symlink pointing to a directory bool isDirectory; } /** Specifies how a file is manipulated on disk. */ enum FileMode { /// The file is opened read-only. read, /// The file is opened for read-write random access. readWrite, /// The file is truncated if it exists and created otherwise and the opened for read-write access. createTrunc, /// The file is opened for appending data to it and created if it does not exist. append } /** Accesses the contents of a file as a stream. 
*/ private FileInfo makeFileInfo(DirEntry ent) { FileInfo ret; ret.name = baseName(ent.name); if( ret.name.length == 0 ) ret.name = ent.name; assert(ret.name.length > 0); ret.isSymlink = ent.isSymlink; try { ret.isDirectory = ent.isDir; ret.size = ent.size; ret.timeModified = ent.timeLastModified; } catch (Exception e) { logDiagnostic("Failed to get extended file information for %s: %s", ret.name, e.msg); } return ret; } dub-1.40.0/source/dub/internal/vibecompat/data/000077500000000000000000000000001477246567400213245ustar00rootroot00000000000000dub-1.40.0/source/dub/internal/vibecompat/data/json.d000066400000000000000000002006651477246567400224530ustar00rootroot00000000000000/** JSON serialization and value handling. This module provides the Json struct for reading, writing and manipulating JSON values. De(serialization) of arbitrary D types is also supported and is recommended for handling JSON in performance sensitive applications. Copyright: © 2012-2015 RejectedSoftware e.K. License: Subject to the terms of the MIT license, as written in the included LICENSE.txt file. Authors: Sönke Ludwig */ module dub.internal.vibecompat.data.json; version (Have_vibe_d_data) public import vibe.data.json; // vibe.d 0.9.x else version (Have_vibe_serialization) public import vibe.data.json; // vibe.d 0.10.x+ else: import dub.internal.vibecompat.data.utils; public import dub.internal.vibecompat.data.serialization; public import std.json : JSONException; import std.algorithm : equal, min; import std.array; import std.conv; import std.datetime; import std.exception; import std.format; import std.range; import std.string : format; import std.traits; version = JsonLineNumbers; version = VibeJsonFieldNames; /******************************************************************************/ /* public types */ /******************************************************************************/ /** Represents a single JSON value. 
Json values can have one of the types defined in the Json.Type enum. They behave mostly like values in ECMA script in the way that you can transparently perform operations on them. However, strict typechecking is done, so that operations between differently typed JSON values will throw a JSONException. Additionally, an explicit cast or using get!() or to!() is required to convert a JSON value to the corresponding static D type. */ struct Json { private { // putting all fields in a union results in many false pointers leading to // memory leaks and, worse, std.algorithm.swap triggering an assertion // because of internal pointers. This crude workaround seems to fix // the issues. void*[2] m_data; ref inout(T) getDataAs(T)() inout { static assert(T.sizeof <= m_data.sizeof); return *cast(inout(T)*)m_data.ptr; } @property ref inout(long) m_int() inout return { return getDataAs!long(); } @property ref inout(double) m_float() inout return { return getDataAs!double(); } @property ref inout(bool) m_bool() inout return { return getDataAs!bool(); } @property ref inout(string) m_string() inout return { return getDataAs!string(); } @property ref inout(Json[string]) m_object() inout return { return getDataAs!(Json[string])(); } @property ref inout(Json[]) m_array() inout return { return getDataAs!(Json[])(); } Type m_type = Type.undefined; version (VibeJsonFieldNames) { uint m_magic = 0x1337f00d; // works around Appender bug (DMD BUG 10690/10859/11357) string m_name; string m_fileName; } } /** Represents the run time type of a JSON value. */ enum Type { undefined, /// A non-existent value in a JSON object null_, /// Null value bool_, /// Boolean value int_, /// 64-bit integer value float_, /// 64-bit floating point value string, /// UTF-8 string array, /// Array of JSON values object, /// JSON object aka. 
dictionary from string to Json Undefined = undefined, /// Compatibility alias - will be deprecated soon Null = null_, /// Compatibility alias - will be deprecated soon Bool = bool_, /// Compatibility alias - will be deprecated soon Int = int_, /// Compatibility alias - will be deprecated soon Float = float_, /// Compatibility alias - will be deprecated soon String = string, /// Compatibility alias - will be deprecated soon Array = array, /// Compatibility alias - will be deprecated soon Object = object /// Compatibility alias - will be deprecated soon } /// New JSON value of Type.Undefined static @property Json undefined() { return Json(); } /// New JSON value of Type.Object static @property Json emptyObject() { return Json(cast(Json[string])null); } /// New JSON value of Type.Array static @property Json emptyArray() { return Json(cast(Json[])null); } version(JsonLineNumbers) int line; /** Constructor for a JSON object. */ this(typeof(null)) { m_type = Type.null_; } /// ditto this(bool v) { m_type = Type.bool_; m_bool = v; } /// ditto this(byte v) { this(cast(long)v); } /// ditto this(ubyte v) { this(cast(long)v); } /// ditto this(short v) { this(cast(long)v); } /// ditto this(ushort v) { this(cast(long)v); } /// ditto this(int v) { this(cast(long)v); } /// ditto this(uint v) { this(cast(long)v); } /// ditto this(long v) { m_type = Type.int_; m_int = v; } /// ditto this(double v) { m_type = Type.float_; m_float = v; } /// ditto this(string v) { m_type = Type.string; m_string = v; } /// ditto this(Json[] v) { m_type = Type.array; m_array = v; } /// ditto this(Json[string] v) { m_type = Type.object; m_object = v; } /** Allows assignment of D values to a JSON value. 
*/ ref Json opAssign(Json v) return { m_type = v.m_type; final switch(m_type){ case Type.undefined: m_string = null; break; case Type.null_: m_string = null; break; case Type.bool_: m_bool = v.m_bool; break; case Type.int_: m_int = v.m_int; break; case Type.float_: m_float = v.m_float; break; case Type.string: m_string = v.m_string; break; case Type.array: opAssign(v.m_array); break; case Type.object: opAssign(v.m_object); break; } return this; } /// ditto void opAssign(typeof(null)) { m_type = Type.null_; m_string = null; } /// ditto bool opAssign(bool v) { m_type = Type.bool_; m_bool = v; return v; } /// ditto int opAssign(int v) { m_type = Type.int_; m_int = v; return v; } /// ditto long opAssign(long v) { m_type = Type.int_; m_int = v; return v; } /// ditto double opAssign(double v) { m_type = Type.float_; m_float = v; return v; } /// ditto string opAssign(string v) { m_type = Type.string; m_string = v; return v; } /// ditto Json[] opAssign(Json[] v) { m_type = Type.array; m_array = v; version (VibeJsonFieldNames) { if (m_magic == 0x1337f00d) { foreach (idx, ref av; m_array) av.m_name = format("%s[%s]", m_name, idx); } else m_name = null; } return v; } /// ditto Json[string] opAssign(Json[string] v) { m_type = Type.object; m_object = v; version (VibeJsonFieldNames) { if (m_magic == 0x1337f00d) { foreach (key, ref av; m_object) av.m_name = format("%s.%s", m_name, key); } else m_name = null; } return v; } /** Allows removal of values from Type.Object Json objects. */ void remove(string item) { checkType!(Json[string])(); m_object.remove(item); } /** The current type id of this JSON object. */ @property Type type() const { return m_type; } /** Clones a JSON value recursively. 
*/ Json clone() const { final switch (m_type) { case Type.undefined: return Json.undefined; case Type.null_: return Json(null); case Type.bool_: return Json(m_bool); case Type.int_: return Json(m_int); case Type.float_: return Json(m_float); case Type.string: return Json(m_string); case Type.array: auto ret = Json.emptyArray; foreach (v; this) ret ~= v.clone(); return ret; case Type.object: auto ret = Json.emptyObject; foreach (string name, v; this) ret[name] = v.clone(); return ret; } } /** Check whether the JSON object contains the given key and if yes, return a pointer to the corresponding object, otherwise return `null`. */ inout(Json*) opBinaryRight(string op : "in")(string key) inout { checkType!(Json[string])(); return key in m_object; } /** Allows direct indexing of array typed JSON values. */ ref inout(Json) opIndex(size_t idx) inout { checkType!(Json[])(); return m_array[idx]; } /// unittest { Json value = Json.emptyArray; value ~= 1; value ~= true; value ~= "foo"; assert(value[0] == 1); assert(value[1] == true); assert(value[2] == "foo"); } /** Allows direct indexing of object typed JSON values using a string as the key. 
*/ const(Json) opIndex(string key) const { checkType!(Json[string])(); if( auto pv = key in m_object ) return *pv; Json ret = Json.undefined; ret.m_string = key; version (VibeJsonFieldNames) ret.m_name = format("%s.%s", m_name, key); return ret; } /// ditto ref Json opIndex(string key) { checkType!(Json[string])(); if( auto pv = key in m_object ) return *pv; if (m_object is null) { m_object = ["": Json.init]; m_object.remove(""); } m_object[key] = Json.init; assert(m_object !is null); assert(key in m_object, "Failed to insert key '"~key~"' into AA!?"); m_object[key].m_type = Type.undefined; // DMDBUG: AAs are the $H1T!!!11 assert(m_object[key].type == Type.undefined); m_object[key].m_string = key; version (VibeJsonFieldNames) m_object[key].m_name = format("%s.%s", m_name, key); return m_object[key]; } /// unittest { Json value = Json.emptyObject; value["a"] = 1; value["b"] = true; value["c"] = "foo"; assert(value["a"] == 1); assert(value["b"] == true); assert(value["c"] == "foo"); } /** Returns a slice of a JSON array. */ inout(Json[]) opSlice() inout { checkType!(Json[])(); return m_array; } /// inout(Json[]) opSlice(size_t from, size_t to) inout { checkType!(Json[])(); return m_array[from .. to]; } /** Returns the number of entries of string, array or object typed JSON values. */ @property size_t length() const { checkType!(string, Json[], Json[string])("property length"); switch(m_type){ case Type.string: return m_string.length; case Type.array: return m_array.length; case Type.object: return m_object.length; default: assert(false); } } /** Allows foreach iterating over JSON objects and arrays. 
*/ int opApply(int delegate(ref Json obj) del) { checkType!(Json[], Json[string])("opApply"); if( m_type == Type.array ){ foreach( ref v; m_array ) if( auto ret = del(v) ) return ret; return 0; } else { foreach( ref v; m_object ) if( v.type != Type.undefined ) if( auto ret = del(v) ) return ret; return 0; } } /// ditto int opApply(int delegate(ref const Json obj) del) const { checkType!(Json[], Json[string])("opApply"); if( m_type == Type.array ){ foreach( ref v; m_array ) if( auto ret = del(v) ) return ret; return 0; } else { foreach( ref v; m_object ) if( v.type != Type.undefined ) if( auto ret = del(v) ) return ret; return 0; } } /// ditto int opApply(int delegate(ref size_t idx, ref Json obj) del) { checkType!(Json[])("opApply"); foreach( idx, ref v; m_array ) if( auto ret = del(idx, v) ) return ret; return 0; } /// ditto int opApply(int delegate(ref size_t idx, ref const Json obj) del) const { checkType!(Json[])("opApply"); foreach( idx, ref v; m_array ) if( auto ret = del(idx, v) ) return ret; return 0; } /// ditto int opApply(int delegate(ref string idx, ref Json obj) del) { checkType!(Json[string])("opApply"); foreach( idx, ref v; m_object ) if( v.type != Type.undefined ) if( auto ret = del(idx, v) ) return ret; return 0; } /// ditto int opApply(int delegate(ref string idx, ref const Json obj) del) const { checkType!(Json[string])("opApply"); foreach( idx, ref v; m_object ) if( v.type != Type.undefined ) if( auto ret = del(idx, v) ) return ret; return 0; } /** Converts the JSON value to the corresponding D type - types must match exactly. 
Available_Types: $(UL $(LI `bool` (`Type.bool_`)) $(LI `double` (`Type.float_`)) $(LI `float` (Converted from `double`)) $(LI `long` (`Type.int_`)) $(LI `ulong`, `int`, `uint`, `short`, `ushort`, `byte`, `ubyte` (Converted from `long`)) $(LI `string` (`Type.string`)) $(LI `Json[]` (`Type.array`)) $(LI `Json[string]` (`Type.object`)) ) See_Also: `opt`, `to`, `deserializeJson` */ inout(T) opCast(T)() inout { return get!T; } /// ditto @property inout(T) get(T)() inout { checkType!T(); static if (is(T == bool)) return m_bool; else static if (is(T == double)) return m_float; else static if (is(T == float)) return cast(T)m_float; else static if (is(T == long)) return m_int; else static if (is(T == ulong)) return cast(ulong)m_int; else static if (is(T : long)){ enforceJson(m_int <= T.max && m_int >= T.min, "Integer conversion out of bounds error", m_fileName, line); return cast(T)m_int; } else static if (is(T == string)) return m_string; else static if (is(T == Json[])) return m_array; else static if (is(T == Json[string])) return m_object; else static assert("JSON can only be cast to (bool, long, double, string, Json[] or Json[string]. Not "~T.stringof~"."); } /** Returns the native type for this JSON if it matches the current runtime type. If the runtime type does not match the given native type, the 'def' parameter is returned instead. See_Also: `get` */ @property const(T) opt(T)(const(T) def = T.init) const { if( typeId!T != m_type ) return def; return get!T; } /// ditto @property T opt(T)(T def = T.init) { if( typeId!T != m_type ) return def; return get!T; } /** Converts the JSON value to the corresponding D type - types are converted as necessary. Automatically performs conversions between strings and numbers. See `get` for the list of available types. For converting/deserializing JSON to complex data types see `deserializeJson`. 
See_Also: `get`, `deserializeJson` */ @property inout(T) to(T)() inout { static if( is(T == bool) ){ final switch( m_type ){ case Type.undefined: return false; case Type.null_: return false; case Type.bool_: return m_bool; case Type.int_: return m_int != 0; case Type.float_: return m_float != 0; case Type.string: return m_string.length > 0; case Type.array: return m_array.length > 0; case Type.object: return m_object.length > 0; } } else static if( is(T == double) ){ final switch( m_type ){ case Type.undefined: return T.init; case Type.null_: return 0; case Type.bool_: return m_bool ? 1 : 0; case Type.int_: return m_int; case Type.float_: return m_float; case Type.string: return .to!double(cast(string)m_string); case Type.array: return double.init; case Type.object: return double.init; } } else static if( is(T == float) ){ final switch( m_type ){ case Type.undefined: return T.init; case Type.null_: return 0; case Type.bool_: return m_bool ? 1 : 0; case Type.int_: return m_int; case Type.float_: return m_float; case Type.string: return .to!float(cast(string)m_string); case Type.array: return float.init; case Type.object: return float.init; } } else static if( is(T == long) ){ final switch( m_type ){ case Type.undefined: return 0; case Type.null_: return 0; case Type.bool_: return m_bool ? 1 : 0; case Type.int_: return m_int; case Type.float_: return cast(long)m_float; case Type.string: return .to!long(m_string); case Type.array: return 0; case Type.object: return 0; } } else static if( is(T : long) ){ final switch( m_type ){ case Type.undefined: return 0; case Type.null_: return 0; case Type.bool_: return m_bool ? 
1 : 0; case Type.int_: return cast(T)m_int; case Type.float_: return cast(T)m_float; case Type.string: return cast(T).to!long(cast(string)m_string); case Type.array: return 0; case Type.object: return 0; } } else static if( is(T == string) ){ switch( m_type ){ default: return toString(); case Type.string: return m_string; } } else static if( is(T == Json[]) ){ switch( m_type ){ default: return Json([this]); case Type.array: return m_array; } } else static if( is(T == Json[string]) ){ switch( m_type ){ default: return Json(["value": this]); case Type.object: return m_object; } } else static assert("JSON can only be cast to (bool, long, double, string, Json[] or Json[string]. Not "~T.stringof~"."); } /** Performs unary operations on the JSON value. The following operations are supported for each type: $(DL $(DT Null) $(DD none) $(DT Bool) $(DD ~) $(DT Int) $(DD +, -, ++, --) $(DT Float) $(DD +, -, ++, --) $(DT String) $(DD none) $(DT Array) $(DD none) $(DT Object) $(DD none) ) */ Json opUnary(string op)() const { static if( op == "~" ){ checkType!bool(); return Json(~m_bool); } else static if( op == "+" || op == "-" || op == "++" || op == "--" ){ checkType!(long, double)("unary "~op); if( m_type == Type.int_ ) mixin("return Json("~op~"m_int);"); else if( m_type == Type.float_ ) mixin("return Json("~op~"m_float);"); else assert(false); } else static assert("Unsupported operator '"~op~"' for type JSON."); } /** Performs binary operations between JSON values. The two JSON values must be of the same run time type or a JSONException will be thrown. Only the operations listed are allowed for each of the types. 
$(DL $(DT Null) $(DD none) $(DT Bool) $(DD &&, ||) $(DT Int) $(DD +, -, *, /, %) $(DT Float) $(DD +, -, *, /, %) $(DT String) $(DD ~) $(DT Array) $(DD ~) $(DT Object) $(DD in) ) */ Json opBinary(string op)(ref const(Json) other) const { enforceJson(m_type == other.m_type, "Binary operation '"~op~"' between "~.to!string(m_type)~" and "~.to!string(other.m_type)~" JSON objects."); static if( op == "&&" ){ checkType!(bool)(op); return Json(m_bool && other.m_bool); } else static if( op == "||" ){ checkType!(bool)(op); return Json(m_bool || other.m_bool); } else static if( op == "+" ){ checkType!(long, double)(op); if( m_type == Type.Int ) return Json(m_int + other.m_int); else if( m_type == Type.float_ ) return Json(m_float + other.m_float); else assert(false); } else static if( op == "-" ){ checkType!(long, double)(op); if( m_type == Type.Int ) return Json(m_int - other.m_int); else if( m_type == Type.float_ ) return Json(m_float - other.m_float); else assert(false); } else static if( op == "*" ){ checkType!(long, double)(op); if( m_type == Type.Int ) return Json(m_int * other.m_int); else if( m_type == Type.float_ ) return Json(m_float * other.m_float); else assert(false); } else static if( op == "/" ){ checkType!(long, double)(op); if( m_type == Type.Int ) return Json(m_int / other.m_int); else if( m_type == Type.float_ ) return Json(m_float / other.m_float); else assert(false); } else static if( op == "%" ){ checkType!(long, double)(op); if( m_type == Type.Int ) return Json(m_int % other.m_int); else if( m_type == Type.float_ ) return Json(m_float % other.m_float); else assert(false); } else static if( op == "~" ){ checkType!(string, Json[])(op); if( m_type == Type.string ) return Json(m_string ~ other.m_string); else if (m_type == Type.array) return Json(m_array ~ other.m_array); else assert(false); } else static assert("Unsupported operator '"~op~"' for type JSON."); } /// ditto Json opBinary(string op)(Json other) if( op == "~" ) { static if( op == "~" ){ 
checkType!(string, Json[])(op); if( m_type == Type.string ) return Json(m_string ~ other.m_string); else if( m_type == Type.array ) return Json(m_array ~ other.m_array); else assert(false); } else static assert("Unsupported operator '"~op~"' for type JSON."); } /// ditto void opOpAssign(string op)(Json other) if (op == "+" || op == "-" || op == "*" || op == "/" || op == "%" || op =="~") { enforceJson(m_type == other.m_type || op == "~" && m_type == Type.array, "Binary operation '"~op~"=' between "~.to!string(m_type)~" and "~.to!string(other.m_type)~" JSON objects."); static if( op == "+" ){ if( m_type == Type.int_ ) m_int += other.m_int; else if( m_type == Type.float_ ) m_float += other.m_float; else enforceJson(false, "'+=' only allowed for scalar types, not "~.to!string(m_type)~"."); } else static if( op == "-" ){ if( m_type == Type.int_ ) m_int -= other.m_int; else if( m_type == Type.float_ ) m_float -= other.m_float; else enforceJson(false, "'-=' only allowed for scalar types, not "~.to!string(m_type)~"."); } else static if( op == "*" ){ if( m_type == Type.int_ ) m_int *= other.m_int; else if( m_type == Type.float_ ) m_float *= other.m_float; else enforceJson(false, "'*=' only allowed for scalar types, not "~.to!string(m_type)~"."); } else static if( op == "/" ){ if( m_type == Type.int_ ) m_int /= other.m_int; else if( m_type == Type.float_ ) m_float /= other.m_float; else enforceJson(false, "'/=' only allowed for scalar types, not "~.to!string(m_type)~"."); } else static if( op == "%" ){ if( m_type == Type.int_ ) m_int %= other.m_int; else if( m_type == Type.float_ ) m_float %= other.m_float; else enforceJson(false, "'%=' only allowed for scalar types, not "~.to!string(m_type)~"."); } else static if( op == "~" ){ if (m_type == Type.string) m_string ~= other.m_string; else if (m_type == Type.array) { if (other.m_type == Type.array) m_array ~= other.m_array; else appendArrayElement(other); } else enforceJson(false, "'~=' only allowed for string and array types, 
not "~.to!string(m_type)~"."); } else static assert("Unsupported operator '"~op~"=' for type JSON."); } /// ditto void opOpAssign(string op, T)(T other) if (!is(T == Json) && is(typeof(Json(other)))) { opOpAssign!op(Json(other)); } /// ditto Json opBinary(string op)(bool other) const { checkType!bool(); mixin("return Json(m_bool "~op~" other);"); } /// ditto Json opBinary(string op)(long other) const { checkType!long(); mixin("return Json(m_int "~op~" other);"); } /// ditto Json opBinary(string op)(double other) const { checkType!double(); mixin("return Json(m_float "~op~" other);"); } /// ditto Json opBinary(string op)(string other) const { checkType!string(); mixin("return Json(m_string "~op~" other);"); } /// ditto Json opBinary(string op)(Json[] other) { checkType!(Json[])(); mixin("return Json(m_array "~op~" other);"); } /// ditto Json opBinaryRight(string op)(bool other) const { checkType!bool(); mixin("return Json(other "~op~" m_bool);"); } /// ditto Json opBinaryRight(string op)(long other) const { checkType!long(); mixin("return Json(other "~op~" m_int);"); } /// ditto Json opBinaryRight(string op)(double other) const { checkType!double(); mixin("return Json(other "~op~" m_float);"); } /// ditto Json opBinaryRight(string op)(string other) const if(op == "~") { checkType!string(); return Json(other ~ m_string); } /// ditto inout(Json)* opBinaryRight(string op)(string other) inout if(op == "in") { checkType!(Json[string])(); auto pv = other in m_object; if( !pv ) return null; if( pv.type == Type.undefined ) return null; return pv; } /// ditto Json opBinaryRight(string op)(Json[] other) { checkType!(Json[])(); mixin("return Json(other "~op~" m_array);"); } /** * The append operator will append arrays. This method always appends it's argument as an array element, so nested arrays can be created. 
*/ void appendArrayElement(Json element) { enforceJson(m_type == Type.array, "'appendArrayElement' only allowed for array types, not "~.to!string(m_type)~"."); m_array ~= element; } /** Compares two JSON values for equality. If the two values have different types, they are considered unequal. This differs with ECMA script, which performs a type conversion before comparing the values. */ bool opEquals(ref const Json other) const { if( m_type != other.m_type ) return false; final switch(m_type){ case Type.undefined: return false; case Type.null_: return true; case Type.bool_: return m_bool == other.m_bool; case Type.int_: return m_int == other.m_int; case Type.float_: return m_float == other.m_float; case Type.string: return m_string == other.m_string; case Type.array: return m_array == other.m_array; case Type.object: return m_object == other.m_object; } } /// ditto bool opEquals(const Json other) const { return opEquals(other); } /// ditto bool opEquals(typeof(null)) const { return m_type == Type.null_; } /// ditto bool opEquals(bool v) const { return m_type == Type.bool_ && m_bool == v; } /// ditto bool opEquals(int v) const { return m_type == Type.int_ && m_int == v; } /// ditto bool opEquals(long v) const { return m_type == Type.int_ && m_int == v; } /// ditto bool opEquals(double v) const { return m_type == Type.float_ && m_float == v; } /// ditto bool opEquals(string v) const { return m_type == Type.string && m_string == v; } /** Compares two JSON values. If the types of the two values differ, the value with the smaller type id is considered the smaller value. This differs from ECMA script, which performs a type conversion before comparing the values. JSON values of type Object cannot be compared and will throw an exception. */ int opCmp(ref const Json other) const { if( m_type != other.m_type ) return m_type < other.m_type ? -1 : 1; final switch(m_type){ case Type.undefined: return 0; case Type.null_: return 0; case Type.bool_: return m_bool < other.m_bool ? 
-1 : m_bool == other.m_bool ? 0 : 1; case Type.int_: return m_int < other.m_int ? -1 : m_int == other.m_int ? 0 : 1; case Type.float_: return m_float < other.m_float ? -1 : m_float == other.m_float ? 0 : 1; case Type.string: return m_string < other.m_string ? -1 : m_string == other.m_string ? 0 : 1; case Type.array: return m_array < other.m_array ? -1 : m_array == other.m_array ? 0 : 1; case Type.object: enforceJson(false, "JSON objects cannot be compared."); assert(false); } } alias opDollar = length; /** Returns the type id corresponding to the given D type. */ static @property Type typeId(T)() { static if( is(T == typeof(null)) ) return Type.null_; else static if( is(T == bool) ) return Type.bool_; else static if( is(T == double) ) return Type.float_; else static if( is(T == float) ) return Type.float_; else static if( is(T : long) ) return Type.int_; else static if( is(T == string) ) return Type.string; else static if( is(T == Json[]) ) return Type.array; else static if( is(T == Json[string]) ) return Type.object; else static assert(false, "Unsupported JSON type '"~T.stringof~"'. Only bool, long, double, string, Json[] and Json[string] are allowed."); } /** Returns the JSON object as a string. For large JSON values use writeJsonString instead as this function will store the whole string in memory, whereas writeJsonString writes it out bit for bit. See_Also: writeJsonString, toPrettyString */ string toString() const { auto ret = appender!string(); writeJsonString(ret, this); return ret.data; } /** Returns the JSON object as a "pretty" string. --- auto json = Json(["foo": Json("bar")]); writeln(json.toPrettyString()); // output: // { // "foo": "bar" // } --- Params: level = Specifies the base amount of indentation for the output. Indentation is always done using tab characters. 
See_Also: writePrettyJsonString, toString */ string toPrettyString(int level = 0) const { auto ret = appender!string(); writePrettyJsonString(ret, this, level); return ret.data; } private void checkType(TYPES...)(string op = null) const { bool matched = false; foreach (T; TYPES) if (m_type == typeId!T) matched = true; if (matched) return; string name; version (VibeJsonFieldNames) { if (m_name.length) name = m_name ~ " of type " ~ m_type.to!string; else name = "JSON of type " ~ m_type.to!string; } else name = "JSON of type " ~ m_type.to!string; string expected; static if (TYPES.length == 1) expected = typeId!(TYPES[0]).to!string; else { foreach (T; TYPES) { if (expected.length > 0) expected ~= ", "; expected ~= typeId!T.to!string; } } enforceJson(op.length > 0, format("Got %s, expected %s.", name, expected), m_fileName, line); enforceJson(false, format("Got %s, expected %s for %s.", name, expected, op), m_fileName, line); } /*invariant() { assert(m_type >= Type.Undefined && m_type <= Type.Object); }*/ } /******************************************************************************/ /* public functions */ /******************************************************************************/ /** Parses the given range as a JSON string and returns the corresponding Json object. The range is shrunk during parsing, leaving any remaining text that is not part of the JSON contents. Throws a JSONException if any parsing error occurred. */ Json parseJson(R)(ref R range, int* line = null, string filename = null) if( is(R == string) ) { import std.string : startsWith; Json ret; enforceJson(!range.empty, "JSON string is empty.", filename, 0); skipWhitespace(range, line); version(JsonLineNumbers) { int curline = line ? *line : 0; } switch( range.front ){ case 'f': enforceJson(range[1 .. $].startsWith("alse"), "Expected 'false', got '"~range[0 .. min(5, $)]~"'.", filename, line); range.popFrontN(5); ret = false; break; case 'n': enforceJson(range[1 .. 
$].startsWith("ull"), "Expected 'null', got '"~range[0 .. min(4, $)]~"'.", filename, line); range.popFrontN(4); ret = null; break; case 't': enforceJson(range[1 .. $].startsWith("rue"), "Expected 'true', got '"~range[0 .. min(4, $)]~"'.", filename, line); range.popFrontN(4); ret = true; break; case '0': .. case '9': case '-': bool is_float; auto num = skipNumber(range, is_float, filename, line); if( is_float ) ret = to!double(num); else ret = to!long(num); break; case '\"': ret = skipJsonString(range, filename, line); break; case '[': Json[] arr; range.popFront(); while (true) { skipWhitespace(range, line); enforceJson(!range.empty, "Missing ']' before EOF.", filename, line); if(range.front == ']') break; arr ~= parseJson(range, line, filename); skipWhitespace(range, line); enforceJson(!range.empty, "Missing ']' before EOF.", filename, line); enforceJson(range.front == ',' || range.front == ']', format("Expected ']' or ',' - got '%s'.", range.front), filename, line); if( range.front == ']' ) break; else range.popFront(); } range.popFront(); ret = arr; break; case '{': Json[string] obj; range.popFront(); while (true) { skipWhitespace(range, line); enforceJson(!range.empty, "Missing '}' before EOF.", filename, line); if(range.front == '}') break; string key = skipJsonString(range, filename, line); skipWhitespace(range, line); enforceJson(range.startsWith(":"), "Expected ':' for key '" ~ key ~ "'", filename, line); range.popFront(); skipWhitespace(range, line); Json itm = parseJson(range, line, filename); obj[key] = itm; skipWhitespace(range, line); enforceJson(!range.empty, "Missing '}' before EOF.", filename, line); enforceJson(range.front == ',' || range.front == '}', format("Expected '}' or ',' - got '%s'.", range.front), filename, line); if (range.front == '}') break; else range.popFront(); } range.popFront(); ret = obj; break; default: enforceJson(false, format("Expected valid JSON token, got '%s'.", range[0 .. 
min(12, $)]), filename, line); assert(false); } assert(ret.type != Json.Type.undefined); version(JsonLineNumbers) ret.line = curline; ret.m_fileName = filename; return ret; } /** Parses the given JSON string and returns the corresponding Json object. Throws a JSONException if any parsing error occurs. */ Json parseJsonString(string str, string filename = null) { import std.string : strip; auto strcopy = str; int line = 0; auto ret = parseJson(strcopy, &line, filename); enforceJson(strcopy.strip().length == 0, "Expected end of string after JSON value.", filename, line); return ret; } unittest { assert(parseJsonString("null") == Json(null)); assert(parseJsonString("true") == Json(true)); assert(parseJsonString("false") == Json(false)); assert(parseJsonString("1") == Json(1)); assert(parseJsonString("2.0") == Json(2.0)); assert(parseJsonString("\"test\"") == Json("test")); assert(parseJsonString("[1, 2, 3]") == Json([Json(1), Json(2), Json(3)])); assert(parseJsonString("{\"a\": 1}") == Json(["a": Json(1)])); assert(parseJsonString(`"\\\/\b\f\n\r\t\u1234"`).get!string == "\\/\b\f\n\r\t\u1234"); auto json = parseJsonString(`{"hey": "This is @à test éhééhhéhéé !%/??*&?\ud83d\udcec"}`); assert(json.toPrettyString() == parseJsonString(json.toPrettyString()).toPrettyString()); } unittest { import std.string : endsWith; try parseJsonString(`{"a": 1`); catch (Exception e) assert(e.msg.endsWith("Missing '}' before EOF.")); try parseJsonString(`{"a": 1 x`); catch (Exception e) assert(e.msg.endsWith("Expected '}' or ',' - got 'x'.")); try parseJsonString(`[1`); catch (Exception e) assert(e.msg.endsWith("Missing ']' before EOF.")); try parseJsonString(`[1 x`); catch (Exception e) assert(e.msg.endsWith("Expected ']' or ',' - got 'x'.")); } /** Serializes the given value to JSON. 
The following types of values are supported: $(DL $(DT `Json`) $(DD Used as-is) $(DT `null`) $(DD Converted to `Json.Type.null_`) $(DT `bool`) $(DD Converted to `Json.Type.bool_`) $(DT `float`, `double`) $(DD Converted to `Json.Type.float_`) $(DT `short`, `ushort`, `int`, `uint`, `long`, `ulong`) $(DD Converted to `Json.Type.int_`) $(DT `string`) $(DD Converted to `Json.Type.string`) $(DT `T[]`) $(DD Converted to `Json.Type.array`) $(DT `T[string]`) $(DD Converted to `Json.Type.object`) $(DT `struct`) $(DD Converted to `Json.Type.object`) $(DT `class`) $(DD Converted to `Json.Type.object` or `Json.Type.null_`) ) All entries of an array or an associative array, as well as all R/W properties and all public fields of a struct/class are recursively serialized using the same rules. Fields ending with an underscore will have the last underscore stripped in the serialized output. This makes it possible to use fields with D keywords as their name by simply appending an underscore. The following methods can be used to customize the serialization of structs/classes: --- Json toJson() const; static T fromJson(Json src); string toString() const; static T fromString(string src); --- The methods will have to be defined in pairs. The first pair that is implemented by the type will be used for serialization (i.e. `toJson` overrides `toString`). 
See_Also: `deserializeJson`, `vibe.data.serialization` */ Json serializeToJson(T)(T value) { version (VibeOldSerialization) { return serializeToJsonOld(value); } else { return serialize!JsonSerializer(value); } } /// ditto void serializeToJson(R, T)(R destination, T value) if (isOutputRange!(R, char) || isOutputRange!(R, ubyte)) { serialize!(JsonStringSerializer!R)(value, destination); } /// ditto string serializeToJsonString(T)(T value) { auto ret = appender!string; serializeToJson(ret, value); return ret.data; } /// unittest { struct Foo { int number; string str; } Foo f; f.number = 12; f.str = "hello"; string json = serializeToJsonString(f); assert(json == `{"number":12,"str":"hello"}`); Json jsonval = serializeToJson(f); assert(jsonval.type == Json.Type.object); assert(jsonval["number"] == Json(12)); assert(jsonval["str"] == Json("hello")); } /** Serializes the given value to a pretty printed JSON string. See_also: `serializeToJson`, `vibe.data.serialization` */ void serializeToPrettyJson(R, T)(R destination, T value) if (isOutputRange!(R, char) || isOutputRange!(R, ubyte)) { serialize!(JsonStringSerializer!(R, true))(value, destination); } /// ditto string serializeToPrettyJson(T)(T value) { auto ret = appender!string; serializeToPrettyJson(ret, value); return ret.data; } /// unittest { struct Foo { int number; string str; } Foo f; f.number = 12; f.str = "hello"; string json = serializeToPrettyJson(f); assert(json == `{ "number": 12, "str": "hello" }`); } /// private Json serializeToJsonOld(T)(T value) { import vibe.internal.meta.traits; alias TU = Unqual!T; static if (is(TU == Json)) return value; else static if (is(TU == typeof(null))) return Json(null); else static if (is(TU == bool)) return Json(value); else static if (is(TU == float)) return Json(cast(double)value); else static if (is(TU == double)) return Json(value); else static if (is(TU == DateTime)) return Json(value.toISOExtString()); else static if (is(TU == SysTime)) return 
Json(value.toISOExtString()); else static if (is(TU == Date)) return Json(value.toISOExtString()); else static if (is(TU : long)) return Json(cast(long)value); else static if (is(TU : string)) return Json(value); else static if (isArray!T) { auto ret = new Json[value.length]; foreach (i; 0 .. value.length) ret[i] = serializeToJson(value[i]); return Json(ret); } else static if (isAssociativeArray!TU) { Json[string] ret; alias TK = KeyType!T; foreach (key, value; value) { static if(is(TK == string)) { ret[key] = serializeToJson(value); } else static if (is(TK == enum)) { ret[to!string(key)] = serializeToJson(value); } else static if (isStringSerializable!(TK)) { ret[key.toString()] = serializeToJson(value); } else static assert("AA key type %s not supported for JSON serialization."); } return Json(ret); } else static if (isJsonSerializable!TU) { return value.toJson(); } else static if (isStringSerializable!TU) { return Json(value.toString()); } else static if (is(TU == struct)) { Json[string] ret; foreach (m; __traits(allMembers, T)) { static if (isRWField!(TU, m)) { auto mv = __traits(getMember, value, m); ret[underscoreStrip(m)] = serializeToJson(mv); } } return Json(ret); } else static if(is(TU == class)) { if (value is null) return Json(null); Json[string] ret; foreach (m; __traits(allMembers, T)) { static if (isRWField!(TU, m)) { auto mv = __traits(getMember, value, m); ret[underscoreStrip(m)] = serializeToJson(mv); } } return Json(ret); } else static if (isPointer!TU) { if (value is null) return Json(null); return serializeToJson(*value); } else { static assert(false, "Unsupported type '"~T.stringof~"' for JSON serialization."); } } /** Deserializes a JSON value into the destination variable. The same types as for `serializeToJson()` are supported and handled inversely. 
See_Also: `serializeToJson`, `serializeToJsonString`, `vibe.data.serialization` */ void deserializeJson(T)(ref T dst, Json src) { dst = deserializeJson!T(src); } /// ditto T deserializeJson(T)(Json src) { version (VibeOldSerialization) { return deserializeJsonOld!T(src); } else { return deserialize!(JsonSerializer, T)(src); } } /// ditto T deserializeJson(T, R)(R input) if (isInputRange!R && !is(R == Json)) { return deserialize!(JsonStringSerializer!R, T)(input); } /// private T deserializeJsonOld(T)(Json src) { import vibe.internal.meta.traits; static if( is(T == struct) || isSomeString!T || isIntegral!T || isFloatingPoint!T ) if( src.type == Json.Type.null_ ) return T.init; static if (is(T == Json)) return src; else static if (is(T == typeof(null))) { return null; } else static if (is(T == bool)) return src.get!bool; else static if (is(T == float)) return src.to!float; // since doubles are frequently serialized without else static if (is(T == double)) return src.to!double; // a decimal point, we allow conversions here else static if (is(T == DateTime)) return DateTime.fromISOExtString(src.get!string); else static if (is(T == SysTime)) return SysTime.fromISOExtString(src.get!string); else static if (is(T == Date)) return Date.fromISOExtString(src.get!string); else static if (is(T : long)) return cast(T)src.get!long; else static if (is(T : string)) return cast(T)src.get!string; else static if (isArray!T) { alias TV = typeof(T.init[0]) ; auto dst = new Unqual!TV[src.length]; foreach (size_t i, v; src) dst[i] = deserializeJson!(Unqual!TV)(v); return cast(T)dst; } else static if( isAssociativeArray!T ) { alias TV = typeof(T.init.values[0]) ; alias TK = KeyType!T; Unqual!TV[TK] dst; foreach (string key, value; src) { static if (is(TK == string)) { dst[key] = deserializeJson!(Unqual!TV)(value); } else static if (is(TK == enum)) { dst[to!(TK)(key)] = deserializeJson!(Unqual!TV)(value); } else static if (isStringSerializable!TK) { auto dsk = TK.fromString(key); dst[dsk] = 
deserializeJson!(Unqual!TV)(value); } else static assert("AA key type %s not supported for JSON serialization."); } return dst; } else static if (isJsonSerializable!T) { return T.fromJson(src); } else static if (isStringSerializable!T) { return T.fromString(src.get!string); } else static if (is(T == struct)) { T dst; foreach (m; __traits(allMembers, T)) { static if (isRWPlainField!(T, m) || isRWField!(T, m)) { alias TM = typeof(__traits(getMember, dst, m)) ; __traits(getMember, dst, m) = deserializeJson!TM(src[underscoreStrip(m)]); } } return dst; } else static if (is(T == class)) { if (src.type == Json.Type.null_) return null; auto dst = new T; foreach (m; __traits(allMembers, T)) { static if (isRWPlainField!(T, m) || isRWField!(T, m)) { alias TM = typeof(__traits(getMember, dst, m)) ; __traits(getMember, dst, m) = deserializeJson!TM(src[underscoreStrip(m)]); } } return dst; } else static if (isPointer!T) { if (src.type == Json.Type.null_) return null; alias TD = typeof(*T.init) ; dst = new TD; *dst = deserializeJson!TD(src); return dst; } else { static assert(false, "Unsupported type '"~T.stringof~"' for JSON serialization."); } } /// unittest { struct Foo { int number; string str; } Foo f = deserializeJson!Foo(`{"number": 12, "str": "hello"}`); assert(f.number == 12); assert(f.str == "hello"); } unittest { import std.stdio; enum Foo : string { k = "test" } enum Boo : int { l = 5 } static struct S { float a; double b; bool c; int d; string e; byte f; ubyte g; long h; ulong i; float[] j; Foo k; Boo l; } immutable S t = {1.5, -3.0, true, int.min, "Test", -128, 255, long.min, ulong.max, [1.1, 1.2, 1.3], Foo.k, Boo.l}; S u; deserializeJson(u, serializeToJson(t)); assert(t.a == u.a); assert(t.b == u.b); assert(t.c == u.c); assert(t.d == u.d); assert(t.e == u.e); assert(t.f == u.f); assert(t.g == u.g); assert(t.h == u.h); assert(t.i == u.i); assert(t.j == u.j); assert(t.k == u.k); assert(t.l == u.l); } unittest { assert(uint.max == 
serializeToJson(uint.max).deserializeJson!uint); assert(ulong.max == serializeToJson(ulong.max).deserializeJson!ulong); } unittest { static struct A { int value; static A fromJson(Json val) { return A(val.get!int); } Json toJson() const { return Json(value); } } static struct C { int value; static C fromString(string val) { return C(val.to!int); } string toString() const { return value.to!string; } } static struct D { int value; } assert(serializeToJson(const A(123)) == Json(123)); assert(serializeToJson(A(123)) == Json(123)); assert(serializeToJson(const C(123)) == Json("123")); assert(serializeToJson(C(123)) == Json("123")); assert(serializeToJson(const D(123)) == serializeToJson(["value": 123])); assert(serializeToJson(D(123)) == serializeToJson(["value": 123])); } unittest { auto d = Date(2001,1,1); deserializeJson(d, serializeToJson(Date.init)); assert(d == Date.init); deserializeJson(d, serializeToJson(Date(2001,1,1))); assert(d == Date(2001,1,1)); struct S { immutable(int)[] x; } S s; deserializeJson(s, serializeToJson(S([1,2,3]))); assert(s == S([1,2,3])); struct T { @optional S s; @optional int i; @optional float f_; // underscore strip feature @optional double d; @optional string str; } auto t = T(S([1,2,3])); deserializeJson(t, parseJsonString(`{ "s" : null, "i" : null, "f" : null, "d" : null, "str" : null }`)); assert(text(t) == text(T())); } unittest { static class C { int a; private int _b; @property int b() const { return _b; } @property void b(int v) { _b = v; } @property int test() const { return 10; } void test2() {} } C c = new C; c.a = 1; c.b = 2; C d; deserializeJson(d, serializeToJson(c)); assert(c.a == d.a); assert(c.b == d.b); } unittest { static struct C { int value; static C fromString(string val) { return C(val.to!int); } string toString() const { return value.to!string; } } enum Color { Red, Green, Blue } { static class T { string[Color] enumIndexedMap; string[C] stringableIndexedMap; this() { enumIndexedMap = [ Color.Red : "magenta", 
Color.Blue : "deep blue" ]; stringableIndexedMap = [ C(42) : "forty-two" ]; } } T original = new T; original.enumIndexedMap[Color.Green] = "olive"; T other; deserializeJson(other, serializeToJson(original)); assert(serializeToJson(other) == serializeToJson(original)); } { static struct S { string[Color] enumIndexedMap; string[C] stringableIndexedMap; } S *original = new S; original.enumIndexedMap = [ Color.Red : "magenta", Color.Blue : "deep blue" ]; original.enumIndexedMap[Color.Green] = "olive"; original.stringableIndexedMap = [ C(42) : "forty-two" ]; S other; deserializeJson(other, serializeToJson(original)); assert(serializeToJson(other) == serializeToJson(original)); } } unittest { import std.typecons : Nullable; struct S { Nullable!int a, b; } S s; s.a = 2; auto j = serializeToJson(s); assert(j["a"].type == Json.Type.int_); assert(j["b"].type == Json.Type.null_); auto t = deserializeJson!S(j); assert(!t.a.isNull() && t.a == 2); assert(t.b.isNull()); } unittest { // #840 int[2][2] nestedArray = 1; assert(nestedArray.serializeToJson.deserializeJson!(typeof(nestedArray)) == nestedArray); } /** Serializer for a plain Json representation. 
See_Also: vibe.data.serialization.serialize, vibe.data.serialization.deserialize, serializeToJson, deserializeJson */ struct JsonSerializer { template isJsonBasicType(T) { enum isJsonBasicType = isNumeric!T || isBoolean!T || is(T == string) || is(T == typeof(null)) || isJsonSerializable!T; } template isSupportedValueType(T) { enum isSupportedValueType = isJsonBasicType!T || is(T == Json); } private { Json m_current; Json[] m_compositeStack; } this(Json data) { m_current = data; } @disable this(this); // // serialization // Json getSerializedResult() { return m_current; } void beginWriteDictionary(T)() { m_compositeStack ~= Json.emptyObject; } void endWriteDictionary(T)() { m_current = m_compositeStack[$-1]; m_compositeStack.length--; } void beginWriteDictionaryEntry(T)(string name) {} void endWriteDictionaryEntry(T)(string name) { m_compositeStack[$-1][name] = m_current; } void beginWriteArray(T)(size_t) { m_compositeStack ~= Json.emptyArray; } void endWriteArray(T)() { m_current = m_compositeStack[$-1]; m_compositeStack.length--; } void beginWriteArrayEntry(T)(size_t) {} void endWriteArrayEntry(T)(size_t) { m_compositeStack[$-1].appendArrayElement(m_current); } void writeValue(T)(T value) { static if (is(T == Json)) m_current = value; else static if (isJsonSerializable!T) m_current = value.toJson(); else m_current = Json(value); } void writeValue(T)(in Json value) if (is(T == Json)) { m_current = value.clone; } // // deserialization // void readDictionary(T)(scope void delegate(string) field_handler) { enforceJson(m_current.type == Json.Type.object, "Expected JSON object, got "~m_current.type.to!string); auto old = m_current; foreach (string key, value; m_current) { m_current = value; field_handler(key); } m_current = old; } void readArray(T)(scope void delegate(size_t) size_callback, scope void delegate() entry_callback) { enforceJson(m_current.type == Json.Type.array, "Expected JSON array, got "~m_current.type.to!string); auto old = m_current; 
size_callback(m_current.length); foreach (ent; old) { m_current = ent; entry_callback(); } m_current = old; } T readValue(T)() { static if (is(T == Json)) return m_current; else static if (isJsonSerializable!T) return T.fromJson(m_current); else static if (is(T == float) || is(T == double)) { if (m_current.type == Json.Type.undefined) return T.nan; return m_current.type == Json.Type.float_ ? cast(T)m_current.get!double : cast(T)m_current.get!long; } else { return m_current.get!T(); } } bool tryReadNull() { return m_current.type == Json.Type.null_; } } /** Serializer for a range based plain JSON string representation. See_Also: vibe.data.serialization.serialize, vibe.data.serialization.deserialize, serializeToJson, deserializeJson */ struct JsonStringSerializer(R, bool pretty = false) if (isInputRange!R || isOutputRange!(R, char)) { private { R m_range; size_t m_level = 0; } template isJsonBasicType(T) { enum isJsonBasicType = isNumeric!T || isBoolean!T || is(T == string) || is(T == typeof(null)) || isJsonSerializable!T; } template isSupportedValueType(T) { enum isSupportedValueType = isJsonBasicType!T || is(T == Json); } this(R range) { m_range = range; } @disable this(this); // // serialization // static if (isOutputRange!(R, char)) { private { bool m_firstInComposite; } void getSerializedResult() {} void beginWriteDictionary(T)() { startComposite(); m_range.put('{'); } void endWriteDictionary(T)() { endComposite(); m_range.put("}"); } void beginWriteDictionaryEntry(T)(string name) { startCompositeEntry(); m_range.put('"'); m_range.jsonEscape(name); static if (pretty) m_range.put(`": `); else m_range.put(`":`); } void endWriteDictionaryEntry(T)(string name) {} void beginWriteArray(T)(size_t) { startComposite(); m_range.put('['); } void endWriteArray(T)() { endComposite(); m_range.put(']'); } void beginWriteArrayEntry(T)(size_t) { startCompositeEntry(); } void endWriteArrayEntry(T)(size_t) {} void writeValue(T)(in T value) { static if (is(T == typeof(null))) 
m_range.put("null"); else static if (is(T == bool)) m_range.put(value ? "true" : "false"); else static if (is(T : long)) m_range.formattedWrite("%s", value); else static if (is(T : real)) m_range.formattedWrite("%.16g", value); else static if (is(T == string)) { m_range.put('"'); m_range.jsonEscape(value); m_range.put('"'); } else static if (is(T == Json)) m_range.writeJsonString(value); else static if (isJsonSerializable!T) m_range.writeJsonString!(R, pretty)(value.toJson(), m_level); else static assert(false, "Unsupported type: " ~ T.stringof); } private void startComposite() { static if (pretty) m_level++; m_firstInComposite = true; } private void startCompositeEntry() { if (!m_firstInComposite) { m_range.put(','); } else { m_firstInComposite = false; } static if (pretty) indent(); } private void endComposite() { static if (pretty) { m_level--; if (!m_firstInComposite) indent(); } m_firstInComposite = false; } private void indent() { m_range.put('\n'); foreach (i; 0 .. m_level) m_range.put('\t'); } } // // deserialization // static if (isInputRange!(R)) { private { int m_line = 0; } void readDictionary(T)(scope void delegate(string) entry_callback) { m_range.skipWhitespace(&m_line); enforceJson(!m_range.empty && m_range.front == '{', "Expecting object."); m_range.popFront(); bool first = true; while(true) { m_range.skipWhitespace(&m_line); enforceJson(!m_range.empty, "Missing '}'."); if (m_range.front == '}') { m_range.popFront(); break; } else if (!first) { enforceJson(m_range.front == ',', "Expecting ',' or '}', not '"~m_range.front.to!string~"'."); m_range.popFront(); m_range.skipWhitespace(&m_line); } else first = false; auto name = m_range.skipJsonString(null, &m_line); m_range.skipWhitespace(&m_line); enforceJson(!m_range.empty && m_range.front == ':', "Expecting ':', not '"~m_range.front.to!string~"'."); m_range.popFront(); entry_callback(name); } } void readArray(T)(scope void delegate(size_t) size_callback, scope void delegate() entry_callback) { 
m_range.skipWhitespace(&m_line); enforceJson(!m_range.empty && m_range.front == '[', "Expecting array."); m_range.popFront(); bool first = true; while(true) { m_range.skipWhitespace(&m_line); enforceJson(!m_range.empty, "Missing ']'."); if (m_range.front == ']') { m_range.popFront(); break; } else if (!first) { enforceJson(m_range.front == ',', "Expecting ',' or ']'."); m_range.popFront(); } else first = false; entry_callback(); } } T readValue(T)() { m_range.skipWhitespace(&m_line); static if (is(T == typeof(null))) { enforceJson(m_range.take(4).equal("null"), "Expecting 'null'."); return null; } else static if (is(T == bool)) { bool ret = m_range.front == 't'; string expected = ret ? "true" : "false"; foreach (ch; expected) { enforceJson(m_range.front == ch, "Expecting 'true' or 'false'."); m_range.popFront(); } return ret; } else static if (is(T : long)) { bool is_float; auto num = m_range.skipNumber(is_float, null, &m_line); enforceJson(!is_float, "Expecting integer number."); return to!T(num); } else static if (is(T : real)) { bool is_float; auto num = m_range.skipNumber(is_float); return to!T(num); } else static if (is(T == string)) return m_range.skipJsonString(null, &m_line); else static if (is(T == Json)) return m_range.parseJson(&m_line); else static if (isJsonSerializable!T) return T.fromJson(m_range.parseJson(&m_line)); else static assert(false, "Unsupported type: " ~ T.stringof); } bool tryReadNull() { m_range.skipWhitespace(&m_line); if (m_range.front != 'n') return false; foreach (ch; "null") { enforceJson(m_range.front == ch, "Expecting 'null'."); m_range.popFront(); } assert(m_range.empty || m_range.front != 'l'); return true; } } } /** Writes the given JSON object as a JSON string into the destination range. This function will convert the given JSON value to a string without adding any white space between tokens (no newlines, no indentation and no padding). The output size is thus minimized, at the cost of bad human readability. 
Params: dst = References the string output range to which the result is written. json = Specifies the JSON value that is to be stringified. level = The nesting level at which to write the JSON object (for pretty output). See_Also: Json.toString, writePrettyJsonString */ void writeJsonString(R, bool pretty = false)(ref R dst, in Json json, size_t level = 0) // if( isOutputRange!R && is(ElementEncodingType!R == char) ) { final switch( json.type ){ case Json.Type.undefined: dst.put("undefined"); break; case Json.Type.null_: dst.put("null"); break; case Json.Type.bool_: dst.put(cast(bool)json ? "true" : "false"); break; case Json.Type.int_: formattedWrite(dst, "%d", json.get!long); break; case Json.Type.float_: auto d = json.get!double; if (d != d) dst.put("undefined"); // JSON has no NaN value so set null else formattedWrite(dst, "%.16g", json.get!double); break; case Json.Type.string: dst.put('\"'); jsonEscape(dst, cast(string)json); dst.put('\"'); break; case Json.Type.array: dst.put('['); bool first = true; foreach (ref const Json e; json) { if( !first ) dst.put(","); first = false; static if (pretty) { dst.put('\n'); foreach (tab; 0 .. level+1) dst.put('\t'); } if (e.type == Json.Type.undefined) dst.put("null"); else writeJsonString!(R, pretty)(dst, e, level+1); } static if (pretty) { if (json.length > 0) { dst.put('\n'); foreach (tab; 0 .. level) dst.put('\t'); } } dst.put(']'); break; case Json.Type.object: dst.put('{'); bool first = true; static if (pretty) { import std.algorithm.sorting : sort; string[] keyOrder; foreach (string key, ref const Json e; json) keyOrder ~= key; keyOrder.sort(); foreach( key; keyOrder ){ if( json[key].type == Json.Type.undefined ) continue; if( !first ) dst.put(','); first = false; dst.put('\n'); foreach (tab; 0 .. level+1) dst.put('\t'); dst.put('\"'); jsonEscape(dst, key); dst.put(pretty ? `": ` : `":`); writeJsonString!(R, pretty)(dst, json[key], level+1); } if (json.length > 0) { dst.put('\n'); foreach (tab; 0 .. 
level) dst.put('\t'); } } else { foreach( string k, ref const Json e; json ){ if( e.type == Json.Type.undefined ) continue; if( !first ) dst.put(','); first = false; dst.put('\"'); jsonEscape(dst, k); dst.put(pretty ? `": ` : `":`); writeJsonString!(R, pretty)(dst, e, level+1); } } dst.put('}'); break; } } unittest { auto a = Json.emptyObject; a["a"] = Json.emptyArray; a["b"] = Json.emptyArray; a["b"] ~= Json(1); a["b"] ~= Json.emptyObject; assert(a.toString() == `{"a":[],"b":[1,{}]}` || a.toString == `{"b":[1,{}],"a":[]}`); assert(a.toPrettyString() == `{ "a": [], "b": [ 1, {} ] }`); } unittest { // #735 auto a = Json.emptyArray; a ~= "a"; a ~= Json(); a ~= "b"; a ~= null; a ~= "c"; assert(a.toString() == `["a",null,"b",null,"c"]`); } unittest { auto a = Json.emptyArray; a ~= Json(1); a ~= Json(2); a ~= Json(3); a ~= Json(4); a ~= Json(5); auto b = Json(a[0..a.length]); assert(a == b); auto c = Json(a[0..$]); assert(a == c); assert(b == c); auto d = [Json(1),Json(2),Json(3)]; assert(d == a[0..a.length-2]); assert(d == a[0..$-2]); } unittest { auto j = Json(double.init); assert(j.toString == "undefined"); // A double nan should serialize to undefined j = 17.04f; assert(j.toString == "17.04"); // A proper double should serialize correctly double d; deserializeJson(d, Json.undefined); // Json.undefined should deserialize to nan assert(d != d); } /** Writes the given JSON object as a prettified JSON string into the destination range. The output will contain newlines and indents to make the output human readable. Params: dst = References the string output range to which the result is written. json = Specifies the JSON value that is to be stringified. level = Specifies the base amount of indentation for the output. Indentation is always done using tab characters. 
See_Also: Json.toPrettyString, writeJsonString */ void writePrettyJsonString(R)(ref R dst, in Json json, int level = 0) // if( isOutputRange!R && is(ElementEncodingType!R == char) ) { writeJsonString!(R, true)(dst, json, level); } /** Helper function that escapes all Unicode characters in a JSON string. */ string convertJsonToASCII(string json) { auto ret = appender!string; jsonEscape!true(ret, json); return ret.data; } /// private private void jsonEscape(bool escape_unicode = false, R)(ref R dst, string s) { for (size_t pos = 0; pos < s.length; pos++) { immutable(char) ch = s[pos]; switch (ch) { default: static if (escape_unicode) { if (ch > 0x20 && ch < 0x80) dst.put(ch); else { import std.utf : decode; char[13] buf; int len; dchar codepoint = decode(s, pos); import core.stdc.stdio : sprintf; /* codepoint is in BMP */ if(codepoint < 0x10000) { sprintf(&buf[0], "\\u%04X", codepoint); len = 6; } /* not in BMP -> construct a UTF-16 surrogate pair */ else { int first, last; codepoint -= 0x10000; first = 0xD800 | ((codepoint & 0xffc00) >> 10); last = 0xDC00 | (codepoint & 0x003ff); sprintf(&buf[0], "\\u%04X\\u%04X", first, last); len = 12; } pos -= 1; foreach (i; 0 .. 
len) dst.put(buf[i]); } } else { if (ch < 0x20) dst.formattedWrite("\\u%04X", ch); else dst.put(ch); } break; case '\\': dst.put("\\\\"); break; case '\r': dst.put("\\r"); break; case '\n': dst.put("\\n"); break; case '\t': dst.put("\\t"); break; case '\"': dst.put("\\\""); break; } } } /// private private string jsonUnescape(R)(ref R range, string filename, int* line) { auto ret = appender!string(); while(!range.empty){ auto ch = range.front; switch( ch ){ case '"': return ret.data; case '\\': range.popFront(); enforceJson(!range.empty, "Unterminated string escape sequence.", filename, line); switch(range.front){ default: enforceJson(false, "Invalid string escape sequence.", filename, line); break; case '"': ret.put('\"'); range.popFront(); break; case '\\': ret.put('\\'); range.popFront(); break; case '/': ret.put('/'); range.popFront(); break; case 'b': ret.put('\b'); range.popFront(); break; case 'f': ret.put('\f'); range.popFront(); break; case 'n': ret.put('\n'); range.popFront(); break; case 'r': ret.put('\r'); range.popFront(); break; case 't': ret.put('\t'); range.popFront(); break; case 'u': dchar decode_unicode_escape() { enforceJson(range.front == 'u'); range.popFront(); dchar uch = 0; foreach( i; 0 .. 
4 ){ uch *= 16; enforceJson(!range.empty, "Unicode sequence must be '\\uXXXX'.", filename, line); auto dc = range.front; range.popFront(); if( dc >= '0' && dc <= '9' ) uch += dc - '0'; else if( dc >= 'a' && dc <= 'f' ) uch += dc - 'a' + 10; else if( dc >= 'A' && dc <= 'F' ) uch += dc - 'A' + 10; else enforceJson(false, "Unicode sequence must be '\\uXXXX'.", filename, line); } return uch; } auto uch = decode_unicode_escape(); if(0xD800 <= uch && uch <= 0xDBFF) { /* surrogate pair */ range.popFront(); // backslash '\' auto uch2 = decode_unicode_escape(); enforceJson(0xDC00 <= uch2 && uch2 <= 0xDFFF, "invalid Unicode", filename, line); { /* valid second surrogate */ uch = ((uch - 0xD800) << 10) + (uch2 - 0xDC00) + 0x10000; } } ret.put(uch); break; } break; default: ret.put(ch); range.popFront(); break; } } return ret.data; } /// private private string skipNumber(R)(ref R s, out bool is_float, string filename, int* line) { // TODO: make this work with input ranges size_t idx = 0; is_float = false; if (s[idx] == '-') idx++; if (s[idx] == '0') idx++; else { enforceJson(isDigit(s[idx++]), "Digit expected at beginning of number.", filename, line); while( idx < s.length && isDigit(s[idx]) ) idx++; } if( idx < s.length && s[idx] == '.' ){ idx++; is_float = true; while( idx < s.length && isDigit(s[idx]) ) idx++; } if( idx < s.length && (s[idx] == 'e' || s[idx] == 'E') ){ idx++; is_float = true; if( idx < s.length && (s[idx] == '+' || s[idx] == '-') ) idx++; enforceJson( idx < s.length && isDigit(s[idx]), "Expected exponent." ~ s[0 .. idx], filename, line); idx++; while( idx < s.length && isDigit(s[idx]) ) idx++; } string ret = s[0 .. idx]; s = s[idx .. 
$]; return ret; } /// private private string skipJsonString(R)(ref R s, string filename, int* line) { // TODO: count or disallow any newlines inside of the string enforceJson(!s.empty && s.front == '"', "Expected '\"' to start string.", filename, line); s.popFront(); string ret = jsonUnescape(s, filename, line); enforceJson(!s.empty && s.front == '"', "Expected '\"' to terminate string.", filename, line); s.popFront(); return ret; } /// private private void skipWhitespace(R)(ref R s, int* line = null) { while (!s.empty) { switch (s.front) { default: return; case ' ', '\t': s.popFront(); break; case '\n': s.popFront(); if (!s.empty && s.front == '\r') s.popFront(); if (line) (*line)++; break; case '\r': s.popFront(); if (!s.empty && s.front == '\n') s.popFront(); if (line) (*line)++; break; } } } private bool isDigit(dchar ch) { return ch >= '0' && ch <= '9'; } private string underscoreStrip(string field_name) { if( field_name.length < 1 || field_name[$-1] != '_' ) return field_name; else return field_name[0 .. 
$-1]; } /// private package template isJsonSerializable(T) { enum isJsonSerializable = is(typeof(T.init.toJson()) == Json) && is(typeof(T.fromJson(Json())) == T); } private void enforceJson(string file = __FILE__, size_t line = __LINE__)(bool cond, lazy string message = "JSON exception") { static if (__VERSION__ >= 2079) enforce!JSONException(cond, message, file, line); else enforceEx!JSONException(cond, message, file, line); } private void enforceJson(string file = __FILE__, size_t line = __LINE__)(bool cond, lazy string message, string err_file, int err_line) { auto errmsg() { return format("%s(%s): Error: %s", err_file, err_line+1, message); } static if (__VERSION__ >= 2079) enforce!JSONException(cond, errmsg, file, line); else enforceEx!JSONException(cond, errmsg, file, line); } private void enforceJson(string file = __FILE__, size_t line = __LINE__)(bool cond, lazy string message, string err_file, int* err_line) { enforceJson!(file, line)(cond, message, err_file, err_line ? *err_line : -1); } dub-1.40.0/source/dub/internal/vibecompat/data/serialization.d000066400000000000000000001235171477246567400243570ustar00rootroot00000000000000/** Generic serialization framework. This module provides general means for implementing (de-)serialization with a standardized behavior. Supported_types: The following rules are applied in order when serializing or deserializing a certain type: $(OL $(LI An `enum` type is serialized as its raw value, except if `@byName` is used, in which case the name of the enum value is serialized.) $(LI Any type that is specifically supported by the serializer is directly serialized. For example, the BSON serializer supports `BsonObjectID` directly.) $(LI Arrays and tuples (`std.typecons.Tuple`) are serialized using the array serialization functions where each element is serialized again according to these rules.) $(LI Associative arrays are serialized similar to arrays. 
The key type of the AA must satisfy the `isStringSerializable` trait and will always be serialized as a string.) $(LI Any `Nullable!T` will be serialized as either `null`, or as the contained value (subject to these rules again).) $(LI Any `BitFlags!T` value will be serialized as `T[]`) $(LI Types satisfying the `isPolicySerializable` trait for the supplied `Policy` will be serialized as the value returned by the policy `toRepresentation` function (again subject to these rules).) $(LI Types satisfying the `isCustomSerializable` trait will be serialized as the value returned by their `toRepresentation` method (again subject to these rules).) $(LI Types satisfying the `isISOExtStringSerializable` trait will be serialized as a string, as returned by their `toISOExtString` method. This causes types such as `SysTime` to be serialized as strings.) $(LI Types satisfying the `isStringSerializable` trait will be serialized as a string, as returned by their `toString` method.) $(LI Struct and class types by default will be serialized as associative arrays, where the key is the name of the corresponding field (can be overridden using the `@name` attribute). If the struct/class is annotated with `@asArray`, it will instead be serialized as a flat array of values in the order of declaration. Null class references will be serialized as `null`.) $(LI Pointer types will be serialized as either `null`, or as the value they point to.) $(LI Built-in integers and floating point values, as well as boolean values will be converted to strings, if the serializer doesn't support them directly.) ) Note that no aliasing detection is performed, so that pointers, class references and arrays referencing the same memory will be serialized as multiple copies. When in turn deserializing the data, they will also end up as separate copies in memory. 
Serializer_implementation: Serializers are implemented in terms of a struct with template methods that get called by the serialization framework: --- struct ExampleSerializer { enum isSupportedValueType(T) = is(T == string) || is(T == typeof(null)); // serialization auto getSerializedResult(); void beginWriteDictionary(T)(); void endWriteDictionary(T)(); void beginWriteDictionaryEntry(T)(string name); void endWriteDictionaryEntry(T)(string name); void beginWriteArray(T)(size_t length); void endWriteArray(T)(); void beginWriteArrayEntry(T)(size_t index); void endWriteArrayEntry(T)(size_t index); void writeValue(T)(T value); // deserialization void readDictionary(T)(scope void delegate(string) entry_callback); void readArray(T)(scope void delegate(size_t) size_callback, scope void delegate() entry_callback); T readValue(T)(); bool tryReadNull(); } --- Copyright: © 2013-2014 rejectedsoftware e.K. License: Subject to the terms of the MIT license, as written in the included LICENSE.txt file. Authors: Sönke Ludwig */ module dub.internal.vibecompat.data.serialization; version (Have_vibe_d_data) public import vibe.data.serialization; // vibe.d 0.9.x else version (Have_vibe_serialization) public import vibe.data.serialization; // vibe.d 0.10.x+ else: import dub.internal.vibecompat.data.utils; import std.array : Appender, appender; import std.conv : to; import std.exception : enforce; import std.traits; import std.typetuple; /** Serializes a value with the given serializer. The serializer must have a value result for the first form to work. Otherwise, use the range based form. 
See_Also: `vibe.data.json.JsonSerializer`, `vibe.data.json.JsonStringSerializer`, `vibe.data.bson.BsonSerializer` */ auto serialize(Serializer, T, ARGS...)(T value, ARGS args) { auto serializer = Serializer(args); serialize(serializer, value); return serializer.getSerializedResult(); } /// ditto void serialize(Serializer, T)(ref Serializer serializer, T value) { serializeImpl!(Serializer, DefaultPolicy, T)(serializer, value); } /** Note that there is a convenience function `vibe.data.json.serializeToJson` that can be used instead of manually invoking `serialize`. */ unittest { import dub.internal.vibecompat.data.json; struct Test { int value; string text; } Test test; test.value = 12; test.text = "Hello"; Json serialized = serialize!JsonSerializer(test); assert(serialized["value"].get!int == 12); assert(serialized["text"].get!string == "Hello"); } unittest { import dub.internal.vibecompat.data.json; // Make sure that immutable(char[]) works just like string // (i.e., immutable(char)[]). immutable key = "answer"; auto ints = [key: 42]; auto serialized = serialize!JsonSerializer(ints); assert(serialized[key].get!int == 42); } /** Serializes a value with the given serializer, representing values according to `Policy` when possible. The serializer must have a value result for the first form to work. Otherwise, use the range based form. 
See_Also: `vibe.data.json.JsonSerializer`, `vibe.data.json.JsonStringSerializer`, `vibe.data.bson.BsonSerializer` */ auto serializeWithPolicy(Serializer, alias Policy, T, ARGS...)(T value, ARGS args) { auto serializer = Serializer(args); serializeWithPolicy!(Serializer, Policy)(serializer, value); return serializer.getSerializedResult(); } /// ditto void serializeWithPolicy(Serializer, alias Policy, T)(ref Serializer serializer, T value) { serializeImpl!(Serializer, Policy, T)(serializer, value); } /// version (unittest) { template SizePol(T) { import std.conv; import std.array; string toRepresentation(T value) { return to!string(value.x) ~ "x" ~ to!string(value.y); } T fromRepresentation(string value) { string[] fields = value.split('x'); alias fieldT = typeof(T.x); auto x = to!fieldT(fields[0]); auto y = to!fieldT(fields[1]); return T(x, y); } } } /// unittest { import dub.internal.vibecompat.data.json; static struct SizeI { int x; int y; } SizeI sizeI = SizeI(1,2); Json serializedI = serializeWithPolicy!(JsonSerializer, SizePol)(sizeI); assert(serializedI.get!string == "1x2"); static struct SizeF { float x; float y; } SizeF sizeF = SizeF(0.1f,0.2f); Json serializedF = serializeWithPolicy!(JsonSerializer, SizePol)(sizeF); assert(serializedF.get!string == "0.1x0.2"); } /** Deserializes and returns a serialized value. serialized_data can be either an input range or a value containing the serialized data, depending on the type of serializer used. See_Also: `vibe.data.json.JsonSerializer`, `vibe.data.json.JsonStringSerializer`, `vibe.data.bson.BsonSerializer` */ T deserialize(Serializer, T, ARGS...)(ARGS args) { auto deserializer = Serializer(args); return deserializeImpl!(T, DefaultPolicy, Serializer)(deserializer); } /** Note that there is a convenience function `vibe.data.json.deserializeJson` that can be used instead of manually invoking `deserialize`. 
*/ unittest { import dub.internal.vibecompat.data.json; struct Test { int value; string text; } Json serialized = Json.emptyObject; serialized["value"] = 12; serialized["text"] = "Hello"; Test test = deserialize!(JsonSerializer, Test)(serialized); assert(test.value == 12); assert(test.text == "Hello"); } /** Deserializes and returns a serialized value, interpreting values according to `Policy` when possible. serialized_data can be either an input range or a value containing the serialized data, depending on the type of serializer used. See_Also: `vibe.data.json.JsonSerializer`, `vibe.data.json.JsonStringSerializer`, `vibe.data.bson.BsonSerializer` */ T deserializeWithPolicy(Serializer, alias Policy, T, ARGS...)(ARGS args) { auto deserializer = Serializer(args); return deserializeImpl!(T, Policy, Serializer)(deserializer); } /// unittest { import dub.internal.vibecompat.data.json; static struct SizeI { int x; int y; } Json serializedI = "1x2"; SizeI sizeI = deserializeWithPolicy!(JsonSerializer, SizePol, SizeI)(serializedI); assert(sizeI.x == 1); assert(sizeI.y == 2); static struct SizeF { float x; float y; } Json serializedF = "0.1x0.2"; SizeF sizeF = deserializeWithPolicy!(JsonSerializer, SizePol, SizeF)(serializedF); assert(sizeF.x == 0.1f); assert(sizeF.y == 0.2f); } private void serializeImpl(Serializer, alias Policy, T, ATTRIBUTES...)(ref Serializer serializer, T value) { import std.typecons : Nullable, Tuple, tuple; import std.typecons : BitFlags; static assert(Serializer.isSupportedValueType!string, "All serializers must support string values."); static assert(Serializer.isSupportedValueType!(typeof(null)), "All serializers must support null values."); alias TU = Unqual!T; static if (is(TU == enum)) { static if (hasAttributeL!(ByNameAttribute, ATTRIBUTES)) { serializeImpl!(Serializer, Policy, string)(serializer, value.to!string()); } else { serializeImpl!(Serializer, Policy, OriginalType!TU)(serializer, cast(OriginalType!TU)value); } } else static if 
(Serializer.isSupportedValueType!TU) { static if (is(TU == typeof(null))) serializer.writeValue!TU(null); else serializer.writeValue!TU(value); } else static if (/*isInstanceOf!(Tuple, TU)*/is(T == Tuple!TPS, TPS...)) { static if (TU.Types.length == 1) { serializeImpl!(Serializer, Policy, typeof(value[0]), ATTRIBUTES)(serializer, value[0]); } else { serializer.beginWriteArray!TU(value.length); foreach (i, TV; T.Types) { serializer.beginWriteArrayEntry!TV(i); serializeImpl!(Serializer, Policy, TV, ATTRIBUTES)(serializer, value[i]); serializer.endWriteArrayEntry!TV(i); } serializer.endWriteArray!TU(); } } else static if (isArray!TU) { alias TV = typeof(value[0]); serializer.beginWriteArray!TU(value.length); foreach (i, ref el; value) { serializer.beginWriteArrayEntry!TV(i); serializeImpl!(Serializer, Policy, TV, ATTRIBUTES)(serializer, el); serializer.endWriteArrayEntry!TV(i); } serializer.endWriteArray!TU(); } else static if (isAssociativeArray!TU) { alias TK = KeyType!TU; alias TV = ValueType!TU; static if (__traits(compiles, serializer.beginWriteDictionary!TU(0))) { auto nfields = value.length; serializer.beginWriteDictionary!TU(nfields); } else { serializer.beginWriteDictionary!TU(); } foreach (key, ref el; value) { string keyname; static if (is(TK : string)) keyname = key; else static if (is(TK : real) || is(TK : long) || is(TK == enum)) keyname = key.to!string; else static if (isStringSerializable!TK) keyname = key.toString(); else static assert(false, "Associative array keys must be strings, numbers, enums, or have toString/fromString methods."); serializer.beginWriteDictionaryEntry!TV(keyname); serializeImpl!(Serializer, Policy, TV, ATTRIBUTES)(serializer, el); serializer.endWriteDictionaryEntry!TV(keyname); } static if (__traits(compiles, serializer.endWriteDictionary!TU(0))) { serializer.endWriteDictionary!TU(nfields); } else { serializer.endWriteDictionary!TU(); } } else static if (/*isInstanceOf!(Nullable, TU)*/is(T == Nullable!TPS, TPS...)) { if 
(value.isNull()) serializeImpl!(Serializer, Policy, typeof(null))(serializer, null); else serializeImpl!(Serializer, Policy, typeof(value.get()), ATTRIBUTES)(serializer, value.get()); } else static if (is(T == BitFlags!E, E)) { size_t cnt = 0; foreach (v; EnumMembers!E) if (value & v) cnt++; serializer.beginWriteArray!(E[])(cnt); cnt = 0; foreach (v; EnumMembers!E) if (value & v) { serializer.beginWriteArrayEntry!E(cnt); serializeImpl!(Serializer, Policy, E, ATTRIBUTES)(serializer, v); serializer.endWriteArrayEntry!E(cnt); cnt++; } serializer.endWriteArray!(E[])(); } else static if (isPolicySerializable!(Policy, TU)) { alias CustomType = typeof(Policy!TU.toRepresentation(TU.init)); serializeImpl!(Serializer, Policy, CustomType, ATTRIBUTES)(serializer, Policy!TU.toRepresentation(value)); } else static if (isCustomSerializable!TU) { alias CustomType = typeof(T.init.toRepresentation()); serializeImpl!(Serializer, Policy, CustomType, ATTRIBUTES)(serializer, value.toRepresentation()); } else static if (isISOExtStringSerializable!TU) { serializer.writeValue(value.toISOExtString()); } else static if (isStringSerializable!TU) { serializer.writeValue(value.toString()); } else static if (is(TU == struct) || is(TU == class)) { static if (!hasSerializableFields!TU) pragma(msg, "Serializing composite type "~T.stringof~" which has no serializable fields"); static if (is(TU == class)) { if (value is null) { serializeImpl!(Serializer, Policy, typeof(null))(serializer, null); return; } } static if (hasAttributeL!(AsArrayAttribute, ATTRIBUTES)) { enum nfields = getExpandedFieldCount!(TU, SerializableFields!TU); serializer.beginWriteArray!TU(nfields); foreach (mname; SerializableFields!TU) { alias TMS = TypeTuple!(typeof(__traits(getMember, value, mname))); foreach (j, TM; TMS) { alias TA = TypeTuple!(__traits(getAttributes, TypeTuple!(__traits(getMember, T, mname))[j])); serializer.beginWriteArrayEntry!TM(j); serializeImpl!(Serializer, Policy, TM, TA)(serializer, 
tuple(__traits(getMember, value, mname))[j]); serializer.endWriteArrayEntry!TM(j); } } serializer.endWriteArray!TU(); } else { static if (__traits(compiles, serializer.beginWriteDictionary!TU(0))) { enum nfields = getExpandedFieldCount!(TU, SerializableFields!TU); serializer.beginWriteDictionary!TU(nfields); } else { serializer.beginWriteDictionary!TU(); } foreach (mname; SerializableFields!TU) { alias TM = TypeTuple!(typeof(__traits(getMember, value, mname))); static if (TM.length == 1) { alias TA = TypeTuple!(__traits(getAttributes, __traits(getMember, T, mname))); enum name = getAttribute!(TU, mname, NameAttribute)(NameAttribute(underscoreStrip(mname))).name; auto vt = __traits(getMember, value, mname); serializer.beginWriteDictionaryEntry!(typeof(vt))(name); serializeImpl!(Serializer, Policy, typeof(vt), TA)(serializer, vt); serializer.endWriteDictionaryEntry!(typeof(vt))(name); } else { alias TA = TypeTuple!(); // FIXME: support attributes for tuples somehow enum name = underscoreStrip(mname); auto vt = tuple(__traits(getMember, value, mname)); serializer.beginWriteDictionaryEntry!(typeof(vt))(name); serializeImpl!(Serializer, Policy, typeof(vt), TA)(serializer, vt); serializer.endWriteDictionaryEntry!(typeof(vt))(name); } } static if (__traits(compiles, serializer.endWriteDictionary!TU(0))) { serializer.endWriteDictionary!TU(nfields); } else { serializer.endWriteDictionary!TU(); } } } else static if (isPointer!TU) { if (value is null) { serializer.writeValue(null); return; } serializeImpl!(Serializer, Policy, PointerTarget!TU)(serializer, *value); } else static if (is(TU == bool) || is(TU : real) || is(TU : long)) { serializeImpl!(Serializer, Policy, string)(serializer, to!string(value)); } else static assert(false, "Unsupported serialization type: " ~ T.stringof); } private T deserializeImpl(T, alias Policy, Serializer, ATTRIBUTES...)(ref Serializer deserializer) { import std.typecons : Nullable; import std.typecons : BitFlags; static 
assert(Serializer.isSupportedValueType!string, "All serializers must support string values."); static assert(Serializer.isSupportedValueType!(typeof(null)), "All serializers must support null values."); static if (is(T == enum)) { static if (hasAttributeL!(ByNameAttribute, ATTRIBUTES)) { return deserializeImpl!(string, Policy, Serializer)(deserializer).to!T(); } else { return cast(T)deserializeImpl!(OriginalType!T, Policy, Serializer)(deserializer); } } else static if (Serializer.isSupportedValueType!T) { return deserializer.readValue!T(); } else static if (isStaticArray!T) { alias TV = typeof(T.init[0]); T ret; size_t i = 0; deserializer.readArray!T((sz) { assert(sz == 0 || sz == T.length); }, { assert(i < T.length); ret[i++] = deserializeImpl!(TV, Policy, Serializer, ATTRIBUTES)(deserializer); }); return ret; } else static if (isDynamicArray!T) { alias TV = typeof(T.init[0]); //auto ret = appender!T(); T ret; // Cannot use appender because of DMD BUG 10690/10859/11357 deserializer.readArray!T((sz) { ret.reserve(sz); }, () { ret ~= deserializeImpl!(TV, Policy, Serializer, ATTRIBUTES)(deserializer); }); return ret;//cast(T)ret.data; } else static if (isAssociativeArray!T) { alias TK = KeyType!T; alias TV = ValueType!T; T ret; deserializer.readDictionary!T((name) { TK key; static if (is(TK == string)) key = name; else static if (is(TK : real) || is(TK : long) || is(TK == enum)) key = name.to!TK; else static if (isStringSerializable!TK) key = TK.fromString(name); else static assert(false, "Associative array keys must be strings, numbers, enums, or have toString/fromString methods."); ret[key] = deserializeImpl!(TV, Policy, Serializer, ATTRIBUTES)(deserializer); }); return ret; } else static if (isInstanceOf!(Nullable, T)) { if (deserializer.tryReadNull()) return T.init; return T(deserializeImpl!(typeof(T.init.get()), Policy, Serializer, ATTRIBUTES)(deserializer)); } else static if (is(T == BitFlags!E, E)) { T ret; deserializer.readArray!(E[])((sz) {}, { ret |= 
deserializeImpl!(E, Policy, Serializer, ATTRIBUTES)(deserializer); }); return ret; } else static if (isPolicySerializable!(Policy, T)) { alias CustomType = typeof(Policy!T.toRepresentation(T.init)); return Policy!T.fromRepresentation(deserializeImpl!(CustomType, Policy, Serializer, ATTRIBUTES)(deserializer)); } else static if (isCustomSerializable!T) { alias CustomType = typeof(T.init.toRepresentation()); return T.fromRepresentation(deserializeImpl!(CustomType, Policy, Serializer, ATTRIBUTES)(deserializer)); } else static if (isISOExtStringSerializable!T) { return T.fromISOExtString(deserializer.readValue!string()); } else static if (isStringSerializable!T) { return T.fromString(deserializer.readValue!string()); } else static if (is(T == struct) || is(T == class)) { static if (is(T == class)) { if (deserializer.tryReadNull()) return null; } bool[__traits(allMembers, T).length] set; string name; T ret; static if (is(T == class)) ret = new T; static if (hasAttributeL!(AsArrayAttribute, ATTRIBUTES)) { size_t idx = 0; deserializer.readArray!T((sz){}, { static if (hasSerializableFields!T) { switch (idx++) { default: break; foreach (i, mname; SerializableFields!T) { alias TM = typeof(__traits(getMember, ret, mname)); alias TA = TypeTuple!(__traits(getAttributes, __traits(getMember, ret, mname))); case i: static if (hasAttribute!(OptionalAttribute, __traits(getMember, T, mname))) if (deserializer.tryReadNull()) return; set[i] = true; __traits(getMember, ret, mname) = deserializeImpl!(TM, Serializer, TA)(deserializer); break; } } } else { pragma(msg, "Deserializing composite type "~T.stringof~" which has no serializable fields."); } }); } else { deserializer.readDictionary!T((name) { static if (hasSerializableFields!T) { switch (name) { default: break; foreach (i, mname; SerializableFields!T) { alias TM = typeof(__traits(getMember, ret, mname)); alias TA = TypeTuple!(__traits(getAttributes, __traits(getMember, ret, mname))); enum fname = getAttribute!(T, mname, 
NameAttribute)(NameAttribute(underscoreStrip(mname))).name; case fname: static if (hasAttribute!(OptionalAttribute, __traits(getMember, T, mname))) if (deserializer.tryReadNull()) return; set[i] = true; __traits(getMember, ret, mname) = deserializeImpl!(TM, Policy, Serializer, TA)(deserializer); break; } } } else { pragma(msg, "Deserializing composite type "~T.stringof~" which has no serializable fields."); } }); } foreach (i, mname; SerializableFields!T) static if (!hasAttribute!(OptionalAttribute, __traits(getMember, T, mname))) enforce(set[i], "Missing non-optional field '"~mname~"' of type '"~T.stringof~"'."); return ret; } else static if (isPointer!T) { if (deserializer.tryReadNull()) return null; alias PT = PointerTarget!T; auto ret = new PT; *ret = deserializeImpl!(PT, Policy, Serializer)(deserializer); return ret; } else static if (is(T == bool) || is(T : real) || is(T : long)) { return to!T(deserializeImpl!(string, Policy, Serializer)(deserializer)); } else static assert(false, "Unsupported serialization type: " ~ T.stringof); } /** Attribute for overriding the field name during (de-)serialization. */ NameAttribute name(string name) { return NameAttribute(name); } /// unittest { struct Test { @name("screen-size") int screenSize; } } /** Attribute marking a field as optional during deserialization. */ @property OptionalAttribute optional() { return OptionalAttribute(); } /// unittest { struct Test { // does not need to be present during deserialization @optional int screenSize = 100; } } /** Attribute for marking non-serialized fields. */ @property IgnoreAttribute ignore() { return IgnoreAttribute(); } /// unittest { struct Test { // is neither serialized not deserialized @ignore int screenSize; } } /** Attribute for forcing serialization of enum fields by name instead of by value. */ @property ByNameAttribute byName() { return ByNameAttribute(); } /// unittest { enum Color { red, green, blue } struct Test { // serialized as an int (e.g. 
1 for Color.green) Color color; // serialized as a string (e.g. "green" for Color.green) @byName Color namedColor; // serialized as array of ints Color[] colorArray; // serialized as array of strings @byName Color[] namedColorArray; } } /** Attribute for representing a struct/class as an array instead of an object. Usually structs and class objects are serialized as dictionaries mapping from field name to value. Using this attribute, they will be serialized as a flat array instead. Note that changing the layout will make any already serialized data mismatch when this attribute is used. */ @property AsArrayAttribute asArray() { return AsArrayAttribute(); } /// unittest { struct Fields { int f1; string f2; double f3; } struct Test { // serialized as name:value pairs ["f1": int, "f2": string, "f3": double] Fields object; // serialized as a sequential list of values [int, string, double] @asArray Fields array; } import dub.internal.vibecompat.data.json; static assert(is(typeof(serializeToJson(Test())))); } /// enum FieldExistence { missing, exists, defer } /// User defined attribute (not intended for direct use) struct NameAttribute { string name; } /// ditto struct OptionalAttribute {} /// ditto struct IgnoreAttribute {} /// ditto struct ByNameAttribute {} /// ditto struct AsArrayAttribute {} /** Checks if a given type has a custom serialization representation. A class or struct type is custom serializable if it defines a pair of `toRepresentation`/`fromRepresentation` methods. Any class or struct type that has this trait will be serialized by using the return value of it's `toRepresentation` method instead of the original value. This trait has precedence over `isISOExtStringSerializable` and `isStringSerializable`. 
*/ template isCustomSerializable(T) { enum bool isCustomSerializable = is(typeof(T.init.toRepresentation())) && is(typeof(T.fromRepresentation(T.init.toRepresentation())) == T); } /// unittest { // represented as a single uint when serialized static struct S { ushort x, y; uint toRepresentation() const { return x + (y << 16); } static S fromRepresentation(uint i) { return S(i & 0xFFFF, i >> 16); } } static assert(isCustomSerializable!S); } /** Checks if a given type has an ISO extended string serialization representation. A class or struct type is ISO extended string serializable if it defines a pair of `toISOExtString`/`fromISOExtString` methods. Any class or struct type that has this trait will be serialized by using the return value of it's `toISOExtString` method instead of the original value. This is mainly useful for supporting serialization of the the date/time types in `std.datetime`. This trait has precedence over `isStringSerializable`. */ template isISOExtStringSerializable(T) { enum bool isISOExtStringSerializable = is(typeof(T.init.toISOExtString()) == string) && is(typeof(T.fromISOExtString("")) == T); } /// unittest { import std.datetime; static assert(isISOExtStringSerializable!DateTime); static assert(isISOExtStringSerializable!SysTime); // represented as an ISO extended string when serialized static struct S { // dummy example implementations string toISOExtString() const { return ""; } static S fromISOExtString(string s) { return S.init; } } static assert(isISOExtStringSerializable!S); } /** Checks if a given type has a string serialization representation. A class or struct type is string serializable if it defines a pair of `toString`/`fromString` methods. Any class or struct type that has this trait will be serialized by using the return value of it's `toString` method instead of the original value. 
*/ template isStringSerializable(T) { enum bool isStringSerializable = is(typeof(T.init.toString()) == string) && is(typeof(T.fromString("")) == T); } /// unittest { import std.conv; // represented as the boxed value when serialized static struct Box(T) { T value; } template BoxPol(S) { auto toRepresentation(S s) { return s.value; } S fromRepresentation(typeof(S.init.value) v) { return S(v); } } static assert(isPolicySerializable!(BoxPol, Box!int)); } private template DefaultPolicy(T) { } /** Checks if a given policy supports custom serialization for a given type. A class or struct type is custom serializable according to a policy if the policy defines a pair of `toRepresentation`/`fromRepresentation` functions. Any class or struct type that has this trait for the policy supplied to `serializeWithPolicy` will be serialized by using the return value of the policy `toRepresentation` function instead of the original value. This trait has precedence over `isCustomSerializable`, `isISOExtStringSerializable` and `isStringSerializable`. See_Also: `vibe.data.serialization.serializeWithPolicy` */ template isPolicySerializable(alias Policy, T) { enum bool isPolicySerializable = is(typeof(Policy!T.toRepresentation(T.init))) && is(typeof(Policy!T.fromRepresentation(Policy!T.toRepresentation(T.init))) == T); } /// unittest { import std.conv; // represented as a string when serialized static struct S { int value; // dummy example implementations string toString() const { return value.to!string(); } static S fromString(string s) { return S(s.to!int()); } } static assert(isStringSerializable!S); } /** Chains serialization policy. Constructs a serialization policy that given a type `T` will apply the first compatible policy `toRepresentation` and `fromRepresentation` functions. Policies are evaluated left-to-right according to `isPolicySerializable`. See_Also: `vibe.data.serialization.serializeWithPolicy` */ template ChainedPolicy(alias Primary, Fallbacks...) 
{ static if (Fallbacks.length == 0) { alias ChainedPolicy = Primary; } else { alias ChainedPolicy = ChainedPolicy!(ChainedPolicyImpl!(Primary, Fallbacks[0]), Fallbacks[1..$]); } } /// unittest { import std.conv; // To be represented as the boxed value when serialized static struct Box(T) { T value; } // Also to be represented as the boxed value when serialized, but has // a different way to access the value. static struct Box2(T) { private T v; ref T get() { return v; } } template BoxPol(S) { auto toRepresentation(S s) { return s.value; } S fromRepresentation(typeof(toRepresentation(S.init)) v) { return S(v); } } template Box2Pol(S) { auto toRepresentation(S s) { return s.get(); } S fromRepresentation(typeof(toRepresentation(S.init)) v) { S s; s.get() = v; return s; } } alias ChainPol = ChainedPolicy!(BoxPol, Box2Pol); static assert(!isPolicySerializable!(BoxPol, Box2!int)); static assert(!isPolicySerializable!(Box2Pol, Box!int)); static assert(isPolicySerializable!(ChainPol, Box!int)); static assert(isPolicySerializable!(ChainPol, Box2!int)); } private template ChainedPolicyImpl(alias Primary, alias Fallback) { template Pol(T) { static if (isPolicySerializable!(Primary, T)) { alias toRepresentation = Primary!T.toRepresentation; alias fromRepresentation = Primary!T.fromRepresentation; } else { alias toRepresentation = Fallback!T.toRepresentation; alias fromRepresentation = Fallback!T.fromRepresentation; } } alias ChainedPolicyImpl = Pol; } private template hasAttribute(T, alias decl) { enum hasAttribute = findFirstUDA!(T, decl).found; } unittest { @asArray int i1; static assert(hasAttribute!(AsArrayAttribute, i1)); int i2; static assert(!hasAttribute!(AsArrayAttribute, i2)); } private template hasAttributeL(T, ATTRIBUTES...) { static if (ATTRIBUTES.length == 1) { enum hasAttributeL = is(typeof(ATTRIBUTES[0]) == T); } else static if (ATTRIBUTES.length > 1) { enum hasAttributeL = hasAttributeL!(T, ATTRIBUTES[0 .. $/2]) || hasAttributeL!(T, ATTRIBUTES[$/2 .. 
$]); } else { enum hasAttributeL = false; } } unittest { static assert(hasAttributeL!(AsArrayAttribute, byName, asArray)); static assert(!hasAttributeL!(AsArrayAttribute, byName)); } private static T getAttribute(TT, string mname, T)(T default_value) { enum val = findFirstUDA!(T, __traits(getMember, TT, mname)); static if (val.found) return val.value; else return default_value; } private string underscoreStrip(string field_name) { if( field_name.length < 1 || field_name[$-1] != '_' ) return field_name; else return field_name[0 .. $-1]; } private template hasSerializableFields(T, size_t idx = 0) { enum hasSerializableFields = SerializableFields!(T).length > 0; /*static if (idx < __traits(allMembers, T).length) { enum mname = __traits(allMembers, T)[idx]; static if (!isRWPlainField!(T, mname) && !isRWField!(T, mname)) enum hasSerializableFields = hasSerializableFields!(T, idx+1); else static if (hasAttribute!(IgnoreAttribute, __traits(getMember, T, mname))) enum hasSerializableFields = hasSerializableFields!(T, idx+1); else enum hasSerializableFields = true; } else enum hasSerializableFields = false;*/ } private template SerializableFields(COMPOSITE) { alias SerializableFields = FilterSerializableFields!(COMPOSITE, __traits(allMembers, COMPOSITE)); } private template FilterSerializableFields(COMPOSITE, FIELDS...) { static if (FIELDS.length > 1) { alias FilterSerializableFields = TypeTuple!( FilterSerializableFields!(COMPOSITE, FIELDS[0 .. $/2]), FilterSerializableFields!(COMPOSITE, FIELDS[$/2 .. 
$])); } else static if (FIELDS.length == 1) { alias T = COMPOSITE; enum mname = FIELDS[0]; static if (isRWPlainField!(T, mname) || isRWField!(T, mname)) { alias Tup = TypeTuple!(__traits(getMember, COMPOSITE, FIELDS[0])); static if (Tup.length != 1) { alias FilterSerializableFields = TypeTuple!(mname); } else { static if (!hasAttribute!(IgnoreAttribute, __traits(getMember, T, mname))) alias FilterSerializableFields = TypeTuple!(mname); else alias FilterSerializableFields = TypeTuple!(); } } else alias FilterSerializableFields = TypeTuple!(); } else alias FilterSerializableFields = TypeTuple!(); } private size_t getExpandedFieldCount(T, FIELDS...)() { size_t ret = 0; foreach (F; FIELDS) ret += TypeTuple!(__traits(getMember, T, F)).length; return ret; } /******************************************************************************/ /* General serialization unit testing */ /******************************************************************************/ version (unittest) { private struct TestSerializer { import std.array, std.conv, std.string; string result; enum isSupportedValueType(T) = is(T == string) || is(T == typeof(null)) || is(T == float) || is (T == int); string getSerializedResult() { return result; } void beginWriteDictionary(T)() { result ~= "D("~T.mangleof~"){"; } void endWriteDictionary(T)() { result ~= "}D("~T.mangleof~")"; } void beginWriteDictionaryEntry(T)(string name) { result ~= "DE("~T.mangleof~","~name~")("; } void endWriteDictionaryEntry(T)(string name) { result ~= ")DE("~T.mangleof~","~name~")"; } void beginWriteArray(T)(size_t length) { result ~= "A("~T.mangleof~")["~length.to!string~"]["; } void endWriteArray(T)() { result ~= "]A("~T.mangleof~")"; } void beginWriteArrayEntry(T)(size_t i) { result ~= "AE("~T.mangleof~","~i.to!string~")("; } void endWriteArrayEntry(T)(size_t i) { result ~= ")AE("~T.mangleof~","~i.to!string~")"; } void writeValue(T)(T value) { if (is(T == typeof(null))) result ~= "null"; else { assert(isSupportedValueType!T); 
result ~= "V("~T.mangleof~")("~value.to!string~")"; } } // deserialization void readDictionary(T)(scope void delegate(string) entry_callback) { skip("D("~T.mangleof~"){"); while (result.startsWith("DE(")) { result = result[3 .. $]; auto idx = result.indexOf(','); auto idx2 = result.indexOf(")("); assert(idx > 0 && idx2 > idx); auto t = result[0 .. idx]; auto n = result[idx+1 .. idx2]; result = result[idx2+2 .. $]; entry_callback(n); skip(")DE("~t~","~n~")"); } skip("}D("~T.mangleof~")"); } void readArray(T)(scope void delegate(size_t) size_callback, scope void delegate() entry_callback) { skip("A("~T.mangleof~")["); auto bidx = result.indexOf("]["); assert(bidx > 0); auto cnt = result[0 .. bidx].to!size_t; result = result[bidx+2 .. $]; size_t i = 0; while (result.startsWith("AE(")) { result = result[3 .. $]; auto idx = result.indexOf(','); auto idx2 = result.indexOf(")("); assert(idx > 0 && idx2 > idx); auto t = result[0 .. idx]; auto n = result[idx+1 .. idx2]; result = result[idx2+2 .. $]; assert(n == i.to!string); entry_callback(); skip(")AE("~t~","~n~")"); i++; } skip("]A("~T.mangleof~")"); assert(i == cnt); } T readValue(T)() { skip("V("~T.mangleof~")("); auto idx = result.indexOf(')'); assert(idx >= 0); auto ret = result[0 .. idx].to!T; result = result[idx+1 .. $]; return ret; } void skip(string prefix) { assert(result.startsWith(prefix), result); result = result[prefix.length .. $]; } bool tryReadNull() { if (result.startsWith("null")) { result = result[4 .. 
$]; return true; } else return false; } } } unittest { // basic serialization behavior import std.typecons : Nullable; static void test(T)(T value, string expected) { assert(serialize!TestSerializer(value) == expected, serialize!TestSerializer(value)); static if (isPointer!T) { if (value) assert(*deserialize!(TestSerializer, T)(expected) == *value); else assert(deserialize!(TestSerializer, T)(expected) is null); } else static if (is(T == Nullable!U, U)) { if (value.isNull()) assert(deserialize!(TestSerializer, T)(expected).isNull); else assert(deserialize!(TestSerializer, T)(expected) == value); } else assert(deserialize!(TestSerializer, T)(expected) == value); } test("hello", "V(Aya)(hello)"); test(12, "V(i)(12)"); test(12.0, "V(Aya)(12)"); test(12.0f, "V(f)(12)"); assert(serialize!TestSerializer(null) == "null"); test(["hello", "world"], "A(AAya)[2][AE(Aya,0)(V(Aya)(hello))AE(Aya,0)AE(Aya,1)(V(Aya)(world))AE(Aya,1)]A(AAya)"); string mangleOfAA = (string[string]).mangleof; test(["hello": "world"], "D(" ~ mangleOfAA ~ "){DE(Aya,hello)(V(Aya)(world))DE(Aya,hello)}D(" ~ mangleOfAA ~ ")"); test(cast(int*)null, "null"); int i = 42; test(&i, "V(i)(42)"); Nullable!int j; test(j, "null"); j = 42; test(j, "V(i)(42)"); } unittest { // basic user defined types static struct S { string f; } enum Sm = S.mangleof; auto s = S("hello"); enum s_ser = "D("~Sm~"){DE(Aya,f)(V(Aya)(hello))DE(Aya,f)}D("~Sm~")"; assert(serialize!TestSerializer(s) == s_ser, serialize!TestSerializer(s)); assert(deserialize!(TestSerializer, S)(s_ser) == s); static class C { string f; } enum Cm = C.mangleof; C c; assert(serialize!TestSerializer(c) == "null"); c = new C; c.f = "hello"; enum c_ser = "D("~Cm~"){DE(Aya,f)(V(Aya)(hello))DE(Aya,f)}D("~Cm~")"; assert(serialize!TestSerializer(c) == c_ser); assert(deserialize!(TestSerializer, C)(c_ser).f == c.f); enum E { hello, world } assert(serialize!TestSerializer(E.hello) == "V(i)(0)"); assert(serialize!TestSerializer(E.world) == "V(i)(1)"); } unittest { // 
tuple serialization import std.typecons : Tuple; static struct S(T...) { T f; } enum Sm = S!(int, string).mangleof; enum Tum = Tuple!(int, string).mangleof; auto s = S!(int, string)(42, "hello"); assert(serialize!TestSerializer(s) == "D("~Sm~"){DE("~Tum~",f)(A("~Tum~")[2][AE(i,0)(V(i)(42))AE(i,0)AE(Aya,1)(V(Aya)(hello))AE(Aya,1)]A("~Tum~"))DE("~Tum~",f)}D("~Sm~")"); static struct T { @asArray S!(int, string) g; } enum Tm = T.mangleof; auto t = T(s); assert(serialize!TestSerializer(t) == "D("~Tm~"){DE("~Sm~",g)(A("~Sm~")[2][AE(i,0)(V(i)(42))AE(i,0)AE(Aya,1)(V(Aya)(hello))AE(Aya,1)]A("~Sm~"))DE("~Sm~",g)}D("~Tm~")"); } unittest { // testing the various UDAs enum E { hello, world } enum Em = E.mangleof; static struct S { @byName E e; @ignore int i; @optional float f; } enum Sm = S.mangleof; auto s = S(E.world, 42, 1.0f); assert(serialize!TestSerializer(s) == "D("~Sm~"){DE("~Em~",e)(V(Aya)(world))DE("~Em~",e)DE(f,f)(V(f)(1))DE(f,f)}D("~Sm~")"); } unittest { // custom serialization support // iso-ext import std.datetime; auto t = TimeOfDay(6, 31, 23); assert(serialize!TestSerializer(t) == "V(Aya)(06:31:23)"); auto d = Date(1964, 1, 23); assert(serialize!TestSerializer(d) == "V(Aya)(1964-01-23)"); auto dt = DateTime(d, t); assert(serialize!TestSerializer(dt) == "V(Aya)(1964-01-23T06:31:23)"); auto st = SysTime(dt, UTC()); assert(serialize!TestSerializer(st) == "V(Aya)(1964-01-23T06:31:23Z)"); // string struct S1 { int i; string toString() const { return "hello"; } static S1 fromString(string) { return S1.init; } } struct S2 { int i; string toString() const { return "hello"; } } enum S2m = S2.mangleof; struct S3 { int i; static S3 fromString(string) { return S3.init; } } enum S3m = S3.mangleof; assert(serialize!TestSerializer(S1.init) == "V(Aya)(hello)"); assert(serialize!TestSerializer(S2.init) == "D("~S2m~"){DE(i,i)(V(i)(0))DE(i,i)}D("~S2m~")"); assert(serialize!TestSerializer(S3.init) == "D("~S3m~"){DE(i,i)(V(i)(0))DE(i,i)}D("~S3m~")"); // custom struct C1 { int i; 
float toRepresentation() const { return 1.0f; } static C1 fromRepresentation(float f) { return C1.init; } } struct C2 { int i; float toRepresentation() const { return 1.0f; } } enum C2m = C2.mangleof; struct C3 { int i; static C3 fromRepresentation(float f) { return C3.init; } } enum C3m = C3.mangleof; assert(serialize!TestSerializer(C1.init) == "V(f)(1)"); assert(serialize!TestSerializer(C2.init) == "D("~C2m~"){DE(i,i)(V(i)(0))DE(i,i)}D("~C2m~")"); assert(serialize!TestSerializer(C3.init) == "D("~C3m~"){DE(i,i)(V(i)(0))DE(i,i)}D("~C3m~")"); } unittest // Testing corner case: member function returning by ref { import dub.internal.vibecompat.data.json; static struct S { int i; ref int foo() return { return i; } } static assert(__traits(compiles, { S().serializeToJson(); })); static assert(__traits(compiles, { Json().deserializeJson!S(); })); auto s = S(1); assert(s.serializeToJson().deserializeJson!S() == s); } unittest // Testing corner case: Variadic template constructors and methods { import dub.internal.vibecompat.data.json; static struct S { int i; this(Args...)(Args args) {} int foo(Args...)(Args args) { return i; } ref int bar(Args...)(Args args) { return i; } } static assert(__traits(compiles, { S().serializeToJson(); })); static assert(__traits(compiles, { Json().deserializeJson!S(); })); auto s = S(1); assert(s.serializeToJson().deserializeJson!S() == s); } unittest // Make sure serializing through properties still works { import dub.internal.vibecompat.data.json; static struct S { public int i; private int privateJ; @property int j() { return privateJ; } @property void j(int j) { privateJ = j; } } auto s = S(1, 2); assert(s.serializeToJson().deserializeJson!S() == s); } unittest { // test BitFlags serialization import std.typecons : BitFlags; enum Flag { a = 1<<0, b = 1<<1, c = 1<<2 } enum Flagm = Flag.mangleof; alias Flags = BitFlags!Flag; enum Flagsm = Flags.mangleof; enum Fi_ser = "A(A"~Flagm~")[0][]A(A"~Flagm~")"; 
assert(serialize!TestSerializer(Flags.init) == Fi_ser); enum Fac_ser = "A(A"~Flagm~")[2][AE("~Flagm~",0)(V(i)(1))AE("~Flagm~",0)AE("~Flagm~",1)(V(i)(4))AE("~Flagm~",1)]A(A"~Flagm~")"; assert(serialize!TestSerializer(Flags(Flag.a, Flag.c)) == Fac_ser); struct S { @byName Flags f; } enum Sm = S.mangleof; enum Sac_ser = "D("~Sm~"){DE("~Flagsm~",f)(A(A"~Flagm~")[2][AE("~Flagm~",0)(V(Aya)(a))AE("~Flagm~",0)AE("~Flagm~",1)(V(Aya)(c))AE("~Flagm~",1)]A(A"~Flagm~"))DE("~Flagsm~",f)}D("~Sm~")"; assert(serialize!TestSerializer(S(Flags(Flag.a, Flag.c))) == Sac_ser); assert(deserialize!(TestSerializer, Flags)(Fi_ser) == Flags.init); assert(deserialize!(TestSerializer, Flags)(Fac_ser) == Flags(Flag.a, Flag.c)); assert(deserialize!(TestSerializer, S)(Sac_ser) == S(Flags(Flag.a, Flag.c))); } dub-1.40.0/source/dub/internal/vibecompat/data/utils.d000066400000000000000000000452021477246567400226340ustar00rootroot00000000000000/** Utility functions for data serialization Copyright: © 2012 rejectedsoftware e.K. License: Subject to the terms of the MIT license, as written in the included LICENSE.txt file. Authors: Sönke Ludwig */ module dub.internal.vibecompat.data.utils; version (Have_vibe_d_data) {} // vibe.d 0.9.x else version (Have_vibe_serialization) {} // vibe.d 0.10.x+ else: public import std.traits; /** Checks if given type is a getter function type Returns: `true` if argument is a getter */ template isPropertyGetter(T...) 
if (T.length == 1) { import std.traits : functionAttributes, FunctionAttribute, ReturnType, isSomeFunction; static if (isSomeFunction!(T[0])) { enum isPropertyGetter = (functionAttributes!(T[0]) & FunctionAttribute.property) != 0 && !is(ReturnType!T == void); } else enum isPropertyGetter = false; } /// unittest { interface Test { @property int getter(); @property void setter(int); int simple(); } static assert(isPropertyGetter!(typeof(&Test.getter))); static assert(!isPropertyGetter!(typeof(&Test.setter))); static assert(!isPropertyGetter!(typeof(&Test.simple))); static assert(!isPropertyGetter!int); } /** Checks if given type is a setter function type Returns: `true` if argument is a setter */ template isPropertySetter(T...) if (T.length == 1) { import std.traits : functionAttributes, FunctionAttribute, ReturnType, isSomeFunction; static if (isSomeFunction!(T[0])) { enum isPropertySetter = (functionAttributes!(T) & FunctionAttribute.property) != 0 && is(ReturnType!(T[0]) == void); } else enum isPropertySetter = false; } /// unittest { interface Test { @property int getter(); @property void setter(int); int simple(); } static assert(isPropertySetter!(typeof(&Test.setter))); static assert(!isPropertySetter!(typeof(&Test.getter))); static assert(!isPropertySetter!(typeof(&Test.simple))); static assert(!isPropertySetter!int); } /** Deduces single base interface for a type. Multiple interfaces will result in compile-time error. Params: T = interface or class type Returns: T if it is an interface. If T is a class, interface it implements. 
*/ template baseInterface(T) if (is(T == interface) || is(T == class)) { import std.traits : InterfacesTuple; static if (is(T == interface)) { alias baseInterface = T; } else { alias Ifaces = InterfacesTuple!T; static assert ( Ifaces.length == 1, "Type must be either provided as an interface or implement only one interface" ); alias baseInterface = Ifaces[0]; } } /// unittest { interface I1 { } class A : I1 { } interface I2 { } class B : I1, I2 { } static assert (is(baseInterface!I1 == I1)); static assert (is(baseInterface!A == I1)); static assert (!is(typeof(baseInterface!B))); } /** Determines if a member is a public, non-static data field. */ template isRWPlainField(T, string M) { static if (!isRWField!(T, M)) enum isRWPlainField = false; else { //pragma(msg, T.stringof~"."~M~":"~typeof(__traits(getMember, T, M)).stringof); enum isRWPlainField = __traits(compiles, *(&__traits(getMember, Tgen!T(), M)) = *(&__traits(getMember, Tgen!T(), M))); } } /** Determines if a member is a public, non-static, de-facto data field. In addition to plain data fields, R/W properties are also accepted. 
*/ template isRWField(T, string M) { import std.traits; import std.typetuple; static void testAssign()() { T t = void; __traits(getMember, t, M) = __traits(getMember, t, M); } // reject type aliases static if (is(TypeTuple!(__traits(getMember, T, M)))) enum isRWField = false; // reject non-public members else static if (!isPublicMember!(T, M)) enum isRWField = false; // reject static members else static if (!isNonStaticMember!(T, M)) enum isRWField = false; // reject non-typed members else static if (!is(typeof(__traits(getMember, T, M)))) enum isRWField = false; // reject void typed members (includes templates) else static if (is(typeof(__traits(getMember, T, M)) == void)) enum isRWField = false; // reject non-assignable members else static if (!__traits(compiles, testAssign!()())) enum isRWField = false; else static if (anySatisfy!(isSomeFunction, __traits(getMember, T, M))) { // If M is a function, reject if not @property or returns by ref private enum FA = functionAttributes!(__traits(getMember, T, M)); enum isRWField = (FA & FunctionAttribute.property) != 0; } else { enum isRWField = true; } } unittest { import std.algorithm; struct S { alias a = int; // alias int i; // plain RW field enum j = 42; // manifest constant static int k = 42; // static field private int privateJ; // private RW field this(Args...)(Args args) {} // read-write property (OK) @property int p1() { return privateJ; } @property void p1(int j) { privateJ = j; } // read-only property (NO) @property int p2() { return privateJ; } // write-only property (NO) @property void p3(int value) { privateJ = value; } // ref returning property (OK) @property ref int p4() return { return i; } // parameter-less template property (OK) @property ref int p5()() { return i; } // not treated as a property by DMD, so not a field @property int p6()() { return privateJ; } @property void p6(int j)() { privateJ = j; } static @property int p7() { return k; } static @property void p7(int value) { k = value; } ref int 
f1() return { return i; } // ref returning function (no field) int f2(Args...)(Args args) { return i; } ref int f3(Args...)(Args args) { return i; } void someMethod() {} ref int someTempl()() { return i; } } immutable plainFields = ["i"]; immutable fields = ["i", "p1", "p4", "p5"]; foreach (mem; __traits(allMembers, S)) { static if (isRWField!(S, mem)) static assert(fields.canFind(mem), mem~" detected as field."); else static assert(!fields.canFind(mem), mem~" not detected as field."); static if (isRWPlainField!(S, mem)) static assert(plainFields.canFind(mem), mem~" not detected as plain field."); else static assert(!plainFields.canFind(mem), mem~" not detected as plain field."); } } package T Tgen(T)(){ return T.init; } /** Tests if the protection of a member is public. */ template isPublicMember(T, string M) { import std.algorithm, std.typetuple : TypeTuple; static if (!__traits(compiles, TypeTuple!(__traits(getMember, T, M)))) enum isPublicMember = false; else { alias MEM = TypeTuple!(__traits(getMember, T, M)); enum _prot = __traits(getProtection, MEM); enum isPublicMember = _prot == "public" || _prot == "export"; } } unittest { class C { int a; export int b; protected int c; private int d; package int e; void f() {} static void g() {} private void h() {} private static void i() {} } static assert (isPublicMember!(C, "a")); static assert (isPublicMember!(C, "b")); static assert (!isPublicMember!(C, "c")); static assert (!isPublicMember!(C, "d")); static assert (!isPublicMember!(C, "e")); static assert (isPublicMember!(C, "f")); static assert (isPublicMember!(C, "g")); static assert (!isPublicMember!(C, "h")); static assert (!isPublicMember!(C, "i")); struct S { int a; export int b; private int d; package int e; } static assert (isPublicMember!(S, "a")); static assert (isPublicMember!(S, "b")); static assert (!isPublicMember!(S, "d")); static assert (!isPublicMember!(S, "e")); S s; s.a = 21; assert(s.a == 21); } /** Tests if a member requires $(D this) to be 
used. */ template isNonStaticMember(T, string M) { import std.typetuple; import std.traits; alias MF = TypeTuple!(__traits(getMember, T, M)); static if (M.length == 0) { enum isNonStaticMember = false; } else static if (anySatisfy!(isSomeFunction, MF)) { enum isNonStaticMember = !__traits(isStaticFunction, MF); } else { enum isNonStaticMember = !__traits(compiles, (){ auto x = __traits(getMember, T, M); }()); } } unittest { // normal fields struct S { int a; static int b; enum c = 42; void f(); static void g(); ref int h() return { return a; } static ref int i() { return b; } } static assert(isNonStaticMember!(S, "a")); static assert(!isNonStaticMember!(S, "b")); static assert(!isNonStaticMember!(S, "c")); static assert(isNonStaticMember!(S, "f")); static assert(!isNonStaticMember!(S, "g")); static assert(isNonStaticMember!(S, "h")); static assert(!isNonStaticMember!(S, "i")); } unittest { // tuple fields struct S(T...) { T a; static T b; } alias T = S!(int, float); auto p = T.b; static assert(isNonStaticMember!(T, "a")); static assert(!isNonStaticMember!(T, "b")); alias U = S!(); static assert(!isNonStaticMember!(U, "a")); static assert(!isNonStaticMember!(U, "b")); } /** Tests if a Group of types is implicitly convertible to a Group of target types. */ bool areConvertibleTo(alias TYPES, alias TARGET_TYPES)() if (isGroup!TYPES && isGroup!TARGET_TYPES) { static assert(TYPES.expand.length == TARGET_TYPES.expand.length); foreach (i, V; TYPES.expand) if (!is(V : TARGET_TYPES.expand[i])) return false; return true; } /// Test if the type $(D DG) is a correct delegate for an opApply where the /// key/index is of type $(D TKEY) and the value of type $(D TVALUE). 
/// Test if the type $(D DG) is a correct delegate for an opApply where the
/// key/index is of type $(D TKEY) and the value of type $(D TVALUE).
template isOpApplyDg(DG, TKEY, TVALUE) {
	import std.traits;

	static if (is(DG == delegate) && is(ReturnType!DG : int)) {
		private alias PTT = ParameterTypeTuple!(DG);
		private alias PSCT = ParameterStorageClassTuple!(DG);
		private alias STC = ParameterStorageClass;

		// Just a value
		static if (PTT.length == 1) {
			enum isOpApplyDg = (is(PTT[0] == TVALUE) && PSCT[0] == STC.ref_);
		} else static if (PTT.length == 2) {
			// Key/value pair: the second parameter must be the value type.
			// (Previously PTT[1] was compared against TKEY, which rejected
			// every delegate whose key and value types differ.)
			enum isOpApplyDg = (is(PTT[0] == TKEY) && PSCT[0] == STC.ref_)
				&& (is(PTT[1] == TVALUE) && PSCT[1] == STC.ref_);
		} else
			enum isOpApplyDg = false;
	} else {
		enum isOpApplyDg = false;
	}
}

/**
	TypeTuple which does not auto-expand.

	Useful when you need to pass several type tuples as different template
	argument list parameters, without merging those.
*/
template Group(T...)
{
	alias expand = T;
}

///
unittest
{
	alias group = Group!(int, double, string);
	static assert (!is(typeof(group.length)));
	static assert (group.expand.length == 3);
	static assert (is(group.expand[1] == double));
}

/**
*/
template isGroup(T...)
{
	static if (T.length != 1) enum isGroup = false;
	else enum isGroup =
		!is(T[0]) && is(typeof(T[0]) == void)	// does not evaluate to something
		&& is(typeof(T[0].expand.length) : size_t)	// expands to something with length
		&& !is(typeof(&(T[0].expand)));	// expands to not addressable
}

version (unittest) // NOTE: GDC complains about template definitions in unittest blocks
{
	import std.typetuple;

	alias group = Group!(int, double, string);
	alias group2 = Group!();

	template Fake(T...)
	{
		int[] expand;
	}
	alias fake = Fake!(int, double, string);

	alias fake2 = TypeTuple!(int, double, string);

	static assert (isGroup!group);
	static assert (isGroup!group2);
	static assert (!isGroup!fake);
	static assert (!isGroup!fake2);
}

/* Copied from Phobos as it is private there.
*/
private template isSame(ab...)
if (ab.length == 2) { static if (is(ab[0]) && is(ab[1])) { enum isSame = is(ab[0] == ab[1]); } else static if (!is(ab[0]) && !is(ab[1]) && is(typeof(ab[0] == ab[1]) == bool) && (ab[0] == ab[1])) { static if (!__traits(compiles, &ab[0]) || !__traits(compiles, &ab[1])) enum isSame = (ab[0] == ab[1]); else enum isSame = __traits(isSame, ab[0], ab[1]); } else { enum isSame = __traits(isSame, ab[0], ab[1]); } } /** Small convenience wrapper to find and extract certain UDA from given type. Will stop on first element which is of required type. Params: UDA = type or template to search for in UDA list Symbol = symbol to query for UDA's allow_types = if set to `false` considers attached `UDA` types an error (only accepts instances/values) Returns: aggregated search result struct with 3 field. `value` aliases found UDA. `found` is boolean flag for having a valid find. `index` is integer index in attribute list this UDA was found at. */ template findFirstUDA(alias UDA, alias Symbol, bool allow_types = false) if (!is(UDA)) { enum findFirstUDA = findNextUDA!(UDA, Symbol, 0, allow_types); } /// Ditto template findFirstUDA(UDA, alias Symbol, bool allow_types = false) { enum findFirstUDA = findNextUDA!(UDA, Symbol, 0, allow_types); } private struct UdaSearchResult(alias UDA) { alias value = UDA; bool found = false; long index = -1; } /** Small convenience wrapper to find and extract certain UDA from given type. Will start at the given index and stop on the next element which is of required type. Params: UDA = type or template to search for in UDA list Symbol = symbol to query for UDA's idx = 0-based index to start at. Should be positive, and under the total number of attributes. allow_types = if set to `false` considers attached `UDA` types an error (only accepts instances/values) Returns: aggregated search result struct with 3 field. `value` aliases found UDA. `found` is boolean flag for having a valid find. `index` is integer index in attribute list this UDA was found at. 
*/ template findNextUDA(alias UDA, alias Symbol, long idx, bool allow_types = false) if (!is(UDA)) { import std.traits : isInstanceOf; import std.typetuple : TypeTuple; private alias udaTuple = TypeTuple!(__traits(getAttributes, Symbol)); static assert(idx >= 0, "Index given to findNextUDA can't be negative"); static assert(idx <= udaTuple.length, "Index given to findNextUDA is above the number of attribute"); public template extract(size_t index, list...) { static if (!list.length) enum extract = UdaSearchResult!(null)(false, -1); else { static if (is(list[0])) { static if (is(UDA) && is(list[0] == UDA) || !is(UDA) && isInstanceOf!(UDA, list[0])) { static assert (allow_types, "findNextUDA is designed to look up values, not types"); enum extract = UdaSearchResult!(list[0])(true, index); } else enum extract = extract!(index + 1, list[1..$]); } else { static if (is(UDA) && is(typeof(list[0]) == UDA) || !is(UDA) && isInstanceOf!(UDA, typeof(list[0]))) { import vibe.internal.meta.traits : isPropertyGetter; static if (isPropertyGetter!(list[0])) { enum value = list[0]; enum extract = UdaSearchResult!(value)(true, index); } else enum extract = UdaSearchResult!(list[0])(true, index); } else enum extract = extract!(index + 1, list[1..$]); } } } enum findNextUDA = extract!(idx, udaTuple[idx .. $]); } /// ditto template findNextUDA(UDA, alias Symbol, long idx, bool allow_types = false) { import std.traits : isInstanceOf; import std.typetuple : TypeTuple; private alias udaTuple = TypeTuple!(__traits(getAttributes, Symbol)); static assert(idx >= 0, "Index given to findNextUDA can't be negative"); static assert(idx <= udaTuple.length, "Index given to findNextUDA is above the number of attribute"); public template extract(size_t index, list...) 
{ static if (!list.length) enum extract = UdaSearchResult!(null)(false, -1); else { static if (is(list[0])) { static if (is(list[0] == UDA)) { static assert (allow_types, "findNextUDA is designed to look up values, not types"); enum extract = UdaSearchResult!(list[0])(true, index); } else enum extract = extract!(index + 1, list[1..$]); } else { static if (is(typeof(list[0]) == UDA)) { static if (isPropertyGetter!(list[0])) { enum value = list[0]; enum extract = UdaSearchResult!(value)(true, index); } else enum extract = UdaSearchResult!(list[0])(true, index); } else enum extract = extract!(index + 1, list[1..$]); } } } enum findNextUDA = extract!(idx, udaTuple[idx .. $]); } /// unittest { struct Attribute { int x; } @("something", Attribute(42), Attribute(41)) void symbol(); enum result0 = findNextUDA!(string, symbol, 0); static assert (result0.found); static assert (result0.index == 0); static assert (result0.value == "something"); enum result1 = findNextUDA!(Attribute, symbol, 0); static assert (result1.found); static assert (result1.index == 1); static assert (result1.value == Attribute(42)); enum result2 = findNextUDA!(int, symbol, 0); static assert (!result2.found); enum result3 = findNextUDA!(Attribute, symbol, result1.index + 1); static assert (result3.found); static assert (result3.index == 2); static assert (result3.value == Attribute(41)); } unittest { struct Attribute { int x; } @(Attribute) void symbol(); static assert (!is(findNextUDA!(Attribute, symbol, 0))); enum result0 = findNextUDA!(Attribute, symbol, 0, true); static assert (result0.found); static assert (result0.index == 0); static assert (is(result0.value == Attribute)); } unittest { struct Attribute { int x; } enum Dummy; @property static Attribute getter() { return Attribute(42); } @Dummy @getter void symbol(); enum result0 = findNextUDA!(Attribute, symbol, 0); static assert (result0.found); static assert (result0.index == 1); static assert (result0.value == Attribute(42)); } /// Eager 
version of findNextUDA that represent all instances of UDA in a Tuple. /// If one of the attribute is a type instead of an instance, compilation will fail. template UDATuple(alias UDA, alias Sym) { import std.typetuple : TypeTuple; private template extract(size_t maxSize, Founds...) { private alias LastFound = Founds[$ - 1]; // No more to find static if (!LastFound.found) enum extract = Founds[0 .. $ - 1]; else { // For ease of use, this is a Tuple of UDA, not a tuple of UdaSearchResult!(...) private alias Result = TypeTuple!(Founds[0 .. $ - 1], LastFound.value); // We're at the last parameter static if (LastFound.index == maxSize) enum extract = Result; else enum extract = extract!(maxSize, Result, findNextUDA!(UDA, Sym, LastFound.index + 1)); } } private enum maxIndex = TypeTuple!(__traits(getAttributes, Sym)).length; enum UDATuple = extract!(maxIndex, findNextUDA!(UDA, Sym, 0)); } unittest { import std.typetuple : TypeTuple; struct Attribute { int x; } enum Dummy; @(Dummy, Attribute(21), Dummy, Attribute(42), Attribute(84)) void symbol() {} @(Dummy, Attribute(21), Dummy, Attribute(42), Attribute) void wrong() {} alias Cmp = TypeTuple!(Attribute(21), Attribute(42), Attribute(84)); static assert(Cmp == UDATuple!(Attribute, symbol)); static assert(!is(UDATuple!(Attribute, wrong))); } /// Avoid repeating the same error message again and again. /// ---- /// if (!__ctfe) /// assert(0, onlyAsUda!func); /// ---- template onlyAsUda(string from /*= __FUNCTION__*/) { // With default param, DMD think expression is void, even when writing 'enum string onlyAsUda = ...' enum onlyAsUda = from~" must only be used as an attribute - not called as a runtime function."; } dub-1.40.0/source/dub/internal/vibecompat/inet/000077500000000000000000000000001477246567400213525ustar00rootroot00000000000000dub-1.40.0/source/dub/internal/vibecompat/inet/path.d000066400000000000000000000376121477246567400224640ustar00rootroot00000000000000/** Contains routines for high level path handling. 
Copyright: © 2012 rejectedsoftware e.K. License: Subject to the terms of the MIT license, as written in the included LICENSE.txt file. Authors: Sönke Ludwig */ module dub.internal.vibecompat.inet.path; version (Have_vibe_core) public import vibe.core.path; else: import std.algorithm; import std.array; import std.conv; import std.exception; import std.string; deprecated("Use NativePath instead.") alias Path = NativePath; /** Represents an absolute or relative file system path. This struct allows to do safe operations on paths, such as concatenation and sub paths. Checks are done to disallow invalid operations such as concatenating two absolute paths. It also validates path strings and allows for easy checking of malicious relative paths. */ struct NativePath { private { immutable(PathEntry)[] m_nodes; bool m_absolute = false; bool m_endsWithSlash = false; } alias Segment = PathEntry; alias bySegment = nodes; /// Constructs a NativePath object by parsing a path string. this(string pathstr) { m_nodes = splitPath(pathstr); m_absolute = (pathstr.startsWith("/") || m_nodes.length > 0 && (m_nodes[0].toString().countUntil(':')>0 || m_nodes[0] == "\\")); m_endsWithSlash = pathstr.endsWith("/"); } /// Constructs a path object from a list of PathEntry objects. this(immutable(PathEntry)[] nodes, bool absolute = false) { m_nodes = nodes; m_absolute = absolute; } /// Constructs a relative path with one path entry. this(PathEntry entry){ m_nodes = [entry]; m_absolute = false; } /// Determines if the path is absolute. @property bool absolute() const scope @safe pure nothrow @nogc { return m_absolute; } /// Resolves all '.' and '..' path entries as far as possible. void normalize() { immutable(PathEntry)[] newnodes; foreach( n; m_nodes ){ switch(n.toString()){ default: newnodes ~= n; break; case "", ".": break; case "..": enforce(!m_absolute || newnodes.length > 0, "Path goes below root node."); if( newnodes.length > 0 && newnodes[$-1] != ".." ) newnodes = newnodes[0 .. 
$-1]; else newnodes ~= n; break; } } m_nodes = newnodes; } /// Converts the Path back to a string representation using slashes. string toString() const @safe { if( m_nodes.empty ) return absolute ? "/" : ""; Appender!string ret; // for absolute paths start with / version(Windows) { // Make sure windows path isn't "DRIVE:" if( absolute && !m_nodes[0].toString().endsWith(':') ) ret.put('/'); } else { if( absolute ) { ret.put('/'); } } foreach( i, f; m_nodes ){ if( i > 0 ) ret.put('/'); ret.put(f.toString()); } if( m_nodes.length > 0 && m_endsWithSlash ) ret.put('/'); return ret.data; } /// Converts the NativePath object to a native path string (backslash as path separator on Windows). string toNativeString() const { if (m_nodes.empty) { version(Windows) { assert(!absolute, "Empty absolute path detected."); return m_endsWithSlash ? ".\\" : "."; } else return absolute ? "/" : m_endsWithSlash ? "./" : "."; } Appender!string ret; // for absolute unix paths start with / version(Posix) { if(absolute) ret.put('/'); } foreach( i, f; m_nodes ){ version(Windows) { if( i > 0 ) ret.put('\\'); } else version(Posix) { if( i > 0 ) ret.put('/'); } else { static assert(0, "Unsupported OS"); } ret.put(f.toString()); } if( m_nodes.length > 0 && m_endsWithSlash ){ version(Windows) { ret.put('\\'); } version(Posix) { ret.put('/'); } } return ret.data; } /// Tests if `rhs` is an ancestor or the same as this path. bool startsWith(const NativePath rhs) const { if( rhs.m_nodes.length > m_nodes.length ) return false; foreach( i; 0 .. rhs.m_nodes.length ) if( m_nodes[i] != rhs.m_nodes[i] ) return false; return true; } /// Computes the relative path from `parentPath` to this path. 
NativePath relativeTo(const NativePath parentPath) const { assert(this.absolute && parentPath.absolute, "Determining relative path between non-absolute paths."); version(Windows){ // a path such as ..\C:\windows is not valid, so force the path to stay absolute in this case if( this.absolute && !this.empty && (m_nodes[0].toString().endsWith(":") && !parentPath.startsWith(this[0 .. 1]) || m_nodes[0] == "\\" && !parentPath.startsWith(this[0 .. min(2, $)]))) { return this; } } int nup = 0; while( parentPath.length > nup && !startsWith(parentPath[0 .. parentPath.length-nup]) ){ nup++; } assert(m_nodes.length >= parentPath.length - nup); NativePath ret = NativePath(null, false); assert(m_nodes.length >= parentPath.length - nup); ret.m_endsWithSlash = true; foreach( i; 0 .. nup ) ret ~= ".."; ret ~= NativePath(m_nodes[parentPath.length-nup .. $], false); ret.m_endsWithSlash = this.m_endsWithSlash; return ret; } /// The last entry of the path @property ref immutable(PathEntry) head() const { enforce(m_nodes.length > 0, "Getting head of empty path."); return m_nodes[$-1]; } /// The parent path @property NativePath parentPath() const { return this[0 .. length-1]; } /// Forward compatibility with vibe-d @property bool hasParentPath() const { return length > 1; } /// The list of path entries of which this path is composed @property immutable(PathEntry)[] nodes() const { return m_nodes; } /// The number of path entries of which this path is composed @property size_t length() const scope @safe pure nothrow @nogc { return m_nodes.length; } /// True if the path contains no entries @property bool empty() const scope @safe pure nothrow @nogc { return m_nodes.length == 0; } /// Determines if the path ends with a slash (i.e. is a directory) @property bool endsWithSlash() const { return m_endsWithSlash; } /// ditto @property void endsWithSlash(bool v) { m_endsWithSlash = v; } /// Determines if this path goes outside of its base path (i.e. begins with '..'). 
@property bool external() const { return !m_absolute && m_nodes.length > 0 && m_nodes[0].m_name == ".."; } ref immutable(PathEntry) opIndex(size_t idx) const { return m_nodes[idx]; } NativePath opSlice(size_t start, size_t end) const { auto ret = NativePath(m_nodes[start .. end], start == 0 ? absolute : false); if( end == m_nodes.length ) ret.m_endsWithSlash = m_endsWithSlash; return ret; } size_t opDollar(int dim)() const if(dim == 0) { return m_nodes.length; } NativePath opBinary(string OP)(const NativePath rhs) const if( OP == "~" ) { NativePath ret; ret.m_nodes = m_nodes; ret.m_absolute = m_absolute; ret.m_endsWithSlash = rhs.m_endsWithSlash; ret.normalize(); // needed to avoid "."~".." become "" instead of ".." assert(!rhs.absolute, "Trying to append absolute path: " ~ this.toNativeString() ~ " ~ " ~ rhs.toNativeString()); foreach(folder; rhs.m_nodes){ switch(folder.toString()){ default: ret.m_nodes = ret.m_nodes ~ folder; break; case "", ".": break; case "..": enforce(!ret.absolute || ret.m_nodes.length > 0, "Relative path goes below root node!"); if( ret.m_nodes.length > 0 && ret.m_nodes[$-1].toString() != ".." ) ret.m_nodes = ret.m_nodes[0 .. 
$-1]; else ret.m_nodes = ret.m_nodes ~ folder; break; } } return ret; } NativePath opBinary(string OP)(string rhs) const if( OP == "~" ) { assert(rhs.length > 0, "Cannot append empty path string."); return opBinary!"~"(NativePath(rhs)); } NativePath opBinary(string OP)(PathEntry rhs) const if( OP == "~" ) { assert(rhs.toString().length > 0, "Cannot append empty path string."); return opBinary!"~"(NativePath(rhs)); } void opOpAssign(string OP)(string rhs) if( OP == "~" ) { assert(rhs.length > 0, "Cannot append empty path string."); opOpAssign!"~"(NativePath(rhs)); } void opOpAssign(string OP)(PathEntry rhs) if( OP == "~" ) { assert(rhs.toString().length > 0, "Cannot append empty path string."); opOpAssign!"~"(NativePath(rhs)); } void opOpAssign(string OP)(NativePath rhs) if( OP == "~" ) { auto p = this ~ rhs; m_nodes = p.m_nodes; m_endsWithSlash = rhs.m_endsWithSlash; } /// Tests two paths for equality using '=='. bool opEquals(scope ref const NativePath rhs) const scope @safe { if( m_absolute != rhs.m_absolute ) return false; if( m_endsWithSlash != rhs.m_endsWithSlash ) return false; if( m_nodes.length != rhs.length ) return false; foreach( i; 0 .. m_nodes.length ) if( m_nodes[i] != rhs.m_nodes[i] ) return false; return true; } /// ditto bool opEquals(scope const NativePath other) const scope @safe { return opEquals(other); } int opCmp(ref const NativePath rhs) const { if( m_absolute != rhs.m_absolute ) return cast(int)m_absolute - cast(int)rhs.m_absolute; foreach( i; 0 .. 
min(m_nodes.length, rhs.m_nodes.length) ) if( m_nodes[i] != rhs.m_nodes[i] ) return m_nodes[i].opCmp(rhs.m_nodes[i]); if( m_nodes.length > rhs.m_nodes.length ) return 1; if( m_nodes.length < rhs.m_nodes.length ) return -1; return 0; } size_t toHash() const nothrow @trusted { size_t ret; auto strhash = &typeid(string).getHash; try foreach (n; nodes) ret ^= strhash(&n.m_name); catch (Exception) assert(false); if (m_absolute) ret ^= 0xfe3c1738; if (m_endsWithSlash) ret ^= 0x6aa4352d; return ret; } } struct PathEntry { private { string m_name; } this(string str) pure { assert(str.countUntil('/') < 0 && (str.countUntil('\\') < 0 || str.length == 1)); m_name = str; } string toString() const return scope @safe pure nothrow @nogc { return m_name; } @property string name() const return scope @safe pure nothrow @nogc { return m_name; } NativePath opBinary(string OP)(PathEntry rhs) const if( OP == "~" ) { return NativePath([this, rhs], false); } bool opEquals(scope ref const PathEntry rhs) const scope @safe pure nothrow @nogc { return m_name == rhs.m_name; } bool opEquals(scope PathEntry rhs) const scope @safe pure nothrow @nogc { return m_name == rhs.m_name; } bool opEquals(string rhs) const scope @safe pure nothrow @nogc { return m_name == rhs; } int opCmp(scope ref const PathEntry rhs) const scope @safe pure nothrow @nogc { return m_name.cmp(rhs.m_name); } int opCmp(string rhs) const scope @safe pure nothrow @nogc { return m_name.cmp(rhs); } } /// Joins two path strings. sub-path must be relative. string joinPath(string basepath, string subpath) { NativePath p1 = NativePath(basepath); NativePath p2 = NativePath(subpath); return (p1 ~ p2).toString(); } /// Splits up a path string into its elements/folders PathEntry[] splitPath(string path) pure { if( path.startsWith("/") || path.startsWith("\\") ) path = path[1 .. $]; if( path.empty ) return null; if( path.endsWith("/") || path.endsWith("\\") ) path = path[0 .. 
$-1]; // count the number of path nodes size_t nelements = 0; foreach( i, char ch; path ) if( ch == '\\' || ch == '/' ) nelements++; nelements++; // reserve space for the elements auto elements = new PathEntry[nelements]; size_t eidx = 0; // detect UNC path if(path.startsWith("\\")) { elements[eidx++] = PathEntry(path[0 .. 1]); path = path[1 .. $]; } // read and return the elements size_t startidx = 0; foreach( i, char ch; path ) if( ch == '\\' || ch == '/' ){ elements[eidx++] = PathEntry(path[startidx .. i]); startidx = i+1; } elements[eidx++] = PathEntry(path[startidx .. $]); assert(eidx == nelements); return elements; } unittest { NativePath p; assert(p.toNativeString() == "."); p.endsWithSlash = true; version(Windows) assert(p.toNativeString() == ".\\"); else assert(p.toNativeString() == "./"); p = NativePath("test/"); version(Windows) assert(p.toNativeString() == "test\\"); else assert(p.toNativeString() == "test/"); p.endsWithSlash = false; assert(p.toNativeString() == "test"); } unittest { { auto unc = "\\\\server\\share\\path"; auto uncp = NativePath(unc); uncp.normalize(); version(Windows) assert(uncp.toNativeString() == unc); assert(uncp.absolute); assert(!uncp.endsWithSlash); } { auto abspath = "/test/path/"; auto abspathp = NativePath(abspath); assert(abspathp.toString() == abspath); version(Windows) {} else assert(abspathp.toNativeString() == abspath); assert(abspathp.absolute); assert(abspathp.endsWithSlash); assert(abspathp.length == 2); assert(abspathp[0] == "test"); assert(abspathp[1] == "path"); } { auto relpath = "test/path/"; auto relpathp = NativePath(relpath); assert(relpathp.toString() == relpath); version(Windows) assert(relpathp.toNativeString() == "test\\path\\"); else assert(relpathp.toNativeString() == relpath); assert(!relpathp.absolute); assert(relpathp.endsWithSlash); assert(relpathp.length == 2); assert(relpathp[0] == "test"); assert(relpathp[1] == "path"); } { auto winpath = "C:\\windows\\test"; auto winpathp = NativePath(winpath); 
version(Windows) { assert(winpathp.toString() == "C:/windows/test", winpathp.toString()); assert(winpathp.toNativeString() == winpath); } else { assert(winpathp.toString() == "/C:/windows/test", winpathp.toString()); assert(winpathp.toNativeString() == "/C:/windows/test"); } assert(winpathp.absolute); assert(!winpathp.endsWithSlash); assert(winpathp.length == 3); assert(winpathp[0] == "C:"); assert(winpathp[1] == "windows"); assert(winpathp[2] == "test"); } { auto dotpath = "/test/../test2/././x/y"; auto dotpathp = NativePath(dotpath); assert(dotpathp.toString() == "/test/../test2/././x/y"); dotpathp.normalize(); assert(dotpathp.toString() == "/test2/x/y"); } { auto dotpath = "/test/..////test2//./x/y"; auto dotpathp = NativePath(dotpath); assert(dotpathp.toString() == "/test/..////test2//./x/y"); dotpathp.normalize(); assert(dotpathp.toString() == "/test2/x/y"); } { auto parentpath = "/path/to/parent"; auto parentpathp = NativePath(parentpath); auto subpath = "/path/to/parent/sub/"; auto subpathp = NativePath(subpath); auto subpath_rel = "sub/"; assert(subpathp.relativeTo(parentpathp).toString() == subpath_rel); auto subfile = "/path/to/parent/child"; auto subfilep = NativePath(subfile); auto subfile_rel = "child"; assert(subfilep.relativeTo(parentpathp).toString() == subfile_rel); } { // relative paths across Windows devices are not allowed version (Windows) { auto p1 = NativePath("\\\\server\\share"); assert(p1.absolute); auto p2 = NativePath("\\\\server\\othershare"); assert(p2.absolute); auto p3 = NativePath("\\\\otherserver\\share"); assert(p3.absolute); auto p4 = NativePath("C:\\somepath"); assert(p4.absolute); auto p5 = NativePath("C:\\someotherpath"); assert(p5.absolute); auto p6 = NativePath("D:\\somepath"); assert(p6.absolute); assert(p4.relativeTo(p5) == NativePath("../somepath")); assert(p4.relativeTo(p6) == NativePath("C:\\somepath")); assert(p4.relativeTo(p1) == NativePath("C:\\somepath")); assert(p1.relativeTo(p2) == NativePath("../share")); 
assert(p1.relativeTo(p3) == NativePath("\\\\server\\share")); assert(p1.relativeTo(p4) == NativePath("\\\\server\\share")); } } } unittest { assert(NativePath("/foo/bar/baz").relativeTo(NativePath("/foo")).toString == "bar/baz"); assert(NativePath("/foo/bar/baz/").relativeTo(NativePath("/foo")).toString == "bar/baz/"); assert(NativePath("/foo/bar").relativeTo(NativePath("/foo")).toString == "bar"); assert(NativePath("/foo/bar/").relativeTo(NativePath("/foo")).toString == "bar/"); assert(NativePath("/foo").relativeTo(NativePath("/foo/bar")).toString() == ".."); assert(NativePath("/foo/").relativeTo(NativePath("/foo/bar")).toString() == "../"); assert(NativePath("/foo/baz").relativeTo(NativePath("/foo/bar/baz")).toString() == "../../baz"); assert(NativePath("/foo/baz/").relativeTo(NativePath("/foo/bar/baz")).toString() == "../../baz/"); assert(NativePath("/foo/").relativeTo(NativePath("/foo/bar/baz")).toString() == "../../"); assert(NativePath("/foo/").relativeTo(NativePath("/foo/bar/baz/mumpitz")).toString() == "../../../"); assert(NativePath("/foo").relativeTo(NativePath("/foo")).toString() == ""); assert(NativePath("/foo/").relativeTo(NativePath("/foo")).toString() == ""); } dub-1.40.0/source/dub/internal/vibecompat/inet/url.d000066400000000000000000000175701477246567400223330ustar00rootroot00000000000000/** URL parsing routines. Copyright: © 2012 rejectedsoftware e.K. License: Subject to the terms of the MIT license, as written in the included LICENSE.txt file. Authors: Sönke Ludwig */ module dub.internal.vibecompat.inet.url; public import dub.internal.vibecompat.inet.path; version (Have_vibe_d_inet) public import vibe.inet.url; // vibe.d 0.9.x else version (Have_vibe_inet) public import vibe.inet.url; // vibe.d 0.10.x+ else: import std.algorithm; import std.array; import std.conv; import std.exception; import std.string; import std.uri; import std.meta : AliasSeq; /** Represents a URL decomposed into its components. 
*/ struct URL { private { string m_schema; string m_pathString; NativePath m_path; string m_host; ushort m_port; string m_username; string m_password; string m_queryString; string m_anchor; alias m_schemes = AliasSeq!("http", "https", "ftp", "spdy", "file", "sftp"); } /// Constructs a new URL object from its components. this(string schema, string host, ushort port, NativePath path) { m_schema = schema; m_host = host; m_port = port; m_path = path; m_pathString = path.toString(); } /// ditto this(string schema, NativePath path) { this(schema, null, 0, path); } /** Constructs a URL from its string representation. TODO: additional validation required (e.g. valid host and user names and port) */ this(string url_string) { auto str = url_string; enforce(str.length > 0, "Empty URL."); if( str[0] != '/' ){ auto idx = str.countUntil(':'); enforce(idx > 0, "No schema in URL:"~str); m_schema = str[0 .. idx]; str = str[idx+1 .. $]; bool requires_host = false; auto schema_parts = m_schema.split("+"); if (!schema_parts.empty && schema_parts.back.canFind(m_schemes)) { // proto://server/path style enforce(str.startsWith("//"), "URL must start with proto://..."); requires_host = true; str = str[2 .. $]; } auto si = str.countUntil('/'); if( si < 0 ) si = str.length; auto ai = str[0 .. si].countUntil('@'); ptrdiff_t hs = 0; if( ai >= 0 ){ hs = ai+1; auto ci = str[0 .. ai].countUntil(':'); if( ci >= 0 ){ m_username = str[0 .. ci]; m_password = str[ci+1 .. ai]; } else m_username = str[0 .. ai]; enforce(m_username.length > 0, "Empty user name in URL."); } m_host = str[hs .. si]; auto pi = m_host.countUntil(':'); if(pi > 0) { enforce(pi < m_host.length-1, "Empty port in URL."); m_port = to!ushort(m_host[pi+1..$]); m_host = m_host[0 .. pi]; } enforce(!requires_host || m_schema == "file" || m_host.length > 0, "Empty server name in URL."); str = str[si .. $]; } this.localURI = (str == "") ? 
"/" : str; } /// ditto static URL parse(string url_string) { return URL(url_string); } /// The schema/protocol part of the URL @property string schema() const { return m_schema; } /// ditto @property void schema(string v) { m_schema = v; } /// The path part of the URL in the original string form @property string pathString() const { return m_pathString; } /// The path part of the URL @property NativePath path() const { return m_path; } /// ditto @property void path(NativePath p) { m_path = p; auto pstr = p.toString(); m_pathString = pstr; } /// The host part of the URL (depends on the schema) @property string host() const { return m_host; } /// ditto @property void host(string v) { m_host = v; } /// The port part of the URL (optional) @property ushort port() const { return m_port; } /// ditto @property port(ushort v) { m_port = v; } /// The user name part of the URL (optional) @property string username() const { return m_username; } /// ditto @property void username(string v) { m_username = v; } /// The password part of the URL (optional) @property string password() const { return m_password; } /// ditto @property void password(string v) { m_password = v; } /// The query string part of the URL (optional) @property string queryString() const { return m_queryString; } /// ditto @property void queryString(string v) { m_queryString = v; } /// The anchor part of the URL (optional) @property string anchor() const { return m_anchor; } /// The path part plus query string and anchor @property string localURI() const { auto str = appender!string(); str.reserve(m_pathString.length + 2 + queryString.length + anchor.length); str.put(encode(path.toString())); if( queryString.length ) { str.put("?"); str.put(queryString); } if( anchor.length ) { str.put("#"); str.put(anchor); } return str.data; } /// ditto @property void localURI(string str) { auto ai = str.countUntil('#'); if( ai >= 0 ){ m_anchor = str[ai+1 .. $]; str = str[0 .. 
ai]; } auto qi = str.countUntil('?'); if( qi >= 0 ){ m_queryString = str[qi+1 .. $]; str = str[0 .. qi]; } m_pathString = str; m_path = NativePath(decode(str)); } /// The URL to the parent path with query string and anchor stripped. @property URL parentURL() const { URL ret; ret.schema = schema; ret.host = host; ret.port = port; ret.username = username; ret.password = password; ret.path = path.parentPath; return ret; } /// Converts this URL object to its string representation. string toString() const { import std.format; auto dst = appender!string(); dst.put(schema); dst.put(":"); auto schema_parts = schema.split("+"); if (!schema_parts.empty && schema_parts.back.canFind(m_schemes)) { dst.put("//"); } dst.put(host); if( m_port > 0 ) formattedWrite(dst, ":%d", m_port); dst.put(localURI); return dst.data; } bool startsWith(const URL rhs) const { if( m_schema != rhs.m_schema ) return false; if( m_host != rhs.m_host ) return false; // FIXME: also consider user, port, querystring, anchor etc return path.startsWith(rhs.m_path); } URL opBinary(string OP)(NativePath rhs) const if( OP == "~" ) { return URL(m_schema, m_host, m_port, m_path ~ rhs); } URL opBinary(string OP)(PathEntry rhs) const if( OP == "~" ) { return URL(m_schema, m_host, m_port, m_path ~ rhs); } void opOpAssign(string OP)(NativePath rhs) if( OP == "~" ) { m_path ~= rhs; } void opOpAssign(string OP)(PathEntry rhs) if( OP == "~" ) { m_path ~= rhs; } /// Tests two URLs for equality using '=='. 
bool opEquals(ref const URL rhs) const { if( m_schema != rhs.m_schema ) return false; if( m_host != rhs.m_host ) return false; if( m_path != rhs.m_path ) return false; return true; } /// ditto bool opEquals(const URL other) const { return opEquals(other); } int opCmp(ref const URL rhs) const { if( m_schema != rhs.m_schema ) return m_schema.cmp(rhs.m_schema); if( m_host != rhs.m_host ) return m_host.cmp(rhs.m_host); if( m_path != rhs.m_path ) return m_path.opCmp(rhs.m_path); return true; } } unittest { auto url = URL.parse("https://www.example.net/index.html"); assert(url.schema == "https", url.schema); assert(url.host == "www.example.net", url.host); assert(url.path == NativePath("/index.html"), url.path.toString()); url = URL.parse("http://jo.doe:password@sub.www.example.net:4711/sub2/index.html?query#anchor"); assert(url.schema == "http", url.schema); assert(url.username == "jo.doe", url.username); assert(url.password == "password", url.password); assert(url.port == 4711, to!string(url.port)); assert(url.host == "sub.www.example.net", url.host); assert(url.path.toString() == "/sub2/index.html", url.path.toString()); assert(url.queryString == "query", url.queryString); assert(url.anchor == "anchor", url.anchor); url = URL("http://localhost")~NativePath("packages"); assert(url.toString() == "http://localhost/packages", url.toString()); url = URL("http://localhost/")~NativePath("packages"); assert(url.toString() == "http://localhost/packages", url.toString()); url = URL.parse("dub+https://code.dlang.org/"); assert(url.host == "code.dlang.org"); assert(url.toString() == "dub+https://code.dlang.org/"); assert(url.schema == "dub+https"); } dub-1.40.0/source/dub/package_.d000066400000000000000000000654371477246567400163640ustar00rootroot00000000000000/** Contains high-level functionality for working with packages. 
Copyright: © 2012-2013 Matthias Dondorff, © 2012-2016 Sönke Ludwig License: Subject to the terms of the MIT license, as written in the included LICENSE.txt file. Authors: Matthias Dondorff, Sönke Ludwig, Martin Nowak, Nick Sabalausky */ module dub.package_; public import dub.recipe.packagerecipe; import dub.compilers.compiler; import dub.dependency; import dub.description; import dub.recipe.json; import dub.recipe.sdl; import dub.internal.logging; import dub.internal.utils; import dub.internal.vibecompat.core.file; import dub.internal.vibecompat.data.json; import dub.internal.vibecompat.inet.path; import dub.internal.configy.Read : StrictMode; import std.algorithm; import std.array; import std.conv; import std.exception; import std.range; import std.string; import std.typecons : Nullable; /// Lists the supported package recipe formats. enum PackageFormat { json, /// JSON based, using the ".json" file extension sdl /// SDLang based, using the ".sdl" file extension } struct FilenameAndFormat { string filename; PackageFormat format; } /// Supported package descriptions in decreasing order of preference. static immutable FilenameAndFormat[] packageInfoFiles = [ {"dub.json", PackageFormat.json}, {"dub.sdl", PackageFormat.sdl}, {"package.json", PackageFormat.json} ]; /// Returns a list of all recognized package recipe file names in descending order of precedence. deprecated("Open an issue if this is needed") @property string[] packageInfoFilenames() { return packageInfoFiles.map!(f => cast(string)f.filename).array; } /// Returns the default package recile file name. @property string defaultPackageFilename() { return packageInfoFiles[0].filename; } /// All built-in build type names except for the special `$DFLAGS` build type. /// Has the default build type (`debug`) as first index. 
static immutable string[] builtinBuildTypes = [ "debug", "plain", "release", "release-debug", "release-nobounds", "unittest", "profile", "profile-gc", "docs", "ddox", "cov", "cov-ctfe", "unittest-cov", "unittest-cov-ctfe", "syntax" ]; /** Represents a package, including its sub packages. */ class Package { // `package` visibility as it is set from the PackageManager package NativePath m_infoFile; private { NativePath m_path; PackageRecipe m_info; PackageRecipe m_rawRecipe; Package m_parentPackage; } /** Constructs a `Package` using an in-memory package recipe. Params: json_recipe = The package recipe in JSON format recipe = The package recipe in generic format root = The directory in which the package resides (if any). parent = Reference to the parent package, if the new package is a sub package. version_override = Optional version to associate to the package instead of the one declared in the package recipe, or the one determined by invoking the VCS (GIT currently). */ deprecated("Provide an already parsed PackageRecipe instead of a JSON object") this(Json json_recipe, NativePath root = NativePath(), Package parent = null, string version_override = "") { import dub.recipe.json; PackageRecipe recipe; parseJson(recipe, json_recipe, parent ? parent.name : null); this(recipe, root, parent, version_override); } /// ditto this(PackageRecipe recipe, NativePath root = NativePath(), Package parent = null, string version_override = "") { // save the original recipe m_rawRecipe = recipe.clone; if (!version_override.empty) recipe.version_ = version_override; // try to run git to determine the version of the package if no explicit version was given if (recipe.version_.length == 0 && !parent) { try recipe.version_ = determineVersionFromSCM(root); catch (Exception e) logDebug("Failed to determine version by SCM: %s", e.msg); if (recipe.version_.length == 0) { logDiagnostic("Note: Failed to determine version of package %s at %s. 
Assuming ~master.", recipe.name, this.path.toNativeString()); // TODO: Assume unknown version here? // recipe.version_ = Version.unknown.toString(); recipe.version_ = Version.masterBranch.toString(); } else logDiagnostic("Determined package version using GIT: %s %s", recipe.name, recipe.version_); } m_parentPackage = parent; m_path = root; m_path.endsWithSlash = true; // use the given recipe as the basis m_info = recipe; checkDubRequirements(); fillWithDefaults(); mutuallyExcludeMainFiles(); } /** Searches the given directory for package recipe files. Params: directory = The directory to search Returns: Returns the full path to the package file, if any was found. Otherwise returns an empty path. */ deprecated("Use `PackageManager.findPackageFile`") static NativePath findPackageFile(NativePath directory) { foreach (file; packageInfoFiles) { auto filename = directory ~ file.filename; if (existsFile(filename)) return filename; } return NativePath.init; } /** Constructs a `Package` using a package that is physically present on the local file system. Params: root = The directory in which the package resides. recipe_file = Optional path to the package recipe file. If left empty, the `root` directory will be searched for a recipe file. parent = Reference to the parent package, if the new package is a sub package. version_override = Optional version to associate to the package instead of the one declared in the package recipe, or the one determined by invoking the VCS (GIT currently). 
mode = Whether to issue errors, warning, or ignore unknown keys in dub.json */ deprecated("Use `PackageManager.getOrLoadPackage` instead of loading packages directly") static Package load(NativePath root, NativePath recipe_file = NativePath.init, Package parent = null, string version_override = "", StrictMode mode = StrictMode.Ignore) { import dub.recipe.io; if (recipe_file.empty) recipe_file = findPackageFile(root); enforce(!recipe_file.empty, "No package file found in %s, expected one of %s" .format(root.toNativeString(), packageInfoFiles.map!(f => cast(string)f.filename).join("/"))); auto recipe = readPackageRecipe(recipe_file, parent ? parent.name : null, mode); auto ret = new Package(recipe, root, parent, version_override); ret.m_infoFile = recipe_file; return ret; } /** Returns the qualified name of the package. The qualified name includes any possible parent package if this package is a sub package. */ @property string name() const { if (m_parentPackage) return m_parentPackage.name ~ ":" ~ m_info.name; else return m_info.name; } /** Returns the directory in which the package resides. Note that this can be empty for packages that are not stored in the local file system. */ @property NativePath path() const { return m_path; } /** Accesses the version associated with this package. Note that this is a shortcut to `this.recipe.version_`. */ @property Version version_() const { return m_parentPackage ? m_parentPackage.version_ : Version(m_info.version_); } /// ditto @property void version_(Version value) { assert(m_parentPackage is null); m_info.version_ = value.toString(); } /** Accesses the recipe contents of this package. The recipe contains any default values and configurations added by DUB. To access the raw user recipe, use the `rawRecipe` property. See_Also: `rawRecipe` */ @property ref inout(PackageRecipe) recipe() inout { return m_info; } /** Accesses the original package recipe. 
The returned recipe matches exactly the contents of the original package recipe. For the effective package recipe, augmented with DUB generated default settings and configurations, use the `recipe` property. See_Also: `recipe` */ @property ref const(PackageRecipe) rawRecipe() const { return m_rawRecipe; } /** Returns the path to the package recipe file. Note that this can be empty for packages that are not stored in the local file system. */ @property NativePath recipePath() const { return m_infoFile; } /** Returns the base package of this package. The base package is the root of the sub package hierarchy (i.e. the topmost parent). This will be `null` for packages that are not sub packages. */ @property inout(Package) basePackage() inout { return m_parentPackage ? m_parentPackage.basePackage : this; } /** Returns the parent of this package. The parent package is the package that contains a sub package. This will be `null` for packages that are not sub packages. */ @property inout(Package) parentPackage() inout { return m_parentPackage; } /** Returns the list of all sub packages. Note that this is a shortcut for `this.recipe.subPackages`. */ @property inout(SubPackage)[] subPackages() inout { return m_info.subPackages; } /** Returns the list of all build configuration names. Configuration contents can be accessed using `this.recipe.configurations`. */ @property string[] configurations() const { auto ret = appender!(string[])(); foreach (ref config; m_info.configurations) ret.put(config.name); return ret.data; } /** Returns the list of all custom build type names. Build type contents can be accessed using `this.recipe.buildTypes`. */ @property string[] customBuildTypes() const { auto ret = appender!(string[])(); foreach (name; m_info.buildTypes.byKey) ret.put(name); return ret.data; } /** Writes the current recipe contents to a recipe file. The parameter-less overload writes to `this.path`, which must not be empty. 
The default recipe file name will be used in this case. */ void storeInfo() { storeInfo(m_path); m_infoFile = m_path ~ defaultPackageFilename; } /// ditto void storeInfo(NativePath path) const { auto filename = path ~ defaultPackageFilename; writeJsonFile(filename, m_info.toJson()); } deprecated("Use `PackageManager.getSubPackage` instead") Nullable!PackageRecipe getInternalSubPackage(string name) { foreach (ref p; m_info.subPackages) if (p.path.empty && p.recipe.name == name) return Nullable!PackageRecipe(p.recipe); return Nullable!PackageRecipe(); } /** Searches for use of compiler-specific flags that have generic alternatives. This will output a warning message for each such flag to the console. */ void warnOnSpecialCompilerFlags() { // warn about use of special flags m_info.buildSettings.warnOnSpecialCompilerFlags(m_info.name, null); foreach (ref config; m_info.configurations) config.buildSettings.warnOnSpecialCompilerFlags(m_info.name, config.name); } /** Retrieves a build settings template. If no `config` is given, this returns the build settings declared at the root level of the package recipe. Otherwise returns the settings declared within the given configuration (excluding those at the root level). Note that this is a shortcut to accessing `this.recipe.buildSettings` or `this.recipe.configurations[].buildSettings`. */ const(BuildSettingsTemplate) getBuildSettings(string config = null) const { if (config.length) { foreach (ref conf; m_info.configurations) if (conf.name == config) return conf.buildSettings; assert(false, "Unknown configuration: "~config); } else { return m_info.buildSettings; } } /** Returns all BuildSettings for the given platform and configuration. This will gather the effective build settings declared in the package recipe for when building on a particular platform and configuration. Root build settings and configuration specific settings will be merged. 
*/ BuildSettings getBuildSettings(in BuildPlatform platform, string config) const { BuildSettings ret; m_info.buildSettings.getPlatformSettings(ret, platform, this.path); bool found = false; foreach(ref conf; m_info.configurations){ if( conf.name != config ) continue; conf.buildSettings.getPlatformSettings(ret, platform, this.path); found = true; break; } assert(found || config is null, "Unknown configuration for "~m_info.name~": "~config); // construct default target name based on package name if( ret.targetName.empty ) ret.targetName = this.name.replace(":", "_"); // special support for DMD style flags getCompiler("dmd").extractBuildOptions(ret); return ret; } /** Returns the combination of all build settings for all configurations and platforms. This can be useful for IDEs to gather a list of all potentially used files or settings. */ BuildSettings getCombinedBuildSettings() const { BuildSettings ret; m_info.buildSettings.getPlatformSettings(ret, BuildPlatform.any, this.path); foreach(ref conf; m_info.configurations) conf.buildSettings.getPlatformSettings(ret, BuildPlatform.any, this.path); // construct default target name based on package name if (ret.targetName.empty) ret.targetName = this.name.replace(":", "_"); // special support for DMD style flags getCompiler("dmd").extractBuildOptions(ret); return ret; } /** Adds build type specific settings to an existing set of build settings. This function searches the package recipe for overridden build types. If none is found, the default build settings will be applied, if `build_type` matches a default build type name. An exception is thrown otherwise. 
*/ void addBuildTypeSettings(ref BuildSettings settings, in BuildPlatform platform, string build_type) const { if (auto pbt = build_type in m_info.buildTypes) { logDiagnostic("Using custom build type '%s'.", build_type); pbt.getPlatformSettings(settings, platform, this.path); } else { with(BuildOption) switch (build_type) { default: throw new Exception(format("Unknown build type for %s: '%s'", this.name, build_type)); case "$DFLAGS": break; case "plain": break; case "debug": settings.addOptions(debugMode, debugInfo); break; case "release": settings.addOptions(releaseMode, optimize, inline); break; case "release-debug": settings.addOptions(releaseMode, optimize, inline, debugInfo); break; case "release-nobounds": settings.addOptions(releaseMode, optimize, inline, noBoundsCheck); break; case "unittest": settings.addOptions(unittests, debugMode, debugInfo); break; case "docs": settings.addOptions(syntaxOnly, _docs); break; case "ddox": settings.addOptions(syntaxOnly, _ddox); break; case "profile": settings.addOptions(profile, optimize, inline, debugInfo); break; case "profile-gc": settings.addOptions(profileGC, debugInfo); break; case "cov": settings.addOptions(coverage, debugInfo); break; case "cov-ctfe": settings.addOptions(coverageCTFE, debugInfo); break; case "unittest-cov": settings.addOptions(unittests, coverage, debugMode, debugInfo); break; case "unittest-cov-ctfe": settings.addOptions(unittests, coverageCTFE, debugMode, debugInfo); break; case "syntax": settings.addOptions(syntaxOnly); break; } } // Add environment DFLAGS last so that user specified values are not overriden by us. import std.process : environment; string dflags = environment.get("DFLAGS", ""); settings.addDFlags(dflags.split()); } /** Returns the selected configuration for a certain dependency. If no configuration is specified in the package recipe, null will be returned instead. 
FIXME: The `platform` parameter is currently ignored, as the `"subConfigurations"` field doesn't support platform suffixes. */ string getSubConfiguration(string config, in Package dependency, in BuildPlatform platform) const { bool found = false; foreach(ref c; m_info.configurations){ if( c.name == config ){ if( auto pv = dependency.name in c.buildSettings.subConfigurations ) return *pv; found = true; break; } } assert(found || config is null, "Invalid configuration \""~config~"\" for "~this.name); if( auto pv = dependency.name in m_info.buildSettings.subConfigurations ) return *pv; return null; } /** Returns the default configuration to build for the given platform. This will return the first configuration that is applicable to the given platform, or `null` if none is applicable. By default, only library configurations will be returned. Setting `allow_non_library` to `true` will also return executable configurations. See_Also: `getPlatformConfigurations` */ string getDefaultConfiguration(in BuildPlatform platform, bool allow_non_library = false) const { foreach (ref conf; m_info.configurations) { if (!conf.matchesPlatform(platform)) continue; if (!allow_non_library && conf.buildSettings.targetType == TargetType.executable) continue; return conf.name; } return null; } /** Returns a list of configurations suitable for the given platform. Params: platform = The platform against which to match configurations allow_non_library = If set to true, executable configurations will also be included. 
See_Also: `getDefaultConfiguration` */ string[] getPlatformConfigurations(in BuildPlatform platform, bool allow_non_library = false) const { auto ret = appender!(string[]); foreach(ref conf; m_info.configurations){ if (!conf.matchesPlatform(platform)) continue; if (!allow_non_library && conf.buildSettings.targetType == TargetType.executable) continue; ret ~= conf.name; } if (ret.data.length == 0) ret.put(null); return ret.data; } /** Determines if the package has a dependency to a certain package. Params: dependency_name = The name of the package to search for config = Name of the configuration to use when searching for dependencies See_Also: `getDependencies` */ bool hasDependency(string dependency_name, string config) const { if (dependency_name in m_info.buildSettings.dependencies) return true; foreach (ref c; m_info.configurations) if ((config.empty || c.name == config) && dependency_name in c.buildSettings.dependencies) return true; return false; } /** Retrieves all dependencies for a particular configuration. This includes dependencies that are declared at the root level of the package recipe, as well as those declared within the specified configuration. If no configuration with the given name exists, only dependencies declared at the root level will be returned. See_Also: `hasDependency` */ const(Dependency[string]) getDependencies(string config) const { Dependency[string] ret; foreach (k, v; m_info.buildSettings.dependencies) { // DMD bug: Not giving `Dependency` here leads to RangeError Dependency dep = v; ret[k] = dep; } foreach (ref conf; m_info.configurations) if (conf.name == config) { foreach (k, v; conf.buildSettings.dependencies) { Dependency dep = v; ret[k] = dep; } break; } return ret; } /** Returns a list of all possible dependencies of the package. This list includes all dependencies of all configurations. The same package may occur multiple times with possibly different `Dependency` values. 
*/ PackageDependency[] getAllDependencies() const { auto ret = appender!(PackageDependency[]); getAllDependenciesRange().copy(ret); return ret.data; } // Left as package until the final API for this has been found package auto getAllDependenciesRange() const { import std.algorithm: sort, uniq; import std.array: array; return chain( only(this.recipe.buildSettings.dependencies.byKeyValue), this.recipe.configurations.map!(c => c.buildSettings.dependencies.byKeyValue) ) .joiner() .map!(d => PackageDependency(PackageName(d.key), d.value)) .array .sort .uniq; } /** Returns a description of the package for use in IDEs or build tools. */ PackageDescription describe(BuildPlatform platform, string config) const { return describe(platform, getCompiler(platform.compilerBinary), config); } /// ditto PackageDescription describe(BuildPlatform platform, Compiler compiler, string config) const { PackageDescription ret; ret.configuration = config; ret.path = m_path.toNativeString(); ret.name = this.name; ret.version_ = this.version_; ret.description = m_info.description; ret.homepage = m_info.homepage; ret.authors = m_info.authors.dup; ret.copyright = m_info.copyright; ret.license = m_info.license; ret.dependencies = getDependencies(config).keys; // save build settings BuildSettings bs = getBuildSettings(platform, config); BuildSettings allbs = getCombinedBuildSettings(); ret.targetType = bs.targetType; ret.targetPath = bs.targetPath; ret.targetName = bs.targetName; if (ret.targetType != TargetType.none && compiler) ret.targetFileName = compiler.getTargetFileName(bs, platform); ret.workingDirectory = bs.workingDirectory; ret.mainSourceFile = bs.mainSourceFile; ret.dflags = bs.dflags; ret.lflags = bs.lflags; ret.libs = bs.libs; ret.injectSourceFiles = bs.injectSourceFiles; ret.copyFiles = bs.copyFiles; ret.versions = bs.versions; ret.debugVersions = bs.debugVersions; ret.importPaths = bs.importPaths; ret.cImportPaths = bs.cImportPaths; ret.stringImportPaths = bs.stringImportPaths; 
ret.preGenerateCommands = bs.preGenerateCommands; ret.postGenerateCommands = bs.postGenerateCommands; ret.preBuildCommands = bs.preBuildCommands; ret.postBuildCommands = bs.postBuildCommands; ret.environments = bs.environments; ret.buildEnvironments = bs.buildEnvironments; ret.runEnvironments = bs.runEnvironments; ret.preGenerateEnvironments = bs.preGenerateEnvironments; ret.postGenerateEnvironments = bs.postGenerateEnvironments; ret.preBuildEnvironments = bs.preBuildEnvironments; ret.postBuildEnvironments = bs.postBuildEnvironments; ret.preRunEnvironments = bs.preRunEnvironments; ret.postRunEnvironments = bs.postRunEnvironments; // prettify build requirements output for (int i = 1; i <= BuildRequirement.max; i <<= 1) if (bs.requirements & cast(BuildRequirement)i) ret.buildRequirements ~= cast(BuildRequirement)i; // prettify options output for (int i = 1; i <= BuildOption.max; i <<= 1) if (bs.options & cast(BuildOption)i) ret.options ~= cast(BuildOption)i; // collect all possible source files and determine their types SourceFileRole[string] sourceFileTypes; foreach (f; allbs.stringImportFiles) sourceFileTypes[f] = SourceFileRole.unusedStringImport; foreach (f; allbs.importFiles) sourceFileTypes[f] = SourceFileRole.unusedImport; foreach (f; allbs.sourceFiles) sourceFileTypes[f] = SourceFileRole.unusedSource; foreach (f; bs.stringImportFiles) sourceFileTypes[f] = SourceFileRole.stringImport; foreach (f; bs.importFiles) sourceFileTypes[f] = SourceFileRole.import_; foreach (f; bs.sourceFiles) sourceFileTypes[f] = SourceFileRole.source; foreach (f; sourceFileTypes.byKey.array.sort()) { SourceFileDescription sf; sf.path = f; sf.role = sourceFileTypes[f]; ret.files ~= sf; } return ret; } private void checkDubRequirements() { import dub.semver : isValidVersion; import dub.version_ : dubVersion; import std.exception : enforce; const dep = m_info.toolchainRequirements.dub; static assert(dubVersion.length); immutable dv = Version(dubVersion[(dubVersion[0] == 'v') .. 
$]); enforce(dep.matches(dv), "dub-" ~ dv.toString() ~ " does not comply with toolchainRequirements.dub " ~ "specification: " ~ m_info.toolchainRequirements.dub.toString() ~ "\nPlease consider upgrading your DUB installation"); } private void fillWithDefaults() { auto bs = &m_info.buildSettings; // check for default string import folders if ("" !in bs.stringImportPaths) { foreach(defvf; ["views"]){ if( existsFile(m_path ~ defvf) ) bs.stringImportPaths[""] ~= defvf; } } // check for default source folders immutable hasSP = ("" in bs.sourcePaths) !is null; immutable hasIP = ("" in bs.importPaths) !is null; if (!hasSP || !hasIP) { foreach (defsf; ["source/", "src/"]) { if (existsFile(m_path ~ defsf)) { if (!hasSP) bs.sourcePaths[""] ~= defsf; if (!hasIP) bs.importPaths[""] ~= defsf; } } } // generate default configurations if none are defined if (m_info.configurations.length == 0) { // check for default app_main string app_main_file; auto pkg_name = m_info.name.length ? m_info.name : "unknown"; MainFileSearch: foreach_reverse(sf; bs.sourcePaths.get("", null)){ auto p = m_path ~ sf; if( !existsFile(p) ) continue; foreach(fil; ["app.d", "main.d", pkg_name ~ "/main.d", pkg_name ~ "/" ~ "app.d"]){ if( existsFile(p ~ fil) ) { app_main_file = (NativePath(sf) ~ fil).toNativeString(); break MainFileSearch; } } } if (bs.targetType == TargetType.executable) { BuildSettingsTemplate app_settings; app_settings.targetType = TargetType.executable; if (bs.mainSourceFile.empty) app_settings.mainSourceFile = app_main_file; m_info.configurations ~= ConfigurationInfo("application", app_settings); } else if (bs.targetType != TargetType.none) { BuildSettingsTemplate lib_settings; lib_settings.targetType = bs.targetType == TargetType.autodetect ? 
TargetType.library : bs.targetType; if (bs.targetType == TargetType.autodetect) { if (app_main_file.length) { lib_settings.excludedSourceFiles[""] ~= app_main_file; BuildSettingsTemplate app_settings; app_settings.targetType = TargetType.executable; app_settings.mainSourceFile = app_main_file; m_info.configurations ~= ConfigurationInfo("application", app_settings); } } m_info.configurations ~= ConfigurationInfo("library", lib_settings); } } } package void simpleLint() const { if (m_parentPackage) { if (m_parentPackage.path != path) { if (this.recipe.license.length && this.recipe.license != m_parentPackage.recipe.license) logWarn("Warning: License in sub-package %s is different than its parent package, this is discouraged.", name); } } if (name.empty) logWarn("Warning: The package in %s has no name.", path); bool[string] cnames; foreach (ref c; this.recipe.configurations) { if (c.name in cnames) logWarn("Warning: Multiple configurations with the name \"%s\" are defined in package \"%s\". 
This will most likely cause configuration resolution issues.", c.name, this.name); cnames[c.name] = true; } } /// Exclude files listed in mainSourceFile for other configurations unless they are listed in sourceFiles private void mutuallyExcludeMainFiles() { string[] allMainFiles; foreach (ref config; m_info.configurations) if (!config.buildSettings.mainSourceFile.empty()) allMainFiles ~= config.buildSettings.mainSourceFile; if (allMainFiles.length == 0) return; foreach (ref config; m_info.configurations) { import std.algorithm.searching : canFind; auto bs = &config.buildSettings; auto otherMainFiles = allMainFiles.filter!(elem => (elem != bs.mainSourceFile)).array; if (bs.sourceFiles.length == 0) bs.excludedSourceFiles[""] ~= otherMainFiles; else foreach (suffix, arr; bs.sourceFiles) bs.excludedSourceFiles[suffix] ~= otherMainFiles.filter!(elem => !canFind(arr, elem)).array; } } } private string determineVersionFromSCM(NativePath path) { if (existsFile(path ~ ".git")) { import dub.internal.git : determineVersionWithGit; return determineVersionWithGit(path); } return null; } dub-1.40.0/source/dub/packagemanager.d000066400000000000000000001640341477246567400175510ustar00rootroot00000000000000/** Management of packages on the local computer. Copyright: © 2012-2016 rejectedsoftware e.K. License: Subject to the terms of the MIT license, as written in the included LICENSE.txt file. 
Authors: Sönke Ludwig, Matthias Dondorff */ module dub.packagemanager; import dub.dependency; import dub.internal.io.filesystem; import dub.internal.utils; import dub.internal.vibecompat.data.json; import dub.internal.vibecompat.inet.path; import dub.internal.logging; import dub.package_; import dub.recipe.io; import dub.recipe.selection; import dub.internal.configy.Exceptions; public import dub.internal.configy.Read : StrictMode; import dub.internal.dyaml.stdsumtype; import std.algorithm : countUntil, filter, map, sort, canFind, remove; import std.array; import std.conv; import std.datetime.systime; import std.digest.sha; import std.encoding : sanitize; import std.exception; import std.range; import std.string; import std.typecons; import std.zip; /// Indicates where a package has been or should be placed to. public enum PlacementLocation { /// Packages retrieved with 'local' will be placed in the current folder /// using the package name as destination. local, /// Packages with 'userWide' will be placed in a folder accessible by /// all of the applications from the current user. user, /// Packages retrieved with 'systemWide' will be placed in a shared folder, /// which can be accessed by all users of the system. system, } /// Converts a `PlacementLocation` to a string public string toString (PlacementLocation loc) @safe pure nothrow @nogc { final switch (loc) { case PlacementLocation.local: return "Local"; case PlacementLocation.user: return "User"; case PlacementLocation.system: return "System"; } } /// A SelectionsFile associated with its file-system path. struct SelectionsFileLookupResult { /// The absolute path to the dub.selections.json file /// (potentially inherited from a parent directory of the root package). NativePath absolutePath; /// The parsed dub.selections.json file. SelectionsFile selectionsFile; } /// The PackageManager can retrieve present packages and get / remove /// packages. 
class PackageManager { protected { /** * The 'internal' location, for packages not attributable to a location. * * There are two uses for this: * - In `bare` mode, the search paths are set at this scope, * and packages gathered are stored in `localPackage`; * - In the general case, any path-based or SCM-based dependency * is loaded in `fromPath`; */ Location m_internal; /** * List of locations that are managed by this `PackageManager` * * The `PackageManager` can be instantiated either in 'bare' mode, * in which case this array will be empty, or in the normal mode, * this array will have 3 entries, matching values * in the `PlacementLocation` enum. * * See_Also: `Location`, `PlacementLocation` */ Location[] m_repositories; /** * Whether `refreshLocal` / `refreshCache` has been called or not * * User local cache can get pretty large, and we want to avoid our build * time being dependent on their size. However, in order to support * local packages and overrides, we were scanning the whole cache prior * to v1.39.0 (although attempts at fixing this behavior were made * earlier). Those booleans record whether we have been semi-initialized * (local packages and overrides have been loaded) or fully initialized * (all caches have been scanned), the later still being required for * some API (e.g. `getBestPackage` or `getPackageIterator`). */ enum InitializationState { /// No `refresh*` function has been called none, /// `refreshLocal` has been called partial, /// `refreshCache` (and `refreshLocal`) has been called full, } /// Ditto InitializationState m_state; /// The `Filesystem` object, used to interact with directory / files Filesystem fs; } /** Instantiate an instance with a single search path This constructor is used when dub is invoked with the '--bare' CLI switch. The instance will not look up the default repositories (e.g. ~/.dub/packages), using only `path` instead. 
Params: path = Path of the single repository */ this(NativePath path) { import dub.internal.io.realfs; this.fs = new RealFS(); this.m_internal.searchPath = [ path ]; this.refresh(); } this(NativePath package_path, NativePath user_path, NativePath system_path, bool refresh_packages = true) { import dub.internal.io.realfs; this(new RealFS(), package_path ~ ".dub/packages/", user_path ~ "packages/", system_path ~ "packages/"); if (refresh_packages) refresh(); } /** * Instantiate a `PackageManager` with the provided `Filesystem` and paths * * Unlike the other overload, paths are taken as-if, e.g. `packages/` is not * appended to them. * * Params: * fs = Filesystem abstraction to handle all folder/file I/O. * local = Path to the local package cache (usually the one in the project), * whih takes preference over `user` and `system`. * user = Path to the user package cache (usually ~/.dub/packages/), takes * precedence over `system` but not over `local`. * system = Path to the system package cache, this has the least precedence. */ public this(Filesystem fs, NativePath local, NativePath user, NativePath system) { this.fs = fs; this.m_repositories = [ Location(local), Location(user), Location(system) ]; } /** Gets/sets the list of paths to search for local packages. */ @property void searchPath(NativePath[] paths) { if (paths == this.m_internal.searchPath) return; this.m_internal.searchPath = paths.dup; this.refresh(); } /// ditto @property const(NativePath)[] searchPath() const { return this.m_internal.searchPath; } /** Returns the effective list of search paths, including default ones. */ deprecated("Use the `PackageManager` facilities instead") @property const(NativePath)[] completeSearchPath() const { auto ret = appender!(const(NativePath)[])(); ret.put(this.m_internal.searchPath); foreach (ref repo; m_repositories) { ret.put(repo.searchPath); ret.put(repo.packagePath); } return ret.data; } /** Sets additional (read-only) package cache paths to search for packages. 
Cache paths have the same structure as the default cache paths, such as ".dub/packages/". Note that previously set custom paths will be removed when setting this property. */ @property void customCachePaths(NativePath[] custom_cache_paths) { import std.algorithm.iteration : map; import std.array : array; m_repositories.length = PlacementLocation.max+1; m_repositories ~= custom_cache_paths.map!(p => Location(p)).array; this.refresh(); } /** * Looks up a package, first in the list of loaded packages, * then directly on the file system. * * This function allows for lazy loading of packages, without needing to * first scan all the available locations (as `refresh` does). * * Note: * This function does not take overrides into account. Overrides need * to be resolved by the caller before `lookup` is called. * Additionally, if a package of the same version is loaded in multiple * locations, the first one matching (local > user > system) * will be returned. * * Params: * name = The full name of the package to look up * vers = The version the package must match * * Returns: * A `Package` if one was found, `null` if none exists. */ protected Package lookup (in PackageName name, in Version vers) { // This is the only place we can get away with lazy initialization, // since we know exactly what package and version we want. // However, it is also the most often called API. this.ensureInitialized(InitializationState.partial); if (auto pkg = this.m_internal.lookup(name, vers)) return pkg; foreach (ref location; this.m_repositories) if (auto p = location.load(name, vers, this)) return p; return null; } /** Looks up a specific package. Looks up a package matching the given version/path in the set of registered packages. The lookup order is done according the the usual rules (see getPackageIterator). Params: name = The name of the package ver = The exact version of the package to query path = An exact path that the package must reside in. 
Note that the package must still be registered in the package manager. enable_overrides = Apply the local package override list before returning a package (enabled by default) Returns: The matching package or null if no match was found. */ Package getPackage(in PackageName name, in Version ver, bool enable_overrides = true) { if (enable_overrides) { foreach (ref repo; m_repositories) foreach (ovr; repo.overrides) if (ovr.package_ == name.toString() && ovr.source.matches(ver)) { Package pack = ovr.target.match!( (NativePath path) => getOrLoadPackage(path), (Version vers) => getPackage(name, vers, false), ); if (pack) return pack; ovr.target.match!( (any) { logWarn("Package override %s %s -> '%s' doesn't reference an existing package.", ovr.package_, ovr.source, any); }, ); } } return this.lookup(name, ver); } deprecated("Use the overload that accepts a `PackageName` instead") Package getPackage(string name, Version ver, bool enable_overrides = true) { return this.getPackage(PackageName(name), ver, enable_overrides); } /// ditto deprecated("Use the overload that accepts a `Version` as second argument") Package getPackage(string name, string ver, bool enable_overrides = true) { return getPackage(name, Version(ver), enable_overrides); } /// ditto deprecated("Use the overload that takes a `PlacementLocation`") Package getPackage(string name, Version ver, NativePath path) { foreach (p; getPackageIterator(name)) { auto pvm = isManagedPackage(p) ? 
VersionMatchMode.strict : VersionMatchMode.standard; if (p.version_.matches(ver, pvm) && p.path.startsWith(path)) return p; } return null; } /// Ditto deprecated("Use the overload that accepts a `PackageName` instead") Package getPackage(string name, Version ver, PlacementLocation loc) { return this.getPackage(PackageName(name), ver, loc); } /// Ditto Package getPackage(in PackageName name, in Version ver, PlacementLocation loc) { // Bare mode if (loc >= this.m_repositories.length) return null; return this.m_repositories[loc].load(name, ver, this); } /// ditto deprecated("Use the overload that accepts a `Version` as second argument") Package getPackage(string name, string ver, NativePath path) { return getPackage(name, Version(ver), path); } /// ditto deprecated("Use another `PackageManager` API, open an issue if none suits you") Package getPackage(string name, NativePath path) { foreach( p; getPackageIterator(name) ) if (p.path.startsWith(path)) return p; return null; } /** Looks up the first package matching the given name. */ deprecated("Use `getBestPackage` instead") Package getFirstPackage(string name) { foreach (ep; getPackageIterator(name)) return ep; return null; } /** Looks up the latest package matching the given name. */ deprecated("Use `getBestPackage` with `name, Dependency.any` instead") Package getLatestPackage(string name) { Package pkg; foreach (ep; getPackageIterator(name)) if (pkg is null || pkg.version_ < ep.version_) pkg = ep; return pkg; } /** For a given package path, returns the corresponding package. If the package is already loaded, a reference is returned. Otherwise the package gets loaded and cached for the next call to this function. 
Params: path = NativePath to the root directory of the package recipe_path = Optional path to the recipe file of the package allow_sub_packages = Also return a sub package if it resides in the given folder mode = Whether to issue errors, warning, or ignore unknown keys in dub.json Returns: The packages loaded from the given path Throws: Throws an exception if no package can be loaded */ Package getOrLoadPackage(NativePath path, NativePath recipe_path = NativePath.init, bool allow_sub_packages = false, StrictMode mode = StrictMode.Ignore) { path.endsWithSlash = true; foreach (p; this.m_internal.fromPath) if (p.path == path && (!p.parentPackage || (allow_sub_packages && p.parentPackage.path != p.path))) return p; auto pack = this.load(path, recipe_path, null, null, mode); addPackages(this.m_internal.fromPath, pack); return pack; } /** * Loads a `Package` from the filesystem * * This is called when a `Package` needs to be loaded from the path. * This does not change the internal state of the `PackageManager`, * it simply loads the `Package` and returns it - it is up to the caller * to call `addPackages`. * * Throws: * If no package can be found at the `path` / with the `recipe`. * * Params: * path = The directory in which the package resides. * recipe = Optional path to the package recipe file. If left empty, * the `path` directory will be searched for a recipe file. * parent = Reference to the parent package, if the new package is a * sub package. * version_ = Optional version to associate to the package instead of * the one declared in the package recipe, or the one * determined by invoking the VCS (GIT currently). * mode = Whether to issue errors, warning, or ignore unknown keys in * dub.json * * Returns: A populated `Package`. 
*/ protected Package load(NativePath path, NativePath recipe = NativePath.init, Package parent = null, string version_ = null, StrictMode mode = StrictMode.Ignore) { if (recipe.empty) recipe = this.findPackageFile(path); enforce(!recipe.empty, "No package file found in %s, expected one of %s" .format(path.toNativeString(), packageInfoFiles.map!(f => cast(string)f.filename).join("/"))); const PackageName pname = parent ? PackageName(parent.name) : PackageName.init; string text = this.fs.readText(recipe); auto content = parsePackageRecipe( text, recipe.toNativeString(), pname, null, mode); auto ret = new Package(content, path, parent, version_); ret.m_infoFile = recipe; return ret; } /** Searches the given directory for package recipe files. * * Params: * directory = The directory to search * * Returns: * Returns the full path to the package file, if any was found. * Otherwise returns an empty path. */ public NativePath findPackageFile(NativePath directory) { foreach (file; packageInfoFiles) { auto filename = directory ~ file.filename; if (this.fs.existsFile(filename)) return filename; } return NativePath.init; } /** For a given SCM repository, returns the corresponding package. An SCM repository is provided as its remote URL, the repository is cloned and in the dependency specified commit is checked out. If the target directory already exists, just returns the package without cloning. Params: name = Package name dependency = Dependency that contains the repository URL and a specific commit Returns: The package loaded from the given SCM repository or null if the package couldn't be loaded. 
*/ Package loadSCMPackage(in PackageName name, in Repository repo) in { assert(!repo.empty); } do { Package pack; final switch (repo.kind) { case repo.Kind.git: return this.loadGitPackage(name, repo); } } deprecated("Use the overload that accepts a `dub.dependency : Repository`") Package loadSCMPackage(string name, Dependency dependency) in { assert(!dependency.repository.empty); } do { return this.loadSCMPackage(name, dependency.repository); } deprecated("Use `loadSCMPackage(PackageName, Repository)`") Package loadSCMPackage(string name, Repository repo) { return this.loadSCMPackage(PackageName(name), repo); } private Package loadGitPackage(in PackageName name, in Repository repo) { if (!repo.ref_.startsWith("~") && !repo.ref_.isGitHash) { return null; } string gitReference = repo.ref_.chompPrefix("~"); NativePath destination = this.getPackagePath(PlacementLocation.user, name, repo.ref_); // Before doing a git clone, let's see if the package exists locally if (this.fs.existsDirectory(destination)) { bool isMatch(Package p) { return p.name == name.toString() && p.basePackage.path == destination; } // It exists, check if we already loaded it. // Either we loaded it on refresh and it's in PlacementLocation.user, // or we just added it and it's in m_internal. foreach (p; this.m_internal.fromPath) if (isMatch(p)) return p; if (this.m_repositories.length) foreach (p; this.m_repositories[PlacementLocation.user].fromPath) if (isMatch(p)) return p; } else if (!this.gitClone(repo.remote, gitReference, destination)) return null; Package p = this.load(destination); if (p is null) return null; return this.addPackagesAndResolveSubPackage(this.m_internal.fromPath, p, name); } /** * Perform a `git clone` operation at `dest` using `repo` * * Params: * remote = The remote to clone from * gitref = The git reference to use * dest = Where the result of git clone operation is to be stored * * Returns: * Whether or not the clone operation was successfull. 
*/ protected bool gitClone(string remote, string gitref, in NativePath dest) { static import dub.internal.git; return dub.internal.git.cloneRepository(remote, gitref, dest.toNativeString()); } /** * Get the final destination a specific package needs to be stored in. * * See `Location.getPackagePath`. */ package(dub) NativePath getPackagePath(PlacementLocation base, in PackageName name, string vers) { assert(this.m_repositories.length == 3, "getPackagePath called in bare mode"); return this.m_repositories[base].getPackagePath(name, vers); } /** * Searches for the latest version of a package matching the version range. * * This will search the local file system only (it doesn't connect * to the registry) for the "best" (highest version) that matches `range`. * An overload with a single version exists to search for an exact version. * * Params: * name = Package name to search for * vers = Exact version to search for * range = Range of versions to search for, defaults to any * * Returns: * The best package matching the parameters, or `null` if none was found. 
*/
deprecated("Use the overload that accepts a `PackageName` instead")
Package getBestPackage(string name, Version vers)
{
	return this.getBestPackage(PackageName(name), vers);
}

/// Ditto
Package getBestPackage(in PackageName name, in Version vers)
{
	// An exact version is just a degenerate (single-point) range
	return this.getBestPackage(name, VersionRange(vers, vers));
}

/// Ditto
deprecated("Use the overload that accepts a `PackageName` instead")
Package getBestPackage(string name, VersionRange range = VersionRange.Any)
{
	return this.getBestPackage(PackageName(name), range);
}

/// Ditto
Package getBestPackage(in PackageName name, in VersionRange range = VersionRange.Any)
{
	return this.getBestPackage_(name, Dependency(range));
}

/// Ditto
deprecated("Use the overload that accepts a `Version` or a `VersionRange`")
Package getBestPackage(string name, string range)
{
	return this.getBestPackage(name, VersionRange.fromString(range));
}

/// Ditto
deprecated("`getBestPackage` should only be used with a `Version` or `VersionRange` argument")
Package getBestPackage(string name, Dependency version_spec, bool enable_overrides = true)
{
	return this.getBestPackage_(PackageName(name), version_spec, enable_overrides);
}

// Implementation shared by all public `getBestPackage` overloads above.
// TODO: Merge this into `getBestPackage(string, VersionRange)`
private Package getBestPackage_(in PackageName name, in Dependency version_spec,
	bool enable_overrides = true)
{
	Package ret;
	foreach (p; getPackageIterator(name.toString())) {
		// Managed packages (fetched by dub) are matched strictly,
		// user-registered working copies are matched loosely.
		auto vmm = isManagedPackage(p) ? VersionMatchMode.strict : VersionMatchMode.standard;
		// Keep the highest version among all matching candidates
		if (version_spec.matches(p.version_, vmm) && (!ret || p.version_ > ret.version_))
			ret = p;
	}

	// If an override exists for the winning version, return the override instead
	if (enable_overrides && ret) {
		if (auto ovr = getPackage(name, ret.version_))
			return ovr;
	}
	return ret;
}

/** Gets a specific sub package.

	Params:
		base_package = The package from which to get a sub package
		sub_name = Name of the sub package (not prefixed with the base package name)
		silent_fail = If set to true, the function will return `null` if no package is found. Otherwise will throw an exception.

*/
Package getSubPackage(Package base_package, string sub_name, bool silent_fail)
{
	// Iterate over all packages named "<base>:<sub>" and keep the one
	// whose parent is exactly the instance we were handed (names alone
	// are ambiguous when the same package is loaded from several paths).
	foreach (p; getPackageIterator(base_package.name~":"~sub_name))
		if (p.parentPackage is base_package)
			return p;
	enforce(silent_fail, "Sub package \""~base_package.name~":"~sub_name~"\" doesn't exist.");
	return null;
}

/** Determines if a package is managed by DUB.

	Managed packages can be upgraded and removed.
*/
bool isManagedPackage(const(Package) pack)
const {
	// Sub-packages live inside their base package's folder, so the base
	// package path is the one that decides "managed or not".
	auto ppath = pack.basePackage.path;
	return isManagedPath(ppath);
}

/** Determines if a specific path is within a DUB managed package folder.

	By default, managed folders are "~/.dub/packages" and "/var/lib/dub/packages".
*/
bool isManagedPath(NativePath path)
const {
	foreach (rep; m_repositories)
		if (rep.isManaged(path))
			return true;
	return false;
}

/** Enables iteration over all known local packages.

	Returns: A delegate suitable for use with `foreach` is returned.
*/
int delegate(int delegate(ref Package)) getPackageIterator()
{
	// This API requires full knowledge of the package cache
	this.ensureInitialized(InitializationState.full);
	int iterator(int delegate(ref Package) del)
	{
		// Search scope by priority, internal has the highest
		foreach (p; this.m_internal.fromPath)
			if (auto ret = del(p)) return ret;
		foreach (p; this.m_internal.localPackages)
			if (auto ret = del(p)) return ret;

		// Then each location (local, user, system), user-registered
		// packages before automatically scanned ones.
		foreach (ref repo; m_repositories) {
			foreach (p; repo.localPackages)
				if (auto ret = del(p)) return ret;
			foreach (p; repo.fromPath)
				if (auto ret = del(p)) return ret;
		}
		return 0;
	}

	return &iterator;
}

/** Enables iteration over all known local packages with a certain name.

	Returns: A delegate suitable for use with `foreach` is returned.
*/
int delegate(int delegate(ref Package)) getPackageIterator(string name)
{
	// Thin filtering wrapper over the unfiltered iterator above,
	// preserving its priority ordering.
	int iterator(int delegate(ref Package) del)
	{
		foreach (p; getPackageIterator())
			if (p.name == name)
				if (auto ret = del(p)) return ret;
		return 0;
	}
	return &iterator;
}

/** Returns a list of all package overrides for the given scope.
*/ deprecated(OverrideDepMsg) const(PackageOverride)[] getOverrides(PlacementLocation scope_) const { return cast(typeof(return)) this.getOverrides_(scope_); } package(dub) const(PackageOverride_)[] getOverrides_(PlacementLocation scope_) const { return m_repositories[scope_].overrides; } /** Adds a new override for the given package. */ deprecated("Use the overload that accepts a `VersionRange` as 3rd argument") void addOverride(PlacementLocation scope_, string package_, Dependency version_spec, Version target) { m_repositories[scope_].overrides ~= PackageOverride(package_, version_spec, target); m_repositories[scope_].writeOverrides(this); } /// ditto deprecated("Use the overload that accepts a `VersionRange` as 3rd argument") void addOverride(PlacementLocation scope_, string package_, Dependency version_spec, NativePath target) { m_repositories[scope_].overrides ~= PackageOverride(package_, version_spec, target); m_repositories[scope_].writeOverrides(this); } /// Ditto deprecated(OverrideDepMsg) void addOverride(PlacementLocation scope_, string package_, VersionRange source, Version target) { this.addOverride_(scope_, package_, source, target); } /// ditto deprecated(OverrideDepMsg) void addOverride(PlacementLocation scope_, string package_, VersionRange source, NativePath target) { this.addOverride_(scope_, package_, source, target); } // Non deprecated version that is used by `commandline`. Do not use! package(dub) void addOverride_(PlacementLocation scope_, string package_, VersionRange source, Version target) { m_repositories[scope_].overrides ~= PackageOverride_(package_, source, target); m_repositories[scope_].writeOverrides(this); } // Non deprecated version that is used by `commandline`. Do not use! 
package(dub) void addOverride_(PlacementLocation scope_, string package_, VersionRange source, NativePath target) { m_repositories[scope_].overrides ~= PackageOverride_(package_, source, target); m_repositories[scope_].writeOverrides(this); } /** Removes an existing package override. */ deprecated("Use the overload that accepts a `VersionRange` as 3rd argument") void removeOverride(PlacementLocation scope_, string package_, Dependency version_spec) { version_spec.visit!( (VersionRange src) => this.removeOverride(scope_, package_, src), (any) { throw new Exception(format("No override exists for %s %s", package_, version_spec)); }, ); } deprecated(OverrideDepMsg) void removeOverride(PlacementLocation scope_, string package_, VersionRange src) { this.removeOverride_(scope_, package_, src); } package(dub) void removeOverride_(PlacementLocation scope_, string package_, VersionRange src) { Location* rep = &m_repositories[scope_]; foreach (i, ovr; rep.overrides) { if (ovr.package_ != package_ || ovr.source != src) continue; rep.overrides = rep.overrides[0 .. i] ~ rep.overrides[i+1 .. $]; (*rep).writeOverrides(this); return; } throw new Exception(format("No override exists for %s %s", package_, src)); } deprecated("Use `store(NativePath source, PlacementLocation dest, string name, Version vers)`") Package storeFetchedPackage(NativePath zip_file_path, Json package_info, NativePath destination) { import dub.internal.vibecompat.core.file; return this.store_(readFile(zip_file_path), destination, PackageName(package_info["name"].get!string), Version(package_info["version"].get!string)); } /** * Store a zip file stored at `src` into a managed location `destination` * * This will extracts the package supplied as (as a zip file) to the * `destination` and sets a version field in the package description. * In the future, we should aim not to alter the package description, * but this is done for backward compatibility. 
* * Params: * src = The path to the zip file containing the package * dest = At which `PlacementLocation` the package should be stored * name = Name of the package being stored * vers = Version of the package * * Returns: * The `Package` after it has been loaded. * * Throws: * If the package cannot be loaded / the zip is corrupted / the package * already exists, etc... */ deprecated("Use the overload that accepts a `PackageName` instead") Package store(NativePath src, PlacementLocation dest, string name, Version vers) { return this.store(src, dest, PackageName(name), vers); } /// Ditto Package store(NativePath src, PlacementLocation dest, in PackageName name, in Version vers) { import dub.internal.vibecompat.core.file; auto data = readFile(src); return this.store(data, dest, name, vers); } /// Ditto Package store(ubyte[] data, PlacementLocation dest, in PackageName name, in Version vers) { assert(!name.sub.length, "Cannot store a subpackage, use main package instead"); NativePath dstpath = this.getPackagePath(dest, name, vers.toString()); this.fs.mkdir(dstpath.parentPath()); const lockPath = dstpath.parentPath() ~ ".lock"; // possibly wait for other dub instance import core.time : seconds; auto lock = lockFile(lockPath.toNativeString(), 30.seconds); if (this.fs.existsFile(dstpath)) { return this.getPackage(name, vers, dest); } return this.store_(data, dstpath, name, vers); } /// Backward-compatibility for deprecated overload, simplify once `storeFetchedPatch` /// is removed protected Package store_(ubyte[] data, NativePath destination, in PackageName name, in Version vers) { import dub.recipe.json : toJson; import std.range : walkLength; logDebug("Placing package '%s' version '%s' to location '%s'", name, vers, destination.toNativeString()); enforce(!this.fs.existsFile(destination), "%s (%s) needs to be removed from '%s' prior placement." 
.format(name, vers, destination)); ZipArchive archive = new ZipArchive(data); logDebug("Extracting from zip."); // In a GitHub zip, the actual contents are in a sub-folder alias PSegment = typeof(NativePath.init.head); PSegment[] zip_prefix; outer: foreach(ArchiveMember am; archive.directory) { auto path = NativePath(am.name).bySegment.array; foreach (fil; packageInfoFiles) if (path.length == 2 && path[$-1].name == fil.filename) { zip_prefix = path[0 .. $-1]; break outer; } } logDebug("zip root folder: %s", zip_prefix); NativePath getCleanedPath(string fileName) { auto path = NativePath(fileName); if (zip_prefix.length && !path.bySegment.startsWith(zip_prefix)) return NativePath.init; static if (is(typeof(path[0 .. 1]))) return path[zip_prefix.length .. $]; else return NativePath(path.bySegment.array[zip_prefix.length .. $]); } void setAttributes(NativePath path, ArchiveMember am) { import std.datetime : DosFileTimeToSysTime; auto mtime = DosFileTimeToSysTime(am.time); this.fs.setTimes(path, mtime, mtime); if (auto attrs = am.fileAttributes) this.fs.setAttributes(path, attrs); } // extract & place this.fs.mkdir(destination); logDebug("Copying all files..."); int countFiles = 0; foreach(ArchiveMember a; archive.directory) { auto cleanedPath = getCleanedPath(a.name); if(cleanedPath.empty) continue; auto dst_path = destination ~ cleanedPath; logDebug("Creating %s", cleanedPath); if (dst_path.endsWithSlash) { this.fs.mkdir(dst_path); } else { this.fs.mkdir(dst_path.parentPath); // for symlinks on posix systems, use the symlink function to // create them. Windows default unzip doesn't handle symlinks, // so we don't need to worry about it for Windows. 
version(Posix) { import core.sys.posix.sys.stat; if( S_ISLNK(cast(mode_t)a.fileAttributes) ){ import core.sys.posix.unistd; // need to convert name and target to zero-terminated string auto target = toStringz(cast(const(char)[])archive.expand(a)); auto dstFile = toStringz(dst_path.toNativeString()); enforce(symlink(target, dstFile) == 0, "Error creating symlink: " ~ dst_path.toNativeString()); goto symlink_exit; } } this.fs.writeFile(dst_path, archive.expand(a)); setAttributes(dst_path, a); symlink_exit: ++countFiles; } } logDebug("%s file(s) copied.", to!string(countFiles)); // overwrite dub.json (this one includes a version field) auto pack = this.load(destination, NativePath.init, null, vers.toString()); if (pack.recipePath.head != defaultPackageFilename) // Storeinfo saved a default file, this could be different to the file from the zip. this.fs.removeFile(pack.recipePath); auto app = appender!string(); app.writePrettyJsonString(pack.recipe.toJson()); this.fs.writeFile(pack.recipePath.parentPath ~ defaultPackageFilename, app.data); addPackages(this.m_internal.localPackages, pack); return pack; } /// Removes the given the package. 
/// Removes the given package from the in-memory package lists and
/// deletes its folder from disk.
///
/// Params:
///   pack = The package to remove. Must have a path and must not be a
///          sub-package (sub-packages are removed with their parent).
///
/// Throws: If the package has no path, is a sub-package, or is not
///         known to this `PackageManager`.
void remove(in Package pack)
{
	logDebug("Remove %s, version %s, path '%s'", pack.name, pack.version_, pack.path);
	enforce(!pack.path.empty, "Cannot remove package "~pack.name~" without a path.");

	enforce(pack.parentPackage is null, "Cannot remove subpackage %s".format(pack.name));

	// remove package from repositories' list
	bool found = false;
	// NOTE: `packs` must be `ref` - `.remove` returns a shortened slice
	// and the reassignment has to reach the caller's array, otherwise
	// the in-memory list keeps a stale entry after the folder is deleted.
	bool removeFrom(ref Package[] packs, in Package pack) {
		auto packPos = countUntil!("a.path == b.path")(packs, pack);
		if(packPos != -1) {
			packs = .remove(packs, packPos);
			return true;
		}
		return false;
	}
	// `Location` is a struct: iterate by `ref` so mutations of
	// `repo.fromPath` / `repo.localPackages` affect `m_repositories`
	// instead of a copy.
	foreach(ref repo; m_repositories) {
		if (removeFrom(repo.fromPath, pack)) {
			found = true;
			break;
		}
		// Maintain backward compatibility with pre v1.30.0 behavior,
		// this is equivalent to remove-local
		if (removeFrom(repo.localPackages, pack)) {
			found = true;
			break;
		}
	}
	if(!found)
		found = removeFrom(this.m_internal.localPackages, pack);
	enforce(found, "Cannot remove, package not found: '"~ pack.name ~"', path: " ~ to!string(pack.path));

	logDebug("About to delete root folder for package '%s'.", pack.path);
	import std.file : rmdirRecurse;
	rmdirRecurse(pack.path.toNativeString());
	logInfo("Removed", Color.yellow, "%s %s", pack.name.color(Mode.bold), pack.version_);
}

/// Compatibility overload. Use the version without a `force_remove` argument instead.
deprecated("Use `remove(pack)` directly instead, the boolean has no effect")
void remove(in Package pack, bool force_remove)
{
	remove(pack);
}

/// Registers a local directory as a package (`dub add-local`).
///
/// Params:
///   path = Path to the package's root directory (a recipe file must exist there)
///   verName = Optional version to force onto the package ("" keeps the recipe's)
///   type = The `PlacementLocation` whose local package list receives the entry
///
/// Returns: The loaded (or already registered) package.
Package addLocalPackage(NativePath path, string verName, PlacementLocation type)
{
	// As we iterate over `localPackages` we need it to be populated
	// In theory we could just populate that specific repository,
	// but multiple calls would then become inefficient.
	this.ensureInitialized(InitializationState.full);
	path.endsWithSlash = true;
	auto pack = this.load(path);
	enforce(pack.name.length, "The package has no name, defined in: " ~ path.toString());
	if (verName.length)
		pack.version_ = Version(verName);

	// don't double-add packages
	Package[]* packs = &m_repositories[type].localPackages;
	foreach (p; *packs) {
		if (p.path == path) {
			enforce(p.version_ == pack.version_, "Adding the same local package twice with differing versions is not allowed.");
			logInfo("Package is already registered: %s (version: %s)", p.name, p.version_);
			return p;
		}
	}

	addPackages(*packs, pack);

	// Persist the updated list to local-packages.json
	this.m_repositories[type].writeLocalPackageList(this);

	logInfo("Registered package: %s (version: %s)", pack.name, pack.version_);
	return pack;
}

/// Deregisters a local package (`dub remove-local`). Does not delete files.
///
/// Params:
///   path = Path the package was registered under
///   type = The `PlacementLocation` to remove it from
///
/// Throws: If no package is registered at `path` for that location.
void removeLocalPackage(NativePath path, PlacementLocation type)
{
	// As we iterate over `localPackages` we need it to be populated
	// In theory we could just populate that specific repository,
	// but multiple calls would then become inefficient.
	this.ensureInitialized(InitializationState.full);
	path.endsWithSlash = true;
	Package[]* packs = &m_repositories[type].localPackages;
	size_t[] to_remove;
	foreach( i, entry; *packs )
		if( entry.path == path )
			to_remove ~= i;
	enforce(to_remove.length > 0, "No "~type.to!string()~" package found at "~path.toNativeString());

	// Remember what was removed so it can be reported after the list is rebuilt
	string[Version] removed;
	foreach (i; to_remove)
		removed[(*packs)[i].version_] = (*packs)[i].name;

	*packs = (*packs).enumerate
		.filter!(en => !to_remove.canFind(en.index))
		.map!(en => en.value).array;

	this.m_repositories[type].writeLocalPackageList(this);

	foreach(ver, name; removed)
		logInfo("Deregistered package: %s (version: %s)", name, ver);
}

/// For the given type add another path where packages will be looked up.
void addSearchPath(NativePath path, PlacementLocation type)
{
	m_repositories[type].searchPath ~= path;
	this.m_repositories[type].writeLocalPackageList(this);
}

/// Removes a search path from the given type.
void removeSearchPath(NativePath path, PlacementLocation type)
{
	// Rebuild the search path list without `path` and persist it
	m_repositories[type].searchPath = m_repositories[type].searchPath.filter!(p => p != path)().array();
	this.m_repositories[type].writeLocalPackageList(this);
}

deprecated("Use `refresh()` without boolean argument(same as `refresh(false)`")
void refresh(bool refresh)
{
	if (refresh)
		logDiagnostic("Refreshing local packages (refresh existing: true)...");
	else
		logDiagnostic("Scanning local packages...");

	this.refreshLocal(refresh);
	this.refreshCache(refresh);
}

/// Rescans all locations: local packages first, then the full cache.
void refresh()
{
	logDiagnostic("Scanning local packages...");
	this.refreshLocal(false);
	this.refreshCache(false);
}

/// Private API to ensure a level of initialization
private void ensureInitialized(InitializationState state)
{
	// Already at (or beyond) the requested level: nothing to do
	if (this.m_state >= state)
		return;
	if (state == InitializationState.partial)
		this.refreshLocal(false);
	else
		this.refresh();
}

/// Refresh pay-as-you-go: Only load local packages, not the full cache
private void refreshLocal(bool refresh)
{
	// First pass: load local-packages.json for every location
	foreach (ref repository; this.m_repositories)
		repository.scanLocalPackages(refresh, this);
	this.m_internal.scan(this, refresh);
	// Second pass: scan the search paths registered in the first pass,
	// then load the overrides file
	foreach (ref repository; this.m_repositories) {
		auto existing = refresh ? null : repository.fromPath;
		foreach (path; repository.searchPath)
			repository.scanPackageFolder(path, this, existing);
		repository.loadOverrides(this);
	}
	// Never downgrade the state (it may already be `full`)
	if (this.m_state < InitializationState.partial)
		this.m_state = InitializationState.partial;
}

/// Refresh the full cache, a potentially expensive operation
private void refreshCache(bool refresh)
{
	foreach (ref repository; this.m_repositories)
		repository.scan(this, refresh);
	this.m_state = InitializationState.full;
}

/// Digest type returned by `hashPackage` (raw SHA-256 bytes).
alias Hash = ubyte[];

/// Generates a hash digest for a given package.
/// Some files or folders are ignored during the generation (like .dub and /// .svn folders) Hash hashPackage(Package pack) { import std.file; import dub.internal.vibecompat.core.file; string[] ignored_directories = [".git", ".dub", ".svn"]; // something from .dub_ignore or what? string[] ignored_files = []; SHA256 hash; foreach(file; dirEntries(pack.path.toNativeString(), SpanMode.depth)) { const isDir = file.isDir; if(isDir && ignored_directories.canFind(NativePath(file.name).head.name)) continue; else if(ignored_files.canFind(NativePath(file.name).head.name)) continue; hash.put(cast(ubyte[])NativePath(file.name).head.name); if(isDir) { logDebug("Hashed directory name %s", NativePath(file.name).head); } else { hash.put(cast(ubyte[]) readFile(NativePath(file.name))); logDebug("Hashed file contents from %s", NativePath(file.name).head); } } auto digest = hash.finish(); logDebug("Project hash: %s", digest); return digest[].dup; } /** * Loads the selections file (`dub.selections.json`) * * The selections file is only used for the root package / project. * However, due to it being a filesystem interaction, it is managed * from the `PackageManager`. * * Params: * absProjectPath = The absolute path to the root package/project for * which to load the selections file. * * Returns: * Either `null` (if no selections file exists or parsing encountered an error), * or a `SelectionsFileLookupResult`. Note that the nested `SelectionsFile` * might use an unsupported version (see `SelectionsFile` documentation). 
*/
Nullable!SelectionsFileLookupResult readSelections(in NativePath absProjectPath)
in (absProjectPath.absolute)
{
	import dub.internal.configy.Read;

	alias N = typeof(return);

	// check for dub.selections.json in root project dir first, then walk up its
	// parent directories and look for inheritable dub.selections.json files
	const path = this.findSelections(absProjectPath);
	if (path.empty)
		return N.init;
	const content = this.fs.readText(path);
	// TODO: Remove `StrictMode.Warn` after v1.40 release
	// The default is to error, but as the previous parser wasn't
	// complaining, we should first warn the user.
	auto selections = wrapException(parseConfigString!SelectionsFile(
		content, path.toNativeString(), StrictMode.Warn));
	// Could not parse file
	if (selections.isNull())
		return N.init;
	// Non-inheritable selections found
	// (a file found in a parent directory only applies if it opts in
	// via the `inheritable` flag)
	if (!path.startsWith(absProjectPath) && !selections.get().inheritable)
		return N.init;
	return N(SelectionsFileLookupResult(path, selections.get()));
}

/// Helper function to walk up the filesystem and find `dub.selections.json`
/// Returns an empty path once the filesystem root is reached without a match.
private NativePath findSelections(in NativePath dir)
{
	const path = dir ~ "dub.selections.json";
	if (this.fs.existsFile(path))
		return path;
	if (!dir.hasParentPath)
		return NativePath.init;
	// Recurse into the parent directory
	return this.findSelections(dir.parentPath);
}

/**
 * Writes the selections file (`dub.selections.json`)
 *
 * The selections file is only used for the root package / project.
 * However, due to it being a filesystem interaction, it is managed
 * from the `PackageManager`.
 *
 * Params:
 *	 project = The root package / project to read the selections file for.
 *	 selections = The `SelectionsFile` to write.
 *	 overwrite = Whether to overwrite an existing selections file.
 *			   True by default.
*/ public void writeSelections(in Package project, in Selections!1 selections, bool overwrite = true) { const path = project.path ~ "dub.selections.json"; if (!overwrite && this.fs.existsFile(path)) return; this.fs.writeFile(path, selectionsToString(selections)); } /// Package function to avoid code duplication with deprecated /// SelectedVersions.save, merge with `writeSelections` in /// the future. package static string selectionsToString (in Selections!1 s) { Json json = selectionsToJSON(s); assert(json.type == Json.Type.object); assert(json.length == 2 || json.length == 3); assert(json["versions"].type != Json.Type.undefined); auto result = appender!string(); result.put("{\n\t\"fileVersion\": "); result.writeJsonString(json["fileVersion"]); if (s.inheritable) result.put(",\n\t\"inheritable\": true"); result.put(",\n\t\"versions\": {"); auto vers = json["versions"].get!(Json[string]); bool first = true; foreach (k; vers.byKey.array.sort()) { if (!first) result.put(","); else first = false; result.put("\n\t\t"); result.writeJsonString(Json(k)); result.put(": "); result.writeJsonString(vers[k]); } result.put("\n\t}\n}\n"); return result.data; } /// Ditto package static Json selectionsToJSON (in Selections!1 s) { Json serialized = Json.emptyObject; serialized["fileVersion"] = s.fileVersion; if (s.inheritable) serialized["inheritable"] = true; serialized["versions"] = Json.emptyObject; foreach (p, dep; s.versions) serialized["versions"][p] = dep.toJson(true); return serialized; } /// Adds the package and its sub-packages. protected void addPackages(ref Package[] dst_repos, Package pack) { // Add the main package. dst_repos ~= pack; // Additionally to the internally defined sub-packages, whose metadata // is loaded with the main dub.json, load all externally defined // packages after the package is available with all the data. 
foreach (spr; pack.subPackages) { Package sp; if (spr.path.length) { auto p = NativePath(spr.path); p.normalize(); enforce(!p.absolute, "Sub package paths must be sub paths of the parent package."); auto path = pack.path ~ p; sp = this.load(path, NativePath.init, pack); } else sp = new Package(spr.recipe, pack.path, pack); // Add the sub-package. try { dst_repos ~= sp; } catch (Exception e) { logError("Package '%s': Failed to load sub-package %s: %s", pack.name, spr.path.length ? spr.path : spr.recipe.name, e.msg); logDiagnostic("Full error: %s", e.toString().sanitize()); } } } /// Adds the package and its sub-packages, and returns the added package matching /// the specified name (of the package itself or a sub-package). /// Returns null if the sub-package doesn't exist. private Package addPackagesAndResolveSubPackage(ref Package[] dst_repos, Package pack, in PackageName nameToResolve) in(pack.name == nameToResolve.main.toString(), "nameToResolve must be the added package or one of its sub-packages") { this.addPackages(dst_repos, pack); if (nameToResolve.sub.empty) return pack; // available sub-packages have been appended foreach_reverse (sp; dst_repos) { if (sp.parentPackage is pack && sp.name == nameToResolve.toString()) return sp; } logDiagnostic("Sub-package %s not found in parent package", nameToResolve); return null; } } deprecated(OverrideDepMsg) alias PackageOverride = PackageOverride_; package(dub) struct PackageOverride_ { private alias ResolvedDep = SumType!(NativePath, Version); string package_; VersionRange source; ResolvedDep target; deprecated("Use `source` instead") @property inout(Dependency) version_ () inout return @safe { return Dependency(this.source); } deprecated("Assign `source` instead") @property ref PackageOverride version_ (Dependency v) scope return @safe pure { this.source = v.visit!( (VersionRange range) => range, (any) { int a; if (a) return VersionRange.init; // Trick the compiler throw new Exception("Cannot use anything else than 
a `VersionRange` for overrides"); }, ); return this; } deprecated("Use `target.match` directly instead") @property inout(Version) targetVersion () inout return @safe pure nothrow @nogc { return this.target.match!( (Version v) => v, (any) => Version.init, ); } deprecated("Assign `target` directly instead") @property ref PackageOverride targetVersion (Version v) scope return pure nothrow @nogc { this.target = v; return this; } deprecated("Use `target.match` directly instead") @property inout(NativePath) targetPath () inout return @safe pure nothrow @nogc { return this.target.match!( (NativePath v) => v, (any) => NativePath.init, ); } deprecated("Assign `target` directly instead") @property ref PackageOverride targetPath (NativePath v) scope return pure nothrow @nogc { this.target = v; return this; } deprecated("Use the overload that accepts a `VersionRange` as 2nd argument") this(string package_, Dependency version_, Version target_version) { this.package_ = package_; this.version_ = version_; this.target = target_version; } deprecated("Use the overload that accepts a `VersionRange` as 2nd argument") this(string package_, Dependency version_, NativePath target_path) { this.package_ = package_; this.version_ = version_; this.target = target_path; } this(string package_, VersionRange src, Version target) { this.package_ = package_; this.source = src; this.target = target; } this(string package_, VersionRange src, NativePath target) { this.package_ = package_; this.source = src; this.target = target; } } deprecated("Use `PlacementLocation` instead") enum LocalPackageType : PlacementLocation { package_ = PlacementLocation.local, user = PlacementLocation.user, system = PlacementLocation.system, } private enum LocalPackagesFilename = "local-packages.json"; private enum LocalOverridesFilename = "local-overrides.json"; /** * A managed location, with packages, configuration, and overrides * * There exists three standards locations, listed in `PlacementLocation`. 
* The user one is the default, with the system and local one meeting * different needs. * * Each location has a root, under which the following may be found: * - A `packages/` directory, where packages are stored (see `packagePath`); * - A `local-packages.json` file, with extra search paths * and manually added packages (see `dub add-local`); * - A `local-overrides.json` file, with manually added overrides (`dub add-override`); * * Additionally, each location host a config file, * which is not managed by this module, but by dub itself. */ package struct Location { /// The absolute path to the root of the location NativePath packagePath; /// Configured (extra) search paths for this `Location` NativePath[] searchPath; /** * List of manually registered packages at this `Location` * and stored in `local-packages.json` */ Package[] localPackages; /// List of overrides stored at this `Location` PackageOverride_[] overrides; /** * List of packages stored under `packagePath` and automatically detected */ Package[] fromPath; this(NativePath path) @safe pure nothrow @nogc { this.packagePath = path; } void loadOverrides(PackageManager mgr) { this.overrides = null; auto ovrfilepath = this.packagePath ~ LocalOverridesFilename; if (mgr.fs.existsFile(ovrfilepath)) { logWarn("Found local override file: %s", ovrfilepath); logWarn(OverrideDepMsg); logWarn("Replace with a path-based dependency in your project or a custom cache path"); const text = mgr.fs.readText(ovrfilepath); auto json = parseJsonString(text, ovrfilepath.toNativeString()); foreach (entry; json) { PackageOverride_ ovr; ovr.package_ = entry["name"].get!string; ovr.source = VersionRange.fromString(entry["version"].get!string); if (auto pv = "targetVersion" in entry) ovr.target = Version(pv.get!string); if (auto pv = "targetPath" in entry) ovr.target = NativePath(pv.get!string); this.overrides ~= ovr; } } } private void writeOverrides(PackageManager mgr) { Json[] newlist; foreach (ovr; this.overrides) { auto jovr = 
Json.emptyObject; jovr["name"] = ovr.package_; jovr["version"] = ovr.source.toString(); ovr.target.match!( (NativePath path) { jovr["targetPath"] = path.toNativeString(); }, (Version vers) { jovr["targetVersion"] = vers.toString(); }, ); newlist ~= jovr; } auto path = this.packagePath; mgr.fs.mkdir(path); auto app = appender!string(); app.writePrettyJsonString(Json(newlist)); mgr.fs.writeFile(path ~ LocalOverridesFilename, app.data); } private void writeLocalPackageList(PackageManager mgr) { Json[] newlist; foreach (p; this.searchPath) { auto entry = Json.emptyObject; entry["name"] = "*"; entry["path"] = p.toNativeString(); newlist ~= entry; } foreach (p; this.localPackages) { if (p.parentPackage) continue; // do not store sub packages auto entry = Json.emptyObject; entry["name"] = p.name; entry["version"] = p.version_.toString(); entry["path"] = p.path.toNativeString(); newlist ~= entry; } NativePath path = this.packagePath; mgr.fs.mkdir(path); auto app = appender!string(); app.writePrettyJsonString(Json(newlist)); mgr.fs.writeFile(path ~ LocalPackagesFilename, app.data); } // load locally defined packages void scanLocalPackages(bool refresh, PackageManager manager) { NativePath list_path = this.packagePath; Package[] packs; NativePath[] paths; try { auto local_package_file = list_path ~ LocalPackagesFilename; if (!manager.fs.existsFile(local_package_file)) return; logDiagnostic("Loading local package map at %s", local_package_file.toNativeString()); const text = manager.fs.readText(local_package_file); auto packlist = parseJsonString( text, local_package_file.toNativeString()); enforce(packlist.type == Json.Type.array, LocalPackagesFilename ~ " must contain an array."); foreach (pentry; packlist) { try { auto name = pentry["name"].get!string; auto path = NativePath(pentry["path"].get!string); if (name == "*") { paths ~= path; } else { auto ver = Version(pentry["version"].get!string); Package pp; if (!refresh) { foreach (p; this.localPackages) if (p.path == path) 
{ pp = p; break; } } if (!pp) { auto infoFile = manager.findPackageFile(path); if (!infoFile.empty) pp = manager.load(path, infoFile); else { logWarn("Locally registered package %s %s was not found. Please run 'dub remove-local \"%s\"'.", name, ver, path.toNativeString()); // Store a dummy package pp = new Package(PackageRecipe(name), path); } } if (pp.name != name) logWarn("Local package at %s has different name than %s (%s)", path.toNativeString(), name, pp.name); pp.version_ = ver; manager.addPackages(packs, pp); } } catch (Exception e) { logWarn("Error adding local package: %s", e.msg); } } } catch (Exception e) { logDiagnostic("Loading of local package list at %s failed: %s", list_path.toNativeString(), e.msg); } this.localPackages = packs; this.searchPath = paths; } /** * Scan this location */ void scan(PackageManager mgr, bool refresh) { // If we're asked to refresh, reload the packages from scratch auto existing = refresh ? null : this.fromPath; if (this.packagePath !is NativePath.init) { // For the internal location, we use `fromPath` to store packages // loaded by the user (e.g. the project and its sub-packages), // so don't clean it. this.fromPath = null; } foreach (path; this.searchPath) this.scanPackageFolder(path, mgr, existing); if (this.packagePath !is NativePath.init) this.scanPackageFolder(this.packagePath, mgr, existing); } /** * Scan the content of a folder (`packagePath` or in `searchPaths`), * and add all packages that were found to this location. */ void scanPackageFolder(NativePath path, PackageManager mgr, Package[] existing_packages) { if (!mgr.fs.existsDirectory(path)) return; void loadInternal (NativePath pack_path, NativePath packageFile) { import std.algorithm.searching : find; // If the package has already been loaded, no need to re-load it. 
auto rng = existing_packages.find!(pp => pp.path == pack_path); if (!rng.empty) return mgr.addPackages(this.fromPath, rng.front); try { mgr.addPackages(this.fromPath, mgr.load(pack_path, packageFile)); } catch (ConfigException exc) { // Configy error message already include the path logError("Invalid recipe for local package: %S", exc); } catch (Exception e) { logError("Failed to load package in %s: %s", pack_path, e.msg); logDiagnostic("Full error: %s", e.toString().sanitize()); } } logDebug("iterating dir %s", path.toNativeString()); try foreach (pdir; mgr.fs.iterateDirectory(path)) { logDebug("iterating dir %s entry %s", path.toNativeString(), pdir.name); if (!pdir.isDirectory) continue; const pack_path = path ~ (pdir.name ~ "/"); auto packageFile = mgr.findPackageFile(pack_path); if (isManaged(path)) { // Old / flat directory structure, used in non-standard path // Packages are stored in $ROOT/$SOMETHING/` if (!packageFile.empty) { // Deprecated flat managed directory structure logWarn("Package at path '%s' should be under '%s'", pack_path.toNativeString().color(Mode.bold), (pack_path ~ "$VERSION" ~ pdir.name).toNativeString().color(Mode.bold)); logWarn("The package will no longer be detected starting from v1.42.0"); loadInternal(pack_path, packageFile); } else { // New managed structure: $ROOT/$NAME/$VERSION/$NAME // This is the most common code path // Iterate over versions of a package foreach (versdir; mgr.fs.iterateDirectory(pack_path)) { if (!versdir.isDirectory) continue; auto vers_path = pack_path ~ versdir.name ~ (pdir.name ~ "/"); if (!mgr.fs.existsDirectory(vers_path)) continue; packageFile = mgr.findPackageFile(vers_path); loadInternal(vers_path, packageFile); } } } else { // Unmanaged directories (dub add-path) are always stored as a // flat list of packages, as these are the working copies managed // by the user. 
The nested structure should not be supported, // even optionally, because that would lead to bogus "no package // file found" errors in case the internal directory structure // accidentally matches the $NAME/$VERSION/$NAME scheme if (!packageFile.empty) loadInternal(pack_path, packageFile); } } catch (Exception e) logDiagnostic("Failed to enumerate %s packages: %s", path.toNativeString(), e.toString()); } /** * Looks up already-loaded packages at a specific version * * Looks up a package according to this `Location`'s priority, * that is, packages from the search path and local packages * have the highest priority. * * Params: * name = The full name of the package to look up * ver = The version to look up * * Returns: * A `Package` if one was found, `null` if none exists. */ inout(Package) lookup(in PackageName name, in Version ver) inout { foreach (pkg; this.localPackages) if (pkg.name == name.toString() && pkg.version_.matches(ver, VersionMatchMode.standard)) return pkg; foreach (pkg; this.fromPath) { auto pvm = this.isManaged(pkg.basePackage.path) ? VersionMatchMode.strict : VersionMatchMode.standard; if (pkg.name == name.toString() && pkg.version_.matches(ver, pvm)) return pkg; } return null; } /** * Looks up a package, first in the list of loaded packages, * then directly on the file system. * * This function allows for lazy loading of packages, without needing to * first scan all the available locations (as `scan` does). * * Params: * name = The full name of the package to look up * vers = The version the package must match * mgr = The `PackageManager` to use for adding packages * * Returns: * A `Package` if one was found, `null` if none exists. 
*/ Package load (in PackageName name, Version vers, PackageManager mgr) { if (auto pkg = this.lookup(name, vers)) return pkg; string versStr = vers.toString(); const path = this.getPackagePath(name, versStr); if (!mgr.fs.existsDirectory(path)) return null; logDiagnostic("Lazily loading package %s:%s from %s", name.main, vers, path); auto p = mgr.load(path); enforce( p.version_ == vers, format("Package %s located in %s has a different version than its path: Got %s, expected %s", name.main, path, p.version_, vers)); return mgr.addPackagesAndResolveSubPackage(this.fromPath, p, name); } /** * Get the final destination a specific package needs to be stored in. * * Note that there needs to be an extra level for libraries like `ae` * which expects their containing folder to have an exact name and use * `importPath "../"`. * * Hence the final format returned is `$BASE/$NAME/$VERSION/$NAME`, * `$BASE` is `this.packagePath`. * * Params: * name = The package name - if the name is that of a subpackage, * only the path to the main package is returned, as the * subpackage path can only be known after reading the recipe. * vers = A version string. Typed as a string because git hashes * can be used with this function. * * Returns: * An absolute `NativePath` nested in this location. */ NativePath getPackagePath (in PackageName name, string vers) { NativePath result = this.packagePath ~ name.main.toString() ~ vers ~ name.main.toString(); result.endsWithSlash = true; return result; } /// Determines if a specific path is within a DUB managed Location. 
bool isManaged(NativePath path) const { return path.startsWith(this.packagePath); } } private immutable string OverrideDepMsg = "Overrides are deprecated as they are redundant with more fine-grained approaches"; dub-1.40.0/source/dub/packagesupplier.d000066400000000000000000000007151477246567400177750ustar00rootroot00000000000000/** +deprecated("Please use dub.packagesuppliers") Contains (remote) package supplier interface and implementations. +public import dub.packagesuppliers; Copyright: © 2012-2013 Matthias Dondorff, 2012-2016 Sönke Ludwig License: Subject to the terms of the MIT license, as written in the included LICENSE.txt file. Authors: Matthias Dondorff */ deprecated("Please use dub.packagesuppliers") module dub.packagesupplier; public import dub.packagesuppliers; dub-1.40.0/source/dub/packagesuppliers/000077500000000000000000000000001477246567400200105ustar00rootroot00000000000000dub-1.40.0/source/dub/packagesuppliers/fallback.d000066400000000000000000000043771477246567400217270ustar00rootroot00000000000000module dub.packagesuppliers.fallback; import dub.packagesuppliers.packagesupplier; import std.typecons : AutoImplement; package abstract class AbstractFallbackPackageSupplier : PackageSupplier { protected import core.time : minutes; protected import std.datetime : Clock, SysTime; static struct Pair { PackageSupplier ps; SysTime failTime; } protected Pair[] m_suppliers; this(PackageSupplier[] suppliers) { assert(suppliers.length); m_suppliers.length = suppliers.length; foreach (i, ps; suppliers) m_suppliers[i].ps = ps; } override @property string description() { import std.algorithm.iteration : map; import std.format : format; return format("%s (fallbacks %-(%s, %))", m_suppliers[0].ps.description, m_suppliers[1 .. 
$].map!(pair => pair.ps.description)); } // Workaround https://issues.dlang.org/show_bug.cgi?id=2525 abstract override Version[] getVersions(in PackageName name); abstract override ubyte[] fetchPackage(in PackageName name, in VersionRange dep, bool pre_release); abstract override Json fetchPackageRecipe(in PackageName name, in VersionRange dep, bool pre_release); abstract override SearchResult[] searchPackages(string query); } /** Combines two package suppliers and uses the second as fallback to handle failures. Assumes that both registries serve the same packages (--mirror). */ package(dub) alias FallbackPackageSupplier = AutoImplement!(AbstractFallbackPackageSupplier, fallback); private template fallback(T, alias func) { import std.format : format; enum fallback = q{ import dub.internal.logging : logDebug; Exception firstEx; try return m_suppliers[0].ps.%1$s(args); catch (Exception e) { logDebug("Package supplier %%s failed with '%%s', trying fallbacks.", m_suppliers[0].ps.description, e.msg); firstEx = e; } immutable now = Clock.currTime; foreach (ref pair; m_suppliers[1 .. $]) { if (pair.failTime > now - 10.minutes) continue; try { scope (success) logDebug("Fallback %%s succeeded", pair.ps.description); return pair.ps.%1$s(args); } catch (Exception e) { pair.failTime = now; logDebug("Fallback package supplier %%s failed with '%%s'.", pair.ps.description, e.msg); } } throw firstEx; }.format(__traits(identifier, func)); } dub-1.40.0/source/dub/packagesuppliers/filesystem.d000066400000000000000000000063731477246567400223520ustar00rootroot00000000000000module dub.packagesuppliers.filesystem; import dub.internal.logging; import dub.internal.vibecompat.inet.path; import dub.packagesuppliers.packagesupplier; import std.exception : enforce; /** File system based package supplier. This package supplier searches a certain directory for files with names of the form "[package name]-[version].zip". 
*/ class FileSystemPackageSupplier : PackageSupplier { private { NativePath m_path; } this(NativePath root) { m_path = root; } override @property string description() { return "file repository at "~m_path.toNativeString(); } Version[] getVersions(in PackageName name) { import std.algorithm.sorting : sort; import std.file : dirEntries, DirEntry, SpanMode; import std.conv : to; import dub.semver : isValidVersion; Version[] ret; const zipFileGlob = name.main.toString() ~ "?*.zip"; foreach (DirEntry d; dirEntries(m_path.toNativeString(), zipFileGlob, SpanMode.shallow)) { NativePath p = NativePath(d.name); auto vers = p.head.name[name.main.toString().length+1..$-4]; if (!isValidVersion(vers)) { logDebug("Ignoring entry '%s' because it isn't a version of package '%s'", p, name.main); continue; } logDebug("Entry: %s", p); logDebug("Version: %s", vers); ret ~= Version(vers); } ret.sort(); return ret; } override ubyte[] fetchPackage(in PackageName name, in VersionRange dep, bool pre_release) { import dub.internal.vibecompat.core.file : readFile, existsFile; logInfo("Storing package '%s', version requirements: %s", name.main, dep); auto filename = bestPackageFile(name, dep, pre_release); enforce(existsFile(filename)); return readFile(filename); } override Json fetchPackageRecipe(in PackageName name, in VersionRange dep, bool pre_release) { import std.array : split; import std.path : stripExtension; import std.algorithm : startsWith, endsWith; import dub.internal.utils : packageInfoFileFromZip; import dub.recipe.io : parsePackageRecipe; import dub.recipe.json : toJson; auto filePath = bestPackageFile(name, dep, pre_release); string packageFileName; string packageFileContent = packageInfoFileFromZip(filePath, packageFileName); auto recipe = parsePackageRecipe(packageFileContent, packageFileName); Json json = toJson(recipe); auto basename = filePath.head.name; enforce(basename.endsWith(".zip"), "Malformed package filename: " ~ filePath.toNativeString); 
enforce(basename.startsWith(name.main.toString()), "Malformed package filename: " ~ filePath.toNativeString); json["version"] = basename[name.main.toString().length + 1 .. $-4]; return json; } SearchResult[] searchPackages(string query) { // TODO! return null; } private NativePath bestPackageFile(in PackageName name, in VersionRange dep, bool pre_release) { import std.algorithm.iteration : filter; import std.array : array; import std.format : format; NativePath toPath(Version ver) { return m_path ~ "%s-%s.zip".format(name.main, ver); } auto versions = getVersions(name).filter!(v => dep.matches(v)).array; enforce(versions.length > 0, format("No package %s found matching %s", name.main, dep)); foreach_reverse (ver; versions) { if (pre_release || !ver.isPreRelease) return toPath(ver); } return toPath(versions[$-1]); } } dub-1.40.0/source/dub/packagesuppliers/maven.d000066400000000000000000000073641477246567400212750ustar00rootroot00000000000000module dub.packagesuppliers.maven; import dub.packagesuppliers.packagesupplier; /** Maven repository based package supplier. This package supplier connects to a maven repository to search for available packages. 
*/ class MavenRegistryPackageSupplier : PackageSupplier { import dub.internal.utils : retryDownload, HTTPStatusException; import dub.internal.vibecompat.data.json : serializeToJson; import dub.internal.vibecompat.inet.url : URL; import dub.internal.logging; import std.datetime : Clock, Duration, hours, SysTime, UTC; private { enum httpTimeout = 16; URL m_mavenUrl; struct CacheEntry { Json data; SysTime cacheTime; } CacheEntry[PackageName] m_metadataCache; Duration m_maxCacheTime; } this(URL mavenUrl) { m_mavenUrl = mavenUrl; m_maxCacheTime = 24.hours(); } override @property string description() { return "maven repository at "~m_mavenUrl.toString(); } override Version[] getVersions(in PackageName name) { import std.algorithm.sorting : sort; auto md = getMetadata(name.main); if (md.type == Json.Type.null_) return null; Version[] ret; foreach (json; md["versions"]) { auto cur = Version(json["version"].get!string); ret ~= cur; } ret.sort(); return ret; } override ubyte[] fetchPackage(in PackageName name, in VersionRange dep, bool pre_release) { import std.format : format; auto md = getMetadata(name.main); Json best = getBestPackage(md, name.main, dep, pre_release); if (best.type == Json.Type.null_) return null; auto vers = best["version"].get!string; auto url = m_mavenUrl ~ NativePath( "%s/%s/%s-%s.zip".format(name.main, vers, name.main, vers)); try { return retryDownload(url, 3, httpTimeout); } catch(HTTPStatusException e) { if (e.status == 404) throw e; else logDebug("Failed to download package %s from %s", name.main, url); } catch(Exception e) { logDebug("Failed to download package %s from %s", name.main, url); } throw new Exception("Failed to download package %s from %s".format(name.main, url)); } override Json fetchPackageRecipe(in PackageName name, in VersionRange dep, bool pre_release) { auto md = getMetadata(name); return getBestPackage(md, name, dep, pre_release); } private Json getMetadata(in PackageName name) { import dub.internal.undead.xml; auto now = 
Clock.currTime(UTC()); if (auto pentry = name.main in m_metadataCache) { if (pentry.cacheTime + m_maxCacheTime > now) return pentry.data; m_metadataCache.remove(name.main); } auto url = m_mavenUrl ~ NativePath(name.main.toString() ~ "/maven-metadata.xml"); logDebug("Downloading maven metadata for %s", name.main); string xmlData; try xmlData = cast(string)retryDownload(url, 3, httpTimeout); catch(HTTPStatusException e) { if (e.status == 404) { logDebug("Maven metadata %s not found at %s (404): %s", name.main, description, e.msg); return Json(null); } else throw e; } auto json = Json([ "name": Json(name.main.toString()), "versions": Json.emptyArray ]); auto xml = new DocumentParser(xmlData); xml.onStartTag["versions"] = (ElementParser xml) { xml.onEndTag["version"] = (in Element e) { json["versions"] ~= serializeToJson([ "name": name.main.toString(), "version": e.text, ]); }; xml.parse(); }; xml.parse(); m_metadataCache[name.main] = CacheEntry(json, now); return json; } SearchResult[] searchPackages(string query) { // Only exact search is supported // This enables retrieval of dub packages on dub run auto md = getMetadata(PackageName(query)); if (md.type == Json.Type.null_) return null; auto json = getBestPackage(md, PackageName(query), VersionRange.Any, true); return [SearchResult(json["name"].opt!string, "", json["version"].opt!string)]; } } dub-1.40.0/source/dub/packagesuppliers/package.d000066400000000000000000000005171477246567400215530ustar00rootroot00000000000000module dub.packagesuppliers; /** Contains (remote) package supplier interface and implementations. 
*/ public import dub.packagesuppliers.fallback; public import dub.packagesuppliers.filesystem; public import dub.packagesuppliers.packagesupplier; public import dub.packagesuppliers.maven; public import dub.packagesuppliers.registry; dub-1.40.0/source/dub/packagesuppliers/packagesupplier.d000066400000000000000000000105041477246567400233340ustar00rootroot00000000000000module dub.packagesuppliers.packagesupplier; public import dub.dependency : PackageName, Dependency, Version, VersionRange; import dub.dependency : visit; public import dub.internal.vibecompat.core.file : NativePath; public import dub.internal.vibecompat.data.json : Json; /** Base interface for remote package suppliers. Provides functionality necessary to query package versions, recipes and contents. */ interface PackageSupplier { /// Represents a single package search result. static struct SearchResult { string name, description, version_; } /// Returns a human-readable representation of the package supplier. @property string description(); /** Retrieves a list of all available versions(/branches) of a package. Throws: Throws an exception if the package name is not known, or if an error occurred while retrieving the version list. */ deprecated("Use `getVersions(PackageName)` instead") final Version[] getVersions(string name) { return this.getVersions(PackageName(name)); } Version[] getVersions(in PackageName name); /** Downloads a package and returns its binary content Params: name = Name of the package to retrieve dep = Version constraint to match against pre_release = If true, matches the latest pre-release version. Otherwise prefers stable versions. 
*/ ubyte[] fetchPackage(in PackageName name, in VersionRange dep, bool pre_release); deprecated("Use `writeFile(path, fetchPackage(PackageName, VersionRange, bool))` instead") final void fetchPackage(in NativePath path, in PackageName name, in VersionRange dep, bool pre_release) { import dub.internal.vibecompat.core.file : writeFile; if (auto res = this.fetchPackage(name, dep, pre_release)) writeFile(path, res); } deprecated("Use `fetchPackage(NativePath, PackageName, VersionRange, bool)` instead") final void fetchPackage(NativePath path, string name, Dependency dep, bool pre_release) { return dep.visit!( (const VersionRange rng) { return this.fetchPackage(path, PackageName(name), rng, pre_release); }, (any) { assert(0, "Trying to fetch a package with a non-version dependency: " ~ any.toString()); }, ); } /** Retrieves only the recipe of a particular package. Params: package_id = Name of the package of which to retrieve the recipe dep = Version constraint to match against pre_release = If true, matches the latest pre-release version. Otherwise prefers stable versions. */ Json fetchPackageRecipe(in PackageName name, in VersionRange dep, bool pre_release); deprecated("Use `fetchPackageRecipe(PackageName, VersionRange, bool)` instead") final Json fetchPackageRecipe(string name, Dependency dep, bool pre_release) { return dep.visit!( (const VersionRange rng) { return this.fetchPackageRecipe(PackageName(name), rng, pre_release); }, (any) { return Json.init; }, ); } /** Searches for packages matching the given search query term. Search queries are currently a simple list of words separated by white space. Results will get ordered from best match to worst. */ SearchResult[] searchPackages(string query); } // TODO: Could drop the "best package" behavior and let retrievePackage/ // getPackageDescription take a Version instead of Dependency. 
But note // this means that two requests to the registry are necessary to retrieve // a package recipe instead of one (first get version list, then the // package recipe) package Json getBestPackage(Json metadata, in PackageName name, in VersionRange dep, bool pre_release) { import std.exception : enforce; import std.format : format; if (metadata.type == Json.Type.null_) return metadata; Json best = null; Version bestver; foreach (json; metadata["versions"]) { auto cur = Version(json["version"].get!string); if (!dep.matches(cur)) continue; if (best == null) best = json; else if (pre_release) { if (cur > bestver) best = json; } else if (bestver.isPreRelease) { if (!cur.isPreRelease || cur > bestver) best = json; } else if (!cur.isPreRelease && cur > bestver) best = json; bestver = Version(cast(string)best["version"]); } enforce(best != null, "No package candidate found for %s@%s".format(name.main, dep)); return best; } dub-1.40.0/source/dub/packagesuppliers/registry.d000066400000000000000000000075621477246567400220370ustar00rootroot00000000000000module dub.packagesuppliers.registry; import dub.dependency; import dub.packagesuppliers.packagesupplier; package enum PackagesPath = "packages"; /** Online registry based package supplier. This package supplier connects to an online registry (e.g. $(LINK https://code.dlang.org/)) to search for available packages. 
*/ class RegistryPackageSupplier : PackageSupplier { import dub.internal.utils : retryDownload, HTTPStatusException; import dub.internal.vibecompat.data.json : parseJson, parseJsonString, serializeToJson; import dub.internal.vibecompat.inet.url : URL; import dub.internal.logging; import std.uri : encodeComponent; import std.datetime : Clock, Duration, hours, SysTime, UTC; private { URL m_registryUrl; struct CacheEntry { Json data; SysTime cacheTime; } CacheEntry[PackageName] m_metadataCache; Duration m_maxCacheTime; } this(URL registry) { m_registryUrl = registry; m_maxCacheTime = 24.hours(); } override @property string description() { return "registry at "~m_registryUrl.toString(); } override Version[] getVersions(in PackageName name) { import std.algorithm.sorting : sort; auto md = getMetadata(name); if (md.type == Json.Type.null_) return null; Version[] ret; foreach (json; md["versions"]) { auto cur = Version(cast(string)json["version"]); ret ~= cur; } ret.sort(); return ret; } auto genPackageDownloadUrl(in PackageName name, in VersionRange dep, bool pre_release) { import std.array : replace; import std.format : format; import std.typecons : Nullable; auto md = getMetadata(name); Json best = getBestPackage(md, name, dep, pre_release); Nullable!URL ret; if (best.type != Json.Type.null_) { auto vers = best["version"].get!string; ret = m_registryUrl ~ NativePath( "%s/%s/%s.zip".format(PackagesPath, name.main, vers)); } return ret; } override ubyte[] fetchPackage(in PackageName name, in VersionRange dep, bool pre_release) { import std.format : format; auto url = genPackageDownloadUrl(name, dep, pre_release); if(url.isNull) return null; try { return retryDownload(url.get); } catch(HTTPStatusException e) { if (e.status == 404) throw e; else logDebug("Failed to download package %s from %s", name.main, url); } catch(Exception e) { logDebug("Failed to download package %s from %s", name.main, url); } throw new Exception("Failed to download package %s from 
%s".format(name.main, url)); } override Json fetchPackageRecipe(in PackageName name, in VersionRange dep, bool pre_release) { auto md = getMetadata(name); return getBestPackage(md, name, dep, pre_release); } private Json getMetadata(in PackageName name) { auto now = Clock.currTime(UTC()); if (auto pentry = name.main in m_metadataCache) { if (pentry.cacheTime + m_maxCacheTime > now) return pentry.data; m_metadataCache.remove(name.main); } auto url = m_registryUrl ~ NativePath("api/packages/infos"); url.queryString = "packages=" ~ encodeComponent(`["` ~ name.main.toString() ~ `"]`) ~ "&include_dependencies=true&minimize=true"; logDebug("Downloading metadata for %s", name.main); string jsonData; jsonData = cast(string)retryDownload(url); Json json = parseJsonString(jsonData, url.toString()); foreach (pkg, info; json.get!(Json[string])) { logDebug("adding %s to metadata cache", pkg); m_metadataCache[PackageName(pkg)] = CacheEntry(info, now); } return json[name.main.toString()]; } SearchResult[] searchPackages(string query) { import std.array : array; import std.algorithm.iteration : map; import std.uri : encodeComponent; auto url = m_registryUrl; url.localURI = "/api/packages/search?q="~encodeComponent(query); string data; data = cast(string)retryDownload(url); return data.parseJson.opt!(Json[]) .map!(j => SearchResult(j["name"].opt!string, j["description"].opt!string, j["version"].opt!string)) .array; } } dub-1.40.0/source/dub/platform.d000066400000000000000000000232161477246567400164430ustar00rootroot00000000000000/** Build platform identification and specification matching. This module is useful for determining the build platform for a certain machine and compiler invocation. Example applications include classifying CI slave machines. It also contains means to match build platforms against a platform specification string as used in package recipes. Copyright: © 2012-2017 rejectedsoftware e.K. 
License: Subject to the terms of the MIT license, as written in the included LICENSE.txt file. Authors: Sönke Ludwig */ module dub.platform; import std.array; // archCheck, compilerCheck, and platformCheck are used below and in // generatePlatformProbeFile, so they've been extracted into these strings // that can be reused. // Try to not use phobos in the probes to avoid long import times. /// private enum string platformCheck = q{ string[] ret; version(Windows) ret ~= "windows"; version(linux) ret ~= "linux"; version(Posix) ret ~= "posix"; version(OSX) ret ~= ["osx", "darwin"]; version(iOS) ret ~= ["ios", "darwin"]; version(TVOS) ret ~= ["tvos", "darwin"]; version(WatchOS) ret ~= ["watchos", "darwin"]; version(FreeBSD) ret ~= "freebsd"; version(OpenBSD) ret ~= "openbsd"; version(NetBSD) ret ~= "netbsd"; version(DragonFlyBSD) ret ~= "dragonflybsd"; version(BSD) ret ~= "bsd"; version(Solaris) ret ~= "solaris"; version(AIX) ret ~= "aix"; version(Haiku) ret ~= "haiku"; version(SkyOS) ret ~= "skyos"; version(SysV3) ret ~= "sysv3"; version(SysV4) ret ~= "sysv4"; version(Hurd) ret ~= "hurd"; version(Android) ret ~= "android"; version(Cygwin) ret ~= "cygwin"; version(MinGW) ret ~= "mingw"; version(PlayStation4) ret ~= "playstation4"; version(WebAssembly) ret ~= "wasm"; return ret; }; /// private enum string archCheck = q{ string[] ret; version(X86) ret ~= "x86"; // Hack: see #1535 // Makes "x86_omf" available as a platform specifier in the package recipe version(X86) version(CRuntime_DigitalMars) ret ~= "x86_omf"; // Hack: see #1059 // When compiling with --arch=x86_mscoff build_platform.architecture is equal to ["x86"] and canFind below is false. // This hack prevents unnecessary warning 'Failed to apply the selected architecture x86_mscoff. Got ["x86"]'. 
// And also makes "x86_mscoff" available as a platform specifier in the package recipe version(X86) version(CRuntime_Microsoft) ret ~= "x86_mscoff"; version(X86_64) ret ~= "x86_64"; version(ARM) ret ~= "arm"; version(AArch64) ret ~= "aarch64"; version(ARM_Thumb) ret ~= "arm_thumb"; version(ARM_SoftFloat) ret ~= "arm_softfloat"; version(ARM_HardFloat) ret ~= "arm_hardfloat"; version(PPC) ret ~= "ppc"; version(PPC_SoftFP) ret ~= "ppc_softfp"; version(PPC_HardFP) ret ~= "ppc_hardfp"; version(PPC64) ret ~= "ppc64"; version(IA64) ret ~= "ia64"; version(MIPS) ret ~= "mips"; version(MIPS32) ret ~= "mips32"; version(MIPS64) ret ~= "mips64"; version(MIPS_O32) ret ~= "mips_o32"; version(MIPS_N32) ret ~= "mips_n32"; version(MIPS_O64) ret ~= "mips_o64"; version(MIPS_N64) ret ~= "mips_n64"; version(MIPS_EABI) ret ~= "mips_eabi"; version(MIPS_NoFloat) ret ~= "mips_nofloat"; version(MIPS_SoftFloat) ret ~= "mips_softfloat"; version(MIPS_HardFloat) ret ~= "mips_hardfloat"; version(SPARC) ret ~= "sparc"; version(SPARC_V8Plus) ret ~= "sparc_v8plus"; version(SPARC_SoftFP) ret ~= "sparc_softfp"; version(SPARC_HardFP) ret ~= "sparc_hardfp"; version(SPARC64) ret ~= "sparc64"; version(S390) ret ~= "s390"; version(S390X) ret ~= "s390x"; version(HPPA) ret ~= "hppa"; version(HPPA64) ret ~= "hppa64"; version(SH) ret ~= "sh"; version(SH64) ret ~= "sh64"; version(Alpha) ret ~= "alpha"; version(Alpha_SoftFP) ret ~= "alpha_softfp"; version(Alpha_HardFP) ret ~= "alpha_hardfp"; version(LoongArch32) ret ~= "loongarch32"; version(LoongArch64) ret ~= "loongarch64"; version(LoongArch_SoftFloat) ret ~= "loongarch_softfloat"; version(LoongArch_HardFloat) ret ~= "loongarch_hardfloat"; return ret; }; /// private enum string compilerCheck = q{ version(DigitalMars) return "dmd"; else version(GNU) return "gdc"; else version(LDC) return "ldc"; else version(SDC) return "sdc"; else return null; }; /// private enum string compilerCheckPragmas = q{ version(DigitalMars) pragma(msg, ` "dmd"`); else version(GNU) 
pragma(msg, ` "gdc"`); else version(LDC) pragma(msg, ` "ldc"`); else version(SDC) pragma(msg, ` "sdc"`); }; /// private, converts the above appender strings to pragmas string pragmaGen(string str) { import std.string : replace; return str.replace("return ret;", "").replace("string[] ret;", "").replace(`["`, `"`).replace(`", "`,`" "`).replace(`"]`, `"`).replace(`;`, "`);").replace("ret ~= ", "pragma(msg, ` "); } /** Determines the full build platform used for the current build. Note that the `BuildPlatform.compilerBinary` field will be left empty. See_Also: `determinePlatform`, `determineArchitecture`, `determineCompiler` */ BuildPlatform determineBuildPlatform() { BuildPlatform ret; ret.platform = determinePlatform(); ret.architecture = determineArchitecture(); ret.compiler = determineCompiler(); ret.frontendVersion = __VERSION__; return ret; } /** Returns a list of platform identifiers that apply to the current build. Example results are `["windows"]` or `["posix", "osx"]`. The identifiers correspond to the compiler defined version constants built into the language, except that they are converted to lower case. See_Also: `determineBuildPlatform` */ string[] determinePlatform() { mixin(platformCheck); } /** Returns a list of architecture identifiers that apply to the current build. Example results are `["x86_64"]` or `["arm", "arm_softfloat"]`. The identifiers correspond to the compiler defined version constants built into the language, except that they are converted to lower case. See_Also: `determineBuildPlatform` */ string[] determineArchitecture() { mixin(archCheck); } /** Determines the canonical compiler name used for the current build. The possible values currently are "dmd", "gdc", "ldc" or "sdc". If an unknown compiler is used, this function will return an empty string. See_Also: `determineBuildPlatform` */ string determineCompiler() { mixin(compilerCheck); } /** Matches a platform specification string against a build platform. 
Specifications are build upon the following scheme, where each component is optional (indicated by []), but the order is obligatory: "[-platform][-architecture][-compiler]" So the following strings are valid specifications: `"-windows-x86-dmd"`, `"-dmd"`, `"-arm"`, `"-arm-dmd"`, `"-windows-dmd"` Params: platform = The build platform to match against the platform specification specification = The specification being matched. It must either be an empty string or start with a dash. Returns: `true` if the given specification matches the build platform, `false` otherwise. Using an empty string as the platform specification will always result in a match. */ bool matchesSpecification(in BuildPlatform platform, const(char)[] specification) { import std.string : chompPrefix, format; import std.algorithm : canFind, splitter; import std.exception : enforce; if (specification.empty) return true; if (platform == BuildPlatform.any) return true; auto splitted = specification.chompPrefix("-").splitter('-'); enforce(!splitted.empty, format("Platform specification, if present, must not be empty: \"%s\"", specification)); if (platform.platform.canFind(splitted.front)) { splitted.popFront(); if (splitted.empty) return true; } if (platform.architecture.canFind(splitted.front)) { splitted.popFront(); if (splitted.empty) return true; } if (platform.compiler == splitted.front) { splitted.popFront(); enforce(splitted.empty, "No valid specification! 
The compiler has to be the last element: " ~ specification); return true; } return false; } /// unittest { auto platform = BuildPlatform(["posix", "linux"], ["x86_64"], "dmd"); assert(platform.matchesSpecification("")); assert(platform.matchesSpecification("posix")); assert(platform.matchesSpecification("linux")); assert(platform.matchesSpecification("linux-dmd")); assert(platform.matchesSpecification("linux-x86_64-dmd")); assert(platform.matchesSpecification("x86_64")); assert(!platform.matchesSpecification("windows")); assert(!platform.matchesSpecification("ldc")); assert(!platform.matchesSpecification("windows-dmd")); // Before PR#2279, a leading '-' was required assert(platform.matchesSpecification("-x86_64")); } /// Represents a platform a package can be build upon. struct BuildPlatform { /// Special constant used to denote matching any build platform. enum any = BuildPlatform(null, null, null, null, -1); /// Platform identifiers, e.g. ["posix", "windows"] string[] platform; /// CPU architecture identifiers, e.g. ["x86", "x86_64"] string[] architecture; /// Canonical compiler name e.g. "dmd" string compiler; /// Compiler binary name e.g. "ldmd2" string compilerBinary; /// Compiled frontend version (e.g. `2067` for frontend versions 2.067.x) int frontendVersion; /// Compiler version e.g. 
"1.11.0" string compilerVersion; /// Frontend version string from frontendVersion /// e.g: 2067 => "2.067" string frontendVersionString() const { import std.format : format; const maj = frontendVersion / 1000; const min = frontendVersion % 1000; return format("%d.%03d", maj, min); } /// unittest { BuildPlatform bp; bp.frontendVersion = 2067; assert(bp.frontendVersionString == "2.067"); } /// Checks to see if platform field contains windows bool isWindows() const { import std.algorithm : canFind; return this.platform.canFind("windows"); } /// unittest { BuildPlatform bp; bp.platform = ["windows"]; assert(bp.isWindows); bp.platform = ["posix"]; assert(!bp.isWindows); } } dub-1.40.0/source/dub/project.d000066400000000000000000002335521477246567400162730ustar00rootroot00000000000000/** Representing a full project, with a root Package and several dependencies. Copyright: © 2012-2013 Matthias Dondorff, 2012-2016 Sönke Ludwig License: Subject to the terms of the MIT license, as written in the included LICENSE.txt file. Authors: Matthias Dondorff, Sönke Ludwig */ module dub.project; import dub.compilers.compiler; import dub.dependency; import dub.description; import dub.generators.generator; import dub.internal.utils; import dub.internal.vibecompat.core.file; import dub.internal.vibecompat.data.json; import dub.internal.vibecompat.inet.path; import dub.internal.logging; import dub.package_; import dub.packagemanager; import dub.recipe.selection; import dub.internal.configy.Read; import std.algorithm; import std.array; import std.conv : to; import std.datetime; import std.encoding : sanitize; import std.exception : enforce; import std.string; /** Represents a full project, a root package with its dependencies and package selection. All dependencies must be available locally so that the package dependency graph can be built. Use `Project.reinit` if necessary for reloading dependencies after more packages are available. 
*/ class Project { private { PackageManager m_packageManager; Package m_rootPackage; Package[] m_dependencies; Package[string] m_dependenciesByName; Package[][Package] m_dependees; SelectedVersions m_selections; string[] m_missingDependencies; string[string] m_overriddenConfigs; } /** Loads a project. Params: package_manager = Package manager instance to use for loading dependencies project_path = Path of the root package to load pack = An existing `Package` instance to use as the root package */ deprecated("Load the package using `PackageManager.getOrLoadPackage` then call the `(PackageManager, Package)` overload") this(PackageManager package_manager, NativePath project_path) { Package pack; auto packageFile = Package.findPackageFile(project_path); if (packageFile.empty) { logWarn("There was no package description found for the application in '%s'.", project_path.toNativeString()); pack = new Package(PackageRecipe.init, project_path); } else { pack = package_manager.getOrLoadPackage(project_path, packageFile, false, StrictMode.Warn); } this(package_manager, pack); } /// Ditto this(PackageManager package_manager, Package pack) { auto selections = Project.loadSelections(pack.path, package_manager); this(package_manager, pack, selections); } /// ditto this(PackageManager package_manager, Package pack, SelectedVersions selections) { m_packageManager = package_manager; m_rootPackage = pack; m_selections = selections; reinit(); } /** * Loads a project's `dub.selections.json` and returns it * * This function will load `dub.selections.json` from the path at which * `pack` is located, and returned the resulting `SelectedVersions`. * If no `dub.selections.json` is found, an empty `SelectedVersions` * is returned. * * Params: * packPath = Absolute path of the Package to load the selection file from. * * Returns: * Always a non-null instance. 
*/ static package SelectedVersions loadSelections(in NativePath packPath, PackageManager mgr) { import dub.version_; import dub.internal.dyaml.stdsumtype; auto lookupResult = mgr.readSelections(packPath); if (lookupResult.isNull()) // no file, or parsing error (displayed to the user) return new SelectedVersions(); auto r = lookupResult.get(); return r.selectionsFile.content.match!( (Selections!0 s) { logWarnTag("Unsupported version", "File %s has fileVersion %s, which is not yet supported by DUB %s.", r.absolutePath, s.fileVersion, dubVersion); logWarn("Ignoring selections file. Use a newer DUB version " ~ "and set the appropriate toolchainRequirements in your recipe file"); return new SelectedVersions(); }, (Selections!1 s) { auto selectionsDir = r.absolutePath.parentPath; return new SelectedVersions(s, selectionsDir.relativeTo(packPath)); }, ); } /** List of all resolved dependencies. This includes all direct and indirect dependencies of all configurations combined. Optional dependencies that were not chosen are not included. */ @property const(Package[]) dependencies() const { return m_dependencies; } /// The root package of the project. @property inout(Package) rootPackage() inout { return m_rootPackage; } /// The versions to use for all dependencies. Call reinit() after changing these. @property inout(SelectedVersions) selections() inout { return m_selections; } /// Package manager instance used by the project. deprecated("Use `Dub.packageManager` instead") @property inout(PackageManager) packageManager() inout { return m_packageManager; } /** Determines if all dependencies necessary to build have been collected. If this function returns `false`, it may be necessary to add more entries to `selections`, or to use `Dub.upgrade` to automatically select all missing dependencies. */ bool hasAllDependencies() const { return m_missingDependencies.length == 0; } /// Sorted list of missing dependencies. 
string[] missingDependencies() { return m_missingDependencies; } /** Allows iteration of the dependency tree in topological order */ int delegate(int delegate(ref Package)) getTopologicalPackageList(bool children_first = false, Package root_package = null, string[string] configs = null) { // ugly way to avoid code duplication since inout isn't compatible with foreach type inference return cast(int delegate(int delegate(ref Package)))(cast(const)this).getTopologicalPackageList(children_first, root_package, configs); } /// ditto int delegate(int delegate(ref const Package)) getTopologicalPackageList(bool children_first = false, in Package root_package = null, string[string] configs = null) const { const(Package) rootpack = root_package ? root_package : m_rootPackage; int iterator(int delegate(ref const Package) del) { int ret = 0; bool[const(Package)] visited; void perform_rec(in Package p){ if( p in visited ) return; visited[p] = true; if( !children_first ){ ret = del(p); if( ret ) return; } auto cfg = configs.get(p.name, null); PackageDependency[] deps; if (!cfg.length) deps = p.getAllDependencies(); else { auto depmap = p.getDependencies(cfg); deps = depmap.byKey.map!(k => PackageDependency(PackageName(k), depmap[k])).array; } deps.sort!((a, b) => a.name.toString() < b.name.toString()); foreach (d; deps) { auto dependency = getDependency(d.name.toString(), true); assert(dependency || d.spec.optional, format("Non-optional dependency '%s' of '%s' not found in dependency tree!?.", d.name, p.name)); if(dependency) perform_rec(dependency); if( ret ) return; } if( children_first ){ ret = del(p); if( ret ) return; } } perform_rec(rootpack); return ret; } return &iterator; } /** Retrieves a particular dependency by name. Params: name = (Qualified) package name of the dependency is_optional = If set to true, will return `null` for unsatisfiable dependencies instead of throwing an exception. 
*/ inout(Package) getDependency(string name, bool is_optional) inout { if (auto pp = name in m_dependenciesByName) return *pp; if (!is_optional) throw new Exception("Unknown dependency: "~name); else return null; } /** Returns the name of the default build configuration for the specified target platform. Params: platform = The target build platform allow_non_library_configs = If set to true, will use the first possible configuration instead of the first "executable" configuration. */ string getDefaultConfiguration(in BuildPlatform platform, bool allow_non_library_configs = true) const { auto cfgs = getPackageConfigs(platform, null, allow_non_library_configs); return cfgs[m_rootPackage.name]; } /** Overrides the configuration chosen for a particular package in the dependency graph. Setting a certain configuration here is equivalent to removing all but one configuration from the package. Params: package_ = The package for which to force selecting a certain dependency config = Name of the configuration to force */ void overrideConfiguration(string package_, string config) { auto p = getDependency(package_, true); enforce(p !is null, format("Package '%s', marked for configuration override, is not present in dependency graph.", package_)); enforce(p.configurations.canFind(config), format("Package '%s' does not have a configuration named '%s'.", package_, config)); m_overriddenConfigs[package_] = config; } /** Adds a test runner configuration for the root package. 
Params: settings = The generator settings to use generate_main = Whether to generate the main.d file base_config = Optional base configuration custom_main_file = Optional path to file with custom main entry point Returns: Name of the added test runner configuration, or null for base configurations with target type `none` */ string addTestRunnerConfiguration(in GeneratorSettings settings, bool generate_main = true, string base_config = "", NativePath custom_main_file = NativePath()) { if (base_config.length == 0) { // if a custom main file was given, favor the first library configuration, so that it can be applied if (!custom_main_file.empty) base_config = getDefaultConfiguration(settings.platform, false); // else look for a "unittest" configuration if (!base_config.length && rootPackage.configurations.canFind("unittest")) base_config = "unittest"; // if not found, fall back to the first "library" configuration if (!base_config.length) base_config = getDefaultConfiguration(settings.platform, false); // if still nothing found, use the first executable configuration if (!base_config.length) base_config = getDefaultConfiguration(settings.platform, true); } BuildSettings lbuildsettings = settings.buildSettings.dup; addBuildSettings(lbuildsettings, settings, base_config, null, true); if (lbuildsettings.targetType == TargetType.none) { logInfo(`Configuration '%s' has target type "none". Skipping test runner configuration.`, base_config); return null; } if (lbuildsettings.targetType == TargetType.executable && base_config == "unittest") { if (!custom_main_file.empty) logWarn("Ignoring custom main file."); return base_config; } if (lbuildsettings.sourceFiles.empty) { logInfo(`No source files found in configuration '%s'. 
Falling back to default configuration for test runner.`, base_config); if (!custom_main_file.empty) logWarn("Ignoring custom main file."); return getDefaultConfiguration(settings.platform); } const config = format("%s-test-%s", rootPackage.name.replace(".", "-").replace(":", "-"), base_config); logInfo(`Generating test runner configuration '%s' for '%s' (%s).`, config, base_config, lbuildsettings.targetType); BuildSettingsTemplate tcinfo = rootPackage.recipe.getConfiguration(base_config).buildSettings.dup; tcinfo.targetType = TargetType.executable; // set targetName unless specified explicitly in unittest base configuration if (tcinfo.targetName.empty || base_config != "unittest") tcinfo.targetName = config; auto mainfil = tcinfo.mainSourceFile; if (!mainfil.length) mainfil = rootPackage.recipe.buildSettings.mainSourceFile; string custommodname; if (!custom_main_file.empty) { import std.path; tcinfo.sourceFiles[""] ~= custom_main_file.relativeTo(rootPackage.path).toNativeString(); tcinfo.importPaths[""] ~= custom_main_file.parentPath.toNativeString(); custommodname = custom_main_file.head.name.baseName(".d"); } // prepare the list of tested modules string[] import_modules; if (settings.single) lbuildsettings.importPaths ~= NativePath(mainfil).parentPath.toNativeString; bool firstTimePackage = true; foreach (file; lbuildsettings.sourceFiles) { if (file.endsWith(".d")) { auto fname = NativePath(file).head.name; NativePath msf = NativePath(mainfil); if (msf.absolute) msf = msf.relativeTo(rootPackage.path); if (!settings.single && NativePath(file).relativeTo(rootPackage.path) == msf) { logWarn("Excluding main source file %s from test.", mainfil); tcinfo.excludedSourceFiles[""] ~= mainfil; continue; } if (fname == "package.d") { if (firstTimePackage) { firstTimePackage = false; logDiagnostic("Excluding package.d file from test due to https://issues.dlang.org/show_bug.cgi?id=11847"); } continue; } import_modules ~= dub.internal.utils.determineModuleName(lbuildsettings, 
NativePath(file), rootPackage.path); } } NativePath mainfile; if (settings.tempBuild) mainfile = getTempFile("dub_test_root", ".d"); else { import dub.generators.build : computeBuildName; mainfile = packageCache(settings.cache, this.rootPackage) ~ format("code/%s/dub_test_root.d", computeBuildName(config, settings, import_modules)); } auto escapedMainFile = mainfile.toNativeString().replace("$", "$$"); tcinfo.sourceFiles[""] ~= escapedMainFile; tcinfo.mainSourceFile = escapedMainFile; if (!settings.tempBuild) { // add the directory containing dub_test_root.d to the import paths tcinfo.importPaths[""] ~= NativePath(escapedMainFile).parentPath.toNativeString(); } if (generate_main && (settings.force || !existsFile(mainfile))) { ensureDirectory(mainfile.parentPath); const runnerCode = custommodname.length ? format("import %s;", custommodname) : DefaultTestRunnerCode; const content = TestRunnerTemplate.format( import_modules, import_modules, runnerCode); writeFile(mainfile, content); } rootPackage.recipe.configurations ~= ConfigurationInfo(config, tcinfo); return config; } /** Performs basic validation of various aspects of the package. This will emit warnings to `stderr` if any discouraged names or dependency patterns are found. 
*/ void validate() { bool isSDL = !m_rootPackage.recipePath.empty && m_rootPackage.recipePath.head.name.endsWith(".sdl"); // some basic package lint m_rootPackage.warnOnSpecialCompilerFlags(); string nameSuggestion() { string ret; ret ~= `Please modify the "name" field in %s accordingly.`.format(m_rootPackage.recipePath.toNativeString()); if (!m_rootPackage.recipe.buildSettings.targetName.length) { if (isSDL) { ret ~= ` You can then add 'targetName "%s"' to keep the current executable name.`.format(m_rootPackage.name); } else { ret ~= ` You can then add '"targetName": "%s"' to keep the current executable name.`.format(m_rootPackage.name); } } return ret; } if (m_rootPackage.name != m_rootPackage.name.toLower()) { logWarn(`DUB package names should always be lower case. %s`, nameSuggestion()); } else if (!m_rootPackage.recipe.name.all!(ch => ch >= 'a' && ch <= 'z' || ch >= '0' && ch <= '9' || ch == '-' || ch == '_')) { logWarn(`DUB package names may only contain alphanumeric characters, ` ~ `as well as '-' and '_'. %s`, nameSuggestion()); } enforce(!m_rootPackage.name.canFind(' '), "Aborting due to the package name containing spaces."); foreach (d; m_rootPackage.getAllDependencies()) if (d.spec.isExactVersion && d.spec.version_.isBranch) { string suggestion = isSDL ? format(`dependency "%s" repository="git+" version=""`, d.name) : format(`"%s": {"repository": "git+", "version": ""}`, d.name); logWarn("Dependency '%s' depends on git branch '%s', which is deprecated.", d.name.toString().color(Mode.bold), d.spec.version_.toString.color(Mode.bold)); logWarnTag("", "Specify the git repository and commit hash in your %s:", (isSDL ? 
"dub.sdl" : "dub.json").color(Mode.bold)); logWarnTag("", "%s", suggestion.color(Mode.bold)); } // search for orphan sub configurations void warnSubConfig(string pack, string config) { logWarn("The sub configuration directive \"%s\" -> [%s] " ~ "references a package that is not specified as a dependency " ~ "and will have no effect.", pack.color(Mode.bold), config.color(Color.blue)); } void checkSubConfig(in PackageName name, string config) { auto p = getDependency(name.toString(), true); if (p && !p.configurations.canFind(config)) { logWarn("The sub configuration directive \"%s\" -> [%s] " ~ "references a configuration that does not exist.", name.toString().color(Mode.bold), config.color(Color.red)); } } auto globalbs = m_rootPackage.getBuildSettings(); foreach (p, c; globalbs.subConfigurations) { if (p !in globalbs.dependencies) warnSubConfig(p, c); else checkSubConfig(PackageName(p), c); } foreach (c; m_rootPackage.configurations) { auto bs = m_rootPackage.getBuildSettings(c); foreach (p, subConf; bs.subConfigurations) { if (p !in bs.dependencies && p !in globalbs.dependencies) warnSubConfig(p, subConf); else checkSubConfig(PackageName(p), subConf); } } // check for version specification mismatches bool[Package] visited; void validateDependenciesRec(Package pack) { // perform basic package linting pack.simpleLint(); foreach (d; pack.getAllDependencies()) { auto basename = d.name.main; d.spec.visit!( (NativePath path) { /* Valid */ }, (Repository repo) { /* Valid */ }, (VersionRange vers) { if (m_selections.hasSelectedVersion(basename)) { auto selver = m_selections.getSelectedVersion(basename); if (d.spec.merge(selver) == Dependency.Invalid) { logWarn(`Selected package %s@%s does not match ` ~ `the dependency specification %s in ` ~ `package %s. 
Need to "%s"?`, basename.toString().color(Mode.bold), selver, vers, pack.name.color(Mode.bold), "dub upgrade".color(Mode.bold)); } } }, ); auto deppack = getDependency(d.name.toString(), true); if (deppack in visited) continue; visited[deppack] = true; if (deppack) validateDependenciesRec(deppack); } } validateDependenciesRec(m_rootPackage); } /** * Reloads dependencies * * This function goes through the project and make sure that all * required packages are loaded. To do so, it uses information * both from the recipe file (`dub.json`) and from the selections * file (`dub.selections.json`). * * In the process, it populates the `dependencies`, `missingDependencies`, * and `hasAllDependencies` properties, which can only be relied on * once this has run once (the constructor always calls this). */ void reinit() { m_dependencies = null; m_dependenciesByName = null; m_missingDependencies = []; collectDependenciesRec(m_rootPackage); foreach (p; m_dependencies) m_dependenciesByName[p.name] = p; m_missingDependencies.sort(); } /// Implementation of `reinit` private void collectDependenciesRec(Package pack, int depth = 0) { auto indent = replicate(" ", depth); logDebug("%sCollecting dependencies for %s", indent, pack.name); indent ~= " "; foreach (dep; pack.getAllDependencies()) { Dependency vspec = dep.spec; Package p; auto basename = dep.name.main; auto subname = dep.name.sub; // non-optional and optional-default dependencies (if no selections file exists) // need to be satisfied bool is_desired = !vspec.optional || m_selections.hasSelectedVersion(basename) || (vspec.default_ && m_selections.bare); if (dep.name.toString() == m_rootPackage.basePackage.name) { vspec = Dependency(m_rootPackage.version_); p = m_rootPackage.basePackage; } else if (basename.toString() == m_rootPackage.basePackage.name) { vspec = Dependency(m_rootPackage.version_); try p = m_packageManager.getSubPackage(m_rootPackage.basePackage, subname, false); catch (Exception e) { logDiagnostic("%sError 
getting sub package %s: %s", indent, dep.name, e.msg); if (is_desired) m_missingDependencies ~= dep.name.toString(); continue; } } else if (m_selections.hasSelectedVersion(basename)) { vspec = m_selections.getSelectedVersion(basename); p = vspec.visit!( (NativePath path_) { auto path = path_.absolute ? path_ : m_rootPackage.path ~ path_; auto tmp = m_packageManager.getOrLoadPackage(path, NativePath.init, true); return resolveSubPackage(tmp, subname, true); }, (Repository repo) { return m_packageManager.loadSCMPackage(dep.name, repo); }, (VersionRange range) { // See `dub.recipe.selection : SelectedDependency.fromYAML` assert(range.isExactVersion()); return m_packageManager.getPackage(dep.name, vspec.version_); }, ); } else if (m_dependencies.canFind!(d => PackageName(d.name).main == basename)) { auto idx = m_dependencies.countUntil!(d => PackageName(d.name).main == basename); auto bp = m_dependencies[idx].basePackage; vspec = Dependency(bp.path); p = resolveSubPackage(bp, subname, false); } else { logDiagnostic("%sVersion selection for dependency %s (%s) of %s is missing.", indent, basename, dep.name, pack.name); } // We didn't find the package if (p is null) { if (!vspec.repository.empty) { p = m_packageManager.loadSCMPackage(dep.name, vspec.repository); enforce(p !is null, "Unable to fetch '%s@%s' using git - does the repository and version exist?".format( dep.name, vspec.repository)); } else if (!vspec.path.empty && is_desired) { NativePath path = vspec.path; if (!path.absolute) path = pack.path ~ path; logDiagnostic("%sAdding local %s in %s", indent, dep.name, path); p = m_packageManager.getOrLoadPackage(path, NativePath.init, true); if (p.parentPackage !is null) { logWarn("%sSub package %s must be referenced using the path to it's parent package.", indent, dep.name); p = p.parentPackage; } p = resolveSubPackage(p, subname, false); enforce(p.name == dep.name.toString(), format("Path based dependency %s is referenced with a wrong name: %s vs. 
%s", path.toNativeString(), dep.name, p.name)); } else { logDiagnostic("%sMissing dependency %s %s of %s", indent, dep.name, vspec, pack.name); if (is_desired) m_missingDependencies ~= dep.name.toString(); continue; } } if (!m_dependencies.canFind(p)) { logDiagnostic("%sFound dependency %s %s", indent, dep.name, vspec.toString()); m_dependencies ~= p; if (basename.toString() == m_rootPackage.basePackage.name) p.warnOnSpecialCompilerFlags(); collectDependenciesRec(p, depth+1); } m_dependees[p] ~= pack; //enforce(p !is null, "Failed to resolve dependency "~dep.name~" "~vspec.toString()); } } /// Convenience function used by `reinit` private Package resolveSubPackage(Package p, string subname, bool silentFail) { if (!subname.length || p is null) return p; return m_packageManager.getSubPackage(p, subname, silentFail); } /// Returns the name of the root package. @property string name() const { return m_rootPackage ? m_rootPackage.name : "app"; } /// Returns the names of all configurations of the root package. @property string[] configurations() const { return m_rootPackage.configurations; } /// Returns the names of all built-in and custom build types of the root package. /// The default built-in build type is the first item in the list. @property string[] builds() const { return builtinBuildTypes ~ m_rootPackage.customBuildTypes; } /// Returns a map with the configuration for all packages in the dependency tree. 
string[string] getPackageConfigs(in BuildPlatform platform, string config, bool allow_non_library = true) const { import std.typecons : Rebindable, rebindable; import std.range : only; // prepare by collecting information about all packages in the project // qualified names and dependencies are cached, to avoid recomputing // them multiple times during the algorithm auto packages = collectPackageInformation(); // graph of the project's package configuration dependencies // (package, config) -> (sub-package, sub-config) static struct Vertex { size_t pack = size_t.max; string config; } static struct Edge { size_t from, to; } Vertex[] configs; void[0][Vertex] configs_set; Edge[] edges; size_t createConfig(size_t pack_idx, string config) { foreach (i, v; configs) if (v.pack == pack_idx && v.config == config) return i; auto pname = packages[pack_idx].name; assert(pname !in m_overriddenConfigs || config == m_overriddenConfigs[pname]); logDebug("Add config %s %s", pname, config); auto cfg = Vertex(pack_idx, config); configs ~= cfg; configs_set[cfg] = (void[0]).init; return configs.length-1; } bool haveConfig(size_t pack_idx, string config) { return (Vertex(pack_idx, config) in configs_set) !is null; } void removeConfig(size_t config_index) { logDebug("Eliminating config %s for %s", configs[config_index].config, configs[config_index].pack); auto had_dep_to_pack = new bool[configs.length]; auto still_has_dep_to_pack = new bool[configs.length]; // eliminate all edges that connect to config 'config_index' and // track all connected configs edges = edges.filterInPlace!((e) { if (e.to == config_index) { had_dep_to_pack[e.from] = true; return false; } else if (configs[e.to].pack == configs[config_index].pack) { still_has_dep_to_pack[e.from] = true; } return e.from != config_index; }); // mark config as removed configs_set.remove(configs[config_index]); configs[config_index] = Vertex.init; // also remove any configs that cannot be satisfied anymore foreach (j; 0 .. 
configs.length) if (j != config_index && had_dep_to_pack[j] && !still_has_dep_to_pack[j]) removeConfig(j); } bool[] reachable = new bool[packages.length]; // reused to avoid continuous re-allocation bool isReachableByAllParentPacks(size_t cidx) { foreach (p; packages[configs[cidx].pack].parents) reachable[p] = false; foreach (e; edges) { if (e.to != cidx) continue; reachable[configs[e.from].pack] = true; } foreach (p; packages[configs[cidx].pack].parents) if (!reachable[p]) return false; return true; } string[][] depconfigs = new string[][](packages.length); void determineDependencyConfigs(size_t pack_idx, string c) { void[0][Edge] edges_set; void createEdge(size_t from, size_t to) { if (Edge(from, to) in edges_set) return; logDebug("Including %s %s -> %s %s", configs[from].pack, configs[from].config, configs[to].pack, configs[to].config); edges ~= Edge(from, to); edges_set[Edge(from, to)] = (void[0]).init; } auto pack = &packages[pack_idx]; // below we call createConfig for the main package if // config.length is not zero. 
Carry on for that case, // otherwise we've handle the pair (p, c) already if(haveConfig(pack_idx, c) && !(config.length && pack.name == m_rootPackage.name && config == c)) return; foreach (d; pack.dependencies) { auto dp = packages.getPackageIndex(d.name.toString()); if (dp == size_t.max) continue; depconfigs[dp].length = 0; depconfigs[dp].assumeSafeAppend; void setConfigs(R)(R configs) { configs .filter!(c => haveConfig(dp, c)) .each!((c) { depconfigs[dp] ~= c; }); } if (auto pc = packages[dp].name in m_overriddenConfigs) { setConfigs(only(*pc)); } else { auto subconf = pack.package_.getSubConfiguration(c, packages[dp].package_, platform); if (!subconf.empty) setConfigs(only(subconf)); else setConfigs(packages[dp].package_.getPlatformConfigurations(platform)); } // if no valid configuration was found for a dependency, don't include the // current configuration if (!depconfigs[dp].length) { logDebug("Skip %s %s (missing configuration for %s)", pack.name, c, packages[dp].name); return; } } // add this configuration to the graph size_t cidx = createConfig(pack_idx, c); foreach (d; pack.dependencies) { if (auto pdp = d.name.toString() in packages) foreach (sc; depconfigs[*pdp]) createEdge(cidx, createConfig(*pdp, sc)); } } string[] allconfigs_path; void determineAllConfigs(size_t pack_idx) { auto pack = &packages[pack_idx]; auto idx = allconfigs_path.countUntil(pack.name); enforce(idx < 0, format("Detected dependency cycle: %s", (allconfigs_path[idx .. 
$] ~ pack.name).join("->"))); allconfigs_path ~= pack.name; scope (exit) { allconfigs_path.length--; allconfigs_path.assumeSafeAppend; } // first, add all dependency configurations foreach (d; pack.dependencies) if (auto pi = d.name.toString() in packages) determineAllConfigs(*pi); // for each configuration, determine the configurations usable for the dependencies if (auto pc = pack.name in m_overriddenConfigs) determineDependencyConfigs(pack_idx, *pc); else foreach (c; pack.package_.getPlatformConfigurations(platform, pack.package_ is m_rootPackage && allow_non_library)) determineDependencyConfigs(pack_idx, c); } // first, create a graph of all possible package configurations assert(packages[0].package_ is m_rootPackage); if (config.length) createConfig(0, config); determineAllConfigs(0); // then, successively remove configurations until only one configuration // per package is left bool changed; do { // remove all configs that are not reachable by all parent packages changed = false; foreach (i, ref c; configs) { if (c == Vertex.init) continue; // ignore deleted configurations if (!isReachableByAllParentPacks(i)) { logDebug("%s %s NOT REACHABLE by all of (%s):", c.pack, c.config, packages[c.pack].parents); removeConfig(i); changed = true; } } // when all edges are cleaned up, pick one package and remove all but one config if (!changed) { foreach (pidx; 0 .. 
packages.length) { size_t cnt = 0; foreach (i, ref c; configs) if (c.pack == pidx && ++cnt > 1) { logDebug("NON-PRIMARY: %s %s", c.pack, c.config); removeConfig(i); } if (cnt > 1) { changed = true; break; } } } } while (changed); // print out the resulting tree foreach (e; edges) logDebug(" %s %s -> %s %s", configs[e.from].pack, configs[e.from].config, configs[e.to].pack, configs[e.to].config); // return the resulting configuration set as an AA string[string] ret; foreach (c; configs) { if (c == Vertex.init) continue; // ignore deleted configurations auto pname = packages[c.pack].name; assert(ret.get(pname, c.config) == c.config, format("Conflicting configurations for %s found: %s vs. %s", pname, c.config, ret[pname])); logDebug("Using configuration '%s' for %s", c.config, pname); ret[pname] = c.config; } // check for conflicts (packages missing in the final configuration graph) auto visited = new bool[](packages.length); void checkPacksRec(size_t pack_idx) { if (visited[pack_idx]) return; visited[pack_idx] = true; auto pname = packages[pack_idx].name; auto pc = pname in ret; enforce(pc !is null, "Could not resolve configuration for package "~pname); foreach (p, dep; packages[pack_idx].package_.getDependencies(*pc)) { auto deppack = getDependency(p, dep.optional); if (deppack) checkPacksRec(packages[].countUntil!(p => p.package_ is deppack)); } } checkPacksRec(0); return ret; } /** Returns an ordered list of all packages with the additional possibility to look up by name. 
*/ private auto collectPackageInformation() const { static struct PackageInfo { const(Package) package_; size_t[] parents; string name; PackageDependency[] dependencies; } static struct PackageInfoAccessor { private { PackageInfo[] m_packages; size_t[string] m_packageMap; } private void initialize(P)(P all_packages, size_t reserve_count) { m_packages.reserve(reserve_count); foreach (p; all_packages) { auto pname = p.name; m_packageMap[pname] = m_packages.length; m_packages ~= PackageInfo(p, null, pname, p.getAllDependencies()); } foreach (pack_idx, ref pack_info; m_packages) foreach (d; pack_info.dependencies) if (auto pi = d.name.toString() in m_packageMap) m_packages[*pi].parents ~= pack_idx; } size_t length() const { return m_packages.length; } const(PackageInfo)[] opIndex() const { return m_packages; } ref const(PackageInfo) opIndex(size_t package_index) const { return m_packages[package_index]; } size_t getPackageIndex(string package_name) const { return m_packageMap.get(package_name, size_t.max); } const(size_t)* opBinaryRight(string op = "in")(string package_name) const { return package_name in m_packageMap; } } PackageInfoAccessor ret; ret.initialize(getTopologicalPackageList(), m_dependencies.length); return ret; } /** * Fills `dst` with values from this project. * * `dst` gets initialized according to the given platform and config. * * Params: * dst = The BuildSettings struct to fill with data. * gsettings = The generator settings to retrieve the values for. * config = Values of the given configuration will be retrieved. * root_package = If non null, use it instead of the project's real root package. * shallow = If true, collects only build settings for the main package (including inherited settings) and doesn't stop on target type none and sourceLibrary. 
*/
void addBuildSettings(ref BuildSettings dst, in GeneratorSettings gsettings, string config, in Package root_package = null, bool shallow = false)
const {
	import dub.internal.utils : stripDlangSpecialChars;

	auto configs = getPackageConfigs(gsettings.platform, config);

	foreach (pkg; this.getTopologicalPackageList(false, root_package, configs)) {
		// NOTE(review): a dead local (`auto pkg_path = pkg.path.toNativeString();`)
		// that was computed here on every iteration but never read has been removed.

		// every package contributes a Have_<name> version identifier
		dst.addVersions(["Have_" ~ stripDlangSpecialChars(pkg.name)]);

		assert(pkg.name in configs, "Missing configuration for "~pkg.name);
		logDebug("Gathering build settings for %s (%s)", pkg.name, configs[pkg.name]);

		auto psettings = pkg.getBuildSettings(gsettings.platform, configs[pkg.name]);
		if (psettings.targetType != TargetType.none) {
			// in shallow mode only the root package contributes source files
			if (shallow && pkg !is m_rootPackage)
				psettings.sourceFiles = null;
			processVars(dst, this, pkg, psettings, gsettings);
			if (!gsettings.single && psettings.importPaths.empty)
				logWarn(`Package %s (configuration "%s") defines no import paths, use {"importPaths": [...]} or the default package directory structure to fix this.`, pkg.name, configs[pkg.name]);
			if (psettings.mainSourceFile.empty && pkg is m_rootPackage && psettings.targetType == TargetType.executable)
				logWarn(`Executable configuration "%s" of package %s defines no main source file, this may cause certain build modes to fail. Add an explicit "mainSourceFile" to the package description to fix this.`, configs[pkg.name], pkg.name);
		}
		if (pkg is m_rootPackage) {
			if (!shallow) {
				enforce(psettings.targetType != TargetType.none, "Main package has target type \"none\" - stopping build.");
				enforce(psettings.targetType != TargetType.sourceLibrary, "Main package has target type \"sourceLibrary\" which generates no target - stopping build.");
			}
			// only the root package determines the target related settings
			dst.targetType = psettings.targetType;
			dst.targetPath = psettings.targetPath;
			dst.targetName = psettings.targetName;
			if (!psettings.workingDirectory.empty)
				dst.workingDirectory = processVars(psettings.workingDirectory, this, pkg, gsettings, true, [dst.environments, dst.buildEnvironments]);
			if (psettings.mainSourceFile.length)
				dst.mainSourceFile = processVars(psettings.mainSourceFile, this, pkg, gsettings, true, [dst.environments, dst.buildEnvironments]);
		}
	}

	// always add all version identifiers of all packages
	foreach (pkg; this.getTopologicalPackageList(false, null, configs)) {
		auto psettings = pkg.getBuildSettings(gsettings.platform, configs[pkg.name]);
		dst.addVersions(psettings.versions);
	}
}

/** Fills `dst` with build settings specific to the given build type.

	Params:
		dst = The `BuildSettings` instance to add the build settings to
		gsettings = Target generator settings
		for_root_package = Selects if the build settings are for the root
			package or for one of the dependencies. Unittest flags will
			only be added to the root package.
*/
void addBuildTypeSettings(ref BuildSettings dst, in GeneratorSettings gsettings, bool for_root_package = true)
{
	// build type flags are skipped entirely when the package opts out
	bool usedefflags = !(dst.requirements & BuildRequirement.noDefaultFlags);
	if (usedefflags) {
		BuildSettings btsettings;
		m_rootPackage.addBuildTypeSettings(btsettings, gsettings.platform, gsettings.buildType);

		if (!for_root_package) {
			// don't propagate unittest switch to dependencies, as dependent
			// unit tests aren't run anyway and the additional code may
			// cause linking to fail on Windows (issue #640)
			btsettings.removeOptions(BuildOption.unittests);
		}

		processVars(dst, this, m_rootPackage, btsettings, gsettings);
	}
}

/// Outputs a build description of the project, including its dependencies.
ProjectDescription describe(GeneratorSettings settings)
{
	import dub.generators.targetdescription;

	// store basic build parameters
	ProjectDescription ret;
	ret.rootPackage = m_rootPackage.name;
	ret.configuration = settings.config;
	ret.buildType = settings.buildType;
	ret.compiler = settings.platform.compiler;
	ret.architecture = settings.platform.architecture;
	ret.platform = settings.platform.platform;

	// collect high level information about projects (useful for IDE display)
	auto configs = getPackageConfigs(settings.platform, settings.config);
	ret.packages ~= m_rootPackage.describe(settings.platform, settings.config);
	foreach (dep; m_dependencies)
		ret.packages ~= dep.describe(settings.platform, configs[dep.name]);

	// mark the packages that take part in the build for the chosen configuration
	foreach (p; getTopologicalPackageList(false, null, configs))
		ret.packages[ret.packages.countUntil!(pp => pp.name == p.name)].active = true;

	if (settings.buildType.length) {
		// collect build target information (useful for build tools)
		auto gen = new TargetDescriptionGenerator(this);
		try {
			gen.generate(settings);
			ret.targets = gen.targetDescriptions;
			ret.targetLookup = gen.targetDescriptionLookup;
		} catch (Exception e) {
			// target description is optional - a failure only degrades the output
			logDiagnostic("Skipping targets description: %s", e.msg);
			logDebug("Full error: %s", e.toString().sanitize);
		}
	}

	return ret;
}

private
string[] listBuildSetting(string attributeName)(ref GeneratorSettings settings, string config,
	ProjectDescription projectDescription, Compiler compiler, bool disableEscaping)
{
	// Convenience overload: resolve the configuration name into the full
	// per-package configuration map first.
	return listBuildSetting!attributeName(settings, getPackageConfigs(settings.platform, config),
		projectDescription, compiler, disableEscaping);
}

private string[] listBuildSetting(string attributeName)(ref GeneratorSettings settings, string[string] configs,
	ProjectDescription projectDescription, Compiler compiler, bool disableEscaping)
{
	// With a compiler given, values are rendered as compiler command line
	// flags; otherwise they are returned as plain strings.
	if (compiler)
		return formatBuildSettingCompiler!attributeName(settings, configs, projectDescription, compiler, disableEscaping);
	else
		return formatBuildSettingPlain!attributeName(settings, configs, projectDescription);
}

// Output a build setting formatted for a compiler
private string[] formatBuildSettingCompiler(string attributeName)(ref GeneratorSettings settings, string[string] configs,
	ProjectDescription projectDescription, Compiler compiler, bool disableEscaping)
{
	import std.process : escapeShellFileName;
	import std.path : dirSeparator;

	assert(compiler);

	auto targetDescription = projectDescription.lookupTarget(projectDescription.rootPackage);
	auto buildSettings = targetDescription.buildSettings;

	string[] values;
	switch (attributeName)
	{
	// these are already rendered in their final shape by the plain formatter
	case "dflags":
	case "linkerFiles":
	case "mainSourceFile":
	case "importFiles":
		values = formatBuildSettingPlain!attributeName(settings, configs, projectDescription);
		break;

	case "lflags":
	case "sourceFiles":
	case "injectSourceFiles":
	case "versions":
	case "debugVersions":
	case "importPaths":
	case "cImportPaths":
	case "stringImportPaths":
	case "options":
		auto bs = buildSettings.dup;
		bs.dflags = null;

		// Ensure trailing slash on directory paths
		auto ensureTrailingSlash = (string path) => path.endsWith(dirSeparator) ? path : path ~ dirSeparator;
		static if (attributeName == "importPaths")
			bs.importPaths = bs.importPaths.map!(ensureTrailingSlash).array();
		else static if (attributeName == "cImportPaths")
			bs.cImportPaths = bs.cImportPaths.map!(ensureTrailingSlash).array();
		else static if (attributeName == "stringImportPaths")
			bs.stringImportPaths = bs.stringImportPaths.map!(ensureTrailingSlash).array();

		// let the compiler translate everything except the requested
		// attribute into dflags, then return those flags
		compiler.prepareBuildSettings(bs, settings.platform, BuildSetting.all & ~to!BuildSetting(attributeName));
		values = bs.dflags;
		break;

	case "libs":
		auto bs = buildSettings.dup;
		bs.dflags = null;
		bs.lflags = null;
		bs.sourceFiles = null;
		bs.targetType = TargetType.none; // Force Compiler to NOT omit dependency libs when package is a library.

		compiler.prepareBuildSettings(bs, settings.platform, BuildSetting.all & ~to!BuildSetting(attributeName));

		if (bs.lflags)
			values = compiler.lflagsToDFlags( bs.lflags );
		else if (bs.sourceFiles)
			values = compiler.lflagsToDFlags( bs.sourceFiles );
		else
			values = bs.dflags;
		break;

	default: assert(0);
	}

	// Escape filenames and paths
	if(!disableEscaping)
	{
		switch (attributeName)
		{
		case "mainSourceFile":
		case "linkerFiles":
		case "injectSourceFiles":
		case "copyFiles":
		case "importFiles":
		case "stringImportFiles":
		case "sourceFiles":
		case "importPaths":
		case "cImportPaths":
		case "stringImportPaths":
			return values.map!(escapeShellFileName).array();

		default:
			return values;
		}
	}

	return values;
}

// Output a build setting without formatting for any particular compiler
private string[] formatBuildSettingPlain(string attributeName)(ref GeneratorSettings settings, string[string] configs, ProjectDescription projectDescription)
{
	import std.path : buildNormalizedPath, dirSeparator;
	import std.range : only;

	string[] list;

	enforce(attributeName == "targetType" || projectDescription.lookupRootPackage().targetType != TargetType.none,
		"Target type is 'none'. Cannot list build settings.");

	static if (attributeName == "targetType")
		if (projectDescription.rootPackage !in projectDescription.targetLookup)
			return ["none"];

	auto targetDescription = projectDescription.lookupTarget(projectDescription.rootPackage);
	auto buildSettings = targetDescription.buildSettings;

	// substitutes DUB variables and the command environment into a list of
	// shell commands
	string[] substituteCommands(Package pack, string[] commands, CommandType type)
	{
		auto env = makeCommandEnvironmentVariables(type, pack, this, settings, buildSettings);
		return processVars(this, pack, settings, commands, false, env);
	}

	// Return any BuildSetting member attributeName as a range of strings. Don't attempt to fixup values.
	// allowEmptyString: When the value is a string (as opposed to string[]),
	//                   is empty string an actual permitted value instead of
	//                   a missing value?
	auto getRawBuildSetting(Package pack, bool allowEmptyString)
	{
		auto value = __traits(getMember, buildSettings, attributeName);

		static if( attributeName.endsWith("Commands") )
			return substituteCommands(pack, value, mixin("CommandType.", attributeName[0 .. $ - "Commands".length]));
		else static if( is(typeof(value) == string[]) )
			return value;
		else static if( is(typeof(value) == string) )
		{
			auto ret = only(value);

			// only() has a different return type from only(value), so we
			// have to empty the range rather than just returning only().
			if(value.empty && !allowEmptyString)
			{
				ret.popFront();
				assert(ret.empty);
			}

			return ret;
		}
		else static if( is(typeof(value) == string[string]) )
			return value.byKeyValue.map!(a => a.key ~ "=" ~ a.value);
		else static if( is(typeof(value) == enum) )
			return only(value);
		else static if( is(typeof(value) == Flags!BuildRequirement) )
			return only(cast(BuildRequirement) cast(int) value.values);
		else static if( is(typeof(value) == Flags!BuildOption) )
			return only(cast(BuildOption) cast(int) value.values);
		else
			static assert(false, "Type of BuildSettings."~attributeName~" is unsupported.");
	}

	// Adjust BuildSetting member attributeName as needed.
	// Returns a range of strings.
	auto getFixedBuildSetting(Package pack)
	{
		// Is relative path(s) to a directory?
		enum isRelativeDirectory =
			attributeName == "importPaths" || attributeName == "cImportPaths" ||
			attributeName == "stringImportPaths" || attributeName == "targetPath" ||
			attributeName == "workingDirectory";

		// Is relative path(s) to a file?
		enum isRelativeFile =
			attributeName == "sourceFiles" || attributeName == "linkerFiles" ||
			attributeName == "importFiles" || attributeName == "stringImportFiles" ||
			attributeName == "copyFiles" || attributeName == "mainSourceFile" ||
			attributeName == "injectSourceFiles";

		// For these, empty string means "main project directory", not "missing value"
		enum allowEmptyString =
			attributeName == "targetPath" || attributeName == "workingDirectory";

		enum isEnumBitfield =
			attributeName == "requirements" || attributeName == "options";

		enum isEnum = attributeName == "targetType";

		auto values = getRawBuildSetting(pack, allowEmptyString);
		string fixRelativePath(string importPath) { return buildNormalizedPath(pack.path.toString(), importPath); }
		static string ensureTrailingSlash(string path) { return path.endsWith(dirSeparator) ? path : path ~ dirSeparator; }

		static if(isRelativeDirectory) {
			// Return full paths for the paths, making sure a
			// directory separator is on the end of each path.
			return values.map!(fixRelativePath).map!(ensureTrailingSlash);
		}
		else static if(isRelativeFile) {
			// Return full paths.
			return values.map!(fixRelativePath);
		}
		else static if(isEnumBitfield)
			return bitFieldNames(values.front);
		else static if (isEnum)
			return [values.front.to!string];
		else
			return values;
	}

	foreach(value; getFixedBuildSetting(m_rootPackage)) {
		list ~= value;
	}

	return list;
}

// The "compiler" arg is for choosing which compiler the output should be formatted for,
// or null to imply "list" format.
private string[] listBuildSetting(ref GeneratorSettings settings, string[string] configs,
	ProjectDescription projectDescription, string requestedData, Compiler compiler, bool disableEscaping)
{
	// Certain data cannot be formatted for a compiler
	if (compiler)
	{
		switch (requestedData)
		{
		case "target-type":
		case "target-path":
		case "target-name":
		case "working-directory":
		case "string-import-files":
		case "copy-files":
		case "extra-dependency-files":
		case "pre-generate-commands":
		case "post-generate-commands":
		case "pre-build-commands":
		case "post-build-commands":
		case "pre-run-commands":
		case "post-run-commands":
		case "environments":
		case "build-environments":
		case "run-environments":
		case "pre-generate-environments":
		case "post-generate-environments":
		case "pre-build-environments":
		case "post-build-environments":
		case "pre-run-environments":
		case "post-run-environments":
		case "default-config":
		case "configs":
		case "default-build":
		case "builds":
			enforce(false, "--data="~requestedData~" can only be used with `--data-list` or `--data-list --data-0`.");
			break;

		case "requirements":
			enforce(false, "--data=requirements can only be used with `--data-list` or `--data-list --data-0`. Use --data=options instead.");
			break;

		default: break;
		}
	}

	import std.typetuple : TypeTuple;
	auto args = TypeTuple!(settings, configs, projectDescription, compiler, disableEscaping);

	// map the dashed command line name onto the camelCase BuildSettings member
	switch (requestedData)
	{
	case "target-type":                return listBuildSetting!"targetType"(args);
	case "target-path":                return listBuildSetting!"targetPath"(args);
	case "target-name":                return listBuildSetting!"targetName"(args);
	case "working-directory":          return listBuildSetting!"workingDirectory"(args);
	case "main-source-file":           return listBuildSetting!"mainSourceFile"(args);
	case "dflags":                     return listBuildSetting!"dflags"(args);
	case "lflags":                     return listBuildSetting!"lflags"(args);
	case "libs":                       return listBuildSetting!"libs"(args);
	case "linker-files":               return listBuildSetting!"linkerFiles"(args);
	case "source-files":               return listBuildSetting!"sourceFiles"(args);
	case "inject-source-files":        return listBuildSetting!"injectSourceFiles"(args);
	case "copy-files":                 return listBuildSetting!"copyFiles"(args);
	case "extra-dependency-files":     return listBuildSetting!"extraDependencyFiles"(args);
	case "versions":                   return listBuildSetting!"versions"(args);
	case "debug-versions":             return listBuildSetting!"debugVersions"(args);
	case "import-paths":               return listBuildSetting!"importPaths"(args);
	case "string-import-paths":        return listBuildSetting!"stringImportPaths"(args);
	case "import-files":               return listBuildSetting!"importFiles"(args);
	case "string-import-files":        return listBuildSetting!"stringImportFiles"(args);
	case "pre-generate-commands":      return listBuildSetting!"preGenerateCommands"(args);
	case "post-generate-commands":     return listBuildSetting!"postGenerateCommands"(args);
	case "pre-build-commands":         return listBuildSetting!"preBuildCommands"(args);
	case "post-build-commands":        return listBuildSetting!"postBuildCommands"(args);
	case "pre-run-commands":           return listBuildSetting!"preRunCommands"(args);
	case "post-run-commands":          return listBuildSetting!"postRunCommands"(args);
	case "environments":               return listBuildSetting!"environments"(args);
	case "build-environments":         return listBuildSetting!"buildEnvironments"(args);
	case "run-environments":           return listBuildSetting!"runEnvironments"(args);
	case "pre-generate-environments":  return listBuildSetting!"preGenerateEnvironments"(args);
	case "post-generate-environments": return listBuildSetting!"postGenerateEnvironments"(args);
	case "pre-build-environments":     return listBuildSetting!"preBuildEnvironments"(args);
	case "post-build-environments":    return listBuildSetting!"postBuildEnvironments"(args);
	case "pre-run-environments":       return listBuildSetting!"preRunEnvironments"(args);
	case "post-run-environments":      return listBuildSetting!"postRunEnvironments"(args);
	case "requirements":               return listBuildSetting!"requirements"(args);
	case "options":                    return listBuildSetting!"options"(args);
	case "default-config":             return [getDefaultConfiguration(settings.platform)];
	case "configs":                    return configurations;
	case "default-build":              return [builds[0]];
	case "builds":                     return builds;

	default:
		enforce(false,
			"--data="~requestedData~
			" is not a valid option. See 'dub describe --help' for accepted --data= values.");
	}

	assert(0);
}

/// Outputs requested data for the project, optionally including its dependencies.
string[] listBuildSettings(GeneratorSettings settings, string[] requestedData, ListBuildSettingsFormat list_type)
{
	import dub.compilers.utils : isLinkerFile;

	auto projectDescription = describe(settings);
	auto configs = getPackageConfigs(settings.platform, settings.config);

	// NOTE(review): a dead local (`PackageDescription packageDescription;`
	// filled by a loop over projectDescription.packages but never read) has
	// been removed - the assignment was a pure struct copy with no side
	// effects.

	if (projectDescription.rootPackage in projectDescription.targetLookup) {
		// Copy linker files from sourceFiles to linkerFiles
		auto target = projectDescription.lookupTarget(projectDescription.rootPackage);
		foreach (file; target.buildSettings.sourceFiles.filter!(f => isLinkerFile(settings.platform, f)))
			target.buildSettings.addLinkerFiles(file);

		// Remove linker files from sourceFiles
		target.buildSettings.sourceFiles =
			target.buildSettings.sourceFiles
			.filter!(a => !isLinkerFile(settings.platform, a))
			.array();
		projectDescription.lookupTarget(projectDescription.rootPackage) = target;
	}

	// select the output format and whether to escape file names
	Compiler compiler;
	bool no_escape;
	final switch (list_type) with (ListBuildSettingsFormat) {
		case list: break;
		case listNul: no_escape = true; break;
		case commandLine: compiler = settings.compiler; break;
		case commandLineNul: compiler = settings.compiler; no_escape = true; break;
	}

	auto result = requestedData
		.map!(dataName => listBuildSetting(settings, configs, projectDescription, dataName, compiler, no_escape));

	final switch (list_type) with (ListBuildSettingsFormat) {
		case list: return result.map!(l => l.join("\n")).array();
		case listNul: return result.map!(l => l.join("\0")).array;
		case commandLine: return result.map!(l => l.join(" ")).array;
		case commandLineNul: return result.map!(l => l.join("\0")).array;
	}
}

/** Saves the currently selected dependency versions to disk.

	The selections will be written to a file named
	`SelectedVersions.defaultFile` ("dub.selections.json") within the
	directory of the root package. Any existing file will get overwritten.
*/
void saveSelections()
{
	assert(m_selections !is null, "Cannot save selections for non-disk based project (has no selections).");
	// the root package itself is never listed in its own selections file
	const name = PackageName(m_rootPackage.basePackage.name);
	if (m_selections.hasSelectedVersion(name))
		m_selections.deselectVersion(name);

	this.m_packageManager.writeSelections(
		this.m_rootPackage, this.m_selections.m_selections,
		this.m_selections.dirty);
}

deprecated bool isUpgradeCacheUpToDate()
{
	return false;
}

deprecated Dependency[string] getUpgradeCache()
{
	return null;
}
}


/// Determines the output format used for `Project.listBuildSettings`.
enum ListBuildSettingsFormat {
	list,           /// Newline separated list entries
	listNul,        /// NUL character separated list entries (unescaped)
	commandLine,    /// Formatted for compiler command line (one data list per line)
	commandLineNul, /// NUL character separated list entries (unescaped, data lists separated by two NUL characters)
}

deprecated("Use `dub.packagemanager : PlacementLocation` instead")
public alias PlacementLocation = dub.packagemanager.PlacementLocation;

// Fills `dst` with the build settings of `settings`, substituting DUB
// variables and environment variables in the process. Target related
// settings (target type/path/name, workingDirectory, mainSourceFile) are
// only copied when include_target_settings is true.
void processVars(ref BuildSettings dst, in Project project, in Package pack,
	BuildSettings settings, in GeneratorSettings gsettings, bool include_target_settings = false)
{
	// merges target specific environment variables over the default ones
	// (target specific ones take precedence) and substitutes variables
	string[string] processVerEnvs(in string[string] targetEnvs, in string[string] defaultEnvs)
	{
		string[string] retEnv;
		foreach (k, v; targetEnvs)
			retEnv[k] = v;
		foreach (k, v; defaultEnvs) {
			if (k !in targetEnvs)
				retEnv[k] = v;
		}
		return processVars(project, pack, gsettings, retEnv);
	}
	dst.addEnvironments(processVerEnvs(settings.environments, gsettings.buildSettings.environments));
	dst.addBuildEnvironments(processVerEnvs(settings.buildEnvironments, gsettings.buildSettings.buildEnvironments));
	dst.addRunEnvironments(processVerEnvs(settings.runEnvironments, gsettings.buildSettings.runEnvironments));
	dst.addPreGenerateEnvironments(processVerEnvs(settings.preGenerateEnvironments, gsettings.buildSettings.preGenerateEnvironments));
	dst.addPostGenerateEnvironments(processVerEnvs(settings.postGenerateEnvironments, gsettings.buildSettings.postGenerateEnvironments));
	dst.addPreBuildEnvironments(processVerEnvs(settings.preBuildEnvironments, gsettings.buildSettings.preBuildEnvironments));
	dst.addPostBuildEnvironments(processVerEnvs(settings.postBuildEnvironments, gsettings.buildSettings.postBuildEnvironments));
	dst.addPreRunEnvironments(processVerEnvs(settings.preRunEnvironments, gsettings.buildSettings.preRunEnvironments));
	dst.addPostRunEnvironments(processVerEnvs(settings.postRunEnvironments, gsettings.buildSettings.postRunEnvironments));

	// the merged environments above may themselves be referenced by the
	// remaining settings, so they are passed along as extra variables
	auto buildEnvs = [dst.environments, dst.buildEnvironments];

	dst.addDFlags(processVars(project, pack, gsettings, settings.dflags, false, buildEnvs));
	dst.addLFlags(processVars(project, pack, gsettings, settings.lflags, false, buildEnvs));
	dst.addLibs(processVars(project, pack, gsettings, settings.libs, false, buildEnvs));
	dst.addSourceFiles(processVars!true(project, pack, gsettings, settings.sourceFiles, true, buildEnvs));
	dst.addImportFiles(processVars(project, pack, gsettings, settings.importFiles, true, buildEnvs));
	dst.addStringImportFiles(processVars(project, pack, gsettings, settings.stringImportFiles, true, buildEnvs));
	dst.addInjectSourceFiles(processVars!true(project, pack, gsettings, settings.injectSourceFiles, true, buildEnvs));
	dst.addCopyFiles(processVars(project, pack, gsettings, settings.copyFiles, true, buildEnvs));
	dst.addExtraDependencyFiles(processVars(project, pack, gsettings, settings.extraDependencyFiles, true, buildEnvs));
	dst.addVersions(processVars(project, pack, gsettings, settings.versions, false, buildEnvs));
	dst.addDebugVersions(processVars(project, pack, gsettings, settings.debugVersions, false, buildEnvs));
	dst.addVersionFilters(processVars(project, pack, gsettings, settings.versionFilters, false, buildEnvs));
	dst.addDebugVersionFilters(processVars(project, pack, gsettings, settings.debugVersionFilters, false, buildEnvs));
	dst.addImportPaths(processVars(project, pack, gsettings, settings.importPaths, true, buildEnvs));
	dst.addCImportPaths(processVars(project, pack, gsettings, settings.cImportPaths, true, buildEnvs));
	dst.addStringImportPaths(processVars(project, pack, gsettings, settings.stringImportPaths, true, buildEnvs));
	dst.addRequirements(settings.requirements);
	dst.addOptions(settings.options);

	// commands are substituted in dub.generators.generator : runBuildCommands
	dst.addPreGenerateCommands(settings.preGenerateCommands);
	dst.addPostGenerateCommands(settings.postGenerateCommands);
	dst.addPreBuildCommands(settings.preBuildCommands);
	dst.addPostBuildCommands(settings.postBuildCommands);
	dst.addPreRunCommands(settings.preRunCommands);
	dst.addPostRunCommands(settings.postRunCommands);

	if (include_target_settings) {
		dst.targetType = settings.targetType;
		dst.targetPath = processVars(settings.targetPath, project, pack, gsettings, true, buildEnvs);
		dst.targetName = settings.targetName;
		if (!settings.workingDirectory.empty)
			dst.workingDirectory = processVars(settings.workingDirectory, project, pack, gsettings, true, buildEnvs);
		if (settings.mainSourceFile.length)
			dst.mainSourceFile = processVars(settings.mainSourceFile, project, pack, gsettings, true, buildEnvs);
	}
}

// Substitutes DUB variables in each entry of vars; with glob == true,
// entries containing glob characters are additionally expanded to the
// matching files/directories.
string[] processVars(bool glob = false)(in Project project, in Package pack, in GeneratorSettings gsettings, in string[] vars, bool are_paths = false, in string[string][] extraVers = null)
{
	auto ret = appender!(string[])();
	processVars!glob(ret, project, pack, gsettings, vars, are_paths, extraVers);
	return ret.data;
}
// ditto, appending to an existing appender
void processVars(bool glob = false)(ref Appender!(string[]) dst, in Project project, in Package pack, in GeneratorSettings gsettings, in string[] vars, bool are_paths = false, in string[string][] extraVers = null)
{
	static if (glob)
		alias process = processVarsWithGlob!(Project, Package);
	else
		alias process = processVars!(Project, Package);
	foreach (var; vars)
		dst.put(process(var, project, pack, gsettings, are_paths, extraVers));
}

// Substitutes DUB variables in a single value; with is_path == true,
// relative results are made absolute based on the package path.
string processVars(Project, Package)(string var, in Project project, in Package pack, in GeneratorSettings gsettings, bool is_path, in string[string][] extraVers = null)
{
	var = var.expandVars!(varName => getVariable(varName, project, pack, gsettings, extraVers));
	if (!is_path)
		return var;
	auto p = NativePath(var);
	if (!p.absolute)
		return (pack.path ~ p).toNativeString();
	else
		return p.toNativeString();
}
// Substitutes DUB variables in the values of an associative array.
string[string] processVars(bool glob = false)(in Project project, in Package pack, in GeneratorSettings gsettings, in string[string] vars, in string[string][] extraVers = null)
{
	string[string] ret;
	processVars!glob(ret, project, pack, gsettings, vars, extraVers);
	return ret;
}
// ditto, writing into an existing associative array
void processVars(bool glob = false)(ref string[string] dst, in Project project, in Package pack, in GeneratorSettings gsettings, in string[string] vars, in string[string][] extraVers)
{
	static if (glob)
		alias process = processVarsWithGlob!(Project, Package);
	else
		alias process = processVars!(Project, Package);
	foreach (k, var; vars)
		dst[k] = process(var, project, pack, gsettings, false, extraVers);
}

private string[] processVarsWithGlob(Project, Package)(string var, in Project project, in Package pack, in GeneratorSettings gsettings, bool is_path, in string[string][] extraVers)
{
	assert(is_path, "can't glob something that isn't a path");
	string res = processVars(var, project, pack, gsettings, is_path, extraVers);
	// Find the unglobbed prefix and iterate from there.
	size_t i = 0;
	size_t sepIdx = 0;
	loop: while (i < res.length) {
		switch_: switch (res[i])
		{
			case '*', '?', '[', '{': break loop;
			case '/': sepIdx = i; goto default;
			version (Windows) { case '\\': sepIdx = i; goto default; }
			default: ++i; break switch_;
		}
	}
	if (i == res.length) //no globbing found in the path
		return [res];
	import std.file : dirEntries, SpanMode;
	import std.path : buildNormalizedPath, globMatch, isAbsolute, relativePath;
	auto cwd = gsettings.toolWorkingDirectory.toNativeString;
	auto path = res[0 .. sepIdx];
	bool prependCwd = false;
	if (!isAbsolute(path))
	{
		prependCwd = true;
		path = buildNormalizedPath(cwd, path);
	}

	// walk the directory below the unglobbed prefix and keep the entries
	// whose (possibly re-relativized) name matches the full glob pattern
	return dirEntries(path, SpanMode.depth)
		.map!(de => prependCwd
			? de.name.relativePath(cwd)
			: de.name)
		.filter!(name => globMatch(name, res))
		.array;
}
/// Expand variables using `$VAR_NAME` or `${VAR_NAME}` syntax.
/// `$$` escapes itself and is expanded to a single `$`.
private string expandVars(alias expandVar)(string s)
{
	import std.functional : not;

	auto result = appender!string;

	static bool isVarChar(char c)
	{
		import std.ascii;
		return isAlphaNum(c) || c == '_';
	}

	while (true)
	{
		auto pos = s.indexOf('$');
		if (pos < 0)
		{
			result.put(s);
			return result.data;
		}
		result.put(s[0 .. pos]);
		s = s[pos + 1 .. $];
		enforce(s.length > 0, "Variable name expected at end of string");
		switch (s[0])
		{
			case '$':
				result.put("$");
				s = s[1 .. $];
				break;
			case '{':
				pos = s.indexOf('}');
				enforce(pos >= 0, "Could not find '}' to match '${'");
				result.put(expandVar(s[1 .. pos]));
				s = s[pos + 1 .. $];
				break;
			default:
				pos = s.representation.countUntil!(not!isVarChar);
				if (pos < 0)
					pos = s.length;
				result.put(expandVar(s[0 .. pos]));
				s = s[pos .. $];
				break;
		}
	}
}

unittest
{
	string[string] vars =
	[
		"A" : "a",
		"B" : "b",
	];

	string expandVar(string name) { auto p = name in vars; enforce(p, name); return *p; }

	assert(expandVars!expandVar("") == "");
	assert(expandVars!expandVar("x") == "x");
	assert(expandVars!expandVar("$$") == "$");
	assert(expandVars!expandVar("x$$") == "x$");
	assert(expandVars!expandVar("$$x") == "$x");
	assert(expandVars!expandVar("$$$$") == "$$");
	assert(expandVars!expandVar("x$A") == "xa");
	assert(expandVars!expandVar("x$$A") == "x$A");
	assert(expandVars!expandVar("$A$B") == "ab");
	assert(expandVars!expandVar("${A}$B") == "ab");
	assert(expandVars!expandVar("$A${B}") == "ab");
	assert(expandVars!expandVar("a${B}") == "ab");
	assert(expandVars!expandVar("${A}b") == "ab");

	import std.exception : assertThrown;
	assertThrown(expandVars!expandVar("$"));
	assertThrown(expandVars!expandVar("${}"));
	assertThrown(expandVars!expandVar("$|"));
	assertThrown(expandVars!expandVar("x$"));
	assertThrown(expandVars!expandVar("$X"));
	assertThrown(expandVars!expandVar("${"));
	assertThrown(expandVars!expandVar("${X"));

	// https://github.com/dlang/dmd/pull/9275
	assert(expandVars!expandVar("$${DUB_EXE:-dub}") == "${DUB_EXE:-dub}");
}

/// Expands the variables in the input string with the same rules as command
/// variables inside custom dub commands.
///
/// Params:
///     s = the input string where environment variables in form `$VAR` should be replaced
///     throwIfMissing = if true, throw an exception if the given variable is not found,
///                      otherwise replace unknown variables with the empty string.
string expandEnvironmentVariables(string s, bool throwIfMissing = true)
{
	import std.process : environment;

	return expandVars!((v) {
		auto ret = environment.get(v);
		if (ret is null && throwIfMissing)
			throw new Exception("Specified environment variable `$" ~ v ~ "` is not set");
		return ret;
	})(s);
}

// Keep the following list up-to-date if adding more build settings variables.
/// List of variables that can be used in build settings package(dub) immutable buildSettingsVars = [ "ARCH", "PLATFORM", "PLATFORM_POSIX", "BUILD_TYPE" ]; private string getVariable(Project, Package)(string name, in Project project, in Package pack, in GeneratorSettings gsettings, in string[string][] extraVars = null) { import dub.internal.utils : getDUBExePath; import std.process : environment, escapeShellFileName; import std.uni : asUpperCase; NativePath path; if (name == "PACKAGE_DIR") path = pack.path; else if (name == "ROOT_PACKAGE_DIR") path = project.rootPackage.path; if (name.endsWith("_PACKAGE_DIR")) { auto pname = name[0 .. $-12]; foreach (prj; project.getTopologicalPackageList()) if (prj.name.asUpperCase.map!(a => a == '-' ? '_' : a).equal(pname)) { path = prj.path; break; } } if (!path.empty) { // no trailing slash for clean path concatenation (see #1392) path.endsWithSlash = false; return path.toNativeString(); } if (name == "DUB") { return getDUBExePath(gsettings.platform.compilerBinary).toNativeString(); } if (name == "ARCH") { foreach (a; gsettings.platform.architecture) return a; return ""; } if (name == "PLATFORM") { import std.algorithm : filter; foreach (p; gsettings.platform.platform.filter!(p => p != "posix")) return p; foreach (p; gsettings.platform.platform) return p; return ""; } if (name == "PLATFORM_POSIX") { import std.algorithm : canFind; if (gsettings.platform.platform.canFind("posix")) return "posix"; foreach (p; gsettings.platform.platform) return p; return ""; } if (name == "BUILD_TYPE") return gsettings.buildType; if (name == "DFLAGS" || name == "LFLAGS") { auto buildSettings = pack.getBuildSettings(gsettings.platform, gsettings.config); if (name == "DFLAGS") return join(buildSettings.dflags," "); else if (name == "LFLAGS") return join(buildSettings.lflags," "); } import std.range; foreach (aa; retro(extraVars)) if (auto exvar = name in aa) return *exvar; auto envvar = environment.get(name); if (envvar !is null) return envvar; 
throw new Exception("Invalid variable: "~name); } unittest { static struct MockPackage { this(string name) { this.name = name; version (Posix) path = NativePath("/pkgs/"~name); else version (Windows) path = NativePath(`C:\pkgs\`~name); // see 4d4017c14c, #268, and #1392 for why this all package paths end on slash internally path.endsWithSlash = true; } string name; NativePath path; BuildSettings getBuildSettings(in BuildPlatform platform, string config) const { return BuildSettings(); } } static struct MockProject { MockPackage rootPackage; inout(MockPackage)[] getTopologicalPackageList() inout { return _dependencies; } private: MockPackage[] _dependencies; } MockProject proj = { rootPackage: MockPackage("root"), _dependencies: [MockPackage("dep1"), MockPackage("dep2")] }; auto pack = MockPackage("test"); GeneratorSettings gsettings; enum isPath = true; import std.path : dirSeparator; static NativePath woSlash(NativePath p) { p.endsWithSlash = false; return p; } // basic vars assert(processVars("Hello $PACKAGE_DIR", proj, pack, gsettings, !isPath) == "Hello "~woSlash(pack.path).toNativeString); assert(processVars("Hello $ROOT_PACKAGE_DIR", proj, pack, gsettings, !isPath) == "Hello "~woSlash(proj.rootPackage.path).toNativeString.chomp(dirSeparator)); assert(processVars("Hello $DEP1_PACKAGE_DIR", proj, pack, gsettings, !isPath) == "Hello "~woSlash(proj._dependencies[0].path).toNativeString); // ${VAR} replacements assert(processVars("Hello ${PACKAGE_DIR}"~dirSeparator~"foobar", proj, pack, gsettings, !isPath) == "Hello "~(pack.path ~ "foobar").toNativeString); assert(processVars("Hello $PACKAGE_DIR"~dirSeparator~"foobar", proj, pack, gsettings, !isPath) == "Hello "~(pack.path ~ "foobar").toNativeString); // test with isPath assert(processVars("local", proj, pack, gsettings, isPath) == (pack.path ~ "local").toNativeString); assert(processVars("foo/$$ESCAPED", proj, pack, gsettings, isPath) == (pack.path ~ "foo/$ESCAPED").toNativeString); 
assert(processVars("$$ESCAPED", proj, pack, gsettings, !isPath) == "$ESCAPED"); // test other env variables import std.process : environment; environment["MY_ENV_VAR"] = "blablabla"; assert(processVars("$MY_ENV_VAR", proj, pack, gsettings, !isPath) == "blablabla"); assert(processVars("${MY_ENV_VAR}suffix", proj, pack, gsettings, !isPath) == "blablablasuffix"); assert(processVars("$MY_ENV_VAR-suffix", proj, pack, gsettings, !isPath) == "blablabla-suffix"); assert(processVars("$MY_ENV_VAR:suffix", proj, pack, gsettings, !isPath) == "blablabla:suffix"); assert(processVars("$MY_ENV_VAR$MY_ENV_VAR", proj, pack, gsettings, !isPath) == "blablablablablabla"); environment.remove("MY_ENV_VAR"); } /** * Holds and stores a set of version selections for package dependencies. * * This is the runtime representation of the information contained in * "dub.selections.json" within a package's directory. * * Note that as subpackages share the same version as their main package, * this class will treat any subpackage reference as a reference to its * main package. */ public class SelectedVersions { protected { enum FileVersion = 1; Selections!1 m_selections; bool m_dirty = false; // has changes since last save bool m_bare = true; } /// Default file name to use for storing selections. enum defaultFile = "dub.selections.json"; /// Constructs a new empty version selection. public this(uint version_ = FileVersion) @safe pure { enforce(version_ == 1, "Unsupported file version"); this.m_selections = Selections!1(version_); } /// Constructs a new non-empty version selection. public this(Selections!1 data) @safe pure nothrow @nogc { this.m_selections = data; this.m_bare = false; } /** Constructs a new non-empty version selection, prefixing relative path selections with the specified prefix. To be used in cases where the "dub.selections.json" file isn't located in the root package directory. 
*/ public this(Selections!1 data, NativePath relPathPrefix) { this(data); if (relPathPrefix.empty) return; foreach (ref dep; m_selections.versions.byValue) { const depPath = dep.path; if (!depPath.empty && !depPath.absolute) dep = Dependency(relPathPrefix ~ depPath); } } /** Constructs a new version selection from JSON data. The structure of the JSON document must match the contents of the "dub.selections.json" file. */ deprecated("Pass a `dub.recipe.selection : Selected` directly") this(Json data) { deserialize(data); m_dirty = false; } /** Constructs a new version selections from an existing JSON file. */ deprecated("JSON deserialization is deprecated") this(NativePath path) { auto json = jsonFromFile(path); deserialize(json); m_dirty = false; m_bare = false; } /// Returns a list of names for all packages that have a version selection. @property string[] selectedPackages() const { return m_selections.versions.keys; } /// Determines if any changes have been made after loading the selections from a file. @property bool dirty() const { return m_dirty; } /// Determine if this set of selections is still empty (but not `clear`ed). @property bool bare() const { return m_bare && !m_dirty; } /// Removes all selections. void clear() { m_selections.versions = null; m_dirty = true; } /// Duplicates the set of selected versions from another instance. void set(SelectedVersions versions) { m_selections.fileVersion = versions.m_selections.fileVersion; m_selections.versions = versions.m_selections.versions.dup; m_selections.inheritable = versions.m_selections.inheritable; m_dirty = true; } /// Selects a certain version for a specific package. 
deprecated("Use the overload that accepts a `PackageName`") void selectVersion(string package_id, Version version_) { const name = PackageName(package_id); return this.selectVersion(name, version_); } /// Ditto void selectVersion(in PackageName name, Version version_) { const dep = Dependency(version_); this.selectVersionInternal(name, dep); } /// Selects a certain path for a specific package. deprecated("Use the overload that accepts a `PackageName`") void selectVersion(string package_id, NativePath path) { const name = PackageName(package_id); return this.selectVersion(name, path); } /// Ditto void selectVersion(in PackageName name, NativePath path) { const dep = Dependency(path); this.selectVersionInternal(name, dep); } /// Selects a certain Git reference for a specific package. deprecated("Use the overload that accepts a `PackageName`") void selectVersion(string package_id, Repository repository) { const name = PackageName(package_id); return this.selectVersion(name, repository); } /// Ditto void selectVersion(in PackageName name, Repository repository) { const dep = Dependency(repository); this.selectVersionInternal(name, dep); } /// Internal implementation of selectVersion private void selectVersionInternal(in PackageName name, in Dependency dep) { if (auto pdep = name.main.toString() in m_selections.versions) { if (*pdep == dep) return; } m_selections.versions[name.main.toString()] = dep; m_dirty = true; } deprecated("Move `spec` inside of the `repository` parameter and call `selectVersion`") void selectVersionWithRepository(string package_id, Repository repository, string spec) { this.selectVersion(package_id, Repository(repository.remote(), spec)); } /// Removes the selection for a particular package. 
deprecated("Use the overload that accepts a `PackageName`") void deselectVersion(string package_id) { const n = PackageName(package_id); this.deselectVersion(n); } /// Ditto void deselectVersion(in PackageName name) { m_selections.versions.remove(name.main.toString()); m_dirty = true; } /// Determines if a particular package has a selection set. deprecated("Use the overload that accepts a `PackageName`") bool hasSelectedVersion(string packageId) const { const name = PackageName(packageId); return this.hasSelectedVersion(name); } /// Ditto bool hasSelectedVersion(in PackageName name) const { return (name.main.toString() in m_selections.versions) !is null; } /** Returns the selection for a particular package. Note that the returned `Dependency` can either have the `Dependency.path` property set to a non-empty value, in which case this is a path based selection, or its `Dependency.version_` property is valid and it is a version selection. */ deprecated("Use the overload that accepts a `PackageName`") Dependency getSelectedVersion(string packageId) const { const name = PackageName(packageId); return this.getSelectedVersion(name); } /// Ditto Dependency getSelectedVersion(in PackageName name) const { enforce(hasSelectedVersion(name)); return m_selections.versions[name.main.toString()]; } /** Stores the selections to disk. The target file will be written in JSON format. Usually, `defaultFile` should be used as the file name and the directory should be the root directory of the project's root package. 
*/ deprecated("Use `PackageManager.writeSelections` to write a `SelectionsFile`") void save(NativePath path) { path.writeFile(PackageManager.selectionsToString(this.m_selections)); m_dirty = false; m_bare = false; } deprecated("Use `dub.dependency : Dependency.toJson(true)`") static Json dependencyToJson(Dependency d) { return d.toJson(true); } deprecated("JSON deserialization is deprecated") static Dependency dependencyFromJson(Json j) { if (j.type == Json.Type.string) return Dependency(Version(j.get!string)); else if (j.type == Json.Type.object && "path" in j) return Dependency(NativePath(j["path"].get!string)); else if (j.type == Json.Type.object && "repository" in j) return Dependency(Repository(j["repository"].get!string, enforce("version" in j, "Expected \"version\" field in repository version object").get!string)); else throw new Exception(format("Unexpected type for dependency: %s", j)); } deprecated("JSON serialization is deprecated") Json serialize() const { return PackageManager.selectionsToJSON(this.m_selections); } deprecated("JSON deserialization is deprecated") private void deserialize(Json json) { const fileVersion = json["fileVersion"].get!int; enforce(fileVersion == FileVersion, "Mismatched dub.selections.json version: " ~ to!string(fileVersion) ~ " vs. 
" ~ to!string(FileVersion)); clear(); m_selections.fileVersion = fileVersion; scope(failure) clear(); if (auto p = "inheritable" in json) m_selections.inheritable = p.get!bool; foreach (string p, dep; json["versions"]) m_selections.versions[p] = dependencyFromJson(dep); } } /// The template code from which the test runner is generated private immutable TestRunnerTemplate = q{ deprecated // allow silently using deprecated symbols module dub_test_root; import std.typetuple; %-(static import %s; %); alias allModules = TypeTuple!( %-(%s, %) ); %s }; /// The default test runner that gets used if none is provided private immutable DefaultTestRunnerCode = q{ version(D_BetterC) { extern(C) int main() { foreach (module_; allModules) { foreach (unitTest; __traits(getUnitTests, module_)) { unitTest(); } } import core.stdc.stdio : puts; puts("All unit tests have been run successfully."); return 0; } } else { void main() { version (D_Coverage) { } else { import std.stdio : writeln; writeln("All unit tests have been run successfully."); } } shared static this() { version (Have_tested) { import tested; import core.runtime; import std.exception; Runtime.moduleUnitTester = () => true; enforce(runUnitTests!allModules(new ConsoleTestResultWriter), "Unit tests failed."); } } } }; dub-1.40.0/source/dub/recipe/000077500000000000000000000000001477246567400157155ustar00rootroot00000000000000dub-1.40.0/source/dub/recipe/io.d000066400000000000000000000257651477246567400165100ustar00rootroot00000000000000/** Package recipe reading/writing facilities. Copyright: © 2015-2016, Sönke Ludwig License: Subject to the terms of the MIT license, as written in the included LICENSE.txt file. Authors: Sönke Ludwig */ module dub.recipe.io; import dub.dependency : PackageName; import dub.recipe.packagerecipe; import dub.internal.logging; import dub.internal.vibecompat.core.file; import dub.internal.vibecompat.inet.path; import dub.internal.configy.Read; /** Reads a package recipe from a file. 
The file format (JSON/SDLang) will be determined from the file extension. Params: filename = NativePath of the package recipe file parent = Optional name of the parent package (if this is a sub package) mode = Whether to issue errors, warning, or ignore unknown keys in dub.json Returns: Returns the package recipe contents Throws: Throws an exception if an I/O or syntax error occurs */ deprecated("Use the overload that accepts a `NativePath` as first argument") PackageRecipe readPackageRecipe( string filename, string parent = null, StrictMode mode = StrictMode.Ignore) { return readPackageRecipe(NativePath(filename), parent, mode); } /// ditto deprecated("Use the overload that accepts a `PackageName` as second argument") PackageRecipe readPackageRecipe( NativePath filename, string parent, StrictMode mode = StrictMode.Ignore) { return readPackageRecipe(filename, parent.length ? PackageName(parent) : PackageName.init, mode); } /// ditto PackageRecipe readPackageRecipe(NativePath filename, in PackageName parent = PackageName.init, StrictMode mode = StrictMode.Ignore) { string text = readText(filename); return parsePackageRecipe(text, filename.toNativeString(), parent, null, mode); } /** Parses an in-memory package recipe. The file format (JSON/SDLang) will be determined from the file extension. 
Params: contents = The contents of the recipe file filename = Name associated with the package recipe - this is only used to determine the file format from the file extension parent = Optional name of the parent package (if this is a sub package) default_package_name = Optional default package name (if no package name is found in the recipe this value will be used) mode = Whether to issue errors, warning, or ignore unknown keys in dub.json Returns: Returns the package recipe contents Throws: Throws an exception if an I/O or syntax error occurs */ deprecated("Use the overload that accepts a `PackageName` as 3rd argument") PackageRecipe parsePackageRecipe(string contents, string filename, string parent, string default_package_name = null, StrictMode mode = StrictMode.Ignore) { return parsePackageRecipe(contents, filename, parent.length ? PackageName(parent) : PackageName.init, default_package_name, mode); } /// Ditto PackageRecipe parsePackageRecipe(string contents, string filename, in PackageName parent = PackageName.init, string default_package_name = null, StrictMode mode = StrictMode.Ignore) { import std.algorithm : endsWith; import dub.compilers.buildsettings : TargetType; import dub.internal.vibecompat.data.json; import dub.recipe.json : parseJson; import dub.recipe.sdl : parseSDL; PackageRecipe ret; ret.name = default_package_name; if (filename.endsWith(".json")) { try { ret = parseConfigString!PackageRecipe(contents, filename, mode); fixDependenciesNames(ret.name, ret); } catch (ConfigException exc) { logWarn("Your `dub.json` file use non-conventional features that are deprecated"); logWarn("Please adjust your `dub.json` file as those warnings will turn into errors in dub v1.40.0"); logWarn("Error was: %s", exc); // Fallback to JSON parser ret = PackageRecipe.init; parseJson(ret, parseJsonString(contents, filename), parent); } catch (Exception exc) { logWarn("Your `dub.json` file use non-conventional features that are deprecated"); logWarn("This is most 
likely due to duplicated keys."); logWarn("Please adjust your `dub.json` file as those warnings will turn into errors in dub v1.40.0"); logWarn("Error was: %s", exc); // Fallback to JSON parser ret = PackageRecipe.init; parseJson(ret, parseJsonString(contents, filename), parent); } // `debug = ConfigFillerDebug` also enables verbose parser output debug (ConfigFillerDebug) { import std.stdio; PackageRecipe jsonret; parseJson(jsonret, parseJsonString(contents, filename), parent_name); if (ret != jsonret) { writeln("Content of JSON and YAML parsing differ for file: ", filename); writeln("-------------------------------------------------------------------"); writeln("JSON (excepted): ", jsonret); writeln("-------------------------------------------------------------------"); writeln("YAML (actual ): ", ret); writeln("========================================"); ret = jsonret; } } } else if (filename.endsWith(".sdl")) parseSDL(ret, contents, parent, filename); else assert(false, "readPackageRecipe called with filename with unknown extension: "~filename); // Fix for issue #711: `targetType` should be inherited, or default to library static void sanitizeTargetType(ref PackageRecipe r) { TargetType defaultTT = (r.buildSettings.targetType == TargetType.autodetect) ? 
TargetType.library : r.buildSettings.targetType; foreach (ref conf; r.configurations) if (conf.buildSettings.targetType == TargetType.autodetect) conf.buildSettings.targetType = defaultTT; // recurse into sub packages foreach (ref subPackage; r.subPackages) sanitizeTargetType(subPackage.recipe); } sanitizeTargetType(ret); return ret; } unittest { // issue #711 - configuration default target type not correct for SDL import dub.compilers.buildsettings : TargetType; auto inputs = [ "dub.sdl": "name \"test\"\nconfiguration \"a\" {\n}", "dub.json": "{\"name\": \"test\", \"configurations\": [{\"name\": \"a\"}]}" ]; foreach (file, content; inputs) { auto pr = parsePackageRecipe(content, file); assert(pr.name == "test"); assert(pr.configurations.length == 1); assert(pr.configurations[0].name == "a"); assert(pr.configurations[0].buildSettings.targetType == TargetType.library); } } unittest { // issue #711 - configuration default target type not correct for SDL import dub.compilers.buildsettings : TargetType; auto inputs = [ "dub.sdl": "name \"test\"\ntargetType \"autodetect\"\nconfiguration \"a\" {\n}", "dub.json": "{\"name\": \"test\", \"targetType\": \"autodetect\", \"configurations\": [{\"name\": \"a\"}]}" ]; foreach (file, content; inputs) { auto pr = parsePackageRecipe(content, file); assert(pr.name == "test"); assert(pr.configurations.length == 1); assert(pr.configurations[0].name == "a"); assert(pr.configurations[0].buildSettings.targetType == TargetType.library); } } unittest { // issue #711 - configuration default target type not correct for SDL import dub.compilers.buildsettings : TargetType; auto inputs = [ "dub.sdl": "name \"test\"\ntargetType \"executable\"\nconfiguration \"a\" {\n}", "dub.json": "{\"name\": \"test\", \"targetType\": \"executable\", \"configurations\": [{\"name\": \"a\"}]}" ]; foreach (file, content; inputs) { auto pr = parsePackageRecipe(content, file); assert(pr.name == "test"); assert(pr.configurations.length == 1); 
assert(pr.configurations[0].name == "a"); assert(pr.configurations[0].buildSettings.targetType == TargetType.executable); } } unittest { // make sure targetType of sub packages are sanitized too import dub.compilers.buildsettings : TargetType; auto inputs = [ "dub.sdl": "name \"test\"\nsubPackage {\nname \"sub\"\ntargetType \"sourceLibrary\"\nconfiguration \"a\" {\n}\n}", "dub.json": "{\"name\": \"test\", \"subPackages\": [ { \"name\": \"sub\", \"targetType\": \"sourceLibrary\", \"configurations\": [{\"name\": \"a\"}] } ] }" ]; foreach (file, content; inputs) { auto pr = parsePackageRecipe(content, file); assert(pr.name == "test"); const spr = pr.subPackages[0].recipe; assert(spr.name == "sub"); assert(spr.configurations.length == 1); assert(spr.configurations[0].name == "a"); assert(spr.configurations[0].buildSettings.targetType == TargetType.sourceLibrary); } } /** Writes the textual representation of a package recipe to a file. Note that the file extension must be either "json" or "sdl". */ void writePackageRecipe(string filename, const scope ref PackageRecipe recipe) { writePackageRecipe(NativePath(filename), recipe); } /// ditto void writePackageRecipe(NativePath filename, const scope ref PackageRecipe recipe) { import std.array; auto app = appender!string(); serializePackageRecipe(app, recipe, filename.toNativeString()); writeFile(filename, app.data); } /** Converts a package recipe to its textual representation. The extension of the supplied `filename` must be either "json" or "sdl". The output format is chosen accordingly. 
*/ void serializePackageRecipe(R)(ref R dst, const scope ref PackageRecipe recipe, string filename) { import std.algorithm : endsWith; import dub.internal.vibecompat.data.json : writeJsonString; import dub.recipe.json : toJson; import dub.recipe.sdl : toSDL; if (filename.endsWith(".json")) dst.writeJsonString!(R, true)(toJson(recipe)); else if (filename.endsWith(".sdl")) toSDL(recipe).toSDLDocument(dst); else assert(false, "writePackageRecipe called with filename with unknown extension: "~filename); } unittest { import std.format; import dub.dependency; import dub.internal.utils : deepCompare; static void success (string source, in PackageRecipe expected, size_t line = __LINE__) { const result = parseConfigString!PackageRecipe(source, "dub.json"); deepCompare(result, expected, __FILE__, line); } static void error (string source, string expected, size_t line = __LINE__) { try { auto result = parseConfigString!PackageRecipe(source, "dub.json"); assert(0, format("[%s:%d] Exception should have been thrown but wasn't: %s", __FILE__, line, result)); } catch (Exception exc) assert(exc.toString() == expected, format("[%s:%s] result != expected: '%s' != '%s'", __FILE__, line, exc.toString(), expected)); } alias YAMLDep = typeof(BuildSettingsTemplate.dependencies[string.init]); const PackageRecipe expected1 = { name: "foo", buildSettings: { dependencies: RecipeDependencyAA([ "repo": YAMLDep(Dependency(Repository( "git+https://github.com/dlang/dmd", "09d04945bdbc0cba36f7bb1e19d5bd009d4b0ff2", ))), "path": YAMLDep(Dependency(NativePath("/foo/bar/jar/"))), "version": YAMLDep(Dependency(VersionRange.fromString("~>1.0"))), "version2": YAMLDep(Dependency(Version("4.2.0"))), ])}, }; success( `{ "name": "foo", "dependencies": { "repo": { "repository": "git+https://github.com/dlang/dmd", "version": "09d04945bdbc0cba36f7bb1e19d5bd009d4b0ff2" }, "path": { "path": "/foo/bar/jar/" }, "version": { "version": "~>1.0" }, "version2": "4.2.0" }}`, expected1); error(`{ "name": "bar", 
"dependencies": {"bad": { "repository": "git+https://github.com/dlang/dmd" }}}`, "dub.json(0:41): dependencies[bad]: Need to provide a commit hash in 'version' field with 'repository' dependency"); } dub-1.40.0/source/dub/recipe/json.d000066400000000000000000000437171477246567400170470ustar00rootroot00000000000000/** JSON format support for PackageRecipe Copyright: © 2012-2014 rejectedsoftware e.K. License: Subject to the terms of the MIT license, as written in the included LICENSE.txt file. Authors: Sönke Ludwig, Matthias Dondorff */ module dub.recipe.json; import dub.compilers.compiler; import dub.dependency; import dub.recipe.packagerecipe; import dub.internal.vibecompat.data.json; import std.algorithm : canFind, startsWith; import std.conv : to; import std.exception : enforce; import std.range; import std.string : format, indexOf; import std.traits : EnumMembers; deprecated("Use the overload that takes a `PackageName` as 3rd argument") void parseJson(ref PackageRecipe recipe, Json json, string parent) { const PackageName pname = parent ? 
PackageName(parent) : PackageName.init; parseJson(recipe, json, pname); } void parseJson(ref PackageRecipe recipe, Json json, in PackageName parent = PackageName.init) { foreach (string field, value; json) { switch (field) { default: break; case "name": recipe.name = value.get!string; break; case "version": recipe.version_ = value.get!string; break; case "description": recipe.description = value.get!string; break; case "homepage": recipe.homepage = value.get!string; break; case "authors": recipe.authors = deserializeJson!(string[])(value); break; case "copyright": recipe.copyright = value.get!string; break; case "license": recipe.license = value.get!string; break; case "configurations": break; // handled below, after the global settings have been parsed case "buildTypes": foreach (string name, settings; value) { BuildSettingsTemplate bs; bs.parseJson(settings, PackageName.init); recipe.buildTypes[name] = bs; } break; case "toolchainRequirements": recipe.toolchainRequirements.parseJson(value); break; case "-ddoxFilterArgs": recipe.ddoxFilterArgs = deserializeJson!(string[])(value); break; case "-ddoxTool": recipe.ddoxTool = value.get!string; break; } } enforce(recipe.name.length > 0, "The package \"name\" field is missing or empty."); const fullname = parent.toString().length ? 
PackageName(parent.toString() ~ ":" ~ recipe.name) : PackageName(recipe.name); // parse build settings recipe.buildSettings.parseJson(json, fullname); if (auto pv = "configurations" in json) { foreach (settings; *pv) { ConfigurationInfo ci; ci.parseJson(settings, fullname); recipe.configurations ~= ci; } } // parse any sub packages after the main package has been fully parsed if (auto ps = "subPackages" in json) recipe.parseSubPackages(fullname, ps.opt!(Json[])); } Json toJson(const scope ref PackageRecipe recipe) { auto ret = recipe.buildSettings.toJson(); ret["name"] = recipe.name; if (!recipe.version_.empty) ret["version"] = recipe.version_; if (!recipe.description.empty) ret["description"] = recipe.description; if (!recipe.homepage.empty) ret["homepage"] = recipe.homepage; if (!recipe.authors.empty) ret["authors"] = serializeToJson(recipe.authors); if (!recipe.copyright.empty) ret["copyright"] = recipe.copyright; if (!recipe.license.empty) ret["license"] = recipe.license; if (!recipe.subPackages.empty) { Json[] jsonSubPackages = new Json[recipe.subPackages.length]; foreach (i, subPackage; recipe.subPackages) { if (subPackage.path !is null) { jsonSubPackages[i] = Json(subPackage.path); } else { jsonSubPackages[i] = subPackage.recipe.toJson(); } } ret["subPackages"] = jsonSubPackages; } if (recipe.configurations.length) { Json[] configs; foreach(config; recipe.configurations) configs ~= config.toJson(); ret["configurations"] = configs; } if (recipe.buildTypes.length) { Json[string] types; foreach (name, settings; recipe.buildTypes) types[name] = settings.toJson(); ret["buildTypes"] = types; } if (!recipe.toolchainRequirements.empty) { ret["toolchainRequirements"] = recipe.toolchainRequirements.toJson(); } if (!recipe.ddoxFilterArgs.empty) ret["-ddoxFilterArgs"] = recipe.ddoxFilterArgs.serializeToJson(); if (!recipe.ddoxTool.empty) ret["-ddoxTool"] = recipe.ddoxTool; return ret; } private void parseSubPackages(ref PackageRecipe recipe, in PackageName parent, 
Json[] subPackagesJson) { enforce(!parent.sub, format("'subPackages' found in '%s'. This is only supported in the main package file for '%s'.", parent, parent.main)); recipe.subPackages = new SubPackage[subPackagesJson.length]; foreach (i, subPackageJson; subPackagesJson) { // Handle referenced Packages if(subPackageJson.type == Json.Type.string) { string subpath = subPackageJson.get!string; recipe.subPackages[i] = SubPackage(subpath, PackageRecipe.init); } else { PackageRecipe subinfo; subinfo.parseJson(subPackageJson, parent); recipe.subPackages[i] = SubPackage(null, subinfo); } } } private void parseJson(ref ConfigurationInfo config, Json json, in PackageName pname) { foreach (string name, value; json) { switch (name) { default: break; case "name": config.name = value.get!string; enforce(!config.name.empty, "Configurations must have a non-empty name."); break; case "platforms": config.platforms = deserializeJson!(string[])(value); break; } } enforce(!config.name.empty, "Configuration is missing a name."); config.buildSettings.parseJson(json, pname); } private Json toJson(const scope ref ConfigurationInfo config) { auto ret = config.buildSettings.toJson(); ret["name"] = config.name; if (config.platforms.length) ret["platforms"] = serializeToJson(config.platforms); return ret; } private void parseJson(ref BuildSettingsTemplate bs, Json json, in PackageName pname) { foreach(string name, value; json) { auto idx = indexOf(name, "-"); string basename, suffix; if( idx >= 0 ) { basename = name[0 .. idx]; suffix = name[idx + 1 .. $]; } else basename = name; switch(basename){ default: break; case "dependencies": foreach (string pkg, verspec; value) { if (pkg.startsWith(":")) { enforce(!pname.sub.length, "Short-hand packages syntax not allowed within " ~ "sub packages: %s -> %s".format(pname, pkg)); pkg = pname.toString() ~ pkg; } enforce(pkg !in bs.dependencies, "The dependency '"~pkg~"' is specified more than once." 
); bs.dependencies[pkg] = Dependency.fromJson(verspec); if (verspec.type == Json.Type.object) bs.dependencies[pkg].settings.parseJson(verspec, pname); } break; case "systemDependencies": bs.systemDependencies = value.get!string; break; case "targetType": enforce(suffix.empty, "targetType does not support platform customization."); bs.targetType = value.get!string.to!TargetType; break; case "targetPath": enforce(suffix.empty, "targetPath does not support platform customization."); bs.targetPath = value.get!string; break; case "targetName": enforce(suffix.empty, "targetName does not support platform customization."); bs.targetName = value.get!string; break; case "workingDirectory": enforce(suffix.empty, "workingDirectory does not support platform customization."); bs.workingDirectory = value.get!string; break; case "mainSourceFile": enforce(suffix.empty, "mainSourceFile does not support platform customization."); bs.mainSourceFile = value.get!string; break; case "subConfigurations": enforce(suffix.empty, "subConfigurations does not support platform customization."); bs.subConfigurations = deserializeJson!(string[string])(value); break; case "dflags": bs.dflags[suffix] = deserializeJson!(string[])(value); break; case "lflags": bs.lflags[suffix] = deserializeJson!(string[])(value); break; case "libs": bs.libs[suffix] = deserializeJson!(string[])(value); break; case "files": case "sourceFiles": bs.sourceFiles[suffix] = deserializeJson!(string[])(value); break; case "sourcePaths": bs.sourcePaths[suffix] = deserializeJson!(string[])(value); break; case "cSourcePaths": bs.cSourcePaths[suffix] = deserializeJson!(string[])(value); break; case "sourcePath": bs.sourcePaths[suffix] ~= [value.get!string]; break; // deprecated case "excludedSourceFiles": bs.excludedSourceFiles[suffix] = deserializeJson!(string[])(value); break; case "injectSourceFiles": bs.injectSourceFiles[suffix] = deserializeJson!(string[])(value); break; case "copyFiles": bs.copyFiles[suffix] = 
deserializeJson!(string[])(value); break; case "extraDependencyFiles": bs.extraDependencyFiles[suffix] = deserializeJson!(string[])(value); break; case "versions": bs.versions[suffix] = deserializeJson!(string[])(value); break; case "debugVersions": bs.debugVersions[suffix] = deserializeJson!(string[])(value); break; case "-versionFilters": bs.versionFilters[suffix] = deserializeJson!(string[])(value); break; case "-debugVersionFilters": bs.debugVersionFilters[suffix] = deserializeJson!(string[])(value); break; case "importPaths": bs.importPaths[suffix] = deserializeJson!(string[])(value); break; case "cImportPaths": bs.cImportPaths[suffix] = deserializeJson!(string[])(value); break; case "stringImportPaths": bs.stringImportPaths[suffix] = deserializeJson!(string[])(value); break; case "preGenerateCommands": bs.preGenerateCommands[suffix] = deserializeJson!(string[])(value); break; case "postGenerateCommands": bs.postGenerateCommands[suffix] = deserializeJson!(string[])(value); break; case "preBuildCommands": bs.preBuildCommands[suffix] = deserializeJson!(string[])(value); break; case "postBuildCommands": bs.postBuildCommands[suffix] = deserializeJson!(string[])(value); break; case "preRunCommands": bs.preRunCommands[suffix] = deserializeJson!(string[])(value); break; case "postRunCommands": bs.postRunCommands[suffix] = deserializeJson!(string[])(value); break; case "environments": bs.environments[suffix] = deserializeJson!(string[string])(value); break; case "buildEnvironments": bs.buildEnvironments[suffix] = deserializeJson!(string[string])(value); break; case "runEnvironments": bs.runEnvironments[suffix] = deserializeJson!(string[string])(value); break; case "preGenerateEnvironments": bs.preGenerateEnvironments[suffix] = deserializeJson!(string[string])(value); break; case "postGenerateEnvironments": bs.postGenerateEnvironments[suffix] = deserializeJson!(string[string])(value); break; case "preBuildEnvironments": bs.preBuildEnvironments[suffix] = 
deserializeJson!(string[string])(value); break; case "postBuildEnvironments": bs.postBuildEnvironments[suffix] = deserializeJson!(string[string])(value); break; case "preRunEnvironments": bs.preRunEnvironments[suffix] = deserializeJson!(string[string])(value); break; case "postRunEnvironments": bs.postRunEnvironments[suffix] = deserializeJson!(string[string])(value); break; case "buildRequirements": Flags!BuildRequirement reqs; foreach (req; deserializeJson!(string[])(value)) reqs |= to!BuildRequirement(req); bs.buildRequirements[suffix] = reqs; break; case "buildOptions": Flags!BuildOption options; foreach (opt; deserializeJson!(string[])(value)) options |= to!BuildOption(opt); bs.buildOptions[suffix] = options; break; } } } private Json toJson(const scope ref BuildSettingsTemplate bs) { static string withSuffix (string pre, string post) { if (!post.length) return pre; return pre ~ "-" ~ post; } auto ret = Json.emptyObject; if( bs.dependencies !is null ){ auto deps = Json.emptyObject; foreach( pack, d; bs.dependencies ) deps[pack] = d.toJson(); ret["dependencies"] = deps; } if (bs.systemDependencies !is null) ret["systemDependencies"] = bs.systemDependencies; if (bs.targetType != TargetType.autodetect) ret["targetType"] = bs.targetType.to!string(); if (!bs.targetPath.empty) ret["targetPath"] = bs.targetPath; if (!bs.targetName.empty) ret["targetName"] = bs.targetName; if (!bs.workingDirectory.empty) ret["workingDirectory"] = bs.workingDirectory; if (!bs.mainSourceFile.empty) ret["mainSourceFile"] = bs.mainSourceFile; if (bs.subConfigurations.length > 0) ret["subConfigurations"] = serializeToJson(bs.subConfigurations); foreach (suffix, arr; bs.dflags) ret[withSuffix("dflags", suffix)] = serializeToJson(arr); foreach (suffix, arr; bs.lflags) ret[withSuffix("lflags", suffix)] = serializeToJson(arr); foreach (suffix, arr; bs.libs) ret[withSuffix("libs", suffix)] = serializeToJson(arr); foreach (suffix, arr; bs.sourceFiles) ret[withSuffix("sourceFiles", suffix)] = 
serializeToJson(arr); foreach (suffix, arr; bs.sourcePaths) ret[withSuffix("sourcePaths", suffix)] = serializeToJson(arr); foreach (suffix, arr; bs.cSourcePaths) ret[withSuffix("cSourcePaths", suffix)] = serializeToJson(arr); foreach (suffix, arr; bs.excludedSourceFiles) ret[withSuffix("excludedSourceFiles", suffix)] = serializeToJson(arr); foreach (suffix, arr; bs.injectSourceFiles) ret[withSuffix("injectSourceFiles", suffix)] = serializeToJson(arr); foreach (suffix, arr; bs.copyFiles) ret[withSuffix("copyFiles", suffix)] = serializeToJson(arr); foreach (suffix, arr; bs.extraDependencyFiles) ret[withSuffix("extraDependencyFiles", suffix)] = serializeToJson(arr); foreach (suffix, arr; bs.versions) ret[withSuffix("versions", suffix)] = serializeToJson(arr); foreach (suffix, arr; bs.debugVersions) ret[withSuffix("debugVersions", suffix)] = serializeToJson(arr); foreach (suffix, arr; bs.versionFilters) ret[withSuffix("-versionFilters", suffix)] = serializeToJson(arr); foreach (suffix, arr; bs.debugVersionFilters) ret[withSuffix("-debugVersionFilters", suffix)] = serializeToJson(arr); foreach (suffix, arr; bs.importPaths) ret[withSuffix("importPaths", suffix)] = serializeToJson(arr); foreach (suffix, arr; bs.cImportPaths) ret[withSuffix("cImportPaths", suffix)] = serializeToJson(arr); foreach (suffix, arr; bs.stringImportPaths) ret[withSuffix("stringImportPaths", suffix)] = serializeToJson(arr); foreach (suffix, arr; bs.preGenerateCommands) ret[withSuffix("preGenerateCommands", suffix)] = serializeToJson(arr); foreach (suffix, arr; bs.postGenerateCommands) ret[withSuffix("postGenerateCommands", suffix)] = serializeToJson(arr); foreach (suffix, arr; bs.preBuildCommands) ret[withSuffix("preBuildCommands", suffix)] = serializeToJson(arr); foreach (suffix, arr; bs.postBuildCommands) ret[withSuffix("postBuildCommands", suffix)] = serializeToJson(arr); foreach (suffix, arr; bs.preRunCommands) ret[withSuffix("preRunCommands", suffix)] = serializeToJson(arr); foreach (suffix, 
arr; bs.postRunCommands) ret[withSuffix("postRunCommands", suffix)] = serializeToJson(arr); foreach (suffix, aa; bs.environments) ret[withSuffix("environments", suffix)] = serializeToJson(aa); foreach (suffix, aa; bs.buildEnvironments) ret[withSuffix("buildEnvironments", suffix)] = serializeToJson(aa); foreach (suffix, aa; bs.runEnvironments) ret[withSuffix("runEnvironments", suffix)] = serializeToJson(aa); foreach (suffix, aa; bs.preGenerateEnvironments) ret[withSuffix("preGenerateEnvironments", suffix)] = serializeToJson(aa); foreach (suffix, aa; bs.postGenerateEnvironments) ret[withSuffix("postGenerateEnvironments", suffix)] = serializeToJson(aa); foreach (suffix, aa; bs.preBuildEnvironments) ret[withSuffix("preBuildEnvironments", suffix)] = serializeToJson(aa); foreach (suffix, aa; bs.postBuildEnvironments) ret[withSuffix("postBuildEnvironments", suffix)] = serializeToJson(aa); foreach (suffix, aa; bs.preRunEnvironments) ret[withSuffix("preRunEnvironments", suffix)] = serializeToJson(aa); foreach (suffix, aa; bs.postRunEnvironments) ret[withSuffix("postRunEnvironments", suffix)] = serializeToJson(aa); foreach (suffix, arr; bs.buildRequirements) { string[] val; foreach (i; [EnumMembers!BuildRequirement]) if (arr & i) val ~= to!string(i); ret[withSuffix("buildRequirements", suffix)] = serializeToJson(val); } foreach (suffix, arr; bs.buildOptions) { string[] val; foreach (i; [EnumMembers!BuildOption]) if (arr & i) val ~= to!string(i); ret[withSuffix("buildOptions", suffix)] = serializeToJson(val); } return ret; } private void parseJson(ref ToolchainRequirements tr, Json json) { foreach (string name, value; json) tr.addRequirement(name, value.get!string); } private Json toJson(const scope ref ToolchainRequirements tr) { auto ret = Json.emptyObject; if (tr.dub != VersionRange.Any) ret["dub"] = serializeToJson(tr.dub); if (tr.frontend != VersionRange.Any) ret["frontend"] = serializeToJson(tr.frontend); if (tr.dmd != VersionRange.Any) ret["dmd"] = 
serializeToJson(tr.dmd); if (tr.ldc != VersionRange.Any) ret["ldc"] = serializeToJson(tr.ldc); if (tr.gdc != VersionRange.Any) ret["gdc"] = serializeToJson(tr.gdc); return ret; } unittest { import std.string: strip, outdent; static immutable json = ` { "name": "projectname", "environments": { "Var1": "env" }, "buildEnvironments": { "Var2": "buildEnv" }, "runEnvironments": { "Var3": "runEnv" }, "preGenerateEnvironments": { "Var4": "preGenEnv" }, "postGenerateEnvironments": { "Var5": "postGenEnv" }, "preBuildEnvironments": { "Var6": "preBuildEnv" }, "postBuildEnvironments": { "Var7": "postBuildEnv" }, "preRunEnvironments": { "Var8": "preRunEnv" }, "postRunEnvironments": { "Var9": "postRunEnv" } } `.strip.outdent; auto jsonValue = parseJsonString(json); PackageRecipe rec1; parseJson(rec1, jsonValue); PackageRecipe rec; // verify that all fields are serialized properly parseJson(rec, rec1.toJson()); assert(rec.name == "projectname"); assert(rec.buildSettings.environments == ["": ["Var1": "env"]]); assert(rec.buildSettings.buildEnvironments == ["": ["Var2": "buildEnv"]]); assert(rec.buildSettings.runEnvironments == ["": ["Var3": "runEnv"]]); assert(rec.buildSettings.preGenerateEnvironments == ["": ["Var4": "preGenEnv"]]); assert(rec.buildSettings.postGenerateEnvironments == ["": ["Var5": "postGenEnv"]]); assert(rec.buildSettings.preBuildEnvironments == ["": ["Var6": "preBuildEnv"]]); assert(rec.buildSettings.postBuildEnvironments == ["": ["Var7": "postBuildEnv"]]); assert(rec.buildSettings.preRunEnvironments == ["": ["Var8": "preRunEnv"]]); assert(rec.buildSettings.postRunEnvironments == ["": ["Var9": "postRunEnv"]]); } dub-1.40.0/source/dub/recipe/packagerecipe.d000066400000000000000000000712471477246567400206600ustar00rootroot00000000000000/** Abstract representation of a package description file. Copyright: © 2012-2014 rejectedsoftware e.K. License: Subject to the terms of the MIT license, as written in the included LICENSE.txt file. 
Authors: Sönke Ludwig, Matthias Dondorff */ module dub.recipe.packagerecipe; import dub.compilers.compiler; import dub.compilers.utils : warnOnSpecialCompilerFlags; import dub.dependency; import dub.internal.logging; import dub.internal.vibecompat.core.file; import dub.internal.vibecompat.inet.path; import dub.internal.configy.Attributes; import std.algorithm : findSplit, sort; import std.array : join, split; import std.exception : enforce; import std.file; import std.range; import std.process : environment; /** Returns the individual parts of a qualified package name. Sub qualified package names are lists of package names separated by ":". For example, "packa:packb:packc" references a package named "packc" that is a sub package of "packb", which in turn is a sub package of "packa". */ deprecated("This function is not supported as subpackages cannot be nested") string[] getSubPackagePath(string package_name) @safe pure { return package_name.split(":"); } deprecated @safe unittest { assert(getSubPackagePath("packa:packb:packc") == ["packa", "packb", "packc"]); assert(getSubPackagePath("pack") == ["pack"]); } /** Returns the name of the top level package for a given (sub) package name of format `"basePackageName"` or `"basePackageName:subPackageName"`. In case of a top level package, the qualified name is returned unmodified. */ deprecated("Use `dub.dependency : PackageName(arg).main` instead") string getBasePackageName(string package_name) @safe pure { return package_name.findSplit(":")[0]; } /** Returns the qualified sub package part of the given package name of format `"basePackageName:subPackageName"`, or empty string if none. This is the part of the package name excluding the base package name. See also $(D getBasePackageName). 
*/ deprecated("Use `dub.dependency : PackageName(arg).sub` instead") string getSubPackageName(string package_name) @safe pure { return package_name.findSplit(":")[2]; } deprecated @safe unittest { assert(getBasePackageName("packa:packb:packc") == "packa"); assert(getBasePackageName("pack") == "pack"); assert(getSubPackageName("packa:packb:packc") == "packb:packc"); assert(getSubPackageName("pack") == ""); } /** Represents the contents of a package recipe file (dub.json/dub.sdl) in an abstract way. This structure is used to reason about package descriptions in isolation. For higher level package handling, see the $(D Package) class. */ struct PackageRecipe { /** * Name of the package, used to uniquely identify the package. * * This field is the only mandatory one. * Must be comprised of only lower case ASCII alpha-numeric characters, * "-" or "_". */ string name; /// Brief description of the package. @Optional string description; /// URL of the project website @Optional string homepage; /** * List of project authors * * the suggested format is either: * "Peter Parker" * or * "Peter Parker " */ @Optional string[] authors; /// Copyright declaration string @Optional string copyright; /// License(s) under which the project can be used @Optional string license; /// Set of version requirements for DUB, compilers and/or language frontend. @Optional ToolchainRequirements toolchainRequirements; /** * Specifies an optional list of build configurations * * By default, the first configuration present in the package recipe * will be used, except for special configurations (e.g. "unittest"). * A specific configuration can be chosen from the command line using * `--config=name` or `-c name`. A package can select a specific * configuration in one of its dependency by using the `subConfigurations` * build setting. * Build settings defined at the top level affect all configurations. 
*/ @Optional @Key("name") ConfigurationInfo[] configurations; /** * Defines additional custom build types or overrides the default ones * * Build types can be selected from the command line using `--build=name` * or `-b name`. The default build type is `debug`. */ @Optional BuildSettingsTemplate[string] buildTypes; /** * Build settings influence the command line arguments and options passed * to the compiler and linker. * * All build settings can be present at the top level, and are optional. * Build settings can also be found in `configurations`. */ @Optional BuildSettingsTemplate buildSettings; alias buildSettings this; /** * Specifies a list of command line flags usable for controlling * filter behavior for `--build=ddox` [experimental] */ @Optional @Name("-ddoxFilterArgs") string[] ddoxFilterArgs; /// Specify which tool to use with `--build=ddox` (experimental) @Optional @Name("-ddoxTool") string ddoxTool; /** * Sub-packages path or definitions * * Sub-packages allow to break component of a large framework into smaller * packages. In the recipe file, sub-packages entry can take one of two forms: * either the path to a sub-folder where a recipe file exists, * or an object of the same format as a recipe file (or `PackageRecipe`). */ @Optional SubPackage[] subPackages; /// Usually unused by users, this is set by dub automatically @Optional @Name("version") string version_; inout(ConfigurationInfo) getConfiguration(string name) inout { foreach (c; configurations) if (c.name == name) return c; throw new Exception("Unknown configuration: "~name); } /** Clones the package recipe recursively. */ PackageRecipe clone() const { return .clone(this); } } struct SubPackage { string path; PackageRecipe recipe; /** * Given a YAML parser, recurses into `recipe` or use `path` * depending on the node type. 
* * Two formats are supported for sub-packages: a string format, * which is just the path to the sub-package, and embedding the * full sub-package recipe into the parent package recipe. * * To support such a dual syntax, Configy requires the use * of a `fromYAML` method, as it exposes the underlying format. */ static SubPackage fromYAML (scope ConfigParser!SubPackage p) { import dub.internal.dyaml.node; if (p.node.nodeID == NodeID.mapping) return SubPackage(null, p.parseAs!PackageRecipe); else return SubPackage(p.parseAs!string); } } /// Describes minimal toolchain requirements struct ToolchainRequirements { import std.typecons : Tuple, tuple; // TODO: We can remove `@Optional` once bosagora/configy#30 is resolved, // currently it fails because `Dependency.opCmp` is not CTFE-able. /// DUB version requirement @Optional @converter((scope ConfigParser!VersionRange p) => p.node.as!string.parseVersionRange) VersionRange dub = VersionRange.Any; /// D front-end version requirement @Optional @converter((scope ConfigParser!VersionRange p) => p.node.as!string.parseDMDDependency) VersionRange frontend = VersionRange.Any; /// DMD version requirement @Optional @converter((scope ConfigParser!VersionRange p) => p.node.as!string.parseDMDDependency) VersionRange dmd = VersionRange.Any; /// LDC version requirement @Optional @converter((scope ConfigParser!VersionRange p) => p.node.as!string.parseVersionRange) VersionRange ldc = VersionRange.Any; /// GDC version requirement @Optional @converter((scope ConfigParser!VersionRange p) => p.node.as!string.parseVersionRange) VersionRange gdc = VersionRange.Any; /** Get the list of supported compilers. 
Returns: An array of couples of compiler name and compiler requirement */ @property Tuple!(string, VersionRange)[] supportedCompilers() const { Tuple!(string, VersionRange)[] res; if (dmd != VersionRange.Invalid) res ~= Tuple!(string, VersionRange)("dmd", dmd); if (ldc != VersionRange.Invalid) res ~= Tuple!(string, VersionRange)("ldc", ldc); if (gdc != VersionRange.Invalid) res ~= Tuple!(string, VersionRange)("gdc", gdc); return res; } bool empty() const { import std.algorithm.searching : all; return only(dub, frontend, dmd, ldc, gdc) .all!(r => r == VersionRange.Any); } } /// Bundles information about a build configuration. struct ConfigurationInfo { string name; @Optional string[] platforms; @Optional BuildSettingsTemplate buildSettings; alias buildSettings this; /** * Equivalent to the default constructor, used by Configy */ this(string name, string[] p, BuildSettingsTemplate build_settings) @safe pure nothrow @nogc { this.name = name; this.platforms = p; this.buildSettings = build_settings; } this(string name, BuildSettingsTemplate build_settings) { enforce(!name.empty, "Configuration name is empty."); this.name = name; this.buildSettings = build_settings; } bool matchesPlatform(in BuildPlatform platform) const { if( platforms.empty ) return true; foreach(p; platforms) if (platform.matchesSpecification(p)) return true; return false; } } /** * A dependency with possible `BuildSettingsTemplate` * * Currently only `dflags` is taken into account, but the parser accepts any * value that is in `BuildSettingsTemplate`. * This feature was originally introduced to support `-preview`, as setting * a `-preview` in `dflags` does not propagate down to dependencies. 
*/ public struct RecipeDependency { /// The dependency itself public Dependency dependency; /// Additional dflags, if any public BuildSettingsTemplate settings; /// Convenience alias as most uses just want to deal with the `Dependency` public alias dependency this; /** * Read a `Dependency` and `BuildSettingsTemplate` from the config file * * Required to support both short and long form */ static RecipeDependency fromYAML (scope ConfigParser!RecipeDependency p) { import dub.internal.dyaml.node; if (p.node.nodeID == NodeID.scalar) { auto d = YAMLFormat(p.node.as!string); return RecipeDependency(d.toDependency()); } auto d = p.parseAs!YAMLFormat; return RecipeDependency(d.toDependency(), d.settings); } /// In-file representation of a dependency as specified by the user private struct YAMLFormat { @Name("version") @Optional string version_; @Optional string path; @Optional string repository; bool optional; @Name("default") bool default_; @Optional BuildSettingsTemplate settings; alias settings this; /** * Used by Configy to provide rich error message when parsing. * * Exceptions thrown from `validate` methods will be wrapped with field/file * information and rethrown from Configy, providing the user * with the location of the configuration that triggered the error. 
*/ public void validate () const { enforce(this.optional || !this.default_, "Setting default to 'true' has no effect if 'optional' is not set"); enforce(this.version_.length || this.path.length || this.repository.length, "Need to provide one of the following fields: 'version', 'path', or 'repository'"); enforce(!this.path.length || !this.repository.length, "Cannot provide a 'path' dependency if a repository dependency is used"); enforce(!this.repository.length || this.version_.length, "Need to provide a commit hash in 'version' field with 'repository' dependency"); // Need to deprecate this as it's fairly common version (none) { enforce(!this.path.length || !this.version_.length, "Cannot provide a 'path' dependency if a 'version' dependency is used"); } } /// Turns this struct into a `Dependency` public Dependency toDependency () const { auto result = () { if (this.path.length) return Dependency(NativePath(this.path)); if (this.repository.length) return Dependency(Repository(this.repository, this.version_)); return Dependency(VersionRange.fromString(this.version_)); }(); result.optional = this.optional; result.default_ = this.default_; return result; } } } /// Type used to avoid a breaking change when `Dependency[string]` /// was changed to `RecipeDependency[string]` package struct RecipeDependencyAA { /// The underlying data, `public` as `alias this` to `private` field doesn't /// always work. public RecipeDependency[string] data; /// Expose base function, e.g. 
`clear` alias data this; /// Supports assignment from a `RecipeDependency` (used in the parser) public void opIndexAssign(RecipeDependency dep, string key) pure nothrow { this.data[key] = dep; } /// Supports assignment from a `Dependency`, used in user code mostly public void opIndexAssign(Dependency dep, string key) pure nothrow { this.data[key] = RecipeDependency(dep); } /// Configy doesn't like `alias this` to an AA static RecipeDependencyAA fromYAML (scope ConfigParser!RecipeDependencyAA p) { return RecipeDependencyAA(p.parseAs!(typeof(this.data))); } } /// This keeps general information about how to build a package. /// It contains functions to create a specific BuildSetting, targeted at /// a certain BuildPlatform. struct BuildSettingsTemplate { @Optional RecipeDependencyAA dependencies; @Optional string systemDependencies; @Optional TargetType targetType = TargetType.autodetect; @Optional string targetPath; @Optional string targetName; @Optional string workingDirectory; @Optional string mainSourceFile; @Optional string[string] subConfigurations; @StartsWith("dflags") string[][string] dflags; @StartsWith("lflags") string[][string] lflags; @StartsWith("libs") string[][string] libs; @StartsWith("sourceFiles") string[][string] sourceFiles; @StartsWith("sourcePaths") string[][string] sourcePaths; @StartsWith("cSourcePaths") string[][string] cSourcePaths; @StartsWith("excludedSourceFiles") string[][string] excludedSourceFiles; @StartsWith("injectSourceFiles") string[][string] injectSourceFiles; @StartsWith("copyFiles") string[][string] copyFiles; @StartsWith("extraDependencyFiles") string[][string] extraDependencyFiles; @StartsWith("versions") string[][string] versions; @StartsWith("debugVersions") string[][string] debugVersions; @StartsWith("versionFilters") string[][string] versionFilters; @StartsWith("debugVersionFilters") string[][string] debugVersionFilters; @StartsWith("importPaths") string[][string] importPaths; @StartsWith("cImportPaths") string[][string] 
cImportPaths; @StartsWith("stringImportPaths") string[][string] stringImportPaths; @StartsWith("preGenerateCommands") string[][string] preGenerateCommands; @StartsWith("postGenerateCommands") string[][string] postGenerateCommands; @StartsWith("preBuildCommands") string[][string] preBuildCommands; @StartsWith("postBuildCommands") string[][string] postBuildCommands; @StartsWith("preRunCommands") string[][string] preRunCommands; @StartsWith("postRunCommands") string[][string] postRunCommands; @StartsWith("environments") string[string][string] environments; @StartsWith("buildEnvironments")string[string][string] buildEnvironments; @StartsWith("runEnvironments") string[string][string] runEnvironments; @StartsWith("preGenerateEnvironments") string[string][string] preGenerateEnvironments; @StartsWith("postGenerateEnvironments") string[string][string] postGenerateEnvironments; @StartsWith("preBuildEnvironments") string[string][string] preBuildEnvironments; @StartsWith("postBuildEnvironments") string[string][string] postBuildEnvironments; @StartsWith("preRunEnvironments") string[string][string] preRunEnvironments; @StartsWith("postRunEnvironments") string[string][string] postRunEnvironments; @StartsWith("buildRequirements") @Optional Flags!BuildRequirement[string] buildRequirements; @StartsWith("buildOptions") @Optional Flags!BuildOption[string] buildOptions; BuildSettingsTemplate dup() const { return clone(this); } /// Constructs a BuildSettings object from this template. 
void getPlatformSettings(ref BuildSettings dst, in BuildPlatform platform, NativePath base_path) const { dst.targetType = this.targetType; if (!this.targetPath.empty) dst.targetPath = this.targetPath; if (!this.targetName.empty) dst.targetName = this.targetName; if (!this.workingDirectory.empty) dst.workingDirectory = this.workingDirectory; if (!this.mainSourceFile.empty) { auto p = NativePath(this.mainSourceFile); p.normalize(); dst.mainSourceFile = p.toNativeString(); dst.addSourceFiles(dst.mainSourceFile); } string[] collectFiles(in string[][string] paths_map, string pattern) { auto files = appender!(string[]); import dub.project : buildSettingsVars; import std.typecons : Nullable; static Nullable!(string[string]) envVarCache; if (envVarCache.isNull) envVarCache = environment.toAA(); foreach (suffix, paths; paths_map) { if (!platform.matchesSpecification(suffix)) continue; foreach (spath; paths) { enforce(!spath.empty, "Paths must not be empty strings."); auto path = NativePath(spath); if (!path.absolute) path = base_path ~ path; if (!existsDirectory(path)) { import std.algorithm : any, find; const hasVar = chain(buildSettingsVars, envVarCache.get.byKey).any!((string var) { return spath.find("$"~var).length > 0 || spath.find("${"~var~"}").length > 0; }); if (!hasVar) logWarn("Invalid source/import path: %s", path.toNativeString()); continue; } auto pstr = path.toNativeString(); foreach (d; dirEntries(pstr, pattern, SpanMode.depth)) { import std.path : baseName, pathSplitter; import std.algorithm.searching : canFind; // eliminate any hidden files, or files in hidden directories. But always include // files that are listed inside hidden directories that are specifically added to // the project. if (d.isDir || pathSplitter(d.name[pstr.length .. $]) .canFind!(name => name.length && name[0] == '.')) continue; auto src = NativePath(d.name).relativeTo(base_path); files ~= src.toNativeString(); } } } return files.data; } // collect source files. 
Note: D source from 'sourcePaths' and C sources from 'cSourcePaths' are joint into 'sourceFiles' dst.addSourceFiles(collectFiles(sourcePaths, "*.d")); dst.addSourceFiles(collectFiles(cSourcePaths, "*.{c,i}")); auto sourceFiles = dst.sourceFiles.sort(); // collect import files and remove sources import std.algorithm : copy, setDifference; auto importFiles = chain(collectFiles(importPaths, "*.{d,di}"), collectFiles(cImportPaths, "*.h")) .array() .sort(); immutable nremoved = importFiles.setDifference(sourceFiles).copy(importFiles.release).length; importFiles = importFiles[0 .. $ - nremoved]; dst.addImportFiles(importFiles.release); dst.addStringImportFiles(collectFiles(stringImportPaths, "*")); getPlatformSetting!("dflags", "addDFlags")(dst, platform); getPlatformSetting!("lflags", "addLFlags")(dst, platform); getPlatformSetting!("libs", "addLibs")(dst, platform); getPlatformSetting!("sourceFiles", "addSourceFiles")(dst, platform); getPlatformSetting!("excludedSourceFiles", "removeSourceFiles")(dst, platform); getPlatformSetting!("injectSourceFiles", "addInjectSourceFiles")(dst, platform); getPlatformSetting!("copyFiles", "addCopyFiles")(dst, platform); getPlatformSetting!("extraDependencyFiles", "addExtraDependencyFiles")(dst, platform); getPlatformSetting!("versions", "addVersions")(dst, platform); getPlatformSetting!("debugVersions", "addDebugVersions")(dst, platform); getPlatformSetting!("versionFilters", "addVersionFilters")(dst, platform); getPlatformSetting!("debugVersionFilters", "addDebugVersionFilters")(dst, platform); getPlatformSetting!("importPaths", "addImportPaths")(dst, platform); getPlatformSetting!("cImportPaths", "addCImportPaths")(dst, platform); getPlatformSetting!("stringImportPaths", "addStringImportPaths")(dst, platform); getPlatformSetting!("preGenerateCommands", "addPreGenerateCommands")(dst, platform); getPlatformSetting!("postGenerateCommands", "addPostGenerateCommands")(dst, platform); getPlatformSetting!("preBuildCommands", 
"addPreBuildCommands")(dst, platform); getPlatformSetting!("postBuildCommands", "addPostBuildCommands")(dst, platform); getPlatformSetting!("preRunCommands", "addPreRunCommands")(dst, platform); getPlatformSetting!("postRunCommands", "addPostRunCommands")(dst, platform); getPlatformSetting!("environments", "addEnvironments")(dst, platform); getPlatformSetting!("buildEnvironments", "addBuildEnvironments")(dst, platform); getPlatformSetting!("runEnvironments", "addRunEnvironments")(dst, platform); getPlatformSetting!("preGenerateEnvironments", "addPreGenerateEnvironments")(dst, platform); getPlatformSetting!("postGenerateEnvironments", "addPostGenerateEnvironments")(dst, platform); getPlatformSetting!("preBuildEnvironments", "addPreBuildEnvironments")(dst, platform); getPlatformSetting!("postBuildEnvironments", "addPostBuildEnvironments")(dst, platform); getPlatformSetting!("preRunEnvironments", "addPreRunEnvironments")(dst, platform); getPlatformSetting!("postRunEnvironments", "addPostRunEnvironments")(dst, platform); getPlatformSetting!("buildRequirements", "addRequirements")(dst, platform); getPlatformSetting!("buildOptions", "addOptions")(dst, platform); } void getPlatformSetting(string name, string addname)(ref BuildSettings dst, in BuildPlatform platform) const { foreach(suffix, values; __traits(getMember, this, name)){ if( platform.matchesSpecification(suffix) ) __traits(getMember, dst, addname)(values); } } void warnOnSpecialCompilerFlags(string package_name, string config_name) { auto nodef = false; auto noprop = false; foreach (req; this.buildRequirements) { if (req & BuildRequirement.noDefaultFlags) nodef = true; if (req & BuildRequirement.relaxProperties) noprop = true; } if (noprop) { logWarn(`Warning: "buildRequirements": ["relaxProperties"] is deprecated and is now the default behavior. 
Note that the -property switch will probably be removed in future versions of DMD.`); logWarn(""); } if (nodef) { logWarn("Warning: This package uses the \"noDefaultFlags\" build requirement. Please use only for development purposes and not for released packages."); logWarn(""); } else { string[] all_dflags; Flags!BuildOption all_options; foreach (flags; this.dflags) all_dflags ~= flags; foreach (options; this.buildOptions) all_options |= options; .warnOnSpecialCompilerFlags(all_dflags, all_options, package_name, config_name); } } } package(dub) void checkPlatform(const scope ref ToolchainRequirements tr, BuildPlatform platform, string package_name) { import std.algorithm.iteration : map; import std.format : format; Version compilerver; VersionRange compilerspec; switch (platform.compiler) { default: compilerspec = VersionRange.Any; compilerver = Version.minRelease; break; case "dmd": compilerspec = tr.dmd; compilerver = platform.compilerVersion.length ? Version(dmdLikeVersionToSemverLike(platform.compilerVersion)) : Version.minRelease; break; case "ldc": compilerspec = tr.ldc; compilerver = platform.compilerVersion.length ? Version(platform.compilerVersion) : Version.minRelease; break; case "gdc": compilerspec = tr.gdc; compilerver = platform.compilerVersion.length ? Version(platform.compilerVersion) : Version.minRelease; break; } enforce(compilerspec != VersionRange.Invalid, format( "Installed %s %s is not supported by %s. 
Supported compiler(s):\n%s", platform.compiler, platform.compilerVersion, package_name, tr.supportedCompilers.map!((cs) { auto str = " - " ~ cs[0]; if (cs[1] != VersionRange.Any) str ~= ": " ~ cs[1].toString(); return str; }).join("\n") ) ); enforce(compilerspec.matches(compilerver), format( "Installed %s-%s does not comply with %s compiler requirement: %s %s\n" ~ "Please consider upgrading your installation.", platform.compiler, platform.compilerVersion, package_name, platform.compiler, compilerspec ) ); enforce(tr.frontend.matches(Version(dmdLikeVersionToSemverLike(platform.frontendVersionString))), format( "Installed %s-%s with frontend %s does not comply with %s frontend requirement: %s\n" ~ "Please consider upgrading your installation.", platform.compiler, platform.compilerVersion, platform.frontendVersionString, package_name, tr.frontend ) ); } package bool addRequirement(ref ToolchainRequirements req, string name, string value) { switch (name) { default: return false; case "dub": req.dub = parseVersionRange(value); break; case "frontend": req.frontend = parseDMDDependency(value); break; case "ldc": req.ldc = parseVersionRange(value); break; case "gdc": req.gdc = parseVersionRange(value); break; case "dmd": req.dmd = parseDMDDependency(value); break; } return true; } private VersionRange parseVersionRange(string dep) { if (dep == "no") return VersionRange.Invalid; return VersionRange.fromString(dep); } private VersionRange parseDMDDependency(string dep) { import std.algorithm : map, splitter; import std.array : join; if (dep == "no") return VersionRange.Invalid; // `dmdLikeVersionToSemverLike` does not handle this, VersionRange does if (dep == "*") return VersionRange.Any; return VersionRange.fromString(dep .splitter(' ') .map!(r => dmdLikeVersionToSemverLike(r)) .join(' ')); } private T clone(T)(ref const(T) val) { import dub.internal.dyaml.stdsumtype; import std.traits : isSomeString, isDynamicArray, isAssociativeArray, isBasicType, ValueType; static if 
(is(T == immutable)) return val; else static if (isBasicType!T || is(T Base == enum) && isBasicType!Base) { return val; } else static if (isDynamicArray!T) { alias V = typeof(T.init[0]); static if (is(V == immutable)) return val; else { T ret = new V[val.length]; foreach (i, ref f; val) ret[i] = clone!V(f); return ret; } } else static if (isAssociativeArray!T) { alias V = ValueType!T; T ret; foreach (k, ref f; val) ret[k] = clone!V(f); return ret; } else static if (is(T == SumType!A, A...)) { return val.match!((any) => T(clone(any))); } else static if (is(T == struct)) { T ret; foreach (i, M; typeof(T.tupleof)) ret.tupleof[i] = clone!M(val.tupleof[i]); return ret; } else static assert(false, "Unsupported type: "~T.stringof); } unittest { // issue #1407 - duplicate main source file { BuildSettingsTemplate t; t.mainSourceFile = "./foo.d"; t.sourceFiles[""] = ["foo.d"]; BuildSettings bs; t.getPlatformSettings(bs, BuildPlatform.init, NativePath("/")); assert(bs.sourceFiles == ["foo.d"]); } version (Windows) {{ BuildSettingsTemplate t; t.mainSourceFile = "src/foo.d"; t.sourceFiles[""] = ["src\\foo.d"]; BuildSettings bs; t.getPlatformSettings(bs, BuildPlatform.init, NativePath("/")); assert(bs.sourceFiles == ["src\\foo.d"]); }} } /** * Edit all dependency names from `:foo` to `name:foo`. * * TODO: Remove the special case in the parser and remove this hack. */ package void fixDependenciesNames (T) (string root, ref T aggr) nothrow { static foreach (idx, FieldRef; T.tupleof) { static if (is(immutable typeof(FieldRef) == immutable RecipeDependencyAA)) { string[] toReplace; foreach (key; aggr.tupleof[idx].byKey) if (key.length && key[0] == ':') toReplace ~= key; foreach (k; toReplace) { aggr.tupleof[idx][root ~ k] = aggr.tupleof[idx][k]; aggr.tupleof[idx].data.remove(k); } } else static if (is(typeof(FieldRef) == struct)) fixDependenciesNames(root, aggr.tupleof[idx]); } } /** Turn a DMD-like version (e.g. 2.082.1) into a SemVer-like version (e.g. 2.82.1). 
The function accepts a dependency operator prefix and some text postfix. Prefix and postfix are returned verbatim. Params: ver = version string, possibly with a dependency operator prefix and some test postfix. Returns: A Semver compliant string */ private string dmdLikeVersionToSemverLike(string ver) { import std.algorithm : countUntil, joiner, map, skipOver, splitter; import std.array : join, split; import std.ascii : isDigit; import std.conv : text; import std.exception : enforce; import std.functional : not; import std.range : padRight; const start = ver.countUntil!isDigit; enforce(start != -1, "Invalid semver: "~ver); const prefix = ver[0 .. start]; ver = ver[start .. $]; const end = ver.countUntil!(c => !c.isDigit && c != '.'); const postfix = end == -1 ? null : ver[end .. $]; auto verStr = ver[0 .. $-postfix.length]; auto comps = verStr .splitter(".") .map!((a) { if (a.length > 1) a.skipOver("0"); return a;}) .padRight("0", 3); return text(prefix, comps.joiner("."), postfix); } /// unittest { assert(dmdLikeVersionToSemverLike("2.082.1") == "2.82.1"); assert(dmdLikeVersionToSemverLike("2.082.0") == "2.82.0"); assert(dmdLikeVersionToSemverLike("2.082") == "2.82.0"); assert(dmdLikeVersionToSemverLike("~>2.082") == "~>2.82.0"); assert(dmdLikeVersionToSemverLike("~>2.082-beta1") == "~>2.82.0-beta1"); assert(dmdLikeVersionToSemverLike("2.4.6") == "2.4.6"); assert(dmdLikeVersionToSemverLike("2.4.6-alpha12") == "2.4.6-alpha12"); } dub-1.40.0/source/dub/recipe/sdl.d000066400000000000000000000757021477246567400166570ustar00rootroot00000000000000/** SDL format support for PackageRecipe Copyright: © 2014-2015 rejectedsoftware e.K. License: Subject to the terms of the MIT license, as written in the included LICENSE.txt file. 
Authors: Sönke Ludwig */ module dub.recipe.sdl; import dub.compilers.compiler; import dub.dependency; import dub.internal.dyaml.stdsumtype; import dub.internal.logging; import dub.internal.sdlang; import dub.internal.vibecompat.inet.path; import dub.recipe.packagerecipe; import std.algorithm : map; import std.array : array; import std.conv; import std.string : startsWith, format; deprecated("Use `parseSDL(PackageRecipe, string, PackageName, string)` instead") void parseSDL(ref PackageRecipe recipe, string sdl, string parent_name, string filename) { parseSDL(recipe, parseSource(sdl, filename), PackageName(parent_name)); } deprecated("Use `parseSDL(PackageRecipe, Tag, PackageName)` instead") void parseSDL(ref PackageRecipe recipe, Tag sdl, string parent_name) { parseSDL(recipe, sdl, PackageName(parent_name)); } void parseSDL(ref PackageRecipe recipe, string sdl, in PackageName parent, string filename) { parseSDL(recipe, parseSource(sdl, filename), parent); } void parseSDL(ref PackageRecipe recipe, Tag sdl, in PackageName parent = PackageName.init) { Tag[] subpacks; Tag[] configs; // parse top-level fields foreach (n; sdl.all.tags) { enforceSDL(n.name.length > 0, "Anonymous tags are not allowed at the root level.", n); switch (n.fullName) { default: break; case "name": recipe.name = n.stringTagValue; break; case "version": recipe.version_ = n.stringTagValue; break; case "description": recipe.description = n.stringTagValue; break; case "homepage": recipe.homepage = n.stringTagValue; break; case "authors": recipe.authors ~= n.stringArrayTagValue; break; case "copyright": recipe.copyright = n.stringTagValue; break; case "license": recipe.license = n.stringTagValue; break; case "subPackage": subpacks ~= n; break; case "configuration": configs ~= n; break; case "buildType": auto name = n.stringTagValue(true); BuildSettingsTemplate bt; parseBuildSettings(n, bt, parent); recipe.buildTypes[name] = bt; break; case "toolchainRequirements": 
parseToolchainRequirements(recipe.toolchainRequirements, n); break; case "x:ddoxFilterArgs": recipe.ddoxFilterArgs ~= n.stringArrayTagValue; break; case "x:ddoxTool": recipe.ddoxTool = n.stringTagValue; break; } } enforceSDL(recipe.name.length > 0, "The package \"name\" field is missing or empty.", sdl); const full_name = parent.toString().length ? PackageName(parent.toString() ~ ":" ~ recipe.name) : PackageName(recipe.name); // parse general build settings parseBuildSettings(sdl, recipe.buildSettings, full_name); // parse configurations recipe.configurations.length = configs.length; foreach (i, n; configs) { parseConfiguration(n, recipe.configurations[i], full_name); } // finally parse all sub packages recipe.subPackages.length = subpacks.length; foreach (i, n; subpacks) { if (n.values.length) { recipe.subPackages[i].path = n.stringTagValue; } else { enforceSDL(n.attributes.length == 0, "No attributes allowed for inline sub package definitions.", n); parseSDL(recipe.subPackages[i].recipe, n, full_name); } } } Tag toSDL(const scope ref PackageRecipe recipe) { Tag ret = new Tag; void add(T)(string field, T value) { ret.add(new Tag(null, field, [Value(value)])); } add("name", recipe.name); if (recipe.version_.length) add("version", recipe.version_); if (recipe.description.length) add("description", recipe.description); if (recipe.homepage.length) add("homepage", recipe.homepage); if (recipe.authors.length) ret.add(new Tag(null, "authors", recipe.authors.map!(a => Value(a)).array)); if (recipe.copyright.length) add("copyright", recipe.copyright); if (recipe.license.length) add("license", recipe.license); foreach (name, settings; recipe.buildTypes) { auto t = new Tag(null, "buildType", [Value(name)]); t.add(settings.toSDL()); ret.add(t); } if (!recipe.toolchainRequirements.empty) { ret.add(toSDL(recipe.toolchainRequirements)); } if (recipe.ddoxFilterArgs.length) ret.add(new Tag("x", "ddoxFilterArgs", recipe.ddoxFilterArgs.map!(a => Value(a)).array)); if 
(recipe.ddoxTool.length) ret.add(new Tag("x", "ddoxTool", [Value(recipe.ddoxTool)])); ret.add(recipe.buildSettings.toSDL()); foreach(config; recipe.configurations) ret.add(config.toSDL()); foreach (i, subPackage; recipe.subPackages) { if (subPackage.path !is null) { add("subPackage", subPackage.path); } else { auto t = subPackage.recipe.toSDL(); t.name = "subPackage"; ret.add(t); } } return ret; } private void parseBuildSettings(Tag settings, ref BuildSettingsTemplate bs, in PackageName name) { foreach (setting; settings.all.tags) parseBuildSetting(setting, bs, name); } private void parseBuildSetting(Tag setting, ref BuildSettingsTemplate bs, in PackageName name) { switch (setting.fullName) { default: break; case "dependency": parseDependency(setting, bs, name); break; case "systemDependencies": bs.systemDependencies = setting.stringTagValue; break; case "targetType": bs.targetType = setting.stringTagValue.to!TargetType; break; case "targetName": bs.targetName = setting.stringTagValue; break; case "targetPath": bs.targetPath = setting.stringTagValue; break; case "workingDirectory": bs.workingDirectory = setting.stringTagValue; break; case "subConfiguration": auto args = setting.stringArrayTagValue; enforceSDL(args.length == 2, "Expecting package and configuration names as arguments.", setting); bs.subConfigurations[expandPackageName(args[0], name, setting)] = args[1]; break; case "dflags": setting.parsePlatformStringArray(bs.dflags); break; case "lflags": setting.parsePlatformStringArray(bs.lflags); break; case "libs": setting.parsePlatformStringArray(bs.libs); break; case "sourceFiles": setting.parsePlatformStringArray(bs.sourceFiles); break; case "sourcePaths": setting.parsePlatformStringArray(bs.sourcePaths); break; case "cSourcePaths": setting.parsePlatformStringArray(bs.cSourcePaths); break; case "excludedSourceFiles": setting.parsePlatformStringArray(bs.excludedSourceFiles); break; case "mainSourceFile": bs.mainSourceFile = setting.stringTagValue; break; case 
"injectSourceFiles": setting.parsePlatformStringArray(bs.injectSourceFiles); break; case "copyFiles": setting.parsePlatformStringArray(bs.copyFiles); break; case "extraDependencyFiles": setting.parsePlatformStringArray(bs.extraDependencyFiles); break; case "versions": setting.parsePlatformStringArray(bs.versions); break; case "debugVersions": setting.parsePlatformStringArray(bs.debugVersions); break; case "x:versionFilters": setting.parsePlatformStringArray(bs.versionFilters); break; case "x:debugVersionFilters": setting.parsePlatformStringArray(bs.debugVersionFilters); break; case "importPaths": setting.parsePlatformStringArray(bs.importPaths); break; case "cImportPaths": setting.parsePlatformStringArray(bs.cImportPaths); break; case "stringImportPaths": setting.parsePlatformStringArray(bs.stringImportPaths); break; case "preGenerateCommands": setting.parsePlatformStringArray(bs.preGenerateCommands); break; case "postGenerateCommands": setting.parsePlatformStringArray(bs.postGenerateCommands); break; case "preBuildCommands": setting.parsePlatformStringArray(bs.preBuildCommands); break; case "postBuildCommands": setting.parsePlatformStringArray(bs.postBuildCommands); break; case "preRunCommands": setting.parsePlatformStringArray(bs.preRunCommands); break; case "postRunCommands": setting.parsePlatformStringArray(bs.postRunCommands); break; case "environments": setting.parsePlatformStringAA(bs.environments); break; case "buildEnvironments": setting.parsePlatformStringAA(bs.buildEnvironments); break; case "runEnvironments": setting.parsePlatformStringAA(bs.runEnvironments); break; case "preGenerateEnvironments": setting.parsePlatformStringAA(bs.preGenerateEnvironments); break; case "postGenerateEnvironments": setting.parsePlatformStringAA(bs.postGenerateEnvironments); break; case "preBuildEnvironments": setting.parsePlatformStringAA(bs.preBuildEnvironments); break; case "postBuildEnvironments": setting.parsePlatformStringAA(bs.postBuildEnvironments); break; case 
"preRunEnvironments": setting.parsePlatformStringAA(bs.preRunEnvironments); break; case "postRunEnvironments": setting.parsePlatformStringAA(bs.postRunEnvironments); break; case "buildRequirements": setting.parsePlatformEnumArray!BuildRequirement(bs.buildRequirements); break; case "buildOptions": setting.parsePlatformEnumArray!BuildOption(bs.buildOptions); break; } } private void parseDependency(Tag t, ref BuildSettingsTemplate bs, in PackageName name) { enforceSDL(t.values.length != 0, "Missing dependency name.", t); enforceSDL(t.values.length == 1, "Multiple dependency names.", t); auto pkg = expandPackageName(t.values[0].expect!string(t), name, t); enforceSDL(pkg !in bs.dependencies, "The dependency '"~pkg~"' is specified more than once.", t); Dependency dep = Dependency.Any; auto attrs = t.attributes; if ("path" in attrs) { dep = Dependency(NativePath(attrs["path"][0].value.expect!string(t, t.fullName ~ " path"))); } else if ("repository" in attrs) { enforceSDL("version" in attrs, "Missing version specification.", t); dep = Dependency(Repository(attrs["repository"][0].value.expect!string(t, t.fullName ~ " repository"), attrs["version"][0].value.expect!string(t, t.fullName ~ " version"))); } else { enforceSDL("version" in attrs, "Missing version specification.", t); dep = Dependency(attrs["version"][0].value.expect!string(t, t.fullName ~ " version")); } if ("optional" in attrs) dep.optional = attrs["optional"][0].value.expect!bool(t, t.fullName ~ " optional"); if ("default" in attrs) dep.default_ = attrs["default"][0].value.expect!bool(t, t.fullName ~ " default"); bs.dependencies[pkg] = dep; BuildSettingsTemplate dbs; parseBuildSettings(t, bs.dependencies[pkg].settings, name); } private void parseConfiguration(Tag t, ref ConfigurationInfo ret, in PackageName name) { ret.name = t.stringTagValue(true); foreach (f; t.tags) { switch (f.fullName) { default: parseBuildSetting(f, ret.buildSettings, name); break; case "platforms": ret.platforms ~= f.stringArrayTagValue; 
break; } } } private Tag toSDL(const scope ref ConfigurationInfo config) { auto ret = new Tag(null, "configuration", [Value(config.name)]); if (config.platforms.length) ret.add(new Tag(null, "platforms", config.platforms[].map!(p => Value(p)).array)); ret.add(config.buildSettings.toSDL()); return ret; } private Tag[] toSDL(const scope ref BuildSettingsTemplate bs) { Tag[] ret; void add(string name, string value, string namespace = null) { ret ~= new Tag(namespace, name, [Value(value)]); } void adda(string name, string suffix, in string[] values, string namespace = null) { ret ~= new Tag(namespace, name, values[].map!(v => Value(v)).array, suffix.length ? [new Attribute(null, "platform", Value(suffix))] : null); } void addaa(string name, string suffix, in string[string] values, string namespace = null) { foreach (k, v; values) { ret ~= new Tag(namespace, name, [Value(k), Value(v)], suffix.length ? [new Attribute(null, "platform", Value(suffix))] : null); } } string[] toNameArray(T, U)(U bits) if(is(T == enum)) { string[] ret; foreach (m; __traits(allMembers, T)) if (bits & __traits(getMember, T, m)) ret ~= m; return ret; } foreach (pack, d; bs.dependencies) { Attribute[] attribs; d.visit!( (const Repository r) { attribs ~= new Attribute(null, "repository", Value(r.toString())); attribs ~= new Attribute(null, "version", Value(r.ref_)); }, (const NativePath p) { attribs ~= new Attribute(null, "path", Value(p.toString())); }, (const VersionRange v) { attribs ~= new Attribute(null, "version", Value(v.toString())); }, ); if (d.optional) attribs ~= new Attribute(null, "optional", Value(true)); if (d.default_) attribs ~= new Attribute(null, "default", Value(true)); auto t = new Tag(null, "dependency", [Value(pack)], attribs); if (d.settings !is typeof(d.settings).init) t.add(d.settings.toSDL()); ret ~= t; } if (bs.systemDependencies !is null) add("systemDependencies", bs.systemDependencies); if (bs.targetType != TargetType.autodetect) add("targetType", 
bs.targetType.to!string()); if (bs.targetPath.length) add("targetPath", bs.targetPath); if (bs.targetName.length) add("targetName", bs.targetName); if (bs.workingDirectory.length) add("workingDirectory", bs.workingDirectory); if (bs.mainSourceFile.length) add("mainSourceFile", bs.mainSourceFile); foreach (pack, conf; bs.subConfigurations) ret ~= new Tag(null, "subConfiguration", [Value(pack), Value(conf)]); foreach (suffix, arr; bs.dflags) adda("dflags", suffix, arr); foreach (suffix, arr; bs.lflags) adda("lflags", suffix, arr); foreach (suffix, arr; bs.libs) adda("libs", suffix, arr); foreach (suffix, arr; bs.sourceFiles) adda("sourceFiles", suffix, arr); foreach (suffix, arr; bs.sourcePaths) adda("sourcePaths", suffix, arr); foreach (suffix, arr; bs.cSourcePaths) adda("cSourcePaths", suffix, arr); foreach (suffix, arr; bs.excludedSourceFiles) adda("excludedSourceFiles", suffix, arr); foreach (suffix, arr; bs.injectSourceFiles) adda("injectSourceFiles", suffix, arr); foreach (suffix, arr; bs.copyFiles) adda("copyFiles", suffix, arr); foreach (suffix, arr; bs.extraDependencyFiles) adda("extraDependencyFiles", suffix, arr); foreach (suffix, arr; bs.versions) adda("versions", suffix, arr); foreach (suffix, arr; bs.debugVersions) adda("debugVersions", suffix, arr); foreach (suffix, arr; bs.versionFilters) adda("versionFilters", suffix, arr, "x"); foreach (suffix, arr; bs.debugVersionFilters) adda("debugVersionFilters", suffix, arr, "x"); foreach (suffix, arr; bs.importPaths) adda("importPaths", suffix, arr); foreach (suffix, arr; bs.cImportPaths) adda("cImportPaths", suffix, arr); foreach (suffix, arr; bs.stringImportPaths) adda("stringImportPaths", suffix, arr); foreach (suffix, arr; bs.preGenerateCommands) adda("preGenerateCommands", suffix, arr); foreach (suffix, arr; bs.postGenerateCommands) adda("postGenerateCommands", suffix, arr); foreach (suffix, arr; bs.preBuildCommands) adda("preBuildCommands", suffix, arr); foreach (suffix, arr; bs.postBuildCommands) 
adda("postBuildCommands", suffix, arr); foreach (suffix, arr; bs.preRunCommands) adda("preRunCommands", suffix, arr); foreach (suffix, arr; bs.postRunCommands) adda("postRunCommands", suffix, arr); foreach (suffix, aa; bs.environments) addaa("environments", suffix, aa); foreach (suffix, aa; bs.buildEnvironments) addaa("buildEnvironments", suffix, aa); foreach (suffix, aa; bs.runEnvironments) addaa("runEnvironments", suffix, aa); foreach (suffix, aa; bs.preGenerateEnvironments) addaa("preGenerateEnvironments", suffix, aa); foreach (suffix, aa; bs.postGenerateEnvironments) addaa("postGenerateEnvironments", suffix, aa); foreach (suffix, aa; bs.preBuildEnvironments) addaa("preBuildEnvironments", suffix, aa); foreach (suffix, aa; bs.postBuildEnvironments) addaa("postBuildEnvironments", suffix, aa); foreach (suffix, aa; bs.preRunEnvironments) addaa("preRunEnvironments", suffix, aa); foreach (suffix, aa; bs.postRunEnvironments) addaa("postRunEnvironments", suffix, aa); foreach (suffix, bits; bs.buildRequirements) adda("buildRequirements", suffix, toNameArray!BuildRequirement(bits)); foreach (suffix, bits; bs.buildOptions) adda("buildOptions", suffix, toNameArray!BuildOption(bits)); return ret; } private void parseToolchainRequirements(ref ToolchainRequirements tr, Tag tag) { foreach (attr; tag.attributes) tr.addRequirement(attr.name, attr.value.expect!string(tag)); } private Tag toSDL(const ref ToolchainRequirements tr) { Attribute[] attrs; if (tr.dub != VersionRange.Any) attrs ~= new Attribute("dub", Value(tr.dub.toString())); if (tr.frontend != VersionRange.Any) attrs ~= new Attribute("frontend", Value(tr.frontend.toString())); if (tr.dmd != VersionRange.Any) attrs ~= new Attribute("dmd", Value(tr.dmd.toString())); if (tr.ldc != VersionRange.Any) attrs ~= new Attribute("ldc", Value(tr.ldc.toString())); if (tr.gdc != VersionRange.Any) attrs ~= new Attribute("gdc", Value(tr.gdc.toString())); return new Tag(null, "toolchainRequirements", null, attrs); } private string 
expandPackageName(string name, in PackageName parent, Tag tag) { import std.algorithm : canFind; if (!name.startsWith(":")) return name; enforceSDL(!parent.sub.length, "Short-hand packages syntax not " ~ "allowed within sub packages: %s -> %s".format(parent, name), tag); return parent.toString() ~ name; } private string stringTagValue(Tag t, bool allow_child_tags = false) { enforceSDL(t.values.length > 0, format("Missing string value for '%s'.", t.fullName), t); enforceSDL(t.values.length == 1, format("Expected only one value for '%s'.", t.fullName), t); enforceSDL(allow_child_tags || t.tags.length == 0, format("No child tags allowed for '%s'.", t.fullName), t); // Q: should attributes be disallowed, or just ignored for forward compatibility reasons? //enforceSDL(t.attributes.length == 0, format("No attributes allowed for '%s'.", t.fullName), t); return t.values[0].expect!string(t); } private T expect(T)( Value value, Tag errorInfo, string customFieldName = null, string file = __FILE__, int line = __LINE__ ) { return value.match!( (T v) => v, (fallback) { enforceSDL(false, format("Expected value of type " ~ T.stringof ~ " for '%s', but got %s.", customFieldName.length ? customFieldName : errorInfo.fullName, typeof(fallback).stringof), errorInfo, file, line); return T.init; } ); } private string[] stringArrayTagValue(Tag t, bool allow_child_tags = false) { enforceSDL(allow_child_tags || t.tags.length == 0, format("No child tags allowed for '%s'.", t.fullName), t); // Q: should attributes be disallowed, or just ignored for forward compatibility reasons? 
//enforceSDL(t.attributes.length == 0, format("No attributes allowed for '%s'.", t.fullName), t); string[] ret; foreach (i, v; t.values) { ret ~= v.expect!string(t, text(t.fullName, "[", i, "]")); } return ret; } private string getPlatformSuffix(Tag t, string file = __FILE__, int line = __LINE__) { string platform; if ("platform" in t.attributes) platform = t.attributes["platform"][0].value.expect!string(t, t.fullName ~ " platform", file, line); return platform; } private void parsePlatformStringArray(Tag t, ref string[][string] dst) { string platform = t.getPlatformSuffix; dst[platform] ~= t.values.map!(v => v.expect!string(t)).array; } private void parsePlatformStringAA(Tag t, ref string[string][string] dst) { string platform = t.getPlatformSuffix; enforceSDL(t.values.length == 2, format("Values for '%s' must be 2 required.", t.fullName), t); dst[platform][t.values[0].expect!string(t)] = t.values[1].expect!string(t); } private void parsePlatformEnumArray(E, Es)(Tag t, ref Es[string] dst) { string platform = t.getPlatformSuffix; foreach (v; t.values) { if (platform !in dst) dst[platform] = Es.init; dst[platform] |= v.expect!string(t).to!E; } } private void enforceSDL(bool condition, lazy string message, Tag tag, string file = __FILE__, int line = __LINE__) { if (!condition) { throw new Exception(format("%s(%s): Error: %s", tag.location.file, tag.location.line + 1, message), file, line); } } // Just a wrapper around `parseSDL` for easier testing version (unittest) private void parseSDLTest(ref PackageRecipe recipe, string sdl) { parseSDL(recipe, parseSource(sdl, "testfile"), PackageName.init); } unittest { // test all possible fields auto sdl = `name "projectname"; description "project description"; homepage "http://example.com" authors "author 1" "author 2" authors "author 3" copyright "copyright string" license "license string" version "1.0.0" subPackage { name "subpackage1" } subPackage { name "subpackage2" dependency "projectname:subpackage1" version="*" } 
subPackage "pathsp3" configuration "config1" { platforms "windows" "linux" targetType "library" } configuration "config2" { platforms "windows-x86" targetType "executable" } buildType "debug" { dflags "-g" "-debug" } buildType "release" { dflags "-release" "-O" } toolchainRequirements dub="~>1.11.0" dmd="~>2.082" x:ddoxFilterArgs "-arg1" "-arg2" x:ddoxFilterArgs "-arg3" x:ddoxTool "ddoxtool" dependency ":subpackage1" optional=false path="." { dflags "-g" "-debug" } dependency "somedep" version="1.0.0" optional=true systemDependencies "system dependencies" targetType "executable" targetName "target name" targetPath "target path" workingDirectory "working directory" subConfiguration ":subpackage2" "library" buildRequirements "allowWarnings" "silenceDeprecations" buildOptions "verbose" "ignoreUnknownPragmas" libs "lib1" "lib2" libs "lib3" sourceFiles "source1" "source2" sourceFiles "source3" sourcePaths "sourcepath1" "sourcepath2" sourcePaths "sourcepath3" cSourcePaths "csourcepath1" "csourcepath2" cSourcePaths "csourcepath3" excludedSourceFiles "excluded1" "excluded2" excludedSourceFiles "excluded3" mainSourceFile "main source" injectSourceFiles "finalbinarysourcefile.d" "extrafile" copyFiles "copy1" "copy2" copyFiles "copy3" extraDependencyFiles "extradepfile1" "extradepfile2" extraDependencyFiles "extradepfile3" versions "version1" "version2" versions "version3" debugVersions "debug1" "debug2" debugVersions "debug3" x:versionFilters "version1" "version2" x:versionFilters "version3" x:versionFilters x:debugVersionFilters "debug1" "debug2" x:debugVersionFilters "debug3" x:debugVersionFilters importPaths "import1" "import2" importPaths "import3" cImportPaths "cimport1" "cimport2" cImportPaths "cimport3" stringImportPaths "string1" "string2" stringImportPaths "string3" preGenerateCommands "preg1" "preg2" preGenerateCommands "preg3" postGenerateCommands "postg1" "postg2" postGenerateCommands "postg3" preBuildCommands "preb1" "preb2" preBuildCommands "preb3" 
postBuildCommands "postb1" "postb2" postBuildCommands "postb3" preRunCommands "prer1" "prer2" preRunCommands "prer3" postRunCommands "postr1" "postr2" postRunCommands "postr3" environments "Var1" "env" buildEnvironments "Var2" "buildEnv" runEnvironments "Var3" "runEnv" preGenerateEnvironments "Var4" "preGenEnv" postGenerateEnvironments "Var5" "postGenEnv" preBuildEnvironments "Var6" "preBuildEnv" postBuildEnvironments "Var7" "postBuildEnv" preRunEnvironments "Var8" "preRunEnv" postRunEnvironments "Var9" "postRunEnv" dflags "df1" "df2" dflags "df3" lflags "lf1" "lf2" lflags "lf3" `; PackageRecipe rec1; parseSDLTest(rec1, sdl); PackageRecipe rec; parseSDL(rec, rec1.toSDL()); // verify that all fields are serialized properly assert(rec.name == "projectname"); assert(rec.description == "project description"); assert(rec.homepage == "http://example.com"); assert(rec.authors == ["author 1", "author 2", "author 3"]); assert(rec.copyright == "copyright string"); assert(rec.license == "license string"); assert(rec.version_ == "1.0.0"); assert(rec.subPackages.length == 3); assert(rec.subPackages[0].path == ""); assert(rec.subPackages[0].recipe.name == "subpackage1"); assert(rec.subPackages[1].path == ""); assert(rec.subPackages[1].recipe.name == "subpackage2"); assert(rec.subPackages[1].recipe.buildSettings.dependencies.length == 1); assert("projectname:subpackage1" in rec.subPackages[1].recipe.buildSettings.dependencies); assert(rec.subPackages[2].path == "pathsp3"); assert(rec.configurations.length == 2); assert(rec.configurations[0].name == "config1"); assert(rec.configurations[0].platforms == ["windows", "linux"]); assert(rec.configurations[0].buildSettings.targetType == TargetType.library); assert(rec.configurations[1].name == "config2"); assert(rec.configurations[1].platforms == ["windows-x86"]); assert(rec.configurations[1].buildSettings.targetType == TargetType.executable); assert(rec.buildTypes.length == 2); assert(rec.buildTypes["debug"].dflags == ["": ["-g", 
"-debug"]]); assert(rec.buildTypes["release"].dflags == ["": ["-release", "-O"]]); assert(rec.toolchainRequirements.dub == VersionRange.fromString("~>1.11.0")); assert(rec.toolchainRequirements.frontend == VersionRange.Any); assert(rec.toolchainRequirements.dmd == VersionRange.fromString("~>2.82.0")); assert(rec.toolchainRequirements.ldc == VersionRange.Any); assert(rec.toolchainRequirements.gdc == VersionRange.Any); assert(rec.ddoxFilterArgs == ["-arg1", "-arg2", "-arg3"], rec.ddoxFilterArgs.to!string); assert(rec.ddoxTool == "ddoxtool"); assert(rec.buildSettings.dependencies.length == 2); assert(rec.buildSettings.dependencies["projectname:subpackage1"].optional == false); assert(rec.buildSettings.dependencies["projectname:subpackage1"].path == NativePath(".")); assert(rec.buildSettings.dependencies["projectname:subpackage1"].settings.dflags == ["":["-g", "-debug"]]); assert(rec.buildSettings.dependencies["somedep"].version_.toString() == "1.0.0"); assert(rec.buildSettings.dependencies["somedep"].optional == true); assert(rec.buildSettings.systemDependencies == "system dependencies"); assert(rec.buildSettings.targetType == TargetType.executable); assert(rec.buildSettings.targetName == "target name"); assert(rec.buildSettings.targetPath == "target path"); assert(rec.buildSettings.workingDirectory == "working directory"); assert(rec.buildSettings.subConfigurations.length == 1); assert(rec.buildSettings.subConfigurations["projectname:subpackage2"] == "library"); assert(rec.buildSettings.buildRequirements == ["": cast(Flags!BuildRequirement)(BuildRequirement.allowWarnings | BuildRequirement.silenceDeprecations)]); assert(rec.buildSettings.buildOptions == ["": cast(Flags!BuildOption)(BuildOption.verbose | BuildOption.ignoreUnknownPragmas)]); assert(rec.buildSettings.libs == ["": ["lib1", "lib2", "lib3"]]); assert(rec.buildSettings.sourceFiles == ["": ["source1", "source2", "source3"]]); assert(rec.buildSettings.sourcePaths == ["": ["sourcepath1", "sourcepath2", 
"sourcepath3"]]); assert(rec.buildSettings.cSourcePaths == ["": ["csourcepath1", "csourcepath2", "csourcepath3"]]); assert(rec.buildSettings.excludedSourceFiles == ["": ["excluded1", "excluded2", "excluded3"]]); assert(rec.buildSettings.mainSourceFile == "main source"); assert(rec.buildSettings.sourceFiles == ["": ["source1", "source2", "source3"]]); assert(rec.buildSettings.injectSourceFiles == ["": ["finalbinarysourcefile.d", "extrafile"]]); assert(rec.buildSettings.extraDependencyFiles == ["": ["extradepfile1", "extradepfile2", "extradepfile3"]]); assert(rec.buildSettings.versions == ["": ["version1", "version2", "version3"]]); assert(rec.buildSettings.debugVersions == ["": ["debug1", "debug2", "debug3"]]); assert(rec.buildSettings.versionFilters == ["": ["version1", "version2", "version3"]]); assert(rec.buildSettings.debugVersionFilters == ["": ["debug1", "debug2", "debug3"]]); assert(rec.buildSettings.importPaths == ["": ["import1", "import2", "import3"]]); assert(rec.buildSettings.cImportPaths == ["": ["cimport1", "cimport2", "cimport3"]]); assert(rec.buildSettings.stringImportPaths == ["": ["string1", "string2", "string3"]]); assert(rec.buildSettings.preGenerateCommands == ["": ["preg1", "preg2", "preg3"]]); assert(rec.buildSettings.postGenerateCommands == ["": ["postg1", "postg2", "postg3"]]); assert(rec.buildSettings.preBuildCommands == ["": ["preb1", "preb2", "preb3"]]); assert(rec.buildSettings.postBuildCommands == ["": ["postb1", "postb2", "postb3"]]); assert(rec.buildSettings.preRunCommands == ["": ["prer1", "prer2", "prer3"]]); assert(rec.buildSettings.postRunCommands == ["": ["postr1", "postr2", "postr3"]]); assert(rec.buildSettings.environments == ["": ["Var1": "env"]]); assert(rec.buildSettings.buildEnvironments == ["": ["Var2": "buildEnv"]]); assert(rec.buildSettings.runEnvironments == ["": ["Var3": "runEnv"]]); assert(rec.buildSettings.preGenerateEnvironments == ["": ["Var4": "preGenEnv"]]); assert(rec.buildSettings.postGenerateEnvironments == 
["": ["Var5": "postGenEnv"]]); assert(rec.buildSettings.preBuildEnvironments == ["": ["Var6": "preBuildEnv"]]); assert(rec.buildSettings.postBuildEnvironments == ["": ["Var7": "postBuildEnv"]]); assert(rec.buildSettings.preRunEnvironments == ["": ["Var8": "preRunEnv"]]); assert(rec.buildSettings.postRunEnvironments == ["": ["Var9": "postRunEnv"]]); assert(rec.buildSettings.dflags == ["": ["df1", "df2", "df3"]]); assert(rec.buildSettings.lflags == ["": ["lf1", "lf2", "lf3"]]); } unittest { // test platform identifiers auto sdl = `name "testproject" dflags "-a" "-b" platform="windows-x86" dflags "-c" platform="windows-x86" dflags "-e" "-f" dflags "-g" dflags "-h" "-i" platform="linux" dflags "-j" platform="linux" `; PackageRecipe rec; parseSDLTest(rec, sdl); assert(rec.buildSettings.dflags.length == 3); assert(rec.buildSettings.dflags["windows-x86"] == ["-a", "-b", "-c"]); assert(rec.buildSettings.dflags[""] == ["-e", "-f", "-g"]); assert(rec.buildSettings.dflags["linux"] == ["-h", "-i", "-j"]); } unittest { // test for missing name field import std.exception; auto sdl = `description "missing name"`; PackageRecipe rec; assertThrown(parseSDLTest(rec, sdl)); } unittest { // test single value fields import std.exception; PackageRecipe rec; assertThrown!Exception(parseSDLTest(rec, `name "hello" "world"`)); assertThrown!Exception(parseSDLTest(rec, `name`)); assertThrown!Exception(parseSDLTest(rec, `name 10`)); assertThrown!Exception(parseSDLTest(rec, `name "hello" { world }`)); assertThrown!Exception(parseSDLTest(rec, `name "" versions "hello" 10`)); } unittest { // test basic serialization PackageRecipe p; p.name = "test"; p.authors = ["foo", "bar"]; p.buildSettings.dflags["windows"] = ["-a"]; p.buildSettings.lflags[""] = ["-b", "-c"]; auto sdl = toSDL(p).toSDLDocument(); assert(sdl == `name "test" authors "foo" "bar" dflags "-a" platform="windows" lflags "-b" "-c" `); } unittest { // test that default is preserved after serialization immutable sdl = `name 
"optional-deps" dependency "foo-bar" version="1.1.2" optional=true default=true`; PackageRecipe rec; parseSDLTest(rec, sdl); with (rec.buildSettings) { assert(dependencies["foo-bar"].optional); assert(dependencies["foo-bar"].default_); } immutable back = toSDL(rec).toSDLDocument(); import std.algorithm : canFind; assert(back.canFind("default=true")); } unittest { auto sdl = "name \"test\"\nsourcePaths"; PackageRecipe rec; parseSDLTest(rec, sdl); assert("" in rec.buildSettings.sourcePaths); } unittest { auto sdl = "name \"test\"\ncSourcePaths"; PackageRecipe rec; parseSDLTest(rec, sdl); assert("" in rec.buildSettings.cSourcePaths); } unittest { auto sdl = `name "test" dependency "package" repository="git+https://some.url" version="12345678" `; PackageRecipe rec; parseSDLTest(rec, sdl); auto dependency = rec.buildSettings.dependencies["package"]; assert(!dependency.repository.empty); assert(dependency.repository.ref_ == "12345678"); } unittest { PackageRecipe p; p.name = "test"; auto repository = Repository("git+https://some.url", "12345678"); p.buildSettings.dependencies["package"] = Dependency(repository); auto sdl = toSDL(p).toSDLDocument(); assert(sdl == `name "test" dependency "package" repository="git+https://some.url" version="12345678" `); } dub-1.40.0/source/dub/recipe/selection.d000066400000000000000000000173251477246567400200570ustar00rootroot00000000000000/** * Contains type definition for the selections file * * The selections file, commonly known by its file name * `dub.selections.json`, is used by Dub to store resolved * dependencies. Its purpose is identical to other package * managers' lock file. 
*/ module dub.recipe.selection; import dub.dependency; import dub.internal.vibecompat.inet.path : NativePath; import dub.internal.configy.Attributes; import dub.internal.dyaml.stdsumtype; import std.exception; deprecated("Use either `Selections!1` or `SelectionsFile` instead") public alias Selected = Selections!1; /** * Top level type for `dub.selections.json` * * To support multiple version, we expose a `SumType` which * contains the "real" version being parsed. */ public struct SelectionsFile { /// Private alias to avoid repetition private alias DataType = SumType!(Selections!0, Selections!1); /** * Get the `fileVersion` of this selection file * * The `fileVersion` is always present, no matter the version. * This is a convenience function that matches any version and allows * one to retrieve it. * * Note that the `fileVersion` can be an unsupported version. */ public uint fileVersion () const @safe pure nothrow @nogc { return this.content.match!((s) => s.fileVersion); } /** * Whether this dub.selections.json can be inherited by nested projects * without local dub.selections.json */ public bool inheritable () const @safe pure nothrow @nogc { return this.content.match!( (const Selections!0 _) => false, (const Selections!1 s) => s.inheritable, ); } /** * The content of this selections file * * The underlying content can be accessed using * `dub.internal.yaml.stdsumtype : match`, for example: * --- * SelectionsFile file = readSelectionsFile(); * file.content.match!( * (Selections!0 s) => logWarn("Unsupported version: %s", s.fileVersion), * (Selections!1 s) => logWarn("Old version (1), please upgrade!"), * (Selections!2 s) => logInfo("You are up to date"), * ); * --- */ public DataType content; /** * Deserialize the selections file according to its version * * This will first deserialize the `fileVersion` only, and then * the expected version if it is supported. 
Unsupported versions * will be returned inside a `Selections!0` struct, * which only contains a `fileVersion`. */ public static SelectionsFile fromYAML (scope ConfigParser!SelectionsFile parser) { import dub.internal.configy.Read; static struct OnlyVersion { uint fileVersion; } auto vers = parseConfig!OnlyVersion( CLIArgs.init, parser.node, StrictMode.Ignore); switch (vers.fileVersion) { case 1: return SelectionsFile(DataType(parser.parseAs!(Selections!1))); default: return SelectionsFile(DataType(Selections!0(vers.fileVersion))); } } } /** * A specific version of the selections file * * Currently, only two instantiations of this struct are possible: * - `Selections!0` is an invalid/unsupported version; * - `Selections!1` is the most widespread version; */ public struct Selections (ushort Version) { /// public uint fileVersion = Version; static if (Version == 0) { /* Invalid version */ } else static if (Version == 1) { /// The selected package and their matching versions public SelectedDependency[string] versions; /// Whether this dub.selections.json can be inherited by nested projects /// without local dub.selections.json @Optional public bool inheritable; } else static assert(false, "This version is not supported"); } /// Wrapper around `SelectedDependency` to do deserialization but still provide /// a `Dependency` object to client code. 
private struct SelectedDependency { public Dependency actual; alias actual this; /// Constructor, used in `fromYAML` public this (inout(Dependency) dep) inout @safe pure nothrow @nogc { this.actual = dep; } /// Allow external code to assign to this object as if it was a `Dependency` public ref SelectedDependency opAssign (Dependency dep) return pure nothrow @nogc { this.actual = dep; return this; } /// Read a `Dependency` from the config file - Required to support both short and long form static SelectedDependency fromYAML (scope ConfigParser!SelectedDependency p) { import dub.internal.dyaml.node; if (p.node.nodeID == NodeID.scalar) return SelectedDependency(Dependency(Version(p.node.as!string))); auto d = p.parseAs!YAMLFormat; if (d.path.length) return SelectedDependency(Dependency(NativePath(d.path))); else { assert(d.version_.length); if (d.repository.length) return SelectedDependency(Dependency(Repository(d.repository, d.version_))); return SelectedDependency(Dependency(Version(d.version_))); } } /// In-file representation of a dependency as permitted in `dub.selections.json` private struct YAMLFormat { @Optional @Name("version") string version_; @Optional string path; @Optional string repository; public void validate () const scope @safe pure { enforce(this.version_.length || this.path.length || this.repository.length, "Need to provide a version string, or an object with one of the following fields: `version`, `path`, or `repository`"); enforce(!this.path.length || !this.repository.length, "Cannot provide a `path` dependency if a repository dependency is used"); enforce(!this.path.length || !this.version_.length, "Cannot provide a `path` dependency if a `version` dependency is used"); enforce(!this.repository.length || this.version_.length, "Cannot provide a `repository` dependency without a `version`"); } } } // Ensure we can read all type of dependencies unittest { import dub.internal.configy.Read : parseConfigString; immutable string content = `{ 
"fileVersion": 1, "versions": { "simple": "1.5.6", "branch": "~master", "branch2": "~main", "path": { "path": "../some/where" }, "repository": { "repository": "git+https://github.com/dlang/dub", "version": "123456123456123456" } } }`; auto file = parseConfigString!SelectionsFile(content, "/dev/null"); assert(file.fileVersion == 1); auto s = file.content.match!( (Selections!1 s) => s, (s) { assert(0); return Selections!(1).init; }, ); assert(!s.inheritable); assert(s.versions.length == 5); assert(s.versions["simple"] == Dependency(Version("1.5.6"))); assert(s.versions["branch"] == Dependency(Version("~master"))); assert(s.versions["branch2"] == Dependency(Version("~main"))); assert(s.versions["path"] == Dependency(NativePath("../some/where"))); assert(s.versions["repository"] == Dependency(Repository("git+https://github.com/dlang/dub", "123456123456123456"))); } // with optional `inheritable` Boolean unittest { import dub.internal.configy.Read : parseConfigString; immutable string content = `{ "fileVersion": 1, "inheritable": true, "versions": { "simple": "1.5.6", } }`; auto s = parseConfigString!SelectionsFile(content, "/dev/null"); assert(s.inheritable); } // Test reading an unsupported version unittest { import dub.internal.configy.Read : parseConfigString; immutable string content = `{"fileVersion": 9999, "thisis": "notrecognized"}`; auto s = parseConfigString!SelectionsFile(content, "/dev/null"); assert(s.fileVersion == 9999); } dub-1.40.0/source/dub/semver.d000066400000000000000000000311251477246567400161160ustar00rootroot00000000000000/** Implements version validation and comparison according to the semantic versioning specification. The general format of a semantic version is: a.b.c[-x.y...][+x.y...] a/b/c must be integer numbers with no leading zeros, and x/y/... must be either numbers or identifiers containing only ASCII alphabetic characters or hyphens. Identifiers may not start with a digit. 
See_Also: http://semver.org/ Copyright: © 2013-2016 rejectedsoftware e.K. License: Subject to the terms of the MIT license, as written in the included LICENSE.txt file. Authors: Sönke Ludwig */ module dub.semver; import std.string; import std.algorithm : max; import std.conv; @safe: /** Validates a version string according to the SemVer specification. */ bool isValidVersion(scope string ver) pure @nogc nothrow { // NOTE: this is not by spec, but to ensure sane input if (ver.length > 256) return false; // a auto sepi = ver.indexOf('.'); if (sepi < 0) return false; if (!isValidNumber(ver[0 .. sepi])) return false; ver = ver[sepi+1 .. $]; // c sepi = ver.indexOf('.'); if (sepi < 0) return false; if (!isValidNumber(ver[0 .. sepi])) return false; ver = ver[sepi+1 .. $]; // c sepi = ver.indexOfAny("-+"); if (sepi < 0) sepi = ver.length; if (!isValidNumber(ver[0 .. sepi])) return false; ver = ver[sepi .. $]; // prerelease tail if (ver.length > 0 && ver[0] == '-') { ver = ver[1 .. $]; sepi = ver.indexOf('+'); if (sepi < 0) sepi = ver.length; if (!isValidIdentifierChain(ver[0 .. sepi])) return false; ver = ver[sepi .. $]; } // build tail if (ver.length > 0 && ver[0] == '+') { ver = ver[1 .. $]; if (!isValidIdentifierChain(ver, true)) return false; ver = null; } assert(ver.length == 0); return true; } /// unittest { assert(isValidVersion("1.9.0")); assert(isValidVersion("0.10.0")); assert(!isValidVersion("01.9.0")); assert(!isValidVersion("1.09.0")); assert(!isValidVersion("1.9.00")); assert(isValidVersion("1.0.0-alpha")); assert(isValidVersion("1.0.0-alpha.1")); assert(isValidVersion("1.0.0-0.3.7")); assert(isValidVersion("1.0.0-x.7.z.92")); assert(isValidVersion("1.0.0-x.7-z.92")); assert(!isValidVersion("1.0.0-00.3.7")); assert(!isValidVersion("1.0.0-0.03.7")); assert(isValidVersion("1.0.0-alpha+001")); assert(isValidVersion("1.0.0+20130313144700")); assert(isValidVersion("1.0.0-beta+exp.sha.5114f85")); assert(!isValidVersion(" 1.0.0")); assert(!isValidVersion("1. 
0.0")); assert(!isValidVersion("1.0 .0")); assert(!isValidVersion("1.0.0 ")); assert(!isValidVersion("1.0.0-a_b")); assert(!isValidVersion("1.0.0+")); assert(!isValidVersion("1.0.0-")); assert(!isValidVersion("1.0.0-+a")); assert(!isValidVersion("1.0.0-a+")); assert(!isValidVersion("1.0")); assert(!isValidVersion("1.0-1.0")); } /** Determines if a given valid SemVer version has a pre-release suffix. */ bool isPreReleaseVersion(scope string ver) pure @nogc nothrow in { assert(isValidVersion(ver)); } do { foreach (i; 0 .. 2) { auto di = ver.indexOf('.'); assert(di > 0); ver = ver[di+1 .. $]; } auto di = ver.indexOf('-'); if (di < 0) return false; return isValidNumber(ver[0 .. di]); } /// unittest { assert(isPreReleaseVersion("1.0.0-alpha")); assert(isPreReleaseVersion("1.0.0-alpha+b1")); assert(isPreReleaseVersion("0.9.0-beta.1")); assert(!isPreReleaseVersion("0.9.0")); assert(!isPreReleaseVersion("0.9.0+b1")); } /** Compares the precedence of two SemVer version strings. The version strings must be validated using `isValidVersion` before being passed to this function. Note that the build meta data suffix (if any) is being ignored when comparing version numbers. Returns: Returns a negative number if `a` is a lower version than `b`, `0` if they are equal, and a positive number otherwise. */ int compareVersions(scope string a, scope string b) pure @nogc { // This needs to be a nested function as we can't pass local scope // variables by `ref` int compareNumber() @safe pure @nogc { int res = 0; while (true) { if (a[0] != b[0] && res == 0) res = a[0] - b[0]; a = a[1 .. $]; b = b[1 .. $]; auto aempty = !a.length || (a[0] < '0' || a[0] > '9'); auto bempty = !b.length || (b[0] < '0' || b[0] > '9'); if (aempty != bempty) return bempty - aempty; if (aempty) return res; } } // compare a.b.c numerically if (auto ret = compareNumber()) return ret; assert(a[0] == '.' && b[0] == '.'); a = a[1 .. $]; b = b[1 .. $]; if (auto ret = compareNumber()) return ret; assert(a[0] == '.' 
&& b[0] == '.'); a = a[1 .. $]; b = b[1 .. $]; if (auto ret = compareNumber()) return ret; // give precedence to non-prerelease versions bool apre = a.length > 0 && a[0] == '-'; bool bpre = b.length > 0 && b[0] == '-'; if (apre != bpre) return bpre - apre; if (!apre) return 0; // compare the prerelease tail lexicographically do { a = a[1 .. $]; b = b[1 .. $]; if (auto ret = compareIdentifier(a, b)) return ret; } while (a.length > 0 && b.length > 0 && a[0] != '+' && b[0] != '+'); // give longer prerelease tails precedence bool aempty = a.length == 0 || a[0] == '+'; bool bempty = b.length == 0 || b[0] == '+'; if (aempty == bempty) { assert(aempty); return 0; } return bempty - aempty; } /// unittest { assert(compareVersions("1.0.0", "1.0.0") == 0); assert(compareVersions("1.0.0+b1", "1.0.0+b2") == 0); assert(compareVersions("1.0.0", "2.0.0") < 0); assert(compareVersions("1.0.0-beta", "1.0.0") < 0); assert(compareVersions("1.0.1", "1.0.0") > 0); } unittest { void assertLess(string a, string b) { assert(compareVersions(a, b) < 0, "Failed for "~a~" < "~b); assert(compareVersions(b, a) > 0); assert(compareVersions(a, a) == 0); assert(compareVersions(b, b) == 0); } assertLess("1.0.0", "2.0.0"); assertLess("2.0.0", "2.1.0"); assertLess("2.1.0", "2.1.1"); assertLess("1.0.0-alpha", "1.0.0"); assertLess("1.0.0-alpha", "1.0.0-alpha.1"); assertLess("1.0.0-alpha.1", "1.0.0-alpha.beta"); assertLess("1.0.0-alpha.beta", "1.0.0-beta"); assertLess("1.0.0-beta", "1.0.0-beta.2"); assertLess("1.0.0-beta.2", "1.0.0-beta.11"); assertLess("1.0.0-beta.11", "1.0.0-rc.1"); assertLess("1.0.0-rc.1", "1.0.0"); assert(compareVersions("1.0.0", "1.0.0+1.2.3") == 0); assert(compareVersions("1.0.0", "1.0.0+1.2.3-2") == 0); assert(compareVersions("1.0.0+asdasd", "1.0.0+1.2.3") == 0); assertLess("2.0.0", "10.0.0"); assertLess("1.0.0-2", "1.0.0-10"); assertLess("1.0.0-99", "1.0.0-1a"); assertLess("1.0.0-99", "1.0.0-a"); assertLess("1.0.0-alpha", "1.0.0-alphb"); assertLess("1.0.0-alphz", "1.0.0-alphz0"); 
assertLess("1.0.0-alphZ", "1.0.0-alpha"); } /** Increments a given (partial) version number to the next higher version. Prerelease and build metadata information is ignored. The given version can skip the minor and patch digits. If no digits are skipped, the next minor version will be selected. If the patch or minor versions are skipped, the next major version will be selected. This function corresponds to the semantics of the "~>" comparison operator's upper bound. The semantics of this are the same as for the "approximate" version specifier from rubygems. (https://github.com/rubygems/rubygems/tree/81d806d818baeb5dcb6398ca631d772a003d078e/lib/rubygems/version.rb) See_Also: `expandVersion` */ string bumpVersion(string ver) pure { // Cut off metadata and prerelease information. auto mi = ver.indexOfAny("+-"); if (mi > 0) ver = ver[0..mi]; // Increment next to last version from a[.b[.c]]. auto splitted = () @trusted { return split(ver, "."); } (); // DMD 2.065.0 assert(splitted.length > 0 && splitted.length <= 3, "Version corrupt: " ~ ver); auto to_inc = splitted.length == 3? 1 : 0; splitted = splitted[0 .. to_inc+1]; splitted[to_inc] = to!string(to!int(splitted[to_inc]) + 1); // Fill up to three components to make valid SemVer version. while (splitted.length < 3) splitted ~= "0"; return splitted.join("."); } /// unittest { assert("1.0.0" == bumpVersion("0")); assert("1.0.0" == bumpVersion("0.0")); assert("0.1.0" == bumpVersion("0.0.0")); assert("1.3.0" == bumpVersion("1.2.3")); assert("1.3.0" == bumpVersion("1.2.3+metadata")); assert("1.3.0" == bumpVersion("1.2.3-pre.release")); assert("1.3.0" == bumpVersion("1.2.3-pre.release+metadata")); } /** Increments a given version number to the next incompatible version. Prerelease and build metadata information is removed. This implements the "^" comparison operator, which represents "non-breaking SemVer compatibility." With 0.x.y releases, any release can break. With x.y.z releases, only major releases can break. 
*/ string bumpIncompatibleVersion(string ver) pure { // Cut off metadata and prerelease information. auto mi = ver.indexOfAny("+-"); if (mi > 0) ver = ver[0..mi]; // Increment next to last version from a[.b[.c]]. auto splitted = () @trusted { return split(ver, "."); } (); // DMD 2.065.0 assert(splitted.length == 3, "Version corrupt: " ~ ver); if (splitted[0] == "0") splitted[2] = to!string(to!int(splitted[2]) + 1); else splitted = [to!string(to!int(splitted[0]) + 1), "0", "0"]; return splitted.join("."); } /// unittest { assert(bumpIncompatibleVersion("0.0.0") == "0.0.1"); assert(bumpIncompatibleVersion("0.1.2") == "0.1.3"); assert(bumpIncompatibleVersion("1.0.0") == "2.0.0"); assert(bumpIncompatibleVersion("1.2.3") == "2.0.0"); assert(bumpIncompatibleVersion("1.2.3+metadata") == "2.0.0"); assert(bumpIncompatibleVersion("1.2.3-pre.release") == "2.0.0"); assert(bumpIncompatibleVersion("1.2.3-pre.release+metadata") == "2.0.0"); } /** Takes a partial version and expands it to a valid SemVer version. This function corresponds to the semantics of the "~>" comparison operator's lower bound. See_Also: `bumpVersion` */ string expandVersion(string ver) pure { auto mi = ver.indexOfAny("+-"); auto sub = ""; if (mi > 0) { sub = ver[mi..$]; ver = ver[0..mi]; } auto splitted = () @trusted { return split(ver, "."); } (); // DMD 2.065.0 assert(splitted.length > 0 && splitted.length <= 3, "Version corrupt: " ~ ver); while (splitted.length < 3) splitted ~= "0"; return splitted.join(".") ~ sub; } /// unittest { assert("1.0.0" == expandVersion("1")); assert("1.0.0" == expandVersion("1.0")); assert("1.0.0" == expandVersion("1.0.0")); // These are rather exotic variants... 
assert("1.0.0-pre.release" == expandVersion("1-pre.release")); assert("1.0.0+meta" == expandVersion("1+meta")); assert("1.0.0-pre.release+meta" == expandVersion("1-pre.release+meta")); } private int compareIdentifier(scope ref string a, scope ref string b) pure @nogc { bool anumber = true; bool bnumber = true; bool aempty = true, bempty = true; int res = 0; while (true) { if (a[0] != b[0] && res == 0) res = a[0] - b[0]; if (anumber && (a[0] < '0' || a[0] > '9')) anumber = false; if (bnumber && (b[0] < '0' || b[0] > '9')) bnumber = false; a = a[1 .. $]; b = b[1 .. $]; aempty = !a.length || a[0] == '.' || a[0] == '+'; bempty = !b.length || b[0] == '.' || b[0] == '+'; if (aempty || bempty) break; } if (anumber && bnumber) { // the !empty value might be an identifier instead of a number, but identifiers always have precedence if (aempty != bempty) return bempty - aempty; return res; } else { if (anumber && aempty) return -1; if (bnumber && bempty) return 1; // this assumption is necessary to correctly classify 111A > 11111 (ident always > number)! static assert('0' < 'a' && '0' < 'A'); if (res != 0) return res; return bempty - aempty; } } private bool isValidIdentifierChain(scope string str, bool allow_leading_zeros = false) pure @nogc nothrow { if (str.length == 0) return false; while (str.length) { auto end = str.indexOf('.'); if (end < 0) end = str.length; if (!isValidIdentifier(str[0 .. end], allow_leading_zeros)) return false; if (end < str.length) str = str[end+1 .. $]; else break; } return true; } private bool isValidIdentifier(scope string str, bool allow_leading_zeros = false) pure @nogc nothrow { if (str.length < 1) return false; bool numeric = true; foreach (ch; str) { switch (ch) { default: return false; case 'a': .. case 'z': case 'A': .. case 'Z': case '-': numeric = false; break; case '0': .. 
case '9': break; } } if (!allow_leading_zeros && numeric && str[0] == '0' && str.length > 1) return false; return true; } private bool isValidNumber(string str) pure @nogc nothrow { if (str.length < 1) return false; foreach (ch; str) if (ch < '0' || ch > '9') return false; // don't allow leading zeros if (str[0] == '0' && str.length > 1) return false; return true; } private ptrdiff_t indexOfAny(scope string str, in char[] chars) pure @nogc nothrow { ptrdiff_t ret = -1; foreach (ch; chars) { auto idx = str.indexOf(ch); if (idx >= 0 && (ret < 0 || idx < ret)) ret = idx; } return ret; } dub-1.40.0/source/dub/test/000077500000000000000000000000001477246567400154255ustar00rootroot00000000000000dub-1.40.0/source/dub/test/base.d000066400000000000000000000533611477246567400165140ustar00rootroot00000000000000/******************************************************************************* Base utilities (types, functions) used in tests The main type in this module is `TestDub`. `TestDub` is a class that inherits from `Dub` and inject dependencies in it to avoid relying on IO. First and foremost, by overriding `makePackageManager` and returning a `TestPackageManager` instead, we avoid hitting the local filesystem and instead present a view of the "local packages" that is fully in-memory. Likewise, by providing a `MockPackageSupplier`, we can imitate the behavior of the registry without relying on it. Leftover_IO: Note that reliance on IO was originally all over the place in the Dub codebase. For this reason, **new tests might find themselves doing I/O**. When that happens, one should isolate the place which does I/O and refactor the code to make dependency injection possible and practical. An example of this is any place calling `Package.load`, `readPackageRecipe`, or `Package.findPackageFile`. Supported_features: In order to make writing tests possible and practical, not every features where implemented in `TestDub`. 
Notably, path-based packages are not supported at the moment, as they would need a better filesystem abstraction. However, it would be desirable to add support for them at some point in the future. Writing_tests: `TestDub` exposes a few extra features to make writing tests easier. Ideally, those extra features should be kept to a minimum, as a convenient API for writing tests is likely to be a convenient API for library and application developers as well. It is expected that most tests will be centered about the `Project`, also known as the "main package" that is loaded and drives Dub's logic when common operations such as `dub build` are performed. A minimalistic and documented unittest can be found in this module, showing the various features of the test framework. Logging: Dub writes to stdout / stderr in various places. While it would be desirable to do dependency injection on it, the benefits brought by doing so currently doesn't justify the amount of work required. If unittests for some reason trigger messages being written to stdout/stderr, make sure that the logging functions are being used instead of bare `write` / `writeln`. 
*******************************************************************************/ module dub.test.base; version (unittest): import std.array; public import std.algorithm; import std.datetime.systime; import std.exception; import std.format; import std.string; import dub.data.settings; public import dub.dependency; public import dub.dub; public import dub.package_; import dub.internal.io.mockfs; import dub.internal.vibecompat.core.file : FileInfo; public import dub.internal.io.filesystem; import dub.packagemanager; import dub.packagesuppliers.packagesupplier; import dub.project; import dub.recipe.io : parsePackageRecipe; import dub.recipe.selection; /// Example of a simple unittest for a project with a single dependency unittest { // Enabling this would provide some more verbose output, which makes // debugging a failing unittest much easier. version (none) { enableLogging(); scope(exit) disableLogging(); } // Initialization is best done as a delegate passed to `TestDub` constructor, // which receives an `FSEntry` representing the root of the filesystem. // Various low-level functions are exposed (mkdir, writeFile, ...), // as well as higher-level functions (`writePackageFile`). scope dub = new TestDub((scope Filesystem root) { // `a` will be loaded as the project while `b` will be loaded // as a simple package. The recipe files can be in JSON or SDL format, // here we use both to demonstrate this. 
root.writeFile(TestDub.ProjectPath ~ "dub.json", `{ "name": "a", "dependencies": { "b": "~>1.0" } }`); root.writeFile(TestDub.ProjectPath ~ "dub.selections.json", `{"fileVersion": 1, "versions": {"b": "1.1.0"}}`); // Note that you currently need to add the `version` to the package root.writePackageFile("b", "1.0.0", `name "b" version "1.0.0"`, PackageFormat.sdl); root.writePackageFile("b", "1.1.0", `name "b" version "1.1.0"`, PackageFormat.sdl); root.writePackageFile("b", "1.2.0", `name "b" version "1.2.0"`, PackageFormat.sdl); }); // `Dub.loadPackage` will set this package as the project // While not required, it follows the common Dub use case. dub.loadPackage(); // Simple tests can be performed using the public API assert(dub.project.hasAllDependencies(), "project has missing dependencies"); assert(dub.project.getDependency("b", true), "Missing 'b' dependency"); assert(dub.project.getDependency("b", true).version_ == Version("1.1.0")); // While it is important to make your tests fail before you make them pass, // as is common with TDD, it can also be useful to test simple assumptions // as part of your basic tests. Here we want to make sure `getDependency` // doesn't always return something regardless of its first argument. // Note that this package segments modules by categories, e.g. dependencies, // and tests are run serially in a module, so one may rely on previous tests // having passed to avoid repeating some assumptions. assert(dub.project.getDependency("no", true) is null, "Returned unexpected dependency"); // This triggers the dependency resolution process that happens // when one does not have a selection file in the project. // Dub will resolve dependencies and generate the selection file // (in memory). If your test has set dependencies / no dependencies, // this will not be needed. 
dub.upgrade(UpgradeOptions.select); assert(dub.project.getDependency("b", true).version_ == Version("1.1.0")); /// Now actually upgrade dependencies in memory dub.upgrade(UpgradeOptions.select | UpgradeOptions.upgrade); assert(dub.project.getDependency("b", true).version_ == Version("1.2.0")); /// Adding a package to the registry require the version and at list a recipe dub.getRegistry().add(Version("1.3.0"), (scope Filesystem pkg) { // This is required pkg.writeFile(NativePath(`dub.sdl`), `name "b"`); // Any other files can be present, as a normal package pkg.mkdir(NativePath("source/b/")); pkg.writeFile( NativePath("main.d"), "module b.main; void main() {}"); }); // Fetch the package from the registry dub.upgrade(UpgradeOptions.select | UpgradeOptions.upgrade); assert(dub.project.getDependency("b", true).version_ == Version("1.3.0")); } // TODO: Remove and handle logging the same way we handle other IO import dub.internal.logging; public void enableLogging() { setLogLevel(LogLevel.debug_); } public void disableLogging() { setLogLevel(LogLevel.none); } /** * An instance of Dub that does not rely on the environment * * This instance of dub should not read any environment variables, * nor should it do any file IO, to make it usable and reliable in unittests. * Currently it reads environment variables but does not read the configuration. * * Note that since the design of Dub was centered on the file system for so long, * `NativePath` is still a core part of how one interacts with this class. * In order to be as close to the production code as possible, this class * use the following conventions: * - The project is located under `/dub/project/`; * - The user and system packages are under `/dub/user/packages/` and * `/dub/system/packages/`, respectively; * Those paths don't need to exists, but they are what one might see * when writing and debugging unittests. 
*/ public class TestDub : Dub { /// The virtual filesystem that this instance acts on public MockFS fs; /** * Redundant reference to the registry * * We currently create 2 `MockPackageSupplier`s hidden behind a * `FallbackPackageSupplier` (see base implementation). * The fallback is never used, and we need to provide the user * a mean to access the registry so they can add packages to it. */ protected MockPackageSupplier registry; /// Convenience constants for use in unittests version (Windows) public static immutable Root = NativePath("T:\\dub\\"); else public static immutable Root = NativePath("/dub/"); /// Ditto public static immutable ProjectPath = Root ~ "project"; /// Ditto public static immutable SpecialDirs Paths = { temp: Root ~ "temp/", systemSettings: Root ~ "system/", userSettings: Root ~ "user/", userPackages: Root ~ "user/", cache: Root ~ "user/" ~ "cache/", }; /*************************************************************************** Instantiate a new `TestDub` instance with the provided filesystem state This exposes the raw virtual filesystem to the user, allowing any kind of customization to happen: Empty directory, non-writeable ones, etc... 
Params: dg = Delegate to be called with the filesystem, before `TestDub` instantiation is performed; root = The root path for this instance (forwarded to Dub) extras = Extras `PackageSupplier`s (forwarded to Dub) skip = What `PackageSupplier`s to skip (forwarded to Dub) ***************************************************************************/ public this (scope void delegate(scope Filesystem root) dg = null, string root = ProjectPath.toNativeString(), PackageSupplier[] extras = null, SkipPackageSuppliers skip = SkipPackageSuppliers.none) { /// Create the fs & its base structure auto fs_ = new MockFS(); fs_.mkdir(Paths.temp); fs_.mkdir(Paths.systemSettings); fs_.mkdir(Paths.userSettings); fs_.mkdir(Paths.userPackages); fs_.mkdir(Paths.cache); fs_.mkdir(ProjectPath); if (dg !is null) dg(fs_); this(fs_, root, extras, skip); } /// Workaround https://issues.dlang.org/show_bug.cgi?id=24388 when called /// when called with (null, ...). public this (typeof(null) _, string root = ProjectPath.toNativeString(), PackageSupplier[] extras = null, SkipPackageSuppliers skip = SkipPackageSuppliers.none) { alias TType = void delegate(scope Filesystem); this(TType.init, root, extras, skip); } /// Internal constructor private this(MockFS fs_, string root, PackageSupplier[] extras, SkipPackageSuppliers skip) { this.fs = fs_; super(root, extras, skip); } /*************************************************************************** Get a new `Dub` instance with the same filesystem This creates a new `TestDub` instance with the existing filesystem, allowing one to write tests that would normally require multiple Dub instantiation (e.g. test that `fetch` is idempotent). Like the main `TestDub` constructor, it allows to do modifications to the filesystem before the new instantiation is made. Params: dg = Delegate to be called with the filesystem, before `TestDub` instantiation is performed; Returns: A new `TestDub` instance referencing the same filesystem as `this`. 
***************************************************************************/ public TestDub newTest (scope void delegate(scope Filesystem root) dg = null, string root = ProjectPath.toNativeString(), PackageSupplier[] extras = null, SkipPackageSuppliers skip = SkipPackageSuppliers.none) { if (dg !is null) dg(this.fs); return new TestDub(this.fs, root, extras, skip); } /// Avoid loading user configuration protected override Settings loadConfig(ref SpecialDirs dirs) const { dirs = Paths; return Settings.init; } /// protected override PackageManager makePackageManager() { assert(this.fs !is null); return new TestPackageManager(this.fs); } /// See `MockPackageSupplier` documentation for this class' implementation protected override PackageSupplier makePackageSupplier(string url) { auto r = new MockPackageSupplier(url); if (this.registry is null) this.registry = r; return r; } /** * Returns a fully typed `TestPackageManager` * * This exposes the fully typed `PackageManager`, so that client * can call convenience functions on it directly. */ public override @property inout(TestPackageManager) packageManager() inout { return cast(inout(TestPackageManager)) this.m_packageManager; } /** * Returns a fully-typed `MockPackageSupplier` * * This exposes the first (and usually sole) `PackageSupplier` if typed * as `MockPackageSupplier` so that client can call convenience functions * on it directly. */ public @property inout(MockPackageSupplier) getRegistry() inout { // This will not work with `SkipPackageSupplier`. assert(this.registry !is null, "The registry hasn't been instantiated?"); return this.registry; } } /** * A `PackageManager` suitable to be used in unittests * * This `PackageManager` does not perform any IO. It imitates the base * `PackageManager`, exposing 3 locations, but loading of packages is not * automatic and needs to be done by passing a `Package` instance. 
*/ package class TestPackageManager : PackageManager { /// `loadSCMPackage` will strip some part of the remote / repository, /// which we need to mimic to provide a usable API. private struct GitReference { /// this (in Repository repo) { this.remote = repo.remote.chompPrefix("git+"); this.ref_ = repo.ref_.chompPrefix("~"); } /// this (in string remote, in string gitref) { this.remote = remote; this.ref_ = gitref; } string remote; string ref_; } /// List of all SCM packages that can be fetched by this instance protected string[GitReference] scm; this(Filesystem filesystem) { NativePath local = TestDub.ProjectPath ~ ".dub/packages/"; NativePath user = TestDub.Paths.userSettings ~ "packages/"; NativePath system = TestDub.Paths.systemSettings ~ "packages/"; super(filesystem, local, user, system); } /** * Re-Implementation of `gitClone`. * * The base implementation will do a `git` clone, to the file-system. * We need to mock both the `git` part and the write to the file system. */ protected override bool gitClone(string remote, string gitref, in NativePath dest) { if (auto pstr = GitReference(remote, gitref) in this.scm) { this.fs.mkdir(dest); this.fs.writeFile(dest ~ "dub.json", *pstr); return true; } return false; } /// Add a reachable SCM package to this `PackageManager` public void addTestSCMPackage(in Repository repo, string dub_json) { this.scm[GitReference(repo)] = dub_json; } /// Overriden because we currently don't have a way to do dependency /// injection on `dub.internal.utils : lockFile`. 
public override Package store(ubyte[] data, PlacementLocation dest, in PackageName name, in Version vers) { // Most of the code is copied from the base method assert(!name.sub.length, "Cannot store a subpackage, use main package instead"); NativePath dstpath = this.getPackagePath(dest, name, vers.toString()); this.fs.mkdir(dstpath.parentPath()); if (this.fs.existsFile(dstpath)) return this.getPackage(name, vers, dest); return this.store_(data, dstpath, name, vers); } } /** * Implements a `PackageSupplier` that doesn't do any IO * * This `PackageSupplier` needs to be pre-loaded with `Package` it can * find during the setup phase of the unittest. */ public class MockPackageSupplier : PackageSupplier { /// Internal duplication to avoid having to deserialize the zip content private struct PkgData { /// PackageRecipe recipe; /// ubyte[] data; } /// Mapping of package name to package zip data, ordered by `Version` protected PkgData[Version][PackageName] pkgs; /// URL this was instantiated with protected string url; /// public this(string url) { this.url = url; } /** * Adds a package to this `PackageSupplier` * * The registry API bakes in Zip files / binary data. * When adding a package here, just provide an `Filesystem` * representing the package directory, which will be converted * to ZipFile / `ubyte[]` and returned by `fetchPackage`. * * This use a delegate approach similar to `TestDub` constructor: * a delegate must be provided to initialize the package content. * The delegate will be called once and is expected to contain, * at its root, the package. * * The name of the package will be defined from the recipe file. * It's version, however, must be provided as parameter. * * Params: * vers = The `Version` of this package. * dg = A delegate that will populate its parameter with the * content of the package. 
*/ public void add (in Version vers, scope void delegate(scope Filesystem root) dg) { scope pkgRoot = new MockFS(); dg(pkgRoot); string recipe = pkgRoot.existsFile(NativePath("dub.json")) ? "dub.json" : null; if (recipe is null) recipe = pkgRoot.existsFile(NativePath("dub.sdl")) ? "dub.sdl" : null; if (recipe is null) recipe = pkgRoot.existsFile(NativePath("package.json")) ? "package.json" : null; // Note: If you want to provide an invalid package, override // [Mock]PackageSupplier. Most tests will expect a well-behaving // registry so this assert is here to help with writing tests. assert(recipe !is null, "No package recipe found: Expected dub.json or dub.sdl"); auto pkgRecipe = parsePackageRecipe( pkgRoot.readText(NativePath(recipe)), recipe); pkgRecipe.version_ = vers.toString(); const name = PackageName(pkgRecipe.name); this.pkgs[name][vers] = PkgData( pkgRecipe, pkgRoot.serializeToZip("%s-%s/".format(name, vers))); } /// public override @property string description() { return "unittest PackageSupplier for: " ~ this.url; } /// public override Version[] getVersions(in PackageName name) { if (auto ppkgs = name.main in this.pkgs) return (*ppkgs).keys; return null; } /// public override ubyte[] fetchPackage(in PackageName name, in VersionRange dep, bool pre_release) { return this.getBestMatch(name, dep, pre_release).data; } /// public override Json fetchPackageRecipe(in PackageName name, in VersionRange dep, bool pre_release) { import dub.recipe.json; auto match = this.getBestMatch(name, dep, pre_release); if (!match.data.length) return Json.init; auto res = toJson(match.recipe); return res; } /// protected PkgData getBestMatch ( in PackageName name, in VersionRange dep, bool pre_release) { auto ppkgs = name.main in this.pkgs; if (ppkgs is null) return typeof(return).init; PkgData match; foreach (vers, pr; *ppkgs) if ((!vers.isPreRelease || pre_release) && dep.matches(vers) && (!match.data.length || Version(match.recipe.version_) < vers)) { match.recipe = 
pr.recipe; match.data = pr.data; } return match; } /// public override SearchResult[] searchPackages(string query) { assert(0, this.url ~ " - searchPackages not implemented for: " ~ query); } } /** * Convenience function to write a package file * * Allows to write a package file (and only a package file) for a certain * package name and version. * * Params: * root = The root Filesystem * name = The package name (typed as string for convenience) * vers = The package version * recipe = The text of the package recipe * fmt = The format used for `recipe` (default to JSON) * location = Where to place the package (default to user location) */ public void writePackageFile (Filesystem root, in string name, in string vers, in string recipe, in PackageFormat fmt = PackageFormat.json, in PlacementLocation location = PlacementLocation.user) { const path = getPackagePath(name, vers, location); root.mkdir(path); root.writeFile( path ~ (fmt == PackageFormat.json ? "dub.json" : "dub.sdl"), recipe); } /// Returns: The final destination a specific package needs to be stored in public static NativePath getPackagePath(in string name_, string vers, PlacementLocation location = PlacementLocation.user) { PackageName name = PackageName(name_); // Keep in sync with `dub.packagemanager: PackageManager.getPackagePath` // and `Location.getPackagePath` NativePath result (in NativePath base) { NativePath res = base ~ name.main.toString() ~ vers ~ name.main.toString(); res.endsWithSlash = true; return res; } final switch (location) { case PlacementLocation.user: return result(TestDub.Paths.userSettings ~ "packages/"); case PlacementLocation.system: return result(TestDub.Paths.systemSettings ~ "packages/"); case PlacementLocation.local: return result(TestDub.ProjectPath ~ "/.dub/packages/"); } } dub-1.40.0/source/dub/test/dependencies.d000066400000000000000000000155401477246567400202250ustar00rootroot00000000000000/******************************************************************************* 
Test for dependencies This module is mostly concerned with dependency resolutions and visible user behavior. Tests that check how different recipe would interact with one another, and how conflicts are resolved or reported, belong here. The project (the loaded package) is usually named 'a' and dependencies use single-letter, increasing name, for simplicity. Version 1.0.0 is used where versions do not matter. Packages are usually created in reverse dependency order when possible, unless the creation order matters. Test that deal with dependency resolution should not concern themselves with the registry: instead, packages are added to the `PackageManager`, as that makes testing the core logic more robust without adding a layer of complexity brought by the `PackageSupplier`. Most tests have 3 parts: First, setup the various packages. Then, run `dub.upgrade(UpgradeOptions.select)` to create the selection. Finally, run tests on the resulting state. *******************************************************************************/ module dub.test.dependencies; version (unittest): import dub.test.base; // Ensure that simple dependencies get resolved correctly unittest { scope dub = new TestDub((scope Filesystem root) { root.writeFile(TestDub.ProjectPath ~ "dub.sdl", `name "a" version "1.0.0" dependency "b" version="*" dependency "c" version="*" `); root.writePackageFile("b", "1.0.0", `name "b" version "1.0.0"`, PackageFormat.sdl); root.writePackageFile("c", "1.0.0", `name "c" version "1.0.0"`, PackageFormat.sdl); }); dub.loadPackage(); dub.upgrade(UpgradeOptions.select); assert(dub.project.hasAllDependencies(), "project has missing dependencies"); assert(dub.project.getDependency("b", true), "Missing 'b' dependency"); assert(dub.project.getDependency("c", true), "Missing 'c' dependency"); assert(dub.project.getDependency("no", true) is null, "Returned unexpected dependency"); } // Test that indirect dependencies get resolved correctly unittest { scope dub = new 
TestDub((scope Filesystem root) { root.writeFile(TestDub.ProjectPath ~ "dub.sdl", `name "a" dependency "b" version="*"`); root.writePackageFile("b", "1.0.0", `name "b" version "1.0.0" dependency "c" version="*"`, PackageFormat.sdl); root.writePackageFile("c", "1.0.0", `name "c" version "1.0.0"`, PackageFormat.sdl); }); dub.loadPackage(); dub.upgrade(UpgradeOptions.select); assert(dub.project.hasAllDependencies(), "project has missing dependencies"); assert(dub.project.getDependency("b", true), "Missing 'b' dependency"); assert(dub.project.getDependency("c", true), "Missing 'c' dependency"); assert(dub.project.getDependency("no", true) is null, "Returned unexpected dependency"); } // Simple diamond dependency unittest { scope dub = new TestDub((scope Filesystem root) { root.writeFile(TestDub.ProjectPath ~ "dub.sdl", `name "a" dependency "b" version="*" dependency "c" version="*"`); root.writePackageFile("b", "1.0.0", `name "b" version "1.0.0" dependency "d" version="*"`, PackageFormat.sdl); root.writePackageFile("c", "1.0.0", `name "c" version "1.0.0" dependency "d" version="*"`, PackageFormat.sdl); root.writePackageFile("d", "1.0.0", `name "d" version "1.0.0"`, PackageFormat.sdl); }); dub.loadPackage(); dub.upgrade(UpgradeOptions.select); assert(dub.project.hasAllDependencies(), "project has missing dependencies"); assert(dub.project.getDependency("b", true), "Missing 'b' dependency"); assert(dub.project.getDependency("c", true), "Missing 'c' dependency"); assert(dub.project.getDependency("d", true), "Missing 'd' dependency"); assert(dub.project.getDependency("no", true) is null, "Returned unexpected dependency"); } // Missing dependencies trigger an error unittest { scope dub = new TestDub((scope Filesystem root) { root.writeFile(TestDub.ProjectPath ~ "dub.sdl", `name "a" dependency "b" version="*"`); }); dub.loadPackage(); try dub.upgrade(UpgradeOptions.select); catch (Exception exc) assert(exc.message() == `Failed to find any versions for package b, referenced 
by a ~master`); assert(!dub.project.hasAllDependencies(), "project should have missing dependencies"); assert(dub.project.getDependency("b", true) is null, "Found 'b' dependency"); assert(dub.project.getDependency("no", true) is null, "Returned unexpected dependency"); // Add the missing dependency to our PackageManager dub.fs.writePackageFile(`b`, "1.0.0", `name "b" version "1.0.0"`, PackageFormat.sdl); dub.packageManager.refresh(); dub.upgrade(UpgradeOptions.select); assert(dub.project.hasAllDependencies(), "project have missing dependencies"); assert(dub.project.getDependency("b", true), "Missing 'b' dependency"); assert(dub.project.getDependency("no", true) is null, "Returned unexpected dependency"); } // Issue 2695 - Nonsensical dependencies // Dependencies should resolve versions correctly regardless of the order they get requested unittest { scope dub = new TestDub((scope Filesystem root) { root.writeFile(TestDub.ProjectPath ~ "dub.sdl", `name "main" dependency "a" version="*" dependency "b" version="*"`); root.writePackageFile("a", "0.0.0", `name "a" dependency "c" version="~>0.0.0"`, PackageFormat.sdl); root.writePackageFile("b", "0.0.0", `name "b" dependency "c" version="0.0.0"`, PackageFormat.sdl); root.writePackageFile("c", "0.0.0", `name "c" version "0.0.0"`, PackageFormat.sdl); root.writePackageFile("c", "0.0.1", `name "c" version "0.0.1"`, PackageFormat.sdl); }); dub.loadPackage(); dub.upgrade(UpgradeOptions.select); assert(dub.project.hasAllDependencies(), "project have missing dependencies"); assert(dub.project.getDependency("c", true).version_ == Version("0.0.0")); } unittest { scope dub = new TestDub((scope Filesystem root) { root.writeFile(TestDub.ProjectPath ~ "dub.sdl", `name "main" dependency "a" version="*" dependency "b" version="*"`); root.writePackageFile("a", "1.0.0", `name "a" dependency "c" version="0.0.0"`, PackageFormat.sdl); root.writePackageFile("b", "1.0.0", `name "b" dependency "c" version="~>0.0.0"`, PackageFormat.sdl); 
root.writePackageFile("c", "0.0.0", `name "c" version "0.0.0"`, PackageFormat.sdl); root.writePackageFile("c", "0.0.1", `name "c" version "0.0.1"`, PackageFormat.sdl); }); dub.loadPackage(); dub.upgrade(UpgradeOptions.select); assert(dub.project.hasAllDependencies(), "project have missing dependencies"); assert(dub.project.getDependency("c", true).version_ == Version("0.0.0")); }dub-1.40.0/source/dub/test/others.d000066400000000000000000000113321477246567400170760ustar00rootroot00000000000000/******************************************************************************* Tests that don't fit in existing categories *******************************************************************************/ module dub.test.others; version (unittest): import std.algorithm; import std.format; import dub.test.base; // https://github.com/dlang/dub/issues/2696 unittest { const ValidURL = `git+https://example.com/dlang/dub`; // Taken from a commit in the dub repository const ValidHash = "54339dff7ce9ec24eda550f8055354f712f15800"; const Template = `{"name": "%s", "version": "1.0.0", "dependencies": { "dep1": { "repository": "%s", "version": "%s" }}}`; scope dub = new TestDub((scope Filesystem fs) { // Invalid URL, valid hash fs.writePackageFile("a", "1.0.0", Template.format("a", "git+https://nope.nope", ValidHash)); // Valid URL, invalid hash fs.writePackageFile("b", "1.0.0", Template.format("b", ValidURL, "invalid")); // Valid URL, valid hash fs.writePackageFile("c", "1.0.0", Template.format("c", ValidURL, ValidHash)); }); dub.packageManager.addTestSCMPackage( Repository(ValidURL, ValidHash), `{ "name": "dep1" }`); try dub.loadPackage(dub.packageManager.getPackage(PackageName("a"), Version("1.0.0"))); catch (Exception exc) assert(exc.message.canFind("Unable to fetch")); try dub.loadPackage(dub.packageManager.getPackage(PackageName("b"), Version("1.0.0"))); catch (Exception exc) assert(exc.message.canFind("Unable to fetch")); 
dub.loadPackage(dub.packageManager.getPackage(PackageName("c"), Version("1.0.0"))); assert(dub.project.hasAllDependencies()); assert(dub.project.getDependency("dep1", true), "Missing 'dep1' dependency"); } // Test for https://github.com/dlang/dub/pull/2481 // Make sure packages found with `add-path` take priority. unittest { const AddPathDir = TestDub.Paths.temp ~ "addpath/"; const BDir = AddPathDir ~ "b/"; scope dub = new TestDub((scope Filesystem root) { root.writeFile(TestDub.ProjectPath ~ "dub.json", `{ "name": "a", "dependencies": { "b": "~>1.0" } }`); root.writePackageFile("b", "1.0.0", `name "b" version "1.0.0"`, PackageFormat.sdl); root.mkdir(BDir); root.writeFile(BDir ~ "dub.json", `{"name": "b", "version": "1.0.0" }`); }); dub.loadPackage(); assert(!dub.project.hasAllDependencies()); dub.upgrade(UpgradeOptions.select); // Test that without add-path, we get a package in the userPackage const oldDir = dub.project.getDependency("b", true).path(); assert(oldDir == TestDub.Paths.userPackages ~ "packages/b/1.0.0/b/", oldDir.toNativeString()); // Now run `add-path` dub.addSearchPath(AddPathDir.toNativeString(), dub.defaultPlacementLocation); // We need a new instance to test scope newDub = dub.newTest(); newDub.loadPackage(); assert(newDub.project.hasAllDependencies()); const actualDir = newDub.project.getDependency("b", true).path(); assert(actualDir == BDir, actualDir.toNativeString()); } // Check that SCM-only dependencies don't lead to a scan of the FS unittest { const ValidURL = `git+https://example.com/dlang/dub`; // Taken from a commit in the dub repository const ValidHash = "54339dff7ce9ec24eda550f8055354f712f15800"; const Template = `{"name": "%s", "version": "1.0.0", "dependencies": { "dep1": { "repository": "%s", "version": "%s" }}}`; scope dub = new TestDub((scope Filesystem fs) { // This should never be read fs.writePackageFile("poison", "1.0.0", `poison`); fs.writeFile(TestDub.ProjectPath ~ "dub.json", `{ "name": "a", "dependencies": {"b": { 
"repository": "` ~ ValidURL ~ `", "version": "` ~ ValidHash ~ `" }} }`); }); dub.packageManager.addTestSCMPackage( Repository(ValidURL, ValidHash), `{"name":"b"}`); dub.loadPackage(); assert(dub.project.hasAllDependencies()); } // Check that a simple build does not lead to the cache being scanned unittest { scope dub = new TestDub((scope Filesystem fs) { // This should never be read fs.writePackageFile("b", "1.0.0", `poison`); fs.writePackageFile("b", "1.1.0", `poison`); // Dependency resolution may trigger scan, so we need a selections file fs.writeFile(TestDub.ProjectPath ~ "dub.json", `{ "name": "a", "dependencies": {"b":"~>1.0"}}`); fs.writeFile(TestDub.ProjectPath ~ "dub.selections.json", `{"fileVersion":1,"versions":{"b":"1.0.4"}}`); fs.writePackageFile("b", "1.0.4", `{"name":"b","version":"1.0.4"}`); }); dub.loadPackage(); assert(dub.project.hasAllDependencies()); } dub-1.40.0/source/dub/test/selections_from_parent_dir.d000066400000000000000000000126411477246567400232000ustar00rootroot00000000000000/******************************************************************************* Test inheritable flag of selections file Selections files can have an `inheritable` flag that is used to have a central selections file, e.g. in the case of a monorepo. 
*******************************************************************************/ module dub.test.selections_from_parent_dir; version (unittest): import dub.test.base; import std.string : replace; // dub.selections.json can be inherited from parent directories unittest { const pkg1Dir = TestDub.ProjectPath ~ "pkg1"; const pkg2Dir = TestDub.ProjectPath ~ "pkg2"; const path = TestDub.ProjectPath ~ "dub.selections.json"; const dubSelectionsJsonContent = `{ "fileVersion": 1, "inheritable": true, "versions": { "pkg1": {"path":"pkg1"} } } `; scope dub = new TestDub((scope Filesystem fs) { fs.mkdir(pkg1Dir); fs.writeFile(pkg1Dir ~ "dub.sdl", `name "pkg1" targetType "none"`); fs.mkdir(pkg2Dir); fs.writeFile(pkg2Dir ~ "dub.sdl", `name "pkg2" targetType "library" # don't specify a path, require inherited dub.selections.json to make it path-based (../pkg1) dependency "pkg1" version="*"`); // important: dub.selections.json in *parent* directory fs.writeFile(path, dubSelectionsJsonContent); }, pkg2Dir.toNativeString()); // pkg2 is our root package dub.loadPackage(); assert(dub.project.hasAllDependencies()); // the relative path should have been adjusted (`pkg1` => `../pkg1`) assert(dub.project.selections.getSelectedVersion(PackageName("pkg1")).path == NativePath("../pkg1")); // invoking `dub upgrade` for the pkg2 root package should generate a local dub.selections.json, // leaving the inherited one untouched dub.upgrade(UpgradeOptions.select); const nestedPath = pkg2Dir ~ "dub.selections.json"; assert(dub.fs.existsFile(nestedPath)); assert(dub.fs.readFile(path) == dubSelectionsJsonContent, "Inherited dub.selections.json modified after dub upgrade!"); const nestedContent = dub.fs.readText(nestedPath); assert(nestedContent == dubSelectionsJsonContent.replace(`{"path":"pkg1"}`, `{"path":"../pkg1"}`), "Unexpected nestedContent:\n" ~ nestedContent); } // a non-inheritable dub.selections.json breaks the inheritance chain unittest { const root = TestDub.ProjectPath ~ "root"; const 
root_a = root ~ "a"; const root_a_b = root_a ~ "b"; scope dub_ = new TestDub((scope Filesystem fs) { // inheritable root/dub.selections.json fs.mkdir(root); fs.writeFile(root ~ "dub.selections.json", `{ "fileVersion": 1, "inheritable": true, "versions": { "dub": "1.38.0" } } `); // non-inheritable root/a/dub.selections.json fs.mkdir(root_a); fs.writeFile(root_a ~ "dub.selections.json", `{ "fileVersion": 1, "versions": { "dub": "1.37.0" } } `); // We need packages for `loadPackage` fs.mkdir(root_a_b); fs.writeFile(root_a_b ~ `dub.json`, `{"name":"ab","dependencies":{"dub":"~>1.0"}}`); fs.writeFile(root_a ~ `dub.json`, `{"name":"a","dependencies":{"dub":"~>1.0"}}`); fs.writeFile(root ~ `dub.json`, `{"name":"r","dependencies":{"dub":"~>1.0"}}`); fs.writePackageFile(`dub`, `1.37.0`, `{"name":"dub","version":"1.37.0"}`); fs.writePackageFile(`dub`, `1.38.0`, `{"name":"dub","version":"1.38.0"}`); }); // no selections for root/a/b/ { auto dub = dub_.newTest(); const result = dub.packageManager.readSelections(root_a_b); assert(result.isNull()); dub.loadPackage(root_a_b); assert(!dub.project.hasAllDependencies()); } // local selections for root/a/ { auto dub = dub_.newTest(); const result = dub.packageManager.readSelections(root_a); assert(!result.isNull()); assert(result.get().absolutePath == root_a ~ "dub.selections.json"); assert(!result.get().selectionsFile.inheritable); dub.loadPackage(root_a); assert(dub.project.hasAllDependencies()); assert(dub.project.dependencies()[0].name == "dub"); assert(dub.project.dependencies()[0].version_ == Version("1.37.0")); } // local selections for root/ { auto dub = dub_.newTest(); const result = dub.packageManager.readSelections(root); assert(!result.isNull()); assert(result.get().absolutePath == root ~ "dub.selections.json"); assert(result.get().selectionsFile.inheritable); dub.loadPackage(root); assert(dub.project.hasAllDependencies()); assert(dub.project.dependencies()[0].name == "dub"); assert(dub.project.dependencies()[0].version_ 
== Version("1.38.0")); } // after removing non-inheritable root/a/dub.selections.json: inherited root selections for root/a/b/ { auto dub = dub_.newTest((scope Filesystem fs) { fs.removeFile(root_a ~ "dub.selections.json"); }); const result = dub.packageManager.readSelections(root_a_b); assert(!result.isNull()); assert(result.get().absolutePath == root ~ "dub.selections.json"); assert(result.get().selectionsFile.inheritable); dub.loadPackage(root_a_b); assert(dub.project.hasAllDependencies()); assert(dub.project.dependencies()[0].name == "dub"); assert(dub.project.dependencies()[0].version_ == Version("1.38.0")); } } dub-1.40.0/source/dub/test/subpackages.d000066400000000000000000000044651477246567400200730ustar00rootroot00000000000000/******************************************************************************* Test for subpackages Subpackages are packages that are part of a 'main' packages. Their version is that of their main (parent) package. They are referenced using a column, e.g. `mainpkg:subpkg`. Nested subpackages are disallowed. 
*******************************************************************************/ module dub.test.subpackages; version(unittest): import dub.test.base; /// Test of the PackageManager APIs unittest { scope dub = new TestDub((scope Filesystem root) { root.writeFile(TestDub.ProjectPath ~ "dub.json", `{ "name": "a", "dependencies": { "b:a": "~>1.0", "b:b": "~>1.0" } }`); root.writePackageFile("b", "1.0.0", `{ "name": "b", "version": "1.0.0", "subPackages": [ { "name": "a" }, { "name": "b" } ] }`); }); dub.loadPackage(); dub.upgrade(UpgradeOptions.select); assert(dub.project.hasAllDependencies(), "project has missing dependencies"); assert(dub.project.getDependency("b:b", true), "Missing 'b:b' dependency"); assert(dub.project.getDependency("b:a", true), "Missing 'b:a' dependency"); assert(dub.project.getDependency("no", true) is null, "Returned unexpected dependency"); assert(dub.packageManager().getPackage(PackageName("b:a"), Version("1.0.0")).name == "b:a"); assert(dub.packageManager().getPackage(PackageName("b:b"), Version("1.0.0")).name == "b:b"); assert(dub.packageManager().getPackage(PackageName("b"), Version("1.0.0")).name == "b"); assert(!dub.packageManager().getPackage(PackageName("b:b"), Version("1.1.0"))); } // https://github.com/dlang/dub/issues/2973 unittest { scope dub = new TestDub((scope Filesystem root) { root.writeFile(TestDub.ProjectPath ~ "dub.json", `{ "name": "a", "dependencies": { "b:a": "~>1.0" } }`); root.writeFile(TestDub.ProjectPath ~ "dub.selections.json", `{ "fileVersion": 1, "versions": { "b": "1.0.0" } }`); root.writePackageFile("b", "1.0.0", `{ "name": "b", "version": "1.0.0", "subPackages": [ { "name": "a" } ] }`); }); dub.loadPackage(); assert(dub.project.hasAllDependencies(), "project has missing dependencies"); assert(dub.project.getDependency("b:a", true), "Missing 'b:a' dependency"); } dub-1.40.0/source/dub/version_.d000066400000000000000000000000621477246567400164350ustar00rootroot00000000000000module dub.version_; enum dubVersion = 
"v1.40.0"; dub-1.40.0/test/000077500000000000000000000000001477246567400133535ustar00rootroot00000000000000dub-1.40.0/test/.gitignore000066400000000000000000000041221477246567400153420ustar00rootroot00000000000000*.a *.lib *.so *.dll 1-exec-simple/exec-simple 1-staticLib-simple/__test__library__ 2-dynLib-dep/dynlib-dep 2-sourceLib-dep/sourcelib-dep 2-staticLib-dep/staticlib-dep custom-unittest/custom-unittest path-subpackage-ref/test subpackage-ref/test subpackage-common-with-sourcefile-globbing/mypackage* version-spec/**/CMakeLists.txt version-spec/**/foo.cmake version-spec/**/foo /test_registry /issue_2051_running_unittests_from_dub_single_file_packages_fails /run-unittest test.log custom-source-main-bug487/custom-source-main-bug487 3-copyFiles/bin/ ignore-hidden-1/ignore-hidden-1 ignore-hidden-2/ignore-hidden-2 expected-import-path-output expected-string-import-path-output expected-describe-data-1-list-output expected-describe-data-2-dmd-output expected-issue616-output describe-project/dummy.dat describe-project/dummy-dep1.dat */main/main */*test-library */*test-application */exec-simple issue1474/ext/fortytwo.d issue2452/ext/fortytwo.d cov-ctfe/test issue1003-check-empty-ld-flags/issue1003-empty-ld-flags issue1053-extra-files-visuald/LICENSE.txt issue1053-extra-files-visuald/README.txt issue1053-extra-files-visuald/extra_files.sln issue1117-extra-dependency-files/test issue1396-pre-post-run-commands/test issue1396-pre-post-run-commands/test.txt issue1477-subpackage-visuald-paths/library_subpackage_a.sln issue1477-subpackage-visuald-paths/sub/subpackage_a/subpackage_a.sln issue1504-envvar-in-path/test issue1773-lint/report.json issue2085-target-none-visuald/root.sln issue2086-copyfiles-subpackage-targetpath/sub/sub issue2086-copyfiles-subpackage-targetpath/sub/to_be_deployed.txt issue2262-exact-cached-version-match/testproj issue361-optional-deps/cmp.tmp issue838-custom-cache-paths/test issue97-targettype-none/a/issue97-targettype-none_a 
issue97-targettype-none/b/issue97-targettype-none_b issue990-download-optional-selected/b output-1504.txt version-filters-diamond/version-filters-diamond version-filters-none/version-filters-none version-filters-source-dep/version-filters-source-dep version-filters/version-filters version-spec/newfoo/foo-test-application version-spec/oldfoo/foo-test-application dub-1.40.0/test/0-init-fail-json.script.d000066400000000000000000000016651477246567400200130ustar00rootroot00000000000000/+ dub.sdl: name "0-init-fail-json" dependency "common" path="./common" +/ module _0_init_fail_json; import std.file : exists, remove; import std.path : buildPath; import std.process : environment, spawnProcess, wait; import common; int main() { enum packname = "0-init-fail-pack"; enum deps = "logger PACKAGE_DONT_EXIST"; // would be very unlucky if it does exist... auto dub = environment.get("DUB"); if (!dub.length) die(`Environment variable "DUB" must be defined to run the tests.`); //** if $$DUB init -n $packname $deps -f json 2>/dev/null; then if (!spawnProcess([dub, "init", "-n", packname, deps, "-f", "json"]).wait) die("Init with unknown non-existing dependency expected to fail"); //** if [ -e $packname/dub.json ]; then # package is there, it should have failed const filepath = buildPath(packname, "dub.json"); if (filepath.exists) { remove(packname); die(filepath ~ " was not created"); } return 0; } dub-1.40.0/test/0-init-fail.sh000077500000000000000000000007471477246567400157330ustar00rootroot00000000000000#!/usr/bin/env bash . $(dirname "${BASH_SOURCE[0]}")/common.sh packname="0-init-fail-pack" deps="logger PACKAGE_DONT_EXIST" # would be very unlucky if it does exist... 
if $DUB init -n $packname $deps 2>/dev/null; then die $LINENO 'Init with unknown non-existing dependency expected to fail' fi function cleanup { rm -rf $packname } if [ -e $packname/dub.sdl ]; then # package is there, it should have failed cleanup die $LINENO "$packname/dub.sdl was not created" fi dub-1.40.0/test/0-init-interactive.default_name.dub.sdl000066400000000000000000000001351477246567400226650ustar00rootroot00000000000000name "0-init-interactive" description "desc" authors "author" copyright "copy" license "gpl" dub-1.40.0/test/0-init-interactive.dub.json000066400000000000000000000001611477246567400204300ustar00rootroot00000000000000{ "authors": [ "author" ], "copyright": "copy", "description": "desc", "license": "gpl", "name": "test" }dub-1.40.0/test/0-init-interactive.dub.sdl000066400000000000000000000001171477246567400202420ustar00rootroot00000000000000name "test" description "desc" authors "author" copyright "copy" license "gpl" dub-1.40.0/test/0-init-interactive.license_gpl3.dub.sdl000066400000000000000000000001301477246567400226030ustar00rootroot00000000000000name "test" description "desc" authors "author" copyright "copy" license "GPL-3.0-only" dub-1.40.0/test/0-init-interactive.license_mpl2.dub.sdl000066400000000000000000000001231477246567400226120ustar00rootroot00000000000000name "test" description "desc" authors "author" copyright "copy" license "MPL-2.0" dub-1.40.0/test/0-init-interactive.license_proprietary.dub.sdl000066400000000000000000000001271477246567400243240ustar00rootroot00000000000000name "test" description "desc" authors "author" copyright "copy" license "proprietary" dub-1.40.0/test/0-init-interactive.sh000077500000000000000000000034571477246567400173360ustar00rootroot00000000000000#!/usr/bin/env bash . 
$(dirname "${BASH_SOURCE[0]}")/common.sh packname="0-init-interactive" function cleanup { rm -rf $packname } function runTest { local inp=$1 local comp=$2 local dub_ext=${comp##*.} local outp=$(echo -e $inp | $DUB init $packname) if [ ! -e $packname/dub.$dub_ext ]; then # it failed cleanup die $LINENO "No dub.$dub_ext file has been generated for test $comp with input '$inp'. Output: $outp" fi if ! diff $packname/dub.$dub_ext "$CURR_DIR"/$comp; then cleanup die $LINENO "Contents of generated dub.$dub_ext not as expected." fi cleanup } # sdl package format runTest '1\ntest\ndesc\nauthor\ngpl\ncopy\n\n' 0-init-interactive.dub.sdl # select package format out of bounds runTest '3\n1\ntest\ndesc\nauthor\ngpl\ncopy\n\n' 0-init-interactive.dub.sdl # select package format not numeric, but in list runTest 'sdl\ntest\ndesc\nauthor\ngpl\ncopy\n\n' 0-init-interactive.dub.sdl # selected value not numeric and not in list runTest 'sdlf\n1\ntest\ndesc\nauthor\ngpl\ncopy\n\n' 0-init-interactive.dub.sdl # default name runTest '1\n\ndesc\nauthor\ngpl\ncopy\n\n' 0-init-interactive.default_name.dub.sdl # json package format runTest '2\ntest\ndesc\nauthor\ngpl\ncopy\n\n' 0-init-interactive.dub.json # default package format runTest '\ntest\ndesc\nauthor\ngpl\ncopy\n\n' 0-init-interactive.dub.json # select license runTest '1\ntest\ndesc\nauthor\n6\n3\ncopy\n\n' 0-init-interactive.license_gpl3.dub.sdl # select license (with description) runTest '1\ntest\ndesc\nauthor\n9\n3\ncopy\n\n' 0-init-interactive.license_mpl2.dub.sdl # select license out of bounds runTest '1\ntest\ndesc\nauthor\n21\n6\n3\ncopy\n\n' 0-init-interactive.license_gpl3.dub.sdl # default license runTest '1\ntest\ndesc\nauthor\n\ncopy\n\n' 0-init-interactive.license_proprietary.dub.sdl dub-1.40.0/test/0-init-multi-json.sh000077500000000000000000000012311477246567400171060ustar00rootroot00000000000000#!/usr/bin/env bash . 
$(dirname "${BASH_SOURCE[0]}")/common.sh packname="0-init-multi-pack" deps="openssl logger" type="vibe.d" $DUB init -n $packname $deps --type=$type -f json function cleanup { rm -rf $packname } if [ ! -e $packname/dub.json ]; then die $LINENO '$packname/dub.json not created' else # check if resulting dub.json has all dependencies in tow deps="$deps vibe-d"; IFS=" " read -a arr <<< "$deps" for ele in "${arr[@]}" do if [ `grep -c "$ele" $packname/dub.json` -ne 1 ]; then #something went wrong cleanup die $LINENO "$ele not in $packname/dub.json" fi done cleanup fi dub-1.40.0/test/0-init-multi.sh000077500000000000000000000012531477246567400161430ustar00rootroot00000000000000#!/usr/bin/env bash . $(dirname "${BASH_SOURCE[0]}")/common.sh packname="0-init-multi-pack" deps="openssl logger" type="vibe.d" $DUB init -n $packname $deps --type=$type --format sdl function cleanup { rm -rf $packname } if [ ! -e $packname/dub.sdl ]; then cleanup die $LINENO 'No dub.sdl file has been generated.' else # check if resulting dub.sdl has all dependencies in tow deps="$deps vibe-d"; IFS=" " read -a arr <<< "$deps" for ele in "${arr[@]}" do if [ `grep -c "$ele" $packname/dub.sdl` -ne 1 ]; then #something went wrong cleanup die $LINENO "$ele not in $packname/dub.sdl" fi done cleanup fi dub-1.40.0/test/0-init-simple-json.sh000077500000000000000000000004351477246567400172520ustar00rootroot00000000000000#!/usr/bin/env bash . $(dirname "${BASH_SOURCE[0]}")/common.sh packname="0-init-simple-pack" $DUB init -n $packname -f json function cleanup { rm -rf $packname } if [ ! -e $packname/dub.json ]; then cleanup die $LINENO 'No dub.json file has been generated.' fi cleanup dub-1.40.0/test/0-init-simple.sh000077500000000000000000000004541477246567400163040ustar00rootroot00000000000000#!/usr/bin/env bash . $(dirname "${BASH_SOURCE[0]}")/common.sh packname="0-init-simple-pack" $DUB init -n $packname --format sdl function cleanup { rm -rf $packname } if [ ! 
-e $packname/dub.sdl ]; then # it failed cleanup die $LINENO 'No dub.sdl file has been generated.' fi cleanup dub-1.40.0/test/1-dynLib-simple/000077500000000000000000000000001477246567400162215ustar00rootroot00000000000000dub-1.40.0/test/1-dynLib-simple/.no_build_gdc000066400000000000000000000000001477246567400206200ustar00rootroot00000000000000dub-1.40.0/test/1-dynLib-simple/.no_run000066400000000000000000000000001477246567400175100ustar00rootroot00000000000000dub-1.40.0/test/1-dynLib-simple/dub.json000066400000000000000000000001041477246567400176610ustar00rootroot00000000000000{ "name": "dynlib-simple", "targetType": "dynamicLibrary" } dub-1.40.0/test/1-dynLib-simple/source/000077500000000000000000000000001477246567400175215ustar00rootroot00000000000000dub-1.40.0/test/1-dynLib-simple/source/dynlib/000077500000000000000000000000001477246567400210025ustar00rootroot00000000000000dub-1.40.0/test/1-dynLib-simple/source/dynlib/app.d000066400000000000000000000003321477246567400217250ustar00rootroot00000000000000module dynlib.app; import std.stdio; version (unittest) {} else version (Windows) version (DigitalMars) { import core.sys.windows.dll; mixin SimpleDllMain; } export void entry() { writeln(__FUNCTION__); } dub-1.40.0/test/1-exec-simple-package-json/000077500000000000000000000000001477246567400202645ustar00rootroot00000000000000dub-1.40.0/test/1-exec-simple-package-json/package.json000066400000000000000000000000761477246567400225550ustar00rootroot00000000000000{ "name": "exec-simple", "targetType": "executable" } dub-1.40.0/test/1-exec-simple-package-json/source/000077500000000000000000000000001477246567400215645ustar00rootroot00000000000000dub-1.40.0/test/1-exec-simple-package-json/source/app.d000066400000000000000000000000761477246567400225140ustar00rootroot00000000000000import std.stdio; void main() { writeln(__FUNCTION__); } 
dub-1.40.0/test/1-exec-simple/000077500000000000000000000000001477246567400157245ustar00rootroot00000000000000dub-1.40.0/test/1-exec-simple/dub.json000066400000000000000000000000761477246567400173740ustar00rootroot00000000000000{ "name": "exec-simple", "targetType": "executable" } dub-1.40.0/test/1-exec-simple/source/000077500000000000000000000000001477246567400172245ustar00rootroot00000000000000dub-1.40.0/test/1-exec-simple/source/app.d000066400000000000000000000000761477246567400201540ustar00rootroot00000000000000import std.stdio; void main() { writeln(__FUNCTION__); } dub-1.40.0/test/1-sourceLib-simple/000077500000000000000000000000001477246567400167275ustar00rootroot00000000000000dub-1.40.0/test/1-sourceLib-simple/.no_build000066400000000000000000000000001477246567400205110ustar00rootroot00000000000000dub-1.40.0/test/1-sourceLib-simple/dub.json000066400000000000000000000001061477246567400203710ustar00rootroot00000000000000{ "name": "sourcelib-simple", "targetType": "sourceLibrary" } dub-1.40.0/test/1-sourceLib-simple/source/000077500000000000000000000000001477246567400202275ustar00rootroot00000000000000dub-1.40.0/test/1-sourceLib-simple/source/sourcelib/000077500000000000000000000000001477246567400222165ustar00rootroot00000000000000dub-1.40.0/test/1-sourceLib-simple/source/sourcelib/app.d000066400000000000000000000001251477246567400231410ustar00rootroot00000000000000module sourcelib.app; import std.stdio; void entry() { writeln(__FUNCTION__); } dub-1.40.0/test/1-staticLib-simple/000077500000000000000000000000001477246567400167165ustar00rootroot00000000000000dub-1.40.0/test/1-staticLib-simple/.no_run000066400000000000000000000000001477246567400202050ustar00rootroot00000000000000dub-1.40.0/test/1-staticLib-simple/dub.json000066400000000000000000000001061477246567400203600ustar00rootroot00000000000000{ "name": "staticlib-simple", "targetType": "staticLibrary" } 
dub-1.40.0/test/1-staticLib-simple/source/000077500000000000000000000000001477246567400202165ustar00rootroot00000000000000dub-1.40.0/test/1-staticLib-simple/source/staticlib/000077500000000000000000000000001477246567400221745ustar00rootroot00000000000000dub-1.40.0/test/1-staticLib-simple/source/staticlib/app.d000066400000000000000000000001251477246567400231170ustar00rootroot00000000000000module staticlib.app; import std.stdio; void entry() { writeln(__FUNCTION__); } dub-1.40.0/test/2-dynLib-dep/000077500000000000000000000000001477246567400155015ustar00rootroot00000000000000dub-1.40.0/test/2-dynLib-dep/.no_build_windows000066400000000000000000000000351477246567400210450ustar00rootroot00000000000000# workaround for Issue 23177 dub-1.40.0/test/2-dynLib-dep/dub.json000066400000000000000000000004051477246567400171450ustar00rootroot00000000000000{ "name": "dynlib-dep", "dependencies": { "dynlib-simple": { "path": "../1-dynLib-simple/" } }, "lflags-linux": ["-rpath", "$$ORIGIN"], "lflags-darwin": ["-rpath", "@executable_path"], "dflags-ldc": ["-link-defaultlib-shared"] } dub-1.40.0/test/2-dynLib-dep/source/000077500000000000000000000000001477246567400170015ustar00rootroot00000000000000dub-1.40.0/test/2-dynLib-dep/source/app.d000066400000000000000000000000751477246567400177300ustar00rootroot00000000000000module app; import dynlib.app; void main() { entry(); } dub-1.40.0/test/2-dynLib-with-staticLib-dep/000077500000000000000000000000001477246567400203665ustar00rootroot00000000000000dub-1.40.0/test/2-dynLib-with-staticLib-dep/.no_build_gdc000066400000000000000000000000001477246567400227650ustar00rootroot00000000000000dub-1.40.0/test/2-dynLib-with-staticLib-dep/.no_run000066400000000000000000000000001477246567400216550ustar00rootroot00000000000000dub-1.40.0/test/2-dynLib-with-staticLib-dep/dub.json000066400000000000000000000002561477246567400220360ustar00rootroot00000000000000{ "name": "dynlib-with-staticlib-dep", "targetType": "dynamicLibrary", "dependencies": { 
"staticlib-simple": { "path": "../1-staticLib-simple/" } } } dub-1.40.0/test/2-dynLib-with-staticLib-dep/source/000077500000000000000000000000001477246567400216665ustar00rootroot00000000000000dub-1.40.0/test/2-dynLib-with-staticLib-dep/source/dynlib/000077500000000000000000000000001477246567400231475ustar00rootroot00000000000000dub-1.40.0/test/2-dynLib-with-staticLib-dep/source/dynlib/app.d000066400000000000000000000003311477246567400240710ustar00rootroot00000000000000module dynlib.app; import std.stdio; import staticlib.app; version (unittest) {} else version (Windows) version (DigitalMars) { import core.sys.windows.dll; mixin SimpleDllMain; } void foo() { entry(); } dub-1.40.0/test/2-sourceLib-dep/000077500000000000000000000000001477246567400162075ustar00rootroot00000000000000dub-1.40.0/test/2-sourceLib-dep/dub.json000066400000000000000000000002661477246567400176600ustar00rootroot00000000000000{ "name": "sourcelib-dep", "description": "Testing sourceLibrary dependency.", "dependencies": { "sourcelib-simple": { "path": "../1-sourceLib-simple/" } } } dub-1.40.0/test/2-sourceLib-dep/source/000077500000000000000000000000001477246567400175075ustar00rootroot00000000000000dub-1.40.0/test/2-sourceLib-dep/source/app.d000066400000000000000000000001001477246567400204230ustar00rootroot00000000000000module app; import sourcelib.app; void main() { entry(); } dub-1.40.0/test/2-staticLib-dep/000077500000000000000000000000001477246567400161765ustar00rootroot00000000000000dub-1.40.0/test/2-staticLib-dep/dub.json000066400000000000000000000002661477246567400176470ustar00rootroot00000000000000{ "name": "staticlib-dep", "description": "Testing staticLibrary dependency.", "dependencies": { "staticlib-simple": { "path": "../1-staticLib-simple/" } } } 
dub-1.40.0/test/2-staticLib-dep/source/000077500000000000000000000000001477246567400174765ustar00rootroot00000000000000dub-1.40.0/test/2-staticLib-dep/source/app.d000066400000000000000000000001001477246567400204120ustar00rootroot00000000000000module app; import staticlib.app; void main() { entry(); } dub-1.40.0/test/3-copyFiles/000077500000000000000000000000001477246567400154505ustar00rootroot00000000000000dub-1.40.0/test/3-copyFiles/.no_test000066400000000000000000000000001477246567400171120ustar00rootroot00000000000000dub-1.40.0/test/3-copyFiles/data/000077500000000000000000000000001477246567400163615ustar00rootroot00000000000000dub-1.40.0/test/3-copyFiles/data/file_to_copy.txt000066400000000000000000000000311477246567400215670ustar00rootroot00000000000000file_to_copy.txt content dub-1.40.0/test/3-copyFiles/data/file_to_copy_mask1.txt000066400000000000000000000000361477246567400226700ustar00rootroot00000000000000file_to_copy_mask1.txt contentdub-1.40.0/test/3-copyFiles/data/file_to_copy_mask2.txt000066400000000000000000000000371477246567400226720ustar00rootroot00000000000000file_to_copy_mask2.txt content dub-1.40.0/test/3-copyFiles/data/res/000077500000000000000000000000001477246567400171525ustar00rootroot00000000000000dub-1.40.0/test/3-copyFiles/data/res/.nocopy/000077500000000000000000000000001477246567400205375ustar00rootroot00000000000000dub-1.40.0/test/3-copyFiles/data/res/.nocopy/file_inside_dot_prefixed_dir.txt000066400000000000000000000000151477246567400271400ustar00rootroot00000000000000some content dub-1.40.0/test/3-copyFiles/data/res/hdpi/000077500000000000000000000000001477246567400200765ustar00rootroot00000000000000dub-1.40.0/test/3-copyFiles/data/res/hdpi/file1.txt000066400000000000000000000000231477246567400216320ustar00rootroot00000000000000hdpi/file1 content dub-1.40.0/test/3-copyFiles/data/res/hdpi/file2.txt000066400000000000000000000000231477246567400216330ustar00rootroot00000000000000hdpi/file2 content 
dub-1.40.0/test/3-copyFiles/data/res/hdpi/file3.txt000066400000000000000000000000231477246567400216340ustar00rootroot00000000000000hdpi/file3 content dub-1.40.0/test/3-copyFiles/data/res/hdpi/nested_dir/000077500000000000000000000000001477246567400222165ustar00rootroot00000000000000dub-1.40.0/test/3-copyFiles/data/res/hdpi/nested_dir/nested_file.txt000066400000000000000000000000501477246567400252330ustar00rootroot00000000000000hdpi/nested_dir/nested_file.txt content dub-1.40.0/test/3-copyFiles/data/res/i18n/000077500000000000000000000000001477246567400177315ustar00rootroot00000000000000dub-1.40.0/test/3-copyFiles/data/res/i18n/resource_en.txt000066400000000000000000000000301477246567400227740ustar00rootroot00000000000000i18n - english resourcesdub-1.40.0/test/3-copyFiles/data/res/i18n/resource_fr.txt000066400000000000000000000000301477246567400230010ustar00rootroot00000000000000i18n - french resources dub-1.40.0/test/3-copyFiles/data/res/ldpi/000077500000000000000000000000001477246567400201025ustar00rootroot00000000000000dub-1.40.0/test/3-copyFiles/data/res/ldpi/file1.txt000066400000000000000000000000231477246567400216360ustar00rootroot00000000000000ldpi/file1 content dub-1.40.0/test/3-copyFiles/data/res/ldpi/file2.txt000066400000000000000000000000231477246567400216370ustar00rootroot00000000000000ldpi/file2 content dub-1.40.0/test/3-copyFiles/data/res/ldpi/file3.txt000066400000000000000000000000231477246567400216400ustar00rootroot00000000000000ldpi/file3 content dub-1.40.0/test/3-copyFiles/data/res/mdpi/000077500000000000000000000000001477246567400201035ustar00rootroot00000000000000dub-1.40.0/test/3-copyFiles/data/res/mdpi/file1.txt000066400000000000000000000000231477246567400216370ustar00rootroot00000000000000mdpi/file1 content dub-1.40.0/test/3-copyFiles/data/res/mdpi/file2.txt000066400000000000000000000000231477246567400216400ustar00rootroot00000000000000mdpi/file2 content 
dub-1.40.0/test/3-copyFiles/data/res/mdpi/file3.txt000066400000000000000000000000231477246567400216410ustar00rootroot00000000000000mdpi/file3 content dub-1.40.0/test/3-copyFiles/dub.json000066400000000000000000000003101477246567400171070ustar00rootroot00000000000000{ "name": "copyfiles-test", "targetType": "executable", "targetPath": "bin", "copyFiles": [ "data/res", "data/res/*dpi", "data/file_to_copy.txt", "data/file_to_copy_mask*.txt"] } dub-1.40.0/test/3-copyFiles/source/000077500000000000000000000000001477246567400167505ustar00rootroot00000000000000dub-1.40.0/test/3-copyFiles/source/app.d000066400000000000000000000026051477246567400177000ustar00rootroot00000000000000import std.algorithm, std.array, std.file, std.path; void main(string[] args) { immutable root = args[0].dirName; // get the bin dir immutable pfx = root.length + "/".length; auto actualFiles = dirEntries(root, SpanMode.breadth).map!(n => n[pfx .. $]).array.sort().release; string[] expectedFiles; version (Windows) { expectedFiles ~= "copyfiles-test.exe"; expectedFiles ~= "copyfiles-test.pdb"; } else { expectedFiles ~= "copyfiles-test"; } expectedFiles ~= [ "file_to_copy.txt", "file_to_copy_mask1.txt", "file_to_copy_mask2.txt", "hdpi", "hdpi/file1.txt", "hdpi/file2.txt", "hdpi/file3.txt", "hdpi/nested_dir", "hdpi/nested_dir/nested_file.txt", "ldpi", "ldpi/file1.txt", "ldpi/file2.txt", "ldpi/file3.txt", "mdpi", "mdpi/file1.txt", "mdpi/file2.txt", "mdpi/file3.txt", "res", "res/.nocopy", "res/.nocopy/file_inside_dot_prefixed_dir.txt", "res/hdpi", "res/hdpi/file1.txt", "res/hdpi/file2.txt", "res/hdpi/file3.txt", "res/hdpi/nested_dir", "res/hdpi/nested_dir/nested_file.txt", "res/i18n", "res/i18n/resource_en.txt", "res/i18n/resource_fr.txt", "res/ldpi", "res/ldpi/file1.txt", "res/ldpi/file2.txt", "res/ldpi/file3.txt", "res/mdpi", "res/mdpi/file1.txt", "res/mdpi/file2.txt", "res/mdpi/file3.txt" ]; version (Windows) { expectedFiles = expectedFiles.map!(f => f.replace('/', '\\')).array; } assert(actualFiles 
== expectedFiles, actualFiles.join(", ")); } dub-1.40.0/test/4-describe-data-1-list.sh000077500000000000000000000140551477246567400176560ustar00rootroot00000000000000#!/usr/bin/env bash . $(dirname "${BASH_SOURCE[0]}")/common.sh cd "$CURR_DIR"/describe-project temp_file=$(mktemp $(basename $0).XXXXXX) function cleanup { rm $temp_file } trap cleanup EXIT if ! $DUB describe --compiler=$DC --filter-versions \ --data-list \ '--data= target-type , target-path , target-name ' \ '--data= working-directory ' \ --data=main-source-file \ '--data=dflags,lflags' \ '--data=libs, linker-files' \ '--data=source-files, copy-files' \ '--data=versions, debug-versions' \ --data=import-paths \ --data=string-import-paths \ --data=import-files \ --data=string-import-files \ --data=pre-generate-commands \ --data=post-generate-commands \ --data=pre-build-commands \ --data=post-build-commands \ '--data=requirements, options' \ --data=default-config \ --data=configs \ --data=default-build \ --data=builds \ > "$temp_file"; then die $LINENO 'Printing project data failed!' fi # Create the expected output path file to compare against. 
expected_file="$CURR_DIR/expected-describe-data-1-list-output" # --data=target-type echo "executable" > "$expected_file" echo >> "$expected_file" # --data=target-path echo "$CURR_DIR/describe-project/" >> "$expected_file" echo >> "$expected_file" # --data=target-name echo "describe-project" >> "$expected_file" echo >> "$expected_file" # --data=working-directory echo "$CURR_DIR/describe-project/" >> "$expected_file" echo >> "$expected_file" # --data=main-source-file echo "$CURR_DIR/describe-project/src/dummy.d" >> "$expected_file" echo >> "$expected_file" # --data=dflags echo "--some-dflag" >> "$expected_file" echo "--another-dflag" >> "$expected_file" echo >> "$expected_file" # --data=lflags echo "--some-lflag" >> "$expected_file" echo "--another-lflag" >> "$expected_file" echo >> "$expected_file" # --data=libs echo "somelib" >> "$expected_file" echo "anotherlib" >> "$expected_file" echo >> "$expected_file" # --data=linker-files echo "$CURR_DIR/describe-dependency-3/libdescribe-dependency-3.a" >> "$expected_file" echo "$CURR_DIR/describe-project/some.a" >> "$expected_file" echo "$CURR_DIR/describe-dependency-1/dep.a" >> "$expected_file" echo >> "$expected_file" # --data=source-files echo "$CURR_DIR/describe-project/src/dummy.d" >> "$expected_file" echo "$CURR_DIR/describe-dependency-1/source/dummy.d" >> "$expected_file" echo >> "$expected_file" # --data=copy-files echo "$CURR_DIR/describe-project/data/dummy.dat" >> "$expected_file" echo "$CURR_DIR/describe-dependency-1/data/*" >> "$expected_file" echo >> "$expected_file" # --data=versions echo "someVerIdent" >> "$expected_file" echo "anotherVerIdent" >> "$expected_file" echo "Have_describe_dependency_3" >> "$expected_file" echo >> "$expected_file" # --data=debug-versions echo "someDebugVerIdent" >> "$expected_file" echo "anotherDebugVerIdent" >> "$expected_file" echo >> "$expected_file" # --data=import-paths echo "$CURR_DIR/describe-project/src/" >> "$expected_file" echo "$CURR_DIR/describe-dependency-1/source/" >> 
"$expected_file" echo "$CURR_DIR/describe-dependency-2/some-path/" >> "$expected_file" echo "$CURR_DIR/describe-dependency-3/dep3-source/" >> "$expected_file" echo >> "$expected_file" # --data=string-import-paths echo "$CURR_DIR/describe-project/views/" >> "$expected_file" echo "$CURR_DIR/describe-dependency-2/some-extra-string-import-path/" >> "$expected_file" echo "$CURR_DIR/describe-dependency-3/dep3-string-import-path/" >> "$expected_file" echo >> "$expected_file" # --data=import-files echo "$CURR_DIR/describe-dependency-2/some-path/dummy.d" >> "$expected_file" echo >> "$expected_file" # --data=string-import-files echo "$CURR_DIR/describe-project/views/dummy.d" >> "$expected_file" #echo "$CURR_DIR/describe-dependency-2/some-extra-string-import-path/dummy.d" >> "$expected_file" # This is missing from result, is that a bug? echo >> "$expected_file" # --data=pre-generate-commands echo "./do-preGenerateCommands.sh" >> "$expected_file" echo "../describe-dependency-1/dependency-preGenerateCommands.sh" >> "$expected_file" echo >> "$expected_file" # --data=post-generate-commands echo "./do-postGenerateCommands.sh" >> "$expected_file" echo "../describe-dependency-1/dependency-postGenerateCommands.sh" >> "$expected_file" echo >> "$expected_file" # --data=pre-build-commands echo "./do-preBuildCommands.sh" >> "$expected_file" echo "../describe-dependency-1/dependency-preBuildCommands.sh" >> "$expected_file" echo >> "$expected_file" # --data=post-build-commands echo "./do-postBuildCommands.sh" >> "$expected_file" echo "../describe-dependency-1/dependency-postBuildCommands.sh" >> "$expected_file" echo >> "$expected_file" # --data=requirements echo "allowWarnings" >> "$expected_file" echo "disallowInlining" >> "$expected_file" echo "requireContracts" >> "$expected_file" echo >> "$expected_file" # --data=options echo "debugMode" >> "$expected_file" # releaseMode is not included, even though it's specified, because the requireContracts requirement drops it echo "debugInfo" >> 
"$expected_file" echo "stackStomping" >> "$expected_file" echo "warnings" >> "$expected_file" echo >> "$expected_file" # --data=default-config echo "my-project-config" >> "$expected_file" echo >> "$expected_file" # --data=configs echo "my-project-config" >> "$expected_file" echo >> "$expected_file" # --data=default-build echo "debug" >> "$expected_file" echo >> "$expected_file" # --data=builds echo "debug" >> "$expected_file" echo "plain" >> "$expected_file" echo "release" >> "$expected_file" echo "release-debug" >> "$expected_file" echo "release-nobounds" >> "$expected_file" echo "unittest" >> "$expected_file" echo "profile" >> "$expected_file" echo "profile-gc" >> "$expected_file" echo "docs" >> "$expected_file" echo "ddox" >> "$expected_file" echo "cov" >> "$expected_file" echo "cov-ctfe" >> "$expected_file" echo "unittest-cov" >> "$expected_file" echo "unittest-cov-ctfe" >> "$expected_file" echo "syntax" >> "$expected_file" # echo >> "$expected_file" if ! diff "$expected_file" "$temp_file"; then echo "Result:" cat "$temp_file" die $LINENO 'The project data did not match the expected output!' fi dub-1.40.0/test/4-describe-data-2-dmd.sh000077500000000000000000000064541477246567400174540ustar00rootroot00000000000000#!/usr/bin/env bash . $(dirname "${BASH_SOURCE[0]}")/common.sh if [ "${DC}" != "dmd" ]; then echo Skipping DMD-centric test on configuration that lacks DMD. exit fi cd "$CURR_DIR"/describe-project temp_file=$(mktemp $(basename $0).XXXXXX) function cleanup { rm $temp_file } trap cleanup EXIT if ! $DUB describe --compiler=$DC --filter-versions \ --data=main-source-file \ --data=dflags,lflags \ --data=libs,linker-files \ --data=source-files \ --data=versions \ --data=debug-versions \ --data=import-paths \ --data=string-import-paths \ --data=import-files \ --data=options \ > "$temp_file"; then die $LINENO 'Printing project data failed!' fi # Create the expected output path file to compare against. 
expected_file="$CURR_DIR/expected-describe-data-2-dmd-output" # check if escaping is required . "$CURR_DIR/4-describe-data-check-escape" # --data=main-source-file echo -n "$(escaped "$CURR_DIR/describe-project/src/dummy.d") " > "$expected_file" # --data=dflags echo -n "--some-dflag " >> "$expected_file" echo -n "--another-dflag " >> "$expected_file" # --data=lflags echo -n "-L--some-lflag " >> "$expected_file" echo -n "-L--another-lflag " >> "$expected_file" # --data=libs echo -n "-L-lsomelib " >> "$expected_file" echo -n "-L-lanotherlib " >> "$expected_file" # --data=linker-files echo -n "$(escaped "$CURR_DIR/describe-dependency-3/libdescribe-dependency-3.a") " >> "$expected_file" echo -n "$(escaped "$CURR_DIR/describe-project/some.a") " >> "$expected_file" echo -n "$(escaped "$CURR_DIR/describe-dependency-1/dep.a") " >> "$expected_file" # --data=source-files echo -n "$(escaped "$CURR_DIR/describe-project/src/dummy.d") " >> "$expected_file" echo -n "$(escaped "$CURR_DIR/describe-dependency-1/source/dummy.d") " >> "$expected_file" # --data=versions echo -n "-version=someVerIdent " >> "$expected_file" echo -n "-version=anotherVerIdent " >> "$expected_file" echo -n "-version=Have_describe_dependency_3 " >> "$expected_file" # --data=debug-versions echo -n "-debug=someDebugVerIdent " >> "$expected_file" echo -n "-debug=anotherDebugVerIdent " >> "$expected_file" # --data=import-paths echo -n "$(escaped "-I$CURR_DIR/describe-project/src/") " >> "$expected_file" echo -n "$(escaped "-I$CURR_DIR/describe-dependency-1/source/") " >> "$expected_file" echo -n "$(escaped "-I$CURR_DIR/describe-dependency-2/some-path/") " >> "$expected_file" echo -n "$(escaped "-I$CURR_DIR/describe-dependency-3/dep3-source/") " >> "$expected_file" # --data=string-import-paths echo -n "$(escaped "-J$CURR_DIR/describe-project/views/") " >> "$expected_file" echo -n "$(escaped "-J$CURR_DIR/describe-dependency-2/some-extra-string-import-path/") " >> "$expected_file" echo -n "$(escaped 
"-J$CURR_DIR/describe-dependency-3/dep3-string-import-path/") " >> "$expected_file" # --data=import-files echo -n "$(escaped "$CURR_DIR/describe-dependency-2/some-path/dummy.d") " >> "$expected_file" # --data=options echo -n "-debug " >> "$expected_file" # releaseMode is not included, even though it's specified, because the requireContracts requirement drops it echo -n "-g " >> "$expected_file" echo -n "-gx " >> "$expected_file" echo -n "-wi" >> "$expected_file" echo "" >> "$expected_file" if ! diff "$expected_file" "$temp_file"; then die $LINENO 'The project data did not match the expected output!' fi dub-1.40.0/test/4-describe-data-3-zero-delim.sh000077500000000000000000000075441477246567400207610ustar00rootroot00000000000000#!/usr/bin/env bash . $(dirname "${BASH_SOURCE[0]}")/common.sh cd "$CURR_DIR"/describe-project temp_file_normal=$(mktemp $(basename $0).XXXXXX) temp_file_zero_delim=$(mktemp $(basename $0).XXXXXX) function cleanup { rm $temp_file_normal rm $temp_file_zero_delim } trap cleanup EXIT # Test list-style project data if ! $DUB describe --compiler=$DC --data-list \ --data=target-type \ --data=target-path \ --data=target-name \ --data=working-directory \ --data=main-source-file \ --data=dflags \ --data=lflags \ --data=libs \ --data=linker-files \ --data=source-files \ --data=copy-files \ --data=versions \ --data=debug-versions \ --data=import-paths \ --data=string-import-paths \ --data=import-files \ --data=string-import-files \ --data=pre-generate-commands \ --data=post-generate-commands \ --data=pre-build-commands \ --data=post-build-commands \ --data=requirements \ --data=options \ > "$temp_file_normal"; then die $LINENO 'Printing list-style project data failed!' fi if ! 
$DUB describe --compiler=$DC --data-0 --data-list \ --data=target-type \ --data=target-path \ --data=target-name \ --data=working-directory \ --data=main-source-file \ --data=dflags \ --data=lflags \ --data=libs \ --data=linker-files \ --data=source-files \ --data=copy-files \ --data=versions \ --data=debug-versions \ --data=import-paths \ --data=string-import-paths \ --data=import-files \ --data=string-import-files \ --data=pre-generate-commands \ --data=post-generate-commands \ --data=pre-build-commands \ --data=post-build-commands \ --data=requirements \ --data=options \ | xargs -0 printf "%s\n" > "$temp_file_zero_delim"; then die $LINENO 'Printing null-delimited list-style project data failed!' fi if ! diff -b -B "$temp_file_normal" "$temp_file_zero_delim"; then die $LINENO 'The null-delimited list-style project data did not match the expected output!' fi # Test --import-paths if ! $DUB describe --compiler=$DC --import-paths \ > "$temp_file_normal"; then die $LINENO 'Printing --import-paths failed!' fi if ! $DUB describe --compiler=$DC --data-0 --import-paths \ | xargs -0 printf "%s\n" > "$temp_file_zero_delim"; then die $LINENO 'Printing null-delimited --import-paths failed!' fi if ! diff -b -B "$temp_file_normal" "$temp_file_zero_delim"; then die $LINENO 'The null-delimited --import-paths data did not match the expected output!' fi # DMD-only beyond this point if [ "${DC}" != "dmd" ]; then echo Skipping DMD-centric tests on configuration that lacks DMD. exit fi # Test dmd-style --data=versions if ! $DUB describe --compiler=$DC --data=versions \ > "$temp_file_normal"; then die $LINENO 'Printing dmd-style --data=versions failed!' fi if ! $DUB describe --compiler=$DC --data-0 --data=versions \ | xargs -0 printf "%s " > "$temp_file_zero_delim"; then die $LINENO 'Printing null-delimited dmd-style --data=versions failed!' fi if ! 
diff -b -B "$temp_file_normal" "$temp_file_zero_delim"; then die $LINENO 'The null-delimited dmd-style --data=versions did not match the expected output!' fi # check if escaping is required . "$CURR_DIR/4-describe-data-check-escape" # Test dmd-style --data=source-files if ! $DUB describe --compiler=$DC --data=source-files \ > "$temp_file_normal"; then die $LINENO 'Printing dmd-style --data=source-files failed!' fi if ! $DUB describe --compiler=$DC --data-0 --data=source-files \ | xargs -0 printf "$(escaped "%s") " > "$temp_file_zero_delim"; then die $LINENO 'Printing null-delimited dmd-style --data=source-files failed!' fi if ! diff -b -B "$temp_file_normal" "$temp_file_zero_delim"; then die $LINENO 'The null-delimited dmd-style --data=source-files did not match the expected output!' fi dub-1.40.0/test/4-describe-data-check-escape000066400000000000000000000006701477246567400204420ustar00rootroot00000000000000dmd_ver=$($DC --version | grep -Eo "v2\.[0-9][0-9][0-9].[0-9]") dmd_minor=$(echo $dmd_ver | grep -Eo "[0-9][0-9][0-9]") dmd_micro=${dmd_ver: -1} if [[ $dmd_minor$dmd_micro < 1022 || "$CURR_DIR" =~ [[:space:]] ]]; then echo "Expecting escaped paths" escape=1 else echo "Not expecting escaped paths" escape=0 fi function escaped { if [ $escape -eq 1 ]; then echo -n "'$1'" else echo -n "$1" fi } dub-1.40.0/test/4-describe-import-paths.sh000077500000000000000000000015641477246567400202660ustar00rootroot00000000000000#!/usr/bin/env bash . $(dirname "${BASH_SOURCE[0]}")/common.sh cd "$CURR_DIR"/describe-project temp_file=$(mktemp $(basename $0).XXXXXX) function cleanup { rm $temp_file } trap cleanup EXIT if ! $DUB describe --compiler=$DC --import-paths > "$temp_file"; then die $LINENO 'Printing import paths failed!' fi # Create the expected output path file to compare against. 
echo "$CURR_DIR/describe-project/src/" > "$CURR_DIR/expected-import-path-output" echo "$CURR_DIR/describe-dependency-1/source/" >> "$CURR_DIR/expected-import-path-output" echo "$CURR_DIR/describe-dependency-2/some-path/" >> "$CURR_DIR/expected-import-path-output" echo "$CURR_DIR/describe-dependency-3/dep3-source/" >> "$CURR_DIR/expected-import-path-output" if ! diff "$CURR_DIR"/expected-import-path-output "$temp_file"; then die $LINENO 'The import paths did not match the expected output!' fi dub-1.40.0/test/4-describe-json.sh000077500000000000000000000004641477246567400166060ustar00rootroot00000000000000#!/usr/bin/env bash . $(dirname "${BASH_SOURCE[0]}")/common.sh cd "$CURR_DIR"/describe-project temp_file=$(mktemp $(basename $0).XXXXXX) function cleanup { rm $temp_file } trap cleanup EXIT if ! $DUB describe --compiler=$DC > "$temp_file"; then die $LINENO 'Printing describe JSON failed!' fi dub-1.40.0/test/4-describe-string-import-paths.sh000077500000000000000000000015551477246567400215720ustar00rootroot00000000000000#!/usr/bin/env bash . $(dirname "${BASH_SOURCE[0]}")/common.sh cd "$CURR_DIR"/describe-project temp_file=$(mktemp $(basename $0).XXXXXX) function cleanup { rm $temp_file } trap cleanup EXIT if ! $DUB describe --compiler=$DC --string-import-paths > "$temp_file"; then die $LINENO 'Printing string import paths failed!' fi # Create the expected output path file to compare against. echo "$CURR_DIR/describe-project/views/" > "$CURR_DIR/expected-string-import-path-output" echo "$CURR_DIR/describe-dependency-2/some-extra-string-import-path/" >> "$CURR_DIR/expected-string-import-path-output" echo "$CURR_DIR/describe-dependency-3/dep3-string-import-path/" >> "$CURR_DIR/expected-string-import-path-output" if ! diff "$CURR_DIR"/expected-string-import-path-output "$temp_file"; then die $LINENO 'The string import paths did not match the expected output!' 
fi dub-1.40.0/test/5-convert-stdout.sh000077500000000000000000000006401477246567400170540ustar00rootroot00000000000000#!/usr/bin/env bash . $(dirname "${BASH_SOURCE[0]}")/common.sh cd ${CURR_DIR}/1-exec-simple EXPECTED="name \"exec-simple\" targetType \"executable\"" RESULT=`${DUB} convert -s -f sdl` if [ ! -f dub.json ]; then die $LINENO 'Package recipe got modified!' fi if [ -f dub.sdl ]; then die $LINENO 'An SDL recipe got written.' fi if [ "$RESULT" != "$EXPECTED" ]; then die $LINENO 'Unexpected SDLang output.' fi dub-1.40.0/test/5-convert.sh000077500000000000000000000012611477246567400155340ustar00rootroot00000000000000#!/usr/bin/env bash . $(dirname "${BASH_SOURCE[0]}")/common.sh cd "$CURR_DIR"/5-convert temp_file=$(mktemp $(basename $0).XXXXXX) function cleanup { rm $temp_file } trap cleanup EXIT cp dub.sdl dub.sdl.ref $DUB convert -f json if [ -f "dub.sdl" ]; then die $LINENO 'Old recipe file not removed.'; fi if [ ! -f "dub.json" ]; then die $LINENO 'New recipe file not created.'; fi $DUB convert -f sdl if [ -f "dub.json" ]; then die $LINENO 'Old recipe file not removed.'; fi if [ ! -f "dub.sdl" ]; then die $LINENO 'New recipe file not created.'; fi if ! diff "dub.sdl" "dub.sdl.ref"; then die $LINENO 'The project data did not match the expected output!' 
fi rm dub.sdl.ref dub-1.40.0/test/5-convert/000077500000000000000000000000001477246567400151755ustar00rootroot00000000000000dub-1.40.0/test/5-convert/.no_build000066400000000000000000000000021477246567400167610ustar00rootroot00000000000000 dub-1.40.0/test/5-convert/dub.sdl000066400000000000000000000022401477246567400164510ustar00rootroot00000000000000name "describe-dependency-1" version "~master" description "A test describe project" homepage "fake.com" authors "nobody" copyright "Copyright © 2015, nobody" license "BSD 2-clause" x:ddoxFilterArgs "dfa1" "dfa2" x:ddoxTool "ddoxtool" dependency "describe-dependency-1:sub1" version="*" targetType "sourceLibrary" subConfiguration "describe-dependency-1:sub1" "library" dflags "--another-dflag" lflags "--another-lflag" libs "anotherlib" sourceFiles "dep.lib" platform="windows" sourcePaths "source/" copyFiles "data/*" versions "anotherVerIdent" debugVersions "anotherDebugVerIdent" importPaths "source/" preGenerateCommands "../describe-dependency-1/dependency-preGenerateCommands.sh" platform="posix" postGenerateCommands "../describe-dependency-1/dependency-postGenerateCommands.sh" platform="posix" preBuildCommands "../describe-dependency-1/dependency-preBuildCommands.sh" platform="posix" postBuildCommands "../describe-dependency-1/dependency-postBuildCommands.sh" platform="posix" buildRequirements "requireContracts" buildOptions "stackStomping" configuration "my-dependency-1-config" { targetType "sourceLibrary" } subPackage { name "sub1" } subPackage { name "sub2" } dub-1.40.0/test/cache-generated-test-config.sh000077500000000000000000000056541477246567400211430ustar00rootroot00000000000000#!/usr/bin/env bash . 
$(dirname "${BASH_SOURCE[0]}")/common.sh cd ${CURR_DIR}/cache-generated-test-config rm -rf $HOME/.dub/cache/cache-generated-test-config/ DUB_CODE_CACHE_PATH="$HOME/.dub/cache/cache-generated-test-config/~master/code/" ## default test ${DUB} test --compiler=${DC} STAT="stat -c '%Y'" [[ "$OSTYPE" == "darwin"* ]] && STAT="stat -f '%m' -t '%Y'" EXECUTABLE_TIME="$(${STAT} cache-generated-test-config-test-library)" [ -z "$EXECUTABLE_TIME" ] && die $LINENO 'no EXECUTABLE_TIME was found' MAIN_TIME="$(${STAT} "$(ls $DUB_CODE_CACHE_PATH/*/dub_test_root.d)")" [ -z "$MAIN_TIME" ] && die $LINENO 'no MAIN_TIME was found' ${DUB} test --compiler=${DC} MAIN_FILES_COUNT=$(ls $DUB_CODE_CACHE_PATH/*/dub_test_root.d | wc -l) [ $MAIN_FILES_COUNT -ne 1 ] && die $LINENO 'DUB generated more then one main file' [ "$EXECUTABLE_TIME" != "$(${STAT} cache-generated-test-config-test-library)" ] && die $LINENO 'The executable has been rebuilt' [ "$MAIN_TIME" != "$(${STAT} "$(ls $DUB_CODE_CACHE_PATH/*/dub_test_root.d | head -n1)")" ] && die $LINENO 'The test main file has been rebuilt' ## test with empty DFLAGS environment variable DFLAGS="" ${DUB} test --compiler=${DC} STAT="stat -c '%Y'" [[ "$OSTYPE" == "darwin"* ]] && STAT="stat -f '%m' -t '%Y'" EXECUTABLE_TIME="$(${STAT} cache-generated-test-config-test-library)" [ -z "$EXECUTABLE_TIME" ] && die $LINENO 'no EXECUTABLE_TIME was found' MAIN_TIME="$(${STAT} "$(ls $DUB_CODE_CACHE_PATH/*-\$DFLAGS-*/dub_test_root.d)")" [ -z "$MAIN_TIME" ] && die $LINENO 'no MAIN_TIME was found' DFLAGS="" ${DUB} test --compiler=${DC} MAIN_FILES_COUNT=$(ls $DUB_CODE_CACHE_PATH/*-\$DFLAGS-*/dub_test_root.d | wc -l) [ $MAIN_FILES_COUNT -ne 1 ] && die $LINENO 'DUB generated more then one main file' [ "$EXECUTABLE_TIME" != "$(${STAT} cache-generated-test-config-test-library)" ] && die $LINENO 'The executable has been rebuilt' [ "$MAIN_TIME" != "$(${STAT} "$(ls $DUB_CODE_CACHE_PATH/*-\$DFLAGS-*/dub_test_root.d | head -n1)")" ] && die $LINENO 'The test main file has been 
rebuilt' ## test with DFLAGS environment variable DFLAGS="-g" ${DUB} test --compiler=${DC} STAT="stat -c '%Y'" [[ "$OSTYPE" == "darwin"* ]] && STAT="stat -f '%m' -t '%Y'" EXECUTABLE_TIME="$(${STAT} cache-generated-test-config-test-library)" [ -z "$EXECUTABLE_TIME" ] && die $LINENO 'no EXECUTABLE_TIME was found' MAIN_TIME="$(${STAT} "$(ls $DUB_CODE_CACHE_PATH/*-\$DFLAGS-*/dub_test_root.d)")" [ -z "$MAIN_TIME" ] && die $LINENO 'no MAIN_TIME was found' DFLAGS="-g" ${DUB} test --compiler=${DC} MAIN_FILES_COUNT=$(ls $DUB_CODE_CACHE_PATH/*-\$DFLAGS-*/dub_test_root.d | wc -l) [ $MAIN_FILES_COUNT -ne 1 ] && die $LINENO 'DUB generated more then one main file' [ "$EXECUTABLE_TIME" != "$(${STAT} cache-generated-test-config-test-library)" ] && die $LINENO 'The executable has been rebuilt' [ "$MAIN_TIME" != "$(${STAT} "$(ls $DUB_CODE_CACHE_PATH/*-\$DFLAGS-*/dub_test_root.d | head -n1)")" ] && die $LINENO 'The test main file has been rebuilt' exit 0 dub-1.40.0/test/cache-generated-test-config/000077500000000000000000000000001477246567400205725ustar00rootroot00000000000000dub-1.40.0/test/cache-generated-test-config/.no_build000066400000000000000000000000001477246567400223540ustar00rootroot00000000000000dub-1.40.0/test/cache-generated-test-config/.no_run000066400000000000000000000000001477246567400220610ustar00rootroot00000000000000dub-1.40.0/test/cache-generated-test-config/.no_test000066400000000000000000000000001477246567400222340ustar00rootroot00000000000000dub-1.40.0/test/cache-generated-test-config/dub.sdl000066400000000000000000000000761477246567400220530ustar00rootroot00000000000000name "cache-generated-test-config" targetType "staticLibrary"dub-1.40.0/test/cache-generated-test-config/source/000077500000000000000000000000001477246567400220725ustar00rootroot00000000000000dub-1.40.0/test/cache-generated-test-config/source/test.d000066400000000000000000000000541477246567400232150ustar00rootroot00000000000000module test; unittest { assert(true); 
}dub-1.40.0/test/colored-output.sh000077500000000000000000000022421477246567400166770ustar00rootroot00000000000000#!/usr/bin/env bash . $(dirname "${BASH_SOURCE[0]}")/common.sh cd ${CURR_DIR}/1-exec-simple # Test that --color=never disables colors correctly printf "Expecting 0: " ${DUB} build --color=never --compiler=${DC} 2>&1 | { ! \grep $'^\x1b\[' -c; } # Test that --color=auto detects no TTY correctly printf "Expecting 0: " ${DUB} build --color=auto --compiler=${DC} 2>&1 | { ! \grep $'^\x1b\[' -c; } # Test that no --color= has same behaviour as --color=auto printf "Expecting 0: " ${DUB} build --compiler=${DC} 2>&1 | { ! \grep $'^\x1b\[' -c; } # Test that --color=always enables colors in any case printf "Expecting non-0: " ${DUB} build --color=always --compiler=${DC} 2>&1 | \grep $'^\x1b\[' -c # Test forwarding to dmd flag -color # Test that --color=always set dmd flag -color printf "Expecting non-0: " ${DUB} build -v --color=always --compiler=${DC} -f 2>&1 | \grep '\-color' -c # Test that --color=never set no dmd flag printf "Expecting 0: " ${DUB} build -v --color=never --compiler=${DC} -f 2>&1 | { ! \grep '\-color' -c; } # Test that --color=auto set no dmd flag printf "Expecting 0: " ${DUB} build -v --color=auto --compiler=${DC} -f 2>&1 | { ! \grep '\-color' -c; } dub-1.40.0/test/common.sh000066400000000000000000000032051477246567400151770ustar00rootroot00000000000000SOURCE_FILE=$_ set -ueEo pipefail function log() { echo -e "\033[0;33m[INFO] $@\033[0m" echo "[INFO] $@" >> $(dirname "${BASH_SOURCE[0]}")/test.log } # lineno[, msg] function die() { local line=$1 local msg=${2:-command failed} local supplemental=${3:-} echo "[ERROR] $SOURCE_FILE:$1 $msg" | tee -a $(dirname "${BASH_SOURCE[0]}")/test.log | cat 1>&2 if [ ! 
-z "$supplemental" ]; then echo "$supplemental" | >&2 sed 's|^| |g' fi exit 1 } trap 'die $LINENO' ERR # Get a random port for the test to use # This isn't foolproof but should fail less than handcrafted approaches function getRandomPort() { # Get the PID of this script as a way to get a random port, # and make sure the value is > 1024, as ports < 1024 are priviledged # and require root priviledges. # We also need to make sure the value is not > ushort.max PORT=$(($$ % 65536)) if [ $PORT -le 1024 ]; then PORT=$(($PORT + 1025)) fi echo $PORT } # Emulate GNU readlink's behavior on non-GNU readlink (e.g. MacOSX / BSD's) # Credit to https://stackoverflow.com/a/1116890 function gnureadlink() { TARGET_FILE=$1 cd `dirname $TARGET_FILE` TARGET_FILE=`basename $TARGET_FILE` # Iterate down a (possible) chain of symlinks while [ -L "$TARGET_FILE" ] do TARGET_FILE=`readlink $TARGET_FILE` cd `dirname $TARGET_FILE` TARGET_FILE=`basename $TARGET_FILE` done # Compute the canonicalized name by finding the physical path # for the directory we're in and appending the target file. 
PHYS_DIR=`pwd -P` RESULT=$PHYS_DIR/$TARGET_FILE echo $RESULT } dub-1.40.0/test/common/000077500000000000000000000000001477246567400146435ustar00rootroot00000000000000dub-1.40.0/test/common/.no_build000066400000000000000000000000001477246567400164250ustar00rootroot00000000000000dub-1.40.0/test/common/.no_run000066400000000000000000000000001477246567400161320ustar00rootroot00000000000000dub-1.40.0/test/common/.no_test000066400000000000000000000000001477246567400163050ustar00rootroot00000000000000dub-1.40.0/test/common/dub.sdl000066400000000000000000000002401477246567400161150ustar00rootroot00000000000000name "common" description "Utility package for test suite" authors "drug007" copyright "The D language Foundation" license "BSL-1.0" targetType "sourceLibrary" dub-1.40.0/test/common/source/000077500000000000000000000000001477246567400161435ustar00rootroot00000000000000dub-1.40.0/test/common/source/common.d000066400000000000000000000016721477246567400176060ustar00rootroot00000000000000module common; import std.conv : text; import std.stdio : File, stdout, stderr; /// Name of the log file enum logFile = "test.log"; /// has true if some test fails bool any_errors = false; /// prints (non error) message to standard output and log file void log(Args...)(Args args) if (Args.length) { const str = text("[INFO] ", args); version(Windows) stdout.writeln(str); else stdout.writeln("\033[0;33m", str, "\033[0m"); stdout.flush; File(logFile, "a").writeln(str); } /// prints error message to standard error stream and log file /// and set any_errors var to true value to indicate that some /// test fails void logError(Args...)(Args args) { const str = text("[ERROR] ", args); version(Windows) stderr.writeln(str); else stderr.writeln("\033[0;31m", str, "\033[0m"); stderr.flush; File(logFile, "a").writeln(str); any_errors = true; } void die(Args...)(Args args) { stderr.writeln(args); throw new Exception("Test failed"); } 
dub-1.40.0/test/cov-ctfe.sh000077500000000000000000000001761477246567400154240ustar00rootroot00000000000000#!/usr/bin/env bash DIR=$(dirname "${BASH_SOURCE[0]}") . "$DIR"/common.sh "$DUB" run --root "$DIR"/cov-ctfe --build=cov-ctfe dub-1.40.0/test/cov-ctfe/000077500000000000000000000000001477246567400150615ustar00rootroot00000000000000dub-1.40.0/test/cov-ctfe/.no_build000066400000000000000000000000001477246567400166430ustar00rootroot00000000000000dub-1.40.0/test/cov-ctfe/.no_run000066400000000000000000000000001477246567400163500ustar00rootroot00000000000000dub-1.40.0/test/cov-ctfe/.no_test000066400000000000000000000000001477246567400165230ustar00rootroot00000000000000dub-1.40.0/test/cov-ctfe/dub.sdl000066400000000000000000000001361477246567400163370ustar00rootroot00000000000000name "test" version "1.0.0" targetType "executable" dflags "-cov=100" mainSourceFile "test.d" dub-1.40.0/test/cov-ctfe/test.d000066400000000000000000000003011477246567400161770ustar00rootroot00000000000000int f(int x) { return x + 1; } int g(int x) { return x * 2; } enum gResult = g(12); // execute g() at compile-time int main(string[] args) { assert(f(11) + gResult == 36); return 0; } dub-1.40.0/test/custom-source-main-bug487/000077500000000000000000000000001477246567400201235ustar00rootroot00000000000000dub-1.40.0/test/custom-source-main-bug487/.gitignore000066400000000000000000000000461477246567400221130ustar00rootroot00000000000000.dub docs.json __dummy.html *.o *.obj dub-1.40.0/test/custom-source-main-bug487/.no_run000066400000000000000000000000001477246567400214120ustar00rootroot00000000000000dub-1.40.0/test/custom-source-main-bug487/.no_test000066400000000000000000000000001477246567400215650ustar00rootroot00000000000000dub-1.40.0/test/custom-source-main-bug487/dub.json000066400000000000000000000001331477246567400215650ustar00rootroot00000000000000{ "name": "custom-source-main-bug487", "sourcePaths": ["mysrc"], "dependencies": { } } 
dub-1.40.0/test/custom-source-main-bug487/mysrc/000077500000000000000000000000001477246567400212605ustar00rootroot00000000000000dub-1.40.0/test/custom-source-main-bug487/mysrc/app.d000066400000000000000000000000201477246567400221750ustar00rootroot00000000000000void main() { } dub-1.40.0/test/custom-unittest/000077500000000000000000000000001477246567400165425ustar00rootroot00000000000000dub-1.40.0/test/custom-unittest/dub.json000066400000000000000000000006021477246567400202050ustar00rootroot00000000000000{ "name": "custom-unittest", "configurations": [ { "name": "application", "targetType": "executable" }, { "name": "library", "excludedSourceFiles": ["source/app.d"], "targetType": "library" }, { "name": "unittest", "targetType": "executable", "excludedSourceFiles": ["source/app.d"], "sourcePaths": ["test/"], "importPaths": ["test/"] } ] } dub-1.40.0/test/custom-unittest/source/000077500000000000000000000000001477246567400200425ustar00rootroot00000000000000dub-1.40.0/test/custom-unittest/source/app.d000066400000000000000000000001511477246567400207640ustar00rootroot00000000000000module app; import lib; import std.stdio; void main() { writeln("Running application."); libFunc(); } dub-1.40.0/test/custom-unittest/source/lib.d000066400000000000000000000001321477246567400207510ustar00rootroot00000000000000module lib; void libFunc() { import std.stdio; writefln("Library function called."); } dub-1.40.0/test/custom-unittest/test/000077500000000000000000000000001477246567400175215ustar00rootroot00000000000000dub-1.40.0/test/custom-unittest/test/main.d000066400000000000000000000001231477246567400206060ustar00rootroot00000000000000module main; void main() { import std.stdio; writefln("Running unit tests."); } dub-1.40.0/test/d-versions.sh000077500000000000000000000002211477246567400157760ustar00rootroot00000000000000#!/usr/bin/env bash . 
$(dirname "${BASH_SOURCE[0]}")/common.sh cd ${CURR_DIR}/d-versions ${DUB} build --d-version=FromCli1 --d-version=FromCli2 dub-1.40.0/test/d-versions/000077500000000000000000000000001477246567400154445ustar00rootroot00000000000000dub-1.40.0/test/d-versions/.gitignore000066400000000000000000000000131477246567400174260ustar00rootroot00000000000000d-versions dub-1.40.0/test/d-versions/.no_build000066400000000000000000000000001477246567400172260ustar00rootroot00000000000000dub-1.40.0/test/d-versions/.no_run000066400000000000000000000000001477246567400167330ustar00rootroot00000000000000dub-1.40.0/test/d-versions/.no_test000066400000000000000000000000001477246567400171060ustar00rootroot00000000000000dub-1.40.0/test/d-versions/dub.sdl000066400000000000000000000000221477246567400167140ustar00rootroot00000000000000name "d-versions" dub-1.40.0/test/d-versions/source/000077500000000000000000000000001477246567400167445ustar00rootroot00000000000000dub-1.40.0/test/d-versions/source/app.d000066400000000000000000000002731477246567400176730ustar00rootroot00000000000000version (FromCli1) enum has1 = true; else enum has1 = false; version (FromCli2) enum has2 = true; else enum has2 = false; static assert(has1); static assert(has2); void main() { } dub-1.40.0/test/dc-env.sh000077500000000000000000000001621477246567400150650ustar00rootroot00000000000000#!/usr/bin/env bash . $(dirname "${BASH_SOURCE[0]}")/common.sh cd ${CURR_DIR}/issue2012-dc-env $DUB app.d ${DC} dub-1.40.0/test/ddox.sh000077500000000000000000000011331477246567400146460ustar00rootroot00000000000000#!/usr/bin/env bash . 
$(dirname "${BASH_SOURCE[0]}")/common.sh # gdc 4.8.5 not working with ddox due to missing # std.experimental.allocator.mallocator for libdparse if [ ${DC} = gdc ]; then exit 0 fi (cd $CURR_DIR/ddox/default && $DUB build -b ddox) grep -qF ddox_project $CURR_DIR/ddox/default/docs/index.html $DUB add-local $CURR_DIR/ddox/custom-tool (cd $CURR_DIR/ddox/custom && $DUB build -b ddox) grep -qF custom-tool $CURR_DIR/ddox/custom/docs/custom_tool_output diff $CURR_DIR/ddox/custom-tool/public/copied $CURR_DIR/ddox/custom/docs/copied $DUB remove-local $CURR_DIR/ddox/custom-tool dub-1.40.0/test/ddox.sh.min_frontend000066400000000000000000000000061477246567400173220ustar00rootroot000000000000002.072 dub-1.40.0/test/ddox/000077500000000000000000000000001477246567400143115ustar00rootroot00000000000000dub-1.40.0/test/ddox/.no_build000066400000000000000000000000001477246567400160730ustar00rootroot00000000000000dub-1.40.0/test/ddox/custom-tool/000077500000000000000000000000001477246567400165765ustar00rootroot00000000000000dub-1.40.0/test/ddox/custom-tool/.gitignore000066400000000000000000000000621477246567400205640ustar00rootroot00000000000000.dub docs.json __dummy.html *.o *.obj custom-tool dub-1.40.0/test/ddox/custom-tool/dub.sdl000066400000000000000000000001551477246567400200550ustar00rootroot00000000000000name "custom-tool" description "A minimal D application." 
copyright "Copyright © 2015, dawg" authors "dawg" dub-1.40.0/test/ddox/custom-tool/public/000077500000000000000000000000001477246567400200545ustar00rootroot00000000000000dub-1.40.0/test/ddox/custom-tool/public/copied000066400000000000000000000000101477246567400212310ustar00rootroot00000000000000content dub-1.40.0/test/ddox/custom-tool/source/000077500000000000000000000000001477246567400200765ustar00rootroot00000000000000dub-1.40.0/test/ddox/custom-tool/source/app.d000066400000000000000000000003201477246567400210160ustar00rootroot00000000000000import std.file, std.stdio, std.string; void main(string[] args) { if (args[1] != "generate-html") return; mkdirRecurse(args[$-1]); File(args[$-1]~"/custom_tool_output", "w").writeln(args.join(" ")); } dub-1.40.0/test/ddox/custom/000077500000000000000000000000001477246567400156235ustar00rootroot00000000000000dub-1.40.0/test/ddox/custom/.gitignore000066400000000000000000000000531477246567400176110ustar00rootroot00000000000000.dub docs docs.json __dummy.html *.o *.obj dub-1.40.0/test/ddox/custom/dub.sdl000066400000000000000000000000551477246567400171010ustar00rootroot00000000000000name "ddox-project" x:ddoxTool "custom-tool" dub-1.40.0/test/ddox/custom/source/000077500000000000000000000000001477246567400171235ustar00rootroot00000000000000dub-1.40.0/test/ddox/custom/source/ddox_project.d000066400000000000000000000000611477246567400217510ustar00rootroot00000000000000/// module ddox_project; /// docstring int foo; dub-1.40.0/test/ddox/default/000077500000000000000000000000001477246567400157355ustar00rootroot00000000000000dub-1.40.0/test/ddox/default/.gitignore000066400000000000000000000000531477246567400177230ustar00rootroot00000000000000.dub docs docs.json __dummy.html *.o *.obj dub-1.40.0/test/ddox/default/dub.sdl000066400000000000000000000000241477246567400172070ustar00rootroot00000000000000name "ddox-project" 
dub-1.40.0/test/ddox/default/source/000077500000000000000000000000001477246567400172355ustar00rootroot00000000000000dub-1.40.0/test/ddox/default/source/ddox_project.d000066400000000000000000000000611477246567400220630ustar00rootroot00000000000000/// module ddox_project; /// docstring int foo; dub-1.40.0/test/depen-build-settings/000077500000000000000000000000001477246567400174015ustar00rootroot00000000000000dub-1.40.0/test/depen-build-settings/.gitignore000066400000000000000000000001101477246567400213610ustar00rootroot00000000000000depend.json depend2.json depen-build-settings.json depen-build-settings dub-1.40.0/test/depen-build-settings/.no_build_dmd000066400000000000000000000000001477246567400220070ustar00rootroot00000000000000dub-1.40.0/test/depen-build-settings/.no_build_gdc000066400000000000000000000000001477246567400220000ustar00rootroot00000000000000dub-1.40.0/test/depen-build-settings/.no_test000066400000000000000000000000001477246567400210430ustar00rootroot00000000000000dub-1.40.0/test/depen-build-settings/depend/000077500000000000000000000000001477246567400206405ustar00rootroot00000000000000dub-1.40.0/test/depen-build-settings/depend/depend2/000077500000000000000000000000001477246567400221615ustar00rootroot00000000000000dub-1.40.0/test/depen-build-settings/depend/depend2/dub.json000066400000000000000000000001351477246567400236250ustar00rootroot00000000000000{ "targetType": "library", "description": "A minimal D application.", "name": "depend2" } dub-1.40.0/test/depen-build-settings/depend/depend2/source/000077500000000000000000000000001477246567400234615ustar00rootroot00000000000000dub-1.40.0/test/depen-build-settings/depend/depend2/source/depend2.d000066400000000000000000000002531477246567400251470ustar00rootroot00000000000000import std.stdio; version (must_be_defined) {} else static assert(0, "Expected must_be_defined to be set"); extern (C) void depend2_func() { writeln("depend2_func"); } 
dub-1.40.0/test/depen-build-settings/depend/dub.json000066400000000000000000000002261477246567400223050ustar00rootroot00000000000000{ "targetType": "library", "description": "A minimal D application.", "name": "depend1", "dependencies": { "depend2": { "version" : "*" } } } dub-1.40.0/test/depen-build-settings/depend/source/000077500000000000000000000000001477246567400221405ustar00rootroot00000000000000dub-1.40.0/test/depen-build-settings/depend/source/depend.d000066400000000000000000000003351477246567400235450ustar00rootroot00000000000000import std.stdio; version (must_be_defined) {} else static assert(0, "Expected must_be_defined to be set"); extern (C) void depend2_func(); extern (C) void depend1_func() { writeln("depend1_func"); depend2_func(); } dub-1.40.0/test/depen-build-settings/dub.json000066400000000000000000000005431477246567400210500ustar00rootroot00000000000000{ "description": "A minimal D application.", "name": "depen-build-settings", "dependencies": { "depend1": { "version" : "*", "dflags" : ["-X"] } }, "configurations": [ { "name" : "defaultconfig", "targetType": "executable", "dependencies": { "depend1": { "version" : "*", "dflags" : ["--d-version=must_be_defined"] } } } ] } dub-1.40.0/test/depen-build-settings/dub.selections.json000066400000000000000000000001621477246567400232140ustar00rootroot00000000000000{ "fileVersion": 1, "versions": { "depend1": {"path":"depend/"}, "depend2": {"path":"depend/depend2/"} } } dub-1.40.0/test/depen-build-settings/source/000077500000000000000000000000001477246567400207015ustar00rootroot00000000000000dub-1.40.0/test/depen-build-settings/source/app.d000066400000000000000000000005431477246567400216300ustar00rootroot00000000000000import std.stdio; import std.file; extern (C) void depend1_func(); version (must_be_defined) static assert(0, "Expected must_be_defined not to be set"); void main() { writeln("Edit source/app.d to start your project."); depend1_func(); assert(!exists("depen-build-settings.json")); 
assert(exists("depend2.json")); assert(exists("depend.json")); } dub-1.40.0/test/describe-dependency-1/000077500000000000000000000000001477246567400174055ustar00rootroot00000000000000dub-1.40.0/test/describe-dependency-1/.no_build000066400000000000000000000000021477246567400211710ustar00rootroot00000000000000 dub-1.40.0/test/describe-dependency-1/data/000077500000000000000000000000001477246567400203165ustar00rootroot00000000000000dub-1.40.0/test/describe-dependency-1/data/dummy-dep1.dat000066400000000000000000000000021477246567400227620ustar00rootroot00000000000000 dub-1.40.0/test/describe-dependency-1/dependency-postGenerateCommands.sh000077500000000000000000000000241477246567400261760ustar00rootroot00000000000000#!/usr/bin/env bash dub-1.40.0/test/describe-dependency-1/dependency-preGenerateCommands.sh000077500000000000000000000000241477246567400257770ustar00rootroot00000000000000#!/usr/bin/env bash dub-1.40.0/test/describe-dependency-1/dub.json000066400000000000000000000021171477246567400210530ustar00rootroot00000000000000{ "name": "describe-dependency-1", "targetType": "sourceLibrary", "description": "A test describe project", "authors": ["nobody"], "homepage": "fake.com", "license": "BSD 2-clause", "copyright": "Copyright © 2015, nobody", "sourceFiles-posix": ["dep.a"], "sourceFiles-windows": ["dep.lib"], "dflags": ["--another-dflag"], "lflags": ["--another-lflag"], "libs": ["anotherlib"], "copyFiles": ["data/*"], "versions": ["anotherVerIdent"], "debugVersions": ["anotherDebugVerIdent"], "preGenerateCommands-posix": ["../describe-dependency-1/dependency-preGenerateCommands.sh"], "postGenerateCommands-posix": ["../describe-dependency-1/dependency-postGenerateCommands.sh"], "preBuildCommands-posix": ["../describe-dependency-1/dependency-preBuildCommands.sh"], "postBuildCommands-posix": ["../describe-dependency-1/dependency-postBuildCommands.sh"], "buildRequirements": ["requireContracts"], "buildOptions": ["stackStomping"], "configurations": [ { "name": 
"my-dependency-1-config" } ], } dub-1.40.0/test/describe-dependency-1/otherdir/000077500000000000000000000000001477246567400212255ustar00rootroot00000000000000dub-1.40.0/test/describe-dependency-1/otherdir/dummy.d000066400000000000000000000000011477246567400225140ustar00rootroot00000000000000 dub-1.40.0/test/describe-dependency-1/source/000077500000000000000000000000001477246567400207055ustar00rootroot00000000000000dub-1.40.0/test/describe-dependency-1/source/dummy.d000066400000000000000000000000751477246567400222070ustar00rootroot00000000000000version (anotherVerIdent) {} debug (anotherDebugVerIdent) {} dub-1.40.0/test/describe-dependency-2/000077500000000000000000000000001477246567400174065ustar00rootroot00000000000000dub-1.40.0/test/describe-dependency-2/.no_build000066400000000000000000000000021477246567400211720ustar00rootroot00000000000000 dub-1.40.0/test/describe-dependency-2/dub.json000066400000000000000000000005341477246567400210550ustar00rootroot00000000000000{ "name": "describe-dependency-2", "targetType": "sourceLibrary", "description": "A test describe project", "authors": ["nobody"], "homepage": "fake.com", "license": "BSD 2-clause", "copyright": "Copyright © 2015, nobody", "importPaths": ["some-path"], "stringImportPaths": ["some-extra-string-import-path"], } dub-1.40.0/test/describe-dependency-2/some-extra-string-import-path/000077500000000000000000000000001477246567400252405ustar00rootroot00000000000000dub-1.40.0/test/describe-dependency-2/some-extra-string-import-path/dummy.d000066400000000000000000000000011477246567400265270ustar00rootroot00000000000000 dub-1.40.0/test/describe-dependency-2/some-path/000077500000000000000000000000001477246567400213035ustar00rootroot00000000000000dub-1.40.0/test/describe-dependency-2/some-path/dummy.d000066400000000000000000000000011477246567400225720ustar00rootroot00000000000000 
dub-1.40.0/test/describe-dependency-3/000077500000000000000000000000001477246567400174075ustar00rootroot00000000000000dub-1.40.0/test/describe-dependency-3/.no_build000066400000000000000000000000021477246567400211730ustar00rootroot00000000000000 dub-1.40.0/test/describe-dependency-3/dep3-source/000077500000000000000000000000001477246567400215405ustar00rootroot00000000000000dub-1.40.0/test/describe-dependency-3/dep3-source/dummy.d000066400000000000000000000000011477246567400230270ustar00rootroot00000000000000 dub-1.40.0/test/describe-dependency-3/dep3-string-import-path/000077500000000000000000000000001477246567400240105ustar00rootroot00000000000000dub-1.40.0/test/describe-dependency-3/dep3-string-import-path/dummy.d000066400000000000000000000000011477246567400252770ustar00rootroot00000000000000 dub-1.40.0/test/describe-dependency-3/dub.json000066400000000000000000000006341477246567400210570ustar00rootroot00000000000000{ "name": "describe-dependency-3", "targetType": "staticLibrary", "description": "A test describe project", "authors": ["nobody"], "homepage": "fake.com", "license": "BSD 2-clause", "copyright": "Copyright © 2015, nobody", "importPaths": ["dep3-source"], "sourcePaths": ["dep3-source"], "stringImportPaths": ["dep3-string-import-path"], "buildOptions": ["profile"] } dub-1.40.0/test/describe-project/000077500000000000000000000000001477246567400165775ustar00rootroot00000000000000dub-1.40.0/test/describe-project/.no_build000066400000000000000000000000021477246567400203630ustar00rootroot00000000000000 dub-1.40.0/test/describe-project/data/000077500000000000000000000000001477246567400175105ustar00rootroot00000000000000dub-1.40.0/test/describe-project/data/dummy.dat000066400000000000000000000000021477246567400213250ustar00rootroot00000000000000 dub-1.40.0/test/describe-project/do-postGenerateCommands.sh000077500000000000000000000000241477246567400236540ustar00rootroot00000000000000#!/usr/bin/env bash 
dub-1.40.0/test/describe-project/do-preGenerateCommands.sh000077500000000000000000000000241477246567400234550ustar00rootroot00000000000000#!/usr/bin/env bash dub-1.40.0/test/describe-project/dub.json000066400000000000000000000027571477246567400202570ustar00rootroot00000000000000{ "name": "describe-project", "targetType": "executable", "description": "A test describe project", "authors": ["nobody"], "homepage": "fake.com", "license": "BSD 2-clause", "copyright": "Copyright © 2015, nobody", "mainSourceFile": "src/dummy.d", "sourceFiles-posix": ["./some.a"], "sourceFiles-windows": ["./some.lib"], "dflags": ["--some-dflag"], "lflags": ["--some-lflag"], "libs": ["somelib"], "copyFiles": ["data/dummy.dat"], "versions": ["someVerIdent"], "debugVersions": ["someDebugVerIdent"], "preGenerateCommands-posix": ["./do-preGenerateCommands.sh"], "postGenerateCommands-posix": ["./do-postGenerateCommands.sh"], "preBuildCommands-posix": ["./do-preBuildCommands.sh"], "postBuildCommands-posix": ["./do-postBuildCommands.sh"], "buildRequirements": ["allowWarnings", "disallowInlining"], "buildOptions": ["releaseMode", "debugInfo"], "dependencies": { "describe-dependency-1": { "version": "1.0", "path": "../describe-dependency-1" }, "describe-dependency-2": { "version": "1.0", "path": "../describe-dependency-2" }, "describe-dependency-3": { "version": "1.0", "path": "../describe-dependency-3" } }, "configurations": [ { "name": "my-project-config" } ], "subConfigurations": { "describe-dependency-1": "my-dependency-1-config" }, } dub-1.40.0/test/describe-project/src/000077500000000000000000000000001477246567400173665ustar00rootroot00000000000000dub-1.40.0/test/describe-project/src/dummy.d000066400000000000000000000001371477246567400206670ustar00rootroot00000000000000version (Have_describe_dependency_3) {} version (someVerIdent) {} debug (someDebugVerIdent) {} 
dub-1.40.0/test/describe-project/views/000077500000000000000000000000001477246567400177345ustar00rootroot00000000000000dub-1.40.0/test/describe-project/views/dummy.d000066400000000000000000000000011477246567400212230ustar00rootroot00000000000000 dub-1.40.0/test/dpath-variable.sh000077500000000000000000000016411477246567400165770ustar00rootroot00000000000000#!/usr/bin/env bash . $(dirname "${BASH_SOURCE[0]}")/common.sh export DPATH="${CURR_DIR}/dpath-variable/dpath" rm -rf "$DPATH" cd "${CURR_DIR}/dpath-variable" "${DUB}" upgrade if [[ ! -f "$DPATH/dub/packages/gitcompatibledubpackage/1.0.1/gitcompatibledubpackage/dub.json" ]]; then die $LINENO 'Did not get dependencies installed into $DPATH.' fi # just for making this shell script easier to write, copy the variable DPATH_ALIAS="$DPATH" # unset the variable so DUB doesn't pick it up though unset DPATH rm -rf "$DPATH_ALIAS" echo '{"dubHome":"'"$DPATH_ALIAS"/dub2'"}' > "${CURR_DIR}/dpath-variable/dub.settings.json" function cleanup { rm "${CURR_DIR}/dpath-variable/dub.settings.json" } trap cleanup EXIT "${DUB}" upgrade if [[ ! -f "$DPATH_ALIAS/dub2/packages/gitcompatibledubpackage/1.0.1/gitcompatibledubpackage/dub.json" ]]; then die $LINENO 'Did not get dependencies installed into dubHome (set from config).' 
fi dub-1.40.0/test/dpath-variable/000077500000000000000000000000001477246567400162365ustar00rootroot00000000000000dub-1.40.0/test/dpath-variable/.gitignore000066400000000000000000000000061477246567400202220ustar00rootroot00000000000000dpath dub-1.40.0/test/dpath-variable/dub.json000066400000000000000000000001321477246567400176770ustar00rootroot00000000000000{ "name": "dpath-variable", "dependencies": { "gitcompatibledubpackage": "1.0.1" } } dub-1.40.0/test/dpath-variable/source/000077500000000000000000000000001477246567400175365ustar00rootroot00000000000000dub-1.40.0/test/dpath-variable/source/app.d000066400000000000000000000000201477246567400204530ustar00rootroot00000000000000void main() { } dub-1.40.0/test/dub-as-a-library-cwd.sh000077500000000000000000000001541477246567400175200ustar00rootroot00000000000000#!/usr/bin/env bash . $(dirname "${BASH_SOURCE[0]}")/common.sh $DUB --root="$CURR_DIR/dub-as-a-library-cwd" dub-1.40.0/test/dub-as-a-library-cwd/000077500000000000000000000000001477246567400171615ustar00rootroot00000000000000dub-1.40.0/test/dub-as-a-library-cwd/.gitignore000066400000000000000000000000261477246567400211470ustar00rootroot00000000000000/dub-as-a-library-cwd dub-1.40.0/test/dub-as-a-library-cwd/.no_test000066400000000000000000000000001477246567400206230ustar00rootroot00000000000000dub-1.40.0/test/dub-as-a-library-cwd/dub.json000066400000000000000000000001671477246567400206320ustar00rootroot00000000000000{ "name": "dub-as-a-library-cwd", "workingDirectory": ".", "dependencies": { "dub": { "path": "../.." 
} } } dub-1.40.0/test/dub-as-a-library-cwd/source/000077500000000000000000000000001477246567400204615ustar00rootroot00000000000000dub-1.40.0/test/dub-as-a-library-cwd/source/app.d000066400000000000000000000023061477246567400214070ustar00rootroot00000000000000import dub.compilers.buildsettings; import dub.compilers.compiler; import dub.dub; import dub.generators.generator; import dub.internal.vibecompat.inet.path; import std.algorithm; import std.file; import std.path; import std.stdio; void main(string[] args) { auto project = buildNormalizedPath(getcwd, "subproject"); chdir(buildNormalizedPath(getcwd, "..")); bool found; auto dub = new Dub(project, null, SkipPackageSuppliers.none); dub.packageManager.getOrLoadPackage(NativePath(project)); dub.loadPackage(); dub.project.validate(); GeneratorSettings gs; gs.buildType = "debug"; gs.config = "application"; gs.compiler = getCompiler(dub.defaultCompiler); gs.run = false; gs.force = true; gs.tempBuild = true; gs.platform = gs.compiler.determinePlatform(gs.buildSettings, dub.defaultCompiler, dub.defaultArchitecture); gs.compileCallback = (status, output) { found = output.canFind("FIND_THIS_STRING"); if (!found) stderr.writeln("Did not find required string!\nExit status:", status, "\n\nOutput:\n", output); }; stderr.writeln("Checking if building works from a library in a different cwd:"); dub.generateProject("build", gs); stderr.writeln("Success: ", found); assert(found); } dub-1.40.0/test/dub-as-a-library-cwd/subproject/000077500000000000000000000000001477246567400213415ustar00rootroot00000000000000dub-1.40.0/test/dub-as-a-library-cwd/subproject/dub.sdl000066400000000000000000000000221477246567400226110ustar00rootroot00000000000000name "subproject" 
dub-1.40.0/test/dub-as-a-library-cwd/subproject/source/000077500000000000000000000000001477246567400226415ustar00rootroot00000000000000dub-1.40.0/test/dub-as-a-library-cwd/subproject/source/app.d000066400000000000000000000001411477246567400235620ustar00rootroot00000000000000module app; deprecated("FIND_THIS_STRING") void foo() { } void main(string[] args) { foo(); } dub-1.40.0/test/dub-custom-root-2/000077500000000000000000000000001477246567400165555ustar00rootroot00000000000000dub-1.40.0/test/dub-custom-root-2/.gitignore000066400000000000000000000000621477246567400205430ustar00rootroot00000000000000.dub docs.json __dummy.html *.o *.obj /target-exe dub-1.40.0/test/dub-custom-root-2/.no_run000066400000000000000000000000001477246567400200440ustar00rootroot00000000000000dub-1.40.0/test/dub-custom-root-2/.no_test000066400000000000000000000000001477246567400202170ustar00rootroot00000000000000dub-1.40.0/test/dub-custom-root-2/dub.json000066400000000000000000000000711477246567400202200ustar00rootroot00000000000000{ "name": "target-exe", "workingDirectory": "source" } dub-1.40.0/test/dub-custom-root-2/source/000077500000000000000000000000001477246567400200555ustar00rootroot00000000000000dub-1.40.0/test/dub-custom-root-2/source/app.d000066400000000000000000000003741477246567400210060ustar00rootroot00000000000000import std.file; import std.path; import std.stdio; import std.string; void main() { // run me from test/ with dub --root=dub-custom-root string cwd = getcwd.chomp("/"); assert(cwd.endsWith("test/dub-custom-root-2/source"), cwd); writeln("ok"); } dub-1.40.0/test/dub-custom-root.sh000077500000000000000000000002241477246567400167530ustar00rootroot00000000000000#!/usr/bin/env bash . 
$(dirname "${BASH_SOURCE[0]}")/common.sh pushd "$CURR_DIR" $DUB --root=dub-custom-root $DUB --root=dub-custom-root-2 popd dub-1.40.0/test/dub-custom-root/000077500000000000000000000000001477246567400164165ustar00rootroot00000000000000dub-1.40.0/test/dub-custom-root/.gitignore000066400000000000000000000000621477246567400204040ustar00rootroot00000000000000.dub docs.json __dummy.html *.o *.obj /target-exe dub-1.40.0/test/dub-custom-root/.no_run000066400000000000000000000000001477246567400177050ustar00rootroot00000000000000dub-1.40.0/test/dub-custom-root/.no_test000066400000000000000000000000001477246567400200600ustar00rootroot00000000000000dub-1.40.0/test/dub-custom-root/dub.json000066400000000000000000000000321477246567400200560ustar00rootroot00000000000000{ "name": "target-exe" } dub-1.40.0/test/dub-custom-root/source/000077500000000000000000000000001477246567400177165ustar00rootroot00000000000000dub-1.40.0/test/dub-custom-root/source/app.d000066400000000000000000000002741477246567400206460ustar00rootroot00000000000000import std.file; import std.path; import std.stdio; void main() { // run me from test/ with dub --root=test/dub-custom-root assert(getcwd.baseName == "test", getcwd); writeln("ok"); } dub-1.40.0/test/dub_test_root.sh000077500000000000000000000006441477246567400165720ustar00rootroot00000000000000#!/usr/bin/env bash # Make sure the auto-generated 'dub_test_root' module is importable for # non-all-at-once compilations too. 
set -euo pipefail TMPDIR=$(mktemp -d "$(basename "$0").XXXXXX") pushd "$TMPDIR" function cleanup { popd rm -rf "$TMPDIR" } trap cleanup EXIT echo 'name "foo"' > dub.sdl mkdir -p source echo 'import dub_test_root : allModules;' > source/foo.d $DUB test --build-mode=singleFile dub-1.40.0/test/dustmite-no-redirect-test/000077500000000000000000000000001477246567400203775ustar00rootroot00000000000000dub-1.40.0/test/dustmite-no-redirect-test/.no_build000066400000000000000000000000001477246567400221610ustar00rootroot00000000000000dub-1.40.0/test/dustmite-no-redirect-test/project/000077500000000000000000000000001477246567400220455ustar00rootroot00000000000000dub-1.40.0/test/dustmite-no-redirect-test/project/dub.json000066400000000000000000000000511477246567400235060ustar00rootroot00000000000000{ "name": "dustmite-no-redirect-test" } dub-1.40.0/test/dustmite-no-redirect-test/project/source/000077500000000000000000000000001477246567400233455ustar00rootroot00000000000000dub-1.40.0/test/dustmite-no-redirect-test/project/source/app.d000066400000000000000000000001531477246567400242710ustar00rootroot00000000000000extern(C) int printf(const scope char*, ...); void main() { printf("This text should be shown!\n"); } dub-1.40.0/test/dustmite-no-redirect.sh000077500000000000000000000011011477246567400177520ustar00rootroot00000000000000#!/usr/bin/env bash if ! command -v dustmite &> /dev/null then echo "Skipping test because dustmite is not installed!" exit 0 fi . $(dirname "${BASH_SOURCE[0]}")/common.sh DM_TEST="$CURR_DIR/dustmite-no-redirect-test/project" DM_TMP="$DM_TEST-dusting" EXPECTED="This text should be shown!" LOG="$DM_TEST.log" rm -rf $DM_TMP $DM_TMP.* $DUB --root=$DM_TEST dustmite --no-redirect --program-status=1 $DM_TMP &> $LOG || true if ! 
grep -q "$EXPECTED" "$LOG" then cat $LOG die $LINENO "Diff between expected and actual output" fi rm -rf $DM_TMP $DM_TMP.* $LOG dub-1.40.0/test/environment-variables.script.d000066400000000000000000000145561477246567400213500ustar00rootroot00000000000000/+ dub.json: { "name": "environment_variables" } +/ module environment_variables; import std; void main() { auto currDir = environment.get("CURR_DIR", __FILE_FULL_PATH__.dirName()); // preGenerateCommands uses system.environments < settings.environments < deppkg.environments < root.environments < deppkg.preGenerateEnvironments < root.preGenerateEnvironments // preBuildCommands uses system.environments < settings.environments < deppkg.environments < root.environments < deppkg.buildEnvironments < root.buildEnvironments < deppkg.preBuildEnvironments < root.preBuildEnvironments // Build tools uses system.environments < settings.environments < deppkg.environments < root.environments < deppkg.buildEnvironments < root.buildEnvironments // postBuildCommands uses system.environments < settings.environments < deppkg.environments < root.environments < deppkg.buildEnvironments < root.buildEnvironments < deppkg.postBuildEnvironments < root.postBuildEnvironments // postGenerateCommands uses system.environments < settings.environments < deppkg.environments < root.environments < deppkg.postGenerateEnvironments < root.postGenerateEnvironments // preRunCommands uses system.environments < settings.environments < deppkg.environments < root.environments < deppkg.runEnvironments < root.runEnvironments < deppkg.preRunEnvironments < root.preRunEnvironments // User application uses system.environments < settings.environments < deppkg.environments < root.environments < deppkg.runEnvironments < root.runEnvironments // postRunCommands uses system.environments < settings.environments < deppkg.environments < root.environments < deppkg.runEnvironments < root.runEnvironments < deppkg.postRunEnvironments < root.postRunEnvironments // Test cases 
covers: // preGenerateCommands [in root] // priority check: system.environments < settings.environments // priority check: settings.environments < deppkg.environments // priority check: deppkg.environments < root.environments // priority check: root.environments < deppkg.preGenerateEnvironments // priority check: deppkg.preGenerateEnvironments < root.preGenerateEnvironments // postGenerateCommands [in root] // expantion check: deppkg.VAR4 // preBuildCommands [in deppkg] // root.environments < deppkg.buildEnvironments // deppkg.buildEnvironments < root.buildEnvironments // root.buildEnvironments < deppkg.postBuildEnvironments // deppkg.preBuildEnvironments < root.preBuildEnvironments // postBuildCommands [in deppkg] // expantion check: deppkg.VAR4 // preRunCommands [in deppkg][in root] // expantion check: deppkg.VAR4 // Application run // expantion check: root.VAR1 // expantion check: settings.VAR2 // expantion check: root.VAR3 // expantion check: deppkg.VAR4 // expantion check: system.VAR5 // expantion check: system.SYSENVVAREXPCHECK // postRunCommands [in deppkg][in root] // expantion check: deppkg.VAR4 auto res = execute([environment.get("DUB", "dub"), "run", "-f"], [ "PRIORITYCHECK_SYS_SET": "system.PRIORITYCHECK_SYS_SET", "SYSENVVAREXPCHECK": "system.SYSENVVAREXPCHECK", "VAR5": "system.VAR5" ], Config.none, size_t.max, currDir.buildPath("environment-variables")); scope (failure) writeln("environment-variables test failed... 
Testing stdout is:\n-----\n", res.output); // preGenerateCommands [in root] assert(res.output.canFind("root.preGenerate: setting.PRIORITYCHECK_SYS_SET"), "preGenerate environment variables priority check is failed."); assert(res.output.canFind("root.preGenerate: deppkg.PRIORITYCHECK_SET_DEP"), "preGenerate environment variables priority check is failed."); assert(res.output.canFind("root.preGenerate: deppkg.PRIORITYCHECK_DEP_ROOT"), "preGenerate environment variables priority check is failed."); assert(res.output.canFind("root.preGenerate: deppkg.PRIORITYCHECK_ROOT_DEPSPEC"), "preGenerate environment variables priority check is failed."); assert(res.output.canFind("root.preGenerate: root.PRIORITYCHECK_DEPSPEC_ROOTSPEC"), "preGenerate environment variables priority check is failed."); // postGenerateCommands [in root] assert(res.output.canFind("root.postGenerate: deppkg.VAR4", "postGenerate environment variables expantion check is failed.")); // preBuildCommands [in deppkg] assert(res.output.canFind("deppkg.preBuild: deppkg.PRIORITYCHECK_ROOT_DEPBLDSPEC"), "preBuild environment variables priority check is failed."); assert(res.output.canFind("deppkg.preBuild: root.PRIORITYCHECK_DEPBLDSPEC_ROOTBLDSPEC"), "preBuild environment variables priority check is failed."); assert(res.output.canFind("deppkg.preBuild: deppkg.PRIORITYCHECK_ROOTBLDSPEC_DEPSPEC"), "preBuild environment variables priority check is failed."); assert(res.output.canFind("deppkg.preBuild: root.PRIORITYCHECK_DEPSPEC_ROOTSPEC"), "preBuild environment variables priority check is failed."); // postBuildCommands [in deppkg] assert(res.output.canFind("deppkg.postBuild: deppkg.VAR4"), "postBuild environment variables expantion check is failed."); // preRunCommands [in deppkg][in root] assert(!res.output.canFind("deppkg.preRun: deppkg.VAR4"), "preRun that is defined dependent library does not call."); assert(res.output.canFind("root.preRun: deppkg.VAR4"), "preRun environment variables expantion check is 
failed."); // Application run assert(res.output.canFind("app.run: root.VAR1"), "run environment variables expantion check is failed."); assert(res.output.canFind("app.run: settings.VAR2"), "run environment variables expantion check is failed."); assert(res.output.canFind("app.run: root.VAR3"), "run environment variables expantion check is failed."); assert(res.output.canFind("app.run: deppkg.VAR4"), "run environment variables expantion check is failed."); assert(res.output.canFind("app.run: system.VAR5"), "run environment variables expantion check is failed."); assert(res.output.canFind("app.run: system.SYSENVVAREXPCHECK"), "run environment variables expantion check is failed."); // postRunCommands [in deppkg][in root] assert(!res.output.canFind("deppkg.postRun: deppkg.VAR4"), "postRunCommands that is defined dependent library does not call."); assert(res.output.canFind("root.postRun: deppkg.VAR4"), "postRun environment variables expantion check is failed."); } dub-1.40.0/test/environment-variables/000077500000000000000000000000001477246567400176655ustar00rootroot00000000000000dub-1.40.0/test/environment-variables/.gitignore000066400000000000000000000002111477246567400216470ustar00rootroot00000000000000* !.no_build !.no_test !.no_run !dub.json !dub.settings.json !source !source/app.d !deppkg !deppkg/dub.json !deppkg/source/deppkg/foo.d dub-1.40.0/test/environment-variables/.no_build000066400000000000000000000000021477246567400214510ustar00rootroot00000000000000 dub-1.40.0/test/environment-variables/.no_run000066400000000000000000000000021477246567400211560ustar00rootroot00000000000000 dub-1.40.0/test/environment-variables/.no_test000066400000000000000000000000021477246567400213310ustar00rootroot00000000000000 dub-1.40.0/test/environment-variables/deppkg/000077500000000000000000000000001477246567400211375ustar00rootroot00000000000000dub-1.40.0/test/environment-variables/deppkg/dub.json000066400000000000000000000023051477246567400226040ustar00rootroot00000000000000{ 
"name": "deppkg", "targetType": "library", "environments": { "VAR1": "deppkg.VAR1", "VAR3": "deppkg.VAR3", "VAR4": "deppkg.VAR4", "PRIORITYCHECK_SET_DEP": "deppkg.PRIORITYCHECK_SET_DEP" }, "buildEnvironments": { "PRIORITYCHECK_ROOT_DEPBLDSPEC": "deppkg.PRIORITYCHECK_ROOT_DEPBLDSPEC", "PRIORITYCHECK_DEPBLDSPEC_ROOTBLDSPEC": "deppkg.PRIORITYCHECK_DEPBLDSPEC_ROOTBLDSPEC" }, "preGenerateEnvironments": { "PRIORITYCHECK_DEP_ROOT": "deppkg.PRIORITYCHECK_DEP_ROOT", "PRIORITYCHECK_ROOT_DEPSPEC": "deppkg.PRIORITYCHECK_ROOT_DEPSPEC", "PRIORITYCHECK_DEPSPEC_ROOTSPEC": "deppkg.PRIORITYCHECK_DEPSPEC_ROOTSPEC" }, "preBuildEnvironments": { "PRIORITYCHECK_ROOTBLDSPEC_DEPSPEC": "deppkg.PRIORITYCHECK_ROOTBLDSPEC_DEPSPEC", "PRIORITYCHECK_DEPSPEC_ROOTSPEC": "deppkg.PRIORITYCHECK_DEPSPEC_ROOTSPEC" }, "preBuildCommands": [ "echo deppkg.preBuild: $PRIORITYCHECK_ROOT_DEPBLDSPEC", "echo deppkg.preBuild: $PRIORITYCHECK_DEPBLDSPEC_ROOTBLDSPEC", "echo deppkg.preBuild: $PRIORITYCHECK_ROOTBLDSPEC_DEPSPEC", "echo deppkg.preBuild: $PRIORITYCHECK_DEPSPEC_ROOTSPEC" ], "postBuildCommands": ["echo deppkg.postBuild: $VAR4"], "preRunCommands": ["echo deppkg.preRun: $VAR4"] } dub-1.40.0/test/environment-variables/deppkg/source/000077500000000000000000000000001477246567400224375ustar00rootroot00000000000000dub-1.40.0/test/environment-variables/deppkg/source/deppkg/000077500000000000000000000000001477246567400237115ustar00rootroot00000000000000dub-1.40.0/test/environment-variables/deppkg/source/deppkg/foo.d000066400000000000000000000000661477246567400246430ustar00rootroot00000000000000module deppkg.foo; import std.stdio; void foo() { } dub-1.40.0/test/environment-variables/dub.json000066400000000000000000000037341477246567400213410ustar00rootroot00000000000000{ "name": "environment-variables", "dependencies": { "deppkg": {"path": "deppkg"} }, "environments": { "ENVIRONMENTS": "root.environments", "VAR1": "root.VAR1", "VAR3": "root.VAR3", "PRIORITYCHECK_DEP_ROOT": "root.PRIORITYCHECK_DEP_ROOT", 
"PRIORITYCHECK_ROOT_DEPSPEC": "root.PRIORITYCHECK_ROOT_DEPSPEC", "PRIORITYCHECK_ROOT_DEPBLDSPEC": "root.PRIORITYCHECK_ROOT_DEPBLDSPEC" }, "buildEnvironments": { "BUILD_ENVIRONMENTS": "root.buildEnvironments", "PRIORITYCHECK_ROOTBLDSPEC_DEPSPEC": "root.PRIORITYCHECK_ROOTBLDSPEC_DEPSPEC" }, "runEnvironments": { "RUN_ENVIRONMENTS": "root.runEnvironments" }, "preGenerateEnvironments": { "PRE_GENERATE_ENVIRONMENTS": "root.preGenerateEnvironments", "PRIORITYCHECK_DEPSPEC_ROOTSPEC": "root.PRIORITYCHECK_DEPSPEC_ROOTSPEC" }, "postGenerateEnvironments": { "POST_GENERATE_ENVIRONMENTS": "root.postGenerateEnvironments" }, "preBuildEnvironments": { "PRE_BUILD_ENVIRONMENTS": "root.preBuildEnvironments", "PRIORITYCHECK_DEPBLDSPEC_ROOTBLDSPEC": "root.PRIORITYCHECK_DEPBLDSPEC_ROOTBLDSPEC", "PRIORITYCHECK_DEPSPEC_ROOTSPEC": "root.PRIORITYCHECK_DEPSPEC_ROOTSPEC" }, "postBuildEnvironments": { "POST_BUILD_ENVIRONMENTS": "root.postBuildEnvironments" }, "preRunEnvironments": { "PRE_RUN_ENVIRONMENTS": "root.preRunEnvironments" }, "postRunEnvironments": { "POST_RUN_ENVIRONMENTS": "root.postRunEnvironments" }, "preGenerateCommands": [ "echo root.preGenerate: $PRIORITYCHECK_SYS_SET", "echo root.preGenerate: $PRIORITYCHECK_SET_DEP", "echo root.preGenerate: $PRIORITYCHECK_DEP_ROOT", "echo root.preGenerate: $PRIORITYCHECK_ROOT_DEPSPEC", "echo root.preGenerate: $PRIORITYCHECK_DEPSPEC_ROOTSPEC" ], "postGenerateCommands": ["echo root.postGenerate: $VAR4"], "preBuildCommands": ["echo root.preBuild: $VAR4"], "postBuildCommands": ["echo root.postBuild: $VAR4"], "preRunCommands": ["echo root.preRun: $VAR4"], "postRunCommands": ["echo root.postRun: $VAR4"] } dub-1.40.0/test/environment-variables/dub.settings.json000066400000000000000000000002631477246567400231720ustar00rootroot00000000000000{ "defaultEnvironments": { "VAR2": "settings.VAR2", "PRIORITYCHECK_SYS_SET": "setting.PRIORITYCHECK_SYS_SET", "PRIORITYCHECK_SET_DEP": "setting.PRIORITYCHECK_SET_DEP" } } 
dub-1.40.0/test/environment-variables/source/000077500000000000000000000000001477246567400211655ustar00rootroot00000000000000dub-1.40.0/test/environment-variables/source/app.d000066400000000000000000000005741477246567400221200ustar00rootroot00000000000000import std.stdio; import std.process; void main() { writeln("app.run: ", environment.get("VAR1", "")); writeln("app.run: ", environment.get("VAR2", "")); writeln("app.run: ", environment.get("VAR3", "")); writeln("app.run: ", environment.get("VAR4", "")); writeln("app.run: ", environment.get("VAR5", "")); writeln("app.run: ", environment.get("SYSENVVAREXPCHECK", "")); } dub-1.40.0/test/expected-issue1037-output000066400000000000000000000003221477246567400200730ustar00rootroot00000000000000Error Unresolvable dependencies to package gitcompatibledubpackage: b @DIR/b depends on gitcompatibledubpackage ~>1.0.2 issue1037-better-dependency-messages ~master depends on gitcompatibledubpackage 1.0.1 dub-1.40.0/test/feat663-search.sh000077500000000000000000000010411477246567400163270ustar00rootroot00000000000000#!/usr/bin/env bash . $(dirname "${BASH_SOURCE[0]}")/common.sh if ${DUB} search 2>/dev/null; then die $LINENO '`dub search` succeeded' fi if ${DUB} search nonexistent123456789package 2>/dev/null; then die $LINENO '`dub search nonexistent123456789package` succeeded' fi if ! OUTPUT=$(${DUB} search '"dub-registry"' -v 2>&1); then die $LINENO '`dub search "dub-registry"` failed' "$OUTPUT" fi if ! grep -q '^\s\sdub-registry (.*)\s'<<<"$OUTPUT"; then die $LINENO '`grep -q '"'"'^\s\sdub-registry (.*)\s'"'"'` failed' "$OUTPUT" fi dub-1.40.0/test/fetchzip.sh000077500000000000000000000050461477246567400155330ustar00rootroot00000000000000#!/usr/bin/env bash DIR=$(dirname "${BASH_SOURCE[0]}") . "$DIR"/common.sh PORT=$(getRandomPort) ${DUB} remove gitcompatibledubpackage --non-interactive 2>/dev/null || true ${DUB} build --single "$DIR"/test_registry.d "$DIR"/test_registry --folder="$DIR/issue1336-registry" --port=$PORT & PID=$! 
sleep 1 trap 'kill $PID 2>/dev/null || true' exit echo "Trying to download gitcompatibledubpackage (1.0.4)" timeout 1s ${DUB} fetch gitcompatibledubpackage@1.0.4 --skip-registry=all --registry=http://localhost:$PORT if [ $? -eq 124 ]; then die $LINENO 'Fetching from responsive registry should not time-out.' fi ${DUB} remove gitcompatibledubpackage@1.0.4 echo "Downloads should be retried when the zip is corrupted - gitcompatibledubpackage (1.0.3)" zipOut=$(! timeout 1s ${DUB} fetch gitcompatibledubpackage@1.0.3 --skip-registry=all --registry=http://localhost:$PORT 2>&1) rc=$? if ! zipCount=$(grep -Fc 'Failed to extract zip archive' <<<"$zipOut") || [ "$zipCount" -lt 3 ] ; then echo '========== +Output was ==========' >&2 echo "$zipOut" >&2 echo '========== -Output was ==========' >&2 die $LINENO 'DUB should have tried to download the zip archive multiple times.' elif [ $rc -eq 124 ]; then die $LINENO 'DUB timed out unexpectedly.' fi if ${DUB} remove gitcompatibledubpackage --non-interactive 2>/dev/null; then die $LINENO 'DUB should not have installed a broken package.' fi echo "HTTP status errors on downloads should be retried - gitcompatibledubpackage (1.0.2)" retryOut=$(! timeout 1s ${DUB} fetch gitcompatibledubpackage@1.0.2 --skip-registry=all --registry=http://localhost:$PORT --vverbose 2>&1) rc=$? if ! retryCount=$(echo "$retryOut" | grep -Fc 'Bad Gateway') || [ "$retryCount" -lt 3 ] ; then echo '========== +Output was ==========' >&2 echo "$retryOut" >&2 echo '========== -Output was ==========' >&2 die $LINENO "DUB should have retried download on server error multiple times, but only tried $retryCount times." elif [ $rc -eq 124 ]; then die $LINENO 'DUB timed out unexpectedly.' fi if ${DUB} remove gitcompatibledubpackage --non-interactive 2>/dev/null; then die $LINENO 'DUB should not have installed a package.' 
fi echo "HTTP status errors on downloads should retry with fallback mirror - gitcompatibledubpackage (1.0.2)" timeout 1s "$DUB" fetch gitcompatibledubpackage@1.0.2 --skip-registry=all --registry="http://localhost:$PORT http://localhost:$PORT/fallback" if [ $? -eq 124 ]; then die $LINENO 'Fetching from responsive registry should not time-out.' fi ${DUB} remove gitcompatibledubpackage@1.0.2 dub-1.40.0/test/fetchzip.sh.min_frontend000066400000000000000000000000061477246567400202000ustar00rootroot000000000000002.077 dub-1.40.0/test/filesystem-version-with-buildinfo.sh000077500000000000000000000006731477246567400225110ustar00rootroot00000000000000#!/usr/bin/env bash DIR=$(dirname "${BASH_SOURCE[0]}") . "$DIR"/common.sh ${DUB} remove fs-json-dubpackage --non-interactive 2>/dev/null || true echo "Trying to get fs-json-dubpackage (1.0.7)" ${DUB} fetch fs-json-dubpackage@1.0.7 --skip-registry=all --registry=file://"$DIR"/filesystem-version-with-buildinfo if ! ${DUB} remove fs-json-dubpackage@1.0.7 2>/dev/null; then die $LINENO 'DUB did not install package from file system.' fi dub-1.40.0/test/filesystem-version-with-buildinfo/000077500000000000000000000000001477246567400221445ustar00rootroot00000000000000dub-1.40.0/test/filesystem-version-with-buildinfo/.no_build000066400000000000000000000000001477246567400237260ustar00rootroot00000000000000dub-1.40.0/test/filesystem-version-with-buildinfo/fs-json-dubpackage-1.0.7+build-9-9-9.zip000066400000000000000000000013201477246567400305450ustar00rootroot00000000000000PKMfs-json-dubpackage-1.0.7/PK M9F88!fs-json-dubpackage-1.0.7/dub.json{ "name": "fs-json-dubpackage", "sourcePaths": ["."] }PK!M?c'VX fs-json-dubpackage-1.0.7/hello.d 0 |E.E'~8ZjI[n9™#1F0մn9uXKC_o&j5.8xPK?M$fs-json-dubpackage-1.0.7/ #f#fk6PK? 
M9F88!$ 7fs-json-dubpackage-1.0.7/dub.json A{77PK?!M?c'VX $ fs-json-dubpackage-1.0.7/hello.d G28787PKPB(9e3972be4c63790c32257220f40c0af7dc41bec5dub-1.40.0/test/git-dependency/000077500000000000000000000000001477246567400162525ustar00rootroot00000000000000dub-1.40.0/test/git-dependency/dub.json000066400000000000000000000003501477246567400177150ustar00rootroot00000000000000{ "name": "git-dependency", "dependencies": { "gitcompatibledubpackage": { "repository": "git+https://github.com/dlang-community/gitcompatibledubpackage.git", "version": "ccb31bf6a655437176ec02e04c2305a8c7c90d67" } } } dub-1.40.0/test/git-dependency/src/000077500000000000000000000000001477246567400170415ustar00rootroot00000000000000dub-1.40.0/test/git-dependency/src/app.d000066400000000000000000000001361477246567400177660ustar00rootroot00000000000000import gitcompatibledubpackage.subdir.file; void main() { assert(!hasTheWorldExploded()); } dub-1.40.0/test/help.sh000077500000000000000000000017111477246567400146420ustar00rootroot00000000000000#!/usr/bin/env bash . $(dirname "${BASH_SOURCE[0]}")/common.sh ### It shows the general help message if ! { ${DUB} help | grep "Manages the DUB project in the current directory."; } then die $LINENO 'DUB did not print the default help message, with the `help` command.' fi if ! { ${DUB} -h | grep "Manages the DUB project in the current directory."; } then die $LINENO 'DUB did not print the default help message, with the `-h` argument.' fi if ! { ${DUB} --help | grep "Manages the DUB project in the current directory."; } then die $LINENO 'DUB did not print the default help message, with the `--help` argument.' fi ### It shows the build command help if ! { ${DUB} build -h | grep "Builds a package"; } then die $LINENO 'DUB did not print the build help message, with the `-h` argument.' fi if ! { ${DUB} build --help | grep "Builds a package"; } then die $LINENO 'DUB did not print the build help message, with the `--help` argument.' 
fi dub-1.40.0/test/ignore-hidden-1/000077500000000000000000000000001477246567400162255ustar00rootroot00000000000000dub-1.40.0/test/ignore-hidden-1/.gitignore000066400000000000000000000000461477246567400202150ustar00rootroot00000000000000.dub docs.json __dummy.html *.o *.obj dub-1.40.0/test/ignore-hidden-1/.no_run000066400000000000000000000000001477246567400175140ustar00rootroot00000000000000dub-1.40.0/test/ignore-hidden-1/.no_test000066400000000000000000000000001477246567400176670ustar00rootroot00000000000000dub-1.40.0/test/ignore-hidden-1/dub.json000066400000000000000000000000401477246567400176640ustar00rootroot00000000000000{ "name": "ignore-hidden-1", } dub-1.40.0/test/ignore-hidden-1/source/000077500000000000000000000000001477246567400175255ustar00rootroot00000000000000dub-1.40.0/test/ignore-hidden-1/source/.hidden.d000066400000000000000000000002131477246567400211770ustar00rootroot00000000000000// need module declarations as '.' is not allowed in module names module hidden; static assert(0, "Dub should not compile "~__FILE__~"."); dub-1.40.0/test/ignore-hidden-1/source/app.d000066400000000000000000000000201477246567400204420ustar00rootroot00000000000000void main() { } dub-1.40.0/test/ignore-hidden-2/000077500000000000000000000000001477246567400162265ustar00rootroot00000000000000dub-1.40.0/test/ignore-hidden-2/.gitignore000066400000000000000000000000461477246567400202160ustar00rootroot00000000000000.dub docs.json __dummy.html *.o *.obj dub-1.40.0/test/ignore-hidden-2/.no_run000066400000000000000000000000001477246567400175150ustar00rootroot00000000000000dub-1.40.0/test/ignore-hidden-2/.no_test000066400000000000000000000000001477246567400176700ustar00rootroot00000000000000dub-1.40.0/test/ignore-hidden-2/dub.json000066400000000000000000000001061477246567400176700ustar00rootroot00000000000000{ "name": "ignore-hidden-2", "sourceFiles": ["source/.hidden.d"], } 
dub-1.40.0/test/ignore-hidden-2/source/000077500000000000000000000000001477246567400175265ustar00rootroot00000000000000dub-1.40.0/test/ignore-hidden-2/source/.hidden.d000066400000000000000000000000171477246567400212020ustar00rootroot00000000000000module hidden; dub-1.40.0/test/ignore-hidden-2/source/app.d000066400000000000000000000000401477246567400204450ustar00rootroot00000000000000import hidden; void main() { } dub-1.40.0/test/ignore-useless-arch-switch/000077500000000000000000000000001477246567400205315ustar00rootroot00000000000000dub-1.40.0/test/ignore-useless-arch-switch/.no_test000066400000000000000000000000001477246567400221730ustar00rootroot00000000000000dub-1.40.0/test/ignore-useless-arch-switch/dub.sdl000066400000000000000000000000721477246567400220060ustar00rootroot00000000000000name "ignore-useless-arch-switch" targetType "executable" dub-1.40.0/test/ignore-useless-arch-switch/source/000077500000000000000000000000001477246567400220315ustar00rootroot00000000000000dub-1.40.0/test/ignore-useless-arch-switch/source/app.d000066400000000000000000000016741477246567400227660ustar00rootroot00000000000000import std.json; import std.path; import std.process; import std.stdio; string getCacheFile (in string[] program) { auto p = execute(program); with (p) { if (status != 0) { assert(false, "Failed to invoke dub describe: " ~ output); } return output.parseJSON["targets"][0]["cacheArtifactPath"].str; } } void main() { version (X86_64) string archArg = "x86_64"; else version (X86) string archArg = "x86"; else { string archArg; writeln("Skipping because of unsupported architecture"); return; } const describeProgram = [ environment["DUB"], "describe", "--compiler=" ~ environment["DC"], "--root=" ~ __FILE_FULL_PATH__.dirName.dirName, ]; immutable plainCacheFile = describeProgram.getCacheFile; const describeWithArch = describeProgram ~ [ "--arch=" ~ archArg ]; immutable archCacheFile = describeWithArch.getCacheFile; assert(plainCacheFile == archCacheFile, "--arch 
shouldn't have modified the cache file"); } dub-1.40.0/test/injected-from-dependency/000077500000000000000000000000001477246567400202155ustar00rootroot00000000000000dub-1.40.0/test/injected-from-dependency/.no_test000066400000000000000000000000001477246567400216570ustar00rootroot00000000000000dub-1.40.0/test/injected-from-dependency/ahook.d000066400000000000000000000001171477246567400214620ustar00rootroot00000000000000module ahook; shared static this() { import vars; valueStoredHere = 1337; } dub-1.40.0/test/injected-from-dependency/dub.json000066400000000000000000000005211477246567400216600ustar00rootroot00000000000000{ "description": "Test if source file expected to be injected was into binary", "name": "injected-from-dependency", "targetType": "executable", "dependencies": { ":toload": "*" }, "subPackages": [ { "name": "toload", "sourcePaths": ["toload"], "importPaths": ["toload"], "injectSourceFiles": ["ahook.d"] } ] } dub-1.40.0/test/injected-from-dependency/source/000077500000000000000000000000001477246567400215155ustar00rootroot00000000000000dub-1.40.0/test/injected-from-dependency/source/entry.d000066400000000000000000000001001477246567400230120ustar00rootroot00000000000000void main() { import vars; assert(valueStoredHere == 1337); } dub-1.40.0/test/injected-from-dependency/toload/000077500000000000000000000000001477246567400214775ustar00rootroot00000000000000dub-1.40.0/test/injected-from-dependency/toload/vars.d000066400000000000000000000000431477246567400226140ustar00rootroot00000000000000module vars; int valueStoredHere; dub-1.40.0/test/interactive-remove.sh000077500000000000000000000030131477246567400175170ustar00rootroot00000000000000#!/usr/bin/env bash . $(dirname "${BASH_SOURCE[0]}")/common.sh # This test messes with the user's package directory # Hence it's a pretty bad test, but we need it. # Ideally, we should not have this run by default / run it in a container. 
# In the meantime, in order to make it pass on developer's machines, # we need to nuke every `dub` version in the user cache... $DUB remove dub -n || true DUBPKGPATH=${DPATH+"$DPATH/dub/packages/dub"} DUBPKGPATH=${DUBPKGPATH:-"$HOME/.dub/packages/dub"} $DUB fetch dub@1.9.0 && [ -d $DUBPKGPATH/1.9.0/dub ] $DUB fetch dub@1.10.0 && [ -d $DUBPKGPATH/1.10.0/dub ] echo 1 | $DUB remove dub | tr -d '\n' | grep --ignore-case 'select.*1\.9\.0.*1\.10\.0.*' if [ -d $DUBPKGPATH/1.9.0/dub ]; then die $LINENO 'Failed to remove dub-1.9.0' fi $DUB fetch dub@1.9.0 && [ -d $DUBPKGPATH/1.9.0/dub ] # EOF aborts remove echo -xn '' | $DUB remove dub if [ ! -d $DUBPKGPATH/1.9.0/dub ] || [ ! -d $DUBPKGPATH/1.10.0/dub ]; then die $LINENO 'Aborted dub still removed a package' fi # validates input echo -e 'abc\n4\n-1\n3' | $DUB remove dub if [ -d $DUBPKGPATH/1.9.0/dub ] || [ -d $DUBPKGPATH/1.10.0/dub ]; then die $LINENO 'Failed to remove all version of dub' fi $DUB fetch dub@1.9.0 && [ -d $DUBPKGPATH/1.9.0/dub ] $DUB fetch dub@1.10.0 && [ -d $DUBPKGPATH/1.10.0/dub ] # is non-interactive with a $DUB remove dub@1.9.0 $DUB remove dub@1.10.0 if [ -d $DUBPKGPATH/1.9.0/dub ] || [ -d $DUBPKGPATH/1.10.0/dub ]; then die $LINENO 'Failed to non-interactively remove specified versions' fi dub-1.40.0/test/issue1003-check-empty-ld-flags.sh000077500000000000000000000002261477246567400212440ustar00rootroot00000000000000#!/usr/bin/env bash . 
$(dirname "${BASH_SOURCE[0]}")/common.sh cd ${CURR_DIR}/issue1003-check-empty-ld-flags ${DUB} build --compiler=${DC} --force dub-1.40.0/test/issue1003-check-empty-ld-flags/000077500000000000000000000000001477246567400207055ustar00rootroot00000000000000dub-1.40.0/test/issue1003-check-empty-ld-flags/dub.json000066400000000000000000000002441477246567400223520ustar00rootroot00000000000000{ "authors": [ "--" ], "copyright": "Copyright © 2019, --", "description": "--", "license": "--", "name": "issue1003-empty-ld-flags", "lflags": [""] } dub-1.40.0/test/issue1003-check-empty-ld-flags/source/000077500000000000000000000000001477246567400222055ustar00rootroot00000000000000dub-1.40.0/test/issue1003-check-empty-ld-flags/source/app.d000066400000000000000000000001311477246567400231250ustar00rootroot00000000000000import std.stdio; void main() { writeln("Edit source/app.d to start your project."); } dub-1.40.0/test/issue1004-override-config.sh000077500000000000000000000002361477246567400204300ustar00rootroot00000000000000#!/usr/bin/env bash . $(dirname "${BASH_SOURCE[0]}")/common.sh cd ${CURR_DIR}/issue1004-override-config ${DUB} build --bare main --override-config a/success dub-1.40.0/test/issue1004-override-config/000077500000000000000000000000001477246567400200705ustar00rootroot00000000000000dub-1.40.0/test/issue1004-override-config/.no_build000066400000000000000000000000001477246567400216520ustar00rootroot00000000000000dub-1.40.0/test/issue1004-override-config/a/000077500000000000000000000000001477246567400203105ustar00rootroot00000000000000dub-1.40.0/test/issue1004-override-config/a/a.d000066400000000000000000000000331477246567400206710ustar00rootroot00000000000000module a; void test() { } dub-1.40.0/test/issue1004-override-config/a/dub.sdl000066400000000000000000000001441477246567400215650ustar00rootroot00000000000000name "a" configuration "fail" { } configuration "success" { sourceFiles "a.d" importPaths "." 
} dub-1.40.0/test/issue1004-override-config/main/000077500000000000000000000000001477246567400210145ustar00rootroot00000000000000dub-1.40.0/test/issue1004-override-config/main/dub.sdl000066400000000000000000000000471477246567400222730ustar00rootroot00000000000000name "main" dependency "a" version="*" dub-1.40.0/test/issue1004-override-config/main/source/000077500000000000000000000000001477246567400223145ustar00rootroot00000000000000dub-1.40.0/test/issue1004-override-config/main/source/main.d000066400000000000000000000000441477246567400234030ustar00rootroot00000000000000import a; void main() { test(); } dub-1.40.0/test/issue1005-configuration-resolution.sh000077500000000000000000000002131477246567400224120ustar00rootroot00000000000000#!/usr/bin/env bash . $(dirname "${BASH_SOURCE[0]}")/common.sh cd ${CURR_DIR}/issue1005-configuration-resolution ${DUB} build --bare main dub-1.40.0/test/issue1005-configuration-resolution/000077500000000000000000000000001477246567400220575ustar00rootroot00000000000000dub-1.40.0/test/issue1005-configuration-resolution/.no_build000066400000000000000000000000001477246567400236410ustar00rootroot00000000000000dub-1.40.0/test/issue1005-configuration-resolution/a/000077500000000000000000000000001477246567400222775ustar00rootroot00000000000000dub-1.40.0/test/issue1005-configuration-resolution/a/dub.sdl000066400000000000000000000002061477246567400235530ustar00rootroot00000000000000name "a" dependency "b" version="*" configuration "x" { subConfiguration "b" "x" } configuration "y" { subConfiguration "b" "y" } dub-1.40.0/test/issue1005-configuration-resolution/b/000077500000000000000000000000001477246567400223005ustar00rootroot00000000000000dub-1.40.0/test/issue1005-configuration-resolution/b/dub.sdl000066400000000000000000000000661477246567400235600ustar00rootroot00000000000000name "b" configuration "x" { } configuration "y" { 
}dub-1.40.0/test/issue1005-configuration-resolution/b/source/000077500000000000000000000000001477246567400236005ustar00rootroot00000000000000dub-1.40.0/test/issue1005-configuration-resolution/b/source/b.d000066400000000000000000000000311477246567400241600ustar00rootroot00000000000000module b; void foo() {} dub-1.40.0/test/issue1005-configuration-resolution/c/000077500000000000000000000000001477246567400223015ustar00rootroot00000000000000dub-1.40.0/test/issue1005-configuration-resolution/c/dub.sdl000066400000000000000000000000441477246567400235550ustar00rootroot00000000000000name "c" dependency "a" version="*" dub-1.40.0/test/issue1005-configuration-resolution/main/000077500000000000000000000000001477246567400230035ustar00rootroot00000000000000dub-1.40.0/test/issue1005-configuration-resolution/main/dub.sdl000066400000000000000000000001351477246567400242600ustar00rootroot00000000000000name "main" dependency "b" version="*" dependency "c" version="*" subConfiguration "b" "y" dub-1.40.0/test/issue1005-configuration-resolution/main/source/000077500000000000000000000000001477246567400243035ustar00rootroot00000000000000dub-1.40.0/test/issue1005-configuration-resolution/main/source/app.d000066400000000000000000000000431477246567400252250ustar00rootroot00000000000000import b; void main() { foo(); } dub-1.40.0/test/issue1024-selective-upgrade.sh000077500000000000000000000007551477246567400207660ustar00rootroot00000000000000#!/usr/bin/env bash . $(dirname "${BASH_SOURCE[0]}")/common.sh cd ${CURR_DIR}/issue1024-selective-upgrade echo "{\"fileVersion\": 1,\"versions\": {\"a\": \"1.0.0\", \"b\": \"1.0.0\"}}" > main/dub.selections.json $DUB upgrade --bare --root=main a if ! grep -c -e "\"a\": \"1.0.1\"" main/dub.selections.json; then die $LINENO "Specified dependency was not upgraded." fi if grep -c -e "\"b\": \"1.0.1\"" main/dub.selections.json; then die $LINENO "Non-specified dependency got upgraded." 
fi dub-1.40.0/test/issue1024-selective-upgrade/000077500000000000000000000000001477246567400204205ustar00rootroot00000000000000dub-1.40.0/test/issue1024-selective-upgrade/.no_build000066400000000000000000000000001477246567400222020ustar00rootroot00000000000000dub-1.40.0/test/issue1024-selective-upgrade/a-1.0.0/000077500000000000000000000000001477246567400212725ustar00rootroot00000000000000dub-1.40.0/test/issue1024-selective-upgrade/a-1.0.0/dub.sdl000066400000000000000000000000311477246567400225420ustar00rootroot00000000000000name "a" version "1.0.0" dub-1.40.0/test/issue1024-selective-upgrade/a-1.0.1/000077500000000000000000000000001477246567400212735ustar00rootroot00000000000000dub-1.40.0/test/issue1024-selective-upgrade/a-1.0.1/dub.sdl000066400000000000000000000000311477246567400225430ustar00rootroot00000000000000name "a" version "1.0.1" dub-1.40.0/test/issue1024-selective-upgrade/b-1.0.0/000077500000000000000000000000001477246567400212735ustar00rootroot00000000000000dub-1.40.0/test/issue1024-selective-upgrade/b-1.0.0/dub.sdl000066400000000000000000000000311477246567400225430ustar00rootroot00000000000000name "b" version "1.0.0" dub-1.40.0/test/issue1024-selective-upgrade/b-1.0.1/000077500000000000000000000000001477246567400212745ustar00rootroot00000000000000dub-1.40.0/test/issue1024-selective-upgrade/b-1.0.1/dub.sdl000066400000000000000000000000311477246567400225440ustar00rootroot00000000000000name "b" version "1.0.1" dub-1.40.0/test/issue1024-selective-upgrade/main/000077500000000000000000000000001477246567400213445ustar00rootroot00000000000000dub-1.40.0/test/issue1024-selective-upgrade/main/dub.sdl000066400000000000000000000001161477246567400226200ustar00rootroot00000000000000name "test" dependency "a" version="~>1.0.0" dependency "b" version="~>1.0.0" dub-1.40.0/test/issue103-single-file-package-json.d000066400000000000000000000002111477246567400216220ustar00rootroot00000000000000/+ dub.json: { "name": "single-file-test" } +/ module hello; void main() { import 
std.stdio : writeln; writeln("Hello, World!"); } dub-1.40.0/test/issue103-single-file-package-no-ext000077500000000000000000000003361477246567400216540ustar00rootroot00000000000000#!../bin/dub /+ dub.sdl: name "single-file-test" +/ module hello; void main(string[] args) { import std.stdio : writeln; assert(args.length == 4 && args[1 .. 4] == ["foo", "--", "bar"]); writeln("Hello, World!"); } dub-1.40.0/test/issue103-single-file-package-w-dep.d000066400000000000000000000002371477246567400216750ustar00rootroot00000000000000/+ dub.sdl: name "single-file-test" dependency "sourcelib-simple" path="1-sourceLib-simple" +/ module hello; import sourcelib.app; void main() { entry(); } dub-1.40.0/test/issue103-single-file-package.d000077500000000000000000000003361477246567400206660ustar00rootroot00000000000000#!../bin/dub /+ dub.sdl: name "single-file-test" +/ module hello; void main(string[] args) { import std.stdio : writeln; assert(args.length == 4 && args[1 .. 4] == ["foo", "--", "bar"]); writeln("Hello, World!"); } dub-1.40.0/test/issue103-single-file-package.sh000077500000000000000000000016661477246567400210640ustar00rootroot00000000000000#!/usr/bin/env bash . $(dirname "${BASH_SOURCE[0]}")/common.sh cd ${CURR_DIR} rm -f single-file-test ${DUB} run --single issue103-single-file-package-json.d --compiler=${DC} if [ ! -f single-file-test ]; then die $LINENO 'Normal invocation did not produce a binary in the current directory' fi rm single-file-test ./issue103-single-file-package.d foo -- bar ${DUB} ./issue103-single-file-package foo -- bar ./issue103-single-file-package-no-ext foo -- bar ${DUB} issue103-single-file-package-w-dep.d if [ -f single-file-test ]; then die $LINENO 'Shebang invocation produced binary in current directory' fi if ! { ${DUB} run --single issue103-single-file-package-w-dep.d --temp-build 2>&1 || true; } | grep -cF "To force a rebuild"; then echo "Invocation triggered unnecessary rebuild." 
exit 1 fi if ${DUB} "issue103-single-file-package-error.d" 2> /dev/null; then echo "Invalid package comment syntax did not trigger an error." exit 1 fi dub-1.40.0/test/issue1037-better-dependency-messages.sh000077500000000000000000000011031477246567400225540ustar00rootroot00000000000000#!/bin/bash set -e -o pipefail cd ${CURR_DIR}/issue1037-better-dependency-messages temp_file=$(mktemp $(basename $0).XXXXXX) temp_file2=$(mktemp $(basename $0).XXXXXX) expected_file="$CURR_DIR/expected-issue1037-output" function cleanup { rm -f $temp_file rm -f $temp_file2 } trap cleanup EXIT sed "s#DIR#$CURR_DIR/issue1037-better-dependency-messages#" "$expected_file" > "$temp_file2" $DUB upgrade 2>$temp_file && exit 1 # dub upgrade should fail if ! diff "$temp_file2" "$temp_file"; then die $LINENO 'output not containing conflict information' fi exit 0 dub-1.40.0/test/issue1037-better-dependency-messages/000077500000000000000000000000001477246567400222225ustar00rootroot00000000000000dub-1.40.0/test/issue1037-better-dependency-messages/.no_build000066400000000000000000000000001477246567400240040ustar00rootroot00000000000000dub-1.40.0/test/issue1037-better-dependency-messages/.no_run000066400000000000000000000000001477246567400235110ustar00rootroot00000000000000dub-1.40.0/test/issue1037-better-dependency-messages/.no_test000066400000000000000000000000001477246567400236640ustar00rootroot00000000000000dub-1.40.0/test/issue1037-better-dependency-messages/b/000077500000000000000000000000001477246567400224435ustar00rootroot00000000000000dub-1.40.0/test/issue1037-better-dependency-messages/b/dub.json000066400000000000000000000001161477246567400241060ustar00rootroot00000000000000{ "name": "b", "dependencies": { "gitcompatibledubpackage": "~>1.0.2" } }dub-1.40.0/test/issue1037-better-dependency-messages/dub.json000066400000000000000000000002371477246567400236710ustar00rootroot00000000000000{ "name": "issue1037-better-dependency-messages", "dependencies": { "gitcompatibledubpackage": "1.0.1", 
"b": { "path": "b", "version": "*" } } }dub-1.40.0/test/issue1040-run-with-ver.sh000077500000000000000000000020441477246567400177140ustar00rootroot00000000000000#!/usr/bin/env bash . $(dirname "${BASH_SOURCE[0]}")/common.sh if ! [ -d ${CURR_DIR}/issue1040-tmpdir ]; then mkdir ${CURR_DIR}/issue1040-tmpdir touch ${CURR_DIR}/issue1040-tmpdir/.no_build touch ${CURR_DIR}/issue1040-tmpdir/.no_run touch ${CURR_DIR}/issue1040-tmpdir/.no_test function cleanup { rm -rf ${CURR_DIR}/issue1040-tmpdir } trap cleanup EXIT fi cd ${CURR_DIR}/issue1040-tmpdir $DUB fetch dub@1.27.0 --cache=local $DUB fetch dub@1.28.0 --cache=local $DUB fetch dub@1.29.0 --cache=local if { $DUB fetch dub@1.28.0 --cache=local || true; } | grep -cF 'Fetching' > /dev/null; then die $LINENO 'Test for doubly fetch of the specified version has failed.' fi if ! { $DUB run dub -q --cache=local -- --version || true; } | grep -cF 'DUB version 1.29.0' > /dev/null; then die $LINENO 'Test for selection of the latest fetched version has failed.' fi if ! { $DUB run dub@1.28.0 -q --cache=local -- --version || true; } | grep -cF 'DUB version 1.28.0' > /dev/null; then die $LINENO 'Test for selection of the specified version has failed.' fi dub-1.40.0/test/issue1053-extra-files-visuald.sh000077500000000000000000000014741477246567400212470ustar00rootroot00000000000000#!/usr/bin/env bash . $(dirname "${BASH_SOURCE[0]}")/common.sh cd "${CURR_DIR}/issue1053-extra-files-visuald" || die "Could not cd." "$DUB" generate visuald if [ `grep -c -e "saturate.vert" .dub/extra_files.visualdproj` -ne 1 ]; then die $LINENO 'Regression of issue #1053.' fi if [ `grep -c -e "warp.geom" .dub/extra_files.visualdproj` -ne 1 ]; then die $LINENO 'Regression of issue #1053.' fi if [ `grep -c -e "LICENSE.txt" .dub/extra_files.visualdproj` -ne 1 ]; then die $LINENO 'Regression of issue #1053.' fi if [ `grep -c -e "README.txt" .dub/extra_files.visualdproj` -ne 1 ]; then die $LINENO 'Regression of issue #1053.' 
fi if [ `grep -e "README.txt" .dub/extra_files.visualdproj | grep -c -e 'copy /Y $(InputPath) $(TargetDir)'` -ne 1 ]; then die $LINENO 'Copying of copyFiles seems broken for visuald.' fi dub-1.40.0/test/issue1053-extra-files-visuald/000077500000000000000000000000001477246567400207025ustar00rootroot00000000000000dub-1.40.0/test/issue1053-extra-files-visuald/dub.json000066400000000000000000000003071477246567400223470ustar00rootroot00000000000000{ "name": "extra_files", "targetType": "executable", "extraDependencyFiles": [ "shaders/*" ], "copyFiles": [ "text/LICENSE.txt", "text/README.txt" ] } dub-1.40.0/test/issue1053-extra-files-visuald/shaders/000077500000000000000000000000001477246567400223335ustar00rootroot00000000000000dub-1.40.0/test/issue1053-extra-files-visuald/shaders/saturate.vert000066400000000000000000000000001477246567400250530ustar00rootroot00000000000000dub-1.40.0/test/issue1053-extra-files-visuald/shaders/warp.geom000066400000000000000000000000001477246567400241430ustar00rootroot00000000000000dub-1.40.0/test/issue1053-extra-files-visuald/source/000077500000000000000000000000001477246567400222025ustar00rootroot00000000000000dub-1.40.0/test/issue1053-extra-files-visuald/source/app.d000066400000000000000000000000171477246567400231250ustar00rootroot00000000000000void main() {} dub-1.40.0/test/issue1053-extra-files-visuald/text/000077500000000000000000000000001477246567400216665ustar00rootroot00000000000000dub-1.40.0/test/issue1053-extra-files-visuald/text/LICENSE.txt000066400000000000000000000000001477246567400234770ustar00rootroot00000000000000dub-1.40.0/test/issue1053-extra-files-visuald/text/README.txt000066400000000000000000000000001477246567400233520ustar00rootroot00000000000000dub-1.40.0/test/issue1070-init-mistakes-dirs-as-files.sh000077500000000000000000000002621477246567400225710ustar00rootroot00000000000000#!/bin/bash cd ${CURR_DIR}/issue1070-init-mistakes-dirs-as-files ${DUB} init 2>&1 | grep -c "The target directory already contains a 
'source/' directory. Aborting." > /dev/nulldub-1.40.0/test/issue1070-init-mistakes-dirs-as-files/000077500000000000000000000000001477246567400222325ustar00rootroot00000000000000dub-1.40.0/test/issue1070-init-mistakes-dirs-as-files/.no_build000066400000000000000000000000001477246567400240140ustar00rootroot00000000000000dub-1.40.0/test/issue1070-init-mistakes-dirs-as-files/.no_run000066400000000000000000000000001477246567400235210ustar00rootroot00000000000000dub-1.40.0/test/issue1070-init-mistakes-dirs-as-files/.no_test000066400000000000000000000000001477246567400236740ustar00rootroot00000000000000dub-1.40.0/test/issue1070-init-mistakes-dirs-as-files/source/000077500000000000000000000000001477246567400235325ustar00rootroot00000000000000dub-1.40.0/test/issue1070-init-mistakes-dirs-as-files/source/.empty000066400000000000000000000000001477246567400246570ustar00rootroot00000000000000dub-1.40.0/test/issue1091-bogus-rebuild.sh000077500000000000000000000004361477246567400201210ustar00rootroot00000000000000#!/usr/bin/env bash . $(dirname "${BASH_SOURCE[0]}")/common.sh cd ${CURR_DIR}/1-exec-simple rm -f dub.selections.json ${DUB} clean ${DUB} build --compiler=${DC} 2>&1 | grep -e 'building configuration' -c ${DUB} build --compiler=${DC} 2>&1 | { ! grep -e 'building configuration' -c; } dub-1.40.0/test/issue1117-extra-dependency-files.sh000077500000000000000000000010241477246567400217060ustar00rootroot00000000000000#!/usr/bin/env bash . $(dirname "${BASH_SOURCE[0]}")/common.sh cd ${CURR_DIR}/issue1117-extra-dependency-files # Ensure the test can be re-run ${DUB} clean if ! { ${DUB} build 2>&1 || true; } | grep -cF 'building configuration'; then die $LINENO 'Build was not executed.' fi if ! { ${DUB} build 2>&1 || true; } | grep -cF 'is up to date'; then die $LINENO 'Build was executed.' fi touch ./dependency.txt if ! { ${DUB} build 2>&1 || true; } | grep -cF 'building configuration'; then die $LINENO 'Build was not executed.' 
fi dub-1.40.0/test/issue1117-extra-dependency-files/000077500000000000000000000000001477246567400213525ustar00rootroot00000000000000dub-1.40.0/test/issue1117-extra-dependency-files/.gitignore000066400000000000000000000000701477246567400233370ustar00rootroot00000000000000.dub docs.json __dummy.html docs/ *.exe *.o *.obj *.lst dub-1.40.0/test/issue1117-extra-dependency-files/.no_build000066400000000000000000000000001477246567400231340ustar00rootroot00000000000000dub-1.40.0/test/issue1117-extra-dependency-files/dependency.txt000066400000000000000000000000051477246567400242240ustar00rootroot00000000000000Hellodub-1.40.0/test/issue1117-extra-dependency-files/dub.json000066400000000000000000000001001477246567400230060ustar00rootroot00000000000000{ "name": "test", "extraDependencyFiles": ["dependency.txt"] }dub-1.40.0/test/issue1117-extra-dependency-files/source/000077500000000000000000000000001477246567400226525ustar00rootroot00000000000000dub-1.40.0/test/issue1117-extra-dependency-files/source/app.d000066400000000000000000000000151477246567400235730ustar00rootroot00000000000000void main(){}dub-1.40.0/test/issue1136-temp-copy-files.sh000077500000000000000000000001671477246567400203740ustar00rootroot00000000000000#!/usr/bin/env bash . 
$(dirname "${BASH_SOURCE[0]}")/common.sh cd ${CURR_DIR}/issue1136-temp-copy-files "$DUB" app.d dub-1.40.0/test/issue1136-temp-copy-files/000077500000000000000000000000001477246567400200315ustar00rootroot00000000000000dub-1.40.0/test/issue1136-temp-copy-files/.no_build000066400000000000000000000000001477246567400216130ustar00rootroot00000000000000dub-1.40.0/test/issue1136-temp-copy-files/app.d000066400000000000000000000004361477246567400207610ustar00rootroot00000000000000/+ dub.sdl: name "app" dependency "mylib" path="./mylib" +/ import std.exception: enforce; import std.file: exists, thisExePath; import std.path: dirName, buildPath; void main() { string filePath = buildPath(thisExePath.dirName, "helloworld.txt"); enforce(filePath.exists); }dub-1.40.0/test/issue1136-temp-copy-files/mylib/000077500000000000000000000000001477246567400211455ustar00rootroot00000000000000dub-1.40.0/test/issue1136-temp-copy-files/mylib/dub.sdl000066400000000000000000000000731477246567400224230ustar00rootroot00000000000000name "mylib" copyFiles "./helloworld.txt" targetType "none"dub-1.40.0/test/issue1136-temp-copy-files/mylib/helloworld.txt000066400000000000000000000000141477246567400240540ustar00rootroot00000000000000hello world!dub-1.40.0/test/issue1158-stdin-for-single-files.sh000077500000000000000000000003731477246567400216460ustar00rootroot00000000000000#!/usr/bin/env bash . $(dirname "${BASH_SOURCE[0]}")/common.sh cd ${CURR_DIR}/issue1158-stdin-for-single-files if ! { cat stdin.d | ${DUB} - --value=v 2>&1 || true; } | grep -cF '["--value=v"]'; then die $LINENO 'Stdin for single files failed.' 
fidub-1.40.0/test/issue1158-stdin-for-single-files/000077500000000000000000000000001477246567400213045ustar00rootroot00000000000000dub-1.40.0/test/issue1158-stdin-for-single-files/.no_build000066400000000000000000000000001477246567400230660ustar00rootroot00000000000000dub-1.40.0/test/issue1158-stdin-for-single-files/stdin.d000066400000000000000000000001541477246567400225720ustar00rootroot00000000000000/+ dub.sdl: name "hello" +/ void main(string[] args) { import std.stdio : writeln; writeln(args[1..$]); }dub-1.40.0/test/issue1180-local-cache-broken.sh000077500000000000000000000014401477246567400207620ustar00rootroot00000000000000#!/usr/bin/env bash DIR=$(dirname "${BASH_SOURCE[0]}") . "$DIR"/common.sh PORT=$(getRandomPort) "$DUB" remove maven-dubpackage --root="$DIR/issue1180-local-cache-broken" --non-interactive 2>/dev/null || true "$DUB" build --single "$DIR"/test_registry.d "$DIR"/test_registry --folder="$DIR/issue1416-maven-repo-pkg-supplier" --port=$PORT & PID=$! sleep 1 trap 'kill $PID 2>/dev/null || true' exit echo "Trying to download maven-dubpackage (1.0.5)" "$DUB" upgrade --root="$DIR/issue1180-local-cache-broken" --cache=local --skip-registry=all --registry=mvn+http://localhost:$PORT/maven/release/dubpackages if ! "$DUB" remove maven-dubpackage@1.0.5 --root="$DIR/issue1180-local-cache-broken" --non-interactive 2>/dev/null; then die $LINENO 'DUB did not install package from maven registry.' 
fi dub-1.40.0/test/issue1180-local-cache-broken.sh.min_frontend000066400000000000000000000000061477246567400234350ustar00rootroot000000000000002.077 dub-1.40.0/test/issue1180-local-cache-broken/000077500000000000000000000000001477246567400204245ustar00rootroot00000000000000dub-1.40.0/test/issue1180-local-cache-broken/.gitignore000066400000000000000000000000231477246567400224070ustar00rootroot00000000000000test *.o *.exe .dubdub-1.40.0/test/issue1180-local-cache-broken/.no_build000066400000000000000000000000001477246567400222060ustar00rootroot00000000000000dub-1.40.0/test/issue1180-local-cache-broken/dub.json000066400000000000000000000001111477246567400220620ustar00rootroot00000000000000{ "name": "test", "dependencies": { "maven-dubpackage": "1.0.5" } }dub-1.40.0/test/issue1180-local-cache-broken/source/000077500000000000000000000000001477246567400217245ustar00rootroot00000000000000dub-1.40.0/test/issue1180-local-cache-broken/source/app.d000066400000000000000000000000151477246567400226450ustar00rootroot00000000000000void main(){}dub-1.40.0/test/issue1194-warn-wrong-subconfig.sh000077500000000000000000000020401477246567400214310ustar00rootroot00000000000000#!/usr/bin/env bash set -e OUTPUT=`${DUB} build --root ${CURR_DIR}/issue1194-warn-wrong-subconfig 2>&1 || true` trap 'printf "%s" "Failing received output:\n$OUTPUT" | hexdump -C' ERR # make sure the proper errors occur in the output echo "$OUTPUT" | fgrep -c 'sub configuration directive "bar" -> [baz] references a package that is not specified as a dependency' > /dev/null echo $OUTPUT | fgrep -c 'sub configuration directive "staticlib-simple" -> [foo] references a configuration that does not exist' > /dev/null ! echo $OUTPUT | fgrep -c 'sub configuration directive "sourcelib-simple" -> [library] references a package that is not specified as a dependency' > /dev/null ! 
echo $OUTPUT | fgrep -c 'sub configuration directive "sourcelib-simple" -> [library] references a configuration that does not exist' > /dev/null # make sure no bogs warnings are issued for packages with no sub configuration directives OUTPUT=`${DUB} build --root ${CURR_DIR}/1-exec-simple 2>&1` ! echo $OUTPUT | grep -c 'sub configuration directive.*references' > /dev/null dub-1.40.0/test/issue1194-warn-wrong-subconfig/000077500000000000000000000000001477246567400210765ustar00rootroot00000000000000dub-1.40.0/test/issue1194-warn-wrong-subconfig/.no_build000066400000000000000000000000001477246567400226600ustar00rootroot00000000000000dub-1.40.0/test/issue1194-warn-wrong-subconfig/dub.sdl000066400000000000000000000004201477246567400223500ustar00rootroot00000000000000name "test" dependency "staticlib-simple" path="../1-staticLib-simple" dependency "sourcelib-simple" path="../1-sourceLib-simple" targetType "executable" subConfiguration "staticlib-simple" "foo" subConfiguration "bar" "baz" subConfiguration "sourcelib-simple" "library" dub-1.40.0/test/issue1194-warn-wrong-subconfig/source/000077500000000000000000000000001477246567400223765ustar00rootroot00000000000000dub-1.40.0/test/issue1194-warn-wrong-subconfig/source/app.d000066400000000000000000000000171477246567400233210ustar00rootroot00000000000000void main() {} dub-1.40.0/test/issue1262-version-inheritance-diamond/000077500000000000000000000000001477246567400224015ustar00rootroot00000000000000dub-1.40.0/test/issue1262-version-inheritance-diamond/.gitignore000066400000000000000000000005341477246567400243730ustar00rootroot00000000000000.dub docs.json __dummy.html docs/ issue1262-version-inheritance-diamond issue1262-version-inheritance-diamond.so issue1262-version-inheritance-diamond.dylib issue1262-version-inheritance-diamond.dll issue1262-version-inheritance-diamond.a issue1262-version-inheritance-diamond.lib issue1262-version-inheritance-diamond-test-* *.exe *.o *.obj *.lst 
dub-1.40.0/test/issue1262-version-inheritance-diamond/.no_run000066400000000000000000000000001477246567400236700ustar00rootroot00000000000000dub-1.40.0/test/issue1262-version-inheritance-diamond/.no_test000066400000000000000000000000001477246567400240430ustar00rootroot00000000000000dub-1.40.0/test/issue1262-version-inheritance-diamond/daughter/000077500000000000000000000000001477246567400242045ustar00rootroot00000000000000dub-1.40.0/test/issue1262-version-inheritance-diamond/daughter/.gitignore000066400000000000000000000002101477246567400261650ustar00rootroot00000000000000.dub docs.json __dummy.html docs/ daughter.so daughter.dylib daughter.dll daughter.a daughter.lib daughter-test-* *.exe *.o *.obj *.lst dub-1.40.0/test/issue1262-version-inheritance-diamond/daughter/dub.sdl000066400000000000000000000001131477246567400254550ustar00rootroot00000000000000name "daughter" versions "Daughter" dependency "diamond" path="../diamond" dub-1.40.0/test/issue1262-version-inheritance-diamond/daughter/source/000077500000000000000000000000001477246567400255045ustar00rootroot00000000000000dub-1.40.0/test/issue1262-version-inheritance-diamond/daughter/source/dummy.d000066400000000000000000000004701477246567400270050ustar00rootroot00000000000000module daughter.dummy; version (Parent) {} else static assert(0, "Expected Parent to be set"); version (Daughter) {} else static assert(0, "Expected Daughter to be set"); version (Son) {} else static assert(0, "Expected Son to be set"); version (Diamond) {} else static assert(0, "Expected Diamond to be set"); dub-1.40.0/test/issue1262-version-inheritance-diamond/diamond/000077500000000000000000000000001477246567400240145ustar00rootroot00000000000000dub-1.40.0/test/issue1262-version-inheritance-diamond/diamond/.gitignore000066400000000000000000000002021477246567400257760ustar00rootroot00000000000000.dub docs.json __dummy.html docs/ diamond.so diamond.dylib diamond.dll diamond.a diamond.lib diamond-test-* *.exe *.o *.obj *.lst 
dub-1.40.0/test/issue1262-version-inheritance-diamond/diamond/dub.sdl000066400000000000000000000000421477246567400252660ustar00rootroot00000000000000name "diamond" versions "Diamond" dub-1.40.0/test/issue1262-version-inheritance-diamond/diamond/source/000077500000000000000000000000001477246567400253145ustar00rootroot00000000000000dub-1.40.0/test/issue1262-version-inheritance-diamond/diamond/source/dummy.d000066400000000000000000000004671477246567400266230ustar00rootroot00000000000000module diamond.dummy; version (Parent) {} else static assert(0, "Expected Parent to be set"); version (Daughter) {} else static assert(0, "Expected Daughter to be set"); version (Son) {} else static assert(0, "Expected Son to be set"); version (Diamond) {} else static assert(0, "Expected Diamond to be set"); dub-1.40.0/test/issue1262-version-inheritance-diamond/dub.sdl000066400000000000000000000002011477246567400236500ustar00rootroot00000000000000name "issue1262-version-inheritance-diamond" versions "Parent" dependency "daughter" path="daughter" dependency "son" path="son" dub-1.40.0/test/issue1262-version-inheritance-diamond/son/000077500000000000000000000000001477246567400232005ustar00rootroot00000000000000dub-1.40.0/test/issue1262-version-inheritance-diamond/son/.gitignore000066400000000000000000000001521477246567400251660ustar00rootroot00000000000000.dub docs.json __dummy.html docs/ son.so son.dylib son.dll son.a son.lib son-test-* *.exe *.o *.obj *.lst dub-1.40.0/test/issue1262-version-inheritance-diamond/son/dub.sdl000066400000000000000000000001011477246567400244460ustar00rootroot00000000000000name "son" versions "Son" dependency "diamond" path="../diamond" dub-1.40.0/test/issue1262-version-inheritance-diamond/son/source/000077500000000000000000000000001477246567400245005ustar00rootroot00000000000000dub-1.40.0/test/issue1262-version-inheritance-diamond/son/source/dummy.d000066400000000000000000000004631477246567400260030ustar00rootroot00000000000000module son.dummy; version 
(Parent) {} else static assert(0, "Expected Parent to be set"); version (Daughter) {} else static assert(0, "Expected Daughter to be set"); version (Son) {} else static assert(0, "Expected Son to be set"); version (Diamond) {} else static assert(0, "Expected Diamond to be set"); dub-1.40.0/test/issue1262-version-inheritance-diamond/source/000077500000000000000000000000001477246567400237015ustar00rootroot00000000000000dub-1.40.0/test/issue1262-version-inheritance-diamond/source/app.d000066400000000000000000000004611477246567400246270ustar00rootroot00000000000000version (Parent) {} else static assert(0, "Expected Parent to be set"); version (Daughter) {} else static assert(0, "Expected Daughter to be set"); version (Son) {} else static assert(0, "Expected Son to be set"); version (Diamond) {} else static assert(0, "Expected Diamond to be set"); void main() { } dub-1.40.0/test/issue1262-version-inheritance/000077500000000000000000000000001477246567400207705ustar00rootroot00000000000000dub-1.40.0/test/issue1262-version-inheritance/.gitignore000066400000000000000000000004441477246567400227620ustar00rootroot00000000000000.dub docs.json __dummy.html docs/ issue1262-version-inheritance issue1262-version-inheritance.so issue1262-version-inheritance.dylib issue1262-version-inheritance.dll issue1262-version-inheritance.a issue1262-version-inheritance.lib issue1262-version-inheritance-test-* *.exe *.o *.obj *.lst dub-1.40.0/test/issue1262-version-inheritance/.no_run000066400000000000000000000000001477246567400222570ustar00rootroot00000000000000dub-1.40.0/test/issue1262-version-inheritance/.no_test000066400000000000000000000000001477246567400224320ustar00rootroot00000000000000dub-1.40.0/test/issue1262-version-inheritance/daughter/000077500000000000000000000000001477246567400225735ustar00rootroot00000000000000dub-1.40.0/test/issue1262-version-inheritance/daughter/.gitignore000066400000000000000000000002101477246567400245540ustar00rootroot00000000000000.dub docs.json __dummy.html 
docs/ daughter.so daughter.dylib daughter.dll daughter.a daughter.lib daughter-test-* *.exe *.o *.obj *.lst dub-1.40.0/test/issue1262-version-inheritance/daughter/dub.sdl000066400000000000000000000000451477246567400240500ustar00rootroot00000000000000name "daughter" versions "Daughter" dub-1.40.0/test/issue1262-version-inheritance/daughter/source/000077500000000000000000000000001477246567400240735ustar00rootroot00000000000000dub-1.40.0/test/issue1262-version-inheritance/daughter/source/dummy.d000066400000000000000000000003511477246567400253720ustar00rootroot00000000000000module daughter.dummy; version (Parent) {} else static assert(0, "Expected Parent to be set"); version (Daughter) {} else static assert(0, "Expected Daughter to be set"); version (Son) static assert(0, "Expected Son to no be set"); dub-1.40.0/test/issue1262-version-inheritance/dub.sdl000066400000000000000000000001711477246567400222450ustar00rootroot00000000000000name "issue1262-version-inheritance" versions "Parent" dependency "daughter" path="daughter" dependency "son" path="son" dub-1.40.0/test/issue1262-version-inheritance/son/000077500000000000000000000000001477246567400215675ustar00rootroot00000000000000dub-1.40.0/test/issue1262-version-inheritance/son/.gitignore000066400000000000000000000001521477246567400235550ustar00rootroot00000000000000.dub docs.json __dummy.html docs/ son.so son.dylib son.dll son.a son.lib son-test-* *.exe *.o *.obj *.lst dub-1.40.0/test/issue1262-version-inheritance/son/dub.sdl000066400000000000000000000000321477246567400230400ustar00rootroot00000000000000name "son" versions "Son" dub-1.40.0/test/issue1262-version-inheritance/son/source/000077500000000000000000000000001477246567400230675ustar00rootroot00000000000000dub-1.40.0/test/issue1262-version-inheritance/son/source/dummy.d000066400000000000000000000003451477246567400243710ustar00rootroot00000000000000module son.dummy; version (Parent) {} else static assert(0, "Expected Parent to be set"); version (Daughter) static 
assert(0, "Expected Daughter to not be set"); version (Son) {} else static assert(0, "Expected Son to be set"); dub-1.40.0/test/issue1262-version-inheritance/source/000077500000000000000000000000001477246567400222705ustar00rootroot00000000000000dub-1.40.0/test/issue1262-version-inheritance/source/app.d000066400000000000000000000003471477246567400232210ustar00rootroot00000000000000version (Parent) {} else static assert(0, "Expected Parent to be set"); version (Daughter) {} else static assert(0, "Expected Daughter to be set"); version (Son) {} else static assert(0, "Expected Son to be set"); void main() { } dub-1.40.0/test/issue1277.sh000077500000000000000000000002561477246567400153660ustar00rootroot00000000000000#!/usr/bin/env bash . $(dirname "${BASH_SOURCE[0]}")/common.sh cd ${CURR_DIR}/issue1003-check-empty-ld-flags # It should fail ! ${DUB} --root=${CURR_DIR}/issue1277/ build dub-1.40.0/test/issue1277/000077500000000000000000000000001477246567400150245ustar00rootroot00000000000000dub-1.40.0/test/issue1277/.no_build000066400000000000000000000000001477246567400166060ustar00rootroot00000000000000dub-1.40.0/test/issue1277/source/000077500000000000000000000000001477246567400163245ustar00rootroot00000000000000dub-1.40.0/test/issue1277/source/app.d000066400000000000000000000000171477246567400172470ustar00rootroot00000000000000void main() {} dub-1.40.0/test/issue130-unicode-СНАЯАСТЕЯЅ/000077500000000000000000000000001477246567400255215ustar00rootroot00000000000000dub-1.40.0/test/issue130-unicode-СНАЯАСТЕЯЅ/dub.sdl000066400000000000000000000000151477246567400267730ustar00rootroot00000000000000name "tests" dub-1.40.0/test/issue130-unicode-СНАЯАСТЕЯЅ/source/000077500000000000000000000000001477246567400270215ustar00rootroot00000000000000dub-1.40.0/test/issue130-unicode-СНАЯАСТЕЯЅ/source/app.d000066400000000000000000000000711477246567400277440ustar00rootroot00000000000000import std.stdio; void main() { writeln("Success."); } 
dub-1.40.0/test/issue1336-registry/000077500000000000000000000000001477246567400166665ustar00rootroot00000000000000dub-1.40.0/test/issue1336-registry/.gitignore000066400000000000000000000003041477246567400206530ustar00rootroot00000000000000.dub docs.json __dummy.html docs/ issue1336-registry.so issue1336-registry.dylib issue1336-registry.dll issue1336-registry.a issue1336-registry.lib issue1336-registry-test-* *.exe *.o *.obj *.lst dub-1.40.0/test/issue1336-registry/.no_build000066400000000000000000000000001477246567400204500ustar00rootroot00000000000000dub-1.40.0/test/issue1336-registry/api/000077500000000000000000000000001477246567400174375ustar00rootroot00000000000000dub-1.40.0/test/issue1336-registry/api/packages/000077500000000000000000000000001477246567400212155ustar00rootroot00000000000000infos__packages=%5B%22gitcompatibledubpackage%22%5D&include_dependencies=true&minimize=true000066400000000000000000000006331477246567400421040ustar00rootroot00000000000000dub-1.40.0/test/issue1336-registry/api/packages{ "gitcompatibledubpackage": {"versions":[{"configurations":[{"name":"exe"},{"name":"lib"}],"version":"~master","name":"gitcompatibledubpackage"},{"version":"1.0.1","name":"gitcompatibledubpackage"},{"version":"1.0.2","name":"gitcompatibledubpackage"},{"version":"1.0.3","name":"gitcompatibledubpackage"},{"configurations":[{"name":"exe"},{"name":"lib"}],"version":"1.0.4","name":"gitcompatibledubpackage"}]} 
}dub-1.40.0/test/issue1336-registry/packages/000077500000000000000000000000001477246567400204445ustar00rootroot00000000000000dub-1.40.0/test/issue1336-registry/packages/gitcompatibledubpackage/000077500000000000000000000000001477246567400252765ustar00rootroot00000000000000dub-1.40.0/test/issue1336-registry/packages/gitcompatibledubpackage/1.0.2.zip0000777000000000000000000000000014772465674002763721.0.4.zipustar00rootroot00000000000000dub-1.40.0/test/issue1336-registry/packages/gitcompatibledubpackage/1.0.3.zip000066400000000000000000000000131477246567400264530ustar00rootroot00000000000000BROKEN ZIP dub-1.40.0/test/issue1336-registry/packages/gitcompatibledubpackage/1.0.4.zip000066400000000000000000000037711477246567400264720ustar00rootroot00000000000000PK QZ2K gitcompatibledubpackage-1.0.4/UT YPK QZ2KNj*' gitcompatibledubpackage-1.0.4/README.mdUT Y=j0 DP{,eS)D(k7,п_٥=C̴0v3I9MR}%i^NuZ 'u.d SjLH Ԛ%J >O;ފ]'JFʳmˌp  ˦ Oܭ`78NN:QB1i?{;!jsa{XȺ#S0Wr!`b5~s[~)k"'ֹBSP>jzN,ޅf^KIl,;Mbfz},ʩ.NKIdz+NnA`v՜; ^5};6bEI򴛹,OU_}SЩ#)@ά PK QZ2KV\% gitcompatibledubpackage-1.0.4/hello.dUT YO)IUHɷ*LQM(.)KUH,J/R܂= ei(yLQp uRҴPK QZ2K% gitcompatibledubpackage-1.0.4/subdir/UT YPK QZ2KOn+ gitcompatibledubpackage-1.0.4/subdir/file.dUT Ym10 Ew"`{6`(vJU {FM9mC2\pyƓLRULӎ>x5bF@iE)5؋PK QZ2K gitcompatibledubpackage-1.0.4/UT YPK QZ2KNj*' Egitcompatibledubpackage-1.0.4/README.mdUT YPK QZ2K4E% Sgitcompatibledubpackage-1.0.4/dub.sdlUT YPK QZ2KV\% gitcompatibledubpackage-1.0.4/hello.dUT YPK QZ2K% gitcompatibledubpackage-1.0.4/subdir/UT YPK QZ2KOn+ gitcompatibledubpackage-1.0.4/subdir/file.dUT YPK)(9e3972be4c63790c32257220f40c0af7dc41bec5dub-1.40.0/test/issue1350-transitive-none-deps/000077500000000000000000000000001477246567400210705ustar00rootroot00000000000000dub-1.40.0/test/issue1350-transitive-none-deps/.gitignore000066400000000000000000000001651477246567400230620ustar00rootroot00000000000000.dub docs.json __dummy.html docs/ test test.so test.dylib test.dll test.a test.lib test-test-* *.exe *.o *.obj *.lst 
dub-1.40.0/test/issue1350-transitive-none-deps/.no_run000066400000000000000000000000001477246567400223570ustar00rootroot00000000000000dub-1.40.0/test/issue1350-transitive-none-deps/.no_test000066400000000000000000000000001477246567400225320ustar00rootroot00000000000000dub-1.40.0/test/issue1350-transitive-none-deps/common-dep/000077500000000000000000000000001477246567400231265ustar00rootroot00000000000000dub-1.40.0/test/issue1350-transitive-none-deps/common-dep/common.d000066400000000000000000000000171477246567400245610ustar00rootroot00000000000000module common; dub-1.40.0/test/issue1350-transitive-none-deps/common-dep/dub.sdl000066400000000000000000000001161477246567400244020ustar00rootroot00000000000000name "common-dep" targetType "library" importPaths "." sourceFiles "common.d" dub-1.40.0/test/issue1350-transitive-none-deps/common-none/000077500000000000000000000000001477246567400233155ustar00rootroot00000000000000dub-1.40.0/test/issue1350-transitive-none-deps/common-none/dub.sdl000066400000000000000000000001221477246567400245660ustar00rootroot00000000000000name "common-none" targetType "none" dependency "common-dep" path="../common-dep" dub-1.40.0/test/issue1350-transitive-none-deps/dep1/000077500000000000000000000000001477246567400217215ustar00rootroot00000000000000dub-1.40.0/test/issue1350-transitive-none-deps/dep1/dep1.d000066400000000000000000000000341477246567400227140ustar00rootroot00000000000000module dep1; import common; dub-1.40.0/test/issue1350-transitive-none-deps/dep1/dub.sdl000066400000000000000000000001401477246567400231720ustar00rootroot00000000000000name "dep1" importPaths "." 
sourceFiles "dep1.d" dependency "common-none" path="../common-none" dub-1.40.0/test/issue1350-transitive-none-deps/dep2/000077500000000000000000000000001477246567400217225ustar00rootroot00000000000000dub-1.40.0/test/issue1350-transitive-none-deps/dep2/dep2.d000066400000000000000000000000341477246567400227160ustar00rootroot00000000000000module dep2; import common; dub-1.40.0/test/issue1350-transitive-none-deps/dep2/dub.sdl000066400000000000000000000001401477246567400231730ustar00rootroot00000000000000name "dep2" importPaths "." sourceFiles "dep2.d" dependency "common-none" path="../common-none" dub-1.40.0/test/issue1350-transitive-none-deps/dub.sdl000066400000000000000000000001651477246567400223500ustar00rootroot00000000000000name "test" targetType "executable" sourceFiles "test.d" dependency "dep1" path="dep1" dependency "dep2" path="dep2" dub-1.40.0/test/issue1350-transitive-none-deps/test.d000066400000000000000000000000711477246567400222120ustar00rootroot00000000000000module test; import dep1, dep2, common; void main() {} dub-1.40.0/test/issue1372-ignore-files-in-hidden-dirs.sh000077500000000000000000000020701477246567400225330ustar00rootroot00000000000000#!/usr/bin/env bash set -e . 
$(dirname "${BASH_SOURCE[0]}")/common.sh BASEDIR=${CURR_DIR}/issue1372-ignore-files-in-hidden-dirs rm -rf ${BASEDIR}/.dub rm -rf ${BASEDIR}/issue1372 echo "Compile and ignore hidden directories" ${DUB} build --root ${BASEDIR} --config=normal --force OUTPUT=`${BASEDIR}/issue1372` if [[ "$OUTPUT" != "no hidden file compiled" ]]; then die $LINENO "Normal compilation failed"; fi rm -rf ${BASEDIR}/.dub rm -rf ${BASEDIR}/issue1372 echo "Compile and explcitly include file in hidden directories" ${DUB} build --root ${BASEDIR} --config=hiddenfile --force OUTPUT=`${BASEDIR}/issue1372` if [[ "$OUTPUT" != "hidden file compiled" ]]; then die $LINENO "Hidden file compilation failed"; fi rm -rf ${BASEDIR}/.dub rm -rf ${BASEDIR}/issue1372 echo "Compile and explcitly include extra hidden directories" ${DUB} build --root ${BASEDIR} --config=hiddendir --force OUTPUT=`${BASEDIR}/issue1372` if [[ "$OUTPUT" != "hidden dir compiled" ]]; then die $LINENO "Hidden directory compilation failed"; fi rm -rf ${BASEDIR}/.dub rm -rf ${BASEDIR}/issue1372 dub-1.40.0/test/issue1372-ignore-files-in-hidden-dirs/000077500000000000000000000000001477246567400221755ustar00rootroot00000000000000dub-1.40.0/test/issue1372-ignore-files-in-hidden-dirs/.hiddensource/000077500000000000000000000000001477246567400247275ustar00rootroot00000000000000dub-1.40.0/test/issue1372-ignore-files-in-hidden-dirs/.hiddensource/hello.d000066400000000000000000000001311477246567400261720ustar00rootroot00000000000000module hello; import std.stdio; void helloFun() { writeln("hidden dir compiled"); } dub-1.40.0/test/issue1372-ignore-files-in-hidden-dirs/dub.json000066400000000000000000000011571477246567400236460ustar00rootroot00000000000000{ "name": "issue1372", "mainSourceFile": "source/app.d", "configurations": [ { "name": "normal", "targetType": "executable" }, { "name": "hiddenfile", "targetType": "executable", "versions" : ["UseHiddenFile"], "sourceFiles":["source/.compileMe/hello.d"] }, { "name": "hiddendir", "targetType": 
"executable", "versions" : ["UseHiddenFile"], "sourcePaths":["source", ".hiddensource"], "importPaths":["source", ".hiddensource"] }] } dub-1.40.0/test/issue1372-ignore-files-in-hidden-dirs/source/000077500000000000000000000000001477246567400234755ustar00rootroot00000000000000dub-1.40.0/test/issue1372-ignore-files-in-hidden-dirs/source/.AppleDouble/000077500000000000000000000000001477246567400257475ustar00rootroot00000000000000dub-1.40.0/test/issue1372-ignore-files-in-hidden-dirs/source/.AppleDouble/app.d000066400000000000000000000001351477246567400266730ustar00rootroot00000000000000This file needs to contain something to show the issue up. If it's empty, it'll get ignored. dub-1.40.0/test/issue1372-ignore-files-in-hidden-dirs/source/.compileMe/000077500000000000000000000000001477246567400254655ustar00rootroot00000000000000dub-1.40.0/test/issue1372-ignore-files-in-hidden-dirs/source/.compileMe/hello.d000066400000000000000000000001321477246567400267310ustar00rootroot00000000000000module hello; import std.stdio; void helloFun() { writeln("hidden file compiled"); } dub-1.40.0/test/issue1372-ignore-files-in-hidden-dirs/source/app.d000066400000000000000000000004401477246567400244200ustar00rootroot00000000000000import std.stdio; void main() { version(UseHiddenFile) { import hello; helloFun(); } else { static assert(!__traits(compiles, { import hello; helloFun(); })); writeln("no hidden file compiled"); } } dub-1.40.0/test/issue1396-pre-post-run-commands.sh000077500000000000000000000004721477246567400215400ustar00rootroot00000000000000#!/usr/bin/env bash . $(dirname "${BASH_SOURCE[0]}")/common.sh cd ${CURR_DIR}/issue1396-pre-post-run-commands rm -rf .dub rm -rf test.txt "$DUB" if ! grep -c -e "pre-run" test.txt; then die $LINENO 'pre run not executed.' fi if ! grep -c -e "post-run-0" test.txt; then die $LINENO 'post run not executed.' 
fi dub-1.40.0/test/issue1396-pre-post-run-commands/000077500000000000000000000000001477246567400211765ustar00rootroot00000000000000dub-1.40.0/test/issue1396-pre-post-run-commands/.no_build000066400000000000000000000000001477246567400227600ustar00rootroot00000000000000dub-1.40.0/test/issue1396-pre-post-run-commands/dub.sdl000066400000000000000000000001251477246567400224520ustar00rootroot00000000000000name "test" preRunCommands "echo pre-run >> test.txt" postRunCommands "./post-run.sh"dub-1.40.0/test/issue1396-pre-post-run-commands/post-run.sh000077500000000000000000000000611477246567400233210ustar00rootroot00000000000000echo post-run-$DUB_TARGET_EXIT_STATUS >> test.txtdub-1.40.0/test/issue1396-pre-post-run-commands/source/000077500000000000000000000000001477246567400224765ustar00rootroot00000000000000dub-1.40.0/test/issue1396-pre-post-run-commands/source/app.d000066400000000000000000000000171477246567400234210ustar00rootroot00000000000000void main() {} dub-1.40.0/test/issue1401-file-system-pkg-supplier/000077500000000000000000000000001477246567400216705ustar00rootroot00000000000000dub-1.40.0/test/issue1401-file-system-pkg-supplier/.no_build000066400000000000000000000000001477246567400234520ustar00rootroot00000000000000dub-1.40.0/test/issue1401-file-system-pkg-supplier/fs-json-dubpackage-1.0.7.zip000066400000000000000000000013201477246567400265140ustar00rootroot00000000000000PKMfs-json-dubpackage-1.0.7/PK M9F88!fs-json-dubpackage-1.0.7/dub.json{ "name": "fs-json-dubpackage", "sourcePaths": ["."] }PK!M?c'VX fs-json-dubpackage-1.0.7/hello.d 0 |E.E'~8ZjI[n9™#1F0մn9uXKC_o&j5.8xPK?M$fs-json-dubpackage-1.0.7/ #f#fk6PK? 
M9F88!$ 7fs-json-dubpackage-1.0.7/dub.json A{77PK?!M?c'VX $ fs-json-dubpackage-1.0.7/hello.d G28787PKPB(9e3972be4c63790c32257220f40c0af7dc41bec5dub-1.40.0/test/issue1401-file-system-pkg-supplier/fs-sdl-dubpackage-1.0.5.zip000066400000000000000000000014201477246567400263240ustar00rootroot00000000000000PKMfs-sdl-dubpackage-1.0.5/PKΦM`Cfs-sdl-dubpackage-1.0.5/dub.sdleA = oD/01]ą}'_ą4Le('fE@OJlu.FP6)xXCh#O*fOo+ 7 ֥KgbN:wƼ5,#PKҦMr VXfs-sdl-dubpackage-1.0.5/hello.d 0 |E.E~8ZjI[n9™#1F0մn9uXKC_o&j5.8xPK?M$fs-sdl-dubpackage-1.0.5/ 8787k6PK?ΦM`C$ 6fs-sdl-dubpackage-1.0.5/dub.sdl T'77PK?ҦMr VX$ fs-sdl-dubpackage-1.0.5/hello.d <-8787PKL(9e3972be4c63790c32257220f40c0af7dc41bec5dub-1.40.0/test/issue1401-file-system-pkg-supplier/fs-sdl-dubpackage-1.0.6.zip000066400000000000000000000014201477246567400263250ustar00rootroot00000000000000PKMfs-sdl-dubpackage-1.0.6/PKæM`Cfs-sdl-dubpackage-1.0.6/dub.sdleA = oD/01]ą}'_ą4Le('fE@OJlu.FP6)xXCh#O*fOo+ 7 ֥KgbN:wƼ5,#PKئMhPVXfs-sdl-dubpackage-1.0.6/hello.d 0 |E.E~8ZjI[n9™#1F0մn9uXKC_o&j5.8xPK?M$fs-sdl-dubpackage-1.0.6/ ԗԗJ_PK?æM`C$ 6fs-sdl-dubpackage-1.0.6/dub.sdl qwwPK?ئMhPVX$ fs-sdl-dubpackage-1.0.6/hello.d ەd3ԗԗPKL(9e3972be4c63790c32257220f40c0af7dc41bec5dub-1.40.0/test/issue1401-filesystem-supplier.sh000077500000000000000000000021321477246567400213710ustar00rootroot00000000000000#!/usr/bin/env bash DIR=$(dirname "${BASH_SOURCE[0]}") . "$DIR"/common.sh ${DUB} remove fs-json-dubpackage --non-interactive 2>/dev/null || true ${DUB} remove fs-sdl-dubpackage --non-interactive 2>/dev/null || true echo "Trying to get fs-sdl-dubpackage (1.0.5)" ${DUB} fetch fs-sdl-dubpackage --version=1.0.5 --skip-registry=all --registry=file://"$DIR"/issue1401-file-system-pkg-supplier if ! ${DUB} remove fs-sdl-dubpackage@1.0.5 2>/dev/null; then die $LINENO 'DUB did not install package from file system.' 
fi echo "Trying to get fs-sdl-dubpackage (latest)" ${DUB} fetch fs-sdl-dubpackage --skip-registry=all --registry=file://"$DIR"/issue1401-file-system-pkg-supplier if ! ${DUB} remove fs-sdl-dubpackage@1.0.6 2>/dev/null; then die $LINENO 'DUB did not install latest package from file system.' fi echo "Trying to get fs-json-dubpackage (1.0.7)" ${DUB} fetch fs-json-dubpackage@1.0.7 --skip-registry=all --registry=file://"$DIR"/issue1401-file-system-pkg-supplier if ! ${DUB} remove fs-json-dubpackage@1.0.7 2>/dev/null; then die $LINENO 'DUB did not install package from file system.' fi dub-1.40.0/test/issue1408-inherit-linker-files/000077500000000000000000000000001477246567400210425ustar00rootroot00000000000000dub-1.40.0/test/issue1408-inherit-linker-files/.no_run000066400000000000000000000000001477246567400223310ustar00rootroot00000000000000dub-1.40.0/test/issue1408-inherit-linker-files/.no_test000066400000000000000000000000001477246567400225040ustar00rootroot00000000000000dub-1.40.0/test/issue1408-inherit-linker-files/dep.d000066400000000000000000000000141477246567400217520ustar00rootroot00000000000000module dep; dub-1.40.0/test/issue1408-inherit-linker-files/dub.sdl000066400000000000000000000005251477246567400223220ustar00rootroot00000000000000name "test" targetType "executable" dependency ":dep" version="*" sourceFiles "main.d" // make lib.d available for import importPaths "." 
subPackage { name "dep" sourceFiles "dep.d" sourceFiles "lib/liblib.a" platform="posix" sourceFiles "lib/lib.lib" platform="windows" preBuildCommands "$DUB_EXE build --root=\"$PACKAGE_DIR/lib\"" } dub-1.40.0/test/issue1408-inherit-linker-files/lib.d000066400000000000000000000000311477246567400217470ustar00rootroot00000000000000module lib; void foo(); dub-1.40.0/test/issue1408-inherit-linker-files/lib/000077500000000000000000000000001477246567400216105ustar00rootroot00000000000000dub-1.40.0/test/issue1408-inherit-linker-files/lib/dub.sdl000066400000000000000000000000721477246567400230650ustar00rootroot00000000000000name "lib" targetType "staticLibrary" sourceFiles "lib.d" dub-1.40.0/test/issue1408-inherit-linker-files/lib/lib.d000066400000000000000000000000341477246567400225200ustar00rootroot00000000000000module lib; void foo() { } dub-1.40.0/test/issue1408-inherit-linker-files/main.d000066400000000000000000000000451477246567400221320ustar00rootroot00000000000000import lib; void main() { foo(); } dub-1.40.0/test/issue1416-maven-repo-pkg-supplier.sh000077500000000000000000000022251477246567400220460ustar00rootroot00000000000000#!/usr/bin/env bash DIR=$(dirname "${BASH_SOURCE[0]}") . "$DIR"/common.sh PORT=$(getRandomPort) ${DUB} remove maven-dubpackage --non-interactive 2>/dev/null || true ${DUB} build --single "$DIR"/test_registry.d "$DIR"/test_registry --folder="$DIR/issue1416-maven-repo-pkg-supplier" --port=$PORT & PID=$! sleep 1 trap 'kill $PID 2>/dev/null || true' exit echo "Trying to download maven-dubpackage (1.0.5)" ${DUB} fetch maven-dubpackage@1.0.5 --skip-registry=all --registry=mvn+http://localhost:$PORT/maven/release/dubpackages if ! ${DUB} remove maven-dubpackage@1.0.5 2>/dev/null; then die $LINENO 'DUB did not install package from maven registry.' fi echo "Trying to download maven-dubpackage (latest)" ${DUB} fetch maven-dubpackage --skip-registry=all --registry=mvn+http://localhost:$PORT/maven/release/dubpackages if ! 
${DUB} remove maven-dubpackage@1.0.6 2>/dev/null; then die $LINENO 'DUB fetch did not install latest package from maven registry.' fi echo "Trying to search (exact) maven-dubpackage" ${DUB} search maven-dubpackage --skip-registry=all --registry=mvn+http://localhost:$PORT/maven/release/dubpackages | grep -c "maven-dubpackage (1.0.6)" dub-1.40.0/test/issue1416-maven-repo-pkg-supplier.sh.min_frontend000066400000000000000000000000061477246567400245170ustar00rootroot000000000000002.077 dub-1.40.0/test/issue1416-maven-repo-pkg-supplier/000077500000000000000000000000001477246567400215065ustar00rootroot00000000000000dub-1.40.0/test/issue1416-maven-repo-pkg-supplier/.gitignore000066400000000000000000000000701477246567400234730ustar00rootroot00000000000000.dub docs.json __dummy.html docs/ *.exe *.o *.obj *.lst dub-1.40.0/test/issue1416-maven-repo-pkg-supplier/.no_build000066400000000000000000000000001477246567400232700ustar00rootroot00000000000000dub-1.40.0/test/issue1416-maven-repo-pkg-supplier/maven/000077500000000000000000000000001477246567400226145ustar00rootroot00000000000000dub-1.40.0/test/issue1416-maven-repo-pkg-supplier/maven/release/000077500000000000000000000000001477246567400242345ustar00rootroot00000000000000dub-1.40.0/test/issue1416-maven-repo-pkg-supplier/maven/release/dubpackages/000077500000000000000000000000001477246567400265055ustar00rootroot00000000000000dub-1.40.0/test/issue1416-maven-repo-pkg-supplier/maven/release/dubpackages/maven-dubpackage/000077500000000000000000000000001477246567400316775ustar00rootroot00000000000000dub-1.40.0/test/issue1416-maven-repo-pkg-supplier/maven/release/dubpackages/maven-dubpackage/1.0.5/000077500000000000000000000000001477246567400323405ustar00rootroot00000000000000maven-dubpackage-1.0.5.zip000066400000000000000000000014161477246567400366400ustar00rootroot00000000000000dub-1.40.0/test/issue1416-maven-repo-pkg-supplier/maven/release/dubpackages/maven-dubpackage/1.0.5PKKwLmaven-dubpackage-1.0.5/PKRwLHmaven-dubpackage-1.0.5/
dub.sdleA = pm``LE\[?FN,ho TR,Ut ^ Z C龆Sl JC]x!t Lyr ]lV}CPKR2K[\maven-dubpackage-1.0.5/hello.d; 0=b4^ Xx+$ą|dj!kfRqWx 9L”öP5m‚UNa^ymPK?KwL$maven-dubpackage-1.0.5/ VjVj}iPK?RwLH$ 5maven-dubpackage-1.0.5/dub.sdl O5+i+iPK?R2K[\$ maven-dubpackage-1.0.5/hello.d ~0VjVjPKI(9e3972be4c63790c32257220f40c0af7dc41bec5dub-1.40.0/test/issue1416-maven-repo-pkg-supplier/maven/release/dubpackages/maven-dubpackage/1.0.6/000077500000000000000000000000001477246567400323415ustar00rootroot00000000000000maven-dubpackage-1.0.6.zip000066400000000000000000000014161477246567400366420ustar00rootroot00000000000000dub-1.40.0/test/issue1416-maven-repo-pkg-supplier/maven/release/dubpackages/maven-dubpackage/1.0.6PK@TxLmaven-dubpackage-1.0.6/PKRwLHmaven-dubpackage-1.0.6/dub.sdleA = pm``LE\[?FN,ho TR,Ut ^ Z C龆Sl JC]x!t Lyr ]lV}CPKR2K[\maven-dubpackage-1.0.6/hello.d; 0=b4^ Xx+$ą|dj!kfRqWx 9L”öP5m‚UNa^ymPK?@TxL$maven-dubpackage-1.0.6/ =S=S3=SPK?RwLH$ 5maven-dubpackage-1.0.6/dub.sdl O5 [=S [=SPK?R2K[\$ maven-dubpackage-1.0.6/hello.d ~0=S=SPKI(9e3972be4c63790c32257220f40c0af7dc41bec5maven-metadata.xml000066400000000000000000000006011477246567400352230ustar00rootroot00000000000000dub-1.40.0/test/issue1416-maven-repo-pkg-supplier/maven/release/dubpackages/maven-dubpackage dubpackages maven-dubpackage 1.0.6 1.0.6 1.0.5 1.0.6 20180317184845 dub-1.40.0/test/issue1427-betterC/000077500000000000000000000000001477246567400164075ustar00rootroot00000000000000dub-1.40.0/test/issue1427-betterC/.gitignore000066400000000000000000000000231477246567400203720ustar00rootroot00000000000000test *.o *.exe .dubdub-1.40.0/test/issue1427-betterC/.min_frontend000066400000000000000000000000061477246567400210660ustar00rootroot000000000000002.078 
dub-1.40.0/test/issue1427-betterC/.no_run000066400000000000000000000000001477246567400176760ustar00rootroot00000000000000dub-1.40.0/test/issue1427-betterC/.no_test000066400000000000000000000000001477246567400200510ustar00rootroot00000000000000dub-1.40.0/test/issue1427-betterC/dub.json000066400000000000000000000000611477246567400200510ustar00rootroot00000000000000{ "name": "test", "buildOptions": ["betterC"] }dub-1.40.0/test/issue1427-betterC/source/000077500000000000000000000000001477246567400177075ustar00rootroot00000000000000dub-1.40.0/test/issue1427-betterC/source/app.d000066400000000000000000000001131477246567400206270ustar00rootroot00000000000000version(D_BetterC) {} else static assert(false); extern(C) void main() { }dub-1.40.0/test/issue1447-build-settings-vars.sh000077500000000000000000000012721477246567400212700ustar00rootroot00000000000000#!/usr/bin/env bash set -e . $(dirname "${BASH_SOURCE[0]}")/common.sh if [[ `uname -m` == "i386" ]]; then ARCH=x86 elif [[ `uname -m` == "i686" ]]; then ARCH=x86 elif [[ `uname -m` == "arm64" ]]; then ARCH="aarch64" else ARCH=$(uname -m) fi rm -rf ${CURR_DIR}/issue1447-build-settings-vars/.dub rm -rf ${CURR_DIR}/issue1447-build-settings-vars/test ${DUB} build --root ${CURR_DIR}/issue1447-build-settings-vars --arch=$ARCH OUTPUT=`${CURR_DIR}/issue1447-build-settings-vars/test` rm -rf ${CURR_DIR}/issue1447-build-settings-vars/.dub rm -rf ${CURR_DIR}/issue1447-build-settings-vars/test if [[ "$OUTPUT" != "$ARCH" ]]; then die $LINENO "Build settings ARCH var incorrect"; fi 
dub-1.40.0/test/issue1447-build-settings-vars/000077500000000000000000000000001477246567400207275ustar00rootroot00000000000000dub-1.40.0/test/issue1447-build-settings-vars/.no_run000066400000000000000000000000001477246567400222160ustar00rootroot00000000000000dub-1.40.0/test/issue1447-build-settings-vars/.no_test000066400000000000000000000000001477246567400223710ustar00rootroot00000000000000dub-1.40.0/test/issue1447-build-settings-vars/dub.json000066400000000000000000000000711477246567400223720ustar00rootroot00000000000000{ "name": "test", "stringImportPaths": ["view-$ARCH"] }dub-1.40.0/test/issue1447-build-settings-vars/source/000077500000000000000000000000001477246567400222275ustar00rootroot00000000000000dub-1.40.0/test/issue1447-build-settings-vars/source/app.d000066400000000000000000000000751477246567400231560ustar00rootroot00000000000000import std.stdio; void main() { writeln(import("arch")); } dub-1.40.0/test/issue1447-build-settings-vars/view-aarch64/000077500000000000000000000000001477246567400231275ustar00rootroot00000000000000dub-1.40.0/test/issue1447-build-settings-vars/view-aarch64/arch000066400000000000000000000000071477246567400237640ustar00rootroot00000000000000aarch64dub-1.40.0/test/issue1447-build-settings-vars/view-x86/000077500000000000000000000000001477246567400223245ustar00rootroot00000000000000dub-1.40.0/test/issue1447-build-settings-vars/view-x86/arch000066400000000000000000000000031477246567400231550ustar00rootroot00000000000000x86dub-1.40.0/test/issue1447-build-settings-vars/view-x86_64/000077500000000000000000000000001477246567400226355ustar00rootroot00000000000000dub-1.40.0/test/issue1447-build-settings-vars/view-x86_64/arch000066400000000000000000000000061477246567400234710ustar00rootroot00000000000000x86_64dub-1.40.0/test/issue1474-generate-source.script.d000066400000000000000000000016151477246567400215640ustar00rootroot00000000000000/+ dub.sdl: name "issue1474-generate-source" +/ module issue1474_generate_source; import std.process; 
import std.stdio; import std.algorithm; import std.path; int main() { const dub = environment.get("DUB", buildPath(__FILE_FULL_PATH__.dirName.dirName, "bin", "dub")); const curr_dir = environment.get("CURR_DIR", buildPath(__FILE_FULL_PATH__.dirName)); const dc = environment.get("DC", "dmd"); const cmd = [dub, "build", "--compiler", dc]; const result = execute(cmd, null, Config.none, size_t.max, curr_dir.buildPath("issue1474")); if (result.status || result.output.canFind("Failed")) { writefln("\n> %-(%s %)", cmd); writeln("==========================================================="); writeln(result.output); writeln("==========================================================="); writeln("Last command failed with exit code ", result.status, '\n'); return 1; } return 0; } dub-1.40.0/test/issue1474/000077500000000000000000000000001477246567400150235ustar00rootroot00000000000000dub-1.40.0/test/issue1474/.no_build000066400000000000000000000000001477246567400166050ustar00rootroot00000000000000dub-1.40.0/test/issue1474/dub.json000066400000000000000000000005341477246567400164720ustar00rootroot00000000000000{ "name": "generated-sources", "description": "Example of using pre generate commands to generate source code.", "sourceFiles": ["ext/*.d"], "preGenerateCommands-posix": [ "echo 'extern(C) int fun42 () { return 42; }' > ext/fortytwo.d" ], "preGenerateCommands-windows": [ "echo extern(C) int fun42 () { return 42; }> ext/fortytwo.d" ] } dub-1.40.0/test/issue1474/ext/000077500000000000000000000000001477246567400156235ustar00rootroot00000000000000dub-1.40.0/test/issue1474/ext/kekw.d000066400000000000000000000000561477246567400167320ustar00rootroot00000000000000extern(C) string funkekw () { return "KEKW";} dub-1.40.0/test/issue1474/source/000077500000000000000000000000001477246567400163235ustar00rootroot00000000000000dub-1.40.0/test/issue1474/source/app.d000066400000000000000000000002411477246567400172450ustar00rootroot00000000000000import std.stdio; extern(C) string funkekw 
(); extern(C) int fun42 (); void main() { writefln("ShouldBe42: %s", fun42()); writefln("Juan: %s", funkekw()); } dub-1.40.0/test/issue1477-subpackage-visuald-paths.sh000077500000000000000000000015451477246567400222570ustar00rootroot00000000000000#!/usr/bin/env bash . $(dirname "${BASH_SOURCE[0]}")/common.sh # Check project files generated from project "root" cd ${CURR_DIR}/issue1477-subpackage-visuald-paths rm -rf .dub ${DUB} generate visuald :subpackage_a if ! grep " ${CURR_DIR}/output-1504.txt grep "env_variables_work" < ${CURR_DIR}/output-1504.txt # Don't manage to make it work #grep "Invalid source" < ${CURR_DIR}/output-1504.txt && true dub-1.40.0/test/issue1504-envvar-in-path/000077500000000000000000000000001477246567400176525ustar00rootroot00000000000000dub-1.40.0/test/issue1504-envvar-in-path/.no_build000066400000000000000000000000001477246567400214340ustar00rootroot00000000000000dub-1.40.0/test/issue1504-envvar-in-path/dub.json000066400000000000000000000001031477246567400213110ustar00rootroot00000000000000{ "name": "test", "stringImportPaths": ["$MY_VARIABLE"] } dub-1.40.0/test/issue1504-envvar-in-path/source/000077500000000000000000000000001477246567400211525ustar00rootroot00000000000000dub-1.40.0/test/issue1504-envvar-in-path/source/app.d000066400000000000000000000000651477246567400221000ustar00rootroot00000000000000pragma(msg, import("message.txt")); void main() { } dub-1.40.0/test/issue1504-envvar-in-path/teststrings/000077500000000000000000000000001477246567400222435ustar00rootroot00000000000000dub-1.40.0/test/issue1504-envvar-in-path/teststrings/message.txt000066400000000000000000000000221477246567400244220ustar00rootroot00000000000000env_variables_workdub-1.40.0/test/issue1505-single-file-package-dynamic-library.d000066400000000000000000000002741477246567400240370ustar00rootroot00000000000000/+ dub.sdl: name "single-file-test-dynamic-library" targetType "dynamicLibrary" +/ module hellolib; version(Windows) { import core.sys.windows.dll; mixin 
SimpleDllMain; } dub-1.40.0/test/issue1505-single-file-package-dynamic-library.sh000077500000000000000000000011651477246567400242310ustar00rootroot00000000000000#!/usr/bin/env bash set -eux -o pipefail . $(dirname "${BASH_SOURCE[0]}")/common.sh cd ${CURR_DIR} rm -f libsingle-file-test-dynamic-library.{so,dylib} rm -f single-file-test-dynamic-library.dll ${DUB} build --single issue1505-single-file-package-dynamic-library.d if [[ ! -f libsingle-file-test-dynamic-library.so ]] \ && [[ ! -f libsingle-file-test-dynamic-library.dylib ]] \ && [[ ! -f single-file-test-dynamic-library.dll ]]; then die $LINENO 'Normal invocation did not produce a dynamic library in the current directory' fi rm -f libsingle-file-test-dynamic-library.{so,dylib} rm -f single-file-test-dynamic-library.dll dub-1.40.0/test/issue1524-maven-upgrade-dependency-tree.sh000077500000000000000000000017531477246567400231660ustar00rootroot00000000000000#!/usr/bin/env bash DIR=$(dirname "${BASH_SOURCE[0]}") . "$DIR"/common.sh PORT=$(getRandomPort) ${DUB} remove maven-dubpackage-a --non-interactive 2>/dev/null || true ${DUB} remove maven-dubpackage-b --non-interactive 2>/dev/null || true ${DUB} build --single "$DIR"/test_registry.d "$DIR"/test_registry --folder="$DIR/issue1524-maven-upgrade-dependency-tree" --port=$PORT & PID=$! sleep 1 trap 'kill $PID 2>/dev/null || true' exit echo "Trying to download maven-dubpackage-a (1.0.5) with dependency to maven-dubpackage-b (1.0.6)" ${DUB} upgrade --root "$DIR/issue1524-maven-upgrade-dependency-tree" --skip-registry=standard --registry=mvn+http://localhost:$PORT/maven/release/dubpackages if ! ${DUB} remove maven-dubpackage-a@1.0.5 2>/dev/null; then die $LINENO 'DUB did not install package "maven-dubpackage-a" from maven registry.' fi if ! ${DUB} remove maven-dubpackage-b@1.0.6 2>/dev/null; then die $LINENO 'DUB did not install package "maven-dubpackage-b" from maven registry.' 
fi dub-1.40.0/test/issue1524-maven-upgrade-dependency-tree.sh.min_frontend000066400000000000000000000000061477246567400256320ustar00rootroot000000000000002.077 dub-1.40.0/test/issue1524-maven-upgrade-dependency-tree/000077500000000000000000000000001477246567400226215ustar00rootroot00000000000000dub-1.40.0/test/issue1524-maven-upgrade-dependency-tree/.gitignore000066400000000000000000000000701477246567400246060ustar00rootroot00000000000000.dub docs.json __dummy.html docs/ *.exe *.o *.obj *.lst dub-1.40.0/test/issue1524-maven-upgrade-dependency-tree/.no_build000066400000000000000000000000001477246567400244030ustar00rootroot00000000000000dub-1.40.0/test/issue1524-maven-upgrade-dependency-tree/dub.json000066400000000000000000000001141477246567400242620ustar00rootroot00000000000000{ "name": "test", "dependencies": { "maven-dubpackage-a": "~>1.0.5" } }dub-1.40.0/test/issue1524-maven-upgrade-dependency-tree/maven/000077500000000000000000000000001477246567400237275ustar00rootroot00000000000000dub-1.40.0/test/issue1524-maven-upgrade-dependency-tree/maven/release/000077500000000000000000000000001477246567400253475ustar00rootroot00000000000000dub-1.40.0/test/issue1524-maven-upgrade-dependency-tree/maven/release/dubpackages/000077500000000000000000000000001477246567400276205ustar00rootroot00000000000000maven-dubpackage-a/000077500000000000000000000000001477246567400331515ustar00rootroot00000000000000dub-1.40.0/test/issue1524-maven-upgrade-dependency-tree/maven/release/dubpackages1.0.5/000077500000000000000000000000001477246567400336125ustar00rootroot00000000000000dub-1.40.0/test/issue1524-maven-upgrade-dependency-tree/maven/release/dubpackages/maven-dubpackage-amaven-dubpackage-a-1.0.5.zip000066400000000000000000000014661477246567400404140ustar00rootroot00000000000000dub-1.40.0/test/issue1524-maven-upgrade-dependency-tree/maven/release/dubpackages/maven-dubpackage-a/1.0.5PKMmaven-dubpackage-a-1.0.5/PKM9bӍ maven-dubpackage-a-1.0.5/dub.sdlm;0ާ'Jj$ ǎ!8;)-g$ dubpackages 
maven-dubpackage-a 1.0.5 1.0.5 1.0.5 20180317184845 maven-dubpackage-b/000077500000000000000000000000001477246567400331525ustar00rootroot00000000000000dub-1.40.0/test/issue1524-maven-upgrade-dependency-tree/maven/release/dubpackages1.0.6/000077500000000000000000000000001477246567400336145ustar00rootroot00000000000000dub-1.40.0/test/issue1524-maven-upgrade-dependency-tree/maven/release/dubpackages/maven-dubpackage-bmaven-dubpackage-b-1.0.6.zip000066400000000000000000000014331477246567400404120ustar00rootroot00000000000000dub-1.40.0/test/issue1524-maven-upgrade-dependency-tree/maven/release/dubpackages/maven-dubpackage-b/1.0.6PKMmaven-dubpackage-b-1.0.6/PKȤME|" maven-dubpackage-b-1.0.6/dub.sdleK 0yV]cA7pMRq8?C-\i$eSe>EMXAyBn m[e#xgёzw}4V|PKR2K[\ maven-dubpackage-b-1.0.6/hello.d; 0=b4^ Xx+$ą|dj!kfRqWx 9L”öP5m‚UNa^ymPK?M$maven-dubpackage-b-1.0.6/ &o4&o4n4PK?ȤME|" $ 7maven-dubpackage-b-1.0.6/dub.sdl )!4n4n4PK?R2K[\ $ maven-dubpackage-b-1.0.6/hello.d ~0&o4&o4PKO(9e3972be4c63790c32257220f40c0af7dc41bec5maven-metadata.xml000066400000000000000000000005441477246567400365630ustar00rootroot00000000000000dub-1.40.0/test/issue1524-maven-upgrade-dependency-tree/maven/release/dubpackages/maven-dubpackage-b dubpackages maven-dubpackage-b 1.0.6 1.0.6 1.0.6 20180317184845 dub-1.40.0/test/issue1524-maven-upgrade-dependency-tree/source/000077500000000000000000000000001477246567400241215ustar00rootroot00000000000000dub-1.40.0/test/issue1524-maven-upgrade-dependency-tree/source/app.d000066400000000000000000000000151477246567400250420ustar00rootroot00000000000000void main(){}dub-1.40.0/test/issue1531-toolchain-requirements.sh000077500000000000000000000050471477246567400220610ustar00rootroot00000000000000#!/usr/bin/env bash set -e . 
$(dirname "${BASH_SOURCE[0]}")/common.sh cat << EOF | $DUB - || die $LINENO "Did not pass without toolchainRequirements" /+ dub.sdl: +/ void main() {} EOF # pass test dub requirement given as $1 function test_dub_req_pass { cat << EOF | $DUB - || die $LINENO "Did not pass requirement dub=\"$1\"" /+ dub.sdl: toolchainRequirements dub="$1" +/ void main() {} EOF } # fail test dub requirement given as $1 function test_dub_req_fail { ! cat << EOF | $DUB - || die $LINENO "Did not pass requirement dub=\"$1\"" /+ dub.sdl: toolchainRequirements dub="$1" +/ void main() {} EOF } test_dub_req_pass ">=1.7.0" test_dub_req_fail "~>0.9" test_dub_req_fail "~>999.0" # extract compiler version if [[ $DC == *ldc* ]] || [[ $DC == *ldmd* ]]; then VER_REG='\((([[:digit:]]+)(\.[[:digit:]]+\.[[:digit:]]+[A-Za-z0-9.+-]*))\)' DC_NAME=ldc elif [[ $DC == *dmd* ]]; then VER_REG='v(([[:digit:]]+)(\.[[:digit:]]+\.[[:digit:]]+[A-Za-z0-9.+-]*))' DC_NAME=dmd elif [[ $DC == *gdc* ]]; then VER_REG='\) (([[:digit:]]+)(\.[[:digit:]]+\.[[:digit:]]+[A-Za-z0-9.+-]*))' DC_NAME=gdc else die $LINENO "Did not recognize compiler" fi if [[ $($DC --version) =~ $VER_REG ]]; then DC_VER=${BASH_REMATCH[1]} DC_VER_MAJ=${BASH_REMATCH[2]} DC_VER_REM=${BASH_REMATCH[3]} $DC --version echo $DC version is $DC_VER else $DC --version die $LINENO "Could not extract compiler version" fi # create test app directory TMPDIR=$(mktemp -d /tmp/dubtest1531_XXXXXX) mkdir -p $TMPDIR/source cat << EOF > $TMPDIR/source/app.d module dubtest1531; void main() {} EOF # write dub.sdl with compiler requirement given as $1 function write_cl_req { cat << EOF > $TMPDIR/dub.sdl name "dubtest1531" toolchainRequirements ${DC_NAME}="$1" EOF } # pass test compiler requirement given as $1 function test_cl_req_pass { echo "Expecting success on '$DC $1'" 2>&1 write_cl_req $1 $DUB build -q --compiler=$DC --root=$TMPDIR || die $LINENO "Did not pass with $DC_NAME=\"$1\"" } # fail test compiler requirement given as $1 function test_cl_req_fail { echo 
"Expecting failure on '$DC $1'" 2>&1 write_cl_req $1 ! $DUB --compiler=$DC --root=$TMPDIR || die $LINENO "Did not fail with $DC_NAME=\"$1\"" } test_cl_req_pass "==$DC_VER" test_cl_req_pass ">=$DC_VER" test_cl_req_fail ">$DC_VER" test_cl_req_pass "<=$DC_VER" test_cl_req_fail "<$DC_VER" test_cl_req_pass ">=$DC_VER <$(($DC_VER_MAJ + 1))$DC_VER_REM" test_cl_req_pass "~>$DC_VER" test_cl_req_fail "~>$(($DC_VER_MAJ + 1))$DC_VER_REM" test_cl_req_fail no rm -rf $TMPDIR dub-1.40.0/test/issue1551-var-escaping/000077500000000000000000000000001477246567400173745ustar00rootroot00000000000000dub-1.40.0/test/issue1551-var-escaping/dub.json000066400000000000000000000002171477246567400210410ustar00rootroot00000000000000{ "name": "issue1551-var-escaping", "preGenerateCommands": [ "echo $${DUB_PACKAGE_DIR}", "echo $$DUB_PACKAGE_DIR" ] } dub-1.40.0/test/issue1551-var-escaping/source/000077500000000000000000000000001477246567400206745ustar00rootroot00000000000000dub-1.40.0/test/issue1551-var-escaping/source/app.d000066400000000000000000000001311477246567400216140ustar00rootroot00000000000000import std.stdio; void main() { writeln("Edit source/app.d to start your project."); } dub-1.40.0/test/issue1556-fetch-and-build-pkgs/000077500000000000000000000000001477246567400207125ustar00rootroot00000000000000dub-1.40.0/test/issue1556-fetch-and-build-pkgs/.no_build000066400000000000000000000000001477246567400224740ustar00rootroot00000000000000dub-1.40.0/test/issue1556-fetch-and-build-pkgs/dependency-package-1.0.0.zip000066400000000000000000000014341477246567400256010ustar00rootroot00000000000000PK Mdependency-package-1.0.0/UT N\l^ux PKئMhPTX dependency-package-1.0.0/hello.dUT \\ux O)IUHɷ*LQM(.)KUH,J/R܂= ei()iZsrPKnnPmހ dependency-package-1.0.0/dub.sdlUT l^l^ux mK 1}NzõIL' yq! 
+4Y$`![68q]]\} ]١^zžM<^>we DdI=~A?p̹5lF|PK MAdependency-package-1.0.0/UTN\ux PKئMhPTX Sdependency-package-1.0.0/hello.dUT\ux PKnnPmހ dependency-package-1.0.0/dub.sdlUTl^ux PK+dub-1.40.0/test/issue1556-fetch-and-build-pkgs/main-package-1.0.0.zip000066400000000000000000000013221477246567400244030ustar00rootroot00000000000000PK Mmain-package-1.0.0/UT \l^ux PKnnP>Xkmain-package-1.0.0/dub.jsonUT l^l^ux TKMURPM-HLNLOUJ%$d壕bA)y)yəJ V \Hp#F(qqrrPK!M?c'TXmain-package-1.0.0/hello.dUT \vl^ux O)IUHɷ*LQM(.)KUH,J/R܂= ei(+iZsrPK MAmain-package-1.0.0/UT\ux PKnnP>XkMmain-package-1.0.0/dub.jsonUTl^ux PK!M?c'TXmain-package-1.0.0/hello.dUT\ux PKdub-1.40.0/test/issue1556-fetch-and-build.sh000077500000000000000000000010371477246567400203100ustar00rootroot00000000000000#!/usr/bin/env bash DIR=$(dirname "${BASH_SOURCE[0]}") . "$DIR"/common.sh dub remove main-package --non-interactive 2>/dev/null || true dub remove dependency-package --non-interactive 2>/dev/null || true echo "Trying to fetch fs-sdl-dubpackage" "$DUB" --cache=local fetch main-package --skip-registry=all --registry=file://"$DIR"/issue1556-fetch-and-build-pkgs echo "Trying to build it (should fetch dependency-package)" "$DUB" --cache=local build main-package --skip-registry=all --registry=file://"$DIR"/issue1556-fetch-and-build-pkgs dub-1.40.0/test/issue1567-fetch-sub-package.sh000077500000000000000000000007401477246567400206350ustar00rootroot00000000000000#!/usr/bin/env bash . $(dirname "${BASH_SOURCE[0]}")/common.sh DIR=$(dirname "${BASH_SOURCE[0]}") packname="fetch-sub-package-dubpackage" sub_packagename="my-sub-package" ${DUB} remove $packname --non-interactive 2>/dev/null || true ${DUB} fetch "$packname:$sub_packagename" --skip-registry=all --registry=file://"$DIR"/issue1567-fetch-sub-package if ! ${DUB} remove $packname@1.0.1 2>/dev/null; then die $LINENO 'DUB did not install package $packname:$sub_packagename.' 
fi dub-1.40.0/test/issue1567-fetch-sub-package/000077500000000000000000000000001477246567400202755ustar00rootroot00000000000000dub-1.40.0/test/issue1567-fetch-sub-package/.no_build000066400000000000000000000000001477246567400220570ustar00rootroot00000000000000dub-1.40.0/test/issue1567-fetch-sub-package/.no_run000066400000000000000000000000001477246567400215640ustar00rootroot00000000000000dub-1.40.0/test/issue1567-fetch-sub-package/.no_test000066400000000000000000000000001477246567400217370ustar00rootroot00000000000000dub-1.40.0/test/issue1567-fetch-sub-package/fetch-sub-package-dubpackage-1.0.1.zip000066400000000000000000000037101477246567400270120ustar00rootroot00000000000000PKjN#fetch-sub-package-dubpackage-1.0.1/PKjN~Ca+fetch-sub-package-dubpackage-1.0.1/dub.json]̽ Y{3FUG?B!{O!snF 8iD z!*>DЕAΠ;,& AS Jrkb~gFPKәjN2fetch-sub-package-dubpackage-1.0.1/my-sub-package/PK jN$Z;==:fetch-sub-package-dubpackage-1.0.1/my-sub-package/dub.json{ "name": "my-sub-package", "targetType": "executable" }PKәjN9fetch-sub-package-dubpackage-1.0.1/my-sub-package/source/PK jNf`0LL>fetch-sub-package-dubpackage-1.0.1/my-sub-package/source/app.dimport std.stdio: writeln; void main() { writeln("my-sub-package"); }PKәjN*fetch-sub-package-dubpackage-1.0.1/source/PK jNN2ٝ44/fetch-sub-package-dubpackage-1.0.1/source/app.dvoid main() { assert(false, "Should not run"); }PK?jN#$fetch-sub-package-dubpackage-1.0.1/ UBmUBmPGmPK?jN~Ca+$ Afetch-sub-package-dubpackage-1.0.1/dub.json ik =m@Gm@GmPK?әjN2$fetch-sub-package-dubpackage-1.0.1/my-sub-package/ HmHmqGmPK? jN$Z;==:$ ;fetch-sub-package-dubpackage-1.0.1/my-sub-package/dub.json GIm:Hm:HmPK?әjN9$fetch-sub-package-dubpackage-1.0.1/my-sub-package/source/ HmHmHmPK? jNf`0LL>$ 'fetch-sub-package-dubpackage-1.0.1/my-sub-package/source/app.d w_mHmHmPK?әjN*$fetch-sub-package-dubpackage-1.0.1/source/ sImsIm$ImPK? 
jNN2ٝ44/$ fetch-sub-package-dubpackage-1.0.1/source/app.d (sImsImPKdub-1.40.0/test/issue1574-addcommand.sh000077500000000000000000000021361477246567400174520ustar00rootroot00000000000000#!/usr/bin/env bash DIR=$(dirname "${BASH_SOURCE[0]}") . "$DIR"/common.sh PORT=$(getRandomPort) tempDir="issue1574-addcommand" "$DUB" build --single "$DIR"/test_registry.d "$DIR"/test_registry --folder="$DIR/issue1336-registry" --port=$PORT & PID=$! sleep 1 function cleanup { cd .. rm -rf $tempDir kill $PID 2>/dev/null || true } trap cleanup EXIT $DUB init --non-interactive --format=json $tempDir cd $tempDir echo "import gitcompatibledubpackage.subdir.file; void main(){}" > source/app.d $DUB add gitcompatibledubpackage --skip-registry=all --registry=http://localhost:$PORT grep -q '"gitcompatibledubpackage"\s*:\s*"~>1\.0\.4"' dub.json $DUB add gitcompatibledubpackage=1.0.2 non-existing-issue1574-pkg='~>9.8.7' --skip-registry=all grep -q '"gitcompatibledubpackage"\s*:\s*"1\.0\.2"' dub.json grep -q '"non-existing-issue1574-pkg"\s*:\s*"~>9\.8\.7"' dub.json if $DUB add foo@1.2.3 gitcompatibledubpackage='~>a.b.c' --skip-registry=all; then die $LINENO 'Adding non-semver spec should error' fi if grep -q '"foo"' dub.json; then die $LINENO 'Failing add command should not write recipe file' fi dub-1.40.0/test/issue1574-addcommand.sh.min_frontend000066400000000000000000000000061477246567400221220ustar00rootroot000000000000002.077 dub-1.40.0/test/issue1636-betterC-dub-test.sh000077500000000000000000000001511477246567400204720ustar00rootroot00000000000000#!/bin/bash cd ${CURR_DIR}/issue1636-betterC-dub-test ${DUB} test | grep -c "TEST_WAS_RUN" > /dev/null dub-1.40.0/test/issue1636-betterC-dub-test/000077500000000000000000000000001477246567400201365ustar00rootroot00000000000000dub-1.40.0/test/issue1636-betterC-dub-test/.gitignore000066400000000000000000000000241477246567400221220ustar00rootroot00000000000000test *.o *.exe .dub 
dub-1.40.0/test/issue1636-betterC-dub-test/.min_frontend000066400000000000000000000000061477246567400226150ustar00rootroot000000000000002.078 dub-1.40.0/test/issue1636-betterC-dub-test/.no_run000066400000000000000000000000001477246567400214250ustar00rootroot00000000000000dub-1.40.0/test/issue1636-betterC-dub-test/dub.json000066400000000000000000000000621477246567400216010ustar00rootroot00000000000000{ "name": "test", "buildOptions": ["betterC"] } dub-1.40.0/test/issue1636-betterC-dub-test/source/000077500000000000000000000000001477246567400214365ustar00rootroot00000000000000dub-1.40.0/test/issue1636-betterC-dub-test/source/lib.d000066400000000000000000000002511477246567400223470ustar00rootroot00000000000000import core.stdc.stdio : printf; version(D_BetterC) {} else static assert(false); int foo() { return 2; } unittest { assert(foo == 2); printf("TEST_WAS_RUN\n"); } dub-1.40.0/test/issue1645-dflags-build.sh000077500000000000000000000006161477246567400177200ustar00rootroot00000000000000#!/usr/bin/env bash set -e # If DFLAGS are not processed, dub for library would fail DFLAGS="-w" $DUB build --root="$CURR_DIR"/1-staticLib-simple --build=plain if DFLAGS="-asfdsf" $DUB build --root="$CURR_DIR"/1-staticLib-simple --build=plain 2>/dev/null; then echo "Should not accept this DFLAGS"; false # fail fi $DUB build --root="$CURR_DIR"/1-staticLib-simple --build=plain --build=plain dub-1.40.0/test/issue1651-custom-dub-init-type.sh000077500000000000000000000012341477246567400213570ustar00rootroot00000000000000#!/usr/bin/env bash . $(dirname "${BASH_SOURCE[0]}")/common.sh DIR=$(dirname "${BASH_SOURCE[0]}") packname="custom-dub-init-type-sample" $DUB remove custom-dub-init-dubpackage --non-interactive 2>/dev/null || true $DUB init -n $packname --format sdl -t custom-dub-init-dubpackage --skip-registry=all --registry=file://"$DIR"/issue1651-custom-dub-init-type -- --foo=bar function cleanup { rm -rf $packname } if [ ! 
-e $packname/dub.sdl ]; then # it failed cleanup die $LINENO 'No dub.sdl file has been generated.' fi cd $packname if ! { ${DUB} 2>&1 || true; } | grep -cF 'foo=bar'; then cd .. cleanup die $LINENO 'Custom init type.' fi cd .. cleanup dub-1.40.0/test/issue1651-custom-dub-init-type/000077500000000000000000000000001477246567400210205ustar00rootroot00000000000000dub-1.40.0/test/issue1651-custom-dub-init-type/.no_build000066400000000000000000000000001477246567400226020ustar00rootroot00000000000000dub-1.40.0/test/issue1651-custom-dub-init-type/.no_run000066400000000000000000000000001477246567400223070ustar00rootroot00000000000000dub-1.40.0/test/issue1651-custom-dub-init-type/.no_test000066400000000000000000000000001477246567400224620ustar00rootroot00000000000000dub-1.40.0/test/issue1651-custom-dub-init-type/custom-dub-init-dubpackage-1.0.1.zip000066400000000000000000000040471477246567400273130ustar00rootroot00000000000000PKofjN!custom-dub-init-dubpackage-1.0.1/PK iN$a{)custom-dub-init-dubpackage-1.0.1/dub.jsonEM uwh-y&wOߦ0|3/g|{a.CTR'ܝňb0S};$~1A++TFk;,/PKofjN+custom-dub-init-dubpackage-1.0.1/init-exec/PK sfjN~8883custom-dub-init-dubpackage-1.0.1/init-exec/dub.json{ "name": "init-exec", "targetType": "executable" }PKofjN2custom-dub-init-dubpackage-1.0.1/init-exec/source/PKYNBħ7custom-dub-init-dubpackage-1.0.1/init-exec/source/app.d]Pj0 =/6î ݱv/xjKl+t^9.ۃ@H===aAm[ @jɡ4ݏ =켗 |[4(_Ph<~ũ, &{;V=0^C0o6[c9k/ҹ5=5r; iL(goh4bye+g&a]?H*금oNsR,;]C%CIy%~,=wQ|PKofjN(custom-dub-init-dubpackage-1.0.1/source/PK jNN2ٝ44-custom-dub-init-dubpackage-1.0.1/source/app.dvoid main() { assert(false, "Should not run"); }PK?ofjN!$custom-dub-init-dubpackage-1.0.1/ 77*7PK? iN$a{)$ ?custom-dub-init-dubpackage-1.0.1/dub.json X;7X;7PK?ofjN+$custom-dub-init-dubpackage-1.0.1/init-exec/ %7%7v7PK? 
sfjN~8883$ 0custom-dub-init-dubpackage-1.0.1/init-exec/dub.json e1777PK?ofjN2$custom-dub-init-dubpackage-1.0.1/init-exec/source/ s7s7%7PK?YNBħ7$ custom-dub-init-dubpackage-1.0.1/init-exec/source/app.d (Cs7s7PK?ofjN($Vcustom-dub-init-dubpackage-1.0.1/source/ >^7>^77PK? jNN2ٝ44-$ custom-dub-init-dubpackage-1.0.1/source/app.d (>^7>^7PKdub-1.40.0/test/issue1691-build-subpkg.sh000077500000000000000000000001741477246567400177530ustar00rootroot00000000000000#!/usr/bin/env bash . $(dirname "${BASH_SOURCE[0]}")/common.sh $DUB build --root="$CURR_DIR/issue1691-build-subpkg" :subpkg dub-1.40.0/test/issue1691-build-subpkg/000077500000000000000000000000001477246567400174125ustar00rootroot00000000000000dub-1.40.0/test/issue1691-build-subpkg/.gitignore000066400000000000000000000003641477246567400214050ustar00rootroot00000000000000.dub docs.json __dummy.html docs/ /issue1691-build-subpkg issue1691-build-subpkg.so issue1691-build-subpkg.dylib issue1691-build-subpkg.dll issue1691-build-subpkg.a issue1691-build-subpkg.lib issue1691-build-subpkg-test-* *.exe *.o *.obj *.lst dub-1.40.0/test/issue1691-build-subpkg/.no_build000066400000000000000000000000001477246567400211740ustar00rootroot00000000000000dub-1.40.0/test/issue1691-build-subpkg/dub.sdl000066400000000000000000000001261477246567400206670ustar00rootroot00000000000000name "issue1691-build-subpkg" dependency ":subpkg" version="*" subPackage "./subpkg/" dub-1.40.0/test/issue1691-build-subpkg/source/000077500000000000000000000000001477246567400207125ustar00rootroot00000000000000dub-1.40.0/test/issue1691-build-subpkg/source/app.d000066400000000000000000000000001477246567400216250ustar00rootroot00000000000000dub-1.40.0/test/issue1691-build-subpkg/subpkg/000077500000000000000000000000001477246567400207055ustar00rootroot00000000000000dub-1.40.0/test/issue1691-build-subpkg/subpkg/dub.sdl000066400000000000000000000000161477246567400221600ustar00rootroot00000000000000name "subpkg" 
dub-1.40.0/test/issue1691-build-subpkg/subpkg/source/000077500000000000000000000000001477246567400222055ustar00rootroot00000000000000dub-1.40.0/test/issue1691-build-subpkg/subpkg/source/subpkg.d000066400000000000000000000000001477246567400236330ustar00rootroot00000000000000dub-1.40.0/test/issue1739-project-settings-file.sh000077500000000000000000000006641477246567400216130ustar00rootroot00000000000000#!/usr/bin/env bash . $(dirname "${BASH_SOURCE[0]}")/common.sh cd ${CURR_DIR} echo "{\"defaultArchitecture\": \"foo\"}" > "dub.settings.json" function cleanup { rm "dub.settings.json" } trap cleanup EXIT if ! { ${DUB} describe --single issue103-single-file-package.d 2>&1 || true; } | grep -cF "Unsupported architecture: foo"; then die $LINENO 'DUB did not find the project configuration with an adjacent architecture.' fi dub-1.40.0/test/issue1773-lint.sh000077500000000000000000000006501477246567400163310ustar00rootroot00000000000000#!/usr/bin/env bash . $(dirname "${BASH_SOURCE[0]}")/common.sh cd ${CURR_DIR}/issue1773-lint rm -rf report.json if ! { ${DUB} lint || true; } | grep -cF "Parameter args is never used."; then die $LINENO 'DUB lint did not find expected warning.' fi ${DUB} lint --report-file report.json if ! grep -c -e "Parameter args is never used." report.json; then die $LINENO 'Linter report did not contain expected warning.' 
fi dub-1.40.0/test/issue1773-lint/000077500000000000000000000000001477246567400157715ustar00rootroot00000000000000dub-1.40.0/test/issue1773-lint/.gitignore000066400000000000000000000000701477246567400177560ustar00rootroot00000000000000.dub docs.json __dummy.html docs/ *.exe *.o *.obj *.lst dub-1.40.0/test/issue1773-lint/.no_build000066400000000000000000000000001477246567400175530ustar00rootroot00000000000000dub-1.40.0/test/issue1773-lint/dub.json000066400000000000000000000000231477246567400174310ustar00rootroot00000000000000{ "name": "test" }dub-1.40.0/test/issue1773-lint/source/000077500000000000000000000000001477246567400172715ustar00rootroot00000000000000dub-1.40.0/test/issue1773-lint/source/app.d000066400000000000000000000000351477246567400202140ustar00rootroot00000000000000void main(string[] args) { }dub-1.40.0/test/issue1775/000077500000000000000000000000001477246567400150275ustar00rootroot00000000000000dub-1.40.0/test/issue1775/.no_run000066400000000000000000000000001477246567400163160ustar00rootroot00000000000000dub-1.40.0/test/issue1775/.no_test000066400000000000000000000000001477246567400164710ustar00rootroot00000000000000dub-1.40.0/test/issue1775/dub.json000066400000000000000000000003111477246567400164670ustar00rootroot00000000000000{ "name": "test", "targetName": "test-application", "preBuildCommands-posix": [ "[ -f issue1775.marker ]" ], "preBuildCommands-windows": [ "if not exist issue1775.marker exit /b 1" ] } dub-1.40.0/test/issue1775/issue1775.marker000066400000000000000000000000001477246567400176740ustar00rootroot00000000000000dub-1.40.0/test/issue1775/source/000077500000000000000000000000001477246567400163275ustar00rootroot00000000000000dub-1.40.0/test/issue1775/source/app.d000066400000000000000000000000171477246567400172520ustar00rootroot00000000000000void main() {} 
dub-1.40.0/test/issue1788-incomplete-string-import-override/000077500000000000000000000000001477246567400236215ustar00rootroot00000000000000dub-1.40.0/test/issue1788-incomplete-string-import-override/b/000077500000000000000000000000001477246567400240425ustar00rootroot00000000000000dub-1.40.0/test/issue1788-incomplete-string-import-override/b/dub.sdl000066400000000000000000000000111477246567400253100ustar00rootroot00000000000000name "b" dub-1.40.0/test/issue1788-incomplete-string-import-override/b/source/000077500000000000000000000000001477246567400253425ustar00rootroot00000000000000dub-1.40.0/test/issue1788-incomplete-string-import-override/b/source/b/000077500000000000000000000000001477246567400255635ustar00rootroot00000000000000dub-1.40.0/test/issue1788-incomplete-string-import-override/b/source/b/foo.d000066400000000000000000000002321477246567400265100ustar00rootroot00000000000000module b.foo; string bar() { static immutable l = import("layout.diet"); pragma(msg, l); static assert(l == "fancylayout.diet"); return import(l); } dub-1.40.0/test/issue1788-incomplete-string-import-override/b/views/000077500000000000000000000000001477246567400251775ustar00rootroot00000000000000dub-1.40.0/test/issue1788-incomplete-string-import-override/b/views/layout.diet000066400000000000000000000000111477246567400273530ustar00rootroot00000000000000not fancydub-1.40.0/test/issue1788-incomplete-string-import-override/c/000077500000000000000000000000001477246567400240435ustar00rootroot00000000000000dub-1.40.0/test/issue1788-incomplete-string-import-override/c/dub.sdl000066400000000000000000000000111477246567400253110ustar00rootroot00000000000000name "c" 
dub-1.40.0/test/issue1788-incomplete-string-import-override/c/source/000077500000000000000000000000001477246567400253435ustar00rootroot00000000000000dub-1.40.0/test/issue1788-incomplete-string-import-override/c/source/dummy.d000066400000000000000000000000001477246567400266310ustar00rootroot00000000000000dub-1.40.0/test/issue1788-incomplete-string-import-override/c/views/000077500000000000000000000000001477246567400252005ustar00rootroot00000000000000dub-1.40.0/test/issue1788-incomplete-string-import-override/c/views/fancylayout.diet000066400000000000000000000000051477246567400304000ustar00rootroot00000000000000fancydub-1.40.0/test/issue1788-incomplete-string-import-override/dub.sdl000066400000000000000000000000721477246567400250760ustar00rootroot00000000000000name "a" dependency "b" path="b" dependency "c" path="c" dub-1.40.0/test/issue1788-incomplete-string-import-override/source/000077500000000000000000000000001477246567400251215ustar00rootroot00000000000000dub-1.40.0/test/issue1788-incomplete-string-import-override/source/app.d000066400000000000000000000001671477246567400260520ustar00rootroot00000000000000import b.foo; void main() { static assert(import("layout.diet") == "fancylayout.diet"); assert(bar() == "fancy"); } dub-1.40.0/test/issue1788-incomplete-string-import-override/views/000077500000000000000000000000001477246567400247565ustar00rootroot00000000000000dub-1.40.0/test/issue1788-incomplete-string-import-override/views/layout.diet000066400000000000000000000000201477246567400271320ustar00rootroot00000000000000fancylayout.dietdub-1.40.0/test/issue1856-build-unittest.sh000077500000000000000000000033451477246567400203450ustar00rootroot00000000000000#!/usr/bin/env bash set -euo pipefail TMPDIR=$(mktemp -d "$(basename "$0").XXXXXX") function cleanup { rm -rf "$TMPDIR" } trap cleanup EXIT # no unittest config cat > "$TMPDIR/no_ut.d" < "$TMPDIR/partial_ut.d" < "$TMPDIR/partial_ut2.d" < "$TMPDIR/full_ut.d" <&1 || true; } | grep -cF " -lowmem " > /dev/null; then 
die $LINENO 'DUB build with lowmem did not find -lowmem option.' fi if ! { ${DUB} test --root ${DIR}/issue1867-lowmem -v -f 2>&1 || true; } | grep -cF " -lowmem " > /dev/null; then die $LINENO 'DUB test with lowmem did not find -lowmem option.' fi if ! { ${DUB} run --root ${DIR}/issue1867-lowmem -v -f 2>&1 || true; } | grep -cF " -lowmem " > /dev/null; then die $LINENO 'DUB test with lowmem did not find -lowmem option.' fi if ! { ${DUB} describe --root ${DIR}/issue1867-lowmem --data=options --data-list --verror 2>&1 || true; } | grep -cF "lowmem" > /dev/null; then die $LINENO 'DUB describe --data=options --data-list with lowmem did not find lowmem option.' fi dub-1.40.0/test/issue1867-lowmem/000077500000000000000000000000001477246567400163275ustar00rootroot00000000000000dub-1.40.0/test/issue1867-lowmem/.gitignore000066400000000000000000000003121477246567400203130ustar00rootroot00000000000000.dub docs.json __dummy.html docs/ /issue1867-lowmem issue1867-lowmem.so issue1867-lowmem.dylib issue1867-lowmem.dll issue1867-lowmem.a issue1867-lowmem.lib issue1867-lowmem-test-* *.exe *.o *.obj *.lst dub-1.40.0/test/issue1867-lowmem/.no_build000066400000000000000000000000001477246567400201110ustar00rootroot00000000000000dub-1.40.0/test/issue1867-lowmem/.no_run000066400000000000000000000000001477246567400176160ustar00rootroot00000000000000dub-1.40.0/test/issue1867-lowmem/.no_test000066400000000000000000000000001477246567400177710ustar00rootroot00000000000000dub-1.40.0/test/issue1867-lowmem/dub.sdl000066400000000000000000000000301477246567400175760ustar00rootroot00000000000000name "issue1867-lowmem" dub-1.40.0/test/issue1867-lowmem/dub.settings.json000066400000000000000000000000361477246567400216320ustar00rootroot00000000000000{ "defaultLowMemory": true } 
dub-1.40.0/test/issue1867-lowmem/source/000077500000000000000000000000001477246567400176275ustar00rootroot00000000000000dub-1.40.0/test/issue1867-lowmem/source/app.d000066400000000000000000000001311477246567400205470ustar00rootroot00000000000000import std.stdio; void main() { writeln("Edit source/app.d to start your project."); } dub-1.40.0/test/issue2012-dc-env/000077500000000000000000000000001477246567400161625ustar00rootroot00000000000000dub-1.40.0/test/issue2012-dc-env/.no_build000066400000000000000000000000001477246567400177440ustar00rootroot00000000000000dub-1.40.0/test/issue2012-dc-env/app.d000066400000000000000000000005451477246567400171130ustar00rootroot00000000000000#!/usr/bin/env dub /+ dub.sdl: name "app" +/ import std.format; void main(string[] args) { version (LDC) immutable expected = "ldc2"; version (DigitalMars) immutable expected = "dmd"; version (GNU) immutable expected = "gdc"; assert(expected == args[1], format!"Expected '%s' but got '%s'"(expected, args[1])); } dub-1.40.0/test/issue2046-ignored-optional-with-path/000077500000000000000000000000001477246567400221725ustar00rootroot00000000000000dub-1.40.0/test/issue2046-ignored-optional-with-path/.no_build000066400000000000000000000000001477246567400237540ustar00rootroot00000000000000dub-1.40.0/test/issue2046-ignored-optional-with-path/.no_run000066400000000000000000000000001477246567400234610ustar00rootroot00000000000000dub-1.40.0/test/issue2046-ignored-optional-with-path/.no_test000066400000000000000000000000001477246567400236340ustar00rootroot00000000000000dub-1.40.0/test/issue2046-ignored-optional-with-path/dub.json000066400000000000000000000001001477246567400236260ustar00rootroot00000000000000{ "name": "issue2046", "dependencies": { "libbar": "*" } } dub-1.40.0/test/issue2046-ignored-optional-with-path/dub.selections.json-nofoo000066400000000000000000000001071477246567400271220ustar00rootroot00000000000000{ "fileVersion": 1, "versions": { "libbar": {"path":"libbar"} } } 
dub-1.40.0/test/issue2046-ignored-optional-with-path/dub.selections.json-usefoo000066400000000000000000000001461477246567400273050ustar00rootroot00000000000000{ "fileVersion": 1, "versions": { "libfoo": {"path":"libfoo"}, "libbar": {"path":"libbar"} } } dub-1.40.0/test/issue2046-ignored-optional-with-path/libbar/000077500000000000000000000000001477246567400234255ustar00rootroot00000000000000dub-1.40.0/test/issue2046-ignored-optional-with-path/libbar/dub.json000066400000000000000000000001611477246567400250700ustar00rootroot00000000000000{ "name": "libbar", "dependencies": { "libfoo": {"path": "../libfoo", "version": "*", "optional": true} } } dub-1.40.0/test/issue2046-ignored-optional-with-path/libbar/source/000077500000000000000000000000001477246567400247255ustar00rootroot00000000000000dub-1.40.0/test/issue2046-ignored-optional-with-path/libbar/source/libbar/000077500000000000000000000000001477246567400261605ustar00rootroot00000000000000dub-1.40.0/test/issue2046-ignored-optional-with-path/libbar/source/libbar/bar.d000066400000000000000000000002611477246567400270700ustar00rootroot00000000000000module libbar.bar; void function() bar; static this() { version (Have_libfoo) import libfoo.foo; else static void foo() { import std; writeln("no-foo"); } bar = &foo; } dub-1.40.0/test/issue2046-ignored-optional-with-path/libfoo/000077500000000000000000000000001477246567400234445ustar00rootroot00000000000000dub-1.40.0/test/issue2046-ignored-optional-with-path/libfoo/dub.json000066400000000000000000000000261477246567400251070ustar00rootroot00000000000000{ "name": "libfoo" } 
dub-1.40.0/test/issue2046-ignored-optional-with-path/libfoo/source/000077500000000000000000000000001477246567400247445ustar00rootroot00000000000000dub-1.40.0/test/issue2046-ignored-optional-with-path/libfoo/source/libfoo/000077500000000000000000000000001477246567400262165ustar00rootroot00000000000000dub-1.40.0/test/issue2046-ignored-optional-with-path/libfoo/source/libfoo/foo.d000066400000000000000000000001021477246567400271370ustar00rootroot00000000000000module libfoo.foo; import std; void foo() { writeln("use-foo"); } dub-1.40.0/test/issue2046-ignored-optional-with-path/source/000077500000000000000000000000001477246567400234725ustar00rootroot00000000000000dub-1.40.0/test/issue2046-ignored-optional-with-path/source/app.d000066400000000000000000000000541477246567400244160ustar00rootroot00000000000000import libbar.bar; void main() { bar(); } dub-1.40.0/test/issue2051_running_unittests_from_dub_single_file_packages_fails.d000066400000000000000000000040751477246567400303010ustar00rootroot00000000000000/+ dub.sdl: name "issue2051" +/ import std.algorithm : any; import std.conv : text; import std.file : tempDir; import std.stdio : File, writeln; import std.string : lineSplitter; import std.path : buildPath, buildNormalizedPath; import std.process : environment, executeShell; auto executeCommand(string command) { import std.exception : enforce; auto dub = executeShell(command); writeln("--- dub output:"); foreach(line; dub.output.lineSplitter) writeln("\t", line); writeln("--- end of dub output"); return dub.status; } int main() { auto dub = environment.get("DUB"); if (!dub.length) dub = buildPath(".", "bin", "dub"); string destinationDirectory = tempDir; // remove any ending slahes (which can for some reason be added at the end by tempDir, which fails on OSX) https://issues.dlang.org/show_bug.cgi?id=22738 destinationDirectory = buildNormalizedPath(destinationDirectory); string filename; // check if the single file package with dependency compiles and runs { filename = 
destinationDirectory.buildPath("issue2051_success.d"); auto f = File(filename, "w"); f.write( `#!/usr/bin/env dub /+ dub.sdl: name "issue2051" dependency "taggedalgebraic" version="~>0.11.0" +/ version(unittest) {} else void main() { } unittest { import taggedalgebraic; static union Base { int i; string str; } auto dummy = TaggedAlgebraic!Base(1721); assert(dummy == 1721); } ` ); } const rc1 = text(dub, " test --single \"", filename, "\"").executeCommand; if (rc1) writeln("\nError. Unittests failed."); else writeln("\nOk. Unittest passed."); // Check if dub `test` command runs unittests for single file package { filename = destinationDirectory.buildPath("issue2051_fail.d"); auto f = File(filename, "w"); f.write( `#!/usr/bin/env dub /+ dub.sdl: name "issue2051" +/ version(unittest) {} else void main() { } unittest { assert(0); } ` ); } const rc2 = text(dub, " test --single \"", filename, "\"").executeCommand; if (rc2) writeln("\nOk. Unittests failed."); else writeln("\nError. Unittest passed."); return rc1 | !rc2; } dub-1.40.0/test/issue2085-target-none-visuald.sh000077500000000000000000000004051477246567400212460ustar00rootroot00000000000000#!/usr/bin/env bash . $(dirname "${BASH_SOURCE[0]}")/common.sh cd "${CURR_DIR}/issue2085-target-none-visuald" || die "Could not cd." "$DUB" generate visuald if grep -c -e \"
\" .dub/root.visualdproj; then die $LINENO 'Regression of issue #2085.' fi dub-1.40.0/test/issue2085-target-none-visuald/000077500000000000000000000000001477246567400207105ustar00rootroot00000000000000dub-1.40.0/test/issue2085-target-none-visuald/.no_build000066400000000000000000000000001477246567400224720ustar00rootroot00000000000000dub-1.40.0/test/issue2085-target-none-visuald/.no_run000066400000000000000000000000001477246567400221770ustar00rootroot00000000000000dub-1.40.0/test/issue2085-target-none-visuald/dub.json000066400000000000000000000002201477246567400223470ustar00rootroot00000000000000{ "name": "root", "targetType": "none", "dependencies": { "root:sub": "*" }, "subPackages": [ "sub" ] } dub-1.40.0/test/issue2085-target-none-visuald/sub/000077500000000000000000000000001477246567400215015ustar00rootroot00000000000000dub-1.40.0/test/issue2085-target-none-visuald/sub/dub.json000066400000000000000000000001171477246567400231450ustar00rootroot00000000000000{ "name": "sub", "targetType": "executable", "targetName": "sub" } dub-1.40.0/test/issue2085-target-none-visuald/sub/source/000077500000000000000000000000001477246567400230015ustar00rootroot00000000000000dub-1.40.0/test/issue2085-target-none-visuald/sub/source/app.d000066400000000000000000000000171477246567400237240ustar00rootroot00000000000000void main() {} dub-1.40.0/test/issue2086-copyfiles-subpackage-targetpath.sh000077500000000000000000000003121477246567400236150ustar00rootroot00000000000000#!/usr/bin/env bash . $(dirname "${BASH_SOURCE[0]}")/common.sh cd "${CURR_DIR}/issue2086-copyfiles-subpackage-targetpath" || die "Could not cd." 
rm -f "sub/to_be_deployed.txt" "$DUB" build ./sub/sub dub-1.40.0/test/issue2086-copyfiles-subpackage-targetpath/000077500000000000000000000000001477246567400232625ustar00rootroot00000000000000dub-1.40.0/test/issue2086-copyfiles-subpackage-targetpath/.no_run000066400000000000000000000000001477246567400245510ustar00rootroot00000000000000dub-1.40.0/test/issue2086-copyfiles-subpackage-targetpath/.no_test000066400000000000000000000000001477246567400247240ustar00rootroot00000000000000dub-1.40.0/test/issue2086-copyfiles-subpackage-targetpath/dub.json000066400000000000000000000002201477246567400247210ustar00rootroot00000000000000{ "name": "root", "targetType": "none", "dependencies": { "root:sub": "*" }, "subPackages": [ "sub" ] } dub-1.40.0/test/issue2086-copyfiles-subpackage-targetpath/sub/000077500000000000000000000000001477246567400240535ustar00rootroot00000000000000dub-1.40.0/test/issue2086-copyfiles-subpackage-targetpath/sub/dub.json000066400000000000000000000001731477246567400255210ustar00rootroot00000000000000{ "name": "sub", "targetType": "executable", "targetName": "sub", "copyFiles": [ "files/*" ] } dub-1.40.0/test/issue2086-copyfiles-subpackage-targetpath/sub/files/000077500000000000000000000000001477246567400251555ustar00rootroot00000000000000dub-1.40.0/test/issue2086-copyfiles-subpackage-targetpath/sub/files/to_be_deployed.txt000066400000000000000000000000001477246567400306610ustar00rootroot00000000000000dub-1.40.0/test/issue2086-copyfiles-subpackage-targetpath/sub/source/000077500000000000000000000000001477246567400253535ustar00rootroot00000000000000dub-1.40.0/test/issue2086-copyfiles-subpackage-targetpath/sub/source/app.d000066400000000000000000000003451477246567400263020ustar00rootroot00000000000000import std.exception: enforce; import std.file: exists, thisExePath; import std.path: dirName, buildPath; void main() { string filePath = buildPath(thisExePath.dirName, "to_be_deployed.txt"); enforce(filePath.exists); } 
dub-1.40.0/test/issue2190-unset-TEMP.script.d000066400000000000000000000021041477246567400203630ustar00rootroot00000000000000/+ dub.json: { "name": "issue2190_unset_TEMP" } +/ module issue2190_unset_TEMP.script; int main() { import std.stdio; import std.algorithm; import std.path; import std.process; const dir = __FILE_FULL_PATH__.dirName(); // doesn't matter, just pick something const file = buildPath(dir, "single-file-sdl-default-name.d"); const dub = environment.get("DUB", buildPath(dirName(dir), "bin", "dub.exe")); int exitCode; void runTest(scope const string[] cmd) { const result = execute(cmd); if (result.status || result.output.canFind("Failed")) { writefln("\n> %-(%s %)", cmd); writeln("==========================================================="); writeln(result.output); writeln("==========================================================="); writeln("Last command failed with exit code ", result.status, '\n'); exitCode = 1; } } environment.remove("TEMP"); // only guaranteed to be there on Windows // See: runDubCommandLine in commandline version(Windows) { runTest([ dub, "build", "--single", file, ]); } return exitCode; } dub-1.40.0/test/issue2192-environment-variables.sh000077500000000000000000000015401477246567400216700ustar00rootroot00000000000000#!/usr/bin/env bash . $(dirname "${BASH_SOURCE[0]}")/common.sh if [ -n "${DUB_PACKAGE-}" ]; then die $LINENO '$DUB_PACKAGE must not be set when running this test!' fi if ! { $DUB build --force --root "$CURR_DIR/issue2192-environment-variables" --skip-registry=all; }; then die $LINENO 'Failed to build package with built-in environment variables.' fi if [ -s "$CURR_DIR/issue2192-environment-variables/package.txt" ]; then rm "$CURR_DIR/issue2192-environment-variables/package.txt" else die $LINENO 'Expected generated package.txt file is missing.' 
fi OUTPUT=$($DUB describe --root "$CURR_DIR/issue2192-environment-variables" --skip-registry=all --data=pre-build-commands --data-list) if [ "$OUTPUT" != "echo 'issue2192-environment-variables' > package.txt" ]; then die $LINENO 'describe did not contain subtituted values or the correct package name' fi dub-1.40.0/test/issue2192-environment-variables/000077500000000000000000000000001477246567400213315ustar00rootroot00000000000000dub-1.40.0/test/issue2192-environment-variables/.no_run000066400000000000000000000000001477246567400226200ustar00rootroot00000000000000dub-1.40.0/test/issue2192-environment-variables/dub.sdl000066400000000000000000000001341477246567400226050ustar00rootroot00000000000000name "issue2192-environment-variables" preBuildCommands "echo '$DUB_PACKAGE' > package.txt" dub-1.40.0/test/issue2192-environment-variables/source/000077500000000000000000000000001477246567400226315ustar00rootroot00000000000000dub-1.40.0/test/issue2192-environment-variables/source/lib.d000066400000000000000000000000141477246567400235370ustar00rootroot00000000000000module lib; dub-1.40.0/test/issue2234-copy-read-only-files.script.d000066400000000000000000000060041477246567400224260ustar00rootroot00000000000000/+ dub.json: { "name": "issue2234_copy_read_only_files" } +/ /* When DUB copies read-only files to the targetPath, the read-only flag must be removed. If not, any subsequent copy operations will fail. Version control systems such as Git Large File Storage typically mark binary files as read-only, to prevent simultaneous edits in unmergeable formats. 
*/ module issue2234_copy_read_only_files.script; import std.algorithm.searching, std.algorithm.iteration, std.stdio, std.process, std.path, std.file; int main() { const project_dir = buildPath(__FILE_FULL_PATH__.dirName, "issue2234-copy-read-only-files"); const deployment_dir = buildPath(project_dir, "bin"); auto deployables = dirEntries(buildPath(project_dir, "files"), "*", SpanMode.depth).filter!isFile; // Prepare environment. if (deployment_dir.exists) { foreach (entry; dirEntries(deployment_dir, "*", SpanMode.depth)) { if (entry.isDir) entry.rmdir; else { entry.makeWritable; entry.remove; } } deployment_dir.rmdir; } foreach (ref f; deployables) f.makeReadOnly; // Execute test. const dub = environment.get("DUB", buildPath(__FILE_FULL_PATH__.dirName.dirName, "bin", "dub.exe")); const cmd = [dub, "build", "--build=release"]; const result = execute(cmd, null, Config.none, size_t.max, project_dir); if (result.status || result.output.canFind("Failed")) { writefln("\n> %-(%s %)", cmd); writeln("==========================================================="); writeln(result.output); writeln("==========================================================="); writeln("Last command failed with exit code ", result.status, '\n'); return 1; } foreach (deployed; dirEntries(deployment_dir, "*", SpanMode.depth).filter!isFile) if (!isWritable(deployed)) { writeln(deployed, " is expected to be writable, but it is not."); return 1; } return 0; } void makeReadOnly(string name) { version (Windows) { import core.sys.windows.windows; name.setAttributes(name.getAttributes() | FILE_ATTRIBUTE_READONLY); } else version (Posix) { import core.sys.posix.sys.stat; name.setAttributes(name.getAttributes() & ~(S_IWUSR | S_IWGRP | S_IWOTH)); } else static assert("Needs implementation."); import std.exception; import std.stdio; assertThrown!ErrnoException(File(name, "w")); } void makeWritable(string name) { version (Windows) { import core.sys.windows.windows; name.setAttributes(name.getAttributes() & 
~FILE_ATTRIBUTE_READONLY); } else version (Posix) { import core.sys.posix.sys.stat; name.setAttributes(name.getAttributes() | S_IWUSR); } else static assert("Needs implementation."); import std.exception; import std.stdio; assertNotThrown!ErrnoException(File(name, "w")); } bool isWritable(string name) { version (Windows) { import core.sys.windows.windows; return (name.getAttributes() & FILE_ATTRIBUTE_READONLY) == 0; } else version (Posix) { import core.sys.posix.sys.stat; return (name.getAttributes() & S_IWUSR) != 0; } else static assert("Needs implementation."); } dub-1.40.0/test/issue2234-copy-read-only-files/000077500000000000000000000000001477246567400207565ustar00rootroot00000000000000dub-1.40.0/test/issue2234-copy-read-only-files/.gitignore000066400000000000000000000000041477246567400227400ustar00rootroot00000000000000bin dub-1.40.0/test/issue2234-copy-read-only-files/dub.json000066400000000000000000000002071477246567400224220ustar00rootroot00000000000000{ "name": "issue2234_copy_read_only_files", "copyFiles": [ "files/to_be_deployed.bin", "files/images" ], "targetPath": "bin" } dub-1.40.0/test/issue2234-copy-read-only-files/files/000077500000000000000000000000001477246567400220605ustar00rootroot00000000000000dub-1.40.0/test/issue2234-copy-read-only-files/files/images/000077500000000000000000000000001477246567400233255ustar00rootroot00000000000000dub-1.40.0/test/issue2234-copy-read-only-files/files/images/to_be_deployed.img000066400000000000000000000000001477246567400267660ustar00rootroot00000000000000dub-1.40.0/test/issue2234-copy-read-only-files/files/to_be_deployed.bin000066400000000000000000000000001477246567400255150ustar00rootroot00000000000000dub-1.40.0/test/issue2234-copy-read-only-files/source/000077500000000000000000000000001477246567400222565ustar00rootroot00000000000000dub-1.40.0/test/issue2234-copy-read-only-files/source/app.d000066400000000000000000000000171477246567400232010ustar00rootroot00000000000000void main() {} 
dub-1.40.0/test/issue2258-dynLib-exe-dep/000077500000000000000000000000001477246567400175705ustar00rootroot00000000000000dub-1.40.0/test/issue2258-dynLib-exe-dep/.no_build_dmd000066400000000000000000000000001477246567400221760ustar00rootroot00000000000000dub-1.40.0/test/issue2258-dynLib-exe-dep/.no_build_gdc000066400000000000000000000000001477246567400221670ustar00rootroot00000000000000dub-1.40.0/test/issue2258-dynLib-exe-dep/dub.json000066400000000000000000000004471477246567400212420ustar00rootroot00000000000000{ "name": "dynlib-exe-dep", "targetType": "executable", "dependencies": { "dynlib-simple": { "path": "../1-dynLib-simple/" } }, "lflags-linux": ["-rpath", "$$ORIGIN"], "lflags-darwin": ["-rpath", "@executable_path"], "dflags-ldc": ["-link-defaultlib-shared"] } dub-1.40.0/test/issue2258-dynLib-exe-dep/source/000077500000000000000000000000001477246567400210705ustar00rootroot00000000000000dub-1.40.0/test/issue2258-dynLib-exe-dep/source/app.d000066400000000000000000000001071477246567400220130ustar00rootroot00000000000000module app; import dynlib.app; extern(C) void main() { entry(); } dub-1.40.0/test/issue2262-exact-cached-version-match.sh000077500000000000000000000026131477246567400224440ustar00rootroot00000000000000#!/usr/bin/env bash . $(dirname ${BASH_SOURCE[0]})/common.sh PACK_PATH="$CURR_DIR"/issue2262-exact-cached-version-match # make sure that there are no left-over selections files rm -f $PACK_PATH/dub.selections.json # make sure that there are no cached versions of the dependency dub remove gitcompatibledubpackage@* -n || true # build normally, should select 1.0.4 if ! ${DUB} build --root $PACK_PATH | grep "gitcompatibledubpackage 1\.0\.4:"; then die $LINENO 'The initial build failed.' 
fi dub remove gitcompatibledubpackage@* -n || true # build with git dependency to a specific commit cat > $PACK_PATH/dub.selections.json << EOF { "fileVersion": 1, "versions": { "gitcompatibledubpackage": { "repository": "git+https://github.com/dlang-community/gitcompatibledubpackage.git", "version": "ccb31bf6a655437176ec02e04c2305a8c7c90d67" } } } EOF if ! ${DUB} build --root $PACK_PATH | grep "gitcompatibledubpackage 1\.0\.4+commit\.2\.gccb31bf:"; then die $LINENO 'The build with a specific commit failed.' fi # select 1.0.4 again cat > $PACK_PATH/dub.selections.json << EOF { "fileVersion": 1, "versions": { "gitcompatibledubpackage": "1.0.4" } } EOF if ! ${DUB} build --root $PACK_PATH | grep "gitcompatibledubpackage 1\.0\.4:"; then die $LINENO 'The second 1.0.4 build failed.' fi # clean up rm -f $PACK_PATH/dub.selections.json dub-1.40.0/test/issue2262-exact-cached-version-match/000077500000000000000000000000001477246567400221035ustar00rootroot00000000000000dub-1.40.0/test/issue2262-exact-cached-version-match/.no_build000066400000000000000000000000001477246567400236650ustar00rootroot00000000000000dub-1.40.0/test/issue2262-exact-cached-version-match/dub.sdl000066400000000000000000000001071477246567400233570ustar00rootroot00000000000000name "testproj" dependency "gitcompatibledubpackage" version="~>1.0.4" dub-1.40.0/test/issue2262-exact-cached-version-match/source/000077500000000000000000000000001477246567400234035ustar00rootroot00000000000000dub-1.40.0/test/issue2262-exact-cached-version-match/source/app.d000066400000000000000000000000171477246567400243260ustar00rootroot00000000000000void main() {} dub-1.40.0/test/issue2348-postbuildcommands.script.d000066400000000000000000000015321477246567400222220ustar00rootroot00000000000000/+ dub.sdl: name "issue2348" buildType "test" { buildOptions "syntaxOnly" postBuildCommands "echo xxx" } +/ module issue2348; import std.process; import std.stdio; import std.algorithm; import std.path; int main() { const dub = 
environment.get("DUB", buildPath(__FILE_FULL_PATH__.dirName.dirName, "bin", "dub.exe")); const cmd = [dub, "build", "--build=test", "--single", __FILE_FULL_PATH__]; const result = execute(cmd, null, Config.none, size_t.max, __FILE_FULL_PATH__.dirName); if (result.status || result.output.canFind("Failed")) { writefln("\n> %-(%s %)", cmd); writeln("==========================================================="); writeln(result.output); writeln("==========================================================="); writeln("Last command failed with exit code ", result.status, '\n'); return 1; } return 0; } dub-1.40.0/test/issue2377-dynLib-dep-extra-files.script.d000066400000000000000000000103711477246567400227060ustar00rootroot00000000000000/+ dub.sdl: name "issue2377_dynlib_dep_extra_files" +/ module issue2377_dynlib_dep_extra_files.script; import std.exception : enforce; import std.file; import std.path; version (DigitalMars) version (Windows) version = DMD_Windows; version (DMD_Windows) { void main() { import std.stdio; writeln("WARNING: skipping test '" ~ __FILE_FULL_PATH__.baseName ~ "' with DMD on Windows."); } } else: void main() { import std.process : environment; version (Windows) enum exeExt = ".exe"; else enum exeExt = ""; const dub = environment.get("DUB", buildPath(__FILE_FULL_PATH__.dirName.dirName, "bin", "dub"~exeExt)); enum testDir = buildPath(__FILE_FULL_PATH__.dirName, "issue2377-dynLib-dep-extra-files"); // 1. 
`parent` as root package (depending on dynamic/static dep1, which depends on dynamic/static dep2) chdir(buildPath(testDir, "parent")); if (exists("output")) rmdirRecurse("output"); // 1.1 dynlib config run(dub ~ " build -c dynlib"); chdir("output/dynlib"); assertDynLibExists("parent"); assertDynLibExists("dep1"); assertDynLibExists("dep2"); version (Windows) { assertFileExists("parent.pdb"); assertFileExists("parent.lib"); assertFileExists("parent.exp"); assertFileExists("dep1.pdb"); assertFileExists("dep1.lib"); assertFileExists("dep1.exp"); assertFileExists("dep2.pdb"); assertFileExists("dep2.lib"); assertFileExists("dep2.exp"); } chdir("../.."); // 1.2 dynlib_static config run(dub ~ " build -c dynlib_static"); chdir("output/dynlib_static"); assertDynLibExists("parent"); version (Windows) { assertFileExists("parent.pdb"); assertFileExists("parent.lib"); assertFileExists("parent.exp"); } enforce(!canFindFiles("*dep*"), "unexpected dependency files in statically linked dynlib output dir"); chdir("../.."); // 1.3 exe_static config run(dub ~ " build -c exe_static"); chdir("output/exe_static"); version (Windows) run(`.\parent.exe`); else run("./parent"); version (Windows) { assertFileExists("parent.pdb"); enforce(!exists("parent.lib"), "unexpected import .lib for executable"); enforce(!exists("parent.exp"), "unexpected .exp file for executable"); } enforce(!canFindFiles("*dep*"), "unexpected dependency files in statically linked executable output dir"); chdir("../.."); // 1.4 exe_dynamic config run(dub ~ " build -c exe_dynamic"); chdir("output/exe_dynamic"); version (Windows) run(`.\parent.exe`); else run(`LD_LIBRARY_PATH=".:${LD_LIBRARY_PATH:-}" ./parent`); assertDynLibExists("dep1"); assertDynLibExists("dep2"); version (Windows) { assertFileExists("dep1.pdb"); assertFileExists("dep2.pdb"); enforce(!canFindFiles("*.lib"), "unexpected import libs in dynamically linked executable output dir"); enforce(!canFindFiles("*.exp"), "unexpected .exp files in dynamically linked 
executable output dir"); } chdir("../.."); // 2. `framework` as root package (targetType `none`) chdir(buildPath(testDir, "framework")); run(dub ~ " build"); assertDynLibExists("dep1"); assertDynLibExists("dep2"); version (Windows) { assertFileExists("dep1.pdb"); assertFileExists("dep1.lib"); assertFileExists("dep1.exp"); assertFileExists("dep2.pdb"); assertFileExists("dep2.lib"); assertFileExists("dep2.exp"); } } void run(string command) { import std.process; const status = spawnShell(command).wait(); enforce(status == 0, "command '" ~ command ~ "' failed"); } void assertFileExists(string path) { enforce(exists(path), "expected file '" ~ path ~ "' not found"); } void assertDynLibExists(string name) { version (Windows) { enum prefix = ""; enum suffix = ".dll"; } else version (OSX) { enum prefix = "lib"; enum suffix = ".dylib"; } else { enum prefix = "lib"; enum suffix = ".so"; } assertFileExists(prefix ~ name ~ suffix); } bool canFindFiles(string pattern) { auto entries = dirEntries(".", pattern, SpanMode.shallow); return !entries.empty(); } dub-1.40.0/test/issue2377-dynLib-dep-extra-files/000077500000000000000000000000001477246567400212345ustar00rootroot00000000000000dub-1.40.0/test/issue2377-dynLib-dep-extra-files/.gitignore000066400000000000000000000000201477246567400232140ustar00rootroot00000000000000/parent/output/ dub-1.40.0/test/issue2377-dynLib-dep-extra-files/.no_build000066400000000000000000000000001477246567400230160ustar00rootroot00000000000000dub-1.40.0/test/issue2377-dynLib-dep-extra-files/dep1/000077500000000000000000000000001477246567400220655ustar00rootroot00000000000000dub-1.40.0/test/issue2377-dynLib-dep-extra-files/dep1/dub.sdl000066400000000000000000000004571477246567400233510ustar00rootroot00000000000000name "dep1" dependency "dep2" path="../dep2" configuration "library" { targetType "staticLibrary" targetPath "output/library" subConfiguration "dep2" "library" } configuration "dynlib" { targetType "dynamicLibrary" targetPath "output/dynlib" 
subConfiguration "dep2" "dynlib" } dub-1.40.0/test/issue2377-dynLib-dep-extra-files/dep1/source/000077500000000000000000000000001477246567400233655ustar00rootroot00000000000000dub-1.40.0/test/issue2377-dynLib-dep-extra-files/dep1/source/dep1.d000066400000000000000000000000731477246567400243630ustar00rootroot00000000000000module dep1; void foo1() { import dep2; foo2(); } dub-1.40.0/test/issue2377-dynLib-dep-extra-files/dep2/000077500000000000000000000000001477246567400220665ustar00rootroot00000000000000dub-1.40.0/test/issue2377-dynLib-dep-extra-files/dep2/dub.sdl000066400000000000000000000003031477246567400233400ustar00rootroot00000000000000name "dep2" configuration "library" { targetType "staticLibrary" targetPath "output/library" } configuration "dynlib" { targetType "dynamicLibrary" targetPath "output/dynlib" } dub-1.40.0/test/issue2377-dynLib-dep-extra-files/dep2/source/000077500000000000000000000000001477246567400233665ustar00rootroot00000000000000dub-1.40.0/test/issue2377-dynLib-dep-extra-files/dep2/source/dep2.d000066400000000000000000000000351477246567400243630ustar00rootroot00000000000000module dep2; void foo2() {} dub-1.40.0/test/issue2377-dynLib-dep-extra-files/framework/000077500000000000000000000000001477246567400232315ustar00rootroot00000000000000dub-1.40.0/test/issue2377-dynLib-dep-extra-files/framework/dub.sdl000066400000000000000000000001461477246567400245100ustar00rootroot00000000000000name "framework" targetType "none" dependency "dep1" path="../dep1" subConfiguration "dep1" "dynlib" dub-1.40.0/test/issue2377-dynLib-dep-extra-files/parent/000077500000000000000000000000001477246567400225255ustar00rootroot00000000000000dub-1.40.0/test/issue2377-dynLib-dep-extra-files/parent/dub.sdl000066400000000000000000000013571477246567400240110ustar00rootroot00000000000000name "parent" dependency "dep1" path="../dep1" configuration "dynlib" { targetType "dynamicLibrary" targetPath "output/dynlib" subConfiguration "dep1" "dynlib" } configuration "dynlib_static" { 
targetType "dynamicLibrary" targetPath "output/dynlib_static" subConfiguration "dep1" "library" } configuration "exe_static" { targetType "executable" targetPath "output/exe_static" subConfiguration "dep1" "library" } configuration "exe_dynamic" { targetType "executable" targetPath "output/exe_dynamic" subConfiguration "dep1" "dynlib" dflags "-link-defaultlib-shared" platform="ldc" dflags "-defaultlib=libphobos2.so" platform="linux-dmd" lflags "-rpath" "@executable_path" platform="osx" } dub-1.40.0/test/issue2377-dynLib-dep-extra-files/parent/source/000077500000000000000000000000001477246567400240255ustar00rootroot00000000000000dub-1.40.0/test/issue2377-dynLib-dep-extra-files/parent/source/app.d000066400000000000000000000004021477246567400247460ustar00rootroot00000000000000module app; // Add a dummy export to enforce creation of import .lib and .exp file for the (Windows) executable. // They shouldn't be copied to the output dir though. export void dummy() {} void main() { import parent; parent_bar(); dummy(); } dub-1.40.0/test/issue2377-dynLib-dep-extra-files/parent/source/parent.d000066400000000000000000000001031477246567400254550ustar00rootroot00000000000000module parent; void parent_bar() { import dep1; foo1(); } dub-1.40.0/test/issue2448/000077500000000000000000000000001477246567400150255ustar00rootroot00000000000000dub-1.40.0/test/issue2448/dub.json000066400000000000000000000001571477246567400164750ustar00rootroot00000000000000{ "name": "use-source-files", "description": "Example of using source files.", "sourceFiles": ["ext/*.d"] } dub-1.40.0/test/issue2448/ext/000077500000000000000000000000001477246567400156255ustar00rootroot00000000000000dub-1.40.0/test/issue2448/ext/kekw.d000066400000000000000000000000561477246567400167340ustar00rootroot00000000000000extern(C) string funkekw () { return "KEKW";} 
dub-1.40.0/test/issue2448/source/000077500000000000000000000000001477246567400163255ustar00rootroot00000000000000dub-1.40.0/test/issue2448/source/app.d000066400000000000000000000001431477246567400172500ustar00rootroot00000000000000import std.stdio; extern(C) string funkekw (); void main() { writefln("Juan: %s", funkekw()); } dub-1.40.0/test/issue2452/000077500000000000000000000000001477246567400150205ustar00rootroot00000000000000dub-1.40.0/test/issue2452/.no_test000066400000000000000000000000001477246567400164620ustar00rootroot00000000000000dub-1.40.0/test/issue2452/dub.json000066400000000000000000000006711477246567400164710ustar00rootroot00000000000000{ "name": "generated-sources-and-source-files-without-glob", "description": "Example of using pre generate commands and sourceFiles without glob.", "sourceFiles": ["ext/fortytwo.d"], "preGenerateCommands-posix": [ "mkdir -p ext", "echo 'extern(C) int fun42 () { return 42; }' > ext/fortytwo.d" ], "preGenerateCommands-windows": [ "if not exist ext mkdir ext", "echo extern(C) int fun42 () { return 42; } > ext/fortytwo.d" ] } dub-1.40.0/test/issue2452/source/000077500000000000000000000000001477246567400163205ustar00rootroot00000000000000dub-1.40.0/test/issue2452/source/app.d000066400000000000000000000001331477246567400172420ustar00rootroot00000000000000import std.stdio; import fortytwo; void main() { writefln("ShouldBe42: %s", fun42()); } dub-1.40.0/test/issue2574-mistyping-commands.sh000077500000000000000000000010651477246567400212060ustar00rootroot00000000000000#!/usr/bin/env bash . 
$(dirname "${BASH_SOURCE[0]}")/common.sh $DUB upfrade 2>&1 >/dev/null && die $LINENO '"dub upfrade" should not succeed' if [ "$($DUB upfrade 2>&1 | grep -Fc "Unknown command: upfrade")" != "1" ]; then die $LINENO 'Missing Unknown command line' fi if [ "$($DUB upfrade 2>&1 | grep -Fc "Did you mean 'upgrade'?")" != "1" ]; then die $LINENO 'Missing upgrade suggestion' fi if [ "$($DUB upfrade 2>&1 | grep -Fc "build")" != "0" ]; then die $LINENO 'Did not expect to see build as a suggestion and did not want a full list of commands' fi dub-1.40.0/test/issue2587-subpackage-dependency-resolution.sh000077500000000000000000000003361477246567400240120ustar00rootroot00000000000000#!/usr/bin/env bash set -e . $(dirname "${BASH_SOURCE[0]}")/common.sh cd "${CURR_DIR}/issue2587-subpackage-dependency-resolution/a" rm -f dub.selections.json $DUB upgrade -v $DUB run rm -f dub.selections.json $DUB run dub-1.40.0/test/issue2587-subpackage-dependency-resolution/000077500000000000000000000000001477246567400234515ustar00rootroot00000000000000dub-1.40.0/test/issue2587-subpackage-dependency-resolution/.no_build000066400000000000000000000000001477246567400252330ustar00rootroot00000000000000dub-1.40.0/test/issue2587-subpackage-dependency-resolution/.no_run000066400000000000000000000000001477246567400247400ustar00rootroot00000000000000dub-1.40.0/test/issue2587-subpackage-dependency-resolution/.no_test000066400000000000000000000000001477246567400251130ustar00rootroot00000000000000dub-1.40.0/test/issue2587-subpackage-dependency-resolution/a/000077500000000000000000000000001477246567400236715ustar00rootroot00000000000000dub-1.40.0/test/issue2587-subpackage-dependency-resolution/a/.gitignore000066400000000000000000000001471477246567400256630ustar00rootroot00000000000000.dub docs.json __dummy.html docs/ /a a.so a.dylib a.dll a.a a.lib a-test-* *.exe *.pdb *.o *.obj *.lst 
dub-1.40.0/test/issue2587-subpackage-dependency-resolution/a/dub.json000066400000000000000000000000771477246567400253420ustar00rootroot00000000000000{ "name": "a", "dependencies": { "b": {"path":"../b"} } } dub-1.40.0/test/issue2587-subpackage-dependency-resolution/a/source/000077500000000000000000000000001477246567400251715ustar00rootroot00000000000000dub-1.40.0/test/issue2587-subpackage-dependency-resolution/a/source/app.d000066400000000000000000000000561477246567400261170ustar00rootroot00000000000000import b, c; void main() { doB(); doC(); } dub-1.40.0/test/issue2587-subpackage-dependency-resolution/b/000077500000000000000000000000001477246567400236725ustar00rootroot00000000000000dub-1.40.0/test/issue2587-subpackage-dependency-resolution/b/.gitignore000066400000000000000000000001471477246567400256640ustar00rootroot00000000000000.dub docs.json __dummy.html docs/ /b b.so b.dylib b.dll b.a b.lib b-test-* *.exe *.pdb *.o *.obj *.lst dub-1.40.0/test/issue2587-subpackage-dependency-resolution/b/dub.json000066400000000000000000000002311477246567400253330ustar00rootroot00000000000000{ "name": "b", "dependencies": { "b:sub":"*" }, "subPackages": [ { "name": "sub", "dependencies": { "c": {"path":"../c"} } } ] } dub-1.40.0/test/issue2587-subpackage-dependency-resolution/b/source/000077500000000000000000000000001477246567400251725ustar00rootroot00000000000000dub-1.40.0/test/issue2587-subpackage-dependency-resolution/b/source/b.d000066400000000000000000000000321477246567400255530ustar00rootroot00000000000000module b; void doB() { } dub-1.40.0/test/issue2587-subpackage-dependency-resolution/c/000077500000000000000000000000001477246567400236735ustar00rootroot00000000000000dub-1.40.0/test/issue2587-subpackage-dependency-resolution/c/.gitignore000066400000000000000000000001471477246567400256650ustar00rootroot00000000000000.dub docs.json __dummy.html docs/ /c c.so c.dylib c.dll c.a c.lib c-test-* *.exe *.pdb *.o *.obj *.lst 
dub-1.40.0/test/issue2587-subpackage-dependency-resolution/c/dub.json000066400000000000000000000000211477246567400253310ustar00rootroot00000000000000{ "name": "c" } dub-1.40.0/test/issue2587-subpackage-dependency-resolution/c/source/000077500000000000000000000000001477246567400251735ustar00rootroot00000000000000dub-1.40.0/test/issue2587-subpackage-dependency-resolution/c/source/c.d000066400000000000000000000000321477246567400255550ustar00rootroot00000000000000module c; void doC() { } dub-1.40.0/test/issue2650-deprecated-modules/000077500000000000000000000000001477246567400205645ustar00rootroot00000000000000dub-1.40.0/test/issue2650-deprecated-modules/.no_build000066400000000000000000000000001477246567400223460ustar00rootroot00000000000000dub-1.40.0/test/issue2650-deprecated-modules/.no_run000066400000000000000000000000001477246567400220530ustar00rootroot00000000000000dub-1.40.0/test/issue2650-deprecated-modules/dub.sdl000066400000000000000000000001251477246567400220400ustar00rootroot00000000000000name "issue2650" targetType "sourceLibrary" buildRequirements "disallowDeprecations" dub-1.40.0/test/issue2650-deprecated-modules/source/000077500000000000000000000000001477246567400220645ustar00rootroot00000000000000dub-1.40.0/test/issue2650-deprecated-modules/source/test.d000066400000000000000000000000301477246567400232010ustar00rootroot00000000000000deprecated module test; dub-1.40.0/test/issue2684-recipe-file.sh000077500000000000000000000003371477246567400175530ustar00rootroot00000000000000#!/bin/bash cd ${CURR_DIR}/issue2684-recipe-file ${DUB} | grep -c "This was built using dub.json" > /dev/null ${DUB} --recipe=dubWithAnotherSource.json | grep -c "This was built using dubWithAnotherSource.json" > /dev/nulldub-1.40.0/test/issue2684-recipe-file/000077500000000000000000000000001477246567400172115ustar00rootroot00000000000000dub-1.40.0/test/issue2684-recipe-file/.gitignore000066400000000000000000000003631477246567400212030ustar00rootroot00000000000000.dub docs.json 
__dummy.html docs/ /issue2684-recipe-file issue2684-recipe-file.so issue2684-recipe-file.dylib issue2684-recipe-file.dll issue2684-recipe-file.a issue2684-recipe-file.lib issue2684-recipe-file-test-* *.exe *.pdb *.o *.obj *.lst dub-1.40.0/test/issue2684-recipe-file/anotherSource/000077500000000000000000000000001477246567400220325ustar00rootroot00000000000000dub-1.40.0/test/issue2684-recipe-file/anotherSource/app.d000066400000000000000000000001521477246567400227550ustar00rootroot00000000000000module app; import std.stdio; void main() { writeln("This was built using dubWithAnotherSource.json"); }dub-1.40.0/test/issue2684-recipe-file/dub.json000066400000000000000000000002671477246567400206630ustar00rootroot00000000000000{ "authors": [ "Hipreme" ], "copyright": "Copyright © 2023, Hipreme", "description": "A minimal D application.", "license": "public domain", "name": "issue2684-recipe-file" }dub-1.40.0/test/issue2684-recipe-file/dubWithAnotherSource.json000066400000000000000000000003321477246567400242120ustar00rootroot00000000000000{ "authors": [ "Hipreme" ], "copyright": "Copyright © 2023, Hipreme", "description": "A minimal D application.", "sourcePaths": ["anotherSource"], "license": "public domain", "name": "issue2684-recipe-file" }dub-1.40.0/test/issue2684-recipe-file/source/000077500000000000000000000000001477246567400205115ustar00rootroot00000000000000dub-1.40.0/test/issue2684-recipe-file/source/app.d000066400000000000000000000002721477246567400214370ustar00rootroot00000000000000module app; import std.stdio; void main() { writeln("This was built using dub.json. 
Try using the other configuration by calling dub with: dub --recipe=dubWithAnotherSource.json "); } dub-1.40.0/test/issue2698-cimportpaths-broken-with-dmd-ldc/000077500000000000000000000000001477246567400233005ustar00rootroot00000000000000dub-1.40.0/test/issue2698-cimportpaths-broken-with-dmd-ldc/c_headers/000077500000000000000000000000001477246567400252155ustar00rootroot00000000000000dub-1.40.0/test/issue2698-cimportpaths-broken-with-dmd-ldc/c_headers/foo.h000066400000000000000000000001061477246567400261460ustar00rootroot00000000000000#include int bar(void) { printf("func bar in foo.h\n"); } dub-1.40.0/test/issue2698-cimportpaths-broken-with-dmd-ldc/dub.sdl000066400000000000000000000002311477246567400245520ustar00rootroot00000000000000name "issue2698-cimportpaths-broken-with-dmd-ldc" description "test issue 2698" authors "alexander bryan" cSourcePaths "source" cImportPaths "c_headers" dub-1.40.0/test/issue2698-cimportpaths-broken-with-dmd-ldc/source/000077500000000000000000000000001477246567400246005ustar00rootroot00000000000000dub-1.40.0/test/issue2698-cimportpaths-broken-with-dmd-ldc/source/app.d000066400000000000000000000000671477246567400255300ustar00rootroot00000000000000import std.stdio; import foo; void main() { bar(); } dub-1.40.0/test/issue2698-cimportpaths-broken-with-dmd-ldc/source/foo.c000066400000000000000000000000211477246567400255200ustar00rootroot00000000000000#include "foo.h" dub-1.40.0/test/issue2840-build-collision.sh000077500000000000000000000005241477246567400204470ustar00rootroot00000000000000#!/usr/bin/env bash . 
$(dirname "${BASH_SOURCE[0]}")/common.sh pushd $(dirname "${BASH_SOURCE[0]}")/issue2840-build-collision # Copy before building, as dub uses timestamp to check for rebuild rm -rf nested/ && mkdir -p nested/ && cp -v build.d nested/ $DUB ./build.d $(pwd)/build.d pushd nested $DUB ./build.d $(pwd)/build.d popd popd dub-1.40.0/test/issue2840-build-collision/000077500000000000000000000000001477246567400201075ustar00rootroot00000000000000dub-1.40.0/test/issue2840-build-collision/.no_build000066400000000000000000000000001477246567400216710ustar00rootroot00000000000000dub-1.40.0/test/issue2840-build-collision/build.d000077500000000000000000000004651477246567400213630ustar00rootroot00000000000000#!/usr/bin/env dub /++ dub.json: { "name": "build" } +/ import std.format; immutable FullPath = __FILE_FULL_PATH__; void main (string[] args) { assert(args.length == 2, "Expected a single argument"); assert(args[1] == FullPath, format("%s != %s -- %s", args[1], FullPath, args[0])); } dub-1.40.0/test/issue346-redundant-flags.sh000077500000000000000000000003141477246567400203510ustar00rootroot00000000000000#!/usr/bin/env bash . $(dirname "${BASH_SOURCE[0]}")/common.sh cd ${CURR_DIR}/issue346-redundant-flags ${DUB} build --bare --force --compiler=${DC} -a x86_64 -v main 2>&1 | { ! 
grep -e '-m64 -m64' -c; } dub-1.40.0/test/issue346-redundant-flags/000077500000000000000000000000001477246567400200145ustar00rootroot00000000000000dub-1.40.0/test/issue346-redundant-flags/.no_build000066400000000000000000000000001477246567400215760ustar00rootroot00000000000000dub-1.40.0/test/issue346-redundant-flags/.no_run000066400000000000000000000000001477246567400213030ustar00rootroot00000000000000dub-1.40.0/test/issue346-redundant-flags/.no_test000066400000000000000000000000001477246567400214560ustar00rootroot00000000000000dub-1.40.0/test/issue346-redundant-flags/a/000077500000000000000000000000001477246567400202345ustar00rootroot00000000000000dub-1.40.0/test/issue346-redundant-flags/a/dub.json000066400000000000000000000000201477246567400216710ustar00rootroot00000000000000{ "name": "a" }dub-1.40.0/test/issue346-redundant-flags/a/source/000077500000000000000000000000001477246567400215345ustar00rootroot00000000000000dub-1.40.0/test/issue346-redundant-flags/a/source/a.d000066400000000000000000000000331477246567400221150ustar00rootroot00000000000000module a; void afun() { } dub-1.40.0/test/issue346-redundant-flags/b/000077500000000000000000000000001477246567400202355ustar00rootroot00000000000000dub-1.40.0/test/issue346-redundant-flags/b/dub.json000066400000000000000000000000201477246567400216720ustar00rootroot00000000000000{ "name": "b" }dub-1.40.0/test/issue346-redundant-flags/b/source/000077500000000000000000000000001477246567400215355ustar00rootroot00000000000000dub-1.40.0/test/issue346-redundant-flags/b/source/b.d000066400000000000000000000000331477246567400221170ustar00rootroot00000000000000module b; void bfun() { } dub-1.40.0/test/issue346-redundant-flags/main/000077500000000000000000000000001477246567400207405ustar00rootroot00000000000000dub-1.40.0/test/issue346-redundant-flags/main/dub.json000066400000000000000000000001331477246567400224020ustar00rootroot00000000000000{ "name": "main", "dependencies": { "a": {"path": "../a"}, "b": {"path": "../b"} } 
}dub-1.40.0/test/issue346-redundant-flags/main/source/000077500000000000000000000000001477246567400222405ustar00rootroot00000000000000dub-1.40.0/test/issue346-redundant-flags/main/source/main.d000066400000000000000000000000661477246567400233330ustar00rootroot00000000000000import a; import b; void main() { afun(); bfun(); }dub-1.40.0/test/issue361-optional-deps.sh000077500000000000000000000013101477246567400200430ustar00rootroot00000000000000#!/usr/bin/env bash . $(dirname "${BASH_SOURCE[0]}")/common.sh cd ${CURR_DIR}/issue361-optional-deps rm -rf a/.dub rm -rf a/b/.dub rm -rf main1/.dub rm -rf main2/.dub rm -f main1/dub.selections.json ${DUB} build --bare --compiler=${DC} main1 echo "{" > cmp.tmp echo " \"fileVersion\": 1," >> cmp.tmp echo " \"versions\": {" >> cmp.tmp echo " \"b\": \"~master\"" >> cmp.tmp echo " }" >> cmp.tmp echo "}" >> cmp.tmp diff cmp.tmp main1/dub.selections.json ${DUB} build --bare --compiler=${DC} main2 echo "{" > cmp.tmp echo " \"fileVersion\": 1," >> cmp.tmp echo " \"versions\": {" >> cmp.tmp echo " \"a\": \"~master\"" >> cmp.tmp echo " }" >> cmp.tmp echo "}" >> cmp.tmp diff cmp.tmp main2/dub.selections.json dub-1.40.0/test/issue361-optional-deps/000077500000000000000000000000001477246567400175115ustar00rootroot00000000000000dub-1.40.0/test/issue361-optional-deps/.no_build000066400000000000000000000000001477246567400212730ustar00rootroot00000000000000dub-1.40.0/test/issue361-optional-deps/a/000077500000000000000000000000001477246567400177315ustar00rootroot00000000000000dub-1.40.0/test/issue361-optional-deps/a/dub.sdl000066400000000000000000000000101477246567400211760ustar00rootroot00000000000000name 
"a"dub-1.40.0/test/issue361-optional-deps/a/src/000077500000000000000000000000001477246567400205205ustar00rootroot00000000000000dub-1.40.0/test/issue361-optional-deps/a/src/a.d000066400000000000000000000000001477246567400210730ustar00rootroot00000000000000dub-1.40.0/test/issue361-optional-deps/b/000077500000000000000000000000001477246567400177325ustar00rootroot00000000000000dub-1.40.0/test/issue361-optional-deps/b/dub.sdl000066400000000000000000000000101477246567400211770ustar00rootroot00000000000000name "b"dub-1.40.0/test/issue361-optional-deps/b/src/000077500000000000000000000000001477246567400205215ustar00rootroot00000000000000dub-1.40.0/test/issue361-optional-deps/b/src/b.d000066400000000000000000000000001477246567400210750ustar00rootroot00000000000000dub-1.40.0/test/issue361-optional-deps/main1/000077500000000000000000000000001477246567400205165ustar00rootroot00000000000000dub-1.40.0/test/issue361-optional-deps/main1/dub.sdl000066400000000000000000000001531477246567400217730ustar00rootroot00000000000000name "main1" dependency "a" version="*" optional=true dependency "b" version="*" optional=true default=truedub-1.40.0/test/issue361-optional-deps/main1/src/000077500000000000000000000000001477246567400213055ustar00rootroot00000000000000dub-1.40.0/test/issue361-optional-deps/main1/src/main1.d000066400000000000000000000000001477246567400224450ustar00rootroot00000000000000dub-1.40.0/test/issue361-optional-deps/main2/000077500000000000000000000000001477246567400205175ustar00rootroot00000000000000dub-1.40.0/test/issue361-optional-deps/main2/dub.sdl000066400000000000000000000001531477246567400217740ustar00rootroot00000000000000name "main2" dependency "a" version="*" optional=true dependency "b" version="*" optional=true default=truedub-1.40.0/test/issue361-optional-deps/main2/dub.selections.json000066400000000000000000000000721477246567400243320ustar00rootroot00000000000000{ "fileVersion": 1, "versions": { "a": "~master" } } 
dub-1.40.0/test/issue361-optional-deps/main2/src/000077500000000000000000000000001477246567400213065ustar00rootroot00000000000000dub-1.40.0/test/issue361-optional-deps/main2/src/main2.d000066400000000000000000000000001477246567400224470ustar00rootroot00000000000000dub-1.40.0/test/issue502-root-import/000077500000000000000000000000001477246567400172235ustar00rootroot00000000000000dub-1.40.0/test/issue502-root-import/dub.json000066400000000000000000000001621477246567400206670ustar00rootroot00000000000000{ "name": "issue502-root-import", "dependencies": { "gitcompatibledubpackage": "~>1.0" } }dub-1.40.0/test/issue502-root-import/source/000077500000000000000000000000001477246567400205235ustar00rootroot00000000000000dub-1.40.0/test/issue502-root-import/source/app.d000066400000000000000000000001731477246567400214510ustar00rootroot00000000000000import gitcompatibledubpackage.subdir.file; void main(string[] args) { } unittest { assert(!hasTheWorldExploded()); }dub-1.40.0/test/issue564-invalid-upgrade-dependency.sh000077500000000000000000000003261477246567400224670ustar00rootroot00000000000000#!/usr/bin/env bash . 
$(dirname "${BASH_SOURCE[0]}")/common.sh cd ${CURR_DIR}/issue564-invalid-upgrade-dependency rm -rf a-1.0.0/.dub rm -rf a-1.1.0/.dub rm -rf main/.dub ${DUB} build --bare --compiler=${DC} main dub-1.40.0/test/issue564-invalid-upgrade-dependency/000077500000000000000000000000001477246567400221275ustar00rootroot00000000000000dub-1.40.0/test/issue564-invalid-upgrade-dependency/.no_build000066400000000000000000000000001477246567400237110ustar00rootroot00000000000000dub-1.40.0/test/issue564-invalid-upgrade-dependency/.no_run000066400000000000000000000000001477246567400234160ustar00rootroot00000000000000dub-1.40.0/test/issue564-invalid-upgrade-dependency/.no_test000066400000000000000000000000001477246567400235710ustar00rootroot00000000000000dub-1.40.0/test/issue564-invalid-upgrade-dependency/a-1.0.0/000077500000000000000000000000001477246567400230015ustar00rootroot00000000000000dub-1.40.0/test/issue564-invalid-upgrade-dependency/a-1.0.0/dub.json000066400000000000000000000000471477246567400244470ustar00rootroot00000000000000{ "name": "a", "version": "1.0.0", } dub-1.40.0/test/issue564-invalid-upgrade-dependency/a-1.0.0/source/000077500000000000000000000000001477246567400243015ustar00rootroot00000000000000dub-1.40.0/test/issue564-invalid-upgrade-dependency/a-1.0.0/source/a.d000066400000000000000000000000201477246567400246560ustar00rootroot00000000000000void test() { } dub-1.40.0/test/issue564-invalid-upgrade-dependency/a-1.1.0/000077500000000000000000000000001477246567400230025ustar00rootroot00000000000000dub-1.40.0/test/issue564-invalid-upgrade-dependency/a-1.1.0/dub.json000066400000000000000000000001361477246567400244470ustar00rootroot00000000000000{ "name": "a", "version": "1.1.0", "dependencies": { "invalid": {"path": "invalid"} } } 
dub-1.40.0/test/issue564-invalid-upgrade-dependency/a-1.1.0/source/000077500000000000000000000000001477246567400243025ustar00rootroot00000000000000dub-1.40.0/test/issue564-invalid-upgrade-dependency/a-1.1.0/source/a.d000066400000000000000000000000201477246567400246570ustar00rootroot00000000000000void test() { } dub-1.40.0/test/issue564-invalid-upgrade-dependency/main/000077500000000000000000000000001477246567400230535ustar00rootroot00000000000000dub-1.40.0/test/issue564-invalid-upgrade-dependency/main/dub.json000066400000000000000000000000721477246567400245170ustar00rootroot00000000000000{ "name": "main", "dependencies": { "a": "~>1.0" } } dub-1.40.0/test/issue564-invalid-upgrade-dependency/main/dub.selections.json000066400000000000000000000000701477246567400266640ustar00rootroot00000000000000{ "fileVersion": 1, "versions": { "a": "1.0.0" } } dub-1.40.0/test/issue564-invalid-upgrade-dependency/main/source/000077500000000000000000000000001477246567400243535ustar00rootroot00000000000000dub-1.40.0/test/issue564-invalid-upgrade-dependency/main/source/app.d000066400000000000000000000000441477246567400252760ustar00rootroot00000000000000import a; void main() { test(); } dub-1.40.0/test/issue586-subpack-dep.sh000077500000000000000000000003451477246567400175030ustar00rootroot00000000000000#!/usr/bin/env bash . 
$(dirname "${BASH_SOURCE[0]}")/common.sh cd ${CURR_DIR}/issue586-subpack-dep rm -rf a/.dub rm -rf a/b/.dub rm -rf main/.dub ${DUB} build --bare --compiler=${DC} main ${DUB} run --bare --compiler=${DC} main dub-1.40.0/test/issue586-subpack-dep/000077500000000000000000000000001477246567400171425ustar00rootroot00000000000000dub-1.40.0/test/issue586-subpack-dep/.no_build000066400000000000000000000000001477246567400207240ustar00rootroot00000000000000dub-1.40.0/test/issue586-subpack-dep/.no_run000066400000000000000000000000001477246567400204310ustar00rootroot00000000000000dub-1.40.0/test/issue586-subpack-dep/.no_test000066400000000000000000000000001477246567400206040ustar00rootroot00000000000000dub-1.40.0/test/issue586-subpack-dep/a/000077500000000000000000000000001477246567400173625ustar00rootroot00000000000000dub-1.40.0/test/issue586-subpack-dep/a/b/000077500000000000000000000000001477246567400176035ustar00rootroot00000000000000dub-1.40.0/test/issue586-subpack-dep/a/b/dub.sdl000066400000000000000000000000111477246567400210510ustar00rootroot00000000000000name "b" dub-1.40.0/test/issue586-subpack-dep/a/b/source/000077500000000000000000000000001477246567400211035ustar00rootroot00000000000000dub-1.40.0/test/issue586-subpack-dep/a/b/source/b.d000066400000000000000000000000431477246567400214660ustar00rootroot00000000000000module b; int bfun() { return 2; }dub-1.40.0/test/issue586-subpack-dep/a/dub.sdl000066400000000000000000000000651477246567400206410ustar00rootroot00000000000000name "a" dependency ":b" version="*" subPackage "b/" dub-1.40.0/test/issue586-subpack-dep/a/source/000077500000000000000000000000001477246567400206625ustar00rootroot00000000000000dub-1.40.0/test/issue586-subpack-dep/a/source/a.d000066400000000000000000000000711477246567400212450ustar00rootroot00000000000000module a; import b; int afun() { return 1 + bfun(); } 
dub-1.40.0/test/issue586-subpack-dep/main/000077500000000000000000000000001477246567400200665ustar00rootroot00000000000000dub-1.40.0/test/issue586-subpack-dep/main/dub.sdl000066400000000000000000000000761477246567400213470ustar00rootroot00000000000000name "main" dependency "a" version="*" targetType "executable"dub-1.40.0/test/issue586-subpack-dep/main/dub.selections.json000066400000000000000000000000721477246567400237010ustar00rootroot00000000000000{ "fileVersion": 1, "versions": { "a": "~master" } } dub-1.40.0/test/issue586-subpack-dep/main/source/000077500000000000000000000000001477246567400213665ustar00rootroot00000000000000dub-1.40.0/test/issue586-subpack-dep/main/source/c.d000066400000000000000000000000741477246567400217560ustar00rootroot00000000000000module c; import a; void main() { assert(afun() == 3); } dub-1.40.0/test/issue613-dynlib-pic.sh000077500000000000000000000003451477246567400173260ustar00rootroot00000000000000#!/usr/bin/env bash . $(dirname "${BASH_SOURCE[0]}")/common.sh cd ${CURR_DIR}/issue613-dynlib-pic rm -rf .dub if [ "${DC}" = "dmd" ]; then ${DUB} build --compiler=${DC} else echo "Skipping shared library test for ${DC}..." 
fi dub-1.40.0/test/issue613-dynlib-pic/000077500000000000000000000000001477246567400167655ustar00rootroot00000000000000dub-1.40.0/test/issue613-dynlib-pic/.gitignore000066400000000000000000000000461477246567400207550ustar00rootroot00000000000000.dub docs.json __dummy.html *.o *.obj dub-1.40.0/test/issue613-dynlib-pic/.no_build000066400000000000000000000000001477246567400205470ustar00rootroot00000000000000dub-1.40.0/test/issue613-dynlib-pic/.no_run000066400000000000000000000000001477246567400202540ustar00rootroot00000000000000dub-1.40.0/test/issue613-dynlib-pic/.no_test000066400000000000000000000000001477246567400204270ustar00rootroot00000000000000dub-1.40.0/test/issue613-dynlib-pic/dub.sdl000066400000000000000000000002601477246567400202410ustar00rootroot00000000000000name "issue613-dynlib-pic" targetType "dynamicLibrary" // TODO: instead of just testing build success, test if -shared, -fPIC and -defaultlib have all been specified correctlydub-1.40.0/test/issue613-dynlib-pic/source/000077500000000000000000000000001477246567400202655ustar00rootroot00000000000000dub-1.40.0/test/issue613-dynlib-pic/source/app.d000066400000000000000000000000201477246567400212020ustar00rootroot00000000000000void test() { }dub-1.40.0/test/issue616-describe-vs-generate-commands.sh000077500000000000000000000017641477246567400231020ustar00rootroot00000000000000#!/usr/bin/env bash . $(dirname "${BASH_SOURCE[0]}")/common.sh cd "$CURR_DIR"/issue616-describe-vs-generate-commands temp_file=$(mktemp $(basename $0).XXXXXX) function cleanup { rm $temp_file } trap cleanup EXIT if ! $DUB describe --compiler=$DC --data-list --data=target-name \ > "$temp_file" 2>&1; then die $LINENO 'Printing project data failed!' fi # Create the expected output file to compare stdout against. 
expected_file="$CURR_DIR/expected-issue616-output" echo "preGenerateCommands: DUB_PACKAGES_USED=issue616-describe-vs-generate-commands,issue616-subpack,issue616-subsubpack" > "$expected_file" echo "$CURR_DIR/issue616-describe-vs-generate-commands/src/" >> "$expected_file" echo "$CURR_DIR/issue616-subpack/src/" >> "$expected_file" echo "$CURR_DIR/issue616-subsubpack/src/" >> "$expected_file" echo "issue616-describe-vs-generate-commands" >> "$expected_file" if ! diff "$expected_file" "$temp_file"; then die $LINENO 'The stdout output did not match the expected output!' fi dub-1.40.0/test/issue616-describe-vs-generate-commands/000077500000000000000000000000001477246567400225335ustar00rootroot00000000000000dub-1.40.0/test/issue616-describe-vs-generate-commands/.no_build000066400000000000000000000000021477246567400243170ustar00rootroot00000000000000 dub-1.40.0/test/issue616-describe-vs-generate-commands/do-preGenerateCommands.sh000077500000000000000000000005711477246567400274200ustar00rootroot00000000000000#!/usr/bin/env bash if [ -n "${dub_issue616}" ]; then echo 'Fail! preGenerateCommands recursion detected!' >&2 exit 0 # Don't return a non-zero error code here. This way the test gives a better diagnostic. 
fi echo preGenerateCommands: DUB_PACKAGES_USED=$DUB_PACKAGES_USED >&2 export dub_issue616=true $DUB describe --compiler=$DC --data-list --data=import-paths >&2 dub-1.40.0/test/issue616-describe-vs-generate-commands/dub.json000066400000000000000000000004731477246567400242040ustar00rootroot00000000000000{ "name": "issue616-describe-vs-generate-commands", "targetType": "executable", "preGenerateCommands-posix": ["cd $PACKAGE_DIR && ./do-preGenerateCommands.sh"], "dependencies": { "issue616-subpack": { "version": "1.0", "path": "../issue616-subpack" } } } dub-1.40.0/test/issue616-describe-vs-generate-commands/src/000077500000000000000000000000001477246567400233225ustar00rootroot00000000000000dub-1.40.0/test/issue616-describe-vs-generate-commands/src/dummy.d000066400000000000000000000000011477246567400246110ustar00rootroot00000000000000 dub-1.40.0/test/issue616-subpack/000077500000000000000000000000001477246567400163665ustar00rootroot00000000000000dub-1.40.0/test/issue616-subpack/.no_build000066400000000000000000000000021477246567400201520ustar00rootroot00000000000000 dub-1.40.0/test/issue616-subpack/dub.json000066400000000000000000000003231477246567400200310ustar00rootroot00000000000000{ "name": "issue616-subpack", "targetType": "library", "dependencies": { "issue616-subsubpack": { "version": "1.0", "path": "../issue616-subsubpack" } } } dub-1.40.0/test/issue616-subpack/src/000077500000000000000000000000001477246567400171555ustar00rootroot00000000000000dub-1.40.0/test/issue616-subpack/src/dummy.d000066400000000000000000000000011477246567400204440ustar00rootroot00000000000000 dub-1.40.0/test/issue616-subsubpack/000077500000000000000000000000001477246567400171005ustar00rootroot00000000000000dub-1.40.0/test/issue616-subsubpack/.no_build000066400000000000000000000000021477246567400206640ustar00rootroot00000000000000 dub-1.40.0/test/issue616-subsubpack/dub.json000066400000000000000000000001031477246567400205370ustar00rootroot00000000000000{ "name": "issue616-subsubpack", 
"targetType": "library" } dub-1.40.0/test/issue616-subsubpack/src/000077500000000000000000000000001477246567400176675ustar00rootroot00000000000000dub-1.40.0/test/issue616-subsubpack/src/dummy.d000066400000000000000000000000011477246567400211560ustar00rootroot00000000000000 dub-1.40.0/test/issue672-upgrade-optional.sh000077500000000000000000000005061477246567400205520ustar00rootroot00000000000000#!/usr/bin/env bash . $(dirname "${BASH_SOURCE[0]}")/common.sh cd ${CURR_DIR}/issue672-upgrade-optional rm -rf b/.dub echo "{\"fileVersion\": 1,\"versions\": {\"dub\": \"1.5.0\"}}" > dub.selections.json ${DUB} upgrade if ! grep -c -e "\"dub\": \"1.6.0\"" dub.selections.json; then die $LINENO 'Dependency not upgraded.' fi dub-1.40.0/test/issue672-upgrade-optional/000077500000000000000000000000001477246567400202125ustar00rootroot00000000000000dub-1.40.0/test/issue672-upgrade-optional/.no_build000066400000000000000000000000001477246567400217740ustar00rootroot00000000000000dub-1.40.0/test/issue672-upgrade-optional/dub.sdl000066400000000000000000000001021477246567400214610ustar00rootroot00000000000000name "b" dependency "dub" version=">=1.5.0 <=1.6.0" optional=true dub-1.40.0/test/issue672-upgrade-optional/dub.selections.json000066400000000000000000000000721477246567400240250ustar00rootroot00000000000000{ "fileVersion": 1, "versions": { "dub": "1.5.0" } } dub-1.40.0/test/issue674-concurrent-dub.sh000077500000000000000000000005401477246567400202320ustar00rootroot00000000000000#!/usr/bin/env bash . $(dirname "${BASH_SOURCE[0]}")/common.sh TMPDIR=$(mktemp -d $(basename $0).XXXXXX) function cleanup { rm -rf ${TMPDIR} } trap cleanup EXIT cd ${TMPDIR} && $DUB fetch --cache=local bloom & pid1=$! sleep 0.5 cd ${TMPDIR} && $DUB fetch --cache=local bloom & pid2=$! wait $pid1 wait $pid2 [ -d ${TMPDIR}/.dub/packages/bloom* ]dub-1.40.0/test/issue686-multiple-march.sh000077500000000000000000000003131477246567400202240ustar00rootroot00000000000000#!/usr/bin/env bash . 
$(dirname "${BASH_SOURCE[0]}")/common.sh cd ${CURR_DIR}/issue686-multiple-march ${DUB} build --bare --force --compiler=${DC} -a x86_64 -v main 2>&1 | { ! grep -e '-m64 -m64' -c; } dub-1.40.0/test/issue686-multiple-march/000077500000000000000000000000001477246567400176705ustar00rootroot00000000000000dub-1.40.0/test/issue686-multiple-march/.no_build000066400000000000000000000000001477246567400214520ustar00rootroot00000000000000dub-1.40.0/test/issue686-multiple-march/.no_run000066400000000000000000000000001477246567400211570ustar00rootroot00000000000000dub-1.40.0/test/issue686-multiple-march/.no_test000066400000000000000000000000001477246567400213320ustar00rootroot00000000000000dub-1.40.0/test/issue686-multiple-march/a/000077500000000000000000000000001477246567400201105ustar00rootroot00000000000000dub-1.40.0/test/issue686-multiple-march/a/dub.json000066400000000000000000000000771477246567400215610ustar00rootroot00000000000000{ "name": "a", "dependencies": { "b": {"path": "../b"} } }dub-1.40.0/test/issue686-multiple-march/a/source/000077500000000000000000000000001477246567400214105ustar00rootroot00000000000000dub-1.40.0/test/issue686-multiple-march/a/source/a.d000066400000000000000000000000331477246567400217710ustar00rootroot00000000000000module a; void afun() { } dub-1.40.0/test/issue686-multiple-march/b/000077500000000000000000000000001477246567400201115ustar00rootroot00000000000000dub-1.40.0/test/issue686-multiple-march/b/dub.json000066400000000000000000000000201477246567400215460ustar00rootroot00000000000000{ "name": "b" }dub-1.40.0/test/issue686-multiple-march/b/source/000077500000000000000000000000001477246567400214115ustar00rootroot00000000000000dub-1.40.0/test/issue686-multiple-march/b/source/b.d000066400000000000000000000000331477246567400217730ustar00rootroot00000000000000module b; void bfun() { } 
dub-1.40.0/test/issue686-multiple-march/main/000077500000000000000000000000001477246567400206145ustar00rootroot00000000000000dub-1.40.0/test/issue686-multiple-march/main/dub.json000066400000000000000000000001331477246567400222560ustar00rootroot00000000000000{ "name": "main", "dependencies": { "a": {"path": "../a"}, "b": {"path": "../b"} } }dub-1.40.0/test/issue686-multiple-march/main/source/000077500000000000000000000000001477246567400221145ustar00rootroot00000000000000dub-1.40.0/test/issue686-multiple-march/main/source/main.d000066400000000000000000000000661477246567400232070ustar00rootroot00000000000000import a; import b; void main() { afun(); bfun(); }dub-1.40.0/test/issue754-path-selection-fail/000077500000000000000000000000001477246567400205715ustar00rootroot00000000000000dub-1.40.0/test/issue754-path-selection-fail/a-1.0/000077500000000000000000000000001477246567400213055ustar00rootroot00000000000000dub-1.40.0/test/issue754-path-selection-fail/a-1.0/dub.sdl000066400000000000000000000000311477246567400225550ustar00rootroot00000000000000name "a" version "1.0.0" dub-1.40.0/test/issue754-path-selection-fail/a-1.0/source/000077500000000000000000000000001477246567400226055ustar00rootroot00000000000000dub-1.40.0/test/issue754-path-selection-fail/a-1.0/source/a.d000066400000000000000000000000311477246567400231640ustar00rootroot00000000000000module a; void test() {}dub-1.40.0/test/issue754-path-selection-fail/a-2.0/000077500000000000000000000000001477246567400213065ustar00rootroot00000000000000dub-1.40.0/test/issue754-path-selection-fail/a-2.0/dub.sdl000066400000000000000000000000321477246567400225570ustar00rootroot00000000000000name "a" version "2.0.0" dub-1.40.0/test/issue754-path-selection-fail/dub.sdl000066400000000000000000000000501477246567400220420ustar00rootroot00000000000000name "test" dependency "a" path="a-2.0" dub-1.40.0/test/issue754-path-selection-fail/dub.selections.json000066400000000000000000000001021477246567400243760ustar00rootroot00000000000000{ 
"fileVersion": 1, "versions": { "a": {"path": "a-1.0"} } } dub-1.40.0/test/issue754-path-selection-fail/source/000077500000000000000000000000001477246567400220715ustar00rootroot00000000000000dub-1.40.0/test/issue754-path-selection-fail/source/app.d000066400000000000000000000000441477246567400230140ustar00rootroot00000000000000import a; void main() { test(); } dub-1.40.0/test/issue777-bogus-path-dependency/000077500000000000000000000000001477246567400211335ustar00rootroot00000000000000dub-1.40.0/test/issue777-bogus-path-dependency/b/000077500000000000000000000000001477246567400213545ustar00rootroot00000000000000dub-1.40.0/test/issue777-bogus-path-dependency/b/a.d000066400000000000000000000000001477246567400217270ustar00rootroot00000000000000dub-1.40.0/test/issue777-bogus-path-dependency/b/dub.sdl000066400000000000000000000002051477246567400226270ustar00rootroot00000000000000name "b" targetType "none" configuration "a" { dependency "c" version="*" } configuration "b" { dependency "c" path="../c-err" } dub-1.40.0/test/issue777-bogus-path-dependency/c-err/000077500000000000000000000000001477246567400221435ustar00rootroot00000000000000dub-1.40.0/test/issue777-bogus-path-dependency/c-err/dub.sdl000066400000000000000000000000111477246567400234110ustar00rootroot00000000000000name "c" dub-1.40.0/test/issue777-bogus-path-dependency/c-err/source/000077500000000000000000000000001477246567400234435ustar00rootroot00000000000000dub-1.40.0/test/issue777-bogus-path-dependency/c-err/source/lib.d000066400000000000000000000000411477246567400243510ustar00rootroot00000000000000module lib; void c() { error } dub-1.40.0/test/issue777-bogus-path-dependency/c/000077500000000000000000000000001477246567400213555ustar00rootroot00000000000000dub-1.40.0/test/issue777-bogus-path-dependency/c/dub.sdl000066400000000000000000000000111477246567400226230ustar00rootroot00000000000000name "c" 
dub-1.40.0/test/issue777-bogus-path-dependency/c/source/000077500000000000000000000000001477246567400226555ustar00rootroot00000000000000dub-1.40.0/test/issue777-bogus-path-dependency/c/source/lib.d000066400000000000000000000000321477246567400235630ustar00rootroot00000000000000module lib; void c() { } dub-1.40.0/test/issue777-bogus-path-dependency/dub.sdl000066400000000000000000000000441477246567400224070ustar00rootroot00000000000000name "test" dependency "b" path="b" dub-1.40.0/test/issue777-bogus-path-dependency/dub.selections.json000066400000000000000000000001221477246567400247420ustar00rootroot00000000000000{ "fileVersion": 1, "versions": { "b": {"path":"b"}, "c": {"path":"c"} } } dub-1.40.0/test/issue777-bogus-path-dependency/source/000077500000000000000000000000001477246567400224335ustar00rootroot00000000000000dub-1.40.0/test/issue777-bogus-path-dependency/source/app.d000066400000000000000000000000431477246567400233550ustar00rootroot00000000000000import lib; void main() { c(); } dub-1.40.0/test/issue782-gtkd-pkg-config.sh000077500000000000000000000022171477246567400202560ustar00rootroot00000000000000#!/usr/bin/env bash . $(dirname "${BASH_SOURCE[0]}")/common.sh if [ $(uname) != "Linux" ]; then echo "Skipping issue782-dtkd-pkg-config test on non-Linux platform..." elif [ "${DC}" != "dmd" ]; then echo "Skipping issue782-dtkd-pkg-config test for ${DC}..." 
else echo ${CURR_DIR-$(pwd)} # the ${CURR_DIR-$(pwd)} allows running issue782-gtkd-pkg-config.sh stand-alone from the test directory cd ${CURR_DIR-$(pwd)}/issue782-gtkd-pkg-config rm -rf fake-gtkd/.dub rm -f fake-gtkd/libfake-gtkd.so rm -rf main/.dub rm -f main/fake-gtkd-test echo ${DUB} cd fake-gtkd && ${DUB} build --compiler=${DC} cd ../main # `run` needs to find the fake-gtkd shared library, so set LD_LIBRARY_PATH to where it is export LD_LIBRARY_PATH=${LD_LIBRARY_PATH:-}${LD_LIBRARY_PATH:+:}$PWD/../fake-gtkd # pkg-config needs to find our .pc file which is in $PWD/../fake-gtkd/pkgconfig, so set PKG_CONFIG_PATH accordingly export PKG_CONFIG_PATH=$PWD/../fake-gtkd/pkgconfig ${DUB} run --force --compiler=${DC} cd .. rm -rf fake-gtkd/.dub rm fake-gtkd/libfake-gtkd.so rm -rf main/.dub rm main/fake-gtkd-test fi dub-1.40.0/test/issue782-gtkd-pkg-config.sh.min_frontend000066400000000000000000000000051477246567400227250ustar00rootroot000000000000002.068dub-1.40.0/test/issue782-gtkd-pkg-config/000077500000000000000000000000001477246567400177155ustar00rootroot00000000000000dub-1.40.0/test/issue782-gtkd-pkg-config/.no_build000066400000000000000000000000001477246567400214770ustar00rootroot00000000000000dub-1.40.0/test/issue782-gtkd-pkg-config/.no_run000066400000000000000000000000001477246567400212040ustar00rootroot00000000000000dub-1.40.0/test/issue782-gtkd-pkg-config/.no_test000066400000000000000000000000001477246567400213570ustar00rootroot00000000000000dub-1.40.0/test/issue782-gtkd-pkg-config/fake-gtkd/000077500000000000000000000000001477246567400215525ustar00rootroot00000000000000dub-1.40.0/test/issue782-gtkd-pkg-config/fake-gtkd/dub.json000066400000000000000000000001761477246567400232230ustar00rootroot00000000000000{ "name" : "fake-gtkd", "targetType": "dynamicLibrary", "description" : "Fake GtkD shared library for testing", } 
dub-1.40.0/test/issue782-gtkd-pkg-config/fake-gtkd/pkgconfig/000077500000000000000000000000001477246567400235215ustar00rootroot00000000000000dub-1.40.0/test/issue782-gtkd-pkg-config/fake-gtkd/pkgconfig/fake-gtkd.pc000066400000000000000000000010371477246567400257030ustar00rootroot00000000000000prefix=../fake-gtkd libdir=${prefix} includedir=${prefix}/src Name: fake-gtkd Description: Fake GtkD shared library for testing Version: 1.0.0 #Requires: phobos2 # The "-L-defaultlib=libphobos2.so" and "-defaultlib=libphobos2.so" should both end up on the compiler (at link stage) invocation as "-defaultlib=libphobos2.so" # For this test, it doesn't hurt that they appear twice on the cmd line... Libs: -L-L${libdir} -L-l:libfake-gtkd.so -L-l:libdl.so.2 -pthread -L-defaultlib=libphobos2.so -defaultlib=libphobos2.so Cflags: -I${includedir} dub-1.40.0/test/issue782-gtkd-pkg-config/fake-gtkd/src/000077500000000000000000000000001477246567400223415ustar00rootroot00000000000000dub-1.40.0/test/issue782-gtkd-pkg-config/fake-gtkd/src/fakegtkd.d000066400000000000000000000001121477246567400242600ustar00rootroot00000000000000extern (C) string test_function() { return "Fake GtkD shared library"; }dub-1.40.0/test/issue782-gtkd-pkg-config/fake-gtkd/src/lib.d000066400000000000000000000000001477246567400232420ustar00rootroot00000000000000dub-1.40.0/test/issue782-gtkd-pkg-config/main/000077500000000000000000000000001477246567400206415ustar00rootroot00000000000000dub-1.40.0/test/issue782-gtkd-pkg-config/main/dub.json000066400000000000000000000003151477246567400223050ustar00rootroot00000000000000{ "name" : "fake-gtkd-test", "description" : "Small test executable calling a Fake GtkD shared library function for testing", "importPaths" : ["../fake-gtkd/src"], "libs" : ["fake-gtkd"] } 
dub-1.40.0/test/issue782-gtkd-pkg-config/main/src/000077500000000000000000000000001477246567400214305ustar00rootroot00000000000000dub-1.40.0/test/issue782-gtkd-pkg-config/main/src/app.d000066400000000000000000000001321477246567400223510ustar00rootroot00000000000000import std.stdio; import fakegtkd; int main() { writeln(test_function()); return 0; }dub-1.40.0/test/issue813-fixed-dependency.sh000077500000000000000000000003101477246567400205010ustar00rootroot00000000000000#!/usr/bin/env bash . $(dirname "${BASH_SOURCE[0]}")/common.sh cd ${CURR_DIR}/issue813-fixed-dependency rm -rf main/.dub rm -rf sub/.dub rm -rf sub/sub/.dub ${DUB} build --bare --compiler=${DC} main dub-1.40.0/test/issue813-fixed-dependency/000077500000000000000000000000001477246567400201505ustar00rootroot00000000000000dub-1.40.0/test/issue813-fixed-dependency/.no_build000066400000000000000000000000021477246567400217340ustar00rootroot00000000000000 dub-1.40.0/test/issue813-fixed-dependency/main/000077500000000000000000000000001477246567400210745ustar00rootroot00000000000000dub-1.40.0/test/issue813-fixed-dependency/main/dub.sdl000066400000000000000000000001011477246567400223420ustar00rootroot00000000000000name "main" targetType "executable" dependency "sub" version="*" dub-1.40.0/test/issue813-fixed-dependency/main/dub.selections.json000066400000000000000000000001051477246567400247040ustar00rootroot00000000000000{ "fileVersion": 1, "versions": { "sub": {"path": "../sub"} } } dub-1.40.0/test/issue813-fixed-dependency/main/src/000077500000000000000000000000001477246567400216635ustar00rootroot00000000000000dub-1.40.0/test/issue813-fixed-dependency/main/src/app.d000066400000000000000000000000521477246567400226050ustar00rootroot00000000000000import sub.test; void main() { foo(); } 
dub-1.40.0/test/issue813-fixed-dependency/sub/000077500000000000000000000000001477246567400207415ustar00rootroot00000000000000dub-1.40.0/test/issue813-fixed-dependency/sub/dub.sdl000066400000000000000000000000731477246567400222170ustar00rootroot00000000000000name "sub" subPackage "sub/" dependency ":sub" version="*" dub-1.40.0/test/issue813-fixed-dependency/sub/sub/000077500000000000000000000000001477246567400215325ustar00rootroot00000000000000dub-1.40.0/test/issue813-fixed-dependency/sub/sub/dub.sdl000066400000000000000000000000131477246567400230020ustar00rootroot00000000000000name "sub" dub-1.40.0/test/issue813-fixed-dependency/sub/sub/src/000077500000000000000000000000001477246567400223215ustar00rootroot00000000000000dub-1.40.0/test/issue813-fixed-dependency/sub/sub/src/sub/000077500000000000000000000000001477246567400231125ustar00rootroot00000000000000dub-1.40.0/test/issue813-fixed-dependency/sub/sub/src/sub/test.d000066400000000000000000000000411477246567400242310ustar00rootroot00000000000000module sub.test; void foo() { }dub-1.40.0/test/issue813-pure-sub-dependency.sh000077500000000000000000000003521477246567400211520ustar00rootroot00000000000000#!/usr/bin/env bash . 
$(dirname "${BASH_SOURCE[0]}")/common.sh cd ${CURR_DIR}/issue813-pure-sub-dependency rm -rf main/.dub rm -rf sub/.dub rm -rf sub/sub/.dub rm -f main/dub.selections.json ${DUB} build --bare --compiler=${DC} main dub-1.40.0/test/issue813-pure-sub-dependency/000077500000000000000000000000001477246567400206135ustar00rootroot00000000000000dub-1.40.0/test/issue813-pure-sub-dependency/.no_build000066400000000000000000000000021477246567400223770ustar00rootroot00000000000000 dub-1.40.0/test/issue813-pure-sub-dependency/main/000077500000000000000000000000001477246567400215375ustar00rootroot00000000000000dub-1.40.0/test/issue813-pure-sub-dependency/main/dub.sdl000066400000000000000000000001051477246567400230110ustar00rootroot00000000000000name "main" targetType "executable" dependency "sub:sub" version="*" dub-1.40.0/test/issue813-pure-sub-dependency/main/src/000077500000000000000000000000001477246567400223265ustar00rootroot00000000000000dub-1.40.0/test/issue813-pure-sub-dependency/main/src/app.d000066400000000000000000000000521477246567400232500ustar00rootroot00000000000000import sub.test; void main() { foo(); } dub-1.40.0/test/issue813-pure-sub-dependency/sub/000077500000000000000000000000001477246567400214045ustar00rootroot00000000000000dub-1.40.0/test/issue813-pure-sub-dependency/sub/dub.sdl000066400000000000000000000000731477246567400226620ustar00rootroot00000000000000name "sub" subPackage "sub/" dependency ":sub" version="*" dub-1.40.0/test/issue813-pure-sub-dependency/sub/sub/000077500000000000000000000000001477246567400221755ustar00rootroot00000000000000dub-1.40.0/test/issue813-pure-sub-dependency/sub/sub/dub.sdl000066400000000000000000000000131477246567400234450ustar00rootroot00000000000000name "sub" 
dub-1.40.0/test/issue813-pure-sub-dependency/sub/sub/src/000077500000000000000000000000001477246567400227645ustar00rootroot00000000000000dub-1.40.0/test/issue813-pure-sub-dependency/sub/sub/src/sub/000077500000000000000000000000001477246567400235555ustar00rootroot00000000000000dub-1.40.0/test/issue813-pure-sub-dependency/sub/sub/src/sub/test.d000066400000000000000000000000411477246567400246740ustar00rootroot00000000000000module sub.test; void foo() { }dub-1.40.0/test/issue820-extra-fields-after-convert.sh000077500000000000000000000005441477246567400224410ustar00rootroot00000000000000#!/usr/bin/env bash . $(dirname "${BASH_SOURCE[0]}")/common.sh cd ${CURR_DIR}/1-exec-simple cp dub.json dub.json.bak ${DUB} convert -f sdl if grep -qe "version\|sourcePaths\|importPaths\|configuration" dub.sdl > /dev/null; then mv dub.json.bak dub.json rm dub.sdl die $LINENO 'Conversion added extra fields.' fi mv dub.json.bak dub.json rm dub.sdl dub-1.40.0/test/issue838-custom-cache-paths.sh000077500000000000000000000007341477246567400207770ustar00rootroot00000000000000#!/usr/bin/env bash . $(dirname "${BASH_SOURCE[0]}")/common.sh CONFIG_FILE=$CURR_DIR/../etc/dub/settings.json mkdir $CURR_DIR/../etc && mkdir $CURR_DIR/../etc/dub || true echo "{\"customCachePaths\": [\"$CURR_DIR/issue838-custom-cache-paths/cache\"]}" > $CONFIG_FILE trap "rm $CONFIG_FILE" EXIT if ! { $DUB build --root "$CURR_DIR/issue838-custom-cache-paths" --skip-registry=all; }; then die $LINENO 'Failed to build package with custom cache path for dependencies.' 
fi dub-1.40.0/test/issue838-custom-cache-paths/000077500000000000000000000000001477246567400204345ustar00rootroot00000000000000dub-1.40.0/test/issue838-custom-cache-paths/.no_build000066400000000000000000000000001477246567400222160ustar00rootroot00000000000000dub-1.40.0/test/issue838-custom-cache-paths/cache/000077500000000000000000000000001477246567400214775ustar00rootroot00000000000000dub-1.40.0/test/issue838-custom-cache-paths/cache/foo/000077500000000000000000000000001477246567400222625ustar00rootroot00000000000000dub-1.40.0/test/issue838-custom-cache-paths/cache/foo/1.0.0/000077500000000000000000000000001477246567400227165ustar00rootroot00000000000000dub-1.40.0/test/issue838-custom-cache-paths/cache/foo/1.0.0/foo/000077500000000000000000000000001477246567400235015ustar00rootroot00000000000000dub-1.40.0/test/issue838-custom-cache-paths/cache/foo/1.0.0/foo/dub.sdl000066400000000000000000000000661477246567400247610ustar00rootroot00000000000000name "foo" version "1.0.0" targetType "sourceLibrary" dub-1.40.0/test/issue838-custom-cache-paths/dub.sdl000066400000000000000000000000551477246567400217120ustar00rootroot00000000000000name "test" dependency "foo" version="1.0.0" dub-1.40.0/test/issue838-custom-cache-paths/source/000077500000000000000000000000001477246567400217345ustar00rootroot00000000000000dub-1.40.0/test/issue838-custom-cache-paths/source/app.d000066400000000000000000000000201477246567400226510ustar00rootroot00000000000000void main() { } dub-1.40.0/test/issue877-auto-fetch-package-on-run.sh000077500000000000000000000021361477246567400221540ustar00rootroot00000000000000#!/bin/bash set -eu -o pipefail set -x $DUB remove 'gitcompatibledubpackage@*' || true # check whether the interactive run mode works echo "y" | $DUB run gitcompatibledubpackage | grep "Hello DUB" $DUB remove gitcompatibledubpackage ! (echo "n" | $DUB run gitcompatibledubpackage | grep "Hello DUB") ! 
$DUB remove gitcompatibledubpackage # check -y $DUB run --yes gitcompatibledubpackage | grep "Hello DUB" $DUB remove gitcompatibledubpackage # check --yes $DUB run -y gitcompatibledubpackage | grep "Hello DUB" $DUB remove gitcompatibledubpackage (! $DUB run --non-interactive gitcompatibledubpackage || true) 2>&1 | \ grep "Failed to find.*gitcompatibledubpackage.*locally" # check supplying versions directly dub_log="$($DUB run gitcompatibledubpackage@1.0.3)" echo "$dub_log" | grep "Hello DUB" echo "$dub_log" | grep "Fetching.*1.0.3" $DUB remove gitcompatibledubpackage # check supplying an invalid version (! $DUB run gitcompatibledubpackage@0.42.43 || true) 2>&1 | \ grep 'No package gitcompatibledubpackage was found matching the dependency 0[.]42[.]43' ! $DUB remove gitcompatibledubpackage dub-1.40.0/test/issue884-init-defer-file-creation.sh000077500000000000000000000007161477246567400220550ustar00rootroot00000000000000#!/usr/bin/env bash . $(dirname "${BASH_SOURCE[0]}")/common.sh TMPDIR=${CURR_DIR}tmppack echo $TMPDIR mkdir ${TMPDIR} cd ${TMPDIR} # kill dub init during interactive mode mkfifo in ${DUB} init < in & sleep 1 kill $! rm in # ensure that no files are left behind NFILES_PLUS_ONE=`ls -la | wc -l` cd ${CURR_DIR} rm -r ${TMPDIR} # ignore sum + "." + ".." if [ ${NFILES_PLUS_ONE} -gt 3 ]; then die $LINENO 'Aborted dub init left spurious files around.' fi dub-1.40.0/test/issue895-local-configuration.sh000077500000000000000000000053301477246567400212460ustar00rootroot00000000000000#!/usr/bin/env bash . $(dirname "${BASH_SOURCE[0]}")/common.sh if [ -e /var/lib/dub/settings.json ]; then die $LINENO 'Found existing system wide DUB configuration. Aborting.' fi if [ -e ~/.dub/settings.json ]; then die $LINENO 'Found existing user wide DUB configuration. Aborting.' fi cd ${CURR_DIR} mkdir -p ../etc/dub echo "{\"defaultCompiler\": \"foo\"}" > ../etc/dub/settings.json echo "Empty file named foo." 
> ../bin/foo function cleanup { rm -r ../etc } trap cleanup EXIT unset DC if ! { ${DUB} describe --single issue103-single-file-package.d 2>&1 || true; } | grep -cF "Unknown compiler: $(dirname $CURR_DIR)/bin/foo"; then rm ../bin/foo die $LINENO 'DUB did not find the local configuration with an adjacent compiler.' fi echo "{\"defaultCompiler\": \"$CURR_DIR/foo\"}" > ../etc/dub/settings.json mv ../bin/foo $CURR_DIR if ! { ${DUB} describe --single issue103-single-file-package.d 2>&1 || true; } | grep -cF "Unknown compiler: $CURR_DIR/foo"; then rm $CURR_DIR/foo die $LINENO 'DUB did not find a locally-configured compiler with an absolute path.' fi echo "{\"defaultCompiler\": \"~/.dub/foo\"}" > ../etc/dub/settings.json mv $CURR_DIR/foo ~/.dub/ if ! { ${DUB} describe --single issue103-single-file-package.d 2>&1 || true; } | grep -cF "Unknown compiler: "; then rm ~/.dub/foo die $LINENO 'DUB did not find a locally-configured compiler with a tilde-prefixed path.' fi echo "{\"defaultCompiler\": \"\$DUB_BINARY_PATH/../foo\"}" > ../etc/dub/settings.json mv ~/.dub/foo .. if ! { ${DUB} describe --single issue103-single-file-package.d 2>&1 || true; } | grep -cF "Unknown compiler: $(dirname $CURR_DIR)/bin/../foo"; then rm ../foo die $LINENO 'DUB did not find a locally-configured compiler with a DUB-relative path.' fi echo "{\"defaultCompiler\": \"../foo\"}" > ../etc/dub/settings.json if ! { ${DUB} describe --single issue103-single-file-package.d 2>&1 || true; } | grep -cF "defaultCompiler specified in a DUB config file cannot use an unqualified relative path"; then rm ../foo die $LINENO 'DUB did not error properly for a locally-configured compiler with a relative path.' fi rm ../etc/dub/settings.json echo "Empty file named ldc2." > ../bin/ldc2 if ! { ${DUB} describe --single issue103-single-file-package.d 2>&1 || true; } | grep -cF "Failed to execute '$(dirname $CURR_DIR)/bin/ldc2'"; then rm ../bin/ldc2 die $LINENO 'DUB did not find ldc2 adjacent to it.' 
fi echo "{\"defaultCompiler\": \"foo\"}" > ../etc/dub/settings.json rm ../bin/ldc2 export PATH=$(dirname $CURR_DIR)${PATH:+:$PATH} if ! { ${DUB} describe --single issue103-single-file-package.d 2>&1 || true; } | grep -cF "Unknown compiler: foo"; then rm ../foo die $LINENO 'DUB did not find a locally-configured compiler in its PATH.' fi rm ../foo dub-1.40.0/test/issue923-subpackage-deps.sh000077500000000000000000000005021477246567400203310ustar00rootroot00000000000000#!/usr/bin/env bash . $(dirname "${BASH_SOURCE[0]}")/common.sh cd ${CURR_DIR}/issue923-subpackage-deps rm -rf main/.dub rm -rf a/.dub rm -rf b/.dub rm -f main/dub.selections.json ${DUB} build --bare --compiler=${DC} main if ! grep -c -e \"b\" main/dub.selections.json; then die $LINENO 'Dependency b not resolved.' fi dub-1.40.0/test/issue923-subpackage-deps/000077500000000000000000000000001477246567400177755ustar00rootroot00000000000000dub-1.40.0/test/issue923-subpackage-deps/.no_build000066400000000000000000000000021477246567400215610ustar00rootroot00000000000000 dub-1.40.0/test/issue923-subpackage-deps/a/000077500000000000000000000000001477246567400202155ustar00rootroot00000000000000dub-1.40.0/test/issue923-subpackage-deps/a/dub.sdl000066400000000000000000000002271477246567400214740ustar00rootroot00000000000000name "a" dependency ":foo" version="*" subPackage { name "foo" dependency "b" version="*" } subPackage { name "bar" dependency "a" version="*" }dub-1.40.0/test/issue923-subpackage-deps/b/000077500000000000000000000000001477246567400202165ustar00rootroot00000000000000dub-1.40.0/test/issue923-subpackage-deps/b/dub.sdl000066400000000000000000000000101477246567400214630ustar00rootroot00000000000000name "b"dub-1.40.0/test/issue923-subpackage-deps/b/source/000077500000000000000000000000001477246567400215165ustar00rootroot00000000000000dub-1.40.0/test/issue923-subpackage-deps/b/source/b.d000066400000000000000000000000331477246567400221000ustar00rootroot00000000000000module b; void test() { } 
dub-1.40.0/test/issue923-subpackage-deps/main/000077500000000000000000000000001477246567400207215ustar00rootroot00000000000000dub-1.40.0/test/issue923-subpackage-deps/main/dub.sdl000066400000000000000000000000531477246567400221750ustar00rootroot00000000000000name "main" dependency "a:bar" version="*" dub-1.40.0/test/issue923-subpackage-deps/main/source/000077500000000000000000000000001477246567400222215ustar00rootroot00000000000000dub-1.40.0/test/issue923-subpackage-deps/main/source/app.d000066400000000000000000000000441477246567400231440ustar00rootroot00000000000000import b; void main() { test(); } dub-1.40.0/test/issue934-path-dep.sh000077500000000000000000000003231477246567400170000ustar00rootroot00000000000000#!/usr/bin/env bash . $(dirname "${BASH_SOURCE[0]}")/common.sh cd ${CURR_DIR}/issue934-path-dep rm -rf main/.dub rm -rf a/.dub rm -rf b/.dub rm -f main/dub.selections.json cd main ${DUB} build --compiler=${DC} dub-1.40.0/test/issue934-path-dep/000077500000000000000000000000001477246567400164435ustar00rootroot00000000000000dub-1.40.0/test/issue934-path-dep/.no_build000066400000000000000000000000021477246567400202270ustar00rootroot00000000000000 dub-1.40.0/test/issue934-path-dep/a/000077500000000000000000000000001477246567400166635ustar00rootroot00000000000000dub-1.40.0/test/issue934-path-dep/a/dub.sdl000066400000000000000000000000111477246567400201310ustar00rootroot00000000000000name "a" dub-1.40.0/test/issue934-path-dep/b/000077500000000000000000000000001477246567400166645ustar00rootroot00000000000000dub-1.40.0/test/issue934-path-dep/b/dub.sdl000066400000000000000000000000431477246567400201370ustar00rootroot00000000000000name "b" dependency "a" path="../a"dub-1.40.0/test/issue934-path-dep/b/source/000077500000000000000000000000001477246567400201645ustar00rootroot00000000000000dub-1.40.0/test/issue934-path-dep/b/source/b.d000066400000000000000000000000331477246567400205460ustar00rootroot00000000000000module b; void test() { } 
dub-1.40.0/test/issue934-path-dep/main/000077500000000000000000000000001477246567400173675ustar00rootroot00000000000000dub-1.40.0/test/issue934-path-dep/main/dub.sdl000066400000000000000000000001021477246567400206360ustar00rootroot00000000000000name "main" dependency "a" path="../a" dependency "b" path="../b" dub-1.40.0/test/issue934-path-dep/main/source/000077500000000000000000000000001477246567400206675ustar00rootroot00000000000000dub-1.40.0/test/issue934-path-dep/main/source/app.d000066400000000000000000000000441477246567400216120ustar00rootroot00000000000000import b; void main() { test(); } dub-1.40.0/test/issue959-path-based-subpack-dep/000077500000000000000000000000001477246567400211545ustar00rootroot00000000000000dub-1.40.0/test/issue959-path-based-subpack-dep/dub.sdl000066400000000000000000000001701477246567400224300ustar00rootroot00000000000000name "bar" mainSourceFile "main.d" targetType "executable" dependency "foo" path="foo" dependency "foo:baz" path="foo" dub-1.40.0/test/issue959-path-based-subpack-dep/foo/000077500000000000000000000000001477246567400217375ustar00rootroot00000000000000dub-1.40.0/test/issue959-path-based-subpack-dep/foo/dub.sdl000066400000000000000000000002031477246567400232100ustar00rootroot00000000000000name "foo" targetType "sourceLibrary" subPackage { name "baz" targetType "sourceLibrary" dependency "foo" path="." 
} dub-1.40.0/test/issue959-path-based-subpack-dep/main.d000066400000000000000000000000161477246567400222420ustar00rootroot00000000000000void main() {}dub-1.40.0/test/issue97-targettype-none-nodeps/000077500000000000000000000000001477246567400212745ustar00rootroot00000000000000dub-1.40.0/test/issue97-targettype-none-nodeps/.fail_build000066400000000000000000000000001477246567400233550ustar00rootroot00000000000000dub-1.40.0/test/issue97-targettype-none-nodeps/.gitignore000066400000000000000000000000461477246567400232640ustar00rootroot00000000000000.dub docs.json __dummy.html *.o *.obj dub-1.40.0/test/issue97-targettype-none-nodeps/.no_run000066400000000000000000000000001477246567400225630ustar00rootroot00000000000000dub-1.40.0/test/issue97-targettype-none-nodeps/a/000077500000000000000000000000001477246567400215145ustar00rootroot00000000000000dub-1.40.0/test/issue97-targettype-none-nodeps/a/dub.sdl000066400000000000000000000000411477246567400227650ustar00rootroot00000000000000name "a" targetType "executable" dub-1.40.0/test/issue97-targettype-none-nodeps/a/source/000077500000000000000000000000001477246567400230145ustar00rootroot00000000000000dub-1.40.0/test/issue97-targettype-none-nodeps/a/source/app.d000066400000000000000000000001311477246567400237340ustar00rootroot00000000000000import std.stdio; void main() { writeln("Edit source/app.d to start your project."); } dub-1.40.0/test/issue97-targettype-none-nodeps/b/000077500000000000000000000000001477246567400215155ustar00rootroot00000000000000dub-1.40.0/test/issue97-targettype-none-nodeps/b/dub.sdl000066400000000000000000000000411477246567400227660ustar00rootroot00000000000000name "b" targetType "executable" dub-1.40.0/test/issue97-targettype-none-nodeps/b/source/000077500000000000000000000000001477246567400230155ustar00rootroot00000000000000dub-1.40.0/test/issue97-targettype-none-nodeps/b/source/app.d000066400000000000000000000001311477246567400237350ustar00rootroot00000000000000import std.stdio; void main() { 
writeln("Edit source/app.d to start your project."); } dub-1.40.0/test/issue97-targettype-none-nodeps/dub.sdl000066400000000000000000000001251477246567400225500ustar00rootroot00000000000000name "issue97-targettype-none" targetType "none" subPackage "./a/" subPackage "./b/" dub-1.40.0/test/issue97-targettype-none-onerecipe/000077500000000000000000000000001477246567400217555ustar00rootroot00000000000000dub-1.40.0/test/issue97-targettype-none-onerecipe/.gitignore000066400000000000000000000000461477246567400237450ustar00rootroot00000000000000.dub docs.json __dummy.html *.o *.obj dub-1.40.0/test/issue97-targettype-none-onerecipe/.no_run000066400000000000000000000000001477246567400232440ustar00rootroot00000000000000dub-1.40.0/test/issue97-targettype-none-onerecipe/.no_test000066400000000000000000000000001477246567400234170ustar00rootroot00000000000000dub-1.40.0/test/issue97-targettype-none-onerecipe/a/000077500000000000000000000000001477246567400221755ustar00rootroot00000000000000dub-1.40.0/test/issue97-targettype-none-onerecipe/a/source/000077500000000000000000000000001477246567400234755ustar00rootroot00000000000000dub-1.40.0/test/issue97-targettype-none-onerecipe/a/source/app.d000066400000000000000000000001311477246567400244150ustar00rootroot00000000000000import std.stdio; void main() { writeln("Edit source/app.d to start your project."); } dub-1.40.0/test/issue97-targettype-none-onerecipe/b/000077500000000000000000000000001477246567400221765ustar00rootroot00000000000000dub-1.40.0/test/issue97-targettype-none-onerecipe/b/source/000077500000000000000000000000001477246567400234765ustar00rootroot00000000000000dub-1.40.0/test/issue97-targettype-none-onerecipe/b/source/app.d000066400000000000000000000001311477246567400244160ustar00rootroot00000000000000import std.stdio; void main() { writeln("Edit source/app.d to start your project."); } 
dub-1.40.0/test/issue97-targettype-none-onerecipe/dub.sdl000066400000000000000000000004531477246567400232350ustar00rootroot00000000000000name "issue97-targettype-none" targetType "none" dependency "issue97-targettype-none:a" version="*" dependency "issue97-targettype-none:b" version="*" subPackage { name "a" targetType "executable" sourcePaths "a/source" } subPackage { name "b" targetType "executable" sourcePaths "b/source" } dub-1.40.0/test/issue97-targettype-none.sh000077500000000000000000000014641477246567400203520ustar00rootroot00000000000000#!/usr/bin/env bash set -e ${DUB} build --root ${CURR_DIR}/issue97-targettype-none 2>&1 || true BUILD_CACHE_A="$HOME/.dub/cache/issue97-targettype-none/~master/+a/build/" BUILD_CACHE_B="$HOME/.dub/cache/issue97-targettype-none/~master/+b/build/" if [ ! -d $BUILD_CACHE_A ]; then echo "Generated 'a' subpackage build artifact not found!" 1>&2 exit 1 fi if [ ! -d $BUILD_CACHE_B ]; then echo "Generated 'b' subpackage build artifact not found!" 1>&2 exit 1 fi ${DUB} clean --root ${CURR_DIR}/issue97-targettype-none 2>&1 # make sure both sub-packages are cleaned if [ -d $BUILD_CACHE_A ]; then echo "Generated 'a' subpackage build artifact were not cleaned!" 1>&2 exit 1 fi if [ -d $BUILD_CACHE_B ]; then echo "Generated 'b' subpackage build artifact were not cleaned!" 
1>&2 exit 1 fi dub-1.40.0/test/issue97-targettype-none/000077500000000000000000000000001477246567400200065ustar00rootroot00000000000000dub-1.40.0/test/issue97-targettype-none/.no_build000066400000000000000000000000001477246567400215700ustar00rootroot00000000000000dub-1.40.0/test/issue97-targettype-none/.no_run000066400000000000000000000000001477246567400212750ustar00rootroot00000000000000dub-1.40.0/test/issue97-targettype-none/a/000077500000000000000000000000001477246567400202265ustar00rootroot00000000000000dub-1.40.0/test/issue97-targettype-none/a/dub.sdl000066400000000000000000000000411477246567400214770ustar00rootroot00000000000000name "a" targetType "executable" dub-1.40.0/test/issue97-targettype-none/a/source/000077500000000000000000000000001477246567400215265ustar00rootroot00000000000000dub-1.40.0/test/issue97-targettype-none/a/source/app.d000066400000000000000000000001311477246567400224460ustar00rootroot00000000000000import std.stdio; void main() { writeln("Edit source/app.d to start your project."); } dub-1.40.0/test/issue97-targettype-none/b/000077500000000000000000000000001477246567400202275ustar00rootroot00000000000000dub-1.40.0/test/issue97-targettype-none/b/dub.sdl000066400000000000000000000000411477246567400215000ustar00rootroot00000000000000name "b" targetType "executable" dub-1.40.0/test/issue97-targettype-none/b/source/000077500000000000000000000000001477246567400215275ustar00rootroot00000000000000dub-1.40.0/test/issue97-targettype-none/b/source/app.d000066400000000000000000000001311477246567400224470ustar00rootroot00000000000000import std.stdio; void main() { writeln("Edit source/app.d to start your project."); } dub-1.40.0/test/issue97-targettype-none/dub.sdl000066400000000000000000000002731477246567400212660ustar00rootroot00000000000000name "issue97-targettype-none" targetType "none" dependency "issue97-targettype-none:a" version="*" dependency "issue97-targettype-none:b" version="*" subPackage "./a/" subPackage "./b/" 
dub-1.40.0/test/issue990-download-optional-selected.sh000077500000000000000000000003101477246567400225140ustar00rootroot00000000000000#!/usr/bin/env bash . $(dirname "${BASH_SOURCE[0]}")/common.sh cd ${CURR_DIR}/issue990-download-optional-selected ${DUB} clean ${DUB} remove gitcompatibledubpackage -n 2>/dev/null || true ${DUB} run dub-1.40.0/test/issue990-download-optional-selected/000077500000000000000000000000001477246567400221635ustar00rootroot00000000000000dub-1.40.0/test/issue990-download-optional-selected/.no_build000066400000000000000000000000001477246567400237450ustar00rootroot00000000000000dub-1.40.0/test/issue990-download-optional-selected/dub.sdl000066400000000000000000000001141477246567400234350ustar00rootroot00000000000000name "b" dependency "gitcompatibledubpackage" version="1.0.2" optional=true dub-1.40.0/test/issue990-download-optional-selected/dub.selections.json000066400000000000000000000001161477246567400257750ustar00rootroot00000000000000{ "fileVersion": 1, "versions": { "gitcompatibledubpackage": "1.0.2" } } dub-1.40.0/test/issue990-download-optional-selected/source/000077500000000000000000000000001477246567400234635ustar00rootroot00000000000000dub-1.40.0/test/issue990-download-optional-selected/source/app.d000066400000000000000000000001341477246567400244060ustar00rootroot00000000000000import gitcompatibledubpackage.subdir.file; void main() { assert(!hasTheWorldExploded); } dub-1.40.0/test/mutex-main-1/000077500000000000000000000000001477246567400155755ustar00rootroot00000000000000dub-1.40.0/test/mutex-main-1/.no_run000066400000000000000000000000001477246567400170640ustar00rootroot00000000000000dub-1.40.0/test/mutex-main-1/.no_test000066400000000000000000000000001477246567400172370ustar00rootroot00000000000000dub-1.40.0/test/mutex-main-1/dub.json000066400000000000000000000006611477246567400172450ustar00rootroot00000000000000{ "description": "A minimal D application.", "name": "mutex-main", "targetType": "executable", "configurations": [ { 
"name": "app", "targetName": "app", "mainSourceFile": "source/app.d" }, { "name": "app2", "targetName": "app2", "mainSourceFile": "source/app2.d" }, { "name": "failapp", "targetName": "failapp", "mainSourceFile": "source/app.d", "sourceFiles": ["source/app2.d"] } ] } dub-1.40.0/test/mutex-main-1/source/000077500000000000000000000000001477246567400170755ustar00rootroot00000000000000dub-1.40.0/test/mutex-main-1/source/app.d000066400000000000000000000001461477246567400200230ustar00rootroot00000000000000module app; import std.stdio; void main() { writeln("Edit source/app.d to start your project."); } dub-1.40.0/test/mutex-main-1/source/app2.d000066400000000000000000000001501477246567400201000ustar00rootroot00000000000000module app2; import std.stdio; void main() { writeln("Edit source/app2.d to start your project."); } dub-1.40.0/test/mutex-main-2/000077500000000000000000000000001477246567400155765ustar00rootroot00000000000000dub-1.40.0/test/mutex-main-2/.no_run000066400000000000000000000000001477246567400170650ustar00rootroot00000000000000dub-1.40.0/test/mutex-main-2/.no_test000066400000000000000000000000001477246567400172400ustar00rootroot00000000000000dub-1.40.0/test/mutex-main-2/dub.json000066400000000000000000000006611477246567400172460ustar00rootroot00000000000000{ "description": "A minimal D application.", "name": "mutex-main", "targetType": "executable", "configurations": [ { "name": "app2", "targetName": "app2", "mainSourceFile": "source/app2.d" }, { "name": "app", "targetName": "app", "mainSourceFile": "source/app.d" }, { "name": "failapp", "targetName": "failapp", "mainSourceFile": "source/app.d", "sourceFiles": ["source/app2.d"] } ] } dub-1.40.0/test/mutex-main-2/source/000077500000000000000000000000001477246567400170765ustar00rootroot00000000000000dub-1.40.0/test/mutex-main-2/source/app.d000066400000000000000000000001461477246567400200240ustar00rootroot00000000000000module app; import std.stdio; void main() { writeln("Edit source/app.d to start your 
project."); } dub-1.40.0/test/mutex-main-2/source/app2.d000066400000000000000000000001501477246567400201010ustar00rootroot00000000000000module app2; import std.stdio; void main() { writeln("Edit source/app2.d to start your project."); } dub-1.40.0/test/mutex-main-3/000077500000000000000000000000001477246567400155775ustar00rootroot00000000000000dub-1.40.0/test/mutex-main-3/.fail_build000066400000000000000000000000001477246567400176600ustar00rootroot00000000000000dub-1.40.0/test/mutex-main-3/.no_run000066400000000000000000000000001477246567400170660ustar00rootroot00000000000000dub-1.40.0/test/mutex-main-3/.no_test000066400000000000000000000000001477246567400172410ustar00rootroot00000000000000dub-1.40.0/test/mutex-main-3/dub.json000066400000000000000000000006621477246567400172500ustar00rootroot00000000000000{ "description": "A minimal D application.", "name": "mutex-main", "targetType": "executable", "configurations": [ { "name": "failapp", "targetName": "failapp", "mainSourceFile": "source/app.d", "sourceFiles": ["source/app2.d"] }, { "name": "app", "targetName": "app", "mainSourceFile": "source/app.d", }, { "name": "app2", "targetName": "app2", "mainSourceFile": "source/app2.d" } ] } dub-1.40.0/test/mutex-main-3/source/000077500000000000000000000000001477246567400170775ustar00rootroot00000000000000dub-1.40.0/test/mutex-main-3/source/app.d000066400000000000000000000001461477246567400200250ustar00rootroot00000000000000module app; import std.stdio; void main() { writeln("Edit source/app.d to start your project."); } dub-1.40.0/test/mutex-main-3/source/app2.d000066400000000000000000000001501477246567400201020ustar00rootroot00000000000000module app2; import std.stdio; void main() { writeln("Edit source/app2.d to start your project."); } 
dub-1.40.0/test/path-subpackage-ref/000077500000000000000000000000001477246567400171645ustar00rootroot00000000000000dub-1.40.0/test/path-subpackage-ref/dub.json000066400000000000000000000001441477246567400206300ustar00rootroot00000000000000{ "name": "test", "dependencies": { "test:subpack": "*" }, "subPackages": [ "subpack/" ] } dub-1.40.0/test/path-subpackage-ref/source/000077500000000000000000000000001477246567400204645ustar00rootroot00000000000000dub-1.40.0/test/path-subpackage-ref/source/app.d000066400000000000000000000000651477246567400214120ustar00rootroot00000000000000module app; import lib; void main() { libFunc(); } dub-1.40.0/test/path-subpackage-ref/subpack/000077500000000000000000000000001477246567400206145ustar00rootroot00000000000000dub-1.40.0/test/path-subpackage-ref/subpack/dub.json000066400000000000000000000002371477246567400222630ustar00rootroot00000000000000{ "name": "subpack", "description": "A minimal D application.", "copyright": "Copyright © 2014, sludwig", "authors": ["sludwig"], "dependencies": { } } dub-1.40.0/test/path-subpackage-ref/subpack/source/000077500000000000000000000000001477246567400221145ustar00rootroot00000000000000dub-1.40.0/test/path-subpackage-ref/subpack/source/lib.d000066400000000000000000000001141477246567400230230ustar00rootroot00000000000000import std.stdio; void libFunc() { writeln("Library function called."); } dub-1.40.0/test/pr1549-dub-exe-var.sh000077500000000000000000000004521477246567400167740ustar00rootroot00000000000000#! /usr/bin/env bash set -e . 
$(dirname "${BASH_SOURCE[0]}")/common.sh PR1549=$CURR_DIR/pr1549-dub-exe-var ${DUB} build --root ${PR1549} OUTPUT=$(${PR1549}/test-application) if [[ "$OUTPUT" != "modified code" ]]; then die $LINENO "\$DUB build variable was (likely) not evaluated correctly"; fi dub-1.40.0/test/pr1549-dub-exe-var/000077500000000000000000000000001477246567400164345ustar00rootroot00000000000000dub-1.40.0/test/pr1549-dub-exe-var/.gitignore000066400000000000000000000000221477246567400204160ustar00rootroot00000000000000setmsg setmsg.exe dub-1.40.0/test/pr1549-dub-exe-var/.no_build000066400000000000000000000000001477246567400202160ustar00rootroot00000000000000dub-1.40.0/test/pr1549-dub-exe-var/dub.sdl000066400000000000000000000003231477246567400177100ustar00rootroot00000000000000name "test-application" targetType "executable" preBuildCommands "$DUB run --single $PACKAGE_DIR/setmsg.d -- \"modified code\"" postBuildCommands "$DUB run --single $PACKAGE_DIR/setmsg.d -- \"unmodified code\"" dub-1.40.0/test/pr1549-dub-exe-var/setmsg.d000066400000000000000000000007201477246567400201020ustar00rootroot00000000000000/+ dub.sdl: +/ import std.exception; import std.path; import std.process; import std.stdio; void main(in string[] args) { enforce(args.length > 1); const string msg = args[1]; const path = buildPath(environment["DUB_PACKAGE_DIR"], "source", "app.d"); auto file = File(path, "w"); file.writeln(`import std.stdio;`); file.writeln(); file.writeln(`void main() {`); file.writefln(` writeln("%s");`, msg); file.writeln(`}`); } dub-1.40.0/test/pr1549-dub-exe-var/source/000077500000000000000000000000001477246567400177345ustar00rootroot00000000000000dub-1.40.0/test/pr1549-dub-exe-var/source/app.d000066400000000000000000000001031477246567400206530ustar00rootroot00000000000000import std.stdio; void main() { writeln("unmodified code"); } 
dub-1.40.0/test/pr2642-cache-db/000077500000000000000000000000001477246567400157365ustar00rootroot00000000000000dub-1.40.0/test/pr2642-cache-db/.gitignore000066400000000000000000000000311477246567400177200ustar00rootroot00000000000000dubhome/ pr2642-cache-db dub-1.40.0/test/pr2642-cache-db/.no_test000066400000000000000000000000001477246567400174000ustar00rootroot00000000000000dub-1.40.0/test/pr2642-cache-db/dub.sdl000066400000000000000000000000611477246567400172110ustar00rootroot00000000000000name "pr2642-cache-db"; targetType "executable"; dub-1.40.0/test/pr2642-cache-db/source/000077500000000000000000000000001477246567400172365ustar00rootroot00000000000000dub-1.40.0/test/pr2642-cache-db/source/test_cache_db.d000066400000000000000000000064231477246567400221570ustar00rootroot00000000000000module test_cache_db; import std.path; import std.file; import std.process; import std.stdio; import std.json; void main() { const dubhome = __FILE_FULL_PATH__.dirName().dirName().buildNormalizedPath("dubhome"); if (exists(dubhome)) { rmdirRecurse(dubhome); } const string[string] env = [ "DUB_HOME": dubhome, ]; const fetchProgram = [ environment["DUB"], "fetch", "gitcompatibledubpackage@1.0.4", ]; auto dubFetch = spawnProcess(fetchProgram, stdin, stdout, stderr, env); wait(dubFetch); const buildProgramLib = [ environment["DUB"], "build", "--build=debug", "--config=lib", "gitcompatibledubpackage@1.0.4", ]; auto dubBuild = spawnProcess(buildProgramLib, stdin, stdout, stderr, env); wait(dubBuild); const buildProgramExe = [ environment["DUB"], "build", "--build=debug", "--config=exe", "gitcompatibledubpackage@1.0.4", ]; dubBuild = spawnProcess(buildProgramExe, stdin, stdout, stderr, env); wait(dubBuild); scope (success) { // leave dubhome in the tree for analysis in case of failure rmdirRecurse(dubhome); } const buildDbPath = buildNormalizedPath(dubhome, "cache", "gitcompatibledubpackage", "1.0.4", "db.json"); assert(exists(buildDbPath), buildDbPath ~ " should exist"); const 
buildDbStr = readText(buildDbPath); auto json = parseJSON(buildDbStr); assert(json.type == JSONType.array, "build db should be an array"); assert(json.array.length == 2, "build db should have 2 entries"); auto db = json.array[0].object; void assertArray(string field) { assert(field in db, "db.json should have an array field " ~ field); assert(db[field].type == JSONType.array, "expected field " ~ field ~ " to be an array"); } void assertString(string field, string value = null) { assert(field in db, "db.json should have an string field " ~ field); assert(db[field].type == JSONType.string, "expected field " ~ field ~ " to be a string"); if (value) assert(db[field].str == value, "expected field " ~ field ~ " to equal " ~ value); } assertArray("architecture"); assertString("buildId"); assertString("buildType", "debug"); assertString("compiler"); assertString("compilerBinary"); assertString("compilerVersion"); assertString("configuration", "lib"); assertString("package", "gitcompatibledubpackage"); assertArray("platform"); assertString("targetBinaryPath"); assertString("version", "1.0.4"); auto binName = db["targetBinaryPath"].str; assert(isFile(binName), "expected " ~ binName ~ " to be a file."); db = json.array[1].object; assertArray("architecture"); assertString("buildId"); assertString("buildType", "debug"); assertString("compiler"); assertString("compilerBinary"); assertString("compilerVersion"); assertString("configuration", "exe"); assertString("package", "gitcompatibledubpackage"); assertArray("platform"); assertString("targetBinaryPath"); assertString("version", "1.0.4"); binName = db["targetBinaryPath"].str; assert(isFile(binName), "expected " ~ binName ~ " to be a file."); } dub-1.40.0/test/pr2644-describe-artifact-path/000077500000000000000000000000001477246567400206175ustar00rootroot00000000000000dub-1.40.0/test/pr2644-describe-artifact-path/.gitignore000066400000000000000000000000471477246567400226100ustar00rootroot00000000000000dubhome/ 
pr2644-describe-artifact-path dub-1.40.0/test/pr2644-describe-artifact-path/.no_test000066400000000000000000000000001477246567400222610ustar00rootroot00000000000000dub-1.40.0/test/pr2644-describe-artifact-path/dub.sdl000066400000000000000000000000771477246567400221010ustar00rootroot00000000000000name "pr2644-describe-artifact-path"; targetType "executable"; dub-1.40.0/test/pr2644-describe-artifact-path/source/000077500000000000000000000000001477246567400221175ustar00rootroot00000000000000dub-1.40.0/test/pr2644-describe-artifact-path/source/describe_artifact_path.d000066400000000000000000000031171477246567400267370ustar00rootroot00000000000000module describe_artifact_path; import std.path; import std.file; import std.process; import std.stdio; import std.json; void main() { const dubhome = __FILE_FULL_PATH__.dirName().dirName().buildNormalizedPath("dubhome"); if (exists(dubhome)) { rmdirRecurse(dubhome); } scope (success) { // leave dubhome in the tree for analysis in case of failure rmdirRecurse(dubhome); } const string[string] env = [ "DUB_HOME": dubhome, ]; const fetchProgram = [ environment["DUB"], "fetch", "gitcompatibledubpackage@1.0.4", ]; auto dubFetch = spawnProcess(fetchProgram, stdin, stdout, stderr, env); wait(dubFetch); const describeProgram = [ environment["DUB"], "describe", "--compiler=" ~ environment["DC"], "--build=debug", "--config=lib", "gitcompatibledubpackage@1.0.4", ]; auto result = execute(describeProgram, env); assert(result.status == 0, "expected dub describe to return zero"); auto json = parseJSON(result.output); auto cacheFile = json["targets"][0]["cacheArtifactPath"].str; assert(!exists(cacheFile), "found cache file in virgin dubhome"); const buildProgram = [ environment["DUB"], "build", "--compiler=" ~ environment["DC"], "--build=debug", "--config=lib", "gitcompatibledubpackage@1.0.4", ]; auto dubBuild = spawnProcess(buildProgram, stdin, stdout, stderr, env); wait(dubBuild); assert(exists(cacheFile), "did not find cache file after 
build"); } dub-1.40.0/test/pr2647-build-deep/000077500000000000000000000000001477246567400163275ustar00rootroot00000000000000dub-1.40.0/test/pr2647-build-deep/.gitignore000066400000000000000000000000331477246567400203130ustar00rootroot00000000000000dubhome/ pr2647-build-deep dub-1.40.0/test/pr2647-build-deep/.no_test000066400000000000000000000000001477246567400177710ustar00rootroot00000000000000dub-1.40.0/test/pr2647-build-deep/dub.sdl000066400000000000000000000000631477246567400176040ustar00rootroot00000000000000name "pr2647-build-deep"; targetType "executable"; dub-1.40.0/test/pr2647-build-deep/pack/000077500000000000000000000000001477246567400172455ustar00rootroot00000000000000dub-1.40.0/test/pr2647-build-deep/pack/dub.sdl000066400000000000000000000001131477246567400205160ustar00rootroot00000000000000name "pack" targetType "staticLibrary" dependency "urld" version="==2.1.1" dub-1.40.0/test/pr2647-build-deep/pack/source/000077500000000000000000000000001477246567400205455ustar00rootroot00000000000000dub-1.40.0/test/pr2647-build-deep/pack/source/lib.d000066400000000000000000000002531477246567400214600ustar00rootroot00000000000000module lib; import url; string getDlangUrl() { URL url; with(url) { scheme = "https"; host = "dlang.org"; } return url.toString(); } dub-1.40.0/test/pr2647-build-deep/source/000077500000000000000000000000001477246567400176275ustar00rootroot00000000000000dub-1.40.0/test/pr2647-build-deep/source/test_build_deep.d000066400000000000000000000036021477246567400231300ustar00rootroot00000000000000module test_build_deep; import std.array; import std.file; import std.path; import std.process; import std.stdio; void main() { const dubhome = __FILE_FULL_PATH__.dirName().dirName().buildNormalizedPath("dubhome"); const packdir = __FILE_FULL_PATH__.dirName().dirName().buildNormalizedPath("pack"); const dub = absolutePath(environment["DUB"]); if (exists(dubhome)) { rmdirRecurse(dubhome); } scope (success) { // leave dubhome in the tree for analysis in case 
of failure rmdirRecurse(dubhome); } const string[string] env = [ "DUB_HOME": dubhome, ]; // testing the regular way first: `dub build` only builds what is needed // (urld is downloaded but not built) const dubBuildProg = [dub, "build"]; writefln("running %s ...", dubBuildProg.join(" ")); auto dubBuild = spawnProcess(dubBuildProg, stdin, stdout, stderr, env, Config.none, packdir); wait(dubBuild); assert(exists(buildPath(dubhome, "cache", "pack"))); assert(isDir(buildPath(dubhome, "cache", "pack"))); assert(exists(buildPath(dubhome, "packages", "urld"))); assert(isDir(buildPath(dubhome, "packages", "urld"))); assert(!exists(buildPath(dubhome, "cache", "urld"))); // now testing the --deep switch: `dub build --deep` will build urld const dubBuildDeepProg = [dub, "build", "--deep"]; writefln("running %s ...", dubBuildDeepProg.join(" ")); auto dubBuildDeep = spawnProcess(dubBuildDeepProg, stdin, stdout, stderr, env, Config.none, packdir); wait(dubBuildDeep); assert(exists(buildPath(dubhome, "cache", "pack"))); assert(isDir(buildPath(dubhome, "cache", "pack"))); assert(exists(buildPath(dubhome, "packages", "urld"))); assert(isDir(buildPath(dubhome, "packages", "urld"))); assert(exists(buildPath(dubhome, "cache", "urld"))); assert(isDir(buildPath(dubhome, "cache", "urld"))); } dub-1.40.0/test/removed-dub-obj.sh000077500000000000000000000007231477246567400166750ustar00rootroot00000000000000#!/usr/bin/env bash . 
$(dirname "${BASH_SOURCE[0]}")/common.sh cd ${CURR_DIR}/removed-dub-obj DUB_CACHE_PATH="$HOME/.dub/cache/removed-dub-obj" rm -rf "$DUB_CACHE_PATH" ${DUB} build --compiler=${DC} [ -d "$DUB_CACHE_PATH" ] || die $LINENO "$DUB_CACHE_PATH not found" numObjectFiles=$(find "$DUB_CACHE_PATH" -type f -iname '*.o*' | wc -l) # note: fails with LDC < v1.1 [ "$numObjectFiles" -eq 0 ] || die $LINENO "Found left-over object files in $DUB_CACHE_PATH" dub-1.40.0/test/removed-dub-obj/000077500000000000000000000000001477246567400163345ustar00rootroot00000000000000dub-1.40.0/test/removed-dub-obj/.no_build000066400000000000000000000000001477246567400201160ustar00rootroot00000000000000dub-1.40.0/test/removed-dub-obj/.no_run000066400000000000000000000000001477246567400176230ustar00rootroot00000000000000dub-1.40.0/test/removed-dub-obj/.no_test000066400000000000000000000000001477246567400177760ustar00rootroot00000000000000dub-1.40.0/test/removed-dub-obj/dub.sdl000066400000000000000000000000621477246567400176100ustar00rootroot00000000000000name "removed-dub-obj" targetType "staticLibrary"dub-1.40.0/test/removed-dub-obj/source/000077500000000000000000000000001477246567400176345ustar00rootroot00000000000000dub-1.40.0/test/removed-dub-obj/source/test.d000066400000000000000000000000541477246567400207570ustar00rootroot00000000000000module test; unittest { assert(true); }dub-1.40.0/test/run-unittest.d000066400000000000000000000156341477246567400162120ustar00rootroot00000000000000#!/usr/bin/env dub /+dub.sdl: name: run_unittest targetName: run-unittest dependency "common" path="./common" +/ module run_unittest; import common; int main(string[] args) { import std.algorithm, std.file, std.format, std.stdio, std.path, std.process, std.string; alias ProcessConfig = std.process.Config; //** if [ -z ${DUB:-} ]; then //** die $LINENO 'Variable $DUB must be defined to run the tests.' 
//** fi auto dub = environment.get("DUB", ""); if (dub == "") { logError(`Environment variable "DUB" must be defined to run the tests.`); return 1; } //** if [ -z ${DC:-} ]; then //** log '$DC not defined, assuming dmd...' //** DC=dmd //** fi auto dc = environment.get("DC", ""); if (dc == "") { log(`Environment variable "DC" not defined, assuming dmd...`); dc = "dmd"; } // Clear log file { File(logFile, "w"); } //** DC_BIN=$(basename "$DC") //** CURR_DIR=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd) //** FRONTEND="${FRONTEND:-}" const dc_bin = baseName(dc).stripExtension; const curr_dir = __FILE_FULL_PATH__.dirName(); const frontend = environment.get("FRONTEND", __VERSION__.format!"%04d"); //** if [ "$#" -gt 0 ]; then FILTER=$1; else FILTER=".*"; fi auto filter = (args.length > 1) ? args[1] : "*"; version (linux) auto os = "linux"; version (Windows) auto os = "windows"; version (OSX) auto os = "osx"; version (Posix) { //** for script in $(ls $CURR_DIR/*.sh); do //** if [[ ! "$script" =~ $FILTER ]]; then continue; fi //** if [ "$script" = "$(gnureadlink ${BASH_SOURCE[0]})" ] || [ "$(basename $script)" = "common.sh" ]; then continue; fi //** if [ -e $script.min_frontend ] && [ ! -z "$FRONTEND" ] && [ ${FRONTEND} \< $(cat $script.min_frontend) ]; then continue; fi //** log "Running $script..." //** DUB=$DUB DC=$DC CURR_DIR="$CURR_DIR" $script || logError "Script failure." 
//** done foreach(DirEntry script; dirEntries(curr_dir, "*.sh", SpanMode.shallow)) { if (!script.name.baseName.globMatch(filter)) continue; if (!script.name.endsWith(".sh")) continue; if (baseName(script.name).among("run-unittest.sh", "common.sh")) continue; const min_frontend = script.name ~ ".min_frontend"; if (exists(min_frontend) && frontend.length && cmp(frontend, min_frontend.readText) < 0) continue; log("Running " ~ script ~ "..."); if (spawnShell(script.name, ["DUB":dub, "DC":dc, "CURR_DIR":curr_dir]).wait) logError("Script failure."); else log(script.name.baseName, " status: Ok"); } } foreach (DirEntry script; dirEntries(curr_dir, "*.script.d", SpanMode.shallow)) { if (!script.name.baseName.globMatch(filter)) continue; if (!script.name.endsWith(".d")) continue; const min_frontend = script.name ~ ".min_frontend"; if (frontend.length && exists(min_frontend) && cmp(frontend, min_frontend.readText) < 0) continue; log("Running " ~ script ~ "..."); if (spawnProcess([dub, script.name], ["DUB":dub, "DC":dc, "CURR_DIR":curr_dir]).wait) logError("Script failure."); else log(script.name, " status: Ok"); } //for pack in $(ls -d $CURR_DIR/*/); do foreach (DirEntry pack; dirEntries(curr_dir, SpanMode.shallow)) { //if [[ ! "$pack" =~ $FILTER ]]; then continue; fi if (!pack.name.baseName.globMatch(filter)) continue; if (!pack.isDir || pack.name.baseName.startsWith(".")) continue; if (!pack.name.buildPath("dub.json").exists && !pack.name.buildPath("dub.sdl").exists && !pack.name.buildPath("package.json").exists) continue; //if [ -e $pack/.min_frontend ] && [ ! -z "$FRONTEND" -a "$FRONTEND" \< $(cat $pack/.min_frontend) ]; then continue; fi if (pack.name.buildPath(".min_frontend").exists && cmp(frontend, pack.name.buildPath(".min_frontend").readText) < 0) continue; //#First we build the packages //if [ ! -e $pack/.no_build ] && [ ! 
-e $pack/.no_build_$DC_BIN ]; then # For sourceLibrary bool build = (!pack.name.buildPath(".no_build").exists && !pack.name.buildPath(".no_build_" ~ dc_bin).exists && !pack.name.buildPath(".no_build_" ~ os).exists); if (build) { //build=1 //if [ -e $pack/.fail_build ]; then // log "Building $pack, expected failure..." // $DUB build --force --root=$pack --compiler=$DC 2>/dev/null && logError "Error: Failure expected, but build passed." //else // log "Building $pack..." // $DUB build --force --root=$pack --compiler=$DC || logError "Build failure." //fi //if [ -e $pack/.fail_build ]; then if (pack.name.buildPath(".fail_build").exists) { log("Building " ~ pack.name.baseName ~ ", expected failure..."); if (spawnProcess([dub, "build", "--force", "--compiler", dc], ["DUB":dub, "DC":dc, "CURR_DIR":curr_dir], ProcessConfig.none, pack.name).wait) log(pack.name.baseName, " status: Ok"); else logError("Failure expected, but build passed."); } else { log("Building ", pack.name.baseName, "..."); if (spawnProcess([dub, "build", "--force", "--compiler", dc], ["DUB":dub, "DC":dc, "CURR_DIR":curr_dir], ProcessConfig.none, pack.name).wait) logError("Script failure."); else log(pack.name.baseName, " status: Ok"); } } //else // build=0 //fi //# We run the ones that are supposed to be run //if [ $build -eq 1 ] && [ ! -e $pack/.no_run ] && [ ! -e $pack/.no_run_$DC_BIN ]; then // log "Running $pack..." // $DUB run --force --root=$pack --compiler=$DC || logError "Run failure." //fi if (build && !pack.name.buildPath(".no_run").exists && !pack.name.buildPath(".no_run_" ~ dc_bin).exists && !pack.name.buildPath(".no_run_" ~ os).exists) { log("Running ", pack.name.baseName, "..."); if (spawnProcess([dub, "run", "--force", "--compiler", dc], ["DUB":dub, "DC":dc, "CURR_DIR":curr_dir], ProcessConfig.none, pack.name).wait) logError("Run failure."); else log(pack.name.baseName, " status: Ok"); } //# Finally, the unittest part //if [ $build -eq 1 ] && [ ! -e $pack/.no_test ] && [ ! 
-e $pack/.no_test_$DC_BIN ]; then // log "Testing $pack..." // $DUB test --force --root=$pack --compiler=$DC || logError "Test failure." //fi if (build && !pack.name.buildPath(".no_test").exists && !pack.name.buildPath(".no_test_" ~ dc_bin).exists && !pack.name.buildPath(".no_test_" ~ os).exists) { log("Testing ", pack.name.baseName, "..."); if (spawnProcess([dub, "test", "--force", "--root", pack.name, "--compiler", dc], ["DUB":dub, "DC":dc, "CURR_DIR":curr_dir]).wait) logError("Test failure."); else log(pack.name.baseName, " status: Ok"); } //done } //echo //echo 'Testing summary:' //cat $(dirname "${BASH_SOURCE[0]}")/test.log writeln(); writeln("Testing summary:"); auto logLines = readText("test.log").splitLines; foreach (line; logLines) writeln(line); auto errCnt = logLines.count!(a => a.startsWith("[ERROR]")); auto passCnt = logLines.count!(a => a.startsWith("[INFO]") && a.endsWith("status: Ok")); writeln(passCnt , "/", errCnt + passCnt, " tests succeeded."); return any_errors; } dub-1.40.0/test/run-unittest.sh000077500000000000000000000050521477246567400163750ustar00rootroot00000000000000#!/usr/bin/env bash set -ueo pipefail . $(dirname "${BASH_SOURCE[0]}")/common.sh > $(dirname "${BASH_SOURCE[0]}")/test.log function log() { echo -e "\033[0;33m[INFO] $@\033[0m" echo "[INFO] $@" >> $(dirname "${BASH_SOURCE[0]}")/test.log } function logError() { echo -e 1>&2 "\033[0;31m[ERROR] $@\033[0m" echo "[ERROR] $@" >> $(dirname "${BASH_SOURCE[0]}")/test.log any_errors=1 } function die() { logError "$@" exit 1 } export -f log export -f die if [ -z ${DUB:-} ]; then die $LINENO 'Variable $DUB must be defined to run the tests.' fi if [ -z ${DC:-} ]; then log '$DC not defined, assuming dmd...' DC=dmd fi DC_BIN=$(basename "$DC") CURR_DIR=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd) FRONTEND="${FRONTEND:-}" if [ -z ${FRONTEND:-} ]; then if [ "$DC_BIN" == "ldc2" ]; then FRONTEND=$(ldc2 --version | grep 'based on DMD v2.' 
| sed -E -n 's/^.*DMD v(2\.[0-9]+\.[0-9]).*$/\1/p') fi if [ "$DC_BIN" == "dmd" ]; then FRONTEND=$(dmd --version | grep 'D Compiler v2.' | sed -E -n 's/^.*D Compiler v(2\.[0-9]+\.[0-9]).*$/\1/p') fi fi echo "Running unittests with $DC_BIN (frontend=$FRONTEND)" if [ "$#" -gt 0 ]; then FILTER=$1; else FILTER=".*"; fi for pack in $(ls -d $CURR_DIR/*/); do if [[ ! "$pack" =~ $FILTER ]]; then continue; fi if [ -f $pack/.min_frontend ] && [ ! -z "$FRONTEND" -a "$FRONTEND" \< $(cat $pack/.min_frontend) ]; then continue; fi # First we build the packages if [ ! -e $pack/.no_build ] && [ ! -e $pack/.no_build_$DC_BIN ]; then # For sourceLibrary build=1 if [ -e $pack/.fail_build ]; then log "Building $pack, expected failure..." $DUB build --force --root=$pack --compiler=$DC 2>/dev/null && logError "Error: Failure expected, but build passed." else log "Building $pack..." $DUB build --force --root=$pack --compiler=$DC || logError "Build failure." fi else build=0 fi # We run the ones that are supposed to be run if [ $build -eq 1 ] && [ ! -e $pack/.no_run ] && [ ! -e $pack/.no_run_$DC_BIN ]; then log "Running $pack..." $DUB run --force --root=$pack --compiler=$DC || logError "Run failure." fi # Finally, the unittest part if [ $build -eq 1 ] && [ ! -e $pack/.no_test ] && [ ! -e $pack/.no_test_$DC_BIN ]; then log "Testing $pack..." $DUB test --force --root=$pack --compiler=$DC || logError "Test failure." 
fi done echo echo 'Testing summary:' cat $(dirname "${BASH_SOURCE[0]}")/test.log exit ${any_errors:-0} dub-1.40.0/test/sdl-package-simple/000077500000000000000000000000001477246567400170155ustar00rootroot00000000000000dub-1.40.0/test/sdl-package-simple/dub.sdl000066400000000000000000000000551477246567400202730ustar00rootroot00000000000000name "exec-simple"; targetType "executable"; dub-1.40.0/test/sdl-package-simple/source/000077500000000000000000000000001477246567400203155ustar00rootroot00000000000000dub-1.40.0/test/sdl-package-simple/source/app.d000066400000000000000000000000761477246567400212450ustar00rootroot00000000000000import std.stdio; void main() { writeln(__FUNCTION__); } dub-1.40.0/test/single-file-sdl-default-name.d000066400000000000000000000002251477246567400210350ustar00rootroot00000000000000/++dub.sdl: dependency "sourcelib-simple" path="1-sourceLib-simple" +/ module single; void main(string[] args) { import sourcelib.app; entry(); } dub-1.40.0/test/single-file-sdl-default-name.sh000077500000000000000000000004701477246567400212310ustar00rootroot00000000000000#!/usr/bin/env bash set -e cd ${CURR_DIR} rm -f single-file-sdl-default-name ${DUB} run --single single-file-sdl-default-name.d --compiler=${DC} if [ ! 
-f single-file-sdl-default-name ]; then echo "Normal invocation did not produce a binary in the current directory" exit 1 fi rm single-file-sdl-default-name dub-1.40.0/test/subpackage-common-with-sourcefile-globbing.sh000077500000000000000000000003471477246567400242010ustar00rootroot00000000000000#!/bin/sh set -e cd ${CURR_DIR}/subpackage-common-with-sourcefile-globbing rm -rf .dub dub.selections.json ${DUB} build --compiler=${DC} :server -v ${DUB} build --compiler=${DC} :client -v ${DUB} build --compiler=${DC} :common -v dub-1.40.0/test/subpackage-common-with-sourcefile-globbing/000077500000000000000000000000001477246567400236365ustar00rootroot00000000000000dub-1.40.0/test/subpackage-common-with-sourcefile-globbing/.no_build000066400000000000000000000000001477246567400254200ustar00rootroot00000000000000dub-1.40.0/test/subpackage-common-with-sourcefile-globbing/code/000077500000000000000000000000001477246567400245505ustar00rootroot00000000000000dub-1.40.0/test/subpackage-common-with-sourcefile-globbing/code/mypackage/000077500000000000000000000000001477246567400265115ustar00rootroot00000000000000dub-1.40.0/test/subpackage-common-with-sourcefile-globbing/code/mypackage/client/000077500000000000000000000000001477246567400277675ustar00rootroot00000000000000dub-1.40.0/test/subpackage-common-with-sourcefile-globbing/code/mypackage/client/app.d000066400000000000000000000001341477246567400307120ustar00rootroot00000000000000import mypackage.client.extra; import mypackage.common.blah; void main() { foo(); blah(); } dub-1.40.0/test/subpackage-common-with-sourcefile-globbing/code/mypackage/client/extra.d000066400000000000000000000000551477246567400312570ustar00rootroot00000000000000module mypackage.client.extra; void foo() {} 
dub-1.40.0/test/subpackage-common-with-sourcefile-globbing/code/mypackage/common/000077500000000000000000000000001477246567400300015ustar00rootroot00000000000000dub-1.40.0/test/subpackage-common-with-sourcefile-globbing/code/mypackage/common/blah.d000066400000000000000000000000551477246567400310540ustar00rootroot00000000000000module mypackage.common.blah; void blah() {} dub-1.40.0/test/subpackage-common-with-sourcefile-globbing/code/mypackage/server/000077500000000000000000000000001477246567400300175ustar00rootroot00000000000000dub-1.40.0/test/subpackage-common-with-sourcefile-globbing/code/mypackage/server/app.d000066400000000000000000000001341477246567400307420ustar00rootroot00000000000000import mypackage.server.extra; import mypackage.common.blah; void main() { foo(); blah(); } dub-1.40.0/test/subpackage-common-with-sourcefile-globbing/code/mypackage/server/extra.d000066400000000000000000000000551477246567400313070ustar00rootroot00000000000000module mypackage.server.extra; void foo() {} dub-1.40.0/test/subpackage-common-with-sourcefile-globbing/dub.sdl000066400000000000000000000006021477246567400251120ustar00rootroot00000000000000name "mypackage" targetType "none" subPackage { name "server" sourceFiles "code/mypackage/[sc][oe]*/*.d" targetType "executable" } subPackage { name "client" sourceFiles "code/mypackage/client/*.d" targetType "executable" dependency "mypackage:common" version="*" } subPackage { name "common" sourceFiles "code/mypackage/common/*.d" importPaths "code" targetType "library" } dub-1.40.0/test/subpackage-ref/000077500000000000000000000000001477246567400162325ustar00rootroot00000000000000dub-1.40.0/test/subpackage-ref/dub.json000066400000000000000000000001641477246567400177000ustar00rootroot00000000000000{ "name": "test", "dependencies": { "test:subpack": "*" }, "subPackages": [ { "name": "subpack" } ] } 
dub-1.40.0/test/subpackage-ref/source/000077500000000000000000000000001477246567400175325ustar00rootroot00000000000000dub-1.40.0/test/subpackage-ref/source/app.d000066400000000000000000000001311477246567400204520ustar00rootroot00000000000000import std.stdio; void main() { writeln("Edit source/app.d to start your project."); } dub-1.40.0/test/test-upgrade-subpackages.sh000077500000000000000000000020031477246567400205770ustar00rootroot00000000000000#!/usr/bin/env bash . $(dirname ${BASH_SOURCE[0]})/common.sh PACK_PATH="$CURR_DIR"/path-subpackage-ref # make sure that there are no left-over selections files rm -f $PACK_PATH/dub.selections.json $PACK_PATH/subpack/dub.selections.json # first upgrade only the root package if ! ${DUB} upgrade --root $PACK_PATH; then die $LINENO 'The upgrade command failed.' fi if [ ! -f $PACK_PATH/dub.selections.json ] || [ -f $PACK_PATH/subpack/dub.selections.json ]; then die $LINENO 'The upgrade command did not generate the right set of dub.selections.json files.' fi rm -f $PACK_PATH/dub.selections.json # now upgrade with all sub packages if ! ${DUB} upgrade -s --root $PACK_PATH; then die $LINENO 'The upgrade command failed with -s.' fi if [ ! -f $PACK_PATH/dub.selections.json ] || [ ! -f $PACK_PATH/subpack/dub.selections.json ]; then die $LINENO 'The upgrade command did not generate all dub.selections.json files.' fi # clean up rm -f $PACK_PATH/dub.selections.json $PACK_PATH/subpack/dub.selections.json dub-1.40.0/test/test-version-opt.sh000077500000000000000000000001501477246567400171500ustar00rootroot00000000000000#!/usr/bin/env bash . 
$(dirname "${BASH_SOURCE[0]}")/common.sh $DUB --version | grep -qF 'DUB version' dub-1.40.0/test/test_registry.d000077500000000000000000000040331477246567400164320ustar00rootroot00000000000000#!/usr/bin/env dub /+dub.sdl: dependency "vibe-d:http" version="~>0.9" versions "VibeNoSSL" +/ import std.array; import vibe.core.args; import vibe.core.core; import vibe.core.path; import vibe.http.fileserver; import vibe.http.router; import vibe.http.server; /* Provide a special API File Handler as Vibe.d's builtin serveStaticFiles doesn't deal well with query params. This will blindly check if the requestURI payload exists on the filesystem and if so, return the file. It replaces `?` with `__` for Windows compatibility. Params: skip = initial part of the requestURI to skip over folder = the base directory from which to serve API requests from */ auto apiFileHandler(string skip, string folder) { import std.functional : toDelegate; void handler(HTTPServerRequest req, HTTPServerResponse res) { import std.algorithm : skipOver; import std.path : buildPath; import std.file : exists; // ? 
can't be part of path names on Windows auto requestURI = req.requestURI.replace("?", "__"); requestURI.skipOver(skip); const reqFile = buildPath(folder, requestURI); if (reqFile.exists) { return req.sendFile(res, PosixPath(reqFile)); } } return toDelegate(&handler); } void main(string[] args) { import std.conv; immutable folder = readRequiredOption!string("folder", "Folder to service files from."); immutable port = readRequiredOption!ushort("port", "Port to use"); auto router = new URLRouter; router.get("stop", (HTTPServerRequest req, HTTPServerResponse res){ res.writeVoidBody; exitEventLoop(); }); router.get("/packages/gitcompatibledubpackage/1.0.2.zip", (req, res) { res.writeBody("", HTTPStatus.badGateway); }); router.get("*", folder.serveStaticFiles); router.get("/fallback/*", folder.serveStaticFiles(new HTTPFileServerSettings("/fallback"))); router.get("/api/*", apiFileHandler("/", folder)); router.get("/fallback/api/*", apiFileHandler("/fallback/", folder)); listenHTTP(text("localhost:", port), router); runApplication(); } dub-1.40.0/test/timeout.sh000077500000000000000000000025641477246567400154070ustar00rootroot00000000000000#!/usr/bin/env bash set -euo pipefail . $(dirname "${BASH_SOURCE[0]}")/common.sh PORT=$(getRandomPort) log ' Testing unconnectable registry' if timeout 1s $DUB fetch dub --skip-registry=all --registry=http://localhost:$PORT; then die $LINENO 'Fetching from unconnectable registry should fail.' elif [ $? -eq 124 ]; then die $LINENO 'Fetching from unconnectable registry should fail immediately.' fi log ' Testing non-responding registry' cat | nc -l $PORT >/dev/null & PID=$! if timeout 10s $DUB fetch dub --skip-registry=all --registry=http://localhost:$PORT; then die $LINENO 'Fetching from non-responding registry should fail.' elif [ $? -eq 124 ]; then die $LINENO 'Fetching from non-responding registry should time-out within 8s.' 
fi kill $PID 2>/dev/null || true log ' Testing too slow registry' { res=$(printf 'HTTP/1.1 200 OK\r Server: dummy\r Content-Type: application/json\r Content-Length: 2\r \r {}') for i in $(seq 0 $((${#res} - 1))); do echo -n "${res:$i:1}" || true sleep 1 done } | tail -n +1 | nc -l $PORT >/dev/null & PID=$! if timeout 10s time $DUB fetch dub --skip-registry=all --registry=http://localhost:$PORT; then die $LINENO 'Fetching from too slow registry should fail.' elif [ $? -eq 124 ]; then die $LINENO 'Fetching from too slow registry should time-out within 8s.' fi kill $PID 2>/dev/null || true dub-1.40.0/test/unittest-cov-ctfe.sh000077500000000000000000000002211477246567400172700ustar00rootroot00000000000000#!/usr/bin/env bash DIR=$(dirname "${BASH_SOURCE[0]}") . "$DIR"/common.sh "$DUB" test --root "$DIR"/unittest-cov-ctfe --build=unittest-cov-ctfe dub-1.40.0/test/unittest-cov-ctfe/000077500000000000000000000000001477246567400167365ustar00rootroot00000000000000dub-1.40.0/test/unittest-cov-ctfe/.no_build000066400000000000000000000000001477246567400205200ustar00rootroot00000000000000dub-1.40.0/test/unittest-cov-ctfe/.no_run000066400000000000000000000000001477246567400202250ustar00rootroot00000000000000dub-1.40.0/test/unittest-cov-ctfe/.no_test000066400000000000000000000000001477246567400204000ustar00rootroot00000000000000dub-1.40.0/test/unittest-cov-ctfe/dub.sdl000066400000000000000000000001031477246567400202060ustar00rootroot00000000000000name "test" version "1.0.0" targetType "library" dflags "-cov=100" dub-1.40.0/test/unittest-cov-ctfe/source/000077500000000000000000000000001477246567400202365ustar00rootroot00000000000000dub-1.40.0/test/unittest-cov-ctfe/source/mod.d000066400000000000000000000002641477246567400211640ustar00rootroot00000000000000module mod; int f(int x) { return x + 1; } int g(int x) { return x * 2; } enum gResult = g(12); // execute g() at compile-time unittest { assert(f(11) + gResult == 36); } 
dub-1.40.0/test/use-c-sources/000077500000000000000000000000001477246567400160505ustar00rootroot00000000000000dub-1.40.0/test/use-c-sources/.min_frontend000066400000000000000000000000061477246567400205270ustar00rootroot000000000000002.101 dub-1.40.0/test/use-c-sources/.no_build_gdc000066400000000000000000000000001477246567400204470ustar00rootroot00000000000000dub-1.40.0/test/use-c-sources/dub.json000066400000000000000000000004121477246567400175120ustar00rootroot00000000000000{ "toolchainRequirements": { "dub": ">=1.29.0", "frontend": ">=2.101.0" }, "cSourcePaths": [ "source" ], "description": "A minimal D application using ImportC and C sources in a dub project.", "name": "use-c-sources" }dub-1.40.0/test/use-c-sources/source/000077500000000000000000000000001477246567400173505ustar00rootroot00000000000000dub-1.40.0/test/use-c-sources/source/app.d000066400000000000000000000012241477246567400202740ustar00rootroot00000000000000/** Some test code for ImportC */ module app.d; import std.algorithm.iteration; import std.array; import std.conv; import std.exception; import std.range; import std.stdio; import std.string; import some_c_code; void main() { doCCalls(); } /// Call C functions in zstd_binding module void doCCalls() { relatedCode(42); ulong a = 3; uint b = 4; auto rs0 = multiplyU64byU32(&a, &b); writeln("Result of multiplyU64byU32(3,4) = ", rs0); uint[8] arr = [1, 2, 3, 4, 5, 6, 7, 8]; auto rs1 = multiplyAndAdd(arr.ptr, arr.length, 3); writeln("Result of sum(%s*3) = ".format(arr), rs1); foreach (n; 1 .. 20) { writeln("fac(", n, ") = ", fac(n)); } } dub-1.40.0/test/use-c-sources/source/some_c_code.c000066400000000000000000000011021477246567400217450ustar00rootroot00000000000000 #include #include "some_c_code.h" // Some test functions follow to proof that C code can be called from D main() void relatedCode(size_t aNumber) { printf("Hallo! This is some output from C code! 
(%d)\n", aNumber); } uint64_t multiplyU64byU32(uint64_t*a, uint32_t*b) { return *a * *b; } uint64_t multiplyAndAdd(uint32_t*arr, size_t arrlen, uint32_t mult) { uint64_t acc = 0; for (int i = 0; i < arrlen; i++) { acc += arr[i]*mult; } return acc; } uint64_t fac(uint64_t n) { if (n > 1) return n * fac(n-1); else return 1; } dub-1.40.0/test/use-c-sources/source/some_c_code.h000066400000000000000000000004551477246567400217640ustar00rootroot00000000000000#ifndef SOME_C_CODE_H #define SOME_C_CODE_H #include #include extern void relatedCode(size_t aNumber); extern uint64_t multiplyU64byU32(uint64_t*a, uint32_t*b); extern uint64_t multiplyAndAdd(uint32_t*arr, size_t arrlen, uint32_t mult); extern uint64_t fac(uint64_t n); #endifdub-1.40.0/test/version-filters-diamond/000077500000000000000000000000001477246567400201175ustar00rootroot00000000000000dub-1.40.0/test/version-filters-diamond/.gitignore000066400000000000000000000005341477246567400221110ustar00rootroot00000000000000.dub docs.json __dummy.html docs/ issue1262-version-inheritance-diamond issue1262-version-inheritance-diamond.so issue1262-version-inheritance-diamond.dylib issue1262-version-inheritance-diamond.dll issue1262-version-inheritance-diamond.a issue1262-version-inheritance-diamond.lib issue1262-version-inheritance-diamond-test-* *.exe *.o *.obj *.lst dub-1.40.0/test/version-filters-diamond/.no_build000066400000000000000000000000001477246567400217010ustar00rootroot00000000000000dub-1.40.0/test/version-filters-diamond/.no_run000066400000000000000000000000001477246567400214060ustar00rootroot00000000000000dub-1.40.0/test/version-filters-diamond/.no_test000066400000000000000000000000001477246567400215610ustar00rootroot00000000000000dub-1.40.0/test/version-filters-diamond/daughter/000077500000000000000000000000001477246567400217225ustar00rootroot00000000000000dub-1.40.0/test/version-filters-diamond/daughter/.gitignore000066400000000000000000000002101477246567400237030ustar00rootroot00000000000000.dub docs.json 
__dummy.html docs/ daughter.so daughter.dylib daughter.dll daughter.a daughter.lib daughter-test-* *.exe *.o *.obj *.lst dub-1.40.0/test/version-filters-diamond/daughter/dub.sdl000066400000000000000000000002661477246567400232040ustar00rootroot00000000000000name "daughter" versions "Daughter" debugVersions "dDaughter" x:versionFilters "Daughter" "Parent" x:debugVersionFilters "dDaughter" "dParent" dependency "diamond" path="../diamond" dub-1.40.0/test/version-filters-diamond/daughter/source/000077500000000000000000000000001477246567400232225ustar00rootroot00000000000000dub-1.40.0/test/version-filters-diamond/daughter/source/dummy.d000066400000000000000000000016101477246567400245200ustar00rootroot00000000000000module daughter.dummy; version (Parent) {} else static assert(0, "Expected Parent to be set"); // via dependency version (Daughter) {} else static assert(0, "Expected Daughter to be set"); // local version (Son) static assert(0, "Expected Son to not be set"); version (Diamond) static assert(0, "Expected Diamond to not be set"); debug (dParent) {} else static assert(0, "Expected dParent to be set"); // via dependency debug (dDaughter) {} else static assert(0, "Expected dDaughter to be set"); // local debug (dSon) {} else static assert(0, "Expected dSon to be set"); // via diamond dependency debug (dDiamond) static assert(0, "Expected dDiamond to not be set"); version (Have_daughter) static assert(0, "Expected Have_daughter to not be set"); version (Have_son) static assert(0, "Expected Have_son to not be set"); version (Have_diamond) static assert(0, "Expected Have_diamond to not be set"); dub-1.40.0/test/version-filters-diamond/diamond/000077500000000000000000000000001477246567400215325ustar00rootroot00000000000000dub-1.40.0/test/version-filters-diamond/diamond/.gitignore000066400000000000000000000002021477246567400235140ustar00rootroot00000000000000.dub docs.json __dummy.html docs/ diamond.so diamond.dylib diamond.dll diamond.a diamond.lib diamond-test-* *.exe 
*.o *.obj *.lst dub-1.40.0/test/version-filters-diamond/diamond/dub.sdl000066400000000000000000000000731477246567400230100ustar00rootroot00000000000000name "diamond" versions "Diamond" debugVersions "dDiamond" dub-1.40.0/test/version-filters-diamond/diamond/source/000077500000000000000000000000001477246567400230325ustar00rootroot00000000000000dub-1.40.0/test/version-filters-diamond/diamond/source/dummy.d000066400000000000000000000015461477246567400243400ustar00rootroot00000000000000module diamond.dummy; template hasVersion(string v) { mixin("version ("~v~") enum hasVersion = true; else enum hasVersion = false;"); } template hasDebugVersion(string v) { mixin("debug ("~v~") enum hasDebugVersion = true; else enum hasDebugVersion = false;"); } // checking inference here version (Parent) {} else static assert(0, "Expected Parent to be set"); version (Daughter) {} else static assert(0, "Expected Daughter to be set"); static assert(!hasVersion!"Son"); static assert(!hasVersion!"Diamond"); debug (dParent) {} else static assert(0, "Expected dParent to be set"); static assert(!hasDebugVersion!"dDaughter"); debug (dSon) {} else static assert(0, "Expected dSon to be set"); static assert(!hasDebugVersion!"dDiamond"); static assert(!hasVersion!"Have_daughter"); static assert(!hasVersion!"Have_son"); static assert(!hasVersion!"Have_diamond"); dub-1.40.0/test/version-filters-diamond/dub.sdl000066400000000000000000000003051477246567400213730ustar00rootroot00000000000000name "version-filters-diamond" versions "Parent" debugVersions "dParent" x:versionFilters "Parent" x:debugVersionFilters "dParent" dependency "daughter" path="daughter" dependency "son" path="son" dub-1.40.0/test/version-filters-diamond/son/000077500000000000000000000000001477246567400207165ustar00rootroot00000000000000dub-1.40.0/test/version-filters-diamond/son/.gitignore000066400000000000000000000001521477246567400227040ustar00rootroot00000000000000.dub docs.json __dummy.html docs/ son.so son.dylib son.dll son.a 
son.lib son-test-* *.exe *.o *.obj *.lst dub-1.40.0/test/version-filters-diamond/son/dub.sdl000066400000000000000000000002121477246567400221670ustar00rootroot00000000000000name "son" versions "Son" debugVersions "dSon" x:versionFilters "Son" x:debugVersionFilters "dSon" dependency "diamond" path="../diamond" dub-1.40.0/test/version-filters-diamond/son/source/000077500000000000000000000000001477246567400222165ustar00rootroot00000000000000dub-1.40.0/test/version-filters-diamond/son/source/dummy.d000066400000000000000000000016411477246567400235200ustar00rootroot00000000000000module son.dummy; version (Parent) {} else static assert(0, "Expected Parent to be set"); // via dependency version (Daughter) {} else static assert(0, "Expected Daughter to not be set"); // via diamond dependency version (Son) {} else static assert(0, "Expected Son to be set"); // local version (Diamond) static assert(0, "Expected Diamond to not be set"); debug (dParent) {} else static assert(0, "Expected dParent to be set"); // via dependency debug (dDaughter) static assert(0, "Expected dDaughter to not be set"); // via diamond dependency debug (dSon) {} else static assert(0, "Expected dSon to be set"); // local debug (dDiamond) static assert(0, "Expected dDiamond to not be set"); version (Have_daughter) static assert(0, "Expected Have_daughter to not be set"); version (Have_son) static assert(0, "Expected Have_son to not be set"); version (Have_diamond) static assert(0, "Expected Have_diamond to not be set"); dub-1.40.0/test/version-filters-diamond/source/000077500000000000000000000000001477246567400214175ustar00rootroot00000000000000dub-1.40.0/test/version-filters-diamond/source/app.d000066400000000000000000000017171477246567400223520ustar00rootroot00000000000000version (Parent) {} else static assert(0, "Expected Parent to be set"); // local version (Daughter) {} else static assert(0, "Expected Daughter to not be set"); // via dependency version (Son) {} else static assert(0, "Expected Son to 
not be set"); // via dependency version (Diamond) static assert(0, "Expected Diamond to not be set"); // unused by dependencies debug (dParent) {} else static assert(0, "Expected dParent to be set"); // local debug (dDaughter) {} else static assert(0, "Expected dDaughter to be set"); // via dependency debug (dSon) {} else static assert(0, "Expected dSon to not be set"); // via dependency debug (dDiamond) static assert(0, "Expected dDiamond to not be set"); // unused by dependencies version (Have_daugther) static assert(0, "Expected Have_daugther to not be set"); version (Have_son) static assert(0, "Expected Have_son to not be set"); version (Have_diamond) static assert(0, "Expected Have_diamond to not be set"); void main() { } dub-1.40.0/test/version-filters-none/000077500000000000000000000000001477246567400174435ustar00rootroot00000000000000dub-1.40.0/test/version-filters-none/.gitignore000066400000000000000000000005341477246567400214350ustar00rootroot00000000000000.dub docs.json __dummy.html docs/ issue1262-version-inheritance-diamond issue1262-version-inheritance-diamond.so issue1262-version-inheritance-diamond.dylib issue1262-version-inheritance-diamond.dll issue1262-version-inheritance-diamond.a issue1262-version-inheritance-diamond.lib issue1262-version-inheritance-diamond-test-* *.exe *.o *.obj *.lst dub-1.40.0/test/version-filters-none/.no_build000066400000000000000000000000001477246567400212250ustar00rootroot00000000000000dub-1.40.0/test/version-filters-none/.no_run000066400000000000000000000000001477246567400207320ustar00rootroot00000000000000dub-1.40.0/test/version-filters-none/.no_test000066400000000000000000000000001477246567400211050ustar00rootroot00000000000000dub-1.40.0/test/version-filters-none/dub.sdl000066400000000000000000000001361477246567400207210ustar00rootroot00000000000000name "version-filters-none" versions "Parent" debugVersions "dParent" x:versionFilters "none" 
dub-1.40.0/test/version-filters-none/source/000077500000000000000000000000001477246567400207435ustar00rootroot00000000000000dub-1.40.0/test/version-filters-none/source/app.d000066400000000000000000000002311477246567400216640ustar00rootroot00000000000000version (Parent) static assert(0, "Expected Parent to not be set"); debug (dParent) static assert(0, "Expected dParent to not be set"); void main() { } dub-1.40.0/test/version-filters-source-dep/000077500000000000000000000000001477246567400205525ustar00rootroot00000000000000dub-1.40.0/test/version-filters-source-dep/.gitignore000066400000000000000000000005341477246567400225440ustar00rootroot00000000000000.dub docs.json __dummy.html docs/ issue1262-version-inheritance-diamond issue1262-version-inheritance-diamond.so issue1262-version-inheritance-diamond.dylib issue1262-version-inheritance-diamond.dll issue1262-version-inheritance-diamond.a issue1262-version-inheritance-diamond.lib issue1262-version-inheritance-diamond-test-* *.exe *.o *.obj *.lst dub-1.40.0/test/version-filters-source-dep/.no_build000066400000000000000000000000001477246567400223340ustar00rootroot00000000000000dub-1.40.0/test/version-filters-source-dep/.no_run000066400000000000000000000000001477246567400220410ustar00rootroot00000000000000dub-1.40.0/test/version-filters-source-dep/.no_test000066400000000000000000000000001477246567400222140ustar00rootroot00000000000000dub-1.40.0/test/version-filters-source-dep/dub.sdl000066400000000000000000000002161477246567400220270ustar00rootroot00000000000000name "version-filters-source-dep" versions "Parent" debugVersions "dParent" x:versionFilters "none" dependency "source-dep" path="source-dep" dub-1.40.0/test/version-filters-source-dep/source-dep/000077500000000000000000000000001477246567400226205ustar00rootroot00000000000000dub-1.40.0/test/version-filters-source-dep/source-dep/dub.sdl000066400000000000000000000003241477246567400240750ustar00rootroot00000000000000name "source-dep" versions "SourceDep" 
debugVersions "dSourceDep" # filter of sourceOnly libs are merged with dependents x:versionFilters "SourceDep" x:debugVersionFilters "dSourceDep" targetType "sourceLibrary" dub-1.40.0/test/version-filters-source-dep/source-dep/source/000077500000000000000000000000001477246567400241205ustar00rootroot00000000000000dub-1.40.0/test/version-filters-source-dep/source-dep/source/dummy.d000066400000000000000000000004761477246567400254270ustar00rootroot00000000000000module sourcedep.dummy; version (Parent) static assert(0, "Expected Parent to not be set"); version (SourceDep) {} else static assert(0, "Expected SourceDep to be set"); debug (dParent) static assert(0, "Expected dParent to not be set"); debug (dSourceDep) {} else static assert(0, "Expected dSourceDep to be set"); dub-1.40.0/test/version-filters-source-dep/source/000077500000000000000000000000001477246567400220525ustar00rootroot00000000000000dub-1.40.0/test/version-filters-source-dep/source/app.d000066400000000000000000000004661477246567400230050ustar00rootroot00000000000000version (Parent) static assert(0, "Expected Parent to not be set"); version (SourceDep) {} else static assert(0, "Expected SourceDep to be set"); debug (dParent) static assert(0, "Expected dParent to not be set"); debug (dSourceDep) {} else static assert(0, "Expected dSourceDep to be set"); void main() { } dub-1.40.0/test/version-filters.sh000077500000000000000000000005311477246567400170440ustar00rootroot00000000000000#!/usr/bin/env bash . 
$(dirname "${BASH_SOURCE[0]}")/common.sh $DUB build --root="$CURR_DIR/version-filters" --filter-versions $DUB build --root="$CURR_DIR/version-filters-diamond" --filter-versions $DUB build --root="$CURR_DIR/version-filters-source-dep" --filter-versions $DUB build --root="$CURR_DIR/version-filters-none" --filter-versions dub-1.40.0/test/version-filters/000077500000000000000000000000001477246567400165065ustar00rootroot00000000000000dub-1.40.0/test/version-filters/.gitignore000066400000000000000000000005341477246567400205000ustar00rootroot00000000000000.dub docs.json __dummy.html docs/ issue1262-version-inheritance-diamond issue1262-version-inheritance-diamond.so issue1262-version-inheritance-diamond.dylib issue1262-version-inheritance-diamond.dll issue1262-version-inheritance-diamond.a issue1262-version-inheritance-diamond.lib issue1262-version-inheritance-diamond-test-* *.exe *.o *.obj *.lst dub-1.40.0/test/version-filters/.no_build000066400000000000000000000000001477246567400202700ustar00rootroot00000000000000dub-1.40.0/test/version-filters/.no_run000066400000000000000000000000001477246567400177750ustar00rootroot00000000000000dub-1.40.0/test/version-filters/.no_test000066400000000000000000000000001477246567400201500ustar00rootroot00000000000000dub-1.40.0/test/version-filters/daughter/000077500000000000000000000000001477246567400203115ustar00rootroot00000000000000dub-1.40.0/test/version-filters/daughter/.gitignore000066400000000000000000000002101477246567400222720ustar00rootroot00000000000000.dub docs.json __dummy.html docs/ daughter.so daughter.dylib daughter.dll daughter.a daughter.lib daughter-test-* *.exe *.o *.obj *.lst dub-1.40.0/test/version-filters/daughter/dub.sdl000066400000000000000000000002171477246567400215670ustar00rootroot00000000000000name "daughter" versions "Daughter" debugVersions "dDaughter" x:versionFilters "Daughter" "Parent" x:debugVersionFilters "dDaughter" "dParent" 
dub-1.40.0/test/version-filters/daughter/source/000077500000000000000000000000001477246567400216115ustar00rootroot00000000000000dub-1.40.0/test/version-filters/daughter/source/dummy.d000066400000000000000000000011301477246567400231040ustar00rootroot00000000000000module daughter.dummy; version (Parent) {} else static assert(0, "Expected Parent to be set"); version (Daughter) {} else static assert(0, "Expected Daughter to be set"); version (Son) static assert(0, "Expected Son to not be set"); debug (dParent) {} else static assert(0, "Expected dParent to be set"); debug (dDaughter) {} else static assert(0, "Expected dDaughter to be set"); debug (dSon) static assert(0, "Expected dSon to not be set"); version (Have_daughter) static assert(0, "Expected Have_daughter to not be set"); version (Have_son) static assert(0, "Expected Have_son to not be set"); dub-1.40.0/test/version-filters/dub.sdl000066400000000000000000000002751477246567400177700ustar00rootroot00000000000000name "version-filters" versions "Parent" debugVersions "dParent" x:versionFilters "Parent" x:debugVersionFilters "dParent" dependency "daughter" path="daughter" dependency "son" path="son" dub-1.40.0/test/version-filters/son/000077500000000000000000000000001477246567400173055ustar00rootroot00000000000000dub-1.40.0/test/version-filters/son/.gitignore000066400000000000000000000001521477246567400212730ustar00rootroot00000000000000.dub docs.json __dummy.html docs/ son.so son.dylib son.dll son.a son.lib son-test-* *.exe *.o *.obj *.lst dub-1.40.0/test/version-filters/son/dub.sdl000066400000000000000000000001431477246567400205610ustar00rootroot00000000000000name "son" versions "Son" debugVersions "dSon" x:versionFilters "Son" x:debugVersionFilters "dSon" dub-1.40.0/test/version-filters/son/source/000077500000000000000000000000001477246567400206055ustar00rootroot00000000000000dub-1.40.0/test/version-filters/son/source/dummy.d000066400000000000000000000011131477246567400221010ustar00rootroot00000000000000module 
son.dummy; version (Parent) static assert(0, "Expected Parent to not be set"); version (Daughter) static assert(0, "Expected Daughter to not be set"); version (Son) {} else static assert(0, "Expected Son to be set"); debug (dParent) static assert(0, "Expected dParent to not be set"); debug (dDaughter) static assert(0, "Expected dDaughter to not be set"); debug (dSon) {} else static assert(0, "Expected dSon to be set"); version (Have_daughter) static assert(0, "Expected Have_daughter to not be set"); version (Have_son) static assert(0, "Expected Have_son to not be set"); dub-1.40.0/test/version-filters/source/000077500000000000000000000000001477246567400200065ustar00rootroot00000000000000dub-1.40.0/test/version-filters/source/app.d000066400000000000000000000012631477246567400207350ustar00rootroot00000000000000version (Parent) {} else static assert(0, "Expected Parent to be set"); // local version (Daughter) {} else static assert(0, "Expected Daughter to be set"); // via dependency version (Son) {} else static assert(0, "Expected Son to be set"); // via dependency debug (dParent) {} else static assert(0, "Expected dParent to be set"); // local debug (dDaughter) {} else static assert(0, "Expected dDaughter to be set"); // via dependency debug (dSon) {} else static assert(0, "Expected dSon to be set"); // via dependency version (Have_daugther) static assert(0, "Expected Have_daugther to not be set"); version (Have_son) static assert(0, "Expected Have_son to not be set"); void main() { } dub-1.40.0/test/version-spec.sh000077500000000000000000000046131477246567400163330ustar00rootroot00000000000000#!/usr/bin/env bash . 
$(dirname "${BASH_SOURCE[0]}")/common.sh DUBPKGPATH=${DPATH+"$DPATH/dub/packages/dub"} DUBPKGPATH=${DUBPKGPATH:-"$HOME/.dub/packages/dub"} $DUB add-local "$CURR_DIR/version-spec/newfoo" $DUB add-local "$CURR_DIR/version-spec/oldfoo" [[ $($DUB describe foo | grep path | head -n 1) == *"/newfoo/"* ]] [[ $($DUB describe foo@1.0.0 | grep path | head -n 1) == *"/newfoo/"* ]] [[ $($DUB describe foo@0.1.0 | grep path | head -n 1) == *"/oldfoo/"* ]] [[ $($DUB describe foo@'<1.0.0' | grep path | head -n 1) == *"/oldfoo/"* ]] [[ $($DUB describe foo@'>0.1.0' | grep path | head -n 1) == *"/newfoo/"* ]] [[ $($DUB describe foo@'>0.2.0' | grep path | head -n 1) == *"/newfoo/"* ]] [[ $($DUB describe foo@'<=0.2.0' | grep path | head -n 1) == *"/oldfoo/"* ]] [[ $($DUB describe foo@'*' | grep path | head -n 1) == *"/newfoo/"* ]] [[ $($DUB describe foo@'>0.0.1 <2.0.0' | grep path | head -n 1) == *"/newfoo/"* ]] [[ $($DUB test foo | tail -n +1 | head -n 1) == *"/newfoo/" ]] [[ $($DUB test foo@1.0.0 | tail -n +1 | head -n 1) == *"/newfoo/" ]] [[ $($DUB test foo@0.1.0 | tail -n +1 | head -n 1) == *"/oldfoo/" ]] [[ $($DUB lint foo | tail -n 1) == *"/newfoo/" ]] [[ $($DUB lint foo@1.0.0 | tail -n 1) == *"/newfoo/" ]] [[ $($DUB lint foo@0.1.0 | tail -n 1) == *"/oldfoo/" ]] [[ $($DUB generate cmake foo | tail -n +1 | head -n 1) == *"/newfoo/" ]] [[ $($DUB generate cmake foo@1.0.0 | tail -n +1 | head -n 1) == *"/newfoo/" ]] [[ $($DUB generate cmake foo@0.1.0 | tail -n +1 | head -n 1) == *"/oldfoo/" ]] [[ $($DUB build -n foo | tail -n +1 | head -n 1) == *"/newfoo/" ]] [[ $($DUB build -n foo@1.0.0 | tail -n +1 | head -n 1) == *"/newfoo/" ]] [[ $($DUB build -n foo@0.1.0 | tail -n +1 | head -n 1) == *"/oldfoo/" ]] [[ $($DUB run -n foo | tail -n 1) == 'new-foo' ]] [[ $($DUB run -n foo@1.0.0 | tail -n 1) == 'new-foo' ]] [[ $($DUB run -n foo@0.1.0 | tail -n 1) == 'old-foo' ]] [[ $($DUB list foo | wc -l) == '4' ]] [[ $($DUB list foo@0.1.0 | wc -l) == '3' ]] [[ $($DUB list foo@'>0.1.0' | head -n 2 | 
tail -n 1) == *"/newfoo"* ]] $DUB remove-local "$CURR_DIR/version-spec/newfoo" $DUB remove-local "$CURR_DIR/version-spec/oldfoo" $DUB fetch dub@1.9.0 && [ -d $DUBPKGPATH/1.9.0/dub ] $DUB fetch dub=1.10.0 && [ -d $DUBPKGPATH/1.10.0/dub ] $DUB remove dub@1.9.0 $DUB remove dub=1.10.0 if [ -d $DUBPKGPATH/1.9.0/dub ] || [ -d $DUBPKGPATH/1.10.0/dub ]; then die $LINENO 'Failed to remove specified versions' fi dub-1.40.0/test/version-spec/000077500000000000000000000000001477246567400157705ustar00rootroot00000000000000dub-1.40.0/test/version-spec/.no_build000066400000000000000000000000001477246567400175520ustar00rootroot00000000000000dub-1.40.0/test/version-spec/.no_run000066400000000000000000000000001477246567400172570ustar00rootroot00000000000000dub-1.40.0/test/version-spec/.no_test000066400000000000000000000000001477246567400174320ustar00rootroot00000000000000dub-1.40.0/test/version-spec/newfoo/000077500000000000000000000000001477246567400172655ustar00rootroot00000000000000dub-1.40.0/test/version-spec/newfoo/dub.sdl000066400000000000000000000000631477246567400205420ustar00rootroot00000000000000name "foo" version "1.0.0" targetType "executable" dub-1.40.0/test/version-spec/newfoo/source/000077500000000000000000000000001477246567400205655ustar00rootroot00000000000000dub-1.40.0/test/version-spec/newfoo/source/app.d000066400000000000000000000000661477246567400215140ustar00rootroot00000000000000import std.stdio; void main() { writeln("new-foo"); } dub-1.40.0/test/version-spec/oldfoo/000077500000000000000000000000001477246567400172525ustar00rootroot00000000000000dub-1.40.0/test/version-spec/oldfoo/dub.sdl000066400000000000000000000000631477246567400205270ustar00rootroot00000000000000name "foo" version "0.1.0" targetType "executable" 
dub-1.40.0/test/version-spec/oldfoo/source/000077500000000000000000000000001477246567400205525ustar00rootroot00000000000000dub-1.40.0/test/version-spec/oldfoo/source/app.d000066400000000000000000000000661477246567400215010ustar00rootroot00000000000000import std.stdio; void main() { writeln("old-foo"); } dub-1.40.0/test/win32_default.d000066400000000000000000000016221477246567400161670ustar00rootroot00000000000000/+ dub.json: { "name": "win32_default", "configurations": [ { "name": "Default", "versions": [ "Default" ] }, { "name": "MsCoff", "versions": [ "MsCoff" ] }, { "name": "MsCoff64", "versions": [ "MsCoff", "Is64" ] } ] } +/ module dynlib.app; pragma(msg, "Frontend: ", __VERSION__); // Object format should match the expectation version (MsCoff) { // Should be a 32 bit build version (Is64) enum expSize = 8; else enum expSize = 4; enum expFormat = "coff"; } else version (Default) { enum expSize = 4; enum expFormat = __VERSION__ >= 2099 ? "coff" : "omf"; } else { static assert(false, "Missing version flag!"); } enum actFormat = __traits(getTargetInfo, "objectFormat"); static assert(actFormat == expFormat); static assert((int*).sizeof == expSize); dub-1.40.0/test/win32_default.script.d000066400000000000000000000032751477246567400175000ustar00rootroot00000000000000/+ dub.json: { "name": "win32_default_test" } +/ module win32_default.script; int main() { import std.stdio; version (Windows) { version (DigitalMars) enum disabled = null; else enum disabled = "DMD as the host compiler"; } else enum disabled = "Windows"; static if (disabled) { writeln("Test `win32_default` requires " ~ disabled); return 0; } else { import std.algorithm; import std.path; import std.process; const dir = __FILE_FULL_PATH__.dirName(); const file = buildPath(dir, "win32_default.d"); const dub = environment.get("DUB", buildPath(dirName(dir), "bin", "dub.exe")); const dmd = environment.get("DMD", "dmd"); int exitCode; void runTest(scope const string[] cmd) { const result = execute(cmd); if 
(result.status || result.output.canFind("Failed")) { writefln("\n> %-(%s %)", cmd); writeln("==========================================================="); writeln(result.output); writeln("==========================================================="); writeln("Last command failed with exit code ", result.status, '\n'); exitCode = 1; } } // Test without --arch runTest([ dub, "build", "--compiler", dmd, "--config", "MsCoff64", "--single", file, ]); // Test with different --arch const string[2][] tests = [ [ "x86", "Default" ], [ "x86_omf", "MsCoff" ], [ "x86_mscoff", "MsCoff" ], [ "x86_64", "MsCoff64" ], ]; foreach (string[2] test; tests) { const arch = test[0]; const config = test[1]; runTest([ dub, "build", "--compiler", dmd, "--arch", arch, "--config", config, "--single", file, ]); } return exitCode; } }
`, cmd, nameEscape); writefln(``, cmd, nameEscape); writeln(); writeln(name); writeln(); writeln(`