==> xgboost-3.0.0/.clang-format <==
---
Language: Cpp
# BasedOnStyle: Google
AccessModifierOffset: -1
AlignAfterOpenBracket: Align
AlignArrayOfStructures: None
AlignConsecutiveMacros: None
AlignConsecutiveAssignments: None
AlignConsecutiveBitFields: None
AlignConsecutiveDeclarations: None
AlignEscapedNewlines: Left
AlignOperands: Align
AlignTrailingComments: true
AllowAllArgumentsOnNextLine: true
AllowAllParametersOfDeclarationOnNextLine: true
AllowShortEnumsOnASingleLine: true
AllowShortBlocksOnASingleLine: Never
AllowShortCaseLabelsOnASingleLine: false
AllowShortFunctionsOnASingleLine: All
AllowShortLambdasOnASingleLine: Inline
AllowShortIfStatementsOnASingleLine: WithoutElse
AllowShortLoopsOnASingleLine: true
AlwaysBreakAfterDefinitionReturnType: None
AlwaysBreakAfterReturnType: None
AlwaysBreakBeforeMultilineStrings: true
AlwaysBreakTemplateDeclarations: Yes
AttributeMacros:
  - __capability
BinPackArguments: true
BinPackParameters: true
BraceWrapping:
  AfterCaseLabel: false
  AfterClass: false
  AfterControlStatement: Never
  AfterEnum: false
  AfterFunction: false
  AfterNamespace: false
  AfterObjCDeclaration: false
  AfterStruct: false
  AfterUnion: false
  AfterExternBlock: false
  BeforeCatch: false
  BeforeElse: false
  BeforeLambdaBody: false
  BeforeWhile: false
  IndentBraces: false
  SplitEmptyFunction: true
  SplitEmptyRecord: true
  SplitEmptyNamespace: true
BreakBeforeBinaryOperators: None
BreakBeforeConceptDeclarations: true
BreakBeforeBraces: Attach
BreakBeforeInheritanceComma: false
BreakInheritanceList: BeforeColon
BreakBeforeTernaryOperators: true
BreakConstructorInitializersBeforeComma: false
BreakConstructorInitializers: BeforeColon
BreakAfterJavaFieldAnnotations: false
BreakStringLiterals: true
ColumnLimit: 100
CommentPragmas: '^ IWYU pragma:'
QualifierAlignment: Leave
CompactNamespaces: false
ConstructorInitializerIndentWidth: 4
ContinuationIndentWidth: 4
Cpp11BracedListStyle: true
DeriveLineEnding: true
DerivePointerAlignment: true
DisableFormat: false
EmptyLineAfterAccessModifier: Never
EmptyLineBeforeAccessModifier: LogicalBlock
ExperimentalAutoDetectBinPacking: false
PackConstructorInitializers: NextLine
BasedOnStyle: ''
ConstructorInitializerAllOnOneLineOrOnePerLine: false
AllowAllConstructorInitializersOnNextLine: true
FixNamespaceComments: true
ForEachMacros:
  - foreach
  - Q_FOREACH
  - BOOST_FOREACH
IfMacros:
  - KJ_IF_MAYBE
IncludeBlocks: Regroup
IncludeCategories:
  - Regex: '^<ext/.*\.h>'
    Priority: 2
    SortPriority: 0
    CaseSensitive: false
  - Regex: '^<.*\.h>'
    Priority: 1
    SortPriority: 0
    CaseSensitive: false
  - Regex: '^<.*'
    Priority: 2
    SortPriority: 0
    CaseSensitive: false
  - Regex: '.*'
    Priority: 3
    SortPriority: 0
    CaseSensitive: false
IncludeIsMainRegex: '([-_](test|unittest))?$'
IncludeIsMainSourceRegex: ''
IndentAccessModifiers: false
IndentCaseLabels: true
IndentCaseBlocks: false
IndentGotoLabels: true
IndentPPDirectives: None
IndentExternBlock: AfterExternBlock
IndentRequires: false
IndentWidth: 2
IndentWrappedFunctionNames: false
InsertTrailingCommas: None
JavaScriptQuotes: Leave
JavaScriptWrapImports: true
KeepEmptyLinesAtTheStartOfBlocks: false
LambdaBodyIndentation: Signature
MacroBlockBegin: ''
MacroBlockEnd: ''
MaxEmptyLinesToKeep: 1
NamespaceIndentation: None
ObjCBinPackProtocolList: Never
ObjCBlockIndentWidth: 2
ObjCBreakBeforeNestedBlockParam: true
ObjCSpaceAfterProperty: false
ObjCSpaceBeforeProtocolList: true
PenaltyBreakAssignment: 2
PenaltyBreakBeforeFirstCallParameter: 1
PenaltyBreakComment: 300
PenaltyBreakFirstLessLess: 120
PenaltyBreakString: 1000
PenaltyBreakTemplateDeclaration: 10
PenaltyExcessCharacter: 1000000
PenaltyReturnTypeOnItsOwnLine: 200
PenaltyIndentedWhitespace: 0
PointerAlignment: Left
PPIndentWidth: -1
RawStringFormats:
  - Language: Cpp
    Delimiters:
      - cc
      - CC
      - cpp
      - Cpp
      - CPP
      - 'c++'
      - 'C++'
    CanonicalDelimiter: ''
    BasedOnStyle: google
  - Language: TextProto
    Delimiters:
      - pb
      - PB
      - proto
      - PROTO
    EnclosingFunctions:
      - EqualsProto
      - EquivToProto
      - PARSE_PARTIAL_TEXT_PROTO
      - PARSE_TEST_PROTO
      - PARSE_TEXT_PROTO
      - ParseTextOrDie
      - ParseTextProtoOrDie
      - ParseTestProto
      - ParsePartialTestProto
    CanonicalDelimiter: pb
    BasedOnStyle: google
ReferenceAlignment: Pointer
ReflowComments: true
ShortNamespaceLines: 1
SortIncludes: CaseSensitive
SortJavaStaticImport: Before
SortUsingDeclarations: true
SpaceAfterCStyleCast: false
SpaceAfterLogicalNot: false
SpaceAfterTemplateKeyword: true
SpaceBeforeAssignmentOperators: true
SpaceBeforeCaseColon: false
SpaceBeforeCpp11BracedList: false
SpaceBeforeCtorInitializerColon: true
SpaceBeforeInheritanceColon: true
SpaceBeforeParens: ControlStatements
SpaceAroundPointerQualifiers: Default
SpaceBeforeRangeBasedForLoopColon: true
SpaceInEmptyBlock: false
SpaceInEmptyParentheses: false
SpacesBeforeTrailingComments: 2
SpacesInAngles: Never
SpacesInConditionalStatement: false
SpacesInContainerLiterals: true
SpacesInCStyleCastParentheses: false
SpacesInLineCommentPrefix:
  Minimum: 1
  Maximum: -1
SpacesInParentheses: false
SpacesInSquareBrackets: false
SpaceBeforeSquareBrackets: false
BitFieldColonSpacing: Both
Standard: Auto
StatementAttributeLikeMacros:
  - Q_EMIT
StatementMacros:
  - Q_UNUSED
  - QT_REQUIRE_VERSION
TabWidth: 8
UseCRLF: false
UseTab: Never
WhitespaceSensitiveMacros:
  - STRINGIZE
  - PP_STRINGIZE
  - BOOST_PP_STRINGIZE
  - NS_SWIFT_NAME
  - CF_SWIFT_NAME
...
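This configuration is picked up automatically when clang-format is invoked with --style=file from inside the repository. A minimal sketch for applying it locally, assuming clang-format is installed and run from the repository root (the file path is illustrative):

  # Format one file in place using the nearest .clang-format.
  clang-format -i --style=file src/learner.cc
  # Or format every C++ source under src/ and include/.
  find src include \( -name '*.cc' -o -name '*.h' -o -name '*.cu' -o -name '*.cuh' \) \
    -exec clang-format -i --style=file {} +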
xgboost-3.0.0/.clang-tidy000066400000000000000000000037171476511272400152540ustar00rootroot00000000000000Checks: 'modernize-*,-modernize-use-nodiscard,-modernize-concat-nested-namespaces,-modernize-make-*,-modernize-use-auto,-modernize-raw-string-literal,-modernize-avoid-c-arrays,-modernize-use-trailing-return-type,google-*,-google-default-arguments,-clang-diagnostic-#pragma-messages,readability-identifier-naming' CheckOptions: - { key: readability-identifier-naming.ClassCase, value: CamelCase } - { key: readability-identifier-naming.StructCase, value: CamelCase } - { key: readability-identifier-naming.TypeAliasCase, value: CamelCase } - { key: readability-identifier-naming.TypedefCase, value: CamelCase } - { key: readability-identifier-naming.TypeTemplateParameterCase, value: CamelCase } - { key: readability-identifier-naming.MemberCase, value: lower_case } - { key: readability-identifier-naming.PrivateMemberSuffix, value: '_' } - { key: readability-identifier-naming.ProtectedMemberSuffix, value: '_' } - { key: readability-identifier-naming.EnumCase, value: CamelCase } - { key: readability-identifier-naming.EnumConstant, value: CamelCase } - { key: readability-identifier-naming.EnumConstantPrefix, value: k } - { key: readability-identifier-naming.GlobalConstantCase, value: CamelCase } - { key: readability-identifier-naming.GlobalConstantPrefix, value: k } - { key: readability-identifier-naming.StaticConstantCase, value: CamelCase } - { key: readability-identifier-naming.StaticConstantPrefix, value: k } - { key: readability-identifier-naming.ConstexprVariableCase, value: CamelCase } - { key: readability-identifier-naming.ConstexprVariablePrefix, value: k } - { key: readability-identifier-naming.FunctionCase, value: CamelCase } - { key: readability-identifier-naming.NamespaceCase, value: lower_case } xgboost-3.0.0/.editorconfig000066400000000000000000000002151476511272400156630ustar00rootroot00000000000000root = true [*] charset=utf-8 indent_style = space indent_size = 2 insert_final_newline = true [*.py] indent_style = space indent_size = 4 xgboost-3.0.0/.gitattributes000066400000000000000000000004151476511272400161030ustar00rootroot00000000000000* text=auto *.c text eol=lf *.h text eol=lf *.cc text eol=lf *.cuh text eol=lf *.cu text eol=lf *.py text eol=lf *.txt text eol=lf *.R text eol=lf *.scala text eol=lf *.java text eol=lf *.sh text eol=lf *.rst text eol=lf *.md text eol=lf *.csv text eol=lfxgboost-3.0.0/.github/000077500000000000000000000000001476511272400145505ustar00rootroot00000000000000xgboost-3.0.0/.github/FUNDING.yml000066400000000000000000000000751476511272400163670ustar00rootroot00000000000000open_collective: xgboost custom: https://xgboost.ai/sponsors xgboost-3.0.0/.github/ISSUE_TEMPLATE.md000066400000000000000000000013271476511272400172600ustar00rootroot00000000000000Thanks for participating in the XGBoost community! The issue tracker is used for actionable items such as feature proposals discussion, roadmaps, and bug tracking. Issues that are inactive for a period of time may get closed. We adopt this policy so that we won't lose track of actionable issues that may fall at the bottom of the pile. Feel free to reopen a new one if you feel there is an additional problem that needs attention when an old one gets closed. For bug reports, to help the developer act on the issues, please include a description of your environment, preferably a minimum script to reproduce the problem. 
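CI drives these checks through ops/pipeline/run-clang-tidy.sh (see lint.yml further below). For a quick local spot check, a hedged sketch, assuming a compile_commands.json has been generated with CMake (the file path is illustrative):

  cmake -B build -DCMAKE_EXPORT_COMPILE_COMMANDS=ON
  clang-tidy -p build src/learner.cc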
==> xgboost-3.0.0/.editorconfig <==
root = true

[*]
charset=utf-8
indent_style = space
indent_size = 2
insert_final_newline = true

[*.py]
indent_style = space
indent_size = 4

==> xgboost-3.0.0/.gitattributes <==
* text=auto
*.c text eol=lf
*.h text eol=lf
*.cc text eol=lf
*.cuh text eol=lf
*.cu text eol=lf
*.py text eol=lf
*.txt text eol=lf
*.R text eol=lf
*.scala text eol=lf
*.java text eol=lf
*.sh text eol=lf
*.rst text eol=lf
*.md text eol=lf
*.csv text eol=lf
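If an existing checkout predates these line-ending rules, git can reapply them with the standard renormalization step:

  git add --renormalize .
  git status   # review files whose line endings changed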
==> xgboost-3.0.0/.github/FUNDING.yml <==
open_collective: xgboost
custom: https://xgboost.ai/sponsors

==> xgboost-3.0.0/.github/ISSUE_TEMPLATE.md <==
Thanks for participating in the XGBoost community! The issue tracker is used for actionable items such as discussion of feature proposals, roadmaps, and bug tracking.

Issues that are inactive for a period of time may get closed. We adopt this policy so that we won't lose track of actionable issues that may fall to the bottom of the pile. Feel free to open a new issue if you feel there is an additional problem that needs attention when an old one gets closed.

For bug reports, to help the developers act on the issue, please include a description of your environment, preferably with a minimal script that reproduces the problem.

For feature proposals, list clear, small actionable items so we can track the progress of the change.

==> xgboost-3.0.0/.github/lock.yml <==
# Configuration for lock-threads - https://github.com/dessant/lock-threads

# Number of days of inactivity before a closed issue or pull request is locked
daysUntilLock: 90

# Issues and pull requests with these labels will not be locked. Set to `[]` to disable
exemptLabels:
  - feature-request

# Label to add before locking, such as `outdated`. Set to `false` to disable
lockLabel: false

# Comment to post before locking. Set to `false` to disable
lockComment: false

# Assign `resolved` as the reason for locking. Set to `false` to disable
setLockReason: true

# Limit to only `issues` or `pulls`
# only: issues

# Optionally, specify configuration settings just for `issues` or `pulls`
# issues:
#   exemptLabels:
#     - help-wanted
#   lockLabel: outdated

# pulls:
#   daysUntilLock: 30

# Repository to extend settings from
# _extends: repo

==> xgboost-3.0.0/.github/runs-on.yml <==
# Custom images with CUDA toolkit installed
# See ops/packer for instructions for building the images
images:
  linux-amd64:
    platform: "linux"
    arch: "x64"
    owner: "492475357299"  # XGBoost CI
    name: "xgboost-ci-runs-on-linux-amd64-*"
  linux-arm64:
    platform: "linux"
    arch: "arm64"
    owner: "492475357299"  # XGBoost CI
    name: "xgboost-ci-runs-on-linux-arm64-*"
  windows-amd64:
    platform: "windows"
    arch: "x64"
    owner: "492475357299"  # XGBoost CI
    name: "xgboost-ci-runs-on-windows-*"
runners:
  linux-amd64-cpu:
    cpu: 16
    family: ["c7i-flex", "c7i", "c7a", "c5", "c5a"]
    image: linux-amd64
  linux-amd64-gpu:
    family: ["g4dn.xlarge"]
    image: linux-amd64
  linux-amd64-mgpu:
    family: ["g4dn.12xlarge"]
    image: linux-amd64
  linux-arm64-cpu:
    cpu: 16
    family: ["c6g", "c7g"]
    image: linux-arm64
  windows-gpu:
    family: ["g4dn.2xlarge"]
    image: windows-amd64
  windows-cpu:
    cpu: 32
    family: ["c7i-flex", "c7i", "c7a", "c5", "c5a"]
    image: windows-amd64

==> xgboost-3.0.0/.github/workflows/doc.yml <==
name: XGBoost-docs

on: [push, pull_request]

env:
  BRANCH_NAME: >-
    ${{ github.event.pull_request.number && 'PR-' }}${{ github.event.pull_request.number || github.ref_name }}

jobs:
  build-jvm-docs:
    name: Build docs for JVM packages
    runs-on:
      - runs-on=${{ github.run_id }}
      - runner=linux-amd64-cpu
      - tag=doc-build-jvm-docs
    steps:
      # Restart Docker daemon so that it recognizes the ephemeral disks
      - run: sudo systemctl restart docker
      - uses: actions/checkout@v4
        with:
          submodules: "true"
      - name: Get the git hash for the push event.
        if: ${{ github.event_name == 'push' }}
        shell: bash
        run: |
          echo "HEAD_SHA=${GITHUB_SHA}" >> ${GITHUB_ENV}
      - name: Get the git hash for the PR event.
        if: ${{ github.event_name == 'pull_request' }}
        shell: bash
        run: |
          echo "HEAD_SHA=${{ github.event.pull_request.head.sha }}" >> ${GITHUB_ENV}
      - name: Log into Docker registry (AWS ECR)
        run: bash ops/pipeline/login-docker-registry.sh
      - run: bash ops/pipeline/build-jvm-gpu.sh
      - run: bash ops/pipeline/build-jvm-doc.sh
      - name: Upload JVM doc
        run: |
          # xgboost-docs/{branch}/{commit}/{branch}.tar.bz2
          # branch can be the name of the dmlc/xgboost branch, or `PR-{number}`.
          python3 ops/pipeline/manage-artifacts.py upload \
            --s3-bucket xgboost-docs \
            --prefix ${BRANCH_NAME}/${{ env.HEAD_SHA }} --make-public \
            jvm-packages/${{ env.BRANCH_NAME }}.tar.bz2

  build-r-docs:
    name: Build docs for the R package
    runs-on:
      - runs-on=${{ github.run_id }}
      - runner=linux-amd64-cpu
      - tag=r-tests-build-docs
    steps:
      # Restart Docker daemon so that it recognizes the ephemeral disks
      - run: sudo systemctl restart docker
      - uses: actions/checkout@v4
        with:
          submodules: "true"
      - name: Get the git hash for the push event.
        if: ${{ github.event_name == 'push' }}
        shell: bash
        run: |
          echo "HEAD_SHA=${GITHUB_SHA}" >> ${GITHUB_ENV}
      - name: Get the git hash for the PR event.
        if: ${{ github.event_name == 'pull_request' }}
        shell: bash
        run: |
          echo "HEAD_SHA=${{ github.event.pull_request.head.sha }}" >> ${GITHUB_ENV}
      - name: Log into Docker registry (AWS ECR)
        run: bash ops/pipeline/login-docker-registry.sh
      - run: bash ops/pipeline/build-r-docs.sh
      - name: Upload R doc
        run: |
          python3 ops/pipeline/manage-artifacts.py upload \
            --s3-bucket xgboost-docs \
            --prefix ${BRANCH_NAME}/${{ env.HEAD_SHA }} --make-public \
            r-docs-${{ env.BRANCH_NAME }}.tar.bz2

  trigger-rtd-build:
    needs: [build-jvm-docs]
    name: Trigger Read The Docs build.
    runs-on:
      - runs-on=${{ github.run_id }}
      - runner=linux-amd64-cpu
      - tag=doc-trigger-rtd-build
    steps:
      # Restart Docker daemon so that it recognizes the ephemeral disks
      - run: sudo systemctl restart docker
      - uses: actions/checkout@v4
        with:
          submodules: "true"
      - name: Trigger RTD
        run: bash ops/pipeline/trigger-rtd.sh
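The BRANCH_NAME expression used above (and repeated in several workflows below) resolves differently for pushes and pull requests. A worked example of the two cases:

  # push to the master branch:   github.ref_name = "master"
  #   -> BRANCH_NAME = "master"
  # pull request number 1234:    github.event.pull_request.number = 1234
  #   -> BRANCH_NAME = "PR-1234"  ("PR-" prefix followed by the PR number)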
==> xgboost-3.0.0/.github/workflows/freebsd.yml <==
name: FreeBSD

on: [push, pull_request]

permissions:
  contents: read  # to fetch code (actions/checkout)

concurrency:
  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
  cancel-in-progress: true

jobs:
  test:
    runs-on: ubuntu-latest
    timeout-minutes: 20
    name: A job to run tests in FreeBSD
    steps:
      - uses: actions/checkout@v4
        with:
          submodules: 'true'
      - name: Test in FreeBSD
        id: test
        uses: vmactions/freebsd-vm@v1
        with:
          usesh: true
          prepare: |
            pkg install -y cmake git ninja googletest bash
          run: |
            bash ops/pipeline/test-freebsd.sh

==> xgboost-3.0.0/.github/workflows/i386.yml <==
name: XGBoost-i386-test

on: [push, pull_request]

permissions:
  contents: read  # to fetch code (actions/checkout)

concurrency:
  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
  cancel-in-progress: true

jobs:
  build-32bit:
    name: Build 32-bit
    runs-on:
      - runs-on=${{ github.run_id }}
      - runner=linux-amd64-cpu
      - tag=i386-build-32bit
    steps:
      # Restart Docker daemon so that it recognizes the ephemeral disks
      - run: sudo systemctl restart docker
      - uses: actions/checkout@v4
        with:
          submodules: "true"
      - name: Log into Docker registry (AWS ECR)
        run: bash ops/pipeline/login-docker-registry.sh
      - run: bash ops/pipeline/test-cpp-i386.sh
==> xgboost-3.0.0/.github/workflows/jvm_tests.yml <==
name: XGBoost CI (JVM packages)

on: [push, pull_request]

permissions:
  contents: read  # to fetch code (actions/checkout)

concurrency:
  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
  cancel-in-progress: true

env:
  BRANCH_NAME: >-
    ${{ github.event.pull_request.number && 'PR-' }}${{ github.event.pull_request.number || github.ref_name }}

jobs:
  build-jvm-manylinux2014:
    name: >-
      Build libxgboost4j.so targeting glibc 2.17
      (arch ${{ matrix.arch }}, runner ${{ matrix.runner }})
    runs-on:
      - runs-on
      - runner=${{ matrix.runner }}
      - run-id=${{ github.run_id }}
      - tag=jvm-tests-build-jvm-manylinux2014-${{ matrix.arch }}
    strategy:
      fail-fast: false
      matrix:
        include:
          - arch: aarch64
            runner: linux-arm64-cpu
          - arch: x86_64
            runner: linux-amd64-cpu
    steps:
      # Restart Docker daemon so that it recognizes the ephemeral disks
      - run: sudo systemctl restart docker
      - uses: actions/checkout@v4
        with:
          submodules: "true"
      - name: Log into Docker registry (AWS ECR)
        run: bash ops/pipeline/login-docker-registry.sh
      - run: bash ops/pipeline/build-jvm-manylinux2014.sh ${{ matrix.arch }}

  build-jvm-gpu:
    name: Build libxgboost4j.so with CUDA
    runs-on:
      - runs-on=${{ github.run_id }}
      - runner=linux-amd64-cpu
      - tag=jvm-tests-build-jvm-gpu
    steps:
      # Restart Docker daemon so that it recognizes the ephemeral disks
      - run: sudo systemctl restart docker
      - uses: actions/checkout@v4
        with:
          submodules: "true"
      - name: Log into Docker registry (AWS ECR)
        run: bash ops/pipeline/login-docker-registry.sh
      - run: bash ops/pipeline/build-jvm-gpu.sh
      - name: Stash files
        run: |
          python3 ops/pipeline/manage-artifacts.py upload \
            --s3-bucket ${{ env.RUNS_ON_S3_BUCKET_CACHE }} \
            --prefix cache/${{ github.run_id }}/build-jvm-gpu \
            lib/libxgboost4j.so

  build-jvm-mac:
    name: "Build libxgboost4j.dylib for ${{ matrix.description }}"
    runs-on: ${{ matrix.runner }}
    strategy:
      fail-fast: false
      matrix:
        include:
          - description: "MacOS (Apple Silicon)"
            script: ops/pipeline/build-jvm-macos-apple-silicon.sh
            libname: libxgboost4j_m1.dylib
            runner: macos-14
          - description: "MacOS (Intel)"
            script: ops/pipeline/build-jvm-macos-intel.sh
            libname: libxgboost4j_intel.dylib
            runner: macos-13
    steps:
      - uses: actions/checkout@v4
        with:
          submodules: "true"
      - run: bash ${{ matrix.script }}
      - name: Upload libxgboost4j.dylib
        if: github.ref == 'refs/heads/master' || contains(github.ref, 'refs/heads/release_')
        run: |
          mv -v lib/libxgboost4j.dylib ${{ matrix.libname }}
          python3 ops/pipeline/manage-artifacts.py upload \
            --s3-bucket xgboost-nightly-builds \
            --prefix ${{ env.BRANCH_NAME }}/${{ github.sha }} --make-public \
            ${{ matrix.libname }}
        env:
          AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID_IAM_S3_UPLOADER }}
          AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY_IAM_S3_UPLOADER }}

  build-test-jvm-packages:
    name: Build and test JVM packages (Linux, Scala ${{ matrix.scala_version }})
    runs-on:
      - runs-on=${{ github.run_id }}
      - runner=linux-amd64-cpu
      - tag=jvm-tests-build-test-jvm-packages-scala${{ matrix.scala_version }}
    strategy:
      fail-fast: false
      matrix:
        scala_version: ["2.12", "2.13"]
    steps:
      # Restart Docker daemon so that it recognizes the ephemeral disks
      - run: sudo systemctl restart docker
      - uses: actions/checkout@v4
        with:
          submodules: "true"
      - name: Log into Docker registry (AWS ECR)
        run: bash ops/pipeline/login-docker-registry.sh
      - name: Build and test JVM packages (Scala ${{ matrix.scala_version }})
        run: bash ops/pipeline/build-test-jvm-packages.sh
        env:
          SCALA_VERSION: ${{ matrix.scala_version }}
      - name: Stash files
        run: |
          python3 ops/pipeline/manage-artifacts.py upload \
            --s3-bucket ${{ env.RUNS_ON_S3_BUCKET_CACHE }} \
            --prefix cache/${{ github.run_id }}/build-test-jvm-packages \
            lib/libxgboost4j.so
        if: matrix.scala_version == '2.13'

  build-test-jvm-packages-other-os:
    name: Build and test JVM packages (${{ matrix.os }})
    timeout-minutes: 30
    runs-on: ${{ matrix.os }}
    strategy:
      fail-fast: false
      matrix:
        os: [windows-latest, macos-13]
    steps:
      - uses: actions/checkout@v4
        with:
          submodules: 'true'
      - uses: actions/setup-java@v4
        with:
          distribution: 'temurin'
          java-version: '8'
      - uses: dmlc/xgboost-devops/actions/miniforge-setup@main
        with:
          environment-name: minimal
          environment-file: ops/conda_env/minimal.yml
      - name: Cache Maven packages
        uses: actions/cache@v4
        with:
          path: ~/.m2
          key: ${{ runner.os }}-m2-${{ hashFiles('./jvm-packages/pom.xml') }}
          restore-keys: ${{ runner.os }}-m2-${{ hashFiles('./jvm-packages/pom.xml') }}
      - name: Test XGBoost4J (Core) on macos
        if: matrix.os == 'macos-13'
        run: |
          cd jvm-packages
          mvn test -B -pl :xgboost4j_2.12 -Duse.openmp=OFF
      - name: Test XGBoost4J (Core) on windows
        if: matrix.os == 'windows-latest'
        run: |
          cd jvm-packages
          mvn test -B -pl :xgboost4j_2.12
      - name: Publish artifact xgboost4j.dll to S3
        run: |
          python ops/pipeline/manage-artifacts.py upload `
            --s3-bucket xgboost-nightly-builds `
            --prefix ${{ env.BRANCH_NAME }}/${{ github.sha }} --make-public `
            lib/xgboost4j.dll
        if: |
          (github.ref == 'refs/heads/master' || contains(github.ref, 'refs/heads/release_')) &&
          matrix.os == 'windows-latest'
        env:
          AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID_IAM_S3_UPLOADER }}
          AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY_IAM_S3_UPLOADER }}

  test-jvm-packages-gpu:
    name: Test JVM packages with CUDA (Scala ${{ matrix.scala_version }})
    needs: [build-jvm-gpu]
    runs-on:
      - runs-on=${{ github.run_id }}
      - runner=linux-amd64-mgpu
      - tag=jvm-tests-test-jvm-packages-gpu-scala${{ matrix.scala_version }}
    strategy:
      fail-fast: false
      matrix:
        scala_version: ["2.12", "2.13"]
    steps:
      # Restart Docker daemon so that it recognizes the ephemeral disks
      - run: sudo systemctl restart docker
      - uses: actions/checkout@v4
        with:
          submodules: "true"
      - name: Log into Docker registry (AWS ECR)
        run: bash ops/pipeline/login-docker-registry.sh
      - name: Unstash files
        run: |
          python3 ops/pipeline/manage-artifacts.py download \
            --s3-bucket ${{ env.RUNS_ON_S3_BUCKET_CACHE }} \
            --prefix cache/${{ github.run_id }}/build-jvm-gpu \
            --dest-dir lib \
            libxgboost4j.so
      - run: bash ops/pipeline/test-jvm-gpu.sh
        env:
          SCALA_VERSION: ${{ matrix.scala_version }}

  deploy-jvm-packages:
    name: Deploy JVM packages to S3 (${{ matrix.variant.name }})
    needs: [build-jvm-gpu, build-test-jvm-packages, test-jvm-packages-gpu]
    runs-on:
      - runs-on
      - runner=linux-amd64-cpu
      - run-id=${{ github.run_id }}
      - tag=jvm-tests-deploy-jvm-packages-${{ matrix.variant.name }}-scala${{ matrix.scala_version }}
    strategy:
      fail-fast: false
      matrix:
        variant:
          - name: cpu
            image_repo: xgb-ci.jvm
            artifact_from: build-test-jvm-packages
          - name: gpu
            image_repo: xgb-ci.jvm_gpu_build
            artifact_from: build-jvm-gpu
        scala_version: ['2.12', '2.13']
    steps:
      # Restart Docker daemon so that it recognizes the ephemeral disks
      - run: sudo systemctl restart docker
      - uses: actions/checkout@v4
        with:
          submodules: "true"
      - name: Log into Docker registry (AWS ECR)
        run: bash ops/pipeline/login-docker-registry.sh
      - name: Unstash files
        run: |
          python3 ops/pipeline/manage-artifacts.py download \
            --s3-bucket ${{ env.RUNS_ON_S3_BUCKET_CACHE }} \
            --prefix cache/${{ github.run_id }}/${{ matrix.variant.artifact_from }} \
            --dest-dir lib \
            libxgboost4j.so
          ls -lh lib/libxgboost4j.so
      - name: Deploy JVM packages to S3
        run: |
          bash ops/pipeline/deploy-jvm-packages.sh ${{ matrix.variant.name }} \
            ${{ matrix.variant.image_repo }} ${{ matrix.scala_version }}
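The stash/unstash pattern above passes build artifacts between jobs through the RunsOn S3 cache bucket. A condensed sketch of the round trip, with all options taken from the workflow (the prefix and $RUN_ID are illustrative; in CI they come from ${{ github.run_id }}):

  # Producer job: upload the shared library.
  python3 ops/pipeline/manage-artifacts.py upload \
    --s3-bucket "$RUNS_ON_S3_BUCKET_CACHE" \
    --prefix cache/$RUN_ID/build-jvm-gpu \
    lib/libxgboost4j.so
  # Consumer job: download it back into lib/.
  python3 ops/pipeline/manage-artifacts.py download \
    --s3-bucket "$RUNS_ON_S3_BUCKET_CACHE" \
    --prefix cache/$RUN_ID/build-jvm-gpu \
    --dest-dir lib \
    libxgboost4j.so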
==> xgboost-3.0.0/.github/workflows/lint.yml <==
name: XGBoost CI (Lint)

on: [push, pull_request]

permissions:
  contents: read  # to fetch code (actions/checkout)

concurrency:
  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
  cancel-in-progress: true

env:
  BRANCH_NAME: >-
    ${{ github.event.pull_request.number && 'PR-' }}${{ github.event.pull_request.number || github.ref_name }}

jobs:
  clang-tidy:
    name: Run clang-tidy
    runs-on:
      - runs-on=${{ github.run_id }}
      - runner=linux-amd64-cpu
      - tag=lint-clang-tidy
    steps:
      # Restart Docker daemon so that it recognizes the ephemeral disks
      - run: sudo systemctl restart docker
      - uses: actions/checkout@v4
        with:
          submodules: "true"
      - name: Log into Docker registry (AWS ECR)
        run: bash ops/pipeline/login-docker-registry.sh
      - run: bash ops/pipeline/run-clang-tidy.sh

  python-mypy-lint:
    runs-on: ubuntu-latest
    name: Type and format checks for the Python package
    steps:
      - uses: actions/checkout@v4
        with:
          submodules: 'true'
      - uses: dmlc/xgboost-devops/actions/miniforge-setup@main
        with:
          environment-name: python_lint
          environment-file: ops/conda_env/python_lint.yml
      - name: Run mypy
        shell: bash -el {0}
        run: |
          python ops/script/lint_python.py --format=0 --type-check=1 --pylint=0
      - name: Run formatter
        shell: bash -el {0}
        run: |
          python ops/script/lint_python.py --format=1 --type-check=0 --pylint=0
      - name: Run pylint
        shell: bash -el {0}
        run: |
          python ops/script/lint_python.py --format=0 --type-check=0 --pylint=1

  cpp-lint:
    runs-on: ubuntu-latest
    name: Code linting for C++
    steps:
      - uses: actions/checkout@v4
        with:
          submodules: 'true'
      - uses: actions/setup-python@v5
        with:
          python-version: "3.10"
          architecture: 'x64'
      - name: Install Python packages
        run: |
          python -m pip install wheel setuptools cmakelint cpplint==1.6.1 pylint
      - name: Run lint
        run: |
          python3 ops/script/lint_cpp.py
          bash ops/script/lint_cmake.sh

  lintr:
    runs-on: ubuntu-latest
    name: Run R linters on Ubuntu
    env:
      R_REMOTES_NO_ERRORS_FROM_WARNINGS: true
    steps:
      - uses: actions/checkout@v4
        with:
          submodules: 'true'
      - uses: r-lib/actions/setup-r@v2
        with:
          r-version: "release"
      - name: Cache R packages
        uses: actions/cache@v4
        with:
          path: ${{ env.R_LIBS_USER }}
          key: ${{ runner.os }}-r-release-7-${{ hashFiles('R-package/DESCRIPTION') }}
          restore-keys: ${{ runner.os }}-r-release-7-${{ hashFiles('R-package/DESCRIPTION') }}
      - name: Install dependencies
        shell: Rscript {0}
        run: |
          source("./R-package/tests/helper_scripts/install_deps.R")
      - name: Run lintr
        run: |
          MAKEFLAGS="-j$(nproc)" R CMD INSTALL R-package/
          Rscript ops/script/lint_r.R $(pwd)
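The same checks can be reproduced locally; the commands below mirror the workflow steps one-to-one (assuming the conda environment from ops/conda_env/python_lint.yml is active):

  python ops/script/lint_python.py --format=1 --type-check=0 --pylint=0   # formatter
  python ops/script/lint_python.py --format=0 --type-check=1 --pylint=0   # mypy
  python ops/script/lint_python.py --format=0 --type-check=0 --pylint=1   # pylint
  python3 ops/script/lint_cpp.py && bash ops/script/lint_cmake.sh         # C++ / CMake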
==> xgboost-3.0.0/.github/workflows/main.yml <==
name: XGBoost CI

on: [push, pull_request]

permissions:
  contents: read  # to fetch code (actions/checkout)

concurrency:
  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
  cancel-in-progress: true

env:
  BRANCH_NAME: >-
    ${{ github.event.pull_request.number && 'PR-' }}${{ github.event.pull_request.number || github.ref_name }}

jobs:
  build-cpu:
    name: Build CPU
    runs-on:
      - runs-on=${{ github.run_id }}
      - runner=linux-amd64-cpu
      - tag=main-build-cpu
    steps:
      # Restart Docker daemon so that it recognizes the ephemeral disks
      - run: sudo systemctl restart docker
      - uses: actions/checkout@v4
        with:
          submodules: "true"
      - name: Log into Docker registry (AWS ECR)
        run: bash ops/pipeline/login-docker-registry.sh
      - run: bash ops/pipeline/build-cpu.sh
      - name: Stash CLI executable
        run: |
          python3 ops/pipeline/manage-artifacts.py upload \
            --s3-bucket ${{ env.RUNS_ON_S3_BUCKET_CACHE }} \
            --prefix cache/${{ github.run_id }}/build-cpu \
            ./xgboost

  build-cpu-arm64:
    name: Build CPU ARM64 + manylinux_2_28_aarch64 wheel
    runs-on:
      - runs-on=${{ github.run_id }}
      - runner=linux-arm64-cpu
      - tag=build-cpu-arm64
    steps:
      # Restart Docker daemon so that it recognizes the ephemeral disks
      - run: sudo systemctl restart docker
      - uses: actions/checkout@v4
        with:
          submodules: "true"
      - name: Log into Docker registry (AWS ECR)
        run: bash ops/pipeline/login-docker-registry.sh
      - run: bash ops/pipeline/build-cpu-arm64.sh
      - name: Stash files
        run: |
          python3 ops/pipeline/manage-artifacts.py upload \
            --s3-bucket ${{ env.RUNS_ON_S3_BUCKET_CACHE }} \
            --prefix cache/${{ github.run_id }}/build-cpu-arm64 \
            ./xgboost python-package/dist/*.whl

  build-cuda:
    name: Build CUDA + manylinux_2_28_x86_64 wheel
    runs-on:
      - runs-on=${{ github.run_id }}
      - runner=linux-amd64-cpu
      - tag=main-build-cuda
    steps:
      # Restart Docker daemon so that it recognizes the ephemeral disks
      - run: sudo systemctl restart docker
      - uses: actions/checkout@v4
        with:
          submodules: "true"
      - name: Log into Docker registry (AWS ECR)
        run: bash ops/pipeline/login-docker-registry.sh
      - run: |
          bash ops/pipeline/build-cuda.sh xgb-ci.gpu_build_rockylinux8 disable-rmm
      - name: Stash files
        run: |
          python3 ops/pipeline/manage-artifacts.py upload \
            --s3-bucket ${{ env.RUNS_ON_S3_BUCKET_CACHE }} \
            --prefix cache/${{ github.run_id }}/build-cuda \
            build/testxgboost ./xgboost python-package/dist/*.whl

  build-cuda-with-rmm:
    name: Build CUDA with RMM
    runs-on:
      - runs-on=${{ github.run_id }}
      - runner=linux-amd64-cpu
      - tag=main-build-cuda-with-rmm
    steps:
      # Restart Docker daemon so that it recognizes the ephemeral disks
      - run: sudo systemctl restart docker
      - uses: actions/checkout@v4
        with:
          submodules: "true"
      - name: Log into Docker registry (AWS ECR)
        run: bash ops/pipeline/login-docker-registry.sh
      - run: |
          bash ops/pipeline/build-cuda.sh xgb-ci.gpu_build_rockylinux8 enable-rmm
      - name: Stash files
        run: |
          python3 ops/pipeline/manage-artifacts.py upload \
            --s3-bucket ${{ env.RUNS_ON_S3_BUCKET_CACHE }} \
            --prefix cache/${{ github.run_id }}/build-cuda-with-rmm \
            build/testxgboost

  build-cuda-with-rmm-dev:
    name: Build CUDA with RMM (dev)
    runs-on:
      - runs-on=${{ github.run_id }}
      - runner=linux-amd64-cpu
      - tag=main-build-cuda-with-rmm-dev
    steps:
      # Restart Docker daemon so that it recognizes the ephemeral disks
      - run: sudo systemctl restart docker
      - uses: actions/checkout@v4
        with:
          submodules: "true"
      - name: Log into Docker registry (AWS ECR)
        run: bash ops/pipeline/login-docker-registry.sh
      - run: |
          bash ops/pipeline/build-cuda.sh \
            xgb-ci.gpu_build_rockylinux8_dev_ver enable-rmm

  build-manylinux2014:
    name: Build manylinux2014_${{ matrix.arch }} wheel
    runs-on:
      - runs-on
      - runner=${{ matrix.runner }}
      - run-id=${{ github.run_id }}
      - tag=main-build-manylinux2014-${{ matrix.arch }}
    strategy:
      fail-fast: false
      matrix:
        include:
          - arch: aarch64
            runner: linux-arm64-cpu
          - arch: x86_64
            runner: linux-amd64-cpu
    steps:
      # Restart Docker daemon so that it recognizes the ephemeral disks
      - run: sudo systemctl restart docker
      - uses: actions/checkout@v4
        with:
          submodules: "true"
      - name: Log into Docker registry (AWS ECR)
        run: bash ops/pipeline/login-docker-registry.sh
      - run: bash ops/pipeline/build-manylinux2014.sh ${{ matrix.arch }}

  build-gpu-rpkg:
    name: Build GPU-enabled R package
    runs-on:
      - runs-on=${{ github.run_id }}
      - runner=linux-amd64-cpu
      - tag=main-build-gpu-rpkg
    steps:
      # Restart Docker daemon so that it recognizes the ephemeral disks
      - run: sudo systemctl restart docker
      - uses: actions/checkout@v4
        with:
          submodules: "true"
      - name: Log into Docker registry (AWS ECR)
        run: bash ops/pipeline/login-docker-registry.sh
      - run: bash ops/pipeline/build-gpu-rpkg.sh

  test-cpp-gpu:
    name: >-
      Run Google Tests with GPUs
      (Suite ${{ matrix.suite }}, Runner ${{ matrix.runner }})
    needs: [build-cuda, build-cuda-with-rmm]
    runs-on:
      - runs-on
      - runner=${{ matrix.runner }}
      - run-id=${{ github.run_id }}
      - tag=main-test-cpp-gpu-${{ matrix.suite }}
    timeout-minutes: 30
    strategy:
      fail-fast: false
      matrix:
        include:
          - suite: gpu
            runner: linux-amd64-gpu
            artifact_from: build-cuda
          - suite: gpu-rmm
            runner: linux-amd64-gpu
            artifact_from: build-cuda-with-rmm
          - suite: mgpu
            runner: linux-amd64-mgpu
            artifact_from: build-cuda
    steps:
      # Restart Docker daemon so that it recognizes the ephemeral disks
      - run: sudo systemctl restart docker
      - uses: actions/checkout@v4
        with:
          submodules: "true"
      - name: Log into Docker registry (AWS ECR)
        run: bash ops/pipeline/login-docker-registry.sh
      - name: Unstash gtest
        run: |
          python3 ops/pipeline/manage-artifacts.py download \
            --s3-bucket ${{ env.RUNS_ON_S3_BUCKET_CACHE }} \
            --prefix cache/${{ github.run_id }}/${{ matrix.artifact_from }} \
            --dest-dir build \
            testxgboost
          chmod +x build/testxgboost
      - run: bash ops/pipeline/test-cpp-gpu.sh ${{ matrix.suite }}

  test-python-wheel:
    name: Run Python tests (${{ matrix.description }})
    needs: [build-cuda, build-cpu-arm64]
    runs-on:
      - runs-on
      - runner=${{ matrix.runner }}
      - run-id=${{ github.run_id }}
      - tag=main-test-python-wheel-${{ matrix.description }}
    timeout-minutes: 60
    strategy:
      fail-fast: false
      matrix:
        include:
          - description: single-gpu
            image_repo: xgb-ci.gpu
            suite: gpu
            runner: linux-amd64-gpu
            artifact_from: build-cuda
          - description: multiple-gpu
            image_repo: xgb-ci.gpu
            suite: mgpu
            runner: linux-amd64-mgpu
            artifact_from: build-cuda
          - description: cpu-amd64
            image_repo: xgb-ci.cpu
            suite: cpu
            runner: linux-amd64-cpu
            artifact_from: build-cuda
          - description: cpu-arm64
            image_repo: xgb-ci.aarch64
            suite: cpu-arm64
            runner: linux-arm64-cpu
            artifact_from: build-cpu-arm64
    steps:
      # Restart Docker daemon so that it recognizes the ephemeral disks
      - run: sudo systemctl restart docker
      - uses: actions/checkout@v4
        with:
          submodules: "true"
      - name: Log into Docker registry (AWS ECR)
        run: bash ops/pipeline/login-docker-registry.sh
      - name: Unstash Python wheel
        run: |
          python3 ops/pipeline/manage-artifacts.py download \
            --s3-bucket ${{ env.RUNS_ON_S3_BUCKET_CACHE }} \
            --prefix cache/${{ github.run_id }}/${{ matrix.artifact_from }} \
            --dest-dir wheelhouse \
            *.whl xgboost
          mv -v wheelhouse/xgboost .
          chmod +x ./xgboost
      - name: Run Python tests, ${{ matrix.description }}
        run: bash ops/pipeline/test-python-wheel.sh ${{ matrix.suite }} ${{ matrix.image_repo }}

==> xgboost-3.0.0/.github/workflows/misc.yml <==
name: XGBoost CI (misc)

on: [push, pull_request]

permissions:
  contents: read  # to fetch code (actions/checkout)

concurrency:
  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
  cancel-in-progress: true

env:
  BRANCH_NAME: >-
    ${{ github.event.pull_request.number && 'PR-' }}${{ github.event.pull_request.number || github.ref_name }}

jobs:
  gtest-cpu-nonomp:
    name: Test Google C++ unittest (CPU Non-OMP)
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
        with:
          submodules: 'true'
      - name: Install system packages
        run: |
          sudo apt-get install -y --no-install-recommends ninja-build
      - name: Build and test XGBoost
        run: bash ops/pipeline/build-test-cpu-nonomp.sh

  c-api-demo:
    name: Test installing XGBoost lib + building the C API demo
    runs-on: ubuntu-latest
    defaults:
      run:
        shell: bash -l {0}
    steps:
      - uses: actions/checkout@v4
        with:
          submodules: 'true'
      - uses: dmlc/xgboost-devops/actions/miniforge-setup@main
        with:
          environment-name: cpp_test
          environment-file: ops/conda_env/cpp_test.yml
      - name: Build and run C API demo with shared
        run: bash ops/pipeline/test-c-api-demo.sh

==> xgboost-3.0.0/.github/workflows/python_tests.yml <==
name: XGBoost CI (Python tests)

on: [push, pull_request]

permissions:
  contents: read  # to fetch code (actions/checkout)

defaults:
  run:
    shell: bash -l {0}

concurrency:
  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
  cancel-in-progress: true

jobs:
  python-sdist-test:
    runs-on: ${{ matrix.os }}
    name: Test installing Python XGBoost from the source distribution (${{ matrix.os }})
    strategy:
      fail-fast: false
      matrix:
        os: [macos-13, windows-latest, ubuntu-latest]
    steps:
      - uses: actions/checkout@v4
        with:
          submodules: 'true'
      - uses: dmlc/xgboost-devops/actions/miniforge-setup@main
        with:
          environment-name: sdist_test
          environment-file: ops/conda_env/sdist_test.yml
      - name: Install extra package for MacOS
        run: |
          mamba install -c conda-forge llvm-openmp
        if: matrix.os == 'macos-13'
      - name: Build and install XGBoost
        run: bash ops/pipeline/test-python-sdist.sh

  python-tests-on-macos:
    name: Test XGBoost Python package on macos-13
    runs-on: macos-13
    timeout-minutes: 60
    steps:
      - uses: actions/checkout@v4
        with:
          submodules: 'true'
      - uses: dmlc/xgboost-devops/actions/miniforge-setup@main
        with:
          environment-name: macos_cpu_test
          environment-file: ops/conda_env/macos_cpu_test.yml
      - run: bash ops/pipeline/test-python-macos.sh

  python-system-installation-on-ubuntu:
    name: Test XGBoost Python package System Installation on Ubuntu
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
        with:
          submodules: 'true'
      - name: Set up Python 3.10
        uses: actions/setup-python@v5
        with:
          python-version: "3.10"
      - run: bash ops/pipeline/test-python-with-sysprefix.sh
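A hedged sketch of what the source-distribution check in the first job amounts to, assuming it is run from the repository root with the build package installed (the authoritative steps live in ops/pipeline/test-python-sdist.sh):

  cd python-package
  python -m build --sdist                        # produce dist/xgboost-*.tar.gz
  python -m pip install -v dist/xgboost-*.tar.gz
  python -c "import xgboost; print(xgboost.__version__)"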
==> xgboost-3.0.0/.github/workflows/python_wheels_macos.yml <==
name: Build Python wheels targeting MacOS

on: [push, pull_request]

permissions:
  contents: read  # to fetch code (actions/checkout)

defaults:
  run:
    shell: bash -l {0}

concurrency:
  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
  cancel-in-progress: true

env:
  BRANCH_NAME: >-
    ${{ github.event.pull_request.number && 'PR-' }}${{ github.event.pull_request.number || github.ref_name }}

jobs:
  python-wheels-macos:
    name: Build wheel for ${{ matrix.platform_id }}
    runs-on: ${{ matrix.os }}
    strategy:
      fail-fast: false
      matrix:
        include:
          - os: macos-13
            platform_id: macosx_x86_64
          - os: macos-14
            platform_id: macosx_arm64
    steps:
      - uses: actions/checkout@v4
        with:
          submodules: 'true'
      - name: Set up homebrew
        uses: Homebrew/actions/setup-homebrew@13341b4d5e459a98bbe0b122b12c11bf90518cc8
      - name: Install libomp
        run: brew install libomp
      - uses: dmlc/xgboost-devops/actions/miniforge-setup@main
        with:
          environment-name: minimal
          environment-file: ops/conda_env/minimal.yml
      - name: Build wheels
        run: bash ops/pipeline/build-python-wheels-macos.sh ${{ matrix.platform_id }} ${{ github.sha }}
      - name: Try installing XGBoost
        run: |
          python -m pip install -vvv wheelhouse/*.whl
      - name: Upload Python wheel
        if: github.ref == 'refs/heads/master' || contains(github.ref, 'refs/heads/release_')
        run: |
          python ops/pipeline/manage-artifacts.py upload \
            --s3-bucket xgboost-nightly-builds \
            --prefix ${{ env.BRANCH_NAME }}/${{ github.sha }} --make-public \
            wheelhouse/*.whl
        env:
          AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID_IAM_S3_UPLOADER }}
          AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY_IAM_S3_UPLOADER }}
==> xgboost-3.0.0/.github/workflows/r_nold.yml <==
# Run expensive R tests with the help of rhub. Only triggered by a pull request review
# See discussion at https://github.com/dmlc/xgboost/pull/6378
name: XGBoost-R-noLD

on:
  pull_request_review_comment:
    types: [created]

permissions:
  contents: read  # to fetch code (actions/checkout)

concurrency:
  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
  cancel-in-progress: true

jobs:
  test-R-noLD:
    if: github.event.comment.body == '/gha run r-nold-test' && contains('OWNER,MEMBER,COLLABORATOR', github.event.comment.author_association)
    timeout-minutes: 120
    runs-on: ubuntu-latest
    container:
      image: rhub/debian-gcc-devel-nold
    steps:
      - name: Install git and system packages
        shell: bash
        run: |
          apt update && apt install libcurl4-openssl-dev libssl-dev libssh2-1-dev libgit2-dev libglpk-dev libxml2-dev libharfbuzz-dev libfribidi-dev git -y
      - uses: actions/checkout@v4
        with:
          submodules: 'true'
      - name: Install dependencies
        shell: bash -l {0}
        run: |
          /tmp/R-devel/bin/Rscript -e "source('./R-package/tests/helper_scripts/install_deps.R')"
      - name: Run R tests
        shell: bash
        run: |
          cd R-package && \
            /tmp/R-devel/bin/R CMD INSTALL . && \
            /tmp/R-devel/bin/R -q -e "library(testthat); setwd('tests'); source('testthat.R')"

==> xgboost-3.0.0/.github/workflows/r_tests.yml <==
name: XGBoost-R-Tests

on: [push, pull_request]

env:
  GITHUB_PAT: ${{ secrets.GITHUB_TOKEN }}

permissions:
  contents: read  # to fetch code (actions/checkout)

concurrency:
  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
  cancel-in-progress: true

jobs:
  test-Rpkg:
    runs-on: ${{ matrix.os }}
    name: Test R on OS ${{ matrix.os }}, R ${{ matrix.r }}, Compiler ${{ matrix.compiler }}, Build ${{ matrix.build }}
    strategy:
      fail-fast: false
      matrix:
        include:
          - os: windows-latest
            r: release
            compiler: mingw
            build: autotools
          - os: ubuntu-latest
            r: release
            compiler: none
            build: cmake
    env:
      R_REMOTES_NO_ERRORS_FROM_WARNINGS: true
    steps:
      - name: Install system dependencies
        run: |
          sudo apt update
          sudo apt install libcurl4-openssl-dev libssl-dev libssh2-1-dev libgit2-dev libglpk-dev libxml2-dev libharfbuzz-dev libfribidi-dev librsvg2-dev librsvg2-2
        if: matrix.os == 'ubuntu-latest'
      - uses: actions/checkout@v4
        with:
          submodules: 'true'
      - uses: r-lib/actions/setup-r@v2
        with:
          r-version: ${{ matrix.r }}
      - name: Cache R packages
        uses: actions/cache@v4
        with:
          path: ${{ env.R_LIBS_USER }}
          key: ${{ runner.os }}-r-${{ matrix.r }}-8-${{ hashFiles('R-package/DESCRIPTION') }}
          restore-keys: ${{ runner.os }}-r-${{ matrix.r }}-8-${{ hashFiles('R-package/DESCRIPTION') }}
      - uses: actions/setup-python@v5
        with:
          python-version: "3.10"
          architecture: 'x64'
      - uses: r-lib/actions/setup-tinytex@v2
      - name: Install dependencies
        shell: Rscript {0}
        run: |
          source("./R-package/tests/helper_scripts/install_deps.R")
      - name: Test R
        run: |
          python ops/script/test_r_package.py --compiler='${{ matrix.compiler }}' --build-tool="${{ matrix.build }}" --task=check
        if: matrix.compiler != 'none'
      - name: Test R
        run: |
          python ops/script/test_r_package.py --build-tool="${{ matrix.build }}" --task=check
        if: matrix.compiler == 'none'

  test-R-on-Debian:
    name: Test R package on Debian
    runs-on: ubuntu-latest
    container:
      image: rhub/debian-gcc-release
    steps:
      - name: Install system dependencies
        run: |
          # Must run before checkout to have the latest git installed.
          # No need to add pandoc, the container has it figured out.
          apt update && apt install libcurl4-openssl-dev libssl-dev libssh2-1-dev libgit2-dev libglpk-dev libxml2-dev libharfbuzz-dev libfribidi-dev git librsvg2-dev librsvg2-2 -y
      - name: Trust git cloning project sources
        run: |
          git config --global --add safe.directory "${GITHUB_WORKSPACE}"
      - uses: actions/checkout@v4
        with:
          submodules: 'true'
      - name: Install dependencies
        shell: bash -l {0}
        run: |
          Rscript -e "source('./R-package/tests/helper_scripts/install_deps.R')"
      - name: Test R
        shell: bash -l {0}
        run: |
          python3 ops/script/test_r_package.py --r=/usr/bin/R --build-tool=autotools --task=check
      - uses: dorny/paths-filter@v3
        id: changes
        with:
          filters: |
            r_package:
              - 'R-package/**'
      - name: Run document check
        if: steps.changes.outputs.r_package == 'true'
        run: |
          python3 ops/script/test_r_package.py --r=/usr/bin/R --task=doc
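To reproduce the R package check outside CI, the workflow's own entry points can be invoked directly; a sketch assuming R, Python 3, and the dependencies from install_deps.R are available locally:

  Rscript -e "source('./R-package/tests/helper_scripts/install_deps.R')"
  python3 ops/script/test_r_package.py --build-tool=cmake --task=check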
==> xgboost-3.0.0/.github/workflows/scorecards.yml <==
name: Scorecards supply-chain security
on:
  # Only the default branch is supported.
  branch_protection_rule:
  schedule:
    - cron: '17 2 * * 6'
  push:
    branches: [ "master" ]

# Declare default permissions as read only.
permissions: read-all

jobs:
  analysis:
    name: Scorecards analysis
    runs-on: ubuntu-latest
    permissions:
      # Needed to upload the results to code-scanning dashboard.
      security-events: write
      # Used to receive a badge.
      id-token: write
    steps:
      - name: "Checkout code"
        uses: actions/checkout@v4
        with:
          persist-credentials: false
      - name: "Run analysis"
        uses: ossf/scorecard-action@62b2cac7ed8198b15735ed49ab1e5cf35480ba46  # v2.4.0
        with:
          results_file: results.sarif
          results_format: sarif
          # Publish the results for public repositories to enable scorecard badges. For more details, see
          # https://github.com/ossf/scorecard-action#publishing-results.
          # For private repositories, `publish_results` will automatically be set to `false`, regardless
          # of the value entered here.
          publish_results: true
      # Upload the results as artifacts (optional). Commenting out will disable uploads of run results in SARIF
      # format to the repository Actions tab.
      - name: "Upload artifact"
        uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882  # v4.4.3
        with:
          name: SARIF file
          path: results.sarif
          retention-days: 5
      # Upload the results to GitHub's code scanning dashboard.
      - name: "Upload to code-scanning"
        uses: github/codeql-action/upload-sarif@83a02f7883b12e0e4e1a146174f5e2292a01e601  # v2.16.4
        with:
          sarif_file: results.sarif

==> xgboost-3.0.0/.github/workflows/sycl_tests.yml <==
name: XGBoost CI (oneAPI)

on: [push, pull_request]

permissions:
  contents: read  # to fetch code (actions/checkout)

defaults:
  run:
    shell: bash -l {0}

concurrency:
  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
  cancel-in-progress: true

env:
  BRANCH_NAME: >-
    ${{ github.event.pull_request.number && 'PR-' }}${{ github.event.pull_request.number || github.ref_name }}

jobs:
  gtest-cpu-sycl:
    name: Test Google C++ unittest (CPU SYCL)
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
        with:
          submodules: 'true'
      - uses: dmlc/xgboost-devops/actions/miniforge-setup@main
        with:
          environment-name: linux_sycl_test
          environment-file: ops/conda_env/linux_sycl_test.yml
      - name: Run gtest
        run: bash ops/pipeline/build-test-sycl.sh gtest

  python-sycl-tests-on-ubuntu:
    name: Test XGBoost Python package with SYCL
    runs-on: ubuntu-latest
    timeout-minutes: 90
    steps:
      - uses: actions/checkout@v4
        with:
          submodules: 'true'
      - uses: dmlc/xgboost-devops/actions/miniforge-setup@main
        with:
          environment-name: linux_sycl_test
          environment-file: ops/conda_env/linux_sycl_test.yml
      - name: Test Python package
        run: bash ops/pipeline/build-test-sycl.sh pytest
==> xgboost-3.0.0/.github/workflows/windows.yml <==
name: XGBoost CI (Windows)

on: [push, pull_request]

permissions:
  contents: read  # to fetch code (actions/checkout)

concurrency:
  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
  cancel-in-progress: true

defaults:
  run:
    shell: powershell

env:
  BRANCH_NAME: >-
    ${{ github.event.pull_request.number && 'PR-' }}${{ github.event.pull_request.number || github.ref_name }}

jobs:
  build-win64-gpu:
    name: Build XGBoost for Windows with CUDA
    runs-on:
      - runs-on=${{ github.run_id }}
      - runner=windows-cpu
      - tag=windows-build-win64-gpu
    steps:
      - uses: actions/checkout@v4
        with:
          submodules: "true"
      - run: powershell ops/pipeline/build-win64-gpu.ps1
      - name: Stash files
        shell: powershell
        run: |
          conda activate
          python ops/pipeline/manage-artifacts.py upload `
            --s3-bucket ${{ env.RUNS_ON_S3_BUCKET_CACHE }} `
            --prefix cache/${{ github.run_id }}/build-win64-gpu `
            build/testxgboost.exe xgboost.exe `
            (Get-ChildItem python-package/dist/*.whl | Select-Object -Expand FullName)

  build-win64-cpu:
    name: Build XGBoost for Windows (minimal)
    runs-on:
      - runs-on=${{ github.run_id }}
      - runner=windows-cpu
      - tag=windows-build-win64-cpu
    steps:
      - uses: actions/checkout@v4
        with:
          submodules: "true"
      - run: powershell ops/pipeline/build-win64-cpu.ps1

  test-win64-gpu:
    name: Test XGBoost on Windows
    needs: build-win64-gpu
    runs-on:
      - runs-on=${{ github.run_id }}
      - runner=windows-gpu
      - tag=windows-test-win64-gpu
    steps:
      - uses: actions/checkout@v4
        with:
          submodules: "true"
      - name: Unstash files
        shell: powershell
        run: |
          conda activate
          python ops/pipeline/manage-artifacts.py download `
            --s3-bucket ${{ env.RUNS_ON_S3_BUCKET_CACHE }} `
            --prefix cache/${{ github.run_id }}/build-win64-gpu `
            --dest-dir build `
            *.whl testxgboost.exe xgboost.exe
          Move-Item -Path build/xgboost.exe -Destination .
          New-Item -ItemType Directory -Path python-package/dist/ -Force
          Move-Item -Path (Get-ChildItem build/*.whl | Select-Object -Expand FullName) `
            -Destination python-package/dist/
      - run: powershell ops/pipeline/test-win64-gpu.ps1
==> xgboost-3.0.0/.gitignore <==
# Compiled Object files
*.slo
*.lo
*.o
*.page

# Compiled Dynamic libraries
*.so
*.dylib
*.page

# Compiled Static libraries
*.lai
*.la
*.a
*~
*.Rcheck
*.rds
*.tar.gz
*conf
*buffer
*.model
*pyc
*.train
*.test
*.tar
*group
*rar
*vali
*sdf
Release
*exe
*exp
ipch
*.filters
*.user
*log
rmm_log.txt
Debug
*suo
.Rhistory
*.dll
*i386
*x64
*dump
*save
*csv
.Rproj.user
*.cpage.col
*.cpage
*.Rproj
./xgboost.mpi
./xgboost.mock
*.bak
#.Rbuildignore
R-package.Rproj
R-package/build/*
*.cache*
.mypy_cache/
doxygen

# java
java/xgboost4j/target
java/xgboost4j/tmp
java/xgboost4j-demo/target
java/xgboost4j-demo/data/
java/xgboost4j-demo/tmp/
java/xgboost4j-demo/model/
nb-configuration*

# Eclipse
.project
.cproject
.classpath
.pydevproject
.settings/
/build
/build-gpu
/xgboost
*.data
build_plugin
recommonmark/
tags
TAGS
*.class
target
*.swp

# cpp tests and gcov generated files
*.gcov
*.gcda
*.gcno
*.ubj
build_tests
/tests/cpp/xgboost_test
.DS_Store
lib/

# spark
metastore_db

/include/xgboost/build_config.h

# files from R-package source install
**/config.status
R-package/config.h
R-package/src/Makevars
*.lib

# Visual Studio
.vs/
CMakeSettings.json
*.ilk
*.pdb

# IntelliJ/CLion
.idea
*.iml
/cmake-build-debug/

# GDB
.gdb_history

# Python joblib.Memory used in pytest.
cachedir/

# Files from local Dask work
dask-worker-space/

# Jupyter notebook checkpoints
.ipynb_checkpoints/

# credentials and key material
config
credentials
credentials.csv
*.env
*.pem
*.pub
*.rdp
*_rsa

# Visual Studio code + extensions
.vscode
.metals
.bloop

# python tests
*.bin
demo/**/*.txt
*.dmatrix
.hypothesis
__MACOSX/
model*.json
/tests/python/models/models/

# R tests
*.htm
*.html
*.libsvm
*.rds
Rplots.pdf
*.zip

# nsys
*.nsys-rep

==> xgboost-3.0.0/.gitmodules <==
[submodule "dmlc-core"]
  path = dmlc-core
  url = https://github.com/dmlc/dmlc-core
  branch = main
[submodule "gputreeshap"]
  path = gputreeshap
  url = https://github.com/rapidsai/gputreeshap.git

==> xgboost-3.0.0/.readthedocs.yaml <==
# .readthedocs.yaml
# Read the Docs configuration file
# See https://docs.readthedocs.io/en/stable/config-file/v2.html for details

# Required
version: 2

submodules:
  include: all

# Set the version of Python and other tools you might need
build:
  os: ubuntu-22.04
  tools:
    python: "3.10"
  apt_packages:
    - graphviz
    - cmake
    - g++
    - doxygen
    - ninja-build

# Build documentation in the docs/ directory with Sphinx
sphinx:
  configuration: doc/conf.py

# If using Sphinx, optionally build your docs in additional formats such as PDF
formats:
  - pdf

# Optionally declare the Python requirements required to build your docs
python:
  install:
    - requirements: doc/requirements.txt

==> xgboost-3.0.0/CITATION <==
@inproceedings{Chen:2016:XST:2939672.2939785,
  author    = {Chen, Tianqi and Guestrin, Carlos},
  title     = {{XGBoost}: A Scalable Tree Boosting System},
  booktitle = {Proceedings of the 22nd ACM SIGKDD International Conference on Knowledge Discovery and Data Mining},
  series    = {KDD '16},
  year      = {2016},
  isbn      = {978-1-4503-4232-2},
  location  = {San Francisco, California, USA},
  pages     = {785--794},
  numpages  = {10},
  url       = {http://doi.acm.org/10.1145/2939672.2939785},
  doi       = {10.1145/2939672.2939785},
  acmid     = {2939785},
  publisher = {ACM},
  address   = {New York, NY, USA},
  keywords  = {large-scale machine learning},
}
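Because the build pulls in the submodules declared in .gitmodules above (dmlc-core, gputreeshap), a fresh checkout generally needs them initialized; the standard pattern:

  git clone --recursive https://github.com/dmlc/xgboost.git
  # or, inside an existing checkout:
  git submodule update --init --recursive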
newer to build XGBoost") endif() elseif(CMAKE_CXX_COMPILER_ID STREQUAL "Clang") if(CMAKE_CXX_COMPILER_VERSION VERSION_LESS "9.0") message(FATAL_ERROR "Need Clang 9.0 or newer to build XGBoost") endif() endif() include(${xgboost_SOURCE_DIR}/cmake/PrefetchIntrinsics.cmake) find_prefetch_intrinsics() include(${xgboost_SOURCE_DIR}/cmake/Version.cmake) write_version() set_default_configuration_release() #-- Options include(CMakeDependentOption) ## User options option(BUILD_C_DOC "Build documentation for C APIs using Doxygen." OFF) option(USE_OPENMP "Build with OpenMP support." ON) option(BUILD_STATIC_LIB "Build static library" OFF) option(BUILD_DEPRECATED_CLI "Build the deprecated command line interface" OFF) option(FORCE_SHARED_CRT "Build with dynamic CRT on Windows (/MD)" OFF) ## Bindings option(JVM_BINDINGS "Build JVM bindings" OFF) option(R_LIB "Build shared library for R package" OFF) ## Dev option(USE_DEBUG_OUTPUT "Dump internal training results like gradients and predictions to stdout. Should only be used for debugging." OFF) option(FORCE_COLORED_OUTPUT "Force colored output from compilers, useful when ninja is used instead of make." OFF) option(ENABLE_ALL_WARNINGS "Enable all compiler warnings. Only effective for GCC/Clang" OFF) option(LOG_CAPI_INVOCATION "Log all C API invocations for debugging" OFF) option(GOOGLE_TEST "Build google tests" OFF) option(USE_DMLC_GTEST "Use google tests bundled with dmlc-core submodule" OFF) option(USE_DEVICE_DEBUG "Generate CUDA device debug info." OFF) option(USE_NVTX "Build with cuda profiling annotations. Developers only." OFF) set(NVTX_HEADER_DIR "" CACHE PATH "Path to the stand-alone nvtx header") option(HIDE_CXX_SYMBOLS "Build shared library and hide all C++ symbols" OFF) option(KEEP_BUILD_ARTIFACTS_IN_BINARY_DIR "Output build artifacts in CMake binary dir" OFF) ## CUDA option(USE_CUDA "Build with GPU acceleration" OFF) option(USE_NCCL "Build with NCCL to enable distributed GPU support." OFF) # This is specifically designed for PyPI binary release and should be disabled for most of the cases. option(USE_DLOPEN_NCCL "Whether to load nccl dynamically." OFF) option(BUILD_WITH_SHARED_NCCL "Build with shared NCCL library." OFF) if(USE_CUDA) if(NOT DEFINED CMAKE_CUDA_ARCHITECTURES AND NOT DEFINED ENV{CUDAARCHS}) set(GPU_COMPUTE_VER "" CACHE STRING "Semicolon separated list of compute versions to be built against, e.g. '35;61'") else() # Clear any cached values from previous runs unset(GPU_COMPUTE_VER) unset(GPU_COMPUTE_VER CACHE) endif() endif() # CUDA device LTO was introduced in CMake v3.25 and requires host LTO to also be enabled but can still # be explicitly disabled allowing for LTO on host only, host and device, or neither, but device-only LTO # is not a supproted configuration cmake_dependent_option(USE_CUDA_LTO "Enable link-time optimization for CUDA device code" "${CMAKE_INTERPROCEDURAL_OPTIMIZATION}" "CMAKE_VERSION VERSION_GREATER_EQUAL 3.25;USE_CUDA;CMAKE_INTERPROCEDURAL_OPTIMIZATION" OFF) ## Sanitizers option(USE_SANITIZER "Use santizer flags" OFF) option(SANITIZER_PATH "Path to sanitizes.") set(ENABLED_SANITIZERS "address" "leak" CACHE STRING "Semicolon separated list of sanitizer names. E.g 'address;leak'. Supported sanitizers are address, leak, undefined and thread.") ## Plugins option(PLUGIN_RMM "Build with RAPIDS Memory Manager (RMM)" OFF) option(PLUGIN_FEDERATED "Build with Federated Learning" OFF) ## TODO: 1. 
Add check if DPC++ compiler is used for building option(PLUGIN_SYCL "SYCL plugin" OFF) option(ADD_PKGCONFIG "Add xgboost.pc into system." ON) #-- Checks for building XGBoost if(USE_DEBUG_OUTPUT AND (NOT (CMAKE_BUILD_TYPE MATCHES Debug))) message(SEND_ERROR "Do not enable `USE_DEBUG_OUTPUT' with release build.") endif() if(USE_NVTX AND (NOT USE_CUDA)) message(SEND_ERROR "`USE_NVTX` must be enabled with `USE_CUDA` flag.") endif() if(USE_NVTX) if(CMAKE_VERSION VERSION_LESS "3.25.0") # CUDA:nvtx3 target is added in 3.25 message("cmake >= 3.25 is required for NVTX.") endif() endif() if(USE_NCCL AND (NOT USE_CUDA)) message(SEND_ERROR "`USE_NCCL` must be enabled with `USE_CUDA` flag.") endif() if(USE_DEVICE_DEBUG AND (NOT USE_CUDA)) message(SEND_ERROR "`USE_DEVICE_DEBUG` must be enabled with `USE_CUDA` flag.") endif() if(BUILD_WITH_SHARED_NCCL AND (NOT USE_NCCL)) message(SEND_ERROR "Build XGBoost with -DUSE_NCCL=ON to enable BUILD_WITH_SHARED_NCCL.") endif() if(USE_DLOPEN_NCCL AND (NOT USE_NCCL)) message(SEND_ERROR "Build XGBoost with -DUSE_NCCL=ON to enable USE_DLOPEN_NCCL.") endif() if(USE_DLOPEN_NCCL AND (NOT (CMAKE_SYSTEM_NAME STREQUAL "Linux"))) message(SEND_ERROR "`USE_DLOPEN_NCCL` supports only Linux at the moment.") endif() if(JVM_BINDINGS AND R_LIB) message(SEND_ERROR "`R_LIB' is not compatible with `JVM_BINDINGS' as they both have customized configurations.") endif() if(R_LIB AND GOOGLE_TEST) message( WARNING "Some C++ tests will fail with `R_LIB` enabled, as R package redirects some functions to R runtime implementation." ) endif() if(PLUGIN_RMM AND NOT (USE_CUDA)) message(SEND_ERROR "`PLUGIN_RMM` must be enabled with `USE_CUDA` flag.") endif() if(PLUGIN_RMM AND NOT ((CMAKE_CXX_COMPILER_ID STREQUAL "Clang") OR (CMAKE_CXX_COMPILER_ID STREQUAL "GNU"))) message(SEND_ERROR "`PLUGIN_RMM` must be used with GCC or Clang compiler.") endif() if(PLUGIN_RMM AND NOT (CMAKE_SYSTEM_NAME STREQUAL "Linux")) message(SEND_ERROR "`PLUGIN_RMM` must be used with Linux.") endif() if(ENABLE_ALL_WARNINGS) if((NOT CMAKE_CXX_COMPILER_ID MATCHES "Clang") AND (NOT CMAKE_CXX_COMPILER_ID STREQUAL "GNU")) message(SEND_ERROR "ENABLE_ALL_WARNINGS is only available for Clang and GCC.") endif() endif() if(BUILD_STATIC_LIB AND (R_LIB OR JVM_BINDINGS)) message(SEND_ERROR "Cannot build a static library libxgboost.a when R or JVM packages are enabled.") endif() if(PLUGIN_FEDERATED) if(CMAKE_CROSSCOMPILING) message(SEND_ERROR "Cannot cross compile with federated learning support") endif() if(BUILD_STATIC_LIB) message(SEND_ERROR "Cannot build static lib with federated learning support") endif() if(R_LIB OR JVM_BINDINGS) message(SEND_ERROR "Cannot enable federated learning support when R or JVM packages are enabled.") endif() if(WIN32) message(SEND_ERROR "Federated learning not supported for Windows platform") endif() endif() #-- Removed options if(USE_AVX) message(SEND_ERROR "The option `USE_AVX` is deprecated as experimental AVX features have been removed from XGBoost.") endif() if(PLUGIN_LZ4) message(SEND_ERROR "The option `PLUGIN_LZ4` is removed from XGBoost.") endif() if(RABIT_BUILD_MPI) message(SEND_ERROR "The option `RABIT_BUILD_MPI` has been removed from XGBoost.") endif() if(USE_S3) message(SEND_ERROR "The option `USE_S3` has been removed from XGBoost") endif() if(USE_AZURE) message(SEND_ERROR "The option `USE_AZURE` has been removed from XGBoost") endif() if(USE_HDFS) message(SEND_ERROR "The option `USE_HDFS` has been removed from XGBoost") endif() if(PLUGIN_DENSE_PARSER) message(SEND_ERROR "The option 
  message(SEND_ERROR "The option `PLUGIN_DENSE_PARSER` has been removed from XGBoost.")
endif()

#-- Sanitizer
if(USE_SANITIZER)
  include(cmake/Sanitizer.cmake)
  enable_sanitizers("${ENABLED_SANITIZERS}")
endif()

if(USE_CUDA)
  set(USE_OPENMP ON CACHE BOOL "CUDA requires OpenMP" FORCE)
  # `export CXX=' is ignored by CMake CUDA.
  if(NOT DEFINED CMAKE_CUDA_HOST_COMPILER AND NOT DEFINED ENV{CUDAHOSTCXX})
    set(CMAKE_CUDA_HOST_COMPILER ${CMAKE_CXX_COMPILER} CACHE FILEPATH
        "The compiler executable to use when compiling host code for CUDA or HIP language files.")
    mark_as_advanced(CMAKE_CUDA_HOST_COMPILER)
    message(STATUS "Configured CUDA host compiler: ${CMAKE_CUDA_HOST_COMPILER}")
  endif()
  if(NOT DEFINED CMAKE_CUDA_RUNTIME_LIBRARY)
    set(CMAKE_CUDA_RUNTIME_LIBRARY Static)
  endif()

  enable_language(CUDA)
  if(${CMAKE_CUDA_COMPILER_VERSION} VERSION_LESS 11.0)
    message(FATAL_ERROR "CUDA version must be at least 11.0!")
  endif()
  if(DEFINED GPU_COMPUTE_VER)
    compute_cmake_cuda_archs("${GPU_COMPUTE_VER}")
  endif()

  find_package(CUDAToolkit REQUIRED)
  find_package(CCCL CONFIG)
  if(CCCL_FOUND)
    message(STATUS "Standalone CCCL found.")
  else()
    message(STATUS "Standalone CCCL not found. Attempting to use CCCL from CUDA Toolkit...")
    find_package(CCCL CONFIG HINTS ${CUDAToolkit_LIBRARY_DIR}/cmake)
    if(NOT CCCL_FOUND)
      message(STATUS "Could not locate CCCL from CUDA Toolkit. Using Thrust and CUB from CUDA Toolkit...")
      find_package(libcudacxx CONFIG REQUIRED HINTS ${CUDAToolkit_LIBRARY_DIR}/cmake)
      find_package(CUB CONFIG REQUIRED HINTS ${CUDAToolkit_LIBRARY_DIR}/cmake)
      find_package(Thrust CONFIG REQUIRED HINTS ${CUDAToolkit_LIBRARY_DIR}/cmake)
      thrust_create_target(Thrust HOST CPP DEVICE CUDA)
      add_library(CCCL::CCCL INTERFACE IMPORTED GLOBAL)
      target_link_libraries(CCCL::CCCL INTERFACE libcudacxx::libcudacxx CUB::CUB Thrust)
    endif()
  endif()
  # Define guard macros to prevent windows.h from conflicting with winsock2.h
  if(WIN32)
    target_compile_definitions(CCCL::CCCL INTERFACE NOMINMAX WIN32_LEAN_AND_MEAN _WINSOCKAPI_)
  endif()
endif()

if(FORCE_COLORED_OUTPUT AND (CMAKE_GENERATOR STREQUAL "Ninja") AND
   ((CMAKE_CXX_COMPILER_ID STREQUAL "GNU") OR (CMAKE_CXX_COMPILER_ID STREQUAL "Clang")))
  set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fdiagnostics-color=always")
endif()

find_package(Threads REQUIRED)

# -- OpenMP
include(cmake/FindOpenMPMacOS.cmake)
if(USE_OPENMP)
  if(APPLE)
    find_openmp_macos()
  else()
    find_package(OpenMP REQUIRED)
  endif()
endif()

# Add for IBM i
if(${CMAKE_SYSTEM_NAME} MATCHES "OS400")
  set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -pthread")
  set(CMAKE_CXX_ARCHIVE_CREATE "<CMAKE_AR> -X64 qc <TARGET> <OBJECTS>")
endif()

if(USE_NCCL)
  find_package(Nccl REQUIRED)
endif()

if(MSVC)
  if(FORCE_SHARED_CRT)
    message(STATUS "XGBoost: Using dynamically linked MSVC runtime...")
    set(CMAKE_MSVC_RUNTIME_LIBRARY "MultiThreaded$<$<CONFIG:Debug>:Debug>DLL")
  else()
    message(STATUS "XGBoost: Using statically linked MSVC runtime...")
    set(CMAKE_MSVC_RUNTIME_LIBRARY "MultiThreaded$<$<CONFIG:Debug>:Debug>")
  endif()
endif()

# dmlc-core
set(DMLC_FORCE_SHARED_CRT ${FORCE_SHARED_CRT})
add_subdirectory(${xgboost_SOURCE_DIR}/dmlc-core)
if(MSVC)
  if(TARGET dmlc_unit_tests)
    target_compile_options(dmlc_unit_tests PRIVATE
      -D_CRT_SECURE_NO_WARNINGS -D_CRT_SECURE_NO_DEPRECATE)
  endif()
endif()

# core xgboost
add_subdirectory(${xgboost_SOURCE_DIR}/src)
target_link_libraries(objxgboost PUBLIC dmlc)

# Link -lstdc++fs for GCC 8.x
if(CMAKE_CXX_COMPILER_ID STREQUAL "GNU" AND CMAKE_CXX_COMPILER_VERSION VERSION_LESS "9.0")
  target_link_libraries(objxgboost PUBLIC stdc++fs)
endif()

# Exports some R specific definitions and objects
if(R_LIB)
  add_subdirectory(${xgboost_SOURCE_DIR}/R-package)
endif()

# This creates its own shared library `xgboost4j'.
if(JVM_BINDINGS)
  add_subdirectory(${xgboost_SOURCE_DIR}/jvm-packages)
endif()

# Plugin
add_subdirectory(${xgboost_SOURCE_DIR}/plugin)

if(PLUGIN_RMM)
  find_package(rmm REQUIRED)

  # Patch the rmm targets so they reference the static cudart
  # Remove this patch once RMM stops specifying cudart requirement
  # (since RMM is a header-only library, it should not specify cudart in its CMake config)
  get_target_property(rmm_link_libs rmm::rmm INTERFACE_LINK_LIBRARIES)
  list(REMOVE_ITEM rmm_link_libs CUDA::cudart)
  list(APPEND rmm_link_libs CUDA::cudart_static)
  set_target_properties(rmm::rmm PROPERTIES INTERFACE_LINK_LIBRARIES "${rmm_link_libs}")
endif()

if(PLUGIN_SYCL)
  set(CMAKE_CXX_LINK_EXECUTABLE
      "icpx -qopenmp <FLAGS> <CMAKE_CXX_LINK_FLAGS> <LINK_FLAGS> <OBJECTS> -o <TARGET> <LINK_LIBRARIES>")
  set(CMAKE_CXX_CREATE_SHARED_LIBRARY
      "icpx -qopenmp <CMAKE_SHARED_LIBRARY_CXX_FLAGS> <LANGUAGE_COMPILE_FLAGS> <LINK_FLAGS> \
       <CMAKE_SHARED_LIBRARY_CREATE_CXX_FLAGS> <SONAME_FLAG>,<TARGET_SONAME> \
       -o <TARGET> <OBJECTS> <LINK_LIBRARIES>")
endif()

#-- library
if(BUILD_STATIC_LIB)
  add_library(xgboost STATIC)
else()
  add_library(xgboost SHARED)
endif()
target_link_libraries(xgboost PRIVATE objxgboost)
target_include_directories(xgboost
  INTERFACE
  $<INSTALL_INTERFACE:$<INSTALL_PREFIX>/include>
  $<BUILD_INTERFACE:${xgboost_SOURCE_DIR}/include>)
#-- End shared library

#-- CLI for xgboost
if(BUILD_DEPRECATED_CLI)
  add_executable(runxgboost ${xgboost_SOURCE_DIR}/src/cli_main.cc)
  target_link_libraries(runxgboost PRIVATE objxgboost)
  target_include_directories(runxgboost PRIVATE
    ${xgboost_SOURCE_DIR}/include
    ${xgboost_SOURCE_DIR}/dmlc-core/include)
  set_target_properties(runxgboost PROPERTIES OUTPUT_NAME xgboost)
  xgboost_target_properties(runxgboost)
  xgboost_target_link_libraries(runxgboost)
  xgboost_target_defs(runxgboost)

  if(KEEP_BUILD_ARTIFACTS_IN_BINARY_DIR)
    set_output_directory(runxgboost ${xgboost_BINARY_DIR})
  else()
    set_output_directory(runxgboost ${xgboost_SOURCE_DIR})
  endif()
endif()
#-- End CLI for xgboost

# Common setup for all targets
foreach(target xgboost objxgboost dmlc)
  xgboost_target_properties(${target})
  xgboost_target_link_libraries(${target})
  xgboost_target_defs(${target})
endforeach()

if(JVM_BINDINGS)
  xgboost_target_properties(xgboost4j)
  xgboost_target_link_libraries(xgboost4j)
  xgboost_target_defs(xgboost4j)
endif()

if(USE_OPENMP AND APPLE)
  patch_openmp_path_macos(xgboost libxgboost)
endif()

if(KEEP_BUILD_ARTIFACTS_IN_BINARY_DIR)
  set_output_directory(xgboost ${xgboost_BINARY_DIR}/lib)
else()
  set_output_directory(xgboost ${xgboost_SOURCE_DIR}/lib)
endif()

# Ensure these two targets do not build simultaneously, as they produce outputs with conflicting names
if(BUILD_DEPRECATED_CLI)
  add_dependencies(xgboost runxgboost)
endif()

#-- Installing XGBoost
if(R_LIB)
  include(cmake/RPackageInstallTargetSetup.cmake)
  set_target_properties(xgboost PROPERTIES PREFIX "")
  if(APPLE)
    set_target_properties(xgboost PROPERTIES SUFFIX ".so")
  endif()
  setup_rpackage_install_target(xgboost "${CMAKE_CURRENT_BINARY_DIR}/R-package-install")
  set(CMAKE_INSTALL_PREFIX "${CMAKE_CURRENT_BINARY_DIR}/dummy_inst")
endif()
if(MINGW)
  set_target_properties(xgboost PROPERTIES PREFIX "")
endif()

if(BUILD_C_DOC)
  include(cmake/Doc.cmake)
  run_doxygen()
endif()

include(CPack)
include(GNUInstallDirs)
# Install all headers. Please note that currently the C++ headers do not form an "API".
install(DIRECTORY ${xgboost_SOURCE_DIR}/include/xgboost DESTINATION ${CMAKE_INSTALL_INCLUDEDIR})

# Install libraries. If `xgboost` is a static lib, specify `objxgboost` also, to avoid the
# following error:
#
# > install(EXPORT ...) includes target "xgboost" which requires target "objxgboost" that is not
# > in any export set.
#
# https://github.com/dmlc/xgboost/issues/6085
if(BUILD_STATIC_LIB)
  if(BUILD_DEPRECATED_CLI)
    set(INSTALL_TARGETS xgboost runxgboost objxgboost dmlc)
  else()
    set(INSTALL_TARGETS xgboost objxgboost dmlc)
  endif()
else()
  if(BUILD_DEPRECATED_CLI)
    set(INSTALL_TARGETS xgboost runxgboost)
  else()
    set(INSTALL_TARGETS xgboost)
  endif()
endif()

install(TARGETS ${INSTALL_TARGETS}
  EXPORT XGBoostTargets
  ARCHIVE DESTINATION ${CMAKE_INSTALL_LIBDIR}
  LIBRARY DESTINATION ${CMAKE_INSTALL_LIBDIR}
  RUNTIME DESTINATION ${CMAKE_INSTALL_BINDIR}
  INCLUDES DESTINATION ${LIBLEGACY_INCLUDE_DIRS})
install(EXPORT XGBoostTargets
  FILE XGBoostTargets.cmake
  NAMESPACE xgboost::
  DESTINATION ${CMAKE_INSTALL_LIBDIR}/cmake/xgboost)

include(CMakePackageConfigHelpers)
configure_package_config_file(
  ${CMAKE_CURRENT_LIST_DIR}/cmake/xgboost-config.cmake.in
  ${CMAKE_CURRENT_BINARY_DIR}/cmake/xgboost-config.cmake
  INSTALL_DESTINATION ${CMAKE_INSTALL_LIBDIR}/cmake/xgboost)
write_basic_package_version_file(
  ${CMAKE_BINARY_DIR}/cmake/xgboost-config-version.cmake
  VERSION ${XGBOOST_VERSION}
  COMPATIBILITY AnyNewerVersion)
install(
  FILES
  ${CMAKE_CURRENT_BINARY_DIR}/cmake/xgboost-config.cmake
  ${CMAKE_BINARY_DIR}/cmake/xgboost-config-version.cmake
  DESTINATION ${CMAKE_INSTALL_LIBDIR}/cmake/xgboost)

#-- Test
if(GOOGLE_TEST)
  enable_testing()
  # Unittests.
  add_executable(testxgboost)
  target_link_libraries(testxgboost PRIVATE objxgboost)
  xgboost_target_properties(testxgboost)
  xgboost_target_link_libraries(testxgboost)
  xgboost_target_defs(testxgboost)

  add_subdirectory(${xgboost_SOURCE_DIR}/tests/cpp)
  add_test(
    NAME TestXGBoostLib
    COMMAND testxgboost
    WORKING_DIRECTORY ${xgboost_BINARY_DIR})
  # CLI tests
  configure_file(
    ${xgboost_SOURCE_DIR}/tests/cli/machine.conf.in
    ${xgboost_BINARY_DIR}/tests/cli/machine.conf
    @ONLY
    NEWLINE_STYLE UNIX)
  if(BUILD_DEPRECATED_CLI)
    add_test(
      NAME TestXGBoostCLI
      COMMAND runxgboost ${xgboost_BINARY_DIR}/tests/cli/machine.conf
      WORKING_DIRECTORY ${xgboost_BINARY_DIR})
    set_tests_properties(TestXGBoostCLI
      PROPERTIES
      PASS_REGULAR_EXPRESSION ".*test-rmse:0.087.*")
  endif()
endif()

# Add xgboost.pc
if(ADD_PKGCONFIG)
  configure_file(${xgboost_SOURCE_DIR}/cmake/xgboost.pc.in ${xgboost_BINARY_DIR}/xgboost.pc @ONLY)
  install(
    FILES ${xgboost_BINARY_DIR}/xgboost.pc
    DESTINATION ${CMAKE_INSTALL_LIBDIR}/pkgconfig)
endif()

xgboost-3.0.0/CONTRIBUTORS.md

Contributors of DMLC/XGBoost
============================
XGBoost has been developed and used by an active community. Everyone is more than welcome to contribute, which is a great way to make the project better and more accessible to more users.

Project Management Committee(PMC)
----------
The Project Management Committee (PMC) consists of a group of active committers who moderate discussions, manage project releases, and propose new committer/PMC members.

* [Tianqi Chen](https://github.com/tqchen), University of Washington
  - Tianqi is a Ph.D. student working on large-scale machine learning. He is the creator of the project.
* [Michael Benesty](https://github.com/pommedeterresautee)
  - Michael is a lawyer and data scientist in France. He is the creator of the XGBoost interactive analysis module in R.
* [Yuan Tang](https://github.com/terrytangyuan), Red Hat
  - Yuan is a principal software engineer at Red Hat. He contributed mostly to the R and Python packages.
* [Nan Zhu](https://github.com/CodingCat), Uber
  - Nan is a software engineer at Uber. He contributed mostly to the JVM packages.
* [Jiaming Yuan](https://github.com/trivialfis)
  - Jiaming contributed to the GPU algorithms. He has also introduced new abstractions to improve the quality of the C++ codebase.
* [Hyunsu Cho](http://hyunsu-cho.io/), NVIDIA
  - Hyunsu is the maintainer of the XGBoost Python package. He also manages the Jenkins continuous integration system (https://xgboost-ci.net/). He is the initial author of the CPU 'hist' updater.
* [Rory Mitchell](https://github.com/RAMitchell), University of Waikato
  - Rory is a Ph.D. student at the University of Waikato. He is the original creator of the GPU training algorithms. He improved the CMake build system and continuous integration.
* [Hongliang Liu](https://github.com/phunterlau)

Committers
----------
Committers are people who have made substantial contributions to the project and have been granted write access to the project.

* [Tong He](https://github.com/hetong007), Amazon AI
  - Tong is an applied scientist at Amazon AI. He is the maintainer of the XGBoost R package.
* [Vadim Khotilovich](https://github.com/khotilov)
  - Vadim contributed many improvements to the R and core packages.
* [Bing Xu](https://github.com/antinucleon)
  - Bing is the original creator of the XGBoost Python package and currently the maintainer of [XGBoost.jl](https://github.com/antinucleon/XGBoost.jl).
* [Sergei Lebedev](https://github.com/superbobry), Criteo
  - Sergei is a software engineer at Criteo. He contributed mostly to the JVM packages.
* [Scott Lundberg](http://scottlundberg.com/), University of Washington
  - Scott is a Ph.D. student at the University of Washington. He is the creator of SHAP, a unified approach to explain the output of machine learning models such as decision tree ensembles. He also helps maintain the XGBoost Julia package.
* [Egor Smirnov](https://github.com/SmirnovEgorRu), Intel
  - Egor has led a major effort to improve the performance of XGBoost on multi-core CPUs.

Become a Committer
------------------
XGBoost is an open source project, and we are actively looking for new committers who are willing to help maintain and lead the project. Committers come from contributors who:
* Made substantial contributions to the project.
* Are willing to spend time maintaining and leading the project.

New committers will be proposed by current committers, with support from more than two current committers.

List of Contributors
--------------------
* [Full List of Contributors](https://github.com/dmlc/xgboost/graphs/contributors)
  - To contributors: please add your name to the list when you submit a patch to the project :)
* [Kailong Chen](https://github.com/kalenhaha)
  - Kailong is an early contributor to XGBoost; he is the creator of the ranking objectives in XGBoost.
* [Skipper Seabold](https://github.com/jseabold)
  - Skipper is the major contributor to the scikit-learn module of XGBoost.
* [Zygmunt Zając](https://github.com/zygmuntz)
  - Zygmunt is the master behind the early stopping feature frequently used by Kagglers.
* [Ajinkya Kale](https://github.com/ajkl)
* [Boliang Chen](https://github.com/cblsjtu)
* [Yangqing Men](https://github.com/yanqingmen)
  - Yangqing is the creator of the XGBoost Java package.
* [Engpeng Yao](https://github.com/yepyao)
* [Giulio](https://github.com/giuliohome)
  - Giulio is the creator of the Windows project of XGBoost.
* [Jamie Hall](https://github.com/nerdcha)
  - Jamie is the initial creator of the XGBoost scikit-learn module.
* [Yen-Ying Lee](https://github.com/white1033)
* [Masaaki Horikoshi](https://github.com/sinhrks)
  - Masaaki is the initial creator of the XGBoost Python plotting module.
* [daiyl0320](https://github.com/daiyl0320)
  - daiyl0320 contributed a patch to make the XGBoost distributed version more robust and scale stably on TB-scale datasets.
* [Huayi Zhang](https://github.com/irachex)
* [Johan Manders](https://github.com/johanmanders)
* [yoori](https://github.com/yoori)
* [Mathias Müller](https://github.com/far0n)
* [Sam Thomson](https://github.com/sammthomson)
* [ganesh-krishnan](https://github.com/ganesh-krishnan)
* [Damien Carol](https://github.com/damiencarol)
* [Alex Bain](https://github.com/convexquad)
* [Baltazar Bieniek](https://github.com/bbieniek)
* [Adam Pocock](https://github.com/Craigacp)
* [Gideon Whitehead](https://github.com/gaw89)
* [Yi-Lin Juang](https://github.com/frankyjuang)
* [Andrew Hannigan](https://github.com/andrewhannigan)
* [Andy Adinets](https://github.com/canonizer)
* [Henry Gouk](https://github.com/henrygouk)
* [Pierre de Sahb](https://github.com/pdesahb)
* [liuliang01](https://github.com/liuliang01)
  - liuliang01 added support for the qid column for the LIBSVM input format. This makes the ranking task easier in a distributed setting.
* [Andrew Thia](https://github.com/BlueTea88)
  - Andrew Thia implemented feature interaction constraints.
* [Wei Tian](https://github.com/weitian)
* [Chen Qin](https://github.com/chenqin)
* [Sam Wilkinson](https://samwilkinson.io)
* [Matthew Jones](https://github.com/mt-jones)
* [Jiaxiang Li](https://github.com/JiaxiangBU)
* [Bryan Woods](https://github.com/bryan-woods)
  - Bryan added support for cross-validation for the ranking objective.
* [Haoda Fu](https://github.com/fuhaoda)
* [Evan Kepner](https://github.com/EvanKepner)
  - Evan Kepner added support for os.PathLike file paths in Python.

xgboost-3.0.0/LICENSE

Apache License Version 2.0, January 2004 http://www.apache.org/licenses/ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 1. Definitions. "License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document. "Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License. "Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity. "You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License. "Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files. "Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types. "Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof. "Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution." "Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work. 2. Grant of Copyright License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form. 3. Grant of Patent License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed. 4. Redistribution. 
You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions: (a) You must give any other recipients of the Work or Derivative Works a copy of this License; and (b) You must cause any modified files to carry prominent notices stating that You changed the files; and (c) You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and (d) If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License. You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License. 5. Submission of Contributions. Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions. 6. Trademarks. This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file. 7. Disclaimer of Warranty. Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License. 8. Limitation of Liability. 
In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages. 9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability. END OF TERMS AND CONDITIONS APPENDIX: How to apply the Apache License to your work. To apply the Apache License to your work, attach the following boilerplate notice, with the fields enclosed by brackets "{}" replaced with your own identifying information. (Don't include the brackets!) The text should be enclosed in the appropriate comment syntax for the file format. We also recommend that a file or class name and description of purpose be included on the same "printed page" as the copyright notice for easier identification within third-party archives. Copyright (c) 2019 by Contributors Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.

xgboost-3.0.0/NEWS.md

XGBoost Change Log
==================

**Starting from 2.1.0, release notes are recorded in the documentation.**

This file records the changes in the xgboost library in reverse chronological order.

## 2.0.0 (2023 Aug 16)

We are excited to announce the release of XGBoost 2.0. This note will begin by covering some overall changes and then highlight specific updates to the package.

### Initial work on multi-target trees with vector-leaf outputs

We have been working on vector-leaf tree models for multi-target regression, multi-label classification, and multi-class classification in version 2.0. Previously, XGBoost would build a separate model for each target. However, with this new feature, which is still being developed, XGBoost can build one tree for all targets. The feature has multiple benefits and trade-offs compared to the existing approach. It can help prevent overfitting, produce smaller models, and build trees that consider the correlation between targets.
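As a quick illustration, here is a minimal sketch of vector-leaf training on synthetic data. It assumes the `multi_strategy="multi_output_tree"` training parameter that the 2.0 Python package uses to select this behavior; everything else is standard scikit-learn-style usage:

```python
import numpy as np
import xgboost as xgb

# Synthetic data with three regression targets.
X = np.random.rand(128, 10)
y = np.random.rand(128, 3)

# `multi_strategy="multi_output_tree"` (assumed here) asks XGBoost to grow
# one tree for all targets (vector leaf) instead of one model per target.
reg = xgb.XGBRegressor(tree_method="hist", multi_strategy="multi_output_tree")
reg.fit(X, y)
print(reg.predict(X[:2]).shape)  # one prediction column per target
```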
In addition, users can combine vector leaf and scalar leaf trees during a training session using a callback. Please note that the feature is still a work in progress, and many parts are not yet available. See #9043 for the current status. Related PRs: (#8538, #8697, #8902, #8884, #8895, #8898, #8612, #8652, #8698, #8908, #8928, #8968, #8616, #8922, #8890, #8872, #8889, #9509) Please note that only the `hist` (default) tree method on CPU can be used for building vector leaf trees at the moment.

### New `device` parameter

A new `device` parameter is set to replace the existing `gpu_id`, `gpu_hist`, `gpu_predictor`, `cpu_predictor`, `gpu_coord_descent`, and the PySpark-specific parameter `use_gpu`. From now on, users need only the `device` parameter to select which device to run on, along with the ordinal of the device. For more information, please see our document page (https://xgboost.readthedocs.io/en/stable/parameter.html#general-parameters). For example, with `device="cuda", tree_method="hist"`, XGBoost will run the `hist` tree method on GPU. (#9363, #8528, #8604, #9354, #9274, #9243, #8896, #9129, #9362, #9402, #9385, #9398, #9390, #9386, #9412, #9507, #9536). The old behavior of `gpu_hist` is preserved but deprecated. In addition, the `predictor` parameter is removed.

### `hist` is now the default tree method

Starting from 2.0, the `hist` tree method will be the default. In previous versions, XGBoost chose `approx` or `exact` depending on the input data and training environment. The new default can help XGBoost train models more efficiently and consistently. (#9320, #9353)

### GPU-based approx tree method

There's initial support for using the `approx` tree method on GPU. The performance of `approx` is not yet well optimized, but it is feature complete except for the JVM packages. It can be accessed through the use of the parameter combination `device="cuda", tree_method="approx"`. (#9414, #9399, #9478). Please note that the Scala-based Spark interface is not yet supported.

### Optimize and bound the size of the histogram on CPU, to control memory footprint

XGBoost has a new parameter `max_cached_hist_node` for users to limit the CPU cache size for histograms. It can help prevent XGBoost from caching histograms too aggressively. Without the cache, performance is likely to decrease. However, the size of the cache grows exponentially with the depth of the tree. The limit can be crucial when growing deep trees. In most cases, users need not configure this parameter as it does not affect the model's accuracy. (#9455, #9441, #9440, #9427, #9400). Along with the cache limit, XGBoost also reduces the memory usage of the `hist` and `approx` tree methods on distributed systems by cutting the size of the cache by half. (#9433)

### Improved external memory support

There is some exciting development around external memory support in XGBoost. It's still an experimental feature, but the performance has been significantly improved with the default `hist` tree method. We replaced the old file IO logic with a memory map. In addition to performance, we have reduced CPU memory usage and added extensive documentation. Beginning from 2.0.0, we encourage users to try it with the `hist` tree method when the memory savings from `QuantileDMatrix` are not sufficient. (#9361, #9317, #9282, #9315, #8457)

### Learning to rank

We created a brand-new implementation for the learning-to-rank task.
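Before the full list of changes below, here is a small, self-contained sketch of the new interface using the scikit-learn ranking estimator; the parameter names are the ones introduced in this release, while the values and data are only illustrative:

```python
import numpy as np
import xgboost as xgb

rng = np.random.default_rng(7)
X = rng.random((40, 5))
y = rng.integers(0, 4, size=40)             # graded relevance labels
qid = np.sort(rng.integers(0, 5, size=40))  # query ids, grouped together

ranker = xgb.XGBRanker(
    objective="rank:ndcg",              # NDCG is now the default objective
    lambdarank_pair_method="topk",      # new pair-construction strategy
    lambdarank_num_pair_per_sample=8,   # new pair-count control (see list below)
)
ranker.fit(X, y, qid=qid)
scores = ranker.predict(X)
```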
With the latest version, XGBoost gained a set of new features for the ranking task, including:

- A new parameter `lambdarank_pair_method` for choosing the pair construction strategy.
- A new parameter `lambdarank_num_pair_per_sample` for controlling the number of samples for each group.
- An experimental implementation of unbiased learning-to-rank, which can be accessed using the `lambdarank_unbiased` parameter.
- Support for custom gain function with `NDCG` using the `ndcg_exp_gain` parameter.
- Deterministic GPU computation for all objectives and metrics.
- `NDCG` is now the default objective function.
- Improved performance of metrics using caches.
- Support scikit-learn utilities for `XGBRanker`.
- Extensive documentation on how learning-to-rank works with XGBoost.

For more information, please see the [tutorial](https://xgboost.readthedocs.io/en/latest/tutorials/learning_to_rank.html). Related PRs: (#8771, #8692, #8783, #8789, #8790, #8859, #8887, #8893, #8906, #8931, #9075, #9015, #9381, #9336, #8822, #9222, #8984, #8785, #8786, #8768)

### Automatically estimated intercept

In the previous version, `base_score` was a constant that could be set as a training parameter. In the new version, XGBoost can automatically estimate this parameter based on input labels for optimal accuracy. (#8539, #8498, #8272, #8793, #8607)

### Quantile regression

The XGBoost algorithm now supports quantile regression, which involves minimizing the quantile loss (also called the "pinball loss"). Furthermore, XGBoost allows for training with multiple target quantiles simultaneously, with one tree per quantile. (#8775, #8761, #8760, #8758, #8750)

### L1 and Quantile regression now support learning rate

Both objectives use adaptive trees due to the lack of proper Hessian values. In the new version, XGBoost can scale the leaf value with the learning rate accordingly. (#8866)

### Export cut value

Using the Python or the C package, users can export the quantile values (not to be confused with quantile regression) used for the `hist` tree method. (#9356)

### Column-based split and federated learning

We made progress on column-based split for federated learning. In 2.0, the `approx`, `hist`, and vector-leaf `hist` tree methods can all work with column-based data split, along with support for vertical federated learning. Work on GPU support is still ongoing, stay tuned. (#8576, #8468, #8442, #8847, #8811, #8985, #8623, #8568, #8828, #8932, #9081, #9102, #9103, #9124, #9120, #9367, #9370, #9343, #9171, #9346, #9270, #9244, #8494, #8434, #8742, #8804, #8710, #8676, #9020, #9002, #9058, #9037, #9018, #9295, #9006, #9300, #8765, #9365, #9060)

### PySpark

After the initial introduction of the PySpark interface, it has gained some new features and optimizations in 2.0.

- GPU-based prediction. (#9292, #9542)
- Optimization for data initialization by avoiding the stack operation. (#9088)
- Support predicting feature contributions. (#8633)
- Python typing support. (#9156, #9172, #9079, #8375)
- `use_gpu` is deprecated. The `device` parameter is preferred.
- Update eval_metric validation to support a list of strings (#8826)
- Improved logs for training (#9449)
- Maintenance, including refactoring and document updates (#8324, #8465, #8605, #9202, #9460, #9302, #8385, #8630, #8525, #8496)
- Fix for GPU setup. (#9495)

### Other General New Features

Here's a list of new features that don't have their own section and yet are general to all language bindings.

- Use array interface for CSC matrix.
  This helps XGBoost use a consistent number of threads and align the interface of the CSC matrix with other interfaces. In addition, memory usage is likely to decrease with CSC input thanks to on-the-fly type conversion. (#8672)
- CUDA compute 90 is now part of the default build. (#9397)

### Other General Optimization

These optimizations are general to all language bindings. For language-specific optimization, please visit the corresponding sections.

- Performance for input with `array_interface` on CPU (like `numpy`) is significantly improved. (#9090)
- Some optimization with CUDA for data initialization. (#9199, #9209, #9144)
- Use the latest thrust policy to prevent synchronizing GPU devices. (#9212)
- XGBoost now uses a per-thread CUDA stream, which prevents synchronization with other streams. (#9416, #9396, #9413)

### Notable breaking change

Other than the aforementioned change with the `device` parameter, here's a list of breaking changes affecting all packages.

- Users must specify the format for text input (#9077). However, we suggest using third-party data structures such as `numpy.ndarray` instead of relying on text inputs. See https://github.com/dmlc/xgboost/issues/9472 for more info.

### Notable bug fixes

Some noteworthy bug fixes that are not related to specific language bindings are listed in this section.

- Some language environments use a different thread to perform garbage collection, which breaks the thread-local cache used in XGBoost. XGBoost 2.0 implements a new thread-safe cache using a lightweight lock to replace the thread-local cache. (#8851)
- Fix model IO by clearing the prediction cache. (#8904)
- `inf` is checked during data construction. (#8911)
- Preserve order of saved updaters configuration. Usually, this is not an issue unless the `updater` parameter is used instead of the `tree_method` parameter (#9355)
- Fix GPU memory allocation issue with categorical splits. (#9529)
- Handle escape sequences like `\t\n` in feature names for JSON model dump. (#9474)
- Normalize file paths for model IO and text input. This handles short paths on Windows and paths that contain `~` on Unix (#9463). In addition, all path inputs are required to be encoded in UTF-8 (#9448, #9443)
- Fix integer overflow on H100. (#9380)
- Fix weighted sketching on GPU with categorical features. (#9341)
- Fix metric serialization. The bug might cause some of the metrics to be dropped during evaluation. (#9405)
- Fix compilation errors on MSVC x86 targets (#8823)
- Pick up the dmlc-core fix for the CSV parser. (#8897)

### Documentation

Aside from documents for new features, we have many smaller updates to improve user experience, from troubleshooting guides to typo fixes.

- Explain CPU/GPU interop. (#8450)
- Guide to troubleshoot NCCL errors. (#8943, #9206)
- Add a note for rabit port selection. (#8879)
- How to build the docs using conda (#9276)
- Explain how to obtain reproducible results on distributed systems. (#8903)
- Fixes and small updates to documents and demonstration scripts. (#8626, #8436, #8995, #8907, #8923, #8926, #9358, #9232, #9201, #9469, #9462, #9458, #8543, #8597, #8401, #8784, #9213, #9098, #9008, #9223, #9333, #9434, #9435, #9415, #8773, #8752, #9291, #9549)

### Python package

* New Features and Improvements
  - Support primitive types of pyarrow-backed pandas dataframe. (#8653)
  - Warning messages emitted by XGBoost are now emitted using Python warnings.
    (#9387)
  - Users can now format the value printed near the bars on the `plot_importance` plot (#8540)
  - XGBoost has improved half-type support (float16) with pandas, cupy, and cuDF. With GPU input, the handling is through the CUDA `__half` type, and no data copy is made. (#8487, #9207, #8481)
  - Support `Series` and Python primitive types in `inplace_predict` and `QuantileDMatrix` (#8547, #8542)
  - Support all pandas' nullable integer types. (#8480)
  - Custom metric with the scikit-learn interface now supports `sample_weight`. (#8706)
  - Enable installation of the Python package with a system lib in a virtual environment (#9349)
  - Raise if expected workers are not alive in `xgboost.dask.train` (#9421)
* Optimization
  - Cache transformed data in `QuantileDMatrix` for efficiency. (#8666, #9445)
  - Take datatable as row-major input. (#8472)
  - Remove unnecessary conversions between data structures (#8546)
* Adopt modern Python packaging conventions (PEP 517, PEP 518, PEP 621)
  - XGBoost adopted the modern Python packaging conventions. The old setup script `setup.py` is now replaced with the new configuration file `pyproject.toml`. Along with this, XGBoost now supports Python 3.11. (#9021, #9112, #9114, #9115) Consult the latest documentation for the updated instructions to build and install XGBoost.
* Fixes
  - `DataIter` now accepts only keyword arguments. (#9431)
  - Fix empty DMatrix with categorical features. (#8739)
  - Convert `DaskXGBClassifier.classes_` to an array (#8452)
  - Define `best_iteration` only if early stopping is used, to be consistent with documented behavior. (#9403)
  - Make feature validation immutable. (#9388)
* Breaking changes
  - As discussed in the new `device` parameter section, the `predictor` parameter is now removed. (#9129)
  - Remove support for single-string feature info. Feature types and names should be a sequence of strings (#9401)
  - Remove parameters in the `save_model` call for the scikit-learn interface. (#8963)
  - Remove `ntree_limit` in the Python package. This has been deprecated in previous versions. (#8345)
* Maintenance, including formatting and refactoring along with type hints.
  - More consistent use of `black` and `isort` for code formatting (#8420, #8748, #8867)
  - Improved type support. Most of the type changes happen in the PySpark module; here, we list the remaining changes. (#8444, #8617, #9197, #9005)
  - Set `enable_categorical` to True in predict. (#8592)
  - Some refactoring and updates for tests (#8395, #8372, #8557, #8379, #8702, #9459, #9316, #8446, #8695, #8409, #8993, #9480)
* Documentation
  - Add introduction and notes for the sklearn interface. (#8948)
  - Demo for using dask for hyper-parameter optimization. (#8891)
  - Document all supported Python input types. (#8643)
  - Other documentation updates (#8944, #9304)

### R package

- Use the new data consumption interface for CSR and CSC. This provides better control over the number of threads and improves performance. (#8455, #8673)
- Accept multiple evaluation metrics during training. (#8657)
- Fix integer inputs with `NA`. (#9522)
- Some refactoring for the R package (#8545, #8430, #8614, #8624, #8613, #9457, #8689, #8563, #9461, #8647, #8564, #8565, #8736, #8610, #8609, #8599, #8704, #9456, #9450, #9476, #9477, #9481). Special thanks to @jameslamb.
- Document updates (#8886, #9323, #9437, #8998)

### JVM packages

The following are changes specific to various JVM-based packages.

- Stop using Rabit in prediction (#9054)
- Set feature_names and feature_types in jvm-packages.
  This is to prepare support for categorical features (#9364)
- Scala 2.13 support. (#9099)
- Change training stage from `ResultStage` to `ShuffleMapStage` (#9423)
- Automatically set the max/min direction for the best score during early stopping. (#9404)
* Revised support for `flink` (#9046)
* Breaking changes
  - The Scala-based tracker is removed. (#9078, #9045)
  - Change `DeviceQuantileDmatrix` into `QuantileDMatrix` (#8461)
* Maintenance (#9253, #9166, #9395, #9389, #9224, #9233, #9351, #9479)
* CI bot PRs
  We employed GitHub's Dependabot to help us keep the dependencies up-to-date for JVM packages. With the help of the bot, we have cleared up all the dependencies that were lagging behind (#8501, #8507). Here's a list of dependency update PRs, including those made by the bot (#8456, #8560, #8571, #8561, #8562, #8600, #8594, #8524, #8509, #8548, #8549, #8533, #8521, #8534, #8532, #8516, #8503, #8531, #8530, #8518, #8512, #8515, #8517, #8506, #8504, #8502, #8629, #8815, #8813, #8814, #8877, #8876, #8875, #8874, #8873, #9049, #9070, #9073, #9039, #9083, #8917, #8952, #8980, #8973, #8962, #9252, #9208, #9131, #9136, #9219, #9160, #9158, #9163, #9184, #9192, #9265, #9268, #8882, #8837, #8662, #8661, #8390, #9056, #8508, #8925, #8920, #9149, #9230, #9097, #8648, #9203, #8593).

### Maintenance

Maintenance work includes refactoring and fixing small issues that don't affect end users. (#9256, #8627, #8756, #8735, #8966, #8864, #8747, #8892, #9057, #8921, #8949, #8941, #8942, #9108, #9125, #9155, #9153, #9176, #9447, #9444, #9436, #9438, #9430, #9200, #9210, #9055, #9014, #9004, #8999, #9154, #9148, #9283, #9246, #8888, #8900, #8871, #8861, #8858, #8791, #8807, #8751, #8703, #8696, #8693, #8677, #8686, #8665, #8660, #8386, #8371, #8410, #8578, #8574, #8483, #8443, #8454, #8733)

### CI

- Build pip wheel with RMM support (#9383)
- Other CI updates including updating dependencies and work on the CI infrastructure. (#9464, #9428, #8767, #9394, #9278, #9214, #9234, #9205, #9034, #9104, #8878, #9294, #8625, #8806, #8741, #8707, #8381, #8382, #8388, #8402, #8397, #8445, #8602, #8628, #8583, #8460, #9544)

## 1.7.6 (2023 Jun 16)

This is a patch release for bug fixes. The CRAN package for the R binding is kept at 1.7.5.

### Bug Fixes

* Fix distributed training with mixed dense and sparse partitions. (#9272)
* Fix monotone constraints on CPU with large trees. (#9122)
* [spark] Make the spark model have the same UID as its estimator (#9022)
* Optimize prediction with `QuantileDMatrix`. (#9096)

### Document

* Improve doxygen (#8959)
* Update the cuDF pip index URL. (#9106)

### Maintenance

* Fix tests with pandas 2.0. (#9014)

## 1.7.5 (2023 Mar 30)

This is a patch release for bug fixes.

* The C++ requirement is updated to C++17, along with which CUDA 11.8 is used as the default CTK. (#8860, #8855, #8853)
* Fix import for the pyspark ranker. (#8692)
* Fix Windows binary wheel to be compatible with Poetry (#8991)
* Fix GPU hist with column sampling. (#8850)
* Make sure iterative DMatrix is properly initialized. (#8997)
* [R] Update link in document. (#8998)

## 1.7.4 (2023 Feb 16)

This is a patch release for bug fixes.

* [R] Fix OpenMP detection on macOS. (#8684)
* [Python] Make sure input numpy array is aligned. (#8690)
* Fix feature interaction with column sampling in the gpu_hist evaluator. (#8754)
* Fix GPU L1 error. (#8749)
* [PySpark] Fix feature types param (#8772)
* Fix ranking with quantile dmatrix and group weight. (#8762)

## 1.7.3 (2023 Jan 6)

This is a patch release for bug fixes.
* [Breaking] The XGBoost sklearn estimator method `get_params` no longer returns internally configured values. (#8634)
* Fix the linalg iterator, which may crash the L1 error. (#8603)
* Fix loading a pickled GPU model with a CPU-only XGBoost build. (#8632)
* Fix inference with unseen categories with categorical features. (#8591, #8602)
* CI fixes. (#8620, #8631, #8579)

## v1.7.2 (2022 Dec 8)

This is a patch release for bug fixes.

* Work with newer thrust and libcudacxx (#8432)
* Support null value in CUDA array interface namespace. (#8486)
* Use `getsockname` instead of `SO_DOMAIN` on AIX. (#8437)
* [pyspark] Make QDM optional based on a cuDF check (#8471)
* [pyspark] Sort qid for SparkRanker. (#8497)
* [dask] Properly await the async method client.wait_for_workers. (#8558)
* [R] Fix CRAN test notes. (#8428)
* [doc] Fix outdated document [skip ci]. (#8527)
* [CI] Fix github action mismatched glibcxx. (#8551)

## v1.7.1 (2022 Nov 3)

This is a patch release to incorporate the following hotfix:

* Add back xgboost.rabit for backwards compatibility (#8411)

## v1.7.0 (2022 Oct 20)

We are excited to announce the feature-packed XGBoost 1.7 release. The release note will walk through some of the major new features first, then summarize other improvements and language-binding-specific changes.

### PySpark

XGBoost 1.7 features initial support for PySpark integration. The new interface is adapted from the existing PySpark XGBoost interface developed by Databricks, with additional features like `QuantileDMatrix` and the rapidsai plugin (GPU pipeline) support. The new Spark XGBoost Python estimators not only benefit from PySpark ml facilities for powerful distributed computing but also enjoy the rest of the Python ecosystem. Users can define a custom objective, callbacks, and metrics in Python and use them with this interface on distributed clusters. The support is labeled as experimental, with more features to come in future releases. For a brief introduction, please visit the tutorial on XGBoost's [document page](https://xgboost.readthedocs.io/en/latest/tutorials/spark_estimator.html). (#8355, #8344, #8335, #8284, #8271, #8283, #8250, #8231, #8219, #8245, #8217, #8200, #8173, #8172, #8145, #8117, #8131, #8088, #8082, #8085, #8066, #8068, #8067, #8020, #8385)

Due to its initial support status, the new interface has some limitations; categorical features and multi-output models are not yet supported.

### Development of categorical data support

More progress on the experimental support for categorical features. In 1.7, XGBoost can handle missing values in categorical features and features a new parameter `max_cat_threshold`, which limits the number of categories that can be used in the split evaluation. The parameter is enabled when the partitioning algorithm is used and helps prevent over-fitting. Also, the sklearn interface can now accept the `feature_types` parameter to use data types other than dataframe for categorical features. (#8280, #7821, #8285, #8080, #7948, #7858, #7853, #8212, #7957, #7937, #7934)

### Experimental support for federated learning and new communication collective

An exciting addition to XGBoost is the experimental federated learning support. The federated learning is implemented with a gRPC federated server that aggregates allreduce calls, and federated clients that train on local data and use existing tree methods (approx, hist, gpu_hist).
Currently, this only supports horizontal federated learning (samples are split across participants, and each participant has all the features and labels). Future plans include vertical federated learning (features split across participants) and stronger privacy guarantees with homomorphic encryption and differential privacy. See the [demo with NVFlare integration](demo/nvflare/README.md) for example usage with NVFlare.

As part of the work, XGBoost 1.7 has replaced the old rabit module with the new collective module as the network communication interface, with added support for runtime backend selection. In previous versions, the backend was defined at compile time and could not be changed once built. In this new release, users can choose between `rabit` and `federated`. (#8029, #8351, #8350, #8342, #8340, #8325, #8279, #8181, #8027, #7958, #7831, #7879, #8257, #8316, #8242, #8057, #8203, #8038, #7965, #7930, #7911) The feature is available in the public PyPI binary package for testing.

### Quantile DMatrix

Before 1.7, XGBoost had an internal data structure called `DeviceQuantileDMatrix` (and its distributed version). We have now extended its support to CPU and renamed it to `QuantileDMatrix`. This data structure is used for optimizing memory usage for the `hist` and `gpu_hist` tree methods. The new feature helps reduce CPU memory usage significantly, especially for dense data. The new `QuantileDMatrix` can be initialized from both CPU and GPU data, and regardless of where the data comes from, the constructed instance can be used by both the CPU algorithm and the GPU algorithm, including training and prediction (with some overhead of conversion if the device of the data and the training algorithm doesn't match). Also, a new parameter `ref` is added to `QuantileDMatrix`, which can be used to construct validation/test datasets. Lastly, it's set as the default in the scikit-learn interface when a supported tree method is specified by users. (#7889, #7923, #8136, #8215, #8284, #8268, #8220, #8346, #8327, #8130, #8116, #8103, #8094, #8086, #7898, #8060, #8019, #8045, #7901, #7912, #7922)

### Mean absolute error

The mean absolute error is a new member of the collection of objectives in XGBoost. It's noteworthy since MAE has zero Hessian value, which is unusual for XGBoost, as XGBoost relies on Newton optimization. Without valid Hessian values, the convergence speed can be slow. As part of the support for MAE, we added line searches into the XGBoost training algorithm to overcome the difficulty of training without valid Hessian values. In the future, we will extend the line search to other objectives where it's appropriate, for faster convergence speed. (#8343, #8107, #7812, #8380)

### XGBoost on Browser

With the help of the [pyodide](https://github.com/pyodide/pyodide) project, you can now run XGBoost on browsers. (#7954, #8369)

### Experimental IPv6 Support for Dask

With the growing adoption of the new internet protocol, XGBoost joined the club. In the latest release, the Dask interface can be used on IPv6 clusters; see XGBoost's Dask tutorial for details. (#8225, #8234)

### Optimizations

We have new optimizations for both the `hist` and `gpu_hist` tree methods to make XGBoost's training even more efficient.

* Hist
  Hist now supports optional by-column histogram build, which is automatically configured based on various conditions of the input data. This helps the XGBoost CPU hist algorithm scale better with different shapes of training datasets. (#8233, #8259).
  Also, the build histogram kernel can now better utilize CPU registers (#8218)
* GPU Hist
  GPU hist performance is significantly improved for wide datasets. GPU hist now supports batched node build, which reduces kernel latency and increases throughput. The improvement is particularly significant when growing deep trees with the default ``depthwise`` policy. (#7919, #8073, #8051, #8118, #7867, #7964, #8026)

### Breaking Changes

Breaking changes made in the 1.7 release are summarized below.

- The `grow_local_histmaker` updater is removed. This updater is rarely used in practice and has no test. We decided to remove it and have XGBoost focus on other, more efficient algorithms. (#7992, #8091)
- The single-precision histogram is removed due to its lack of accuracy caused by significant floating point error. In some cases the error can be difficult to detect due to log-scale operations, which makes the parameter dangerous to use. (#7892, #7828)
- Deprecated CUDA architectures are no longer supported in the release binaries. (#7774)
- As part of the federated learning development, the `rabit` module is replaced with the new `collective` module. It's a drop-in replacement with added runtime backend selection; see the federated learning section for more details (#8257)

### General new features and improvements

Before diving into package-specific changes, some general new features other than those listed at the beginning are summarized here.

* Users of `DMatrix` and `QuantileDMatrix` can get the data from XGBoost. In previous versions, only getters for meta info like labels were available. The new method is available in Python (`DMatrix::get_data`) and C. (#8269, #8323)
* In previous versions, the GPU histogram tree method may generate phantom gradients for missing values due to floating point error. We fixed such an error in this release, and XGBoost is much better equipped to handle floating point errors when training on GPU. (#8274, #8246)
* Parameter validation is no longer experimental. (#8206)
* C pointer parameters and JSON parameters are vigorously checked. (#8254, #8254)
* Improved handling of JSON model input. (#7953, #7918)
* Support IBM i OS (#7920, #8178)

### Fixes

Some noteworthy bug fixes that are not related to specific language bindings are listed in this section.

* Rename the misspelled config parameter for pseudo-Huber (#7904)
* Fix feature weights with nested column sampling. (#8100)
* Fix loading DMatrix binary in distributed env. (#8149)
* Force auc.cc to be statically linked for unusual compiler platforms. (#8039)
* New logic for detecting libomp on macos (#8384).

### Python Package

* Python 3.8 is now the minimum required Python version. (#8071)
* More progress on type hint support. Except for the new PySpark interface, the XGBoost module is fully typed. (#7742, #7945, #8302, #7914, #8052)
* XGBoost now validates the feature names in `inplace_predict`, which also affects the predict function in scikit-learn estimators as it uses `inplace_predict` internally. (#8359)
* Users can now get the data from `DMatrix` using `DMatrix::get_data` or `QuantileDMatrix::get_data`.
* Show `libxgboost.so` path in build info. (#7893)
* Raise import error when using the sklearn module while scikit-learn is missing. (#8049)
* Use `config_context` in the sklearn interface. (#8141)
* Validate features for inplace prediction. (#8359)
* Pandas dataframe handling is refactored to reduce data fragmentation. (#7843)
* Support more pandas nullable types (#8262)
* Remove pyarrow workaround.
  (#7884)
* Binary wheel size
  We aim to enable as many features as possible in XGBoost's default binary distribution on PyPI (package installed with pip), but there's an upper limit on the size of the binary wheel. In 1.7, XGBoost reduces the size of the wheel by pruning unused CUDA architectures. (#8179, #8152, #8150)
* Fixes
  Some noteworthy fixes are listed here:
  - Fix the Dask interface with the latest cupy. (#8210)
  - Check cuDF lazily to avoid potential errors with cuda-python. (#8084)
* Fix potential error in DMatrix constructor on 32-bit platforms. (#8369)
* Maintenance work
  - The linter script is moved from dmlc-core to XGBoost with added support for formatting, mypy, and parallel run, along with some fixes (#7967, #8101, #8216)
  - We now require the use of `isort` and `black` for selected files. (#8137, #8096)
  - Code cleanups. (#7827)
  - Deprecate `use_label_encoder` in XGBClassifier. The label encoder has already been deprecated and removed in the previous version. These changes only affect the indicator parameter (#7822)
  - Remove the use of distutils. (#7770)
  - Refactor and fixes for tests (#8077, #8064, #8078, #8076, #8013, #8010, #8244, #7833)
* Documents
  - [dask] Fix potential error in demo. (#8079)
  - Improved documentation for the ranker. (#8356, #8347)
  - Indicate lack of py-xgboost-gpu on Windows (#8127)
  - Clarification for feature importance. (#8151)
  - Simplify the Python getting started example (#8153)

### R Package

We summarize improvements for the R package briefly here:

* Feature info including names and types are now passed to DMatrix in preparation for categorical feature support. (#804)
* XGBoost 1.7 can now gracefully load old R models from RDS for better compatibility with third-party tuning libraries (#7864)
* The R package can now be built with parallel compilation, along with fixes for warnings in CRAN tests. (#8330)
* Emit an error early if DiagrammeR is missing (#8037)
* Fix the R package Windows build. (#8065)

### JVM Packages

The consistency between JVM packages and other language bindings is greatly improved in 1.7; improvements range from the model serialization format to the default values of hyper-parameters.

* The Java package now supports feature names and feature types for DMatrix in preparation for categorical feature support. (#7966)
* Models trained by the JVM packages can now be safely used with other language bindings. (#7896, #7907)
* Users can specify the model format when saving models with a stream. (#7940, #7955)
* The default values for training parameters are now sourced from XGBoost directly, which helps JVM packages be consistent with other packages. (#7938)
* Set the correct objective if the user doesn't explicitly set it (#7781)
* Auto-detection of MUSL is replaced by system properties (#7921)
* Improved error message for launching the tracker. (#7952, #7968)
* Fix a race condition in parameter configuration. (#8025)
* [Breaking] `timeoutRequestWorkers` is now removed. With the support for barrier mode, this parameter is no longer needed. (#7839)
* Dependency updates. (#7791, #8157, #7801, #8240)

### Documents

- Document for the C interface is greatly improved and is now displayed at the [sphinx document page](https://xgboost.readthedocs.io/en/latest/c.html). Thanks to the breathe project, you can view the C API just like the Python API. (#8300)
- We now avoid using the XGBoost internal text parser in demos and recommend users use dedicated libraries for loading data whenever it's feasible.
- Python survival training demos are now displayed at [sphinx gallery](https://xgboost.readthedocs.io/en/latest/python/survival-examples/index.html). (#8328)
- Some typos, links, format, and grammar fixes. (#7800, #7832, #7861, #8099, #8163, #8166, #8229, #8028, #8214, #7777, #7905, #8270, #8309, d70e59fef, #7806)
- Updated winning solution under readme.md. (#7862)
- New security policy. (#8360)
- GPU document is overhauled as we consider CUDA support to be feature-complete. (#8378)

### Maintenance

* Code refactoring and cleanups. (#7850, #7826, #7910, #8332, #8204)
* Reduce compiler warnings. (#7768, #7916, #8046, #8059, #7974, #8031, #8022)
* Compiler workarounds. (#8211, #8314, #8226, #8093)
* Dependencies update. (#8001, #7876, #7973, #8298, #7816)
* Remove warnings emitted in previous versions. (#7815)
* Small fixes occurred during development. (#8008)

### CI and Tests

* We overhauled the CI infrastructure to reduce the CI cost and ease the maintenance burden. Jenkins is replaced with buildkite for better automation, with which finer control of test runs is implemented to reduce overall cost. Also, we refactored some of the existing tests to reduce their runtime, dropped the size of docker images, and removed multi-GPU C++ tests. Lastly, `pytest-timeout` is added as an optional dependency for running Python tests to keep the test time in check. (#7772, #8291, #8286, #8276, #8306, #8287, #8243, #8313, #8235, #8288, #8303, #8142, #8092, #8333, #8312, #8348)
* New documents for how to reproduce the CI environment. (#7971, #8297)
* Improved automation for JVM release. (#7882)
* GitHub Action security-related updates. (#8263, #8267, #8360)
* Other fixes and maintenance work. (#8154, #7848, #8069, #7943)
* Small updates and fixes to GitHub action pipelines. (#8364, #8321, #8241, #7950, #8011)

## v1.6.1 (2022 May 9)

This is a patch release for bug fixes and Spark barrier mode support. The R package is unchanged.

### Experimental support for categorical data

- Fix segfault when the number of samples is smaller than the number of categories. (https://github.com/dmlc/xgboost/pull/7853)
- Enable partition-based split for all model types. (https://github.com/dmlc/xgboost/pull/7857)

### JVM packages

We replaced the old parallelism tracker with Spark barrier mode to improve the robustness of the JVM package and fix the GPU training pipeline.

- Fix GPU training pipeline quantile synchronization. (#7823, #7834)
- Use barrier mode in the Spark package. (https://github.com/dmlc/xgboost/pull/7836, https://github.com/dmlc/xgboost/pull/7840, https://github.com/dmlc/xgboost/pull/7845, https://github.com/dmlc/xgboost/pull/7846)
- Fix shared object loading on some platforms. (https://github.com/dmlc/xgboost/pull/7844)

## v1.6.0 (2022 Apr 16)

After a long period of development, XGBoost v1.6.0 is packed with many new features and improvements. We summarize them in the following sections, starting with an introduction to some major new features, then moving on to language binding specific changes including new features and notable bug fixes for that binding.

### Development of categorical data support

This version of XGBoost features new improvements and full coverage of experimental categorical data support in the Python and C packages with tree models. `hist`, `approx`, and `gpu_hist` all now support training with categorical data. Also, partition-based categorical split is introduced in this release. This split type was first introduced by LightGBM in the context of gradient boosting.
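As a quick orientation before the details below, here is a minimal sketch of training with categorical data (the dataframe and parameter values are illustrative; categorical columns use pandas' `category` dtype and are enabled via `enable_categorical`):

```python
import pandas as pd
import xgboost as xgb

# Categorical features must use the pandas category dtype.
X = pd.DataFrame({"color": pd.Categorical(["red", "green", "red", "blue"])})
y = [1.0, 0.0, 1.0, 0.0]

dtrain = xgb.DMatrix(X, label=y, enable_categorical=True)
booster = xgb.train({"tree_method": "hist"}, dtrain, num_boost_round=4)
```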
The previous XGBoost release supported one-hot split where the splitting criterion is of the form `x \in {c}`, i.e. the categorical feature `x` is tested against a single candidate. The new release allows for more expressive conditions: `x \in S` where the categorical feature `x` is tested against multiple candidates. Moreover, it is now possible to use any of the tree algorithms (`hist`, `approx`, `gpu_hist`) when creating categorical splits. For more information, please see our tutorial on [categorical data](https://xgboost.readthedocs.io/en/latest/tutorials/categorical.html), along with examples linked on that page. (#7380, #7708, #7695, #7330, #7307, #7322, #7705, #7652, #7592, #7666, #7576, #7569, #7529, #7575, #7393, #7465, #7385, #7371, #7745, #7810)

In the future, we will continue to improve categorical data support with new features and optimizations. Also, we are looking forward to bringing the feature beyond the Python binding; contributions and feedback are welcome! Lastly, given its experimental status, the behavior might be subject to change, especially the default values of related hyper-parameters.

### Experimental support for multi-output model

XGBoost 1.6 features initial support for the multi-output model, which includes multi-output regression and multi-label classification. Along with this, the XGBoost classifier has proper support for base margin without the need for the user to flatten the input. In this initial support, XGBoost builds one model for each target, similar to the sklearn meta estimator. For more details, please see our [quick introduction](https://xgboost.readthedocs.io/en/latest/tutorials/multioutput.html). (#7365, #7736, #7607, #7574, #7521, #7514, #7456, #7453, #7455, #7434, #7429, #7405, #7381)

### External memory support

External memory support for both the `approx` and `hist` tree methods is considered feature-complete in XGBoost 1.6. Building upon the iterator-based interface introduced in the previous version, both `hist` and `approx` now iterate over each batch of data during training and prediction. In previous versions, `hist` concatenated all the batches into an internal representation, which is removed in this version. As a result, users can expect higher scalability in terms of data size but might experience lower performance due to disk IO. (#7531, #7320, #7638, #7372)

### Rewritten approx

The `approx` tree method is rewritten based on the existing `hist` tree method. The rewrite closes the feature gap between `approx` and `hist` and improves the performance. Now the behavior of `approx` should be more aligned with `hist` and `gpu_hist`. Here is a list of user-visible changes:

- Supports both `max_leaves` and `max_depth`.
- Supports `grow_policy`.
- Supports monotonic constraints.
- Supports feature weights.
- Use `max_bin` to replace `sketch_eps`.
- Supports categorical data.
- Faster performance for many of the datasets.
- Improved performance and robustness for distributed training.
- Supports prediction cache.
- Significantly better performance for external memory when the `depthwise` policy is used.

### New serialization format

Based on the existing JSON serialization format, we introduce UBJSON support as a more efficient alternative. Both formats will be available in the future and we plan to gradually [phase out](https://github.com/dmlc/xgboost/issues/7547) support for the old binary model format. Users can opt to use the different formats in the serialization function by providing the file extension `json` or `ubj`.
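For example (a minimal sketch; the file names are arbitrary):

```python
import numpy as np
import xgboost as xgb

dtrain = xgb.DMatrix(np.random.rand(10, 4), label=np.random.rand(10))
booster = xgb.train({}, dtrain, num_boost_round=2)

booster.save_model("model.json")  # JSON text format
booster.save_model("model.ubj")   # binary UBJSON format
```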
Also, the `save_raw` function in all supported language bindings gains a new parameter for exporting the model in different formats; available options are `json`, `ubj`, and `deprecated`. See the document for the language binding you are using for details. Lastly, the default internal serialization format is set to UBJSON, which affects Python pickle and R RDS. (#7572, #7570, #7358, #7571, #7556, #7549, #7416)

### General new features and improvements

Aside from the major new features mentioned above, some others are summarized here:

* Users can now access the build information of the XGBoost binary in the Python and C interfaces. (#7399, #7553)
* Auto-configuration of `seed_per_iteration` is removed; distributed training should now generate results closer to single-node training when sampling is used. (#7009)
* A new parameter `huber_slope` is introduced for the `Pseudo-Huber` objective.
* During source build, XGBoost can choose cub in the system path automatically. (#7579)
* XGBoost now honors the CPU count from CFS, which is usually set in docker environments. (#7654, #7704)
* The metric `aucpr` is rewritten for better performance and GPU support. (#7297, #7368)
* Metric calculation is now performed in double precision. (#7364)
* XGBoost no longer mutates the global OpenMP thread limit. (#7537, #7519, #7608, #7590, #7589, #7588, #7687)
* The default behavior of `max_leaves` and `max_depth` is now unified. (#7302, #7551)
* CUDA fat binary is now compressed. (#7601)
* Deterministic results for evaluation metrics and the linear model. In previous versions of XGBoost, evaluation results might differ slightly for each run due to parallel reduction for floating-point values, which is now addressed. (#7362, #7303, #7316, #7349)
* XGBoost now uses double for GPU Hist node sum, which improves the accuracy of `gpu_hist`. (#7507)

### Performance improvements

Most of the performance improvements are integrated into other refactors during feature developments. The `approx` tree method should see a significant performance gain for many datasets as mentioned in the previous section, while the `hist` tree method also enjoys improved performance with the removal of the internal `pruner`, along with some other refactoring. Lastly, `gpu_hist` no longer synchronizes the device during training. (#7737)

### General bug fixes

This section lists bug fixes that are not specific to any language binding.

* `num_parallel_tree` is now a model parameter instead of a training hyper-parameter, which fixes model IO with random forest. (#7751)
* Fixes in the CMake script for exporting configuration. (#7730)
* XGBoost can now handle unsorted sparse input. This includes text file formats like libsvm and scipy sparse matrices where the column index might not be sorted. (#7731)
* Fix tree param feature type; this affects inputs where the number of columns is greater than the maximum value of int32. (#7565)
* Fix external memory with gpu_hist and subsampling. (#7481)
* Check the number of trees in inplace predict; this avoids a potential segfault when an incorrect value for `iteration_range` is provided. (#7409)
* Fix unstable results in Cox regression. (#7756)

### Changes in the Python package

Other than the changes in Dask, the XGBoost Python package gained some new features and improvements along with small bug fixes.

* Python 3.7 is now the minimum required Python version. (#7682)
* Pre-built binary wheel for Apple Silicon. (#7621, #7612, #7747) Apple Silicon users will now be able to run `pip install xgboost` to install XGBoost.
* macOS users no longer need to install `libomp` from Homebrew, as the XGBoost wheel now bundles the `libomp.dylib` library.
* There are new parameters for users to specify a custom metric with new behavior. XGBoost can now output transformed prediction values when a custom objective is not supplied. See our explanation in the [tutorial](https://xgboost.readthedocs.io/en/latest/tutorials/custom_metric_obj.html#reverse-link-function) for details.
* For the sklearn interface, following the estimator guideline from scikit-learn, all parameters in `fit` that are not related to input data are moved into the constructor and can be set by `set_params`. (#6751, #7420, #7375, #7369)
* Apache Arrow format is now supported, which can bring better performance to users' pipelines. (#7512)
* Pandas nullable types are now supported. (#7760)
* A new function `get_group` is introduced for `DMatrix` to allow users to get the group information in the custom objective function. (#7564)
* More training parameters are exposed in the sklearn interface instead of relying on `**kwargs`. (#7629)
* A new attribute `feature_names_in_` is defined for all sklearn estimators like `XGBRegressor` to follow the convention of sklearn. (#7526)
* More work on Python type hints. (#7432, #7348, #7338, #7513, #7707)
* Support the latest pandas Index type. (#7595)
* Fix for "Feature shape mismatch" error on the s390x platform. (#7715)
* Fix using feature names for constraints with multiple groups. (#7711)
* We clarified the behavior of the callback function when it contains mutable states. (#7685)
* Lastly, there are some code cleanups and maintenance work. (#7585, #7426, #7634, #7665, #7667, #7377, #7360, #7498, #7438, #7667, #7752, #7749, #7751)

### Changes in the Dask interface

* The Dask module now supports user-supplied host IP and port address of the scheduler node. Please see the [introduction](https://xgboost.readthedocs.io/en/latest/tutorials/dask.html#troubleshooting) and [API document](https://xgboost.readthedocs.io/en/latest/python/python_api.html#optional-dask-configuration) for reference. (#7645, #7581)
* Internal `DMatrix` construction in dask now honors the thread configuration. (#7337)
* A fix for `nthread` configuration using the Dask sklearn interface. (#7633)
* The Dask interface can now handle empty partitions. An empty partition is different from an empty worker: the latter refers to the case when a worker has no partition of an input dataset, while the former refers to partitions on a worker that have zero size. (#7644, #7510)
* Scipy sparse matrices are supported as Dask array partitions. (#7457)
* The Dask interface is no longer considered experimental. (#7509)

### Changes in the R package

This section summarizes the new features, improvements, and bug fixes to the R package.

* `load.raw` can optionally construct a booster as the return value. (#7686)
* Fix parsing decision stumps, which affects both transforming text representation to data table and plotting. (#7689)
* Implement feature weights. (#7660)
* Some improvements for complying with the CRAN release policy. (#7672, #7661, #7763)
* Support CSR data for predictions. (#7615)
* Document updates. (#7263, #7606)
* New maintainer for the CRAN package. (#7691, #7649)
* Handle non-standard installation of toolchain on macOS. (#7759)

### Changes in JVM-packages

Some new features for JVM-packages are introduced for a more integrated GPU pipeline and better compatibility with musl-based Linux. Aside from this, we have a few notable bug fixes.
* Users can specify the tracker IP address for training, which helps run XGBoost in restricted network environments. (#7808)
* Add support for detecting musl-based Linux. (#7624)
* Add `DeviceQuantileDMatrix` to the Scala binding. (#7459)
* Add Rapids plugin support; now more of the JVM pipeline can be accelerated by RAPIDS. (#7491, #7779, #7793, #7806)
* The setters for CPU and GPU are more aligned. (#7692, #7798)
* Control logging for early stopping. (#7326)
* Do not repartition when nWorker = 1. (#7676)
* Fix the prediction issue for `multi:softmax`. (#7694)
* Fix for serialization of custom objective and eval. (#7274)
* Update documentation about the Python tracker. (#7396)
* Remove jackson from dependency, which fixes CVE-2020-36518. (#7791)
* Some refactoring to the training pipeline for better compatibility between CPU and GPU. (#7440, #7401, #7789, #7784)
* Maintenance work. (#7550, #7335, #7641, #7523, #6792, #4676)

### Deprecation

Other than the changes in the Python package and serialization, we removed some features deprecated in previous releases. Also, as mentioned in the previous section, we plan to phase out the old binary format in future releases.

* Remove old warning in 1.3. (#7279)
* Remove label encoder deprecated in 1.3. (#7357)
* Remove old callback deprecated in 1.3. (#7280)
* Pre-built binaries will no longer support deprecated CUDA architectures, including sm35 and sm50. Users can continue to use these platforms with source build. (#7767)

### Documentation

This section lists some of the general changes to XGBoost's documentation; for language binding specific changes please visit the related sections.

* Document is overhauled to use the new RTD theme, along with integration of Python examples using Sphinx gallery. Also, we replaced most of the hard-coded URLs with sphinx references. (#7347, #7346, #7468, #7522, #7530)
* Small updates along with fixes for broken links, typos, etc. (#7684, #7324, #7334, #7655, #7628, #7623, #7487, #7532, #7500, #7341, #7648, #7311)
* Update document for GPU. [skip ci] (#7403)
* Document the status of RTD hosting. (#7353)
* Update document for building from source. (#7664)
* Add note about CRAN release. [skip ci] (#7395)

### Maintenance

This is a summary of maintenance work that is not specific to any language binding.

* Add CMake option to use /MD runtime. (#7277)
* Add clang-format configuration. (#7383)
* Code cleanups. (#7539, #7536, #7466, #7499, #7533, #7735, #7722, #7668, #7304, #7293, #7321, #7356, #7345, #7387, #7577, #7548, #7469, #7680, #7433, #7398)
* Improved tests with better coverage and latest dependencies. (#7573, #7446, #7650, #7520, #7373, #7723, #7611, #7771)
* Improved automation of the release process. (#7278, #7332, #7470)
* Compiler workarounds. (#7673)
* Change shebang used in CLI demo. (#7389)
* Update affiliation. (#7289)

### CI

Some fixes and updates to XGBoost's CI infrastructure. (#7739, #7701, #7382, #7662, #7646, #7582, #7407, #7417, #7475, #7474, #7479, #7472, #7626)

## v1.5.0 (2021 Oct 11)

This release comes with many exciting new features and optimizations, along with some bug fixes. We will describe the experimental categorical data support and the external memory interface independently. Package-specific new features will be listed in respective sections.

### Development on categorical data support

In version 1.3, XGBoost introduced an experimental feature for handling categorical data natively, without one-hot encoding. XGBoost can fit categorical splits in decision trees.
(Currently, the generated splits will be of the form `x \in {v}`, where the input is compared to a single category value. A future version of XGBoost will generate splits that compare the input against a list of multiple category values.)

Most of the other features, including prediction, SHAP value computation, feature importance, and model plotting, were revised to natively handle categorical splits. Also, all Python interfaces, including the native interface with and without quantized `DMatrix`, the scikit-learn interface, and the Dask interface, now accept categorical data with support for a wide range of data structures, including numpy/cupy arrays and cuDF/pandas/modin dataframes. In practice, the following are required for enabling categorical data support during training:

- Use the Python package.
- Use `gpu_hist` to train the model.
- Use the JSON model file format for saving the model.

Once the model is trained, it can be used with most of the features that are available on the Python package. For a quick introduction, see https://xgboost.readthedocs.io/en/latest/tutorials/categorical.html

Related PRs: (#7011, #7001, #7042, #7041, #7047, #7043, #7036, #7054, #7053, #7065, #7213, #7228, #7220, #7221, #7231, #7306)

* Next steps

  - Revise the CPU training algorithm to handle categorical data natively and generate categorical splits
  - Extend the CPU and GPU algorithms to generate categorical splits of the form `x \in S` where the input is compared with multiple category values. (#7081)

### External memory

This release features a brand-new interface and implementation for external memory (also known as out-of-core training). (#6901, #7064, #7088, #7089, #7087, #7092, #7070, #7216). The new implementation leverages the data iterator interface, which is currently used to create `DeviceQuantileDMatrix`. For a quick introduction, see https://xgboost.readthedocs.io/en/latest/tutorials/external_memory.html#data-iterator . During the development of this new interface, `lz4` compression was removed. (#7076). Please note that external memory support is still experimental and not ready for production use yet. All future development will focus on this new interface and users are advised to migrate. (You are using the old interface if you are using a URL suffix to use external memory.)

### New features in Python package

* Support the numpy array interface and all numeric types from numpy in `DMatrix` construction and `inplace_predict`. (#6998, #7003) Now XGBoost no longer makes a data copy when the input is a numpy array view.
* The early stopping callback in Python has a new `min_delta` parameter to control the stopping behavior; see the sketch after this list. (#7137)
* The Python package now supports calculating feature scores for the linear model, which is also available in the R package. (#7048)
* The Python interface now supports configuring constraints using feature names instead of feature indices.
* Type hint support for more Python code, including the scikit-learn interface and the rabit module. (#6799, #7240)
* Add a tutorial for XGBoost-Ray. (#6884)
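A minimal sketch of the new stopping control (the data and thresholds are illustrative):

```python
import numpy as np
import xgboost as xgb

X, y = np.random.rand(100, 4), np.random.rand(100)
dtrain = xgb.DMatrix(X, label=y)

# Stop if the metric fails to improve by at least `min_delta`
# for `rounds` consecutive iterations.
early_stop = xgb.callback.EarlyStopping(rounds=5, min_delta=1e-3)
booster = xgb.train(
    {"objective": "reg:squarederror"},
    dtrain,
    num_boost_round=100,
    evals=[(dtrain, "train")],
    callbacks=[early_stop],
)
```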
### New features in R package

* In 1.4 we introduced a new prediction function in the C API, which is used by the Python package. This release revises the R package to use the new prediction function as well. A new parameter `iteration_range` for the predict function is available, which can be used for specifying the range of trees for running prediction. (#6819, #7126)
* The R package now supports the `nthread` parameter in `DMatrix` construction. (#7127)

### New features in JVM packages

* Support GPU dataframe and `DeviceQuantileDMatrix` (#7195). Constructing `DMatrix` with GPU data structures and the interface for quantized `DMatrix` were first introduced in the Python package and are now available in the xgboost4j package.
* JVM packages now support saving and getting early stopping attributes. (#7095) Here is a quick [example](https://github.com/dmlc/xgboost/jvm-packages/xgboost4j-example/src/main/java/ml/dmlc/xgboost4j/java/example/EarlyStopping.java "example") in Java. (#7252)

### General new features

* We now have a pre-built binary package for R on Windows with GPU support. (#7185)
* CUDA compute capability 8.6 is now part of the default CMake build configuration, with newly added support for CUDA 11.4. (#7131, #7182, #7254)
* XGBoost can be compiled using the system CUB provided by CUDA 11.x installations. (#7232)

### Optimizations

The performance for both `hist` and `gpu_hist` has been significantly improved in 1.5 with the following optimizations:

* GPU multi-class model training now supports prediction cache. (#6860)
* GPU histogram building is sped up and the overall training time is 2-3 times faster on large datasets (#7180, #7198). In addition, we removed the parameter `deterministic_histogram` and now the GPU algorithm is always deterministic.
* CPU hist has an optimized procedure for data sampling. (#6922)
* More performance optimization in regression and binary classification objectives on CPU. (#7206)
* Tree model dump is now performed in parallel. (#7040)

### Breaking changes

* `n_gpus` was deprecated in the 1.0 release and is now removed.
* Feature grouping in the CPU hist tree method is removed; it had been disabled long ago. (#7018)
* The C API for Quantile DMatrix is changed to be consistent with the new external memory implementation. (#7082)

### Notable general bug fixes

* XGBoost no longer changes the global CUDA device ordinal when `gpu_id` is specified. (#6891, #6987)
* Fix `gamma` negative likelihood evaluation metric. (#7275)
* Fix integer value of `verbose_eval` for the `xgboost.cv` function in Python. (#7291)
* Remove extra sync in CPU hist for dense data, which could lead to incorrect tree node statistics. (#7120, #7128)
* Fix a bug in GPU hist when the data size is larger than `UINT32_MAX` with missing values. (#7026)
* Fix a thread safety issue in prediction with the `softmax` objective. (#7104)
* Fix a thread safety issue in CPU SHAP value computation. (#7050) Please note that all prediction functions in Python are thread-safe.
* Fix model slicing. (#7149, #7078)
* Workaround a bug in old GCC which can lead to segfault during construction of DMatrix. (#7161)
* Fix histogram truncation in GPU hist, which can lead to slightly-off results. (#7181)
* Fix loading GPU linear model pickle files on a CPU-only machine. (#7154)
* Check for duplicated input values when the CPU quantile queue is full. (#7091)
* Fix parameter loading with training continuation. (#7121)
* Fix the CMake interface for exposing the C library by specifying dependencies. (#7099)
* Callback and early stopping are explicitly disabled for the scikit-learn interface random forest estimator. (#7236)
* Fix compilation error on x86 (32-bit machines). (#6964)
* Fix CPU memory usage with extremely sparse datasets. (#7255)
* Fix a bug in the GPU multi-class AUC implementation with weighted data. (#7300)

### Python package

Other than the items mentioned in the previous sections, there are some Python-specific improvements.
* Change development release postfix to `dev`. (#6988)
* Fix early stopping behavior with the MAPE metric. (#7061)
* Fixed incorrect feature mismatch error message. (#6949)
* Add predictor to the skl constructor. (#7000, #7159)
* Re-enable feature validation in predict proba. (#7177)
* The scikit-learn interface regression estimator can now pass the scikit-learn estimator check and is fully compatible with scikit-learn utilities. `__sklearn_is_fitted__` is implemented as part of the changes. (#7130, #7230)
* Conform to the latest pylint. (#7071, #7241)
* Support the latest pandas range index in DMatrix construction. (#7074)
* Fix DMatrix construction from pandas series. (#7243)
* Fix typo and grammatical mistake in error message. (#7134)
* [dask] Disable work stealing explicitly for training tasks. (#6794)
* [dask] Set dataframe index in predict. (#6944)
* [dask] Fix prediction on df with latest dask. (#6969)
* [dask] Fix dask predict on `DaskDMatrix` with `iteration_range`. (#7005)
* [dask] Disallow importing non-dask estimators from xgboost.dask. (#7133)

### R package

Improvements other than new features on the R package:

* Optimization for updating R handles in-place. (#6903)
* Removed the magrittr dependency. (#6855, #6906, #6928)
* The R package now hides all C++ symbols to avoid conflicts. (#7245)
* Other maintenance including code cleanups and document updates. (#6863, #6915, #6930, #6966, #6967)

### JVM packages

Improvements other than new features on JVM packages:

* Constructors with implicit missing value are deprecated due to confusing behaviors. (#7225)
* Reduce scala-compiler, scalatest dependency scopes. (#6730)
* Make the Java library loader emit helpful error messages on missing dependencies. (#6926)
* JVM packages now use the Python tracker in XGBoost instead of dmlc. The one in XGBoost is shared between JVM packages and Python Dask and enjoys better maintenance. (#7132)
* Fix "key not found: train" error. (#6842)
* Fix model loading from stream. (#7067)

### General document improvements

* Overhaul the installation documents. (#6877)
* A few demos are added for AFT with dask (#6853), callback with dask (#6995), inference in C (#7151), and `process_type` (#7135).
* Fix PDF format of document. (#7143)
* Clarify the behavior of `use_rmm`. (#6808)
* Clarify prediction function. (#6813)
* Improve tutorial on feature interactions. (#7219)
* Add small example for the dask sklearn interface. (#6970)
* Update Python intro. (#7235)
* Some fixes/updates. (#6810, #6856, #6935, #6948, #6976, #7084, #7097, #7170, #7173, #7174, #7226, #6979, #6809, #6796, #6979)

### Maintenance

* Some refactoring around CPU hist, which led to better performance but is listed under general maintenance tasks:

  - Extract evaluate splits from CPU hist. (#7079)
  - Merge lossguide and depthwise strategies for CPU hist. (#7007)
  - Simplify sparse and dense CPU hist kernels. (#7029)
  - Extract histogram builder from CPU Hist. (#7152)

* Others

  - Fix `gpu_id` with custom objective. (#7015)
  - Fix typos in AUC. (#6795)
  - Use constexpr in `dh::CopyIf`. (#6828)
  - Update dmlc-core. (#6862)
  - Bump version to 1.5.0 snapshot in master. (#6875)
  - Relax shotgun test. (#6900)
  - Guard against index error in prediction. (#6982)
  - Hide symbols in CI build + hide symbols for C and CUDA. (#6798)
  - Persist data in dask test. (#7077)
  - Fix typo in arguments of PartitionBuilder::Init. (#7113)
  - Fix typo in src/common/hist.cc BuildHistKernel. (#7116)
  - Use upstream URI in distributed quantile tests. (#7129)
  - Include cpack. (#7160)
  - Remove synchronization in monitor. (#7164)
  - Remove unused code. (#7175)
  - Fix building on CUDA 11.0. (#7187)
  - Better error message for `ncclUnhandledCudaError`. (#7190)
  - Add noexcept to JSON objects. (#7205)
  - Improve wording for warning. (#7248)
  - Fix typo in release script. [skip ci] (#7238)
  - Relax shotgun test. (#6918)
  - Relax test for decision stump in distributed environment. (#6919)
  - [dask] Speed up tests. (#7020)

### CI

* [CI] Rotate access keys for uploading MacOS artifacts from Travis CI. (#7253)
* Reduce Travis environment setup time. (#6912)
* Restore R cache on github action. (#6985)
* [CI] Remove stray build artifact to avoid error in artifact packaging. (#6994)
* [CI] Move appveyor tests to action. (#6986)
* Remove appveyor badge. [skip ci] (#7035)
* [CI] Configure RAPIDS, dask, modin. (#7033)
* Test on s390x. (#7038)
* [CI] Upgrade to CMake 3.14. (#7060)
* [CI] Update R cache. (#7102)
* [CI] Pin libomp to 11.1.0. (#7107)
* [CI] Upgrade build image to CentOS 7 + GCC 8; require CUDA 10.1 and later. (#7141)
* [dask] Work around segfault in prediction. (#7112)
* [dask] Remove the workaround for segfault. (#7146)
* [CI] Fix hanging Python setup in Windows CI. (#7186)
* [CI] Clean up in beginning of each task in Win CI. (#7189)
* Fix travis. (#7237)

### Acknowledgement

* **Contributors**: Adam Pocock (@Craigacp), Jeff H (@JeffHCross), Johan Hansson (@JohanWork), Jose Manuel Llorens (@JoseLlorensRipolles), Benjamin Szőke (@Livius90), @ReeceGoding, @ShvetsKS, Robert Zabel (@ZabelTech), Ali (@ali5h), Andrew Ziem (@az0), Andy Adinets (@canonizer), @david-cortes, Daniel Saxton (@dsaxton), Emil Sadek (@esadek), @farfarawayzyt, Gil Forsyth (@gforsyth), @giladmaya, @graue70, Philip Hyunsu Cho (@hcho3), James Lamb (@jameslamb), José Morales (@jmoralez), Kai Fricke (@krfricke), Christian Lorentzen (@lorentzenchr), Mads R. B. Kristensen (@madsbk), Anton Kostin (@masguit42), Martin Petříček (@mpetricek-corp), @naveenkb, Taewoo Kim (@oOTWK), Viktor Szathmáry (@phraktle), Robert Maynard (@robertmaynard), TP Boudreau (@tpboudreau), Jiaming Yuan (@trivialfis), Paul Taylor (@trxcllnt), @vslaykovsky, Bobby Wang (@wbo4958)
* **Reviewers**: Nan Zhu (@CodingCat), Adam Pocock (@Craigacp), Jose Manuel Llorens (@JoseLlorensRipolles), Kodi Arfer (@Kodiologist), Benjamin Szőke (@Livius90), Mark Guryanov (@MarkGuryanov), Rory Mitchell (@RAMitchell), @ReeceGoding, @ShvetsKS, Egor Smirnov (@SmirnovEgorRu), Andrew Ziem (@az0), @candalfigomoro, Andy Adinets (@canonizer), Dante Gama Dessavre (@dantegd), @david-cortes, Daniel Saxton (@dsaxton), @farfarawayzyt, Gil Forsyth (@gforsyth), Harutaka Kawamura (@harupy), Philip Hyunsu Cho (@hcho3), @jakirkham, James Lamb (@jameslamb), José Morales (@jmoralez), James Bourbeau (@jrbourbeau), Christian Lorentzen (@lorentzenchr), Martin Petříček (@mpetricek-corp), Nikolay Petrov (@napetrov), @naveenkb, Viktor Szathmáry (@phraktle), Robin Teuwens (@rteuwens), Yuan Tang (@terrytangyuan), TP Boudreau (@tpboudreau), Jiaming Yuan (@trivialfis), @vkuzmin-uber, Bobby Wang (@wbo4958), William Hicks (@wphicks)

## v1.4.2 (2021.05.13)

This is a patch release for the Python package with the following fixes:

* Handle the latest version of cupy.ndarray in inplace_predict. (#6933)
* Ensure the output array from predict_leaf is (n_samples, ) when there's only 1 tree; 1.4.0 outputs (n_samples, 1). See the sketch after this list. (#6889)
* Fix empty dataset handling with multi-class AUC. (#6947)
* Handle object type from pandas in inplace_predict. (#6927)
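A small sketch of the corrected leaf-prediction shape (the data and shapes are illustrative):

```python
import numpy as np
import xgboost as xgb

X, y = np.random.rand(8, 3), np.random.rand(8)
booster = xgb.train({}, xgb.DMatrix(X, label=y), num_boost_round=1)

leaves = booster.predict(xgb.DMatrix(X), pred_leaf=True)
print(leaves.shape)  # (8,) as of 1.4.2; 1.4.0 returned (8, 1)
```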
## v1.4.1 (2021.04.20)

This is a bug fix release.

* Fix GPU implementation of AUC on some large datasets. (#6866)

## v1.4.0 (2021.04.12)

### Introduction of pre-built binary package for R, with GPU support

Starting with release 1.4.0, users now have the option of installing `{xgboost}` without having to build it from the source. This is particularly advantageous for users who want to take advantage of the GPU algorithm (`gpu_hist`), as previously they'd have to build `{xgboost}` from the source using CMake and NVCC. Now installing `{xgboost}` with GPU support is as easy as: `R CMD INSTALL ./xgboost_r_gpu_linux.tar.gz`. (#6827) See the instructions at https://xgboost.readthedocs.io/en/latest/build.html

### Improvements on prediction functions

XGBoost has many prediction types including shap value computation and inplace prediction. In 1.4 we overhauled the underlying prediction functions for the C API and Python API with a unified interface. (#6777, #6693, #6653, #6662, #6648, #6668, #6804)

* Starting with 1.4, the sklearn interface prediction will use inplace predict by default when the input data is supported.
* Users can use inplace predict with the `dart` booster and enable GPU acceleration just like `gbtree`.
* Also, all prediction functions with tree models are now thread-safe. Inplace predict is improved with `base_margin` support.
* A new set of C predict functions is exposed in the public interface.
* A user-visible change is a newly added parameter called `strict_shape`. See https://xgboost.readthedocs.io/en/latest/prediction.html for more details.

### Improvement on Dask interface

* Starting with 1.4, the Dask interface is considered to be feature-complete, which means all of the models found in the single node Python interface are now supported in Dask, including but not limited to ranking and random forest. Also, the prediction function is significantly faster and supports shap value computation.

  - Most of the parameters found in the single node sklearn interface are supported by the Dask interface. (#6471, #6591)
  - Implements learning to rank. On the Dask interface, we use the newly added support for query ID to enable group structure. (#6576)
  - The Dask interface has Python type hints support. (#6519)
  - All models can be safely pickled. (#6651)
  - Random forest estimators are now supported. (#6602)
  - Shap value computation is now supported. (#6575, #6645, #6614)
  - Evaluation result is printed on the scheduler process. (#6609)
  - `DaskDMatrix` (and device quantile dmatrix) now accepts all meta-information. (#6601)

* Prediction optimization. We enhanced and sped up the prediction function for the Dask interface. See the latest Dask tutorial page in our document for an overview of how you can optimize it even further. (#6650, #6645, #6648, #6668)
* Bug fixes

  - If you are using the latest Dask and distributed where `distributed.MultiLock` is present, XGBoost supports training multiple models on the same cluster in parallel. (#6743)
  - Fixed a bug where, when using `dask.client` to launch async tasks, XGBoost might use a different client object internally. (#6722)

* Other improvements on documents, blogs, tutorials, and demos. (#6389, #6366, #6687, #6699, #6532, #6501)

### Python package

With changes from Dask and general improvements on prediction, we have made some enhancements on the general Python interface and IO for booster information. Starting from 1.4, booster feature names and types can be saved into the JSON model. Also, some model attributes like `best_iteration` and `best_score` are restored upon model load; a short sketch follows.
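A sketch of the restored attributes (assumes a model trained with early stopping; the file name is arbitrary):

```python
import numpy as np
import xgboost as xgb

X, y = np.random.rand(200, 4), np.random.randint(2, size=200)
dtrain = xgb.DMatrix(X, label=y)
booster = xgb.train(
    {"objective": "binary:logistic"},
    dtrain,
    num_boost_round=50,
    evals=[(dtrain, "train")],
    early_stopping_rounds=5,
)
booster.save_model("clf.json")

loaded = xgb.Booster(model_file="clf.json")
print(loaded.best_iteration)  # restored without calling attr() manually
```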
On the sklearn interface, some attributes are now implemented as Python object properties with better documents.

* Breaking change: All `data` parameters in prediction functions are renamed to `X` for better compliance with sklearn estimator interface guidelines.
* Breaking change: XGBoost used to generate some pseudo feature names with `DMatrix` when inputs like `np.ndarray` don't have column names. The procedure is removed to avoid conflict with other inputs. (#6605)
* Early stopping with training continuation is now supported. (#6506)
* Optional imports for Dask and cuDF are now lazy. (#6522)
* As mentioned in the prediction improvement summary, the sklearn interface uses inplace prediction whenever possible. (#6718)
* Booster information like feature names and feature types are now saved into the JSON model file. (#6605)
* All `DMatrix` interfaces including `DeviceQuantileDMatrix` and counterparts in the Dask interface (as mentioned in the Dask changes summary) now accept all the meta-information like `group` and `qid` in their constructors for better consistency. (#6601)
* Booster attributes are restored upon model load so users don't have to call `attr` manually. (#6593)
* On the sklearn interface, all models accept `base_margin` for evaluation datasets. (#6591)
* Improvements to the setup script, including a smaller sdist size and faster installation if the C++ library is already built. (#6611, #6694, #6565)
* Bug fixes for the Python package:

  - Don't validate feature when number of rows is 0. (#6472)
  - Move metric configuration into booster. (#6504)
  - Calling XGBModel.fit() should clear the Booster by default. (#6562)
  - Support `_estimator_type`. (#6582)
  - [dask, sklearn] Fix predict proba. (#6566, #6817)
  - Restore unknown data support. (#6595)
  - Fix learning rate scheduler with cv. (#6720)
  - Fixes small typo in sklearn documentation. (#6717)
  - [python-package] Fix class Booster: feature_types = None. (#6705)
  - Fix divide by 0 in feature importance when no split is found. (#6676)

### JVM package

* [jvm-packages] Fix early stopping not working even without custom_eval setting. (#6738)
* Fix potential issue where TaskFailedListener's callback won't be called. (#6612)
* [jvm] Add ability to load booster directly from a byte array. (#6655)
* [jvm-packages] JVM library loader extensions. (#6630)

### R package

* R documentation: Make construction of DMatrix consistent.
* Fix R documentation for xgb.train. (#6764)

### ROC-AUC

We re-implemented the ROC-AUC metric in XGBoost. The new implementation supports multi-class classification and has better support for learning to rank tasks that are not binary. Also, it has a better-defined average in distributed environments with additional handling for invalid datasets. (#6749, #6747, #6797)

### Global configuration

Starting from 1.4, XGBoost's Python, R and C interfaces support a new global configuration model where users can specify some global parameters. Currently, the supported parameters are `verbosity` and `use_rmm`. The latter is experimental; see the rmm plugin demo and related README file for details. (#6414, #6656)

### Other New features

* Better handling for input data types that support `__array_interface__`. For some data types including GPU inputs and `scipy.sparse.csr_matrix`, XGBoost employs `__array_interface__` for processing the underlying data. Starting from 1.4, XGBoost can accept arbitrary array strides (which means column-major is supported) without making data copies, potentially reducing a significant amount of memory consumption (a small sketch follows).
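A small illustration of the stride handling (illustrative data; `np.asfortranarray` produces a column-major copy of the array):

```python
import numpy as np
import xgboost as xgb

# A column-major (Fortran-order) array has non-standard strides.
X = np.asfortranarray(np.random.rand(100, 4))

# Accepted directly via __array_interface__; no transpose copy is made.
dtrain = xgb.DMatrix(X, label=np.random.rand(100))
```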
* Also, version 3 of `__cuda_array_interface__` is now supported. (#6776, #6765, #6459, #6675)
* Improved parameter validation; now feeding XGBoost with parameters that contain whitespace will trigger an error. (#6769)
* For the Python and R packages, file paths containing the home indicator `~` are supported.
* As mentioned in the Python changes summary, the JSON model can now save feature information of the trained booster. The JSON schema is updated accordingly. (#6605)
* Development of categorical data support continues, with newly added weighted data support and `dart` booster support. (#6508, #6693)
* As mentioned in the Dask change summary, ranking now supports the `qid` parameter for query groups. (#6576)
* `DMatrix.slice` can now consume a numpy array. (#6368)

### Other breaking changes

* Aside from the feature name generation, there are two breaking changes:

  - Drop saving binary format for memory snapshot. (#6513, #6640)
  - Change the default evaluation metric for the binary:logitraw objective to logloss. (#6647)

### CPU Optimization

* Aside from the general changes on the predict function, some optimizations are applied on the CPU implementation. (#6683, #6550, #6696, #6700)
* Also, performance for sampling initialization in `hist` is improved. (#6410)

### Notable fixes in the core library

These fixes do not reside in particular language bindings:

* Fixes for gamma regression. This includes checking for invalid input values, fixes for the gamma deviance metric, and a better floating point guard for the gamma negative log-likelihood metric. (#6778, #6537, #6761)
* Random forest with `gpu_hist` might generate low accuracy in previous versions. (#6755)
* Fix a bug in GPU sketching when the data size exceeds the limit of a 32-bit integer. (#6826)
* Memory consumption fix for row-major adapters. (#6779)
* Don't estimate sketch batch size when rmm is used. (#6807) (#6830)
* Fix in-place predict with missing value. (#6787)
* Re-introduce double buffer in UpdatePosition, to fix perf regression in gpu_hist. (#6757)
* Pass correct split_type to GPU predictor. (#6491)
* Fix DMatrix feature names/types IO. (#6507)
* Use view for `SparsePage` exclusively to avoid some data access races. (#6590)
* Check for invalid data. (#6742)
* Fix relocatable include in CMakeList. (#6734) (#6737)
* Fix DMatrix slice with feature types. (#6689)

### Other deprecation notices

* This release will be the last release to support CUDA 10.0. (#6642)
* Starting in the next release, the Python package will require Pip 19.3+ due to the use of the manylinux2014 tag. Also, CentOS 6, RHEL 6 and other old distributions will not be supported.

### Known issue

The MacOS build of the JVM packages doesn't support multi-threading out of the box. To enable multi-threading with JVM packages, MacOS users will need to build the JVM packages from the source. See https://xgboost.readthedocs.io/en/latest/jvm/index.html#installation-from-source

### Doc

* Dedicated page for the `tree_method` parameter is added. (#6564, #6633)
* [doc] Add FLAML as a fast tuning tool for XGBoost. (#6770)
* Add document for tests directory. [skip ci] (#6760)
* Fix doc string of config.py to use correct `versionadded`. (#6458)
* Update demo for prediction. (#6789)
* [Doc] Document that AUCPR is for binary classification/ranking. (#5899)
* Update the C API comments. (#6457)
* Fix document. [skip ci] (#6669)

### Maintenance: Testing, continuous integration

* Use CPU input for test_boost_from_prediction. (#6818)
* [CI] Upload xgboost4j.dll to S3. (#6781)
* Update dmlc-core submodule. (#6745)
* [CI] Use manylinux2010_x86_64 container to vendor libgomp. (#6485)
* Add conda-forge badge. (#6502)
* Fix merge conflict. (#6512)
* [CI] Split up main.yml, add mypy. (#6515)
* [Breaking] Upgrade cuDF and RMM to 0.18 nightlies; require RMM 0.18+ for RMM plugin. (#6510)
* "featue_map" typo changed to "feature_map". (#6540)
* Add script for generating release tarball. (#6544)
* Add credentials to .gitignore. (#6559)
* Remove warnings in tests. (#6554)
* Update dmlc-core submodule and conform to new API. (#6431)
* Suppress hypothesis health check for dask client. (#6589)
* Fix pylint. (#6714)
* [CI] Clear R package cache. (#6746)
* Exclude dmlc test on github action. (#6625)
* Tests for regression metrics with weights. (#6729)
* Add helper script and doc for releasing pip package. (#6613)
* Support pylint 2.7.0. (#6726)
* Remove R cache in github action. (#6695)
* [CI] Do not mix up stashed executables built for ARM and x86_64 platforms. (#6646)
* [CI] Add ARM64 test to Jenkins pipeline. (#6643)
* Disable s390x and arm64 tests on travis for now. (#6641)
* Move sdist test to action. (#6635)
* [dask] Rework base margin test. (#6627)

### Maintenance: Refactor code for legibility and maintainability

* Improve OpenMP exception handling. (#6680)
* Improve string view to reduce string allocation. (#6644)
* Simplify Span checks. (#6685)
* Use generic dispatching routine for array interface. (#6672)

## v1.3.0 (2020.12.08)

### XGBoost4J-Spark: Exceptions should cancel jobs gracefully instead of killing SparkContext (#6019)

* By default, exceptions in XGBoost4J-Spark cause the whole SparkContext to shut down, necessitating the restart of the Spark cluster. This behavior is often a major inconvenience.
* Starting from the 1.3.0 release, XGBoost adds a new parameter `killSparkContextOnWorkerFailure` to optionally prevent killing SparkContext. If this parameter is set, exceptions will gracefully cancel training jobs instead of killing SparkContext.

### GPUTreeSHAP: GPU acceleration of the TreeSHAP algorithm (#6038, #6064, #6087, #6099, #6163, #6281, #6332)

* [SHAP (SHapley Additive exPlanations)](https://github.com/slundberg/shap) is a game theoretic approach to explain predictions of machine learning models. It computes feature importance scores for individual examples, establishing how each feature influences a particular prediction. TreeSHAP is an optimized SHAP algorithm specifically designed for decision tree ensembles.
* Starting with the 1.3.0 release, it is now possible to leverage CUDA-capable GPUs to accelerate the TreeSHAP algorithm. Check out [the demo notebook](https://github.com/dmlc/xgboost/blob/master/demo/gpu_acceleration/shap.ipynb).
* The CUDA implementation of the TreeSHAP algorithm is hosted at [rapidsai/GPUTreeSHAP](https://github.com/rapidsai/gputreeshap). XGBoost imports it as a Git submodule.

### New style Python callback API (#6199, #6270, #6320, #6348, #6376, #6399, #6441)

* The XGBoost Python package now offers a re-designed callback API. The new callback API lets you design various extensions of training in idiomatic Python. In addition, the new callback API allows you to use early stopping with the native Dask API (`xgboost.dask`). Check out [the tutorial](https://xgboost.readthedocs.io/en/release_1.3.0/python/callbacks.html) and [the demo](https://github.com/dmlc/xgboost/blob/master/demo/guide-python/callbacks.py).
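A minimal sketch of the re-designed API (the metric name and data are illustrative):

```python
import numpy as np
import xgboost as xgb

class PrintLoss(xgb.callback.TrainingCallback):
    def after_iteration(self, model, epoch, evals_log):
        # evals_log is a nested dict: {"train": {"logloss": [...]}}.
        loss = evals_log["train"]["logloss"][-1]
        print(f"[{epoch}] train-logloss={loss:.4f}")
        return False  # returning True would stop training

X, y = np.random.rand(100, 4), np.random.randint(2, size=100)
dtrain = xgb.DMatrix(X, label=y)
xgb.train(
    {"objective": "binary:logistic"},
    dtrain,
    num_boost_round=5,
    evals=[(dtrain, "train")],
    callbacks=[PrintLoss()],
)
```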
### Enable the use of `DeviceQuantileDMatrix` / `DaskDeviceQuantileDMatrix` with large data (#6201, #6229, #6234)

* `DeviceQuantileDMatrix` can achieve memory savings by avoiding extra copies of the training data, and the savings are bigger for large data. Unfortunately, large data with more than 2^31 elements was triggering integer overflow bugs in CUB and Thrust. Tracking issue: #6228.
* This release contains a series of work-arounds to allow the use of `DeviceQuantileDMatrix` with large data:

  - Loop over `copy_if`. (#6201)
  - Loop over `thrust::reduce`. (#6229)
  - Implement the inclusive scan algorithm in-house, to handle large offsets. (#6234)

### Support slicing of tree models (#6302)

* Accessing the best iteration of a model after the application of early stopping used to be error-prone, as users needed to manually pass the `ntree_limit` argument to the `predict()` function.
* Now we provide a simple interface to slice tree models by specifying a range of boosting rounds. The tree ensemble can be split into multiple sub-ensembles via the slicing interface. Check out [an example](https://xgboost.readthedocs.io/en/release_1.3.0/python/model.html).
* In addition, the early stopping callback now supports the `save_best` option. When enabled, XGBoost will save (persist) the model at the best boosting round and discard the trees that were fit subsequent to the best round.

### Weighted subsampling of features (columns) (#5962)

* It is now possible to sample features (columns) via weighted subsampling, in which features with higher weights are more likely to be selected in the sample. Weighted subsampling allows you to encode domain knowledge by emphasizing a particular set of features in the choice of tree splits. In addition, you can prevent particular features from being used in any splits by assigning them zero weights.
* Check out [the demo](https://github.com/dmlc/xgboost/blob/master/demo/guide-python/feature_weights.py).

### Improved integration with Dask

* Support reverse-proxy environments such as Google Kubernetes Engine. (#6343, #6475)
* An XGBoost training job will no longer use all available workers. Instead, it will only use the workers that contain input data. (#6343)
* The new callback API works well with the Dask training API.
* The `predict()` and `fit()` functions of `DaskXGBClassifier` and `DaskXGBRegressor` now accept a base margin. (#6155)
* Support more meta data in the Dask API. (#6130, #6132, #6333)
* Allow passing extra keyword arguments as `kwargs` in `predict()`. (#6117)
* Fix typo in dask interface: `sample_weights` -> `sample_weight`. (#6240)
* Allow empty data matrix in AFT survival, as Dask may produce empty partitions. (#6379)
* Speed up prediction by overlapping prediction jobs in all workers. (#6412)

### Experimental support for direct splits with categorical features (#6028, #6128, #6137, #6140, #6164, #6165, #6166, #6179, #6194, #6219)

* Currently, XGBoost requires users to one-hot-encode categorical variables. This has adverse performance implications, as the creation of many dummy variables results in higher memory consumption and may require fitting deeper trees to achieve equivalent model accuracy.
* The 1.3.0 release of XGBoost contains experimental support for direct handling of categorical variables in test nodes. Each test node will have the condition of the form `feature_value \in match_set`, where the `match_set` on the right hand side contains one or more matching categories.
The matching categories in `match_set` represent the condition for traversing to the right child node. Currently, XGBoost will only generate categorical splits with a single matching category ("one-vs-rest split"). In a future release, we plan to remove this restriction and produce splits with multiple matching categories in `match_set`.

* The categorical split requires the use of JSON model serialization. The legacy binary serialization method cannot be used to save (persist) models with categorical splits.
* Note: this feature is currently highly experimental. Use it at your own risk. See the detailed list of limitations at [#5949](https://github.com/dmlc/xgboost/pull/5949).

### Experimental plugin for RAPIDS Memory Manager (#5873, #6131, #6146, #6150, #6182)

* The RAPIDS Memory Manager library ([rapidsai/rmm](https://github.com/rapidsai/rmm)) provides a collection of efficient memory allocators for NVIDIA GPUs. It is now possible to use XGBoost with memory allocators provided by RMM, by enabling the RMM integration plugin. With this plugin, XGBoost is now able to share a common GPU memory pool with other applications using RMM, such as the RAPIDS data science packages.
* See [the demo](https://github.com/dmlc/xgboost/blob/master/demo/rmm_plugin/README.md) for a working example, as well as directions for building XGBoost with the RMM plugin.
* The plugin will soon be considered non-experimental, once #6297 is resolved.

### Experimental plugin for oneAPI programming model (#5825)

* oneAPI is a programming interface developed by Intel aimed at providing one programming model for many types of hardware such as CPU, GPU, FPGA and other hardware accelerators.
* XGBoost now includes an experimental plugin for using oneAPI for the predictor and objective functions. The plugin is hosted in the directory `plugin/updater_oneapi`.
* Roadmap: #5442

### Pickling the XGBoost model will now trigger JSON serialization (#6027)

* The pickle will now contain the JSON string representation of the XGBoost model, as well as related configuration.

### Performance improvements

* Various performance improvements on multi-core CPUs:

  - Optimize DMatrix build time by up to 3.7x. (#5877)
  - CPU predict performance improvement, by up to 3.6x. (#6127)
  - Optimize CPU sketch allreduce for sparse data. (#6009)
  - Thread local memory allocation for BuildHist, leading to a speedup of up to 1.7x. (#6358)
  - Disable hyperthreading for DMatrix creation (#6386). This speeds up DMatrix creation by up to 2x.
  - Simple fix for static schedule in predict. (#6357)

* Unify thread configuration, to make it easy to utilize all CPU cores. (#6186)
* [jvm-packages] Clean the way deterministic partitioning is computed. (#6033)
* Speed up JSON serialization by implementing an intrusive pointer class (#6129). It leads to a 1.5x-2x performance boost.

### API additions

* [R] Add SHAP summary plot using ggplot2. (#5882)
* Modin DataFrame can now be used as input. (#6055)
* [jvm-packages] Add `getNumFeature` method. (#6075)
* Add MAPE metric. (#6119)
* Implement GPU predict leaf. (#6187)
* Enable cuDF/cuPy inputs in `XGBClassifier`. (#6269)
* Document tree method for feature weights. (#6312)
* Add `fail_on_invalid_gpu_id` parameter, which will cause XGBoost to terminate upon seeing an invalid value of `gpu_id`. (#6342)

### Breaking: the default evaluation metric for classification is changed to `logloss` / `mlogloss` (#6183)

* The default metric used to be accuracy, and it is not statistically consistent to perform early stopping with the accuracy metric when we are really optimizing the log loss for the `binary:logistic` objective.
* For statistical consistency, the default metric for classification has been changed to `logloss`. Users may choose to preserve the old behavior by explicitly specifying `eval_metric`.

### Breaking: `skmaker` is now removed (#5971)

* The `skmaker` updater has never been documented or tested.

### Breaking: the JSON model format no longer stores the leaf child count (#6094)

* The leaf child count field has been deprecated and is not used anywhere in the XGBoost codebase.

### Breaking: XGBoost now requires MacOS 10.14 (Mojave) and later

* Homebrew has dropped support for MacOS 10.13 (High Sierra), so we are not able to install the OpenMP runtime (`libomp`) from Homebrew on MacOS 10.13. Please use MacOS 10.14 (Mojave) or later.

### Deprecation notices

* The use of `LabelEncoder` in `XGBClassifier` is now deprecated and will be removed in the next minor release (#6269). The deprecation is necessary to support multiple types of inputs, such as cuDF data frames or cuPy arrays.
* The use of certain positional arguments in the Python interface is deprecated (#6365). Users will see deprecation warnings for the use of positional arguments for certain function parameters. New code should use keyword arguments as much as possible. We have not yet decided when we will fully require the use of keyword arguments.

### Bug-fixes

* On big-endian architectures, swap the byte order in the binary serializer to enable loading models that were produced by a little-endian machine. (#5813)
* [jvm-packages] Fix deterministic partitioning with datasets containing Double.NaN. (#5996)
* Limit tree depth for GPU hist to 31 to prevent integer overflow. (#6045)
* [jvm-packages] Set `maxBins` to 256 to align with the default value in the C++ code. (#6066)
* [R] Fix CRAN check. (#6077)
* Add back support for `scipy.sparse.coo_matrix`. (#6162)
* Handle duplicated values in sketching. (#6178)
* Catch all standard exceptions in C API. (#6220)
* Fix linear GPU input. (#6255)
* Fix inplace prediction interval. (#6259)
* [R] Allow `xgb.plot.importance()` calls to fill a grid. (#6294)
* Lazy import dask libraries. (#6309)
* Deterministic data partitioning for external memory. (#6317)
* Avoid resetting seed for every configuration. (#6349)
* Fix label errors in graph visualization. (#6369)
* [jvm-packages] Fix potential unit test suites aborted issue due to race condition. (#6373)
* [R] Fix warnings from `R check --as-cran`. (#6374)
* [R] Fix a crash that occurs with noLD R. (#6378)
* [R] Do not convert continuous labels to factors. (#6380)
* [R] Remove uses of `exists()`. (#6387)
* Propagate parameters to the underlying `Booster` handle from `XGBClassifier.set_param` / `XGBRegressor.set_param`. (#6416)
* [R] Fix R package installation via CMake. (#6423)
* Enforce row-major order in cuPy array. (#6459)
* Fix filtering callable objects in the parameters passed to the scikit-learn API. (#6466)
### Maintenance: Testing, continuous integration, build system

* [CI] Improve JVM test in GitHub Actions. (#5930)
* Refactor plotting test so that it can run independently. (#6040)
* [CI] Cancel builds on subsequent pushes. (#6011)
* Fix Dask Pytest fixture. (#6024)
* [CI] Migrate linters to GitHub Actions. (#6035)
* [CI] Remove win2016 JVM test from GitHub Actions. (#6042)
* Fix CMake build with `BUILD_STATIC_LIB` option. (#6090)
* Don't link imported target in CMake. (#6093)
* Work around a compiler bug in MacOS AppleClang 11. (#6103)
* [CI] Fix CTest by running it in a correct directory. (#6104)
* [R] Check warnings explicitly for model compatibility tests. (#6114)
* [jvm-packages] Add xgboost4j-gpu/xgboost4j-spark-gpu module to facilitate release. (#6136)
* [CI] Time GPU tests. (#6141)
* [R] Remove warning in configure.ac. (#6152)
* [CI] Upgrade cuDF and RMM to 0.16 nightlies; upgrade to Ubuntu 18.04. (#6157)
* [CI] Test C API demo. (#6159)
* Option for generating device debug info. (#6168)
* Update `.gitignore`. (#6175, #6193, #6346)
* Hide C++ symbols from dmlc-core. (#6188)
* [CI] Added arm64 job in Travis-CI. (#6200)
* [CI] Fix Docker build for CUDA 11. (#6202)
* [CI] Move non-OpenMP gtest to GitHub Actions. (#6210)
* [jvm-packages] Fix up build for xgboost4j-gpu, xgboost4j-spark-gpu. (#6216)
* Add more tests for categorical data support. (#6219)
* [dask] Test for data initialization. (#6226)
* Bump junit from 4.11 to 4.13.1 in /jvm-packages/xgboost4j. (#6230)
* Bump junit from 4.11 to 4.13.1 in /jvm-packages/xgboost4j-gpu. (#6233)
* [CI] Reduce testing load with RMM. (#6249)
* [CI] Build a Python wheel for aarch64 platform. (#6253)
* [CI] Time the CPU tests on Jenkins. (#6257)
* [CI] Skip Dask tests on ARM. (#6267)
* Fix a typo in `is_arm()` in testing.py. (#6271)
* [CI] Replace `egrep` with `grep -E`. (#6287)
* Support unity build. (#6295)
* [CI] Mark flaky tests as XFAIL. (#6299)
* [CI] Use separate Docker cache for each CUDA version. (#6305)
* Added `USE_NCCL_LIB_PATH` option to enable users to set `NCCL_LIBRARY` during build. (#6310)
* Fix flaky data initialization test. (#6318)
* Add a badge for GitHub Actions. (#6321)
* Optional `find_package` for sanitizers. (#6329)
* Use pytest conventions consistently in Python tests. (#6337)
* Fix missing space in warning message. (#6340)
* Update `custom_metric_obj.rst`. (#6367)
* [CI] Run R check with `--as-cran` flag on GitHub Actions. (#6371)
* [CI] Remove R check from Jenkins. (#6372)
* Mark GPU external memory test as XFAIL. (#6381)
* [CI] Add noLD R test. (#6382)
* Fix MPI build. (#6403)
* [CI] Upgrade to MacOS Mojave image. (#6406)
* Fix flaky sparse page dmatrix test. (#6417)
* [CI] Upgrade cuDF and RMM to 0.17 nightlies. (#6434)
* [CI] Fix CentOS 6 Docker images. (#6467)
* [CI] Vendor libgomp in the manylinux Python wheel. (#6461)
* [CI] Hot fix for libgomp vendoring. (#6482)

### Maintenance: Clean up and merge the Rabit submodule (#6023, #6095, #6096, #6105, #6110, #6262, #6275, #6290)

* The Rabit submodule is now maintained as part of the XGBoost codebase.
* Tests for Rabit are now part of the test suites of XGBoost.
* Rabit can now be built on the Windows platform.
* We reformatted the C++ code with clang-tidy.
* Public headers of XGBoost no longer depend on Rabit headers.
* Unused CMake targets for Rabit were removed.
* Single-point model recovery has been dropped and removed from Rabit, simplifying the Rabit code greatly. The single-point model recovery feature has not been adequately maintained over the years.
* We removed the parts of Rabit that were not useful for XGBoost.

### Maintenance: Refactor code for legibility and maintainability
* Unify CPU hist sketching (#5880)
* [R] Fix uses of `1:length(x)` and other small things (#5992)
* Unify evaluation functions. (#6037)
* Make binary bin search reusable. (#6058)
* Unify set index data. (#6062)
* [R] Remove `stringi` dependency (#6109)
* Merge extract cuts into QuantileContainer. (#6125)
* Reduce C++ compiler warnings (#6197, #6198, #6213, #6286, #6325)
* Clean up Python code. (#6223)
* Small cleanup to evaluator. (#6400)

### Usability Improvements, Documentation
* [jvm-packages] Add example of handling a missing value other than 0 (#5677)
* Add DMatrix usage examples to the C API demo (#5854)
* List `DaskDeviceQuantileDMatrix` in the doc. (#5975)
* Update Python custom objective demo. (#5981)
* Update the JSON model schema to document more objective functions. (#5982)
* [Python] Fix warning when `missing` field is not used. (#5969)
* Fix typo in tracker logging (#5994)
* Move a warning about empty datasets, so that it's shown for all objectives and metrics (#5998)
* Fix the instructions for installing the nightly build. (#6004)
* [Doc] Add dtreeviz as a showcase example of integration with 3rd-party software (#6013)
* [jvm-packages] [doc] Update install doc for JVM packages (#6051)
* Fix typo in `xgboost.callback.early_stop` docstring (#6071)
* Add cache suffix to the files used in the external memory demo. (#6088)
* [Doc] Document the parameter `kill_spark_context_on_worker_failure` (#6097)
* Fix link to the demo for custom objectives (#6100)
* Update Dask doc. (#6108)
* Validate that weights are positive values. (#6115)
* Document the updated CMake version requirement. (#6123)
* Add demo for `DaskDeviceQuantileDMatrix`. (#6156)
* Cosmetic fixes in `faq.rst` (#6161)
* Fix error message. (#6176)
* [Doc] Add list of winning solutions in data science competitions using XGBoost (#6177)
* Fix a comment in demo to use correct reference (#6190)
* Update the list of winning solutions using XGBoost (#6192)
* Consistent style for build status badge (#6203)
* [Doc] Add info on GPU compiler (#6204)
* Update the list of winning solutions (#6222, #6254)
* Add link to XGBoost's Twitter handle (#6244)
* Fix minor typos in XGBClassifier methods' docstrings (#6247)
* Add sponsors link to FUNDING.yml (#6252)
* Group CLI demo into subdirectory. (#6258)
* Reduce warning messages from `gbtree`. (#6273)
* Create a tutorial for using the C API in a C/C++ application (#6285)
* Update plugin instructions for CMake build (#6289)
* [doc] Make the Dask distributed example copy-pastable (#6345)
* [Python] Add option to use `libxgboost.so` from the system path (#6362)
* Fix a few grammatical mistakes in the doc (#6393)
* Fix broken link in CLI doc (#6396)
* Improve documentation for the Dask API (#6413)
* Revise misleading exception information: there is no such parameter as `allow_non_zero_missing` (#6418)
* Fix CLI ranking demo. (#6439)
* Fix broken links. (#6455)
### Acknowledgement
**Contributors**: Nan Zhu (@CodingCat), @FelixYBW, Jack Dunn (@JackDunnNZ), Jean Lescut-Muller (@JeanLescut), Boris Feld (@Lothiraldan), Nikhil Choudhary (@Nikhil1O1), Rory Mitchell (@RAMitchell), @ShvetsKS, Anthony D'Amato (@Totoketchup), @Wittty-Panda, neko (@akiyamaneko), Alexander Gugel (@alexanderGugel), @dependabot[bot], DIVYA CHAUHAN (@divya661), Daniel Steinberg (@dstein64), Akira Funahashi (@funasoul), Philip Hyunsu Cho (@hcho3), Tong He (@hetong007), Hristo Iliev (@hiliev), Honza Sterba (@honzasterba), @hzy001, Igor Moura (@igormp), @jameskrach, James Lamb (@jameslamb), Naveed Ahmed Saleem Janvekar (@janvekarnaveed), Kyle Nicholson (@kylejn27), lacrosse91 (@lacrosse91), Christian Lorentzen (@lorentzenchr), Manikya Bardhan (@manikyabard), @nabokovas, John Quitto-Graham (@nvidia-johnq), @odidev, Qi Zhang (@qzhang90), Sergio Gavilán (@sgavil), Tanuja Kirthi Doddapaneni (@tanuja3), Cuong Duong (@tcuongd), Yuan Tang (@terrytangyuan), Jiaming Yuan (@trivialfis), vcarpani (@vcarpani), Vladislav Epifanov (@vepifanov), Vitalie Spinu (@vspinu), Bobby Wang (@wbo4958), Zeno Gantner (@zenogantner), zhang_jf (@zuston)

**Reviewers**: Nan Zhu (@CodingCat), John Zedlewski (@JohnZed), Rory Mitchell (@RAMitchell), @ShvetsKS, Egor Smirnov (@SmirnovEgorRu), Anthony D'Amato (@Totoketchup), @Wittty-Panda, Alexander Gugel (@alexanderGugel), Codecov Comments Bot (@codecov-commenter), Codecov (@codecov-io), DIVYA CHAUHAN (@divya661), Devin Robison (@drobison00), Geoffrey Blake (@geoffreyblake), Mark Harris (@harrism), Philip Hyunsu Cho (@hcho3), Honza Sterba (@honzasterba), Igor Moura (@igormp), @jakirkham, @jameskrach, James Lamb (@jameslamb), Janakarajan Natarajan (@janaknat), Jake Hemstad (@jrhemstad), Keith Kraus (@kkraus14), Kyle Nicholson (@kylejn27), Christian Lorentzen (@lorentzenchr), Michael Mayer (@mayer79), Nikolay Petrov (@napetrov), @odidev, PSEUDOTENSOR / Jonathan McKinney (@pseudotensor), Qi Zhang (@qzhang90), Sergio Gavilán (@sgavil), Scott Lundberg (@slundberg), Cuong Duong (@tcuongd), Yuan Tang (@terrytangyuan), Jiaming Yuan (@trivialfis), vcarpani (@vcarpani), Vladislav Epifanov (@vepifanov), Vincent Nijs (@vnijs), Vitalie Spinu (@vspinu), Bobby Wang (@wbo4958), William Hicks (@wphicks)

## v1.2.0 (2020.08.22)

### XGBoost4J-Spark now supports the GPU algorithm (#5171)
* Now XGBoost4J-Spark is able to leverage NVIDIA GPU hardware to speed up training.
* There is ongoing work for accelerating the rest of the data pipeline with NVIDIA GPUs (#5950, #5972).

### XGBoost now supports CUDA 11 (#5808)
* It is now possible to build XGBoost with CUDA 11. Note that we do not yet distribute pre-built binaries built with CUDA 11; all current distributions use CUDA 10.0.

### Better guidance for persisting XGBoost models in an R environment (#5940, #5964)
* Users are strongly encouraged to use `xgb.save()` and `xgb.save.raw()` instead of `saveRDS()`. This is so that the persisted models can be accessed with future releases of XGBoost.
* The previous release (1.1.0) had problems loading models that were saved with `saveRDS()`. This release adds a compatibility layer to restore access to the old RDS files. Note that this is meant to be a temporary measure; users are advised to stop using `saveRDS()` and migrate to `xgb.save()` and `xgb.save.raw()`.

### New objectives and metrics
* The pseudo-Huber loss `reg:pseudohubererror` is added (#5647). The corresponding metric is `mphe`. Right now, the slope is hard-coded to 1. See the sketch below for selecting the new objective.
* The Accelerated Failure Time objective for survival analysis (`survival:aft`) is now accelerated on GPUs (#5714, #5716). The survival metrics `aft-nloglik` and `interval-regression-accuracy` are also accelerated on GPUs.
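A minimal sketch of selecting the new objective and metric from Python (the objective and metric names come from the notes above; the synthetic data and round count are illustrative):

```python
import numpy as np
import xgboost as xgb

X, y = np.random.rand(500, 8), np.random.rand(500)
dtrain = xgb.DMatrix(X, label=y)

# reg:pseudohubererror is the new pseudo-Huber objective; mphe is its metric.
params = {"objective": "reg:pseudohubererror", "eval_metric": "mphe"}
bst = xgb.train(params, dtrain, num_boost_round=10, evals=[(dtrain, "train")])
```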
### Improved integration with scikit-learn
* Added the `n_features_in_` attribute to the scikit-learn interface to store the number of features used (#5780). This is useful for integrating with some scikit-learn features such as `StackingClassifier`. See [this link](https://scikit-learn-enhancement-proposals.readthedocs.io/en/latest/slep010/proposal.html) for more details.
* `XGBoostError` now inherits `ValueError`, which conforms to scikit-learn's exception requirement (#5696).
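A small hedged illustration of what the new exception hierarchy buys you; the deliberately invalid objective name below exists only to trigger an error:

```python
import numpy as np
import xgboost as xgb

# XGBoostError is now a subclass of ValueError, so code written against
# scikit-learn's exception contract catches XGBoost errors uniformly.
assert issubclass(xgb.core.XGBoostError, ValueError)

dtrain = xgb.DMatrix(np.random.rand(10, 2), label=np.random.rand(10))
try:
    xgb.train({"objective": "not-a-real-objective"}, dtrain, num_boost_round=1)
except ValueError as err:
    print("caught as ValueError:", err)
```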
### Improved integration with Dask
* The XGBoost Dask API now exposes an asynchronous interface (#5862). See [the document](https://xgboost.readthedocs.io/en/latest/tutorials/dask.html#working-with-asyncio) for details.
* Zero-copy ingestion of GPU arrays via `DaskDeviceQuantileDMatrix` (#5623, #5799, #5800, #5803, #5837, #5874, #5901): Previously, the Dask interface had to make 2 data copies: one for concatenating the Dask partition/block into a single block and another for the internal representation. To save memory, we introduce `DaskDeviceQuantileDMatrix`. As long as Dask partitions are resident in the GPU memory, `DaskDeviceQuantileDMatrix` is able to ingest them directly without making copies. This matrix type wraps `DeviceQuantileDMatrix`.
* The prediction function now returns a GPU Series type if the input is from Dask-cuDF (#5710). This is to preserve the input data type.

### Robust handling of external data types (#5689, #5893)
* As we support more and more external data types, the handling logic has proliferated across the code base and become hard to keep track of. It also became unclear how missing values and threads are handled. We refactored the Python package code to collect all data handling logic in a central location, and now we have an explicit list of all supported data types.

### Improvements in GPU-side data matrix (`DeviceQuantileDMatrix`)
* The GPU-side data matrix now implements its own quantile sketching logic, so that data don't have to be transported back to the main memory (#5700, #5747, #5760, #5846, #5870, #5898). The GK sketching algorithm is also now better documented.
  - Now we can load extremely sparse datasets like URL, although performance is still sub-optimal.
* The GPU-side data matrix now exposes an iterative interface (#5783), so that users are able to construct a matrix from a data iterator. See the [Python demo](https://github.com/dmlc/xgboost/blob/release_1.2.0/demo/guide-python/data_iterator.py).

### New language binding: Swift (#5728)
* Visit https://github.com/kongzii/SwiftXGBoost for more details.

### Robust model serialization with JSON (#5772, #5804, #5831, #5857, #5934)
* We continue efforts from the 1.0.0 release to adopt JSON as the format to save and load models robustly.
* JSON model IO is significantly faster and produces smaller model files.
* Round-trip reproducibility is guaranteed, via the introduction of an efficient float-to-string conversion algorithm known as [the Ryū algorithm](https://dl.acm.org/doi/10.1145/3192366.3192369). The conversion is locale-independent, producing consistent numeric representation regardless of the locale setting of the user's machine.
* We fixed an issue in loading large JSON files to memory.
* It is now possible to load a JSON file from a remote source such as S3.

### Performance improvements
* CPU hist tree method optimization
  - Skip missing lookup in hist row partitioning if data is dense. (#5644)
  - Specialize training procedures for the CPU hist tree method on distributed environments. (#5557)
  - Add single precision histogram for CPU hist. Previously, the gradient histogram for CPU hist was hard-coded to be 64-bit; now users can specify the parameter `single_precision_histogram` to use a 32-bit histogram instead, for faster training. (#5624, #5811)
* GPU hist tree method optimization
  - Removed some unnecessary synchronizations and improved the memory allocation pattern. (#5707)
  - Optimize GPU hist for wide datasets. Previously, for wide datasets the atomic operation was performed on global memory; now it can run on shared memory for faster histogram building. However, there is a known small regression on GeForce cards with dense data. (#5795, #5926, #5948, #5631)

### API additions
* Support passing fmap to importance plot (#5719). Now the importance plot can show actual names of features instead of default ones.
* Support 64-bit seed. (#5643)
* A new C API `XGBoosterGetNumFeature` is added for getting the number of features in a booster (#5856).
* Feature names and feature types are now stored in the C++ core and saved in binary DMatrix (#5858).

### Breaking: The `predict()` method of `DaskXGBClassifier` now produces class predictions (#5986). Use `predict_proba()` to obtain probability predictions.
* Previously, `DaskXGBClassifier.predict()` produced probability predictions. This is inconsistent with the behavior of other scikit-learn classifiers, where `predict()` returns class predictions. We make a breaking change in the 1.2.0 release so that `DaskXGBClassifier.predict()` now correctly produces class predictions and thus behaves like other scikit-learn classifiers. Furthermore, we introduce the `predict_proba()` method for obtaining probability predictions, again to be in line with other scikit-learn classifiers.

### Breaking: Custom evaluation metric now receives raw prediction (#5954)
* Previously, the custom evaluation metric received a transformed prediction result when used with a classifier. Now the custom metric will receive a raw (untransformed) prediction and will need to transform the prediction itself, as in the sketch below. See [demo/guide-python/custom\_softmax.py](https://github.com/dmlc/xgboost/blob/release_1.2.0/demo/guide-python/custom_softmax.py) for an example.
* This change is to make the custom metric behave consistently with the custom objective, which already receives raw prediction (#5564).
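A hedged sketch of the new contract for a binary classifier: the metric now sees raw margin scores and must apply the sigmoid itself. The metric body and data are illustrative; see the linked `custom_softmax.py` demo for the authoritative multi-class example:

```python
import numpy as np
import xgboost as xgb

def logloss_from_raw(predt: np.ndarray, dtrain: xgb.DMatrix):
    """Custom metric: predt is now the raw margin, not a probability."""
    y = dtrain.get_label()
    prob = 1.0 / (1.0 + np.exp(-predt))  # transform the raw prediction ourselves
    eps = 1e-12
    loss = -np.mean(y * np.log(prob + eps) + (1 - y) * np.log(1 - prob + eps))
    return "logloss-from-raw", float(loss)

X, y = np.random.rand(200, 5), np.random.randint(2, size=200)
dtrain = xgb.DMatrix(X, label=y)
xgb.train({"objective": "binary:logistic", "disable_default_eval_metric": 1},
          dtrain, num_boost_round=5, evals=[(dtrain, "train")],
          feval=logloss_from_raw)
```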
### Breaking: XGBoost4J-Spark now requires Spark 3.0 and Scala 2.12 (#5836, #5890)
* Starting with version 3.0, Spark can manage GPU resources and allocate them among executors.
* Spark 3.0 dropped support for Scala 2.11 and now only supports Scala 2.12. Thus, XGBoost4J-Spark also only supports Scala 2.12.

### Breaking: XGBoost Python package now requires Python 3.6 and later (#5715)
* Python 3.6 has many useful features such as f-strings.

### Breaking: XGBoost now adopts the C++14 standard (#5664)
* Make sure to use a sufficiently modern C++ compiler that supports C++14, such as Visual Studio 2017, GCC 5.0+, and Clang 3.4+.

### Bug-fixes
* Fix a data race in the prediction function (#5853). As a byproduct, the prediction function now uses a thread-local data store and became thread-safe.
* Restore capability to run prediction when the test input has fewer features than the training data (#5955). This capability is necessary to support predicting with LIBSVM inputs. The previous release (1.1) had broken this capability, so we restore it in this version with better tests.
* Fix OpenMP build with CMake for R package, to support CMake 3.13 (#5895).
* Fix Windows 2016 build (#5902, #5918).
* Fix edge cases in scikit-learn interface with Pandas input by disabling feature validation. (#5953)
* [R] Enable weighted learning to rank (#5945)
* [R] Fix early stopping with custom objective (#5923)
* Fix NDK build (#5886)
* Add missing explicit template specializations for greater portability (#5921)
* Handle empty rows in data iterators correctly (#5929). This bug affects the file loader and JVM data frames.
* Fix `IsDense` (#5702)
* [jvm-packages] Fix wrong method name `setAllowZeroForMissingValue` (#5740)
* Fix shape inference for Dask predict (#5989)

### Usability Improvements, Documentation
* [Doc] Document that CUDA 10.0 is required (#5872)
* Refactored command line interface (CLI). Now the CLI is able to handle user errors and output basic documentation. (#5574)
* Better error handling in Python: use `raise from` syntax to preserve the full stacktrace (#5787).
* The JSON model dump now has a formal schema (#5660, #5818). The benefit is to prevent the `dump_model()` function from breaking. See [this document](https://xgboost.readthedocs.io/en/latest/tutorials/saving_model.html#difference-between-saving-model-and-dumping-model) to understand the difference between saving and dumping models.
* Add a reference to the GPU external memory paper (#5684)
* Document more objective parameters in the R package (#5682)
* Document the existence of pre-built binary wheels for MacOS (#5711)
* Remove `max.depth` in the R gblinear example. (#5753)
* Added conda environment file for building docs (#5773)
* Mention the dask blog post in the doc, which introduces using Dask with GPU and some internal workings. (#5789)
* Fix rendering of Markdown docs (#5821)
* Document new objectives and metrics available on GPUs (#5909)
* Better message when no GPU is found. (#5594)
* Remove the use of `silent` parameter from R demos. (#5675)
* Don't use masked array in array interface. (#5730)
* Update affiliation of @terrytangyuan: Ant Financial -> Ant Group (#5827)
* Move dask tutorial closer to other distributed tutorials (#5613)
* Update XGBoost + Dask overview documentation (#5961)
* Show `n_estimators` in the docstring of the scikit-learn interface (#6041)
* Fix a typo in a docstring of the scikit-learn interface (#5980)

### Maintenance: testing, continuous integration, build system
* [CI] Remove CUDA 9.0 from CI (#5674, #5745)
* Require CUDA 10.0+ in CMake build (#5718)
* [R] Remove dependency on gendef for Visual Studio builds (fixes #5608) (#5764). This enables building XGBoost with GPU support with R 4.x.
* [R-package] Reduce duplication in configure.ac (#5693)
* Bump com.esotericsoftware to 4.0.2 (#5690)
* Migrate some tests from AppVeyor to GitHub Actions to speed up the tests. (#5911, #5917, #5919, #5922, #5928)
* Reduce cost of the Jenkins CI server (#5884, #5904, #5892). We now enforce a daily budget via an automated monitor. We also dramatically reduced the workload for the Windows platform, since the cloud VM cost is vastly greater for Windows.
* [R] Set up automated R linter (#5944)
* [R] Replace uses of T and F with TRUE and FALSE (#5778)
* Update Docker container 'CPU' (#5956)
* Simplify CMake build with modern CMake techniques (#5871)
* Use `hypothesis` package for testing (#5759, #5835, #5849).
* Define `_CRT_SECURE_NO_WARNINGS` to remove unneeded warnings in MSVC (#5434)
* Run all Python demos in CI, to ensure that they don't break (#5651)
* Enhance NVTX support (#5636). Now we can use a unified timer between CPU and GPU. Also, CMake is able to find NVTX automatically.
* Speed up Python tests. (#5752)
* Add helper for generating batches of data. (#5756)
* Add c-api-demo to .gitignore (#5855)
* Add option to enable all compiler warnings in GCC/Clang (#5897)
* Make Python model compatibility test runnable locally (#5941)
* Add cupy to Windows CI (#5797)
* [CI] Fix cuDF install; merge 'gpu' and 'cudf' test suites (#5814)
* Update rabit submodule (#5680, #5876)
* Force colored output for Ninja build. (#5959)
* [CI] Assign larger /dev/shm to NCCL (#5966)
* Add missing Pytest marks to AsyncIO unit test (#5968)
* [CI] Use latest cuDF and dask-cudf (#6048)
* Add CMake flag to log C API invocations, to aid debugging (#5925)
* Fix a unit test on CLI, to handle RC versions (#6050)
* [CI] Use mgpu machine to run gpu hist unit tests (#6050)
* [CI] Build GPU-enabled JAR artifact and deploy to xgboost-maven-repo (#6050)

### Maintenance: Refactor code for legibility and maintainability
* Remove dead code in DMatrix initialization. (#5635)
* Catch dmlc error by ref. (#5678)
* Refactor the `gpu_hist` split evaluation in preparation for batched nodes enumeration. (#5610)
* Remove column major specialization. (#5755)
* Remove unused imports in Python (#5776)
* Avoid including `c_api.h` in header files. (#5782)
* Remove unweighted GK quantile, which is unused. (#5816)
* Add Python binding for rabit ops. (#5743)
* Implement `Empty` method for host device vector. (#5781)
* Remove print (#5867)
* Enforce tree order in JSON (#5974)

### Acknowledgement
**Contributors**: Nan Zhu (@CodingCat), @LionOrCatThatIsTheQuestion, Dmitry Mottl (@Mottl), Rory Mitchell (@RAMitchell), @ShvetsKS, Alex Wozniakowski (@a-wozniakowski), Alexander Gugel (@alexanderGugel), @anttisaukko, @boxdot, Andy Adinets (@canonizer), Ram Rachum (@cool-RR), Elliot Hershberg (@elliothershberg), Jason E. Aten, Ph.D. (@glycerine), Philip Hyunsu Cho (@hcho3), @jameskrach, James Lamb (@jameslamb), James Bourbeau (@jrbourbeau), Peter Jung (@kongzii), Lorenz Walthert (@lorenzwalthert), Oleksandr Kuvshynov (@okuvshynov), Rong Ou (@rongou), Shaochen Shi (@shishaochen), Yuan Tang (@terrytangyuan), Jiaming Yuan (@trivialfis), Bobby Wang (@wbo4958), Zhang Zhang (@zhangzhang10)

**Reviewers**: Nan Zhu (@CodingCat), @LionOrCatThatIsTheQuestion, Hao Yang (@QuantHao), Rory Mitchell (@RAMitchell), @ShvetsKS, Egor Smirnov (@SmirnovEgorRu), Alex Wozniakowski (@a-wozniakowski), Amit Kumar (@aktech), Avinash Barnwal (@avinashbarnwal), @boxdot, Andy Adinets (@canonizer), Chandra Shekhar Reddy (@chandrureddy), Ram Rachum (@cool-RR), Cristiano Goncalves (@cristianogoncalves), Elliot Hershberg (@elliothershberg), Jason E. Aten, Ph.D. (@glycerine),
Philip Hyunsu Cho (@hcho3), Tong He (@hetong007), James Lamb (@jameslamb), James Bourbeau (@jrbourbeau), Lee Drake (@leedrake5), DougM (@mengdong), Oleksandr Kuvshynov (@okuvshynov), Rong Ou (@rongou), Shaochen Shi (@shishaochen), Xu Xiao (@sperlingxx), Yuan Tang (@terrytangyuan), Theodore Vasiloudis (@thvasilo), Jiaming Yuan (@trivialfis), Bobby Wang (@wbo4958), Zhang Zhang (@zhangzhang10)

## v1.1.1 (2020.06.06)
This patch release applies the following patches to the 1.1.0 release:
* CPU performance improvement in the PyPI wheels (#5720)
* Fix loading old model (#5724)
* Install pkg-config file (#5744)

## v1.1.0 (2020.05.17)

### Better performance on multi-core CPUs (#5244, #5334, #5522)
* Poor performance scaling of the `hist` algorithm for multi-core CPUs has been under investigation (#3810). #5244 concludes the ongoing effort to improve performance scaling on multi-core CPUs, in particular Intel CPUs. Roadmap: #5104
* #5334 makes steps toward reducing memory consumption for the `hist` tree method on CPU.
* #5522 optimizes random number generation for data sampling.

### Deterministic GPU algorithm for regression and classification (#5361)
* The GPU algorithm for regression and classification tasks is now deterministic.
* Roadmap: #5023. Currently only single-GPU training is deterministic. Distributed training with multiple GPUs is not yet deterministic.

### Improve external memory support on GPUs (#5093, #5365)
* Starting from the 1.0.0 release, we added support for external memory on GPUs to enable training with larger datasets. Gradient-based sampling (#5093) speeds up the external memory algorithm by intelligently sampling a subset of the training data to copy into the GPU memory. [Learn more about out-of-core GPU gradient boosting.](https://arxiv.org/abs/2005.09148)
* GPU-side data sketching now works with data from external memory (#5365).

### Parameter validation: detection of unused or incorrect parameters (#5477, #5569, #5508)
* Mis-spelled training parameters are a common user mistake. In previous versions of XGBoost, mis-spelled parameters were silently ignored. Starting with the 1.0.0 release, XGBoost will produce a warning message if there are any unused training parameters. The 1.1.0 release makes parameter validation available to the scikit-learn interface (#5477) and the R binding (#5569).

### Thread-safe, in-place prediction method (#5389, #5512)
* Previously, the prediction method was not thread-safe (#5339). This release adds a new API function `inplace_predict()` that is thread-safe. It is now possible to serve concurrent requests for prediction using a shared model object.
* It is now possible to compute prediction in-place for selected data formats (`numpy.ndarray` / `scipy.sparse.csr_matrix` / `cupy.ndarray` / `cudf.DataFrame` / `pd.DataFrame`) without creating a `DMatrix` object.
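A minimal sketch of the new method (the model and data are illustrative; only `inplace_predict()` itself comes from the notes above):

```python
import numpy as np
import xgboost as xgb

X, y = np.random.rand(100, 4), np.random.rand(100)
booster = xgb.train({"objective": "reg:squarederror"},
                    xgb.DMatrix(X, label=y), num_boost_round=10)

# Predict directly on the numpy array: no DMatrix is constructed, and the
# call is thread-safe, so one Booster can serve concurrent requests.
preds = booster.inplace_predict(np.random.rand(5, 4))
print(preds.shape)  # (5,)
```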
### Addition of Accelerated Failure Time objective for survival analysis (#4763, #5473, #5486, #5552, #5553)
* Survival analysis (regression) models the time it takes for an event of interest to occur. The target label is potentially censored, i.e. the label is a range rather than a single number. We added a new objective `survival:aft` to support survival analysis. Also added is a new API to specify the ranged labels. Check out [the tutorial](https://xgboost.readthedocs.io/en/release_1.1.0/tutorials/aft_survival_analysis.html) and the [demos](https://github.com/dmlc/xgboost/tree/release_1.1.0/demo/aft_survival).
* GPU support is work in progress (#5714).

### Improved installation experience on Mac OSX (#5597, #5602, #5606, #5701)
* It only takes two commands to install the XGBoost Python package: `brew install libomp` followed by `pip install xgboost`. The installed XGBoost will use all CPU cores. Even better, starting with this release, we distribute pre-compiled binary wheels targeting Mac OSX. Now the install command `pip install xgboost` finishes instantly, as it no longer compiles the C++ source of XGBoost. The last three Mac versions (High Sierra, Mojave, Catalina) are supported.
* R package: the 1.1.0 release fixes the error `Initializing libomp.dylib, but found libomp.dylib already initialized` (#5701)

### Ranking metrics are now accelerated on GPUs (#5380, #5387, #5398)

### GPU-side data matrix to ingest data directly from other GPU libraries (#5420, #5465)
* Previously, data on GPU memory had to be copied back to the main memory before it could be used by XGBoost. Starting with the 1.1.0 release, XGBoost provides a dedicated interface (`DeviceQuantileDMatrix`) so that it can ingest data from GPU memory directly. As a result, XGBoost interoperates better with GPU-accelerated data science libraries, such as cuDF, cuPy, and PyTorch.
* Set device in device dmatrix. (#5596)

### Robust model serialization with JSON (#5123, #5217)
* We continue efforts from the 1.0.0 release to adopt JSON as the format to save and load models robustly. Refer to the release note for 1.0.0 to learn more.
* It is now possible to store the internal configuration of the trained model (`Booster`) object in R as a JSON string (#5123, #5217).

### Improved integration with Dask
* Pass through `verbose` parameter for dask fit (#5413)
* Use `DMLC_TASK_ID`. (#5415)
* Order the prediction result. (#5416)
* Honor `nthreads` from dask worker. (#5414)
* Enable grid searching with scikit-learn. (#5417)
* Check non-equal when setting threads. (#5421)
* Accept other inputs for prediction. (#5428)
* Fix missing value for scikit-learn interface. (#5435)

### XGBoost4J-Spark: Check number of columns in the data iterator (#5202, #5303)
* Before, the native layer in XGBoost did not know the number of columns (features) ahead of time and had to guess it by counting the feature index when ingesting data. This method has a failure mode in the distributed setting: if the training data is highly sparse, some features may be completely missing in one or more worker partitions. Thus, one or more workers may deduce an incorrect data shape, leading to crashes or silently wrong models.
* Enforce the correct data shape by passing the number of columns explicitly from the JVM layer into the native layer.

### Major refactoring of the `DMatrix` class
* Continued from the 1.0.0 release.
* Remove update prediction cache from predictors. (#5312)
* Predict on Ellpack. (#5327)
* Partial rewrite of EllpackPage (#5352)
* Use ellpack for prediction only when sparsepage doesn't exist. (#5504)
* RFC: #4354, Roadmap: #5143

### Breaking: XGBoost Python package now requires Pip 19.0 and higher (#5589)
* Your Linux machine may have an old version of Pip and may attempt to install a source package, leading to long installation time. This is because we are now using the `manylinux2010` tag in the binary wheel release. Ensure you have Pip 19.0 or newer by running `python3 -m pip -V` to check the version. Upgrade Pip with the command
  ```
  python3 -m pip install --upgrade pip
  ```
  Upgrading to the latest pip allows us to depend on newer versions of system libraries. [TensorFlow](https://www.tensorflow.org/install/pip) also requires Pip 19.0+.
### Breaking: GPU algorithm now requires CUDA 10.0 and higher (#5649)
* CUDA 10.0 is necessary to make the GPU algorithm deterministic (#5361).

### Breaking: `silent` parameter is now removed (#5476)
* Please use `verbosity` instead.

### Breaking: Set `output_margin` to True for custom objectives (#5564)
* Now custom objectives in both the R and Python interfaces get untransformed (raw) prediction outputs.

### Breaking: `Makefile` is now removed. We use CMake exclusively to build XGBoost (#5513)
* Exception: the R package uses Autotools, as the CRAN ecosystem has not yet adopted CMake widely.

### Breaking: `distcol` updater is now removed (#5507)
* The `distcol` updater has long been broken, and we currently lack the resources to implement a working version from scratch.

### Deprecation notices
* **Python 3.5**. This release is the last release to support Python 3.5. The following release (1.2.0) will require Python 3.6.
* **Scala 2.11**. Currently XGBoost4J supports Scala 2.11. However, if a future release of XGBoost adopts Spark 3, it will not support Scala 2.11, as Spark 3 requires Scala 2.12+. We do not yet know which XGBoost release will adopt Spark 3.

### Known limitations
* (Python package) When early stopping is activated with `early_stopping_rounds` at training time, the prediction method (`xgb.predict()`) behaves in a surprising way. If XGBoost runs for M rounds and chooses iteration N (N < M) as the best iteration, then the prediction method will use M trees by default. To use the best iteration (N trees), users will need to manually take the best iteration field `bst.best_iteration` and pass it as the `ntree_limit` argument to `xgb.predict()`, as sketched below. See #5209 and #4052 for additional context.
* GPU ranking objective is currently not deterministic (#5561).
* When the training parameter `reg_lambda` is set to zero, some leaf nodes may be assigned a NaN value. (See [discussion](https://discuss.xgboost.ai/t/still-getting-unexplained-nans-new-replication-code/1383/9).) For now, please set `reg_lambda` to a nonzero value.
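A hedged sketch of the workaround described in the first limitation above; the data and round counts are illustrative, and the `ntree_limit` usage follows the prescription in the note:

```python
import numpy as np
import xgboost as xgb

X, y = np.random.rand(500, 6), np.random.rand(500)
dtrain = xgb.DMatrix(X[:400], label=y[:400])
dvalid = xgb.DMatrix(X[400:], label=y[400:])
bst = xgb.train({"objective": "reg:squarederror"}, dtrain,
                num_boost_round=100, evals=[(dvalid, "valid")],
                early_stopping_rounds=5)

# By default predict() uses all trees, even those past the best iteration.
preds_all = bst.predict(dvalid)
# Pass the best iteration as ntree_limit, as the note above describes.
preds_best = bst.predict(dvalid, ntree_limit=bst.best_iteration)
```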
### Community and Governance
* The XGBoost Project Management Committee (PMC) is pleased to announce a new committer: Egor Smirnov (@SmirnovEgorRu). He has led a major initiative to improve the performance of XGBoost on multi-core CPUs.

### Bug-fixes
* Improved compatibility with scikit-learn (#5255, #5505, #5538)
* Remove f-string, since it's not supported by Python 3.5 (#5330). Note that Python 3.5 support is deprecated and scheduled to be dropped in the upcoming release (1.2.0).
* Fix the pruner so that it doesn't prune the same branch twice (#5335)
* Enforce only major version in JSON model schema (#5336). Any major revision of the model schema would bump up the major version.
* Fix a small typo in sklearn.py that broke multiple eval metrics (#5341)
* Restore loading model from a memory buffer (#5360)
* Define lazy isinstance for Python compat (#5364)
* [R] Fixed uses of `class()` (#5426)
* Force compressed buffer to be 4 bytes aligned, to keep cuda-memcheck happy (#5441)
* Remove warning for calling host function (`std::max`) on a GPU device (#5453)
* Fix uninitialized value bug in xgboost callback (#5463)
* Fix model dump in CLI (#5485)
* Fix out-of-bound array access in `WQSummary::SetPrune()` (#5493)
* Ensure that configured `dmlc/build_config.h` is picked up by Rabit and XGBoost, to fix build on Alpine (#5514)
* Fix a misspelled method, made in a git merge (#5509)
* Fix a bug in binary model serialization (#5532)
* Fix CLI model IO (#5535)
* Don't use `uint` for threads (#5542)
* Fix R interaction constraints to handle more than 100000 features (#5543)
* [jvm-packages] XGBoost Spark should deal with NaN when parsing evaluation output (#5546)
* GPU-side data sketching is now aware of query groups in learning-to-rank data (#5551)
* Fix DMatrix slicing for newly added fields (#5552)
* Fix configuration status with loading binary model (#5562)
* Fix build when OpenMP is disabled (#5566)
* R compatibility patches (#5577, #5600)
* gpu\_hist performance fixes (#5558)
* Don't set seed on CLI interface (#5563)
* [R] When serializing model, preserve model attributes related to early stopping (#5573)
* Avoid rabit calls in learner configuration (#5581)
* Hide C++ symbols in libxgboost.so when building Python wheel (#5590). This fixes apache/incubator-tvm#4953.
* Fix compilation on Mac OSX High Sierra (10.13) (#5597)
* Fix build on big endian CPUs (#5617)
* Resolve crash due to use of `vector<bool>::iterator` (#5642)
* Validate JSON model dump using JSON schema (#5660)

### Performance improvements
* Wide dataset quantile performance improvement (#5306)
* Reduce memory usage of GPU-side data sketching (#5407)
* Reduce span check overhead (#5464)
* Serialise booster after training to free up GPU memory (#5484)
* Use the maximum amount of GPU shared memory available to speed up the histogram kernel (#5491)
* Use non-synchronising scan in Thrust (#5560)
* Use `cudaDeviceGetAttribute()` instead of `cudaGetDeviceProperties()` for speed (#5570)

### API changes
* Support importing data from a Pandas SparseArray (#5431)
* `HostDeviceVector` (vector shared between CPU and GPU memory) now exposes a `HostSpan` interface, to enable access on the CPU side with bound checks (#5459)
* Accept other gradient types for `SplitEntry` (#5467)

### Usability Improvements, Documentation
* Add `JVM_CHECK_CALL` to prevent C++ exceptions from leaking into the JVM layer (#5199)
* Updated Windows build docs (#5283)
* Update affiliation of @hcho3 (#5292)
* Display Sponsor button, link to OpenCollective (#5325)
* Update docs for GPU external memory (#5332)
* Add link to GPU documentation (#5437)
* Small updates to GPU documentation (#5483)
* Edits on tutorial for XGBoost job on Kubernetes (#5487)
* Add reference to GPU external memory (#5490)
* Fix typos (#5346, #5371, #5384, #5399, #5482, #5515)
* Update Python doc (#5517)
* Add Neptune and Optuna to list of examples (#5528)
* Raise an error if the number of data weights doesn't match the number of data sets (#5540)
* Add a note about GPU ranking (#5572)
* Clarify meaning of `training` parameter in the C API function `XGBoosterPredict()` (#5604)
* Better error handling for situations where existing trees cannot be modified (#5406, #5418).
This feature is enabled when `process_type` is set to `update`.

### Maintenance: testing, continuous integration, build system
* Add C++ test coverage for data sketching (#5251)
* Ignore gdb\_history (#5257)
* Rewrite setup.py. (#5271, #5280)
* Use `scikit-learn` in extra dependencies (#5310)
* Add CMake option to build static library (#5397)
* [R] Changed FindLibR to take advantage of CMake cache (#5427)
* [R] Fixed inconsistency in R -e calls in FindLibR.cmake (#5438)
* Refactor tests with data generator (#5439)
* Resolve failing Travis CI (#5445)
* Update dmlc-core. (#5466)
* [CI] Use clang-tidy 10 (#5469)
* De-duplicate code for checking maximum number of nodes (#5497)
* [CI] Use Ubuntu 18.04 LTS in JVM CI, because 19.04 is EOL (#5537)
* [jvm-packages] [CI] Create a Maven repository to host SNAPSHOT JARs (#5533)
* [jvm-packages] [CI] Publish XGBoost4J JARs with Scala 2.11 and 2.12 (#5539)
* [CI] Use Vault repository to re-gain access to devtoolset-4 (#5589)

### Maintenance: Refactor code for legibility and maintainability
* Move prediction cache to Learner (#5220, #5302)
* Remove SimpleCSRSource (#5315)
* Refactor SparsePageSource, delete cache files after use (#5321)
* Remove unnecessary DMatrix methods (#5324)
* Split up `LearnerImpl` (#5350)
* Move segment sorter to common (#5378)
* Move thread local entry into Learner (#5396)
* Split up test helpers header (#5455)
* Require setting leaf stat when expanding tree (#5501)
* Purge device\_helpers.cuh (#5534)
* Use thrust functions instead of custom functions (#5544)

### Acknowledgement
**Contributors**: Nan Zhu (@CodingCat), Rory Mitchell (@RAMitchell), @ShvetsKS, Egor Smirnov (@SmirnovEgorRu), Andrew Kane (@ankane), Avinash Barnwal (@avinashbarnwal), Bart Broere (@bartbroere), Andy Adinets (@canonizer), Chen Qin (@chenqin), Daiki Katsuragawa (@daikikatsuragawa), David Díaz Vico (@daviddiazvico), Darius Kharazi (@dkharazi), Darby Payne (@dpayne), Jason E. Aten, Ph.D. (@glycerine), Philip Hyunsu Cho (@hcho3), James Lamb (@jameslamb), Jan Borchmann (@jborchma), Kamil A. Kaczmarek (@kamil-kaczmarek), Melissa Kohl (@mjkohl32), Nicolas Scozzaro (@nscozzaro), Paul Kaefer (@paulkaefer), Rong Ou (@rongou), Samrat Pandiri (@samratp), Sriram Chandramouli (@sriramch), Yuan Tang (@terrytangyuan), Jiaming Yuan (@trivialfis), Liang-Chi Hsieh (@viirya), Bobby Wang (@wbo4958), Zhang Zhang (@zhangzhang10)

**Reviewers**: Nan Zhu (@CodingCat), @LeZhengThu, Rory Mitchell (@RAMitchell), @ShvetsKS, Egor Smirnov (@SmirnovEgorRu), Steve Bronder (@SteveBronder), Nikita Titov (@StrikerRUS), Andrew Kane (@ankane), Avinash Barnwal (@avinashbarnwal), @brydag, Andy Adinets (@canonizer), Chandra Shekhar Reddy (@chandrureddy), Chen Qin (@chenqin), Codecov (@codecov-io), David Díaz Vico (@daviddiazvico), Darby Payne (@dpayne), Jason E. Aten, Ph.D. (@glycerine),
Philip Hyunsu Cho (@hcho3), James Lamb (@jameslamb), @johnny-cat, Mu Li (@mli), Mate Soos (@msoos), @rnyak, Rong Ou (@rongou), Sriram Chandramouli (@sriramch), Toby Dylan Hocking (@tdhock), Yuan Tang (@terrytangyuan), Oleksandr Pryimak (@trams), Jiaming Yuan (@trivialfis), Liang-Chi Hsieh (@viirya), Bobby Wang (@wbo4958)

## v1.0.2 (2020.03.03)
This patch release applies the following patches to the 1.0.0 release:
* Fix a small typo in sklearn.py that broke multiple eval metrics (#5341)
* Restore loading model from buffer (#5360)
* Use type name for data type check (#5364)

## v1.0.1 (2020.02.21)
This release is identical to the 1.0.0 release, except that it fixes a small bug that rendered 1.0.0 incompatible with Python 3.5. See #5328.

## v1.0.0 (2020.02.19)
This release marks a major milestone for the XGBoost project.

### Apache-style governance, contribution policy, and semantic versioning (#4646, #4659)
* Starting with the 1.0.0 release, the XGBoost Project is adopting Apache-style governance. The full community guideline is [available in the doc website](https://xgboost.readthedocs.io/en/release_1.0.0/contrib/community.html). Note that we now have a Project Management Committee (PMC) who will steward the project on a long-term basis. The PMC is also entrusted to run and fund the project's continuous integration (CI) infrastructure (https://xgboost-ci.net).
* We also adopt [semantic versioning](https://semver.org/). See [our release versioning policy](https://xgboost.readthedocs.io/en/release_1.0.0/contrib/release.html).

### Better performance scaling for multi-core CPUs (#4502, #4529, #4716, #4851, #5008, #5107, #5138, #5156)
* Poor performance scaling of the `hist` algorithm for multi-core CPUs has been under investigation (#3810). Previous effort #4529 was replaced with a series of pull requests (#5107, #5138, #5156) aimed at achieving the same performance benefits while keeping the C++ codebase legible. The latest performance benchmark results show [up to 5x speedup on Intel CPUs with many cores](https://github.com/dmlc/xgboost/pull/5156#issuecomment-580024413). Note: #5244, which concludes the effort, will become part of the upcoming release 1.1.0.

### Improved installation experience on Mac OSX (#4672, #5074, #5080, #5146, #5240)
* It used to be quite complicated to install XGBoost on Mac OSX. XGBoost uses OpenMP to distribute work among multiple CPU cores, and Mac's default C++ compiler (Apple Clang) does not come with OpenMP. The existing work-around (using another C++ compiler) was complex and prone to failure with cryptic diagnostics (#4933, #4949, #4969).
* Now it only takes two commands to install XGBoost: `brew install libomp` followed by `pip install xgboost`. The installed XGBoost will use all CPU cores.
* Even better, XGBoost is now available from Homebrew: `brew install xgboost`. See Homebrew/homebrew-core#50467.
* Previously, if you installed the XGBoost R package using the command `install.packages('xgboost')`, it could only use a single CPU core and you would experience slow training performance. With the 1.0.0 release, the R package will use all CPU cores out of the box.

### Distributed XGBoost now available on Kubernetes (#4621, #4939)
* Check out the [tutorial for setting up distributed XGBoost on a Kubernetes cluster](https://xgboost.readthedocs.io/en/release_1.0.0/tutorials/kubernetes.html).
### Ruby binding for XGBoost (#4856)

### New Native Dask interface for multi-GPU and multi-node scaling (#4473, #4507, #4617, #4819, #4907, #4914, #4941, #4942, #4951, #4973, #5048, #5077, #5144, #5270)
* XGBoost now integrates seamlessly with [Dask](https://dask.org/), a lightweight distributed framework for data processing. Together with the first-class support for cuDF data frames (see below), it is now easier than ever to create end-to-end data pipelines running on one or more NVIDIA GPUs.
* Multi-GPU training with Dask is now up to 20% faster than the previous release (#4914, #4951).

### First-class support for cuDF data frames and cuPy arrays (#4737, #4745, #4794, #4850, #4891, #4902, #4918, #4927, #4928, #5053, #5189, #5194, #5206, #5219, #5225)
* [cuDF](https://github.com/rapidsai/cudf) is a data frame library for loading and processing tabular data on NVIDIA GPUs. It provides a Pandas-like API.
* [cuPy](https://github.com/cupy/cupy) implements a NumPy-compatible multi-dimensional array on NVIDIA GPUs.
* Now users can keep the data on the GPU memory throughout the end-to-end data pipeline, obviating the need for copying data between the main memory and GPU memory.
* XGBoost can accept any data structure that exposes the `__array_interface__` signature, opening the way to support other columnar formats that are compatible with Apache Arrow.

### [Feature interaction constraint](https://xgboost.readthedocs.io/en/release_1.0.0/tutorials/feature_interaction_constraint.html) is now available with `approx` and `gpu_hist` algorithms (#4534, #4587, #4596, #5034).

### Learning to rank is now GPU accelerated (#4873, #5004, #5129)
* Supported ranking objectives: NDCG, Map, Pairwise.
* [Up to 2x improved training performance on GPUs](https://devblogs.nvidia.com/learning-to-rank-with-xgboost-and-gpu/).

### Enable `gamma` parameter for GPU training (#4874, #4953)
* The `gamma` parameter specifies the minimum loss reduction required to add a new split in a tree. A larger value for `gamma` has the effect of pre-pruning the tree, by making it harder to add splits.
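A hedged sketch of using `gamma` together with the GPU algorithm; it requires a CUDA-capable GPU, and the data and the value of `gamma` are illustrative:

```python
import numpy as np
import xgboost as xgb

X, y = np.random.rand(300, 5), np.random.rand(300)
dtrain = xgb.DMatrix(X, label=y)

# gamma is the minimum loss reduction required to add a split; raising it
# pre-prunes the tree. As of this release it also takes effect on GPU.
params = {"tree_method": "gpu_hist", "gamma": 1.0,
          "objective": "reg:squarederror"}
bst = xgb.train(params, dtrain, num_boost_round=10)
```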
### External memory for GPU training (#4486, #4526, #4747, #4833, #4879, #5014)
* It is now possible to use NVIDIA GPUs even when the size of training data exceeds the available GPU memory. Note that the external memory support for GPU is still experimental. #5093 will further improve performance and will become part of the upcoming release 1.1.0.
* RFC for enabling external memory with GPU algorithms: #4357

### Improve Scikit-Learn interface (#4558, #4842, #4929, #5049, #5151, #5130, #5227)
* Many users of XGBoost enjoy the convenience and breadth of the Scikit-Learn ecosystem. In this release, we revise the Scikit-Learn API of XGBoost (`XGBRegressor`, `XGBClassifier`, and `XGBRanker`) to achieve feature parity with the traditional XGBoost interface (`xgboost.train()`).
* Insert checks to validate data shapes.
* Produce an error message if `eval_set` is not a tuple. An error message is better than silently crashing.
* Allow using the `numpy.RandomState` object.
* Add `n_jobs` as an alias of `nthread`.
* Roadmap: #5152

### XGBoost4J-Spark: Redesigning checkpointing mechanism
* RFC is available at #4786
* Clean up checkpoint file after a successful training job (#4754): The current implementation in XGBoost4J-Spark does not clean up the checkpoint file after a successful training job. If the user runs another job with the same checkpointing directory, she will get a wrong model because the second job will re-use the checkpoint file left over from the first job. To prevent this scenario, we propose to always clean up the checkpoint file after every successful training job.
* Avoid Multiple Jobs for Checkpointing (#5082): The current method for checkpointing is to collect the booster produced at the last iteration of each checkpoint interval to the Driver and persist it in HDFS. The major issue with this approach is that it needs to re-perform the data preparation for training if the user did not choose to cache the training dataset. To avoid re-performing data prep, we build external-memory checkpointing in the XGBoost4J layer as well.
* Enable deterministic repartitioning when checkpointing is enabled (#4807): The distributed algorithm for gradient boosting assumes a fixed partitioning of the training data between multiple iterations. In previous versions, there was no guarantee that the data partition would stay the same, especially when a worker goes down and some data had to be recovered from a previous checkpoint. In this release, we make the data partition deterministic by using the hash value of each data row to compute the partition.

### XGBoost4J-Spark: handle errors thrown by the native code (#4560)
* All core logic of XGBoost is written in C++, so XGBoost4J-Spark internally uses the C++ code via the Java Native Interface (JNI). #4560 adds proper error handling for any errors or exceptions arising from the C++ code, so that the XGBoost Spark application can be torn down in an orderly fashion.

### XGBoost4J-Spark: Refine method to count the number of alive cores (#4858)
* The `SparkParallelismTracker` class ensures that a sufficient number of executor cores are alive. To that end, it is important to query the number of alive cores reliably.

### XGBoost4J: Add `BigDenseMatrix` to store more than `Integer.MAX_VALUE` elements (#4383)

### Robust model serialization with JSON (#4632, #4708, #4739, #4868, #4936, #4945, #4974, #5086, #5087, #5089, #5091, #5094, #5110, #5111, #5112, #5120, #5137, #5218, #5222, #5236, #5245, #5248, #5281)
* In this release, we introduce experimental support for using [JSON](https://www.json.org/json-en.html) for serializing (saving/loading) XGBoost models and related hyperparameters for training. We would like to eventually replace the old binary format with JSON, since it is an open format and parsers are available in many programming languages and platforms. See [the documentation for model I/O using JSON](https://xgboost.readthedocs.io/en/release_1.0.0/tutorials/saving_model.html). #3980 explains why JSON was chosen over other alternatives.
* To maximize interoperability and compatibility of the serialized models, we now split serialization into two parts (#4855):
  1. Model, e.g. decision trees and strictly related metadata like `num_features`.
  2. Internal configuration, consisting of training parameters and other configurable parameters. For example, `max_delta_step`, `tree_method`, `objective`, `predictor`, `gpu_id`.

  Previously, users often ran into issues where the model file produced by one machine could not load or run on another machine. For example, models trained using a machine with an NVIDIA GPU could not run on another machine without a GPU (#5291, #5234). The reason is that the old binary format saved some internal configuration that was not universally applicable to all machines, e.g. `predictor='gpu_predictor'`. Now, the model saving function (`Booster.save_model()` in Python) will save only the model, without internal configuration. This guarantees that your model file can be used anywhere.
  Internal configuration will be serialized in limited circumstances such as:
  * Multiple nodes in a distributed system exchanging model details over the network.
  * Model checkpointing, to recover from possible crashes.

  This work proved to be useful for parameter validation as well (see below).
* Starting with the 1.0.0 release, we will use semantic versioning to indicate whether the model produced by one version of XGBoost would be compatible with another version of XGBoost. Any change in the major version indicates a breaking change in the serialization format.
* We now provide a robust method to save and load scikit-learn related attributes (#5245). Previously, we used Python pickle to save Python attributes related to `XGBClassifier`, `XGBRegressor`, and `XGBRanker` objects. The attributes are necessary to properly interact with scikit-learn. See #4639 for more details. The use of pickling hampered interoperability, as a pickle from one machine may not necessarily work on another machine. Starting with this release, we use an alternative method to serialize the scikit-learn related attributes. The use of Python pickle is now discouraged (#5236, #5281).

### Parameter validation: detection of unused or incorrect parameters (#4553, #4577, #4738, #4801, #4961, #5101, #5157, #5167, #5256)
* Mis-spelled training parameters are a common user mistake. In previous versions of XGBoost, mis-spelled parameters were silently ignored. Starting with the 1.0.0 release, XGBoost will produce a warning message if there are any unused training parameters. Currently, parameter validation is available to R users and Python XGBoost API users. We are working to extend its support to scikit-learn users.
* Configuration steps now have well-defined semantics (#4542, #4738), so we know exactly where and how the internal configurable parameters are changed.
* The user can now use the `save_config()` function to inspect all (used) training parameters. This is helpful for debugging model performance.
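A brief hedged sketch tying the two features above together; the model and parameters are illustrative, and the `.json` extension is what selects the experimental JSON format:

```python
import numpy as np
import xgboost as xgb

X, y = np.random.rand(100, 4), np.random.rand(100)
bst = xgb.train({"objective": "reg:squarederror", "max_depth": 3},
                xgb.DMatrix(X, label=y), num_boost_round=5)

# save_model() writes only the model (no internal configuration),
# so the resulting file is portable across machines.
bst.save_model("model.json")

# save_config() returns the internal configuration as a JSON string,
# which is handy for inspecting the (used) training parameters.
print(bst.save_config()[:200])
```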
### Allow individual workers to recover from faults (#4808, #4966)
* Status quo: if a worker fails, all workers are shut down and restarted, and learning resumes from the last checkpoint. This involves requesting resources from the scheduler (e.g. Spark) and shuffling all the data again from scratch. Both of these operations can be quite costly and block training for extended periods of time, especially if the training data is big and the number of worker nodes is in the hundreds.
* The proposed solution is to recover the single node that failed, instead of shutting down all workers. The rest of the cluster waits until the single failed worker is bootstrapped and catches up with the rest.
* See roadmap at #4753. Note that this is work in progress. In particular, the feature is not yet available from XGBoost4J-Spark.

### Accurate prediction for DART models
* Use DART tree weights when computing SHAPs (#5050)
* Don't drop trees during DART prediction by default (#5115)
* Fix DART prediction in R (#5204)

### Make external memory more robust
* Fix issues with training with external memory on CPU (#4487)
* Fix crash with approx tree method on CPU (#4510)
* Fix external memory race in `exact` (#4980). Note: `dmlc::ThreadedIter` is not actually thread-safe. We would like to re-design it in the long term.

### Major refactoring of the `DMatrix` class (#4686, #4744, #4748, #5044, #5092, #5108, #5188, #5198)
* Goal 1: improve performance and reduce memory consumption. Right now, if the user trains a model with a NumPy array as training data, the array gets copied 2-3 times before training begins. We'd like to reduce duplication of the data matrix.
* Goal 2: Expose a common interface to external data, unify the way DMatrix objects are constructed, and simplify the process of adding new external data sources. This work is essential for ingesting cuPy arrays.
* Goal 3: Handle missing values consistently.
* RFC: #4354, Roadmap: #5143
* This work is also relevant to external memory support on GPUs.

### Breaking: XGBoost Python package now requires Python 3.5 or newer (#5021, #5274)
* Python 3.4 reached its end-of-life on March 16, 2019, so we now require Python 3.5 or newer.

### Breaking: GPU algorithm now requires CUDA 9.0 and higher (#4527, #4580)

### Breaking: `n_gpus` parameter removed; multi-GPU training now requires a distributed framework (#4579, #4749, #4773, #4810, #4867, #4908)
* #4531 proposed removing support for single-process multi-GPU training. Contributors would focus on multi-GPU support through distributed frameworks such as Dask and Spark, where the framework would be expected to assign a worker process for each GPU independently. By delegating GPU management and data movement to the distributed framework, we can greatly simplify the core XGBoost codebase, make multi-GPU training more robust, and reduce the burden for future development.

### Breaking: Some deprecated features have been removed
* ``gpu_exact`` training method (#4527, #4742, #4777). Use ``gpu_hist`` instead.
* ``learning_rates`` parameter in Python (#5155). Use the callback API instead.
* ``num_roots`` (#5059, #5165), since the current training code always uses a single root node.
* GPU-specific objectives (#4690), such as `gpu:reg:linear`. Use objectives without the `gpu:` prefix; the GPU will be used automatically if your machine has one.

### Breaking: the C API function `XGBoosterPredict()` now asks for an extra parameter `training`.

### Breaking: We now use CMake exclusively to build XGBoost. `Makefile` is being sunset.
* Exception: the R package uses Autotools, as the CRAN ecosystem has not yet adopted CMake widely.

### Performance improvements
* Smarter choice of histogram construction for distributed `gpu_hist` (#4519)
* Optimizations for quantization on device (#4572)
* Introduce caching memory allocator to avoid latency associated with GPU memory allocation (#4554, #4615)
* Optimize the initialization stage of the CPU `hist` algorithm for sparse datasets (#4625)
* Prevent unnecessary data copies from GPU memory to the host (#4795)
* Improve operation efficiency for single prediction (#5016)
* Group builder modified for incremental building, to speed up building large `DMatrix` (#5098)

### Bug-fixes
* Eliminate `FutureWarning: Series.base is deprecated` (#4337)
* Ensure pandas DataFrame column names are treated as strings in type error message (#4481)
* [jvm-packages] Add back `reg:linear` for Scala, as it is only deprecated and not meant to be removed yet (#4490)
* Fix library loading for Cygwin users (#4499)
* Fix prediction from loaded pickle (#4516)
* Enforce exclusion between `pred_contribs=True` and `pred_interactions=True` (#4522)
* Do not return dangling reference to local `std::string` (#4543)
* Set the appropriate device before freeing device memory (#4566)
* Mark `SparsePageDmatrix` destructor default. (#4568)
* Choose the appropriate tree method only when the tree method is 'auto' (#4571)
* Fix `benchmark_tree.py` (#4593)
* [jvm-packages] Fix silly bug in feature scoring (#4604)
* Fix GPU predictor when the test data matrix has a different number of features than the training data matrix used to train the model (#4613)
* Fix external memory for get column batches. (#4622)
* [R] Use built-in label when xgb.DMatrix is given to xgb.cv() (#4631)
* Fix early stopping in the Python package (#4638)
* Fix AUC error in distributed mode caused by imbalanced dataset (#4645, #4798)
* [jvm-packages] Expose `setMissing` method in `XGBoostClassificationModel` / `XGBoostRegressionModel` (#4643)
* Remove initializing stringstream reference. (#4788)
* [R] `xgb.get.handle` now checks all listed classes of `object` (#4800)
* Do not use `gpu_predictor` unless data comes from GPU (#4836)
* Fix data loading (#4862)
* Work around `isnan` across different environments. (#4883)
* [jvm-packages] Handle Long-type parameter (#4885)
* Don't `set_params` at the end of `set_state` (#4947). Ensure that the model does not change after pickling and unpickling multiple times.
* C++ exceptions should not crash OpenMP loops (#4960)
* Fix `usegpu` flag in DART. (#4984)
* Run training with empty `DMatrix` (#4990, #5159)
* Ensure that no two processes can use the same GPU (#4990)
* Fix repeated split and 0 cover nodes (#5010)
* Reset histogram hit counter between multiple data batches (#5035)
* Fix `feature_name` created from Int64Index dataframe. (#5081)
* Don't use 0 for "fresh leaf" (#5084)
* Throw an error when a user attempts to use multi-GPU training and XGBoost has not been compiled with NCCL (#5170)
* Fix metric name loading (#5122)
* Quick fix for memory leak in CPU `hist` algorithm (#5153)
* Fix wrapping GPU ID and prevent data copying (#5160)
* Fix signature of Span constructor (#5166)
* Lazy initialization of device vector, so that XGBoost compiled with CUDA can run on a machine without any GPU (#5173)
* Model loading should not change system locale (#5314)
* Distributed training jobs would sometimes hang; revert Rabit to fix this regression (dmlc/rabit#132, #5237)

### API changes
* Add support for cross-validation using query ID (#4474)
* Enable feature importance property for DART model (#4525)
* Add `rmsle` metric and `reg:squaredlogerror` objective (#4541)
* All objective and evaluation metrics are now exposed to JVM packages (#4560)
* `dump_model()` and `get_dump()` now support exporting in GraphViz language (#4602)
* Support metrics `ndcg-` and `map-` (#4635)
* [jvm-packages] Allow chaining prediction (transform) in XGBoost4J-Spark (#4667)
* [jvm-packages] Add option to bypass missing value check in the Spark layer (#4805). Only use this option if you know what you are doing.
* [jvm-packages] Add public group getter (#4838)
* `XGDMatrixSetGroup` C API is now deprecated (#4864). Use `XGDMatrixSetUIntInfo` instead.
* [R] Added new `train_folds` parameter to `xgb.cv()` (#5114)
* Ingest meta information from Pandas DataFrame, such as data weights (#5216)

### Maintenance: Refactor code for legibility and maintainability
* De-duplicate GPU parameters (#4454)
* Simplify INI-style config reader using C++11 STL (#4478, #4521)
* Refactor histogram building code for `gpu_hist` (#4528)
* Overload device memory allocator, to enable instrumentation for compiling memory usage statistics (#4532)
* Refactor out row partitioning logic from `gpu_hist` (#4554)
* Remove an unused variable (#4588)
* Implement tree model dump with code generator, to de-duplicate code for generating dumps in 3 different formats (#4602)
* Remove `RowSet` class which is no longer being used (#4697)
* Remove some unused functions as reported by cppcheck (#4743)
* Mimic CUDA assert output in Span check (#4762)
* [jvm-packages] Refactor `XGBoost.scala` to put all params processing in one place (#4815)
* Add some comments for GPU row partitioner (#4832)
* Span: use `size_t` for index_type; add `front` and `back`. (#4935)
* Remove dead code in `exact` algorithm (#5034, #5105)
* Unify integer types used for row and column indices (#5034)
* Extract feature interaction constraint from `SplitEvaluator` class. (#5034)
* [Breaking] De-duplicate parameters and docstrings in the constructors of Scikit-Learn models (#5130)
* Remove benchmark code from GPU tests (#5141)
* Clean up Python 2 compatibility code. (#5161)
* Extensible binary serialization format for `DMatrix::MetaInfo` (#5187). This will be useful for implementing censored labels for survival analysis applications.
* Clean up clang-tidy warnings. (#5247)

### Maintenance: testing, continuous integration, build system
* Use `yaml.safe_load` instead of `yaml.load`. (#4537)
* Ensure GCC is at least 5.x (#4538)
* Remove all mention of `reg:linear` from tests (#4544)
* [jvm-packages] Upgrade to Scala 2.12 (#4574)
* [jvm-packages] Update kryo dependency to 2.22 (#4575)
* [CI] Specify account ID when logging into ECR Docker registry (#4584)
* Use Sphinx 2.1+ to compile documentation (#4609)
* Make Pandas optional for running Python unit tests (#4620)
* Fix spark tests on machines with many cores (#4634)
* [jvm-packages] Update local dev build process (#4640)
* Add optional dependencies to setup.py (#4655)
* [jvm-packages] Fix maven warnings (#4664)
* Remove extraneous files from the R package, to comply with CRAN policy (#4699)
* Remove VC-2013 support, since it is not C++11 compliant (#4701)
* [CI] Fix broken installation of Pandas (#4704, #4722)
* [jvm-packages] Clean up temporary files after running tests (#4706)
* Specify version macro in CMake. (#4730)
* Include dmlc-tracker into XGBoost Python package (#4731)
* [CI] Use long key ID for Ubuntu repository fingerprints. (#4783)
* Remove plugin, CUDA related code in automake & autoconf files (#4789)
* Skip related tests when scikit-learn is not installed. (#4791)
* Ignore vscode and clion files (#4866)
* Use bundled Google Test by default (#4900)
* [CI] Raise timeout threshold in Jenkins (#4938)
* Copy CMake parameter from dmlc-core. (#4948)
* Set correct file permission. (#4964)
* [CI] Update lint configuration to support latest pylint convention (#4971)
* [CI] Upload nightly builds to S3 (#4976, #4979)
* Add asan.so.5 to cmake script. (#4999)
* [CI] Fix Travis tests. (#5062)
* [CI] Locate vcomp140.dll from System32 directory (#5078)
* Implement training observer to dump internal states of objects (#5088). This will be useful for debugging.
* Fix visual studio output library directories (#5119) * [jvm-packages] Comply with scala style convention + fix broken unit test (#5134) * [CI] Repair download URL for Maven 3.6.1 (#5139) * Don't use modernize-use-trailing-return-type in clang-tidy. (#5169) * Explicitly use UTF-8 codepage when using MSVC (#5197) * Add CMake option to run Undefined Behavior Sanitizer (UBSan) (#5211) * Make some GPU tests deterministic (#5229) * [R] Robust endian detection in CRAN xgboost build (#5232) * Support FreeBSD (#5233) * Make `pip install xgboost*.tar.gz` work by fixing build-python.sh (#5241) * Fix compilation error due to 64-bit integer narrowing to `size_t` (#5250) * Remove use of `std::cout` from R package, to comply with CRAN policy (#5261) * Update DMLC-Core submodule (#4674, #4688, #4726, #4924) * Update Rabit submodule (#4560, #4667, #4718, #4808, #4966, #5237) ### Usability Improvements, Documentation * Add Random Forest API to Python API doc (#4500) * Fix Python demo and doc. (#4545) * Remove doc about not supporting CUDA 10.1 (#4578) * Address some sphinx warnings and errors, add doc for building doc. (#4589) * Add instruction to run formatting checks locally (#4591) * Fix docstring for `XGBModel.predict()` (#4592) * Doc and demo for customized metric and objective (#4598, #4608) * Add to documentation how to run tests locally (#4610) * Empty evaluation list in early stopping should produce meaningful error message (#4633) * Fixed year to 2019 in conf.py, helpers.h and LICENSE (#4661) * Minor updates to links and grammar (#4673) * Remove `silent` in doc (#4689) * Remove old Python trouble shooting doc (#4729) * Add `os.PathLike` support for file paths to DMatrix and Booster Python classes (#4757) * Update XGBoost4J-Spark doc (#4804) * Regular formatting for evaluation metrics (#4803) * [jvm-packages] Refine documentation for handling missing values in XGBoost4J-Spark (#4805) * Monitor for distributed environment (#4829). This is useful for identifying performance bottleneck. * Add check for length of weights and produce a good error message (#4872) * Fix DMatrix doc (#4884) * Export C++ headers in CMake installation (#4897) * Update license year in README.md to 2019 (#4940) * Fix incorrectly displayed Note in the doc (#4943) * Follow PEP 257 Docstring Conventions (#4959) * Document minimum version required for Google Test (#5001) * Add better error message for invalid feature names (#5024) * Some guidelines on device memory usage (#5038) * [doc] Some notes for external memory. (#5065) * Update document for `tree_method` (#5106) * Update demo for ranking. (#5154) * Add new lines for Spark XGBoost missing values section (#5180) * Fix simple typo: utilty -> utility (#5182) * Update R doc by roxygen2 (#5201) * [R] Direct user to use `set.seed()` instead of setting `seed` parameter (#5125) * Add Optuna badge to `README.md` (#5208) * Fix compilation error in `c-api-demo.c` (#5215) ### Acknowledgement **Contributors**: Nan Zhu (@CodingCat), Crissman Loomis (@Crissman), Cyprien Ricque (@Cyprien-Ricque), Evan Kepner (@EvanKepner), K.O. 
(@Hi-king), KaiJin Ji (@KerryJi), Peter Badida (@KeyWeeUsr), Kodi Arfer (@Kodiologist), Rory Mitchell (@RAMitchell), Egor Smirnov (@SmirnovEgorRu), Jacob Kim (@TheJacobKim), Vibhu Jawa (@VibhuJawa), Marcos (@astrowonk), Andy Adinets (@canonizer), Chen Qin (@chenqin), Christopher Cowden (@cowden), @cpfarrell, @david-cortes, Liangcai Li (@firestarman), @fuhaoda, Philip Hyunsu Cho (@hcho3), @here-nagini, Tong He (@hetong007), Michal Kurka (@michalkurka), Honza Sterba (@honzasterba), @iblumin, @koertkuipers, mattn (@mattn), Mingjie Tang (@merlintang), OrdoAbChao (@mglowacki100), Matthew Jones (@mt-jones), mitama (@nigimitama), Nathan Moore (@nmoorenz), Daniel Stahl (@phillyfan1138), Michaël Benesty (@pommedeterresautee), Rong Ou (@rongou), Sebastian (@sfahnens), Xu Xiao (@sperlingxx), @sriramch, Sean Owen (@srowen), Stephanie Yang (@stpyang), Yuan Tang (@terrytangyuan), Mathew Wicks (@thesuperzapper), Tim Gates (@timgates42), TinkleG (@tinkle1129), Oleksandr Pryimak (@trams), Jiaming Yuan (@trivialfis), Matvey Turkov (@turk0v), Bobby Wang (@wbo4958), yage (@yage99), @yellowdolphin **Reviewers**: Nan Zhu (@CodingCat), Crissman Loomis (@Crissman), Cyprien Ricque (@Cyprien-Ricque), Evan Kepner (@EvanKepner), John Zedlewski (@JohnZed), KOLANICH (@KOLANICH), KaiJin Ji (@KerryJi), Kodi Arfer (@Kodiologist), Rory Mitchell (@RAMitchell), Egor Smirnov (@SmirnovEgorRu), Nikita Titov (@StrikerRUS), Jacob Kim (@TheJacobKim), Vibhu Jawa (@VibhuJawa), Andrew Kane (@ankane), Arno Candel (@arnocandel), Marcos (@astrowonk), Bryan Woods (@bryan-woods), Andy Adinets (@canonizer), Chen Qin (@chenqin), Thomas Franke (@coding-komek), Peter (@codingforfun), @cpfarrell, Joshua Patterson (@datametrician), @fuhaoda, Philip Hyunsu Cho (@hcho3), Tong He (@hetong007), Honza Sterba (@honzasterba), @iblumin, @jakirkham, Vadim Khotilovich (@khotilov), Keith Kraus (@kkraus14), @koertkuipers, @melonki, Mingjie Tang (@merlintang), OrdoAbChao (@mglowacki100), Daniel Mahler (@mhlr), Matthew Rocklin (@mrocklin), Matthew Jones (@mt-jones), Michaël Benesty (@pommedeterresautee), PSEUDOTENSOR / Jonathan McKinney (@pseudotensor), Rong Ou (@rongou), Vladimir (@sh1ng), Scott Lundberg (@slundberg), Xu Xiao (@sperlingxx), @sriramch, Pasha Stetsenko (@st-pasha), Stephanie Yang (@stpyang), Yuan Tang (@terrytangyuan), Mathew Wicks (@thesuperzapper), Theodore Vasiloudis (@thvasilo), TinkleG (@tinkle1129), Oleksandr Pryimak (@trams), Jiaming Yuan (@trivialfis), Bobby Wang (@wbo4958), yage (@yage99), @yellowdolphin, Yin Lou (@yinlou) ## v0.90 (2019.05.18) ### XGBoost Python package drops Python 2.x (#4379, #4381) Python 2.x is reaching its end-of-life at the end of this year. [Many scientific Python packages are now moving to drop Python 2.x](https://python3statement.github.io/). ### XGBoost4J-Spark now requires Spark 2.4.x (#4377) * Spark 2.3 is reaching its end-of-life soon. See discussion at #4389. * **Consistent handling of missing values** (#4309, #4349, #4411): Many users had reported issue with inconsistent predictions between XGBoost4J-Spark and the Python XGBoost package. The issue was caused by Spark mis-handling non-zero missing values (NaN, -1, 999 etc). We now alert the user whenever Spark doesn't handle missing values correctly (#4309, #4349). See [the tutorial for dealing with missing values in XGBoost4J-Spark](https://xgboost.readthedocs.io/en/release_0.90/jvm/xgboost4j_spark_tutorial.html#dealing-with-missing-values). This fix also depends on the availability of Spark 2.4.x. 
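For readers of the single-node Python package, the analogous knob for non-NaN missing-value encodings has long been the `missing` argument of `DMatrix`; a minimal sketch for comparison only (the value `-1.0` is an arbitrary placeholder, not a recommendation):

```python
import numpy as np
import xgboost as xgb

X = np.array([[1.0, -1.0], [0.0, 2.0]])  # -1.0 encodes "missing" in this dataset
y = np.array([0, 1])

# Tell XGBoost explicitly that -1.0 (rather than NaN) marks missing values.
dtrain = xgb.DMatrix(X, label=y, missing=-1.0)
```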
### Roadmap: better performance scaling for multi-core CPUs (#4310)
* Poor performance scaling of the `hist` algorithm for multi-core CPUs has been under investigation (#3810). #4310 optimizes quantile sketches and other pre-processing tasks. Special thanks to @SmirnovEgorRu.

### Roadmap: Harden distributed training (#4250)
* Make distributed training in XGBoost more robust by hardening [Rabit](https://github.com/dmlc/rabit), which implements [the AllReduce primitive](https://en.wikipedia.org/wiki/Reduce_%28parallel_pattern%29). In particular, improve test coverage on mechanisms for fault tolerance and recovery. Special thanks to @chenqin.

### New feature: Multi-class metric functions for GPUs (#4368)
* Metrics for multi-class classification have been ported to GPU: `merror`, `mlogloss`. Special thanks to @trivialfis.
* With supported metrics, XGBoost will select the correct devices based on your system and `n_gpus` parameter.

### New feature: Scikit-learn-like random forest API (#4148, #4255, #4258)
* XGBoost Python package now offers `XGBRFClassifier` and `XGBRFRegressor` API to train random forests. See [the tutorial](https://xgboost.readthedocs.io/en/release_0.90/tutorials/rf.html), as well as the short sketch below. Special thanks to @canonizer.
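A minimal usage sketch of the new random forest wrappers (the synthetic data and hyperparameter values are placeholders):

```python
import numpy as np
import xgboost as xgb

X = np.random.rand(100, 5)
y = (X[:, 0] > 0.5).astype(int)

# Train a random forest through the scikit-learn-style wrapper
rf = xgb.XGBRFClassifier(n_estimators=50, max_depth=4)
rf.fit(X, y)
pred = rf.predict(X)
```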
### New feature: use external memory in GPU predictor (#4284, #4396, #4438, #4457)
* It is now possible to make predictions on GPU when the input is read from external memory. This is useful when you want to make predictions with a big dataset that does not fit into GPU memory. Special thanks to @rongou, @canonizer, @sriramch.

```python
dtest = xgboost.DMatrix('test_data.libsvm#dtest.cache')
bst.set_param('predictor', 'gpu_predictor')
bst.predict(dtest)
```

* Coming soon: GPU training (`gpu_hist`) with external memory

### New feature: XGBoost can now handle comments in LIBSVM files (#4430)
* Special thanks to @trivialfis and @hcho3

### New feature: Embed XGBoost in your C/C++ applications using CMake (#4323, #4333, #4453)
* It is now easier than ever to embed XGBoost in your C/C++ applications. In your CMakeLists.txt, add `xgboost::xgboost` as a linked library:

```cmake
find_package(xgboost REQUIRED)
add_executable(api-demo c-api-demo.c)
target_link_libraries(api-demo xgboost::xgboost)
```

[XGBoost C API documentation is available.](https://xgboost.readthedocs.io/en/release_0.90/dev) Special thanks to @trivialfis.

### Performance improvements
* Use feature interaction constraints to narrow split search space (#4341, #4428)
* Additional optimizations for `gpu_hist` (#4248, #4283)
* Reduce OpenMP thread launches in `gpu_hist` (#4343)
* Additional optimizations for multi-node multi-GPU random forests. (#4238)
* Allocate unique prediction buffer for each input matrix, to avoid re-sizing GPU array (#4275)
* Remove various synchronisations from CUDA API calls (#4205)
* XGBoost4J-Spark
  - Allow the user to control whether to cache partitioned training data, to potentially reduce execution time (#4268)

### Bug-fixes
* Fix node reuse in `hist` (#4404)
* Fix GPU histogram allocation (#4347)
* Fix matrix attributes not sliced (#4311)
* Revise AUC and AUCPR metrics so that they now work with the weighted ranking task (#4216, #4436)
* Fix timer invocation for InitDataOnce() in `gpu_hist` (#4206)
* Fix R-devel errors (#4251)
* Make gradient update in GPU linear updater thread-safe (#4259)
* Prevent out-of-range access in column matrix (#4231)
* Don't store DMatrix handle in Python object until it's initialized, to improve exception safety (#4317)
* XGBoost4J-Spark
  - Fix non-deterministic order within a zipped partition on prediction (#4388)
  - Remove race condition on tracker shutdown (#4224)
  - Allow setting the parameter `maxLeaves`. (#4226)
  - Allow partial evaluation of dataframe before prediction (#4407)
  - Automatically set `maximize_evaluation_metrics` if not explicitly given (#4446)

### API changes
* Deprecate `reg:linear` in favor of `reg:squarederror`. (#4267, #4427)
* Add attribute getter and setter to the Booster object in XGBoost4J (#4336)

### Maintenance: Refactor C++ code for legibility and maintainability
* Fix clang-tidy warnings. (#4149)
* Remove deprecated C APIs. (#4266)
* Use Monitor class to time functions in `hist`. (#4273)
* Retire DVec class in favour of a C++20-style span for device memory. (#4293)
* Improve HostDeviceVector exception safety (#4301)

### Maintenance: testing, continuous integration, build system
* **Major refactor of CMakeLists.txt** (#4323, #4333, #4453): adopt modern CMake and export XGBoost as a target
* **Major improvement in Jenkins CI pipeline** (#4234)
  - Migrate all Linux tests to Jenkins (#4401)
  - Builds and tests are now de-coupled, to test an artifact against multiple versions of CUDA, JDK, and other dependencies (#4401)
  - Add Windows GPU to Jenkins CI pipeline (#4463, #4469)
* Support CUDA 10.1 (#4223, #4232, #4265, #4468)
* Python wheels are now built with CUDA 9.0, so that JIT is not required on Volta architecture (#4459)
* Integrate with NVTX CUDA profiler (#4205)
* Add a test for cpu predictor using external memory (#4308)
* Refactor tests to get rid of duplication (#4358)
* Remove test dependency on `craigcitro/r-travis`, since it's deprecated (#4353)
* Add files from local R build to `.gitignore` (#4346)
* Make XGBoost4J compatible with Java 9+ by revising NativeLibLoader (#4351)
* Jenkins build for CUDA 10.0 (#4281)
* Remove remaining `silent` and `debug_verbose` in Python tests (#4299)
* Use all cores to build XGBoost4J lib on Linux (#4304)
* Upgrade Jenkins Linux build environment to GCC 5.3.1, CMake 3.6.0 (#4306)
* Make CMakeLists.txt compatible with CMake 3.3 (#4420)
* Add OpenMP option in CMakeLists.txt (#4339)
* Get rid of a few trivial compiler warnings (#4312)
* Add external Docker build cache, to speed up builds on Jenkins CI (#4331, #4334, #4458)
* Fix Windows tests (#4403)
* Fix a broken Python test (#4395)
* Use a fixed seed to split data in XGBoost4J-Spark tests, for reproducibility (#4417)
* Add additional Python tests to test training under constraints (#4426)
* Enable building with shared NCCL (#4447)
### Usability Improvements, Documentation
* Document limitation of one-split-at-a-time Greedy tree learning heuristic (#4233)
* Update build doc: PyPI wheel now supports multi-GPU (#4219)
* Fix docs for `num_parallel_tree` (#4221)
* Fix document about `colsample_by*` parameter (#4340)
* Make the train and test inputs use the same column names. (#4329)
* Update R contribute link. (#4236)
* Fix Travis R tests (#4277)
* Log version number in crash log in XGBoost4J-Spark (#4271, #4303)
* Allow suppression of Rabit output in Booster::train in XGBoost4J (#4262)
* Add tutorial on handling missing values in XGBoost4J-Spark (#4425)
* Fix typos (#4345, #4393, #4432, #4435)
* Added language classifier in setup.py (#4327)
* Added Travis CI badge (#4344)
* Add BentoML to use case section (#4400)
* Remove subtly sexist remark (#4418)
* Add R vignette about parsing JSON dumps (#4439)

### Acknowledgement
**Contributors**: Nan Zhu (@CodingCat), Adam Pocock (@Craigacp), Daniel Hen (@Daniel8hen), Jiaxiang Li (@JiaxiangBU), Rory Mitchell (@RAMitchell), Egor Smirnov (@SmirnovEgorRu), Andy Adinets (@canonizer), Jonas (@elcombato), Harry Braviner (@harrybraviner), Philip Hyunsu Cho (@hcho3), Tong He (@hetong007), James Lamb (@jameslamb), Jean-Francois Zinque (@jeffzi), Yang Yang (@jokerkeny), Mayank Suman (@mayanksuman), jess (@monkeywithacupcake), Hajime Morrita (@omo), Ravi Kalia (@project-delphi), @ras44, Rong Ou (@rongou), Shaochen Shi (@shishaochen), Xu Xiao (@sperlingxx), @sriramch, Jiaming Yuan (@trivialfis), Christopher Suchanek (@wsuchy), Bozhao (@yubozhao)

**Reviewers**: Nan Zhu (@CodingCat), Adam Pocock (@Craigacp), Daniel Hen (@Daniel8hen), Jiaxiang Li (@JiaxiangBU), Laurae (@Laurae2), Rory Mitchell (@RAMitchell), Egor Smirnov (@SmirnovEgorRu), @alois-bissuel, Andy Adinets (@canonizer), Chen Qin (@chenqin), Harry Braviner (@harrybraviner), Philip Hyunsu Cho (@hcho3), Tong He (@hetong007), @jakirkham, James Lamb (@jameslamb), Julien Schueller (@jschueller), Mayank Suman (@mayanksuman), Hajime Morrita (@omo), Rong Ou (@rongou), Sara Robinson (@sararob), Shaochen Shi (@shishaochen), Xu Xiao (@sperlingxx), @sriramch, Sean Owen (@srowen), Sergei Lebedev (@superbobry), Yuan (Terry) Tang (@terrytangyuan), Theodore Vasiloudis (@thvasilo), Matthew Tovbin (@tovbinm), Jiaming Yuan (@trivialfis), Xin Yin (@xydrolase)

## v0.82 (2019.03.03)
This release is packed with many new features and bug fixes.

### Roadmap: better performance scaling for multi-core CPUs (#3957)
* Poor performance scaling of the `hist` algorithm for multi-core CPUs has been under investigation (#3810). #3957 marks an important step toward better performance scaling, by using software pre-fetching and replacing STL vectors with C-style arrays. Special thanks to @Laurae2 and @SmirnovEgorRu.
* See #3810 for latest progress on this roadmap.

### New feature: Distributed Fast Histogram Algorithm (`hist`) (#4011, #4102, #4140, #4128)
* It is now possible to run the `hist` algorithm in a distributed setting. Special thanks to @CodingCat. The benefits include:
  1. Faster local computation via feature binning
  2. Support for monotonic constraints and feature interaction constraints
  3. Simpler codebase than `approx`, allowing for future improvement
* Depth-wise tree growing is now performed in a separate code path, so that cross-node synchronization is performed only once per level.

### New feature: Multi-Node, Multi-GPU training (#4095)
* Distributed training is now able to utilize clusters equipped with NVIDIA GPUs.
In particular, the rabit AllReduce layer will communicate GPU device information. Special thanks to @mt-jones, @RAMitchell, @rongou, @trivialfis, @canonizer, and @jeffdk.
* Resource management systems will be able to assign a rank for each GPU in the cluster.
* In Dask, users will be able to construct a collection of XGBoost processes over an inhomogeneous device cluster (i.e. workers with different numbers and/or kinds of GPUs).

### New feature: Multiple validation datasets in XGBoost4J-Spark (#3904, #3910)
* You can now track the performance of the model during training with multiple evaluation datasets. By specifying `eval_sets` or calling `setEvalSets` on an `XGBoostClassifier` or `XGBoostRegressor`, you can pass in multiple evaluation datasets typed as a `Map` from `String` to `DataFrame`. Special thanks to @CodingCat.
* See the usage of multiple validation datasets [here](https://github.com/dmlc/xgboost/blob/0c1d5f1120c0a159f2567b267f0ec4ffadee00d0/jvm-packages/xgboost4j-example/src/main/scala/ml/dmlc/xgboost4j/scala/example/spark/SparkTraining.scala#L66-L78)

### New feature: Additional metric functions for GPUs (#3952)
* Element-wise metrics have been ported to GPU: `rmse`, `mae`, `logloss`, `poisson-nloglik`, `gamma-deviance`, `gamma-nloglik`, `error`, `tweedie-nloglik`. Special thanks to @trivialfis and @RAMitchell.
* With supported metrics, XGBoost will select the correct devices based on your system and `n_gpus` parameter.

### New feature: Column sampling at individual nodes (splits) (#3971)
* Columns (features) can now be sampled at individual tree nodes, in addition to per-tree and per-level sampling. To enable per-node sampling, set the `colsample_bynode` parameter, which represents the fraction of columns sampled at each node. This parameter is set to 1.0 by default (i.e. no sampling per node). Special thanks to @canonizer.
* The `colsample_bynode` parameter works cumulatively with other `colsample_by*` parameters: for example, `{'colsample_bynode':0.5, 'colsample_bytree':0.5}` with 100 columns will give 25 features to choose from at each split.

### Major API change: consistent logging level via `verbosity` (#3982, #4002, #4138)
* XGBoost now allows fine-grained control over logging. You can set `verbosity` to 0 (silent), 1 (warning), 2 (info), and 3 (debug). This is useful for controlling the amount of logging output; a short sketch follows below. Special thanks to @trivialfis.
* Parameters `silent` and `debug_verbose` are now deprecated.
* Note: Sometimes XGBoost tries to change configurations based on heuristics, which is displayed as a warning message. If there's unexpected behaviour, please try increasing the `verbosity` value.
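A minimal sketch of the new `verbosity` parameter (the synthetic data and round count are placeholders):

```python
import numpy as np
import xgboost as xgb

dtrain = xgb.DMatrix(np.random.rand(50, 3), label=np.random.randint(2, size=50))
# verbosity replaces the deprecated `silent`/`debug_verbose` flags:
# 0 = silent, 1 = warning, 2 = info, 3 = debug
bst = xgb.train({"verbosity": 2}, dtrain, num_boost_round=2)
```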
### Major bug fix: external memory (#4040, #4193)
* Clarify object ownership in multi-threaded prefetcher, to avoid memory error.
* Correctly merge two column batches (which uses [CSC layout](https://en.wikipedia.org/wiki/Sparse_matrix#Compressed_sparse_column_(CSC_or_CCS))).
* Add unit tests for external memory.
* Special thanks to @trivialfis and @hcho3.

### Major bug fix: early stopping fixed in XGBoost4J and XGBoost4J-Spark (#3928, #4176)
* Early stopping in XGBoost4J and XGBoost4J-Spark is now consistent with its counterpart in the Python package. Training stops if the current iteration is `earlyStoppingSteps` away from the best iteration. If there are multiple evaluation sets, only the last one is used to determine early stopping.
* See the updated documentation [here](https://xgboost.readthedocs.io/en/release_0.82/jvm/xgboost4j_spark_tutorial.html#early-stopping)
* Special thanks to @CodingCat, @yanboliang, and @mingyang.

### Major bug fix: infrequent features should not crash distributed training (#4045)
* For infrequently occurring features, some partitions may not get any instance. This scenario used to crash distributed training due to malformed ranges. The problem has now been fixed.
* In practice, one-hot-encoded categorical variables tend to produce rare features, particularly when the cardinality is high.
* Special thanks to @CodingCat.

### Performance improvements
* Faster, more space-efficient radix sorting in `gpu_hist` (#3895)
* Subtraction trick in histogram calculation in `gpu_hist` (#3945)
* More performant re-partition in XGBoost4J-Spark (#4049)

### Bug-fixes
* Fix semantics of `gpu_id` when running multiple XGBoost processes on a multi-GPU machine (#3851)
* Fix page storage path for external memory on Windows (#3869)
* Fix configuration setup so that DART utilizes GPU (#4024)
* Eliminate NAN values from SHAP prediction (#3943)
* Prevent empty quantile sketches in `hist` (#4155)
* Enable running objectives with 0 GPU (#3878)
* Parameters are no longer dependent on system locale (#3891, #3907)
* Use consistent data type in the GPU coordinate descent code (#3917)
* Remove undefined behavior in the CLI config parser on the ARM platform (#3976)
* Initialize counters in GPU AllReduce (#3987)
* Prevent deadlocks in GPU AllReduce (#4113)
* Load correct values from sliced NumPy arrays (#4147, #4165)
* Fix incorrect GPU device selection (#4161)
* Make feature binning logic in `hist` aware of query groups when running a ranking task (#4115). For the ranking task, query groups are weighted, not individual instances.
* Generate correct C++ exception type for `LOG(FATAL)` macro (#4159)
* Python package
  - Python package should run on a system without `PATH` environment variable (#3845)
  - Fix `coef_` and `intercept_` signature to be compatible with `sklearn.RFECV` (#3873)
  - Use UTF-8 encoding in Python package README, to support non-English locale (#3867)
  - Add AUC-PR to list of metrics to maximize for early stopping (#3936)
  - Allow loading pickles without `self.booster` attribute, for backward compatibility (#3938, #3944)
  - White-list DART for feature importances (#4073)
  - Update usage of [h2oai/datatable](https://github.com/h2oai/datatable) (#4123)
* XGBoost4J-Spark
  - Address scalability issue in prediction (#4033)
  - Enforce the use of per-group weights for the ranking task (#4118)
  - Fix vector size of `rawPredictionCol` in `XGBoostClassificationModel` (#3932)
  - More robust error handling in Spark tracker (#4046, #4108)
  - Fix return type of `setEvalSets` (#4105)
  - Return correct value of `getMaxLeaves` (#4114)

### API changes
* Add experimental parameter `single_precision_histogram` to use single-precision histograms for the `gpu_hist` algorithm (#3965)
* Python package
  - Add option to select type of feature importances in the scikit-learn interface (#3876)
  - Add `trees_to_df()` method to dump decision trees as Pandas data frame (#4153); see the sketch after this release's refactoring notes
  - Add options to control node shapes in the GraphViz plotting function (#3859)
  - Add `xgb_model` option to `XGBClassifier`, to load previously saved model (#4092)
  - Passing lists into `DMatrix` is now deprecated (#3970)
* XGBoost4J
  - Support multiple feature importance features (#3801)

### Maintenance: Refactor C++ code for legibility and maintainability
* Refactor `hist` algorithm code and add unit tests (#3836)
* Minor refactoring of split evaluator in `gpu_hist` (#3889)
* Removed unused leaf vector field in the tree model (#3989)
* Simplify the tree representation by combining `TreeModel` and `RegTree` classes (#3995)
* Simplify and harden tree expansion code (#4008, #4015)
* De-duplicate parameter classes in the linear model algorithms (#4013)
* Robust handling of ranges with C++20 span in `gpu_exact` and `gpu_coord_descent` (#4020, #4029)
* Simplify tree training code (#3825). Also use Span class for robust handling of ranges.
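A minimal sketch of the tree-dump-to-DataFrame feature listed under API changes above. Note that the method is exposed on the `Booster` object as `trees_to_dataframe()` in released versions of the Python API; the synthetic data below is a placeholder, and pandas must be installed:

```python
import numpy as np
import xgboost as xgb

dtrain = xgb.DMatrix(np.random.rand(100, 4), label=np.random.rand(100))
bst = xgb.train({"max_depth": 3}, dtrain, num_boost_round=2)

# One row per tree node, with columns such as Tree, Node, Feature, Split, Gain, Cover
df = bst.trees_to_dataframe()
print(df.head())
```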
### Maintenance: testing, continuous integration, build system * Disallow `std::regex` since it's not supported by GCC 4.8.x (#3870) * Add multi-GPU tests for coordinate descent algorithm for linear models (#3893, #3974) * Enforce naming style in Python lint (#3896) * Refactor Python tests (#3897, #3901): Use pytest exclusively, display full trace upon failure * Address `DeprecationWarning` when using Python collections (#3909) * Use correct group for maven site plugin (#3937) * Jenkins CI is now using on-demand EC2 instances exclusively, due to unreliability of Spot instances (#3948) * Better GPU performance logging (#3945) * Fix GPU tests on machines with only 1 GPU (#4053) * Eliminate CRAN check warnings and notes (#3988) * Add unit tests for tree serialization (#3989) * Add unit tests for tree fitting functions in `hist` (#4155) * Add a unit test for `gpu_exact` algorithm (#4020) * Correct JVM CMake GPU flag (#4071) * Fix failing Travis CI on Mac (#4086) * Speed up Jenkins by not compiling CMake (#4099) * Analyze C++ and CUDA code using clang-tidy, as part of Jenkins CI pipeline (#4034) * Fix broken R test: Install Homebrew GCC (#4142) * Check for empty datasets in GPU unit tests (#4151) * Fix Windows compilation (#4139) * Comply with latest convention of cpplint (#4157) * Fix a unit test in `gpu_hist` (#4158) * Speed up data generation in Python tests (#4164) ### Usability Improvements * Add link to [InfoWorld 2019 Technology of the Year Award](https://www.infoworld.com/article/3336072/application-development/infoworlds-2019-technology-of-the-year-award-winners.html) (#4116) * Remove outdated AWS YARN tutorial (#3885) * Document current limitation in number of features (#3886) * Remove unnecessary warning when `gblinear` is selected (#3888) * Document limitation of CSV parser: header not supported (#3934) * Log training parameters in XGBoost4J-Spark (#4091) * Clarify early stopping behavior in the scikit-learn interface (#3967) * Clarify behavior of `max_depth` parameter (#4078) * Revise Python docstrings for ranking task (#4121). In particular, weights must be per-group in learning-to-rank setting. 
* Document parameter `num_parallel_tree` (#4022)
* Add Jenkins status badge (#4090)
* Warn users against using internal functions of `Booster` object (#4066)
* Reformat `benchmark_tree.py` to comply with Python style convention (#4126)
* Clarify a comment in `objectiveTrait` (#4174)
* Fix typos and broken links in documentation (#3890, #3872, #3902, #3919, #3975, #4027, #4156, #4167)

### Acknowledgement
**Contributors** (in no particular order): Jiaming Yuan (@trivialfis), Hyunsu Cho (@hcho3), Nan Zhu (@CodingCat), Rory Mitchell (@RAMitchell), Yanbo Liang (@yanboliang), Andy Adinets (@canonizer), Tong He (@hetong007), Yuan Tang (@terrytangyuan)

**First-time Contributors** (in no particular order): Jelle Zijlstra (@JelleZijlstra), Jiacheng Xu (@jiachengxu), @ajing, Kashif Rasul (@kashif), @theycallhimavi, Joey Gao (@pjgao), Prabakaran Kumaresshan (@nixphix), Huafeng Wang (@huafengw), @lyxthe, Sam Wilkinson (@scwilkinson), Tatsuhito Kato (@stabacov), Shayak Banerjee (@shayakbanerjee), Kodi Arfer (@Kodiologist), @KyleLi1985, Egor Smirnov (@SmirnovEgorRu), @tmitanitky, Pasha Stetsenko (@st-pasha), Kenichi Nagahara (@keni-chi), Abhai Kollara Dilip (@abhaikollara), Patrick Ford (@pford221), @hshujuan, Matthew Jones (@mt-jones), Thejaswi Rao (@teju85), Adam November (@anovember)

**First-time Reviewers** (in no particular order): Mingyang Hu (@mingyang), Theodore Vasiloudis (@thvasilo), Jakub Troszok (@troszok), Rong Ou (@rongou), @Denisevi4, Matthew Jones (@mt-jones), Jeff Kaplan (@jeffdk)

## v0.81 (2018.11.04)
### New feature: feature interaction constraints
* Users are now able to control which features (independent variables) are allowed to interact by specifying feature interaction constraints (#3466).
* [Tutorial](https://xgboost.readthedocs.io/en/release_0.81/tutorials/feature_interaction_constraint.html) is available, as well as [R](https://github.com/dmlc/xgboost/blob/9254c58e4dfff6a59dc0829a2ceb02e45ed17cd0/R-package/demo/interaction_constraints.R) and [Python](https://github.com/dmlc/xgboost/blob/9254c58e4dfff6a59dc0829a2ceb02e45ed17cd0/tests/python/test_interaction_constraints.py) examples.

### New feature: learning to rank using scikit-learn interface
* The learning-to-rank task is now available for the scikit-learn interface of the Python package (#3560, #3848). It is now possible to integrate the XGBoost ranking model into the scikit-learn learning pipeline.
* An example of using the `XGBRanker` class is found at [demo/rank/rank_sklearn.py](https://github.com/dmlc/xgboost/blob/24a268a2e3cb17302db3d72da8f04016b7d352d9/demo/rank/rank_sklearn.py).

### New feature: R interface for SHAP interactions
* SHAP (SHapley Additive exPlanations) is a unified approach to explain the output of any machine learning model. Previously, this feature was only available from the Python package; now it is available from the R package as well (#3636).

### New feature: GPU predictor now uses multiple GPUs to predict
* GPU predictor is now able to utilize multiple GPUs at once to accelerate prediction (#3738)

### New feature: Scale distributed XGBoost to large-scale clusters
* Fix OS file descriptor limit assertion error on large clusters (#3835, dmlc/rabit#73) by replacing `select()` based AllReduce/Broadcast with `poll()` based implementation.
* Mitigate tracker "thundering herd" issue on large clusters. Add exponential backoff retry when workers connect to the tracker (a sketch of this strategy appears below).
* With this change, we were able to scale to 1.5k executors on a 12 billion row dataset after some tweaks here and there.
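Exponential backoff with jitter is a generic retry strategy; the following Python sketch only illustrates the idea and is not XGBoost's actual tracker code (`connect`, `max_retries`, and the delay constants are hypothetical):

```python
import random
import time

def connect_with_backoff(connect, max_retries=8, base=0.1, cap=30.0):
    """Retry `connect` with exponential backoff and jitter."""
    for attempt in range(max_retries):
        try:
            return connect()
        except OSError:
            # Sleep roughly 0.1s, 0.2s, 0.4s, ... (capped), with random jitter
            # so that many workers don't retry at the same instant.
            delay = min(cap, base * (2 ** attempt))
            time.sleep(delay * random.uniform(0.5, 1.5))
    raise RuntimeError("could not reach tracker")
```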
### New feature: Additional objective functions for GPUs
* New objective functions ported to GPU: `hinge`, `multi:softmax`, `multi:softprob`, `count:poisson`, `reg:gamma`, `reg:tweedie`.
* With supported objectives, XGBoost will select the correct devices based on your system and `n_gpus` parameter.

### Major bug fix: learning to rank with XGBoost4J-Spark
* Previously, `repartitionForData` would shuffle data and lose the ordering necessary for the ranking task.
* To fix this issue, data points within each RDD partition are now explicitly grouped by their group (query session) IDs (#3654). Empty RDD partitions are also handled carefully (#3750).

### Major bug fix: early stopping fixed in XGBoost4J-Spark
* The earlier implementation of early stopping had incorrect semantics and didn't let users specify the direction of optimization (maximize / minimize)
* A parameter `maximize_evaluation_metrics` is defined so as to tell whether a metric should be maximized or minimized as part of the early stopping criteria (#3808). Also early stopping now has correct semantics.

### API changes
* Column sampling by level (`colsample_bylevel`) is now functional for the `hist` algorithm (#3635, #3862)
* GPU tag `gpu:` for regression objectives is now deprecated. XGBoost will select the correct devices automatically (#3643)
* Add `disable_default_eval_metric` parameter to disable default metric (#3606)
* Experimental AVX support for gradient computation is removed (#3752)
* XGBoost4J-Spark
  - Add `rank:ndcg` and `rank:map` to supported objectives (#3697)
* Python package
  - Add `callbacks` argument to `fit()` function of scikit-learn API (#3682)
  - Add `XGBRanker` to scikit-learn interface (#3560, #3848)
  - Add `validate_features` argument to `predict()` function of scikit-learn API (#3653)
  - Allow scikit-learn grid search over parameters specified as keyword arguments (#3791)
  - Add `coef_` and `intercept_` as properties of scikit-learn wrapper (#3855). Some scikit-learn functions expect these properties.

### Performance improvements
* Address very high GPU memory usage for large data (#3635)
* Fix performance regression within `EvaluateSplits()` of `gpu_hist` algorithm. (#3680)

### Bug-fixes
* Fix a problem in GPU quantile sketch with tiny instance weights.
(#3628) * Fix copy constructor for `HostDeviceVectorImpl` to prevent dangling pointers (#3657) * Fix a bug in partitioned file loading (#3673) * Fixed an uninitialized pointer in `gpu_hist` (#3703) * Reshared data among GPUs when number of GPUs is changed (#3721) * Add back `max_delta_step` to split evaluation (#3668) * Do not round up integer thresholds for integer features in JSON dump (#3717) * Use `dmlc::TemporaryDirectory` to handle temporaries in cross-platform way (#3783) * Fix accuracy problem with `gpu_hist` when `min_child_weight` and `lambda` are set to 0 (#3793) * Make sure that `tree_method` parameter is recognized and not silently ignored (#3849) * XGBoost4J-Spark - Make sure `thresholds` are considered when executing `predict()` method (#3577) - Avoid losing precision when computing probabilities by converting to `Double` early (#3576) - `getTreeLimit()` should return `Int` (#3602) - Fix checkpoint serialization on HDFS (#3614) - Throw `ControlThrowable` instead of `InterruptedException` so that it is properly re-thrown (#3632) - Remove extraneous output to stdout (#3665) - Allow specification of task type for custom objectives and evaluations (#3646) - Fix distributed updater check (#3739) - Fix issue when spark job execution thread cannot return before we execute `first()` (#3758) * Python package - Fix accessing `DMatrix.handle` before it is set (#3599) - `XGBClassifier.predict()` should return margin scores when `output_margin` is set to true (#3651) - Early stopping callback should maximize metric of form `NDCG@n-` (#3685) - Preserve feature names when slicing `DMatrix` (#3766) * R package - Replace `nround` with `nrounds` to match actual parameter (#3592) - Amend `xgb.createFolds` to handle classes of a single element (#3630) - Fix buggy random generator and make `colsample_bytree` functional (#3781) ### Maintenance: testing, continuous integration, build system * Add sanitizers tests to Travis CI (#3557) * Add NumPy, Matplotlib, Graphviz as requirements for doc build (#3669) * Comply with CRAN submission policy (#3660, #3728) * Remove copy-paste error in JVM test suite (#3692) * Disable flaky tests in `R-package/tests/testthat/test_update.R` (#3723) * Make Python tests compatible with scikit-learn 0.20 release (#3731) * Separate out restricted and unrestricted tasks, so that pull requests don't build downloadable artifacts (#3736) * Add multi-GPU unit test environment (#3741) * Allow plug-ins to be built by CMake (#3752) * Test wheel compatibility on CPU containers for pull requests (#3762) * Fix broken doc build due to Matplotlib 3.0 release (#3764) * Produce `xgboost.so` for XGBoost-R on Mac OSX, so that `make install` works (#3767) * Retry Jenkins CI tests up to 3 times to improve reliability (#3769, #3769, #3775, #3776, #3777) * Add basic unit tests for `gpu_hist` algorithm (#3785) * Fix Python environment for distributed unit tests (#3806) * Test wheels on CUDA 10.0 container for compatibility (#3838) * Fix JVM doc build (#3853) ### Maintenance: Refactor C++ code for legibility and maintainability * Merge generic device helper functions into `GPUSet` class (#3626) * Re-factor column sampling logic into `ColumnSampler` class (#3635, #3637) * Replace `std::vector` with `HostDeviceVector` in `MetaInfo` and `SparsePage` (#3446) * Simplify `DMatrix` class (#3395) * De-duplicate CPU/GPU code using `Transform` class (#3643, #3751) * Remove obsoleted `QuantileHistMaker` class (#3761) * Remove obsoleted `NoConstraint` class (#3792) ### Other Features * C++20-compliant 
Span class for safe pointer indexing (#3548, #3588)
* Add helper functions to manipulate multiple GPU devices (#3693)
* XGBoost4J-Spark
  - Allow specifying host IP from the `xgboost-tracker.properties` file (#3833). This comes in handy when the `hosts` file doesn't correctly define localhost.

### Usability Improvements
* Add reference to GitHub repository in `pom.xml` of JVM packages (#3589)
* Add R demo of multi-class classification (#3695)
* Document JSON dump functionality (#3600, #3603)
* Document CUDA requirement and lack of external memory for GPU algorithms (#3624)
* Document LambdaMART objectives, both pairwise and listwise (#3672)
* Document `aucpr` evaluation metric (#3687)
* Document gblinear parameters: `feature_selector` and `top_k` (#3780)
* Add instructions for using MinGW-built XGBoost with Python. (#3774)
* Removed nonexistent parameter `use_buffer` from documentation (#3610)
* Update Python API doc to include all classes and members (#3619, #3682)
* Fix typos and broken links in documentation (#3618, #3640, #3676, #3713, #3759, #3784, #3843, #3852)
* Binary classification demo should produce LIBSVM with 0-based indexing (#3652)
* Process data once for Python and CLI examples of learning to rank (#3666)
* Include full text of Apache 2.0 license in the repository (#3698)
* Save predictor parameters in model file (#3856)
* JVM packages
  - Let users specify feature names when calling `getModelDump` and `getFeatureScore` (#3733)
  - Warn the user about the lack of over-the-wire encryption (#3667)
  - Fix errors in examples (#3719)
  - Document choice of trackers (#3831)
  - Document that vanilla Apache Spark is required (#3854)
* Python package
  - Document that custom objective can't contain a colon (:) (#3601)
  - Show a better error message for failed library loading (#3690)
  - Document that feature importance is unavailable for non-tree learners (#3765)
  - Document behavior of `get_fscore()` for zero-importance features (#3763)
  - Recommend pickling as the way to save `XGBClassifier` / `XGBRegressor` / `XGBRanker` (#3829); see the sketch at the end of this section
* R package
  - Enlarge variable importance plot to make it more visible (#3820)

### BREAKING CHANGES
* External memory page files have changed, breaking backwards compatibility for temporary storage used during external memory training. This only affects external memory users upgrading their xgboost version - we recommend clearing all `*.page` files before resuming training. Model serialization is unaffected.

### Known issues
* Quantile sketcher fails to produce any quantile for some edge cases (#2943)
* The `hist` algorithm leaks memory when used with the learning rate decay callback (#3579)
* Using custom evaluation function together with early stopping causes assertion failure in XGBoost4J-Spark (#3595)
* Early stopping doesn't work with `gblinear` learner (#3789)
* Label and weight vectors are not reshared upon the change in number of GPUs (#3794). To get around this issue, delete the `DMatrix` object and re-load.
* The `DMatrix` Python objects are initialized with incorrect values when given array slices (#3841)
* The `gpu_id` parameter is broken and not yet properly supported (#3850)
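A minimal sketch of the pickling recommendation above (synthetic data; the file name is a placeholder):

```python
import pickle

import numpy as np
import xgboost as xgb

clf = xgb.XGBClassifier(n_estimators=10)
clf.fit(np.random.rand(50, 3), np.random.randint(2, size=50))

# Pickle round-trips the fitted scikit-learn wrapper, including the booster.
with open("clf.pkl", "wb") as f:
    pickle.dump(clf, f)
with open("clf.pkl", "rb") as f:
    clf2 = pickle.load(f)
```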
### Acknowledgement
**Contributors** (in no particular order): Hyunsu Cho (@hcho3), Jiaming Yuan (@trivialfis), Nan Zhu (@CodingCat), Rory Mitchell (@RAMitchell), Andy Adinets (@canonizer), Vadim Khotilovich (@khotilov), Sergei Lebedev (@superbobry)

**First-time Contributors** (in no particular order): Matthew Tovbin (@tovbinm), Jakob Richter (@jakob-r), Grace Lam (@grace-lam), Grant W Schneider (@grantschneider), Andrew Thia (@BlueTea88), Sergei Chipiga (@schipiga), Joseph Bradley (@jkbradley), Chen Qin (@chenqin), Jerry Lin (@linjer), Dmitriy Rybalko (@rdtft), Michael Mui (@mmui), Takahiro Kojima (@515hikaru), Bruce Zhao (@BruceZhaoR), Wei Tian (@weitian), Saumya Bhatnagar (@Sam1301), Juzer Shakir (@JuzerShakir), Zhao Hang (@cleghom), Jonathan Friedman (@jontonsoup), Bruno Tremblay (@meztez), Boris Filippov (@frenzykryger), @Shiki-H, @mrgutkun, @gorogm, @htgeis, @jakehoare, @zengxy, @KOLANICH

**First-time Reviewers** (in no particular order): Nikita Titov (@StrikerRUS), Xiangrui Meng (@mengxr), Nirmal Borah (@Nirmal-Neel)

## v0.80 (2018.08.13)
* **JVM packages received a major upgrade**: To consolidate the APIs and improve the user experience, we refactored the design of XGBoost4J-Spark in a significant manner. (#3387)
  - Consolidated APIs: It is now much easier to integrate XGBoost models into a Spark ML pipeline. Users can control behaviors like output leaf prediction results by setting corresponding column names. Training is now more consistent with other Estimators in Spark MLlib: there is now one single method `fit()` to train decision trees.
  - Better user experience: we refactored the parameter-handling modules in XGBoost4J-Spark to provide both camel-case (Spark ML style) and underscore (XGBoost style) parameters
  - A brand-new tutorial is [available](https://xgboost.readthedocs.io/en/release_0.80/jvm/xgboost4j_spark_tutorial.html) for XGBoost4J-Spark.
  - Latest API documentation is now hosted at https://xgboost.readthedocs.io/.
* XGBoost documentation now keeps track of multiple versions:
  - Latest master: https://xgboost.readthedocs.io/en/latest
  - 0.80 stable: https://xgboost.readthedocs.io/en/release_0.80
  - 0.72 stable: https://xgboost.readthedocs.io/en/release_0.72
* Support for per-group weights in ranking objective (#3379)
* Fix inaccurate decimal parsing (#3546)
* New functionality
  - Query ID column support in LIBSVM data files (#2749). This is convenient for performing the ranking task in a distributed setting.
  - Hinge loss for binary classification (`binary:hinge`) (#3477)
  - Ability to specify delimiter and instance weight column for CSV files (#3546)
  - Ability to use 1-based indexing instead of 0-based (#3546)
* GPU support
  - Quantile sketch, binning, and index compression are now performed on GPU, eliminating PCIe transfer for the 'gpu_hist' algorithm (#3319, #3393)
  - Upgrade to NCCL2 for multi-GPU training (#3404).
  - Use shared memory atomics for faster training (#3384).
  - Dynamically allocate GPU memory, to prevent large allocations for deep trees (#3519)
  - Fix memory copy bug for large files (#3472)
* Python package
  - Importing data from Python datatable (#3272)
  - Pre-built binary wheels available for 64-bit Linux and Windows (#3424, #3443)
  - Add new importance measures 'total_gain', 'total_cover' (#3498)
  - Sklearn API now supports saving and loading models (#3192)
  - Arbitrary cross validation fold indices (#3353)
  - `predict()` function in Sklearn API uses `best_ntree_limit` if available, to make early stopping easier to use (#3445)
  - Informational messages are now directed to Python's `print()` rather than standard output (#3438). This way, messages appear inside Jupyter notebooks.
* R package
  - Oracle Solaris support, per CRAN policy (#3372)
* JVM packages
  - Single-instance prediction (#3464)
  - Pre-built JARs are now available from Maven Central (#3401)
  - Add NULL pointer check (#3021)
  - Consider `spark.task.cpus` when controlling parallelism (#3530)
  - Handle missing values in prediction (#3529)
  - Eliminate outputs of `System.out` (#3572)
* Refactored C++ DMatrix class for simplicity and de-duplication (#3301)
* Refactored C++ histogram facilities (#3564)
* Refactored constraints / regularization mechanism for split finding (#3335, #3429). Users may specify an elastic net (L2 + L1 regularization) on leaf weights as well as monotonic constraints on test nodes. The refactor will be useful for a future addition of feature interaction constraints.
* Statically link `libstdc++` for MinGW32 (#3430)
* Enable loading from `group`, `base_margin` and `weight` (see [here](http://xgboost.readthedocs.io/en/latest/tutorials/input_format.html#auxiliary-files-for-additional-information)) for Python, R, and JVM packages (#3431)
* Fix model saving for `count:poisson` so that `max_delta_step` doesn't get truncated (#3515)
* Fix loading of sparse CSC matrix (#3553)
* Fix incorrect handling of `base_score` parameter for Tweedie regression (#3295)

## v0.72.1 (2018.07.08)
This version is only applicable to the Python package. The content is identical to that of v0.72.

## v0.72 (2018.06.01)
* Starting with this release, we plan to make a new release every two months. See #3252 for more details.
* Fix a pathological behavior (near-zero second-order gradients) in multiclass objective (#3304)
* Tree dumps now use high precision in storing floating-point values (#3298)
* Submodules `rabit` and `dmlc-core` have been brought up to date, bringing bug fixes (#3330, #3221).
* GPU support
  - Continuous integration tests for GPU code (#3294, #3309)
  - GPU accelerated coordinate descent algorithm (#3178)
  - Abstract 1D vector class now works with multiple GPUs (#3287)
  - Generate PTX code for most recent architecture (#3316)
  - Fix a memory bug on NVIDIA K80 cards (#3293)
  - Address performance instability for single-GPU, multi-core machines (#3324)
* Python package
  - FreeBSD support (#3247)
  - Validation of feature names in `Booster.predict()` is now optional (#3323)
* Updated Sklearn API
  - Validation sets now support instance weights (#2354)
  - `XGBClassifier.predict_proba()` should not support `output_margin` option. (#3343) See BREAKING CHANGES below.
* R package: - Better handling of NULL in `print.xgb.Booster()` (#3338) - Comply with CRAN policy by removing compiler warning suppression (#3329) - Updated CRAN submission * JVM packages - JVM packages will now use the same versioning scheme as other packages (#3253) - Update Spark to 2.3 (#3254) - Add scripts to cross-build and deploy artifacts (#3276, #3307) - Fix a compilation error for Scala 2.10 (#3332) * BREAKING CHANGES - `XGBClassifier.predict_proba()` no longer accepts parameter `output_margin`. The parameter makes no sense for `predict_proba()` because the method is to predict class probabilities, not raw margin scores. ## v0.71 (2018.04.11) * This is a minor release, mainly motivated by issues concerning `pip install`, e.g. #2426, #3189, #3118, and #3194. With this release, users of Linux and MacOS will be able to run `pip install` for the most part. * Refactored linear booster class (`gblinear`), so as to support multiple coordinate descent updaters (#3103, #3134). See BREAKING CHANGES below. * Fix slow training for multiclass classification with high number of classes (#3109) * Fix a corner case in approximate quantile sketch (#3167). Applicable for 'hist' and 'gpu_hist' algorithms * Fix memory leak in DMatrix (#3182) * New functionality - Better linear booster class (#3103, #3134) - Pairwise SHAP interaction effects (#3043) - Cox loss (#3043) - AUC-PR metric for ranking task (#3172) - Monotonic constraints for 'hist' algorithm (#3085) * GPU support - Create an abstract 1D vector class that moves data seamlessly between the main and GPU memory (#2935, #3116, #3068). This eliminates unnecessary PCIe data transfer during training time. - Fix minor bugs (#3051, #3217) - Fix compatibility error for CUDA 9.1 (#3218) * Python package: - Correctly handle parameter `verbose_eval=0` (#3115) * R package: - Eliminate segmentation fault on 32-bit Windows platform (#2994) * JVM packages - Fix a memory bug involving double-freeing Booster objects (#3005, #3011) - Handle empty partition in predict (#3014) - Update docs and unify terminology (#3024) - Delete cache files after job finishes (#3022) - Compatibility fixes for latest Spark versions (#3062, #3093) * BREAKING CHANGES: Updated linear modelling algorithms. In particular L1/L2 regularisation penalties are now normalised to number of training examples. This makes the implementation consistent with sklearn/glmnet. L2 regularisation has also been removed from the intercept. To produce linear models with the old regularisation behaviour, the alpha/lambda regularisation parameters can be manually scaled by dividing them by the number of training examples. ## v0.7 (2017.12.30) * **This version represents a major change from the last release (v0.6), which was released one year and half ago.** * Updated Sklearn API - Add compatibility layer for scikit-learn v0.18: `sklearn.cross_validation` now deprecated - Updated to allow use of all XGBoost parameters via `**kwargs`. 
  - Updated `nthread` to `n_jobs` and `seed` to `random_state` (as per Sklearn convention); `nthread` and `seed` are now marked as deprecated
  - Updated to allow choice of Booster (`gbtree`, `gblinear`, or `dart`)
  - `XGBRegressor` now supports instance weights (specify `sample_weight` parameter)
  - Pass `n_jobs` parameter to the `DMatrix` constructor
  - Add `xgb_model` parameter to `fit` method, to allow continuation of training
* Refactored gbm to allow a more friendly cache strategy
  - Specialized some prediction routines
* Robust `DMatrix` construction from a sparse matrix
* Faster construction of `DMatrix` from 2D NumPy matrices: elide copies, use of multiple threads
* Automatically remove nan from input data when it is sparse.
  - This can solve some user-reported problems where `istart != hist.size`
* Fix the single-instance prediction function to obtain correct predictions
* Minor fixes
  - Thread local variable is upgraded so it is automatically freed at thread exit.
  - Fix saving and loading `count:poisson` models
  - Fix CalcDCG to use base-2 logarithm
  - Messages are now written to stderr instead of stdout
  - Keep built-in evaluations while using customized evaluation functions
  - Use `bst_float` consistently to minimize type conversion
  - Copy the base margin when slicing `DMatrix`
  - Evaluation metrics are now saved to the model file
  - Use `int32_t` explicitly when serializing version
  - In distributed training, synchronize the number of features after loading a data matrix.
* Migrate to C++11
  - The current master version now requires a C++11-enabled compiler (g++ 4.8 or higher)
* Predictor interface was factored out (in a manner similar to the updater interface).
* Makefile support for Solaris and ARM
* Test code coverage using Codecov
* Add CPP tests
* Add `Dockerfile` and `Jenkinsfile` to support continuous integration for GPU code
* New functionality
  - Ability to adjust tree model's statistics to a new dataset without changing tree structures.
  - Ability to extract feature contributions from individual predictions, as described in [here](http://blog.datadive.net/interpreting-random-forests/) and [here](https://arxiv.org/abs/1706.06060).
  - Faster, histogram-based tree algorithm (`tree_method='hist'`).
  - GPU/CUDA accelerated tree algorithms (`tree_method='gpu_hist'` or `'gpu_exact'`), including the GPU-based predictor.
  - Monotonic constraints: when other features are fixed, force the prediction to be monotonically increasing with respect to a certain specified feature.
- Faster gradient calculation using AVX SIMD - Ability to export models in JSON format - Support for Tweedie regression - Additional dropout options for DART: binomial+1, epsilon - Ability to update an existing model in-place: this is useful for many applications, such as determining feature importance * Python package: - New parameters: - `learning_rates` in `cv()` - `shuffle` in `mknfold()` - `max_features` and `show_values` in `plot_importance()` - `sample_weight` in `XGBRegressor.fit()` - Support binary wheel builds - Fix `MultiIndex` detection to support Pandas 0.21.0 and higher - Support metrics and evaluation sets whose names contain `-` - Support feature maps when plotting trees - Compatibility fix for Python 2.6 - Call `print_evaluation` callback at last iteration - Use appropriate integer types when calling native code, to prevent truncation and memory error - Fix shared library loading on Mac OS X * R package: - New parameters: - `silent` in `xgb.DMatrix()` - `use_int_id` in `xgb.model.dt.tree()` - `predcontrib` in `predict()` - `monotone_constraints` in `xgb.train()` - Default value of the `save_period` parameter in `xgboost()` changed to NULL (consistent with `xgb.train()`). - It's possible to custom-build the R package with GPU acceleration support. - Enable JVM build for Mac OS X and Windows - Integration with AppVeyor CI - Improved safety for garbage collection - Store numeric attributes with higher precision - Easier installation for devel version - Improved `xgb.plot.tree()` - Various minor fixes to improve user experience and robustness - Register native code to pass CRAN check - Updated CRAN submission * JVM packages - Add Spark pipeline persistence API - Fix data persistence: loss evaluation on test data had wrongly used caches for training data. 
  - Clean external cache after training
  - Implement early stopping
  - Enable training of multiple models by distinguishing stage IDs
  - Better Spark integration: support RDD / dataframe / dataset, integrate with Spark ML package
  - XGBoost4j now supports the ranking task
  - Support training with missing data
  - Refactor JVM package to separate regression and classification models to be consistent with other machine learning libraries
  - Support XGBoost4j compilation on Windows
  - Parameter tuning tool
  - Publish source code for XGBoost4j to the local Maven repo
  - Scala implementation of the Rabit tracker (drop-in replacement for the Java implementation)
  - Better exception handling for the Rabit tracker
  - Persist `num_class`, number of classes (for the classification task)
  - `XGBoostModel` now holds `BoosterParams`
  - libxgboost4j is now part of the CMake build
  - Release `DMatrix` when no longer needed, to conserve memory
  - Expose `baseMargin`, to allow initialization of boosting with predictions from an external model
  - Support instance weights
  - Use `SparkParallelismTracker` to prevent jobs from hanging forever
  - Expose train-time evaluation metrics via `XGBoostModel.summary`
  - Option to specify `host-ip` explicitly in the Rabit tracker
* Documentation
  - Better math notation for gradient boosting
  - Updated build instructions for Mac OS X
  - Template for GitHub issues
  - Add `CITATION` file for citing XGBoost in scientific writing
  - Fix dropdown menu in xgboost.readthedocs.io
  - Document `updater_seq` parameter
  - Style fixes for Python documentation
  - Links to additional examples and tutorials
  - Clarify installation requirements
* Changes that break backward compatibility
  - [#1519](https://github.com/dmlc/xgboost/pull/1519) XGBoost-spark no longer contains APIs for DMatrix; use the public booster interface instead.
  - [#2476](https://github.com/dmlc/xgboost/pull/2476) `XGBoostModel.predict()` now has a different signature

## v0.6 (2016.07.29)
* Version 0.5 is skipped due to major improvements in the core
* Major refactor of core library.
  - Goal: more flexible and modular code as a portable library.
  - Switch to use of C++11 standard code.
  - Random number generator defaults to ```std::mt19937```.
  - Share the data loading pipeline and logging module from dmlc-core.
  - Enable registry pattern to allow optional plugins of objectives, metrics, tree constructors, and data loaders.
  - Future plugin modules can be put into xgboost/plugin and register back to the library.
  - Remove most of the raw pointers to smart ptrs, for RAII safety.
* Add an official `tree_method` parameter for choosing the approximate algorithm.
  - Change default behavior to prefer the faster algorithm.
  - User will get a message when the approximate algorithm is chosen.
* Change library name to libxgboost.so
* Backward compatibility
  - The binary buffer file is not backward compatible with the previous version.
  - The model file is backward compatible on 64-bit platforms.
* The model file is compatible between 64-/32-bit platforms (not yet tested).
* External memory version and other advanced features will be exposed to the R library as well on Linux.
  - Previously some of the features were blocked due to C++11 and threading limits.
  - The Windows version is still blocked because Rtools does not support ```std::thread```.
* rabit and dmlc-core are maintained through git submodule
  - Anyone can open a PR to update these dependencies now.
* Improvements
  - Rabit and xgboost libs are not thread-safe and use thread-local PRNGs
  - This could fix some of the problems previously seen when running xgboost on multiple threads.
* JVM Package
  - Enable xgboost4j for Java and Scala
  - XGBoost distributed now runs on Flink and Spark.
* Support model attributes listing for metadata.
  - https://github.com/dmlc/xgboost/pull/1198
  - https://github.com/dmlc/xgboost/pull/1166
* Support callback API
  - https://github.com/dmlc/xgboost/issues/892
  - https://github.com/dmlc/xgboost/pull/1211
  - https://github.com/dmlc/xgboost/pull/1264
* Support new booster DART (dropout in tree boosting)
  - https://github.com/dmlc/xgboost/pull/1220
* Add CMake build system
  - https://github.com/dmlc/xgboost/pull/1314

## v0.47 (2016.01.14)
* Changes in R library
  - fixed a possible problem with Poisson regression.
  - switched from 0 to NA for missing values.
  - exposed access to additional model parameters.
* Changes in Python library
  - throws an exception instead of crashing the terminal when a parameter error happens.
  - has importance plot and tree plot functions.
  - accepts different learning rates for each boosting round.
  - allows model training continuation from a previously saved model.
  - allows early stopping in CV.
  - allows feval to return a list of tuples.
  - allows eval_metric to handle additional formats.
  - improved compatibility in the sklearn module.
  - additional parameters added for the sklearn wrapper.
  - added pip installation functionality.
  - supports more Pandas DataFrame dtypes.
  - added best_ntree_limit attribute, in addition to best_score and best_iteration.
* Java API is ready for use
* Added more test cases and continuous integration to make each build more robust.

## v0.4 (2015.05.11)
* Distributed version of xgboost that runs on YARN, scales to billions of examples
* Direct save/load data and model from/to S3 and HDFS
* Feature importance visualization in R module, by Michael Benesty
* Predict leaf index
* Poisson regression for counts data
* Early stopping option in training
* Native save/load support in R and Python
  - xgboost models now can be saved using save/load in R
  - the xgboost Python model is now picklable
* sklearn wrapper is supported in the Python module
* Experimental External memory version

## v0.3 (2014.09.07)
* Faster tree construction module
  - Allows subsampling columns during tree construction via ```bst:col_samplebytree=ratio```
* Support for boosting from initial predictions
* Experimental version of LambdaRank
* Linear booster is now parallelized, using parallel coordinate descent.
* Add [Code Guide](src/README.md) for customizing objective function and evaluation * Add R module ## v0.2x (2014.05.20) * Python module * Weighted samples instances * Initial version of pairwise rank ## v0.1 (2014.03.26) * Initial release xgboost-3.0.0/R-package/000077500000000000000000000000001476511272400150025ustar00rootroot00000000000000xgboost-3.0.0/R-package/.Rbuildignore000066400000000000000000000001461476511272400174310ustar00rootroot00000000000000\.o$ \.so$ \.dll$ ^.*\.Rproj$ ^\.Rproj\.user$ README.md ^doc$ ^Meta$ ^_pkgdown\.yml$ ^docs$ ^pkgdown$ xgboost-3.0.0/R-package/.gitignore000066400000000000000000000000051476511272400167650ustar00rootroot00000000000000docs xgboost-3.0.0/R-package/CMakeLists.txt000066400000000000000000000031061476511272400175420ustar00rootroot00000000000000find_package(LibR REQUIRED) message(STATUS "LIBR_CORE_LIBRARY " ${LIBR_CORE_LIBRARY}) file( GLOB_RECURSE R_SOURCES ${CMAKE_CURRENT_LIST_DIR}/src/*.cc ${CMAKE_CURRENT_LIST_DIR}/src/*.c ) # Use object library to expose symbols add_library(xgboost-r OBJECT ${R_SOURCES}) if(ENABLE_ALL_WARNINGS) target_compile_options(xgboost-r PRIVATE -Wall -Wextra) endif() if(MSVC) # https://github.com/microsoft/LightGBM/pull/6061 # MSVC doesn't work with anonymous types in structs. (R complex) # # syntax error: missing ';' before identifier 'private_data_c' # target_compile_definitions(xgboost-r PRIVATE -DR_LEGACY_RCOMPLEX) endif() target_compile_definitions( xgboost-r PUBLIC -DXGBOOST_STRICT_R_MODE=1 -DDMLC_LOG_BEFORE_THROW=0 -DDMLC_DISABLE_STDIN=1 -DDMLC_LOG_CUSTOMIZE=1 ) target_include_directories( xgboost-r PRIVATE ${LIBR_INCLUDE_DIRS} ${PROJECT_SOURCE_DIR}/include ${PROJECT_SOURCE_DIR}/dmlc-core/include ) target_link_libraries(xgboost-r PUBLIC ${LIBR_CORE_LIBRARY}) if(USE_OPENMP) find_package(OpenMP REQUIRED) target_link_libraries(xgboost-r PUBLIC OpenMP::OpenMP_CXX OpenMP::OpenMP_C) endif() set_target_properties( xgboost-r PROPERTIES CXX_STANDARD 17 CXX_STANDARD_REQUIRED ON POSITION_INDEPENDENT_CODE ON ) # Get compilation and link flags of xgboost-r and propagate to objxgboost target_link_libraries(objxgboost PUBLIC xgboost-r) # Add all objects of xgboost-r to objxgboost target_sources(objxgboost INTERFACE $) set(LIBR_HOME "${LIBR_HOME}" PARENT_SCOPE) set(LIBR_EXECUTABLE "${LIBR_EXECUTABLE}" PARENT_SCOPE) xgboost-3.0.0/R-package/DESCRIPTION000066400000000000000000000052501476511272400165120ustar00rootroot00000000000000Package: xgboost Type: Package Title: Extreme Gradient Boosting Version: 3.0.0.1 Date: 2025-03-14 Authors@R: c( person("Tianqi", "Chen", role = c("aut"), email = "tianqi.tchen@gmail.com"), person("Tong", "He", role = c("aut"), email = "hetong007@gmail.com"), person("Michael", "Benesty", role = c("aut"), email = "michael@benesty.fr"), person("Vadim", "Khotilovich", role = c("aut"), email = "khotilovich@gmail.com"), person("Yuan", "Tang", role = c("aut"), email = "terrytangyuan@gmail.com", comment = c(ORCID = "0000-0001-5243-233X")), person("Hyunsu", "Cho", role = c("aut"), email = "chohyu01@cs.washington.edu"), person("Kailong", "Chen", role = c("aut")), person("Rory", "Mitchell", role = c("aut")), person("Ignacio", "Cano", role = c("aut")), person("Tianyi", "Zhou", role = c("aut")), person("Mu", "Li", role = c("aut")), person("Junyuan", "Xie", role = c("aut")), person("Min", "Lin", role = c("aut")), person("Yifeng", "Geng", role = c("aut")), person("Yutian", "Li", role = c("aut")), person("Jiaming", "Yuan", role = c("aut", "cre"), email = "jm.yuan@outlook.com"), person("David", "Cortes", role = 
c("aut")), person("XGBoost contributors", role = c("cph"), comment = "base XGBoost implementation") ) Maintainer: Jiaming Yuan Description: Extreme Gradient Boosting, which is an efficient implementation of the gradient boosting framework from Chen & Guestrin (2016) . This package is its R interface. The package includes efficient linear model solver and tree learning algorithms. The package can automatically do parallel computation on a single machine which could be more than 10 times faster than existing gradient boosting packages. It supports various objective functions, including regression, classification and ranking. The package is made to be extensible, so that users are also allowed to define their own objectives easily. License: Apache License (== 2.0) | file LICENSE URL: https://github.com/dmlc/xgboost BugReports: https://github.com/dmlc/xgboost/issues NeedsCompilation: yes VignetteBuilder: knitr Suggests: knitr, rmarkdown, ggplot2 (>= 1.0.1), DiagrammeR (>= 0.9.0), DiagrammeRsvg, rsvg, htmlwidgets, Ckmeans.1d.dp (>= 3.3.1), vcd (>= 1.3), testthat, igraph (>= 1.0.1), float, titanic, RhpcBLASctl, survival Depends: R (>= 4.3.0) Imports: Matrix (>= 1.1-0), methods, data.table (>= 1.9.6), jsonlite (>= 1.0) Roxygen: list(markdown = TRUE) RoxygenNote: 7.3.2 Encoding: UTF-8 SystemRequirements: GNU make, C++17 xgboost-3.0.0/R-package/LICENSE000066400000000000000000000011121476511272400160020ustar00rootroot00000000000000Copyright (c) 2014-2023, Tianqi Chen and XBGoost Contributors Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. 
xgboost-3.0.0/R-package/NAMESPACE000066400000000000000000000056321476511272400162270ustar00rootroot00000000000000# Generated by roxygen2: do not edit by hand S3method("[",xgb.Booster) S3method("[",xgb.DMatrix) S3method("dimnames<-",xgb.DMatrix) S3method(coef,xgb.Booster) S3method(dim,xgb.DMatrix) S3method(dimnames,xgb.DMatrix) S3method(getinfo,xgb.Booster) S3method(getinfo,xgb.DMatrix) S3method(length,xgb.Booster) S3method(predict,xgb.Booster) S3method(predict,xgboost) S3method(print,xgb.Booster) S3method(print,xgb.DMatrix) S3method(print,xgb.cv.synchronous) S3method(print,xgboost) S3method(setinfo,xgb.Booster) S3method(setinfo,xgb.DMatrix) S3method(variable.names,xgb.Booster) export("xgb.attr<-") export("xgb.attributes<-") export("xgb.config<-") export("xgb.model.parameters<-") export(getinfo) export(setinfo) export(xgb.Callback) export(xgb.DMatrix) export(xgb.DMatrix.hasinfo) export(xgb.DMatrix.save) export(xgb.DataBatch) export(xgb.DataIter) export(xgb.ExtMemDMatrix) export(xgb.QuantileDMatrix) export(xgb.QuantileDMatrix.from_iterator) export(xgb.attr) export(xgb.attributes) export(xgb.cb.cv.predict) export(xgb.cb.early.stop) export(xgb.cb.evaluation.log) export(xgb.cb.gblinear.history) export(xgb.cb.print.evaluation) export(xgb.cb.reset.parameters) export(xgb.cb.save.model) export(xgb.config) export(xgb.copy.Booster) export(xgb.create.features) export(xgb.cv) export(xgb.dump) export(xgb.gblinear.history) export(xgb.get.DMatrix.data) export(xgb.get.DMatrix.num.non.missing) export(xgb.get.DMatrix.qcut) export(xgb.get.config) export(xgb.get.num.boosted.rounds) export(xgb.ggplot.deepness) export(xgb.ggplot.importance) export(xgb.ggplot.shap.summary) export(xgb.importance) export(xgb.is.same.Booster) export(xgb.load) export(xgb.load.raw) export(xgb.model.dt.tree) export(xgb.params) export(xgb.plot.deepness) export(xgb.plot.importance) export(xgb.plot.multi.trees) export(xgb.plot.shap) export(xgb.plot.shap.summary) export(xgb.plot.tree) export(xgb.save) export(xgb.save.raw) export(xgb.set.config) export(xgb.slice.Booster) export(xgb.slice.DMatrix) export(xgb.train) export(xgboost) import(methods) importClassesFrom(Matrix,CsparseMatrix) importClassesFrom(Matrix,dgCMatrix) importClassesFrom(Matrix,dgRMatrix) importFrom(Matrix,sparse.model.matrix) importFrom(data.table,":=") importFrom(data.table,as.data.table) importFrom(data.table,data.table) importFrom(data.table,is.data.table) importFrom(data.table,rbindlist) importFrom(data.table,setkey) importFrom(data.table,setkeyv) importFrom(data.table,setnames) importFrom(grDevices,rgb) importFrom(graphics,barplot) importFrom(graphics,grid) importFrom(graphics,lines) importFrom(graphics,par) importFrom(graphics,points) importFrom(graphics,title) importFrom(jsonlite,fromJSON) importFrom(jsonlite,toJSON) importFrom(methods,new) importFrom(stats,coef) importFrom(stats,median) importFrom(stats,predict) importFrom(stats,sd) importFrom(stats,variable.names) importFrom(utils,head) importFrom(utils,object.size) importFrom(utils,str) importFrom(utils,tail) useDynLib(xgboost, .registration = TRUE) xgboost-3.0.0/R-package/R/000077500000000000000000000000001476511272400152035ustar00rootroot00000000000000xgboost-3.0.0/R-package/R/callbacks.R000066400000000000000000001376311476511272400172600ustar00rootroot00000000000000.reserved_cb_names <- c("names", "class", "call", "params", "niter", "nfeatures", "folds") #' XGBoost Callback Constructor #' #' Constructor for defining the structure of callback functions that can be executed #' at different stages of model 
training (before / after training, before / after each boosting #' iteration). #' #' @details #' Arguments that will be passed to the supplied functions are as follows: #' - env The same environment that is passed under argument `env`. #' #' It may be modified by the functions in order to e.g. keep track of what happens #' across iterations or similar. #' #' This environment is only used by the functions supplied to the callback, and will #' not be kept after the model fitting function terminates (see parameter `f_after_training`). #' #' - model The booster object when using [xgb.train()], or the folds when using [xgb.cv()]. #' #' For [xgb.cv()], folds are a list with a structure as follows: #' - `dtrain`: The training data for the fold (as an `xgb.DMatrix` object). #' - `bst`: The `xgb.Booster` object for the fold. #' - `evals`: A list containing two DMatrices, with names `train` and `test` #' (`test` is the held-out data for the fold). #' - `index`: The indices of the hold-out data for that fold (base-1 indexing), #' from which the `test` entry in `evals` was obtained. #' #' This object should **not** be in-place modified in ways that conflict with the #' training (e.g. resetting the parameters for a training update in a way that resets #' the number of rounds to zero in order to overwrite rounds). #' #' Note that any R attributes that are assigned to the booster during the callback functions, #' will not be kept thereafter as the booster object variable is not re-assigned during #' training. It is however possible to set C-level attributes of the booster through #' [xgb.attr()] or [xgb.attributes()], which should remain available for the rest #' of the iterations and after the training is done. #' #' For keeping variables across iterations, it's recommended to use `env` instead. #' - data The data to which the model is being fit, as an `xgb.DMatrix` object. #' #' Note that, for [xgb.cv()], this will be the full data, while data for the specific #' folds can be found in the `model` object. #' - evals The evaluation data, as passed under argument `evals` to [xgb.train()]. #' #' For [xgb.cv()], this will always be `NULL`. #' - begin_iteration Index of the first boosting iteration that will be executed (base-1 indexing). #' #' This will typically be '1', but when using training continuation, depending on the #' parameters for updates, boosting rounds will be continued from where the previous #' model ended, in which case this will be larger than 1. #' #' - end_iteration Index of the last boosting iteration that will be executed #' (base-1 indexing, inclusive of this end). #' #' It should match with argument `nrounds` passed to [xgb.train()] or [xgb.cv()]. #' #' Note that boosting might be interrupted before reaching this last iteration, for #' example by using the early stopping callback [xgb.cb.early.stop()]. #' - iteration Index of the iteration number that is being executed (first iteration #' will be the same as parameter `begin_iteration`, then the next one will add +1, and so on). #' #' - iter_feval Evaluation metrics for `evals` that were supplied, either #' determined by the objective, or by parameter `custom_metric`. #' #' For [xgb.train()], this will be a named vector with one entry per element in #' `evals`, where the names are determined as 'evals name' + '-' + 'metric name' - for #' example, if `evals` contains an entry named "tr" and the metric is "rmse", #' this will be a one-element vector with name "tr-rmse".
#' #' For [xgb.cv()], this will be a 2d matrix with dimensions `[length(evals), nfolds]`, #' where the row names will follow the same naming logic as the one-dimensional vector #' that is passed in [xgb.train()]. #' #' Note that, internally, the built-in callbacks such as [xgb.cb.print.evaluation] summarize #' this table by calculating the row-wise means and standard deviations. #' #' - final_feval The evaluation results after the last boosting round is executed #' (same format as `iter_feval`, and will be the exact same input as passed under #' `iter_feval` to the last round that is executed during model fitting). #' #' - prev_cb_res Result from a previous run of a callback sharing the same name #' (as given by parameter `cb_name`) when conducting training continuation, if there #' was any in the booster R attributes. #' #' Sometimes, one might want to append the new results to the previous one, and this will #' be done automatically by the built-in callbacks such as [xgb.cb.evaluation.log], #' which will append the new rows to the previous table. #' #' If no such previous callback result is available (which it never will when fitting #' a model from start instead of updating an existing model), this will be `NULL`. #' #' For [xgb.cv()], which doesn't support training continuation, this will always be `NULL`. #' #' The following names (`cb_name` values) are reserved for internal callbacks: #' - print_evaluation #' - evaluation_log #' - reset_parameters #' - early_stop #' - save_model #' - cv_predict #' - gblinear_history #' #' The following names are reserved for other non-callback attributes: #' - names #' - class #' - call #' - params #' - niter #' - nfeatures #' - folds #' #' When using the built-in early stopping callback ([xgb.cb.early.stop]), said callback #' will always be executed before the others, as it sets some booster C-level attributes #' that other callbacks might also use. Otherwise, the order of execution will match with #' the order in which the callbacks are passed to the model fitting function. #' #' @param cb_name Name for the callback. #' #' If the callback produces some non-NULL result (from executing the function passed under #' `f_after_training`), that result will be added as an R attribute to the resulting booster #' (or as a named element in the result of CV), with the attribute name specified here. #' #' Names of callbacks must be unique - i.e. there cannot be two callbacks with the same name. #' @param env An environment object that will be passed to the different functions in the callback. #' Note that this environment will not be shared with other callbacks. #' @param f_before_training A function that will be executed before the training has started. #' #' If passing `NULL` for this or for the other function inputs, then no function will be executed. #' #' If passing a function, it will be called with parameters supplied as non-named arguments #' matching the function signatures that are shown in the default value for each function argument. #' @param f_before_iter A function that will be executed before each boosting round. #' #' This function can signal whether the training should be finalized or not, by outputting #' a value that evaluates to `TRUE` - i.e. if the output from the function provided here at #' a given round is `TRUE`, then training will be stopped before the current iteration happens. #' #' Return values of `NULL` will be interpreted as `FALSE`. #' @param f_after_iter A function that will be executed after each boosting round. 
#' #' This function can signal whether the training should be finalized or not, by outputting #' a value that evaluates to `TRUE` - i.e. if the output from the function provided here at #' a given round is `TRUE`, then training will be stopped at that round. #' #' Return values of `NULL` will be interpreted as `FALSE`. #' @param f_after_training A function that will be executed after training is finished. #' #' This function can optionally output something non-NULL, which will become part of the R #' attributes of the booster (assuming one passes `keep_extra_attributes=TRUE` to [xgb.train()]) #' under the name supplied for parameter `cb_name` in the case of [xgb.train()]; or a part #' of the named elements in the result of [xgb.cv()]. #' @return An `xgb.Callback` object, which can be passed to [xgb.train()] or [xgb.cv()]. #' #' @seealso Built-in callbacks: #' - [xgb.cb.print.evaluation] #' - [xgb.cb.evaluation.log] #' - [xgb.cb.reset.parameters] #' - [xgb.cb.early.stop] #' - [xgb.cb.save.model] #' - [xgb.cb.cv.predict] #' - [xgb.cb.gblinear.history] # #' @examples #' # Example constructing a custom callback that calculates #' # squared error on the training data (no separate test set), #' # and outputs the per-iteration results. #' ssq_callback <- xgb.Callback( #' cb_name = "ssq", #' f_before_training = function(env, model, data, evals, #' begin_iteration, end_iteration) { #' # A vector to keep track of a number at each iteration #' env$logs <- rep(NA_real_, end_iteration - begin_iteration + 1) #' }, #' f_after_iter = function(env, model, data, evals, iteration, iter_feval) { #' # This calculates the sum of squared errors on the training data. #' # Note that this can be better done by passing an 'evals' entry, #' # but this demonstrates a way in which callbacks can be structured.
#' pred <- predict(model, data) #' err <- pred - getinfo(data, "label") #' sq_err <- sum(err^2) #' env$logs[iteration] <- sq_err #' cat( #' sprintf( #' "Squared error at iteration %d: %.2f\n", #' iteration, sq_err #' ) #' ) #' #' # A return value of 'TRUE' here would signal to finalize the training #' return(FALSE) #' }, #' f_after_training = function(env, model, data, evals, iteration, #' final_feval, prev_cb_res) { #' return(env$logs) #' } #' ) #' #' data(mtcars) #' #' y <- mtcars$mpg #' x <- as.matrix(mtcars[, -1]) #' #' dm <- xgb.DMatrix(x, label = y, nthread = 1) #' model <- xgb.train( #' data = dm, #' params = xgb.params(objective = "reg:squarederror", nthread = 1), #' nrounds = 5, #' callbacks = list(ssq_callback) #' ) #' #' # Result from 'f_after_iter' will be available as an attribute #' attributes(model)$ssq #' @export xgb.Callback <- function( cb_name = "custom_callback", env = new.env(), f_before_training = function(env, model, data, evals, begin_iteration, end_iteration) NULL, f_before_iter = function(env, model, data, evals, iteration) NULL, f_after_iter = function(env, model, data, evals, iteration, iter_feval) NULL, f_after_training = function(env, model, data, evals, iteration, final_feval, prev_cb_res) NULL ) { stopifnot(is.null(f_before_training) || is.function(f_before_training)) stopifnot(is.null(f_before_iter) || is.function(f_before_iter)) stopifnot(is.null(f_after_iter) || is.function(f_after_iter)) stopifnot(is.null(f_after_training) || is.function(f_after_training)) stopifnot(is.character(cb_name) && length(cb_name) == 1) if (cb_name %in% .reserved_cb_names) { stop("Cannot use reserved callback name '", cb_name, "'.") } out <- list( cb_name = cb_name, env = env, f_before_training = f_before_training, f_before_iter = f_before_iter, f_after_iter = f_after_iter, f_after_training = f_after_training ) class(out) <- "xgb.Callback" return(out) } .execute.cb.before.training <- function( callbacks, model, data, evals, begin_iteration, end_iteration ) { for (callback in callbacks) { if (!is.null(callback$f_before_training)) { callback$f_before_training( callback$env, model, data, evals, begin_iteration, end_iteration ) } } } .execute.cb.before.iter <- function( callbacks, model, data, evals, iteration ) { if (!length(callbacks)) { return(FALSE) } out <- sapply(callbacks, function(cb) { if (is.null(cb$f_before_iter)) { return(FALSE) } should_stop <- cb$f_before_iter( cb$env, model, data, evals, iteration ) if (!NROW(should_stop)) { should_stop <- FALSE } else if (NROW(should_stop) > 1) { should_stop <- head(as.logical(should_stop), 1) } return(should_stop) }) return(any(out)) } .execute.cb.after.iter <- function( callbacks, model, data, evals, iteration, iter_feval ) { if (!length(callbacks)) { return(FALSE) } out <- sapply(callbacks, function(cb) { if (is.null(cb$f_after_iter)) { return(FALSE) } should_stop <- cb$f_after_iter( cb$env, model, data, evals, iteration, iter_feval ) if (!NROW(should_stop)) { should_stop <- FALSE } else if (NROW(should_stop) > 1) { should_stop <- head(as.logical(should_stop), 1) } return(should_stop) }) return(any(out)) } .execute.cb.after.training <- function( callbacks, model, data, evals, iteration, final_feval, prev_cb_res ) { if (!length(callbacks)) { return(NULL) } old_cb_res <- attributes(model) out <- lapply(callbacks, function(cb) { if (is.null(cb$f_after_training)) { return(NULL) } else { return( cb$f_after_training( cb$env, model, data, evals, iteration, final_feval, getElement(old_cb_res, cb$cb_name) ) ) } }) names(out) <- 
sapply(callbacks, function(cb) cb$cb_name) if (NROW(out)) { out <- out[!sapply(out, is.null)] } return(out) } .summarize.feval <- function(iter_feval, showsd) { if (NCOL(iter_feval) > 1L && showsd) { stdev <- apply(iter_feval, 1, sd) } else { stdev <- NULL } if (NCOL(iter_feval) > 1L) { iter_feval <- rowMeans(iter_feval) } return(list(feval = iter_feval, stdev = stdev)) } .print.evaluation <- function(iter_feval, showsd, iteration) { tmp <- .summarize.feval(iter_feval, showsd) msg <- .format_eval_string(iteration, tmp$feval, tmp$stdev) cat(msg, '\n') } # Format the evaluation metric string .format_eval_string <- function(iter, eval_res, eval_err = NULL) { if (length(eval_res) == 0) stop('no evaluation results') enames <- names(eval_res) if (is.null(enames)) stop('evaluation results must have names') iter <- sprintf('[%d]\t', iter) if (!is.null(eval_err)) { if (length(eval_res) != length(eval_err)) stop('eval_res & eval_err lengths mismatch') # Note: UTF-8 code for plus/minus sign is U+00B1 res <- paste0(sprintf("%s:%f\U00B1%f", enames, eval_res, eval_err), collapse = '\t') } else { res <- paste0(sprintf("%s:%f", enames, eval_res), collapse = '\t') } return(paste0(iter, res)) } #' Callback for printing the result of evaluation #' #' @description #' The callback function prints the result of evaluation every `period` iterations. #' The initial and the last iteration's evaluations are always printed. #' #' Does not leave any attribute in the booster (see [xgb.cb.evaluation.log] for that). #' #' @param period Results will be printed every `period` iterations. #' @param showsd Whether standard deviations should be printed (when available). #' @return An `xgb.Callback` object, which can be passed to [xgb.train()] or [xgb.cv()]. #' @seealso [xgb.Callback] #' @export xgb.cb.print.evaluation <- function(period = 1, showsd = TRUE) { if (length(period) != 1 || period != floor(period) || period < 1) { stop("'period' must be a positive integer.") } xgb.Callback( cb_name = "print_evaluation", env = as.environment(list(period = period, showsd = showsd, is_first_call = TRUE)), f_before_training = NULL, f_before_iter = NULL, f_after_iter = function(env, model, data, evals, iteration, iter_feval) { if (is.null(iter_feval)) { return(FALSE) } if (env$is_first_call || (iteration - 1) %% env$period == 0) { .print.evaluation(iter_feval, env$showsd, iteration) env$last_printed_iter <- iteration } env$is_first_call <- FALSE return(FALSE) }, f_after_training = function(env, model, data, evals, iteration, final_feval, prev_cb_res) { if (is.null(final_feval)) { return(NULL) } if (is.null(env$last_printed_iter) || iteration > env$last_printed_iter) { .print.evaluation(final_feval, env$showsd, iteration) } } ) } #' Callback for logging the evaluation history #' #' @details This callback creates a table with per-iteration evaluation metrics (see parameters #' `evals` and `custom_metric` in [xgb.train()]). #' #' Note: in the column names of the final data.table, the dash '-' character is replaced with #' the underscore '_' in order to make the column names more like regular R identifiers. #' #' @return An `xgb.Callback` object, which can be passed to [xgb.train()] or [xgb.cv()].
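#' #' @examples #' # A minimal sketch of using this callback explicitly (it may also be added #' # automatically by xgb.train() when 'evals' is passed); assumes the small #' # 'mtcars' setup used by other examples in this package. #' data(mtcars) #' y <- mtcars$mpg #' x <- as.matrix(mtcars[, -1]) #' dm <- xgb.DMatrix(x, label = y, nthread = 1) #' model <- xgb.train( #'   data = dm, #'   params = xgb.params(objective = "reg:squarederror", nthread = 1), #'   nrounds = 3, #'   evals = list(train = dm), #'   callbacks = list(xgb.cb.evaluation.log()) #' ) #' # The resulting table is stored as an R attribute named 'evaluation_log' #' attributes(model)$evaluation_log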
#' @seealso [xgb.cb.print.evaluation] #' @export xgb.cb.evaluation.log <- function() { xgb.Callback( cb_name = "evaluation_log", f_before_training = function(env, model, data, evals, begin_iteration, end_iteration) { env$evaluation_log <- vector("list", end_iteration - begin_iteration + 1) env$next_log <- 1 }, f_before_iter = NULL, f_after_iter = function(env, model, data, evals, iteration, iter_feval) { tmp <- .summarize.feval(iter_feval, TRUE) env$evaluation_log[[env$next_log]] <- list(iter = iteration, metrics = tmp$feval, sds = tmp$stdev) env$next_log <- env$next_log + 1 return(FALSE) }, f_after_training = function(env, model, data, evals, iteration, final_feval, prev_cb_res) { if (!NROW(env$evaluation_log)) { return(prev_cb_res) } # in case of early stopping if (env$next_log <= length(env$evaluation_log)) { env$evaluation_log <- head(env$evaluation_log, env$next_log - 1) } iters <- data.frame(iter = sapply(env$evaluation_log, function(x) x$iter)) metrics <- do.call(rbind, lapply(env$evaluation_log, function(x) x$metrics)) mnames <- gsub("-", "_", names(env$evaluation_log[[1]]$metrics), fixed = TRUE) colnames(metrics) <- mnames has_sds <- !is.null(env$evaluation_log[[1]]$sds) if (has_sds) { sds <- do.call(rbind, lapply(env$evaluation_log, function(x) x$sds)) colnames(sds) <- mnames metrics <- lapply( mnames, function(metric) { out <- cbind(metrics[, metric], sds[, metric]) colnames(out) <- paste0(metric, c("_mean", "_std")) return(out) } ) metrics <- do.call(cbind, metrics) } evaluation_log <- cbind(iters, metrics) if (!is.null(prev_cb_res)) { if (!is.data.table(prev_cb_res)) { prev_cb_res <- data.table::as.data.table(prev_cb_res) } prev_take <- prev_cb_res[prev_cb_res$iter < min(evaluation_log$iter)] if (nrow(prev_take)) { evaluation_log <- rbind(prev_take, evaluation_log) } } evaluation_log <- data.table::as.data.table(evaluation_log) return(evaluation_log) } ) } #' Callback for resetting booster parameters at each iteration #' #' @details #' Note that when training is resumed from some previous model, and a function is used to #' reset a parameter value, the `nrounds` argument in this function would be the #' number of boosting rounds in the current training. #' #' Does not leave any attribute in the booster. #' #' @param new_params List of parameters to be reset. #' Each element's value must be either a vector of values of length `nrounds` #' to be set at each iteration, #' or a function of two parameters `learning_rates(iteration, nrounds)` #' which returns a new parameter value by using the current iteration number #' and the total number of boosting rounds. #' @return An `xgb.Callback` object, which can be passed to [xgb.train()] or [xgb.cv()].
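#' #' @examples #' # A minimal sketch (assuming the small 'mtcars' setup used by other examples #' # in this package): halve the learning rate at each boosting round through a #' # function of (iteration, nrounds). #' data(mtcars) #' y <- mtcars$mpg #' x <- as.matrix(mtcars[, -1]) #' dm <- xgb.DMatrix(x, label = y, nthread = 1) #' model <- xgb.train( #'   data = dm, #'   params = xgb.params(objective = "reg:squarederror", nthread = 1), #'   nrounds = 5, #'   callbacks = list( #'     xgb.cb.reset.parameters( #'       list(learning_rate = function(iteration, nrounds) 0.3 / 2^(iteration - 1)) #'     ) #'   ) #' )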
#' @export xgb.cb.reset.parameters <- function(new_params) { stopifnot(is.list(new_params)) pnames <- gsub(".", "_", names(new_params), fixed = TRUE) not_allowed <- pnames %in% c('num_class', 'num_output_group', 'size_leaf_vector', 'updater_seq') if (any(not_allowed)) stop('Parameters ', paste(pnames[not_allowed]), " cannot be changed during boosting.") xgb.Callback( cb_name = "reset_parameters", env = as.environment(list(new_params = new_params)), f_before_training = function(env, model, data, evals, begin_iteration, end_iteration) { env$end_iteration <- end_iteration pnames <- gsub(".", "_", names(env$new_params), fixed = TRUE) for (n in pnames) { p <- env$new_params[[n]] if (is.function(p)) { if (length(formals(p)) != 2) stop("Parameter '", n, "' is a function but not of two arguments") } else if (is.numeric(p) || is.character(p)) { if (length(p) != env$end_iteration) stop("Length of '", n, "' has to be equal to 'nrounds'") } else { stop("Parameter '", n, "' is not a function or a vector") } } }, f_before_iter = function(env, model, data, evals, iteration) { params <- lapply(env$new_params, function(p) { if (is.function(p)) { return(p(iteration, env$end_iteration)) } else { return(p[iteration]) } }) if (inherits(model, "xgb.Booster")) { xgb.model.parameters(model) <- params } else { for (fd in model) { xgb.model.parameters(fd$bst) <- params } } return(FALSE) }, f_after_iter = NULL, f_after_training = NULL ) } #' Callback to activate early stopping #' #' @description #' This callback function determines the condition for early stopping. #' #' The following attributes are assigned to the booster's object: #' - `best_score` the evaluation score at the best iteration #' - `best_iteration` at which boosting iteration the best score has occurred #' (0-based index for interoperability of binary models) #' #' The same values are also stored as R attributes as a result of the callback, plus an additional #' attribute `stopped_by_max_rounds` which indicates whether an early stopping by the `stopping_rounds` #' condition occurred. Note that the `best_iteration` that is stored under R attributes will follow #' base-1 indexing, so it will be larger by '1' than the C-level 'best_iteration' that is accessed #' through [xgb.attr()] or [xgb.attributes()]. #' #' At least one dataset is required in `evals` for early stopping to work. #' #' @param stopping_rounds The number of rounds with no improvement in #' the evaluation metric in order to stop the training. #' @param maximize Whether to maximize the evaluation metric. #' @param metric_name The name of an evaluation column to use as a criterion for early #' stopping. If not set, the last column will be used. #' Let's say the test data in `evals` was labelled as `dtest`, #' and one wants to use the AUC in test data for early stopping regardless of where #' it is in the `evals`, then one of the following would need to be set: #' `metric_name = 'dtest-auc'` or `metric_name = 'dtest_auc'`. #' All dash '-' characters in metric names are considered equivalent to '_'. #' @param verbose Whether to print the early stopping information. #' #' @param save_best Whether training should return the best model or the last model. If #' set to `TRUE`, it will only keep the boosting rounds up to the detected best #' iteration, discarding the ones that come after. This parameter is not supported by #' the `xgb.cv` function and the `gblinear` booster yet. #' @return An `xgb.Callback` object, which can be passed to [xgb.train()] or [xgb.cv()].
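#' #' @examples #' # A minimal sketch (assuming the small 'mtcars' setup used by other examples #' # in this package). For illustration only, the training data itself is used #' # as the evaluation set; a real use case would pass held-out data instead. #' data(mtcars) #' y <- mtcars$mpg #' x <- as.matrix(mtcars[, -1]) #' dm <- xgb.DMatrix(x, label = y, nthread = 1) #' model <- xgb.train( #'   data = dm, #'   params = xgb.params(objective = "reg:squarederror", nthread = 1), #'   nrounds = 100, #'   evals = list(train = dm), #'   callbacks = list(xgb.cb.early.stop(stopping_rounds = 3)) #' ) #' # Best iteration is stored both as a C-level attribute (base-0) and as an #' # R attribute (base-1) under the callback's name #' xgb.attr(model, "best_iteration") #' attributes(model)$early_stop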
#' @export xgb.cb.early.stop <- function( stopping_rounds, maximize = FALSE, metric_name = NULL, verbose = TRUE, save_best = FALSE ) { if (!is.null(metric_name)) { stopifnot(is.character(metric_name)) stopifnot(length(metric_name) == 1L) } xgb.Callback( cb_name = "early_stop", env = as.environment( list( checked_evnames = FALSE, stopping_rounds = stopping_rounds, maximize = maximize, metric_name = metric_name, verbose = verbose, save_best = save_best, stopped_by_max_rounds = FALSE ) ), f_before_training = function(env, model, data, evals, begin_iteration, end_iteration) { if (inherits(model, "xgb.Booster") && !length(evals)) { stop("For early stopping, 'evals' must have at least one element") } if (!inherits(model, "xgb.Booster") && save_best) { stop("'save_best' must be set to FALSE when using early stopping in 'xgb.cv'.") } env$begin_iteration <- begin_iteration return(NULL) }, f_before_iter = function(env, model, data, evals, iteration) NULL, f_after_iter = function(env, model, data, evals, iteration, iter_feval) { sds <- NULL if (NCOL(iter_feval) > 1) { tmp <- .summarize.feval(iter_feval, TRUE) iter_feval <- tmp$feval sds <- tmp$stdev } if (!env$checked_evnames) { eval_names <- gsub('-', '_', names(iter_feval), fixed = TRUE) if (!is.null(env$metric_name)) { env$metric_idx <- which(gsub('-', '_', env$metric_name, fixed = TRUE) == eval_names) if (length(env$metric_idx) == 0) stop("'metric_name' for early stopping is not one of the following:\n", paste(eval_names, collapse = ' '), '\n') } if (is.null(env$metric_name)) { if (NROW(iter_feval) == 1) { env$metric_idx <- 1L } else { env$metric_idx <- length(eval_names) if (env$verbose) cat('Multiple eval metrics are present. Will use ', eval_names[env$metric_idx], ' for early stopping.\n', sep = '') } } env$metric_name <- eval_names[env$metric_idx] # maximize is usually NULL when not set in xgb.train and built-in metrics if (is.null(env$maximize)) env$maximize <- grepl('(_auc|_aupr|_map|_ndcg|_pre)', env$metric_name) if (env$verbose) cat("Will train until ", env$metric_name, " hasn't improved in ", env$stopping_rounds, " rounds.\n\n", sep = '') env$best_iteration <- env$begin_iteration if (env$maximize) { env$best_score <- -Inf } else { env$best_score <- Inf } if (inherits(model, "xgb.Booster")) { best_score <- xgb.attr(model, 'best_score') if (NROW(best_score)) env$best_score <- as.numeric(best_score) best_iteration <- xgb.attr(model, 'best_iteration') if (NROW(best_iteration)) env$best_iteration <- as.numeric(best_iteration) + 1 } env$checked_evnames <- TRUE } score <- iter_feval[env$metric_idx] if ((env$maximize && score > env$best_score) || (!env$maximize && score < env$best_score)) { env$best_score <- score env$best_iteration <- iteration # save the property to attributes, so they will occur in checkpoint if (inherits(model, "xgb.Booster")) { xgb.attributes(model) <- list( best_iteration = env$best_iteration - 1, # convert to 0-based index best_score = env$best_score ) } } else if (iteration - env$best_iteration >= env$stopping_rounds) { if (env$verbose) { best_msg <- .format_eval_string(iteration, iter_feval, sds) cat("Stopping. Best iteration:\n", best_msg, "\n\n", sep = '') } env$stopped_by_max_rounds <- TRUE return(TRUE) } return(FALSE) }, f_after_training = function(env, model, data, evals, iteration, final_feval, prev_cb_res) { if (inherits(model, "xgb.Booster") && env$save_best && env$best_iteration < iteration) { # Note: it loses the attributes after being sliced, # so they have to be re-assigned afterwards. 
prev_attr <- xgb.attributes(model) if (NROW(prev_attr)) { suppressWarnings({ prev_attr <- within(prev_attr, rm("best_score", "best_iteration")) }) } .Call(XGBoosterSliceAndReplace_R, xgb.get.handle(model), 0L, env$best_iteration, 1L) if (NROW(prev_attr)) { xgb.attributes(model) <- prev_attr } } attrs_set <- list(best_iteration = env$best_iteration - 1, best_score = env$best_score) if (inherits(model, "xgb.Booster")) { xgb.attributes(model) <- attrs_set } else { for (fd in model) { xgb.attributes(fd$bst) <- attrs_set # to use in the cv.predict callback } } return( list( best_iteration = env$best_iteration, best_score = env$best_score, stopped_by_max_rounds = env$stopped_by_max_rounds ) ) } ) } .save.model.w.formatted.name <- function(model, save_name, iteration) { # Note: this throws a warning if the name doesn't have anything to format through 'sprintf' suppressWarnings({ save_name <- sprintf(save_name, iteration) }) xgb.save(model, save_name) } #' Callback for saving a model file #' #' @description #' This callback function allows saving an XGBoost model file, either periodically #' (every `save_period` iterations) or at the end. #' #' Does not leave any attribute in the booster. #' #' @param save_period Save the model to disk after every `save_period` iterations; #' 0 means save the model at the end. #' @param save_name The name or path for the saved model file. #' It can contain a [sprintf()] formatting specifier to include the integer #' iteration number in the file name. E.g., with `save_name = 'xgboost_%04d.model'`, #' the file saved at iteration 50 would be named "xgboost_0050.model". #' @return An `xgb.Callback` object, which can be passed to [xgb.train()], #' but **not** to [xgb.cv()]. #' @export xgb.cb.save.model <- function(save_period = 0, save_name = "xgboost.ubj") { if (save_period < 0) { stop("'save_period' cannot be negative") } if (!is.character(save_name) || length(save_name) != 1L) { stop("'save_name' must be a single character string referring to the file name.") } xgb.Callback( cb_name = "save_model", env = as.environment(list(save_period = save_period, save_name = save_name, last_save = 0)), f_before_training = function(env, model, data, evals, begin_iteration, end_iteration) { env$begin_iteration <- begin_iteration }, f_before_iter = NULL, f_after_iter = function(env, model, data, evals, iteration, iter_feval) { if (env$save_period > 0 && (iteration - env$begin_iteration) %% env$save_period == 0) { .save.model.w.formatted.name(model, env$save_name, iteration) env$last_save <- iteration } return(FALSE) }, f_after_training = function(env, model, data, evals, iteration, final_feval, prev_cb_res) { if (env$save_period == 0 && iteration > env$last_save) { .save.model.w.formatted.name(model, env$save_name, iteration) } } ) } #' Callback for returning cross-validation based predictions #' #' This callback function saves predictions for all of the test folds, #' and also allows saving the folds' models. #' #' @details #' Predictions are saved inside of the `pred` element, which is either a vector or a matrix, #' depending on the number of prediction outputs per data row. The order of predictions corresponds #' to the order of rows in the original dataset. Note that when a custom `folds` list is #' provided in [xgb.cv()], the predictions would only be returned properly when this list is a #' non-overlapping list of k sets of indices, as in a standard k-fold CV. The predictions would not be #' meaningful when user-provided folds have overlapping indices as in, e.g., random sampling splits.
#' When some of the indices in the training dataset are not included into user-provided `folds`, #' their prediction value would be `NA`. #' #' @param save_models A flag for whether to save the folds' models. #' @param outputmargin Whether to save margin predictions (same effect as passing this #' parameter to [predict.xgb.Booster]). #' @return An `xgb.Callback` object, which can be passed to [xgb.cv()], #' but **not** to [xgb.train()]. #' @export xgb.cb.cv.predict <- function(save_models = FALSE, outputmargin = FALSE) { xgb.Callback( cb_name = "cv_predict", env = as.environment(list(save_models = save_models, outputmargin = outputmargin)), f_before_training = function(env, model, data, evals, begin_iteration, end_iteration) { if (inherits(model, "xgb.Booster")) { stop("'cv.predict' callback is only for 'xgb.cv'.") } }, f_before_iter = NULL, f_after_iter = NULL, f_after_training = function(env, model, data, evals, iteration, final_feval, prev_cb_res) { pred <- NULL for (fd in model) { pr <- predict( fd$bst, fd$evals[[2L]], outputmargin = env$outputmargin ) if (is.null(pred)) { if (NCOL(pr) > 1L) { pred <- matrix(NA_real_, nrow(data), ncol(pr)) } else { pred <- matrix(NA_real_, nrow(data)) } } if (is.matrix(pred)) { pred[fd$index, ] <- pr } else { pred[fd$index] <- pr } } out <- list(pred = pred) if (env$save_models) { out$models <- lapply(model, function(fd) fd$bst) } return(out) } ) } .list2mat <- function(coef_list, sparse) { if (sparse) { coef_mat <- methods::new("dgRMatrix") coef_mat@p <- as.integer(c(0, cumsum(sapply(coef_list, function(x) length(x@x))))) coef_mat@j <- as.integer(unlist(lapply(coef_list, slot, "i")) - 1L) coef_mat@x <- unlist(lapply(coef_list, slot, "x")) coef_mat@Dim <- as.integer(c(length(coef_list), length(coef_list[[1L]]))) # Note: function 'xgb.gblinear.history' might later on try to slice by columns coef_mat <- methods::as(coef_mat, "CsparseMatrix") return(coef_mat) } else { return(unname(do.call(rbind, coef_list))) } } .extract.coef <- function(model, sparse) { coefs <- .internal.coef.xgb.Booster(model, add_names = FALSE) if (NCOL(coefs) > 1L) { coefs <- as.vector(coefs) } if (sparse) { coefs <- methods::as(coefs, "sparseVector") } return(coefs) } #' Callback for collecting coefficients history of a gblinear booster #' #' @details #' To keep things fast and simple, gblinear booster does not internally store the history of linear #' model coefficients at each boosting iteration. This callback provides a workaround for storing #' the coefficients' path, by extracting them after each training iteration. #' #' This callback will construct a matrix where rows are boosting iterations and columns are #' feature coefficients (same order as when calling [coef.xgb.Booster], with the intercept #' corresponding to the first column). #' #' When there is more than one coefficient per feature (e.g. multi-class classification), #' the result will be reshaped into a vector where coefficients are arranged first by features and #' then by class (e.g. first 1 through N coefficients will be for the first class, then #' coefficients N+1 through 2N for the second class, and so on). #' #' If the result has only one coefficient per feature in the data, then the resulting matrix #' will have column names matching with the feature names, otherwise (when there's more than #' one coefficient per feature) the names will be composed as 'column name' + ':' + 'class index' #' (so e.g. column 'c1' for class '0' will be named 'c1:0'). 
#' #' With [xgb.train()], the output is either a dense or a sparse matrix. #' With [xgb.cv()], it is a list (one element per fold) of such matrices. #' #' Function [xgb.gblinear.history] provides an easy way to retrieve the #' outputs from this callback. #' #' @param sparse When set to `FALSE`/`TRUE`, a dense/sparse matrix is used to store the result. #' Sparse format is useful when one expects only a subset of coefficients to be non-zero, #' when using the "thrifty" feature selector with a fairly small number of top features #' selected per iteration. #' @return An `xgb.Callback` object, which can be passed to [xgb.train()] or [xgb.cv()]. #' @seealso [xgb.gblinear.history], [coef.xgb.Booster]. #' @examples #' #### Binary classification: #' #' ## Keep the number of threads to 1 for examples #' nthread <- 1 #' data.table::setDTthreads(nthread) #' #' # In the iris dataset, it is hard to linearly separate Versicolor class from the rest #' # without considering the 2nd order interactions: #' x <- model.matrix(Species ~ .^2, iris)[, -1] #' colnames(x) #' dtrain <- xgb.DMatrix( #' scale(x), #' label = 1 * (iris$Species == "versicolor"), #' nthread = nthread #' ) #' param <- xgb.params( #' booster = "gblinear", #' objective = "reg:logistic", #' eval_metric = "auc", #' reg_lambda = 0.0003, #' reg_alpha = 0.0003, #' nthread = nthread #' ) #' #' # For 'shotgun', which is the default linear updater, using high learning_rate values may result in #' # unstable behaviour in some datasets. With this simple dataset, however, the high learning #' # rate does not break the convergence, but allows us to illustrate the typical pattern of #' # "stochastic explosion" behaviour of this lock-free algorithm at early boosting iterations. #' bst <- xgb.train( #' c(param, list(learning_rate = 1.)), #' dtrain, #' evals = list(tr = dtrain), #' nrounds = 200, #' callbacks = list(xgb.cb.gblinear.history()) #' ) #' #' # Extract the coefficients' path and plot them vs boosting iteration number: #' coef_path <- xgb.gblinear.history(bst) #' matplot(coef_path, type = "l") #' #' # With the deterministic coordinate descent updater, it is safer to use higher learning rates. #' # Will try the classical componentwise boosting which selects a single best feature per round: #' bst <- xgb.train( #' c( #' param, #' xgb.params( #' learning_rate = 0.8, #' updater = "coord_descent", #' feature_selector = "thrifty", #' top_k = 1 #' ) #' ), #' dtrain, #' evals = list(tr = dtrain), #' nrounds = 200, #' callbacks = list(xgb.cb.gblinear.history()) #' ) #' matplot(xgb.gblinear.history(bst), type = "l") #' # Componentwise boosting is known to have a similar effect to Lasso regularization. #' # Try experimenting with various values of top_k, learning_rate, nrounds, #' # as well as different feature_selectors.
#' #' # For xgb.cv: #' bst <- xgb.cv( #' c( #' param, #' xgb.params( #' learning_rate = 0.8, #' updater = "coord_descent", #' feature_selector = "thrifty", #' top_k = 1 #' ) #' ), #' dtrain, #' nfold = 5, #' nrounds = 100, #' callbacks = list(xgb.cb.gblinear.history()) #' ) #' # coefficients in the CV fold #3 #' matplot(xgb.gblinear.history(bst)[[3]], type = "l") #' #' #' #### Multiclass classification: #' dtrain <- xgb.DMatrix(scale(x), label = as.numeric(iris$Species) - 1, nthread = nthread) #' #' param <- xgb.params( #' booster = "gblinear", #' objective = "multi:softprob", #' num_class = 3, #' reg_lambda = 0.0003, #' reg_alpha = 0.0003, #' nthread = nthread #' ) #' #' # For the default linear updater 'shotgun' it sometimes is helpful #' # to use smaller learning_rate to reduce instability #' bst <- xgb.train( #' c(param, list(learning_rate = 0.5)), #' dtrain, #' evals = list(tr = dtrain), #' nrounds = 50, #' callbacks = list(xgb.cb.gblinear.history()) #' ) #' #' # Will plot the coefficient paths separately for each class: #' matplot(xgb.gblinear.history(bst, class_index = 0), type = "l") #' matplot(xgb.gblinear.history(bst, class_index = 1), type = "l") #' matplot(xgb.gblinear.history(bst, class_index = 2), type = "l") #' #' # CV: #' bst <- xgb.cv( #' c(param, list(learning_rate = 0.5)), #' dtrain, #' nfold = 5, #' nrounds = 70, #' callbacks = list(xgb.cb.gblinear.history(FALSE)) #' ) #' # 1st fold of 1st class #' matplot(xgb.gblinear.history(bst, class_index = 0)[[1]], type = "l") #' #' @export xgb.cb.gblinear.history <- function(sparse = FALSE) { xgb.Callback( cb_name = "gblinear_history", env = as.environment(list(sparse = sparse)), f_before_training = function(env, model, data, evals, begin_iteration, end_iteration) { if (!inherits(model, "xgb.Booster")) { model <- model[[1L]]$bst } if (xgb.booster_type(model) != "gblinear") { stop("Callback 'xgb.cb.gblinear.history' is only for booster='gblinear'.") } env$coef_hist <- vector("list", end_iteration - begin_iteration + 1) env$next_idx <- 1 }, f_before_iter = NULL, f_after_iter = function(env, model, data, evals, iteration, iter_feval) { if (inherits(model, "xgb.Booster")) { coef_this <- .extract.coef(model, env$sparse) } else { coef_this <- lapply(model, function(fd) .extract.coef(fd$bst, env$sparse)) } env$coef_hist[[env$next_idx]] <- coef_this env$next_idx <- env$next_idx + 1 return(FALSE) }, f_after_training = function(env, model, data, evals, iteration, final_feval, prev_cb_res) { # in case of early stopping if (env$next_idx <= length(env$coef_hist)) { env$coef_hist <- head(env$coef_hist, env$next_idx - 1) } is_booster <- inherits(model, "xgb.Booster") if (is_booster) { out <- .list2mat(env$coef_hist, env$sparse) } else { out <- lapply( X = lapply( X = seq_along(env$coef_hist[[1]]), FUN = function(i) lapply(env$coef_hist, "[[", i) ), FUN = .list2mat, env$sparse ) } if (!is.null(prev_cb_res)) { if (is_booster) { out <- rbind(prev_cb_res, out) } else { # Note: this case should never be encountered, since training cannot # be continued from the result of xgb.cv, but this code should in # theory do the job if the situation were to be encountered. 
out <- lapply( out, function(lst) { lapply( seq_along(lst), function(i) rbind(prev_cb_res[[i]], lst[[i]]) ) } ) } } feature_names <- getinfo(data, "feature_name") if (!NROW(feature_names)) { feature_names <- paste0("V", seq(1L, ncol(data))) } expected_ncols <- length(feature_names) + 1 if (is_booster) { mat_ncols <- ncol(out) } else { mat_ncols <- ncol(out[[1L]]) } if (mat_ncols %% expected_ncols == 0) { feature_names <- c("(Intercept)", feature_names) n_rep <- mat_ncols / expected_ncols if (n_rep > 1) { feature_names <- unlist( lapply( seq(1, n_rep), function(cl) paste(feature_names, cl - 1, sep = ":") ) ) } if (is_booster) { colnames(out) <- feature_names } else { out <- lapply( out, function(mat) { colnames(mat) <- feature_names return(mat) } ) } } return(out) } ) } #' Extract gblinear coefficients history #' #' A helper function to extract the matrix of linear coefficients' history #' from a gblinear model created while using the [xgb.cb.gblinear.history] #' callback (which must be added manually as by default it is not used). #' #' @details #' Note that this is an R-specific function that relies on R attributes that #' are not saved when using XGBoost's own serialization functions like [xgb.load()] #' or [xgb.load.raw()]. #' #' In order for a serialized model to be accepted by this function, one must use R #' serializers such as [saveRDS()]. #' @param model Either an `xgb.Booster` or a result of [xgb.cv()], trained #' using the [xgb.cb.gblinear.history] callback, but **not** a booster #' loaded from [xgb.load()] or [xgb.load.raw()]. #' @param class_index zero-based class index to extract the coefficients for only that #' specific class in a multinomial multiclass model. When it is `NULL`, all the #' coefficients are returned. Has no effect in non-multiclass models. #' #' @return #' For an [xgb.train()] result, a matrix (either dense or sparse) with the columns #' corresponding to iteration's coefficients and the rows corresponding to boosting iterations. #' #' For an [xgb.cv()] result, a list of such matrices is returned with the elements #' corresponding to CV folds. #' #' When there is more than one coefficient per feature (e.g. multi-class classification) #' and `class_index` is not provided, #' the result will be reshaped into a vector where coefficients are arranged first by features and #' then by class (e.g. first 1 through N coefficients will be for the first class, then #' coefficients N+1 through 2N for the second class, and so on). #' @seealso [xgb.cb.gblinear.history], [coef.xgb.Booster]. 
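#' #' @examples #' # A minimal sketch (assuming a small 'mtcars' setup similar to other examples #' # in this package); see xgb.cb.gblinear.history() for fuller examples. #' data(mtcars) #' y <- mtcars$mpg #' x <- as.matrix(mtcars[, -1]) #' dm <- xgb.DMatrix(scale(x), label = y, nthread = 1) #' model <- xgb.train( #'   data = dm, #'   params = xgb.params(booster = "gblinear", objective = "reg:squarederror", nthread = 1), #'   nrounds = 10, #'   callbacks = list(xgb.cb.gblinear.history()) #' ) #' # Rows are boosting iterations; columns are '(Intercept)' plus the features #' coef_path <- xgb.gblinear.history(model) #' dim(coef_path)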
#' @export xgb.gblinear.history <- function(model, class_index = NULL) { if (!(inherits(model, "xgb.Booster") || inherits(model, "xgb.cv.synchronous"))) stop("model must be an object of either xgb.Booster or xgb.cv.synchronous class") is_cv <- inherits(model, "xgb.cv.synchronous") if (!is_cv) { coef_path <- getElement(attributes(model), "gblinear_history") } else { coef_path <- getElement(model, "gblinear_history") } if (is.null(coef_path)) { stop("model must be trained while using the xgb.cb.gblinear.history() callback") } if (!is_cv) { num_class <- xgb.num_class(model) num_feat <- xgb.num_feature(model) } else { # in case of CV, the object is expected to have this info if (model$params$booster != "gblinear") stop("It does not appear to be a gblinear model") num_class <- NVL(model$params$num_class, 1) num_feat <- model$nfeatures if (is.null(num_feat)) stop("This xgb.cv result does not have nfeatures info") } if (!is.null(class_index) && num_class > 1 && (class_index[1] < 0 || class_index[1] >= num_class)) stop("class_index has to be within [0,", num_class - 1, "]") if (!is.null(class_index) && num_class > 1) { seq_take <- seq(1 + class_index * (num_feat + 1), (class_index + 1) * (num_feat + 1)) coef_path <- if (is.list(coef_path)) { lapply(coef_path, function(x) x[, seq_take]) } else { coef_path[, seq_take] } } return(coef_path) } .callbacks.only.train <- "save_model" .callbacks.only.cv <- "cv_predict" .process.callbacks <- function(callbacks, is_cv) { if (inherits(callbacks, "xgb.Callback")) { callbacks <- list(callbacks) } if (!is.list(callbacks)) { stop("'callbacks' must be a list.") } cb_names <- character() if (length(callbacks)) { is_callback <- sapply(callbacks, inherits, "xgb.Callback") if (!all(is_callback)) { stop("Entries in 'callbacks' must be 'xgb.Callback' objects.") } cb_names <- sapply(callbacks, function(cb) cb$cb_name) if (length(cb_names) != length(callbacks)) { stop("Passed invalid callback(s).") } if (anyDuplicated(cb_names) > 0) { stop("Callbacks must have unique names.") } if (is_cv) { if (any(.callbacks.only.train %in% cb_names)) { stop( "Passed callback(s) not supported for 'xgb.cv': ", paste(intersect(.callbacks.only.train, cb_names), collapse = ", ") ) } } else { if (any(.callbacks.only.cv %in% cb_names)) { stop( "Passed callback(s) not supported for 'xgb.train': ", paste(intersect(.callbacks.only.cv, cb_names), collapse = ", ") ) } } # Early stopping callback needs to be executed before the others if ("early_stop" %in% cb_names) { mask <- cb_names == "early_stop" callbacks <- c(list(callbacks[[which(mask)]]), callbacks[!mask]) } } return(list(callbacks = callbacks, cb_names = cb_names)) } # Note: don't try to use functions like 'append', as they will # merge the elements of the different callbacks into a single list. add.callback <- function(callbacks, cb, as_first_elt = FALSE) { if (!as_first_elt) { callbacks[[length(callbacks) + 1]] <- cb return(callbacks) } else { if (!length(callbacks)) { return(list(cb)) } new_cb <- vector("list", length(callbacks) + 1) new_cb[[1]] <- cb new_cb[seq(2, length(new_cb))] <- callbacks return(new_cb) } } has.callbacks <- function(callbacks, cb_name) { cb_names <- sapply(callbacks, function(cb) cb$cb_name) return(cb_name %in% cb_names) } xgboost-3.0.0/R-package/R/utils.R000066400000000000000000000714521476511272400164770ustar00rootroot00000000000000# # This file is for the low level reusable utility functions # that are not supposed to be visible to a user.
# # # General helper utilities ---------------------------------------------------- # # SQL-style NVL shortcut. NVL <- function(x, val) { if (is.null(x)) return(val) if (is.vector(x)) { x[is.na(x)] <- val return(x) } if (typeof(x) == 'closure') return(x) stop("typeof(x) == ", typeof(x), " is not supported by NVL") } # List of classification and ranking objectives .CLASSIFICATION_OBJECTIVES <- function() { return(c('binary:logistic', 'binary:logitraw', 'binary:hinge', 'multi:softmax', 'multi:softprob', 'rank:pairwise', 'rank:ndcg', 'rank:map')) } .RANKING_OBJECTIVES <- function() { return(c('rank:pairwise', 'rank:ndcg', 'rank:map')) } .OBJECTIVES_NON_DEFAULT_MODE <- function() { return(c("reg:logistic", "binary:logitraw", "multi:softmax")) } .BINARY_CLASSIF_OBJECTIVES <- function() { return(c("binary:logistic", "binary:hinge")) } .MULTICLASS_CLASSIF_OBJECTIVES <- function() { return("multi:softprob") } .SURVIVAL_RIGHT_CENSORING_OBJECTIVES <- function() { # nolint return(c("survival:cox", "survival:aft")) } .SURVIVAL_ALL_CENSORING_OBJECTIVES <- function() { # nolint return("survival:aft") } .REGRESSION_OBJECTIVES <- function() { return(c( "reg:squarederror", "reg:squaredlogerror", "reg:logistic", "reg:pseudohubererror", "reg:absoluteerror", "reg:quantileerror", "count:poisson", "reg:gamma", "reg:tweedie" )) } .MULTI_TARGET_OBJECTIVES <- function() { return(c( "reg:squarederror", "reg:squaredlogerror", "reg:logistic", "reg:pseudohubererror", "reg:quantileerror", "reg:gamma" )) } # # Low-level functions for boosting -------------------------------------------- # # Merges booster params with whatever is provided in ... # plus runs some checks check.booster.params <- function(params) { if (!identical(class(params), "list")) stop("params must be a list") # in R interface, allow for '.' instead of '_' in parameter names names(params) <- gsub(".", "_", names(params), fixed = TRUE) # providing a parameter multiple times makes sense only for 'eval_metric' name_freqs <- table(names(params)) multi_names <- setdiff(names(name_freqs[name_freqs > 1]), 'eval_metric') if (length(multi_names) > 0) { warning("The following parameters were provided multiple times:\n\t", paste(multi_names, collapse = ', '), "\n Only the last value for each of them will be used.\n") # While xgboost internals would choose the last value for a multiple-times parameter, # enforce it here in R as well (b/c multi-parameters might be used further in R code, # and R takes the 1st value when multiple elements with the same name are present in a list). 
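# As a hypothetical illustration of that semantic: with p <- list(a = 1, a = 2), # p[["a"]] yields 1 (the first match) in R, while the XGBoost core would use the # last value (2) - hence the loop below drops every occurrence except the last one.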
for (n in multi_names) { del_idx <- which(n == names(params)) del_idx <- del_idx[-length(del_idx)] params[del_idx] <- NULL } } # for multiclass, expect num_class to be set if (typeof(params[['objective']]) == "character" && startsWith(NVL(params[['objective']], 'x'), 'multi:') && as.numeric(NVL(params[['num_class']], 0)) < 2) { stop("'num_class' > 1 parameter must be set for multiclass classification") } # monotone_constraints parser if (!is.null(params[['monotone_constraints']]) && typeof(params[['monotone_constraints']]) != "character") { vec2str <- paste(params[['monotone_constraints']], collapse = ',') vec2str <- paste0('(', vec2str, ')') params[['monotone_constraints']] <- vec2str } # interaction constraints parser (convert from list of column indices to string) if (!is.null(params[['interaction_constraints']]) && typeof(params[['interaction_constraints']]) != "character") { # check input class if (!identical(class(params[['interaction_constraints']]), 'list')) stop('interaction_constraints should be class list') if (!all(unique(sapply(params[['interaction_constraints']], class)) %in% c('numeric', 'integer'))) { stop('interaction_constraints should be a list of numeric/integer vectors') } # recast parameter as string interaction_constraints <- sapply(params[['interaction_constraints']], function(x) paste0('[', paste(x, collapse = ','), ']')) params[['interaction_constraints']] <- paste0('[', paste(interaction_constraints, collapse = ','), ']') } # for evaluation metrics, should generate multiple entries per metric if (NROW(params[['eval_metric']]) > 1) { eval_metrics <- as.list(params[["eval_metric"]]) names(eval_metrics) <- rep("eval_metric", length(eval_metrics)) params_without_ev_metrics <- within(params, rm("eval_metric")) params <- c(params_without_ev_metrics, eval_metrics) } return(params) } # Performs some checks related to custom objective function. check.custom.obj <- function(params, objective) { if (!is.null(params[['objective']]) && !is.null(objective)) stop("Setting objectives in 'params' and 'objective' at the same time is not allowed") if (!is.null(objective) && typeof(objective) != 'closure') { if (is.character(objective)) { msg <- paste( "Argument 'objective' is only for custom objectives.", "For built-in objectives, pass the objective under 'params'.", sep = " " ) error_on_deprecated <- getOption("xgboost.strict_mode", default = FALSE) if (error_on_deprecated) { stop(msg) } else { warning(msg, " This warning will become an error in a future version.") } params$objective <- objective return(list(params = params, objective = NULL)) } stop("'objective' must be a function") } # handle the case when custom objective function was provided through params if (!is.null(params[['objective']]) && typeof(params$objective) == 'closure') { objective <- params$objective params$objective <- NULL } return(list(params = params, objective = objective)) } # Performs some checks related to custom evaluation function.
check.custom.eval <- function(params, custom_metric, maximize, early_stopping_rounds, callbacks) { if (!is.null(params[['eval_metric']]) && !is.null(custom_metric)) stop("Setting evaluation metrics in 'params' and 'custom_metric' at the same time is not allowed") if (!is.null(custom_metric) && typeof(custom_metric) != 'closure') stop("'custom_metric' must be a function") # handle a situation when custom eval function was provided through params if (!is.null(params[['eval_metric']]) && typeof(params$eval_metric) == 'closure') { custom_metric <- params$eval_metric params$eval_metric <- NULL } # require maximize to be set when custom metric and early stopping are used together if (!is.null(custom_metric) && is.null(maximize) && ( !is.null(early_stopping_rounds) || has.callbacks(callbacks, "early_stop"))) stop("Please set 'maximize' to indicate whether the evaluation metric needs to be maximized or not") return(list(params = params, custom_metric = custom_metric)) } # Update a booster handle for an iteration with dtrain data xgb.iter.update <- function(bst, dtrain, iter, objective) { if (!inherits(dtrain, "xgb.DMatrix")) { stop("dtrain must be of xgb.DMatrix class") } handle <- xgb.get.handle(bst) if (is.null(objective)) { .Call(XGBoosterUpdateOneIter_R, handle, as.integer(iter), dtrain) } else { pred <- predict( bst, dtrain, outputmargin = TRUE, training = TRUE ) gpair <- objective(pred, dtrain) n_samples <- dim(dtrain)[1L] grad <- gpair$grad hess <- gpair$hess if ((is.matrix(grad) && dim(grad)[1L] != n_samples) || (is.vector(grad) && length(grad) != n_samples) || (is.vector(grad) != is.vector(hess))) { warning(paste( "Since 2.1.0, the shape of the gradient and hessian is required to be ", "(n_samples, n_targets) or (n_samples, n_classes). Will reshape assuming ", "column-major order.", sep = "" )) grad <- matrix(grad, nrow = n_samples) hess <- matrix(hess, nrow = n_samples) } .Call( XGBoosterTrainOneIter_R, handle, dtrain, iter, grad, hess ) } return(TRUE) } # Evaluate one iteration. # Returns a named vector of evaluation metrics # with the names in a 'datasetname-metricname' format. xgb.iter.eval <- function(bst, evals, iter, custom_metric) { handle <- xgb.get.handle(bst) if (length(evals) == 0) return(NULL) evnames <- names(evals) if (is.null(custom_metric)) { msg <- .Call(XGBoosterEvalOneIter_R, handle, as.integer(iter), evals, as.list(evnames)) mat <- matrix(strsplit(msg, '\\s+|:')[[1]][-1], nrow = 2) res <- structure(as.numeric(mat[2, ]), names = mat[1, ]) } else { res <- sapply(seq_along(evals), function(j) { w <- evals[[j]] ## predict using all trees preds <- predict(bst, w, outputmargin = TRUE, iterationrange = "all") eval_res <- custom_metric(preds, w) out <- eval_res$value names(out) <- paste0(evnames[j], "-", eval_res$metric) out }) } return(res) } # # Helper functions for cross validation --------------------------------------- # # Possibly convert the labels into factors, depending on the objective. # The labels are converted into factors only when the given objective refers to the classification # or ranking tasks. convert.labels <- function(labels, objective_name) { if (objective_name %in% .CLASSIFICATION_OBJECTIVES()) { return(as.factor(labels)) } else { return(labels) } } # Generates random (stratified if needed) CV folds generate.cv.folds <- function(nfold, nrows, stratified, label, group, params) { if (NROW(group)) { if (stratified) { warning( paste0( "Stratified splitting is not supported when using 'group' attribute.", " Will use unstratified splitting." 
) ) } return(generate.group.folds(nfold, group)) } objective <- params$objective if (stratified && !is.character(objective)) { warning("Will use unstratified splitting (custom objective used)") stratified <- FALSE } # cannot stratify if label is NULL if (stratified && is.null(label)) { warning("Will use unstratified splitting (no 'labels' available)") stratified <- FALSE } # cannot do it for rank if (is.character(objective) && strtrim(objective, 5) == 'rank:') { stop("\n\tAutomatic generation of CV-folds is not implemented for ranking without 'group' field!\n", "\tConsider providing pre-computed CV-folds through the 'folds=' parameter.\n") } # shuffle rnd_idx <- sample.int(nrows) if (stratified && length(label) == length(rnd_idx)) { y <- label[rnd_idx] # - For classification, need to convert y labels to factor before making the folds, # and then do stratification by factor levels. # - For regression, leave y numeric and do stratification by quantiles. if (is.character(objective)) { y <- convert.labels(y, objective) } folds <- xgb.createFolds(y = y, k = nfold) } else { # make simple non-stratified folds kstep <- length(rnd_idx) %/% nfold folds <- list() for (i in seq_len(nfold - 1)) { folds[[i]] <- rnd_idx[seq_len(kstep)] rnd_idx <- rnd_idx[-seq_len(kstep)] } folds[[nfold]] <- rnd_idx } return(folds) } generate.group.folds <- function(nfold, group) { ngroups <- length(group) - 1 if (ngroups < nfold) { stop("DMatrix has fewer groups than folds.") } seq_groups <- seq_len(ngroups) indices <- lapply(seq_groups, function(gr) seq(group[gr] + 1, group[gr + 1])) assignments <- base::split(seq_groups, as.integer(seq_groups %% nfold)) assignments <- unname(assignments) out <- vector("list", nfold) randomized_groups <- sample(ngroups) for (idx in seq_len(nfold)) { groups_idx_test <- randomized_groups[assignments[[idx]]] groups_test <- indices[groups_idx_test] idx_test <- unlist(groups_test) attributes(idx_test)$group_test <- lengths(groups_test) attributes(idx_test)$group_train <- lengths(indices[-groups_idx_test]) out[[idx]] <- idx_test } return(out) } # Creates CV folds stratified by the values of y. # It was borrowed from caret::createFolds and simplified # by always returning an unnamed list of fold indices. xgb.createFolds <- function(y, k) { if (is.numeric(y)) { ## Group the numeric data based on their magnitudes ## and sample within those groups. ## When the number of samples is low, we may have ## issues further slicing the numeric data into ## groups. The number of groups will depend on the ## ratio of the number of folds to the sample size. ## At most, we will use quantiles. If the sample ## is too small, we just do regular unstratified ## CV cuts <- floor(length(y) / k) if (cuts < 2) cuts <- 2 if (cuts > 5) cuts <- 5 y <- cut(y, unique(stats::quantile(y, probs = seq(0, 1, length = cuts))), include.lowest = TRUE) } if (k < length(y)) { ## reset levels so that the possible levels and ## the levels in the vector are the same y <- factor(as.character(y)) numInClass <- table(y) foldVector <- vector(mode = "integer", length(y)) ## For each class, balance the fold allocation as far ## as possible, then resample the remainder. ## The final assignment of folds is also randomized. for (i in seq_along(numInClass)) { ## create a vector of integers from 1:k as many times as possible without ## going over the number of samples in the class. Note that if the number ## of samples in a class is less than k, nothing is produced here. 
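      ## For example (illustrative numbers): with k = 5 and 12 samples in the
      ## class, the next line yields rep(1:5, 2) (length 10), and the remaining
      ## 12 %% 5 = 2 assignments are sampled randomly below.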
      seqVector <- rep(seq_len(k), numInClass[i] %/% k)
      ## add enough random integers to get length(seqVector) == numInClass[i]
      if (numInClass[i] %% k > 0) seqVector <- c(seqVector, sample.int(k, numInClass[i] %% k))
      ## shuffle the integers for fold assignment and assign to this class's data
      ## seqVector[sample.int(length(seqVector))] is used to handle length(seqVector) == 1
      foldVector[y == dimnames(numInClass)$y[i]] <- seqVector[sample.int(length(seqVector))]
    }
  } else {
    foldVector <- seq(along = y)
  }

  out <- split(seq(along = y), foldVector)
  names(out) <- NULL
  out
}

#' Model Serialization and Compatibility
#'
#' @description
#' When it comes to serializing XGBoost models, it's possible to use R serializers such as
#' [save()] or [saveRDS()] to serialize an XGBoost model object, but XGBoost also provides
#' its own serializers with better compatibility guarantees, which allow loading
#' said models in other language bindings of XGBoost.
#'
#' Note that an `xgb.Booster` object (**as produced by [xgb.train()]**, see rest of the doc
#' for objects produced by [xgboost()]), outside of its core components, might also keep:
#' - Additional model configuration (accessible through [xgb.config()]), which includes
#'   model fitting parameters like `max_depth` and runtime parameters like `nthread`.
#'   These are not necessarily useful for prediction/importance/plotting.
#' - Additional R specific attributes - e.g. results of callbacks, such as evaluation logs,
#'   which are kept as a `data.table` object, accessible through
#'   `attributes(model)$evaluation_log` if present.
#'
#' The first one (configurations) does not have the same compatibility guarantees as
#' the model itself, including attributes that are set and accessed through
#' [xgb.attributes()] - that is, such configuration might be lost after loading the
#' booster in a different XGBoost version, regardless of the serializer that was used.
#' These are saved when using [saveRDS()], but will be discarded if loaded into an
#' incompatible XGBoost version. They are not saved when using XGBoost's
#' serializers from its public interface including [xgb.save()] and [xgb.save.raw()].
#'
#' The second ones (R attributes) are not part of the standard XGBoost model structure,
#' and thus are not saved when using XGBoost's own serializers. These attributes are
#' only used for informational purposes, such as keeping track of evaluation metrics as
#' the model was fit, or saving the R call that produced the model, but are otherwise
#' not used for prediction / importance / plotting / etc.
#' These R attributes are only preserved when using R's serializers.
#'
#' In addition to the regular `xgb.Booster` objects produced by [xgb.train()], the
#' function [xgboost()] produces objects with a different subclass `xgboost` (which
#' inherits from `xgb.Booster`), which keeps other additional metadata as R attributes
#' such as class names in classification problems, and which has a dedicated `predict`
#' method that uses different defaults and takes different argument names. XGBoost's
#' own serializers can work with this `xgboost` class, but as they do not keep R
#' attributes, the resulting object, when deserialized, is downcast to the regular
#' `xgb.Booster` class (i.e. it loses the metadata, and the resulting object will use
#' [predict.xgb.Booster()] instead of [predict.xgboost()]) - for these `xgboost` objects,
#' `saveRDS` might thus be a better option if the extra functionalities are needed.
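#' As an illustration of this downcasting behavior: assuming `model` is an object
#' produced by [xgboost()], the round trip `xgb.load.raw(xgb.save.raw(model))`
#' returns a plain `xgb.Booster`, whereas a [saveRDS()] / [readRDS()] round trip
#' preserves the `xgboost` subclass and its R attributes.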
#'
#' Note that XGBoost models in R starting from version `2.1.0` and onwards, and
#' XGBoost models before version `2.1.0`, have very different R object structures and
#' are incompatible with each other. Hence, models that were saved with R serializers
#' like [saveRDS()] or [save()] before version `2.1.0` will not work with later
#' `xgboost` versions and vice versa. Be aware that the structure of R model objects
#' could in theory change again in the future, so XGBoost's serializers should be
#' preferred for long-term storage.
#'
#' Furthermore, note that model objects from XGBoost might not be serializable with third-party
#' R packages like `qs` or `qs2`.
#'
#' @details
#' Use [xgb.save()] to save the XGBoost model as a stand-alone file. You may opt into
#' the JSON format by specifying the JSON extension. To read the model back, use
#' [xgb.load()].
#'
#' Use [xgb.save.raw()] to save the XGBoost model as a sequence (vector) of raw bytes
#' in a future-proof manner. Future releases of XGBoost will be able to read the raw bytes and
#' re-construct the corresponding model. To read the model back, use [xgb.load.raw()].
#' The [xgb.save.raw()] function is useful if you would like to persist the XGBoost model
#' as part of another R object.
#'
#' Use [saveRDS()] if you require the R-specific attributes that a booster might have, such
#' as evaluation logs or the model class `xgboost` instead of `xgb.Booster`, but note that
#' future compatibility of such objects is outside XGBoost's control as it relies on R's
#' serialization format (see e.g. the details section in [serialize] and [save()] from base R).
#'
#' For more details and explanation about model persistence and archival, consult the page
#' \url{https://xgboost.readthedocs.io/en/latest/tutorials/saving_model.html}.
#'
#' @examples
#' data(agaricus.train, package = "xgboost")
#'
#' bst <- xgb.train(
#'   data = xgb.DMatrix(agaricus.train$data, label = agaricus.train$label, nthread = 1),
#'   nrounds = 2,
#'   params = xgb.params(
#'     max_depth = 2,
#'     nthread = 2,
#'     objective = "binary:logistic"
#'   )
#' )
#'
#' # Save as a stand-alone file; load it with xgb.load()
#' fname <- file.path(tempdir(), "xgb_model.ubj")
#' xgb.save(bst, fname)
#' bst2 <- xgb.load(fname)
#'
#' # Save as a stand-alone file (JSON); load it with xgb.load()
#' fname <- file.path(tempdir(), "xgb_model.json")
#' xgb.save(bst, fname)
#' bst2 <- xgb.load(fname)
#'
#' # Save as a raw byte vector; load it with xgb.load.raw()
#' xgb_bytes <- xgb.save.raw(bst)
#' bst2 <- xgb.load.raw(xgb_bytes)
#'
#' # Persist XGBoost model as part of another R object
#' obj <- list(xgb_model_bytes = xgb.save.raw(bst), description = "My first XGBoost model")
#' # Persist the R object. Here, saveRDS() is okay, since it doesn't persist
#' # xgb.Booster directly. What's being persisted is the future-proof byte representation
#' # as given by xgb.save.raw().
#' fname <- file.path(tempdir(), "my_object.Rds")
#' saveRDS(obj, fname)
#' # Read back the R object
#' obj2 <- readRDS(fname)
#' # Re-construct xgb.Booster object from the bytes
#' bst2 <- xgb.load.raw(obj2$xgb_model_bytes)
#'
#' @name a-compatibility-note-for-saveRDS-save
NULL

#' @name xgboost-options
#' @title XGBoost Options
#' @description XGBoost offers an \link[base:options]{option setting} for controlling the behavior
#' of deprecated and removed function arguments.
#'
#' Some of the arguments in functions like [xgb.train()] or [predict.xgb.Booster()] have been
#' renamed from how they were in previous versions, or have been removed.
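#' For example, the argument that was previously called `watchlist` in [xgb.train()]
#' is now called `evals`.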
#'
#' In order to make the transition to newer XGBoost versions easier, some of these parameters are
#' still accepted but issue a warning when using them. \bold{Note that these warnings will become
#' errors in the future!!} - this is just a temporary workaround to make the transition easier.
#'
#' One can optionally use 'strict mode' to turn these warnings into errors, in order to ensure
#' that code calling xgboost will still work once those are removed in future releases.
#'
#' Currently, the only supported option is `xgboost.strict_mode`, which can be set to `TRUE` or
#' `FALSE` (default).
#'
#' In addition to an R option, it can also be enabled by setting the environment variable
#' `XGB_STRICT_MODE=1`. If set, this environment variable will take precedence over the option.
#' @examples
#' options("xgboost.strict_mode" = FALSE)
#' options("xgboost.strict_mode" = TRUE)
#' Sys.setenv("XGB_STRICT_MODE" = "1")
#' Sys.setenv("XGB_STRICT_MODE" = "0")
NULL

get.strict.mode.option <- function() {
  env_var_option <- Sys.getenv("XGB_STRICT_MODE")
  if (!nchar(env_var_option)) {
    return(getOption("xgboost.strict_mode", default = FALSE))
  }
  return(tolower(as.character(env_var_option)) %in% c("1", "true", "t", "yes", "y"))
}

# Lookup table for the deprecated parameters bookkeeping
deprecated_train_params <- list(
  renamed = list(
    'print.every.n' = 'print_every_n',
    'early.stop.round' = 'early_stopping_rounds',
    'training.data' = 'data',
    'dtrain' = 'data',
    'watchlist' = 'evals',
    'feval' = 'custom_metric'
  ),
  removed = character()
)
deprecated_cv_params <- deprecated_train_params
deprecated_cv_params$removed <- 'label'
deprecated_xgboost_params <- list(
  renamed = list(
    'data' = 'x',
    'label' = 'y',
    'eta' = 'learning_rate',
    'gamma' = 'min_split_loss',
    'lambda' = 'reg_lambda',
    'alpha' = 'reg_alpha',
    'min.split.loss' = 'min_split_loss',
    'reg.lambda' = 'reg_lambda',
    'reg.alpha' = 'reg_alpha',
    'watchlist' = 'evals'
  ),
  removed = c(
    'params',
    'save_period',
    'save_name',
    'xgb_model',
    'callbacks',
    'missing',
    'maximize'
  )
)
deprecated_dttree_params <- list(
  renamed = list('n_first_tree' = 'trees'),
  removed = c("feature_names", "text")
)
deprecated_plotimp_params <- list(
  renamed = list(
    'plot.height' = 'plot_height',
    'plot.width' = 'plot_width'
  ),
  removed = character()
)
deprecated_multitrees_params <- list(
  renamed = c(
    deprecated_plotimp_params$renamed,
    list('features.keep' = 'features_keep')
  ),
  removed = "feature_names"
)
deprecated_dump_params <- list(
  renamed = list('with.stats' = 'with_stats'),
  removed = character()
)
deprecated_plottree_params <- list(
  renamed = c(
    deprecated_plotimp_params$renamed,
    deprecated_dump_params$renamed,
    list('trees' = 'tree_idx')
  ),
  removed = c("show_node_id", "feature_names")
)
deprecated_predict_params <- list(
  renamed = list("ntreelimit" = "iterationrange"),
  removed = "reshape"
)
deprecated_dmatrix_params <- list(
  renamed = character(),
  removed = "info"
)
# These got moved from 'info' to function arguments
args_previous_dmatrix_info <- c("label", "weight", "base_margin", "group")

# Checks the dot-parameters for deprecated names
# (including partial matching), gives a deprecation warning,
# and sets new parameters to the old parameters' values within its parent frame.
# WARNING: has side-effects
check.deprecation <- function(
  deprecated_list,
  fn_call,
  ...,
  env = parent.frame(),
  allow_unrecognized = FALSE
) {
  params <- list(...)
  if (length(params) == 0) {
    return(NULL)
  }

  error_on_deprecated <- get.strict.mode.option()
  throw_err_or_depr_msg <- function(...)
  {
    if (error_on_deprecated) {
      stop(...)
    } else {
      warning(..., " This warning will become an error in a future version.")
    }
  }

  if (is.null(names(params)) || min(nchar(names(params))) == 0L) {
    throw_err_or_depr_msg("Passed invalid positional arguments")
  }

  list_renamed <- deprecated_list$renamed
  list_removed <- deprecated_list$removed

  has_params_arg <- length(list_renamed) == length(deprecated_train_params$renamed) &&
    list_renamed[[1L]] == deprecated_train_params$renamed[[1L]]
  is_dmatrix_constructor <- length(list_removed) == length(deprecated_dmatrix_params$removed) &&
    list_removed[[1L]] == deprecated_dmatrix_params$removed[[1L]]
  all_match <- pmatch(names(params), names(list_renamed))

  # throw error on unrecognized parameters
  if (!allow_unrecognized && anyNA(all_match)) {
    names_unrecognized <- names(params)[is.na(all_match)]

    # make it informative if they match something that goes under 'params'
    if (has_params_arg) {
      names_params <- formalArgs(xgb.params)
      names_params <- c(names_params, gsub("_", ".", names_params, fixed = TRUE))
      names_under_params <- intersect(names_unrecognized, names_params)
      if (length(names_under_params)) {
        if (error_on_deprecated) {
          stop(
            "Passed invalid function arguments: ",
            paste(head(names_under_params), collapse = ", "),
            ". These should be passed as a list to argument 'params'."
          )
        } else {
          warning(
            "Passed invalid function arguments: ",
            paste(head(names_under_params), collapse = ", "),
            ". These should be passed as a list to argument 'params'.",
            " Conversion from argument to 'params' entry will be done automatically, but this ",
            "behavior will become an error in a future version."
          )
          if (any(names_under_params %in% names(env[["params"]]))) {
            repeated_params <- intersect(names_under_params, names(env[["params"]]))
            stop(
              "Passed entries as both function argument(s) and as elements under 'params': ",
              paste(head(repeated_params), collapse = ", ")
            )
          } else {
            env[["params"]] <- c(env[["params"]], params[names_under_params])
          }
        }
        names_unrecognized <- setdiff(names_unrecognized, names_under_params)
      }
    } else if (is_dmatrix_constructor && NROW(params$info)) {
      # same thing for the earlier 'info' in 'xgb.DMatrix'
      throw_err_or_depr_msg(
        "Passed invalid argument 'info' - entries on it should be passed as direct arguments."
      )
      entries_info <- names(params$info)
      if (length(setdiff(entries_info, args_previous_dmatrix_info))) {
        stop(
          "Passed unrecognized entries under info: ",
          paste(setdiff(entries_info, args_previous_dmatrix_info) |> head(), collapse = ", ")
        )
      }
      for (entry_name in entries_info) {
        if (!is.null(env[[entry_name]])) {
          stop("Passed entry under both 'info' and function argument(s): ", entry_name)
        }
        env[[entry_name]] <- params$info[[entry_name]]
      }
      names_unrecognized <- setdiff(names_unrecognized, "info")
    }

    # check for parameters that were removed from a previous version
    names_removed <- intersect(names_unrecognized, list_removed)
    if (length(names_removed)) {
      throw_err_or_depr_msg(
        "Parameter(s) have been removed from this function: ",
        paste(names_removed, collapse = ", "), "."
      )
      names_unrecognized <- setdiff(names_unrecognized, list_removed)
    }

    # otherwise throw a generic error
    if (length(names_unrecognized)) {
      throw_err_or_depr_msg(
        "Passed unrecognized parameters: ",
        paste(head(names_unrecognized), collapse = ", "), "."
      )
    }
  } else {
    names_removed <- intersect(names(params)[is.na(all_match)], list_removed)
    if (length(names_removed)) {
      throw_err_or_depr_msg(
        "Parameter(s) have been removed from this function: ",
        paste(names_removed, collapse = ", "), "."
) } } matched_params <- list_renamed[all_match[!is.na(all_match)]] idx_orig <- seq_along(params)[!is.na(all_match)] function_args_passed <- names(as.list(fn_call))[-1L] for (idx in seq_along(matched_params)) { match_old <- names(matched_params)[[idx]] match_new <- matched_params[[idx]] throw_err_or_depr_msg( "Parameter '", match_old, "' has been renamed to '", match_new, "'." ) if (match_new %in% function_args_passed) { stop("Passed both '", match_new, "' and '", match_old, "'.") } env[[match_new]] <- params[[idx_orig[idx]]] } } xgboost-3.0.0/R-package/R/xgb.Booster.R000066400000000000000000001360741476511272400175350ustar00rootroot00000000000000# Construct an internal XGBoost Booster and get its current number of rounds. # internal utility function # Note: the number of rounds in the C booster gets reset to zero when changing # key booster parameters like 'process_type=update', but in some cases, when # replacing previous iterations, it needs to make a check that the new number # of iterations doesn't exceed the previous ones, hence it keeps track of the # current number of iterations before resetting the parameters in order to # perform the check later on. xgb.Booster <- function(params, cachelist, modelfile) { if (typeof(cachelist) != "list" || !all(vapply(cachelist, inherits, logical(1), what = 'xgb.DMatrix'))) { stop("cachelist must be a list of xgb.DMatrix objects") } ## Load existing model, dispatch for on disk model file and in memory buffer if (!is.null(modelfile)) { if (is.character(modelfile)) { ## A filename bst <- .Call(XGBoosterCreate_R, cachelist) modelfile <- path.expand(modelfile) .Call(XGBoosterLoadModel_R, xgb.get.handle(bst), enc2utf8(modelfile[1])) niter <- xgb.get.num.boosted.rounds(bst) if (length(params) > 0) { xgb.model.parameters(bst) <- params } return(list(bst = bst, niter = niter)) } else if (is.raw(modelfile)) { ## A memory buffer bst <- xgb.load.raw(modelfile) niter <- xgb.get.num.boosted.rounds(bst) xgb.model.parameters(bst) <- params return(list(bst = bst, niter = niter)) } else if (inherits(modelfile, "xgb.Booster")) { ## A booster object bst <- .Call(XGDuplicate_R, modelfile) niter <- xgb.get.num.boosted.rounds(bst) xgb.model.parameters(bst) <- params return(list(bst = bst, niter = niter)) } else { stop("modelfile must be either character filename, or raw booster dump, or xgb.Booster object") } } ## Create new model bst <- .Call(XGBoosterCreate_R, cachelist) if (length(params) > 0) { xgb.model.parameters(bst) <- params } return(list(bst = bst, niter = 0L)) } # Check whether xgb.Booster handle is null # internal utility function is.null.handle <- function(handle) { if (is.null(handle)) return(TRUE) if (!inherits(handle, "externalptr")) stop("argument type must be 'externalptr'") return(.Call(XGCheckNullPtr_R, handle)) } # Return a verified to be valid handle out of xgb.Booster # internal utility function xgb.get.handle <- function(object) { if (inherits(object, "xgb.Booster")) { handle <- object$ptr if (is.null(handle) || !inherits(handle, "externalptr")) { stop("'xgb.Booster' object is corrupted or is from an incompatible XGBoost version.") } } else { stop("argument must be an 'xgb.Booster' object.") } if (is.null.handle(handle)) { stop("invalid 'xgb.Booster' (blank 'externalptr').") } return(handle) } #' Predict method for XGBoost model #' #' Predict values on data based on XGBoost model. #' #' @param object Object of class `xgb.Booster`. 
#' @param newdata Takes `data.frame`, `matrix`, `dgCMatrix`, `dgRMatrix`, `dsparseVector`,
#'   local data file, or `xgb.DMatrix`.
#'
#'   For single-row predictions on sparse data, it is recommended to use CSR format. If passing
#'   a sparse vector, it will take it as a row vector.
#'
#'   Note that, for repeated predictions on the same data, one might want to create a DMatrix to
#'   pass here instead of passing R types like matrices or data frames, as predictions will be
#'   faster on DMatrix.
#'
#'   If `newdata` is a `data.frame`, be aware that:
#'   - Columns will be converted to numeric if they aren't already, which could potentially make
#'     the operation slower than in an equivalent `matrix` object.
#'   - The order of the columns must match with that of the data from which the model was fitted
#'     (i.e. columns will not be referenced by their names, just by their order in the data),
#'     unless passing `validate_features = TRUE` (which is not the default).
#'   - If the model was fitted to data with categorical columns, these columns must be of
#'     `factor` type here, and must use the same encoding (i.e. have the same levels).
#'   - If `newdata` contains any `factor` columns, they will be converted to base-0
#'     encoding (same as during DMatrix creation) - hence, one should not pass a `factor`
#'     under a column which during training had a different type.
#'   - Any columns with type other than `factor` will be interpreted as numeric.
#' @param missing Float value that represents missing values in data
#'   (e.g., 0 or some other extreme value).
#'
#'   This parameter is not used when `newdata` is an `xgb.DMatrix` - in such cases,
#'   it should be passed as an argument to the DMatrix constructor instead.
#' @param outputmargin Whether the prediction should be returned in the form of
#'   original untransformed sum of predictions from boosting iterations' results.
#'   E.g., setting `outputmargin = TRUE` for logistic regression would return log-odds
#'   instead of probabilities.
#' @param predleaf Whether to predict per-tree leaf indices.
#' @param predcontrib Whether to return feature contributions to individual predictions (see Details).
#' @param approxcontrib Whether to use a fast approximation for feature contributions (see Details).
#' @param predinteraction Whether to return contributions of feature interactions to individual predictions (see Details).
#' @param training Whether the prediction result is used for training. For the dart booster,
#'   predicting during training will perform dropout.
#' @param iterationrange Sequence of rounds/iterations from the model to use for prediction, specified by passing
#'   a vector of two values with the start and end numbers in the sequence (same format as R's `seq` - i.e.
#'   base-1 indexing, and inclusive of both ends).
#'
#'   For example, passing `c(1, 20)` will predict using the first twenty iterations, while passing `c(1, 1)` will
#'   predict using only the first one.
#'
#'   If passing `NULL`, will either stop at the best iteration if the model used early stopping, or use all
#'   of the iterations (rounds) otherwise.
#'
#'   If passing "all", will use all of the rounds regardless of whether the model had early stopping or not.
#'
#'   Not applicable to `gblinear` booster.
#' @param strict_shape Whether to always return an array with the same dimensions for the given prediction mode
#'   regardless of the model type - meaning that, for example, both a multi-class and a binary classification
#'   model would generate output arrays with the same number of dimensions, with the 'class' dimension having
#'   size equal to '1' for the binary model.
#'
#'   If passing `FALSE` (the default), dimensions will be simplified according to the model type, so that a
#'   binary classification model for example would not have a redundant dimension for 'class'.
#'
#'   See documentation for the return type for the exact shape of the output arrays for each prediction mode.
#' @param avoid_transpose Whether to output the resulting predictions in the same memory layout in which they
#'   are generated by the core XGBoost library, without transposing them to match the expected output shape.
#'
#'   Internally, XGBoost uses row-major order for the predictions it generates, while R arrays use column-major
#'   order, hence the result needs to be transposed in order to have the expected shape when represented as
#'   an R array or matrix, which might be a slow operation.
#'
#'   If passing `TRUE`, then the result will have dimensions in reverse order - for example, rows
#'   will be the last dimensions instead of the first dimension.
#' @param base_margin Base margin used for boosting from existing model (raw score that gets added to
#'   all observations independently of the trees in the model).
#'
#'   If supplied, should be either a vector with length equal to the number of rows in `newdata`
#'   (for objectives which produce a single score per observation), or a matrix with number of
#'   rows matching the number of rows in `newdata` and number of columns matching the number
#'   of scores estimated by the model (e.g. number of classes for multi-class classification).
#'
#'   Note that, if `newdata` is an `xgb.DMatrix` object, this argument will
#'   be ignored as it needs to be added to the DMatrix instead (e.g. by passing it as
#'   an argument in its constructor, or by calling [setinfo.xgb.DMatrix()]).
#' @param validate_features When `TRUE`, validate that the Booster's and newdata's
#'   feature_names match (only applicable when both `object` and `newdata` have feature names).
#'
#'   If the column names differ and `newdata` is not an `xgb.DMatrix`, will try to reorder
#'   the columns in `newdata` to match with the booster's.
#'
#'   If the booster has feature types and `newdata` is either an `xgb.DMatrix` or
#'   `data.frame`, will additionally verify that categorical columns are of the
#'   correct type in `newdata`, throwing an error if they do not match.
#'
#'   If passing `FALSE`, it is assumed that the feature names and types are the same,
#'   and come in the same order as in the training data.
#'
#'   Note that this check might add some sizable latency to the predictions, so it's
#'   recommended to disable it for performance-sensitive applications.
#' @param ... Not used.
#'
#' @details
#' Note that `iterationrange` would currently do nothing for predictions from "gblinear",
#' since "gblinear" doesn't keep its boosting history.
#'
#' One possible practical application of the `predleaf` option is to use the model
#' as a generator of new features which capture non-linearity and interactions,
#' e.g., as implemented in [xgb.create.features()].
#'
#' Setting `predcontrib = TRUE` makes it possible to calculate contributions of each feature to
#' individual predictions.
For "gblinear" booster, feature contributions are simply linear terms #' (feature_beta * feature_value). For "gbtree" booster, feature contributions are SHAP #' values (Lundberg 2017) that sum to the difference between the expected output #' of the model and the current prediction (where the hessian weights are used to compute the expectations). #' Setting `approxcontrib = TRUE` approximates these values following the idea explained #' in \url{http://blog.datadive.net/interpreting-random-forests/}. #' #' With `predinteraction = TRUE`, SHAP values of contributions of interaction of each pair of features #' are computed. Note that this operation might be rather expensive in terms of compute and memory. #' Since it quadratically depends on the number of features, it is recommended to perform selection #' of the most important features first. See below about the format of the returned results. #' #' The `predict()` method uses as many threads as defined in `xgb.Booster` object (all by default). #' If you want to change their number, assign a new number to `nthread` using [xgb.model.parameters<-()]. #' Note that converting a matrix to [xgb.DMatrix()] uses multiple threads too. #' #' @return #' A numeric vector or array, with corresponding dimensions depending on the prediction mode and on #' parameter `strict_shape` as follows: #' #' If passing `strict_shape=FALSE`:\itemize{ #' \item For regression or binary classification: a vector of length `nrows`. #' \item For multi-class and multi-target objectives: a matrix of dimensions `[nrows, ngroups]`. #' #' Note that objective variant `multi:softmax` defaults towards predicting most likely class (a vector #' `nrows`) instead of per-class probabilities. #' \item For `predleaf`: a matrix with one column per tree. #' #' For multi-class / multi-target, they will be arranged so that columns in the output will have #' the leafs from one group followed by leafs of the other group (e.g. order will be `group1:feat1`, #' `group1:feat2`, ..., `group2:feat1`, `group2:feat2`, ...). #' #' If there is more than one parallel tree (e.g. random forests), the parallel trees will be the #' last grouping in the resulting order, which will still be 2D. #' \item For `predcontrib`: when not multi-class / multi-target, a matrix with dimensions #' `[nrows, nfeats+1]`. The last "+ 1" column corresponds to the baseline value. #' #' For multi-class and multi-target objectives, will be an array with dimensions `[nrows, ngroups, nfeats+1]`. #' #' The contribution values are on the scale of untransformed margin (e.g., for binary classification, #' the values are log-odds deviations from the baseline). #' \item For `predinteraction`: when not multi-class / multi-target, the output is a 3D array of #' dimensions `[nrows, nfeats+1, nfeats+1]`. The off-diagonal (in the last two dimensions) #' elements represent different feature interaction contributions. The array is symmetric w.r.t. the last #' two dimensions. The "+ 1" columns corresponds to the baselines. Summing this array along the last #' dimension should produce practically the same result as `predcontrib = TRUE`. #' #' For multi-class and multi-target, will be a 4D array with dimensions `[nrows, ngroups, nfeats+1, nfeats+1]` #' } #' #' If passing `strict_shape=TRUE`, the result is always a matrix (if 2D) or array (if 3D or higher): #' - For normal predictions, the dimension is `[nrows, ngroups]`. #' - For `predcontrib=TRUE`, the dimension is `[nrows, ngroups, nfeats+1]`. 
#' - For `predinteraction=TRUE`, the dimension is `[nrows, ngroups, nfeats+1, nfeats+1]`.
#' - For `predleaf=TRUE`, the dimension is `[nrows, niter, ngroups, num_parallel_tree]`.
#'
#' If passing `avoid_transpose=TRUE`, then the dimensions in all cases will be in reverse order - for
#' example, for `predinteraction`, they will be `[nfeats+1, nfeats+1, ngroups, nrows]`
#' instead of `[nrows, ngroups, nfeats+1, nfeats+1]`.
#' @seealso [xgb.train()]
#' @references
#' 1. Scott M. Lundberg, Su-In Lee, "A Unified Approach to Interpreting Model Predictions",
#'    NIPS Proceedings 2017, \url{https://arxiv.org/abs/1705.07874}
#' 2. Scott M. Lundberg, Su-In Lee, "Consistent feature attribution for tree ensembles",
#'    \url{https://arxiv.org/abs/1706.06060}
#'
#' @examples
#' ## binary classification:
#'
#' data(agaricus.train, package = "xgboost")
#' data(agaricus.test, package = "xgboost")
#'
#' ## Keep the number of threads to 2 for examples
#' nthread <- 2
#' data.table::setDTthreads(nthread)
#'
#' train <- agaricus.train
#' test <- agaricus.test
#'
#' bst <- xgb.train(
#'   data = xgb.DMatrix(train$data, label = train$label, nthread = 1),
#'   nrounds = 5,
#'   params = xgb.params(
#'     max_depth = 2,
#'     nthread = nthread,
#'     objective = "binary:logistic"
#'   )
#' )
#'
#' # use all trees by default
#' pred <- predict(bst, test$data)
#' # use only the 1st tree
#' pred1 <- predict(bst, test$data, iterationrange = c(1, 1))
#'
#' # Predicting tree leaves:
#' # the result is an nsamples X ntrees matrix
#' pred_leaf <- predict(bst, test$data, predleaf = TRUE)
#' str(pred_leaf)
#'
#' # Predicting feature contributions to predictions:
#' # the result is an nsamples X (nfeatures + 1) matrix
#' pred_contr <- predict(bst, test$data, predcontrib = TRUE)
#' str(pred_contr)
#' # verify that contributions' sums are equal to log-odds of predictions (up to float precision):
#' summary(rowSums(pred_contr) - qlogis(pred))
#' # for the 1st record, let's inspect its features that had non-zero contribution to prediction:
#' contr1 <- pred_contr[1,]
#' contr1 <- contr1[-length(contr1)]    # drop intercept
#' contr1 <- contr1[contr1 != 0]        # drop non-contributing features
#' contr1 <- contr1[order(abs(contr1))] # order by contribution magnitude
#' old_mar <- par("mar")
#' par(mar = old_mar + c(0,7,0,0))
#' barplot(contr1, horiz = TRUE, las = 2, xlab = "contribution to prediction in log-odds")
#' par(mar = old_mar)
#'
#'
#' ## multiclass classification in iris dataset:
#'
#' lb <- as.numeric(iris$Species) - 1
#' num_class <- 3
#'
#' set.seed(11)
#'
#' bst <- xgb.train(
#'   data = xgb.DMatrix(as.matrix(iris[, -5]), label = lb, nthread = 1),
#'   nrounds = 10,
#'   params = xgb.params(
#'     max_depth = 4,
#'     nthread = 2,
#'     subsample = 0.5,
#'     objective = "multi:softprob",
#'     num_class = num_class
#'   )
#' )
#'
#' # predicting with objective "multi:softprob" returns num_class probability numbers per case:
#' pred <- predict(bst, as.matrix(iris[, -5]))
#' str(pred)
#' # convert the probabilities to softmax labels
#' pred_labels <- max.col(pred) - 1
#' # the following should result in the same error as seen in the last iteration
#' sum(pred_labels != lb) / length(lb)
#'
#' # compare with predictions from softmax:
#' set.seed(11)
#'
#' bst <- xgb.train(
#'   data = xgb.DMatrix(as.matrix(iris[, -5]), label = lb, nthread = 1),
#'   nrounds = 10,
#'   params = xgb.params(
#'     max_depth = 4,
#'     nthread = 2,
#'     subsample = 0.5,
#'     objective = "multi:softmax",
#'     num_class = num_class
#'   )
#' )
#'
#' pred <- predict(bst, as.matrix(iris[, -5]))
#' str(pred)
#' all.equal(pred,
pred_labels) #' # prediction from using only 5 iterations should result #' # in the same error as seen in iteration 5: #' pred5 <- predict(bst, as.matrix(iris[, -5]), iterationrange = c(1, 5)) #' sum(pred5 != lb) / length(lb) #' #' @export predict.xgb.Booster <- function(object, newdata, missing = NA, outputmargin = FALSE, predleaf = FALSE, predcontrib = FALSE, approxcontrib = FALSE, predinteraction = FALSE, training = FALSE, iterationrange = NULL, strict_shape = FALSE, avoid_transpose = FALSE, validate_features = FALSE, base_margin = NULL, ...) { check.deprecation(deprecated_predict_params, match.call(), ..., allow_unrecognized = TRUE) if (validate_features) { newdata <- validate.features(object, newdata) } is_dmatrix <- inherits(newdata, "xgb.DMatrix") if (is_dmatrix && !is.null(base_margin)) { stop( "'base_margin' is not supported when passing 'xgb.DMatrix' as input.", " Should be passed as argument to 'xgb.DMatrix' constructor." ) } if (is_dmatrix) { rnames <- NULL } else { rnames <- row.names(newdata) } use_as_df <- FALSE use_as_dense_matrix <- FALSE use_as_csr_matrix <- FALSE n_row <- NULL if (!is_dmatrix) { inplace_predict_supported <- !predcontrib && !predinteraction && !predleaf if (inplace_predict_supported) { booster_type <- xgb.booster_type(object) if (booster_type == "gblinear" || (booster_type == "dart" && training)) { inplace_predict_supported <- FALSE } } if (inplace_predict_supported) { if (is.matrix(newdata)) { use_as_dense_matrix <- TRUE } else if (is.data.frame(newdata)) { # note: since here it turns it into a non-data-frame list, # needs to keep track of the number of rows it had for later n_row <- nrow(newdata) newdata <- lapply( newdata, function(x) { if (is.factor(x)) { return(as.numeric(x) - 1) } else { return(as.numeric(x)) } } ) use_as_df <- TRUE } else if (inherits(newdata, "dgRMatrix")) { use_as_csr_matrix <- TRUE csr_data <- list(newdata@p, newdata@j, newdata@x, ncol(newdata)) } else if (inherits(newdata, "dsparseVector")) { use_as_csr_matrix <- TRUE n_row <- 1L i <- newdata@i - 1L if (storage.mode(i) != "integer") { storage.mode(i) <- "integer" } csr_data <- list(c(0L, length(i)), i, newdata@x, length(newdata)) } } } # if (!is_dmatrix) if (!is_dmatrix && !use_as_dense_matrix && !use_as_csr_matrix && !use_as_df) { nthread <- xgb.nthread(object) newdata <- xgb.DMatrix( newdata, missing = missing, base_margin = base_margin, nthread = NVL(nthread, -1) ) is_dmatrix <- TRUE } if (is.null(n_row)) { n_row <- nrow(newdata) } if (!is.null(iterationrange)) { if (is.character(iterationrange)) { stopifnot(iterationrange == "all") iterationrange <- c(0, 0) } else { iterationrange[1] <- iterationrange[1] - 1 # base-0 indexing } } else { ## no limit is supplied, use best best_iteration <- xgb.best_iteration(object) if (is.null(best_iteration)) { iterationrange <- c(0, 0) } else { iterationrange <- c(0, as.integer(best_iteration) + 1L) } } ## Handle the 0 length values. 
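  ## 'box' coerces a zero-length value into a length-1 vector so that the JSON
  ## config assembled below always carries scalar fields - e.g. box(integer(0))
  ## yields a length-1 NA placeholder, while box(3L) returns 3L unchanged.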
box <- function(val) { if (length(val) == 0) { cval <- vector(, 1) cval[0] <- val return(cval) } return(val) } args <- list( training = box(training), strict_shape = as.logical(strict_shape), iteration_begin = box(as.integer(iterationrange[1])), iteration_end = box(as.integer(iterationrange[2])), type = box(as.integer(0)) ) set_type <- function(type) { if (args$type != 0) { stop("One type of prediction at a time.") } return(box(as.integer(type))) } if (outputmargin) { args$type <- set_type(1) } if (predcontrib) { args$type <- set_type(if (approxcontrib) 3 else 2) } if (predinteraction) { args$type <- set_type(if (approxcontrib) 5 else 4) } if (predleaf) { args$type <- set_type(6) } json_conf <- jsonlite::toJSON(args, auto_unbox = TRUE) if (is_dmatrix) { arr <- .Call( XGBoosterPredictFromDMatrix_R, xgb.get.handle(object), newdata, json_conf ) } else if (use_as_dense_matrix) { arr <- .Call( XGBoosterPredictFromDense_R, xgb.get.handle(object), newdata, missing, json_conf, base_margin ) } else if (use_as_csr_matrix) { arr <- .Call( XGBoosterPredictFromCSR_R, xgb.get.handle(object), csr_data, missing, json_conf, base_margin ) } else if (use_as_df) { arr <- .Call( XGBoosterPredictFromColumnar_R, xgb.get.handle(object), newdata, missing, json_conf, base_margin ) } ## Needed regardless of whether strict shape is being used. if ((predcontrib || predinteraction) && !is.null(colnames(newdata))) { cnames <- c(colnames(newdata), "(Intercept)") dim_names <- vector(mode = "list", length = length(dim(arr))) dim_names[[1L]] <- cnames if (predinteraction) dim_names[[2L]] <- cnames .Call(XGSetArrayDimNamesInplace_R, arr, dim_names) } if (NROW(rnames)) { if (is.null(dim(arr))) { .Call(XGSetVectorNamesInplace_R, arr, rnames) } else { dim_names <- dimnames(arr) if (is.null(dim_names)) { dim_names <- vector(mode = "list", length = length(dim(arr))) } dim_names[[length(dim_names)]] <- rnames .Call(XGSetArrayDimNamesInplace_R, arr, dim_names) } } if (!avoid_transpose && is.array(arr)) { arr <- aperm(arr) } return(arr) } validate.features <- function(bst, newdata) { if (is.character(newdata)) { # this will be encountered when passing file paths return(newdata) } if (inherits(newdata, "sparseVector")) { # in this case, newdata won't have metadata return(newdata) } if (is.vector(newdata)) { newdata <- as.matrix(newdata) } booster_names <- getinfo(bst, "feature_name") checked_names <- FALSE if (NROW(booster_names)) { try_reorder <- FALSE if (inherits(newdata, "xgb.DMatrix")) { curr_names <- getinfo(newdata, "feature_name") } else { curr_names <- colnames(newdata) try_reorder <- TRUE } if (NROW(curr_names)) { checked_names <- TRUE if (length(curr_names) != length(booster_names) || any(curr_names != booster_names)) { if (!try_reorder) { stop("Feature names in 'newdata' do not match with booster's.") } else { if (inherits(newdata, "data.table")) { newdata <- newdata[, booster_names, with = FALSE] } else { newdata <- newdata[, booster_names, drop = FALSE] } } } } # if (NROW(curr_names)) { } # if (NROW(booster_names)) { if (inherits(newdata, c("data.frame", "xgb.DMatrix"))) { booster_types <- getinfo(bst, "feature_type") if (!NROW(booster_types)) { # Note: types in the booster are optional. Other interfaces # might not even save it as booster attributes for example, # even if the model uses categorical features. 
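      # In that case, skip the type checks below and accept 'newdata' as-is.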
      return(newdata)
    }
    if (inherits(newdata, "xgb.DMatrix")) {
      curr_types <- getinfo(newdata, "feature_type")
      if (length(curr_types) != length(booster_types) || any(curr_types != booster_types)) {
        stop("Feature types in 'newdata' do not match with booster's.")
      }
    }
    if (inherits(newdata, "data.frame")) {
      is_factor <- sapply(newdata, is.factor)
      if (any(is_factor != (booster_types == "c"))) {
        stop(
          paste0(
            "Feature types in 'newdata' do not match with booster's for same columns (by ",
            ifelse(checked_names, "name", "position"),
            ")."
          )
        )
      }
    }
  }
  return(newdata)
}

#' Accessors for serializable attributes of a model
#'
#' These methods allow manipulating the key-value attribute strings of an XGBoost model.
#'
#' @details
#' The primary purpose of XGBoost model attributes is to store some meta data about the model.
#' Note that they are a separate concept from the object attributes in R.
#' Specifically, they refer to key-value strings that can be attached to an XGBoost model,
#' stored together with the model's binary representation, and accessed later
#' (from R or any other interface).
#' In contrast, any R attribute assigned to an R object of `xgb.Booster` class
#' would not be saved by [xgb.save()] because an XGBoost model is an external memory object
#' and its serialization is handled externally.
#' Also, setting an attribute that has the same name as one of XGBoost's parameters wouldn't
#' change the value of that parameter for a model.
#' Use [xgb.model.parameters<-()] to set or change model parameters.
#'
#' The `xgb.attributes<-` setter either updates the existing attributes or adds one or several
#' new ones, but it doesn't delete the other existing attributes.
#'
#' Important: since this modifies the booster's C object, semantics for assignment here
#' will differ from R's, as any object reference to the same booster will be modified
#' too, while assignment of R attributes through `attributes(model)$<attr> <- <value>`
#' will follow the usual copy-on-write R semantics (see [xgb.copy.Booster()] for an
#' example of these behaviors).
#'
#' @param object Object of class `xgb.Booster`. **Will be modified in-place** when assigning to it.
#' @param name A non-empty character string specifying which attribute is to be accessed.
#' @param value For `xgb.attr<-`, a value of an attribute; for `xgb.attributes<-`,
#'   it is a list (or an object coercible to a list) with the names of attributes to set
#'   and the elements corresponding to attribute values.
#'   Non-character values are converted to character.
#'   When an attribute value is not a scalar, only the first index is used.
#'   Use `NULL` to remove an attribute.
#' @return
#' - `xgb.attr()` returns either a string value of an attribute
#'   or `NULL` if an attribute wasn't stored in a model.
#' - `xgb.attributes()` returns a list of all attributes stored in a model,
#'   or an empty list if a model has no stored attributes.
#'
#' @examples
#' data(agaricus.train, package = "xgboost")
#' train <- agaricus.train
#'
#' bst <- xgb.train(
#'   data = xgb.DMatrix(train$data, label = train$label, nthread = 1),
#'   nrounds = 2,
#'   params = xgb.params(
#'     max_depth = 2,
#'     nthread = 2,
#'     objective = "binary:logistic"
#'   )
#' )
#'
#' xgb.attr(bst, "my_attribute") <- "my attribute value"
#' print(xgb.attr(bst, "my_attribute"))
#' xgb.attributes(bst) <- list(a = 123, b = "abc")
#'
#' fname <- file.path(tempdir(), "xgb.ubj")
#' xgb.save(bst, fname)
#' bst1 <- xgb.load(fname)
#' print(xgb.attr(bst1, "my_attribute"))
#' print(xgb.attributes(bst1))
#'
#' # deletion:
#' xgb.attr(bst1, "my_attribute") <- NULL
#' print(xgb.attributes(bst1))
#' xgb.attributes(bst1) <- list(a = NULL, b = NULL)
#' print(xgb.attributes(bst1))
#'
#' @rdname xgb.attr
#' @export
xgb.attr <- function(object, name) {
  if (is.null(name) || nchar(as.character(name[1])) == 0) stop("invalid attribute name")
  handle <- xgb.get.handle(object)
  out <- .Call(XGBoosterGetAttr_R, handle, as.character(name[1]))
  if (!NROW(out) || !nchar(out)) {
    return(NULL)
  }
  if (!is.null(out)) {
    if (name %in% c("best_iteration", "best_score")) {
      out <- as.numeric(out)
    }
  }
  return(out)
}

#' @rdname xgb.attr
#' @export
`xgb.attr<-` <- function(object, name, value) {
  name <- as.character(name[1])
  if (!NROW(name) || !nchar(name)) stop("invalid attribute name")
  handle <- xgb.get.handle(object)
  if (!is.null(value)) {
    # Coerce the elements to be scalar strings.
    # Q: should we warn user about non-scalar elements?
    if (is.numeric(value[1])) {
      value <- format(value[1], digits = 17)
    } else {
      value <- as.character(value[1])
    }
  }
  .Call(XGBoosterSetAttr_R, handle, name, value)
  return(object)
}

#' @rdname xgb.attr
#' @export
xgb.attributes <- function(object) {
  handle <- xgb.get.handle(object)
  attr_names <- .Call(XGBoosterGetAttrNames_R, handle)
  if (!NROW(attr_names)) return(list())
  out <- lapply(attr_names, function(name) xgb.attr(object, name))
  names(out) <- attr_names
  return(out)
}

#' @rdname xgb.attr
#' @export
`xgb.attributes<-` <- function(object, value) {
  a <- as.list(value)
  if (is.null(names(a)) || any(nchar(names(a)) == 0)) {
    stop("attribute names cannot be empty strings")
  }
  for (i in seq_along(a)) {
    xgb.attr(object, names(a[i])) <- a[[i]]
  }
  return(object)
}

#' Accessors for model parameters as JSON string
#'
#' @details
#' Note that assignment is performed in-place on the booster C object, which, unlike assignment
#' of R attributes, doesn't follow typical copy-on-write semantics for assignment - i.e. all references
#' to the same booster will also get updated.
#'
#' See [xgb.copy.Booster()] for an example of this behavior.
#'
#' @param object Object of class `xgb.Booster`. **Will be modified in-place** when assigning to it.
#' @param value A list.
#' @return Parameters as a list.
#' @examples
#' data(agaricus.train, package = "xgboost")
#'
#' ## Keep the number of threads to 1 for examples
#' nthread <- 1
#' data.table::setDTthreads(nthread)
#' train <- agaricus.train
#'
#' bst <- xgb.train(
#'   data = xgb.DMatrix(train$data, label = train$label, nthread = 1),
#'   nrounds = 2,
#'   params = xgb.params(
#'     max_depth = 2,
#'     nthread = nthread,
#'     objective = "binary:logistic"
#'   )
#' )
#'
#' config <- xgb.config(bst)
#'
#' @rdname xgb.config
#' @export
xgb.config <- function(object) {
  handle <- xgb.get.handle(object)
  return(jsonlite::fromJSON(.Call(XGBoosterSaveJsonConfig_R, handle)))
}

#' @rdname xgb.config
#' @export
`xgb.config<-` <- function(object, value) {
  handle <- xgb.get.handle(object)
  .Call(
    XGBoosterLoadJsonConfig_R,
    handle,
    jsonlite::toJSON(value, auto_unbox = TRUE, null = "null")
  )
  return(object)
}

#' Accessors for model parameters
#'
#' Only the setter for XGBoost parameters is currently implemented.
#'
#' @details
#' Just like [xgb.attr()], this function will make in-place modifications
#' on the booster object which do not follow typical R assignment semantics - that is,
#' all references to the same booster will also be updated, unlike assignment of R
#' attributes which follow copy-on-write semantics.
#'
#' See [xgb.copy.Booster()] for an example of this behavior.
#'
#' Be aware that setting parameters of a fitted booster related to training continuation / updates
#' will reset its number of rounds indicator to zero.
#' @param object Object of class `xgb.Booster`. **Will be modified in-place**.
#' @param value A list (or an object coercible to a list) with the names of parameters to set
#'   and the elements corresponding to parameter values.
#' @return The same booster `object`, which gets modified in-place.
#' @examples
#' data(agaricus.train, package = "xgboost")
#'
#' train <- agaricus.train
#'
#' bst <- xgb.train(
#'   data = xgb.DMatrix(train$data, label = train$label, nthread = 1),
#'   nrounds = 2,
#'   params = xgb.params(
#'     max_depth = 2,
#'     learning_rate = 1,
#'     nthread = 2,
#'     objective = "binary:logistic"
#'   )
#' )
#'
#' xgb.model.parameters(bst) <- list(learning_rate = 0.1)
#'
#' @rdname xgb.model.parameters
#' @export
`xgb.model.parameters<-` <- function(object, value) {
  if (length(value) == 0) return(object)
  p <- as.list(value)
  if (is.null(names(p)) || any(nchar(names(p)) == 0)) {
    stop("parameter names cannot be empty strings")
  }
  names(p) <- gsub(".", "_", names(p), fixed = TRUE)
  p <- lapply(p, function(x) {
    if (is.vector(x) && length(x) == 1) {
      return(as.character(x)[1])
    } else {
      return(jsonlite::toJSON(x, auto_unbox = TRUE))
    }
  })
  handle <- xgb.get.handle(object)
  for (i in seq_along(p)) {
    .Call(XGBoosterSetParam_R, handle, names(p[i]), p[[i]])
  }
  return(object)
}

#' @rdname getinfo
#' @export
getinfo.xgb.Booster <- function(object, name) {
  name <- as.character(head(name, 1L))
  allowed_fields <- c("feature_name", "feature_type")
  if (!(name %in% allowed_fields)) {
    stop("getinfo: name must be one of the following: ", paste(allowed_fields, collapse = ", "))
  }
  handle <- xgb.get.handle(object)
  out <- .Call(
    XGBoosterGetStrFeatureInfo_R,
    handle,
    name
  )
  if (!NROW(out)) {
    return(NULL)
  }
  return(out)
}

#' @rdname getinfo
#' @export
setinfo.xgb.Booster <- function(object, name, info) {
  name <- as.character(head(name, 1L))
  allowed_fields <- c("feature_name", "feature_type")
  if (!(name %in% allowed_fields)) {
    stop("setinfo: unknown info name ", name)
  }
  info <- as.character(info)
  handle <- xgb.get.handle(object)
  .Call(
    XGBoosterSetStrFeatureInfo_R,
    handle,
    name,
    info
  )
  return(TRUE)
}

#' Get number of boosting rounds in a fitted booster
#'
#' @param model,x A fitted `xgb.Booster` model.
#' @return The number of rounds saved in the model as an integer.
#' @details Note that setting booster parameters related to training
#'   continuation / updates through [xgb.model.parameters<-()] will reset the
#'   number of rounds to zero.
#' @export
#' @rdname xgb.get.num.boosted.rounds
xgb.get.num.boosted.rounds <- function(model) {
  return(.Call(XGBoosterBoostedRounds_R, xgb.get.handle(model)))
}

#' @rdname xgb.get.num.boosted.rounds
#' @export
length.xgb.Booster <- function(x) {
  return(xgb.get.num.boosted.rounds(x))
}

#' Slice Booster by Rounds
#'
#' Creates a new booster including only a selected range of rounds / iterations
#' from an existing booster, as given by the sequence `seq(start, end, step)`.
#'
#' @details
#' Note that any R attributes that the booster might have will not be copied into
#' the resulting object.
#'
#' @param model,x A fitted `xgb.Booster` object, which is to be sliced by taking only a subset
#'   of its rounds / iterations.
#' @param start Start of the slice (base-1 and inclusive, like R's [seq()]).
#' @param end End of the slice (base-1 and inclusive, like R's [seq()]).
#'   Passing a value of zero here is equivalent to passing the full number of rounds in the
#'   booster object.
#' @param step Step size of the slice. Passing '1' will take every round in the sequence defined by
#'   `(start, end)`, while passing '2' will take every second value, and so on.
#' @return A sliced booster object containing only the requested rounds.
#' @examples
#' data(mtcars)
#'
#' y <- mtcars$mpg
#' x <- as.matrix(mtcars[, -1])
#'
#' dm <- xgb.DMatrix(x, label = y, nthread = 1)
#' model <- xgb.train(data = dm, params = xgb.params(nthread = 1), nrounds = 5)
#' model_slice <- xgb.slice.Booster(model, 1, 3)
#' # Prediction for first three rounds
#' predict(model, x, predleaf = TRUE)[, 1:3]
#'
#' # The new model has only those rounds, so
#' # a full prediction from it is equivalent
#' predict(model_slice, x, predleaf = TRUE)
#' @export
#' @rdname xgb.slice.Booster
xgb.slice.Booster <- function(model, start, end = xgb.get.num.boosted.rounds(model), step = 1L) {
  # This makes the slice mimic the behavior of R's 'seq',
  # which truncates on the end of the slice when the step
  # doesn't reach it.
  if (end > start && step > 1) {
    d <- (end - start + 1) / step
    if (d != floor(d)) {
      end <- start + step * ceiling(d) - 1
    }
  }
  return(
    .Call(
      XGBoosterSlice_R,
      xgb.get.handle(model),
      start - 1,
      end,
      step
    )
  )
}

#' @export
#' @rdname xgb.slice.Booster
#' @param i The indices - must be an increasing sequence as generated by e.g. `seq(...)`.
`[.xgb.Booster` <- function(x, i) {
  if (missing(i)) {
    return(xgb.slice.Booster(x, 1, 0))
  }
  if (length(i) == 1) {
    return(xgb.slice.Booster(x, i, i))
  }
  steps <- diff(i)
  if (any(steps < 0)) {
    stop("Can only slice booster with ascending sequences.")
  }
  if (length(unique(steps)) > 1) {
    stop("Can only slice booster with fixed-step sequences.")
  }
  return(xgb.slice.Booster(x, i[1L], i[length(i)], steps[1L]))
}

#' Get Feature Names from Booster
#'
#' @description
#' Returns the feature / variable / column names from a fitted
#' booster object, which are set automatically during the call to [xgb.train()]
#' from the DMatrix names, or which can be set manually through [setinfo()].
#'
#' If the object doesn't have feature names, will return `NULL`.
#'
#' It is equivalent to calling `getinfo(object, "feature_name")`.
#' @param object An `xgb.Booster` object.
#' @param ... Not used.
#' @export
variable.names.xgb.Booster <- function(object, ...) {
  return(getinfo(object, "feature_name"))
}

xgb.nthread <- function(bst) {
  config <- xgb.config(bst)
  out <- strtoi(config$learner$generic_param$nthread)
  return(out)
}

xgb.booster_type <- function(bst) {
  config <- xgb.config(bst)
  out <- config$learner$learner_train_param$booster
  return(out)
}

xgb.num_class <- function(bst) {
  config <- xgb.config(bst)
  out <- strtoi(config$learner$learner_model_param$num_class)
  return(out)
}

xgb.feature_names <- function(bst) {
  return(getinfo(bst, "feature_name"))
}

xgb.feature_types <- function(bst) {
  return(getinfo(bst, "feature_type"))
}

xgb.num_feature <- function(bst) {
  handle <- xgb.get.handle(bst)
  return(.Call(XGBoosterGetNumFeature_R, handle))
}

xgb.best_iteration <- function(bst) {
  out <- xgb.attr(bst, "best_iteration")
  if (!NROW(out) || !nchar(out)) {
    out <- NULL
  }
  return(out)
}

xgb.has_categ_features <- function(bst) {
  return("c" %in% xgb.feature_types(bst))
}

#' Extract coefficients from linear booster
#'
#' @description
#' Extracts the coefficients from a 'gblinear' booster object,
#' as produced by [xgb.train()] when using parameter `booster="gblinear"`.
#'
#' Note: this function will error out if passing a booster model
#' which is not of "gblinear" type.
#'
#' @param object A fitted booster of 'gblinear' type.
#' @param ... Not used.
#' @return The extracted coefficients:
#' - If there is only one coefficient per column in the data, will be returned as a
#'   vector, potentially containing the feature names if available, with the intercept
#'   as the first element.
#' - If there is more than one coefficient per column in the data (e.g. when using
#'   `objective="multi:softmax"`), will be returned as a matrix with dimensions equal
#'   to `[num_features + 1, num_cols]`, with the intercepts as the first row. Note that the column
#'   (classes in multi-class classification) dimension will not be named.
#'
#' The intercept returned here will include the 'base_score' parameter (unlike the 'bias'
#' or the last coefficient in the model dump, which doesn't have 'base_score' added to it),
#' hence one should get the same values from calling `predict(..., outputmargin = TRUE)` and
#' from performing a matrix multiplication with `model.matrix(~., ...)`.
#'
#' Be aware that the coefficients are obtained by first converting them to strings and
#' back, so there will always be some very small loss of precision compared to the actual
#' coefficients as used by [predict.xgb.Booster].
#' @examples
#' library(xgboost)
#'
#' data(mtcars)
#'
#' y <- mtcars[, 1]
#' x <- as.matrix(mtcars[, -1])
#'
#' dm <- xgb.DMatrix(data = x, label = y, nthread = 1)
#' params <- xgb.params(booster = "gblinear", nthread = 1)
#' model <- xgb.train(data = dm, params = params, nrounds = 2)
#' coef(model)
#' @export
coef.xgb.Booster <- function(object, ...)
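# S3 method for stats::coef(). The actual extraction (including the
# multi-class coefficient matrix case) is done by the internal helper below.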
{ return(.internal.coef.xgb.Booster(object, add_names = TRUE)) } .internal.coef.xgb.Booster <- function(object, add_names = TRUE) { booster_type <- xgb.booster_type(object) if (booster_type != "gblinear") { stop("Coefficients are not defined for Booster type ", booster_type) } model_json <- jsonlite::fromJSON(rawToChar(xgb.save.raw(object, raw_format = "json"))) base_score <- model_json$learner$learner_model_param$base_score num_feature <- as.numeric(model_json$learner$learner_model_param$num_feature) weights <- model_json$learner$gradient_booster$model$weights n_cols <- length(weights) / (num_feature + 1) if (n_cols != floor(n_cols) || n_cols < 1) { stop("Internal error: could not determine shape of coefficients.") } sep <- num_feature * n_cols coefs <- weights[seq(1, sep)] intercepts <- weights[seq(sep + 1, length(weights))] intercepts <- intercepts + as.numeric(base_score) if (add_names) { feature_names <- xgb.feature_names(object) if (!NROW(feature_names)) { # This mimics the default naming in R which names columns as "V1..N" # when names are needed but not available feature_names <- paste0("V", seq(1L, num_feature)) } feature_names <- c("(Intercept)", feature_names) } if (n_cols == 1L) { out <- c(intercepts, coefs) if (add_names) { .Call(XGSetVectorNamesInplace_R, out, feature_names) } } else { coefs <- matrix(coefs, nrow = num_feature, byrow = TRUE) dim(intercepts) <- c(1L, n_cols) out <- rbind(intercepts, coefs) out_names <- vector(mode = "list", length = 2) if (add_names) { out_names[[1L]] <- feature_names } if (inherits(object, "xgboost")) { metadata <- attributes(object)$metadata if (NROW(metadata$y_levels)) { out_names[[2L]] <- metadata$y_levels } else if (NROW(metadata$y_names)) { out_names[[2L]] <- metadata$y_names } } .Call(XGSetArrayDimNamesInplace_R, out, out_names) } return(out) } #' Deep-copies a Booster Object #' #' Creates a deep copy of an 'xgb.Booster' object, such that the #' C object pointer contained will be a different object, and hence functions #' like [xgb.attr()] will not affect the object from which it was copied. #' #' @param model An 'xgb.Booster' object. #' @return A deep copy of `model` - it will be identical in every way, but C-level #' functions called on that copy will not affect the `model` variable. 
#' @examples #' library(xgboost) #' #' data(mtcars) #' #' y <- mtcars$mpg #' x <- mtcars[, -1] #' #' dm <- xgb.DMatrix(x, label = y, nthread = 1) #' #' model <- xgb.train( #' data = dm, #' params = xgb.params(nthread = 1), #' nrounds = 3 #' ) #' #' # Set an arbitrary attribute kept at the C level #' xgb.attr(model, "my_attr") <- 100 #' print(xgb.attr(model, "my_attr")) #' #' # Just assigning to a new variable will not create #' # a deep copy - C object pointer is shared, and in-place #' # modifications will affect both objects #' model_shallow_copy <- model #' xgb.attr(model_shallow_copy, "my_attr") <- 333 #' # 'model' was also affected by this change: #' print(xgb.attr(model, "my_attr")) #' #' model_deep_copy <- xgb.copy.Booster(model) #' xgb.attr(model_deep_copy, "my_attr") <- 444 #' # 'model' was NOT affected by this change #' # (keeps previous value that was assigned before) #' print(xgb.attr(model, "my_attr")) #' #' # Verify that the new object was actually modified #' print(xgb.attr(model_deep_copy, "my_attr")) #' @export xgb.copy.Booster <- function(model) { if (!inherits(model, "xgb.Booster")) { stop("'model' must be an 'xgb.Booster' object.") } return(.Call(XGDuplicate_R, model)) } #' Check if two boosters share the same C object #' #' Checks whether two booster objects refer to the same underlying C object. #' #' @details #' As booster objects (as returned by e.g. [xgb.train()]) contain an R 'externalptr' #' object, they don't follow typical copy-on-write semantics of other R objects - that is, if #' one assigns a booster to a different variable and modifies that new variable through in-place #' methods like [xgb.attr<-()], the modification will be applied to both the old and the new #' variable, unlike typical R assignments which would only modify the latter. #' #' This function allows checking whether two booster objects share the same 'externalptr', #' regardless of the R attributes that they might have. #' #' In order to duplicate a booster in such a way that the copy wouldn't share the same #' 'externalptr', one can use function [xgb.copy.Booster()]. #' @param obj1 Booster model to compare with `obj2`. #' @param obj2 Booster model to compare with `obj1`. #' @return Either `TRUE` or `FALSE` according to whether the two boosters share the #' underlying C object. #' @seealso [xgb.copy.Booster()] #' @examples #' library(xgboost) #' #' data(mtcars) #' #' y <- mtcars$mpg #' x <- as.matrix(mtcars[, -1]) #' #' model <- xgb.train( #' params = xgb.params(nthread = 1), #' data = xgb.DMatrix(x, label = y, nthread = 1), #' nrounds = 3 #' ) #' #' model_shallow_copy <- model #' xgb.is.same.Booster(model, model_shallow_copy) # same C object #' #' model_deep_copy <- xgb.copy.Booster(model) #' xgb.is.same.Booster(model, model_deep_copy) # different C objects #' #' # In-place assignments modify all references, #' # but not full/deep copies of the booster #' xgb.attr(model_shallow_copy, "my_attr") <- 111 #' xgb.attr(model, "my_attr") # gets modified #' xgb.attr(model_deep_copy, "my_attr") # doesn't get modified #' @export xgb.is.same.Booster <- function(obj1, obj2) { if (!inherits(obj1, "xgb.Booster") || !inherits(obj2, "xgb.Booster")) { stop("'xgb.is.same.Booster' is only applicable to 'xgb.Booster' objects.") } return( .Call( XGPointerEqComparison_R, xgb.get.handle(obj1), xgb.get.handle(obj2) ) ) } #' @title Print xgb.Booster #' @description Print information about `xgb.Booster`. #' @param x An `xgb.Booster` object. #' @param ... Not used. 
#' @return The same `x` object, returned invisibly.
#' @examples
#' data(agaricus.train, package = "xgboost")
#' train <- agaricus.train
#'
#' bst <- xgb.train(
#'   data = xgb.DMatrix(train$data, label = train$label, nthread = 1),
#'   nrounds = 2,
#'   params = xgb.params(
#'     max_depth = 2,
#'     nthread = 2,
#'     objective = "binary:logistic"
#'   )
#' )
#'
#' attr(bst, "myattr") <- "memo"
#'
#' print(bst)
#' @method print xgb.Booster
#' @export
print.xgb.Booster <- function(x, ...) {
  # this lets it error out when the object comes from an earlier R XGBoost version
  handle <- xgb.get.handle(x)

  cat('##### xgb.Booster\n')

  R_attrs <- attributes(x)
  if (!is.null(R_attrs$call)) {
    cat('call:\n  ')
    print(R_attrs$call)
  }

  cat('# of features:', xgb.num_feature(x), '\n')
  cat('# of rounds: ', xgb.get.num.boosted.rounds(x), '\n')

  attr_names <- .Call(XGBoosterGetAttrNames_R, handle)
  if (NROW(attr_names)) {
    cat('xgb.attributes:\n')
    cat("  ", paste(attr_names, collapse = ", "), "\n")
  }

  additional_attr <- setdiff(names(R_attrs), .reserved_cb_names)
  if (NROW(additional_attr)) {
    cat("callbacks:\n  ", paste(additional_attr, collapse = ", "), "\n")
  }

  if (!is.null(R_attrs$evaluation_log)) {
    cat('evaluation_log:\n')
    print(R_attrs$evaluation_log, row.names = FALSE, topn = 2)
  }

  return(invisible(x))
}
xgboost-3.0.0/R-package/R/xgb.DMatrix.R000066400000000000000000001324011476511272400174560ustar00rootroot00000000000000#' Construct xgb.DMatrix object
#'
#' Construct an 'xgb.DMatrix' object from a given data source, which can then be passed to functions
#' such as [xgb.train()] or [predict()].
#'
#' Function `xgb.QuantileDMatrix()` will construct a DMatrix with quantization for the histogram
#' method already applied to it, which can be used to reduce memory usage (compared to first
#' creating a regular DMatrix and then generating a quantization out of it) when using the histogram
#' method (`tree_method = "hist"`, which is the default algorithm), but is not usable for the
#' sorted-indices method (`tree_method = "exact"`), nor for the approximate method
#' (`tree_method = "approx"`).
#'
#' @param data Data from which to create a DMatrix, which can then be used for fitting models or
#'   for getting predictions out of a fitted model.
#'
#'   Supported input types are as follows:
#'   - `matrix` objects, with types `numeric`, `integer`, or `logical`.
#'   - `data.frame` objects, with columns of types `numeric`, `integer`, `logical`, or `factor`
#'
#'   Note that xgboost uses base-0 encoding for categorical types, hence `factor` types (which use base-1
#'   encoding) will be converted inside the function call. Be aware that the encoding used for `factor`
#'   types is not kept as part of the model, so in subsequent calls to `predict`, it is the user's
#'   responsibility to ensure that factor columns have the same levels as the ones from which the DMatrix
#'   was constructed.
#'
#'   Other column types are not supported.
#'   - CSR matrices, as class `dgRMatrix` from package `Matrix`.
#'   - CSC matrices, as class `dgCMatrix` from package `Matrix`.
#'
#'   These are **not** supported by `xgb.QuantileDMatrix`.
#'   - XGBoost's own binary format for DMatrices, as produced by [xgb.DMatrix.save()].
#'   - Single-row CSR matrices, as class `dsparseVector` from package `Matrix`, which is interpreted
#'     as a single row (only when making predictions from a fitted model).
#'
#' @param label Label of the training data. For classification problems, should be passed encoded as
#'   integers with numeration starting at zero.
#' @param weight Weight for each instance.
#'
#'   Note that, for ranking tasks, weights are per-group: one weight
#'   is assigned to each group (not each data point). This is because we
#'   only care about the relative ordering of data points within each group,
#'   so it doesn't make sense to assign weights to individual data points.
#' @param base_margin Base margin used for boosting from existing model.
#'
#'   In the case of multi-output models, one can also pass multi-dimensional base_margin.
#' @param missing A float value to represent missing values in data (not used when creating DMatrix
#'   from text files). It is useful to change when a zero, infinite, or some other
#'   extreme value represents missing values in data.
#' @param silent Whether to suppress printing an informational message after loading from a file.
#' @param feature_names Set names for features. Overrides column names in data frame and matrix.
#'
#'   Note: columns are not referenced by name when calling `predict`, so the column order there
#'   must be the same as in the DMatrix construction, regardless of the column names.
#' @param feature_types Set types for features.
#'
#'   If `data` is a `data.frame` and `feature_types` is not supplied,
#'   feature types will be deduced automatically from the column types.
#'
#'   Otherwise, one can pass a character vector with the same length as number of columns in `data`,
#'   with the following possible values:
#'   - "c", which represents categorical columns.
#'   - "q", which represents numeric columns.
#'   - "int", which represents integer columns.
#'   - "i", which represents logical (boolean) columns.
#'
#'   Note that, while categorical types are treated differently from the rest for model fitting
#'   purposes, the other types do not influence the generated model, but have effects in other
#'   functionalities such as feature importances.
#'
#'   **Important**: Categorical features, if specified manually through `feature_types`, must
#'   be encoded as integers with numeration starting at zero, and the same encoding needs to be
#'   applied when passing data to [predict()]. Even if passing `factor` types, the encoding will
#'   not be saved, so make sure that `factor` columns passed to `predict` have the same `levels`.
#' @param nthread Number of threads used for creating DMatrix.
#' @param group Group size for all ranking groups.
#' @param qid Query ID for data samples, used for ranking.
#' @param label_lower_bound Lower bound for survival training.
#' @param label_upper_bound Upper bound for survival training.
#' @param feature_weights Set feature weights for column sampling.
#' @param data_split_mode Not used yet. This parameter is for distributed training, which is not yet available for the R package.
#' @inheritParams xgb.train
#' @return An 'xgb.DMatrix' object. If calling `xgb.QuantileDMatrix`, it will have additional
#'   subclass `xgb.QuantileDMatrix`.
#'
#' @details
#' Note that DMatrix objects are not serializable through R functions such as [saveRDS()] or [save()].
#' If a DMatrix gets serialized and then de-serialized (for example, when saving data in an R session or caching
#' chunks in an Rmd file), the resulting object will not be usable anymore and will need to be reconstructed
#' from the original source of data.
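#'
#' As a small illustration of the base-0 re-encoding of `factor` columns mentioned
#' above (a sketch of what happens internally, not part of the API):
#' ```r
#' df <- data.frame(x = factor(c("a", "b", "a")))
#' as.numeric(df$x) - 1  # 0 1 0 -- the base-0 codes stored in the DMatrix
#' ```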
#' #' @examples #' data(agaricus.train, package = "xgboost") #' #' ## Keep the number of threads to 1 for examples #' nthread <- 1 #' data.table::setDTthreads(nthread) #' dtrain <- with( #' agaricus.train, xgb.DMatrix(data, label = label, nthread = nthread) #' ) #' fname <- file.path(tempdir(), "xgb.DMatrix.data") #' xgb.DMatrix.save(dtrain, fname) #' dtrain <- xgb.DMatrix(fname, nthread = 1) #' @export #' @rdname xgb.DMatrix xgb.DMatrix <- function( data, label = NULL, weight = NULL, base_margin = NULL, missing = NA, silent = FALSE, feature_names = colnames(data), feature_types = NULL, nthread = NULL, group = NULL, qid = NULL, label_lower_bound = NULL, label_upper_bound = NULL, feature_weights = NULL, data_split_mode = "row", ... ) { check.deprecation(deprecated_dmatrix_params, match.call(), ...) if (!is.null(group) && !is.null(qid)) { stop("Either one of 'group' or 'qid' should be NULL") } if (data_split_mode != "row") { stop("'data_split_mode' is not supported yet.") } nthread <- as.integer(NVL(nthread, -1L)) if (typeof(data) == "character") { if (length(data) > 1) { stop( "'data' has class 'character' and length ", length(data), ".\n 'data' accepts either a numeric matrix or a single filename." ) } data <- path.expand(data) if (data_split_mode == "row") { data_split_mode <- 0L } else if (data_split_mode == "col") { data_split_mode <- 1L } else { stop("Passed invalid 'data_split_mode': ", data_split_mode) } handle <- .Call(XGDMatrixCreateFromURI_R, data, as.integer(silent), data_split_mode) } else if (is.matrix(data)) { handle <- .Call( XGDMatrixCreateFromMat_R, data, missing, nthread ) } else if (inherits(data, "dgCMatrix")) { handle <- .Call( XGDMatrixCreateFromCSC_R, data@p, data@i, data@x, nrow(data), missing, nthread ) } else if (inherits(data, "dgRMatrix")) { handle <- .Call( XGDMatrixCreateFromCSR_R, data@p, data@j, data@x, ncol(data), missing, nthread ) } else if (inherits(data, "dsparseVector")) { indptr <- c(0L, as.integer(length(data@i))) ind <- as.integer(data@i) - 1L handle <- .Call( XGDMatrixCreateFromCSR_R, indptr, ind, data@x, length(data), missing, nthread ) } else if (is.data.frame(data)) { tmp <- .process.df.for.dmatrix(data, feature_types) feature_types <- tmp$feature_types handle <- .Call( XGDMatrixCreateFromDF_R, tmp$lst, missing, nthread ) rm(tmp) } else { stop("xgb.DMatrix does not support construction from ", typeof(data)) } dmat <- handle attributes(dmat) <- list( class = "xgb.DMatrix", fields = new.env() ) .set.dmatrix.fields( dmat = dmat, label = label, weight = weight, base_margin = base_margin, feature_names = feature_names, feature_types = feature_types, group = group, qid = qid, label_lower_bound = label_lower_bound, label_upper_bound = label_upper_bound, feature_weights = feature_weights ) return(dmat) } .process.df.for.dmatrix <- function(df, feature_types) { if (!nrow(df) || !ncol(df)) { stop("'data' is an empty data.frame.") } if (!is.null(feature_types)) { if (!is.character(feature_types) || length(feature_types) != ncol(df)) { stop( "'feature_types' must be a character vector with one entry per column in 'data'." 
      )
    }
  } else {
    feature_types <- sapply(df, function(col) {
      if (is.factor(col)) {
        return("c")
      } else if (is.integer(col)) {
        return("int")
      } else if (is.logical(col)) {
        return("i")
      } else {
        if (!is.numeric(col)) {
          stop("Invalid type in dataframe.")
        }
        return("float")
      }
    })
  }

  lst <- lapply(df, function(col) {
    is_factor <- is.factor(col)
    col <- as.numeric(col)
    if (is_factor) {
      col <- col - 1
    }
    return(col)
  })

  return(list(lst = lst, feature_types = feature_types))
}

.set.dmatrix.fields <- function(
  dmat,
  label,
  weight,
  base_margin,
  feature_names,
  feature_types,
  group,
  qid,
  label_lower_bound,
  label_upper_bound,
  feature_weights
) {
  if (!is.null(label)) {
    setinfo(dmat, "label", label)
  }
  if (!is.null(weight)) {
    setinfo(dmat, "weight", weight)
  }
  if (!is.null(base_margin)) {
    setinfo(dmat, "base_margin", base_margin)
  }
  if (!is.null(feature_names)) {
    setinfo(dmat, "feature_name", feature_names)
  }
  if (!is.null(feature_types)) {
    setinfo(dmat, "feature_type", feature_types)
  }
  if (!is.null(group)) {
    setinfo(dmat, "group", group)
  }
  if (!is.null(qid)) {
    setinfo(dmat, "qid", qid)
  }
  if (!is.null(label_lower_bound)) {
    setinfo(dmat, "label_lower_bound", label_lower_bound)
  }
  if (!is.null(label_upper_bound)) {
    setinfo(dmat, "label_upper_bound", label_upper_bound)
  }
  if (!is.null(feature_weights)) {
    setinfo(dmat, "feature_weights", feature_weights)
  }
}

#' @param ref The training dataset that provides quantile information, needed when creating
#'   validation/test dataset with [xgb.QuantileDMatrix()]. Supplying the training DMatrix
#'   as a reference means that the same quantisation applied to the training data is
#'   applied to the validation/test data.
#' @param max_bin The number of histogram bins, which should be consistent with the training parameter
#'   `max_bin`.
#'
#'   This is only supported when constructing a QuantileDMatrix.
#' @export
#' @rdname xgb.DMatrix
xgb.QuantileDMatrix <- function(
  data,
  label = NULL,
  weight = NULL,
  base_margin = NULL,
  missing = NA,
  feature_names = colnames(data),
  feature_types = NULL,
  nthread = NULL,
  group = NULL,
  qid = NULL,
  label_lower_bound = NULL,
  label_upper_bound = NULL,
  feature_weights = NULL,
  ref = NULL,
  max_bin = NULL
) {
  nthread <- as.integer(NVL(nthread, -1L))
  if (!is.null(ref) && !inherits(ref, "xgb.DMatrix")) {
    stop("'ref' must be an xgb.DMatrix object.")
  }

  # Note: when passing an integer matrix, it won't get cast to numeric.
  # Since 'int' values as understood by languages like C cannot have missing values,
  # R represents missingness there by assigning them a value equal to the minimum
  # integer. The 'missing' value here is set before the data, so in case of integers,
  # need to make the conversion manually beforehand.
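  # (Illustrative detail: R stores 'NA_integer_' as the minimum representable
  # 32-bit integer, so that sentinel - fetched below as a double - is what acts
  # as the effective 'missing' marker for integer/logical matrices.)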
  if (is.matrix(data) && storage.mode(data) %in% c("integer", "logical") && is.na(missing)) {
    missing <- .Call(XGGetRNAIntAsDouble)
  }

  iterator_env <- as.environment(
    list(
      data = data,
      label = label,
      weight = weight,
      base_margin = base_margin,
      missing = missing,
      feature_names = feature_names,
      feature_types = feature_types,
      group = group,
      qid = qid,
      label_lower_bound = label_lower_bound,
      label_upper_bound = label_upper_bound,
      feature_weights = feature_weights
    )
  )
  data_iterator <- .single.data.iterator(iterator_env)

  env_keep_alive <- new.env()
  env_keep_alive$keepalive <- NULL

  # Note: the ProxyDMatrix has its finalizer assigned in the R externalptr
  # object, but that finalizer will only be called once the object is
  # garbage-collected, which doesn't happen immediately after it goes out
  # of scope, hence this piece of code to trigger its destruction earlier
  # and free memory right away.
  proxy_handle <- .make.proxy.handle()
  on.exit({
    .Call(XGDMatrixFree_R, proxy_handle)
  })
  iterator_next <- function() {
    return(xgb.ProxyDMatrix(proxy_handle, data_iterator, env_keep_alive))
  }
  iterator_reset <- function() {
    env_keep_alive$keepalive <- NULL
    return(data_iterator$f_reset(iterator_env))
  }
  calling_env <- environment()

  dmat <- .Call(
    XGQuantileDMatrixCreateFromCallback_R,
    iterator_next,
    iterator_reset,
    calling_env,
    proxy_handle,
    nthread,
    missing,
    max_bin,
    ref
  )

  attributes(dmat) <- list(
    class = c("xgb.DMatrix", "xgb.QuantileDMatrix"),
    fields = attributes(proxy_handle)$fields
  )
  return(dmat)
}

#' XGBoost Data Iterator
#'
#' @description
#' Interface to create a custom data iterator in order to construct a DMatrix
#' from external memory.
#'
#' This function is responsible for generating an R object structure containing callback
#' functions and an environment shared with them.
#'
#' The output structure from this function is then meant to be passed to [xgb.ExtMemDMatrix()],
#' which will consume the data and create a DMatrix from it by executing the callback functions.
#'
#' For more information, and for a usage example, see the documentation for [xgb.ExtMemDMatrix()].
#'
#' @param env An R environment to pass to the callback functions supplied here, which can be
#'   used to keep track of variables to determine how to handle the batches.
#'
#'   For example, one might want to keep track of an iteration number in this environment in order
#'   to know which part of the data to pass next.
#' @param f_next `function(env)` which is responsible for:
#'   - Accessing or retrieving the next batch of data in the iterator.
#'   - Supplying this data by calling function [xgb.DataBatch()] on it and returning the result.
#'   - Keeping track of where in the iterator batch it is or will go next, which can for example
#'     be done by modifying variables in the `env` variable that is passed here.
#'   - Signaling whether there are more batches to be consumed or not, by returning `NULL`
#'     when the stream of data ends (all batches in the iterator have been consumed), or the result from
#'     calling [xgb.DataBatch()] when there are more batches in the line to be consumed.
#' @param f_reset `function(env)` which is responsible for resetting the data iterator
#'   (i.e. taking it back to the first batch, called before and after the sequence of batches
#'   has been consumed).
#'
#'   Note that, after resetting the iterator, the batches will be accessed again, so the same data
#'   (and in the same order) must be passed in subsequent iterations.
#' @return An `xgb.DataIter` object, containing the same inputs supplied here, which can then
#'   be passed to [xgb.ExtMemDMatrix()].
#' @seealso [xgb.ExtMemDMatrix()], [xgb.DataBatch()].
#' @export
xgb.DataIter <- function(env = new.env(), f_next, f_reset) {
  if (!is.function(f_next)) {
    stop("'f_next' must be a function.")
  }
  if (!is.function(f_reset)) {
    stop("'f_reset' must be a function.")
  }
  out <- list(
    env = env,
    f_next = f_next,
    f_reset = f_reset
  )
  class(out) <- "xgb.DataIter"
  return(out)
}

.qdm.single.fnext <- function(env) {
  curr_iter <- env[["iter"]]
  if (curr_iter >= 1L) {
    return(NULL)
  }

  on.exit({
    env[["iter"]] <- curr_iter + 1L
  })
  return(
    xgb.DataBatch(
      data = env[["data"]],
      label = env[["label"]],
      weight = env[["weight"]],
      base_margin = env[["base_margin"]],
      feature_names = env[["feature_names"]],
      feature_types = env[["feature_types"]],
      group = env[["group"]],
      qid = env[["qid"]],
      label_lower_bound = env[["label_lower_bound"]],
      label_upper_bound = env[["label_upper_bound"]],
      feature_weights = env[["feature_weights"]]
    )
  )
}

.qdm.single.freset <- function(env) {
  env[["iter"]] <- 0L
  return(invisible(NULL))
}

.single.data.iterator <- function(env) {
  env[["iter"]] <- 0L
  return(xgb.DataIter(env, .qdm.single.fnext, .qdm.single.freset))
}

# Only for internal usage
.make.proxy.handle <- function() {
  out <- .Call(XGProxyDMatrixCreate_R)
  attributes(out) <- list(
    class = c("xgb.DMatrix", "xgb.ProxyDMatrix"),
    fields = new.env()
  )
  return(out)
}

#' Structure for Data Batches
#'
#' @description
#' Helper function to supply data in batches of a data iterator when
#' constructing a DMatrix from external memory through [xgb.ExtMemDMatrix()]
#' or through [xgb.QuantileDMatrix.from_iterator()].
#'
#' This function is **only** meant to be called inside of a callback function (which
#' is passed as argument to function [xgb.DataIter()] to construct a data iterator)
#' when constructing a DMatrix through external memory - otherwise, one should call
#' [xgb.DMatrix()] or [xgb.QuantileDMatrix()].
#'
#' The object that results from calling this function directly is **not** like
#' an `xgb.DMatrix` - i.e. cannot be used to train a model, nor to get predictions - only
#' possible usage is to supply data to an iterator, from which a DMatrix is then constructed.
#'
#' For more information and for example usage, see the documentation for [xgb.ExtMemDMatrix()].
#' @inheritParams xgb.DMatrix
#' @param data The data belonging to this batch.
#'
#'   Note that not all of the input types supported by [xgb.DMatrix()] are possible
#'   to pass here. Supported types are:
#'   - `matrix`, with types `numeric`, `integer`, and `logical`. Note that for types
#'     `integer` and `logical`, missing values might not be automatically recognized
#'     as such - see the documentation for parameter `missing` in [xgb.ExtMemDMatrix()]
#'     for details on this.
#'   - `data.frame`, with the same types as supported by 'xgb.DMatrix' and same
#'     conversions applied to it. See the documentation for parameter `data` in
#'     [xgb.DMatrix()] for details on it.
#'   - CSR matrices, as class `dgRMatrix` from package "Matrix".
#' @return An object of class `xgb.DataBatch`, which is just a list containing the
#'   data and parameters passed here. It does **not** inherit from `xgb.DMatrix`.
#' @seealso [xgb.DataIter()], [xgb.ExtMemDMatrix()].
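#' @examples
#' # A minimal sketch of how 'xgb.DataBatch' is meant to be called from inside
#' # an iterator callback (see xgb.ExtMemDMatrix() for a complete, multi-batch
#' # example). The single-batch setup below is purely illustrative.
#' data(mtcars)
#' it_env <- as.environment(list(iter = 0))
#' f_next <- function(env) {
#'   if (env$iter >= 1) return(NULL)  # only one batch, then signal end of stream
#'   env$iter <- env$iter + 1
#'   xgb.DataBatch(data = as.matrix(mtcars[, -1]), label = mtcars$mpg)
#' }
#' f_reset <- function(env) {
#'   env$iter <- 0
#' }
#' data_iterator <- xgb.DataIter(env = it_env, f_next = f_next, f_reset = f_reset)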
#' @export
xgb.DataBatch <- function(
  data,
  label = NULL,
  weight = NULL,
  base_margin = NULL,
  feature_names = colnames(data),
  feature_types = NULL,
  group = NULL,
  qid = NULL,
  label_lower_bound = NULL,
  label_upper_bound = NULL,
  feature_weights = NULL
) {
  stopifnot(inherits(data, c("matrix", "data.frame", "dgRMatrix")))
  out <- list(
    data = data,
    label = label,
    weight = weight,
    base_margin = base_margin,
    feature_names = feature_names,
    feature_types = feature_types,
    group = group,
    qid = qid,
    label_lower_bound = label_lower_bound,
    label_upper_bound = label_upper_bound,
    feature_weights = feature_weights
  )
  class(out) <- "xgb.DataBatch"
  return(out)
}

# This is only for internal usage, class is not exposed to the user.
xgb.ProxyDMatrix <- function(proxy_handle, data_iterator, env_keep_alive) {
  env_keep_alive$keepalive <- NULL
  lst <- data_iterator$f_next(data_iterator$env)
  if (is.null(lst)) {
    return(0L)
  }
  if (!inherits(lst, "xgb.DataBatch")) {
    stop("DataIter 'f_next' must return either NULL or the result from calling 'xgb.DataBatch'.")
  }

  if (!is.null(lst$group) && !is.null(lst$qid)) {
    stop("Either one of 'group' or 'qid' should be NULL")
  }
  if (is.data.frame(lst$data)) {
    data <- lst$data
    lst$data <- NULL
    tmp <- .process.df.for.dmatrix(data, lst$feature_types)
    lst$feature_types <- tmp$feature_types
    data <- NULL
    env_keep_alive$keepalive <- tmp
    .Call(XGProxyDMatrixSetDataColumnar_R, proxy_handle, tmp$lst)
  } else if (is.matrix(lst$data)) {
    env_keep_alive$keepalive <- lst
    .Call(XGProxyDMatrixSetDataDense_R, proxy_handle, lst$data)
  } else if (inherits(lst$data, "dgRMatrix")) {
    tmp <- list(p = lst$data@p, j = lst$data@j, x = lst$data@x, ncol = ncol(lst$data))
    env_keep_alive$keepalive <- tmp
    .Call(XGProxyDMatrixSetDataCSR_R, proxy_handle, tmp)
  } else {
    stop("'data' has unsupported type.")
  }

  .set.dmatrix.fields(
    dmat = proxy_handle,
    label = lst$label,
    weight = lst$weight,
    base_margin = lst$base_margin,
    feature_names = lst$feature_names,
    feature_types = lst$feature_types,
    group = lst$group,
    qid = lst$qid,
    label_lower_bound = lst$label_lower_bound,
    label_upper_bound = lst$label_upper_bound,
    feature_weights = lst$feature_weights
  )
  return(1L)
}

#' DMatrix from External Data
#'
#' @description
#' Create a special type of XGBoost 'DMatrix' object from external data
#' supplied by an [xgb.DataIter()] object, potentially passed in batches from a
#' bigger set that might not fit entirely in memory.
#'
#' The data supplied by the iterator is accessed on-demand as needed, multiple times,
#' without being concatenated, but note that fields like 'label' **will** be
#' concatenated from multiple calls to the data iterator.
#'
#' For more information, see the guide 'Using XGBoost External Memory Version':
#' \url{https://xgboost.readthedocs.io/en/stable/tutorials/external_memory.html}
#' @details
#' Be aware that construction of external data DMatrices \bold{will cache data on disk}
#' in a compressed format, under the path supplied in `cache_prefix`.
#'
#' External data is not supported for the exact tree method.
#' @inheritParams xgb.DMatrix
#' @param data_iterator A data iterator structure as returned by [xgb.DataIter()],
#'   which includes an environment shared between function calls, and functions to access
#'   the data in batches on-demand.
#' @param cache_prefix The path of the cache file; the caller must initialize all the directories in this path.
#' @param missing A float value to represent missing values in data.
#'
#'   Note that, while functions like [xgb.DMatrix()] can take a generic `NA` and interpret it
#'   correctly for different types like `numeric` and `integer`, if an `NA` value is passed here,
#'   it will not be adapted for different input types.
#'
#'   For example, in R `integer` types, missing values are represented by integer number `-2147483648`
#'   (since machine 'integer' types do not have an inherent 'NA' value) - hence, if one passes `NA`,
#'   which is interpreted as a floating-point NaN by [xgb.ExtMemDMatrix()] and by
#'   [xgb.QuantileDMatrix.from_iterator()], these integer missing values will not be treated as missing.
#'   This should not pose any problem for `numeric` types, since they do have an inherent NaN value.
#' @return An 'xgb.DMatrix' object, with subclass 'xgb.ExtMemDMatrix', in which the data is not
#'   held internally but accessed through the iterator when needed.
#' @seealso [xgb.DataIter()], [xgb.DataBatch()], [xgb.QuantileDMatrix.from_iterator()]
#' @examples
#' data(mtcars)
#'
#' # This custom environment will be passed to the iterator
#' # functions at each call. It is up to the user to keep
#' # track of the iteration number in this environment.
#' iterator_env <- as.environment(
#'   list(
#'     iter = 0,
#'     x = mtcars[, -1],
#'     y = mtcars[, 1]
#'   )
#' )
#'
#' # Data is passed in two batches.
#' # In this example, batches are obtained by subsetting the 'x' variable.
#' # This is not advantageous to do, since the data is already loaded in memory
#' # and can be passed in full in one go, but there can be situations in which
#' # only a subset of the data will fit in the computer's memory, and it can
#' # be loaded in batches that are accessed one-at-a-time only.
#' iterator_next <- function(iterator_env) {
#'   curr_iter <- iterator_env[["iter"]]
#'   if (curr_iter >= 2) {
#'     # there are only two batches, so this signals end of the stream
#'     return(NULL)
#'   }
#'
#'   if (curr_iter == 0) {
#'     x_batch <- iterator_env[["x"]][1:16, ]
#'     y_batch <- iterator_env[["y"]][1:16]
#'   } else {
#'     x_batch <- iterator_env[["x"]][17:32, ]
#'     y_batch <- iterator_env[["y"]][17:32]
#'   }
#'   on.exit({
#'     iterator_env[["iter"]] <- curr_iter + 1
#'   })
#'
#'   # Function 'xgb.DataBatch' must be called manually
#'   # at each batch with all the appropriate attributes,
#'   # such as feature names and feature types.
#'   return(xgb.DataBatch(data = x_batch, label = y_batch))
#' }
#'
#' # This moves the iterator back to its beginning
#' iterator_reset <- function(iterator_env) {
#'   iterator_env[["iter"]] <- 0
#' }
#'
#' data_iterator <- xgb.DataIter(
#'   env = iterator_env,
#'   f_next = iterator_next,
#'   f_reset = iterator_reset
#' )
#' cache_prefix <- tempdir()
#'
#' # DMatrix will be constructed from the iterator's batches
#' dm <- xgb.ExtMemDMatrix(data_iterator, cache_prefix, nthread = 1)
#'
#' # After construction, can be used as a regular DMatrix
#' params <- xgb.params(nthread = 1, objective = "reg:squarederror")
#' model <- xgb.train(data = dm, nrounds = 2, params = params)
#'
#' # Predictions can also be called on it, and should be the same
#' # as if the data were passed differently.
#' pred_dm <- predict(model, dm) #' pred_mat <- predict(model, as.matrix(mtcars[, -1])) #' @export xgb.ExtMemDMatrix <- function( data_iterator, cache_prefix = tempdir(), missing = NA, nthread = NULL ) { stopifnot(inherits(data_iterator, "xgb.DataIter")) stopifnot(is.character(cache_prefix)) cache_prefix <- path.expand(cache_prefix) nthread <- as.integer(NVL(nthread, -1L)) # The purpose of this environment is to keep data alive (protected from the # garbage collector) after setting the data in the proxy dmatrix. The data # held here (under name 'keepalive') should be unset (leaving it unprotected # for garbage collection) before the start of each data iteration batch and # during each iterator reset. env_keep_alive <- new.env() env_keep_alive$keepalive <- NULL proxy_handle <- .make.proxy.handle() on.exit({ .Call(XGDMatrixFree_R, proxy_handle) }) iterator_next <- function() { return(xgb.ProxyDMatrix(proxy_handle, data_iterator, env_keep_alive)) } iterator_reset <- function() { env_keep_alive$keepalive <- NULL return(data_iterator$f_reset(data_iterator$env)) } calling_env <- environment() dmat <- .Call( XGDMatrixCreateFromCallback_R, iterator_next, iterator_reset, calling_env, proxy_handle, nthread, missing, cache_prefix ) attributes(dmat) <- list( class = c("xgb.DMatrix", "xgb.ExtMemDMatrix"), fields = attributes(proxy_handle)$fields ) return(dmat) } #' QuantileDMatrix from External Data #' #' @description #' Create an `xgb.QuantileDMatrix` object (exact same class as would be returned by #' calling function [xgb.QuantileDMatrix()], with the same advantages and limitations) from #' external data supplied by [xgb.DataIter()], potentially passed in batches from #' a bigger set that might not fit entirely in memory, same way as [xgb.ExtMemDMatrix()]. #' #' Note that, while external data will only be loaded through the iterator (thus the full data #' might not be held entirely in-memory), the quantized representation of the data will get #' created in-memory, being concatenated from multiple calls to the data iterator. The quantized #' version is typically lighter than the original data, so there might be cases in which this #' representation could potentially fit in memory even if the full data does not. #' #' For more information, see the guide 'Using XGBoost External Memory Version': #' \url{https://xgboost.readthedocs.io/en/stable/tutorials/external_memory.html} #' @inheritParams xgb.ExtMemDMatrix #' @inheritParams xgb.QuantileDMatrix #' @return An 'xgb.DMatrix' object, with subclass 'xgb.QuantileDMatrix'. 
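#' @examples
#' # Sketch only: this assumes a 'data_iterator' object built with xgb.DataIter(),
#' # e.g. exactly as in the example for xgb.ExtMemDMatrix().
#' \dontrun{
#' qdm <- xgb.QuantileDMatrix.from_iterator(data_iterator, nthread = 1)
#' model <- xgb.train(data = qdm, params = xgb.params(nthread = 1), nrounds = 2)
#' }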
#' @seealso [xgb.DataIter()], [xgb.DataBatch()], [xgb.ExtMemDMatrix()], #' [xgb.QuantileDMatrix()] #' @export xgb.QuantileDMatrix.from_iterator <- function( # nolint data_iterator, missing = NA, nthread = NULL, ref = NULL, max_bin = NULL ) { stopifnot(inherits(data_iterator, "xgb.DataIter")) if (!is.null(ref) && !inherits(ref, "xgb.DMatrix")) { stop("'ref' must be an xgb.DMatrix object.") } nthread <- as.integer(NVL(nthread, -1L)) env_keep_alive <- new.env() env_keep_alive$keepalive <- NULL proxy_handle <- .make.proxy.handle() on.exit({ .Call(XGDMatrixFree_R, proxy_handle) }) iterator_next <- function() { return(xgb.ProxyDMatrix(proxy_handle, data_iterator, env_keep_alive)) } iterator_reset <- function() { env_keep_alive$keepalive <- NULL return(data_iterator$f_reset(data_iterator$env)) } calling_env <- environment() dmat <- .Call( XGQuantileDMatrixCreateFromCallback_R, iterator_next, iterator_reset, calling_env, proxy_handle, nthread, missing, max_bin, ref ) attributes(dmat) <- list( class = c("xgb.DMatrix", "xgb.QuantileDMatrix"), fields = attributes(proxy_handle)$fields ) return(dmat) } #' Check whether DMatrix object has a field #' #' Checks whether an xgb.DMatrix object has a given field assigned to #' it, such as weights, labels, etc. #' @param object The DMatrix object to check for the given `info` field. #' @param info The field to check for presence or absence in `object`. #' @seealso [xgb.DMatrix()], [getinfo.xgb.DMatrix()], [setinfo.xgb.DMatrix()] #' @examples #' x <- matrix(1:10, nrow = 5) #' dm <- xgb.DMatrix(x, nthread = 1) #' #' # 'dm' so far does not have any fields set #' xgb.DMatrix.hasinfo(dm, "label") #' #' # Fields can be added after construction #' setinfo(dm, "label", 1:5) #' xgb.DMatrix.hasinfo(dm, "label") #' @export xgb.DMatrix.hasinfo <- function(object, info) { if (!inherits(object, "xgb.DMatrix")) { stop("Object is not an 'xgb.DMatrix'.") } if (.Call(XGCheckNullPtr_R, object)) { warning("xgb.DMatrix object is invalid. Must be constructed again.") return(FALSE) } return(NVL(attr(object, "fields")[[info]], FALSE)) } #' Dimensions of xgb.DMatrix #' #' Returns a vector of numbers of rows and of columns in an `xgb.DMatrix`. #' #' @param x Object of class `xgb.DMatrix` #' #' @details #' Note: since [nrow()] and [ncol()] internally use [dim()], they can also #' be directly used with an `xgb.DMatrix` object. #' #' @examples #' data(agaricus.train, package = "xgboost") #' #' train <- agaricus.train #' dtrain <- xgb.DMatrix(train$data, label = train$label, nthread = 2) #' #' stopifnot(nrow(dtrain) == nrow(train$data)) #' stopifnot(ncol(dtrain) == ncol(train$data)) #' stopifnot(all(dim(dtrain) == dim(train$data))) #' #' @export dim.xgb.DMatrix <- function(x) { c(.Call(XGDMatrixNumRow_R, x), .Call(XGDMatrixNumCol_R, x)) } #' Handling of column names of `xgb.DMatrix` #' #' Only column names are supported for `xgb.DMatrix`, thus setting of #' row names would have no effect and returned row names would be `NULL`. #' #' @param x Object of class `xgb.DMatrix`. #' @param value A list of two elements: the first one is ignored #' and the second one is column names #' #' @details #' Generic [dimnames()] methods are used by [colnames()]. #' Since row names are irrelevant, it is recommended to use [colnames()] directly. 
#'
#' @examples
#' data(agaricus.train, package = "xgboost")
#'
#' train <- agaricus.train
#' dtrain <- xgb.DMatrix(train$data, label = train$label, nthread = 2)
#' dimnames(dtrain)
#' colnames(dtrain)
#' colnames(dtrain) <- make.names(1:ncol(train$data))
#' print(dtrain, verbose = TRUE)
#'
#' @rdname dimnames.xgb.DMatrix
#' @export
dimnames.xgb.DMatrix <- function(x) {
  fn <- getinfo(x, "feature_name")
  ## row names are NULL.
  list(NULL, fn)
}

#' @rdname dimnames.xgb.DMatrix
#' @export
`dimnames<-.xgb.DMatrix` <- function(x, value) {
  if (!is.list(value) || length(value) != 2L)
    stop("invalid 'dimnames' given: must be a list of two elements")
  if (!is.null(value[[1L]]))
    stop("xgb.DMatrix does not have rownames")
  if (is.null(value[[2]])) {
    setinfo(x, "feature_name", NULL)
    return(x)
  }
  if (ncol(x) != length(value[[2]])) {
    stop("can't assign ", length(value[[2]]), " colnames to a ", ncol(x), " column xgb.DMatrix")
  }
  setinfo(x, "feature_name", value[[2]])
  x
}

#' Get or set information of xgb.DMatrix and xgb.Booster objects
#'
#' @param object Object of class `xgb.DMatrix` or `xgb.Booster`.
#' @param name The name of the information field to get (see details).
#' @return For `getinfo()`, will return the requested field. For `setinfo()`,
#'   will always return value `TRUE` if it succeeds.
#' @details
#' The `name` field can be one of the following for `xgb.DMatrix`:
#' - label
#' - weight
#' - base_margin
#' - label_lower_bound
#' - label_upper_bound
#' - group
#' - feature_type
#' - feature_name
#' - nrow
#'
#' See the documentation for [xgb.DMatrix()] for more information about these fields.
#'
#' For `xgb.Booster`, can be one of the following:
#' - `feature_type`
#' - `feature_name`
#'
#' Note that, while 'qid' cannot be retrieved, it is possible to get the equivalent 'group'
#' for a DMatrix that had 'qid' assigned.
#'
#' **Important**: when calling [setinfo()], the objects are modified in-place. See
#' [xgb.copy.Booster()] for an idea of how this in-place assignment works.
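#'
#' As a short sketch of the 'qid'-to-'group' equivalence mentioned above (purely
#' illustrative data: three rows in query 101, two in query 102):
#' ```r
#' dm <- xgb.DMatrix(matrix(1:10, nrow = 5), nthread = 1)
#' setinfo(dm, "qid", c(101L, 101L, 101L, 102L, 102L))
#' getinfo(dm, "group")  # group boundary pointers, e.g. 0 3 5
#' ```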
#' @examples #' data(agaricus.train, package = "xgboost") #' #' dtrain <- with(agaricus.train, xgb.DMatrix(data, label = label, nthread = 2)) #' #' labels <- getinfo(dtrain, "label") #' setinfo(dtrain, "label", 1 - labels) #' #' labels2 <- getinfo(dtrain, "label") #' stopifnot(all(labels2 == 1 - labels)) #' @rdname getinfo #' @export getinfo <- function(object, name) UseMethod("getinfo") #' @rdname getinfo #' @export getinfo.xgb.DMatrix <- function(object, name) { allowed_int_fields <- 'group' allowed_float_fields <- c( 'label', 'weight', 'base_margin', 'label_lower_bound', 'label_upper_bound' ) allowed_str_fields <- c("feature_type", "feature_name") allowed_fields <- c(allowed_float_fields, allowed_int_fields, allowed_str_fields, 'nrow') if (typeof(name) != "character" || length(name) != 1 || !name %in% allowed_fields) { stop("getinfo: name must be one of the following\n", paste(paste0("'", allowed_fields, "'"), collapse = ", ")) } if (name == "nrow") { ret <- nrow(object) } else if (name %in% allowed_str_fields) { ret <- .Call(XGDMatrixGetStrFeatureInfo_R, object, name) } else if (name %in% allowed_float_fields) { ret <- .Call(XGDMatrixGetFloatInfo_R, object, name) if (length(ret) > nrow(object)) { ret <- matrix(ret, nrow = nrow(object), byrow = TRUE) } } else if (name %in% allowed_int_fields) { if (name == "group") { name <- "group_ptr" } ret <- .Call(XGDMatrixGetUIntInfo_R, object, name) if (length(ret) > nrow(object)) { ret <- matrix(ret, nrow = nrow(object), byrow = TRUE) } } if (length(ret) == 0) return(NULL) return(ret) } #' @rdname getinfo #' @param info The specific field of information to set. #' #' @details #' See the documentation for [xgb.DMatrix()] for possible fields that can be set #' (which correspond to arguments in that function). 
#'
#' Note that the following fields are allowed in the construction of an `xgb.DMatrix`
#' but **are not** allowed here:
#' - data
#' - missing
#' - silent
#' - nthread
#'
#' @examples
#' data(agaricus.train, package = "xgboost")
#'
#' dtrain <- with(agaricus.train, xgb.DMatrix(data, label = label, nthread = 2))
#'
#' labels <- getinfo(dtrain, "label")
#' setinfo(dtrain, "label", 1 - labels)
#'
#' labels2 <- getinfo(dtrain, "label")
#' stopifnot(all.equal(labels2, 1 - labels))
#' @export
setinfo <- function(object, name, info) UseMethod("setinfo")

#' @rdname getinfo
#' @export
setinfo.xgb.DMatrix <- function(object, name, info) {
  .internal.setinfo.xgb.DMatrix(object, name, info)
  attr(object, "fields")[[name]] <- TRUE
  return(TRUE)
}

.internal.setinfo.xgb.DMatrix <- function(object, name, info) {
  if (name == "label") {
    if (NROW(info) != nrow(object))
      stop("The length of labels must equal the number of rows in the input data")
    .Call(XGDMatrixSetInfo_R, object, name, info)
    return(TRUE)
  }
  if (name == "label_lower_bound") {
    if (NROW(info) != nrow(object))
      stop("The length of lower-bound labels must equal the number of rows in the input data")
    .Call(XGDMatrixSetInfo_R, object, name, info)
    return(TRUE)
  }
  if (name == "label_upper_bound") {
    if (NROW(info) != nrow(object))
      stop("The length of upper-bound labels must equal the number of rows in the input data")
    .Call(XGDMatrixSetInfo_R, object, name, info)
    return(TRUE)
  }
  if (name == "weight") {
    .Call(XGDMatrixSetInfo_R, object, name, info)
    return(TRUE)
  }
  if (name == "base_margin") {
    .Call(XGDMatrixSetInfo_R, object, name, info)
    return(TRUE)
  }
  if (name == "group") {
    if (sum(info) != nrow(object))
      stop("The sum of groups must equal the number of rows in the input data")
    .Call(XGDMatrixSetInfo_R, object, name, info)
    return(TRUE)
  }
  if (name == "qid") {
    if (NROW(info) != nrow(object))
      stop("The length of qid assignments must equal the number of rows in the input data")
    .Call(XGDMatrixSetInfo_R, object, name, info)
    return(TRUE)
  }
  if (name == "feature_weights") {
    if (NROW(info) != ncol(object)) {
      stop("The number of feature weights must equal the number of columns in the input data")
    }
    .Call(XGDMatrixSetInfo_R, object, name, info)
    return(TRUE)
  }

  set_feat_info <- function(name) {
    msg <- sprintf(
      "The number of %s must equal the number of columns in the input data. %s vs. %s",
      name,
      length(info),
      ncol(object)
    )
    if (!is.null(info)) {
      info <- as.list(info)
      if (length(info) != ncol(object)) {
        stop(msg)
      }
    }
    .Call(XGDMatrixSetStrFeatureInfo_R, object, name, info)
  }
  if (name == "feature_name") {
    set_feat_info("feature_name")
    return(TRUE)
  }
  if (name == "feature_type") {
    set_feat_info("feature_type")
    return(TRUE)
  }
  stop("setinfo: unknown info name ", name)
}

#' Get Quantile Cuts from DMatrix
#'
#' @description
#' Get the quantile cuts (a.k.a. borders) from an `xgb.DMatrix`
#' that has been quantized for the histogram method (`tree_method = "hist"`).
#'
#' These cuts are used in order to assign observations to bins - i.e. these are ordered
#' boundaries which are used to determine assignment condition `border_low < x < border_high`.
#' As such, the first and last bin will be outside of the range of the data, so as to include
#' all of the observations there.
#'
#' If a given column has 'n' bins, then there will be 'n+1' cuts / borders for that column,
#' which will be output in sorted order from lowest to highest.
#'
#' Different columns can have different numbers of bins according to their range.
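#'
#' With the `"arrays"` output format (see `output` below), the cuts for a given
#' column can be sliced out manually; a short sketch, reusing the `dm` object
#' from the example further down:
#' ```r
#' res <- xgb.get.DMatrix.qcut(dm, output = "arrays")
#' cuts_col1 <- res$data[seq(res$indptr[1] + 1, res$indptr[2])]  # cuts for column 1
#' ```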
#' @param dmat An `xgb.DMatrix` object, as returned by [xgb.DMatrix()].
#' @param output Output format for the quantile cuts. Possible options are:
#'   - `"list"` will return the output as a list with one entry per column, where
#'     each column will have a numeric vector with the cuts. The list will be named if
#'     `dmat` has column names assigned to it.
#'   - `"arrays"` will return a list with entries `indptr` (base-0 indexing) and
#'     `data`. Here, the cuts for column 'i' are obtained by slicing 'data' from entries
#'     `indptr[i]+1` to `indptr[i+1]`.
#' @return The quantile cuts, in the format specified by parameter `output`.
#' @examples
#' data(mtcars)
#'
#' y <- mtcars$mpg
#' x <- as.matrix(mtcars[, -1])
#' dm <- xgb.DMatrix(x, label = y, nthread = 1)
#'
#' # DMatrix is not quantized right away, but will be once a hist model is generated
#' model <- xgb.train(
#'   data = dm,
#'   params = xgb.params(tree_method = "hist", max_bin = 8, nthread = 1),
#'   nrounds = 3
#' )
#'
#' # Now can get the quantile cuts
#' xgb.get.DMatrix.qcut(dm)
#' @export
xgb.get.DMatrix.qcut <- function(dmat, output = c("list", "arrays")) { # nolint
  stopifnot(inherits(dmat, "xgb.DMatrix"))
  output <- head(output, 1L)
  stopifnot(output %in% c("list", "arrays"))
  res <- .Call(XGDMatrixGetQuantileCut_R, dmat)
  if (output == "arrays") {
    return(res)
  } else {
    feature_names <- getinfo(dmat, "feature_name")
    ncols <- length(res$indptr) - 1
    out <- lapply(
      seq(1, ncols),
      function(col) {
        st <- res$indptr[col]
        end <- res$indptr[col + 1]
        if (end <= st) {
          return(numeric())
        }
        return(res$data[seq(1 + st, end)])
      }
    )
    if (NROW(feature_names)) {
      names(out) <- feature_names
    }
    return(out)
  }
}

#' Get Number of Non-Missing Entries in DMatrix
#'
#' @param dmat An `xgb.DMatrix` object, as returned by [xgb.DMatrix()].
#' @return The number of non-missing entries in the DMatrix.
#' @export
xgb.get.DMatrix.num.non.missing <- function(dmat) { # nolint
  stopifnot(inherits(dmat, "xgb.DMatrix"))
  return(.Call(XGDMatrixNumNonMissing_R, dmat))
}

#' Get DMatrix Data
#'
#' @param dmat An `xgb.DMatrix` object, as returned by [xgb.DMatrix()].
#' @return The data held in the DMatrix, as a sparse CSR matrix (class `dgRMatrix`
#'   from package `Matrix`). If it had feature names, these will be added as column names
#'   in the output.
#' @export
xgb.get.DMatrix.data <- function(dmat) {
  stopifnot(inherits(dmat, "xgb.DMatrix"))
  res <- .Call(XGDMatrixGetDataAsCSR_R, dmat)
  out <- methods::new("dgRMatrix")
  nrows <- as.integer(length(res$indptr) - 1)
  out@p <- res$indptr
  out@j <- res$indices
  out@x <- res$data
  out@Dim <- as.integer(c(nrows, res$ncols))

  feature_names <- getinfo(dmat, "feature_name")
  dim_names <- list(NULL, NULL)
  if (NROW(feature_names)) {
    dim_names[[2L]] <- feature_names
  }
  out@Dimnames <- dim_names
  return(out)
}

#' Slice DMatrix
#'
#' Get a new DMatrix containing the specified rows of original xgb.DMatrix object.
#'
#' @param object Object of class `xgb.DMatrix`.
#' @param idxset An integer vector of indices of rows needed (base-1 indexing).
#' @param allow_groups Whether to allow slicing an `xgb.DMatrix` with `group` (or
#'   equivalently `qid`) field. Note that in such case, the result will not have
#'   the groups anymore - they need to be set manually through [setinfo()].
#' @param colset Currently not used (columns subsetting is not available).
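#' @return A new `xgb.DMatrix` object containing only the rows in `idxset`. Note that
#'   row-level fields such as labels are subset along with the data (see the example below).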
#' #' @examples #' data(agaricus.train, package = "xgboost") #' #' dtrain <- with(agaricus.train, xgb.DMatrix(data, label = label, nthread = 2)) #' #' dsub <- xgb.slice.DMatrix(dtrain, 1:42) #' labels1 <- getinfo(dsub, "label") #' #' dsub <- dtrain[1:42, ] #' labels2 <- getinfo(dsub, "label") #' all.equal(labels1, labels2) #' #' @rdname xgb.slice.DMatrix #' @export xgb.slice.DMatrix <- function(object, idxset, allow_groups = FALSE) { if (!inherits(object, "xgb.DMatrix")) { stop("object must be xgb.DMatrix") } ret <- .Call(XGDMatrixSliceDMatrix_R, object, idxset, allow_groups) attr_list <- attributes(object) nr <- nrow(object) len <- sapply(attr_list, NROW) ind <- which(len == nr) if (length(ind) > 0) { nms <- names(attr_list)[ind] for (i in seq_along(ind)) { obj_attr <- attr(object, nms[i]) if (NCOL(obj_attr) > 1) { attr(ret, nms[i]) <- obj_attr[idxset, ] } else { attr(ret, nms[i]) <- obj_attr[idxset] } } } out <- structure(ret, class = "xgb.DMatrix") parent_fields <- as.list(attributes(object)$fields) if (NROW(parent_fields)) { child_fields <- parent_fields[!(names(parent_fields) %in% c("group", "qid"))] child_fields <- as.environment(child_fields) attributes(out)$fields <- child_fields } return(out) } #' @rdname xgb.slice.DMatrix #' @export `[.xgb.DMatrix` <- function(object, idxset, colset = NULL) { xgb.slice.DMatrix(object, idxset) } #' Print xgb.DMatrix #' #' Print information about xgb.DMatrix. #' Currently it displays dimensions and presence of info-fields and colnames. #' #' @param x An xgb.DMatrix object. #' @param verbose Whether to print colnames (when present). #' @param ... Not currently used. #' #' @examples #' data(agaricus.train, package = "xgboost") #' #' dtrain <- with(agaricus.train, xgb.DMatrix(data, label = label, nthread = 2)) #' dtrain #' #' print(dtrain, verbose = TRUE) #' #' @method print xgb.DMatrix #' @export print.xgb.DMatrix <- function(x, verbose = FALSE, ...) { if (.Call(XGCheckNullPtr_R, x)) { cat("INVALID xgb.DMatrix object. Must be constructed anew.\n") return(invisible(x)) } class_print <- if (inherits(x, "xgb.QuantileDMatrix")) { "xgb.QuantileDMatrix" } else if (inherits(x, "xgb.ExtMemDMatrix")) { "xgb.ExtMemDMatrix" } else if (inherits(x, "xgb.ProxyDMatrix")) { "xgb.ProxyDMatrix" } else { "xgb.DMatrix" } cat(class_print, ' dim:', nrow(x), 'x', ncol(x), ' info: ') infos <- names(attributes(x)$fields) infos <- infos[infos != "feature_name"] if (!NROW(infos)) infos <- "NA" infos <- infos[order(infos)] infos <- paste(infos, collapse = ", ") cat(infos) cnames <- colnames(x) cat(' colnames:') if (verbose && !is.null(cnames)) { cat("\n'") cat(cnames, sep = "','") cat("'") } else { if (is.null(cnames)) cat(' no') else cat(' yes') } cat("\n") invisible(x) } xgboost-3.0.0/R-package/R/xgb.DMatrix.save.R000066400000000000000000000014651476511272400204200ustar00rootroot00000000000000#' Save xgb.DMatrix object to binary file #' #' Save xgb.DMatrix object to binary file #' #' @param dmatrix the `xgb.DMatrix` object #' @param fname the name of the file to write. 
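#' @return `TRUE`, signaling success (the function is called mainly for its side
#'   effect of writing the file).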
#' #' @examples #' \dontshow{RhpcBLASctl::omp_set_num_threads(1)} #' data(agaricus.train, package = "xgboost") #' #' dtrain <- with(agaricus.train, xgb.DMatrix(data, label = label, nthread = 2)) #' fname <- file.path(tempdir(), "xgb.DMatrix.data") #' xgb.DMatrix.save(dtrain, fname) #' dtrain <- xgb.DMatrix(fname, nthread = 1) #' @export xgb.DMatrix.save <- function(dmatrix, fname) { if (typeof(fname) != "character") stop("fname must be character") if (!inherits(dmatrix, "xgb.DMatrix")) stop("dmatrix must be xgb.DMatrix") fname <- path.expand(fname) .Call(XGDMatrixSaveBinary_R, dmatrix, fname[1], 0L) return(TRUE) } xgboost-3.0.0/R-package/R/xgb.config.R000066400000000000000000000040021476511272400173460ustar00rootroot00000000000000#' Set and get global configuration #' #' Global configuration consists of a collection of parameters that can be applied in the global #' scope. See \url{https://xgboost.readthedocs.io/en/stable/parameter.html} for the full list of #' parameters supported in the global configuration. Use `xgb.set.config()` to update the #' values of one or more global-scope parameters. Use `xgb.get.config()` to fetch the current #' values of all global-scope parameters (listed in #' \url{https://xgboost.readthedocs.io/en/stable/parameter.html}). #' #' @details #' Note that serialization-related functions might use a globally-configured number of threads, #' which is managed by the system's OpenMP (OMP) configuration instead. Typically, XGBoost methods #' accept an `nthreads` parameter, but some methods like [readRDS()] might get executed before such #' parameter can be supplied. #' #' The number of OMP threads can in turn be configured for example through an environment variable #' `OMP_NUM_THREADS` (needs to be set before R is started), or through `RhpcBLASctl::omp_set_num_threads`. #' @rdname xgbConfig #' @name xgb.set.config, xgb.get.config #' @export xgb.set.config xgb.get.config #' @param ... List of parameters to be set, as keyword arguments #' @return #' `xgb.set.config()` returns `TRUE` to signal success. `xgb.get.config()` returns #' a list containing all global-scope parameters and their values. #' #' @examples #' # Set verbosity level to silent (0) #' xgb.set.config(verbosity = 0) #' # Now global verbosity level is 0 #' config <- xgb.get.config() #' print(config$verbosity) #' # Set verbosity level to warning (1) #' xgb.set.config(verbosity = 1) #' # Now global verbosity level is 1 #' config <- xgb.get.config() #' print(config$verbosity) xgb.set.config <- function(...) { new_config <- list(...) .Call(XGBSetGlobalConfig_R, jsonlite::toJSON(new_config, auto_unbox = TRUE)) return(TRUE) } #' @rdname xgbConfig xgb.get.config <- function() { config <- .Call(XGBGetGlobalConfig_R) return(jsonlite::fromJSON(config)) } xgboost-3.0.0/R-package/R/xgb.create.features.R000066400000000000000000000074321476511272400211730ustar00rootroot00000000000000#' Create new features from a previously learned model #' #' May improve the learning by adding new features to the training data based on the #' decision trees from a previously learned model. 
#'
#' @details
#' This function is inspired by Section 3.1 of the paper:
#'
#' **Practical Lessons from Predicting Clicks on Ads at Facebook**
#'
#' *(Xinran He, Junfeng Pan, Ou Jin, Tianbing Xu, Bo Liu, Tao Xu, Yanxin Shi, Antoine Atallah, Ralf Herbrich, Stuart Bowers,
#' Joaquin Quinonero Candela)*
#'
#' International Workshop on Data Mining for Online Advertising (ADKDD) - August 24, 2014
#'
#' \url{https://research.facebook.com/publications/practical-lessons-from-predicting-clicks-on-ads-at-facebook/}.
#'
#' Extract explaining the method:
#'
#' "We found that boosted decision trees are a powerful and very
#' convenient way to implement non-linear and tuple transformations
#' of the kind we just described. We treat each individual
#' tree as a categorical feature that takes as value the
#' index of the leaf an instance ends up falling in. We use
#' 1-of-K coding of this type of features.
#'
#' For example, consider the boosted tree model in Figure 1 with 2 subtrees,
#' where the first subtree has 3 leafs and the second 2 leafs. If an
#' instance ends up in leaf 2 in the first subtree and leaf 1 in
#' second subtree, the overall input to the linear classifier will
#' be the binary vector `[0, 1, 0, 1, 0]`, where the first 3 entries
#' correspond to the leaves of the first subtree and last 2 to
#' those of the second subtree.
#'
#' ...
#'
#' We can understand boosted decision tree
#' based transformation as a supervised feature encoding that
#' converts a real-valued vector into a compact binary-valued
#' vector. A traversal from root node to a leaf node represents
#' a rule on certain features."
#'
#' @param model Decision tree boosting model learned on the original data.
#' @param data Original data (usually provided as a `dgCMatrix` matrix).
#'
#' @return A `dgCMatrix` matrix including both the original data and the new features.
#'
#' @examples
#' data(agaricus.train, package = "xgboost")
#' data(agaricus.test, package = "xgboost")
#'
#' dtrain <- with(agaricus.train, xgb.DMatrix(data, label = label, nthread = 2))
#' dtest <- with(agaricus.test, xgb.DMatrix(data, label = label, nthread = 2))
#'
#' param <- list(max_depth = 2, learning_rate = 1, objective = 'binary:logistic', nthread = 1)
#' nrounds <- 4
#'
#' bst <- xgb.train(params = param, data = dtrain, nrounds = nrounds)
#'
#' # Model accuracy without new features
#' accuracy.before <- sum((predict(bst, agaricus.test$data) >= 0.5) == agaricus.test$label) /
#'   length(agaricus.test$label)
#'
#' # Convert previous features to one-hot encoding
#' new.features.train <- xgb.create.features(model = bst, agaricus.train$data)
#' new.features.test <- xgb.create.features(model = bst, agaricus.test$data)
#'
#' # Learning with new features
#' new.dtrain <- xgb.DMatrix(
#'   data = new.features.train, label = agaricus.train$label, nthread = 1
#' )
#' new.dtest <- xgb.DMatrix(
#'   data = new.features.test, label = agaricus.test$label, nthread = 1
#' )
#' bst <- xgb.train(params = param, data = new.dtrain, nrounds = nrounds)
#'
#' # Model accuracy with new features
#' accuracy.after <- sum((predict(bst, new.dtest) >= 0.5) == agaricus.test$label) /
#'   length(agaricus.test$label)
#'
#' # Here the accuracy was already good and is now perfect.
#' cat(paste("The accuracy was", accuracy.before, "before adding leaf features and it is now", #' accuracy.after, "!\n")) #' #' @export xgb.create.features <- function(model, data) { pred_with_leaf <- predict.xgb.Booster(model, data, predleaf = TRUE) cols <- lapply(as.data.frame(pred_with_leaf), factor) cbind(data, sparse.model.matrix(~ . -1, cols)) # nolint } xgboost-3.0.0/R-package/R/xgb.cv.R000066400000000000000000000313501476511272400165170ustar00rootroot00000000000000#' Cross Validation #' #' The cross validation function of xgboost. #' #' @inheritParams xgb.train #' @param data An `xgb.DMatrix` object, with corresponding fields like `label` or bounds as required #' for model training by the objective. #' #' Note that only the basic `xgb.DMatrix` class is supported - variants such as `xgb.QuantileDMatrix` #' or `xgb.ExtMemDMatrix` are not supported here. #' @param nfold The original dataset is randomly partitioned into `nfold` equal size subsamples. #' @param prediction A logical value indicating whether to return the test fold predictions #' from each CV model. This parameter engages the [xgb.cb.cv.predict()] callback. #' @param showsd Logical value whether to show standard deviation of cross validation. #' @param metrics List of evaluation metrics to be used in cross validation, #' when it is not specified, the evaluation metric is chosen according to objective function. #' Possible options are: #' - `error`: Binary classification error rate #' - `rmse`: Root mean square error #' - `logloss`: Negative log-likelihood function #' - `mae`: Mean absolute error #' - `mape`: Mean absolute percentage error #' - `auc`: Area under curve #' - `aucpr`: Area under PR curve #' - `merror`: Exact matching error used to evaluate multi-class classification #' @param stratified Logical flag indicating whether sampling of folds should be stratified #' by the values of outcome labels. For real-valued labels in regression objectives, #' stratification will be done by discretizing the labels into up to 5 buckets beforehand. #' #' If passing "auto", will be set to `TRUE` if the objective in `params` is a classification #' objective (from XGBoost's built-in objectives, doesn't apply to custom ones), and to #' `FALSE` otherwise. #' #' This parameter is ignored when `data` has a `group` field - in such case, the splitting #' will be based on whole groups (note that this might make the folds have different sizes). #' #' Value `TRUE` here is **not** supported for custom objectives. #' @param folds List with pre-defined CV folds (each element must be a vector of test fold's indices). #' When folds are supplied, the `nfold` and `stratified` parameters are ignored. #' #' If `data` has a `group` field and the objective requires this field, each fold (list element) #' must additionally have two attributes (retrievable through `attributes`) named `group_test` #' and `group_train`, which should hold the `group` to assign through [setinfo.xgb.DMatrix()] to #' the resulting DMatrices. #' @param train_folds List specifying which indices to use for training. If `NULL` #' (the default) all indices not specified in `folds` will be used for training. #' #' This is not supported when `data` has `group` field. #' @param callbacks A list of callback functions to perform various task during boosting. #' See [xgb.Callback()]. Some of the callbacks are automatically created depending on the #' parameters' values. User can provide either existing or their own callback methods in order #' to customize the training process. 
#' @details #' The original sample is randomly partitioned into `nfold` equal size subsamples. #' #' Of the `nfold` subsamples, a single subsample is retained as the validation data for testing the model, #' and the remaining `nfold - 1` subsamples are used as training data. #' #' The cross-validation process is then repeated `nrounds` times, with each of the #' `nfold` subsamples used exactly once as the validation data. #' #' All observations are used for both training and validation. #' #' Adapted from \url{https://en.wikipedia.org/wiki/Cross-validation_\%28statistics\%29} #' #' @return #' An object of class 'xgb.cv.synchronous' with the following elements: #' - `call`: Function call. #' - `params`: Parameters that were passed to the xgboost library. Note that it does not #' capture parameters changed by the [xgb.cb.reset.parameters()] callback. #' - `evaluation_log`: Evaluation history stored as a `data.table` with the #' first column corresponding to iteration number and the rest corresponding to the #' CV-based evaluation means and standard deviations for the training and test CV-sets. #' It is created by the [xgb.cb.evaluation.log()] callback. #' - `niter`: Number of boosting iterations. #' - `nfeatures`: Number of features in training data. #' - `folds`: The list of CV folds' indices - either those passed through the `folds` #' parameter or randomly generated. #' - `best_iteration`: Iteration number with the best evaluation metric value #' (only available with early stopping). #' #' Plus other potential elements that are the result of callbacks, such as a list `cv_predict` with #' a sub-element `pred` when passing `prediction = TRUE`, which is added by the [xgb.cb.cv.predict()] #' callback (note that one can also pass it manually under `callbacks` with different settings, #' such as saving also the models created during cross validation); or a list `early_stop` which #' will contain elements such as `best_iteration` when using the early stopping callback ([xgb.cb.early.stop()]). #' #' @examples #' data(agaricus.train, package = "xgboost") #' #' dtrain <- with(agaricus.train, xgb.DMatrix(data, label = label, nthread = 2)) #' #' cv <- xgb.cv( #' data = dtrain, #' nrounds = 3, #' params = xgb.params( #' nthread = 2, #' max_depth = 3, #' objective = "binary:logistic" #' ), #' nfold = 5, #' metrics = list("rmse","auc") #' ) #' print(cv) #' print(cv, verbose = TRUE) #' #' @export xgb.cv <- function(params = xgb.params(), data, nrounds, nfold, prediction = FALSE, showsd = TRUE, metrics = list(), objective = NULL, custom_metric = NULL, stratified = "auto", folds = NULL, train_folds = NULL, verbose = TRUE, print_every_n = 1L, early_stopping_rounds = NULL, maximize = NULL, callbacks = list(), ...) { check.deprecation(deprecated_cv_params, match.call(), ...) stopifnot(inherits(data, "xgb.DMatrix")) if (inherits(data, "xgb.DMatrix") && .Call(XGCheckNullPtr_R, data)) { stop("'data' is an invalid 'xgb.DMatrix' object. Must be constructed again.") } if (inherits(data, "xgb.QuantileDMatrix")) { stop("'xgb.QuantileDMatrix' is not supported as input to 'xgb.cv'.") } params <- check.booster.params(params) # TODO: should we deprecate the redundant 'metrics' parameter? 
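  # Each element of `metrics` is appended as its own 'eval_metric' entry, so
  # all requested metrics are evaluated at every boosting iteration.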
for (m in metrics) params <- c(params, list("eval_metric" = m)) tmp <- check.custom.obj(params, objective) params <- tmp$params objective <- tmp$objective tmp <- check.custom.eval(params, custom_metric, maximize, early_stopping_rounds, callbacks) params <- tmp$params custom_metric <- tmp$custom_metric if (stratified == "auto") { if (is.character(params$objective)) { stratified <- ( (params$objective %in% .CLASSIFICATION_OBJECTIVES()) && !(params$objective %in% .RANKING_OBJECTIVES()) ) } else { stratified <- FALSE } } # Check the labels and groups cv_label <- getinfo(data, "label") cv_group <- getinfo(data, "group") if (!is.null(train_folds) && NROW(cv_group)) { stop("'train_folds' is not supported for DMatrix object with 'group' field.") } # CV folds if (!is.null(folds)) { if (!is.list(folds) || length(folds) < 2) stop("'folds' must be a list with 2 or more elements that are vectors of indices for each CV-fold") nfold <- length(folds) } else { if (nfold <= 1) stop("'nfold' must be > 1") folds <- generate.cv.folds(nfold, nrow(data), stratified, cv_label, cv_group, params) } # Callbacks tmp <- .process.callbacks(callbacks, is_cv = TRUE) callbacks <- tmp$callbacks cb_names <- tmp$cb_names rm(tmp) # Early stopping callback if (!is.null(early_stopping_rounds) && !("early_stop" %in% cb_names)) { callbacks <- add.callback( callbacks, xgb.cb.early.stop( early_stopping_rounds, maximize = maximize, verbose = verbose, save_best = FALSE ), as_first_elt = TRUE ) } # verbosity & evaluation printing callback: params <- c(params, list(silent = 1)) print_every_n <- max(as.integer(print_every_n), 1L) if (verbose && !("print_evaluation" %in% cb_names)) { callbacks <- add.callback(callbacks, xgb.cb.print.evaluation(print_every_n, showsd = showsd)) } # evaluation log callback: always is on in CV if (!("evaluation_log" %in% cb_names)) { callbacks <- add.callback(callbacks, xgb.cb.evaluation.log()) } # CV-predictions callback if (prediction && !("cv_predict" %in% cb_names)) { callbacks <- add.callback(callbacks, xgb.cb.cv.predict(save_models = FALSE)) } # create the booster-folds # train_folds dall <- data bst_folds <- lapply(seq_along(folds), function(k) { dtest <- xgb.slice.DMatrix(dall, folds[[k]], allow_groups = TRUE) # code originally contributed by @RolandASc on stackoverflow if (is.null(train_folds)) dtrain <- xgb.slice.DMatrix(dall, unlist(folds[-k]), allow_groups = TRUE) else dtrain <- xgb.slice.DMatrix(dall, train_folds[[k]], allow_groups = TRUE) if (!is.null(attributes(folds[[k]])$group_test)) { setinfo(dtest, "group", attributes(folds[[k]])$group_test) setinfo(dtrain, "group", attributes(folds[[k]])$group_train) } bst <- xgb.Booster( params = params, cachelist = list(dtrain, dtest), modelfile = NULL ) bst <- bst$bst list(dtrain = dtrain, bst = bst, evals = list(train = dtrain, test = dtest), index = folds[[k]]) }) # extract parameters that can affect the relationship b/w #trees and #iterations num_class <- max(as.numeric(NVL(params[['num_class']], 1)), 1) # nolint # those are fixed for CV (no training continuation) begin_iteration <- 1 end_iteration <- nrounds .execute.cb.before.training( callbacks, bst_folds, dall, NULL, begin_iteration, end_iteration ) # synchronous CV boosting: run CV folds' models within each iteration for (iteration in begin_iteration:end_iteration) { .execute.cb.before.iter( callbacks, bst_folds, dall, NULL, iteration ) msg <- lapply(bst_folds, function(fd) { xgb.iter.update( bst = fd$bst, dtrain = fd$dtrain, iter = iteration - 1, objective = objective ) xgb.iter.eval( bst = 
fd$bst, evals = fd$evals, iter = iteration - 1, custom_metric = custom_metric ) }) msg <- simplify2array(msg) should_stop <- .execute.cb.after.iter( callbacks, bst_folds, dall, NULL, iteration, msg ) if (should_stop) break } cb_outputs <- .execute.cb.after.training( callbacks, bst_folds, dall, NULL, iteration, msg ) # the CV result ret <- list( call = match.call(), params = params, niter = iteration, nfeatures = ncol(dall), folds = folds ) ret <- c(ret, cb_outputs) class(ret) <- 'xgb.cv.synchronous' return(invisible(ret)) } #' Print xgb.cv result #' #' Prints formatted results of [xgb.cv()]. #' #' @param x An `xgb.cv.synchronous` object. #' @param verbose Whether to print detailed data. #' @param ... Passed to `data.table.print()`. #' #' @details #' When not verbose, it would only print the evaluation results, #' including the best iteration (when available). #' #' @examples #' data(agaricus.train, package = "xgboost") #' #' train <- agaricus.train #' cv <- xgb.cv( #' data = xgb.DMatrix(train$data, label = train$label, nthread = 1), #' nfold = 5, #' nrounds = 2, #' params = xgb.params( #' max_depth = 2, #' nthread = 2, #' objective = "binary:logistic" #' ) #' ) #' print(cv) #' print(cv, verbose = TRUE) #' #' @rdname print.xgb.cv #' @method print xgb.cv.synchronous #' @export print.xgb.cv.synchronous <- function(x, verbose = FALSE, ...) { cat('##### xgb.cv ', length(x$folds), '-folds\n', sep = '') if (verbose) { if (!is.null(x$call)) { cat('call:\n ') print(x$call) } if (!is.null(x$params)) { cat('params (as set within xgb.cv):\n') cat(' ', paste(names(x$params), paste0('"', unlist(x$params), '"'), sep = ' = ', collapse = ', '), '\n', sep = '') } for (n in c('niter', 'best_iteration')) { if (is.null(x$early_stop[[n]])) next cat(n, ': ', x$early_stop[[n]], '\n', sep = '') } if (!is.null(x$cv_predict$pred)) { cat('pred:\n') str(x$cv_predict$pred) } } if (verbose) cat('evaluation_log:\n') print(x$evaluation_log, row.names = FALSE, ...) if (!is.null(x$early_stop$best_iteration)) { cat('Best iteration:\n') print(x$evaluation_log[x$early_stop$best_iteration], row.names = FALSE, ...) } invisible(x) } xgboost-3.0.0/R-package/R/xgb.dump.R000066400000000000000000000065711476511272400170630ustar00rootroot00000000000000#' Dump an XGBoost model in text format. #' #' Dump an XGBoost model in text format. #' #' @param model The model object. #' @param fname The name of the text file where to save the model text dump. #' If not provided or set to `NULL`, the model is returned as a character vector. #' @param fmap Feature map file representing feature types. See demo/ for a walkthrough #' example in R, and \url{https://github.com/dmlc/xgboost/blob/master/demo/data/featmap.txt} #' to see an example of the value. #' @param with_stats Whether to dump some additional statistics about the splits. #' When this option is on, the model dump contains two additional values: #' gain is the approximate loss function gain we get in each split; #' cover is the sum of second order gradient in each node. #' @param dump_format Either 'text', 'json', or 'dot' (graphviz) format could be specified. #' #' Format 'dot' for a single tree can be passed directly to packages that consume this format #' for graph visualization, such as function `DiagrammeR::grViz()` #' @inheritParams xgb.train #' @return #' If fname is not provided or set to `NULL` the function will return the model #' as a character vector. Otherwise it will return `TRUE`. 
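#'
#' When `dump_format = "dot"`, the returned vector contains one graph
#' definition per tree, so individual trees can be selected by index
#' (as shown in the examples below).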
#' #' @examples #' \dontshow{RhpcBLASctl::omp_set_num_threads(1)} #' data(agaricus.train, package = "xgboost") #' data(agaricus.test, package = "xgboost") #' #' train <- agaricus.train #' test <- agaricus.test #' #' bst <- xgb.train( #' data = xgb.DMatrix(train$data, label = train$label, nthread = 1), #' nrounds = 2, #' params = xgb.params( #' max_depth = 2, #' nthread = 2, #' objective = "binary:logistic" #' ) #' ) #' #' # save the model in file 'xgb.model.dump' #' dump_path = file.path(tempdir(), 'model.dump') #' xgb.dump(bst, dump_path, with_stats = TRUE) #' #' # print the model without saving it to a file #' print(xgb.dump(bst, with_stats = TRUE)) #' #' # print in JSON format: #' cat(xgb.dump(bst, with_stats = TRUE, dump_format = "json")) #' #' # plot first tree leveraging the 'dot' format #' if (requireNamespace('DiagrammeR', quietly = TRUE)) { #' DiagrammeR::grViz(xgb.dump(bst, dump_format = "dot")[[1L]]) #' } #' @export xgb.dump <- function(model, fname = NULL, fmap = "", with_stats = FALSE, dump_format = c("text", "json", "dot"), ...) { check.deprecation(deprecated_dump_params, match.call(), ...) dump_format <- match.arg(dump_format) if (!inherits(model, "xgb.Booster")) stop("model: argument must be of type xgb.Booster") if (!(is.null(fname) || is.character(fname))) stop("fname: argument must be a character string (when provided)") if (!(is.null(fmap) || is.character(fmap))) stop("fmap: argument must be a character string (when provided)") model_dump <- .Call( XGBoosterDumpModel_R, xgb.get.handle(model), NVL(fmap, "")[1], as.integer(with_stats), as.character(dump_format) ) if (dump_format == "dot") { return(sapply(model_dump, function(x) gsub("^booster\\[\\d+\\]\\n", "\\1", x))) } if (is.null(fname)) model_dump <- gsub('\t', '', model_dump, fixed = TRUE) if (dump_format == "text") model_dump <- unlist(strsplit(model_dump, '\n', fixed = TRUE)) model_dump <- grep('^\\s*$', model_dump, invert = TRUE, value = TRUE) if (is.null(fname)) { return(model_dump) } else { fname <- path.expand(fname) writeLines(model_dump, fname[1]) return(TRUE) } } xgboost-3.0.0/R-package/R/xgb.ggplot.R000066400000000000000000000210171476511272400174020ustar00rootroot00000000000000# ggplot backend for the xgboost plotting facilities #' @rdname xgb.plot.importance #' @export xgb.ggplot.importance <- function(importance_matrix = NULL, top_n = NULL, measure = NULL, rel_to_first = FALSE, n_clusters = seq_len(10), ...) { importance_matrix <- xgb.plot.importance(importance_matrix, top_n = top_n, measure = measure, rel_to_first = rel_to_first, plot = FALSE, ...) if (!requireNamespace("ggplot2", quietly = TRUE)) { stop("ggplot2 package is required", call. = FALSE) } if (!requireNamespace("Ckmeans.1d.dp", quietly = TRUE)) { stop("Ckmeans.1d.dp package is required", call. 
= FALSE) } clusters <- suppressWarnings( Ckmeans.1d.dp::Ckmeans.1d.dp(importance_matrix$Importance, n_clusters) ) importance_matrix[, Cluster := as.character(clusters$cluster)] plot <- ggplot2::ggplot(importance_matrix, ggplot2::aes(x = factor(Feature, levels = rev(Feature)), y = Importance, width = 0.5), environment = environment()) + ggplot2::geom_bar(ggplot2::aes(fill = Cluster), stat = "identity", position = "identity") + ggplot2::coord_flip() + ggplot2::xlab("Features") + ggplot2::ggtitle("Feature importance") + ggplot2::theme(plot.title = ggplot2::element_text(lineheight = .9, face = "bold"), panel.grid.major.y = ggplot2::element_blank()) return(plot) } #' @rdname xgb.plot.deepness #' @export xgb.ggplot.deepness <- function(model = NULL, which = c("2x1", "max.depth", "med.depth", "med.weight")) { if (!requireNamespace("ggplot2", quietly = TRUE)) stop("ggplot2 package is required for plotting the graph deepness.", call. = FALSE) which <- match.arg(which) dt_depths <- xgb.plot.deepness(model = model, plot = FALSE) dt_summaries <- dt_depths[, .(.N, Cover = mean(Cover)), Depth] setkey(dt_summaries, 'Depth') if (which == "2x1") { p1 <- ggplot2::ggplot(dt_summaries) + ggplot2::geom_bar(ggplot2::aes(x = Depth, y = N), stat = "Identity") + ggplot2::xlab("") + ggplot2::ylab("Number of leafs") + ggplot2::ggtitle("Model complexity") + ggplot2::theme( plot.title = ggplot2::element_text(lineheight = 0.9, face = "bold"), panel.grid.major.y = ggplot2::element_blank(), axis.ticks = ggplot2::element_blank(), axis.text.x = ggplot2::element_blank() ) p2 <- ggplot2::ggplot(dt_summaries) + ggplot2::geom_bar(ggplot2::aes(x = Depth, y = Cover), stat = "Identity") + ggplot2::xlab("Leaf depth") + ggplot2::ylab("Weighted cover") multiplot(p1, p2, cols = 1) return(invisible(list(p1, p2))) } else if (which == "max.depth") { p <- ggplot2::ggplot(dt_depths[, max(Depth), Tree]) + ggplot2::geom_jitter(ggplot2::aes(x = Tree, y = V1), height = 0.15, alpha = 0.4, size = 3, stroke = 0) + ggplot2::xlab("tree #") + ggplot2::ylab("Max tree leaf depth") return(p) } else if (which == "med.depth") { p <- ggplot2::ggplot(dt_depths[, median(as.numeric(Depth)), Tree]) + ggplot2::geom_jitter(ggplot2::aes(x = Tree, y = V1), height = 0.15, alpha = 0.4, size = 3, stroke = 0) + ggplot2::xlab("tree #") + ggplot2::ylab("Median tree leaf depth") return(p) } else if (which == "med.weight") { p <- ggplot2::ggplot(dt_depths[, median(abs(Weight)), Tree]) + ggplot2::geom_point(ggplot2::aes(x = Tree, y = V1), alpha = 0.4, size = 3, stroke = 0) + ggplot2::xlab("tree #") + ggplot2::ylab("Median absolute leaf weight") return(p) } } #' @rdname xgb.plot.shap.summary #' @export xgb.ggplot.shap.summary <- function(data, shap_contrib = NULL, features = NULL, top_n = 10, model = NULL, trees = NULL, target_class = NULL, approxcontrib = FALSE, subsample = NULL) { if (inherits(data, "xgb.DMatrix")) { stop( "'xgb.ggplot.shap.summary' is not compatible with 'xgb.DMatrix' objects. Try passing a matrix or data.frame." 
) } cols_categ <- NULL if (!is.null(model)) { ftypes <- getinfo(model, "feature_type") if (NROW(ftypes)) { if (length(ftypes) != ncol(data)) { stop(sprintf("'data' has incorrect number of columns (expected: %d, got: %d).", length(ftypes), ncol(data))) } cols_categ <- colnames(data)[ftypes == "c"] } } else if (inherits(data, "data.frame")) { cols_categ <- names(data)[sapply(data, function(x) is.factor(x) || is.character(x))] } if (NROW(cols_categ)) { warning("Categorical features are ignored in 'xgb.ggplot.shap.summary'.") } data_list <- xgb.shap.data( data = data, shap_contrib = shap_contrib, features = features, top_n = top_n, model = model, trees = trees, target_class = target_class, approxcontrib = approxcontrib, subsample = subsample, max_observations = 10000 # 10,000 samples per feature. ) if (NROW(cols_categ)) { data_list <- lapply(data_list, function(x) x[, !(colnames(x) %in% cols_categ), drop = FALSE]) } p_data <- prepare.ggplot.shap.data(data_list, normalize = TRUE) # Reverse factor levels so that the first level is at the top of the plot p_data[, "feature" := factor(feature, rev(levels(feature)))] p <- ggplot2::ggplot(p_data, ggplot2::aes(x = feature, y = p_data$shap_value, colour = p_data$feature_value)) + ggplot2::geom_jitter(alpha = 0.5, width = 0.1) + ggplot2::scale_colour_viridis_c(limits = c(-3, 3), option = "plasma", direction = -1) + ggplot2::geom_abline(slope = 0, intercept = 0, colour = "darkgrey") + ggplot2::coord_flip() p } #' Combine feature values and SHAP values #' #' Internal function used to combine and melt feature values and SHAP contributions #' as required for ggplot functions related to SHAP. #' #' @param data_list The result of `xgb.shap.data()`. #' @param normalize Whether to standardize feature values to mean 0 and #' standard deviation 1. This is useful for comparing multiple features on the same #' plot. Default is `FALSE`. Note that it cannot be used when the data contains #' categorical features. #' @return A `data.table` containing the observation ID, the feature name, the #' feature value (normalized if specified), and the SHAP contribution value. #' @noRd #' @keywords internal prepare.ggplot.shap.data <- function(data_list, normalize = FALSE) { data <- data_list[["data"]] shap_contrib <- data_list[["shap_contrib"]] data <- data.table::as.data.table(as.matrix(data)) if (normalize) { data[, (names(data)) := lapply(.SD, normalize)] } data[, "id" := seq_len(nrow(data))] data_m <- data.table::melt.data.table(data, id.vars = "id", variable.name = "feature", value.name = "feature_value") shap_contrib <- data.table::as.data.table(as.matrix(shap_contrib)) shap_contrib[, "id" := seq_len(nrow(shap_contrib))] shap_contrib_m <- data.table::melt.data.table(shap_contrib, id.vars = "id", variable.name = "feature", value.name = "shap_value") p_data <- data.table::merge.data.table(data_m, shap_contrib_m, by = c("id", "feature")) p_data } #' Scale feature values #' #' Internal function that scales feature values to mean 0 and standard deviation 1. #' Useful to compare multiple features on the same plot. #' #' @param x Numeric vector. #' @return Numeric vector with mean 0 and standard deviation 1. #' @noRd #' @keywords internal normalize <- function(x) { loc <- mean(x, na.rm = TRUE) scale <- stats::sd(x, na.rm = TRUE) (x - loc) / scale } # Plot multiple ggplot graph aligned by rows and columns. # ... the plots # cols number of columns # internal utility function multiplot <- function(..., cols) { plots <- list(...) 
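  # The plots are arranged in a `cols`-wide grid, filled column-wise: each cell
  # of the `layout` matrix below holds the index of the plot drawn there.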
  num_plots <- length(plots)
  layout <- matrix(seq(1, cols * ceiling(num_plots / cols)),
                   ncol = cols, nrow = ceiling(num_plots / cols))

  if (num_plots == 1) {
    print(plots[[1]])
  } else {
    grid::grid.newpage()
    grid::pushViewport(grid::viewport(layout = grid::grid.layout(nrow(layout), ncol(layout))))
    for (i in 1:num_plots) {
      # Get the i,j matrix positions of the regions that contain this subplot
      matchidx <- as.data.table(which(layout == i, arr.ind = TRUE))
      print(
        plots[[i]],
        vp = grid::viewport(
          layout.pos.row = matchidx$row,
          layout.pos.col = matchidx$col
        )
      )
    }
  }
}

globalVariables(c(
  "Cluster", "ggplot", "aes", "geom_bar", "coord_flip", "xlab", "ylab",
  "ggtitle", "theme", "element_blank", "element_text", "V1", "Weight", "feature"
))
xgboost-3.0.0/R-package/R/xgb.importance.R000066400000000000000000000143701476511272400202530ustar00rootroot00000000000000#' Feature importance
#'
#' Creates a `data.table` of feature importances.
#'
#' @details
#' This function works for both linear and tree models.
#'
#' For linear models, the importance is the absolute magnitude of linear coefficients.
#' To obtain a meaningful ranking by importance for linear models, the features need to
#' be on the same scale (which is also recommended when using L1 or L2 regularization).
#'
#' @param feature_names Character vector used to overwrite the feature names
#'   of the model. The default is `NULL` (use original feature names).
#' @param model Object of class `xgb.Booster`.
#' @param trees An integer vector of (base-1) tree indices that should be included
#'   in the importance calculation (only for the "gbtree" booster).
#'   The default (`NULL`) parses all trees.
#'   It could be useful, e.g., in multiclass classification to get feature importances
#'   for each class separately.
#' @return A `data.table` with the following columns:
#'
#' For a tree model:
#' - `Feature`: Names of the features used in the model.
#' - `Gain`: Fractional contribution of each feature to the model based on
#'   the total gain of this feature's splits. Higher percentage means higher importance.
#' - `Cover`: Metric of the number of observations related to this feature.
#' - `Frequency`: Percentage of times a feature has been used in trees.
#'
#' For a linear model:
#' - `Feature`: Names of the features used in the model.
#' - `Weight`: Linear coefficient of this feature.
#' - `Class`: Class label (only for multiclass models). For objects of class `xgboost` (as
#'   produced by [xgboost()]), it will be a `factor`, while for objects of class `xgb.Booster`
#'   (as produced by [xgb.train()]), it will be a zero-based integer vector.
#'
#' If `feature_names` is not provided and `model` doesn't have `feature_names`,
#' the index of the features will be used instead. Because the index is extracted from the model dump
#' (based on C++ code), it starts at 0 (as in C/C++ or Python) instead of 1 (usual in R).
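#' @seealso [xgb.plot.importance()] and [xgb.ggplot.importance()] for plotting
#'   the resulting importance table.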
#' #' @examples #' # binary classification using "gbtree": #' data("ToothGrowth") #' x <- ToothGrowth[, c("len", "dose")] #' y <- ToothGrowth$supp #' model_tree_binary <- xgboost( #' x, y, #' nrounds = 5L, #' nthreads = 1L, #' booster = "gbtree", #' max_depth = 2L #' ) #' xgb.importance(model_tree_binary) #' #' # binary classification using "gblinear": #' model_tree_linear <- xgboost( #' x, y, #' nrounds = 5L, #' nthreads = 1L, #' booster = "gblinear", #' learning_rate = 0.3 #' ) #' xgb.importance(model_tree_linear) #' #' # multi-class classification using "gbtree": #' data("iris") #' x <- iris[, c("Sepal.Length", "Sepal.Width", "Petal.Length", "Petal.Width")] #' y <- iris$Species #' model_tree_multi <- xgboost( #' x, y, #' nrounds = 5L, #' nthreads = 1L, #' booster = "gbtree", #' max_depth = 3 #' ) #' # all classes clumped together: #' xgb.importance(model_tree_multi) #' # inspect importances separately for each class: #' num_classes <- 3L #' nrounds <- 5L #' xgb.importance( #' model_tree_multi, trees = seq(from = 1, by = num_classes, length.out = nrounds) #' ) #' xgb.importance( #' model_tree_multi, trees = seq(from = 2, by = num_classes, length.out = nrounds) #' ) #' xgb.importance( #' model_tree_multi, trees = seq(from = 3, by = num_classes, length.out = nrounds) #' ) #' #' # multi-class classification using "gblinear": #' model_linear_multi <- xgboost( #' x, y, #' nrounds = 5L, #' nthreads = 1L, #' booster = "gblinear", #' learning_rate = 0.2 #' ) #' xgb.importance(model_linear_multi) #' @export xgb.importance <- function(model = NULL, feature_names = getinfo(model, "feature_name"), trees = NULL) { if (!(is.null(feature_names) || is.character(feature_names))) stop("feature_names: Has to be a character vector") if (!is.null(trees)) { if (!is.vector(trees)) { stop("'trees' must be a vector of tree indices.") } trees <- trees - 1L if (anyNA(trees)) { stop("Passed invalid tree indices.") } } handle <- xgb.get.handle(model) if (xgb.booster_type(model) == "gblinear") { args <- list(importance_type = "weight", feature_names = feature_names) results <- .Call( XGBoosterFeatureScore_R, handle, jsonlite::toJSON(args, auto_unbox = TRUE, null = "null") ) names(results) <- c("features", "shape", "weight") if (length(results$shape) == 2) { n_classes <- results$shape[2] } else { n_classes <- 0 } importance <- if (n_classes == 0) { return(data.table(Feature = results$features, Weight = results$weight)[order(-abs(Weight))]) } else { out <- data.table( Feature = rep(results$features, each = n_classes), Weight = results$weight, Class = seq_len(n_classes) - 1 )[order(Class, -abs(Weight))] if (inherits(model, "xgboost") && NROW(attributes(model)$metadata$y_levels)) { class_vec <- out$Class class_vec <- as.integer(class_vec) + 1L attributes(class_vec)$levels <- attributes(model)$metadata$y_levels attributes(class_vec)$class <- "factor" out[, Class := class_vec] } return(out[]) } } else { concatenated <- list() output_names <- vector() for (importance_type in c("weight", "total_gain", "total_cover")) { args <- list(importance_type = importance_type, feature_names = feature_names, tree_idx = trees) results <- .Call( XGBoosterFeatureScore_R, handle, jsonlite::toJSON(args, auto_unbox = TRUE, null = "null") ) names(results) <- c("features", "shape", importance_type) concatenated[ switch(importance_type, "weight" = "Frequency", "total_gain" = "Gain", "total_cover" = "Cover") ] <- results[importance_type] output_names <- results$features } importance <- data.table( Feature = output_names, Gain = concatenated$Gain 
/ sum(concatenated$Gain), Cover = concatenated$Cover / sum(concatenated$Cover), Frequency = concatenated$Frequency / sum(concatenated$Frequency) )[order(Gain, decreasing = TRUE)] } importance } # Avoid error messages during CRAN check. # The reason is that these variables are never declared # They are mainly column names inferred by Data.table... globalVariables(c(".", ".N", "Gain", "Cover", "Frequency", "Feature", "Class")) xgboost-3.0.0/R-package/R/xgb.load.R000066400000000000000000000034421476511272400170270ustar00rootroot00000000000000#' Load XGBoost model from binary file #' #' Load XGBoost model from binary model file. #' #' @param modelfile The name of the binary input file. #' #' @details #' The input file is expected to contain a model saved in an XGBoost model format #' using either [xgb.save()] in R, or using some #' appropriate methods from other XGBoost interfaces. E.g., a model trained in Python and #' saved from there in XGBoost format, could be loaded from R. #' #' Note: a model saved as an R object has to be loaded using corresponding R-methods, #' not by [xgb.load()]. #' #' @return #' An object of `xgb.Booster` class. #' #' @seealso [xgb.save()] #' #' @examples #' \dontshow{RhpcBLASctl::omp_set_num_threads(1)} #' data(agaricus.train, package = "xgboost") #' data(agaricus.test, package = "xgboost") #' #' ## Keep the number of threads to 1 for examples #' nthread <- 1 #' data.table::setDTthreads(nthread) #' #' train <- agaricus.train #' test <- agaricus.test #' #' bst <- xgb.train( #' data = xgb.DMatrix(train$data, label = train$label, nthread = 1), #' nrounds = 2, #' params = xgb.params( #' max_depth = 2, #' nthread = nthread, #' objective = "binary:logistic" #' ) #' ) #' #' fname <- file.path(tempdir(), "xgb.ubj") #' xgb.save(bst, fname) #' bst <- xgb.load(fname) #' @export xgb.load <- function(modelfile) { if (is.null(modelfile)) stop("xgb.load: modelfile cannot be NULL") bst <- xgb.Booster( params = list(), cachelist = list(), modelfile = modelfile ) bst <- bst$bst # re-use modelfile if it is raw so we do not need to serialize if (typeof(modelfile) == "raw") { warning( paste( "The support for loading raw booster with `xgb.load` will be ", "discontinued in upcoming release. Use `xgb.load.raw` instead. " ) ) } return(bst) } xgboost-3.0.0/R-package/R/xgb.load.raw.R000066400000000000000000000005741476511272400176220ustar00rootroot00000000000000#' Load serialised XGBoost model from R's raw vector #' #' User can generate raw memory buffer by calling [xgb.save.raw()]. #' #' @param buffer The buffer returned by [xgb.save.raw()]. #' @export xgb.load.raw <- function(buffer) { cachelist <- list() bst <- .Call(XGBoosterCreate_R, cachelist) .Call(XGBoosterLoadModelFromRaw_R, xgb.get.handle(bst), buffer) return(bst) } xgboost-3.0.0/R-package/R/xgb.model.dt.tree.R000066400000000000000000000163621476511272400205610ustar00rootroot00000000000000#' Parse model text dump #' #' Parse a boosted tree model text dump into a `data.table` structure. #' #' Note that this function does not work with models that were fitted to #' categorical data, and is only applicable to tree-based boosters (not `gblinear`). #' @param model Object of class `xgb.Booster`. If it contains feature names (they can #' be set through [setinfo()]), they will be used in the output from this function. #' #' If the model contains categorical features, an error will be thrown. #' @param trees An integer vector of (base-1) tree indices that should be used. The default #' (`NULL`) uses all trees. 
Useful, e.g., in multiclass classification to get only #' the trees of one class. #' @param use_int_id A logical flag indicating whether nodes in columns "Yes", "No", and #' "Missing" should be represented as integers (when `TRUE`) or as "Tree-Node" #' character strings (when `FALSE`, default). #' @inheritParams xgb.train #' @return #' A `data.table` with detailed information about tree nodes. It has the following columns: #' - `Tree`: integer ID of a tree in a model (zero-based index). #' - `Node`: integer ID of a node in a tree (zero-based index). #' - `ID`: character identifier of a node in a model (only when `use_int_id = FALSE`). #' - `Feature`: for a branch node, a feature ID or name (when available); #' for a leaf node, it simply labels it as `"Leaf"`. #' - `Split`: location of the split for a branch node (split condition is always "less than"). #' - `Yes`: ID of the next node when the split condition is met. #' - `No`: ID of the next node when the split condition is not met. #' - `Missing`: ID of the next node when the branch value is missing. #' - `Gain`: either the split gain (change in loss) or the leaf value. #' - `Cover`: metric related to the number of observations either seen by a split #' or collected by a leaf during training. #' #' When `use_int_id = FALSE`, columns "Yes", "No", and "Missing" point to model-wide node identifiers #' in the "ID" column. When `use_int_id = TRUE`, those columns point to node identifiers from #' the corresponding trees in the "Node" column. #' #' @examples #' # Basic use: #' #' data(agaricus.train, package = "xgboost") #' ## Keep the number of threads to 1 for examples #' nthread <- 1 #' data.table::setDTthreads(nthread) #' #' bst <- xgb.train( #' data = xgb.DMatrix(agaricus.train$data, label = agaricus.train$label, nthread = 1), #' nrounds = 2, #' params = xgb.params( #' max_depth = 2, #' nthread = nthread, #' objective = "binary:logistic" #' ) #' ) #' #' # This bst model already has feature_names stored with it, so those would be used when #' # feature_names is not set: #' dt <- xgb.model.dt.tree(bst) #' #' # How to match feature names of splits that are following a current 'Yes' branch: #' merge( #' dt, #' dt[, .(ID, Y.Feature = Feature)], by.x = "Yes", by.y = "ID", all.x = TRUE #' )[ #' order(Tree, Node) #' ] #' #' @export xgb.model.dt.tree <- function(model, trees = NULL, use_int_id = FALSE, ...) { check.deprecation(deprecated_dttree_params, match.call(), ...) if (!inherits(model, "xgb.Booster")) { stop("Either 'model' must be an object of class xgb.Booster") } if (xgb.has_categ_features(model)) { stop("Cannot produce tables for models having categorical features.") } if (!is.null(trees)) { if (!is.vector(trees) || (!is.numeric(trees) && !is.integer(trees))) { stop("trees: must be a vector of integers.") } trees <- trees - 1L if (anyNA(trees) || min(trees) < 0) { stop("Passed invalid tree indices.") } } feature_names <- NULL if (inherits(model, "xgb.Booster")) { feature_names <- xgb.feature_names(model) } text <- xgb.dump(model = model, with_stats = TRUE) if (length(text) < 2 || !any(grepl('leaf=(-?\\d+)', text))) { stop("Non-tree model detected! This function can only be used with tree models.") } position <- which(grepl("booster", text, fixed = TRUE)) add.tree.id <- function(node, tree) if (use_int_id) node else paste(tree, node, sep = "-") anynumber_regex <- "[-+]?[0-9]*\\.?[0-9]+([eE][-+]?[0-9]+)?" 
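  # The regex above matches a (possibly signed) decimal number with an optional
  # exponent, e.g. "0.5", "-3", "1e-05". Its exponent part is a capture group,
  # which is why some match indices are skipped when extracting fields below.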
td <- data.table(t = text) td[position, Tree := 1L] td[, Tree := cumsum(ifelse(is.na(Tree), 0L, Tree)) - 1L] if (is.null(trees)) { trees <- 0:max(td$Tree) } else { trees <- trees[trees >= 0 & trees <= max(td$Tree)] } td <- td[Tree %in% trees & !is.na(t) & !startsWith(t, 'booster')] td[, Node := as.integer(sub("^([0-9]+):.*", "\\1", t))] if (!use_int_id) td[, ID := add.tree.id(Node, Tree)] td[, isLeaf := grepl("leaf", t, fixed = TRUE)] # parse branch lines branch_rx_nonames <- paste0("f(\\d+)<(", anynumber_regex, ")\\] yes=(\\d+),no=(\\d+),missing=(\\d+),", "gain=(", anynumber_regex, "),cover=(", anynumber_regex, ")") branch_rx_w_names <- paste0("\\d+:\\[(.+)<(", anynumber_regex, ")\\] yes=(\\d+),no=(\\d+),missing=(\\d+),", "gain=(", anynumber_regex, "),cover=(", anynumber_regex, ")") text_has_feature_names <- FALSE if (NROW(feature_names)) { branch_rx <- branch_rx_w_names text_has_feature_names <- TRUE } else { branch_rx <- branch_rx_nonames } branch_cols <- c("Feature", "Split", "Yes", "No", "Missing", "Gain", "Cover") td[ isLeaf == FALSE, (branch_cols) := { matches <- regmatches(t, regexec(branch_rx, t)) # skip some indices with spurious capture groups from anynumber_regex xtr <- do.call(rbind, matches)[, c(2, 3, 5, 6, 7, 8, 10), drop = FALSE] xtr[, 3:5] <- add.tree.id(xtr[, 3:5], Tree) if (length(xtr) == 0) { as.data.table( list(Feature = "NA", Split = "NA", Yes = "NA", No = "NA", Missing = "NA", Gain = "NA", Cover = "NA") ) } else { as.data.table(xtr) } } ] # assign feature_names when available is_stump <- function() { return(length(td$Feature) == 1 && is.na(td$Feature)) } if (!text_has_feature_names) { if (!is.null(feature_names) && !is_stump()) { if (length(feature_names) <= max(as.numeric(td$Feature), na.rm = TRUE)) stop("feature_names has less elements than there are features used in the model") td[isLeaf == FALSE, Feature := feature_names[as.numeric(Feature) + 1]] } } # parse leaf lines leaf_rx <- paste0("leaf=(", anynumber_regex, "),cover=(", anynumber_regex, ")") leaf_cols <- c("Feature", "Gain", "Cover") td[ isLeaf == TRUE, (leaf_cols) := { matches <- regmatches(t, regexec(leaf_rx, t)) xtr <- do.call(rbind, matches)[, c(2, 4)] if (length(xtr) == 2) { c("Leaf", as.data.table(xtr[1]), as.data.table(xtr[2])) } else { c("Leaf", as.data.table(xtr)) } } ] # convert some columns to numeric numeric_cols <- c("Split", "Gain", "Cover") td[, (numeric_cols) := lapply(.SD, as.numeric), .SDcols = numeric_cols] if (use_int_id) { int_cols <- c("Yes", "No", "Missing") td[, (int_cols) := lapply(.SD, as.integer), .SDcols = int_cols] } td[, t := NULL] td[, isLeaf := NULL] td[order(Tree, Node)] } # Avoid notes during CRAN check. # The reason is that these variables are never declared # They are mainly column names inferred by Data.table... globalVariables(c("Tree", "Node", "ID", "Feature", "t", "isLeaf", ".SD", ".SDcols")) xgboost-3.0.0/R-package/R/xgb.plot.deepness.R000066400000000000000000000137531476511272400207010ustar00rootroot00000000000000#' Plot model tree depth #' #' Visualizes distributions related to the depth of tree leaves. #' - `xgb.plot.deepness()` uses base R graphics, while #' - `xgb.ggplot.deepness()` uses "ggplot2". #' #' @param model Either an `xgb.Booster` model, or the "data.table" returned #' by [xgb.model.dt.tree()]. #' @param which Which distribution to plot (see details). #' @param plot Should the plot be shown? Default is `TRUE`. #' @param ... Other parameters passed to [graphics::barplot()] or [graphics::plot()]. 
#'
#' @details
#'
#' When `which = "2x1"`, two distributions with respect to the leaf depth
#' are plotted on top of each other:
#' 1. The distribution of the number of leaves in a tree model at a certain depth.
#' 2. The distribution of the average weighted number of observations ("cover")
#'    ending up in leaves at a certain depth.
#'
#' Those could be helpful in determining sensible ranges of the `max_depth`
#' and `min_child_weight` parameters.
#'
#' When `which = "max.depth"` or `which = "med.depth"`, plots of either maximum or
#' median depth per tree with respect to the tree number are created.
#'
#' Finally, `which = "med.weight"` allows seeing how
#' a tree's median absolute leaf weight changes through the iterations.
#'
#' These functions have been inspired by the blog post
#' .
#'
#' @return
#' The return value of the two functions is as follows:
#' - `xgb.plot.deepness()`: A "data.table" (invisibly).
#'   Each row corresponds to a terminal leaf in the model. It contains information
#'   about depth, cover, and weight (used in calculating predictions).
#'   If `plot = TRUE`, also a plot is shown.
#' - `xgb.ggplot.deepness()`: When `which = "2x1"`, a list of two "ggplot" objects,
#'   and a single "ggplot" object otherwise.
#'
#' @seealso [xgb.train()] and [xgb.model.dt.tree()].
#'
#' @examples
#'
#' data(agaricus.train, package = "xgboost")
#' ## Keep the number of threads to 2 for examples
#' nthread <- 2
#' data.table::setDTthreads(nthread)
#'
#' ## Change max_depth to a higher number to get a more significant result
#' model <- xgboost(
#'   agaricus.train$data, factor(agaricus.train$label),
#'   nrounds = 50,
#'   max_depth = 6,
#'   nthreads = nthread,
#'   subsample = 0.5,
#'   min_child_weight = 2
#' )
#'
#' xgb.plot.deepness(model)
#' xgb.ggplot.deepness(model)
#'
#' xgb.plot.deepness(
#'   model, which = "max.depth", pch = 16, col = rgb(0, 0, 1, 0.3), cex = 2
#' )
#'
#' xgb.plot.deepness(
#'   model, which = "med.weight", pch = 16, col = rgb(0, 0, 1, 0.3), cex = 2
#' )
#'
#' @rdname xgb.plot.deepness
#' @export
xgb.plot.deepness <- function(model = NULL, which = c("2x1", "max.depth", "med.depth", "med.weight"),
                              plot = TRUE, ...) {
  if (!(inherits(model, "xgb.Booster") || is.data.table(model)))
    stop("model: Has to be either an xgb.Booster model generated by the xgb.train function\n",
         "or a data.table result of the xgb.model.dt.tree function")
  if (!requireNamespace("igraph", quietly = TRUE))
    stop("igraph package is required for plotting the graph deepness.", call.
= FALSE) which <- match.arg(which) dt_tree <- model if (inherits(model, "xgb.Booster")) dt_tree <- xgb.model.dt.tree(model = model) if (!all(c("Feature", "Tree", "ID", "Yes", "No", "Cover") %in% colnames(dt_tree))) stop("Model tree columns are not as expected!\n", " Note that this function works only for tree models.") dt_depths <- merge(get.leaf.depth(dt_tree), dt_tree[, .(ID, Cover, Weight = Gain)], by = "ID") setkeyv(dt_depths, c("Tree", "ID")) # count by depth levels, and also calculate average cover at a depth dt_summaries <- dt_depths[, .(.N, Cover = mean(Cover)), Depth] setkey(dt_summaries, "Depth") if (plot) { if (which == "2x1") { op <- par(no.readonly = TRUE) par(mfrow = c(2, 1), oma = c(3, 1, 3, 1) + 0.1, mar = c(1, 4, 1, 0) + 0.1) dt_summaries[, barplot(N, border = NA, ylab = 'Number of leafs', ...)] dt_summaries[, barplot(Cover, border = NA, ylab = "Weighted cover", names.arg = Depth, ...)] title("Model complexity", xlab = "Leaf depth", outer = TRUE, line = 1) par(op) } else if (which == "max.depth") { dt_depths[, max(Depth), Tree][ , plot(jitter(V1, amount = 0.1) ~ Tree, ylab = 'Max tree leaf depth', xlab = "tree #", ...)] } else if (which == "med.depth") { dt_depths[, median(as.numeric(Depth)), Tree][ , plot(jitter(V1, amount = 0.1) ~ Tree, ylab = 'Median tree leaf depth', xlab = "tree #", ...)] } else if (which == "med.weight") { dt_depths[, median(abs(Weight)), Tree][ , plot(V1 ~ Tree, ylab = 'Median absolute leaf weight', xlab = "tree #", ...)] } } invisible(dt_depths) } # Extract path depths from root to leaf # from data.table containing the nodes and edges of the trees. # internal utility function get.leaf.depth <- function(dt_tree) { # extract tree graph's edges dt_edges <- rbindlist(list( dt_tree[Feature != "Leaf", .(ID, To = Yes, Tree)], dt_tree[Feature != "Leaf", .(ID, To = No, Tree)] )) # whether "To" is a leaf: dt_edges <- merge(dt_edges, dt_tree[Feature == "Leaf", .(ID, Leaf = TRUE)], all.x = TRUE, by.x = "To", by.y = "ID") dt_edges[is.na(Leaf), Leaf := FALSE] dt_edges[, { graph <- igraph::graph_from_data_frame(.SD[, .(ID, To)]) # min(ID) in a tree is a root node paths_tmp <- igraph::shortest_paths(graph, from = min(ID), to = To[Leaf == TRUE]) # list of paths to each leaf in a tree paths <- lapply(paths_tmp$vpath, names) # combine into a resulting path lengths table for a tree data.table(Depth = lengths(paths), ID = To[Leaf == TRUE]) }, by = Tree] } # Avoid error messages during CRAN check. # The reason is that these variables are never declared # They are mainly column names inferred by Data.table... globalVariables( c( ".N", "N", "Depth", "Gain", "Cover", "Tree", "ID", "Yes", "No", "Feature", "Leaf", "Weight" ) ) xgboost-3.0.0/R-package/R/xgb.plot.importance.R000066400000000000000000000125361476511272400212320ustar00rootroot00000000000000#' Plot feature importance #' #' Represents previously calculated feature importance as a bar graph. #' - `xgb.plot.importance()` uses base R graphics, while #' - `xgb.ggplot.importance()` uses "ggplot". #' #' @details #' The graph represents each feature as a horizontal bar of length proportional to the #' importance of a feature. Features are sorted by decreasing importance. #' It works for both "gblinear" and "gbtree" models. #' #' When `rel_to_first = FALSE`, the values would be plotted as in `importance_matrix`. #' For a "gbtree" model, that would mean being normalized to the total of 1 #' ("what is feature's importance contribution relative to the whole model?"). 
#' For linear models, `rel_to_first = FALSE` would show actual values of the coefficients. #' Setting `rel_to_first = TRUE` allows to see the picture from the perspective of #' "what is feature's importance contribution relative to the most important feature?" #' #' The "ggplot" backend performs 1-D clustering of the importance values, #' with bar colors corresponding to different clusters having similar importance values. #' #' @param importance_matrix A `data.table` as returned by [xgb.importance()]. #' @param top_n Maximal number of top features to include into the plot. #' @param measure The name of importance measure to plot. #' When `NULL`, 'Gain' would be used for trees and 'Weight' would be used for gblinear. #' @param rel_to_first Whether importance values should be represented as relative to #' the highest ranked feature, see Details. #' @param left_margin Adjust the left margin size to fit feature names. #' When `NULL`, the existing `par("mar")` is used. #' @param cex Passed as `cex.names` parameter to [graphics::barplot()]. #' @param plot Should the barplot be shown? Default is `TRUE`. #' @param n_clusters A numeric vector containing the min and the max range #' of the possible number of clusters of bars. #' @param ... Other parameters passed to [graphics::barplot()] #' (except `horiz`, `border`, `cex.names`, `names.arg`, and `las`). #' Only used in `xgb.plot.importance()`. #' @return #' The return value depends on the function: #' - `xgb.plot.importance()`: Invisibly, a "data.table" with `n_top` features sorted #' by importance. If `plot = TRUE`, the values are also plotted as barplot. #' - `xgb.ggplot.importance()`: A customizable "ggplot" object. #' E.g., to change the title, set `+ ggtitle("A GRAPH NAME")`. #' #' @seealso [graphics::barplot()] #' #' @examples #' data(agaricus.train) #' #' ## Keep the number of threads to 2 for examples #' nthread <- 2 #' data.table::setDTthreads(nthread) #' #' model <- xgboost( #' agaricus.train$data, factor(agaricus.train$label), #' nrounds = 2, #' max_depth = 3, #' nthreads = nthread #' ) #' #' importance_matrix <- xgb.importance(model) #' xgb.plot.importance( #' importance_matrix, rel_to_first = TRUE, xlab = "Relative importance" #' ) #' #' gg <- xgb.ggplot.importance( #' importance_matrix, measure = "Frequency", rel_to_first = TRUE #' ) #' gg #' gg + ggplot2::ylab("Frequency") #' #' @rdname xgb.plot.importance #' @export xgb.plot.importance <- function(importance_matrix = NULL, top_n = NULL, measure = NULL, rel_to_first = FALSE, left_margin = 10, cex = NULL, plot = TRUE, ...) 
{ check.deprecation(deprecated_plotimp_params, match.call(), ..., allow_unrecognized = TRUE) if (!is.data.table(importance_matrix)) { stop("importance_matrix: must be a data.table") } imp_names <- colnames(importance_matrix) if (is.null(measure)) { if (all(c("Feature", "Gain") %in% imp_names)) { measure <- "Gain" } else if (all(c("Feature", "Weight") %in% imp_names)) { measure <- "Weight" } else { stop("Importance matrix column names are not as expected!") } } else { if (!measure %in% imp_names) stop("Invalid `measure`") if (!"Feature" %in% imp_names) stop("Importance matrix column names are not as expected!") } # also aggregate, just in case when the values were not yet summed up by feature importance_matrix <- importance_matrix[ , lapply(.SD, sum) , .SDcols = setdiff(names(importance_matrix), "Feature") , by = Feature ][ , Importance := get(measure) ] # make sure it's ordered importance_matrix <- importance_matrix[order(-abs(Importance))] if (!is.null(top_n)) { top_n <- min(top_n, nrow(importance_matrix)) importance_matrix <- head(importance_matrix, top_n) } if (rel_to_first) { importance_matrix[, Importance := Importance / max(abs(Importance))] } if (is.null(cex)) { cex <- 2.5 / log2(1 + nrow(importance_matrix)) } if (plot) { original_mar <- par()$mar # reset margins so this function doesn't have side effects on.exit({ par(mar = original_mar) }) mar <- original_mar if (!is.null(left_margin)) mar[2] <- left_margin par(mar = mar) # reverse the order of rows to have the highest ranked at the top importance_matrix[rev(seq_len(nrow(importance_matrix))), barplot(Importance, horiz = TRUE, border = NA, cex.names = cex, names.arg = Feature, las = 1, ...)] } invisible(importance_matrix) } # Avoid error messages during CRAN check. # The reason is that these variables are never declared # They are mainly column names inferred by Data.table... globalVariables(c("Feature", "Importance")) xgboost-3.0.0/R-package/R/xgb.plot.multi.trees.R000066400000000000000000000134461476511272400213450ustar00rootroot00000000000000#' Project all trees on one tree #' #' Visualization of the ensemble of trees as a single collective unit. #' #' Note that this function does not work with models that were fitted to #' categorical data. #' @details #' This function tries to capture the complexity of a gradient boosted tree model #' in a cohesive way by compressing an ensemble of trees into a single tree-graph representation. #' The goal is to improve the interpretability of a model generally seen as black box. #' #' Note: this function is applicable to tree booster-based models only. #' #' It takes advantage of the fact that the shape of a binary tree is only defined by #' its depth (therefore, in a boosting model, all trees have similar shape). #' #' Moreover, the trees tend to reuse the same features. #' #' The function projects each tree onto one, and keeps for each position the #' `features_keep` first features (based on the Gain per feature measure). #' #' This function is inspired by this blog post: #' #' #' @inheritParams xgb.plot.tree #' @param features_keep Number of features to keep in each position of the multi trees, #' by default 5. #' @param render Should the graph be rendered or not? The default is `TRUE`. 
#' @inherit xgb.plot.tree return #' #' @examples #' #' data(agaricus.train, package = "xgboost") #' #' ## Keep the number of threads to 2 for examples #' nthread <- 2 #' data.table::setDTthreads(nthread) #' #' model <- xgboost( #' agaricus.train$data, factor(agaricus.train$label), #' nrounds = 30, #' verbosity = 0L, #' nthreads = nthread, #' max_depth = 15, #' learning_rate = 1, #' min_child_weight = 50 #' ) #' #' p <- xgb.plot.multi.trees(model, features_keep = 3) #' print(p) #' #' # Below is an example of how to save this plot to a file. #' if (require("DiagrammeR") && require("DiagrammeRsvg") && require("rsvg")) { #' fname <- file.path(tempdir(), "tree.pdf") #' gr <- xgb.plot.multi.trees(model, features_keep = 3, render = FALSE) #' export_graph(gr, fname, width = 1500, height = 600) #' } #' @export xgb.plot.multi.trees <- function(model, features_keep = 5, plot_width = NULL, plot_height = NULL, render = TRUE, ...) { check.deprecation(deprecated_multitrees_params, match.call(), ...) if (!requireNamespace("DiagrammeR", quietly = TRUE)) { stop("DiagrammeR is required for xgb.plot.multi.trees") } if (xgb.has_categ_features(model)) { stop( "Cannot use 'xgb.plot.multi.trees' for models with categorical features.", " Try 'xgb.plot.tree' instead." ) } tree.matrix <- xgb.model.dt.tree(model = model) # first number of the path represents the tree, then the following numbers are related to the path to follow # root init root.nodes <- tree.matrix[Node == 0, ID] tree.matrix[ID %in% root.nodes, abs.node.position := root.nodes] precedent.nodes <- root.nodes while (tree.matrix[, sum(is.na(abs.node.position))] > 0) { yes.row.nodes <- tree.matrix[abs.node.position %in% precedent.nodes & !is.na(Yes)] no.row.nodes <- tree.matrix[abs.node.position %in% precedent.nodes & !is.na(No)] yes.nodes.abs.pos <- paste0(yes.row.nodes[, abs.node.position], "_0") no.nodes.abs.pos <- paste0(no.row.nodes[, abs.node.position], "_1") tree.matrix[ID %in% yes.row.nodes[, Yes], abs.node.position := yes.nodes.abs.pos] tree.matrix[ID %in% no.row.nodes[, No], abs.node.position := no.nodes.abs.pos] precedent.nodes <- c(yes.nodes.abs.pos, no.nodes.abs.pos) } tree.matrix[!is.na(Yes), Yes := paste0(abs.node.position, "_0")] tree.matrix[!is.na(No), No := paste0(abs.node.position, "_1")] for (nm in c("abs.node.position", "Yes", "No")) data.table::set(tree.matrix, j = nm, value = sub("^\\d+-", "", tree.matrix[[nm]])) nodes.dt <- tree.matrix[ , .(Gain = sum(Gain)) , by = .(abs.node.position, Feature) ][, .(Text = paste0( paste0( Feature[seq_len(min(length(Feature), features_keep))], " (", format(Gain[seq_len(min(length(Gain), features_keep))], digits = 5), ")" ), collapse = "\n" ) ) , by = abs.node.position ] edges.dt <- data.table::rbindlist( l = list( tree.matrix[Feature != "Leaf", .(From = abs.node.position, To = Yes)], tree.matrix[Feature != "Leaf", .(From = abs.node.position, To = No)] ) ) edges.dt <- edges.dt[, .N, .(From, To)] edges.dt[, N := NULL] nodes <- DiagrammeR::create_node_df( n = nrow(nodes.dt), label = nodes.dt[, Text] ) edges <- DiagrammeR::create_edge_df( from = match(edges.dt[, From], nodes.dt[, abs.node.position]), to = match(edges.dt[, To], nodes.dt[, abs.node.position]), rel = "leading_to") graph <- DiagrammeR::create_graph( nodes_df = nodes, edges_df = edges, attr_theme = NULL ) graph <- DiagrammeR::add_global_graph_attrs( graph = graph, attr_type = "graph", attr = c("layout", "rankdir"), value = c("dot", "LR") ) graph <- DiagrammeR::add_global_graph_attrs( graph = graph, attr_type = "node", attr = c("color", 
"fillcolor", "style", "shape", "fontname"), value = c("DimGray", "beige", "filled", "rectangle", "Helvetica") ) graph <- DiagrammeR::add_global_graph_attrs( graph = graph, attr_type = "edge", attr = c("color", "arrowsize", "arrowhead", "fontname"), value = c("DimGray", "1.5", "vee", "Helvetica") ) if (!render) return(invisible(graph)) DiagrammeR::render_graph(graph, width = plot_width, height = plot_height) } globalVariables(c(".N", "N", "From", "To", "Text", "Feature", "no.nodes.abs.pos", "ID", "Yes", "No", "Tree", "yes.nodes.abs.pos", "abs.node.position")) xgboost-3.0.0/R-package/R/xgb.plot.shap.R000066400000000000000000000344721476511272400200270ustar00rootroot00000000000000#' SHAP dependence plots #' #' Visualizes SHAP values against feature values to gain an impression of feature effects. #' #' @param data The data to explain as a `matrix`, `dgCMatrix`, or `data.frame`. #' @param shap_contrib Matrix of SHAP contributions of `data`. #' The default (`NULL`) computes it from `model` and `data`. #' @param features Vector of column indices or feature names to plot. When `NULL` #' (default), the `top_n` most important features are selected by [xgb.importance()]. #' @param top_n How many of the most important features (<= 100) should be selected? #' By default 1 for SHAP dependence and 10 for SHAP summary. #' Only used when `features = NULL`. #' @param model An `xgb.Booster` model. Only required when `shap_contrib = NULL` or #' `features = NULL`. #' @param trees Passed to [xgb.importance()] when `features = NULL`. #' @param target_class Only relevant for multiclass models. The default (`NULL`) #' averages the SHAP values over all classes. Pass a (0-based) class index #' to show only SHAP values of that class. #' @param approxcontrib Passed to [predict.xgb.Booster()] when `shap_contrib = NULL`. #' @param subsample Fraction of data points randomly picked for plotting. #' The default (`NULL`) will use up to 100k data points. #' @param n_col Number of columns in a grid of plots. #' @param col Color of the scatterplot markers. #' @param pch Scatterplot marker. #' @param discrete_n_uniq Maximal number of unique feature values to consider the #' feature as discrete. #' @param discrete_jitter Jitter amount added to the values of discrete features. #' @param ylab The y-axis label in 1D plots. #' @param plot_NA Should contributions of cases with missing values be plotted? #' Default is `TRUE`. #' @param col_NA Color of marker for missing value contributions. #' @param pch_NA Marker type for `NA` values. #' @param pos_NA Relative position of the x-location where `NA` values are shown: #' `min(x) + (max(x) - min(x)) * pos_NA`. #' @param plot_loess Should loess-smoothed curves be plotted? (Default is `TRUE`). #' The smoothing is only done for features with more than 5 distinct values. #' @param col_loess Color of loess curves. #' @param span_loess The `span` parameter of [stats::loess()]. #' @param which Whether to do univariate or bivariate plotting. Currently, only "1d" is implemented. #' @param plot Should the plot be drawn? (Default is `TRUE`). #' If `FALSE`, only a list of matrices is returned. #' @param ... Other parameters passed to [graphics::plot()]. #' #' @details #' #' These scatterplots represent how SHAP feature contributions depend of feature values. #' The similarity to partial dependence plots is that they also give an idea for how feature values #' affect predictions. 
However, in partial dependence plots, we see marginal dependencies #' of model prediction on feature value, while SHAP dependence plots display the estimated #' contributions of a feature to the prediction for each individual case. #' #' When `plot_loess = TRUE`, feature values are rounded to three significant digits and #' weighted LOESS is computed and plotted, where the weights are the numbers of data points #' at each rounded value. #' #' Note: SHAP contributions are on the scale of the model margin. #' E.g., for a logistic binomial objective, the margin is on log-odds scale. #' Also, since SHAP stands for "SHapley Additive exPlanation" (model prediction = sum of SHAP #' contributions for all features + bias), depending on the objective used, transforming SHAP #' contributions for a feature from the marginal to the prediction space is not necessarily #' a meaningful thing to do. #' #' @return #' In addition to producing plots (when `plot = TRUE`), it silently returns a list of two matrices: #' - `data`: Feature value matrix. #' - `shap_contrib`: Corresponding SHAP value matrix. #' #' @references #' 1. Scott M. Lundberg, Su-In Lee, "A Unified Approach to Interpreting Model Predictions", #' NIPS Proceedings 2017, #' 2. Scott M. Lundberg, Su-In Lee, "Consistent feature attribution for tree ensembles", #' #' #' @examples #' #' data(agaricus.train, package = "xgboost") #' data(agaricus.test, package = "xgboost") #' #' ## Keep the number of threads to 1 for examples #' nthread <- 1 #' data.table::setDTthreads(nthread) #' nrounds <- 20 #' #' model_binary <- xgboost( #' agaricus.train$data, factor(agaricus.train$label), #' nrounds = nrounds, #' verbosity = 0L, #' learning_rate = 0.1, #' max_depth = 3L, #' subsample = 0.5, #' nthreads = nthread #' ) #' #' xgb.plot.shap(agaricus.test$data, model = model_binary, features = "odor=none") #' #' contr <- predict(model_binary, agaricus.test$data, type = "contrib") #' xgb.plot.shap(agaricus.test$data, contr, model = model_binary, top_n = 12, n_col = 3) #' #' # Summary plot #' xgb.ggplot.shap.summary(agaricus.test$data, contr, model = model_binary, top_n = 12) #' #' # Multiclass example - plots for each class separately: #' x <- as.matrix(iris[, -5]) #' set.seed(123) #' is.na(x[sample(nrow(x) * 4, 30)]) <- TRUE # introduce some missing values #' #' model_multiclass <- xgboost( #' x, iris$Species, #' nrounds = nrounds, #' verbosity = 0, #' max_depth = 2, #' subsample = 0.5, #' nthreads = nthread #' ) #' nclass <- 3 #' trees0 <- seq(from = 1, by = nclass, length.out = nrounds) #' col <- rgb(0, 0, 1, 0.5) #' #' xgb.plot.shap( #' x, #' model = model_multiclass, #' trees = trees0, #' target_class = 0, #' top_n = 4, #' n_col = 2, #' col = col, #' pch = 16, #' pch_NA = 17 #' ) #' #' xgb.plot.shap( #' x, #' model = model_multiclass, #' trees = trees0 + 1, #' target_class = 1, #' top_n = 4, #' n_col = 2, #' col = col, #' pch = 16, #' pch_NA = 17 #' ) #' #' xgb.plot.shap( #' x, #' model = model_multiclass, #' trees = trees0 + 2, #' target_class = 2, #' top_n = 4, #' n_col = 2, #' col = col, #' pch = 16, #' pch_NA = 17 #' ) #' #' # Summary plot #' xgb.ggplot.shap.summary(x, model = model_multiclass, target_class = 0, top_n = 4) #' #' @rdname xgb.plot.shap #' @export xgb.plot.shap <- function(data, shap_contrib = NULL, features = NULL, top_n = 1, model = NULL, trees = NULL, target_class = NULL, approxcontrib = FALSE, subsample = NULL, n_col = 1, col = rgb(0, 0, 1, 0.2), pch = '.', discrete_n_uniq = 5, discrete_jitter = 0.01, ylab = "SHAP", plot_NA = TRUE, col_NA = 
rgb(0.7, 0, 1, 0.6), pch_NA = '.', pos_NA = 1.07, plot_loess = TRUE, col_loess = 2, span_loess = 0.5, which = c("1d", "2d"), plot = TRUE, ...) { data_list <- xgb.shap.data( data = data, shap_contrib = shap_contrib, features = features, top_n = top_n, model = model, trees = trees, target_class = target_class, approxcontrib = approxcontrib, subsample = subsample, max_observations = 100000 ) data <- data_list[["data"]] shap_contrib <- data_list[["shap_contrib"]] features <- colnames(data) which <- match.arg(which) if (which == "2d") stop("2D plots are not implemented yet") if (n_col > length(features)) n_col <- length(features) if (plot && which == "1d") { op <- par(mfrow = c(ceiling(length(features) / n_col), n_col), oma = c(0, 0, 0, 0) + 0.2, mar = c(3.5, 3.5, 0, 0) + 0.1, mgp = c(1.7, 0.6, 0)) for (f in features) { ord <- order(data[, f]) x <- data[, f][ord] y <- shap_contrib[, f][ord] x_lim <- range(x, na.rm = TRUE) y_lim <- range(y, na.rm = TRUE) do_na <- plot_NA && anyNA(x) if (do_na) { x_range <- diff(x_lim) loc_na <- min(x, na.rm = TRUE) + x_range * pos_NA x_lim <- range(c(x_lim, loc_na)) } x_uniq <- unique(x) x2plot <- x # add small jitter for discrete features with <= 5 distinct values if (length(x_uniq) <= discrete_n_uniq) x2plot <- jitter(x, amount = discrete_jitter * min(diff(x_uniq), na.rm = TRUE)) plot(x2plot, y, pch = pch, xlab = f, col = col, xlim = x_lim, ylim = y_lim, ylab = ylab, ...) grid() if (plot_loess) { # compress x to 3 digits, and mean-aggregate y zz <- data.table(x = signif(x, 3), y)[, .(.N, y = mean(y)), x] if (nrow(zz) <= 5) { lines(zz$x, zz$y, col = col_loess) } else { lo <- stats::loess(y ~ x, data = zz, weights = zz$N, span = span_loess) zz$y_lo <- predict(lo, zz, type = "link") lines(zz$x, zz$y_lo, col = col_loess) } } if (do_na) { i_na <- which(is.na(x)) x_na <- rep(loc_na, length(i_na)) x_na <- jitter(x_na, amount = x_range * 0.01) points(x_na, y[i_na], pch = pch_NA, col = col_NA) } } par(op) } if (plot && which == "2d") { # TODO warning("Bivariate plotting is currently not available.") } invisible(list(data = data, shap_contrib = shap_contrib)) } #' SHAP summary plot #' #' Visualizes SHAP contributions of different features. #' #' A point plot (each point representing one observation from `data`) is #' produced for each feature, with the points plotted on the SHAP value axis. #' Each point (observation) is coloured based on its feature value. #' #' The plot allows to see which features have a negative / positive contribution #' on the model prediction, and whether the contribution is different for larger #' or smaller values of the feature. Inspired by the summary plot of #' . #' #' @inheritParams xgb.plot.shap #' #' @return A `ggplot2` object. #' @export #' #' @examples #' # See examples in xgb.plot.shap() #' #' @seealso [xgb.plot.shap()], [xgb.ggplot.shap.summary()], #' and the Python library . xgb.plot.shap.summary <- function(data, shap_contrib = NULL, features = NULL, top_n = 10, model = NULL, trees = NULL, target_class = NULL, approxcontrib = FALSE, subsample = NULL) { # Only ggplot implementation is available. xgb.ggplot.shap.summary(data, shap_contrib, features, top_n, model, trees, target_class, approxcontrib, subsample) } #' Prepare data for SHAP plots #' #' Internal function used in [xgb.plot.shap()], [xgb.plot.shap.summary()], etc. #' #' @inheritParams xgb.plot.shap #' @param max_observations Maximum number of observations to consider. #' @keywords internal #' @noRd #' #' @return #' A list containing: #' - `data`: The matrix of feature values. 
#' - `shap_contrib`: The matrix with corresponding SHAP values. xgb.shap.data <- function(data, shap_contrib = NULL, features = NULL, top_n = 1, model = NULL, trees = NULL, target_class = NULL, approxcontrib = FALSE, subsample = NULL, max_observations = 100000) { if (!inherits(data, c("matrix", "dsparseMatrix", "data.frame"))) stop("data: must be matrix, sparse matrix, or data.frame.") if (inherits(data, "data.frame") && length(class(data)) > 1L) { data <- as.data.frame(data) } if (is.null(shap_contrib) && (is.null(model) || !inherits(model, "xgb.Booster"))) stop("when shap_contrib is not provided, one must provide an xgb.Booster model") if (is.null(features) && (is.null(model) || !inherits(model, "xgb.Booster"))) stop("when features are not provided, one must provide an xgb.Booster model to rank the features") last_dim <- function(v) dim(v)[length(dim(v))] if (!is.null(shap_contrib) && (!is.array(shap_contrib) || nrow(shap_contrib) != nrow(data) || last_dim(shap_contrib) != ncol(data) + 1)) stop("shap_contrib is not compatible with the provided data") if (is.character(features) && is.null(colnames(data))) stop("either provide `data` with column names or provide `features` as column indices") model_feature_names <- NULL if (is.null(features) && !is.null(model)) { model_feature_names <- xgb.feature_names(model) } if (is.null(model_feature_names) && xgb.num_feature(model) != ncol(data)) stop("if model has no feature_names, columns in `data` must match features in model") if (!is.null(subsample)) { if (subsample <= 0 || subsample >= 1) { stop("'subsample' must be a number between zero and one (non-inclusive).") } sample_size <- as.integer(subsample * nrow(data)) if (sample_size < 2) { stop("Sampling fraction involves less than 2 rows.") } idx <- sample(x = seq_len(nrow(data)), size = sample_size, replace = FALSE) } else { idx <- seq_len(min(nrow(data), max_observations)) } data <- data[idx, ] if (is.null(colnames(data))) { colnames(data) <- paste0("X", seq_len(ncol(data))) } reshape_3d_shap_contrib <- function(shap_contrib, target_class) { # multiclass: either choose a class or merge if (is.list(shap_contrib)) { if (!is.null(target_class)) { shap_contrib <- shap_contrib[[target_class + 1]] } else { shap_contrib <- Reduce("+", lapply(shap_contrib, abs)) } } else if (length(dim(shap_contrib)) > 2) { if (!is.null(target_class)) { orig_shape <- dim(shap_contrib) shap_contrib <- shap_contrib[, target_class + 1, , drop = TRUE] if (!is.matrix(shap_contrib)) { shap_contrib <- matrix(shap_contrib, orig_shape[c(1L, 3L)]) } } else { shap_contrib <- apply(abs(shap_contrib), c(1L, 3L), sum) } } return(shap_contrib) } if (is.null(shap_contrib)) { shap_contrib <- predict.xgb.Booster( model, newdata = data, predcontrib = TRUE, approxcontrib = approxcontrib ) } shap_contrib <- reshape_3d_shap_contrib(shap_contrib, target_class) if (is.null(colnames(shap_contrib))) { colnames(shap_contrib) <- paste0("X", seq_len(ncol(data))) } if (is.null(features)) { if (!is.null(model_feature_names)) { imp <- xgb.importance(model = model, trees = trees) } else { imp <- xgb.importance(model = model, trees = trees, feature_names = colnames(data)) } top_n <- top_n[1] if (top_n < 1 || top_n > 100) stop("top_n: must be an integer within [1, 100]") features <- imp$Feature[seq_len(min(top_n, NROW(imp)))] } if (is.character(features)) { features <- match(features, colnames(data)) } shap_contrib <- shap_contrib[, features, drop = FALSE] data <- data[, features, drop = FALSE] list( data = data, shap_contrib = shap_contrib ) } 
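# A minimal illustrative sketch (hypothetical helper, not part of the package API):
# it collapses a multiclass SHAP contribution array of shape [rows, classes, features + 1],
# as produced by predict(model, x, type = "contrib") for a multiclass model, into a
# 2D matrix, mirroring the logic of reshape_3d_shap_contrib() above.
.example_collapse_shap <- function(shap_contrib, target_class = NULL) {
  if (!is.null(target_class)) {
    # keep the SHAP matrix of a single (0-based) class
    return(shap_contrib[, target_class + 1L, , drop = TRUE])
  }
  # otherwise merge the classes by summing absolute contributions per feature
  apply(abs(shap_contrib), c(1L, 3L), sum)
}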
xgboost-3.0.0/R-package/R/xgb.plot.tree.R000066400000000000000000000056001476511272400200220ustar00rootroot00000000000000#' Plot boosted trees #' #' Read a tree model text dump and plot the model. #' #' @details #' The content of each node is visualized as follows: #' - For non-terminal nodes, it will display the split condition (number or name #' if available, and the condition that would decide to which node to go #' next). #' - Those nodes will be connected to their children by arrows that indicate #' whether the branch corresponds to the condition being met or not being met. #' - Terminal (leaf) nodes contain the margin to add when ending there. #' #' The "Yes" branches are marked by the "< split_value" label. #' The branches also used for missing values are marked as bold #' (as in "carrying extra capacity"). #' #' This function uses [GraphViz](https://www.graphviz.org/) as the DiagrammeR #' backend. #' #' @param model Object of class `xgb.Booster`. If it contains feature names #' (they can be set through [setinfo()]), they will be used in the #' output from this function. #' @param tree_idx An integer of the tree index that should be used. This #' is a 1-based index. #' @param plot_width,plot_height Width and height of the graph in pixels. #' The values are passed to `DiagrammeR::render_graph()`. #' @param with_stats Whether to dump some additional statistics about the #' splits. When this option is on, the model dump contains two additional #' values: gain is the approximate loss function gain we get in each split; #' cover is the sum of second order gradient in each node. #' @inheritParams xgb.train #' @return #' #' Rendered graph object which is an htmlwidget of class `grViz`. Similar to #' "ggplot" objects, it needs to be printed when not running from the command #' line. #' #' @examples #' data("ToothGrowth") #' x <- ToothGrowth[, c("len", "dose")] #' y <- ToothGrowth$supp #' model <- xgboost( #' x, y, #' nthreads = 1L, #' nrounds = 3L, #' max_depth = 3L #' ) #' #' # plot the first tree #' xgb.plot.tree(model, tree_idx = 1) #' #' # Below is an example of how to save this plot to a file. #' if (require("DiagrammeR") && require("htmlwidgets")) { #' fname <- file.path(tempdir(), "plot.html") #' gr <- xgb.plot.tree(model, tree_idx = 1) #' htmlwidgets::saveWidget(gr, fname) #' } #' @export xgb.plot.tree <- function(model, tree_idx = 1, plot_width = NULL, plot_height = NULL, with_stats = FALSE, ...) { check.deprecation(deprecated_plottree_params, match.call(), ...) if (!inherits(model, "xgb.Booster")) { stop("model has to be an object of the class xgb.Booster") } if (!requireNamespace("DiagrammeR", quietly = TRUE)) { stop("The DiagrammeR package is required for xgb.plot.tree", call. = FALSE) } txt <- xgb.dump(model, dump_format = "dot", with_stats = with_stats) DiagrammeR::grViz( txt[[tree_idx]], width = plot_width, height = plot_height ) } xgboost-3.0.0/R-package/R/xgb.save.R000066400000000000000000000054531476511272400170520ustar00rootroot00000000000000#' Save XGBoost model to binary file #' #' Save XGBoost model to a file in binary or JSON format. #' #' @param model Model object of `xgb.Booster` class. #' @param fname Name of the file to write. Its extension determines the serialization format: #' - ".ubj": Use the universal binary JSON format (recommended). #' This format uses binary types for e.g. floating point numbers, thereby preventing any loss #' of precision when converting to a human-readable JSON text or similar.
#' - ".json": Use plain JSON, which is a human-readable format. #' - ".deprecated": Use **deprecated** binary format. This format will #' not be able to save attributes introduced after v1 of XGBoost, such as the "best_iteration" #' attribute that boosters might keep, nor feature names or user-specifiec attributes. #' - If the format is not specified by passing one of the file extensions above, will #' default to UBJ. #' #' @details #' #' This methods allows to save a model in an XGBoost-internal binary or text format which is universal #' among the various xgboost interfaces. In R, the saved model file could be read later #' using either the [xgb.load()] function or the `xgb_model` parameter of [xgb.train()]. #' #' Note: a model can also be saved as an R object (e.g., by using [readRDS()] #' or [save()]). However, it would then only be compatible with R, and #' corresponding R methods would need to be used to load it. Moreover, persisting the model with #' [readRDS()] or [save()] might cause compatibility problems in #' future versions of XGBoost. Consult [a-compatibility-note-for-saveRDS-save] to learn #' how to persist models in a future-proof way, i.e., to make the model accessible in future #' releases of XGBoost. #' #' @seealso [xgb.load()] #' #' @examples #' \dontshow{RhpcBLASctl::omp_set_num_threads(1)} #' data(agaricus.train, package = "xgboost") #' data(agaricus.test, package = "xgboost") #' #' ## Keep the number of threads to 1 for examples #' nthread <- 1 #' data.table::setDTthreads(nthread) #' #' train <- agaricus.train #' test <- agaricus.test #' #' bst <- xgb.train( #' data = xgb.DMatrix(train$data, label = train$label, nthread = 1), #' nrounds = 2, #' params = xgb.params( #' max_depth = 2, #' nthread = nthread, #' objective = "binary:logistic" #' ) #' ) #' #' fname <- file.path(tempdir(), "xgb.ubj") #' xgb.save(bst, fname) #' bst <- xgb.load(fname) #' @export xgb.save <- function(model, fname) { if (typeof(fname) != "character") stop("fname must be character") if (!inherits(model, "xgb.Booster")) { stop("model must be xgb.Booster.", if (inherits(model, "xgb.DMatrix")) " Use xgb.DMatrix.save to save an xgb.DMatrix object." else "") } fname <- path.expand(fname) .Call(XGBoosterSaveModel_R, xgb.get.handle(model), enc2utf8(fname[1])) return(TRUE) } xgboost-3.0.0/R-package/R/xgb.save.raw.R000066400000000000000000000024161476511272400176360ustar00rootroot00000000000000#' Save XGBoost model to R's raw vector #' #' Save XGBoost model from [xgboost()] or [xgb.train()]. #' Call [xgb.load.raw()] to load the model back from raw vector. #' #' @param model The model object. #' @param raw_format The format for encoding the booster: #' - "json": Encode the booster into JSON text document. #' - "ubj": Encode the booster into Universal Binary JSON. #' - "deprecated": Encode the booster into old customized binary format. 
#' #' @examples #' \dontshow{RhpcBLASctl::omp_set_num_threads(1)} #' data(agaricus.train, package = "xgboost") #' data(agaricus.test, package = "xgboost") #' #' ## Keep the number of threads to 1 for examples #' nthread <- 1 #' data.table::setDTthreads(nthread) #' #' train <- agaricus.train #' test <- agaricus.test #' #' bst <- xgb.train( #' data = xgb.DMatrix(train$data, label = train$label, nthread = 1), #' nrounds = 2, #' params = xgb.params( #' max_depth = 2, #' nthread = nthread, #' objective = "binary:logistic" #' ) #' ) #' #' raw <- xgb.save.raw(bst) #' bst <- xgb.load.raw(raw) #' #' @export xgb.save.raw <- function(model, raw_format = "ubj") { handle <- xgb.get.handle(model) args <- list(format = raw_format) .Call(XGBoosterSaveModelToRaw_R, handle, jsonlite::toJSON(args, auto_unbox = TRUE)) } xgboost-3.0.0/R-package/R/xgb.train.R000066400000000000000000001500311476511272400172220ustar00rootroot00000000000000#' @title Fit XGBoost Model #' @description Fits an XGBoost model to given data in DMatrix format (e.g. as produced by [xgb.DMatrix()]). #' See the tutorial [Introduction to Boosted Trees](https://xgboost.readthedocs.io/en/stable/tutorials/model.html) #' for a longer explanation of what XGBoost does, and the rest of the #' [XGBoost Tutorials](https://xgboost.readthedocs.io/en/latest/tutorials/index.html) for further #' explanations of XGBoost's features and usage. #' #' Compared to function [xgboost()] which is a user-friendly function targeted towards interactive #' usage, ``xgb.train`` is a lower-level interface which allows finer-grained control and exposes #' further functionalities offered by the core library (such as learning-to-rank objectives), but #' which works exclusively with XGBoost's own data format ("DMatrices") instead of with regular R #' objects. #' #' The syntax of this function closely mimics the same function from the Python package for XGBoost, #' and is recommended for package developers over `xgboost()` as it will provide a more #' stable interface (with fewer breaking changes) and lower overhead from data validations. #' #' See also the [migration guide](https://xgboost.readthedocs.io/en/latest/R-package/migration_guide.html) #' if coming from a previous version of XGBoost in the 1.x series. #' @param params List of XGBoost parameters which control the model building process. #' See the [online documentation](https://xgboost.readthedocs.io/en/latest/parameter.html) #' and the documentation for [xgb.params()] for details. #' #' Should be passed as a list with named entries. Parameters that are not specified in this #' list will use their default values. #' #' A list of named parameters can be created through the function [xgb.params()], which #' accepts all valid parameters as function arguments. #' @param data Training dataset. `xgb.train()` accepts only an `xgb.DMatrix` as the input. #' #' Note that there is a function [xgboost()] which is meant to accept R data objects #' as inputs, such as data frames and matrices. #' @param nrounds Max number of boosting iterations. #' @param evals Named list of `xgb.DMatrix` datasets to use for evaluating model performance. #' Metrics specified in either `eval_metric` (under params) or `custom_metric` (function #' argument here) will be computed for each of these datasets during each boosting iteration, #' and stored in the end as a field named `evaluation_log` in the resulting object.
#' #' When either `verbose>=1` or [xgb.cb.print.evaluation()] callback is engaged, the performance #' results are continuously printed out during the training. #' #' E.g., specifying `evals=list(validation1=mat1, validation2=mat2)` allows tracking #' the performance of each round's model on `mat1` and `mat2`. #' @param objective Customized objective function. Should take two arguments: the first one will be the #' current predictions (either a numeric vector or matrix depending on the number of targets / classes), #' and the second one will be the `data` DMatrix object that is used for training. #' #' It should return a list with two elements `grad` and `hess` (in that order), as either #' numeric vectors or numeric matrices depending on the number of targets / classes (same #' dimension as the predictions that are passed as first argument). #' @param custom_metric Customized evaluation function. Just like `objective`, should take two arguments, #' with the first one being the predictions and the second one the `data` DMatrix. #' #' Should return a list with two elements `metric` (name that will be displayed for this metric, #' should be a string / character), and `value` (the number that the function calculates, should #' be a numeric scalar). #' #' Note that even if passing `custom_metric`, objectives also have an associated default metric that #' will be evaluated in addition to it. In order to disable the built-in metric, one can pass #' parameter `disable_default_eval_metric = TRUE`. #' @param verbose If 0, xgboost will stay silent. If 1, it will print information about performance. #' If 2, some additional information will be printed out. #' Note that setting `verbose > 0` automatically engages the #' `xgb.cb.print.evaluation(period=1)` callback function. #' @param print_every_n When passing `verbose>0`, evaluation logs (metrics calculated on the #' data passed under `evals`) will be printed every nth iteration according to the value passed #' here. The first and last iteration are always included regardless of this 'n'. #' #' Only has an effect when passing data under `evals` and when passing `verbose>0`. The parameter #' is passed to the [xgb.cb.print.evaluation()] callback. #' @param early_stopping_rounds Number of boosting rounds after which training will be stopped #' if there is no improvement in performance (as measured by the evaluation metric that is #' supplied or selected by default for the objective) on the evaluation data passed under #' `evals`. #' #' Must pass `evals` in order to use this functionality. Setting this parameter adds the #' [xgb.cb.early.stop()] callback. #' #' If `NULL`, early stopping will not be used. #' @param maximize If `custom_metric` and `early_stopping_rounds` are set, then this parameter must be set as well. #' When it is `TRUE`, it means the larger the evaluation score the better. #' This parameter is passed to the [xgb.cb.early.stop()] callback. #' @param save_period When not `NULL`, model is saved to disk after every `save_period` rounds. #' 0 means save at the end. The saving is handled by the [xgb.cb.save.model()] callback. #' @param save_name The name or path for the periodically saved model file. #' @param xgb_model A previously built model to continue the training from. #' Could be either an object of class `xgb.Booster`, or its raw data, or the name of a #' file with a previously saved model. #' @param callbacks A list of callback functions to perform various tasks during boosting. #' See [xgb.Callback()].
Some of the callbacks are automatically created depending on the #' parameters' values. Users can provide either existing or their own callback methods in order #' to customize the training process. #' #' Note that some callbacks might try to leave attributes in the resulting model object, #' such as an evaluation log (a `data.table` object) - be aware that these objects are kept #' as R attributes, and thus do not get saved when using XGBoost's own serializers like #' [xgb.save()] (but are kept when using R serializers like [saveRDS()]). #' @param ... Not used. #' #' Some arguments that were part of this function in previous XGBoost versions are currently #' deprecated or have been renamed. If a deprecated or renamed argument is passed, it will throw #' a warning (by default) and use its current equivalent instead. This warning will become an #' error if using the \link[=xgboost-options]{'strict mode' option}. #' #' If some additional argument is passed that is neither a current function argument nor #' a deprecated or renamed argument, a warning or error will be thrown depending on the #' 'strict mode' option. #' #' \bold{Important:} `...` will be removed in a future version, and all the current #' deprecation warnings will become errors. Please use only arguments that form part of #' the function signature. #' @return An object of class `xgb.Booster`. #' @details #' Compared to [xgboost()], the `xgb.train()` interface supports advanced features such as #' `evals`, customized objective and evaluation metric functions, among others, with the #' difference that these work with `xgb.DMatrix` objects and do not follow typical R idioms. #' #' Parallelization is automatically enabled if OpenMP is present. #' Number of threads can also be manually specified via the `nthread` parameter. #' #' While in other XGBoost language bindings the default random seed is zero, in R, if a parameter `seed` #' is not manually supplied, the function will generate a random seed through R's own random number generator, #' whose seed in turn is controllable through `set.seed`. If `seed` is passed, it will override the #' RNG from R. #' #' The following callbacks are automatically created when certain parameters are set: #' - [xgb.cb.print.evaluation()] is turned on when `verbose > 0` and the `print_every_n` #' parameter is passed to it. #' - [xgb.cb.evaluation.log()] is enabled when `evals` is present. #' - [xgb.cb.early.stop()]: When `early_stopping_rounds` is set. #' - [xgb.cb.save.model()]: When `save_period > 0` is set. #' #' Note that objects of type `xgb.Booster` as returned by this function behave a bit differently #' from typical R objects (it's an 'altrep' list class), and it makes a separation between #' internal booster attributes (restricted to jsonifyable data), accessed through [xgb.attr()] #' and shared between interfaces through serialization functions like [xgb.save()]; and #' R-specific attributes (typically the result from a callback), accessed through [attributes()] #' and [attr()], which are otherwise #' only used in the R interface, only kept when using R's serializers like [saveRDS()], and #' not used by functions like `predict.xgb.Booster()`.
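#'
#' As a small illustration of this distinction (assuming a fitted booster `bst`):
#' `xgb.attr(bst, "note") <- "x"` sets an internal booster attribute that is kept by
#' [xgb.save()], whereas `attr(bst, "note") <- "x"` sets an R-specific attribute that
#' is only preserved by R serializers such as [saveRDS()].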
#' #' Be aware that one such R attribute that is automatically added is `params` - this attribute #' is assigned from the `params` argument to this function, and is only meant to serve as a #' reference for what went into the booster, but is not used in other methods that take a booster #' object - so for example, changing the booster's configuration requires calling `xgb.config<-` #' or `xgb.model.parameters<-`, while simply modifying `attributes(model)$params$<...>` will have no #' effect elsewhere. #' #' @seealso [xgb.Callback()], [predict.xgb.Booster()], [xgb.cv()] #' #' @references #' Tianqi Chen and Carlos Guestrin, "XGBoost: A Scalable Tree Boosting System", #' 22nd SIGKDD Conference on Knowledge Discovery and Data Mining, 2016, \url{https://arxiv.org/abs/1603.02754} #' #' @examples #' data(agaricus.train, package = "xgboost") #' data(agaricus.test, package = "xgboost") #' #' ## Keep the number of threads to 1 for examples #' nthread <- 1 #' data.table::setDTthreads(nthread) #' #' dtrain <- with( #' agaricus.train, xgb.DMatrix(data, label = label, nthread = nthread) #' ) #' dtest <- with( #' agaricus.test, xgb.DMatrix(data, label = label, nthread = nthread) #' ) #' evals <- list(train = dtrain, eval = dtest) #' #' ## A simple xgb.train example: #' param <- xgb.params( #' max_depth = 2, #' nthread = nthread, #' objective = "binary:logistic", #' eval_metric = "auc" #' ) #' bst <- xgb.train(param, dtrain, nrounds = 2, evals = evals, verbose = 0) #' #' ## An xgb.train example where custom objective and evaluation metric are #' ## used: #' logregobj <- function(preds, dtrain) { #' labels <- getinfo(dtrain, "label") #' preds <- 1/(1 + exp(-preds)) #' grad <- preds - labels #' hess <- preds * (1 - preds) #' return(list(grad = grad, hess = hess)) #' } #' evalerror <- function(preds, dtrain) { #' labels <- getinfo(dtrain, "label") #' err <- as.numeric(sum(labels != (preds > 0)))/length(labels) #' return(list(metric = "error", value = err)) #' } #' #' # These functions could be used by passing them as 'objective' and #' # 'eval_metric' parameters in the params list: #' param <- xgb.params( #' max_depth = 2, #' nthread = nthread, #' objective = logregobj, #' eval_metric = evalerror #' ) #' bst <- xgb.train(param, dtrain, nrounds = 2, evals = evals, verbose = 0) #' #' # ... or as dedicated 'objective' and 'custom_metric' parameters of xgb.train: #' bst <- xgb.train( #' within(param, rm("objective", "eval_metric")), #' dtrain, nrounds = 2, evals = evals, #' objective = logregobj, custom_metric = evalerror #' ) #' #' #' ## An xgb.train example of using variable learning rates at each iteration: #' param <- xgb.params( #' max_depth = 2, #' learning_rate = 1, #' nthread = nthread, #' objective = "binary:logistic", #' eval_metric = "auc" #' ) #' my_learning_rates <- list(learning_rate = c(0.5, 0.1)) #' #' bst <- xgb.train( #' param, #' dtrain, #' nrounds = 2, #' evals = evals, #' verbose = 0, #' callbacks = list(xgb.cb.reset.parameters(my_learning_rates)) #' ) #' #' ## Early stopping: #' bst <- xgb.train( #' param, dtrain, nrounds = 25, evals = evals, early_stopping_rounds = 3 #' ) #' @export xgb.train <- function(params = xgb.params(), data, nrounds, evals = list(), objective = NULL, custom_metric = NULL, verbose = 1, print_every_n = 1L, early_stopping_rounds = NULL, maximize = NULL, save_period = NULL, save_name = "xgboost.model", xgb_model = NULL, callbacks = list(), ...) { check.deprecation(deprecated_train_params, match.call(), ...) 
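  # Validate the parameter list, and split any function-valued 'objective' /
  # 'eval_metric' entries out of it into the dedicated 'objective' and
  # 'custom_metric' function arguments.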
params <- check.booster.params(params) tmp <- check.custom.obj(params, objective) params <- tmp$params objective <- tmp$objective tmp <- check.custom.eval(params, custom_metric, maximize, early_stopping_rounds, callbacks) params <- tmp$params custom_metric <- tmp$custom_metric # data & evals checks dtrain <- data if (!inherits(dtrain, "xgb.DMatrix")) stop("second argument dtrain must be xgb.DMatrix") if (length(evals) > 0) { if (typeof(evals) != "list" || !all(vapply(evals, inherits, logical(1), what = 'xgb.DMatrix'))) stop("'evals' must be a list of xgb.DMatrix elements") evnames <- names(evals) if (is.null(evnames) || any(evnames == "")) stop("each element of 'evals' must have a name tag") } # Handle multiple evaluation metrics given as a list for (m in params$eval_metric) { params <- c(params, list(eval_metric = m)) } params <- c(params) params['validate_parameters'] <- TRUE if (!("seed" %in% names(params))) { params[["seed"]] <- sample(.Machine$integer.max, size = 1) } # callbacks tmp <- .process.callbacks(callbacks, is_cv = FALSE) callbacks <- tmp$callbacks cb_names <- tmp$cb_names rm(tmp) # Early stopping callback (should always come first) if (!is.null(early_stopping_rounds) && !("early_stop" %in% cb_names)) { callbacks <- add.callback( callbacks, xgb.cb.early.stop( early_stopping_rounds, maximize = maximize, verbose = verbose ), as_first_elt = TRUE ) } # evaluation printing callback print_every_n <- max(as.integer(print_every_n), 1L) if (verbose && !("print_evaluation" %in% cb_names)) { callbacks <- add.callback(callbacks, xgb.cb.print.evaluation(print_every_n)) } # evaluation log callback: it is automatically enabled when 'evals' is provided if (length(evals) && !("evaluation_log" %in% cb_names)) { callbacks <- add.callback(callbacks, xgb.cb.evaluation.log()) } # Model saving callback if (!is.null(save_period) && !("save_model" %in% cb_names)) { callbacks <- add.callback(callbacks, xgb.cb.save.model(save_period, save_name)) } # The tree updating process would need slightly different handling is_update <- NVL(params[['process_type']], '.') == 'update' # Construct a booster (either a new one or load from xgb_model) bst <- xgb.Booster( params = params, cachelist = append(evals, dtrain), modelfile = xgb_model ) niter_init <- bst$niter bst <- bst$bst .Call( XGBoosterCopyInfoFromDMatrix_R, xgb.get.handle(bst), dtrain ) if (is_update && nrounds > niter_init) stop("nrounds cannot be larger than ", niter_init, " (nrounds of xgb_model)") niter_skip <- ifelse(is_update, 0, niter_init) begin_iteration <- niter_skip + 1 end_iteration <- niter_skip + nrounds .execute.cb.before.training( callbacks, bst, dtrain, evals, begin_iteration, end_iteration ) # the main loop for boosting iterations for (iteration in begin_iteration:end_iteration) { .execute.cb.before.iter( callbacks, bst, dtrain, evals, iteration ) xgb.iter.update( bst = bst, dtrain = dtrain, iter = iteration - 1, objective = objective ) bst_evaluation <- NULL if (length(evals) > 0) { bst_evaluation <- xgb.iter.eval( bst = bst, evals = evals, iter = iteration - 1, custom_metric = custom_metric ) } should_stop <- .execute.cb.after.iter( callbacks, bst, dtrain, evals, iteration, bst_evaluation ) if (should_stop) break } cb_outputs <- .execute.cb.after.training( callbacks, bst, dtrain, evals, iteration, bst_evaluation ) extra_attrs <- list( call = match.call(), params = params ) curr_attrs <- attributes(bst) if (NROW(curr_attrs)) { curr_attrs <- curr_attrs[ setdiff( names(curr_attrs), c(names(extra_attrs), names(cb_outputs)) ) ] } 
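  # Assemble the R-level attributes of the returned booster: the recorded call
  # and 'params' first, then attributes already present on the booster, and
  # finally the callback outputs (e.g. the 'evaluation_log' data.table).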
curr_attrs <- c(extra_attrs, curr_attrs) if (NROW(cb_outputs)) { curr_attrs <- c(curr_attrs, cb_outputs) } attributes(bst) <- curr_attrs return(bst) } # nolint start: line_length_linter. #' @title XGBoost Parameters #' @description Convenience function to generate a list of named XGBoost parameters, which #' can be passed as argument `params` to [xgb.train()]. See the [online documentation]( #' https://xgboost.readthedocs.io/en/stable/parameter.html) for more details. #' #' The purpose of this function is to enable IDE autocompletions and to provide in-package #' documentation for all the possible parameters that XGBoost accepts. The output from this #' function is just a regular R list containing the parameters that were set to non-default #' values. Note that this function will not perform any validation on the supplied arguments. #' #' If passing `NULL` for a given parameter (the default for all of them), then the default #' value for that parameter will be used. Default values are automatically determined by the #' XGBoost core library upon calls to [xgb.train()] or [xgb.cv()], and are subject to change #' over XGBoost library versions. Some of them might differ according to the #' booster type (e.g. defaults for regularization are different for linear and tree-based boosters). #' @return A list with the entries that were passed non-NULL values. It is intended to #' be passed as argument `params` to [xgb.train()] or [xgb.cv()]. #' @export #' @param objective (default=`"reg:squarederror"`) #' Specify the learning task and the corresponding learning objective or a custom objective function to be used. #' #' For custom objective, see [Custom Objective and Evaluation Metric](https://xgboost.readthedocs.io/en/latest/tutorials/custom_metric_obj.html) #' and [Custom objective and metric](https://xgboost.readthedocs.io/en/latest/tutorials/saving_model.html#custom-obj-metric) for more information, #' along with the end note for function signatures. #' #' Supported values are: #' - `"reg:squarederror"`: regression with squared loss. #' - `"reg:squaredlogerror"`: regression with squared log loss \eqn{\frac{1}{2}[log(pred + 1) - log(label + 1)]^2}. All input labels are required to be greater than -1. Also, see metric `rmsle` for possible issue with this objective. #' - `"reg:logistic"`: logistic regression, output probability #' - `"reg:pseudohubererror"`: regression with Pseudo Huber loss, a twice differentiable alternative to absolute loss. #' - `"reg:absoluteerror"`: Regression with L1 error. When tree model is used, leaf value is refreshed after tree construction. If used in distributed training, the leaf value is calculated as the mean value from all workers, which is not guaranteed to be optimal. #' #' Version added: 1.7.0 #' - `"reg:quantileerror"`: Quantile loss, also known as "pinball loss". See later sections for its parameter and [Quantile Regression](https://xgboost.readthedocs.io/en/latest/python/examples/quantile_regression.html#sphx-glr-python-examples-quantile-regression-py) for a worked example. #' #' Version added: 2.0.0 #' - `"binary:logistic"`: logistic regression for binary classification, output probability #' - `"binary:logitraw"`: logistic regression for binary classification, output score before logistic transformation #' - `"binary:hinge"`: hinge loss for binary classification. This makes predictions of 0 or 1, rather than producing probabilities. #' - `"count:poisson"`: Poisson regression for count data, output mean of Poisson distribution. 
#' `"max_delta_step"` is set to 0.7 by default in Poisson regression (used to safeguard optimization) #' - `"survival:cox"`: Cox regression for right censored survival time data (negative values are considered right censored). #' #' Note that predictions are returned on the hazard ratio scale (i.e., as HR = exp(marginal_prediction) in the proportional hazard function `h(t) = h0(t) * HR`). #' - `"survival:aft"`: Accelerated failure time model for censored survival time data. #' See [Survival Analysis with Accelerated Failure Time](https://xgboost.readthedocs.io/en/latest/tutorials/aft_survival_analysis.html) for details. #' - `"multi:softmax"`: set XGBoost to do multiclass classification using the softmax objective, you also need to set num_class(number of classes) #' - `"multi:softprob"`: same as softmax, but output a vector of `ndata * nclass`, which can be further reshaped to `ndata * nclass` matrix. The result contains predicted probability of each data point belonging to each class. #' - `"rank:ndcg"`: Use LambdaMART to perform pair-wise ranking where [Normalized Discounted Cumulative Gain (NDCG)](https://en.wikipedia.org/wiki/NDCG) is maximized. This objective supports position debiasing for click data. #' - `"rank:map"`: Use LambdaMART to perform pair-wise ranking where [Mean Average Precision (MAP)](https://en.wikipedia.org/wiki/Mean_average_precision#Mean_average_precision) is maximized #' - `"rank:pairwise"`: Use LambdaRank to perform pair-wise ranking using the `ranknet` objective. #' - `"reg:gamma"`: gamma regression with log-link. Output is a mean of gamma distribution. It might be useful, e.g., for modeling insurance claims severity, or for any outcome that might be [gamma-distributed](https://en.wikipedia.org/wiki/Gamma_distribution#Occurrence_and_applications). #' - `"reg:tweedie"`: Tweedie regression with log-link. It might be useful, e.g., for modeling total loss in insurance, or for any outcome that might be [Tweedie-distributed](https://en.wikipedia.org/wiki/Tweedie_distribution#Occurrence_and_applications). #' @param verbosity (default=1) #' Verbosity of printing messages. Valid values are 0 (silent), 1 (warning), 2 (info), 3 #' (debug). Sometimes XGBoost tries to change configurations based on heuristics, which #' is displayed as warning message. If there's unexpected behaviour, please try to #' increase value of verbosity. #' @param nthread (default to maximum number of threads available if not set) #' Number of parallel threads used to run XGBoost. When choosing it, please keep thread #' contention and hyperthreading in mind. #' @param seed Random number seed. If not specified, will take a random seed through R's own RNG engine. #' @param booster (default= `"gbtree"`) #' Which booster to use. Can be `"gbtree"`, `"gblinear"` or `"dart"`; `"gbtree"` and `"dart"` use tree based models while `"gblinear"` uses linear functions. #' @param eta,learning_rate (two aliases for the same parameter) #' Step size shrinkage used in update to prevent overfitting. After each boosting step, we can directly get the weights of new features, and `eta` shrinks the feature weights to make the boosting process more conservative. #' - range: \eqn{[0,1]} #' - default value: 0.3 for tree-based boosters, 0.5 for linear booster. #' #' Note: should only pass one of `eta` or `learning_rate`. Both refer to the same parameter and there's thus no difference between one or the other. 
#' @param gamma,min_split_loss (two aliases for the same parameter) (for Tree Booster) (default=0, alias: `gamma`) #' Minimum loss reduction required to make a further partition on a leaf node of the tree. The larger `min_split_loss` is, the more conservative the algorithm will be. Note that a tree where no splits were made might still contain a single terminal node with a non-zero score. #' #' range: \eqn{[0, \infty)} #' #' Note: should only pass one of `gamma` or `min_split_loss`. Both refer to the same parameter and there's thus no difference between one or the other. #' @param max_depth (for Tree Booster) (default=6, type=int32) #' Maximum depth of a tree. Increasing this value will make the model more complex and more likely to overfit. 0 indicates no limit on depth. Beware that XGBoost aggressively consumes memory when training a deep tree. The `"exact"` tree method requires a non-zero value. #' #' range: \eqn{[0, \infty)} #' @param min_child_weight (for Tree Booster) (default=1) #' Minimum sum of instance weight (hessian) needed in a child. If the tree partition step results in a leaf node with the sum of instance weight less than `min_child_weight`, then the building process will give up further partitioning. In a linear regression task, this simply corresponds to the minimum number of instances needed to be in each node. The larger `min_child_weight` is, the more conservative the algorithm will be. #' #' range: \eqn{[0, \infty)} #' @param max_delta_step (for Tree Booster) (default=0) #' Maximum delta step we allow each leaf output to be. If the value is set to 0, it means there is no constraint. If it is set to a positive value, it can help make the update step more conservative. Usually this parameter is not needed, but it might help in logistic regression when a class is extremely imbalanced. Setting it to a value of 1-10 might help control the update. #' #' range: \eqn{[0, \infty)} #' @param subsample (for Tree Booster) (default=1) #' Subsample ratio of the training instances. Setting it to 0.5 means that XGBoost would randomly sample half of the training data prior to growing trees, and this will prevent overfitting. Subsampling will occur once in every boosting iteration. #' #' range: \eqn{(0,1]} #' @param sampling_method (for Tree Booster) (default= `"uniform"`) #' The method to use to sample the training instances. #' - `"uniform"`: each training instance has an equal probability of being selected. Typically set #' `"subsample"` >= 0.5 for good results. #' - `"gradient_based"`: the selection probability for each training instance is proportional to the #' \bold{regularized absolute value} of gradients (more specifically, \eqn{\sqrt{g^2+\lambda h^2}}). #' `"subsample"` may be set to as low as 0.1 without loss of model accuracy. Note that this #' sampling method is only supported when `"tree_method"` is set to `"hist"` and the device is `"cuda"`; other tree #' methods only support `"uniform"` sampling. #' @param colsample_bytree,colsample_bylevel,colsample_bynode (for Tree Booster) (default=1) #' This is a family of parameters for subsampling of columns. #' - All `"colsample_by*"` parameters have a range of \eqn{(0, 1]}, the default value of 1, and specify the fraction of columns to be subsampled. #' - `"colsample_bytree"` is the subsample ratio of columns when constructing each tree. Subsampling occurs once for every tree constructed. #' - `"colsample_bylevel"` is the subsample ratio of columns for each level. Subsampling occurs once for every new depth level reached in a tree.
Columns are subsampled from the set of columns chosen for the current tree. #' - `"colsample_bynode"` is the subsample ratio of columns for each node (split). Subsampling occurs once every time a new split is evaluated. Columns are subsampled from the set of columns chosen for the current level. This is not supported by the exact tree method. #' - `"colsample_by*"` parameters work cumulatively. For instance, #' the combination `{'colsample_bytree'=0.5, 'colsample_bylevel'=0.5, 'colsample_bynode'=0.5}` with 64 features will leave 8 features to choose from at #' each split. #' #' One can set the `"feature_weights"` for DMatrix to #' define the probability of each feature being selected when using column sampling. #' @param lambda,reg_lambda (two aliases for the same parameter) #' #' - For tree-based boosters: #' - L2 regularization term on weights. Increasing this value will make model more conservative. #' - default: 1 #' - range: \eqn{[0, \infty]} #' - For linear booster: #' - L2 regularization term on weights. Increasing this value will make model more conservative. Normalised to number of training examples. #' - default: 0 #' - range: \eqn{[0, \infty)} #' #' Note: should only pass one of `lambda` or `reg_lambda`. Both refer to the same parameter and there's thus no difference between one or the other. #' @param alpha,reg_alpha (two aliases for the same parameter) #' - L1 regularization term on weights. Increasing this value will make model more conservative. #' - For the linear booster, it's normalised to number of training examples. #' - default: 0 #' - range: \eqn{[0, \infty)} #' #' Note: should only pass one of `alpha` or `reg_alpha`. Both refer to the same parameter and there's thus no difference between one or the other. #' @param tree_method (for Tree Booster) (default= `"auto"`) #' The tree construction algorithm used in XGBoost. See description in the [reference paper](https://arxiv.org/abs/1603.02754) and [Tree Methods](https://xgboost.readthedocs.io/en/latest/treemethod.html). #' #' Choices: `"auto"`, `"exact"`, `"approx"`, `"hist"`, this is a combination of commonly #' used updaters. For other updaters like `"refresh"`, set the parameter `updater` #' directly. #' - `"auto"`: Same as the `"hist"` tree method. #' - `"exact"`: Exact greedy algorithm. Enumerates all split candidates. #' - `"approx"`: Approximate greedy algorithm using quantile sketch and gradient histogram. #' - `"hist"`: Faster histogram optimized approximate greedy algorithm. #' @param scale_pos_weight (for Tree Booster) (default=1) #' Control the balance of positive and negative weights, useful for unbalanced classes. A typical value to consider: `sum(negative instances) / sum(positive instances)`. See [Parameters Tuning](https://xgboost.readthedocs.io/en/latest/tutorials/param_tuning.html) for more discussion. Also, see Higgs Kaggle competition demo for examples: [R](https://github.com/dmlc/xgboost/blob/master/demo/kaggle-higgs/higgs-train.R), [py1](https://github.com/dmlc/xgboost/blob/master/demo/kaggle-higgs/higgs-numpy.py), [py2](https://github.com/dmlc/xgboost/blob/master/demo/kaggle-higgs/higgs-cv.py), [py3](https://github.com/dmlc/xgboost/blob/master/demo/guide-python/cross_validation.py). #' @param updater Has different meanings depending on the type of booster. #' #' - For tree-based boosters: #' A comma separated string defining the sequence of tree updaters to run, providing a modular way to construct and to modify the trees. 
This is an advanced parameter that is usually set automatically, depending on some other parameters. However, it can also be set explicitly by a user. The following updaters exist: #' - `"grow_colmaker"`: non-distributed column-based construction of trees. #' - `"grow_histmaker"`: distributed tree construction with row-based data splitting based on global proposal of histogram counting. #' - `"grow_quantile_histmaker"`: Grow tree using quantized histogram. #' - `"grow_gpu_hist"`: Enabled when `tree_method` is set to `"hist"` along with `device="cuda"`. #' - `"grow_gpu_approx"`: Enabled when `tree_method` is set to `"approx"` along with `device="cuda"`. #' - `"sync"`: synchronizes trees in all distributed nodes. #' - `"refresh"`: refreshes a tree's statistics and/or leaf values based on the current data. Note that no random subsampling of data rows is performed. #' - `"prune"`: prunes the splits where loss < `min_split_loss` (or `gamma`) and nodes that have depth greater than `max_depth`. #' #' - For `booster="gblinear"`: #' (default= `"shotgun"`) Choice of algorithm to fit the linear model #' - `"shotgun"`: Parallel coordinate descent algorithm based on the shotgun algorithm. Uses 'hogwild' parallelism and therefore produces a nondeterministic solution on each run. #' - `"coord_descent"`: Ordinary coordinate descent algorithm. Also multithreaded but still produces a deterministic solution. When the `device` parameter is set to `"cuda"` or `"gpu"`, a GPU variant would be used. #' @param refresh_leaf (for Tree Booster) (default=1) #' This is a parameter of the `"refresh"` updater. When this flag is 1, tree leaves as well as tree nodes' stats are updated. When it is 0, only node stats are updated. #' @param grow_policy (for Tree Booster) (default= `"depthwise"`) #' - Controls the way new nodes are added to the tree. #' - Currently supported only if `tree_method` is set to `"hist"` or `"approx"`. #' - Choices: `"depthwise"`, `"lossguide"` #' - `"depthwise"`: split at nodes closest to the root. #' - `"lossguide"`: split at nodes with highest loss change. #' @param max_leaves (for Tree Booster) (default=0, type=int32) #' Maximum number of nodes to be added. Not used by the `"exact"` tree method. #' @param max_bin (for Tree Booster) (default=256, type=int32) #' - Only used if `tree_method` is set to `"hist"` or `"approx"`. #' - Maximum number of discrete bins to bucket continuous features. #' - Increasing this number improves the optimality of splits at the cost of higher computation time. #' @param num_parallel_tree (for Tree Booster) (default=1) #' Number of parallel trees constructed during each iteration. This option is used to support boosted random forests. #' @param monotone_constraints (for Tree Booster) #' Constraint of variable monotonicity. See [Monotonic Constraints](https://xgboost.readthedocs.io/en/latest/tutorials/monotonic.html) for more information. #' @param interaction_constraints (for Tree Booster) #' Constraints for interaction representing permitted interactions. The constraints must #' be specified in the form of a nested list, e.g. `list(c(0, 1), c(2, 3, 4))`, where each inner #' list is a group of indices of features (base-0 numeration) that are allowed to interact with each other. #' See [Feature Interaction Constraints](https://xgboost.readthedocs.io/en/latest/tutorials/feature_interaction_constraint.html) for more information.
#' @param multi_strategy (for Tree Booster) (default = `"one_output_per_tree"`) #' The strategy used for training multi-target models, including multi-target regression #' and multi-class classification. See [Multiple Outputs](https://xgboost.readthedocs.io/en/latest/tutorials/multioutput.html) for more information. #' - `"one_output_per_tree"`: One model for each target. #' - `"multi_output_tree"`: Use multi-target trees. #' #' Version added: 2.0.0 #' #' Note: This parameter is working-in-progress. #' @param base_score #' - The initial prediction score of all instances, global bias #' - The parameter is automatically estimated for selected objectives before training. To #' disable the estimation, specify a real number argument. #' - If `base_margin` is supplied, `base_score` will not be added. #' - For sufficient number of iterations, changing this value will not have too much effect. #' @param eval_metric (default according to objective) #' - Evaluation metrics for validation data, a default metric will be assigned according to objective (rmse for regression, and logloss for classification, `mean average precision` for ``rank:map``, etc.) #' - User can add multiple evaluation metrics. #' - The choices are listed below: #' - `"rmse"`: [root mean square error](https://en.wikipedia.org/wiki/Root_mean_square_error) #' - `"rmsle"`: root mean square log error: \eqn{\sqrt{\frac{1}{N}[log(pred + 1) - log(label + 1)]^2}}. Default metric of `"reg:squaredlogerror"` objective. This metric reduces errors generated by outliers in dataset. But because `log` function is employed, `"rmsle"` might output `nan` when prediction value is less than -1. See `"reg:squaredlogerror"` for other requirements. #' - `"mae"`: [mean absolute error](https://en.wikipedia.org/wiki/Mean_absolute_error) #' - `"mape"`: [mean absolute percentage error](https://en.wikipedia.org/wiki/Mean_absolute_percentage_error) #' - `"mphe"`: [mean Pseudo Huber error](https://en.wikipedia.org/wiki/Huber_loss). Default metric of `"reg:pseudohubererror"` objective. #' - `"logloss"`: [negative log-likelihood](https://en.wikipedia.org/wiki/Log-likelihood) #' - `"error"`: Binary classification error rate. It is calculated as `#(wrong cases)/#(all cases)`. For the predictions, the evaluation will regard the instances with prediction value larger than 0.5 as positive instances, and the others as negative instances. #' - `"error@t"`: a different than 0.5 binary classification threshold value could be specified by providing a numerical value through 't'. #' - `"merror"`: Multiclass classification error rate. It is calculated as `#(wrong cases)/#(all cases)`. #' - `"mlogloss"`: [Multiclass logloss](https://scikit-learn.org/stable/modules/generated/sklearn.metrics.log_loss.html). #' - `"auc"`: [Receiver Operating Characteristic Area under the Curve](https://en.wikipedia.org/wiki/Receiver_operating_characteristic#Area_under_the_curve). #' Available for classification and learning-to-rank tasks. #' - When used with binary classification, the objective should be `"binary:logistic"` or similar functions that work on probability. #' - When used with multi-class classification, objective should be `"multi:softprob"` instead of `"multi:softmax"`, as the latter doesn't output probability. Also the AUC is calculated by 1-vs-rest with reference class weighted by class prevalence. #' - When used with LTR task, the AUC is computed by comparing pairs of documents to count correctly sorted pairs. This corresponds to pairwise learning to rank. 
The implementation has some known issues: the average AUC across groups and across distributed workers is not well-defined. #' - On a single machine the AUC calculation is exact. In a distributed environment the AUC is a weighted average over the AUC of training rows on each node - therefore, distributed AUC is an approximation sensitive to the distribution of data across workers. Use another metric in distributed environments if precision and reproducibility are important. #' - When the input dataset contains only negative or positive samples, the output is `NaN`. The behavior is implementation defined; for instance, `scikit-learn` returns \eqn{0.5} instead. #' - `"aucpr"`: [Area under the PR curve](https://en.wikipedia.org/wiki/Precision_and_recall). #' Available for classification and learning-to-rank tasks. #' #' After XGBoost 1.6, both the requirements and restrictions for using `"aucpr"` in classification problems are similar to `"auc"`. For ranking task, only binary relevance label \eqn{y \in [0, 1]} is supported. Different from `"map"` (mean average precision), `"aucpr"` calculates the *interpolated* area under precision recall curve using continuous interpolation. #' #' - `"pre"`: Precision at \eqn{k}. Supports only learning to rank task. #' - `"ndcg"`: [Normalized Discounted Cumulative Gain](https://en.wikipedia.org/wiki/NDCG) #' - `"map"`: [Mean Average Precision](https://en.wikipedia.org/wiki/Mean_average_precision#Mean_average_precision) #' #' The `average precision` is defined as: #' #' \eqn{AP@l = \frac{1}{\min(l, N)} \sum^l_{k=1} P@k \cdot I_{(k)}} #' #' where \eqn{I_{(k)}} is an indicator function that equals \eqn{1} when the document at \eqn{k} is relevant and \eqn{0} otherwise. The \eqn{P@k} is the precision at \eqn{k}, and \eqn{N} is the total number of relevant documents. Lastly, the `mean average precision` is defined as the weighted average across all queries. #' #' - `"ndcg@n"`, `"map@n"`, `"pre@n"`: \eqn{n} can be assigned as an integer to cut off the top positions in the lists for evaluation. #' - `"ndcg-"`, `"map-"`, `"ndcg@n-"`, `"map@n-"`: In XGBoost, the NDCG and MAP evaluate the score of a list without any positive samples as \eqn{1}. By appending "-" to the evaluation metric name, we can ask XGBoost to evaluate these scores as \eqn{0} to be consistent under some conditions. #' - `"poisson-nloglik"`: negative log-likelihood for Poisson regression #' - `"gamma-nloglik"`: negative log-likelihood for gamma regression #' - `"cox-nloglik"`: negative partial log-likelihood for Cox proportional hazards regression #' - `"gamma-deviance"`: residual deviance for gamma regression #' - `"tweedie-nloglik"`: negative log-likelihood for Tweedie regression (at a specified value of the `tweedie_variance_power` parameter) #' - `"aft-nloglik"`: Negative log likelihood of Accelerated Failure Time model. #' See [Survival Analysis with Accelerated Failure Time](https://xgboost.readthedocs.io/en/latest/tutorials/aft_survival_analysis.html) for details. #' - `"interval-regression-accuracy"`: Fraction of data points whose predicted labels fall in the interval-censored labels. #' Only applicable for interval-censored data. See [Survival Analysis with Accelerated Failure Time](https://xgboost.readthedocs.io/en/latest/tutorials/aft_survival_analysis.html) for details. #' @param seed_per_iteration (default= `FALSE`) #' Seed the PRNG deterministically via the iteration number. #' @param device (default= `"cpu"`) #' Device for XGBoost to run.
User can set it to one of the following values: #' - `"cpu"`: Use CPU. #' - `"cuda"`: Use a GPU (CUDA device). #' - `"cuda:<ordinal>"`: `<ordinal>` is an integer that specifies the ordinal of the GPU (which GPU to use if you have more than one device). #' - `"gpu"`: Default GPU device selection from the list of available and supported devices. Only `"cuda"` devices are supported currently. #' - `"gpu:<ordinal>"`: Default GPU device selection from the list of available and supported devices. Only `"cuda"` devices are supported currently. #' #' For more information about GPU acceleration, see [XGBoost GPU Support](https://xgboost.readthedocs.io/en/latest/gpu/index.html). In distributed environments, ordinal selection is handled by distributed frameworks instead of XGBoost. As a result, using `"cuda:<ordinal>"` will result in an error. Use `"cuda"` instead. #' #' Version added: 2.0.0 #' #' Note: if XGBoost was installed from CRAN, it won't have GPU support enabled, thus only `"cpu"` will be available. #' To get GPU support, the R package for XGBoost must be installed from source or from the GitHub releases - see #' [instructions](https://xgboost.readthedocs.io/en/latest/install.html#r). #' @param disable_default_eval_metric (default= `FALSE`) #' Flag to disable default metric. Set to 1 or `TRUE` to disable. #' @param use_rmm Whether to use RAPIDS Memory Manager (RMM) to allocate cache GPU #' memory. The primary memory is always allocated on the RMM pool when XGBoost is built #' (compiled) with the RMM plugin enabled. Valid values are `TRUE` and `FALSE`. See #' [Using XGBoost with RAPIDS Memory Manager (RMM) plugin](https://xgboost.readthedocs.io/en/latest/python/rmm-examples/index.html) for details. #' @param max_cached_hist_node (for Non-Exact Tree Methods) (default = 65536) #' Maximum number of cached nodes for histogram. This can be used with the `"hist"` and the #' `"approx"` tree methods. #' #' Version added: 2.0.0 #' #' - For most of the cases this parameter should not be set except for growing deep #' trees. After 3.0, this parameter affects GPU algorithms as well. #' @param extmem_single_page (for Non-Exact Tree Methods) (default = `FALSE`) #' This parameter is only used for the `"hist"` tree method with `device="cuda"` and #' `subsample != 1.0`. Before 3.0, pages were always concatenated. #' #' Version added: 3.0.0 #' #' Whether the GPU-based `"hist"` tree method should concatenate the training data into a #' single batch instead of fetching data on-demand when external memory is used. For GPU #' devices that don't support address translation services, external memory training is #' expensive. This parameter can be used in combination with subsampling to reduce overall #' memory usage without significant overhead. See [Using XGBoost External Memory Version](https://xgboost.readthedocs.io/en/latest/tutorials/external_memory.html) for #' more information. #' @param max_cat_to_onehot (for Non-Exact Tree Methods) #' A threshold for deciding whether XGBoost should use one-hot encoding based split for #' categorical data. When the number of categories is less than the threshold, one-hot #' encoding is chosen; otherwise the categories will be partitioned into children nodes. #' #' Version added: 1.6.0 #' @param max_cat_threshold (for Non-Exact Tree Methods) #' Maximum number of categories considered for each split. Used only by partition-based #' splits for preventing over-fitting. #' #' Version added: 1.7.0 #' @param sample_type (for Dart Booster) (default= `"uniform"`) #' Type of sampling algorithm.
#' - `"uniform"`: dropped trees are selected uniformly. #' - `"weighted"`: dropped trees are selected in proportion to weight. #' @param normalize_type (for Dart Booster) (default= `"tree"`) #' Type of normalization algorithm. #' - `"tree"`: new trees have the same weight of each of dropped trees. #' - Weight of new trees are `1 / (k + learning_rate)`. #' - Dropped trees are scaled by a factor of `k / (k + learning_rate)`. #' - `"forest"`: new trees have the same weight of sum of dropped trees (forest). #' - Weight of new trees are `1 / (1 + learning_rate)`. #' - Dropped trees are scaled by a factor of `1 / (1 + learning_rate)`. #' @param rate_drop (for Dart Booster) (default=0.0) #' Dropout rate (a fraction of previous trees to drop during the dropout). #' #' range: \eqn{[0.0, 1.0]} #' @param one_drop (for Dart Booster) (default=0) #' When this flag is enabled, at least one tree is always dropped during the dropout (allows Binomial-plus-one or epsilon-dropout from the original DART paper). #' @param skip_drop (for Dart Booster) (default=0.0) #' Probability of skipping the dropout procedure during a boosting iteration. #' - If a dropout is skipped, new trees are added in the same manner as `"gbtree"`. #' - Note that non-zero `skip_drop` has higher priority than `rate_drop` or `one_drop`. #' #' range: \eqn{[0.0, 1.0]} #' @param feature_selector (for Linear Booster) (default= `"cyclic"`) #' Feature selection and ordering method #' - `"cyclic"`: Deterministic selection by cycling through features one at a time. #' - `"shuffle"`: Similar to `"cyclic"` but with random feature shuffling prior to each update. #' - `"random"`: A random (with replacement) coordinate selector. #' - `"greedy"`: Select coordinate with the greatest gradient magnitude. It has `O(num_feature^2)` complexity. It is fully deterministic. It allows restricting the selection to `top_k` features per group with the largest magnitude of univariate weight change, by setting the `top_k` parameter. Doing so would reduce the complexity to `O(num_feature*top_k)`. #' - `"thrifty"`: Thrifty, approximately-greedy feature selector. Prior to cyclic updates, reorders features in descending magnitude of their univariate weight changes. This operation is multithreaded and is a linear complexity approximation of the quadratic greedy selection. It allows restricting the selection to `top_k` features per group with the largest magnitude of univariate weight change, by setting the `top_k` parameter. #' @param top_k (for Linear Booster) (default=0) #' The number of top features to select in `greedy` and `thrifty` feature selector. The value of 0 means using all the features. #' @param num_class Number of classes when using multi-class classification objectives (e.g. `objective="multi:softprob"`) #' @param tweedie_variance_power (for Tweedie Regression (`"objective=reg:tweedie"`)) (default=1.5) #' - Parameter that controls the variance of the Tweedie distribution `var(y) ~ E(y)^tweedie_variance_power` #' - range: \eqn{(1,2)} #' - Set closer to 2 to shift towards a gamma distribution #' - Set closer to 1 to shift towards a Poisson distribution. #' @param huber_slope (for using Pseudo-Huber (`"reg:pseudohubererror`")) (default = 1.0) #' A parameter used for Pseudo-Huber loss to define the \eqn{\delta} term. #' @param quantile_alpha (for using Quantile Loss (`"reg:quantileerror"`)) #' A scalar or a list of targeted quantiles (passed as a numeric vector). 
#' #' Version added: 2.0.0 #' @param aft_loss_distribution (for using AFT Survival Loss (`"survival:aft"`) and Negative Log Likelihood of AFT metric (`"aft-nloglik"`)) #' Probability Density Function, `"normal"`, `"logistic"`, or `"extreme"`. #' @param lambdarank_pair_method (for learning to rank (`"rank:ndcg"`, `"rank:map"`, `"rank:pairwise"`)) (default = `"topk"`) #' How to construct pairs for pair-wise learning. #' - `"mean"`: Sample `lambdarank_num_pair_per_sample` pairs for each document in the query list. #' - `"topk"`: Focus on the top-`lambdarank_num_pair_per_sample` documents. Construct \eqn{|query|} pairs for each document among the top-`lambdarank_num_pair_per_sample` as ranked by the model. #' @param lambdarank_num_pair_per_sample (for learning to rank (`"rank:ndcg"`, `"rank:map"`, `"rank:pairwise"`)) #' It specifies the number of pairs sampled for each document when pair method is `"mean"`, or the truncation level for queries when the pair method is `"topk"`. For example, to train with `ndcg@6`, set `lambdarank_num_pair_per_sample` to \eqn{6} and `lambdarank_pair_method` to `"topk"`. #' #' range = \eqn{[1, \infty)} #' @param lambdarank_normalization (for learning to rank (`"rank:ndcg"`, `"rank:map"`, `"rank:pairwise"`)) (default = `TRUE`) #' Whether to normalize the leaf value by lambda gradient. This can sometimes stagnate the training progress. #' #' Version added: 2.1.0 #' #' @param lambdarank_score_normalization #' #' Whether to normalize the delta metric by the difference of prediction scores. This can #' sometimes stagnate the training progress. With pairwise ranking, we can normalize the #' gradient using the difference between two samples in each pair to reduce influence from #' the pairs that have large difference in ranking scores. This can help us regularize the #' model to reduce bias and prevent overfitting. Similar to other regularization #' techniques, this might prevent training from converging. #' #' There was no normalization before 2.0. In 2.0 and later versions this is used by #' default. In 3.0, we made this an option that users can disable. #' #' Version added: 3.0.0 #' #' @param lambdarank_unbiased (for learning to rank (`"rank:ndcg"`, `"rank:map"`, `"rank:pairwise"`)) (default = `FALSE`) #' Specify whether we need to debias the input click data. #' @param lambdarank_bias_norm (for learning to rank (`"rank:ndcg"`, `"rank:map"`, `"rank:pairwise"`)) (default = 2.0) #' \eqn{L_p} normalization for position debiasing, default is \eqn{L_2}. Only relevant when `lambdarank_unbiased` is set to `TRUE`. #' @param ndcg_exp_gain (for learning to rank (`"rank:ndcg"`, `"rank:map"`, `"rank:pairwise"`)) (default = `TRUE`) #' Whether we should use exponential gain function for `NDCG`. There are two forms of gain function for `NDCG`: one uses the relevance value directly, while the other uses \eqn{2^{rel} - 1} to put more emphasis on retrieving relevant documents. When `ndcg_exp_gain` is `TRUE` (the default), relevance degree cannot be greater than 31.
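#' @examples
#' # A minimal sketch of building a parameter list with xgb.params() (the values
#' # here are illustrative, not tuned recommendations):
#' params <- xgb.params(
#'   tree_method = "hist",
#'   max_depth = 4,
#'   learning_rate = 0.1,
#'   eval_metric = c("auc", "logloss")
#' )
#' # The result is a plain list of the non-NULL arguments, suitable for passing
#' # as 'params' to xgb.train():
#' str(params)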
xgb.params <- function( objective = NULL, verbosity = NULL, nthread = NULL, seed = NULL, booster = NULL, eta = NULL, learning_rate = NULL, gamma = NULL, min_split_loss = NULL, max_depth = NULL, min_child_weight = NULL, max_delta_step = NULL, subsample = NULL, sampling_method = NULL, colsample_bytree = NULL, colsample_bylevel = NULL, colsample_bynode = NULL, lambda = NULL, reg_lambda = NULL, alpha = NULL, reg_alpha = NULL, tree_method = NULL, scale_pos_weight = NULL, updater = NULL, refresh_leaf = NULL, grow_policy = NULL, max_leaves = NULL, max_bin = NULL, num_parallel_tree = NULL, monotone_constraints = NULL, interaction_constraints = NULL, multi_strategy = NULL, base_score = NULL, eval_metric = NULL, seed_per_iteration = NULL, device = NULL, disable_default_eval_metric = NULL, use_rmm = NULL, max_cached_hist_node = NULL, extmem_single_page = NULL, max_cat_to_onehot = NULL, max_cat_threshold = NULL, sample_type = NULL, normalize_type = NULL, rate_drop = NULL, one_drop = NULL, skip_drop = NULL, feature_selector = NULL, top_k = NULL, num_class = NULL, tweedie_variance_power = NULL, huber_slope = NULL, quantile_alpha = NULL, aft_loss_distribution = NULL, lambdarank_pair_method = NULL, lambdarank_num_pair_per_sample = NULL, lambdarank_normalization = NULL, lambdarank_score_normalization = NULL, lambdarank_unbiased = NULL, lambdarank_bias_norm = NULL, ndcg_exp_gain = NULL ) { # nolint end out <- as.list(environment()) out <- out[!sapply(out, is.null)] return(out) }
xgboost-3.0.0/R-package/R/xgboost.R
prescreen.objective <- function(objective) { if (!is.null(objective)) { if (!is.character(objective) || length(objective) != 1L || is.na(objective)) { stop("'objective' must be a single character/string variable.") } if (objective %in% .OBJECTIVES_NON_DEFAULT_MODE()) { stop( "Objectives with non-default prediction mode (", paste(.OBJECTIVES_NON_DEFAULT_MODE(), collapse = ", "), ") are not supported in 'xgboost()'. Try 'xgb.train()'." ) } if (objective %in% .RANKING_OBJECTIVES()) { stop("Ranking objectives are not supported in 'xgboost()'. Try 'xgb.train()'.") } } } process.base.margin <- function(base_margin, nrows, ncols) { if (!NROW(base_margin)) { return(NULL) } if (is.array(base_margin) && length(dim(base_margin)) > 2) { stop( "'base_margin' should not have more than 2 dimensions for any objective (got: ", length(dim(base_margin)), " dimensions)." ) } if (inherits(base_margin, c("sparseMatrix", "sparseVector"))) { warning( "Got a sparse matrix type (class: ", paste(class(base_margin), collapse = ", "), ") for 'base_margin'. Will convert to dense matrix." ) base_margin <- as.matrix(base_margin) } if (NROW(base_margin) != nrows) { stop( "'base_margin' has incorrect number of rows. Expected: ", nrows, ". Got: ", NROW(base_margin) ) } if (ncols == 1L) { if (inherits(base_margin, c("matrix", "data.frame"))) { if (ncol(base_margin) != 1L) { stop("'base_margin' should be a 1-d vector for the given objective and data.") } if (is.data.frame(base_margin)) { base_margin <- base_margin[[1L]] } else { base_margin <- base_margin[, 1L] } } if (!is.numeric(base_margin)) { base_margin <- as.numeric(base_margin) } } else { supported_multicol <- c("matrix", "data.frame") if (!inherits(base_margin, supported_multicol)) { stop( "'base_margin' should be a matrix with ", ncols, " columns for the given objective and data. 
Got class: ", paste(class(base_margin), collapse = ", ") ) } if (ncol(base_margin) != ncols) { stop( "'base_margin' has incorrect number of columns. Expected: ", ncols, ". Got: ", ncol(base_margin) ) } if (!is.matrix(base_margin)) { base_margin <- as.matrix(base_margin) } } return(base_margin) } process.y.margin.and.objective <- function( y, base_margin, objective, params ) { if (!NROW(y)) { stop("Passed empty 'y'.") } if (is.array(y) && length(dim(y)) > 2) { stop( "'y' should not have more than 2 dimensions for any objective (got: ", length(dim(y)), ")." ) } if (inherits(y, c("sparseMatrix", "sparseVector"))) { warning( "Got a sparse matrix type (class: ", paste(class(y), collapse = ", "), ") for 'y'. Will convert to dense matrix." ) y <- as.matrix(y) } if (is.character(y)) { if (!is.vector(y)) { if (NCOL(y) > 1L) { stop("Multi-column categorical 'y' is not supported.") } y <- as.vector(y) } y <- factor(y) } if (is.logical(y)) { if (!is.vector(y)) { if (NCOL(y) > 1L) { stop("Multi-column logical/boolean 'y' is not supported.") } y <- as.vector(y) } y <- factor(y, c(FALSE, TRUE)) } if (is.factor(y)) { y_levels <- levels(y) if (length(y_levels) < 2) { stop("Factor 'y' has less than 2 levels.") } if (length(y_levels) == 2) { if (is.null(objective)) { objective <- "binary:logistic" } else { if (!(objective %in% .BINARY_CLASSIF_OBJECTIVES())) { stop( "Got binary 'y' - supported objectives for this data are: ", paste(.BINARY_CLASSIF_OBJECTIVES(), collapse = ", "), ". Was passed: ", objective ) } } if (!is.null(base_margin)) { base_margin <- process.base.margin(base_margin, length(y), 1) } out <- list( params = list( objective = objective ), metadata = list( y_levels = y_levels, n_targets = 1 ) ) } else { # length(levels) > 2 if (is.null(objective)) { objective <- "multi:softprob" } else { if (!(objective %in% .MULTICLASS_CLASSIF_OBJECTIVES())) { stop( "Got non-binary factor 'y' - supported objectives for this data are: ", paste(.MULTICLASS_CLASSIF_OBJECTIVES(), collapse = ", "), ". Was passed: ", objective ) } } if (!is.null(base_margin)) { base_margin <- process.base.margin(base_margin, length(y), length(y_levels)) } out <- list( params = list( objective = objective, num_class = length(y_levels) ), metadata = list( y_levels = y_levels, n_targets = length(y_levels) ) ) } out$dmatrix_args <- list( label = as.numeric(y) - 1, base_margin = base_margin ) } else if (inherits(y, "Surv")) { y_attr <- attributes(y) supported_surv_types <- c("left", "right", "interval") if (!(y_attr$type %in% supported_surv_types)) { stop( "Survival objectives are only supported for types: ", paste(supported_surv_types, collapse = ", "), ". Was passed: ", y_attr$type ) } if (is.null(objective)) { objective <- "survival:aft" } else { if (y_attr$type == "right") { if (!(objective %in% .SURVIVAL_RIGHT_CENSORING_OBJECTIVES())) { stop( "Got right-censored 'y' variable - supported objectives for this data are: ", paste(.SURVIVAL_RIGHT_CENSORING_OBJECTIVES(), collapse = ", "), ". Was passed: ", objective ) } } else { if (!(objective %in% .SURVIVAL_ALL_CENSORING_OBJECTIVES())) { stop( "Got ", y_attr$type, "-censored 'y' variable - supported objectives for this data are:", paste(.SURVIVAL_ALL_CENSORING_OBJECTIVES(), collapse = ", "), ". 
Was passed: ", objective ) } } } if (!is.null(base_margin)) { base_margin <- process.base.margin(base_margin, nrow(y), 1) } out <- list( params = list( objective = objective ), metadata = list( n_targets = 1 ) ) # Note: the 'Surv' object class that is passed as 'y' might have either 2 or 3 columns # depending on the type of censoring, and the last column in both cases is the one that # indicates the observation type (e.g. censored / uncensored). # In the case of interval censoring, the second column will not always have values with # infinites filled in. For more information, see the code behind the 'print.Surv' method. if (objective == "survival:cox") { # Can only get here when using right censoring if (y_attr$type != "right") { stop("Internal error.") } out$dmatrix_args <- list( label = y[, 1L] * (2 * (y[, 2L] - 0.5)) ) } else { if (y_attr$type == "left") { lb <- ifelse( y[, 2L] == 0, 0, y[, 1L] ) ub <- y[, 1L] out$dmatrix_args <- list( label_lower_bound = lb, label_upper_bound = ub ) } else if (y_attr$type == "right") { lb <- y[, 1L] ub <- ifelse( y[, 2L] == 0, Inf, y[, 1L] ) out$dmatrix_args <- list( label_lower_bound = lb, label_upper_bound = ub ) } else if (y_attr$type == "interval") { out$dmatrix_args <- list( label_lower_bound = ifelse(y[, 3L] == 2, 0, y[, 1L]), label_upper_bound = ifelse( y[, 3L] == 0, Inf, ifelse(y[, 3L] == 3, y[, 2L], y[, 1L]) ) ) } if (min(out$dmatrix_args$label_lower_bound) < 0) { stop("Survival objectives are only defined for non-negative 'y'.") } } out$dmatrix_args$base_margin <- base_margin } else if (is.vector(y)) { if (is.null(objective)) { objective <- "reg:squarederror" } else if (!(objective %in% .REGRESSION_OBJECTIVES())) { stop( "Got numeric 'y' - supported objectives for this data are: ", paste(.REGRESSION_OBJECTIVES(), collapse = ", "), ". Was passed: ", objective ) } n_targets <- 1L if (objective == "reg:quantileerror" && NROW(params$quantile_alpha) > 1) { n_targets <- NROW(params$quantile_alpha) } if (!is.null(base_margin)) { base_margin <- process.base.margin(base_margin, length(y), n_targets) } out <- list( params = list( objective = objective ), metadata = list( n_targets = n_targets ), dmatrix_args = list( label = as.numeric(y), base_margin = base_margin ) ) } else if (is.data.frame(y)) { if (ncol(y) == 1L) { return(process.y.margin.and.objective(y[[1L]], base_margin, objective, params)) } if (is.null(objective)) { objective <- "reg:squarederror" } else if (!(objective %in% .MULTI_TARGET_OBJECTIVES())) { stop( "Got multi-column 'y' - supported objectives for this data are: ", paste(.MULTI_TARGET_OBJECTIVES(), collapse = ", "), ". Was passed: ", objective ) } y_names <- names(y) y <- lapply(y, function(x) { if (!inherits(x, c("numeric", "integer"))) { stop( "Multi-target 'y' only supports 'numeric' and 'integer' types. Got: ", paste(class(x), collapse = ", ") ) } return(as.numeric(x)) }) y <- as.data.frame(y) |> as.matrix() if (!is.null(base_margin)) { base_margin <- process.base.margin(base_margin, length(y), ncol(y)) } out <- list( params = list( objective = objective ), dmatrix_args = list( label = y, base_margin = base_margin ), metadata = list( y_names = y_names, n_targets = ncol(y) ) ) } else if (is.matrix(y)) { if (ncol(y) == 1L) { return(process.y.margin.and.objective(as.vector(y), base_margin, objective, params)) } if (!is.null(objective) && !(objective %in% .MULTI_TARGET_OBJECTIVES())) { stop( "Got multi-column 'y' - supported objectives for this data are: ", paste(.MULTI_TARGET_OBJECTIVES(), collapse = ", "), ". 
Was passed: ", objective ) } if (is.null(objective)) { objective <- "reg:squarederror" } y_names <- colnames(y) if (storage.mode(y) != "double") { storage.mode(y) <- "double" } if (!is.null(base_margin)) { base_margin <- process.base.margin(base_margin, nrow(y), ncol(y)) } out <- list( params = list( objective = objective ), dmatrix_args = list( label = y, base_margin = base_margin ), metadata = list( n_targets = ncol(y) ) ) if (NROW(y_names) == ncol(y)) { out$metadata$y_names <- y_names } } else { stop("Passed 'y' object with unsupported class: ", paste(class(y), collapse = ", ")) } return(out) } process.row.weights <- function(w, lst_args) { if (!is.null(w)) { if ("label" %in% names(lst_args$dmatrix_args)) { nrow_y <- NROW(lst_args$dmatrix_args$label) } else if ("label_lower_bound" %in% names(lst_args$dmatrix_args)) { nrow_y <- length(lst_args$dmatrix_args$label_lower_bound) } else { stop("Internal error.") } if (!is.numeric(w)) { w <- as.numeric(w) } if (length(w) != nrow_y) { stop( "'weights' must be a 1-d vector with the same length as 'y' (", length(w), " vs. ", nrow_y, ")." ) } lst_args$dmatrix_args$weight <- w } return(lst_args) } check.nthreads <- function(nthreads) { if (is.null(nthreads)) { return(1L) } if (!inherits(nthreads, c("numeric", "integer")) || !NROW(nthreads)) { stop("'nthreads' must be a positive scalar value.") } if (length(nthreads) > 1L) { nthreads <- utils::head(nthreads, 1L) } if (is.na(nthreads) || nthreads < 0) { stop("Passed invalid 'nthreads': ", nthreads) } if (is.numeric(nthreads)) { if (floor(nthreads) != nthreads) { stop("'nthreads' must be an integer.") } } return(as.integer(nthreads)) } check.can.use.qdm <- function(x, params, eval_set) { if ("booster" %in% names(params)) { if (params$booster == "gblinear") { return(FALSE) } } if ("tree_method" %in% names(params)) { if (params$tree_method %in% c("exact", "approx")) { return(FALSE) } } if (NROW(eval_set)) { return(FALSE) } return(TRUE) } process.x.and.col.args <- function( x, monotone_constraints, interaction_constraints, feature_weights, lst_args, use_qdm ) { if (is.null(x)) { stop("'x' cannot be NULL.") } if (inherits(x, "xgb.DMatrix")) { stop("Cannot pass 'xgb.DMatrix' as 'x' to 'xgboost()'. Try 'xgb.train()' instead.") } supported_x_types <- c("data.frame", "matrix", "dgTMatrix", "dgCMatrix", "dgRMatrix") if (!inherits(x, supported_x_types)) { stop( "'x' must be one of the following classes: ", paste(supported_x_types, collapse = ", "), ". Got: ", paste(class(x), collapse = ", ") ) } if (use_qdm && inherits(x, "sparseMatrix") && !inherits(x, "dgRMatrix")) { x <- methods::as(x, "RsparseMatrix") if (!inherits(x, "RsparseMatrix")) { stop("Internal error: casting sparse matrix did not yield 'dgRMatrix'.") } } if (NROW(feature_weights)) { if (is.list(feature_weights)) { feature_weights <- unlist(feature_weights) } if (!inherits(feature_weights, c("numeric", "integer"))) { stop("'feature_weights' must be a numeric vector or named list matching to columns of 'x'.") } if (NROW(names(feature_weights)) && NROW(colnames(x))) { matched <- match(colnames(x), names(feature_weights)) matched <- matched[!is.na(matched)] matched <- matched[!duplicated(matched)] if (length(matched) > 0 && length(matched) < length(feature_weights)) { stop( "'feature_weights' names do not contain all columns of 'x'. Missing: ", utils::head(setdiff(colnames(x), names(feature_weights))) ) } if (length(matched)) { feature_weights <- feature_weights[matched] } else { warning("Names of 'feature_weights' do not match with 'x'. 
Names will be ignored.") } } lst_args$dmatrix_args$feature_weights <- unname(feature_weights) } if (NROW(monotone_constraints)) { if (NROW(monotone_constraints) > ncol(x)) { stop( "'monotone_constraints' contains more entries than there are columns in 'x' (", NROW(monotone_constraints), " vs. ", ncol(x), ")." ) } if (is.list(monotone_constraints)) { if (!NROW(names(monotone_constraints))) { stop( "If passing 'monotone_constraints' as a named list,", " must have names matching to columns of 'x'." ) } if (!NROW(colnames(x))) { stop("If passing 'monotone_constraints' as a named list, 'x' must have column names.") } if (anyDuplicated(names(monotone_constraints))) { stop( "'monotone_constraints' contains duplicated names: ", paste( names(monotone_constraints)[duplicated(names(monotone_constraints))] |> utils::head(), collapse = ", " ) ) } if (NROW(setdiff(names(monotone_constraints), colnames(x)))) { stop( "'monotone_constraints' contains column names not present in 'x': ", paste(utils::head(names(monotone_constraints)), collapse = ", ") ) } vec_monotone_constr <- rep(0, ncol(x)) matched <- match(names(monotone_constraints), colnames(x)) vec_monotone_constr[matched] <- unlist(monotone_constraints) lst_args$params$monotone_constraints <- unname(vec_monotone_constr) } else if (inherits(monotone_constraints, c("numeric", "integer"))) { if (NROW(names(monotone_constraints)) && NROW(colnames(x))) { if (length(monotone_constraints) < ncol(x)) { return( process.x.and.col.args( x, as.list(monotone_constraints), interaction_constraints, feature_weights, lst_args, use_qdm ) ) } else { matched <- match(names(monotone_constraints), colnames(x)) matched <- matched[!is.na(matched)] matched <- matched[!duplicated(matched)] if (length(matched)) { monotone_constraints <- monotone_constraints[matched] } else { warning("Names of 'monotone_constraints' do not match with 'x'. Names will be ignored.") } } } else { if (length(monotone_constraints) != ncol(x)) { stop( "If passing 'monotone_constraints' as unnamed vector or not using column names,", " must have length matching to number of columns in 'x'. Got: ", length(monotone_constraints), " (vs. ", ncol(x), ")" ) } } lst_args$params$monotone_constraints <- unname(monotone_constraints) } else if (is.character(monotone_constraints)) { lst_args$params$monotone_constraints <- monotone_constraints } else { stop( "Passed unsupported type for 'monotone_constraints': ", paste(class(monotone_constraints), collapse = ", ") ) } } if (NROW(interaction_constraints)) { if (!is.list(interaction_constraints)) { stop("'interaction_constraints' must be a list of vectors.") } cnames <- colnames(x) lst_args$params$interaction_constraints <- lapply(interaction_constraints, function(idx) { if (!NROW(idx)) { stop("Elements in 'interaction_constraints' cannot be empty.") } if (is.character(idx)) { if (!NROW(cnames)) { stop( "Passed a character vector for 'interaction_constraints', but 'x' ", "has no column names to match them against." 
) } out <- match(idx, cnames) - 1L if (anyNA(out)) { stop( "'interaction_constraints' contains column names not present in 'x': ", paste(utils::head(idx[which(is.na(out))]), collapse = ", ") ) } return(out) } else if (inherits(idx, c("numeric", "integer"))) { if (anyNA(idx)) { stop("'interaction_constraints' cannot contain NA values.") } if (min(idx) < 1) { stop("Column indices for 'interaction_constraints' must follow base-1 indexing.") } if (max(idx) > ncol(x)) { stop("'interaction_constraints' contains invalid column indices.") } if (is.numeric(idx)) { if (any(idx != floor(idx))) { stop( "'interaction_constraints' must contain only integer indices. Got non-integer: ", paste(utils::head(idx[which(idx != floor(idx))]), collapse = ", ") ) } } return(idx - 1L) } else { stop( "Elements in 'interaction_constraints' must be vectors of types ", "'integer', 'numeric', or 'character'. Got: ", paste(class(idx), collapse = ", ") ) } }) } lst_args$dmatrix_args$data <- x return(lst_args) } process.eval.set <- function(eval_set, lst_args) { if (!NROW(eval_set)) { return(NULL) } nrows <- nrow(lst_args$dmatrix_args$data) is_classif <- hasName(lst_args$metadata, "y_levels") processed_y <- lst_args$dmatrix_args$label eval_set <- as.vector(eval_set) if (length(eval_set) == 1L) { eval_set <- as.numeric(eval_set) if (is.na(eval_set) || eval_set < 0 || eval_set >= 1) { stop("'eval_set' as a fraction must be a number between zero and one (non-inclusive).") } if (eval_set == 0) { return(NULL) } nrow_eval <- as.integer(round(nrows * eval_set, 0)) if (nrow_eval < 1) { warning( "Desired 'eval_set' fraction amounts to zero observations.", " Will not create evaluation set." ) return(NULL) } nrow_train <- nrows - nrow_eval if (nrow_train < 2L) { stop("Desired 'eval_set' fraction would leave less than 2 observations for training data.") } if (is_classif && nrow_train < length(lst_args$metadata$y_levels)) { stop("Desired 'eval_set' fraction would not leave enough samples for each class of 'y'.") } seed <- lst_args$params$seed if (!is.null(seed)) { set.seed(seed) } idx_shuffled <- sample(nrows, nrows, replace = FALSE) idx_eval <- idx_shuffled[seq(1L, nrow_eval)] idx_train <- idx_shuffled[seq(nrow_eval + 1L, nrows)] # Here we want the training set to include all of the classes of 'y' for classification # objectives. If that condition doesn't hold with the random sample, then it forcibly # makes a new random selection in such a way that the condition would always hold, by # first sampling one random example of 'y' for training and then choosing the evaluation # set from the remaining rows. The procedure here is quite inefficient, but there aren't # enough random-related functions in base R to be able to construct an efficient version. if (is_classif && length(unique(processed_y[idx_train])) < length(lst_args$metadata$y_levels)) { # These are defined in order to avoid NOTEs from CRAN checks # when using non-standard data.table evaluation with column names. 
idx <- NULL y <- NULL ranked_idx <- NULL chosen <- NULL dt <- data.table::data.table(y = processed_y, idx = seq(1L, nrows))[ , .( ranked_idx = seq(1L, .N), chosen = rep(sample(.N, 1L), .N), idx ) , by = y ] min_idx_train <- dt[ranked_idx == chosen, idx] rem_idx <- dt[ranked_idx != chosen, idx] if (length(rem_idx) == nrow_eval) { idx_train <- min_idx_train idx_eval <- rem_idx } else { rem_idx <- rem_idx[sample(length(rem_idx), length(rem_idx), replace = FALSE)] idx_eval <- rem_idx[seq(1L, nrow_eval)] idx_train <- c(min_idx_train, rem_idx[seq(nrow_eval + 1L, length(rem_idx))]) } } } else { if (any(eval_set != floor(eval_set))) { stop("'eval_set' as indices must contain only integers.") } eval_set <- as.integer(eval_set) idx_min <- min(eval_set) if (is.na(idx_min) || idx_min < 1L) { stop("'eval_set' contains invalid indices.") } idx_max <- max(eval_set) if (is.na(idx_max) || idx_max > nrows) { stop("'eval_set' contains row indices beyond the size of the input data.") } idx_train <- seq(1L, nrows)[-eval_set] if (is_classif && length(unique(processed_y[idx_train])) < length(lst_args$metadata$y_levels)) { warning("'eval_set' indices will leave some classes of 'y' outside of the training data.") } idx_eval <- eval_set } # Note: slicing is done in the constructed DMatrix object instead of in the # original input, because objects from 'Matrix' might change class after # being sliced (e.g. 'dgRMatrix' turns into 'dgCMatrix'). return(list(idx_train = idx_train, idx_eval = idx_eval)) } check.early.stopping.rounds <- function(early_stopping_rounds, eval_set) { if (is.null(early_stopping_rounds)) { return(NULL) } if (is.null(eval_set)) { stop("'early_stopping_rounds' requires passing 'eval_set'.") } if (NROW(early_stopping_rounds) != 1L) { stop("'early_stopping_rounds' must be NULL or an integer greater than zero.") } early_stopping_rounds <- as.integer(early_stopping_rounds) if (is.na(early_stopping_rounds) || early_stopping_rounds <= 0L) { stop( "'early_stopping_rounds' must be NULL or an integer greater than zero. Got: ", early_stopping_rounds ) } return(early_stopping_rounds) } # nolint start: line_length_linter. #' Fit XGBoost Model #' #' @export #' @description #' Fits an XGBoost model (boosted decision tree ensemble) to given x/y data. #' #' See the tutorial [Introduction to Boosted Trees](https://xgboost.readthedocs.io/en/stable/tutorials/model.html) #' for a longer explanation of what XGBoost does, and the rest of the #' [XGBoost Tutorials](https://xgboost.readthedocs.io/en/latest/tutorials/index.html) for further #' explanations of XGBoost's features and usage. #' #' This function is intended to provide a user-friendly interface for XGBoost that follows #' R's conventions for model fitting and predictions, but which doesn't expose all of the #' possible functionalities of the core XGBoost library. #' #' See [xgb.train()] for a more flexible low-level alternative which is similar across different #' language bindings of XGBoost and which exposes additional functionalities such as training on #' external memory data and learning-to-rank objectives. #' #' See also the [migration guide](https://xgboost.readthedocs.io/en/latest/R-package/migration_guide.html) #' if coming from a previous version of XGBoost in the 1.x series. #' #' By default, most of the parameters here have a value of `NULL`, which signals XGBoost to use its #' default value. Default values are automatically determined by the XGBoost core library, and are #' subject to change over XGBoost library versions.
Some of them might differ according to the #' booster type (e.g. defaults for regularization are different for linear and tree-based boosters). #' See [xgb.params()] and the [online documentation](https://xgboost.readthedocs.io/en/latest/parameter.html) #' for more details about parameters - but note that some of the parameters are not supported in #' the `xgboost()` interface. #' @details #' For package authors using 'xgboost' as a dependency, it is highly recommended to use #' [xgb.train()] in package code instead of [xgboost()], since it has a more stable interface #' and performs fewer data conversions and copies along the way. #' @references #' - Chen, Tianqi, and Carlos Guestrin. "Xgboost: A scalable tree boosting system." #' Proceedings of the 22nd acm sigkdd international conference on knowledge discovery and #' data mining. 2016. #' - \url{https://xgboost.readthedocs.io/en/stable/} #' @param x The features / covariates. Can be passed as: #' - A numeric or integer `matrix`. #' - A `data.frame`, in which all columns are one of the following types: #' - `numeric` #' - `integer` #' - `logical` #' - `factor` #' #' Columns of `factor` type will be assumed to be categorical, while other column types will #' be assumed to be numeric. #' - A sparse matrix from the `Matrix` package, either as `dgCMatrix` or `dgRMatrix` class. #' #' Note that categorical features are only supported for `data.frame` inputs, and are automatically #' determined based on their types. See [xgb.train()] with [xgb.DMatrix()] for more flexible #' variants that would allow something like categorical features on sparse matrices. #' @param y The response variable. Allowed values are: #' - A numeric or integer vector (for regression tasks). #' - A factor or character vector (for binary and multi-class classification tasks). #' - A logical (boolean) vector (for binary classification tasks). #' - A numeric or integer matrix or `data.frame` with numeric/integer columns #' (for multi-task regression tasks). #' - A `Surv` object from the 'survival' package (for survival tasks). #' #' If `objective` is `NULL`, the right task will be determined automatically based on #' the class of `y`. #' #' If `objective` is not `NULL`, it must match with the type of `y` - e.g. `factor` types of `y` #' can only be used with classification objectives and vice-versa. #' #' For binary classification, the last factor level of `y` will be used as the "positive" #' class - that is, the numbers from `predict` will reflect the probabilities of belonging to this #' class instead of to the first factor level. If `y` is a `logical` vector, then `TRUE` will be #' set as the last level. #' @param objective Optimization objective to minimize based on the supplied data, to be passed #' by name as a string / character (e.g. `reg:absoluteerror`). See the #' [Learning Task Parameters](https://xgboost.readthedocs.io/en/stable/parameter.html#learning-task-parameters) #' page and the [xgb.params()] documentation for more detailed information on allowed values. #' #' If `NULL` (the default), will be automatically determined from `y` according to the following #' logic: #' - If `y` is a factor with 2 levels, will use `binary:logistic`. #' - If `y` is a factor with more than 2 levels, will use `multi:softprob` (number of classes #' will be determined automatically, should not be passed under `params`). #' - If `y` is a `Surv` object from the `survival` package, will use `survival:aft` (note that #' the only types supported are left / right / interval censored). 
#' - Otherwise, will use `reg:squarederror`. #' #' If `objective` is not `NULL`, it must match with the type of `y` - e.g. `factor` types of `y` #' can only be used with classification objectives and vice-versa. #' #' Note that not all possible `objective` values supported by the core XGBoost library are allowed #' here - for example, objectives which are a variation of another but with a different default #' prediction type (e.g. `multi:softmax` vs. `multi:softprob`) are not allowed, and neither are #' ranking objectives, nor custom objectives at the moment. #' #' Supported values are: #' - `"reg:squarederror"`: regression with squared loss. #' - `"reg:squaredlogerror"`: regression with squared log loss \eqn{\frac{1}{2}[log(pred + 1) - log(label + 1)]^2}. All input labels are required to be greater than -1. Also, see metric `rmsle` for possible issue with this objective. #' - `"reg:pseudohubererror"`: regression with Pseudo Huber loss, a twice differentiable alternative to absolute loss. #' - `"reg:absoluteerror"`: Regression with L1 error. When tree model is used, leaf value is refreshed after tree construction. If used in distributed training, the leaf value is calculated as the mean value from all workers, which is not guaranteed to be optimal. #' - `"reg:quantileerror"`: Quantile loss, also known as "pinball loss". See later sections for its parameter and [Quantile Regression](https://xgboost.readthedocs.io/en/latest/python/examples/quantile_regression.html#sphx-glr-python-examples-quantile-regression-py) for a worked example. #' - `"binary:logistic"`: logistic regression for binary classification, output probability #' - `"binary:hinge"`: hinge loss for binary classification. This makes predictions of 0 or 1, rather than producing probabilities. #' - `"count:poisson"`: Poisson regression for count data, output mean of Poisson distribution. #' `"max_delta_step"` is set to 0.7 by default in Poisson regression (used to safeguard optimization) #' - `"survival:cox"`: Cox regression for right censored survival time data (negative values are considered right censored). #' #' Note that predictions are returned on the hazard ratio scale (i.e., as HR = exp(marginal_prediction) in the proportional hazard function `h(t) = h0(t) * HR`). #' - `"survival:aft"`: Accelerated failure time model for censored survival time data. #' See [Survival Analysis with Accelerated Failure Time](https://xgboost.readthedocs.io/en/latest/tutorials/aft_survival_analysis.html) for details. #' - `"multi:softprob"`: multi-class classification through multinomial logistic likelihood. #' - `"reg:gamma"`: gamma regression with log-link. Output is a mean of gamma distribution. It might be useful, e.g., for modeling insurance claims severity, or for any outcome that might be [gamma-distributed](https://en.wikipedia.org/wiki/Gamma_distribution#Occurrence_and_applications). #' - `"reg:tweedie"`: Tweedie regression with log-link. It might be useful, e.g., for modeling total loss in insurance, or for any outcome that might be [Tweedie-distributed](https://en.wikipedia.org/wiki/Tweedie_distribution#Occurrence_and_applications). #' #' The following values are \bold{NOT} supported by `xgboost`, but are supported by [xgb.train()] #' (see [xgb.params()] for details): #' - `"reg:logistic"` #' - `"binary:logitraw"` #' - `"multi:softmax"` #' - `"rank:ndcg"` #' - `"rank:map"` #' - `"rank:pairwise"` #' @param nrounds Number of boosting iterations / rounds.
#' #' Note that the number of default boosting rounds here is not automatically tuned, and different #' problems will have vastly different optimal numbers of boosting rounds. #' @param weights Sample weights for each row in `x` and `y`. If `NULL` (the default), each row #' will have the same weight. #' #' If not `NULL`, should be passed as a numeric vector with length matching to the number of rows in `x`. #' @param verbosity Verbosity of printing messages. Valid values are 0 (silent), 1 (warning), #' 2 (info), and 3 (debug). #' @param monitor_training Whether to monitor objective optimization progress on the input data. #' Note that the same 'x' and 'y' data are used for both model fitting and evaluation. #' @param eval_set Subset of the data to use as evaluation set. Can be passed as: #' - A vector of row indices (base-1 numeration) indicating the observations that are to be designated #' as evaluation data. #' - A number between zero and one indicating a random fraction of the input data to use as #' evaluation data. Note that the selection will be done uniformly at random, regardless of #' argument `weights`. #' #' If passed, this subset of the data will be excluded from the training procedure, and the #' evaluation metric(s) supplied under `eval_metric` will be calculated on this dataset after each #' boosting iteration (pass `verbosity>0` to have these metrics printed during training). If #' `eval_metric` is not passed, a default metric will be selected according to `objective`. #' #' If passing a fraction, in classification problems, the evaluation set will be chosen in such a #' way that at least one observation of each class will be kept in the training data. #' #' For more elaborate evaluation variants (e.g. custom metrics, multiple evaluation sets, etc.), #' one might want to use [xgb.train()] instead. #' @param early_stopping_rounds Number of boosting rounds after which training will be stopped #' if there is no improvement in performance (as measured by the last metric passed under #' `eval_metric`, or by the default metric for the objective if `eval_metric` is not passed) on the #' evaluation data from `eval_set`. Must pass `eval_set` in order to use this functionality. #' #' If `NULL`, early stopping will not be used. #' @param print_every_n When passing `verbosity>0` and either `monitor_training=TRUE` or `eval_set`, #' evaluation logs (metrics calculated on the training and/or evaluation data) will be printed every #' nth iteration according to the value passed here. The first and last iteration are always #' included regardless of this 'n'. #' #' Only has an effect when passing `verbosity>0`. #' @param nthreads Number of parallel threads to use. If passing zero, will use all CPU threads. #' @param seed Seed to use for random number generation. If passing `NULL`, will draw a random #' number using R's PRNG system to use as seed. #' @param monotone_constraints Optional monotonicity constraints for features. #' #' Can be passed either as a named list (when `x` has column names), or as a vector. If passed #' as a vector and `x` has column names, will try to match the elements by name. #' #' A value of `+1` for a given feature makes the model predictions / scores constrained to be #' a monotonically increasing function of that feature (that is, as the value of the feature #' increases, the model prediction cannot decrease), while a value of `-1` makes it a monotonically #' decreasing function. A value of zero imposes no constraint.
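#'
#' For example (an illustrative sketch, assuming `x` has a numeric column named `wt`),
#' passing `monotone_constraints = list(wt = -1)` would constrain predictions to be
#' monotonically non-increasing in `wt` while leaving all other features unconstrained.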
#' #' The input for `monotone_constraints` can be a subset of the columns of `x` if named, in which #' case the columns that are not referred to in `monotone_constraints` will be assumed to have #' a value of zero (no constraint imposed on the model for those features). #' #' See the tutorial [Monotonic Constraints](https://xgboost.readthedocs.io/en/stable/tutorials/monotonic.html) #' for a more detailed explanation. #' @param interaction_constraints Constraints for interaction representing permitted interactions. #' The constraints must be specified in the form of a list of vectors referencing columns in the #' data, e.g. `list(c(1, 2), c(3, 4, 5))` (with these numbers being column indices, numeration #' starting at 1 - i.e. the first sublist references the first and second columns) or #' `list(c("Sepal.Length", "Sepal.Width"), c("Petal.Length", "Petal.Width"))` (references #' columns by names), where each vector is a group of indices of features that are allowed to #' interact with each other. #' #' See the tutorial [Feature Interaction Constraints](https://xgboost.readthedocs.io/en/stable/tutorials/feature_interaction_constraint.html) #' for more information. #' @param feature_weights Feature weights for column sampling. #' #' Can be passed either as a vector with length matching to columns of `x`, or as a named #' list (only if `x` has column names) with names matching to columns of 'x'. If it is a #' named vector, will try to match the entries to column names of `x` by name. #' #' If `NULL` (the default), all columns will have the same weight. #' @param base_margin Base margin used for boosting from existing model. #' #' If passing it, will start the gradient boosting procedure from the scores that are provided #' here - for example, one can pass the raw scores from a previous model, or some per-observation #' offset, or similar. #' #' Should be either a numeric vector or numeric matrix (for multi-class and multi-target objectives) #' with the same number of rows as `x` and number of columns corresponding to number of optimization #' targets, and should be in the untransformed scale (for example, for objective `binary:logistic`, #' it should have log-odds, not probabilities; and for objective `multi:softprob`, should have #' number of columns matching to number of classes in the data). #' #' Note that, if it contains more than one column, then columns will not be matched by name to #' the corresponding `y` - `base_margin` should have the same column order that the model will use #' (for example, for objective `multi:softprob`, columns of `base_margin` will be matched against #' `levels(y)` by their position, regardless of what `colnames(base_margin)` returns). #' #' If `NULL`, will start from zero, but note that for most objectives, an intercept is usually #' added (controllable through parameter `base_score` instead) when `base_margin` is not passed. #' @param min_split_loss (for Tree Booster) (default=0, alias: `gamma`) #' Minimum loss reduction required to make a further partition on a leaf node of the tree. The larger `min_split_loss` is, the more conservative the algorithm will be. Note that a tree where no splits were made might still contain a single terminal node with a non-zero score. #' #' range: \eqn{[0, \infty)} #' @param learning_rate (alias: `eta`) #' Step size shrinkage used in update to prevent overfitting. 
After each boosting step, we can directly get the weights of new features, and `learning_rate` shrinks the feature weights to make the boosting process more conservative. #' - range: \eqn{[0,1]} #' - default value: 0.3 for tree-based boosters, 0.5 for linear booster. #' @param reg_lambda (alias: `lambda`) #' - For tree-based boosters: #' - L2 regularization term on weights. Increasing this value will make model more conservative. #' - default: 1 #' - range: \eqn{[0, \infty)} #' - For linear booster: #' - L2 regularization term on weights. Increasing this value will make model more conservative. Normalised to number of training examples. #' - default: 0 #' - range: \eqn{[0, \infty)} #' @param reg_alpha (alias: `alpha`) #' - L1 regularization term on weights. Increasing this value will make model more conservative. #' - For the linear booster, it's normalised to number of training examples. #' - default: 0 #' - range: \eqn{[0, \infty)} #' @param updater (for Linear Booster) (default= `"shotgun"`) #' Choice of algorithm to fit linear model #' - `"shotgun"`: Parallel coordinate descent algorithm based on shotgun algorithm. Uses 'hogwild' parallelism and therefore produces a nondeterministic solution on each run. #' - `"coord_descent"`: Ordinary coordinate descent algorithm. Also multithreaded but still produces a deterministic solution. When the `device` parameter is set to `"cuda"` or `"gpu"`, a GPU variant would be used. #' @inheritParams xgb.params #' @inheritParams xgb.train #' @return A model object, inheriting from both `xgboost` and `xgb.Booster`. Compared to the regular #' `xgb.Booster` model class produced by [xgb.train()], this `xgboost` class will have an #' additional attribute `metadata` containing information which is used for formatting prediction #' outputs, such as class names for classification problems.
#' #' @examples #' data(mtcars) #' #' # Fit a small regression model on the mtcars data #' model_regression <- xgboost(mtcars[, -1], mtcars$mpg, nthreads = 1, nrounds = 3) #' predict(model_regression, mtcars, validate_features = TRUE) #' #' # Task objective is determined automatically according to the type of 'y' #' data(iris) #' model_classif <- xgboost(iris[, -5], iris$Species, nthreads = 1, nrounds = 5) #' predict(model_classif, iris[1:10,]) #' predict(model_classif, iris[1:10,], type = "class") #' #' # Can nevertheless choose a non-default objective if needed #' model_poisson <- xgboost( #' mtcars[, -1], mtcars$mpg, #' objective = "count:poisson", #' nthreads = 1, #' nrounds = 3 #' ) #' #' # Can calculate evaluation metrics during boosting rounds #' data(ToothGrowth) #' xgboost( #' ToothGrowth[, c("len", "dose")], #' ToothGrowth$supp, #' eval_metric = c("auc", "logloss"), #' eval_set = 0.2, #' monitor_training = TRUE, #' verbosity = 1, #' nthreads = 1, #' nrounds = 3 #' ) xgboost <- function( x, y, objective = NULL, nrounds = 100L, max_depth = NULL, learning_rate = NULL, min_child_weight = NULL, min_split_loss = NULL, reg_lambda = NULL, weights = NULL, verbosity = if (is.null(eval_set)) 0L else 1L, monitor_training = verbosity > 0, eval_set = NULL, early_stopping_rounds = NULL, print_every_n = 1L, eval_metric = NULL, nthreads = parallel::detectCores(), seed = 0L, base_margin = NULL, monotone_constraints = NULL, interaction_constraints = NULL, reg_alpha = NULL, max_bin = NULL, max_leaves = NULL, booster = NULL, subsample = NULL, sampling_method = NULL, feature_weights = NULL, colsample_bytree = NULL, colsample_bylevel = NULL, colsample_bynode = NULL, tree_method = NULL, max_delta_step = NULL, scale_pos_weight = NULL, updater = NULL, grow_policy = NULL, num_parallel_tree = NULL, multi_strategy = NULL, base_score = NULL, seed_per_iteration = NULL, device = NULL, disable_default_eval_metric = NULL, use_rmm = NULL, max_cached_hist_node = NULL, extmem_single_page = NULL, max_cat_to_onehot = NULL, max_cat_threshold = NULL, sample_type = NULL, normalize_type = NULL, rate_drop = NULL, one_drop = NULL, skip_drop = NULL, feature_selector = NULL, top_k = NULL, tweedie_variance_power = NULL, huber_slope = NULL, quantile_alpha = NULL, aft_loss_distribution = NULL, ... ) { # nolint end check.deprecation(deprecated_xgboost_params, match.call(), ...) 
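# Collect the explicitly-passed hyperparameters from this call's environment;
# arguments that receive dedicated processing below (objective, base_margin,
# monotone_constraints, interaction_constraints) are filtered out of the
# parameter list here.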
params <- as.list(environment()) params <- params[ (names(params) %in% formalArgs(xgb.params)) & !sapply(params, is.null) & !(names(params) %in% c( # these undergo additional processing here "objective", "base_margin", "monotone_constraints", "interaction_constraints" )) ] prescreen.objective(objective) use_qdm <- check.can.use.qdm(x, params, eval_set) lst_args <- process.y.margin.and.objective(y, base_margin, objective, params) lst_args <- process.row.weights(weights, lst_args) lst_args <- process.x.and.col.args( x, monotone_constraints, interaction_constraints, feature_weights, lst_args, use_qdm ) eval_set <- process.eval.set(eval_set, lst_args) if (use_qdm && hasName(params, "max_bin")) { lst_args$dmatrix_args$max_bin <- params$max_bin } nthreads <- check.nthreads(nthreads) lst_args$dmatrix_args$nthread <- nthreads lst_args$params$nthread <- nthreads params <- c(lst_args$params, params) params$verbosity <- verbosity fn_dm <- if (use_qdm) xgb.QuantileDMatrix else xgb.DMatrix dm <- do.call(fn_dm, lst_args$dmatrix_args) if (!is.null(eval_set)) { dm_eval <- xgb.slice.DMatrix(dm, eval_set$idx_eval) dm <- xgb.slice.DMatrix(dm, eval_set$idx_train) } evals <- list() if (monitor_training) { evals <- list(train = dm) } if (!is.null(eval_set)) { evals <- c(evals, list(eval = dm_eval)) } model <- xgb.train( params = params, data = dm, nrounds = nrounds, verbose = verbosity, print_every_n = print_every_n, evals = evals ) attributes(model)$metadata <- lst_args$metadata attributes(model)$call <- match.call() class(model) <- c("xgboost", class(model)) return(model) } #' @title Compute predictions from XGBoost model on new data #' @description Predict values on data based on XGBoost model. #' @param object An XGBoost model object of class `xgboost`, as produced by function [xgboost()]. #' #' Note that there is also a lower-level [predict.xgb.Booster()] method for models of class #' `xgb.Booster` as produced by [xgb.train()], which can also be used for `xgboost` class models as #' an alternative that performs fewer validations and post-processings. #' @param newdata Data on which to compute predictions from the model passed in `object`. Supported #' input classes are: #' - Data Frames (class `data.frame` from base R and subclasses like `data.table`). #' - Matrices (class `matrix` from base R). #' - Sparse matrices from package `Matrix`, either as class `dgRMatrix` (CSR) or `dgCMatrix` (CSC). #' - Sparse vectors from package `Matrix`, which will be interpreted as containing a single #' observation. #' #' In the case of data frames, if there are any categorical features, they should be of class #' `factor` and should have the same levels as the `factor` columns of the data from which the model #' was constructed. Any columns with type other than `factor` will be interpreted as numeric. #' #' If there are named columns and the model was fitted to data with named columns, they will be #' matched by name by default (see `validate_features`). #' @param type Type of prediction to make. Supported options are: #' - `"response"`: will output model predictions on the scale of the response variable (e.g. #' probabilities of belonging to the last class in the case of binary classification). Result will #' be either a numeric vector with length matching to rows in `newdata`, or a numeric matrix with #' shape `[nrows(newdata), nscores]` (for objectives that produce more than one score per #' observation such as multi-class classification or multi-quantile regression). 
#' @title Compute predictions from XGBoost model on new data
#' @description Predict values on data based on an XGBoost model.
#' @param object An XGBoost model object of class `xgboost`, as produced by function [xgboost()].
#'
#' Note that there is also a lower-level [predict.xgb.Booster()] method for models of class
#' `xgb.Booster` as produced by [xgb.train()], which can also be used for `xgboost` class models as
#' an alternative that performs fewer validations and less post-processing.
#' @param newdata Data on which to compute predictions from the model passed in `object`. Supported
#' input classes are:
#' - Data Frames (class `data.frame` from base R and subclasses like `data.table`).
#' - Matrices (class `matrix` from base R).
#' - Sparse matrices from package `Matrix`, either as class `dgRMatrix` (CSR) or `dgCMatrix` (CSC).
#' - Sparse vectors from package `Matrix`, which will be interpreted as containing a single
#' observation.
#'
#' In the case of data frames, if there are any categorical features, they should be of class
#' `factor` and should have the same levels as the `factor` columns of the data from which the model
#' was constructed. Any columns with type other than `factor` will be interpreted as numeric.
#'
#' If there are named columns and the model was fitted to data with named columns, they will be
#' matched by name by default (see `validate_features`).
#' @param type Type of prediction to make. Supported options are:
#' - `"response"`: will output model predictions on the scale of the response variable (e.g.
#' probabilities of belonging to the last class in the case of binary classification). Result will
#' be either a numeric vector with length matching the number of rows in `newdata`, or a numeric
#' matrix with shape `[nrows(newdata), nscores]` (for objectives that produce more than one score
#' per observation, such as multi-class classification or multi-quantile regression).
#' - `"raw"`: will output the unprocessed boosting scores (e.g. log-odds in the case of objective
#' `binary:logistic`). Same output shape and type as for `"response"`.
#' - `"class"`: will output the class with the highest predicted probability, returned as a `factor`
#' (only applicable to classification objectives) with length matching the number of rows in
#' `newdata`.
#' - `"leaf"`: will output the terminal node indices of each observation across each tree, as an
#' integer matrix of shape `[nrows(newdata), ntrees]`, or as an integer array with an extra one or
#' two dimensions, up to `[nrows(newdata), ntrees, nscores, n_parallel_trees]` for models that
#' produce more than one score per tree and/or which have more than one parallel tree (e.g.
#' random forests).
#'
#' Only applicable to tree-based boosters (not `gblinear`).
#' - `"contrib"`: will produce per-feature contribution estimates towards the model score for a
#' given observation, based on SHAP values. The contribution values are on the scale of the
#' untransformed margin (e.g., for binary classification, the values are log-odds deviations from
#' the baseline).
#'
#' Output will be a numeric matrix with shape `[nrows, nfeatures+1]`, with the intercept being the
#' last feature, or a numeric array with shape `[nrows, nscores, nfeatures+1]` if the model
#' produces more than one score per observation.
#' - `"interaction"`: similar to `"contrib"`, but computing SHAP values for the contribution of
#' each pair of interacting features. Note that this operation might be rather expensive in
#' terms of compute and memory.
#'
#' Since it depends quadratically on the number of features, it is recommended to perform
#' selection of the most important features first.
#'
#' Output will be a numeric array of shape `[nrows, nfeatures+1, nfeatures+1]`, or shape
#' `[nrows, nscores, nfeatures+1, nfeatures+1]` (for objectives that produce more than one score
#' per observation).
#' @param base_margin Base margin used for boosting from an existing model (raw score that gets
#' added to all observations independently of the trees in the model).
#'
#' If supplied, should be either a vector with length equal to the number of rows in `newdata`
#' (for objectives which produce a single score per observation), or a matrix with the number of
#' rows matching the number of rows in `newdata` and the number of columns matching the number
#' of scores estimated by the model (e.g. number of classes for multi-class classification).
#' @param iteration_range Sequence of rounds/iterations from the model to use for prediction,
#' specified by passing a vector with the start and end numbers of the sequence (same format as
#' R's `seq()` - i.e. base-1 indexing, and inclusive of both ends).
#'
#' For example, passing `c(1, 20)` will predict using the first twenty iterations, while passing
#' `c(1, 1)` will predict using only the first one.
#'
#' If passing `NULL`, will either stop at the best iteration if the model used early stopping, or
#' use all of the iterations (rounds) otherwise.
#'
#' If passing `"all"`, will use all of the rounds regardless of whether the model had early
#' stopping or not.
#'
#' Not applicable to the `gblinear` booster.
#' @param validate_features Validate that the feature names in the data match the feature names
#' in the model, and reorder the columns in the data otherwise.
#'
#' If passing `FALSE`, it is assumed that the feature names and types are the same,
#' and come in the same order as in the training data.
#'
#' Be aware that this only applies to column names and not to factor levels in categorical columns.
#'
#' Note that this check might add some sizable latency to the predictions, so it's
#' recommended to disable it for performance-sensitive applications.
#' @param ... Not used.
#' @return Either a numeric vector (for 1D outputs), numeric matrix (for 2D outputs), numeric array
#' (for 3D and higher), or `factor` (for class predictions). See the documentation for parameter
#' `type` for details about what the output type and shape will be.
#' @method predict xgboost
#' @export
#' @examples
#' data("ToothGrowth")
#' y <- ToothGrowth$supp
#' x <- ToothGrowth[, -2L]
#' model <- xgboost(x, y, nthreads = 1L, nrounds = 3L, max_depth = 2L)
#' pred_prob <- predict(model, x[1:5, ], type = "response")
#' pred_raw <- predict(model, x[1:5, ], type = "raw")
#' pred_class <- predict(model, x[1:5, ], type = "class")
#'
#' # Relationships between these
#' manual_probs <- 1 / (1 + exp(-pred_raw))
#' manual_class <- ifelse(manual_probs < 0.5, levels(y)[1], levels(y)[2])
#'
#' # They should match up to numerical precision
#' round(pred_prob, 6) == round(manual_probs, 6)
#' pred_class == manual_class
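#'
#' # SHAP-based feature contributions for the same rows; each row should sum
#' # to the corresponding raw score, up to numerical precision
#' pred_contrib <- predict(model, x[1:5, ], type = "contrib")
#' rowSums(pred_contrib) - pred_raw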
predict.xgboost <- function(
  object,
  newdata,
  type = "response",
  base_margin = NULL,
  iteration_range = NULL,
  validate_features = TRUE,
  ...
) {
  if (inherits(newdata, "xgb.DMatrix")) {
    stop(
      "Predictions on 'xgb.DMatrix' objects are not supported with 'xgboost' class.",
      " Try 'xgb.train' or 'predict.xgb.Booster'."
    )
  }
  outputmargin <- FALSE
  predleaf <- FALSE
  predcontrib <- FALSE
  predinteraction <- FALSE
  pred_class <- FALSE
  strict_shape <- FALSE
  allowed_types <- c(
    "response",
    "raw",
    "class",
    "leaf",
    "contrib",
    "interaction"
  )
  type <- head(type, 1L)
  if (!is.character(type) || !(type %in% allowed_types)) {
    stop("'type' must be one of: ", paste(allowed_types, collapse = ", "))
  }

  if (type != "response") {
    switch(
      type,
      "raw" = {
        outputmargin <- TRUE
      },
      "class" = {
        if (is.null(attributes(object)$metadata$y_levels)) {
          stop("Prediction type 'class' is only for classification objectives.")
        }
        pred_class <- TRUE
        outputmargin <- TRUE
      },
      "leaf" = {
        predleaf <- TRUE
        strict_shape <- TRUE  # required for 3D and 4D outputs
      },
      "contrib" = {
        predcontrib <- TRUE
      },
      "interaction" = {
        predinteraction <- TRUE
      }
    )
  }
  out <- predict.xgb.Booster(
    object,
    newdata,
    outputmargin = outputmargin,
    predleaf = predleaf,
    predcontrib = predcontrib,
    predinteraction = predinteraction,
    iterationrange = iteration_range,
    strict_shape = strict_shape,
    validate_features = validate_features,
    base_margin = base_margin
  )

  if (strict_shape) {
    # Should only end up here for leaf predictions
    out_dims <- dim(out)
    dims_remove <- integer()
    if (out_dims[3L] == 1L) {
      dims_remove <- c(dims_remove, -3L)
    }
    if (length(out_dims) >= 4L && out_dims[4L] == 1L) {
      dims_remove <- c(dims_remove, -4L)
    }
    if (length(dims_remove)) {
      new_dimnames <- dimnames(out)[dims_remove]
      dim(out) <- out_dims[dims_remove]
      dimnames(out) <- new_dimnames
    }
  }

  if (pred_class) {
    if (is.null(dim(out))) {
      out <- as.integer(out >= 0) + 1L
    } else {
      out <- max.col(out, ties.method = "first")
    }
    attr_out <- attributes(out)
    attr_out$class <- "factor"
    attr_out$levels <- attributes(object)$metadata$y_levels
    attributes(out) <- attr_out
  } else if (NCOL(out) > 1L || (strict_shape && length(dim(out)) >= 3L)) {
    names_use <- NULL
    if (NROW(attributes(object)$metadata$y_levels) > 2L) {
      names_use <- attributes(object)$metadata$y_levels
    } else if (NROW(attributes(object)$metadata$y_names)) {
      names_use <- attributes(object)$metadata$y_names
    } else if (NROW(attributes(object)$params$quantile_alpha) > 1L) {
      names_use <- paste0("q", attributes(object)$params$quantile_alpha)
      if (anyDuplicated(names_use)) {
        warning("Cannot add quantile names to output due to clashes in their character conversions")
        names_use <- NULL
      }
    }
    if (NROW(names_use)) {
      dimnames_out <- dimnames(out)
      dim_with_names <- if (type == "leaf") 3L else 2L
      dimnames_out[[dim_with_names]] <- names_use
      .Call(XGSetArrayDimNamesInplace_R, out, dimnames_out)
    }
  }
  return(out)
}
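
# A small sketch of the 'iteration_range' argument documented above (kept
# inside 'if (FALSE)' so it does not run at package load): predictions
# restricted to the first two boosting rounds versus predictions from every
# round regardless of early stopping.
if (FALSE) {
  data(mtcars)
  model_ir <- xgboost(mtcars[, -1], mtcars$mpg, nthreads = 1, nrounds = 5)
  pred_first2 <- predict(model_ir, mtcars, iteration_range = c(1, 2))
  pred_all <- predict(model_ir, mtcars, iteration_range = "all")
}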
#' @title Print info from XGBoost model
#' @description Prints basic properties of an XGBoost model object.
#' @param x An XGBoost model object of class `xgboost`, as produced by function [xgboost()].
#' @param ... Not used.
#' @return Same object `x`, after printing its info.
#' @method print xgboost
#' @export
print.xgboost <- function(x, ...) {
  cat("XGBoost model object\n")
  cat("Call:\n ")
  print(attributes(x)$call)
  cat("Objective: ", attributes(x)$params$objective, "\n", sep = "")
  cat("Number of iterations: ", xgb.get.num.boosted.rounds(x), "\n", sep = "")
  cat("Number of features: ", xgb.num_feature(x), "\n", sep = "")

  printable_head <- function(v) {
    v_sub <- utils::head(v, 5L)
    return(
      sprintf(
        "%s%s",
        paste(v_sub, collapse = ", "),
        ifelse(length(v_sub) < length(v), ", ...", "")
      )
    )
  }

  if (NROW(attributes(x)$metadata$y_levels)) {
    cat(
      "Classes: ",
      printable_head(attributes(x)$metadata$y_levels),
      "\n",
      sep = ""
    )
  } else if (NROW(attributes(x)$params$quantile_alpha)) {
    cat(
      "Prediction quantile",
      ifelse(length(attributes(x)$params$quantile_alpha) > 1L, "s", ""),
      ": ",
      printable_head(attributes(x)$params$quantile_alpha),
      "\n",
      sep = ""
    )
  } else if (NROW(attributes(x)$metadata$y_names)) {
    cat(
      "Prediction targets: ",
      printable_head(attributes(x)$metadata$y_names),
      "\n",
      sep = ""
    )
  } else if (attributes(x)$metadata$n_targets > 1L) {
    cat(
      "Number of prediction targets: ",
      attributes(x)$metadata$n_targets,
      "\n",
      sep = ""
    )
  }
  return(x)
}

#' Training part from Mushroom Data Set
#'
#' This data set is originally from the Mushroom data set,
#' UCI Machine Learning Repository.
#'
#' It includes the following fields:
#' - `label`: The label for each record.
#' - `data`: A sparse Matrix of 'dgCMatrix' class with 126 columns.
#'
#' @references
#'
#' <https://archive.ics.uci.edu/ml/datasets/Mushroom>
#'
#' Bache, K. & Lichman, M. (2013). UCI Machine Learning Repository
#' <http://archive.ics.uci.edu/ml>. Irvine, CA: University of California,
#' School of Information and Computer Science.
#'
#' @docType data
#' @keywords datasets
#' @name agaricus.train
#' @usage data(agaricus.train)
#' @format A list containing a label vector, and a dgCMatrix object with 6513
#' rows and 127 variables
NULL

#' Test part from Mushroom Data Set
#'
#' This data set is originally from the Mushroom data set,
#' UCI Machine Learning Repository.
#'
#' It includes the following fields:
#' - `label`: The label for each record.
#' - `data`: A sparse Matrix of 'dgCMatrix' class with 126 columns.
#'
#' @references
#'
#' <https://archive.ics.uci.edu/ml/datasets/Mushroom>
#'
#' Bache, K. & Lichman, M. (2013). UCI Machine Learning Repository
#' <http://archive.ics.uci.edu/ml>. Irvine, CA: University of California,
#' School of Information and Computer Science.
#'
#' @docType data
#' @keywords datasets
#' @name agaricus.test
#' @usage data(agaricus.test)
#' @format A list containing a label vector, and a dgCMatrix object with 1611
#' rows and 126 variables
NULL
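
# A sketch (guarded from running at package load) of how these bundled
# datasets are typically consumed by the lower-level interface: the sparse
# feature matrix and label vector go into an xgb.DMatrix, which xgb.train()
# consumes directly.
if (FALSE) {
  data(agaricus.train)
  dtrain <- xgb.DMatrix(agaricus.train$data, label = agaricus.train$label)
  booster <- xgb.train(
    params = list(objective = "binary:logistic", nthread = 1),
    data = dtrain,
    nrounds = 2
  )
}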
# Various imports
#' @importClassesFrom Matrix dgCMatrix dgRMatrix CsparseMatrix
#' @importFrom Matrix sparse.model.matrix
#' @importFrom data.table data.table
#' @importFrom data.table is.data.table
#' @importFrom data.table as.data.table
#' @importFrom data.table :=
#' @importFrom data.table rbindlist
#' @importFrom data.table setkey
#' @importFrom data.table setkeyv
#' @importFrom data.table setnames
#' @importFrom jsonlite fromJSON
#' @importFrom jsonlite toJSON
#' @importFrom methods new
#' @importFrom utils object.size str tail
#' @importFrom stats coef
#' @importFrom stats predict
#' @importFrom stats median
#' @importFrom stats sd
#' @importFrom stats variable.names
#' @importFrom utils head
#' @importFrom graphics barplot
#' @importFrom graphics lines
#' @importFrom graphics points
#' @importFrom graphics grid
#' @importFrom graphics par
#' @importFrom graphics title
#' @importFrom grDevices rgb
#'
#' @import methods
#' @useDynLib xgboost, .registration = TRUE
NULL
xgboost-3.0.0/R-package/README.md000066400000000000000000000021661476511272400162660ustar00rootroot00000000000000XGBoost R Package
=================

[![CRAN Status Badge](http://www.r-pkg.org/badges/version/xgboost)](https://cran.r-project.org/web/packages/xgboost)
[![CRAN Downloads](http://cranlogs.r-pkg.org/badges/xgboost)](https://cran.rstudio.com/web/packages/xgboost/index.html)
[![Documentation Status](https://readthedocs.org/projects/xgboost/badge/?version=latest)](https://xgboost.readthedocs.org/en/latest/R-package/index.html)

Resources
---------
* [XGBoost R Package Online Documentation](https://xgboost.readthedocs.org/en/stable/R-package/index.html) - Check this out for detailed documents, examples and tutorials.

Installation
------------

We are [on CRAN](https://cran.r-project.org/web/packages/xgboost/index.html) now. For the stable / pre-compiled version (for Windows and OS X), please install from CRAN:

```r
install.packages('xgboost')
```

For more detailed installation instructions, please see [here](https://xgboost.readthedocs.io/en/stable/install.html).

Development
-----------

* See the [R Package section](https://xgboost.readthedocs.io/en/latest/contrib/coding_guide.html#r-coding-guideline) of the contributors guide.
xgboost-3.0.0/R-package/cleanup000077500000000000000000000000361476511272400163560ustar00rootroot00000000000000#!/bin/sh

rm -f src/Makevars
xgboost-3.0.0/R-package/config.h.in000066400000000000000000000035631476511272400170340ustar00rootroot00000000000000/* config.h.in. Generated from configure.ac by autoheader. */

/* Define if building universal (internal helper macro) */
#undef AC_APPLE_UNIVERSAL_BUILD

/* Define to 1 if you have the <inttypes.h> header file. */
#undef HAVE_INTTYPES_H

/* Define to 1 if you have the <stdint.h> header file. */
#undef HAVE_STDINT_H

/* Define to 1 if you have the <stdio.h> header file. */
#undef HAVE_STDIO_H

/* Define to 1 if you have the <stdlib.h> header file. */
#undef HAVE_STDLIB_H

/* Define to 1 if you have the <strings.h> header file. */
#undef HAVE_STRINGS_H

/* Define to 1 if you have the <string.h> header file. */
#undef HAVE_STRING_H

/* Define to 1 if you have the <sys/stat.h> header file. */
#undef HAVE_SYS_STAT_H

/* Define to 1 if you have the <sys/types.h> header file. */
#undef HAVE_SYS_TYPES_H

/* Define to 1 if you have the <unistd.h> header file. */
#undef HAVE_UNISTD_H

/* Define to the address where bug reports for this package should be sent.
*/ #undef PACKAGE_BUGREPORT /* Define to the full name of this package. */ #undef PACKAGE_NAME /* Define to the full name and version of this package. */ #undef PACKAGE_STRING /* Define to the one symbol short name of this package. */ #undef PACKAGE_TARNAME /* Define to the home page for this package. */ #undef PACKAGE_URL /* Define to the version of this package. */ #undef PACKAGE_VERSION /* Define to 1 if all of the C90 standard headers exist (not just the ones required in a freestanding environment). This macro is provided for backward compatibility; new code need not use it. */ #undef STDC_HEADERS /* Define WORDS_BIGENDIAN to 1 if your processor stores words with the most significant byte first (like Motorola and SPARC, unlike Intel). */ #if defined AC_APPLE_UNIVERSAL_BUILD # if defined __BIG_ENDIAN__ # define WORDS_BIGENDIAN 1 # endif #else # ifndef WORDS_BIGENDIAN # undef WORDS_BIGENDIAN # endif #endif xgboost-3.0.0/R-package/configure000077500000000000000000004030131476511272400167120ustar00rootroot00000000000000#! /bin/sh # Guess values for system-dependent variables and create Makefiles. # Generated by GNU Autoconf 2.71 for xgboost 3.0.0. # # # Copyright (C) 1992-1996, 1998-2017, 2020-2021 Free Software Foundation, # Inc. # # # This configure script is free software; the Free Software Foundation # gives unlimited permission to copy, distribute and modify it. ## -------------------- ## ## M4sh Initialization. ## ## -------------------- ## # Be more Bourne compatible DUALCASE=1; export DUALCASE # for MKS sh as_nop=: if test ${ZSH_VERSION+y} && (emulate sh) >/dev/null 2>&1 then : emulate sh NULLCMD=: # Pre-4.2 versions of Zsh do word splitting on ${1+"$@"}, which # is contrary to our usage. Disable this feature. alias -g '${1+"$@"}'='"$@"' setopt NO_GLOB_SUBST else $as_nop case `(set -o) 2>/dev/null` in #( *posix*) : set -o posix ;; #( *) : ;; esac fi # Reset variables that may have inherited troublesome values from # the environment. # IFS needs to be set, to space, tab, and newline, in precisely that order. # (If _AS_PATH_WALK were called with IFS unset, it would have the # side effect of setting IFS to empty, thus disabling word splitting.) # Quoting is to prevent editors from complaining about space-tab. as_nl=' ' export as_nl IFS=" "" $as_nl" PS1='$ ' PS2='> ' PS4='+ ' # Ensure predictable behavior from utilities with locale-dependent output. LC_ALL=C export LC_ALL LANGUAGE=C export LANGUAGE # We cannot yet rely on "unset" to work, but we need these variables # to be unset--not just set to an empty or harmless value--now, to # avoid bugs in old shells (e.g. pre-3.0 UWIN ksh). This construct # also avoids known problems related to "unset" and subshell syntax # in other old shells (e.g. bash 2.01 and pdksh 5.2.14). for as_var in BASH_ENV ENV MAIL MAILPATH CDPATH do eval test \${$as_var+y} \ && ( (unset $as_var) || exit 1) >/dev/null 2>&1 && unset $as_var || : done # Ensure that fds 0, 1, and 2 are open. if (exec 3>&0) 2>/dev/null; then :; else exec 0&1) 2>/dev/null; then :; else exec 1>/dev/null; fi if (exec 3>&2) ; then :; else exec 2>/dev/null; fi # The user is always right. if ${PATH_SEPARATOR+false} :; then PATH_SEPARATOR=: (PATH='/bin;/bin'; FPATH=$PATH; sh -c :) >/dev/null 2>&1 && { (PATH='/bin:/bin'; FPATH=$PATH; sh -c :) >/dev/null 2>&1 || PATH_SEPARATOR=';' } fi # Find who we are. Look in the path if we contain no directory separator. 
as_myself= case $0 in #(( *[\\/]* ) as_myself=$0 ;; *) as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS case $as_dir in #((( '') as_dir=./ ;; */) ;; *) as_dir=$as_dir/ ;; esac test -r "$as_dir$0" && as_myself=$as_dir$0 && break done IFS=$as_save_IFS ;; esac # We did not find ourselves, most probably we were run as `sh COMMAND' # in which case we are not to be found in the path. if test "x$as_myself" = x; then as_myself=$0 fi if test ! -f "$as_myself"; then printf "%s\n" "$as_myself: error: cannot find myself; rerun with an absolute file name" >&2 exit 1 fi # Use a proper internal environment variable to ensure we don't fall # into an infinite loop, continuously re-executing ourselves. if test x"${_as_can_reexec}" != xno && test "x$CONFIG_SHELL" != x; then _as_can_reexec=no; export _as_can_reexec; # We cannot yet assume a decent shell, so we have to provide a # neutralization value for shells without unset; and this also # works around shells that cannot unset nonexistent variables. # Preserve -v and -x to the replacement shell. BASH_ENV=/dev/null ENV=/dev/null (unset BASH_ENV) >/dev/null 2>&1 && unset BASH_ENV ENV case $- in # (((( *v*x* | *x*v* ) as_opts=-vx ;; *v* ) as_opts=-v ;; *x* ) as_opts=-x ;; * ) as_opts= ;; esac exec $CONFIG_SHELL $as_opts "$as_myself" ${1+"$@"} # Admittedly, this is quite paranoid, since all the known shells bail # out after a failed `exec'. printf "%s\n" "$0: could not re-execute with $CONFIG_SHELL" >&2 exit 255 fi # We don't want this to propagate to other subprocesses. { _as_can_reexec=; unset _as_can_reexec;} if test "x$CONFIG_SHELL" = x; then as_bourne_compatible="as_nop=: if test \${ZSH_VERSION+y} && (emulate sh) >/dev/null 2>&1 then : emulate sh NULLCMD=: # Pre-4.2 versions of Zsh do word splitting on \${1+\"\$@\"}, which # is contrary to our usage. Disable this feature. alias -g '\${1+\"\$@\"}'='\"\$@\"' setopt NO_GLOB_SUBST else \$as_nop case \`(set -o) 2>/dev/null\` in #( *posix*) : set -o posix ;; #( *) : ;; esac fi " as_required="as_fn_return () { (exit \$1); } as_fn_success () { as_fn_return 0; } as_fn_failure () { as_fn_return 1; } as_fn_ret_success () { return 0; } as_fn_ret_failure () { return 1; } exitcode=0 as_fn_success || { exitcode=1; echo as_fn_success failed.; } as_fn_failure && { exitcode=1; echo as_fn_failure succeeded.; } as_fn_ret_success || { exitcode=1; echo as_fn_ret_success failed.; } as_fn_ret_failure && { exitcode=1; echo as_fn_ret_failure succeeded.; } if ( set x; as_fn_ret_success y && test x = \"\$1\" ) then : else \$as_nop exitcode=1; echo positional parameters were not saved. 
fi test x\$exitcode = x0 || exit 1 blah=\$(echo \$(echo blah)) test x\"\$blah\" = xblah || exit 1 test -x / || exit 1" as_suggested=" as_lineno_1=";as_suggested=$as_suggested$LINENO;as_suggested=$as_suggested" as_lineno_1a=\$LINENO as_lineno_2=";as_suggested=$as_suggested$LINENO;as_suggested=$as_suggested" as_lineno_2a=\$LINENO eval 'test \"x\$as_lineno_1'\$as_run'\" != \"x\$as_lineno_2'\$as_run'\" && test \"x\`expr \$as_lineno_1'\$as_run' + 1\`\" = \"x\$as_lineno_2'\$as_run'\"' || exit 1" if (eval "$as_required") 2>/dev/null then : as_have_required=yes else $as_nop as_have_required=no fi if test x$as_have_required = xyes && (eval "$as_suggested") 2>/dev/null then : else $as_nop as_save_IFS=$IFS; IFS=$PATH_SEPARATOR as_found=false for as_dir in /bin$PATH_SEPARATOR/usr/bin$PATH_SEPARATOR$PATH do IFS=$as_save_IFS case $as_dir in #((( '') as_dir=./ ;; */) ;; *) as_dir=$as_dir/ ;; esac as_found=: case $as_dir in #( /*) for as_base in sh bash ksh sh5; do # Try only shells that exist, to save several forks. as_shell=$as_dir$as_base if { test -f "$as_shell" || test -f "$as_shell.exe"; } && as_run=a "$as_shell" -c "$as_bourne_compatible""$as_required" 2>/dev/null then : CONFIG_SHELL=$as_shell as_have_required=yes if as_run=a "$as_shell" -c "$as_bourne_compatible""$as_suggested" 2>/dev/null then : break 2 fi fi done;; esac as_found=false done IFS=$as_save_IFS if $as_found then : else $as_nop if { test -f "$SHELL" || test -f "$SHELL.exe"; } && as_run=a "$SHELL" -c "$as_bourne_compatible""$as_required" 2>/dev/null then : CONFIG_SHELL=$SHELL as_have_required=yes fi fi if test "x$CONFIG_SHELL" != x then : export CONFIG_SHELL # We cannot yet assume a decent shell, so we have to provide a # neutralization value for shells without unset; and this also # works around shells that cannot unset nonexistent variables. # Preserve -v and -x to the replacement shell. BASH_ENV=/dev/null ENV=/dev/null (unset BASH_ENV) >/dev/null 2>&1 && unset BASH_ENV ENV case $- in # (((( *v*x* | *x*v* ) as_opts=-vx ;; *v* ) as_opts=-v ;; *x* ) as_opts=-x ;; * ) as_opts= ;; esac exec $CONFIG_SHELL $as_opts "$as_myself" ${1+"$@"} # Admittedly, this is quite paranoid, since all the known shells bail # out after a failed `exec'. printf "%s\n" "$0: could not re-execute with $CONFIG_SHELL" >&2 exit 255 fi if test x$as_have_required = xno then : printf "%s\n" "$0: This script requires a shell more modern than all" printf "%s\n" "$0: the shells that I found on your system." if test ${ZSH_VERSION+y} ; then printf "%s\n" "$0: In particular, zsh $ZSH_VERSION has bugs and should" printf "%s\n" "$0: be upgraded to zsh 4.3.4 or later." else printf "%s\n" "$0: Please tell bug-autoconf@gnu.org about your system, $0: including any error possibly output before this $0: message. Then install a modern shell, or manually run $0: the script under such a shell if you do have one." fi exit 1 fi fi fi SHELL=${CONFIG_SHELL-/bin/sh} export SHELL # Unset more variables known to interfere with behavior of common tools. CLICOLOR_FORCE= GREP_OPTIONS= unset CLICOLOR_FORCE GREP_OPTIONS ## --------------------- ## ## M4sh Shell Functions. ## ## --------------------- ## # as_fn_unset VAR # --------------- # Portably unset VAR. as_fn_unset () { { eval $1=; unset $1;} } as_unset=as_fn_unset # as_fn_set_status STATUS # ----------------------- # Set $? to STATUS, without forking. as_fn_set_status () { return $1 } # as_fn_set_status # as_fn_exit STATUS # ----------------- # Exit the shell with STATUS, even in a "trap 0" or "set -e" context. 
as_fn_exit () { set +e as_fn_set_status $1 exit $1 } # as_fn_exit # as_fn_nop # --------- # Do nothing but, unlike ":", preserve the value of $?. as_fn_nop () { return $? } as_nop=as_fn_nop # as_fn_mkdir_p # ------------- # Create "$as_dir" as a directory, including parents if necessary. as_fn_mkdir_p () { case $as_dir in #( -*) as_dir=./$as_dir;; esac test -d "$as_dir" || eval $as_mkdir_p || { as_dirs= while :; do case $as_dir in #( *\'*) as_qdir=`printf "%s\n" "$as_dir" | sed "s/'/'\\\\\\\\''/g"`;; #'( *) as_qdir=$as_dir;; esac as_dirs="'$as_qdir' $as_dirs" as_dir=`$as_dirname -- "$as_dir" || $as_expr X"$as_dir" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \ X"$as_dir" : 'X\(//\)[^/]' \| \ X"$as_dir" : 'X\(//\)$' \| \ X"$as_dir" : 'X\(/\)' \| . 2>/dev/null || printf "%s\n" X"$as_dir" | sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{ s//\1/ q } /^X\(\/\/\)[^/].*/{ s//\1/ q } /^X\(\/\/\)$/{ s//\1/ q } /^X\(\/\).*/{ s//\1/ q } s/.*/./; q'` test -d "$as_dir" && break done test -z "$as_dirs" || eval "mkdir $as_dirs" } || test -d "$as_dir" || as_fn_error $? "cannot create directory $as_dir" } # as_fn_mkdir_p # as_fn_executable_p FILE # ----------------------- # Test if FILE is an executable regular file. as_fn_executable_p () { test -f "$1" && test -x "$1" } # as_fn_executable_p # as_fn_append VAR VALUE # ---------------------- # Append the text in VALUE to the end of the definition contained in VAR. Take # advantage of any shell optimizations that allow amortized linear growth over # repeated appends, instead of the typical quadratic growth present in naive # implementations. if (eval "as_var=1; as_var+=2; test x\$as_var = x12") 2>/dev/null then : eval 'as_fn_append () { eval $1+=\$2 }' else $as_nop as_fn_append () { eval $1=\$$1\$2 } fi # as_fn_append # as_fn_arith ARG... # ------------------ # Perform arithmetic evaluation on the ARGs, and store the result in the # global $as_val. Take advantage of shells that can avoid forks. The arguments # must be portable across $(()) and expr. if (eval "test \$(( 1 + 1 )) = 2") 2>/dev/null then : eval 'as_fn_arith () { as_val=$(( $* )) }' else $as_nop as_fn_arith () { as_val=`expr "$@" || test $? -eq 1` } fi # as_fn_arith # as_fn_nop # --------- # Do nothing but, unlike ":", preserve the value of $?. as_fn_nop () { return $? } as_nop=as_fn_nop # as_fn_error STATUS ERROR [LINENO LOG_FD] # ---------------------------------------- # Output "`basename $0`: error: ERROR" to stderr. If LINENO and LOG_FD are # provided, also output the error to LOG_FD, referencing LINENO. Then exit the # script with STATUS, using 1 if that was 0. as_fn_error () { as_status=$1; test $as_status -eq 0 && as_status=1 if test "$4"; then as_lineno=${as_lineno-"$3"} as_lineno_stack=as_lineno_stack=$as_lineno_stack printf "%s\n" "$as_me:${as_lineno-$LINENO}: error: $2" >&$4 fi printf "%s\n" "$as_me: error: $2" >&2 as_fn_exit $as_status } # as_fn_error if expr a : '\(a\)' >/dev/null 2>&1 && test "X`expr 00001 : '.*\(...\)'`" = X001; then as_expr=expr else as_expr=false fi if (basename -- /) >/dev/null 2>&1 && test "X`basename -- / 2>&1`" = "X/"; then as_basename=basename else as_basename=false fi if (as_dir=`dirname -- /` && test "X$as_dir" = X/) >/dev/null 2>&1; then as_dirname=dirname else as_dirname=false fi as_me=`$as_basename -- "$0" || $as_expr X/"$0" : '.*/\([^/][^/]*\)/*$' \| \ X"$0" : 'X\(//\)$' \| \ X"$0" : 'X\(/\)' \| . 
2>/dev/null || printf "%s\n" X/"$0" | sed '/^.*\/\([^/][^/]*\)\/*$/{ s//\1/ q } /^X\/\(\/\/\)$/{ s//\1/ q } /^X\/\(\/\).*/{ s//\1/ q } s/.*/./; q'` # Avoid depending upon Character Ranges. as_cr_letters='abcdefghijklmnopqrstuvwxyz' as_cr_LETTERS='ABCDEFGHIJKLMNOPQRSTUVWXYZ' as_cr_Letters=$as_cr_letters$as_cr_LETTERS as_cr_digits='0123456789' as_cr_alnum=$as_cr_Letters$as_cr_digits as_lineno_1=$LINENO as_lineno_1a=$LINENO as_lineno_2=$LINENO as_lineno_2a=$LINENO eval 'test "x$as_lineno_1'$as_run'" != "x$as_lineno_2'$as_run'" && test "x`expr $as_lineno_1'$as_run' + 1`" = "x$as_lineno_2'$as_run'"' || { # Blame Lee E. McMahon (1931-1989) for sed's syntax. :-) sed -n ' p /[$]LINENO/= ' <$as_myself | sed ' s/[$]LINENO.*/&-/ t lineno b :lineno N :loop s/[$]LINENO\([^'$as_cr_alnum'_].*\n\)\(.*\)/\2\1\2/ t loop s/-\n.*// ' >$as_me.lineno && chmod +x "$as_me.lineno" || { printf "%s\n" "$as_me: error: cannot create $as_me.lineno; rerun with a POSIX shell" >&2; as_fn_exit 1; } # If we had to re-execute with $CONFIG_SHELL, we're ensured to have # already done that, so ensure we don't try to do so again and fall # in an infinite loop. This has already happened in practice. _as_can_reexec=no; export _as_can_reexec # Don't try to exec as it changes $[0], causing all sort of problems # (the dirname of $[0] is not the place where we might find the # original and so on. Autoconf is especially sensitive to this). . "./$as_me.lineno" # Exit status is that of the last command. exit } # Determine whether it's possible to make 'echo' print without a newline. # These variables are no longer used directly by Autoconf, but are AC_SUBSTed # for compatibility with existing Makefiles. ECHO_C= ECHO_N= ECHO_T= case `echo -n x` in #((((( -n*) case `echo 'xy\c'` in *c*) ECHO_T=' ';; # ECHO_T is single tab character. xy) ECHO_C='\c';; *) echo `echo ksh88 bug on AIX 6.1` > /dev/null ECHO_T=' ';; esac;; *) ECHO_N='-n';; esac # For backward compatibility with old third-party macros, we provide # the shell variables $as_echo and $as_echo_n. New code should use # AS_ECHO(["message"]) and AS_ECHO_N(["message"]), respectively. as_echo='printf %s\n' as_echo_n='printf %s' rm -f conf$$ conf$$.exe conf$$.file if test -d conf$$.dir; then rm -f conf$$.dir/conf$$.file else rm -f conf$$.dir mkdir conf$$.dir 2>/dev/null fi if (echo >conf$$.file) 2>/dev/null; then if ln -s conf$$.file conf$$ 2>/dev/null; then as_ln_s='ln -s' # ... but there are two gotchas: # 1) On MSYS, both `ln -s file dir' and `ln file dir' fail. # 2) DJGPP < 2.04 has no symlinks; `ln -s' creates a wrapper executable. # In both cases, we have to default to `cp -pR'. ln -s conf$$.file conf$$.dir 2>/dev/null && test ! -f conf$$.exe || as_ln_s='cp -pR' elif ln conf$$.file conf$$ 2>/dev/null; then as_ln_s=ln else as_ln_s='cp -pR' fi else as_ln_s='cp -pR' fi rm -f conf$$ conf$$.exe conf$$.dir/conf$$.file conf$$.file rmdir conf$$.dir 2>/dev/null if mkdir -p . 2>/dev/null; then as_mkdir_p='mkdir -p "$as_dir"' else test -d ./-p && rmdir ./-p as_mkdir_p=false fi as_test_x='test -x' as_executable_p=as_fn_executable_p # Sed expression to map a string onto a valid CPP name. as_tr_cpp="eval sed 'y%*$as_cr_letters%P$as_cr_LETTERS%;s%[^_$as_cr_alnum]%_%g'" # Sed expression to map a string onto a valid variable name. as_tr_sh="eval sed 'y%*+%pp%;s%[^_$as_cr_alnum]%_%g'" test -n "$DJDIR" || exec 7<&0 &1 # Name of the host. # hostname on some systems (SVR3.2, old GNU/Linux) returns a bogus exit status, # so uname gets run too. 
ac_hostname=`(hostname || uname -n) 2>/dev/null | sed 1q` # # Initializations. # ac_default_prefix=/usr/local ac_clean_files= ac_config_libobj_dir=. LIBOBJS= cross_compiling=no subdirs= MFLAGS= MAKEFLAGS= # Identity of this package. PACKAGE_NAME='xgboost' PACKAGE_TARNAME='xgboost' PACKAGE_VERSION='3.0.0' PACKAGE_STRING='xgboost 3.0.0' PACKAGE_BUGREPORT='' PACKAGE_URL='' # Factoring default headers for most tests. ac_includes_default="\ #include #ifdef HAVE_STDIO_H # include #endif #ifdef HAVE_STDLIB_H # include #endif #ifdef HAVE_STRING_H # include #endif #ifdef HAVE_INTTYPES_H # include #endif #ifdef HAVE_STDINT_H # include #endif #ifdef HAVE_STRINGS_H # include #endif #ifdef HAVE_SYS_TYPES_H # include #endif #ifdef HAVE_SYS_STAT_H # include #endif #ifdef HAVE_UNISTD_H # include #endif" ac_header_cxx_list= ac_subst_vars='LTLIBOBJS LIBOBJS XGBOOST_MM_PREFETCH_PRESENT XGBOOST_BUILTIN_PREFETCH_PRESENT BACKTRACE_LIB DMLC_DEFS ENDIAN_FLAG OPENMP_LIB OPENMP_CXXFLAGS USE_LITTLE_ENDIAN OBJEXT EXEEXT ac_ct_CXX CPPFLAGS LDFLAGS CXXFLAGS CXX target_alias host_alias build_alias LIBS ECHO_T ECHO_N ECHO_C DEFS mandir localedir libdir psdir pdfdir dvidir htmldir infodir docdir oldincludedir includedir runstatedir localstatedir sharedstatedir sysconfdir datadir datarootdir libexecdir sbindir bindir program_transform_name prefix exec_prefix PACKAGE_URL PACKAGE_BUGREPORT PACKAGE_STRING PACKAGE_VERSION PACKAGE_TARNAME PACKAGE_NAME PATH_SEPARATOR SHELL' ac_subst_files='' ac_user_opts=' enable_option_checking ' ac_precious_vars='build_alias host_alias target_alias CXX CXXFLAGS LDFLAGS LIBS CPPFLAGS CCC USE_LITTLE_ENDIAN' # Initialize some variables set by options. ac_init_help= ac_init_version=false ac_unrecognized_opts= ac_unrecognized_sep= # The variables have the same names as the options, with # dashes changed to underlines. cache_file=/dev/null exec_prefix=NONE no_create= no_recursion= prefix=NONE program_prefix=NONE program_suffix=NONE program_transform_name=s,x,x, silent= site= srcdir= verbose= x_includes=NONE x_libraries=NONE # Installation directory options. # These are left unexpanded so users can "make install exec_prefix=/foo" # and all the variables that are supposed to be based on exec_prefix # by default will actually change. # Use braces instead of parens because sh, perl, etc. also accept them. # (The list follows the same order as the GNU Coding Standards.) bindir='${exec_prefix}/bin' sbindir='${exec_prefix}/sbin' libexecdir='${exec_prefix}/libexec' datarootdir='${prefix}/share' datadir='${datarootdir}' sysconfdir='${prefix}/etc' sharedstatedir='${prefix}/com' localstatedir='${prefix}/var' runstatedir='${localstatedir}/run' includedir='${prefix}/include' oldincludedir='/usr/include' docdir='${datarootdir}/doc/${PACKAGE_TARNAME}' infodir='${datarootdir}/info' htmldir='${docdir}' dvidir='${docdir}' pdfdir='${docdir}' psdir='${docdir}' libdir='${exec_prefix}/lib' localedir='${datarootdir}/locale' mandir='${datarootdir}/man' ac_prev= ac_dashdash= for ac_option do # If the previous option needs an argument, assign it. 
if test -n "$ac_prev"; then eval $ac_prev=\$ac_option ac_prev= continue fi case $ac_option in *=?*) ac_optarg=`expr "X$ac_option" : '[^=]*=\(.*\)'` ;; *=) ac_optarg= ;; *) ac_optarg=yes ;; esac case $ac_dashdash$ac_option in --) ac_dashdash=yes ;; -bindir | --bindir | --bindi | --bind | --bin | --bi) ac_prev=bindir ;; -bindir=* | --bindir=* | --bindi=* | --bind=* | --bin=* | --bi=*) bindir=$ac_optarg ;; -build | --build | --buil | --bui | --bu) ac_prev=build_alias ;; -build=* | --build=* | --buil=* | --bui=* | --bu=*) build_alias=$ac_optarg ;; -cache-file | --cache-file | --cache-fil | --cache-fi \ | --cache-f | --cache- | --cache | --cach | --cac | --ca | --c) ac_prev=cache_file ;; -cache-file=* | --cache-file=* | --cache-fil=* | --cache-fi=* \ | --cache-f=* | --cache-=* | --cache=* | --cach=* | --cac=* | --ca=* | --c=*) cache_file=$ac_optarg ;; --config-cache | -C) cache_file=config.cache ;; -datadir | --datadir | --datadi | --datad) ac_prev=datadir ;; -datadir=* | --datadir=* | --datadi=* | --datad=*) datadir=$ac_optarg ;; -datarootdir | --datarootdir | --datarootdi | --datarootd | --dataroot \ | --dataroo | --dataro | --datar) ac_prev=datarootdir ;; -datarootdir=* | --datarootdir=* | --datarootdi=* | --datarootd=* \ | --dataroot=* | --dataroo=* | --dataro=* | --datar=*) datarootdir=$ac_optarg ;; -disable-* | --disable-*) ac_useropt=`expr "x$ac_option" : 'x-*disable-\(.*\)'` # Reject names that are not valid shell variable names. expr "x$ac_useropt" : ".*[^-+._$as_cr_alnum]" >/dev/null && as_fn_error $? "invalid feature name: \`$ac_useropt'" ac_useropt_orig=$ac_useropt ac_useropt=`printf "%s\n" "$ac_useropt" | sed 's/[-+.]/_/g'` case $ac_user_opts in *" "enable_$ac_useropt" "*) ;; *) ac_unrecognized_opts="$ac_unrecognized_opts$ac_unrecognized_sep--disable-$ac_useropt_orig" ac_unrecognized_sep=', ';; esac eval enable_$ac_useropt=no ;; -docdir | --docdir | --docdi | --doc | --do) ac_prev=docdir ;; -docdir=* | --docdir=* | --docdi=* | --doc=* | --do=*) docdir=$ac_optarg ;; -dvidir | --dvidir | --dvidi | --dvid | --dvi | --dv) ac_prev=dvidir ;; -dvidir=* | --dvidir=* | --dvidi=* | --dvid=* | --dvi=* | --dv=*) dvidir=$ac_optarg ;; -enable-* | --enable-*) ac_useropt=`expr "x$ac_option" : 'x-*enable-\([^=]*\)'` # Reject names that are not valid shell variable names. expr "x$ac_useropt" : ".*[^-+._$as_cr_alnum]" >/dev/null && as_fn_error $? "invalid feature name: \`$ac_useropt'" ac_useropt_orig=$ac_useropt ac_useropt=`printf "%s\n" "$ac_useropt" | sed 's/[-+.]/_/g'` case $ac_user_opts in *" "enable_$ac_useropt" "*) ;; *) ac_unrecognized_opts="$ac_unrecognized_opts$ac_unrecognized_sep--enable-$ac_useropt_orig" ac_unrecognized_sep=', ';; esac eval enable_$ac_useropt=\$ac_optarg ;; -exec-prefix | --exec_prefix | --exec-prefix | --exec-prefi \ | --exec-pref | --exec-pre | --exec-pr | --exec-p | --exec- \ | --exec | --exe | --ex) ac_prev=exec_prefix ;; -exec-prefix=* | --exec_prefix=* | --exec-prefix=* | --exec-prefi=* \ | --exec-pref=* | --exec-pre=* | --exec-pr=* | --exec-p=* | --exec-=* \ | --exec=* | --exe=* | --ex=*) exec_prefix=$ac_optarg ;; -gas | --gas | --ga | --g) # Obsolete; use --with-gas. 
with_gas=yes ;; -help | --help | --hel | --he | -h) ac_init_help=long ;; -help=r* | --help=r* | --hel=r* | --he=r* | -hr*) ac_init_help=recursive ;; -help=s* | --help=s* | --hel=s* | --he=s* | -hs*) ac_init_help=short ;; -host | --host | --hos | --ho) ac_prev=host_alias ;; -host=* | --host=* | --hos=* | --ho=*) host_alias=$ac_optarg ;; -htmldir | --htmldir | --htmldi | --htmld | --html | --htm | --ht) ac_prev=htmldir ;; -htmldir=* | --htmldir=* | --htmldi=* | --htmld=* | --html=* | --htm=* \ | --ht=*) htmldir=$ac_optarg ;; -includedir | --includedir | --includedi | --included | --include \ | --includ | --inclu | --incl | --inc) ac_prev=includedir ;; -includedir=* | --includedir=* | --includedi=* | --included=* | --include=* \ | --includ=* | --inclu=* | --incl=* | --inc=*) includedir=$ac_optarg ;; -infodir | --infodir | --infodi | --infod | --info | --inf) ac_prev=infodir ;; -infodir=* | --infodir=* | --infodi=* | --infod=* | --info=* | --inf=*) infodir=$ac_optarg ;; -libdir | --libdir | --libdi | --libd) ac_prev=libdir ;; -libdir=* | --libdir=* | --libdi=* | --libd=*) libdir=$ac_optarg ;; -libexecdir | --libexecdir | --libexecdi | --libexecd | --libexec \ | --libexe | --libex | --libe) ac_prev=libexecdir ;; -libexecdir=* | --libexecdir=* | --libexecdi=* | --libexecd=* | --libexec=* \ | --libexe=* | --libex=* | --libe=*) libexecdir=$ac_optarg ;; -localedir | --localedir | --localedi | --localed | --locale) ac_prev=localedir ;; -localedir=* | --localedir=* | --localedi=* | --localed=* | --locale=*) localedir=$ac_optarg ;; -localstatedir | --localstatedir | --localstatedi | --localstated \ | --localstate | --localstat | --localsta | --localst | --locals) ac_prev=localstatedir ;; -localstatedir=* | --localstatedir=* | --localstatedi=* | --localstated=* \ | --localstate=* | --localstat=* | --localsta=* | --localst=* | --locals=*) localstatedir=$ac_optarg ;; -mandir | --mandir | --mandi | --mand | --man | --ma | --m) ac_prev=mandir ;; -mandir=* | --mandir=* | --mandi=* | --mand=* | --man=* | --ma=* | --m=*) mandir=$ac_optarg ;; -nfp | --nfp | --nf) # Obsolete; use --without-fp. 
with_fp=no ;; -no-create | --no-create | --no-creat | --no-crea | --no-cre \ | --no-cr | --no-c | -n) no_create=yes ;; -no-recursion | --no-recursion | --no-recursio | --no-recursi \ | --no-recurs | --no-recur | --no-recu | --no-rec | --no-re | --no-r) no_recursion=yes ;; -oldincludedir | --oldincludedir | --oldincludedi | --oldincluded \ | --oldinclude | --oldinclud | --oldinclu | --oldincl | --oldinc \ | --oldin | --oldi | --old | --ol | --o) ac_prev=oldincludedir ;; -oldincludedir=* | --oldincludedir=* | --oldincludedi=* | --oldincluded=* \ | --oldinclude=* | --oldinclud=* | --oldinclu=* | --oldincl=* | --oldinc=* \ | --oldin=* | --oldi=* | --old=* | --ol=* | --o=*) oldincludedir=$ac_optarg ;; -prefix | --prefix | --prefi | --pref | --pre | --pr | --p) ac_prev=prefix ;; -prefix=* | --prefix=* | --prefi=* | --pref=* | --pre=* | --pr=* | --p=*) prefix=$ac_optarg ;; -program-prefix | --program-prefix | --program-prefi | --program-pref \ | --program-pre | --program-pr | --program-p) ac_prev=program_prefix ;; -program-prefix=* | --program-prefix=* | --program-prefi=* \ | --program-pref=* | --program-pre=* | --program-pr=* | --program-p=*) program_prefix=$ac_optarg ;; -program-suffix | --program-suffix | --program-suffi | --program-suff \ | --program-suf | --program-su | --program-s) ac_prev=program_suffix ;; -program-suffix=* | --program-suffix=* | --program-suffi=* \ | --program-suff=* | --program-suf=* | --program-su=* | --program-s=*) program_suffix=$ac_optarg ;; -program-transform-name | --program-transform-name \ | --program-transform-nam | --program-transform-na \ | --program-transform-n | --program-transform- \ | --program-transform | --program-transfor \ | --program-transfo | --program-transf \ | --program-trans | --program-tran \ | --progr-tra | --program-tr | --program-t) ac_prev=program_transform_name ;; -program-transform-name=* | --program-transform-name=* \ | --program-transform-nam=* | --program-transform-na=* \ | --program-transform-n=* | --program-transform-=* \ | --program-transform=* | --program-transfor=* \ | --program-transfo=* | --program-transf=* \ | --program-trans=* | --program-tran=* \ | --progr-tra=* | --program-tr=* | --program-t=*) program_transform_name=$ac_optarg ;; -pdfdir | --pdfdir | --pdfdi | --pdfd | --pdf | --pd) ac_prev=pdfdir ;; -pdfdir=* | --pdfdir=* | --pdfdi=* | --pdfd=* | --pdf=* | --pd=*) pdfdir=$ac_optarg ;; -psdir | --psdir | --psdi | --psd | --ps) ac_prev=psdir ;; -psdir=* | --psdir=* | --psdi=* | --psd=* | --ps=*) psdir=$ac_optarg ;; -q | -quiet | --quiet | --quie | --qui | --qu | --q \ | -silent | --silent | --silen | --sile | --sil) silent=yes ;; -runstatedir | --runstatedir | --runstatedi | --runstated \ | --runstate | --runstat | --runsta | --runst | --runs \ | --run | --ru | --r) ac_prev=runstatedir ;; -runstatedir=* | --runstatedir=* | --runstatedi=* | --runstated=* \ | --runstate=* | --runstat=* | --runsta=* | --runst=* | --runs=* \ | --run=* | --ru=* | --r=*) runstatedir=$ac_optarg ;; -sbindir | --sbindir | --sbindi | --sbind | --sbin | --sbi | --sb) ac_prev=sbindir ;; -sbindir=* | --sbindir=* | --sbindi=* | --sbind=* | --sbin=* \ | --sbi=* | --sb=*) sbindir=$ac_optarg ;; -sharedstatedir | --sharedstatedir | --sharedstatedi \ | --sharedstated | --sharedstate | --sharedstat | --sharedsta \ | --sharedst | --shareds | --shared | --share | --shar \ | --sha | --sh) ac_prev=sharedstatedir ;; -sharedstatedir=* | --sharedstatedir=* | --sharedstatedi=* \ | --sharedstated=* | --sharedstate=* | --sharedstat=* | --sharedsta=* \ | --sharedst=* | 
--shareds=* | --shared=* | --share=* | --shar=* \ | --sha=* | --sh=*) sharedstatedir=$ac_optarg ;; -site | --site | --sit) ac_prev=site ;; -site=* | --site=* | --sit=*) site=$ac_optarg ;; -srcdir | --srcdir | --srcdi | --srcd | --src | --sr) ac_prev=srcdir ;; -srcdir=* | --srcdir=* | --srcdi=* | --srcd=* | --src=* | --sr=*) srcdir=$ac_optarg ;; -sysconfdir | --sysconfdir | --sysconfdi | --sysconfd | --sysconf \ | --syscon | --sysco | --sysc | --sys | --sy) ac_prev=sysconfdir ;; -sysconfdir=* | --sysconfdir=* | --sysconfdi=* | --sysconfd=* | --sysconf=* \ | --syscon=* | --sysco=* | --sysc=* | --sys=* | --sy=*) sysconfdir=$ac_optarg ;; -target | --target | --targe | --targ | --tar | --ta | --t) ac_prev=target_alias ;; -target=* | --target=* | --targe=* | --targ=* | --tar=* | --ta=* | --t=*) target_alias=$ac_optarg ;; -v | -verbose | --verbose | --verbos | --verbo | --verb) verbose=yes ;; -version | --version | --versio | --versi | --vers | -V) ac_init_version=: ;; -with-* | --with-*) ac_useropt=`expr "x$ac_option" : 'x-*with-\([^=]*\)'` # Reject names that are not valid shell variable names. expr "x$ac_useropt" : ".*[^-+._$as_cr_alnum]" >/dev/null && as_fn_error $? "invalid package name: \`$ac_useropt'" ac_useropt_orig=$ac_useropt ac_useropt=`printf "%s\n" "$ac_useropt" | sed 's/[-+.]/_/g'` case $ac_user_opts in *" "with_$ac_useropt" "*) ;; *) ac_unrecognized_opts="$ac_unrecognized_opts$ac_unrecognized_sep--with-$ac_useropt_orig" ac_unrecognized_sep=', ';; esac eval with_$ac_useropt=\$ac_optarg ;; -without-* | --without-*) ac_useropt=`expr "x$ac_option" : 'x-*without-\(.*\)'` # Reject names that are not valid shell variable names. expr "x$ac_useropt" : ".*[^-+._$as_cr_alnum]" >/dev/null && as_fn_error $? "invalid package name: \`$ac_useropt'" ac_useropt_orig=$ac_useropt ac_useropt=`printf "%s\n" "$ac_useropt" | sed 's/[-+.]/_/g'` case $ac_user_opts in *" "with_$ac_useropt" "*) ;; *) ac_unrecognized_opts="$ac_unrecognized_opts$ac_unrecognized_sep--without-$ac_useropt_orig" ac_unrecognized_sep=', ';; esac eval with_$ac_useropt=no ;; --x) # Obsolete; use --with-x. with_x=yes ;; -x-includes | --x-includes | --x-include | --x-includ | --x-inclu \ | --x-incl | --x-inc | --x-in | --x-i) ac_prev=x_includes ;; -x-includes=* | --x-includes=* | --x-include=* | --x-includ=* | --x-inclu=* \ | --x-incl=* | --x-inc=* | --x-in=* | --x-i=*) x_includes=$ac_optarg ;; -x-libraries | --x-libraries | --x-librarie | --x-librari \ | --x-librar | --x-libra | --x-libr | --x-lib | --x-li | --x-l) ac_prev=x_libraries ;; -x-libraries=* | --x-libraries=* | --x-librarie=* | --x-librari=* \ | --x-librar=* | --x-libra=* | --x-libr=* | --x-lib=* | --x-li=* | --x-l=*) x_libraries=$ac_optarg ;; -*) as_fn_error $? "unrecognized option: \`$ac_option' Try \`$0 --help' for more information" ;; *=*) ac_envvar=`expr "x$ac_option" : 'x\([^=]*\)='` # Reject names that are not valid shell variable names. case $ac_envvar in #( '' | [0-9]* | *[!_$as_cr_alnum]* ) as_fn_error $? "invalid variable name: \`$ac_envvar'" ;; esac eval $ac_envvar=\$ac_optarg export $ac_envvar ;; *) # FIXME: should be removed in autoconf 3.0. printf "%s\n" "$as_me: WARNING: you should use --build, --host, --target" >&2 expr "x$ac_option" : ".*[^-._$as_cr_alnum]" >/dev/null && printf "%s\n" "$as_me: WARNING: invalid host type: $ac_option" >&2 : "${build_alias=$ac_option} ${host_alias=$ac_option} ${target_alias=$ac_option}" ;; esac done if test -n "$ac_prev"; then ac_option=--`echo $ac_prev | sed 's/_/-/g'` as_fn_error $? 
"missing argument to $ac_option" fi if test -n "$ac_unrecognized_opts"; then case $enable_option_checking in no) ;; fatal) as_fn_error $? "unrecognized options: $ac_unrecognized_opts" ;; *) printf "%s\n" "$as_me: WARNING: unrecognized options: $ac_unrecognized_opts" >&2 ;; esac fi # Check all directory arguments for consistency. for ac_var in exec_prefix prefix bindir sbindir libexecdir datarootdir \ datadir sysconfdir sharedstatedir localstatedir includedir \ oldincludedir docdir infodir htmldir dvidir pdfdir psdir \ libdir localedir mandir runstatedir do eval ac_val=\$$ac_var # Remove trailing slashes. case $ac_val in */ ) ac_val=`expr "X$ac_val" : 'X\(.*[^/]\)' \| "X$ac_val" : 'X\(.*\)'` eval $ac_var=\$ac_val;; esac # Be sure to have absolute directory names. case $ac_val in [\\/$]* | ?:[\\/]* ) continue;; NONE | '' ) case $ac_var in *prefix ) continue;; esac;; esac as_fn_error $? "expected an absolute directory name for --$ac_var: $ac_val" done # There might be people who depend on the old broken behavior: `$host' # used to hold the argument of --host etc. # FIXME: To remove some day. build=$build_alias host=$host_alias target=$target_alias # FIXME: To remove some day. if test "x$host_alias" != x; then if test "x$build_alias" = x; then cross_compiling=maybe elif test "x$build_alias" != "x$host_alias"; then cross_compiling=yes fi fi ac_tool_prefix= test -n "$host_alias" && ac_tool_prefix=$host_alias- test "$silent" = yes && exec 6>/dev/null ac_pwd=`pwd` && test -n "$ac_pwd" && ac_ls_di=`ls -di .` && ac_pwd_ls_di=`cd "$ac_pwd" && ls -di .` || as_fn_error $? "working directory cannot be determined" test "X$ac_ls_di" = "X$ac_pwd_ls_di" || as_fn_error $? "pwd does not report name of working directory" # Find the source files, if location was not specified. if test -z "$srcdir"; then ac_srcdir_defaulted=yes # Try the directory containing this script, then the parent directory. ac_confdir=`$as_dirname -- "$as_myself" || $as_expr X"$as_myself" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \ X"$as_myself" : 'X\(//\)[^/]' \| \ X"$as_myself" : 'X\(//\)$' \| \ X"$as_myself" : 'X\(/\)' \| . 2>/dev/null || printf "%s\n" X"$as_myself" | sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{ s//\1/ q } /^X\(\/\/\)[^/].*/{ s//\1/ q } /^X\(\/\/\)$/{ s//\1/ q } /^X\(\/\).*/{ s//\1/ q } s/.*/./; q'` srcdir=$ac_confdir if test ! -r "$srcdir/$ac_unique_file"; then srcdir=.. fi else ac_srcdir_defaulted=no fi if test ! -r "$srcdir/$ac_unique_file"; then test "$ac_srcdir_defaulted" = yes && srcdir="$ac_confdir or .." as_fn_error $? "cannot find sources ($ac_unique_file) in $srcdir" fi ac_msg="sources are in $srcdir, but \`cd $srcdir' does not work" ac_abs_confdir=`( cd "$srcdir" && test -r "./$ac_unique_file" || as_fn_error $? "$ac_msg" pwd)` # When building in place, set srcdir=. if test "$ac_abs_confdir" = "$ac_pwd"; then srcdir=. fi # Remove unnecessary trailing slashes from srcdir. # Double slashes in file names in object file debugging info # mess up M-x gdb in Emacs. case $srcdir in */) srcdir=`expr "X$srcdir" : 'X\(.*[^/]\)' \| "X$srcdir" : 'X\(.*\)'`;; esac for ac_var in $ac_precious_vars; do eval ac_env_${ac_var}_set=\${${ac_var}+set} eval ac_env_${ac_var}_value=\$${ac_var} eval ac_cv_env_${ac_var}_set=\${${ac_var}+set} eval ac_cv_env_${ac_var}_value=\$${ac_var} done # # Report the --help message. # if test "$ac_init_help" = "long"; then # Omit some internal or obsolete options to make the list less imposing. # This message is too long to be a string in the A/UX 3.1 sh. 
cat <<_ACEOF \`configure' configures xgboost 3.0.0 to adapt to many kinds of systems. Usage: $0 [OPTION]... [VAR=VALUE]... To assign environment variables (e.g., CC, CFLAGS...), specify them as VAR=VALUE. See below for descriptions of some of the useful variables. Defaults for the options are specified in brackets. Configuration: -h, --help display this help and exit --help=short display options specific to this package --help=recursive display the short help of all the included packages -V, --version display version information and exit -q, --quiet, --silent do not print \`checking ...' messages --cache-file=FILE cache test results in FILE [disabled] -C, --config-cache alias for \`--cache-file=config.cache' -n, --no-create do not create output files --srcdir=DIR find the sources in DIR [configure dir or \`..'] Installation directories: --prefix=PREFIX install architecture-independent files in PREFIX [$ac_default_prefix] --exec-prefix=EPREFIX install architecture-dependent files in EPREFIX [PREFIX] By default, \`make install' will install all the files in \`$ac_default_prefix/bin', \`$ac_default_prefix/lib' etc. You can specify an installation prefix other than \`$ac_default_prefix' using \`--prefix', for instance \`--prefix=\$HOME'. For better control, use the options below. Fine tuning of the installation directories: --bindir=DIR user executables [EPREFIX/bin] --sbindir=DIR system admin executables [EPREFIX/sbin] --libexecdir=DIR program executables [EPREFIX/libexec] --sysconfdir=DIR read-only single-machine data [PREFIX/etc] --sharedstatedir=DIR modifiable architecture-independent data [PREFIX/com] --localstatedir=DIR modifiable single-machine data [PREFIX/var] --runstatedir=DIR modifiable per-process data [LOCALSTATEDIR/run] --libdir=DIR object code libraries [EPREFIX/lib] --includedir=DIR C header files [PREFIX/include] --oldincludedir=DIR C header files for non-gcc [/usr/include] --datarootdir=DIR read-only arch.-independent data root [PREFIX/share] --datadir=DIR read-only architecture-independent data [DATAROOTDIR] --infodir=DIR info documentation [DATAROOTDIR/info] --localedir=DIR locale-dependent data [DATAROOTDIR/locale] --mandir=DIR man documentation [DATAROOTDIR/man] --docdir=DIR documentation root [DATAROOTDIR/doc/xgboost] --htmldir=DIR html documentation [DOCDIR] --dvidir=DIR dvi documentation [DOCDIR] --pdfdir=DIR pdf documentation [DOCDIR] --psdir=DIR ps documentation [DOCDIR] _ACEOF cat <<\_ACEOF _ACEOF fi if test -n "$ac_init_help"; then case $ac_init_help in short | recursive ) echo "Configuration of xgboost 3.0.0:";; esac cat <<\_ACEOF Some influential environment variables: CXX C++ compiler command CXXFLAGS C++ compiler flags LDFLAGS linker flags, e.g. -L if you have libraries in a nonstandard directory LIBS libraries to pass to the linker, e.g. -l CPPFLAGS (Objective) C/C++ preprocessor flags, e.g. -I if you have headers in a nonstandard directory USE_LITTLE_ENDIAN "Whether to build with little endian (checks at compile time if unset)" Use these variables to override the choices made by `configure' or to help it to find libraries and programs with nonstandard names/locations. Report bugs to the package provider. _ACEOF ac_status=$? fi if test "$ac_init_help" = "recursive"; then # If there are subdirs, report their specific --help. for ac_dir in : $ac_subdirs_all; do test "x$ac_dir" = x: && continue test -d "$ac_dir" || { cd "$srcdir" && ac_pwd=`pwd` && srcdir=. && test -d "$ac_dir"; } || continue ac_builddir=. case "$ac_dir" in .) ac_dir_suffix= ac_top_builddir_sub=. 
ac_top_build_prefix= ;; *) ac_dir_suffix=/`printf "%s\n" "$ac_dir" | sed 's|^\.[\\/]||'` # A ".." for each directory in $ac_dir_suffix. ac_top_builddir_sub=`printf "%s\n" "$ac_dir_suffix" | sed 's|/[^\\/]*|/..|g;s|/||'` case $ac_top_builddir_sub in "") ac_top_builddir_sub=. ac_top_build_prefix= ;; *) ac_top_build_prefix=$ac_top_builddir_sub/ ;; esac ;; esac ac_abs_top_builddir=$ac_pwd ac_abs_builddir=$ac_pwd$ac_dir_suffix # for backward compatibility: ac_top_builddir=$ac_top_build_prefix case $srcdir in .) # We are building in place. ac_srcdir=. ac_top_srcdir=$ac_top_builddir_sub ac_abs_top_srcdir=$ac_pwd ;; [\\/]* | ?:[\\/]* ) # Absolute name. ac_srcdir=$srcdir$ac_dir_suffix; ac_top_srcdir=$srcdir ac_abs_top_srcdir=$srcdir ;; *) # Relative name. ac_srcdir=$ac_top_build_prefix$srcdir$ac_dir_suffix ac_top_srcdir=$ac_top_build_prefix$srcdir ac_abs_top_srcdir=$ac_pwd/$srcdir ;; esac ac_abs_srcdir=$ac_abs_top_srcdir$ac_dir_suffix cd "$ac_dir" || { ac_status=$?; continue; } # Check for configure.gnu first; this name is used for a wrapper for # Metaconfig's "Configure" on case-insensitive file systems. if test -f "$ac_srcdir/configure.gnu"; then echo && $SHELL "$ac_srcdir/configure.gnu" --help=recursive elif test -f "$ac_srcdir/configure"; then echo && $SHELL "$ac_srcdir/configure" --help=recursive else printf "%s\n" "$as_me: WARNING: no configuration information is in $ac_dir" >&2 fi || ac_status=$? cd "$ac_pwd" || { ac_status=$?; break; } done fi test -n "$ac_init_help" && exit $ac_status if $ac_init_version; then cat <<\_ACEOF xgboost configure 3.0.0 generated by GNU Autoconf 2.71 Copyright (C) 2021 Free Software Foundation, Inc. This configure script is free software; the Free Software Foundation gives unlimited permission to copy, distribute and modify it. _ACEOF exit fi ## ------------------------ ## ## Autoconf initialization. ## ## ------------------------ ## # ac_fn_cxx_try_compile LINENO # ---------------------------- # Try to compile conftest.$ac_ext, and return whether this succeeded. ac_fn_cxx_try_compile () { as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack rm -f conftest.$ac_objext conftest.beam if { { ac_try="$ac_compile" case "(($ac_try" in *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; *) ac_try_echo=$ac_try;; esac eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" printf "%s\n" "$ac_try_echo"; } >&5 (eval "$ac_compile") 2>conftest.err ac_status=$? if test -s conftest.err; then grep -v '^ *+' conftest.err >conftest.er1 cat conftest.er1 >&5 mv -f conftest.er1 conftest.err fi printf "%s\n" "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; } && { test -z "$ac_cxx_werror_flag" || test ! -s conftest.err } && test -s conftest.$ac_objext then : ac_retval=0 else $as_nop printf "%s\n" "$as_me: failed program was:" >&5 sed 's/^/| /' conftest.$ac_ext >&5 ac_retval=1 fi eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno as_fn_set_status $ac_retval } # ac_fn_cxx_try_compile # ac_fn_cxx_try_link LINENO # ------------------------- # Try to link conftest.$ac_ext, and return whether this succeeded. ac_fn_cxx_try_link () { as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack rm -f conftest.$ac_objext conftest.beam conftest$ac_exeext if { { ac_try="$ac_link" case "(($ac_try" in *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; *) ac_try_echo=$ac_try;; esac eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" printf "%s\n" "$ac_try_echo"; } >&5 (eval "$ac_link") 2>conftest.err ac_status=$? 
if test -s conftest.err; then grep -v '^ *+' conftest.err >conftest.er1 cat conftest.er1 >&5 mv -f conftest.er1 conftest.err fi printf "%s\n" "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; } && { test -z "$ac_cxx_werror_flag" || test ! -s conftest.err } && test -s conftest$ac_exeext && { test "$cross_compiling" = yes || test -x conftest$ac_exeext } then : ac_retval=0 else $as_nop printf "%s\n" "$as_me: failed program was:" >&5 sed 's/^/| /' conftest.$ac_ext >&5 ac_retval=1 fi # Delete the IPA/IPO (Inter Procedural Analysis/Optimization) information # created by the PGI compiler (conftest_ipa8_conftest.oo), as it would # interfere with the next link command; also delete a directory that is # left behind by Apple's compiler. We do this before executing the actions. rm -rf conftest.dSYM conftest_ipa8_conftest.oo eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno as_fn_set_status $ac_retval } # ac_fn_cxx_try_link # ac_fn_cxx_check_func LINENO FUNC VAR # ------------------------------------ # Tests whether FUNC exists, setting the cache variable VAR accordingly ac_fn_cxx_check_func () { as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack { printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking for $2" >&5 printf %s "checking for $2... " >&6; } if eval test \${$3+y} then : printf %s "(cached) " >&6 else $as_nop cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ /* Define $2 to an innocuous variant, in case declares $2. For example, HP-UX 11i declares gettimeofday. */ #define $2 innocuous_$2 /* System header to define __stub macros and hopefully few prototypes, which can conflict with char $2 (); below. */ #include #undef $2 /* Override any GCC internal prototype to avoid an error. Use char because int might match the return type of a GCC builtin and then its argument prototype would still apply. */ #ifdef __cplusplus extern "C" #endif char $2 (); /* The GNU C library defines this for functions which it implements to always fail with ENOSYS. Some functions are actually named something starting with __ and the normal name is an alias. */ #if defined __stub_$2 || defined __stub___$2 choke me #endif int main (void) { return $2 (); ; return 0; } _ACEOF if ac_fn_cxx_try_link "$LINENO" then : eval "$3=yes" else $as_nop eval "$3=no" fi rm -f core conftest.err conftest.$ac_objext conftest.beam \ conftest$ac_exeext conftest.$ac_ext fi eval ac_res=\$$3 { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 printf "%s\n" "$ac_res" >&6; } eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno } # ac_fn_cxx_check_func # ac_fn_cxx_try_run LINENO # ------------------------ # Try to run conftest.$ac_ext, and return whether this succeeded. Assumes that # executables *can* be run. ac_fn_cxx_try_run () { as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack if { { ac_try="$ac_link" case "(($ac_try" in *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; *) ac_try_echo=$ac_try;; esac eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" printf "%s\n" "$ac_try_echo"; } >&5 (eval "$ac_link") 2>&5 ac_status=$? printf "%s\n" "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; } && { ac_try='./conftest$ac_exeext' { { case "(($ac_try" in *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; *) ac_try_echo=$ac_try;; esac eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" printf "%s\n" "$ac_try_echo"; } >&5 (eval "$ac_try") 2>&5 ac_status=$? 
printf "%s\n" "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; }; } then : ac_retval=0 else $as_nop printf "%s\n" "$as_me: program exited with status $ac_status" >&5 printf "%s\n" "$as_me: failed program was:" >&5 sed 's/^/| /' conftest.$ac_ext >&5 ac_retval=$ac_status fi rm -rf conftest.dSYM conftest_ipa8_conftest.oo eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno as_fn_set_status $ac_retval } # ac_fn_cxx_try_run # ac_fn_cxx_check_header_compile LINENO HEADER VAR INCLUDES # --------------------------------------------------------- # Tests whether HEADER exists and can be compiled using the include files in # INCLUDES, setting the cache variable VAR accordingly. ac_fn_cxx_check_header_compile () { as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack { printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking for $2" >&5 printf %s "checking for $2... " >&6; } if eval test \${$3+y} then : printf %s "(cached) " >&6 else $as_nop cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ $4 #include <$2> _ACEOF if ac_fn_cxx_try_compile "$LINENO" then : eval "$3=yes" else $as_nop eval "$3=no" fi rm -f core conftest.err conftest.$ac_objext conftest.beam conftest.$ac_ext fi eval ac_res=\$$3 { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 printf "%s\n" "$ac_res" >&6; } eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno } # ac_fn_cxx_check_header_compile ac_configure_args_raw= for ac_arg do case $ac_arg in *\'*) ac_arg=`printf "%s\n" "$ac_arg" | sed "s/'/'\\\\\\\\''/g"` ;; esac as_fn_append ac_configure_args_raw " '$ac_arg'" done case $ac_configure_args_raw in *$as_nl*) ac_safe_unquote= ;; *) ac_unsafe_z='|&;<>()$`\\"*?[ '' ' # This string ends in space, tab. ac_unsafe_a="$ac_unsafe_z#~" ac_safe_unquote="s/ '\\([^$ac_unsafe_a][^$ac_unsafe_z]*\\)'/ \\1/g" ac_configure_args_raw=` printf "%s\n" "$ac_configure_args_raw" | sed "$ac_safe_unquote"`;; esac cat >config.log <<_ACEOF This file contains any messages produced by compilers while running configure, to aid debugging if configure makes a mistake. It was created by xgboost $as_me 3.0.0, which was generated by GNU Autoconf 2.71. Invocation command line was $ $0$ac_configure_args_raw _ACEOF exec 5>>config.log { cat <<_ASUNAME ## --------- ## ## Platform. ## ## --------- ## hostname = `(hostname || uname -n) 2>/dev/null | sed 1q` uname -m = `(uname -m) 2>/dev/null || echo unknown` uname -r = `(uname -r) 2>/dev/null || echo unknown` uname -s = `(uname -s) 2>/dev/null || echo unknown` uname -v = `(uname -v) 2>/dev/null || echo unknown` /usr/bin/uname -p = `(/usr/bin/uname -p) 2>/dev/null || echo unknown` /bin/uname -X = `(/bin/uname -X) 2>/dev/null || echo unknown` /bin/arch = `(/bin/arch) 2>/dev/null || echo unknown` /usr/bin/arch -k = `(/usr/bin/arch -k) 2>/dev/null || echo unknown` /usr/convex/getsysinfo = `(/usr/convex/getsysinfo) 2>/dev/null || echo unknown` /usr/bin/hostinfo = `(/usr/bin/hostinfo) 2>/dev/null || echo unknown` /bin/machine = `(/bin/machine) 2>/dev/null || echo unknown` /usr/bin/oslevel = `(/usr/bin/oslevel) 2>/dev/null || echo unknown` /bin/universe = `(/bin/universe) 2>/dev/null || echo unknown` _ASUNAME as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS case $as_dir in #((( '') as_dir=./ ;; */) ;; *) as_dir=$as_dir/ ;; esac printf "%s\n" "PATH: $as_dir" done IFS=$as_save_IFS } >&5 cat >&5 <<_ACEOF ## ----------- ## ## Core tests. ## ## ----------- ## _ACEOF # Keep a trace of the command line. 
# Strip out --no-create and --no-recursion so they do not pile up. # Strip out --silent because we don't want to record it for future runs. # Also quote any args containing shell meta-characters. # Make two passes to allow for proper duplicate-argument suppression. ac_configure_args= ac_configure_args0= ac_configure_args1= ac_must_keep_next=false for ac_pass in 1 2 do for ac_arg do case $ac_arg in -no-create | --no-c* | -n | -no-recursion | --no-r*) continue ;; -q | -quiet | --quiet | --quie | --qui | --qu | --q \ | -silent | --silent | --silen | --sile | --sil) continue ;; *\'*) ac_arg=`printf "%s\n" "$ac_arg" | sed "s/'/'\\\\\\\\''/g"` ;; esac case $ac_pass in 1) as_fn_append ac_configure_args0 " '$ac_arg'" ;; 2) as_fn_append ac_configure_args1 " '$ac_arg'" if test $ac_must_keep_next = true; then ac_must_keep_next=false # Got value, back to normal. else case $ac_arg in *=* | --config-cache | -C | -disable-* | --disable-* \ | -enable-* | --enable-* | -gas | --g* | -nfp | --nf* \ | -q | -quiet | --q* | -silent | --sil* | -v | -verb* \ | -with-* | --with-* | -without-* | --without-* | --x) case "$ac_configure_args0 " in "$ac_configure_args1"*" '$ac_arg' "* ) continue ;; esac ;; -* ) ac_must_keep_next=true ;; esac fi as_fn_append ac_configure_args " '$ac_arg'" ;; esac done done { ac_configure_args0=; unset ac_configure_args0;} { ac_configure_args1=; unset ac_configure_args1;} # When interrupted or exit'd, cleanup temporary files, and complete # config.log. We remove comments because anyway the quotes in there # would cause problems or look ugly. # WARNING: Use '\'' to represent an apostrophe within the trap. # WARNING: Do not start the trap code with a newline, due to a FreeBSD 4.0 bug. trap 'exit_status=$? # Sanitize IFS. IFS=" "" $as_nl" # Save into config.log some information that might help in debugging. { echo printf "%s\n" "## ---------------- ## ## Cache variables. ## ## ---------------- ##" echo # The following way of writing the cache mishandles newlines in values, ( for ac_var in `(set) 2>&1 | sed -n '\''s/^\([a-zA-Z_][a-zA-Z0-9_]*\)=.*/\1/p'\''`; do eval ac_val=\$$ac_var case $ac_val in #( *${as_nl}*) case $ac_var in #( *_cv_*) { printf "%s\n" "$as_me:${as_lineno-$LINENO}: WARNING: cache variable $ac_var contains a newline" >&5 printf "%s\n" "$as_me: WARNING: cache variable $ac_var contains a newline" >&2;} ;; esac case $ac_var in #( _ | IFS | as_nl) ;; #( BASH_ARGV | BASH_SOURCE) eval $ac_var= ;; #( *) { eval $ac_var=; unset $ac_var;} ;; esac ;; esac done (set) 2>&1 | case $as_nl`(ac_space='\'' '\''; set) 2>&1` in #( *${as_nl}ac_space=\ *) sed -n \ "s/'\''/'\''\\\\'\'''\''/g; s/^\\([_$as_cr_alnum]*_cv_[_$as_cr_alnum]*\\)=\\(.*\\)/\\1='\''\\2'\''/p" ;; #( *) sed -n "/^[_$as_cr_alnum]*_cv_[_$as_cr_alnum]*=/p" ;; esac | sort ) echo printf "%s\n" "## ----------------- ## ## Output variables. ## ## ----------------- ##" echo for ac_var in $ac_subst_vars do eval ac_val=\$$ac_var case $ac_val in *\'\''*) ac_val=`printf "%s\n" "$ac_val" | sed "s/'\''/'\''\\\\\\\\'\'''\''/g"`;; esac printf "%s\n" "$ac_var='\''$ac_val'\''" done | sort echo if test -n "$ac_subst_files"; then printf "%s\n" "## ------------------- ## ## File substitutions. ## ## ------------------- ##" echo for ac_var in $ac_subst_files do eval ac_val=\$$ac_var case $ac_val in *\'\''*) ac_val=`printf "%s\n" "$ac_val" | sed "s/'\''/'\''\\\\\\\\'\'''\''/g"`;; esac printf "%s\n" "$ac_var='\''$ac_val'\''" done | sort echo fi if test -s confdefs.h; then printf "%s\n" "## ----------- ## ## confdefs.h. 
## ## ----------- ##" echo cat confdefs.h echo fi test "$ac_signal" != 0 && printf "%s\n" "$as_me: caught signal $ac_signal" printf "%s\n" "$as_me: exit $exit_status" } >&5 rm -f core *.core core.conftest.* && rm -f -r conftest* confdefs* conf$$* $ac_clean_files && exit $exit_status ' 0 for ac_signal in 1 2 13 15; do trap 'ac_signal='$ac_signal'; as_fn_exit 1' $ac_signal done ac_signal=0 # confdefs.h avoids OS command line length limits that DEFS can exceed. rm -f -r conftest* confdefs.h printf "%s\n" "/* confdefs.h */" > confdefs.h # Predefined preprocessor variables. printf "%s\n" "#define PACKAGE_NAME \"$PACKAGE_NAME\"" >>confdefs.h printf "%s\n" "#define PACKAGE_TARNAME \"$PACKAGE_TARNAME\"" >>confdefs.h printf "%s\n" "#define PACKAGE_VERSION \"$PACKAGE_VERSION\"" >>confdefs.h printf "%s\n" "#define PACKAGE_STRING \"$PACKAGE_STRING\"" >>confdefs.h printf "%s\n" "#define PACKAGE_BUGREPORT \"$PACKAGE_BUGREPORT\"" >>confdefs.h printf "%s\n" "#define PACKAGE_URL \"$PACKAGE_URL\"" >>confdefs.h # Let the site file select an alternate cache file if it wants to. # Prefer an explicitly selected file to automatically selected ones. if test -n "$CONFIG_SITE"; then ac_site_files="$CONFIG_SITE" elif test "x$prefix" != xNONE; then ac_site_files="$prefix/share/config.site $prefix/etc/config.site" else ac_site_files="$ac_default_prefix/share/config.site $ac_default_prefix/etc/config.site" fi for ac_site_file in $ac_site_files do case $ac_site_file in #( */*) : ;; #( *) : ac_site_file=./$ac_site_file ;; esac if test -f "$ac_site_file" && test -r "$ac_site_file"; then { printf "%s\n" "$as_me:${as_lineno-$LINENO}: loading site script $ac_site_file" >&5 printf "%s\n" "$as_me: loading site script $ac_site_file" >&6;} sed 's/^/| /' "$ac_site_file" >&5 . "$ac_site_file" \ || { { printf "%s\n" "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 printf "%s\n" "$as_me: error: in \`$ac_pwd':" >&2;} as_fn_error $? "failed to load site script $ac_site_file See \`config.log' for more details" "$LINENO" 5; } fi done if test -r "$cache_file"; then # Some versions of bash will fail to source /dev/null (special files # actually), so we avoid doing that. DJGPP emulates it as a regular file. if test /dev/null != "$cache_file" && test -f "$cache_file"; then { printf "%s\n" "$as_me:${as_lineno-$LINENO}: loading cache $cache_file" >&5 printf "%s\n" "$as_me: loading cache $cache_file" >&6;} case $cache_file in [\\/]* | ?:[\\/]* ) . "$cache_file";; *) . "./$cache_file";; esac fi else { printf "%s\n" "$as_me:${as_lineno-$LINENO}: creating cache $cache_file" >&5 printf "%s\n" "$as_me: creating cache $cache_file" >&6;} >$cache_file fi # Test code for whether the C++ compiler supports C++98 (global declarations) ac_cxx_conftest_cxx98_globals=' // Does the compiler advertise C++98 conformance? #if !defined __cplusplus || __cplusplus < 199711L # error "Compiler does not advertise C++98 conformance" #endif // These inclusions are to reject old compilers that // lack the unsuffixed header files. #include <cstdlib> #include <exception> // <cassert> and <cstring> are *not* freestanding headers in C++98. extern void assert (int); namespace std { extern int strcmp (const char *, const char *); } // Namespaces, exceptions, and templates were all added after "C++ 2.0". using std::exception; using std::strcmp; namespace { void test_exception_syntax() { try { throw "test"; } catch (const char *s) { // Extra parentheses suppress a warning when building autoconf itself, // due to lint rules shared with more typical C programs.
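// Parenthesizing the name also keeps any function-like macro named
// strcmp from expanding at this call site.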
assert (!(strcmp) (s, "test")); } } template <typename T> struct test_template { T const val; explicit test_template(T t) : val(t) {} template <typename U> T add(U u) { return static_cast<T>(u) + val; } }; } // anonymous namespace ' # Test code for whether the C++ compiler supports C++98 (body of main) ac_cxx_conftest_cxx98_main=' assert (argc); assert (! argv[0]); { test_exception_syntax (); test_template<double> tt (2.0); assert (tt.add (4) == 6.0); assert (true && !false); } ' # Test code for whether the C++ compiler supports C++11 (global declarations) ac_cxx_conftest_cxx11_globals=' // Does the compiler advertise C++ 2011 conformance? #if !defined __cplusplus || __cplusplus < 201103L # error "Compiler does not advertise C++11 conformance" #endif namespace cxx11test { constexpr int get_val() { return 20; } struct testinit { int i; double d; }; class delegate { public: delegate(int n) : n(n) {} delegate(): delegate(2354) {} virtual int getval() { return this->n; }; protected: int n; }; class overridden : public delegate { public: overridden(int n): delegate(n) {} virtual int getval() override final { return this->n * 2; } }; class nocopy { public: nocopy(int i): i(i) {} nocopy() = default; nocopy(const nocopy&) = delete; nocopy & operator=(const nocopy&) = delete; private: int i; }; // for testing lambda expressions template <typename Ret, typename Fn> Ret eval(Fn f, Ret v) { return f(v); } // for testing variadic templates and trailing return types template <typename V> auto sum(V first) -> V { return first; } template <typename V, typename... Args> auto sum(V first, Args... rest) -> V { return first + sum(rest...); } } ' # Test code for whether the C++ compiler supports C++11 (body of main) ac_cxx_conftest_cxx11_main=' { // Test auto and decltype auto a1 = 6538; auto a2 = 48573953.4; auto a3 = "String literal"; int total = 0; for (auto i = a3; *i; ++i) { total += *i; } decltype(a2) a4 = 34895.034; } { // Test constexpr short sa[cxx11test::get_val()] = { 0 }; } { // Test initializer lists cxx11test::testinit il = { 4323, 435234.23544 }; } { // Test range-based for int array[] = {9, 7, 13, 15, 4, 18, 12, 10, 5, 3, 14, 19, 17, 8, 6, 20, 16, 2, 11, 1}; for (auto &x : array) { x += 23; } } { // Test lambda expressions using cxx11test::eval; assert (eval ([](int x) { return x*2; }, 21) == 42); double d = 2.0; assert (eval ([&](double x) { return d += x; }, 3.0) == 5.0); assert (d == 5.0); assert (eval ([=](double x) mutable { return d += x; }, 4.0) == 9.0); assert (d == 5.0); } { // Test use of variadic templates using cxx11test::sum; auto a = sum(1); auto b = sum(1, 2); auto c = sum(1.0, 2.0, 3.0); } { // Test constructor delegation cxx11test::delegate d1; cxx11test::delegate d2(); cxx11test::delegate d3(45); } { // Test override and final cxx11test::overridden o1(55464); } { // Test nullptr char *c = nullptr; } { // Test template brackets test_template<::test_template<int>> v(test_template<int>(12)); } { // Unicode literals char const *utf8 = u8"UTF-8 string \u2500"; char16_t const *utf16 = u"UTF-16 string \u2500"; char32_t const *utf32 = U"UTF-32 string \u2500"; } ' # Test code for whether the C++ compiler supports C++11 (complete). ac_cxx_conftest_cxx11_program="${ac_cxx_conftest_cxx98_globals} ${ac_cxx_conftest_cxx11_globals} int main (int argc, char **argv) { int ok = 0; ${ac_cxx_conftest_cxx98_main} ${ac_cxx_conftest_cxx11_main} return ok; } " # Test code for whether the C++ compiler supports C++98 (complete).
ac_cxx_conftest_cxx98_program="${ac_cxx_conftest_cxx98_globals} int main (int argc, char **argv) { int ok = 0; ${ac_cxx_conftest_cxx98_main} return ok; } " as_fn_append ac_header_cxx_list " stdio.h stdio_h HAVE_STDIO_H" as_fn_append ac_header_cxx_list " stdlib.h stdlib_h HAVE_STDLIB_H" as_fn_append ac_header_cxx_list " string.h string_h HAVE_STRING_H" as_fn_append ac_header_cxx_list " inttypes.h inttypes_h HAVE_INTTYPES_H" as_fn_append ac_header_cxx_list " stdint.h stdint_h HAVE_STDINT_H" as_fn_append ac_header_cxx_list " strings.h strings_h HAVE_STRINGS_H" as_fn_append ac_header_cxx_list " sys/stat.h sys_stat_h HAVE_SYS_STAT_H" as_fn_append ac_header_cxx_list " sys/types.h sys_types_h HAVE_SYS_TYPES_H" as_fn_append ac_header_cxx_list " unistd.h unistd_h HAVE_UNISTD_H" # Check that the precious variables saved in the cache have kept the same # value. ac_cache_corrupted=false for ac_var in $ac_precious_vars; do eval ac_old_set=\$ac_cv_env_${ac_var}_set eval ac_new_set=\$ac_env_${ac_var}_set eval ac_old_val=\$ac_cv_env_${ac_var}_value eval ac_new_val=\$ac_env_${ac_var}_value case $ac_old_set,$ac_new_set in set,) { printf "%s\n" "$as_me:${as_lineno-$LINENO}: error: \`$ac_var' was set to \`$ac_old_val' in the previous run" >&5 printf "%s\n" "$as_me: error: \`$ac_var' was set to \`$ac_old_val' in the previous run" >&2;} ac_cache_corrupted=: ;; ,set) { printf "%s\n" "$as_me:${as_lineno-$LINENO}: error: \`$ac_var' was not set in the previous run" >&5 printf "%s\n" "$as_me: error: \`$ac_var' was not set in the previous run" >&2;} ac_cache_corrupted=: ;; ,);; *) if test "x$ac_old_val" != "x$ac_new_val"; then # differences in whitespace do not lead to failure. ac_old_val_w=`echo x $ac_old_val` ac_new_val_w=`echo x $ac_new_val` if test "$ac_old_val_w" != "$ac_new_val_w"; then { printf "%s\n" "$as_me:${as_lineno-$LINENO}: error: \`$ac_var' has changed since the previous run:" >&5 printf "%s\n" "$as_me: error: \`$ac_var' has changed since the previous run:" >&2;} ac_cache_corrupted=: else { printf "%s\n" "$as_me:${as_lineno-$LINENO}: warning: ignoring whitespace changes in \`$ac_var' since the previous run:" >&5 printf "%s\n" "$as_me: warning: ignoring whitespace changes in \`$ac_var' since the previous run:" >&2;} eval $ac_var=\$ac_old_val fi { printf "%s\n" "$as_me:${as_lineno-$LINENO}: former value: \`$ac_old_val'" >&5 printf "%s\n" "$as_me: former value: \`$ac_old_val'" >&2;} { printf "%s\n" "$as_me:${as_lineno-$LINENO}: current value: \`$ac_new_val'" >&5 printf "%s\n" "$as_me: current value: \`$ac_new_val'" >&2;} fi;; esac # Pass precious variables to config.status. if test "$ac_new_set" = set; then case $ac_new_val in *\'*) ac_arg=$ac_var=`printf "%s\n" "$ac_new_val" | sed "s/'/'\\\\\\\\''/g"` ;; *) ac_arg=$ac_var=$ac_new_val ;; esac case " $ac_configure_args " in *" '$ac_arg' "*) ;; # Avoid dups. Use of quotes ensures accuracy. *) as_fn_append ac_configure_args " '$ac_arg'" ;; esac fi done if $ac_cache_corrupted; then { printf "%s\n" "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 printf "%s\n" "$as_me: error: in \`$ac_pwd':" >&2;} { printf "%s\n" "$as_me:${as_lineno-$LINENO}: error: changes in the environment can compromise the build" >&5 printf "%s\n" "$as_me: error: changes in the environment can compromise the build" >&2;} as_fn_error $? "run \`${MAKE-make} distclean' and/or \`rm $cache_file' and start over" "$LINENO" 5 fi ## -------------------- ## ## Main body of script. 
## ## -------------------- ## ac_ext=c ac_cpp='$CPP $CPPFLAGS' ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' ac_compiler_gnu=$ac_cv_c_compiler_gnu : ${R_HOME=`R RHOME`} if test -z "${R_HOME}"; then echo "could not determine R_HOME" exit 1 fi CXX17=`"${R_HOME}/bin/R" CMD config CXX17` CXX17STD=`"${R_HOME}/bin/R" CMD config CXX17STD` CXX="${CXX17} ${CXX17STD}" CXXFLAGS=`"${R_HOME}/bin/R" CMD config CXXFLAGS` CC=`"${R_HOME}/bin/R" CMD config CC` CFLAGS=`"${R_HOME}/bin/R" CMD config CFLAGS` CPPFLAGS=`"${R_HOME}/bin/R" CMD config CPPFLAGS` LDFLAGS=`"${R_HOME}/bin/R" CMD config LDFLAGS` ac_ext=cpp ac_cpp='$CXXCPP $CPPFLAGS' ac_compile='$CXX -c $CXXFLAGS $CPPFLAGS conftest.$ac_ext >&5' ac_link='$CXX -o conftest$ac_exeext $CXXFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' ac_compiler_gnu=$ac_cv_cxx_compiler_gnu DMLC_DEFS="" { printf "%s\n" "$as_me:${as_lineno-$LINENO}: Checking if/where backtrace is available" >&5 printf "%s\n" "$as_me: Checking if/where backtrace is available" >&6;} ac_ext=cpp ac_cpp='$CXXCPP $CPPFLAGS' ac_compile='$CXX -c $CXXFLAGS $CPPFLAGS conftest.$ac_ext >&5' ac_link='$CXX -o conftest$ac_exeext $CXXFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' ac_compiler_gnu=$ac_cv_cxx_compiler_gnu if test -z "$CXX"; then if test -n "$CCC"; then CXX=$CCC else if test -n "$ac_tool_prefix"; then for ac_prog in g++ c++ gpp aCC CC cxx cc++ cl.exe FCC KCC RCC xlC_r xlC clang++ do # Extract the first word of "$ac_tool_prefix$ac_prog", so it can be a program name with args. set dummy $ac_tool_prefix$ac_prog; ac_word=$2 { printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 printf %s "checking for $ac_word... " >&6; } if test ${ac_cv_prog_CXX+y} then : printf %s "(cached) " >&6 else $as_nop if test -n "$CXX"; then ac_cv_prog_CXX="$CXX" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS case $as_dir in #((( '') as_dir=./ ;; */) ;; *) as_dir=$as_dir/ ;; esac for ac_exec_ext in '' $ac_executable_extensions; do if as_fn_executable_p "$as_dir$ac_word$ac_exec_ext"; then ac_cv_prog_CXX="$ac_tool_prefix$ac_prog" printf "%s\n" "$as_me:${as_lineno-$LINENO}: found $as_dir$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS fi fi CXX=$ac_cv_prog_CXX if test -n "$CXX"; then { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: $CXX" >&5 printf "%s\n" "$CXX" >&6; } else { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: no" >&5 printf "%s\n" "no" >&6; } fi test -n "$CXX" && break done fi if test -z "$CXX"; then ac_ct_CXX=$CXX for ac_prog in g++ c++ gpp aCC CC cxx cc++ cl.exe FCC KCC RCC xlC_r xlC clang++ do # Extract the first word of "$ac_prog", so it can be a program name with args. set dummy $ac_prog; ac_word=$2 { printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 printf %s "checking for $ac_word... " >&6; } if test ${ac_cv_prog_ac_ct_CXX+y} then : printf %s "(cached) " >&6 else $as_nop if test -n "$ac_ct_CXX"; then ac_cv_prog_ac_ct_CXX="$ac_ct_CXX" # Let the user override the test. 
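# A CXX inherited from the environment (e.g. CXX=clang++ ./configure)
# is accepted as-is here; only when it is empty does the PATH walk
# below go looking for one of the candidate compiler names.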
else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS case $as_dir in #((( '') as_dir=./ ;; */) ;; *) as_dir=$as_dir/ ;; esac for ac_exec_ext in '' $ac_executable_extensions; do if as_fn_executable_p "$as_dir$ac_word$ac_exec_ext"; then ac_cv_prog_ac_ct_CXX="$ac_prog" printf "%s\n" "$as_me:${as_lineno-$LINENO}: found $as_dir$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS fi fi ac_ct_CXX=$ac_cv_prog_ac_ct_CXX if test -n "$ac_ct_CXX"; then { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: $ac_ct_CXX" >&5 printf "%s\n" "$ac_ct_CXX" >&6; } else { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: no" >&5 printf "%s\n" "no" >&6; } fi test -n "$ac_ct_CXX" && break done if test "x$ac_ct_CXX" = x; then CXX="g++" else case $cross_compiling:$ac_tool_warned in yes:) { printf "%s\n" "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 printf "%s\n" "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} ac_tool_warned=yes ;; esac CXX=$ac_ct_CXX fi fi fi fi # Provide some information about the compiler. printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking for C++ compiler version" >&5 set X $ac_compile ac_compiler=$2 for ac_option in --version -v -V -qversion; do { { ac_try="$ac_compiler $ac_option >&5" case "(($ac_try" in *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; *) ac_try_echo=$ac_try;; esac eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" printf "%s\n" "$ac_try_echo"; } >&5 (eval "$ac_compiler $ac_option >&5") 2>conftest.err ac_status=$? if test -s conftest.err; then sed '10a\ ... rest of stderr output deleted ... 10q' conftest.err >conftest.er1 cat conftest.er1 >&5 fi rm -f conftest.er1 conftest.err printf "%s\n" "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; } done cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ int main (void) { ; return 0; } _ACEOF ac_clean_files_save=$ac_clean_files ac_clean_files="$ac_clean_files a.out a.out.dSYM a.exe b.out" # Try to create an executable without -o first, disregard a.out. # It will help us diagnose broken compilers, and finding out an intuition # of exeext. { printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking whether the C++ compiler works" >&5 printf %s "checking whether the C++ compiler works... " >&6; } ac_link_default=`printf "%s\n" "$ac_link" | sed 's/ -o *conftest[^ ]*//'` # The possible output files: ac_files="a.out conftest.exe conftest a.exe a_out.exe b.out conftest.*" ac_rmfiles= for ac_file in $ac_files do case $ac_file in *.$ac_ext | *.xcoff | *.tds | *.d | *.pdb | *.xSYM | *.bb | *.bbg | *.map | *.inf | *.dSYM | *.o | *.obj ) ;; * ) ac_rmfiles="$ac_rmfiles $ac_file";; esac done rm -f $ac_rmfiles if { { ac_try="$ac_link_default" case "(($ac_try" in *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; *) ac_try_echo=$ac_try;; esac eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" printf "%s\n" "$ac_try_echo"; } >&5 (eval "$ac_link_default") 2>&5 ac_status=$? printf "%s\n" "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; } then : # Autoconf-2.13 could set the ac_cv_exeext variable to `no'. # So ignore a value of `no', otherwise this would lead to `EXEEXT = no' # in a Makefile. We should not override ac_cv_exeext if it was cached, # so that the user can short-circuit this test for compilers unknown to # Autoconf. 
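# For example, a MinGW-hosted compiler typically leaves conftest.exe
# behind, so the loop records ac_cv_exeext=.exe for later test links.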
for ac_file in $ac_files '' do test -f "$ac_file" || continue case $ac_file in *.$ac_ext | *.xcoff | *.tds | *.d | *.pdb | *.xSYM | *.bb | *.bbg | *.map | *.inf | *.dSYM | *.o | *.obj ) ;; [ab].out ) # We found the default executable, but exeext='' is most # certainly right. break;; *.* ) if test ${ac_cv_exeext+y} && test "$ac_cv_exeext" != no; then :; else ac_cv_exeext=`expr "$ac_file" : '[^.]*\(\..*\)'` fi # We set ac_cv_exeext here because the later test for it is not # safe: cross compilers may not add the suffix if given an `-o' # argument, so we may need to know it at that point already. # Even if this section looks crufty: it has the advantage of # actually working. break;; * ) break;; esac done test "$ac_cv_exeext" = no && ac_cv_exeext= else $as_nop ac_file='' fi if test -z "$ac_file" then : { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: no" >&5 printf "%s\n" "no" >&6; } printf "%s\n" "$as_me: failed program was:" >&5 sed 's/^/| /' conftest.$ac_ext >&5 { { printf "%s\n" "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 printf "%s\n" "$as_me: error: in \`$ac_pwd':" >&2;} as_fn_error 77 "C++ compiler cannot create executables See \`config.log' for more details" "$LINENO" 5; } else $as_nop { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: yes" >&5 printf "%s\n" "yes" >&6; } fi { printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking for C++ compiler default output file name" >&5 printf %s "checking for C++ compiler default output file name... " >&6; } { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: $ac_file" >&5 printf "%s\n" "$ac_file" >&6; } ac_exeext=$ac_cv_exeext rm -f -r a.out a.out.dSYM a.exe conftest$ac_cv_exeext b.out ac_clean_files=$ac_clean_files_save { printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking for suffix of executables" >&5 printf %s "checking for suffix of executables... " >&6; } if { { ac_try="$ac_link" case "(($ac_try" in *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; *) ac_try_echo=$ac_try;; esac eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" printf "%s\n" "$ac_try_echo"; } >&5 (eval "$ac_link") 2>&5 ac_status=$? printf "%s\n" "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; } then : # If both `conftest.exe' and `conftest' are `present' (well, observable) # catch `conftest.exe'. For instance with Cygwin, `ls conftest' will # work properly (i.e., refer to `conftest.exe'), while it won't with # `rm'. for ac_file in conftest.exe conftest conftest.*; do test -f "$ac_file" || continue case $ac_file in *.$ac_ext | *.xcoff | *.tds | *.d | *.pdb | *.xSYM | *.bb | *.bbg | *.map | *.inf | *.dSYM | *.o | *.obj ) ;; *.* ) ac_cv_exeext=`expr "$ac_file" : '[^.]*\(\..*\)'` break;; * ) break;; esac done else $as_nop { { printf "%s\n" "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 printf "%s\n" "$as_me: error: in \`$ac_pwd':" >&2;} as_fn_error $? "cannot compute suffix of executables: cannot compile and link See \`config.log' for more details" "$LINENO" 5; } fi rm -f conftest conftest$ac_cv_exeext { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: $ac_cv_exeext" >&5 printf "%s\n" "$ac_cv_exeext" >&6; } rm -f conftest.$ac_ext EXEEXT=$ac_cv_exeext ac_exeext=$EXEEXT cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #include <stdio.h> int main (void) { FILE *f = fopen ("conftest.out", "w"); return ferror (f) || fclose (f) != 0; ; return 0; } _ACEOF ac_clean_files="$ac_clean_files conftest.out" # Check that the compiler produces executables we can run.
If not, either # the compiler is broken, or we cross compile. { printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking whether we are cross compiling" >&5 printf %s "checking whether we are cross compiling... " >&6; } if test "$cross_compiling" != yes; then { { ac_try="$ac_link" case "(($ac_try" in *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; *) ac_try_echo=$ac_try;; esac eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" printf "%s\n" "$ac_try_echo"; } >&5 (eval "$ac_link") 2>&5 ac_status=$? printf "%s\n" "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; } if { ac_try='./conftest$ac_cv_exeext' { { case "(($ac_try" in *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; *) ac_try_echo=$ac_try;; esac eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" printf "%s\n" "$ac_try_echo"; } >&5 (eval "$ac_try") 2>&5 ac_status=$? printf "%s\n" "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; }; }; then cross_compiling=no else if test "$cross_compiling" = maybe; then cross_compiling=yes else { { printf "%s\n" "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 printf "%s\n" "$as_me: error: in \`$ac_pwd':" >&2;} as_fn_error 77 "cannot run C++ compiled programs. If you meant to cross compile, use \`--host'. See \`config.log' for more details" "$LINENO" 5; } fi fi fi { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: $cross_compiling" >&5 printf "%s\n" "$cross_compiling" >&6; } rm -f conftest.$ac_ext conftest$ac_cv_exeext conftest.out ac_clean_files=$ac_clean_files_save { printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking for suffix of object files" >&5 printf %s "checking for suffix of object files... " >&6; } if test ${ac_cv_objext+y} then : printf %s "(cached) " >&6 else $as_nop cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ int main (void) { ; return 0; } _ACEOF rm -f conftest.o conftest.obj if { { ac_try="$ac_compile" case "(($ac_try" in *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; *) ac_try_echo=$ac_try;; esac eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" printf "%s\n" "$ac_try_echo"; } >&5 (eval "$ac_compile") 2>&5 ac_status=$? printf "%s\n" "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; } then : for ac_file in conftest.o conftest.obj conftest.*; do test -f "$ac_file" || continue; case $ac_file in *.$ac_ext | *.xcoff | *.tds | *.d | *.pdb | *.xSYM | *.bb | *.bbg | *.map | *.inf | *.dSYM ) ;; *) ac_cv_objext=`expr "$ac_file" : '.*\.\(.*\)'` break;; esac done else $as_nop printf "%s\n" "$as_me: failed program was:" >&5 sed 's/^/| /' conftest.$ac_ext >&5 { { printf "%s\n" "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 printf "%s\n" "$as_me: error: in \`$ac_pwd':" >&2;} as_fn_error $? "cannot compute suffix of object files: cannot compile See \`config.log' for more details" "$LINENO" 5; } fi rm -f conftest.$ac_cv_objext conftest.$ac_ext fi { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: $ac_cv_objext" >&5 printf "%s\n" "$ac_cv_objext" >&6; } OBJEXT=$ac_cv_objext ac_objext=$OBJEXT { printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking whether the compiler supports GNU C++" >&5 printf %s "checking whether the compiler supports GNU C++... " >&6; } if test ${ac_cv_cxx_compiler_gnu+y} then : printf %s "(cached) " >&6 else $as_nop cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. 
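   The body below is intentionally ill-formed unless the compiler
   defines __GNUC__: "choke me" only survives preprocessing on
   non-GNU compilers, so a successful compile identifies GNU C++.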
*/ int main (void) { #ifndef __GNUC__ choke me #endif ; return 0; } _ACEOF if ac_fn_cxx_try_compile "$LINENO" then : ac_compiler_gnu=yes else $as_nop ac_compiler_gnu=no fi rm -f core conftest.err conftest.$ac_objext conftest.beam conftest.$ac_ext ac_cv_cxx_compiler_gnu=$ac_compiler_gnu fi { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: $ac_cv_cxx_compiler_gnu" >&5 printf "%s\n" "$ac_cv_cxx_compiler_gnu" >&6; } ac_compiler_gnu=$ac_cv_cxx_compiler_gnu if test $ac_compiler_gnu = yes; then GXX=yes else GXX= fi ac_test_CXXFLAGS=${CXXFLAGS+y} ac_save_CXXFLAGS=$CXXFLAGS { printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking whether $CXX accepts -g" >&5 printf %s "checking whether $CXX accepts -g... " >&6; } if test ${ac_cv_prog_cxx_g+y} then : printf %s "(cached) " >&6 else $as_nop ac_save_cxx_werror_flag=$ac_cxx_werror_flag ac_cxx_werror_flag=yes ac_cv_prog_cxx_g=no CXXFLAGS="-g" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ int main (void) { ; return 0; } _ACEOF if ac_fn_cxx_try_compile "$LINENO" then : ac_cv_prog_cxx_g=yes else $as_nop CXXFLAGS="" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ int main (void) { ; return 0; } _ACEOF if ac_fn_cxx_try_compile "$LINENO" then : else $as_nop ac_cxx_werror_flag=$ac_save_cxx_werror_flag CXXFLAGS="-g" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ int main (void) { ; return 0; } _ACEOF if ac_fn_cxx_try_compile "$LINENO" then : ac_cv_prog_cxx_g=yes fi rm -f core conftest.err conftest.$ac_objext conftest.beam conftest.$ac_ext fi rm -f core conftest.err conftest.$ac_objext conftest.beam conftest.$ac_ext fi rm -f core conftest.err conftest.$ac_objext conftest.beam conftest.$ac_ext ac_cxx_werror_flag=$ac_save_cxx_werror_flag fi { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: $ac_cv_prog_cxx_g" >&5 printf "%s\n" "$ac_cv_prog_cxx_g" >&6; } if test $ac_test_CXXFLAGS; then CXXFLAGS=$ac_save_CXXFLAGS elif test $ac_cv_prog_cxx_g = yes; then if test "$GXX" = yes; then CXXFLAGS="-g -O2" else CXXFLAGS="-g" fi else if test "$GXX" = yes; then CXXFLAGS="-O2" else CXXFLAGS= fi fi ac_prog_cxx_stdcxx=no if test x$ac_prog_cxx_stdcxx = xno then : { printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking for $CXX option to enable C++11 features" >&5 printf %s "checking for $CXX option to enable C++11 features... " >&6; } if test ${ac_cv_prog_cxx_11+y} then : printf %s "(cached) " >&6 else $as_nop ac_cv_prog_cxx_11=no ac_save_CXX=$CXX cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. 
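   The C++11 conformance program assembled above is compiled once per
   candidate flag; the first candidate, the empty string, checks
   whether the compiler already accepts C++11 with no option at all.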
*/ $ac_cxx_conftest_cxx11_program _ACEOF for ac_arg in '' -std=gnu++11 -std=gnu++0x -std=c++11 -std=c++0x -qlanglvl=extended0x -AA do CXX="$ac_save_CXX $ac_arg" if ac_fn_cxx_try_compile "$LINENO" then : ac_cv_prog_cxx_cxx11=$ac_arg fi rm -f core conftest.err conftest.$ac_objext conftest.beam test "x$ac_cv_prog_cxx_cxx11" != "xno" && break done rm -f conftest.$ac_ext CXX=$ac_save_CXX fi if test "x$ac_cv_prog_cxx_cxx11" = xno then : { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: unsupported" >&5 printf "%s\n" "unsupported" >&6; } else $as_nop if test "x$ac_cv_prog_cxx_cxx11" = x then : { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: none needed" >&5 printf "%s\n" "none needed" >&6; } else $as_nop { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: $ac_cv_prog_cxx_cxx11" >&5 printf "%s\n" "$ac_cv_prog_cxx_cxx11" >&6; } CXX="$CXX $ac_cv_prog_cxx_cxx11" fi ac_cv_prog_cxx_stdcxx=$ac_cv_prog_cxx_cxx11 ac_prog_cxx_stdcxx=cxx11 fi fi if test x$ac_prog_cxx_stdcxx = xno then : { printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking for $CXX option to enable C++98 features" >&5 printf %s "checking for $CXX option to enable C++98 features... " >&6; } if test ${ac_cv_prog_cxx_98+y} then : printf %s "(cached) " >&6 else $as_nop ac_cv_prog_cxx_98=no ac_save_CXX=$CXX cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ $ac_cxx_conftest_cxx98_program _ACEOF for ac_arg in '' -std=gnu++98 -std=c++98 -qlanglvl=extended -AA do CXX="$ac_save_CXX $ac_arg" if ac_fn_cxx_try_compile "$LINENO" then : ac_cv_prog_cxx_cxx98=$ac_arg fi rm -f core conftest.err conftest.$ac_objext conftest.beam test "x$ac_cv_prog_cxx_cxx98" != "xno" && break done rm -f conftest.$ac_ext CXX=$ac_save_CXX fi if test "x$ac_cv_prog_cxx_cxx98" = xno then : { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: unsupported" >&5 printf "%s\n" "unsupported" >&6; } else $as_nop if test "x$ac_cv_prog_cxx_cxx98" = x then : { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: none needed" >&5 printf "%s\n" "none needed" >&6; } else $as_nop { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: $ac_cv_prog_cxx_cxx98" >&5 printf "%s\n" "$ac_cv_prog_cxx_cxx98" >&6; } CXX="$CXX $ac_cv_prog_cxx_cxx98" fi ac_cv_prog_cxx_stdcxx=$ac_cv_prog_cxx_cxx98 ac_prog_cxx_stdcxx=cxx98 fi fi ac_ext=cpp ac_cpp='$CXXCPP $CPPFLAGS' ac_compile='$CXX -c $CXXFLAGS $CPPFLAGS conftest.$ac_ext >&5' ac_link='$CXX -o conftest$ac_exeext $CXXFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' ac_compiler_gnu=$ac_cv_cxx_compiler_gnu { printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking for backtrace in -lexecinfo" >&5 printf %s "checking for backtrace in -lexecinfo... " >&6; } if test ${ac_cv_lib_execinfo_backtrace+y} then : printf %s "(cached) " >&6 else $as_nop ac_check_lib_save_LIBS=$LIBS LIBS="-lexecinfo $LIBS" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. 
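   Declaring backtrace with "C" linkage inside a private namespace
   avoids any need for the execinfo.h prototype; what is really being
   tested is whether the link against -lexecinfo succeeds.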
*/ namespace conftest { extern "C" int backtrace (); } int main (void) { return conftest::backtrace (); ; return 0; } _ACEOF if ac_fn_cxx_try_link "$LINENO" then : ac_cv_lib_execinfo_backtrace=yes else $as_nop ac_cv_lib_execinfo_backtrace=no fi rm -f core conftest.err conftest.$ac_objext conftest.beam \ conftest$ac_exeext conftest.$ac_ext LIBS=$ac_check_lib_save_LIBS fi { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_execinfo_backtrace" >&5 printf "%s\n" "$ac_cv_lib_execinfo_backtrace" >&6; } if test "x$ac_cv_lib_execinfo_backtrace" = xyes then : BACKTRACE_LIB=-lexecinfo else $as_nop BACKTRACE_LIB='' fi if test -z "${BACKTRACE_LIB}" then : ac_fn_cxx_check_func "$LINENO" "backtrace" "ac_cv_func_backtrace" if test "x$ac_cv_func_backtrace" = xyes then : else $as_nop DMLC_DEFS="$DMLC_DEFS -DDMLC_LOG_STACK_TRACE=0" fi fi { printf "%s\n" "$as_me:${as_lineno-$LINENO}: Checking whether fopen64 is available" >&5 printf "%s\n" "$as_me: Checking whether fopen64 is available" >&6;} ac_fn_cxx_check_func "$LINENO" "fopen64" "ac_cv_func_fopen64" if test "x$ac_cv_func_fopen64" = xyes then : else $as_nop DMLC_DEFS="$DMLC_DEFS -DDMLC_USE_FOPEN64=0" fi { printf "%s\n" "$as_me:${as_lineno-$LINENO}: Endian detection" >&5 printf "%s\n" "$as_me: Endian detection" >&6;} ac_header= ac_cache= for ac_item in $ac_header_cxx_list do if test $ac_cache; then ac_fn_cxx_check_header_compile "$LINENO" $ac_header ac_cv_header_$ac_cache "$ac_includes_default" if eval test \"x\$ac_cv_header_$ac_cache\" = xyes; then printf "%s\n" "#define $ac_item 1" >> confdefs.h fi ac_header= ac_cache= elif test $ac_header; then ac_cache=$ac_item else ac_header=$ac_item fi done if test $ac_cv_header_stdlib_h = yes && test $ac_cv_header_string_h = yes then : printf "%s\n" "#define STDC_HEADERS 1" >>confdefs.h fi if test -z "${USE_LITTLE_ENDIAN+x}" then : { printf "%s\n" "$as_me:${as_lineno-$LINENO}: Checking system endianness as USE_LITTLE_ENDIAN is unset" >&5 printf "%s\n" "$as_me: Checking system endianness as USE_LITTLE_ENDIAN is unset" >&6;} { printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking whether byte ordering is bigendian" >&5 printf %s "checking whether byte ordering is bigendian... " >&6; } if test ${ac_cv_c_bigendian+y} then : printf %s "(cached) " >&6 else $as_nop ac_cv_c_bigendian=unknown # See if we're dealing with a universal compiler. cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #ifndef __APPLE_CC__ not a universal capable compiler #endif typedef int dummy; _ACEOF if ac_fn_cxx_try_compile "$LINENO" then : # Check for potential -arch flags. It is not universal unless # there are at least two -arch flags with different values. ac_arch= ac_prev= for ac_word in $CC $CFLAGS $CPPFLAGS $LDFLAGS; do if test -n "$ac_prev"; then case $ac_word in i?86 | x86_64 | ppc | ppc64) if test -z "$ac_arch" || test "$ac_arch" = "$ac_word"; then ac_arch=$ac_word else ac_cv_c_bigendian=universal break fi ;; esac ac_prev= elif test "x$ac_word" = "x-arch"; then ac_prev=arch fi done fi rm -f core conftest.err conftest.$ac_objext conftest.beam conftest.$ac_ext if test $ac_cv_c_bigendian = unknown; then # See if sys/param.h defines the BYTE_ORDER macro. cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #include <sys/types.h> #include <sys/param.h> int main (void) { #if !
(defined BYTE_ORDER && defined BIG_ENDIAN \ && defined LITTLE_ENDIAN && BYTE_ORDER && BIG_ENDIAN \ && LITTLE_ENDIAN) bogus endian macros #endif ; return 0; } _ACEOF if ac_fn_cxx_try_compile "$LINENO" then : # It does; now see whether it defined to BIG_ENDIAN or not. cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #include <sys/types.h> #include <sys/param.h> int main (void) { #if BYTE_ORDER != BIG_ENDIAN not big endian #endif ; return 0; } _ACEOF if ac_fn_cxx_try_compile "$LINENO" then : ac_cv_c_bigendian=yes else $as_nop ac_cv_c_bigendian=no fi rm -f core conftest.err conftest.$ac_objext conftest.beam conftest.$ac_ext fi rm -f core conftest.err conftest.$ac_objext conftest.beam conftest.$ac_ext fi if test $ac_cv_c_bigendian = unknown; then # See if <limits.h> defines _LITTLE_ENDIAN or _BIG_ENDIAN (e.g., Solaris). cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #include <limits.h> int main (void) { #if ! (defined _LITTLE_ENDIAN || defined _BIG_ENDIAN) bogus endian macros #endif ; return 0; } _ACEOF if ac_fn_cxx_try_compile "$LINENO" then : # It does; now see whether it defined to _BIG_ENDIAN or not. cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #include <limits.h> int main (void) { #ifndef _BIG_ENDIAN not big endian #endif ; return 0; } _ACEOF if ac_fn_cxx_try_compile "$LINENO" then : ac_cv_c_bigendian=yes else $as_nop ac_cv_c_bigendian=no fi rm -f core conftest.err conftest.$ac_objext conftest.beam conftest.$ac_ext fi rm -f core conftest.err conftest.$ac_objext conftest.beam conftest.$ac_ext fi if test $ac_cv_c_bigendian = unknown; then # Compile a test program. if test "$cross_compiling" = yes then : # Try to guess by grepping values from an object file. cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ unsigned short int ascii_mm[] = { 0x4249, 0x4765, 0x6E44, 0x6961, 0x6E53, 0x7953, 0 }; unsigned short int ascii_ii[] = { 0x694C, 0x5454, 0x656C, 0x6E45, 0x6944, 0x6E61, 0 }; int use_ascii (int i) { return ascii_mm[i] + ascii_ii[i]; } unsigned short int ebcdic_ii[] = { 0x89D3, 0xE3E3, 0x8593, 0x95C5, 0x89C4, 0x9581, 0 }; unsigned short int ebcdic_mm[] = { 0xC2C9, 0xC785, 0x95C4, 0x8981, 0x95E2, 0xA8E2, 0 }; int use_ebcdic (int i) { return ebcdic_mm[i] + ebcdic_ii[i]; } extern int foo; int main (void) { return use_ascii (foo) == use_ebcdic (foo); ; return 0; } _ACEOF if ac_fn_cxx_try_compile "$LINENO" then : if grep BIGenDianSyS conftest.$ac_objext >/dev/null; then ac_cv_c_bigendian=yes fi if grep LiTTleEnDian conftest.$ac_objext >/dev/null ; then if test "$ac_cv_c_bigendian" = unknown; then ac_cv_c_bigendian=no else # finding both strings is unlikely to happen, but who knows? ac_cv_c_bigendian=unknown fi fi fi rm -f core conftest.err conftest.$ac_objext conftest.beam conftest.$ac_ext else $as_nop cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ $ac_includes_default int main (void) { /* Are we little or big endian? From Harbison&Steele.
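   Storing 1 in a long and reading its highest-addressed byte: big
   endian keeps the low-order byte there, so the program exits 1,
   while little endian leaves 0 there and the program exits 0.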
*/ union { long int l; char c[sizeof (long int)]; } u; u.l = 1; return u.c[sizeof (long int) - 1] == 1; ; return 0; } _ACEOF if ac_fn_cxx_try_run "$LINENO" then : ac_cv_c_bigendian=no else $as_nop ac_cv_c_bigendian=yes fi rm -f core *.core core.conftest.* gmon.out bb.out conftest$ac_exeext \ conftest.$ac_objext conftest.beam conftest.$ac_ext fi fi fi { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: $ac_cv_c_bigendian" >&5 printf "%s\n" "$ac_cv_c_bigendian" >&6; } case $ac_cv_c_bigendian in #( yes) { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: using big endian" >&5 printf "%s\n" "using big endian" >&6; } ENDIAN_FLAG="-DDMLC_CMAKE_LITTLE_ENDIAN=0";; #( no) { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: using little endian" >&5 printf "%s\n" "using little endian" >&6; } ENDIAN_FLAG="-DDMLC_CMAKE_LITTLE_ENDIAN=1" ;; #( universal) printf "%s\n" "#define AC_APPLE_UNIVERSAL_BUILD 1" >>confdefs.h ;; #( *) { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: unknown" >&5 printf "%s\n" "unknown" >&6; } as_fn_error $? "Could not determine endianness. Please set USE_LITTLE_ENDIAN" "$LINENO" 5 ;; esac else $as_nop { printf "%s\n" "$as_me:${as_lineno-$LINENO}: Forcing endianness to: ${USE_LITTLE_ENDIAN}" >&5 printf "%s\n" "$as_me: Forcing endianness to: ${USE_LITTLE_ENDIAN}" >&6;} ENDIAN_FLAG="-DDMLC_CMAKE_LITTLE_ENDIAN=${USE_LITTLE_ENDIAN}" fi { printf "%s\n" "$as_me:${as_lineno-$LINENO}: Checking for prefetch builtin" >&5 printf "%s\n" "$as_me: Checking for prefetch builtin" >&6;} cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ int main (void) { __builtin_prefetch ; return 0; } _ACEOF if ac_fn_cxx_try_link "$LINENO" then : XGBOOST_BUILTIN_PREFETCH_PRESENT="-DXGBOOST_BUILTIN_PREFETCH_PRESENT=1" else $as_nop XGBOOST_BUILTIN_PREFETCH_PRESENT="" fi rm -f core conftest.err conftest.$ac_objext conftest.beam \ conftest$ac_exeext conftest.$ac_ext if [ "$XGBOOST_BUILTIN_PREFETCH_PRESENT" = "" ]; then echo "Doesn't have __builtin_prefetch" else echo "Has __builtin_prefetch" fi { printf "%s\n" "$as_me:${as_lineno-$LINENO}: Checking for mm_prefetch" >&5 printf "%s\n" "$as_me: Checking for mm_prefetch" >&6;} cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #include <xmmintrin.h> int main (void) { _mm_prefetch ; return 0; } _ACEOF if ac_fn_cxx_try_link "$LINENO" then : XGBOOST_MM_PREFETCH_PRESENT="-DXGBOOST_MM_PREFETCH_PRESENT=1" else $as_nop XGBOOST_MM_PREFETCH_PRESENT="" fi rm -f core conftest.err conftest.$ac_objext conftest.beam \ conftest$ac_exeext conftest.$ac_ext if [ "$XGBOOST_MM_PREFETCH_PRESENT" = "" ]; then echo "Doesn't have _mm_prefetch" else echo "Has _mm_prefetch" fi OPENMP_CXXFLAGS="" if test `uname -s` = "Linux" then OPENMP_CXXFLAGS="\$(SHLIB_OPENMP_CXXFLAGS)" fi if test `uname -s` = "Darwin" then if command -v brew >/dev/null 2>&1 then HOMEBREW_LIBOMP_PREFIX=`brew --prefix libomp` else # Homebrew not found HOMEBREW_LIBOMP_PREFIX='' fi OPENMP_CXXFLAGS="-Xpreprocessor -fopenmp -I${HOMEBREW_LIBOMP_PREFIX}/include" OPENMP_LIB="-lomp -L${HOMEBREW_LIBOMP_PREFIX}/lib" ac_pkg_openmp=no { printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking whether OpenMP will work in a package" >&5 printf %s "checking whether OpenMP will work in a package... " >&6; } cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h.
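   This probe is compiled and executed directly with the flags chosen
   above; it exits 0, marking OpenMP usable, only when
   omp_get_max_threads() reports more than one thread.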
*/ #include <omp.h> int main (void) { return (omp_get_max_threads() <= 1); ; return 0; } _ACEOF ${CXX} -o conftest conftest.cpp ${CPPFLAGS} ${LDFLAGS} ${OPENMP_LIB} ${OPENMP_CXXFLAGS} 2>/dev/null && ./conftest && ac_pkg_openmp=yes { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: ${ac_pkg_openmp}" >&5 printf "%s\n" "${ac_pkg_openmp}" >&6; } if test "${ac_pkg_openmp}" = no; then OPENMP_CXXFLAGS='' OPENMP_LIB='' echo '*****************************************************************************************' echo ' OpenMP is unavailable on this macOS system. Training speed may be suboptimal.' echo ' To use all CPU cores for training jobs, you should install OpenMP by running' echo ' brew install libomp' echo '*****************************************************************************************' fi fi ac_config_files="$ac_config_files src/Makevars" ac_config_headers="$ac_config_headers config.h" cat >confcache <<\_ACEOF # This file is a shell script that caches the results of configure # tests run on this system so they can be shared between configure # scripts and configure runs, see configure's option --config-cache. # It is not useful on other systems. If it contains results you don't # want to keep, you may remove or edit it. # # config.status only pays attention to the cache file if you give it # the --recheck option to rerun configure. # # `ac_cv_env_foo' variables (set or unset) will be overridden when # loading this file, other *unset* `ac_cv_foo' will be assigned the # following values. _ACEOF # The following way of writing the cache mishandles newlines in values, # but we know of no workaround that is simple, portable, and efficient. # So, we kill variables containing newlines. # Ultrix sh set writes to stderr and can't be redirected directly, # and sets the high bit in the cache file unless we assign to the vars. ( for ac_var in `(set) 2>&1 | sed -n 's/^\([a-zA-Z_][a-zA-Z0-9_]*\)=.*/\1/p'`; do eval ac_val=\$$ac_var case $ac_val in #( *${as_nl}*) case $ac_var in #( *_cv_*) { printf "%s\n" "$as_me:${as_lineno-$LINENO}: WARNING: cache variable $ac_var contains a newline" >&5 printf "%s\n" "$as_me: WARNING: cache variable $ac_var contains a newline" >&2;} ;; esac case $ac_var in #( _ | IFS | as_nl) ;; #( BASH_ARGV | BASH_SOURCE) eval $ac_var= ;; #( *) { eval $ac_var=; unset $ac_var;} ;; esac ;; esac done (set) 2>&1 | case $as_nl`(ac_space=' '; set) 2>&1` in #( *${as_nl}ac_space=\ *) # `set' does not quote correctly, so add quotes: double-quote # substitution turns \\\\ into \\, and sed turns \\ into \. sed -n \ "s/'/'\\\\''/g; s/^\\([_$as_cr_alnum]*_cv_[_$as_cr_alnum]*\\)=\\(.*\\)/\\1='\\2'/p" ;; #( *) # `set' quotes correctly as required by POSIX, so do not add quotes. sed -n "/^[_$as_cr_alnum]*_cv_[_$as_cr_alnum]*=/p" ;; esac | sort ) | sed ' /^ac_cv_env_/b end t clear :clear s/^\([^=]*\)=\(.*[{}].*\)$/test ${\1+y} || &/ t end s/^\([^=]*\)=\(.*\)$/\1=${\1=\2}/ :end' >>confcache if diff "$cache_file" confcache >/dev/null 2>&1; then :; else if test -w "$cache_file"; then if test "x$cache_file" != "x/dev/null"; then { printf "%s\n" "$as_me:${as_lineno-$LINENO}: updating cache $cache_file" >&5 printf "%s\n" "$as_me: updating cache $cache_file" >&6;} if test !
-f "$cache_file" || test -h "$cache_file"; then cat confcache >"$cache_file" else case $cache_file in #( */* | ?:*) mv -f confcache "$cache_file"$$ && mv -f "$cache_file"$$ "$cache_file" ;; #( *) mv -f confcache "$cache_file" ;; esac fi fi else { printf "%s\n" "$as_me:${as_lineno-$LINENO}: not updating unwritable cache $cache_file" >&5 printf "%s\n" "$as_me: not updating unwritable cache $cache_file" >&6;} fi fi rm -f confcache test "x$prefix" = xNONE && prefix=$ac_default_prefix # Let make expand exec_prefix. test "x$exec_prefix" = xNONE && exec_prefix='${prefix}' DEFS=-DHAVE_CONFIG_H ac_libobjs= ac_ltlibobjs= U= for ac_i in : $LIBOBJS; do test "x$ac_i" = x: && continue # 1. Remove the extension, and $U if already installed. ac_script='s/\$U\././;s/\.o$//;s/\.obj$//' ac_i=`printf "%s\n" "$ac_i" | sed "$ac_script"` # 2. Prepend LIBOBJDIR. When used with automake>=1.10 LIBOBJDIR # will be set to the directory where LIBOBJS objects are built. as_fn_append ac_libobjs " \${LIBOBJDIR}$ac_i\$U.$ac_objext" as_fn_append ac_ltlibobjs " \${LIBOBJDIR}$ac_i"'$U.lo' done LIBOBJS=$ac_libobjs LTLIBOBJS=$ac_ltlibobjs : "${CONFIG_STATUS=./config.status}" ac_write_fail=0 ac_clean_files_save=$ac_clean_files ac_clean_files="$ac_clean_files $CONFIG_STATUS" { printf "%s\n" "$as_me:${as_lineno-$LINENO}: creating $CONFIG_STATUS" >&5 printf "%s\n" "$as_me: creating $CONFIG_STATUS" >&6;} as_write_fail=0 cat >$CONFIG_STATUS <<_ASEOF || as_write_fail=1 #! $SHELL # Generated by $as_me. # Run this file to recreate the current configuration. # Compiler output produced by configure, useful for debugging # configure, is in config.log if it exists. debug=false ac_cs_recheck=false ac_cs_silent=false SHELL=\${CONFIG_SHELL-$SHELL} export SHELL _ASEOF cat >>$CONFIG_STATUS <<\_ASEOF || as_write_fail=1 ## -------------------- ## ## M4sh Initialization. ## ## -------------------- ## # Be more Bourne compatible DUALCASE=1; export DUALCASE # for MKS sh as_nop=: if test ${ZSH_VERSION+y} && (emulate sh) >/dev/null 2>&1 then : emulate sh NULLCMD=: # Pre-4.2 versions of Zsh do word splitting on ${1+"$@"}, which # is contrary to our usage. Disable this feature. alias -g '${1+"$@"}'='"$@"' setopt NO_GLOB_SUBST else $as_nop case `(set -o) 2>/dev/null` in #( *posix*) : set -o posix ;; #( *) : ;; esac fi # Reset variables that may have inherited troublesome values from # the environment. # IFS needs to be set, to space, tab, and newline, in precisely that order. # (If _AS_PATH_WALK were called with IFS unset, it would have the # side effect of setting IFS to empty, thus disabling word splitting.) # Quoting is to prevent editors from complaining about space-tab. as_nl=' ' export as_nl IFS=" "" $as_nl" PS1='$ ' PS2='> ' PS4='+ ' # Ensure predictable behavior from utilities with locale-dependent output. LC_ALL=C export LC_ALL LANGUAGE=C export LANGUAGE # We cannot yet rely on "unset" to work, but we need these variables # to be unset--not just set to an empty or harmless value--now, to # avoid bugs in old shells (e.g. pre-3.0 UWIN ksh). This construct # also avoids known problems related to "unset" and subshell syntax # in other old shells (e.g. bash 2.01 and pdksh 5.2.14). for as_var in BASH_ENV ENV MAIL MAILPATH CDPATH do eval test \${$as_var+y} \ && ( (unset $as_var) || exit 1) >/dev/null 2>&1 && unset $as_var || : done # Ensure that fds 0, 1, and 2 are open. if (exec 3>&0) 2>/dev/null; then :; else exec 0&1) 2>/dev/null; then :; else exec 1>/dev/null; fi if (exec 3>&2) ; then :; else exec 2>/dev/null; fi # The user is always right. 
if ${PATH_SEPARATOR+false} :; then PATH_SEPARATOR=: (PATH='/bin;/bin'; FPATH=$PATH; sh -c :) >/dev/null 2>&1 && { (PATH='/bin:/bin'; FPATH=$PATH; sh -c :) >/dev/null 2>&1 || PATH_SEPARATOR=';' } fi # Find who we are. Look in the path if we contain no directory separator. as_myself= case $0 in #(( *[\\/]* ) as_myself=$0 ;; *) as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS case $as_dir in #((( '') as_dir=./ ;; */) ;; *) as_dir=$as_dir/ ;; esac test -r "$as_dir$0" && as_myself=$as_dir$0 && break done IFS=$as_save_IFS ;; esac # We did not find ourselves, most probably we were run as `sh COMMAND' # in which case we are not to be found in the path. if test "x$as_myself" = x; then as_myself=$0 fi if test ! -f "$as_myself"; then printf "%s\n" "$as_myself: error: cannot find myself; rerun with an absolute file name" >&2 exit 1 fi # as_fn_error STATUS ERROR [LINENO LOG_FD] # ---------------------------------------- # Output "`basename $0`: error: ERROR" to stderr. If LINENO and LOG_FD are # provided, also output the error to LOG_FD, referencing LINENO. Then exit the # script with STATUS, using 1 if that was 0. as_fn_error () { as_status=$1; test $as_status -eq 0 && as_status=1 if test "$4"; then as_lineno=${as_lineno-"$3"} as_lineno_stack=as_lineno_stack=$as_lineno_stack printf "%s\n" "$as_me:${as_lineno-$LINENO}: error: $2" >&$4 fi printf "%s\n" "$as_me: error: $2" >&2 as_fn_exit $as_status } # as_fn_error # as_fn_set_status STATUS # ----------------------- # Set $? to STATUS, without forking. as_fn_set_status () { return $1 } # as_fn_set_status # as_fn_exit STATUS # ----------------- # Exit the shell with STATUS, even in a "trap 0" or "set -e" context. as_fn_exit () { set +e as_fn_set_status $1 exit $1 } # as_fn_exit # as_fn_unset VAR # --------------- # Portably unset VAR. as_fn_unset () { { eval $1=; unset $1;} } as_unset=as_fn_unset # as_fn_append VAR VALUE # ---------------------- # Append the text in VALUE to the end of the definition contained in VAR. Take # advantage of any shell optimizations that allow amortized linear growth over # repeated appends, instead of the typical quadratic growth present in naive # implementations. if (eval "as_var=1; as_var+=2; test x\$as_var = x12") 2>/dev/null then : eval 'as_fn_append () { eval $1+=\$2 }' else $as_nop as_fn_append () { eval $1=\$$1\$2 } fi # as_fn_append # as_fn_arith ARG... # ------------------ # Perform arithmetic evaluation on the ARGs, and store the result in the # global $as_val. Take advantage of shells that can avoid forks. The arguments # must be portable across $(()) and expr. if (eval "test \$(( 1 + 1 )) = 2") 2>/dev/null then : eval 'as_fn_arith () { as_val=$(( $* )) }' else $as_nop as_fn_arith () { as_val=`expr "$@" || test $? -eq 1` } fi # as_fn_arith if expr a : '\(a\)' >/dev/null 2>&1 && test "X`expr 00001 : '.*\(...\)'`" = X001; then as_expr=expr else as_expr=false fi if (basename -- /) >/dev/null 2>&1 && test "X`basename -- / 2>&1`" = "X/"; then as_basename=basename else as_basename=false fi if (as_dir=`dirname -- /` && test "X$as_dir" = X/) >/dev/null 2>&1; then as_dirname=dirname else as_dirname=false fi as_me=`$as_basename -- "$0" || $as_expr X/"$0" : '.*/\([^/][^/]*\)/*$' \| \ X"$0" : 'X\(//\)$' \| \ X"$0" : 'X\(/\)' \| . 2>/dev/null || printf "%s\n" X/"$0" | sed '/^.*\/\([^/][^/]*\)\/*$/{ s//\1/ q } /^X\/\(\/\/\)$/{ s//\1/ q } /^X\/\(\/\).*/{ s//\1/ q } s/.*/./; q'` # Avoid depending upon Character Ranges. 
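# Spelling the alphabets out keeps the sed and tr expressions below
# independent of locales in which ranges such as a-z collate oddly.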
as_cr_letters='abcdefghijklmnopqrstuvwxyz' as_cr_LETTERS='ABCDEFGHIJKLMNOPQRSTUVWXYZ' as_cr_Letters=$as_cr_letters$as_cr_LETTERS as_cr_digits='0123456789' as_cr_alnum=$as_cr_Letters$as_cr_digits # Determine whether it's possible to make 'echo' print without a newline. # These variables are no longer used directly by Autoconf, but are AC_SUBSTed # for compatibility with existing Makefiles. ECHO_C= ECHO_N= ECHO_T= case `echo -n x` in #((((( -n*) case `echo 'xy\c'` in *c*) ECHO_T=' ';; # ECHO_T is single tab character. xy) ECHO_C='\c';; *) echo `echo ksh88 bug on AIX 6.1` > /dev/null ECHO_T=' ';; esac;; *) ECHO_N='-n';; esac # For backward compatibility with old third-party macros, we provide # the shell variables $as_echo and $as_echo_n. New code should use # AS_ECHO(["message"]) and AS_ECHO_N(["message"]), respectively. as_echo='printf %s\n' as_echo_n='printf %s' rm -f conf$$ conf$$.exe conf$$.file if test -d conf$$.dir; then rm -f conf$$.dir/conf$$.file else rm -f conf$$.dir mkdir conf$$.dir 2>/dev/null fi if (echo >conf$$.file) 2>/dev/null; then if ln -s conf$$.file conf$$ 2>/dev/null; then as_ln_s='ln -s' # ... but there are two gotchas: # 1) On MSYS, both `ln -s file dir' and `ln file dir' fail. # 2) DJGPP < 2.04 has no symlinks; `ln -s' creates a wrapper executable. # In both cases, we have to default to `cp -pR'. ln -s conf$$.file conf$$.dir 2>/dev/null && test ! -f conf$$.exe || as_ln_s='cp -pR' elif ln conf$$.file conf$$ 2>/dev/null; then as_ln_s=ln else as_ln_s='cp -pR' fi else as_ln_s='cp -pR' fi rm -f conf$$ conf$$.exe conf$$.dir/conf$$.file conf$$.file rmdir conf$$.dir 2>/dev/null # as_fn_mkdir_p # ------------- # Create "$as_dir" as a directory, including parents if necessary. as_fn_mkdir_p () { case $as_dir in #( -*) as_dir=./$as_dir;; esac test -d "$as_dir" || eval $as_mkdir_p || { as_dirs= while :; do case $as_dir in #( *\'*) as_qdir=`printf "%s\n" "$as_dir" | sed "s/'/'\\\\\\\\''/g"`;; #'( *) as_qdir=$as_dir;; esac as_dirs="'$as_qdir' $as_dirs" as_dir=`$as_dirname -- "$as_dir" || $as_expr X"$as_dir" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \ X"$as_dir" : 'X\(//\)[^/]' \| \ X"$as_dir" : 'X\(//\)$' \| \ X"$as_dir" : 'X\(/\)' \| . 2>/dev/null || printf "%s\n" X"$as_dir" | sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{ s//\1/ q } /^X\(\/\/\)[^/].*/{ s//\1/ q } /^X\(\/\/\)$/{ s//\1/ q } /^X\(\/\).*/{ s//\1/ q } s/.*/./; q'` test -d "$as_dir" && break done test -z "$as_dirs" || eval "mkdir $as_dirs" } || test -d "$as_dir" || as_fn_error $? "cannot create directory $as_dir" } # as_fn_mkdir_p if mkdir -p . 2>/dev/null; then as_mkdir_p='mkdir -p "$as_dir"' else test -d ./-p && rmdir ./-p as_mkdir_p=false fi # as_fn_executable_p FILE # ----------------------- # Test if FILE is an executable regular file. as_fn_executable_p () { test -f "$1" && test -x "$1" } # as_fn_executable_p as_test_x='test -x' as_executable_p=as_fn_executable_p # Sed expression to map a string onto a valid CPP name. as_tr_cpp="eval sed 'y%*$as_cr_letters%P$as_cr_LETTERS%;s%[^_$as_cr_alnum]%_%g'" # Sed expression to map a string onto a valid variable name. as_tr_sh="eval sed 'y%*+%pp%;s%[^_$as_cr_alnum]%_%g'" exec 6>&1 ## ----------------------------------- ## ## Main body of $CONFIG_STATUS script. ## ## ----------------------------------- ## _ASEOF test $as_write_fail = 0 && chmod +x $CONFIG_STATUS || ac_write_fail=1 cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 # Save the log message, to keep $0 and so on meaningful, and to # report actual input values of CONFIG_FILES etc. instead of their # values after options handling. 
ac_log=" This file was extended by xgboost $as_me 3.0.0, which was generated by GNU Autoconf 2.71. Invocation command line was CONFIG_FILES = $CONFIG_FILES CONFIG_HEADERS = $CONFIG_HEADERS CONFIG_LINKS = $CONFIG_LINKS CONFIG_COMMANDS = $CONFIG_COMMANDS $ $0 $@ on `(hostname || uname -n) 2>/dev/null | sed 1q` " _ACEOF case $ac_config_files in *" "*) set x $ac_config_files; shift; ac_config_files=$*;; esac case $ac_config_headers in *" "*) set x $ac_config_headers; shift; ac_config_headers=$*;; esac cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 # Files that config.status was made for. config_files="$ac_config_files" config_headers="$ac_config_headers" _ACEOF cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 ac_cs_usage="\ \`$as_me' instantiates files and other configuration actions from templates according to the current configuration. Unless the files and actions are specified as TAGs, all are instantiated by default. Usage: $0 [OPTION]... [TAG]... -h, --help print this help, then exit -V, --version print version number and configuration settings, then exit --config print configuration, then exit -q, --quiet, --silent do not print progress messages -d, --debug don't remove temporary files --recheck update $as_me by reconfiguring in the same conditions --file=FILE[:TEMPLATE] instantiate the configuration file FILE --header=FILE[:TEMPLATE] instantiate the configuration header FILE Configuration files: $config_files Configuration headers: $config_headers Report bugs to the package provider." _ACEOF ac_cs_config=`printf "%s\n" "$ac_configure_args" | sed "$ac_safe_unquote"` ac_cs_config_escaped=`printf "%s\n" "$ac_cs_config" | sed "s/^ //; s/'/'\\\\\\\\''/g"` cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 ac_cs_config='$ac_cs_config_escaped' ac_cs_version="\\ xgboost config.status 3.0.0 configured by $0, generated by GNU Autoconf 2.71, with options \\"\$ac_cs_config\\" Copyright (C) 2021 Free Software Foundation, Inc. This config.status script is free software; the Free Software Foundation gives unlimited permission to copy, distribute and modify it." ac_pwd='$ac_pwd' srcdir='$srcdir' test -n "\$AWK" || AWK=awk _ACEOF cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 # The default lists apply if the user does not specify any file. ac_need_defaults=: while test $# != 0 do case $1 in --*=?*) ac_option=`expr "X$1" : 'X\([^=]*\)='` ac_optarg=`expr "X$1" : 'X[^=]*=\(.*\)'` ac_shift=: ;; --*=) ac_option=`expr "X$1" : 'X\([^=]*\)='` ac_optarg= ac_shift=: ;; *) ac_option=$1 ac_optarg=$2 ac_shift=shift ;; esac case $ac_option in # Handling of the options. -recheck | --recheck | --rechec | --reche | --rech | --rec | --re | --r) ac_cs_recheck=: ;; --version | --versio | --versi | --vers | --ver | --ve | --v | -V ) printf "%s\n" "$ac_cs_version"; exit ;; --config | --confi | --conf | --con | --co | --c ) printf "%s\n" "$ac_cs_config"; exit ;; --debug | --debu | --deb | --de | --d | -d ) debug=: ;; --file | --fil | --fi | --f ) $ac_shift case $ac_optarg in *\'*) ac_optarg=`printf "%s\n" "$ac_optarg" | sed "s/'/'\\\\\\\\''/g"` ;; '') as_fn_error $? "missing file argument" ;; esac as_fn_append CONFIG_FILES " '$ac_optarg'" ac_need_defaults=false;; --header | --heade | --head | --hea ) $ac_shift case $ac_optarg in *\'*) ac_optarg=`printf "%s\n" "$ac_optarg" | sed "s/'/'\\\\\\\\''/g"` ;; esac as_fn_append CONFIG_HEADERS " '$ac_optarg'" ac_need_defaults=false;; --he | --h) # Conflict between --help and --header as_fn_error $? 
"ambiguous option: \`$1' Try \`$0 --help' for more information.";; --help | --hel | -h ) printf "%s\n" "$ac_cs_usage"; exit ;; -q | -quiet | --quiet | --quie | --qui | --qu | --q \ | -silent | --silent | --silen | --sile | --sil | --si | --s) ac_cs_silent=: ;; # This is an error. -*) as_fn_error $? "unrecognized option: \`$1' Try \`$0 --help' for more information." ;; *) as_fn_append ac_config_targets " $1" ac_need_defaults=false ;; esac shift done ac_configure_extra_args= if $ac_cs_silent; then exec 6>/dev/null ac_configure_extra_args="$ac_configure_extra_args --silent" fi _ACEOF cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 if \$ac_cs_recheck; then set X $SHELL '$0' $ac_configure_args \$ac_configure_extra_args --no-create --no-recursion shift \printf "%s\n" "running CONFIG_SHELL=$SHELL \$*" >&6 CONFIG_SHELL='$SHELL' export CONFIG_SHELL exec "\$@" fi _ACEOF cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 exec 5>>config.log { echo sed 'h;s/./-/g;s/^.../## /;s/...$/ ##/;p;x;p;x' <<_ASBOX ## Running $as_me. ## _ASBOX printf "%s\n" "$ac_log" } >&5 _ACEOF cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 _ACEOF cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 # Handling of arguments. for ac_config_target in $ac_config_targets do case $ac_config_target in "src/Makevars") CONFIG_FILES="$CONFIG_FILES src/Makevars" ;; "config.h") CONFIG_HEADERS="$CONFIG_HEADERS config.h" ;; *) as_fn_error $? "invalid argument: \`$ac_config_target'" "$LINENO" 5;; esac done # If the user did not use the arguments to specify the items to instantiate, # then the envvar interface is used. Set only those that are not. # We use the long form for the default assignment because of an extremely # bizarre bug on SunOS 4.1.3. if $ac_need_defaults; then test ${CONFIG_FILES+y} || CONFIG_FILES=$config_files test ${CONFIG_HEADERS+y} || CONFIG_HEADERS=$config_headers fi # Have a temporary directory for convenience. Make it in the build tree # simply because there is no reason against having it here, and in addition, # creating and moving files from /tmp can sometimes cause problems. # Hook for its removal unless debugging. # Note that there is a small window in which the directory will not be cleaned: # after its creation but before its name has been assigned to `$tmp'. $debug || { tmp= ac_tmp= trap 'exit_status=$? : "${ac_tmp:=$tmp}" { test ! -d "$ac_tmp" || rm -fr "$ac_tmp"; } && exit $exit_status ' 0 trap 'as_fn_exit 1' 1 2 13 15 } # Create a (secure) tmp directory for tmp files. { tmp=`(umask 077 && mktemp -d "./confXXXXXX") 2>/dev/null` && test -d "$tmp" } || { tmp=./conf$$-$RANDOM (umask 077 && mkdir "$tmp") } || as_fn_error $? "cannot create a temporary directory in ." "$LINENO" 5 ac_tmp=$tmp # Set up the scripts for CONFIG_FILES section. # No need to generate them if there are no CONFIG_FILES. # This happens for instance with `./config.status config.h'. if test -n "$CONFIG_FILES"; then ac_cr=`echo X | tr X '\015'` # On cygwin, bash can eat \r inside `` if the user requested igncr. # But we know of no other shell where ac_cr would be empty at this # point, so we can use a bashism as a fallback. if test "x$ac_cr" = x; then eval ac_cr=\$\'\\r\' fi ac_cs_awk_cr=`$AWK 'BEGIN { print "a\rb" }' /dev/null` if test "$ac_cs_awk_cr" = "a${ac_cr}b"; then ac_cs_awk_cr='\\r' else ac_cs_awk_cr=$ac_cr fi echo 'BEGIN {' >"$ac_tmp/subs1.awk" && _ACEOF { echo "cat >conf$$subs.awk <<_ACEOF" && echo "$ac_subst_vars" | sed 's/.*/&!$&$ac_delim/' && echo "_ACEOF" } >conf$$subs.sh || as_fn_error $? 
"could not make $CONFIG_STATUS" "$LINENO" 5 ac_delim_num=`echo "$ac_subst_vars" | grep -c '^'` ac_delim='%!_!# ' for ac_last_try in false false false false false :; do . ./conf$$subs.sh || as_fn_error $? "could not make $CONFIG_STATUS" "$LINENO" 5 ac_delim_n=`sed -n "s/.*$ac_delim\$/X/p" conf$$subs.awk | grep -c X` if test $ac_delim_n = $ac_delim_num; then break elif $ac_last_try; then as_fn_error $? "could not make $CONFIG_STATUS" "$LINENO" 5 else ac_delim="$ac_delim!$ac_delim _$ac_delim!! " fi done rm -f conf$$subs.sh cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 cat >>"\$ac_tmp/subs1.awk" <<\\_ACAWK && _ACEOF sed -n ' h s/^/S["/; s/!.*/"]=/ p g s/^[^!]*!// :repl t repl s/'"$ac_delim"'$// t delim :nl h s/\(.\{148\}\)..*/\1/ t more1 s/["\\]/\\&/g; s/^/"/; s/$/\\n"\\/ p n b repl :more1 s/["\\]/\\&/g; s/^/"/; s/$/"\\/ p g s/.\{148\}// t nl :delim h s/\(.\{148\}\)..*/\1/ t more2 s/["\\]/\\&/g; s/^/"/; s/$/"/ p b :more2 s/["\\]/\\&/g; s/^/"/; s/$/"\\/ p g s/.\{148\}// t delim ' >$CONFIG_STATUS || ac_write_fail=1 rm -f conf$$subs.awk cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 _ACAWK cat >>"\$ac_tmp/subs1.awk" <<_ACAWK && for (key in S) S_is_set[key] = 1 FS = "" } { line = $ 0 nfields = split(line, field, "@") substed = 0 len = length(field[1]) for (i = 2; i < nfields; i++) { key = field[i] keylen = length(key) if (S_is_set[key]) { value = S[key] line = substr(line, 1, len) "" value "" substr(line, len + keylen + 3) len += length(value) + length(field[++i]) substed = 1 } else len += 1 + keylen } print line } _ACAWK _ACEOF cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 if sed "s/$ac_cr//" < /dev/null > /dev/null 2>&1; then sed "s/$ac_cr\$//; s/$ac_cr/$ac_cs_awk_cr/g" else cat fi < "$ac_tmp/subs1.awk" > "$ac_tmp/subs.awk" \ || as_fn_error $? "could not setup config files machinery" "$LINENO" 5 _ACEOF # VPATH may cause trouble with some makes, so we remove sole $(srcdir), # ${srcdir} and @srcdir@ entries from VPATH if srcdir is ".", strip leading and # trailing colons and then remove the whole line if VPATH becomes empty # (actually we leave an empty line to preserve line numbers). if test "x$srcdir" = x.; then ac_vpsub='/^[ ]*VPATH[ ]*=[ ]*/{ h s/// s/^/:/ s/[ ]*$/:/ s/:\$(srcdir):/:/g s/:\${srcdir}:/:/g s/:@srcdir@:/:/g s/^:*// s/:*$// x s/\(=[ ]*\).*/\1/ G s/\n// s/^[^=]*=[ ]*$// }' fi cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 fi # test -n "$CONFIG_FILES" # Set up the scripts for CONFIG_HEADERS section. # No need to generate them if there are no CONFIG_HEADERS. # This happens for instance with `./config.status Makefile'. if test -n "$CONFIG_HEADERS"; then cat >"$ac_tmp/defines.awk" <<\_ACAWK || BEGIN { _ACEOF # Transform confdefs.h into an awk script `defines.awk', embedded as # here-document in config.status, that substitutes the proper values into # config.h.in to produce config.h. # Create a delimiter string that does not exist in confdefs.h, to ease # handling of long lines. ac_delim='%!_!# ' for ac_last_try in false false :; do ac_tt=`sed -n "/$ac_delim/p" confdefs.h` if test -z "$ac_tt"; then break elif $ac_last_try; then as_fn_error $? "could not make $CONFIG_HEADERS" "$LINENO" 5 else ac_delim="$ac_delim!$ac_delim _$ac_delim!! " fi done # For the awk script, D is an array of macro values keyed by name, # likewise P contains macro parameters if any. Preserve backslash # newline sequences. 
ac_word_re=[_$as_cr_Letters][_$as_cr_alnum]* sed -n ' s/.\{148\}/&'"$ac_delim"'/g t rset :rset s/^[ ]*#[ ]*define[ ][ ]*/ / t def d :def s/\\$// t bsnl s/["\\]/\\&/g s/^ \('"$ac_word_re"'\)\(([^()]*)\)[ ]*\(.*\)/P["\1"]="\2"\ D["\1"]=" \3"/p s/^ \('"$ac_word_re"'\)[ ]*\(.*\)/D["\1"]=" \2"/p d :bsnl s/["\\]/\\&/g s/^ \('"$ac_word_re"'\)\(([^()]*)\)[ ]*\(.*\)/P["\1"]="\2"\ D["\1"]=" \3\\\\\\n"\\/p t cont s/^ \('"$ac_word_re"'\)[ ]*\(.*\)/D["\1"]=" \2\\\\\\n"\\/p t cont d :cont n s/.\{148\}/&'"$ac_delim"'/g t clear :clear s/\\$// t bsnlc s/["\\]/\\&/g; s/^/"/; s/$/"/p d :bsnlc s/["\\]/\\&/g; s/^/"/; s/$/\\\\\\n"\\/p b cont ' >$CONFIG_STATUS || ac_write_fail=1 cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 for (key in D) D_is_set[key] = 1 FS = "" } /^[\t ]*#[\t ]*(define|undef)[\t ]+$ac_word_re([\t (]|\$)/ { line = \$ 0 split(line, arg, " ") if (arg[1] == "#") { defundef = arg[2] mac1 = arg[3] } else { defundef = substr(arg[1], 2) mac1 = arg[2] } split(mac1, mac2, "(") #) macro = mac2[1] prefix = substr(line, 1, index(line, defundef) - 1) if (D_is_set[macro]) { # Preserve the white space surrounding the "#". print prefix "define", macro P[macro] D[macro] next } else { # Replace #undef with comments. This is necessary, for example, # in the case of _POSIX_SOURCE, which is predefined and required # on some systems where configure will not decide to define it. if (defundef == "undef") { print "/*", prefix defundef, macro, "*/" next } } } { print } _ACAWK _ACEOF cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 as_fn_error $? "could not setup config headers machinery" "$LINENO" 5 fi # test -n "$CONFIG_HEADERS" eval set X " :F $CONFIG_FILES :H $CONFIG_HEADERS " shift for ac_tag do case $ac_tag in :[FHLC]) ac_mode=$ac_tag; continue;; esac case $ac_mode$ac_tag in :[FHL]*:*);; :L* | :C*:*) as_fn_error $? "invalid tag \`$ac_tag'" "$LINENO" 5;; :[FH]-) ac_tag=-:-;; :[FH]*) ac_tag=$ac_tag:$ac_tag.in;; esac ac_save_IFS=$IFS IFS=: set x $ac_tag IFS=$ac_save_IFS shift ac_file=$1 shift case $ac_mode in :L) ac_source=$1;; :[FH]) ac_file_inputs= for ac_f do case $ac_f in -) ac_f="$ac_tmp/stdin";; *) # Look for the file first in the build tree, then in the source tree # (if the path is not absolute). The absolute path cannot be DOS-style, # because $ac_f cannot contain `:'. test -f "$ac_f" || case $ac_f in [\\/$]*) false;; *) test -f "$srcdir/$ac_f" && ac_f="$srcdir/$ac_f";; esac || as_fn_error 1 "cannot find input file: \`$ac_f'" "$LINENO" 5;; esac case $ac_f in *\'*) ac_f=`printf "%s\n" "$ac_f" | sed "s/'/'\\\\\\\\''/g"`;; esac as_fn_append ac_file_inputs " '$ac_f'" done # Let's still pretend it is `configure' which instantiates (i.e., don't # use $as_me), people would be surprised to read: # /* config.h. Generated by config.status. */ configure_input='Generated from '` printf "%s\n" "$*" | sed 's|^[^:]*/||;s|:[^:]*/|, |g' `' by configure.' if test x"$ac_file" != x-; then configure_input="$ac_file. $configure_input" { printf "%s\n" "$as_me:${as_lineno-$LINENO}: creating $ac_file" >&5 printf "%s\n" "$as_me: creating $ac_file" >&6;} fi # Neutralize special characters interpreted by sed in replacement strings. case $configure_input in #( *\&* | *\|* | *\\* ) ac_sed_conf_input=`printf "%s\n" "$configure_input" | sed 's/[\\\\&|]/\\\\&/g'`;; #( *) ac_sed_conf_input=$configure_input;; esac case $ac_tag in *:-:* | *:-) cat >"$ac_tmp/stdin" \ || as_fn_error $? 
"could not create $ac_file" "$LINENO" 5 ;; esac ;; esac ac_dir=`$as_dirname -- "$ac_file" || $as_expr X"$ac_file" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \ X"$ac_file" : 'X\(//\)[^/]' \| \ X"$ac_file" : 'X\(//\)$' \| \ X"$ac_file" : 'X\(/\)' \| . 2>/dev/null || printf "%s\n" X"$ac_file" | sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{ s//\1/ q } /^X\(\/\/\)[^/].*/{ s//\1/ q } /^X\(\/\/\)$/{ s//\1/ q } /^X\(\/\).*/{ s//\1/ q } s/.*/./; q'` as_dir="$ac_dir"; as_fn_mkdir_p ac_builddir=. case "$ac_dir" in .) ac_dir_suffix= ac_top_builddir_sub=. ac_top_build_prefix= ;; *) ac_dir_suffix=/`printf "%s\n" "$ac_dir" | sed 's|^\.[\\/]||'` # A ".." for each directory in $ac_dir_suffix. ac_top_builddir_sub=`printf "%s\n" "$ac_dir_suffix" | sed 's|/[^\\/]*|/..|g;s|/||'` case $ac_top_builddir_sub in "") ac_top_builddir_sub=. ac_top_build_prefix= ;; *) ac_top_build_prefix=$ac_top_builddir_sub/ ;; esac ;; esac ac_abs_top_builddir=$ac_pwd ac_abs_builddir=$ac_pwd$ac_dir_suffix # for backward compatibility: ac_top_builddir=$ac_top_build_prefix case $srcdir in .) # We are building in place. ac_srcdir=. ac_top_srcdir=$ac_top_builddir_sub ac_abs_top_srcdir=$ac_pwd ;; [\\/]* | ?:[\\/]* ) # Absolute name. ac_srcdir=$srcdir$ac_dir_suffix; ac_top_srcdir=$srcdir ac_abs_top_srcdir=$srcdir ;; *) # Relative name. ac_srcdir=$ac_top_build_prefix$srcdir$ac_dir_suffix ac_top_srcdir=$ac_top_build_prefix$srcdir ac_abs_top_srcdir=$ac_pwd/$srcdir ;; esac ac_abs_srcdir=$ac_abs_top_srcdir$ac_dir_suffix case $ac_mode in :F) # # CONFIG_FILE # _ACEOF cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 # If the template does not know about datarootdir, expand it. # FIXME: This hack should be removed a few years after 2.60. ac_datarootdir_hack=; ac_datarootdir_seen= ac_sed_dataroot=' /datarootdir/ { p q } /@datadir@/p /@docdir@/p /@infodir@/p /@localedir@/p /@mandir@/p' case `eval "sed -n \"\$ac_sed_dataroot\" $ac_file_inputs"` in *datarootdir*) ac_datarootdir_seen=yes;; *@datadir@*|*@docdir@*|*@infodir@*|*@localedir@*|*@mandir@*) { printf "%s\n" "$as_me:${as_lineno-$LINENO}: WARNING: $ac_file_inputs seems to ignore the --datarootdir setting" >&5 printf "%s\n" "$as_me: WARNING: $ac_file_inputs seems to ignore the --datarootdir setting" >&2;} _ACEOF cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 ac_datarootdir_hack=' s&@datadir@&$datadir&g s&@docdir@&$docdir&g s&@infodir@&$infodir&g s&@localedir@&$localedir&g s&@mandir@&$mandir&g s&\\\${datarootdir}&$datarootdir&g' ;; esac _ACEOF # Neutralize VPATH when `$srcdir' = `.'. # Shell code in configure.ac might set extrasub. # FIXME: do we really want to maintain this feature? cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 ac_sed_extra="$ac_vpsub $extrasub _ACEOF cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 :t /@[a-zA-Z_][a-zA-Z_0-9]*@/!b s|@configure_input@|$ac_sed_conf_input|;t t s&@top_builddir@&$ac_top_builddir_sub&;t t s&@top_build_prefix@&$ac_top_build_prefix&;t t s&@srcdir@&$ac_srcdir&;t t s&@abs_srcdir@&$ac_abs_srcdir&;t t s&@top_srcdir@&$ac_top_srcdir&;t t s&@abs_top_srcdir@&$ac_abs_top_srcdir&;t t s&@builddir@&$ac_builddir&;t t s&@abs_builddir@&$ac_abs_builddir&;t t s&@abs_top_builddir@&$ac_abs_top_builddir&;t t $ac_datarootdir_hack " eval sed \"\$ac_sed_extra\" "$ac_file_inputs" | $AWK -f "$ac_tmp/subs.awk" \ >$ac_tmp/out || as_fn_error $? 
"could not create $ac_file" "$LINENO" 5 test -z "$ac_datarootdir_hack$ac_datarootdir_seen" && { ac_out=`sed -n '/\${datarootdir}/p' "$ac_tmp/out"`; test -n "$ac_out"; } && { ac_out=`sed -n '/^[ ]*datarootdir[ ]*:*=/p' \ "$ac_tmp/out"`; test -z "$ac_out"; } && { printf "%s\n" "$as_me:${as_lineno-$LINENO}: WARNING: $ac_file contains a reference to the variable \`datarootdir' which seems to be undefined. Please make sure it is defined" >&5 printf "%s\n" "$as_me: WARNING: $ac_file contains a reference to the variable \`datarootdir' which seems to be undefined. Please make sure it is defined" >&2;} rm -f "$ac_tmp/stdin" case $ac_file in -) cat "$ac_tmp/out" && rm -f "$ac_tmp/out";; *) rm -f "$ac_file" && mv "$ac_tmp/out" "$ac_file";; esac \ || as_fn_error $? "could not create $ac_file" "$LINENO" 5 ;; :H) # # CONFIG_HEADER # if test x"$ac_file" != x-; then { printf "%s\n" "/* $configure_input */" >&1 \ && eval '$AWK -f "$ac_tmp/defines.awk"' "$ac_file_inputs" } >"$ac_tmp/config.h" \ || as_fn_error $? "could not create $ac_file" "$LINENO" 5 if diff "$ac_file" "$ac_tmp/config.h" >/dev/null 2>&1; then { printf "%s\n" "$as_me:${as_lineno-$LINENO}: $ac_file is unchanged" >&5 printf "%s\n" "$as_me: $ac_file is unchanged" >&6;} else rm -f "$ac_file" mv "$ac_tmp/config.h" "$ac_file" \ || as_fn_error $? "could not create $ac_file" "$LINENO" 5 fi else printf "%s\n" "/* $configure_input */" >&1 \ && eval '$AWK -f "$ac_tmp/defines.awk"' "$ac_file_inputs" \ || as_fn_error $? "could not create -" "$LINENO" 5 fi ;; esac done # for ac_tag as_fn_exit 0 _ACEOF ac_clean_files=$ac_clean_files_save test $ac_write_fail = 0 || as_fn_error $? "write failure creating $CONFIG_STATUS" "$LINENO" 5 # configure is writing to config.log, and then calls config.status. # config.status does its own redirection, appending to config.log. # Unfortunately, on DOS this fails, as config.log is still kept open # by configure, so config.status won't be able to write to it; its # output is simply discarded. So we exec the FD to /dev/null, # effectively closing config.log, so it can be properly (re)opened and # appended to by config.status. When coming back to configure, we # need to make the FD available again. if test "$no_create" != yes; then ac_cs_success=: ac_config_status_args= test "$silent" = yes && ac_config_status_args="$ac_config_status_args --quiet" exec 5>/dev/null $SHELL $CONFIG_STATUS $ac_config_status_args || ac_cs_success=false exec 5>>config.log # Use ||, not &&, to avoid exiting from the if with $? = 1, which # would make configure fail if this is the last instruction. 
$ac_cs_success || as_fn_exit 1
fi
if test -n "$ac_unrecognized_opts" && test "$enable_option_checking" != no; then
  { printf "%s\n" "$as_me:${as_lineno-$LINENO}: WARNING: unrecognized options: $ac_unrecognized_opts" >&5
printf "%s\n" "$as_me: WARNING: unrecognized options: $ac_unrecognized_opts" >&2;}
fi
xgboost-3.0.0/R-package/configure.ac000066400000000000000000000077461476511272400173060ustar00rootroot00000000000000### configure.ac                      -*- Autoconf -*-

AC_PREREQ(2.69)

AC_INIT([xgboost],[3.0.0],[],[xgboost],[])

: ${R_HOME=`R RHOME`}
if test -z "${R_HOME}"; then
  echo "could not determine R_HOME"
  exit 1
fi

CXX17=`"${R_HOME}/bin/R" CMD config CXX17`
CXX17STD=`"${R_HOME}/bin/R" CMD config CXX17STD`
CXX="${CXX17} ${CXX17STD}"
CXXFLAGS=`"${R_HOME}/bin/R" CMD config CXXFLAGS`
CC=`"${R_HOME}/bin/R" CMD config CC`
CFLAGS=`"${R_HOME}/bin/R" CMD config CFLAGS`
CPPFLAGS=`"${R_HOME}/bin/R" CMD config CPPFLAGS`
LDFLAGS=`"${R_HOME}/bin/R" CMD config LDFLAGS`

AC_LANG(C++)

DMLC_DEFS=""

AC_MSG_NOTICE([Checking if/where backtrace is available])
AC_CHECK_LIB([execinfo], [backtrace], [BACKTRACE_LIB=-lexecinfo], [BACKTRACE_LIB=''])
AS_IF([test -z "${BACKTRACE_LIB}"],
      dnl backtrace() might be unavailable (e.g., in musl libc)
      [AC_CHECK_FUNC(backtrace, [], [DMLC_DEFS="$DMLC_DEFS -DDMLC_LOG_STACK_TRACE=0"])])

AC_MSG_NOTICE([Checking whether fopen64 is available])
AC_CHECK_FUNC(fopen64, [], [DMLC_DEFS="$DMLC_DEFS -DDMLC_USE_FOPEN64=0"])

AC_MSG_NOTICE([Endian detection])
AC_ARG_VAR(USE_LITTLE_ENDIAN, "Whether to build with little endian (checks at compile time if unset)")
AS_IF([test -z "${USE_LITTLE_ENDIAN+x}"], [
  AC_MSG_NOTICE([Checking system endianness as USE_LITTLE_ENDIAN is unset])
  AC_C_BIGENDIAN(
    [AC_MSG_RESULT([using big endian])
     ENDIAN_FLAG="-DDMLC_CMAKE_LITTLE_ENDIAN=0"],
    [AC_MSG_RESULT([using little endian])
     ENDIAN_FLAG="-DDMLC_CMAKE_LITTLE_ENDIAN=1"],
    [AC_MSG_RESULT([unknown])
     AC_MSG_ERROR([Could not determine endianness.
Please set USE_LITTLE_ENDIAN])]
  )
], [
  AC_MSG_NOTICE([Forcing endianness to: ${USE_LITTLE_ENDIAN}])
  ENDIAN_FLAG="-DDMLC_CMAKE_LITTLE_ENDIAN=${USE_LITTLE_ENDIAN}"
])

AC_MSG_NOTICE([Checking for prefetch builtin])
AC_LINK_IFELSE(
  [AC_LANG_PROGRAM(
    [],
    [__builtin_prefetch]
  )],
  [XGBOOST_BUILTIN_PREFETCH_PRESENT="-DXGBOOST_BUILTIN_PREFETCH_PRESENT=1"],
  [XGBOOST_BUILTIN_PREFETCH_PRESENT=""]
)
if [[ "$XGBOOST_BUILTIN_PREFETCH_PRESENT" = "" ]]; then
  echo "Doesn't have __builtin_prefetch"
else
  echo "Has __builtin_prefetch"
fi

AC_MSG_NOTICE([Checking for mm_prefetch])
AC_LINK_IFELSE(
  [AC_LANG_PROGRAM(
    [#include <xmmintrin.h>],
    [_mm_prefetch]
  )],
  [XGBOOST_MM_PREFETCH_PRESENT="-DXGBOOST_MM_PREFETCH_PRESENT=1"],
  [XGBOOST_MM_PREFETCH_PRESENT=""]
)
if [[ "$XGBOOST_MM_PREFETCH_PRESENT" = "" ]]; then
  echo "Doesn't have _mm_prefetch"
else
  echo "Has _mm_prefetch"
fi

OPENMP_CXXFLAGS=""

if test `uname -s` = "Linux"
then
  OPENMP_CXXFLAGS="\$(SHLIB_OPENMP_CXXFLAGS)"
fi

if test `uname -s` = "Darwin"
then
  if command -v brew &> /dev/null
  then
    HOMEBREW_LIBOMP_PREFIX=`brew --prefix libomp`
  else
    # Homebrew not found
    HOMEBREW_LIBOMP_PREFIX=''
  fi

  OPENMP_CXXFLAGS="-Xpreprocessor -fopenmp -I${HOMEBREW_LIBOMP_PREFIX}/include"
  OPENMP_LIB="-lomp -L${HOMEBREW_LIBOMP_PREFIX}/lib"

  ac_pkg_openmp=no
  AC_MSG_CHECKING([whether OpenMP will work in a package])
  AC_LANG_CONFTEST([AC_LANG_PROGRAM([[#include <omp.h>]], [[ return (omp_get_max_threads() <= 1); ]])])
  ${CXX} -o conftest conftest.cpp ${CPPFLAGS} ${LDFLAGS} ${OPENMP_LIB} ${OPENMP_CXXFLAGS} 2>/dev/null && ./conftest && ac_pkg_openmp=yes
  AC_MSG_RESULT([${ac_pkg_openmp}])

  if test "${ac_pkg_openmp}" = no; then
    OPENMP_CXXFLAGS=''
    OPENMP_LIB=''
    echo '*****************************************************************************************'
    echo ' OpenMP is unavailable on this Mac OSX system. Training speed may be suboptimal.'
    echo ' To use all CPU cores for training jobs, you should install OpenMP by running'
    echo '     brew install libomp'
    echo '*****************************************************************************************'
  fi
fi

AC_SUBST(OPENMP_CXXFLAGS)
AC_SUBST(OPENMP_LIB)
AC_SUBST(ENDIAN_FLAG)
AC_SUBST(DMLC_DEFS)
AC_SUBST(BACKTRACE_LIB)
AC_SUBST(XGBOOST_BUILTIN_PREFETCH_PRESENT)
AC_SUBST(XGBOOST_MM_PREFETCH_PRESENT)
AC_CONFIG_FILES([src/Makevars])
AC_CONFIG_HEADERS([config.h])
AC_OUTPUT
xgboost-3.0.0/R-package/configure.win000066400000000000000000000010261476511272400175010ustar00rootroot00000000000000R_EXE="${R_HOME}/bin${R_ARCH_BIN}/R.exe"
CXX=`"${R_EXE}" CMD config CXX`

cat > test.cpp <<EOL
#include <xmmintrin.h>
int main() {
  char data = 0;
  const char* address = &data;
  _mm_prefetch(address, _MM_HINT_NTA);
  return 0;
}
EOL

XGBOOST_MM_PREFETCH_PRESENT=""
${CXX} -o test test.cpp 2>/dev/null && ./test && XGBOOST_MM_PREFETCH_PRESENT="-DXGBOOST_MM_PREFETCH_PRESENT=1"
rm -f ./test
rm -f ./test.cpp

sed \
  -e "s/@XGBOOST_MM_PREFETCH_PRESENT@/$XGBOOST_MM_PREFETCH_PRESENT/" \
  < src/Makevars.win.in > src/Makevars.win
xgboost-3.0.0/R-package/data/000077500000000000000000000000001476511272400157135ustar00rootroot00000000000000xgboost-3.0.0/R-package/data/agaricus.test.rda
[tar metadata and binary payload omitted: xz-compressed R data, not representable as text]
xgboost-3.0.0/R-package/data/agaricus.train.rda
[tar metadata and binary payload omitted: bzip2-compressed R data, not representable as text]
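The two .rda files above hold the pre-split Mushroom data that the man/ pages further below (agaricus.test.Rd, agaricus.train.Rd) document. A minimal sketch of loading and inspecting them from R, assuming an installed xgboost package (expected shapes taken from that documentation):

# Load the bundled Mushroom data splits from the installed package
data(agaricus.train, package = "xgboost")
data(agaricus.test, package = "xgboost")

# Each object is a list holding a 'label' vector and a sparse
# 'dgCMatrix' of 126 binary features
stopifnot(inherits(agaricus.train$data, "dgCMatrix"))
print(dim(agaricus.train$data))     # expected: 6513 x 126
print(dim(agaricus.test$data))      # expected: 1611 x 126
print(length(agaricus.test$label))  # one label per row: 1611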
e2LS)e2LS)e?y&S)e2)r)r)r)r)r)r)r)r)r)r)r)r)rLS)e2LS)e2LS)e2S9NS9NS9NS9NS9NS9NS9NS9NS9NS9NS:NS??)rsS9OrI|r)r)r')~nS9NS9NS>9NS9NS9OS)r)r~S)e2LS)e2NS)|e>2LSuNS9NS9NS9NS9NS)rS>SNS9NS9NS9NS9NS9NS>)r)r)r)rI|r)rfSyO)SNS9NS9NS9NS9NS9O9NS6S)e2LS)~LS)e2LS)9NS9NS9NS9NS9NS9NS9NS9NS9NS9NS9NS9NS:)r)r)r)r)r)rIr)r)r)r)r)r)ʐ%9NS9NS9NS9NS9O?мSyO)I|!D QPϥܪҨ'QU>lBQ]T(*RQHH4 ҄IBPHBP/чJKI@4Jt)iJD](h>2'J:Zily&":EPtإ1:[PP)(Jآ衴`&$it:"imT-v_[zxPV)4RMNC%PU4ӏ[IMܚKi4אг+TSMRQVq%}Dy{xjJ6ݵاEQqnuʽ%1/ #Q1: SJ֒f:=Hiqt[HQlko3`)<P{~v|7CҞs{\30UWQD IdQGF%JBPJ4>AESvKB{#M)Hy&FMO*LAEhݺ붠X{d.MıWqj5ko":Ldc :kX4gDB*ȥP f6$/ieS b FacO4$ %qb`JGg1%4W9b9ZX3A (r &1%,CAȖ|10PJ3ӘeZҥ*FmWYtqYY5}v;BN}R QΜ)CdJ֕-m}sٶIWa.7D95+!a7{/PD;//Oh +: Å+p  *"w$S !xgboost-3.0.0/R-package/inst/000077500000000000000000000000001476511272400157575ustar00rootroot00000000000000xgboost-3.0.0/R-package/inst/make-r-def.R000066400000000000000000000055031476511272400200150ustar00rootroot00000000000000# [description] # Create a definition file (.def) from a .dll file, using objdump. This # is used by FindLibR.cmake when building the R package with MSVC. # # [usage] # # Rscript make-r-def.R something.dll something.def # # [references] # * https://www.cs.colorado.edu/~main/cs1300/doc/mingwfaq.html args <- commandArgs(trailingOnly = TRUE) IN_DLL_FILE <- args[[1L]] OUT_DEF_FILE <- args[[2L]] DLL_BASE_NAME <- basename(IN_DLL_FILE) message(sprintf("Creating '%s' from '%s'", OUT_DEF_FILE, IN_DLL_FILE)) # system() will not raise an R exception if the process called # fails. Wrapping it here to get that behavior. # # system() introduces a lot of overhead, at least on Windows, # so trying processx if it is available .pipe_shell_command_to_stdout <- function(command, args, out_file) { has_processx <- suppressMessages({ suppressWarnings({ require("processx") # nolint }) }) if (has_processx) { p <- processx::process$new( command = command , args = args , stdout = out_file , windows_verbatim_args = FALSE ) invisible(p$wait()) } else { message(paste0( "Using system2() to run shell commands. Installing " , "'processx' with install.packages('processx') might " , "make this faster." 
    ))
    exit_code <- system2(
      command = command
      , args = shQuote(args)
      , stdout = out_file
    )
    if (exit_code != 0L) {
      stop(paste0("Command failed with exit code: ", exit_code))
    }
  }
  return(invisible(NULL))
}

# use objdump to dump all the symbols
OBJDUMP_FILE <- file.path(tempdir(), "objdump-out.txt")
.pipe_shell_command_to_stdout(
  command = "objdump"
  , args = c("-p", IN_DLL_FILE)
  , out_file = OBJDUMP_FILE
)
objdump_results <- readLines(OBJDUMP_FILE)
result <- file.remove(OBJDUMP_FILE)

# Only one table in the objdump results matters for our purposes,
# see https://www.cs.colorado.edu/~main/cs1300/doc/mingwfaq.html
start_index <- which(
  grepl(
    pattern = "[Ordinal/Name Pointer] Table"
    , x = objdump_results
    , fixed = TRUE
  )
)
empty_lines <- which(objdump_results == "")
end_of_table <- empty_lines[empty_lines > start_index][1L]

# Read the contents of the table
exported_symbols <- objdump_results[(start_index + 1L):end_of_table]
exported_symbols <- gsub("\t", "", exported_symbols, fixed = TRUE)
exported_symbols <- gsub(".*\\] ", "", exported_symbols)
exported_symbols <- gsub(" ", "", exported_symbols, fixed = TRUE)

# Write R.def file
writeLines(
  text = c(
    paste0("LIBRARY \"", DLL_BASE_NAME, "\"")
    , "EXPORTS"
    , exported_symbols
  )
  , con = OUT_DEF_FILE
  , sep = "\n"
)
message(sprintf("Successfully created '%s'", OUT_DEF_FILE))
xgboost-3.0.0/R-package/man/000077500000000000000000000000001476511272400155555ustar00rootroot00000000000000xgboost-3.0.0/R-package/man/a-compatibility-note-for-saveRDS-save.Rd000066400000000000000000000147331476511272400251300ustar00rootroot00000000000000% Generated by roxygen2: do not edit by hand
% Please edit documentation in R/utils.R
\name{a-compatibility-note-for-saveRDS-save}
\alias{a-compatibility-note-for-saveRDS-save}
\title{Model Serialization and Compatibility}
\description{
When it comes to serializing XGBoost models, it's possible to use R serializers such as
\code{\link[=save]{save()}} or \code{\link[=saveRDS]{saveRDS()}} to serialize an XGBoost model object, but XGBoost also
provides its own serializers with better compatibility guarantees, which allow loading
said models in other language bindings of XGBoost.

Note that an \code{xgb.Booster} object (\strong{as produced by \code{\link[=xgb.train]{xgb.train()}}}, see rest of the
doc for objects produced by \code{\link[=xgboost]{xgboost()}}), outside of its core components, might also keep:
\itemize{
\item Additional model configuration (accessible through \code{\link[=xgb.config]{xgb.config()}}), which includes
model fitting parameters like \code{max_depth} and runtime parameters like \code{nthread}.
These are not necessarily useful for prediction/importance/plotting.
\item Additional R specific attributes - e.g. results of callbacks, such as evaluation logs,
which are kept as a \code{data.table} object, accessible through
\code{attributes(model)$evaluation_log} if present.
}

The first one (configurations) does not have the same compatibility guarantees as the
model itself, including attributes that are set and accessed through
\code{\link[=xgb.attributes]{xgb.attributes()}} - that is, such configuration might be lost after loading the
booster in a different XGBoost version, regardless of the serializer that was used.
These are saved when using \code{\link[=saveRDS]{saveRDS()}}, but will be discarded if loaded into an
incompatible XGBoost version. They are not saved when using XGBoost's serializers
from its public interface including \code{\link[=xgb.save]{xgb.save()}} and \code{\link[=xgb.save.raw]{xgb.save.raw()}}.
The second kind (R attributes) is not part of the standard XGBoost model structure,
and thus is not saved when using XGBoost's own serializers. These attributes are only
used for informational purposes, such as keeping track of evaluation metrics as the
model was fit, or saving the R call that produced the model, but are otherwise not
used for prediction / importance / plotting / etc.
These R attributes are only preserved when using R's serializers.

In addition to the regular \code{xgb.Booster} objects produced by \code{\link[=xgb.train]{xgb.train()}}, the
function \code{\link[=xgboost]{xgboost()}} produces objects with a different subclass \code{xgboost} (which
inherits from \code{xgb.Booster}), which keeps other additional metadata as R attributes
such as class names in classification problems, and which has a dedicated \code{predict}
method that uses different defaults and takes different argument names. XGBoost's own
serializers can work with this \code{xgboost} class, but as they do not keep R attributes,
the resulting object, when deserialized, is downcast to the regular \code{xgb.Booster}
class (i.e. it loses the metadata, and the resulting object will use
\code{\link[=predict.xgb.Booster]{predict.xgb.Booster()}} instead of \code{\link[=predict.xgboost]{predict.xgboost()}}) - for these \code{xgboost}
objects, \code{saveRDS} might thus be a better option if the extra functionalities are needed.

Note that XGBoost models in R from version \verb{2.1.0} onwards and XGBoost models before
version \verb{2.1.0} have very different R object structures and are incompatible with each
other. Hence, models that were saved with R serializers like \code{\link[=saveRDS]{saveRDS()}} or
\code{\link[=save]{save()}} before version \verb{2.1.0} will not work with later \code{xgboost} versions and
vice versa. Be aware that the structure of R model objects could in theory change again in
the future, so XGBoost's serializers should be preferred for long-term storage.

Furthermore, note that model objects from XGBoost might not be serializable with third-party
R packages like \code{qs} or \code{qs2}.
}
\details{
Use \code{\link[=xgb.save]{xgb.save()}} to save the XGBoost model as a stand-alone file. You may opt into
the JSON format by specifying the JSON extension. To read the model back, use
\code{\link[=xgb.load]{xgb.load()}}.

Use \code{\link[=xgb.save.raw]{xgb.save.raw()}} to save the XGBoost model as a sequence (vector) of raw bytes
in a future-proof manner. Future releases of XGBoost will be able to read the raw bytes and
re-construct the corresponding model. To read the model back, use \code{\link[=xgb.load.raw]{xgb.load.raw()}}.
The \code{\link[=xgb.save.raw]{xgb.save.raw()}} function is useful if you would like to persist the XGBoost
model as part of another R object.

Use \code{\link[=saveRDS]{saveRDS()}} if you require the R-specific attributes that a booster might have,
such as evaluation logs or the model class \code{xgboost} instead of \code{xgb.Booster}, but note
that future compatibility of such objects is outside XGBoost's control as it relies on R's
serialization format (see e.g. the details section in \link{serialize} and \code{\link[=save]{save()}}
from base R).

For more details and explanation about model persistence and archival, consult the page
\url{https://xgboost.readthedocs.io/en/latest/tutorials/saving_model.html}.
}
\examples{
data(agaricus.train, package = "xgboost")

bst <- xgb.train(
  data = xgb.DMatrix(agaricus.train$data, label = agaricus.train$label, nthread = 1),
  nrounds = 2,
  params = xgb.params(
    max_depth = 2,
    nthread = 2,
    objective = "binary:logistic"
  )
)

# Save as a stand-alone file; load it with xgb.load()
fname <- file.path(tempdir(), "xgb_model.ubj")
xgb.save(bst, fname)
bst2 <- xgb.load(fname)

# Save as a stand-alone file (JSON); load it with xgb.load()
fname <- file.path(tempdir(), "xgb_model.json")
xgb.save(bst, fname)
bst2 <- xgb.load(fname)

# Save as a raw byte vector; load it with xgb.load.raw()
xgb_bytes <- xgb.save.raw(bst)
bst2 <- xgb.load.raw(xgb_bytes)

# Persist XGBoost model as part of another R object
obj <- list(xgb_model_bytes = xgb.save.raw(bst), description = "My first XGBoost model")

# Persist the R object. Here, saveRDS() is okay, since it doesn't persist
# xgb.Booster directly. What's being persisted is the future-proof byte representation
# as given by xgb.save.raw().
fname <- file.path(tempdir(), "my_object.Rds")
saveRDS(obj, fname)

# Read back the R object
obj2 <- readRDS(fname)

# Re-construct xgb.Booster object from the bytes
bst2 <- xgb.load.raw(obj2$xgb_model_bytes)
}
xgboost-3.0.0/R-package/man/agaricus.test.Rd000066400000000000000000000015531476511272400206240ustar00rootroot00000000000000% Generated by roxygen2: do not edit by hand
% Please edit documentation in R/xgboost.R
\docType{data}
\name{agaricus.test}
\alias{agaricus.test}
\title{Test part from Mushroom Data Set}
\format{
A list containing a label vector, and a dgCMatrix object with 1611 rows and 126 variables
}
\usage{
data(agaricus.test)
}
\description{
This data set is originally from the Mushroom data set, UCI Machine Learning Repository.
}
\details{
It includes the following fields:
\itemize{
\item \code{label}: The label for each record.
\item \code{data}: A sparse Matrix of 'dgCMatrix' class with 126 columns.
}
}
\references{
\url{https://archive.ics.uci.edu/ml/datasets/Mushroom}

Bache, K. & Lichman, M. (2013). UCI Machine Learning Repository
\url{http://archive.ics.uci.edu/ml}. Irvine, CA: University of California,
School of Information and Computer Science.
}
\keyword{datasets}
xgboost-3.0.0/R-package/man/agaricus.train.Rd000066400000000000000000000015621476511272400207620ustar00rootroot00000000000000% Generated by roxygen2: do not edit by hand
% Please edit documentation in R/xgboost.R
\docType{data}
\name{agaricus.train}
\alias{agaricus.train}
\title{Training part from Mushroom Data Set}
\format{
A list containing a label vector, and a dgCMatrix object with 6513 rows and 126 variables
}
\usage{
data(agaricus.train)
}
\description{
This data set is originally from the Mushroom data set, UCI Machine Learning Repository.
}
\details{
It includes the following fields:
\itemize{
\item \code{label}: The label for each record.
\item \code{data}: A sparse Matrix of 'dgCMatrix' class with 126 columns.
}
}
\references{
\url{https://archive.ics.uci.edu/ml/datasets/Mushroom}

Bache, K. & Lichman, M. (2013). UCI Machine Learning Repository
\url{http://archive.ics.uci.edu/ml}. Irvine, CA: University of California,
School of Information and Computer Science.
} \keyword{datasets} xgboost-3.0.0/R-package/man/coef.xgb.Booster.Rd000066400000000000000000000037311476511272400211570ustar00rootroot00000000000000% Generated by roxygen2: do not edit by hand % Please edit documentation in R/xgb.Booster.R \name{coef.xgb.Booster} \alias{coef.xgb.Booster} \title{Extract coefficients from linear booster} \usage{ \method{coef}{xgb.Booster}(object, ...) } \arguments{ \item{object}{A fitted booster of 'gblinear' type.} \item{...}{Not used.} } \value{ The extracted coefficients: \itemize{ \item If there is only one coefficient per column in the data, will be returned as a vector, potentially containing the feature names if available, with the intercept as the first element. \item If there is more than one coefficient per column in the data (e.g. when using \code{objective="multi:softmax"}), will be returned as a matrix with dimensions equal to \verb{[num_features, num_cols]}, with the intercepts as the first row. Note that the column (classes in multi-class classification) dimension will not be named. } The intercept returned here will include the 'base_score' parameter (unlike the 'bias' or the last coefficient in the model dump, which doesn't have 'base_score' added to it), hence one should get the same values from calling \code{predict(..., outputmargin = TRUE)} and from performing a matrix multiplication with \code{model.matrix(~., ...)}. Be aware that the coefficients are obtained by first converting them to strings and back, so there will always be some very small loss of precision compared to the actual coefficients as used by \link{predict.xgb.Booster}. } \description{ Extracts the coefficients from a 'gblinear' booster object, as produced by \code{\link[=xgb.train]{xgb.train()}} when using parameter \code{booster="gblinear"}. Note: this function will error out if passing a booster model which is not of "gblinear" type. } \examples{ library(xgboost) data(mtcars) y <- mtcars[, 1] x <- as.matrix(mtcars[, -1]) dm <- xgb.DMatrix(data = x, label = y, nthread = 1) params <- xgb.params(booster = "gblinear", nthread = 1) model <- xgb.train(data = dm, params = params, nrounds = 2) coef(model) } xgboost-3.0.0/R-package/man/dim.xgb.DMatrix.Rd000066400000000000000000000015111476511272400207410ustar00rootroot00000000000000% Generated by roxygen2: do not edit by hand % Please edit documentation in R/xgb.DMatrix.R \name{dim.xgb.DMatrix} \alias{dim.xgb.DMatrix} \title{Dimensions of xgb.DMatrix} \usage{ \method{dim}{xgb.DMatrix}(x) } \arguments{ \item{x}{Object of class \code{xgb.DMatrix}} } \description{ Returns a vector with the number of rows and the number of columns of an \code{xgb.DMatrix}. } \details{ Note: since \code{\link[=nrow]{nrow()}} and \code{\link[=ncol]{ncol()}} internally use \code{\link[=dim]{dim()}}, they can also be directly used with an \code{xgb.DMatrix} object.
} \examples{ data(agaricus.train, package = "xgboost") train <- agaricus.train dtrain <- xgb.DMatrix(train$data, label = train$label, nthread = 2) stopifnot(nrow(dtrain) == nrow(train$data)) stopifnot(ncol(dtrain) == ncol(train$data)) stopifnot(all(dim(dtrain) == dim(train$data))) } xgboost-3.0.0/R-package/man/dimnames.xgb.DMatrix.Rd000066400000000000000000000021461476511272400217720ustar00rootroot00000000000000% Generated by roxygen2: do not edit by hand % Please edit documentation in R/xgb.DMatrix.R \name{dimnames.xgb.DMatrix} \alias{dimnames.xgb.DMatrix} \alias{dimnames<-.xgb.DMatrix} \title{Handling of column names of \code{xgb.DMatrix}} \usage{ \method{dimnames}{xgb.DMatrix}(x) \method{dimnames}{xgb.DMatrix}(x) <- value } \arguments{ \item{x}{Object of class \code{xgb.DMatrix}.} \item{value}{A list of two elements: the first one is ignored and the second one is the column names.} } \description{ Only column names are supported for \code{xgb.DMatrix}, thus setting row names would have no effect and the returned row names would be \code{NULL}. } \details{ Generic \code{\link[=dimnames]{dimnames()}} methods are used by \code{\link[=colnames]{colnames()}}. Since row names are irrelevant, it is recommended to use \code{\link[=colnames]{colnames()}} directly. } \examples{ data(agaricus.train, package = "xgboost") train <- agaricus.train dtrain <- xgb.DMatrix(train$data, label = train$label, nthread = 2) dimnames(dtrain) colnames(dtrain) colnames(dtrain) <- make.names(1:ncol(train$data)) print(dtrain, verbose = TRUE) } xgboost-3.0.0/R-package/man/getinfo.Rd000066400000000000000000000052721476511272400175050ustar00rootroot00000000000000% Generated by roxygen2: do not edit by hand % Please edit documentation in R/xgb.Booster.R, R/xgb.DMatrix.R \name{getinfo.xgb.Booster} \alias{getinfo.xgb.Booster} \alias{setinfo.xgb.Booster} \alias{getinfo} \alias{getinfo.xgb.DMatrix} \alias{setinfo} \alias{setinfo.xgb.DMatrix} \title{Get or set information of xgb.DMatrix and xgb.Booster objects} \usage{ \method{getinfo}{xgb.Booster}(object, name) \method{setinfo}{xgb.Booster}(object, name, info) getinfo(object, name) \method{getinfo}{xgb.DMatrix}(object, name) setinfo(object, name, info) \method{setinfo}{xgb.DMatrix}(object, name, info) } \arguments{ \item{object}{Object of class \code{xgb.DMatrix} or \code{xgb.Booster}.} \item{name}{The name of the information field to get (see details).} \item{info}{The specific field of information to set.} } \value{ For \code{getinfo()}, will return the requested field. For \code{setinfo()}, will always return \code{TRUE} if it succeeds. } \description{ Get or set information of xgb.DMatrix and xgb.Booster objects } \details{ The \code{name} field can be one of the following for \code{xgb.DMatrix}: \itemize{ \item label \item weight \item base_margin \item label_lower_bound \item label_upper_bound \item group \item feature_type \item feature_name \item nrow } See the documentation for \code{\link[=xgb.DMatrix]{xgb.DMatrix()}} for more information about these fields. For \code{xgb.Booster}, can be one of the following: \itemize{ \item \code{feature_type} \item \code{feature_name} } Note that, while 'qid' cannot be retrieved, it is possible to get the equivalent 'group' for a DMatrix that had 'qid' assigned. \strong{Important}: when calling \code{\link[=setinfo]{setinfo()}}, the objects are modified in-place. See \code{\link[=xgb.copy.Booster]{xgb.copy.Booster()}} for an idea of how this in-place assignment works.
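As a minimal sketch of this in-place behavior (with made-up label values): \preformatted{dm <- xgb.DMatrix(matrix(1:10, nrow = 5), label = c(0, 1, 0, 1, 0), nthread = 1)
dm_alias <- dm  # plain assignment does not copy the underlying DMatrix
setinfo(dm, "label", c(1, 0, 1, 0, 1))
getinfo(dm_alias, "label")  # returns the new labels - both names refer to one object
}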
See the documentation for \code{\link[=xgb.DMatrix]{xgb.DMatrix()}} for possible fields that can be set (which correspond to arguments in that function). Note that the following fields are allowed in the construction of an \code{xgb.DMatrix} but \strong{are not} allowed here: \itemize{ \item data \item missing \item silent \item nthread } } \examples{ data(agaricus.train, package = "xgboost") dtrain <- with(agaricus.train, xgb.DMatrix(data, label = label, nthread = 2)) labels <- getinfo(dtrain, "label") setinfo(dtrain, "label", 1 - labels) labels2 <- getinfo(dtrain, "label") stopifnot(all(labels2 == 1 - labels)) data(agaricus.train, package = "xgboost") dtrain <- with(agaricus.train, xgb.DMatrix(data, label = label, nthread = 2)) labels <- getinfo(dtrain, "label") setinfo(dtrain, "label", 1 - labels) labels2 <- getinfo(dtrain, "label") stopifnot(all.equal(labels2, 1 - labels)) } xgboost-3.0.0/R-package/man/predict.xgb.Booster.Rd000066400000000000000000000344701476511272400217010ustar00rootroot00000000000000% Generated by roxygen2: do not edit by hand % Please edit documentation in R/xgb.Booster.R \name{predict.xgb.Booster} \alias{predict.xgb.Booster} \title{Predict method for XGBoost model} \usage{ \method{predict}{xgb.Booster}( object, newdata, missing = NA, outputmargin = FALSE, predleaf = FALSE, predcontrib = FALSE, approxcontrib = FALSE, predinteraction = FALSE, training = FALSE, iterationrange = NULL, strict_shape = FALSE, avoid_transpose = FALSE, validate_features = FALSE, base_margin = NULL, ... ) } \arguments{ \item{object}{Object of class \code{xgb.Booster}.} \item{newdata}{Takes \code{data.frame}, \code{matrix}, \code{dgCMatrix}, \code{dgRMatrix}, \code{dsparseVector}, local data file, or \code{xgb.DMatrix}. For single-row predictions on sparse data, it is recommended to use CSR format. If passing a sparse vector, it will take it as a row vector. Note that, for repeated predictions on the same data, one might want to create a DMatrix to pass here instead of passing R types like matrices or data frames, as predictions will be faster on DMatrix. If \code{newdata} is a \code{data.frame}, be aware that: \itemize{ \item Columns will be converted to numeric if they aren't already, which could potentially make the operation slower than in an equivalent \code{matrix} object. \item The order of the columns must match with that of the data from which the model was fitted (i.e. columns will not be referenced by their names, just by their order in the data), unless passing \code{validate_features = TRUE} (which is not the default). \item If the model was fitted to data with categorical columns, these columns must be of \code{factor} type here, and must use the same encoding (i.e. have the same levels). \item If \code{newdata} contains any \code{factor} columns, they will be converted to base-0 encoding (same as during DMatrix creation) - hence, one should not pass a \code{factor} under a column which during training had a different type. \item Any columns with type other than \code{factor} will be interpreted as numeric. }} \item{missing}{Float value that represents missing values in data (e.g., 0 or some other extreme value). This parameter is not used when \code{newdata} is an \code{xgb.DMatrix} - in such cases, should pass this as an argument to the DMatrix constructor instead.} \item{outputmargin}{Whether the prediction should be returned in the form of original untransformed sum of predictions from boosting iterations' results. 
E.g., setting \code{outputmargin = TRUE} for logistic regression would return log-odds instead of probabilities.} \item{predleaf}{Whether to predict per-tree leaf indices.} \item{predcontrib}{Whether to return feature contributions to individual predictions (see Details).} \item{approxcontrib}{Whether to use a fast approximation for feature contributions (see Details).} \item{predinteraction}{Whether to return contributions of feature interactions to individual predictions (see Details).} \item{training}{Whether the prediction result is used for training. For the dart booster, predicting in training mode will apply dropout.} \item{iterationrange}{Sequence of rounds/iterations from the model to use for prediction, specified by passing a two-dimensional vector with the start and end numbers in the sequence (same format as R's \code{seq} - i.e. base-1 indexing, and inclusive of both ends). For example, passing \code{c(1,20)} will predict using the first twenty iterations, while passing \code{c(1,1)} will predict using only the first one. If passing \code{NULL}, will either stop at the best iteration if the model used early stopping, or use all of the iterations (rounds) otherwise. If passing "all", will use all of the rounds regardless of whether the model had early stopping or not. Not applicable to \code{gblinear} booster.} \item{strict_shape}{Whether to always return an array with the same dimensions for the given prediction mode regardless of the model type - meaning that, for example, both a multi-class and a binary classification model would generate output arrays with the same number of dimensions, with the 'class' dimension having size equal to '1' for the binary model. If passing \code{FALSE} (the default), dimensions will be simplified according to the model type, so that a binary classification model for example would not have a redundant dimension for 'class'. See documentation for the return type for the exact shape of the output arrays for each prediction mode.} \item{avoid_transpose}{Whether to output the resulting predictions in the same memory layout in which they are generated by the core XGBoost library, without transposing them to match the expected output shape. Internally, XGBoost uses row-major order for the predictions it generates, while R arrays use column-major order, hence the result needs to be transposed in order to have the expected shape when represented as an R array or matrix, which might be a slow operation. If passing \code{TRUE}, then the result will have dimensions in reverse order - for example, rows will be the last dimension instead of the first.} \item{validate_features}{When \code{TRUE}, validate that the Booster's and newdata's feature_names match (only applicable when both \code{object} and \code{newdata} have feature names). If the column names differ and \code{newdata} is not an \code{xgb.DMatrix}, will try to reorder the columns in \code{newdata} to match with the booster's. If the booster has feature types and \code{newdata} is either an \code{xgb.DMatrix} or \code{data.frame}, will additionally verify that categorical columns are of the correct type in \code{newdata}, throwing an error if they do not match. If passing \code{FALSE}, it is assumed that the feature names and types are the same, and come in the same order as in the training data.
Note that this check might add some sizable latency to the predictions, so it's recommended to disable it for performance-sensitive applications.} \item{base_margin}{Base margin used for boosting from existing model (raw score that gets added to all observations independently of the trees in the model). If supplied, should be either a vector with length equal to the number of rows in \code{newdata} (for objectives which produce a single score per observation), or a matrix with the number of rows matching the number of rows in \code{newdata} and the number of columns matching the number of scores estimated by the model (e.g. number of classes for multi-class classification). Note that, if \code{newdata} is an \code{xgb.DMatrix} object, this argument will be ignored as it needs to be added to the DMatrix instead (e.g. by passing it as an argument in its constructor, or by calling \code{\link[=setinfo.xgb.DMatrix]{setinfo.xgb.DMatrix()}}).} \item{...}{Not used.} } \value{ A numeric vector or array, with corresponding dimensions depending on the prediction mode and on parameter \code{strict_shape} as follows: If passing \code{strict_shape=FALSE}:\itemize{ \item For regression or binary classification: a vector of length \code{nrows}. \item For multi-class and multi-target objectives: a matrix of dimensions \verb{[nrows, ngroups]}. Note that objective variant \code{multi:softmax} defaults towards predicting the most likely class (a vector of length \code{nrows}) instead of per-class probabilities. \item For \code{predleaf}: a matrix with one column per tree. For multi-class / multi-target, they will be arranged so that columns in the output will have the leaves from one group followed by leaves of the other group (e.g. order will be \code{group1:feat1}, \code{group1:feat2}, ..., \code{group2:feat1}, \code{group2:feat2}, ...). If there is more than one parallel tree (e.g. random forests), the parallel trees will be the last grouping in the resulting order, which will still be 2D. \item For \code{predcontrib}: when not multi-class / multi-target, a matrix with dimensions \verb{[nrows, nfeats+1]}. The last "+ 1" column corresponds to the baseline value. For multi-class and multi-target objectives, will be an array with dimensions \verb{[nrows, ngroups, nfeats+1]}. The contribution values are on the scale of untransformed margin (e.g., for binary classification, the values are log-odds deviations from the baseline). \item For \code{predinteraction}: when not multi-class / multi-target, the output is a 3D array of dimensions \verb{[nrows, nfeats+1, nfeats+1]}. The off-diagonal (in the last two dimensions) elements represent different feature interaction contributions. The array is symmetric w.r.t. the last two dimensions. The "+ 1" columns correspond to the baselines. Summing this array along the last dimension should produce practically the same result as \code{predcontrib = TRUE}. For multi-class and multi-target, will be a 4D array with dimensions \verb{[nrows, ngroups, nfeats+1, nfeats+1]}. } If passing \code{strict_shape=TRUE}, the result is always a matrix (if 2D) or array (if 3D or higher): \itemize{ \item For normal predictions, the dimension is \verb{[nrows, ngroups]}. \item For \code{predcontrib=TRUE}, the dimension is \verb{[nrows, ngroups, nfeats+1]}. \item For \code{predinteraction=TRUE}, the dimension is \verb{[nrows, ngroups, nfeats+1, nfeats+1]}. \item For \code{predleaf=TRUE}, the dimension is \verb{[nrows, niter, ngroups, num_parallel_tree]}.
} If passing \code{avoid_transpose=TRUE}, then the dimensions in all cases will be in reverse order - for example, for \code{predinteraction}, they will be \verb{[nfeats+1, nfeats+1, ngroups, nrows]} instead of \verb{[nrows, ngroups, nfeats+1, nfeats+1]}. } \description{ Predict values on data based on XGBoost model. } \details{ Note that \code{iterationrange} currently does nothing for predictions from "gblinear", since "gblinear" doesn't keep its boosting history. One possible practical application of the \code{predleaf} option is to use the model as a generator of new features which capture non-linearity and interactions, e.g., as implemented in \code{\link[=xgb.create.features]{xgb.create.features()}}. Setting \code{predcontrib = TRUE} makes it possible to calculate contributions of each feature to individual predictions. For "gblinear" booster, feature contributions are simply linear terms (feature_beta * feature_value). For "gbtree" booster, feature contributions are SHAP values (Lundberg 2017) that sum to the difference between the expected output of the model and the current prediction (where the hessian weights are used to compute the expectations). Setting \code{approxcontrib = TRUE} approximates these values following the idea explained in \url{http://blog.datadive.net/interpreting-random-forests/}. With \code{predinteraction = TRUE}, SHAP values of the contributions of the interaction of each pair of features are computed. Note that this operation might be rather expensive in terms of compute and memory. Since it quadratically depends on the number of features, it is recommended to perform selection of the most important features first. See below about the format of the returned results. The \code{predict()} method uses as many threads as defined in the \code{xgb.Booster} object (all by default). If you want to change their number, assign a new number to \code{nthread} using \code{\link[=xgb.model.parameters<-]{xgb.model.parameters<-()}}. Note that converting a matrix to \code{\link[=xgb.DMatrix]{xgb.DMatrix()}} uses multiple threads too.
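As a small sketch of how \code{strict_shape} changes the output shape (assuming a fitted binary-classification booster \code{bst} and a numeric matrix \code{x} with matching columns): \preformatted{p1 <- predict(bst, x)                       # numeric vector of length nrow(x)
p2 <- predict(bst, x, strict_shape = TRUE)  # matrix with dimensions [nrow(x), 1]
length(p1)
dim(p2)
}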
} \examples{ ## binary classification: data(agaricus.train, package = "xgboost") data(agaricus.test, package = "xgboost") ## Keep the number of threads to 2 for examples nthread <- 2 data.table::setDTthreads(nthread) train <- agaricus.train test <- agaricus.test bst <- xgb.train( data = xgb.DMatrix(train$data, label = train$label, nthread = 1), nrounds = 5, params = xgb.params( max_depth = 2, nthread = nthread, objective = "binary:logistic" ) ) # use all trees by default pred <- predict(bst, test$data) # use only the 1st tree pred1 <- predict(bst, test$data, iterationrange = c(1, 1)) # Predicting tree leaves: # the result is an nsamples X ntrees matrix pred_leaf <- predict(bst, test$data, predleaf = TRUE) str(pred_leaf) # Predicting feature contributions to predictions: # the result is an nsamples X (nfeatures + 1) matrix pred_contr <- predict(bst, test$data, predcontrib = TRUE) str(pred_contr) # verify that contributions' sums are equal to log-odds of predictions (up to float precision): summary(rowSums(pred_contr) - qlogis(pred)) # for the 1st record, let's inspect its features that had non-zero contribution to prediction: contr1 <- pred_contr[1,] contr1 <- contr1[-length(contr1)] # drop intercept contr1 <- contr1[contr1 != 0] # drop non-contributing features contr1 <- contr1[order(abs(contr1))] # order by contribution magnitude old_mar <- par("mar") par(mar = old_mar + c(0,7,0,0)) barplot(contr1, horiz = TRUE, las = 2, xlab = "contribution to prediction in log-odds") par(mar = old_mar) ## multiclass classification in iris dataset: lb <- as.numeric(iris$Species) - 1 num_class <- 3 set.seed(11) bst <- xgb.train( data = xgb.DMatrix(as.matrix(iris[, -5]), label = lb, nthread = 1), nrounds = 10, params = xgb.params( max_depth = 4, nthread = 2, subsample = 0.5, objective = "multi:softprob", num_class = num_class ) ) # predict for softprob returns num_class probability numbers per case: pred <- predict(bst, as.matrix(iris[, -5])) str(pred) # convert the probabilities to class labels pred_labels <- max.col(pred) - 1 # the following should result in the same error as seen in the last iteration sum(pred_labels != lb) / length(lb) # compare with predictions from softmax: set.seed(11) bst <- xgb.train( data = xgb.DMatrix(as.matrix(iris[, -5]), label = lb, nthread = 1), nrounds = 10, params = xgb.params( max_depth = 4, nthread = 2, subsample = 0.5, objective = "multi:softmax", num_class = num_class ) ) pred <- predict(bst, as.matrix(iris[, -5])) str(pred) all.equal(pred, pred_labels) # prediction from using only 5 iterations should result # in the same error as seen in iteration 5: pred5 <- predict(bst, as.matrix(iris[, -5]), iterationrange = c(1, 5)) sum(pred5 != lb) / length(lb) } \references{ \enumerate{ \item Scott M. Lundberg, Su-In Lee, "A Unified Approach to Interpreting Model Predictions", NIPS Proceedings 2017, \url{https://arxiv.org/abs/1705.07874} \item Scott M. Lundberg, Su-In Lee, "Consistent feature attribution for tree ensembles", \url{https://arxiv.org/abs/1706.06060} } } \seealso{ \code{\link[=xgb.train]{xgb.train()}} } xgboost-3.0.0/R-package/man/predict.xgboost.Rd000066400000000000000000000155241476511272400211670ustar00rootroot00000000000000% Generated by roxygen2: do not edit by hand % Please edit documentation in R/xgboost.R \name{predict.xgboost} \alias{predict.xgboost} \title{Compute predictions from XGBoost model on new data} \usage{ \method{predict}{xgboost}( object, newdata, type = "response", base_margin = NULL, iteration_range = NULL, validate_features = TRUE, ...
) } \arguments{ \item{object}{An XGBoost model object of class \code{xgboost}, as produced by function \code{\link[=xgboost]{xgboost()}}. Note that there is also a lower-level \code{\link[=predict.xgb.Booster]{predict.xgb.Booster()}} method for models of class \code{xgb.Booster} as produced by \code{\link[=xgb.train]{xgb.train()}}, which can also be used for \code{xgboost} class models as an alternative that performs fewer validations and post-processings.} \item{newdata}{Data on which to compute predictions from the model passed in \code{object}. Supported input classes are: \itemize{ \item Data Frames (class \code{data.frame} from base R and subclasses like \code{data.table}). \item Matrices (class \code{matrix} from base R). \item Sparse matrices from package \code{Matrix}, either as class \code{dgRMatrix} (CSR) or \code{dgCMatrix} (CSC). \item Sparse vectors from package \code{Matrix}, which will be interpreted as containing a single observation. } In the case of data frames, if there are any categorical features, they should be of class \code{factor} and should have the same levels as the \code{factor} columns of the data from which the model was constructed. Any columns with type other than \code{factor} will be interpreted as numeric. If there are named columns and the model was fitted to data with named columns, they will be matched by name by default (see \code{validate_features}).} \item{type}{Type of prediction to make. Supported options are: \itemize{ \item \code{"response"}: will output model predictions on the scale of the response variable (e.g. probabilities of belonging to the last class in the case of binary classification). Result will be either a numeric vector with length matching to rows in \code{newdata}, or a numeric matrix with shape \verb{[nrows(newdata), nscores]} (for objectives that produce more than one score per observation such as multi-class classification or multi-quantile regression). \item \code{"raw"}: will output the unprocessed boosting scores (e.g. log-odds in the case of objective \code{binary:logistic}). Same output shape and type as for \code{"response"}. \item \code{"class"}: will output the class with the highest predicted probability, returned as a \code{factor} (only applicable to classification objectives) with length matching to rows in \code{newdata}. \item \code{"leaf"}: will output the terminal node indices of each observation across each tree, as an integer matrix of shape \verb{[nrows(newdata), ntrees]}, or as an integer array with an extra one or two dimensions, up to \verb{[nrows(newdata), ntrees, nscores, n_parallel_trees]} for models that produce more than one score per tree and/or which have more than one parallel tree (e.g. random forests). Only applicable to tree-based boosters (not \code{gblinear}). \item \code{"contrib"}: will produce per-feature contribution estimates towards the model score for a given observation, based on SHAP values. The contribution values are on the scale of untransformed margin (e.g., for binary classification, the values are log-odds deviations from the baseline). Output will be a numeric matrix with shape \verb{[nrows, nfeatures+1]}, with the intercept being the last feature, or a numeric array with shape \verb{[nrows, nscores, nfeatures+1]} if the model produces more than one score per observation. \item \code{"interaction"}: similar to \code{"contrib"}, but computing SHAP values of contributions of interaction of each pair of features. 
Note that this operation might be rather expensive in terms of compute and memory. Since it quadratically depends on the number of features, it is recommended to perform selection of the most important features first. Output will be a numeric array of shape \verb{[nrows, nfeatures+1, nfeatures+1]}, or shape \verb{[nrows, nscores, nfeatures+1, nfeatures+1]} (for objectives that produce more than one score per observation). }} \item{base_margin}{Base margin used for boosting from existing model (raw score that gets added to all observations independently of the trees in the model). If supplied, should be either a vector with length equal to the number of rows in \code{newdata} (for objectives which produce a single score per observation), or a matrix with the number of rows matching the number of rows in \code{newdata} and the number of columns matching the number of scores estimated by the model (e.g. number of classes for multi-class classification).} \item{iteration_range}{Sequence of rounds/iterations from the model to use for prediction, specified by passing a two-dimensional vector with the start and end numbers in the sequence (same format as R's \code{seq} - i.e. base-1 indexing, and inclusive of both ends). For example, passing \code{c(1,20)} will predict using the first twenty iterations, while passing \code{c(1,1)} will predict using only the first one. If passing \code{NULL}, will either stop at the best iteration if the model used early stopping, or use all of the iterations (rounds) otherwise. If passing "all", will use all of the rounds regardless of whether the model had early stopping or not. Not applicable to \code{gblinear} booster.} \item{validate_features}{Validate that the feature names in the data match the feature names in the model, and reorder them in the data otherwise. If passing \code{FALSE}, it is assumed that the feature names and types are the same, and come in the same order as in the training data. Be aware that this only applies to column names and not to factor levels in categorical columns. Note that this check might add some sizable latency to the predictions, so it's recommended to disable it for performance-sensitive applications.} \item{...}{Not used.} } \value{ Either a numeric vector (for 1D outputs), numeric matrix (for 2D outputs), numeric array (for 3D and higher), or \code{factor} (for class predictions). See documentation for parameter \code{type} for details about what the output type and shape will be. } \description{ Predict values on data based on XGBoost model. } \examples{ data("ToothGrowth") y <- ToothGrowth$supp x <- ToothGrowth[, -2L] model <- xgboost(x, y, nthreads = 1L, nrounds = 3L, max_depth = 2L) pred_prob <- predict(model, x[1:5, ], type = "response") pred_raw <- predict(model, x[1:5, ], type = "raw") pred_class <- predict(model, x[1:5, ], type = "class") # Relationships between these manual_probs <- 1 / (1 + exp(-pred_raw)) manual_class <- ifelse(manual_probs < 0.5, levels(y)[1], levels(y)[2]) # They should match up to numerical precision round(pred_prob, 6) == round(manual_probs, 6) pred_class == manual_class } xgboost-3.0.0/R-package/man/print.xgb.Booster.Rd000066400000000000000000000013321476511272400213720ustar00rootroot00000000000000% Generated by roxygen2: do not edit by hand % Please edit documentation in R/xgb.Booster.R \name{print.xgb.Booster} \alias{print.xgb.Booster} \title{Print xgb.Booster} \usage{ \method{print}{xgb.Booster}(x, ...)
} \arguments{ \item{x}{An \code{xgb.Booster} object.} \item{...}{Not used.} } \value{ The same \code{x} object, returned invisibly } \description{ Print information about \code{xgb.Booster}. } \examples{ data(agaricus.train, package = "xgboost") train <- agaricus.train bst <- xgb.train( data = xgb.DMatrix(train$data, label = train$label, nthread = 1), nrounds = 2, params = xgb.params( max_depth = 2, nthread = 2, objective = "binary:logistic" ) ) attr(bst, "myattr") <- "memo" print(bst) } xgboost-3.0.0/R-package/man/print.xgb.DMatrix.Rd000066400000000000000000000012401476511272400213230ustar00rootroot00000000000000% Generated by roxygen2: do not edit by hand % Please edit documentation in R/xgb.DMatrix.R \name{print.xgb.DMatrix} \alias{print.xgb.DMatrix} \title{Print xgb.DMatrix} \usage{ \method{print}{xgb.DMatrix}(x, verbose = FALSE, ...) } \arguments{ \item{x}{An xgb.DMatrix object.} \item{verbose}{Whether to print colnames (when present).} \item{...}{Not currently used.} } \description{ Print information about xgb.DMatrix. Currently it displays dimensions and presence of info-fields and colnames. } \examples{ data(agaricus.train, package = "xgboost") dtrain <- with(agaricus.train, xgb.DMatrix(data, label = label, nthread = 2)) dtrain print(dtrain, verbose = TRUE) } xgboost-3.0.0/R-package/man/print.xgb.cv.Rd000066400000000000000000000016461476511272400203750ustar00rootroot00000000000000% Generated by roxygen2: do not edit by hand % Please edit documentation in R/xgb.cv.R \name{print.xgb.cv.synchronous} \alias{print.xgb.cv.synchronous} \title{Print xgb.cv result} \usage{ \method{print}{xgb.cv.synchronous}(x, verbose = FALSE, ...) } \arguments{ \item{x}{An \code{xgb.cv.synchronous} object.} \item{verbose}{Whether to print detailed data.} \item{...}{Passed to \code{data.table.print()}.} } \description{ Prints formatted results of \code{\link[=xgb.cv]{xgb.cv()}}. } \details{ When not verbose, it would only print the evaluation results, including the best iteration (when available). } \examples{ data(agaricus.train, package = "xgboost") train <- agaricus.train cv <- xgb.cv( data = xgb.DMatrix(train$data, label = train$label, nthread = 1), nfold = 5, nrounds = 2, params = xgb.params( max_depth = 2, nthread = 2, objective = "binary:logistic" ) ) print(cv) print(cv, verbose = TRUE) } xgboost-3.0.0/R-package/man/print.xgboost.Rd000066400000000000000000000007531476511272400206710ustar00rootroot00000000000000% Generated by roxygen2: do not edit by hand % Please edit documentation in R/xgboost.R \name{print.xgboost} \alias{print.xgboost} \title{Print info from XGBoost model} \usage{ \method{print}{xgboost}(x, ...) } \arguments{ \item{x}{An XGBoost model object of class \code{xgboost}, as produced by function \code{\link[=xgboost]{xgboost()}}.} \item{...}{Not used.} } \value{ Same object \code{x}, after printing its info. } \description{ Prints basic properties of an XGBoost model object. } xgboost-3.0.0/R-package/man/variable.names.xgb.Booster.Rd000066400000000000000000000013611476511272400231270ustar00rootroot00000000000000% Generated by roxygen2: do not edit by hand % Please edit documentation in R/xgb.Booster.R \name{variable.names.xgb.Booster} \alias{variable.names.xgb.Booster} \title{Get Features Names from Booster} \usage{ \method{variable.names}{xgb.Booster}(object, ...) 
} \arguments{ \item{object}{An \code{xgb.Booster} object.} \item{...}{Not used.} } \description{ Returns the feature / variable / column names from a fitted booster object, which are set automatically during the call to \code{\link[=xgb.train]{xgb.train()}} from the DMatrix names, or which can be set manually through \code{\link[=setinfo]{setinfo()}}. If the object doesn't have feature names, will return \code{NULL}. It is equivalent to calling \code{getinfo(object, "feature_name")}. } xgboost-3.0.0/R-package/man/xgb.Callback.Rd000066400000000000000000000253051476511272400203240ustar00rootroot00000000000000% Generated by roxygen2: do not edit by hand % Please edit documentation in R/callbacks.R \name{xgb.Callback} \alias{xgb.Callback} \title{XGBoost Callback Constructor} \usage{ xgb.Callback( cb_name = "custom_callback", env = new.env(), f_before_training = function(env, model, data, evals, begin_iteration, end_iteration) NULL, f_before_iter = function(env, model, data, evals, iteration) NULL, f_after_iter = function(env, model, data, evals, iteration, iter_feval) NULL, f_after_training = function(env, model, data, evals, iteration, final_feval, prev_cb_res) NULL ) } \arguments{ \item{cb_name}{Name for the callback. If the callback produces some non-NULL result (from executing the function passed under \code{f_after_training}), that result will be added as an R attribute to the resulting booster (or as a named element in the result of CV), with the attribute name specified here. Names of callbacks must be unique - i.e. there cannot be two callbacks with the same name.} \item{env}{An environment object that will be passed to the different functions in the callback. Note that this environment will not be shared with other callbacks.} \item{f_before_training}{A function that will be executed before the training has started. If passing \code{NULL} for this or for the other function inputs, then no function will be executed. If passing a function, it will be called with parameters supplied as non-named arguments matching the function signatures that are shown in the default value for each function argument.} \item{f_before_iter}{A function that will be executed before each boosting round. This function can signal whether the training should be finalized or not, by outputting a value that evaluates to \code{TRUE} - i.e. if the output from the function provided here at a given round is \code{TRUE}, then training will be stopped before the current iteration happens. Return values of \code{NULL} will be interpreted as \code{FALSE}.} \item{f_after_iter}{A function that will be executed after each boosting round. This function can signal whether the training should be finalized or not, by outputting a value that evaluates to \code{TRUE} - i.e. if the output from the function provided here at a given round is \code{TRUE}, then training will be stopped at that round. Return values of \code{NULL} will be interpreted as \code{FALSE}.} \item{f_after_training}{A function that will be executed after training is finished. 
This function can optionally output something non-NULL, which will become part of the R attributes of the booster (assuming one passes \code{keep_extra_attributes=TRUE} to \code{\link[=xgb.train]{xgb.train()}}) under the name supplied for parameter \code{cb_name} in the case of \code{\link[=xgb.train]{xgb.train()}}; or a part of the named elements in the result of \code{\link[=xgb.cv]{xgb.cv()}}.} } \value{ An \code{xgb.Callback} object, which can be passed to \code{\link[=xgb.train]{xgb.train()}} or \code{\link[=xgb.cv]{xgb.cv()}}. } \description{ Constructor for defining the structure of callback functions that can be executed at different stages of model training (before / after training, before / after each boosting iteration). } \details{ Arguments that will be passed to the supplied functions are as follows: \itemize{ \item env The same environment that is passed under argument \code{env}. It may be modified by the functions in order to e.g. keep track of what happens across iterations or similar. This environment is only used by the functions supplied to the callback, and will not be kept after the model fitting function terminates (see parameter \code{f_after_training}). \item model The booster object when using \code{\link[=xgb.train]{xgb.train()}}, or the folds when using \code{\link[=xgb.cv]{xgb.cv()}}. For \code{\link[=xgb.cv]{xgb.cv()}}, folds are a list with a structure as follows: \itemize{ \item \code{dtrain}: The training data for the fold (as an \code{xgb.DMatrix} object). \item \code{bst}: The \code{xgb.Booster} object for the fold. \item \code{evals}: A list containing two DMatrices, with names \code{train} and \code{test} (\code{test} is the held-out data for the fold). \item \code{index}: The indices of the hold-out data for that fold (base-1 indexing), from which the \code{test} entry in \code{evals} was obtained. } This object should \strong{not} be modified in-place in ways that conflict with the training (e.g. resetting the parameters for a training update in a way that resets the number of rounds to zero in order to overwrite rounds). Note that any R attributes that are assigned to the booster during the callback functions will not be kept thereafter as the booster object variable is not re-assigned during training. It is however possible to set C-level attributes of the booster through \code{\link[=xgb.attr]{xgb.attr()}} or \code{\link[=xgb.attributes]{xgb.attributes()}}, which should remain available for the rest of the iterations and after the training is done. For keeping variables across iterations, it's recommended to use \code{env} instead. \item data The data to which the model is being fit, as an \code{xgb.DMatrix} object. Note that, for \code{\link[=xgb.cv]{xgb.cv()}}, this will be the full data, while data for the specific folds can be found in the \code{model} object. \item evals The evaluation data, as passed under argument \code{evals} to \code{\link[=xgb.train]{xgb.train()}}. For \code{\link[=xgb.cv]{xgb.cv()}}, this will always be \code{NULL}. \item begin_iteration Index of the first boosting iteration that will be executed (base-1 indexing). This will typically be '1', but when using training continuation, depending on the parameters for updates, boosting rounds will be continued from where the previous model ended, in which case this will be larger than 1. \item end_iteration Index of the last boosting iteration that will be executed (base-1 indexing, inclusive of this end).
It should match with argument \code{nrounds} passed to \code{\link[=xgb.train]{xgb.train()}} or \code{\link[=xgb.cv]{xgb.cv()}}. Note that boosting might be interrupted before reaching this last iteration, for example by using the early stopping callback \code{\link[=xgb.cb.early.stop]{xgb.cb.early.stop()}}. \item iteration Index of the iteration number that is being executed (first iteration will be the same as parameter \code{begin_iteration}, then next one will add +1, and so on). \item iter_feval Evaluation metrics for \code{evals} that were supplied, either determined by the objective, or by parameter \code{custom_metric}. For \code{\link[=xgb.train]{xgb.train()}}, this will be a named vector with one entry per element in \code{evals}, where the names are determined as 'evals name' + '-' + 'metric name' - for example, if \code{evals} contains an entry named "tr" and the metric is "rmse", this will be a one-element vector with name "tr-rmse". For \code{\link[=xgb.cv]{xgb.cv()}}, this will be a 2d matrix with dimensions \verb{[length(evals), nfolds]}, where the row names will follow the same naming logic as the one-dimensional vector that is passed in \code{\link[=xgb.train]{xgb.train()}}. Note that, internally, the built-in callbacks such as \link{xgb.cb.print.evaluation} summarize this table by calculating the row-wise means and standard deviations. \item final_feval The evaluation results after the last boosting round is executed (same format as \code{iter_feval}, and will be the exact same input as passed under \code{iter_feval} to the last round that is executed during model fitting). \item prev_cb_res Result from a previous run of a callback sharing the same name (as given by parameter \code{cb_name}) when conducting training continuation, if there was any in the booster R attributes. Sometimes, one might want to append the new results to the previous one, and this will be done automatically by the built-in callbacks such as \link{xgb.cb.evaluation.log}, which will append the new rows to the previous table. If no such previous callback result is available (which it never will when fitting a model from start instead of updating an existing model), this will be \code{NULL}. For \code{\link[=xgb.cv]{xgb.cv()}}, which doesn't support training continuation, this will always be \code{NULL}. } The following names (\code{cb_name} values) are reserved for internal callbacks: \itemize{ \item print_evaluation \item evaluation_log \item reset_parameters \item early_stop \item save_model \item cv_predict \item gblinear_history } The following names are reserved for other non-callback attributes: \itemize{ \item names \item class \item call \item params \item niter \item nfeatures \item folds } When using the built-in early stopping callback (\link{xgb.cb.early.stop}), said callback will always be executed before the others, as it sets some booster C-level attributes that other callbacks might also use. Otherwise, the order of execution will match with the order in which the callbacks are passed to the model fitting function. } \examples{ # Example constructing a custom callback that calculates # squared error on the training data (no separate test set), # and outputs the per-iteration results. 
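# (In this sketch, 'f_before_training' allocates a log vector in the shared
# environment, 'f_after_iter' fills one entry per boosting round, and
# 'f_after_training' returns the vector so that it is stored as the
# attribute named after 'cb_name', i.e. "ssq".)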
ssq_callback <- xgb.Callback( cb_name = "ssq", f_before_training = function(env, model, data, evals, begin_iteration, end_iteration) { # A vector to keep track of a number at each iteration env$logs <- rep(NA_real_, end_iteration - begin_iteration + 1) }, f_after_iter = function(env, model, data, evals, iteration, iter_feval) { # This calculates the sum of squared errors on the training data. # Note that this can be better done by passing an 'evals' entry, # but this demonstrates a way in which callbacks can be structured. pred <- predict(model, data) err <- pred - getinfo(data, "label") sq_err <- sum(err^2) env$logs[iteration] <- sq_err cat( sprintf( "Squared error at iteration \%d: \%.2f\n", iteration, sq_err ) ) # A return value of 'TRUE' here would signal to finalize the training return(FALSE) }, f_after_training = function(env, model, data, evals, iteration, final_feval, prev_cb_res) { return(env$logs) } ) data(mtcars) y <- mtcars$mpg x <- as.matrix(mtcars[, -1]) dm <- xgb.DMatrix(x, label = y, nthread = 1) model <- xgb.train( data = dm, params = xgb.params(objective = "reg:squarederror", nthread = 1), nrounds = 5, callbacks = list(ssq_callback) ) # Result from 'f_after_iter' will be available as an attribute attributes(model)$ssq } \seealso{ Built-in callbacks: \itemize{ \item \link{xgb.cb.print.evaluation} \item \link{xgb.cb.evaluation.log} \item \link{xgb.cb.reset.parameters} \item \link{xgb.cb.early.stop} \item \link{xgb.cb.save.model} \item \link{xgb.cb.cv.predict} \item \link{xgb.cb.gblinear.history} } } xgboost-3.0.0/R-package/man/xgb.DMatrix.Rd000066400000000000000000000175521476511272400202030ustar00rootroot00000000000000% Generated by roxygen2: do not edit by hand % Please edit documentation in R/xgb.DMatrix.R \name{xgb.DMatrix} \alias{xgb.DMatrix} \alias{xgb.QuantileDMatrix} \title{Construct xgb.DMatrix object} \usage{ xgb.DMatrix( data, label = NULL, weight = NULL, base_margin = NULL, missing = NA, silent = FALSE, feature_names = colnames(data), feature_types = NULL, nthread = NULL, group = NULL, qid = NULL, label_lower_bound = NULL, label_upper_bound = NULL, feature_weights = NULL, data_split_mode = "row", ... ) xgb.QuantileDMatrix( data, label = NULL, weight = NULL, base_margin = NULL, missing = NA, feature_names = colnames(data), feature_types = NULL, nthread = NULL, group = NULL, qid = NULL, label_lower_bound = NULL, label_upper_bound = NULL, feature_weights = NULL, ref = NULL, max_bin = NULL ) } \arguments{ \item{data}{Data from which to create a DMatrix, which can then be used for fitting models or for getting predictions out of a fitted model. Supported input types are as follows: \itemize{ \item \code{matrix} objects, with types \code{numeric}, \code{integer}, or \code{logical}. \item \code{data.frame} objects, with columns of types \code{numeric}, \code{integer}, \code{logical}, or \code{factor}. } Note that xgboost uses base-0 encoding for categorical types, hence \code{factor} types (which use base-1 encoding) will be converted inside the function call. Be aware that the encoding used for \code{factor} types is not kept as part of the model, so in subsequent calls to \code{predict}, it is the user's responsibility to ensure that factor columns have the same levels as the ones from which the DMatrix was constructed. Other column types are not supported. \itemize{ \item CSR matrices, as class \code{dgRMatrix} from package \code{Matrix}. \item CSC matrices, as class \code{dgCMatrix} from package \code{Matrix}.
} These are \strong{not} supported by \code{xgb.QuantileDMatrix}. \itemize{ \item XGBoost's own binary format for DMatrices, as produced by \code{\link[=xgb.DMatrix.save]{xgb.DMatrix.save()}}. \item Single-row CSR matrices, as class \code{dsparseVector} from package \code{Matrix}, which is interpreted as a single row (only when making predictions from a fitted model). }} \item{label}{Label of the training data. For classification problems, should be passed encoded as integers with numeration starting at zero.} \item{weight}{Weight for each instance. Note that, for ranking task, weights are per-group. In ranking task, one weight is assigned to each group (not each data point). This is because we only care about the relative ordering of data points within each group, so it doesn't make sense to assign weights to individual data points.} \item{base_margin}{Base margin used for boosting from existing model. In the case of multi-output models, one can also pass multi-dimensional base_margin.} \item{missing}{A float value that represents missing values in data (not used when creating DMatrix from text files). It is useful to change when a zero, infinite, or some other extreme value represents missing values in data.} \item{silent}{Whether to suppress printing an informational message after loading from a file.} \item{feature_names}{Set names for features. Overrides column names in data frame and matrix. Note: columns are not referenced by name when calling \code{predict}, so the column order there must be the same as in the DMatrix construction, regardless of the column names.} \item{feature_types}{Set types for features. If \code{data} is a \code{data.frame} and \code{feature_types} is not supplied, feature types will be deduced automatically from the column types. Otherwise, one can pass a character vector with the same length as the number of columns in \code{data}, with the following possible values: \itemize{ \item "c", which represents categorical columns. \item "q", which represents numeric columns. \item "int", which represents integer columns. \item "i", which represents logical (boolean) columns. } Note that, while categorical types are treated differently from the rest for model fitting purposes, the other types do not influence the generated model, but have effects in other functionalities such as feature importances. \strong{Important}: Categorical features, if specified manually through \code{feature_types}, must be encoded as integers with numeration starting at zero, and the same encoding needs to be applied when passing data to \code{\link[=predict]{predict()}}. Even if passing \code{factor} types, the encoding will not be saved, so make sure that \code{factor} columns passed to \code{predict} have the same \code{levels}.} \item{nthread}{Number of threads used for creating DMatrix.} \item{group}{Group size for all ranking groups.} \item{qid}{Query ID for data samples, used for ranking.} \item{label_lower_bound}{Lower bound for survival training.} \item{label_upper_bound}{Upper bound for survival training.} \item{feature_weights}{Set feature weights for column sampling.} \item{data_split_mode}{Not used yet. This parameter is for distributed training, which is not yet available for the R package.} \item{...}{Not used. Some arguments that were part of this function in previous XGBoost versions are currently deprecated or have been renamed. If a deprecated or renamed argument is passed, will throw a warning (by default) and use its current equivalent instead.
This warning will become an error if using the \link[=xgboost-options]{'strict mode' option}. If some additional argument is passed that is neither a current function argument nor a deprecated or renamed argument, a warning or error will be thrown depending on the 'strict mode' option. \bold{Important:} \code{...} will be removed in a future version, and all the current deprecation warnings will become errors. Please use only arguments that form part of the function signature.} \item{ref}{The training dataset that provides quantile information, needed when creating validation/test dataset with \code{\link[=xgb.QuantileDMatrix]{xgb.QuantileDMatrix()}}. Supplying the training DMatrix as a reference means that the same quantization applied to the training data is applied to the validation/test data.} \item{max_bin}{The number of histogram bins; should be consistent with the training parameter \code{max_bin}. This is only supported when constructing a QuantileDMatrix.} } \value{ An 'xgb.DMatrix' object. If calling \code{xgb.QuantileDMatrix}, it will have additional subclass \code{xgb.QuantileDMatrix}. } \description{ Construct an 'xgb.DMatrix' object from a given data source, which can then be passed to functions such as \code{\link[=xgb.train]{xgb.train()}} or \code{\link[=predict]{predict()}}. } \details{ Function \code{xgb.QuantileDMatrix()} will construct a DMatrix with quantization for the histogram method already applied to it, which can be used to reduce memory usage (compared to using a regular DMatrix first and then creating a quantization out of it) when using the histogram method (\code{tree_method = "hist"}, which is the default algorithm), but is not usable for the sorted-indices method (\code{tree_method = "exact"}), nor for the approximate method (\code{tree_method = "approx"}). Note that DMatrix objects are not serializable through R functions such as \code{\link[=saveRDS]{saveRDS()}} or \code{\link[=save]{save()}}. If a DMatrix gets serialized and then de-serialized (for example, when saving data in an R session or caching chunks in an Rmd file), the resulting object will not be usable anymore and will need to be reconstructed from the original source of data. } \examples{ data(agaricus.train, package = "xgboost") ## Keep the number of threads to 1 for examples nthread <- 1 data.table::setDTthreads(nthread) dtrain <- with( agaricus.train, xgb.DMatrix(data, label = label, nthread = nthread) ) fname <- file.path(tempdir(), "xgb.DMatrix.data") xgb.DMatrix.save(dtrain, fname) dtrain <- xgb.DMatrix(fname, nthread = 1) } xgboost-3.0.0/R-package/man/xgb.DMatrix.hasinfo.Rd000066400000000000000000000016631476511272400216270ustar00rootroot00000000000000% Generated by roxygen2: do not edit by hand % Please edit documentation in R/xgb.DMatrix.R \name{xgb.DMatrix.hasinfo} \alias{xgb.DMatrix.hasinfo} \title{Check whether DMatrix object has a field} \usage{ xgb.DMatrix.hasinfo(object, info) } \arguments{ \item{object}{The DMatrix object to check for the given \code{info} field.} \item{info}{The field to check for presence or absence in \code{object}.} } \description{ Checks whether an xgb.DMatrix object has a given field assigned to it, such as weights, labels, etc.
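A hypothetical minimal sketch of a typical use - guarding access to an optional field: \preformatted{w <- if (xgb.DMatrix.hasinfo(dm, "weight")) getinfo(dm, "weight") else NULL
}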
} \examples{ x <- matrix(1:10, nrow = 5) dm <- xgb.DMatrix(x, nthread = 1) # 'dm' so far does not have any fields set xgb.DMatrix.hasinfo(dm, "label") # Fields can be added after construction setinfo(dm, "label", 1:5) xgb.DMatrix.hasinfo(dm, "label") } \seealso{ \code{\link[=xgb.DMatrix]{xgb.DMatrix()}}, \code{\link[=getinfo.xgb.DMatrix]{getinfo.xgb.DMatrix()}}, \code{\link[=setinfo.xgb.DMatrix]{setinfo.xgb.DMatrix()}} } xgboost-3.0.0/R-package/man/xgb.DMatrix.save.Rd000066400000000000000000000012761476511272400211360ustar00rootroot00000000000000% Generated by roxygen2: do not edit by hand % Please edit documentation in R/xgb.DMatrix.save.R \name{xgb.DMatrix.save} \alias{xgb.DMatrix.save} \title{Save xgb.DMatrix object to binary file} \usage{ xgb.DMatrix.save(dmatrix, fname) } \arguments{ \item{dmatrix}{The \code{xgb.DMatrix} object.} \item{fname}{The name of the file to write.} } \description{ Save xgb.DMatrix object to binary file } \examples{ \dontshow{RhpcBLASctl::omp_set_num_threads(1)} data(agaricus.train, package = "xgboost") dtrain <- with(agaricus.train, xgb.DMatrix(data, label = label, nthread = 2)) fname <- file.path(tempdir(), "xgb.DMatrix.data") xgb.DMatrix.save(dtrain, fname) dtrain <- xgb.DMatrix(fname, nthread = 1) } xgboost-3.0.0/R-package/man/xgb.DataBatch.Rd000066400000000000000000000116321476511272400204410ustar00rootroot00000000000000% Generated by roxygen2: do not edit by hand % Please edit documentation in R/xgb.DMatrix.R \name{xgb.DataBatch} \alias{xgb.DataBatch} \title{Structure for Data Batches} \usage{ xgb.DataBatch( data, label = NULL, weight = NULL, base_margin = NULL, feature_names = colnames(data), feature_types = NULL, group = NULL, qid = NULL, label_lower_bound = NULL, label_upper_bound = NULL, feature_weights = NULL ) } \arguments{ \item{data}{The data belonging to this batch. Note that not all of the input types supported by \code{\link[=xgb.DMatrix]{xgb.DMatrix()}} are possible to pass here. Supported types are: \itemize{ \item \code{matrix}, with types \code{numeric}, \code{integer}, and \code{logical}. Note that for types \code{integer} and \code{logical}, missing values might not be automatically recognized as such - see the documentation for parameter \code{missing} in \code{\link[=xgb.ExtMemDMatrix]{xgb.ExtMemDMatrix()}} for details on this. \item \code{data.frame}, with the same types as supported by 'xgb.DMatrix' and same conversions applied to it. See the documentation for parameter \code{data} in \code{\link[=xgb.DMatrix]{xgb.DMatrix()}} for details on it. \item CSR matrices, as class \code{dgRMatrix} from package "Matrix". }} \item{label}{Label of the training data. For classification problems, should be passed encoded as integers with numeration starting at zero.} \item{weight}{Weight for each instance. Note that, for ranking task, weights are per-group. In ranking task, one weight is assigned to each group (not each data point). This is because we only care about the relative ordering of data points within each group, so it doesn't make sense to assign weights to individual data points.} \item{base_margin}{Base margin used for boosting from existing model. In the case of multi-output models, one can also pass multi-dimensional base_margin.} \item{feature_names}{Set names for features. Overrides column names in data frame and matrix.
Note: columns are not referenced by name when calling \code{predict}, so the column order there must be the same as in the DMatrix construction, regardless of the column names.} \item{feature_types}{Set types for features. If \code{data} is a \code{data.frame} and \code{feature_types} is not supplied, feature types will be deduced automatically from the column types. Otherwise, one can pass a character vector with the same length as the number of columns in \code{data}, with the following possible values: \itemize{ \item "c", which represents categorical columns. \item "q", which represents numeric columns. \item "int", which represents integer columns. \item "i", which represents logical (boolean) columns. } Note that, while categorical types are treated differently from the rest for model fitting purposes, the other types do not influence the generated model, but have effects in other functionalities such as feature importances. \strong{Important}: Categorical features, if specified manually through \code{feature_types}, must be encoded as integers with numeration starting at zero, and the same encoding needs to be applied when passing data to \code{\link[=predict]{predict()}}. Even if passing \code{factor} types, the encoding will not be saved, so make sure that \code{factor} columns passed to \code{predict} have the same \code{levels}.} \item{group}{Group size for all ranking groups.} \item{qid}{Query ID for data samples, used for ranking.} \item{label_lower_bound}{Lower bound for survival training.} \item{label_upper_bound}{Upper bound for survival training.} \item{feature_weights}{Set feature weights for column sampling.} } \value{ An object of class \code{xgb.DataBatch}, which is just a list containing the data and parameters passed here. It does \strong{not} inherit from \code{xgb.DMatrix}. } \description{ Helper function to supply data in batches of a data iterator when constructing a DMatrix from external memory through \code{\link[=xgb.ExtMemDMatrix]{xgb.ExtMemDMatrix()}} or through \code{\link[=xgb.QuantileDMatrix.from_iterator]{xgb.QuantileDMatrix.from_iterator()}}. This function is \strong{only} meant to be called inside of a callback function (which is passed as argument to function \code{\link[=xgb.DataIter]{xgb.DataIter()}} to construct a data iterator) when constructing a DMatrix through external memory - otherwise, one should call \code{\link[=xgb.DMatrix]{xgb.DMatrix()}} or \code{\link[=xgb.QuantileDMatrix]{xgb.QuantileDMatrix()}}. The object that results from calling this function directly is \strong{not} like an \code{xgb.DMatrix} - i.e. cannot be used to train a model, nor to get predictions - its only possible usage is to supply data to an iterator, from which a DMatrix is then constructed. For more information and for example usage, see the documentation for \code{\link[=xgb.ExtMemDMatrix]{xgb.ExtMemDMatrix()}}. } \seealso{ \code{\link[=xgb.DataIter]{xgb.DataIter()}}, \code{\link[=xgb.ExtMemDMatrix]{xgb.ExtMemDMatrix()}}. } xgboost-3.0.0/R-package/man/xgb.DataIter.Rd000066400000000000000000000047301476511272400203240ustar00rootroot00000000000000% Generated by roxygen2: do not edit by hand % Please edit documentation in R/xgb.DMatrix.R \name{xgb.DataIter} \alias{xgb.DataIter} \title{XGBoost Data Iterator} \usage{ xgb.DataIter(env = new.env(), f_next, f_reset) } \arguments{ \item{env}{An R environment to pass to the callback functions supplied here, which can be used to keep track of variables to determine how to handle the batches.
For example, one might want to keep track of an iteration number in this environment in order to know which part of the data to pass next.} \item{f_next}{\verb{function(env)} which is responsible for: \itemize{ \item Accessing or retrieving the next batch of data in the iterator. \item Supplying this data by calling function \code{\link[=xgb.DataBatch]{xgb.DataBatch()}} on it and returning the result. \item Keeping track of where in the sequence of batches the iterator is or will go next, which can for example be done by modifying variables in the \code{env} variable that is passed here. \item Signaling whether there are more batches to be consumed or not, by returning \code{NULL} when the stream of data ends (all batches in the iterator have been consumed), or the result from calling \code{\link[=xgb.DataBatch]{xgb.DataBatch()}} when there are more batches in line to be consumed. }} \item{f_reset}{\verb{function(env)} which is responsible for resetting the data iterator (i.e. taking it back to the first batch, called before and after the sequence of batches has been consumed). Note that, after resetting the iterator, the batches will be accessed again, so the same data (and in the same order) must be passed in subsequent iterations.} } \value{ An \code{xgb.DataIter} object, containing the same inputs supplied here, which can then be passed to \code{\link[=xgb.ExtMemDMatrix]{xgb.ExtMemDMatrix()}}. } \description{ Interface to create a custom data iterator in order to construct a DMatrix from external memory. This function is responsible for generating an R object structure containing callback functions and an environment shared with them. The output structure from this function is then meant to be passed to \code{\link[=xgb.ExtMemDMatrix]{xgb.ExtMemDMatrix()}}, which will consume the data and create a DMatrix from it by executing the callback functions. For more information, and for a usage example, see the documentation for \code{\link[=xgb.ExtMemDMatrix]{xgb.ExtMemDMatrix()}}. } \seealso{ \code{\link[=xgb.ExtMemDMatrix]{xgb.ExtMemDMatrix()}}, \code{\link[=xgb.DataBatch]{xgb.DataBatch()}}. } xgboost-3.0.0/R-package/man/xgb.ExtMemDMatrix.Rd000066400000000000000000000114611476511272400213160ustar00rootroot00000000000000% Generated by roxygen2: do not edit by hand % Please edit documentation in R/xgb.DMatrix.R \name{xgb.ExtMemDMatrix} \alias{xgb.ExtMemDMatrix} \title{DMatrix from External Data} \usage{ xgb.ExtMemDMatrix( data_iterator, cache_prefix = tempdir(), missing = NA, nthread = NULL ) } \arguments{ \item{data_iterator}{A data iterator structure as returned by \code{\link[=xgb.DataIter]{xgb.DataIter()}}, which includes an environment shared between function calls, and functions to access the data in batches on-demand.} \item{cache_prefix}{The path of the cache file; the caller must initialize all the directories in this path.} \item{missing}{A float value that represents missing values in the data. Note that, while functions like \code{\link[=xgb.DMatrix]{xgb.DMatrix()}} can take a generic \code{NA} and interpret it correctly for different types like \code{numeric} and \code{integer}, if an \code{NA} value is passed here, it will not be adapted for different input types.
For example, in R \code{integer} types, missing values are represented by the integer number \code{-2147483648} (since machine 'integer' types do not have an inherent 'NA' value) - hence, if one passes \code{NA}, which is interpreted as a floating-point NaN by \code{\link[=xgb.ExtMemDMatrix]{xgb.ExtMemDMatrix()}} and by \code{\link[=xgb.QuantileDMatrix.from_iterator]{xgb.QuantileDMatrix.from_iterator()}}, these integer missing values will not be treated as missing. This should not pose any problem for \code{numeric} types, since they do have an inherent NaN value.} \item{nthread}{Number of threads used for creating the DMatrix.} } \value{ An 'xgb.DMatrix' object, with subclass 'xgb.ExtMemDMatrix', in which the data is not held internally but accessed through the iterator when needed. } \description{ Create a special type of XGBoost 'DMatrix' object from external data supplied by an \code{\link[=xgb.DataIter]{xgb.DataIter()}} object, potentially passed in batches from a bigger set that might not fit entirely in memory. The data supplied by the iterator is accessed on-demand as needed, multiple times, without being concatenated, but note that fields like 'label' \strong{will} be concatenated from multiple calls to the data iterator. For more information, see the guide 'Using XGBoost External Memory Version': \url{https://xgboost.readthedocs.io/en/stable/tutorials/external_memory.html} } \details{ Be aware that construction of external data DMatrices \bold{will cache data on disk} in a compressed format, under the path supplied in \code{cache_prefix}. External data is not supported for the exact tree method. } \examples{ data(mtcars) # This custom environment will be passed to the iterator # functions at each call. It is up to the user to keep # track of the iteration number in this environment. iterator_env <- as.environment( list( iter = 0, x = mtcars[, -1], y = mtcars[, 1] ) ) # Data is passed in two batches. # In this example, batches are obtained by subsetting the 'x' variable. # This is not advantageous to do, since the data is already loaded in memory # and can be passed in full in one go, but there can be situations in which # only a subset of the data will fit in the computer's memory, and it can # be loaded in batches that are accessed one-at-a-time only. iterator_next <- function(iterator_env) { curr_iter <- iterator_env[["iter"]] if (curr_iter >= 2) { # there are only two batches, so this signals end of the stream return(NULL) } if (curr_iter == 0) { x_batch <- iterator_env[["x"]][1:16, ] y_batch <- iterator_env[["y"]][1:16] } else { x_batch <- iterator_env[["x"]][17:32, ] y_batch <- iterator_env[["y"]][17:32] } on.exit({ iterator_env[["iter"]] <- curr_iter + 1 }) # Function 'xgb.DataBatch' must be called manually # at each batch with all the appropriate attributes, # such as feature names and feature types.
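# (In this example the batches are 'data.frame' subsets that already
# carry column names, so nothing else needs to be passed. As a purely
# hypothetical illustration, plain-matrix batches without column names
# could supply them explicitly, e.g.:
# return(xgb.DataBatch(data = x_batch, label = y_batch,
#                      feature_names = colnames(mtcars)[-1])) )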
return(xgb.DataBatch(data = x_batch, label = y_batch)) } # This moves the iterator back to its beginning iterator_reset <- function(iterator_env) { iterator_env[["iter"]] <- 0 } data_iterator <- xgb.DataIter( env = iterator_env, f_next = iterator_next, f_reset = iterator_reset ) cache_prefix <- tempdir() # DMatrix will be constructed from the iterator's batches dm <- xgb.ExtMemDMatrix(data_iterator, cache_prefix, nthread = 1) # After construction, can be used as a regular DMatrix params <- xgb.params(nthread = 1, objective = "reg:squarederror") model <- xgb.train(data = dm, nrounds = 2, params = params) # Predictions can also be called on it, and should be the same # as if the data were passed differently. pred_dm <- predict(model, dm) pred_mat <- predict(model, as.matrix(mtcars[, -1])) } \seealso{ \code{\link[=xgb.DataIter]{xgb.DataIter()}}, \code{\link[=xgb.DataBatch]{xgb.DataBatch()}}, \code{\link[=xgb.QuantileDMatrix.from_iterator]{xgb.QuantileDMatrix.from_iterator()}} } xgboost-3.0.0/R-package/man/xgb.QuantileDMatrix.from_iterator.Rd000066400000000000000000000065111476511272400245540ustar00rootroot00000000000000% Generated by roxygen2: do not edit by hand % Please edit documentation in R/xgb.DMatrix.R \name{xgb.QuantileDMatrix.from_iterator} \alias{xgb.QuantileDMatrix.from_iterator} \title{QuantileDMatrix from External Data} \usage{ xgb.QuantileDMatrix.from_iterator( data_iterator, missing = NA, nthread = NULL, ref = NULL, max_bin = NULL ) } \arguments{ \item{data_iterator}{A data iterator structure as returned by \code{\link[=xgb.DataIter]{xgb.DataIter()}}, which includes an environment shared between function calls, and functions to access the data in batches on-demand.} \item{missing}{A float value that represents missing values in the data. Note that, while functions like \code{\link[=xgb.DMatrix]{xgb.DMatrix()}} can take a generic \code{NA} and interpret it correctly for different types like \code{numeric} and \code{integer}, if an \code{NA} value is passed here, it will not be adapted for different input types. For example, in R \code{integer} types, missing values are represented by the integer number \code{-2147483648} (since machine 'integer' types do not have an inherent 'NA' value) - hence, if one passes \code{NA}, which is interpreted as a floating-point NaN by \code{\link[=xgb.ExtMemDMatrix]{xgb.ExtMemDMatrix()}} and by \code{\link[=xgb.QuantileDMatrix.from_iterator]{xgb.QuantileDMatrix.from_iterator()}}, these integer missing values will not be treated as missing. This should not pose any problem for \code{numeric} types, since they do have an inherent NaN value.} \item{nthread}{Number of threads used for creating the DMatrix.} \item{ref}{The training dataset that provides quantile information, needed when creating a validation/test dataset with \code{\link[=xgb.QuantileDMatrix]{xgb.QuantileDMatrix()}}. Supplying the training DMatrix as a reference means that the same quantisation applied to the training data is applied to the validation/test data.} \item{max_bin}{The number of histogram bins; should be consistent with the training parameter \code{max_bin}. This is only supported when constructing a QuantileDMatrix.} } \value{ An 'xgb.DMatrix' object, with subclass 'xgb.QuantileDMatrix'.
} \description{ Create an \code{xgb.QuantileDMatrix} object (the exact same class as would be returned by calling function \code{\link[=xgb.QuantileDMatrix]{xgb.QuantileDMatrix()}}, with the same advantages and limitations) from external data supplied by \code{\link[=xgb.DataIter]{xgb.DataIter()}}, potentially passed in batches from a bigger set that might not fit entirely in memory, the same way as \code{\link[=xgb.ExtMemDMatrix]{xgb.ExtMemDMatrix()}}. Note that, while external data will only be loaded through the iterator (thus the full data might not be held entirely in-memory), the quantized representation of the data will get created in-memory, being concatenated from multiple calls to the data iterator. The quantized version is typically lighter than the original data, so there might be cases in which this representation could potentially fit in memory even if the full data does not. For more information, see the guide 'Using XGBoost External Memory Version': \url{https://xgboost.readthedocs.io/en/stable/tutorials/external_memory.html} } \seealso{ \code{\link[=xgb.DataIter]{xgb.DataIter()}}, \code{\link[=xgb.DataBatch]{xgb.DataBatch()}}, \code{\link[=xgb.ExtMemDMatrix]{xgb.ExtMemDMatrix()}}, \code{\link[=xgb.QuantileDMatrix]{xgb.QuantileDMatrix()}} } xgboost-3.0.0/R-package/man/xgb.attr.Rd000066400000000000000000000066401476511272400176030ustar00rootroot00000000000000% Generated by roxygen2: do not edit by hand % Please edit documentation in R/xgb.Booster.R \name{xgb.attr} \alias{xgb.attr} \alias{xgb.attr<-} \alias{xgb.attributes} \alias{xgb.attributes<-} \title{Accessors for serializable attributes of a model} \usage{ xgb.attr(object, name) xgb.attr(object, name) <- value xgb.attributes(object) xgb.attributes(object) <- value } \arguments{ \item{object}{Object of class \code{xgb.Booster}. \strong{Will be modified in-place} when assigning to it.} \item{name}{A non-empty character string specifying which attribute is to be accessed.} \item{value}{For \verb{xgb.attr<-}, a value of an attribute; for \verb{xgb.attributes<-}, it is a list (or an object coercible to a list) with the names of attributes to set and the elements corresponding to attribute values. Non-character values are converted to character. When an attribute value is not a scalar, only the first index is used. Use \code{NULL} to remove an attribute.} } \value{ \itemize{ \item \code{xgb.attr()} returns either a string value of an attribute or \code{NULL} if an attribute wasn't stored in a model. \item \code{xgb.attributes()} returns a list of all attributes stored in a model or \code{NULL} if a model has no stored attributes. } } \description{ These methods allow manipulating the key-value attribute strings of an XGBoost model. } \details{ The primary purpose of XGBoost model attributes is to store some metadata about the model. Note that they are a separate concept from the object attributes in R. Specifically, they refer to key-value strings that can be attached to an XGBoost model, stored together with the model's binary representation, and accessed later (from R or any other interface). In contrast, any R attribute assigned to an R object of \code{xgb.Booster} class would not be saved by \code{\link[=xgb.save]{xgb.save()}} because an XGBoost model is an external memory object and its serialization is handled externally. Also, setting an attribute that has the same name as one of XGBoost's parameters wouldn't change the value of that parameter for a model.
Use \code{\link[=xgb.model.parameters<-]{xgb.model.parameters<-()}} to set or change model parameters. The \verb{xgb.attributes<-} setter either updates existing attributes or adds one or several new ones, but it doesn't delete the other existing attributes. Important: since this modifies the booster's C object, semantics for assignment here will differ from R's, as any object reference to the same booster will be modified too, while assignment of R attributes through \verb{attributes(model)$my_attr <- value} will follow the usual copy-on-write R semantics (see \code{\link[=xgb.copy.Booster]{xgb.copy.Booster()}} for an example of these behaviors). } \examples{ data(agaricus.train, package = "xgboost") train <- agaricus.train bst <- xgb.train( data = xgb.DMatrix(train$data, label = train$label, nthread = 1), nrounds = 2, params = xgb.params( max_depth = 2, nthread = 2, objective = "binary:logistic" ) ) xgb.attr(bst, "my_attribute") <- "my attribute value" print(xgb.attr(bst, "my_attribute")) xgb.attributes(bst) <- list(a = 123, b = "abc") fname <- file.path(tempdir(), "xgb.ubj") xgb.save(bst, fname) bst1 <- xgb.load(fname) print(xgb.attr(bst1, "my_attribute")) print(xgb.attributes(bst1)) # deletion: xgb.attr(bst1, "my_attribute") <- NULL print(xgb.attributes(bst1)) xgb.attributes(bst1) <- list(a = NULL, b = NULL) print(xgb.attributes(bst1)) } xgboost-3.0.0/R-package/man/xgb.cb.cv.predict.Rd000066400000000000000000000027551476511272400212560ustar00rootroot00000000000000% Generated by roxygen2: do not edit by hand % Please edit documentation in R/callbacks.R \name{xgb.cb.cv.predict} \alias{xgb.cb.cv.predict} \title{Callback for returning cross-validation based predictions} \usage{ xgb.cb.cv.predict(save_models = FALSE, outputmargin = FALSE) } \arguments{ \item{save_models}{A flag for whether to save the folds' models.} \item{outputmargin}{Whether to save margin predictions (same effect as passing this parameter to \link{predict.xgb.Booster}).} } \value{ An \code{xgb.Callback} object, which can be passed to \code{\link[=xgb.cv]{xgb.cv()}}, but \strong{not} to \code{\link[=xgb.train]{xgb.train()}}. } \description{ This callback function saves predictions for all of the test folds, and also allows saving the folds' models. } \details{ Predictions are saved inside of the \code{pred} element, which is either a vector or a matrix, depending on the number of prediction outputs per data row. The order of predictions corresponds to the order of rows in the original dataset. Note that when a custom \code{folds} list is provided in \code{\link[=xgb.cv]{xgb.cv()}}, the predictions would only be returned properly when this list is a non-overlapping list of k sets of indices, as in a standard k-fold CV. The predictions would not be meaningful when user-provided folds have overlapping indices as in, e.g., random sampling splits. When some of the indices in the training dataset are not included in user-provided \code{folds}, their prediction value would be \code{NA}.
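As a minimal usage sketch (assuming an \code{xgb.DMatrix} named \code{dm} with a numeric label has already been constructed), the out-of-fold predictions can be read off the \code{cv_predict} element of the \code{\link[=xgb.cv]{xgb.cv()}} result:

\preformatted{cv <- xgb.cv(
  params = xgb.params(nthread = 1, objective = "reg:squarederror"),
  data = dm, nrounds = 5, nfold = 3, prediction = TRUE
)
head(cv$cv_predict$pred)  # ordered like the rows of the original data
}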
} xgboost-3.0.0/R-package/man/xgb.cb.early.stop.Rd000066400000000000000000000046651476511272400213150ustar00rootroot00000000000000% Generated by roxygen2: do not edit by hand % Please edit documentation in R/callbacks.R \name{xgb.cb.early.stop} \alias{xgb.cb.early.stop} \title{Callback to activate early stopping} \usage{ xgb.cb.early.stop( stopping_rounds, maximize = FALSE, metric_name = NULL, verbose = TRUE, save_best = FALSE ) } \arguments{ \item{stopping_rounds}{The number of rounds with no improvement in the evaluation metric in order to stop the training.} \item{maximize}{Whether to maximize the evaluation metric.} \item{metric_name}{The name of an evaluation column to use as a criterion for early stopping. If not set, the last column would be used. Let's say the test data in \code{evals} was labelled as \code{dtest}, and one wants to use the AUC in test data for early stopping regardless of where it is in the \code{evals}, then one of the following would need to be set: \code{metric_name = 'dtest-auc'} or \code{metric_name = 'dtest_auc'}. All dash '-' characters in metric names are considered equivalent to '_'.} \item{verbose}{Whether to print the early stopping information.} \item{save_best}{Whether training should return the best model or the last model. If set to \code{TRUE}, it will only keep the boosting rounds up to the detected best iteration, discarding the ones that come after. This parameter is not supported by the \code{xgb.cv} function and the \code{gblinear} booster yet.} } \value{ An \code{xgb.Callback} object, which can be passed to \code{\link[=xgb.train]{xgb.train()}} or \code{\link[=xgb.cv]{xgb.cv()}}. } \description{ This callback function determines the condition for early stopping. The following attributes are assigned to the booster's object: \itemize{ \item \code{best_score} the evaluation score at the best iteration \item \code{best_iteration} at which boosting iteration the best score has occurred (0-based index for interoperability of binary models) } The same values are also stored as R attributes as a result of the callback, plus an additional attribute \code{stopped_by_max_rounds} which indicates whether an early stopping by the \code{stopping_rounds} condition occurred. Note that the \code{best_iteration} that is stored under R attributes will follow base-1 indexing, so it will be larger by '1' than the C-level 'best_iteration' that is accessed through \code{\link[=xgb.attr]{xgb.attr()}} or \code{\link[=xgb.attributes]{xgb.attributes()}}. At least one dataset is required in \code{evals} for early stopping to work. } xgboost-3.0.0/R-package/man/xgb.cb.evaluation.log.Rd000066400000000000000000000014771476511272400221460ustar00rootroot00000000000000% Generated by roxygen2: do not edit by hand % Please edit documentation in R/callbacks.R \name{xgb.cb.evaluation.log} \alias{xgb.cb.evaluation.log} \title{Callback for logging the evaluation history} \usage{ xgb.cb.evaluation.log() } \value{ An \code{xgb.Callback} object, which can be passed to \code{\link[=xgb.train]{xgb.train()}} or \code{\link[=xgb.cv]{xgb.cv()}}. } \description{ Callback for logging the evaluation history } \details{ This callback creates a table with per-iteration evaluation metrics (see parameters \code{evals} and \code{custom_metric} in \code{\link[=xgb.train]{xgb.train()}}). Note: in the column names of the final data.table, the dash '-' character is replaced with the underscore '_' in order to make the column names more like regular R identifiers.
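As a minimal sketch (assuming an \code{xgb.DMatrix} named \code{dtrain} already exists), the collected log can be read off the \code{evaluation_log} element of an \code{\link[=xgb.cv]{xgb.cv()}} result, where this callback is engaged by default:

\preformatted{cv <- xgb.cv(
  params = xgb.params(nthread = 1, objective = "binary:logistic"),
  data = dtrain, nrounds = 3, nfold = 3
)
cv$evaluation_log  # one row per boosting iteration
}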
} \seealso{ \link{xgb.cb.print.evaluation} } xgboost-3.0.0/R-package/man/xgb.cb.gblinear.history.Rd000066400000000000000000000131761476511272400225010ustar00rootroot00000000000000% Generated by roxygen2: do not edit by hand % Please edit documentation in R/callbacks.R \name{xgb.cb.gblinear.history} \alias{xgb.cb.gblinear.history} \title{Callback for collecting coefficients history of a gblinear booster} \usage{ xgb.cb.gblinear.history(sparse = FALSE) } \arguments{ \item{sparse}{When set to \code{FALSE}/\code{TRUE}, a dense/sparse matrix is used to store the result. Sparse format is useful when one expects only a subset of coefficients to be non-zero, when using the "thrifty" feature selector with a fairly small number of top features selected per iteration.} } \value{ An \code{xgb.Callback} object, which can be passed to \code{\link[=xgb.train]{xgb.train()}} or \code{\link[=xgb.cv]{xgb.cv()}}. } \description{ Callback for collecting coefficients history of a gblinear booster } \details{ To keep things fast and simple, the gblinear booster does not internally store the history of linear model coefficients at each boosting iteration. This callback provides a workaround for storing the coefficients' path, by extracting them after each training iteration. This callback will construct a matrix where rows are boosting iterations and columns are feature coefficients (same order as when calling \link{coef.xgb.Booster}, with the intercept corresponding to the first column). When there is more than one coefficient per feature (e.g. multi-class classification), the result will be reshaped into a vector where coefficients are arranged first by features and then by class (e.g. first 1 through N coefficients will be for the first class, then coefficients N+1 through 2N for the second class, and so on). If the result has only one coefficient per feature in the data, then the resulting matrix will have column names matching with the feature names, otherwise (when there's more than one coefficient per feature) the names will be composed as 'column name' + ':' + 'class index' (so e.g. column 'c1' for class '0' will be named 'c1:0'). With \code{\link[=xgb.train]{xgb.train()}}, the output is either a dense or a sparse matrix. With \code{\link[=xgb.cv]{xgb.cv()}}, it is a list (one element per each fold) of such matrices. Function \link{xgb.gblinear.history} provides an easy way to retrieve the outputs from this callback. } \examples{ #### Binary classification: ## Keep the number of threads to 1 for examples nthread <- 1 data.table::setDTthreads(nthread) # In the iris dataset, it is hard to linearly separate Versicolor class from the rest # without considering the 2nd order interactions: x <- model.matrix(Species ~ .^2, iris)[, -1] colnames(x) dtrain <- xgb.DMatrix( scale(x), label = 1 * (iris$Species == "versicolor"), nthread = nthread ) param <- xgb.params( booster = "gblinear", objective = "reg:logistic", eval_metric = "auc", reg_lambda = 0.0003, reg_alpha = 0.0003, nthread = nthread ) # For 'shotgun', which is the default linear updater, using high learning_rate values may result in # unstable behaviour in some datasets. With this simple dataset, however, the high learning # rate does not break the convergence, but allows us to illustrate the typical pattern of # "stochastic explosion" behaviour of this lock-free algorithm at early boosting iterations.
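# (If only a few coefficients were expected to be non-zero - as with the
# 'thrifty' feature selector used further below - the history could be
# stored as a sparse matrix instead, by creating the callback as
# xgb.cb.gblinear.history(sparse = TRUE).)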
bst <- xgb.train( c(param, list(learning_rate = 1.)), dtrain, evals = list(tr = dtrain), nrounds = 200, callbacks = list(xgb.cb.gblinear.history()) ) # Extract the coefficients' path and plot them vs boosting iteration number: coef_path <- xgb.gblinear.history(bst) matplot(coef_path, type = "l") # With the deterministic coordinate descent updater, it is safer to use higher learning rates. # Will try the classical componentwise boosting which selects a single best feature per round: bst <- xgb.train( c( param, xgb.params( learning_rate = 0.8, updater = "coord_descent", feature_selector = "thrifty", top_k = 1 ) ), dtrain, evals = list(tr = dtrain), nrounds = 200, callbacks = list(xgb.cb.gblinear.history()) ) matplot(xgb.gblinear.history(bst), type = "l") # Componentwise boosting is known to have similar effect to Lasso regularization. # Try experimenting with various values of top_k, learning_rate, nrounds, # as well as different feature_selectors. # For xgb.cv: bst <- xgb.cv( c( param, xgb.params( learning_rate = 0.8, updater = "coord_descent", feature_selector = "thrifty", top_k = 1 ) ), dtrain, nfold = 5, nrounds = 100, callbacks = list(xgb.cb.gblinear.history()) ) # coefficients in the CV fold #3 matplot(xgb.gblinear.history(bst)[[3]], type = "l") #### Multiclass classification: dtrain <- xgb.DMatrix(scale(x), label = as.numeric(iris$Species) - 1, nthread = nthread) param <- xgb.params( booster = "gblinear", objective = "multi:softprob", num_class = 3, reg_lambda = 0.0003, reg_alpha = 0.0003, nthread = nthread ) # For the default linear updater 'shotgun' it is sometimes helpful # to use a smaller learning_rate to reduce instability bst <- xgb.train( c(param, list(learning_rate = 0.5)), dtrain, evals = list(tr = dtrain), nrounds = 50, callbacks = list(xgb.cb.gblinear.history()) ) # Will plot the coefficient paths separately for each class: matplot(xgb.gblinear.history(bst, class_index = 0), type = "l") matplot(xgb.gblinear.history(bst, class_index = 1), type = "l") matplot(xgb.gblinear.history(bst, class_index = 2), type = "l") # CV: bst <- xgb.cv( c(param, list(learning_rate = 0.5)), dtrain, nfold = 5, nrounds = 70, callbacks = list(xgb.cb.gblinear.history(FALSE)) ) # 1st fold of 1st class matplot(xgb.gblinear.history(bst, class_index = 0)[[1]], type = "l") } \seealso{ \link{xgb.gblinear.history}, \link{coef.xgb.Booster}. } xgboost-3.0.0/R-package/man/xgb.cb.print.evaluation.Rd000066400000000000000000000015361476511272400225150ustar00rootroot00000000000000% Generated by roxygen2: do not edit by hand % Please edit documentation in R/callbacks.R \name{xgb.cb.print.evaluation} \alias{xgb.cb.print.evaluation} \title{Callback for printing the result of evaluation} \usage{ xgb.cb.print.evaluation(period = 1, showsd = TRUE) } \arguments{ \item{period}{Results are printed every \code{period} iterations.} \item{showsd}{Whether standard deviations should be printed (when available).} } \value{ An \code{xgb.Callback} object, which can be passed to \code{\link[=xgb.train]{xgb.train()}} or \code{\link[=xgb.cv]{xgb.cv()}}. } \description{ The callback function prints the result of evaluation every \code{period} iterations. The initial and the last iteration's evaluations are always printed. Does not leave any attribute in the booster (see \link{xgb.cb.evaluation.log} for that).
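As a minimal sketch (assuming an \code{xgb.DMatrix} named \code{dtrain} already exists), printing could be thinned out to every 10th iteration by passing the callback explicitly:

\preformatted{bst <- xgb.train(
  params = xgb.params(nthread = 1, objective = "binary:logistic"),
  data = dtrain, nrounds = 100, evals = list(train = dtrain),
  callbacks = list(xgb.cb.print.evaluation(period = 10))
)
}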
} \seealso{ \link{xgb.Callback} } xgboost-3.0.0/R-package/man/xgb.cb.reset.parameters.Rd000066400000000000000000000021561476511272400224760ustar00rootroot00000000000000% Generated by roxygen2: do not edit by hand % Please edit documentation in R/callbacks.R \name{xgb.cb.reset.parameters} \alias{xgb.cb.reset.parameters} \title{Callback for resetting booster parameters at each iteration} \usage{ xgb.cb.reset.parameters(new_params) } \arguments{ \item{new_params}{List of parameters needed to be reset. Each element's value must be either a vector of values of length \code{nrounds} to be set at each iteration, or a function of two parameters \code{learning_rates(iteration, nrounds)} which returns a new parameter value by using the current iteration number and the total number of boosting rounds.} } \value{ An \code{xgb.Callback} object, which can be passed to \code{\link[=xgb.train]{xgb.train()}} or \code{\link[=xgb.cv]{xgb.cv()}}. } \description{ Callback for resetting booster parameters at each iteration } \details{ Note that when training is resumed from some previous model, and a function is used to reset a parameter value, the \code{nrounds} argument in this function would be the number of boosting rounds in the current training. Does not leave any attribute in the booster. } xgboost-3.0.0/R-package/man/xgb.cb.save.model.Rd000066400000000000000000000020241476511272400212410ustar00rootroot00000000000000% Generated by roxygen2: do not edit by hand % Please edit documentation in R/callbacks.R \name{xgb.cb.save.model} \alias{xgb.cb.save.model} \title{Callback for saving a model file} \usage{ xgb.cb.save.model(save_period = 0, save_name = "xgboost.ubj") } \arguments{ \item{save_period}{Save the model to disk after every \code{save_period} iterations; 0 means save the model at the end.} \item{save_name}{The name or path for the saved model file. It can contain a \code{\link[=sprintf]{sprintf()}} formatting specifier to include the integer iteration number in the file name. E.g., with \code{save_name = 'xgboost_\%04d.model'}, the file saved at iteration 50 would be named "xgboost_0050.model".} } \value{ An \code{xgb.Callback} object, which can be passed to \code{\link[=xgb.train]{xgb.train()}}, but \strong{not} to \code{\link[=xgb.cv]{xgb.cv()}}. } \description{ This callback function allows saving an XGBoost model file, either periodically after each \code{save_period} iterations or at the end. Does not leave any attribute in the booster. } xgboost-3.0.0/R-package/man/xgb.config.Rd000066400000000000000000000023401476511272400200700ustar00rootroot00000000000000% Generated by roxygen2: do not edit by hand % Please edit documentation in R/xgb.Booster.R \name{xgb.config} \alias{xgb.config} \alias{xgb.config<-} \title{Accessors for model parameters as JSON string} \usage{ xgb.config(object) xgb.config(object) <- value } \arguments{ \item{object}{Object of class \code{xgb.Booster}. \strong{Will be modified in-place} when assigning to it.} \item{value}{A list.} } \value{ Parameters as a list. } \description{ Accessors for model parameters as JSON string } \details{ Note that assignment is performed in-place on the booster C object, which unlike assignment of R attributes, doesn't follow typical copy-on-write semantics for assignment - i.e. all references to the same booster will also get updated. See \code{\link[=xgb.copy.Booster]{xgb.copy.Booster()}} for an example of this behavior.
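As a minimal sketch of a round trip (assuming a fitted booster \code{bst}, as in the example below):

\preformatted{config <- xgb.config(bst)  # retrieve the parameters as a list
xgb.config(bst) <- config  # assign them back, modifying 'bst' in-place
}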
} \examples{ data(agaricus.train, package = "xgboost") ## Keep the number of threads to 1 for examples nthread <- 1 data.table::setDTthreads(nthread) train <- agaricus.train bst <- xgb.train( data = xgb.DMatrix(train$data, label = train$label, nthread = 1), nrounds = 2, params = xgb.params( max_depth = 2, nthread = nthread, objective = "binary:logistic" ) ) config <- xgb.config(bst) } xgboost-3.0.0/R-package/man/xgb.copy.Booster.Rd000066400000000000000000000031161476511272400212120ustar00rootroot00000000000000% Generated by roxygen2: do not edit by hand % Please edit documentation in R/xgb.Booster.R \name{xgb.copy.Booster} \alias{xgb.copy.Booster} \title{Deep-copies a Booster Object} \usage{ xgb.copy.Booster(model) } \arguments{ \item{model}{An 'xgb.Booster' object.} } \value{ A deep copy of \code{model} - it will be identical in every way, but C-level functions called on that copy will not affect the \code{model} variable. } \description{ Creates a deep copy of an 'xgb.Booster' object, such that the C object pointer contained will be a different object, and hence functions like \code{\link[=xgb.attr]{xgb.attr()}} will not affect the object from which it was copied. } \examples{ library(xgboost) data(mtcars) y <- mtcars$mpg x <- mtcars[, -1] dm <- xgb.DMatrix(x, label = y, nthread = 1) model <- xgb.train( data = dm, params = xgb.params(nthread = 1), nrounds = 3 ) # Set an arbitrary attribute kept at the C level xgb.attr(model, "my_attr") <- 100 print(xgb.attr(model, "my_attr")) # Just assigning to a new variable will not create # a deep copy - C object pointer is shared, and in-place # modifications will affect both objects model_shallow_copy <- model xgb.attr(model_shallow_copy, "my_attr") <- 333 # 'model' was also affected by this change: print(xgb.attr(model, "my_attr")) model_deep_copy <- xgb.copy.Booster(model) xgb.attr(model_deep_copy, "my_attr") <- 444 # 'model' was NOT affected by this change # (keeps previous value that was assigned before) print(xgb.attr(model, "my_attr")) # Verify that the new object was actually modified print(xgb.attr(model_deep_copy, "my_attr")) } xgboost-3.0.0/R-package/man/xgb.create.features.Rd000066400000000000000000000071141476511272400217060ustar00rootroot00000000000000% Generated by roxygen2: do not edit by hand % Please edit documentation in R/xgb.create.features.R \name{xgb.create.features} \alias{xgb.create.features} \title{Create new features from a previously learned model} \usage{ xgb.create.features(model, data) } \arguments{ \item{model}{Decision tree boosting model learned on the original data.} \item{data}{Original data (usually provided as a \code{dgCMatrix} matrix).} } \value{ A \code{dgCMatrix} matrix including both the original data and the new features. } \description{ May improve the learning by adding new features to the training data based on the decision trees from a previously learned model. } \details{ This is the function inspired from the paragraph 3.1 of the paper: \strong{Practical Lessons from Predicting Clicks on Ads at Facebook} \emph{(Xinran He, Junfeng Pan, Ou Jin, Tianbing Xu, Bo Liu, Tao Xu, Yan, xin Shi, Antoine Atallah, Ralf Herbrich, Stuart Bowers, Joaquin Quinonero Candela)} International Workshop on Data Mining for Online Advertising (ADKDD) - August 24, 2014 \url{https://research.facebook.com/publications/practical-lessons-from-predicting-clicks-on-ads-at-facebook/}. 
Extract explaining the method: "We found that boosted decision trees are a powerful and very convenient way to implement non-linear and tuple transformations of the kind we just described. We treat each individual tree as a categorical feature that takes as value the index of the leaf an instance ends up falling in. We use 1-of-K coding of this type of features. For example, consider the boosted tree model in Figure 1 with 2 subtrees, where the first subtree has 3 leafs and the second 2 leafs. If an instance ends up in leaf 2 in the first subtree and leaf 1 in second subtree, the overall input to the linear classifier will be the binary vector \verb{[0, 1, 0, 1, 0]}, where the first 3 entries correspond to the leaves of the first subtree and last 2 to those of the second subtree. ... We can understand boosted decision tree based transformation as a supervised feature encoding that converts a real-valued vector into a compact binary-valued vector. A traversal from root node to a leaf node represents a rule on certain features." } \examples{ data(agaricus.train, package = "xgboost") data(agaricus.test, package = "xgboost") dtrain <- with(agaricus.train, xgb.DMatrix(data, label = label, nthread = 2)) dtest <- with(agaricus.test, xgb.DMatrix(data, label = label, nthread = 2)) param <- list(max_depth = 2, learning_rate = 1, objective = 'binary:logistic', nthread = 1) nrounds = 4 bst <- xgb.train(params = param, data = dtrain, nrounds = nrounds) # Model accuracy without new features accuracy.before <- sum((predict(bst, agaricus.test$data) >= 0.5) == agaricus.test$label) / length(agaricus.test$label) # Convert previous features to one hot encoding new.features.train <- xgb.create.features(model = bst, agaricus.train$data) new.features.test <- xgb.create.features(model = bst, agaricus.test$data) # learning with new features new.dtrain <- xgb.DMatrix( data = new.features.train, label = agaricus.train$label, nthread = 1 ) new.dtest <- xgb.DMatrix( data = new.features.test, label = agaricus.test$label, nthread = 1 ) bst <- xgb.train(params = param, data = new.dtrain, nrounds = nrounds) # Model accuracy with new features accuracy.after <- sum((predict(bst, new.dtest) >= 0.5) == agaricus.test$label) / length(agaricus.test$label) # Here the accuracy was already good and is now perfect. cat(paste("The accuracy was", accuracy.before, "before adding leaf features and it is now", accuracy.after, "!\n")) } xgboost-3.0.0/R-package/man/xgb.cv.Rd000066400000000000000000000242121476511272400172340ustar00rootroot00000000000000% Generated by roxygen2: do not edit by hand % Please edit documentation in R/xgb.cv.R \name{xgb.cv} \alias{xgb.cv} \title{Cross Validation} \usage{ xgb.cv( params = xgb.params(), data, nrounds, nfold, prediction = FALSE, showsd = TRUE, metrics = list(), objective = NULL, custom_metric = NULL, stratified = "auto", folds = NULL, train_folds = NULL, verbose = TRUE, print_every_n = 1L, early_stopping_rounds = NULL, maximize = NULL, callbacks = list(), ... ) } \arguments{ \item{params}{List of XGBoost parameters which control the model building process. See the \href{https://xgboost.readthedocs.io/en/latest/parameter.html}{online documentation} and the documentation for \code{\link[=xgb.params]{xgb.params()}} for details. Should be passed as list with named entries. Parameters that are not specified in this list will use their default values. 
A list of named parameters can be created through the function \code{\link[=xgb.params]{xgb.params()}}, which accepts all valid parameters as function arguments.} \item{data}{An \code{xgb.DMatrix} object, with corresponding fields like \code{label} or bounds as required for model training by the objective. Note that only the basic \code{xgb.DMatrix} class is supported - variants such as \code{xgb.QuantileDMatrix} or \code{xgb.ExtMemDMatrix} are not supported here.} \item{nrounds}{Max number of boosting iterations.} \item{nfold}{The original dataset is randomly partitioned into \code{nfold} equal-size subsamples.} \item{prediction}{A logical value indicating whether to return the test fold predictions from each CV model. This parameter engages the \code{\link[=xgb.cb.cv.predict]{xgb.cb.cv.predict()}} callback.} \item{showsd}{Logical value indicating whether to show the standard deviation of cross validation.} \item{metrics}{List of evaluation metrics to be used in cross validation. When not specified, the evaluation metric is chosen according to the objective function. Possible options are: \itemize{ \item \code{error}: Binary classification error rate \item \code{rmse}: Root mean square error \item \code{logloss}: Negative log-likelihood function \item \code{mae}: Mean absolute error \item \code{mape}: Mean absolute percentage error \item \code{auc}: Area under curve \item \code{aucpr}: Area under PR curve \item \code{merror}: Exact matching error used to evaluate multi-class classification }} \item{objective}{Customized objective function. Should take two arguments: the first one will be the current predictions (either a numeric vector or matrix depending on the number of targets / classes), and the second one will be the \code{data} DMatrix object that is used for training. It should return a list with two elements \code{grad} and \code{hess} (in that order), as either numeric vectors or numeric matrices depending on the number of targets / classes (same dimension as the predictions that are passed as first argument).} \item{custom_metric}{Customized evaluation function. Just like \code{objective}, should take two arguments, with the first one being the predictions and the second one the \code{data} DMatrix. Should return a list with two elements \code{metric} (name that will be displayed for this metric, should be a string / character), and \code{value} (the number that the function calculates, should be a numeric scalar). Note that even if passing \code{custom_metric}, objectives also have an associated default metric that will be evaluated in addition to it. In order to disable the built-in metric, one can pass parameter \code{disable_default_eval_metric = TRUE}.} \item{stratified}{Logical flag indicating whether sampling of folds should be stratified by the values of outcome labels. For real-valued labels in regression objectives, stratification will be done by discretizing the labels into up to 5 buckets beforehand. If passing "auto", will be set to \code{TRUE} if the objective in \code{params} is a classification objective (from XGBoost's built-in objectives, doesn't apply to custom ones), and to \code{FALSE} otherwise. This parameter is ignored when \code{data} has a \code{group} field - in such case, the splitting will be based on whole groups (note that this might make the folds have different sizes). Value \code{TRUE} here is \strong{not} supported for custom objectives.} \item{folds}{List with pre-defined CV folds (each element must be a vector of test fold's indices).
When folds are supplied, the \code{nfold} and \code{stratified} parameters are ignored. If \code{data} has a \code{group} field and the objective requires this field, each fold (list element) must additionally have two attributes (retrievable through \code{attributes}) named \code{group_test} and \code{group_train}, which should hold the \code{group} to assign through \code{\link[=setinfo.xgb.DMatrix]{setinfo.xgb.DMatrix()}} to the resulting DMatrices.} \item{train_folds}{List specifying which indices to use for training. If \code{NULL} (the default) all indices not specified in \code{folds} will be used for training. This is not supported when \code{data} has a \code{group} field.} \item{verbose}{If 0, xgboost will stay silent. If 1, it will print information about performance. If 2, some additional information will be printed out. Note that setting \code{verbose > 0} automatically engages the \code{xgb.cb.print.evaluation(period=1)} callback function.} \item{print_every_n}{When passing \code{verbose > 0}, evaluation logs (metrics calculated on the cross-validation folds) will be printed every nth iteration according to the value passed here. The first and last iteration are always included regardless of this 'n'. Only has an effect when passing \code{verbose > 0}. The parameter is passed to the \code{\link[=xgb.cb.print.evaluation]{xgb.cb.print.evaluation()}} callback.} \item{early_stopping_rounds}{Number of boosting rounds after which training will be stopped if there is no improvement in performance (as measured by the evaluation metric that is supplied or selected by default for the objective) on the cross-validation test folds. Setting this parameter adds the \code{\link[=xgb.cb.early.stop]{xgb.cb.early.stop()}} callback. If \code{NULL}, early stopping will not be used.} \item{maximize}{If \code{custom_metric} and \code{early_stopping_rounds} are set, then this parameter must be set as well. When it is \code{TRUE}, it means the larger the evaluation score the better. This parameter is passed to the \code{\link[=xgb.cb.early.stop]{xgb.cb.early.stop()}} callback.} \item{callbacks}{A list of callback functions to perform various tasks during boosting. See \code{\link[=xgb.Callback]{xgb.Callback()}}. Some of the callbacks are automatically created depending on the parameters' values. Users can provide either existing or their own callback methods in order to customize the training process.} \item{...}{Not used. Some arguments that were part of this function in previous XGBoost versions are currently deprecated or have been renamed. If a deprecated or renamed argument is passed, will throw a warning (by default) and use its current equivalent instead. This warning will become an error if using the \link[=xgboost-options]{'strict mode' option}. If some additional argument is passed that is neither a current function argument nor a deprecated or renamed argument, a warning or error will be thrown depending on the 'strict mode' option. \bold{Important:} \code{...} will be removed in a future version, and all the current deprecation warnings will become errors. Please use only arguments that form part of the function signature.} } \value{ An object of class 'xgb.cv.synchronous' with the following elements: \itemize{ \item \code{call}: Function call. \item \code{params}: Parameters that were passed to the xgboost library.
Note that it does not capture parameters changed by the \code{\link[=xgb.cb.reset.parameters]{xgb.cb.reset.parameters()}} callback. \item \code{evaluation_log}: Evaluation history stored as a \code{data.table} with the first column corresponding to iteration number and the rest corresponding to the CV-based evaluation means and standard deviations for the training and test CV-sets. It is created by the \code{\link[=xgb.cb.evaluation.log]{xgb.cb.evaluation.log()}} callback. \item \code{niter}: Number of boosting iterations. \item \code{nfeatures}: Number of features in training data. \item \code{folds}: The list of CV folds' indices - either those passed through the \code{folds} parameter or randomly generated. \item \code{best_iteration}: Iteration number with the best evaluation metric value (only available with early stopping). } Plus other potential elements that are the result of callbacks, such as a list \code{cv_predict} with a sub-element \code{pred} when passing \code{prediction = TRUE}, which is added by the \code{\link[=xgb.cb.cv.predict]{xgb.cb.cv.predict()}} callback (note that one can also pass it manually under \code{callbacks} with different settings, such as saving also the models created during cross validation); or a list \code{early_stop} which will contain elements such as \code{best_iteration} when using the early stopping callback (\code{\link[=xgb.cb.early.stop]{xgb.cb.early.stop()}}). } \description{ The cross validation function of xgboost. } \details{ The original sample is randomly partitioned into \code{nfold} equal size subsamples. Of the \code{nfold} subsamples, a single subsample is retained as the validation data for testing the model, and the remaining \code{nfold - 1} subsamples are used as training data. The cross-validation process is then repeated \code{nfold} times, with each of the \code{nfold} subsamples used exactly once as the validation data. All observations are used for both training and validation. Adapted from \url{https://en.wikipedia.org/wiki/Cross-validation_\%28statistics\%29} } \examples{ data(agaricus.train, package = "xgboost") dtrain <- with(agaricus.train, xgb.DMatrix(data, label = label, nthread = 2)) cv <- xgb.cv( data = dtrain, nrounds = 3, params = xgb.params( nthread = 2, max_depth = 3, objective = "binary:logistic" ), nfold = 5, metrics = list("rmse", "auc") ) print(cv) print(cv, verbose = TRUE) } xgboost-3.0.0/R-package/man/xgb.dump.Rd000066400000000000000000000057631476511272400176020ustar00rootroot00000000000000% Generated by roxygen2: do not edit by hand % Please edit documentation in R/xgb.dump.R \name{xgb.dump} \alias{xgb.dump} \title{Dump an XGBoost model in text format.} \usage{ xgb.dump( model, fname = NULL, fmap = "", with_stats = FALSE, dump_format = c("text", "json", "dot"), ... ) } \arguments{ \item{model}{The model object.} \item{fname}{The name of the text file where to save the model text dump. If not provided or set to \code{NULL}, the model is returned as a character vector.} \item{fmap}{Feature map file representing feature types. See demo/ for a walkthrough example in R, and \url{https://github.com/dmlc/xgboost/blob/master/demo/data/featmap.txt} to see an example of the value.} \item{with_stats}{Whether to dump some additional statistics about the splits.
When this option is on, the model dump contains two additional values: \code{gain} is the approximate loss function gain we get in each split; \code{cover} is the sum of the second order gradient in each node.} \item{dump_format}{Either 'text', 'json', or 'dot' (graphviz) format could be specified. Format 'dot' for a single tree can be passed directly to packages that consume this format for graph visualization, such as function \code{DiagrammeR::grViz()}} \item{...}{Not used. Some arguments that were part of this function in previous XGBoost versions are currently deprecated or have been renamed. If a deprecated or renamed argument is passed, will throw a warning (by default) and use its current equivalent instead. This warning will become an error if using the \link[=xgboost-options]{'strict mode' option}. If some additional argument is passed that is neither a current function argument nor a deprecated or renamed argument, a warning or error will be thrown depending on the 'strict mode' option. \bold{Important:} \code{...} will be removed in a future version, and all the current deprecation warnings will become errors. Please use only arguments that form part of the function signature.} } \value{ If fname is not provided or set to \code{NULL} the function will return the model as a character vector. Otherwise it will return \code{TRUE}. } \description{ Dump an XGBoost model in text format. } \examples{ \dontshow{RhpcBLASctl::omp_set_num_threads(1)} data(agaricus.train, package = "xgboost") data(agaricus.test, package = "xgboost") train <- agaricus.train test <- agaricus.test bst <- xgb.train( data = xgb.DMatrix(train$data, label = train$label, nthread = 1), nrounds = 2, params = xgb.params( max_depth = 2, nthread = 2, objective = "binary:logistic" ) ) # save the model in file 'model.dump' dump_path <- file.path(tempdir(), 'model.dump') xgb.dump(bst, dump_path, with_stats = TRUE) # print the model without saving it to a file print(xgb.dump(bst, with_stats = TRUE)) # print in JSON format: cat(xgb.dump(bst, with_stats = TRUE, dump_format = "json")) # plot first tree leveraging the 'dot' format if (requireNamespace('DiagrammeR', quietly = TRUE)) { DiagrammeR::grViz(xgb.dump(bst, dump_format = "dot")[[1L]]) } } xgboost-3.0.0/R-package/man/xgb.gblinear.history.Rd000066400000000000000000000041551476511272400221130ustar00rootroot00000000000000% Generated by roxygen2: do not edit by hand % Please edit documentation in R/callbacks.R \name{xgb.gblinear.history} \alias{xgb.gblinear.history} \title{Extract gblinear coefficients history} \usage{ xgb.gblinear.history(model, class_index = NULL) } \arguments{ \item{model}{Either an \code{xgb.Booster} or a result of \code{\link[=xgb.cv]{xgb.cv()}}, trained using the \link{xgb.cb.gblinear.history} callback, but \strong{not} a booster loaded from \code{\link[=xgb.load]{xgb.load()}} or \code{\link[=xgb.load.raw]{xgb.load.raw()}}.} \item{class_index}{Zero-based class index to extract the coefficients for only that specific class in a multinomial multiclass model. When it is \code{NULL}, all the coefficients are returned. Has no effect in non-multiclass models.} } \value{ For an \code{\link[=xgb.train]{xgb.train()}} result, a matrix (either dense or sparse) with the columns corresponding to features' coefficients and the rows corresponding to boosting iterations. For an \code{\link[=xgb.cv]{xgb.cv()}} result, a list of such matrices is returned with the elements corresponding to CV folds. When there is more than one coefficient per feature (e.g.
multi-class classification) and \code{class_index} is not provided, the result will be reshaped into a vector where coefficients are arranged first by features and then by class (e.g. first 1 through N coefficients will be for the first class, then coefficients N+1 through 2N for the second class, and so on). } \description{ A helper function to extract the matrix of linear coefficients' history from a gblinear model created while using the \link{xgb.cb.gblinear.history} callback (which must be added manually as by default it is not used). } \details{ Note that this is an R-specific function that relies on R attributes that are not saved when using XGBoost's own serialization functions like \code{\link[=xgb.load]{xgb.load()}} or \code{\link[=xgb.load.raw]{xgb.load.raw()}}. In order for a serialized model to be accepted by this function, one must use R serializers such as \code{\link[=saveRDS]{saveRDS()}}. } \seealso{ \link{xgb.cb.gblinear.history}, \link{coef.xgb.Booster}. } xgboost-3.0.0/R-package/man/xgb.get.DMatrix.data.Rd000066400000000000000000000010471476511272400216630ustar00rootroot00000000000000% Generated by roxygen2: do not edit by hand % Please edit documentation in R/xgb.DMatrix.R \name{xgb.get.DMatrix.data} \alias{xgb.get.DMatrix.data} \title{Get DMatrix Data} \usage{ xgb.get.DMatrix.data(dmat) } \arguments{ \item{dmat}{An \code{xgb.DMatrix} object, as returned by \code{\link[=xgb.DMatrix]{xgb.DMatrix()}}.} } \value{ The data held in the DMatrix, as a sparse CSR matrix (class \code{dgRMatrix} from package \code{Matrix}). If it had feature names, these will be added as column names in the output. } \description{ Get DMatrix Data } xgboost-3.0.0/R-package/man/xgb.get.DMatrix.num.non.missing.Rd000066400000000000000000000007741476511272400240160ustar00rootroot00000000000000% Generated by roxygen2: do not edit by hand % Please edit documentation in R/xgb.DMatrix.R \name{xgb.get.DMatrix.num.non.missing} \alias{xgb.get.DMatrix.num.non.missing} \title{Get Number of Non-Missing Entries in DMatrix} \usage{ xgb.get.DMatrix.num.non.missing(dmat) } \arguments{ \item{dmat}{An \code{xgb.DMatrix} object, as returned by \code{\link[=xgb.DMatrix]{xgb.DMatrix()}}.} } \value{ The number of non-missing entries in the DMatrix. } \description{ Get Number of Non-Missing Entries in DMatrix } xgboost-3.0.0/R-package/man/xgb.get.DMatrix.qcut.Rd000066400000000000000000000037671476511272400217340ustar00rootroot00000000000000% Generated by roxygen2: do not edit by hand % Please edit documentation in R/xgb.DMatrix.R \name{xgb.get.DMatrix.qcut} \alias{xgb.get.DMatrix.qcut} \title{Get Quantile Cuts from DMatrix} \usage{ xgb.get.DMatrix.qcut(dmat, output = c("list", "arrays")) } \arguments{ \item{dmat}{An \code{xgb.DMatrix} object, as returned by \code{\link[=xgb.DMatrix]{xgb.DMatrix()}}.} \item{output}{Output format for the quantile cuts. Possible options are: \itemize{ \item \code{"list"} will return the output as a list with one entry per column, where each column will have a numeric vector with the cuts. The list will be named if \code{dmat} has column names assigned to it. \item \code{"arrays"} will return a list with entries \code{indptr} (base-0 indexing) and \code{data}. Here, the cuts for column 'i' are obtained by slicing 'data' from entries \code{indptr[i]+1} to \code{indptr[i+1]}. }} } \value{ The quantile cuts, in the format specified by parameter \code{output}. } \description{ Get the quantile cuts (a.k.a.
borders) from an \code{xgb.DMatrix} that has been quantized for the histogram method (\code{tree_method = "hist"}). These cuts are used in order to assign observations to bins - i.e. these are ordered boundaries which are used to determine the assignment condition \verb{border_low < x < border_high}. As such, the first and last bin will be outside of the range of the data, so as to include all of the observations there. If a given column has 'n' bins, then there will be 'n+1' cuts / borders for that column, which will be output in sorted order from lowest to highest. Different columns can have different numbers of bins according to their range. } \examples{ data(mtcars) y <- mtcars$mpg x <- as.matrix(mtcars[, -1]) dm <- xgb.DMatrix(x, label = y, nthread = 1) # DMatrix is not quantized right away, but will be once a hist model is generated model <- xgb.train( data = dm, params = xgb.params(tree_method = "hist", max_bin = 8, nthread = 1), nrounds = 3 ) # Now can get the quantile cuts xgb.get.DMatrix.qcut(dm) } xgboost-3.0.0/R-package/man/xgb.get.num.boosted.rounds.Rd000066400000000000000000000013071476511272400231500ustar00rootroot00000000000000% Generated by roxygen2: do not edit by hand % Please edit documentation in R/xgb.Booster.R \name{xgb.get.num.boosted.rounds} \alias{xgb.get.num.boosted.rounds} \alias{length.xgb.Booster} \title{Get number of boosting rounds in a fitted booster} \usage{ xgb.get.num.boosted.rounds(model) \method{length}{xgb.Booster}(x) } \arguments{ \item{model, x}{A fitted \code{xgb.Booster} model.} } \value{ The number of rounds saved in the model as an integer. } \description{ Get number of boosting rounds in a fitted booster } \details{ Note that setting booster parameters related to training continuation / updates through \code{\link[=xgb.model.parameters<-]{xgb.model.parameters<-()}} will reset the number of rounds to zero. } xgboost-3.0.0/R-package/man/xgb.importance.Rd000066400000000000000000000073201476511272400207660ustar00rootroot00000000000000% Generated by roxygen2: do not edit by hand % Please edit documentation in R/xgb.importance.R \name{xgb.importance} \alias{xgb.importance} \title{Feature importance} \usage{ xgb.importance( model = NULL, feature_names = getinfo(model, "feature_name"), trees = NULL ) } \arguments{ \item{model}{Object of class \code{xgb.Booster}.} \item{feature_names}{Character vector used to overwrite the feature names of the model. The default is \code{NULL} (use original feature names).} \item{trees}{An integer vector of (base-1) tree indices that should be included in the importance calculation (only for the "gbtree" booster). The default (\code{NULL}) parses all trees. It could be useful, e.g., in multiclass classification to get feature importances for each class separately.} } \value{ A \code{data.table} with the following columns: For a tree model: \itemize{ \item \code{Features}: Names of the features used in the model. \item \code{Gain}: Fractional contribution of each feature to the model based on the total gain of this feature's splits. Higher percentage means higher importance. \item \code{Cover}: Metric of the number of observations related to this feature. \item \code{Frequency}: Percentage of times a feature has been used in trees. } For a linear model: \itemize{ \item \code{Features}: Names of the features used in the model. \item \code{Weight}: Linear coefficient of this feature. \item \code{Class}: Class label (only for multiclass models).
For objects of class \code{xgboost} (as produced by \code{\link[=xgboost]{xgboost()}}), it will be a \code{factor}, while for objects of class \code{xgb.Booster} (as produced by \code{\link[=xgb.train]{xgb.train()}}), it will be a zero-based integer vector. } If \code{feature_names} is not provided and \code{model} doesn't have \code{feature_names}, the index of the features will be used instead. Because the index is extracted from the model dump (based on C++ code), it starts at 0 (as in C/C++ or Python) instead of 1 (as is usual in R). } \description{ Creates a \code{data.table} of feature importances. } \details{ This function works for both linear and tree models. For linear models, the importance is the absolute magnitude of linear coefficients. To obtain a meaningful ranking by importance for linear models, the features need to be on the same scale (which is also recommended when using L1 or L2 regularization). } \examples{ # binary classification using "gbtree": data("ToothGrowth") x <- ToothGrowth[, c("len", "dose")] y <- ToothGrowth$supp model_tree_binary <- xgboost( x, y, nrounds = 5L, nthreads = 1L, booster = "gbtree", max_depth = 2L ) xgb.importance(model_tree_binary) # binary classification using "gblinear": model_linear_binary <- xgboost( x, y, nrounds = 5L, nthreads = 1L, booster = "gblinear", learning_rate = 0.3 ) xgb.importance(model_linear_binary) # multi-class classification using "gbtree": data("iris") x <- iris[, c("Sepal.Length", "Sepal.Width", "Petal.Length", "Petal.Width")] y <- iris$Species model_tree_multi <- xgboost( x, y, nrounds = 5L, nthreads = 1L, booster = "gbtree", max_depth = 3 ) # all classes clumped together: xgb.importance(model_tree_multi) # inspect importances separately for each class: num_classes <- 3L nrounds <- 5L xgb.importance( model_tree_multi, trees = seq(from = 1, by = num_classes, length.out = nrounds) ) xgb.importance( model_tree_multi, trees = seq(from = 2, by = num_classes, length.out = nrounds) ) xgb.importance( model_tree_multi, trees = seq(from = 3, by = num_classes, length.out = nrounds) ) # multi-class classification using "gblinear": model_linear_multi <- xgboost( x, y, nrounds = 5L, nthreads = 1L, booster = "gblinear", learning_rate = 0.2 ) xgb.importance(model_linear_multi) } xgboost-3.0.0/R-package/man/xgb.is.same.Booster.Rd000066400000000000000000000040641476511272400216020ustar00rootroot00000000000000% Generated by roxygen2: do not edit by hand % Please edit documentation in R/xgb.Booster.R \name{xgb.is.same.Booster} \alias{xgb.is.same.Booster} \title{Check if two boosters share the same C object} \usage{ xgb.is.same.Booster(obj1, obj2) } \arguments{ \item{obj1}{Booster model to compare with \code{obj2}.} \item{obj2}{Booster model to compare with \code{obj1}.} } \value{ Either \code{TRUE} or \code{FALSE} according to whether the two boosters share the underlying C object. } \description{ Checks whether two booster objects refer to the same underlying C object. } \details{ As booster objects (as returned by e.g. \code{\link[=xgb.train]{xgb.train()}}) contain an R 'externalptr' object, they don't follow typical copy-on-write semantics of other R objects - that is, if one assigns a booster to a different variable and modifies that new variable through in-place methods like \code{\link[=xgb.attr<-]{xgb.attr<-()}}, the modification will be applied to both the old and the new variable, unlike typical R assignments which would only modify the latter.
This function allows checking whether two booster objects share the same 'externalptr', regardless of the R attributes that they might have. In order to duplicate a booster in such a way that the copy wouldn't share the same 'externalptr', one can use function \code{\link[=xgb.copy.Booster]{xgb.copy.Booster()}}. } \examples{ library(xgboost) data(mtcars) y <- mtcars$mpg x <- as.matrix(mtcars[, -1]) model <- xgb.train( params = xgb.params(nthread = 1), data = xgb.DMatrix(x, label = y, nthread = 1), nrounds = 3 ) model_shallow_copy <- model xgb.is.same.Booster(model, model_shallow_copy) # same C object model_deep_copy <- xgb.copy.Booster(model) xgb.is.same.Booster(model, model_deep_copy) # different C objects # In-place assignments modify all references, # but not full/deep copies of the booster xgb.attr(model_shallow_copy, "my_attr") <- 111 xgb.attr(model, "my_attr") # gets modified xgb.attr(model_deep_copy, "my_attr") # doesn't get modified } \seealso{ \code{\link[=xgb.copy.Booster]{xgb.copy.Booster()}} } xgboost-3.0.0/R-package/man/xgb.load.Rd000066400000000000000000000026311476511272400175440ustar00rootroot00000000000000% Generated by roxygen2: do not edit by hand % Please edit documentation in R/xgb.load.R \name{xgb.load} \alias{xgb.load} \title{Load XGBoost model from binary file} \usage{ xgb.load(modelfile) } \arguments{ \item{modelfile}{The name of the binary input file.} } \value{ An object of \code{xgb.Booster} class. } \description{ Load XGBoost model from binary model file. } \details{ The input file is expected to contain a model saved in an XGBoost model format using either \code{\link[=xgb.save]{xgb.save()}} in R, or using some appropriate methods from other XGBoost interfaces. E.g., a model trained in Python and saved from there in XGBoost format could be loaded in R. Note: a model saved as an R object has to be loaded using corresponding R-methods, not by \code{\link[=xgb.load]{xgb.load()}}. } \examples{ \dontshow{RhpcBLASctl::omp_set_num_threads(1)} data(agaricus.train, package = "xgboost") data(agaricus.test, package = "xgboost") ## Keep the number of threads to 1 for examples nthread <- 1 data.table::setDTthreads(nthread) train <- agaricus.train test <- agaricus.test bst <- xgb.train( data = xgb.DMatrix(train$data, label = train$label, nthread = 1), nrounds = 2, params = xgb.params( max_depth = 2, nthread = nthread, objective = "binary:logistic" ) ) fname <- file.path(tempdir(), "xgb.ubj") xgb.save(bst, fname) bst <- xgb.load(fname) } \seealso{ \code{\link[=xgb.save]{xgb.save()}} } xgboost-3.0.0/R-package/man/xgb.load.raw.Rd000066400000000000000000000006541476511272400203370ustar00rootroot00000000000000% Generated by roxygen2: do not edit by hand % Please edit documentation in R/xgb.load.raw.R \name{xgb.load.raw} \alias{xgb.load.raw} \title{Load serialised XGBoost model from R's raw vector} \usage{ xgb.load.raw(buffer) } \arguments{ \item{buffer}{The buffer returned by \code{\link[=xgb.save.raw]{xgb.save.raw()}}.} } \description{ Users can generate a raw memory buffer by calling \code{\link[=xgb.save.raw]{xgb.save.raw()}}. } xgboost-3.0.0/R-package/man/xgb.model.dt.tree.Rd000066400000000000000000000075601476511272400212770ustar00rootroot00000000000000% Generated by roxygen2: do not edit by hand % Please edit documentation in R/xgb.model.dt.tree.R \name{xgb.model.dt.tree} \alias{xgb.model.dt.tree} \title{Parse model text dump} \usage{ xgb.model.dt.tree(model, trees = NULL, use_int_id = FALSE, ...) } \arguments{ \item{model}{Object of class \code{xgb.Booster}.
If it contains feature names (they can be set through \code{\link[=setinfo]{setinfo()}}), they will be used in the output from this function. If the model contains categorical features, an error will be thrown.} \item{trees}{An integer vector of (base-1) tree indices that should be used. The default (\code{NULL}) uses all trees. Useful, e.g., in multiclass classification to get only the trees of one class.} \item{use_int_id}{A logical flag indicating whether nodes in columns "Yes", "No", and "Missing" should be represented as integers (when \code{TRUE}) or as "Tree-Node" character strings (when \code{FALSE}, default).} \item{...}{Not used. Some arguments that were part of this function in previous XGBoost versions are currently deprecated or have been renamed. If a deprecated or renamed argument is passed, will throw a warning (by default) and use its current equivalent instead. This warning will become an error if using the \link[=xgboost-options]{'strict mode' option}. If some additional argument is passed that is neither a current function argument nor a deprecated or renamed argument, a warning or error will be thrown depending on the 'strict mode' option. \bold{Important:} \code{...} will be removed in a future version, and all the current deprecation warnings will become errors. Please use only arguments that form part of the function signature.} } \value{ A \code{data.table} with detailed information about tree nodes. It has the following columns: \itemize{ \item \code{Tree}: integer ID of a tree in a model (zero-based index). \item \code{Node}: integer ID of a node in a tree (zero-based index). \item \code{ID}: character identifier of a node in a model (only when \code{use_int_id = FALSE}). \item \code{Feature}: for a branch node, a feature ID or name (when available); for a leaf node, it simply labels it as \code{"Leaf"}. \item \code{Split}: location of the split for a branch node (split condition is always "less than"). \item \code{Yes}: ID of the next node when the split condition is met. \item \code{No}: ID of the next node when the split condition is not met. \item \code{Missing}: ID of the next node when the branch value is missing. \item \code{Gain}: either the split gain (change in loss) or the leaf value. \item \code{Cover}: metric related to the number of observations either seen by a split or collected by a leaf during training. } When \code{use_int_id = FALSE}, columns "Yes", "No", and "Missing" point to model-wide node identifiers in the "ID" column. When \code{use_int_id = TRUE}, those columns point to node identifiers from the corresponding trees in the "Node" column. } \description{ Parse a boosted tree model text dump into a \code{data.table} structure. } \details{ Note that this function does not work with models that were fitted to categorical data, and is only applicable to tree-based boosters (not \code{gblinear}). 
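As a rough sketch of how the returned table can be summarized with "data.table" syntax (assuming a fitted booster \code{bst}, as in the example below; the column names are those described above): \preformatted{
dt <- xgb.model.dt.tree(bst)
# Number of leaves in each tree:
dt[Feature == "Leaf", .N, by = Tree]
# Average split gain per feature across all branch nodes:
dt[Feature != "Leaf", .(mean_gain = mean(Gain)), by = Feature]
}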
} \examples{ # Basic use: data(agaricus.train, package = "xgboost") ## Keep the number of threads to 1 for examples nthread <- 1 data.table::setDTthreads(nthread) bst <- xgb.train( data = xgb.DMatrix(agaricus.train$data, label = agaricus.train$label, nthread = 1), nrounds = 2, params = xgb.params( max_depth = 2, nthread = nthread, objective = "binary:logistic" ) ) # This bst model already has feature_names stored with it, so those would be used when # feature_names is not set: dt <- xgb.model.dt.tree(bst) # How to match feature names of splits that follow the current 'Yes' branch: merge( dt, dt[, .(ID, Y.Feature = Feature)], by.x = "Yes", by.y = "ID", all.x = TRUE )[ order(Tree, Node) ] } xgboost-3.0.0/R-package/man/xgb.model.parameters.Rd000066400000000000000000000030251476511272400220650ustar00rootroot00000000000000% Generated by roxygen2: do not edit by hand % Please edit documentation in R/xgb.Booster.R \name{xgb.model.parameters<-} \alias{xgb.model.parameters<-} \title{Accessors for model parameters} \usage{ xgb.model.parameters(object) <- value } \arguments{ \item{object}{Object of class \code{xgb.Booster}. \strong{Will be modified in-place}.} \item{value}{A list (or an object coercible to a list) with the names of parameters to set and the elements corresponding to parameter values.} } \value{ The same booster \code{object}, which gets modified in-place. } \description{ Only the setter for XGBoost parameters is currently implemented. } \details{ Just like \code{\link[=xgb.attr]{xgb.attr()}}, this function will make in-place modifications on the booster object which do not follow typical R assignment semantics - that is, all references to the same booster will also be updated, unlike assignment of R attributes, which follows copy-on-write semantics. See \code{\link[=xgb.copy.Booster]{xgb.copy.Booster()}} for an example of this behavior. Be aware that setting parameters of a fitted booster related to training continuation / updates will reset its number of rounds indicator to zero.
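As a minimal sketch of this reset behaviour (assuming, for illustration, a booster \code{bst} trained for 2 rounds, and that update-related parameters such as \code{process_type} and \code{updater} are the ones being set): \preformatted{
xgb.get.num.boosted.rounds(bst)  # 2 rounds after training
# Switch the booster to update mode (an update-related parameter change):
xgb.model.parameters(bst) <- list(process_type = "update", updater = "refresh")
xgb.get.num.boosted.rounds(bst)  # per the note above, the counter is reset to 0
}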
} \examples{ data(agaricus.train, package = "xgboost") train <- agaricus.train bst <- xgb.train( data = xgb.DMatrix(train$data, label = train$label, nthread = 1), nrounds = 2, params = xgb.params( max_depth = 2, learning_rate = 1, nthread = 2, objective = "binary:logistic" ) ) xgb.model.parameters(bst) <- list(learning_rate = 0.1) } xgboost-3.0.0/R-package/man/xgb.params.Rd000066400000000000000000001111431476511272400201070ustar00rootroot00000000000000% Generated by roxygen2: do not edit by hand % Please edit documentation in R/xgb.train.R \name{xgb.params} \alias{xgb.params} \title{XGBoost Parameters} \usage{ xgb.params( objective = NULL, verbosity = NULL, nthread = NULL, seed = NULL, booster = NULL, eta = NULL, learning_rate = NULL, gamma = NULL, min_split_loss = NULL, max_depth = NULL, min_child_weight = NULL, max_delta_step = NULL, subsample = NULL, sampling_method = NULL, colsample_bytree = NULL, colsample_bylevel = NULL, colsample_bynode = NULL, lambda = NULL, reg_lambda = NULL, alpha = NULL, reg_alpha = NULL, tree_method = NULL, scale_pos_weight = NULL, updater = NULL, refresh_leaf = NULL, grow_policy = NULL, max_leaves = NULL, max_bin = NULL, num_parallel_tree = NULL, monotone_constraints = NULL, interaction_constraints = NULL, multi_strategy = NULL, base_score = NULL, eval_metric = NULL, seed_per_iteration = NULL, device = NULL, disable_default_eval_metric = NULL, use_rmm = NULL, max_cached_hist_node = NULL, extmem_single_page = NULL, max_cat_to_onehot = NULL, max_cat_threshold = NULL, sample_type = NULL, normalize_type = NULL, rate_drop = NULL, one_drop = NULL, skip_drop = NULL, feature_selector = NULL, top_k = NULL, num_class = NULL, tweedie_variance_power = NULL, huber_slope = NULL, quantile_alpha = NULL, aft_loss_distribution = NULL, lambdarank_pair_method = NULL, lambdarank_num_pair_per_sample = NULL, lambdarank_normalization = NULL, lambdarank_score_normalization = NULL, lambdarank_unbiased = NULL, lambdarank_bias_norm = NULL, ndcg_exp_gain = NULL ) } \arguments{ \item{objective}{(default=\code{"reg:squarederror"}) Specify the learning task and the corresponding learning objective or a custom objective function to be used. For custom objective, see \href{https://xgboost.readthedocs.io/en/latest/tutorials/custom_metric_obj.html}{Custom Objective and Evaluation Metric} and \href{https://xgboost.readthedocs.io/en/latest/tutorials/saving_model.html#custom-obj-metric}{Custom objective and metric} for more information, along with the end note for function signatures. Supported values are: \itemize{ \item \code{"reg:squarederror"}: regression with squared loss. \item \code{"reg:squaredlogerror"}: regression with squared log loss \eqn{\frac{1}{2}[log(pred + 1) - log(label + 1)]^2}. All input labels are required to be greater than -1. Also, see metric \code{rmsle} for possible issue with this objective. \item \code{"reg:logistic"}: logistic regression, output probability \item \code{"reg:pseudohubererror"}: regression with Pseudo Huber loss, a twice differentiable alternative to absolute loss. \item \code{"reg:absoluteerror"}: Regression with L1 error. When tree model is used, leaf value is refreshed after tree construction. If used in distributed training, the leaf value is calculated as the mean value from all workers, which is not guaranteed to be optimal. Version added: 1.7.0 \item \code{"reg:quantileerror"}: Quantile loss, also known as "pinball loss". 
See later sections for its parameter and \href{https://xgboost.readthedocs.io/en/latest/python/examples/quantile_regression.html#sphx-glr-python-examples-quantile-regression-py}{Quantile Regression} for a worked example. Version added: 2.0.0 \item \code{"binary:logistic"}: logistic regression for binary classification, output probability \item \code{"binary:logitraw"}: logistic regression for binary classification, output score before logistic transformation \item \code{"binary:hinge"}: hinge loss for binary classification. This makes predictions of 0 or 1, rather than producing probabilities. \item \code{"count:poisson"}: Poisson regression for count data, output mean of Poisson distribution. \code{"max_delta_step"} is set to 0.7 by default in Poisson regression (used to safeguard optimization) \item \code{"survival:cox"}: Cox regression for right censored survival time data (negative values are considered right censored). Note that predictions are returned on the hazard ratio scale (i.e., as HR = exp(marginal_prediction) in the proportional hazard function \code{h(t) = h0(t) * HR}). \item \code{"survival:aft"}: Accelerated failure time model for censored survival time data. See \href{https://xgboost.readthedocs.io/en/latest/tutorials/aft_survival_analysis.html}{Survival Analysis with Accelerated Failure Time} for details. \item \code{"multi:softmax"}: set XGBoost to do multiclass classification using the softmax objective; you also need to set \code{num_class} (the number of classes) \item \code{"multi:softprob"}: same as softmax, but outputs a vector of \code{ndata * nclass}, which can be further reshaped to an \code{ndata * nclass} matrix. The result contains the predicted probability of each data point belonging to each class. \item \code{"rank:ndcg"}: Use LambdaMART to perform pair-wise ranking where \href{https://en.wikipedia.org/wiki/NDCG}{Normalized Discounted Cumulative Gain (NDCG)} is maximized. This objective supports position debiasing for click data. \item \code{"rank:map"}: Use LambdaMART to perform pair-wise ranking where \href{https://en.wikipedia.org/wiki/Mean_average_precision#Mean_average_precision}{Mean Average Precision (MAP)} is maximized \item \code{"rank:pairwise"}: Use LambdaRank to perform pair-wise ranking using the \code{ranknet} objective. \item \code{"reg:gamma"}: gamma regression with log-link. Output is the mean of the gamma distribution. It might be useful, e.g., for modeling insurance claims severity, or for any outcome that might be \href{https://en.wikipedia.org/wiki/Gamma_distribution#Occurrence_and_applications}{gamma-distributed}. \item \code{"reg:tweedie"}: Tweedie regression with log-link. It might be useful, e.g., for modeling total loss in insurance, or for any outcome that might be \href{https://en.wikipedia.org/wiki/Tweedie_distribution#Occurrence_and_applications}{Tweedie-distributed}. }} \item{verbosity}{(default=1) Verbosity of printing messages. Valid values are 0 (silent), 1 (warning), 2 (info), 3 (debug). Sometimes XGBoost tries to change configurations based on heuristics, which is displayed as a warning message. If there's unexpected behaviour, please try to increase the value of verbosity.} \item{nthread}{(default to maximum number of threads available if not set) Number of parallel threads used to run XGBoost. When choosing it, please keep thread contention and hyperthreading in mind.} \item{seed}{Random number seed. If not specified, will take a random seed through R's own RNG engine.} \item{booster}{(default= \code{"gbtree"}) Which booster to use.
Can be \code{"gbtree"}, \code{"gblinear"} or \code{"dart"}; \code{"gbtree"} and \code{"dart"} use tree based models while \code{"gblinear"} uses linear functions.} \item{eta, learning_rate}{(two aliases for the same parameter) Step size shrinkage used in update to prevent overfitting. After each boosting step, we can directly get the weights of new features, and \code{eta} shrinks the feature weights to make the boosting process more conservative. \itemize{ \item range: \eqn{[0,1]} \item default value: 0.3 for tree-based boosters, 0.5 for linear booster. } Note: should only pass one of \code{eta} or \code{learning_rate}. Both refer to the same parameter and there's thus no difference between one or the other.} \item{gamma, min_split_loss}{(two aliases for the same parameter) (for Tree Booster) (default=0, alias: \code{gamma}) Minimum loss reduction required to make a further partition on a leaf node of the tree. The larger \code{min_split_loss} is, the more conservative the algorithm will be. Note that a tree where no splits were made might still contain a single terminal node with a non-zero score. range: \eqn{[0, \infty)} Note: should only pass one of \code{gamma} or \code{min_split_loss}. Both refer to the same parameter and there's thus no difference between one or the other.} \item{max_depth}{(for Tree Booster) (default=6, type=int32) Maximum depth of a tree. Increasing this value will make the model more complex and more likely to overfit. 0 indicates no limit on depth. Beware that XGBoost aggressively consumes memory when training a deep tree. \code{"exact"} tree method requires non-zero value. range: \eqn{[0, \infty)}} \item{min_child_weight}{(for Tree Booster) (default=1) Minimum sum of instance weight (hessian) needed in a child. If the tree partition step results in a leaf node with the sum of instance weight less than \code{min_child_weight}, then the building process will give up further partitioning. In linear regression task, this simply corresponds to minimum number of instances needed to be in each node. The larger \code{min_child_weight} is, the more conservative the algorithm will be. range: \eqn{[0, \infty)}} \item{max_delta_step}{(for Tree Booster) (default=0) Maximum delta step we allow each leaf output to be. If the value is set to 0, it means there is no constraint. If it is set to a positive value, it can help making the update step more conservative. Usually this parameter is not needed, but it might help in logistic regression when class is extremely imbalanced. Set it to value of 1-10 might help control the update. range: \eqn{[0, \infty)}} \item{subsample}{(for Tree Booster) (default=1) Subsample ratio of the training instances. Setting it to 0.5 means that XGBoost would randomly sample half of the training data prior to growing trees. and this will prevent overfitting. Subsampling will occur once in every boosting iteration. range: \eqn{(0,1]}} \item{sampling_method}{(for Tree Booster) (default= \code{"uniform"}) The method to use to sample the training instances. \itemize{ \item \code{"uniform"}: each training instance has an equal probability of being selected. Typically set \code{"subsample"} >= 0.5 for good results. \item \code{"gradient_based"}: the selection probability for each training instance is proportional to the \bold{regularized absolute value} of gradients (more specifically, \eqn{\sqrt{g^2+\lambda h^2}}). \code{"subsample"} may be set to as low as 0.1 without loss of model accuracy. 
Note that this sampling method is only supported when \code{"tree_method"} is set to \code{"hist"} and the device is \code{"cuda"}; other tree methods only support \code{"uniform"} sampling. }} \item{colsample_bytree, colsample_bylevel, colsample_bynode}{(for Tree Booster) (default=1) This is a family of parameters for subsampling of columns. \itemize{ \item All \code{"colsample_by*"} parameters have a range of \eqn{(0, 1]}, the default value of 1, and specify the fraction of columns to be subsampled. \item \code{"colsample_bytree"} is the subsample ratio of columns when constructing each tree. Subsampling occurs once for every tree constructed. \item \code{"colsample_bylevel"} is the subsample ratio of columns for each level. Subsampling occurs once for every new depth level reached in a tree. Columns are subsampled from the set of columns chosen for the current tree. \item \code{"colsample_bynode"} is the subsample ratio of columns for each node (split). Subsampling occurs once every time a new split is evaluated. Columns are subsampled from the set of columns chosen for the current level. This is not supported by the exact tree method. \item \code{"colsample_by*"} parameters work cumulatively. For instance, the combination \verb{\{'colsample_bytree'=0.5, 'colsample_bylevel'=0.5, 'colsample_bynode'=0.5\}} with 64 features will leave 8 features to choose from at each split. } One can set the \code{"feature_weights"} for DMatrix to define the probability of each feature being selected when using column sampling.} \item{lambda, reg_lambda}{(two aliases for the same parameter) \itemize{ \item For tree-based boosters: \itemize{ \item L2 regularization term on weights. Increasing this value will make the model more conservative. \item default: 1 \item range: \eqn{[0, \infty)} } \item For linear booster: \itemize{ \item L2 regularization term on weights. Increasing this value will make the model more conservative. Normalised to the number of training examples. \item default: 0 \item range: \eqn{[0, \infty)} } } Note: should only pass one of \code{lambda} or \code{reg_lambda}. Both refer to the same parameter and there's thus no difference between one or the other.} \item{alpha, reg_alpha}{(two aliases for the same parameter) \itemize{ \item L1 regularization term on weights. Increasing this value will make the model more conservative. \item For the linear booster, it's normalised to the number of training examples. \item default: 0 \item range: \eqn{[0, \infty)} } Note: should only pass one of \code{alpha} or \code{reg_alpha}. Both refer to the same parameter and there's thus no difference between one or the other.} \item{tree_method}{(for Tree Booster) (default= \code{"auto"}) The tree construction algorithm used in XGBoost. See description in the \href{https://arxiv.org/abs/1603.02754}{reference paper} and \href{https://xgboost.readthedocs.io/en/latest/treemethod.html}{Tree Methods}. Choices: \code{"auto"}, \code{"exact"}, \code{"approx"}, \code{"hist"}; each of these is a combination of commonly used updaters. For other updaters like \code{"refresh"}, set the parameter \code{updater} directly. \itemize{ \item \code{"auto"}: Same as the \code{"hist"} tree method. \item \code{"exact"}: Exact greedy algorithm. Enumerates all split candidates. \item \code{"approx"}: Approximate greedy algorithm using quantile sketch and gradient histogram. \item \code{"hist"}: Faster histogram optimized approximate greedy algorithm.
}} \item{scale_pos_weight}{(for Tree Booster) (default=1) Control the balance of positive and negative weights, useful for unbalanced classes. A typical value to consider: \verb{sum(negative instances) / sum(positive instances)}. See \href{https://xgboost.readthedocs.io/en/latest/tutorials/param_tuning.html}{Parameters Tuning} for more discussion. Also, see Higgs Kaggle competition demo for examples: \href{https://github.com/dmlc/xgboost/blob/master/demo/kaggle-higgs/higgs-train.R}{R}, \href{https://github.com/dmlc/xgboost/blob/master/demo/kaggle-higgs/higgs-numpy.py}{py1}, \href{https://github.com/dmlc/xgboost/blob/master/demo/kaggle-higgs/higgs-cv.py}{py2}, \href{https://github.com/dmlc/xgboost/blob/master/demo/guide-python/cross_validation.py}{py3}.} \item{updater}{Has different meanings depending on the type of booster. \itemize{ \item For tree-based boosters: A comma-separated string defining the sequence of tree updaters to run, providing a modular way to construct and to modify the trees. This is an advanced parameter that is usually set automatically, depending on some other parameters. However, it can also be set explicitly by a user. The following updaters exist: \itemize{ \item \code{"grow_colmaker"}: non-distributed column-based construction of trees. \item \code{"grow_histmaker"}: distributed tree construction with row-based data splitting based on global proposal of histogram counting. \item \code{"grow_quantile_histmaker"}: Grow tree using quantized histogram. \item \code{"grow_gpu_hist"}: Enabled when \code{tree_method} is set to \code{"hist"} along with \code{device="cuda"}. \item \code{"grow_gpu_approx"}: Enabled when \code{tree_method} is set to \code{"approx"} along with \code{device="cuda"}. \item \code{"sync"}: synchronizes trees in all distributed nodes. \item \code{"refresh"}: refreshes tree's statistics and/or leaf values based on the current data. Note that no random subsampling of data rows is performed. \item \code{"prune"}: prunes the splits where loss < \code{min_split_loss} (or \code{gamma}) and nodes that have depth greater than \code{max_depth}. } \item For \code{booster="gblinear"}: (default= \code{"shotgun"}) Choice of algorithm to fit the linear model \itemize{ \item \code{"shotgun"}: Parallel coordinate descent algorithm based on shotgun algorithm. Uses 'hogwild' parallelism and therefore produces a nondeterministic solution on each run. \item \code{"coord_descent"}: Ordinary coordinate descent algorithm. Also multithreaded but still produces a deterministic solution. When the \code{device} parameter is set to \code{"cuda"} or \code{"gpu"}, a GPU variant would be used. } }} \item{refresh_leaf}{(for Tree Booster) (default=1) This is a parameter of the \code{"refresh"} updater. When this flag is 1, tree leaves as well as tree nodes' stats are updated. When it is 0, only node stats are updated.} \item{grow_policy}{(for Tree Booster) (default= \code{"depthwise"}) \itemize{ \item Controls the way new nodes are added to the tree. \item Currently supported only if \code{tree_method} is set to \code{"hist"} or \code{"approx"}. \item Choices: \code{"depthwise"}, \code{"lossguide"} \itemize{ \item \code{"depthwise"}: split at nodes closest to the root. \item \code{"lossguide"}: split at nodes with the highest loss change. } }} \item{max_leaves}{(for Tree Booster) (default=0, type=int32) Maximum number of nodes to be added.
Not used by \code{"exact"} tree method.} \item{max_bin}{(for Tree Booster) (default=256, type=int32) \itemize{ \item Only used if \code{tree_method} is set to \code{"hist"} or \code{"approx"}. \item Maximum number of discrete bins to bucket continuous features. \item Increasing this number improves the optimality of splits at the cost of higher computation time. }} \item{num_parallel_tree}{(for Tree Booster) (default=1) Number of parallel trees constructed during each iteration. This option is used to support boosted random forest.} \item{monotone_constraints}{(for Tree Booster) Constraint of variable monotonicity. See \href{https://xgboost.readthedocs.io/en/latest/tutorials/monotonic.html}{Monotonic Constraints} for more information.} \item{interaction_constraints}{(for Tree Booster) Constraints for interaction representing permitted interactions. The constraints must be specified in the form of a nest list, e.g. \code{list(c(0, 1), c(2, 3, 4))}, where each inner list is a group of indices of features (base-0 numeration) that are allowed to interact with each other. See \href{https://xgboost.readthedocs.io/en/latest/tutorials/feature_interaction_constraint.html}{Feature Interaction Constraints} for more information.} \item{multi_strategy}{(for Tree Booster) (default = \code{"one_output_per_tree"}) The strategy used for training multi-target models, including multi-target regression and multi-class classification. See \href{https://xgboost.readthedocs.io/en/latest/tutorials/multioutput.html}{Multiple Outputs} for more information. \itemize{ \item \code{"one_output_per_tree"}: One model for each target. \item \code{"multi_output_tree"}: Use multi-target trees. } Version added: 2.0.0 Note: This parameter is working-in-progress.} \item{base_score}{\itemize{ \item The initial prediction score of all instances, global bias \item The parameter is automatically estimated for selected objectives before training. To disable the estimation, specify a real number argument. \item If \code{base_margin} is supplied, \code{base_score} will not be added. \item For sufficient number of iterations, changing this value will not have too much effect. }} \item{eval_metric}{(default according to objective) \itemize{ \item Evaluation metrics for validation data, a default metric will be assigned according to objective (rmse for regression, and logloss for classification, \verb{mean average precision} for \code{rank:map}, etc.) \item User can add multiple evaluation metrics. \item The choices are listed below: \itemize{ \item \code{"rmse"}: \href{https://en.wikipedia.org/wiki/Root_mean_square_error}{root mean square error} \item \code{"rmsle"}: root mean square log error: \eqn{\sqrt{\frac{1}{N}[log(pred + 1) - log(label + 1)]^2}}. Default metric of \code{"reg:squaredlogerror"} objective. This metric reduces errors generated by outliers in dataset. But because \code{log} function is employed, \code{"rmsle"} might output \code{nan} when prediction value is less than -1. See \code{"reg:squaredlogerror"} for other requirements. \item \code{"mae"}: \href{https://en.wikipedia.org/wiki/Mean_absolute_error}{mean absolute error} \item \code{"mape"}: \href{https://en.wikipedia.org/wiki/Mean_absolute_percentage_error}{mean absolute percentage error} \item \code{"mphe"}: \href{https://en.wikipedia.org/wiki/Huber_loss}{mean Pseudo Huber error}. Default metric of \code{"reg:pseudohubererror"} objective. 
\item \code{"logloss"}: \href{https://en.wikipedia.org/wiki/Log-likelihood}{negative log-likelihood} \item \code{"error"}: Binary classification error rate. It is calculated as \verb{#(wrong cases)/#(all cases)}. For the predictions, the evaluation will regard the instances with prediction value larger than 0.5 as positive instances, and the others as negative instances. \item \code{"error@t"}: a different than 0.5 binary classification threshold value could be specified by providing a numerical value through 't'. \item \code{"merror"}: Multiclass classification error rate. It is calculated as \verb{#(wrong cases)/#(all cases)}. \item \code{"mlogloss"}: \href{https://scikit-learn.org/stable/modules/generated/sklearn.metrics.log_loss.html}{Multiclass logloss}. \item \code{"auc"}: \href{https://en.wikipedia.org/wiki/Receiver_operating_characteristic#Area_under_the_curve}{Receiver Operating Characteristic Area under the Curve}. Available for classification and learning-to-rank tasks. \itemize{ \item When used with binary classification, the objective should be \code{"binary:logistic"} or similar functions that work on probability. \item When used with multi-class classification, objective should be \code{"multi:softprob"} instead of \code{"multi:softmax"}, as the latter doesn't output probability. Also the AUC is calculated by 1-vs-rest with reference class weighted by class prevalence. \item When used with LTR task, the AUC is computed by comparing pairs of documents to count correctly sorted pairs. This corresponds to pairwise learning to rank. The implementation has some issues with average AUC around groups and distributed workers not being well-defined. \item On a single machine the AUC calculation is exact. In a distributed environment the AUC is a weighted average over the AUC of training rows on each node - therefore, distributed AUC is an approximation sensitive to the distribution of data across workers. Use another metric in distributed environments if precision and reproducibility are important. \item When input dataset contains only negative or positive samples, the output is \code{NaN}. The behavior is implementation defined, for instance, \code{scikit-learn} returns \eqn{0.5} instead. } \item \code{"aucpr"}: \href{https://en.wikipedia.org/wiki/Precision_and_recall}{Area under the PR curve}. Available for classification and learning-to-rank tasks. After XGBoost 1.6, both of the requirements and restrictions for using \code{"aucpr"} in classification problem are similar to \code{"auc"}. For ranking task, only binary relevance label \eqn{y \in [0, 1]} is supported. Different from \code{"map"} (mean average precision), \code{"aucpr"} calculates the \emph{interpolated} area under precision recall curve using continuous interpolation. \item \code{"pre"}: Precision at \eqn{k}. Supports only learning to rank task. \item \code{"ndcg"}: \href{https://en.wikipedia.org/wiki/NDCG}{Normalized Discounted Cumulative Gain} \item \code{"map"}: \href{https://en.wikipedia.org/wiki/Mean_average_precision#Mean_average_precision}{Mean Average Precision} The \verb{average precision} is defined as: \eqn{AP@l = \frac{1}{min{(l, N)}}\sum^l_{k=1}P@k \cdot I_{(k)}} where \eqn{I_{(k)}} is an indicator function that equals to \eqn{1} when the document at \eqn{k} is relevant and \eqn{0} otherwise. The \eqn{P@k} is the precision at \eqn{k}, and \eqn{N} is the total number of relevant documents. Lastly, the \verb{mean average precision} is defined as the weighted average across all queries. 
\item \code{"ndcg@n"}, \code{"map@n"}, \code{"pre@n"}: \eqn{n} can be assigned as an integer to cut off the top positions in the lists for evaluation. \item \code{"ndcg-"}, \code{"map-"}, \code{"ndcg@n-"}, \code{"map@n-"}: In XGBoost, the NDCG and MAP evaluate the score of a list without any positive samples as \eqn{1}. By appending "-" to the evaluation metric name, we can ask XGBoost to evaluate these scores as \eqn{0} to be consistent under some conditions. \item \code{"poisson-nloglik"}: negative log-likelihood for Poisson regression \item \code{"gamma-nloglik"}: negative log-likelihood for gamma regression \item \code{"cox-nloglik"}: negative partial log-likelihood for Cox proportional hazards regression \item \code{"gamma-deviance"}: residual deviance for gamma regression \item \code{"tweedie-nloglik"}: negative log-likelihood for Tweedie regression (at a specified value of the \code{tweedie_variance_power} parameter) \item \code{"aft-nloglik"}: Negative log likelihood of Accelerated Failure Time model. See \href{https://xgboost.readthedocs.io/en/latest/tutorials/aft_survival_analysis.html}{Survival Analysis with Accelerated Failure Time} for details. \item \code{"interval-regression-accuracy"}: Fraction of data points whose predicted labels fall in the interval-censored labels. Only applicable for interval-censored data. See \href{https://xgboost.readthedocs.io/en/latest/tutorials/aft_survival_analysis.html}{Survival Analysis with Accelerated Failure Time} for details. } }} \item{seed_per_iteration}{(default= \code{FALSE}) Seed PRNG determnisticly via iterator number.} \item{device}{(default= \code{"cpu"}) Device for XGBoost to run. User can set it to one of the following values: \itemize{ \item \code{"cpu"}: Use CPU. \item \code{"cuda"}: Use a GPU (CUDA device). \item \code{"cuda:"}: \verb{} is an integer that specifies the ordinal of the GPU (which GPU do you want to use if you have more than one devices). \item \code{"gpu"}: Default GPU device selection from the list of available and supported devices. Only \code{"cuda"} devices are supported currently. \item \code{"gpu:"}: Default GPU device selection from the list of available and supported devices. Only \code{"cuda"} devices are supported currently. } For more information about GPU acceleration, see \href{https://xgboost.readthedocs.io/en/latest/gpu/index.html}{XGBoost GPU Support}. In distributed environments, ordinal selection is handled by distributed frameworks instead of XGBoost. As a result, using \code{"cuda:"} will result in an error. Use \code{"cuda"} instead. Version added: 2.0.0 Note: if XGBoost was installed from CRAN, it won't have GPU support enabled, thus only \code{"cpu"} will be available. To get GPU support, the R package for XGBoost must be installed from source or from the GitHub releases - see \href{https://xgboost.readthedocs.io/en/latest/install.html#r}{instructions}.} \item{disable_default_eval_metric}{(default= \code{FALSE}) Flag to disable default metric. Set to 1 or \code{TRUE} to disable.} \item{use_rmm}{Whether to use RAPIDS Memory Manager (RMM) to allocate cache GPU memory. The primary memory is always allocated on the RMM pool when XGBoost is built (compiled) with the RMM plugin enabled. Valid values are \code{TRUE} and \code{FALSE}. 
See \href{https://xgboost.readthedocs.io/en/latest/python/rmm-examples/index.html}{Using XGBoost with RAPIDS Memory Manager (RMM) plugin} for details.} \item{max_cached_hist_node}{(for Non-Exact Tree Methods) (default = 65536) Maximum number of cached nodes for histogram. This can be used with the \code{"hist"} and the \code{"approx"} tree methods. Version added: 2.0.0 \itemize{ \item In most cases this parameter should not be set, except for growing deep trees. After 3.0, this parameter affects GPU algorithms as well. }} \item{extmem_single_page}{(for Non-Exact Tree Methods) (default = \code{FALSE}) This parameter is only used for the \code{"hist"} tree method with \code{device="cuda"} and \code{subsample != 1.0}. Before 3.0, pages were always concatenated. Version added: 3.0.0 Whether the GPU-based \code{"hist"} tree method should concatenate the training data into a single batch instead of fetching data on-demand when external memory is used. For GPU devices that don't support address translation services, external memory training is expensive. This parameter can be used in combination with subsampling to reduce overall memory usage without significant overhead. See \href{https://xgboost.readthedocs.io/en/latest/tutorials/external_memory.html}{Using XGBoost External Memory Version} for more information.} \item{max_cat_to_onehot}{(for Non-Exact Tree Methods) A threshold for deciding whether XGBoost should use a one-hot encoding-based split for categorical data. When the number of categories is less than the threshold, one-hot encoding is chosen; otherwise the categories will be partitioned into children nodes. Version added: 1.6.0} \item{max_cat_threshold}{(for Non-Exact Tree Methods) Maximum number of categories considered for each split. Used only by partition-based splits for preventing over-fitting. Version added: 1.7.0} \item{sample_type}{(for Dart Booster) (default= \code{"uniform"}) Type of sampling algorithm. \itemize{ \item \code{"uniform"}: dropped trees are selected uniformly. \item \code{"weighted"}: dropped trees are selected in proportion to weight. }} \item{normalize_type}{(for Dart Booster) (default= \code{"tree"}) Type of normalization algorithm. \itemize{ \item \code{"tree"}: new trees have the same weight as each of the dropped trees. \itemize{ \item The weight of new trees is \code{1 / (k + learning_rate)}. \item Dropped trees are scaled by a factor of \code{k / (k + learning_rate)}. } \item \code{"forest"}: new trees have the same weight as the sum of the dropped trees (forest). \itemize{ \item The weight of new trees is \code{1 / (1 + learning_rate)}. \item Dropped trees are scaled by a factor of \code{1 / (1 + learning_rate)}. } }} \item{rate_drop}{(for Dart Booster) (default=0.0) Dropout rate (a fraction of previous trees to drop during the dropout). range: \eqn{[0.0, 1.0]}} \item{one_drop}{(for Dart Booster) (default=0) When this flag is enabled, at least one tree is always dropped during the dropout (allows Binomial-plus-one or epsilon-dropout from the original DART paper).} \item{skip_drop}{(for Dart Booster) (default=0.0) Probability of skipping the dropout procedure during a boosting iteration. \itemize{ \item If a dropout is skipped, new trees are added in the same manner as \code{"gbtree"}. \item Note that non-zero \code{skip_drop} has higher priority than \code{rate_drop} or \code{one_drop}.
} range: \eqn{[0.0, 1.0]}} \item{feature_selector}{(for Linear Booster) (default= \code{"cyclic"}) Feature selection and ordering method. \itemize{ \item \code{"cyclic"}: Deterministic selection by cycling through features one at a time. \item \code{"shuffle"}: Similar to \code{"cyclic"} but with random feature shuffling prior to each update. \item \code{"random"}: A random (with replacement) coordinate selector. \item \code{"greedy"}: Select coordinate with the greatest gradient magnitude. It has \code{O(num_feature^2)} complexity. It is fully deterministic. It allows restricting the selection to \code{top_k} features per group with the largest magnitude of univariate weight change, by setting the \code{top_k} parameter. Doing so would reduce the complexity to \code{O(num_feature*top_k)}. \item \code{"thrifty"}: Thrifty, approximately-greedy feature selector. Prior to cyclic updates, reorders features in descending magnitude of their univariate weight changes. This operation is multithreaded and is a linear complexity approximation of the quadratic greedy selection. It allows restricting the selection to \code{top_k} features per group with the largest magnitude of univariate weight change, by setting the \code{top_k} parameter. }} \item{top_k}{(for Linear Booster) (default=0) The number of top features to select in the \code{greedy} and \code{thrifty} feature selectors. The value of 0 means using all the features.} \item{num_class}{Number of classes when using multi-class classification objectives (e.g. \code{objective="multi:softprob"})} \item{tweedie_variance_power}{(for Tweedie Regression (\code{"objective=reg:tweedie"})) (default=1.5) \itemize{ \item Parameter that controls the variance of the Tweedie distribution \code{var(y) ~ E(y)^tweedie_variance_power} \item range: \eqn{(1,2)} \item Set closer to 2 to shift towards a gamma distribution \item Set closer to 1 to shift towards a Poisson distribution. }} \item{huber_slope}{(for using Pseudo-Huber (\code{"reg:pseudohubererror"})) (default = 1.0) A parameter used for Pseudo-Huber loss to define the \eqn{\delta} term.} \item{quantile_alpha}{(for using Quantile Loss (\code{"reg:quantileerror"})) A scalar or a list of targeted quantiles (passed as a numeric vector). Version added: 2.0.0} \item{aft_loss_distribution}{(for using AFT Survival Loss (\code{"survival:aft"}) and Negative Log Likelihood of AFT metric (\code{"aft-nloglik"})) Probability Density Function, \code{"normal"}, \code{"logistic"}, or \code{"extreme"}.} \item{lambdarank_pair_method}{(for learning to rank (\code{"rank:ndcg"}, \code{"rank:map"}, \code{"rank:pairwise"})) (default = \code{"topk"}) How to construct pairs for pair-wise learning. \itemize{ \item \code{"mean"}: Sample \code{lambdarank_num_pair_per_sample} pairs for each document in the query list. \item \code{"topk"}: Focus on top-\code{lambdarank_num_pair_per_sample} documents. Construct \eqn{|query|} pairs for each document at the top-\code{lambdarank_num_pair_per_sample} ranked by the model. }} \item{lambdarank_num_pair_per_sample}{(for learning to rank (\code{"rank:ndcg"}, \code{"rank:map"}, \code{"rank:pairwise"})) It specifies the number of pairs sampled for each document when the pair method is \code{"mean"}, or the truncation level for queries when the pair method is \code{"topk"}. For example, to train with \verb{ndcg@6}, set \code{"lambdarank_num_pair_per_sample"} to \eqn{6} and \code{lambdarank_pair_method} to \code{"topk"}.
range = \eqn{[1, \infty)}} \item{lambdarank_normalization}{(for learning to rank (\code{"rank:ndcg"}, \code{"rank:map"}, \code{"rank:pairwise"})) (default = \code{TRUE}) Whether to normalize the leaf value by lambda gradient. This can sometimes stagnate the training progress. Version added: 2.1.0} \item{lambdarank_score_normalization}{Whether to normalize the delta metric by the difference of prediction scores. This can sometimes stagnate the training progress. With pairwise ranking, we can normalize the gradient using the difference between two samples in each pair to reduce influence from the pairs that have a large difference in ranking scores. This can help us regularize the model to reduce bias and prevent overfitting. Similar to other regularization techniques, this might prevent training from converging. There was no normalization before 2.0. In 2.0 and later versions this is used by default. In 3.0, we made this an option that users can disable. Version added: 3.0.0} \item{lambdarank_unbiased}{(for learning to rank (\code{"rank:ndcg"}, \code{"rank:map"}, \code{"rank:pairwise"})) (default = \code{FALSE}) Specify whether to debias input click data.} \item{lambdarank_bias_norm}{(for learning to rank (\code{"rank:ndcg"}, \code{"rank:map"}, \code{"rank:pairwise"})) (default = 2.0) \eqn{L_p} normalization for position debiasing, default is \eqn{L_2}. Only relevant when \code{lambdarank_unbiased} is set to \code{TRUE}.} \item{ndcg_exp_gain}{(for learning to rank (\code{"rank:ndcg"}, \code{"rank:map"}, \code{"rank:pairwise"})) (default = \code{TRUE}) Whether to use the exponential gain function for \code{NDCG}. There are two forms of gain function for \code{NDCG}: one uses the relevance value directly, while the other uses \eqn{2^{rel} - 1} to put more emphasis on retrieving relevant documents. When \code{ndcg_exp_gain} is \code{TRUE} (the default), relevance degree cannot be greater than 31.} } \value{ A list with the entries that were passed non-NULL values. It is intended to be passed as argument \code{params} to \code{\link[=xgb.train]{xgb.train()}} or \code{\link[=xgb.cv]{xgb.cv()}}. } \description{ Convenience function to generate a list of named XGBoost parameters, which can be passed as argument \code{params} to \code{\link[=xgb.train]{xgb.train()}}. See the \href{https://xgboost.readthedocs.io/en/stable/parameter.html}{online documentation} for more details. The purpose of this function is to enable IDE autocompletions and to provide in-package documentation for all the possible parameters that XGBoost accepts. The output from this function is just a regular R list containing the parameters that were set to non-default values. Note that this function will not perform any validation on the supplied arguments. If passing \code{NULL} for a given parameter (the default for all of them), then the default value for that parameter will be used. Default values are automatically determined by the XGBoost core library upon calls to \code{\link[=xgb.train]{xgb.train()}} or \code{\link[=xgb.cv]{xgb.cv()}}, and are subject to change over XGBoost library versions. Some of them might differ according to the booster type (e.g. defaults for regularization are different for linear and tree-based boosters).
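As a minimal usage sketch (the parameter values here are illustrative only, and \code{dtrain} is assumed to be an \code{xgb.DMatrix} created beforehand): \preformatted{
params <- xgb.params(
  objective = "binary:logistic",
  max_depth = 3,
  learning_rate = 0.1,
  nthread = 1
)
# The resulting list is passed as-is to the training function:
model <- xgb.train(params = params, data = dtrain, nrounds = 10)
}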
} xgboost-3.0.0/R-package/man/xgb.plot.deepness.Rd000066400000000000000000000061151476511272400214110ustar00rootroot00000000000000% Generated by roxygen2: do not edit by hand % Please edit documentation in R/xgb.ggplot.R, R/xgb.plot.deepness.R \name{xgb.ggplot.deepness} \alias{xgb.ggplot.deepness} \alias{xgb.plot.deepness} \title{Plot model tree depth} \usage{ xgb.ggplot.deepness( model = NULL, which = c("2x1", "max.depth", "med.depth", "med.weight") ) xgb.plot.deepness( model = NULL, which = c("2x1", "max.depth", "med.depth", "med.weight"), plot = TRUE, ... ) } \arguments{ \item{model}{Either an \code{xgb.Booster} model, or the "data.table" returned by \code{\link[=xgb.model.dt.tree]{xgb.model.dt.tree()}}.} \item{which}{Which distribution to plot (see details).} \item{plot}{Should the plot be shown? Default is \code{TRUE}.} \item{...}{Other parameters passed to \code{\link[graphics:barplot]{graphics::barplot()}} or \code{\link[graphics:plot.default]{graphics::plot()}}.} } \value{ The return value of the two functions is as follows: \itemize{ \item \code{xgb.plot.deepness()}: A "data.table" (invisibly). Each row corresponds to a terminal leaf in the model and contains information about the leaf's depth, cover, and weight (used in calculating predictions). If \code{plot = TRUE}, a plot is also shown. \item \code{xgb.ggplot.deepness()}: When \code{which = "2x1"}, a list of two "ggplot" objects, and a single "ggplot" object otherwise. } } \description{ Visualizes distributions related to the depth of tree leaves. \itemize{ \item \code{xgb.plot.deepness()} uses base R graphics, while \item \code{xgb.ggplot.deepness()} uses "ggplot2". } } \details{ When \code{which = "2x1"}, two distributions with respect to the leaf depth are plotted on top of each other: \enumerate{ \item The distribution of the number of leaves in a tree model at a certain depth. \item The distribution of the average weighted number of observations ("cover") ending up in leaves at a certain depth. } Those could be helpful in determining sensible ranges of the \code{max_depth} and \code{min_child_weight} parameters. When \code{which = "max.depth"} or \code{which = "med.depth"}, plots of either maximum or median depth per tree with respect to the tree number are created. Finally, \code{which = "med.weight"} makes it possible to see how a tree's median absolute leaf weight changes through the iterations. These functions have been inspired by the blog post \url{https://github.com/aysent/random-forest-leaf-visualization}. } \examples{ data(agaricus.train, package = "xgboost") ## Keep the number of threads to 2 for examples nthread <- 2 data.table::setDTthreads(nthread) ## Change max_depth to a higher number to get a more significant result model <- xgboost( agaricus.train$data, factor(agaricus.train$label), nrounds = 50, max_depth = 6, nthreads = nthread, subsample = 0.5, min_child_weight = 2 ) xgb.plot.deepness(model) xgb.ggplot.deepness(model) xgb.plot.deepness( model, which = "max.depth", pch = 16, col = rgb(0, 0, 1, 0.3), cex = 2 ) xgb.plot.deepness( model, which = "med.weight", pch = 16, col = rgb(0, 0, 1, 0.3), cex = 2 ) } \seealso{ \code{\link[=xgb.train]{xgb.train()}} and \code{\link[=xgb.model.dt.tree]{xgb.model.dt.tree()}}.
} xgboost-3.0.0/R-package/man/xgb.plot.importance.Rd000066400000000000000000000072151476511272400217460ustar00rootroot00000000000000% Generated by roxygen2: do not edit by hand % Please edit documentation in R/xgb.ggplot.R, R/xgb.plot.importance.R \name{xgb.ggplot.importance} \alias{xgb.ggplot.importance} \alias{xgb.plot.importance} \title{Plot feature importance} \usage{ xgb.ggplot.importance( importance_matrix = NULL, top_n = NULL, measure = NULL, rel_to_first = FALSE, n_clusters = seq_len(10), ... ) xgb.plot.importance( importance_matrix = NULL, top_n = NULL, measure = NULL, rel_to_first = FALSE, left_margin = 10, cex = NULL, plot = TRUE, ... ) } \arguments{ \item{importance_matrix}{A \code{data.table} as returned by \code{\link[=xgb.importance]{xgb.importance()}}.} \item{top_n}{Maximal number of top features to include in the plot.} \item{measure}{The name of importance measure to plot. When \code{NULL}, 'Gain' would be used for trees and 'Weight' would be used for gblinear.} \item{rel_to_first}{Whether importance values should be represented as relative to the highest ranked feature, see Details.} \item{n_clusters}{A numeric vector containing the min and the max range of the possible number of clusters of bars.} \item{...}{Other parameters passed to \code{\link[graphics:barplot]{graphics::barplot()}} (except \code{horiz}, \code{border}, \code{cex.names}, \code{names.arg}, and \code{las}). Only used in \code{xgb.plot.importance()}.} \item{left_margin}{Adjust the left margin size to fit feature names. When \code{NULL}, the existing \code{par("mar")} is used.} \item{cex}{Passed as \code{cex.names} parameter to \code{\link[graphics:barplot]{graphics::barplot()}}.} \item{plot}{Should the barplot be shown? Default is \code{TRUE}.} } \value{ The return value depends on the function: \itemize{ \item \code{xgb.plot.importance()}: Invisibly, a "data.table" with \code{top_n} features sorted by importance. If \code{plot = TRUE}, the values are also plotted as barplot. \item \code{xgb.ggplot.importance()}: A customizable "ggplot" object. E.g., to change the title, set \code{+ ggtitle("A GRAPH NAME")}. } } \description{ Represents previously calculated feature importance as a bar graph. \itemize{ \item \code{xgb.plot.importance()} uses base R graphics, while \item \code{xgb.ggplot.importance()} uses "ggplot". } } \details{ The graph represents each feature as a horizontal bar of length proportional to the importance of a feature. Features are sorted by decreasing importance. It works for both "gblinear" and "gbtree" models. When \code{rel_to_first = FALSE}, the values would be plotted as in \code{importance_matrix}. For a "gbtree" model, that would mean being normalized to the total of 1 ("what is feature's importance contribution relative to the whole model?"). For linear models, \code{rel_to_first = FALSE} would show actual values of the coefficients. Setting \code{rel_to_first = TRUE} allows seeing the picture from the perspective of "what is feature's importance contribution relative to the most important feature?" The "ggplot" backend performs 1-D clustering of the importance values, with bar colors corresponding to different clusters having similar importance values.
} \examples{ data(agaricus.train) ## Keep the number of threads to 2 for examples nthread <- 2 data.table::setDTthreads(nthread) model <- xgboost( agaricus.train$data, factor(agaricus.train$label), nrounds = 2, max_depth = 3, nthreads = nthread ) importance_matrix <- xgb.importance(model) xgb.plot.importance( importance_matrix, rel_to_first = TRUE, xlab = "Relative importance" ) gg <- xgb.ggplot.importance( importance_matrix, measure = "Frequency", rel_to_first = TRUE ) gg gg + ggplot2::ylab("Frequency") } \seealso{ \code{\link[graphics:barplot]{graphics::barplot()}} } xgboost-3.0.0/R-package/man/xgb.plot.multi.trees.Rd000066400000000000000000000066611476511272400220640ustar00rootroot00000000000000% Generated by roxygen2: do not edit by hand % Please edit documentation in R/xgb.plot.multi.trees.R \name{xgb.plot.multi.trees} \alias{xgb.plot.multi.trees} \title{Project all trees on one tree} \usage{ xgb.plot.multi.trees( model, features_keep = 5, plot_width = NULL, plot_height = NULL, render = TRUE, ... ) } \arguments{ \item{model}{Object of class \code{xgb.Booster}. If it contains feature names (they can be set through \code{\link[=setinfo]{setinfo()}}), they will be used in the output from this function.} \item{features_keep}{Number of features to keep in each position of the multi trees, by default 5.} \item{plot_width, plot_height}{Width and height of the graph in pixels. The values are passed to \code{DiagrammeR::render_graph()}.} \item{render}{Should the graph be rendered or not? The default is \code{TRUE}.} \item{...}{Not used. Some arguments that were part of this function in previous XGBoost versions are currently deprecated or have been renamed. If a deprecated or renamed argument is passed, will throw a warning (by default) and use its current equivalent instead. This warning will become an error if using the \link[=xgboost-options]{'strict mode' option}. If some additional argument is passed that is neither a current function argument nor a deprecated or renamed argument, a warning or error will be thrown depending on the 'strict mode' option. \bold{Important:} \code{...} will be removed in a future version, and all the current deprecation warnings will become errors. Please use only arguments that form part of the function signature.} } \value{ Rendered graph object, which is an htmlwidget of class \code{grViz}. Similar to "ggplot" objects, it needs to be printed when not running from the command line. } \description{ Visualization of the ensemble of trees as a single collective unit. } \details{ Note that this function does not work with models that were fitted to categorical data. This function tries to capture the complexity of a gradient boosted tree model in a cohesive way by compressing an ensemble of trees into a single tree-graph representation. The goal is to improve the interpretability of a model generally seen as a black box. Note: this function is applicable to tree booster-based models only. It takes advantage of the fact that the shape of a binary tree is only defined by its depth (therefore, in a boosting model, all trees have similar shape). Moreover, the trees tend to reuse the same features. The function projects each tree onto one, and keeps for each position the first \code{features_keep} features (based on the Gain per feature measure).
This function is inspired by this blog post: \url{https://wellecks.wordpress.com/2015/02/21/peering-into-the-black-box-visualizing-lambdamart/} } \examples{ data(agaricus.train, package = "xgboost") ## Keep the number of threads to 2 for examples nthread <- 2 data.table::setDTthreads(nthread) model <- xgboost( agaricus.train$data, factor(agaricus.train$label), nrounds = 30, verbosity = 0L, nthreads = nthread, max_depth = 15, learning_rate = 1, min_child_weight = 50 ) p <- xgb.plot.multi.trees(model, features_keep = 3) print(p) # Below is an example of how to save this plot to a file. if (require("DiagrammeR") && require("DiagrammeRsvg") && require("rsvg")) { fname <- file.path(tempdir(), "tree.pdf") gr <- xgb.plot.multi.trees(model, features_keep = 3, render = FALSE) export_graph(gr, fname, width = 1500, height = 600) } } xgboost-3.0.0/R-package/man/xgb.plot.shap.Rd000066400000000000000000000151141476511272400205350ustar00rootroot00000000000000% Generated by roxygen2: do not edit by hand % Please edit documentation in R/xgb.plot.shap.R \name{xgb.plot.shap} \alias{xgb.plot.shap} \title{SHAP dependence plots} \usage{ xgb.plot.shap( data, shap_contrib = NULL, features = NULL, top_n = 1, model = NULL, trees = NULL, target_class = NULL, approxcontrib = FALSE, subsample = NULL, n_col = 1, col = rgb(0, 0, 1, 0.2), pch = ".", discrete_n_uniq = 5, discrete_jitter = 0.01, ylab = "SHAP", plot_NA = TRUE, col_NA = rgb(0.7, 0, 1, 0.6), pch_NA = ".", pos_NA = 1.07, plot_loess = TRUE, col_loess = 2, span_loess = 0.5, which = c("1d", "2d"), plot = TRUE, ... ) } \arguments{ \item{data}{The data to explain as a \code{matrix}, \code{dgCMatrix}, or \code{data.frame}.} \item{shap_contrib}{Matrix of SHAP contributions of \code{data}. The default (\code{NULL}) computes it from \code{model} and \code{data}.} \item{features}{Vector of column indices or feature names to plot. When \code{NULL} (default), the \code{top_n} most important features are selected by \code{\link[=xgb.importance]{xgb.importance()}}.} \item{top_n}{How many of the most important features (<= 100) should be selected? By default 1 for SHAP dependence and 10 for SHAP summary. Only used when \code{features = NULL}.} \item{model}{An \code{xgb.Booster} model. Only required when \code{shap_contrib = NULL} or \code{features = NULL}.} \item{trees}{Passed to \code{\link[=xgb.importance]{xgb.importance()}} when \code{features = NULL}.} \item{target_class}{Only relevant for multiclass models. The default (\code{NULL}) averages the SHAP values over all classes. Pass a (0-based) class index to show only SHAP values of that class.} \item{approxcontrib}{Passed to \code{\link[=predict.xgb.Booster]{predict.xgb.Booster()}} when \code{shap_contrib = NULL}.} \item{subsample}{Fraction of data points randomly picked for plotting. The default (\code{NULL}) will use up to 100k data points.} \item{n_col}{Number of columns in a grid of plots.} \item{col}{Color of the scatterplot markers.} \item{pch}{Scatterplot marker.} \item{discrete_n_uniq}{Maximal number of unique feature values to consider the feature as discrete.} \item{discrete_jitter}{Jitter amount added to the values of discrete features.} \item{ylab}{The y-axis label in 1D plots.} \item{plot_NA}{Should contributions of cases with missing values be plotted? 
Default is \code{TRUE}.} \item{col_NA}{Color of marker for missing value contributions.} \item{pch_NA}{Marker type for \code{NA} values.} \item{pos_NA}{Relative position of the x-location where \code{NA} values are shown: \code{min(x) + (max(x) - min(x)) * pos_NA}.} \item{plot_loess}{Should loess-smoothed curves be plotted? (Default is \code{TRUE}). The smoothing is only done for features with more than 5 distinct values.} \item{col_loess}{Color of loess curves.} \item{span_loess}{The \code{span} parameter of \code{\link[stats:loess]{stats::loess()}}.} \item{which}{Whether to do univariate or bivariate plotting. Currently, only "1d" is implemented.} \item{plot}{Should the plot be drawn? (Default is \code{TRUE}). If \code{FALSE}, only a list of matrices is returned.} \item{...}{Other parameters passed to \code{\link[graphics:plot.default]{graphics::plot()}}.} } \value{ In addition to producing plots (when \code{plot = TRUE}), it silently returns a list of two matrices: \itemize{ \item \code{data}: Feature value matrix. \item \code{shap_contrib}: Corresponding SHAP value matrix. } } \description{ Visualizes SHAP values against feature values to gain an impression of feature effects. } \details{ These scatterplots represent how SHAP feature contributions depend on feature values. The similarity to partial dependence plots is that they also give an idea of how feature values affect predictions. However, in partial dependence plots, we see marginal dependencies of model prediction on feature value, while SHAP dependence plots display the estimated contributions of a feature to the prediction for each individual case. When \code{plot_loess = TRUE}, feature values are rounded to three significant digits and weighted LOESS is computed and plotted, where the weights are the numbers of data points at each rounded value. Note: SHAP contributions are on the scale of the model margin. E.g., for a logistic binomial objective, the margin is on log-odds scale. Also, since SHAP stands for "SHapley Additive exPlanations" (model prediction = sum of SHAP contributions for all features + bias), depending on the objective used, transforming SHAP contributions for a feature from the marginal to the prediction space is not necessarily a meaningful thing to do.
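To make the margin-scale point concrete, the per-feature SHAP contributions (plus the bias column) can be summed and passed through the inverse link by hand. A minimal sketch for a binary logistic model, assuming a fitted booster \code{model} and a feature matrix \code{x}:

\preformatted{contr <- predict(model, x, type = "contrib")
margin <- rowSums(contr)        # SHAP contributions + bias = margin (log-odds here)
prob <- 1 / (1 + exp(-margin))  # should match predict(model, x) for binary:logistic
}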
} \examples{ data(agaricus.train, package = "xgboost") data(agaricus.test, package = "xgboost") ## Keep the number of threads to 1 for examples nthread <- 1 data.table::setDTthreads(nthread) nrounds <- 20 model_binary <- xgboost( agaricus.train$data, factor(agaricus.train$label), nrounds = nrounds, verbosity = 0L, learning_rate = 0.1, max_depth = 3L, subsample = 0.5, nthreads = nthread ) xgb.plot.shap(agaricus.test$data, model = model_binary, features = "odor=none") contr <- predict(model_binary, agaricus.test$data, type = "contrib") xgb.plot.shap(agaricus.test$data, contr, model = model_binary, top_n = 12, n_col = 3) # Summary plot xgb.ggplot.shap.summary(agaricus.test$data, contr, model = model_binary, top_n = 12) # Multiclass example - plots for each class separately: x <- as.matrix(iris[, -5]) set.seed(123) is.na(x[sample(nrow(x) * 4, 30)]) <- TRUE # introduce some missing values model_multiclass <- xgboost( x, iris$Species, nrounds = nrounds, verbosity = 0, max_depth = 2, subsample = 0.5, nthreads = nthread ) nclass <- 3 trees0 <- seq(from = 1, by = nclass, length.out = nrounds) col <- rgb(0, 0, 1, 0.5) xgb.plot.shap( x, model = model_multiclass, trees = trees0, target_class = 0, top_n = 4, n_col = 2, col = col, pch = 16, pch_NA = 17 ) xgb.plot.shap( x, model = model_multiclass, trees = trees0 + 1, target_class = 1, top_n = 4, n_col = 2, col = col, pch = 16, pch_NA = 17 ) xgb.plot.shap( x, model = model_multiclass, trees = trees0 + 2, target_class = 2, top_n = 4, n_col = 2, col = col, pch = 16, pch_NA = 17 ) # Summary plot xgb.ggplot.shap.summary(x, model = model_multiclass, target_class = 0, top_n = 4) } \references{ \enumerate{ \item Scott M. Lundberg, Su-In Lee, "A Unified Approach to Interpreting Model Predictions", NIPS Proceedings 2017, \url{https://arxiv.org/abs/1705.07874} \item Scott M. Lundberg, Su-In Lee, "Consistent feature attribution for tree ensembles", \url{https://arxiv.org/abs/1706.06060} } } xgboost-3.0.0/R-package/man/xgb.plot.shap.summary.Rd000066400000000000000000000052701476511272400222330ustar00rootroot00000000000000% Generated by roxygen2: do not edit by hand % Please edit documentation in R/xgb.ggplot.R, R/xgb.plot.shap.R \name{xgb.ggplot.shap.summary} \alias{xgb.ggplot.shap.summary} \alias{xgb.plot.shap.summary} \title{SHAP summary plot} \usage{ xgb.ggplot.shap.summary( data, shap_contrib = NULL, features = NULL, top_n = 10, model = NULL, trees = NULL, target_class = NULL, approxcontrib = FALSE, subsample = NULL ) xgb.plot.shap.summary( data, shap_contrib = NULL, features = NULL, top_n = 10, model = NULL, trees = NULL, target_class = NULL, approxcontrib = FALSE, subsample = NULL ) } \arguments{ \item{data}{The data to explain as a \code{matrix}, \code{dgCMatrix}, or \code{data.frame}.} \item{shap_contrib}{Matrix of SHAP contributions of \code{data}. The default (\code{NULL}) computes it from \code{model} and \code{data}.} \item{features}{Vector of column indices or feature names to plot. When \code{NULL} (default), the \code{top_n} most important features are selected by \code{\link[=xgb.importance]{xgb.importance()}}.} \item{top_n}{How many of the most important features (<= 100) should be selected? By default 1 for SHAP dependence and 10 for SHAP summary. Only used when \code{features = NULL}.} \item{model}{An \code{xgb.Booster} model. 
Only required when \code{shap_contrib = NULL} or \code{features = NULL}.} \item{trees}{Passed to \code{\link[=xgb.importance]{xgb.importance()}} when \code{features = NULL}.} \item{target_class}{Only relevant for multiclass models. The default (\code{NULL}) averages the SHAP values over all classes. Pass a (0-based) class index to show only SHAP values of that class.} \item{approxcontrib}{Passed to \code{\link[=predict.xgb.Booster]{predict.xgb.Booster()}} when \code{shap_contrib = NULL}.} \item{subsample}{Fraction of data points randomly picked for plotting. The default (\code{NULL}) will use up to 100k data points.} } \value{ A \code{ggplot2} object. } \description{ Visualizes SHAP contributions of different features. } \details{ A point plot (each point representing one observation from \code{data}) is produced for each feature, with the points plotted on the SHAP value axis. Each point (observation) is coloured based on its feature value. The plot makes it possible to see which features have a negative / positive contribution to the model prediction, and whether the contribution is different for larger or smaller values of the feature. Inspired by the summary plot of \url{https://github.com/shap/shap}. } \examples{ # See examples in xgb.plot.shap() } \seealso{ \code{\link[=xgb.plot.shap]{xgb.plot.shap()}}, \code{\link[=xgb.ggplot.shap.summary]{xgb.ggplot.shap.summary()}}, and the Python library \url{https://github.com/shap/shap}. } xgboost-3.0.0/R-package/man/xgb.plot.tree.Rd000066400000000000000000000061241476511272400205420ustar00rootroot00000000000000% Generated by roxygen2: do not edit by hand % Please edit documentation in R/xgb.plot.tree.R \name{xgb.plot.tree} \alias{xgb.plot.tree} \title{Plot boosted trees} \usage{ xgb.plot.tree( model, tree_idx = 1, plot_width = NULL, plot_height = NULL, with_stats = FALSE, ... ) } \arguments{ \item{model}{Object of class \code{xgb.Booster}. If it contains feature names (they can be set through \code{\link[=setinfo]{setinfo()}}), they will be used in the output from this function.} \item{tree_idx}{An integer of the tree index that should be used. This is a 1-based index.} \item{plot_width, plot_height}{Width and height of the graph in pixels. The values are passed to \code{DiagrammeR::render_graph()}.} \item{with_stats}{Whether to dump some additional statistics about the splits. When this option is on, the model dump contains two additional values: gain is the approximate loss function gain we get in each split; cover is the sum of second order gradient in each node.} \item{...}{Not used. Some arguments that were part of this function in previous XGBoost versions are currently deprecated or have been renamed. If a deprecated or renamed argument is passed, will throw a warning (by default) and use its current equivalent instead. This warning will become an error if using the \link[=xgboost-options]{'strict mode' option}. If some additional argument is passed that is neither a current function argument nor a deprecated or renamed argument, a warning or error will be thrown depending on the 'strict mode' option. \bold{Important:} \code{...} will be removed in a future version, and all the current deprecation warnings will become errors. Please use only arguments that form part of the function signature.} } \value{ Rendered graph object which is an htmlwidget of class \code{grViz}. Similar to "ggplot" objects, it needs to be printed when not running from the command line. } \description{ Read a tree model text dump and plot the model.
} \details{ The content of each node is visualized as follows: \itemize{ \item For non-terminal nodes, it will display the split condition (number or name if available, and the condition that would decide to which node to go next). \item Those nodes will be connected to their children by arrows that indicate whether the branch corresponds to the condition being met or not being met. \item Terminal (leaf) nodes contain the margin to add when ending there. } The "Yes" branches are marked by the "< split_value" label. The branches also used for missing values are marked as bold (as in "carrying extra capacity"). This function uses \href{https://www.graphviz.org/}{GraphViz} as DiagrammeR backend. } \examples{ data("ToothGrowth") x <- ToothGrowth[, c("len", "dose")] y <- ToothGrowth$supp model <- xgboost( x, y, nthreads = 1L, nrounds = 3L, max_depth = 3L ) # plot the first tree xgb.plot.tree(model, tree_idx = 1) # Below is an example of how to save this plot to a file. if (require("DiagrammeR") && require("htmlwidgets")) { fname <- file.path(tempdir(), "plot.html") gr <- xgb.plot.tree(model, tree_idx = 1) htmlwidgets::saveWidget(gr, fname) } } xgboost-3.0.0/R-package/man/xgb.save.Rd000066400000000000000000000051151476511272400175630ustar00rootroot00000000000000% Generated by roxygen2: do not edit by hand % Please edit documentation in R/xgb.save.R \name{xgb.save} \alias{xgb.save} \title{Save XGBoost model to binary file} \usage{ xgb.save(model, fname) } \arguments{ \item{model}{Model object of \code{xgb.Booster} class.} \item{fname}{Name of the file to write. Its extension determines the serialization format: \itemize{ \item ".ubj": Use the universal binary JSON format (recommended). This format uses binary types for e.g. floating point numbers, thereby preventing any loss of precision when converting to a human-readable JSON text or similar. \item ".json": Use plain JSON, which is a human-readable format. \item ".deprecated": Use \strong{deprecated} binary format. This format will not be able to save attributes introduced after v1 of XGBoost, such as the "best_iteration" attribute that boosters might keep, nor feature names or user-specified attributes. \item If the format is not specified by passing one of the file extensions above, will default to UBJ. }} } \description{ Save XGBoost model to a file in binary or JSON format. } \details{ This function allows saving a model in an XGBoost-internal binary or text format which is universal among the various xgboost interfaces. In R, the saved model file can be read back later using either the \code{\link[=xgb.load]{xgb.load()}} function or the \code{xgb_model} parameter of \code{\link[=xgb.train]{xgb.train()}}. Note: a model can also be saved as an R object (e.g., by using \code{\link[=saveRDS]{saveRDS()}} or \code{\link[=save]{save()}}). However, it would then only be compatible with R, and corresponding R methods would need to be used to load it. Moreover, persisting the model with \code{\link[=saveRDS]{saveRDS()}} or \code{\link[=save]{save()}} might cause compatibility problems in future versions of XGBoost. Consult \link{a-compatibility-note-for-saveRDS-save} to learn how to persist models in a future-proof way, i.e., to make the model accessible in future releases of XGBoost.
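For instance, the serialization format is chosen purely from the file extension (a minimal sketch, assuming a fitted booster \code{bst}):

\preformatted{xgb.save(bst, file.path(tempdir(), "model.ubj"))   # UBJ, the recommended format
xgb.save(bst, file.path(tempdir(), "model.json"))  # plain, human-readable JSON
bst2 <- xgb.load(file.path(tempdir(), "model.ubj"))
}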
} \examples{ \dontshow{RhpcBLASctl::omp_set_num_threads(1)} data(agaricus.train, package = "xgboost") data(agaricus.test, package = "xgboost") ## Keep the number of threads to 1 for examples nthread <- 1 data.table::setDTthreads(nthread) train <- agaricus.train test <- agaricus.test bst <- xgb.train( data = xgb.DMatrix(train$data, label = train$label, nthread = 1), nrounds = 2, params = xgb.params( max_depth = 2, nthread = nthread, objective = "binary:logistic" ) ) fname <- file.path(tempdir(), "xgb.ubj") xgb.save(bst, fname) bst <- xgb.load(fname) } \seealso{ \code{\link[=xgb.load]{xgb.load()}} } xgboost-3.0.0/R-package/man/xgb.save.raw.Rd000066400000000000000000000024071476511272400203540ustar00rootroot00000000000000% Generated by roxygen2: do not edit by hand % Please edit documentation in R/xgb.save.raw.R \name{xgb.save.raw} \alias{xgb.save.raw} \title{Save XGBoost model to R's raw vector} \usage{ xgb.save.raw(model, raw_format = "ubj") } \arguments{ \item{model}{The model object.} \item{raw_format}{The format for encoding the booster: \itemize{ \item "json": Encode the booster into a JSON text document. \item "ubj": Encode the booster into Universal Binary JSON. \item "deprecated": Encode the booster into the old customized binary format. }} } \description{ Save XGBoost model from \code{\link[=xgboost]{xgboost()}} or \code{\link[=xgb.train]{xgb.train()}}. Call \code{\link[=xgb.load.raw]{xgb.load.raw()}} to load the model back from a raw vector. } \examples{ \dontshow{RhpcBLASctl::omp_set_num_threads(1)} data(agaricus.train, package = "xgboost") data(agaricus.test, package = "xgboost") ## Keep the number of threads to 1 for examples nthread <- 1 data.table::setDTthreads(nthread) train <- agaricus.train test <- agaricus.test bst <- xgb.train( data = xgb.DMatrix(train$data, label = train$label, nthread = 1), nrounds = 2, params = xgb.params( max_depth = 2, nthread = nthread, objective = "binary:logistic" ) ) raw <- xgb.save.raw(bst) bst <- xgb.load.raw(raw) } xgboost-3.0.0/R-package/man/xgb.slice.Booster.Rd000066400000000000000000000034561476511272400213460ustar00rootroot00000000000000% Generated by roxygen2: do not edit by hand % Please edit documentation in R/xgb.Booster.R \name{xgb.slice.Booster} \alias{xgb.slice.Booster} \alias{[.xgb.Booster} \title{Slice Booster by Rounds} \usage{ xgb.slice.Booster( model, start, end = xgb.get.num.boosted.rounds(model), step = 1L ) \method{[}{xgb.Booster}(x, i) } \arguments{ \item{model, x}{A fitted \code{xgb.Booster} object, which is to be sliced by taking only a subset of its rounds / iterations.} \item{start}{Start of the slice (base-1 and inclusive, like R's \code{\link[=seq]{seq()}}).} \item{end}{End of the slice (base-1 and inclusive, like R's \code{\link[=seq]{seq()}}). Passing a value of zero here is equivalent to passing the full number of rounds in the booster object.} \item{step}{Step size of the slice. Passing '1' will take every round in the sequence defined by \verb{(start, end)}, while passing '2' will take every second value, and so on.} \item{i}{The indices - must be an increasing sequence as generated by e.g. \code{seq(...)}.} } \value{ A sliced booster object containing only the requested rounds. } \description{ Creates a new booster including only a selected range of rounds / iterations from an existing booster, as given by the sequence \code{seq(start, end, step)}. } \details{ Note that any R attributes that the booster might have will not be copied into the resulting object.
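The S3 \code{[} method shown in the usage section is a shorthand for the same operation (a minimal sketch, assuming \code{model} has at least three boosting rounds):

\preformatted{model_a <- xgb.slice.Booster(model, 1, 3)
model_b <- model[seq(1, 3)]  # equivalent slice via the '[' method
}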
} \examples{ data(mtcars) y <- mtcars$mpg x <- as.matrix(mtcars[, -1]) dm <- xgb.DMatrix(x, label = y, nthread = 1) model <- xgb.train(data = dm, params = xgb.params(nthread = 1), nrounds = 5) model_slice <- xgb.slice.Booster(model, 1, 3) # Prediction for first three rounds predict(model, x, predleaf = TRUE)[, 1:3] # The new model has only those rounds, so # a full prediction from it is equivalent predict(model_slice, x, predleaf = TRUE) } xgboost-3.0.0/R-package/man/xgb.slice.DMatrix.Rd000066400000000000000000000022401476511272400212670ustar00rootroot00000000000000% Generated by roxygen2: do not edit by hand % Please edit documentation in R/xgb.DMatrix.R \name{xgb.slice.DMatrix} \alias{xgb.slice.DMatrix} \alias{[.xgb.DMatrix} \title{Slice DMatrix} \usage{ xgb.slice.DMatrix(object, idxset, allow_groups = FALSE) \method{[}{xgb.DMatrix}(object, idxset, colset = NULL) } \arguments{ \item{object}{Object of class \code{xgb.DMatrix}.} \item{idxset}{An integer vector of indices of rows needed (base-1 indexing).} \item{allow_groups}{Whether to allow slicing an \code{xgb.DMatrix} with \code{group} (or equivalently \code{qid}) field. Note that in such case, the result will not have the groups anymore - they need to be set manually through \code{\link[=setinfo]{setinfo()}}.} \item{colset}{Currently not used (columns subsetting is not available).} } \description{ Get a new DMatrix containing the specified rows of original xgb.DMatrix object. } \examples{ data(agaricus.train, package = "xgboost") dtrain <- with(agaricus.train, xgb.DMatrix(data, label = label, nthread = 2)) dsub <- xgb.slice.DMatrix(dtrain, 1:42) labels1 <- getinfo(dsub, "label") dsub <- dtrain[1:42, ] labels2 <- getinfo(dsub, "label") all.equal(labels1, labels2) } xgboost-3.0.0/R-package/man/xgb.train.Rd000066400000000000000000000317311476511272400177450ustar00rootroot00000000000000% Generated by roxygen2: do not edit by hand % Please edit documentation in R/xgb.train.R \name{xgb.train} \alias{xgb.train} \title{Fit XGBoost Model} \usage{ xgb.train( params = xgb.params(), data, nrounds, evals = list(), objective = NULL, custom_metric = NULL, verbose = 1, print_every_n = 1L, early_stopping_rounds = NULL, maximize = NULL, save_period = NULL, save_name = "xgboost.model", xgb_model = NULL, callbacks = list(), ... ) } \arguments{ \item{params}{List of XGBoost parameters which control the model building process. See the \href{https://xgboost.readthedocs.io/en/latest/parameter.html}{online documentation} and the documentation for \code{\link[=xgb.params]{xgb.params()}} for details. Should be passed as list with named entries. Parameters that are not specified in this list will use their default values. A list of named parameters can be created through the function \code{\link[=xgb.params]{xgb.params()}}, which accepts all valid parameters as function arguments.} \item{data}{Training dataset. \code{xgb.train()} accepts only an \code{xgb.DMatrix} as the input. Note that there is a function \code{\link[=xgboost]{xgboost()}} which is meant to accept R data objects as inputs, such as data frames and matrices.} \item{nrounds}{Max number of boosting iterations.} \item{evals}{Named list of \code{xgb.DMatrix} datasets to use for evaluating model performance. Metrics specified in either \code{eval_metric} (under params) or \code{custom_metric} (function argument here) will be computed for each of these datasets during each boosting iteration, and stored in the end as a field named \code{evaluation_log} in the resulting object. 
When either \code{verbose>=1} or \code{\link[=xgb.cb.print.evaluation]{xgb.cb.print.evaluation()}} callback is engaged, the performance results are continuously printed out during the training. E.g., specifying \code{evals=list(validation1=mat1, validation2=mat2)} allows tracking the performance of each round's model on \code{mat1} and \code{mat2}.} \item{objective}{Customized objective function. Should take two arguments: the first one will be the current predictions (either a numeric vector or matrix depending on the number of targets / classes), and the second one will be the \code{data} DMatrix object that is used for training. It should return a list with two elements \code{grad} and \code{hess} (in that order), as either numeric vectors or numeric matrices depending on the number of targets / classes (same dimension as the predictions that are passed as first argument).} \item{custom_metric}{Customized evaluation function. Just like \code{objective}, should take two arguments, with the first one being the predictions and the second one the \code{data} DMatrix. Should return a list with two elements \code{metric} (name that will be displayed for this metric, should be a string / character), and \code{value} (the number that the function calculates, should be a numeric scalar). Note that even if passing \code{custom_metric}, objectives also have an associated default metric that will be evaluated in addition to it. In order to disable the built-in metric, one can pass parameter \code{disable_default_eval_metric = TRUE}.} \item{verbose}{If 0, xgboost will stay silent. If 1, it will print information about performance. If 2, some additional information will be printed out. Note that setting \code{verbose > 0} automatically engages the \code{xgb.cb.print.evaluation(period=1)} callback function.} \item{print_every_n}{When passing \code{verbose>0}, evaluation logs (metrics calculated on the data passed under \code{evals}) will be printed every nth iteration according to the value passed here. The first and last iteration are always included regardless of this 'n'. Only has an effect when passing data under \code{evals} and when passing \code{verbose>0}. The parameter is passed to the \code{\link[=xgb.cb.print.evaluation]{xgb.cb.print.evaluation()}} callback.} \item{early_stopping_rounds}{Number of boosting rounds after which training will be stopped if there is no improvement in performance (as measured by the evaluation metric that is supplied or selected by default for the objective) on the evaluation data passed under \code{evals}. Must pass \code{evals} in order to use this functionality. Setting this parameter adds the \code{\link[=xgb.cb.early.stop]{xgb.cb.early.stop()}} callback. If \code{NULL}, early stopping will not be used.} \item{maximize}{If \code{custom_metric} and \code{early_stopping_rounds} are set, then this parameter must be set as well. When it is \code{TRUE}, it means the larger the evaluation score the better. This parameter is passed to the \code{\link[=xgb.cb.early.stop]{xgb.cb.early.stop()}} callback.} \item{save_period}{When not \code{NULL}, model is saved to disk after every \code{save_period} rounds. 0 means save at the end. The saving is handled by the \code{\link[=xgb.cb.save.model]{xgb.cb.save.model()}} callback.} \item{save_name}{The name or path for the periodically saved model file.} \item{xgb_model}{A previously built model to continue the training from.
Could be either an object of class \code{xgb.Booster}, or its raw data, or the name of a file with a previously saved model.} \item{callbacks}{A list of callback functions to perform various tasks during boosting. See \code{\link[=xgb.Callback]{xgb.Callback()}}. Some of the callbacks are automatically created depending on the parameters' values. Users can provide either existing or their own callback methods in order to customize the training process. Note that some callbacks might try to leave attributes in the resulting model object, such as an evaluation log (a \code{data.table} object) - be aware that these objects are kept as R attributes, and thus do not get saved when using XGBoost's own serializers like \code{\link[=xgb.save]{xgb.save()}} (but are kept when using R serializers like \code{\link[=saveRDS]{saveRDS()}}).} \item{...}{Not used. Some arguments that were part of this function in previous XGBoost versions are currently deprecated or have been renamed. If a deprecated or renamed argument is passed, will throw a warning (by default) and use its current equivalent instead. This warning will become an error if using the \link[=xgboost-options]{'strict mode' option}. If some additional argument is passed that is neither a current function argument nor a deprecated or renamed argument, a warning or error will be thrown depending on the 'strict mode' option. \bold{Important:} \code{...} will be removed in a future version, and all the current deprecation warnings will become errors. Please use only arguments that form part of the function signature.} } \value{ An object of class \code{xgb.Booster}. } \description{ Fits an XGBoost model to given data in DMatrix format (e.g. as produced by \code{\link[=xgb.DMatrix]{xgb.DMatrix()}}). See the tutorial \href{https://xgboost.readthedocs.io/en/stable/tutorials/model.html}{Introduction to Boosted Trees} for a longer explanation of what XGBoost does, and the rest of the \href{https://xgboost.readthedocs.io/en/latest/tutorials/index.html}{XGBoost Tutorials} for further explanations of XGBoost's features and usage. Compared to function \code{\link[=xgboost]{xgboost()}} which is a user-friendly function targeted towards interactive usage, \code{xgb.train} is a lower-level interface which allows finer-grained control and exposes further functionalities offered by the core library (such as learning-to-rank objectives), but which works exclusively with XGBoost's own data format ("DMatrices") instead of with regular R objects. The syntax of this function closely mimics the same function from the Python package for XGBoost, and its use is recommended for package developers over \code{xgboost()} as it will provide a more stable interface (with fewer breaking changes) and lower overhead from data validations. See also the \href{https://xgboost.readthedocs.io/en/latest/R-package/migration_guide.html}{migration guide} if coming from a previous version of XGBoost in the 1.x series. } \details{ Compared to \code{\link[=xgboost]{xgboost()}}, the \code{xgb.train()} interface supports advanced features such as \code{evals}, customized objective and evaluation metric functions, among others, with the difference that these work with \code{xgb.DMatrix} objects and do not follow typical R idioms. Parallelization is automatically enabled if OpenMP is present. Number of threads can also be manually specified via the \code{nthread} parameter.
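As a minimal sketch of controlling threading through the parameter list (assuming an existing \code{xgb.DMatrix} named \code{dtrain}):

\preformatted{params <- xgb.params(objective = "binary:logistic", nthread = 2)
bst <- xgb.train(params, dtrain, nrounds = 10)
}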
While in other XGBoost language bindings the random seed defaults to zero, in R, if a parameter \code{seed} is not manually supplied, it will generate a random seed through R's own random number generator, whose seed in turn is controllable through \code{set.seed}. If \code{seed} is passed, it will override the RNG from R. The following callbacks are automatically created when certain parameters are set: \itemize{ \item \code{\link[=xgb.cb.print.evaluation]{xgb.cb.print.evaluation()}} is turned on when \code{verbose > 0} and the \code{print_every_n} parameter is passed to it. \item \code{\link[=xgb.cb.evaluation.log]{xgb.cb.evaluation.log()}} is on when \code{evals} is present. \item \code{\link[=xgb.cb.early.stop]{xgb.cb.early.stop()}}: When \code{early_stopping_rounds} is set. \item \code{\link[=xgb.cb.save.model]{xgb.cb.save.model()}}: When \code{save_period > 0} is set. } Note that objects of type \code{xgb.Booster} as returned by this function behave a bit differently from typical R objects (it's an 'altrep' list class), and it makes a separation between internal booster attributes (restricted to jsonifyable data), accessed through \code{\link[=xgb.attr]{xgb.attr()}} and shared between interfaces through serialization functions like \code{\link[=xgb.save]{xgb.save()}}; and R-specific attributes (typically the result from a callback), accessed through \code{\link[=attributes]{attributes()}} and \code{\link[=attr]{attr()}}, which are otherwise only used in the R interface, only kept when using R's serializers like \code{\link[=saveRDS]{saveRDS()}}, and not otherwise used by functions like \code{predict.xgb.Booster()}. Be aware that one such R attribute that is automatically added is \code{params} - this attribute is assigned from the \code{params} argument to this function, and is only meant to serve as a reference for what went into the booster, but is not used in other methods that take a booster object - so for example, changing the booster's configuration requires calling \verb{xgb.config<-} or \verb{xgb.model.parameters<-}, while simply modifying \verb{attributes(model)$params$<...>} will have no effect elsewhere. } \examples{ data(agaricus.train, package = "xgboost") data(agaricus.test, package = "xgboost") ## Keep the number of threads to 1 for examples nthread <- 1 data.table::setDTthreads(nthread) dtrain <- with( agaricus.train, xgb.DMatrix(data, label = label, nthread = nthread) ) dtest <- with( agaricus.test, xgb.DMatrix(data, label = label, nthread = nthread) ) evals <- list(train = dtrain, eval = dtest) ## A simple xgb.train example: param <- xgb.params( max_depth = 2, nthread = nthread, objective = "binary:logistic", eval_metric = "auc" ) bst <- xgb.train(param, dtrain, nrounds = 2, evals = evals, verbose = 0) ## An xgb.train example where custom objective and evaluation metric are ## used: logregobj <- function(preds, dtrain) { labels <- getinfo(dtrain, "label") preds <- 1/(1 + exp(-preds)) grad <- preds - labels hess <- preds * (1 - preds) return(list(grad = grad, hess = hess)) } evalerror <- function(preds, dtrain) { labels <- getinfo(dtrain, "label") err <- as.numeric(sum(labels != (preds > 0)))/length(labels) return(list(metric = "error", value = err)) } # These functions could be used by passing them as 'objective' and # 'eval_metric' parameters in the params list: param <- xgb.params( max_depth = 2, nthread = nthread, objective = logregobj, eval_metric = evalerror ) bst <- xgb.train(param, dtrain, nrounds = 2, evals = evals, verbose = 0) # ...
or as dedicated 'objective' and 'custom_metric' parameters of xgb.train: bst <- xgb.train( within(param, rm("objective", "eval_metric")), dtrain, nrounds = 2, evals = evals, objective = logregobj, custom_metric = evalerror ) ## An xgb.train example of using variable learning rates at each iteration: param <- xgb.params( max_depth = 2, learning_rate = 1, nthread = nthread, objective = "binary:logistic", eval_metric = "auc" ) my_learning_rates <- list(learning_rate = c(0.5, 0.1)) bst <- xgb.train( param, dtrain, nrounds = 2, evals = evals, verbose = 0, callbacks = list(xgb.cb.reset.parameters(my_learning_rates)) ) ## Early stopping: bst <- xgb.train( param, dtrain, nrounds = 25, evals = evals, early_stopping_rounds = 3 ) } \references{ Tianqi Chen and Carlos Guestrin, "XGBoost: A Scalable Tree Boosting System", 22nd SIGKDD Conference on Knowledge Discovery and Data Mining, 2016, \url{https://arxiv.org/abs/1603.02754} } \seealso{ \code{\link[=xgb.Callback]{xgb.Callback()}}, \code{\link[=predict.xgb.Booster]{predict.xgb.Booster()}}, \code{\link[=xgb.cv]{xgb.cv()}} } xgboost-3.0.0/R-package/man/xgbConfig.Rd000066400000000000000000000036151476511272400177570ustar00rootroot00000000000000% Generated by roxygen2: do not edit by hand % Please edit documentation in R/xgb.config.R \name{xgb.set.config, xgb.get.config} \alias{xgb.set.config, xgb.get.config} \alias{xgb.set.config} \alias{xgb.get.config} \title{Set and get global configuration} \usage{ xgb.set.config(...) xgb.get.config() } \arguments{ \item{...}{List of parameters to be set, as keyword arguments} } \value{ \code{xgb.set.config()} returns \code{TRUE} to signal success. \code{xgb.get.config()} returns a list containing all global-scope parameters and their values. } \description{ Global configuration consists of a collection of parameters that can be applied in the global scope. See \url{https://xgboost.readthedocs.io/en/stable/parameter.html} for the full list of parameters supported in the global configuration. Use \code{xgb.set.config()} to update the values of one or more global-scope parameters. Use \code{xgb.get.config()} to fetch the current values of all global-scope parameters (listed in \url{https://xgboost.readthedocs.io/en/stable/parameter.html}). } \details{ Note that serialization-related functions might use a globally-configured number of threads, which is managed by the system's OpenMP (OMP) configuration instead. Typically, XGBoost methods accept an \code{nthreads} parameter, but some methods like \code{\link[=readRDS]{readRDS()}} might get executed before such parameter can be supplied. The number of OMP threads can in turn be configured for example through an environment variable \code{OMP_NUM_THREADS} (needs to be set before R is started), or through \code{RhpcBLASctl::omp_set_num_threads}. 
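For example, to cap the OMP-managed thread count at runtime before deserializing a model (a sketch; the file name is hypothetical):

\preformatted{RhpcBLASctl::omp_set_num_threads(1)  # limit OMP threads for subsequent calls
bst <- readRDS("model.rds")          # hypothetical file; deserialization uses OMP threads
}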
} \examples{ # Set verbosity level to silent (0) xgb.set.config(verbosity = 0) # Now global verbosity level is 0 config <- xgb.get.config() print(config$verbosity) # Set verbosity level to warning (1) xgb.set.config(verbosity = 1) # Now global verbosity level is 1 config <- xgb.get.config() print(config$verbosity) } xgboost-3.0.0/R-package/man/xgboost-options.Rd000066400000000000000000000026641476511272400212300ustar00rootroot00000000000000% Generated by roxygen2: do not edit by hand % Please edit documentation in R/utils.R \name{xgboost-options} \alias{xgboost-options} \title{XGBoost Options} \description{ XGBoost offers an \link[base:options]{option setting} for controlling the behavior of deprecated and removed function arguments. Some of the arguments in functions like \code{\link[=xgb.train]{xgb.train()}} or \code{\link[=predict.xgb.Booster]{predict.xgb.Booster()}} have been renamed from how they were in previous versions, or have been removed. In order to make the transition to newer XGBoost versions easier, some of these parameters are still accepted but issue a warning when using them. \bold{Note that these warnings will become errors in the future!!} - this is just a temporary workaround to make the transition easier. One can optionally use 'strict mode' to turn these warnings into errors, in order to ensure that code calling xgboost will still work once those are removed in future releases. Currently, the only supported option is \code{xgboost.strict_mode}, which can be set to \code{TRUE} or \code{FALSE} (default). In addition to an R option, it can also be enabled by setting the environment variable \code{XGB_STRICT_MODE=1}. If set, this environment variable will take precedence over the option. } \examples{ options("xgboost.strict_mode" = FALSE) options("xgboost.strict_mode" = TRUE) Sys.setenv("XGB_STRICT_MODE" = "1") Sys.setenv("XGB_STRICT_MODE" = "0") } xgboost-3.0.0/R-package/man/xgboost.Rd000066400000000000000000001230451476511272400175360ustar00rootroot00000000000000% Generated by roxygen2: do not edit by hand % Please edit documentation in R/xgboost.R \name{xgboost} \alias{xgboost} \title{Fit XGBoost Model} \usage{ xgboost( x, y, objective = NULL, nrounds = 100L, max_depth = NULL, learning_rate = NULL, min_child_weight = NULL, min_split_loss = NULL, reg_lambda = NULL, weights = NULL, verbosity = if (is.null(eval_set)) 0L else 1L, monitor_training = verbosity > 0, eval_set = NULL, early_stopping_rounds = NULL, print_every_n = 1L, eval_metric = NULL, nthreads = parallel::detectCores(), seed = 0L, base_margin = NULL, monotone_constraints = NULL, interaction_constraints = NULL, reg_alpha = NULL, max_bin = NULL, max_leaves = NULL, booster = NULL, subsample = NULL, sampling_method = NULL, feature_weights = NULL, colsample_bytree = NULL, colsample_bylevel = NULL, colsample_bynode = NULL, tree_method = NULL, max_delta_step = NULL, scale_pos_weight = NULL, updater = NULL, grow_policy = NULL, num_parallel_tree = NULL, multi_strategy = NULL, base_score = NULL, seed_per_iteration = NULL, device = NULL, disable_default_eval_metric = NULL, use_rmm = NULL, max_cached_hist_node = NULL, extmem_single_page = NULL, max_cat_to_onehot = NULL, max_cat_threshold = NULL, sample_type = NULL, normalize_type = NULL, rate_drop = NULL, one_drop = NULL, skip_drop = NULL, feature_selector = NULL, top_k = NULL, tweedie_variance_power = NULL, huber_slope = NULL, quantile_alpha = NULL, aft_loss_distribution = NULL, ... ) } \arguments{ \item{x}{The features / covariates.
Can be passed as: \itemize{ \item A numeric or integer \code{matrix}. \item A \code{data.frame}, in which all columns are one of the following types: \itemize{ \item \code{numeric} \item \code{integer} \item \code{logical} \item \code{factor} } Columns of \code{factor} type will be assumed to be categorical, while other column types will be assumed to be numeric. \item A sparse matrix from the \code{Matrix} package, either as \code{dgCMatrix} or \code{dgRMatrix} class. } Note that categorical features are only supported for \code{data.frame} inputs, and are automatically determined based on their types. See \code{\link[=xgb.train]{xgb.train()}} with \code{\link[=xgb.DMatrix]{xgb.DMatrix()}} for more flexible variants that would allow something like categorical features on sparse matrices.} \item{y}{The response variable. Allowed values are: \itemize{ \item A numeric or integer vector (for regression tasks). \item A factor or character vector (for binary and multi-class classification tasks). \item A logical (boolean) vector (for binary classification tasks). \item A numeric or integer matrix or \code{data.frame} with numeric/integer columns (for multi-task regression tasks). \item A \code{Surv} object from the 'survival' package (for survival tasks). } If \code{objective} is \code{NULL}, the right task will be determined automatically based on the class of \code{y}. If \code{objective} is not \code{NULL}, it must match the type of \code{y} - e.g. \code{factor} types of \code{y} can only be used with classification objectives and vice-versa. For binary classification, the last factor level of \code{y} will be used as the "positive" class - that is, the numbers from \code{predict} will reflect the probabilities of belonging to this class instead of to the first factor level. If \code{y} is a \code{logical} vector, then \code{TRUE} will be set as the last level.} \item{objective}{Optimization objective to minimize based on the supplied data, to be passed by name as a string / character (e.g. \code{reg:absoluteerror}). See the \href{https://xgboost.readthedocs.io/en/stable/parameter.html#learning-task-parameters}{Learning Task Parameters} page and the \code{\link[=xgb.params]{xgb.params()}} documentation for more detailed information on allowed values. If \code{NULL} (the default), will be automatically determined from \code{y} according to the following logic: \itemize{ \item If \code{y} is a factor with 2 levels, will use \code{binary:logistic}. \item If \code{y} is a factor with more than 2 levels, will use \code{multi:softprob} (number of classes will be determined automatically, should not be passed under \code{params}). \item If \code{y} is a \code{Surv} object from the \code{survival} package, will use \code{survival:aft} (note that the only types supported are left / right / interval censored). \item Otherwise, will use \code{reg:squarederror}. } If \code{objective} is not \code{NULL}, it must match the type of \code{y} - e.g. \code{factor} types of \code{y} can only be used with classification objectives and vice-versa. Note that not all possible \code{objective} values supported by the core XGBoost library are allowed here - for example, objectives which are a variation of another but with a different default prediction type (e.g. \code{multi:softmax} vs. \code{multi:softprob}) are not allowed, and neither are ranking objectives, nor custom objectives at the moment. Supported values are: \itemize{ \item \code{"reg:squarederror"}: regression with squared loss.
\item \code{"reg:squaredlogerror"}: regression with squared log loss \eqn{\frac{1}{2}[log(pred + 1) - log(label + 1)]^2}. All input labels are required to be greater than -1. Also, see metric \code{rmsle} for possible issue with this objective. \item \code{"reg:pseudohubererror"}: regression with Pseudo Huber loss, a twice differentiable alternative to absolute loss. \item \code{"reg:absoluteerror"}: Regression with L1 error. When tree model is used, leaf value is refreshed after tree construction. If used in distributed training, the leaf value is calculated as the mean value from all workers, which is not guaranteed to be optimal. \item \code{"reg:quantileerror"}: Quantile loss, also known as "pinball loss". See later sections for its parameter and \href{https://xgboost.readthedocs.io/en/latest/python/examples/quantile_regression.html#sphx-glr-python-examples-quantile-regression-py}{Quantile Regression} for a worked example. \item \code{"binary:logistic"}: logistic regression for binary classification, output probability \item \code{"binary:hinge"}: hinge loss for binary classification. This makes predictions of 0 or 1, rather than producing probabilities. \item \code{"count:poisson"}: Poisson regression for count data, output mean of Poisson distribution. \code{"max_delta_step"} is set to 0.7 by default in Poisson regression (used to safeguard optimization) \item \code{"survival:cox"}: Cox regression for right censored survival time data (negative values are considered right censored). Note that predictions are returned on the hazard ratio scale (i.e., as HR = exp(marginal_prediction) in the proportional hazard function \code{h(t) = h0(t) * HR}). \item \code{"survival:aft"}: Accelerated failure time model for censored survival time data. See \href{https://xgboost.readthedocs.io/en/latest/tutorials/aft_survival_analysis.html}{Survival Analysis with Accelerated Failure Time} for details. \item \code{"multi:softprob"}: multi-class classification throgh multinomial logistic likelihood. \item \code{"reg:gamma"}: gamma regression with log-link. Output is a mean of gamma distribution. It might be useful, e.g., for modeling insurance claims severity, or for any outcome that might be \href{https://en.wikipedia.org/wiki/Gamma_distribution#Occurrence_and_applications}{gamma-distributed}. \item \code{"reg:tweedie"}: Tweedie regression with log-link. It might be useful, e.g., for modeling total loss in insurance, or for any outcome that might be \href{https://en.wikipedia.org/wiki/Tweedie_distribution#Occurrence_and_applications}{Tweedie-distributed}. } The following values are \bold{NOT} supported by \code{xgboost}, but are supported by \code{\link[=xgb.train]{xgb.train()}} (see \code{\link[=xgb.params]{xgb.params()}} for details): \itemize{ \item \code{"reg:logistic"} \item \code{"binary:logitraw"} \item \code{"multi:softmax"} \item \code{"rank:ndcg"} \item \code{"rank:map"} \item \code{"rank:pairwise"} }} \item{nrounds}{Number of boosting iterations / rounds. Note that the number of default boosting rounds here is not automatically tuned, and different problems will have vastly different optimal numbers of boosting rounds.} \item{max_depth}{(for Tree Booster) (default=6, type=int32) Maximum depth of a tree. Increasing this value will make the model more complex and more likely to overfit. 0 indicates no limit on depth. Beware that XGBoost aggressively consumes memory when training a deep tree. \code{"exact"} tree method requires non-zero value. 
range: \eqn{[0, \infty)}} \item{learning_rate}{(alias: \code{eta}) Step size shrinkage used in update to prevent overfitting. After each boosting step, we can directly get the weights of new features, and \code{learning_rate} shrinks the feature weights to make the boosting process more conservative. \itemize{ \item range: \eqn{[0,1]} \item default value: 0.3 for tree-based boosters, 0.5 for linear booster. }} \item{min_child_weight}{(for Tree Booster) (default=1) Minimum sum of instance weight (hessian) needed in a child. If the tree partition step results in a leaf node with the sum of instance weight less than \code{min_child_weight}, then the building process will give up further partitioning. In a linear regression task, this simply corresponds to the minimum number of instances needed in each node. The larger \code{min_child_weight} is, the more conservative the algorithm will be. range: \eqn{[0, \infty)}} \item{min_split_loss}{(for Tree Booster) (default=0, alias: \code{gamma}) Minimum loss reduction required to make a further partition on a leaf node of the tree. The larger \code{min_split_loss} is, the more conservative the algorithm will be. Note that a tree where no splits were made might still contain a single terminal node with a non-zero score. range: \eqn{[0, \infty)}} \item{reg_lambda}{(alias: \code{lambda}) \itemize{ \item For tree-based boosters: \itemize{ \item L2 regularization term on weights. Increasing this value will make model more conservative. \item default: 1 \item range: \eqn{[0, \infty]} } \item For linear booster: \itemize{ \item L2 regularization term on weights. Increasing this value will make model more conservative. Normalised to number of training examples. \item default: 0 \item range: \eqn{[0, \infty)} } }} \item{weights}{Sample weights for each row in \code{x} and \code{y}. If \code{NULL} (the default), each row will have the same weight. If not \code{NULL}, should be passed as a numeric vector with length matching the number of rows in \code{x}.} \item{verbosity}{Verbosity of printing messages. Valid values are 0 (silent), 1 (warning), 2 (info), and 3 (debug).} \item{monitor_training}{Whether to monitor objective optimization progress on the input data. Note that the same 'x' and 'y' data are used for both model fitting and evaluation.} \item{eval_set}{Subset of the data to use as evaluation set. Can be passed as: \itemize{ \item A vector of row indices (base-1 numeration) indicating the observations that are to be designated as evaluation data. \item A number between zero and one indicating a random fraction of the input data to use as evaluation data. Note that the selection will be done uniformly at random, regardless of argument \code{weights}. } If passed, this subset of the data will be excluded from the training procedure, and the evaluation metric(s) supplied under \code{eval_metric} will be calculated on this dataset after each boosting iteration (pass \code{verbosity>0} to have these metrics printed during training). If \code{eval_metric} is not passed, a default metric will be selected according to \code{objective}. If passing a fraction, in classification problems, the evaluation set will be chosen in such a way that at least one observation of each class will be kept in the training data. For more elaborate evaluation variants (e.g.
custom metrics, multiple evaluation sets, etc.), one might want to use \code{\link[=xgb.train]{xgb.train()}} instead.} \item{early_stopping_rounds}{Number of boosting rounds after which training will be stopped if there is no improvement in performance (as measured by the last metric passed under \code{eval_metric}, or by the default metric for the objective if \code{eval_metric} is not passed) on the evaluation data from \code{eval_set}. Must pass \code{eval_set} in order to use this functionality. If \code{NULL}, early stopping will not be used.} \item{print_every_n}{When passing \code{verbosity>0} and either \code{monitor_training=TRUE} or \code{eval_set}, evaluation logs (metrics calculated on the training and/or evaluation data) will be printed every nth iteration according to the value passed here. The first and last iteration are always included regardless of this 'n'. Only has an effect when passing \code{verbosity>0}.} \item{eval_metric}{(default according to objective) \itemize{ \item Evaluation metrics for validation data; a default metric will be assigned according to objective (rmse for regression, and logloss for classification, \verb{mean average precision} for \code{rank:map}, etc.) \item Users can add multiple evaluation metrics. \item The choices are listed below: \itemize{ \item \code{"rmse"}: \href{https://en.wikipedia.org/wiki/Root_mean_square_error}{root mean square error} \item \code{"rmsle"}: root mean square log error: \eqn{\sqrt{\frac{1}{N}[log(pred + 1) - log(label + 1)]^2}}. Default metric of \code{"reg:squaredlogerror"} objective. This metric reduces errors generated by outliers in the dataset. But because \code{log} function is employed, \code{"rmsle"} might output \code{nan} when prediction value is less than -1. See \code{"reg:squaredlogerror"} for other requirements. \item \code{"mae"}: \href{https://en.wikipedia.org/wiki/Mean_absolute_error}{mean absolute error} \item \code{"mape"}: \href{https://en.wikipedia.org/wiki/Mean_absolute_percentage_error}{mean absolute percentage error} \item \code{"mphe"}: \href{https://en.wikipedia.org/wiki/Huber_loss}{mean Pseudo Huber error}. Default metric of \code{"reg:pseudohubererror"} objective. \item \code{"logloss"}: \href{https://en.wikipedia.org/wiki/Log-likelihood}{negative log-likelihood} \item \code{"error"}: Binary classification error rate. It is calculated as \verb{#(wrong cases)/#(all cases)}. For the predictions, the evaluation will regard the instances with prediction value larger than 0.5 as positive instances, and the others as negative instances. \item \code{"error@t"}: a binary classification threshold different from 0.5 can be specified by providing a numerical value through 't'. \item \code{"merror"}: Multiclass classification error rate. It is calculated as \verb{#(wrong cases)/#(all cases)}. \item \code{"mlogloss"}: \href{https://scikit-learn.org/stable/modules/generated/sklearn.metrics.log_loss.html}{Multiclass logloss}. \item \code{"auc"}: \href{https://en.wikipedia.org/wiki/Receiver_operating_characteristic#Area_under_the_curve}{Receiver Operating Characteristic Area under the Curve}. Available for classification and learning-to-rank tasks. \itemize{ \item When used with binary classification, the objective should be \code{"binary:logistic"} or similar functions that work on probability. \item When used with multi-class classification, objective should be \code{"multi:softprob"} instead of \code{"multi:softmax"}, as the latter doesn't output probability.
Also the AUC is calculated by 1-vs-rest with reference class weighted by class prevalence. \item When used with the LTR task, the AUC is computed by comparing pairs of documents to count correctly sorted pairs. This corresponds to pairwise learning to rank. The implementation has some issues with average AUC around groups and distributed workers not being well-defined. \item On a single machine the AUC calculation is exact. In a distributed environment the AUC is a weighted average over the AUC of training rows on each node - therefore, distributed AUC is an approximation sensitive to the distribution of data across workers. Use another metric in distributed environments if precision and reproducibility are important. \item When the input dataset contains only negative or positive samples, the output is \code{NaN}. The behavior is implementation defined, for instance, \code{scikit-learn} returns \eqn{0.5} instead. } \item \code{"aucpr"}: \href{https://en.wikipedia.org/wiki/Precision_and_recall}{Area under the PR curve}. Available for classification and learning-to-rank tasks. After XGBoost 1.6, both of the requirements and restrictions for using \code{"aucpr"} in classification problems are similar to \code{"auc"}. For ranking task, only binary relevance label \eqn{y \in [0, 1]} is supported. Different from \code{"map"} (mean average precision), \code{"aucpr"} calculates the \emph{interpolated} area under precision recall curve using continuous interpolation. \item \code{"pre"}: Precision at \eqn{k}. Supports only learning to rank task. \item \code{"ndcg"}: \href{https://en.wikipedia.org/wiki/NDCG}{Normalized Discounted Cumulative Gain} \item \code{"map"}: \href{https://en.wikipedia.org/wiki/Mean_average_precision#Mean_average_precision}{Mean Average Precision} The \verb{average precision} is defined as: \eqn{AP@l = \frac{1}{\min(l, N)}\sum^l_{k=1}P@k \cdot I_{(k)}} where \eqn{I_{(k)}} is an indicator function that equals \eqn{1} when the document at \eqn{k} is relevant and \eqn{0} otherwise. The \eqn{P@k} is the precision at \eqn{k}, and \eqn{N} is the total number of relevant documents. Lastly, the \verb{mean average precision} is defined as the weighted average across all queries. \item \code{"ndcg@n"}, \code{"map@n"}, \code{"pre@n"}: \eqn{n} can be assigned as an integer to cut off the top positions in the lists for evaluation. \item \code{"ndcg-"}, \code{"map-"}, \code{"ndcg@n-"}, \code{"map@n-"}: In XGBoost, the NDCG and MAP evaluate the score of a list without any positive samples as \eqn{1}. By appending "-" to the evaluation metric name, we can ask XGBoost to evaluate these scores as \eqn{0} to be consistent under some conditions. \item \code{"poisson-nloglik"}: negative log-likelihood for Poisson regression \item \code{"gamma-nloglik"}: negative log-likelihood for gamma regression \item \code{"cox-nloglik"}: negative partial log-likelihood for Cox proportional hazards regression \item \code{"gamma-deviance"}: residual deviance for gamma regression \item \code{"tweedie-nloglik"}: negative log-likelihood for Tweedie regression (at a specified value of the \code{tweedie_variance_power} parameter) \item \code{"aft-nloglik"}: Negative log likelihood of Accelerated Failure Time model. See \href{https://xgboost.readthedocs.io/en/latest/tutorials/aft_survival_analysis.html}{Survival Analysis with Accelerated Failure Time} for details. \item \code{"interval-regression-accuracy"}: Fraction of data points whose predicted labels fall in the interval-censored labels.
Only applicable for interval-censored data. See \href{https://xgboost.readthedocs.io/en/latest/tutorials/aft_survival_analysis.html}{Survival Analysis with Accelerated Failure Time} for details. } }} \item{nthreads}{Number of parallel threads to use. If passing zero, will use all CPU threads.} \item{seed}{Seed to use for random number generation. If passing \code{NULL}, will draw a random number using R's PRNG system to use as seed.} \item{base_margin}{Base margin used for boosting from an existing model. If passing it, will start the gradient boosting procedure from the scores that are provided here - for example, one can pass the raw scores from a previous model, or some per-observation offset, or similar. Should be either a numeric vector or numeric matrix (for multi-class and multi-target objectives) with the same number of rows as \code{x} and number of columns corresponding to the number of optimization targets, and should be in the untransformed scale (for example, for objective \code{binary:logistic}, it should have log-odds, not probabilities; and for objective \code{multi:softprob}, should have number of columns matching the number of classes in the data). Note that, if it contains more than one column, then columns will not be matched by name to the corresponding \code{y} - \code{base_margin} should have the same column order that the model will use (for example, for objective \code{multi:softprob}, columns of \code{base_margin} will be matched against \code{levels(y)} by their position, regardless of what \code{colnames(base_margin)} returns). If \code{NULL}, will start from zero, but note that for most objectives, an intercept is usually added (controllable through parameter \code{base_score} instead) when \code{base_margin} is not passed.} \item{monotone_constraints}{Optional monotonicity constraints for features. Can be passed either as a named list (when \code{x} has column names), or as a vector. If passed as a vector and \code{x} has column names, will try to match the elements by name. A value of \code{+1} for a given feature makes the model predictions / scores constrained to be a monotonically increasing function of that feature (that is, as the value of the feature increases, the model prediction cannot decrease), while a value of \code{-1} makes it a monotonically decreasing function. A value of zero imposes no constraint. The input for \code{monotone_constraints} can be a subset of the columns of \code{x} if named, in which case the columns that are not referred to in \code{monotone_constraints} will be assumed to have a value of zero (no constraint imposed on the model for those features). See the tutorial \href{https://xgboost.readthedocs.io/en/stable/tutorials/monotonic.html}{Monotonic Constraints} for a more detailed explanation, and the constraints example sketch in the Description section.} \item{interaction_constraints}{Constraints on feature interactions, representing permitted interactions. The constraints must be specified in the form of a list of vectors referencing columns in the data, e.g. \code{list(c(1, 2), c(3, 4, 5))} (with these numbers being column indices, numbering starting at 1 - i.e. the first sublist references the first and second columns) or \code{list(c("Sepal.Length", "Sepal.Width"), c("Petal.Length", "Petal.Width"))} (references columns by names), where each vector is a group of indices of features that are allowed to interact with each other.
See the tutorial \href{https://xgboost.readthedocs.io/en/stable/tutorials/feature_interaction_constraint.html}{Feature Interaction Constraints} for more information.} \item{reg_alpha}{(alias: \code{alpha}) \itemize{ \item L1 regularization term on weights. Increasing this value will make the model more conservative. \item For the linear booster, it's normalized to the number of training examples. \item default: 0 \item range: \eqn{[0, \infty)} }} \item{max_bin}{(for Tree Booster) (default=256, type=int32) \itemize{ \item Only used if \code{tree_method} is set to \code{"hist"} or \code{"approx"}. \item Maximum number of discrete bins to bucket continuous features. \item Increasing this number improves the optimality of splits at the cost of higher computation time. }} \item{max_leaves}{(for Tree Booster) (default=0, type=int32) Maximum number of nodes to be added. Not used by the \code{"exact"} tree method.} \item{booster}{(default= \code{"gbtree"}) Which booster to use. Can be \code{"gbtree"}, \code{"gblinear"} or \code{"dart"}; \code{"gbtree"} and \code{"dart"} use tree based models while \code{"gblinear"} uses linear functions.} \item{subsample}{(for Tree Booster) (default=1) Subsample ratio of the training instances. Setting it to 0.5 means that XGBoost would randomly sample half of the training data prior to growing trees, which helps prevent overfitting. Subsampling will occur once in every boosting iteration. range: \eqn{(0,1]}} \item{sampling_method}{(for Tree Booster) (default= \code{"uniform"}) The method to use to sample the training instances. \itemize{ \item \code{"uniform"}: each training instance has an equal probability of being selected. Typically set \code{"subsample"} >= 0.5 for good results. \item \code{"gradient_based"}: the selection probability for each training instance is proportional to the \bold{regularized absolute value} of gradients (more specifically, \eqn{\sqrt{g^2+\lambda h^2}}). \code{"subsample"} may be set to as low as 0.1 without loss of model accuracy. Note that this sampling method is only supported when \code{"tree_method"} is set to \code{"hist"} and the device is \code{"cuda"}; other tree methods only support \code{"uniform"} sampling. }} \item{feature_weights}{Feature weights for column sampling. Can be passed either as a vector with length matching the number of columns of \code{x}, or as a named list (only if \code{x} has column names) with names matching the columns of \code{x}. If it is a named vector, will try to match the entries to the column names of \code{x} by name. If \code{NULL} (the default), all columns will have the same weight.} \item{colsample_bytree, colsample_bylevel, colsample_bynode}{(for Tree Booster) (default=1) This is a family of parameters for subsampling of columns. \itemize{ \item All \code{"colsample_by*"} parameters have a range of \eqn{(0, 1]}, the default value of 1, and specify the fraction of columns to be subsampled. \item \code{"colsample_bytree"} is the subsample ratio of columns when constructing each tree. Subsampling occurs once for every tree constructed. \item \code{"colsample_bylevel"} is the subsample ratio of columns for each level. Subsampling occurs once for every new depth level reached in a tree. Columns are subsampled from the set of columns chosen for the current tree. \item \code{"colsample_bynode"} is the subsample ratio of columns for each node (split). Subsampling occurs once every time a new split is evaluated. Columns are subsampled from the set of columns chosen for the current level.
This is not supported by the exact tree method. \item \code{"colsample_by*"} parameters work cumulatively. For instance, the combination \verb{\{'colsample_bytree'=0.5, 'colsample_bylevel'=0.5, 'colsample_bynode'=0.5\}} with 64 features will leave 8 features to choose from at each split. } One can set \code{feature_weights} to define the probability of each feature being selected when using column sampling.} \item{tree_method}{(for Tree Booster) (default= \code{"auto"}) The tree construction algorithm used in XGBoost. See description in the \href{https://arxiv.org/abs/1603.02754}{reference paper} and \href{https://xgboost.readthedocs.io/en/latest/treemethod.html}{Tree Methods}. Choices: \code{"auto"}, \code{"exact"}, \code{"approx"}, \code{"hist"}; each of these is a combination of commonly used updaters. For other updaters like \code{"refresh"}, set the parameter \code{updater} directly. \itemize{ \item \code{"auto"}: Same as the \code{"hist"} tree method. \item \code{"exact"}: Exact greedy algorithm. Enumerates all split candidates. \item \code{"approx"}: Approximate greedy algorithm using quantile sketch and gradient histogram. \item \code{"hist"}: Faster histogram optimized approximate greedy algorithm. }} \item{max_delta_step}{(for Tree Booster) (default=0) Maximum delta step we allow each leaf output to be. If the value is set to 0, it means there is no constraint. If it is set to a positive value, it can help make the update step more conservative. Usually this parameter is not needed, but it might help in logistic regression when the classes are extremely imbalanced. Setting it to a value of 1-10 might help control the update. range: \eqn{[0, \infty)}} \item{scale_pos_weight}{(for Tree Booster) (default=1) Control the balance of positive and negative weights, useful for unbalanced classes. A typical value to consider: \verb{sum(negative instances) / sum(positive instances)}. See \href{https://xgboost.readthedocs.io/en/latest/tutorials/param_tuning.html}{Parameters Tuning} for more discussion. Also, see the Higgs Kaggle competition demo for examples: \href{https://github.com/dmlc/xgboost/blob/master/demo/kaggle-higgs/higgs-train.R}{R}, \href{https://github.com/dmlc/xgboost/blob/master/demo/kaggle-higgs/higgs-numpy.py}{py1}, \href{https://github.com/dmlc/xgboost/blob/master/demo/kaggle-higgs/higgs-cv.py}{py2}, \href{https://github.com/dmlc/xgboost/blob/master/demo/guide-python/cross_validation.py}{py3}.} \item{updater}{(for Linear Booster) (default= \code{"shotgun"}) Choice of algorithm to fit the linear model \itemize{ \item \code{"shotgun"}: Parallel coordinate descent algorithm based on the shotgun algorithm. Uses 'hogwild' parallelism and therefore produces a nondeterministic solution on each run. \item \code{"coord_descent"}: Ordinary coordinate descent algorithm. Also multithreaded but still produces a deterministic solution. When the \code{device} parameter is set to \code{"cuda"} or \code{"gpu"}, a GPU variant will be used. }} \item{grow_policy}{(for Tree Booster) (default= \code{"depthwise"}) \itemize{ \item Controls the way new nodes are added to the tree. \item Currently supported only if \code{tree_method} is set to \code{"hist"} or \code{"approx"}. \item Choices: \code{"depthwise"}, \code{"lossguide"} \itemize{ \item \code{"depthwise"}: split at nodes closest to the root. \item \code{"lossguide"}: split at nodes with the highest loss change. } }} \item{num_parallel_tree}{(for Tree Booster) (default=1) Number of parallel trees constructed during each iteration.
This option is used to support boosted random forests.} \item{multi_strategy}{(for Tree Booster) (default = \code{"one_output_per_tree"}) The strategy used for training multi-target models, including multi-target regression and multi-class classification. See \href{https://xgboost.readthedocs.io/en/latest/tutorials/multioutput.html}{Multiple Outputs} for more information. \itemize{ \item \code{"one_output_per_tree"}: One model for each target. \item \code{"multi_output_tree"}: Use multi-target trees. } Version added: 2.0.0 Note: This parameter is a work in progress.} \item{base_score}{\itemize{ \item The initial prediction score of all instances, global bias \item The parameter is automatically estimated for selected objectives before training. To disable the estimation, specify a real number argument. \item If \code{base_margin} is supplied, \code{base_score} will not be added. \item For a sufficient number of iterations, changing this value will not have too much effect. }} \item{seed_per_iteration}{(default= \code{FALSE}) Seed the PRNG deterministically via the iteration number.} \item{device}{(default= \code{"cpu"}) Device for XGBoost to run. User can set it to one of the following values: \itemize{ \item \code{"cpu"}: Use CPU. \item \code{"cuda"}: Use a GPU (CUDA device). \item \code{"cuda:<ordinal>"}: \verb{<ordinal>} is an integer that specifies the ordinal of the GPU (which GPU to use if you have more than one device). \item \code{"gpu"}: Default GPU device selection from the list of available and supported devices. Only \code{"cuda"} devices are supported currently. \item \code{"gpu:<ordinal>"}: Default GPU device selection from the list of available and supported devices. Only \code{"cuda"} devices are supported currently. } For more information about GPU acceleration, see \href{https://xgboost.readthedocs.io/en/latest/gpu/index.html}{XGBoost GPU Support}. In distributed environments, ordinal selection is handled by distributed frameworks instead of XGBoost. As a result, using \code{"cuda:<ordinal>"} will result in an error. Use \code{"cuda"} instead. Version added: 2.0.0 Note: if XGBoost was installed from CRAN, it won't have GPU support enabled, thus only \code{"cpu"} will be available. To get GPU support, the R package for XGBoost must be installed from source or from the GitHub releases - see \href{https://xgboost.readthedocs.io/en/latest/install.html#r}{instructions}.} \item{disable_default_eval_metric}{(default= \code{FALSE}) Flag to disable the default metric. Set to 1 or \code{TRUE} to disable.} \item{use_rmm}{Whether to use RAPIDS Memory Manager (RMM) to allocate cache GPU memory. The primary memory is always allocated on the RMM pool when XGBoost is built (compiled) with the RMM plugin enabled. Valid values are \code{TRUE} and \code{FALSE}. See \href{https://xgboost.readthedocs.io/en/latest/python/rmm-examples/index.html}{Using XGBoost with RAPIDS Memory Manager (RMM) plugin} for details.} \item{max_cached_hist_node}{(for Non-Exact Tree Methods) (default = 65536) Maximum number of cached nodes for histogram. This can be used with the \code{"hist"} and the \code{"approx"} tree methods. Version added: 2.0.0 \itemize{ \item For most cases this parameter should not be set, except for growing deep trees. After 3.0, this parameter affects GPU algorithms as well. }} \item{extmem_single_page}{(for Non-Exact Tree Methods) (default = \code{FALSE}) This parameter is only used for the \code{"hist"} tree method with \code{device="cuda"} and \code{subsample != 1.0}. Before 3.0, pages were always concatenated.
Version added: 3.0.0 Whether the GPU-based \code{"hist"} tree method should concatenate the training data into a single batch instead of fetching data on-demand when external memory is used. For GPU devices that don't support address translation services, external memory training is expensive. This parameter can be used in combination with subsampling to reduce overall memory usage without significant overhead. See \href{https://xgboost.readthedocs.io/en/latest/tutorials/external_memory.html}{Using XGBoost External Memory Version} for more information.} \item{max_cat_to_onehot}{(for Non-Exact Tree Methods) A threshold for deciding whether XGBoost should use a one-hot encoding based split for categorical data. When the number of categories is less than the threshold, one-hot encoding is chosen; otherwise, the categories will be partitioned into children nodes. Version added: 1.6.0} \item{max_cat_threshold}{(for Non-Exact Tree Methods) Maximum number of categories considered for each split. Used only by partition-based splits for preventing over-fitting. Version added: 1.7.0} \item{sample_type}{(for Dart Booster) (default= \code{"uniform"}) Type of sampling algorithm. \itemize{ \item \code{"uniform"}: dropped trees are selected uniformly. \item \code{"weighted"}: dropped trees are selected in proportion to weight. }} \item{normalize_type}{(for Dart Booster) (default= \code{"tree"}) Type of normalization algorithm. \itemize{ \item \code{"tree"}: new trees have the same weight as each of the dropped trees. \itemize{ \item The weight of new trees is \code{1 / (k + learning_rate)}. \item Dropped trees are scaled by a factor of \code{k / (k + learning_rate)}. } \item \code{"forest"}: new trees have the same weight as the sum of the dropped trees (forest). \itemize{ \item The weight of new trees is \code{1 / (1 + learning_rate)}. \item Dropped trees are scaled by a factor of \code{1 / (1 + learning_rate)}. } }} \item{rate_drop}{(for Dart Booster) (default=0.0) Dropout rate (a fraction of previous trees to drop during the dropout). range: \eqn{[0.0, 1.0]}} \item{one_drop}{(for Dart Booster) (default=0) When this flag is enabled, at least one tree is always dropped during the dropout (allows Binomial-plus-one or epsilon-dropout from the original DART paper).} \item{skip_drop}{(for Dart Booster) (default=0.0) Probability of skipping the dropout procedure during a boosting iteration. \itemize{ \item If a dropout is skipped, new trees are added in the same manner as \code{"gbtree"}. \item Note that non-zero \code{skip_drop} has higher priority than \code{rate_drop} or \code{one_drop}. } range: \eqn{[0.0, 1.0]} A DART configuration sketch is shown in the Description section.} \item{feature_selector}{(for Linear Booster) (default= \code{"cyclic"}) Feature selection and ordering method \itemize{ \item \code{"cyclic"}: Deterministic selection by cycling through features one at a time. \item \code{"shuffle"}: Similar to \code{"cyclic"} but with random feature shuffling prior to each update. \item \code{"random"}: A random (with replacement) coordinate selector. \item \code{"greedy"}: Select the coordinate with the greatest gradient magnitude. It has \code{O(num_feature^2)} complexity. It is fully deterministic. It allows restricting the selection to \code{top_k} features per group with the largest magnitude of univariate weight change, by setting the \code{top_k} parameter. Doing so would reduce the complexity to \code{O(num_feature*top_k)}. \item \code{"thrifty"}: Thrifty, approximately-greedy feature selector.
Prior to cyclic updates, reorders features in descending magnitude of their univariate weight changes. This operation is multithreaded and is a linear complexity approximation of the quadratic greedy selection. It allows restricting the selection to \code{top_k} features per group with the largest magnitude of univariate weight change, by setting the \code{top_k} parameter. }} \item{top_k}{(for Linear Booster) (default=0) The number of top features to select in the \code{greedy} and \code{thrifty} feature selectors. The value of 0 means using all the features.} \item{tweedie_variance_power}{(for Tweedie Regression (\code{"objective=reg:tweedie"})) (default=1.5) \itemize{ \item Parameter that controls the variance of the Tweedie distribution \code{var(y) ~ E(y)^tweedie_variance_power} \item range: \eqn{(1,2)} \item Set closer to 2 to shift towards a gamma distribution \item Set closer to 1 to shift towards a Poisson distribution. }} \item{huber_slope}{(for using Pseudo-Huber (\code{"reg:pseudohubererror"})) (default = 1.0) A parameter used for Pseudo-Huber loss to define the \eqn{\delta} term.} \item{quantile_alpha}{(for using Quantile Loss (\code{"reg:quantileerror"})) A scalar or a list of targeted quantiles (passed as a numeric vector). Version added: 2.0.0 See the example sketch in the Description section.} \item{aft_loss_distribution}{(for using AFT Survival Loss (\code{"survival:aft"}) and Negative Log Likelihood of AFT metric (\code{"aft-nloglik"})) Probability Density Function, \code{"normal"}, \code{"logistic"}, or \code{"extreme"}.} \item{...}{Not used. Some arguments that were part of this function in previous XGBoost versions are currently deprecated or have been renamed. If a deprecated or renamed argument is passed, will throw a warning (by default) and use its current equivalent instead. This warning will become an error if using the \link[=xgboost-options]{'strict mode' option}. If some additional argument is passed that is neither a current function argument nor a deprecated or renamed argument, a warning or error will be thrown depending on the 'strict mode' option. \bold{Important:} \code{...} will be removed in a future version, and all the current deprecation warnings will become errors. Please use only arguments that form part of the function signature.} } \value{ A model object, inheriting from both \code{xgboost} and \code{xgb.Booster}. Compared to the regular \code{xgb.Booster} model class produced by \code{\link[=xgb.train]{xgb.train()}}, this \code{xgboost} class will have an additional attribute \code{metadata} containing information which is used for formatting prediction outputs, such as class names for classification problems. } \description{ Fits an XGBoost model (boosted decision tree ensemble) to given x/y data. See the tutorial \href{https://xgboost.readthedocs.io/en/stable/tutorials/model.html}{Introduction to Boosted Trees} for a longer explanation of what XGBoost does, and the rest of the \href{https://xgboost.readthedocs.io/en/latest/tutorials/index.html}{XGBoost Tutorials} for further explanations of XGBoost's features and usage. This function is intended to provide a user-friendly interface for XGBoost that follows R's conventions for model fitting and predictions, but which doesn't expose all of the possible functionalities of the core XGBoost library.
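For instance, training with a held-out evaluation subset and early stopping can be requested directly through this interface. As a minimal sketch of the workflow described under \code{eval_set}, \code{eval_metric}, and \code{early_stopping_rounds} (the 20\% holdout fraction, metric, and patience below are arbitrary illustrations, not defaults): \preformatted{data(mtcars)
model_es <- xgboost(
  mtcars[, -1], mtcars$mpg,
  eval_set = 0.2,             # hold out 20\% of the rows for evaluation
  eval_metric = "rmse",
  early_stopping_rounds = 3,  # stop if "rmse" hasn't improved in 3 rounds
  monitor_training = TRUE,
  verbosity = 1,
  nthreads = 1,
  nrounds = 100
)
}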
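Likewise, a hedged sketch of the constraint arguments described above - the direction of the \code{wt} constraint is a domain assumption about the mtcars data (heavier cars should not get higher predicted mpg), and the interaction groups are chosen purely for illustration: \preformatted{data(mtcars)
model_constr <- xgboost(
  mtcars[, -1], mtcars$mpg,
  # predictions must be non-increasing in 'wt'
  monotone_constraints = list(wt = -1),
  # 'cyl' and 'disp' may interact with each other, but not with 'hp'
  interaction_constraints = list(c("cyl", "disp"), c("hp")),
  nthreads = 1,
  nrounds = 3
)
}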
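A DART booster can be configured through the dropout parameters documented above; the specific rates below are illustrative only: \preformatted{data(mtcars)
model_dart <- xgboost(
  mtcars[, -1], mtcars$mpg,
  booster = "dart",
  rate_drop = 0.1,  # drop a fraction of the previous trees at each dropout
  skip_drop = 0.5,  # skip the dropout procedure in about half the iterations
  nthreads = 1,
  nrounds = 10
)
}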
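And a sketch of quantile regression with multiple targeted quantiles via \code{quantile_alpha} (the chosen quantiles are arbitrary): \preformatted{data(mtcars)
model_quant <- xgboost(
  mtcars[, -1], mtcars$mpg,
  objective = "reg:quantileerror",
  quantile_alpha = c(0.25, 0.5, 0.75),  # one prediction column per quantile
  nthreads = 1,
  nrounds = 3
)
}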
See \code{\link[=xgb.train]{xgb.train()}} for a more flexible low-level alternative which is similar across different language bindings of XGBoost and which exposes additional functionalities such as training on external memory data and learning-to-rank objectives. See also the \href{https://xgboost.readthedocs.io/en/latest/R-package/migration_guide.html}{migration guide} if coming from a previous version of XGBoost in the 1.x series. By default, most of the parameters here have a value of \code{NULL}, which signals XGBoost to use its default value. Default values are automatically determined by the XGBoost core library, and are subject to change over XGBoost library versions. Some of them might differ according to the booster type (e.g. defaults for regularization are different for linear and tree-based boosters). See \code{\link[=xgb.params]{xgb.params()}} and the \href{https://xgboost.readthedocs.io/en/latest/parameter.html}{online documentation} for more details about parameters - but note that some of the parameters are not supported in the \code{xgboost()} interface. } \details{ For package authors using 'xgboost' as a dependency, it is highly recommended to use \code{\link[=xgb.train]{xgb.train()}} in package code instead of \code{\link[=xgboost]{xgboost()}}, since it has a more stable interface and performs fewer data conversions and copies along the way. } \examples{ data(mtcars) # Fit a small regression model on the mtcars data model_regression <- xgboost(mtcars[, -1], mtcars$mpg, nthreads = 1, nrounds = 3) predict(model_regression, mtcars, validate_features = TRUE) # Task objective is determined automatically according to the type of 'y' data(iris) model_classif <- xgboost(iris[, -5], iris$Species, nthreads = 1, nrounds = 5) predict(model_classif, iris[1:10,]) predict(model_classif, iris[1:10,], type = "class") # Can nevertheless choose a non-default objective if needed model_poisson <- xgboost( mtcars[, -1], mtcars$mpg, objective = "count:poisson", nthreads = 1, nrounds = 3 ) # Can calculate evaluation metrics during boosting rounds data(ToothGrowth) xgboost( ToothGrowth[, c("len", "dose")], ToothGrowth$supp, eval_metric = c("auc", "logloss"), eval_set = 0.2, monitor_training = TRUE, verbosity = 1, nthreads = 1, nrounds = 3 ) } \references{ \itemize{ \item Chen, Tianqi, and Carlos Guestrin. "Xgboost: A scalable tree boosting system." Proceedings of the 22nd acm sigkdd international conference on knowledge discovery and data mining. 2016. \item \url{https://xgboost.readthedocs.io/en/stable/} } } xgboost-3.0.0/R-package/pkgdown/000077500000000000000000000000001476511272400164535ustar00rootroot00000000000000xgboost-3.0.0/R-package/pkgdown/_pkgdown.yml000066400000000000000000000046741476511272400210210ustar00rootroot00000000000000url: https://github.com/dmlc/xgboost template: bootstrap: 5 math-rendering: mathjax reference: - title: High Level Interface desc: High level XGBoost interface contents: - "xgboost" - "predict.xgboost" - "print.xgboost" - title: Datasets desc: Test datasets bundled with the R package. contents: - "agaricus.train" - "agaricus.test" - title: Global Configuration desc: Global configuration for the XGBoost library. contents: - "xgb.config" - "xgb.set.config" - "xgb.get.config" - title: DMatrix desc: Low-level data storage. 
contents: - "xgb.DMatrix" - "xgb.DMatrix.hasinfo" - "xgb.DMatrix.save" - "dim.xgb.DMatrix" - "dimnames.xgb.DMatrix" - "print.xgb.DMatrix" - "xgb.DataBatch" - "xgb.DataIter" - "xgb.get.DMatrix.data" - "xgb.get.DMatrix.num.non.missing" - "xgb.ExtMemDMatrix" - "xgb.QuantileDMatrix.from_iterator" - "xgb.get.DMatrix.qcut" - "xgb.slice.DMatrix" - title: Booster desc: The model for XGBoost. contents: - "a-compatibility-note-for-saveRDS-save" - "coef.xgb.Booster" - "getinfo.xgb.Booster" - "predict.xgb.Booster" - "print.xgb.Booster" - "xgb.load" - "xgb.load.raw" - "xgb.save" - "xgb.save.raw" - "xgb.copy.Booster" - "xgb.slice.Booster" - "xgb.get.num.boosted.rounds" - "xgb.is.same.Booster" - "xgb.importance" - "xgb.attr" - "xgb.create.features" - "xgb.model.dt.tree" - "xgb.model.parameters<-" - "xgb.ggplot.deepness" - "xgb.dump" - "variable.names.xgb.Booster" - "xgb.ggplot.importance" - "xgb.plot.multi.trees" - "xgb.plot.shap" - "xgb.ggplot.shap.summary" - "xgb.plot.tree" - "xgb.gblinear.history" - title: Training Callbacks desc: Callback functions used for training. contents: - "xgb.Callback" - "xgb.cb.cv.predict" - "xgb.cb.early.stop" - "xgb.cb.evaluation.log" - "xgb.cb.gblinear.history" - "xgb.cb.print.evaluation" - "xgb.cb.reset.parameters" - "xgb.cb.save.model" - title: Low-level Training Functions desc: Low-level Training Functions with DMatrix and Booster contents: - "xgb.params" - "xgb.train" - "xgb.cv" - "print.xgb.cv.synchronous" - title: Deprecation Settings contents: - "xgboost-options" xgboost-3.0.0/R-package/remove_warning_suppression_pragma.sh000077500000000000000000000005541476511272400243700ustar00rootroot00000000000000#!/bin/bash # remove all #pragma's that suppress compiler warnings set -e set -x for file in xgboost/src/dmlc-core/include/dmlc/*.h do sed -i.bak -e 's/^.*#pragma GCC diagnostic.*$//' -e 's/^.*#pragma clang diagnostic.*$//' -e 's/^.*#pragma warning.*$//' "${file}" done for file in xgboost/src/dmlc-core/include/dmlc/*.h.bak do rm "${file}" done set +x set +e xgboost-3.0.0/R-package/src/000077500000000000000000000000001476511272400155715ustar00rootroot00000000000000xgboost-3.0.0/R-package/src/Makevars.in000066400000000000000000000116121476511272400176730ustar00rootroot00000000000000# package root PKGROOT=../../ ENABLE_STD_THREAD=1 # _*_ mode: Makefile; _*_ CXX_STD = CXX17 XGB_RFLAGS = \ @DMLC_DEFS@ \ @XGBOOST_BUILTIN_PREFETCH_PRESENT@ \ @XGBOOST_MM_PREFETCH_PRESENT@ \ -DXGBOOST_STRICT_R_MODE=1 \ -DDMLC_LOG_BEFORE_THROW=0 \ -DDMLC_ENABLE_STD_THREAD=$(ENABLE_STD_THREAD) \ -DDMLC_DISABLE_STDIN=1 \ -DDMLC_LOG_CUSTOMIZE=1 # disable the use of thread_local for 32 bit windows: ifeq ($(R_OSTYPE)$(WIN),windows) XGB_RFLAGS += -DDMLC_CXX11_THREAD_LOCAL=0 endif $(foreach v, $(XGB_RFLAGS), $(warning $(v))) PKG_CPPFLAGS = \ -I$(PKGROOT)/include \ -I$(PKGROOT)/dmlc-core/include \ -I$(PKGROOT) \ $(XGB_RFLAGS) PKG_CXXFLAGS = \ @OPENMP_CXXFLAGS@ \ @ENDIAN_FLAG@ \ -pthread \ $(CXX_VISIBILITY) PKG_LIBS = \ @OPENMP_CXXFLAGS@ \ @OPENMP_LIB@ \ @ENDIAN_FLAG@ \ @BACKTRACE_LIB@ \ -pthread OBJECTS= \ ./xgboost_R.o \ ./xgboost_custom.o \ ./init.o \ $(PKGROOT)/src/metric/metric.o \ $(PKGROOT)/src/metric/elementwise_metric.o \ $(PKGROOT)/src/metric/multiclass_metric.o \ $(PKGROOT)/src/metric/rank_metric.o \ $(PKGROOT)/src/metric/auc.o \ $(PKGROOT)/src/metric/survival_metric.o \ $(PKGROOT)/src/objective/objective.o \ $(PKGROOT)/src/objective/regression_obj.o \ $(PKGROOT)/src/objective/multiclass_obj.o \ $(PKGROOT)/src/objective/lambdarank_obj.o \ $(PKGROOT)/src/objective/hinge.o \ 
$(PKGROOT)/src/objective/aft_obj.o \ $(PKGROOT)/src/objective/adaptive.o \ $(PKGROOT)/src/objective/init_estimation.o \ $(PKGROOT)/src/objective/quantile_obj.o \ $(PKGROOT)/src/gbm/gbm.o \ $(PKGROOT)/src/gbm/gbtree.o \ $(PKGROOT)/src/gbm/gbtree_model.o \ $(PKGROOT)/src/gbm/gblinear.o \ $(PKGROOT)/src/gbm/gblinear_model.o \ $(PKGROOT)/src/data/adapter.o \ $(PKGROOT)/src/data/array_interface.o \ $(PKGROOT)/src/data/simple_dmatrix.o \ $(PKGROOT)/src/data/data.o \ $(PKGROOT)/src/data/sparse_page_raw_format.o \ $(PKGROOT)/src/data/ellpack_page.o \ $(PKGROOT)/src/data/file_iterator.o \ $(PKGROOT)/src/data/gradient_index.o \ $(PKGROOT)/src/data/gradient_index_page_source.o \ $(PKGROOT)/src/data/gradient_index_format.o \ $(PKGROOT)/src/data/validation.o \ $(PKGROOT)/src/data/sparse_page_dmatrix.o \ $(PKGROOT)/src/data/sparse_page_source.o \ $(PKGROOT)/src/data/extmem_quantile_dmatrix.o \ $(PKGROOT)/src/data/quantile_dmatrix.o \ $(PKGROOT)/src/data/batch_utils.o \ $(PKGROOT)/src/data/proxy_dmatrix.o \ $(PKGROOT)/src/data/iterative_dmatrix.o \ $(PKGROOT)/src/predictor/predictor.o \ $(PKGROOT)/src/predictor/cpu_predictor.o \ $(PKGROOT)/src/predictor/cpu_treeshap.o \ $(PKGROOT)/src/tree/constraints.o \ $(PKGROOT)/src/tree/param.o \ $(PKGROOT)/src/tree/fit_stump.o \ $(PKGROOT)/src/tree/tree_model.o \ $(PKGROOT)/src/tree/tree_updater.o \ $(PKGROOT)/src/tree/multi_target_tree_model.o \ $(PKGROOT)/src/tree/updater_approx.o \ $(PKGROOT)/src/tree/updater_colmaker.o \ $(PKGROOT)/src/tree/updater_prune.o \ $(PKGROOT)/src/tree/updater_quantile_hist.o \ $(PKGROOT)/src/tree/updater_refresh.o \ $(PKGROOT)/src/tree/updater_sync.o \ $(PKGROOT)/src/tree/hist/param.o \ $(PKGROOT)/src/tree/hist/histogram.o \ $(PKGROOT)/src/linear/linear_updater.o \ $(PKGROOT)/src/linear/updater_coordinate.o \ $(PKGROOT)/src/linear/updater_shotgun.o \ $(PKGROOT)/src/learner.o \ $(PKGROOT)/src/context.o \ $(PKGROOT)/src/logging.o \ $(PKGROOT)/src/global_config.o \ $(PKGROOT)/src/collective/result.o \ $(PKGROOT)/src/collective/allgather.o \ $(PKGROOT)/src/collective/allreduce.o \ $(PKGROOT)/src/collective/broadcast.o \ $(PKGROOT)/src/collective/comm.o \ $(PKGROOT)/src/collective/comm_group.o \ $(PKGROOT)/src/collective/coll.o \ $(PKGROOT)/src/collective/tracker.o \ $(PKGROOT)/src/collective/in_memory_handler.o \ $(PKGROOT)/src/collective/loop.o \ $(PKGROOT)/src/collective/socket.o \ $(PKGROOT)/src/common/charconv.o \ $(PKGROOT)/src/common/column_matrix.o \ $(PKGROOT)/src/common/common.o \ $(PKGROOT)/src/common/cuda_rt_utils.o \ $(PKGROOT)/src/common/error_msg.o \ $(PKGROOT)/src/common/hist_util.o \ $(PKGROOT)/src/common/host_device_vector.o \ $(PKGROOT)/src/common/io.o \ $(PKGROOT)/src/common/json.o \ $(PKGROOT)/src/common/numeric.o \ $(PKGROOT)/src/common/pseudo_huber.o \ $(PKGROOT)/src/common/quantile.o \ $(PKGROOT)/src/common/random.o \ $(PKGROOT)/src/common/stats.o \ $(PKGROOT)/src/common/survival_util.o \ $(PKGROOT)/src/common/threading_utils.o \ $(PKGROOT)/src/common/ranking_utils.o \ $(PKGROOT)/src/common/quantile_loss_utils.o \ $(PKGROOT)/src/common/timer.o \ $(PKGROOT)/src/common/version.o \ $(PKGROOT)/src/c_api/c_api.o \ $(PKGROOT)/src/c_api/c_api_error.o \ $(PKGROOT)/amalgamation/dmlc-minimum0.o xgboost-3.0.0/R-package/src/Makevars.win.in000066400000000000000000000116701476511272400204730ustar00rootroot00000000000000# package root PKGROOT=../../ ENABLE_STD_THREAD=0 # _*_ mode: Makefile; _*_ CXX_STD = CXX17 XGB_RFLAGS = \ -DXGBOOST_STRICT_R_MODE=1 \ -DDMLC_LOG_BEFORE_THROW=0 \ -DDMLC_ENABLE_STD_THREAD=$(ENABLE_STD_THREAD) \ 
-DDMLC_DISABLE_STDIN=1 \ -DDMLC_LOG_CUSTOMIZE=1 # disable the use of thread_local for 32 bit windows: ifeq ($(R_OSTYPE)$(WIN),windows) XGB_RFLAGS += -DDMLC_CXX11_THREAD_LOCAL=0 endif $(foreach v, $(XGB_RFLAGS), $(warning $(v))) PKG_CPPFLAGS = \ -I$(PKGROOT)/include \ -I$(PKGROOT)/dmlc-core/include \ -I$(PKGROOT) \ -DXGBOOST_BUILTIN_PREFETCH_PRESENT=1 \ @XGBOOST_MM_PREFETCH_PRESENT@ \ $(XGB_RFLAGS) PKG_CXXFLAGS = \ $(SHLIB_OPENMP_CXXFLAGS) \ -DDMLC_CMAKE_LITTLE_ENDIAN=1 \ $(SHLIB_PTHREAD_FLAGS) \ $(CXX_VISIBILITY) PKG_LIBS = \ $(SHLIB_OPENMP_CXXFLAGS) \ -DDMLC_CMAKE_LITTLE_ENDIAN=1 \ $(SHLIB_PTHREAD_FLAGS) \ -lwsock32 \ -lws2_32 OBJECTS= \ ./xgboost_R.o \ ./xgboost_custom.o \ ./init.o \ $(PKGROOT)/src/metric/metric.o \ $(PKGROOT)/src/metric/elementwise_metric.o \ $(PKGROOT)/src/metric/multiclass_metric.o \ $(PKGROOT)/src/metric/rank_metric.o \ $(PKGROOT)/src/metric/auc.o \ $(PKGROOT)/src/metric/survival_metric.o \ $(PKGROOT)/src/objective/objective.o \ $(PKGROOT)/src/objective/regression_obj.o \ $(PKGROOT)/src/objective/multiclass_obj.o \ $(PKGROOT)/src/objective/lambdarank_obj.o \ $(PKGROOT)/src/objective/hinge.o \ $(PKGROOT)/src/objective/aft_obj.o \ $(PKGROOT)/src/objective/adaptive.o \ $(PKGROOT)/src/objective/init_estimation.o \ $(PKGROOT)/src/objective/quantile_obj.o \ $(PKGROOT)/src/gbm/gbm.o \ $(PKGROOT)/src/gbm/gbtree.o \ $(PKGROOT)/src/gbm/gbtree_model.o \ $(PKGROOT)/src/gbm/gblinear.o \ $(PKGROOT)/src/gbm/gblinear_model.o \ $(PKGROOT)/src/data/adapter.o \ $(PKGROOT)/src/data/array_interface.o \ $(PKGROOT)/src/data/simple_dmatrix.o \ $(PKGROOT)/src/data/data.o \ $(PKGROOT)/src/data/sparse_page_raw_format.o \ $(PKGROOT)/src/data/ellpack_page.o \ $(PKGROOT)/src/data/file_iterator.o \ $(PKGROOT)/src/data/gradient_index.o \ $(PKGROOT)/src/data/gradient_index_page_source.o \ $(PKGROOT)/src/data/gradient_index_format.o \ $(PKGROOT)/src/data/validation.o \ $(PKGROOT)/src/data/sparse_page_dmatrix.o \ $(PKGROOT)/src/data/sparse_page_source.o \ $(PKGROOT)/src/data/extmem_quantile_dmatrix.o \ $(PKGROOT)/src/data/quantile_dmatrix.o \ $(PKGROOT)/src/data/batch_utils.o \ $(PKGROOT)/src/data/proxy_dmatrix.o \ $(PKGROOT)/src/data/iterative_dmatrix.o \ $(PKGROOT)/src/predictor/predictor.o \ $(PKGROOT)/src/predictor/cpu_predictor.o \ $(PKGROOT)/src/predictor/cpu_treeshap.o \ $(PKGROOT)/src/tree/constraints.o \ $(PKGROOT)/src/tree/param.o \ $(PKGROOT)/src/tree/fit_stump.o \ $(PKGROOT)/src/tree/tree_model.o \ $(PKGROOT)/src/tree/multi_target_tree_model.o \ $(PKGROOT)/src/tree/tree_updater.o \ $(PKGROOT)/src/tree/updater_approx.o \ $(PKGROOT)/src/tree/updater_colmaker.o \ $(PKGROOT)/src/tree/updater_prune.o \ $(PKGROOT)/src/tree/updater_quantile_hist.o \ $(PKGROOT)/src/tree/updater_refresh.o \ $(PKGROOT)/src/tree/updater_sync.o \ $(PKGROOT)/src/tree/hist/param.o \ $(PKGROOT)/src/tree/hist/histogram.o \ $(PKGROOT)/src/linear/linear_updater.o \ $(PKGROOT)/src/linear/updater_coordinate.o \ $(PKGROOT)/src/linear/updater_shotgun.o \ $(PKGROOT)/src/learner.o \ $(PKGROOT)/src/context.o \ $(PKGROOT)/src/logging.o \ $(PKGROOT)/src/global_config.o \ $(PKGROOT)/src/collective/result.o \ $(PKGROOT)/src/collective/allgather.o \ $(PKGROOT)/src/collective/allreduce.o \ $(PKGROOT)/src/collective/broadcast.o \ $(PKGROOT)/src/collective/comm.o \ $(PKGROOT)/src/collective/comm_group.o \ $(PKGROOT)/src/collective/coll.o \ $(PKGROOT)/src/collective/tracker.o \ $(PKGROOT)/src/collective/in_memory_handler.o \ $(PKGROOT)/src/collective/loop.o \ $(PKGROOT)/src/collective/socket.o \ $(PKGROOT)/src/common/charconv.o \ 
$(PKGROOT)/src/common/column_matrix.o \ $(PKGROOT)/src/common/common.o \ $(PKGROOT)/src/common/cuda_rt_utils.o \ $(PKGROOT)/src/common/error_msg.o \ $(PKGROOT)/src/common/hist_util.o \ $(PKGROOT)/src/common/host_device_vector.o \ $(PKGROOT)/src/common/io.o \ $(PKGROOT)/src/common/json.o \ $(PKGROOT)/src/common/numeric.o \ $(PKGROOT)/src/common/pseudo_huber.o \ $(PKGROOT)/src/common/quantile.o \ $(PKGROOT)/src/common/random.o \ $(PKGROOT)/src/common/stats.o \ $(PKGROOT)/src/common/survival_util.o \ $(PKGROOT)/src/common/threading_utils.o \ $(PKGROOT)/src/common/ranking_utils.o \ $(PKGROOT)/src/common/quantile_loss_utils.o \ $(PKGROOT)/src/common/timer.o \ $(PKGROOT)/src/common/version.o \ $(PKGROOT)/src/c_api/c_api.o \ $(PKGROOT)/src/c_api/c_api_error.o \ $(PKGROOT)/amalgamation/dmlc-minimum0.o xgboost-3.0.0/R-package/src/init.c000066400000000000000000000215571476511272400167120ustar00rootroot00000000000000/* Copyright (c) 2015 by Contributors * * This file was initially generated using the following R command: * tools::package_native_routine_registration_skeleton('.', con = 'src/init.c', character_only = F) * and edited to conform to xgboost C linter requirements. For details, see * https://cran.r-project.org/doc/manuals/r-release/R-exts.html#Registering-native-routines */ #include #include #include #include /* FIXME: Check these declarations against the C/Fortran source code. */ /* .Call calls */ extern void XGBInitializeAltrepClass_R(DllInfo *info); extern SEXP XGDuplicate_R(SEXP); extern SEXP XGPointerEqComparison_R(SEXP, SEXP); extern SEXP XGBoosterTrainOneIter_R(SEXP, SEXP, SEXP, SEXP, SEXP); extern SEXP XGBoosterCreate_R(SEXP); extern SEXP XGBoosterCopyInfoFromDMatrix_R(SEXP, SEXP); extern SEXP XGBoosterSetStrFeatureInfo_R(SEXP, SEXP, SEXP); extern SEXP XGBoosterGetStrFeatureInfo_R(SEXP, SEXP); extern SEXP XGBoosterBoostedRounds_R(SEXP); extern SEXP XGBoosterGetNumFeature_R(SEXP); extern SEXP XGBoosterDumpModel_R(SEXP, SEXP, SEXP, SEXP); extern SEXP XGBoosterEvalOneIter_R(SEXP, SEXP, SEXP, SEXP); extern SEXP XGBoosterGetAttrNames_R(SEXP); extern SEXP XGBoosterGetAttr_R(SEXP, SEXP); extern SEXP XGBoosterLoadModelFromRaw_R(SEXP, SEXP); extern SEXP XGBoosterSaveModelToRaw_R(SEXP handle, SEXP config); extern SEXP XGBoosterLoadModel_R(SEXP, SEXP); extern SEXP XGBoosterSaveJsonConfig_R(SEXP handle); extern SEXP XGBoosterLoadJsonConfig_R(SEXP handle, SEXP value); extern SEXP XGBoosterSerializeToBuffer_R(SEXP handle); extern SEXP XGBoosterUnserializeFromBuffer_R(SEXP handle, SEXP raw); extern SEXP XGBoosterPredictFromDMatrix_R(SEXP, SEXP, SEXP); extern SEXP XGBoosterPredictFromDense_R(SEXP, SEXP, SEXP, SEXP, SEXP); extern SEXP XGBoosterPredictFromCSR_R(SEXP, SEXP, SEXP, SEXP, SEXP); extern SEXP XGBoosterPredictFromColumnar_R(SEXP, SEXP, SEXP, SEXP, SEXP); extern SEXP XGBoosterSaveModel_R(SEXP, SEXP); extern SEXP XGBoosterSetAttr_R(SEXP, SEXP, SEXP); extern SEXP XGBoosterSetParam_R(SEXP, SEXP, SEXP); extern SEXP XGBoosterUpdateOneIter_R(SEXP, SEXP, SEXP); extern SEXP XGCheckNullPtr_R(SEXP); extern SEXP XGSetArrayDimNamesInplace_R(SEXP, SEXP); extern SEXP XGSetVectorNamesInplace_R(SEXP, SEXP); extern SEXP XGDMatrixCreateFromCSC_R(SEXP, SEXP, SEXP, SEXP, SEXP, SEXP); extern SEXP XGDMatrixCreateFromCSR_R(SEXP, SEXP, SEXP, SEXP, SEXP, SEXP); extern SEXP XGDMatrixCreateFromURI_R(SEXP, SEXP, SEXP); extern SEXP XGDMatrixCreateFromMat_R(SEXP, SEXP, SEXP); extern SEXP XGDMatrixGetFloatInfo_R(SEXP, SEXP); extern SEXP XGDMatrixGetUIntInfo_R(SEXP, SEXP); extern SEXP XGDMatrixCreateFromDF_R(SEXP, SEXP, 
SEXP); extern SEXP XGDMatrixGetStrFeatureInfo_R(SEXP, SEXP); extern SEXP XGDMatrixNumCol_R(SEXP); extern SEXP XGDMatrixNumRow_R(SEXP); extern SEXP XGProxyDMatrixCreate_R(); extern SEXP XGProxyDMatrixSetDataDense_R(SEXP, SEXP); extern SEXP XGProxyDMatrixSetDataCSR_R(SEXP, SEXP); extern SEXP XGProxyDMatrixSetDataColumnar_R(SEXP, SEXP); extern SEXP XGDMatrixCreateFromCallback_R(SEXP, SEXP, SEXP, SEXP, SEXP, SEXP, SEXP); extern SEXP XGQuantileDMatrixCreateFromCallback_R(SEXP, SEXP, SEXP, SEXP, SEXP, SEXP, SEXP, SEXP); extern SEXP XGDMatrixFree_R(SEXP); extern SEXP XGGetRNAIntAsDouble(); extern SEXP XGDMatrixGetQuantileCut_R(SEXP); extern SEXP XGDMatrixNumNonMissing_R(SEXP); extern SEXP XGDMatrixGetDataAsCSR_R(SEXP); extern SEXP XGDMatrixSaveBinary_R(SEXP, SEXP, SEXP); extern SEXP XGDMatrixSetInfo_R(SEXP, SEXP, SEXP); extern SEXP XGDMatrixSetStrFeatureInfo_R(SEXP, SEXP, SEXP); extern SEXP XGDMatrixSliceDMatrix_R(SEXP, SEXP, SEXP); extern SEXP XGBSetGlobalConfig_R(SEXP); extern SEXP XGBGetGlobalConfig_R(void); extern SEXP XGBoosterFeatureScore_R(SEXP, SEXP); extern SEXP XGBoosterSlice_R(SEXP, SEXP, SEXP, SEXP); extern SEXP XGBoosterSliceAndReplace_R(SEXP, SEXP, SEXP, SEXP); static const R_CallMethodDef CallEntries[] = { {"XGDuplicate_R", (DL_FUNC) &XGDuplicate_R, 1}, {"XGPointerEqComparison_R", (DL_FUNC) &XGPointerEqComparison_R, 2}, {"XGBoosterTrainOneIter_R", (DL_FUNC) &XGBoosterTrainOneIter_R, 5}, {"XGBoosterCreate_R", (DL_FUNC) &XGBoosterCreate_R, 1}, {"XGBoosterCopyInfoFromDMatrix_R", (DL_FUNC) &XGBoosterCopyInfoFromDMatrix_R, 2}, {"XGBoosterSetStrFeatureInfo_R",(DL_FUNC) &XGBoosterSetStrFeatureInfo_R,3}, // NOLINT {"XGBoosterGetStrFeatureInfo_R",(DL_FUNC) &XGBoosterGetStrFeatureInfo_R,2}, // NOLINT {"XGBoosterBoostedRounds_R", (DL_FUNC) &XGBoosterBoostedRounds_R, 1}, {"XGBoosterGetNumFeature_R", (DL_FUNC) &XGBoosterGetNumFeature_R, 1}, {"XGBoosterDumpModel_R", (DL_FUNC) &XGBoosterDumpModel_R, 4}, {"XGBoosterEvalOneIter_R", (DL_FUNC) &XGBoosterEvalOneIter_R, 4}, {"XGBoosterGetAttrNames_R", (DL_FUNC) &XGBoosterGetAttrNames_R, 1}, {"XGBoosterGetAttr_R", (DL_FUNC) &XGBoosterGetAttr_R, 2}, {"XGBoosterLoadModelFromRaw_R", (DL_FUNC) &XGBoosterLoadModelFromRaw_R, 2}, {"XGBoosterSaveModelToRaw_R", (DL_FUNC) &XGBoosterSaveModelToRaw_R, 2}, {"XGBoosterLoadModel_R", (DL_FUNC) &XGBoosterLoadModel_R, 2}, {"XGBoosterSaveJsonConfig_R", (DL_FUNC) &XGBoosterSaveJsonConfig_R, 1}, {"XGBoosterLoadJsonConfig_R", (DL_FUNC) &XGBoosterLoadJsonConfig_R, 2}, {"XGBoosterSerializeToBuffer_R", (DL_FUNC) &XGBoosterSerializeToBuffer_R, 1}, {"XGBoosterUnserializeFromBuffer_R", (DL_FUNC) &XGBoosterUnserializeFromBuffer_R, 2}, {"XGBoosterPredictFromDMatrix_R", (DL_FUNC) &XGBoosterPredictFromDMatrix_R, 3}, {"XGBoosterPredictFromDense_R", (DL_FUNC) &XGBoosterPredictFromDense_R, 5}, {"XGBoosterPredictFromCSR_R", (DL_FUNC) &XGBoosterPredictFromCSR_R, 5}, {"XGBoosterPredictFromColumnar_R", (DL_FUNC) &XGBoosterPredictFromColumnar_R, 5}, {"XGBoosterSaveModel_R", (DL_FUNC) &XGBoosterSaveModel_R, 2}, {"XGBoosterSetAttr_R", (DL_FUNC) &XGBoosterSetAttr_R, 3}, {"XGBoosterSetParam_R", (DL_FUNC) &XGBoosterSetParam_R, 3}, {"XGBoosterUpdateOneIter_R", (DL_FUNC) &XGBoosterUpdateOneIter_R, 3}, {"XGCheckNullPtr_R", (DL_FUNC) &XGCheckNullPtr_R, 1}, {"XGSetArrayDimNamesInplace_R", (DL_FUNC) &XGSetArrayDimNamesInplace_R, 2}, {"XGSetVectorNamesInplace_R", (DL_FUNC) &XGSetVectorNamesInplace_R, 2}, {"XGDMatrixCreateFromCSC_R", (DL_FUNC) &XGDMatrixCreateFromCSC_R, 6}, {"XGDMatrixCreateFromCSR_R", (DL_FUNC) &XGDMatrixCreateFromCSR_R, 6}, 
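  /* Note for maintainers: each R_CallMethodDef entry in this table lists the
     R-visible symbol name, the C entry point, and the number of arguments the
     routine takes; the count must match the C function's signature. */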
{"XGDMatrixCreateFromURI_R", (DL_FUNC) &XGDMatrixCreateFromURI_R, 3}, {"XGDMatrixCreateFromMat_R", (DL_FUNC) &XGDMatrixCreateFromMat_R, 3}, {"XGDMatrixGetFloatInfo_R", (DL_FUNC) &XGDMatrixGetFloatInfo_R, 2}, {"XGDMatrixGetUIntInfo_R", (DL_FUNC) &XGDMatrixGetUIntInfo_R, 2}, {"XGDMatrixCreateFromDF_R", (DL_FUNC) &XGDMatrixCreateFromDF_R, 3}, {"XGDMatrixGetStrFeatureInfo_R", (DL_FUNC) &XGDMatrixGetStrFeatureInfo_R, 2}, {"XGDMatrixNumCol_R", (DL_FUNC) &XGDMatrixNumCol_R, 1}, {"XGDMatrixNumRow_R", (DL_FUNC) &XGDMatrixNumRow_R, 1}, {"XGProxyDMatrixCreate_R", (DL_FUNC) &XGProxyDMatrixCreate_R, 0}, {"XGProxyDMatrixSetDataDense_R", (DL_FUNC) &XGProxyDMatrixSetDataDense_R, 2}, {"XGProxyDMatrixSetDataCSR_R", (DL_FUNC) &XGProxyDMatrixSetDataCSR_R, 2}, {"XGProxyDMatrixSetDataColumnar_R", (DL_FUNC) &XGProxyDMatrixSetDataColumnar_R, 2}, {"XGDMatrixCreateFromCallback_R", (DL_FUNC) &XGDMatrixCreateFromCallback_R, 7}, {"XGQuantileDMatrixCreateFromCallback_R", (DL_FUNC) &XGQuantileDMatrixCreateFromCallback_R, 8}, {"XGDMatrixFree_R", (DL_FUNC) &XGDMatrixFree_R, 1}, {"XGGetRNAIntAsDouble", (DL_FUNC) &XGGetRNAIntAsDouble, 0}, {"XGDMatrixGetQuantileCut_R", (DL_FUNC) &XGDMatrixGetQuantileCut_R, 1}, {"XGDMatrixNumNonMissing_R", (DL_FUNC) &XGDMatrixNumNonMissing_R, 1}, {"XGDMatrixGetDataAsCSR_R", (DL_FUNC) &XGDMatrixGetDataAsCSR_R, 1}, {"XGDMatrixSaveBinary_R", (DL_FUNC) &XGDMatrixSaveBinary_R, 3}, {"XGDMatrixSetInfo_R", (DL_FUNC) &XGDMatrixSetInfo_R, 3}, {"XGDMatrixSetStrFeatureInfo_R", (DL_FUNC) &XGDMatrixSetStrFeatureInfo_R, 3}, {"XGDMatrixSliceDMatrix_R", (DL_FUNC) &XGDMatrixSliceDMatrix_R, 3}, {"XGBSetGlobalConfig_R", (DL_FUNC) &XGBSetGlobalConfig_R, 1}, {"XGBGetGlobalConfig_R", (DL_FUNC) &XGBGetGlobalConfig_R, 0}, {"XGBoosterFeatureScore_R", (DL_FUNC) &XGBoosterFeatureScore_R, 2}, {"XGBoosterSlice_R", (DL_FUNC) &XGBoosterSlice_R, 4}, {"XGBoosterSliceAndReplace_R", (DL_FUNC) &XGBoosterSliceAndReplace_R, 4}, {NULL, NULL, 0} }; #if defined(_WIN32) __declspec(dllexport) #endif // defined(_WIN32) void attribute_visible R_init_xgboost(DllInfo *dll) { R_registerRoutines(dll, NULL, CallEntries, NULL, NULL); R_useDynamicSymbols(dll, FALSE); XGBInitializeAltrepClass_R(dll); } xgboost-3.0.0/R-package/src/xgboost-win.def000066400000000000000000000000541476511272400205300ustar00rootroot00000000000000LIBRARY xgboost.dll EXPORTS R_init_xgboost xgboost-3.0.0/R-package/src/xgboost_R.cc000066400000000000000000001532641476511272400200610ustar00rootroot00000000000000/** * Copyright 2014-2024, XGBoost Contributors */ #include #include #include #include #include #include #include #include #include #include #include #include #include #include #include #include #include #include "../../src/c_api/c_api_error.h" #include "../../src/c_api/c_api_utils.h" // MakeSparseFromPtr #include "../../src/common/threading_utils.h" #include "../../src/data/array_interface.h" // for ArrayInterface #include "./xgboost_R.h" // Must follow other includes. #ifdef _MSC_VER #error "Compilation of R package with MSVC is not supported due to issues handling R headers" #endif namespace { /* Note: this class is used as a throwable exception. Some xgboost C functions that use callbacks will catch exceptions that happen inside of the callback execution, hence it purposefully doesn't inherit from 'std::exception' even if used as such. 
*/ struct ErrorWithUnwind {}; void ThrowExceptionFromRError(void *, Rboolean jump) { if (jump) { throw ErrorWithUnwind(); } } struct PtrToConstChar { const char *ptr; }; SEXP WrappedMkChar(void *void_ptr) { return Rf_mkChar(static_cast<PtrToConstChar *>(void_ptr)->ptr); } SEXP SafeMkChar(const char *c_str, SEXP continuation_token) { PtrToConstChar ptr_struct{c_str}; return R_UnwindProtect( WrappedMkChar, static_cast<void *>(&ptr_struct), ThrowExceptionFromRError, nullptr, continuation_token); } struct RFunAndEnv { SEXP R_fun; SEXP R_calling_env; }; SEXP WrappedExecFun(void *void_ptr) { RFunAndEnv *r_fun_and_env = static_cast<RFunAndEnv *>(void_ptr); SEXP f_expr = Rf_protect(Rf_lang1(r_fun_and_env->R_fun)); SEXP out = Rf_protect(Rf_eval(f_expr, r_fun_and_env->R_calling_env)); Rf_unprotect(2); return out; } SEXP SafeExecFun(SEXP R_fun, SEXP R_calling_env, SEXP continuation_token) { RFunAndEnv r_fun_and_env{R_fun, R_calling_env}; return R_UnwindProtect( WrappedExecFun, static_cast<void *>(&r_fun_and_env), ThrowExceptionFromRError, nullptr, continuation_token); } SEXP WrappedAllocReal(void *void_ptr) { size_t *size = static_cast<size_t *>(void_ptr); return Rf_allocVector(REALSXP, *size); } SEXP SafeAllocReal(size_t size, SEXP continuation_token) { return R_UnwindProtect( WrappedAllocReal, static_cast<void *>(&size), ThrowExceptionFromRError, nullptr, continuation_token); } SEXP WrappedAllocInteger(void *void_ptr) { size_t *size = static_cast<size_t *>(void_ptr); return Rf_allocVector(INTSXP, *size); } SEXP SafeAllocInteger(size_t size, SEXP continuation_token) { return R_UnwindProtect( WrappedAllocInteger, static_cast<void *>(&size), ThrowExceptionFromRError, nullptr, continuation_token); } [[nodiscard]] std::string MakeArrayInterfaceFromRMat(SEXP R_mat) { SEXP mat_dims = Rf_getAttrib(R_mat, R_DimSymbol); if (Rf_xlength(mat_dims) > 2) { LOG(FATAL) << "Passed input array with more than two dimensions, which is not supported."; } const int *ptr_mat_dims = INTEGER(mat_dims); // Lambda for type dispatch. auto make_matrix = [=](auto const *ptr) { using namespace xgboost; // NOLINT using T = std::remove_pointer_t<decltype(ptr)>; auto m = linalg::MatrixView<T>{ common::Span<T>{ptr, static_cast<std::size_t>(ptr_mat_dims[0]) * static_cast<std::size_t>(ptr_mat_dims[1])}, {ptr_mat_dims[0], ptr_mat_dims[1]}, // Shape DeviceOrd::CPU(), linalg::Order::kF // R uses column-major }; CHECK(m.FContiguous()); return linalg::ArrayInterfaceStr(m); }; const SEXPTYPE arr_type = TYPEOF(R_mat); switch (arr_type) { case REALSXP: return make_matrix(REAL(R_mat)); case INTSXP: return make_matrix(INTEGER(R_mat)); case LGLSXP: return make_matrix(LOGICAL(R_mat)); default: LOG(FATAL) << "Array or matrix has unsupported type."; } LOG(FATAL) << "Not reachable"; return ""; } [[nodiscard]] std::string MakeArrayInterfaceFromRVector(SEXP R_vec) { const size_t vec_len = Rf_xlength(R_vec); // Lambda for type dispatch.
auto make_vec = [=](auto const *ptr) { using namespace xgboost; // NOLINT auto v = linalg::MakeVec(ptr, vec_len); return linalg::ArrayInterfaceStr(v); }; const SEXPTYPE arr_type = TYPEOF(R_vec); switch (arr_type) { case REALSXP: return make_vec(REAL(R_vec)); case INTSXP: return make_vec(INTEGER(R_vec)); case LGLSXP: return make_vec(LOGICAL(R_vec)); default: LOG(FATAL) << "Array or matrix has unsupported type."; } LOG(FATAL) << "Not reachable"; return ""; } [[nodiscard]] std::string MakeArrayInterfaceFromRDataFrame(SEXP R_df) { auto make_vec = [&](auto const *ptr, std::size_t len) { auto v = xgboost::linalg::MakeVec(ptr, len); return xgboost::linalg::ArrayInterface(v); }; R_xlen_t n_features = Rf_xlength(R_df); std::vector<xgboost::Json> array(n_features); CHECK_GT(n_features, 0); std::size_t len = Rf_xlength(VECTOR_ELT(R_df, 0)); // The `data.frame` in R actually converts all data into numeric. The other type // handlers here are not used. At the moment they are kept as a reference for when we // can avoid making data copies during transformation. for (R_xlen_t i = 0; i < n_features; ++i) { switch (TYPEOF(VECTOR_ELT(R_df, i))) { case INTSXP: { auto const *ptr = INTEGER(VECTOR_ELT(R_df, i)); array[i] = make_vec(ptr, len); break; } case REALSXP: { auto const *ptr = REAL(VECTOR_ELT(R_df, i)); array[i] = make_vec(ptr, len); break; } case LGLSXP: { auto const *ptr = LOGICAL(VECTOR_ELT(R_df, i)); array[i] = make_vec(ptr, len); break; } default: { LOG(FATAL) << "data.frame has unsupported type."; } } } xgboost::Json jinterface{std::move(array)}; return xgboost::Json::Dump(jinterface); } void AddMissingToJson(xgboost::Json *jconfig, SEXP missing, SEXPTYPE arr_type) { if (Rf_isNull(missing) || ISNAN(Rf_asReal(missing))) { // missing is not specified if (arr_type == REALSXP) { (*jconfig)["missing"] = std::numeric_limits<double>::quiet_NaN(); } else { (*jconfig)["missing"] = R_NaInt; } } else { // missing specified (*jconfig)["missing"] = Rf_asReal(missing); } } [[nodiscard]] std::string MakeJsonConfigForArray(SEXP missing, SEXP n_threads, SEXPTYPE arr_type) { using namespace ::xgboost; // NOLINT Json jconfig{Object{}}; AddMissingToJson(&jconfig, missing, arr_type); jconfig["nthread"] = Rf_asInteger(n_threads); return Json::Dump(jconfig); } // Allocate a R vector and copy an array interface encoded object to it. [[nodiscard]] SEXP CopyArrayToR(const char *array_str, SEXP ctoken) { xgboost::ArrayInterface<1> array{xgboost::StringView{array_str}}; // R supports only int and double. bool is_int_type = xgboost::DispatchDType(array.type, [](auto t) { return std::is_integral_v<decltype(t)>; }); bool is_float = xgboost::DispatchDType( array.type, [](auto v) { return std::is_floating_point_v<decltype(v)>; }); CHECK(is_int_type || is_float) << "Internal error: Invalid DType."; CHECK(array.is_contiguous) << "Internal error: Return by XGBoost should be contiguous"; // Note: the only case in which this will receive an integer type is // for the 'indptr' part of the quantile cut outputs, which comes // in sorted order, so the last element contains the maximum value. bool fits_into_C_int = xgboost::DispatchDType(array.type, [&](auto t) { using T = decltype(t); if (!std::is_integral_v<T>) { return false; } auto ptr = static_cast<T const *>(array.data); T last_elt = ptr[array.n - 1]; if (last_elt < 0) { last_elt = -last_elt; // no std::abs overload for all possible types } return last_elt <= std::numeric_limits<int>::max(); }); bool use_int = is_int_type && fits_into_C_int; // Allocate memory in R SEXP out = Rf_protect(use_int ?
SafeAllocInteger(array.n, ctoken) : SafeAllocReal(array.n, ctoken)); xgboost::DispatchDType(array.type, [&](auto t) { using T = decltype(t); auto in_ptr = static_cast<T const *>(array.data); if (use_int) { auto out_ptr = INTEGER(out); std::copy_n(in_ptr, array.n, out_ptr); } else { auto out_ptr = REAL(out); std::copy_n(in_ptr, array.n, out_ptr); } }); Rf_unprotect(1); return out; } } // namespace struct RRNGStateController { RRNGStateController() { GetRNGstate(); } ~RRNGStateController() { PutRNGstate(); } }; /*! * \brief macro to annotate begin of api */ #define R_API_BEGIN() \ try { \ RRNGStateController rng_controller{}; /* Note: an R error triggers a long jump, hence all C++ objects that allocated memory through non-R allocators, including the exception object, need to be destructed before triggering the R error. In order to preserve the error message, it gets copied to a temporary buffer, and the R error section is reached through a 'goto' statement that bypasses usual function control flow. */ char cpp_ex_msg[512]; /*! * \brief macro to annotate end of api */ #define R_API_END() \ } catch(std::exception &e) { \ std::strncpy(cpp_ex_msg, e.what(), 512); \ goto throw_cpp_ex_as_R_err; \ } \ if (false) { \ throw_cpp_ex_as_R_err: \ Rf_error("%s", cpp_ex_msg); \ } /** * @brief Macro for checking XGBoost return code. */ #define CHECK_CALL(__rc) \ if ((__rc) != 0) { \ Rf_error("%s", XGBGetLastError()); \ } using dmlc::BeginPtr; XGB_DLL SEXP XGCheckNullPtr_R(SEXP handle) { return Rf_ScalarLogical(R_ExternalPtrAddr(handle) == nullptr); } XGB_DLL SEXP XGSetArrayDimNamesInplace_R(SEXP arr, SEXP dim_names) { Rf_setAttrib(arr, R_DimNamesSymbol, dim_names); return R_NilValue; } XGB_DLL SEXP XGSetVectorNamesInplace_R(SEXP arr, SEXP names) { Rf_setAttrib(arr, R_NamesSymbol, names); return R_NilValue; } namespace { void _DMatrixFinalizer(SEXP ext) { R_API_BEGIN(); if (R_ExternalPtrAddr(ext) == NULL) return; CHECK_CALL(XGDMatrixFree(R_ExternalPtrAddr(ext))); R_ClearExternalPtr(ext); R_API_END(); } } /* namespace */ XGB_DLL SEXP XGBSetGlobalConfig_R(SEXP json_str) { R_API_BEGIN(); CHECK_CALL(XGBSetGlobalConfig(CHAR(Rf_asChar(json_str)))); R_API_END(); return R_NilValue; } XGB_DLL SEXP XGBGetGlobalConfig_R() { const char* json_str; R_API_BEGIN(); CHECK_CALL(XGBGetGlobalConfig(&json_str)); R_API_END(); return Rf_mkString(json_str); } XGB_DLL SEXP XGDMatrixCreateFromURI_R(SEXP uri, SEXP silent, SEXP data_split_mode) { SEXP ret = Rf_protect(R_MakeExternalPtr(nullptr, R_NilValue, R_NilValue)); SEXP uri_char = Rf_protect(Rf_asChar(uri)); const char *uri_ptr = CHAR(uri_char); R_API_BEGIN(); xgboost::Json jconfig{xgboost::Object{}}; jconfig["uri"] = std::string(uri_ptr); jconfig["silent"] = Rf_asLogical(silent); jconfig["data_split_mode"] = Rf_asInteger(data_split_mode); const std::string sconfig = xgboost::Json::Dump(jconfig); DMatrixHandle handle; CHECK_CALL(XGDMatrixCreateFromURI(sconfig.c_str(), &handle)); R_SetExternalPtrAddr(ret, handle); R_RegisterCFinalizerEx(ret, _DMatrixFinalizer, TRUE); R_API_END(); Rf_unprotect(2); return ret; } XGB_DLL SEXP XGDMatrixCreateFromMat_R(SEXP mat, SEXP missing, SEXP n_threads) { SEXP ret = Rf_protect(R_MakeExternalPtr(nullptr, R_NilValue, R_NilValue)); R_API_BEGIN(); DMatrixHandle handle; int res_code; { auto array_str = MakeArrayInterfaceFromRMat(mat); auto config_str = MakeJsonConfigForArray(missing, n_threads, TYPEOF(mat)); res_code = XGDMatrixCreateFromDense(array_str.c_str(), config_str.c_str(), &handle); } CHECK_CALL(res_code); R_SetExternalPtrAddr(ret, handle);
R_RegisterCFinalizerEx(ret, _DMatrixFinalizer, TRUE); R_API_END(); Rf_unprotect(1); return ret; } XGB_DLL SEXP XGDMatrixCreateFromDF_R(SEXP df, SEXP missing, SEXP n_threads) { SEXP ret = Rf_protect(R_MakeExternalPtr(nullptr, R_NilValue, R_NilValue)); R_API_BEGIN(); DMatrixHandle handle; std::int32_t rc{0}; { const std::string sinterface = MakeArrayInterfaceFromRDataFrame(df); xgboost::Json jconfig{xgboost::Object{}}; jconfig["missing"] = Rf_asReal(missing); jconfig["nthread"] = Rf_asInteger(n_threads); std::string sconfig = xgboost::Json::Dump(jconfig); rc = XGDMatrixCreateFromColumnar(sinterface.c_str(), sconfig.c_str(), &handle); } CHECK_CALL(rc); R_SetExternalPtrAddr(ret, handle); R_RegisterCFinalizerEx(ret, _DMatrixFinalizer, TRUE); R_API_END(); Rf_unprotect(1); return ret; } namespace { void CreateFromSparse(SEXP indptr, SEXP indices, SEXP data, std::string *indptr_str, std::string *indices_str, std::string *data_str) { const int *p_indptr = INTEGER(indptr); const int *p_indices = INTEGER(indices); const double *p_data = REAL(data); auto nindptr = static_cast<std::size_t>(Rf_xlength(indptr)); auto ndata = static_cast<std::size_t>(Rf_xlength(data)); CHECK_EQ(ndata, p_indptr[nindptr - 1]); xgboost::detail::MakeSparseFromPtr(p_indptr, p_indices, p_data, nindptr, indptr_str, indices_str, data_str); } } // namespace XGB_DLL SEXP XGDMatrixCreateFromCSC_R(SEXP indptr, SEXP indices, SEXP data, SEXP num_row, SEXP missing, SEXP n_threads) { SEXP ret = Rf_protect(R_MakeExternalPtr(nullptr, R_NilValue, R_NilValue)); R_API_BEGIN(); std::int32_t threads = Rf_asInteger(n_threads); DMatrixHandle handle; int res_code; { using xgboost::Integer; using xgboost::Json; using xgboost::Object; std::string sindptr, sindices, sdata; CreateFromSparse(indptr, indices, data, &sindptr, &sindices, &sdata); auto nrow = static_cast<std::size_t>(INTEGER(num_row)[0]); Json jconfig{Object{}}; // Construct configuration jconfig["nthread"] = Integer{threads}; AddMissingToJson(&jconfig, missing, TYPEOF(data)); std::string config; Json::Dump(jconfig, &config); res_code = XGDMatrixCreateFromCSC(sindptr.c_str(), sindices.c_str(), sdata.c_str(), nrow, config.c_str(), &handle); } CHECK_CALL(res_code); R_SetExternalPtrAddr(ret, handle); R_RegisterCFinalizerEx(ret, _DMatrixFinalizer, TRUE); R_API_END(); Rf_unprotect(1); return ret; } XGB_DLL SEXP XGDMatrixCreateFromCSR_R(SEXP indptr, SEXP indices, SEXP data, SEXP num_col, SEXP missing, SEXP n_threads) { SEXP ret = Rf_protect(R_MakeExternalPtr(nullptr, R_NilValue, R_NilValue)); R_API_BEGIN(); std::int32_t threads = Rf_asInteger(n_threads); DMatrixHandle handle; int res_code; { using xgboost::Integer; using xgboost::Json; using xgboost::Object; std::string sindptr, sindices, sdata; CreateFromSparse(indptr, indices, data, &sindptr, &sindices, &sdata); auto ncol = static_cast<std::size_t>(INTEGER(num_col)[0]); Json jconfig{Object{}}; // Construct configuration jconfig["nthread"] = Integer{threads}; AddMissingToJson(&jconfig, missing, TYPEOF(data)); std::string config; Json::Dump(jconfig, &config); res_code = XGDMatrixCreateFromCSR(sindptr.c_str(), sindices.c_str(), sdata.c_str(), ncol, config.c_str(), &handle); } CHECK_CALL(res_code); R_SetExternalPtrAddr(ret, handle); R_RegisterCFinalizerEx(ret, _DMatrixFinalizer, TRUE); R_API_END(); Rf_unprotect(1); return ret; } XGB_DLL SEXP XGDMatrixSliceDMatrix_R(SEXP handle, SEXP idxset, SEXP allow_groups) { SEXP ret = Rf_protect(R_MakeExternalPtr(nullptr, R_NilValue, R_NilValue)); R_API_BEGIN(); R_xlen_t len = Rf_xlength(idxset); const int *idxset_ = INTEGER(idxset); DMatrixHandle res; int
res_code; { std::vector idxvec(len); #ifndef _MSC_VER #pragma omp simd #endif for (R_xlen_t i = 0; i < len; ++i) { idxvec[i] = idxset_[i] - 1; } res_code = XGDMatrixSliceDMatrixEx(R_ExternalPtrAddr(handle), BeginPtr(idxvec), len, &res, Rf_asLogical(allow_groups)); } CHECK_CALL(res_code); R_SetExternalPtrAddr(ret, res); R_RegisterCFinalizerEx(ret, _DMatrixFinalizer, TRUE); R_API_END(); Rf_unprotect(1); return ret; } XGB_DLL SEXP XGDMatrixSaveBinary_R(SEXP handle, SEXP fname, SEXP silent) { R_API_BEGIN(); CHECK_CALL(XGDMatrixSaveBinary(R_ExternalPtrAddr(handle), CHAR(Rf_asChar(fname)), Rf_asInteger(silent))); R_API_END(); return R_NilValue; } XGB_DLL SEXP XGDMatrixSetInfo_R(SEXP handle, SEXP field, SEXP array) { R_API_BEGIN(); SEXP field_ = Rf_protect(Rf_asChar(field)); SEXP arr_dim = Rf_getAttrib(array, R_DimSymbol); int res_code; { const std::string array_str = Rf_isNull(arr_dim)? MakeArrayInterfaceFromRVector(array) : MakeArrayInterfaceFromRMat(array); res_code = XGDMatrixSetInfoFromInterface( R_ExternalPtrAddr(handle), CHAR(field_), array_str.c_str()); } CHECK_CALL(res_code); Rf_unprotect(1); R_API_END(); return R_NilValue; } XGB_DLL SEXP XGDMatrixSetStrFeatureInfo_R(SEXP handle, SEXP field, SEXP array) { R_API_BEGIN(); size_t len{0}; if (!Rf_isNull(array)) { len = Rf_xlength(array); } SEXP str_info_holder = Rf_protect(Rf_allocVector(VECSXP, len)); if (TYPEOF(array) == STRSXP) { for (size_t i = 0; i < len; ++i) { SET_VECTOR_ELT(str_info_holder, i, STRING_ELT(array, i)); } } else { for (size_t i = 0; i < len; ++i) { SET_VECTOR_ELT(str_info_holder, i, Rf_asChar(VECTOR_ELT(array, i))); } } SEXP field_ = Rf_protect(Rf_asChar(field)); const char *name = CHAR(field_); int res_code; { std::vector str_info; str_info.reserve(len); for (size_t i = 0; i < len; ++i) { str_info.emplace_back(CHAR(VECTOR_ELT(str_info_holder, i))); } std::vector vec(len); std::transform(str_info.cbegin(), str_info.cend(), vec.begin(), [](std::string const &str) { return str.c_str(); }); res_code = XGDMatrixSetStrFeatureInfo(R_ExternalPtrAddr(handle), name, vec.data(), len); } CHECK_CALL(res_code); Rf_unprotect(2); R_API_END(); return R_NilValue; } XGB_DLL SEXP XGDMatrixGetStrFeatureInfo_R(SEXP handle, SEXP field) { SEXP ret; R_API_BEGIN(); char const **out_features{nullptr}; bst_ulong len{0}; const char *name = CHAR(Rf_asChar(field)); XGDMatrixGetStrFeatureInfo(R_ExternalPtrAddr(handle), name, &len, &out_features); if (len > 0) { ret = Rf_protect(Rf_allocVector(STRSXP, len)); for (size_t i = 0; i < len; ++i) { SET_STRING_ELT(ret, i, Rf_mkChar(out_features[i])); } } else { ret = Rf_protect(R_NilValue); } R_API_END(); Rf_unprotect(1); return ret; } XGB_DLL SEXP XGDMatrixGetFloatInfo_R(SEXP handle, SEXP field) { SEXP ret; R_API_BEGIN(); bst_ulong olen; const float *res; CHECK_CALL(XGDMatrixGetFloatInfo(R_ExternalPtrAddr(handle), CHAR(Rf_asChar(field)), &olen, &res)); ret = Rf_protect(Rf_allocVector(REALSXP, olen)); std::copy(res, res + olen, REAL(ret)); R_API_END(); Rf_unprotect(1); return ret; } XGB_DLL SEXP XGDMatrixGetUIntInfo_R(SEXP handle, SEXP field) { SEXP ret; R_API_BEGIN(); bst_ulong olen; const unsigned *res; CHECK_CALL(XGDMatrixGetUIntInfo(R_ExternalPtrAddr(handle), CHAR(Rf_asChar(field)), &olen, &res)); ret = Rf_protect(Rf_allocVector(INTSXP, olen)); std::copy(res, res + olen, INTEGER(ret)); R_API_END(); Rf_unprotect(1); return ret; } XGB_DLL SEXP XGDMatrixNumRow_R(SEXP handle) { bst_ulong nrow; R_API_BEGIN(); CHECK_CALL(XGDMatrixNumRow(R_ExternalPtrAddr(handle), &nrow)); R_API_END(); return 
Rf_ScalarInteger(static_cast(nrow)); } XGB_DLL SEXP XGDMatrixNumCol_R(SEXP handle) { bst_ulong ncol; R_API_BEGIN(); CHECK_CALL(XGDMatrixNumCol(R_ExternalPtrAddr(handle), &ncol)); R_API_END(); return Rf_ScalarInteger(static_cast(ncol)); } XGB_DLL SEXP XGProxyDMatrixCreate_R() { SEXP out = Rf_protect(R_MakeExternalPtr(nullptr, R_NilValue, R_NilValue)); R_API_BEGIN(); DMatrixHandle proxy_dmat_handle; CHECK_CALL(XGProxyDMatrixCreate(&proxy_dmat_handle)); R_SetExternalPtrAddr(out, proxy_dmat_handle); R_RegisterCFinalizerEx(out, _DMatrixFinalizer, TRUE); Rf_unprotect(1); R_API_END(); return out; } XGB_DLL SEXP XGProxyDMatrixSetDataDense_R(SEXP handle, SEXP R_mat) { R_API_BEGIN(); DMatrixHandle proxy_dmat = R_ExternalPtrAddr(handle); int res_code; { std::string array_str = MakeArrayInterfaceFromRMat(R_mat); res_code = XGProxyDMatrixSetDataDense(proxy_dmat, array_str.c_str()); } CHECK_CALL(res_code); R_API_END(); return R_NilValue; } XGB_DLL SEXP XGProxyDMatrixSetDataCSR_R(SEXP handle, SEXP lst) { R_API_BEGIN(); DMatrixHandle proxy_dmat = R_ExternalPtrAddr(handle); int res_code; { std::string array_str_indptr = MakeArrayInterfaceFromRVector(VECTOR_ELT(lst, 0)); std::string array_str_indices = MakeArrayInterfaceFromRVector(VECTOR_ELT(lst, 1)); std::string array_str_data = MakeArrayInterfaceFromRVector(VECTOR_ELT(lst, 2)); const int ncol = Rf_asInteger(VECTOR_ELT(lst, 3)); res_code = XGProxyDMatrixSetDataCSR(proxy_dmat, array_str_indptr.c_str(), array_str_indices.c_str(), array_str_data.c_str(), ncol); } CHECK_CALL(res_code); R_API_END(); return R_NilValue; } XGB_DLL SEXP XGProxyDMatrixSetDataColumnar_R(SEXP handle, SEXP lst) { R_API_BEGIN(); DMatrixHandle proxy_dmat = R_ExternalPtrAddr(handle); int res_code; { std::string sinterface = MakeArrayInterfaceFromRDataFrame(lst); res_code = XGProxyDMatrixSetDataColumnar(proxy_dmat, sinterface.c_str()); } CHECK_CALL(res_code); R_API_END(); return R_NilValue; } namespace { struct _RDataIterator { SEXP f_next; SEXP f_reset; SEXP calling_env; SEXP continuation_token; _RDataIterator( SEXP f_next, SEXP f_reset, SEXP calling_env, SEXP continuation_token) : f_next(f_next), f_reset(f_reset), calling_env(calling_env), continuation_token(continuation_token) {} void reset() { SafeExecFun(this->f_reset, this->calling_env, this->continuation_token); } int next() { SEXP R_res = Rf_protect( SafeExecFun(this->f_next, this->calling_env, this->continuation_token)); int res = Rf_asInteger(R_res); Rf_unprotect(1); return res; } }; void _reset_RDataIterator(DataIterHandle iter) { static_cast<_RDataIterator*>(iter)->reset(); } int _next_RDataIterator(DataIterHandle iter) { return static_cast<_RDataIterator*>(iter)->next(); } SEXP XGDMatrixCreateFromCallbackGeneric_R( SEXP f_next, SEXP f_reset, SEXP calling_env, SEXP proxy_dmat, SEXP n_threads, SEXP missing, SEXP max_bin, SEXP ref_dmat, SEXP cache_prefix, bool as_quantile_dmatrix) { SEXP continuation_token = Rf_protect(R_MakeUnwindCont()); SEXP out = Rf_protect(R_MakeExternalPtr(nullptr, R_NilValue, R_NilValue)); R_API_BEGIN(); DMatrixHandle out_dmat; int res_code; try { _RDataIterator data_iterator(f_next, f_reset, calling_env, continuation_token); std::string str_cache_prefix; xgboost::Json jconfig{xgboost::Object{}}; jconfig["missing"] = Rf_asReal(missing); if (!Rf_isNull(n_threads)) { jconfig["nthread"] = Rf_asInteger(n_threads); } if (as_quantile_dmatrix) { if (!Rf_isNull(max_bin)) { jconfig["max_bin"] = Rf_asInteger(max_bin); } } else { str_cache_prefix = std::string(CHAR(Rf_asChar(cache_prefix))); jconfig["cache_prefix"] 
= str_cache_prefix; } std::string json_str = xgboost::Json::Dump(jconfig); DMatrixHandle ref_dmat_handle = nullptr; if (as_quantile_dmatrix && !Rf_isNull(ref_dmat)) { ref_dmat_handle = R_ExternalPtrAddr(ref_dmat); } if (as_quantile_dmatrix) { res_code = XGQuantileDMatrixCreateFromCallback( &data_iterator, R_ExternalPtrAddr(proxy_dmat), ref_dmat_handle, _reset_RDataIterator, _next_RDataIterator, json_str.c_str(), &out_dmat); } else { res_code = XGDMatrixCreateFromCallback( &data_iterator, R_ExternalPtrAddr(proxy_dmat), _reset_RDataIterator, _next_RDataIterator, json_str.c_str(), &out_dmat); } } catch (ErrorWithUnwind &e) { R_ContinueUnwind(continuation_token); } CHECK_CALL(res_code); R_SetExternalPtrAddr(out, out_dmat); R_RegisterCFinalizerEx(out, _DMatrixFinalizer, TRUE); Rf_unprotect(2); R_API_END(); return out; } } /* namespace */ XGB_DLL SEXP XGQuantileDMatrixCreateFromCallback_R( SEXP f_next, SEXP f_reset, SEXP calling_env, SEXP proxy_dmat, SEXP n_threads, SEXP missing, SEXP max_bin, SEXP ref_dmat) { return XGDMatrixCreateFromCallbackGeneric_R( f_next, f_reset, calling_env, proxy_dmat, n_threads, missing, max_bin, ref_dmat, R_NilValue, true); } XGB_DLL SEXP XGDMatrixCreateFromCallback_R( SEXP f_next, SEXP f_reset, SEXP calling_env, SEXP proxy_dmat, SEXP n_threads, SEXP missing, SEXP cache_prefix) { return XGDMatrixCreateFromCallbackGeneric_R( f_next, f_reset, calling_env, proxy_dmat, n_threads, missing, R_NilValue, R_NilValue, cache_prefix, false); } XGB_DLL SEXP XGDMatrixFree_R(SEXP proxy_dmat) { _DMatrixFinalizer(proxy_dmat); return R_NilValue; } XGB_DLL SEXP XGGetRNAIntAsDouble() { double sentinel_as_double = static_cast(R_NaInt); return Rf_ScalarReal(sentinel_as_double); } XGB_DLL SEXP XGDuplicate_R(SEXP obj) { return Rf_duplicate(obj); } XGB_DLL SEXP XGPointerEqComparison_R(SEXP obj1, SEXP obj2) { return Rf_ScalarLogical(R_ExternalPtrAddr(obj1) == R_ExternalPtrAddr(obj2)); } XGB_DLL SEXP XGDMatrixGetQuantileCut_R(SEXP handle) { const char *out_names[] = {"indptr", "data", ""}; SEXP continuation_token = Rf_protect(R_MakeUnwindCont()); SEXP out = Rf_protect(Rf_mkNamed(VECSXP, out_names)); R_API_BEGIN(); const char *out_indptr; const char *out_data; CHECK_CALL(XGDMatrixGetQuantileCut(R_ExternalPtrAddr(handle), "{}", &out_indptr, &out_data)); try { SET_VECTOR_ELT(out, 0, CopyArrayToR(out_indptr, continuation_token)); SET_VECTOR_ELT(out, 1, CopyArrayToR(out_data, continuation_token)); } catch (ErrorWithUnwind &e) { R_ContinueUnwind(continuation_token); } R_API_END(); Rf_unprotect(2); return out; } XGB_DLL SEXP XGDMatrixNumNonMissing_R(SEXP handle) { SEXP out = Rf_protect(Rf_allocVector(REALSXP, 1)); R_API_BEGIN(); bst_ulong out_; CHECK_CALL(XGDMatrixNumNonMissing(R_ExternalPtrAddr(handle), &out_)); REAL(out)[0] = static_cast(out_); R_API_END(); Rf_unprotect(1); return out; } XGB_DLL SEXP XGDMatrixGetDataAsCSR_R(SEXP handle) { const char *out_names[] = {"indptr", "indices", "data", "ncols", ""}; SEXP out = Rf_protect(Rf_mkNamed(VECSXP, out_names)); R_API_BEGIN(); bst_ulong nrows, ncols, nnz; CHECK_CALL(XGDMatrixNumRow(R_ExternalPtrAddr(handle), &nrows)); CHECK_CALL(XGDMatrixNumCol(R_ExternalPtrAddr(handle), &ncols)); CHECK_CALL(XGDMatrixNumNonMissing(R_ExternalPtrAddr(handle), &nnz)); if (std::max(nrows, ncols) > std::numeric_limits::max()) { Rf_error("%s", "Error: resulting DMatrix data does not fit into R 'dgRMatrix'."); } SET_VECTOR_ELT(out, 0, Rf_allocVector(INTSXP, nrows + 1)); SET_VECTOR_ELT(out, 1, Rf_allocVector(INTSXP, nnz)); SET_VECTOR_ELT(out, 2, Rf_allocVector(REALSXP, 
nnz)); SET_VECTOR_ELT(out, 3, Rf_ScalarInteger(ncols)); std::unique_ptr indptr(new bst_ulong[nrows + 1]); std::unique_ptr indices(new unsigned[nnz]); std::unique_ptr data(new float[nnz]); CHECK_CALL(XGDMatrixGetDataAsCSR(R_ExternalPtrAddr(handle), "{}", indptr.get(), indices.get(), data.get())); std::copy(indptr.get(), indptr.get() + nrows + 1, INTEGER(VECTOR_ELT(out, 0))); std::copy(indices.get(), indices.get() + nnz, INTEGER(VECTOR_ELT(out, 1))); std::copy(data.get(), data.get() + nnz, REAL(VECTOR_ELT(out, 2))); R_API_END(); Rf_unprotect(1); return out; } // functions related to booster namespace { void _BoosterFinalizer(SEXP R_ptr) { if (R_ExternalPtrAddr(R_ptr) == NULL) return; CHECK_CALL(XGBoosterFree(R_ExternalPtrAddr(R_ptr))); R_ClearExternalPtr(R_ptr); } /* Booster is represented as an altrep list with one element which corresponds to an 'externalptr' holding the C object, forbidding modification by not implementing setters, and adding custom serialization. */ R_altrep_class_t XGBAltrepPointerClass; R_xlen_t XGBAltrepPointerLength_R(SEXP R_altrepped_obj) { return 1; } SEXP XGBAltrepPointerGetElt_R(SEXP R_altrepped_obj, R_xlen_t idx) { return R_altrep_data1(R_altrepped_obj); } SEXP XGBMakeEmptyAltrep() { SEXP class_name = Rf_protect(Rf_mkString("xgb.Booster")); SEXP elt_names = Rf_protect(Rf_mkString("ptr")); SEXP R_ptr = Rf_protect(R_MakeExternalPtr(nullptr, R_NilValue, R_NilValue)); SEXP R_altrepped_obj = Rf_protect(R_new_altrep(XGBAltrepPointerClass, R_ptr, R_NilValue)); Rf_setAttrib(R_altrepped_obj, R_NamesSymbol, elt_names); Rf_setAttrib(R_altrepped_obj, R_ClassSymbol, class_name); Rf_unprotect(4); return R_altrepped_obj; } /* Note: the idea for separating this function from the one above is to be able to trigger all R allocations first before doing non-R allocations. 
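For example, the intended calling sequence looks like this (an illustrative sketch only;
the concrete instance is XGBoosterCreate_R further below in this file):
  SEXP out = Rf_protect(XGBMakeEmptyAltrep());       // R allocations happen first
  BoosterHandle handle;
  CHECK_CALL(XGBoosterCreate(nullptr, 0, &handle));  // non-R allocation afterwards
  XGBAltrepSetPointer(out, handle);                  // attach handle, register finalizer
Done in this order, a failed R allocation cannot leak the C++-side booster, because
the handle does not exist yet at the time R memory is requested.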
*/ void XGBAltrepSetPointer(SEXP R_altrepped_obj, BoosterHandle handle) { SEXP R_ptr = R_altrep_data1(R_altrepped_obj); R_SetExternalPtrAddr(R_ptr, handle); R_RegisterCFinalizerEx(R_ptr, _BoosterFinalizer, TRUE); } SEXP XGBAltrepSerializer_R(SEXP R_altrepped_obj) { R_API_BEGIN(); BoosterHandle handle = R_ExternalPtrAddr(R_altrep_data1(R_altrepped_obj)); char const *serialized_bytes; bst_ulong serialized_length; CHECK_CALL(XGBoosterSerializeToBuffer( handle, &serialized_length, &serialized_bytes)); SEXP R_state = Rf_protect(Rf_allocVector(RAWSXP, serialized_length)); if (serialized_length != 0) { std::memcpy(RAW(R_state), serialized_bytes, serialized_length); } Rf_unprotect(1); return R_state; R_API_END(); return R_NilValue; /* <- should not be reached */ } SEXP XGBAltrepDeserializer_R(SEXP unused, SEXP R_state) { SEXP R_altrepped_obj = Rf_protect(XGBMakeEmptyAltrep()); R_API_BEGIN(); BoosterHandle handle = nullptr; CHECK_CALL(XGBoosterCreate(nullptr, 0, &handle)); int res_code = XGBoosterUnserializeFromBuffer(handle, RAW(R_state), Rf_xlength(R_state)); if (res_code != 0) { XGBoosterFree(handle); } CHECK_CALL(res_code); XGBAltrepSetPointer(R_altrepped_obj, handle); R_API_END(); Rf_unprotect(1); return R_altrepped_obj; } // https://purrple.cat/blog/2018/10/14/altrep-and-cpp/ Rboolean XGBAltrepInspector_R( SEXP x, int pre, int deep, int pvec, void (*inspect_subtree)(SEXP, int, int, int)) { Rprintf("Altrepped external pointer [address:%p]\n", R_ExternalPtrAddr(R_altrep_data1(x))); return TRUE; } SEXP XGBAltrepDuplicate_R(SEXP R_altrepped_obj, Rboolean deep) { R_API_BEGIN(); if (!deep) { SEXP out = Rf_protect(XGBMakeEmptyAltrep()); R_set_altrep_data1(out, R_altrep_data1(R_altrepped_obj)); Rf_unprotect(1); return out; } else { SEXP out = Rf_protect(XGBMakeEmptyAltrep()); char const *serialized_bytes; bst_ulong serialized_length; CHECK_CALL(XGBoosterSerializeToBuffer( R_ExternalPtrAddr(R_altrep_data1(R_altrepped_obj)), &serialized_length, &serialized_bytes)); BoosterHandle new_handle = nullptr; CHECK_CALL(XGBoosterCreate(nullptr, 0, &new_handle)); int res_code = XGBoosterUnserializeFromBuffer(new_handle, serialized_bytes, serialized_length); if (res_code != 0) { XGBoosterFree(new_handle); } CHECK_CALL(res_code); XGBAltrepSetPointer(out, new_handle); Rf_unprotect(1); return out; } R_API_END(); return R_NilValue; /* <- should not be reached */ } } /* namespace */ XGB_DLL void XGBInitializeAltrepClass_R(DllInfo *dll) { XGBAltrepPointerClass = R_make_altlist_class("XGBAltrepPointerClass", "xgboost", dll); R_set_altrep_Length_method(XGBAltrepPointerClass, XGBAltrepPointerLength_R); R_set_altlist_Elt_method(XGBAltrepPointerClass, XGBAltrepPointerGetElt_R); R_set_altrep_Inspect_method(XGBAltrepPointerClass, XGBAltrepInspector_R); R_set_altrep_Serialized_state_method(XGBAltrepPointerClass, XGBAltrepSerializer_R); R_set_altrep_Unserialize_method(XGBAltrepPointerClass, XGBAltrepDeserializer_R); R_set_altrep_Duplicate_method(XGBAltrepPointerClass, XGBAltrepDuplicate_R); } XGB_DLL SEXP XGBoosterCreate_R(SEXP dmats) { SEXP out = Rf_protect(XGBMakeEmptyAltrep()); R_API_BEGIN(); R_xlen_t len = Rf_xlength(dmats); BoosterHandle handle; int res_code; { std::vector dvec(len); for (R_xlen_t i = 0; i < len; ++i) { dvec[i] = R_ExternalPtrAddr(VECTOR_ELT(dmats, i)); } res_code = XGBoosterCreate(BeginPtr(dvec), dvec.size(), &handle); } CHECK_CALL(res_code); XGBAltrepSetPointer(out, handle); R_API_END(); Rf_unprotect(1); return out; } XGB_DLL SEXP XGBoosterCopyInfoFromDMatrix_R(SEXP booster, SEXP dmat) { R_API_BEGIN(); 
char const **feature_names; bst_ulong len_feature_names = 0; CHECK_CALL(XGDMatrixGetStrFeatureInfo(R_ExternalPtrAddr(dmat), "feature_name", &len_feature_names, &feature_names)); if (len_feature_names) { CHECK_CALL(XGBoosterSetStrFeatureInfo(R_ExternalPtrAddr(booster), "feature_name", feature_names, len_feature_names)); } char const **feature_types; bst_ulong len_feature_types = 0; CHECK_CALL(XGDMatrixGetStrFeatureInfo(R_ExternalPtrAddr(dmat), "feature_type", &len_feature_types, &feature_types)); if (len_feature_types) { CHECK_CALL(XGBoosterSetStrFeatureInfo(R_ExternalPtrAddr(booster), "feature_type", feature_types, len_feature_types)); } R_API_END(); return R_NilValue; } XGB_DLL SEXP XGBoosterSetStrFeatureInfo_R(SEXP handle, SEXP field, SEXP features) { R_API_BEGIN(); SEXP field_char = Rf_protect(Rf_asChar(field)); bst_ulong len_features = Rf_xlength(features); int res_code; { std::vector str_arr(len_features); for (bst_ulong idx = 0; idx < len_features; idx++) { str_arr[idx] = CHAR(STRING_ELT(features, idx)); } res_code = XGBoosterSetStrFeatureInfo(R_ExternalPtrAddr(handle), CHAR(field_char), str_arr.data(), len_features); } CHECK_CALL(res_code); Rf_unprotect(1); R_API_END(); return R_NilValue; } XGB_DLL SEXP XGBoosterGetStrFeatureInfo_R(SEXP handle, SEXP field) { R_API_BEGIN(); bst_ulong len; const char **out_features; SEXP field_char = Rf_protect(Rf_asChar(field)); CHECK_CALL(XGBoosterGetStrFeatureInfo(R_ExternalPtrAddr(handle), CHAR(field_char), &len, &out_features)); SEXP out = Rf_protect(Rf_allocVector(STRSXP, len)); for (bst_ulong idx = 0; idx < len; idx++) { SET_STRING_ELT(out, idx, Rf_mkChar(out_features[idx])); } Rf_unprotect(2); return out; R_API_END(); return R_NilValue; /* <- should not be reached */ } XGB_DLL SEXP XGBoosterBoostedRounds_R(SEXP handle) { SEXP out = Rf_protect(Rf_allocVector(INTSXP, 1)); R_API_BEGIN(); CHECK_CALL(XGBoosterBoostedRounds(R_ExternalPtrAddr(handle), INTEGER(out))); R_API_END(); Rf_unprotect(1); return out; } /* Note: R's integer class is 32-bit-and-signed only, while xgboost supports more, so it returns it as a floating point instead */ XGB_DLL SEXP XGBoosterGetNumFeature_R(SEXP handle) { SEXP out = Rf_protect(Rf_allocVector(REALSXP, 1)); R_API_BEGIN(); bst_ulong res; CHECK_CALL(XGBoosterGetNumFeature(R_ExternalPtrAddr(handle), &res)); REAL(out)[0] = static_cast(res); R_API_END(); Rf_unprotect(1); return out; } XGB_DLL SEXP XGBoosterSetParam_R(SEXP handle, SEXP name, SEXP val) { R_API_BEGIN(); SEXP name_ = Rf_protect(Rf_asChar(name)); SEXP val_ = Rf_protect(Rf_asChar(val)); CHECK_CALL(XGBoosterSetParam(R_ExternalPtrAddr(handle), CHAR(name_), CHAR(val_))); Rf_unprotect(2); R_API_END(); return R_NilValue; } XGB_DLL SEXP XGBoosterUpdateOneIter_R(SEXP handle, SEXP iter, SEXP dtrain) { R_API_BEGIN(); CHECK_CALL(XGBoosterUpdateOneIter(R_ExternalPtrAddr(handle), Rf_asInteger(iter), R_ExternalPtrAddr(dtrain))); R_API_END(); return R_NilValue; } XGB_DLL SEXP XGBoosterTrainOneIter_R(SEXP handle, SEXP dtrain, SEXP iter, SEXP grad, SEXP hess) { R_API_BEGIN(); CHECK_EQ(Rf_xlength(grad), Rf_xlength(hess)) << "gradient and hess must have same length."; SEXP gdim = Rf_getAttrib(grad, R_DimSymbol); SEXP hdim = Rf_getAttrib(hess, R_DimSymbol); int res_code; { const std::string s_grad = Rf_isNull(gdim)? MakeArrayInterfaceFromRVector(grad) : MakeArrayInterfaceFromRMat(grad); const std::string s_hess = Rf_isNull(hdim)? 
MakeArrayInterfaceFromRVector(hess) : MakeArrayInterfaceFromRMat(hess); res_code = XGBoosterTrainOneIter(R_ExternalPtrAddr(handle), R_ExternalPtrAddr(dtrain), Rf_asInteger(iter), s_grad.c_str(), s_hess.c_str()); } CHECK_CALL(res_code); R_API_END(); return R_NilValue; } XGB_DLL SEXP XGBoosterEvalOneIter_R(SEXP handle, SEXP iter, SEXP dmats, SEXP evnames) { const char *ret; R_API_BEGIN(); CHECK_EQ(Rf_xlength(dmats), Rf_xlength(evnames)) << "dmats and evnames must have same length"; R_xlen_t len = Rf_xlength(dmats); SEXP evnames_lst = Rf_protect(Rf_allocVector(VECSXP, len)); for (R_xlen_t i = 0; i < len; i++) { SET_VECTOR_ELT(evnames_lst, i, Rf_asChar(VECTOR_ELT(evnames, i))); } int res_code; { std::vector<void *> vec_dmats(len); std::vector<std::string> vec_names; vec_names.reserve(len); std::vector<const char *> vec_sptr(len); for (R_xlen_t i = 0; i < len; ++i) { vec_dmats[i] = R_ExternalPtrAddr(VECTOR_ELT(dmats, i)); vec_names.emplace_back(CHAR(VECTOR_ELT(evnames_lst, i))); } for (R_xlen_t i = 0; i < len; ++i) { vec_sptr[i] = vec_names[i].c_str(); } res_code = XGBoosterEvalOneIter(R_ExternalPtrAddr(handle), Rf_asInteger(iter), BeginPtr(vec_dmats), BeginPtr(vec_sptr), len, &ret); } CHECK_CALL(res_code); Rf_unprotect(1); R_API_END(); return Rf_mkString(ret); } namespace { struct ProxyDmatrixError : public std::exception {}; struct ProxyDmatrixWrapper { DMatrixHandle proxy_dmat_handle; ProxyDmatrixWrapper() { int res_code = XGProxyDMatrixCreate(&this->proxy_dmat_handle); if (res_code != 0) { throw ProxyDmatrixError(); } } ~ProxyDmatrixWrapper() { if (this->proxy_dmat_handle) { XGDMatrixFree(this->proxy_dmat_handle); this->proxy_dmat_handle = nullptr; } } DMatrixHandle get_handle() { return this->proxy_dmat_handle; } }; std::unique_ptr<ProxyDmatrixWrapper> GetProxyDMatrixWithBaseMargin(SEXP base_margin) { if (Rf_isNull(base_margin)) { return std::unique_ptr<ProxyDmatrixWrapper>(nullptr); } SEXP base_margin_dim = Rf_getAttrib(base_margin, R_DimSymbol); int res_code; try { const std::string array_str = Rf_isNull(base_margin_dim)? MakeArrayInterfaceFromRVector(base_margin) : MakeArrayInterfaceFromRMat(base_margin); std::unique_ptr<ProxyDmatrixWrapper> proxy_dmat(new ProxyDmatrixWrapper()); res_code = XGDMatrixSetInfoFromInterface(proxy_dmat->get_handle(), "base_margin", array_str.c_str()); if (res_code != 0) { throw ProxyDmatrixError(); } return proxy_dmat; } catch(ProxyDmatrixError &err) { Rf_error("%s", XGBGetLastError()); } } enum class PredictionInputType {DMatrix, DenseMatrix, CSRMatrix, DataFrame}; SEXP XGBoosterPredictGeneric(SEXP handle, SEXP input_data, SEXP json_config, PredictionInputType input_type, SEXP missing, SEXP base_margin) { SEXP r_out_result = R_NilValue; R_API_BEGIN(); SEXP json_config_ = Rf_protect(Rf_asChar(json_config)); char const *c_json_config = CHAR(json_config_); bst_ulong out_dim; bst_ulong const *out_shape; float const *out_result; int res_code; { switch (input_type) { case PredictionInputType::DMatrix: { res_code = XGBoosterPredictFromDMatrix(R_ExternalPtrAddr(handle), R_ExternalPtrAddr(input_data), c_json_config, &out_shape, &out_dim, &out_result); break; } case PredictionInputType::CSRMatrix: { std::unique_ptr<ProxyDmatrixWrapper> proxy_dmat = GetProxyDMatrixWithBaseMargin( base_margin); DMatrixHandle proxy_dmat_handle = proxy_dmat.get()?
proxy_dmat->get_handle() : nullptr; SEXP indptr = VECTOR_ELT(input_data, 0); SEXP indices = VECTOR_ELT(input_data, 1); SEXP data = VECTOR_ELT(input_data, 2); const int ncol_csr = Rf_asInteger(VECTOR_ELT(input_data, 3)); const SEXPTYPE type_data = TYPEOF(data); CHECK_EQ(type_data, REALSXP); std::string sindptr, sindices, sdata; CreateFromSparse(indptr, indices, data, &sindptr, &sindices, &sdata); xgboost::StringView json_str(c_json_config); xgboost::Json new_json = xgboost::Json::Load(json_str); AddMissingToJson(&new_json, missing, type_data); const std::string new_c_json = xgboost::Json::Dump(new_json); res_code = XGBoosterPredictFromCSR( R_ExternalPtrAddr(handle), sindptr.c_str(), sindices.c_str(), sdata.c_str(), ncol_csr, new_c_json.c_str(), proxy_dmat_handle, &out_shape, &out_dim, &out_result); break; } case PredictionInputType::DenseMatrix: { std::unique_ptr proxy_dmat = GetProxyDMatrixWithBaseMargin( base_margin); DMatrixHandle proxy_dmat_handle = proxy_dmat.get()? proxy_dmat->get_handle() : nullptr; const std::string array_str = MakeArrayInterfaceFromRMat(input_data); xgboost::StringView json_str(c_json_config); xgboost::Json new_json = xgboost::Json::Load(json_str); AddMissingToJson(&new_json, missing, TYPEOF(input_data)); const std::string new_c_json = xgboost::Json::Dump(new_json); res_code = XGBoosterPredictFromDense( R_ExternalPtrAddr(handle), array_str.c_str(), new_c_json.c_str(), proxy_dmat_handle, &out_shape, &out_dim, &out_result); break; } case PredictionInputType::DataFrame: { std::unique_ptr proxy_dmat = GetProxyDMatrixWithBaseMargin( base_margin); DMatrixHandle proxy_dmat_handle = proxy_dmat.get()? proxy_dmat->get_handle() : nullptr; const std::string df_str = MakeArrayInterfaceFromRDataFrame(input_data); xgboost::StringView json_str(c_json_config); xgboost::Json new_json = xgboost::Json::Load(json_str); AddMissingToJson(&new_json, missing, REALSXP); const std::string new_c_json = xgboost::Json::Dump(new_json); res_code = XGBoosterPredictFromColumnar( R_ExternalPtrAddr(handle), df_str.c_str(), new_c_json.c_str(), proxy_dmat_handle, &out_shape, &out_dim, &out_result); break; } } } CHECK_CALL(res_code); SEXP r_out_shape = Rf_protect(Rf_allocVector(INTSXP, out_dim)); size_t len = 1; int *r_out_shape_ = INTEGER(r_out_shape); for (size_t i = 0; i < out_dim; ++i) { r_out_shape_[out_dim - i - 1] = out_shape[i]; len *= out_shape[i]; } r_out_result = Rf_protect(Rf_allocVector(REALSXP, len)); std::copy(out_result, out_result + len, REAL(r_out_result)); if (out_dim > 1) { Rf_setAttrib(r_out_result, R_DimSymbol, r_out_shape); } R_API_END(); Rf_unprotect(3); return r_out_result; } } // namespace XGB_DLL SEXP XGBoosterPredictFromDMatrix_R(SEXP handle, SEXP dmat, SEXP json_config) { return XGBoosterPredictGeneric(handle, dmat, json_config, PredictionInputType::DMatrix, R_NilValue, R_NilValue); } XGB_DLL SEXP XGBoosterPredictFromDense_R(SEXP handle, SEXP R_mat, SEXP missing, SEXP json_config, SEXP base_margin) { return XGBoosterPredictGeneric(handle, R_mat, json_config, PredictionInputType::DenseMatrix, missing, base_margin); } XGB_DLL SEXP XGBoosterPredictFromCSR_R(SEXP handle, SEXP lst, SEXP missing, SEXP json_config, SEXP base_margin) { return XGBoosterPredictGeneric(handle, lst, json_config, PredictionInputType::CSRMatrix, missing, base_margin); } XGB_DLL SEXP XGBoosterPredictFromColumnar_R(SEXP handle, SEXP R_df, SEXP missing, SEXP json_config, SEXP base_margin) { return XGBoosterPredictGeneric(handle, R_df, json_config, PredictionInputType::DataFrame, missing, base_margin); } 
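/* Usage sketch (illustrative only; the argument values shown are hypothetical): from the
   R side the four wrappers above are reached through .Call(), e.g.

     res <- .Call(XGBoosterPredictFromDense_R, booster_handle, x_mat,
                  NA_real_, '{"type": 0}', NULL)

   XGBoosterPredictGeneric writes the C-API output shape in reverse order into the
   result's 'dim' attribute, so a C-API shape of [n_samples, n_groups] arrives in R as
   dim == c(n_groups, n_samples), while one-dimensional results come back as a plain
   numeric vector without 'dim'. */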
XGB_DLL SEXP XGBoosterLoadModel_R(SEXP handle, SEXP fname) { R_API_BEGIN(); CHECK_CALL(XGBoosterLoadModel(R_ExternalPtrAddr(handle), CHAR(Rf_asChar(fname)))); R_API_END(); return R_NilValue; } XGB_DLL SEXP XGBoosterSaveModel_R(SEXP handle, SEXP fname) { R_API_BEGIN(); CHECK_CALL(XGBoosterSaveModel(R_ExternalPtrAddr(handle), CHAR(Rf_asChar(fname)))); R_API_END(); return R_NilValue; } XGB_DLL SEXP XGBoosterLoadModelFromRaw_R(SEXP handle, SEXP raw) { R_API_BEGIN(); CHECK_CALL(XGBoosterLoadModelFromBuffer(R_ExternalPtrAddr(handle), RAW(raw), Rf_xlength(raw))); R_API_END(); return R_NilValue; } XGB_DLL SEXP XGBoosterSaveModelToRaw_R(SEXP handle, SEXP json_config) { SEXP ret; R_API_BEGIN(); bst_ulong olen; char const *c_json_config = CHAR(Rf_asChar(json_config)); char const *raw; CHECK_CALL(XGBoosterSaveModelToBuffer(R_ExternalPtrAddr(handle), c_json_config, &olen, &raw)) ret = Rf_protect(Rf_allocVector(RAWSXP, olen)); if (olen != 0) { std::memcpy(RAW(ret), raw, olen); } R_API_END(); Rf_unprotect(1); return ret; } XGB_DLL SEXP XGBoosterSaveJsonConfig_R(SEXP handle) { const char* ret; R_API_BEGIN(); bst_ulong len {0}; CHECK_CALL(XGBoosterSaveJsonConfig(R_ExternalPtrAddr(handle), &len, &ret)); R_API_END(); return Rf_mkString(ret); } XGB_DLL SEXP XGBoosterLoadJsonConfig_R(SEXP handle, SEXP value) { R_API_BEGIN(); CHECK_CALL(XGBoosterLoadJsonConfig(R_ExternalPtrAddr(handle), CHAR(Rf_asChar(value)))); R_API_END(); return R_NilValue; } XGB_DLL SEXP XGBoosterSerializeToBuffer_R(SEXP handle) { SEXP ret; R_API_BEGIN(); bst_ulong out_len; const char *raw; CHECK_CALL(XGBoosterSerializeToBuffer(R_ExternalPtrAddr(handle), &out_len, &raw)); ret = Rf_protect(Rf_allocVector(RAWSXP, out_len)); if (out_len != 0) { memcpy(RAW(ret), raw, out_len); } R_API_END(); Rf_unprotect(1); return ret; } XGB_DLL SEXP XGBoosterUnserializeFromBuffer_R(SEXP handle, SEXP raw) { R_API_BEGIN(); CHECK_CALL(XGBoosterUnserializeFromBuffer(R_ExternalPtrAddr(handle), RAW(raw), Rf_xlength(raw))); R_API_END(); return R_NilValue; } XGB_DLL SEXP XGBoosterDumpModel_R(SEXP handle, SEXP fmap, SEXP with_stats, SEXP dump_format) { SEXP out; SEXP continuation_token = Rf_protect(R_MakeUnwindCont()); SEXP dump_format_ = Rf_protect(Rf_asChar(dump_format)); SEXP fmap_ = Rf_protect(Rf_asChar(fmap)); R_API_BEGIN(); bst_ulong olen; const char **res; const char *fmt = CHAR(dump_format_); CHECK_CALL(XGBoosterDumpModelEx(R_ExternalPtrAddr(handle), CHAR(fmap_), Rf_asInteger(with_stats), fmt, &olen, &res)); out = Rf_protect(Rf_allocVector(STRSXP, olen)); try { if (!strcmp("json", fmt)) { std::stringstream stream; stream << "[\n"; for (size_t i = 0; i < olen; ++i) { stream << res[i]; if (i < olen - 1) { stream << ",\n"; } else { stream << "\n"; } } stream << "]"; const std::string temp_str = stream.str(); SET_STRING_ELT(out, 0, SafeMkChar(temp_str.c_str(), continuation_token)); } else { for (size_t i = 0; i < olen; ++i) { std::stringstream stream; stream << "booster[" << i <<"]\n" << res[i]; const std::string temp_str = stream.str(); SET_STRING_ELT(out, i, SafeMkChar(temp_str.c_str(), continuation_token)); } } } catch (ErrorWithUnwind &e) { R_ContinueUnwind(continuation_token); } R_API_END(); Rf_unprotect(4); return out; } XGB_DLL SEXP XGBoosterGetAttr_R(SEXP handle, SEXP name) { SEXP out; R_API_BEGIN(); int success; const char *val; CHECK_CALL(XGBoosterGetAttr(R_ExternalPtrAddr(handle), CHAR(Rf_asChar(name)), &val, &success)); if (success) { out = Rf_protect(Rf_allocVector(STRSXP, 1)); SET_STRING_ELT(out, 0, Rf_mkChar(val)); } else { out = 
Rf_protect(R_NilValue); } R_API_END(); Rf_unprotect(1); return out; } XGB_DLL SEXP XGBoosterSetAttr_R(SEXP handle, SEXP name, SEXP val) { R_API_BEGIN(); const char *v = nullptr; SEXP name_ = Rf_protect(Rf_asChar(name)); SEXP val_; int n_protected = 1; if (!Rf_isNull(val)) { val_ = Rf_protect(Rf_asChar(val)); n_protected++; v = CHAR(val_); } CHECK_CALL(XGBoosterSetAttr(R_ExternalPtrAddr(handle), CHAR(name_), v)); Rf_unprotect(n_protected); R_API_END(); return R_NilValue; } XGB_DLL SEXP XGBoosterGetAttrNames_R(SEXP handle) { SEXP out; R_API_BEGIN(); bst_ulong len; const char **res; CHECK_CALL(XGBoosterGetAttrNames(R_ExternalPtrAddr(handle), &len, &res)); if (len > 0) { out = Rf_protect(Rf_allocVector(STRSXP, len)); for (size_t i = 0; i < len; ++i) { SET_STRING_ELT(out, i, Rf_mkChar(res[i])); } } else { out = Rf_protect(R_NilValue); } R_API_END(); Rf_unprotect(1); return out; } XGB_DLL SEXP XGBoosterFeatureScore_R(SEXP handle, SEXP json_config) { SEXP out_features_sexp; SEXP out_scores_sexp; SEXP out_shape_sexp; SEXP r_out = Rf_protect(Rf_allocVector(VECSXP, 3)); R_API_BEGIN(); char const *c_json_config = CHAR(Rf_asChar(json_config)); bst_ulong out_n_features; char const **out_features; bst_ulong out_dim; bst_ulong const *out_shape; float const *out_scores; CHECK_CALL(XGBoosterFeatureScore(R_ExternalPtrAddr(handle), c_json_config, &out_n_features, &out_features, &out_dim, &out_shape, &out_scores)); out_shape_sexp = Rf_protect(Rf_allocVector(INTSXP, out_dim)); size_t len = 1; int *out_shape_sexp_ = INTEGER(out_shape_sexp); for (size_t i = 0; i < out_dim; ++i) { out_shape_sexp_[i] = out_shape[i]; len *= out_shape[i]; } out_features_sexp = Rf_protect(Rf_allocVector(STRSXP, out_n_features)); for (size_t i = 0; i < out_n_features; ++i) { SET_STRING_ELT(out_features_sexp, i, Rf_mkChar(out_features[i])); } out_scores_sexp = Rf_protect(Rf_allocVector(REALSXP, len)); std::copy(out_scores, out_scores + len, REAL(out_scores_sexp)); SET_VECTOR_ELT(r_out, 0, out_features_sexp); SET_VECTOR_ELT(r_out, 1, out_shape_sexp); SET_VECTOR_ELT(r_out, 2, out_scores_sexp); R_API_END(); Rf_unprotect(4); return r_out; } XGB_DLL SEXP XGBoosterSlice_R(SEXP handle, SEXP begin_layer, SEXP end_layer, SEXP step) { SEXP out = Rf_protect(XGBMakeEmptyAltrep()); R_API_BEGIN(); BoosterHandle handle_out = nullptr; CHECK_CALL(XGBoosterSlice(R_ExternalPtrAddr(handle), Rf_asInteger(begin_layer), Rf_asInteger(end_layer), Rf_asInteger(step), &handle_out)); XGBAltrepSetPointer(out, handle_out); R_API_END(); Rf_unprotect(1); return out; } XGB_DLL SEXP XGBoosterSliceAndReplace_R(SEXP handle, SEXP begin_layer, SEXP end_layer, SEXP step) { R_API_BEGIN(); BoosterHandle old_handle = R_ExternalPtrAddr(handle); BoosterHandle new_handle = nullptr; CHECK_CALL(XGBoosterSlice(old_handle, Rf_asInteger(begin_layer), Rf_asInteger(end_layer), Rf_asInteger(step), &new_handle)); R_SetExternalPtrAddr(handle, new_handle); CHECK_CALL(XGBoosterFree(old_handle)); R_API_END(); return R_NilValue; } xgboost-3.0.0/R-package/src/xgboost_R.h000066400000000000000000000442021476511272400177120ustar00rootroot00000000000000/*! * Copyright 2014-2022 by XGBoost Contributors * \file xgboost_R.h * \author Tianqi Chen * \brief R wrapper of xgboost */ #ifndef XGBOOST_R_H_ // NOLINT(*) #define XGBOOST_R_H_ // NOLINT(*) #ifndef R_NO_REMAP # define R_NO_REMAP #endif #include #include #include #include #include #include /*! * \brief check whether a handle is NULL * \param handle * \return whether it is null ptr */ XGB_DLL SEXP XGCheckNullPtr_R(SEXP handle); /*! 
* \brief set the names of the dimensions of an array in-place * \param arr array whose dimension names will be set * \param dim_names names for the dimensions to set * \return NULL value */ XGB_DLL SEXP XGSetArrayDimNamesInplace_R(SEXP arr, SEXP dim_names); /*! * \brief set the names of a vector in-place * \param arr vector whose names will be set * \param names names for the vector elements to set * \return NULL value */ XGB_DLL SEXP XGSetVectorNamesInplace_R(SEXP arr, SEXP names); /*! * \brief Set global configuration * \param json_str a JSON string representing the list of key-value pairs * \return R_NilValue */ XGB_DLL SEXP XGBSetGlobalConfig_R(SEXP json_str); /*! * \brief Get global configuration * \return JSON string */ XGB_DLL SEXP XGBGetGlobalConfig_R(); /*! * \brief load a data matrix from URI * \param uri URI to the source file to read data from * \param silent whether to print messages * \param data_split_mode data split mode (0=rows, 1=columns) * \return a loaded data matrix */ XGB_DLL SEXP XGDMatrixCreateFromURI_R(SEXP uri, SEXP silent, SEXP data_split_mode); /*! * \brief create matrix content from dense matrix * This assumes the matrix is stored in column major format * \param mat R matrix object * \param missing which value to represent missing value * \param n_threads Number of threads used to construct DMatrix from dense matrix. * \return created dmatrix */ XGB_DLL SEXP XGDMatrixCreateFromMat_R(SEXP mat, SEXP missing, SEXP n_threads); /** * @brief Create matrix content from a data frame. * @param df R data.frame object * @param missing which value to represent missing value * @param n_threads Number of threads used to construct DMatrix from the data frame. * @return created dmatrix */ XGB_DLL SEXP XGDMatrixCreateFromDF_R(SEXP df, SEXP missing, SEXP n_threads); /*! * \brief create matrix content from CSC format * \param indptr pointer to column headers * \param indices row indices * \param data content of the data * \param num_row number of rows (when it's set to 0, then guess from data) * \param missing which value to represent missing value * \param n_threads Number of threads used to construct DMatrix from csc matrix. * \return created dmatrix */ XGB_DLL SEXP XGDMatrixCreateFromCSC_R(SEXP indptr, SEXP indices, SEXP data, SEXP num_row, SEXP missing, SEXP n_threads); /*! * \brief create matrix content from CSR format * \param indptr pointer to row headers * \param indices column indices * \param data content of the data * \param num_col number of columns (when it's set to 0, then guess from data) * \param missing which value to represent missing value * \param n_threads Number of threads used to construct DMatrix from csr matrix. * \return created dmatrix */ XGB_DLL SEXP XGDMatrixCreateFromCSR_R(SEXP indptr, SEXP indices, SEXP data, SEXP num_col, SEXP missing, SEXP n_threads); /*! * \brief create a new dmatrix from sliced content of existing matrix * \param handle instance of data matrix to be sliced * \param idxset index set * \param allow_groups Whether to allow slicing the DMatrix if it has a 'group' field * \return a sliced new matrix */ XGB_DLL SEXP XGDMatrixSliceDMatrix_R(SEXP handle, SEXP idxset, SEXP allow_groups); /*! * \brief save a data matrix into a binary file * \param handle an instance of data matrix * \param fname file name * \param silent whether to print statistics when saving * \return R_NilValue */ XGB_DLL SEXP XGDMatrixSaveBinary_R(SEXP handle, SEXP fname, SEXP silent); /*!
* \brief set information to dmatrix * \param handle an instance of data matrix * \param field field name, can be label, weight * \param array pointer to float vector * \return R_NilValue */ XGB_DLL SEXP XGDMatrixSetInfo_R(SEXP handle, SEXP field, SEXP array); /*! * \brief get info vector (float type) from matrix * \param handle an instance of data matrix * \param field field name * \return info vector */ XGB_DLL SEXP XGDMatrixGetFloatInfo_R(SEXP handle, SEXP field); /*! * \brief get info vector (uint type) from matrix * \param handle an instance of data matrix * \param field field name * \return info vector */ XGB_DLL SEXP XGDMatrixGetUIntInfo_R(SEXP handle, SEXP field); /*! * \brief return number of rows * \param handle an instance of data matrix */ XGB_DLL SEXP XGDMatrixNumRow_R(SEXP handle); /*! * \brief return number of columns * \param handle an instance of data matrix */ XGB_DLL SEXP XGDMatrixNumCol_R(SEXP handle); /*! * \brief create a ProxyDMatrix and get an R externalptr object for it */ XGB_DLL SEXP XGProxyDMatrixCreate_R(); /*! * \brief Set dense matrix data on a proxy dmatrix * \param handle R externalptr pointing to a ProxyDMatrix * \param R_mat R matrix to set in the proxy dmatrix */ XGB_DLL SEXP XGProxyDMatrixSetDataDense_R(SEXP handle, SEXP R_mat); /*! * \brief Set CSR matrix data on a proxy dmatrix * \param handle R externalptr pointing to a ProxyDMatrix * \param lst R list containing, in this order: * 1. 'p' or 'indptr' vector of the CSR matrix. * 2. 'j' or 'indices' vector of the CSR matrix. * 3. 'x' or 'data' vector of the CSR matrix. * 4. Number of columns in the CSR matrix. */ XGB_DLL SEXP XGProxyDMatrixSetDataCSR_R(SEXP handle, SEXP lst); /*! * \brief Set columnar (data.frame) data on a proxy dmatrix * \param handle R externalptr pointing to a ProxyDMatrix * \param lst R list or data.frame object containing its columns as numeric vectors */ XGB_DLL SEXP XGProxyDMatrixSetDataColumnar_R(SEXP handle, SEXP lst); /*! * \brief Create a DMatrix from a DataIter with callbacks * \param expr_f_next expression for function(env, proxy_dmat) that sets the data on the proxy * dmatrix and returns either zero (end of batch) or one (batch continues). * \param expr_f_reset expression for function(env) that resets the data iterator to * the beginning (first batch). * \param calling_env R environment where to evaluate the expressions above * \param proxy_dmat R externalptr holding a ProxyDMatrix. * \param n_threads number of parallel threads to use for constructing the DMatrix. * \param missing which value to represent missing value. * \param cache_prefix path of cache file * \return handle R externalptr holding the resulting DMatrix. */ XGB_DLL SEXP XGDMatrixCreateFromCallback_R( SEXP expr_f_next, SEXP expr_f_reset, SEXP calling_env, SEXP proxy_dmat, SEXP n_threads, SEXP missing, SEXP cache_prefix); /*! * \brief Create a QuantileDMatrix from a DataIter with callbacks * \param expr_f_next expression for function(env, proxy_dmat) that sets the data on the proxy * dmatrix and returns either zero (end of batch) or one (batch continues). * \param expr_f_reset expression for function(env) that resets the data iterator to * the beginning (first batch). * \param calling_env R environment where to evaluate the expressions above * \param proxy_dmat R externalptr holding a ProxyDMatrix. * \param n_threads number of parallel threads to use for constructing the QuantileDMatrix. * \param missing which value to represent missing value.
* \param max_bin maximum number of bins to have in the resulting QuantileDMatrix. * \param ref_dmat an optional reference DMatrix from which to get the bin boundaries. * \return handle R externalptr holding the resulting QuantileDMatrix. */ XGB_DLL SEXP XGQuantileDMatrixCreateFromCallback_R( SEXP expr_f_next, SEXP expr_f_reset, SEXP calling_env, SEXP proxy_dmat, SEXP n_threads, SEXP missing, SEXP max_bin, SEXP ref_dmat); /*! * \brief Frees a ProxyDMatrix and empties out the R externalptr object that holds it * \param proxy_dmat R externalptr containing a ProxyDMatrix * \return NULL */ XGB_DLL SEXP XGDMatrixFree_R(SEXP proxy_dmat); /*! * \brief Get the value that represents missingness in R integers as a numeric non-missing value. */ XGB_DLL SEXP XGGetRNAIntAsDouble(); /*! * \brief Call R C-level function 'duplicate' * \param obj Object to duplicate */ XGB_DLL SEXP XGDuplicate_R(SEXP obj); /*! * \brief Equality comparison for two pointers * \param obj1 R 'externalptr' * \param obj2 R 'externalptr' */ XGB_DLL SEXP XGPointerEqComparison_R(SEXP obj1, SEXP obj2); /*! * \brief Register the Altrep class used for the booster * \param dll DLL info as provided by R_init */ XGB_DLL void XGBInitializeAltrepClass_R(DllInfo *dll); /*! * \brief return the quantile cuts used for the histogram method * \param handle an instance of data matrix * \return A list with entries 'indptr' and 'data' */ XGB_DLL SEXP XGDMatrixGetQuantileCut_R(SEXP handle); /*! * \brief get the number of non-missing entries in a dmatrix * \param handle an instance of data matrix * \return the number of non-missing entries */ XGB_DLL SEXP XGDMatrixNumNonMissing_R(SEXP handle); /*! * \brief get the data in a dmatrix in CSR format * \param handle an instance of data matrix * \return R list with the following entries in this order: * - 'indptr' * - 'indices' * - 'data' * - 'ncol' */ XGB_DLL SEXP XGDMatrixGetDataAsCSR_R(SEXP handle); /*! * \brief create xgboost learner * \param dmats a list of dmatrix handles that will be cached */ XGB_DLL SEXP XGBoosterCreate_R(SEXP dmats); /*! * \brief copy information about features from a DMatrix into a Booster * \param booster R 'externalptr' pointing to a booster object * \param dmat R 'externalptr' pointing to a DMatrix object */ XGB_DLL SEXP XGBoosterCopyInfoFromDMatrix_R(SEXP booster, SEXP dmat); /*! * \brief set string feature info (such as feature names or types) in the booster * \param handle R 'externalptr' holding the booster object * \param field field name * \param features features to set for the field */ XGB_DLL SEXP XGBoosterSetStrFeatureInfo_R(SEXP handle, SEXP field, SEXP features); /*! * \brief get string feature info (such as feature names or types) from the booster * \param handle R 'externalptr' holding the booster object * \param field field name */ XGB_DLL SEXP XGBoosterGetStrFeatureInfo_R(SEXP handle, SEXP field); /*! * \brief Get the number of boosted rounds from a model * \param handle R 'externalptr' holding the booster object */ XGB_DLL SEXP XGBoosterBoostedRounds_R(SEXP handle); /*! * \brief Get the number of features to which the model was fitted * \param handle R 'externalptr' holding the booster object */ XGB_DLL SEXP XGBoosterGetNumFeature_R(SEXP handle); /*! * \brief set parameters * \param handle handle * \param name parameter name * \param val value of parameter * \return R_NilValue */ XGB_DLL SEXP XGBoosterSetParam_R(SEXP handle, SEXP name, SEXP val); /*! * \brief update the model in one round using dtrain * \param handle handle * \param iter current iteration rounds * \param dtrain training data * \return R_NilValue */ XGB_DLL SEXP XGBoosterUpdateOneIter_R(SEXP handle, SEXP iter, SEXP dtrain); /*!
* \brief update the model, by directly specify gradient and second order gradient, * this can be used to replace UpdateOneIter, to support customized loss function * \param handle handle * \param iter The current training iteration. * \param dtrain training data * \param grad gradient statistics * \param hess second order gradient statistics * \return R_NilValue */ XGB_DLL SEXP XGBoosterTrainOneIter_R(SEXP handle, SEXP dtrain, SEXP iter, SEXP grad, SEXP hess); /*! * \brief get evaluation statistics for xgboost * \param handle handle * \param iter current iteration rounds * \param dmats list of handles to dmatrices * \param evname name of evaluation * \return the string containing evaluation stats */ XGB_DLL SEXP XGBoosterEvalOneIter_R(SEXP handle, SEXP iter, SEXP dmats, SEXP evnames); /*! * \brief Run prediction on DMatrix, replacing `XGBoosterPredict_R` * \param handle handle * \param dmat data matrix * \param json_config See `XGBoosterPredictFromDMatrix` in xgboost c_api.h * * \return A list containing 2 vectors, first one for shape while second one for prediction result. */ XGB_DLL SEXP XGBoosterPredictFromDMatrix_R(SEXP handle, SEXP dmat, SEXP json_config); /*! * \brief Run prediction on R dense matrix * \param handle handle * \param R_mat R matrix * \param missing missing value * \param json_config See `XGBoosterPredictFromDense` in xgboost c_api.h. Doesn't include 'missing' * \param base_margin base margin for the prediction * * \return A list containing 2 vectors, first one for shape while second one for prediction result. */ XGB_DLL SEXP XGBoosterPredictFromDense_R(SEXP handle, SEXP R_mat, SEXP missing, SEXP json_config, SEXP base_margin); /*! * \brief Run prediction on R CSR matrix * \param handle handle * \param lst An R list, containing, in this order: * (a) 'p' array (a.k.a. indptr) * (b) 'j' array (a.k.a. indices) * (c) 'x' array (a.k.a. data / values) * (d) number of columns * \param missing missing value * \param json_config See `XGBoosterPredictFromCSR` in xgboost c_api.h. Doesn't include 'missing' * \param base_margin base margin for the prediction * * \return A list containing 2 vectors, first one for shape while second one for prediction result. */ XGB_DLL SEXP XGBoosterPredictFromCSR_R(SEXP handle, SEXP lst, SEXP missing, SEXP json_config, SEXP base_margin); /*! * \brief Run prediction on R data.frame * \param handle handle * \param R_df R data.frame * \param missing missing value * \param json_config See `XGBoosterPredictFromDense` in xgboost c_api.h. Doesn't include 'missing' * \param base_margin base margin for the prediction * * \return A list containing 2 vectors, first one for shape while second one for prediction result. */ XGB_DLL SEXP XGBoosterPredictFromColumnar_R(SEXP handle, SEXP R_df, SEXP missing, SEXP json_config, SEXP base_margin); /*! * \brief load model from existing file * \param handle handle * \param fname file name * \return R_NilValue */ XGB_DLL SEXP XGBoosterLoadModel_R(SEXP handle, SEXP fname); /*! * \brief save model into existing file * \param handle handle * \param fname file name * \return R_NilValue */ XGB_DLL SEXP XGBoosterSaveModel_R(SEXP handle, SEXP fname); /*! * \brief load model from raw array * \param handle handle * \return R_NilValue */ XGB_DLL SEXP XGBoosterLoadModelFromRaw_R(SEXP handle, SEXP raw); /*! * \brief Save model into R's raw array * * \param handle handle * \param json_config JSON encoded string storing parameters for the function. 
Following * keys are expected in the JSON document: * * "format": str * - json: Output booster will be encoded as JSON. * - ubj: Output booster will be encoded as Universal binary JSON. * - deprecated: Output booster will be encoded as old custom binary format. Do not use * this format except for compatibility reasons. * * \return Raw array */ XGB_DLL SEXP XGBoosterSaveModelToRaw_R(SEXP handle, SEXP json_config); /*! * \brief Save internal parameters as a JSON string * \param handle handle * \return JSON string */ XGB_DLL SEXP XGBoosterSaveJsonConfig_R(SEXP handle); /*! * \brief Load the JSON string returned by XGBoosterSaveJsonConfig_R * \param handle handle * \param value JSON string * \return R_NilValue */ XGB_DLL SEXP XGBoosterLoadJsonConfig_R(SEXP handle, SEXP value); /*! * \brief Memory snapshot based serialization method. Saves all states * into a buffer. * \param handle handle to booster * \return raw byte array */ XGB_DLL SEXP XGBoosterSerializeToBuffer_R(SEXP handle); /*! * \brief Memory snapshot based serialization method. Loads the buffer returned * from `XGBoosterSerializeToBuffer`. * \param handle handle to booster * \return R_NilValue */ XGB_DLL SEXP XGBoosterUnserializeFromBuffer_R(SEXP handle, SEXP raw); /*! * \brief dump model into a string * \param handle handle * \param fmap path to the feature map file; can be an empty string * \param with_stats whether to dump statistics of splits * \param dump_format the format to dump the model in */ XGB_DLL SEXP XGBoosterDumpModel_R(SEXP handle, SEXP fmap, SEXP with_stats, SEXP dump_format); /*! * \brief get learner attribute value * \param handle handle * \param name attribute name * \return character containing attribute value */ XGB_DLL SEXP XGBoosterGetAttr_R(SEXP handle, SEXP name); /*! * \brief set learner attribute value * \param handle handle * \param name attribute name * \param val attribute value; NULL value would delete an attribute * \return R_NilValue */ XGB_DLL SEXP XGBoosterSetAttr_R(SEXP handle, SEXP name, SEXP val); /*! * \brief get the names of learner attributes * \return string vector containing attribute names */ XGB_DLL SEXP XGBoosterGetAttrNames_R(SEXP handle); /*! * \brief Get feature scores from the model. * \param json_config See `XGBoosterFeatureScore` in xgboost c_api.h * \return A vector with the first element as feature names, second element as shape of * feature scores and third element as feature scores. */ XGB_DLL SEXP XGBoosterFeatureScore_R(SEXP handle, SEXP json_config); /*! * \brief Slice a fitted booster model (by rounds) * \param handle handle to the fitted booster * \param begin_layer start of the slice * \param end_layer end of the slice; end_layer=0 is equivalent to end_layer=num_boost_round * \param step step size of the slice * \return The sliced booster with the requested rounds only */ XGB_DLL SEXP XGBoosterSlice_R(SEXP handle, SEXP begin_layer, SEXP end_layer, SEXP step); /*!
* \brief Slice a fitted booster model (by rounds), and replace its handle with the result * \param handle handle to the fitted booster * \param begin_layer start of the slice * \param end_layer end of the slice; end_layer=0 is equivalent to end_layer=num_boost_round * \param step step size of the slice * \return NULL */ XGB_DLL SEXP XGBoosterSliceAndReplace_R(SEXP handle, SEXP begin_layer, SEXP end_layer, SEXP step); #endif // XGBOOST_R_H_ // NOLINT(*) xgboost-3.0.0/R-package/src/xgboost_custom.cc000066400000000000000000000021531476511272400211600ustar00rootroot00000000000000// Copyright (c) 2015 by Contributors // This file contains the customization implementations of R module // to change behavior of libxgboost #include <xgboost/logging.h> #include "../../src/common/random.h" #include "./xgboost_R.h" // redirect the messages to R's console. namespace dmlc { void CustomLogMessage::Log(const std::string& msg) { Rprintf("%s\n", msg.c_str()); } } // namespace dmlc namespace xgboost { ConsoleLogger::~ConsoleLogger() { if (cur_verbosity_ == LogVerbosity::kIgnore || cur_verbosity_ <= GlobalVerbosity()) { if (cur_verbosity_ == LogVerbosity::kWarning) { REprintf("%s\n", log_stream_.str().c_str()); } else { dmlc::CustomLogMessage::Log(log_stream_.str()); } } } TrackerLogger::~TrackerLogger() { dmlc::CustomLogMessage::Log(log_stream_.str()); } } // namespace xgboost namespace xgboost { namespace common { // redirect the math functions. bool CheckNAN(double v) { return ISNAN(v); } #if !defined(XGBOOST_USE_CUDA) double LogGamma(double v) { return lgammafn(v); } #endif // !defined(XGBOOST_USE_CUDA) } // namespace common } // namespace xgboost xgboost-3.0.0/R-package/tests/000077500000000000000000000000001476511272400161445ustar00rootroot00000000000000xgboost-3.0.0/R-package/tests/helper_scripts/000077500000000000000000000000001476511272400211725ustar00rootroot00000000000000xgboost-3.0.0/R-package/tests/helper_scripts/generate_models.R000066400000000000000000000074061476511272400244601ustar00rootroot00000000000000# Script to generate reference models. The reference models are used to test backward compatibility # of saved model files from XGBoost version 0.90 and 1.0.x.
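# For instance (illustrative; assuming the installed package version is 1.0.0), a single
# save_booster(booster, 'reg') call below would write:
#   models/xgboost-1.0.0.reg.bin
#   models/xgboost-1.0.0.reg.rds
#   models/xgboost-1.0.0.reg.json   (the JSON copy is only written when version >= '1.0.0')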
library(xgboost) library(Matrix) set.seed(0) metadata <- list( kRounds = 2, kRows = 1000, kCols = 4, kForests = 2, kMaxDepth = 2, kClasses = 3 ) X <- Matrix(data = rnorm(metadata$kRows * metadata$kCols), nrow = metadata$kRows, ncol = metadata$kCols, sparse = TRUE) w <- runif(metadata$kRows) version <- packageVersion('xgboost') target_dir <- 'models' save_booster <- function(booster, model_name) { booster_bin <- function(model_name) { return(file.path(target_dir, paste('xgboost-', version, '.', model_name, '.bin', sep = ''))) } booster_json <- function(model_name) { return(file.path(target_dir, paste('xgboost-', version, '.', model_name, '.json', sep = ''))) } booster_rds <- function(model_name) { return(file.path(target_dir, paste('xgboost-', version, '.', model_name, '.rds', sep = ''))) } xgb.save(booster, booster_bin(model_name)) saveRDS(booster, booster_rds(model_name)) if (version >= '1.0.0') { xgb.save(booster, booster_json(model_name)) } } generate_regression_model <- function() { print('Regression') y <- rnorm(metadata$kRows) data <- xgb.DMatrix(X, label = y, nthread = 1) params <- list(tree_method = 'hist', num_parallel_tree = metadata$kForests, max_depth = metadata$kMaxDepth) booster <- xgb.train(params, data, nrounds = metadata$kRounds) save_booster(booster, 'reg') } generate_logistic_model <- function() { print('Binary classification with logistic loss') y <- sample(0:1, size = metadata$kRows, replace = TRUE) stopifnot(max(y) == 1, min(y) == 0) objective <- c('binary:logistic', 'binary:logitraw') name <- c('logit', 'logitraw') for (i in seq_len(length(objective))) { data <- xgb.DMatrix(X, label = y, weight = w, nthread = 1) params <- list(tree_method = 'hist', num_parallel_tree = metadata$kForests, max_depth = metadata$kMaxDepth, objective = objective[i]) booster <- xgb.train(params, data, nrounds = metadata$kRounds) save_booster(booster, name[i]) } } generate_classification_model <- function() { print('Multi-class classification') y <- sample(0:(metadata$kClasses - 1), size = metadata$kRows, replace = TRUE) stopifnot(max(y) == metadata$kClasses - 1, min(y) == 0) data <- xgb.DMatrix(X, label = y, weight = w, nthread = 1) params <- list(num_class = metadata$kClasses, tree_method = 'hist', num_parallel_tree = metadata$kForests, max_depth = metadata$kMaxDepth, objective = 'multi:softmax') booster <- xgb.train(params, data, nrounds = metadata$kRounds) save_booster(booster, 'cls') } generate_ranking_model <- function() { print('Learning to rank') y <- sample(0:4, size = metadata$kRows, replace = TRUE) stopifnot(max(y) == 4, min(y) == 0) kGroups <- 20 w <- runif(kGroups) g <- rep(50, times = kGroups) data <- xgb.DMatrix(X, label = y, group = g, nthread = 1) # setinfo(data, 'weight', w) # ^^^ does not work in version <= 1.1.0; see https://github.com/dmlc/xgboost/issues/5942 # So call low-level function XGDMatrixSetInfo_R directly. Since this function is not an exported # symbol, use the triple-colon operator. 
.Call(xgboost:::XGDMatrixSetInfo_R, data, 'weight', as.numeric(w)) params <- list(objective = 'rank:ndcg', num_parallel_tree = metadata$kForests, tree_method = 'hist', max_depth = metadata$kMaxDepth) booster <- xgb.train(params, data, nrounds = metadata$kRounds) save_booster(booster, 'ltr') } dir.create(target_dir) invisible(generate_regression_model()) invisible(generate_logistic_model()) invisible(generate_classification_model()) invisible(generate_ranking_model()) xgboost-3.0.0/R-package/tests/helper_scripts/install_deps.R000066400000000000000000000021531476511272400237770ustar00rootroot00000000000000## Install dependencies of R package for testing. The list might not be ## up-to-date, check DESCRIPTION for the latest list and update this one if ## inconsistent is found. pkgs <- c( ## CI "pkgbuild", "roxygen2", "XML", "cplm", "e1071", ## suggests "knitr", "rmarkdown", "ggplot2", "DiagrammeR", "DiagrammeRsvg", "rsvg", "htmlwidgets", "Ckmeans.1d.dp", "vcd", "lintr", "testthat", "igraph", "float", "titanic", "RhpcBLASctl", ## imports "Matrix", "data.table", "jsonlite" ) ncpus <- parallel::detectCores() print(paste0("Using ", ncpus, " cores to install dependencies.")) if (.Platform$OS.type == "unix") { print("Installing source packages on unix.") install.packages( pkgs, repo = "https://cloud.r-project.org", dependencies = c("Depends", "Imports", "LinkingTo"), Ncpus = parallel::detectCores() ) } else { print("Installing binary packages on Windows.") install.packages( pkgs, repo = "https://cloud.r-project.org", dependencies = c("Depends", "Imports", "LinkingTo"), Ncpus = parallel::detectCores(), type = "binary" ) } xgboost-3.0.0/R-package/tests/helper_scripts/run-examples.R000066400000000000000000000010511476511272400237320ustar00rootroot00000000000000## Helper script for running individual examples. 
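## Usage sketch (assumption: run from the R-package directory, so that the './man'
## folder with the .Rd files is visible):
##   Rscript tests/helper_scripts/run-examples.R
## The user-time / elapsed-time ratio computed below flags examples that appear to have
## run on several threads (ratio >= 2.5).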
library(pkgload)
library(xgboost)

files <- list.files("./man")

run_example_timeit <- function(f) {
  path <- paste("./man/", f, sep = "")
  print(paste("Test", f))
  flush.console()
  t0 <- proc.time()
  run_example(path)
  t1 <- proc.time()
  list(file = f, time = t1 - t0)
}

timings <- lapply(files, run_example_timeit)

# Flag examples whose CPU time is far above their elapsed time, which usually
# means they run with more parallelism than intended.
for (t in timings) {
  ratio <- t$time[1] / t$time[3]
  if (!is.na(ratio) && !is.infinite(ratio) && ratio >= 2.5) {
    print(paste("Offending example:", t$file, ratio))
  }
}

xgboost-3.0.0/R-package/tests/testthat.R

library(testthat)
library(xgboost)
library(Matrix)

## Limit the number of OpenMP threads before running the test suite.
RhpcBLASctl::omp_set_num_threads(1)

test_check("xgboost", reporter = ProgressReporter)

xgboost-3.0.0/R-package/tests/testthat/test_basic.R

context("basic functions")

data(agaricus.train, package = "xgboost")
data(agaricus.test, package = "xgboost")
train <- agaricus.train
test <- agaricus.test
set.seed(1994)

# disable some tests for Win32
windows_flag <- .Platform$OS.type == "windows" && .Machine$sizeof.pointer != 8
solaris_flag <- (Sys.info()["sysname"] == "SunOS")
n_threads <- 1

test_that("train and predict binary classification", {
  nrounds <- 2
  expect_output(
    bst <- xgb.train(
      data = xgb.DMatrix(train$data, label = train$label, nthread = 1),
      nrounds = nrounds,
      params = xgb.params(
        max_depth = 2,
        learning_rate = 1,
        nthread = n_threads,
        objective = "binary:logistic",
        eval_metric = "error"
      ),
      evals = list(train = xgb.DMatrix(train$data, label = train$label, nthread = 1))
    ),
    "train-error"
  )
  expect_equal(class(bst), "xgb.Booster")
  expect_equal(xgb.get.num.boosted.rounds(bst), nrounds)
  expect_false(is.null(attributes(bst)$evaluation_log))
  expect_equal(nrow(attributes(bst)$evaluation_log), nrounds)
  expect_lt(attributes(bst)$evaluation_log[, min(train_error)], 0.03)

  pred <- predict(bst, test$data)
  expect_length(pred, 1611)

  pred1 <- predict(bst, train$data, iterationrange = c(1, 1))
  expect_length(pred1, 6513)
  err_pred1 <- sum((pred1 > 0.5) != train$label) / length(train$label)
  err_log <- attributes(bst)$evaluation_log[1, train_error]
  expect_lt(abs(err_pred1 - err_log), 10e-6)
})

test_that("parameter validation works", {
  p <- list(foo = "bar")
  nrounds <- 1
  set.seed(1994)

  d <- cbind(
    x1 = rnorm(10),
    x2 = rnorm(10),
    x3 = rnorm(10)
  )
  y <- d[, "x1"] + d[, "x2"]^2 +
    ifelse(d[, "x3"] > .5, d[, "x3"]^2, 2^d[, "x3"]) +
    rnorm(10)
  dtrain <- xgb.DMatrix(data = d, label = y, nthread = n_threads)

  correct <- function() {
    params <- list(
      max_depth = 2,
      booster = "dart",
      rate_drop = 0.5,
      one_drop = TRUE,
      nthread = n_threads,
      objective = "reg:squarederror"
    )
    xgb.train(params = params, data = dtrain, nrounds = nrounds)
  }
  expect_silent(correct())

  incorrect <- function() {
    params <- list(
      max_depth = 2,
      booster = "dart",
      rate_drop = 0.5,
      one_drop = TRUE,
      objective = "reg:squarederror",
      nthread = n_threads,
      foo = "bar",
      bar = "foo"
    )
    output <- capture.output(
      xgb.train(params = params, data = dtrain, nrounds = nrounds),
      type = "message"
    )
    print(output)
  }
  expect_output(incorrect(), '\\\\"bar\\\\", \\\\"foo\\\\"')
})

test_that("dart prediction works", {
  nrounds <- 32
  set.seed(1994)

  d <- cbind(
    x1 = rnorm(100),
    x2 = rnorm(100),
    x3 = rnorm(100)
  )
  y <- d[, "x1"] + d[, "x2"]^2 +
    ifelse(d[, "x3"] > .5, d[, "x3"]^2, 2^d[, "x3"]) +
    rnorm(100)

  set.seed(1994)
  booster_by_xgboost <- xgb.train(
    data =
xgb.DMatrix(d, label = y, nthread = 1), nrounds = nrounds, params = xgb.params( max_depth = 2, booster = "dart", rate_drop = 0.5, one_drop = TRUE, learning_rate = 1, nthread = n_threads, objective = "reg:squarederror" ) ) pred_by_xgboost_0 <- predict(booster_by_xgboost, newdata = d, iterationrange = NULL) pred_by_xgboost_1 <- predict(booster_by_xgboost, newdata = d, iterationrange = c(1, nrounds)) expect_true(all(matrix(pred_by_xgboost_0, byrow = TRUE) == matrix(pred_by_xgboost_1, byrow = TRUE))) pred_by_xgboost_2 <- predict(booster_by_xgboost, newdata = d, training = TRUE) expect_false(all(matrix(pred_by_xgboost_0, byrow = TRUE) == matrix(pred_by_xgboost_2, byrow = TRUE))) set.seed(1994) dtrain <- xgb.DMatrix(data = d, label = y, nthread = n_threads) booster_by_train <- xgb.train( params = xgb.params( booster = "dart", max_depth = 2, learning_rate = 1, rate_drop = 0.5, one_drop = TRUE, nthread = n_threads, objective = "reg:squarederror" ), data = dtrain, nrounds = nrounds ) pred_by_train_0 <- predict(booster_by_train, newdata = dtrain, iterationrange = NULL) pred_by_train_1 <- predict(booster_by_train, newdata = dtrain, iterationrange = c(1, nrounds)) pred_by_train_2 <- predict(booster_by_train, newdata = dtrain, training = TRUE) expect_equal(pred_by_train_0, pred_by_xgboost_0, tolerance = 1e-6) expect_equal(pred_by_train_1, pred_by_xgboost_1, tolerance = 1e-6) expect_true(all(matrix(pred_by_train_2, byrow = TRUE) == matrix(pred_by_xgboost_2, byrow = TRUE))) }) test_that("train and predict softprob", { lb <- as.numeric(iris$Species) - 1 set.seed(11) expect_output( bst <- xgb.train( data = xgb.DMatrix(as.matrix(iris[, -5]), label = lb, nthread = 1), nrounds = 5, params = xgb.params( max_depth = 3, learning_rate = 0.5, nthread = n_threads, objective = "multi:softprob", num_class = 3, eval_metric = "merror" ), evals = list(train = xgb.DMatrix(as.matrix(iris[, -5]), label = lb, nthread = 1)) ), "train-merror" ) expect_false(is.null(attributes(bst)$evaluation_log)) expect_lt(attributes(bst)$evaluation_log[, min(train_merror)], 0.025) expect_equal(xgb.get.num.boosted.rounds(bst), 5) pred <- predict(bst, as.matrix(iris[, -5])) expect_length(pred, nrow(iris) * 3) # row sums add up to total probability of 1: expect_equal(rowSums(pred), rep(1, nrow(iris)), tolerance = 1e-7) # manually calculate error at the last iteration: mpred <- predict(bst, as.matrix(iris[, -5])) expect_equal(mpred, pred) pred_labels <- max.col(mpred) - 1 err <- sum(pred_labels != lb) / length(lb) expect_equal(attributes(bst)$evaluation_log[5, train_merror], err, tolerance = 5e-6) # manually calculate error at the 1st iteration: mpred <- predict(bst, as.matrix(iris[, -5]), iterationrange = c(1, 1)) pred_labels <- max.col(mpred) - 1 err <- sum(pred_labels != lb) / length(lb) expect_equal(attributes(bst)$evaluation_log[1, train_merror], err, tolerance = 5e-6) mpred1 <- predict(bst, as.matrix(iris[, -5]), iterationrange = c(1, 1)) expect_equal(mpred, mpred1) d <- cbind( x1 = rnorm(100), x2 = rnorm(100), x3 = rnorm(100) ) y <- sample.int(10, 100, replace = TRUE) - 1 dtrain <- xgb.DMatrix(data = d, label = y, nthread = n_threads) booster <- xgb.train( params = xgb.params( objective = "multi:softprob", num_class = 10, tree_method = "hist", nthread = n_threads ), data = dtrain, nrounds = 4 ) predt <- predict(booster, as.matrix(d), strict_shape = FALSE) expect_equal(ncol(predt), 10) expect_equal(rowSums(predt), rep(1, 100), tolerance = 1e-7) }) test_that("train and predict softmax", { lb <- as.numeric(iris$Species) - 1 set.seed(11) 
expect_output( bst <- xgb.train( data = xgb.DMatrix(as.matrix(iris[, -5]), label = lb, nthread = 1), nrounds = 5, params = xgb.params( max_depth = 3, learning_rate = 0.5, nthread = n_threads, objective = "multi:softmax", num_class = 3, eval_metric = "merror" ), evals = list(train = xgb.DMatrix(as.matrix(iris[, -5]), label = lb, nthread = 1)) ), "train-merror" ) expect_false(is.null(attributes(bst)$evaluation_log)) expect_lt(attributes(bst)$evaluation_log[, min(train_merror)], 0.025) expect_equal(xgb.get.num.boosted.rounds(bst), 5) pred <- predict(bst, as.matrix(iris[, -5])) expect_length(pred, nrow(iris)) err <- sum(pred != lb) / length(lb) expect_equal(attributes(bst)$evaluation_log[5, train_merror], err, tolerance = 5e-6) }) test_that("train and predict RF", { set.seed(11) lb <- train$label # single iteration bst <- xgb.train( data = xgb.DMatrix(train$data, label = lb, nthread = 1), nrounds = 1, params = xgb.params( max_depth = 5, nthread = n_threads, objective = "binary:logistic", eval_metric = "error", num_parallel_tree = 20, subsample = 0.6, colsample_bytree = 0.1 ), evals = list(train = xgb.DMatrix(train$data, label = lb, nthread = 1)), verbose = 0 ) expect_equal(xgb.get.num.boosted.rounds(bst), 1) pred <- predict(bst, train$data) pred_err <- sum((pred > 0.5) != lb) / length(lb) expect_lt(abs(attributes(bst)$evaluation_log[1, train_error] - pred_err), 10e-6) # expect_lt(pred_err, 0.03) pred <- predict(bst, train$data, iterationrange = c(1, 1)) pred_err_20 <- sum((pred > 0.5) != lb) / length(lb) expect_equal(pred_err_20, pred_err) }) test_that("train and predict RF with softprob", { lb <- as.numeric(iris$Species) - 1 nrounds <- 15 set.seed(11) bst <- xgb.train( data = xgb.DMatrix(as.matrix(iris[, -5]), label = lb, nthread = 1), nrounds = nrounds, verbose = 0, params = xgb.params( max_depth = 3, learning_rate = 0.9, nthread = n_threads, objective = "multi:softprob", eval_metric = "merror", num_class = 3, num_parallel_tree = 4, subsample = 0.5, colsample_bytree = 0.5 ), evals = list(train = xgb.DMatrix(as.matrix(iris[, -5]), label = lb, nthread = 1)) ) expect_equal(xgb.get.num.boosted.rounds(bst), 15) # predict for all iterations: pred <- predict(bst, as.matrix(iris[, -5])) expect_equal(dim(pred), c(nrow(iris), 3)) pred_labels <- max.col(pred) - 1 err <- sum(pred_labels != lb) / length(lb) expect_equal(attributes(bst)$evaluation_log[nrounds, train_merror], err, tolerance = 5e-6) # predict for 7 iterations and adjust for 4 parallel trees per iteration pred <- predict(bst, as.matrix(iris[, -5]), iterationrange = c(1, 7)) err <- sum((max.col(pred) - 1) != lb) / length(lb) expect_equal(attributes(bst)$evaluation_log[7, train_merror], err, tolerance = 5e-6) }) test_that("use of multiple eval metrics works", { expect_output( bst <- xgb.train( data = xgb.DMatrix(train$data, label = train$label, nthread = 1), nrounds = 2, params = list( max_depth = 2, learning_rate = 1, nthread = n_threads, objective = "binary:logistic", eval_metric = "error", eval_metric = "auc", eval_metric = "logloss" ), evals = list(train = xgb.DMatrix(train$data, label = train$label, nthread = 1)) ), "train-error.*train-auc.*train-logloss" ) expect_false(is.null(attributes(bst)$evaluation_log)) expect_equal(dim(attributes(bst)$evaluation_log), c(2, 4)) expect_equal(colnames(attributes(bst)$evaluation_log), c("iter", "train_error", "train_auc", "train_logloss")) expect_output( bst2 <- xgb.train( data = xgb.DMatrix(train$data, label = train$label, nthread = 1), nrounds = 2, params = xgb.params( max_depth = 2, learning_rate = 
1, nthread = n_threads, objective = "binary:logistic", eval_metric = list("error", "auc", "logloss") ), evals = list(train = xgb.DMatrix(train$data, label = train$label, nthread = 1)) ), "train-error.*train-auc.*train-logloss" ) expect_false(is.null(attributes(bst2)$evaluation_log)) expect_equal(dim(attributes(bst2)$evaluation_log), c(2, 4)) expect_equal(colnames(attributes(bst2)$evaluation_log), c("iter", "train_error", "train_auc", "train_logloss")) }) test_that("training continuation works", { dtrain <- xgb.DMatrix(train$data, label = train$label, nthread = n_threads) evals <- list(train = dtrain) params <- xgb.params( objective = "binary:logistic", max_depth = 2, learning_rate = 1, nthread = n_threads ) # for the reference, use 4 iterations at once: set.seed(11) bst <- xgb.train(params, dtrain, nrounds = 4, evals = evals, verbose = 0) # first two iterations: set.seed(11) bst1 <- xgb.train(params, dtrain, nrounds = 2, evals = evals, verbose = 0) # continue for two more: bst2 <- xgb.train(params, dtrain, nrounds = 2, evals = evals, verbose = 0, xgb_model = bst1) if (!windows_flag && !solaris_flag) { expect_equal(xgb.save.raw(bst), xgb.save.raw(bst2)) } expect_false(is.null(attributes(bst2)$evaluation_log)) expect_equal(dim(attributes(bst2)$evaluation_log), c(4, 2)) expect_equal(attributes(bst2)$evaluation_log, attributes(bst)$evaluation_log) # test continuing from raw model data bst2 <- xgb.train(params, dtrain, nrounds = 2, evals = evals, verbose = 0, xgb_model = xgb.save.raw(bst1)) if (!windows_flag && !solaris_flag) { expect_equal(xgb.save.raw(bst), xgb.save.raw(bst2)) } expect_equal(dim(attributes(bst2)$evaluation_log), c(2, 2)) # test continuing from a model in file fname <- file.path(tempdir(), "xgboost.json") xgb.save(bst1, fname) bst2 <- xgb.train(params, dtrain, nrounds = 2, evals = evals, verbose = 0, xgb_model = fname) if (!windows_flag && !solaris_flag) { expect_equal(xgb.save.raw(bst), xgb.save.raw(bst2)) } expect_equal(dim(attributes(bst2)$evaluation_log), c(2, 2)) }) test_that("xgb.cv works", { set.seed(11) expect_output( cv <- xgb.cv( data = xgb.DMatrix(train$data, label = train$label, nthread = 1), nfold = 5, nrounds = 2, params = xgb.params( max_depth = 2, learning_rate = 1., nthread = n_threads, objective = "binary:logistic", eval_metric = "error" ), verbose = TRUE ), "train-error:" ) expect_is(cv, "xgb.cv.synchronous") expect_false(is.null(cv$evaluation_log)) expect_lt(cv$evaluation_log[, min(test_error_mean)], 0.03) expect_lt(cv$evaluation_log[, min(test_error_std)], 0.0085) expect_equal(cv$niter, 2) expect_false(is.null(cv$folds) && is.list(cv$folds)) expect_length(cv$folds, 5) expect_false(is.null(cv$params) && is.list(cv$params)) expect_false(is.null(cv$call)) }) test_that("xgb.cv invalid inputs", { data("mtcars") y <- mtcars$mpg x_df <- mtcars[, -1] expect_error( cv <- xgb.cv( data = xgb.QuantileDMatrix(x_df, label = y), nfold = 5, nrounds = 2, params = xgb.params( max_depth = 2, nthread = n_threads ) ), regexp = ".*QuantileDMatrix.*" ) expect_error( cv <- xgb.cv( data = xgb.DMatrix(x_df, label = y), nfold = 5, nrounds = 2, params = xgb.params( max_depth = 2, nthread = n_threads, ), callbacks = list( xgb.cb.early.stop(stopping_rounds = 3, save_best = TRUE) ) ), regexp = ".*save_best.*" ) }) test_that("xgb.cv works with stratified folds", { dtrain <- xgb.DMatrix(train$data, label = train$label, nthread = n_threads) set.seed(314159) cv <- xgb.cv( data = dtrain, nrounds = 2, nfold = 5, params = xgb.params( max_depth = 2, nthread = n_threads, objective = 
"binary:logistic" ), verbose = FALSE, stratified = FALSE ) set.seed(314159) cv2 <- xgb.cv( data = dtrain, nfold = 5, nrounds = 2, params = xgb.params( max_depth = 2, nthread = n_threads, objective = "binary:logistic" ), verbose = FALSE, stratified = TRUE ) # Stratified folds should result in a different evaluation logs expect_true(all(cv$evaluation_log[, test_logloss_mean] != cv2$evaluation_log[, test_logloss_mean])) }) test_that("train and predict with non-strict classes", { # standard dense matrix input train_dense <- as.matrix(train$data) bst <- xgb.train( data = xgb.DMatrix(train_dense, label = train$label, nthread = 1), nrounds = 2, params = xgb.params( max_depth = 2, nthread = n_threads, objective = "binary:logistic" ), verbose = 0 ) pr0 <- predict(bst, train_dense) # dense matrix-like input of non-matrix class class(train_dense) <- "shmatrix" expect_true(is.matrix(train_dense)) expect_error( bst <- xgb.train( data = xgb.DMatrix(train_dense, label = train$label, nthread = 1), nrounds = 2, params = xgb.params( max_depth = 2, nthread = n_threads, objective = "binary:logistic" ), verbose = 0 ), regexp = NA ) expect_error(pr <- predict(bst, train_dense), regexp = NA) expect_equal(pr0, pr) # dense matrix-like input of non-matrix class with some inheritance class(train_dense) <- c("pphmatrix", "shmatrix") expect_true(is.matrix(train_dense)) expect_error( bst <- xgb.train( data = xgb.DMatrix(train_dense, label = train$label, nthread = 1), nrounds = 2, params = xgb.params( max_depth = 2, nthread = n_threads, objective = "binary:logistic" ), verbose = 0 ), regexp = NA ) expect_error(pr <- predict(bst, train_dense), regexp = NA) expect_equal(pr0, pr) # when someone inherits from xgb.Booster, it should still be possible to use it as xgb.Booster class(bst) <- c("super.Booster", "xgb.Booster") expect_error(pr <- predict(bst, train_dense), regexp = NA) expect_equal(pr0, pr) }) test_that("max_delta_step works", { dtrain <- xgb.DMatrix( agaricus.train$data, label = agaricus.train$label, nthread = n_threads ) evals <- list(train = dtrain) params <- xgb.params( objective = "binary:logistic", eval_metric = "logloss", max_depth = 2, nthread = n_threads, learning_rate = 0.5 ) nrounds <- 5 # model with no restriction on max_delta_step bst1 <- xgb.train(params, dtrain, nrounds, evals = evals, verbose = 0) # model with restricted max_delta_step bst2 <- xgb.train(c(params, list(max_delta_step = 1)), dtrain, nrounds, evals = evals, verbose = 0) # the no-restriction model is expected to have consistently lower loss during the initial iterations expect_true(all(attributes(bst1)$evaluation_log$train_logloss < attributes(bst2)$evaluation_log$train_logloss)) expect_lt(mean(attributes(bst1)$evaluation_log$train_logloss) / mean(attributes(bst2)$evaluation_log$train_logloss), 0.8) }) test_that("colsample_bytree works", { # Randomly generate data matrix by sampling from uniform distribution [-1, 1] set.seed(1) train_x <- matrix(runif(1000, min = -1, max = 1), ncol = 100) train_y <- as.numeric(rowSums(train_x) > 0) test_x <- matrix(runif(1000, min = -1, max = 1), ncol = 100) test_y <- as.numeric(rowSums(test_x) > 0) colnames(train_x) <- paste0("Feature_", sprintf("%03d", 1:100)) colnames(test_x) <- paste0("Feature_", sprintf("%03d", 1:100)) dtrain <- xgb.DMatrix(train_x, label = train_y, nthread = n_threads) dtest <- xgb.DMatrix(test_x, label = test_y, nthread = n_threads) evals <- list(train = dtrain, eval = dtest) ## Use colsample_bytree = 0.01, so that roughly one out of 100 features is chosen for ## each tree 
params <- xgb.params( max_depth = 2, learning_rate = 0, nthread = n_threads, colsample_bytree = 0.01, objective = "binary:logistic", eval_metric = "auc" ) set.seed(2) bst <- xgb.train(params, dtrain, nrounds = 100, evals = evals, verbose = 0) xgb.importance(model = bst) # If colsample_bytree works properly, a variety of features should be used # in the 100 trees expect_gte(nrow(xgb.importance(model = bst)), 28) }) test_that("Configuration works", { bst <- xgb.train( data = xgb.DMatrix(train$data, label = train$label, nthread = 1), nrounds = 2, params = xgb.params( max_depth = 2, nthread = n_threads, objective = "binary:logistic" ) ) config <- xgb.config(bst) xgb.config(bst) <- config reloaded_config <- xgb.config(bst) expect_equal(config, reloaded_config) }) test_that("strict_shape works", { n_rounds <- 2 test_strict_shape <- function(bst, X, n_groups) { predt <- predict(bst, X, strict_shape = TRUE) margin <- predict(bst, X, outputmargin = TRUE, strict_shape = TRUE) contri <- predict(bst, X, predcontrib = TRUE, strict_shape = TRUE) interact <- predict(bst, X, predinteraction = TRUE, strict_shape = TRUE) leaf <- predict(bst, X, predleaf = TRUE, strict_shape = TRUE) n_rows <- nrow(X) n_cols <- ncol(X) expect_equal(dim(predt), c(n_rows, n_groups)) expect_equal(dim(margin), c(n_rows, n_groups)) expect_equal(dim(contri), c(n_rows, n_groups, n_cols + 1)) expect_equal(dim(interact), c(n_rows, n_groups, n_cols + 1, n_cols + 1)) expect_equal(dim(leaf), c(n_rows, n_rounds, n_groups, 1)) if (n_groups != 1) { for (g in seq_len(n_groups)) { expect_lt(max(abs(rowSums(contri[, g, ]) - margin[, g])), 1e-5) } leaf_no_strict <- predict(bst, X, strict_shape = FALSE, predleaf = TRUE) for (g in seq_len(n_groups)) { g_mask <- rep(FALSE, n_groups) g_mask[g] <- TRUE expect_equal( leaf[, , g, 1L], leaf_no_strict[, g_mask] ) } } } test_iris <- function() { y <- as.numeric(iris$Species) - 1 X <- as.matrix(iris[, -5]) bst <- xgb.train( data = xgb.DMatrix(X, label = y, nthread = 1), nrounds = n_rounds, params = xgb.params( max_depth = 2, nthread = n_threads, objective = "multi:softprob", num_class = 3 ) ) test_strict_shape(bst, X, 3) } test_agaricus <- function() { data(agaricus.train, package = "xgboost") X <- agaricus.train$data y <- agaricus.train$label bst <- xgb.train( data = xgb.DMatrix(X, label = y, nthread = 1), nrounds = n_rounds, params = xgb.params( max_depth = 2, nthread = n_threads, objective = "binary:logistic" ) ) test_strict_shape(bst, X, 1) } test_iris() test_agaricus() }) test_that("'predict' accepts CSR data", { X <- agaricus.train$data y <- agaricus.train$label x_csc <- as(X[1L, , drop = FALSE], "CsparseMatrix") x_csr <- as(x_csc, "RsparseMatrix") x_spv <- as(x_csc, "sparseVector") bst <- xgb.train( data = xgb.DMatrix(X, label = y, nthread = 1), nrounds = 5L, verbose = FALSE, params = xgb.params( objective = "binary:logistic", nthread = n_threads ) ) p_csc <- predict(bst, x_csc) p_csr <- predict(bst, x_csr) p_spv <- predict(bst, x_spv) expect_equal(p_csc, p_csr) expect_equal(p_csc, p_spv) }) test_that("Quantile regression accepts multiple quantiles", { data(mtcars) y <- mtcars[, 1] x <- as.matrix(mtcars[, -1]) dm <- xgb.DMatrix(data = x, label = y, nthread = 1) model <- xgb.train( data = dm, params = xgb.params( objective = "reg:quantileerror", tree_method = "exact", quantile_alpha = c(0.05, 0.5, 0.95), nthread = n_threads ), nrounds = 15 ) pred <- predict(model, x) expect_equal(dim(pred)[1], nrow(x)) expect_equal(dim(pred)[2], 3) expect_true(all(pred[, 1] <= pred[, 3])) cors <- cor(y, pred) 
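  # The median (alpha = 0.5) prediction is expected to track the label most
  # closely, so its correlation should beat both tail quantiles: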
expect_true(cors[2] > cors[1]) expect_true(cors[2] > cors[3]) expect_true(cors[2] > 0.85) }) test_that("Can use multi-output labels with built-in objectives", { data("mtcars") y <- mtcars$mpg x <- as.matrix(mtcars[, -1]) y_mirrored <- cbind(y, -y) dm <- xgb.DMatrix(x, label = y_mirrored, nthread = n_threads) model <- xgb.train( params = xgb.params( tree_method = "hist", multi_strategy = "multi_output_tree", objective = "reg:squarederror", nthread = n_threads ), data = dm, nrounds = 5 ) pred <- predict(model, x) expect_equal(pred[, 1], -pred[, 2]) expect_true(cor(y, pred[, 1]) > 0.9) expect_true(cor(y, pred[, 2]) < -0.9) }) test_that("Can use multi-output labels with custom objectives", { data("mtcars") y <- mtcars$mpg x <- as.matrix(mtcars[, -1]) y_mirrored <- cbind(y, -y) dm <- xgb.DMatrix(x, label = y_mirrored, nthread = n_threads) model <- xgb.train( params = xgb.params( tree_method = "hist", multi_strategy = "multi_output_tree", base_score = 0, objective = function(pred, dtrain) { y <- getinfo(dtrain, "label") grad <- pred - y hess <- rep(1, nrow(grad) * ncol(grad)) hess <- matrix(hess, nrow = nrow(grad)) return(list(grad = grad, hess = hess)) }, nthread = n_threads ), data = dm, nrounds = 5 ) pred <- predict(model, x) expect_equal(pred[, 1], -pred[, 2]) expect_true(cor(y, pred[, 1]) > 0.9) expect_true(cor(y, pred[, 2]) < -0.9) }) test_that("Can use ranking objectives with either 'qid' or 'group'", { set.seed(123) x <- matrix(rnorm(100 * 10), nrow = 100) y <- sample(2, size = 100, replace = TRUE) - 1 qid <- c(rep(1, 20), rep(2, 20), rep(3, 60)) gr <- c(20, 20, 60) dmat_qid <- xgb.DMatrix(x, label = y, qid = qid, nthread = 1) dmat_gr <- xgb.DMatrix(x, label = y, group = gr, nthread = 1) params <- xgb.params( tree_method = "hist", lambdarank_num_pair_per_sample = 8, objective = "rank:ndcg", lambdarank_pair_method = "topk", nthread = n_threads ) set.seed(123) model_qid <- xgb.train(params, dmat_qid, nrounds = 5) set.seed(123) model_gr <- xgb.train(params, dmat_gr, nrounds = 5) pred_qid <- predict(model_qid, x) pred_gr <- predict(model_gr, x) expect_equal(pred_qid, pred_gr) }) test_that("Can predict on data.frame objects", { data("mtcars") y <- mtcars$mpg x_df <- mtcars[, -1] x_mat <- as.matrix(x_df) dm <- xgb.DMatrix(x_mat, label = y, nthread = n_threads) model <- xgb.train( params = xgb.params( tree_method = "hist", objective = "reg:squarederror", nthread = n_threads ), data = dm, nrounds = 5 ) pred_mat <- predict(model, xgb.DMatrix(x_mat, nthread = 1)) pred_df <- predict(model, x_df) expect_equal(pred_mat, unname(pred_df)) }) test_that("'base_margin' gives the same result in DMatrix as in inplace_predict", { data("mtcars") y <- mtcars$mpg x <- as.matrix(mtcars[, -1]) dm <- xgb.DMatrix(x, label = y, nthread = n_threads) model <- xgb.train( params = xgb.params( tree_method = "hist", objective = "reg:squarederror", nthread = n_threads ), data = dm, nrounds = 5 ) set.seed(123) base_margin <- rnorm(nrow(x)) dm_w_base <- xgb.DMatrix(data = x, base_margin = base_margin, nthread = 1) pred_from_dm <- predict(model, dm_w_base) pred_from_mat <- predict(model, x, base_margin = base_margin) expect_equal(pred_from_dm, unname(pred_from_mat)) }) test_that("Coefficients from gblinear have the expected shape and names", { # Single-column coefficients data(mtcars) y <- mtcars$mpg x <- as.matrix(mtcars[, -1]) mm <- model.matrix(~., data = mtcars[, -1]) dm <- xgb.DMatrix(x, label = y, nthread = 1) model <- xgb.train( data = dm, params = xgb.params( booster = "gblinear", nthread = 1 ), nrounds = 3 ) coefs <- 
coef(model) expect_equal(length(coefs), ncol(x) + 1) expect_equal(names(coefs), c("(Intercept)", colnames(x))) pred_auto <- predict(model, x) pred_manual <- as.numeric(mm %*% coefs) expect_equal(pred_manual, unname(pred_auto), tolerance = 1e-5) # Multi-column coefficients data(iris) y <- as.numeric(iris$Species) - 1 x <- as.matrix(iris[, -5]) dm <- xgb.DMatrix(x, label = y, nthread = 1) mm <- model.matrix(~., data = iris[, -5]) model <- xgb.train( data = dm, params = xgb.params( booster = "gblinear", objective = "multi:softprob", num_class = 3, nthread = 1 ), nrounds = 3 ) coefs <- coef(model) expect_equal(nrow(coefs), ncol(x) + 1) expect_equal(ncol(coefs), 3) expect_equal(row.names(coefs), c("(Intercept)", colnames(x))) pred_auto <- predict(model, x, outputmargin = TRUE) pred_manual <- unname(mm %*% coefs) expect_equal(pred_manual, pred_auto, tolerance = 1e-7) # xgboost() with additional metadata model <- xgboost( iris[, -5], iris$Species, booster = "gblinear", objective = "multi:softprob", nrounds = 3, nthread = 1 ) coefs <- coef(model) expect_equal(row.names(coefs), c("(Intercept)", colnames(x))) expect_equal(colnames(coefs), levels(iris$Species)) }) test_that("Deep copies work as expected", { data(mtcars) y <- mtcars$mpg x <- mtcars[, -1] dm <- xgb.DMatrix(x, label = y, nthread = 1) model <- xgb.train( data = dm, params = xgb.params(nthread = 1), nrounds = 3 ) xgb.attr(model, "my_attr") <- 100 model_shallow_copy <- model xgb.attr(model_shallow_copy, "my_attr") <- 333 attr_orig <- xgb.attr(model, "my_attr") attr_shallow <- xgb.attr(model_shallow_copy, "my_attr") expect_equal(attr_orig, attr_shallow) model_deep_copy <- xgb.copy.Booster(model) xgb.attr(model_deep_copy, "my_attr") <- 444 attr_orig <- xgb.attr(model, "my_attr") attr_deep <- xgb.attr(model_deep_copy, "my_attr") expect_false(attr_orig == attr_deep) }) test_that("Pointer comparison works as expected", { library(xgboost) y <- mtcars$mpg x <- as.matrix(mtcars[, -1]) model <- xgb.train( params = xgb.params(nthread = 1), data = xgb.DMatrix(x, label = y, nthread = 1), nrounds = 3 ) model_shallow_copy <- model expect_true(xgb.is.same.Booster(model, model_shallow_copy)) model_deep_copy <- xgb.copy.Booster(model) expect_false(xgb.is.same.Booster(model, model_deep_copy)) xgb.attr(model_shallow_copy, "my_attr") <- 111 expect_equal(xgb.attr(model, "my_attr"), "111") expect_null(xgb.attr(model_deep_copy, "my_attr")) }) test_that("DMatrix field are set to booster when training", { set.seed(123) y <- rnorm(100) x <- matrix(rnorm(100 * 3), nrow = 100) x[, 2] <- abs(as.integer(x[, 2])) dm_unnamed <- xgb.DMatrix(x, label = y, nthread = 1) dm_feature_names <- xgb.DMatrix(x, label = y, feature_names = c("a", "b", "c"), nthread = 1) dm_feature_types <- xgb.DMatrix(x, label = y, nthread = 1) setinfo(dm_feature_types, "feature_type", c("q", "c", "q")) dm_both <- xgb.DMatrix(x, label = y, feature_names = c("a", "b", "c"), nthread = 1) setinfo(dm_both, "feature_type", c("q", "c", "q")) params <- xgb.params(nthread = 1) model_unnamed <- xgb.train(data = dm_unnamed, params = params, nrounds = 3) model_feature_names <- xgb.train(data = dm_feature_names, params = params, nrounds = 3) model_feature_types <- xgb.train(data = dm_feature_types, params = params, nrounds = 3) model_both <- xgb.train(data = dm_both, params = params, nrounds = 3) expect_null(getinfo(model_unnamed, "feature_name")) expect_equal(getinfo(model_feature_names, "feature_name"), c("a", "b", "c")) expect_null(getinfo(model_feature_types, "feature_name")) expect_equal(getinfo(model_both, 
"feature_name"), c("a", "b", "c")) expect_null(variable.names(model_unnamed)) expect_equal(variable.names(model_feature_names), c("a", "b", "c")) expect_null(variable.names(model_feature_types)) expect_equal(variable.names(model_both), c("a", "b", "c")) expect_null(getinfo(model_unnamed, "feature_type")) expect_null(getinfo(model_feature_names, "feature_type")) expect_equal(getinfo(model_feature_types, "feature_type"), c("q", "c", "q")) expect_equal(getinfo(model_both, "feature_type"), c("q", "c", "q")) }) test_that("Seed in params override PRNG from R", { set.seed(123) model1 <- xgb.train( data = xgb.DMatrix( agaricus.train$data, label = agaricus.train$label, nthread = 1L ), params = xgb.params( objective = "binary:logistic", max_depth = 3L, subsample = 0.1, colsample_bytree = 0.1, seed = 111L ), nrounds = 3L ) set.seed(456) model2 <- xgb.train( data = xgb.DMatrix( agaricus.train$data, label = agaricus.train$label, nthread = 1L ), params = xgb.params( objective = "binary:logistic", max_depth = 3L, subsample = 0.1, colsample_bytree = 0.1, seed = 111L ), nrounds = 3L ) expect_equal( xgb.save.raw(model1, raw_format = "json"), xgb.save.raw(model2, raw_format = "json") ) set.seed(123) model3 <- xgb.train( data = xgb.DMatrix( agaricus.train$data, label = agaricus.train$label, nthread = 1L ), params = xgb.params( objective = "binary:logistic", max_depth = 3L, subsample = 0.1, colsample_bytree = 0.1, seed = 222L ), nrounds = 3L ) expect_false( isTRUE( all.equal( xgb.save.raw(model1, raw_format = "json"), xgb.save.raw(model3, raw_format = "json") ) ) ) }) test_that("xgb.cv works for AFT", { X <- matrix(c(1, -1, -1, 1, 0, 1, 1, 0), nrow = 4, byrow = TRUE) # 4x2 matrix dtrain <- xgb.DMatrix(X, nthread = n_threads) params <- xgb.params(objective = 'survival:aft', learning_rate = 0.2, max_depth = 2L, nthread = n_threads) # data must have bounds expect_error( xgb.cv( params = params, data = dtrain, nround = 5L, nfold = 4L ) ) setinfo(dtrain, 'label_lower_bound', c(2, 3, 0, 4)) setinfo(dtrain, 'label_upper_bound', c(2, Inf, 4, 5)) # automatic stratified splitting is turned off expect_warning( xgb.cv( params = params, data = dtrain, nround = 5L, nfold = 4L, stratified = TRUE, verbose = FALSE ) ) # this works without any issue expect_no_warning( xgb.cv(params = params, data = dtrain, nround = 5L, nfold = 4L, verbose = FALSE) ) }) test_that("xgb.cv works for ranking", { data(iris) x <- iris[, -(4:5)] y <- as.integer(iris$Petal.Width) group <- rep(50, 3) dm <- xgb.DMatrix(x, label = y, group = group, nthread = 1) res <- xgb.cv( data = dm, params = xgb.params( objective = "rank:pairwise", max_depth = 3, nthread = 1L ), nrounds = 3, nfold = 2, verbose = FALSE, stratified = FALSE ) expect_equal(length(res$folds), 2L) }) test_that("Row names are preserved in outputs", { data(iris) x <- iris[, -5] y <- as.numeric(iris$Species) - 1 dm <- xgb.DMatrix(x, label = y, nthread = 1) model <- xgb.train( data = dm, params = xgb.params( objective = "multi:softprob", num_class = 3, max_depth = 2, nthread = 1 ), nrounds = 3 ) row.names(x) <- paste0("r", seq(1, nrow(x))) pred <- predict(model, x) expect_equal(row.names(pred), row.names(x)) pred <- predict(model, x, avoid_transpose = TRUE) expect_equal(colnames(pred), row.names(x)) data(mtcars) y <- mtcars[, 1] x <- as.matrix(mtcars[, -1]) dm <- xgb.DMatrix(data = x, label = y, nthread = 1) model <- xgb.train( data = dm, params = xgb.params( max_depth = 2, nthread = 1 ), nrounds = 3 ) row.names(x) <- paste0("r", seq(1, nrow(x))) pred <- predict(model, x) 
expect_equal(names(pred), row.names(x)) pred <- predict(model, x, avoid_transpose = TRUE) expect_equal(names(pred), row.names(x)) pred <- predict(model, x, predleaf = TRUE) expect_equal(row.names(pred), row.names(x)) pred <- predict(model, x, predleaf = TRUE, avoid_transpose = TRUE) expect_equal(colnames(pred), row.names(x)) }) xgboost-3.0.0/R-package/tests/testthat/test_booster_slicing.R000066400000000000000000000042341476511272400243560ustar00rootroot00000000000000context("testing xgb.Booster slicing") data(agaricus.train, package = "xgboost") dm <- xgb.DMatrix(agaricus.train$data, label = agaricus.train$label, nthread = 1) # Note: here need large step sizes in order for the predictions # to have substantially different leaf assignments on each tree model <- xgb.train( params = xgb.params(objective = "binary:logistic", nthread = 1, max_depth = 4), data = dm, nrounds = 20 ) pred <- predict(model, dm, predleaf = TRUE) test_that("Slicing full model", { new_model <- xgb.slice.Booster(model, 1, 0) expect_equal(xgb.save.raw(new_model), xgb.save.raw(model)) new_model <- model[] expect_equal(xgb.save.raw(new_model), xgb.save.raw(model)) new_model <- model[1:length(model)] # nolint expect_equal(xgb.save.raw(new_model), xgb.save.raw(model)) }) test_that("Slicing sequence from start", { new_model <- xgb.slice.Booster(model, 1, 10) new_pred <- predict(new_model, dm, predleaf = TRUE) expect_equal(new_pred, pred[, seq(1, 10)]) new_model <- model[1:10] new_pred <- predict(new_model, dm, predleaf = TRUE) expect_equal(new_pred, pred[, seq(1, 10)]) }) test_that("Slicing sequence from middle", { new_model <- xgb.slice.Booster(model, 5, 10) new_pred <- predict(new_model, dm, predleaf = TRUE) expect_equal(new_pred, pred[, seq(5, 10)]) new_model <- model[5:10] new_pred <- predict(new_model, dm, predleaf = TRUE) expect_equal(new_pred, pred[, seq(5, 10)]) }) test_that("Slicing with non-unit step", { for (s in 2:5) { new_model <- xgb.slice.Booster(model, 1, 17, s) new_pred <- predict(new_model, dm, predleaf = TRUE) expect_equal(new_pred, pred[, seq(1, 17, s)]) new_model <- model[seq(1, 17, s)] new_pred <- predict(new_model, dm, predleaf = TRUE) expect_equal(new_pred, pred[, seq(1, 17, s)]) } }) test_that("Slicing with non-unit step from middle", { for (s in 2:5) { new_model <- xgb.slice.Booster(model, 4, 17, s) new_pred <- predict(new_model, dm, predleaf = TRUE) expect_equal(new_pred, pred[, seq(4, 17, s)]) new_model <- model[seq(4, 17, s)] new_pred <- predict(new_model, dm, predleaf = TRUE) expect_equal(new_pred, pred[, seq(4, 17, s)]) } }) xgboost-3.0.0/R-package/tests/testthat/test_callbacks.R000066400000000000000000000423341476511272400231130ustar00rootroot00000000000000# More specific testing of callbacks context("callbacks") data(agaricus.train, package = 'xgboost') data(agaricus.test, package = 'xgboost') train <- agaricus.train test <- agaricus.test n_threads <- 2 # add some label noise for early stopping tests add.noise <- function(label, frac) { inoise <- sample(length(label), length(label) * frac) label[inoise] <- !label[inoise] label } set.seed(11) ltrain <- add.noise(train$label, 0.2) ltest <- add.noise(test$label, 0.2) dtrain <- xgb.DMatrix(train$data, label = ltrain, nthread = n_threads) dtest <- xgb.DMatrix(test$data, label = ltest, nthread = n_threads) evals <- list(train = dtrain, test = dtest) err <- function(label, pr) sum((pr > 0.5) != label) / length(label) params <- xgb.params( objective = "binary:logistic", eval_metric = "error", max_depth = 2, nthread = n_threads ) 
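# Quick illustration of the 'err' helper above: it is the share of
# misclassified labels given probability predictions; in the example below
# only the third prediction falls on the wrong side of 0.5.
stopifnot(all.equal(err(c(0, 1, 1), c(0.2, 0.9, 0.4)), 1 / 3))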
test_that("xgb.cb.print.evaluation works as expected for xgb.train", { logs1 <- capture.output({ model <- xgb.train( data = dtrain, params = xgb.params( objective = "binary:logistic", eval_metric = "auc", max_depth = 2, nthread = n_threads ), nrounds = 10, evals = list(train = dtrain, test = dtest), callbacks = list(xgb.cb.print.evaluation(period = 1)) ) }) expect_equal(length(logs1), 10) expect_true(all(grepl("^\\[\\d{1,2}\\]\ttrain-auc:0\\.\\d+\ttest-auc:0\\.\\d+\\s*$", logs1))) lapply(seq(1, 10), function(x) expect_true(grepl(paste0("^\\[", x), logs1[x]))) logs2 <- capture.output({ model <- xgb.train( data = dtrain, params = xgb.params( objective = "binary:logistic", eval_metric = "auc", max_depth = 2, nthread = n_threads ), nrounds = 10, evals = list(train = dtrain, test = dtest), callbacks = list(xgb.cb.print.evaluation(period = 2)) ) }) expect_equal(length(logs2), 6) expect_true(all(grepl("^\\[\\d{1,2}\\]\ttrain-auc:0\\.\\d+\ttest-auc:0\\.\\d+\\s*$", logs2))) seq_matches <- c(seq(1, 10, 2), 10) lapply(seq_along(seq_matches), function(x) expect_true(grepl(paste0("^\\[", seq_matches[x]), logs2[x]))) }) test_that("xgb.cb.print.evaluation works as expected for xgb.cv", { logs1 <- capture.output({ model <- xgb.cv( data = dtrain, params = xgb.params( objective = "binary:logistic", eval_metric = "auc", max_depth = 2, nthread = n_threads ), nrounds = 10, nfold = 3, callbacks = list(xgb.cb.print.evaluation(period = 1, showsd = TRUE)) ) }) expect_equal(length(logs1), 10) expect_true(all(grepl("^\\[\\d{1,2}\\]\ttrain-auc:0\\.\\d+±0\\.\\d+\ttest-auc:0\\.\\d+±0\\.\\d+\\s*$", logs1))) lapply(seq(1, 10), function(x) expect_true(grepl(paste0("^\\[", x), logs1[x]))) logs2 <- capture.output({ model <- xgb.cv( data = dtrain, params = xgb.params( objective = "binary:logistic", eval_metric = "auc", max_depth = 2, nthread = n_threads ), nrounds = 10, nfold = 3, callbacks = list(xgb.cb.print.evaluation(period = 2, showsd = TRUE)) ) }) expect_equal(length(logs2), 6) expect_true(all(grepl("^\\[\\d{1,2}\\]\ttrain-auc:0\\.\\d+±0\\.\\d+\ttest-auc:0\\.\\d+±0\\.\\d+\\s*$", logs2))) seq_matches <- c(seq(1, 10, 2), 10) lapply(seq_along(seq_matches), function(x) expect_true(grepl(paste0("^\\[", seq_matches[x]), logs2[x]))) }) test_that("xgb.cb.evaluation.log works as expected for xgb.train", { model <- xgb.train( data = dtrain, params = xgb.params( objective = "binary:logistic", eval_metric = "auc", max_depth = 2, nthread = n_threads ), nrounds = 10, verbose = FALSE, evals = list(train = dtrain, test = dtest), callbacks = list(xgb.cb.evaluation.log()) ) logs <- attributes(model)$evaluation_log expect_equal(nrow(logs), 10) expect_equal(colnames(logs), c("iter", "train_auc", "test_auc")) }) test_that("xgb.cb.evaluation.log works as expected for xgb.cv", { model <- xgb.cv( data = dtrain, params = xgb.params( objective = "binary:logistic", eval_metric = "auc", max_depth = 2, nthread = n_threads ), nrounds = 10, verbose = FALSE, nfold = 3, callbacks = list(xgb.cb.evaluation.log()) ) logs <- model$evaluation_log expect_equal(nrow(logs), 10) expect_equal( colnames(logs), c("iter", "train_auc_mean", "train_auc_std", "test_auc_mean", "test_auc_std") ) }) params <- xgb.params( objective = "binary:logistic", eval_metric = "error", max_depth = 4, nthread = n_threads ) test_that("can store evaluation_log without printing", { expect_silent( bst <- xgb.train(params, dtrain, nrounds = 10, evals = evals, verbose = 0) ) expect_false(is.null(attributes(bst)$evaluation_log)) 
expect_false(is.null(attributes(bst)$evaluation_log$train_error)) expect_lt(attributes(bst)$evaluation_log[, min(train_error)], 0.2) }) test_that("xgb.cb.reset.parameters works as expected", { # fixed learning_rate params <- c(params, list(learning_rate = 0.9)) set.seed(111) bst0 <- xgb.train(params, dtrain, nrounds = 2, evals = evals, verbose = 0) expect_false(is.null(attributes(bst0)$evaluation_log)) expect_false(is.null(attributes(bst0)$evaluation_log$train_error)) # same learning_rate but re-set as a vector parameter in the callback set.seed(111) my_par <- list(learning_rate = c(0.9, 0.9)) bst1 <- xgb.train(params, dtrain, nrounds = 2, evals = evals, verbose = 0, callbacks = list(xgb.cb.reset.parameters(my_par))) expect_false(is.null(attributes(bst1)$evaluation_log$train_error)) expect_equal(attributes(bst0)$evaluation_log$train_error, attributes(bst1)$evaluation_log$train_error) # same learning_rate but re-set via a function in the callback set.seed(111) my_par <- list(learning_rate = function(itr, itr_end) 0.9) bst2 <- xgb.train(params, dtrain, nrounds = 2, evals = evals, verbose = 0, callbacks = list(xgb.cb.reset.parameters(my_par))) expect_false(is.null(attributes(bst2)$evaluation_log$train_error)) expect_equal(attributes(bst0)$evaluation_log$train_error, attributes(bst2)$evaluation_log$train_error) # different learning_rate re-set as a vector parameter in the callback set.seed(111) my_par <- list(learning_rate = c(0.6, 0.5)) bst3 <- xgb.train(params, dtrain, nrounds = 2, evals = evals, verbose = 0, callbacks = list(xgb.cb.reset.parameters(my_par))) expect_false(is.null(attributes(bst3)$evaluation_log$train_error)) expect_false(all(attributes(bst0)$evaluation_log$train_error == attributes(bst3)$evaluation_log$train_error)) # resetting multiple parameters at the same time runs with no error my_par <- list(learning_rate = c(1., 0.5), min_split_loss = c(1, 2), max_depth = c(4, 8)) expect_error( bst4 <- xgb.train(params, dtrain, nrounds = 2, evals = evals, verbose = 0, callbacks = list(xgb.cb.reset.parameters(my_par))) , NA) # NA = no error # CV works as well expect_error( bst4 <- xgb.cv(params, dtrain, nfold = 2, nrounds = 2, verbose = 0, callbacks = list(xgb.cb.reset.parameters(my_par))) , NA) # NA = no error # expect no learning with 0 learning rate my_par <- list(learning_rate = c(0., 0.)) bstX <- xgb.train(params, dtrain, nrounds = 2, evals = evals, verbose = 0, callbacks = list(xgb.cb.reset.parameters(my_par))) expect_false(is.null(attributes(bstX)$evaluation_log$train_error)) er <- unique(attributes(bstX)$evaluation_log$train_error) expect_length(er, 1) expect_gt(er, 0.4) }) test_that("xgb.cb.save.model works as expected", { files <- c('xgboost_01.json', 'xgboost_02.json', 'xgboost.json') files <- unname(sapply(files, function(f) file.path(tempdir(), f))) for (f in files) if (file.exists(f)) file.remove(f) bst <- xgb.train(params, dtrain, nrounds = 2, evals = evals, verbose = 0, save_period = 1, save_name = file.path(tempdir(), "xgboost_%02d.json")) expect_true(file.exists(files[1])) expect_true(file.exists(files[2])) b1 <- xgb.load(files[1]) xgb.model.parameters(b1) <- list(nthread = 2) expect_equal(xgb.get.num.boosted.rounds(b1), 1) b2 <- xgb.load(files[2]) xgb.model.parameters(b2) <- list(nthread = 2) expect_equal(xgb.get.num.boosted.rounds(b2), 2) xgb.config(b2) <- xgb.config(bst) expect_equal(xgb.config(bst), xgb.config(b2)) expect_equal(xgb.save.raw(bst), xgb.save.raw(b2)) # save_period = 0 saves the last iteration's model bst <- xgb.train(params, dtrain, nrounds = 2, 
evals = evals, verbose = 0, save_period = 0, save_name = file.path(tempdir(), 'xgboost.json')) expect_true(file.exists(files[3])) b2 <- xgb.load(files[3]) xgb.config(b2) <- xgb.config(bst) expect_equal(xgb.save.raw(bst), xgb.save.raw(b2)) for (f in files) if (file.exists(f)) file.remove(f) }) test_that("early stopping xgb.train works", { params <- c(params, list(learning_rate = 0.3)) set.seed(11) expect_output( bst <- xgb.train(params, dtrain, nrounds = 20, evals = evals, early_stopping_rounds = 3, maximize = FALSE) , "Stopping. Best iteration") expect_false(is.null(xgb.attr(bst, "best_iteration"))) expect_lt(xgb.attr(bst, "best_iteration"), 19) pred <- predict(bst, dtest) expect_equal(length(pred), 1611) err_pred <- err(ltest, pred) err_log <- attributes(bst)$evaluation_log[xgb.attr(bst, "best_iteration") + 1, test_error] expect_equal(err_log, err_pred, tolerance = 5e-6) set.seed(11) expect_silent( bst0 <- xgb.train(params, dtrain, nrounds = 20, evals = evals, early_stopping_rounds = 3, maximize = FALSE, verbose = 0) ) expect_equal(attributes(bst)$evaluation_log, attributes(bst0)$evaluation_log) fname <- file.path(tempdir(), "model.ubj") xgb.save(bst, fname) loaded <- xgb.load(fname) expect_false(is.null(xgb.attr(loaded, "best_iteration"))) expect_equal(xgb.attr(loaded, "best_iteration"), xgb.attr(bst, "best_iteration")) }) test_that("early stopping using a specific metric works", { set.seed(11) expect_output( bst <- xgb.train( c( within(params, rm("eval_metric")), list( learning_rate = 0.6, eval_metric = "logloss", eval_metric = "auc" ) ), dtrain, nrounds = 20, evals = evals, callbacks = list( xgb.cb.early.stop(stopping_rounds = 3, maximize = FALSE, metric_name = 'test_logloss') ) ) , "Stopping. Best iteration") expect_false(is.null(xgb.attr(bst, "best_iteration"))) expect_lt(xgb.attr(bst, "best_iteration"), 19) pred <- predict(bst, dtest, iterationrange = c(1, xgb.attr(bst, "best_iteration") + 1)) expect_equal(length(pred), 1611) logloss_pred <- sum(-ltest * log(pred) - (1 - ltest) * log(1 - pred)) / length(ltest) logloss_log <- attributes(bst)$evaluation_log[xgb.attr(bst, "best_iteration") + 1, test_logloss] expect_equal(logloss_log, logloss_pred, tolerance = 1e-5) }) test_that("early stopping works with titanic", { if (!requireNamespace("titanic")) { testthat::skip("Optional testing dependency 'titanic' not found.") } # This test was inspired by https://github.com/dmlc/xgboost/issues/5935 # It catches possible issues on noLD R titanic <- titanic::titanic_train titanic$Pclass <- as.factor(titanic$Pclass) dtx <- model.matrix(~ 0 + ., data = titanic[, c("Pclass", "Sex")]) dty <- titanic$Survived xgb.train( data = xgb.DMatrix(dtx, label = dty, nthread = 1), params = xgb.params( objective = "binary:logistic", eval_metric = "auc", nthread = n_threads ), nrounds = 100, early_stopping_rounds = 3, verbose = 0, evals = list(train = xgb.DMatrix(dtx, label = dty, nthread = 1)) ) expect_true(TRUE) # should not crash }) test_that("early stopping xgb.cv works", { set.seed(11) expect_output( { cv <- xgb.cv( c(params, list(learning_rate = 0.3)), dtrain, nfold = 5, nrounds = 20, early_stopping_rounds = 3, maximize = FALSE ) }, "Stopping. 
Best iteration" ) expect_false(is.null(cv$early_stop$best_iteration)) expect_lt(cv$early_stop$best_iteration, 19) # the best error is min error: expect_true(cv$evaluation_log[, test_error_mean[cv$early_stop$best_iteration] == min(test_error_mean)]) }) test_that("prediction in xgb.cv works", { params <- c(params, list(learning_rate = 0.5)) set.seed(11) nrounds <- 4 cv <- xgb.cv(params, dtrain, nfold = 5, nrounds = nrounds, prediction = TRUE, verbose = 0) expect_false(is.null(cv$evaluation_log)) expect_false(is.null(cv$cv_predict$pred)) expect_length(cv$cv_predict$pred, nrow(train$data)) err_pred <- mean(sapply(cv$folds, function(f) mean(err(ltrain[f], cv$cv_predict$pred[f])))) err_log <- cv$evaluation_log[nrounds, test_error_mean] expect_equal(err_pred, err_log, tolerance = 1e-6) # save CV models set.seed(11) cvx <- xgb.cv(params, dtrain, nfold = 5, nrounds = nrounds, prediction = TRUE, verbose = 0, callbacks = list(xgb.cb.cv.predict(save_models = TRUE))) expect_equal(cv$evaluation_log, cvx$evaluation_log) expect_length(cvx$cv_predict$models, 5) expect_true(all(sapply(cvx$cv_predict$models, class) == 'xgb.Booster')) }) test_that("prediction in xgb.cv works for gblinear too", { set.seed(11) p <- xgb.params( booster = 'gblinear', objective = "reg:logistic", learning_rate = 0.5, nthread = n_threads ) cv <- xgb.cv(p, dtrain, nfold = 5, nrounds = 2, prediction = TRUE, verbose = 0) expect_false(is.null(cv$evaluation_log)) expect_false(is.null(cv$cv_predict$pred)) expect_length(cv$cv_predict$pred, nrow(train$data)) }) test_that("prediction in early-stopping xgb.cv works", { params <- c(params, list(learning_rate = 0.1, base_score = 0.5)) set.seed(11) expect_output( cv <- xgb.cv(params, dtrain, nfold = 5, nrounds = 20, early_stopping_rounds = 5, maximize = FALSE, stratified = FALSE, prediction = TRUE, verbose = TRUE) , "Stopping. 
Best iteration") expect_false(is.null(cv$early_stop$best_iteration)) expect_lt(cv$early_stop$best_iteration, 19) expect_false(is.null(cv$evaluation_log)) expect_false(is.null(cv$cv_predict$pred)) expect_length(cv$cv_predict$pred, nrow(train$data)) err_pred <- mean(sapply(cv$folds, function(f) mean(err(ltrain[f], cv$cv_predict$pred[f])))) err_log <- cv$evaluation_log[cv$early_stop$best_iteration, test_error_mean] expect_equal(err_pred, err_log, tolerance = 1e-6) err_log_last <- cv$evaluation_log[cv$niter, test_error_mean] expect_gt(abs(err_pred - err_log_last), 1e-4) }) test_that("prediction in xgb.cv for softprob works", { lb <- as.numeric(iris$Species) - 1 set.seed(11) expect_warning( { cv <- xgb.cv( data = xgb.DMatrix(as.matrix(iris[, -5]), label = lb, nthread = 1), nfold = 4, nrounds = 5, params = xgb.params( objective = "multi:softprob", num_class = 3, learning_rate = 0.5, max_depth = 3, nthread = n_threads, subsample = 0.8, min_split_loss = 2 ), verbose = 0, prediction = TRUE ) }, NA ) expect_false(is.null(cv$cv_predict$pred)) expect_equal(dim(cv$cv_predict$pred), c(nrow(iris), 3)) expect_lt(diff(range(rowSums(cv$cv_predict$pred))), 1e-6) }) test_that("prediction in xgb.cv works for multi-quantile", { data(mtcars) y <- mtcars$mpg x <- as.matrix(mtcars[, -1]) dm <- xgb.DMatrix(x, label = y, nthread = 1) cv <- xgb.cv( data = dm, params = xgb.params( objective = "reg:quantileerror", quantile_alpha = c(0.1, 0.2, 0.5, 0.8, 0.9), nthread = 1 ), nrounds = 5, nfold = 3, prediction = TRUE, verbose = 0 ) expect_equal(dim(cv$cv_predict$pred), c(nrow(x), 5)) }) test_that("prediction in xgb.cv works for multi-output", { data(mtcars) y <- mtcars$mpg x <- as.matrix(mtcars[, -1]) dm <- xgb.DMatrix(x, label = cbind(y, -y), nthread = 1) cv <- xgb.cv( data = dm, params = xgb.params( tree_method = "hist", multi_strategy = "multi_output_tree", objective = "reg:squarederror", nthread = n_threads ), nrounds = 5, nfold = 3, prediction = TRUE, verbose = 0 ) expect_equal(dim(cv$cv_predict$pred), c(nrow(x), 2)) }) test_that("prediction in xgb.cv works for multi-quantile", { data(mtcars) y <- mtcars$mpg x <- as.matrix(mtcars[, -1]) dm <- xgb.DMatrix(x, label = y, nthread = 1) cv <- xgb.cv( data = dm, params = xgb.params( objective = "reg:quantileerror", quantile_alpha = c(0.1, 0.2, 0.5, 0.8, 0.9), nthread = 1 ), nrounds = 5, nfold = 3, prediction = TRUE, verbose = 0 ) expect_equal(dim(cv$cv_predict$pred), c(nrow(x), 5)) }) test_that("prediction in xgb.cv works for multi-output", { data(mtcars) y <- mtcars$mpg x <- as.matrix(mtcars[, -1]) dm <- xgb.DMatrix(x, label = cbind(y, -y), nthread = 1) cv <- xgb.cv( data = dm, params = xgb.params( tree_method = "hist", multi_strategy = "multi_output_tree", objective = "reg:squarederror", nthread = n_threads ), nrounds = 5, nfold = 3, prediction = TRUE, verbose = 0 ) expect_equal(dim(cv$cv_predict$pred), c(nrow(x), 2)) }) xgboost-3.0.0/R-package/tests/testthat/test_config.R000066400000000000000000000012601476511272400224320ustar00rootroot00000000000000context('Test global configuration') test_that('Global configuration works with verbosity', { old_verbosity <- xgb.get.config()$verbosity for (v in c(0, 1, 2, 3)) { xgb.set.config(verbosity = v) expect_equal(xgb.get.config()$verbosity, v) } xgb.set.config(verbosity = old_verbosity) expect_equal(xgb.get.config()$verbosity, old_verbosity) }) test_that('Global configuration works with use_rmm flag', { old_use_rmm_flag <- xgb.get.config()$use_rmm for (v in c(TRUE, FALSE)) { xgb.set.config(use_rmm = v) 
expect_equal(xgb.get.config()$use_rmm, v) } xgb.set.config(use_rmm = old_use_rmm_flag) expect_equal(xgb.get.config()$use_rmm, old_use_rmm_flag) }) xgboost-3.0.0/R-package/tests/testthat/test_custom_objective.R000066400000000000000000000130771476511272400245420ustar00rootroot00000000000000context('Test models with custom objective') set.seed(1994) n_threads <- 2 data(agaricus.train, package = 'xgboost') data(agaricus.test, package = 'xgboost') dtrain <- xgb.DMatrix( agaricus.train$data, label = agaricus.train$label, nthread = n_threads ) dtest <- xgb.DMatrix( agaricus.test$data, label = agaricus.test$label, nthread = n_threads ) evals <- list(eval = dtest, train = dtrain) logregobj <- function(preds, dtrain) { labels <- getinfo(dtrain, "label") preds <- 1 / (1 + exp(-preds)) grad <- preds - labels hess <- preds * (1 - preds) return(list(grad = grad, hess = hess)) } evalerror <- function(preds, dtrain) { labels <- getinfo(dtrain, "label") err <- as.numeric(sum(labels != (preds > 0.5))) / length(labels) return(list(metric = "error", value = err)) } param <- list(max_depth = 2, learning_rate = 1, nthread = n_threads, objective = logregobj, eval_metric = evalerror) num_round <- 2 test_that("custom objective works", { bst <- xgb.train(param, dtrain, num_round, evals, verbose = 0) expect_equal(class(bst), "xgb.Booster") expect_false(is.null(attributes(bst)$evaluation_log)) expect_false(is.null(attributes(bst)$evaluation_log$eval_error)) expect_lt(attributes(bst)$evaluation_log[num_round, eval_error], 0.03) }) test_that("custom objective in CV works", { cv <- xgb.cv(param, dtrain, num_round, nfold = 10, verbose = FALSE, stratified = FALSE) expect_false(is.null(cv$evaluation_log)) expect_equal(dim(cv$evaluation_log), c(2, 5)) expect_lt(cv$evaluation_log[num_round, test_error_mean], 0.03) }) test_that("custom objective with early stop works", { bst <- xgb.train(param, dtrain, 10, evals, verbose = 0) expect_equal(class(bst), "xgb.Booster") train_log <- attributes(bst)$evaluation_log$train_error expect_true(all(diff(train_log) <= 0)) }) test_that("custom objective using DMatrix attr works", { attr(dtrain, 'label') <- getinfo(dtrain, 'label') logregobjattr <- function(preds, dtrain) { labels <- attr(dtrain, 'label') preds <- 1 / (1 + exp(-preds)) grad <- preds - labels hess <- preds * (1 - preds) return(list(grad = grad, hess = hess)) } param$objective <- logregobjattr bst <- xgb.train(param, dtrain, num_round, evals, verbose = 0) expect_equal(class(bst), "xgb.Booster") }) test_that("custom objective with multi-class shape", { data <- as.matrix(iris[, -5]) label <- as.numeric(iris$Species) - 1 dtrain <- xgb.DMatrix(data = data, label = label, nthread = n_threads) n_classes <- 3 fake_softprob <- function(preds, dtrain) { expect_true(all(matrix(preds) == 0.5)) ## use numeric vector here to test compatibility with XGBoost < 2.1 grad <- rnorm(length(as.matrix(preds))) expect_equal(dim(data)[1] * n_classes, dim(as.matrix(preds))[1] * n_classes) hess <- rnorm(length(as.matrix(preds))) return(list(grad = grad, hess = hess)) } fake_merror <- function(preds, dtrain) { expect_equal(dim(data)[1] * n_classes, dim(as.matrix(preds))[1]) } param$objective <- fake_softprob param$eval_metric <- fake_merror expect_warning({ bst <- xgb.train(c(param, list(num_class = n_classes)), dtrain, nrounds = 1) }) }) softmax <- function(values) { values <- as.numeric(values) exps <- exp(values) den <- sum(exps) return(exps / den) } softprob <- function(predt, dtrain) { y <- getinfo(dtrain, "label") n_samples <- dim(predt)[1] 
n_classes <- dim(predt)[2] grad <- matrix(nrow = n_samples, ncol = n_classes) hess <- matrix(nrow = n_samples, ncol = n_classes) for (i in seq_len(n_samples)) { t <- y[i] p <- softmax(predt[i, ]) for (c in seq_len(n_classes)) { g <- if (c - 1 == t) { p[c] - 1.0 } else { p[c] } h <- max((2.0 * p[c] * (1.0 - p[c])), 1e-6) grad[i, c] <- g hess[i, c] <- h } } return(list(grad = grad, hess = hess)) } test_that("custom objective with multi-class works", { data <- as.matrix(iris[, -5]) label <- as.numeric(iris$Species) - 1 dtrain <- xgb.DMatrix(data = data, label = label, nthread = 1) param$num_class <- 3 param$objective <- softprob param$eval_metric <- "merror" param$base_score <- 0.5 custom_bst <- xgb.train(param, dtrain, 2) custom_predt <- predict(custom_bst, dtrain) param$objective <- "multi:softmax" builtin_bst <- xgb.train(param, dtrain, 2) builtin_predt <- predict(builtin_bst, dtrain) expect_equal(custom_predt, builtin_predt) }) test_that("custom metric with multi-target passes reshaped data to feval", { x <- as.matrix(iris[, -5]) y <- as.numeric(iris$Species) - 1 dtrain <- xgb.DMatrix(data = x, label = y, nthread = 1) multinomial.ll <- function(predt, dtrain) { expect_equal(dim(predt), c(nrow(iris), 3L)) y <- getinfo(dtrain, "label") probs <- apply(predt, 1, softmax) |> t() probs.y <- probs[cbind(seq(1L, nrow(predt)), y + 1L)] ll <- sum(log(probs.y)) return(list(metric = "multinomial-ll", value = -ll)) } model <- xgb.train( params = list( objective = "multi:softmax", num_class = 3L, base_score = 0, disable_default_eval_metric = TRUE, eval_metric = multinomial.ll, max_depth = 123, seed = 123 ), data = dtrain, nrounds = 2L, evals = list(Train = dtrain), verbose = 0 ) model <- xgb.train( params = list( objective = "multi:softmax", num_class = 3L, base_score = 0, disable_default_eval_metric = TRUE, max_depth = 123, seed = 123 ), data = dtrain, nrounds = 2L, evals = list(Train = dtrain), custom_metric = multinomial.ll, verbose = 0 ) }) xgboost-3.0.0/R-package/tests/testthat/test_dmatrix.R000066400000000000000000000533361476511272400226500ustar00rootroot00000000000000library(Matrix) context("testing xgb.DMatrix functionality") data(agaricus.test, package = "xgboost") test_data <- agaricus.test$data[1:100, ] test_label <- agaricus.test$label[1:100] n_threads <- 2 test_that("xgb.DMatrix: basic construction", { # from sparse matrix dtest1 <- xgb.DMatrix(test_data, label = test_label, nthread = n_threads) # from dense matrix dtest2 <- xgb.DMatrix(as.matrix(test_data), label = test_label, nthread = n_threads) expect_equal(getinfo(dtest1, "label"), getinfo(dtest2, "label")) expect_equal(dim(dtest1), dim(dtest2)) # from dense integer matrix int_data <- as.matrix(test_data) storage.mode(int_data) <- "integer" dtest3 <- xgb.DMatrix(int_data, label = test_label, nthread = n_threads) expect_equal(dim(dtest1), dim(dtest3)) n_samples <- 100 X <- cbind( x1 = sample(x = 4, size = n_samples, replace = TRUE), x2 = sample(x = 4, size = n_samples, replace = TRUE), x3 = sample(x = 4, size = n_samples, replace = TRUE) ) X <- matrix(X, nrow = n_samples) y <- rbinom(n = n_samples, size = 1, prob = 1 / 2) fd <- xgb.DMatrix(X, label = y, missing = 1, nthread = n_threads) dgc <- as(X, "dgCMatrix") fdgc <- xgb.DMatrix(dgc, label = y, missing = 1.0, nthread = n_threads) dgr <- as(X, "dgRMatrix") fdgr <- xgb.DMatrix(dgr, label = y, missing = 1, nthread = n_threads) params <- list(tree_method = "hist", nthread = n_threads) bst_fd <- xgb.train( params, nrounds = 8, fd, evals = list(train = fd), verbose = 0 ) bst_dgr <- 
xgb.train( params, nrounds = 8, fdgr, evals = list(train = fdgr), verbose = 0 ) bst_dgc <- xgb.train( params, nrounds = 8, fdgc, evals = list(train = fdgc), verbose = 0 ) raw_fd <- xgb.save.raw(bst_fd, raw_format = "ubj") raw_dgr <- xgb.save.raw(bst_dgr, raw_format = "ubj") raw_dgc <- xgb.save.raw(bst_dgc, raw_format = "ubj") expect_equal(raw_fd, raw_dgr) expect_equal(raw_fd, raw_dgc) }) test_that("xgb.DMatrix: NA", { n_samples <- 3 x <- cbind( x1 = sample(x = 4, size = n_samples, replace = TRUE), x2 = sample(x = 4, size = n_samples, replace = TRUE) ) x[1, "x1"] <- NA m <- xgb.DMatrix(x, nthread = n_threads) fname_int <- file.path(tempdir(), "int.dmatrix") xgb.DMatrix.save(m, fname_int) x <- matrix(as.numeric(x), nrow = n_samples, ncol = 2) colnames(x) <- c("x1", "x2") m <- xgb.DMatrix(x, nthread = n_threads) fname_float <- file.path(tempdir(), "float.dmatrix") xgb.DMatrix.save(m, fname_float) iconn <- file(fname_int, "rb") fconn <- file(fname_float, "rb") expect_equal(file.size(fname_int), file.size(fname_float)) bytes <- file.size(fname_int) idmatrix <- readBin(iconn, "raw", n = bytes) fdmatrix <- readBin(fconn, "raw", n = bytes) expect_equal(length(idmatrix), length(fdmatrix)) expect_equal(idmatrix, fdmatrix) close(iconn) close(fconn) file.remove(fname_int) file.remove(fname_float) }) test_that("xgb.DMatrix: saving, loading", { # save to a local file dtest1 <- xgb.DMatrix(test_data, label = test_label, nthread = n_threads) tmp_file <- tempfile('xgb.DMatrix_') on.exit(unlink(tmp_file)) expect_true(xgb.DMatrix.save(dtest1, tmp_file)) # read from a local file xgb.set.config(verbosity = 2) expect_output(dtest3 <- xgb.DMatrix(tmp_file, nthread = 1), "entries loaded from") xgb.set.config(verbosity = 1) expect_output(dtest3 <- xgb.DMatrix(tmp_file, nthread = 1), NA) unlink(tmp_file) expect_equal(getinfo(dtest1, 'label'), getinfo(dtest3, 'label')) # from a libsvm text file tmp <- c("0 1:1 2:1", "1 3:1", "0 1:1") tmp_file <- tempfile(fileext = ".libsvm") writeLines(tmp, tmp_file) expect_true(file.exists(tmp_file)) dtest4 <- xgb.DMatrix( paste(tmp_file, "?format=libsvm", sep = ""), silent = TRUE, nthread = n_threads ) expect_equal(dim(dtest4), c(3, 4)) expect_equal(getinfo(dtest4, 'label'), c(0, 1, 0)) # check that feature info is saved data(agaricus.train, package = 'xgboost') dtrain <- xgb.DMatrix( data = agaricus.train$data, label = agaricus.train$label, nthread = n_threads ) cnames <- colnames(dtrain) expect_equal(length(cnames), 126) tmp_file <- tempfile('xgb.DMatrix_') xgb.DMatrix.save(dtrain, tmp_file) xgb.set.config(verbosity = 0) dtrain <- xgb.DMatrix(tmp_file, nthread = 1) expect_equal(colnames(dtrain), cnames) ft <- rep(c("c", "q"), each = length(cnames) / 2) setinfo(dtrain, "feature_type", ft) expect_equal(ft, getinfo(dtrain, "feature_type")) }) test_that("xgb.DMatrix: getinfo & setinfo", { dtest <- xgb.DMatrix(test_data, nthread = n_threads) expect_true(setinfo(dtest, 'label', test_label)) labels <- getinfo(dtest, 'label') expect_equal(test_label, getinfo(dtest, 'label')) expect_true(setinfo(dtest, 'label_lower_bound', test_label)) expect_equal(test_label, getinfo(dtest, 'label_lower_bound')) expect_true(setinfo(dtest, 'label_upper_bound', test_label)) expect_equal(test_label, getinfo(dtest, 'label_upper_bound')) expect_true(length(getinfo(dtest, 'weight')) == 0) expect_true(length(getinfo(dtest, 'base_margin')) == 0) expect_true(setinfo(dtest, 'weight', test_label)) expect_true(setinfo(dtest, 'base_margin', test_label)) expect_true(setinfo(dtest, 'group', c(50, 50))) 
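  # note: 'group' entries are query-group sizes and must sum to the number of
  # rows (here 50 + 50 = 100), which is why the full label vector is rejected
  # just below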
expect_error(setinfo(dtest, 'group', test_label)) # providing character values will give an error expect_error(setinfo(dtest, 'weight', rep('a', nrow(test_data)))) # any other label should error expect_error(setinfo(dtest, 'asdf', test_label)) }) test_that("xgb.DMatrix: slice, dim", { dtest <- xgb.DMatrix(test_data, label = test_label, nthread = n_threads) expect_equal(dim(dtest), dim(test_data)) dsub1 <- xgb.slice.DMatrix(dtest, 1:42) expect_equal(nrow(dsub1), 42) expect_equal(ncol(dsub1), ncol(test_data)) dsub2 <- dtest[1:42, ] expect_equal(dim(dtest), dim(test_data)) expect_equal(getinfo(dsub1, 'label'), getinfo(dsub2, 'label')) }) test_that("xgb.DMatrix: slice, trailing empty rows", { data(agaricus.train, package = 'xgboost') train_data <- agaricus.train$data train_label <- agaricus.train$label dtrain <- xgb.DMatrix( data = train_data, label = train_label, nthread = n_threads ) xgb.slice.DMatrix(dtrain, 6513L) train_data[6513, ] <- 0 dtrain <- xgb.DMatrix( data = train_data, label = train_label, nthread = n_threads ) xgb.slice.DMatrix(dtrain, 6513L) expect_equal(nrow(dtrain), 6513) }) test_that("xgb.DMatrix: colnames", { dtest <- xgb.DMatrix(test_data, label = test_label, nthread = n_threads) expect_equal(colnames(dtest), colnames(test_data)) expect_error(colnames(dtest) <- 'asdf') new_names <- make.names(seq_len(ncol(test_data))) expect_silent(colnames(dtest) <- new_names) expect_equal(colnames(dtest), new_names) expect_silent(colnames(dtest) <- NULL) expect_null(colnames(dtest)) }) test_that("xgb.DMatrix: nrow is correct for a very sparse matrix", { set.seed(123) nr <- 1000 x <- Matrix::rsparsematrix(nr, 100, density = 0.0005) # we want it very sparse, so that last rows are empty expect_lt(max(x@i), nr) dtest <- xgb.DMatrix(x, nthread = n_threads) expect_equal(dim(dtest), dim(x)) }) test_that("xgb.DMatrix: print", { data(agaricus.train, package = 'xgboost') # core DMatrix with just data and labels dtrain <- xgb.DMatrix( data = agaricus.train$data, label = agaricus.train$label, nthread = n_threads ) txt <- capture.output({ print(dtrain) }) expect_equal(txt, "xgb.DMatrix dim: 6513 x 126 info: label colnames: yes") # verbose=TRUE prints feature names txt <- capture.output({ print(dtrain, verbose = TRUE) }) expect_equal(txt[[1L]], "xgb.DMatrix dim: 6513 x 126 info: label colnames:") expect_equal(txt[[2L]], sprintf("'%s'", paste(colnames(dtrain), collapse = "','"))) # DMatrix with weights and base_margin dtrain <- xgb.DMatrix( data = agaricus.train$data, label = agaricus.train$label, weight = seq_along(agaricus.train$label), base_margin = agaricus.train$label, nthread = n_threads ) txt <- capture.output({ print(dtrain) }) expect_equal(txt, "xgb.DMatrix dim: 6513 x 126 info: base_margin, label, weight colnames: yes") # DMatrix with just features dtrain <- xgb.DMatrix( data = agaricus.train$data, nthread = n_threads ) txt <- capture.output({ print(dtrain) }) expect_equal(txt, "xgb.DMatrix dim: 6513 x 126 info: NA colnames: yes") # DMatrix with no column names data_no_colnames <- agaricus.train$data colnames(data_no_colnames) <- NULL dtrain <- xgb.DMatrix( data = data_no_colnames, nthread = n_threads ) txt <- capture.output({ print(dtrain) }) expect_equal(txt, "xgb.DMatrix dim: 6513 x 126 info: NA colnames: no") }) test_that("xgb.DMatrix: Inf as missing", { x_inf <- matrix(as.numeric(1:10), nrow = 5) x_inf[2, 1] <- Inf x_nan <- x_inf x_nan[2, 1] <- NA_real_ m_inf <- xgb.DMatrix(x_inf, nthread = n_threads, missing = Inf) fname_inf <- file.path(tempdir(), "inf.dmatrix") xgb.DMatrix.save(m_inf, 
fname_inf)
  m_nan <- xgb.DMatrix(x_nan, nthread = n_threads, missing = NA_real_)
  fname_nan <- file.path(tempdir(), "nan.dmatrix")
  xgb.DMatrix.save(m_nan, fname_nan)
  infconn <- file(fname_inf, "rb")
  nanconn <- file(fname_nan, "rb")
  expect_equal(file.size(fname_inf), file.size(fname_nan))
  bytes <- file.size(fname_inf)
  infdmatrix <- readBin(infconn, "raw", n = bytes)
  nandmatrix <- readBin(nanconn, "raw", n = bytes)
  expect_equal(length(infdmatrix), length(nandmatrix))
  expect_equal(infdmatrix, nandmatrix)
  close(infconn)
  close(nanconn)
  file.remove(fname_inf)
  file.remove(fname_nan)
})

test_that("xgb.DMatrix: missing in CSR", {
  x_dense <- matrix(as.numeric(1:10), nrow = 5)
  x_dense[2, 1] <- NA_real_
  x_csr <- as(x_dense, "RsparseMatrix")
  m_dense <- xgb.DMatrix(x_dense, nthread = n_threads, missing = NA_real_)
  # save under tempdir() so the test does not write to the working directory
  fname_dense <- file.path(tempdir(), "dense.dmatrix")
  xgb.DMatrix.save(m_dense, fname_dense)
  m_csr <- xgb.DMatrix(x_csr, nthread = n_threads, missing = NA)
  fname_csr <- file.path(tempdir(), "csr.dmatrix")
  xgb.DMatrix.save(m_csr, fname_csr)
  denseconn <- file(fname_dense, "rb")
  csrconn <- file(fname_csr, "rb")
  expect_equal(file.size(fname_dense), file.size(fname_csr))
  bytes <- file.size(fname_dense)
  densedmatrix <- readBin(denseconn, "raw", n = bytes)
  csrmatrix <- readBin(csrconn, "raw", n = bytes)
  expect_equal(length(densedmatrix), length(csrmatrix))
  expect_equal(densedmatrix, csrmatrix)
  close(denseconn)
  close(csrconn)
  file.remove(fname_dense)
  file.remove(fname_csr)
})

test_that("xgb.DMatrix: error on three-dimensional array", {
  set.seed(123)
  x <- matrix(rnorm(500), nrow = 50)
  y <- rnorm(400)
  dim(y) <- c(50, 4, 2)
  expect_error(xgb.DMatrix(data = x, label = y))
})

test_that("xgb.DMatrix: can get group for both 'qid' and 'group' constructors", {
  set.seed(123)
  x <- matrix(rnorm(1000), nrow = 100)
  group <- c(20, 20, 60)
  qid <- c(rep(1, 20), rep(2, 20), rep(3, 60))
  gr_mat <- xgb.DMatrix(x, group = group, nthread = 1)
  qid_mat <- xgb.DMatrix(x, qid = qid, nthread = 1)
  info_gr <- getinfo(gr_mat, "group")
  info_qid <- getinfo(qid_mat, "group")
  expect_equal(info_gr, info_qid)
  expected_gr <- c(0, 20, 40, 100)
  expect_equal(info_gr, expected_gr)
})

test_that("xgb.DMatrix: data.frame", {
  df <- data.frame(
    a = (1:4) / 10,
    num = c(1, NA, 3, 4),
    as.int = as.integer(c(1, 2, 3, 4)),
    lo = c(TRUE, FALSE, NA, TRUE),
    str.fac = c("a", "b", "d", "c"),
    as.fac = as.factor(c(3, 5, 8, 11)),
    stringsAsFactors = TRUE
  )
  m <- xgb.DMatrix(df, nthread = 1)
  expect_equal(colnames(m), colnames(df))
  expect_equal(
    getinfo(m, "feature_type"), c("float", "float", "int", "i", "c", "c")
  )
  df <- data.frame(
    missing = c("a", "b", "d", NA),
    valid = c("a", "b", "d", "c"),
    stringsAsFactors = TRUE
  )
  m <- xgb.DMatrix(df, nthread = 1)
  expect_equal(getinfo(m, "feature_type"), c("c", "c"))
})

test_that("xgb.DMatrix: can take multi-dimensional 'base_margin'", {
  set.seed(123)
  x <- matrix(rnorm(100 * 10), nrow = 100)
  y <- matrix(rnorm(100 * 2), nrow = 100)
  b <- matrix(rnorm(100 * 2), nrow = 100)
  model <- xgb.train(
    data = xgb.DMatrix(data = x, label = y, nthread = n_threads),
    params = list(
      objective = "reg:squarederror",
      tree_method = "hist",
      multi_strategy = "multi_output_tree",
      base_score = 0,
      nthread = n_threads
    ),
    nrounds = 1
  )
  pred_only_x <- predict(model, x)
  pred_w_base <- predict(
    model, xgb.DMatrix(data = x, base_margin = b, nthread = 1)
  )
  expect_equal(pred_only_x, pred_w_base - b, tolerance = 1e-5)
})

test_that("xgb.DMatrix: QuantileDMatrix produces same result as DMatrix", {
  data(mtcars)
  y <- mtcars[, 1]
  x <- mtcars[, -1]
  cast_matrix <- function(x) as.matrix(x)
  cast_df <- function(x) as.data.frame(x)
  cast_csr
<- function(x) as(as.matrix(x), "RsparseMatrix") casting_funs <- list(cast_matrix, cast_df, cast_csr) for (casting_fun in casting_funs) { qdm <- xgb.QuantileDMatrix( data = casting_fun(x), label = y, nthread = n_threads, max_bin = 5 ) params <- list( tree_method = "hist", objective = "reg:squarederror", nthread = n_threads, max_bin = 5 ) model_qdm <- xgb.train( params = params, data = qdm, nrounds = 2 ) pred_qdm <- predict(model_qdm, x) dm <- xgb.DMatrix( data = x, label = y, nthread = n_threads ) model_dm <- xgb.train( params = params, data = dm, nrounds = 2 ) pred_dm <- predict(model_dm, x) expect_equal(pred_qdm, pred_dm) } }) test_that("xgb.DMatrix: QuantileDMatrix is not accepted by exact method", { data(mtcars) y <- mtcars[, 1] x <- as.matrix(mtcars[, -1]) qdm <- xgb.QuantileDMatrix( data = x, label = y, nthread = n_threads ) params <- list( tree_method = "exact", objective = "reg:squarederror", nthread = n_threads ) expect_error({ xgb.train( params = params, data = qdm, nrounds = 2 ) }) }) test_that("xgb.DMatrix: ExtMemDMatrix produces the same results as regular DMatrix", { data(mtcars) y <- mtcars[, 1] x <- as.matrix(mtcars[, -1]) set.seed(123) params <- list( objective = "reg:squarederror", nthread = n_threads ) model <- xgb.train( data = xgb.DMatrix(x, label = y, nthread = 1), params = params, nrounds = 5 ) pred <- predict(model, x) pred <- unname(pred) iterator_env <- as.environment( list( iter = 0, x = mtcars[, -1], y = mtcars[, 1] ) ) iterator_next <- function(iterator_env) { curr_iter <- iterator_env[["iter"]] if (curr_iter >= 2) { return(NULL) } if (curr_iter == 0) { x_batch <- iterator_env[["x"]][1:16, ] y_batch <- iterator_env[["y"]][1:16] } else { x_batch <- iterator_env[["x"]][17:32, ] y_batch <- iterator_env[["y"]][17:32] } on.exit({ iterator_env[["iter"]] <- curr_iter + 1 }) return(xgb.DataBatch(data = x_batch, label = y_batch)) } iterator_reset <- function(iterator_env) { iterator_env[["iter"]] <- 0 } data_iterator <- xgb.DataIter( env = iterator_env, f_next = iterator_next, f_reset = iterator_reset ) cache_prefix <- tempdir() edm <- xgb.ExtMemDMatrix(data_iterator, cache_prefix, nthread = 1) expect_true(inherits(edm, "xgb.ExtMemDMatrix")) expect_true(inherits(edm, "xgb.DMatrix")) set.seed(123) model_ext <- xgb.train( data = edm, params = params, nrounds = 5 ) pred_model1_edm <- predict(model, edm) pred_model2_mat <- predict(model_ext, x) |> unname() pred_model2_edm <- predict(model_ext, edm) expect_equal(pred_model1_edm, pred) expect_equal(pred_model2_mat, pred) expect_equal(pred_model2_edm, pred) }) test_that("xgb.DMatrix: External QDM produces same results as regular QDM", { data(mtcars) y <- mtcars[, 1] x <- as.matrix(mtcars[, -1]) set.seed(123) params <- list( objective = "reg:squarederror", nthread = n_threads, max_bin = 3 ) model <- xgb.train( data = xgb.QuantileDMatrix( x, label = y, nthread = 1, max_bin = 3 ), params = params, nrounds = 5 ) pred <- predict(model, x) pred <- unname(pred) iterator_env <- as.environment( list( iter = 0, x = mtcars[, -1], y = mtcars[, 1] ) ) iterator_next <- function(iterator_env) { curr_iter <- iterator_env[["iter"]] if (curr_iter >= 2) { return(NULL) } if (curr_iter == 0) { x_batch <- iterator_env[["x"]][1:16, ] y_batch <- iterator_env[["y"]][1:16] } else { x_batch <- iterator_env[["x"]][17:32, ] y_batch <- iterator_env[["y"]][17:32] } on.exit({ iterator_env[["iter"]] <- curr_iter + 1 }) return(xgb.DataBatch(data = x_batch, label = y_batch)) } iterator_reset <- function(iterator_env) { iterator_env[["iter"]] <- 0 } 
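  # xgb.DataIter wires up the callback contract used above: f_next must return
  # an xgb.DataBatch for each batch and NULL once the stream is exhausted, and
  # f_reset rewinds the stream so XGBoost can make multiple passes over it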
data_iterator <- xgb.DataIter( env = iterator_env, f_next = iterator_next, f_reset = iterator_reset ) cache_prefix <- tempdir() qdm <- xgb.QuantileDMatrix.from_iterator( data_iterator, max_bin = 3, nthread = 1 ) expect_true(inherits(qdm, "xgb.QuantileDMatrix")) expect_true(inherits(qdm, "xgb.DMatrix")) set.seed(123) model_ext <- xgb.train( data = qdm, params = params, nrounds = 5 ) pred_model1_qdm <- predict(model, qdm) pred_model2_mat <- predict(model_ext, x) |> unname() pred_model2_qdm <- predict(model_ext, qdm) expect_equal(pred_model1_qdm, pred) expect_equal(pred_model2_mat, pred) expect_equal(pred_model2_qdm, pred) }) test_that("xgb.DMatrix: R errors thrown on DataIterator are thrown back to the user", { data(mtcars) iterator_env <- as.environment( list( iter = 0, x = mtcars[, -1], y = mtcars[, 1] ) ) iterator_next <- function(iterator_env) { curr_iter <- iterator_env[["iter"]] if (curr_iter >= 2) { return(0) } if (curr_iter == 0) { x_batch <- iterator_env[["x"]][1:16, ] y_batch <- iterator_env[["y"]][1:16] } else { stop("custom error") } on.exit({ iterator_env[["iter"]] <- curr_iter + 1 }) return(xgb.DataBatch(data = x_batch, label = y_batch)) } iterator_reset <- function(iterator_env) { iterator_env[["iter"]] <- 0 } data_iterator <- xgb.DataIter( env = iterator_env, f_next = iterator_next, f_reset = iterator_reset ) expect_error( {xgb.ExtMemDMatrix(data_iterator, nthread = 1)}, "custom error" ) }) test_that("xgb.DMatrix: number of non-missing matches data", { x <- matrix(1:10, nrow = 5) dm1 <- xgb.DMatrix(x, nthread = 1) expect_equal(xgb.get.DMatrix.num.non.missing(dm1), 10) x[2, 2] <- NA x[4, 1] <- NA dm2 <- xgb.DMatrix(x, nthread = 1) expect_equal(xgb.get.DMatrix.num.non.missing(dm2), 8) }) test_that("xgb.DMatrix: retrieving data as CSR", { data(mtcars) dm <- xgb.DMatrix(as.matrix(mtcars), nthread = 1) csr <- xgb.get.DMatrix.data(dm) expect_equal(dim(csr), dim(mtcars)) expect_equal(colnames(csr), colnames(mtcars)) expect_equal(unname(as.matrix(csr)), unname(as.matrix(mtcars)), tolerance = 1e-6) }) test_that("xgb.DMatrix: quantile cuts look correct", { data(mtcars) y <- mtcars$mpg x <- as.matrix(mtcars[, -1]) dm <- xgb.DMatrix(x, label = y, nthread = 1) model <- xgb.train( data = dm, params = list( tree_method = "hist", max_bin = 8, nthread = 1 ), nrounds = 3 ) qcut_list <- xgb.get.DMatrix.qcut(dm, "list") qcut_arrays <- xgb.get.DMatrix.qcut(dm, "arrays") expect_equal(length(qcut_arrays), 2) expect_equal(names(qcut_arrays), c("indptr", "data")) expect_equal(length(qcut_arrays$indptr), ncol(x) + 1) expect_true(min(diff(qcut_arrays$indptr)) > 0) col_min <- apply(x, 2, min) col_max <- apply(x, 2, max) expect_equal(length(qcut_list), ncol(x)) expect_equal(names(qcut_list), colnames(x)) lapply( seq(1, ncol(x)), function(col) { cuts <- qcut_list[[col]] expect_true(min(diff(cuts)) > 0) expect_true(col_min[col] > cuts[1]) expect_true(col_max[col] < cuts[length(cuts)]) expect_true(length(cuts) <= 9) } ) }) test_that("xgb.DMatrix: slicing keeps field indicators", { data(mtcars) x <- as.matrix(mtcars[, -1]) y <- mtcars[, 1] dm <- xgb.DMatrix( data = x, label_lower_bound = -y, label_upper_bound = y, nthread = 1 ) idx_take <- seq(1, 5) dm_slice <- xgb.slice.DMatrix(dm, idx_take) expect_true(xgb.DMatrix.hasinfo(dm_slice, "label_lower_bound")) expect_true(xgb.DMatrix.hasinfo(dm_slice, "label_upper_bound")) expect_false(xgb.DMatrix.hasinfo(dm_slice, "label")) expect_equal(getinfo(dm_slice, "label_lower_bound"), -y[idx_take], tolerance = 1e-6) expect_equal(getinfo(dm_slice, "label_upper_bound"), 
y[idx_take], tolerance = 1e-6) }) test_that("xgb.DMatrix: can slice with groups", { data(iris) x <- as.matrix(iris[, -5]) set.seed(123) y <- sample(3, size = nrow(x), replace = TRUE) group <- c(50, 50, 50) dm <- xgb.DMatrix(x, label = y, group = group, nthread = 1) idx_take <- seq(1, 50) dm_slice <- xgb.slice.DMatrix(dm, idx_take, allow_groups = TRUE) expect_true(xgb.DMatrix.hasinfo(dm_slice, "label")) expect_false(xgb.DMatrix.hasinfo(dm_slice, "group")) expect_false(xgb.DMatrix.hasinfo(dm_slice, "qid")) expect_null(getinfo(dm_slice, "group")) expect_equal(getinfo(dm_slice, "label"), y[idx_take], tolerance = 1e-6) }) test_that("xgb.DMatrix: can read CSV", { txt <- paste( "1,2,3", "-1,3,2", sep = "\n" ) fname <- file.path(tempdir(), "data.csv") writeChar(txt, fname) uri <- paste0(fname, "?format=csv&label_column=0") dm <- xgb.DMatrix(uri, silent = TRUE, nthread = 1) expect_equal(getinfo(dm, "label"), c(1, -1)) expect_equal( as.matrix(xgb.get.DMatrix.data(dm)), matrix(c(2, 3, 3, 2), nrow = 2, byrow = TRUE) ) }) xgboost-3.0.0/R-package/tests/testthat/test_feature_weights.R000066400000000000000000000015471476511272400243620ustar00rootroot00000000000000context("feature weights") n_threads <- 2 test_that("training with feature weights works", { nrows <- 1000 ncols <- 9 set.seed(2022) x <- matrix(rnorm(nrows * ncols), nrow = nrows) y <- rowSums(x) weights <- seq(from = 1, to = ncols) test <- function(tm) { names <- paste0("f", 1:ncols) xy <- xgb.DMatrix( data = x, label = y, feature_weights = weights, nthread = n_threads ) params <- list( colsample_bynode = 0.4, tree_method = tm, nthread = n_threads ) model <- xgb.train(params = params, data = xy, nrounds = 32) importance <- xgb.importance(model = model, feature_names = names) expect_equal(dim(importance), c(ncols, 4)) importance <- importance[order(importance$Feature)] expect_lt(importance[1, Frequency], importance[9, Frequency]) } for (tm in c("hist", "approx")) { test(tm) } }) xgboost-3.0.0/R-package/tests/testthat/test_glm.R000066400000000000000000000064651476511272400217600ustar00rootroot00000000000000context('Test generalized linear models') n_threads <- 2 test_that("gblinear works", { data(agaricus.train, package = 'xgboost') data(agaricus.test, package = 'xgboost') dtrain <- xgb.DMatrix( agaricus.train$data, label = agaricus.train$label, nthread = n_threads ) dtest <- xgb.DMatrix( agaricus.test$data, label = agaricus.test$label, nthread = n_threads ) param <- list(objective = "binary:logistic", eval_metric = "error", booster = "gblinear", nthread = n_threads, learning_rate = 0.8, reg_alpha = 0.0001, reg_lambda = 0.0001) evals <- list(eval = dtest, train = dtrain) n <- 5 # iterations ERR_UL <- 0.005 # upper limit for the test set error VERB <- 0 # chatterbox switch param$updater <- 'shotgun' bst <- xgb.train(c(param, list(feature_selector = 'shuffle')), dtrain, n, evals, verbose = VERB) ypred <- predict(bst, dtest) expect_equal(length(getinfo(dtest, 'label')), 1611) expect_lt(attributes(bst)$evaluation_log$eval_error[n], ERR_UL) bst <- xgb.train(c(param, list(feature_selector = 'cyclic')), dtrain, n, evals, verbose = VERB, callbacks = list(xgb.cb.gblinear.history())) expect_lt(attributes(bst)$evaluation_log$eval_error[n], ERR_UL) h <- xgb.gblinear.history(bst) expect_equal(dim(h), c(n, ncol(dtrain) + 1)) expect_is(h, "matrix") param$updater <- 'coord_descent' bst <- xgb.train(c(param, list(feature_selector = 'cyclic')), dtrain, n, evals, verbose = VERB) expect_lt(attributes(bst)$evaluation_log$eval_error[n], ERR_UL) bst <- 
xgb.train(c(param, list(feature_selector = 'shuffle')), dtrain, n, evals, verbose = VERB) expect_lt(attributes(bst)$evaluation_log$eval_error[n], ERR_UL) bst <- xgb.train(c(param, list(feature_selector = 'greedy')), dtrain, 2, evals, verbose = VERB) expect_lt(attributes(bst)$evaluation_log$eval_error[2], ERR_UL) bst <- xgb.train(c(param, list(feature_selector = 'thrifty', top_k = 50)), dtrain, n, evals, verbose = VERB, callbacks = list(xgb.cb.gblinear.history(sparse = TRUE))) expect_lt(attributes(bst)$evaluation_log$eval_error[n], ERR_UL) h <- xgb.gblinear.history(bst) expect_equal(dim(h), c(n, ncol(dtrain) + 1)) expect_s4_class(h, "dgCMatrix") }) test_that("gblinear early stopping works", { data(agaricus.train, package = 'xgboost') data(agaricus.test, package = 'xgboost') dtrain <- xgb.DMatrix( agaricus.train$data, label = agaricus.train$label, nthread = n_threads ) dtest <- xgb.DMatrix( agaricus.test$data, label = agaricus.test$label, nthread = n_threads ) param <- xgb.params( objective = "binary:logistic", eval_metric = "error", booster = "gblinear", nthread = n_threads, learning_rate = 0.8, reg_alpha = 0.0001, reg_lambda = 0.0001, updater = "coord_descent" ) es_round <- 1 n <- 10 booster <- xgb.train( param, dtrain, nrounds = n, evals = list(eval = dtest, train = dtrain), early_stopping_rounds = es_round, verbose = 0 ) expect_equal(xgb.attr(booster, "best_iteration"), 4) predt_es <- predict(booster, dtrain) n <- xgb.attr(booster, "best_iteration") + es_round + 1 booster <- xgb.train( param, dtrain, nrounds = n, evals = list(eval = dtest, train = dtrain), early_stopping_rounds = es_round, verbose = 0 ) predt <- predict(booster, dtrain) expect_equal(predt_es, predt) }) xgboost-3.0.0/R-package/tests/testthat/test_helpers.R000066400000000000000000000672121476511272400226400ustar00rootroot00000000000000context('Test helper functions') VCD_AVAILABLE <- requireNamespace("vcd", quietly = TRUE) .skip_if_vcd_not_available <- function() { if (!VCD_AVAILABLE) { testthat::skip("Optional testing dependency 'vcd' not found.") } } float_tolerance <- 5e-6 # disable some tests for 32-bit environment flag_32bit <- .Machine$sizeof.pointer != 8 set.seed(1982) nrounds <- 12 if (isTRUE(VCD_AVAILABLE)) { data(Arthritis, package = "vcd") df <- data.table::data.table(Arthritis, keep.rownames = FALSE) df[, AgeDiscret := as.factor(round(Age / 10, 0))] df[, AgeCat := as.factor(ifelse(Age > 30, "Old", "Young"))] df[, ID := NULL] sparse_matrix <- Matrix::sparse.model.matrix(Improved~.-1, data = df) # nolint label <- df[, ifelse(Improved == "Marked", 1, 0)] # binary bst.Tree <- xgb.train( data = xgb.DMatrix(sparse_matrix, label = label, nthread = 1), nrounds = nrounds, verbose = 0, params = xgb.params( max_depth = 9, learning_rate = 1, nthread = 2, objective = "binary:logistic", booster = "gbtree", base_score = 0.5 ) ) bst.GLM <- xgb.train( data = xgb.DMatrix(sparse_matrix, label = label, nthread = 1), nrounds = nrounds, verbose = 0, params = xgb.params( learning_rate = 1, nthread = 1, objective = "binary:logistic", booster = "gblinear", base_score = 0.5 ) ) feature.names <- colnames(sparse_matrix) # without feature names bst.Tree.unnamed <- xgb.copy.Booster(bst.Tree) setinfo(bst.Tree.unnamed, "feature_name", NULL) } # multiclass mlabel <- as.numeric(iris$Species) - 1 nclass <- 3 mbst.Tree <- xgb.train( data = xgb.DMatrix(as.matrix(iris[, -5]), label = mlabel, nthread = 1), verbose = 0, nrounds = nrounds, params = xgb.params( max_depth = 3, learning_rate = 0.5, nthread = 2, objective = "multi:softprob", num_class = 
nclass, base_score = 0 ) ) mbst.GLM <- xgb.train( data = xgb.DMatrix(as.matrix(iris[, -5]), label = mlabel, nthread = 1), verbose = 0, nrounds = nrounds, params = xgb.params( booster = "gblinear", learning_rate = 0.1, nthread = 1, objective = "multi:softprob", num_class = nclass, base_score = 0 ) ) test_that("xgb.dump works", { .skip_if_vcd_not_available() if (!flag_32bit) expect_length(xgb.dump(bst.Tree), 200) dump_file <- file.path(tempdir(), 'xgb.model.dump') expect_true(xgb.dump(bst.Tree, dump_file, with_stats = TRUE)) expect_true(file.exists(dump_file)) expect_gt(file.size(dump_file), 8000) # JSON format dmp <- xgb.dump(bst.Tree, dump_format = "json") expect_length(dmp, 1) if (!flag_32bit) expect_length(grep('nodeid', strsplit(dmp, '\n', fixed = TRUE)[[1]], fixed = TRUE), 188) }) test_that("xgb.dump works for gblinear", { .skip_if_vcd_not_available() expect_length(xgb.dump(bst.GLM), 14) # also make sure that it works properly for a sparse model where some coefficients # are 0 from setting large L1 regularization: bst.GLM.sp <- xgb.train( data = xgb.DMatrix(sparse_matrix, label = label, nthread = 1), nrounds = 1, params = xgb.params( learning_rate = 1, nthread = 2, reg_alpha = 2, objective = "binary:logistic", booster = "gblinear" ) ) d.sp <- xgb.dump(bst.GLM.sp) expect_length(d.sp, 14) expect_gt(sum(d.sp == "0"), 0) # JSON format dmp <- xgb.dump(bst.GLM.sp, dump_format = "json") expect_length(dmp, 1) expect_length(grep('\\d', strsplit(dmp, '\n', fixed = TRUE)[[1]]), 11) }) test_that("predict leafs works", { .skip_if_vcd_not_available() # no error for gbtree expect_error(pred_leaf <- predict(bst.Tree, sparse_matrix, predleaf = TRUE), regexp = NA) expect_equal(dim(pred_leaf), c(nrow(sparse_matrix), nrounds)) # error for gblinear expect_error(predict(bst.GLM, sparse_matrix, predleaf = TRUE)) }) test_that("predict feature contributions works", { .skip_if_vcd_not_available() # gbtree binary classifier expect_error(pred_contr <- predict(bst.Tree, sparse_matrix, predcontrib = TRUE), regexp = NA) expect_equal(dim(pred_contr), c(nrow(sparse_matrix), ncol(sparse_matrix) + 1)) expect_equal(colnames(pred_contr), c(colnames(sparse_matrix), "(Intercept)")) pred <- predict(bst.Tree, sparse_matrix, outputmargin = TRUE) expect_lt(max(abs(rowSums(pred_contr) - pred)), 1e-5) # must work with data that has no column names X <- sparse_matrix colnames(X) <- NULL expect_error(pred_contr_ <- predict(bst.Tree, X, predcontrib = TRUE), regexp = NA) expect_equal(pred_contr, pred_contr_, check.attributes = FALSE, tolerance = float_tolerance) # gbtree binary classifier (approximate method) expect_error(pred_contr <- predict(bst.Tree, sparse_matrix, predcontrib = TRUE, approxcontrib = TRUE), regexp = NA) expect_equal(dim(pred_contr), c(nrow(sparse_matrix), ncol(sparse_matrix) + 1)) expect_equal(colnames(pred_contr), c(colnames(sparse_matrix), "(Intercept)")) pred <- predict(bst.Tree, sparse_matrix, outputmargin = TRUE) expect_lt(max(abs(rowSums(pred_contr) - pred)), 1e-5) # gblinear binary classifier expect_error(pred_contr <- predict(bst.GLM, sparse_matrix, predcontrib = TRUE), regexp = NA) expect_equal(dim(pred_contr), c(nrow(sparse_matrix), ncol(sparse_matrix) + 1)) expect_equal(colnames(pred_contr), c(colnames(sparse_matrix), "(Intercept)")) pred <- predict(bst.GLM, sparse_matrix, outputmargin = TRUE) expect_lt(max(abs(rowSums(pred_contr) - pred)), 1e-5) # manual calculation of linear terms coefs <- as.numeric(xgb.dump(bst.GLM)[-c(1, 2, 4)]) coefs <- c(coefs[-1], coefs[1]) # intercept must be the last 
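  # (for gblinear, the contribution of feature j reduces to coef_j * x_j, so
  # multiplying each column, plus a constant 1 column for the intercept, by its
  # coefficient should reproduce predcontrib exactly)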
pred_contr_manual <- sweep(cbind(sparse_matrix, 1), 2, coefs, FUN = "*")
  expect_equal(as.numeric(pred_contr), as.numeric(pred_contr_manual),
               tolerance = float_tolerance)

  # gbtree multiclass
  pred <- predict(mbst.Tree, as.matrix(iris[, -5]), outputmargin = TRUE)
  pred_contr <- predict(mbst.Tree, as.matrix(iris[, -5]), predcontrib = TRUE)
  expect_is(pred_contr, "array")
  expect_length(dim(pred_contr), 3)
  for (g in seq_len(dim(pred_contr)[2])) {
    expect_equal(colnames(pred_contr[, g, ]), c(colnames(iris[, -5]), "(Intercept)"))
    expect_lt(max(abs(rowSums(pred_contr[, g, ]) - pred[, g])), 1e-5)
  }

  # gblinear multiclass (set base_score = 0, which is base margin in multiclass)
  pred <- predict(mbst.GLM, as.matrix(iris[, -5]), outputmargin = TRUE)
  pred_contr <- predict(mbst.GLM, as.matrix(iris[, -5]), predcontrib = TRUE)
  expect_length(dim(pred_contr), 3)
  coefs_all <- matrix(
    data = as.numeric(xgb.dump(mbst.GLM)[-c(1, 2, 6)]),
    ncol = 3,
    byrow = TRUE
  )
  for (g in seq_len(dim(pred_contr)[2])) {
    expect_equal(colnames(pred_contr[, g, ]), c(colnames(iris[, -5]), "(Intercept)"))
    expect_lt(max(abs(rowSums(pred_contr[, g, ]) - pred[, g])), float_tolerance)
    # manual calculation of linear terms
    coefs <- c(coefs_all[-1, g], coefs_all[1, g])  # intercept needs to be the last
    pred_contr_manual <- sweep(as.matrix(cbind(iris[, -5], 1)), 2, coefs, FUN = "*")
    expect_equal(as.numeric(pred_contr[, g, ]), as.numeric(pred_contr_manual),
                 tolerance = float_tolerance)
  }
})

test_that("SHAPs sum to predictions, with or without DART", {
  d <- cbind(
    x1 = rnorm(100),
    x2 = rnorm(100),
    x3 = rnorm(100))
  y <- d[, "x1"] + d[, "x2"]^2 + ifelse(d[, "x3"] > .5, d[, "x3"]^2, 2^d[, "x3"]) + rnorm(100)
  nrounds <- 30

  for (booster in list("gbtree", "dart")) {
    fit <- xgb.train(
      params = c(
        list(
          nthread = 2,
          booster = booster,
          objective = "reg:squarederror",
          eval_metric = "rmse"),
        if (booster == "dart")
          list(rate_drop = .01, one_drop = TRUE)),
      data = xgb.DMatrix(d, label = y, nthread = 1),
      nrounds = nrounds)

    pr <- function(...) {
      predict(fit, newdata = d, ...)
} pred <- pr() shap <- pr(predcontrib = TRUE) shapi <- pr(predinteraction = TRUE) tol <- 1e-5 expect_equal(rowSums(shap), pred, tol = tol) expect_equal(rowSums(shapi), pred, tol = tol) for (i in seq_len(nrow(d))) for (f in list(rowSums, colSums)) expect_equal(f(shapi[i, , ]), shap[i, ], tol = tol) } }) test_that("xgb-attribute functionality", { .skip_if_vcd_not_available() val <- "my attribute value" list.val <- list(my_attr = val, a = 123, b = 'ok') list.ch <- list.val[order(names(list.val))] list.ch <- lapply(list.ch, as.character) # note: iter is 0-index in xgb attributes list.default <- list() list.ch <- c(list.ch, list.default) # proper input: expect_error(xgb.attr(bst.Tree, NULL)) expect_error(xgb.attr(val, val)) # set & get: expect_null(xgb.attr(bst.Tree, "asdf")) expect_equal(xgb.attributes(bst.Tree), list.default) bst.Tree.copy <- xgb.copy.Booster(bst.Tree) xgb.attr(bst.Tree.copy, "my_attr") <- val expect_equal(xgb.attr(bst.Tree.copy, "my_attr"), val) xgb.attributes(bst.Tree.copy) <- list.val expect_equal(xgb.attributes(bst.Tree.copy), list.ch) # serializing: fname <- file.path(tempdir(), "xgb.ubj") xgb.save(bst.Tree.copy, fname) bst <- xgb.load(fname) expect_equal(xgb.attr(bst, "my_attr"), val) expect_equal(xgb.attributes(bst), list.ch) # deletion: xgb.attr(bst, "my_attr") <- NULL expect_null(xgb.attr(bst, "my_attr")) expect_equal(xgb.attributes(bst), list.ch[c("a", "b")]) xgb.attributes(bst) <- list(a = NULL, b = NULL) expect_equal(xgb.attributes(bst), list.default) xgb.attributes(bst) <- list(niter = NULL) expect_equal(xgb.attributes(bst), list()) }) if (grepl('Windows', Sys.info()[['sysname']], fixed = TRUE) || grepl('Linux', Sys.info()[['sysname']], fixed = TRUE) || grepl('Darwin', Sys.info()[['sysname']], fixed = TRUE)) { test_that("xgb-attribute numeric precision", { .skip_if_vcd_not_available() # check that lossless conversion works with 17 digits # numeric -> character -> numeric X <- 10^runif(100, -20, 20) if (capabilities('long.double')) { X2X <- as.numeric(format(X, digits = 17)) expect_equal(X, X2X, tolerance = float_tolerance) } # retrieved attributes to be the same as written for (x in X) { xgb.attr(bst.Tree, "x") <- x expect_equal(as.numeric(xgb.attr(bst.Tree, "x")), x, tolerance = float_tolerance) xgb.attributes(bst.Tree) <- list(a = "A", b = x) expect_equal(as.numeric(xgb.attr(bst.Tree, "b")), x, tolerance = float_tolerance) } }) } test_that("xgb.Booster serializing as R object works", { .skip_if_vcd_not_available() fname_rds <- file.path(tempdir(), "xgb.model.rds") saveRDS(bst.Tree, fname_rds) bst <- readRDS(fname_rds) dtrain <- xgb.DMatrix(sparse_matrix, label = label, nthread = 2) expect_equal(predict(bst.Tree, dtrain), predict(bst, dtrain), tolerance = float_tolerance) expect_equal(xgb.dump(bst.Tree), xgb.dump(bst)) fname_bin <- file.path(tempdir(), "xgb.model") xgb.save(bst, fname_bin) bst <- readRDS(fname_rds) expect_equal(predict(bst.Tree, dtrain), predict(bst, dtrain), tolerance = float_tolerance) }) test_that("xgb.model.dt.tree works with and without feature names", { .skip_if_vcd_not_available() names.dt.trees <- c("Tree", "Node", "ID", "Feature", "Split", "Yes", "No", "Missing", "Gain", "Cover") dt.tree <- xgb.model.dt.tree(model = bst.Tree) expect_equal(names.dt.trees, names(dt.tree)) if (!flag_32bit) expect_equal(dim(dt.tree), c(188, 10)) expect_output(str(dt.tree), 'Feature.*\\"Age\\"') # when model contains no feature names: dt.tree.x <- xgb.model.dt.tree(model = bst.Tree.unnamed) expect_output(str(dt.tree.x), 'Feature.*\\"3\\"') 
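  # without stored feature names, features are reported by 0-based column
  # index, hence the numeric "3" here versus the "Age" name matched above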
expect_equal(dt.tree[, -4, with = FALSE], dt.tree.x[, -4, with = FALSE]) # using integer node ID instead of character dt.tree.int <- xgb.model.dt.tree(model = bst.Tree, use_int_id = TRUE) expect_equal(as.integer(data.table::tstrsplit(dt.tree$Yes, '-', fixed = TRUE)[[2]]), dt.tree.int$Yes) expect_equal(as.integer(data.table::tstrsplit(dt.tree$No, '-', fixed = TRUE)[[2]]), dt.tree.int$No) expect_equal(as.integer(data.table::tstrsplit(dt.tree$Missing, '-', fixed = TRUE)[[2]]), dt.tree.int$Missing) }) test_that("xgb.model.dt.tree throws error for gblinear", { .skip_if_vcd_not_available() expect_error(xgb.model.dt.tree(model = bst.GLM)) }) test_that("xgb.importance works with and without feature names", { .skip_if_vcd_not_available() importance.Tree <- xgb.importance(feature_names = feature.names, model = bst.Tree.unnamed) if (!flag_32bit) expect_equal(dim(importance.Tree), c(7, 4)) expect_equal(colnames(importance.Tree), c("Feature", "Gain", "Cover", "Frequency")) expect_output(str(importance.Tree), 'Feature.*\\"Age\\"') importance.Tree.0 <- xgb.importance(model = bst.Tree) expect_equal(importance.Tree, importance.Tree.0, tolerance = float_tolerance) # when model contains no feature names: importance.Tree.x <- xgb.importance(model = bst.Tree.unnamed) expect_equal(importance.Tree[, -1, with = FALSE], importance.Tree.x[, -1, with = FALSE], tolerance = float_tolerance) imp2plot <- xgb.plot.importance(importance_matrix = importance.Tree) expect_equal(colnames(imp2plot), c("Feature", "Gain", "Cover", "Frequency", "Importance")) xgb.ggplot.importance(importance_matrix = importance.Tree) # for multiclass imp.Tree <- xgb.importance(model = mbst.Tree) expect_equal(dim(imp.Tree), c(4, 4)) trees <- seq(from = 1, by = 2, length.out = 2) importance <- xgb.importance(feature_names = feature.names, model = bst.Tree, trees = trees) importance_from_dump <- function() { imp <- xgb.model.dt.tree( model = bst.Tree, trees = trees )[ Feature != "Leaf", .( Gain = sum(Gain), Cover = sum(Cover), Frequency = .N ), by = Feature ][ , `:=`( Gain = Gain / sum(Gain), Cover = Cover / sum(Cover), Frequency = Frequency / sum(Frequency) ) ][ order(Gain, decreasing = TRUE) ] imp } expect_equal(importance_from_dump(), importance, tolerance = 1e-6) ## decision stump m <- xgb.train( data = xgb.DMatrix(as.matrix(data.frame(x = c(0, 1))), label = c(1, 2), nthread = 1), nrounds = 1, params = xgb.params( base_score = 0.5, nthread = 2 ) ) df <- xgb.model.dt.tree(model = m) expect_equal(df$Feature, "Leaf") expect_equal(df$Cover, 2) }) test_that("xgb.importance works with GLM model", { .skip_if_vcd_not_available() importance.GLM <- xgb.importance(feature_names = feature.names, model = bst.GLM) expect_equal(dim(importance.GLM), c(10, 2)) expect_equal(colnames(importance.GLM), c("Feature", "Weight")) xgb.importance(model = bst.GLM) imp2plot <- xgb.plot.importance(importance.GLM) expect_equal(colnames(imp2plot), c("Feature", "Weight", "Importance")) xgb.ggplot.importance(importance.GLM) # check that the input is not modified in-place expect_false("Importance" %in% names(importance.GLM)) # for multiclass imp.GLM <- xgb.importance(model = mbst.GLM) expect_equal(dim(imp.GLM), c(12, 3)) expect_equal(imp.GLM$Class, rep(0:2, each = 4)) }) test_that("xgb.model.dt.tree and xgb.importance work with a single split model", { .skip_if_vcd_not_available() bst1 <- xgb.train( data = xgb.DMatrix(sparse_matrix, label = label, nthread = 1), nrounds = 1, verbose = 0, params = xgb.params( max_depth = 1, learning_rate = 1, nthread = 2, objective = 
"binary:logistic" ) ) expect_error(dt <- xgb.model.dt.tree(model = bst1), regexp = NA) # no error expect_equal(nrow(dt), 3) expect_error(imp <- xgb.importance(model = bst1), regexp = NA) # no error expect_equal(nrow(imp), 1) expect_equal(imp$Gain, 1) }) test_that("xgb.plot.importance de-duplicates features", { importances <- data.table( Feature = c("col1", "col2", "col2"), Gain = c(0.4, 0.3, 0.3) ) imp2plot <- xgb.plot.importance(importances) expect_equal(nrow(imp2plot), 2L) expect_equal(imp2plot$Feature, c("col2", "col1")) }) test_that("xgb.plot.tree works with and without feature names", { .skip_if_vcd_not_available() expect_silent(xgb.plot.tree(model = bst.Tree.unnamed)) expect_silent(xgb.plot.tree(model = bst.Tree)) ## Categorical y <- rnorm(100) x <- sample(3, size = 100 * 3, replace = TRUE) |> matrix(nrow = 100) x <- x - 1 dm <- xgb.DMatrix(data = x, label = y, nthread = 1) setinfo(dm, "feature_type", c("c", "c", "c")) model <- xgb.train( data = dm, params = list(tree_method = "hist"), nrounds = 2 ) expect_silent(xgb.plot.tree(model = model)) }) test_that("xgb.plot.multi.trees works with and without feature names", { .skip_if_vcd_not_available() xgb.plot.multi.trees(model = bst.Tree.unnamed, features_keep = 3) xgb.plot.multi.trees(model = bst.Tree, features_keep = 3) expect_true(TRUE) }) test_that("xgb.plot.deepness works", { .skip_if_vcd_not_available() d2p <- xgb.plot.deepness(model = bst.Tree) expect_equal(colnames(d2p), c("ID", "Tree", "Depth", "Cover", "Weight")) xgb.plot.deepness(model = bst.Tree, which = "med.depth") xgb.ggplot.deepness(model = bst.Tree) }) test_that("xgb.shap.data works when top_n is provided", { .skip_if_vcd_not_available() data_list <- xgb.shap.data(data = sparse_matrix, model = bst.Tree, top_n = 2) expect_equal(names(data_list), c("data", "shap_contrib")) expect_equal(NCOL(data_list$data), 2) expect_equal(NCOL(data_list$shap_contrib), 2) expect_equal(NROW(data_list$data), NROW(data_list$shap_contrib)) expect_gt(length(colnames(data_list$data)), 0) expect_gt(length(colnames(data_list$shap_contrib)), 0) # for multiclass without target class provided data_list <- xgb.shap.data(data = as.matrix(iris[, -5]), model = mbst.Tree, top_n = 2) expect_equal(dim(data_list$shap_contrib), c(nrow(iris), 2)) # for multiclass with target class provided data_list <- xgb.shap.data(data = as.matrix(iris[, -5]), model = mbst.Tree, top_n = 2, target_class = 0) expect_equal(dim(data_list$shap_contrib), c(nrow(iris), 2)) }) test_that("xgb.shap.data works with subsampling", { .skip_if_vcd_not_available() data_list <- xgb.shap.data(data = sparse_matrix, model = bst.Tree, top_n = 2, subsample = 0.8) expect_equal(NROW(data_list$data), as.integer(0.8 * nrow(sparse_matrix))) expect_equal(NROW(data_list$data), NROW(data_list$shap_contrib)) }) test_that("xgb.shap.data works with data frames", { data(mtcars) df <- mtcars df$cyl <- factor(df$cyl) x <- df[, -1] y <- df$mpg dm <- xgb.DMatrix(x, label = y, nthread = 1L) model <- xgb.train( data = dm, params = list( max_depth = 2, nthread = 1 ), nrounds = 2 ) data_list <- xgb.shap.data(data = df[, -1], model = model, top_n = 2, subsample = 0.8) expect_equal(NROW(data_list$data), as.integer(0.8 * nrow(df))) expect_equal(NROW(data_list$data), NROW(data_list$shap_contrib)) }) test_that("prepare.ggplot.shap.data works", { .skip_if_vcd_not_available() data_list <- xgb.shap.data(data = sparse_matrix, model = bst.Tree, top_n = 2) plot_data <- prepare.ggplot.shap.data(data_list, normalize = TRUE) expect_s3_class(plot_data, "data.frame") 
expect_equal(names(plot_data), c("id", "feature", "feature_value", "shap_value")) expect_s3_class(plot_data$feature, "factor") # Each observation should have 1 row for each feature expect_equal(nrow(plot_data), nrow(sparse_matrix) * 2) }) test_that("xgb.plot.shap works", { .skip_if_vcd_not_available() sh <- xgb.plot.shap(data = sparse_matrix, model = bst.Tree, top_n = 2, col = 4) expect_equal(names(sh), c("data", "shap_contrib")) }) test_that("xgb.plot.shap.summary works", { .skip_if_vcd_not_available() expect_silent(xgb.plot.shap.summary(data = sparse_matrix, model = bst.Tree, top_n = 2)) expect_silent(xgb.ggplot.shap.summary(data = sparse_matrix, model = bst.Tree, top_n = 2)) }) test_that("xgb.plot.shap.summary ignores categorical features", { .skip_if_vcd_not_available() data(mtcars) df <- mtcars df$cyl <- factor(df$cyl) levels(df$cyl) <- c("a", "b", "c") x <- df[, -1] y <- df$mpg dm <- xgb.DMatrix(x, label = y, nthread = 1L) model <- xgb.train( data = dm, params = list( max_depth = 2, nthread = 1 ), nrounds = 2 ) expect_warning({ xgb.ggplot.shap.summary(data = x, model = model, top_n = 2) }) x_num <- mtcars[, -1] x_num$gear <- as.numeric(x_num$gear) - 1 x_num <- as.matrix(x_num) dm <- xgb.DMatrix(x_num, label = y, feature_types = c(rep("q", 8), "c", "q"), nthread = 1L) model <- xgb.train( data = dm, params = list( max_depth = 2, nthread = 1 ), nrounds = 2 ) expect_warning({ xgb.ggplot.shap.summary(data = x_num, model = model, top_n = 2) }) }) test_that("check.deprecation works", { data(mtcars) dm <- xgb.DMatrix(mtcars[, -1L], label = mtcars$mpg, nthread = 1) params <- xgb.params(nthread = 1, max_depth = 2, eval_metric = "rmse") args_train <- list( data = dm, params = params, nrounds = 10, verbose = 0 ) # with exact name options("xgboost.strict_mode" = TRUE) expect_error({ model <- xgb.train( data = dm, params = params, nrounds = 10, watchlist = list(tr = dm), verbose = 0 ) }, regexp = "watchlist") options("xgboost.strict_mode" = FALSE) expect_warning({ model <- xgb.train( data = dm, params = params, nrounds = 10, watchlist = list(tr = dm), verbose = 0 ) }, regexp = "watchlist") expect_true(hasName(attributes(model), "evaluation_log")) expect_equal(names(attributes(model)$evaluation_log), c("iter", "tr_rmse")) # with partial name match expect_warning({ model <- xgb.train( data = dm, params = params, nrounds = 10, watchlis = list(train = dm), verbose = 0 ) }, regexp = "watchlist") expect_true(hasName(attributes(model), "evaluation_log")) expect_equal(names(attributes(model)$evaluation_log), c("iter", "train_rmse")) # error/warning is thrown if argument cannot be matched options("xgboost.strict_mode" = TRUE) expect_error({ model <- xgb.train( data = dm, params = params, nrounds = 10, watchlistt = list(train = dm), verbose = 0 ) }, regexp = "unrecognized") options("xgboost.strict_mode" = FALSE) expect_warning({ model <- xgb.train( data = dm, params = params, nrounds = 10, watchlistt = list(train = dm), verbose = 0 ) }, regexp = "unrecognized") # error should suggest to put under 'params' if it goes there options("xgboost.strict_mode" = TRUE) expect_error({ model <- xgb.train( data = dm, nthread = 1, max_depth = 2, eval_metric = "rmse", nrounds = 10, evals = list(train = dm), verbose = 0 ) }, regexp = "should be passed as a list to argument 'params'") options("xgboost.strict_mode" = FALSE) expect_warning({ model <- xgb.train( data = dm, nthread = 1, max_depth = 2, eval_metric = "mae", nrounds = 10, evals = list(train = dm), verbose = 0 ) }, regexp = "should be passed as a list to argument 
'params'") expect_true(hasName(attributes(model), "evaluation_log")) expect_equal(names(attributes(model)$evaluation_log), c("iter", "train_mae")) # can take more than one deprecated parameter expect_warning({ model <- xgb.train( training.data = dm, params = params, nrounds = 10, watchlis = list(tr = dm), verbose = 0 ) }, regexp = "training.data") expect_true(hasName(attributes(model), "evaluation_log")) expect_equal(names(attributes(model)$evaluation_log), c("iter", "tr_rmse")) }) test_that('convert.labels works', { y <- c(0, 1, 0, 0, 1) for (objective in c('binary:logistic', 'binary:logitraw', 'binary:hinge')) { res <- xgboost:::convert.labels(y, objective_name = objective) expect_s3_class(res, 'factor') expect_equal(res, factor(res)) } y <- c(0, 1, 3, 2, 1, 4) for (objective in c('multi:softmax', 'multi:softprob', 'rank:pairwise', 'rank:ndcg', 'rank:map')) { res <- xgboost:::convert.labels(y, objective_name = objective) expect_s3_class(res, 'factor') expect_equal(res, factor(res)) } y <- c(1.2, 3.0, -1.0, 10.0) for (objective in c('reg:squarederror', 'reg:squaredlogerror', 'reg:logistic', 'reg:pseudohubererror', 'count:poisson', 'survival:cox', 'survival:aft', 'reg:gamma', 'reg:tweedie')) { res <- xgboost:::convert.labels(y, objective_name = objective) expect_equal(class(res), 'numeric') } }) test_that("validate.features works as expected", { data(mtcars) y <- mtcars$mpg x <- as.matrix(mtcars[, -1]) dm <- xgb.DMatrix(x, label = y, nthread = 1) model <- xgb.train( params = list(nthread = 1), data = dm, nrounds = 3 ) # result is output as-is when needed res <- validate.features(model, x) expect_equal(res, x) res <- validate.features(model, dm) expect_identical(res, dm) res <- validate.features(model, as(x[1, ], "dsparseVector")) expect_equal(as.numeric(res), unname(x[1, ])) res <- validate.features(model, "file.txt") expect_equal(res, "file.txt") # columns are reordered res <- validate.features(model, mtcars[, rev(names(mtcars))]) expect_equal(names(res), colnames(x)) expect_equal(as.matrix(res), x) res <- validate.features(model, as.matrix(mtcars[, rev(names(mtcars))])) expect_equal(colnames(res), colnames(x)) expect_equal(res, x) res <- validate.features(model, mtcars[1, rev(names(mtcars)), drop = FALSE]) expect_equal(names(res), colnames(x)) expect_equal(unname(as.matrix(res)), unname(x[1, , drop = FALSE])) res <- validate.features(model, as.data.table(mtcars[, rev(names(mtcars))])) expect_equal(names(res), colnames(x)) expect_equal(unname(as.matrix(res)), unname(x)) # error when columns are missing expect_error({ validate.features(model, mtcars[, 1:3]) }) expect_error({ validate.features(model, as.matrix(mtcars[, 1:ncol(x)])) # nolint }) expect_error({ validate.features(model, xgb.DMatrix(mtcars[, 1:3], nthread = 1)) }) expect_error({ validate.features(model, as(x[, 1:3], "CsparseMatrix")) }) # error when it cannot reorder or subset expect_error({ validate.features(model, xgb.DMatrix(mtcars, nthread = 1)) }, "Feature names") expect_error({ validate.features(model, xgb.DMatrix(x[, rev(colnames(x))], nthread = 1)) }, "Feature names") # no error about types if the booster doesn't have types expect_error({ validate.features(model, xgb.DMatrix(x, feature_types = c(rep("q", 5), rep("c", 5)), nthread = 1)) }, NA) tmp <- mtcars tmp[["vs"]] <- factor(tmp[["vs"]]) expect_error({ validate.features(model, tmp) }, NA) # error when types do not match setinfo(model, "feature_type", rep("q", 10)) expect_error({ validate.features(model, xgb.DMatrix(x, feature_types = c(rep("q", 5), rep("c", 5)), 
nthread = 1)) }, "Feature types")
  tmp <- mtcars
  tmp[["vs"]] <- factor(tmp[["vs"]])
  expect_error({
    validate.features(model, tmp)
  }, "Feature types")
})

test_that("Parameters constructor works as expected", {
  empty_list <- list()
  names(empty_list) <- character()
  params <- xgb.params()
  expect_equal(params, empty_list)
  params <- xgb.params(max_depth = 2)
  expect_equal(params, list(max_depth = 2))
  params <- xgb.params(max_depth = NULL)
  expect_equal(params, empty_list)
  max_depth <- 3
  params <- xgb.params(max_depth = max_depth)
  expect_equal(params, list(max_depth = 3))
  four <- 4L
  params <- xgb.params(max_depth = four)
  expect_equal(params, list(max_depth = 4L))
  params <- xgb.params(objective = "binary:logistic", nthread = 10)
  expect_equal(params, list(objective = "binary:logistic", nthread = 10))
  expect_error({
    xgb.params(max_xgboost = 10)
  })
  expect_error({
    xgb.params(max_depth = 2, max_depth = 3)
  })
})
xgboost-3.0.0/R-package/tests/testthat/test_interaction_constraints.R000066400000000000000000000041641476511272400261410ustar00rootroot00000000000000require(xgboost)

context("interaction constraints")

n_threads <- 2

set.seed(1024)
x1 <- rnorm(1000, 1)
x2 <- rnorm(1000, 1)
x3 <- sample(c(1, 2, 3), size = 1000, replace = TRUE)
y <- x1 + x2 + x3 + x1 * x2 * x3 + rnorm(1000, 0.001) + 3 * sin(x1)
train <- matrix(c(x1, x2, x3), ncol = 3)

test_that("interaction constraints for regression", {
  # Fit a model that only allows interaction between x1 and x2
  bst <- xgb.train(
    data = xgb.DMatrix(train, label = y, nthread = 1),
    nrounds = 100, verbose = 0,
    params = xgb.params(
      max_depth = 3,
      learning_rate = 0.1,
      nthread = 2,
      interaction_constraints = list(c(0, 1))
    )
  )
  # Set all observations to have the same x3 values then increment
  # by the same amount
  preds <- lapply(c(1, 2, 3), function(x) {
    tmat <- matrix(c(x1, x2, rep(x, 1000)), ncol = 3)
    return(predict(bst, tmat))
  })
  # Check incrementing x3 has the same effect on all observations
  # since x3 is constrained to be independent of x1 and x2
  # and all observations start off from the same x3 value
  diff1 <- preds[[2]] - preds[[1]]
  test1 <- all(abs(diff1 - diff1[1]) < 1e-4)
  diff2 <- preds[[3]] - preds[[2]]
  test2 <- all(abs(diff2 - diff2[1]) < 1e-4)
  expect_true({
    test1 & test2
  }, "Interaction Constraint Satisfied")
})

test_that("interaction constraints scientific representation", {
  rows <- 10
  ## When a number exceeds 1e5, R's paste() uses scientific representation.
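  ## (e.g. as.character(100000) yields "1e+05"; such strings would corrupt the
  ## serialized constraint indices)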
## See: https://github.com/dmlc/xgboost/issues/5179 cols <- 1e5 + 10 d <- matrix(rexp(rows, rate = .1), nrow = rows, ncol = cols) y <- rnorm(rows) dtrain <- xgb.DMatrix(data = d, label = y, nthread = n_threads) inc <- list(c(seq.int(from = 0, to = cols, by = 1))) with_inc <- xgb.train( data = dtrain, nrounds = 10, params = xgb.params( tree_method = 'hist', interaction_constraints = inc, nthread = n_threads ) ) without_inc <- xgb.train( data = dtrain, nrounds = 10, params = xgb.params( tree_method = 'hist', nthread = n_threads ) ) expect_equal(xgb.save.raw(with_inc), xgb.save.raw(without_inc)) }) xgboost-3.0.0/R-package/tests/testthat/test_interactions.R000066400000000000000000000137051476511272400236760ustar00rootroot00000000000000context('Test prediction of feature interactions') set.seed(123) n_threads <- 2 test_that("predict feature interactions works", { # simulate some binary data and a linear outcome with an interaction term N <- 1000 P <- 5 X <- matrix(rbinom(N * P, 1, 0.5), ncol = P, dimnames = list(NULL, letters[1:P])) # center the data (as contributions are computed WRT feature means) X <- scale(X, scale = FALSE) # outcome without any interactions, without any noise: f <- function(x) 2 * x[, 1] - 3 * x[, 2] # outcome with interactions, without noise: f_int <- function(x) f(x) + 2 * x[, 2] * x[, 3] # outcome with interactions, with noise: #f_int_noise <- function(x) f_int(x) + rnorm(N, 0, 0.3) y <- f_int(X) dm <- xgb.DMatrix(X, label = y, nthread = n_threads) param <- xgb.params( learning_rate = 0.1, max_depth = 4, base_score = mean(y), reg_lambda = 0, nthread = n_threads ) b <- xgb.train(param, dm, 100) pred <- predict(b, dm, outputmargin = TRUE) # SHAP contributions: cont <- predict(b, dm, predcontrib = TRUE) expect_equal(dim(cont), c(N, P + 1)) # make sure for each row they add up to marginal predictions expect_lt(max(abs(rowSums(cont) - pred)), 0.001) # Hand-construct the 'ground truth' feature contributions: gt_cont <- cbind( 2. * X[, 1], -3. * X[, 2] + 1. * X[, 2] * X[, 3], # attribute a HALF of the interaction term to feature #2 1. * X[, 2] * X[, 3] # and another HALF of the interaction term to feature #3 ) gt_cont <- cbind(gt_cont, matrix(0, nrow = N, ncol = P + 1 - 3)) # These should be relatively close: expect_lt(max(abs(cont - gt_cont)), 0.05) # SHAP interaction contributions: intr <- predict(b, dm, predinteraction = TRUE) expect_equal(dim(intr), c(N, P + 1, P + 1)) # check assigned colnames cn <- c(letters[1:P], "(Intercept)") expect_equal(dimnames(intr), list(NULL, cn, cn)) # check the symmetry expect_lt(max(abs(aperm(intr, c(1, 3, 2)) - intr)), 0.00001) # sums WRT columns must be close to feature contributions expect_lt(max(abs(apply(intr, c(1, 2), sum) - cont)), 0.00001) # diagonal terms for features 3,4,5 must be close to zero expect_lt(Reduce(max, sapply(3:P, function(i) max(abs(intr[, i, i])))), 0.05) # Intercept must have no interactions expect_lt(max(abs(intr[, 1:P, P + 1])), 0.00001) # interactions other than 2 x 3 must be close to zero intr23 <- intr intr23[, 2, 3] <- 0 expect_lt( Reduce(max, sapply(1:P, function(i) max(abs(intr23[, i, (i + 1):(P + 1)])))), 0.05 ) # Construct the 'ground truth' contributions of interactions directly from the linear terms: gt_intr <- array(0, c(N, P + 1, P + 1)) gt_intr[, 2, 3] <- 1. 
* X[, 2] * X[, 3] # attribute a HALF of the interaction term to each symmetric element gt_intr[, 3, 2] <- gt_intr[, 2, 3] # merge-in the diagonal based on 'ground truth' feature contributions intr_diag <- gt_cont - apply(gt_intr, c(1, 2), sum) for (j in seq_len(P)) { gt_intr[, j, j] <- intr_diag[, j] } # These should be relatively close: expect_lt(max(abs(intr - gt_intr)), 0.1) }) test_that("SHAP contribution values are not NAN", { d <- data.frame( x1 = c(-2.3, 1.4, 5.9, 2, 2.5, 0.3, -3.6, -0.2, 0.5, -2.8, -4.6, 3.3, -1.2, -1.1, -2.3, 0.4, -1.5, -0.2, -1, 3.7), x2 = c(291.179171, 269.198331, 289.942097, 283.191669, 269.673332, 294.158346, 287.255835, 291.530838, 285.899586, 269.290833, 268.649586, 291.530841, 280.074593, 269.484168, 293.94042, 294.327506, 296.20709, 295.441669, 283.16792, 270.227085), y = c(9, 15, 5.7, 9.2, 22.4, 5, 9, 3.2, 7.2, 13.1, 7.8, 16.9, 6.5, 22.1, 5.3, 10.4, 11.1, 13.9, 11, 20.5), fold = c(2, 2, 2, 1, 2, 2, 1, 2, 1, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2)) ivs <- c("x1", "x2") fit <- xgb.train( verbose = 0, params = list( objective = "reg:squarederror", eval_metric = "rmse", nthread = n_threads ), data = xgb.DMatrix(as.matrix(subset(d, fold == 2)[, ivs]), label = subset(d, fold == 2)$y, nthread = 1), nrounds = 3 ) shaps <- as.data.frame(predict(fit, newdata = as.matrix(subset(d, fold == 1)[, ivs]), predcontrib = TRUE)) result <- cbind(shaps, sum = rowSums(shaps), pred = predict(fit, newdata = as.matrix(subset(d, fold == 1)[, ivs]))) expect_true(identical(TRUE, all.equal(result$sum, result$pred, tol = 1e-6))) }) test_that("multiclass feature interactions work", { dm <- xgb.DMatrix( as.matrix(iris[, -5]), label = as.numeric(iris$Species) - 1, nthread = n_threads ) param <- xgb.params( learning_rate = 0.1, max_depth = 4, objective = 'multi:softprob', num_class = 3, nthread = n_threads ) b <- xgb.train(param, dm, 40) pred <- predict(b, dm, outputmargin = TRUE) # SHAP contributions: cont <- predict(b, dm, predcontrib = TRUE) expect_length(dim(cont), 3) # make sure for each row they add up to marginal predictions expect_lt(max(abs(apply(cont, c(1, 2), sum) - pred)), 0.001) # SHAP interaction contributions: intr <- predict(b, dm, predinteraction = TRUE) expect_length(dim(intr), 4) # check the symmetry expect_lt(max(abs(aperm(intr, c(1, 2, 4, 3)) - intr)), 0.00001) # sums WRT columns must be close to feature contributions expect_lt(max(abs(apply(intr, c(1, 2, 3), sum) - cont)), 0.00001) }) test_that("SHAP single sample works", { train <- agaricus.train test <- agaricus.test booster <- xgb.train( data = xgb.DMatrix(train$data, label = train$label, nthread = 1), nrounds = 4, params = xgb.params( max_depth = 2, objective = "binary:logistic", nthread = n_threads ) ) predt <- predict( booster, newdata = train$data[1, , drop = FALSE], predcontrib = TRUE ) expect_equal(dim(predt), c(1, dim(train$data)[2] + 1)) predt <- predict( booster, newdata = train$data[1, , drop = FALSE], predinteraction = TRUE ) expect_equal(dim(predt), c(1, dim(train$data)[2] + 1, dim(train$data)[2] + 1)) }) xgboost-3.0.0/R-package/tests/testthat/test_io.R000066400000000000000000000040521476511272400215760ustar00rootroot00000000000000context("Test model IO.") data(agaricus.train, package = "xgboost") data(agaricus.test, package = "xgboost") train <- agaricus.train test <- agaricus.test test_that("load/save raw works", { nrounds <- 8 booster <- xgb.train( data = xgb.DMatrix(train$data, label = train$label, nthread = 1), nrounds = nrounds, params = xgb.params( objective = "binary:logistic", nthread = 2 ) ) 
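  # round-trip through each supported raw format: "json" (text), "ubj" (binary
  # JSON) and the legacy "deprecated" binary; re-encoding the decoded models to
  # the legacy format must yield byte-identical output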
json_bytes <- xgb.save.raw(booster, raw_format = "json") ubj_bytes <- xgb.save.raw(booster, raw_format = "ubj") old_bytes <- xgb.save.raw(booster, raw_format = "deprecated") from_json <- xgb.load.raw(json_bytes) from_ubj <- xgb.load.raw(ubj_bytes) json2old <- xgb.save.raw(from_json, raw_format = "deprecated") ubj2old <- xgb.save.raw(from_ubj, raw_format = "deprecated") expect_equal(json2old, ubj2old) expect_equal(json2old, old_bytes) }) test_that("saveRDS preserves C and R attributes", { data(mtcars) y <- mtcars$mpg x <- as.matrix(mtcars[, -1]) dm <- xgb.DMatrix(x, label = y, nthread = 1) model <- xgb.train( data = dm, params = xgb.params(nthread = 1, max_depth = 2), nrounds = 5 ) attributes(model)$my_attr <- "qwerty" xgb.attr(model, "c_attr") <- "asdf" fname <- file.path(tempdir(), "xgb_model.Rds") saveRDS(model, fname) model_new <- readRDS(fname) expect_equal(attributes(model_new)$my_attr, attributes(model)$my_attr) expect_equal(xgb.attr(model, "c_attr"), xgb.attr(model_new, "c_attr")) }) test_that("R serializers keep C config", { data(mtcars) y <- mtcars$mpg x <- as.matrix(mtcars[, -1]) dm <- xgb.DMatrix(x, label = y, nthread = 1) model <- xgb.train( data = dm, params = list( tree_method = "approx", nthread = 1, max_depth = 2 ), nrounds = 3 ) model_new <- unserialize(serialize(model, NULL)) expect_equal( xgb.config(model)$learner$gradient_booster$gbtree_train_param$tree_method, xgb.config(model_new)$learner$gradient_booster$gbtree_train_param$tree_method ) expect_equal(variable.names(model), variable.names(model_new)) }) xgboost-3.0.0/R-package/tests/testthat/test_model_compatibility.R000066400000000000000000000076471476511272400252350ustar00rootroot00000000000000context("Models from previous versions of XGBoost can be loaded") metadata <- list( kRounds = 2, kRows = 1000, kCols = 4, kForests = 2, kMaxDepth = 2, kClasses = 3 ) run_model_param_check <- function(config) { testthat::expect_equal(config$learner$learner_model_param$num_feature, '4') testthat::expect_equal(config$learner$learner_train_param$booster, 'gbtree') } get_num_tree <- function(booster) { dump <- xgb.dump(booster) m <- regexec('booster\\[[0-9]+\\]', dump, perl = TRUE) m <- regmatches(dump, m) num_tree <- Reduce('+', lapply(m, length)) return(num_tree) } run_booster_check <- function(booster, name) { config <- xgb.config(booster) run_model_param_check(config) if (name == 'cls') { testthat::expect_equal(get_num_tree(booster), metadata$kForests * metadata$kRounds * metadata$kClasses) testthat::expect_equal(as.numeric(config$learner$learner_model_param$base_score), 0.5) testthat::expect_equal(config$learner$learner_train_param$objective, 'multi:softmax') testthat::expect_equal(as.numeric(config$learner$learner_model_param$num_class), metadata$kClasses) } else if (name == 'logitraw') { testthat::expect_equal(get_num_tree(booster), metadata$kForests * metadata$kRounds) testthat::expect_equal(as.numeric(config$learner$learner_model_param$num_class), 0) testthat::expect_equal(config$learner$learner_train_param$objective, 'binary:logitraw') } else if (name == 'logit') { testthat::expect_equal(get_num_tree(booster), metadata$kForests * metadata$kRounds) testthat::expect_equal(as.numeric(config$learner$learner_model_param$num_class), 0) testthat::expect_equal(config$learner$learner_train_param$objective, 'binary:logistic') } else if (name == 'ltr') { testthat::expect_equal(get_num_tree(booster), metadata$kForests * metadata$kRounds) testthat::expect_equal(config$learner$learner_train_param$objective, 'rank:ndcg') } else { 
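    # Fall-through branch: the only remaining artifact in the compatibility
    # bundle is the squared-error regression model.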
testthat::expect_equal(name, 'reg') testthat::expect_equal(get_num_tree(booster), metadata$kForests * metadata$kRounds) testthat::expect_equal(as.numeric(config$learner$learner_model_param$base_score), 0.5) testthat::expect_equal(config$learner$learner_train_param$objective, 'reg:squarederror') } } test_that("Models from previous versions of XGBoost can be loaded", { bucket <- 'xgboost-ci-jenkins-artifacts' region <- 'us-west-2' file_name <- 'xgboost_r_model_compatibility_test.zip' zipfile <- tempfile(fileext = ".zip") extract_dir <- tempdir() download.file(paste('https://', bucket, '.s3-', region, '.amazonaws.com/', file_name, sep = ''), destfile = zipfile, mode = 'wb', quiet = TRUE) unzip(zipfile, exdir = extract_dir, overwrite = TRUE) model_dir <- file.path(extract_dir, 'models') pred_data <- xgb.DMatrix(matrix(c(0, 0, 0, 0), nrow = 1, ncol = 4), nthread = 2) lapply(list.files(model_dir), function(x) { model_file <- file.path(model_dir, x) m <- regexec("xgboost-([0-9\\.]+)\\.([a-z]+)\\.[a-z]+", model_file, perl = TRUE) m <- regmatches(model_file, m)[[1]] model_xgb_ver <- m[2] name <- m[3] is_rds <- endsWith(model_file, '.rds') is_json <- endsWith(model_file, '.json') # TODO: update this test for new RDS format if (is_rds) { return(NULL) } # Expect an R warning when a model is loaded from RDS and it was generated by version < 1.1.x if (is_rds && compareVersion(model_xgb_ver, '1.1.1.1') < 0) { booster <- readRDS(model_file) expect_warning(predict(booster, newdata = pred_data)) booster <- readRDS(model_file) expect_warning(run_booster_check(booster, name)) } else { if (is_rds) { booster <- readRDS(model_file) } else { booster <- xgb.load(model_file) xgb.model.parameters(booster) <- list(nthread = 2) } predict(booster, newdata = pred_data) run_booster_check(booster, name) } }) }) xgboost-3.0.0/R-package/tests/testthat/test_monotone.R000066400000000000000000000012311476511272400230210ustar00rootroot00000000000000context("monotone constraints") set.seed(1024) x <- rnorm(1000, 10) y <- -1 * x + rnorm(1000, 0.001) + 3 * sin(x) train <- matrix(x, ncol = 1) test_that("monotone constraints for regression", { bst <- xgb.train( data = xgb.DMatrix(train, label = y, nthread = 1), nrounds = 100, verbose = 0, params = xgb.params( max_depth = 2, learning_rate = 0.1, nthread = 2, monotone_constraints = -1 ) ) pred <- predict(bst, train) ind <- order(train[, 1]) pred.ord <- pred[ind] expect_true({ !any(diff(pred.ord) > 0) }, "Monotone constraint satisfied") }) xgboost-3.0.0/R-package/tests/testthat/test_parameter_exposure.R000066400000000000000000000016011476511272400250760ustar00rootroot00000000000000context('Test model params and call are exposed to R') data(agaricus.train, package = 'xgboost') data(agaricus.test, package = 'xgboost') dtrain <- xgb.DMatrix( agaricus.train$data, label = agaricus.train$label, nthread = 2 ) dtest <- xgb.DMatrix( agaricus.test$data, label = agaricus.test$label, nthread = 2 ) bst <- xgb.train( data = dtrain, verbose = 0, nrounds = 10, params = xgb.params( max_depth = 2, learning_rate = 1, nthread = 1, objective = "binary:logistic" ) ) test_that("call is exposed to R", { expect_false(is.null(attributes(bst)$call)) expect_is(attributes(bst)$call, "call") }) test_that("params is exposed to R", { model_params <- attributes(bst)$params expect_is(model_params, "list") expect_equal(model_params$learning_rate, 1) expect_equal(model_params$max_depth, 2) expect_equal(model_params$objective, "binary:logistic") }) 
xgboost-3.0.0/R-package/tests/testthat/test_poisson_regression.R000066400000000000000000000030211476511272400251140ustar00rootroot00000000000000context('Test Poisson regression model') set.seed(1994) test_that("Poisson regression works", { data(mtcars) bst <- xgb.train( data = xgb.DMatrix(as.matrix(mtcars[, -11]), label = mtcars[, 11], nthread = 1), nrounds = 10, verbose = 0, params = xgb.params(objective = 'count:poisson', nthread = 2) ) expect_equal(class(bst), "xgb.Booster") pred <- predict(bst, as.matrix(mtcars[, -11])) expect_equal(length(pred), 32) expect_lt(sqrt(mean((pred - mtcars[, 11])^2)), 1.2) }) test_that("Poisson regression is centered around mean", { m <- 50L n <- 10L y <- rpois(m, n) x <- matrix(rnorm(m * n), nrow = m) model <- xgb.train( data = xgb.DMatrix(x, label = y, nthread = 1), params = xgb.params(objective = "count:poisson", min_split_loss = 1e4), nrounds = 1 ) model_json <- xgb.save.raw(model, "json") |> rawToChar() |> jsonlite::fromJSON() expect_equal( model_json$learner$learner_model_param$base_score |> as.numeric(), mean(y), tolerance = 1e-4 ) pred <- predict(model, x) expect_equal( pred, rep(mean(y), m), tolerance = 1e-4 ) w <- y + 1 model_weighted <- xgb.train( data = xgb.DMatrix(x, label = y, weight = w, nthread = 1), params = xgb.params(objective = "count:poisson", min_split_loss = 1e4), nrounds = 1 ) model_json <- xgb.save.raw(model_weighted, "json") |> rawToChar() |> jsonlite::fromJSON() expect_equal( model_json$learner$learner_model_param$base_score |> as.numeric(), weighted.mean(y, w), tolerance = 1e-4 ) }) xgboost-3.0.0/R-package/tests/testthat/test_ranking.R000066400000000000000000000045751476511272400226320ustar00rootroot00000000000000context('Learning to rank') n_threads <- 2 test_that('Test ranking with unweighted data', { X <- Matrix::sparseMatrix( i = c(2, 3, 7, 9, 12, 15, 17, 18) , j = c(1, 1, 2, 2, 3, 3, 4, 4) , x = rep(1.0, 8) , dims = c(20, 4) ) y <- c(0, 1, 1, 0, 0, 0, 1, 0, 1, 0, 0, 1, 0, 0, 1, 0, 1, 1, 0, 0) group <- c(5, 5, 5, 5) dtrain <- xgb.DMatrix(X, label = y, group = group, nthread = n_threads) params <- xgb.params( learning_rate = 1, tree_method = 'exact', objective = 'rank:pairwise', max_depth = 1, eval_metric = c('auc', 'aucpr'), nthread = n_threads ) bst <- xgb.train(params, dtrain, nrounds = 10, evals = list(train = dtrain), verbose = 0) # Check if the metric is monotone increasing expect_true(all(diff(attributes(bst)$evaluation_log$train_auc) >= 0)) expect_true(all(diff(attributes(bst)$evaluation_log$train_aucpr) >= 0)) }) test_that('Test ranking with weighted data', { X <- Matrix::sparseMatrix( i = c(2, 3, 7, 9, 12, 15, 17, 18) , j = c(1, 1, 2, 2, 3, 3, 4, 4) , x = rep(1.0, 8) , dims = c(20, 4) ) y <- c(0, 1, 1, 0, 0, 0, 1, 0, 1, 0, 0, 1, 0, 0, 1, 0, 1, 1, 0, 0) group <- c(5, 5, 5, 5) weight <- c(1.0, 2.0, 3.0, 4.0) dtrain <- xgb.DMatrix( X, label = y, group = group, weight = weight, nthread = n_threads ) params <- xgb.params( learning_rate = 1, tree_method = "exact", objective = "rank:pairwise", max_depth = 1, eval_metric = c("auc", "aucpr"), nthread = n_threads ) bst <- xgb.train(params, dtrain, nrounds = 10, evals = list(train = dtrain), verbose = 0) # Check if the metric is monotone increasing expect_true(all(diff(attributes(bst)$evaluation_log$train_auc) >= 0)) expect_true(all(diff(attributes(bst)$evaluation_log$train_aucpr) >= 0)) for (i in 1:10) { pred <- predict(bst, newdata = dtrain, iterationrange = c(1, i)) # is_sorted[i]: is i-th group correctly sorted by the ranking predictor? 
is_sorted <- lapply(seq(1, 20, by = 5), function(k) { ind <- order(-pred[k:(k + 4)]) z <- y[ind + (k - 1)] all(diff(z) <= 0) # Check if z is monotone decreasing }) # Since we give weights 1, 2, 3, 4 to the four query groups, # the ranking predictor will first try to correctly sort the last query group # before correctly sorting other groups. expect_true(all(diff(as.numeric(is_sorted)) >= 0)) } }) xgboost-3.0.0/R-package/tests/testthat/test_unicode.R000066400000000000000000000014341476511272400226160ustar00rootroot00000000000000context("Test Unicode handling") data(agaricus.train, package = 'xgboost') data(agaricus.test, package = 'xgboost') train <- agaricus.train test <- agaricus.test set.seed(1994) test_that("Can save and load models with Unicode paths", { nrounds <- 2 bst <- xgb.train( data = xgb.DMatrix(train$data, label = train$label, nthread = 1), nrounds = nrounds, params = xgb.params( max_depth = 2, nthread = 2, objective = "binary:logistic" ) ) tmpdir <- tempdir() lapply(c("모델.json", "がうる・ぐら.json", "类继承.ubj"), function(x) { path <- file.path(tmpdir, x) xgb.save(bst, path) bst2 <- xgb.load(path) xgb.model.parameters(bst2) <- list(nthread = 2) expect_equal(predict(bst, test$data), predict(bst2, test$data)) }) }) xgboost-3.0.0/R-package/tests/testthat/test_update.R000066400000000000000000000147641476511272400224640ustar00rootroot00000000000000context("update trees in an existing model") data(agaricus.train, package = 'xgboost') data(agaricus.test, package = 'xgboost') n_threads <- 1 dtrain <- xgb.DMatrix( agaricus.train$data, label = agaricus.train$label, nthread = n_threads ) dtest <- xgb.DMatrix( agaricus.test$data, label = agaricus.test$label, nthread = n_threads ) # Disable flaky tests for 32-bit Windows. # See https://github.com/dmlc/xgboost/issues/3720 win32_flag <- .Platform$OS.type == "windows" && .Machine$sizeof.pointer != 8 test_that("updating the model works", { evals <- list(train = dtrain, test = dtest) # no-subsampling p1 <- xgb.params( objective = "binary:logistic", max_depth = 2, learning_rate = 0.05, nthread = n_threads, updater = "grow_colmaker,prune" ) set.seed(11) bst1 <- xgb.train(p1, dtrain, nrounds = 10, evals = evals, verbose = 0) tr1 <- xgb.model.dt.tree(model = bst1) # with subsampling p2 <- modifyList(p1, list(subsample = 0.1)) set.seed(11) bst2 <- xgb.train(p2, dtrain, nrounds = 10, evals = evals, verbose = 0) tr2 <- xgb.model.dt.tree(model = bst2) # the same no-subsampling boosting with an extra 'refresh' updater: p1r <- modifyList(p1, list(updater = 'grow_colmaker,prune,refresh', refresh_leaf = FALSE)) set.seed(11) bst1r <- xgb.train(p1r, dtrain, nrounds = 10, evals = evals, verbose = 0) tr1r <- xgb.model.dt.tree(model = bst1r) # all should be the same when no subsampling expect_equal(attributes(bst1)$evaluation_log, attributes(bst1r)$evaluation_log) expect_equal( jsonlite::fromJSON(rawToChar(xgb.save.raw(bst1, raw_format = "json"))), jsonlite::fromJSON(rawToChar(xgb.save.raw(bst1r, raw_format = "json"))), tolerance = 1e-6 ) if (!win32_flag) { expect_equal(tr1, tr1r, tolerance = 0.00001, check.attributes = FALSE) } # the same boosting with subsampling with an extra 'refresh' updater: p2r <- modifyList(p2, list(updater = 'grow_colmaker,prune,refresh', refresh_leaf = FALSE)) set.seed(11) bst2r <- xgb.train(p2r, dtrain, nrounds = 10, evals = evals, verbose = 0) tr2r <- xgb.model.dt.tree(model = bst2r) # should be the same evaluation but different gains and larger cover expect_equal(attributes(bst2)$evaluation_log, attributes(bst2r)$evaluation_log) if 
(!win32_flag) { expect_equal(tr2[Feature == 'Leaf']$Gain, tr2r[Feature == 'Leaf']$Gain) } expect_gt(sum(abs(tr2[Feature != 'Leaf']$Gain - tr2r[Feature != 'Leaf']$Gain)), 100) expect_gt(sum(tr2r$Cover) / sum(tr2$Cover), 1.5) # process type 'update' for no-subsampling model, refreshing the tree stats AND leaves from training data: set.seed(123) p1u <- modifyList(p1, list(process_type = 'update', updater = 'refresh', refresh_leaf = TRUE)) bst1u <- xgb.train(p1u, dtrain, nrounds = 10, evals = evals, verbose = 0, xgb_model = bst1) tr1u <- xgb.model.dt.tree(model = bst1u) # all should be the same when no subsampling expect_equal(attributes(bst1)$evaluation_log, attributes(bst1u)$evaluation_log) expect_equal( jsonlite::fromJSON(rawToChar(xgb.save.raw(bst1, raw_format = "json"))), jsonlite::fromJSON(rawToChar(xgb.save.raw(bst1u, raw_format = "json"))), tolerance = 1e-6 ) expect_equal(tr1, tr1u, tolerance = 0.00001, check.attributes = FALSE) # same thing but with a serialized model set.seed(123) bst1u <- xgb.train(p1u, dtrain, nrounds = 10, evals = evals, verbose = 0, xgb_model = xgb.save.raw(bst1)) tr1u <- xgb.model.dt.tree(model = bst1u) # all should be the same when no subsampling expect_equal(attributes(bst1)$evaluation_log, attributes(bst1u)$evaluation_log) expect_equal(tr1, tr1u, tolerance = 0.00001, check.attributes = FALSE) # process type 'update' for model with subsampling, refreshing only the tree stats from training data: p2u <- modifyList(p2, list(process_type = 'update', updater = 'refresh', refresh_leaf = FALSE)) bst2u <- xgb.train(p2u, dtrain, nrounds = 10, evals = evals, verbose = 0, xgb_model = bst2) tr2u <- xgb.model.dt.tree(model = bst2u) # should be the same evaluation but different gains and larger cover expect_equal(attributes(bst2)$evaluation_log, attributes(bst2u)$evaluation_log) expect_equal(tr2[Feature == 'Leaf']$Gain, tr2u[Feature == 'Leaf']$Gain) expect_gt(sum(abs(tr2[Feature != 'Leaf']$Gain - tr2u[Feature != 'Leaf']$Gain)), 100) expect_gt(sum(tr2u$Cover) / sum(tr2$Cover), 1.5) # the results should be the same as for the model with an extra 'refresh' updater expect_equal(attributes(bst2r)$evaluation_log, attributes(bst2u)$evaluation_log) if (!win32_flag) { expect_equal(tr2r, tr2u, tolerance = 0.00001, check.attributes = FALSE) } # process type 'update' for no-subsampling model, refreshing only the tree stats from TEST data: p1ut <- modifyList(p1, list(process_type = 'update', updater = 'refresh', refresh_leaf = FALSE)) bst1ut <- xgb.train(p1ut, dtest, nrounds = 10, evals = evals, verbose = 0, xgb_model = bst1) tr1ut <- xgb.model.dt.tree(model = bst1ut) # should be the same evaluations but different gains and smaller cover (test data is smaller) expect_equal(attributes(bst1)$evaluation_log, attributes(bst1ut)$evaluation_log) expect_equal(tr1[Feature == 'Leaf']$Gain, tr1ut[Feature == 'Leaf']$Gain) expect_gt(sum(abs(tr1[Feature != 'Leaf']$Gain - tr1ut[Feature != 'Leaf']$Gain)), 100) expect_lt(sum(tr1ut$Cover) / sum(tr1$Cover), 0.5) }) test_that("updating works for multiclass & multitree", { dtr <- xgb.DMatrix( as.matrix(iris[, -5]), label = as.numeric(iris$Species) - 1, nthread = n_threads ) evals <- list(train = dtr) p0 <- xgb.params( max_depth = 2, learning_rate = 0.5, nthread = n_threads, subsample = 0.6, objective = "multi:softprob", num_class = 3, num_parallel_tree = 2, base_score = 0 ) set.seed(121) bst0 <- xgb.train(p0, dtr, 5, evals = evals, verbose = 0) tr0 <- xgb.model.dt.tree(model = bst0) # run update process for an original model with subsampling p0u <- 
modifyList(p0, list(process_type = 'update', updater = 'refresh', refresh_leaf = FALSE)) bst0u <- xgb.train(p0u, dtr, nrounds = xgb.get.num.boosted.rounds(bst0), evals = evals, xgb_model = bst0, verbose = 0) tr0u <- xgb.model.dt.tree(model = bst0u) # should be the same evaluation but different gains and larger cover expect_equal(attributes(bst0)$evaluation_log, attributes(bst0u)$evaluation_log) expect_equal(tr0[Feature == 'Leaf']$Gain, tr0u[Feature == 'Leaf']$Gain) expect_gt(sum(abs(tr0[Feature != 'Leaf']$Gain - tr0u[Feature != 'Leaf']$Gain)), 100) expect_gt(sum(tr0u$Cover) / sum(tr0$Cover), 1.5) }) xgboost-3.0.0/R-package/tests/testthat/test_xgboost.R000066400000000000000000000655561476511272400226740ustar00rootroot00000000000000library(survival) library(data.table) data("iris") data("mtcars") data("ToothGrowth") test_that("Auto determine objective", { y_num <- seq(1, 10) res_num <- process.y.margin.and.objective(y_num, NULL, NULL, NULL) expect_equal(res_num$params$objective, "reg:squarederror") y_bin <- factor(c('a', 'b', 'a', 'b'), c('a', 'b')) res_bin <- process.y.margin.and.objective(y_bin, NULL, NULL, NULL) expect_equal(res_bin$params$objective, "binary:logistic") y_multi <- factor(c('a', 'b', 'a', 'b', 'c'), c('a', 'b', 'c')) res_multi <- process.y.margin.and.objective(y_multi, NULL, NULL, NULL) expect_equal(res_multi$params$objective, "multi:softprob") y_surv <- Surv(1:10, rep(c(0, 1), 5), type = "right") res_surv <- process.y.margin.and.objective(y_surv, NULL, NULL, NULL) expect_equal(res_surv$params$objective, "survival:aft") y_multicol <- matrix(seq(1, 20), nrow = 5) res_multicol <- process.y.margin.and.objective(y_multicol, NULL, NULL, NULL) expect_equal(res_multicol$params$objective, "reg:squarederror") }) test_that("Process vectors", { y <- seq(1, 10) for (y_inp in list(as.integer(y), as.numeric(y))) { res <- process.y.margin.and.objective(y_inp, NULL, "reg:pseudohubererror", NULL) expect_equal( res$dmatrix_args$label, y ) expect_equal( res$params$objective, "reg:pseudohubererror" ) } }) test_that("Process factors", { y_bin <- factor(c('a', 'b', 'a', 'b'), c('a', 'b')) expect_error({ process.y.margin.and.objective(y_bin, NULL, "multi:softprob", NULL) }) for (bin_obj in c("binary:logistic", "binary:hinge")) { for (y_inp in list(y_bin, as.ordered(y_bin))) { res_bin <- process.y.margin.and.objective(y_inp, NULL, bin_obj, NULL) expect_equal( res_bin$dmatrix_args$label, c(0, 1, 0, 1) ) expect_equal( res_bin$metadata$y_levels, c('a', 'b') ) expect_equal( res_bin$params$objective, bin_obj ) } } y_bin2 <- factor(c(1, 0, 1, 0), c(1, 0)) res_bin <- process.y.margin.and.objective(y_bin2, NULL, "binary:logistic", NULL) expect_equal( res_bin$dmatrix_args$label, c(0, 1, 0, 1) ) expect_equal( res_bin$metadata$y_levels, c("1", "0") ) y_bin3 <- c(TRUE, FALSE, TRUE) res_bin <- process.y.margin.and.objective(y_bin3, NULL, "binary:logistic", NULL) expect_equal( res_bin$dmatrix_args$label, c(1, 0, 1) ) expect_equal( res_bin$metadata$y_levels, c("FALSE", "TRUE") ) y_multi <- factor(c('a', 'b', 'c', 'd', 'a', 'b'), c('a', 'b', 'c', 'd')) expect_error({ process.y.margin.and.objective(y_multi, NULL, "binary:logistic", NULL) }) expect_error({ process.y.margin.and.objective(y_multi, NULL, "binary:logistic", NULL) }) res_multi <- process.y.margin.and.objective(y_multi, NULL, "multi:softprob", NULL) expect_equal( res_multi$dmatrix_args$label, c(0, 1, 2, 3, 0, 1) ) expect_equal( res_multi$metadata$y_levels, c('a', 'b', 'c', 'd') ) expect_equal( res_multi$params$num_class, 4 ) expect_equal( 
res_multi$params$objective, "multi:softprob" ) }) test_that("Process survival objects", { data(cancer, package = "survival") y_right <- Surv(cancer$time, cancer$status - 1, type = "right") res_cox <- process.y.margin.and.objective(y_right, NULL, "survival:cox", NULL) expect_equal( res_cox$dmatrix_args$label, ifelse(cancer$status == 2, cancer$time, -cancer$time) ) expect_equal( res_cox$params$objective, "survival:cox" ) res_aft <- process.y.margin.and.objective(y_right, NULL, "survival:aft", NULL) expect_equal( res_aft$dmatrix_args$label_lower_bound, cancer$time ) expect_equal( res_aft$dmatrix_args$label_upper_bound, ifelse(cancer$status == 2, cancer$time, Inf) ) expect_equal( res_aft$params$objective, "survival:aft" ) y_left <- Surv(seq(1, 4), c(1, 0, 1, 0), type = "left") expect_error({ process.y.margin.and.objective(y_left, NULL, "survival:cox", NULL) }) res_aft <- process.y.margin.and.objective(y_left, NULL, "survival:aft", NULL) expect_equal( res_aft$dmatrix_args$label_lower_bound, c(1, 0, 3, 0) ) expect_equal( res_aft$dmatrix_args$label_upper_bound, seq(1, 4) ) expect_equal( res_aft$params$objective, "survival:aft" ) y_interval <- Surv( time = c(1, 5, 2, 10, 3), time2 = c(2, 5, 2.5, 10, 3), event = c(3, 1, 3, 0, 2), type = "interval" ) expect_error({ process.y.margin.and.objective(y_interval, NULL, "survival:cox", NULL) }) res_aft <- process.y.margin.and.objective(y_interval, NULL, "survival:aft", NULL) expect_equal( res_aft$dmatrix_args$label_lower_bound, c(1, 5, 2, 10, 0) ) expect_equal( res_aft$dmatrix_args$label_upper_bound, c(2, 5, 2.5, Inf, 3) ) expect_equal( res_aft$params$objective, "survival:aft" ) y_interval_neg <- Surv( time = c(1, -5, 2, 10, 3), time2 = c(2, -5, 2.5, 10, 3), event = c(3, 1, 3, 0, 2), type = "interval" ) expect_error({ process.y.margin.and.objective(y_interval_neg, NULL, "survival:aft", NULL) }) }) test_that("Process multi-target", { data(mtcars) y_multi <- data.frame( y1 = mtcars$mpg, y2 = mtcars$mpg ^ 2 ) for (y_inp in list(y_multi, as.matrix(y_multi), data.table::as.data.table(y_multi))) { res_multi <- process.y.margin.and.objective(y_inp, NULL, "reg:pseudohubererror", NULL) expect_equal( res_multi$dmatrix_args$label, as.matrix(y_multi) ) expect_equal( res_multi$metadata$y_names, c("y1", "y2") ) expect_equal( res_multi$params$objective, "reg:pseudohubererror" ) } expect_error({ process.y.margin.and.objective(y_multi, NULL, "count:poisson", NULL) }) y_bad <- data.frame( c1 = seq(1, 3), c2 = rep(as.Date("2024-01-01"), 3) ) expect_error({ process.y.margin.and.objective(y_bad, NULL, "reg:squarederror", NULL) }) y_bad <- data.frame( c1 = seq(1, 3), c2 = factor(c('a', 'b', 'a'), c('a', 'b')) ) expect_error({ process.y.margin.and.objective(y_bad, NULL, "reg:squarederror", NULL) }) y_bad <- seq(1, 20) dim(y_bad) <- c(5, 2, 2) expect_error({ process.y.margin.and.objective(y_bad, NULL, "reg:squarederror", NULL) }) }) test_that("Process base_margin", { y <- seq(101, 110) bm_good <- seq(1, 10) for (bm in list(bm_good, as.matrix(bm_good), as.data.frame(as.matrix(bm_good)))) { res <- process.y.margin.and.objective(y, bm, "reg:squarederror", NULL) expect_equal( res$dmatrix_args$base_margin, seq(1, 10) ) } expect_error({ process.y.margin.and.objective(y, 5, "reg:squarederror", NULL) }) expect_error({ process.y.margin.and.objective(y, seq(1, 5), "reg:squarederror", NULL) }) expect_error({ process.y.margin.and.objective(y, matrix(seq(1, 20), ncol = 2), "reg:squarederror", NULL) }) expect_error({ process.y.margin.and.objective( y, as.data.frame(matrix(seq(1, 20), ncol = 
2)), "reg:squarederror", NULL ) }) y <- factor(c('a', 'b', 'c', 'a')) bm_good <- matrix(seq(1, 12), ncol = 3) for (bm in list(bm_good, as.data.frame(bm_good))) { res <- process.y.margin.and.objective(y, bm, "multi:softprob", NULL) expect_equal( res$dmatrix_args$base_margin |> unname(), matrix(seq(1, 12), ncol = 3) ) } expect_error({ process.y.margin.and.objective(y, as.numeric(bm_good), "multi:softprob", NULL) }) expect_error({ process.y.margin.and.objective(y, 5, "multi:softprob", NULL) }) expect_error({ process.y.margin.and.objective(y, bm_good[, 1], "multi:softprob", NULL) }) expect_error({ process.y.margin.and.objective(y, bm_good[, c(1, 2)], "multi:softprob", NULL) }) expect_error({ process.y.margin.and.objective(y, bm_good[c(1, 2), ], "multi:softprob", NULL) }) y <- seq(101, 110) bm_good <- matrix(seq(1, 30), ncol = 3) params <- list(quantile_alpha = c(0.1, 0.5, 0.9)) for (bm in list(bm_good, as.data.frame(bm_good))) { res <- process.y.margin.and.objective(y, bm, "reg:quantileerror", params) expect_equal( res$dmatrix_args$base_margin |> unname(), matrix(seq(1, 30), ncol = 3) ) } expect_error({ process.y.margin.and.objective(y, as.numeric(bm_good), "reg:quantileerror", params) }) expect_error({ process.y.margin.and.objective(y, 5, "reg:quantileerror", params) }) expect_error({ process.y.margin.and.objective(y, bm_good[, 1], "reg:quantileerror", params) }) expect_error({ process.y.margin.and.objective(y, bm_good[, c(1, 2)], "reg:quantileerror", params) }) expect_error({ process.y.margin.and.objective(y, bm_good[c(1, 2, 3), ], "reg:quantileerror", params) }) y <- matrix(seq(101, 130), ncol = 3) for (bm in list(bm_good, as.data.frame(bm_good))) { res <- process.y.margin.and.objective(y, bm, "reg:squarederror", params) expect_equal( res$dmatrix_args$base_margin |> unname(), matrix(seq(1, 30), ncol = 3) ) } expect_error({ process.y.margin.and.objective(y, as.numeric(bm_good), "reg:squarederror", params) }) expect_error({ process.y.margin.and.objective(y, 5, "reg:squarederror", params) }) expect_error({ process.y.margin.and.objective(y, bm_good[, 1], "reg:squarederror", params) }) expect_error({ process.y.margin.and.objective(y, bm_good[, c(1, 2)], "reg:squarederror", params) }) expect_error({ process.y.margin.and.objective(y, bm_good[c(1, 2, 3), ], "reg:squarederror", params) }) }) test_that("Process monotone constraints", { data(iris) mc_list <- list(Sepal.Width = 1) res <- process.x.and.col.args( iris, monotone_constraints = mc_list, interaction_constraints = NULL, feature_weights = NULL, lst_args = list(), use_qdm = FALSE ) expect_equal( res$params$monotone_constraints, c(0, 1, 0, 0, 0) ) mc_list2 <- list(Sepal.Width = 1, Petal.Width = -1) res <- process.x.and.col.args( iris, monotone_constraints = mc_list2, interaction_constraints = NULL, feature_weights = NULL, lst_args = list(), use_qdm = FALSE ) expect_equal( res$params$monotone_constraints, c(0, 1, 0, -1, 0) ) mc_vec <- c(0, 1, -1, 0, 0) res <- process.x.and.col.args( iris, monotone_constraints = mc_vec, interaction_constraints = NULL, feature_weights = NULL, lst_args = list(), use_qdm = FALSE ) expect_equal( res$params$monotone_constraints, c(0, 1, -1, 0, 0) ) mc_named_vec <- c(1, 1) names(mc_named_vec) <- names(iris)[1:2] res <- process.x.and.col.args( iris, monotone_constraints = mc_named_vec, interaction_constraints = NULL, feature_weights = NULL, lst_args = list(), use_qdm = FALSE ) expect_equal( res$params$monotone_constraints, c(1, 1, 0, 0, 0) ) mc_named_all <- c(0, -1, 1, 0, -1) names(mc_named_all) <- rev(names(iris)) res 
<- process.x.and.col.args( iris, monotone_constraints = mc_named_all, interaction_constraints = NULL, feature_weights = NULL, lst_args = list(), use_qdm = FALSE ) expect_equal( res$params$monotone_constraints, rev(mc_named_all) |> unname() ) expect_error({ process.x.and.col.args( iris, monotone_constraints = list( Sepal.Width = 1, Petal.Width = -1, Sepal.Width = -1 ), interaction_constraints = NULL, feature_weights = NULL, lst_args = list(), use_qdm = FALSE ) }) expect_error({ process.x.and.col.args( iris, monotone_constraints = rep(0, 6), interaction_constraints = NULL, feature_weights = NULL, lst_args = list(), use_qdm = FALSE ) }) }) test_that("Process interaction_constraints", { data(iris) res <- process.x.and.col.args(iris, NULL, list(c(1L, 2L)), NULL, NULL, FALSE) expect_equal( res$params$interaction_constraints, list(c(0, 1)) ) res <- process.x.and.col.args(iris, NULL, list(c(1.0, 2.0)), NULL, NULL, FALSE) expect_equal( res$params$interaction_constraints, list(c(0, 1)) ) res <- process.x.and.col.args(iris, NULL, list(c(1, 2), c(3, 4)), NULL, NULL, FALSE) expect_equal( res$params$interaction_constraints, list(c(0, 1), c(2, 3)) ) res <- process.x.and.col.args( iris, NULL, list(c("Sepal.Length", "Sepal.Width")), NULL, NULL, FALSE ) expect_equal( res$params$interaction_constraints, list(c(0, 1)) ) res <- process.x.and.col.args( as.matrix(iris), NULL, list(c("Sepal.Length", "Sepal.Width")), NULL, NULL, FALSE ) expect_equal( res$params$interaction_constraints, list(c(0, 1)) ) res <- process.x.and.col.args( iris, NULL, list(c("Sepal.Width", "Petal.Length"), c("Sepal.Length", "Petal.Width", "Species")), NULL, NULL, FALSE ) expect_equal( res$params$interaction_constraints, list(c(1, 2), c(0, 3, 4)) ) expect_error({ process.x.and.col.args(iris, NULL, list(c(1L, 20L)), NULL, NULL, FALSE) }) expect_error({ process.x.and.col.args(iris, NULL, list(c(0L, 2L)), NULL, NULL, FALSE) }) expect_error({ process.x.and.col.args(iris, NULL, list(c("1", "2")), NULL, NULL, FALSE) }) expect_error({ process.x.and.col.args(iris, NULL, list(c("Sepal", "Petal")), NULL, NULL, FALSE) }) expect_error({ process.x.and.col.args(iris, NULL, c(1L, 2L), NULL, NULL, FALSE) }) expect_error({ process.x.and.col.args(iris, NULL, matrix(c(1L, 2L)), NULL, NULL, FALSE) }) expect_error({ process.x.and.col.args(iris, NULL, list(c(1, 2.5)), NULL, NULL, FALSE) }) }) test_that("Sparse matrices are casted to CSR for QDM", { data(agaricus.test, package = "xgboost") x <- agaricus.test$data for (x_in in list(x, methods::as(x, "TsparseMatrix"))) { res <- process.x.and.col.args( x_in, NULL, NULL, NULL, NULL, TRUE ) expect_s4_class(res$dmatrix_args$data, "dgRMatrix") } }) test_that("Process feature_weights", { data(iris) w_vector <- seq(1, 5) res <- process.x.and.col.args( iris, monotone_constraints = NULL, interaction_constraints = NULL, feature_weights = w_vector, lst_args = list(), use_qdm = FALSE ) expect_equal( res$dmatrix_args$feature_weights, seq(1, 5) ) w_named_vector <- seq(1, 5) names(w_named_vector) <- rev(names(iris)) res <- process.x.and.col.args( iris, monotone_constraints = NULL, interaction_constraints = NULL, feature_weights = w_named_vector, lst_args = list(), use_qdm = FALSE ) expect_equal( res$dmatrix_args$feature_weights, rev(seq(1, 5)) ) w_list <- list( Species = 5, Sepal.Length = 1, Sepal.Width = 2, Petal.Length = 3, Petal.Width = 4 ) res <- process.x.and.col.args( iris, monotone_constraints = NULL, interaction_constraints = NULL, feature_weights = w_list, lst_args = list(), use_qdm = FALSE ) expect_equal( 
res$dmatrix_args$feature_weights, seq(1, 5) ) }) test_that("Whole function works", { data(cancer, package = "survival") y <- Surv(cancer$time, cancer$status - 1, type = "right") x <- as.data.table(cancer)[, -c("time", "status")] model <- xgboost( x, y, monotone_constraints = list(age = -1), nthreads = 1L, nrounds = 5L, learning_rate = 3 ) expect_equal( attributes(model)$params$objective, "survival:aft" ) expect_equal( attributes(model)$metadata$n_targets, 1L ) expect_equal( attributes(model)$params$monotone_constraints, "(0,-1,0,0,0,0,0,0)" ) expect_false( "interaction_constraints" %in% names(attributes(model)$params) ) expect_equal( attributes(model)$params$learning_rate, 3 ) txt <- capture.output({ print(model) }) expect_true(any(grepl("Objective: survival:aft", txt, fixed = TRUE))) expect_true(any(grepl("monotone_constraints", txt, fixed = TRUE))) expect_true(any(grepl("Number of iterations: 5", txt, fixed = TRUE))) expect_true(any(grepl("Number of features: 8", txt, fixed = TRUE))) }) test_that("Can predict probabilities and raw scores", { y <- ToothGrowth$supp x <- ToothGrowth[, -2L] model <- xgboost(x, y, nthreads = 1L, nrounds = 3L, max_depth = 2L) pred_prob <- predict(model, x, type = "response") pred_raw <- predict(model, x, type = "raw") expect_true(is.vector(pred_prob)) expect_equal(length(pred_prob), nrow(x)) expect_true(min(pred_prob) >= 0) expect_true(max(pred_prob) <= 1) expect_equal(length(pred_raw), nrow(x)) expect_true(is.vector(pred_raw)) expect_true(min(pred_raw) < 0) expect_true(max(pred_raw) > 0) expect_equal( pred_prob, 1 / (1 + exp(-pred_raw)), tolerance = 1e-6 ) }) test_that("Can predict class", { y <- iris$Species x <- iris[, -5L] model <- xgboost(x, y, nthreads = 1L, nrounds = 3L, max_depth = 2L) pred_class <- predict(model, x, type = "class") expect_true(is.factor(pred_class)) expect_equal(levels(pred_class), levels(y)) y <- ToothGrowth$supp x <- ToothGrowth[, -2L] model <- xgboost(x, y, nthreads = 1L, nrounds = 3L, max_depth = 2L) pred_class <- predict(model, x, type = "class") expect_true(is.factor(pred_class)) expect_equal(levels(pred_class), levels(y)) probs <- predict(model, x, type = "response") expect_true(all(pred_class[probs >= 0.5] == levels(y)[[2L]])) expect_true(all(pred_class[probs < 0.5] == levels(y)[[1L]])) # Check that it fails for regression models y <- mtcars$mpg x <- mtcars[, -1L] model <- xgboost(x, y, nthreads = 1L, nrounds = 3L, max_depth = 2L) expect_error({ predict(model, x, type = "class") }) }) test_that("Metadata survives serialization", { y <- iris$Species x <- iris[, -5L] model_fresh <- xgboost(x, y, nthreads = 1L, nrounds = 3L, max_depth = 2L) temp_file <- file.path(tempdir(), "xgb_model.Rds") saveRDS(model_fresh, temp_file) model <- readRDS(temp_file) pred_class <- predict(model, x, type = "class") expect_true(is.factor(pred_class)) expect_equal(levels(pred_class), levels(y)) }) test_that("Column names aren't added when not appropriate", { pred_types <- c( "response", "raw", "leaf" ) for (pred_type in pred_types) { y <- mtcars$mpg x <- mtcars[, -1L] model <- xgboost( x, y, nthreads = 1L, nrounds = 3L, max_depth = 2L, objective = "reg:quantileerror", quantile_alpha = 0.5 ) pred <- predict(model, x, type = pred_type) if (pred_type %in% c("raw", "response")) { expect_true(is.vector(pred)) } else { expect_true(length(dim(pred)) >= 2L) expect_null(colnames(pred)) } y <- ToothGrowth$supp x <- ToothGrowth[, -2L] model <- xgboost(x, y, nthreads = 1L, nrounds = 3L, max_depth = 2L) pred <- predict(model, x, type = pred_type) if (pred_type 
%in% c("raw", "response")) { expect_true(is.vector(pred)) } else { expect_true(length(dim(pred)) >= 2L) expect_null(colnames(pred)) } } }) test_that("Column names from multiclass are added to non-class predictions", { y <- iris$Species x <- iris[, -5L] model <- xgboost(x, y, nthreads = 1L, nrounds = 3L, max_depth = 2L) pred_types_with_colnames <- c( "response", "raw", "contrib", "interaction" ) for (pred_type in pred_types_with_colnames) { pred <- predict(model, x, type = pred_type) expect_equal(nrow(pred), nrow(x)) expect_equal(ncol(pred), 3L) expect_equal(colnames(pred), levels(y)) } }) test_that("Column names from multitarget are added to predictions", { y <- data.frame( ylog = log(mtcars$mpg), ysqrt = sqrt(mtcars$mpg) ) x <- mtcars[, -1L] model <- xgboost(x, y, nthreads = 1L, nrounds = 3L, max_depth = 2L) pred_types_with_colnames <- c( "response", "raw", "contrib", "interaction" ) for (pred_type in pred_types_with_colnames) { pred <- predict(model, x, type = pred_type) expect_equal(nrow(pred), nrow(x)) expect_equal(ncol(pred), 2L) expect_equal(colnames(pred), colnames(y)) } }) test_that("Column names from multiquantile are added to predictions", { y <- mtcars$mpg x <- mtcars[, -1L] model <- xgboost( x, y, nthreads = 1L, nrounds = 3L, max_depth = 2L, objective = "reg:quantileerror", quantile_alpha = c(0.25, 0.5, 0.75) ) pred_types_with_colnames <- c( "response", "raw", "contrib", "interaction" ) for (pred_type in pred_types_with_colnames) { pred <- predict(model, x, type = pred_type) expect_equal(nrow(pred), nrow(x)) expect_equal(ncol(pred), 3L) expect_equal(colnames(pred), c("q0.25", "q0.5", "q0.75")) } }) test_that("Leaf predictions have multiple dimensions when needed", { # single score, multiple trees y <- mtcars$mpg x <- mtcars[, -1L] model <- xgboost( x, y, nthreads = 1L, nrounds = 4L, max_depth = 2L, objective = "reg:quantileerror", quantile_alpha = 0.5 ) pred <- predict(model, x, type = "leaf") expect_equal(dim(pred), c(nrow(x), 4L)) expect_equal(row.names(pred), row.names(x)) expect_null(colnames(pred)) # single score, single tree model <- xgboost( x, y, nthreads = 1L, nrounds = 1L, max_depth = 2L, objective = "reg:quantileerror", quantile_alpha = 0.5 ) pred <- predict(model, x, type = "leaf") expect_equal(dim(pred), c(nrow(x), 1L)) expect_equal(row.names(pred), row.names(x)) expect_null(colnames(pred)) # multiple score, multiple trees model <- xgboost( x, y, nthreads = 1L, nrounds = 4L, max_depth = 2L, objective = "reg:quantileerror", quantile_alpha = c(0.25, 0.5, 0.75) ) pred <- predict(model, x, type = "leaf") expect_equal(dim(pred), c(nrow(x), 4L, 3L)) expect_equal(row.names(pred), row.names(x)) expect_null(colnames(pred)) expect_equal(dimnames(pred)[[3L]], c("q0.25", "q0.5", "q0.75")) # multiple score, single tree model <- xgboost( x, y, nthreads = 1L, nrounds = 1L, max_depth = 2L, objective = "reg:quantileerror", quantile_alpha = c(0.25, 0.5, 0.75) ) pred <- predict(model, x, type = "leaf") expect_equal(dim(pred), c(nrow(x), 1L, 3L)) expect_equal(row.names(pred), row.names(x)) expect_null(colnames(pred)) expect_equal(dimnames(pred)[[3L]], c("q0.25", "q0.5", "q0.75")) # parallel trees, single tree, single score model <- xgboost( x, y, nthreads = 1L, nrounds = 1L, max_depth = 2L, objective = "count:poisson", num_parallel_tree = 2L ) pred <- predict(model, x, type = "leaf") expect_equal(dim(pred), c(nrow(x), 1L, 2L)) expect_equal(row.names(pred), row.names(x)) expect_null(colnames(pred)) expect_null(dimnames(pred)[[3L]]) # num_parallel_tree>1 + multiple scores is not 
supported at the moment so no test for it. }) test_that("Column names from multiclass are added to leaf predictions", { y <- iris$Species x <- iris[, -5L] model <- xgboost(x, y, nthreads = 1L, nrounds = 4L, max_depth = 2L) pred <- predict(model, x, type = "leaf") expect_equal(dim(pred), c(nrow(x), 4L, 3L)) expect_equal(dimnames(pred)[[3L]], levels(y)) # Check also for a single tree model <- xgboost(x, y, nthreads = 1L, nrounds = 1L, max_depth = 2L) pred <- predict(model, x, type = "leaf") expect_equal(dim(pred), c(nrow(x), 1L, 3L)) expect_equal(dimnames(pred)[[3L]], levels(y)) }) test_that("Column names from multitarget are added to leaf predictions", { y <- data.frame( ylog = log(mtcars$mpg), ysqrt = sqrt(mtcars$mpg) ) x <- mtcars[, -1L] model <- xgboost(x, y, nthreads = 1L, nrounds = 4L, max_depth = 2L) pred <- predict(model, x, type = "leaf") expect_equal(dim(pred), c(nrow(x), 4L, 2L)) expect_equal(dimnames(pred)[[3L]], colnames(y)) # Check also for a single tree model <- xgboost(x, y, nthreads = 1L, nrounds = 1L, max_depth = 2L) pred <- predict(model, x, type = "leaf") expect_equal(dim(pred), c(nrow(x), 1L, 2L)) expect_equal(dimnames(pred)[[3L]], colnames(y)) }) test_that("Column names from multiquantile are added to leaf predictions", { y <- mtcars$mpg x <- mtcars[, -1L] model <- xgboost( x, y, nthreads = 1L, nrounds = 4L, max_depth = 2L, objective = "reg:quantileerror", quantile_alpha = c(0.25, 0.5, 0.75) ) pred <- predict(model, x, type = "leaf") expect_equal(dim(pred), c(nrow(x), 4L, 3L)) expect_equal(dimnames(pred)[[3L]], c("q0.25", "q0.5", "q0.75")) # Check also for a single tree model <- xgboost( x, y, nthreads = 1L, nrounds = 1L, max_depth = 2L, objective = "reg:quantileerror", quantile_alpha = c(0.25, 0.5, 0.75) ) pred <- predict(model, x, type = "leaf") expect_equal(dim(pred), c(nrow(x), 1L, 3L)) expect_equal(dimnames(pred)[[3L]], c("q0.25", "q0.5", "q0.75")) }) test_that("Evaluation fraction leaves examples of all classes for training", { # With minimal sample leave no remainder lst_args <- list( dmatrix_args = list( data = matrix(seq(1, 4), ncol = 1L), label = c(0, 0, 1, 1) ), metadata = list( y_levels = c("a", "b") ), params = list( seed = 123 ) ) for (retry in seq_len(10)) { lst_args$params$seed <- retry res <- process.eval.set(0.5, lst_args) expect_equal(length(intersect(res$idx_train, res$idx_eval)), 0) expect_equal(length(res$idx_train), 2L) expect_equal(length(res$idx_eval), 2L) expect_true(length(intersect(c(1L, 2L), res$idx_train)) >= 1L) expect_true(length(intersect(c(3L, 4L), res$idx_train)) >= 1L) } # With minimal sample leaving some remainder lst_args <- list( dmatrix_args = list( data = matrix(seq(1, 5), ncol = 1L), label = c(0, 0, 1, 1, 1) ), metadata = list( y_levels = c("a", "b") ), params = list( seed = 123 ) ) for (retry in seq_len(20)) { lst_args$params$seed <- retry res <- process.eval.set(0.4, lst_args) expect_equal(length(intersect(res$idx_train, res$idx_eval)), 0) expect_equal(length(res$idx_train), 3L) expect_equal(length(res$idx_eval), 2L) expect_true(length(intersect(c(1L, 2L), res$idx_train)) >= 1L) expect_true(length(intersect(c(3L, 4L, 5L), res$idx_train)) >= 1L) } }) test_that("'eval_set' as fraction works", { y <- iris$Species x <- iris[, -5L] model <- xgboost( x, y, base_margin = matrix(0.1, nrow = nrow(x), ncol = 3L), eval_set = 0.2, nthreads = 1L, nrounds = 4L, max_depth = 2L, verbosity = 0L ) expect_true(hasName(attributes(model), "evaluation_log")) evaluation_log <- attributes(model)$evaluation_log expect_equal(nrow(evaluation_log), 4L) 
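  # With a factor response and no explicit eval_metric, multi-class models
  # default to mlogloss, which should appear in the evaluation log.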
  expect_true(hasName(evaluation_log, "eval_mlogloss"))
  expect_equal(length(attributes(model)$metadata$y_levels), 3L)
})

test_that("Linear booster importance uses class names", {
  y <- iris$Species
  x <- iris[, -5L]
  model <- xgboost(
    x,
    y,
    nthreads = 1L,
    nrounds = 4L,
    verbosity = 0L,
    booster = "gblinear",
    learning_rate = 0.2
  )
  imp <- xgb.importance(model)
  expect_true(is.factor(imp$Class))
  expect_equal(levels(imp$Class), levels(y))
})
xgboost-3.0.0/R-package/vignettes/xgboost_introduction.Rmd
---
title: "XGBoost for R introduction"
vignette: >
  %\VignetteIndexEntry{XGBoost for R introduction}
  %\VignetteEncoding{UTF-8}
  %\VignetteEngine{knitr::rmarkdown}
output:
  html_document:
    theme: "spacelab"
    highlight: "kate"
    toc: true
    toc_float: true
---

XGBoost for R introduction
==========================

## Introduction

**XGBoost** is an optimized distributed gradient boosting library designed to be highly **efficient**, **flexible** and **portable**. It implements machine learning algorithms under the [Gradient Boosting](https://en.wikipedia.org/wiki/Gradient_boosting) framework. XGBoost provides a parallel tree boosting (also known as GBDT, GBM) that solves many data science problems in a fast and accurate way. The same code runs on major distributed environments (Hadoop, SGE, MPI) and can solve problems beyond billions of examples.

For an introduction to the concept of gradient boosting, see the tutorial [Introduction to Boosted Trees](https://xgboost.readthedocs.io/en/stable/tutorials/model.html) in XGBoost's online docs.

For more details about XGBoost's features and usage, see the [online documentation](https://xgboost.readthedocs.io/en/stable/) which contains more tutorials, examples, and details.

This short vignette outlines the basic usage of the R interface for XGBoost, assuming the reader has some familiarity with the underlying concepts behind statistical modeling with gradient-boosted decision trees.

## Building a predictive model

At its core, XGBoost consists of a C++ library which offers bindings for different programming languages, including R. The R package for XGBoost provides an idiomatic interface similar to those of other statistical modeling packages using an x/y design, as well as a lower-level interface that interacts more directly with the underlying core library and which is similar to those of other language bindings like Python, plus various helpers to interact with its model objects, such as by plotting their feature importances or converting them to other formats.
The main function of interest is `xgboost(x, y, ...)`, which calls the XGBoost model building procedure on observed data of covariates/features/predictors "x", and a response variable "y" - it should feel familiar to users of packages like `glmnet` or `ncvreg`:

```{r}
library(xgboost)
data(ToothGrowth)

y <- ToothGrowth$supp # the response which we want to model/predict
x <- ToothGrowth[, c("len", "dose")] # the features from which we want to predict it
model <- xgboost(x, y, nthreads = 1, nrounds = 2)
model
```

In this case, the "y" response variable that was supplied is a "factor" type with two classes ("OJ" and "VC") - hence, XGBoost builds a binary classification model for it based on the features "x", by finding a maximum likelihood estimate (similar to the `family="binomial"` model from R's `glm` function) through rule buckets obtained from the sum of two decision trees (from `nrounds=2`), from which we can then predict probabilities, log-odds, class with highest likelihood, among others:

```{r}
predict(model, x[1:6, ], type = "response") # probabilities for y's last level ("VC")
predict(model, x[1:6, ], type = "raw") # log-odds
predict(model, x[1:6, ], type = "class") # class with highest probability
```

Compared to R's `glm` function which follows the concepts of "families" and "links" from GLM theory to fit models for different kinds of response distributions, XGBoost follows the simpler concept of "objectives" which mix both of them into one, and which just like `glm`, allow modeling very different kinds of response distributions (e.g. discrete choices, real-valued numbers, counts, censored measurements, etc.) through a common framework.

XGBoost will automatically determine a suitable objective for the response given its object class (can pass factors for classification, numeric vectors for regression, `Surv` objects from the `survival` package for survival, etc. - see `?xgboost` for more details), but this can be controlled manually through an `objective` parameter based on the kind of model that is desired:

```{r}
data(mtcars)

y <- mtcars$mpg
x <- mtcars[, -1]
model_gaussian <- xgboost(x, y, nthreads = 1, nrounds = 2) # default is squared loss (Gaussian)
model_poisson <- xgboost(x, y, objective = "count:poisson", nthreads = 1, nrounds = 2)
model_abserr <- xgboost(x, y, objective = "reg:absoluteerror", nthreads = 1, nrounds = 2)
```

_Note: the objective must match the type of the "y" response variable - for example, classification objectives for discrete choices require "factor" types, while regression models for real-valued data require "numeric" types._

## Model parameters

XGBoost models allow a large degree of control over how they are built. By their nature, gradient-boosted decision tree ensembles are able to capture very complex patterns between features in the data and a response variable, which also means they can suffer from overfitting if not controlled appropriately.

For best results, one needs to find suitable parameters for the data being modeled. Note that XGBoost does not adjust its default hyperparameters based on the data, and different datasets will require vastly different hyperparameters for optimal predictive performance.

For example, for a small dataset like "ToothGrowth" which has only two features and 60 observations, the defaults from XGBoost are overkill and lead to severe overfitting - for such data, one might want to have smaller trees (i.e. more conservative decision rules, capturing simpler patterns) and fewer of them.
Parameters can be controlled by passing additional arguments to `xgboost()`. See `?xgb.params` for details about what parameters are available to control.

```{r}
y <- ToothGrowth$supp
x <- ToothGrowth[, c("len", "dose")]
model_conservative <- xgboost(
  x, y, nthreads = 1,
  nrounds = 5,
  max_depth = 2,
  reg_lambda = 0.5,
  learning_rate = 0.15
)
pred_conservative <- predict(
  model_conservative,
  x
)
pred_conservative[1:6] # probabilities are all closer to 0.5 now
```

XGBoost also allows calculating evaluation metrics for model quality over boosting rounds, with a wide variety of built-in metrics available to use. It's possible to automatically set aside a fraction of the data to use as an evaluation set, from which one can then visually monitor progress and overfitting:

```{r}
xgboost(
  x, y, nthreads = 1,
  eval_set = 0.2,
  monitor_training = TRUE,
  verbosity = 1,
  eval_metric = c("auc", "logloss"),
  nrounds = 5,
  max_depth = 2,
  reg_lambda = 0.5,
  learning_rate = 0.15
)
```

## Examining model objects

XGBoost model objects for the most part consist of a pointer to a C++ object where most of the information is held and which is interfaced through the utility functions and methods in the package, but they also contain some R attributes that can be retrieved (and new ones added) through `attributes()`:

```{r}
attributes(model)
```

In addition to R attributes (which can be arbitrary R objects), a model may also keep some standardized C-level attributes that one can access and modify (but which can only be in JSON format):

```{r}
xgb.attributes(model)
```

(they are empty for this model)

... but usually, when it comes to getting something out of a model object, one would typically want to do this through the built-in utility functions. Some examples:

```{r}
xgb.importance(model)
```

```{r}
xgb.model.dt.tree(model)
```

## Other features

XGBoost supports many additional features on top of its traditional gradient-boosting framework, including, among others:

* Building decision tree models with characteristics such as per-feature monotonicity constraints or interaction constraints.
* Calculating feature contributions in individual predictions.
* Using custom objectives and custom evaluation metrics.
* Fitting linear models.
* Fitting models on GPUs and/or on data that doesn't fit in RAM ("external memory").

See the [online documentation](https://xgboost.readthedocs.io/en/stable/index.html) - particularly the [tutorials section](https://xgboost.readthedocs.io/en/stable/tutorials/index.html) - for a glimpse of further functionalities that XGBoost offers.

## The low-level interface

In addition to the `xgboost(x, y, ...)` function, XGBoost also provides a lower-level interface for creating model objects through the function `xgb.train()`, which resembles the `xgb.train` functions in other language bindings of XGBoost.

This `xgb.train()` interface exposes additional functionalities (such as user-supplied callbacks or external-memory data support) and performs fewer data validations and castings compared to the `xgboost()` function interface.

Some key differences between the two interfaces:

* Unlike `xgboost()` which takes R objects such as `matrix` or `data.frame` as inputs, the function `xgb.train()` uses XGBoost's own data container called "DMatrix", which can be created from R objects through the function `xgb.DMatrix()`. Note that there are other "DMatrix" constructors too, such as "xgb.QuantileDMatrix()", which might be more beneficial for some use-cases.
* A "DMatrix" object may contain a mixture of features/covariates, the response variable, observation weights, base margins, among others; and unlike `xgboost()`, requires its inputs to have already been encoded into the representation that XGBoost uses behind the scenes - for example, while `xgboost()` may take a `factor` object as "y", `xgb.DMatrix()` requires instead a binary response variable to be passed as a vector of zeros and ones. * Hyperparameters are passed as function arguments in `xgboost()`, while they are passed as a named list to `xgb.train()`. * The `xgb.train()` interface keeps less metadata about its inputs - for example, it will not add levels of factors as column names to estimated probabilities when calling `predict`. Example usage of `xgb.train()`: ```{r} data("agaricus.train") dmatrix <- xgb.DMatrix( data = agaricus.train$data, # a sparse CSC matrix ('dgCMatrix') label = agaricus.train$label, # zeros and ones nthread = 1 ) booster <- xgb.train( data = dmatrix, nrounds = 10, params = xgb.params( objective = "binary:logistic", nthread = 1, max_depth = 3 ) ) data("agaricus.test") dmatrix_test <- xgb.DMatrix(agaricus.test$data, nthread = 1) pred_prob <- predict(booster, dmatrix_test) pred_raw <- predict(booster, dmatrix_test, outputmargin = TRUE) ``` Model objects produced by `xgb.train()` have class `xgb.Booster`, while model objects produced by `xgboost()` have class `xgboost`, which is a subclass of `xgb.Booster`. Their `predict` methods also take different arguments - for example, `predict.xgboost` has a `type` parameter, while `predict.xgb.Booster` controls this through binary arguments - but as `xgboost` is a subclass of `xgb.Booster`, methods for `xgb.Booster` can be called on `xgboost` objects if needed. Utility functions in the XGBoost R package will work with both model classes - for example: ```{r} xgb.importance(model) xgb.importance(booster) ``` While `xgboost()` aims to provide a user-friendly interface, there are still many situations where one should prefer the `xgb.train()` interface - for example: * For latency-sensitive applications (e.g. when serving models in real time), `xgb.train()` will have a speed advantage, as it performs fewer validations, conversions, and post-processings with metadata. * If you are developing an R package that depends on XGBoost, `xgb.train()` will provide a more stable interface (less subject to changes) and will have lower time/memory overhead. * If you need functionalities that are not exposed by the `xgboost()` interface - for example, if your dataset does not fit into the computer's RAM, it's still possible to construct a DMatrix from it if the data is loaded in batches through `xgb.ExtMemDMatrix()`. xgboost-3.0.0/R-package/vignettes/xgboostfromJSON.Rmd000066400000000000000000000163761476511272400225360ustar00rootroot00000000000000--- title: "XGBoost from JSON" output: rmarkdown::html_vignette: number_sections: yes toc: yes author: Roland Stevenson vignette: > %\VignetteIndexEntry{XGBoost from JSON} %\VignetteEngine{knitr::rmarkdown} \usepackage[utf8]{inputenc} --- XGBoost from JSON ================= ## Introduction The purpose of this Vignette is to show you how to correctly load and work with an **XGBoost** model that has been dumped to JSON. **XGBoost** internally converts all data to [32-bit floats](https://en.wikipedia.org/wiki/Single-precision_floating-point_format), and the values dumped to JSON are decimal representations of these values. 
When working with a model that has been parsed from a JSON file, care must be taken to correctly treat: - the input data, which should be converted to 32-bit floats - any 32-bit floats that were stored in JSON as decimal representations - any calculations must be done with 32-bit mathematical operators ## Setup For the purpose of this tutorial we will load the xgboost, jsonlite, and float packages. We'll also set `digits=22` in our options in case we want to inspect many digits of our results. ```{r} require(xgboost) require(jsonlite) require(float) options(digits = 22) ``` We will create a toy binary logistic model based on the example first provided [here](https://github.com/dmlc/xgboost/issues/3960), so that we can easily understand the structure of the dumped JSON model object. This will allow us to understand where discrepancies can occur and how they should be handled. ```{r} dates <- c(20180130, 20180130, 20180130, 20180130, 20180130, 20180130, 20180131, 20180131, 20180131, 20180131, 20180131, 20180131, 20180131, 20180131, 20180131, 20180134, 20180134, 20180134) labels <- c(1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0) data <- data.frame(dates = dates, labels = labels) bst <- xgb.train( data = xgb.DMatrix(as.matrix(data$dates), label = labels, missing = NA, nthread = 1), nrounds = 1, params = xgb.params( objective = "binary:logistic", nthread = 2, max_depth = 1 ) ) ``` ## Comparing results We will now dump the model to JSON and attempt to illustrate a variety of issues that can arise, and how to properly deal with them. First let's dump the model to JSON: ```{r} bst_json <- xgb.dump(bst, with_stats = FALSE, dump_format = 'json') bst_from_json <- fromJSON(bst_json, simplifyDataFrame = FALSE) node <- bst_from_json[[1]] cat(bst_json) ``` The tree JSON shown by the above code-chunk tells us that if the data is less than 20180132, the tree will output the value in the first leaf. Otherwise it will output the value in the second leaf. Let's try to reproduce this manually with the data we have and confirm that it matches the model predictions we've already calculated. ```{r} bst_preds_logodds <- predict(bst, as.matrix(data$dates), outputmargin = TRUE) # calculate the logodds values using the JSON representation bst_from_json_logodds <- ifelse(data$dates < node$split_condition, node$children[[1]]$leaf, node$children[[2]]$leaf) bst_preds_logodds bst_from_json_logodds # test that values are equal bst_preds_logodds == bst_from_json_logodds ``` None are equal. What happened? At this stage two things happened: - input data was not converted to 32-bit floats - the JSON variables were not converted to 32-bit floats ### Lesson 1: All data is 32-bit floats > When working with imported JSON, all data must be converted to 32-bit floats To explain this, let's repeat the comparison and round to two decimals: ```{r} round(bst_preds_logodds, 2) == round(bst_from_json_logodds, 2) ``` If we round to two decimals, we see that only the elements related to data values of `20180131` don't agree. If we convert the data to floats, they agree: ```{r} # now convert the dates to floats first bst_from_json_logodds <- ifelse(fl(data$dates) < node$split_condition, node$children[[1]]$leaf, node$children[[2]]$leaf) # test that values are equal round(bst_preds_logodds, 2) == round(bst_from_json_logodds, 2) ``` What's the lesson? If we are going to work with an imported JSON model, any data must be converted to floats first. 
In this case, since '20180131' cannot be represented as a 32-bit float, it is rounded up to 20180132, as shown here: ```{r} fl(20180131) ``` ### Lesson 2: JSON parameters are 32-bit floats > All JSON parameters stored as floats must be converted to floats. Let's now say we do care about numbers past the first two decimals. ```{r} # test that values are equal bst_preds_logodds == bst_from_json_logodds ``` None are exactly equal. What happened? Although we've converted the data to 32-bit floats, we also need to convert the JSON parameters to 32-bit floats. Let's do this: ```{r} # now convert the dates to floats first bst_from_json_logodds <- ifelse(fl(data$dates) < fl(node$split_condition), as.numeric(fl(node$children[[1]]$leaf)), as.numeric(fl(node$children[[2]]$leaf))) # test that values are equal bst_preds_logodds == bst_from_json_logodds ``` All equal. What's the lesson? If we are going to work with an imported JSON model, any JSON parameters that were stored as floats must also be converted to floats first. ### Lesson 3: Use 32-bit math > Always use 32-bit numbers and operators We were able to get the log-odds to agree, so now let's manually calculate the sigmoid of the log-odds. This should agree with the xgboost predictions. ```{r} bst_preds <- predict(bst, as.matrix(data$dates)) # calculate the predictions casting doubles to floats bst_from_json_preds <- ifelse( fl(data$dates) < fl(node$split_condition) , as.numeric(1 / (1 + exp(-1 * fl(node$children[[1]]$leaf)))) , as.numeric(1 / (1 + exp(-1 * fl(node$children[[2]]$leaf)))) ) # test that values are equal bst_preds == bst_from_json_preds ``` None are exactly equal again. What is going on here? Well, since we are using the value `1` in the calculations, we have introduced a double into the calculation. Because of this, all float values are promoted to 64-bit doubles and the 64-bit version of the exponentiation operator `exp` is also used. On the other hand, xgboost uses the 32-bit version of the exponentiation operator in its [sigmoid function](https://github.com/dmlc/xgboost/blob/54980b8959680a0da06a3fc0ec776e47c8cbb0a1/src/common/math.h#L25-L27). How do we fix this? We have to ensure we use the correct data types everywhere and the correct operators. If we use only floats, the float library that we have loaded will ensure the 32-bit float exponentiation operator is applied. ```{r} # calculate the predictions casting doubles to floats bst_from_json_preds <- ifelse( fl(data$dates) < fl(node$split_condition) , as.numeric(fl(1) / (fl(1) + exp(fl(-1) * fl(node$children[[1]]$leaf)))) , as.numeric(fl(1) / (fl(1) + exp(fl(-1) * fl(node$children[[2]]$leaf)))) ) # test that values are equal bst_preds == bst_from_json_preds ``` All equal. What's the lesson? We have to ensure that all calculations are done with 32-bit floating point operators if we want to reproduce the results that we see with xgboost. 
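Putting the three lessons together, we can wrap the whole reproduction in a single function. The following is a sketch specific to this vignette: `predict_from_json_node` is a hypothetical helper (not part of the xgboost package) that assumes the single-split `node` structure parsed above.

```{r}
# Hypothetical helper combining the three lessons:
# float inputs, float JSON parameters, and float math throughout.
predict_from_json_node <- function(node, x) {
  ifelse(
    fl(x) < fl(node$split_condition),
    as.numeric(fl(1) / (fl(1) + exp(fl(-1) * fl(node$children[[1]]$leaf)))),
    as.numeric(fl(1) / (fl(1) + exp(fl(-1) * fl(node$children[[2]]$leaf))))
  )
}
# matches the native predictions computed earlier
all(predict_from_json_node(node, data$dates) == bst_preds)
```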
xgboost-3.0.0/README.md000066400000000000000000000103751476511272400144750ustar00rootroot00000000000000 eXtreme Gradient Boosting
===========
[![Build Status](https://badge.buildkite.com/aca47f40a32735c00a8550540c5eeff6a4c1d246a580cae9b0.svg?branch=master)](https://buildkite.com/xgboost/xgboost-ci)
[![XGBoost-CI](https://github.com/dmlc/xgboost/workflows/XGBoost-CI/badge.svg?branch=master)](https://github.com/dmlc/xgboost/actions)
[![Documentation Status](https://readthedocs.org/projects/xgboost/badge/?version=latest)](https://xgboost.readthedocs.org)
[![GitHub license](https://dmlc.github.io/img/apache2.svg)](./LICENSE)
[![CRAN Status Badge](https://www.r-pkg.org/badges/version/xgboost)](https://cran.r-project.org/web/packages/xgboost)
[![PyPI version](https://badge.fury.io/py/xgboost.svg)](https://pypi.python.org/pypi/xgboost/)
[![Conda version](https://img.shields.io/conda/vn/conda-forge/py-xgboost.svg)](https://anaconda.org/conda-forge/py-xgboost)
[![Optuna](https://img.shields.io/badge/Optuna-integrated-blue)](https://optuna.org)
[![Twitter](https://img.shields.io/badge/@XGBoostProject--_.svg?style=social&logo=twitter)](https://twitter.com/XGBoostProject)
[![OpenSSF Scorecard](https://api.securityscorecards.dev/projects/github.com/dmlc/xgboost/badge)](https://api.securityscorecards.dev/projects/github.com/dmlc/xgboost)
[![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/comet-ml/comet-examples/blob/master/integrations/model-training/xgboost/notebooks/how_to_use_comet_with_xgboost_tutorial.ipynb)

[Community](https://xgboost.ai/community) |
[Documentation](https://xgboost.readthedocs.org) |
[Resources](demo/README.md) |
[Contributors](CONTRIBUTORS.md) |
[Release Notes](https://xgboost.readthedocs.io/en/latest/changes/index.html)

XGBoost is an optimized distributed gradient boosting library designed to be highly ***efficient***, ***flexible*** and ***portable***. It implements machine learning algorithms under the [Gradient Boosting](https://en.wikipedia.org/wiki/Gradient_boosting) framework. XGBoost provides parallel tree boosting (also known as GBDT, GBM) that solves many data science problems in a fast and accurate way. The same code runs on major distributed environments (Kubernetes, Hadoop, SGE, Dask, Spark, PySpark) and can solve problems beyond billions of examples.

License
-------
© Contributors, 2021. Licensed under an [Apache-2](https://github.com/dmlc/xgboost/blob/master/LICENSE) license.

Contribute to XGBoost
---------------------
XGBoost has been developed and used by a group of active community members. Your help is very valuable to make the package better for everyone. Check out the [Community Page](https://xgboost.ai/community).

Reference
---------
- Tianqi Chen and Carlos Guestrin. [XGBoost: A Scalable Tree Boosting System](https://arxiv.org/abs/1603.02754). In 22nd SIGKDD Conference on Knowledge Discovery and Data Mining, 2016
- XGBoost originates from a research project at the University of Washington.

Sponsors
--------
Become a sponsor and get a logo here. See details at [Sponsoring the XGBoost Project](https://xgboost.ai/sponsors). The funds are used to defray the cost of continuous integration and testing infrastructure (https://xgboost-ci.net).
## Open Source Collective sponsors [![Backers on Open Collective](https://opencollective.com/xgboost/backers/badge.svg)](#backers) [![Sponsors on Open Collective](https://opencollective.com/xgboost/sponsors/badge.svg)](#sponsors) ### Sponsors [[Become a sponsor](https://opencollective.com/xgboost#sponsor)] NVIDIA ### Backers [[Become a backer](https://opencollective.com/xgboost#backer)] xgboost-3.0.0/SECURITY.md000066400000000000000000000015571476511272400150110ustar00rootroot00000000000000# Security Policy ## Supported Versions Security updates are applied only to the most recent release. ## Reporting a Vulnerability To report a security issue, please email [security@xgboost-ci.net](mailto:security@xgboost-ci.net) with a description of the issue, the steps you took to create the issue, affected versions, and, if known, mitigations for the issue. All support will be made on the best effort base, so please indicate the "urgency level" of the vulnerability as Critical, High, Medium or Low. xgboost-3.0.0/amalgamation/000077500000000000000000000000001476511272400156425ustar00rootroot00000000000000xgboost-3.0.0/amalgamation/dmlc-minimum0.cc000066400000000000000000000010551476511272400206220ustar00rootroot00000000000000/*! * Copyright 2015 by Contributors. * \brief Mininum DMLC library Amalgamation, used for easy plugin of dmlc lib. * Normally this is not needed. */ #include "../dmlc-core/src/io/line_split.cc" #include "../dmlc-core/src/io/recordio_split.cc" #include "../dmlc-core/src/io/input_split_base.cc" #include "../dmlc-core/src/io/local_filesys.cc" #include "../dmlc-core/src/io/filesys.cc" #include "../dmlc-core/src/io/indexed_recordio_split.cc" #include "../dmlc-core/src/data.cc" #include "../dmlc-core/src/io.cc" #include "../dmlc-core/src/recordio.cc" xgboost-3.0.0/cmake/000077500000000000000000000000001476511272400142705ustar00rootroot00000000000000xgboost-3.0.0/cmake/Doc.cmake000066400000000000000000000010041476511272400157720ustar00rootroot00000000000000function(run_doxygen) find_package(Doxygen REQUIRED) if(NOT DOXYGEN_DOT_FOUND) message(FATAL_ERROR "Command `dot` not found. Please install graphviz.") endif() configure_file( ${xgboost_SOURCE_DIR}/doc/Doxyfile.in ${CMAKE_CURRENT_BINARY_DIR}/Doxyfile @ONLY) add_custom_target( doc_doxygen ALL COMMAND ${DOXYGEN_EXECUTABLE} ${CMAKE_CURRENT_BINARY_DIR}/Doxyfile WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR} COMMENT "Generate C APIs documentation." VERBATIM) endfunction() xgboost-3.0.0/cmake/FindOpenMPMacOS.cmake000066400000000000000000000106721476511272400201220ustar00rootroot00000000000000# Find OpenMP library on MacOS # Automatically handle locating libomp from the Homebrew package manager # lint_cmake: -package/consistency macro(find_openmp_macos) if(NOT APPLE) message(FATAL_ERROR "${CMAKE_CURRENT_FUNCTION}() must only be used on MacOS") endif() find_package(OpenMP) if(NOT OpenMP_FOUND) # Try again with extra path info. This step is required for libomp 15+ from Homebrew, # as libomp 15.0+ from brew is keg-only # See https://github.com/Homebrew/homebrew-core/issues/112107#issuecomment-1278042927. 
execute_process(COMMAND brew --prefix libomp OUTPUT_VARIABLE HOMEBREW_LIBOMP_PREFIX OUTPUT_STRIP_TRAILING_WHITESPACE) set(OpenMP_C_FLAGS "-Xpreprocessor -fopenmp -I${HOMEBREW_LIBOMP_PREFIX}/include") set(OpenMP_CXX_FLAGS "-Xpreprocessor -fopenmp -I${HOMEBREW_LIBOMP_PREFIX}/include") set(OpenMP_C_LIB_NAMES omp) set(OpenMP_CXX_LIB_NAMES omp) set(OpenMP_omp_LIBRARY ${HOMEBREW_LIBOMP_PREFIX}/lib/libomp.dylib) find_package(OpenMP REQUIRED) endif() endmacro() # Patch libxgboost.dylib so that it depends on @rpath/libomp.dylib instead of # /opt/homebrew/opt/libomp/lib/libomp.dylib or other hard-coded paths. # Doing so enables XGBoost to interoperate with multiple kinds of OpenMP # libraries. See https://github.com/microsoft/LightGBM/pull/6391 for detailed # explanation. Adapted from https://github.com/microsoft/LightGBM/pull/6391 # by James Lamb. # MacOS only. function(patch_openmp_path_macos target target_default_output_name) if(NOT APPLE) message(FATAL_ERROR "${CMAKE_CURRENT_FUNCTION}() must only be used on MacOS") endif() # Get path to libomp found at build time get_target_property( __OpenMP_LIBRARY_LOCATION OpenMP::OpenMP_CXX INTERFACE_LINK_LIBRARIES ) # Get the base name of the OpenMP lib # Usually: libomp.dylib, libgomp.dylib, or libiomp.dylib get_filename_component( __OpenMP_LIBRARY_NAME ${__OpenMP_LIBRARY_LOCATION} NAME ) # Get the directory containing the OpenMP lib get_filename_component( __OpenMP_LIBRARY_DIR ${__OpenMP_LIBRARY_LOCATION} DIRECTORY ) # Get the name of the XGBoost lib, e.g. libxgboost get_target_property( __LIBXGBOOST_OUTPUT_NAME ${target} OUTPUT_NAME ) if(NOT __LIBXGBOOST_OUTPUT_NAME) set(__LIBXGBOOST_OUTPUT_NAME "${target_default_output_name}") endif() # Get the file name of the XGBoost lib, e.g. libxgboost.dylib if(CMAKE_SHARED_LIBRARY_SUFFIX_CXX) set( __LIBXGBOOST_FILENAME_${target} "${__LIBXGBOOST_OUTPUT_NAME}${CMAKE_SHARED_LIBRARY_SUFFIX_CXX}" CACHE INTERNAL "Shared library filename ${target}" ) else() set( __LIBXGBOOST_FILENAME_${target} "${__LIBXGBOOST_OUTPUT_NAME}.dylib" CACHE INTERNAL "Shared library filename ${target}" ) endif() message(STATUS "Creating shared lib for target ${target}: ${__LIBXGBOOST_FILENAME_${target}}") # Override the absolute path to OpenMP with a relative one using @rpath. # # This also ensures that if a libomp.dylib has already been loaded, it'll just use that. if(KEEP_BUILD_ARTIFACTS_IN_BINARY_DIR) set(__LIB_DIR ${xgboost_BINARY_DIR}/lib) else() set(__LIB_DIR ${xgboost_SOURCE_DIR}/lib) endif() add_custom_command( TARGET ${target} POST_BUILD COMMAND install_name_tool -change ${__OpenMP_LIBRARY_LOCATION} "@rpath/${__OpenMP_LIBRARY_NAME}" "${__LIBXGBOOST_FILENAME_${target}}" WORKING_DIRECTORY ${__LIB_DIR} ) message(STATUS "${__LIBXGBOOST_FILENAME_${target}}: " "Replacing hard-coded OpenMP install_name with '@rpath/${__OpenMP_LIBRARY_NAME}'..." ) # Add RPATH entries to ensure the loader looks in the following, in the following order: # # - /opt/homebrew/opt/libomp/lib (where 'brew install' / 'brew link' puts libomp.dylib) # - ${__OpenMP_LIBRARY_DIR} (wherever find_package(OpenMP) found OpenMP at build time) # # Note: This list will only be used if libomp.dylib isn't already loaded into memory. 
# So Conda users will likely use ${CONDA_PREFIX}/libomp.dylib execute_process(COMMAND brew --prefix libomp OUTPUT_VARIABLE HOMEBREW_LIBOMP_PREFIX OUTPUT_STRIP_TRAILING_WHITESPACE) set_target_properties( ${target} PROPERTIES BUILD_WITH_INSTALL_RPATH TRUE INSTALL_RPATH "${HOMEBREW_LIBOMP_PREFIX}/lib;${__OpenMP_LIBRARY_DIR}" INSTALL_RPATH_USE_LINK_PATH FALSE ) endfunction() xgboost-3.0.0/cmake/PrefetchIntrinsics.cmake000066400000000000000000000012211476511272400210740ustar00rootroot00000000000000function(find_prefetch_intrinsics) include(CheckCXXSourceCompiles) check_cxx_source_compiles(" #include int main() { char data = 0; const char* address = &data; _mm_prefetch(address, _MM_HINT_NTA); return 0; } " XGBOOST_MM_PREFETCH_PRESENT) check_cxx_source_compiles(" int main() { char data = 0; const char* address = &data; __builtin_prefetch(address, 0, 0); return 0; } " XGBOOST_BUILTIN_PREFETCH_PRESENT) set(XGBOOST_MM_PREFETCH_PRESENT ${XGBOOST_MM_PREFETCH_PRESENT} PARENT_SCOPE) set(XGBOOST_BUILTIN_PREFETCH_PRESENT ${XGBOOST_BUILTIN_PREFETCH_PRESENT} PARENT_SCOPE) endfunction() xgboost-3.0.0/cmake/RPackageInstall.cmake.in000066400000000000000000000022431476511272400207040ustar00rootroot00000000000000# Commands to install the R package as a CMake install target function(check_call) set(cmd COMMAND) cmake_parse_arguments( PARSE_ARGV 0 CALL_ARG "" "" "${cmd}" ) string(REPLACE ";" " " commands "${CALL_ARG_COMMAND}") message("Command: ${commands}") execute_process(COMMAND ${CALL_ARG_COMMAND} OUTPUT_VARIABLE _out ERROR_VARIABLE _err RESULT_VARIABLE _res) if(NOT "${_res}" EQUAL "0") message(FATAL_ERROR "out: ${_out}, err: ${_err}, res: ${_res}") endif() endfunction() # Important paths set(build_dir "@build_dir@") set(LIBR_EXECUTABLE "@LIBR_EXECUTABLE@") # Back up cmake_install.cmake file(WRITE "${build_dir}/R-package/src/Makevars" "all:") file(WRITE "${build_dir}/R-package/src/Makevars.win" "all:") # Install dependencies set(XGB_DEPS_SCRIPT "deps = setdiff(c('data.table', 'jsonlite', 'Matrix'), rownames(installed.packages())); if(length(deps)>0) install.packages(deps, repo = 'https://cloud.r-project.org/')") check_call(COMMAND "${LIBR_EXECUTABLE}" -q -e "${XGB_DEPS_SCRIPT}") # Install the XGBoost R package check_call(COMMAND "${LIBR_EXECUTABLE}" CMD INSTALL --no-multiarch --build "${build_dir}/R-package")xgboost-3.0.0/cmake/RPackageInstallTargetSetup.cmake000066400000000000000000000012631476511272400224700ustar00rootroot00000000000000# Assembles the R-package files in build_dir; # if necessary, installs the main R package dependencies; # runs R CMD INSTALL. function(setup_rpackage_install_target rlib_target build_dir) configure_file(${PROJECT_SOURCE_DIR}/cmake/RPackageInstall.cmake.in ${PROJECT_BINARY_DIR}/RPackageInstall.cmake @ONLY) install( DIRECTORY "${xgboost_SOURCE_DIR}/R-package" DESTINATION "${build_dir}" PATTERN "src/*" EXCLUDE PATTERN "R-package/configure" EXCLUDE ) install(TARGETS ${rlib_target} LIBRARY DESTINATION "${build_dir}/R-package/src/" RUNTIME DESTINATION "${build_dir}/R-package/src/") install(SCRIPT ${PROJECT_BINARY_DIR}/RPackageInstall.cmake) endfunction() xgboost-3.0.0/cmake/Sanitizer.cmake000066400000000000000000000036131476511272400172450ustar00rootroot00000000000000# Set appropriate compiler and linker flags for sanitizers. 
# # Usage of this module: # enable_sanitizers("address;leak") # Add flags macro(enable_sanitizer sanitizer) if(${sanitizer} MATCHES "address") find_package(ASan) set(SAN_COMPILE_FLAGS "${SAN_COMPILE_FLAGS} -fsanitize=address") elseif(${sanitizer} MATCHES "thread") find_package(TSan) set(SAN_COMPILE_FLAGS "${SAN_COMPILE_FLAGS} -fsanitize=thread") if(TSan_FOUND) link_libraries(${TSan_LIBRARY}) endif() elseif(${sanitizer} MATCHES "leak") find_package(LSan) set(SAN_COMPILE_FLAGS "${SAN_COMPILE_FLAGS} -fsanitize=leak") elseif(${sanitizer} MATCHES "undefined") find_package(UBSan) set(SAN_COMPILE_FLAGS "${SAN_COMPILE_FLAGS} -fsanitize=undefined -fno-sanitize-recover=undefined") else() message(FATAL_ERROR "Santizer ${sanitizer} not supported.") endif() endmacro() macro(enable_sanitizers SANITIZERS) # Check sanitizers compatibility. # Idealy, we should use if(san IN_LIST SANITIZERS) ... endif() # But I haven't figure out how to make it work. foreach( _san ${SANITIZERS} ) string(TOLOWER ${_san} _san) if(_san MATCHES "thread") if(${_use_other_sanitizers}) message(FATAL_ERROR "thread sanitizer is not compatible with ${_san} sanitizer.") endif() set(_use_thread_sanitizer 1) else() if(${_use_thread_sanitizer}) message(FATAL_ERROR "${_san} sanitizer is not compatible with thread sanitizer.") endif() set(_use_other_sanitizers 1) endif() endforeach() message("Sanitizers: ${SANITIZERS}") foreach( _san ${SANITIZERS} ) string(TOLOWER ${_san} _san) enable_sanitizer(${_san}) endforeach() message("Sanitizers compile flags: ${SAN_COMPILE_FLAGS}") set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} ${SAN_COMPILE_FLAGS}") set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} ${SAN_COMPILE_FLAGS}") endmacro() xgboost-3.0.0/cmake/Utils.cmake000066400000000000000000000223511476511272400163750ustar00rootroot00000000000000# Automatically set source group based on folder function(auto_source_group SOURCES) foreach(FILE ${SOURCES}) get_filename_component(PARENT_DIR "${FILE}" PATH) # skip src or include and changes /'s to \\'s string(REPLACE "${CMAKE_CURRENT_LIST_DIR}" "" GROUP "${PARENT_DIR}") string(REPLACE "/" "\\\\" GROUP "${GROUP}") string(REGEX REPLACE "^\\\\" "" GROUP "${GROUP}") source_group("${GROUP}" FILES "${FILE}") endforeach() endfunction() # Set output directory of target, ignoring debug or release function(set_output_directory target dir) set_target_properties(${target} PROPERTIES RUNTIME_OUTPUT_DIRECTORY ${dir} RUNTIME_OUTPUT_DIRECTORY_DEBUG ${dir} RUNTIME_OUTPUT_DIRECTORY_RELEASE ${dir} RUNTIME_OUTPUT_DIRECTORY_RELWITHDEBINFO ${dir} RUNTIME_OUTPUT_DIRECTORY_MINSIZEREL ${dir} LIBRARY_OUTPUT_DIRECTORY ${dir} LIBRARY_OUTPUT_DIRECTORY_DEBUG ${dir} LIBRARY_OUTPUT_DIRECTORY_RELEASE ${dir} LIBRARY_OUTPUT_DIRECTORY_RELWITHDEBINFO ${dir} LIBRARY_OUTPUT_DIRECTORY_MINSIZEREL ${dir} ARCHIVE_OUTPUT_DIRECTORY ${dir} ARCHIVE_OUTPUT_DIRECTORY_DEBUG ${dir} ARCHIVE_OUTPUT_DIRECTORY_RELEASE ${dir} ARCHIVE_OUTPUT_DIRECTORY_RELWITHDEBINFO ${dir} ARCHIVE_OUTPUT_DIRECTORY_MINSIZEREL ${dir}) endfunction() # Set a default build type to release if none was specified function(set_default_configuration_release) if(CMAKE_CONFIGURATION_TYPES STREQUAL "Debug;Release;MinSizeRel;RelWithDebInfo") # multiconfig generator? set(CMAKE_CONFIGURATION_TYPES Release CACHE STRING "" FORCE) elseif(NOT CMAKE_BUILD_TYPE AND NOT CMAKE_CONFIGURATION_TYPES) message(STATUS "Setting build type to 'Release' as none was specified.") set(CMAKE_BUILD_TYPE Release CACHE STRING "Choose the type of build." 
FORCE) endif() endfunction() # Generate CMAKE_CUDA_ARCHITECTURES form a list of architectures # Also generates PTX for the most recent architecture for forwards compatibility function(compute_cmake_cuda_archs archs) if(CMAKE_CUDA_COMPILER_VERSION MATCHES "^([0-9]+\\.[0-9]+)") set(CUDA_VERSION "${CMAKE_MATCH_1}") endif() list(SORT archs) unset(CMAKE_CUDA_ARCHITECTURES CACHE) set(CMAKE_CUDA_ARCHITECTURES ${archs}) # Set up defaults based on CUDA varsion if(NOT CMAKE_CUDA_ARCHITECTURES) if(CUDA_VERSION VERSION_GREATER_EQUAL "12.8") set(CMAKE_CUDA_ARCHITECTURES 50 60 70 80 90 100 120) elseif(CUDA_VERSION VERSION_GREATER_EQUAL "11.8") set(CMAKE_CUDA_ARCHITECTURES 50 60 70 80 90) elseif(CUDA_VERSION VERSION_GREATER_EQUAL "11.0") set(CMAKE_CUDA_ARCHITECTURES 50 60 70 80) elseif(CUDA_VERSION VERSION_GREATER_EQUAL "10.0") set(CMAKE_CUDA_ARCHITECTURES 35 50 60 70) elseif(CUDA_VERSION VERSION_GREATER_EQUAL "9.0") set(CMAKE_CUDA_ARCHITECTURES 35 50 60 70) else() set(CMAKE_CUDA_ARCHITECTURES 35 50 60) endif() endif() list(TRANSFORM CMAKE_CUDA_ARCHITECTURES APPEND "-real") list(TRANSFORM CMAKE_CUDA_ARCHITECTURES REPLACE "([0-9]+)-real" "\\0;\\1-virtual" AT -1) set(CMAKE_CUDA_ARCHITECTURES "${CMAKE_CUDA_ARCHITECTURES}" PARENT_SCOPE) message(STATUS "CMAKE_CUDA_ARCHITECTURES: ${CMAKE_CUDA_ARCHITECTURES}") endfunction() # Set CUDA related flags to target. Must be used after code `format_gencode_flags`. function(xgboost_set_cuda_flags target) target_compile_options(${target} PRIVATE $<$:--expt-extended-lambda> $<$:--expt-relaxed-constexpr> $<$:-Xcompiler=${OpenMP_CXX_FLAGS}> $<$:-Xfatbin=-compress-all> $<$:--default-stream per-thread> ) if(FORCE_COLORED_OUTPUT) if(FORCE_COLORED_OUTPUT AND (CMAKE_GENERATOR STREQUAL "Ninja") AND ((CMAKE_CXX_COMPILER_ID STREQUAL "GNU") OR (CMAKE_CXX_COMPILER_ID STREQUAL "Clang"))) target_compile_options(${target} PRIVATE $<$:-Xcompiler=-fdiagnostics-color=always>) endif() endif() if(USE_DEVICE_DEBUG) target_compile_options(${target} PRIVATE $<$,$>:-G;-src-in-ptx>) else() target_compile_options(${target} PRIVATE $<$:-lineinfo>) endif() if(USE_NVTX) target_compile_definitions(${target} PRIVATE -DXGBOOST_USE_NVTX=1) endif() # Use CCCL we find before CUDA Toolkit to make sure we get newer headers as intended # The CUDA Toolkit includes its own copy of CCCL that often lags the latest releases # (and would be picked up otherwise) if(BUILD_STATIC_LIB) # If the downstream user is statically linking with libxgboost, it needs to # explicitly link with CCCL and CUDA runtime. target_link_libraries(${target} PUBLIC CCCL::CCCL CUDA::cudart_static) else() # If the downstream user is dynamically linking with libxgboost, it does not # need to link with CCCL and CUDA runtime. 
target_link_libraries(${target} PRIVATE CCCL::CCCL CUDA::cudart_static) endif() target_compile_definitions(${target} PRIVATE -DXGBOOST_USE_CUDA=1) target_include_directories( ${target} PRIVATE ${xgboost_SOURCE_DIR}/gputreeshap) if(MSVC) target_compile_options(${target} PRIVATE $<$:-Xcompiler=/utf-8>) endif() set_target_properties(${target} PROPERTIES CUDA_STANDARD 17 CUDA_STANDARD_REQUIRED ON) if(USE_CUDA_LTO) set_target_properties(${target} PROPERTIES INTERPROCEDURAL_OPTIMIZATION ON CUDA_SEPARABLE_COMPILATION ON) else() set_target_properties(${target} PROPERTIES CUDA_SEPARABLE_COMPILATION OFF) endif() endfunction() function(xgboost_link_nccl target) set(xgboost_nccl_flags -DXGBOOST_USE_NCCL=1) if(USE_DLOPEN_NCCL) list(APPEND xgboost_nccl_flags -DXGBOOST_USE_DLOPEN_NCCL=1) target_link_libraries(${target} PRIVATE ${CMAKE_DL_LIBS}) endif() if(BUILD_STATIC_LIB) target_include_directories(${target} PUBLIC ${NCCL_INCLUDE_DIR}) target_compile_definitions(${target} PUBLIC ${xgboost_nccl_flags}) target_link_libraries(${target} PUBLIC ${NCCL_LIBRARY}) else() target_include_directories(${target} PRIVATE ${NCCL_INCLUDE_DIR}) target_compile_definitions(${target} PRIVATE ${xgboost_nccl_flags}) if(NOT USE_DLOPEN_NCCL) target_link_libraries(${target} PRIVATE ${NCCL_LIBRARY}) endif() endif() endfunction() # compile options macro(xgboost_target_properties target) set_target_properties(${target} PROPERTIES CXX_STANDARD 17 CXX_STANDARD_REQUIRED ON POSITION_INDEPENDENT_CODE ON) if(HIDE_CXX_SYMBOLS) #-- Hide all C++ symbols set_target_properties(${target} PROPERTIES C_VISIBILITY_PRESET hidden CXX_VISIBILITY_PRESET hidden CUDA_VISIBILITY_PRESET hidden ) endif() if(ENABLE_ALL_WARNINGS) target_compile_options(${target} PUBLIC $, -Xcompiler=-Wall -Xcompiler=-Wextra -Xcompiler=-Wno-expansion-to-defined, -Wall -Wextra -Wno-expansion-to-defined> ) endif() target_compile_options(${target} PRIVATE $<$,$>:/MP> $<$>,$>:-funroll-loops>) if(MSVC) target_compile_options(${target} PRIVATE $<$>:/utf-8> -D_CRT_SECURE_NO_WARNINGS -D_CRT_SECURE_NO_DEPRECATE ) endif() if(WIN32 AND MINGW) target_compile_options(${target} PUBLIC -static-libstdc++) endif() if(NOT WIN32 AND ENABLE_ALL_WARNINGS) target_compile_options(${target} PRIVATE $<$:-Werror=cross-execution-space-call> ) endif() endmacro() # Custom definitions used in xgboost. macro(xgboost_target_defs target) if(NOT ${target} STREQUAL "dmlc") # skip dmlc core for custom logging. 
target_compile_definitions(${target} PRIVATE -DDMLC_LOG_CUSTOMIZE=1 $<$>:_MWAITXINTRIN_H_INCLUDED>) endif() if(USE_DEBUG_OUTPUT) target_compile_definitions(${target} PRIVATE -DXGBOOST_USE_DEBUG_OUTPUT=1) endif() if(XGBOOST_MM_PREFETCH_PRESENT) target_compile_definitions(${target} PRIVATE -DXGBOOST_MM_PREFETCH_PRESENT=1) endif() if(XGBOOST_BUILTIN_PREFETCH_PRESENT) target_compile_definitions(${target} PRIVATE -DXGBOOST_BUILTIN_PREFETCH_PRESENT=1) endif() if(PLUGIN_RMM) target_compile_definitions(objxgboost PUBLIC -DXGBOOST_USE_RMM=1) endif() endmacro() # handles dependencies macro(xgboost_target_link_libraries target) if(BUILD_STATIC_LIB) target_link_libraries(${target} PUBLIC Threads::Threads ${CMAKE_THREAD_LIBS_INIT}) else() target_link_libraries(${target} PRIVATE Threads::Threads ${CMAKE_THREAD_LIBS_INIT}) endif() if(USE_OPENMP) if(BUILD_STATIC_LIB) target_link_libraries(${target} PUBLIC OpenMP::OpenMP_CXX) else() target_link_libraries(${target} PRIVATE OpenMP::OpenMP_CXX) endif() endif() if(USE_CUDA) xgboost_set_cuda_flags(${target}) endif() if(PLUGIN_RMM) target_link_libraries(${target} PRIVATE rmm::rmm) endif() if(USE_NCCL) xgboost_link_nccl(${target}) endif() if(USE_NVTX) target_link_libraries(${target} PRIVATE CUDA::nvtx3) endif() if(MINGW) target_link_libraries(${target} PRIVATE wsock32 ws2_32) endif() endmacro() xgboost-3.0.0/cmake/Version.cmake000066400000000000000000000004011476511272400167120ustar00rootroot00000000000000function(write_version) message(STATUS "xgboost VERSION: ${xgboost_VERSION}") configure_file( ${xgboost_SOURCE_DIR}/cmake/version_config.h.in ${xgboost_SOURCE_DIR}/include/xgboost/version_config.h @ONLY NEWLINE_STYLE UNIX) endfunction() xgboost-3.0.0/cmake/modules/000077500000000000000000000000001476511272400157405ustar00rootroot00000000000000xgboost-3.0.0/cmake/modules/FindASan.cmake000066400000000000000000000006471476511272400203740ustar00rootroot00000000000000set(ASan_LIB_NAME ASan) find_library(ASan_LIBRARY NAMES libasan.so libasan.so.6 libasan.so.5 libasan.so.4 libasan.so.3 libasan.so.2 libasan.so.1 libasan.so.0 PATHS ${SANITIZER_PATH} /usr/lib64 /usr/lib /usr/local/lib64 /usr/local/lib ${CMAKE_PREFIX_PATH}/lib) include(FindPackageHandleStandardArgs) find_package_handle_standard_args(ASan DEFAULT_MSG ASan_LIBRARY) mark_as_advanced( ASan_LIBRARY ASan_LIB_NAME) xgboost-3.0.0/cmake/modules/FindLSan.cmake000066400000000000000000000005521476511272400204020ustar00rootroot00000000000000set(LSan_LIB_NAME lsan) find_library(LSan_LIBRARY NAMES liblsan.so liblsan.so.0 liblsan.so.0.0.0 PATHS ${SANITIZER_PATH} /usr/lib64 /usr/lib /usr/local/lib64 /usr/local/lib ${CMAKE_PREFIX_PATH}/lib) include(FindPackageHandleStandardArgs) find_package_handle_standard_args(LSan DEFAULT_MSG LSan_LIBRARY) mark_as_advanced( LSan_LIBRARY LSan_LIB_NAME) xgboost-3.0.0/cmake/modules/FindLibR.cmake000066400000000000000000000143431476511272400204000ustar00rootroot00000000000000# CMake module for R # Borrows ideas from RStudio's FindLibR.cmake # # Defines the following: # LIBR_FOUND # LIBR_HOME # LIBR_EXECUTABLE # LIBR_INCLUDE_DIRS # LIBR_LIB_DIR # LIBR_CORE_LIBRARY # and a cmake function to create R.lib for MSVC # # The following could be provided by user through cmake's -D options: # LIBR_EXECUTABLE (for unix and win) # R_VERSION (for win) # R_ARCH (for win 64 when want 32 bit build) # # TODO: # - someone to verify OSX detection, # - possibly, add OSX detection based on current R in PATH or LIBR_EXECUTABLE # - improve registry-based R_HOME detection in Windows (from a set of R_VERSION's) # 
Windows users might want to change this to their R version: if(NOT R_VERSION) set(R_VERSION "4.0.0") endif() if(NOT R_ARCH) if("${CMAKE_SIZEOF_VOID_P}" STREQUAL "4") set(R_ARCH "i386") else() set(R_ARCH "x64") endif() endif() # Creates R.lib and R.def in the build directory for linking with MSVC function(create_rlib_for_msvc) # various checks and warnings if(NOT WIN32 OR (NOT MSVC AND NOT MINGW)) message(FATAL_ERROR "create_rlib_for_msvc() can only be used with MSVC or MINGW") endif() if(NOT EXISTS "${LIBR_LIB_DIR}") message(FATAL_ERROR "LIBR_LIB_DIR was not set!") endif() find_program(DLLTOOL_EXE dlltool) if(NOT DLLTOOL_EXE) message(FATAL_ERROR "\ndlltool.exe not found!\ \nDo you have Rtools installed with its MinGW's bin/ in PATH?") endif() # extract symbols from R.dll into R.def and R.lib import library get_filename_component( LIBR_RSCRIPT_EXECUTABLE_DIR ${LIBR_EXECUTABLE} DIRECTORY ) set(LIBR_RSCRIPT_EXECUTABLE "${LIBR_RSCRIPT_EXECUTABLE_DIR}/Rscript") execute_process( COMMAND ${LIBR_RSCRIPT_EXECUTABLE} "${CMAKE_CURRENT_BINARY_DIR}/../../R-package/inst/make-r-def.R" "${LIBR_LIB_DIR}/R.dll" "${CMAKE_CURRENT_BINARY_DIR}/R.def" ) execute_process(COMMAND ${DLLTOOL_EXE} "--input-def" "${CMAKE_CURRENT_BINARY_DIR}/R.def" "--output-lib" "${CMAKE_CURRENT_BINARY_DIR}/R.lib" "--temp-prefix" "Rlibtemp" COMMAND_ECHO STDOUT COMMAND_ERROR_IS_FATAL ANY) endfunction() # detection for OSX if(APPLE) find_library(LIBR_LIBRARIES R) if(LIBR_LIBRARIES MATCHES ".*\\.framework") set(LIBR_HOME "${LIBR_LIBRARIES}/Resources" CACHE PATH "R home directory") set(LIBR_INCLUDE_DIRS "${LIBR_HOME}/include" CACHE PATH "R include directory") set(LIBR_EXECUTABLE "${LIBR_HOME}/R" CACHE PATH "R executable") set(LIBR_LIB_DIR "${LIBR_HOME}/lib" CACHE PATH "R lib directory") else() get_filename_component(_LIBR_LIBRARIES "${LIBR_LIBRARIES}" REALPATH) get_filename_component(_LIBR_LIBRARIES_DIR "${_LIBR_LIBRARIES}" DIRECTORY) set(LIBR_EXECUTABLE "${_LIBR_LIBRARIES_DIR}/../bin/R") execute_process( COMMAND ${LIBR_EXECUTABLE} "--slave" "--vanilla" "-e" "cat(R.home())" OUTPUT_VARIABLE LIBR_HOME) set(LIBR_HOME ${LIBR_HOME} CACHE PATH "R home directory") set(LIBR_INCLUDE_DIRS "${LIBR_HOME}/include" CACHE PATH "R include directory") set(LIBR_LIB_DIR "${LIBR_HOME}/lib" CACHE PATH "R lib directory") endif() # detection for UNIX & Win32 else() # attempt to find R executable if(NOT LIBR_EXECUTABLE) find_program(LIBR_EXECUTABLE NAMES R R.exe) endif() if(UNIX) if(NOT LIBR_EXECUTABLE) message(FATAL_ERROR "Unable to locate R executable.\ \nEither add its location to PATH or provide it through the LIBR_EXECUTABLE cmake variable") endif() # ask R for the home path execute_process( COMMAND ${LIBR_EXECUTABLE} "--slave" "--vanilla" "-e" "cat(R.home())" OUTPUT_VARIABLE LIBR_HOME ) # ask R for the include dir execute_process( COMMAND ${LIBR_EXECUTABLE} "--slave" "--vanilla" "-e" "cat(R.home('include'))" OUTPUT_VARIABLE LIBR_INCLUDE_DIRS ) # ask R for the lib dir execute_process( COMMAND ${LIBR_EXECUTABLE} "--slave" "--vanilla" "-e" "cat(R.home('lib'))" OUTPUT_VARIABLE LIBR_LIB_DIR ) # Windows else() # ask R for R_HOME if(LIBR_EXECUTABLE) execute_process( COMMAND ${LIBR_EXECUTABLE} "--slave" "--no-save" "-e" "cat(normalizePath(R.home(),winslash='/'))" OUTPUT_VARIABLE LIBR_HOME) endif() # if R executable not available, query R_HOME path from registry if(NOT LIBR_HOME) get_filename_component(LIBR_HOME "[HKEY_LOCAL_MACHINE\\SOFTWARE\\R-core\\R\\${R_VERSION};InstallPath]" ABSOLUTE) if(NOT LIBR_HOME) message(FATAL_ERROR "\nUnable to locate R executable.\ 
\nEither add its location to PATH or provide it through the LIBR_EXECUTABLE cmake variable") endif() endif() # set exe location based on R_ARCH if(NOT LIBR_EXECUTABLE) set(LIBR_EXECUTABLE "${LIBR_HOME}/bin/${R_ARCH}/R.exe") endif() # set other R paths based on home path set(LIBR_INCLUDE_DIRS "${LIBR_HOME}/include") set(LIBR_LIB_DIR "${LIBR_HOME}/bin/${R_ARCH}") message(STATUS "LIBR_HOME [${LIBR_HOME}]") message(STATUS "LIBR_EXECUTABLE [${LIBR_EXECUTABLE}]") message(STATUS "LIBR_INCLUDE_DIRS [${LIBR_INCLUDE_DIRS}]") message(STATUS "LIBR_LIB_DIR [${LIBR_LIB_DIR}]") message(STATUS "LIBR_CORE_LIBRARY [${LIBR_CORE_LIBRARY}]") endif() endif() if((WIN32 AND MSVC) OR (WIN32 AND MINGW)) # create a local R.lib import library for R.dll if it doesn't exist if(NOT EXISTS "${CMAKE_CURRENT_BINARY_DIR}/R.lib") create_rlib_for_msvc() endif() endif() # look for the core R library find_library(LIBR_CORE_LIBRARY NAMES R HINTS "${CMAKE_CURRENT_BINARY_DIR}" "${LIBR_LIB_DIR}" "${LIBR_HOME}/bin" "${LIBR_LIBRARIES}") if(LIBR_CORE_LIBRARY-NOTFOUND) message(STATUS "Could not find R core shared library.") endif() set(LIBR_HOME ${LIBR_HOME} CACHE PATH "R home directory") set(LIBR_EXECUTABLE ${LIBR_EXECUTABLE} CACHE PATH "R executable") set(LIBR_INCLUDE_DIRS ${LIBR_INCLUDE_DIRS} CACHE PATH "R include directory") set(LIBR_LIB_DIR ${LIBR_LIB_DIR} CACHE PATH "R shared libraries directory") set(LIBR_CORE_LIBRARY ${LIBR_CORE_LIBRARY} CACHE PATH "R core shared library") # define find requirements include(FindPackageHandleStandardArgs) find_package_handle_standard_args(LibR DEFAULT_MSG LIBR_HOME LIBR_EXECUTABLE LIBR_INCLUDE_DIRS LIBR_LIB_DIR LIBR_CORE_LIBRARY ) if(LIBR_FOUND) message(STATUS "Found R: ${LIBR_EXECUTABLE}") endif() xgboost-3.0.0/cmake/modules/FindNVML.cmake000066400000000000000000000007671476511272400203310ustar00rootroot00000000000000if(NVML_LIBRARY) unset(NVML_LIBRARY CACHE) endif() set(NVML_LIB_NAME nvml) find_path(NVML_INCLUDE_DIR NAMES nvml.h PATHS ${CUDA_HOME}/include ${CUDA_INCLUDE} /usr/local/cuda/include) find_library(NVML_LIBRARY NAMES nvidia-ml) message(STATUS "Using nvml library: ${NVML_LIBRARY}") include(FindPackageHandleStandardArgs) find_package_handle_standard_args(NVML DEFAULT_MSG NVML_INCLUDE_DIR NVML_LIBRARY) mark_as_advanced( NVML_INCLUDE_DIR NVML_LIBRARY ) xgboost-3.0.0/cmake/modules/FindNccl.cmake000066400000000000000000000045651476511272400204340ustar00rootroot00000000000000# # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # # Tries to find NCCL headers and libraries. # # Usage of this module as follows: # # find_package(NCCL) # # Variables used by this module, they can change the default behaviour and need # to be set before calling find_package: # # NCCL_ROOT - When set, this path is inspected instead of standard library # locations as the root of the NCCL installation. # The environment variable NCCL_ROOT overrides this variable. 
# # This module defines # Nccl_FOUND, whether nccl has been found # NCCL_INCLUDE_DIR, directory containing header # NCCL_LIBRARY, directory containing nccl library # NCCL_LIB_NAME, nccl library name # USE_NCCL_LIB_PATH, when set, NCCL_LIBRARY path is also inspected for the # location of the nccl library. This would disable # switching between static and shared. # # This module assumes that the user has already called find_package(CUDA) if(NCCL_LIBRARY) if(NOT USE_NCCL_LIB_PATH) # Don't cache NCCL_LIBRARY to enable switching between static and shared. unset(NCCL_LIBRARY CACHE) endif() endif() if(BUILD_WITH_SHARED_NCCL) # libnccl.so set(NCCL_LIB_NAME nccl) else() # libnccl_static.a set(NCCL_LIB_NAME nccl_static) endif() find_path(NCCL_INCLUDE_DIR NAMES nccl.h HINTS ${NCCL_ROOT}/include $ENV{NCCL_ROOT}/include) if(USE_DLOPEN_NCCL) include(FindPackageHandleStandardArgs) find_package_handle_standard_args(Nccl DEFAULT_MSG NCCL_INCLUDE_DIR) mark_as_advanced(NCCL_INCLUDE_DIR) else() find_library(NCCL_LIBRARY NAMES ${NCCL_LIB_NAME} HINTS ${NCCL_ROOT}/lib $ENV{NCCL_ROOT}/lib/) message(STATUS "Using nccl library: ${NCCL_LIBRARY}") include(FindPackageHandleStandardArgs) find_package_handle_standard_args(Nccl DEFAULT_MSG NCCL_INCLUDE_DIR NCCL_LIBRARY) mark_as_advanced( NCCL_INCLUDE_DIR NCCL_LIBRARY ) endif() xgboost-3.0.0/cmake/modules/FindTSan.cmake000066400000000000000000000005521476511272400204120ustar00rootroot00000000000000set(TSan_LIB_NAME tsan) find_library(TSan_LIBRARY NAMES libtsan.so libtsan.so.0 libtsan.so.0.0.0 PATHS ${SANITIZER_PATH} /usr/lib64 /usr/lib /usr/local/lib64 /usr/local/lib ${CMAKE_PREFIX_PATH}/lib) include(FindPackageHandleStandardArgs) find_package_handle_standard_args(TSan DEFAULT_MSG TSan_LIBRARY) mark_as_advanced( TSan_LIBRARY TSan_LIB_NAME) xgboost-3.0.0/cmake/modules/FindUBSan.cmake000066400000000000000000000006501476511272400205140ustar00rootroot00000000000000set(UBSan_LIB_NAME UBSan) find_library(UBSan_LIBRARY NAMES libubsan.so libubsan.so.5 libubsan.so.4 libubsan.so.3 libubsan.so.2 libubsan.so.1 libubsan.so.0 PATHS ${SANITIZER_PATH} /usr/lib64 /usr/lib /usr/local/lib64 /usr/local/lib ${CMAKE_PREFIX_PATH}/lib) include(FindPackageHandleStandardArgs) find_package_handle_standard_args(UBSan DEFAULT_MSG UBSan_LIBRARY) mark_as_advanced( UBSan_LIBRARY UBSan_LIB_NAME) xgboost-3.0.0/cmake/version_config.h.in000066400000000000000000000005421476511272400200610ustar00rootroot00000000000000/** * Copyright 2019-2023 by XGBoost contributors */ #ifndef XGBOOST_VERSION_CONFIG_H_ #define XGBOOST_VERSION_CONFIG_H_ #define XGBOOST_VER_MAJOR @xgboost_VERSION_MAJOR@ /* NOLINT */ #define XGBOOST_VER_MINOR @xgboost_VERSION_MINOR@ /* NOLINT */ #define XGBOOST_VER_PATCH @xgboost_VERSION_PATCH@ /* NOLINT */ #endif // XGBOOST_VERSION_CONFIG_H_ xgboost-3.0.0/cmake/xgboost-config.cmake.in000066400000000000000000000011571476511272400206330ustar00rootroot00000000000000@PACKAGE_INIT@ set(USE_OPENMP @USE_OPENMP@) set(USE_CUDA @USE_CUDA@) set(USE_NCCL @USE_NCCL@) set(XGBOOST_BUILD_STATIC_LIB @BUILD_STATIC_LIB@) include(CMakeFindDependencyMacro) if (XGBOOST_BUILD_STATIC_LIB) find_dependency(Threads) if(USE_OPENMP) find_dependency(OpenMP) endif() if(USE_CUDA) find_dependency(CUDA) endif() # nccl should be linked statically if xgboost is built as static library. 
endif (XGBOOST_BUILD_STATIC_LIB) if(NOT TARGET xgboost::xgboost) include(${CMAKE_CURRENT_LIST_DIR}/XGBoostTargets.cmake) endif() message(STATUS "Found XGBoost (found version \"${xgboost_VERSION}\")") xgboost-3.0.0/cmake/xgboost.pc.in000066400000000000000000000004341476511272400167070ustar00rootroot00000000000000prefix=@CMAKE_INSTALL_PREFIX@ version=@xgboost_VERSION@ exec_prefix=${prefix}/bin libdir=${prefix}/lib includedir=${prefix}/include Name: xgboost Description: XGBoost - Scalable and Flexible Gradient Boosting. Version: ${version} Cflags: -I${includedir} Libs: -L${libdir} -lxgboost xgboost-3.0.0/demo/000077500000000000000000000000001476511272400141345ustar00rootroot00000000000000xgboost-3.0.0/demo/.gitignore000066400000000000000000000000171476511272400161220ustar00rootroot00000000000000*.libsvm *.pkl xgboost-3.0.0/demo/CLI/000077500000000000000000000000001476511272400145435ustar00rootroot00000000000000xgboost-3.0.0/demo/CLI/README.rst000066400000000000000000000004201476511272400162260ustar00rootroot00000000000000XGBoost Command Line Interface Walkthrough ========================================== Please note that the command line interface is deprecated in 2.1.0, use other language bindings instead. For a list of available bindings, see https://xgboost.readthedocs.io/en/stable/ xgboost-3.0.0/demo/CLI/binary_classification/000077500000000000000000000000001476511272400211025ustar00rootroot00000000000000xgboost-3.0.0/demo/CLI/binary_classification/README.md000066400000000000000000000175671476511272400224010ustar00rootroot00000000000000Binary Classification ===================== This is the quick start tutorial for xgboost CLI version. Here we demonstrate how to use XGBoost for a binary classification task. Before getting started, make sure you compile xgboost in the root directory of the project by typing ```make```. The script 'runexp.sh' can be used to run the demo. Here we use [mushroom dataset](https://archive.ics.uci.edu/ml/datasets/Mushroom) from UCI machine learning repository. ### Tutorial #### Generate Input Data XGBoost takes LIBSVM format. An example of faked input data is below: ``` 1 101:1.2 102:0.03 0 1:2.1 10001:300 10002:400 ... ``` Each line represent a single instance, and in the first line '1' is the instance label,'101' and '102' are feature indices, '1.2' and '0.03' are feature values. In the binary classification case, '1' is used to indicate positive samples, and '0' is used to indicate negative samples. We also support probability values in [0,1] as label, to indicate the probability of the instance being positive. First we will transform the dataset into classic LIBSVM format and split the data into training set and test set by running: ``` python mapfeat.py python mknfold.py agaricus.txt 1 ``` The two files, 'agaricus.txt.train' and 'agaricus.txt.test' will be used as training set and test set. #### Training Then we can run the training process: ``` ../../xgboost mushroom.conf ``` mushroom.conf is the configuration for both training and testing. 
Each line contains an [attribute]=[value] configuration:
```conf
# General Parameters, see comment for each definition
# can be gbtree or gblinear
booster = gbtree
# choose logistic regression loss function for binary classification
objective = binary:logistic

# Tree Booster Parameters
# step size shrinkage
eta = 1.0
# minimum loss reduction required to make a further partition
gamma = 1.0
# minimum sum of instance weight(hessian) needed in a child
min_child_weight = 1
# maximum depth of a tree
max_depth = 3

# Task Parameters
# the number of rounds to do boosting
num_round = 2
# 0 means do not save any model except the final round model
save_period = 0
# The path of training data
data = "agaricus.txt.train"
# The path of validation data, used to monitor training process, here [test] sets name of the validation set
eval[test] = "agaricus.txt.test"
# The path of test data
test:data = "agaricus.txt.test"
```
We use the tree booster and logistic regression objective in our setting. This indicates that we accomplish our task using classic gradient boosting regression trees (GBRT), which is a promising method for binary classification.

The parameters shown in the example give the most common ones that are needed to use xgboost. If you are interested in more parameter settings, the complete parameter settings and detailed descriptions are [here](https://xgboost.readthedocs.io/en/stable/parameter.html). Besides putting the parameters in the configuration file, we can set them by passing them as arguments as below:

```
../../xgboost mushroom.conf max_depth=6
```
This means that the parameter max_depth will be set to 6 rather than the 3 in the conf file. When you use the command line, make sure max_depth=6 is passed in as a single argument, i.e. it must not contain spaces. When a parameter setting is provided both on the command line and in the config file, the command line setting will override the setting in the config file.

In this example, we use the tree booster for gradient boosting. If you would like to use the linear booster for regression, you can keep all the parameters except booster and the tree booster parameters, as below:
```conf
# General Parameters
# choose the linear booster
booster = gblinear
...

# Change Tree Booster Parameters into Linear Booster Parameters
# L2 regularization term on weights, default 0
lambda = 0.01
# L1 regularization term on weights, default 0
alpha = 0.01
# L2 regularization term on bias, default 0
lambda_bias = 0.01

# Regression Parameters
...
```

#### Get Predictions
After training, we can use the output model to get the prediction of the test data:
```
../../xgboost mushroom.conf task=pred model_in=0002.model
```
For binary classification, the output predictions are probability confidence scores in [0,1], corresponding to the probability of the label being positive.

#### Dump Model
This is a preliminary feature, so only tree models support text dump. XGBoost can display the tree models in text or JSON files, and we can scan the model in an easy way:
```
../../xgboost mushroom.conf task=dump model_in=0002.model name_dump=dump.raw.txt
../../xgboost mushroom.conf task=dump model_in=0002.model fmap=featmap.txt name_dump=dump.nice.txt
```

In this demo, the tree boosters obtained will be printed in dump.raw.txt and dump.nice.txt, and the latter one is easier to understand because of the use of the feature mapping featmap.txt.

Format of ```featmap.txt: <featureid> <featurename> <q or i or int>\n ```:
- Feature id must be from 0 to number of features, in sorted order.
- i means this feature is a binary indicator feature
- q means this feature is a quantitative value, such as age or time, and can be missing
- int means this feature is an integer value (when int is hinted, the decision boundary will be integer)

#### Monitoring Progress
When you run training, you will find messages displayed on screen:
```
tree train end, 1 roots, 12 extra nodes, 0 pruned nodes ,max_depth=3
[0] test-error:0.016139
boosting round 1, 0 sec elapsed
tree train end, 1 roots, 10 extra nodes, 0 pruned nodes ,max_depth=3
[1] test-error:0.000000
```
The evaluation messages are printed to stderr, so if you want only to log the evaluation progress, simply type
```
../../xgboost mushroom.conf 2>log.txt
```
Then you can find the following content in log.txt:
```
[0] test-error:0.016139
[1] test-error:0.000000
```
We can also monitor both training and test statistics by adding the following lines to the configuration:
```conf
eval[test] = "agaricus.txt.test"
eval[trainname] = "agaricus.txt.train"
```
Run the command again, and the log file becomes:
```
[0] test-error:0.016139 trainname-error:0.014433
[1] test-error:0.000000 trainname-error:0.001228
```
The rule is eval[name-printed-in-log] = filename: the file is then added to the monitoring process and evaluated each round. xgboost also supports monitoring multiple metrics; suppose we also want to monitor the average log-likelihood of each prediction during training, simply add ```eval_metric=logloss``` to the configuration. Run again, and the log file becomes:
```
[0] test-error:0.016139 test-negllik:0.029795 trainname-error:0.014433 trainname-negllik:0.027023
[1] test-error:0.000000 test-negllik:0.000000 trainname-error:0.001228 trainname-negllik:0.002457
```

#### Saving Progress Models
If you want to save the model every two rounds, simply set save_period=2. You will find 0002.model in the current folder. If you want to change the output folder of models, add model_dir=foldername. By default xgboost saves the model of the last round.

#### Continue from Existing Model
If you want to continue boosting from an existing model, say 0002.model, use
```
../../xgboost mushroom.conf model_in=0002.model num_round=2 model_out=continue.model
```
xgboost will load 0002.model, continue boosting for 2 rounds, and save the output to continue.model. However, beware that the training and evaluation data specified in mushroom.conf should not change when you use this function.

#### Use Multi-Threading
When you are working with a large dataset, you may want to take advantage of parallelism. If your compiler supports OpenMP, xgboost is naturally multi-threaded; to set the number of parallel threads, add the ```nthread``` parameter to your configuration, e.g. ```nthread=10```. Set nthread to the number of physical CPU cores (on Unix, this can be found using ```lscpu```). Some systems report ```Thread(s) per core = 2```; for example, on a 4-core CPU with 8 threads, set ```nthread=4``` and not 8.
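Putting the options above together, a full training run that uses multi-threading, monitors an extra metric, and keeps the evaluation log can be issued as a single command (assuming, for illustration, the 4-core machine from the previous paragraph):
```
../../xgboost mushroom.conf nthread=4 eval_metric=logloss 2>log.txt
```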
xgboost-3.0.0/demo/CLI/binary_classification/agaricus-lepiota.data000066400000000000000000013317101476511272400251740ustar00rootroot00000000000000p,x,s,n,t,p,f,c,n,k,e,e,s,s,w,w,p,w,o,p,k,s,u e,x,s,y,t,a,f,c,b,k,e,c,s,s,w,w,p,w,o,p,n,n,g e,b,s,w,t,l,f,c,b,n,e,c,s,s,w,w,p,w,o,p,n,n,m p,x,y,w,t,p,f,c,n,n,e,e,s,s,w,w,p,w,o,p,k,s,u e,x,s,g,f,n,f,w,b,k,t,e,s,s,w,w,p,w,o,e,n,a,g e,x,y,y,t,a,f,c,b,n,e,c,s,s,w,w,p,w,o,p,k,n,g e,b,s,w,t,a,f,c,b,g,e,c,s,s,w,w,p,w,o,p,k,n,m e,b,y,w,t,l,f,c,b,n,e,c,s,s,w,w,p,w,o,p,n,s,m p,x,y,w,t,p,f,c,n,p,e,e,s,s,w,w,p,w,o,p,k,v,g e,b,s,y,t,a,f,c,b,g,e,c,s,s,w,w,p,w,o,p,k,s,m e,x,y,y,t,l,f,c,b,g,e,c,s,s,w,w,p,w,o,p,n,n,g e,x,y,y,t,a,f,c,b,n,e,c,s,s,w,w,p,w,o,p,k,s,m e,b,s,y,t,a,f,c,b,w,e,c,s,s,w,w,p,w,o,p,n,s,g p,x,y,w,t,p,f,c,n,k,e,e,s,s,w,w,p,w,o,p,n,v,u e,x,f,n,f,n,f,w,b,n,t,e,s,f,w,w,p,w,o,e,k,a,g e,s,f,g,f,n,f,c,n,k,e,e,s,s,w,w,p,w,o,p,n,y,u e,f,f,w,f,n,f,w,b,k,t,e,s,s,w,w,p,w,o,e,n,a,g p,x,s,n,t,p,f,c,n,n,e,e,s,s,w,w,p,w,o,p,k,s,g p,x,y,w,t,p,f,c,n,n,e,e,s,s,w,w,p,w,o,p,n,s,u p,x,s,n,t,p,f,c,n,k,e,e,s,s,w,w,p,w,o,p,n,s,u e,b,s,y,t,a,f,c,b,k,e,c,s,s,w,w,p,w,o,p,n,s,m p,x,y,n,t,p,f,c,n,n,e,e,s,s,w,w,p,w,o,p,n,v,g e,b,y,y,t,l,f,c,b,k,e,c,s,s,w,w,p,w,o,p,n,s,m e,b,y,w,t,a,f,c,b,w,e,c,s,s,w,w,p,w,o,p,n,n,m e,b,s,w,t,l,f,c,b,g,e,c,s,s,w,w,p,w,o,p,k,s,m p,f,s,w,t,p,f,c,n,n,e,e,s,s,w,w,p,w,o,p,n,v,g e,x,y,y,t,a,f,c,b,n,e,c,s,s,w,w,p,w,o,p,n,n,m e,x,y,w,t,l,f,c,b,w,e,c,s,s,w,w,p,w,o,p,n,n,m e,f,f,n,f,n,f,c,n,k,e,e,s,s,w,w,p,w,o,p,k,y,u e,x,s,y,t,a,f,w,n,n,t,b,s,s,w,w,p,w,o,p,n,v,d e,b,s,y,t,l,f,c,b,g,e,c,s,s,w,w,p,w,o,p,n,n,m p,x,y,w,t,p,f,c,n,k,e,e,s,s,w,w,p,w,o,p,n,s,u e,x,y,y,t,l,f,c,b,n,e,c,s,s,w,w,p,w,o,p,n,n,m e,x,y,n,t,l,f,c,b,p,e,r,s,y,w,w,p,w,o,p,n,y,p e,b,y,y,t,l,f,c,b,n,e,c,s,s,w,w,p,w,o,p,n,s,m e,x,f,y,t,l,f,w,n,w,t,b,s,s,w,w,p,w,o,p,n,v,d e,s,f,g,f,n,f,c,n,k,e,e,s,s,w,w,p,w,o,p,k,v,u p,x,y,n,t,p,f,c,n,w,e,e,s,s,w,w,p,w,o,p,n,s,u e,x,f,y,t,a,f,w,n,p,t,b,s,s,w,w,p,w,o,p,n,v,d e,b,s,y,t,l,f,c,b,k,e,c,s,s,w,w,p,w,o,p,k,s,m e,b,y,y,t,a,f,c,b,n,e,c,s,s,w,w,p,w,o,p,n,s,g e,x,y,y,t,l,f,c,b,n,e,r,s,y,w,w,p,w,o,p,k,y,p e,x,f,n,f,n,f,c,n,g,e,e,s,s,w,w,p,w,o,p,k,y,u p,x,y,w,t,p,f,c,n,p,e,e,s,s,w,w,p,w,o,p,n,v,g e,x,s,y,t,a,f,c,b,w,e,c,s,s,w,w,p,w,o,p,k,n,m e,x,y,w,t,a,f,c,b,n,e,c,s,s,w,w,p,w,o,p,n,n,g e,x,y,y,t,l,f,c,b,k,e,c,s,s,w,w,p,w,o,p,k,s,m e,x,s,w,t,l,f,c,b,w,e,c,s,s,w,w,p,w,o,p,n,n,m e,x,y,y,t,l,f,c,b,n,e,r,s,y,w,w,p,w,o,p,n,s,p e,f,y,y,t,l,f,c,b,w,e,r,s,y,w,w,p,w,o,p,k,s,p e,x,y,n,t,a,f,c,b,w,e,r,s,y,w,w,p,w,o,p,k,s,g e,x,s,w,t,l,f,c,b,k,e,c,s,s,w,w,p,w,o,p,k,s,g e,b,s,w,t,l,f,c,b,k,e,c,s,s,w,w,p,w,o,p,n,n,m p,x,y,n,t,p,f,c,n,k,e,e,s,s,w,w,p,w,o,p,n,v,u p,x,s,w,t,p,f,c,n,k,e,e,s,s,w,w,p,w,o,p,k,v,u e,b,y,y,t,a,f,c,b,w,e,c,s,s,w,w,p,w,o,p,k,s,m e,f,f,g,f,n,f,w,b,n,t,e,s,s,w,w,p,w,o,e,n,a,g e,b,s,w,t,a,f,c,b,w,e,c,s,s,w,w,p,w,o,p,n,n,g e,x,s,y,t,l,f,c,b,k,e,c,s,s,w,w,p,w,o,p,k,n,g e,x,y,n,t,a,f,c,b,p,e,r,s,y,w,w,p,w,o,p,k,y,p e,s,f,g,f,n,f,c,n,k,e,e,s,s,w,w,p,w,o,p,n,v,u e,b,y,y,t,a,f,c,b,k,e,c,s,s,w,w,p,w,o,p,n,s,m e,b,s,y,t,l,f,c,b,g,e,c,s,s,w,w,p,w,o,p,n,s,m e,b,y,y,t,l,f,c,b,g,e,c,s,s,w,w,p,w,o,p,n,n,m e,b,y,w,t,l,f,c,b,n,e,c,s,s,w,w,p,w,o,p,n,s,g e,f,s,n,f,n,f,w,b,k,t,e,s,s,w,w,p,w,o,e,k,a,g e,x,s,w,t,l,f,c,b,n,e,c,s,s,w,w,p,w,o,p,k,s,g e,f,y,y,t,a,f,c,b,w,e,r,s,y,w,w,p,w,o,p,n,s,g e,x,y,y,t,a,f,c,b,w,e,c,s,s,w,w,p,w,o,p,k,n,g e,x,f,g,f,n,f,c,n,p,e,e,s,s,w,w,p,w,o,p,n,v,u e,f,f,y,t,l,f,w,n,p,t,b,s,s,w,w,p,w,o,p,n,v,d e,b,y,w,t,l,f,c,b,g,e,c,s,s,w,w,p,w,o,p,n,s,m e,f,f,y,t,l,f,w,n,w,t,b,s,s,w,w,p,w,o,p,n,v,d e,x,y,n,t,a,f,c,b,p,e,r,s,y,w,w,p,w,o,p,k,s,p 
e,b,s,y,t,a,f,c,b,k,e,c,s,s,w,w,p,w,o,p,k,s,g e,f,s,y,t,l,f,w,n,p,t,b,s,s,w,w,p,w,o,p,n,v,d e,x,s,w,t,l,f,w,n,n,t,b,s,s,w,w,p,w,o,p,u,v,d e,f,y,n,t,l,f,c,b,p,e,r,s,y,w,w,p,w,o,p,n,y,p p,x,y,n,t,p,f,c,n,w,e,e,s,s,w,w,p,w,o,p,n,v,u e,f,y,n,t,a,f,c,b,n,e,r,s,y,w,w,p,w,o,p,n,y,g e,x,s,n,f,n,f,w,b,k,t,e,f,s,w,w,p,w,o,e,n,s,g p,x,y,w,t,p,f,c,n,w,e,e,s,s,w,w,p,w,o,p,k,s,g e,f,f,g,f,n,f,c,n,n,e,e,s,s,w,w,p,w,o,p,n,y,u e,x,f,g,f,n,f,w,b,n,t,e,s,s,w,w,p,w,o,e,n,s,g e,x,y,y,t,l,f,c,b,w,e,r,s,y,w,w,p,w,o,p,k,s,g e,x,s,n,f,n,f,w,b,k,t,e,s,s,w,w,p,w,o,e,k,s,g e,b,s,w,t,a,f,c,b,w,e,c,s,s,w,w,p,w,o,p,k,s,g e,x,s,w,t,l,f,c,b,n,e,c,s,s,w,w,p,w,o,p,n,s,g e,f,y,n,t,l,f,c,b,w,e,r,s,y,w,w,p,w,o,p,k,y,g e,s,f,n,f,n,f,c,n,n,e,e,s,s,w,w,p,w,o,p,n,v,u e,x,f,n,f,n,f,c,n,n,e,e,s,s,w,w,p,w,o,p,n,y,u e,b,s,w,t,l,f,c,b,k,e,c,s,s,w,w,p,w,o,p,k,s,g e,x,y,y,t,a,f,c,b,g,e,c,s,s,w,w,p,w,o,p,k,s,g e,x,y,y,t,l,f,c,b,g,e,c,s,s,w,w,p,w,o,p,k,n,m e,x,s,n,f,n,f,w,b,n,t,e,s,s,w,w,p,w,o,e,n,a,g e,x,s,w,t,a,f,c,b,g,e,c,s,s,w,w,p,w,o,p,n,s,g e,f,y,n,t,l,f,c,b,p,e,r,s,y,w,w,p,w,o,p,n,s,g e,x,s,y,t,a,f,c,b,n,e,c,s,s,w,w,p,w,o,p,k,n,g e,b,s,w,t,a,f,c,b,g,e,c,s,s,w,w,p,w,o,p,n,s,g e,x,y,w,t,a,f,c,b,g,e,c,s,s,w,w,p,w,o,p,k,s,g e,x,f,n,f,n,f,w,b,p,t,e,f,s,w,w,p,w,o,e,k,s,g e,b,s,y,t,l,f,c,b,n,e,c,s,s,w,w,p,w,o,p,k,n,g e,f,y,y,t,l,f,c,b,w,e,r,s,y,w,w,p,w,o,p,n,s,g e,x,y,y,t,a,f,c,b,n,e,r,s,y,w,w,p,w,o,p,k,y,p e,b,y,w,t,l,f,c,b,g,e,c,s,s,w,w,p,w,o,p,n,n,g e,x,y,y,t,a,f,c,b,n,e,c,s,s,w,w,p,w,o,p,k,n,m e,x,y,y,t,a,f,c,b,w,e,r,s,y,w,w,p,w,o,p,n,y,g e,b,y,w,t,l,f,c,b,n,e,c,s,s,w,w,p,w,o,p,k,s,m e,b,y,w,t,a,f,c,b,g,e,c,s,s,w,w,p,w,o,p,n,s,m e,x,s,y,t,a,f,c,b,k,e,c,s,s,w,w,p,w,o,p,k,n,m e,x,s,y,t,l,f,c,b,w,e,c,s,s,w,w,p,w,o,p,k,n,g e,s,f,g,f,n,f,c,n,g,e,e,s,s,w,w,p,w,o,p,k,y,u e,x,f,w,t,a,f,w,n,w,t,b,s,s,w,w,p,w,o,p,u,v,d e,x,s,y,t,a,f,c,b,n,e,c,s,s,w,w,p,w,o,p,k,n,m p,x,y,w,t,p,f,c,n,n,e,e,s,s,w,w,p,w,o,p,n,v,u e,x,y,y,t,l,f,c,b,p,e,r,s,y,w,w,p,w,o,p,n,s,g e,s,f,g,f,n,f,c,n,p,e,e,s,s,w,w,p,w,o,p,n,y,u e,x,y,y,t,l,f,c,b,w,e,r,s,y,w,w,p,w,o,p,k,y,g e,x,s,y,t,l,f,w,n,p,t,b,s,s,w,w,p,w,o,p,u,v,d e,s,f,n,f,n,f,c,n,k,e,e,s,s,w,w,p,w,o,p,n,y,u p,x,s,w,t,p,f,c,n,k,e,e,s,s,w,w,p,w,o,p,k,v,g e,x,y,w,t,a,f,c,b,g,e,c,s,s,w,w,p,w,o,p,n,n,m p,f,y,n,t,p,f,c,n,p,e,e,s,s,w,w,p,w,o,p,k,v,g e,f,s,g,f,n,f,w,b,k,t,e,s,s,w,w,p,w,o,e,n,a,g e,x,s,y,t,l,f,c,b,g,e,c,s,s,w,w,p,w,o,p,n,s,m e,x,s,w,f,n,f,w,b,n,t,e,s,f,w,w,p,w,o,e,k,s,g e,b,s,y,t,a,f,c,b,w,e,c,s,s,w,w,p,w,o,p,n,n,g e,f,f,g,f,n,f,w,b,h,t,e,s,s,w,w,p,w,o,e,n,a,g e,x,s,w,t,l,f,c,b,n,e,c,s,s,w,w,p,w,o,p,k,n,g e,b,s,w,t,l,f,c,b,n,e,c,s,s,w,w,p,w,o,p,n,s,m e,b,s,w,t,l,f,c,b,w,e,c,s,s,w,w,p,w,o,p,n,s,g e,b,y,w,t,l,f,c,b,w,e,c,s,s,w,w,p,w,o,p,n,s,m e,f,s,w,t,l,f,w,n,w,t,b,s,s,w,w,p,w,o,p,u,v,d e,x,y,y,t,l,f,c,b,g,e,c,s,s,w,w,p,w,o,p,k,s,m e,f,s,w,t,a,f,w,n,p,t,b,s,s,w,w,p,w,o,p,n,v,d p,x,y,w,t,p,f,c,n,w,e,e,s,s,w,w,p,w,o,p,n,v,u e,f,f,w,t,l,f,w,n,w,t,b,s,s,w,w,p,w,o,p,n,v,d e,x,y,y,t,a,f,c,b,n,e,c,s,s,w,w,p,w,o,p,n,s,g p,x,s,n,t,p,f,c,n,p,e,e,s,s,w,w,p,w,o,p,n,v,g e,b,s,y,t,l,f,c,b,w,e,c,s,s,w,w,p,w,o,p,n,n,g e,x,y,n,t,a,f,c,b,w,e,r,s,y,w,w,p,w,o,p,k,y,p e,b,y,y,t,l,f,c,b,g,e,c,s,s,w,w,p,w,o,p,k,n,m e,s,f,n,f,n,f,c,n,k,e,e,s,s,w,w,p,w,o,p,n,v,u e,f,y,n,t,a,f,c,b,w,e,r,s,y,w,w,p,w,o,p,k,y,p e,x,y,y,t,a,f,c,b,k,e,c,s,s,w,w,p,w,o,p,k,n,g e,x,f,g,f,n,f,w,b,k,t,e,f,f,w,w,p,w,o,e,k,s,g e,f,f,w,f,n,f,w,b,k,t,e,s,f,w,w,p,w,o,e,n,a,g e,x,y,y,t,l,f,c,b,w,e,c,s,s,w,w,p,w,o,p,n,n,m e,b,s,y,t,l,f,c,b,k,e,c,s,s,w,w,p,w,o,p,k,n,g e,b,y,w,t,a,f,c,b,g,e,c,s,s,w,w,p,w,o,p,k,n,m e,x,y,w,t,a,f,c,b,w,e,c,s,s,w,w,p,w,o,p,n,s,g 
e,x,s,n,f,n,f,w,b,p,t,e,f,s,w,w,p,w,o,e,n,a,g e,x,y,w,t,l,f,c,b,g,e,c,s,s,w,w,p,w,o,p,n,s,g e,s,f,n,f,n,f,c,n,k,e,e,s,s,w,w,p,w,o,p,k,v,u e,x,s,w,t,a,f,w,n,w,t,b,s,s,w,w,p,w,o,p,u,v,d e,x,y,n,t,l,f,c,b,w,e,r,s,y,w,w,p,w,o,p,k,s,g e,b,y,y,t,a,f,c,b,k,e,c,s,s,w,w,p,w,o,p,n,n,g e,x,y,w,t,a,f,c,b,k,e,c,s,s,w,w,p,w,o,p,n,n,g e,b,y,w,t,a,f,c,b,n,e,c,s,s,w,w,p,w,o,p,k,s,m e,b,s,y,t,a,f,c,b,g,e,c,s,s,w,w,p,w,o,p,k,s,g e,b,s,y,t,l,f,c,b,g,e,c,s,s,w,w,p,w,o,p,k,s,m e,b,y,y,t,a,f,c,b,n,e,c,s,s,w,w,p,w,o,p,k,n,g e,x,f,n,f,n,f,c,n,k,e,e,s,s,w,w,p,w,o,p,n,y,u e,f,y,n,t,l,f,c,b,n,e,r,s,y,w,w,p,w,o,p,n,y,g e,x,y,w,t,a,f,c,b,k,e,c,s,s,w,w,p,w,o,p,n,s,g e,f,y,y,t,l,f,c,b,w,e,r,s,y,w,w,p,w,o,p,n,y,p e,b,s,w,t,a,f,c,b,w,e,c,s,s,w,w,p,w,o,p,n,s,g e,b,s,w,t,a,f,c,b,w,e,c,s,s,w,w,p,w,o,p,k,s,m e,x,y,n,t,l,f,c,b,w,e,r,s,y,w,w,p,w,o,p,k,y,g e,b,s,w,t,a,f,c,b,k,e,c,s,s,w,w,p,w,o,p,k,s,g e,x,f,g,f,n,f,c,n,g,e,e,s,s,w,w,p,w,o,p,n,y,u e,b,s,y,t,l,f,c,b,k,e,c,s,s,w,w,p,w,o,p,n,s,g e,x,f,y,t,l,f,w,n,n,t,b,s,s,w,w,p,w,o,p,u,v,d e,b,y,y,t,a,f,c,b,w,e,c,s,s,w,w,p,w,o,p,k,s,g e,f,y,y,t,l,f,c,b,p,e,r,s,y,w,w,p,w,o,p,n,s,g e,b,y,w,t,l,f,c,b,w,e,c,s,s,w,w,p,w,o,p,k,n,m e,b,y,w,t,a,f,c,b,k,e,c,s,s,w,w,p,w,o,p,k,n,m e,b,y,y,t,a,f,c,b,g,e,c,s,s,w,w,p,w,o,p,n,s,g e,x,y,y,t,l,f,c,b,g,e,c,s,s,w,w,p,w,o,p,n,n,m e,b,s,y,t,l,f,c,b,g,e,c,s,s,w,w,p,w,o,p,n,n,g p,x,y,w,t,p,f,c,n,p,e,e,s,s,w,w,p,w,o,p,n,v,u e,s,f,n,f,n,f,c,n,g,e,e,s,s,w,w,p,w,o,p,n,y,u e,f,f,n,f,n,f,c,n,g,e,e,s,s,w,w,p,w,o,p,k,v,u e,x,s,y,t,a,f,c,b,g,e,c,s,s,w,w,p,w,o,p,n,s,m e,f,y,n,t,a,f,c,b,p,e,r,s,y,w,w,p,w,o,p,k,s,p p,x,y,w,t,p,f,c,n,k,e,e,s,s,w,w,p,w,o,p,k,s,g e,b,s,w,t,l,f,c,b,g,e,c,s,s,w,w,p,w,o,p,n,s,m e,f,f,g,f,n,f,c,n,p,e,e,s,s,w,w,p,w,o,p,k,v,u e,b,y,y,t,l,f,c,b,g,e,c,s,s,w,w,p,w,o,p,n,n,g e,x,y,n,t,a,f,c,b,w,e,r,s,y,w,w,p,w,o,p,n,y,p e,x,f,w,f,n,f,w,b,p,t,e,s,f,w,w,p,w,o,e,k,s,g e,x,s,w,t,l,f,w,n,w,t,b,s,s,w,w,p,w,o,p,n,v,d e,b,s,w,t,l,f,c,b,w,e,c,s,s,w,w,p,w,o,p,n,s,m e,f,s,y,t,a,f,w,n,w,t,b,s,s,w,w,p,w,o,p,n,v,d e,x,s,y,t,a,f,c,b,k,e,c,s,s,w,w,p,w,o,p,n,n,m e,f,f,g,f,n,f,c,n,g,e,e,s,s,w,w,p,w,o,p,n,y,u e,b,s,y,t,a,f,c,b,n,e,c,s,s,w,w,p,w,o,p,k,s,g e,x,s,w,t,l,f,c,b,g,e,c,s,s,w,w,p,w,o,p,n,n,m e,x,y,w,t,l,f,c,b,n,e,c,s,s,w,w,p,w,o,p,n,s,m e,f,s,w,t,a,f,w,n,n,t,b,s,s,w,w,p,w,o,p,n,v,d e,x,y,y,t,l,f,c,b,w,e,c,s,s,w,w,p,w,o,p,n,s,g e,b,s,w,t,l,f,c,b,w,e,c,s,s,w,w,p,w,o,p,k,s,g e,x,s,w,t,a,f,c,b,g,e,c,s,s,w,w,p,w,o,p,k,n,g e,x,f,w,f,n,f,w,b,h,t,e,f,s,w,w,p,w,o,e,k,s,g e,f,y,n,t,l,f,c,b,n,e,r,s,y,w,w,p,w,o,p,k,y,p p,x,s,w,t,p,f,c,n,k,e,e,s,s,w,w,p,w,o,p,n,v,u e,b,s,w,t,a,f,c,b,n,e,c,s,s,w,w,p,w,o,p,n,n,g e,b,s,w,t,a,f,c,b,k,e,c,s,s,w,w,p,w,o,p,k,s,m e,b,y,w,t,l,f,c,b,n,e,c,s,s,w,w,p,w,o,p,n,n,g e,b,y,w,t,a,f,c,b,w,e,c,s,s,w,w,p,w,o,p,k,n,g e,x,s,y,t,a,f,c,b,w,e,c,s,s,w,w,p,w,o,p,k,s,g e,b,s,w,t,l,f,c,b,w,e,c,s,s,w,w,p,w,o,p,k,n,m e,x,f,y,t,a,f,w,n,n,t,b,s,s,w,w,p,w,o,p,n,v,d e,x,f,g,f,n,f,c,n,n,e,e,s,s,w,w,p,w,o,p,n,y,u e,f,y,y,t,a,f,c,b,n,e,r,s,y,w,w,p,w,o,p,n,s,g e,b,s,w,t,l,f,c,b,n,e,c,s,s,w,w,p,w,o,p,k,n,m e,x,s,y,t,a,f,c,b,g,e,c,s,s,w,w,p,w,o,p,k,s,g e,x,y,y,t,a,f,c,b,k,e,c,s,s,w,w,p,w,o,p,k,s,g e,x,y,w,t,l,f,c,b,w,e,c,s,s,w,w,p,w,o,p,n,s,g e,s,f,g,f,n,f,c,n,p,e,e,s,s,w,w,p,w,o,p,n,v,u e,x,s,w,t,a,f,c,b,n,e,c,s,s,w,w,p,w,o,p,n,n,m p,x,s,w,t,p,f,c,n,k,e,e,s,s,w,w,p,w,o,p,k,s,g e,x,y,y,t,a,f,c,b,n,e,r,s,y,w,w,p,w,o,p,n,s,g e,f,f,w,t,a,f,w,n,p,t,b,s,s,w,w,p,w,o,p,n,v,d e,x,y,w,t,l,f,c,b,g,e,c,s,s,w,w,p,w,o,p,k,n,m e,b,y,w,t,l,f,c,b,n,e,c,s,s,w,w,p,w,o,p,k,s,g e,x,s,w,t,a,f,c,b,g,e,c,s,s,w,w,p,w,o,p,n,n,g e,x,s,y,t,a,f,c,b,g,e,c,s,s,w,w,p,w,o,p,n,s,g 
p,x,y,n,t,p,f,c,n,p,e,e,s,s,w,w,p,w,o,p,k,v,u e,b,s,y,t,a,f,c,b,n,e,c,s,s,w,w,p,w,o,p,n,n,g e,x,f,g,f,n,f,c,n,n,e,e,s,s,w,w,p,w,o,p,k,y,u p,x,y,w,t,p,f,c,n,k,e,e,s,s,w,w,p,w,o,p,k,s,u e,x,y,y,t,l,f,c,b,n,e,r,s,y,w,w,p,w,o,p,n,y,p e,f,f,n,f,n,f,c,n,k,e,e,s,s,w,w,p,w,o,p,k,v,u e,b,s,w,t,l,f,c,b,k,e,c,s,s,w,w,p,w,o,p,n,n,g e,x,f,w,t,l,f,w,n,w,t,b,s,s,w,w,p,w,o,p,n,v,d e,x,y,w,t,l,f,c,b,w,e,c,s,s,w,w,p,w,o,p,k,s,g e,b,y,y,t,l,f,c,b,n,e,c,s,s,w,w,p,w,o,p,n,s,g e,x,y,y,t,l,f,c,b,n,e,r,s,y,w,w,p,w,o,p,k,s,g e,f,y,y,t,a,f,c,b,p,e,r,s,y,w,w,p,w,o,p,k,s,p e,f,y,y,t,a,f,c,b,w,e,r,s,y,w,w,p,w,o,p,k,y,p e,x,s,w,t,l,f,c,b,w,e,c,s,s,w,w,p,w,o,p,k,s,g e,x,s,w,t,a,f,c,b,w,e,c,s,s,w,w,p,w,o,p,n,n,m p,x,s,w,t,p,f,c,n,n,e,e,s,s,w,w,p,w,o,p,k,v,u e,f,f,w,t,a,f,w,n,p,t,b,s,s,w,w,p,w,o,p,u,v,d e,x,s,w,t,l,f,c,b,w,e,c,s,s,w,w,p,w,o,p,n,s,g e,x,s,w,t,l,f,w,n,p,t,b,s,s,w,w,p,w,o,p,u,v,d e,x,y,w,t,a,f,c,b,n,e,c,s,s,w,w,p,w,o,p,k,s,m e,f,y,y,t,l,f,c,b,w,e,r,s,y,w,w,p,w,o,p,k,y,p e,x,s,n,f,n,f,w,b,p,t,e,f,s,w,w,p,w,o,e,k,s,g e,f,y,y,t,a,f,c,b,w,e,r,s,y,w,w,p,w,o,p,n,y,g p,x,s,n,t,p,f,c,n,p,e,e,s,s,w,w,p,w,o,p,n,s,g e,s,f,n,f,n,f,c,n,g,e,e,s,s,w,w,p,w,o,p,n,v,u e,b,y,y,t,l,f,c,b,w,e,c,s,s,w,w,p,w,o,p,n,s,m e,b,s,w,t,l,f,c,b,n,e,c,s,s,w,w,p,w,o,p,n,s,g e,b,y,w,t,l,f,c,b,k,e,c,s,s,w,w,p,w,o,p,n,n,m e,f,f,n,f,n,f,c,n,k,e,e,s,s,w,w,p,w,o,p,n,v,u e,x,s,w,t,a,f,c,b,w,e,c,s,s,w,w,p,w,o,p,k,n,m e,b,y,w,t,l,f,c,b,g,e,c,s,s,w,w,p,w,o,p,k,n,g e,b,y,w,t,a,f,c,b,g,e,c,s,s,w,w,p,w,o,p,n,n,m e,f,y,n,t,l,f,c,b,w,e,r,s,y,w,w,p,w,o,p,n,s,g p,x,y,w,t,p,f,c,n,k,e,e,s,s,w,w,p,w,o,p,k,v,g e,x,s,w,t,a,f,c,b,n,e,c,s,s,w,w,p,w,o,p,n,n,g e,x,y,w,t,a,f,c,b,n,e,c,s,s,w,w,p,w,o,p,n,s,m e,f,f,w,t,l,f,w,n,w,t,b,s,s,w,w,p,w,o,p,u,v,d e,f,f,g,f,n,f,c,n,g,e,e,s,s,w,w,p,w,o,p,k,v,u e,f,s,g,f,n,f,w,b,p,t,e,s,s,w,w,p,w,o,e,k,a,g e,x,y,y,t,a,f,c,b,g,e,c,s,s,w,w,p,w,o,p,n,s,m e,b,s,w,t,a,f,c,b,k,e,c,s,s,w,w,p,w,o,p,n,s,m p,f,y,n,t,p,f,c,n,w,e,e,s,s,w,w,p,w,o,p,k,s,u e,x,s,w,t,l,f,c,b,n,e,c,s,s,w,w,p,w,o,p,k,n,m p,f,s,n,t,p,f,c,n,w,e,e,s,s,w,w,p,w,o,p,k,v,u e,x,y,w,t,l,f,c,b,k,e,c,s,s,w,w,p,w,o,p,n,s,g e,x,y,y,t,a,f,c,b,k,e,c,s,s,w,w,p,w,o,p,n,n,m e,f,y,y,t,a,f,c,b,n,e,r,s,y,w,w,p,w,o,p,n,s,p e,x,y,n,t,a,f,c,b,w,e,r,s,y,w,w,p,w,o,p,n,s,p e,f,y,n,t,a,f,c,b,w,e,r,s,y,w,w,p,w,o,p,n,y,g e,x,y,w,t,a,f,c,b,w,e,c,s,s,w,w,p,w,o,p,n,n,m e,f,f,n,f,n,f,w,b,h,t,e,s,s,w,w,p,w,o,e,k,s,g e,x,s,y,t,l,f,c,b,k,e,c,s,s,w,w,p,w,o,p,n,n,m p,x,y,w,t,p,f,c,n,w,e,e,s,s,w,w,p,w,o,p,n,s,g e,b,y,y,t,a,f,c,b,n,e,c,s,s,w,w,p,w,o,p,n,n,m e,s,f,n,f,n,f,c,n,p,e,e,s,s,w,w,p,w,o,p,n,v,u e,x,s,y,t,l,f,c,b,k,e,c,s,s,w,w,p,w,o,p,k,s,m e,b,y,w,t,l,f,c,b,n,e,c,s,s,w,w,p,w,o,p,k,n,m e,f,y,n,t,a,f,c,b,n,e,r,s,y,w,w,p,w,o,p,k,s,g e,b,y,y,t,a,f,c,b,g,e,c,s,s,w,w,p,w,o,p,k,s,m e,b,y,w,t,a,f,c,b,g,e,c,s,s,w,w,p,w,o,p,k,s,g e,x,y,n,t,l,f,c,b,n,e,r,s,y,w,w,p,w,o,p,n,y,p e,f,f,g,f,n,f,c,n,p,e,e,s,s,w,w,p,w,o,p,n,y,u e,x,f,g,f,n,f,c,n,g,e,e,s,s,w,w,p,w,o,p,k,y,u e,b,y,y,t,l,f,c,b,n,e,c,s,s,w,w,p,w,o,p,k,s,g e,x,s,y,t,l,f,c,b,n,e,c,s,s,w,w,p,w,o,p,k,s,m e,b,y,w,t,l,f,c,b,k,e,c,s,s,w,w,p,w,o,p,n,s,g e,x,s,w,t,a,f,c,b,g,e,c,s,s,w,w,p,w,o,p,k,s,m e,b,s,y,t,a,f,c,b,g,e,c,s,s,w,w,p,w,o,p,n,n,m e,x,s,y,t,a,f,c,b,k,e,c,s,s,w,w,p,w,o,p,k,s,m e,x,f,g,f,n,f,w,b,p,t,e,f,f,w,w,p,w,o,e,k,s,g e,f,s,y,t,a,f,w,n,p,t,b,s,s,w,w,p,w,o,p,n,v,d p,x,y,w,t,p,f,c,n,p,e,e,s,s,w,w,p,w,o,p,k,s,g e,x,f,w,f,n,f,w,b,k,t,e,f,s,w,w,p,w,o,e,k,a,g e,b,y,w,t,a,f,c,b,w,e,c,s,s,w,w,p,w,o,p,k,s,g e,x,s,y,t,l,f,w,n,w,t,b,s,s,w,w,p,w,o,p,n,v,d e,b,y,w,t,a,f,c,b,n,e,c,s,s,w,w,p,w,o,p,k,s,g e,x,y,w,t,l,f,c,b,g,e,c,s,s,w,w,p,w,o,p,k,s,g 
e,x,f,n,t,n,f,c,b,p,t,b,s,s,g,p,p,w,o,p,n,y,d e,b,y,w,t,l,f,c,b,k,e,c,s,s,w,w,p,w,o,p,n,s,m e,x,s,y,t,l,f,c,b,g,e,c,s,s,w,w,p,w,o,p,n,n,m e,f,y,n,t,a,f,c,b,p,e,r,s,y,w,w,p,w,o,p,k,y,g e,x,f,w,f,n,f,w,b,n,t,e,s,s,w,w,p,w,o,e,n,s,g e,x,s,y,t,l,f,c,b,w,e,c,s,s,w,w,p,w,o,p,k,s,g p,x,y,w,t,p,f,c,n,n,e,e,s,s,w,w,p,w,o,p,n,v,g e,x,y,y,t,l,f,c,b,n,e,c,s,s,w,w,p,w,o,p,k,s,g e,x,f,w,t,a,f,w,n,p,t,b,s,s,w,w,p,w,o,p,n,v,d e,b,s,y,t,a,f,c,b,g,e,c,s,s,w,w,p,w,o,p,n,n,g p,x,y,w,t,p,f,c,n,k,e,e,s,s,w,w,p,w,o,p,k,v,u e,x,s,y,t,a,f,w,n,w,t,b,s,s,w,w,p,w,o,p,n,v,d e,x,y,w,t,a,f,c,b,k,e,c,s,s,w,w,p,w,o,p,k,n,m e,x,f,w,t,l,f,w,n,n,t,b,s,s,w,w,p,w,o,p,n,v,d e,f,s,w,t,l,f,w,n,w,t,b,s,s,w,w,p,w,o,p,n,v,d e,x,y,y,t,l,f,c,b,k,e,c,s,s,w,w,p,w,o,p,n,s,m e,f,f,y,t,a,f,w,n,w,t,b,s,s,w,w,p,w,o,p,u,v,d e,x,s,w,t,a,f,c,b,w,e,c,s,s,w,w,p,w,o,p,k,s,g e,b,y,y,t,a,f,c,b,n,e,c,s,s,w,w,p,w,o,p,k,s,m e,x,y,n,t,l,f,c,b,p,e,r,s,y,w,w,p,w,o,p,n,s,p e,b,y,w,t,l,f,c,b,k,e,c,s,s,w,w,p,w,o,p,k,n,g e,x,s,y,t,a,f,c,b,n,e,c,s,s,w,w,p,w,o,p,n,s,m p,x,y,n,t,p,f,c,n,n,e,e,s,s,w,w,p,w,o,p,n,v,u e,b,s,y,t,l,f,c,b,w,e,c,s,s,w,w,p,w,o,p,k,n,g e,b,y,w,t,a,f,c,b,g,e,c,s,s,w,w,p,w,o,p,k,n,g p,x,y,n,t,p,f,c,n,w,e,e,s,s,w,w,p,w,o,p,k,v,u e,b,s,w,t,l,f,c,b,n,e,c,s,s,w,w,p,w,o,p,k,s,m e,b,y,y,t,l,f,c,b,w,e,c,s,s,w,w,p,w,o,p,n,n,m e,b,y,y,t,a,f,c,b,n,e,c,s,s,w,w,p,w,o,p,k,s,g e,x,y,w,t,l,f,c,b,g,e,c,s,s,w,w,p,w,o,p,n,n,g e,x,f,n,t,n,f,c,b,p,t,b,s,s,p,w,p,w,o,p,k,y,d e,x,y,n,t,a,f,c,b,w,e,r,s,y,w,w,p,w,o,p,k,y,g e,b,s,y,t,l,f,c,b,n,e,c,s,s,w,w,p,w,o,p,n,s,g e,x,f,g,f,n,f,c,n,g,e,e,s,s,w,w,p,w,o,p,n,v,u e,x,y,n,t,l,f,c,b,n,e,r,s,y,w,w,p,w,o,p,k,y,g e,x,s,w,t,a,f,c,b,k,e,c,s,s,w,w,p,w,o,p,n,s,g e,b,y,w,t,a,f,c,b,n,e,c,s,s,w,w,p,w,o,p,k,n,g e,x,s,y,t,a,f,c,b,g,e,c,s,s,w,w,p,w,o,p,k,s,m e,f,f,y,t,a,f,w,n,p,t,b,s,s,w,w,p,w,o,p,n,v,d e,b,y,y,t,l,f,c,b,g,e,c,s,s,w,w,p,w,o,p,n,s,g e,x,f,n,f,n,f,w,b,n,t,e,s,f,w,w,p,w,o,e,n,a,g e,x,f,y,t,l,f,w,n,n,t,b,s,s,w,w,p,w,o,p,n,v,d e,x,s,y,t,a,f,c,b,n,e,c,s,s,w,w,p,w,o,p,n,n,m e,f,s,g,f,n,f,w,b,n,t,e,s,f,w,w,p,w,o,e,k,s,g e,x,f,n,f,n,f,c,n,g,e,e,s,s,w,w,p,w,o,p,n,y,u e,f,s,g,f,n,f,w,b,h,t,e,f,f,w,w,p,w,o,e,n,a,g e,f,y,n,t,a,f,c,b,p,e,r,s,y,w,w,p,w,o,p,n,s,p e,b,y,w,t,a,f,c,b,k,e,c,s,s,w,w,p,w,o,p,n,s,g e,b,s,y,t,a,f,c,b,k,e,c,s,s,w,w,p,w,o,p,k,s,m e,x,y,y,t,l,f,c,b,w,e,r,s,y,w,w,p,w,o,p,k,s,p e,s,f,g,f,n,f,c,n,g,e,e,s,s,w,w,p,w,o,p,n,v,u e,f,y,n,t,a,f,c,b,p,e,r,s,y,w,w,p,w,o,p,n,y,p p,x,y,n,t,p,f,c,n,k,e,e,s,s,w,w,p,w,o,p,k,s,u e,x,y,y,t,a,f,c,b,p,e,r,s,y,w,w,p,w,o,p,n,y,p e,x,y,y,t,a,f,c,b,w,e,c,s,s,w,w,p,w,o,p,n,s,g e,x,f,n,f,n,f,w,b,h,t,e,s,f,w,w,p,w,o,e,n,s,g e,x,f,n,f,n,f,w,b,p,t,e,f,f,w,w,p,w,o,e,k,a,g e,f,s,n,f,n,f,w,b,k,t,e,f,s,w,w,p,w,o,e,k,a,g e,f,y,n,t,a,f,c,b,n,e,r,s,y,w,w,p,w,o,p,n,s,p e,x,s,y,t,a,f,c,b,w,e,c,s,s,w,w,p,w,o,p,k,n,g e,f,f,n,f,n,f,c,n,p,e,e,s,s,w,w,p,w,o,p,k,v,u e,x,y,y,t,a,f,c,b,w,e,r,s,y,w,w,p,w,o,p,n,s,p e,x,f,y,t,a,f,w,n,p,t,b,s,s,w,w,p,w,o,p,u,v,d e,b,y,y,t,a,f,c,b,g,e,c,s,s,w,w,p,w,o,p,n,n,g e,x,f,g,f,n,f,w,b,n,t,e,s,s,w,w,p,w,o,e,k,s,g e,x,y,y,t,a,f,c,b,w,e,r,s,y,w,w,p,w,o,p,n,s,g e,f,y,n,t,l,f,c,b,n,e,r,s,y,w,w,p,w,o,p,k,y,g e,x,s,y,t,a,f,c,b,w,e,c,s,s,w,w,p,w,o,p,k,s,m e,s,f,n,f,n,f,c,n,n,e,e,s,s,w,w,p,w,o,p,n,y,u e,x,s,w,t,a,f,c,b,k,e,c,s,s,w,w,p,w,o,p,k,s,m e,b,s,w,t,l,f,c,b,n,e,c,s,s,w,w,p,w,o,p,k,n,g e,s,f,n,f,n,f,c,n,k,e,e,s,s,w,w,p,w,o,p,k,y,u e,b,y,y,t,l,f,c,b,n,e,c,s,s,w,w,p,w,o,p,n,n,m e,x,y,y,t,l,f,c,b,k,e,c,s,s,w,w,p,w,o,p,n,n,g e,b,y,y,t,a,f,c,b,k,e,c,s,s,w,w,p,w,o,p,n,s,g p,x,y,n,t,p,f,c,n,p,e,e,s,s,w,w,p,w,o,p,k,s,u e,x,s,w,f,n,f,w,b,k,t,e,f,s,w,w,p,w,o,e,n,s,g 
e,x,y,w,t,a,f,c,b,w,e,c,s,s,w,w,p,w,o,p,k,s,m e,b,s,w,t,a,f,c,b,g,e,c,s,s,w,w,p,w,o,p,n,n,g e,x,f,n,t,n,f,c,b,n,t,b,s,s,g,w,p,w,o,p,n,y,d p,x,s,n,t,p,f,c,n,w,e,e,s,s,w,w,p,w,o,p,n,s,u e,x,y,n,t,l,f,c,b,w,e,r,s,y,w,w,p,w,o,p,k,y,p e,x,s,n,f,n,f,w,b,n,t,e,s,s,w,w,p,w,o,e,k,a,g e,b,y,y,t,l,f,c,b,n,e,c,s,s,w,w,p,w,o,p,k,n,m e,x,s,y,t,a,f,c,b,w,e,c,s,s,w,w,p,w,o,p,n,n,m e,b,s,y,t,a,f,c,b,n,e,c,s,s,w,w,p,w,o,p,n,s,m e,x,s,y,t,l,f,c,b,g,e,c,s,s,w,w,p,w,o,p,k,n,m e,b,y,w,t,a,f,c,b,n,e,c,s,s,w,w,p,w,o,p,n,n,g e,x,f,g,f,n,f,c,n,k,e,e,s,s,w,w,p,w,o,p,n,v,u e,f,y,n,t,a,f,c,b,w,e,r,s,y,w,w,p,w,o,p,k,s,g e,x,f,g,f,n,f,w,b,h,t,e,f,f,w,w,p,w,o,e,n,a,g e,x,s,w,t,l,f,c,b,k,e,c,s,s,w,w,p,w,o,p,n,n,m e,x,y,y,t,l,f,c,b,w,e,c,s,s,w,w,p,w,o,p,n,s,m e,x,f,g,f,n,f,c,n,n,e,e,s,s,w,w,p,w,o,p,k,v,u p,x,s,n,t,p,f,c,n,p,e,e,s,s,w,w,p,w,o,p,n,s,u e,b,y,w,t,l,f,c,b,w,e,c,s,s,w,w,p,w,o,p,n,n,g e,b,y,w,t,a,f,c,b,n,e,c,s,s,w,w,p,w,o,p,k,n,m p,x,y,n,t,p,f,c,n,p,e,e,s,s,w,w,p,w,o,p,n,v,u e,b,s,y,t,l,f,c,b,w,e,c,s,s,w,w,p,w,o,p,n,s,m e,x,s,w,t,l,f,c,b,k,e,c,s,s,w,w,p,w,o,p,n,s,m e,x,y,y,t,l,f,c,b,n,e,c,s,s,w,w,p,w,o,p,k,s,m e,b,y,y,t,l,f,c,b,n,e,c,s,s,w,w,p,w,o,p,n,n,g e,x,s,w,t,a,f,c,b,g,e,c,s,s,w,w,p,w,o,p,n,s,m e,x,s,w,t,a,f,c,b,g,e,c,s,s,w,w,p,w,o,p,k,s,g e,x,y,n,t,l,f,c,b,w,e,r,s,y,w,w,p,w,o,p,n,y,p e,x,s,w,t,a,f,c,b,w,e,c,s,s,w,w,p,w,o,p,n,s,m e,x,s,n,f,n,f,w,b,n,t,e,f,f,w,w,p,w,o,e,k,s,g e,x,y,n,t,l,f,c,b,n,e,r,s,y,w,w,p,w,o,p,n,s,p e,x,y,y,t,l,f,c,b,n,e,r,s,y,w,w,p,w,o,p,k,y,g p,x,y,n,t,p,f,c,n,k,e,e,s,s,w,w,p,w,o,p,k,v,g e,b,y,y,t,a,f,c,b,w,e,c,s,s,w,w,p,w,o,p,n,n,g e,x,f,w,t,l,f,w,n,p,t,b,s,s,w,w,p,w,o,p,u,v,d p,x,s,n,t,p,f,c,n,w,e,e,s,s,w,w,p,w,o,p,k,s,u e,x,y,y,t,l,f,c,b,k,e,c,s,s,w,w,p,w,o,p,k,n,g e,f,f,g,f,n,f,w,b,h,t,e,f,f,w,w,p,w,o,e,n,a,g e,x,y,w,t,l,f,c,b,n,e,c,s,s,w,w,p,w,o,p,k,s,g e,b,y,w,t,a,f,c,b,w,e,c,s,s,w,w,p,w,o,p,n,s,m p,x,y,n,t,p,f,c,n,p,e,e,s,s,w,w,p,w,o,p,n,s,u e,f,y,n,t,l,f,c,b,w,e,r,s,y,w,w,p,w,o,p,k,s,g e,x,s,w,t,l,f,c,b,n,e,c,s,s,w,w,p,w,o,p,n,n,m e,x,s,w,t,a,f,w,n,n,t,b,s,s,w,w,p,w,o,p,n,v,d e,f,y,n,t,a,f,c,b,n,e,r,s,y,w,w,p,w,o,p,n,y,p e,b,s,w,t,a,f,c,b,k,e,c,s,s,w,w,p,w,o,p,n,n,m e,x,s,w,f,n,f,w,b,p,t,e,f,f,w,w,p,w,o,e,k,a,g e,f,f,g,f,n,f,c,n,n,e,e,s,s,w,w,p,w,o,p,n,v,u e,x,y,w,t,a,f,c,b,g,e,c,s,s,w,w,p,w,o,p,n,s,g e,x,y,y,t,l,f,c,b,n,e,r,s,y,w,w,p,w,o,p,n,y,g e,b,s,y,t,l,f,c,b,g,e,c,s,s,w,w,p,w,o,p,n,s,g e,b,s,y,t,l,f,c,b,w,e,c,s,s,w,w,p,w,o,p,k,s,g e,f,s,w,t,l,f,w,n,n,t,b,s,s,w,w,p,w,o,p,n,v,d e,b,s,w,t,a,f,c,b,w,e,c,s,s,w,w,p,w,o,p,k,n,m e,x,f,y,t,a,f,w,n,n,t,b,s,s,w,w,p,w,o,p,u,v,d e,x,y,w,t,l,f,c,b,g,e,c,s,s,w,w,p,w,o,p,n,n,m e,b,y,y,t,l,f,c,b,k,e,c,s,s,w,w,p,w,o,p,n,n,m e,x,y,n,t,a,f,c,b,p,e,r,s,y,w,w,p,w,o,p,n,y,p e,x,y,w,t,l,f,c,b,n,e,c,s,s,w,w,p,w,o,p,k,s,m e,x,y,w,t,l,f,c,b,g,e,c,s,s,w,w,p,w,o,p,k,s,m e,x,y,y,t,a,f,c,b,p,e,r,s,y,w,w,p,w,o,p,k,y,p e,x,y,w,t,l,f,c,b,n,e,c,s,s,w,w,p,w,o,p,k,n,m e,x,s,y,t,l,f,c,b,w,e,c,s,s,w,w,p,w,o,p,k,n,m e,x,y,n,t,l,f,c,b,p,e,r,s,y,w,w,p,w,o,p,k,s,g e,x,s,w,t,l,f,c,b,n,e,c,s,s,w,w,p,w,o,p,n,n,g e,f,y,n,t,l,f,c,b,n,e,r,s,y,w,w,p,w,o,p,k,s,g e,b,s,w,t,l,f,c,b,w,e,c,s,s,w,w,p,w,o,p,k,s,m e,x,f,n,t,n,f,c,b,n,t,b,s,s,g,p,p,w,o,p,n,y,d e,x,y,w,t,l,f,c,b,k,e,c,s,s,w,w,p,w,o,p,n,s,m e,b,s,w,t,l,f,c,b,g,e,c,s,s,w,w,p,w,o,p,k,n,g e,b,s,y,t,l,f,c,b,g,e,c,s,s,w,w,p,w,o,p,k,n,m e,x,f,y,t,l,f,w,n,p,t,b,s,s,w,w,p,w,o,p,n,v,d e,b,s,w,t,l,f,c,b,g,e,c,s,s,w,w,p,w,o,p,n,n,m e,x,s,g,f,n,f,w,b,h,t,e,s,f,w,w,p,w,o,e,k,a,g e,b,s,w,t,a,f,c,b,k,e,c,s,s,w,w,p,w,o,p,n,n,g e,b,s,y,t,l,f,c,b,w,e,c,s,s,w,w,p,w,o,p,k,s,m e,f,y,y,t,a,f,c,b,n,e,r,s,y,w,w,p,w,o,p,k,y,p 
e,f,y,n,t,a,f,c,b,w,e,r,s,y,w,w,p,w,o,p,n,y,p e,x,s,w,t,a,f,c,b,k,e,c,s,s,w,w,p,w,o,p,n,s,m e,f,y,y,t,l,f,c,b,n,e,r,s,y,w,w,p,w,o,p,n,s,g e,b,s,w,t,a,f,c,b,g,e,c,s,s,w,w,p,w,o,p,k,s,g e,x,y,y,t,a,f,c,b,w,e,c,s,s,w,w,p,w,o,p,n,n,m e,x,f,n,f,n,f,c,n,k,e,e,s,s,w,w,p,w,o,p,k,v,u e,f,y,n,t,a,f,c,b,p,e,r,s,y,w,w,p,w,o,p,n,y,g e,x,s,w,t,a,f,c,b,g,e,c,s,s,w,w,p,w,o,p,k,n,m e,x,s,y,t,l,f,c,b,g,e,c,s,s,w,w,p,w,o,p,k,n,g e,b,y,y,t,l,f,c,b,g,e,c,s,s,w,w,p,w,o,p,k,n,g e,f,y,y,t,a,f,c,b,w,e,r,s,y,w,w,p,w,o,p,n,s,p e,x,y,y,t,l,f,c,b,g,e,c,s,s,w,w,p,w,o,p,k,n,g e,x,y,w,t,a,f,c,b,w,e,c,s,s,w,w,p,w,o,p,k,n,m e,f,f,w,t,a,f,w,n,n,t,b,s,s,w,w,p,w,o,p,u,v,d e,s,f,g,f,n,f,c,n,k,e,e,s,s,w,w,p,w,o,p,k,y,u e,f,s,y,t,l,f,w,n,n,t,b,s,s,w,w,p,w,o,p,n,v,d e,f,f,g,f,n,f,c,n,k,e,e,s,s,w,w,p,w,o,p,k,y,u e,x,f,n,f,n,f,c,n,p,e,e,s,s,w,w,p,w,o,p,n,y,u e,b,y,w,t,a,f,c,b,w,e,c,s,s,w,w,p,w,o,p,n,n,g e,x,s,w,t,a,f,w,n,p,t,b,s,s,w,w,p,w,o,p,n,v,d e,b,y,y,t,l,f,c,b,n,e,c,s,s,w,w,p,w,o,p,k,s,m e,b,y,y,t,a,f,c,b,k,e,c,s,s,w,w,p,w,o,p,k,s,m e,b,y,y,t,l,f,c,b,g,e,c,s,s,w,w,p,w,o,p,k,s,g e,f,s,w,t,l,f,w,n,p,t,b,s,s,w,w,p,w,o,p,u,v,d e,f,f,g,f,n,f,c,n,g,e,e,s,s,w,w,p,w,o,p,k,y,u e,f,s,w,t,a,f,w,n,w,t,b,s,s,w,w,p,w,o,p,n,v,d e,f,y,y,t,a,f,c,b,p,e,r,s,y,w,w,p,w,o,p,k,y,g e,x,y,w,t,l,f,c,b,w,e,c,s,s,w,w,p,w,o,p,n,s,m e,s,f,n,f,n,f,c,n,p,e,e,s,s,w,w,p,w,o,p,n,y,u e,x,y,n,t,l,f,c,b,n,e,r,s,y,w,w,p,w,o,p,k,s,p e,x,y,n,t,a,f,c,b,n,e,r,s,y,w,w,p,w,o,p,k,s,p e,f,f,y,t,a,f,w,n,n,t,b,s,s,w,w,p,w,o,p,u,v,d p,f,s,n,t,p,f,c,n,p,e,e,s,s,w,w,p,w,o,p,n,s,g p,x,s,n,t,p,f,c,n,k,e,e,s,s,w,w,p,w,o,p,n,v,u e,f,y,n,t,l,f,c,b,w,e,r,s,y,w,w,p,w,o,p,n,s,p e,x,y,y,t,a,f,c,b,w,e,r,s,y,w,w,p,w,o,p,k,s,g e,b,s,y,t,a,f,c,b,k,e,c,s,s,w,w,p,w,o,p,n,n,m e,f,f,w,f,n,f,w,b,h,t,e,f,s,w,w,p,w,o,e,n,a,g e,f,y,n,t,a,f,c,b,p,e,r,s,y,w,w,p,w,o,p,n,s,g e,x,y,w,t,a,f,c,b,n,e,c,s,s,w,w,p,w,o,p,n,s,g e,b,s,y,t,l,f,c,b,w,e,c,s,s,w,w,p,w,o,p,k,n,m e,x,s,y,t,a,f,c,b,w,e,c,s,s,w,w,p,w,o,p,n,n,g e,b,y,y,t,a,f,c,b,g,e,c,s,s,w,w,p,w,o,p,k,n,g e,x,s,w,t,l,f,c,b,g,e,c,s,s,w,w,p,w,o,p,k,n,m e,b,s,y,t,a,f,c,b,w,e,c,s,s,w,w,p,w,o,p,k,s,m e,b,y,y,t,a,f,c,b,w,e,c,s,s,w,w,p,w,o,p,n,s,g p,x,s,n,t,p,f,c,n,n,e,e,s,s,w,w,p,w,o,p,n,v,g e,x,s,g,f,n,f,w,b,p,t,e,s,s,w,w,p,w,o,e,n,s,g e,f,s,n,f,n,f,w,b,p,t,e,f,s,w,w,p,w,o,e,n,a,g e,x,f,w,t,a,f,w,n,n,t,b,s,s,w,w,p,w,o,p,n,v,d e,f,f,g,f,n,f,c,n,p,e,e,s,s,w,w,p,w,o,p,n,v,u e,f,f,n,f,n,f,c,n,k,e,e,s,s,w,w,p,w,o,p,n,y,u e,b,y,y,t,a,f,c,b,k,e,c,s,s,w,w,p,w,o,p,k,s,g e,x,f,w,t,a,f,w,n,n,t,b,s,s,w,w,p,w,o,p,u,v,d e,s,f,g,f,n,f,c,n,n,e,e,s,s,w,w,p,w,o,p,n,v,u e,x,f,n,f,n,f,c,n,k,e,e,s,s,w,w,p,w,o,p,n,v,u e,x,f,w,f,n,f,w,b,h,t,e,f,s,w,w,p,w,o,e,n,a,g e,x,y,n,t,l,f,c,b,n,e,r,s,y,w,w,p,w,o,p,k,y,p e,b,s,y,t,a,f,c,b,k,e,c,s,s,w,w,p,w,o,p,k,n,g e,x,y,w,t,l,f,c,b,k,e,c,s,s,w,w,p,w,o,p,k,s,g e,x,y,y,t,a,f,c,b,k,e,c,s,s,w,w,p,w,o,p,n,n,g e,x,s,w,t,l,f,c,b,w,e,c,s,s,w,w,p,w,o,p,k,n,m e,b,s,w,t,l,f,c,b,n,e,c,s,s,w,w,p,w,o,p,k,s,g e,f,y,y,t,a,f,c,b,p,e,r,s,y,w,w,p,w,o,p,n,y,p p,x,s,n,t,p,f,c,n,n,e,e,s,s,w,w,p,w,o,p,n,s,u e,f,f,g,f,n,f,c,n,n,e,e,s,s,w,w,p,w,o,p,k,y,u e,b,s,y,t,l,f,c,b,w,e,c,s,s,w,w,p,w,o,p,n,s,g e,f,y,n,t,a,f,c,b,w,e,r,s,y,w,w,p,w,o,p,n,s,p e,x,s,y,t,a,f,c,b,k,e,c,s,s,w,w,p,w,o,p,n,s,m e,x,f,n,f,n,f,c,n,k,e,e,s,s,w,w,p,w,o,p,k,y,u e,f,y,y,t,a,f,c,b,w,e,r,s,y,w,w,p,w,o,p,n,y,p e,x,y,n,t,a,f,c,b,n,e,r,s,y,w,w,p,w,o,p,n,y,p e,x,s,w,t,a,f,c,b,n,e,c,s,s,w,w,p,w,o,p,k,s,g p,x,y,w,t,p,f,c,n,n,e,e,s,s,w,w,p,w,o,p,k,s,g e,f,y,n,t,l,f,c,b,w,e,r,s,y,w,w,p,w,o,p,k,y,p p,f,s,n,t,p,f,c,n,k,e,e,s,s,w,w,p,w,o,p,k,s,g e,x,f,w,f,n,f,w,b,k,t,e,f,s,w,w,p,w,o,e,k,s,g 
e,b,y,w,t,a,f,c,b,k,e,c,s,s,w,w,p,w,o,p,k,s,m e,s,f,n,f,n,f,c,n,p,e,e,s,s,w,w,p,w,o,p,k,v,u e,x,y,y,t,l,f,c,b,g,e,c,s,s,w,w,p,w,o,p,n,s,m e,f,s,w,t,a,f,w,n,p,t,b,s,s,w,w,p,w,o,p,u,v,d e,x,s,y,t,a,f,c,b,n,e,c,s,s,w,w,p,w,o,p,k,s,m e,f,f,g,f,n,f,w,b,p,t,e,s,f,w,w,p,w,o,e,k,s,g p,x,y,w,t,p,f,c,n,n,e,e,s,s,w,w,p,w,o,p,n,s,g e,x,y,y,t,a,f,c,b,p,e,r,s,y,w,w,p,w,o,p,k,s,g e,f,f,n,f,n,f,w,b,n,t,e,s,s,w,w,p,w,o,e,k,a,g e,x,s,w,t,a,f,c,b,w,e,c,s,s,w,w,p,w,o,p,k,s,m e,b,y,y,t,l,f,c,b,g,e,c,s,s,w,w,p,w,o,p,n,s,m e,f,f,g,f,n,f,c,n,g,e,e,s,s,w,w,p,w,o,p,n,v,u e,x,f,y,t,l,f,w,n,p,t,b,s,s,w,w,p,w,o,p,u,v,d e,x,s,y,t,a,f,c,b,g,e,c,s,s,w,w,p,w,o,p,n,n,m e,f,f,n,f,n,f,c,n,n,e,e,s,s,w,w,p,w,o,p,n,y,u e,x,f,g,f,n,f,w,b,h,t,e,f,s,w,w,p,w,o,e,n,s,g e,x,y,y,t,l,f,c,b,p,e,r,s,y,w,w,p,w,o,p,k,s,p e,f,s,w,t,l,f,w,n,p,t,b,s,s,w,w,p,w,o,p,n,v,d e,x,s,y,t,l,f,c,b,k,e,c,s,s,w,w,p,w,o,p,k,s,g e,f,y,y,t,a,f,c,b,p,e,r,s,y,w,w,p,w,o,p,k,y,p p,x,y,n,t,p,f,c,n,k,e,e,s,s,w,w,p,w,o,p,k,v,u e,x,f,w,t,l,f,w,n,w,t,b,s,s,w,w,p,w,o,p,u,v,d e,x,s,w,t,l,f,c,b,w,e,c,s,s,w,w,p,w,o,p,k,n,g e,x,y,y,t,a,f,c,b,g,e,c,s,s,w,w,p,w,o,p,n,n,m e,b,s,y,t,a,f,c,b,g,e,c,s,s,w,w,p,w,o,p,n,s,g e,x,s,y,t,a,f,w,n,w,t,b,s,s,w,w,p,w,o,p,u,v,d e,x,s,y,t,a,f,w,n,n,t,b,s,s,w,w,p,w,o,p,u,v,d e,x,f,w,f,n,f,w,b,n,t,e,f,s,w,w,p,w,o,e,k,s,g e,b,s,w,t,a,f,c,b,w,e,c,s,s,w,w,p,w,o,p,n,s,m p,x,y,w,t,p,f,c,n,w,e,e,s,s,w,w,p,w,o,p,k,v,g e,x,y,y,t,a,f,c,b,g,e,c,s,s,w,w,p,w,o,p,n,n,g e,f,s,g,f,n,f,w,b,k,t,e,s,f,w,w,p,w,o,e,k,s,g p,x,s,n,t,p,f,c,n,n,e,e,s,s,w,w,p,w,o,p,n,s,g e,b,y,w,t,a,f,c,b,w,e,c,s,s,w,w,p,w,o,p,k,n,m e,x,s,w,t,a,f,c,b,k,e,c,s,s,w,w,p,w,o,p,k,n,m e,f,f,y,t,l,f,w,n,n,t,b,s,s,w,w,p,w,o,p,n,v,d e,x,s,y,t,a,f,c,b,k,e,c,s,s,w,w,p,w,o,p,k,n,g e,b,y,w,t,l,f,c,b,w,e,c,s,s,w,w,p,w,o,p,k,n,g e,x,y,n,t,a,f,c,b,w,e,r,s,y,w,w,p,w,o,p,n,y,g e,x,s,y,t,l,f,c,b,w,e,c,s,s,w,w,p,w,o,p,n,s,g e,x,f,n,f,n,f,c,n,p,e,e,s,s,w,w,p,w,o,p,n,v,u e,x,s,y,t,l,f,c,b,n,e,c,s,s,w,w,p,w,o,p,n,s,g e,x,y,y,t,l,f,c,b,w,e,r,s,y,w,w,p,w,o,p,n,y,g p,x,s,n,t,p,f,c,n,w,e,e,s,s,w,w,p,w,o,p,n,s,g e,x,s,w,t,a,f,c,b,g,e,c,s,s,w,w,p,w,o,p,n,n,m e,x,s,y,t,a,f,c,b,w,e,c,s,s,w,w,p,w,o,p,n,s,m e,x,y,w,t,l,f,c,b,k,e,c,s,s,w,w,p,w,o,p,k,n,g e,x,y,y,t,a,f,c,b,n,e,r,s,y,w,w,p,w,o,p,n,y,g e,f,y,y,t,a,f,c,b,w,e,r,s,y,w,w,p,w,o,p,k,s,g e,x,y,w,t,a,f,c,b,k,e,c,s,s,w,w,p,w,o,p,n,s,m e,s,f,g,f,n,f,c,n,g,e,e,s,s,w,w,p,w,o,p,k,v,u e,f,f,n,f,n,f,c,n,n,e,e,s,s,w,w,p,w,o,p,k,y,u e,s,f,g,f,n,f,c,n,n,e,e,s,s,w,w,p,w,o,p,k,v,u e,x,y,w,t,a,f,c,b,n,e,c,s,s,w,w,p,w,o,p,k,n,m p,x,s,w,t,p,f,c,n,k,e,e,s,s,w,w,p,w,o,p,k,s,u e,x,s,g,f,n,f,w,b,h,t,e,s,f,w,w,p,w,o,e,k,s,g e,x,y,y,t,l,f,c,b,g,e,c,s,s,w,w,p,w,o,p,k,s,g p,x,y,w,t,p,f,c,n,k,e,e,s,s,w,w,p,w,o,p,n,s,g e,x,s,y,t,a,f,c,b,g,e,c,s,s,w,w,p,w,o,p,k,n,m p,x,s,n,t,p,f,c,n,p,e,e,s,s,w,w,p,w,o,p,k,v,u e,b,s,y,t,l,f,c,b,n,e,c,s,s,w,w,p,w,o,p,k,n,m e,b,y,w,t,l,f,c,b,g,e,c,s,s,w,w,p,w,o,p,k,s,m p,x,y,w,t,p,f,c,n,p,e,e,s,s,w,w,p,w,o,p,n,s,g p,x,s,n,t,p,f,c,n,n,e,e,s,s,w,w,p,w,o,p,k,s,u e,x,s,g,f,n,f,w,b,n,t,e,s,f,w,w,p,w,o,e,n,a,g e,x,y,n,t,l,f,c,b,w,e,r,s,y,w,w,p,w,o,p,n,y,g e,f,f,w,f,n,f,w,b,k,t,e,f,f,w,w,p,w,o,e,n,a,g e,f,y,n,t,l,f,c,b,n,e,r,s,y,w,w,p,w,o,p,n,s,p e,b,y,w,t,l,f,c,b,w,e,c,s,s,w,w,p,w,o,p,k,s,m e,b,s,w,t,a,f,c,b,n,e,c,s,s,w,w,p,w,o,p,k,n,g e,x,s,g,f,n,f,w,b,n,t,e,s,f,w,w,p,w,o,e,n,s,g e,f,y,n,t,a,f,c,b,n,e,r,s,y,w,w,p,w,o,p,k,y,g e,x,s,w,t,l,f,c,b,w,e,c,s,s,w,w,p,w,o,p,k,s,m e,f,f,g,f,n,f,c,n,n,e,e,s,s,w,w,p,w,o,p,k,v,u e,f,s,n,f,n,f,w,b,h,t,e,f,s,w,w,p,w,o,e,n,a,g e,x,f,g,f,n,f,w,b,n,t,e,s,f,w,w,p,w,o,e,n,s,g e,f,s,w,f,n,f,w,b,p,t,e,s,f,w,w,p,w,o,e,k,a,g 
p,x,s,w,t,p,f,c,n,k,e,e,s,s,w,w,p,w,o,p,n,s,u e,x,y,y,t,l,f,c,b,p,e,r,s,y,w,w,p,w,o,p,k,s,g e,b,s,w,t,l,f,c,b,g,e,c,s,s,w,w,p,w,o,p,k,s,g e,f,f,g,f,n,f,w,b,p,t,e,s,s,w,w,p,w,o,e,k,a,g e,b,y,w,t,l,f,c,b,g,e,c,s,s,w,w,p,w,o,p,k,s,g e,b,s,y,t,a,f,c,b,n,e,c,s,s,w,w,p,w,o,p,n,n,m e,s,f,g,f,n,f,c,n,n,e,e,s,s,w,w,p,w,o,p,n,y,u e,f,y,y,t,l,f,c,b,p,e,r,s,y,w,w,p,w,o,p,k,s,g e,b,s,w,t,l,f,c,b,w,e,c,s,s,w,w,p,w,o,p,n,n,g e,x,f,w,t,l,f,w,n,n,t,b,s,s,w,w,p,w,o,p,u,v,d e,f,y,y,t,a,f,c,b,n,e,r,s,y,w,w,p,w,o,p,k,s,p e,f,f,g,f,n,f,c,n,p,e,e,s,s,w,w,p,w,o,p,k,y,u e,x,s,w,t,l,f,c,b,g,e,c,s,s,w,w,p,w,o,p,k,s,m e,f,f,y,t,l,f,w,n,w,t,b,s,s,w,w,p,w,o,p,u,v,d e,s,f,g,f,n,f,c,n,p,e,e,s,s,w,w,p,w,o,p,k,y,u e,b,y,w,t,a,f,c,b,g,e,c,s,s,w,w,p,w,o,p,n,s,g e,b,s,w,t,a,f,c,b,k,e,c,s,s,w,w,p,w,o,p,k,n,g e,f,f,n,f,n,f,w,b,n,t,e,s,s,w,w,p,w,o,e,n,s,g e,b,y,y,t,a,f,c,b,k,e,c,s,s,w,w,p,w,o,p,k,n,m e,x,f,n,t,n,f,c,b,n,t,b,s,s,w,g,p,w,o,p,k,v,d e,x,y,n,t,l,f,c,b,n,e,r,s,y,w,w,p,w,o,p,k,s,g e,x,y,n,t,a,f,c,b,p,e,r,s,y,w,w,p,w,o,p,n,s,g e,b,y,y,t,a,f,c,b,w,e,c,s,s,w,w,p,w,o,p,n,s,m e,f,y,y,t,a,f,c,b,p,e,r,s,y,w,w,p,w,o,p,k,s,g e,x,y,y,t,l,f,c,b,n,e,c,s,s,w,w,p,w,o,p,k,n,m e,x,s,n,f,n,f,w,b,h,t,e,s,s,w,w,p,w,o,e,k,s,g e,x,y,w,t,a,f,c,b,g,e,c,s,s,w,w,p,w,o,p,k,s,m e,b,y,y,t,l,f,c,b,w,e,c,s,s,w,w,p,w,o,p,k,n,m e,x,y,n,t,a,f,c,b,n,e,r,s,y,w,w,p,w,o,p,n,y,g e,f,y,n,t,l,f,c,b,w,e,r,s,y,w,w,p,w,o,p,k,s,p e,f,y,n,t,a,f,c,b,w,e,r,s,y,w,w,p,w,o,p,k,y,g e,f,y,y,t,l,f,c,b,n,e,r,s,y,w,w,p,w,o,p,k,s,p e,x,s,y,t,l,f,w,n,n,t,b,s,s,w,w,p,w,o,p,u,v,d e,b,y,w,t,a,f,c,b,g,e,c,s,s,w,w,p,w,o,p,k,s,m e,b,s,w,t,l,f,c,b,k,e,c,s,s,w,w,p,w,o,p,k,n,m e,x,y,y,t,a,f,c,b,n,e,c,s,s,w,w,p,w,o,p,k,s,g e,f,f,w,t,l,f,w,n,p,t,b,s,s,w,w,p,w,o,p,u,v,d e,x,s,y,t,a,f,c,b,n,e,c,s,s,w,w,p,w,o,p,k,s,g e,b,s,y,t,l,f,c,b,n,e,c,s,s,w,w,p,w,o,p,n,n,m e,f,y,y,t,a,f,c,b,w,e,r,s,y,w,w,p,w,o,p,k,y,g p,x,y,n,t,p,f,c,n,w,e,e,s,s,w,w,p,w,o,p,k,s,g p,x,y,w,t,p,f,c,n,w,e,e,s,s,w,w,p,w,o,p,k,v,u e,x,s,y,t,l,f,c,b,g,e,c,s,s,w,w,p,w,o,p,n,s,g e,f,f,w,f,n,f,w,b,k,t,e,s,s,w,w,p,w,o,e,n,s,g e,b,s,y,t,a,f,c,b,g,e,c,s,s,w,w,p,w,o,p,k,n,g e,x,y,y,t,l,f,c,b,w,e,r,s,y,w,w,p,w,o,p,k,y,p e,f,f,g,f,n,f,c,n,k,e,e,s,s,w,w,p,w,o,p,n,y,u e,b,s,w,t,a,f,c,b,n,e,c,s,s,w,w,p,w,o,p,k,s,g e,x,s,w,t,l,f,c,b,k,e,c,s,s,w,w,p,w,o,p,k,s,m p,x,y,n,t,p,f,c,n,n,e,e,s,s,w,w,p,w,o,p,k,s,g p,x,s,n,t,p,f,c,n,k,e,e,s,s,w,w,p,w,o,p,n,s,g e,b,s,w,t,l,f,c,b,g,e,c,s,s,w,w,p,w,o,p,n,s,g e,b,s,y,t,a,f,c,b,w,e,c,s,s,w,w,p,w,o,p,n,n,m e,f,y,n,t,a,f,c,b,w,e,r,s,y,w,w,p,w,o,p,n,s,g e,x,y,y,t,l,f,c,b,p,e,r,s,y,w,w,p,w,o,p,n,y,g e,f,y,y,t,l,f,c,b,n,e,r,s,y,w,w,p,w,o,p,n,y,p e,x,y,w,t,a,f,c,b,w,e,c,s,s,w,w,p,w,o,p,n,n,g e,x,s,y,t,a,f,c,b,k,e,c,s,s,w,w,p,w,o,p,k,s,g e,f,f,n,f,n,f,c,n,p,e,e,s,s,w,w,p,w,o,p,n,v,u e,f,f,n,f,n,f,w,b,p,t,e,s,s,w,w,p,w,o,e,k,a,g e,x,f,y,t,l,f,w,n,w,t,b,s,s,w,w,p,w,o,p,u,v,d e,f,y,y,t,l,f,c,b,w,e,r,s,y,w,w,p,w,o,p,n,s,p e,f,f,n,f,n,f,c,n,g,e,e,s,s,w,w,p,w,o,p,k,y,u e,f,f,g,f,n,f,c,n,k,e,e,s,s,w,w,p,w,o,p,n,v,u e,x,y,n,t,l,f,c,b,n,e,r,s,y,w,w,p,w,o,p,n,y,g e,x,y,y,t,l,f,c,b,w,e,r,s,y,w,w,p,w,o,p,n,y,p e,b,s,w,t,a,f,c,b,g,e,c,s,s,w,w,p,w,o,p,k,s,m e,x,s,w,t,l,f,w,n,n,t,b,s,s,w,w,p,w,o,p,n,v,d e,x,y,w,t,a,f,c,b,n,e,c,s,s,w,w,p,w,o,p,n,n,m e,x,f,y,t,a,f,w,n,w,t,b,s,s,w,w,p,w,o,p,u,v,d e,b,s,y,t,a,f,c,b,w,e,c,s,s,w,w,p,w,o,p,k,n,g e,x,f,n,t,n,f,c,b,p,t,b,s,s,p,w,p,w,o,p,n,v,d e,x,y,w,t,a,f,c,b,g,e,c,s,s,w,w,p,w,o,p,k,n,m e,f,f,g,f,n,f,c,n,k,e,e,s,s,w,w,p,w,o,p,k,v,u e,b,y,w,t,a,f,c,b,n,e,c,s,s,w,w,p,w,o,p,n,s,m e,x,y,y,t,l,f,c,b,w,e,c,s,s,w,w,p,w,o,p,k,n,m e,x,s,w,t,a,f,c,b,k,e,c,s,s,w,w,p,w,o,p,k,s,g 
e,b,s,y,t,a,f,c,b,w,e,c,s,s,w,w,p,w,o,p,k,s,g e,b,y,w,t,l,f,c,b,k,e,c,s,s,w,w,p,w,o,p,k,s,g e,x,y,n,t,a,f,c,b,p,e,r,s,y,w,w,p,w,o,p,n,s,p e,f,s,g,f,n,f,w,b,k,t,e,f,s,w,w,p,w,o,e,n,a,g p,x,y,n,t,p,f,c,n,p,e,e,s,s,w,w,p,w,o,p,n,s,g e,b,y,w,t,a,f,c,b,n,e,c,s,s,w,w,p,w,o,p,n,s,g e,b,s,y,t,a,f,c,b,w,e,c,s,s,w,w,p,w,o,p,n,s,m p,x,y,n,t,p,f,c,n,w,e,e,s,s,w,w,p,w,o,p,k,v,g p,x,s,n,t,p,f,c,n,p,e,e,s,s,w,w,p,w,o,p,n,v,u e,f,s,n,f,n,f,w,b,n,t,e,f,s,w,w,p,w,o,e,n,a,g p,x,s,n,t,p,f,c,n,w,e,e,s,s,w,w,p,w,o,p,k,s,g e,f,f,g,f,n,f,w,b,h,t,e,s,f,w,w,p,w,o,e,n,a,g e,x,y,y,t,l,f,c,b,n,e,r,s,y,w,w,p,w,o,p,k,s,p e,b,y,w,t,l,f,c,b,k,e,c,s,s,w,w,p,w,o,p,k,n,m e,f,y,y,t,l,f,c,b,p,e,r,s,y,w,w,p,w,o,p,n,s,p e,f,y,y,t,l,f,c,b,p,e,r,s,y,w,w,p,w,o,p,k,y,p e,b,y,y,t,l,f,c,b,k,e,c,s,s,w,w,p,w,o,p,k,n,m e,s,f,n,f,n,f,c,n,p,e,e,s,s,w,w,p,w,o,p,k,y,u e,x,s,w,t,a,f,c,b,k,e,c,s,s,w,w,p,w,o,p,n,n,m e,b,y,y,t,a,f,c,b,g,e,c,s,s,w,w,p,w,o,p,k,n,m e,f,y,y,t,a,f,c,b,w,e,r,s,y,w,w,p,w,o,p,k,s,p e,f,s,n,f,n,f,w,b,p,t,e,f,f,w,w,p,w,o,e,n,a,g e,x,s,w,t,l,f,c,b,w,e,c,s,s,w,w,p,w,o,p,n,s,m e,b,y,y,t,l,f,c,b,w,e,c,s,s,w,w,p,w,o,p,n,n,g e,b,y,y,t,l,f,c,b,k,e,c,s,s,w,w,p,w,o,p,k,s,g e,b,y,y,t,a,f,c,b,n,e,c,s,s,w,w,p,w,o,p,n,s,m e,b,s,w,t,l,f,c,b,k,e,c,s,s,w,w,p,w,o,p,n,s,m e,x,y,y,t,a,f,c,b,n,e,r,s,y,w,w,p,w,o,p,k,s,p e,x,y,n,t,a,f,c,b,n,e,r,s,y,w,w,p,w,o,p,n,s,g e,s,f,n,f,n,f,c,n,g,e,e,s,s,w,w,p,w,o,p,k,v,u e,f,y,n,t,a,f,c,b,n,e,r,s,y,w,w,p,w,o,p,n,s,g e,x,s,w,t,a,f,c,b,k,e,c,s,s,w,w,p,w,o,p,k,n,g e,f,s,y,t,l,f,w,n,w,t,b,s,s,w,w,p,w,o,p,n,v,d e,x,s,n,f,n,f,w,b,p,t,e,s,s,w,w,p,w,o,e,n,a,g e,f,y,y,t,a,f,c,b,n,e,r,s,y,w,w,p,w,o,p,k,s,g p,x,y,n,t,p,f,c,n,k,e,e,s,s,w,w,p,w,o,p,n,v,g e,x,s,w,t,l,f,c,b,w,e,c,s,s,w,w,p,w,o,p,n,n,g e,x,f,w,f,n,f,w,b,h,t,e,f,s,w,w,p,w,o,e,n,s,g e,b,s,w,t,a,f,c,b,n,e,c,s,s,w,w,p,w,o,p,n,s,m e,x,y,w,t,l,f,c,b,w,e,c,s,s,w,w,p,w,o,p,k,s,m e,f,y,y,t,l,f,c,b,w,e,r,s,y,w,w,p,w,o,p,k,y,g e,x,f,n,f,n,f,w,b,n,t,e,f,s,w,w,p,w,o,e,n,s,g p,x,s,w,t,p,f,c,n,k,e,e,s,s,w,w,p,w,o,p,n,s,g p,x,y,w,t,p,f,c,n,p,e,e,s,s,w,w,p,w,o,p,k,s,u e,x,y,w,t,l,f,c,b,g,e,c,s,s,w,w,p,w,o,p,k,n,g e,b,y,w,t,a,f,c,b,w,e,c,s,s,w,w,p,w,o,p,n,s,g e,x,f,w,t,a,f,w,n,w,t,b,s,s,w,w,p,w,o,p,n,v,d p,x,y,w,t,p,f,c,n,w,e,e,s,s,w,w,p,w,o,p,k,s,u e,x,s,w,t,l,f,c,b,g,e,c,s,s,w,w,p,w,o,p,k,s,g e,x,f,g,f,n,f,c,n,p,e,e,s,s,w,w,p,w,o,p,k,v,u e,x,f,n,f,n,f,c,n,p,e,e,s,s,w,w,p,w,o,p,k,v,u e,b,s,w,t,a,f,c,b,n,e,c,s,s,w,w,p,w,o,p,k,s,m e,x,y,n,t,a,f,c,b,p,e,r,s,y,w,w,p,w,o,p,k,s,g e,b,y,w,t,a,f,c,b,k,e,c,s,s,w,w,p,w,o,p,n,n,m e,f,y,y,t,l,f,c,b,n,e,r,s,y,w,w,p,w,o,p,n,s,p e,x,y,w,t,l,f,c,b,n,e,c,s,s,w,w,p,w,o,p,n,n,g e,b,y,w,t,l,f,c,b,w,e,c,s,s,w,w,p,w,o,p,n,s,g e,b,y,w,t,a,f,c,b,w,e,c,s,s,w,w,p,w,o,p,k,s,m p,x,s,n,t,p,f,c,n,w,e,e,s,s,w,w,p,w,o,p,n,v,u e,f,s,y,t,a,f,w,n,n,t,b,s,s,w,w,p,w,o,p,u,v,d e,x,y,n,t,a,f,c,b,n,e,r,s,y,w,w,p,w,o,p,n,s,p e,b,s,y,t,l,f,c,b,k,e,c,s,s,w,w,p,w,o,p,n,n,g e,f,s,w,t,a,f,w,n,n,t,b,s,s,w,w,p,w,o,p,u,v,d e,b,y,y,t,l,f,c,b,w,e,c,s,s,w,w,p,w,o,p,k,s,g e,b,y,y,t,a,f,c,b,k,e,c,s,s,w,w,p,w,o,p,n,n,m e,b,s,y,t,a,f,c,b,n,e,c,s,s,w,w,p,w,o,p,k,n,g e,f,y,y,t,l,f,c,b,n,e,r,s,y,w,w,p,w,o,p,n,y,g e,x,s,w,f,n,f,w,b,n,t,e,f,f,w,w,p,w,o,e,n,a,g e,b,y,y,t,a,f,c,b,w,e,c,s,s,w,w,p,w,o,p,n,n,m e,x,y,n,t,a,f,c,b,n,e,r,s,y,w,w,p,w,o,p,k,y,p e,b,s,w,t,l,f,c,b,g,e,c,s,s,w,w,p,w,o,p,n,n,g e,f,y,n,t,l,f,c,b,p,e,r,s,y,w,w,p,w,o,p,k,y,g e,x,s,w,t,l,f,c,b,g,e,c,s,s,w,w,p,w,o,p,n,n,g e,x,f,n,f,n,f,c,n,g,e,e,s,s,w,w,p,w,o,p,n,v,u e,x,y,n,t,l,f,c,b,w,e,r,s,y,w,w,p,w,o,p,k,s,p e,f,f,g,f,n,f,w,b,k,t,e,s,f,w,w,p,w,o,e,k,a,g e,x,s,w,t,l,f,w,n,p,t,b,s,s,w,w,p,w,o,p,n,v,d 
e,f,f,n,f,n,f,c,n,g,e,e,s,s,w,w,p,w,o,p,n,y,u e,b,y,y,t,a,f,c,b,g,e,c,s,s,w,w,p,w,o,p,n,s,m e,b,s,w,t,a,f,c,b,k,e,c,s,s,w,w,p,w,o,p,k,n,m e,x,s,w,t,a,f,c,b,w,e,c,s,s,w,w,p,w,o,p,n,n,g e,b,s,y,t,l,f,c,b,k,e,c,s,s,w,w,p,w,o,p,n,s,m e,x,y,y,t,a,f,c,b,k,e,c,s,s,w,w,p,w,o,p,k,n,m e,x,s,y,t,a,f,w,n,p,t,b,s,s,w,w,p,w,o,p,n,v,d e,f,f,g,f,n,f,w,b,h,t,e,f,s,w,w,p,w,o,e,k,a,g e,x,y,y,t,a,f,c,b,p,e,r,s,y,w,w,p,w,o,p,n,s,g p,x,y,n,t,p,f,c,n,k,e,e,s,s,w,w,p,w,o,p,n,s,u e,x,y,n,t,a,f,c,b,p,e,r,s,y,w,w,p,w,o,p,n,y,g e,x,s,y,t,l,f,c,b,n,e,c,s,s,w,w,p,w,o,p,n,n,g e,b,y,w,t,l,f,c,b,w,e,c,s,s,w,w,p,w,o,p,n,n,m e,x,y,y,t,a,f,c,b,w,e,c,s,s,w,w,p,w,o,p,k,s,m e,s,f,n,f,n,f,c,n,g,e,e,s,s,w,w,p,w,o,p,k,y,u e,b,y,y,t,l,f,c,b,w,e,c,s,s,w,w,p,w,o,p,k,s,m e,x,s,w,t,l,f,c,b,n,e,c,s,s,w,w,p,w,o,p,k,s,m e,x,s,w,t,a,f,c,b,w,e,c,s,s,w,w,p,w,o,p,k,n,g p,x,s,n,t,p,f,c,n,w,e,e,s,s,w,w,p,w,o,p,k,v,g e,x,f,n,f,n,f,c,n,n,e,e,s,s,w,w,p,w,o,p,k,v,u e,x,y,y,t,l,f,c,b,w,e,c,s,s,w,w,p,w,o,p,n,n,g p,x,y,n,t,p,f,c,n,k,e,e,s,s,w,w,p,w,o,p,n,s,g e,x,y,y,t,l,f,c,b,p,e,r,s,y,w,w,p,w,o,p,k,y,p e,b,y,y,t,l,f,c,b,k,e,c,s,s,w,w,p,w,o,p,n,s,g e,f,y,n,t,l,f,c,b,p,e,r,s,y,w,w,p,w,o,p,k,y,p e,x,y,w,t,a,f,c,b,k,e,c,s,s,w,w,p,w,o,p,k,n,g e,x,s,y,t,a,f,c,b,n,e,c,s,s,w,w,p,w,o,p,n,n,g p,x,y,n,t,p,f,c,n,p,e,e,s,s,w,w,p,w,o,p,n,v,g p,x,y,n,t,p,f,c,n,n,e,e,s,s,w,w,p,w,o,p,n,s,u e,b,y,y,t,l,f,c,b,n,e,c,s,s,w,w,p,w,o,p,k,n,g e,x,s,w,f,n,f,w,b,h,t,e,f,s,w,w,p,w,o,e,k,s,g p,x,s,w,t,p,f,c,n,k,e,e,s,s,w,w,p,w,o,p,n,v,g e,x,y,n,t,l,f,c,b,w,e,r,s,y,w,w,p,w,o,p,n,s,p e,b,s,w,t,a,f,c,b,n,e,c,s,s,w,w,p,w,o,p,k,n,m e,x,y,y,t,a,f,c,b,w,e,c,s,s,w,w,p,w,o,p,n,n,g p,x,s,n,t,p,f,c,n,n,e,e,s,s,w,w,p,w,o,p,n,v,u e,b,y,y,t,l,f,c,b,w,e,c,s,s,w,w,p,w,o,p,k,n,g e,b,y,w,t,a,f,c,b,k,e,c,s,s,w,w,p,w,o,p,n,n,g e,f,y,y,t,l,f,c,b,w,e,r,s,y,w,w,p,w,o,p,k,s,g e,b,y,w,t,a,f,c,b,k,e,c,s,s,w,w,p,w,o,p,k,s,g e,x,y,y,t,a,f,c,b,p,e,r,s,y,w,w,p,w,o,p,k,y,g e,b,s,y,t,l,f,c,b,n,e,c,s,s,w,w,p,w,o,p,n,s,m e,x,s,y,t,l,f,w,n,p,t,b,s,s,w,w,p,w,o,p,n,v,d e,b,s,y,t,a,f,c,b,g,e,c,s,s,w,w,p,w,o,p,k,n,m e,b,s,y,t,a,f,c,b,n,e,c,s,s,w,w,p,w,o,p,k,n,m p,x,y,w,t,p,f,c,n,p,e,e,s,s,w,w,p,w,o,p,k,v,u p,x,y,w,t,p,f,c,n,w,e,e,s,s,w,w,p,w,o,p,n,v,g p,x,s,n,t,p,f,c,n,k,e,e,s,s,w,w,p,w,o,p,k,v,u e,x,s,w,t,a,f,c,b,n,e,c,s,s,w,w,p,w,o,p,k,s,m e,x,y,n,t,a,f,c,b,p,e,r,s,y,w,w,p,w,o,p,k,y,g e,x,y,y,t,a,f,c,b,n,e,r,s,y,w,w,p,w,o,p,k,y,g e,x,s,w,t,a,f,w,n,w,t,b,s,s,w,w,p,w,o,p,n,v,d e,b,y,w,t,l,f,c,b,k,e,c,s,s,w,w,p,w,o,p,n,n,g e,b,s,y,t,l,f,c,b,w,e,c,s,s,w,w,p,w,o,p,n,n,m e,x,f,g,f,n,f,c,n,k,e,e,s,s,w,w,p,w,o,p,k,v,u e,b,s,y,t,a,f,c,b,n,e,c,s,s,w,w,p,w,o,p,k,s,m e,x,s,w,t,l,f,c,b,g,e,c,s,s,w,w,p,w,o,p,n,s,g e,x,y,n,t,a,f,c,b,w,e,r,s,y,w,w,p,w,o,p,k,s,p e,x,y,y,t,l,f,c,b,w,e,c,s,s,w,w,p,w,o,p,k,s,m e,f,f,w,t,l,f,w,n,n,t,b,s,s,w,w,p,w,o,p,n,v,d e,f,f,w,t,a,f,w,n,w,t,b,s,s,w,w,p,w,o,p,u,v,d e,x,s,w,t,l,f,c,b,g,e,c,s,s,w,w,p,w,o,p,n,s,m e,f,s,y,t,l,f,w,n,p,t,b,s,s,w,w,p,w,o,p,u,v,d e,s,f,g,f,n,f,c,n,p,e,e,s,s,w,w,p,w,o,p,k,v,u e,f,f,y,t,a,f,w,n,w,t,b,s,s,w,w,p,w,o,p,n,v,d e,x,y,n,t,l,f,c,b,p,e,r,s,y,w,w,p,w,o,p,k,y,g e,x,y,y,t,l,f,c,b,w,e,r,s,y,w,w,p,w,o,p,n,s,g e,x,y,y,t,l,f,c,b,p,e,r,s,y,w,w,p,w,o,p,n,y,p p,x,s,n,t,p,f,c,n,k,e,e,s,s,w,w,p,w,o,p,n,v,g e,x,s,y,t,l,f,c,b,n,e,c,s,s,w,w,p,w,o,p,n,s,m p,x,y,w,t,p,f,c,n,p,e,e,s,s,w,w,p,w,o,p,n,s,u e,x,y,y,t,a,f,c,b,w,e,c,s,s,w,w,p,w,o,p,k,s,g e,x,y,w,t,a,f,c,b,w,e,c,s,s,w,w,p,w,o,p,k,s,g e,f,s,y,t,a,f,w,n,w,t,b,s,s,w,w,p,w,o,p,u,v,d p,x,y,n,t,p,f,c,n,w,e,e,s,s,w,w,p,w,o,p,n,v,g e,b,s,w,t,a,f,c,b,n,e,c,s,s,w,w,p,w,o,p,n,s,g e,s,f,g,f,n,f,c,n,n,e,e,s,s,w,w,p,w,o,p,k,y,u 
e,x,s,y,t,a,f,c,b,g,e,c,s,s,w,w,p,w,o,p,k,n,g e,x,f,g,f,n,f,w,b,k,t,e,s,f,w,w,p,w,o,e,n,s,g e,x,y,y,t,a,f,c,b,n,e,c,s,s,w,w,p,w,o,p,n,s,m e,b,y,w,t,l,f,c,b,g,e,c,s,s,w,w,p,w,o,p,n,s,g e,b,s,y,t,l,f,c,b,k,e,c,s,s,w,w,p,w,o,p,n,n,m e,x,s,w,t,l,f,w,n,w,t,b,s,s,w,w,p,w,o,p,u,v,d e,b,s,w,t,l,f,c,b,k,e,c,s,s,w,w,p,w,o,p,n,s,g e,f,y,n,t,a,f,c,b,p,e,r,s,y,w,w,p,w,o,p,k,s,g e,s,f,g,f,n,f,c,n,g,e,e,s,s,w,w,p,w,o,p,n,y,u e,x,f,w,f,n,f,w,b,p,t,e,f,s,w,w,p,w,o,e,n,a,g e,s,f,n,f,n,f,c,n,n,e,e,s,s,w,w,p,w,o,p,k,v,u e,f,y,n,t,l,f,c,b,p,e,r,s,y,w,w,p,w,o,p,k,s,p e,f,s,y,t,l,f,w,n,n,t,b,s,s,w,w,p,w,o,p,u,v,d e,b,s,y,t,a,f,c,b,n,e,c,s,s,w,w,p,w,o,p,n,s,g e,b,s,y,t,a,f,c,b,k,e,c,s,s,w,w,p,w,o,p,k,n,m p,x,s,n,t,p,f,c,n,p,e,e,s,s,w,w,p,w,o,p,k,v,g e,f,y,n,t,l,f,c,b,w,e,r,s,y,w,w,p,w,o,p,n,y,p e,x,s,y,t,a,f,c,b,w,e,c,s,s,w,w,p,w,o,p,n,s,g e,f,y,y,t,a,f,c,b,p,e,r,s,y,w,w,p,w,o,p,n,s,g e,x,y,w,t,a,f,c,b,g,e,c,s,s,w,w,p,w,o,p,n,n,g e,x,y,y,t,a,f,c,b,w,e,c,s,s,w,w,p,w,o,p,n,s,m e,x,y,y,t,l,f,c,b,k,e,c,s,s,w,w,p,w,o,p,k,s,g e,f,f,n,f,n,f,c,n,n,e,e,s,s,w,w,p,w,o,p,n,v,u e,f,y,y,t,l,f,c,b,p,e,r,s,y,w,w,p,w,o,p,n,y,p e,x,f,g,f,n,f,c,n,p,e,e,s,s,w,w,p,w,o,p,n,y,u e,x,s,y,t,l,f,c,b,k,e,c,s,s,w,w,p,w,o,p,n,n,g e,f,y,n,t,a,f,c,b,p,e,r,s,y,w,w,p,w,o,p,k,y,p e,x,s,y,t,l,f,c,b,w,e,c,s,s,w,w,p,w,o,p,n,n,g e,x,y,y,t,a,f,c,b,k,e,c,s,s,w,w,p,w,o,p,n,s,m e,x,f,n,t,n,f,c,b,n,t,b,s,s,g,w,p,w,o,p,k,v,d e,x,s,w,t,a,f,c,b,n,e,c,s,s,w,w,p,w,o,p,n,s,m e,x,y,y,t,l,f,c,b,p,e,r,s,y,w,w,p,w,o,p,n,s,p e,x,y,w,t,a,f,c,b,k,e,c,s,s,w,w,p,w,o,p,n,n,m e,x,s,w,t,l,f,c,b,k,e,c,s,s,w,w,p,w,o,p,k,n,g e,f,y,y,t,l,f,c,b,w,e,r,s,y,w,w,p,w,o,p,n,y,g e,b,s,y,t,l,f,c,b,n,e,c,s,s,w,w,p,w,o,p,n,n,g e,f,f,w,t,a,f,w,n,w,t,b,s,s,w,w,p,w,o,p,n,v,d e,f,s,g,f,n,f,w,b,n,t,e,s,f,w,w,p,w,o,e,n,s,g e,x,y,w,t,a,f,c,b,w,e,c,s,s,w,w,p,w,o,p,k,n,g e,x,y,w,t,l,f,c,b,n,e,c,s,s,w,w,p,w,o,p,k,n,g e,f,y,y,t,l,f,c,b,n,e,r,s,y,w,w,p,w,o,p,k,s,g e,x,s,w,t,a,f,w,n,p,t,b,s,s,w,w,p,w,o,p,u,v,d e,x,s,y,t,a,f,c,b,n,e,c,s,s,w,w,p,w,o,p,n,s,g e,f,y,y,t,a,f,c,b,n,e,r,s,y,w,w,p,w,o,p,n,y,g e,x,y,y,t,a,f,c,b,w,e,r,s,y,w,w,p,w,o,p,k,y,p e,x,s,y,t,l,f,c,b,g,e,c,s,s,w,w,p,w,o,p,n,n,g e,x,f,n,t,n,f,c,b,u,t,b,s,s,g,p,p,w,o,p,n,y,d e,f,y,y,t,l,f,c,b,p,e,r,s,y,w,w,p,w,o,p,k,y,g e,x,s,w,t,a,f,c,b,n,e,c,s,s,w,w,p,w,o,p,k,n,g e,b,s,y,t,a,f,c,b,g,e,c,s,s,w,w,p,w,o,p,n,s,m e,x,s,y,t,l,f,c,b,k,e,c,s,s,w,w,p,w,o,p,n,s,m e,x,y,n,t,l,f,c,b,p,e,r,s,y,w,w,p,w,o,p,n,s,g e,b,y,y,t,a,f,c,b,k,e,c,s,s,w,w,p,w,o,p,k,n,g e,x,s,w,t,l,f,c,b,k,e,c,s,s,w,w,p,w,o,p,k,n,m e,x,y,w,t,l,f,c,b,k,e,c,s,s,w,w,p,w,o,p,k,s,m e,x,s,y,t,l,f,c,b,n,e,c,s,s,w,w,p,w,o,p,k,s,g e,f,y,y,t,l,f,c,b,n,e,r,s,y,w,w,p,w,o,p,k,y,p e,x,f,w,f,n,f,w,b,n,t,e,f,f,w,w,p,w,o,e,n,a,g e,x,s,n,f,n,f,w,b,p,t,e,f,f,w,w,p,w,o,e,k,s,g e,x,s,w,f,n,f,w,b,h,t,e,f,s,w,w,p,w,o,e,n,s,g e,x,s,y,t,l,f,c,b,w,e,c,s,s,w,w,p,w,o,p,n,n,m e,f,f,w,f,n,f,w,b,p,t,e,f,s,w,w,p,w,o,e,k,s,g p,f,s,n,t,p,f,c,n,n,e,e,s,s,w,w,p,w,o,p,k,s,g e,x,f,g,f,n,f,w,b,p,t,e,f,f,w,w,p,w,o,e,n,s,g p,f,y,n,t,p,f,c,n,p,e,e,s,s,w,w,p,w,o,p,n,v,u p,x,s,n,t,p,f,c,n,w,e,e,s,s,w,w,p,w,o,p,n,v,g e,b,s,w,t,l,f,c,b,k,e,c,s,s,w,w,p,w,o,p,k,n,g e,x,f,g,f,n,f,w,b,p,t,e,f,f,w,w,p,w,o,e,k,a,g e,x,s,w,t,l,f,c,b,g,e,c,s,s,w,w,p,w,o,p,k,n,g e,f,f,g,f,n,f,w,b,n,t,e,s,s,w,w,p,w,o,e,k,a,g e,x,s,g,f,n,f,w,b,k,t,e,s,s,w,w,p,w,o,e,k,s,g e,x,s,w,t,a,f,c,b,k,e,c,s,s,w,w,p,w,o,p,n,n,g e,x,s,w,t,l,f,c,b,n,e,c,s,s,w,w,p,w,o,p,n,s,m e,x,y,g,t,n,f,c,b,n,t,b,s,s,g,w,p,w,o,p,n,y,d e,f,f,n,f,n,f,w,b,k,t,e,f,f,w,w,p,w,o,e,n,a,g e,x,s,w,f,n,f,w,b,h,t,e,s,f,w,w,p,w,o,e,k,s,g e,f,f,n,f,n,f,w,b,p,t,e,f,s,w,w,p,w,o,e,k,a,g 
e,x,s,n,f,n,f,w,b,p,t,e,f,f,w,w,p,w,o,e,n,a,g e,x,y,y,t,l,f,c,b,n,e,c,s,s,w,w,p,w,o,p,k,n,g e,x,s,y,t,l,f,w,n,w,t,b,s,s,w,w,p,w,o,p,u,v,d e,f,f,w,f,n,f,w,b,k,t,e,f,s,w,w,p,w,o,e,n,s,g e,x,s,n,f,n,f,w,b,h,t,e,f,s,w,w,p,w,o,e,n,a,g e,x,s,g,f,n,f,w,b,k,t,e,f,s,w,w,p,w,o,e,k,a,g p,x,y,n,t,p,f,c,n,w,e,e,s,s,w,w,p,w,o,p,k,s,u e,x,f,g,f,n,f,c,n,k,e,e,s,s,w,w,p,w,o,p,n,y,u e,b,s,w,t,a,f,c,b,n,e,c,s,s,w,w,p,w,o,p,n,n,m e,x,s,g,f,n,f,w,b,h,t,e,s,s,w,w,p,w,o,e,k,a,g e,f,f,g,f,n,f,w,b,n,t,e,s,f,w,w,p,w,o,e,n,s,g p,f,s,n,t,p,f,c,n,n,e,e,s,s,w,w,p,w,o,p,n,v,g e,x,s,y,t,l,f,c,b,k,e,c,s,s,w,w,p,w,o,p,n,s,g e,x,f,n,f,n,f,c,n,n,e,e,s,s,w,w,p,w,o,p,k,y,u e,f,s,w,f,n,f,w,b,p,t,e,s,f,w,w,p,w,o,e,k,s,g e,x,y,y,t,a,f,c,b,w,e,c,s,s,w,w,p,w,o,p,k,n,m e,x,f,n,f,n,f,w,b,p,t,e,s,s,w,w,p,w,o,e,k,s,g e,f,s,g,f,n,f,w,b,n,t,e,f,s,w,w,p,w,o,e,k,a,g e,x,y,e,t,n,f,c,b,u,t,b,s,s,g,p,p,w,o,p,n,y,d e,b,y,y,t,a,f,c,b,n,e,c,s,s,w,w,p,w,o,p,n,n,g e,x,f,w,f,n,f,w,b,n,t,e,f,f,w,w,p,w,o,e,k,s,g p,x,s,w,t,p,f,c,n,w,e,e,s,s,w,w,p,w,o,p,n,s,g e,x,f,g,f,n,f,w,b,p,t,e,s,s,w,w,p,w,o,e,n,s,g e,f,f,g,t,n,f,c,b,n,t,b,s,s,w,g,p,w,o,p,n,v,d e,f,f,n,f,n,f,w,b,h,t,e,f,f,w,w,p,w,o,e,n,a,g e,f,f,w,f,n,f,w,b,n,t,e,f,f,w,w,p,w,o,e,k,s,g e,b,y,w,t,l,f,c,b,w,e,c,s,s,w,w,p,w,o,p,k,s,g e,f,f,n,f,n,f,w,b,n,t,e,f,s,w,w,p,w,o,e,k,a,g p,f,y,n,t,p,f,c,n,w,e,e,s,s,w,w,p,w,o,p,n,v,u p,x,y,w,t,p,f,c,n,w,e,e,s,s,w,w,p,w,o,p,n,s,u e,b,s,y,t,l,f,c,b,n,e,c,s,s,w,w,p,w,o,p,k,s,g e,f,s,g,f,n,f,w,b,h,t,e,s,f,w,w,p,w,o,e,n,a,g e,x,f,g,f,n,f,w,b,n,t,e,s,f,w,w,p,w,o,e,n,a,g e,f,f,n,f,n,f,w,b,h,t,e,f,s,w,w,p,w,o,e,n,s,g e,b,s,w,t,l,f,c,b,k,e,c,s,s,w,w,p,w,o,p,k,s,m p,x,s,w,t,p,f,c,n,w,e,e,s,s,w,w,p,w,o,p,k,v,u e,x,y,n,t,n,f,c,b,n,t,b,s,s,p,p,p,w,o,p,k,v,d e,f,f,w,t,l,f,w,n,n,t,b,s,s,w,w,p,w,o,p,u,v,d e,x,y,y,t,l,f,c,b,k,e,c,s,s,w,w,p,w,o,p,n,n,m e,x,f,n,f,n,f,w,b,n,t,e,f,f,w,w,p,w,o,e,k,a,g e,f,f,w,f,n,f,w,b,h,t,e,f,s,w,w,p,w,o,e,k,a,g e,x,f,n,t,n,f,c,b,n,t,b,s,s,g,p,p,w,o,p,k,v,d e,x,f,n,t,n,f,c,b,n,t,b,s,s,p,g,p,w,o,p,n,y,d e,f,s,g,f,n,f,w,b,k,t,e,f,f,w,w,p,w,o,e,k,s,g e,f,s,g,f,n,f,w,b,n,t,e,s,s,w,w,p,w,o,e,k,s,g p,f,s,n,t,p,f,c,n,w,e,e,s,s,w,w,p,w,o,p,k,s,g e,x,f,n,t,n,f,c,b,n,t,b,s,s,w,p,p,w,o,p,k,y,d e,x,s,n,f,n,f,w,b,k,t,e,f,f,w,w,p,w,o,e,k,s,g e,x,f,w,f,n,f,w,b,k,t,e,s,f,w,w,p,w,o,e,k,s,g e,f,f,n,f,n,f,w,b,n,t,e,f,s,w,w,p,w,o,e,n,s,g e,x,f,n,t,n,f,c,b,n,t,b,s,s,p,w,p,w,o,p,k,v,d e,f,f,n,t,n,f,c,b,n,t,b,s,s,p,w,p,w,o,p,k,v,d e,x,f,g,f,n,f,w,b,n,t,e,s,f,w,w,p,w,o,e,k,s,g e,f,f,n,t,n,f,c,b,u,t,b,s,s,p,w,p,w,o,p,k,y,d e,x,s,g,f,n,f,w,b,p,t,e,f,f,w,w,p,w,o,e,k,s,g e,b,y,w,t,l,f,c,b,n,e,c,s,s,w,w,p,w,o,p,k,n,g e,x,f,n,t,n,f,c,b,n,t,b,s,s,p,p,p,w,o,p,k,y,d e,x,y,y,t,l,f,c,b,k,e,c,s,s,w,w,p,w,o,p,n,s,g e,f,f,g,f,n,f,w,b,p,t,e,f,f,w,w,p,w,o,e,k,a,g e,f,f,n,t,n,f,c,b,u,t,b,s,s,p,p,p,w,o,p,n,y,d e,x,s,y,t,l,f,c,b,g,e,c,s,s,w,w,p,w,o,p,k,s,m e,f,f,g,f,n,f,w,b,h,t,e,s,s,w,w,p,w,o,e,k,s,g e,x,f,n,f,n,f,w,b,k,t,e,s,s,w,w,p,w,o,e,n,s,g p,x,s,w,t,p,f,c,n,n,e,e,s,s,w,w,p,w,o,p,k,s,u e,x,f,w,f,n,f,w,b,n,t,e,f,f,w,w,p,w,o,e,n,s,g e,f,f,w,f,n,f,w,b,n,t,e,f,s,w,w,p,w,o,e,k,a,g e,x,s,w,f,n,f,w,b,n,t,e,s,f,w,w,p,w,o,e,n,a,g e,x,s,g,f,n,f,w,b,k,t,e,s,f,w,w,p,w,o,e,k,a,g e,x,y,y,t,l,f,c,b,n,e,c,s,s,w,w,p,w,o,p,n,n,g e,x,s,y,t,l,f,c,b,n,e,c,s,s,w,w,p,w,o,p,k,n,m e,x,s,g,f,n,f,w,b,p,t,e,s,f,w,w,p,w,o,e,k,a,g e,x,s,n,f,n,f,w,b,p,t,e,f,s,w,w,p,w,o,e,n,s,g e,f,f,n,f,n,f,c,n,n,e,e,s,s,w,w,p,w,o,p,k,v,u e,f,s,w,f,n,f,w,b,k,t,e,s,f,w,w,p,w,o,e,k,s,g e,f,s,w,f,n,f,w,b,n,t,e,f,f,w,w,p,w,o,e,k,a,g e,x,y,y,t,l,f,c,b,n,e,c,s,s,w,w,p,w,o,p,n,s,m e,f,s,w,t,a,f,w,n,w,t,b,s,s,w,w,p,w,o,p,u,v,d 
e,b,s,w,t,l,f,c,b,w,e,c,s,s,w,w,p,w,o,p,n,n,m p,f,y,w,t,p,f,c,n,p,e,e,s,s,w,w,p,w,o,p,k,v,u e,x,f,g,t,n,f,c,b,n,t,b,s,s,p,p,p,w,o,p,k,y,d e,f,y,n,t,l,f,c,b,p,e,r,s,y,w,w,p,w,o,p,n,y,g e,f,s,n,f,n,f,w,b,k,t,e,s,s,w,w,p,w,o,e,n,s,g p,x,s,w,t,p,f,c,n,w,e,e,s,s,w,w,p,w,o,p,k,v,g e,x,y,n,t,l,f,c,b,p,e,r,s,y,w,w,p,w,o,p,n,y,g e,x,s,g,f,n,f,w,b,p,t,e,s,f,w,w,p,w,o,e,n,a,g e,x,s,w,f,n,f,w,b,k,t,e,f,s,w,w,p,w,o,e,n,a,g e,f,f,w,f,n,f,w,b,p,t,e,f,s,w,w,p,w,o,e,n,s,g e,x,f,n,f,n,f,w,b,k,t,e,s,s,w,w,p,w,o,e,n,a,g e,x,s,w,t,l,f,c,b,k,e,c,s,s,w,w,p,w,o,p,n,s,g e,x,y,w,t,a,f,c,b,g,e,c,s,s,w,w,p,w,o,p,n,s,m e,x,s,w,f,n,f,w,b,k,t,e,s,s,w,w,p,w,o,e,k,s,g e,f,y,y,t,a,f,c,b,p,e,r,s,y,w,w,p,w,o,p,n,s,p e,f,s,n,f,n,f,w,b,n,t,e,s,f,w,w,p,w,o,e,k,s,g e,f,f,g,f,n,f,w,b,h,t,e,f,f,w,w,p,w,o,e,k,s,g e,f,f,y,t,a,f,w,n,p,t,b,s,s,w,w,p,w,o,p,u,v,d e,f,y,n,t,l,f,c,b,n,e,r,s,y,w,w,p,w,o,p,n,s,g e,x,f,n,t,n,f,c,b,p,t,b,s,s,p,w,p,w,o,p,n,y,d p,f,s,n,t,p,f,c,n,w,e,e,s,s,w,w,p,w,o,p,n,s,g e,x,f,n,t,n,f,c,b,n,t,b,s,s,w,p,p,w,o,p,k,v,d e,f,f,n,f,n,f,w,b,n,t,e,f,f,w,w,p,w,o,e,n,a,g e,b,s,y,t,a,f,c,b,w,e,c,s,s,w,w,p,w,o,p,k,n,m e,b,y,w,t,a,f,c,b,k,e,c,s,s,w,w,p,w,o,p,k,n,g e,x,y,w,t,a,f,c,b,w,e,c,s,s,w,w,p,w,o,p,n,s,m p,f,y,w,t,p,f,c,n,n,e,e,s,s,w,w,p,w,o,p,k,v,u p,f,y,w,t,p,f,c,n,p,e,e,s,s,w,w,p,w,o,p,n,s,g e,x,f,n,f,n,f,c,n,n,e,e,s,s,w,w,p,w,o,p,n,v,u e,f,s,y,t,l,f,w,n,w,t,b,s,s,w,w,p,w,o,p,u,v,d e,x,y,y,t,l,f,c,b,k,e,c,s,s,w,w,p,w,o,p,k,n,m e,x,s,w,f,n,f,w,b,k,t,e,s,s,w,w,p,w,o,e,k,a,g e,x,f,g,t,n,f,c,b,u,t,b,s,s,g,g,p,w,o,p,n,y,d p,x,y,n,t,p,f,c,n,n,e,e,s,s,w,w,p,w,o,p,k,v,g e,x,f,g,f,n,f,w,b,p,t,e,s,f,w,w,p,w,o,e,n,s,g e,f,f,n,f,n,f,w,b,p,t,e,s,f,w,w,p,w,o,e,n,s,g e,x,s,w,f,n,f,w,b,p,t,e,s,s,w,w,p,w,o,e,n,s,g e,f,f,w,f,n,f,w,b,p,t,e,f,f,w,w,p,w,o,e,k,a,g e,x,y,y,t,a,f,c,b,k,e,c,s,s,w,w,p,w,o,p,k,s,m e,f,f,y,t,a,f,w,n,n,t,b,s,s,w,w,p,w,o,p,n,v,d e,x,f,g,f,n,f,c,n,k,e,e,s,s,w,w,p,w,o,p,k,y,u p,x,s,n,t,p,f,c,n,p,e,e,s,s,w,w,p,w,o,p,k,s,u e,x,y,y,t,a,f,c,b,w,e,r,s,y,w,w,p,w,o,p,n,y,p e,f,y,n,t,a,f,c,b,n,e,r,s,y,w,w,p,w,o,p,k,s,p e,x,f,w,f,n,f,w,b,h,t,e,s,s,w,w,p,w,o,e,k,a,g e,x,y,n,t,n,f,c,b,u,t,b,s,s,g,g,p,w,o,p,n,v,d e,x,s,g,f,n,f,w,b,h,t,e,f,f,w,w,p,w,o,e,n,a,g e,x,s,w,t,a,f,c,b,w,e,c,s,s,w,w,p,w,o,p,n,s,g e,x,f,g,f,n,f,w,b,h,t,e,s,f,w,w,p,w,o,e,n,s,g p,f,y,n,t,p,f,c,n,n,e,e,s,s,w,w,p,w,o,p,n,s,u e,x,f,n,t,n,f,c,b,p,t,b,s,s,g,w,p,w,o,p,k,v,d e,x,f,g,f,n,f,w,b,k,t,e,f,f,w,w,p,w,o,e,k,a,g e,x,f,e,t,n,f,c,b,w,t,b,s,s,w,w,p,w,o,p,k,v,d e,x,f,n,t,n,f,c,b,u,t,b,s,s,g,g,p,w,o,p,k,y,d e,x,f,g,f,n,f,w,b,p,t,e,s,f,w,w,p,w,o,e,n,a,g e,x,y,y,t,l,f,c,b,w,e,r,s,y,w,w,p,w,o,p,n,s,p e,x,f,n,f,n,f,w,b,k,t,e,f,s,w,w,p,w,o,e,k,s,g e,b,s,y,t,l,f,c,b,k,e,c,s,s,w,w,p,w,o,p,k,s,g e,x,f,w,t,l,f,w,n,p,t,b,s,s,w,w,p,w,o,p,n,v,d e,f,s,g,f,n,f,w,b,k,t,e,s,f,w,w,p,w,o,e,n,s,g e,x,y,y,t,l,f,c,b,w,e,c,s,s,w,w,p,w,o,p,k,n,g e,x,f,n,f,n,f,w,b,h,t,e,s,s,w,w,p,w,o,e,n,s,g e,f,s,w,f,n,f,w,b,h,t,e,s,f,w,w,p,w,o,e,n,a,g e,x,s,y,t,l,f,w,n,n,t,b,s,s,w,w,p,w,o,p,n,v,d e,x,s,g,f,n,f,w,b,k,t,e,f,s,w,w,p,w,o,e,n,s,g e,x,f,g,f,n,f,w,b,n,t,e,f,f,w,w,p,w,o,e,n,s,g p,f,y,w,t,p,f,c,n,k,e,e,s,s,w,w,p,w,o,p,k,s,u e,f,f,y,t,l,f,w,n,p,t,b,s,s,w,w,p,w,o,p,u,v,d e,x,y,w,t,a,f,c,b,k,e,c,s,s,w,w,p,w,o,p,k,s,g e,f,f,w,f,n,f,w,b,p,t,e,f,s,w,w,p,w,o,e,n,a,g e,x,f,n,t,n,f,c,b,u,t,b,s,s,g,g,p,w,o,p,k,v,d e,x,y,n,t,l,f,c,b,n,e,r,s,y,w,w,p,w,o,p,n,s,g e,x,s,w,f,n,f,w,b,h,t,e,s,f,w,w,p,w,o,e,n,a,g p,f,y,w,t,p,f,c,n,k,e,e,s,s,w,w,p,w,o,p,n,v,g e,x,s,y,t,a,f,c,b,k,e,c,s,s,w,w,p,w,o,p,n,s,g e,x,f,g,t,n,f,c,b,p,t,b,s,s,p,w,p,w,o,p,k,y,d e,f,y,y,t,l,f,c,b,p,e,r,s,y,w,w,p,w,o,p,k,s,p 
e,f,s,g,f,n,f,w,b,k,t,e,f,s,w,w,p,w,o,e,k,a,g e,f,f,n,f,n,f,c,n,p,e,e,s,s,w,w,p,w,o,p,n,y,u p,f,y,w,t,p,f,c,n,n,e,e,s,s,w,w,p,w,o,p,n,v,g e,f,f,g,f,n,f,w,b,k,t,e,f,s,w,w,p,w,o,e,n,s,g e,x,y,n,t,l,f,c,b,p,e,r,s,y,w,w,p,w,o,p,k,s,p e,f,f,w,f,n,f,w,b,h,t,e,s,s,w,w,p,w,o,e,k,a,g p,f,y,n,t,p,f,c,n,k,e,e,s,s,w,w,p,w,o,p,n,v,u e,b,s,y,t,l,f,c,b,k,e,c,s,s,w,w,p,w,o,p,k,n,m e,f,f,g,f,n,f,w,b,n,t,e,f,f,w,w,p,w,o,e,k,s,g e,b,s,w,t,l,f,c,b,w,e,c,s,s,w,w,p,w,o,p,k,n,g e,f,f,g,f,n,f,w,b,p,t,e,s,f,w,w,p,w,o,e,n,s,g e,b,y,w,t,l,f,c,b,n,e,c,s,s,w,w,p,w,o,p,n,n,m e,x,y,w,t,l,f,c,b,w,e,c,s,s,w,w,p,w,o,p,k,n,m p,f,y,n,t,p,f,c,n,n,e,e,s,s,w,w,p,w,o,p,k,v,g e,x,y,y,t,l,f,c,b,w,e,c,s,s,w,w,p,w,o,p,k,s,g e,s,f,n,f,n,f,c,n,n,e,e,s,s,w,w,p,w,o,p,k,y,u e,x,y,n,t,a,f,c,b,n,e,r,s,y,w,w,p,w,o,p,k,y,g e,f,f,g,f,n,f,w,b,n,t,e,s,f,w,w,p,w,o,e,n,a,g e,f,f,y,t,l,f,w,n,n,t,b,s,s,w,w,p,w,o,p,u,v,d e,x,s,g,f,n,f,w,b,p,t,e,f,f,w,w,p,w,o,e,n,a,g p,f,y,n,t,p,f,c,n,n,e,e,s,s,w,w,p,w,o,p,k,v,u e,x,f,g,f,n,f,w,b,p,t,e,s,s,w,w,p,w,o,e,k,a,g e,x,s,g,f,n,f,w,b,h,t,e,f,s,w,w,p,w,o,e,k,a,g e,x,f,g,f,n,f,w,b,h,t,e,f,s,w,w,p,w,o,e,k,s,g p,x,y,n,t,p,f,c,n,n,e,e,s,s,w,w,p,w,o,p,n,s,g e,x,f,n,f,n,f,w,b,k,t,e,f,f,w,w,p,w,o,e,n,a,g e,f,s,w,f,n,f,w,b,k,t,e,f,s,w,w,p,w,o,e,n,s,g e,x,f,g,f,n,f,w,b,h,t,e,s,s,w,w,p,w,o,e,n,s,g p,x,y,n,t,p,f,c,n,w,e,e,s,s,w,w,p,w,o,p,n,s,g e,f,f,n,f,n,f,w,b,n,t,e,s,f,w,w,p,w,o,e,n,a,g e,f,s,w,f,n,f,w,b,n,t,e,f,s,w,w,p,w,o,e,k,a,g p,x,y,w,t,p,f,c,n,n,e,e,s,s,w,w,p,w,o,p,k,v,u e,f,f,g,f,n,f,w,b,k,t,e,s,f,w,w,p,w,o,e,n,a,g e,b,y,y,t,l,f,c,b,k,e,c,s,s,w,w,p,w,o,p,n,n,g e,x,s,n,f,n,f,w,b,h,t,e,f,f,w,w,p,w,o,e,n,a,g e,f,f,n,f,n,f,w,b,k,t,e,f,s,w,w,p,w,o,e,k,a,g p,f,s,w,t,p,f,c,n,n,e,e,s,s,w,w,p,w,o,p,k,s,g e,x,y,n,t,a,f,c,b,w,e,r,s,y,w,w,p,w,o,p,n,s,g e,f,f,w,f,n,f,w,b,k,t,e,f,s,w,w,p,w,o,e,k,a,g e,f,y,n,t,l,f,c,b,p,e,r,s,y,w,w,p,w,o,p,k,s,g e,f,f,g,f,n,f,w,b,p,t,e,s,s,w,w,p,w,o,e,k,s,g e,f,f,g,f,n,f,w,b,n,t,e,f,f,w,w,p,w,o,e,n,s,g e,f,s,g,f,n,f,w,b,h,t,e,f,f,w,w,p,w,o,e,n,s,g e,f,f,n,f,n,f,w,b,k,t,e,f,s,w,w,p,w,o,e,k,s,g e,x,s,g,f,n,f,w,b,h,t,e,s,s,w,w,p,w,o,e,n,s,g e,x,f,w,f,n,f,w,b,p,t,e,f,s,w,w,p,w,o,e,k,s,g e,f,f,w,f,n,f,w,b,k,t,e,s,f,w,w,p,w,o,e,k,a,g e,x,s,n,f,n,f,w,b,p,t,e,f,f,w,w,p,w,o,e,n,s,g e,x,f,n,t,n,f,c,b,p,t,b,s,s,p,p,p,w,o,p,k,v,d e,f,s,y,t,a,f,w,n,p,t,b,s,s,w,w,p,w,o,p,u,v,d e,x,f,w,f,n,f,w,b,p,t,e,s,s,w,w,p,w,o,e,k,s,g e,f,s,w,f,n,f,w,b,p,t,e,s,s,w,w,p,w,o,e,k,a,g e,b,s,y,t,a,f,c,b,k,e,c,s,s,w,w,p,w,o,p,n,n,g e,x,f,w,f,n,f,w,b,h,t,e,s,f,w,w,p,w,o,e,n,a,g e,x,f,g,f,n,f,w,b,k,t,e,s,f,w,w,p,w,o,e,k,a,g p,f,s,n,t,p,f,c,n,p,e,e,s,s,w,w,p,w,o,p,n,v,u e,x,f,g,f,n,f,w,b,n,t,e,f,s,w,w,p,w,o,e,k,a,g p,x,s,n,t,p,f,c,n,p,e,e,s,s,w,w,p,w,o,p,k,s,g e,f,s,w,f,n,f,w,b,k,t,e,f,f,w,w,p,w,o,e,n,s,g e,x,y,w,t,a,f,c,b,g,e,c,s,s,w,w,p,w,o,p,k,n,g e,x,f,n,f,n,f,w,b,h,t,e,s,s,w,w,p,w,o,e,k,s,g e,x,f,n,t,n,f,c,b,n,t,b,s,s,w,p,p,w,o,p,n,y,d e,b,y,y,t,l,f,c,b,w,e,c,s,s,w,w,p,w,o,p,n,s,g e,f,f,w,f,n,f,w,b,h,t,e,s,s,w,w,p,w,o,e,n,a,g e,x,s,n,f,n,f,w,b,n,t,e,f,s,w,w,p,w,o,e,n,a,g e,f,f,w,f,n,f,w,b,n,t,e,s,s,w,w,p,w,o,e,n,a,g e,x,f,n,t,n,f,c,b,p,t,b,s,s,w,p,p,w,o,p,n,v,d e,f,f,w,f,n,f,w,b,h,t,e,s,f,w,w,p,w,o,e,n,s,g e,x,s,w,f,n,f,w,b,k,t,e,s,f,w,w,p,w,o,e,k,a,g e,x,s,n,f,n,f,w,b,n,t,e,f,f,w,w,p,w,o,e,n,a,g e,x,f,w,f,n,f,w,b,n,t,e,f,s,w,w,p,w,o,e,n,s,g e,f,s,g,f,n,f,w,b,k,t,e,f,s,w,w,p,w,o,e,k,s,g e,x,f,g,f,n,f,w,b,n,t,e,s,s,w,w,p,w,o,e,n,a,g e,x,f,g,f,n,f,w,b,h,t,e,f,f,w,w,p,w,o,e,k,a,g e,f,s,n,f,n,f,w,b,h,t,e,f,s,w,w,p,w,o,e,k,a,g e,f,s,w,f,n,f,w,b,p,t,e,f,f,w,w,p,w,o,e,k,a,g e,f,y,n,t,a,f,c,b,n,e,r,s,y,w,w,p,w,o,p,k,y,p 
p,x,y,w,t,p,f,c,n,k,e,e,s,s,w,w,p,w,o,p,n,v,g e,f,s,g,f,n,f,w,b,h,t,e,f,s,w,w,p,w,o,e,k,a,g p,x,y,w,t,p,f,c,n,n,e,e,s,s,w,w,p,w,o,p,k,v,g e,b,y,w,t,a,f,c,b,k,e,c,s,s,w,w,p,w,o,p,n,s,m e,x,s,g,f,n,f,w,b,n,t,e,s,s,w,w,p,w,o,e,k,s,g e,f,f,g,f,n,f,w,b,h,t,e,s,f,w,w,p,w,o,e,k,a,g e,f,f,w,f,n,f,w,b,n,t,e,s,f,w,w,p,w,o,e,n,s,g e,f,s,g,f,n,f,w,b,h,t,e,f,f,w,w,p,w,o,e,k,s,g e,b,y,w,t,a,f,c,b,g,e,c,s,s,w,w,p,w,o,p,n,n,g e,x,f,g,f,n,f,w,b,n,t,e,f,s,w,w,p,w,o,e,k,s,g e,x,f,e,t,n,f,c,b,p,t,b,s,s,w,w,p,w,o,p,n,v,d e,x,y,y,t,a,f,c,b,p,e,r,s,y,w,w,p,w,o,p,k,s,p e,x,s,w,f,n,f,w,b,k,t,e,f,f,w,w,p,w,o,e,k,s,g e,b,s,w,t,a,f,c,b,w,e,c,s,s,w,w,p,w,o,p,n,n,m p,x,s,n,t,p,f,c,n,k,e,e,s,s,w,w,p,w,o,p,k,v,g e,x,f,n,t,n,f,c,b,p,t,b,s,s,p,p,p,w,o,p,k,y,d e,f,s,n,f,n,f,w,b,n,t,e,s,s,w,w,p,w,o,e,k,s,g e,b,s,w,t,a,f,c,b,g,e,c,s,s,w,w,p,w,o,p,n,n,m e,x,s,g,f,n,f,w,b,n,t,e,f,s,w,w,p,w,o,e,k,s,g e,x,f,n,t,n,f,c,b,u,t,b,s,s,p,g,p,w,o,p,k,v,d e,f,f,w,f,n,f,w,b,p,t,e,f,f,w,w,p,w,o,e,n,a,g e,x,f,n,f,n,f,c,n,g,e,e,s,s,w,w,p,w,o,p,k,v,u e,x,f,g,f,n,f,c,n,g,e,e,s,s,w,w,p,w,o,p,k,v,u e,x,f,g,f,n,f,c,n,p,e,e,s,s,w,w,p,w,o,p,k,y,u e,b,y,y,t,l,f,c,b,g,e,c,s,s,w,w,p,w,o,p,k,s,m e,f,f,n,f,n,f,c,n,g,e,e,s,s,w,w,p,w,o,p,n,v,u e,x,f,w,f,n,f,w,b,k,t,e,s,f,w,w,p,w,o,e,n,a,g e,f,y,n,t,a,f,c,b,w,e,r,s,y,w,w,p,w,o,p,k,s,p e,x,y,n,t,l,f,c,b,p,e,r,s,y,w,w,p,w,o,p,k,y,p e,x,y,y,t,l,f,c,b,p,e,r,s,y,w,w,p,w,o,p,k,y,g e,f,f,g,f,n,f,w,b,h,t,e,f,s,w,w,p,w,o,e,n,a,g e,x,y,w,t,a,f,c,b,n,e,c,s,s,w,w,p,w,o,p,k,n,g e,x,y,w,t,a,f,c,b,n,e,c,s,s,w,w,p,w,o,p,k,s,g e,f,s,w,f,n,f,w,b,n,t,e,f,f,w,w,p,w,o,e,n,a,g e,f,s,g,f,n,f,w,b,h,t,e,s,s,w,w,p,w,o,e,k,a,g p,f,s,n,t,p,f,c,n,p,e,e,s,s,w,w,p,w,o,p,k,v,u e,x,y,y,t,l,f,c,b,n,e,c,s,s,w,w,p,w,o,p,n,s,g e,x,f,n,f,n,f,w,b,p,t,e,s,s,w,w,p,w,o,e,n,s,g e,x,f,w,f,n,f,w,b,p,t,e,f,f,w,w,p,w,o,e,k,s,g e,x,s,y,t,l,f,c,b,w,e,c,s,s,w,w,p,w,o,p,n,s,m e,x,f,e,t,n,f,c,b,p,t,b,s,s,g,p,p,w,o,p,n,v,d e,f,y,n,t,l,f,c,b,p,e,r,s,y,w,w,p,w,o,p,n,s,p e,b,s,w,t,a,f,c,b,g,e,c,s,s,w,w,p,w,o,p,k,n,g p,x,s,n,t,p,f,c,n,k,e,e,s,s,w,w,p,w,o,p,k,s,g e,x,f,g,f,n,f,w,b,k,t,e,f,s,w,w,p,w,o,e,k,s,g e,b,y,w,t,a,f,c,b,n,e,c,s,s,w,w,p,w,o,p,n,n,m e,x,s,w,t,l,f,c,b,k,e,c,s,s,w,w,p,w,o,p,n,n,g e,b,y,y,t,l,f,c,b,k,e,c,s,s,w,w,p,w,o,p,k,s,m e,x,y,w,t,l,f,c,b,w,e,c,s,s,w,w,p,w,o,p,n,n,g e,x,s,y,t,a,f,c,b,g,e,c,s,s,w,w,p,w,o,p,n,n,g e,b,s,w,t,a,f,c,b,w,e,c,s,s,w,w,p,w,o,p,k,n,g e,f,s,n,f,n,f,w,b,k,t,e,s,s,w,w,p,w,o,e,k,s,g p,f,s,w,t,p,f,c,n,n,e,e,s,s,w,w,p,w,o,p,n,s,g e,f,s,n,f,n,f,w,b,h,t,e,s,s,w,w,p,w,o,e,n,a,g e,f,f,n,f,n,f,w,b,h,t,e,s,s,w,w,p,w,o,e,k,a,g p,f,y,w,t,p,f,c,n,p,e,e,s,s,w,w,p,w,o,p,k,s,u e,f,s,w,f,n,f,w,b,h,t,e,f,s,w,w,p,w,o,e,k,a,g e,x,f,w,f,n,f,w,b,p,t,e,s,s,w,w,p,w,o,e,n,s,g e,x,s,n,f,n,f,w,b,p,t,e,s,f,w,w,p,w,o,e,k,a,g e,f,s,n,f,n,f,w,b,p,t,e,s,s,w,w,p,w,o,e,k,a,g e,f,y,y,t,a,f,c,b,n,e,r,s,y,w,w,p,w,o,p,k,y,g e,x,f,g,f,n,f,w,b,n,t,e,f,f,w,w,p,w,o,e,k,a,g e,x,f,n,f,n,f,w,b,n,t,e,s,s,w,w,p,w,o,e,k,a,g p,f,s,n,t,p,f,c,n,k,e,e,s,s,w,w,p,w,o,p,n,v,g e,x,s,w,f,n,f,w,b,p,t,e,f,f,w,w,p,w,o,e,n,a,g e,f,f,n,f,n,f,w,b,p,t,e,f,s,w,w,p,w,o,e,k,s,g e,f,f,w,f,n,f,w,b,h,t,e,f,f,w,w,p,w,o,e,n,a,g e,f,f,w,f,n,f,w,b,k,t,e,f,f,w,w,p,w,o,e,n,s,g e,x,y,y,t,a,f,c,b,n,e,r,s,y,w,w,p,w,o,p,k,s,g e,x,s,g,f,n,f,w,b,k,t,e,f,f,w,w,p,w,o,e,n,s,g e,x,f,g,f,n,f,w,b,k,t,e,f,f,w,w,p,w,o,e,n,a,g e,f,f,n,f,n,f,w,b,h,t,e,f,f,w,w,p,w,o,e,k,s,g e,f,s,n,f,n,f,w,b,p,t,e,f,s,w,w,p,w,o,e,k,a,g e,x,f,w,f,n,f,w,b,k,t,e,s,s,w,w,p,w,o,e,k,s,g e,x,y,e,t,n,f,c,b,w,t,b,s,s,g,p,p,w,o,p,n,v,d e,x,f,n,f,n,f,w,b,p,t,e,s,s,w,w,p,w,o,e,k,a,g p,x,s,w,t,p,f,c,n,p,e,e,s,s,w,w,p,w,o,p,n,v,g 
e,x,y,n,t,n,f,c,b,w,t,b,s,s,p,g,p,w,o,p,k,y,d e,x,f,n,f,n,f,w,b,n,t,e,s,f,w,w,p,w,o,e,n,s,g p,f,y,w,t,p,f,c,n,w,e,e,s,s,w,w,p,w,o,p,n,s,g e,f,f,g,f,n,f,w,b,k,t,e,f,s,w,w,p,w,o,e,k,a,g e,x,f,w,f,n,f,w,b,p,t,e,f,f,w,w,p,w,o,e,n,a,g e,f,s,n,f,n,f,w,b,h,t,e,s,f,w,w,p,w,o,e,k,a,g e,f,f,g,f,n,f,w,b,p,t,e,s,f,w,w,p,w,o,e,k,a,g e,f,s,g,f,n,f,w,b,p,t,e,f,s,w,w,p,w,o,e,k,a,g e,f,f,g,f,n,f,w,b,h,t,e,s,f,w,w,p,w,o,e,n,s,g e,x,s,g,f,n,f,w,b,h,t,e,s,f,w,w,p,w,o,e,n,s,g e,x,s,n,f,n,f,w,b,h,t,e,s,f,w,w,p,w,o,e,k,s,g p,f,s,w,t,p,f,c,n,p,e,e,s,s,w,w,p,w,o,p,k,v,u e,x,s,g,f,n,f,w,b,k,t,e,f,f,w,w,p,w,o,e,n,a,g p,x,s,w,t,p,f,c,n,p,e,e,s,s,w,w,p,w,o,p,k,s,g p,f,s,w,t,p,f,c,n,w,e,e,s,s,w,w,p,w,o,p,n,s,u e,f,s,n,f,n,f,w,b,k,t,e,s,f,w,w,p,w,o,e,n,s,g e,x,f,g,f,n,f,w,b,k,t,e,s,s,w,w,p,w,o,e,k,a,g e,f,s,g,f,n,f,w,b,h,t,e,s,s,w,w,p,w,o,e,n,a,g e,x,s,g,f,n,f,w,b,k,t,e,s,s,w,w,p,w,o,e,k,a,g e,f,f,g,f,n,f,w,b,n,t,e,f,s,w,w,p,w,o,e,k,a,g e,f,f,g,f,n,f,w,b,k,t,e,f,s,w,w,p,w,o,e,n,a,g e,x,y,g,t,n,f,c,b,n,t,b,s,s,g,p,p,w,o,p,n,y,d e,x,f,n,t,n,f,c,b,n,t,b,s,s,w,w,p,w,o,p,n,v,d e,f,s,w,f,n,f,w,b,k,t,e,s,f,w,w,p,w,o,e,n,a,g p,f,y,w,t,p,f,c,n,p,e,e,s,s,w,w,p,w,o,p,n,v,g p,f,y,n,t,p,f,c,n,k,e,e,s,s,w,w,p,w,o,p,n,s,g e,x,s,w,f,n,f,w,b,p,t,e,f,s,w,w,p,w,o,e,n,a,g p,f,s,n,t,p,f,c,n,k,e,e,s,s,w,w,p,w,o,p,n,s,g e,f,f,n,f,n,f,w,b,h,t,e,s,f,w,w,p,w,o,e,k,s,g e,f,s,w,f,n,f,w,b,h,t,e,f,s,w,w,p,w,o,e,n,a,g p,f,s,n,t,p,f,c,n,w,e,e,s,s,w,w,p,w,o,p,k,v,g p,f,y,w,t,p,f,c,n,p,e,e,s,s,w,w,p,w,o,p,k,v,g p,f,y,n,t,p,f,c,n,w,e,e,s,s,w,w,p,w,o,p,k,v,g p,f,s,w,t,p,f,c,n,k,e,e,s,s,w,w,p,w,o,p,k,s,g e,x,s,w,f,n,f,w,b,k,t,e,f,f,w,w,p,w,o,e,k,a,g e,x,f,n,t,n,f,c,b,p,t,b,s,s,g,p,p,w,o,p,n,v,d e,f,s,w,f,n,f,w,b,k,t,e,f,f,w,w,p,w,o,e,n,a,g e,f,f,g,f,n,f,w,b,p,t,e,f,s,w,w,p,w,o,e,k,a,g p,f,y,n,t,p,f,c,n,k,e,e,s,s,w,w,p,w,o,p,k,v,u e,x,s,w,f,n,f,w,b,k,t,e,f,f,w,w,p,w,o,e,n,s,g e,x,y,e,t,n,f,c,b,n,t,b,s,s,p,w,p,w,o,p,n,y,d e,f,s,g,f,n,f,w,b,n,t,e,f,s,w,w,p,w,o,e,k,s,g e,x,s,w,f,n,f,w,b,n,t,e,f,f,w,w,p,w,o,e,n,s,g p,f,s,w,t,p,f,c,n,p,e,e,s,s,w,w,p,w,o,p,n,s,g e,f,f,n,f,n,f,w,b,k,t,e,f,s,w,w,p,w,o,e,n,a,g e,x,f,w,f,n,f,w,b,h,t,e,s,s,w,w,p,w,o,e,k,s,g p,f,s,w,t,p,f,c,n,k,e,e,s,s,w,w,p,w,o,p,k,s,u e,x,f,n,f,n,f,w,b,p,t,e,s,f,w,w,p,w,o,e,n,a,g e,f,f,w,f,n,f,w,b,n,t,e,s,s,w,w,p,w,o,e,n,s,g e,x,s,w,f,n,f,w,b,h,t,e,s,s,w,w,p,w,o,e,k,s,g e,f,s,w,f,n,f,w,b,h,t,e,f,s,w,w,p,w,o,e,n,s,g e,b,y,y,t,a,f,c,b,w,e,c,s,s,w,w,p,w,o,p,k,n,m e,x,s,g,f,n,f,w,b,n,t,e,s,s,w,w,p,w,o,e,n,a,g e,x,f,w,f,n,f,w,b,n,t,e,s,f,w,w,p,w,o,e,k,s,g e,x,s,n,f,n,f,w,b,n,t,e,f,s,w,w,p,w,o,e,n,s,g e,x,f,n,f,n,f,w,b,n,t,e,f,s,w,w,p,w,o,e,k,a,g e,f,f,g,f,n,f,w,b,h,t,e,f,f,w,w,p,w,o,e,k,a,g p,x,y,n,t,p,f,c,n,n,e,e,s,s,w,w,p,w,o,p,k,s,u e,f,s,g,f,n,f,w,b,h,t,e,f,s,w,w,p,w,o,e,k,s,g e,x,f,n,t,n,f,c,b,p,t,b,s,s,w,g,p,w,o,p,k,y,d e,f,s,w,f,n,f,w,b,n,t,e,s,f,w,w,p,w,o,e,k,a,g e,x,f,n,t,n,f,c,b,n,t,b,s,s,g,g,p,w,o,p,k,v,d e,x,f,g,f,n,f,w,b,p,t,e,f,s,w,w,p,w,o,e,n,a,g e,x,f,w,f,n,f,w,b,n,t,e,s,f,w,w,p,w,o,e,k,a,g p,f,s,w,t,p,f,c,n,k,e,e,s,s,w,w,p,w,o,p,n,v,g p,f,s,w,t,p,f,c,n,p,e,e,s,s,w,w,p,w,o,p,k,v,g e,x,s,g,f,n,f,w,b,k,t,e,f,s,w,w,p,w,o,e,n,a,g e,x,f,n,t,n,f,c,b,u,t,b,s,s,g,p,p,w,o,p,n,v,d e,x,s,g,f,n,f,w,b,k,t,e,s,f,w,w,p,w,o,e,k,s,g e,x,y,y,t,a,f,c,b,w,e,r,s,y,w,w,p,w,o,p,k,s,p e,f,s,w,f,n,f,w,b,n,t,e,s,s,w,w,p,w,o,e,n,a,g e,x,s,w,f,n,f,w,b,p,t,e,s,s,w,w,p,w,o,e,n,a,g e,x,f,g,f,n,f,w,b,k,t,e,f,s,w,w,p,w,o,e,k,a,g e,f,s,w,f,n,f,w,b,h,t,e,s,f,w,w,p,w,o,e,n,s,g e,x,f,n,t,n,f,c,b,n,t,b,s,s,w,w,p,w,o,p,n,y,d e,f,s,g,f,n,f,w,b,k,t,e,s,f,w,w,p,w,o,e,n,a,g e,f,s,g,f,n,f,w,b,h,t,e,s,s,w,w,p,w,o,e,n,s,g 
e,f,f,n,f,n,f,w,b,p,t,e,f,s,w,w,p,w,o,e,n,s,g e,f,s,w,f,n,f,w,b,k,t,e,s,f,w,w,p,w,o,e,n,s,g e,f,s,w,f,n,f,w,b,h,t,e,s,s,w,w,p,w,o,e,n,a,g e,f,s,g,f,n,f,w,b,p,t,e,s,s,w,w,p,w,o,e,n,s,g e,f,f,w,f,n,f,w,b,n,t,e,f,f,w,w,p,w,o,e,n,s,g e,f,s,n,f,n,f,w,b,p,t,e,s,f,w,w,p,w,o,e,n,a,g p,f,s,w,t,p,f,c,n,k,e,e,s,s,w,w,p,w,o,p,n,s,g e,x,f,g,f,n,f,w,b,n,t,e,f,s,w,w,p,w,o,e,n,a,g e,b,y,y,t,l,f,c,b,k,e,c,s,s,w,w,p,w,o,p,k,n,g e,f,f,n,f,n,f,w,b,k,t,e,f,f,w,w,p,w,o,e,n,s,g e,f,s,g,f,n,f,w,b,k,t,e,s,s,w,w,p,w,o,e,n,s,g e,x,f,n,t,n,f,c,b,n,t,b,s,s,w,g,p,w,o,p,k,y,d e,b,s,y,t,l,f,c,b,n,e,c,s,s,w,w,p,w,o,p,k,s,m e,x,s,w,f,n,f,w,b,p,t,e,s,s,w,w,p,w,o,e,k,s,g e,f,f,n,t,n,f,c,b,n,t,b,s,s,w,p,p,w,o,p,n,y,d p,x,s,w,t,p,f,c,n,n,e,e,s,s,w,w,p,w,o,p,n,s,g e,f,f,w,f,n,f,w,b,n,t,e,f,s,w,w,p,w,o,e,k,s,g p,f,y,w,t,p,f,c,n,k,e,e,s,s,w,w,p,w,o,p,n,s,u e,x,s,n,f,n,f,w,b,k,t,e,f,s,w,w,p,w,o,e,n,a,g e,f,s,n,f,n,f,w,b,n,t,e,s,f,w,w,p,w,o,e,k,a,g e,b,s,y,t,l,f,c,b,g,e,c,s,s,w,w,p,w,o,p,k,n,g e,f,s,g,f,n,f,w,b,k,t,e,f,f,w,w,p,w,o,e,n,a,g e,x,s,g,f,n,f,w,b,k,t,e,s,f,w,w,p,w,o,e,n,a,g e,f,f,g,f,n,f,w,b,k,t,e,f,f,w,w,p,w,o,e,k,s,g e,x,f,g,f,n,f,w,b,k,t,e,s,s,w,w,p,w,o,e,k,s,g e,f,s,w,f,n,f,w,b,n,t,e,s,s,w,w,p,w,o,e,k,a,g p,f,s,n,t,p,f,c,n,k,e,e,s,s,w,w,p,w,o,p,k,v,g e,f,f,g,f,n,f,w,b,p,t,e,s,f,w,w,p,w,o,e,n,a,g e,f,s,w,f,n,f,w,b,h,t,e,s,s,w,w,p,w,o,e,k,a,g e,x,f,n,t,n,f,c,b,n,t,b,s,s,p,g,p,w,o,p,k,y,d e,x,s,n,f,n,f,w,b,p,t,e,s,f,w,w,p,w,o,e,n,a,g e,x,f,e,t,n,f,c,b,u,t,b,s,s,w,p,p,w,o,p,n,y,d e,x,y,y,t,a,f,c,b,n,e,c,s,s,w,w,p,w,o,p,n,n,g e,f,f,w,f,n,f,w,b,k,t,e,f,s,w,w,p,w,o,e,k,s,g e,x,s,w,f,n,f,w,b,n,t,e,s,f,w,w,p,w,o,e,k,a,g e,f,f,n,f,n,f,w,b,h,t,e,f,s,w,w,p,w,o,e,n,a,g p,f,s,w,t,p,f,c,n,w,e,e,s,s,w,w,p,w,o,p,k,s,u e,f,f,g,t,n,f,c,b,n,t,b,s,s,w,g,p,w,o,p,n,y,d e,f,s,w,f,n,f,w,b,n,t,e,s,s,w,w,p,w,o,e,n,s,g e,f,f,w,f,n,f,w,b,h,t,e,s,f,w,w,p,w,o,e,k,a,g p,f,y,n,t,p,f,c,n,k,e,e,s,s,w,w,p,w,o,p,n,v,g e,x,s,w,f,n,f,w,b,p,t,e,f,f,w,w,p,w,o,e,k,s,g e,x,f,n,t,n,f,c,b,p,t,b,s,s,g,w,p,w,o,p,n,y,d e,x,y,g,t,n,f,c,b,u,t,b,s,s,w,p,p,w,o,p,n,v,d e,x,y,g,t,n,f,c,b,p,t,b,s,s,w,g,p,w,o,p,n,v,d e,f,s,n,f,n,f,w,b,k,t,e,f,f,w,w,p,w,o,e,n,s,g e,f,s,w,f,n,f,w,b,h,t,e,f,f,w,w,p,w,o,e,n,s,g e,f,s,n,f,n,f,w,b,k,t,e,s,f,w,w,p,w,o,e,n,a,g e,x,s,n,f,n,f,w,b,p,t,e,f,f,w,w,p,w,o,e,k,a,g e,f,f,w,f,n,f,w,b,h,t,e,f,f,w,w,p,w,o,e,k,a,g e,x,f,n,f,n,f,w,b,n,t,e,f,f,w,w,p,w,o,e,k,s,g e,f,f,w,f,n,f,w,b,p,t,e,s,s,w,w,p,w,o,e,k,a,g e,f,f,n,f,n,f,w,b,n,t,e,f,s,w,w,p,w,o,e,n,a,g e,f,s,w,f,n,f,w,b,n,t,e,f,s,w,w,p,w,o,e,k,s,g e,f,f,n,f,n,f,w,b,p,t,e,f,s,w,w,p,w,o,e,n,a,g p,f,y,n,t,p,f,c,n,w,e,e,s,s,w,w,p,w,o,p,k,v,u e,f,f,w,f,n,f,w,b,k,t,e,s,s,w,w,p,w,o,e,k,a,g e,f,s,w,f,n,f,w,b,p,t,e,s,f,w,w,p,w,o,e,n,s,g e,x,f,n,f,n,f,w,b,k,t,e,s,f,w,w,p,w,o,e,n,a,g e,f,f,w,f,n,f,w,b,p,t,e,f,f,w,w,p,w,o,e,n,s,g e,x,f,g,f,n,f,w,b,n,t,e,s,f,w,w,p,w,o,e,k,a,g e,f,s,n,f,n,f,w,b,p,t,e,s,s,w,w,p,w,o,e,n,a,g e,x,f,g,t,n,f,c,b,n,t,b,s,s,g,g,p,w,o,p,k,v,d e,f,f,w,f,n,f,w,b,n,t,e,f,s,w,w,p,w,o,e,n,a,g e,f,f,n,f,n,f,w,b,p,t,e,f,f,w,w,p,w,o,e,k,s,g e,x,s,n,f,n,f,w,b,p,t,e,s,s,w,w,p,w,o,e,k,s,g e,x,f,w,f,n,f,w,b,h,t,e,s,f,w,w,p,w,o,e,k,s,g e,b,s,y,t,l,f,c,b,g,e,c,s,s,w,w,p,w,o,p,k,s,g p,x,y,n,t,p,f,c,n,p,e,e,s,s,w,w,p,w,o,p,k,s,g p,f,s,n,t,p,f,c,n,p,e,e,s,s,w,w,p,w,o,p,k,v,g e,f,f,w,f,n,f,w,b,k,t,e,s,f,w,w,p,w,o,e,k,s,g e,f,f,n,f,n,f,w,b,k,t,e,s,s,w,w,p,w,o,e,k,s,g p,f,y,n,t,p,f,c,n,k,e,e,s,s,w,w,p,w,o,p,k,s,g e,f,f,n,f,n,f,w,b,p,t,e,s,f,w,w,p,w,o,e,k,s,g p,x,s,w,t,p,f,c,n,n,e,e,s,s,w,w,p,w,o,p,n,v,u e,f,s,n,f,n,f,w,b,k,t,e,s,s,w,w,p,w,o,e,n,a,g p,f,y,w,t,p,f,c,n,n,e,e,s,s,w,w,p,w,o,p,n,v,u 
e,f,s,n,f,n,f,w,b,n,t,e,f,s,w,w,p,w,o,e,k,a,g e,f,s,g,f,n,f,w,b,h,t,e,f,f,w,w,p,w,o,e,k,a,g e,x,f,g,f,n,f,w,b,h,t,e,s,f,w,w,p,w,o,e,k,a,g e,f,f,n,f,n,f,w,b,k,t,e,s,s,w,w,p,w,o,e,n,s,g e,x,f,g,t,n,f,c,b,w,t,b,s,s,w,p,p,w,o,p,n,v,d e,x,f,n,t,n,f,c,b,p,t,b,s,s,g,p,p,w,o,p,k,v,d e,x,f,n,f,n,f,w,b,k,t,e,s,f,w,w,p,w,o,e,n,s,g e,x,f,g,t,n,f,c,b,w,t,b,s,s,g,w,p,w,o,p,k,y,d e,x,s,w,f,n,f,w,b,h,t,e,s,s,w,w,p,w,o,e,n,s,g e,f,s,w,f,n,f,w,b,h,t,e,f,f,w,w,p,w,o,e,k,a,g e,x,s,w,f,n,f,w,b,p,t,e,f,s,w,w,p,w,o,e,k,s,g e,f,f,g,f,n,f,w,b,k,t,e,s,f,w,w,p,w,o,e,k,s,g e,x,f,n,t,n,f,c,b,p,t,b,s,s,w,p,p,w,o,p,n,y,d e,x,s,w,f,n,f,w,b,k,t,e,s,f,w,w,p,w,o,e,n,s,g e,x,s,n,f,n,f,w,b,h,t,e,f,s,w,w,p,w,o,e,k,s,g e,f,s,g,f,n,f,w,b,p,t,e,s,f,w,w,p,w,o,e,n,s,g p,f,y,w,t,p,f,c,n,k,e,e,s,s,w,w,p,w,o,p,n,s,g e,f,s,n,f,n,f,w,b,k,t,e,s,f,w,w,p,w,o,e,k,s,g e,f,y,y,t,a,f,c,b,p,e,r,s,y,w,w,p,w,o,p,n,y,g e,x,f,g,t,n,f,c,b,n,t,b,s,s,g,p,p,w,o,p,k,y,d e,f,s,g,f,n,f,w,b,n,t,e,s,s,w,w,p,w,o,e,n,a,g e,x,s,g,f,n,f,w,b,h,t,e,f,s,w,w,p,w,o,e,n,a,g p,f,y,w,t,p,f,c,n,p,e,e,s,s,w,w,p,w,o,p,n,s,u e,x,s,w,f,n,f,w,b,h,t,e,f,f,w,w,p,w,o,e,k,a,g e,x,s,w,f,n,f,w,b,p,t,e,s,s,w,w,p,w,o,e,k,a,g e,f,s,g,f,n,f,w,b,n,t,e,s,s,w,w,p,w,o,e,k,a,g e,f,s,n,f,n,f,w,b,n,t,e,s,s,w,w,p,w,o,e,n,s,g e,x,f,n,f,n,f,w,b,k,t,e,f,f,w,w,p,w,o,e,k,s,g e,f,s,n,f,n,f,w,b,p,t,e,s,f,w,w,p,w,o,e,k,s,g e,f,s,n,f,n,f,w,b,n,t,e,f,f,w,w,p,w,o,e,n,a,g e,x,f,w,f,n,f,w,b,n,t,e,s,f,w,w,p,w,o,e,n,a,g e,f,f,w,f,n,f,w,b,k,t,e,f,s,w,w,p,w,o,e,n,a,g e,f,f,n,f,n,f,w,b,h,t,e,s,f,w,w,p,w,o,e,k,a,g e,x,y,w,t,l,f,c,b,g,e,c,s,s,w,w,p,w,o,p,n,s,m e,x,s,n,f,n,f,w,b,n,t,e,f,s,w,w,p,w,o,e,k,a,g e,x,s,g,f,n,f,w,b,h,t,e,f,f,w,w,p,w,o,e,n,s,g e,f,s,w,f,n,f,w,b,k,t,e,f,s,w,w,p,w,o,e,k,a,g p,x,s,w,t,p,f,c,n,w,e,e,s,s,w,w,p,w,o,p,n,s,u e,x,s,w,f,n,f,w,b,p,t,e,s,f,w,w,p,w,o,e,k,s,g e,x,f,n,t,n,f,c,b,n,t,b,s,s,g,p,p,w,o,p,n,v,d e,f,f,g,f,n,f,w,b,n,t,e,s,s,w,w,p,w,o,e,n,s,g e,x,s,g,f,n,f,w,b,n,t,e,s,f,w,w,p,w,o,e,k,a,g e,f,f,n,f,n,f,w,b,n,t,e,s,s,w,w,p,w,o,e,n,a,g e,f,s,n,f,n,f,w,b,p,t,e,s,s,w,w,p,w,o,e,n,s,g e,x,s,g,f,n,f,w,b,h,t,e,f,f,w,w,p,w,o,e,k,s,g e,f,y,n,t,l,f,c,b,n,e,r,s,y,w,w,p,w,o,p,k,s,p p,f,y,n,t,p,f,c,n,p,e,e,s,s,w,w,p,w,o,p,k,v,u e,x,s,n,f,n,f,w,b,p,t,e,s,f,w,w,p,w,o,e,n,s,g e,x,f,n,t,n,f,c,b,p,t,b,s,s,w,w,p,w,o,p,k,v,d p,f,y,w,t,p,f,c,n,w,e,e,s,s,w,w,p,w,o,p,k,s,g p,x,s,w,t,p,f,c,n,p,e,e,s,s,w,w,p,w,o,p,k,s,u p,f,s,n,t,p,f,c,n,p,e,e,s,s,w,w,p,w,o,p,n,s,u e,x,f,n,t,n,f,c,b,u,t,b,s,s,g,p,p,w,o,p,k,y,d e,b,y,w,t,l,f,c,b,g,e,c,s,s,w,w,p,w,o,p,k,n,m p,f,y,w,t,p,f,c,n,k,e,e,s,s,w,w,p,w,o,p,k,s,g e,f,f,w,f,n,f,w,b,p,t,e,s,f,w,w,p,w,o,e,k,a,g e,x,s,w,f,n,f,w,b,n,t,e,f,s,w,w,p,w,o,e,n,s,g e,x,f,n,t,n,f,c,b,n,t,b,s,s,g,p,p,w,o,p,k,y,d e,x,s,n,f,n,f,w,b,h,t,e,f,s,w,w,p,w,o,e,k,a,g e,f,f,n,f,n,f,w,b,p,t,e,s,s,w,w,p,w,o,e,k,s,g e,f,f,n,f,n,f,w,b,n,t,e,f,f,w,w,p,w,o,e,n,s,g p,f,y,n,t,p,f,c,n,p,e,e,s,s,w,w,p,w,o,p,n,s,u p,f,y,w,t,p,f,c,n,w,e,e,s,s,w,w,p,w,o,p,k,v,g e,x,s,n,f,n,f,w,b,p,t,e,s,f,w,w,p,w,o,e,k,s,g e,f,f,g,f,n,f,w,b,p,t,e,f,f,w,w,p,w,o,e,n,s,g e,x,f,n,t,n,f,c,b,n,t,b,s,s,p,g,p,w,o,p,k,v,d e,f,s,g,f,n,f,w,b,k,t,e,f,s,w,w,p,w,o,e,n,s,g e,f,f,n,f,n,f,w,b,h,t,e,f,s,w,w,p,w,o,e,k,a,g p,f,s,n,t,p,f,c,n,k,e,e,s,s,w,w,p,w,o,p,n,v,u e,x,f,n,f,n,f,w,b,n,t,e,f,f,w,w,p,w,o,e,n,a,g p,f,y,n,t,p,f,c,n,p,e,e,s,s,w,w,p,w,o,p,n,v,g e,x,s,g,f,n,f,w,b,n,t,e,f,f,w,w,p,w,o,e,k,a,g e,f,f,w,f,n,f,w,b,p,t,e,f,f,w,w,p,w,o,e,k,s,g e,x,s,w,f,n,f,w,b,n,t,e,s,s,w,w,p,w,o,e,n,a,g e,x,f,n,t,n,f,c,b,n,t,b,s,s,p,p,p,w,o,p,n,y,d e,x,s,n,f,n,f,w,b,h,t,e,f,s,w,w,p,w,o,e,n,s,g p,f,s,w,t,p,f,c,n,k,e,e,s,s,w,w,p,w,o,p,n,s,u 
e,x,f,n,t,n,f,c,b,p,t,b,s,s,w,g,p,w,o,p,n,v,d e,x,s,w,f,n,f,w,b,n,t,e,f,f,w,w,p,w,o,e,k,s,g e,f,s,w,f,n,f,w,b,k,t,e,f,s,w,w,p,w,o,e,k,s,g e,x,s,w,t,a,f,w,n,n,t,b,s,s,w,w,p,w,o,p,u,v,d e,x,f,n,f,n,f,w,b,p,t,e,f,f,w,w,p,w,o,e,n,s,g e,x,y,g,t,n,f,c,b,p,t,b,s,s,g,w,p,w,o,p,k,v,d e,x,f,n,t,n,f,c,b,n,t,b,s,s,g,g,p,w,o,p,n,y,d e,x,s,y,t,a,f,w,n,p,t,b,s,s,w,w,p,w,o,p,u,v,d e,f,f,g,f,n,f,w,b,h,t,e,s,s,w,w,p,w,o,e,k,a,g e,x,s,n,f,n,f,w,b,n,t,e,s,s,w,w,p,w,o,e,k,s,g e,x,y,e,t,n,f,c,b,u,t,b,s,s,g,w,p,w,o,p,n,y,d e,f,s,g,f,n,f,w,b,p,t,e,f,f,w,w,p,w,o,e,k,a,g p,f,s,w,t,p,f,c,n,n,e,e,s,s,w,w,p,w,o,p,k,v,u e,f,f,w,f,n,f,w,b,p,t,e,s,f,w,w,p,w,o,e,n,s,g e,x,f,g,f,n,f,w,b,h,t,e,s,s,w,w,p,w,o,e,n,a,g e,f,f,n,f,n,f,w,b,k,t,e,s,f,w,w,p,w,o,e,n,s,g e,f,s,n,f,n,f,w,b,p,t,e,f,f,w,w,p,w,o,e,k,a,g e,x,f,w,f,n,f,w,b,k,t,e,f,f,w,w,p,w,o,e,n,a,g p,f,y,w,t,p,f,c,n,p,e,e,s,s,w,w,p,w,o,p,k,s,g e,x,f,w,f,n,f,w,b,k,t,e,f,s,w,w,p,w,o,e,n,s,g e,f,s,n,f,n,f,w,b,k,t,e,f,f,w,w,p,w,o,e,n,a,g e,x,s,n,f,n,f,w,b,n,t,e,f,s,w,w,p,w,o,e,k,s,g e,x,f,g,f,n,f,w,b,p,t,e,s,s,w,w,p,w,o,e,k,s,g e,x,s,n,f,n,f,w,b,k,t,e,s,s,w,w,p,w,o,e,k,a,g e,f,y,y,t,l,f,c,b,n,e,r,s,y,w,w,p,w,o,p,k,y,g p,x,y,n,t,p,f,c,n,n,e,e,s,s,w,w,p,w,o,p,k,v,u e,x,f,w,f,n,f,w,b,p,t,e,s,f,w,w,p,w,o,e,k,a,g e,f,s,w,f,n,f,w,b,k,t,e,f,f,w,w,p,w,o,e,k,s,g e,x,s,n,f,n,f,w,b,h,t,e,s,f,w,w,p,w,o,e,n,s,g p,f,y,n,t,p,f,c,n,p,e,e,s,s,w,w,p,w,o,p,n,s,g e,f,f,g,f,n,f,w,b,p,t,e,f,s,w,w,p,w,o,e,k,s,g p,f,s,w,t,p,f,c,n,p,e,e,s,s,w,w,p,w,o,p,n,s,u e,f,s,n,f,n,f,w,b,n,t,e,f,s,w,w,p,w,o,e,k,s,g e,x,s,g,f,n,f,w,b,p,t,e,f,s,w,w,p,w,o,e,k,s,g e,x,f,g,f,n,f,w,b,n,t,e,s,s,w,w,p,w,o,e,k,a,g e,f,f,n,f,n,f,w,b,n,t,e,s,s,w,w,p,w,o,e,k,s,g e,f,s,g,f,n,f,w,b,h,t,e,s,f,w,w,p,w,o,e,n,s,g e,x,s,w,f,n,f,w,b,p,t,e,s,f,w,w,p,w,o,e,n,s,g e,x,f,n,f,n,f,w,b,k,t,e,s,f,w,w,p,w,o,e,k,a,g e,x,f,g,f,n,f,w,b,p,t,e,s,f,w,w,p,w,o,e,k,s,g e,f,f,n,f,n,f,w,b,p,t,e,s,s,w,w,p,w,o,e,n,a,g e,b,y,y,t,a,f,c,b,n,e,c,s,s,w,w,p,w,o,p,k,n,m e,f,f,g,f,n,f,w,b,k,t,e,s,f,w,w,p,w,o,e,n,s,g e,x,f,g,f,n,f,w,b,h,t,e,s,s,w,w,p,w,o,e,k,a,g e,f,f,w,f,n,f,w,b,n,t,e,f,f,w,w,p,w,o,e,n,a,g e,x,f,n,t,n,f,c,b,p,t,b,s,s,p,p,p,w,o,p,n,y,d p,f,s,n,t,p,f,c,n,w,e,e,s,s,w,w,p,w,o,p,k,s,u e,x,f,n,t,n,f,c,b,p,t,b,s,s,p,g,p,w,o,p,n,y,d e,x,f,n,f,n,f,w,b,h,t,e,f,f,w,w,p,w,o,e,n,a,g e,x,f,n,t,n,f,c,b,p,t,b,s,s,w,p,p,w,o,p,k,y,d e,x,f,w,f,n,f,w,b,n,t,e,f,s,w,w,p,w,o,e,n,a,g e,f,s,g,f,n,f,w,b,k,t,e,s,s,w,w,p,w,o,e,k,s,g e,f,s,n,f,n,f,w,b,h,t,e,s,s,w,w,p,w,o,e,k,a,g e,x,y,y,t,a,f,c,b,g,e,c,s,s,w,w,p,w,o,p,k,s,m e,x,f,w,t,a,f,w,n,p,t,b,s,s,w,w,p,w,o,p,u,v,d e,f,f,g,f,n,f,w,b,n,t,e,f,f,w,w,p,w,o,e,n,a,g e,f,f,n,f,n,f,w,b,h,t,e,s,f,w,w,p,w,o,e,n,a,g e,f,f,n,f,n,f,w,b,n,t,e,s,f,w,w,p,w,o,e,n,s,g e,b,y,y,t,a,f,c,b,g,e,c,s,s,w,w,p,w,o,p,k,s,g e,x,f,n,f,n,f,w,b,h,t,e,f,f,w,w,p,w,o,e,k,s,g e,x,y,w,t,l,f,c,b,n,e,c,s,s,w,w,p,w,o,p,n,s,g e,f,s,w,f,n,f,w,b,n,t,e,s,f,w,w,p,w,o,e,k,s,g e,x,s,w,f,n,f,w,b,k,t,e,s,s,w,w,p,w,o,e,n,a,g e,x,f,n,f,n,f,w,b,n,t,e,f,f,w,w,p,w,o,e,n,s,g e,x,s,n,f,n,f,w,b,n,t,e,s,f,w,w,p,w,o,e,n,a,g e,x,s,n,f,n,f,w,b,n,t,e,f,f,w,w,p,w,o,e,n,s,g e,x,y,n,t,n,f,c,b,w,t,b,s,s,w,w,p,w,o,p,n,y,d e,f,s,g,f,n,f,w,b,n,t,e,f,f,w,w,p,w,o,e,k,s,g e,x,y,y,t,a,f,c,b,g,e,c,s,s,w,w,p,w,o,p,k,n,m e,x,s,w,f,n,f,w,b,n,t,e,f,f,w,w,p,w,o,e,k,a,g p,f,y,w,t,p,f,c,n,n,e,e,s,s,w,w,p,w,o,p,k,s,u e,f,s,w,f,n,f,w,b,h,t,e,s,s,w,w,p,w,o,e,n,s,g p,f,s,w,t,p,f,c,n,n,e,e,s,s,w,w,p,w,o,p,n,s,u e,f,s,g,f,n,f,w,b,h,t,e,s,s,w,w,p,w,o,e,k,s,g e,f,f,g,f,n,f,w,b,k,t,e,f,f,w,w,p,w,o,e,n,s,g p,x,s,w,t,p,f,c,n,p,e,e,s,s,w,w,p,w,o,p,n,s,g e,x,s,g,f,n,f,w,b,p,t,e,f,f,w,w,p,w,o,e,k,a,g 
p,x,s,w,t,p,f,c,n,p,e,e,s,s,w,w,p,w,o,p,n,s,u e,x,s,g,f,n,f,w,b,p,t,e,f,f,w,w,p,w,o,e,n,s,g e,f,s,w,f,n,f,w,b,p,t,e,s,s,w,w,p,w,o,e,k,s,g p,f,y,w,t,p,f,c,n,w,e,e,s,s,w,w,p,w,o,p,n,v,g e,x,f,w,f,n,f,w,b,n,t,e,s,s,w,w,p,w,o,e,k,a,g e,x,s,n,f,n,f,w,b,h,t,e,s,s,w,w,p,w,o,e,k,a,g e,x,f,w,f,n,f,w,b,p,t,e,f,f,w,w,p,w,o,e,n,s,g e,x,f,w,f,n,f,w,b,p,t,e,f,s,w,w,p,w,o,e,k,a,g e,x,y,e,t,n,f,c,b,u,t,b,s,s,g,g,p,w,o,p,k,y,d p,f,s,n,t,p,f,c,n,k,e,e,s,s,w,w,p,w,o,p,k,s,u e,f,f,g,f,n,f,w,b,k,t,e,f,s,w,w,p,w,o,e,k,s,g e,x,y,y,t,a,f,c,b,w,e,r,s,y,w,w,p,w,o,p,k,y,g e,x,f,n,f,n,f,w,b,h,t,e,s,s,w,w,p,w,o,e,n,a,g e,f,f,n,f,n,f,w,b,k,t,e,f,f,w,w,p,w,o,e,k,a,g e,b,s,w,t,l,f,c,b,g,e,c,s,s,w,w,p,w,o,p,k,n,m e,f,s,g,f,n,f,w,b,k,t,e,s,f,w,w,p,w,o,e,k,a,g e,f,f,w,f,n,f,w,b,n,t,e,f,f,w,w,p,w,o,e,k,a,g e,f,f,n,f,n,f,w,b,h,t,e,s,s,w,w,p,w,o,e,n,a,g p,f,s,n,t,p,f,c,n,w,e,e,s,s,w,w,p,w,o,p,n,s,u e,x,s,n,f,n,f,w,b,h,t,e,s,f,w,w,p,w,o,e,n,a,g e,b,y,y,t,a,f,c,b,w,e,c,s,s,w,w,p,w,o,p,k,n,g e,f,s,g,f,n,f,w,b,p,t,e,s,f,w,w,p,w,o,e,n,a,g e,f,s,n,f,n,f,w,b,h,t,e,s,f,w,w,p,w,o,e,k,s,g e,x,f,g,f,n,f,w,b,h,t,e,f,s,w,w,p,w,o,e,n,a,g e,f,s,g,f,n,f,w,b,p,t,e,s,f,w,w,p,w,o,e,k,s,g e,x,f,n,f,n,f,w,b,p,t,e,f,s,w,w,p,w,o,e,k,a,g e,x,f,n,t,n,f,c,b,p,t,b,s,s,p,g,p,w,o,p,n,v,d e,b,y,w,t,l,f,c,b,g,e,c,s,s,w,w,p,w,o,p,n,n,m e,x,f,g,t,n,f,c,b,n,t,b,s,s,g,w,p,w,o,p,n,v,d p,f,y,n,t,p,f,c,n,p,e,e,s,s,w,w,p,w,o,p,k,s,g e,x,s,w,f,n,f,w,b,h,t,e,s,s,w,w,p,w,o,e,n,a,g p,f,s,n,t,p,f,c,n,k,e,e,s,s,w,w,p,w,o,p,k,v,u e,f,s,n,f,n,f,w,b,n,t,e,s,f,w,w,p,w,o,e,n,a,g e,f,f,n,f,n,f,w,b,k,t,e,s,f,w,w,p,w,o,e,k,a,g e,x,f,w,f,n,f,w,b,k,t,e,s,f,w,w,p,w,o,e,k,a,g e,x,f,n,f,n,f,w,b,n,t,e,f,s,w,w,p,w,o,e,n,a,g e,f,s,n,f,n,f,w,b,n,t,e,f,f,w,w,p,w,o,e,n,s,g e,x,s,y,t,l,f,c,b,g,e,c,s,s,w,w,p,w,o,p,k,s,g e,f,s,w,f,n,f,w,b,k,t,e,s,f,w,w,p,w,o,e,k,a,g e,x,s,w,f,n,f,w,b,k,t,e,f,s,w,w,p,w,o,e,k,s,g p,x,y,n,t,p,f,c,n,p,e,e,s,s,w,w,p,w,o,p,k,v,g e,f,f,g,f,n,f,w,b,n,t,e,s,f,w,w,p,w,o,e,k,a,g p,f,y,n,t,p,f,c,n,n,e,e,s,s,w,w,p,w,o,p,k,s,g e,f,f,w,f,n,f,w,b,n,t,e,s,s,w,w,p,w,o,e,k,a,g e,x,f,n,t,n,f,c,b,p,t,b,s,s,p,w,p,w,o,p,k,v,d e,x,f,g,f,n,f,w,b,k,t,e,s,f,w,w,p,w,o,e,k,s,g p,f,s,n,t,p,f,c,n,w,e,e,s,s,w,w,p,w,o,p,n,v,u e,f,s,g,f,n,f,w,b,h,t,e,s,f,w,w,p,w,o,e,k,s,g p,x,s,w,t,p,f,c,n,w,e,e,s,s,w,w,p,w,o,p,n,v,u e,x,f,n,t,n,f,c,b,p,t,b,s,s,g,g,p,w,o,p,n,v,d e,x,f,n,f,n,f,w,b,p,t,e,f,s,w,w,p,w,o,e,n,s,g e,x,y,y,t,a,f,c,b,n,e,r,s,y,w,w,p,w,o,p,n,s,p e,f,f,w,f,n,f,w,b,h,t,e,f,f,w,w,p,w,o,e,k,s,g e,f,s,w,f,n,f,w,b,p,t,e,s,f,w,w,p,w,o,e,n,a,g e,x,f,g,f,n,f,w,b,n,t,e,f,s,w,w,p,w,o,e,n,s,g e,f,f,g,f,n,f,w,b,p,t,e,f,f,w,w,p,w,o,e,k,s,g e,f,f,w,f,n,f,w,b,n,t,e,s,f,w,w,p,w,o,e,n,a,g e,x,f,g,f,n,f,w,b,n,t,e,f,f,w,w,p,w,o,e,k,s,g e,f,f,g,f,n,f,w,b,k,t,e,f,f,w,w,p,w,o,e,n,a,g e,x,f,g,t,n,f,c,b,w,t,b,s,s,w,w,p,w,o,p,n,v,d e,x,s,g,f,n,f,w,b,p,t,e,f,s,w,w,p,w,o,e,k,a,g e,f,s,g,f,n,f,w,b,p,t,e,f,s,w,w,p,w,o,e,k,s,g e,x,y,n,t,l,f,c,b,w,e,r,s,y,w,w,p,w,o,p,n,s,g e,x,y,g,t,n,f,c,b,n,t,b,s,s,p,g,p,w,o,p,k,y,d e,f,s,g,f,n,f,w,b,k,t,e,f,f,w,w,p,w,o,e,k,a,g e,f,f,w,f,n,f,w,b,k,t,e,s,f,w,w,p,w,o,e,n,s,g e,f,s,n,f,n,f,w,b,h,t,e,f,f,w,w,p,w,o,e,n,a,g e,f,f,w,f,n,f,w,b,p,t,e,s,s,w,w,p,w,o,e,k,s,g e,f,f,n,f,n,f,w,b,n,t,e,s,f,w,w,p,w,o,e,k,a,g e,f,s,g,f,n,f,w,b,k,t,e,s,s,w,w,p,w,o,e,k,a,g e,x,s,n,f,n,f,w,b,n,t,e,s,s,w,w,p,w,o,e,n,s,g e,x,f,n,t,n,f,c,b,p,t,b,s,s,w,w,p,w,o,p,n,y,d e,x,s,n,f,n,f,w,b,k,t,e,f,f,w,w,p,w,o,e,n,s,g e,x,f,n,f,n,f,w,b,k,t,e,f,s,w,w,p,w,o,e,k,a,g e,f,s,w,f,n,f,w,b,p,t,e,f,f,w,w,p,w,o,e,n,a,g e,x,f,n,f,n,f,c,n,p,e,e,s,s,w,w,p,w,o,p,k,y,u p,f,s,w,t,p,f,c,n,p,e,e,s,s,w,w,p,w,o,p,k,s,u 
p,f,s,n,t,p,f,c,n,p,e,e,s,s,w,w,p,w,o,p,n,v,g e,x,f,n,t,n,f,c,b,u,t,b,s,s,w,g,p,w,o,p,k,v,d e,x,s,w,f,n,f,w,b,n,t,e,f,s,w,w,p,w,o,e,n,a,g e,f,f,g,f,n,f,w,b,k,t,e,s,s,w,w,p,w,o,e,n,a,g e,x,s,n,f,n,f,w,b,h,t,e,s,s,w,w,p,w,o,e,n,s,g e,x,f,n,f,n,f,w,b,h,t,e,f,f,w,w,p,w,o,e,n,s,g e,x,s,g,f,n,f,w,b,h,t,e,s,s,w,w,p,w,o,e,k,s,g e,f,f,g,f,n,f,w,b,n,t,e,f,f,w,w,p,w,o,e,k,a,g e,f,s,n,f,n,f,w,b,p,t,e,s,f,w,w,p,w,o,e,n,s,g e,f,s,g,f,n,f,w,b,n,t,e,s,f,w,w,p,w,o,e,k,a,g e,x,f,e,t,n,f,c,b,n,t,b,s,s,p,p,p,w,o,p,k,v,d p,f,y,w,t,p,f,c,n,k,e,e,s,s,w,w,p,w,o,p,n,v,u e,x,f,n,f,n,f,w,b,h,t,e,f,s,w,w,p,w,o,e,n,s,g e,x,f,n,t,n,f,c,b,n,t,b,s,s,w,g,p,w,o,p,n,v,d p,f,s,n,t,p,f,c,n,k,e,e,s,s,w,w,p,w,o,p,n,s,u p,f,s,w,t,p,f,c,n,k,e,e,s,s,w,w,p,w,o,p,k,v,g p,f,s,w,t,p,f,c,n,n,e,e,s,s,w,w,p,w,o,p,k,s,u e,f,f,w,f,n,f,w,b,h,t,e,s,f,w,w,p,w,o,e,n,a,g e,f,s,n,f,n,f,w,b,h,t,e,f,s,w,w,p,w,o,e,n,s,g e,f,f,n,f,n,f,w,b,h,t,e,f,f,w,w,p,w,o,e,k,a,g e,x,s,n,f,n,f,w,b,h,t,e,s,f,w,w,p,w,o,e,k,a,g e,f,s,n,f,n,f,w,b,k,t,e,s,f,w,w,p,w,o,e,k,a,g p,f,s,w,t,p,f,c,n,p,e,e,s,s,w,w,p,w,o,p,n,v,g p,f,s,w,t,p,f,c,n,n,e,e,s,s,w,w,p,w,o,p,k,v,g e,f,s,g,f,n,f,w,b,p,t,e,f,f,w,w,p,w,o,e,n,a,g e,x,s,n,f,n,f,w,b,k,t,e,f,s,w,w,p,w,o,e,k,a,g e,x,s,n,f,n,f,w,b,h,t,e,f,f,w,w,p,w,o,e,n,s,g e,x,f,n,t,n,f,c,b,w,t,b,s,s,w,g,p,w,o,p,k,v,d e,x,f,n,t,n,f,c,b,n,t,b,s,s,g,w,p,w,o,p,n,v,d e,f,f,w,f,n,f,w,b,p,t,e,f,s,w,w,p,w,o,e,k,a,g e,f,f,w,f,n,f,w,b,n,t,e,f,s,w,w,p,w,o,e,n,s,g e,x,f,w,f,n,f,w,b,k,t,e,f,f,w,w,p,w,o,e,k,a,g e,x,s,y,t,l,f,c,b,n,e,c,s,s,w,w,p,w,o,p,n,n,m e,f,s,g,f,n,f,w,b,n,t,e,f,f,w,w,p,w,o,e,n,a,g e,x,s,w,f,n,f,w,b,h,t,e,s,f,w,w,p,w,o,e,n,s,g e,x,f,n,f,n,f,w,b,n,t,e,s,s,w,w,p,w,o,e,n,s,g e,x,f,n,f,n,f,w,b,p,t,e,f,s,w,w,p,w,o,e,n,a,g e,x,s,w,f,n,f,w,b,n,t,e,s,s,w,w,p,w,o,e,n,s,g e,x,s,n,f,n,f,w,b,h,t,e,f,f,w,w,p,w,o,e,k,s,g e,x,s,g,f,n,f,w,b,h,t,e,f,s,w,w,p,w,o,e,n,s,g e,x,s,g,f,n,f,w,b,p,t,e,s,f,w,w,p,w,o,e,k,s,g e,f,s,w,f,n,f,w,b,p,t,e,s,s,w,w,p,w,o,e,n,s,g e,x,s,n,f,n,f,w,b,n,t,e,f,f,w,w,p,w,o,e,k,a,g e,f,f,g,f,n,f,w,b,p,t,e,s,s,w,w,p,w,o,e,n,s,g e,f,f,w,f,n,f,w,b,h,t,e,s,s,w,w,p,w,o,e,k,s,g e,x,f,w,f,n,f,w,b,n,t,e,s,s,w,w,p,w,o,e,n,a,g e,f,f,g,f,n,f,w,b,h,t,e,f,s,w,w,p,w,o,e,k,s,g e,x,f,g,f,n,f,w,b,p,t,e,s,f,w,w,p,w,o,e,k,a,g e,x,y,y,t,a,f,c,b,g,e,c,s,s,w,w,p,w,o,p,k,n,g e,x,s,y,t,l,f,c,b,n,e,c,s,s,w,w,p,w,o,p,k,n,g e,x,s,w,f,n,f,w,b,p,t,e,s,f,w,w,p,w,o,e,k,a,g p,x,y,n,t,p,f,c,n,k,e,e,s,s,w,w,p,w,o,p,k,s,g e,x,f,w,f,n,f,w,b,k,t,e,s,f,w,w,p,w,o,e,n,s,g p,x,s,w,t,p,f,c,n,w,e,e,s,s,w,w,p,w,o,p,k,s,u e,x,y,g,t,n,f,c,b,n,t,b,s,s,p,g,p,w,o,p,n,v,d p,f,s,w,t,p,f,c,n,w,e,e,s,s,w,w,p,w,o,p,k,v,u e,x,y,y,t,l,f,c,b,g,e,c,s,s,w,w,p,w,o,p,n,s,g e,x,y,w,t,l,f,c,b,n,e,c,s,s,w,w,p,w,o,p,n,n,m e,f,y,n,t,l,f,c,b,n,e,r,s,y,w,w,p,w,o,p,n,y,p e,x,s,w,f,n,f,w,b,n,t,e,s,f,w,w,p,w,o,e,n,s,g e,f,s,n,f,n,f,w,b,p,t,e,f,s,w,w,p,w,o,e,k,s,g e,x,s,g,f,n,f,w,b,p,t,e,s,s,w,w,p,w,o,e,k,a,g e,x,s,w,f,n,f,w,b,n,t,e,f,s,w,w,p,w,o,e,k,a,g e,f,s,n,f,n,f,w,b,h,t,e,f,f,w,w,p,w,o,e,n,s,g e,f,s,n,f,n,f,w,b,n,t,e,s,s,w,w,p,w,o,e,k,a,g e,x,s,g,f,n,f,w,b,n,t,e,f,f,w,w,p,w,o,e,k,s,g e,b,s,w,t,l,f,c,b,n,e,c,s,s,w,w,p,w,o,p,n,n,g e,x,s,g,f,n,f,w,b,p,t,e,s,s,w,w,p,w,o,e,k,s,g e,x,s,y,t,l,f,c,b,w,e,c,s,s,w,w,p,w,o,p,k,s,m p,f,y,w,t,p,f,c,n,n,e,e,s,s,w,w,p,w,o,p,k,v,g e,f,s,w,f,n,f,w,b,p,t,e,f,f,w,w,p,w,o,e,k,s,g e,f,s,n,f,n,f,w,b,h,t,e,f,f,w,w,p,w,o,e,k,s,g e,f,s,g,f,n,f,w,b,n,t,e,s,s,w,w,p,w,o,e,n,s,g e,f,f,g,f,n,f,w,b,h,t,e,f,s,w,w,p,w,o,e,n,s,g e,x,y,n,t,n,f,c,b,p,t,b,s,s,g,w,p,w,o,p,n,y,d e,x,f,n,t,n,f,c,b,n,t,b,s,s,g,g,p,w,o,p,k,y,d e,x,y,y,t,a,f,c,b,n,e,r,s,y,w,w,p,w,o,p,n,y,p 
e,x,f,g,t,n,f,c,b,w,t,b,s,s,p,p,p,w,o,p,k,y,d e,f,f,g,f,n,f,w,b,p,t,e,f,f,w,w,p,w,o,e,n,a,g e,f,s,n,f,n,f,w,b,h,t,e,f,s,w,w,p,w,o,e,k,s,g e,f,s,w,f,n,f,w,b,n,t,e,s,f,w,w,p,w,o,e,n,a,g e,x,f,g,f,n,f,w,b,k,t,e,f,s,w,w,p,w,o,e,n,a,g e,f,s,w,f,n,f,w,b,n,t,e,f,f,w,w,p,w,o,e,k,s,g e,f,f,g,f,n,f,w,b,n,t,e,f,s,w,w,p,w,o,e,n,a,g e,x,f,g,f,n,f,w,b,k,t,e,s,s,w,w,p,w,o,e,n,a,g e,x,s,n,f,n,f,w,b,k,t,e,s,f,w,w,p,w,o,e,n,a,g e,x,f,n,t,n,f,c,b,w,t,b,s,s,w,g,p,w,o,p,k,y,d e,f,s,w,f,n,f,w,b,k,t,e,s,s,w,w,p,w,o,e,n,s,g e,f,f,n,f,n,f,w,b,n,t,e,f,s,w,w,p,w,o,e,k,s,g p,f,y,n,t,p,f,c,n,w,e,e,s,s,w,w,p,w,o,p,k,s,g e,x,f,n,t,n,f,c,b,n,t,b,s,s,p,w,p,w,o,p,k,y,d e,x,f,n,f,n,f,w,b,p,t,e,f,f,w,w,p,w,o,e,k,s,g e,f,f,w,f,n,f,w,b,n,t,e,s,f,w,w,p,w,o,e,k,s,g p,f,y,n,t,p,f,c,n,n,e,e,s,s,w,w,p,w,o,p,n,v,u e,f,f,g,f,n,f,w,b,k,t,e,s,s,w,w,p,w,o,e,k,s,g e,x,f,g,t,n,f,c,b,n,t,b,s,s,w,w,p,w,o,p,k,v,d e,x,f,w,f,n,f,w,b,p,t,e,f,f,w,w,p,w,o,e,k,a,g e,x,f,g,f,n,f,w,b,h,t,e,f,f,w,w,p,w,o,e,k,s,g e,x,f,n,t,n,f,c,b,w,t,b,s,s,g,p,p,w,o,p,n,v,d e,x,s,g,f,n,f,w,b,h,t,e,s,f,w,w,p,w,o,e,n,a,g e,f,f,w,f,n,f,w,b,p,t,e,s,f,w,w,p,w,o,e,n,a,g e,x,f,g,t,n,f,c,b,u,t,b,s,s,p,p,p,w,o,p,k,y,d e,x,y,w,t,l,f,c,b,w,e,c,s,s,w,w,p,w,o,p,k,n,g e,x,y,w,t,l,f,c,b,k,e,c,s,s,w,w,p,w,o,p,n,n,m e,f,s,g,f,n,f,w,b,p,t,e,f,s,w,w,p,w,o,e,n,s,g e,f,s,w,f,n,f,w,b,k,t,e,s,s,w,w,p,w,o,e,k,a,g e,f,s,n,f,n,f,w,b,n,t,e,f,f,w,w,p,w,o,e,k,s,g e,f,s,w,f,n,f,w,b,p,t,e,f,s,w,w,p,w,o,e,k,a,g e,x,f,n,f,n,f,w,b,h,t,e,s,f,w,w,p,w,o,e,n,a,g e,x,s,n,f,n,f,w,b,n,t,e,s,f,w,w,p,w,o,e,n,s,g e,b,y,y,t,a,f,c,b,g,e,c,s,s,w,w,p,w,o,p,n,n,m p,f,y,w,t,p,f,c,n,k,e,e,s,s,w,w,p,w,o,p,k,v,g e,x,s,n,f,n,f,w,b,k,t,e,s,f,w,w,p,w,o,e,k,a,g e,f,s,n,f,n,f,w,b,k,t,e,f,s,w,w,p,w,o,e,k,s,g e,x,y,n,t,a,f,c,b,n,e,r,s,y,w,w,p,w,o,p,k,s,g e,x,s,g,f,n,f,w,b,n,t,e,f,f,w,w,p,w,o,e,n,a,g e,f,s,w,f,n,f,w,b,k,t,e,f,s,w,w,p,w,o,e,n,a,g p,x,s,n,t,p,f,c,n,n,e,e,s,s,w,w,p,w,o,p,k,v,u e,f,s,g,f,n,f,w,b,n,t,e,f,f,w,w,p,w,o,e,k,a,g e,x,f,w,f,n,f,w,b,p,t,e,f,s,w,w,p,w,o,e,n,s,g p,f,s,n,t,p,f,c,n,n,e,e,s,s,w,w,p,w,o,p,k,v,g e,x,f,n,f,n,f,w,b,p,t,e,s,s,w,w,p,w,o,e,n,a,g e,x,s,n,f,n,f,w,b,p,t,e,f,s,w,w,p,w,o,e,k,a,g e,x,f,n,t,n,f,c,b,u,t,b,s,s,g,p,p,w,o,p,k,v,d e,f,s,w,f,n,f,w,b,p,t,e,f,s,w,w,p,w,o,e,k,s,g p,f,s,w,t,p,f,c,n,p,e,e,s,s,w,w,p,w,o,p,n,v,u e,x,f,w,f,n,f,w,b,h,t,e,s,f,w,w,p,w,o,e,k,a,g e,f,f,n,f,n,f,w,b,h,t,e,s,s,w,w,p,w,o,e,n,s,g e,f,s,g,f,n,f,w,b,p,t,e,f,f,w,w,p,w,o,e,n,s,g e,x,f,w,f,n,f,w,b,n,t,e,f,f,w,w,p,w,o,e,k,a,g e,f,y,n,t,l,f,c,b,w,e,r,s,y,w,w,p,w,o,p,n,y,g p,f,s,w,t,p,f,c,n,w,e,e,s,s,w,w,p,w,o,p,n,s,g e,x,f,w,f,n,f,w,b,p,t,e,s,s,w,w,p,w,o,e,n,a,g e,f,f,n,f,n,f,c,n,p,e,e,s,s,w,w,p,w,o,p,k,y,u p,f,y,n,t,p,f,c,n,w,e,e,s,s,w,w,p,w,o,p,n,s,u e,f,s,w,f,n,f,w,b,n,t,e,s,f,w,w,p,w,o,e,n,s,g e,x,f,g,f,n,f,w,b,k,t,e,f,f,w,w,p,w,o,e,n,s,g e,f,y,y,t,a,f,c,b,n,e,r,s,y,w,w,p,w,o,p,n,y,p e,x,f,w,f,n,f,w,b,h,t,e,f,f,w,w,p,w,o,e,n,s,g e,x,f,w,f,n,f,w,b,h,t,e,f,f,w,w,p,w,o,e,k,a,g p,f,y,n,t,p,f,c,n,n,e,e,s,s,w,w,p,w,o,p,n,s,g e,f,f,n,f,n,f,w,b,h,t,e,f,s,w,w,p,w,o,e,k,s,g e,f,f,g,f,n,f,w,b,k,t,e,s,s,w,w,p,w,o,e,n,s,g e,x,f,n,f,n,f,w,b,n,t,e,s,s,w,w,p,w,o,e,k,s,g e,x,f,n,t,n,f,c,b,p,t,b,s,s,p,p,p,w,o,p,n,v,d e,x,s,g,f,n,f,w,b,k,t,e,f,f,w,w,p,w,o,e,k,s,g e,x,s,g,f,n,f,w,b,n,t,e,f,s,w,w,p,w,o,e,k,a,g e,x,f,w,f,n,f,w,b,k,t,e,f,f,w,w,p,w,o,e,k,s,g e,f,f,g,f,n,f,w,b,h,t,e,s,s,w,w,p,w,o,e,n,s,g e,x,f,n,t,n,f,c,b,n,t,b,s,s,w,w,p,w,o,p,k,y,d e,x,s,w,f,n,f,w,b,p,t,e,f,f,w,w,p,w,o,e,n,s,g p,x,s,w,t,p,f,c,n,n,e,e,s,s,w,w,p,w,o,p,n,s,u e,f,s,g,f,n,f,w,b,n,t,e,s,f,w,w,p,w,o,e,n,a,g e,x,s,n,f,n,f,w,b,k,t,e,s,s,w,w,p,w,o,e,n,s,g 
e,x,f,n,t,n,f,c,b,p,t,b,s,s,w,g,p,w,o,p,k,v,d e,x,y,y,t,a,f,c,b,p,e,r,s,y,w,w,p,w,o,p,n,y,g e,x,s,g,f,n,f,w,b,k,t,e,f,f,w,w,p,w,o,e,k,a,g e,x,f,n,f,n,f,w,b,h,t,e,f,s,w,w,p,w,o,e,k,a,g e,x,f,w,f,n,f,w,b,k,t,e,s,s,w,w,p,w,o,e,k,a,g e,x,y,y,t,a,f,c,b,k,e,c,s,s,w,w,p,w,o,p,n,s,g e,x,f,n,t,n,f,c,b,p,t,b,s,s,g,w,p,w,o,p,k,y,d e,x,f,w,f,n,f,w,b,p,t,e,s,s,w,w,p,w,o,e,k,a,g e,x,f,w,f,n,f,w,b,n,t,e,f,s,w,w,p,w,o,e,k,a,g p,f,s,n,t,p,f,c,n,n,e,e,s,s,w,w,p,w,o,p,n,s,u p,f,s,w,t,p,f,c,n,k,e,e,s,s,w,w,p,w,o,p,k,v,u e,x,s,n,f,n,f,w,b,p,t,e,s,s,w,w,p,w,o,e,k,a,g e,x,f,g,f,n,f,c,n,n,e,e,s,s,w,w,p,w,o,p,n,v,u e,f,s,n,f,n,f,w,b,p,t,e,f,f,w,w,p,w,o,e,k,s,g e,x,s,w,f,n,f,w,b,k,t,e,f,s,w,w,p,w,o,e,k,a,g p,f,s,w,t,p,f,c,n,w,e,e,s,s,w,w,p,w,o,p,k,s,g e,b,s,y,t,a,f,c,b,k,e,c,s,s,w,w,p,w,o,p,n,s,g e,x,f,n,f,n,f,w,b,h,t,e,f,f,w,w,p,w,o,e,k,a,g p,f,y,n,t,p,f,c,n,n,e,e,s,s,w,w,p,w,o,p,k,s,u e,x,s,g,f,n,f,w,b,h,t,e,f,f,w,w,p,w,o,e,k,a,g e,x,s,g,f,n,f,w,b,k,t,e,s,f,w,w,p,w,o,e,n,s,g e,x,s,w,f,n,f,w,b,n,t,e,f,s,w,w,p,w,o,e,k,s,g p,f,s,w,t,p,f,c,n,n,e,e,s,s,w,w,p,w,o,p,n,v,u e,f,s,w,f,n,f,w,b,k,t,e,f,f,w,w,p,w,o,e,k,a,g e,x,f,n,f,n,f,w,b,h,t,e,f,s,w,w,p,w,o,e,n,a,g e,f,f,n,f,n,f,w,b,p,t,e,s,s,w,w,p,w,o,e,n,s,g e,x,f,n,f,n,f,w,b,p,t,e,f,f,w,w,p,w,o,e,n,a,g p,f,y,n,t,p,f,c,n,k,e,e,s,s,w,w,p,w,o,p,k,v,g e,x,f,w,f,n,f,w,b,p,t,e,s,f,w,w,p,w,o,e,n,s,g e,f,s,w,f,n,f,w,b,p,t,e,f,s,w,w,p,w,o,e,n,a,g p,f,y,w,t,p,f,c,n,k,e,e,s,s,w,w,p,w,o,p,k,v,u e,f,s,g,f,n,f,w,b,p,t,e,s,f,w,w,p,w,o,e,k,a,g e,f,y,n,t,n,f,c,b,w,t,b,s,s,w,g,p,w,o,p,n,v,d e,x,f,g,f,n,f,w,b,h,t,e,s,f,w,w,p,w,o,e,n,a,g e,x,f,g,t,n,f,c,b,u,t,b,s,s,w,p,p,w,o,p,k,v,d e,x,y,n,t,n,f,c,b,p,t,b,s,s,p,w,p,w,o,p,n,v,d e,f,f,n,t,n,f,c,b,n,t,b,s,s,p,g,p,w,o,p,n,y,d e,f,y,e,t,n,f,c,b,p,t,b,s,s,p,p,p,w,o,p,k,v,d e,f,s,g,f,n,f,w,b,h,t,e,f,s,w,w,p,w,o,e,n,s,g e,x,f,g,t,n,f,c,b,u,t,b,s,s,p,p,p,w,o,p,n,y,d e,x,y,g,t,n,f,c,b,p,t,b,s,s,g,p,p,w,o,p,k,y,d e,x,f,g,f,n,f,w,b,p,t,e,f,s,w,w,p,w,o,e,n,s,g p,f,s,n,t,p,f,c,n,n,e,e,s,s,w,w,p,w,o,p,n,s,g e,f,f,n,t,n,f,c,b,n,t,b,s,s,g,w,p,w,o,p,k,v,d p,x,s,n,t,p,f,c,n,n,e,e,s,s,w,w,p,w,o,p,k,v,g e,x,s,w,t,a,f,c,b,n,e,c,s,s,w,w,p,w,o,p,n,s,g e,x,s,w,f,n,f,w,b,h,t,e,s,s,w,w,p,w,o,e,k,a,g e,x,y,y,t,a,f,c,b,g,e,c,s,s,w,w,p,w,o,p,n,s,g p,x,f,g,f,f,f,c,b,h,e,b,k,k,n,b,p,w,o,l,h,y,g e,f,s,n,f,n,f,w,b,k,t,e,f,s,w,w,p,w,o,e,n,a,g e,f,s,w,f,n,f,w,b,p,t,e,f,f,w,w,p,w,o,e,n,s,g e,x,f,n,t,n,f,c,b,p,t,b,s,s,g,g,p,w,o,p,k,v,d e,f,s,n,f,n,f,w,b,h,t,e,s,s,w,w,p,w,o,e,k,s,g e,x,f,n,f,n,f,w,b,h,t,e,s,f,w,w,p,w,o,e,k,s,g p,x,s,w,t,p,f,c,n,p,e,e,s,s,w,w,p,w,o,p,k,v,u e,f,f,n,t,n,f,c,b,p,t,b,s,s,p,g,p,w,o,p,n,y,d e,x,y,w,t,l,f,c,b,k,e,c,s,s,w,w,p,w,o,p,n,n,g e,f,f,g,f,n,f,w,b,n,t,e,f,s,w,w,p,w,o,e,k,s,g e,x,y,e,t,n,f,c,b,n,t,b,s,s,w,g,p,w,o,p,n,v,d e,f,f,w,f,n,f,w,b,n,t,e,s,s,w,w,p,w,o,e,k,s,g e,x,s,g,f,n,f,w,b,p,t,e,f,s,w,w,p,w,o,e,n,a,g e,x,y,n,t,n,f,c,b,n,t,b,s,s,w,p,p,w,o,p,k,v,d e,x,y,g,t,n,f,c,b,w,t,b,s,s,w,g,p,w,o,p,n,y,d e,x,f,g,t,n,f,c,b,u,t,b,s,s,g,w,p,w,o,p,k,v,d e,f,s,w,f,n,f,w,b,n,t,e,f,s,w,w,p,w,o,e,n,a,g e,x,f,g,t,n,f,c,b,u,t,b,s,s,p,w,p,w,o,p,k,v,d e,x,f,n,t,n,f,c,b,u,t,b,s,s,g,g,p,w,o,p,n,v,d e,x,f,e,t,n,f,c,b,w,t,b,s,s,g,w,p,w,o,p,n,v,d e,x,y,n,t,n,f,c,b,w,t,b,s,s,p,g,p,w,o,p,n,y,d e,f,f,w,f,n,f,w,b,h,t,e,f,f,w,w,p,w,o,e,n,s,g e,x,y,n,t,n,f,c,b,p,t,b,s,s,p,w,p,w,o,p,k,y,d e,x,s,g,f,n,f,w,b,n,t,e,s,f,w,w,p,w,o,e,k,s,g e,x,f,g,f,n,f,w,b,h,t,e,f,s,w,w,p,w,o,e,k,a,g e,f,f,w,f,n,f,w,b,p,t,e,s,s,w,w,p,w,o,e,n,a,g e,x,y,g,t,n,f,c,b,p,t,b,s,s,w,p,p,w,o,p,k,v,d p,f,y,n,t,p,f,c,n,k,e,e,s,s,w,w,p,w,o,p,n,s,u e,x,y,e,t,n,f,c,b,p,t,b,s,s,p,g,p,w,o,p,k,v,d 
e,f,f,w,f,n,f,w,b,p,t,e,s,f,w,w,p,w,o,e,k,s,g e,f,s,g,f,n,f,w,b,h,t,e,f,s,w,w,p,w,o,e,n,a,g e,x,y,g,t,n,f,c,b,w,t,b,s,s,g,p,p,w,o,p,n,v,d e,f,f,g,t,n,f,c,b,p,t,b,s,s,p,w,p,w,o,p,k,v,d e,x,f,n,t,n,f,c,b,w,t,b,s,s,g,w,p,w,o,p,k,v,d e,f,f,e,t,n,f,c,b,n,t,b,s,s,w,p,p,w,o,p,k,v,d e,x,s,w,f,n,f,w,b,h,t,e,f,s,w,w,p,w,o,e,n,a,g e,x,f,e,t,n,f,c,b,w,t,b,s,s,w,g,p,w,o,p,n,v,d e,x,f,n,t,n,f,c,b,p,t,b,s,s,w,p,p,w,o,p,k,v,d e,x,f,g,t,n,f,c,b,u,t,b,s,s,w,w,p,w,o,p,k,y,d e,x,f,n,t,n,f,c,b,n,t,b,s,s,p,w,p,w,o,p,n,y,d e,x,f,y,t,a,f,w,n,w,t,b,s,s,w,w,p,w,o,p,n,v,d e,x,f,e,t,n,f,c,b,w,t,b,s,s,g,g,p,w,o,p,n,v,d p,f,s,w,t,p,f,c,n,w,e,e,s,s,w,w,p,w,o,p,n,v,g e,x,s,w,f,n,f,w,b,h,t,e,f,f,w,w,p,w,o,e,k,s,g e,x,f,e,t,n,f,c,b,u,t,b,s,s,g,g,p,w,o,p,n,y,d e,x,y,n,t,n,f,c,b,w,t,b,s,s,p,p,p,w,o,p,k,v,d e,x,f,w,f,n,f,w,b,h,t,e,f,s,w,w,p,w,o,e,k,a,g e,x,f,g,t,n,f,c,b,w,t,b,s,s,p,w,p,w,o,p,n,y,d e,x,s,g,f,n,f,w,b,n,t,e,s,s,w,w,p,w,o,e,n,s,g e,x,y,e,t,n,f,c,b,w,t,b,s,s,p,g,p,w,o,p,n,y,d e,x,y,n,t,n,f,c,b,u,t,b,s,s,p,w,p,w,o,p,n,v,d e,x,s,w,f,n,f,w,b,p,t,e,f,s,w,w,p,w,o,e,k,a,g e,x,s,n,f,n,f,w,b,n,t,e,s,f,w,w,p,w,o,e,k,a,g e,x,s,w,f,n,f,w,b,n,t,e,s,s,w,w,p,w,o,e,k,s,g e,x,s,n,f,n,f,w,b,n,t,e,s,f,w,w,p,w,o,e,k,s,g e,f,f,e,t,n,f,c,b,n,t,b,s,s,p,g,p,w,o,p,n,y,d e,f,f,w,f,n,f,w,b,n,t,e,s,f,w,w,p,w,o,e,k,a,g e,f,f,g,f,n,f,w,b,k,t,e,s,s,w,w,p,w,o,e,k,a,g e,x,y,e,t,n,f,c,b,w,t,b,s,s,g,g,p,w,o,p,n,v,d e,x,f,e,t,n,f,c,b,p,t,b,s,s,p,w,p,w,o,p,n,y,d e,x,y,g,t,n,f,c,b,u,t,b,s,s,w,w,p,w,o,p,n,v,d e,x,f,e,t,n,f,c,b,p,t,b,s,s,w,g,p,w,o,p,k,y,d e,f,s,n,f,n,f,w,b,h,t,e,s,s,w,w,p,w,o,e,n,s,g e,x,s,w,f,n,f,w,b,n,t,e,s,s,w,w,p,w,o,e,k,a,g e,x,s,w,f,n,f,w,b,h,t,e,s,f,w,w,p,w,o,e,k,a,g e,x,f,w,f,n,f,w,b,k,t,e,s,s,w,w,p,w,o,e,n,a,g e,x,f,e,t,n,f,c,b,u,t,b,s,s,g,p,p,w,o,p,n,v,d e,x,f,n,t,n,f,c,b,u,t,b,s,s,g,g,p,w,o,p,n,y,d e,f,f,n,t,n,f,c,b,w,t,b,s,s,g,p,p,w,o,p,k,v,d e,f,f,w,t,l,f,w,n,p,t,b,s,s,w,w,p,w,o,p,n,v,d e,f,f,w,f,n,f,w,b,k,t,e,f,f,w,w,p,w,o,e,k,a,g p,f,y,w,t,p,f,c,n,n,e,e,s,s,w,w,p,w,o,p,k,s,g e,f,f,g,t,n,f,c,b,n,t,b,s,s,g,g,p,w,o,p,k,v,d e,x,f,n,t,n,f,c,b,w,t,b,s,s,w,g,p,w,o,p,n,v,d e,x,s,w,f,n,f,w,b,k,t,e,f,f,w,w,p,w,o,e,n,a,g e,x,f,w,f,n,f,w,b,h,t,e,f,f,w,w,p,w,o,e,n,a,g e,x,s,n,f,n,f,w,b,h,t,e,s,s,w,w,p,w,o,e,n,a,g e,x,f,n,f,n,f,w,b,n,t,e,s,s,w,w,p,w,o,e,n,a,g e,x,s,g,f,n,f,w,b,h,t,e,s,s,w,w,p,w,o,e,n,a,g e,x,y,n,t,n,f,c,b,w,t,b,s,s,g,g,p,w,o,p,n,v,d e,x,f,n,t,n,f,c,b,u,t,b,s,s,p,p,p,w,o,p,n,y,d e,f,s,g,f,n,f,w,b,k,t,e,f,f,w,w,p,w,o,e,n,s,g e,f,s,n,f,n,f,w,b,n,t,e,f,f,w,w,p,w,o,e,k,a,g e,x,s,g,f,n,f,w,b,p,t,e,f,s,w,w,p,w,o,e,n,s,g e,f,f,g,t,n,f,c,b,n,t,b,s,s,p,w,p,w,o,p,k,y,d e,x,s,w,f,n,f,w,b,h,t,e,f,f,w,w,p,w,o,e,n,a,g e,f,f,n,f,n,f,w,b,k,t,e,s,f,w,w,p,w,o,e,n,a,g e,f,s,w,f,n,f,w,b,h,t,e,f,s,w,w,p,w,o,e,k,s,g e,x,f,g,f,n,f,w,b,k,t,e,s,s,w,w,p,w,o,e,n,s,g e,f,f,n,f,n,f,w,b,k,t,e,s,s,w,w,p,w,o,e,n,a,g e,f,f,n,f,n,f,w,b,p,t,e,f,f,w,w,p,w,o,e,k,a,g e,x,f,n,t,n,f,c,b,w,t,b,s,s,w,p,p,w,o,p,n,v,d e,f,s,g,f,n,f,w,b,h,t,e,s,f,w,w,p,w,o,e,k,a,g e,f,s,w,f,n,f,w,b,n,t,e,f,f,w,w,p,w,o,e,n,s,g e,x,y,e,t,n,f,c,b,p,t,b,s,s,g,w,p,w,o,p,k,v,d e,x,y,g,t,n,f,c,b,u,t,b,s,s,g,g,p,w,o,p,k,v,d e,x,y,e,t,n,f,c,b,n,t,b,s,s,g,p,p,w,o,p,k,v,d e,x,f,g,f,n,f,w,b,h,t,e,s,s,w,w,p,w,o,e,k,s,g e,f,f,g,f,n,f,w,b,p,t,e,f,s,w,w,p,w,o,e,n,a,g e,f,y,y,t,l,f,c,b,p,e,r,s,y,w,w,p,w,o,p,n,y,g p,f,y,w,t,p,f,c,n,w,e,e,s,s,w,w,p,w,o,p,k,v,u e,f,s,n,f,n,f,w,b,h,t,e,f,f,w,w,p,w,o,e,k,a,g e,f,f,n,f,n,f,w,b,k,t,e,s,f,w,w,p,w,o,e,k,s,g e,f,f,n,t,n,f,c,b,p,t,b,s,s,g,w,p,w,o,p,k,v,d e,x,f,e,t,n,f,c,b,u,t,b,s,s,g,w,p,w,o,p,k,y,d e,f,f,g,f,n,f,w,b,p,t,e,s,s,w,w,p,w,o,e,n,a,g 
e,x,f,n,f,n,f,w,b,n,t,e,f,s,w,w,p,w,o,e,k,s,g e,x,f,g,f,n,f,w,b,h,t,e,f,f,w,w,p,w,o,e,n,s,g e,x,y,e,t,n,f,c,b,w,t,b,s,s,g,w,p,w,o,p,n,v,d e,f,f,n,t,n,f,c,b,w,t,b,s,s,g,w,p,w,o,p,k,v,d e,x,f,n,f,n,f,w,b,p,t,e,s,f,w,w,p,w,o,e,k,s,g e,x,f,n,f,n,f,w,b,p,t,e,s,f,w,w,p,w,o,e,k,a,g e,f,s,w,f,n,f,w,b,h,t,e,s,f,w,w,p,w,o,e,k,s,g e,f,s,g,f,n,f,w,b,p,t,e,s,s,w,w,p,w,o,e,k,s,g p,f,y,w,t,p,f,c,n,w,e,e,s,s,w,w,p,w,o,p,k,s,u e,b,s,w,t,a,f,c,b,k,e,c,s,s,w,w,p,w,o,p,n,s,g e,x,f,n,t,n,f,c,b,w,t,b,s,s,g,w,p,w,o,p,n,v,d e,f,f,g,f,n,f,w,b,k,t,e,f,f,w,w,p,w,o,e,k,a,g e,x,f,n,f,n,f,w,b,k,t,e,f,s,w,w,p,w,o,e,n,s,g e,x,s,n,f,n,f,w,b,k,t,e,s,f,w,w,p,w,o,e,n,s,g e,f,s,w,t,l,f,w,n,n,t,b,s,s,w,w,p,w,o,p,u,v,d e,x,f,w,f,n,f,w,b,h,t,e,s,s,w,w,p,w,o,e,n,a,g e,x,s,w,f,n,f,w,b,h,t,e,f,s,w,w,p,w,o,e,k,a,g e,x,s,g,f,n,f,w,b,k,t,e,f,s,w,w,p,w,o,e,k,s,g e,x,y,e,t,n,f,c,b,p,t,b,s,s,w,p,p,w,o,p,k,v,d e,f,f,n,t,n,f,c,b,n,t,b,s,s,g,p,p,w,o,p,k,v,d e,x,f,n,t,n,f,c,b,n,t,b,s,s,w,w,p,w,o,p,k,v,d e,f,f,w,f,n,f,w,b,k,t,e,s,s,w,w,p,w,o,e,k,s,g e,f,s,n,f,n,f,w,b,n,t,e,f,s,w,w,p,w,o,e,n,s,g p,f,s,w,t,p,f,c,n,k,e,e,s,s,w,w,p,w,o,p,n,v,u e,f,f,n,f,n,f,w,b,p,t,e,f,f,w,w,p,w,o,e,n,s,g e,x,y,g,t,n,f,c,b,w,t,b,s,s,g,g,p,w,o,p,n,y,d e,x,y,g,t,n,f,c,b,n,t,b,s,s,g,g,p,w,o,p,n,v,d e,x,f,g,f,n,f,w,b,p,t,e,s,s,w,w,p,w,o,e,n,a,g e,x,f,g,t,n,f,c,b,w,t,b,s,s,p,p,p,w,o,p,n,v,d e,x,y,e,t,n,f,c,b,u,t,b,s,s,p,p,p,w,o,p,n,v,d e,x,f,g,t,n,f,c,b,p,t,b,s,s,g,p,p,w,o,p,k,y,d e,x,f,g,t,n,f,c,b,u,t,b,s,s,p,w,p,w,o,p,n,v,d e,x,f,n,t,n,f,c,b,p,t,b,s,s,p,g,p,w,o,p,k,v,d p,x,s,w,t,p,f,c,n,p,e,e,s,s,w,w,p,w,o,p,k,v,g e,x,f,n,f,n,f,w,b,k,t,e,s,s,w,w,p,w,o,e,k,s,g e,x,y,n,t,n,f,c,b,w,t,b,s,s,p,p,p,w,o,p,n,v,d e,x,f,g,t,n,f,c,b,p,t,b,s,s,w,g,p,w,o,p,n,y,d p,f,y,n,t,p,f,c,n,w,e,e,s,s,w,w,p,w,o,p,n,s,g e,x,f,g,t,n,f,c,b,u,t,b,s,s,w,g,p,w,o,p,k,y,d e,x,f,g,f,n,f,w,b,h,t,e,s,f,w,w,p,w,o,e,k,s,g e,x,f,e,t,n,f,c,b,u,t,b,s,s,g,g,p,w,o,p,k,v,d e,f,f,n,f,n,f,w,b,k,t,e,f,s,w,w,p,w,o,e,n,s,g e,f,s,w,f,n,f,w,b,h,t,e,f,f,w,w,p,w,o,e,n,a,g e,f,s,n,f,n,f,w,b,p,t,e,s,s,w,w,p,w,o,e,k,s,g e,x,y,e,t,n,f,c,b,w,t,b,s,s,w,g,p,w,o,p,n,v,d e,f,s,n,f,n,f,w,b,k,t,e,f,f,w,w,p,w,o,e,k,s,g e,f,f,n,t,n,f,c,b,n,t,b,s,s,p,p,p,w,o,p,n,v,d e,f,s,w,f,n,f,w,b,h,t,e,s,s,w,w,p,w,o,e,k,s,g e,x,f,n,f,n,f,w,b,k,t,e,f,f,w,w,p,w,o,e,k,a,g e,f,f,n,f,n,f,w,b,h,t,e,s,f,w,w,p,w,o,e,n,s,g p,x,s,n,t,p,f,c,n,w,e,e,s,s,w,w,p,w,o,p,k,v,u e,x,f,n,t,n,f,c,b,p,t,b,s,s,w,w,p,w,o,p,k,y,d e,f,s,n,f,n,f,w,b,k,t,e,f,s,w,w,p,w,o,e,n,s,g p,f,y,w,t,p,f,c,n,n,e,e,s,s,w,w,p,w,o,p,n,s,g e,x,f,n,t,n,f,c,b,n,t,b,s,s,p,g,p,w,o,p,n,v,d e,x,f,g,t,n,f,c,b,u,t,b,s,s,w,p,p,w,o,p,n,y,d e,f,f,n,f,n,f,w,b,p,t,e,f,f,w,w,p,w,o,e,n,a,g e,x,f,e,t,n,f,c,b,p,t,b,s,s,g,g,p,w,o,p,k,v,d e,f,f,w,f,n,f,w,b,p,t,e,s,s,w,w,p,w,o,e,n,s,g e,x,f,w,f,n,f,w,b,h,t,e,f,f,w,w,p,w,o,e,k,s,g e,x,y,w,t,l,f,c,b,k,e,c,s,s,w,w,p,w,o,p,k,n,m e,x,s,w,f,n,f,w,b,p,t,e,f,s,w,w,p,w,o,e,n,s,g e,x,y,g,t,n,f,c,b,u,t,b,s,s,p,g,p,w,o,p,n,v,d e,x,f,g,t,n,f,c,b,n,t,b,s,s,g,w,p,w,o,p,k,y,d e,x,s,g,f,n,f,w,b,n,t,e,f,s,w,w,p,w,o,e,n,a,g p,f,y,w,t,p,f,c,n,n,e,e,s,s,w,w,p,w,o,p,n,s,u e,x,f,e,t,n,f,c,b,w,t,b,s,s,w,p,p,w,o,p,n,v,d p,f,s,n,t,p,f,c,n,n,e,e,s,s,w,w,p,w,o,p,k,s,u p,f,y,n,t,p,f,c,n,w,e,e,s,s,w,w,p,w,o,p,n,v,g e,x,f,g,t,n,f,c,b,p,t,b,s,s,p,p,p,w,o,p,n,v,d e,f,s,n,f,n,f,w,b,h,t,e,s,f,w,w,p,w,o,e,n,a,g e,x,f,g,t,n,f,c,b,p,t,b,s,s,g,w,p,w,o,p,n,y,d e,x,f,n,f,n,f,w,b,h,t,e,s,f,w,w,p,w,o,e,k,a,g e,x,f,n,t,n,f,c,b,w,t,b,s,s,p,g,p,w,o,p,n,y,d e,f,s,n,f,n,f,w,b,p,t,e,f,s,w,w,p,w,o,e,n,s,g e,f,f,n,f,n,f,w,b,p,t,e,s,f,w,w,p,w,o,e,k,a,g e,x,s,n,f,n,f,w,b,k,t,e,f,f,w,w,p,w,o,e,k,a,g 
e,x,s,n,f,n,f,w,b,k,t,e,s,f,w,w,p,w,o,e,k,s,g e,x,f,n,t,n,f,c,b,w,t,b,s,s,g,g,p,w,o,p,n,v,d e,x,y,e,t,n,f,c,b,u,t,b,s,s,g,w,p,w,o,p,n,v,d e,f,s,n,f,n,f,w,b,k,t,e,f,f,w,w,p,w,o,e,k,a,g e,x,f,n,t,n,f,c,b,w,t,b,s,s,g,g,p,w,o,p,k,y,d p,f,y,w,t,p,f,c,n,w,e,e,s,s,w,w,p,w,o,p,n,v,u e,x,y,y,t,l,f,c,b,n,e,r,s,y,w,w,p,w,o,p,n,s,g p,f,s,n,t,p,f,c,n,p,e,e,s,s,w,w,p,w,o,p,k,s,g e,f,f,w,f,n,f,w,b,h,t,e,f,s,w,w,p,w,o,e,n,s,g e,f,f,n,f,n,f,w,b,n,t,e,f,f,w,w,p,w,o,e,k,a,g p,f,s,n,t,p,f,c,n,w,e,e,s,s,w,w,p,w,o,p,n,v,g e,x,s,n,f,n,f,w,b,k,t,e,f,f,w,w,p,w,o,e,n,a,g e,x,f,n,f,n,f,w,b,n,t,e,s,f,w,w,p,w,o,e,k,s,g e,x,f,e,t,n,f,c,b,u,t,b,s,s,g,w,p,w,o,p,k,v,d e,f,f,n,f,n,f,w,b,k,t,e,s,s,w,w,p,w,o,e,k,a,g e,x,f,w,f,n,f,w,b,k,t,e,s,s,w,w,p,w,o,e,n,s,g e,f,s,g,f,n,f,w,b,p,t,e,f,s,w,w,p,w,o,e,n,a,g e,f,f,w,f,n,f,w,b,h,t,e,f,s,w,w,p,w,o,e,k,s,g e,f,s,y,t,a,f,w,n,n,t,b,s,s,w,w,p,w,o,p,n,v,d e,f,f,w,t,a,f,w,n,n,t,b,s,s,w,w,p,w,o,p,n,v,d e,f,f,n,f,n,f,w,b,p,t,e,s,f,w,w,p,w,o,e,n,a,g e,x,f,n,t,n,f,c,b,u,t,b,s,s,w,w,p,w,o,p,n,y,d e,x,y,g,t,n,f,c,b,n,t,b,s,s,g,p,p,w,o,p,k,y,d e,x,f,n,t,n,f,c,b,u,t,b,s,s,w,g,p,w,o,p,n,y,d e,x,f,n,f,n,f,w,b,k,t,e,s,f,w,w,p,w,o,e,k,s,g e,x,f,g,t,n,f,c,b,u,t,b,s,s,w,w,p,w,o,p,n,y,d e,x,f,n,t,n,f,c,b,n,t,b,s,s,p,p,p,w,o,p,k,v,d e,x,f,g,f,n,f,w,b,p,t,e,f,s,w,w,p,w,o,e,k,a,g e,x,f,n,t,n,f,c,b,n,t,b,s,s,w,p,p,w,o,p,n,v,d e,x,s,n,f,n,f,w,b,h,t,e,f,f,w,w,p,w,o,e,k,a,g e,b,y,w,t,l,f,c,b,k,e,c,s,s,w,w,p,w,o,p,k,s,m e,x,y,e,t,n,f,c,b,u,t,b,s,s,w,p,p,w,o,p,k,v,d e,f,s,n,f,n,f,w,b,h,t,e,s,f,w,w,p,w,o,e,n,s,g e,x,f,e,t,n,f,c,b,w,t,b,s,s,p,p,p,w,o,p,k,y,d e,f,y,n,t,n,f,c,b,p,t,b,s,s,g,g,p,w,o,p,k,y,d e,x,y,g,t,n,f,c,b,u,t,b,s,s,p,g,p,w,o,p,k,v,d e,x,s,g,f,n,f,w,b,p,t,e,s,s,w,w,p,w,o,e,n,a,g e,x,s,n,f,n,f,w,b,k,t,e,f,s,w,w,p,w,o,e,k,s,g e,x,f,g,f,n,f,w,b,p,t,e,f,f,w,w,p,w,o,e,n,a,g e,x,s,g,f,n,f,w,b,n,t,e,s,s,w,w,p,w,o,e,k,a,g e,x,y,e,t,n,f,c,b,w,t,b,s,s,w,p,p,w,o,p,k,y,d e,f,f,g,t,n,f,c,b,w,t,b,s,s,p,w,p,w,o,p,n,y,d e,f,f,n,t,n,f,c,b,n,t,b,s,s,p,w,p,w,o,p,k,y,d e,x,f,n,f,n,f,w,b,k,t,e,f,f,w,w,p,w,o,e,n,s,g e,f,s,n,f,n,f,w,b,n,t,e,s,f,w,w,p,w,o,e,n,s,g e,x,f,w,f,n,f,w,b,n,t,e,s,s,w,w,p,w,o,e,k,s,g e,x,y,y,t,a,f,c,b,p,e,r,s,y,w,w,p,w,o,p,n,s,p e,x,f,e,t,n,f,c,b,u,t,b,s,s,w,p,p,w,o,p,n,v,d e,x,f,n,f,n,f,w,b,h,t,e,f,s,w,w,p,w,o,e,k,s,g e,x,y,n,t,n,f,c,b,p,t,b,s,s,w,g,p,w,o,p,k,v,d e,f,f,g,t,n,f,c,b,n,t,b,s,s,w,p,p,w,o,p,n,y,d e,x,f,w,f,n,f,w,b,k,t,e,f,s,w,w,p,w,o,e,n,a,g e,x,y,w,t,a,f,c,b,k,e,c,s,s,w,w,p,w,o,p,k,s,m e,x,s,n,f,n,f,w,b,p,t,e,s,s,w,w,p,w,o,e,n,s,g e,f,s,w,f,n,f,w,b,p,t,e,s,s,w,w,p,w,o,e,n,a,g e,f,f,n,t,n,f,c,b,u,t,b,s,s,g,w,p,w,o,p,n,v,d e,f,f,w,f,n,f,w,b,k,t,e,f,f,w,w,p,w,o,e,k,s,g e,x,f,g,t,n,f,c,b,u,t,b,s,s,g,p,p,w,o,p,n,v,d e,x,f,n,t,n,f,c,b,p,t,b,s,s,w,g,p,w,o,p,n,y,d e,x,f,w,f,n,f,w,b,p,t,e,s,f,w,w,p,w,o,e,n,a,g e,x,s,w,t,a,f,c,b,n,e,c,s,s,w,w,p,w,o,p,k,n,m e,f,s,g,f,n,f,w,b,n,t,e,f,s,w,w,p,w,o,e,n,s,g p,f,y,n,t,p,f,c,n,n,e,e,s,s,w,w,p,w,o,p,n,v,g e,f,f,g,f,n,f,w,b,p,t,e,f,s,w,w,p,w,o,e,n,s,g e,x,f,n,t,n,f,c,b,n,t,b,s,s,p,w,p,w,o,p,n,v,d e,x,f,w,f,n,f,w,b,n,t,e,s,f,w,w,p,w,o,e,n,s,g e,x,y,n,t,n,f,c,b,u,t,b,s,s,p,w,p,w,o,p,k,v,d e,x,y,e,t,n,f,c,b,p,t,b,s,s,p,p,p,w,o,p,n,v,d p,f,s,n,t,p,f,c,n,n,e,e,s,s,w,w,p,w,o,p,k,v,u e,f,s,g,f,n,f,w,b,p,t,e,s,s,w,w,p,w,o,e,n,a,g e,x,f,g,t,n,f,c,b,n,t,b,s,s,w,g,p,w,o,p,k,y,d e,f,f,g,t,n,f,c,b,n,t,b,s,s,g,p,p,w,o,p,k,y,d e,x,f,g,t,n,f,c,b,n,t,b,s,s,w,g,p,w,o,p,k,v,d e,x,f,g,t,n,f,c,b,w,t,b,s,s,p,p,p,w,o,p,n,y,d e,x,f,n,t,n,f,c,b,u,t,b,s,s,p,g,p,w,o,p,k,y,d p,x,s,w,t,p,f,c,n,p,e,e,s,s,w,w,p,w,o,p,n,v,u e,x,s,y,t,l,f,c,b,k,e,c,s,s,w,w,p,w,o,p,k,n,m 
e,x,f,n,t,n,f,c,b,n,t,b,s,s,p,p,p,w,o,p,n,v,d e,f,y,n,t,n,f,c,b,w,t,b,s,s,w,w,p,w,o,p,n,y,d e,x,s,g,f,n,f,w,b,n,t,e,f,f,w,w,p,w,o,e,n,s,g e,x,y,e,t,n,f,c,b,u,t,b,s,s,w,p,p,w,o,p,n,y,d e,x,f,g,f,n,f,w,b,k,t,e,f,s,w,w,p,w,o,e,n,s,g e,f,s,w,f,n,f,w,b,p,t,e,f,s,w,w,p,w,o,e,n,s,g e,x,f,n,t,n,f,c,b,n,t,b,s,s,g,g,p,w,o,p,n,v,d e,f,f,n,t,n,f,c,b,u,t,b,s,s,w,p,p,w,o,p,n,v,d e,f,f,g,f,n,f,w,b,n,t,e,s,f,w,w,p,w,o,e,k,s,g e,f,f,g,f,n,f,w,b,n,t,e,s,s,w,w,p,w,o,e,k,s,g e,f,f,n,f,n,f,w,b,h,t,e,f,f,w,w,p,w,o,e,n,s,g e,x,f,g,t,n,f,c,b,p,t,b,s,s,g,w,p,w,o,p,k,y,d e,x,f,e,t,n,f,c,b,n,t,b,s,s,p,g,p,w,o,p,n,y,d p,f,y,n,t,p,f,c,n,p,e,e,s,s,w,w,p,w,o,p,k,s,u e,x,y,g,t,n,f,c,b,p,t,b,s,s,g,g,p,w,o,p,k,v,d e,x,s,w,f,n,f,w,b,k,t,e,s,f,w,w,p,w,o,e,n,a,g e,b,s,w,t,a,f,c,b,g,e,c,s,s,w,w,p,w,o,p,n,s,m e,x,s,w,f,n,f,w,b,h,t,e,f,f,w,w,p,w,o,e,n,s,g e,f,f,n,f,n,f,w,b,k,t,e,f,f,w,w,p,w,o,e,k,s,g e,x,y,g,t,n,f,c,b,p,t,b,s,s,g,w,p,w,o,p,k,y,d e,f,f,g,t,n,f,c,b,n,t,b,s,s,w,w,p,w,o,p,n,v,d e,x,f,g,f,n,f,w,b,p,t,e,f,s,w,w,p,w,o,e,k,s,g p,x,s,w,t,p,f,c,n,n,e,e,s,s,w,w,p,w,o,p,k,s,g e,x,s,n,f,n,f,w,b,k,t,e,s,s,w,w,p,w,o,e,n,a,g e,x,y,g,t,n,f,c,b,p,t,b,s,s,w,g,p,w,o,p,k,y,d e,f,f,g,t,n,f,c,b,p,t,b,s,s,w,w,p,w,o,p,n,v,d e,x,y,n,t,n,f,c,b,w,t,b,s,s,p,w,p,w,o,p,n,v,d e,x,f,e,t,n,f,c,b,p,t,b,s,s,p,g,p,w,o,p,n,y,d e,f,f,n,t,n,f,c,b,w,t,b,s,s,g,p,p,w,o,p,n,y,d e,x,f,e,t,n,f,c,b,u,t,b,s,s,g,g,p,w,o,p,k,y,d e,f,f,n,t,n,f,c,b,p,t,b,s,s,w,w,p,w,o,p,n,v,d e,x,y,e,t,n,f,c,b,u,t,b,s,s,p,p,p,w,o,p,k,y,d e,x,f,g,t,n,f,c,b,w,t,b,s,s,p,w,p,w,o,p,k,v,d e,f,f,g,t,n,f,c,b,p,t,b,s,s,w,w,p,w,o,p,k,v,d e,x,f,e,t,n,f,c,b,w,t,b,s,s,g,w,p,w,o,p,k,v,d e,x,y,e,t,n,f,c,b,u,t,b,s,s,g,p,p,w,o,p,n,v,d p,x,s,w,t,p,f,c,n,w,e,e,s,s,w,w,p,w,o,p,n,v,g e,x,f,e,t,n,f,c,b,n,t,b,s,s,p,g,p,w,o,p,n,v,d e,x,y,g,t,n,f,c,b,p,t,b,s,s,w,w,p,w,o,p,k,v,d e,x,f,n,t,n,f,c,b,p,t,b,s,s,g,g,p,w,o,p,n,y,d e,x,y,e,t,n,f,c,b,w,t,b,s,s,w,g,p,w,o,p,k,y,d e,x,y,g,t,n,f,c,b,n,t,b,s,s,g,p,p,w,o,p,k,v,d e,x,f,g,t,n,f,c,b,n,t,b,s,s,p,p,p,w,o,p,k,v,d e,x,y,g,t,n,f,c,b,u,t,b,s,s,g,w,p,w,o,p,k,v,d e,x,f,e,t,n,f,c,b,p,t,b,s,s,w,w,p,w,o,p,n,y,d e,f,y,e,t,n,f,c,b,n,t,b,s,s,w,w,p,w,o,p,n,y,d e,f,f,n,t,n,f,c,b,p,t,b,s,s,g,p,p,w,o,p,k,v,d e,x,f,n,t,n,f,c,b,u,t,b,s,s,w,p,p,w,o,p,k,v,d e,f,y,g,t,n,f,c,b,w,t,b,s,s,p,p,p,w,o,p,k,v,d e,x,y,e,t,n,f,c,b,u,t,b,s,s,p,g,p,w,o,p,n,v,d e,x,f,g,t,n,f,c,b,p,t,b,s,s,g,g,p,w,o,p,n,y,d e,x,y,g,t,n,f,c,b,n,t,b,s,s,w,p,p,w,o,p,k,v,d p,x,f,g,f,f,f,c,b,p,e,b,k,k,b,b,p,w,o,l,h,y,g p,f,s,w,t,p,f,c,n,p,e,e,s,s,w,w,p,w,o,p,k,s,g e,x,f,e,t,n,f,c,b,n,t,b,s,s,p,w,p,w,o,p,k,v,d e,x,f,w,f,n,f,w,b,h,t,e,s,s,w,w,p,w,o,e,n,s,g e,x,y,n,t,n,f,c,b,u,t,b,s,s,g,w,p,w,o,p,n,v,d e,f,f,n,t,n,f,c,b,n,t,b,s,s,g,w,p,w,o,p,n,y,d e,x,f,n,t,n,f,c,b,w,t,b,s,s,p,g,p,w,o,p,n,v,d e,x,y,g,t,n,f,c,b,u,t,b,s,s,p,w,p,w,o,p,n,y,d e,x,y,g,t,n,f,c,b,w,t,b,s,s,g,w,p,w,o,p,n,v,d e,x,f,e,t,n,f,c,b,u,t,b,s,s,w,p,p,w,o,p,k,v,d e,x,y,n,t,n,f,c,b,n,t,b,s,s,w,g,p,w,o,p,n,y,d e,x,f,g,t,n,f,c,b,w,t,b,s,s,p,g,p,w,o,p,k,y,d e,x,f,e,t,n,f,c,b,w,t,b,s,s,p,w,p,w,o,p,k,v,d e,x,y,e,t,n,f,c,b,u,t,b,s,s,w,g,p,w,o,p,k,v,d e,x,y,n,t,n,f,c,b,p,t,b,s,s,p,w,p,w,o,p,n,y,d e,x,y,e,t,n,f,c,b,n,t,b,s,s,w,w,p,w,o,p,k,y,d e,f,y,n,t,n,f,c,b,p,t,b,s,s,w,g,p,w,o,p,n,v,d e,f,y,n,t,n,f,c,b,n,t,b,s,s,p,w,p,w,o,p,k,v,d e,x,y,n,t,n,f,c,b,n,t,b,s,s,g,g,p,w,o,p,n,v,d p,f,s,n,t,p,f,c,n,n,e,e,s,s,w,w,p,w,o,p,n,v,u e,f,f,e,t,n,f,c,b,u,t,b,s,s,w,w,p,w,o,p,n,y,d p,x,s,w,t,p,f,c,n,w,e,e,s,s,w,w,p,w,o,p,k,s,g e,x,f,e,t,n,f,c,b,u,t,b,s,s,g,p,p,w,o,p,k,y,d e,f,f,n,t,n,f,c,b,p,t,b,s,s,g,g,p,w,o,p,k,y,d e,x,y,n,t,n,f,c,b,u,t,b,s,s,p,g,p,w,o,p,n,v,d 
e,f,f,e,t,n,f,c,b,p,t,b,s,s,w,g,p,w,o,p,n,v,d e,x,f,e,t,n,f,c,b,n,t,b,s,s,w,g,p,w,o,p,n,y,d e,x,f,g,t,n,f,c,b,p,t,b,s,s,g,p,p,w,o,p,n,v,d e,x,y,n,t,n,f,c,b,n,t,b,s,s,p,g,p,w,o,p,n,y,d e,f,f,n,t,n,f,c,b,u,t,b,s,s,w,g,p,w,o,p,n,v,d e,f,f,n,t,n,f,c,b,w,t,b,s,s,p,p,p,w,o,p,n,y,d e,x,f,e,t,n,f,c,b,u,t,b,s,s,w,g,p,w,o,p,k,v,d e,x,f,n,t,n,f,c,b,u,t,b,s,s,w,g,p,w,o,p,k,y,d e,x,f,e,t,n,f,c,b,p,t,b,s,s,g,w,p,w,o,p,n,y,d e,f,f,g,t,n,f,c,b,p,t,b,s,s,g,p,p,w,o,p,k,v,d e,x,y,n,t,n,f,c,b,u,t,b,s,s,w,p,p,w,o,p,k,y,d e,f,f,n,t,n,f,c,b,u,t,b,s,s,w,p,p,w,o,p,n,y,d e,x,f,e,t,n,f,c,b,n,t,b,s,s,w,p,p,w,o,p,n,y,d e,x,y,e,t,n,f,c,b,w,t,b,s,s,p,w,p,w,o,p,n,y,d e,x,f,g,t,n,f,c,b,p,t,b,s,s,p,g,p,w,o,p,k,y,d e,x,y,g,t,n,f,c,b,n,t,b,s,s,g,g,p,w,o,p,k,y,d e,x,y,e,t,n,f,c,b,p,t,b,s,s,p,g,p,w,o,p,n,v,d e,x,y,g,t,n,f,c,b,p,t,b,s,s,g,p,p,w,o,p,k,v,d e,x,f,g,t,n,f,c,b,n,t,b,s,s,g,g,p,w,o,p,n,v,d e,f,f,g,t,n,f,c,b,n,t,b,s,s,g,g,p,w,o,p,n,v,d e,x,y,g,t,n,f,c,b,n,t,b,s,s,g,w,p,w,o,p,k,v,d e,x,y,e,t,n,f,c,b,n,t,b,s,s,w,g,p,w,o,p,n,y,d e,f,f,n,t,n,f,c,b,p,t,b,s,s,w,p,p,w,o,p,k,v,d e,f,f,n,t,n,f,c,b,w,t,b,s,s,g,w,p,w,o,p,k,y,d e,x,y,e,t,n,f,c,b,p,t,b,s,s,w,w,p,w,o,p,n,v,d p,x,f,g,f,f,f,c,b,p,e,b,k,k,n,p,p,w,o,l,h,y,d e,x,y,g,t,n,f,c,b,u,t,b,s,s,p,w,p,w,o,p,k,y,d e,x,f,e,t,n,f,c,b,w,t,b,s,s,p,g,p,w,o,p,n,y,d e,x,y,n,t,n,f,c,b,u,t,b,s,s,g,p,p,w,o,p,n,v,d e,f,f,n,t,n,f,c,b,p,t,b,s,s,g,w,p,w,o,p,n,y,d e,x,y,e,t,n,f,c,b,w,t,b,s,s,g,p,p,w,o,p,k,v,d e,f,f,n,t,n,f,c,b,p,t,b,s,s,g,g,p,w,o,p,k,v,d e,f,y,n,t,n,f,c,b,p,t,b,s,s,w,g,p,w,o,p,k,y,d e,f,f,g,t,n,f,c,b,p,t,b,s,s,w,p,p,w,o,p,k,y,d e,x,f,e,t,n,f,c,b,n,t,b,s,s,p,w,p,w,o,p,k,y,d e,x,f,g,t,n,f,c,b,w,t,b,s,s,g,p,p,w,o,p,k,v,d e,x,f,g,t,n,f,c,b,u,t,b,s,s,g,w,p,w,o,p,n,v,d e,x,f,n,t,n,f,c,b,u,t,b,s,s,p,w,p,w,o,p,k,v,d e,x,f,g,t,n,f,c,b,w,t,b,s,s,w,g,p,w,o,p,k,y,d e,x,s,g,f,n,f,w,b,k,t,e,s,s,w,w,p,w,o,e,n,s,g e,x,y,e,t,n,f,c,b,u,t,b,s,s,p,g,p,w,o,p,k,y,d e,f,f,g,t,n,f,c,b,p,t,b,s,s,p,g,p,w,o,p,k,y,d e,x,y,e,t,n,f,c,b,n,t,b,s,s,g,w,p,w,o,p,n,v,d e,f,f,g,t,n,f,c,b,n,t,b,s,s,g,p,p,w,o,p,k,v,d e,x,f,g,t,n,f,c,b,u,t,b,s,s,w,p,p,w,o,p,k,y,d e,x,f,g,t,n,f,c,b,n,t,b,s,s,p,p,p,w,o,p,n,v,d e,f,f,g,t,n,f,c,b,p,t,b,s,s,w,p,p,w,o,p,n,y,d e,x,s,w,f,n,f,w,b,p,t,e,s,f,w,w,p,w,o,e,n,a,g e,x,f,n,t,n,f,c,b,w,t,b,s,s,g,w,p,w,o,p,n,y,d e,f,f,n,t,n,f,c,b,w,t,b,s,s,p,p,p,w,o,p,k,v,d e,f,f,g,t,n,f,c,b,p,t,b,s,s,g,g,p,w,o,p,k,y,d e,x,y,g,t,n,f,c,b,w,t,b,s,s,w,w,p,w,o,p,k,y,d e,x,f,e,t,n,f,c,b,u,t,b,s,s,w,w,p,w,o,p,n,v,d e,x,y,e,t,n,f,c,b,u,t,b,s,s,g,p,p,w,o,p,k,v,d e,x,f,n,t,n,f,c,b,w,t,b,s,s,g,p,p,w,o,p,k,y,d e,f,y,g,t,n,f,c,b,w,t,b,s,s,w,w,p,w,o,p,k,y,d e,x,y,g,t,n,f,c,b,w,t,b,s,s,p,p,p,w,o,p,n,v,d p,x,s,p,f,c,f,w,n,g,e,b,s,s,w,w,p,w,o,p,n,s,d e,x,y,n,t,n,f,c,b,p,t,b,s,s,g,p,p,w,o,p,n,y,d e,f,f,g,t,n,f,c,b,n,t,b,s,s,p,w,p,w,o,p,k,v,d e,x,y,e,t,n,f,c,b,p,t,b,s,s,w,p,p,w,o,p,k,y,d e,f,f,n,t,n,f,c,b,p,t,b,s,s,p,w,p,w,o,p,n,v,d e,x,y,n,t,n,f,c,b,w,t,b,s,s,w,g,p,w,o,p,n,v,d e,x,f,g,t,n,f,c,b,w,t,b,s,s,g,w,p,w,o,p,n,y,d e,f,f,n,t,n,f,c,b,p,t,b,s,s,g,w,p,w,o,p,n,v,d e,x,y,n,t,n,f,c,b,n,t,b,s,s,p,w,p,w,o,p,k,v,d e,x,y,n,t,n,f,c,b,w,t,b,s,s,p,w,p,w,o,p,k,v,d e,x,f,e,t,n,f,c,b,w,t,b,s,s,p,w,p,w,o,p,n,v,d e,f,f,g,t,n,f,c,b,p,t,b,s,s,w,p,p,w,o,p,k,v,d e,x,y,e,t,n,f,c,b,n,t,b,s,s,w,w,p,w,o,p,n,v,d e,x,y,e,t,n,f,c,b,u,t,b,s,s,g,g,p,w,o,p,n,v,d e,f,f,n,t,n,f,c,b,w,t,b,s,s,p,g,p,w,o,p,n,y,d e,x,f,e,t,n,f,c,b,p,t,b,s,s,w,p,p,w,o,p,k,v,d e,x,f,e,t,n,f,c,b,n,t,b,s,s,p,p,p,w,o,p,n,y,d e,x,f,w,f,n,f,w,b,k,t,e,f,f,w,w,p,w,o,e,n,s,g e,x,y,g,t,n,f,c,b,p,t,b,s,s,g,g,p,w,o,p,k,y,d e,x,f,e,t,n,f,c,b,p,t,b,s,s,g,w,p,w,o,p,k,v,d 
e,x,y,g,t,n,f,c,b,p,t,b,s,s,p,w,p,w,o,p,n,v,d e,x,f,e,t,n,f,c,b,w,t,b,s,s,w,p,p,w,o,p,k,v,d e,x,f,g,t,n,f,c,b,p,t,b,s,s,w,w,p,w,o,p,n,y,d e,f,y,e,t,n,f,c,b,p,t,b,s,s,w,g,p,w,o,p,n,v,d e,f,y,g,t,n,f,c,b,p,t,b,s,s,p,g,p,w,o,p,k,v,d e,x,y,e,t,n,f,c,b,p,t,b,s,s,p,w,p,w,o,p,k,y,d e,x,y,g,t,n,f,c,b,p,t,b,s,s,w,g,p,w,o,p,n,y,d e,f,f,g,f,n,f,w,b,h,t,e,s,f,w,w,p,w,o,e,k,s,g p,x,f,g,f,f,f,c,b,g,e,b,k,k,b,p,p,w,o,l,h,y,p e,x,f,e,t,n,f,c,b,p,t,b,s,s,g,w,p,w,o,p,k,y,d e,x,y,e,t,n,f,c,b,w,t,b,s,s,w,g,p,w,o,p,n,y,d p,f,s,w,t,p,f,c,n,w,e,e,s,s,w,w,p,w,o,p,k,v,g e,f,y,n,t,n,f,c,b,u,t,b,s,s,w,w,p,w,o,p,k,y,d e,x,f,e,t,n,f,c,b,n,t,b,s,s,p,w,p,w,o,p,n,y,d e,x,y,e,t,n,f,c,b,n,t,b,s,s,p,g,p,w,o,p,k,v,d e,x,f,g,t,n,f,c,b,n,t,b,s,s,g,w,p,w,o,p,k,v,d e,x,y,g,t,n,f,c,b,p,t,b,s,s,g,p,p,w,o,p,n,v,d e,x,f,g,f,n,f,w,b,n,t,e,f,f,w,w,p,w,o,e,n,a,g e,x,f,n,t,n,f,c,b,u,t,b,s,s,g,w,p,w,o,p,n,y,d e,f,f,e,t,n,f,c,b,n,t,b,s,s,g,w,p,w,o,p,k,y,d e,x,y,g,t,n,f,c,b,u,t,b,s,s,g,g,p,w,o,p,n,y,d e,f,f,n,t,n,f,c,b,w,t,b,s,s,g,p,p,w,o,p,k,y,d e,f,f,g,f,n,f,w,b,h,t,e,f,f,w,w,p,w,o,e,n,s,g e,f,f,g,t,n,f,c,b,p,t,b,s,s,p,w,p,w,o,p,n,v,d e,x,y,n,t,n,f,c,b,w,t,b,s,s,w,w,p,w,o,p,k,v,d e,f,f,g,t,n,f,c,b,p,t,b,s,s,p,p,p,w,o,p,k,y,d e,f,y,n,t,n,f,c,b,u,t,b,s,s,w,w,p,w,o,p,n,y,d e,x,f,g,t,n,f,c,b,w,t,b,s,s,p,g,p,w,o,p,n,v,d e,x,y,e,t,n,f,c,b,u,t,b,s,s,p,w,p,w,o,p,n,v,d e,x,y,n,t,n,f,c,b,n,t,b,s,s,p,p,p,w,o,p,n,v,d e,f,s,g,f,n,f,w,b,n,t,e,f,s,w,w,p,w,o,e,n,a,g e,x,f,n,t,n,f,c,b,w,t,b,s,s,w,p,p,w,o,p,k,y,d e,f,f,n,t,n,f,c,b,n,t,b,s,s,w,g,p,w,o,p,k,y,d e,x,y,e,t,n,f,c,b,w,t,b,s,s,p,p,p,w,o,p,n,y,d e,x,y,e,t,n,f,c,b,w,t,b,s,s,p,g,p,w,o,p,k,y,d e,x,y,n,t,n,f,c,b,n,t,b,s,s,p,w,p,w,o,p,k,y,d e,f,s,w,f,n,f,w,b,h,t,e,s,f,w,w,p,w,o,e,k,a,g e,x,y,n,t,n,f,c,b,p,t,b,s,s,g,g,p,w,o,p,n,v,d e,x,y,e,t,n,f,c,b,p,t,b,s,s,w,w,p,w,o,p,k,y,d e,x,f,g,t,n,f,c,b,p,t,b,s,s,p,g,p,w,o,p,n,v,d e,f,f,g,t,n,f,c,b,n,t,b,s,s,p,w,p,w,o,p,n,v,d e,f,f,n,f,n,f,w,b,n,t,e,s,f,w,w,p,w,o,e,k,s,g e,f,f,g,t,n,f,c,b,n,t,b,s,s,w,p,p,w,o,p,k,v,d e,x,y,e,t,n,f,c,b,u,t,b,s,s,g,w,p,w,o,p,k,y,d e,f,f,g,t,n,f,c,b,n,t,b,s,s,w,g,p,w,o,p,k,v,d e,x,y,n,t,n,f,c,b,u,t,b,s,s,w,p,p,w,o,p,n,y,d e,x,f,e,t,n,f,c,b,u,t,b,s,s,p,g,p,w,o,p,k,y,d e,x,f,n,t,n,f,c,b,w,t,b,s,s,w,p,p,w,o,p,n,y,d e,x,f,g,t,n,f,c,b,n,t,b,s,s,w,w,p,w,o,p,n,v,d e,f,f,g,t,n,f,c,b,w,t,b,s,s,g,w,p,w,o,p,n,v,d e,x,f,n,t,n,f,c,b,p,t,b,s,s,p,g,p,w,o,p,k,y,d e,x,y,n,t,n,f,c,b,u,t,b,s,s,p,g,p,w,o,p,n,y,d e,x,f,e,t,n,f,c,b,p,t,b,s,s,w,g,p,w,o,p,n,v,d e,f,f,g,t,n,f,c,b,n,t,b,s,s,w,g,p,w,o,p,k,y,d e,x,y,n,t,n,f,c,b,p,t,b,s,s,w,p,p,w,o,p,k,y,d p,x,s,p,f,c,f,w,n,n,e,b,s,s,w,w,p,w,o,p,k,v,d e,f,y,g,t,n,f,c,b,w,t,b,s,s,g,w,p,w,o,p,k,y,d e,f,f,n,t,n,f,c,b,p,t,b,s,s,w,g,p,w,o,p,n,y,d p,f,s,n,t,p,f,c,n,p,e,e,s,s,w,w,p,w,o,p,k,s,u e,x,y,n,t,n,f,c,b,u,t,b,s,s,w,g,p,w,o,p,k,y,d e,x,y,g,t,n,f,c,b,u,t,b,s,s,g,g,p,w,o,p,k,y,d e,x,y,e,t,n,f,c,b,n,t,b,s,s,g,g,p,w,o,p,k,v,d e,x,y,e,t,n,f,c,b,n,t,b,s,s,p,w,p,w,o,p,n,v,d e,x,y,e,t,n,f,c,b,p,t,b,s,s,w,g,p,w,o,p,k,v,d e,x,f,g,t,n,f,c,b,p,t,b,s,s,p,g,p,w,o,p,n,y,d e,f,s,w,f,n,f,w,b,n,t,e,s,s,w,w,p,w,o,e,k,s,g e,x,y,g,t,n,f,c,b,p,t,b,s,s,w,p,p,w,o,p,n,v,d e,x,f,n,f,n,f,w,b,p,t,e,s,f,w,w,p,w,o,e,n,s,g e,x,f,n,t,n,f,c,b,w,t,b,s,s,g,p,p,w,o,p,n,y,d e,x,y,g,t,n,f,c,b,p,t,b,s,s,p,p,p,w,o,p,k,v,d e,x,y,e,t,n,f,c,b,p,t,b,s,s,w,p,p,w,o,p,n,v,d e,x,y,n,t,n,f,c,b,u,t,b,s,s,g,p,p,w,o,p,k,v,d e,x,f,g,t,n,f,c,b,w,t,b,s,s,p,w,p,w,o,p,n,v,d e,x,f,g,t,n,f,c,b,w,t,b,s,s,p,w,p,w,o,p,k,y,d e,x,y,g,t,n,f,c,b,w,t,b,s,s,p,g,p,w,o,p,k,v,d e,x,y,g,t,n,f,c,b,n,t,b,s,s,p,p,p,w,o,p,n,y,d e,f,y,g,t,n,f,c,b,p,t,b,s,s,g,g,p,w,o,p,n,v,d 
e,x,f,e,t,n,f,c,b,p,t,b,s,s,w,w,p,w,o,p,k,v,d e,x,y,e,t,n,f,c,b,n,t,b,s,s,g,w,p,w,o,p,k,y,d e,x,f,n,t,n,f,c,b,w,t,b,s,s,p,w,p,w,o,p,n,v,d e,x,f,n,t,n,f,c,b,w,t,b,s,s,p,p,p,w,o,p,n,y,d e,f,f,n,t,n,f,c,b,n,t,b,s,s,g,g,p,w,o,p,k,v,d e,f,f,n,t,n,f,c,b,p,t,b,s,s,p,p,p,w,o,p,n,y,d e,x,y,g,t,n,f,c,b,u,t,b,s,s,p,p,p,w,o,p,n,v,d e,x,f,e,t,n,f,c,b,w,t,b,s,s,w,g,p,w,o,p,k,y,d e,f,y,n,t,n,f,c,b,w,t,b,s,s,p,g,p,w,o,p,k,v,d e,x,y,e,t,n,f,c,b,p,t,b,s,s,w,w,p,w,o,p,k,v,d e,f,y,n,t,n,f,c,b,w,t,b,s,s,w,p,p,w,o,p,k,y,d e,x,y,g,t,n,f,c,b,p,t,b,s,s,w,w,p,w,o,p,k,y,d e,f,f,n,t,n,f,c,b,n,t,b,s,s,p,w,p,w,o,p,n,v,d e,x,y,g,t,n,f,c,b,w,t,b,s,s,g,p,p,w,o,p,k,y,d e,x,y,e,t,n,f,c,b,w,t,b,s,s,p,p,p,w,o,p,k,v,d e,x,y,n,t,n,f,c,b,p,t,b,s,s,w,g,p,w,o,p,k,y,d e,x,f,g,t,n,f,c,b,w,t,b,s,s,w,p,p,w,o,p,k,y,d e,x,f,e,t,n,f,c,b,u,t,b,s,s,w,g,p,w,o,p,n,y,d e,x,f,n,t,n,f,c,b,u,t,b,s,s,p,p,p,w,o,p,k,v,d e,f,f,n,t,n,f,c,b,w,t,b,s,s,g,w,p,w,o,p,n,y,d e,f,y,g,t,n,f,c,b,u,t,b,s,s,p,p,p,w,o,p,n,v,d e,x,f,n,t,n,f,c,b,w,t,b,s,s,p,w,p,w,o,p,k,v,d e,x,y,e,t,n,f,c,b,w,t,b,s,s,p,w,p,w,o,p,k,y,d e,f,f,n,t,n,f,c,b,w,t,b,s,s,p,g,p,w,o,p,k,v,d e,x,y,n,t,n,f,c,b,w,t,b,s,s,g,p,p,w,o,p,k,v,d e,f,f,n,t,n,f,c,b,n,t,b,s,s,w,p,p,w,o,p,k,y,d e,x,y,e,t,n,f,c,b,u,t,b,s,s,w,g,p,w,o,p,n,y,d e,x,y,e,t,n,f,c,b,u,t,b,s,s,p,g,p,w,o,p,n,y,d e,x,f,g,t,n,f,c,b,w,t,b,s,s,g,g,p,w,o,p,n,v,d e,x,f,g,t,n,f,c,b,p,t,b,s,s,w,p,p,w,o,p,n,v,d e,f,f,e,t,n,f,c,b,u,t,b,s,s,w,p,p,w,o,p,n,v,d e,x,f,n,t,n,f,c,b,w,t,b,s,s,w,g,p,w,o,p,n,y,d e,x,y,e,t,n,f,c,b,n,t,b,s,s,p,p,p,w,o,p,n,y,d e,x,y,g,t,n,f,c,b,u,t,b,s,s,p,w,p,w,o,p,n,v,d e,x,y,g,t,n,f,c,b,u,t,b,s,s,w,p,p,w,o,p,k,y,d e,x,f,e,t,n,f,c,b,u,t,b,s,s,p,w,p,w,o,p,n,v,d e,f,f,n,t,n,f,c,b,w,t,b,s,s,w,w,p,w,o,p,n,v,d e,x,y,n,t,n,f,c,b,p,t,b,s,s,p,p,p,w,o,p,n,y,d e,x,y,g,t,n,f,c,b,u,t,b,s,s,g,p,p,w,o,p,k,v,d e,x,f,w,f,n,f,w,b,h,t,e,s,f,w,w,p,w,o,e,n,s,g e,f,y,g,t,n,f,c,b,w,t,b,s,s,g,g,p,w,o,p,n,y,d e,f,s,g,f,n,f,w,b,n,t,e,f,f,w,w,p,w,o,e,n,s,g e,f,f,n,t,n,f,c,b,u,t,b,s,s,g,w,p,w,o,p,n,y,d e,x,y,e,t,n,f,c,b,w,t,b,s,s,w,w,p,w,o,p,k,y,d p,f,y,w,t,p,f,c,n,w,e,e,s,s,w,w,p,w,o,p,n,s,u e,x,y,g,t,n,f,c,b,n,t,b,s,s,w,g,p,w,o,p,k,v,d e,f,f,n,t,n,f,c,b,w,t,b,s,s,w,w,p,w,o,p,k,y,d e,x,f,g,t,n,f,c,b,w,t,b,s,s,g,g,p,w,o,p,n,y,d e,x,f,e,t,n,f,c,b,n,t,b,s,s,p,g,p,w,o,p,k,v,d e,f,f,g,t,n,f,c,b,n,t,b,s,s,g,w,p,w,o,p,k,v,d e,x,y,n,t,n,f,c,b,p,t,b,s,s,p,g,p,w,o,p,n,y,d e,x,y,e,t,n,f,c,b,n,t,b,s,s,p,w,p,w,o,p,k,y,d e,x,y,n,t,n,f,c,b,n,t,b,s,s,p,g,p,w,o,p,n,v,d e,x,y,n,t,n,f,c,b,n,t,b,s,s,w,w,p,w,o,p,n,y,d e,f,f,n,t,n,f,c,b,p,t,b,s,s,g,w,p,w,o,p,k,y,d e,x,y,g,t,n,f,c,b,u,t,b,s,s,g,p,p,w,o,p,k,y,d e,x,y,n,t,n,f,c,b,n,t,b,s,s,w,p,p,w,o,p,k,y,d e,x,f,e,t,n,f,c,b,w,t,b,s,s,w,g,p,w,o,p,k,v,d e,f,f,n,t,n,f,c,b,p,t,b,s,s,p,g,p,w,o,p,k,y,d e,x,f,e,t,n,f,c,b,n,t,b,s,s,w,w,p,w,o,p,k,v,d e,f,s,w,f,n,f,w,b,k,t,e,s,s,w,w,p,w,o,e,n,a,g e,x,y,n,t,n,f,c,b,n,t,b,s,s,g,w,p,w,o,p,k,v,d e,x,f,g,t,n,f,c,b,w,t,b,s,s,g,g,p,w,o,p,k,y,d e,x,f,n,t,n,f,c,b,w,t,b,s,s,p,w,p,w,o,p,k,y,d e,x,f,e,t,n,f,c,b,p,t,b,s,s,g,g,p,w,o,p,n,y,d e,f,f,n,t,n,f,c,b,n,t,b,s,s,g,p,p,w,o,p,k,y,d e,x,f,n,t,n,f,c,b,u,t,b,s,s,w,p,p,w,o,p,n,v,d e,x,y,n,t,n,f,c,b,p,t,b,s,s,w,g,p,w,o,p,n,y,d p,f,s,w,t,p,f,c,n,w,e,e,s,s,w,w,p,w,o,p,n,v,u e,x,f,g,t,n,f,c,b,w,t,b,s,s,g,g,p,w,o,p,k,v,d e,f,s,w,f,n,f,w,b,k,t,e,s,s,w,w,p,w,o,e,k,s,g e,x,s,g,f,n,f,w,b,p,t,e,s,f,w,w,p,w,o,e,n,s,g e,x,f,g,t,n,f,c,b,p,t,b,s,s,p,g,p,w,o,p,k,v,d e,f,f,n,t,n,f,c,b,n,t,b,s,s,p,g,p,w,o,p,k,y,d e,x,f,e,t,n,f,c,b,n,t,b,s,s,g,g,p,w,o,p,k,v,d e,x,y,g,t,n,f,c,b,n,t,b,s,s,p,g,p,w,o,p,k,v,d e,f,f,n,t,n,f,c,b,u,t,b,s,s,w,g,p,w,o,p,k,y,d 
e,x,f,n,t,n,f,c,b,w,t,b,s,s,p,p,p,w,o,p,n,v,d p,x,f,g,f,f,f,c,b,h,e,b,k,k,n,n,p,w,o,l,h,y,d e,x,y,e,t,n,f,c,b,w,t,b,s,s,g,g,p,w,o,p,k,y,d e,x,f,n,f,n,f,w,b,h,t,e,s,s,w,w,p,w,o,e,k,a,g p,x,s,w,t,p,f,c,n,n,e,e,s,s,w,w,p,w,o,p,n,v,g e,x,y,g,t,n,f,c,b,p,t,b,s,s,p,p,p,w,o,p,n,y,d e,f,f,n,t,n,f,c,b,n,t,b,s,s,w,w,p,w,o,p,n,v,d e,x,f,n,t,n,f,c,b,p,t,b,s,s,g,p,p,w,o,p,k,y,d e,x,y,g,t,n,f,c,b,u,t,b,s,s,p,w,p,w,o,p,k,v,d e,x,y,g,t,n,f,c,b,w,t,b,s,s,p,p,p,w,o,p,k,v,d e,f,f,n,t,n,f,c,b,u,t,b,s,s,p,p,p,w,o,p,n,v,d e,f,f,n,t,n,f,c,b,u,t,b,s,s,g,w,p,w,o,p,k,y,d e,x,f,g,t,n,f,c,b,u,t,b,s,s,w,g,p,w,o,p,k,v,d e,f,f,g,t,n,f,c,b,n,t,b,s,s,g,p,p,w,o,p,n,y,d e,x,y,e,t,n,f,c,b,u,t,b,s,s,g,g,p,w,o,p,n,y,d e,x,f,e,t,n,f,c,b,n,t,b,s,s,w,w,p,w,o,p,k,y,d p,x,s,w,t,p,f,c,n,n,e,e,s,s,w,w,p,w,o,p,k,v,g e,f,f,g,t,n,f,c,b,n,t,b,s,s,w,w,p,w,o,p,k,v,d e,x,f,g,t,n,f,c,b,p,t,b,s,s,g,p,p,w,o,p,k,v,d e,f,y,e,t,n,f,c,b,w,t,b,s,s,w,p,p,w,o,p,k,v,d e,f,y,g,t,n,f,c,b,p,t,b,s,s,p,p,p,w,o,p,k,v,d e,x,y,n,t,n,f,c,b,w,t,b,s,s,w,w,p,w,o,p,n,v,d e,x,y,e,t,n,f,c,b,w,t,b,s,s,g,w,p,w,o,p,n,y,d e,x,f,e,t,n,f,c,b,w,t,b,s,s,w,w,p,w,o,p,n,y,d e,x,y,e,t,n,f,c,b,w,t,b,s,s,g,w,p,w,o,p,k,y,d e,f,f,g,t,n,f,c,b,n,t,b,s,s,p,g,p,w,o,p,n,y,d e,x,f,g,t,n,f,c,b,p,t,b,s,s,w,w,p,w,o,p,k,v,d e,f,f,n,t,n,f,c,b,p,t,b,s,s,w,p,p,w,o,p,n,v,d e,x,y,n,t,n,f,c,b,p,t,b,s,s,p,p,p,w,o,p,n,v,d e,x,y,g,t,n,f,c,b,n,t,b,s,s,p,p,p,w,o,p,k,v,d e,x,f,g,t,n,f,c,b,u,t,b,s,s,p,p,p,w,o,p,n,v,d e,x,y,n,t,n,f,c,b,p,t,b,s,s,p,p,p,w,o,p,k,y,d e,f,f,n,t,n,f,c,b,w,t,b,s,s,g,p,p,w,o,p,n,v,d e,x,y,n,t,n,f,c,b,u,t,b,s,s,g,w,p,w,o,p,k,y,d e,x,y,n,t,n,f,c,b,p,t,b,s,s,p,g,p,w,o,p,k,y,d e,f,f,n,t,n,f,c,b,p,t,b,s,s,p,g,p,w,o,p,k,v,d e,x,f,n,t,n,f,c,b,u,t,b,s,s,p,g,p,w,o,p,n,y,d e,x,f,n,t,n,f,c,b,p,t,b,s,s,g,w,p,w,o,p,n,v,d e,x,y,n,t,n,f,c,b,p,t,b,s,s,g,p,p,w,o,p,k,y,d e,f,f,g,t,n,f,c,b,n,t,b,s,s,p,p,p,w,o,p,n,y,d e,x,f,e,t,n,f,c,b,u,t,b,s,s,w,w,p,w,o,p,n,y,d e,f,f,n,t,n,f,c,b,p,t,b,s,s,g,g,p,w,o,p,n,v,d p,x,s,p,f,c,f,c,n,g,e,b,s,s,w,w,p,w,o,p,k,v,d e,x,f,e,t,n,f,c,b,u,t,b,s,s,p,p,p,w,o,p,n,y,d e,f,f,n,t,n,f,c,b,w,t,b,s,s,p,g,p,w,o,p,k,y,d e,x,f,n,t,n,f,c,b,w,t,b,s,s,p,p,p,w,o,p,k,y,d e,f,f,e,t,n,f,c,b,u,t,b,s,s,p,g,p,w,o,p,n,v,d e,x,y,e,t,n,f,c,b,n,t,b,s,s,w,g,p,w,o,p,k,y,d e,x,f,g,t,n,f,c,b,w,t,b,s,s,w,g,p,w,o,p,n,y,d e,x,f,g,t,n,f,c,b,n,t,b,s,s,p,w,p,w,o,p,n,y,d e,x,f,g,t,n,f,c,b,p,t,b,s,s,g,w,p,w,o,p,k,v,d e,x,f,g,t,n,f,c,b,u,t,b,s,s,p,g,p,w,o,p,n,y,d e,f,f,g,t,n,f,c,b,p,t,b,s,s,w,g,p,w,o,p,n,y,d e,x,f,e,t,n,f,c,b,u,t,b,s,s,w,g,p,w,o,p,n,v,d e,f,y,n,t,n,f,c,b,p,t,b,s,s,p,g,p,w,o,p,k,y,d e,f,y,e,t,n,f,c,b,u,t,b,s,s,g,w,p,w,o,p,k,v,d e,x,y,g,t,n,f,c,b,u,t,b,s,s,w,w,p,w,o,p,n,y,d e,x,y,g,t,n,f,c,b,w,t,b,s,s,p,p,p,w,o,p,k,y,d e,x,f,g,t,n,f,c,b,n,t,b,s,s,p,w,p,w,o,p,k,y,d p,x,f,g,f,f,f,c,b,h,e,b,k,k,b,b,p,w,o,l,h,y,g e,x,y,n,t,n,f,c,b,u,t,b,s,s,w,p,p,w,o,p,k,v,d e,x,y,e,t,n,f,c,b,p,t,b,s,s,g,p,p,w,o,p,n,y,d e,x,f,n,t,n,f,c,b,w,t,b,s,s,p,g,p,w,o,p,k,v,d e,f,f,n,t,n,f,c,b,p,t,b,s,s,w,w,p,w,o,p,n,y,d e,f,f,n,t,n,f,c,b,p,t,b,s,s,p,w,p,w,o,p,k,v,d e,f,f,g,t,n,f,c,b,n,t,b,s,s,w,w,p,w,o,p,k,y,d e,f,f,g,t,n,f,c,b,p,t,b,s,s,p,w,p,w,o,p,n,y,d e,x,y,g,t,n,f,c,b,w,t,b,s,s,p,p,p,w,o,p,n,y,d e,x,f,g,f,n,f,w,b,k,t,e,s,f,w,w,p,w,o,e,n,a,g e,x,y,g,t,n,f,c,b,n,t,b,s,s,w,w,p,w,o,p,k,y,d e,x,f,e,t,n,f,c,b,p,t,b,s,s,w,p,p,w,o,p,k,y,d e,f,f,n,t,n,f,c,b,p,t,b,s,s,p,p,p,w,o,p,k,v,d e,x,y,e,t,n,f,c,b,n,t,b,s,s,g,w,p,w,o,p,n,y,d e,x,f,g,t,n,f,c,b,p,t,b,s,s,w,w,p,w,o,p,n,v,d e,x,f,n,t,n,f,c,b,p,t,b,s,s,g,g,p,w,o,p,k,y,d e,x,y,e,t,n,f,c,b,w,t,b,s,s,w,p,p,w,o,p,n,v,d e,x,f,n,t,n,f,c,b,w,t,b,s,s,w,p,p,w,o,p,k,v,d 
e,f,f,g,t,n,f,c,b,p,t,b,s,s,w,w,p,w,o,p,k,y,d e,x,y,g,t,n,f,c,b,n,t,b,s,s,g,w,p,w,o,p,k,y,d e,x,y,g,t,n,f,c,b,u,t,b,s,s,w,p,p,w,o,p,k,v,d e,f,f,n,t,n,f,c,b,u,t,b,s,s,p,w,p,w,o,p,n,y,d e,x,f,g,t,n,f,c,b,p,t,b,s,s,g,g,p,w,o,p,k,v,d e,f,f,n,t,n,f,c,b,p,t,b,s,s,g,p,p,w,o,p,n,v,d e,x,y,e,t,n,f,c,b,p,t,b,s,s,p,w,p,w,o,p,k,v,d e,x,f,n,t,n,f,c,b,u,t,b,s,s,p,p,p,w,o,p,k,y,d e,x,y,g,t,n,f,c,b,w,t,b,s,s,g,p,p,w,o,p,k,v,d e,f,f,g,t,n,f,c,b,p,t,b,s,s,w,g,p,w,o,p,k,v,d e,x,f,g,t,n,f,c,b,p,t,b,s,s,w,g,p,w,o,p,n,v,d e,x,f,n,t,n,f,c,b,w,t,b,s,s,g,g,p,w,o,p,k,v,d e,x,y,g,t,n,f,c,b,w,t,b,s,s,p,w,p,w,o,p,n,v,d e,x,y,g,t,n,f,c,b,p,t,b,s,s,w,p,p,w,o,p,n,y,d e,x,y,e,t,n,f,c,b,n,t,b,s,s,w,p,p,w,o,p,n,v,d e,x,y,n,t,n,f,c,b,w,t,b,s,s,p,g,p,w,o,p,k,v,d e,x,y,g,t,n,f,c,b,u,t,b,s,s,w,g,p,w,o,p,k,y,d p,f,y,n,t,p,f,c,n,k,e,e,s,s,w,w,p,w,o,p,k,s,u e,x,s,g,f,n,f,w,b,n,t,e,f,s,w,w,p,w,o,e,n,s,g e,x,f,g,t,n,f,c,b,u,t,b,s,s,g,p,p,w,o,p,k,v,d e,x,f,e,t,n,f,c,b,n,t,b,s,s,w,g,p,w,o,p,k,y,d p,x,s,p,f,c,f,w,n,u,e,b,s,s,w,w,p,w,o,p,n,s,d e,x,y,n,t,n,f,c,b,w,t,b,s,s,p,p,p,w,o,p,k,y,d e,x,f,e,t,n,f,c,b,p,t,b,s,s,w,g,p,w,o,p,k,v,d e,x,y,n,t,n,f,c,b,p,t,b,s,s,w,w,p,w,o,p,n,y,d e,x,y,e,t,n,f,c,b,u,t,b,s,s,p,g,p,w,o,p,k,v,d e,x,s,w,f,n,f,w,b,k,t,e,s,s,w,w,p,w,o,e,n,s,g e,f,f,g,t,n,f,c,b,p,t,b,s,s,g,p,p,w,o,p,n,v,d e,x,y,g,t,n,f,c,b,n,t,b,s,s,g,p,p,w,o,p,n,v,d e,x,y,g,t,n,f,c,b,n,t,b,s,s,p,p,p,w,o,p,n,v,d e,x,f,e,t,n,f,c,b,n,t,b,s,s,g,w,p,w,o,p,n,y,d e,x,y,g,t,n,f,c,b,n,t,b,s,s,p,w,p,w,o,p,k,v,d e,x,y,g,t,n,f,c,b,w,t,b,s,s,w,p,p,w,o,p,n,v,d e,x,f,e,t,n,f,c,b,p,t,b,s,s,g,p,p,w,o,p,k,y,d p,f,y,w,t,p,f,c,n,p,e,e,s,s,w,w,p,w,o,p,n,v,u e,f,f,n,t,n,f,c,b,u,t,b,s,s,w,w,p,w,o,p,n,y,d e,x,f,e,t,n,f,c,b,w,t,b,s,s,g,w,p,w,o,p,k,y,d e,x,f,e,t,n,f,c,b,n,t,b,s,s,p,p,p,w,o,p,n,v,d e,x,y,g,t,n,f,c,b,p,t,b,s,s,w,g,p,w,o,p,k,v,d e,f,s,w,f,n,f,w,b,h,t,e,f,f,w,w,p,w,o,e,k,s,g e,x,f,n,f,n,f,w,b,k,t,e,f,s,w,w,p,w,o,e,n,a,g e,x,f,n,t,n,f,c,b,w,t,b,s,s,p,p,p,w,o,p,k,v,d e,f,f,n,t,n,f,c,b,n,t,b,s,s,p,g,p,w,o,p,n,v,d e,x,y,e,t,n,f,c,b,p,t,b,s,s,w,g,p,w,o,p,n,y,d e,x,y,e,t,n,f,c,b,u,t,b,s,s,p,p,p,w,o,p,n,y,d e,x,y,e,t,n,f,c,b,p,t,b,s,s,p,w,p,w,o,p,n,y,d e,f,f,n,t,n,f,c,b,w,t,b,s,s,w,p,p,w,o,p,k,y,d e,x,f,g,t,n,f,c,b,w,t,b,s,s,w,w,p,w,o,p,k,v,d e,f,f,n,t,n,f,c,b,w,t,b,s,s,w,g,p,w,o,p,k,v,d e,f,f,e,t,n,f,c,b,n,t,b,s,s,w,p,p,w,o,p,n,y,d e,x,y,e,t,n,f,c,b,w,t,b,s,s,p,p,p,w,o,p,n,v,d p,x,s,w,f,c,f,w,n,p,e,b,s,s,w,w,p,w,o,p,n,s,d e,f,f,n,t,n,f,c,b,u,t,b,s,s,p,p,p,w,o,p,k,y,d e,f,f,g,t,n,f,c,b,p,t,b,s,s,p,p,p,w,o,p,n,y,d e,f,f,n,t,n,f,c,b,u,t,b,s,s,w,w,p,w,o,p,k,v,d e,x,y,n,t,n,f,c,b,p,t,b,s,s,p,g,p,w,o,p,k,v,d e,x,f,e,t,n,f,c,b,w,t,b,s,s,g,p,p,w,o,p,k,y,d e,f,s,w,f,n,f,w,b,n,t,e,f,s,w,w,p,w,o,e,n,s,g e,x,y,g,t,n,f,c,b,u,t,b,s,s,p,p,p,w,o,p,n,y,d e,x,f,e,t,n,f,c,b,p,t,b,s,s,p,p,p,w,o,p,k,v,d e,f,f,n,t,n,f,c,b,n,t,b,s,s,w,g,p,w,o,p,n,v,d e,f,f,n,t,n,f,c,b,p,t,b,s,s,w,g,p,w,o,p,k,v,d e,x,y,n,t,n,f,c,b,n,t,b,s,s,g,w,p,w,o,p,k,y,d e,x,y,g,t,n,f,c,b,n,t,b,s,s,w,p,p,w,o,p,n,y,d e,x,y,n,t,n,f,c,b,n,t,b,s,s,w,w,p,w,o,p,k,v,d e,x,f,g,t,n,f,c,b,u,t,b,s,s,p,w,p,w,o,p,n,y,d e,x,y,n,t,n,f,c,b,w,t,b,s,s,w,w,p,w,o,p,k,y,d e,x,y,n,t,n,f,c,b,p,t,b,s,s,w,p,p,w,o,p,k,v,d e,f,f,n,t,n,f,c,b,p,t,b,s,s,p,w,p,w,o,p,n,y,d e,x,y,g,t,n,f,c,b,n,t,b,s,s,w,p,p,w,o,p,n,v,d e,x,y,e,t,n,f,c,b,w,t,b,s,s,p,p,p,w,o,p,k,y,d e,x,y,n,t,n,f,c,b,n,t,b,s,s,p,p,p,w,o,p,k,y,d p,x,f,g,f,f,f,c,b,g,e,b,k,k,n,b,p,w,o,l,h,y,p e,f,f,g,t,n,f,c,b,u,t,b,s,s,p,p,p,w,o,p,k,v,d e,x,y,e,t,n,f,c,b,u,t,b,s,s,g,g,p,w,o,p,k,v,d p,x,f,g,f,f,f,c,b,h,e,b,k,k,p,b,p,w,o,l,h,v,d e,x,y,e,t,n,f,c,b,n,t,b,s,s,w,w,p,w,o,p,k,v,d 
e,x,f,e,t,n,f,c,b,n,t,b,s,s,w,g,p,w,o,p,k,v,d p,x,f,g,f,f,f,c,b,h,e,b,k,k,b,n,p,w,o,l,h,y,d p,x,f,p,f,c,f,w,n,n,e,b,s,s,w,w,p,w,o,p,n,s,d e,x,f,g,t,n,f,c,b,u,t,b,s,s,w,w,p,w,o,p,k,v,d e,f,f,g,t,n,f,c,b,p,t,b,s,s,p,g,p,w,o,p,n,y,d e,x,f,g,t,n,f,c,b,u,t,b,s,s,w,p,p,w,o,p,n,v,d e,x,y,g,t,n,f,c,b,w,t,b,s,s,w,p,p,w,o,p,k,y,d e,x,y,g,t,n,f,c,b,n,t,b,s,s,w,g,p,w,o,p,n,v,d e,f,f,g,t,n,f,c,b,n,t,b,s,s,g,g,p,w,o,p,n,y,d e,x,f,g,t,n,f,c,b,w,t,b,s,s,w,p,p,w,o,p,n,y,d e,x,f,e,t,n,f,c,b,p,t,b,s,s,g,p,p,w,o,p,k,v,d e,x,f,n,t,n,f,c,b,w,t,b,s,s,w,w,p,w,o,p,k,y,d e,x,y,n,t,n,f,c,b,u,t,b,s,s,p,g,p,w,o,p,k,v,d e,f,f,n,t,n,f,c,b,p,t,b,s,s,w,g,p,w,o,p,k,y,d e,f,f,n,t,n,f,c,b,u,t,b,s,s,p,w,p,w,o,p,n,v,d e,f,f,g,t,n,f,c,b,n,t,b,s,s,p,g,p,w,o,p,k,y,d e,x,f,g,t,n,f,c,b,p,t,b,s,s,w,g,p,w,o,p,k,y,d e,x,y,n,t,n,f,c,b,u,t,b,s,s,p,p,p,w,o,p,n,v,d e,x,y,e,t,n,f,c,b,w,t,b,s,s,p,g,p,w,o,p,n,v,d e,f,f,n,t,n,f,c,b,u,t,b,s,s,p,g,p,w,o,p,n,v,d e,x,y,g,t,n,f,c,b,n,t,b,s,s,p,w,p,w,o,p,n,v,d e,x,y,n,t,n,f,c,b,u,t,b,s,s,p,g,p,w,o,p,k,y,d p,x,f,w,f,c,f,c,n,g,e,b,s,s,w,w,p,w,o,p,k,v,d e,f,f,n,t,n,f,c,b,u,t,b,s,s,w,p,p,w,o,p,k,y,d e,x,y,e,t,n,f,c,b,w,t,b,s,s,p,w,p,w,o,p,n,v,d e,x,y,n,t,n,f,c,b,n,t,b,s,s,g,g,p,w,o,p,n,y,d e,f,f,g,t,n,f,c,b,n,t,b,s,s,p,p,p,w,o,p,k,v,d e,x,f,e,t,n,f,c,b,w,t,b,s,s,w,p,p,w,o,p,k,y,d p,x,f,g,f,f,f,c,b,p,e,b,k,k,b,p,p,w,o,l,h,y,d e,x,y,e,t,n,f,c,b,n,t,b,s,s,w,p,p,w,o,p,k,y,d e,f,f,g,t,n,f,c,b,p,t,b,s,s,g,g,p,w,o,p,n,v,d e,f,y,e,t,n,f,c,b,w,t,b,s,s,w,w,p,w,o,p,k,v,d e,x,f,e,t,n,f,c,b,p,t,b,s,s,p,p,p,w,o,p,n,v,d p,x,f,g,f,f,f,c,b,p,e,b,k,k,b,p,p,w,o,l,h,v,g e,x,y,n,t,n,f,c,b,w,t,b,s,s,w,g,p,w,o,p,n,y,d e,x,y,n,t,n,f,c,b,p,t,b,s,s,g,p,p,w,o,p,k,v,d e,x,y,n,t,n,f,c,b,p,t,b,s,s,g,w,p,w,o,p,k,v,d e,x,y,n,t,n,f,c,b,n,t,b,s,s,w,w,p,w,o,p,k,y,d e,x,y,g,t,n,f,c,b,u,t,b,s,s,w,g,p,w,o,p,n,y,d e,x,y,n,t,n,f,c,b,n,t,b,s,s,w,g,p,w,o,p,k,y,d e,x,f,g,t,n,f,c,b,p,t,b,s,s,g,g,p,w,o,p,k,y,d e,x,y,e,t,n,f,c,b,n,t,b,s,s,p,p,p,w,o,p,k,y,d e,x,y,e,t,n,f,c,b,n,t,b,s,s,p,w,p,w,o,p,k,v,d e,x,y,g,t,n,f,c,b,p,t,b,s,s,p,g,p,w,o,p,n,v,d e,f,f,e,t,n,f,c,b,n,t,b,s,s,w,g,p,w,o,p,n,v,d e,f,f,n,t,n,f,c,b,n,t,b,s,s,p,p,p,w,o,p,k,v,d e,f,f,n,t,n,f,c,b,w,t,b,s,s,w,g,p,w,o,p,n,v,d e,x,y,g,t,n,f,c,b,p,t,b,s,s,p,p,p,w,o,p,n,v,d e,x,y,g,t,n,f,c,b,w,t,b,s,s,p,g,p,w,o,p,n,y,d e,x,f,e,t,n,f,c,b,u,t,b,s,s,p,g,p,w,o,p,k,v,d e,f,f,n,t,n,f,c,b,w,t,b,s,s,g,g,p,w,o,p,n,v,d e,f,f,n,t,n,f,c,b,p,t,b,s,s,p,g,p,w,o,p,n,v,d e,x,y,e,t,n,f,c,b,p,t,b,s,s,g,p,p,w,o,p,k,y,d e,x,y,n,t,n,f,c,b,n,t,b,s,s,p,w,p,w,o,p,n,v,d e,f,f,n,t,n,f,c,b,p,t,b,s,s,w,w,p,w,o,p,k,y,d e,f,f,n,t,n,f,c,b,w,t,b,s,s,p,p,p,w,o,p,n,v,d p,x,f,g,f,f,f,c,b,g,e,b,k,k,b,n,p,w,o,l,h,v,p e,x,f,g,t,n,f,c,b,n,t,b,s,s,w,w,p,w,o,p,k,y,d e,x,y,n,t,n,f,c,b,w,t,b,s,s,g,g,p,w,o,p,n,y,d e,x,f,g,t,n,f,c,b,n,t,b,s,s,w,g,p,w,o,p,n,y,d e,x,f,g,t,n,f,c,b,n,t,b,s,s,p,p,p,w,o,p,n,y,d e,x,y,g,t,n,f,c,b,w,t,b,s,s,g,w,p,w,o,p,k,v,d e,f,f,n,t,n,f,c,b,u,t,b,s,s,w,w,p,w,o,p,k,y,d e,f,f,g,t,n,f,c,b,n,t,b,s,s,g,g,p,w,o,p,k,y,d e,x,y,n,t,n,f,c,b,n,t,b,s,s,p,g,p,w,o,p,k,v,d e,x,f,e,t,n,f,c,b,w,t,b,s,s,p,w,p,w,o,p,k,y,d e,f,f,n,t,n,f,c,b,n,t,b,s,s,g,w,p,w,o,p,n,v,d p,x,f,g,f,f,f,c,b,g,e,b,k,k,p,n,p,w,o,l,h,v,p e,x,f,e,t,n,f,c,b,p,t,b,s,s,p,w,p,w,o,p,n,v,d e,f,f,n,t,n,f,c,b,n,t,b,s,s,g,g,p,w,o,p,n,v,d e,x,f,e,t,n,f,c,b,n,t,b,s,s,w,p,p,w,o,p,k,y,d e,x,f,e,t,n,f,c,b,n,t,b,s,s,w,g,p,w,o,p,n,v,d e,x,y,e,t,n,f,c,b,p,t,b,s,s,p,p,p,w,o,p,k,y,d e,f,y,g,t,n,f,c,b,w,t,b,s,s,p,w,p,w,o,p,n,v,d e,f,f,n,t,n,f,c,b,u,t,b,s,s,w,g,p,w,o,p,n,y,d e,x,f,g,t,n,f,c,b,n,t,b,s,s,g,g,p,w,o,p,n,y,d e,f,f,g,t,n,f,c,b,p,t,b,s,s,p,g,p,w,o,p,k,v,d 
e,x,y,n,t,n,f,c,b,w,t,b,s,s,w,p,p,w,o,p,k,y,d e,x,y,g,t,n,f,c,b,p,t,b,s,s,w,w,p,w,o,p,n,y,d e,x,y,g,t,n,f,c,b,u,t,b,s,s,g,w,p,w,o,p,n,y,d e,x,f,n,t,n,f,c,b,w,t,b,s,s,w,w,p,w,o,p,n,y,d e,x,y,e,t,n,f,c,b,p,t,b,s,s,g,g,p,w,o,p,k,y,d e,f,f,n,t,n,f,c,b,u,t,b,s,s,g,g,p,w,o,p,n,v,d e,x,y,e,t,n,f,c,b,n,t,b,s,s,p,p,p,w,o,p,n,v,d e,x,f,n,t,n,f,c,b,n,t,b,s,s,w,g,p,w,o,p,n,y,d e,f,f,n,t,n,f,c,b,u,t,b,s,s,p,g,p,w,o,p,k,y,d e,x,y,e,t,n,f,c,b,p,t,b,s,s,g,g,p,w,o,p,n,v,d e,x,f,g,t,n,f,c,b,w,t,b,s,s,w,g,p,w,o,p,n,v,d e,x,y,n,t,n,f,c,b,p,t,b,s,s,w,w,p,w,o,p,n,v,d p,x,f,g,f,c,f,c,n,g,e,b,s,s,w,w,p,w,o,p,n,v,d e,f,f,g,t,n,f,c,b,n,t,b,s,s,p,p,p,w,o,p,n,v,d e,x,f,n,t,n,f,c,b,w,t,b,s,s,g,g,p,w,o,p,n,y,d e,f,s,n,f,n,f,w,b,p,t,e,s,f,w,w,p,w,o,e,k,a,g e,f,f,n,t,n,f,c,b,w,t,b,s,s,g,w,p,w,o,p,n,v,d e,x,y,g,t,n,f,c,b,n,t,b,s,s,p,w,p,w,o,p,n,y,d e,f,y,g,t,n,f,c,b,w,t,b,s,s,g,g,p,w,o,p,n,v,d e,x,y,g,t,n,f,c,b,w,t,b,s,s,w,w,p,w,o,p,n,v,d e,f,y,e,t,n,f,c,b,p,t,b,s,s,p,g,p,w,o,p,n,v,d e,x,y,g,t,n,f,c,b,n,t,b,s,s,p,g,p,w,o,p,n,y,d e,x,f,g,t,n,f,c,b,n,t,b,s,s,p,g,p,w,o,p,n,v,d e,f,f,g,t,n,f,c,b,n,t,b,s,s,p,g,p,w,o,p,n,v,d e,x,y,n,t,n,f,c,b,p,t,b,s,s,g,g,p,w,o,p,k,v,d e,f,f,n,t,n,f,c,b,u,t,b,s,s,g,g,p,w,o,p,k,y,d e,x,y,e,t,n,f,c,b,p,t,b,s,s,p,w,p,w,o,p,n,v,d e,x,y,e,t,n,f,c,b,w,t,b,s,s,w,w,p,w,o,p,k,v,d e,f,y,n,t,n,f,c,b,u,t,b,s,s,g,g,p,w,o,p,k,v,d e,x,y,n,t,n,f,c,b,w,t,b,s,s,p,p,p,w,o,p,n,y,d e,x,f,g,t,n,f,c,b,u,t,b,s,s,g,w,p,w,o,p,n,y,d e,f,f,g,t,n,f,c,b,p,t,b,s,s,g,p,p,w,o,p,k,y,d e,f,f,w,f,n,f,w,b,h,t,e,s,f,w,w,p,w,o,e,k,s,g e,x,y,n,t,n,f,c,b,p,t,b,s,s,w,g,p,w,o,p,n,v,d e,x,f,g,t,n,f,c,b,u,t,b,s,s,g,g,p,w,o,p,k,y,d e,f,y,e,t,n,f,c,b,n,t,b,s,s,p,g,p,w,o,p,n,y,d e,x,f,e,t,n,f,c,b,w,t,b,s,s,g,g,p,w,o,p,k,v,d e,x,y,n,t,n,f,c,b,u,t,b,s,s,g,g,p,w,o,p,n,y,d e,f,y,n,t,n,f,c,b,w,t,b,s,s,p,w,p,w,o,p,k,y,d e,x,y,g,t,n,f,c,b,p,t,b,s,s,p,g,p,w,o,p,k,v,d e,f,f,n,t,n,f,c,b,w,t,b,s,s,w,w,p,w,o,p,k,v,d e,x,y,e,t,n,f,c,b,u,t,b,s,s,w,w,p,w,o,p,k,y,d e,x,y,e,t,n,f,c,b,p,t,b,s,s,g,g,p,w,o,p,k,v,d e,x,y,e,t,n,f,c,b,w,t,b,s,s,p,w,p,w,o,p,k,v,d e,x,f,e,t,n,f,c,b,w,t,b,s,s,p,w,p,w,o,p,n,y,d e,x,f,e,t,n,f,c,b,p,t,b,s,s,g,w,p,w,o,p,n,v,d e,x,f,e,t,n,f,c,b,p,t,b,s,s,g,g,p,w,o,p,n,v,d e,f,f,n,t,n,f,c,b,p,t,b,s,s,g,p,p,w,o,p,n,y,d e,x,f,g,t,n,f,c,b,n,t,b,s,s,g,p,p,w,o,p,k,v,d e,x,f,g,t,n,f,c,b,p,t,b,s,s,p,w,p,w,o,p,k,v,d e,x,y,g,t,n,f,c,b,u,t,b,s,s,p,g,p,w,o,p,k,y,d e,x,y,n,t,n,f,c,b,n,t,b,s,s,w,g,p,w,o,p,n,v,d e,x,f,e,t,n,f,c,b,n,t,b,s,s,g,g,p,w,o,p,k,y,d e,x,f,e,t,n,f,c,b,w,t,b,s,s,g,g,p,w,o,p,n,y,d e,x,f,g,t,n,f,c,b,u,t,b,s,s,g,p,p,w,o,p,n,y,d e,x,f,e,t,n,f,c,b,w,t,b,s,s,p,p,p,w,o,p,n,v,d e,f,f,n,t,n,f,c,b,n,t,b,s,s,p,w,p,w,o,p,n,y,d e,x,f,e,t,n,f,c,b,p,t,b,s,s,p,g,p,w,o,p,n,v,d e,f,f,n,t,n,f,c,b,w,t,b,s,s,w,p,p,w,o,p,n,y,d e,f,f,n,t,n,f,c,b,w,t,b,s,s,g,g,p,w,o,p,k,v,d e,f,f,n,t,n,f,c,b,u,t,b,s,s,p,p,p,w,o,p,k,v,d e,x,f,g,t,n,f,c,b,n,t,b,s,s,w,p,p,w,o,p,k,v,d e,x,f,e,t,n,f,c,b,u,t,b,s,s,p,p,p,w,o,p,n,v,d e,x,y,e,t,n,f,c,b,u,t,b,s,s,p,w,p,w,o,p,n,y,d e,f,f,g,t,n,f,c,b,n,t,b,s,s,p,p,p,w,o,p,k,y,d e,f,f,n,t,n,f,c,b,p,t,b,s,s,p,p,p,w,o,p,n,v,d e,x,y,n,t,n,f,c,b,n,t,b,s,s,g,p,p,w,o,p,n,y,d e,f,f,n,t,n,f,c,b,u,t,b,s,s,p,g,p,w,o,p,k,v,d p,x,f,p,f,c,f,c,n,p,e,b,s,s,w,w,p,w,o,p,n,v,d e,x,f,e,t,n,f,c,b,p,t,b,s,s,w,w,p,w,o,p,k,y,d e,x,f,n,t,n,f,c,b,w,t,b,s,s,w,w,p,w,o,p,n,v,d e,x,y,e,t,n,f,c,b,w,t,b,s,s,g,p,p,w,o,p,n,y,d e,f,f,n,t,n,f,c,b,n,t,b,s,s,g,p,p,w,o,p,n,v,d e,f,f,n,t,n,f,c,b,w,t,b,s,s,g,g,p,w,o,p,k,y,d e,x,y,g,t,n,f,c,b,w,t,b,s,s,p,w,p,w,o,p,k,v,d e,f,s,n,f,n,f,w,b,n,t,e,s,s,w,w,p,w,o,e,n,a,g e,x,y,g,t,n,f,c,b,w,t,b,s,s,p,g,p,w,o,p,k,y,d 
e,x,y,n,t,n,f,c,b,w,t,b,s,s,w,p,p,w,o,p,n,v,d e,x,y,n,t,n,f,c,b,p,t,b,s,s,p,w,p,w,o,p,k,v,d e,f,f,g,t,n,f,c,b,p,t,b,s,s,w,g,p,w,o,p,n,v,d e,x,f,e,t,n,f,c,b,n,t,b,s,s,g,g,p,w,o,p,n,y,d e,f,f,n,t,n,f,c,b,p,t,b,s,s,g,p,p,w,o,p,k,y,d p,x,f,g,f,f,f,c,b,h,e,b,k,k,b,p,p,w,o,l,h,y,g e,x,y,e,t,n,f,c,b,w,t,b,s,s,w,w,p,w,o,p,n,v,d e,x,f,g,t,n,f,c,b,u,t,b,s,s,p,g,p,w,o,p,k,v,d e,x,f,e,t,n,f,c,b,n,t,b,s,s,p,w,p,w,o,p,n,v,d e,x,f,e,t,n,f,c,b,n,t,b,s,s,p,g,p,w,o,p,k,y,d e,x,y,e,t,n,f,c,b,n,t,b,s,s,g,w,p,w,o,p,k,v,d e,f,f,n,t,n,f,c,b,w,t,b,s,s,w,g,p,w,o,p,n,y,d e,f,f,n,t,n,f,c,b,u,t,b,s,s,g,p,p,w,o,p,k,v,d e,x,y,n,t,n,f,c,b,w,t,b,s,s,w,g,p,w,o,p,k,v,d e,x,f,e,t,n,f,c,b,u,t,b,s,s,g,w,p,w,o,p,n,y,d e,x,y,e,t,n,f,c,b,w,t,b,s,s,p,g,p,w,o,p,k,v,d e,x,y,n,t,n,f,c,b,n,t,b,s,s,p,g,p,w,o,p,k,y,d e,x,y,g,t,n,f,c,b,u,t,b,s,s,w,w,p,w,o,p,k,v,d e,x,y,n,t,n,f,c,b,u,t,b,s,s,g,w,p,w,o,p,k,v,d e,x,y,e,t,n,f,c,b,p,t,b,s,s,w,p,p,w,o,p,n,y,d e,x,f,e,t,n,f,c,b,u,t,b,s,s,g,p,p,w,o,p,n,y,d e,x,f,e,t,n,f,c,b,u,t,b,s,s,w,p,p,w,o,p,k,y,d e,x,y,e,t,n,f,c,b,p,t,b,s,s,w,g,p,w,o,p,n,v,d e,x,y,n,t,n,f,c,b,p,t,b,s,s,g,w,p,w,o,p,k,y,d e,x,f,e,t,n,f,c,b,p,t,b,s,s,p,p,p,w,o,p,n,y,d e,f,f,e,t,n,f,c,b,w,t,b,s,s,g,p,p,w,o,p,n,v,d e,f,f,n,t,n,f,c,b,w,t,b,s,s,p,w,p,w,o,p,k,y,d e,x,y,e,t,n,f,c,b,u,t,b,s,s,w,w,p,w,o,p,k,v,d e,x,f,n,t,n,f,c,b,u,t,b,s,s,g,w,p,w,o,p,n,v,d e,x,y,e,t,n,f,c,b,n,t,b,s,s,g,p,p,w,o,p,n,y,d e,f,f,g,t,n,f,c,b,p,t,b,s,s,g,p,p,w,o,p,n,y,d e,x,y,n,t,n,f,c,b,p,t,b,s,s,g,w,p,w,o,p,n,v,d e,f,y,e,t,n,f,c,b,n,t,b,s,s,p,p,p,w,o,p,k,v,d e,x,y,n,t,n,f,c,b,u,t,b,s,s,g,p,p,w,o,p,k,y,d e,x,y,e,t,n,f,c,b,p,t,b,s,s,p,g,p,w,o,p,n,y,d e,x,y,g,t,n,f,c,b,u,t,b,s,s,p,g,p,w,o,p,n,y,d e,f,y,g,t,n,f,c,b,p,t,b,s,s,w,p,p,w,o,p,k,v,d e,x,y,n,t,n,f,c,b,u,t,b,s,s,w,w,p,w,o,p,n,y,d e,x,s,g,f,n,f,w,b,h,t,e,f,s,w,w,p,w,o,e,k,s,g e,f,f,n,t,n,f,c,b,n,t,b,s,s,w,g,p,w,o,p,n,y,d e,x,y,g,t,n,f,c,b,w,t,b,s,s,g,p,p,w,o,p,n,y,d e,x,y,g,t,n,f,c,b,n,t,b,s,s,w,p,p,w,o,p,k,y,d e,x,f,n,t,n,f,c,b,u,t,b,s,s,g,w,p,w,o,p,k,y,d e,x,f,e,t,n,f,c,b,u,t,b,s,s,g,g,p,w,o,p,n,v,d e,x,y,e,t,n,f,c,b,n,t,b,s,s,g,g,p,w,o,p,k,y,d e,x,y,n,t,n,f,c,b,w,t,b,s,s,w,g,p,w,o,p,k,y,d e,x,f,e,t,n,f,c,b,w,t,b,s,s,p,g,p,w,o,p,k,y,d e,x,y,e,t,n,f,c,b,u,t,b,s,s,p,w,p,w,o,p,k,y,d e,x,y,n,t,n,f,c,b,u,t,b,s,s,g,w,p,w,o,p,n,y,d e,f,f,g,t,n,f,c,b,p,t,b,s,s,w,w,p,w,o,p,n,y,d e,x,f,e,t,n,f,c,b,u,t,b,s,s,w,w,p,w,o,p,k,y,d e,f,y,n,t,n,f,c,b,p,t,b,s,s,p,g,p,w,o,p,k,v,d e,x,y,n,t,n,f,c,b,w,t,b,s,s,p,g,p,w,o,p,n,v,d e,x,f,e,t,n,f,c,b,w,t,b,s,s,g,w,p,w,o,p,n,y,d e,x,f,g,t,n,f,c,b,n,t,b,s,s,p,g,p,w,o,p,k,v,d e,x,y,n,t,n,f,c,b,u,t,b,s,s,w,w,p,w,o,p,k,y,d e,x,y,g,t,n,f,c,b,n,t,b,s,s,w,g,p,w,o,p,n,y,d p,x,f,g,f,c,f,c,n,p,e,b,s,s,w,w,p,w,o,p,k,v,d e,x,y,n,t,n,f,c,b,n,t,b,s,s,p,w,p,w,o,p,n,y,d e,x,f,g,t,n,f,c,b,p,t,b,s,s,p,p,p,w,o,p,n,y,d e,x,y,n,t,n,f,c,b,n,t,b,s,s,g,p,p,w,o,p,k,y,d e,x,y,g,t,n,f,c,b,w,t,b,s,s,g,g,p,w,o,p,k,v,d e,x,y,n,t,n,f,c,b,w,t,b,s,s,g,p,p,w,o,p,n,v,d e,f,f,n,t,n,f,c,b,p,t,b,s,s,w,g,p,w,o,p,n,v,d e,f,f,n,t,n,f,c,b,w,t,b,s,s,p,p,p,w,o,p,k,y,d e,x,y,n,t,n,f,c,b,n,t,b,s,s,w,p,p,w,o,p,n,v,d e,f,f,n,t,n,f,c,b,p,t,b,s,s,p,w,p,w,o,p,k,y,d e,f,f,n,t,n,f,c,b,w,t,b,s,s,w,w,p,w,o,p,n,y,d e,f,y,g,t,n,f,c,b,u,t,b,s,s,w,w,p,w,o,p,k,y,d e,x,f,g,t,n,f,c,b,u,t,b,s,s,p,w,p,w,o,p,k,y,d e,x,y,g,t,n,f,c,b,w,t,b,s,s,w,g,p,w,o,p,k,y,d e,x,y,g,t,n,f,c,b,u,t,b,s,s,w,g,p,w,o,p,n,v,d e,f,f,n,t,n,f,c,b,u,t,b,s,s,g,p,p,w,o,p,n,v,d e,x,f,g,t,n,f,c,b,n,t,b,s,s,w,p,p,w,o,p,n,v,d p,x,f,g,f,f,f,c,b,g,e,b,k,k,b,p,p,w,o,l,h,v,d e,f,f,n,t,n,f,c,b,w,t,b,s,s,p,w,p,w,o,p,n,v,d e,x,y,g,t,n,f,c,b,w,t,b,s,s,g,g,p,w,o,p,k,y,d 
e,x,f,g,t,n,f,c,b,u,t,b,s,s,w,g,p,w,o,p,n,y,d p,x,f,g,f,f,f,c,b,g,e,b,k,k,b,p,p,w,o,l,h,y,d e,f,f,n,t,n,f,c,b,u,t,b,s,s,w,w,p,w,o,p,n,v,d e,x,y,e,t,n,f,c,b,n,t,b,s,s,g,p,p,w,o,p,n,v,d e,x,y,n,t,n,f,c,b,u,t,b,s,s,g,g,p,w,o,p,k,v,d e,x,f,e,t,n,f,c,b,p,t,b,s,s,p,g,p,w,o,p,k,v,d e,x,f,e,t,n,f,c,b,p,t,b,s,s,g,p,p,w,o,p,n,y,d e,x,y,n,t,n,f,c,b,p,t,b,s,s,w,p,p,w,o,p,n,v,d e,f,f,n,t,n,f,c,b,n,t,b,s,s,w,p,p,w,o,p,k,v,d e,x,f,e,t,n,f,c,b,n,t,b,s,s,g,p,p,w,o,p,n,v,d e,x,f,g,t,n,f,c,b,w,t,b,s,s,w,p,p,w,o,p,k,v,d e,f,f,n,t,n,f,c,b,u,t,b,s,s,g,p,p,w,o,p,k,y,d e,x,y,e,t,n,f,c,b,p,t,b,s,s,g,p,p,w,o,p,n,v,d e,x,f,g,t,n,f,c,b,u,t,b,s,s,g,w,p,w,o,p,k,y,d e,f,f,g,t,n,f,c,b,p,t,b,s,s,p,w,p,w,o,p,k,y,d e,f,f,g,t,n,f,c,b,p,t,b,s,s,g,g,p,w,o,p,k,v,d e,f,f,n,t,n,f,c,b,n,t,b,s,s,g,w,p,w,o,p,k,y,d e,x,f,n,t,n,f,c,b,w,t,b,s,s,p,w,p,w,o,p,n,y,d e,x,y,e,t,n,f,c,b,w,t,b,s,s,g,g,p,w,o,p,n,y,d e,f,s,g,f,n,f,w,b,p,t,e,f,f,w,w,p,w,o,e,k,s,g e,x,y,e,t,n,f,c,b,p,t,b,s,s,g,w,p,w,o,p,k,y,d e,f,f,g,t,n,f,c,b,p,t,b,s,s,g,w,p,w,o,p,n,y,d e,x,y,e,t,n,f,c,b,w,t,b,s,s,g,p,p,w,o,p,k,y,d e,x,f,g,t,n,f,c,b,u,t,b,s,s,w,w,p,w,o,p,n,v,d e,f,f,n,t,n,f,c,b,w,t,b,s,s,w,g,p,w,o,p,k,y,d e,x,f,e,t,n,f,c,b,w,t,b,s,s,p,g,p,w,o,p,n,v,d e,x,f,e,t,n,f,c,b,n,t,b,s,s,g,w,p,w,o,p,n,v,d e,x,f,g,t,n,f,c,b,w,t,b,s,s,w,g,p,w,o,p,k,v,d p,x,f,w,f,c,f,w,n,g,e,b,s,s,w,w,p,w,o,p,n,s,d e,x,y,e,t,n,f,c,b,p,t,b,s,s,p,p,p,w,o,p,n,y,d e,x,f,e,t,n,f,c,b,n,t,b,s,s,w,p,p,w,o,p,k,v,d e,x,y,n,t,n,f,c,b,w,t,b,s,s,p,w,p,w,o,p,n,y,d e,f,f,w,f,n,f,w,b,h,t,e,s,s,w,w,p,w,o,e,n,s,g e,x,f,g,t,n,f,c,b,n,t,b,s,s,g,w,p,w,o,p,n,y,d e,f,f,g,t,n,f,c,b,n,t,b,s,s,w,p,p,w,o,p,n,v,d e,x,f,g,t,n,f,c,b,w,t,b,s,s,p,g,p,w,o,p,n,y,d e,f,y,e,t,n,f,c,b,w,t,b,s,s,w,p,p,w,o,p,k,y,d e,x,f,e,t,n,f,c,b,n,t,b,s,s,p,p,p,w,o,p,k,y,d e,x,y,n,t,n,f,c,b,n,t,b,s,s,g,g,p,w,o,p,k,v,d e,x,y,e,t,n,f,c,b,n,t,b,s,s,g,g,p,w,o,p,n,v,d e,x,f,e,t,n,f,c,b,n,t,b,s,s,w,w,p,w,o,p,n,v,d e,f,y,e,t,n,f,c,b,n,t,b,s,s,w,w,p,w,o,p,k,v,d e,f,f,n,t,n,f,c,b,w,t,b,s,s,p,g,p,w,o,p,n,v,d e,x,y,g,t,n,f,c,b,p,t,b,s,s,w,w,p,w,o,p,n,v,d e,x,y,g,t,n,f,c,b,p,t,b,s,s,g,w,p,w,o,p,n,y,d e,x,f,g,t,n,f,c,b,n,t,b,s,s,g,p,p,w,o,p,n,v,d e,x,f,e,t,n,f,c,b,w,t,b,s,s,g,p,p,w,o,p,n,v,d e,x,f,n,t,n,f,c,b,w,t,b,s,s,g,w,p,w,o,p,k,y,d e,f,f,n,f,n,f,w,b,n,t,e,f,f,w,w,p,w,o,e,k,s,g e,x,f,e,t,n,f,c,b,w,t,b,s,s,w,g,p,w,o,p,n,y,d e,x,y,g,t,n,f,c,b,n,t,b,s,s,g,g,p,w,o,p,n,y,d e,x,y,n,t,n,f,c,b,u,t,b,s,s,p,p,p,w,o,p,k,v,d e,x,f,g,t,n,f,c,b,p,t,b,s,s,g,p,p,w,o,p,n,y,d e,x,y,e,t,n,f,c,b,n,t,b,s,s,p,g,p,w,o,p,n,y,d e,f,f,n,t,n,f,c,b,u,t,b,s,s,p,w,p,w,o,p,k,v,d e,x,y,g,t,n,f,c,b,u,t,b,s,s,w,w,p,w,o,p,k,y,d e,f,f,g,t,n,f,c,b,p,t,b,s,s,p,g,p,w,o,p,n,v,d e,x,f,e,t,n,f,c,b,u,t,b,s,s,p,p,p,w,o,p,k,v,d e,f,f,n,t,n,f,c,b,n,t,b,s,s,w,g,p,w,o,p,k,v,d e,x,y,g,t,n,f,c,b,p,t,b,s,s,g,p,p,w,o,p,n,y,d e,x,y,n,t,n,f,c,b,p,t,b,s,s,w,p,p,w,o,p,n,y,d e,x,y,e,t,n,f,c,b,w,t,b,s,s,w,p,p,w,o,p,k,v,d e,x,f,e,t,n,f,c,b,u,t,b,s,s,p,p,p,w,o,p,k,y,d e,x,f,n,t,n,f,c,b,u,t,b,s,s,w,w,p,w,o,p,n,v,d e,f,f,n,t,n,f,c,b,n,t,b,s,s,p,p,p,w,o,p,k,y,d e,x,y,g,t,n,f,c,b,n,t,b,s,s,w,w,p,w,o,p,k,v,d e,x,f,e,t,n,f,c,b,w,t,b,s,s,p,p,p,w,o,p,n,y,d e,f,f,n,t,n,f,c,b,w,t,b,s,s,w,p,p,w,o,p,n,v,d e,f,f,n,t,n,f,c,b,n,t,b,s,s,p,p,p,w,o,p,n,y,d e,x,y,e,t,n,f,c,b,u,t,b,s,s,g,p,p,w,o,p,k,y,d e,x,f,g,t,n,f,c,b,u,t,b,s,s,p,g,p,w,o,p,k,y,d e,x,f,g,t,n,f,c,b,u,t,b,s,s,g,g,p,w,o,p,k,v,d p,x,f,p,f,c,f,w,n,u,e,b,s,s,w,w,p,w,o,p,k,v,d e,x,y,n,t,n,f,c,b,p,t,b,s,s,g,g,p,w,o,p,n,y,d e,x,f,e,t,n,f,c,b,p,t,b,s,s,w,p,p,w,o,p,n,v,d e,f,s,n,f,n,f,w,b,p,t,e,f,f,w,w,p,w,o,e,n,s,g e,x,y,g,t,n,f,c,b,p,t,b,s,s,p,g,p,w,o,p,k,y,d 
e,x,y,g,t,n,f,c,b,u,t,b,s,s,p,p,p,w,o,p,k,y,d e,x,f,n,t,n,f,c,b,u,t,b,s,s,p,w,p,w,o,p,k,y,d e,f,f,g,t,n,f,c,b,n,t,b,s,s,p,w,p,w,o,p,n,y,d e,x,f,g,t,n,f,c,b,w,t,b,s,s,g,p,p,w,o,p,n,v,d e,x,y,n,t,n,f,c,b,w,t,b,s,s,g,p,p,w,o,p,k,y,d e,x,f,e,t,n,f,c,b,w,t,b,s,s,g,p,p,w,o,p,n,y,d e,x,f,n,t,n,f,c,b,u,t,b,s,s,p,p,p,w,o,p,n,v,d e,f,f,g,t,n,f,c,b,n,t,b,s,s,g,p,p,w,o,p,n,v,d e,f,f,n,t,n,f,c,b,w,t,b,s,s,g,g,p,w,o,p,n,y,d e,x,y,g,t,n,f,c,b,w,t,b,s,s,g,g,p,w,o,p,n,v,d e,x,y,e,t,n,f,c,b,u,t,b,s,s,w,w,p,w,o,p,n,y,d e,x,y,g,t,n,f,c,b,p,t,b,s,s,p,w,p,w,o,p,k,v,d e,f,f,g,t,n,f,c,b,p,t,b,s,s,g,w,p,w,o,p,k,y,d e,x,f,e,t,n,f,c,b,u,t,b,s,s,p,w,p,w,o,p,n,y,d p,x,s,p,f,c,f,w,n,u,e,b,s,s,w,w,p,w,o,p,k,s,d e,f,y,n,t,n,f,c,b,w,t,b,s,s,p,p,p,w,o,p,k,v,d e,x,y,e,t,n,f,c,b,w,t,b,s,s,g,g,p,w,o,p,k,v,d e,x,y,g,t,n,f,c,b,u,t,b,s,s,g,w,p,w,o,p,k,y,d e,x,y,g,t,n,f,c,b,n,t,b,s,s,g,g,p,w,o,p,k,v,d e,x,f,g,t,n,f,c,b,n,t,b,s,s,g,g,p,w,o,p,k,y,d e,x,f,e,t,n,f,c,b,n,t,b,s,s,g,p,p,w,o,p,n,y,d e,x,y,n,t,n,f,c,b,u,t,b,s,s,p,p,p,w,o,p,n,y,d e,x,y,g,t,n,f,c,b,p,t,b,s,s,p,g,p,w,o,p,n,y,d e,x,y,e,t,n,f,c,b,u,t,b,s,s,g,w,p,w,o,p,k,v,d e,x,y,n,t,n,f,c,b,p,t,b,s,s,w,w,p,w,o,p,k,v,d e,x,y,g,t,n,f,c,b,w,t,b,s,s,p,g,p,w,o,p,n,v,d e,x,y,n,t,n,f,c,b,u,t,b,s,s,p,w,p,w,o,p,n,y,d e,x,f,e,t,n,f,c,b,p,t,b,s,s,p,w,p,w,o,p,k,y,d e,x,f,e,t,n,f,c,b,p,t,b,s,s,w,p,p,w,o,p,n,y,d e,x,y,g,t,n,f,c,b,w,t,b,s,s,w,p,p,w,o,p,n,y,d e,f,f,g,t,n,f,c,b,n,t,b,s,s,g,w,p,w,o,p,k,y,d e,f,f,e,t,n,f,c,b,w,t,b,s,s,w,p,p,w,o,p,n,y,d e,f,f,g,t,n,f,c,b,p,t,b,s,s,g,g,p,w,o,p,n,y,d e,x,y,e,t,n,f,c,b,p,t,b,s,s,g,w,p,w,o,p,n,y,d e,x,y,e,t,n,f,c,b,p,t,b,s,s,g,w,p,w,o,p,n,v,d e,x,s,w,f,n,f,w,b,k,t,e,s,f,w,w,p,w,o,e,k,s,g e,x,y,n,t,n,f,c,b,w,t,b,s,s,w,p,p,w,o,p,k,v,d e,x,y,e,t,n,f,c,b,n,t,b,s,s,w,w,p,w,o,p,n,y,d e,f,f,e,t,n,f,c,b,u,t,b,s,s,p,p,p,w,o,p,k,v,d e,x,f,g,t,n,f,c,b,n,t,b,s,s,w,p,p,w,o,p,k,y,d e,f,f,n,t,n,f,c,b,n,t,b,s,s,p,g,p,w,o,p,k,v,d p,x,s,g,f,c,f,c,n,n,e,b,s,s,w,w,p,w,o,p,k,v,d e,f,y,e,t,n,f,c,b,w,t,b,s,s,p,p,p,w,o,p,k,v,d e,x,y,g,t,n,f,c,b,w,t,b,s,s,w,g,p,w,o,p,k,v,d e,x,f,n,f,n,f,w,b,k,t,e,s,s,w,w,p,w,o,e,k,a,g e,x,y,e,t,n,f,c,b,u,t,b,s,s,w,g,p,w,o,p,k,y,d p,x,f,w,f,c,f,w,n,u,e,b,s,s,w,w,p,w,o,p,n,v,d e,x,y,e,t,n,f,c,b,p,t,b,s,s,g,p,p,w,o,p,k,v,d e,f,f,g,t,n,f,c,b,p,t,b,s,s,p,p,p,w,o,p,k,v,d e,f,f,n,t,n,f,c,b,u,t,b,s,s,w,g,p,w,o,p,k,v,d e,x,y,n,t,n,f,c,b,p,t,b,s,s,p,p,p,w,o,p,k,v,d e,f,f,n,t,n,f,c,b,u,t,b,s,s,g,g,p,w,o,p,k,v,d e,x,f,g,t,n,f,c,b,n,t,b,s,s,w,w,p,w,o,p,n,y,d e,f,y,n,t,n,f,c,b,n,t,b,s,s,g,p,p,w,o,p,k,v,d e,x,y,g,t,n,f,c,b,w,t,b,s,s,g,w,p,w,o,p,n,y,d e,f,y,n,t,n,f,c,b,p,t,b,s,s,g,p,p,w,o,p,n,y,d e,x,f,g,t,n,f,c,b,n,t,b,s,s,p,w,p,w,o,p,n,v,d p,x,f,g,f,f,f,c,b,h,e,b,k,k,n,p,p,w,o,l,h,y,p e,x,y,g,t,n,f,c,b,n,t,b,s,s,w,w,p,w,o,p,n,y,d e,x,y,g,t,n,f,c,b,w,t,b,s,s,w,w,p,w,o,p,k,v,d e,x,f,g,t,n,f,c,b,n,t,b,s,s,p,g,p,w,o,p,k,y,d e,x,f,e,t,n,f,c,b,p,t,b,s,s,p,p,p,w,o,p,k,y,d e,x,f,g,t,n,f,c,b,p,t,b,s,s,w,w,p,w,o,p,k,y,d e,x,f,g,t,n,f,c,b,p,t,b,s,s,w,p,p,w,o,p,n,y,d e,f,f,g,t,n,f,c,b,n,t,b,s,s,g,w,p,w,o,p,n,y,d e,x,y,e,t,n,f,c,b,n,t,b,s,s,g,g,p,w,o,p,n,y,d e,x,f,g,t,n,f,c,b,p,t,b,s,s,g,w,p,w,o,p,n,v,d e,x,y,e,t,n,f,c,b,u,t,b,s,s,p,p,p,w,o,p,k,v,d e,x,f,n,t,n,f,c,b,w,t,b,s,s,p,g,p,w,o,p,k,y,d e,x,y,g,t,n,f,c,b,u,t,b,s,s,g,g,p,w,o,p,n,v,d e,x,y,e,t,n,f,c,b,n,t,b,s,s,w,p,p,w,o,p,n,y,d e,f,f,n,t,n,f,c,b,p,t,b,s,s,p,p,p,w,o,p,k,y,d e,x,f,e,t,n,f,c,b,w,t,b,s,s,w,w,p,w,o,p,k,y,d e,f,y,n,t,n,f,c,b,u,t,b,s,s,w,g,p,w,o,p,n,v,d e,x,y,n,t,n,f,c,b,w,t,b,s,s,g,g,p,w,o,p,k,y,d e,x,y,g,t,n,f,c,b,u,t,b,s,s,g,p,p,w,o,p,n,v,d e,f,f,e,t,n,f,c,b,u,t,b,s,s,p,w,p,w,o,p,k,y,d 
e,x,f,g,t,n,f,c,b,u,t,b,s,s,g,p,p,w,o,p,k,y,d e,x,y,g,t,n,f,c,b,n,t,b,s,s,p,w,p,w,o,p,k,y,d e,x,y,g,t,n,f,c,b,w,t,b,s,s,w,w,p,w,o,p,n,y,d p,x,f,g,f,f,f,c,b,g,e,b,k,k,b,n,p,w,o,l,h,y,p e,x,f,g,t,n,f,c,b,p,t,b,s,s,w,g,p,w,o,p,k,v,d e,x,f,g,t,n,f,c,b,n,t,b,s,s,g,p,p,w,o,p,n,y,d e,x,f,n,t,n,f,c,b,p,t,b,s,s,w,w,p,w,o,p,n,v,d e,x,f,g,t,n,f,c,b,w,t,b,s,s,g,w,p,w,o,p,n,v,d e,f,f,n,t,n,f,c,b,n,t,b,s,s,w,w,p,w,o,p,k,y,d e,x,y,n,t,n,f,c,b,p,t,b,s,s,g,g,p,w,o,p,k,y,d e,x,f,e,t,n,f,c,b,p,t,b,s,s,p,w,p,w,o,p,k,v,d e,x,y,n,t,n,f,c,b,w,t,b,s,s,g,g,p,w,o,p,k,v,d e,f,y,g,t,n,f,c,b,w,t,b,s,s,w,w,p,w,o,p,k,v,d e,f,f,g,t,n,f,c,b,n,t,b,s,s,g,w,p,w,o,p,n,v,d p,x,f,g,f,f,f,c,b,h,e,b,k,k,p,b,p,w,o,l,h,v,p e,x,f,n,t,n,f,c,b,u,t,b,s,s,p,w,p,w,o,p,n,y,d e,x,y,n,t,n,f,c,b,u,t,b,s,s,g,g,p,w,o,p,k,y,d e,x,f,e,t,n,f,c,b,n,t,b,s,s,g,g,p,w,o,p,n,v,d e,x,f,n,t,n,f,c,b,u,t,b,s,s,p,g,p,w,o,p,n,v,d e,f,f,n,t,n,f,c,b,p,t,b,s,s,w,p,p,w,o,p,n,y,d e,x,y,g,t,n,f,c,b,w,t,b,s,s,w,g,p,w,o,p,n,v,d e,x,y,n,t,n,f,c,b,u,t,b,s,s,w,g,p,w,o,p,k,v,d e,x,y,e,t,n,f,c,b,p,t,b,s,s,w,w,p,w,o,p,n,y,d e,f,f,g,t,n,f,c,b,p,t,b,s,s,w,g,p,w,o,p,k,y,d e,x,f,g,t,n,f,c,b,w,t,b,s,s,p,p,p,w,o,p,k,v,d e,x,y,e,t,n,f,c,b,n,t,b,s,s,p,g,p,w,o,p,n,v,d e,x,y,e,t,n,f,c,b,u,t,b,s,s,w,w,p,w,o,p,n,v,d e,x,f,g,t,n,f,c,b,p,t,b,s,s,p,p,p,w,o,p,k,y,d e,x,y,n,t,n,f,c,b,p,t,b,s,s,p,g,p,w,o,p,n,v,d e,x,f,e,t,n,f,c,b,u,t,b,s,s,p,w,p,w,o,p,k,y,d e,f,f,n,t,n,f,c,b,n,t,b,s,s,g,p,p,w,o,p,n,y,d e,x,f,e,t,n,f,c,b,p,t,b,s,s,w,g,p,w,o,p,n,y,d e,f,f,n,t,n,f,c,b,n,t,b,s,s,w,p,p,w,o,p,n,v,d e,x,y,e,t,n,f,c,b,n,t,b,s,s,p,g,p,w,o,p,k,y,d e,x,f,n,t,n,f,c,b,n,t,b,s,s,g,w,p,w,o,p,k,y,d e,x,f,e,t,n,f,c,b,w,t,b,s,s,w,p,p,w,o,p,n,y,d e,x,f,e,t,n,f,c,b,w,t,b,s,s,p,g,p,w,o,p,k,v,d e,x,f,n,t,n,f,c,b,u,t,b,s,s,g,w,p,w,o,p,k,v,d e,x,y,n,t,n,f,c,b,n,t,b,s,s,g,w,p,w,o,p,n,y,d e,x,y,e,t,n,f,c,b,w,t,b,s,s,w,w,p,w,o,p,n,y,d e,f,f,n,t,n,f,c,b,u,t,b,s,s,p,g,p,w,o,p,n,y,d e,x,y,n,t,n,f,c,b,u,t,b,s,s,p,w,p,w,o,p,k,y,d e,x,y,e,t,n,f,c,b,u,t,b,s,s,w,g,p,w,o,p,n,v,d e,f,f,g,f,n,f,w,b,n,t,e,f,s,w,w,p,w,o,e,n,s,g e,x,y,e,t,n,f,c,b,w,t,b,s,s,w,g,p,w,o,p,k,v,d e,x,y,e,t,n,f,c,b,p,t,b,s,s,g,g,p,w,o,p,n,y,d e,f,f,n,t,n,f,c,b,u,t,b,s,s,g,p,p,w,o,p,n,y,d e,f,f,n,t,n,f,c,b,n,t,b,s,s,g,g,p,w,o,p,k,y,d e,x,f,g,t,n,f,c,b,u,t,b,s,s,p,p,p,w,o,p,k,v,d e,x,y,g,t,n,f,c,b,w,t,b,s,s,w,p,p,w,o,p,k,v,d e,x,f,n,t,n,f,c,b,u,t,b,s,s,w,p,p,w,o,p,n,y,d e,f,f,n,t,n,f,c,b,w,t,b,s,s,p,w,p,w,o,p,n,y,d e,x,y,n,t,n,f,c,b,u,t,b,s,s,p,p,p,w,o,p,k,y,d e,x,y,g,t,n,f,c,b,u,t,b,s,s,p,p,p,w,o,p,k,v,d e,x,f,g,t,n,f,c,b,w,t,b,s,s,g,p,p,w,o,p,n,y,d e,x,y,e,t,n,f,c,b,p,t,b,s,s,w,g,p,w,o,p,k,y,d e,x,f,g,t,n,f,c,b,u,t,b,s,s,w,g,p,w,o,p,n,v,d e,x,y,e,t,n,f,c,b,u,t,b,s,s,w,p,p,w,o,p,n,v,d e,x,f,g,t,n,f,c,b,p,t,b,s,s,p,w,p,w,o,p,n,v,d e,x,f,e,t,n,f,c,b,u,t,b,s,s,g,w,p,w,o,p,n,v,d e,x,f,e,t,n,f,c,b,w,t,b,s,s,w,w,p,w,o,p,n,v,d e,x,f,e,t,n,f,c,b,u,t,b,s,s,w,g,p,w,o,p,k,y,d e,f,y,g,t,n,f,c,b,w,t,b,s,s,g,p,p,w,o,p,k,y,d e,x,f,g,t,n,f,c,b,w,t,b,s,s,g,p,p,w,o,p,k,y,d e,x,f,e,t,n,f,c,b,w,t,b,s,s,g,p,p,w,o,p,k,v,d e,x,f,g,t,n,f,c,b,w,t,b,s,s,w,w,p,w,o,p,n,y,d e,f,f,n,t,n,f,c,b,p,t,b,s,s,w,w,p,w,o,p,k,v,d p,x,f,g,f,f,f,c,b,h,e,b,k,k,b,n,p,w,o,l,h,v,d e,x,f,n,t,n,f,c,b,w,t,b,s,s,w,w,p,w,o,p,k,v,d e,x,y,g,t,n,f,c,b,p,t,b,s,s,g,g,p,w,o,p,n,y,d e,f,f,n,t,n,f,c,b,n,t,b,s,s,g,g,p,w,o,p,n,y,d e,x,f,e,t,n,f,c,b,u,t,b,s,s,w,w,p,w,o,p,k,v,d e,f,f,g,t,n,f,c,b,u,t,b,s,s,g,p,p,w,o,p,k,v,d e,x,y,n,t,n,f,c,b,n,t,b,s,s,g,g,p,w,o,p,k,y,d e,x,y,e,t,n,f,c,b,n,t,b,s,s,g,p,p,w,o,p,k,y,d e,x,y,e,t,n,f,c,b,w,t,b,s,s,g,w,p,w,o,p,k,v,d e,x,y,n,t,n,f,c,b,u,t,b,s,s,w,p,p,w,o,p,n,v,d 
e,x,y,n,t,n,f,c,b,n,t,b,s,s,g,p,p,w,o,p,k,v,d p,x,y,y,f,f,f,c,b,h,e,b,k,k,b,n,p,w,o,l,h,v,p e,f,y,n,t,n,f,c,b,u,t,b,s,s,g,g,p,w,o,p,n,y,d p,x,f,g,f,f,f,c,b,p,e,b,k,k,b,n,p,w,o,l,h,y,d p,x,f,g,f,c,f,w,n,g,e,b,s,s,w,w,p,w,o,p,k,s,d e,f,f,g,t,n,f,c,b,u,t,b,s,s,p,p,p,w,o,p,n,y,d e,f,f,e,t,n,f,c,b,u,t,b,s,s,g,p,p,w,o,p,n,v,d e,f,y,n,t,n,f,c,b,p,t,b,s,s,w,w,p,w,o,p,k,y,d e,f,f,e,t,n,f,c,b,p,t,b,s,s,g,w,p,w,o,p,k,y,d p,x,f,g,f,f,f,c,b,h,e,b,k,k,p,p,p,w,o,l,h,y,p e,f,f,g,t,n,f,c,b,w,t,b,s,s,w,w,p,w,o,p,n,v,d e,f,y,e,t,n,f,c,b,u,t,b,s,s,p,g,p,w,o,p,k,y,d e,f,y,e,t,n,f,c,b,u,t,b,s,s,p,g,p,w,o,p,n,y,d p,x,f,w,f,c,f,c,n,p,e,b,s,s,w,w,p,w,o,p,n,s,d e,x,f,e,t,n,f,c,b,u,t,b,s,s,p,g,p,w,o,p,n,y,d e,f,y,n,t,n,f,c,b,u,t,b,s,s,w,w,p,w,o,p,k,v,d p,x,f,g,f,f,f,c,b,g,e,b,k,k,p,n,p,w,o,l,h,v,g p,x,f,g,f,f,f,c,b,g,e,b,k,k,p,b,p,w,o,l,h,y,p e,f,y,e,t,n,f,c,b,p,t,b,s,s,g,w,p,w,o,p,n,v,d e,f,y,e,t,n,f,c,b,n,t,b,s,s,g,w,p,w,o,p,k,v,d e,f,f,n,t,n,f,c,b,w,t,b,s,s,p,w,p,w,o,p,k,v,d e,f,f,g,t,n,f,c,b,w,t,b,s,s,w,g,p,w,o,p,k,v,d e,f,f,e,t,n,f,c,b,w,t,b,s,s,g,g,p,w,o,p,n,y,d e,f,y,e,t,n,f,c,b,u,t,b,s,s,g,p,p,w,o,p,n,v,d e,f,y,e,t,n,f,c,b,w,t,b,s,s,g,p,p,w,o,p,n,y,d e,f,y,e,t,n,f,c,b,p,t,b,s,s,g,w,p,w,o,p,k,v,d p,x,f,p,f,c,f,w,n,u,e,b,s,s,w,w,p,w,o,p,k,s,d p,x,f,g,f,f,f,c,b,g,e,b,k,k,n,b,p,w,o,l,h,y,d e,f,f,e,t,n,f,c,b,p,t,b,s,s,w,p,p,w,o,p,n,y,d e,f,y,g,t,n,f,c,b,w,t,b,s,s,g,p,p,w,o,p,k,v,d e,f,y,g,t,n,f,c,b,w,t,b,s,s,g,w,p,w,o,p,k,v,d e,f,f,g,t,n,f,c,b,w,t,b,s,s,w,p,p,w,o,p,n,y,d e,f,f,e,t,n,f,c,b,u,t,b,s,s,g,p,p,w,o,p,k,y,d e,f,y,e,t,n,f,c,b,w,t,b,s,s,p,w,p,w,o,p,n,v,d p,f,f,g,f,f,f,c,b,h,e,b,k,k,b,p,p,w,o,l,h,v,p e,f,y,n,t,n,f,c,b,w,t,b,s,s,g,p,p,w,o,p,n,y,d e,f,y,e,t,n,f,c,b,p,t,b,s,s,p,p,p,w,o,p,n,v,d p,x,f,g,f,c,f,c,n,p,e,b,s,s,w,w,p,w,o,p,n,s,d e,f,f,e,t,n,f,c,b,p,t,b,s,s,g,p,p,w,o,p,k,y,d e,f,y,e,t,n,f,c,b,p,t,b,s,s,w,g,p,w,o,p,k,v,d p,x,y,y,f,f,f,c,b,p,e,b,k,k,b,p,p,w,o,l,h,v,g p,f,f,y,f,f,f,c,b,p,e,b,k,k,p,p,p,w,o,l,h,v,d e,f,y,n,t,n,f,c,b,u,t,b,s,s,p,g,p,w,o,p,k,v,d e,f,y,n,t,n,f,c,b,p,t,b,s,s,w,g,p,w,o,p,k,v,d e,f,y,g,t,n,f,c,b,w,t,b,s,s,w,w,p,w,o,p,n,v,d e,f,y,n,t,n,f,c,b,p,t,b,s,s,w,w,p,w,o,p,n,v,d e,x,y,e,t,n,f,c,b,n,t,b,s,s,p,p,p,w,o,p,k,v,d p,x,s,p,f,c,f,w,n,n,e,b,s,s,w,w,p,w,o,p,n,v,d e,f,y,n,t,n,f,c,b,p,t,b,s,s,p,w,p,w,o,p,k,y,d e,x,y,e,t,n,f,c,b,u,t,b,s,s,p,w,p,w,o,p,k,v,d e,f,f,e,t,n,f,c,b,p,t,b,s,s,p,p,p,w,o,p,k,y,d e,f,y,n,t,n,f,c,b,n,t,b,s,s,g,g,p,w,o,p,k,v,d p,x,s,w,f,c,f,c,n,p,e,b,s,s,w,w,p,w,o,p,k,v,d e,f,f,g,t,n,f,c,b,w,t,b,s,s,w,g,p,w,o,p,n,v,d e,f,y,g,t,n,f,c,b,u,t,b,s,s,g,p,p,w,o,p,n,y,d e,f,y,g,t,n,f,c,b,p,t,b,s,s,p,p,p,w,o,p,n,v,d e,f,y,n,t,n,f,c,b,p,t,b,s,s,w,w,p,w,o,p,k,v,d p,x,s,g,f,c,f,c,n,u,e,b,s,s,w,w,p,w,o,p,n,s,d p,x,f,p,f,c,f,c,n,n,e,b,s,s,w,w,p,w,o,p,k,s,d e,f,y,e,t,n,f,c,b,u,t,b,s,s,g,g,p,w,o,p,k,y,d p,x,f,g,f,f,f,c,b,p,e,b,k,k,p,n,p,w,o,l,h,y,d p,x,s,w,f,c,f,w,n,g,e,b,s,s,w,w,p,w,o,p,k,v,d p,x,f,g,f,f,f,c,b,g,e,b,k,k,b,b,p,w,o,l,h,v,p e,f,y,n,t,n,f,c,b,n,t,b,s,s,p,p,p,w,o,p,n,v,d p,x,y,y,f,f,f,c,b,g,e,b,k,k,n,b,p,w,o,l,h,y,p e,f,y,g,t,n,f,c,b,u,t,b,s,s,p,g,p,w,o,p,n,v,d e,f,y,n,t,n,f,c,b,u,t,b,s,s,p,w,p,w,o,p,k,y,d e,f,y,g,t,n,f,c,b,p,t,b,s,s,w,g,p,w,o,p,k,y,d p,x,s,w,f,c,f,c,n,p,e,b,s,s,w,w,p,w,o,p,n,s,d p,x,f,g,f,f,f,c,b,g,e,b,k,k,n,b,p,w,o,l,h,v,p e,f,y,g,t,n,f,c,b,u,t,b,s,s,w,w,p,w,o,p,k,v,d e,x,y,n,t,n,f,c,b,w,t,b,s,s,p,w,p,w,o,p,k,y,d e,f,y,g,t,n,f,c,b,n,t,b,s,s,p,p,p,w,o,p,k,y,d p,x,f,g,f,f,f,c,b,p,e,b,k,k,n,p,p,w,o,l,h,v,p e,f,y,n,t,n,f,c,b,p,t,b,s,s,w,g,p,w,o,p,n,y,d p,f,f,g,f,f,f,c,b,h,e,b,k,k,p,b,p,w,o,l,h,y,p e,f,f,e,t,n,f,c,b,n,t,b,s,s,w,g,p,w,o,p,n,y,d 
e,x,f,n,t,n,f,c,b,u,t,b,s,s,w,w,p,w,o,p,k,v,d p,f,f,g,f,f,f,c,b,g,e,b,k,k,p,b,p,w,o,l,h,v,p e,f,y,g,t,n,f,c,b,u,t,b,s,s,p,w,p,w,o,p,k,v,d e,f,y,g,t,n,f,c,b,u,t,b,s,s,g,w,p,w,o,p,n,y,d e,f,y,e,t,n,f,c,b,p,t,b,s,s,p,g,p,w,o,p,n,y,d p,x,f,g,f,f,f,c,b,g,e,b,k,k,p,p,p,w,o,l,h,v,p e,f,f,e,t,n,f,c,b,p,t,b,s,s,w,w,p,w,o,p,n,v,d e,f,y,n,t,n,f,c,b,u,t,b,s,s,g,p,p,w,o,p,k,v,d p,x,f,g,f,f,f,c,b,p,e,b,k,k,n,p,p,w,o,l,h,v,g e,f,y,e,t,n,f,c,b,u,t,b,s,s,g,w,p,w,o,p,k,y,d p,x,f,g,f,f,f,c,b,h,e,b,k,k,p,b,p,w,o,l,h,y,d e,f,y,e,t,n,f,c,b,w,t,b,s,s,w,g,p,w,o,p,n,y,d p,x,f,g,f,f,f,c,b,h,e,b,k,k,b,n,p,w,o,l,h,y,g p,x,f,g,f,f,f,c,b,g,e,b,k,k,n,p,p,w,o,l,h,y,p p,x,f,g,f,f,f,c,b,g,e,b,k,k,b,b,p,w,o,l,h,y,g e,f,y,e,t,n,f,c,b,u,t,b,s,s,g,g,p,w,o,p,n,y,d p,x,f,g,f,f,f,c,b,p,e,b,k,k,n,b,p,w,o,l,h,v,g e,f,f,e,t,n,f,c,b,u,t,b,s,s,p,p,p,w,o,p,n,v,d p,x,s,g,f,c,f,w,n,g,e,b,s,s,w,w,p,w,o,p,n,v,d e,f,y,g,t,n,f,c,b,w,t,b,s,s,p,g,p,w,o,p,k,v,d p,x,s,p,f,c,f,c,n,p,e,b,s,s,w,w,p,w,o,p,n,s,d e,f,f,e,t,n,f,c,b,n,t,b,s,s,g,g,p,w,o,p,n,y,d e,f,f,e,t,n,f,c,b,u,t,b,s,s,g,g,p,w,o,p,n,y,d e,f,y,e,t,n,f,c,b,p,t,b,s,s,g,w,p,w,o,p,k,y,d e,f,y,n,t,n,f,c,b,n,t,b,s,s,w,p,p,w,o,p,k,y,d p,x,f,g,f,f,f,c,b,p,e,b,k,k,b,p,p,w,o,l,h,y,g p,f,f,y,f,f,f,c,b,g,e,b,k,k,n,n,p,w,o,l,h,y,p p,x,s,g,f,c,f,w,n,g,e,b,s,s,w,w,p,w,o,p,k,s,d e,f,f,e,t,n,f,c,b,u,t,b,s,s,g,p,p,w,o,p,k,v,d e,f,f,e,t,n,f,c,b,w,t,b,s,s,g,g,p,w,o,p,k,y,d e,f,y,e,t,n,f,c,b,u,t,b,s,s,p,p,p,w,o,p,k,v,d e,f,y,n,t,n,f,c,b,w,t,b,s,s,p,g,p,w,o,p,k,y,d p,x,f,g,f,f,f,c,b,p,e,b,k,k,p,b,p,w,o,l,h,v,g p,x,f,g,f,c,f,c,n,u,e,b,s,s,w,w,p,w,o,p,k,v,d e,f,f,g,t,n,f,c,b,w,t,b,s,s,w,p,p,w,o,p,k,y,d e,f,y,e,t,n,f,c,b,n,t,b,s,s,g,w,p,w,o,p,n,v,d e,f,y,e,t,n,f,c,b,n,t,b,s,s,g,p,p,w,o,p,k,v,d e,f,f,g,t,n,f,c,b,w,t,b,s,s,p,p,p,w,o,p,n,v,d e,f,y,n,t,n,f,c,b,p,t,b,s,s,g,p,p,w,o,p,k,y,d e,f,y,n,t,n,f,c,b,n,t,b,s,s,g,w,p,w,o,p,k,y,d e,f,f,e,t,n,f,c,b,n,t,b,s,s,g,p,p,w,o,p,k,v,d p,x,s,g,f,c,f,c,n,u,e,b,s,s,w,w,p,w,o,p,k,v,d p,x,f,w,f,c,f,c,n,n,e,b,s,s,w,w,p,w,o,p,n,v,d e,f,f,g,t,n,f,c,b,u,t,b,s,s,p,g,p,w,o,p,k,y,d e,f,y,g,t,n,f,c,b,w,t,b,s,s,g,w,p,w,o,p,n,y,d p,x,f,g,f,f,f,c,b,p,e,b,k,k,n,b,p,w,o,l,h,y,d e,x,y,e,t,n,f,c,b,n,t,b,s,s,w,g,p,w,o,p,k,v,d p,x,s,w,f,c,f,w,n,p,e,b,s,s,w,w,p,w,o,p,k,s,d e,f,y,g,t,n,f,c,b,n,t,b,s,s,p,p,p,w,o,p,n,y,d e,f,y,g,t,n,f,c,b,n,t,b,s,s,w,p,p,w,o,p,k,y,d e,f,y,g,t,n,f,c,b,p,t,b,s,s,w,p,p,w,o,p,n,v,d e,f,f,e,t,n,f,c,b,w,t,b,s,s,p,g,p,w,o,p,n,v,d e,f,y,n,t,n,f,c,b,w,t,b,s,s,p,w,p,w,o,p,n,y,d e,f,y,g,t,n,f,c,b,p,t,b,s,s,g,g,p,w,o,p,k,v,d e,f,y,n,t,n,f,c,b,u,t,b,s,s,w,g,p,w,o,p,n,y,d e,f,y,g,t,n,f,c,b,p,t,b,s,s,w,p,p,w,o,p,k,y,d e,f,y,g,t,n,f,c,b,u,t,b,s,s,g,p,p,w,o,p,n,v,d e,x,f,n,t,n,f,c,b,u,t,b,s,s,w,p,p,w,o,p,k,y,d e,f,y,e,t,n,f,c,b,p,t,b,s,s,w,p,p,w,o,p,k,y,d e,f,f,e,t,n,f,c,b,p,t,b,s,s,g,w,p,w,o,p,k,v,d e,f,y,e,t,n,f,c,b,p,t,b,s,s,w,p,p,w,o,p,n,v,d p,x,f,y,f,f,f,c,b,g,e,b,k,k,n,n,p,w,o,l,h,v,p e,f,y,g,t,n,f,c,b,u,t,b,s,s,p,w,p,w,o,p,n,y,d e,f,y,n,t,n,f,c,b,p,t,b,s,s,p,p,p,w,o,p,k,v,d p,x,f,g,f,f,f,c,b,p,e,b,k,k,p,b,p,w,o,l,h,y,d e,f,y,g,t,n,f,c,b,w,t,b,s,s,p,p,p,w,o,p,k,y,d e,f,y,g,t,n,f,c,b,u,t,b,s,s,w,g,p,w,o,p,k,y,d e,f,y,g,t,n,f,c,b,p,t,b,s,s,g,w,p,w,o,p,n,y,d e,f,y,e,t,n,f,c,b,n,t,b,s,s,g,g,p,w,o,p,n,y,d e,f,y,g,t,n,f,c,b,n,t,b,s,s,g,p,p,w,o,p,k,v,d p,x,s,w,f,c,f,c,n,g,e,b,s,s,w,w,p,w,o,p,n,s,d p,x,f,w,f,c,f,w,n,n,e,b,s,s,w,w,p,w,o,p,n,s,d e,f,f,n,t,n,f,c,b,n,t,b,s,s,w,w,p,w,o,p,k,v,d e,f,f,e,t,n,f,c,b,u,t,b,s,s,w,p,p,w,o,p,k,y,d e,f,y,e,t,n,f,c,b,n,t,b,s,s,g,p,p,w,o,p,n,y,d p,x,f,g,f,f,f,c,b,h,e,b,k,k,n,p,p,w,o,l,h,y,d e,f,f,g,t,n,f,c,b,p,t,b,s,s,g,w,p,w,o,p,n,v,d 
e,f,f,g,t,n,f,c,b,n,t,b,s,s,w,p,p,w,o,p,k,y,d e,f,f,g,t,n,f,c,b,u,t,b,s,s,w,w,p,w,o,p,k,y,d p,x,s,w,f,c,f,w,n,g,e,b,s,s,w,w,p,w,o,p,n,s,d e,x,y,e,t,n,f,c,b,n,t,b,s,s,w,p,p,w,o,p,k,v,d e,f,y,e,t,n,f,c,b,u,t,b,s,s,p,g,p,w,o,p,k,v,d p,x,f,g,f,f,f,c,b,g,e,b,k,k,p,b,p,w,o,l,h,y,d e,f,f,e,t,n,f,c,b,w,t,b,s,s,w,g,p,w,o,p,k,y,d p,x,f,g,f,f,f,c,b,p,e,b,k,k,n,b,p,w,o,l,h,v,p p,x,f,p,f,c,f,w,n,g,e,b,s,s,w,w,p,w,o,p,k,s,d e,f,y,g,t,n,f,c,b,w,t,b,s,s,p,w,p,w,o,p,k,v,d p,x,f,g,f,f,f,c,b,g,e,b,k,k,n,p,p,w,o,l,h,v,p p,x,f,w,f,c,f,c,n,g,e,b,s,s,w,w,p,w,o,p,n,s,d p,x,f,g,f,f,f,c,b,p,e,b,k,k,p,p,p,w,o,l,h,v,p e,f,y,e,t,n,f,c,b,w,t,b,s,s,p,g,p,w,o,p,n,v,d p,x,y,g,f,f,f,c,b,p,e,b,k,k,n,p,p,w,o,l,h,v,p p,x,s,g,f,c,f,w,n,u,e,b,s,s,w,w,p,w,o,p,n,s,d e,f,y,g,t,n,f,c,b,n,t,b,s,s,g,g,p,w,o,p,n,y,d e,f,y,n,t,n,f,c,b,p,t,b,s,s,g,p,p,w,o,p,k,v,d e,f,f,e,t,n,f,c,b,n,t,b,s,s,w,w,p,w,o,p,n,y,d p,x,f,g,f,f,f,c,b,p,e,b,k,k,n,n,p,w,o,l,h,v,p p,x,f,p,f,c,f,w,n,g,e,b,s,s,w,w,p,w,o,p,k,v,d e,x,y,n,t,n,f,c,b,p,t,b,s,s,g,p,p,w,o,p,n,v,d e,f,f,e,t,n,f,c,b,w,t,b,s,s,p,p,p,w,o,p,k,y,d e,f,y,n,t,n,f,c,b,w,t,b,s,s,w,g,p,w,o,p,k,y,d e,f,y,g,t,n,f,c,b,n,t,b,s,s,g,g,p,w,o,p,k,v,d p,x,y,g,f,f,f,c,b,g,e,b,k,k,n,p,p,w,o,l,h,v,g e,f,y,n,t,n,f,c,b,u,t,b,s,s,w,w,p,w,o,p,n,v,d p,x,f,p,f,c,f,c,n,u,e,b,s,s,w,w,p,w,o,p,k,v,d p,x,f,w,f,c,f,c,n,n,e,b,s,s,w,w,p,w,o,p,k,s,d e,f,f,e,t,n,f,c,b,w,t,b,s,s,g,w,p,w,o,p,n,y,d e,f,y,n,t,n,f,c,b,p,t,b,s,s,g,w,p,w,o,p,n,y,d e,f,y,e,t,n,f,c,b,u,t,b,s,s,g,p,p,w,o,p,k,v,d e,f,y,g,t,n,f,c,b,u,t,b,s,s,g,w,p,w,o,p,k,v,d p,x,f,g,f,f,f,c,b,g,e,b,k,k,b,b,p,w,o,l,h,v,d p,x,f,g,f,f,f,c,b,p,e,b,k,k,n,b,p,w,o,l,h,y,p e,f,f,e,t,n,f,c,b,p,t,b,s,s,w,w,p,w,o,p,k,y,d p,x,y,y,f,f,f,c,b,g,e,b,k,k,p,n,p,w,o,l,h,v,g p,x,s,p,f,c,f,c,n,u,e,b,s,s,w,w,p,w,o,p,k,s,d e,f,f,e,t,n,f,c,b,p,t,b,s,s,g,w,p,w,o,p,n,y,d p,f,f,y,f,f,f,c,b,p,e,b,k,k,b,n,p,w,o,l,h,v,d e,f,y,e,t,n,f,c,b,u,t,b,s,s,g,p,p,w,o,p,k,y,d e,f,y,e,t,n,f,c,b,n,t,b,s,s,w,g,p,w,o,p,n,v,d e,x,f,n,t,n,f,c,b,u,t,b,s,s,w,w,p,w,o,p,k,y,d e,f,f,e,t,n,f,c,b,w,t,b,s,s,g,p,p,w,o,p,k,v,d e,f,f,g,t,n,f,c,b,w,t,b,s,s,w,p,p,w,o,p,n,v,d p,x,f,g,f,f,f,c,b,p,e,b,k,k,b,b,p,w,o,l,h,v,g e,f,y,e,t,n,f,c,b,u,t,b,s,s,p,p,p,w,o,p,n,y,d p,f,f,g,f,f,f,c,b,h,e,b,k,k,n,b,p,w,o,l,h,y,p e,f,y,e,t,n,f,c,b,u,t,b,s,s,g,w,p,w,o,p,n,y,d e,f,f,e,t,n,f,c,b,p,t,b,s,s,p,w,p,w,o,p,k,v,d e,f,f,g,t,n,f,c,b,w,t,b,s,s,g,g,p,w,o,p,n,y,d p,x,f,g,f,f,f,c,b,g,e,b,k,k,b,p,p,w,o,l,h,y,g e,f,y,g,t,n,f,c,b,u,t,b,s,s,p,g,p,w,o,p,k,v,d e,f,y,g,t,n,f,c,b,u,t,b,s,s,g,p,p,w,o,p,k,y,d p,x,f,g,f,f,f,c,b,h,e,b,k,k,n,n,p,w,o,l,h,v,g p,x,f,w,f,c,f,c,n,u,e,b,s,s,w,w,p,w,o,p,n,s,d e,f,f,e,t,n,f,c,b,w,t,b,s,s,w,g,p,w,o,p,k,v,d p,x,f,y,f,f,f,c,b,g,e,b,k,k,n,b,p,w,o,l,h,v,d e,f,y,e,t,n,f,c,b,w,t,b,s,s,p,g,p,w,o,p,n,y,d e,f,y,g,t,n,f,c,b,n,t,b,s,s,p,p,p,w,o,p,n,v,d e,f,f,e,t,n,f,c,b,n,t,b,s,s,w,g,p,w,o,p,k,v,d e,f,f,g,t,n,f,c,b,u,t,b,s,s,w,p,p,w,o,p,n,v,d e,f,y,g,t,n,f,c,b,w,t,b,s,s,g,g,p,w,o,p,k,v,d e,f,f,e,t,n,f,c,b,p,t,b,s,s,p,g,p,w,o,p,n,v,d p,x,s,g,f,c,f,w,n,g,e,b,s,s,w,w,p,w,o,p,k,v,d e,f,y,g,t,n,f,c,b,u,t,b,s,s,w,w,p,w,o,p,n,y,d e,f,y,n,t,n,f,c,b,w,t,b,s,s,w,g,p,w,o,p,n,y,d e,f,y,g,t,n,f,c,b,n,t,b,s,s,p,g,p,w,o,p,n,y,d e,f,y,g,t,n,f,c,b,n,t,b,s,s,w,p,p,w,o,p,n,y,d e,f,y,n,t,n,f,c,b,w,t,b,s,s,g,w,p,w,o,p,k,y,d e,x,f,g,t,n,f,c,b,n,t,b,s,s,w,g,p,w,o,p,n,v,d e,f,f,g,t,n,f,c,b,u,t,b,s,s,g,p,p,w,o,p,n,y,d p,x,f,g,f,f,f,c,b,p,e,b,k,k,p,b,p,w,o,l,h,y,p p,x,s,w,f,c,f,w,n,p,e,b,s,s,w,w,p,w,o,p,k,v,d e,f,f,g,t,n,f,c,b,u,t,b,s,s,p,g,p,w,o,p,k,v,d e,f,f,g,t,n,f,c,b,w,t,b,s,s,g,w,p,w,o,p,k,v,d e,f,y,g,t,n,f,c,b,n,t,b,s,s,w,g,p,w,o,p,n,v,d 
p,x,f,g,f,f,f,c,b,h,e,b,k,k,p,b,p,w,o,l,h,y,p p,x,s,w,f,c,f,w,n,u,e,b,s,s,w,w,p,w,o,p,n,s,d p,x,f,g,f,f,f,c,b,p,e,b,k,k,n,n,p,w,o,l,h,y,p e,x,y,g,t,n,f,c,b,n,t,b,s,s,p,p,p,w,o,p,k,y,d p,x,f,g,f,f,f,c,b,h,e,b,k,k,b,n,p,w,o,l,h,v,g e,f,y,e,t,n,f,c,b,p,t,b,s,s,w,w,p,w,o,p,n,y,d e,f,y,g,t,n,f,c,b,p,t,b,s,s,w,p,p,w,o,p,n,y,d e,x,f,g,t,n,f,c,b,w,t,b,s,s,g,w,p,w,o,p,k,v,d p,x,f,g,f,f,f,c,b,h,e,b,k,k,b,b,p,w,o,l,h,v,d e,f,y,g,t,n,f,c,b,p,t,b,s,s,p,w,p,w,o,p,k,y,d e,f,y,g,t,n,f,c,b,n,t,b,s,s,p,p,p,w,o,p,k,v,d p,x,s,w,f,c,f,c,n,p,e,b,s,s,w,w,p,w,o,p,k,s,d p,x,y,y,f,f,f,c,b,g,e,b,k,k,b,n,p,w,o,l,h,y,g e,f,f,e,t,n,f,c,b,n,t,b,s,s,p,g,p,w,o,p,k,y,d p,x,f,g,f,f,f,c,b,g,e,b,k,k,p,p,p,w,o,l,h,y,d p,x,f,g,f,f,f,c,b,g,e,b,k,k,n,n,p,w,o,l,h,y,d e,x,y,n,t,n,f,c,b,u,t,b,s,s,w,g,p,w,o,p,n,v,d p,x,f,g,f,f,f,c,b,g,e,b,k,k,b,n,p,w,o,l,h,y,g e,f,y,n,t,n,f,c,b,u,t,b,s,s,p,g,p,w,o,p,n,y,d e,f,f,g,t,n,f,c,b,u,t,b,s,s,w,p,p,w,o,p,k,y,d e,f,y,g,t,n,f,c,b,p,t,b,s,s,g,p,p,w,o,p,n,v,d e,f,y,g,t,n,f,c,b,u,t,b,s,s,p,p,p,w,o,p,k,y,d p,x,s,g,f,c,f,w,n,n,e,b,s,s,w,w,p,w,o,p,n,s,d e,f,f,g,t,n,f,c,b,w,t,b,s,s,p,g,p,w,o,p,k,v,d e,f,y,e,t,n,f,c,b,n,t,b,s,s,p,g,p,w,o,p,n,v,d e,f,y,n,t,n,f,c,b,n,t,b,s,s,g,w,p,w,o,p,n,v,d e,f,y,g,t,n,f,c,b,u,t,b,s,s,g,p,p,w,o,p,k,v,d p,x,f,p,f,c,f,c,n,g,e,b,s,s,w,w,p,w,o,p,n,v,d p,x,f,g,f,f,f,c,b,p,e,b,k,k,n,p,p,w,o,l,h,y,g e,f,y,g,t,n,f,c,b,p,t,b,s,s,g,p,p,w,o,p,k,y,d e,f,f,g,t,n,f,c,b,u,t,b,s,s,w,g,p,w,o,p,k,y,d p,x,s,p,f,c,f,w,n,p,e,b,s,s,w,w,p,w,o,p,n,v,d e,x,y,g,t,n,f,c,b,u,t,b,s,s,w,p,p,w,o,p,n,y,d p,x,f,g,f,f,f,c,b,h,e,b,k,k,p,n,p,w,o,l,h,y,p p,x,f,g,f,f,f,c,b,h,e,b,k,k,b,n,p,w,o,l,h,y,p e,f,y,n,t,n,f,c,b,w,t,b,s,s,p,p,p,w,o,p,n,y,d e,f,y,n,t,n,f,c,b,p,t,b,s,s,p,w,p,w,o,p,n,v,d p,x,s,g,f,c,f,c,n,u,e,b,s,s,w,w,p,w,o,p,n,v,d p,f,f,g,f,f,f,c,b,g,e,b,k,k,n,n,p,w,o,l,h,v,d p,x,f,p,f,c,f,w,n,p,e,b,s,s,w,w,p,w,o,p,n,s,d e,f,y,e,t,n,f,c,b,w,t,b,s,s,g,w,p,w,o,p,k,v,d e,f,y,g,t,n,f,c,b,w,t,b,s,s,w,p,p,w,o,p,k,v,d e,f,f,e,t,n,f,c,b,n,t,b,s,s,g,p,p,w,o,p,n,y,d p,x,f,y,f,f,f,c,b,p,e,b,k,k,p,b,p,w,o,l,h,y,g e,f,f,g,t,n,f,c,b,w,t,b,s,s,g,w,p,w,o,p,k,y,d p,x,f,w,f,c,f,w,n,p,e,b,s,s,w,w,p,w,o,p,k,v,d p,x,f,g,f,f,f,c,b,g,e,b,k,k,n,n,p,w,o,l,h,v,d e,f,f,e,t,n,f,c,b,p,t,b,s,s,p,g,p,w,o,p,n,y,d e,f,y,g,t,n,f,c,b,w,t,b,s,s,g,p,p,w,o,p,n,y,d p,x,f,g,f,f,f,c,b,p,e,b,k,k,b,n,p,w,o,l,h,v,p p,x,f,g,f,f,f,c,b,h,e,b,k,k,n,p,p,w,o,l,h,v,p p,x,s,p,f,c,f,w,n,p,e,b,s,s,w,w,p,w,o,p,k,v,d e,x,y,g,t,n,f,c,b,u,t,b,s,s,w,g,p,w,o,p,k,v,d p,x,f,g,f,f,f,c,b,p,e,b,k,k,b,p,p,w,o,l,h,v,d e,f,y,n,t,n,f,c,b,w,t,b,s,s,p,g,p,w,o,p,n,y,d e,x,y,n,t,n,f,c,b,u,t,b,s,s,w,w,p,w,o,p,n,v,d e,f,y,e,t,n,f,c,b,p,t,b,s,s,g,g,p,w,o,p,n,y,d e,f,y,g,t,n,f,c,b,p,t,b,s,s,w,g,p,w,o,p,n,y,d e,f,f,e,t,n,f,c,b,p,t,b,s,s,w,g,p,w,o,p,k,y,d e,f,y,g,t,n,f,c,b,p,t,b,s,s,g,w,p,w,o,p,k,y,d e,f,f,g,t,n,f,c,b,w,t,b,s,s,w,g,p,w,o,p,n,y,d p,x,f,p,f,c,f,w,n,n,e,b,s,s,w,w,p,w,o,p,k,s,d e,f,f,e,t,n,f,c,b,n,t,b,s,s,g,g,p,w,o,p,n,v,d e,f,f,e,t,n,f,c,b,u,t,b,s,s,g,w,p,w,o,p,k,v,d p,x,s,w,f,c,f,w,n,n,e,b,s,s,w,w,p,w,o,p,n,s,d e,f,f,g,t,n,f,c,b,w,t,b,s,s,g,p,p,w,o,p,k,y,d e,f,y,n,t,n,f,c,b,n,t,b,s,s,w,w,p,w,o,p,k,v,d p,x,f,g,f,f,f,c,b,h,e,b,k,k,p,p,p,w,o,l,h,v,g e,f,f,e,t,n,f,c,b,n,t,b,s,s,w,p,p,w,o,p,n,v,d p,x,f,g,f,f,f,c,b,h,e,b,k,k,p,p,p,w,o,l,h,y,g e,f,f,e,t,n,f,c,b,w,t,b,s,s,p,w,p,w,o,p,n,y,d e,f,y,g,t,n,f,c,b,u,t,b,s,s,p,g,p,w,o,p,n,y,d e,f,f,g,t,n,f,c,b,w,t,b,s,s,p,g,p,w,o,p,n,v,d p,x,f,g,f,f,f,c,b,g,e,b,k,k,b,n,p,w,o,l,h,v,g e,f,y,e,t,n,f,c,b,n,t,b,s,s,g,p,p,w,o,p,n,v,d p,x,f,w,f,c,f,c,n,p,e,b,s,s,w,w,p,w,o,p,k,s,d e,f,y,n,t,n,f,c,b,n,t,b,s,s,w,p,p,w,o,p,n,y,d 
e,x,y,g,t,n,f,c,b,p,t,b,s,s,p,p,p,w,o,p,k,y,d e,f,y,n,t,n,f,c,b,u,t,b,s,s,p,p,p,w,o,p,n,y,d e,f,y,e,t,n,f,c,b,n,t,b,s,s,p,w,p,w,o,p,n,v,d e,f,y,g,t,n,f,c,b,p,t,b,s,s,p,g,p,w,o,p,n,y,d e,f,y,e,t,n,f,c,b,u,t,b,s,s,w,g,p,w,o,p,n,v,d p,x,f,g,f,f,f,c,b,h,e,b,k,k,p,p,p,w,o,l,h,y,d e,f,y,g,t,n,f,c,b,u,t,b,s,s,w,p,p,w,o,p,k,y,d e,f,y,g,t,n,f,c,b,w,t,b,s,s,w,g,p,w,o,p,n,v,d e,f,f,g,t,n,f,c,b,w,t,b,s,s,p,p,p,w,o,p,n,y,d e,f,y,g,t,n,f,c,b,p,t,b,s,s,p,g,p,w,o,p,n,v,d e,f,f,e,t,n,f,c,b,p,t,b,s,s,p,g,p,w,o,p,k,y,d e,f,f,e,t,n,f,c,b,n,t,b,s,s,w,g,p,w,o,p,k,y,d e,f,y,n,t,n,f,c,b,p,t,b,s,s,g,w,p,w,o,p,n,v,d e,f,f,n,t,n,f,c,b,w,t,b,s,s,w,p,p,w,o,p,k,v,d p,x,f,p,f,c,f,c,n,u,e,b,s,s,w,w,p,w,o,p,n,s,d p,x,f,g,f,f,f,c,b,p,e,b,k,k,n,n,p,w,o,l,h,v,d e,f,y,g,t,n,f,c,b,n,t,b,s,s,g,w,p,w,o,p,n,v,d e,f,y,e,t,n,f,c,b,u,t,b,s,s,g,g,p,w,o,p,n,v,d p,x,s,w,f,c,f,c,n,g,e,b,s,s,w,w,p,w,o,p,k,s,d p,x,f,p,f,c,f,w,n,g,e,b,s,s,w,w,p,w,o,p,n,v,d p,x,f,g,f,f,f,c,b,g,e,b,k,k,b,b,p,w,o,l,h,y,p e,x,f,g,t,n,f,c,b,p,t,b,s,s,w,p,p,w,o,p,k,v,d e,f,y,e,t,n,f,c,b,u,t,b,s,s,w,p,p,w,o,p,k,v,d p,x,s,p,f,c,f,w,n,p,e,b,s,s,w,w,p,w,o,p,k,s,d e,f,y,g,t,n,f,c,b,n,t,b,s,s,g,p,p,w,o,p,n,y,d e,f,y,n,t,n,f,c,b,n,t,b,s,s,p,p,p,w,o,p,k,y,d p,x,f,g,f,f,f,c,b,g,e,b,k,k,b,p,p,w,o,l,h,v,g e,f,y,n,t,n,f,c,b,w,t,b,s,s,g,p,p,w,o,p,k,v,d e,x,f,e,t,n,f,c,b,p,t,b,s,s,g,g,p,w,o,p,k,y,d p,x,f,w,f,c,f,w,n,g,e,b,s,s,w,w,p,w,o,p,k,v,d e,f,y,n,t,n,f,c,b,n,t,b,s,s,p,g,p,w,o,p,n,y,d p,x,f,g,f,f,f,c,b,g,e,b,k,k,p,n,p,w,o,l,h,v,d e,f,y,e,t,n,f,c,b,n,t,b,s,s,w,g,p,w,o,p,k,y,d p,x,f,y,f,f,f,c,b,p,e,b,k,k,n,n,p,w,o,l,h,y,p e,f,f,g,t,n,f,c,b,u,t,b,s,s,g,g,p,w,o,p,k,v,d e,x,f,e,t,n,f,c,b,n,t,b,s,s,g,p,p,w,o,p,k,y,d e,f,y,e,t,n,f,c,b,n,t,b,s,s,p,p,p,w,o,p,n,v,d e,f,y,g,t,n,f,c,b,u,t,b,s,s,w,g,p,w,o,p,k,v,d p,x,f,g,f,c,f,w,n,u,e,b,s,s,w,w,p,w,o,p,k,v,d e,f,y,g,t,n,f,c,b,w,t,b,s,s,w,w,p,w,o,p,n,y,d e,f,f,e,t,n,f,c,b,n,t,b,s,s,p,w,p,w,o,p,k,v,d p,x,f,g,f,f,f,c,b,h,e,b,k,k,n,b,p,w,o,l,h,y,p e,f,y,n,t,n,f,c,b,w,t,b,s,s,g,p,p,w,o,p,n,v,d p,x,s,g,f,c,f,w,n,g,e,b,s,s,w,w,p,w,o,p,n,s,d p,x,f,g,f,f,f,c,b,h,e,b,k,k,p,n,p,w,o,l,h,v,g e,f,f,e,t,n,f,c,b,u,t,b,s,s,w,g,p,w,o,p,k,v,d e,f,y,n,t,n,f,c,b,u,t,b,s,s,p,w,p,w,o,p,n,v,d e,f,y,g,t,n,f,c,b,n,t,b,s,s,w,p,p,w,o,p,k,v,d e,f,f,g,t,n,f,c,b,u,t,b,s,s,w,p,p,w,o,p,n,y,d p,x,f,w,f,c,f,c,n,p,e,b,s,s,w,w,p,w,o,p,k,v,d p,x,f,g,f,f,f,c,b,p,e,b,k,k,p,p,p,w,o,l,h,v,g p,x,f,g,f,c,f,w,n,u,e,b,s,s,w,w,p,w,o,p,n,v,d e,f,f,e,t,n,f,c,b,n,t,b,s,s,w,w,p,w,o,p,n,v,d e,f,y,e,t,n,f,c,b,w,t,b,s,s,p,p,p,w,o,p,n,v,d p,f,y,g,f,f,f,c,b,h,e,b,k,k,b,n,p,w,o,l,h,y,d p,x,f,g,f,f,f,c,b,h,e,b,k,k,n,n,p,w,o,l,h,y,g e,f,y,e,t,n,f,c,b,p,t,b,s,s,g,p,p,w,o,p,n,y,d p,x,f,p,f,c,f,w,n,g,e,b,s,s,w,w,p,w,o,p,n,s,d p,x,s,g,f,c,f,c,n,u,e,b,s,s,w,w,p,w,o,p,k,s,d e,f,y,e,t,n,f,c,b,w,t,b,s,s,w,g,p,w,o,p,k,v,d e,f,f,e,t,n,f,c,b,p,t,b,s,s,g,p,p,w,o,p,n,y,d e,f,y,e,t,n,f,c,b,w,t,b,s,s,p,g,p,w,o,p,k,y,d e,f,y,n,t,n,f,c,b,w,t,b,s,s,w,g,p,w,o,p,k,v,d p,x,s,p,f,c,f,w,n,g,e,b,s,s,w,w,p,w,o,p,n,v,d p,x,f,g,f,f,f,c,b,p,e,b,k,k,n,n,p,w,o,l,h,y,d e,f,f,e,t,n,f,c,b,w,t,b,s,s,w,w,p,w,o,p,k,v,d e,f,y,e,t,n,f,c,b,u,t,b,s,s,w,p,p,w,o,p,n,v,d e,f,f,g,t,n,f,c,b,u,t,b,s,s,w,g,p,w,o,p,k,v,d e,f,f,e,t,n,f,c,b,u,t,b,s,s,w,p,p,w,o,p,n,y,d e,f,y,n,t,n,f,c,b,n,t,b,s,s,g,p,p,w,o,p,n,y,d e,f,y,e,t,n,f,c,b,w,t,b,s,s,g,g,p,w,o,p,k,v,d p,f,f,y,f,f,f,c,b,g,e,b,k,k,b,n,p,w,o,l,h,v,p e,x,f,g,t,n,f,c,b,p,t,b,s,s,w,p,p,w,o,p,k,y,d p,x,s,p,f,c,f,w,n,p,e,b,s,s,w,w,p,w,o,p,n,s,d e,x,y,e,t,n,f,c,b,w,t,b,s,s,w,p,p,w,o,p,n,y,d e,f,f,g,t,n,f,c,b,u,t,b,s,s,g,g,p,w,o,p,k,y,d e,f,f,e,t,n,f,c,b,u,t,b,s,s,g,w,p,w,o,p,n,v,d 
e,x,f,e,t,n,f,c,b,u,t,b,s,s,p,w,p,w,o,p,k,v,d e,f,y,g,t,n,f,c,b,p,t,b,s,s,w,w,p,w,o,p,k,v,d e,f,f,e,t,n,f,c,b,n,t,b,s,s,g,w,p,w,o,p,n,v,d p,x,s,w,f,c,f,w,n,n,e,b,s,s,w,w,p,w,o,p,n,v,d p,x,y,g,f,f,f,c,b,g,e,b,k,k,p,p,p,w,o,l,h,y,d p,x,f,p,f,c,f,c,n,n,e,b,s,s,w,w,p,w,o,p,k,v,d e,f,y,g,t,n,f,c,b,w,t,b,s,s,w,g,p,w,o,p,n,y,d e,x,y,n,t,n,f,c,b,w,t,b,s,s,g,w,p,w,o,p,n,v,d p,x,f,g,f,c,f,c,n,n,e,b,s,s,w,w,p,w,o,p,n,s,d e,f,y,n,t,n,f,c,b,w,t,b,s,s,p,p,p,w,o,p,k,y,d p,x,f,p,f,c,f,c,n,g,e,b,s,s,w,w,p,w,o,p,k,v,d e,f,f,e,t,n,f,c,b,n,t,b,s,s,p,w,p,w,o,p,n,y,d p,x,s,w,f,c,f,w,n,n,e,b,s,s,w,w,p,w,o,p,k,s,d e,x,y,n,t,n,f,c,b,n,t,b,s,s,g,p,p,w,o,p,n,v,d p,x,f,g,f,f,f,c,b,g,e,b,k,k,p,n,p,w,o,l,h,y,g p,x,s,g,f,c,f,c,n,n,e,b,s,s,w,w,p,w,o,p,k,s,d e,f,y,n,t,n,f,c,b,n,t,b,s,s,g,g,p,w,o,p,n,y,d p,x,f,g,f,f,f,c,b,g,e,b,k,k,p,p,p,w,o,l,h,y,g p,x,s,p,f,c,f,c,n,p,e,b,s,s,w,w,p,w,o,p,k,v,d e,f,y,e,t,n,f,c,b,u,t,b,s,s,p,w,p,w,o,p,k,y,d p,x,s,g,f,c,f,w,n,p,e,b,s,s,w,w,p,w,o,p,n,s,d e,x,y,g,t,n,f,c,b,p,t,b,s,s,g,w,p,w,o,p,n,v,d e,x,y,g,t,n,f,c,b,w,t,b,s,s,p,w,p,w,o,p,k,y,d e,f,y,n,t,n,f,c,b,n,t,b,s,s,g,w,p,w,o,p,n,y,d e,f,y,n,t,n,f,c,b,u,t,b,s,s,g,w,p,w,o,p,k,v,d e,f,f,n,t,n,f,c,b,u,t,b,s,s,g,g,p,w,o,p,n,y,d p,x,s,g,f,c,f,c,n,n,e,b,s,s,w,w,p,w,o,p,n,s,d e,f,f,g,t,n,f,c,b,w,t,b,s,s,w,p,p,w,o,p,k,v,d e,x,f,e,t,n,f,c,b,u,t,b,s,s,p,g,p,w,o,p,n,v,d e,f,f,g,t,n,f,c,b,w,t,b,s,s,g,w,p,w,o,p,n,y,d e,f,y,e,t,n,f,c,b,n,t,b,s,s,w,g,p,w,o,p,n,y,d e,f,y,n,t,n,f,c,b,n,t,b,s,s,p,g,p,w,o,p,k,v,d p,x,f,g,f,f,f,c,b,g,e,b,k,k,n,p,p,w,o,l,h,y,g p,x,s,g,f,c,f,c,n,p,e,b,s,s,w,w,p,w,o,p,n,s,d e,f,y,e,t,n,f,c,b,w,t,b,s,s,g,g,p,w,o,p,k,y,d e,x,y,g,t,n,f,c,b,w,t,b,s,s,p,w,p,w,o,p,n,y,d p,x,f,g,f,c,f,w,n,g,e,b,s,s,w,w,p,w,o,p,k,v,d e,f,y,n,t,n,f,c,b,n,t,b,s,s,w,w,p,w,o,p,k,y,d e,f,y,g,t,n,f,c,b,n,t,b,s,s,g,w,p,w,o,p,k,y,d e,f,f,e,t,n,f,c,b,u,t,b,s,s,p,w,p,w,o,p,k,v,d p,x,f,y,f,f,f,c,b,p,e,b,k,k,n,b,p,w,o,l,h,v,p p,x,s,g,f,c,f,w,n,p,e,b,s,s,w,w,p,w,o,p,k,v,d e,f,y,g,t,n,f,c,b,n,t,b,s,s,g,w,p,w,o,p,n,y,d e,f,y,n,t,n,f,c,b,p,t,b,s,s,g,g,p,w,o,p,n,y,d e,f,y,n,t,n,f,c,b,u,t,b,s,s,g,g,p,w,o,p,k,y,d e,f,y,g,t,n,f,c,b,p,t,b,s,s,p,w,p,w,o,p,n,v,d e,f,y,g,t,n,f,c,b,n,t,b,s,s,p,w,p,w,o,p,k,v,d p,x,f,g,f,f,f,c,b,g,e,b,k,k,n,n,p,w,o,l,h,v,p e,f,y,g,t,n,f,c,b,w,t,b,s,s,p,w,p,w,o,p,k,y,d e,f,f,e,t,n,f,c,b,p,t,b,s,s,g,w,p,w,o,p,n,v,d p,x,f,g,f,c,f,w,n,g,e,b,s,s,w,w,p,w,o,p,n,v,d e,f,y,g,t,n,f,c,b,u,t,b,s,s,w,p,p,w,o,p,n,v,d p,f,y,g,f,f,f,c,b,h,e,b,k,k,p,p,p,w,o,l,h,y,p e,f,f,e,t,n,f,c,b,u,t,b,s,s,p,g,p,w,o,p,k,y,d p,x,f,g,f,f,f,c,b,h,e,b,k,k,b,p,p,w,o,l,h,v,p e,f,y,g,t,n,f,c,b,u,t,b,s,s,w,p,p,w,o,p,k,v,d p,x,f,p,f,c,f,c,n,n,e,b,s,s,w,w,p,w,o,p,n,s,d e,f,f,e,t,n,f,c,b,w,t,b,s,s,p,p,p,w,o,p,n,v,d p,x,f,p,f,c,f,w,n,p,e,b,s,s,w,w,p,w,o,p,n,v,d p,x,s,w,f,c,f,c,n,u,e,b,s,s,w,w,p,w,o,p,n,s,d e,f,y,g,t,n,f,c,b,n,t,b,s,s,p,w,p,w,o,p,n,v,d p,x,s,p,f,c,f,w,n,u,e,b,s,s,w,w,p,w,o,p,n,v,d p,x,f,g,f,f,f,c,b,g,e,b,k,k,b,b,p,w,o,l,h,y,d e,x,f,n,t,n,f,c,b,u,t,b,s,s,w,g,p,w,o,p,n,v,d e,f,y,g,t,n,f,c,b,p,t,b,s,s,p,p,p,w,o,p,k,y,d e,f,y,e,t,n,f,c,b,u,t,b,s,s,w,w,p,w,o,p,k,y,d p,x,f,g,f,f,f,c,b,p,e,b,k,k,p,b,p,w,o,l,h,y,g p,x,f,g,f,c,f,w,n,n,e,b,s,s,w,w,p,w,o,p,n,v,d p,x,f,g,f,f,f,c,b,g,e,b,k,k,p,b,p,w,o,l,h,v,p p,x,f,w,f,c,f,w,n,u,e,b,s,s,w,w,p,w,o,p,n,s,d e,f,y,n,t,n,f,c,b,p,t,b,s,s,g,w,p,w,o,p,k,y,d e,f,y,g,t,n,f,c,b,p,t,b,s,s,w,w,p,w,o,p,k,y,d p,x,f,w,f,c,f,w,n,g,e,b,s,s,w,w,p,w,o,p,k,s,d e,x,y,g,t,n,f,c,b,w,t,b,s,s,g,w,p,w,o,p,k,y,d e,f,y,n,t,n,f,c,b,w,t,b,s,s,w,p,p,w,o,p,k,v,d e,f,f,g,t,n,f,c,b,u,t,b,s,s,g,w,p,w,o,p,k,y,d e,x,y,e,t,n,f,c,b,p,t,b,s,s,p,g,p,w,o,p,k,y,d 
e,f,y,e,t,n,f,c,b,n,t,b,s,s,p,g,p,w,o,p,k,y,d p,x,f,w,f,c,f,w,n,n,e,b,s,s,w,w,p,w,o,p,k,s,d e,f,f,e,t,n,f,c,b,p,t,b,s,s,g,g,p,w,o,p,n,y,d p,x,y,y,f,f,f,c,b,h,e,b,k,k,n,p,p,w,o,l,h,y,d e,f,f,g,t,n,f,c,b,p,t,b,s,s,w,p,p,w,o,p,n,v,d e,f,y,n,t,n,f,c,b,n,t,b,s,s,p,g,p,w,o,p,n,v,d p,x,f,g,f,c,f,w,n,p,e,b,s,s,w,w,p,w,o,p,n,v,d p,x,s,p,f,c,f,c,n,n,e,b,s,s,w,w,p,w,o,p,n,v,d e,f,f,e,t,n,f,c,b,u,t,b,s,s,g,p,p,w,o,p,n,y,d e,x,f,g,t,n,f,c,b,p,t,b,s,s,p,p,p,w,o,p,k,v,d e,f,y,g,t,n,f,c,b,u,t,b,s,s,g,g,p,w,o,p,k,y,d p,x,f,g,f,f,f,c,b,h,e,b,k,k,n,b,p,w,o,l,h,v,p e,x,y,n,t,n,f,c,b,u,t,b,s,s,w,w,p,w,o,p,k,v,d e,f,y,g,t,n,f,c,b,n,t,b,s,s,p,w,p,w,o,p,k,y,d e,f,f,g,t,n,f,c,b,u,t,b,s,s,g,g,p,w,o,p,n,y,d p,x,f,g,f,c,f,c,n,p,e,b,s,s,w,w,p,w,o,p,k,s,d e,f,y,n,t,n,f,c,b,p,t,b,s,s,w,p,p,w,o,p,n,v,d p,x,f,g,f,f,f,c,b,h,e,b,k,k,p,p,p,w,o,l,h,v,p e,f,y,g,t,n,f,c,b,u,t,b,s,s,p,p,p,w,o,p,k,v,d e,f,f,e,t,n,f,c,b,u,t,b,s,s,p,g,p,w,o,p,k,v,d e,f,f,e,t,n,f,c,b,n,t,b,s,s,w,p,p,w,o,p,k,y,d p,x,s,w,f,c,f,c,n,u,e,b,s,s,w,w,p,w,o,p,n,v,d p,x,f,w,f,c,f,w,n,p,e,b,s,s,w,w,p,w,o,p,n,s,d e,f,f,e,t,n,f,c,b,w,t,b,s,s,p,w,p,w,o,p,k,v,d e,f,f,e,t,n,f,c,b,p,t,b,s,s,w,w,p,w,o,p,k,v,d e,f,y,n,t,n,f,c,b,u,t,b,s,s,p,g,p,w,o,p,k,y,d p,x,f,g,f,f,f,c,b,h,e,b,k,k,n,b,p,w,o,l,h,v,g e,f,y,g,t,n,f,c,b,w,t,b,s,s,g,g,p,w,o,p,k,y,d p,x,f,g,f,f,f,c,b,p,e,b,k,k,p,n,p,w,o,l,h,y,p e,f,f,e,t,n,f,c,b,w,t,b,s,s,g,p,p,w,o,p,n,y,d p,x,f,g,f,f,f,c,b,h,e,b,k,k,n,p,p,w,o,l,h,v,g e,f,y,g,t,n,f,c,b,u,t,b,s,s,p,w,p,w,o,p,n,v,d e,f,f,e,t,n,f,c,b,u,t,b,s,s,w,w,p,w,o,p,k,y,d e,f,y,n,t,n,f,c,b,w,t,b,s,s,w,w,p,w,o,p,n,v,d p,x,f,g,f,f,f,c,b,h,e,b,k,k,p,n,p,w,o,l,h,y,d e,f,f,e,t,n,f,c,b,u,t,b,s,s,g,g,p,w,o,p,k,y,d e,x,f,g,t,n,f,c,b,p,t,b,s,s,g,g,p,w,o,p,n,v,d e,f,y,g,t,n,f,c,b,n,t,b,s,s,p,g,p,w,o,p,n,v,d p,x,f,w,f,c,f,w,n,u,e,b,s,s,w,w,p,w,o,p,k,s,d e,f,f,e,t,n,f,c,b,p,t,b,s,s,w,p,p,w,o,p,k,y,d e,f,f,e,t,n,f,c,b,n,t,b,s,s,p,p,p,w,o,p,k,y,d e,f,y,e,t,n,f,c,b,u,t,b,s,s,w,g,p,w,o,p,n,y,d e,x,f,e,t,n,f,c,b,n,t,b,s,s,g,p,p,w,o,p,k,v,d e,f,y,n,t,n,f,c,b,u,t,b,s,s,p,w,p,w,o,p,k,v,d p,x,f,g,f,f,f,c,b,p,e,b,k,k,p,n,p,w,o,l,h,v,g e,x,f,g,t,n,f,c,b,n,t,b,s,s,p,w,p,w,o,p,k,v,d p,x,f,g,f,f,f,c,b,g,e,b,k,k,b,n,p,w,o,l,h,y,d e,f,y,g,t,n,f,c,b,n,t,b,s,s,g,w,p,w,o,p,k,v,d e,f,y,n,t,n,f,c,b,p,t,b,s,s,p,p,p,w,o,p,n,y,d p,x,y,g,f,f,f,c,b,h,e,b,k,k,b,p,p,w,o,l,h,v,d e,f,y,g,t,n,f,c,b,w,t,b,s,s,w,p,p,w,o,p,n,y,d p,f,f,g,f,f,f,c,b,p,e,b,k,k,n,p,p,w,o,l,h,v,d e,f,y,e,t,n,f,c,b,w,t,b,s,s,p,p,p,w,o,p,k,y,d p,x,s,g,f,c,f,c,n,g,e,b,s,s,w,w,p,w,o,p,n,v,d e,f,y,e,t,n,f,c,b,u,t,b,s,s,p,w,p,w,o,p,k,v,d e,f,y,n,t,n,f,c,b,u,t,b,s,s,g,w,p,w,o,p,k,y,d e,x,f,e,t,n,f,c,b,n,t,b,s,s,g,w,p,w,o,p,k,v,d e,f,y,e,t,n,f,c,b,n,t,b,s,s,p,w,p,w,o,p,k,v,d e,f,y,g,t,n,f,c,b,w,t,b,s,s,p,g,p,w,o,p,n,v,d p,x,s,p,f,c,f,w,n,n,e,b,s,s,w,w,p,w,o,p,k,s,d p,f,f,g,f,f,f,c,b,h,e,b,k,k,b,p,p,w,o,l,h,v,d e,f,f,g,t,n,f,c,b,w,t,b,s,s,p,w,p,w,o,p,n,v,d e,f,f,e,t,n,f,c,b,w,t,b,s,s,w,g,p,w,o,p,n,y,d p,x,f,p,f,c,f,c,n,u,e,b,s,s,w,w,p,w,o,p,n,v,d e,f,f,n,t,n,f,c,b,p,t,b,s,s,g,g,p,w,o,p,n,y,d e,f,y,g,t,n,f,c,b,p,t,b,s,s,p,w,p,w,o,p,k,v,d e,f,y,e,t,n,f,c,b,p,t,b,s,s,p,w,p,w,o,p,k,y,d e,f,y,e,t,n,f,c,b,w,t,b,s,s,w,w,p,w,o,p,k,y,d e,x,f,g,t,n,f,c,b,n,t,b,s,s,p,g,p,w,o,p,n,y,d p,x,s,w,f,c,f,w,n,p,e,b,s,s,w,w,p,w,o,p,n,v,d e,f,y,g,t,n,f,c,b,n,t,b,s,s,g,g,p,w,o,p,k,y,d p,x,y,g,f,f,f,c,b,p,e,b,k,k,b,p,p,w,o,l,h,v,d e,f,y,n,t,n,f,c,b,p,t,b,s,s,p,w,p,w,o,p,k,v,d e,f,y,n,t,n,f,c,b,p,t,b,s,s,g,g,p,w,o,p,k,v,d p,x,s,g,f,c,f,w,n,n,e,b,s,s,w,w,p,w,o,p,k,s,d e,f,y,g,t,n,f,c,b,p,t,b,s,s,p,w,p,w,o,p,n,y,d e,f,y,e,t,n,f,c,b,w,t,b,s,s,w,w,p,w,o,p,n,v,d 
p,x,f,g,f,f,f,c,b,g,e,b,k,k,p,n,p,w,o,l,h,y,d e,x,y,n,t,n,f,c,b,w,t,b,s,s,g,p,p,w,o,p,n,y,d e,f,y,e,t,n,f,c,b,u,t,b,s,s,p,g,p,w,o,p,n,v,d e,f,f,e,t,n,f,c,b,n,t,b,s,s,p,g,p,w,o,p,k,v,d p,x,s,p,f,c,f,c,n,n,e,b,s,s,w,w,p,w,o,p,n,s,d p,x,f,g,f,f,f,c,b,h,e,b,k,k,p,p,p,w,o,l,h,v,d e,f,y,e,t,n,f,c,b,p,t,b,s,s,p,w,p,w,o,p,k,v,d e,f,y,g,t,n,f,c,b,p,t,b,s,s,g,w,p,w,o,p,k,v,d p,x,s,p,f,c,f,c,n,p,e,b,s,s,w,w,p,w,o,p,n,v,d e,f,f,g,t,n,f,c,b,u,t,b,s,s,w,g,p,w,o,p,n,y,d e,f,f,e,t,n,f,c,b,n,t,b,s,s,w,w,p,w,o,p,k,y,d e,f,f,n,t,n,f,c,b,u,t,b,s,s,g,w,p,w,o,p,k,v,d e,x,y,g,t,n,f,c,b,n,t,b,s,s,w,w,p,w,o,p,n,v,d e,x,y,n,t,n,f,c,b,w,t,b,s,s,w,p,p,w,o,p,n,y,d e,f,y,n,t,n,f,c,b,u,t,b,s,s,g,p,p,w,o,p,n,y,d p,x,f,p,f,c,f,c,n,p,e,b,s,s,w,w,p,w,o,p,k,v,d e,f,f,e,t,n,f,c,b,p,t,b,s,s,w,p,p,w,o,p,k,v,d p,x,y,y,f,f,f,c,b,p,e,b,k,k,n,n,p,w,o,l,h,y,d p,x,s,g,f,c,f,w,n,p,e,b,s,s,w,w,p,w,o,p,n,v,d e,f,y,n,t,n,f,c,b,w,t,b,s,s,w,w,p,w,o,p,k,v,d p,f,f,g,f,f,f,c,b,p,e,b,k,k,n,n,p,w,o,l,h,v,d p,x,f,g,f,c,f,w,n,n,e,b,s,s,w,w,p,w,o,p,k,s,d e,f,y,g,t,n,f,c,b,p,t,b,s,s,p,g,p,w,o,p,k,y,d e,f,y,g,t,n,f,c,b,u,t,b,s,s,g,g,p,w,o,p,k,v,d e,f,y,g,t,n,f,c,b,w,t,b,s,s,g,p,p,w,o,p,n,v,d e,x,y,n,t,n,f,c,b,w,t,b,s,s,g,w,p,w,o,p,n,y,d p,x,f,y,f,f,f,c,b,p,e,b,k,k,b,n,p,w,o,l,h,v,g e,f,f,g,t,n,f,c,b,u,t,b,s,s,g,w,p,w,o,p,n,y,d p,x,f,g,f,f,f,c,b,p,e,b,k,k,b,p,p,w,o,l,h,v,p e,f,y,e,t,n,f,c,b,u,t,b,s,s,g,w,p,w,o,p,n,v,d e,f,f,e,t,n,f,c,b,w,t,b,s,s,p,g,p,w,o,p,n,y,d p,f,f,g,f,f,f,c,b,p,e,b,k,k,n,p,p,w,o,l,h,y,d e,f,f,e,t,n,f,c,b,p,t,b,s,s,p,g,p,w,o,p,k,v,d p,x,f,y,f,f,f,c,b,g,e,b,k,k,p,n,p,w,o,l,h,y,p e,f,f,e,t,n,f,c,b,u,t,b,s,s,g,g,p,w,o,p,n,v,d e,f,f,g,t,n,f,c,b,u,t,b,s,s,w,w,p,w,o,p,n,v,d e,f,f,e,t,n,f,c,b,p,t,b,s,s,g,g,p,w,o,p,k,v,d e,f,y,e,t,n,f,c,b,n,t,b,s,s,w,w,p,w,o,p,n,v,d e,f,y,e,t,n,f,c,b,n,t,b,s,s,g,w,p,w,o,p,k,y,d e,f,y,n,t,n,f,c,b,n,t,b,s,s,g,p,p,w,o,p,n,v,d e,f,f,g,t,n,f,c,b,w,t,b,s,s,g,g,p,w,o,p,n,v,d e,f,f,e,t,n,f,c,b,p,t,b,s,s,p,w,p,w,o,p,n,v,d p,x,f,g,f,f,f,c,b,g,e,b,k,k,b,b,p,w,o,l,h,v,g e,f,y,n,t,n,f,c,b,u,t,b,s,s,w,g,p,w,o,p,k,y,d p,x,f,g,f,c,f,c,n,n,e,b,s,s,w,w,p,w,o,p,k,v,d e,f,y,n,t,n,f,c,b,w,t,b,s,s,g,w,p,w,o,p,n,v,d e,f,y,g,t,n,f,c,b,w,t,b,s,s,w,g,p,w,o,p,k,v,d e,x,f,e,t,n,f,c,b,p,t,b,s,s,p,g,p,w,o,p,k,y,d e,f,y,e,t,n,f,c,b,p,t,b,s,s,p,p,p,w,o,p,k,y,d e,f,f,g,t,n,f,c,b,p,t,b,s,s,p,p,p,w,o,p,n,v,d e,x,y,g,t,n,f,c,b,p,t,b,s,s,g,g,p,w,o,p,n,v,d p,x,f,p,f,c,f,c,n,g,e,b,s,s,w,w,p,w,o,p,k,s,d e,x,y,n,t,n,f,c,b,n,t,b,s,s,p,p,p,w,o,p,n,y,d e,f,y,n,t,n,f,c,b,n,t,b,s,s,w,g,p,w,o,p,k,v,d e,f,y,g,t,n,f,c,b,w,t,b,s,s,p,g,p,w,o,p,k,y,d p,f,f,g,f,f,f,c,b,g,e,b,k,k,n,n,p,w,o,l,h,y,g e,f,y,g,t,n,f,c,b,w,t,b,s,s,p,p,p,w,o,p,n,v,d e,f,y,e,t,n,f,c,b,n,t,b,s,s,g,g,p,w,o,p,k,y,d p,x,f,g,f,f,f,c,b,g,e,b,k,k,p,b,p,w,o,l,h,y,g e,f,y,n,t,n,f,c,b,n,t,b,s,s,p,g,p,w,o,p,k,y,d p,x,f,g,f,f,f,c,b,h,e,b,k,k,p,b,p,w,o,l,h,v,g e,f,f,e,t,n,f,c,b,w,t,b,s,s,w,p,p,w,o,p,k,y,d e,x,y,n,t,n,f,c,b,w,t,b,s,s,g,w,p,w,o,p,k,y,d e,x,y,g,t,n,f,c,b,p,t,b,s,s,p,w,p,w,o,p,k,y,d e,f,y,e,t,n,f,c,b,u,t,b,s,s,w,g,p,w,o,p,k,v,d p,x,y,g,f,f,f,c,b,p,e,b,k,k,b,n,p,w,o,l,h,v,d p,x,s,g,f,c,f,c,n,g,e,b,s,s,w,w,p,w,o,p,k,v,d p,x,f,g,f,f,f,c,b,p,e,b,k,k,b,n,p,w,o,l,h,v,g p,x,f,g,f,f,f,c,b,p,e,b,k,k,b,b,p,w,o,l,h,v,p p,f,f,g,f,f,f,c,b,p,e,b,k,k,b,b,p,w,o,l,h,y,p e,f,f,g,t,n,f,c,b,u,t,b,s,s,w,w,p,w,o,p,k,v,d p,x,f,g,f,f,f,c,b,g,e,b,k,k,b,n,p,w,o,l,h,v,d e,f,y,e,t,n,f,c,b,p,t,b,s,s,w,p,p,w,o,p,k,v,d e,f,y,n,t,n,f,c,b,w,t,b,s,s,p,g,p,w,o,p,n,v,d e,f,y,n,t,n,f,c,b,n,t,b,s,s,w,g,p,w,o,p,k,y,d p,x,y,y,f,f,f,c,b,p,e,b,k,k,n,b,p,w,o,l,h,y,d e,f,y,g,t,n,f,c,b,n,t,b,s,s,w,w,p,w,o,p,k,y,d 
p,x,f,p,f,c,f,w,n,u,e,b,s,s,w,w,p,w,o,p,n,s,d e,f,f,e,t,n,f,c,b,w,t,b,s,s,w,w,p,w,o,p,n,y,d e,f,y,n,t,n,f,c,b,n,t,b,s,s,g,p,p,w,o,p,k,y,d e,x,y,n,t,n,f,c,b,n,t,b,s,s,w,g,p,w,o,p,k,v,d e,f,f,e,t,n,f,c,b,p,t,b,s,s,w,w,p,w,o,p,n,y,d p,x,f,g,f,f,f,c,b,h,e,b,k,k,p,n,p,w,o,l,h,y,g p,x,f,g,f,f,f,c,b,h,e,b,k,k,p,n,p,w,o,l,h,v,p e,f,f,g,t,n,f,c,b,n,t,b,s,s,p,g,p,w,o,p,k,v,d e,x,f,n,t,n,f,c,b,u,t,b,s,s,p,w,p,w,o,p,n,v,d p,x,f,g,f,f,f,c,b,p,e,b,k,k,p,p,p,w,o,l,h,v,d e,f,f,g,t,n,f,c,b,w,t,b,s,s,p,g,p,w,o,p,n,y,d p,x,f,p,f,c,f,c,n,g,e,b,s,s,w,w,p,w,o,p,n,s,d e,f,y,n,t,n,f,c,b,n,t,b,s,s,w,w,p,w,o,p,n,v,d e,f,y,g,t,n,f,c,b,n,t,b,s,s,g,p,p,w,o,p,k,y,d p,x,f,g,f,f,f,c,b,g,e,b,k,k,n,n,p,w,o,l,h,y,p e,x,f,e,t,n,f,c,b,w,t,b,s,s,g,g,p,w,o,p,k,y,d p,x,s,g,f,c,f,w,n,u,e,b,s,s,w,w,p,w,o,p,k,s,d p,x,f,g,f,f,f,c,b,h,e,b,k,k,b,p,p,w,o,l,h,v,g e,f,y,n,t,n,f,c,b,n,t,b,s,s,w,p,p,w,o,p,n,v,d p,x,f,g,f,f,f,c,b,g,e,b,k,k,n,b,p,w,o,l,h,v,d e,f,y,e,t,n,f,c,b,p,t,b,s,s,p,g,p,w,o,p,k,v,d e,f,f,g,t,n,f,c,b,u,t,b,s,s,p,w,p,w,o,p,n,y,d e,f,f,g,t,n,f,c,b,w,t,b,s,s,g,p,p,w,o,p,n,y,d e,f,y,n,t,n,f,c,b,w,t,b,s,s,w,p,p,w,o,p,n,y,d e,f,y,e,t,n,f,c,b,u,t,b,s,s,w,p,p,w,o,p,n,y,d e,f,y,g,t,n,f,c,b,p,t,b,s,s,g,g,p,w,o,p,k,y,d e,f,y,e,t,n,f,c,b,u,t,b,s,s,w,g,p,w,o,p,k,y,d e,f,y,n,t,n,f,c,b,n,t,b,s,s,p,w,p,w,o,p,k,y,d p,x,y,y,f,f,f,c,b,p,e,b,k,k,p,n,p,w,o,l,h,y,g e,f,f,g,t,n,f,c,b,u,t,b,s,s,p,p,p,w,o,p,k,y,d e,f,y,n,t,n,f,c,b,u,t,b,s,s,g,w,p,w,o,p,n,v,d p,x,f,g,f,f,f,c,b,g,e,b,k,k,n,b,p,w,o,l,h,y,g p,x,f,g,f,f,f,c,b,p,e,b,k,k,p,b,p,w,o,l,h,v,d p,x,f,g,f,f,f,c,b,p,e,b,k,k,b,n,p,w,o,l,h,y,p e,f,y,e,t,n,f,c,b,u,t,b,s,s,g,p,p,w,o,p,n,y,d e,f,y,n,t,n,f,c,b,p,t,b,s,s,w,p,p,w,o,p,k,v,d p,x,y,g,f,f,f,c,b,p,e,b,k,k,n,n,p,w,o,l,h,y,d p,x,f,g,f,f,f,c,b,p,e,b,k,k,p,b,p,w,o,l,h,v,p p,f,y,g,f,f,f,c,b,p,e,b,k,k,b,b,p,w,o,l,h,v,p e,f,y,n,t,n,f,c,b,w,t,b,s,s,p,w,p,w,o,p,n,v,d p,x,f,g,f,c,f,w,n,n,e,b,s,s,w,w,p,w,o,p,n,s,d e,f,f,e,t,n,f,c,b,n,t,b,s,s,p,p,p,w,o,p,n,v,d p,f,f,y,f,f,f,c,b,h,e,b,k,k,n,b,p,w,o,l,h,y,d e,f,y,n,t,n,f,c,b,u,t,b,s,s,g,p,p,w,o,p,n,v,d e,x,y,e,t,n,f,c,b,u,t,b,s,s,w,p,p,w,o,p,k,y,d p,x,f,g,f,f,f,c,b,h,e,b,k,k,b,b,p,w,o,l,h,y,p e,x,y,n,t,n,f,c,b,p,t,b,s,s,w,w,p,w,o,p,k,y,d p,x,s,w,f,c,f,c,n,g,e,b,s,s,w,w,p,w,o,p,n,v,d e,f,y,g,t,n,f,c,b,p,t,b,s,s,w,g,p,w,o,p,k,v,d e,f,f,e,t,n,f,c,b,p,t,b,s,s,p,p,p,w,o,p,n,v,d e,f,y,g,t,n,f,c,b,p,t,b,s,s,p,p,p,w,o,p,n,y,d p,x,f,g,f,f,f,c,b,g,e,b,k,k,p,b,p,w,o,l,h,v,d e,f,y,e,t,n,f,c,b,u,t,b,s,s,w,w,p,w,o,p,n,v,d e,f,y,e,t,n,f,c,b,p,t,b,s,s,w,g,p,w,o,p,n,y,d p,x,s,p,f,c,f,c,n,g,e,b,s,s,w,w,p,w,o,p,k,s,d e,f,f,e,t,n,f,c,b,u,t,b,s,s,w,w,p,w,o,p,k,v,d e,f,f,g,t,n,f,c,b,u,t,b,s,s,g,p,p,w,o,p,k,y,d e,f,f,g,t,n,f,c,b,w,t,b,s,s,w,w,p,w,o,p,n,y,d e,f,f,g,t,n,f,c,b,u,t,b,s,s,g,p,p,w,o,p,n,v,d p,x,f,g,f,f,f,c,b,h,e,b,k,k,b,b,p,w,o,l,h,v,p e,f,y,n,t,n,f,c,b,p,t,b,s,s,p,g,p,w,o,p,n,y,d e,f,y,g,t,n,f,c,b,n,t,b,s,s,w,g,p,w,o,p,k,v,d e,f,y,n,t,n,f,c,b,u,t,b,s,s,p,p,p,w,o,p,k,y,d p,x,s,p,f,c,f,c,n,n,e,b,s,s,w,w,p,w,o,p,k,s,d e,f,y,e,t,n,f,c,b,n,t,b,s,s,p,p,p,w,o,p,k,y,d e,f,y,g,t,n,f,c,b,p,t,b,s,s,g,p,p,w,o,p,k,v,d e,f,y,e,t,n,f,c,b,n,t,b,s,s,w,p,p,w,o,p,k,y,d p,x,s,w,f,c,f,c,n,u,e,b,s,s,w,w,p,w,o,p,k,s,d p,x,f,g,f,f,f,c,b,h,e,b,k,k,n,n,p,w,o,l,h,v,p e,f,y,n,t,n,f,c,b,w,t,b,s,s,w,p,p,w,o,p,n,v,d p,x,f,g,f,f,f,c,b,g,e,b,k,k,p,p,p,w,o,l,h,v,g p,x,f,w,f,c,f,c,n,n,e,b,s,s,w,w,p,w,o,p,n,s,d e,f,y,n,t,n,f,c,b,w,t,b,s,s,g,g,p,w,o,p,k,y,d e,f,y,g,t,n,f,c,b,p,t,b,s,s,w,w,p,w,o,p,n,v,d e,f,y,g,t,n,f,c,b,n,t,b,s,s,w,w,p,w,o,p,n,v,d e,f,y,g,t,n,f,c,b,w,t,b,s,s,w,p,p,w,o,p,n,v,d p,x,f,w,f,c,f,w,n,p,e,b,s,s,w,w,p,w,o,p,n,v,d 
e,f,f,e,t,n,f,c,b,u,t,b,s,s,w,w,p,w,o,p,n,v,d p,x,f,g,f,f,f,c,b,g,e,b,k,k,p,b,p,w,o,l,h,v,g e,f,y,n,t,n,f,c,b,w,t,b,s,s,g,w,p,w,o,p,n,y,d e,f,y,n,t,n,f,c,b,p,t,b,s,s,p,g,p,w,o,p,n,v,d p,x,s,p,f,c,f,c,n,u,e,b,s,s,w,w,p,w,o,p,k,v,d e,f,y,g,t,n,f,c,b,n,t,b,s,s,w,g,p,w,o,p,n,y,d p,x,s,p,f,c,f,c,n,g,e,b,s,s,w,w,p,w,o,p,n,v,d e,x,y,n,t,n,f,c,b,n,t,b,s,s,w,p,p,w,o,p,n,y,d p,x,y,g,f,f,f,c,b,p,e,b,k,k,b,b,p,w,o,l,h,y,g e,f,y,e,t,n,f,c,b,w,t,b,s,s,g,w,p,w,o,p,k,y,d p,x,f,p,f,c,f,c,n,p,e,b,s,s,w,w,p,w,o,p,n,s,d p,x,f,g,f,f,f,c,b,p,e,b,k,k,p,n,p,w,o,l,h,v,p e,f,f,e,t,n,f,c,b,u,t,b,s,s,w,g,p,w,o,p,n,y,d e,f,f,e,t,n,f,c,b,w,t,b,s,s,g,w,p,w,o,p,k,y,d p,x,f,g,f,c,f,c,n,u,e,b,s,s,w,w,p,w,o,p,n,s,d p,x,f,g,f,f,f,c,b,p,e,b,k,k,n,n,p,w,o,l,h,v,g e,f,y,g,t,n,f,c,b,n,t,b,s,s,w,w,p,w,o,p,n,y,d p,x,f,g,f,f,f,c,b,p,e,b,k,k,n,b,p,w,o,l,h,v,d e,f,f,e,t,n,f,c,b,w,t,b,s,s,p,p,p,w,o,p,k,v,d e,f,y,n,t,n,f,c,b,u,t,b,s,s,w,p,p,w,o,p,k,v,d p,f,y,y,f,f,f,c,b,h,e,b,k,k,b,n,p,w,o,l,h,y,d e,f,y,e,t,n,f,c,b,n,t,b,s,s,w,w,p,w,o,p,k,y,d e,f,y,e,t,n,f,c,b,p,t,b,s,s,w,w,p,w,o,p,k,v,d e,f,y,n,t,n,f,c,b,w,t,b,s,s,g,g,p,w,o,p,n,y,d e,x,f,n,t,n,f,c,b,w,t,b,s,s,g,p,p,w,o,p,k,v,d p,f,y,g,f,f,f,c,b,g,e,b,k,k,p,b,p,w,o,l,h,v,d e,f,y,g,t,n,f,c,b,u,t,b,s,s,g,w,p,w,o,p,n,v,d e,f,f,e,t,n,f,c,b,w,t,b,s,s,p,w,p,w,o,p,k,y,d e,f,y,n,t,n,f,c,b,p,t,b,s,s,p,p,p,w,o,p,n,v,d e,x,y,g,t,n,f,c,b,p,t,b,s,s,p,w,p,w,o,p,n,y,d e,x,y,n,t,n,f,c,b,w,t,b,s,s,g,w,p,w,o,p,k,v,d p,x,f,g,f,f,f,c,b,p,e,b,k,k,b,b,p,w,o,l,h,v,d p,x,s,w,f,c,f,w,n,g,e,b,s,s,w,w,p,w,o,p,n,v,d p,f,f,y,f,f,f,c,b,g,e,b,k,k,n,b,p,w,o,l,h,v,p e,f,y,n,t,n,f,c,b,u,t,b,s,s,p,g,p,w,o,p,n,v,d e,f,y,e,t,n,f,c,b,p,t,b,s,s,p,g,p,w,o,p,k,y,d e,f,f,e,t,n,f,c,b,u,t,b,s,s,p,g,p,w,o,p,n,y,d e,f,f,e,t,n,f,c,b,w,t,b,s,s,p,p,p,w,o,p,n,y,d e,f,y,e,t,n,f,c,b,w,t,b,s,s,g,p,p,w,o,p,n,v,d e,f,y,n,t,n,f,c,b,n,t,b,s,s,g,g,p,w,o,p,k,y,d e,f,y,e,t,n,f,c,b,n,t,b,s,s,w,g,p,w,o,p,k,v,d p,x,y,g,f,f,f,c,b,h,e,b,k,k,n,n,p,w,o,l,h,y,g e,f,y,e,t,n,f,c,b,w,t,b,s,s,p,p,p,w,o,p,n,y,d e,f,y,n,t,n,f,c,b,p,t,b,s,s,w,w,p,w,o,p,n,y,d p,x,f,g,f,f,f,c,b,p,e,b,k,k,p,n,p,w,o,l,h,v,d p,x,f,w,f,c,f,w,n,p,e,b,s,s,w,w,p,w,o,p,k,s,d p,x,f,g,f,c,f,w,n,p,e,b,s,s,w,w,p,w,o,p,k,v,d e,x,y,n,t,n,f,c,b,u,t,b,s,s,w,g,p,w,o,p,n,y,d p,f,f,y,f,f,f,c,b,h,e,b,k,k,p,p,p,w,o,l,h,v,p p,x,s,g,f,c,f,w,n,n,e,b,s,s,w,w,p,w,o,p,n,v,d e,f,y,e,t,n,f,c,b,w,t,b,s,s,g,w,p,w,o,p,n,v,d e,f,y,e,t,n,f,c,b,u,t,b,s,s,w,w,p,w,o,p,n,y,d e,f,f,e,t,n,f,c,b,p,t,b,s,s,g,p,p,w,o,p,k,v,d p,x,f,y,f,f,f,c,b,p,e,b,k,k,b,n,p,w,o,l,h,v,d e,f,y,n,t,n,f,c,b,u,t,b,s,s,g,g,p,w,o,p,n,v,d e,f,f,e,t,n,f,c,b,w,t,b,s,s,w,w,p,w,o,p,n,v,d e,f,y,e,t,n,f,c,b,u,t,b,s,s,p,w,p,w,o,p,n,v,d p,x,f,y,f,f,f,c,b,p,e,b,k,k,p,p,p,w,o,l,h,v,p p,x,f,g,f,c,f,c,n,n,e,b,s,s,w,w,p,w,o,p,n,v,d e,f,f,e,t,n,f,c,b,u,t,b,s,s,w,p,p,w,o,p,k,v,d p,x,s,g,f,c,f,c,n,g,e,b,s,s,w,w,p,w,o,p,n,s,d p,x,f,w,f,c,f,c,n,u,e,b,s,s,w,w,p,w,o,p,n,v,d p,x,f,g,f,f,f,c,b,h,e,b,k,k,p,b,p,w,o,l,h,y,g e,f,y,g,t,n,f,c,b,n,t,b,s,s,w,p,p,w,o,p,n,v,d e,f,f,e,t,n,f,c,b,n,t,b,s,s,g,g,p,w,o,p,k,v,d p,f,f,y,f,f,f,c,b,p,e,b,k,k,b,p,p,w,o,l,h,y,p e,f,f,e,t,n,f,c,b,u,t,b,s,s,g,w,p,w,o,p,k,y,d e,f,y,e,t,n,f,c,b,w,t,b,s,s,w,p,p,w,o,p,n,v,d e,f,f,g,t,n,f,c,b,u,t,b,s,s,p,w,p,w,o,p,n,v,d e,f,y,e,t,n,f,c,b,n,t,b,s,s,g,w,p,w,o,p,n,y,d e,f,f,g,t,n,f,c,b,u,t,b,s,s,w,p,p,w,o,p,k,v,d e,f,y,n,t,n,f,c,b,u,t,b,s,s,w,p,p,w,o,p,k,y,d e,f,y,e,t,n,f,c,b,w,t,b,s,s,p,w,p,w,o,p,k,v,d e,f,y,n,t,n,f,c,b,w,t,b,s,s,g,g,p,w,o,p,n,v,d e,f,f,e,t,n,f,c,b,n,t,b,s,s,p,p,p,w,o,p,n,y,d e,f,y,e,t,n,f,c,b,w,t,b,s,s,w,p,p,w,o,p,n,y,d e,f,f,g,t,n,f,c,b,w,t,b,s,s,w,g,p,w,o,p,k,y,d 
e,x,f,g,t,n,f,c,b,p,t,b,s,s,p,w,p,w,o,p,n,y,d e,f,y,n,t,n,f,c,b,p,t,b,s,s,w,p,p,w,o,p,n,y,d e,f,y,n,t,n,f,c,b,p,t,b,s,s,g,p,p,w,o,p,n,v,d e,x,f,e,t,n,f,c,b,n,t,b,s,s,g,w,p,w,o,p,k,y,d e,f,f,n,t,n,f,c,b,p,t,b,s,s,w,p,p,w,o,p,k,y,d p,x,f,g,f,f,f,c,b,h,e,b,k,k,n,n,p,w,o,l,h,y,p e,f,y,e,t,n,f,c,b,p,t,b,s,s,w,p,p,w,o,p,n,y,d e,x,y,g,t,n,f,c,b,n,t,b,s,s,w,g,p,w,o,p,k,y,d e,f,y,n,t,n,f,c,b,w,t,b,s,s,p,p,p,w,o,p,n,v,d e,f,y,n,t,n,f,c,b,w,t,b,s,s,g,p,p,w,o,p,k,y,d e,f,y,g,t,n,f,c,b,w,t,b,s,s,p,g,p,w,o,p,n,y,d p,x,f,g,f,f,f,c,b,h,e,b,k,k,p,n,p,w,o,l,h,v,d e,f,f,e,t,n,f,c,b,w,t,b,s,s,p,w,p,w,o,p,n,v,d p,f,f,g,f,f,f,c,b,g,e,b,k,k,p,b,p,w,o,l,h,v,d e,f,y,g,t,n,f,c,b,u,t,b,s,s,p,p,p,w,o,p,n,y,d e,f,y,g,t,n,f,c,b,n,t,b,s,s,g,g,p,w,o,p,n,v,d e,f,f,g,t,n,f,c,b,u,t,b,s,s,p,w,p,w,o,p,k,y,d p,f,f,y,f,f,f,c,b,h,e,b,k,k,p,b,p,w,o,l,h,v,p e,x,y,n,t,n,f,c,b,n,t,b,s,s,g,w,p,w,o,p,n,v,d e,f,y,g,t,n,f,c,b,p,t,b,s,s,g,g,p,w,o,p,n,y,d p,f,f,g,f,f,f,c,b,h,e,b,k,k,b,n,p,w,o,l,h,v,d e,f,y,n,t,n,f,c,b,w,t,b,s,s,g,w,p,w,o,p,k,v,d p,x,f,g,f,c,f,w,n,u,e,b,s,s,w,w,p,w,o,p,k,s,d e,f,y,e,t,n,f,c,b,w,t,b,s,s,p,w,p,w,o,p,n,y,d p,x,s,w,f,c,f,w,n,g,e,b,s,s,w,w,p,w,o,p,k,s,d e,f,y,n,t,n,f,c,b,n,t,b,s,s,w,p,p,w,o,p,k,v,d p,x,s,w,f,c,f,c,n,n,e,b,s,s,w,w,p,w,o,p,n,v,d e,f,y,e,t,n,f,c,b,p,t,b,s,s,p,p,p,w,o,p,n,y,d e,f,f,g,t,n,f,c,b,u,t,b,s,s,w,g,p,w,o,p,n,v,d e,f,f,e,t,n,f,c,b,p,t,b,s,s,p,w,p,w,o,p,k,y,d e,f,y,e,t,n,f,c,b,w,t,b,s,s,g,w,p,w,o,p,n,y,d p,x,s,g,f,c,f,c,n,p,e,b,s,s,w,w,p,w,o,p,k,s,d e,f,y,e,t,n,f,c,b,n,t,b,s,s,p,w,p,w,o,p,k,y,d e,f,y,e,t,n,f,c,b,n,t,b,s,s,g,g,p,w,o,p,k,v,d p,x,f,p,f,c,f,w,n,n,e,b,s,s,w,w,p,w,o,p,k,v,d e,f,f,e,t,n,f,c,b,u,t,b,s,s,p,p,p,w,o,p,k,y,d p,x,s,p,f,c,f,c,n,u,e,b,s,s,w,w,p,w,o,p,n,v,d e,f,f,e,t,n,f,c,b,w,t,b,s,s,p,g,p,w,o,p,k,y,d p,f,f,g,f,f,f,c,b,g,e,b,k,k,b,p,p,w,o,l,h,y,g e,f,y,g,t,n,f,c,b,p,t,b,s,s,g,p,p,w,o,p,n,y,d p,x,f,g,f,f,f,c,b,h,e,b,k,k,n,p,p,w,o,l,h,y,g e,f,y,n,t,n,f,c,b,n,t,b,s,s,p,p,p,w,o,p,k,v,d e,f,y,e,t,n,f,c,b,u,t,b,s,s,p,p,p,w,o,p,n,v,d e,f,y,n,t,n,f,c,b,u,t,b,s,s,p,p,p,w,o,p,k,v,d p,x,s,p,f,c,f,w,n,g,e,b,s,s,w,w,p,w,o,p,k,s,d p,x,y,g,f,f,f,c,b,h,e,b,k,k,n,p,p,w,o,l,h,y,p e,f,f,e,t,n,f,c,b,p,t,b,s,s,g,p,p,w,o,p,n,v,d e,f,f,g,t,n,f,c,b,w,t,b,s,s,w,w,p,w,o,p,k,v,d e,f,y,g,t,n,f,c,b,n,t,b,s,s,w,w,p,w,o,p,k,v,d e,f,y,e,t,n,f,c,b,p,t,b,s,s,g,w,p,w,o,p,n,y,d e,f,y,e,t,n,f,c,b,u,t,b,s,s,w,w,p,w,o,p,k,v,d e,x,f,g,t,n,f,c,b,n,t,b,s,s,w,p,p,w,o,p,n,y,d p,x,f,g,f,f,f,c,b,g,e,b,k,k,n,p,p,w,o,l,h,y,d e,x,y,n,t,n,f,c,b,u,t,b,s,s,g,p,p,w,o,p,n,y,d e,f,y,e,t,n,f,c,b,u,t,b,s,s,w,p,p,w,o,p,k,y,d e,f,f,e,t,n,f,c,b,n,t,b,s,s,p,w,p,w,o,p,k,y,d p,x,f,w,f,c,f,c,n,n,e,b,s,s,w,w,p,w,o,p,k,v,d e,f,y,e,t,n,f,c,b,p,t,b,s,s,g,p,p,w,o,p,k,y,d e,f,y,g,t,n,f,c,b,p,t,b,s,s,w,w,p,w,o,p,n,y,d p,x,f,g,f,f,f,c,b,p,e,b,k,k,n,b,p,w,o,l,h,y,g e,f,y,g,t,n,f,c,b,n,t,b,s,s,p,w,p,w,o,p,n,y,d p,x,s,w,f,c,f,w,n,n,e,b,s,s,w,w,p,w,o,p,k,v,d e,f,f,e,t,n,f,c,b,n,t,b,s,s,p,p,p,w,o,p,k,v,d p,x,s,w,f,c,f,c,n,n,e,b,s,s,w,w,p,w,o,p,k,s,d p,x,f,g,f,f,f,c,b,g,e,b,k,k,p,p,p,w,o,l,h,v,d p,x,f,g,f,f,f,c,b,h,e,b,k,k,b,p,p,w,o,l,h,y,d e,f,y,g,t,n,f,c,b,u,t,b,s,s,g,g,p,w,o,p,n,v,d e,f,y,g,t,n,f,c,b,p,t,b,s,s,w,g,p,w,o,p,n,v,d e,f,y,e,t,n,f,c,b,p,t,b,s,s,p,w,p,w,o,p,n,y,d e,f,y,e,t,n,f,c,b,p,t,b,s,s,w,g,p,w,o,p,k,y,d p,x,f,g,f,f,f,c,b,h,e,b,k,k,b,n,p,w,o,l,h,v,p e,f,f,e,t,n,f,c,b,u,t,b,s,s,p,w,p,w,o,p,n,y,d e,f,f,e,t,n,f,c,b,n,t,b,s,s,g,p,p,w,o,p,k,y,d p,x,y,g,f,f,f,c,b,g,e,b,k,k,n,n,p,w,o,l,h,v,g e,f,f,e,t,n,f,c,b,u,t,b,s,s,w,g,p,w,o,p,k,y,d p,x,f,g,f,c,f,w,n,p,e,b,s,s,w,w,p,w,o,p,n,s,d e,f,y,g,t,n,f,c,b,u,t,b,s,s,g,w,p,w,o,p,k,y,d 
e,f,y,e,t,n,f,c,b,w,t,b,s,s,g,g,p,w,o,p,n,y,d e,f,y,e,t,n,f,c,b,n,t,b,s,s,p,w,p,w,o,p,n,y,d e,f,y,g,t,n,f,c,b,w,t,b,s,s,w,p,p,w,o,p,k,y,d e,x,f,e,t,n,f,c,b,n,t,b,s,s,w,w,p,w,o,p,n,y,d e,f,y,g,t,n,f,c,b,u,t,b,s,s,p,w,p,w,o,p,k,y,d p,x,s,p,f,c,f,c,n,p,e,b,s,s,w,w,p,w,o,p,k,s,d e,f,y,n,t,n,f,c,b,u,t,b,s,s,w,p,p,w,o,p,n,v,d p,x,f,g,f,f,f,c,b,p,e,b,k,k,b,n,p,w,o,l,h,y,g e,f,y,e,t,n,f,c,b,n,t,b,s,s,w,p,p,w,o,p,k,v,d e,f,y,g,t,n,f,c,b,w,t,b,s,s,g,w,p,w,o,p,n,v,d e,f,f,e,t,n,f,c,b,n,t,b,s,s,p,g,p,w,o,p,n,v,d p,x,s,w,f,c,f,w,n,u,e,b,s,s,w,w,p,w,o,p,n,v,d p,x,f,w,f,c,f,w,n,n,e,b,s,s,w,w,p,w,o,p,n,v,d e,x,y,n,t,n,f,c,b,n,t,b,s,s,w,w,p,w,o,p,n,v,d e,f,f,e,t,n,f,c,b,w,t,b,s,s,p,g,p,w,o,p,k,v,d e,f,y,e,t,n,f,c,b,w,t,b,s,s,g,p,p,w,o,p,k,v,d e,x,y,g,t,n,f,c,b,u,t,b,s,s,g,p,p,w,o,p,n,y,d p,x,f,g,f,f,f,c,b,p,e,b,k,k,n,p,p,w,o,l,h,v,d p,x,f,w,f,c,f,w,n,n,e,b,s,s,w,w,p,w,o,p,k,v,d e,f,y,n,t,n,f,c,b,w,t,b,s,s,w,w,p,w,o,p,k,y,d e,f,f,e,t,n,f,c,b,w,t,b,s,s,g,g,p,w,o,p,k,v,d p,x,f,w,f,c,f,w,n,u,e,b,s,s,w,w,p,w,o,p,k,v,d p,x,s,g,f,c,f,w,n,p,e,b,s,s,w,w,p,w,o,p,k,s,d e,x,y,e,t,n,f,c,b,p,t,b,s,s,p,p,p,w,o,p,k,v,d p,x,f,g,f,f,f,c,b,h,e,b,k,k,b,p,p,w,o,l,h,v,d p,x,f,g,f,c,f,c,n,p,e,b,s,s,w,w,p,w,o,p,n,v,d p,x,f,g,f,f,f,c,b,h,e,b,k,k,n,p,p,w,o,l,h,v,d p,x,s,g,f,c,f,c,n,n,e,b,s,s,w,w,p,w,o,p,n,v,d e,x,f,g,t,n,f,c,b,w,t,b,s,s,p,g,p,w,o,p,k,v,d p,x,f,p,f,c,f,c,n,u,e,b,s,s,w,w,p,w,o,p,k,s,d e,f,f,e,t,n,f,c,b,n,t,b,s,s,g,p,p,w,o,p,n,v,d p,x,s,p,f,c,f,c,n,n,e,b,s,s,w,w,p,w,o,p,k,v,d p,x,f,g,f,c,f,w,n,p,e,b,s,s,w,w,p,w,o,p,k,s,d e,f,y,e,t,n,f,c,b,p,t,b,s,s,g,g,p,w,o,p,k,y,d p,x,f,g,f,c,f,c,n,n,e,b,s,s,w,w,p,w,o,p,k,s,d p,x,s,g,f,c,f,w,n,n,e,b,s,s,w,w,p,w,o,p,k,v,d e,f,y,g,t,n,f,c,b,u,t,b,s,s,w,g,p,w,o,p,n,y,d e,f,y,n,t,n,f,c,b,u,t,b,s,s,g,w,p,w,o,p,n,y,d p,x,f,g,f,c,f,w,n,g,e,b,s,s,w,w,p,w,o,p,n,s,d e,f,f,e,t,n,f,c,b,p,t,b,s,s,w,g,p,w,o,p,k,v,d p,x,s,w,f,c,f,c,n,n,e,b,s,s,w,w,p,w,o,p,n,s,d e,f,y,n,t,n,f,c,b,w,t,b,s,s,g,g,p,w,o,p,k,v,d p,x,f,g,f,f,f,c,b,h,e,b,k,k,n,b,p,w,o,l,h,v,d p,f,y,g,f,f,f,c,b,g,e,b,k,k,b,b,p,w,o,l,h,y,g e,f,y,g,t,n,f,c,b,n,t,b,s,s,p,g,p,w,o,p,k,v,d p,x,f,g,f,f,f,c,b,g,e,b,k,k,n,b,p,w,o,l,h,v,g e,f,y,g,t,n,f,c,b,p,t,b,s,s,g,w,p,w,o,p,n,v,d p,x,f,g,f,f,f,c,b,g,e,b,k,k,n,n,p,w,o,l,h,y,g p,f,f,g,f,f,f,c,b,p,e,b,k,k,n,p,p,w,o,l,h,y,p e,f,f,e,t,n,f,c,b,n,t,b,s,s,w,w,p,w,o,p,k,v,d e,f,f,g,t,n,f,c,b,w,t,b,s,s,g,g,p,w,o,p,k,v,d p,x,f,w,f,c,f,c,n,p,e,b,s,s,w,w,p,w,o,p,n,v,d e,f,f,g,t,n,f,c,b,n,t,b,s,s,w,w,p,w,o,p,n,y,d p,x,f,g,f,f,f,c,b,p,e,b,k,k,b,p,p,w,o,l,h,y,p p,x,y,y,f,f,f,c,b,h,e,b,k,k,n,b,p,w,o,l,h,v,d p,f,y,g,f,f,f,c,b,p,e,b,k,k,n,p,p,w,o,l,h,y,p p,x,f,y,f,f,f,c,b,p,e,b,k,k,p,b,p,w,o,l,h,v,g p,x,f,w,f,c,f,w,n,g,e,b,s,s,w,w,p,w,o,p,n,v,d p,x,y,y,f,f,f,c,b,h,e,b,k,k,b,n,p,w,o,l,h,y,d p,x,y,y,f,f,f,c,b,g,e,b,k,k,b,p,p,w,o,l,h,v,d p,x,f,y,f,f,f,c,b,h,e,b,k,k,n,n,p,w,o,l,h,v,p p,f,f,y,f,f,f,c,b,h,e,b,k,k,p,n,p,w,o,l,h,v,d p,f,f,y,f,f,f,c,b,g,e,b,k,k,n,n,p,w,o,l,h,y,d e,f,y,g,t,n,f,c,b,n,t,b,s,s,w,g,p,w,o,p,k,y,d p,x,y,g,f,f,f,c,b,g,e,b,k,k,b,n,p,w,o,l,h,y,g p,f,f,y,f,f,f,c,b,p,e,b,k,k,n,b,p,w,o,l,h,y,p p,f,f,y,f,f,f,c,b,g,e,b,k,k,p,b,p,w,o,l,h,y,g e,x,f,g,t,n,f,c,b,w,t,b,s,s,w,w,p,w,o,p,k,y,d e,f,y,n,t,n,f,c,b,n,t,b,s,s,w,g,p,w,o,p,n,v,d e,f,y,e,t,n,f,c,b,n,t,b,s,s,w,p,p,w,o,p,n,v,d p,x,y,y,f,f,f,c,b,h,e,b,k,k,n,p,p,w,o,l,h,v,g p,f,y,g,f,f,f,c,b,g,e,b,k,k,b,b,p,w,o,l,h,v,g p,f,y,g,f,f,f,c,b,h,e,b,k,k,n,b,p,w,o,l,h,v,d e,f,y,e,t,n,f,c,b,w,t,b,s,s,p,w,p,w,o,p,k,y,d p,f,y,g,f,f,f,c,b,p,e,b,k,k,n,b,p,w,o,l,h,v,g e,f,f,e,t,n,f,c,b,n,t,b,s,s,g,w,p,w,o,p,k,v,d e,f,f,g,t,n,f,c,b,p,t,b,s,s,g,w,p,w,o,p,k,v,d 
e,f,f,g,t,n,f,c,b,w,t,b,s,s,g,p,p,w,o,p,n,v,d p,x,f,g,f,f,f,c,b,p,e,b,k,k,b,b,p,w,o,l,h,y,d p,x,y,y,f,f,f,c,b,p,e,b,k,k,b,b,p,w,o,l,h,y,g p,f,f,y,f,f,f,c,b,h,e,b,k,k,b,n,p,w,o,l,h,y,g e,f,y,e,t,n,f,c,b,p,t,b,s,s,g,p,p,w,o,p,n,v,d p,x,f,y,f,f,f,c,b,h,e,b,k,k,p,n,p,w,o,l,h,y,d p,f,y,g,f,f,f,c,b,h,e,b,k,k,p,b,p,w,o,l,h,y,p p,f,f,y,f,f,f,c,b,g,e,b,k,k,p,p,p,w,o,l,h,v,g p,f,y,y,f,f,f,c,b,g,e,b,k,k,p,b,p,w,o,l,h,v,d p,x,y,g,f,f,f,c,b,p,e,b,k,k,n,n,p,w,o,l,h,y,p p,f,f,y,f,f,f,c,b,h,e,b,k,k,b,b,p,w,o,l,h,v,p p,x,s,p,f,c,f,c,n,g,e,b,s,s,w,w,p,w,o,p,n,s,d p,x,y,y,f,f,f,c,b,p,e,b,k,k,n,n,p,w,o,l,h,v,g e,f,f,e,t,n,f,c,b,u,t,b,s,s,g,g,p,w,o,p,k,v,d e,f,y,e,t,n,f,c,b,n,t,b,s,s,g,p,p,w,o,p,k,y,d p,x,f,p,f,c,f,c,n,n,e,b,s,s,w,w,p,w,o,p,n,v,d p,x,f,y,f,f,f,c,b,h,e,b,k,k,b,b,p,w,o,l,h,v,g p,f,y,g,f,f,f,c,b,p,e,b,k,k,b,p,p,w,o,l,h,v,g e,x,y,g,t,n,f,c,b,p,t,b,s,s,w,p,p,w,o,p,k,y,d e,f,f,e,t,n,f,c,b,p,t,b,s,s,w,g,p,w,o,p,n,y,d p,f,y,g,f,f,f,c,b,p,e,b,k,k,b,b,p,w,o,l,h,v,d e,f,y,g,t,n,f,c,b,u,t,b,s,s,w,g,p,w,o,p,n,v,d p,x,f,y,f,f,f,c,b,g,e,b,k,k,n,p,p,w,o,l,h,v,d p,f,y,g,f,f,f,c,b,g,e,b,k,k,n,n,p,w,o,l,h,y,g e,f,f,e,t,n,f,c,b,p,t,b,s,s,g,g,p,w,o,p,n,v,d p,x,y,y,f,f,f,c,b,h,e,b,k,k,n,n,p,w,o,l,h,v,g p,f,f,y,f,f,f,c,b,g,e,b,k,k,n,b,p,w,o,l,h,y,g p,x,y,y,f,f,f,c,b,p,e,b,k,k,p,p,p,w,o,l,h,v,g p,f,f,g,f,f,f,c,b,g,e,b,k,k,p,b,p,w,o,l,h,y,g p,x,s,p,f,c,f,w,n,n,e,b,s,s,w,w,p,w,o,p,n,s,d p,x,f,g,f,c,f,c,n,g,e,b,s,s,w,w,p,w,o,p,k,s,d p,x,y,g,f,f,f,c,b,g,e,b,k,k,b,p,p,w,o,l,h,v,p p,x,y,y,f,f,f,c,b,h,e,b,k,k,b,p,p,w,o,l,h,y,p p,f,f,g,f,f,f,c,b,h,e,b,k,k,p,n,p,w,o,l,h,v,d e,f,y,g,t,n,f,c,b,u,t,b,s,s,w,w,p,w,o,p,n,v,d p,x,s,g,t,f,f,c,b,w,t,b,f,s,w,w,p,w,o,p,h,v,u p,x,y,y,f,f,f,c,b,g,e,b,k,k,p,b,p,w,o,l,h,y,d e,f,f,g,t,n,f,c,b,w,t,b,s,s,p,p,p,w,o,p,k,v,d p,f,f,y,f,f,f,c,b,g,e,b,k,k,p,b,p,w,o,l,h,y,d p,x,y,g,f,f,f,c,b,h,e,b,k,k,b,b,p,w,o,l,h,y,p p,f,f,y,f,f,f,c,b,g,e,b,k,k,b,b,p,w,o,l,h,v,p p,x,y,g,f,f,f,c,b,h,e,b,k,k,b,n,p,w,o,l,h,y,g p,f,y,g,f,f,f,c,b,h,e,b,k,k,b,p,p,w,o,l,h,y,g p,f,y,y,f,f,f,c,b,h,e,b,k,k,n,b,p,w,o,l,h,y,p p,x,f,y,f,f,f,c,b,p,e,b,k,k,p,n,p,w,o,l,h,y,p p,x,s,w,f,c,f,c,n,g,e,b,s,s,w,w,p,w,o,p,k,v,d p,x,y,g,f,f,f,c,b,g,e,b,k,k,b,n,p,w,o,l,h,v,p p,x,f,g,f,f,f,c,b,p,e,b,k,k,b,b,p,w,o,l,h,y,p e,f,y,n,t,n,f,c,b,u,t,b,s,s,p,p,p,w,o,p,n,v,d e,f,f,g,t,n,f,c,b,w,t,b,s,s,p,w,p,w,o,p,k,v,d p,x,f,y,f,f,f,c,b,g,e,b,k,k,n,b,p,w,o,l,h,y,p p,x,s,p,f,c,f,c,n,u,e,b,s,s,w,w,p,w,o,p,n,s,d p,f,f,g,f,f,f,c,b,p,e,b,k,k,n,n,p,w,o,l,h,v,g p,x,f,g,f,f,f,c,b,h,e,b,k,k,b,b,p,w,o,l,h,v,g p,x,s,w,f,c,f,c,n,u,e,b,s,s,w,w,p,w,o,p,k,v,d e,f,f,e,t,n,f,c,b,w,t,b,s,s,w,p,p,w,o,p,n,v,d p,f,y,g,f,f,f,c,b,p,e,b,k,k,b,n,p,w,o,l,h,v,g p,x,f,p,f,c,f,w,n,p,e,b,s,s,w,w,p,w,o,p,k,v,d p,f,s,w,t,f,f,c,b,p,t,b,f,s,w,w,p,w,o,p,h,v,g p,x,y,y,f,f,f,c,b,p,e,b,k,k,n,n,p,w,o,l,h,v,p e,x,y,b,t,n,f,c,b,e,e,?,s,s,e,w,p,w,t,e,w,c,w p,f,f,g,f,f,f,c,b,g,e,b,k,k,n,p,p,w,o,l,h,y,g p,x,f,g,f,f,f,c,b,p,e,b,k,k,p,n,p,w,o,l,h,y,g p,x,f,y,f,f,f,c,b,h,e,b,k,k,n,b,p,w,o,l,h,y,g p,f,f,y,f,f,f,c,b,h,e,b,k,k,p,p,p,w,o,l,h,y,g e,f,f,g,t,n,f,c,b,u,t,b,s,s,p,g,p,w,o,p,n,y,d p,x,f,y,f,f,f,c,b,h,e,b,k,k,p,p,p,w,o,l,h,v,d p,x,y,g,f,f,f,c,b,h,e,b,k,k,p,p,p,w,o,l,h,v,d p,f,f,g,f,f,f,c,b,p,e,b,k,k,n,n,p,w,o,l,h,y,g p,x,f,g,f,c,f,w,n,u,e,b,s,s,w,w,p,w,o,p,n,s,d p,x,f,g,f,c,f,c,n,u,e,b,s,s,w,w,p,w,o,p,n,v,d p,x,y,g,f,f,f,c,b,p,e,b,k,k,p,n,p,w,o,l,h,v,g p,x,s,g,t,f,f,c,b,h,t,b,s,s,w,w,p,w,o,p,h,s,g p,x,y,g,f,f,f,c,b,h,e,b,k,k,p,b,p,w,o,l,h,v,g e,f,y,g,t,n,f,c,b,n,t,b,s,s,p,g,p,w,o,p,k,y,d p,f,f,y,f,f,f,c,b,p,e,b,k,k,b,b,p,w,o,l,h,v,d e,f,f,e,t,n,f,c,b,p,t,b,s,s,p,p,p,w,o,p,k,v,d 
p,f,y,g,f,f,f,c,b,p,e,b,k,k,p,b,p,w,o,l,h,y,p e,f,y,n,t,n,f,c,b,n,t,b,s,s,g,w,p,w,o,p,k,v,d e,f,f,e,t,n,f,c,b,w,t,b,s,s,g,p,p,w,o,p,k,y,d p,x,y,g,f,f,f,c,b,g,e,b,k,k,n,p,p,w,o,l,h,y,p p,x,y,g,f,f,f,c,b,h,e,b,k,k,b,p,p,w,o,l,h,y,d e,f,f,e,t,n,f,c,b,w,t,b,s,s,g,g,p,w,o,p,n,v,d p,f,f,g,f,f,f,c,b,p,e,b,k,k,b,b,p,w,o,l,h,v,p e,f,f,g,t,n,f,c,b,w,t,b,s,s,p,p,p,w,o,p,k,y,d e,f,y,e,t,n,f,c,b,w,t,b,s,s,w,g,p,w,o,p,k,y,d e,f,y,e,t,n,f,c,b,w,t,b,s,s,g,g,p,w,o,p,n,v,d p,f,f,g,f,f,f,c,b,g,e,b,k,k,n,n,p,w,o,l,h,y,p e,x,f,e,t,n,f,c,b,u,t,b,s,s,g,p,p,w,o,p,k,v,d p,f,y,g,f,f,f,c,b,p,e,b,k,k,p,b,p,w,o,l,h,v,d p,x,y,y,f,f,f,c,b,p,e,b,k,k,p,n,p,w,o,l,h,v,p p,f,y,y,f,f,f,c,b,p,e,b,k,k,b,b,p,w,o,l,h,v,d p,x,f,y,f,f,f,c,b,g,e,b,k,k,n,p,p,w,o,l,h,v,p p,x,y,y,f,f,f,c,b,p,e,b,k,k,n,n,p,w,o,l,h,v,d e,x,f,g,t,n,f,c,b,u,t,b,s,s,g,g,p,w,o,p,n,v,d e,x,y,g,t,n,f,c,b,u,t,b,s,s,g,w,p,w,o,p,n,v,d p,f,y,g,f,f,f,c,b,g,e,b,k,k,b,p,p,w,o,l,h,v,g p,f,f,y,f,f,f,c,b,h,e,b,k,k,p,p,p,w,o,l,h,y,d p,x,y,g,f,f,f,c,b,p,e,b,k,k,p,b,p,w,o,l,h,y,p p,x,y,e,f,y,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,p p,x,y,g,f,f,f,c,b,h,e,b,k,k,n,n,p,w,o,l,h,y,p p,x,y,g,f,f,f,c,b,g,e,b,k,k,p,p,p,w,o,l,h,v,g p,x,s,g,f,c,f,w,n,u,e,b,s,s,w,w,p,w,o,p,n,v,d p,f,f,g,f,f,f,c,b,p,e,b,k,k,p,b,p,w,o,l,h,v,g p,f,s,b,t,f,f,c,b,p,t,b,s,f,w,w,p,w,o,p,h,s,g p,x,y,g,f,f,f,c,b,p,e,b,k,k,n,p,p,w,o,l,h,v,d p,x,y,y,f,f,f,c,b,p,e,b,k,k,p,p,p,w,o,l,h,y,d p,x,s,w,f,c,f,w,n,u,e,b,s,s,w,w,p,w,o,p,k,s,d p,x,y,y,f,f,f,c,b,h,e,b,k,k,p,n,p,w,o,l,h,v,d e,f,f,e,t,n,f,c,b,w,t,b,s,s,w,p,p,w,o,p,k,v,d p,x,y,g,f,f,f,c,b,h,e,b,k,k,p,p,p,w,o,l,h,y,g p,x,f,y,f,f,f,c,b,p,e,b,k,k,n,p,p,w,o,l,h,v,d p,x,f,y,f,f,f,c,b,p,e,b,k,k,p,b,p,w,o,l,h,v,p p,f,f,g,f,f,f,c,b,p,e,b,k,k,p,n,p,w,o,l,h,v,g p,x,f,y,f,f,f,c,b,h,e,b,k,k,n,b,p,w,o,l,h,y,d e,f,f,g,t,n,f,c,b,u,t,b,s,s,g,w,p,w,o,p,k,v,d p,x,f,g,f,f,f,c,b,p,e,b,k,k,p,p,p,w,o,l,h,y,d e,f,y,n,t,n,f,c,b,u,t,b,s,s,w,p,p,w,o,p,n,y,d p,f,y,g,f,f,f,c,b,h,e,b,k,k,b,p,p,w,o,l,h,v,d p,f,f,g,f,f,f,c,b,p,e,b,k,k,p,p,p,w,o,l,h,y,p p,f,f,g,f,f,f,c,b,g,e,b,k,k,b,b,p,w,o,l,h,v,d p,x,s,w,f,c,f,w,n,u,e,b,s,s,w,w,p,w,o,p,k,v,d p,x,y,g,f,f,f,c,b,h,e,b,k,k,n,n,p,w,o,l,h,y,d e,f,y,n,t,n,f,c,b,p,t,b,s,s,g,w,p,w,o,p,k,v,d p,x,f,w,f,c,f,c,n,u,e,b,s,s,w,w,p,w,o,p,k,s,d p,x,f,y,f,f,f,c,b,p,e,b,k,k,p,b,p,w,o,l,h,v,d p,x,f,y,f,f,f,c,b,p,e,b,k,k,b,n,p,w,o,l,h,y,d p,x,f,p,f,c,f,w,n,u,e,b,s,s,w,w,p,w,o,p,n,v,d p,f,f,y,f,f,f,c,b,p,e,b,k,k,p,p,p,w,o,l,h,y,d p,f,f,g,f,f,f,c,b,h,e,b,k,k,p,n,p,w,o,l,h,v,p e,f,y,e,t,n,f,c,b,p,t,b,s,s,g,g,p,w,o,p,k,v,d p,x,y,y,f,f,f,c,b,g,e,b,k,k,p,b,p,w,o,l,h,v,p p,f,f,y,f,f,f,c,b,g,e,b,k,k,b,p,p,w,o,l,h,v,p p,x,f,g,f,c,f,c,n,g,e,b,s,s,w,w,p,w,o,p,n,s,d p,x,s,p,f,c,f,w,n,u,e,b,s,s,w,w,p,w,o,p,k,v,d e,x,f,e,t,n,f,c,b,n,t,b,s,s,w,p,p,w,o,p,n,v,d p,x,y,y,f,f,f,c,b,h,e,b,k,k,b,b,p,w,o,l,h,v,g e,f,f,n,t,n,f,c,b,n,t,b,s,s,w,w,p,w,o,p,n,y,d e,f,y,e,t,n,f,c,b,p,t,b,s,s,w,w,p,w,o,p,k,y,d p,f,y,y,f,f,f,c,b,p,e,b,k,k,b,p,p,w,o,l,h,y,d p,x,f,y,f,f,f,c,b,p,e,b,k,k,b,n,p,w,o,l,h,y,g p,x,y,g,f,f,f,c,b,g,e,b,k,k,n,p,p,w,o,l,h,v,d p,f,f,g,f,f,f,c,b,g,e,b,k,k,n,b,p,w,o,l,h,y,d p,f,f,y,f,f,f,c,b,g,e,b,k,k,p,n,p,w,o,l,h,y,p e,f,y,g,t,n,f,c,b,w,t,b,s,s,p,p,p,w,o,p,n,y,d p,x,y,g,f,f,f,c,b,p,e,b,k,k,b,p,p,w,o,l,h,v,p p,x,y,y,f,f,f,c,b,h,e,b,k,k,n,n,p,w,o,l,h,v,p p,f,f,g,f,f,f,c,b,p,e,b,k,k,b,n,p,w,o,l,h,v,p p,f,f,y,f,f,f,c,b,p,e,b,k,k,b,n,p,w,o,l,h,y,p e,f,f,e,t,n,f,c,b,u,t,b,s,s,w,g,p,w,o,p,n,v,d e,f,y,n,t,n,f,c,b,p,t,b,s,s,g,g,p,w,o,p,n,v,d p,x,f,y,f,f,f,c,b,h,e,b,k,k,p,n,p,w,o,l,h,y,g e,f,y,u,f,n,f,c,n,h,e,?,s,f,w,w,p,w,o,f,h,y,d e,f,y,n,t,n,f,c,b,u,t,b,s,s,p,w,p,w,o,p,n,y,d 
p,f,y,y,f,f,f,c,b,p,e,b,k,k,p,p,p,w,o,l,h,v,g p,f,f,y,f,f,f,c,b,g,e,b,k,k,b,b,p,w,o,l,h,v,g p,x,y,g,f,f,f,c,b,g,e,b,k,k,n,p,p,w,o,l,h,y,d p,x,y,g,f,f,f,c,b,p,e,b,k,k,n,p,p,w,o,l,h,y,p p,f,f,y,f,f,f,c,b,g,e,b,k,k,p,b,p,w,o,l,h,v,p p,x,y,y,f,f,f,c,b,g,e,b,k,k,b,b,p,w,o,l,h,y,d p,x,f,y,f,f,f,c,b,h,e,b,k,k,b,b,p,w,o,l,h,v,p p,x,y,g,f,f,f,c,b,p,e,b,k,k,b,n,p,w,o,l,h,v,p p,x,f,y,f,f,f,c,b,h,e,b,k,k,n,b,p,w,o,l,h,y,p e,x,f,g,t,n,f,c,b,u,t,b,s,s,p,g,p,w,o,p,n,v,d p,f,y,g,f,f,f,c,b,h,e,b,k,k,n,p,p,w,o,l,h,y,d e,f,y,e,t,n,f,c,b,p,t,b,s,s,g,g,p,w,o,p,n,v,d p,x,f,g,f,f,f,c,b,p,e,b,k,k,b,n,p,w,o,l,h,v,d p,f,f,y,f,f,f,c,b,h,e,b,k,k,b,b,p,w,o,l,h,v,d e,f,y,n,t,n,f,c,b,n,t,b,s,s,w,w,p,w,o,p,n,y,d e,f,f,g,t,n,f,c,b,w,t,b,s,s,p,g,p,w,o,p,k,y,d p,f,y,g,f,f,f,c,b,p,e,b,k,k,n,p,p,w,o,l,h,v,g e,x,f,e,t,n,f,c,b,w,t,b,s,s,p,p,p,w,o,p,k,v,d e,f,f,e,t,n,f,c,b,p,t,b,s,s,p,w,p,w,o,p,n,y,d p,f,y,y,f,f,f,c,b,h,e,b,k,k,b,b,p,w,o,l,h,v,p p,x,f,w,f,c,f,c,n,u,e,b,s,s,w,w,p,w,o,p,k,v,d p,x,y,g,f,f,f,c,b,p,e,b,k,k,b,n,p,w,o,l,h,y,d p,x,y,e,f,y,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,d p,x,f,y,f,f,f,c,b,p,e,b,k,k,b,p,p,w,o,l,h,v,p p,f,y,g,f,f,f,c,b,p,e,b,k,k,b,p,p,w,o,l,h,y,d e,f,y,g,t,n,f,c,b,u,t,b,s,s,w,p,p,w,o,p,n,y,d p,x,y,n,f,f,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,l e,f,y,e,t,n,f,c,b,n,t,b,s,s,g,g,p,w,o,p,n,v,d p,b,s,b,t,n,f,c,b,g,e,b,s,s,w,w,p,w,t,p,r,v,m e,f,f,g,t,n,f,c,b,w,t,b,s,s,g,g,p,w,o,p,k,y,d p,f,f,g,f,f,f,c,b,p,e,b,k,k,b,n,p,w,o,l,h,v,g e,f,y,g,t,n,f,c,b,w,t,b,s,s,w,g,p,w,o,p,k,y,d p,f,f,y,f,f,f,c,b,p,e,b,k,k,p,n,p,w,o,l,h,y,p p,f,f,y,f,f,f,c,b,g,e,b,k,k,n,n,p,w,o,l,h,y,g p,f,f,g,f,f,f,c,b,g,e,b,k,k,n,p,p,w,o,l,h,v,g e,f,f,e,t,n,f,c,b,u,t,b,s,s,p,w,p,w,o,p,n,v,d p,x,f,g,f,f,f,c,b,p,e,b,k,k,n,p,p,w,o,l,h,y,p p,f,f,y,f,f,f,c,b,g,e,b,k,k,p,b,p,w,o,l,h,v,g p,x,f,y,f,f,f,c,b,g,e,b,k,k,n,n,p,w,o,l,h,v,d p,x,f,y,f,f,f,c,b,g,e,b,k,k,b,b,p,w,o,l,h,y,p p,f,f,y,f,f,f,c,b,p,e,b,k,k,p,n,p,w,o,l,h,v,g p,f,f,y,f,f,f,c,b,h,e,b,k,k,n,b,p,w,o,l,h,v,g p,x,y,g,f,f,f,c,b,p,e,b,k,k,n,b,p,w,o,l,h,v,p e,f,y,e,t,n,f,c,b,u,t,b,s,s,g,g,p,w,o,p,k,v,d p,f,f,g,f,f,f,c,b,g,e,b,k,k,n,p,p,w,o,l,h,v,d p,f,f,y,f,f,f,c,b,h,e,b,k,k,n,p,p,w,o,l,h,v,g e,f,f,n,t,n,f,c,b,u,t,b,s,s,w,p,p,w,o,p,k,v,d p,x,y,y,f,f,f,c,b,g,e,b,k,k,p,p,p,w,o,l,h,v,p p,x,y,y,f,f,f,c,b,h,e,b,k,k,b,p,p,w,o,l,h,v,p e,f,y,n,t,n,f,c,b,p,t,b,s,s,p,p,p,w,o,p,k,y,d p,f,y,g,f,f,f,c,b,p,e,b,k,k,b,p,p,w,o,l,h,y,p e,f,f,e,t,n,f,c,b,p,t,b,s,s,g,g,p,w,o,p,k,y,d p,x,y,y,f,f,f,c,b,g,e,b,k,k,b,b,p,w,o,l,h,v,g p,f,y,g,f,f,f,c,b,h,e,b,k,k,b,b,p,w,o,l,h,v,d p,x,y,y,f,f,f,c,b,p,e,b,k,k,b,b,p,w,o,l,h,v,d p,x,s,g,f,c,f,c,n,g,e,b,s,s,w,w,p,w,o,p,k,s,d e,f,f,g,t,n,f,c,b,w,t,b,s,s,g,p,p,w,o,p,k,v,d p,f,y,g,f,f,f,c,b,g,e,b,k,k,p,p,p,w,o,l,h,y,g p,x,f,g,f,f,f,c,b,h,e,b,k,k,b,b,p,w,o,l,h,y,d p,f,y,g,f,f,f,c,b,h,e,b,k,k,b,n,p,w,o,l,h,v,p e,f,y,n,t,n,f,c,b,w,t,b,s,s,p,w,p,w,o,p,k,v,d p,x,y,y,f,f,f,c,b,p,e,b,k,k,b,n,p,w,o,l,h,y,d p,f,f,g,f,f,f,c,b,g,e,b,k,k,p,b,p,w,o,l,h,y,d e,f,y,e,t,n,f,c,b,w,t,b,s,s,w,g,p,w,o,p,n,v,d e,f,y,e,t,n,f,c,b,w,t,b,s,s,p,g,p,w,o,p,k,v,d p,f,s,g,t,f,f,c,b,p,t,b,f,f,w,w,p,w,o,p,h,v,u p,x,f,g,f,f,f,c,b,g,e,b,k,k,p,n,p,w,o,l,h,y,p p,f,y,g,f,f,f,c,b,g,e,b,k,k,n,b,p,w,o,l,h,v,p p,x,y,g,f,f,f,c,b,g,e,b,k,k,p,b,p,w,o,l,h,v,p p,x,f,y,f,f,f,c,b,h,e,b,k,k,b,n,p,w,o,l,h,v,d p,f,y,g,f,f,f,c,b,p,e,b,k,k,b,n,p,w,o,l,h,v,d p,f,y,g,f,f,f,c,b,p,e,b,k,k,n,p,p,w,o,l,h,v,p e,f,y,e,t,n,f,c,b,w,t,b,s,s,w,w,p,w,o,p,n,y,d p,x,y,g,f,f,f,c,b,h,e,b,k,k,b,n,p,w,o,l,h,v,g p,f,y,g,f,f,f,c,b,p,e,b,k,k,p,p,p,w,o,l,h,v,g p,x,y,g,f,f,f,c,b,p,e,b,k,k,b,b,p,w,o,l,h,y,p e,f,f,e,t,n,f,c,b,p,t,b,s,s,w,p,p,w,o,p,n,v,d 
e,f,y,e,t,n,f,c,b,p,t,b,s,s,p,w,p,w,o,p,n,v,d e,f,y,e,t,n,f,c,b,n,t,b,s,s,p,p,p,w,o,p,n,y,d p,f,f,y,f,f,f,c,b,h,e,b,k,k,b,n,p,w,o,l,h,v,g p,f,f,g,f,f,f,c,b,h,e,b,k,k,b,p,p,w,o,l,h,y,p e,f,y,n,t,n,f,c,b,u,t,b,s,s,g,p,p,w,o,p,k,y,d p,x,y,g,f,f,f,c,b,g,e,b,k,k,p,b,p,w,o,l,h,y,g e,f,f,g,t,n,f,c,b,u,t,b,s,s,g,w,p,w,o,p,n,v,d p,x,f,y,f,f,f,c,b,p,e,b,k,k,n,p,p,w,o,l,h,v,p p,x,y,y,f,f,f,c,b,p,e,b,k,k,p,n,p,w,o,l,h,v,d p,x,y,y,f,f,f,c,b,p,e,b,k,k,n,p,p,w,o,l,h,v,d e,f,f,c,f,n,f,w,n,w,e,b,f,f,w,n,p,w,o,e,w,v,l p,x,f,g,f,f,f,c,b,h,e,b,k,k,n,n,p,w,o,l,h,v,d p,f,f,g,f,f,f,c,b,p,e,b,k,k,n,p,p,w,o,l,h,y,g p,x,y,g,f,f,f,c,b,h,e,b,k,k,p,n,p,w,o,l,h,v,g p,f,y,g,f,f,f,c,b,p,e,b,k,k,p,b,p,w,o,l,h,y,g p,x,s,b,t,f,f,c,b,w,t,b,f,s,w,w,p,w,o,p,h,s,u p,f,f,g,f,f,f,c,b,h,e,b,k,k,n,n,p,w,o,l,h,v,d p,x,y,g,f,f,f,c,b,p,e,b,k,k,n,p,p,w,o,l,h,v,g p,f,f,y,f,f,f,c,b,g,e,b,k,k,b,b,p,w,o,l,h,y,p p,f,f,g,f,f,f,c,b,h,e,b,k,k,n,p,p,w,o,l,h,v,p p,f,f,g,f,f,f,c,b,g,e,b,k,k,p,p,p,w,o,l,h,y,d e,f,f,g,t,n,f,c,b,w,t,b,s,s,p,w,p,w,o,p,k,y,d p,x,f,y,f,f,f,c,b,p,e,b,k,k,b,b,p,w,o,l,h,y,p p,x,f,y,f,f,f,c,b,h,e,b,k,k,b,b,p,w,o,l,h,y,p e,f,y,e,t,n,f,c,b,u,t,b,s,s,p,p,p,w,o,p,k,y,d p,f,y,g,f,f,f,c,b,h,e,b,k,k,b,p,p,w,o,l,h,v,p p,f,f,g,f,f,f,c,b,h,e,b,k,k,b,b,p,w,o,l,h,y,d p,x,y,y,f,f,f,c,b,g,e,b,k,k,p,n,p,w,o,l,h,y,d e,f,f,g,t,n,f,c,b,w,t,b,s,s,w,w,p,w,o,p,k,y,d p,x,f,g,f,f,f,c,b,g,e,b,k,k,n,p,p,w,o,l,h,v,g p,x,f,g,f,f,f,c,b,g,e,b,k,k,p,p,p,w,o,l,h,y,p e,f,y,n,t,n,f,c,b,p,t,b,s,s,p,w,p,w,o,p,n,y,d e,f,f,g,t,n,f,c,b,u,t,b,s,s,p,g,p,w,o,p,n,v,d p,x,y,g,f,f,f,c,b,h,e,b,k,k,n,n,p,w,o,l,h,v,d p,f,y,y,f,f,f,c,b,h,e,b,k,k,p,p,p,w,o,l,h,v,p p,f,f,y,f,f,f,c,b,h,e,b,k,k,n,b,p,w,o,l,h,v,d p,f,f,g,f,f,f,c,b,g,e,b,k,k,n,p,p,w,o,l,h,v,p p,f,f,g,f,f,f,c,b,h,e,b,k,k,b,b,p,w,o,l,h,y,p p,f,y,g,f,f,f,c,b,p,e,b,k,k,p,n,p,w,o,l,h,y,g p,f,s,b,t,f,f,c,b,w,t,b,f,f,w,w,p,w,o,p,h,s,u e,f,f,e,t,n,f,c,b,u,t,b,s,s,p,p,p,w,o,p,n,y,d p,x,y,n,f,y,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,d e,x,y,g,t,n,f,c,b,n,t,b,s,s,g,w,p,w,o,p,n,v,d p,x,f,g,f,c,f,w,n,n,e,b,s,s,w,w,p,w,o,p,k,v,d p,x,f,p,f,c,f,w,n,n,e,b,s,s,w,w,p,w,o,p,n,v,d p,x,y,n,f,y,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,l p,f,f,g,f,f,f,c,b,g,e,b,k,k,n,p,p,w,o,l,h,y,p p,x,f,y,f,f,f,c,b,g,e,b,k,k,b,b,p,w,o,l,h,v,g p,x,s,g,t,f,f,c,b,w,t,b,s,f,w,w,p,w,o,p,h,s,u p,f,y,g,f,f,f,c,b,h,e,b,k,k,b,p,p,w,o,l,h,y,p p,x,f,y,f,f,f,c,b,p,e,b,k,k,b,b,p,w,o,l,h,y,d p,f,y,g,f,f,f,c,b,g,e,b,k,k,n,p,p,w,o,l,h,y,p p,f,f,g,f,f,f,c,b,h,e,b,k,k,n,b,p,w,o,l,h,y,d p,f,y,g,f,f,f,c,b,h,e,b,k,k,b,b,p,w,o,l,h,y,d p,x,y,g,f,f,f,c,b,g,e,b,k,k,n,p,p,w,o,l,h,y,g p,f,f,g,f,f,f,c,b,h,e,b,k,k,p,p,p,w,o,l,h,y,g p,x,f,y,f,f,f,c,b,p,e,b,k,k,b,b,p,w,o,l,h,y,g p,f,f,y,f,f,f,c,b,p,e,b,k,k,n,n,p,w,o,l,h,v,g p,x,f,y,f,f,f,c,b,h,e,b,k,k,n,b,p,w,o,l,h,v,g p,x,f,y,f,f,f,c,b,g,e,b,k,k,b,p,p,w,o,l,h,y,g p,x,y,y,f,f,f,c,b,p,e,b,k,k,n,p,p,w,o,l,h,v,g p,f,s,g,t,f,f,c,b,h,t,b,s,s,w,w,p,w,o,p,h,v,u p,x,f,y,f,f,f,c,b,h,e,b,k,k,n,n,p,w,o,l,h,y,g p,f,f,y,f,f,f,c,b,p,e,b,k,k,p,b,p,w,o,l,h,y,g p,f,y,g,f,f,f,c,b,p,e,b,k,k,n,p,p,w,o,l,h,y,d p,f,f,y,f,f,f,c,b,p,e,b,k,k,p,n,p,w,o,l,h,y,d p,f,f,y,f,f,f,c,b,g,e,b,k,k,b,p,p,w,o,l,h,v,d p,x,y,g,f,f,f,c,b,h,e,b,k,k,b,n,p,w,o,l,h,v,p p,x,y,g,f,f,f,c,b,p,e,b,k,k,n,b,p,w,o,l,h,y,p e,f,y,e,t,n,f,c,b,p,t,b,s,s,w,w,p,w,o,p,n,v,d p,f,f,y,f,f,f,c,b,h,e,b,k,k,n,p,p,w,o,l,h,v,d p,f,f,g,f,f,f,c,b,h,e,b,k,k,b,p,p,w,o,l,h,y,d p,f,y,g,f,f,f,c,b,h,e,b,k,k,n,p,p,w,o,l,h,v,p p,x,f,y,f,f,f,c,b,g,e,b,k,k,b,p,p,w,o,l,h,v,g p,x,f,y,f,f,f,c,b,g,e,b,k,k,p,p,p,w,o,l,h,v,p p,x,f,g,f,f,f,c,b,h,e,b,k,k,n,b,p,w,o,l,h,y,d p,x,y,y,f,f,f,c,b,p,e,b,k,k,p,p,p,w,o,l,h,y,p 
p,f,s,w,t,f,f,c,b,p,t,b,f,f,w,w,p,w,o,p,h,s,u p,f,f,y,f,f,f,c,b,h,e,b,k,k,p,n,p,w,o,l,h,v,g p,f,f,y,f,f,f,c,b,p,e,b,k,k,b,p,p,w,o,l,h,v,d p,f,y,g,f,f,f,c,b,h,e,b,k,k,n,n,p,w,o,l,h,y,p p,f,f,y,f,f,f,c,b,p,e,b,k,k,p,b,p,w,o,l,h,y,d p,f,f,y,f,f,f,c,b,h,e,b,k,k,b,n,p,w,o,l,h,v,d p,f,f,g,f,f,f,c,b,g,e,b,k,k,b,b,p,w,o,l,h,v,p p,x,y,g,f,f,f,c,b,p,e,b,k,k,n,b,p,w,o,l,h,y,g p,x,f,y,f,f,f,c,b,h,e,b,k,k,b,p,p,w,o,l,h,y,p p,f,f,g,f,f,f,c,b,g,e,b,k,k,p,b,p,w,o,l,h,v,g p,x,f,y,f,f,f,c,b,p,e,b,k,k,n,b,p,w,o,l,h,y,d p,f,y,g,f,f,f,c,b,p,e,b,k,k,n,n,p,w,o,l,h,v,d p,x,y,y,f,f,f,c,b,g,e,b,k,k,n,b,p,w,o,l,h,v,d p,x,s,w,t,f,f,c,b,h,t,b,f,s,w,w,p,w,o,p,h,s,g p,f,f,g,f,f,f,c,b,p,e,b,k,k,n,b,p,w,o,l,h,y,g p,x,s,b,t,f,f,c,b,p,t,b,f,s,w,w,p,w,o,p,h,s,g p,f,y,g,f,f,f,c,b,h,e,b,k,k,p,b,p,w,o,l,h,v,d p,x,y,g,f,f,f,c,b,p,e,b,k,k,n,n,p,w,o,l,h,v,d p,x,f,y,f,f,f,c,b,g,e,b,k,k,n,b,p,w,o,l,h,y,g p,x,f,y,f,f,f,c,b,p,e,b,k,k,n,n,p,w,o,l,h,y,g p,x,y,y,f,f,f,c,b,g,e,b,k,k,p,p,p,w,o,l,h,v,g e,f,y,g,t,n,f,c,b,u,t,b,s,s,g,g,p,w,o,p,n,y,d p,f,y,y,f,f,f,c,b,h,e,b,k,k,p,p,p,w,o,l,h,y,p p,x,y,g,f,f,f,c,b,h,e,b,k,k,p,n,p,w,o,l,h,y,p p,f,f,y,f,f,f,c,b,h,e,b,k,k,p,b,p,w,o,l,h,v,g p,f,y,y,f,f,f,c,b,h,e,b,k,k,n,p,p,w,o,l,h,v,p p,f,f,y,f,f,f,c,b,g,e,b,k,k,n,p,p,w,o,l,h,y,d p,x,y,g,f,f,f,c,b,h,e,b,k,k,p,p,p,w,o,l,h,v,p p,f,f,g,f,f,f,c,b,g,e,b,k,k,b,b,p,w,o,l,h,y,g p,x,y,g,f,f,f,c,b,g,e,b,k,k,p,p,p,w,o,l,h,y,p p,x,f,p,f,c,f,c,n,p,e,b,s,s,w,w,p,w,o,p,k,s,d p,x,y,y,f,f,f,c,b,g,e,b,k,k,n,n,p,w,o,l,h,y,d p,x,f,y,f,f,f,c,b,g,e,b,k,k,n,n,p,w,o,l,h,y,g p,f,f,y,f,f,f,c,b,h,e,b,k,k,b,p,p,w,o,l,h,y,g p,x,y,g,f,f,f,c,b,g,e,b,k,k,p,n,p,w,o,l,h,v,g p,f,f,g,f,f,f,c,b,g,e,b,k,k,p,n,p,w,o,l,h,v,g p,x,y,g,f,f,f,c,b,h,e,b,k,k,p,n,p,w,o,l,h,v,d p,x,f,y,f,f,f,c,b,h,e,b,k,k,b,n,p,w,o,l,h,y,g p,x,f,y,f,f,f,c,b,g,e,b,k,k,b,p,p,w,o,l,h,v,p p,x,y,g,f,f,f,c,b,h,e,b,k,k,p,b,p,w,o,l,h,y,d p,x,y,y,f,f,f,c,b,g,e,b,k,k,n,p,p,w,o,l,h,v,d p,f,y,g,f,f,f,c,b,g,e,b,k,k,b,b,p,w,o,l,h,y,p p,f,y,g,f,f,f,c,b,p,e,b,k,k,p,n,p,w,o,l,h,v,d p,x,f,y,f,f,f,c,b,h,e,b,k,k,b,n,p,w,o,l,h,y,p e,k,y,n,f,n,f,w,n,w,e,b,f,f,w,n,p,w,o,e,w,v,l p,f,f,g,f,f,f,c,b,p,e,b,k,k,n,n,p,w,o,l,h,y,p p,x,y,y,f,f,f,c,b,h,e,b,k,k,n,p,p,w,o,l,h,y,g p,f,f,y,f,f,f,c,b,p,e,b,k,k,p,p,p,w,o,l,h,y,g p,x,y,g,f,f,f,c,b,h,e,b,k,k,n,b,p,w,o,l,h,v,d p,x,f,g,f,f,f,c,b,h,e,b,k,k,b,p,p,w,o,l,h,y,p p,f,f,y,f,f,f,c,b,p,e,b,k,k,p,p,p,w,o,l,h,v,g e,f,s,p,t,n,f,c,b,e,e,?,s,s,w,w,p,w,t,e,w,c,w p,f,f,y,f,f,f,c,b,p,e,b,k,k,n,b,p,w,o,l,h,v,p p,x,f,y,f,f,f,c,b,g,e,b,k,k,b,b,p,w,o,l,h,y,d p,x,y,g,f,f,f,c,b,p,e,b,k,k,p,b,p,w,o,l,h,y,d p,f,y,g,f,f,f,c,b,g,e,b,k,k,b,p,p,w,o,l,h,y,p p,x,f,y,f,f,f,c,b,g,e,b,k,k,b,b,p,w,o,l,h,v,d p,f,y,g,f,f,f,c,b,h,e,b,k,k,p,n,p,w,o,l,h,y,g p,f,y,g,f,f,f,c,b,h,e,b,k,k,n,b,p,w,o,l,h,v,g e,k,s,p,t,n,f,c,b,e,e,?,s,s,w,e,p,w,t,e,w,c,w p,x,f,y,f,f,f,c,b,p,e,b,k,k,p,n,p,w,o,l,h,v,d p,f,f,y,f,f,f,c,b,p,e,b,k,k,p,n,p,w,o,l,h,v,p p,f,f,g,f,f,f,c,b,h,e,b,k,k,b,n,p,w,o,l,h,y,p p,f,f,g,f,f,f,c,b,p,e,b,k,k,n,b,p,w,o,l,h,v,g p,x,f,y,f,f,f,c,b,h,e,b,k,k,n,p,p,w,o,l,h,y,p p,f,f,g,f,f,f,c,b,p,e,b,k,k,n,b,p,w,o,l,h,y,p p,x,y,g,f,f,f,c,b,g,e,b,k,k,p,p,p,w,o,l,h,y,g p,f,y,y,f,f,f,c,b,g,e,b,k,k,b,n,p,w,o,l,h,y,g p,f,f,g,f,f,f,c,b,h,e,b,k,k,n,n,p,w,o,l,h,y,d p,x,f,y,f,f,f,c,b,g,e,b,k,k,p,p,p,w,o,l,h,v,d p,f,f,g,f,f,f,c,b,g,e,b,k,k,n,n,p,w,o,l,h,v,g p,f,f,g,f,f,f,c,b,g,e,b,k,k,p,n,p,w,o,l,h,y,d p,f,f,y,f,f,f,c,b,h,e,b,k,k,p,b,p,w,o,l,h,y,g e,f,y,g,t,n,f,c,b,w,t,b,s,s,p,w,p,w,o,p,n,y,d p,f,y,g,f,f,f,c,b,h,e,b,k,k,p,b,p,w,o,l,h,y,d p,f,y,g,f,f,f,c,b,g,e,b,k,k,n,p,p,w,o,l,h,v,d p,f,y,g,f,f,f,c,b,g,e,b,k,k,b,n,p,w,o,l,h,v,g 
p,x,y,y,f,f,f,c,b,p,e,b,k,k,p,b,p,w,o,l,h,v,g p,x,y,g,f,f,f,c,b,p,e,b,k,k,b,n,p,w,o,l,h,y,g p,x,y,y,f,f,f,c,b,g,e,b,k,k,p,b,p,w,o,l,h,y,p p,f,f,y,f,f,f,c,b,p,e,b,k,k,b,b,p,w,o,l,h,y,d p,x,f,y,f,f,f,c,b,g,e,b,k,k,p,n,p,w,o,l,h,v,g p,x,y,y,f,f,f,c,b,p,e,b,k,k,n,b,p,w,o,l,h,y,p p,f,f,g,f,f,f,c,b,p,e,b,k,k,b,p,p,w,o,l,h,v,g p,x,f,y,f,f,f,c,b,h,e,b,k,k,p,b,p,w,o,l,h,v,d p,f,f,g,f,f,f,c,b,h,e,b,k,k,b,b,p,w,o,l,h,v,g p,f,y,g,f,f,f,c,b,g,e,b,k,k,b,p,p,w,o,l,h,y,d p,f,f,g,f,f,f,c,b,h,e,b,k,k,n,n,p,w,o,l,h,y,g p,f,y,y,f,f,f,c,b,h,e,b,k,k,p,p,p,w,o,l,h,v,g p,f,f,g,f,f,f,c,b,p,e,b,k,k,p,b,p,w,o,l,h,y,g p,x,f,y,f,f,f,c,b,g,e,b,k,k,b,p,p,w,o,l,h,y,p p,f,y,g,f,f,f,c,b,p,e,b,k,k,n,b,p,w,o,l,h,y,g p,f,y,g,f,f,f,c,b,h,e,b,k,k,b,b,p,w,o,l,h,y,g p,f,y,g,f,f,f,c,b,p,e,b,k,k,n,b,p,w,o,l,h,v,p e,x,y,r,f,n,f,c,n,p,e,?,s,f,w,w,p,w,o,f,h,v,d p,f,y,y,f,f,f,c,b,p,e,b,k,k,n,b,p,w,o,l,h,y,g p,x,f,y,f,f,f,c,b,g,e,b,k,k,b,p,p,w,o,l,h,y,d p,x,y,n,f,s,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,p p,f,y,y,f,f,f,c,b,h,e,b,k,k,b,n,p,w,o,l,h,v,d p,k,y,n,f,n,f,c,n,w,e,?,k,y,w,n,p,w,o,e,w,v,d p,f,f,y,f,f,f,c,b,p,e,b,k,k,b,p,p,w,o,l,h,y,d p,x,y,g,f,f,f,c,b,h,e,b,k,k,b,n,p,w,o,l,h,v,d p,x,y,g,f,f,f,c,b,h,e,b,k,k,b,p,p,w,o,l,h,y,g p,f,y,g,f,f,f,c,b,g,e,b,k,k,n,p,p,w,o,l,h,y,g p,x,f,y,f,f,f,c,b,p,e,b,k,k,p,p,p,w,o,l,h,v,d p,x,f,g,f,f,f,c,b,g,e,b,k,k,n,p,p,w,o,l,h,v,d p,x,y,g,f,f,f,c,b,g,e,b,k,k,n,b,p,w,o,l,h,v,d p,x,y,g,f,f,f,c,b,p,e,b,k,k,n,n,p,w,o,l,h,y,g p,x,y,g,f,f,f,c,b,g,e,b,k,k,p,n,p,w,o,l,h,y,g e,f,y,e,t,n,f,c,b,w,t,b,s,s,g,p,p,w,o,p,k,y,d p,f,f,y,f,f,f,c,b,h,e,b,k,k,n,n,p,w,o,l,h,y,p p,x,y,y,f,f,f,c,b,h,e,b,k,k,p,p,p,w,o,l,h,v,g p,f,f,y,f,f,f,c,b,g,e,b,k,k,n,n,p,w,o,l,h,v,g p,x,f,y,f,f,f,c,b,p,e,b,k,k,b,b,p,w,o,l,h,v,p p,f,f,g,f,f,f,c,b,h,e,b,k,k,n,b,p,w,o,l,h,y,g p,f,f,g,f,f,f,c,b,h,e,b,k,k,p,p,p,w,o,l,h,v,g p,x,y,y,f,f,f,c,b,g,e,b,k,k,p,p,p,w,o,l,h,y,g p,f,s,b,t,f,f,c,b,w,t,b,s,s,w,w,p,w,o,p,h,s,u p,x,y,g,f,f,f,c,b,p,e,b,k,k,p,b,p,w,o,l,h,v,g p,x,y,y,f,f,f,c,b,g,e,b,k,k,n,n,p,w,o,l,h,v,p p,f,f,g,f,f,f,c,b,g,e,b,k,k,n,n,p,w,o,l,h,v,p p,f,y,g,f,f,f,c,b,g,e,b,k,k,b,n,p,w,o,l,h,y,d p,f,f,y,f,f,f,c,b,p,e,b,k,k,p,b,p,w,o,l,h,v,d p,f,y,g,f,f,f,c,b,h,e,b,k,k,n,n,p,w,o,l,h,y,g p,x,y,g,f,f,f,c,b,p,e,b,k,k,b,n,p,w,o,l,h,v,g e,x,s,e,t,n,f,c,b,e,e,?,s,s,w,e,p,w,t,e,w,c,w p,f,f,g,f,f,f,c,b,g,e,b,k,k,p,p,p,w,o,l,h,v,d p,f,f,y,f,f,f,c,b,p,e,b,k,k,n,p,p,w,o,l,h,y,d p,f,y,g,f,f,f,c,b,g,e,b,k,k,n,n,p,w,o,l,h,y,d p,x,y,y,f,f,f,c,b,g,e,b,k,k,p,p,p,w,o,l,h,y,p p,f,y,g,f,f,f,c,b,g,e,b,k,k,n,n,p,w,o,l,h,v,d p,x,f,y,f,f,f,c,b,h,e,b,k,k,b,b,p,w,o,l,h,y,g p,b,y,w,t,n,f,w,n,w,e,b,s,s,w,w,p,w,o,p,w,c,l p,f,s,b,t,f,f,c,b,h,t,b,s,s,w,w,p,w,o,p,h,v,u p,x,y,y,f,f,f,c,b,p,e,b,k,k,b,p,p,w,o,l,h,y,p p,f,y,g,f,f,f,c,b,g,e,b,k,k,b,b,p,w,o,l,h,y,d p,f,y,g,f,f,f,c,b,g,e,b,k,k,p,p,p,w,o,l,h,v,p p,f,y,y,f,f,f,c,b,p,e,b,k,k,b,p,p,w,o,l,h,v,p p,f,f,g,f,f,f,c,b,g,e,b,k,k,n,n,p,w,o,l,h,y,d p,x,y,y,f,f,f,c,b,h,e,b,k,k,p,p,p,w,o,l,h,v,d p,x,f,y,f,f,f,c,b,p,e,b,k,k,b,p,p,w,o,l,h,v,g p,f,f,g,f,f,f,c,b,p,e,b,k,k,b,p,p,w,o,l,h,y,g p,x,s,w,t,f,f,c,b,w,t,b,f,f,w,w,p,w,o,p,h,s,u e,f,y,g,t,n,f,c,b,n,t,b,s,s,g,p,p,w,o,p,n,v,d p,x,y,n,f,y,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,d p,f,y,g,f,f,f,c,b,p,e,b,k,k,p,p,p,w,o,l,h,y,d p,f,f,g,f,f,f,c,b,g,e,b,k,k,p,n,p,w,o,l,h,y,g e,f,y,e,t,n,f,c,b,n,t,b,s,s,p,g,p,w,o,p,k,v,d e,x,y,b,t,n,f,c,b,e,e,?,s,s,w,e,p,w,t,e,w,c,w p,x,y,y,f,f,f,c,b,h,e,b,k,k,n,n,p,w,o,l,h,y,g p,x,y,y,f,f,f,c,b,g,e,b,k,k,b,n,p,w,o,l,h,y,d p,x,y,y,f,f,f,c,b,h,e,b,k,k,n,b,p,w,o,l,h,v,p p,x,y,y,f,f,f,c,b,p,e,b,k,k,n,n,p,w,o,l,h,y,g p,f,f,y,f,f,f,c,b,g,e,b,k,k,b,n,p,w,o,l,h,y,g 
p,f,y,g,f,f,f,c,b,p,e,b,k,k,b,b,p,w,o,l,h,y,g p,x,f,g,f,f,f,c,b,p,e,b,k,k,n,n,p,w,o,l,h,y,g p,f,y,y,f,f,f,c,b,p,e,b,k,k,b,b,p,w,o,l,h,v,g p,x,y,g,f,f,f,c,b,h,e,b,k,k,p,p,p,w,o,l,h,v,g p,x,y,y,f,f,f,c,b,g,e,b,k,k,p,n,p,w,o,l,h,y,g p,f,y,g,f,f,f,c,b,h,e,b,k,k,b,n,p,w,o,l,h,y,p p,f,y,y,f,f,f,c,b,h,e,b,k,k,n,b,p,w,o,l,h,y,g p,x,y,g,f,f,f,c,b,g,e,b,k,k,p,b,p,w,o,l,h,y,d p,f,y,g,f,f,f,c,b,h,e,b,k,k,p,p,p,w,o,l,h,v,d p,f,y,g,f,f,f,c,b,p,e,b,k,k,b,n,p,w,o,l,h,y,p e,x,y,u,f,n,f,c,n,h,e,?,s,f,w,w,p,w,o,f,h,y,d p,x,f,g,f,f,f,c,b,g,e,b,k,k,n,n,p,w,o,l,h,v,g p,f,f,y,f,f,f,c,b,g,e,b,k,k,n,p,p,w,o,l,h,v,g p,f,f,y,f,f,f,c,b,h,e,b,k,k,p,n,p,w,o,l,h,y,g p,x,f,y,f,f,f,c,b,h,e,b,k,k,p,n,p,w,o,l,h,v,g p,f,f,y,f,f,f,c,b,h,e,b,k,k,b,b,p,w,o,l,h,v,g p,f,f,g,f,f,f,c,b,h,e,b,k,k,p,b,p,w,o,l,h,v,g p,f,y,g,f,f,f,c,b,p,e,b,k,k,b,b,p,w,o,l,h,y,d p,f,f,g,f,f,f,c,b,h,e,b,k,k,b,n,p,w,o,l,h,v,g p,f,f,y,f,f,f,c,b,h,e,b,k,k,b,n,p,w,o,l,h,y,d p,f,y,y,f,f,f,c,b,h,e,b,k,k,b,n,p,w,o,l,h,v,p p,f,f,g,f,f,f,c,b,g,e,b,k,k,p,n,p,w,o,l,h,v,p p,f,y,g,f,f,f,c,b,g,e,b,k,k,n,b,p,w,o,l,h,y,g p,f,f,y,f,f,f,c,b,g,e,b,k,k,n,b,p,w,o,l,h,y,d e,f,f,e,t,n,f,c,b,u,t,b,s,s,g,w,p,w,o,p,n,y,d p,f,s,b,t,f,f,c,b,h,t,b,s,f,w,w,p,w,o,p,h,v,u p,f,y,g,f,f,f,c,b,g,e,b,k,k,p,n,p,w,o,l,h,v,d p,f,f,g,f,f,f,c,b,h,e,b,k,k,p,p,p,w,o,l,h,v,p p,x,y,y,f,f,f,c,b,p,e,b,k,k,p,b,p,w,o,l,h,y,g p,f,f,y,f,f,f,c,b,p,e,b,k,k,p,b,p,w,o,l,h,y,p p,f,y,y,f,f,f,c,b,h,e,b,k,k,n,n,p,w,o,l,h,v,d e,f,y,n,t,n,f,c,b,u,t,b,s,s,w,g,p,w,o,p,k,v,d p,f,y,y,f,f,f,c,b,h,e,b,k,k,b,n,p,w,o,l,h,y,g e,x,s,b,t,n,f,c,b,e,e,?,s,s,w,w,p,w,t,e,w,c,w p,x,y,y,f,f,f,c,b,p,e,b,k,k,n,p,p,w,o,l,h,y,g p,x,f,y,f,f,f,c,b,p,e,b,k,k,p,b,p,w,o,l,h,y,d p,x,y,y,f,f,f,c,b,h,e,b,k,k,p,n,p,w,o,l,h,y,g p,f,y,g,f,f,f,c,b,g,e,b,k,k,b,b,p,w,o,l,h,v,p e,f,f,e,t,n,f,c,b,n,t,b,s,s,g,g,p,w,o,p,k,y,d p,f,f,y,f,f,f,c,b,p,e,b,k,k,p,n,p,w,o,l,h,v,d p,f,f,y,f,f,f,c,b,p,e,b,k,k,b,n,p,w,o,l,h,v,g p,x,y,y,f,f,f,c,b,h,e,b,k,k,n,n,p,w,o,l,h,v,d p,x,f,y,f,f,f,c,b,p,e,b,k,k,n,b,p,w,o,l,h,v,g p,x,y,n,f,y,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,d p,f,f,y,f,f,f,c,b,p,e,b,k,k,p,b,p,w,o,l,h,v,p p,x,y,y,f,f,f,c,b,h,e,b,k,k,n,b,p,w,o,l,h,v,g p,f,f,y,f,f,f,c,b,g,e,b,k,k,n,n,p,w,o,l,h,v,p p,f,f,y,f,f,f,c,b,g,e,b,k,k,n,b,p,w,o,l,h,v,d p,f,y,g,f,f,f,c,b,p,e,b,k,k,b,p,p,w,o,l,h,v,p p,f,y,g,f,f,f,c,b,h,e,b,k,k,b,b,p,w,o,l,h,v,g p,f,y,g,f,f,f,c,b,h,e,b,k,k,b,n,p,w,o,l,h,v,d p,f,f,g,f,f,f,c,b,g,e,b,k,k,b,n,p,w,o,l,h,v,g p,x,s,g,f,c,f,c,n,p,e,b,s,s,w,w,p,w,o,p,k,v,d p,x,y,y,f,f,f,c,b,g,e,b,k,k,b,p,p,w,o,l,h,y,g p,f,s,g,t,f,f,c,b,h,t,b,f,f,w,w,p,w,o,p,h,s,u p,x,y,g,f,f,f,c,b,p,e,b,k,k,n,n,p,w,o,l,h,v,g p,x,y,y,f,f,f,c,b,p,e,b,k,k,p,b,p,w,o,l,h,y,p p,x,y,y,f,f,f,c,b,h,e,b,k,k,n,b,p,w,o,l,h,y,d e,f,y,g,t,n,f,c,b,u,t,b,s,s,p,g,p,w,o,p,k,y,d p,f,f,g,f,f,f,c,b,g,e,b,k,k,b,n,p,w,o,l,h,y,d p,f,y,y,f,f,f,c,b,h,e,b,k,k,n,p,p,w,o,l,h,v,g p,f,y,g,f,f,f,c,b,h,e,b,k,k,n,b,p,w,o,l,h,y,p p,x,y,y,f,f,f,c,b,h,e,b,k,k,b,b,p,w,o,l,h,y,g p,x,y,g,f,f,f,c,b,p,e,b,k,k,b,p,p,w,o,l,h,y,d p,x,f,y,f,f,f,c,b,p,e,b,k,k,b,p,p,w,o,l,h,y,p e,f,y,n,t,n,f,c,b,n,t,b,s,s,g,g,p,w,o,p,n,v,d p,x,y,g,f,f,f,c,b,g,e,b,k,k,n,n,p,w,o,l,h,y,g p,f,f,y,f,f,f,c,b,g,e,b,k,k,b,p,p,w,o,l,h,v,g p,x,y,g,f,f,f,c,b,p,e,b,k,k,p,n,p,w,o,l,h,v,p p,x,y,y,f,f,f,c,b,p,e,b,k,k,p,p,p,w,o,l,h,y,g p,f,f,g,f,f,f,c,b,g,e,b,k,k,n,b,p,w,o,l,h,y,p p,f,f,g,f,f,f,c,b,h,e,b,k,k,p,b,p,w,o,l,h,y,g p,x,y,y,f,f,f,c,b,h,e,b,k,k,b,n,p,w,o,l,h,y,g e,k,y,b,t,n,f,c,b,e,e,?,s,s,e,e,p,w,t,e,w,c,w p,x,y,y,f,f,f,c,b,p,e,b,k,k,b,b,p,w,o,l,h,y,p p,x,y,n,f,s,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,d p,f,y,g,f,f,f,c,b,p,e,b,k,k,n,n,p,w,o,l,h,y,p 
p,x,y,g,f,f,f,c,b,g,e,b,k,k,n,n,p,w,o,l,h,y,p p,f,y,y,f,f,f,c,b,h,e,b,k,k,b,b,p,w,o,l,h,v,g p,f,y,g,f,f,f,c,b,g,e,b,k,k,p,p,p,w,o,l,h,v,d p,f,f,g,f,f,f,c,b,h,e,b,k,k,b,p,p,w,o,l,h,y,g p,f,y,y,f,f,f,c,b,h,e,b,k,k,n,n,p,w,o,l,h,y,g p,x,f,y,f,f,f,c,b,g,e,b,k,k,n,b,p,w,o,l,h,y,d p,x,s,w,t,f,f,c,b,p,t,b,f,f,w,w,p,w,o,p,h,v,u p,x,y,y,f,f,f,c,b,g,e,b,k,k,p,b,p,w,o,l,h,v,g p,f,f,y,f,f,f,c,b,g,e,b,k,k,p,p,p,w,o,l,h,y,p p,x,f,y,f,f,f,c,b,p,e,b,k,k,p,n,p,w,o,l,h,y,g p,x,y,y,f,f,f,c,b,g,e,b,k,k,n,b,p,w,o,l,h,v,p p,f,y,y,f,f,f,c,b,h,e,b,k,k,b,n,p,w,o,l,h,v,g p,x,y,y,f,f,f,c,b,h,e,b,k,k,n,b,p,w,o,l,h,y,g p,x,y,y,f,f,f,c,b,p,e,b,k,k,b,n,p,w,o,l,h,v,p p,f,y,g,f,f,f,c,b,p,e,b,k,k,p,n,p,w,o,l,h,y,p p,f,f,y,f,f,f,c,b,h,e,b,k,k,b,b,p,w,o,l,h,y,p p,f,f,g,f,f,f,c,b,h,e,b,k,k,b,b,p,w,o,l,h,v,p e,f,y,n,t,n,f,c,b,p,t,b,s,s,w,p,p,w,o,p,k,y,d p,x,y,y,f,f,f,c,b,h,e,b,k,k,b,p,p,w,o,l,h,v,d p,x,y,g,f,f,f,c,b,g,e,b,k,k,n,n,p,w,o,l,h,v,p p,f,y,g,f,f,f,c,b,h,e,b,k,k,b,p,p,w,o,l,h,v,g p,f,y,g,f,f,f,c,b,g,e,b,k,k,p,b,p,w,o,l,h,v,g p,x,y,y,f,f,f,c,b,g,e,b,k,k,b,p,p,w,o,l,h,y,d p,f,f,g,f,f,f,c,b,g,e,b,k,k,n,b,p,w,o,l,h,v,p p,x,y,g,f,f,f,c,b,p,e,b,k,k,p,p,p,w,o,l,h,v,d p,x,y,y,f,f,f,c,b,p,e,b,k,k,b,n,p,w,o,l,h,v,d p,x,f,y,f,f,f,c,b,h,e,b,k,k,n,n,p,w,o,l,h,y,d p,f,f,y,f,f,f,c,b,g,e,b,k,k,p,n,p,w,o,l,h,y,d e,f,f,c,f,n,f,w,n,w,e,b,s,f,w,n,p,w,o,e,w,v,l p,f,y,g,f,f,f,c,b,h,e,b,k,k,p,b,p,w,o,l,h,v,g p,x,y,g,f,f,f,c,b,p,e,b,k,k,p,n,p,w,o,l,h,y,p p,f,f,y,f,n,f,c,n,w,e,?,k,y,w,y,p,w,o,e,w,v,d p,f,f,y,f,f,f,c,b,h,e,b,k,k,p,b,p,w,o,l,h,y,p p,x,s,g,t,f,f,c,b,p,t,b,f,s,w,w,p,w,o,p,h,v,g e,f,s,n,t,n,f,c,b,e,e,?,s,s,w,w,p,w,t,e,w,c,w p,x,y,n,f,y,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,p p,x,s,g,f,c,f,w,n,u,e,b,s,s,w,w,p,w,o,p,k,v,d p,x,f,y,f,f,f,c,b,g,e,b,k,k,n,n,p,w,o,l,h,y,d p,f,y,g,f,f,f,c,b,h,e,b,k,k,b,n,p,w,o,l,h,v,g p,f,y,g,f,f,f,c,b,g,e,b,k,k,p,b,p,w,o,l,h,y,d p,x,y,y,f,f,f,c,b,p,e,b,k,k,n,b,p,w,o,l,h,v,g p,x,y,y,f,f,f,c,b,p,e,b,k,k,b,b,p,w,o,l,h,v,g p,x,y,g,f,f,f,c,b,p,e,b,k,k,b,b,p,w,o,l,h,y,d p,f,y,g,f,f,f,c,b,g,e,b,k,k,p,p,p,w,o,l,h,y,d p,f,y,g,f,f,f,c,b,h,e,b,k,k,b,n,p,w,o,l,h,y,g p,f,f,y,f,f,f,c,b,g,e,b,k,k,p,n,p,w,o,l,h,y,g p,x,f,y,f,f,f,c,b,p,e,b,k,k,b,b,p,w,o,l,h,v,d p,f,f,y,f,f,f,c,b,p,e,b,k,k,b,n,p,w,o,l,h,v,p p,f,f,g,f,f,f,c,b,p,e,b,k,k,p,b,p,w,o,l,h,y,p p,f,f,y,f,f,f,c,b,h,e,b,k,k,b,p,p,w,o,l,h,y,p p,f,y,y,f,f,f,c,b,h,e,b,k,k,n,n,p,w,o,l,h,y,d p,f,f,y,f,f,f,c,b,h,e,b,k,k,n,n,p,w,o,l,h,v,p p,f,f,y,f,f,f,c,b,h,e,b,k,k,b,p,p,w,o,l,h,v,d p,x,y,g,f,f,f,c,b,h,e,b,k,k,b,p,p,w,o,l,h,v,g p,x,y,g,f,f,f,c,b,p,e,b,k,k,b,b,p,w,o,l,h,v,g p,x,y,g,f,f,f,c,b,g,e,b,k,k,b,n,p,w,o,l,h,v,d e,x,s,b,t,n,f,c,b,w,e,?,s,s,e,w,p,w,t,e,w,c,w p,f,f,g,f,f,f,c,b,g,e,b,k,k,p,n,p,w,o,l,h,v,d p,x,f,y,f,f,f,c,b,g,e,b,k,k,p,b,p,w,o,l,h,v,d p,x,y,n,f,s,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,l p,x,y,g,f,f,f,c,b,g,e,b,k,k,b,b,p,w,o,l,h,y,d p,f,y,g,f,f,f,c,b,h,e,b,k,k,n,b,p,w,o,l,h,v,p p,f,y,g,f,f,f,c,b,p,e,b,k,k,p,n,p,w,o,l,h,y,d p,f,f,y,f,f,f,c,b,g,e,b,k,k,b,p,p,w,o,l,h,y,g p,x,y,y,f,f,f,c,b,g,e,b,k,k,n,n,p,w,o,l,h,v,d p,x,f,y,f,f,f,c,b,h,e,b,k,k,p,b,p,w,o,l,h,v,g p,f,f,g,f,f,f,c,b,h,e,b,k,k,n,n,p,w,o,l,h,y,p p,x,y,g,f,f,f,c,b,g,e,b,k,k,n,p,p,w,o,l,h,v,p p,f,f,y,f,f,f,c,b,h,e,b,k,k,b,b,p,w,o,l,h,y,g p,x,f,y,f,f,f,c,b,p,e,b,k,k,n,p,p,w,o,l,h,y,g e,x,y,u,f,n,f,c,n,h,e,?,s,f,w,w,p,w,o,f,h,v,d e,f,y,b,t,n,f,c,b,e,e,?,s,s,e,e,p,w,t,e,w,c,w p,f,y,y,f,f,f,c,b,h,e,b,k,k,b,p,p,w,o,l,h,y,d p,x,y,y,f,f,f,c,b,g,e,b,k,k,n,p,p,w,o,l,h,y,p p,x,y,g,f,f,f,c,b,g,e,b,k,k,p,b,p,w,o,l,h,v,g p,f,f,g,f,f,f,c,b,p,e,b,k,k,p,n,p,w,o,l,h,y,d p,f,y,g,f,f,f,c,b,h,e,b,k,k,n,n,p,w,o,l,h,v,g 
p,f,f,g,f,f,f,c,b,h,e,b,k,k,b,b,p,w,o,l,h,y,g p,f,y,g,f,f,f,c,b,p,e,b,k,k,n,n,p,w,o,l,h,v,g p,x,f,y,f,f,f,c,b,g,e,b,k,k,p,n,p,w,o,l,h,y,g p,f,f,y,f,f,f,c,b,h,e,b,k,k,p,p,p,w,o,l,h,v,d p,f,f,g,f,f,f,c,b,p,e,b,k,k,p,p,p,w,o,l,h,v,d p,f,y,y,f,f,f,c,b,h,e,b,k,k,n,b,p,w,o,l,h,v,g p,f,y,y,f,f,f,c,b,h,e,b,k,k,n,p,p,w,o,l,h,y,g p,f,f,g,f,f,f,c,b,h,e,b,k,k,n,p,p,w,o,l,h,v,g p,f,s,b,t,f,f,c,b,w,t,b,s,f,w,w,p,w,o,p,h,v,g p,f,s,w,t,n,f,c,b,g,e,b,s,s,w,w,p,w,t,p,r,v,m p,x,y,y,f,f,f,c,b,p,e,b,k,k,p,n,p,w,o,l,h,y,p p,f,f,y,f,f,f,c,b,p,e,b,k,k,b,b,p,w,o,l,h,v,p p,f,y,g,f,f,f,c,b,g,e,b,k,k,p,p,p,w,o,l,h,v,g p,x,y,y,f,f,f,c,b,h,e,b,k,k,n,p,p,w,o,l,h,v,d p,x,f,y,f,f,f,c,b,g,e,b,k,k,p,n,p,w,o,l,h,v,p p,x,f,y,f,f,f,c,b,g,e,b,k,k,p,b,p,w,o,l,h,y,d e,f,f,g,t,n,f,c,b,u,t,b,s,s,g,g,p,w,o,p,n,v,d e,x,y,u,f,n,f,c,n,u,e,?,s,f,w,w,p,w,o,f,h,v,d p,x,y,g,f,f,f,c,b,h,e,b,k,k,p,p,p,w,o,l,h,y,d p,f,f,y,f,f,f,c,b,p,e,b,k,k,n,b,p,w,o,l,h,v,g p,x,y,y,f,f,f,c,b,p,e,b,k,k,n,b,p,w,o,l,h,v,d p,f,y,g,f,f,f,c,b,p,e,b,k,k,n,p,p,w,o,l,h,v,d p,x,y,y,f,f,f,c,b,h,e,b,k,k,n,p,p,w,o,l,h,y,p p,x,f,y,f,f,f,c,b,p,e,b,k,k,b,p,p,w,o,l,h,v,d p,x,y,y,f,f,f,c,b,g,e,b,k,k,b,n,p,w,o,l,h,y,p p,f,f,y,f,f,f,c,b,h,e,b,k,k,n,n,p,w,o,l,h,v,g p,x,y,g,f,f,f,c,b,g,e,b,k,k,p,n,p,w,o,l,h,y,p p,f,y,g,f,f,f,c,b,p,e,b,k,k,n,n,p,w,o,l,h,y,g p,f,y,g,f,f,f,c,b,g,e,b,k,k,p,n,p,w,o,l,h,v,p p,x,f,y,f,f,f,c,b,g,e,b,k,k,p,p,p,w,o,l,h,y,p e,f,f,e,t,n,f,c,b,w,t,b,s,s,w,g,p,w,o,p,n,v,d p,x,y,y,f,f,f,c,b,h,e,b,k,k,p,p,p,w,o,l,h,y,d p,f,f,g,f,f,f,c,b,g,e,b,k,k,n,b,p,w,o,l,h,v,d p,x,y,g,f,f,f,c,b,g,e,b,k,k,b,n,p,w,o,l,h,y,p p,f,y,g,f,f,f,c,b,g,e,b,k,k,b,p,p,w,o,l,h,v,p p,f,f,g,f,f,f,c,b,p,e,b,k,k,b,n,p,w,o,l,h,y,p p,f,f,g,f,f,f,c,b,p,e,b,k,k,n,b,p,w,o,l,h,y,d p,f,f,g,f,f,f,c,b,p,e,b,k,k,n,p,p,w,o,l,h,v,g p,f,y,g,f,f,f,c,b,h,e,b,k,k,p,n,p,w,o,l,h,y,d p,f,f,y,f,f,f,c,b,h,e,b,k,k,p,b,p,w,o,l,h,v,d p,x,f,y,f,f,f,c,b,h,e,b,k,k,p,p,p,w,o,l,h,y,d p,x,y,g,f,f,f,c,b,p,e,b,k,k,n,n,p,w,o,l,h,v,p p,x,y,g,f,f,f,c,b,p,e,b,k,k,b,n,p,w,o,l,h,y,p p,x,f,w,f,c,f,c,n,g,e,b,s,s,w,w,p,w,o,p,n,v,d p,f,f,g,f,f,f,c,b,p,e,b,k,k,b,p,p,w,o,l,h,y,d p,x,f,y,f,f,f,c,b,h,e,b,k,k,p,p,p,w,o,l,h,v,p p,f,y,g,f,f,f,c,b,p,e,b,k,k,p,p,p,w,o,l,h,v,d p,x,f,y,f,f,f,c,b,p,e,b,k,k,p,n,p,w,o,l,h,v,g p,f,y,g,f,f,f,c,b,h,e,b,k,k,p,n,p,w,o,l,h,v,d p,f,y,g,f,f,f,c,b,g,e,b,k,k,n,b,p,w,o,l,h,v,d p,f,f,y,f,f,f,c,b,g,e,b,k,k,b,n,p,w,o,l,h,y,p p,x,y,g,f,f,f,c,b,g,e,b,k,k,b,p,p,w,o,l,h,v,g p,f,y,y,f,f,f,c,b,p,e,b,k,k,n,b,p,w,o,l,h,v,g p,f,f,y,f,f,f,c,b,h,e,b,k,k,n,p,p,w,o,l,h,y,p p,x,f,y,f,f,f,c,b,h,e,b,k,k,b,p,p,w,o,l,h,v,d p,f,f,y,f,f,f,c,b,g,e,b,k,k,n,b,p,w,o,l,h,v,g p,x,y,y,f,f,f,c,b,p,e,b,k,k,b,n,p,w,o,l,h,y,p p,f,f,y,f,f,f,c,b,g,e,b,k,k,b,b,p,w,o,l,h,y,d p,x,f,y,f,f,f,c,b,p,e,b,k,k,b,b,p,w,o,l,h,v,g p,f,y,g,f,f,f,c,b,h,e,b,k,k,n,n,p,w,o,l,h,y,d p,f,f,g,f,f,f,c,b,p,e,b,k,k,p,b,p,w,o,l,h,y,d p,f,f,y,f,f,f,c,b,g,e,b,k,k,p,n,p,w,o,l,h,v,g p,f,f,g,f,f,f,c,b,p,e,b,k,k,b,b,p,w,o,l,h,v,d p,f,f,g,f,f,f,c,b,h,e,b,k,k,p,n,p,w,o,l,h,y,g p,f,f,y,f,f,f,c,b,g,e,b,k,k,p,n,p,w,o,l,h,v,d p,x,y,y,f,f,f,c,b,g,e,b,k,k,p,p,p,w,o,l,h,y,d p,x,y,y,f,f,f,c,b,p,e,b,k,k,b,p,p,w,o,l,h,v,d p,f,f,y,f,f,f,c,b,h,e,b,k,k,n,n,p,w,o,l,h,y,d p,x,y,y,f,f,f,c,b,h,e,b,k,k,b,b,p,w,o,l,h,v,d p,x,y,y,f,f,f,c,b,g,e,b,k,k,b,p,p,w,o,l,h,v,g p,x,y,n,f,y,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,d e,f,y,b,t,n,f,c,b,w,e,?,s,s,w,w,p,w,t,e,w,c,w p,x,f,y,f,f,f,c,b,h,e,b,k,k,b,p,p,w,o,l,h,v,g p,x,y,y,f,f,f,c,b,h,e,b,k,k,b,b,p,w,o,l,h,y,d p,f,y,g,f,f,f,c,b,p,e,b,k,k,n,b,p,w,o,l,h,y,d p,x,y,y,f,f,f,c,b,p,e,b,k,k,n,p,p,w,o,l,h,y,d p,f,y,g,f,f,f,c,b,p,e,b,k,k,p,b,p,w,o,l,h,v,p 
p,f,y,g,f,f,f,c,b,p,e,b,k,k,p,b,p,w,o,l,h,y,d p,f,f,g,f,f,f,c,b,p,e,b,k,k,p,b,p,w,o,l,h,v,p p,x,f,y,f,f,f,c,b,p,e,b,k,k,p,n,p,w,o,l,h,v,p p,f,f,y,f,f,f,c,b,p,e,b,k,k,b,n,p,w,o,l,h,y,g p,f,f,y,f,f,f,c,b,g,e,b,k,k,p,p,p,w,o,l,h,y,d p,f,y,g,f,f,f,c,b,h,e,b,k,k,p,b,p,w,o,l,h,v,p p,f,y,g,f,f,f,c,b,h,e,b,k,k,n,b,p,w,o,l,h,y,g p,f,f,g,f,f,f,c,b,g,e,b,k,k,n,b,p,w,o,l,h,y,g p,f,y,g,f,f,f,c,b,g,e,b,k,k,b,n,p,w,o,l,h,v,d p,f,f,g,f,f,f,c,b,h,e,b,k,k,p,b,p,w,o,l,h,v,d p,f,y,g,f,f,f,c,b,g,e,b,k,k,p,b,p,w,o,l,h,v,p p,x,f,g,f,c,f,c,n,u,e,b,s,s,w,w,p,w,o,p,k,s,d p,f,s,b,t,f,f,c,b,w,t,b,f,f,w,w,p,w,o,p,h,v,g e,f,f,e,t,n,f,c,b,w,t,b,s,s,g,w,p,w,o,p,k,v,d p,x,y,y,f,f,f,c,b,g,e,b,k,k,p,n,p,w,o,l,h,v,p p,x,y,g,f,f,f,c,b,p,e,b,k,k,p,n,p,w,o,l,h,y,d p,f,y,g,f,f,f,c,b,g,e,b,k,k,n,b,p,w,o,l,h,v,g e,f,f,g,t,n,f,c,b,u,t,b,s,s,p,w,p,w,o,p,k,v,d p,x,y,y,f,f,f,c,b,p,e,b,k,k,b,p,p,w,o,l,h,y,d p,x,y,y,f,f,f,c,b,p,e,b,k,k,p,b,p,w,o,l,h,v,p p,f,f,y,f,f,f,c,b,p,e,b,k,k,n,p,p,w,o,l,h,y,p p,x,f,y,f,f,f,c,b,h,e,b,k,k,b,p,p,w,o,l,h,v,p p,x,f,y,f,f,f,c,b,h,e,b,k,k,b,b,p,w,o,l,h,y,d p,x,y,g,f,f,f,c,b,h,e,b,k,k,n,p,p,w,o,l,h,v,g p,f,s,w,t,f,f,c,b,w,t,b,s,f,w,w,p,w,o,p,h,s,u p,x,y,g,f,f,f,c,b,p,e,b,k,k,p,n,p,w,o,l,h,v,d p,x,f,y,f,f,f,c,b,g,e,b,k,k,b,n,p,w,o,l,h,v,g p,f,y,g,f,f,f,c,b,g,e,b,k,k,p,p,p,w,o,l,h,y,p p,x,y,g,f,f,f,c,b,p,e,b,k,k,p,p,p,w,o,l,h,v,p p,x,f,y,f,f,f,c,b,p,e,b,k,k,n,b,p,w,o,l,h,v,d e,f,y,u,f,n,f,c,n,h,e,?,s,f,w,w,p,w,o,f,h,v,d p,f,f,y,f,f,f,c,b,h,e,b,k,k,b,b,p,w,o,l,h,y,d p,f,y,g,f,f,f,c,b,p,e,b,k,k,b,b,p,w,o,l,h,v,g p,x,y,y,f,f,f,c,b,h,e,b,k,k,b,p,p,w,o,l,h,y,g p,f,f,g,f,f,f,c,b,g,e,b,k,k,b,n,p,w,o,l,h,y,g e,f,y,n,t,n,f,c,b,n,t,b,s,s,p,p,p,w,o,p,n,y,d p,x,f,y,f,f,f,c,b,h,e,b,k,k,p,n,p,w,o,l,h,v,p p,f,f,g,f,f,f,c,b,h,e,b,k,k,n,p,p,w,o,l,h,y,d p,x,y,g,f,f,f,c,b,h,e,b,k,k,n,b,p,w,o,l,h,y,p p,f,f,y,f,f,f,c,b,p,e,b,k,k,n,n,p,w,o,l,h,y,g p,x,f,y,f,f,f,c,b,h,e,b,k,k,b,b,p,w,o,l,h,v,d p,x,y,y,f,f,f,c,b,g,e,b,k,k,b,n,p,w,o,l,h,v,p p,x,f,y,f,f,f,c,b,g,e,b,k,k,b,n,p,w,o,l,h,v,p p,f,y,g,f,f,f,c,b,g,e,b,k,k,n,p,p,w,o,l,h,v,g p,x,s,w,t,f,f,c,b,h,t,b,f,f,w,w,p,w,o,p,h,s,u e,f,y,e,t,n,f,c,b,w,e,?,s,s,w,w,p,w,t,e,w,c,w e,x,y,u,f,n,f,c,n,w,e,?,s,f,w,w,p,w,o,f,h,y,d p,x,y,y,f,f,f,c,b,p,e,b,k,k,b,n,p,w,o,l,h,v,g p,x,y,g,f,f,f,c,b,p,e,b,k,k,n,p,p,w,o,l,h,y,d p,x,f,y,f,f,f,c,b,p,e,b,k,k,p,p,p,w,o,l,h,y,p p,f,f,g,f,f,f,c,b,h,e,b,k,k,p,n,p,w,o,l,h,y,d p,x,y,g,f,f,f,c,b,g,e,b,k,k,p,b,p,w,o,l,h,y,p p,f,f,g,f,f,f,c,b,h,e,b,k,k,b,n,p,w,o,l,h,y,g p,f,f,y,f,f,f,c,b,p,e,b,k,k,p,n,p,w,o,l,h,y,g p,x,y,g,f,f,f,c,b,h,e,b,k,k,p,p,p,w,o,l,h,y,p p,f,y,g,f,f,f,c,b,g,e,b,k,k,n,b,p,w,o,l,h,y,p e,f,s,p,t,n,f,c,b,w,e,?,s,s,w,e,p,w,t,e,w,c,w p,f,f,g,f,f,f,c,b,h,e,b,k,k,p,n,p,w,o,l,h,y,p p,x,y,y,f,f,f,c,b,g,e,b,k,k,b,p,p,w,o,l,h,y,p p,x,y,g,f,f,f,c,b,g,e,b,k,k,n,b,p,w,o,l,h,y,p p,f,f,y,f,f,f,c,b,g,e,b,k,k,b,n,p,w,o,l,h,v,g p,x,f,y,f,f,f,c,b,p,e,b,k,k,p,n,p,w,o,l,h,y,d p,f,y,g,f,f,f,c,b,g,e,b,k,k,n,b,p,w,o,l,h,y,d p,f,y,y,f,f,f,c,b,g,e,b,k,k,n,p,p,w,o,l,h,y,p p,x,y,g,f,f,f,c,b,p,e,b,k,k,b,p,p,w,o,l,h,y,g p,x,y,g,f,f,f,c,b,g,e,b,k,k,b,n,p,w,o,l,h,y,d p,f,f,y,f,f,f,c,b,g,e,b,k,k,n,p,p,w,o,l,h,y,p p,f,f,g,f,f,f,c,b,p,e,b,k,k,b,n,p,w,o,l,h,y,d p,x,y,g,f,f,f,c,b,h,e,b,k,k,b,n,p,w,o,l,h,y,p p,f,f,y,f,f,f,c,b,p,e,b,k,k,p,p,p,w,o,l,h,v,p p,f,y,g,f,f,f,c,b,h,e,b,k,k,n,p,p,w,o,l,h,v,d p,x,y,g,f,f,f,c,b,p,e,b,k,k,n,p,p,w,o,l,h,y,g p,f,f,g,f,f,f,c,b,h,e,b,k,k,b,b,p,w,o,l,h,v,d p,x,y,g,f,f,f,c,b,h,e,b,k,k,n,p,p,w,o,l,h,y,g p,f,f,g,f,f,f,c,b,p,e,b,k,k,p,n,p,w,o,l,h,v,d p,x,y,y,f,f,f,c,b,g,e,b,k,k,b,b,p,w,o,l,h,y,p p,x,f,y,f,f,f,c,b,p,e,b,k,k,p,p,p,w,o,l,h,y,g 
p,x,y,g,f,f,f,c,b,h,e,b,k,k,b,p,p,w,o,l,h,v,p p,f,f,y,f,f,f,c,b,h,e,b,k,k,n,p,p,w,o,l,h,v,p e,f,y,n,t,n,f,c,b,n,t,b,s,s,w,g,p,w,o,p,n,y,d p,f,f,g,f,f,f,c,b,h,e,b,k,k,p,b,p,w,o,l,h,y,d p,x,y,y,f,f,f,c,b,h,e,b,k,k,n,n,p,w,o,l,h,y,p p,x,s,g,t,f,f,c,b,h,t,b,f,f,w,w,p,w,o,p,h,s,g p,f,y,y,f,f,f,c,b,h,e,b,k,k,n,n,p,w,o,l,h,v,p p,x,y,y,f,f,f,c,b,p,e,b,k,k,n,p,p,w,o,l,h,v,p p,x,f,w,f,c,f,c,n,g,e,b,s,s,w,w,p,w,o,p,k,s,d p,x,f,y,f,f,f,c,b,g,e,b,k,k,n,p,p,w,o,l,h,y,d p,f,s,b,t,f,f,c,b,h,t,b,f,s,w,w,p,w,o,p,h,s,u p,x,y,y,f,f,f,c,b,h,e,b,k,k,p,b,p,w,o,l,h,y,g p,x,y,g,f,f,f,c,b,h,e,b,k,k,n,p,p,w,o,l,h,y,d p,f,f,y,f,f,f,c,b,g,e,b,k,k,b,b,p,w,o,l,h,y,g p,f,s,p,t,n,f,c,b,w,e,b,s,s,w,w,p,w,t,p,r,v,g p,f,s,b,t,f,f,c,b,p,t,b,f,f,w,w,p,w,o,p,h,v,u p,x,y,g,f,f,f,c,b,g,e,b,k,k,b,b,p,w,o,l,h,v,g p,x,y,g,f,f,f,c,b,h,e,b,k,k,p,b,p,w,o,l,h,y,g p,x,y,g,f,f,f,c,b,p,e,b,k,k,b,p,p,w,o,l,h,v,g p,f,f,g,f,f,f,c,b,g,e,b,k,k,p,n,p,w,o,l,h,y,p p,f,f,g,f,f,f,c,b,g,e,b,k,k,p,p,p,w,o,l,h,v,p p,x,f,y,f,f,f,c,b,h,e,b,k,k,p,p,p,w,o,l,h,y,p p,f,f,y,f,f,f,c,b,h,e,b,k,k,p,n,p,w,o,l,h,v,p e,f,y,u,f,n,f,c,n,u,e,?,s,f,w,w,p,w,o,f,h,v,d p,x,f,y,f,f,f,c,b,g,e,b,k,k,b,b,p,w,o,l,h,y,g e,f,f,e,t,n,f,c,b,w,t,b,s,s,g,w,p,w,o,p,n,v,d p,f,f,y,f,f,f,c,b,g,e,b,k,k,n,p,p,w,o,l,h,y,g p,x,f,y,f,f,f,c,b,g,e,b,k,k,p,p,p,w,o,l,h,v,g p,x,y,n,f,f,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,l p,x,y,g,f,f,f,c,b,g,e,b,k,k,b,b,p,w,o,l,h,y,g p,x,f,y,f,f,f,c,b,p,e,b,k,k,b,p,p,w,o,l,h,y,d p,f,f,g,f,f,f,c,b,p,e,b,k,k,p,p,p,w,o,l,h,y,g p,x,y,g,f,f,f,c,b,g,e,b,k,k,b,b,p,w,o,l,h,v,p p,x,y,g,f,f,f,c,b,h,e,b,k,k,b,b,p,w,o,l,h,y,g p,x,f,y,f,f,f,c,b,h,e,b,k,k,b,p,p,w,o,l,h,y,d p,f,f,y,f,f,f,c,b,h,e,b,k,k,n,n,p,w,o,l,h,y,g p,f,y,g,f,f,f,c,b,g,e,b,k,k,n,p,p,w,o,l,h,v,p p,f,y,g,f,f,f,c,b,h,e,b,k,k,p,b,p,w,o,l,h,y,g e,f,y,n,t,n,f,c,b,n,t,b,s,s,p,w,p,w,o,p,n,y,d p,x,y,y,f,f,f,c,b,g,e,b,k,k,n,p,p,w,o,l,h,v,g e,f,f,e,t,n,f,c,b,w,t,b,s,s,w,w,p,w,o,p,k,y,d p,x,f,y,f,f,f,c,b,h,e,b,k,k,b,n,p,w,o,l,h,y,d p,x,y,g,f,f,f,c,b,h,e,b,k,k,p,b,p,w,o,l,h,y,p p,f,y,g,f,f,f,c,b,h,e,b,k,k,p,n,p,w,o,l,h,y,p p,x,f,y,f,f,f,c,b,p,e,b,k,k,p,b,p,w,o,l,h,y,p p,f,f,g,f,f,f,c,b,h,e,b,k,k,n,b,p,w,o,l,h,v,d p,f,f,g,f,f,f,c,b,p,e,b,k,k,p,b,p,w,o,l,h,v,d p,f,y,g,f,f,f,c,b,p,e,b,k,k,n,b,p,w,o,l,h,y,p p,f,y,g,f,f,f,c,b,g,e,b,k,k,p,n,p,w,o,l,h,y,g p,x,f,y,f,f,f,c,b,g,e,b,k,k,b,p,p,w,o,l,h,v,d p,x,y,y,f,f,f,c,b,h,e,b,k,k,p,n,p,w,o,l,h,y,p p,f,f,y,f,f,f,c,b,g,e,b,k,k,n,p,p,w,o,l,h,v,d p,x,y,g,f,f,f,c,b,g,e,b,k,k,b,p,p,w,o,l,h,y,d p,f,f,y,f,f,f,c,b,g,e,b,k,k,p,b,p,w,o,l,h,y,p p,f,y,g,f,f,f,c,b,g,e,b,k,k,b,p,p,w,o,l,h,v,d p,x,f,p,f,c,f,w,n,p,e,b,s,s,w,w,p,w,o,p,k,s,d p,f,f,y,f,f,f,c,b,g,e,b,k,k,p,b,p,w,o,l,h,v,d p,x,y,g,f,f,f,c,b,h,e,b,k,k,n,p,p,w,o,l,h,v,d p,x,y,y,f,f,f,c,b,p,e,b,k,k,b,p,p,w,o,l,h,y,g p,x,y,g,f,f,f,c,b,h,e,b,k,k,p,b,p,w,o,l,h,v,d p,f,y,y,f,f,f,c,b,h,e,b,k,k,n,b,p,w,o,l,h,v,p p,x,y,g,f,f,f,c,b,g,e,b,k,k,b,p,p,w,o,l,h,y,p p,x,f,y,f,f,f,c,b,g,e,b,k,k,p,n,p,w,o,l,h,v,d p,f,y,g,f,f,f,c,b,g,e,b,k,k,b,b,p,w,o,l,h,v,d p,f,y,g,f,f,f,c,b,p,e,b,k,k,p,b,p,w,o,l,h,v,g p,x,y,y,f,f,f,c,b,p,e,b,k,k,p,n,p,w,o,l,h,y,d p,f,f,y,f,f,f,c,b,h,e,b,k,k,p,b,p,w,o,l,h,y,d p,x,y,g,f,f,f,c,b,g,e,b,k,k,p,n,p,w,o,l,h,v,d p,x,y,y,f,f,f,c,b,p,e,b,k,k,b,b,p,w,o,l,h,y,d p,x,y,g,f,f,f,c,b,g,e,b,k,k,b,p,p,w,o,l,h,v,d p,x,y,g,f,f,f,c,b,g,e,b,k,k,b,p,p,w,o,l,h,y,g p,x,f,y,f,f,f,c,b,g,e,b,k,k,n,b,p,w,o,l,h,v,g p,x,y,g,f,f,f,c,b,p,e,b,k,k,n,b,p,w,o,l,h,y,d p,f,f,g,f,f,f,c,b,g,e,b,k,k,p,b,p,w,o,l,h,y,p p,f,f,g,f,f,f,c,b,p,e,b,k,k,b,b,p,w,o,l,h,v,g p,x,y,g,f,f,f,c,b,h,e,b,k,k,n,n,p,w,o,l,h,v,p p,x,f,y,f,f,f,c,b,h,e,b,k,k,n,p,p,w,o,l,h,v,d 
p,x,y,g,f,f,f,c,b,g,e,b,k,k,p,n,p,w,o,l,h,y,d p,f,f,g,f,f,f,c,b,p,e,b,k,k,n,b,p,w,o,l,h,v,p p,x,y,y,f,f,f,c,b,h,e,b,k,k,n,p,p,w,o,l,h,v,p p,x,f,y,f,f,f,c,b,h,e,b,k,k,p,n,p,w,o,l,h,v,d p,f,y,g,f,f,f,c,b,p,e,b,k,k,n,n,p,w,o,l,h,y,d p,f,f,g,f,f,f,c,b,p,e,b,k,k,b,p,p,w,o,l,h,v,p e,x,s,p,t,n,f,c,b,e,e,?,s,s,w,w,p,w,t,e,w,c,w p,f,f,y,f,f,f,c,b,p,e,b,k,k,n,p,p,w,o,l,h,v,d p,x,s,p,f,c,f,w,n,g,e,b,s,s,w,w,p,w,o,p,k,v,d p,x,f,y,f,f,f,c,b,h,e,b,k,k,p,p,p,w,o,l,h,y,g p,f,f,g,f,f,f,c,b,g,e,b,k,k,b,p,p,w,o,l,h,y,p p,x,f,y,f,f,f,c,b,p,e,b,k,k,n,b,p,w,o,l,h,y,g e,f,f,e,t,n,f,c,b,p,t,b,s,s,p,p,p,w,o,p,n,y,d p,x,y,y,f,f,f,c,b,g,e,b,k,k,b,n,p,w,o,l,h,v,d p,x,f,y,f,f,f,c,b,p,e,b,k,k,b,p,p,w,o,l,h,y,g p,f,y,g,f,f,f,c,b,h,e,b,k,k,n,p,p,w,o,l,h,v,g p,f,f,y,f,f,f,c,b,p,e,b,k,k,b,p,p,w,o,l,h,y,g e,f,y,n,t,n,f,c,b,n,t,b,s,s,p,w,p,w,o,p,n,v,d p,x,y,y,f,f,f,c,b,h,e,b,k,k,b,b,p,w,o,l,h,y,p p,f,f,g,f,f,f,c,b,h,e,b,k,k,p,b,p,w,o,l,h,v,p p,x,y,y,f,f,f,c,b,h,e,b,k,k,p,n,p,w,o,l,h,v,p p,f,f,y,f,f,f,c,b,h,e,b,k,k,p,n,p,w,o,l,h,y,d p,f,y,y,f,f,f,c,b,h,e,b,k,k,n,p,p,w,o,l,h,y,d p,f,y,g,f,f,f,c,b,h,e,b,k,k,p,p,p,w,o,l,h,v,g p,f,f,g,f,f,f,c,b,h,e,b,k,k,b,p,p,w,o,l,h,v,g p,x,y,y,f,f,f,c,b,p,e,b,k,k,b,n,p,w,o,l,h,y,g p,f,y,g,f,f,f,c,b,g,e,b,k,k,n,n,p,w,o,l,h,y,p p,x,y,y,f,f,f,c,b,g,e,b,k,k,n,n,p,w,o,l,h,y,g e,f,f,g,t,n,f,c,b,u,t,b,s,s,w,w,p,w,o,p,n,y,d p,x,s,g,t,f,f,c,b,h,t,b,f,s,w,w,p,w,o,p,h,s,g p,x,y,n,f,f,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,p p,f,s,g,t,f,f,c,b,p,t,b,s,f,w,w,p,w,o,p,h,v,g p,f,y,g,f,f,f,c,b,g,e,b,k,k,b,n,p,w,o,l,h,y,g p,f,y,y,f,f,f,c,b,g,e,b,k,k,p,p,p,w,o,l,h,v,g p,f,y,g,f,f,f,c,b,p,e,b,k,k,b,p,p,w,o,l,h,v,d e,f,y,e,t,n,f,c,b,e,e,?,s,s,w,e,p,w,t,e,w,c,w p,f,y,g,f,f,f,c,b,p,e,b,k,k,n,n,p,w,o,l,h,v,p p,x,f,y,f,f,f,c,b,h,e,b,k,k,n,n,p,w,o,l,h,v,g p,f,f,g,f,f,f,c,b,g,e,b,k,k,b,b,p,w,o,l,h,y,d p,x,y,y,f,f,f,c,b,p,e,b,k,k,p,b,p,w,o,l,h,y,d p,f,f,y,f,f,f,c,b,p,e,b,k,k,n,b,p,w,o,l,h,y,g e,f,y,n,t,n,f,c,b,e,e,?,s,s,e,e,p,w,t,e,w,c,w p,f,y,g,f,f,f,c,b,p,e,b,k,k,p,n,p,w,o,l,h,v,g p,k,f,n,f,n,f,c,n,w,e,?,k,y,w,n,p,w,o,e,w,v,d p,f,f,g,f,f,f,c,b,g,e,b,k,k,b,p,p,w,o,l,h,v,d p,f,f,y,f,f,f,c,b,h,e,b,k,k,n,b,p,w,o,l,h,v,p p,x,y,n,f,s,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,p p,f,f,g,f,f,f,c,b,h,e,b,k,k,p,p,p,w,o,l,h,v,d p,f,f,g,f,f,f,c,b,g,e,b,k,k,b,b,p,w,o,l,h,y,p p,f,s,w,t,f,f,c,b,w,t,b,f,f,w,w,p,w,o,p,h,v,g p,f,f,y,f,f,f,c,b,g,e,b,k,k,b,n,p,w,o,l,h,v,d p,x,y,g,f,f,f,c,b,h,e,b,k,k,p,n,p,w,o,l,h,y,g p,x,f,y,f,f,f,c,b,h,e,b,k,k,n,p,p,w,o,l,h,y,g e,x,y,e,t,n,f,c,b,w,e,?,s,s,e,e,p,w,t,e,w,c,w p,f,y,y,f,f,f,c,b,g,e,b,k,k,n,p,p,w,o,l,h,v,d p,b,f,y,f,n,f,c,n,w,e,?,k,y,w,n,p,w,o,e,w,v,d p,f,y,y,f,f,f,c,b,p,e,b,k,k,n,n,p,w,o,l,h,v,d p,x,s,g,t,f,f,c,b,p,t,b,f,f,w,w,p,w,o,p,h,s,u p,f,y,y,f,f,f,c,b,h,e,b,k,k,p,p,p,w,o,l,h,v,d p,f,f,y,f,f,f,c,b,p,e,b,k,k,p,b,p,w,o,l,h,v,g e,k,y,e,t,n,f,c,b,e,e,?,s,s,w,w,p,w,t,e,w,c,w p,f,f,y,f,f,f,c,b,p,e,b,k,k,b,p,p,w,o,l,h,v,p p,f,f,y,f,f,f,c,b,g,e,b,k,k,p,n,p,w,o,l,h,v,p e,k,y,n,f,n,f,w,n,w,e,b,f,s,w,n,p,w,o,e,w,v,l p,x,s,w,f,c,f,c,n,p,e,b,s,s,w,w,p,w,o,p,n,v,d p,x,f,y,f,f,f,c,b,g,e,b,k,k,b,n,p,w,o,l,h,y,g p,x,f,y,f,f,f,c,b,g,e,b,k,k,n,b,p,w,o,l,h,v,p p,x,f,y,f,f,f,c,b,g,e,b,k,k,b,n,p,w,o,l,h,y,d p,f,y,y,f,f,f,c,b,p,e,b,k,k,n,b,p,w,o,l,h,v,p p,b,s,b,t,n,f,c,b,g,e,b,s,s,w,w,p,w,t,p,r,v,g p,x,s,w,t,f,f,c,b,h,t,b,f,f,w,w,p,w,o,p,h,v,g e,f,y,e,t,n,f,c,b,n,t,b,s,s,w,p,p,w,o,p,n,y,d p,b,s,w,t,n,f,c,b,g,e,b,s,s,w,w,p,w,t,p,r,v,m e,f,y,p,t,n,f,c,b,w,e,?,s,s,e,e,p,w,t,e,w,c,w p,x,s,w,t,f,f,c,b,p,t,b,f,f,w,w,p,w,o,p,h,v,g e,k,y,n,t,n,f,c,b,w,e,?,s,s,w,w,p,w,t,e,w,c,w p,x,y,y,f,f,f,c,b,h,e,b,k,k,n,b,p,w,o,l,h,y,p 
p,f,s,b,t,f,f,c,b,h,t,b,f,s,w,w,p,w,o,p,h,v,u p,f,y,g,f,f,f,c,b,h,e,b,k,k,n,n,p,w,o,l,h,v,d p,x,y,y,f,f,f,c,b,h,e,b,k,k,b,n,p,w,o,l,h,v,d p,x,y,g,f,f,f,c,b,p,e,b,k,k,n,b,p,w,o,l,h,v,d p,x,f,y,f,f,f,c,b,g,e,b,k,k,n,p,p,w,o,l,h,y,p p,x,y,y,f,f,f,c,b,h,e,b,k,k,p,b,p,w,o,l,h,v,d p,x,y,y,f,f,f,c,b,h,e,b,k,k,p,n,p,w,o,l,h,y,d p,x,f,y,f,f,f,c,b,g,e,b,k,k,b,n,p,w,o,l,h,y,p p,x,y,g,f,f,f,c,b,g,e,b,k,k,b,b,p,w,o,l,h,y,p p,f,f,g,f,f,f,c,b,p,e,b,k,k,n,n,p,w,o,l,h,v,p e,k,s,e,t,n,f,c,b,w,e,?,s,s,e,w,p,w,t,e,w,c,w e,f,y,n,t,n,f,c,b,e,e,?,s,s,e,w,p,w,t,e,w,c,w e,f,s,b,t,n,f,c,b,w,e,?,s,s,e,w,p,w,t,e,w,c,w p,x,y,y,f,f,f,c,b,g,e,b,k,k,b,b,p,w,o,l,h,v,d e,f,f,g,t,n,f,c,b,u,t,b,s,s,p,p,p,w,o,p,n,v,d p,f,f,y,f,f,f,c,b,p,e,b,k,k,n,n,p,w,o,l,h,y,p p,f,y,g,f,f,f,c,b,h,e,b,k,k,p,p,p,w,o,l,h,y,g p,x,s,g,t,f,f,c,b,p,t,b,f,f,w,w,p,w,o,p,h,v,g p,f,f,g,f,f,f,c,b,p,e,b,k,k,p,p,p,w,o,l,h,v,p p,x,f,y,f,f,f,c,b,h,e,b,k,k,p,b,p,w,o,l,h,y,d p,f,f,y,f,f,f,c,b,p,e,b,k,k,n,p,p,w,o,l,h,v,g p,x,y,n,f,f,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,l p,f,f,g,f,f,f,c,b,h,e,b,k,k,b,n,p,w,o,l,h,v,p e,f,y,n,t,n,f,c,b,e,e,?,s,s,w,w,p,w,t,e,w,c,w e,f,y,e,t,n,f,c,b,p,t,b,s,s,g,p,p,w,o,p,k,v,d p,f,y,g,f,f,f,c,b,g,e,b,k,k,p,b,p,w,o,l,h,y,g p,x,y,y,f,f,f,c,b,h,e,b,k,k,p,p,p,w,o,l,h,v,p p,f,f,g,f,f,f,c,b,p,e,b,k,k,n,n,p,w,o,l,h,y,d e,f,f,e,t,n,f,c,b,n,t,b,s,s,p,w,p,w,o,p,n,v,d p,x,y,y,f,f,f,c,b,h,e,b,k,k,p,b,p,w,o,l,h,v,p p,f,f,y,f,f,f,c,b,p,e,b,k,k,b,b,p,w,o,l,h,y,p p,f,f,g,f,f,f,c,b,p,e,b,k,k,p,n,p,w,o,l,h,v,p p,f,y,y,f,f,f,c,b,g,e,b,k,k,n,b,p,w,o,l,h,y,d p,x,y,g,f,f,f,c,b,h,e,b,k,k,b,b,p,w,o,l,h,v,d p,b,y,w,t,n,f,c,b,w,e,b,s,s,w,w,p,w,t,p,r,v,m p,f,f,y,f,f,f,c,b,p,e,b,k,k,p,p,p,w,o,l,h,y,p p,x,y,n,f,f,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,d p,f,f,g,f,f,f,c,b,g,e,b,k,k,p,p,p,w,o,l,h,y,g p,x,y,y,f,f,f,c,b,g,e,b,k,k,n,n,p,w,o,l,h,v,g p,f,y,y,f,f,f,c,b,g,e,b,k,k,n,n,p,w,o,l,h,y,p e,f,y,p,t,n,f,c,b,e,e,?,s,s,e,e,p,w,t,e,w,c,w p,x,s,w,t,f,f,c,b,h,t,b,s,s,w,w,p,w,o,p,h,s,g p,f,y,g,f,f,f,c,b,h,e,b,k,k,n,p,p,w,o,l,h,y,g p,x,y,g,f,f,f,c,b,h,e,b,k,k,b,b,p,w,o,l,h,v,p p,x,f,y,f,f,f,c,b,p,e,b,k,k,n,p,p,w,o,l,h,v,g p,f,s,b,t,f,f,c,b,w,t,b,f,s,w,w,p,w,o,p,h,s,u p,x,f,g,f,c,f,c,n,g,e,b,s,s,w,w,p,w,o,p,k,v,d p,x,f,y,f,f,f,c,b,g,e,b,k,k,n,n,p,w,o,l,h,y,p p,f,y,g,f,f,f,c,b,p,e,b,k,k,b,p,p,w,o,l,h,y,g p,f,y,y,f,f,f,c,b,h,e,b,k,k,n,n,p,w,o,l,h,y,p e,x,y,r,f,n,f,c,n,p,e,?,s,f,w,w,p,w,o,f,h,y,d e,f,y,w,f,n,f,c,n,h,e,?,s,f,w,w,p,w,o,f,h,v,d p,x,f,y,f,f,f,c,b,h,e,b,k,k,n,b,p,w,o,l,h,v,d p,f,y,y,f,f,f,c,b,h,e,b,k,k,b,n,p,w,o,l,h,y,p p,f,y,y,f,f,f,c,b,p,e,b,k,k,n,p,p,w,o,l,h,y,g p,x,y,e,f,y,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,p p,x,y,e,f,y,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,l p,x,y,g,f,f,f,c,b,h,e,b,k,k,b,b,p,w,o,l,h,y,d p,x,y,y,f,f,f,c,b,g,e,b,k,k,n,b,p,w,o,l,h,v,g p,f,f,g,f,f,f,c,b,h,e,b,k,k,b,n,p,w,o,l,h,y,d p,x,y,g,f,f,f,c,b,g,e,b,k,k,n,b,p,w,o,l,h,y,g p,x,y,y,f,f,f,c,b,g,e,b,k,k,p,p,p,w,o,l,h,v,d p,f,f,y,f,f,f,c,b,h,e,b,k,k,n,b,p,w,o,l,h,y,p e,f,s,e,t,n,f,c,b,w,e,?,s,s,w,e,p,w,t,e,w,c,w p,f,f,y,f,f,f,c,b,h,e,b,k,k,b,n,p,w,o,l,h,y,p p,x,y,y,f,f,f,c,b,h,e,b,k,k,p,p,p,w,o,l,h,y,p p,f,s,g,t,f,f,c,b,p,t,b,f,s,w,w,p,w,o,p,h,s,g p,f,f,y,f,f,f,c,b,p,e,b,k,k,b,p,p,w,o,l,h,v,g p,x,y,y,f,f,f,c,b,h,e,b,k,k,n,n,p,w,o,l,h,y,d p,x,f,y,f,f,f,c,b,p,e,b,k,k,n,b,p,w,o,l,h,y,p p,x,s,g,t,f,f,c,b,h,t,b,f,f,w,w,p,w,o,p,h,v,u p,f,y,g,f,f,f,c,b,p,e,b,k,k,p,p,p,w,o,l,h,y,g p,f,f,g,f,f,f,c,b,h,e,b,k,k,p,n,p,w,o,l,h,v,g p,x,s,w,f,c,f,c,n,n,e,b,s,s,w,w,p,w,o,p,k,v,d p,f,y,g,f,f,f,c,b,p,e,b,k,k,b,b,p,w,o,l,h,y,p e,f,y,e,t,n,f,c,b,u,t,b,s,s,p,w,p,w,o,p,n,y,d p,x,f,y,f,f,f,c,b,g,e,b,k,k,p,p,p,w,o,l,h,y,g 
p,x,y,y,f,f,f,c,b,p,e,b,k,k,n,p,p,w,o,l,h,y,p e,k,s,b,t,n,f,c,b,e,e,?,s,s,w,w,p,w,t,e,w,c,w p,f,f,y,f,f,f,c,b,h,e,b,k,k,b,p,p,w,o,l,h,v,g p,f,f,y,f,f,f,c,b,g,e,b,k,k,p,p,p,w,o,l,h,v,d p,x,y,y,f,f,f,c,b,g,e,b,k,k,b,b,p,w,o,l,h,y,g e,x,f,n,f,n,f,w,n,w,e,b,s,f,w,n,p,w,o,e,w,v,l e,x,y,e,t,n,f,c,b,e,e,?,s,s,e,e,p,w,t,e,w,c,w p,x,y,g,f,f,f,c,b,g,e,b,k,k,p,n,p,w,o,l,h,v,p p,x,f,y,f,f,f,c,b,g,e,b,k,k,p,b,p,w,o,l,h,v,g p,f,s,g,t,f,f,c,b,h,t,b,f,f,w,w,p,w,o,p,h,v,g p,x,y,y,f,f,f,c,b,g,e,b,k,k,p,b,p,w,o,l,h,y,g p,f,y,g,f,f,f,c,b,p,e,b,k,k,b,n,p,w,o,l,h,v,p p,x,y,g,f,f,f,c,b,p,e,b,k,k,n,b,p,w,o,l,h,v,g p,x,y,n,f,s,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,p e,k,s,b,t,n,f,c,b,w,e,?,s,s,e,w,p,w,t,e,w,c,w p,f,f,y,f,f,f,c,b,g,e,b,k,k,b,p,p,w,o,l,h,y,d p,x,f,g,f,f,f,c,b,g,e,b,k,k,b,p,p,w,o,l,h,v,p p,f,y,g,f,f,f,c,b,h,e,b,k,k,b,b,p,w,o,l,h,y,p p,x,f,y,f,f,f,c,b,g,e,b,k,k,b,n,p,w,o,l,h,v,d p,f,y,y,f,f,f,c,b,h,e,b,k,k,n,n,p,w,o,l,h,v,g e,k,s,p,t,n,f,c,b,w,e,?,s,s,e,e,p,w,t,e,w,c,w p,f,y,n,f,n,f,c,n,w,e,?,k,y,w,n,p,w,o,e,w,v,d p,x,y,g,f,f,f,c,b,g,e,b,k,k,n,n,p,w,o,l,h,y,d p,x,f,y,f,f,f,c,b,h,e,b,k,k,n,p,p,w,o,l,h,y,d p,x,y,g,f,f,f,c,b,h,e,b,k,k,n,b,p,w,o,l,h,v,p p,f,s,b,t,f,f,c,b,w,t,b,f,f,w,w,p,w,o,p,h,v,u e,f,s,b,t,n,f,c,b,w,e,?,s,s,w,w,p,w,t,e,w,c,w p,f,s,w,t,f,f,c,b,w,t,b,f,f,w,w,p,w,o,p,h,s,u p,f,y,g,f,f,f,c,b,g,e,b,k,k,b,n,p,w,o,l,h,y,p p,x,f,y,f,f,f,c,b,g,e,b,k,k,b,b,p,w,o,l,h,v,p p,f,y,g,f,f,f,c,b,h,e,b,k,k,n,n,p,w,o,l,h,v,p p,f,y,y,f,f,f,c,b,g,e,b,k,k,n,b,p,w,o,l,h,y,g p,f,f,y,f,f,f,c,b,p,e,b,k,k,n,n,p,w,o,l,h,v,d p,x,s,b,t,f,f,c,b,h,t,b,s,s,w,w,p,w,o,p,h,v,u p,f,f,g,f,f,f,c,b,g,e,b,k,k,p,p,p,w,o,l,h,y,p p,x,y,g,f,f,f,c,b,h,e,b,k,k,p,n,p,w,o,l,h,y,d p,f,f,y,f,f,f,c,b,p,e,b,k,k,b,b,p,w,o,l,h,y,g p,f,y,y,f,f,f,c,b,h,e,b,k,k,p,p,p,w,o,l,h,y,d p,f,s,g,t,f,f,c,b,p,t,b,s,f,w,w,p,w,o,p,h,s,u e,f,s,b,t,n,f,c,b,w,e,?,s,s,w,e,p,w,t,e,w,c,w e,x,s,p,t,n,f,c,b,w,e,?,s,s,e,e,p,w,t,e,w,c,w e,k,s,n,t,n,f,c,b,e,e,?,s,s,w,w,p,w,t,e,w,c,w p,f,y,p,t,n,f,c,b,g,e,b,s,s,w,w,p,w,t,p,r,v,g p,f,y,g,f,f,f,c,b,p,e,b,k,k,p,p,p,w,o,l,h,v,p p,x,y,y,f,f,f,c,b,h,e,b,k,k,b,b,p,w,o,l,h,v,p p,f,f,g,f,f,f,c,b,g,e,b,k,k,b,p,p,w,o,l,h,v,g p,f,f,g,f,f,f,c,b,p,e,b,k,k,p,p,p,w,o,l,h,y,d p,x,f,y,f,f,f,c,b,p,e,b,k,k,b,n,p,w,o,l,h,y,p p,f,f,g,f,f,f,c,b,p,e,b,k,k,n,b,p,w,o,l,h,v,d p,f,y,g,f,f,f,c,b,h,e,b,k,k,b,p,p,w,o,l,h,y,d p,f,y,y,f,f,f,c,b,h,e,b,k,k,b,b,p,w,o,l,h,v,d p,f,f,y,f,f,f,c,b,p,e,b,k,k,b,b,p,w,o,l,h,v,g p,f,y,y,f,f,f,c,b,p,e,b,k,k,p,n,p,w,o,l,h,y,g p,x,y,y,f,f,f,c,b,p,e,b,k,k,p,b,p,w,o,l,h,v,d p,x,y,y,f,f,f,c,b,h,e,b,k,k,b,n,p,w,o,l,h,y,p p,f,y,g,f,f,f,c,b,g,e,b,k,k,p,n,p,w,o,l,h,v,g p,f,f,g,f,f,f,c,b,g,e,b,k,k,b,p,p,w,o,l,h,y,d p,x,y,y,f,f,f,c,b,g,e,b,k,k,n,p,p,w,o,l,h,v,p p,x,y,y,f,f,f,c,b,g,e,b,k,k,n,n,p,w,o,l,h,y,p e,f,y,u,f,n,f,c,n,w,e,?,s,f,w,w,p,w,o,f,h,y,d p,f,f,g,f,f,f,c,b,g,e,b,k,k,b,b,p,w,o,l,h,v,g p,x,y,n,f,y,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,l p,x,y,y,f,f,f,c,b,p,e,b,k,k,p,n,p,w,o,l,h,v,g p,x,y,y,f,f,f,c,b,p,e,b,k,k,n,n,p,w,o,l,h,y,p p,x,y,g,f,f,f,c,b,h,e,b,k,k,n,n,p,w,o,l,h,v,g p,f,f,y,f,f,f,c,b,g,e,b,k,k,b,b,p,w,o,l,h,v,d p,x,f,y,f,f,f,c,b,p,e,b,k,k,n,p,p,w,o,l,h,y,d p,x,y,g,f,f,f,c,b,h,e,b,k,k,n,p,p,w,o,l,h,v,p e,f,s,b,t,n,f,c,b,e,e,?,s,s,w,w,p,w,t,e,w,c,w p,f,f,g,f,f,f,c,b,h,e,b,k,k,p,p,p,w,o,l,h,y,p p,f,y,y,f,f,f,c,b,h,e,b,k,k,n,p,p,w,o,l,h,v,d e,x,s,n,t,n,f,c,b,w,e,?,s,s,w,e,p,w,t,e,w,c,w p,f,f,g,f,f,f,c,b,g,e,b,k,k,b,n,p,w,o,l,h,y,p p,x,y,g,f,f,f,c,b,g,e,b,k,k,p,b,p,w,o,l,h,v,d p,x,y,n,f,f,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,p p,f,f,y,f,f,f,c,b,g,e,b,k,k,n,n,p,w,o,l,h,v,d p,x,y,n,f,n,f,c,n,w,e,?,k,y,w,n,p,w,o,e,w,v,d 
p,b,s,b,t,n,f,c,b,w,e,b,s,s,w,w,p,w,t,p,r,v,g p,f,f,y,f,f,f,c,b,p,e,b,k,k,n,b,p,w,o,l,h,v,d p,f,y,g,f,f,f,c,b,h,e,b,k,k,n,b,p,w,o,l,h,y,d e,f,y,u,f,n,f,c,n,u,e,?,s,f,w,w,p,w,o,f,h,y,d p,x,f,y,f,f,f,c,b,g,e,b,k,k,n,n,p,w,o,l,h,v,g p,f,y,y,f,f,f,c,b,g,e,b,k,k,b,p,p,w,o,l,h,y,g p,f,f,y,f,f,f,c,b,h,e,b,k,k,n,p,p,w,o,l,h,y,g p,x,y,y,f,f,f,c,b,h,e,b,k,k,b,p,p,w,o,l,h,y,d e,x,y,e,t,n,f,c,b,w,e,?,s,s,e,w,p,w,t,e,w,c,w p,f,f,g,f,f,f,c,b,g,e,b,k,k,p,p,p,w,o,l,h,v,g p,x,y,y,f,f,f,c,b,g,e,b,k,k,b,n,p,w,o,l,h,v,g p,x,y,y,f,f,f,c,b,p,e,b,k,k,b,b,p,w,o,l,h,v,p p,f,y,g,f,f,f,c,b,g,e,b,k,k,b,n,p,w,o,l,h,v,p e,k,s,b,t,n,f,c,b,w,e,?,s,s,w,e,p,w,t,e,w,c,w p,f,y,y,f,f,f,c,b,p,e,b,k,k,p,b,p,w,o,l,h,y,d p,x,y,g,f,f,f,c,b,p,e,b,k,k,b,b,p,w,o,l,h,v,p p,x,f,y,f,f,f,c,b,h,e,b,k,k,n,n,p,w,o,l,h,v,d p,f,f,y,f,f,f,c,b,p,e,b,k,k,n,p,p,w,o,l,h,v,p p,f,s,w,t,f,f,c,b,h,t,b,s,f,w,w,p,w,o,p,h,v,u p,x,s,g,f,c,f,c,n,p,e,b,s,s,w,w,p,w,o,p,n,v,d p,f,y,y,f,f,f,c,b,g,e,b,k,k,p,b,p,w,o,l,h,y,d e,f,y,u,f,n,f,c,n,p,e,?,s,f,w,w,p,w,o,f,h,v,d p,x,f,y,f,f,f,c,b,g,e,b,k,k,p,p,p,w,o,l,h,y,d p,f,y,g,f,f,f,c,b,g,e,b,k,k,n,p,p,w,o,l,h,y,d p,x,f,y,f,f,f,c,b,g,e,b,k,k,n,p,p,w,o,l,h,v,g p,x,f,y,f,f,f,c,b,h,e,b,k,k,n,b,p,w,o,l,h,v,p p,x,s,g,t,f,f,c,b,p,t,b,f,s,w,w,p,w,o,p,h,s,u p,f,y,g,f,f,f,c,b,g,e,b,k,k,p,n,p,w,o,l,h,y,d p,f,f,y,f,f,f,c,b,h,e,b,k,k,p,n,p,w,o,l,h,y,p p,f,f,y,f,f,f,c,b,g,e,b,k,k,b,n,p,w,o,l,h,y,d p,x,y,g,f,f,f,c,b,p,e,b,k,k,p,b,p,w,o,l,h,v,d p,x,y,g,f,f,f,c,b,g,e,b,k,k,n,b,p,w,o,l,h,v,g p,x,y,g,f,f,f,c,b,h,e,b,k,k,n,b,p,w,o,l,h,y,d p,x,f,g,f,f,f,c,b,p,e,b,k,k,p,p,p,w,o,l,h,y,g p,x,y,y,f,f,f,c,b,g,e,b,k,k,p,n,p,w,o,l,h,y,p p,f,f,g,f,f,f,c,b,h,e,b,k,k,n,n,p,w,o,l,h,v,g p,f,f,y,f,f,f,c,b,p,e,b,k,k,b,n,p,w,o,l,h,y,d p,x,y,g,f,f,f,c,b,h,e,b,k,k,b,p,p,w,o,l,h,y,p p,f,f,y,f,f,f,c,b,g,e,b,k,k,p,p,p,w,o,l,h,v,p p,x,y,n,f,s,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,l p,x,f,y,f,f,f,c,b,h,e,b,k,k,p,b,p,w,o,l,h,y,p p,x,y,y,f,f,f,c,b,g,e,b,k,k,b,p,p,w,o,l,h,v,p p,f,s,b,t,f,f,c,b,w,t,b,f,s,w,w,p,w,o,p,h,v,g p,f,f,g,f,f,f,c,b,h,e,b,k,k,n,p,p,w,o,l,h,v,d p,f,y,y,f,f,f,c,b,h,e,b,k,k,b,p,p,w,o,l,h,y,p p,x,f,y,f,f,f,c,b,h,e,b,k,k,n,n,p,w,o,l,h,y,p p,f,f,g,f,f,f,c,b,p,e,b,k,k,b,n,p,w,o,l,h,v,d p,x,y,y,f,f,f,c,b,g,e,b,k,k,n,p,p,w,o,l,h,y,d p,x,y,g,f,f,f,c,b,p,e,b,k,k,b,b,p,w,o,l,h,v,d p,x,y,e,f,y,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,l e,k,y,b,t,n,f,c,b,e,e,?,s,s,e,w,p,w,t,e,w,c,w e,f,y,b,t,n,f,c,b,w,e,?,s,s,e,w,p,w,t,e,w,c,w p,x,y,g,f,f,f,c,b,p,e,b,k,k,p,p,p,w,o,l,h,y,d p,f,f,y,f,f,f,c,b,g,e,b,k,k,b,p,p,w,o,l,h,y,p p,x,y,g,f,f,f,c,b,h,e,b,k,k,n,b,p,w,o,l,h,y,g p,f,y,g,f,f,f,c,b,g,e,b,k,k,b,p,p,w,o,l,h,y,g p,x,s,b,t,f,f,c,b,w,t,b,s,f,w,w,p,w,o,p,h,s,u p,f,y,y,f,f,f,c,b,h,e,b,k,k,b,p,p,w,o,l,h,v,p p,x,s,w,t,f,f,c,b,p,t,b,f,f,w,w,p,w,o,p,h,s,u p,x,f,y,f,f,f,c,b,p,e,b,k,k,b,n,p,w,o,l,h,v,p p,x,y,g,f,f,f,c,b,p,e,b,k,k,b,p,p,w,o,l,h,y,p p,x,s,w,t,f,f,c,b,p,t,b,s,f,w,w,p,w,o,p,h,s,u p,f,s,b,t,f,f,c,b,p,t,b,f,f,w,w,p,w,o,p,h,s,g p,x,s,w,t,f,f,c,b,h,t,b,f,s,w,w,p,w,o,p,h,s,u p,f,f,y,f,f,f,c,b,p,e,b,k,k,n,b,p,w,o,l,h,y,d p,f,y,g,f,f,f,c,b,g,e,b,k,k,p,b,p,w,o,l,h,y,p p,x,f,y,f,f,f,c,b,h,e,b,k,k,b,n,p,w,o,l,h,v,g p,f,f,g,f,f,f,c,b,h,e,b,k,k,n,b,p,w,o,l,h,v,p e,x,y,w,f,n,f,c,n,u,e,?,s,f,w,w,p,w,o,f,h,y,d e,x,s,e,t,n,f,c,b,e,e,?,s,s,w,w,p,w,t,e,w,c,w p,x,y,g,f,f,f,c,b,p,e,b,k,k,p,p,p,w,o,l,h,v,g p,f,f,g,f,f,f,c,b,h,e,b,k,k,n,b,p,w,o,l,h,v,g p,x,y,n,f,y,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,l p,x,y,g,f,f,f,c,b,h,e,b,k,k,p,b,p,w,o,l,h,v,p p,f,f,g,f,f,f,c,b,h,e,b,k,k,n,p,p,w,o,l,h,y,p p,f,f,y,f,f,f,c,b,h,e,b,k,k,b,n,p,w,o,l,h,v,p p,f,f,y,f,f,f,c,b,h,e,b,k,k,b,p,p,w,o,l,h,y,d 
e,f,f,e,t,n,f,c,b,n,t,b,s,s,g,w,p,w,o,p,n,y,d p,x,y,y,f,f,f,c,b,p,e,b,k,k,n,b,p,w,o,l,h,y,g p,f,y,y,f,f,f,c,b,g,e,b,k,k,n,b,p,w,o,l,h,v,d p,f,f,g,f,f,f,c,b,g,e,b,k,k,b,p,p,w,o,l,h,v,p p,x,y,y,f,f,f,c,b,h,e,b,k,k,p,p,p,w,o,l,h,y,g p,x,f,y,f,f,f,c,b,g,e,b,k,k,p,b,p,w,o,l,h,y,p p,k,f,y,f,n,f,c,n,w,e,?,k,y,w,n,p,w,o,e,w,v,d p,f,f,y,f,f,f,c,b,h,e,b,k,k,n,b,p,w,o,l,h,y,g e,k,f,c,f,n,f,w,n,w,e,b,f,s,w,n,p,w,o,e,w,v,l p,f,f,g,f,f,f,c,b,h,e,b,k,k,n,n,p,w,o,l,h,v,p p,f,y,y,f,f,f,c,b,h,e,b,k,k,n,p,p,w,o,l,h,y,p p,x,y,g,f,f,f,c,b,g,e,b,k,k,b,b,p,w,o,l,h,v,d p,x,y,g,f,f,f,c,b,p,e,b,k,k,p,b,p,w,o,l,h,v,p p,f,f,y,f,f,f,c,b,h,e,b,k,k,p,p,p,w,o,l,h,v,g p,f,y,y,f,f,f,c,b,h,e,b,k,k,p,n,p,w,o,l,h,v,d p,x,f,y,f,f,f,c,b,p,e,b,k,k,n,n,p,w,o,l,h,y,d p,f,f,y,f,f,f,c,b,p,e,b,k,k,n,n,p,w,o,l,h,v,p p,x,y,y,f,f,f,c,b,g,e,b,k,k,n,b,p,w,o,l,h,y,d p,b,y,p,t,n,f,c,b,w,e,b,s,s,w,w,p,w,t,p,r,v,g e,k,y,p,t,n,f,c,b,e,e,?,s,s,w,w,p,w,t,e,w,c,w p,x,y,y,f,f,f,c,b,g,e,b,k,k,b,b,p,w,o,l,h,v,p p,x,f,y,f,f,f,c,b,p,e,b,k,k,p,p,p,w,o,l,h,v,g e,k,y,b,t,n,f,c,b,e,e,?,s,s,w,e,p,w,t,e,w,c,w p,f,y,y,f,f,f,c,b,p,e,b,k,k,n,n,p,w,o,l,h,y,d p,x,s,w,t,f,f,c,b,p,t,b,s,s,w,w,p,w,o,p,h,s,u p,x,y,n,f,s,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,p p,x,y,n,f,y,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,d p,x,y,e,f,y,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,p p,f,g,w,t,n,f,w,n,w,e,b,s,s,w,w,p,w,o,p,w,c,l p,x,s,g,t,f,f,c,b,p,t,b,f,s,w,w,p,w,o,p,h,v,u e,f,y,c,f,n,f,w,n,w,e,b,f,s,w,n,p,w,o,e,w,v,l p,x,y,g,f,f,f,c,b,p,e,b,k,k,p,b,p,w,o,l,h,y,g p,f,y,y,f,f,f,c,b,g,e,b,k,k,b,p,p,w,o,l,h,v,d p,f,y,y,f,f,f,c,b,p,e,b,k,k,b,p,p,w,o,l,h,y,p e,f,y,p,t,n,f,c,b,w,e,?,s,s,w,e,p,w,t,e,w,c,w p,x,s,b,t,f,f,c,b,w,t,b,f,s,w,w,p,w,o,p,h,v,g p,x,s,g,t,f,f,c,b,h,t,b,s,f,w,w,p,w,o,p,h,s,g p,x,y,e,f,y,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,p e,k,y,c,f,n,f,w,n,w,e,b,s,f,w,n,p,w,o,e,w,v,l p,x,s,w,t,f,f,c,b,w,t,b,s,f,w,w,p,w,o,p,h,s,u p,x,s,w,t,f,f,c,b,h,t,b,s,f,w,w,p,w,o,p,h,v,g p,f,s,b,t,f,f,c,b,w,t,b,s,f,w,w,p,w,o,p,h,v,u p,f,s,b,t,f,f,c,b,p,t,b,s,s,w,w,p,w,o,p,h,v,g e,k,y,c,f,n,f,w,n,w,e,b,f,s,w,n,p,w,o,e,w,v,l p,x,y,g,f,f,f,c,b,g,e,b,k,k,n,n,p,w,o,l,h,v,d p,f,s,w,t,f,f,c,b,h,t,b,f,f,w,w,p,w,o,p,h,v,g p,f,s,g,t,f,f,c,b,w,t,b,s,s,w,w,p,w,o,p,h,s,u p,c,g,w,t,n,f,w,n,w,e,b,s,s,w,w,p,w,o,p,w,c,l p,x,s,g,t,f,f,c,b,p,t,b,s,f,w,w,p,w,o,p,h,s,u p,b,g,w,t,n,f,w,n,w,e,b,s,s,w,w,p,w,o,p,w,c,l p,x,s,b,t,f,f,c,b,p,t,b,s,s,w,w,p,w,o,p,h,v,g p,f,s,w,t,f,f,c,b,h,t,b,f,f,w,w,p,w,o,p,h,s,u p,b,f,y,f,n,f,c,n,w,e,?,k,y,w,y,p,w,o,e,w,v,d p,x,y,n,f,s,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,d p,x,y,n,f,y,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,d e,k,s,n,t,n,f,c,b,w,e,?,s,s,e,w,p,w,t,e,w,c,w e,k,y,b,t,n,f,c,b,w,e,?,s,s,e,e,p,w,t,e,w,c,w p,f,s,w,t,f,f,c,b,w,t,b,s,s,w,w,p,w,o,p,h,v,g p,f,s,b,t,n,f,c,b,w,e,b,s,s,w,w,p,w,t,p,r,v,m p,f,y,y,f,f,f,c,b,p,e,b,k,k,b,b,p,w,o,l,h,y,p p,x,y,g,f,f,f,c,b,p,e,b,k,k,p,n,p,w,o,l,h,y,g p,f,s,b,t,n,f,c,b,r,e,b,s,s,w,w,p,w,t,p,r,v,g p,x,y,n,f,f,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,p p,x,y,e,f,y,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,p p,f,f,g,f,f,f,c,b,p,e,b,k,k,b,b,p,w,o,l,h,y,d e,x,s,e,t,n,f,c,b,e,e,?,s,s,e,w,p,w,t,e,w,c,w p,x,y,n,f,f,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,l p,x,f,y,f,f,f,c,b,g,e,b,k,k,p,b,p,w,o,l,h,y,g p,x,y,n,f,y,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,d e,f,f,n,f,n,f,w,n,w,e,b,s,f,w,n,p,w,o,e,w,v,l p,x,y,n,f,f,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,p e,x,s,b,t,n,f,c,b,e,e,?,s,s,e,e,p,w,t,e,w,c,w e,f,y,n,t,n,f,c,b,w,e,?,s,s,w,e,p,w,t,e,w,c,w p,f,y,y,f,f,f,c,b,g,e,b,k,k,p,b,p,w,o,l,h,y,p p,f,y,b,t,n,f,c,b,r,e,b,s,s,w,w,p,w,t,p,r,v,m p,x,y,n,f,n,f,c,n,w,e,?,k,y,w,y,p,w,o,e,w,v,d p,x,y,n,f,y,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,l 
p,x,s,b,t,f,f,c,b,w,t,b,s,s,w,w,p,w,o,p,h,s,g p,x,s,b,t,f,f,c,b,h,t,b,f,s,w,w,p,w,o,p,h,v,g p,f,y,y,f,f,f,c,b,h,e,b,k,k,p,n,p,w,o,l,h,y,p p,f,s,g,t,f,f,c,b,w,t,b,s,f,w,w,p,w,o,p,h,v,g p,x,y,n,f,f,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,l p,x,s,b,t,f,f,c,b,p,t,b,f,s,w,w,p,w,o,p,h,v,g p,f,f,n,f,n,f,c,n,w,e,?,k,y,w,y,p,w,o,e,w,v,d e,x,y,e,t,n,f,c,b,w,e,?,s,s,w,w,p,w,t,e,w,c,w p,f,s,b,t,f,f,c,b,w,t,b,f,f,w,w,p,w,o,p,h,s,g p,f,s,w,t,f,f,c,b,p,t,b,f,s,w,w,p,w,o,p,h,s,g e,k,y,e,t,n,f,c,b,w,e,?,s,s,e,e,p,w,t,e,w,c,w p,x,f,y,f,f,f,c,b,p,e,b,k,k,n,p,p,w,o,l,h,y,p p,x,f,n,f,n,f,c,n,w,e,?,k,y,w,n,p,w,o,e,w,v,d e,x,y,n,t,n,f,c,b,e,e,?,s,s,e,w,p,w,t,e,w,c,w p,x,y,g,f,f,f,c,b,p,e,b,k,k,p,p,p,w,o,l,h,y,g p,f,s,b,t,f,f,c,b,h,t,b,f,s,w,w,p,w,o,p,h,v,g p,f,s,b,t,f,f,c,b,h,t,b,f,s,w,w,p,w,o,p,h,s,g p,f,y,y,f,f,f,c,b,h,e,b,k,k,b,p,p,w,o,l,h,v,g p,f,y,y,f,f,f,c,b,h,e,b,k,k,p,b,p,w,o,l,h,v,d e,x,s,p,t,n,f,c,b,w,e,?,s,s,w,w,p,w,t,e,w,c,w e,f,s,p,t,n,f,c,b,e,e,?,s,s,e,e,p,w,t,e,w,c,w p,b,y,w,t,n,f,c,b,r,e,b,s,s,w,w,p,w,t,p,r,v,g e,x,y,c,f,n,f,w,n,w,e,b,f,f,w,n,p,w,o,e,w,v,l e,f,s,n,t,n,f,c,b,w,e,?,s,s,e,w,p,w,t,e,w,c,w p,x,s,b,t,f,f,c,b,h,t,b,f,s,w,w,p,w,o,p,h,v,u p,x,s,g,t,f,f,c,b,w,t,b,f,s,w,w,p,w,o,p,h,v,g p,f,y,y,f,f,f,c,b,g,e,b,k,k,b,p,p,w,o,l,h,y,d p,x,s,b,t,f,f,c,b,w,t,b,f,s,w,w,p,w,o,p,h,v,u p,x,s,w,t,f,f,c,b,p,t,b,s,s,w,w,p,w,o,p,h,s,g e,f,y,c,f,n,f,w,n,w,e,b,s,f,w,n,p,w,o,e,w,v,l p,x,y,e,f,y,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,d p,f,f,y,f,f,f,c,b,h,e,b,k,k,n,p,p,w,o,l,h,y,d e,x,y,w,f,n,f,c,n,h,e,?,s,f,w,w,p,w,o,f,h,v,d p,x,y,y,f,f,f,c,b,p,e,b,k,k,b,p,p,w,o,l,h,v,p p,f,y,p,t,n,f,c,b,w,e,b,s,s,w,w,p,w,t,p,r,v,g p,f,s,g,t,f,f,c,b,h,t,b,f,s,w,w,p,w,o,p,h,v,u p,x,y,n,f,s,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,d e,k,s,p,t,n,f,c,b,w,e,?,s,s,e,w,p,w,t,e,w,c,w p,x,y,n,f,f,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,l p,x,y,g,f,f,f,c,b,g,e,b,k,k,n,b,p,w,o,l,h,v,p p,f,s,b,t,f,f,c,b,h,t,b,f,f,w,w,p,w,o,p,h,s,u e,k,y,p,t,n,f,c,b,e,e,?,s,s,w,e,p,w,t,e,w,c,w p,x,f,y,f,f,f,c,b,h,e,b,k,k,p,b,p,w,o,l,h,v,p p,f,y,g,f,f,f,c,b,g,e,b,k,k,n,n,p,w,o,l,h,v,g p,f,f,g,f,f,f,c,b,g,e,b,k,k,n,p,p,w,o,l,h,y,d p,x,s,b,t,f,f,c,b,p,t,b,s,f,w,w,p,w,o,p,h,s,g p,x,y,n,f,f,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,p p,x,y,y,f,f,f,c,b,p,e,b,k,k,p,p,p,w,o,l,h,v,d p,f,f,y,f,f,f,c,b,h,e,b,k,k,n,n,p,w,o,l,h,v,d p,f,f,n,f,n,f,c,n,w,e,?,k,y,w,n,p,w,o,e,w,v,d e,k,s,b,t,n,f,c,b,w,e,?,s,s,w,w,p,w,t,e,w,c,w p,f,y,y,f,f,f,c,b,h,e,b,k,k,p,n,p,w,o,l,h,v,g p,x,y,e,f,y,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,l p,x,s,g,t,f,f,c,b,h,t,b,f,f,w,w,p,w,o,p,h,s,u e,f,s,n,t,n,f,c,b,e,e,?,s,s,w,e,p,w,t,e,w,c,w p,x,y,g,f,f,f,c,b,g,e,b,k,k,p,p,p,w,o,l,h,v,p p,x,y,n,f,y,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,d p,x,s,g,t,f,f,c,b,p,t,b,s,s,w,w,p,w,o,p,h,v,u p,x,s,g,t,f,f,c,b,h,t,b,f,s,w,w,p,w,o,p,h,v,g p,f,y,p,t,n,f,c,b,w,e,b,s,s,w,w,p,w,t,p,r,v,m e,k,y,b,t,n,f,c,b,w,e,?,s,s,e,w,p,w,t,e,w,c,w p,x,s,w,t,f,f,c,b,w,t,b,s,s,w,w,p,w,o,p,h,v,u p,x,y,n,f,y,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,d e,k,y,n,t,n,f,c,b,w,e,?,s,s,e,w,p,w,t,e,w,c,w p,f,y,y,f,f,f,c,b,g,e,b,k,k,n,p,p,w,o,l,h,y,d e,x,y,p,t,n,f,c,b,w,e,?,s,s,e,e,p,w,t,e,w,c,w p,f,s,g,t,f,f,c,b,p,t,b,f,f,w,w,p,w,o,p,h,s,u e,x,s,p,t,n,f,c,b,e,e,?,s,s,w,e,p,w,t,e,w,c,w p,b,y,n,f,n,f,c,n,w,e,?,k,y,w,n,p,w,o,e,w,v,d p,f,s,b,t,f,f,c,b,h,t,b,f,f,w,w,p,w,o,p,h,v,g p,x,y,n,f,f,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,p p,x,y,n,f,f,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,d p,x,s,b,t,f,f,c,b,w,t,b,s,s,w,w,p,w,o,p,h,s,u p,x,y,n,f,f,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,l e,k,y,p,t,n,f,c,b,w,e,?,s,s,w,e,p,w,t,e,w,c,w e,k,f,c,f,n,f,w,n,w,e,b,f,f,w,n,p,w,o,e,w,v,l p,x,y,y,f,f,f,c,b,h,e,b,k,k,b,n,p,w,o,l,h,v,g 
e,x,s,n,t,n,f,c,b,w,e,?,s,s,e,w,p,w,t,e,w,c,w p,f,f,g,f,f,f,c,b,p,e,b,k,k,b,p,p,w,o,l,h,y,p p,f,y,y,f,f,f,c,b,p,e,b,k,k,n,b,p,w,o,l,h,v,d p,f,s,g,t,f,f,c,b,w,t,b,s,f,w,w,p,w,o,p,h,v,u p,k,y,w,t,n,f,w,n,w,e,b,s,s,w,w,p,w,o,p,w,c,l e,k,f,n,f,n,f,w,n,w,e,b,f,f,w,n,p,w,o,e,w,v,l p,k,f,y,f,n,f,c,n,w,e,?,k,y,w,y,p,w,o,e,w,v,d p,f,s,w,t,f,f,c,b,w,t,b,f,f,w,w,p,w,o,p,h,s,g p,f,y,y,f,n,f,c,n,w,e,?,k,y,w,y,p,w,o,e,w,v,d p,f,y,y,f,f,f,c,b,g,e,b,k,k,p,p,p,w,o,l,h,v,d e,k,s,b,t,n,f,c,b,e,e,?,s,s,w,e,p,w,t,e,w,c,w p,f,y,y,f,f,f,c,b,p,e,b,k,k,p,p,p,w,o,l,h,v,p p,x,y,n,f,y,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,p p,x,y,y,f,f,f,c,b,h,e,b,k,k,p,b,p,w,o,l,h,y,d p,x,y,y,f,f,f,c,b,p,e,b,k,k,p,p,p,w,o,l,h,v,p p,b,y,w,t,n,f,c,b,w,e,b,s,s,w,w,p,w,t,p,r,v,g p,f,y,y,f,f,f,c,b,g,e,b,k,k,p,b,p,w,o,l,h,y,g p,x,s,w,t,f,f,c,b,p,t,b,s,f,w,w,p,w,o,p,h,v,g p,x,s,b,t,f,f,c,b,w,t,b,s,s,w,w,p,w,o,p,h,v,g e,k,y,b,t,n,f,c,b,w,e,?,s,s,w,e,p,w,t,e,w,c,w p,f,y,y,f,f,f,c,b,g,e,b,k,k,b,n,p,w,o,l,h,v,p p,x,s,b,t,f,f,c,b,p,t,b,f,f,w,w,p,w,o,p,h,s,g p,b,y,b,t,n,f,c,b,w,e,b,s,s,w,w,p,w,t,p,r,v,m p,x,s,w,t,f,f,c,b,w,t,b,s,f,w,w,p,w,o,p,h,v,g e,k,s,n,t,n,f,c,b,w,e,?,s,s,w,e,p,w,t,e,w,c,w p,f,s,g,t,f,f,c,b,w,t,b,f,s,w,w,p,w,o,p,h,s,u p,x,f,y,f,n,f,c,n,w,e,?,k,y,w,y,p,w,o,e,w,v,d p,f,s,g,t,f,f,c,b,p,t,b,s,s,w,w,p,w,o,p,h,v,g p,f,y,y,f,f,f,c,b,g,e,b,k,k,p,n,p,w,o,l,h,y,d p,k,y,n,f,n,f,c,n,w,e,?,k,y,w,y,p,w,o,e,w,v,d p,f,s,b,t,f,f,c,b,p,t,b,s,f,w,w,p,w,o,p,h,s,u e,f,s,e,t,n,f,c,b,w,e,?,s,s,w,w,p,w,t,e,w,c,w p,x,y,n,f,f,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,d p,x,y,n,f,y,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,d p,x,y,n,f,y,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,l p,x,s,w,t,f,f,c,b,h,t,b,f,s,w,w,p,w,o,p,h,v,u e,k,s,n,t,n,f,c,b,e,e,?,s,s,e,e,p,w,t,e,w,c,w p,f,y,g,f,f,f,c,b,p,e,b,k,k,n,p,p,w,o,l,h,y,g e,x,s,b,t,n,f,c,b,e,e,?,s,s,w,e,p,w,t,e,w,c,w p,f,s,w,t,f,f,c,b,w,t,b,f,s,w,w,p,w,o,p,h,s,u e,k,f,n,f,n,f,w,n,w,e,b,s,s,w,n,p,w,o,e,w,v,l p,x,y,n,f,s,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,l p,f,s,w,t,f,f,c,b,h,t,b,f,s,w,w,p,w,o,p,h,v,g p,x,s,g,t,f,f,c,b,h,t,b,s,s,w,w,p,w,o,p,h,v,u p,k,y,y,f,n,f,c,n,w,e,?,k,y,w,y,p,w,o,e,w,v,d p,f,s,w,t,n,f,c,b,w,e,b,s,s,w,w,p,w,t,p,r,v,m p,f,f,y,f,f,f,c,b,g,e,b,k,k,n,b,p,w,o,l,h,y,p e,k,y,p,t,n,f,c,b,w,e,?,s,s,e,w,p,w,t,e,w,c,w p,f,y,w,t,n,f,w,n,w,e,b,s,s,w,w,p,w,o,p,w,c,l p,f,s,b,t,f,f,c,b,p,t,b,f,s,w,w,p,w,o,p,h,v,u e,f,y,w,f,n,f,c,n,u,e,?,s,f,w,w,p,w,o,f,h,y,d e,x,y,r,f,n,f,c,n,w,e,?,s,f,w,w,p,w,o,f,h,v,d e,x,s,b,t,n,f,c,b,w,e,?,s,s,w,e,p,w,t,e,w,c,w p,f,s,g,t,f,f,c,b,h,t,b,s,s,w,w,p,w,o,p,h,v,g p,x,y,e,f,y,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,d p,f,y,y,f,f,f,c,b,g,e,b,k,k,n,b,p,w,o,l,h,y,p p,f,s,g,t,f,f,c,b,w,t,b,s,s,w,w,p,w,o,p,h,s,g p,f,s,b,t,f,f,c,b,h,t,b,s,s,w,w,p,w,o,p,h,s,g e,f,s,e,t,n,f,c,b,e,e,?,s,s,e,w,p,w,t,e,w,c,w p,x,y,e,f,y,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,d p,f,f,g,f,f,f,c,b,p,e,b,k,k,b,b,p,w,o,l,h,y,g p,f,y,y,f,f,f,c,b,g,e,b,k,k,b,b,p,w,o,l,h,y,d e,x,s,n,t,n,f,c,b,e,e,?,s,s,e,w,p,w,t,e,w,c,w p,f,s,g,t,f,f,c,b,p,t,b,f,f,w,w,p,w,o,p,h,s,g p,x,y,n,f,s,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,l p,f,s,w,t,f,f,c,b,p,t,b,s,s,w,w,p,w,o,p,h,s,g e,x,f,c,f,n,f,w,n,w,e,b,s,s,w,n,p,w,o,e,w,v,l p,f,s,g,t,f,f,c,b,w,t,b,f,s,w,w,p,w,o,p,h,v,g p,f,f,g,f,f,f,c,b,p,e,b,k,k,n,p,p,w,o,l,h,v,p e,x,y,r,f,n,f,c,n,w,e,?,s,f,w,w,p,w,o,f,h,y,d p,x,f,y,f,f,f,c,b,g,e,b,k,k,p,b,p,w,o,l,h,v,p p,x,y,n,f,f,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,l p,x,y,n,f,s,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,l e,f,y,e,t,n,f,c,b,w,e,?,s,s,e,w,p,w,t,e,w,c,w p,x,s,g,t,f,f,c,b,w,t,b,s,f,w,w,p,w,o,p,h,v,u e,k,y,n,t,n,f,c,b,w,e,?,s,s,w,e,p,w,t,e,w,c,w e,f,y,e,t,n,f,c,b,e,e,?,s,s,w,w,p,w,t,e,w,c,w 
p,x,y,n,f,f,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,p p,b,y,n,f,n,f,c,n,w,e,?,k,y,w,y,p,w,o,e,w,v,d p,x,y,n,f,s,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,p e,f,y,b,t,n,f,c,b,e,e,?,s,s,w,e,p,w,t,e,w,c,w p,x,s,w,t,f,f,c,b,h,t,b,f,s,w,w,p,w,o,p,h,v,g p,f,y,y,f,f,f,c,b,p,e,b,k,k,p,b,p,w,o,l,h,v,g e,f,y,n,t,n,f,c,b,w,e,?,s,s,w,w,p,w,t,e,w,c,w p,b,y,w,t,n,f,c,b,g,e,b,s,s,w,w,p,w,t,p,r,v,m e,x,y,n,f,n,f,w,n,w,e,b,s,f,w,n,p,w,o,e,w,v,l p,f,y,y,f,f,f,c,b,g,e,b,k,k,n,p,p,w,o,l,h,v,p p,x,s,b,t,f,f,c,b,h,t,b,f,s,w,w,p,w,o,p,h,s,g p,x,y,n,f,s,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,l p,x,s,g,t,f,f,c,b,h,t,b,s,s,w,w,p,w,o,p,h,s,u p,f,s,g,t,f,f,c,b,w,t,b,s,f,w,w,p,w,o,p,h,s,u p,x,y,n,f,f,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,p e,f,y,w,f,n,f,c,n,p,e,?,s,f,w,w,p,w,o,f,h,v,d p,x,y,n,f,s,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,l p,f,f,g,f,f,f,c,b,g,e,b,k,k,n,b,p,w,o,l,h,v,g p,x,y,e,f,y,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,l p,x,s,w,t,f,f,c,b,w,t,b,s,s,w,w,p,w,o,p,h,v,g p,f,y,g,f,f,f,c,b,p,e,b,k,k,n,b,p,w,o,l,h,v,d p,x,s,w,t,f,f,c,b,h,t,b,s,f,w,w,p,w,o,p,h,s,u p,k,y,y,f,n,f,c,n,w,e,?,k,y,w,n,p,w,o,e,w,v,d p,f,s,w,t,f,f,c,b,w,t,b,s,f,w,w,p,w,o,p,h,v,u p,f,s,g,t,f,f,c,b,h,t,b,f,f,w,w,p,w,o,p,h,v,u e,x,y,c,f,n,f,w,n,w,e,b,f,s,w,n,p,w,o,e,w,v,l p,x,s,w,t,f,f,c,b,h,t,b,s,s,w,w,p,w,o,p,h,v,u p,x,y,n,f,s,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,p p,f,y,p,t,n,f,c,b,r,e,b,s,s,w,w,p,w,t,p,r,v,g p,f,y,y,f,f,f,c,b,p,e,b,k,k,p,b,p,w,o,l,h,y,p e,x,s,p,t,n,f,c,b,e,e,?,s,s,e,e,p,w,t,e,w,c,w p,f,y,b,t,n,f,c,b,r,e,b,s,s,w,w,p,w,t,p,r,v,g p,x,f,y,f,n,f,c,n,w,e,?,k,y,w,n,p,w,o,e,w,v,d p,f,y,y,f,f,f,c,b,h,e,b,k,k,p,b,p,w,o,l,h,y,p e,f,f,n,f,n,f,w,n,w,e,b,f,f,w,n,p,w,o,e,w,v,l e,x,f,n,f,n,f,w,n,w,e,b,f,s,w,n,p,w,o,e,w,v,l p,x,y,n,f,y,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,l p,f,s,g,t,f,f,c,b,p,t,b,f,s,w,w,p,w,o,p,h,v,u p,x,s,b,t,f,f,c,b,h,t,b,f,f,w,w,p,w,o,p,h,s,u p,f,s,g,t,f,f,c,b,w,t,b,s,s,w,w,p,w,o,p,h,v,g p,x,s,b,t,f,f,c,b,h,t,b,f,f,w,w,p,w,o,p,h,s,g e,x,y,r,f,n,f,c,n,h,e,?,s,f,w,w,p,w,o,f,h,v,d p,f,y,y,f,f,f,c,b,h,e,b,k,k,b,b,p,w,o,l,h,y,d p,f,s,g,t,f,f,c,b,h,t,b,f,s,w,w,p,w,o,p,h,s,u p,x,y,n,f,f,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,p p,f,y,y,f,f,f,c,b,h,e,b,k,k,p,b,p,w,o,l,h,y,g p,f,s,w,t,f,f,c,b,w,t,b,f,f,w,w,p,w,o,p,h,v,u p,x,s,w,t,f,f,c,b,w,t,b,f,s,w,w,p,w,o,p,h,s,u p,f,y,g,f,f,f,c,b,p,e,b,k,k,p,p,p,w,o,l,h,y,p p,f,y,y,f,f,f,c,b,h,e,b,k,k,b,b,p,w,o,l,h,y,p p,x,y,n,f,f,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,d p,x,y,y,f,n,f,c,n,w,e,?,k,y,w,n,p,w,o,e,w,v,d e,f,s,n,t,n,f,c,b,e,e,?,s,s,e,e,p,w,t,e,w,c,w p,f,y,y,f,f,f,c,b,p,e,b,k,k,p,p,p,w,o,l,h,y,d p,f,s,g,t,f,f,c,b,h,t,b,s,f,w,w,p,w,o,p,h,s,u p,x,y,n,f,y,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,l p,f,y,y,f,f,f,c,b,h,e,b,k,k,n,b,p,w,o,l,h,v,d p,f,y,y,f,f,f,c,b,g,e,b,k,k,b,n,p,w,o,l,h,y,d p,f,s,w,t,f,f,c,b,h,t,b,s,s,w,w,p,w,o,p,h,s,u p,f,s,p,t,n,f,c,b,w,e,b,s,s,w,w,p,w,t,p,r,v,m p,f,y,y,f,f,f,c,b,h,e,b,k,k,p,n,p,w,o,l,h,y,g e,k,s,p,t,n,f,c,b,w,e,?,s,s,w,e,p,w,t,e,w,c,w p,x,s,b,t,f,f,c,b,h,t,b,s,s,w,w,p,w,o,p,h,s,g p,x,s,w,t,f,f,c,b,p,t,b,f,s,w,w,p,w,o,p,h,s,g p,x,s,w,t,f,f,c,b,h,t,b,f,f,w,w,p,w,o,p,h,s,g p,x,y,n,f,f,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,d p,b,y,p,t,n,f,c,b,r,e,b,s,s,w,w,p,w,t,p,r,v,m p,x,s,b,t,f,f,c,b,h,t,b,f,s,w,w,p,w,o,p,h,s,u e,x,y,p,t,n,f,c,b,w,e,?,s,s,w,w,p,w,t,e,w,c,w p,x,y,n,f,s,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,p p,x,y,y,f,f,f,c,b,p,e,b,k,k,n,b,p,w,o,l,h,v,p p,x,s,w,t,f,f,c,b,p,t,b,s,f,w,w,p,w,o,p,h,v,u e,f,y,b,t,n,f,c,b,e,e,?,s,s,e,w,p,w,t,e,w,c,w p,f,s,b,t,f,f,c,b,w,t,b,f,s,w,w,p,w,o,p,h,v,u e,x,f,c,f,n,f,w,n,w,e,b,f,f,w,n,p,w,o,e,w,v,l e,f,s,p,t,n,f,c,b,e,e,?,s,s,w,e,p,w,t,e,w,c,w e,x,y,n,f,n,f,w,n,w,e,b,f,f,w,n,p,w,o,e,w,v,l 
e,f,s,p,t,n,f,c,b,w,e,?,s,s,e,e,p,w,t,e,w,c,w p,f,s,g,t,f,f,c,b,w,t,b,s,s,w,w,p,w,o,p,h,v,u e,k,s,e,t,n,f,c,b,w,e,?,s,s,w,e,p,w,t,e,w,c,w e,k,y,e,t,n,f,c,b,w,e,?,s,s,w,e,p,w,t,e,w,c,w p,f,s,w,t,f,f,c,b,p,t,b,s,f,w,w,p,w,o,p,h,v,g e,k,s,b,t,n,f,c,b,e,e,?,s,s,e,w,p,w,t,e,w,c,w p,x,s,b,t,f,f,c,b,p,t,b,f,f,w,w,p,w,o,p,h,v,g p,b,y,y,f,n,f,c,n,w,e,?,k,y,w,y,p,w,o,e,w,v,d p,x,s,w,t,f,f,c,b,h,t,b,s,s,w,w,p,w,o,p,h,s,u p,x,f,y,f,f,f,c,b,h,e,b,k,k,n,p,p,w,o,l,h,v,g p,f,y,y,f,f,f,c,b,p,e,b,k,k,p,n,p,w,o,l,h,y,p p,b,f,n,f,n,f,c,n,w,e,?,k,y,w,y,p,w,o,e,w,v,d p,f,y,y,f,f,f,c,b,p,e,b,k,k,p,p,p,w,o,l,h,y,p p,f,y,y,f,f,f,c,b,p,e,b,k,k,b,b,p,w,o,l,h,v,p e,f,y,c,f,n,f,w,n,w,e,b,s,s,w,n,p,w,o,e,w,v,l p,x,y,e,f,y,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,d p,f,s,w,t,f,f,c,b,h,t,b,s,s,w,w,p,w,o,p,h,s,g p,x,y,n,f,f,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,p p,b,y,p,t,n,f,c,b,r,e,b,s,s,w,w,p,w,t,p,r,v,g p,f,y,y,f,f,f,c,b,g,e,b,k,k,p,p,p,w,o,l,h,y,d p,x,s,b,t,f,f,c,b,w,t,b,s,f,w,w,p,w,o,p,h,s,g p,f,y,y,f,f,f,c,b,g,e,b,k,k,n,b,p,w,o,l,h,v,p p,f,s,b,t,f,f,c,b,p,t,b,f,s,w,w,p,w,o,p,h,s,g p,f,s,b,t,f,f,c,b,h,t,b,s,s,w,w,p,w,o,p,h,s,u p,x,y,n,f,y,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,p p,f,y,y,f,f,f,c,b,h,e,b,k,k,p,n,p,w,o,l,h,v,p e,x,s,n,t,n,f,c,b,e,e,?,s,s,w,w,p,w,t,e,w,c,w p,f,y,y,f,f,f,c,b,g,e,b,k,k,n,b,p,w,o,l,h,v,g e,f,s,b,t,n,f,c,b,e,e,?,s,s,w,e,p,w,t,e,w,c,w e,f,s,e,t,n,f,c,b,w,e,?,s,s,e,w,p,w,t,e,w,c,w e,x,s,n,t,n,f,c,b,e,e,?,s,s,e,e,p,w,t,e,w,c,w p,f,s,w,t,f,f,c,b,p,t,b,f,s,w,w,p,w,o,p,h,v,u p,f,y,y,f,f,f,c,b,h,e,b,k,k,b,p,p,w,o,l,h,v,d e,f,s,p,t,n,f,c,b,e,e,?,s,s,e,w,p,w,t,e,w,c,w e,x,s,e,t,n,f,c,b,w,e,?,s,s,w,e,p,w,t,e,w,c,w p,f,y,y,f,f,f,c,b,g,e,b,k,k,b,n,p,w,o,l,h,v,d p,b,y,b,t,n,f,c,b,g,e,b,s,s,w,w,p,w,t,p,r,v,m p,f,s,b,t,f,f,c,b,h,t,b,s,f,w,w,p,w,o,p,h,s,u p,f,s,g,t,f,f,c,b,w,t,b,f,s,w,w,p,w,o,p,h,s,g p,x,s,b,t,f,f,c,b,p,t,b,s,s,w,w,p,w,o,p,h,s,g p,x,f,y,f,f,f,c,b,h,e,b,k,k,p,n,p,w,o,l,h,y,p p,f,s,w,t,f,f,c,b,w,t,b,s,s,w,w,p,w,o,p,h,s,g p,x,s,g,t,f,f,c,b,w,t,b,f,s,w,w,p,w,o,p,h,s,u e,f,y,r,f,n,f,c,n,h,e,?,s,f,w,w,p,w,o,f,h,y,d p,f,y,y,f,f,f,c,b,p,e,b,k,k,b,p,p,w,o,l,h,v,g p,x,y,n,f,f,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,d p,x,s,b,t,f,f,c,b,w,t,b,f,s,w,w,p,w,o,p,h,s,g e,x,f,c,f,n,f,w,n,w,e,b,f,s,w,n,p,w,o,e,w,v,l p,x,s,g,t,f,f,c,b,h,t,b,f,s,w,w,p,w,o,p,h,v,u e,x,y,u,f,n,f,c,n,p,e,?,s,f,w,w,p,w,o,f,h,y,d p,x,f,y,f,f,f,c,b,h,e,b,k,k,b,n,p,w,o,l,h,v,p p,f,f,g,f,f,f,c,b,p,e,b,k,k,p,p,p,w,o,l,h,v,g p,f,y,w,t,n,f,c,b,g,e,b,s,s,w,w,p,w,t,p,r,v,m p,x,y,n,f,y,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,d p,x,y,n,f,s,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,l p,x,y,g,f,f,f,c,b,h,e,b,k,k,p,n,p,w,o,l,h,v,p e,x,y,p,t,n,f,c,b,e,e,?,s,s,e,e,p,w,t,e,w,c,w p,f,s,g,t,f,f,c,b,w,t,b,f,f,w,w,p,w,o,p,h,s,g p,x,s,b,t,f,f,c,b,w,t,b,s,f,w,w,p,w,o,p,h,v,g p,x,y,n,f,y,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,p p,x,s,g,t,f,f,c,b,h,t,b,f,s,w,w,p,w,o,p,h,s,u p,x,y,n,f,y,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,p p,f,s,w,t,f,f,c,b,p,t,b,f,f,w,w,p,w,o,p,h,s,g p,x,s,g,t,f,f,c,b,w,t,b,s,s,w,w,p,w,o,p,h,v,u p,x,s,g,t,f,f,c,b,w,t,b,s,f,w,w,p,w,o,p,h,s,g p,f,y,y,f,f,f,c,b,g,e,b,k,k,p,p,p,w,o,l,h,v,p p,f,y,y,f,f,f,c,b,p,e,b,k,k,n,n,p,w,o,l,h,v,p p,f,y,g,f,f,f,c,b,p,e,b,k,k,b,n,p,w,o,l,h,y,g p,x,y,y,f,f,f,c,b,g,e,b,k,k,n,p,p,w,o,l,h,y,g p,x,s,g,t,f,f,c,b,w,t,b,f,f,w,w,p,w,o,p,h,s,u p,f,s,b,t,f,f,c,b,p,t,b,f,f,w,w,p,w,o,p,h,s,u p,f,s,w,t,f,f,c,b,h,t,b,f,s,w,w,p,w,o,p,h,s,u p,f,f,g,f,f,f,c,b,p,e,b,k,k,p,n,p,w,o,l,h,y,g p,f,s,b,t,f,f,c,b,h,t,b,f,f,w,w,p,w,o,p,h,s,g p,x,y,n,f,s,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,d p,x,s,w,t,f,f,c,b,w,t,b,s,s,w,w,p,w,o,p,h,s,g e,k,s,e,t,n,f,c,b,e,e,?,s,s,e,w,p,w,t,e,w,c,w 
p,f,y,b,t,n,f,c,b,w,e,b,s,s,w,w,p,w,t,p,r,v,m p,x,y,n,f,y,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,p e,k,y,n,t,n,f,c,b,e,e,?,s,s,w,e,p,w,t,e,w,c,w p,f,s,w,t,f,f,c,b,w,t,b,s,s,w,w,p,w,o,p,h,v,u p,f,s,w,t,f,f,c,b,p,t,b,s,s,w,w,p,w,o,p,h,v,u p,x,y,n,f,y,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,d p,x,s,w,t,f,f,c,b,w,t,b,f,s,w,w,p,w,o,p,h,s,g p,x,y,n,f,y,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,l p,b,s,p,t,n,f,c,b,w,e,b,s,s,w,w,p,w,t,p,r,v,g p,x,y,e,f,y,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,d p,f,y,g,f,f,f,c,b,g,e,b,k,k,n,n,p,w,o,l,h,v,p p,x,y,y,f,f,f,c,b,g,e,b,k,k,p,n,p,w,o,l,h,v,d p,x,y,g,f,f,f,c,b,h,e,b,k,k,n,b,p,w,o,l,h,v,g p,f,y,w,t,n,f,c,b,r,e,b,s,s,w,w,p,w,t,p,r,v,m p,f,s,g,t,f,f,c,b,h,t,b,f,f,w,w,p,w,o,p,h,s,g e,k,y,p,t,n,f,c,b,w,e,?,s,s,e,e,p,w,t,e,w,c,w p,x,s,w,t,f,f,c,b,w,t,b,f,s,w,w,p,w,o,p,h,v,g e,k,s,p,t,n,f,c,b,e,e,?,s,s,w,w,p,w,t,e,w,c,w p,f,s,w,t,f,f,c,b,p,t,b,s,f,w,w,p,w,o,p,h,s,u p,x,y,n,f,y,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,p p,x,f,y,f,f,f,c,b,p,e,b,k,k,n,n,p,w,o,l,h,v,d p,f,y,g,f,f,f,c,b,p,e,b,k,k,p,n,p,w,o,l,h,v,p p,x,y,g,f,f,f,c,b,p,e,b,k,k,p,p,p,w,o,l,h,y,p e,x,y,e,t,n,f,c,b,w,e,?,s,s,w,e,p,w,t,e,w,c,w p,f,y,y,f,f,f,c,b,g,e,b,k,k,p,b,p,w,o,l,h,v,p p,x,y,n,f,s,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,d p,x,s,b,t,f,f,c,b,w,t,b,f,f,w,w,p,w,o,p,h,v,g p,f,y,y,f,f,f,c,b,h,e,b,k,k,p,b,p,w,o,l,h,y,d e,k,y,e,t,n,f,c,b,e,e,?,s,s,e,w,p,w,t,e,w,c,w p,f,y,p,t,n,f,c,b,g,e,b,s,s,w,w,p,w,t,p,r,v,m p,f,y,w,t,n,f,c,b,w,e,b,s,s,w,w,p,w,t,p,r,v,m p,f,y,y,f,f,f,c,b,g,e,b,k,k,b,b,p,w,o,l,h,y,p p,f,y,y,f,f,f,c,b,h,e,b,k,k,p,b,p,w,o,l,h,v,g e,f,y,r,f,n,f,c,n,w,e,?,s,f,w,w,p,w,o,f,h,v,d p,x,s,w,t,f,f,c,b,p,t,b,s,f,w,w,p,w,o,p,h,s,g p,x,s,w,t,f,f,c,b,p,t,b,f,f,w,w,p,w,o,p,h,s,g p,x,s,b,t,f,f,c,b,p,t,b,f,s,w,w,p,w,o,p,h,s,u p,x,y,n,f,y,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,p e,k,y,e,t,n,f,c,b,e,e,?,s,s,e,e,p,w,t,e,w,c,w e,k,s,p,t,n,f,c,b,w,e,?,s,s,w,w,p,w,t,e,w,c,w e,x,y,b,t,n,f,c,b,w,e,?,s,s,w,e,p,w,t,e,w,c,w p,x,s,g,t,f,f,c,b,h,t,b,s,s,w,w,p,w,o,p,h,v,g p,f,y,y,f,f,f,c,b,p,e,b,k,k,p,p,p,w,o,l,h,y,g p,x,y,n,f,y,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,l p,c,y,w,t,n,f,w,n,w,e,b,s,s,w,w,p,w,o,p,w,c,l p,f,s,w,t,f,f,c,b,p,t,b,f,f,w,w,p,w,o,p,h,v,g p,f,s,g,t,f,f,c,b,w,t,b,s,f,w,w,p,w,o,p,h,s,g p,x,y,e,f,y,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,l p,x,s,g,t,f,f,c,b,w,t,b,s,s,w,w,p,w,o,p,h,s,g p,x,y,n,f,s,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,d e,x,y,p,t,n,f,c,b,e,e,?,s,s,e,w,p,w,t,e,w,c,w e,f,y,w,f,n,f,c,n,h,e,?,s,f,w,w,p,w,o,f,h,y,d p,f,y,y,f,f,f,c,b,h,e,b,k,k,b,p,p,w,o,l,h,y,g p,x,y,n,f,y,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,d p,x,f,y,f,f,f,c,b,g,e,b,k,k,p,n,p,w,o,l,h,y,d p,x,y,n,f,s,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,l p,x,s,g,t,f,f,c,b,h,t,b,s,f,w,w,p,w,o,p,h,v,u p,x,y,n,f,s,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,d e,k,f,n,f,n,f,w,n,w,e,b,s,f,w,n,p,w,o,e,w,v,l p,x,s,w,t,f,f,c,b,p,t,b,f,s,w,w,p,w,o,p,h,v,u p,f,y,y,f,f,f,c,b,p,e,b,k,k,n,p,p,w,o,l,h,y,p e,x,y,p,t,n,f,c,b,e,e,?,s,s,w,e,p,w,t,e,w,c,w p,b,y,b,t,n,f,c,b,r,e,b,s,s,w,w,p,w,t,p,r,v,m p,f,y,g,f,f,f,c,b,h,e,b,k,k,b,b,p,w,o,l,h,v,p e,f,y,r,f,n,f,c,n,h,e,?,s,f,w,w,p,w,o,f,h,v,d p,x,y,n,f,f,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,l p,x,y,n,f,y,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,p p,f,s,b,t,f,f,c,b,p,t,b,f,f,w,w,p,w,o,p,h,v,g p,f,y,y,f,f,f,c,b,g,e,b,k,k,n,n,p,w,o,l,h,v,d p,x,y,n,f,f,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,d e,f,y,n,t,n,f,c,b,e,e,?,s,s,w,e,p,w,t,e,w,c,w p,x,y,n,f,f,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,l e,f,y,e,t,n,f,c,b,w,e,?,s,s,w,e,p,w,t,e,w,c,w e,x,y,b,t,n,f,c,b,w,e,?,s,s,w,w,p,w,t,e,w,c,w e,x,s,n,t,n,f,c,b,e,e,?,s,s,w,e,p,w,t,e,w,c,w p,f,s,w,t,f,f,c,b,w,t,b,s,f,w,w,p,w,o,p,h,v,g e,x,y,b,t,n,f,c,b,e,e,?,s,s,w,w,p,w,t,e,w,c,w 
p,x,f,y,f,f,f,c,b,g,e,b,k,k,n,p,p,w,o,l,h,y,g p,f,y,y,f,f,f,c,b,g,e,b,k,k,n,n,p,w,o,l,h,y,d p,f,y,y,f,f,f,c,b,g,e,b,k,k,n,n,p,w,o,l,h,y,g e,k,y,e,t,n,f,c,b,w,e,?,s,s,w,w,p,w,t,e,w,c,w p,f,s,w,t,f,f,c,b,p,t,b,s,f,w,w,p,w,o,p,h,v,u e,k,y,c,f,n,f,w,n,w,e,b,s,s,w,n,p,w,o,e,w,v,l e,k,y,n,f,n,f,w,n,w,e,b,s,s,w,n,p,w,o,e,w,v,l p,b,s,p,t,n,f,c,b,r,e,b,s,s,w,w,p,w,t,p,r,v,m p,x,y,n,f,y,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,p p,x,s,b,t,f,f,c,b,h,t,b,s,f,w,w,p,w,o,p,h,s,u e,f,y,r,f,n,f,c,n,p,e,?,s,f,w,w,p,w,o,f,h,v,d p,x,s,b,t,f,f,c,b,w,t,b,f,f,w,w,p,w,o,p,h,s,g e,f,y,u,f,n,f,c,n,p,e,?,s,f,w,w,p,w,o,f,h,y,d p,x,y,n,f,y,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,p p,x,s,w,t,f,f,c,b,p,t,b,s,s,w,w,p,w,o,p,h,v,u e,x,s,e,t,n,f,c,b,w,e,?,s,s,e,w,p,w,t,e,w,c,w e,f,y,p,t,n,f,c,b,e,e,?,s,s,w,e,p,w,t,e,w,c,w p,x,y,n,f,s,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,p p,x,y,n,f,f,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,p p,x,s,w,t,f,f,c,b,p,t,b,s,s,w,w,p,w,o,p,h,v,g p,f,y,y,f,f,f,c,b,p,e,b,k,k,p,n,p,w,o,l,h,v,p e,x,y,n,t,n,f,c,b,e,e,?,s,s,w,w,p,w,t,e,w,c,w e,x,y,e,t,n,f,c,b,e,e,?,s,s,e,w,p,w,t,e,w,c,w p,x,s,w,t,f,f,c,b,w,t,b,s,f,w,w,p,w,o,p,h,v,u p,x,y,y,f,f,f,c,b,h,e,b,k,k,b,p,p,w,o,l,h,v,g p,f,s,b,t,n,f,c,b,g,e,b,s,s,w,w,p,w,t,p,r,v,m p,f,f,y,f,f,f,c,b,h,e,b,k,k,b,p,p,w,o,l,h,v,p p,x,s,g,t,f,f,c,b,p,t,b,s,f,w,w,p,w,o,p,h,v,g e,f,s,p,t,n,f,c,b,w,e,?,s,s,e,w,p,w,t,e,w,c,w e,x,y,n,t,n,f,c,b,e,e,?,s,s,w,e,p,w,t,e,w,c,w p,b,y,w,t,n,f,c,b,r,e,b,s,s,w,w,p,w,t,p,r,v,m p,f,y,y,f,f,f,c,b,g,e,b,k,k,b,b,p,w,o,l,h,v,d p,x,y,e,f,y,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,d p,x,s,b,t,f,f,c,b,h,t,b,s,f,w,w,p,w,o,p,h,s,g p,x,s,b,t,f,f,c,b,h,t,b,s,s,w,w,p,w,o,p,h,v,g p,x,y,n,f,s,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,p p,b,y,p,t,n,f,c,b,g,e,b,s,s,w,w,p,w,t,p,r,v,g e,x,y,c,f,n,f,w,n,w,e,b,s,s,w,n,p,w,o,e,w,v,l p,x,y,n,f,f,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,d p,x,s,w,t,f,f,c,b,w,t,b,s,s,w,w,p,w,o,p,h,s,u p,x,y,n,f,f,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,d e,f,y,n,f,n,f,w,n,w,e,b,f,s,w,n,p,w,o,e,w,v,l p,x,y,n,f,y,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,p p,f,s,g,t,f,f,c,b,p,t,b,s,s,w,w,p,w,o,p,h,s,u p,x,y,e,f,y,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,d e,x,y,r,f,n,f,c,n,h,e,?,s,f,w,w,p,w,o,f,h,y,d p,b,y,p,t,n,f,c,b,w,e,b,s,s,w,w,p,w,t,p,r,v,m e,x,s,p,t,n,f,c,b,w,e,?,s,s,e,w,p,w,t,e,w,c,w p,x,y,n,f,y,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,l p,x,y,n,f,f,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,p p,f,s,g,t,f,f,c,b,w,t,b,f,f,w,w,p,w,o,p,h,v,g p,f,y,y,f,f,f,c,b,p,e,b,k,k,p,b,p,w,o,l,h,v,p p,f,y,p,t,n,f,c,b,r,e,b,s,s,w,w,p,w,t,p,r,v,m p,x,y,n,f,s,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,p p,x,y,n,f,s,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,d e,f,s,b,t,n,f,c,b,e,e,?,s,s,e,w,p,w,t,e,w,c,w p,f,s,b,t,f,f,c,b,p,t,b,s,s,w,w,p,w,o,p,h,v,u e,x,y,c,f,n,f,w,n,w,e,b,s,f,w,n,p,w,o,e,w,v,l e,x,y,w,f,n,f,c,n,w,e,?,s,f,w,w,p,w,o,f,h,y,d p,x,y,n,f,y,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,l e,f,y,n,f,n,f,w,n,w,e,b,s,s,w,n,p,w,o,e,w,v,l p,x,y,n,f,f,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,d p,x,y,n,f,s,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,d e,k,y,p,t,n,f,c,b,e,e,?,s,s,e,w,p,w,t,e,w,c,w p,f,s,w,t,n,f,c,b,r,e,b,s,s,w,w,p,w,t,p,r,v,m p,x,y,n,f,f,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,l p,f,y,y,f,f,f,c,b,g,e,b,k,k,n,n,p,w,o,l,h,v,g e,k,y,n,f,n,f,w,n,w,e,b,s,f,w,n,p,w,o,e,w,v,l p,f,s,g,t,f,f,c,b,p,t,b,f,s,w,w,p,w,o,p,h,v,g p,f,s,w,t,n,f,c,b,r,e,b,s,s,w,w,p,w,t,p,r,v,g p,f,f,g,f,f,f,c,b,h,e,b,k,k,n,p,p,w,o,l,h,y,g e,f,s,n,t,n,f,c,b,w,e,?,s,s,w,e,p,w,t,e,w,c,w p,x,f,y,f,f,f,c,b,p,e,b,k,k,p,p,p,w,o,l,h,y,d p,x,y,n,f,s,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,l p,x,y,e,f,y,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,p p,f,y,y,f,f,f,c,b,g,e,b,k,k,p,n,p,w,o,l,h,v,g p,b,s,w,t,n,f,c,b,g,e,b,s,s,w,w,p,w,t,p,r,v,g 
p,x,s,b,t,f,f,c,b,p,t,b,s,f,w,w,p,w,o,p,h,v,u e,f,y,b,t,n,f,c,b,w,e,?,s,s,w,e,p,w,t,e,w,c,w e,x,f,c,f,n,f,w,n,w,e,b,s,f,w,n,p,w,o,e,w,v,l p,f,s,w,t,f,f,c,b,h,t,b,f,s,w,w,p,w,o,p,h,v,u e,k,s,p,t,n,f,c,b,e,e,?,s,s,e,w,p,w,t,e,w,c,w e,k,y,n,t,n,f,c,b,e,e,?,s,s,e,w,p,w,t,e,w,c,w e,x,y,b,t,n,f,c,b,e,e,?,s,s,e,e,p,w,t,e,w,c,w p,x,f,y,f,f,f,c,b,h,e,b,k,k,p,b,p,w,o,l,h,y,g p,f,s,g,t,f,f,c,b,h,t,b,f,s,w,w,p,w,o,p,h,v,g p,f,y,y,f,f,f,c,b,g,e,b,k,k,b,b,p,w,o,l,h,v,p e,x,y,e,t,n,f,c,b,e,e,?,s,s,w,e,p,w,t,e,w,c,w p,f,s,b,t,f,f,c,b,p,t,b,f,s,w,w,p,w,o,p,h,v,g e,x,y,n,f,n,f,w,n,w,e,b,s,s,w,n,p,w,o,e,w,v,l p,f,y,y,f,f,f,c,b,g,e,b,k,k,p,n,p,w,o,l,h,v,d p,f,f,g,f,f,f,c,b,g,e,b,k,k,b,n,p,w,o,l,h,v,d p,b,y,b,t,n,f,c,b,r,e,b,s,s,w,w,p,w,t,p,r,v,g p,x,y,e,f,y,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,p p,f,y,y,f,f,f,c,b,p,e,b,k,k,b,n,p,w,o,l,h,y,p p,f,f,y,f,f,f,c,b,g,e,b,k,k,n,p,p,w,o,l,h,v,p p,f,y,w,t,n,f,c,b,g,e,b,s,s,w,w,p,w,t,p,r,v,g e,k,s,e,t,n,f,c,b,e,e,?,s,s,w,e,p,w,t,e,w,c,w p,f,y,y,f,f,f,c,b,g,e,b,k,k,n,n,p,w,o,l,h,v,p e,k,s,n,t,n,f,c,b,e,e,?,s,s,w,e,p,w,t,e,w,c,w p,f,y,y,f,n,f,c,n,w,e,?,k,y,w,n,p,w,o,e,w,v,d p,f,y,g,f,f,f,c,b,g,e,b,k,k,p,n,p,w,o,l,h,y,p p,f,y,y,f,f,f,c,b,g,e,b,k,k,p,b,p,w,o,l,h,v,g p,x,y,e,f,y,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,p e,f,s,b,t,n,f,c,b,w,e,?,s,s,e,e,p,w,t,e,w,c,w p,f,s,w,t,f,f,c,b,w,t,b,f,s,w,w,p,w,o,p,h,v,u p,x,s,w,t,f,f,c,b,w,t,b,f,f,w,w,p,w,o,p,h,s,g p,f,y,g,f,f,f,c,b,h,e,b,k,k,p,p,p,w,o,l,h,y,d e,f,y,p,t,n,f,c,b,w,e,?,s,s,e,w,p,w,t,e,w,c,w p,f,y,y,f,f,f,c,b,p,e,b,k,k,n,n,p,w,o,l,h,y,g p,f,y,y,f,f,f,c,b,p,e,b,k,k,n,n,p,w,o,l,h,y,p e,k,s,e,t,n,f,c,b,w,e,?,s,s,e,e,p,w,t,e,w,c,w e,x,y,n,t,n,f,c,b,e,e,?,s,s,e,e,p,w,t,e,w,c,w p,x,y,y,f,f,f,c,b,h,e,b,k,k,p,n,p,w,o,l,h,v,g p,f,s,b,t,f,f,c,b,p,t,b,s,f,w,w,p,w,o,p,h,v,g p,x,y,g,f,f,f,c,b,g,e,b,k,k,p,p,p,w,o,l,h,v,d p,f,s,g,t,f,f,c,b,w,t,b,f,f,w,w,p,w,o,p,h,s,u p,x,s,w,t,f,f,c,b,p,t,b,f,s,w,w,p,w,o,p,h,v,g p,x,f,n,f,n,f,c,n,w,e,?,k,y,w,y,p,w,o,e,w,v,d p,f,s,b,t,f,f,c,b,h,t,b,s,f,w,w,p,w,o,p,h,s,g e,f,y,e,t,n,f,c,b,e,e,?,s,s,e,e,p,w,t,e,w,c,w p,f,y,y,f,f,f,c,b,g,e,b,k,k,p,n,p,w,o,l,h,y,g p,x,s,b,t,f,f,c,b,p,t,b,s,f,w,w,p,w,o,p,h,s,u e,k,f,n,f,n,f,w,n,w,e,b,f,s,w,n,p,w,o,e,w,v,l p,f,y,y,f,f,f,c,b,p,e,b,k,k,b,b,p,w,o,l,h,y,g e,k,y,c,f,n,f,w,n,w,e,b,f,f,w,n,p,w,o,e,w,v,l p,x,f,g,f,f,f,c,b,p,e,b,k,k,p,p,p,w,o,l,h,y,p p,x,y,n,f,y,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,p e,f,y,p,t,n,f,c,b,w,e,?,s,s,w,w,p,w,t,e,w,c,w p,x,s,b,t,f,f,c,b,h,t,b,f,f,w,w,p,w,o,p,h,v,g p,x,y,g,f,f,f,c,b,h,e,b,k,k,b,n,p,w,o,l,h,y,d p,f,y,y,f,f,f,c,b,p,e,b,k,k,p,b,p,w,o,l,h,y,g p,x,y,e,f,y,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,d e,f,y,r,f,n,f,c,n,w,e,?,s,f,w,w,p,w,o,f,h,y,d p,f,y,y,f,f,f,c,b,p,e,b,k,k,n,b,p,w,o,l,h,y,p p,x,s,g,t,f,f,c,b,h,t,b,f,f,w,w,p,w,o,p,h,v,g p,x,s,w,t,f,f,c,b,w,t,b,f,f,w,w,p,w,o,p,h,v,g p,f,s,b,t,f,f,c,b,w,t,b,f,s,w,w,p,w,o,p,h,s,g e,x,y,n,t,n,f,c,b,w,e,?,s,s,e,e,p,w,t,e,w,c,w p,f,f,y,f,n,f,c,n,w,e,?,k,y,w,n,p,w,o,e,w,v,d e,f,y,u,f,n,f,c,n,w,e,?,s,f,w,w,p,w,o,f,h,v,d p,x,y,n,f,y,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,l e,x,s,n,t,n,f,c,b,w,e,?,s,s,w,w,p,w,t,e,w,c,w p,x,s,w,t,f,f,c,b,w,t,b,f,s,w,w,p,w,o,p,h,v,u p,f,y,y,f,f,f,c,b,p,e,b,k,k,p,b,p,w,o,l,h,v,d e,f,s,b,t,n,f,c,b,e,e,?,s,s,e,e,p,w,t,e,w,c,w e,k,f,c,f,n,f,w,n,w,e,b,s,s,w,n,p,w,o,e,w,v,l p,x,s,g,t,f,f,c,b,p,t,b,s,f,w,w,p,w,o,p,h,v,u p,f,y,g,f,f,f,c,b,h,e,b,k,k,p,n,p,w,o,l,h,v,p p,x,y,e,f,y,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,l e,f,y,p,t,n,f,c,b,e,e,?,s,s,e,w,p,w,t,e,w,c,w e,k,y,p,t,n,f,c,b,w,e,?,s,s,w,w,p,w,t,e,w,c,w p,x,y,y,f,f,f,c,b,h,e,b,k,k,p,b,p,w,o,l,h,v,g p,x,y,n,f,y,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,d 
e,f,s,n,t,n,f,c,b,e,e,?,s,s,e,w,p,w,t,e,w,c,w p,f,y,y,f,f,f,c,b,h,e,b,k,k,p,p,p,w,o,l,h,y,g p,x,y,n,f,s,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,d e,f,s,n,t,n,f,c,b,w,e,?,s,s,e,e,p,w,t,e,w,c,w p,f,y,y,f,f,f,c,b,p,e,b,k,k,p,p,p,w,o,l,h,v,d p,f,y,y,f,f,f,c,b,p,e,b,k,k,n,p,p,w,o,l,h,v,p p,f,y,y,f,f,f,c,b,g,e,b,k,k,b,n,p,w,o,l,h,y,p e,f,f,c,f,n,f,w,n,w,e,b,f,s,w,n,p,w,o,e,w,v,l p,b,y,b,t,n,f,c,b,w,e,b,s,s,w,w,p,w,t,p,r,v,g e,k,y,e,t,n,f,c,b,w,e,?,s,s,e,w,p,w,t,e,w,c,w p,x,s,b,t,f,f,c,b,w,t,b,f,f,w,w,p,w,o,p,h,v,u p,x,s,g,t,f,f,c,b,w,t,b,s,s,w,w,p,w,o,p,h,v,g p,f,y,y,f,f,f,c,b,p,e,b,k,k,b,n,p,w,o,l,h,v,g p,x,s,g,t,f,f,c,b,p,t,b,s,s,w,w,p,w,o,p,h,s,u p,x,y,e,f,y,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,p p,f,s,w,t,f,f,c,b,p,t,b,f,f,w,w,p,w,o,p,h,v,u e,f,s,p,t,n,f,c,b,w,e,?,s,s,w,w,p,w,t,e,w,c,w p,f,s,b,t,f,f,c,b,h,t,b,s,s,w,w,p,w,o,p,h,v,g p,x,y,e,f,y,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,p e,f,y,w,f,n,f,c,n,u,e,?,s,f,w,w,p,w,o,f,h,v,d e,x,y,p,t,n,f,c,b,w,e,?,s,s,w,e,p,w,t,e,w,c,w p,x,s,b,t,f,f,c,b,p,t,b,s,s,w,w,p,w,o,p,h,v,u p,k,g,w,t,n,f,w,n,w,e,b,s,s,w,w,p,w,o,p,w,c,l p,f,s,g,t,f,f,c,b,p,t,b,s,s,w,w,p,w,o,p,h,s,g p,x,y,n,f,s,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,p e,k,s,e,t,n,f,c,b,w,e,?,s,s,w,w,p,w,t,e,w,c,w p,x,y,n,f,y,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,d p,x,y,n,f,s,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,p p,f,s,w,t,f,f,c,b,h,t,b,f,f,w,w,p,w,o,p,h,s,g e,f,s,e,t,n,f,c,b,e,e,?,s,s,w,e,p,w,t,e,w,c,w p,f,s,b,t,f,f,c,b,p,t,b,s,s,w,w,p,w,o,p,h,s,g p,b,s,w,t,n,f,c,b,r,e,b,s,s,w,w,p,w,t,p,r,v,m e,f,f,n,f,n,f,w,n,w,e,b,f,s,w,n,p,w,o,e,w,v,l p,f,s,g,t,f,f,c,b,p,t,b,s,s,w,w,p,w,o,p,h,v,u p,x,s,w,t,f,f,c,b,h,t,b,s,s,w,w,p,w,o,p,h,v,g p,f,y,y,f,f,f,c,b,p,e,b,k,k,p,n,p,w,o,l,h,v,g e,x,s,b,t,n,f,c,b,e,e,?,s,s,e,w,p,w,t,e,w,c,w e,f,y,n,t,n,f,c,b,w,e,?,s,s,e,w,p,w,t,e,w,c,w e,x,y,r,f,n,f,c,n,u,e,?,s,f,w,w,p,w,o,f,h,v,d e,k,s,b,t,n,f,c,b,e,e,?,s,s,e,e,p,w,t,e,w,c,w p,x,s,w,t,f,f,c,b,h,t,b,s,f,w,w,p,w,o,p,h,v,u p,x,y,n,f,y,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,d e,x,y,b,t,n,f,c,b,w,e,?,s,s,e,e,p,w,t,e,w,c,w p,f,y,w,t,n,f,c,b,w,e,b,s,s,w,w,p,w,t,p,r,v,g p,f,s,b,t,f,f,c,b,w,t,b,s,f,w,w,p,w,o,p,h,s,u p,x,s,g,t,f,f,c,b,p,t,b,s,s,w,w,p,w,o,p,h,v,g e,k,s,n,t,n,f,c,b,w,e,?,s,s,w,w,p,w,t,e,w,c,w p,f,s,g,t,f,f,c,b,p,t,b,s,f,w,w,p,w,o,p,h,s,g e,k,y,p,t,n,f,c,b,e,e,?,s,s,e,e,p,w,t,e,w,c,w p,f,y,b,t,n,f,c,b,g,e,b,s,s,w,w,p,w,t,p,r,v,m p,x,y,n,f,s,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,d p,f,y,y,f,f,f,c,b,g,e,b,k,k,p,n,p,w,o,l,h,v,p p,x,s,b,t,f,f,c,b,w,t,b,f,f,w,w,p,w,o,p,h,s,u e,k,f,c,f,n,f,w,n,w,e,b,s,f,w,n,p,w,o,e,w,v,l p,f,s,g,t,f,f,c,b,p,t,b,s,f,w,w,p,w,o,p,h,v,u p,f,s,w,t,f,f,c,b,w,t,b,f,s,w,w,p,w,o,p,h,v,g p,b,s,b,t,n,f,c,b,r,e,b,s,s,w,w,p,w,t,p,r,v,m p,b,s,p,t,n,f,c,b,r,e,b,s,s,w,w,p,w,t,p,r,v,g p,f,s,w,t,f,f,c,b,w,t,b,s,f,w,w,p,w,o,p,h,s,g e,x,y,n,t,n,f,c,b,w,e,?,s,s,w,e,p,w,t,e,w,c,w p,x,y,n,f,s,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,l p,x,y,g,f,f,f,c,b,g,e,b,k,k,b,n,p,w,o,l,h,v,g e,x,s,b,t,n,f,c,b,w,e,?,s,s,w,w,p,w,t,e,w,c,w p,x,y,n,f,f,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,l p,x,s,b,t,f,f,c,b,p,t,b,s,s,w,w,p,w,o,p,h,s,u p,f,s,p,t,n,f,c,b,g,e,b,s,s,w,w,p,w,t,p,r,v,g p,x,y,n,f,y,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,l p,x,s,w,t,f,f,c,b,h,t,b,f,f,w,w,p,w,o,p,h,v,u p,f,s,w,t,f,f,c,b,h,t,b,s,s,w,w,p,w,o,p,h,v,u e,f,y,r,f,n,f,c,n,u,e,?,s,f,w,w,p,w,o,f,h,y,d p,f,s,w,t,f,f,c,b,h,t,b,f,s,w,w,p,w,o,p,h,s,g p,f,s,w,t,f,f,c,b,p,t,b,f,s,w,w,p,w,o,p,h,s,u e,x,y,w,f,n,f,c,n,u,e,?,s,f,w,w,p,w,o,f,h,v,d p,x,y,n,f,s,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,d e,k,y,b,t,n,f,c,b,w,e,?,s,s,w,w,p,w,t,e,w,c,w p,x,y,n,f,f,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,d p,x,y,n,f,y,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,l 
p,x,s,b,t,f,f,c,b,p,t,b,f,f,w,w,p,w,o,p,h,s,u e,f,y,r,f,n,f,c,n,p,e,?,s,f,w,w,p,w,o,f,h,y,d p,b,s,p,t,n,f,c,b,g,e,b,s,s,w,w,p,w,t,p,r,v,g p,x,s,b,t,f,f,c,b,p,t,b,f,f,w,w,p,w,o,p,h,v,u e,f,y,c,f,n,f,w,n,w,e,b,f,f,w,n,p,w,o,e,w,v,l e,x,y,u,f,n,f,c,n,p,e,?,s,f,w,w,p,w,o,f,h,v,d p,x,s,b,t,f,f,c,b,w,t,b,s,f,w,w,p,w,o,p,h,v,u e,x,y,w,f,n,f,c,n,w,e,?,s,f,w,w,p,w,o,f,h,v,d p,x,y,n,f,y,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,l e,x,s,p,t,n,f,c,b,e,e,?,s,s,e,w,p,w,t,e,w,c,w p,f,s,p,t,n,f,c,b,r,e,b,s,s,w,w,p,w,t,p,r,v,m p,f,y,n,f,n,f,c,n,w,e,?,k,y,w,y,p,w,o,e,w,v,d p,f,y,y,f,f,f,c,b,g,e,b,k,k,p,p,p,w,o,l,h,y,p p,f,s,w,t,f,f,c,b,h,t,b,s,f,w,w,p,w,o,p,h,v,g p,x,y,g,f,f,f,c,b,h,e,b,k,k,b,b,p,w,o,l,h,v,g e,f,y,e,t,n,f,c,b,w,e,?,s,s,e,e,p,w,t,e,w,c,w p,f,s,b,t,n,f,c,b,w,e,b,s,s,w,w,p,w,t,p,r,v,g p,b,s,p,t,n,f,c,b,w,e,b,s,s,w,w,p,w,t,p,r,v,m p,x,y,n,f,s,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,l p,x,s,b,t,f,f,c,b,p,t,b,f,s,w,w,p,w,o,p,h,v,u p,x,y,y,f,f,f,c,b,h,e,b,k,k,p,b,p,w,o,l,h,y,p p,b,y,w,t,n,f,c,b,g,e,b,s,s,w,w,p,w,t,p,r,v,g e,f,y,n,f,n,f,w,n,w,e,b,s,f,w,n,p,w,o,e,w,v,l p,f,f,y,f,f,f,c,b,g,e,b,k,k,p,p,p,w,o,l,h,y,g p,f,y,y,f,f,f,c,b,h,e,b,k,k,p,b,p,w,o,l,h,v,p p,f,y,y,f,f,f,c,b,p,e,b,k,k,n,p,p,w,o,l,h,v,g p,x,s,g,t,f,f,c,b,h,t,b,s,f,w,w,p,w,o,p,h,s,u p,x,y,e,f,y,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,l p,x,y,y,f,n,f,c,n,w,e,?,k,y,w,y,p,w,o,e,w,v,d p,x,y,n,f,f,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,p p,f,s,w,t,f,f,c,b,h,t,b,s,f,w,w,p,w,o,p,h,s,g p,f,f,g,f,f,f,c,b,p,e,b,k,k,b,n,p,w,o,l,h,y,g e,f,f,n,f,n,f,w,n,w,e,b,s,s,w,n,p,w,o,e,w,v,l p,x,s,g,t,f,f,c,b,w,t,b,f,s,w,w,p,w,o,p,h,s,g p,f,s,b,t,f,f,c,b,h,t,b,f,f,w,w,p,w,o,p,h,v,u e,f,s,e,t,n,f,c,b,e,e,?,s,s,e,e,p,w,t,e,w,c,w p,x,s,b,t,f,f,c,b,h,t,b,s,s,w,w,p,w,o,p,h,s,u p,f,y,y,f,f,f,c,b,p,e,b,k,k,b,n,p,w,o,l,h,v,d p,x,y,n,f,f,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,d p,f,s,w,t,f,f,c,b,p,t,b,s,s,w,w,p,w,o,p,h,v,g p,f,s,g,t,f,f,c,b,h,t,b,s,s,w,w,p,w,o,p,h,s,g p,f,y,y,f,f,f,c,b,p,e,b,k,k,n,n,p,w,o,l,h,v,g e,f,f,c,f,n,f,w,n,w,e,b,s,s,w,n,p,w,o,e,w,v,l p,x,y,n,f,s,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,l e,k,y,e,t,n,f,c,b,e,e,?,s,s,w,e,p,w,t,e,w,c,w p,x,y,n,f,y,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,l p,f,s,w,t,f,f,c,b,p,t,b,s,f,w,w,p,w,o,p,h,s,g p,x,y,e,f,y,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,l e,x,s,n,t,n,f,c,b,w,e,?,s,s,e,e,p,w,t,e,w,c,w p,f,y,g,f,f,f,c,b,h,e,b,k,k,p,p,p,w,o,l,h,v,p p,x,s,w,t,f,f,c,b,w,t,b,f,f,w,w,p,w,o,p,h,v,u e,f,s,e,t,n,f,c,b,w,e,?,s,s,e,e,p,w,t,e,w,c,w p,f,y,y,f,f,f,c,b,p,e,b,k,k,b,p,p,w,o,l,h,v,d p,f,y,y,f,f,f,c,b,p,e,b,k,k,n,p,p,w,o,l,h,v,d e,f,y,e,t,n,f,c,b,e,e,?,s,s,e,w,p,w,t,e,w,c,w p,f,s,g,t,f,f,c,b,p,t,b,f,f,w,w,p,w,o,p,h,v,g e,k,s,n,t,n,f,c,b,e,e,?,s,s,e,w,p,w,t,e,w,c,w p,f,y,y,f,f,f,c,b,p,e,b,k,k,n,b,p,w,o,l,h,y,d p,x,s,w,t,f,f,c,b,w,t,b,s,f,w,w,p,w,o,p,h,s,g p,x,y,n,f,s,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,p p,f,y,y,f,f,f,c,b,p,e,b,k,k,b,n,p,w,o,l,h,y,g p,f,y,w,t,n,f,c,b,r,e,b,s,s,w,w,p,w,t,p,r,v,g p,x,y,n,f,s,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,d e,f,y,w,f,n,f,c,n,w,e,?,s,f,w,w,p,w,o,f,h,v,d e,x,y,w,f,n,f,c,n,h,e,?,s,f,w,w,p,w,o,f,h,y,d p,x,y,n,f,f,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,p p,f,s,g,t,f,f,c,b,p,t,b,f,s,w,w,p,w,o,p,h,s,u e,x,y,u,f,n,f,c,n,w,e,?,s,f,w,w,p,w,o,f,h,v,d p,f,s,w,t,n,f,c,b,w,e,b,s,s,w,w,p,w,t,p,r,v,g p,x,y,y,f,f,f,c,b,g,e,b,k,k,p,b,p,w,o,l,h,v,d p,f,s,g,t,f,f,c,b,h,t,b,s,f,w,w,p,w,o,p,h,v,g p,f,y,y,f,f,f,c,b,g,e,b,k,k,p,n,p,w,o,l,h,y,p p,x,f,y,f,f,f,c,b,h,e,b,k,k,p,p,p,w,o,l,h,v,g p,f,y,y,f,f,f,c,b,h,e,b,k,k,p,n,p,w,o,l,h,y,d p,x,s,g,t,f,f,c,b,p,t,b,f,f,w,w,p,w,o,p,h,s,g e,f,y,r,f,n,f,c,n,u,e,?,s,f,w,w,p,w,o,f,h,v,d p,f,s,w,t,f,f,c,b,w,t,b,f,s,w,w,p,w,o,p,h,s,g 
e,k,s,n,t,n,f,c,b,w,e,?,s,s,e,e,p,w,t,e,w,c,w p,x,y,n,f,s,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,d p,x,s,g,t,f,f,c,b,w,t,b,f,f,w,w,p,w,o,p,h,s,g p,f,y,y,f,f,f,c,b,g,e,b,k,k,p,p,p,w,o,l,h,y,g p,b,s,w,t,n,f,c,b,w,e,b,s,s,w,w,p,w,t,p,r,v,m p,x,y,n,f,f,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,d p,b,s,b,t,n,f,c,b,w,e,b,s,s,w,w,p,w,t,p,r,v,m p,f,f,y,f,f,f,c,b,p,e,b,k,k,n,p,p,w,o,l,h,y,g p,x,y,g,f,f,f,c,b,g,e,b,k,k,n,b,p,w,o,l,h,y,d e,x,y,e,t,n,f,c,b,e,e,?,s,s,w,w,p,w,t,e,w,c,w e,k,s,b,t,n,f,c,b,w,e,?,s,s,e,e,p,w,t,e,w,c,w p,x,y,n,f,f,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,l e,x,s,e,t,n,f,c,b,e,e,?,s,s,e,e,p,w,t,e,w,c,w p,f,s,b,t,n,f,c,b,r,e,b,s,s,w,w,p,w,t,p,r,v,m e,f,s,n,t,n,f,c,b,w,e,?,s,s,w,w,p,w,t,e,w,c,w p,x,y,e,f,y,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,p e,f,y,b,t,n,f,c,b,w,e,?,s,s,e,e,p,w,t,e,w,c,w p,x,s,g,t,f,f,c,b,w,t,b,s,s,w,w,p,w,o,p,h,s,u p,f,s,b,t,n,f,c,b,g,e,b,s,s,w,w,p,w,t,p,r,v,g p,x,y,n,f,s,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,l p,x,y,e,f,y,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,l p,f,y,y,f,f,f,c,b,h,e,b,k,k,b,b,p,w,o,l,h,y,g p,x,y,n,f,f,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,l p,x,y,n,f,s,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,d p,f,s,g,t,f,f,c,b,w,t,b,f,s,w,w,p,w,o,p,h,v,u e,x,y,p,t,n,f,c,b,w,e,?,s,s,e,w,p,w,t,e,w,c,w e,x,y,u,f,n,f,c,n,u,e,?,s,f,w,w,p,w,o,f,h,y,d p,f,y,y,f,f,f,c,b,g,e,b,k,k,b,p,p,w,o,l,h,y,p p,x,s,g,t,f,f,c,b,h,t,b,s,f,w,w,p,w,o,p,h,v,g p,x,y,n,f,s,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,l p,f,s,g,t,f,f,c,b,h,t,b,s,f,w,w,p,w,o,p,h,v,u p,x,s,b,t,f,f,c,b,h,t,b,s,f,w,w,p,w,o,p,h,v,g p,x,f,y,f,f,f,c,b,h,e,b,k,k,b,p,p,w,o,l,h,y,g p,b,s,w,t,n,f,c,b,r,e,b,s,s,w,w,p,w,t,p,r,v,g e,k,y,n,t,n,f,c,b,e,e,?,s,s,e,e,p,w,t,e,w,c,w p,x,y,e,f,y,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,d p,f,y,y,f,f,f,c,b,p,e,b,k,k,b,n,p,w,o,l,h,v,p e,x,y,b,t,n,f,c,b,w,e,?,s,s,e,w,p,w,t,e,w,c,w p,x,s,b,t,f,f,c,b,p,t,b,s,f,w,w,p,w,o,p,h,v,g p,f,s,g,t,f,f,c,b,w,t,b,f,f,w,w,p,w,o,p,h,v,u p,f,s,b,t,f,f,c,b,p,t,b,s,s,w,w,p,w,o,p,h,s,u p,f,y,y,f,f,f,c,b,p,e,b,k,k,b,b,p,w,o,l,h,y,d p,x,y,n,f,f,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,p p,f,s,w,t,f,f,c,b,h,t,b,f,f,w,w,p,w,o,p,h,v,u e,k,y,n,t,n,f,c,b,e,e,?,s,s,w,w,p,w,t,e,w,c,w e,x,s,e,t,n,f,c,b,w,e,?,s,s,e,e,p,w,t,e,w,c,w e,x,y,w,f,n,f,c,n,p,e,?,s,f,w,w,p,w,o,f,h,v,d p,f,s,b,t,f,f,c,b,p,t,b,f,s,w,w,p,w,o,p,h,s,u p,f,s,g,t,f,f,c,b,h,t,b,s,s,w,w,p,w,o,p,h,s,u e,f,s,e,t,n,f,c,b,e,e,?,s,s,w,w,p,w,t,e,w,c,w p,f,y,y,f,f,f,c,b,p,e,b,k,k,b,p,p,w,o,l,h,y,g p,x,y,n,f,s,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,l p,f,s,w,t,n,f,c,b,g,e,b,s,s,w,w,p,w,t,p,r,v,g p,f,y,g,f,f,f,c,b,p,e,b,k,k,b,n,p,w,o,l,h,y,d e,x,f,n,f,n,f,w,n,w,e,b,s,s,w,n,p,w,o,e,w,v,l p,f,y,y,f,f,f,c,b,p,e,b,k,k,b,n,p,w,o,l,h,y,d p,x,s,w,t,f,f,c,b,p,t,b,f,s,w,w,p,w,o,p,h,s,u e,x,s,b,t,n,f,c,b,w,e,?,s,s,e,e,p,w,t,e,w,c,w e,x,s,p,t,n,f,c,b,w,e,?,s,s,w,e,p,w,t,e,w,c,w p,x,s,g,t,f,f,c,b,w,t,b,f,f,w,w,p,w,o,p,h,v,g p,f,f,g,f,f,f,c,b,p,e,b,k,k,p,n,p,w,o,l,h,y,p p,x,s,w,t,f,f,c,b,h,t,b,s,f,w,w,p,w,o,p,h,s,g p,x,f,y,f,f,f,c,b,h,e,b,k,k,n,p,p,w,o,l,h,v,p p,f,s,w,t,f,f,c,b,p,t,b,s,s,w,w,p,w,o,p,h,s,u e,k,y,n,t,n,f,c,b,w,e,?,s,s,e,e,p,w,t,e,w,c,w e,x,y,p,t,n,f,c,b,e,e,?,s,s,w,w,p,w,t,e,w,c,w p,f,y,y,f,f,f,c,b,g,e,b,k,k,b,b,p,w,o,l,h,y,g e,f,y,b,t,n,f,c,b,e,e,?,s,s,w,w,p,w,t,e,w,c,w e,x,s,e,t,n,f,c,b,w,e,?,s,s,w,w,p,w,t,e,w,c,w p,x,s,g,t,f,f,c,b,p,t,b,f,f,w,w,p,w,o,p,h,v,u p,f,s,b,t,f,f,c,b,w,t,b,s,s,w,w,p,w,o,p,h,v,u p,f,y,y,f,f,f,c,b,g,e,b,k,k,b,p,p,w,o,l,h,v,p p,x,s,b,t,f,f,c,b,h,t,b,s,f,w,w,p,w,o,p,h,v,u p,f,s,w,t,f,f,c,b,h,t,b,s,s,w,w,p,w,o,p,h,v,g p,f,y,y,f,f,f,c,b,g,e,b,k,k,b,n,p,w,o,l,h,v,g p,f,y,y,f,f,f,c,b,p,e,b,k,k,n,p,p,w,o,l,h,y,d p,f,y,g,f,f,f,c,b,h,e,b,k,k,n,p,p,w,o,l,h,y,p 
p,f,f,g,f,f,f,c,b,g,e,b,k,k,b,n,p,w,o,l,h,v,p p,b,s,p,t,n,f,c,b,g,e,b,s,s,w,w,p,w,t,p,r,v,m p,x,y,n,f,f,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,l p,b,s,w,t,n,f,c,b,w,e,b,s,s,w,w,p,w,t,p,r,v,g e,x,y,n,t,n,f,c,b,w,e,?,s,s,w,w,p,w,t,e,w,c,w p,x,y,n,f,y,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,p p,x,y,n,f,y,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,p p,x,y,n,f,f,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,p p,f,s,b,t,f,f,c,b,w,t,b,s,s,w,w,p,w,o,p,h,s,g p,b,y,y,f,n,f,c,n,w,e,?,k,y,w,n,p,w,o,e,w,v,d p,x,s,g,t,f,f,c,b,w,t,b,f,f,w,w,p,w,o,p,h,v,u e,f,y,p,t,n,f,c,b,e,e,?,s,s,w,w,p,w,t,e,w,c,w e,k,s,p,t,n,f,c,b,e,e,?,s,s,e,e,p,w,t,e,w,c,w e,f,y,n,t,n,f,c,b,w,e,?,s,s,e,e,p,w,t,e,w,c,w p,b,y,p,t,n,f,c,b,g,e,b,s,s,w,w,p,w,t,p,r,v,m p,x,s,g,t,f,f,c,b,w,t,b,s,f,w,w,p,w,o,p,h,v,g p,x,s,g,t,f,f,c,b,p,t,b,s,s,w,w,p,w,o,p,h,s,g p,x,f,y,f,f,f,c,b,p,e,b,k,k,n,n,p,w,o,l,h,v,g p,x,y,n,f,s,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,p p,f,y,b,t,n,f,c,b,w,e,b,s,s,w,w,p,w,t,p,r,v,g p,x,s,b,t,f,f,c,b,h,t,b,f,f,w,w,p,w,o,p,h,v,u p,b,s,b,t,n,f,c,b,r,e,b,s,s,w,w,p,w,t,p,r,v,g e,f,y,w,f,n,f,c,n,p,e,?,s,f,w,w,p,w,o,f,h,y,d e,x,y,n,f,n,f,w,n,w,e,b,f,s,w,n,p,w,o,e,w,v,l p,x,f,y,f,f,f,c,b,p,e,b,k,k,n,n,p,w,o,l,h,v,p p,f,s,b,t,f,f,c,b,h,t,b,s,f,w,w,p,w,o,p,h,v,g p,f,y,y,f,f,f,c,b,g,e,b,k,k,n,p,p,w,o,l,h,v,g e,k,s,e,t,n,f,c,b,e,e,?,s,s,e,e,p,w,t,e,w,c,w p,f,f,y,f,f,f,c,b,h,e,b,k,k,p,p,p,w,o,l,h,y,p p,b,f,n,f,n,f,c,n,w,e,?,k,y,w,n,p,w,o,e,w,v,d p,x,y,n,f,s,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,p p,f,y,y,f,f,f,c,b,h,e,b,k,k,n,b,p,w,o,l,h,y,d p,x,y,y,f,f,f,c,b,g,e,b,k,k,n,b,p,w,o,l,h,y,g p,x,s,g,t,f,f,c,b,p,t,b,f,s,w,w,p,w,o,p,h,s,g p,x,y,n,f,f,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,d e,x,y,n,t,n,f,c,b,w,e,?,s,s,e,w,p,w,t,e,w,c,w e,f,y,n,f,n,f,w,n,w,e,b,f,f,w,n,p,w,o,e,w,v,l p,f,y,b,t,n,f,c,b,g,e,b,s,s,w,w,p,w,t,p,r,v,g p,k,f,n,f,n,f,c,n,w,e,?,k,y,w,y,p,w,o,e,w,v,d p,f,y,g,f,f,f,c,b,h,e,b,k,k,p,n,p,w,o,l,h,v,g p,x,y,n,f,s,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,p p,f,f,g,f,f,f,c,b,h,e,b,k,k,p,p,p,w,o,l,h,y,d p,f,f,g,f,f,f,c,b,p,e,b,k,k,b,p,p,w,o,l,h,v,d p,x,s,g,t,f,f,c,b,p,t,b,s,f,w,w,p,w,o,p,h,s,g e,x,y,r,f,n,f,c,n,u,e,?,s,f,w,w,p,w,o,f,h,y,d p,x,y,n,f,y,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,p p,f,s,p,t,n,f,c,b,g,e,b,s,s,w,w,p,w,t,p,r,v,m p,x,y,e,f,y,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,l p,f,s,b,t,f,f,c,b,w,t,b,s,s,w,w,p,w,o,p,h,v,g p,x,s,b,t,f,f,c,b,w,t,b,s,s,w,w,p,w,o,p,h,v,u e,k,s,e,t,n,f,c,b,e,e,?,s,s,w,w,p,w,t,e,w,c,w p,f,s,b,t,f,f,c,b,w,t,b,s,f,w,w,p,w,o,p,h,s,g e,x,f,n,f,n,f,w,n,w,e,b,f,f,w,n,p,w,o,e,w,v,l p,f,y,y,f,f,f,c,b,p,e,b,k,k,p,n,p,w,o,l,h,v,d e,f,y,w,f,n,f,c,n,w,e,?,s,f,w,w,p,w,o,f,h,y,d p,f,s,w,t,f,f,c,b,w,t,b,s,s,w,w,p,w,o,p,h,s,u p,f,s,p,t,n,f,c,b,r,e,b,s,s,w,w,p,w,t,p,r,v,g p,f,y,y,f,f,f,c,b,p,e,b,k,k,p,n,p,w,o,l,h,y,d p,f,s,g,t,f,f,c,b,h,t,b,s,f,w,w,p,w,o,p,h,s,g p,x,y,n,f,f,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,l p,f,y,y,f,f,f,c,b,g,e,b,k,k,b,b,p,w,o,l,h,v,g p,f,s,g,t,f,f,c,b,h,t,b,f,s,w,w,p,w,o,p,h,s,g p,f,s,w,t,f,f,c,b,h,t,b,s,f,w,w,p,w,o,p,h,s,u p,x,y,n,f,s,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,d p,f,f,y,f,f,f,c,b,p,e,b,k,k,n,n,p,w,o,l,h,y,d p,x,y,n,f,f,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,d p,b,y,b,t,n,f,c,b,g,e,b,s,s,w,w,p,w,t,p,r,v,g p,f,y,y,f,f,f,c,b,g,e,b,k,k,b,p,p,w,o,l,h,v,g p,x,y,e,f,y,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,l p,x,y,n,f,f,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,d e,k,y,b,t,n,f,c,b,e,e,?,s,s,w,w,p,w,t,e,w,c,w e,x,y,w,f,n,f,c,n,p,e,?,s,f,w,w,p,w,o,f,h,y,d p,f,s,b,t,f,f,c,b,p,t,b,s,f,w,w,p,w,o,p,h,v,u p,f,y,y,f,f,f,c,b,g,e,b,k,k,n,p,p,w,o,l,h,y,g p,x,y,e,f,f,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,p p,f,s,e,f,s,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,d p,x,y,e,f,f,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,d 
p,x,s,n,f,f,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,p p,f,y,n,f,f,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,p p,f,y,e,f,s,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,d p,k,y,n,f,y,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,l p,f,y,e,f,y,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,p p,x,s,e,f,s,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,l p,f,y,n,f,f,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,p p,f,y,e,f,f,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,d p,x,s,e,f,s,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,l p,x,s,n,f,s,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,l p,f,s,e,f,f,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,d p,f,s,e,f,y,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,l p,f,y,e,f,f,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,p p,f,y,n,f,y,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,p p,f,y,n,f,f,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,p p,x,y,e,f,s,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,d p,k,s,n,f,y,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,p p,x,s,n,f,f,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,l p,f,s,e,f,y,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,d p,f,y,e,f,s,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,l p,k,s,e,f,y,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,p p,f,y,e,f,y,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,d p,x,s,n,f,s,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,p p,f,s,e,f,y,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,d p,f,s,n,f,y,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,l p,f,y,e,f,s,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,d p,f,s,e,f,y,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,l p,f,s,n,f,s,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,p p,f,y,e,f,s,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,p p,x,s,e,f,s,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,l p,f,s,e,f,f,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,p p,x,s,n,f,s,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,d p,f,y,n,f,s,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,p p,x,y,e,f,f,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,l p,f,y,n,f,y,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,l e,b,s,n,f,n,a,c,b,y,e,?,s,s,o,o,p,n,o,p,n,c,l p,f,s,n,f,y,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,d e,x,s,n,f,n,a,c,b,o,e,?,s,s,o,o,p,n,o,p,n,v,l p,f,y,e,f,y,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,d p,f,y,n,f,s,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,l p,f,y,e,f,y,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,p p,f,s,n,f,y,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,l p,x,y,e,f,y,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,l p,x,s,n,f,f,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,d p,f,s,n,f,s,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,d p,f,y,e,f,y,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,d p,f,s,n,f,f,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,d p,f,s,n,f,s,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,p p,f,s,n,f,f,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,l p,f,y,e,f,s,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,d p,f,s,e,f,y,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,l p,x,y,e,f,f,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,l p,x,s,e,f,y,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,d p,f,s,e,f,f,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,d p,f,y,n,f,y,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,p p,x,y,e,f,f,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,d p,k,s,e,f,s,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,l p,k,s,n,f,f,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,l p,f,y,n,f,f,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,d p,x,s,e,f,s,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,l p,x,y,e,f,f,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,l p,f,y,e,f,s,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,p p,f,y,n,f,s,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,l p,f,s,e,f,f,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,p p,x,s,n,f,s,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,p e,x,f,w,f,n,f,w,b,w,e,?,k,k,w,w,p,w,t,p,w,n,g p,f,y,n,f,y,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,p p,x,s,n,f,f,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,d p,x,s,e,f,f,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,d p,f,s,n,f,s,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,l p,f,s,n,f,s,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,l p,x,y,e,f,s,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,l p,x,s,e,f,f,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,d p,f,s,n,f,y,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,l p,k,s,n,f,f,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,p p,f,y,n,f,s,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,p p,x,s,n,f,s,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,l 
p,f,y,e,f,y,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,l p,x,s,e,f,y,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,d p,f,s,n,f,s,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,l p,f,y,n,f,s,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,p p,k,y,n,f,y,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,l p,f,y,n,f,s,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,d p,f,s,n,f,y,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,d p,x,s,e,f,s,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,l p,f,s,n,f,y,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,l p,f,s,n,f,s,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,d p,f,s,e,f,f,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,l p,f,y,n,f,y,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,l p,f,s,n,f,f,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,d p,f,y,n,f,f,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,l p,f,s,e,f,y,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,l p,f,s,e,f,f,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,l p,x,y,e,f,f,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,p p,f,s,n,f,y,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,p p,x,y,e,f,y,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,d p,f,y,e,f,f,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,l p,x,s,n,f,f,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,p p,f,y,n,f,s,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,p p,f,y,e,f,f,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,d p,x,y,e,f,s,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,l p,f,y,n,f,y,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,d p,f,y,n,f,y,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,l p,x,y,e,f,s,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,l p,f,y,n,f,y,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,l p,f,s,n,f,s,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,l p,x,s,n,f,f,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,p p,f,y,e,f,y,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,p p,x,y,e,f,s,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,l p,x,s,e,f,f,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,p p,f,s,e,f,f,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,l p,f,y,e,f,f,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,d p,x,y,e,f,s,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,l p,x,y,e,f,s,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,l p,x,s,n,f,f,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,l p,x,s,e,f,f,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,l p,k,s,e,f,s,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,p e,x,s,c,t,n,f,c,b,w,e,b,s,s,w,w,p,w,t,p,w,v,p p,x,s,e,f,y,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,p p,f,s,n,f,y,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,d p,x,s,n,f,y,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,l p,f,y,n,f,s,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,p p,f,y,e,f,y,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,l p,f,s,e,f,s,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,l p,x,y,e,f,f,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,l p,x,s,e,f,f,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,l p,x,s,n,f,y,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,d p,x,y,e,f,f,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,d p,f,s,e,f,f,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,p p,f,s,n,f,y,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,l p,f,y,e,f,y,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,p p,x,y,e,f,s,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,l p,k,s,e,f,f,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,p p,f,y,n,f,s,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,d p,f,s,n,f,y,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,d p,x,s,e,f,y,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,p p,f,s,n,f,f,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,l p,f,s,e,f,s,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,p p,f,y,e,f,y,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,l e,b,f,w,f,n,f,w,b,g,e,?,s,s,w,w,p,w,t,p,w,n,g p,x,y,e,f,s,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,l p,x,y,e,f,f,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,p p,f,y,e,f,s,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,l p,f,y,e,f,y,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,l e,k,f,w,f,n,f,w,b,g,e,?,k,s,w,w,p,w,t,p,w,s,g p,x,s,n,f,s,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,p p,f,s,n,f,s,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,p p,x,s,n,f,s,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,d p,f,s,e,f,f,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,d p,x,s,n,f,f,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,p p,x,s,n,f,f,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,p p,x,s,e,f,s,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,p p,k,s,n,f,y,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,d p,x,y,e,f,f,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,d 
p,x,s,e,f,f,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,d p,x,y,e,f,f,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,d p,x,s,e,f,s,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,d p,x,s,e,f,f,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,d p,x,s,e,f,s,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,l p,x,s,e,f,s,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,d p,f,y,e,f,s,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,d p,x,s,n,f,y,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,d p,f,s,n,f,y,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,d p,x,s,e,f,f,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,d p,f,s,e,f,y,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,p p,x,s,n,f,y,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,l p,f,y,e,f,y,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,d p,x,s,n,f,y,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,l p,k,s,n,f,f,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,p p,f,s,e,f,f,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,l p,f,s,e,f,f,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,l p,f,y,e,f,f,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,p p,k,s,n,f,f,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,p p,f,y,n,f,f,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,l p,f,s,e,f,f,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,p p,x,y,e,f,s,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,d p,f,s,e,f,y,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,l p,f,y,e,f,f,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,l p,f,s,n,f,s,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,d p,f,s,n,f,f,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,d p,x,y,e,f,s,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,p p,x,y,e,f,s,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,l p,f,s,e,f,y,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,p p,k,y,e,f,f,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,d p,f,y,e,f,s,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,p p,k,y,n,f,s,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,d p,x,s,e,f,s,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,l p,x,y,e,f,s,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,p p,x,s,n,f,f,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,l p,f,y,n,f,f,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,l p,x,s,e,f,f,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,p e,b,s,g,f,n,f,w,b,w,e,?,s,k,w,w,p,w,t,p,w,n,g p,x,y,e,f,s,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,p p,x,s,e,f,s,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,d p,f,y,e,f,s,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,l p,x,s,e,f,y,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,p p,f,y,e,f,y,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,d p,f,y,e,f,f,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,l p,x,s,n,f,f,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,d p,f,y,n,f,f,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,p e,k,s,w,f,n,f,w,b,g,e,?,k,s,w,w,p,w,t,p,w,s,g p,f,y,e,f,s,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,p p,x,s,e,f,f,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,p p,f,y,n,f,y,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,l p,f,y,n,f,s,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,l p,x,s,e,f,y,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,l p,f,y,e,f,y,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,d p,f,y,e,f,f,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,p p,f,s,n,f,y,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,p p,x,y,e,f,f,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,p p,f,y,e,f,s,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,d e,b,f,g,f,n,f,w,b,p,e,?,k,s,w,w,p,w,t,p,w,n,g p,k,y,e,f,s,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,l p,x,y,e,f,f,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,l p,k,s,e,f,f,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,l p,x,s,n,f,s,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,p e,k,s,w,f,n,f,w,b,g,e,?,s,k,w,w,p,w,t,p,w,s,g p,f,s,e,f,s,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,d p,x,s,n,f,y,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,p p,k,s,e,f,f,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,p p,k,y,n,f,s,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,l p,k,y,e,f,f,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,p p,x,s,e,f,y,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,d p,f,s,e,f,s,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,p p,f,y,e,f,f,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,p p,x,y,e,f,s,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,d p,f,y,n,f,y,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,d p,x,y,e,f,y,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,d p,f,y,n,f,y,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,l p,x,y,e,f,y,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,l p,k,y,n,f,y,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,l 
p,f,s,e,f,f,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,l p,x,s,n,f,f,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,p p,f,y,n,f,f,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,d p,f,s,e,f,s,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,d p,x,s,e,f,y,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,d p,f,s,n,f,y,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,d p,f,y,n,f,s,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,l p,x,y,e,f,f,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,d p,x,s,e,f,f,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,d p,f,y,e,f,y,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,l p,f,s,e,f,y,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,l p,f,y,e,f,f,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,d p,x,s,n,f,s,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,l p,f,s,n,f,s,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,d p,x,s,n,f,f,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,l p,f,y,n,f,y,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,p p,k,y,n,f,s,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,d p,x,s,e,f,s,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,p p,f,y,n,f,f,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,d p,x,y,e,f,s,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,p p,f,y,e,f,s,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,d p,x,y,e,f,y,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,d p,f,y,e,f,y,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,l p,f,y,e,f,s,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,d p,f,s,e,f,s,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,l p,k,y,e,f,y,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,l p,x,s,e,f,f,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,p e,k,s,g,f,n,f,w,b,g,e,?,k,s,w,w,p,w,t,p,w,s,g p,x,s,n,f,y,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,l p,f,s,e,f,s,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,d p,f,s,n,f,y,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,d p,f,s,e,f,s,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,l p,f,y,n,f,s,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,p p,x,s,e,f,y,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,p p,x,s,e,f,s,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,p p,x,s,e,f,y,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,d p,x,s,e,f,f,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,p p,f,s,e,f,s,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,d p,x,s,e,f,s,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,p p,x,s,n,f,f,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,l p,f,s,n,f,s,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,l p,k,y,n,f,y,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,d p,x,y,e,f,s,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,p p,x,s,n,f,f,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,l p,x,s,e,f,f,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,p e,b,s,w,f,n,f,w,b,p,e,?,k,k,w,w,p,w,t,p,w,s,g p,f,s,e,f,s,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,p p,x,s,e,f,y,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,l p,x,y,e,f,f,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,d p,f,y,n,f,f,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,p p,f,s,e,f,y,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,l p,f,y,n,f,s,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,d p,f,y,e,f,f,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,d p,f,s,n,f,f,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,d p,x,y,e,f,f,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,l p,f,s,n,f,s,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,d p,x,s,n,f,y,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,l p,f,s,e,f,s,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,p p,x,s,n,f,s,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,d p,f,s,e,f,s,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,l p,x,y,e,f,y,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,p p,f,y,n,f,f,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,p p,x,y,e,f,f,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,l p,f,s,n,f,f,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,p p,f,s,n,f,y,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,p p,x,s,n,f,y,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,p p,f,s,e,f,s,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,d p,k,y,n,f,y,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,p p,x,y,e,f,f,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,l p,x,y,e,f,s,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,d p,k,s,n,f,s,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,d p,x,s,e,f,f,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,p p,x,s,e,f,f,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,p p,f,y,n,f,f,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,l p,f,s,e,f,y,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,d p,f,s,e,f,f,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,p p,f,s,n,f,f,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,l 
p,f,y,e,f,f,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,p p,f,y,e,f,f,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,l p,k,s,e,f,s,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,d p,x,s,n,f,f,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,d p,f,y,n,f,y,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,d p,f,s,e,f,f,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,p p,k,s,n,f,y,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,d p,f,y,n,f,f,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,p p,x,y,e,f,y,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,l p,x,y,e,f,f,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,p p,x,s,e,f,y,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,p p,f,y,n,f,f,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,l p,f,y,n,f,s,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,p p,f,y,e,f,s,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,p p,k,y,n,f,y,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,l p,x,s,n,f,s,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,d p,x,s,e,f,y,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,l p,x,s,n,f,f,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,l p,f,y,e,f,f,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,p p,k,y,e,f,f,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,d e,x,f,g,f,n,f,w,b,w,e,?,s,k,w,w,p,w,t,p,w,s,g p,f,y,e,f,f,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,l p,x,s,e,f,f,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,l p,x,s,e,f,s,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,l p,x,y,e,f,s,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,d p,f,y,n,f,f,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,p p,f,y,n,f,y,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,l p,x,s,n,f,s,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,p p,x,s,e,f,f,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,l p,f,y,e,f,f,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,d p,f,s,n,f,f,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,l p,f,y,n,f,f,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,l p,f,y,n,f,y,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,d p,x,y,e,f,f,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,l p,x,s,e,f,f,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,p p,f,s,n,f,f,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,l p,f,s,n,f,s,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,d p,x,s,e,f,f,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,d p,f,s,n,f,f,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,l p,f,s,n,f,f,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,p p,f,y,n,f,f,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,p p,x,s,n,f,s,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,l p,f,s,n,f,y,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,l p,f,s,n,f,y,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,p p,x,s,n,f,y,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,p p,f,y,n,f,f,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,p p,x,s,n,f,y,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,p p,x,s,n,f,y,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,p p,f,s,n,f,f,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,p p,f,y,n,f,f,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,d p,x,y,e,f,y,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,d p,f,s,e,f,f,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,l p,f,y,n,f,s,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,d p,f,s,e,f,y,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,l p,x,s,n,f,f,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,p e,k,f,w,f,n,f,w,b,p,e,?,k,k,w,w,p,w,t,p,w,n,g p,x,s,n,f,f,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,p p,f,s,n,f,f,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,d p,f,y,n,f,y,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,p p,f,s,n,f,f,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,d p,f,y,n,f,y,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,d p,f,y,n,f,f,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,l p,f,s,n,f,f,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,d p,f,y,e,f,y,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,p e,b,s,n,f,n,a,c,b,o,e,?,s,s,o,o,p,o,o,p,o,v,l p,f,s,e,f,f,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,d p,f,s,e,f,s,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,p p,x,s,e,f,y,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,l p,f,y,n,f,s,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,l p,f,y,e,f,s,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,p p,f,s,n,f,f,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,l p,f,y,e,f,s,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,l p,f,s,n,f,s,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,p p,k,y,n,f,y,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,p p,f,y,e,f,f,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,l p,f,s,e,f,s,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,d p,x,y,e,f,f,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,p 
p,f,s,n,f,y,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,l p,x,s,e,f,s,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,p p,k,y,n,f,y,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,p p,x,s,n,f,s,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,p p,x,y,e,f,f,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,d p,f,y,n,f,y,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,d p,f,s,e,f,y,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,d p,f,s,n,f,f,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,p e,x,s,w,f,n,f,w,b,g,e,?,k,s,w,w,p,w,t,p,w,s,g p,f,y,n,f,y,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,d p,f,s,n,f,f,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,l p,x,y,e,f,y,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,l p,f,s,e,f,s,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,l p,x,s,n,f,s,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,p p,x,y,e,f,f,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,l e,k,f,g,f,n,f,w,b,g,e,?,k,k,w,w,p,w,t,p,w,s,g p,f,s,e,f,s,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,l p,f,y,e,f,s,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,p e,b,f,w,f,n,f,w,b,w,e,?,s,k,w,w,p,w,t,p,w,s,g p,f,s,n,f,y,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,l p,x,y,e,f,s,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,l p,x,y,e,f,f,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,d p,f,s,n,f,s,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,p p,x,s,n,f,f,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,d p,f,s,e,f,s,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,p p,f,s,e,f,s,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,l p,f,s,e,f,s,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,l p,x,y,e,f,m,f,c,b,w,e,c,k,y,c,c,p,w,n,n,w,c,d p,f,y,n,f,y,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,d p,f,y,e,f,y,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,d p,x,s,e,f,s,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,p p,f,s,e,f,y,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,p p,f,y,e,f,f,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,p p,f,y,e,f,s,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,l p,x,s,e,f,f,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,l p,f,s,e,f,s,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,p e,x,s,n,f,n,a,c,b,n,e,?,s,s,o,o,p,n,o,p,y,c,l p,f,y,n,f,s,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,d p,f,y,e,f,s,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,l p,f,y,e,f,s,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,l p,x,s,n,f,f,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,p p,x,s,e,f,y,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,d p,x,y,e,f,f,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,d p,x,s,e,f,s,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,p p,x,y,e,f,y,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,p p,f,y,e,f,f,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,p e,x,s,n,f,n,a,c,b,n,e,?,s,s,o,o,p,n,o,p,o,v,l p,f,s,n,f,s,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,p p,x,y,e,f,f,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,p p,f,s,n,f,f,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,d p,f,y,e,f,y,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,p p,x,y,e,f,f,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,d p,x,s,n,f,s,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,d p,x,s,n,f,f,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,l p,x,s,n,f,y,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,l p,x,y,e,f,s,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,d p,f,s,n,f,y,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,l p,f,s,n,f,y,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,l p,k,y,e,f,y,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,l p,x,s,n,f,y,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,d p,f,y,n,f,y,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,p e,k,s,w,f,n,f,w,b,g,e,?,s,k,w,w,p,w,t,p,w,n,g p,f,s,e,f,y,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,l p,x,s,e,f,y,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,l p,f,y,n,f,f,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,d p,f,y,n,f,s,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,p p,f,y,e,f,y,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,d p,x,s,n,f,f,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,p p,f,y,n,f,s,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,l p,x,s,n,f,s,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,l p,k,y,n,f,y,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,l p,f,s,e,f,s,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,d p,f,y,e,f,s,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,l p,f,y,e,f,f,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,l p,f,y,e,f,s,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,p p,f,y,n,f,f,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,p p,f,y,n,f,f,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,l 
p,k,y,n,f,y,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,l p,k,y,n,f,y,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,p p,f,s,e,f,y,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,p p,x,s,e,f,y,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,d p,k,s,n,f,f,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,l p,x,s,e,f,f,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,p p,f,s,n,f,f,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,d p,x,s,e,f,s,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,l p,f,s,n,f,y,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,p p,x,s,n,f,f,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,l p,f,y,e,f,y,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,d p,x,s,n,f,f,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,l p,x,s,e,f,y,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,p p,x,s,e,f,s,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,d p,x,s,n,f,s,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,p p,x,y,e,f,s,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,d p,f,y,e,f,y,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,p p,f,s,e,f,s,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,d p,x,s,e,f,y,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,d p,f,y,e,f,y,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,l p,f,s,e,f,y,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,p p,f,s,e,f,s,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,l p,f,y,n,f,f,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,d p,f,s,n,f,s,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,p p,x,s,e,f,f,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,p p,f,y,n,f,s,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,l p,f,s,n,f,s,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,d p,f,y,n,f,f,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,l p,f,s,e,f,s,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,l p,f,y,e,f,f,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,p p,f,y,n,f,y,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,d p,f,s,e,f,y,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,d p,f,s,e,f,f,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,d p,f,y,n,f,s,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,d p,f,s,e,f,f,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,d p,f,s,n,f,f,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,d p,k,y,e,f,s,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,d p,f,y,e,f,f,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,d p,f,s,e,f,f,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,d p,f,y,e,f,y,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,d p,f,y,n,f,s,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,l p,f,y,e,f,f,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,p p,f,s,n,f,s,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,p p,x,y,e,f,y,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,d e,x,f,w,f,n,f,w,b,p,e,?,k,s,w,w,p,w,t,p,w,n,g p,x,s,e,f,s,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,d p,x,s,e,f,f,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,l p,f,y,n,f,y,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,l p,x,s,e,f,y,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,d p,x,s,n,f,y,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,d p,x,s,n,f,f,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,p p,x,s,n,f,y,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,l e,x,f,g,f,n,f,w,b,p,e,?,s,k,w,w,p,w,t,p,w,n,g p,f,y,e,f,f,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,l p,f,y,e,f,f,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,l p,x,y,e,f,y,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,p p,x,s,n,f,y,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,p p,k,s,e,f,y,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,p p,k,y,e,f,f,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,d p,f,y,e,f,s,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,d p,x,s,n,f,f,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,l p,f,s,n,f,s,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,l p,x,s,e,f,s,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,p p,f,y,n,f,y,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,d p,x,s,n,f,s,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,p e,k,f,w,f,n,f,w,b,p,e,?,s,k,w,w,p,w,t,p,w,n,g p,k,y,n,f,f,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,d p,f,s,e,f,f,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,d p,x,y,e,f,f,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,d p,x,s,n,f,s,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,l p,x,s,n,f,y,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,d p,f,y,e,f,y,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,l p,f,y,n,f,s,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,l p,x,y,e,f,y,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,l p,f,s,e,f,y,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,p p,x,s,e,f,s,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,d p,x,s,n,f,s,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,p 
p,f,y,n,f,f,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,l p,f,y,e,f,s,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,l p,f,y,n,f,s,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,l p,f,y,n,f,f,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,d p,f,s,n,f,y,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,p p,f,y,e,f,f,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,p p,f,y,e,f,s,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,l p,x,y,e,f,s,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,p p,k,s,e,f,f,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,p p,f,y,e,f,s,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,p p,f,y,n,f,y,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,p p,f,s,n,f,s,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,d p,f,s,e,f,y,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,p p,k,s,e,f,f,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,p p,f,y,e,f,s,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,p p,f,s,n,f,s,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,p e,x,s,n,f,n,a,c,b,n,e,?,s,s,o,o,p,o,o,p,b,v,l p,x,s,n,f,y,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,l p,k,s,n,f,y,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,l p,f,s,n,f,f,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,d p,f,s,n,f,y,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,d p,x,s,n,f,y,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,p p,f,s,n,f,y,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,d p,f,y,e,f,f,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,p p,f,y,e,f,y,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,d p,f,s,n,f,f,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,l p,x,s,e,f,f,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,l p,x,s,n,f,s,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,p p,x,s,e,f,y,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,p p,x,s,e,f,s,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,d p,f,y,e,f,f,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,l p,x,s,e,f,y,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,p p,k,s,e,f,y,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,l p,x,y,e,f,f,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,p p,f,s,n,f,f,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,p p,f,s,n,f,y,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,l p,x,s,n,f,f,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,d p,k,y,e,f,y,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,p p,x,s,e,f,y,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,l p,f,y,n,f,s,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,d p,f,y,e,f,s,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,d p,f,y,n,f,s,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,p p,f,s,n,f,s,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,l p,f,s,e,f,y,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,p p,k,y,n,f,y,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,p p,f,y,e,f,y,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,l p,f,s,e,f,s,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,p p,f,s,n,f,f,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,d p,x,s,n,f,y,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,l p,f,y,n,f,s,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,d p,f,y,e,f,s,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,d p,x,y,e,f,s,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,d p,f,s,e,f,s,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,p p,x,s,n,f,s,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,l p,f,s,n,f,y,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,p p,f,s,n,f,s,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,l p,f,s,e,f,y,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,p e,x,s,g,f,n,f,w,b,p,e,?,s,k,w,w,p,w,t,p,w,n,g p,k,y,n,f,y,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,p p,x,y,e,f,s,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,p p,x,s,n,f,s,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,p p,x,s,n,f,s,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,d p,f,y,e,f,s,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,l p,k,y,n,f,f,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,l p,x,s,e,f,f,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,d p,x,s,n,f,s,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,d p,k,y,n,f,y,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,d p,x,y,e,f,f,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,l p,k,s,n,f,f,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,p p,x,s,n,f,f,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,p p,x,y,e,f,f,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,d p,f,s,e,f,y,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,d p,f,s,n,f,s,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,l p,f,s,n,f,f,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,p p,f,s,e,f,y,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,p p,x,y,e,f,s,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,l p,x,s,e,f,f,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,l 
p,f,s,n,f,s,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,p p,f,y,e,f,s,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,d p,x,s,n,f,f,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,d p,x,s,e,f,s,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,d p,x,s,e,f,y,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,l p,f,s,e,f,f,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,p p,x,y,e,f,s,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,p p,x,s,n,f,y,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,d p,x,s,e,f,s,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,l p,x,s,e,f,y,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,l p,x,s,n,f,y,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,d p,x,s,n,f,s,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,l p,f,s,e,f,y,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,d p,f,y,e,f,f,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,d p,x,s,n,f,s,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,d p,x,s,e,f,f,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,l p,f,s,n,f,s,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,l p,x,s,n,f,f,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,d p,f,s,e,f,y,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,d p,k,y,n,f,y,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,p p,x,s,n,f,y,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,l p,f,y,n,f,y,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,p p,k,y,n,f,y,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,p p,f,s,e,f,s,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,d p,f,s,n,f,f,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,p p,x,s,e,f,y,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,d p,x,s,n,f,f,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,l p,f,y,n,f,y,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,p p,f,y,n,f,s,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,p p,x,s,e,f,y,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,l p,f,y,n,f,y,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,l p,x,y,e,f,s,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,p p,f,y,n,f,y,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,l p,f,y,e,f,s,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,p p,x,y,e,f,f,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,l p,f,s,e,f,y,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,l p,k,s,n,f,s,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,d p,x,y,e,f,f,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,p p,x,s,n,f,f,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,l p,f,s,e,f,s,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,p p,f,s,e,f,y,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,p e,b,f,g,f,n,f,w,b,p,e,?,s,k,w,w,p,w,t,p,w,s,g p,x,s,n,f,y,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,p p,k,s,n,f,s,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,p e,b,s,n,f,n,a,c,b,o,e,?,s,s,o,o,p,n,o,p,y,v,l p,f,s,n,f,y,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,d p,f,y,n,f,s,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,d p,x,y,e,f,s,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,d p,x,s,e,f,s,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,d p,k,y,c,f,m,a,c,b,w,e,c,k,y,c,c,p,w,n,n,w,c,d p,x,s,e,f,s,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,l p,f,s,n,f,s,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,p p,x,s,n,f,y,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,l p,x,y,e,f,s,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,d p,x,s,e,f,f,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,d p,f,s,e,f,f,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,l p,f,s,n,f,f,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,d p,f,s,n,f,s,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,d p,f,y,e,f,y,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,p p,x,s,e,f,y,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,l p,f,y,e,f,s,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,d p,f,y,n,f,y,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,l p,x,y,e,f,f,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,l p,f,y,e,f,s,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,p p,f,s,e,f,f,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,l p,f,s,e,f,f,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,p p,x,s,e,f,y,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,p p,x,s,e,f,f,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,d p,k,y,e,f,s,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,p p,x,y,e,f,f,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,l p,x,s,e,f,y,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,d p,x,s,n,f,y,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,d p,x,s,n,f,s,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,l p,f,s,n,f,y,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,d p,f,y,n,f,s,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,p p,x,s,e,f,s,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,d p,k,s,n,f,f,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,p 
p,f,s,n,f,y,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,d p,f,y,n,f,f,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,d p,f,y,e,f,y,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,p p,f,y,e,f,s,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,l p,f,y,n,f,f,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,d p,x,s,e,f,y,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,l p,f,y,n,f,s,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,d p,f,s,n,f,s,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,d p,f,s,e,f,f,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,p p,x,s,n,f,y,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,d p,f,y,e,f,f,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,d p,k,y,n,f,y,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,l p,f,s,n,f,y,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,p p,f,y,n,f,f,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,p p,f,s,e,f,y,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,l p,x,s,n,f,f,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,l p,f,s,e,f,f,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,p p,x,s,e,f,f,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,d p,f,y,e,f,s,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,d p,x,s,e,f,y,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,p p,f,s,e,f,s,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,p p,f,s,n,f,f,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,p p,x,s,n,f,y,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,p p,x,s,e,f,s,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,d p,f,s,e,f,s,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,d p,x,s,e,f,f,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,l p,x,s,n,f,s,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,l p,f,s,e,f,f,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,d p,x,y,e,f,s,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,l p,x,s,n,f,y,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,p p,x,y,e,f,s,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,p p,f,s,e,f,y,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,l p,x,s,e,f,s,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,d p,f,y,n,f,f,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,l p,x,s,e,f,f,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,l p,x,s,n,f,s,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,l p,f,s,e,f,y,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,d p,x,s,e,f,s,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,d p,f,s,n,f,y,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,l p,x,y,e,f,s,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,p p,f,y,e,f,y,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,d p,x,y,e,f,s,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,p p,k,y,n,f,y,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,l p,f,y,n,f,y,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,l p,x,y,e,f,f,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,l p,f,s,n,f,y,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,p p,x,s,e,f,s,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,d p,f,s,e,f,y,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,d e,x,f,g,f,n,f,w,b,g,e,?,k,k,w,w,p,w,t,p,w,s,g p,x,s,e,f,y,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,p p,f,s,n,f,f,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,l p,f,s,n,f,s,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,p p,x,s,e,f,f,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,l p,x,s,e,f,y,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,l p,x,s,e,f,s,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,l p,f,y,e,f,y,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,d e,b,f,g,f,n,f,w,b,w,e,?,s,s,w,w,p,w,t,p,w,s,g p,f,y,n,f,s,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,d p,f,y,e,f,f,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,d p,f,y,n,f,s,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,d p,f,s,n,f,f,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,l p,f,y,n,f,f,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,d p,x,y,e,f,s,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,d p,f,y,n,f,s,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,l p,x,s,n,f,y,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,d p,f,y,e,f,f,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,d e,k,f,g,f,n,f,w,b,p,e,?,k,s,w,w,p,w,t,p,w,s,g e,b,s,n,f,n,a,c,b,y,e,?,s,s,o,o,p,n,o,p,o,c,l p,f,s,n,f,s,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,l p,x,s,n,f,s,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,p p,f,y,n,f,s,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,l p,f,s,e,f,s,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,l p,f,y,n,f,y,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,l p,f,s,n,f,f,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,d p,f,y,e,f,y,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,p p,f,y,e,f,f,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,l p,f,s,n,f,s,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,l 
p,x,s,e,f,y,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,d e,x,f,w,f,n,f,w,b,g,e,?,s,k,w,w,p,w,t,p,w,n,g p,f,s,n,f,f,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,p p,x,s,e,f,y,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,l p,x,s,n,f,y,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,d p,x,s,n,f,y,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,l p,x,s,n,f,f,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,d p,k,y,n,f,y,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,d p,f,s,n,f,y,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,p p,x,y,e,f,y,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,p p,f,s,n,f,y,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,p p,f,s,n,f,f,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,d p,x,s,n,f,s,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,l p,x,s,n,f,f,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,d p,x,y,e,f,s,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,l p,f,s,e,f,f,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,d p,f,s,e,f,s,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,l p,f,y,n,f,f,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,p p,x,s,e,f,s,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,l p,f,s,e,f,s,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,l p,f,y,e,f,s,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,d p,f,y,e,f,y,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,l p,x,y,e,f,s,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,l p,f,y,n,f,f,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,l p,f,s,n,f,f,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,l p,f,s,n,f,y,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,p p,k,y,e,f,f,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,l p,f,s,e,f,y,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,l p,x,s,e,f,s,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,p p,x,y,e,f,y,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,p p,f,y,e,f,f,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,l p,x,y,e,f,s,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,d p,f,y,e,f,f,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,l p,f,y,n,f,y,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,d p,f,y,e,f,y,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,l p,x,y,e,f,s,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,p p,x,s,e,f,y,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,d p,f,s,e,f,f,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,l p,f,s,e,f,f,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,p p,x,s,e,f,f,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,p p,f,s,e,f,y,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,p p,f,s,e,f,s,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,p p,f,s,n,f,y,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,l p,f,y,e,f,f,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,d p,f,y,n,f,s,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,p p,k,s,e,f,f,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,l p,f,s,n,f,y,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,d p,f,y,n,f,y,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,p p,f,s,e,f,s,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,l p,f,y,e,f,y,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,l p,x,s,n,f,y,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,p p,k,y,n,f,y,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,p p,x,y,e,f,s,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,d p,x,s,e,f,f,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,l e,x,s,g,f,n,f,w,b,w,e,?,k,s,w,w,p,w,t,p,w,n,g p,k,y,n,f,y,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,d p,x,s,n,f,y,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,p p,f,s,n,f,s,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,p p,f,s,n,f,s,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,p p,x,s,e,f,s,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,p p,f,s,e,f,f,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,l p,f,y,e,f,f,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,l p,f,y,e,f,f,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,p p,f,s,n,f,f,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,l p,f,s,e,f,f,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,p p,x,s,n,f,f,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,p p,f,s,e,f,f,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,l p,x,s,e,f,s,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,p p,f,s,e,f,y,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,d p,f,y,e,f,f,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,p p,k,y,e,f,s,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,p p,f,s,n,f,s,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,p p,k,y,e,f,s,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,d p,f,s,e,f,y,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,l p,f,y,e,f,s,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,l p,f,s,e,f,f,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,l e,b,s,n,f,n,a,c,b,n,e,?,s,s,o,o,p,o,o,p,o,c,l 
p,x,s,n,f,f,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,d p,f,y,e,f,s,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,l p,f,y,n,f,s,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,d p,f,s,e,f,f,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,d p,f,y,n,f,f,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,p p,k,y,n,f,m,f,c,b,w,e,c,k,y,c,c,p,w,n,n,w,c,d p,x,s,e,f,f,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,p p,f,s,e,f,s,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,l p,k,y,n,f,y,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,d p,f,s,e,f,f,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,p e,k,s,w,f,n,f,w,b,g,e,?,k,k,w,w,p,w,t,p,w,s,g p,f,y,n,f,y,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,l p,f,y,n,f,y,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,d p,f,y,n,f,y,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,p p,f,s,n,f,s,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,d p,f,s,n,f,y,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,l p,f,y,e,f,y,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,p p,x,s,e,f,f,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,d p,f,y,n,f,f,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,p p,x,s,n,f,f,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,p p,x,s,e,f,f,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,l p,x,s,e,f,y,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,l p,f,y,e,f,y,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,d p,x,s,n,f,y,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,d p,x,s,e,f,s,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,p p,f,s,n,f,f,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,l p,x,s,n,f,y,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,l p,f,y,e,f,y,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,d p,x,s,n,f,f,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,d p,f,y,e,f,f,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,l p,f,s,e,f,f,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,p p,f,y,n,f,f,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,l p,f,y,n,f,y,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,d p,x,y,e,f,f,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,d e,k,f,g,f,n,f,w,b,w,e,?,k,k,w,w,p,w,t,p,w,n,g p,x,s,e,f,y,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,d p,k,y,n,f,y,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,d p,k,y,n,f,s,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,d p,x,y,e,f,s,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,p p,f,s,e,f,f,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,l p,f,s,n,f,s,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,l p,f,s,e,f,s,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,p p,x,s,e,f,f,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,p p,f,s,n,f,s,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,d p,f,y,e,f,s,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,p p,x,s,n,f,s,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,l p,x,y,e,f,f,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,p p,f,s,e,f,y,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,p p,x,s,n,f,y,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,d p,f,s,n,f,f,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,p e,x,s,n,f,n,a,c,b,n,e,?,s,s,o,o,p,o,o,p,y,v,l p,k,s,n,f,y,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,p p,f,s,n,f,y,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,l p,k,y,e,f,f,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,p e,x,y,c,t,n,f,c,b,w,e,b,s,s,w,w,p,w,t,p,w,y,p e,b,s,n,f,n,a,c,b,n,e,?,s,s,o,o,p,n,o,p,y,c,l e,k,s,g,f,n,f,w,b,w,e,?,s,k,w,w,p,w,t,p,w,n,g e,x,f,g,f,n,f,w,b,g,e,?,s,k,w,w,p,w,t,p,w,s,g p,x,s,n,f,s,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,d p,k,y,e,f,f,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,p e,x,s,n,f,n,a,c,b,o,e,?,s,s,o,o,p,n,o,p,o,c,l p,k,s,e,f,s,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,d p,b,y,y,f,n,f,w,n,y,e,c,y,y,y,y,p,y,o,e,w,c,l p,x,s,e,f,f,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,d e,b,s,g,f,n,f,w,b,g,e,?,k,k,w,w,p,w,t,p,w,n,g p,k,y,e,f,f,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,p p,k,s,e,f,s,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,p p,f,s,e,f,s,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,d p,f,y,n,f,y,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,p p,f,s,e,f,s,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,d e,k,f,g,f,n,f,w,b,w,e,?,s,k,w,w,p,w,t,p,w,s,g p,k,s,n,f,s,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,p p,k,s,e,f,y,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,d p,f,s,e,f,f,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,l e,b,f,g,f,n,f,w,b,p,e,?,k,k,w,w,p,w,t,p,w,n,g p,f,y,n,f,s,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,d p,f,s,n,f,y,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,p 
e,f,s,n,f,n,a,c,b,o,e,?,s,s,o,o,p,n,o,p,n,c,l e,x,y,p,t,n,f,c,b,w,e,b,s,s,w,w,p,w,t,p,w,y,p p,f,s,n,f,y,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,d p,k,y,e,f,y,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,l p,f,s,n,f,s,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,l e,b,s,w,f,n,f,w,b,w,e,?,s,k,w,w,p,w,t,p,w,n,g p,x,y,e,f,f,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,p p,f,y,e,f,y,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,l p,f,s,e,f,y,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,l p,k,y,n,f,f,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,d e,x,f,w,f,n,f,w,b,p,e,?,s,s,w,w,p,w,t,p,w,s,g e,b,s,w,f,n,f,w,b,p,e,?,s,s,w,w,p,w,t,p,w,n,g p,k,s,e,f,y,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,l p,f,y,n,f,y,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,l e,f,y,n,t,n,f,c,b,w,e,b,s,s,w,w,p,w,t,p,w,v,p p,k,s,n,f,s,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,d p,k,s,n,f,y,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,l p,f,y,n,f,s,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,l p,f,y,e,f,m,a,c,b,w,e,c,k,y,c,c,p,w,n,n,w,c,d p,x,s,e,f,f,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,d p,k,y,e,f,s,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,p p,x,s,n,f,s,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,l p,k,s,e,f,f,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,l p,k,y,n,f,f,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,l p,f,y,e,f,f,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,p e,x,f,w,f,n,f,w,b,w,e,?,s,s,w,w,p,w,t,p,w,s,g p,k,s,n,f,s,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,d p,f,s,n,f,f,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,p p,k,s,e,f,y,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,d p,k,s,e,f,s,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,l p,k,y,n,f,s,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,l e,k,f,w,f,n,f,w,b,g,e,?,s,s,w,w,p,w,t,p,w,s,g p,k,s,n,f,f,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,p p,k,y,n,f,s,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,l p,k,s,n,f,s,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,l p,k,s,e,f,s,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,d p,x,s,n,f,y,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,l p,x,y,e,f,s,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,l p,f,s,e,f,y,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,p p,k,y,n,f,f,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,d e,b,s,n,f,n,f,c,b,w,e,b,y,y,n,n,p,w,t,p,w,y,d p,f,s,n,f,f,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,p p,k,y,e,f,f,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,d e,x,s,n,f,n,f,c,b,w,e,b,y,y,n,n,p,w,t,p,w,y,d p,f,s,n,f,f,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,d p,k,s,n,f,y,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,l p,f,y,n,f,s,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,p p,x,s,n,f,s,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,p e,k,f,w,f,n,f,w,b,w,e,?,s,s,w,w,p,w,t,p,w,s,g p,f,y,e,f,f,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,d p,x,s,e,f,y,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,p p,x,s,n,f,f,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,l p,f,s,n,f,f,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,p p,k,s,n,f,y,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,l p,k,s,e,f,f,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,p p,k,s,e,f,s,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,l p,x,s,e,f,f,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,d e,b,s,n,f,n,f,c,b,w,e,b,y,y,n,n,p,w,t,p,w,y,p p,k,s,e,f,y,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,p p,x,s,n,f,f,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,p p,x,y,e,f,f,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,p p,f,s,e,f,s,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,l e,b,s,n,f,n,a,c,b,n,e,?,s,s,o,o,p,n,o,p,n,c,l e,k,s,g,f,n,f,w,b,p,e,?,k,k,w,w,p,w,t,p,w,n,g p,k,y,e,f,m,f,c,b,y,e,c,k,y,c,c,p,w,n,n,w,c,d p,k,y,e,f,y,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,l p,f,s,e,f,y,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,d e,k,s,n,f,n,a,c,b,y,e,?,s,s,o,o,p,o,o,p,o,c,l p,f,y,e,f,s,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,d p,k,y,e,f,s,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,d e,k,f,w,f,n,f,w,b,p,e,?,k,s,w,w,p,w,t,p,w,s,g e,x,s,g,f,n,f,w,b,w,e,?,s,k,w,w,p,w,t,p,w,s,g e,k,s,n,f,n,a,c,b,y,e,?,s,s,o,o,p,o,o,p,o,v,l p,k,y,e,f,s,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,d p,k,y,n,f,s,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,l p,k,s,e,f,s,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,p e,x,s,w,f,n,f,w,b,w,e,?,k,k,w,w,p,w,t,p,w,s,g 
e,k,s,w,f,n,f,w,b,g,e,?,k,k,w,w,p,w,t,p,w,n,g e,k,s,w,f,n,f,w,b,p,e,?,k,s,w,w,p,w,t,p,w,s,g p,k,s,e,f,f,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,l p,x,s,e,f,s,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,p e,k,s,g,f,n,f,w,b,w,e,?,s,k,w,w,p,w,t,p,w,s,g e,x,f,w,f,n,f,w,b,g,e,?,s,k,w,w,p,w,t,p,w,s,g p,k,y,e,f,f,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,d e,b,s,w,f,n,f,w,b,g,e,?,s,s,w,w,p,w,t,p,w,s,g p,k,s,e,f,s,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,p p,f,s,n,f,f,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,p p,f,y,n,f,f,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,d e,k,s,g,f,n,f,w,b,p,e,?,s,k,w,w,p,w,t,p,w,s,g e,x,s,w,f,n,f,w,b,w,e,?,s,s,w,w,p,w,t,p,w,n,g p,f,y,e,f,y,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,p p,x,s,e,f,f,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,d e,x,f,w,f,n,f,w,b,g,e,?,k,k,w,w,p,w,t,p,w,s,g e,k,f,w,f,n,f,w,b,g,e,?,k,k,w,w,p,w,t,p,w,n,g p,f,s,e,f,s,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,d e,f,s,g,t,n,f,c,b,w,e,b,s,s,w,w,p,w,t,p,w,v,p p,k,s,e,f,y,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,d p,x,s,n,f,y,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,p e,k,s,n,f,n,a,c,b,y,e,?,s,s,o,o,p,n,o,p,b,c,l e,x,s,n,f,n,a,c,b,n,e,?,s,s,o,o,p,o,o,p,n,c,l p,k,y,e,f,s,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,l p,x,s,e,f,y,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,d p,x,s,n,f,s,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,d p,k,s,e,f,y,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,d p,x,s,n,f,y,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,l e,k,f,w,f,n,f,w,b,g,e,?,k,s,w,w,p,w,t,p,w,n,g e,k,s,n,f,n,a,c,b,o,e,?,s,s,o,o,p,n,o,p,n,c,l p,k,y,c,f,m,f,c,b,w,e,c,k,y,c,c,p,w,n,n,w,c,d e,x,f,g,f,n,f,w,b,g,e,?,s,s,w,w,p,w,t,p,w,s,g e,x,s,n,f,n,a,c,b,n,e,?,s,s,o,o,p,o,o,p,o,v,l e,x,s,n,f,n,a,c,b,y,e,?,s,s,o,o,p,o,o,p,b,v,l p,k,y,e,f,s,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,p e,k,f,w,f,n,f,w,b,p,e,?,s,s,w,w,p,w,t,p,w,n,g e,x,f,g,f,n,f,w,b,g,e,?,k,s,w,w,p,w,t,p,w,s,g e,f,s,n,f,n,a,c,b,o,e,?,s,s,o,o,p,o,o,p,n,v,l p,k,y,n,f,y,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,d p,k,y,n,f,s,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,l p,f,y,n,f,s,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,d p,f,y,e,f,y,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,p e,k,f,g,f,n,f,w,b,p,e,?,s,k,w,w,p,w,t,p,w,n,g p,f,y,e,f,s,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,d p,f,y,n,f,s,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,p p,k,s,e,f,s,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,l e,k,s,g,f,n,f,w,b,w,e,?,k,k,w,w,p,w,t,p,w,s,g e,b,s,n,f,n,a,c,b,y,e,?,s,s,o,o,p,n,o,p,y,c,l p,k,y,n,f,f,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,l p,x,s,n,f,s,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,p e,b,s,n,f,n,a,c,b,o,e,?,s,s,o,o,p,n,o,p,o,v,l p,k,s,n,f,y,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,d p,k,s,n,f,y,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,p p,k,s,e,f,s,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,l p,f,s,e,f,s,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,p p,x,s,e,f,s,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,p p,x,s,e,f,y,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,d p,x,s,n,f,s,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,d p,k,y,n,f,y,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,d e,x,s,p,t,n,f,c,b,w,e,b,s,s,w,w,p,w,t,p,w,y,p p,f,s,e,f,y,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,d p,x,y,n,f,m,a,c,b,w,e,c,k,y,c,c,p,w,n,n,w,c,d p,k,s,n,f,f,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,l e,k,s,g,f,n,f,w,b,g,e,?,k,k,w,w,p,w,t,p,w,s,g p,k,s,n,f,f,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,d p,f,y,n,f,y,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,p p,k,s,n,f,s,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,p p,k,y,e,f,y,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,d p,f,y,e,f,f,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,d e,f,s,c,t,n,f,c,b,w,e,b,s,s,w,w,p,w,t,p,w,y,p p,f,y,n,f,f,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,l p,k,s,e,f,f,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,d p,x,s,e,f,y,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,l p,f,y,e,f,y,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,l p,x,s,n,f,f,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,p e,f,s,n,f,n,a,c,b,y,e,?,s,s,o,o,p,n,o,p,n,c,l p,f,s,e,f,s,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,d 
p,x,y,e,f,s,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,p p,x,y,e,f,f,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,d p,x,s,e,f,s,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,p p,k,s,e,f,f,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,l p,x,s,e,f,y,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,l e,b,f,g,f,n,f,w,b,w,e,?,k,s,w,w,p,w,t,p,w,s,g p,k,y,e,f,s,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,p p,k,s,e,f,s,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,l e,k,f,g,f,n,f,w,b,g,e,?,s,s,w,w,p,w,t,p,w,n,g e,f,s,n,f,n,a,c,b,n,e,?,s,s,o,o,p,n,o,p,n,v,l p,f,y,n,f,m,f,c,b,w,e,c,k,y,c,c,p,w,n,n,w,c,d p,k,s,e,f,s,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,d p,f,s,n,f,f,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,l p,k,y,n,f,f,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,p p,k,s,e,f,y,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,l p,k,y,n,f,y,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,p p,k,y,n,f,y,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,l e,x,f,g,f,n,f,w,b,g,e,?,k,k,w,w,p,w,t,p,w,n,g p,k,y,e,f,f,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,p p,x,y,c,f,m,a,c,b,y,e,c,k,y,c,c,p,w,n,n,w,c,d p,k,y,e,f,s,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,p p,k,y,e,f,s,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,l e,b,s,w,f,n,f,w,b,p,e,?,s,k,w,w,p,w,t,p,w,s,g p,k,s,n,f,y,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,d p,x,s,e,f,y,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,p p,f,y,n,f,f,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,l p,f,s,e,f,y,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,p p,k,y,e,f,f,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,d p,k,y,e,f,f,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,d e,b,f,w,f,n,f,w,b,w,e,?,s,s,w,w,p,w,t,p,w,n,g p,k,y,n,f,m,a,c,b,w,e,c,k,y,c,c,p,w,n,n,w,c,d p,f,y,e,f,y,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,d p,k,s,n,f,f,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,p p,k,y,n,f,f,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,p p,x,s,e,f,s,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,l e,x,y,g,t,n,f,c,b,w,e,b,s,s,w,w,p,w,t,p,w,v,p p,f,s,n,f,y,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,d p,k,y,e,f,y,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,p e,k,f,w,f,n,f,w,b,w,e,?,k,k,w,w,p,w,t,p,w,n,g p,x,s,n,f,s,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,d p,k,y,n,f,y,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,d p,x,s,e,f,s,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,d e,k,s,w,f,n,f,w,b,w,e,?,s,k,w,w,p,w,t,p,w,n,g p,f,y,n,f,s,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,p p,k,s,e,f,s,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,p p,k,s,e,f,y,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,p p,f,y,e,f,f,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,l e,f,s,n,f,n,a,c,b,n,e,?,s,s,o,o,p,n,o,p,y,v,l p,f,y,n,f,y,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,d p,k,y,e,f,y,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,d p,k,s,e,f,y,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,d p,k,s,e,f,f,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,l e,b,s,n,f,n,a,c,b,o,e,?,s,s,o,o,p,o,o,p,b,v,l e,k,s,g,f,n,f,w,b,p,e,?,k,k,w,w,p,w,t,p,w,s,g e,x,s,n,f,n,a,c,b,n,e,?,s,s,o,o,p,o,o,p,n,v,l e,x,f,g,f,n,f,w,b,p,e,?,k,k,w,w,p,w,t,p,w,n,g p,k,s,e,f,y,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,d p,f,y,e,f,f,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,d p,k,s,n,f,s,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,d p,k,s,n,f,y,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,p e,b,s,w,f,n,f,w,b,p,e,?,k,s,w,w,p,w,t,p,w,n,g p,f,s,n,f,s,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,d e,b,f,g,f,n,f,w,b,p,e,?,s,k,w,w,p,w,t,p,w,n,g p,x,s,n,f,f,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,d p,x,y,e,f,f,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,l p,f,y,c,f,m,f,c,b,w,e,c,k,y,c,c,p,w,n,n,w,c,d p,f,s,n,f,s,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,d p,x,y,e,f,s,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,l p,f,s,n,f,s,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,l p,f,y,e,f,f,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,p p,f,s,e,f,f,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,l p,x,s,e,f,s,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,d e,k,s,g,f,n,f,w,b,w,e,?,k,s,w,w,p,w,t,p,w,s,g p,x,s,e,f,y,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,p p,k,s,n,f,y,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,l p,k,y,e,f,s,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,l p,x,s,n,f,y,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,p 
e,b,s,g,f,n,f,w,b,g,e,?,s,k,w,w,p,w,t,p,w,n,g p,x,s,n,f,s,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,l p,f,s,e,f,s,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,d p,k,s,n,f,s,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,d e,b,s,g,f,n,f,w,b,w,e,?,s,s,w,w,p,w,t,p,w,s,g p,x,s,n,f,y,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,p p,f,y,e,f,y,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,p p,f,s,e,f,y,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,d p,f,y,n,f,m,f,c,b,y,e,c,k,y,c,c,p,w,n,n,w,c,d e,b,f,g,f,n,f,w,b,g,e,?,k,s,w,w,p,w,t,p,w,s,g p,f,s,e,f,f,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,p p,k,s,n,f,f,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,l e,k,s,g,f,n,f,w,b,p,e,?,s,k,w,w,p,w,t,p,w,n,g p,k,y,n,f,f,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,l p,x,s,e,f,s,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,l e,b,s,n,f,n,a,c,b,y,e,?,s,s,o,o,p,o,o,p,y,c,l p,k,y,n,f,f,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,l p,f,y,n,f,s,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,l e,x,s,n,f,n,a,c,b,n,e,?,s,s,o,o,p,n,o,p,b,c,l p,k,s,n,f,f,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,l p,k,s,n,f,f,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,d p,k,s,n,f,s,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,p p,k,s,n,f,s,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,p e,f,y,c,t,n,f,c,b,w,e,b,s,s,w,w,p,w,t,p,w,y,p e,f,s,n,f,n,a,c,b,n,e,?,s,s,o,o,p,n,o,p,n,c,l e,b,s,w,f,n,f,w,b,g,e,?,k,k,w,w,p,w,t,p,w,n,g p,k,s,e,f,s,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,p e,b,s,g,f,n,f,w,b,w,e,?,s,s,w,w,p,w,t,p,w,n,g p,k,y,e,f,y,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,p p,k,y,n,f,s,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,d e,x,s,n,f,n,a,c,b,o,e,?,s,s,o,o,p,o,o,p,y,c,l p,x,s,e,f,y,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,p p,x,s,n,f,f,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,d p,f,s,e,f,y,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,l p,f,y,e,f,s,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,p p,x,y,e,f,s,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,p p,k,s,e,f,f,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,d e,k,y,n,f,n,f,c,b,w,e,b,y,y,n,n,p,w,t,p,w,y,d p,f,s,e,f,f,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,d p,k,s,n,f,s,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,p p,f,y,n,f,y,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,p p,f,s,e,f,f,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,d e,k,f,g,f,n,f,w,b,w,e,?,s,s,w,w,p,w,t,p,w,n,g p,k,s,n,f,f,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,d e,b,f,w,f,n,f,w,b,p,e,?,k,s,w,w,p,w,t,p,w,s,g p,k,s,e,f,y,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,l e,x,s,g,f,n,f,w,b,p,e,?,k,k,w,w,p,w,t,p,w,n,g e,k,s,n,f,n,a,c,b,y,e,?,s,s,o,o,p,o,o,p,y,c,l p,k,s,e,f,f,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,p p,k,y,n,f,f,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,l e,k,f,g,f,n,f,w,b,g,e,?,k,k,w,w,p,w,t,p,w,n,g p,x,s,n,f,y,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,l p,k,y,n,f,s,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,l e,x,f,w,f,n,f,w,b,g,e,?,k,s,w,w,p,w,t,p,w,n,g p,k,y,e,f,f,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,p p,k,s,e,f,f,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,l p,k,s,e,f,f,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,l p,k,y,e,f,s,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,p p,k,y,n,f,s,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,p e,k,s,g,f,n,f,w,b,g,e,?,k,k,w,w,p,w,t,p,w,n,g p,k,y,n,f,f,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,p p,k,s,e,f,f,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,l e,x,s,g,f,n,f,w,b,w,e,?,s,k,w,w,p,w,t,p,w,n,g e,f,s,n,f,n,a,c,b,y,e,?,s,s,o,o,p,o,o,p,y,v,l e,k,s,g,f,n,f,w,b,g,e,?,k,s,w,w,p,w,t,p,w,n,g p,x,s,e,f,s,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,p p,k,s,n,f,f,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,d p,k,s,n,f,s,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,p e,x,s,g,f,n,f,w,b,p,e,?,k,s,w,w,p,w,t,p,w,s,g p,k,y,e,f,s,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,p e,f,s,n,f,n,a,c,b,n,e,?,s,s,o,o,p,o,o,p,y,v,l e,k,s,w,f,n,f,w,b,w,e,?,k,k,w,w,p,w,t,p,w,n,g p,f,y,e,f,m,f,c,b,y,e,c,k,y,c,c,p,w,n,n,w,c,d e,b,s,g,f,n,f,w,b,g,e,?,k,k,w,w,p,w,t,p,w,s,g p,f,y,n,f,y,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,p e,b,s,n,f,n,a,c,b,o,e,?,s,s,o,o,p,n,o,p,b,c,l e,b,s,g,f,n,f,w,b,w,e,?,k,s,w,w,p,w,t,p,w,n,g 
e,k,s,n,f,n,a,c,b,n,e,?,s,s,o,o,p,o,o,p,y,c,l e,b,s,g,f,n,f,w,b,p,e,?,k,k,w,w,p,w,t,p,w,n,g p,k,s,n,f,f,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,l p,k,y,n,f,y,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,p e,f,s,n,f,n,a,c,b,n,e,?,s,s,o,o,p,n,o,p,o,c,l p,k,s,e,f,y,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,p p,f,s,e,f,y,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,p p,k,s,n,f,y,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,l p,k,s,n,f,s,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,l e,b,s,n,f,n,a,c,b,y,e,?,s,s,o,o,p,o,o,p,y,v,l e,k,f,g,f,n,f,w,b,w,e,?,k,k,w,w,p,w,t,p,w,s,g e,b,s,n,f,n,a,c,b,o,e,?,s,s,o,o,p,o,o,p,o,c,l p,k,s,n,f,s,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,l p,k,y,e,f,s,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,l p,k,y,e,f,y,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,p p,k,s,n,f,y,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,l e,f,s,n,f,n,a,c,b,n,e,?,s,s,o,o,p,o,o,p,n,c,l p,k,s,e,f,y,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,d e,x,s,n,f,n,a,c,b,o,e,?,s,s,o,o,p,o,o,p,o,v,l p,k,s,n,f,y,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,d e,k,s,w,f,n,f,w,b,p,e,?,s,s,w,w,p,w,t,p,w,s,g p,k,y,e,f,f,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,d p,k,y,n,f,f,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,d e,k,f,g,f,n,f,w,b,g,e,?,s,s,w,w,p,w,t,p,w,s,g p,f,s,n,f,y,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,p e,x,s,g,f,n,f,w,b,w,e,?,s,s,w,w,p,w,t,p,w,n,g p,k,y,n,f,f,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,p p,f,s,e,f,f,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,d e,x,f,w,f,n,f,w,b,p,e,?,k,k,w,w,p,w,t,p,w,n,g p,k,y,n,f,s,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,l p,f,y,n,f,m,a,c,b,w,e,c,k,y,c,c,p,w,n,n,w,c,d e,b,f,g,f,n,f,w,b,w,e,?,s,s,w,w,p,w,t,p,w,n,g p,k,y,e,f,y,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,p p,x,y,e,f,s,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,d p,k,s,e,f,y,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,l e,k,f,g,f,n,f,w,b,g,e,?,s,k,w,w,p,w,t,p,w,n,g e,k,f,w,f,n,f,w,b,p,e,?,s,s,w,w,p,w,t,p,w,s,g p,k,y,e,f,s,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,d e,k,s,g,f,n,f,w,b,g,e,?,s,s,w,w,p,w,t,p,w,n,g p,k,s,n,f,f,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,d p,k,y,n,f,y,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,l e,k,s,w,f,n,f,w,b,w,e,?,k,s,w,w,p,w,t,p,w,s,g p,k,y,e,f,y,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,l e,b,s,w,f,n,f,w,b,w,e,?,k,s,w,w,p,w,t,p,w,s,g p,k,s,n,f,y,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,d e,x,s,w,f,n,f,w,b,w,e,?,s,k,w,w,p,w,t,p,w,n,g e,b,s,n,f,n,a,c,b,n,e,?,s,s,o,o,p,o,o,p,n,v,l p,k,s,n,f,s,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,d p,k,y,e,f,f,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,p e,x,f,w,f,n,f,w,b,w,e,?,s,s,w,w,p,w,t,p,w,n,g p,x,y,e,f,m,a,c,b,y,e,c,k,y,c,c,p,w,n,n,w,c,d p,k,s,e,f,y,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,p p,k,s,n,f,f,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,l p,k,y,n,f,y,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,p p,k,s,e,f,f,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,d e,k,s,n,f,n,a,c,b,o,e,?,s,s,o,o,p,n,o,p,b,c,l p,k,y,n,f,f,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,l e,f,y,n,f,n,f,c,b,w,e,b,y,y,n,n,p,w,t,p,w,y,d e,k,s,g,f,n,f,w,b,w,e,?,s,s,w,w,p,w,t,p,w,n,g p,f,y,e,f,y,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,l p,k,y,y,f,n,f,w,n,y,e,c,y,y,y,y,p,y,o,e,w,c,l p,k,y,e,f,y,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,p p,k,s,e,f,s,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,l p,k,y,e,f,s,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,l e,b,s,n,f,n,a,c,b,o,e,?,s,s,o,o,p,n,o,p,n,c,l e,x,s,g,t,n,f,c,b,w,e,b,s,s,w,w,p,w,t,p,w,y,p e,k,s,g,f,n,f,w,b,p,e,?,k,s,w,w,p,w,t,p,w,n,g p,f,y,e,f,s,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,p p,k,s,e,f,s,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,p p,k,y,e,f,s,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,l e,b,f,w,f,n,f,w,b,w,e,?,k,k,w,w,p,w,t,p,w,n,g e,f,s,n,f,n,a,c,b,n,e,?,s,s,o,o,p,o,o,p,o,c,l p,k,s,n,f,y,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,p p,k,s,e,f,f,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,d e,x,f,w,f,n,f,w,b,g,e,?,k,s,w,w,p,w,t,p,w,s,g p,k,y,e,f,y,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,l e,x,f,w,f,n,f,w,b,g,e,?,s,s,w,w,p,w,t,p,w,s,g 
p,k,y,e,f,y,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,l p,k,y,n,f,s,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,p e,x,s,g,f,n,f,w,b,p,e,?,k,s,w,w,p,w,t,p,w,n,g e,x,s,w,f,n,f,w,b,g,e,?,s,k,w,w,p,w,t,p,w,n,g p,k,y,n,f,f,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,d p,k,s,n,f,f,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,l p,k,s,e,f,s,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,d e,b,f,w,f,n,f,w,b,p,e,?,k,k,w,w,p,w,t,p,w,n,g p,f,s,n,f,f,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,p p,k,s,e,f,y,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,d p,k,s,n,f,f,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,l p,x,y,n,f,m,f,c,b,y,e,c,k,y,c,c,p,w,n,n,w,c,d p,k,y,n,f,f,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,l e,f,s,n,f,n,a,c,b,o,e,?,s,s,o,o,p,n,o,p,y,c,l p,x,y,e,f,f,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,d e,f,s,n,f,n,a,c,b,y,e,?,s,s,o,o,p,n,o,p,y,v,l e,b,s,w,f,n,f,w,b,g,e,?,s,s,w,w,p,w,t,p,w,n,g e,b,s,g,f,n,f,w,b,p,e,?,s,s,w,w,p,w,t,p,w,n,g p,k,y,e,f,s,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,l p,k,s,n,f,y,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,d p,k,s,e,f,f,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,p p,k,s,e,f,y,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,d e,b,f,w,f,n,f,w,b,g,e,?,s,s,w,w,p,w,t,p,w,s,g e,k,s,g,f,n,f,w,b,p,e,?,s,s,w,w,p,w,t,p,w,n,g p,f,y,e,f,m,f,c,b,w,e,c,k,y,c,c,p,w,n,n,w,c,d p,k,y,n,f,f,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,d p,k,y,n,f,f,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,p p,f,s,e,f,y,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,d p,x,s,n,f,s,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,d p,k,y,n,f,s,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,p p,k,y,c,f,m,f,c,b,y,e,c,k,y,c,c,p,w,n,n,w,c,d p,k,y,n,f,s,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,d e,k,s,w,f,n,f,w,b,g,e,?,s,s,w,w,p,w,t,p,w,n,g p,f,y,n,f,y,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,l e,b,f,g,f,n,f,w,b,p,e,?,s,s,w,w,p,w,t,p,w,s,g p,f,s,e,f,f,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,d p,k,s,e,f,y,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,l e,k,s,g,f,n,f,w,b,p,e,?,k,s,w,w,p,w,t,p,w,s,g e,x,s,g,f,n,f,w,b,p,e,?,s,s,w,w,p,w,t,p,w,s,g p,k,s,e,f,y,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,l e,k,s,n,f,n,a,c,b,n,e,?,s,s,o,o,p,n,o,p,y,c,l e,f,y,p,t,n,f,c,b,w,e,b,s,s,w,w,p,w,t,p,w,y,p e,b,s,w,f,n,f,w,b,g,e,?,s,k,w,w,p,w,t,p,w,n,g p,f,y,n,f,s,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,l p,f,s,e,f,f,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,p p,k,s,e,f,f,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,l e,k,f,g,f,n,f,w,b,p,e,?,s,s,w,w,p,w,t,p,w,n,g e,x,f,w,f,n,f,w,b,p,e,?,k,k,w,w,p,w,t,p,w,s,g e,x,s,c,t,n,f,c,b,w,e,b,s,s,w,w,p,w,t,p,w,y,p e,k,f,g,f,n,f,w,b,w,e,?,s,s,w,w,p,w,t,p,w,s,g p,k,s,e,f,y,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,l p,k,y,e,f,y,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,p p,k,s,n,f,y,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,p p,x,y,e,f,f,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,p e,k,s,n,f,n,a,c,b,o,e,?,s,s,o,o,p,o,o,p,o,c,l p,f,y,y,f,n,f,w,n,w,e,c,y,y,y,y,p,y,o,e,w,c,l p,x,y,c,f,m,a,c,b,w,e,c,k,y,c,c,p,w,n,n,w,c,d p,x,s,n,f,y,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,d e,x,y,p,t,n,f,c,b,w,e,b,s,s,w,w,p,w,t,p,w,v,p p,k,y,n,f,s,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,p e,k,s,g,f,n,f,w,b,g,e,?,s,k,w,w,p,w,t,p,w,n,g e,b,s,g,f,n,f,w,b,p,e,?,s,k,w,w,p,w,t,p,w,s,g p,k,s,e,f,f,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,p e,x,s,n,f,n,a,c,b,o,e,?,s,s,o,o,p,n,o,p,b,v,l e,b,s,n,f,n,a,c,b,o,e,?,s,s,o,o,p,n,o,p,b,v,l e,f,s,n,t,n,f,c,b,w,e,b,s,s,w,w,p,w,t,p,w,y,p e,k,s,g,f,n,f,w,b,g,e,?,s,s,w,w,p,w,t,p,w,s,g p,f,s,n,f,y,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,d p,k,s,n,f,f,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,d p,f,y,n,f,s,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,l p,k,y,e,f,y,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,d p,k,s,n,f,f,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,l p,k,s,e,f,y,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,p e,b,s,w,f,n,f,w,b,w,e,?,k,s,w,w,p,w,t,p,w,n,g p,k,y,n,f,m,f,c,b,y,e,c,k,y,c,c,p,w,n,n,w,c,d p,k,y,e,f,y,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,d e,k,s,n,f,n,a,c,b,o,e,?,s,s,o,o,p,o,o,p,y,v,l 
p,k,s,e,f,f,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,p e,b,s,w,f,n,f,w,b,w,e,?,k,k,w,w,p,w,t,p,w,s,g e,x,s,g,f,n,f,w,b,p,e,?,k,k,w,w,p,w,t,p,w,s,g p,k,s,n,f,s,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,l p,k,y,n,f,y,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,p p,k,s,n,f,f,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,d p,k,y,e,f,f,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,p p,k,s,e,f,s,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,l p,k,y,n,f,y,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,p e,b,s,g,f,n,f,w,b,p,e,?,k,k,w,w,p,w,t,p,w,s,g p,k,y,e,f,y,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,p e,b,f,w,f,n,f,w,b,g,e,?,s,k,w,w,p,w,t,p,w,s,g p,c,y,y,f,n,f,w,n,y,e,c,y,y,y,y,p,y,o,e,w,c,l p,x,s,n,f,s,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,p p,k,y,e,f,y,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,l e,x,s,g,f,n,f,w,b,g,e,?,s,s,w,w,p,w,t,p,w,s,g e,b,s,n,f,n,a,c,b,n,e,?,s,s,o,o,p,n,o,p,y,v,l p,k,y,e,f,y,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,l e,b,s,n,f,n,a,c,b,o,e,?,s,s,o,o,p,o,o,p,n,c,l p,k,y,e,f,y,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,l p,k,y,e,f,f,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,l p,k,s,e,f,y,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,p e,b,s,g,f,n,f,w,b,p,e,?,s,s,w,w,p,w,t,p,w,s,g e,k,s,n,f,n,a,c,b,n,e,?,s,s,o,o,p,o,o,p,n,c,l e,b,s,n,f,n,a,c,b,y,e,?,s,s,o,o,p,n,o,p,n,v,l p,x,s,n,f,f,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,d e,x,s,g,t,n,f,c,b,w,e,b,s,s,w,w,p,w,t,p,w,v,p e,x,f,g,f,n,f,w,b,w,e,?,k,s,w,w,p,w,t,p,w,s,g p,k,s,n,f,s,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,l e,k,f,g,f,n,f,w,b,p,e,?,k,s,w,w,p,w,t,p,w,n,g p,k,s,n,f,f,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,p p,k,y,e,f,y,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,d p,k,y,n,f,s,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,d e,f,s,p,t,n,f,c,b,w,e,b,s,s,w,w,p,w,t,p,w,y,p p,k,y,n,f,f,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,d p,k,y,e,f,f,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,p e,f,s,n,f,n,a,c,b,y,e,?,s,s,o,o,p,n,o,p,b,c,l p,k,y,e,f,f,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,l p,k,y,n,f,y,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,d p,k,y,n,f,y,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,d p,f,y,n,f,s,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,p p,k,s,e,f,f,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,p p,k,y,e,f,s,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,l e,x,s,n,f,n,a,c,b,n,e,?,s,s,o,o,p,o,o,p,o,c,l p,f,s,e,f,s,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,p e,k,s,g,f,n,f,w,b,p,e,?,s,s,w,w,p,w,t,p,w,s,g p,k,s,e,f,s,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,l p,k,s,e,f,y,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,l e,f,s,n,f,n,a,c,b,n,e,?,s,s,o,o,p,o,o,p,o,v,l p,k,s,n,f,s,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,l e,x,f,g,f,n,f,w,b,p,e,?,s,k,w,w,p,w,t,p,w,s,g e,k,s,n,f,n,a,c,b,n,e,?,s,s,o,o,p,o,o,p,o,v,l p,f,s,e,f,s,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,p p,k,s,n,f,s,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,d e,f,y,n,t,n,f,c,b,w,e,b,s,s,w,w,p,w,t,p,w,y,p e,x,f,g,f,n,f,w,b,g,e,?,s,k,w,w,p,w,t,p,w,n,g e,k,s,w,f,n,f,w,b,w,e,?,k,s,w,w,p,w,t,p,w,n,g e,b,s,n,f,n,a,c,b,n,e,?,s,s,o,o,p,n,o,p,b,c,l p,k,y,e,f,s,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,p p,k,y,e,f,y,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,l e,k,y,n,f,n,f,c,b,w,e,b,y,y,n,n,p,w,t,p,w,y,p e,x,f,g,f,n,f,w,b,p,e,?,k,k,w,w,p,w,t,p,w,s,g p,k,s,n,f,f,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,p p,k,y,e,f,f,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,l p,k,y,n,f,f,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,d p,f,y,n,f,f,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,l e,f,s,n,f,n,a,c,b,y,e,?,s,s,o,o,p,n,o,p,b,v,l p,f,y,e,f,f,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,d p,k,s,n,f,s,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,p p,k,s,n,f,f,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,l e,b,s,n,f,n,a,c,b,n,e,?,s,s,o,o,p,o,o,p,y,c,l e,x,f,g,f,n,f,w,b,g,e,?,k,s,w,w,p,w,t,p,w,n,g p,k,s,n,f,s,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,p p,k,y,n,f,f,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,p e,x,s,g,f,n,f,w,b,w,e,?,s,s,w,w,p,w,t,p,w,s,g p,k,y,n,f,s,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,d p,k,s,e,f,s,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,d 
p,k,y,e,f,y,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,l e,b,s,n,f,n,a,c,b,o,e,?,s,s,o,o,p,n,o,p,o,c,l e,b,s,g,f,n,f,w,b,g,e,?,s,k,w,w,p,w,t,p,w,s,g p,x,y,e,f,m,a,c,b,w,e,c,k,y,c,c,p,w,n,n,w,c,d p,k,s,n,f,f,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,p p,k,y,e,f,f,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,d p,k,y,e,f,y,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,d p,k,s,n,f,f,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,d p,k,y,e,f,s,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,l p,k,s,e,f,s,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,d p,x,s,e,f,y,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,p p,k,s,e,f,s,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,d p,k,s,n,f,y,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,d e,b,f,g,f,n,f,w,b,w,e,?,k,k,w,w,p,w,t,p,w,s,g p,k,y,n,f,m,a,c,b,y,e,c,k,y,c,c,p,w,n,n,w,c,d e,f,s,n,t,n,f,c,b,w,e,b,s,s,w,w,p,w,t,p,w,v,p p,k,s,n,f,y,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,p p,f,y,y,f,n,f,w,n,y,e,c,y,y,y,y,p,y,o,e,w,c,l p,k,s,n,f,s,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,l p,f,y,e,f,m,a,c,b,y,e,c,k,y,c,c,p,w,n,n,w,c,d e,x,f,g,f,n,f,w,b,p,e,?,s,s,w,w,p,w,t,p,w,n,g p,k,y,n,f,y,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,d p,k,s,n,f,s,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,l p,f,y,n,f,y,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,d e,f,s,n,f,n,a,c,b,n,e,?,s,s,o,o,p,o,o,p,y,c,l p,k,y,n,f,s,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,d p,f,y,n,f,f,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,d p,k,s,n,f,s,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,p p,f,y,e,f,s,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,l p,k,s,n,f,s,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,p p,x,y,e,f,s,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,d p,k,y,n,f,y,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,p e,x,s,w,f,n,f,w,b,p,e,?,k,s,w,w,p,w,t,p,w,s,g e,x,s,g,f,n,f,w,b,g,e,?,s,k,w,w,p,w,t,p,w,s,g e,x,f,w,f,n,f,w,b,w,e,?,k,k,w,w,p,w,t,p,w,s,g p,k,s,n,f,s,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,d p,k,s,e,f,f,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,d p,k,y,e,f,y,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,p p,k,s,e,f,f,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,d p,k,s,n,f,y,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,p p,k,y,n,f,f,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,l p,k,y,e,f,y,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,p p,k,s,n,f,y,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,d p,k,s,e,f,s,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,d e,f,s,n,f,n,a,c,b,o,e,?,s,s,o,o,p,n,o,p,o,c,l p,f,s,n,f,s,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,d p,k,s,n,f,s,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,l e,x,s,g,f,n,f,w,b,g,e,?,k,s,w,w,p,w,t,p,w,n,g e,x,f,w,f,n,f,w,b,p,e,?,s,s,w,w,p,w,t,p,w,n,g p,k,y,n,f,s,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,d e,x,s,n,f,n,a,c,b,n,e,?,s,s,o,o,p,n,o,p,o,c,l p,k,y,n,f,s,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,l p,k,y,n,f,s,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,l p,k,y,n,f,s,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,p p,k,y,e,f,s,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,l p,k,s,n,f,f,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,l e,b,s,n,f,n,a,c,b,y,e,?,s,s,o,o,p,n,o,p,b,c,l e,f,s,n,f,n,a,c,b,o,e,?,s,s,o,o,p,o,o,p,n,c,l e,b,s,w,f,n,f,w,b,g,e,?,k,k,w,w,p,w,t,p,w,s,g p,k,s,n,f,y,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,l e,x,s,n,f,n,a,c,b,n,e,?,s,s,o,o,p,n,o,p,n,c,l p,k,y,n,f,s,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,d e,b,f,g,f,n,f,w,b,g,e,?,k,k,w,w,p,w,t,p,w,s,g e,b,s,g,f,n,f,w,b,p,e,?,k,s,w,w,p,w,t,p,w,n,g p,x,s,n,f,s,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,d e,x,s,n,f,n,a,c,b,n,e,?,s,s,o,o,p,o,o,p,b,c,l e,x,s,w,f,n,f,w,b,p,e,?,s,k,w,w,p,w,t,p,w,n,g e,x,s,n,f,n,f,c,b,w,e,b,y,y,n,n,p,w,t,p,w,y,p e,k,s,g,f,n,f,w,b,g,e,?,s,k,w,w,p,w,t,p,w,s,g e,x,s,w,f,n,f,w,b,g,e,?,k,k,w,w,p,w,t,p,w,n,g p,f,y,c,f,m,f,c,b,y,e,c,k,y,c,c,p,w,n,n,w,c,d e,x,s,n,f,n,a,c,b,y,e,?,s,s,o,o,p,o,o,p,y,c,l p,k,s,e,f,y,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,l p,k,y,e,f,s,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,l e,x,s,w,f,n,f,w,b,p,e,?,k,s,w,w,p,w,t,p,w,n,g e,k,f,w,f,n,f,w,b,g,e,?,k,k,w,w,p,w,t,p,w,s,g e,b,f,g,f,n,f,w,b,p,e,?,k,s,w,w,p,w,t,p,w,s,g 
e,k,s,n,f,n,a,c,b,n,e,?,s,s,o,o,p,o,o,p,b,v,l p,k,y,n,f,f,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,p p,f,y,e,f,s,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,p p,k,s,e,f,s,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,l e,k,s,n,f,n,a,c,b,y,e,?,s,s,o,o,p,n,o,p,n,v,l e,f,s,p,t,n,f,c,b,w,e,b,s,s,w,w,p,w,t,p,w,v,p p,k,y,n,f,f,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,p e,b,s,w,f,n,f,w,b,g,e,?,k,s,w,w,p,w,t,p,w,s,g p,k,s,e,f,y,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,p e,k,s,w,f,n,f,w,b,p,e,?,k,k,w,w,p,w,t,p,w,s,g p,k,y,n,f,s,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,l p,k,s,n,f,s,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,l e,f,s,n,f,n,a,c,b,y,e,?,s,s,o,o,p,o,o,p,b,c,l p,k,s,n,f,y,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,l p,k,y,e,f,f,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,d e,x,s,n,f,n,a,c,b,n,e,?,s,s,o,o,p,n,o,p,y,v,l e,x,f,w,f,n,f,w,b,g,e,?,k,k,w,w,p,w,t,p,w,n,g e,x,f,g,f,n,f,w,b,w,e,?,k,s,w,w,p,w,t,p,w,n,g e,x,s,n,f,n,a,c,b,n,e,?,s,s,o,o,p,n,o,p,b,v,l p,k,s,n,f,y,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,p p,k,y,e,f,y,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,p e,x,s,w,f,n,f,w,b,g,e,?,s,s,w,w,p,w,t,p,w,s,g p,k,y,e,f,y,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,d e,x,f,g,f,n,f,w,b,w,e,?,s,s,w,w,p,w,t,p,w,s,g p,k,s,n,f,s,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,l p,f,y,n,f,f,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,d p,k,s,e,f,s,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,d e,k,s,n,f,n,f,c,b,w,e,b,y,y,n,n,p,w,t,p,w,y,p p,k,s,e,f,y,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,l p,k,s,n,f,y,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,p e,b,s,g,f,n,f,w,b,g,e,?,s,s,w,w,p,w,t,p,w,s,g p,k,y,n,f,s,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,l p,k,s,n,f,f,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,p e,k,f,g,f,n,f,w,b,w,e,?,k,s,w,w,p,w,t,p,w,n,g p,k,y,n,f,f,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,l p,k,s,e,f,y,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,l p,k,y,n,f,s,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,p p,k,s,e,f,y,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,d e,k,f,w,f,n,f,w,b,g,e,?,s,k,w,w,p,w,t,p,w,n,g p,k,y,e,f,y,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,d p,k,y,e,f,s,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,d e,b,f,w,f,n,f,w,b,g,e,?,k,s,w,w,p,w,t,p,w,s,g e,x,s,g,f,n,f,w,b,g,e,?,s,s,w,w,p,w,t,p,w,n,g e,b,s,g,f,n,f,w,b,w,e,?,k,s,w,w,p,w,t,p,w,s,g p,k,y,e,f,s,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,p p,k,s,n,f,y,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,p p,k,y,e,f,y,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,d e,b,s,w,f,n,f,w,b,w,e,?,s,k,w,w,p,w,t,p,w,s,g p,f,s,e,f,f,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,d p,k,s,n,f,s,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,d e,k,s,w,f,n,f,w,b,p,e,?,s,k,w,w,p,w,t,p,w,s,g p,k,y,e,f,y,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,d p,k,s,e,f,f,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,l p,k,s,n,f,s,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,d e,x,s,n,t,n,f,c,b,w,e,b,s,s,w,w,p,w,t,p,w,v,p p,k,s,n,f,f,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,p p,k,y,e,f,f,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,p p,b,y,y,f,n,f,w,n,w,e,c,y,y,y,y,p,y,o,e,w,c,l p,k,y,e,f,f,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,l e,x,s,n,t,n,f,c,b,w,e,b,s,s,w,w,p,w,t,p,w,y,p p,k,s,e,f,f,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,p e,k,f,g,f,n,f,w,b,p,e,?,k,k,w,w,p,w,t,p,w,s,g p,k,s,e,f,s,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,d p,k,s,e,f,f,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,d p,k,s,n,f,y,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,l p,k,y,n,f,y,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,l e,x,f,g,f,n,f,w,b,p,e,?,k,s,w,w,p,w,t,p,w,n,g e,b,s,w,f,n,f,w,b,g,e,?,k,s,w,w,p,w,t,p,w,n,g p,k,y,e,f,s,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,d e,b,s,n,f,n,a,c,b,y,e,?,s,s,o,o,p,o,o,p,n,c,l p,k,y,e,f,y,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,p p,k,s,e,f,y,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,p e,b,s,g,f,n,f,w,b,g,e,?,k,s,w,w,p,w,t,p,w,s,g p,k,s,e,f,y,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,p e,b,s,n,f,n,a,c,b,n,e,?,s,s,o,o,p,n,o,p,o,v,l p,f,s,n,f,y,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,p p,k,s,e,f,y,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,d 
p,k,s,n,f,y,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,l p,k,y,n,f,y,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,l e,b,f,g,f,n,f,w,b,g,e,?,s,s,w,w,p,w,t,p,w,n,g e,k,s,w,f,n,f,w,b,g,e,?,s,s,w,w,p,w,t,p,w,s,g p,k,s,n,f,y,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,l p,f,y,n,f,f,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,d e,x,s,w,f,n,f,w,b,p,e,?,k,k,w,w,p,w,t,p,w,s,g e,x,s,n,f,n,a,c,b,o,e,?,s,s,o,o,p,o,o,p,b,c,l p,k,s,n,f,s,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,p p,k,y,n,f,s,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,l e,k,s,n,f,n,a,c,b,o,e,?,s,s,o,o,p,o,o,p,b,v,l e,b,f,w,f,n,f,w,b,w,e,?,k,k,w,w,p,w,t,p,w,s,g e,x,s,w,f,n,f,w,b,w,e,?,k,k,w,w,p,w,t,p,w,n,g p,f,y,e,f,y,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,p p,k,y,e,f,f,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,l p,x,y,e,f,m,f,c,b,y,e,c,k,y,c,c,p,w,n,n,w,c,d e,f,s,n,f,n,a,c,b,y,e,?,s,s,o,o,p,n,o,p,y,c,l p,k,s,n,f,f,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,d p,x,s,n,f,s,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,d p,k,y,e,f,s,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,d e,b,s,w,f,n,f,w,b,p,e,?,s,s,w,w,p,w,t,p,w,s,g e,b,s,g,f,n,f,w,b,p,e,?,k,s,w,w,p,w,t,p,w,s,g e,k,f,g,f,n,f,w,b,g,e,?,k,s,w,w,p,w,t,p,w,n,g e,k,f,g,f,n,f,w,b,p,e,?,k,k,w,w,p,w,t,p,w,n,g e,x,f,w,f,n,f,w,b,w,e,?,k,s,w,w,p,w,t,p,w,n,g p,k,y,e,f,s,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,p p,f,y,e,f,s,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,l p,k,y,e,f,s,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,l p,f,s,n,f,f,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,l e,b,s,w,f,n,f,w,b,p,e,?,k,s,w,w,p,w,t,p,w,s,g e,k,s,w,f,n,f,w,b,w,e,?,k,k,w,w,p,w,t,p,w,s,g e,b,f,w,f,n,f,w,b,p,e,?,s,s,w,w,p,w,t,p,w,n,g e,f,s,n,f,n,a,c,b,n,e,?,s,s,o,o,p,n,o,p,b,c,l p,k,y,e,f,s,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,d p,k,s,n,f,f,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,p p,k,s,n,f,y,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,p p,f,y,n,f,y,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,d p,x,s,e,f,f,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,l e,f,s,n,f,n,a,c,b,y,e,?,s,s,o,o,p,o,o,p,o,v,l e,k,f,w,f,n,f,w,b,w,e,?,s,k,w,w,p,w,t,p,w,s,g p,k,s,n,f,y,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,d p,x,s,n,f,y,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,d e,b,f,g,f,n,f,w,b,w,e,?,s,k,w,w,p,w,t,p,w,s,g p,k,y,n,f,s,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,d e,b,s,n,f,n,a,c,b,o,e,?,s,s,o,o,p,o,o,p,y,c,l e,f,s,n,f,n,a,c,b,y,e,?,s,s,o,o,p,o,o,p,n,v,l p,k,y,e,f,s,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,p p,k,s,e,f,s,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,d p,k,y,n,f,s,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,p p,k,y,e,f,s,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,p p,k,y,n,f,f,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,d e,x,s,g,f,n,f,w,b,w,e,?,k,k,w,w,p,w,t,p,w,s,g p,k,y,n,f,f,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,d e,f,y,c,t,n,f,c,b,w,e,b,s,s,w,w,p,w,t,p,w,v,p e,b,s,n,f,n,a,c,b,n,e,?,s,s,o,o,p,n,o,p,n,v,l p,k,s,e,f,y,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,l p,k,s,n,f,f,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,d p,k,y,n,f,f,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,p e,b,f,g,f,n,f,w,b,g,e,?,s,k,w,w,p,w,t,p,w,s,g p,k,y,n,f,s,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,p e,b,f,w,f,n,f,w,b,p,e,?,k,k,w,w,p,w,t,p,w,s,g p,k,s,e,f,f,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,l e,x,s,w,f,n,f,w,b,p,e,?,s,k,w,w,p,w,t,p,w,s,g p,k,y,e,f,f,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,l p,k,y,e,f,s,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,d e,x,s,w,f,n,f,w,b,w,e,?,k,s,w,w,p,w,t,p,w,n,g p,k,y,e,f,f,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,d p,k,y,n,f,s,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,p e,k,s,n,f,n,a,c,b,o,e,?,s,s,o,o,p,n,o,p,o,v,l e,k,s,n,f,n,a,c,b,o,e,?,s,s,o,o,p,n,o,p,n,v,l p,k,s,e,f,y,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,l p,k,s,e,f,s,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,p e,k,s,n,f,n,a,c,b,y,e,?,s,s,o,o,p,n,o,p,n,c,l p,k,s,n,f,y,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,d p,k,s,e,f,f,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,d p,k,y,e,f,s,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,d p,k,y,e,f,f,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,d 
p,k,y,e,f,f,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,l e,k,f,w,f,n,f,w,b,w,e,?,s,k,w,w,p,w,t,p,w,n,g e,b,s,n,f,n,a,c,b,o,e,?,s,s,o,o,p,o,o,p,n,v,l p,k,s,e,f,s,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,d e,f,s,n,f,n,a,c,b,y,e,?,s,s,o,o,p,o,o,p,b,v,l e,f,y,n,f,n,f,c,b,w,e,b,y,y,n,n,p,w,t,p,w,y,p p,k,y,e,f,y,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,l e,f,s,g,t,n,f,c,b,w,e,b,s,s,w,w,p,w,t,p,w,y,p e,x,s,p,t,n,f,c,b,w,e,b,s,s,w,w,p,w,t,p,w,v,p p,c,y,y,f,n,f,w,n,w,e,c,y,y,y,y,p,y,o,e,w,c,l p,k,s,e,f,f,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,p p,k,y,e,f,f,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,d p,k,y,e,f,m,a,c,b,y,e,c,k,y,c,c,p,w,n,n,w,c,d e,b,s,n,f,n,a,c,b,y,e,?,s,s,o,o,p,n,o,p,o,v,l p,k,s,n,f,s,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,l e,x,s,w,f,n,f,w,b,g,e,?,s,k,w,w,p,w,t,p,w,s,g p,x,s,e,f,f,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,l p,x,y,c,f,m,f,c,b,w,e,c,k,y,c,c,p,w,n,n,w,c,d p,f,s,n,f,s,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,l e,k,s,n,f,n,a,c,b,o,e,?,s,s,o,o,p,o,o,p,b,c,l e,b,y,n,f,n,f,c,b,w,e,b,y,y,n,n,p,w,t,p,w,y,d p,f,s,n,f,s,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,d p,k,s,n,f,y,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,p e,b,s,g,f,n,f,w,b,w,e,?,k,k,w,w,p,w,t,p,w,s,g e,x,y,n,t,n,f,c,b,w,e,b,s,s,w,w,p,w,t,p,w,v,p p,f,s,n,f,s,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,p e,k,s,n,f,n,a,c,b,o,e,?,s,s,o,o,p,n,o,p,y,c,l e,b,f,w,f,n,f,w,b,w,e,?,k,s,w,w,p,w,t,p,w,n,g e,x,f,w,f,n,f,w,b,g,e,?,s,s,w,w,p,w,t,p,w,n,g p,k,s,n,f,y,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,d p,x,y,n,f,m,a,c,b,y,e,c,k,y,c,c,p,w,n,n,w,c,d p,k,y,e,f,s,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,p e,x,s,n,f,n,a,c,b,o,e,?,s,s,o,o,p,n,o,p,b,c,l p,k,s,e,f,f,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,p p,x,s,e,f,s,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,l p,k,s,n,f,y,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,l p,k,y,e,f,s,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,d p,k,y,n,f,f,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,l p,k,s,e,f,f,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,d e,b,f,g,f,n,f,w,b,p,e,?,s,s,w,w,p,w,t,p,w,n,g e,k,s,n,f,n,a,c,b,o,e,?,s,s,o,o,p,n,o,p,y,v,l p,k,y,n,f,s,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,p p,k,y,y,f,n,f,w,n,w,e,c,y,y,y,y,p,y,o,e,w,c,l e,k,s,w,f,n,f,w,b,p,e,?,k,s,w,w,p,w,t,p,w,n,g e,x,s,n,f,n,a,c,b,n,e,?,s,s,o,o,p,o,o,p,y,c,l p,k,s,n,f,s,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,p e,k,f,g,f,n,f,w,b,p,e,?,s,k,w,w,p,w,t,p,w,s,g e,k,f,w,f,n,f,w,b,w,e,?,s,s,w,w,p,w,t,p,w,n,g e,f,s,n,f,n,a,c,b,o,e,?,s,s,o,o,p,n,o,p,b,c,l e,x,s,w,f,n,f,w,b,p,e,?,k,k,w,w,p,w,t,p,w,n,g e,b,s,g,f,n,f,w,b,w,e,?,k,k,w,w,p,w,t,p,w,n,g e,x,f,w,f,n,f,w,b,w,e,?,s,k,w,w,p,w,t,p,w,n,g e,k,f,w,f,n,f,w,b,p,e,?,k,s,w,w,p,w,t,p,w,n,g p,k,s,n,f,f,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,d p,k,y,e,f,s,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,d e,k,f,g,f,n,f,w,b,p,e,?,s,s,w,w,p,w,t,p,w,s,g e,x,s,w,f,n,f,w,b,p,e,?,s,s,w,w,p,w,t,p,w,s,g p,k,s,n,f,f,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,p p,k,y,n,f,s,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,d p,k,y,n,f,f,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,p p,k,s,e,f,f,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,p p,k,s,e,f,s,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,l e,b,s,n,f,n,a,c,b,y,e,?,s,s,o,o,p,o,o,p,n,v,l p,k,s,e,f,s,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,p e,x,s,g,f,n,f,w,b,p,e,?,s,s,w,w,p,w,t,p,w,n,g e,x,s,n,f,n,a,c,b,o,e,?,s,s,o,o,p,o,o,p,y,v,l e,b,s,w,f,n,f,w,b,w,e,?,k,k,w,w,p,w,t,p,w,n,g p,k,s,e,f,s,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,p e,x,s,n,f,n,a,c,b,y,e,?,s,s,o,o,p,o,o,p,n,v,l p,k,s,n,f,f,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,d p,k,s,e,f,f,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,d p,k,y,e,f,s,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,p e,b,f,w,f,n,f,w,b,p,e,?,s,s,w,w,p,w,t,p,w,s,g p,k,y,n,f,f,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,d p,k,s,e,f,s,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,d e,b,s,g,f,n,f,w,b,w,e,?,s,k,w,w,p,w,t,p,w,s,g p,k,s,n,f,y,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,d 
p,x,s,n,f,y,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,p p,k,y,e,f,f,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,l e,x,s,w,f,n,f,w,b,p,e,?,s,s,w,w,p,w,t,p,w,n,g e,x,s,w,f,n,f,w,b,g,e,?,k,k,w,w,p,w,t,p,w,s,g p,k,y,n,f,s,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,p e,k,s,n,f,n,f,c,b,w,e,b,y,y,n,n,p,w,t,p,w,y,d p,k,s,e,f,y,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,d p,k,y,e,f,s,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,l e,b,s,n,f,n,a,c,b,y,e,?,s,s,o,o,p,o,o,p,o,v,l e,b,f,g,f,n,f,w,b,w,e,?,s,k,w,w,p,w,t,p,w,n,g p,k,s,e,f,f,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,l p,k,s,n,f,s,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,l p,f,y,n,f,f,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,p p,f,y,n,f,f,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,d e,f,s,n,f,n,a,c,b,o,e,?,s,s,o,o,p,o,o,p,y,c,l p,k,y,n,f,y,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,d e,k,s,w,f,n,f,w,b,w,e,?,s,s,w,w,p,w,t,p,w,s,g e,b,s,n,f,n,a,c,b,y,e,?,s,s,o,o,p,o,o,p,b,c,l p,k,y,n,f,s,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,p p,k,s,n,f,y,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,p p,k,s,e,f,s,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,d e,k,s,w,f,n,f,w,b,p,e,?,s,k,w,w,p,w,t,p,w,n,g p,k,y,e,f,y,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,l e,x,s,w,f,n,f,w,b,w,e,?,s,s,w,w,p,w,t,p,w,s,g p,k,y,e,f,s,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,d p,k,y,e,f,s,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,d p,k,y,n,f,s,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,d p,f,y,c,f,m,a,c,b,w,e,c,k,y,c,c,p,w,n,n,w,c,d p,x,s,n,f,s,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,d e,k,f,g,f,n,f,w,b,w,e,?,k,s,w,w,p,w,t,p,w,s,g p,k,s,e,f,y,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,l p,x,y,n,f,m,f,c,b,w,e,c,k,y,c,c,p,w,n,n,w,c,d p,f,y,e,f,y,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,l e,k,s,n,f,n,a,c,b,n,e,?,s,s,o,o,p,n,o,p,b,c,l p,k,s,n,f,s,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,p e,b,f,g,f,n,f,w,b,g,e,?,s,k,w,w,p,w,t,p,w,n,g e,b,f,w,f,n,f,w,b,p,e,?,s,k,w,w,p,w,t,p,w,n,g e,x,s,n,f,n,a,c,b,y,e,?,s,s,o,o,p,o,o,p,y,v,l p,k,y,n,f,s,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,l e,f,s,n,f,n,a,c,b,y,e,?,s,s,o,o,p,n,o,p,o,c,l e,x,s,w,f,n,f,w,b,g,e,?,s,s,w,w,p,w,t,p,w,n,g p,k,s,e,f,s,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,l p,k,s,e,f,y,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,p e,x,s,g,f,n,f,w,b,w,e,?,k,k,w,w,p,w,t,p,w,n,g e,b,s,g,f,n,f,w,b,g,e,?,s,s,w,w,p,w,t,p,w,n,g p,k,y,n,f,f,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,d p,k,y,e,f,m,f,c,b,w,e,c,k,y,c,c,p,w,n,n,w,c,d p,k,y,e,f,f,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,p p,k,y,n,f,f,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,p e,x,f,w,f,n,f,w,b,p,e,?,k,s,w,w,p,w,t,p,w,s,g p,k,y,e,f,f,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,l e,b,s,n,f,n,a,c,b,y,e,?,s,s,o,o,p,n,o,p,b,v,l p,k,y,n,f,s,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,d p,k,s,n,f,s,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,p e,b,s,n,f,n,a,c,b,y,e,?,s,s,o,o,p,n,o,p,y,v,l e,f,s,n,f,n,a,c,b,o,e,?,s,s,o,o,p,n,o,p,n,v,l e,k,s,g,f,n,f,w,b,w,e,?,k,k,w,w,p,w,t,p,w,n,g e,f,s,n,f,n,a,c,b,y,e,?,s,s,o,o,p,o,o,p,y,c,l p,k,y,e,f,y,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,l e,b,s,n,f,n,a,c,b,n,e,?,s,s,o,o,p,n,o,p,b,v,l e,k,s,n,f,n,a,c,b,y,e,?,s,s,o,o,p,o,o,p,n,c,l p,k,y,e,f,y,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,d e,f,s,n,f,n,a,c,b,y,e,?,s,s,o,o,p,o,o,p,n,c,l p,k,s,e,f,f,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,d e,b,f,g,f,n,f,w,b,g,e,?,k,k,w,w,p,w,t,p,w,n,g p,k,y,n,f,y,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,d e,f,s,n,f,n,a,c,b,n,e,?,s,s,o,o,p,o,o,p,b,v,l p,k,s,n,f,s,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,l e,x,s,g,f,n,f,w,b,w,e,?,k,s,w,w,p,w,t,p,w,s,g e,x,s,g,f,n,f,w,b,g,e,?,k,k,w,w,p,w,t,p,w,s,g e,k,s,n,f,n,a,c,b,y,e,?,s,s,o,o,p,n,o,p,b,v,l e,x,s,n,f,n,a,c,b,y,e,?,s,s,o,o,p,n,o,p,b,c,l p,k,y,e,f,y,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,d e,x,f,g,f,n,f,w,b,w,e,?,k,k,w,w,p,w,t,p,w,n,g p,k,s,n,f,f,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,d p,k,y,e,f,s,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,d p,k,y,n,f,f,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,p 
p,k,s,e,f,f,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,d p,k,y,n,f,s,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,l p,k,y,e,f,s,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,l e,x,s,w,f,n,f,w,b,g,e,?,k,s,w,w,p,w,t,p,w,n,g p,k,s,n,f,s,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,l p,k,s,e,f,y,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,p p,k,s,e,f,s,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,p e,x,f,g,f,n,f,w,b,w,e,?,s,s,w,w,p,w,t,p,w,n,g e,x,s,n,f,n,a,c,b,o,e,?,s,s,o,o,p,o,o,p,o,c,l e,b,s,w,f,n,f,w,b,p,e,?,k,k,w,w,p,w,t,p,w,n,g p,k,s,n,f,f,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,l p,k,y,n,f,f,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,l p,k,s,n,f,s,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,d p,x,y,e,f,f,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,p p,k,y,e,f,f,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,d e,x,s,n,f,n,a,c,b,o,e,?,s,s,o,o,p,o,o,p,b,v,l e,x,s,g,f,n,f,w,b,g,e,?,k,s,w,w,p,w,t,p,w,s,g p,k,s,e,f,f,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,l p,k,y,e,f,f,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,l e,k,s,w,f,n,f,w,b,g,e,?,k,s,w,w,p,w,t,p,w,n,g e,b,s,n,f,n,a,c,b,y,e,?,s,s,o,o,p,o,o,p,b,v,l e,k,s,w,f,n,f,w,b,p,e,?,k,k,w,w,p,w,t,p,w,n,g p,k,s,e,f,s,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,d p,k,y,n,f,f,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,p p,k,s,e,f,y,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,l p,k,s,n,f,y,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,d p,x,s,n,f,s,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,l e,k,s,n,f,n,a,c,b,o,e,?,s,s,o,o,p,o,o,p,n,c,l p,k,s,n,f,f,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,d p,k,s,n,f,s,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,d p,k,y,e,f,f,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,l e,x,f,w,f,n,f,w,b,p,e,?,s,k,w,w,p,w,t,p,w,s,g p,f,s,e,f,y,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,d p,k,s,n,f,y,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,l p,k,s,e,f,f,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,d e,f,s,n,f,n,a,c,b,o,e,?,s,s,o,o,p,o,o,p,b,v,l e,x,y,n,f,n,f,c,b,w,e,b,y,y,n,n,p,w,t,p,w,y,d p,k,y,n,f,s,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,d p,k,s,e,f,s,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,l e,b,s,w,f,n,f,w,b,g,e,?,s,k,w,w,p,w,t,p,w,s,g p,k,y,n,f,s,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,p p,k,s,n,f,y,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,p e,x,s,n,f,n,a,c,b,o,e,?,s,s,o,o,p,o,o,p,n,c,l e,x,s,g,f,n,f,w,b,p,e,?,s,k,w,w,p,w,t,p,w,s,g e,x,s,g,f,n,f,w,b,g,e,?,k,k,w,w,p,w,t,p,w,n,g p,f,y,n,f,y,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,p p,f,y,n,f,s,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,d e,f,y,g,t,n,f,c,b,w,e,b,s,s,w,w,p,w,t,p,w,v,p p,k,s,e,f,y,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,d e,x,f,w,f,n,f,w,b,w,e,?,k,s,w,w,p,w,t,p,w,s,g p,k,y,n,f,s,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,p p,k,s,n,f,y,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,p p,x,s,e,f,f,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,p e,x,f,g,f,n,f,w,b,p,e,?,k,s,w,w,p,w,t,p,w,s,g e,f,s,n,f,n,a,c,b,n,e,?,s,s,o,o,p,o,o,p,n,v,l p,x,y,e,f,f,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,p e,b,f,w,f,n,f,w,b,w,e,?,s,s,w,w,p,w,t,p,w,s,g p,k,s,e,f,f,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,l e,x,f,g,f,n,f,w,b,p,e,?,s,s,w,w,p,w,t,p,w,s,g p,f,y,n,f,m,a,c,b,y,e,c,k,y,c,c,p,w,n,n,w,c,d e,x,s,n,f,n,a,c,b,y,e,?,s,s,o,o,p,n,o,p,o,v,l e,b,s,g,f,n,f,w,b,p,e,?,s,k,w,w,p,w,t,p,w,n,g e,b,s,n,f,n,a,c,b,n,e,?,s,s,o,o,p,o,o,p,o,v,l p,k,s,e,f,f,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,d e,f,s,n,f,n,a,c,b,o,e,?,s,s,o,o,p,o,o,p,o,v,l e,x,f,g,f,n,f,w,b,w,e,?,s,k,w,w,p,w,t,p,w,n,g p,k,s,n,f,f,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,p e,x,f,w,f,n,f,w,b,p,e,?,s,k,w,w,p,w,t,p,w,n,g e,f,y,p,t,n,f,c,b,w,e,b,s,s,w,w,p,w,t,p,w,v,p p,k,y,e,f,f,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,l e,k,f,g,f,n,f,w,b,w,e,?,s,k,w,w,p,w,t,p,w,n,g e,k,s,n,f,n,a,c,b,o,e,?,s,s,o,o,p,o,o,p,y,c,l p,k,s,n,f,f,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,d p,k,s,n,f,y,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,l p,k,s,e,f,s,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,p p,k,y,n,f,s,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,d e,k,s,n,f,n,a,c,b,o,e,?,s,s,o,o,p,n,o,p,o,c,l 
p,k,y,n,f,s,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,p e,k,f,w,f,n,f,w,b,p,e,?,s,k,w,w,p,w,t,p,w,s,g p,k,y,e,f,f,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,p e,f,s,n,f,n,f,c,b,w,e,b,y,y,n,n,p,w,t,p,w,y,p p,k,s,e,f,s,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,p e,x,f,g,f,n,f,w,b,w,e,?,k,k,w,w,p,w,t,p,w,s,g p,k,y,e,f,f,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,d p,k,y,e,f,y,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,d p,k,s,n,f,f,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,l e,b,s,n,f,n,a,c,b,n,e,?,s,s,o,o,p,n,o,p,o,c,l e,k,s,w,f,n,f,w,b,w,e,?,s,s,w,w,p,w,t,p,w,n,g e,b,s,n,f,n,a,c,b,y,e,?,s,s,o,o,p,o,o,p,o,c,l e,f,y,g,t,n,f,c,b,w,e,b,s,s,w,w,p,w,t,p,w,y,p p,k,y,e,f,m,a,c,b,w,e,c,k,y,c,c,p,w,n,n,w,c,d p,x,y,e,f,y,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,d p,k,y,n,f,f,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,p e,k,s,n,f,n,a,c,b,y,e,?,s,s,o,o,p,o,o,p,y,v,l e,k,s,n,f,n,a,c,b,n,e,?,s,s,o,o,p,o,o,p,o,c,l e,x,y,c,t,n,f,c,b,w,e,b,s,s,w,w,p,w,t,p,w,v,p p,k,s,e,f,s,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,l p,k,y,n,f,y,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,l p,k,s,e,f,s,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,l p,k,y,n,f,y,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,d p,k,y,e,f,f,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,p e,f,s,c,t,n,f,c,b,w,e,b,s,s,w,w,p,w,t,p,w,v,p e,x,s,n,f,n,a,c,b,o,e,?,s,s,o,o,p,n,o,p,n,c,l e,b,f,w,f,n,f,w,b,w,e,?,s,k,w,w,p,w,t,p,w,n,g e,k,f,g,f,n,f,w,b,g,e,?,s,k,w,w,p,w,t,p,w,s,g e,f,s,n,f,n,a,c,b,y,e,?,s,s,o,o,p,o,o,p,o,c,l e,k,s,n,f,n,a,c,b,n,e,?,s,s,o,o,p,n,o,p,o,c,l e,k,f,w,f,n,f,w,b,g,e,?,s,s,w,w,p,w,t,p,w,n,g p,k,y,e,f,f,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,l e,b,f,w,f,n,f,w,b,g,e,?,k,k,w,w,p,w,t,p,w,s,g p,k,y,n,f,y,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,p p,k,y,e,f,y,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,d p,k,y,n,f,f,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,d e,x,s,n,f,n,a,c,b,o,e,?,s,s,o,o,p,n,o,p,y,v,l e,x,y,n,t,n,f,c,b,w,e,b,s,s,w,w,p,w,t,p,w,y,p e,b,s,w,f,n,f,w,b,p,e,?,s,k,w,w,p,w,t,p,w,n,g p,f,y,n,f,f,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,d p,k,s,n,f,f,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,l p,k,s,e,f,y,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,p e,x,s,n,f,n,a,c,b,o,e,?,s,s,o,o,p,n,o,p,y,c,l e,x,s,n,f,n,a,c,b,y,e,?,s,s,o,o,p,n,o,p,n,v,l e,b,s,w,f,n,f,w,b,w,e,?,s,s,w,w,p,w,t,p,w,s,g p,x,y,e,f,f,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,p e,b,f,w,f,n,f,w,b,w,e,?,k,s,w,w,p,w,t,p,w,s,g e,b,f,w,f,n,f,w,b,g,e,?,s,k,w,w,p,w,t,p,w,n,g e,b,f,w,f,n,f,w,b,g,e,?,k,s,w,w,p,w,t,p,w,n,g e,k,s,n,f,n,a,c,b,o,e,?,s,s,o,o,p,o,o,p,o,v,l p,k,y,e,f,s,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,p e,k,s,n,f,n,a,c,b,y,e,?,s,s,o,o,p,n,o,p,o,v,l p,k,s,e,f,f,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,d p,k,y,c,f,m,a,c,b,y,e,c,k,y,c,c,p,w,n,n,w,c,d p,k,s,n,f,y,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,d e,x,s,g,f,n,f,w,b,g,e,?,s,k,w,w,p,w,t,p,w,n,g e,f,s,n,f,n,f,c,b,w,e,b,y,y,n,n,p,w,t,p,w,y,d e,k,s,n,f,n,a,c,b,y,e,?,s,s,o,o,p,n,o,p,y,c,l e,b,y,n,f,n,f,c,b,w,e,b,y,y,n,n,p,w,t,p,w,y,p p,x,s,n,f,f,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,l e,b,s,n,f,n,a,c,b,o,e,?,s,s,o,o,p,o,o,p,b,c,l p,f,s,n,f,y,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,p p,k,y,n,f,f,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,p e,k,s,g,f,n,f,w,b,w,e,?,k,s,w,w,p,w,t,p,w,n,g p,k,y,n,f,f,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,l e,k,f,g,f,n,f,w,b,g,e,?,k,s,w,w,p,w,t,p,w,s,g e,x,s,n,f,n,a,c,b,y,e,?,s,s,o,o,p,n,o,p,o,c,l p,k,s,e,f,y,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,p e,f,s,n,f,n,a,c,b,o,e,?,s,s,o,o,p,n,o,p,y,v,l p,k,s,n,f,y,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,l p,k,y,n,f,s,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,p e,k,s,n,f,n,a,c,b,n,e,?,s,s,o,o,p,n,o,p,n,v,l e,b,s,n,f,n,a,c,b,n,e,?,s,s,o,o,p,o,o,p,n,c,l e,x,y,n,f,n,f,c,b,w,e,b,y,y,n,n,p,w,t,p,w,y,p e,x,s,w,f,n,f,w,b,w,e,?,k,s,w,w,p,w,t,p,w,s,g e,f,s,n,f,n,a,c,b,n,e,?,s,s,o,o,p,n,o,p,o,v,l p,f,s,n,f,f,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,p 
p,x,y,e,f,s,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,d p,k,y,e,f,y,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,p p,k,s,e,f,f,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,d p,k,s,e,f,f,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,p e,b,s,n,f,n,a,c,b,o,e,?,s,s,o,o,p,n,o,p,n,v,l e,x,s,n,f,n,a,c,b,y,e,?,s,s,o,o,p,n,o,p,y,v,l e,f,s,n,f,n,a,c,b,o,e,?,s,s,o,o,p,o,o,p,y,v,l p,k,s,n,f,s,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,l e,b,s,n,f,n,a,c,b,n,e,?,s,s,o,o,p,o,o,p,b,v,l e,k,s,g,f,n,f,w,b,w,e,?,s,s,w,w,p,w,t,p,w,s,g e,k,f,w,f,n,f,w,b,p,e,?,k,k,w,w,p,w,t,p,w,s,g p,k,s,e,f,s,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,p e,b,f,w,f,n,f,w,b,g,e,?,k,k,w,w,p,w,t,p,w,n,g e,x,s,w,f,n,f,w,b,w,e,?,s,k,w,w,p,w,t,p,w,s,g e,b,f,w,f,n,f,w,b,p,e,?,s,k,w,w,p,w,t,p,w,s,g p,k,y,n,f,y,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,d p,f,s,n,f,y,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,l p,f,y,e,f,y,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,p p,k,s,e,f,y,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,p p,k,y,n,f,f,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,d e,x,s,n,f,n,a,c,b,y,e,?,s,s,o,o,p,n,o,p,y,c,l e,f,s,n,f,n,a,c,b,y,e,?,s,s,o,o,p,n,o,p,n,v,l p,k,y,n,f,f,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,l p,k,s,n,f,f,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,l e,x,f,g,f,n,f,w,b,g,e,?,s,s,w,w,p,w,t,p,w,n,g e,k,s,w,f,n,f,w,b,w,e,?,s,k,w,w,p,w,t,p,w,s,g p,k,y,e,f,f,f,c,n,b,t,?,s,k,p,p,p,w,o,e,w,v,p p,x,s,n,f,f,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,d p,k,y,n,f,f,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,l e,k,s,n,f,n,a,c,b,n,e,?,s,s,o,o,p,o,o,p,b,c,l e,b,f,w,f,n,f,w,b,p,e,?,k,s,w,w,p,w,t,p,w,n,g e,k,s,n,f,n,a,c,b,n,e,?,s,s,o,o,p,n,o,p,b,v,l p,k,y,e,f,y,f,c,n,b,t,?,k,k,p,w,p,w,o,e,w,v,d e,x,y,g,t,n,f,c,b,w,e,b,s,s,w,w,p,w,t,p,w,y,p p,k,s,e,f,f,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,l e,b,s,n,f,n,a,c,b,n,e,?,s,s,o,o,p,o,o,p,y,v,l p,k,y,e,f,y,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,p e,x,s,n,f,n,a,c,b,y,e,?,s,s,o,o,p,o,o,p,b,c,l e,f,s,n,f,n,a,c,b,n,e,?,s,s,o,o,p,n,o,p,b,v,l e,f,s,n,f,n,a,c,b,n,e,?,s,s,o,o,p,n,o,p,y,c,l p,k,y,e,f,y,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,p e,b,f,g,f,n,f,w,b,g,e,?,k,s,w,w,p,w,t,p,w,n,g p,k,s,e,f,y,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,d p,k,s,n,f,y,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,d p,k,y,n,f,f,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,l p,k,s,n,f,s,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,d e,x,f,w,f,n,f,w,b,w,e,?,s,k,w,w,p,w,t,p,w,s,g e,f,s,n,f,n,a,c,b,y,e,?,s,s,o,o,p,n,o,p,o,v,l p,k,y,n,f,f,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,d e,x,s,n,f,n,a,c,b,n,e,?,s,s,o,o,p,n,o,p,n,v,l e,b,f,g,f,n,f,w,b,w,e,?,k,s,w,w,p,w,t,p,w,n,g e,b,s,n,f,n,a,c,b,o,e,?,s,s,o,o,p,o,o,p,y,v,l e,x,s,n,f,n,a,c,b,o,e,?,s,s,o,o,p,n,o,p,o,v,l p,k,s,e,f,s,f,c,n,b,t,?,s,k,w,p,p,w,o,e,w,v,l p,k,s,n,f,f,f,c,n,b,t,?,s,s,w,p,p,w,o,e,w,v,p p,k,s,n,f,s,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,d p,k,s,n,f,s,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,p p,k,y,n,f,y,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,l e,k,f,w,f,n,f,w,b,w,e,?,k,s,w,w,p,w,t,p,w,s,g p,k,y,e,f,f,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,l e,k,s,n,f,n,a,c,b,y,e,?,s,s,o,o,p,o,o,p,b,v,l p,k,s,e,f,f,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,p e,b,s,n,f,n,a,c,b,n,e,?,s,s,o,o,p,o,o,p,b,c,l e,f,s,n,f,n,a,c,b,o,e,?,s,s,o,o,p,o,o,p,b,c,l e,k,s,n,f,n,a,c,b,n,e,?,s,s,o,o,p,n,o,p,n,c,l p,k,y,e,f,s,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,l e,k,s,n,f,n,a,c,b,n,e,?,s,s,o,o,p,o,o,p,y,v,l p,k,y,e,f,f,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,p e,k,s,n,f,n,a,c,b,n,e,?,s,s,o,o,p,n,o,p,y,v,l e,b,f,g,f,n,f,w,b,w,e,?,k,k,w,w,p,w,t,p,w,n,g e,f,s,n,f,n,a,c,b,o,e,?,s,s,o,o,p,o,o,p,o,c,l e,b,s,n,f,n,a,c,b,o,e,?,s,s,o,o,p,n,o,p,y,c,l e,f,s,n,f,n,a,c,b,o,e,?,s,s,o,o,p,n,o,p,o,v,l e,b,f,g,f,n,f,w,b,g,e,?,s,s,w,w,p,w,t,p,w,s,g p,k,y,e,f,f,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,l p,k,s,n,f,s,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,d p,k,y,n,f,f,f,c,n,b,t,?,k,s,w,w,p,w,o,e,w,v,p 
p,k,s,e,f,s,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,p p,k,y,n,f,y,f,c,n,b,t,?,s,s,w,w,p,w,o,e,w,v,l e,b,f,g,f,n,f,w,b,p,e,?,k,k,w,w,p,w,t,p,w,s,g e,k,f,w,f,n,f,w,b,g,e,?,s,k,w,w,p,w,t,p,w,s,g e,k,s,n,f,n,a,c,b,n,e,?,s,s,o,o,p,n,o,p,o,v,l p,x,s,e,f,f,f,c,n,b,t,?,k,s,w,p,p,w,o,e,w,v,p e,k,s,n,f,n,a,c,b,n,e,?,s,s,o,o,p,o,o,p,n,v,l p,k,y,e,f,f,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,d p,k,s,n,f,f,f,c,n,b,t,?,k,s,p,p,p,w,o,e,w,v,d p,k,y,e,f,f,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,p p,k,y,e,f,y,f,c,n,b,t,?,s,s,p,p,p,w,o,e,w,v,p p,x,s,n,f,y,f,c,n,b,t,?,k,k,w,w,p,w,o,e,w,v,d e,b,s,g,f,n,f,w,b,g,e,?,k,s,w,w,p,w,t,p,w,n,g p,x,y,c,f,m,f,c,b,y,e,c,k,y,c,c,p,w,n,n,w,c,d e,k,f,w,f,n,f,w,b,w,e,?,k,s,w,w,p,w,t,p,w,n,g p,k,y,n,f,s,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,l p,k,s,e,f,y,f,c,n,b,t,?,k,k,w,p,p,w,o,e,w,v,d e,k,f,w,f,n,f,w,b,w,e,?,k,k,w,w,p,w,t,p,w,s,g e,f,s,n,f,n,a,c,b,o,e,?,s,s,o,o,p,n,o,p,b,v,l p,k,s,e,f,s,f,c,n,b,t,?,s,s,p,w,p,w,o,e,w,v,p e,x,s,n,f,n,a,c,b,y,e,?,s,s,o,o,p,n,o,p,n,c,l e,k,s,n,f,n,a,c,b,y,e,?,s,s,o,o,p,n,o,p,o,c,l e,k,s,n,f,n,a,c,b,y,e,?,s,s,o,o,p,o,o,p,n,v,l e,k,s,n,f,n,a,c,b,y,e,?,s,s,o,o,p,n,o,p,y,v,l e,k,s,n,f,n,a,c,b,o,e,?,s,s,o,o,p,o,o,p,n,v,l e,x,s,n,f,n,a,c,b,y,e,?,s,s,o,o,p,o,o,p,n,c,l p,k,y,e,f,y,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,l e,b,s,w,f,n,f,w,b,w,e,?,s,s,w,w,p,w,t,p,w,n,g e,x,s,n,f,n,a,c,b,o,e,?,s,s,o,o,p,o,o,p,n,v,l e,k,s,w,f,n,f,w,b,p,e,?,s,s,w,w,p,w,t,p,w,n,g e,k,s,n,f,n,a,c,b,o,e,?,s,s,o,o,p,n,o,p,b,v,l p,k,y,e,f,y,f,c,n,b,t,?,k,k,p,p,p,w,o,e,w,v,d p,f,y,c,f,m,a,c,b,y,e,c,k,y,c,c,p,w,n,n,w,c,d e,x,s,n,f,n,a,c,b,y,e,?,s,s,o,o,p,o,o,p,o,v,l p,k,y,n,f,s,f,c,n,b,t,?,s,k,p,w,p,w,o,e,w,v,l p,k,s,e,f,y,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,d p,k,y,n,f,f,f,c,n,b,t,?,k,s,p,w,p,w,o,e,w,v,d e,k,s,n,f,n,a,c,b,y,e,?,s,s,o,o,p,o,o,p,b,c,l e,x,s,n,f,n,a,c,b,y,e,?,s,s,o,o,p,n,o,p,b,v,l e,f,s,n,f,n,a,c,b,n,e,?,s,s,o,o,p,o,o,p,b,c,l p,k,y,n,f,y,f,c,n,b,t,?,s,k,w,w,p,w,o,e,w,v,l e,x,s,n,f,n,a,c,b,y,e,?,s,s,o,o,p,o,o,p,o,c,l xgboost-3.0.0/demo/CLI/binary_classification/agaricus-lepiota.fmap000066400000000000000000000042321476511272400252010ustar00rootroot00000000000000 1. cap-shape: bell=b,conical=c,convex=x,flat=f,knobbed=k,sunken=s 2. cap-surface: fibrous=f,grooves=g,scaly=y,smooth=s 3. cap-color: brown=n,buff=b,cinnamon=c,gray=g,green=r,pink=p,purple=u,red=e,white=w,yellow=y 4. bruises?: bruises=t,no=f 5. odor: almond=a,anise=l,creosote=c,fishy=y,foul=f, musty=m,none=n,pungent=p,spicy=s 6. gill-attachment: attached=a,descending=d,free=f,notched=n 7. gill-spacing: close=c,crowded=w,distant=d 8. gill-size: broad=b,narrow=n 9. gill-color: black=k,brown=n,buff=b,chocolate=h,gray=g, green=r,orange=o,pink=p,purple=u,red=e, white=w,yellow=y 10. stalk-shape: enlarging=e,tapering=t 11. stalk-root: bulbous=b,club=c,cup=u,equal=e, rhizomorphs=z,rooted=r,missing=? 12. stalk-surface-above-ring: fibrous=f,scaly=y,silky=k,smooth=s 13. stalk-surface-below-ring: fibrous=f,scaly=y,silky=k,smooth=s 14. stalk-color-above-ring: brown=n,buff=b,cinnamon=c,gray=g,orange=o, pink=p,red=e,white=w,yellow=y 15. stalk-color-below-ring: brown=n,buff=b,cinnamon=c,gray=g,orange=o, pink=p,red=e,white=w,yellow=y 16. veil-type: partial=p,universal=u 17. veil-color: brown=n,orange=o,white=w,yellow=y 18. ring-number: none=n,one=o,two=t 19. ring-type: cobwebby=c,evanescent=e,flaring=f,large=l, none=n,pendant=p,sheathing=s,zone=z 20. spore-print-color: black=k,brown=n,buff=b,chocolate=h,green=r, orange=o,purple=u,white=w,yellow=y 21. population: abundant=a,clustered=c,numerous=n, scattered=s,several=v,solitary=y 22. 
habitat: grasses=g,leaves=l,meadows=m,paths=p, urban=u,waste=w,woods=d xgboost-3.0.0/demo/CLI/binary_classification/agaricus-lepiota.names000066400000000000000000000152401476511272400253620ustar00rootroot000000000000001. Title: Mushroom Database 2. Sources: (a) Mushroom records drawn from The Audubon Society Field Guide to North American Mushrooms (1981). G. H. Lincoff (Pres.), New York: Alfred A. Knopf (b) Donor: Jeff Schlimmer (Jeffrey.Schlimmer@a.gp.cs.cmu.edu) (c) Date: 27 April 1987 3. Past Usage: 1. Schlimmer,J.S. (1987). Concept Acquisition Through Representational Adjustment (Technical Report 87-19). Doctoral disseration, Department of Information and Computer Science, University of California, Irvine. --- STAGGER: asymptoted to 95% classification accuracy after reviewing 1000 instances. 2. Iba,W., Wogulis,J., & Langley,P. (1988). Trading off Simplicity and Coverage in Incremental Concept Learning. In Proceedings of the 5th International Conference on Machine Learning, 73-79. Ann Arbor, Michigan: Morgan Kaufmann. -- approximately the same results with their HILLARY algorithm 3. In the following references a set of rules (given below) were learned for this data set which may serve as a point of comparison for other researchers. Duch W, Adamczak R, Grabczewski K (1996) Extraction of logical rules from training data using backpropagation networks, in: Proc. of the The 1st Online Workshop on Soft Computing, 19-30.Aug.1996, pp. 25-30, available on-line at: http://www.bioele.nuee.nagoya-u.ac.jp/wsc1/ Duch W, Adamczak R, Grabczewski K, Ishikawa M, Ueda H, Extraction of crisp logical rules using constrained backpropagation networks - comparison of two new approaches, in: Proc. of the European Symposium on Artificial Neural Networks (ESANN'97), Bruge, Belgium 16-18.4.1997, pp. xx-xx Wlodzislaw Duch, Department of Computer Methods, Nicholas Copernicus University, 87-100 Torun, Grudziadzka 5, Poland e-mail: duch@phys.uni.torun.pl WWW http://www.phys.uni.torun.pl/kmk/ Date: Mon, 17 Feb 1997 13:47:40 +0100 From: Wlodzislaw Duch Organization: Dept. of Computer Methods, UMK I have attached a file containing logical rules for mushrooms. It should be helpful for other people since only in the last year I have seen about 10 papers analyzing this dataset and obtaining quite complex rules. We will try to contribute other results later. With best regards, Wlodek Duch ________________________________________________________________ Logical rules for the mushroom data sets. Logical rules given below seem to be the simplest possible for the mushroom dataset and therefore should be treated as benchmark results. Disjunctive rules for poisonous mushrooms, from most general to most specific: P_1) odor=NOT(almond.OR.anise.OR.none) 120 poisonous cases missed, 98.52% accuracy P_2) spore-print-color=green 48 cases missed, 99.41% accuracy P_3) odor=none.AND.stalk-surface-below-ring=scaly.AND. (stalk-color-above-ring=NOT.brown) 8 cases missed, 99.90% accuracy P_4) habitat=leaves.AND.cap-color=white 100% accuracy Rule P_4) may also be P_4') population=clustered.AND.cap_color=white These rule involve 6 attributes (out of 22). Rules for edible mushrooms are obtained as negation of the rules given above, for example the rule: odor=(almond.OR.anise.OR.none).AND.spore-print-color=NOT.green gives 48 errors, or 99.41% accuracy on the whole dataset. 
Several slightly more complex variations on these rules exist, involving other attributes, such as gill_size, gill_spacing, stalk_surface_above_ring, but the rules given above are the simplest we have found. 4. Relevant Information: This data set includes descriptions of hypothetical samples corresponding to 23 species of gilled mushrooms in the Agaricus and Lepiota Family (pp. 500-525). Each species is identified as definitely edible, definitely poisonous, or of unknown edibility and not recommended. This latter class was combined with the poisonous one. The Guide clearly states that there is no simple rule for determining the edibility of a mushroom; no rule like ``leaflets three, let it be'' for Poisonous Oak and Ivy. 5. Number of Instances: 8124 6. Number of Attributes: 22 (all nominally valued) 7. Attribute Information: (classes: edible=e, poisonous=p) 1. cap-shape: bell=b,conical=c,convex=x,flat=f, knobbed=k,sunken=s 2. cap-surface: fibrous=f,grooves=g,scaly=y,smooth=s 3. cap-color: brown=n,buff=b,cinnamon=c,gray=g,green=r, pink=p,purple=u,red=e,white=w,yellow=y 4. bruises?: bruises=t,no=f 5. odor: almond=a,anise=l,creosote=c,fishy=y,foul=f, musty=m,none=n,pungent=p,spicy=s 6. gill-attachment: attached=a,descending=d,free=f,notched=n 7. gill-spacing: close=c,crowded=w,distant=d 8. gill-size: broad=b,narrow=n 9. gill-color: black=k,brown=n,buff=b,chocolate=h,gray=g, green=r,orange=o,pink=p,purple=u,red=e, white=w,yellow=y 10. stalk-shape: enlarging=e,tapering=t 11. stalk-root: bulbous=b,club=c,cup=u,equal=e, rhizomorphs=z,rooted=r,missing=? 12. stalk-surface-above-ring: fibrous=f,scaly=y,silky=k,smooth=s 13. stalk-surface-below-ring: fibrous=f,scaly=y,silky=k,smooth=s 14. stalk-color-above-ring: brown=n,buff=b,cinnamon=c,gray=g,orange=o, pink=p,red=e,white=w,yellow=y 15. stalk-color-below-ring: brown=n,buff=b,cinnamon=c,gray=g,orange=o, pink=p,red=e,white=w,yellow=y 16. veil-type: partial=p,universal=u 17. veil-color: brown=n,orange=o,white=w,yellow=y 18. ring-number: none=n,one=o,two=t 19. ring-type: cobwebby=c,evanescent=e,flaring=f,large=l, none=n,pendant=p,sheathing=s,zone=z 20. spore-print-color: black=k,brown=n,buff=b,chocolate=h,green=r, orange=o,purple=u,white=w,yellow=y 21. population: abundant=a,clustered=c,numerous=n, scattered=s,several=v,solitary=y 22. habitat: grasses=g,leaves=l,meadows=m,paths=p, urban=u,waste=w,woods=d 8. Missing Attribute Values: 2480 of them (denoted by "?"), all for attribute #11. 9. 
Class Distribution: -- edible: 4208 (51.8%) -- poisonous: 3916 (48.2%) -- total: 8124 instances xgboost-3.0.0/demo/CLI/binary_classification/mapfeat.py000077500000000000000000000022331476511272400230740ustar00rootroot00000000000000#!/usr/bin/env python3 def loadfmap( fname ): fmap = {} nmap = {} for l in open( fname ): arr = l.split() if arr[0].find('.') != -1: idx = int( arr[0].strip('.') ) assert idx not in fmap fmap[ idx ] = {} ftype = arr[1].strip(':') content = arr[2] else: content = arr[0] for it in content.split(','): if it.strip() == '': continue k , v = it.split('=') fmap[ idx ][ v ] = len(nmap) nmap[ len(nmap) ] = ftype+'='+k return fmap, nmap def write_nmap( fo, nmap ): for i in range( len(nmap) ): fo.write('%d\t%s\ti\n' % (i, nmap[i]) ) # start here fmap, nmap = loadfmap( 'agaricus-lepiota.fmap' ) fo = open( 'featmap.txt', 'w' ) write_nmap( fo, nmap ) fo.close() fo = open( 'agaricus.txt', 'w' ) for l in open( 'agaricus-lepiota.data' ): arr = l.split(',') if arr[0] == 'p': fo.write('1') else: assert arr[0] == 'e' fo.write('0') for i in range( 1,len(arr) ): fo.write( ' %d:1' % fmap[i][arr[i].strip()] ) fo.write('\n') fo.close() xgboost-3.0.0/demo/CLI/binary_classification/mknfold.py000077500000000000000000000007671476511272400231130ustar00rootroot00000000000000#!/usr/bin/env python3 import random import sys if len(sys.argv) < 3: print('Usage: <filename> <k> [nfold = 5]') exit(0) random.seed( 10 ) k = int( sys.argv[2] ) if len(sys.argv) > 3: nfold = int( sys.argv[3] ) else: nfold = 5 fi = open( sys.argv[1], 'r' ) ftr = open( sys.argv[1]+'.train', 'w' ) fte = open( sys.argv[1]+'.test', 'w' ) for l in fi: if random.randint( 1 , nfold ) == k: fte.write( l ) else: ftr.write( l ) fi.close() ftr.close() fte.close() xgboost-3.0.0/demo/CLI/binary_classification/mushroom.conf000066400000000000000000000017171476511272400236300ustar00rootroot00000000000000# General Parameters, see comment for each definition # choose the booster, can be gbtree or gblinear booster = gbtree # choose logistic regression loss function for binary classification objective = binary:logistic # Tree Booster Parameters # step size shrinkage eta = 1.0 # minimum loss reduction required to make a further partition gamma = 1.0 # minimum sum of instance weight(hessian) needed in a child min_child_weight = 1 # maximum depth of a tree max_depth = 3 # Task Parameters # the number of round to do boosting num_round = 2 # 0 means do not save any model except the final round model save_period = 2 # The path of training data data = "agaricus.txt.train?format=libsvm" # The path of validation data, used to monitor training process, here [test] sets name of the validation set eval[test] = "agaricus.txt.test?format=libsvm" # evaluate on training data as well each round eval_train = 1 # The path of test data test:data = "agaricus.txt.test?format=libsvm" xgboost-3.0.0/demo/CLI/binary_classification/runexp.sh000077500000000000000000000011501476511272400227570ustar00rootroot00000000000000#!/bin/bash # map feature using indicator encoding, also produce featmap.txt python mapfeat.py # split train and test python mknfold.py agaricus.txt 1 XGBOOST=../../../xgboost # training and output the models $XGBOOST mushroom.conf # output prediction task=pred $XGBOOST mushroom.conf task=pred model_in=0002.model # print the boosters of 0002.model in dump.raw.txt $XGBOOST mushroom.conf task=dump model_in=0002.model name_dump=dump.raw.txt # use the feature map in printing for better visualization $XGBOOST mushroom.conf task=dump model_in=0002.model fmap=featmap.txt
name_dump=dump.nice.txt cat dump.nice.txt xgboost-3.0.0/demo/CLI/distributed-training/000077500000000000000000000000001476511272400206765ustar00rootroot00000000000000xgboost-3.0.0/demo/CLI/distributed-training/README.md000066400000000000000000000021031476511272400221530ustar00rootroot00000000000000Distributed XGBoost Training ============================ This is a tutorial on distributed XGBoost training. Currently, XGBoost supports distributed training via the CLI program with a configuration file. There are also plans to provide distributed Python and other language bindings; please open an issue if you are interested in contributing. Build XGBoost with Distributed Filesystem Support ------------------------------------------------- To use distributed XGBoost, you only need to turn on the CMake options for building with distributed filesystems (HDFS, S3, or Azure). ``` cmake -DUSE_HDFS=ON -DUSE_S3=ON -DUSE_AZURE=ON ``` Step by Step Tutorial on AWS ---------------------------- Check out [this tutorial](https://xgboost.readthedocs.org/en/latest/tutorials/aws_yarn.html) for running distributed XGBoost. Model Analysis -------------- XGBoost models are exchangeable across all bindings and platforms. This means you can use Python or R to analyze a learnt model and make predictions. For example, you can use [plot_model.ipynb](plot_model.ipynb) to visualize the learnt model. xgboost-3.0.0/demo/CLI/distributed-training/mushroom.aws.conf000066400000000000000000000015271476511272400242140ustar00rootroot00000000000000# General Parameters, see comment for each definition # choose the booster, can be gbtree or gblinear booster = gbtree # choose logistic regression loss function for binary classification objective = binary:logistic # Tree Booster Parameters # step size shrinkage eta = 1.0 # minimum loss reduction required to make a further partition gamma = 1.0 # minimum sum of instance weight(hessian) needed in a child min_child_weight = 1 # maximum depth of a tree max_depth = 3 # Task Parameters # the number of round to do boosting num_round = 2 # 0 means do not save any model except the final round model save_period = 0 # The path of training data data = "s3://mybucket/xgb-demo/train" # The path of validation data, used to monitor training process, here [test] sets name of the validation set # evaluate on training data as well each round eval_train = 1 xgboost-3.0.0/demo/CLI/distributed-training/plot_model.ipynb000066400000000000000000000040101476511272400240720ustar00rootroot00000000000000{ "cells": [ { "cell_type": "markdown", "metadata": {}, "source": [ "# XGBoost Model Analysis\n", "\n", "This notebook can be used to load and analyze models learnt from all XGBoost bindings, including distributed training.
" ] }, { "cell_type": "code", "execution_count": null, "metadata": { "collapsed": false }, "outputs": [], "source": [ "import sys\n", "import os\n", "%matplotlib inline " ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "## Please change the ```pkg_path``` and ```model_file``` to the correct paths" ] }, { "cell_type": "code", "execution_count": null, "metadata": { "collapsed": false }, "outputs": [], "source": [ "pkg_path = '../../python-package/'\n", "model_file = 's3://my-bucket/xgb-demo/model/0002.model'\n", "sys.path.insert(0, pkg_path)\n", "import xgboost as xgb" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "# Plot the Feature Importance" ] }, { "cell_type": "code", "execution_count": null, "metadata": { "collapsed": false }, "outputs": [], "source": [ "# load the booster and plot the feature importance.\n", "bst = xgb.Booster(model_file=model_file)\n", "xgb.plot_importance(bst)" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "# Plot the First Tree" ] }, { "cell_type": "code", "execution_count": null, "metadata": { "collapsed": false }, "outputs": [], "source": [ "tree_id = 0\n", "xgb.to_graphviz(bst, tree_id)" ] } ], "metadata": { "kernelspec": { "display_name": "Python 2", "language": "python", "name": "python2" }, "language_info": { "codemirror_mode": { "name": "ipython", "version": 2 }, "file_extension": ".py", "mimetype": "text/x-python", "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython2", "version": "2.7.3" } }, "nbformat": 4, "nbformat_minor": 0 } xgboost-3.0.0/demo/CLI/distributed-training/run_aws.sh000066400000000000000000000007141476511272400227120ustar00rootroot00000000000000# This is the example script to run distributed xgboost on AWS. # Change the following line for configuration export BUCKET=mybucket # submit the job to YARN ../../../dmlc-core/tracker/dmlc-submit --cluster=yarn --num-workers=2 --worker-cores=2\ ../../../xgboost mushroom.aws.conf nthread=2\ data=s3://${BUCKET}/xgb-demo/train\ eval[test]=s3://${BUCKET}/xgb-demo/test\ model_dir=s3://${BUCKET}/xgb-demo/model xgboost-3.0.0/demo/CLI/regression/000077500000000000000000000000001476511272400167235ustar00rootroot00000000000000xgboost-3.0.0/demo/CLI/regression/README.md000066400000000000000000000021411476511272400202010ustar00rootroot00000000000000Regression ==== Using XGBoost for regression is very similar to using it for binary classification. We suggest that you refer to the [binary classification demo](../binary_classification) first. In XGBoost, if we use negative log likelihood as the loss function for regression, the training procedure is the same as training a binary classifier with XGBoost. ### Tutorial The dataset we used is the [computer hardware dataset from UCI repository](https://archive.ics.uci.edu/ml/datasets/Computer+Hardware). The demo for regression is almost the same as the [binary classification demo](../binary_classification), except for a small difference in the general parameters: ``` # General parameter # this is the only difference with classification, use reg:squarederror to do linear regression # when labels are in [0,1] we can also use reg:logistic objective = reg:squarederror ... ``` The input format is the same as in binary classification, except that the label is now the target regression value. We use linear regression here; if we want to use logistic regression (objective = reg:logistic) instead, the labels need to be pre-scaled into [0,1].
xgboost-3.0.0/demo/CLI/regression/machine.conf000066400000000000000000000017551476511272400212060ustar00rootroot00000000000000# General Parameters, see comment for each definition # choose the tree booster, can also change to gblinear booster = gbtree # this is the only difference with classification, use reg:squarederror to do linear classification # when labels are in [0,1] we can also use reg:logistic objective = reg:squarederror # Tree Booster Parameters # step size shrinkage eta = 1.0 # minimum loss reduction required to make a further partition gamma = 1.0 # minimum sum of instance weight(hessian) needed in a child min_child_weight = 1 # maximum depth of a tree max_depth = 3 # Task parameters # the number of round to do boosting num_round = 2 # 0 means do not save any model except the final round model save_period = 0 # The path of training data data = "machine.txt.train?format=libsvm" # The path of validation data, used to monitor training process, here [test] sets name of the validation set eval[test] = "machine.txt.test?format=libsvm" # The path of test data test:data = "machine.txt.test?format=libsvm" xgboost-3.0.0/demo/CLI/regression/machine.data000066400000000000000000000210261476511272400211630ustar00rootroot00000000000000adviser,32/60,125,256,6000,256,16,128,198,199 amdahl,470v/7,29,8000,32000,32,8,32,269,253 amdahl,470v/7a,29,8000,32000,32,8,32,220,253 amdahl,470v/7b,29,8000,32000,32,8,32,172,253 amdahl,470v/7c,29,8000,16000,32,8,16,132,132 amdahl,470v/b,26,8000,32000,64,8,32,318,290 amdahl,580-5840,23,16000,32000,64,16,32,367,381 amdahl,580-5850,23,16000,32000,64,16,32,489,381 amdahl,580-5860,23,16000,64000,64,16,32,636,749 amdahl,580-5880,23,32000,64000,128,32,64,1144,1238 apollo,dn320,400,1000,3000,0,1,2,38,23 apollo,dn420,400,512,3500,4,1,6,40,24 basf,7/65,60,2000,8000,65,1,8,92,70 basf,7/68,50,4000,16000,65,1,8,138,117 bti,5000,350,64,64,0,1,4,10,15 bti,8000,200,512,16000,0,4,32,35,64 burroughs,b1955,167,524,2000,8,4,15,19,23 burroughs,b2900,143,512,5000,0,7,32,28,29 burroughs,b2925,143,1000,2000,0,5,16,31,22 burroughs,b4955,110,5000,5000,142,8,64,120,124 burroughs,b5900,143,1500,6300,0,5,32,30,35 burroughs,b5920,143,3100,6200,0,5,20,33,39 burroughs,b6900,143,2300,6200,0,6,64,61,40 burroughs,b6925,110,3100,6200,0,6,64,76,45 c.r.d,68/10-80,320,128,6000,0,1,12,23,28 c.r.d,universe:2203t,320,512,2000,4,1,3,69,21 c.r.d,universe:68,320,256,6000,0,1,6,33,28 c.r.d,universe:68/05,320,256,3000,4,1,3,27,22 c.r.d,universe:68/137,320,512,5000,4,1,5,77,28 c.r.d,universe:68/37,320,256,5000,4,1,6,27,27 cdc,cyber:170/750,25,1310,2620,131,12,24,274,102 cdc,cyber:170/760,25,1310,2620,131,12,24,368,102 cdc,cyber:170/815,50,2620,10480,30,12,24,32,74 cdc,cyber:170/825,50,2620,10480,30,12,24,63,74 cdc,cyber:170/835,56,5240,20970,30,12,24,106,138 cdc,cyber:170/845,64,5240,20970,30,12,24,208,136 cdc,omega:480-i,50,500,2000,8,1,4,20,23 cdc,omega:480-ii,50,1000,4000,8,1,5,29,29 cdc,omega:480-iii,50,2000,8000,8,1,5,71,44 cambex,1636-1,50,1000,4000,8,3,5,26,30 cambex,1636-10,50,1000,8000,8,3,5,36,41 cambex,1641-1,50,2000,16000,8,3,5,40,74 cambex,1641-11,50,2000,16000,8,3,6,52,74 cambex,1651-1,50,2000,16000,8,3,6,60,74 dec,decsys:10:1091,133,1000,12000,9,3,12,72,54 dec,decsys:20:2060,133,1000,8000,9,3,12,72,41 dec,microvax-1,810,512,512,8,1,1,18,18 dec,vax:11/730,810,1000,5000,0,1,1,20,28 dec,vax:11/750,320,512,8000,4,1,5,40,36 dec,vax:11/780,200,512,8000,8,1,8,62,38 dg,eclipse:c/350,700,384,8000,0,1,1,24,34 dg,eclipse:m/600,700,256,2000,0,1,1,24,19 dg,eclipse:mv/10000,140,1000,16000,16,1,3,138,72 
dg,eclipse:mv/4000,200,1000,8000,0,1,2,36,36 dg,eclipse:mv/6000,110,1000,4000,16,1,2,26,30 dg,eclipse:mv/8000,110,1000,12000,16,1,2,60,56 dg,eclipse:mv/8000-ii,220,1000,8000,16,1,2,71,42 formation,f4000/100,800,256,8000,0,1,4,12,34 formation,f4000/200,800,256,8000,0,1,4,14,34 formation,f4000/200ap,800,256,8000,0,1,4,20,34 formation,f4000/300,800,256,8000,0,1,4,16,34 formation,f4000/300ap,800,256,8000,0,1,4,22,34 four-phase,2000/260,125,512,1000,0,8,20,36,19 gould,concept:32/8705,75,2000,8000,64,1,38,144,75 gould,concept:32/8750,75,2000,16000,64,1,38,144,113 gould,concept:32/8780,75,2000,16000,128,1,38,259,157 hp,3000/30,90,256,1000,0,3,10,17,18 hp,3000/40,105,256,2000,0,3,10,26,20 hp,3000/44,105,1000,4000,0,3,24,32,28 hp,3000/48,105,2000,4000,8,3,19,32,33 hp,3000/64,75,2000,8000,8,3,24,62,47 hp,3000/88,75,3000,8000,8,3,48,64,54 hp,3000/iii,175,256,2000,0,3,24,22,20 harris,100,300,768,3000,0,6,24,36,23 harris,300,300,768,3000,6,6,24,44,25 harris,500,300,768,12000,6,6,24,50,52 harris,600,300,768,4500,0,1,24,45,27 harris,700,300,384,12000,6,1,24,53,50 harris,80,300,192,768,6,6,24,36,18 harris,800,180,768,12000,6,1,31,84,53 honeywell,dps:6/35,330,1000,3000,0,2,4,16,23 honeywell,dps:6/92,300,1000,4000,8,3,64,38,30 honeywell,dps:6/96,300,1000,16000,8,2,112,38,73 honeywell,dps:7/35,330,1000,2000,0,1,2,16,20 honeywell,dps:7/45,330,1000,4000,0,3,6,22,25 honeywell,dps:7/55,140,2000,4000,0,3,6,29,28 honeywell,dps:7/65,140,2000,4000,0,4,8,40,29 honeywell,dps:8/44,140,2000,4000,8,1,20,35,32 honeywell,dps:8/49,140,2000,32000,32,1,20,134,175 honeywell,dps:8/50,140,2000,8000,32,1,54,66,57 honeywell,dps:8/52,140,2000,32000,32,1,54,141,181 honeywell,dps:8/62,140,2000,32000,32,1,54,189,181 honeywell,dps:8/20,140,2000,4000,8,1,20,22,32 ibm,3033:s,57,4000,16000,1,6,12,132,82 ibm,3033:u,57,4000,24000,64,12,16,237,171 ibm,3081,26,16000,32000,64,16,24,465,361 ibm,3081:d,26,16000,32000,64,8,24,465,350 ibm,3083:b,26,8000,32000,0,8,24,277,220 ibm,3083:e,26,8000,16000,0,8,16,185,113 ibm,370/125-2,480,96,512,0,1,1,6,15 ibm,370/148,203,1000,2000,0,1,5,24,21 ibm,370/158-3,115,512,6000,16,1,6,45,35 ibm,38/3,1100,512,1500,0,1,1,7,18 ibm,38/4,1100,768,2000,0,1,1,13,20 ibm,38/5,600,768,2000,0,1,1,16,20 ibm,38/7,400,2000,4000,0,1,1,32,28 ibm,38/8,400,4000,8000,0,1,1,32,45 ibm,4321,900,1000,1000,0,1,2,11,18 ibm,4331-1,900,512,1000,0,1,2,11,17 ibm,4331-11,900,1000,4000,4,1,2,18,26 ibm,4331-2,900,1000,4000,8,1,2,22,28 ibm,4341,900,2000,4000,0,3,6,37,28 ibm,4341-1,225,2000,4000,8,3,6,40,31 ibm,4341-10,225,2000,4000,8,3,6,34,31 ibm,4341-11,180,2000,8000,8,1,6,50,42 ibm,4341-12,185,2000,16000,16,1,6,76,76 ibm,4341-2,180,2000,16000,16,1,6,66,76 ibm,4341-9,225,1000,4000,2,3,6,24,26 ibm,4361-4,25,2000,12000,8,1,4,49,59 ibm,4361-5,25,2000,12000,16,3,5,66,65 ibm,4381-1,17,4000,16000,8,6,12,100,101 ibm,4381-2,17,4000,16000,32,6,12,133,116 ibm,8130-a,1500,768,1000,0,0,0,12,18 ibm,8130-b,1500,768,2000,0,0,0,18,20 ibm,8140,800,768,2000,0,0,0,20,20 ipl,4436,50,2000,4000,0,3,6,27,30 ipl,4443,50,2000,8000,8,3,6,45,44 ipl,4445,50,2000,8000,8,1,6,56,44 ipl,4446,50,2000,16000,24,1,6,70,82 ipl,4460,50,2000,16000,24,1,6,80,82 ipl,4480,50,8000,16000,48,1,10,136,128 magnuson,m80/30,100,1000,8000,0,2,6,16,37 magnuson,m80/31,100,1000,8000,24,2,6,26,46 magnuson,m80/32,100,1000,8000,24,3,6,32,46 magnuson,m80/42,50,2000,16000,12,3,16,45,80 magnuson,m80/43,50,2000,16000,24,6,16,54,88 magnuson,m80/44,50,2000,16000,24,6,16,65,88 microdata,seq.ms/3200,150,512,4000,0,8,128,30,33 nas,as/3000,115,2000,8000,16,1,3,50,46 nas,as/3000-n,115,2000,4000,2,1,5,40,29 
nas,as/5000,92,2000,8000,32,1,6,62,53 nas,as/5000-e,92,2000,8000,32,1,6,60,53 nas,as/5000-n,92,2000,8000,4,1,6,50,41 nas,as/6130,75,4000,16000,16,1,6,66,86 nas,as/6150,60,4000,16000,32,1,6,86,95 nas,as/6620,60,2000,16000,64,5,8,74,107 nas,as/6630,60,4000,16000,64,5,8,93,117 nas,as/6650,50,4000,16000,64,5,10,111,119 nas,as/7000,72,4000,16000,64,8,16,143,120 nas,as/7000-n,72,2000,8000,16,6,8,105,48 nas,as/8040,40,8000,16000,32,8,16,214,126 nas,as/8050,40,8000,32000,64,8,24,277,266 nas,as/8060,35,8000,32000,64,8,24,370,270 nas,as/9000-dpc,38,16000,32000,128,16,32,510,426 nas,as/9000-n,48,4000,24000,32,8,24,214,151 nas,as/9040,38,8000,32000,64,8,24,326,267 nas,as/9060,30,16000,32000,256,16,24,510,603 ncr,v8535:ii,112,1000,1000,0,1,4,8,19 ncr,v8545:ii,84,1000,2000,0,1,6,12,21 ncr,v8555:ii,56,1000,4000,0,1,6,17,26 ncr,v8565:ii,56,2000,6000,0,1,8,21,35 ncr,v8565:ii-e,56,2000,8000,0,1,8,24,41 ncr,v8575:ii,56,4000,8000,0,1,8,34,47 ncr,v8585:ii,56,4000,12000,0,1,8,42,62 ncr,v8595:ii,56,4000,16000,0,1,8,46,78 ncr,v8635,38,4000,8000,32,16,32,51,80 ncr,v8650,38,4000,8000,32,16,32,116,80 ncr,v8655,38,8000,16000,64,4,8,100,142 ncr,v8665,38,8000,24000,160,4,8,140,281 ncr,v8670,38,4000,16000,128,16,32,212,190 nixdorf,8890/30,200,1000,2000,0,1,2,25,21 nixdorf,8890/50,200,1000,4000,0,1,4,30,25 nixdorf,8890/70,200,2000,8000,64,1,5,41,67 perkin-elmer,3205,250,512,4000,0,1,7,25,24 perkin-elmer,3210,250,512,4000,0,4,7,50,24 perkin-elmer,3230,250,1000,16000,1,1,8,50,64 prime,50-2250,160,512,4000,2,1,5,30,25 prime,50-250-ii,160,512,2000,2,3,8,32,20 prime,50-550-ii,160,1000,4000,8,1,14,38,29 prime,50-750-ii,160,1000,8000,16,1,14,60,43 prime,50-850-ii,160,2000,8000,32,1,13,109,53 siemens,7.521,240,512,1000,8,1,3,6,19 siemens,7.531,240,512,2000,8,1,5,11,22 siemens,7.536,105,2000,4000,8,3,8,22,31 siemens,7.541,105,2000,6000,16,6,16,33,41 siemens,7.551,105,2000,8000,16,4,14,58,47 siemens,7.561,52,4000,16000,32,4,12,130,99 siemens,7.865-2,70,4000,12000,8,6,8,75,67 siemens,7.870-2,59,4000,12000,32,6,12,113,81 siemens,7.872-2,59,8000,16000,64,12,24,188,149 siemens,7.875-2,26,8000,24000,32,8,16,173,183 siemens,7.880-2,26,8000,32000,64,12,16,248,275 siemens,7.881-2,26,8000,32000,128,24,32,405,382 sperry,1100/61-h1,116,2000,8000,32,5,28,70,56 sperry,1100/81,50,2000,32000,24,6,26,114,182 sperry,1100/82,50,2000,32000,48,26,52,208,227 sperry,1100/83,50,2000,32000,112,52,104,307,341 sperry,1100/84,50,4000,32000,112,52,104,397,360 sperry,1100/93,30,8000,64000,96,12,176,915,919 sperry,1100/94,30,8000,64000,128,12,176,1150,978 sperry,80/3,180,262,4000,0,1,3,12,24 sperry,80/4,180,512,4000,0,1,3,14,24 sperry,80/5,180,262,4000,0,1,3,18,24 sperry,80/6,180,512,4000,0,1,3,21,24 sperry,80/8,124,1000,8000,0,1,8,42,37 sperry,90/80-model-3,98,1000,8000,32,2,8,46,50 sratus,32,125,2000,8000,0,2,14,52,41 wang,vs-100,480,512,8000,32,0,0,67,47 wang,vs-90,480,1000,4000,0,0,0,45,25 xgboost-3.0.0/demo/CLI/regression/machine.names000066400000000000000000000055271476511272400213650ustar00rootroot000000000000001. Title: Relative CPU Performance Data 2. Source Information -- Creators: Phillip Ein-Dor and Jacob Feldmesser -- Ein-Dor: Faculty of Management; Tel Aviv University; Ramat-Aviv; Tel Aviv, 69978; Israel -- Donor: David W. Aha (aha@ics.uci.edu) (714) 856-8779 -- Date: October, 1987 3. Past Usage: 1. Ein-Dor and Feldmesser (CACM 4/87, pp 308-317) -- Results: -- linear regression prediction of relative cpu performance -- Recorded 34% average deviation from actual values 2. Kibler,D. & Aha,D. (1988). 
Instance-Based Prediction of Real-Valued Attributes. In Proceedings of the CSCSI (Canadian AI) Conference. -- Results: -- instance-based prediction of relative cpu performance -- similar results; no transformations required - Predicted attribute: cpu relative performance (numeric) 4. Relevant Information: -- The estimated relative performance values were estimated by the authors using a linear regression method. See their article (pp 308-313) for more details on how the relative performance values were set. 5. Number of Instances: 209 6. Number of Attributes: 10 (6 predictive attributes, 2 non-predictive, 1 goal field, and the linear regression's guess) 7. Attribute Information: 1. vendor name: 30 (adviser, amdahl,apollo, basf, bti, burroughs, c.r.d, cambex, cdc, dec, dg, formation, four-phase, gould, honeywell, hp, ibm, ipl, magnuson, microdata, nas, ncr, nixdorf, perkin-elmer, prime, siemens, sperry, sratus, wang) 2. Model Name: many unique symbols 3. MYCT: machine cycle time in nanoseconds (integer) 4. MMIN: minimum main memory in kilobytes (integer) 5. MMAX: maximum main memory in kilobytes (integer) 6. CACH: cache memory in kilobytes (integer) 7. CHMIN: minimum channels in units (integer) 8. CHMAX: maximum channels in units (integer) 9. PRP: published relative performance (integer) 10. ERP: estimated relative performance from the original article (integer) 8. Missing Attribute Values: None 9. Class Distribution: the class value (PRP) is continuously valued. PRP Value Range: Number of Instances in Range: 0-20 31 21-100 121 101-200 27 201-300 13 301-400 7 401-500 4 501-600 2 above 600 4 Summary Statistics: Min Max Mean SD PRP Correlation MCYT: 17 1500 203.8 260.3 -0.3071 MMIN: 64 32000 2868.0 3878.7 0.7949 MMAX: 64 64000 11796.1 11726.6 0.8630 CACH: 0 256 25.2 40.6 0.6626 CHMIN: 0 52 4.7 6.8 0.6089 CHMAX: 0 176 18.2 26.0 0.6052 PRP: 6 1150 105.6 160.8 1.0000 ERP: 15 1238 99.3 154.8 0.9665 xgboost-3.0.0/demo/CLI/regression/mapfeat.py000077500000000000000000000013261476511272400207170ustar00rootroot00000000000000#!/usr/bin/env python3 fo = open('machine.txt', 'w') cnt = 6 fmap = {} for l in open('machine.data'): arr = l.split(',') fo.write(arr[8]) for i in range(0, 6): fo.write(' %d:%s' % (i, arr[i + 2])) if arr[0] not in fmap: fmap[arr[0]] = cnt cnt += 1 fo.write(' %d:1' % fmap[arr[0]]) fo.write('\n') fo.close() # create feature map for machine data fo = open('featmap.txt', 'w') # list from machine.names names = [ 'vendor', 'MYCT', 'MMIN', 'MMAX', 'CACH', 'CHMIN', 'CHMAX', 'PRP', 'ERP' ] for i in range(0, 6): fo.write('%d\t%s\tint\n' % (i, names[i + 1])) for v, k in sorted(fmap.items(), key=lambda x: x[1]): fo.write('%d\tvendor=%s\ti\n' % (k, v)) fo.close() xgboost-3.0.0/demo/CLI/regression/mknfold.py000077500000000000000000000007471476511272400207400ustar00rootroot00000000000000#!/usr/bin/env python3 import random import sys if len(sys.argv) < 3: print('Usage: <filename> <k> [nfold = 5]') exit(0) random.seed(10) k = int(sys.argv[2]) if len(sys.argv) > 3: nfold = int(sys.argv[3]) else: nfold = 5 fi = open(sys.argv[1], 'r') ftr = open(sys.argv[1] + '.train', 'w') fte = open(sys.argv[1] + '.test', 'w') for l in fi: if random.randint(1, nfold) == k: fte.write(l) else: ftr.write(l) fi.close() ftr.close() fte.close() xgboost-3.0.0/demo/CLI/regression/runexp.sh000077500000000000000000000012671476511272400206070ustar00rootroot00000000000000#!/bin/bash # map the data to features.
For convenience we only use 7 original attributes and encode them as features in a trivial way python mapfeat.py # split train and test python mknfold.py machine.txt 1 # training and output the models ../../xgboost machine.conf # output predictions of test data ../../xgboost machine.conf task=pred model_in=0002.model # print the boosters of 0002.model in dump.raw.txt ../../xgboost machine.conf task=dump model_in=0002.model name_dump=dump.raw.txt # print the boosters of 0002.model in dump.nice.txt with feature map ../../xgboost machine.conf task=dump model_in=0002.model fmap=featmap.txt name_dump=dump.nice.txt # cat the result cat dump.nice.txt xgboost-3.0.0/demo/CLI/yearpredMSD/000077500000000000000000000000001476511272400167225ustar00rootroot00000000000000xgboost-3.0.0/demo/CLI/yearpredMSD/README.md000066400000000000000000000006701476511272400202040ustar00rootroot00000000000000This demo shows how to use XGBoost on the [Year Prediction task of the Million Song Dataset](https://archive.ics.uci.edu/ml/datasets/YearPredictionMSD) 1. Run runexp.sh ```bash ./runexp.sh ``` You can also use the script to prepare the LIBSVM format and run the [Distributed Version](../../multi-node). Note that normally a single machine is enough for a dataset at this scale; use the distributed version for larger-scale datasets. xgboost-3.0.0/demo/CLI/yearpredMSD/csv2libsvm.py000077500000000000000000000003501476511272400213670ustar00rootroot00000000000000#!/usr/bin/env python3 import sys fo = open(sys.argv[2], 'w') for l in open(sys.argv[1]): arr = l.split(',') fo.write('%s' % arr[0]) for i in range(len(arr) - 1): fo.write(' %d:%s' % (i, arr[i+1])) fo.close() xgboost-3.0.0/demo/CLI/yearpredMSD/runexp.sh000077500000000000000000000012141476511272400206000ustar00rootroot00000000000000#!/bin/bash if [ -f YearPredictionMSD.txt ] then echo "use existing data to run experiment" else echo "getting data from uci, make sure you are connected to the internet" wget https://archive.ics.uci.edu/ml/machine-learning-databases/00203/YearPredictionMSD.txt.zip unzip YearPredictionMSD.txt.zip fi echo "start making data.."
# convert the csv data into libsvm format python csv2libsvm.py YearPredictionMSD.txt yearpredMSD.libsvm head -n 463715 yearpredMSD.libsvm > yearpredMSD.libsvm.train tail -n 51630 yearpredMSD.libsvm > yearpredMSD.libsvm.test echo "finish making the data" ../../../xgboost yearpredMSD.conf xgboost-3.0.0/demo/CLI/yearpredMSD/yearpredMSD.conf000066400000000000000000000017551476511272400217560ustar00rootroot00000000000000# General Parameters, see comment for each definition # choose the tree booster, can also change to gblinear booster = gbtree # this is the only difference with classification, use reg:squarederror to do linear regression # when labels are in [0,1] we can also use reg:logistic objective = reg:squarederror # Tree Booster Parameters # step size shrinkage eta = 1.0 # minimum loss reduction required to make a further partition gamma = 1.0 # minimum sum of instance weight(hessian) needed in a child min_child_weight = 1 # maximum depth of a tree max_depth = 5 base_score = 2001 # Task parameters # the number of round to do boosting num_round = 100 # 0 means do not save any model except the final round model save_period = 0 # The path of training data data = "yearpredMSD.libsvm.train" # The path of validation data, used to monitor training process, here [test] sets name of the validation set eval[test] = "yearpredMSD.libsvm.test" # The path of test data #test:data = "yearpredMSD.libsvm.test" xgboost-3.0.0/demo/README.md000066400000000000000000000431751476511272400154230ustar00rootroot00000000000000Awesome XGBoost =============== This page contains a curated list of examples, tutorials, and blogs about XGBoost use cases. It is inspired by [awesome-MXNet](https://github.com/dmlc/mxnet/blob/master/example/README.md), [awesome-php](https://github.com/ziadoz/awesome-php) and [awesome-machine-learning](https://github.com/josephmisiti/awesome-machine-learning). Please send a pull request if you find things that belong here. Contents -------- - [Code Examples](#code-examples) - [Features Walkthrough](#features-walkthrough) - [Basic Examples by Tasks](#basic-examples-by-tasks) - [Benchmarks](#benchmarks) - [Machine Learning Challenge Winning Solutions](#machine-learning-challenge-winning-solutions) - [Tutorials](#tutorials) - [Usecases](#usecases) - [Tools using XGBoost](#tools-using-xgboost) - [Integrations with 3rd party software](#integrations-with-3rd-party-software) - [Awards](#awards) - [Windows Binaries](#windows-binaries) Code Examples ------------- ### Features Walkthrough _Note: for the R package, see the in-package examples and vignettes instead_ This is a list of short code examples introducing different functionalities of the xgboost packages.
* Basic walkthrough of packages [python](guide-python/basic_walkthrough.py) [Julia](https://github.com/antinucleon/XGBoost.jl/blob/master/demo/basic_walkthrough.jl) [PHP](https://github.com/bpachev/xgboost-php/blob/master/demo/titanic_demo.php) * Customize loss function, and evaluation metric [python](guide-python/custom_objective.py) [Julia](https://github.com/antinucleon/XGBoost.jl/blob/master/demo/custom_objective.jl) * Boosting from existing prediction [python](guide-python/boost_from_prediction.py) [Julia](https://github.com/antinucleon/XGBoost.jl/blob/master/demo/boost_from_prediction.jl) * Predicting using first n trees [python](guide-python/predict_first_ntree.py) [Julia](https://github.com/antinucleon/XGBoost.jl/blob/master/demo/predict_first_ntree.jl) * Generalized Linear Model [python](guide-python/generalized_linear_model.py) [Julia](https://github.com/antinucleon/XGBoost.jl/blob/master/demo/generalized_linear_model.jl) * Cross validation [python](guide-python/cross_validation.py) [Julia](https://github.com/antinucleon/XGBoost.jl/blob/master/demo/cross_validation.jl) * Predicting leaf indices [python](guide-python/predict_leaf_indices.py) ### Basic Examples by Tasks Most of examples in this section are based on CLI or python version. However, the parameter settings can be applied to all versions - [Binary classification](CLI/binary_classification) - [Multiclass classification](multiclass_classification) - [Regression](CLI/regression) - [Learning to Rank](rank) ### Benchmarks - [Starter script for Kaggle Higgs Boson](kaggle-higgs) - [Kaggle Tradeshift winning solution by daxiongshu](https://github.com/daxiongshu/kaggle-tradeshift-winning-solution) - [Benchmarking the most commonly used open source tools for binary classification](https://github.com/szilard/benchm-ml#boosting-gradient-boosted-treesgradient-boosting-machines) ## Machine Learning Challenge Winning Solutions XGBoost is extensively used by machine learning practitioners to create state of art data science solutions, this is a list of machine learning winning solutions with XGBoost. Please send pull requests if you find ones that are missing here. - Bishwarup Bhattacharjee, 1st place winner of [Allstate Claims Severity](https://www.kaggle.com/competitions/allstate-claims-severity/overview) conducted on December 2016. Link to [discussion](https://www.kaggle.com/competitions/allstate-claims-severity/discussion/26416) - Benedikt Schifferer, Gilberto Titericz, Chris Deotte, Christof Henkel, Kazuki Onodera, Jiwei Liu, Bojan Tunguz, Even Oldridge, Gabriel De Souza Pereira Moreira and Ahmet Erdem, 1st place winner of [Twitter RecSys Challenge 2020](https://recsys-twitter.com/) conducted from June,20-August,20. [GPU Accelerated Feature Engineering and Training for Recommender Systems](https://medium.com/rapids-ai/winning-solution-of-recsys2020-challenge-gpu-accelerated-feature-engineering-and-training-for-cd67c5a87b1f) - Eugene Khvedchenya,Jessica Fridrich, Jan Butora, Yassine Yousfi 1st place winner in [ALASKA2 Image Steganalysis](https://www.kaggle.com/c/alaska2-image-steganalysis/overview). Link to [discussion](https://www.kaggle.com/c/alaska2-image-steganalysis/discussion/168546) - Dan Ofer, Seffi Cohen, Noa Dagan, Nurit, 1st place in WiDS Datathon 2020. Link to [discussion](https://www.kaggle.com/c/widsdatathon2020/discussion/133189) - Chris Deotte, Konstantin Yakovlev 1st place in [IEEE-CIS Fraud Detection](https://www.kaggle.com/c/ieee-fraud-detection/overview). 
Link to [discussion](https://www.kaggle.com/c/ieee-fraud-detection/discussion/111308) - Giba, Lucasz, 1st place winner in [Santander Value Prediction Challenge](https://www.kaggle.com/c/santander-value-prediction-challenge) organized on August,2018. Solution [discussion](https://www.kaggle.com/c/santander-value-prediction-challenge/discussion/65272) and [code](https://www.kaggle.com/titericz/winner-model-giba-single-xgb-lb0-5178/comments) - Beluga, 2nd place and Evgeny Nekrasov, 3rd place winner in Statoil/C-CORE Iceberg Classifier Challenge'2018. Link to [discussion](https://www.kaggle.com/c/statoil-iceberg-classifier-challenge/discussion/48294) - Radek Osmulski, 1st place of the [iMaterialist Challenge (Fashion) at FGVC5](https://www.kaggle.com/c/imaterialist-challenge-fashion-2018/overview). Link to [the winning solution](https://www.kaggle.com/c/imaterialist-challenge-fashion-2018/discussion/57944). - Maksims Volkovs, Guangwei Yu and Tomi Poutanen, 1st place of the [2017 ACM RecSys challenge](http://2017.recsyschallenge.com/). Link to [paper](http://www.cs.toronto.edu/~mvolkovs/recsys2017_challenge.pdf). - Vlad Sandulescu, Mihai Chiru, 1st place of the [KDD Cup 2016 competition](https://kddcup2016.azurewebsites.net). Link to [the arxiv paper](http://arxiv.org/abs/1609.02728). - Marios Michailidis, Mathias Müller and HJ van Veen, 1st place of the [Dato Truely Native? competition](https://www.kaggle.com/c/dato-native). Link to [the Kaggle interview](http://blog.kaggle.com/2015/12/03/dato-winners-interview-1st-place-mad-professors/). - Vlad Mironov, Alexander Guschin, 1st place of the [CERN LHCb experiment Flavour of Physics competition](https://www.kaggle.com/c/flavours-of-physics). Link to [the Kaggle interview](http://blog.kaggle.com/2015/11/30/flavour-of-physics-technical-write-up-1st-place-go-polar-bears/). - Josef Slavicek, 3rd place of the [CERN LHCb experiment Flavour of Physics competition](https://www.kaggle.com/c/flavours-of-physics). Link to [the Kaggle interview](http://blog.kaggle.com/2015/11/23/flavour-of-physics-winners-interview-3rd-place-josef-slavicek/). - Mario Filho, Josef Feigl, Lucas, Gilberto, 1st place of the [Caterpillar Tube Pricing competition](https://www.kaggle.com/c/caterpillar-tube-pricing). Link to [the Kaggle interview](http://blog.kaggle.com/2015/09/22/caterpillar-winners-interview-1st-place-gilberto-josef-leustagos-mario/). - Qingchen Wang, 1st place of the [Liberty Mutual Property Inspection](https://www.kaggle.com/c/liberty-mutual-group-property-inspection-prediction). Link to [the Kaggle interview](http://blog.kaggle.com/2015/09/28/liberty-mutual-property-inspection-winners-interview-qingchen-wang/). - Chenglong Chen, 1st place of the [Crowdflower Search Results Relevance](https://www.kaggle.com/c/crowdflower-search-relevance). Link to [the winning solution](https://www.kaggle.com/c/crowdflower-search-relevance/forums/t/15186/1st-place-winner-solution-chenglong-chen/). - Alexandre Barachant (“Cat”) and Rafał Cycoń (“Dog”), 1st place of the [Grasp-and-Lift EEG Detection](https://www.kaggle.com/c/grasp-and-lift-eeg-detection). Link to [the Kaggle interview](http://blog.kaggle.com/2015/10/12/grasp-and-lift-eeg-winners-interview-1st-place-cat-dog/). - Halla Yang, 2nd place of the [Recruit Coupon Purchase Prediction Challenge](https://www.kaggle.com/c/coupon-purchase-prediction). Link to [the Kaggle interview](http://blog.kaggle.com/2015/10/21/recruit-coupon-purchase-winners-interview-2nd-place-halla-yang/). 
- Owen Zhang, 1st place of the [Avito Context Ad Clicks competition](https://www.kaggle.com/c/avito-context-ad-clicks). Link to [the Kaggle interview](http://blog.kaggle.com/2015/08/26/avito-winners-interview-1st-place-owen-zhang/). - Keiichi Kuroyanagi, 2nd place of the [Airbnb New User Bookings](https://www.kaggle.com/c/airbnb-recruiting-new-user-bookings). Link to [the Kaggle interview](http://blog.kaggle.com/2016/03/17/airbnb-new-user-bookings-winners-interview-2nd-place-keiichi-kuroyanagi-keiku/). - Marios Michailidis, Mathias Müller and Ning Situ, 1st place [Homesite Quote Conversion](https://www.kaggle.com/c/homesite-quote-conversion). Link to [the Kaggle interview](http://blog.kaggle.com/2016/04/08/homesite-quote-conversion-winners-write-up-1st-place-kazanova-faron-clobber/). - Gilberto Titericz, Stanislav Semenov, 1st place in challenge to classify products into the correct category organized by Otto Group in 2015. Link to [challenge](https://www.kaggle.com/c/otto-group-product-classification-challenge). Link to [kaggle winning solution](https://www.kaggle.com/c/otto-group-product-classification-challenge/discussion/14335) - Darius Barušauskas, 1st place winner in [Predicting Red Hat Business Value](https://www.kaggle.com/c/predicting-red-hat-business-value). Link to [interview](https://medium.com/kaggle-blog/red-hat-business-value-competition-1st-place-winners-interview-darius-baru%C5%A1auskas-646692a2841b). Link to [discussion](https://www.kaggle.com/c/predicting-red-hat-business-value/discussion/23786) - David Austin, Weimin Wang, 1st place winner in [Iceberg-classifier-challenge](https://www.kaggle.com/c/statoil-iceberg-classifier-challenge/leaderboard) Link to [discussion](https://www.kaggle.com/c/statoil-iceberg-classifier-challenge/discussion/48241) - Kazuki Onodera, Kazuki Fujikawa, 2nd place winner in [OpenVaccine: COVID-19 mRNA Vaccine Degradation Prediction](https://www.kaggle.com/c/stanford-covid-vaccine/overview) Link to [Discussion](https://www.kaggle.com/c/stanford-covid-vaccine/discussion/189709) - Prarthana Bhat, 2nd place winner in [DYD Competition](https://datahack.analyticsvidhya.com/contest/date-your-data/). Link to [Solution](https://github.com/analyticsvidhya/DateYourData/blob/master/Prathna_Bhat_Model.R). 
## Talks - XGBoost: A Scalable Tree Boosting System ([video](https://www.youtube.com/watch?v=Vly8xGnNiWs) + [slides](https://speakerdeck.com/datasciencela/tianqi-chen-xgboost-overview-and-latest-news-la-meetup-talk)) by Tianqi Chen at the Los Angeles Data Science meetup ## Tutorials - [XGBoost Training with Dask, using Saturn Cloud](https://www.saturncloud.io/docs/tutorials/xgboost/) - [Machine Learning with XGBoost on Qubole Spark Cluster](https://www.qubole.com/blog/machine-learning-xgboost-qubole-spark-cluster/) - [XGBoost Official RMarkdown Tutorials](https://xgboost.readthedocs.org/en/latest/R-package/index.html#tutorials) - [An Introduction to XGBoost R Package](http://dmlc.ml/rstats/2016/03/10/xgboost.html) by Tong He - [Open Source Tools & Data Science Competitions](http://www.slideshare.net/odsc/owen-zhangopen-sourcetoolsanddscompetitions1) by Owen Zhang - XGBoost parameter tuning tips * [Feature Importance Analysis with XGBoost in Tax audit](http://fr.slideshare.net/MichaelBENESTY/feature-importance-analysis-with-xgboost-in-tax-audit) * [Winning solution of Kaggle Higgs competition: what a single model can do](http://no2147483647.wordpress.com/2014/09/17/winning-solution-of-kaggle-higgs-competition-what-a-single-model-can-do/) - [XGBoost - eXtreme Gradient Boosting](http://www.slideshare.net/ShangxuanZhang/xgboost) by Tong He - [How to use XGBoost algorithm in R in easy steps](http://www.analyticsvidhya.com/blog/2016/01/xgboost-algorithm-easy-steps/) by TAVISH SRIVASTAVA ([Chinese Translation 中文翻译](https://segmentfault.com/a/1190000004421821) by [HarryZhu](https://segmentfault.com/u/harryprince)) - [Kaggle Solution: What’s Cooking ? (Text Mining Competition)](http://www.analyticsvidhya.com/blog/2015/12/kaggle-solution-cooking-text-mining-competition/) by MANISH SARASWAT - Better Optimization with Repeated Cross Validation and the XGBoost model (Machine Learning with R) by Manuel Amunategui ([Youtube Link](https://www.youtube.com/watch?v=Og7CGAfSr_Y)) ([GitHub Link](https://github.com/amunategui/BetterCrossValidation)) - [XGBoost Rossman Parameter Tuning](https://www.kaggle.com/khozzy/rossmann-store-sales/xgboost-parameter-tuning-template/run/90168/notebook) by [Norbert Kozlowski](https://www.kaggle.com/khozzy) - [Featurizing log data before XGBoost](http://www.slideshare.net/DataRobot/featurizing-log-data-before-xgboost) by Xavier Conort, Owen Zhang etc. - [West Nile Virus Competition Benchmarks & Tutorials](http://blog.kaggle.com/2015/07/21/west-nile-virus-competition-benchmarks-tutorials/) by [Anna Montoya](http://blog.kaggle.com/author/annamontoya/) - [Ensemble Decision Tree with XGBoost](https://www.kaggle.com/binghsu/predict-west-nile-virus/xgboost-starter-code-python-0-69) by [Bing Xu](https://www.kaggle.com/binghsu) - [Notes on eXtreme Gradient Boosting](http://startup.ml/blog/xgboost) by ARSHAK NAVRUZYAN ([iPython Notebook](https://github.com/startupml/koan/blob/master/eXtreme%20Gradient%20Boosting.ipynb)) - [Complete Guide to Parameter Tuning in XGBoost](http://www.analyticsvidhya.com/blog/2016/03/complete-guide-parameter-tuning-xgboost-with-codes-python/) by Aarshay Jain - [Practical XGBoost in Python online course](http://education.parrotprediction.teachable.com/courses/practical-xgboost-in-python) by Parrot Prediction - [Spark and XGBoost using Scala](http://www.elenacuoco.com/2016/10/10/scala-spark-xgboost-classification/) by Elena Cuoco ## Usecases If you have a particular use case of XGBoost that you would like to highlight,
send a pull request to add a one-sentence description. :) - XGBoost is used in [Kaggle Script](https://www.kaggle.com/scripts) to solve data science challenges. - Deploy XGBoost as a REST API server from a Jupyter notebook with [BentoML](https://github.com/bentoml/bentoml). [Link to notebook](https://github.com/bentoml/BentoML/blob/master/examples/xgboost-predict-titanic-survival/XGBoost-titanic-survival-prediction.ipynb) - [Seldon predictive service powered by XGBoost](https://docs.seldon.io/projects/seldon-core/en/latest/servers/xgboost.html) - XGBoost Distributed is used in [ODPS Cloud Service by Alibaba](https://yq.aliyun.com/articles/6355) (in Chinese) - XGBoost is incorporated as part of [Graphlab Create](https://dato.com/products/create/) for scalable machine learning. - [Hanjing Su](https://www.52cs.org) from Tencent data platform team: "We use distributed XGBoost for click through prediction in wechat shopping and lookalikes. The problems involve hundreds millions of users and thousands of features. XGBoost is cleanly designed and can be easily integrated into our production environment, reducing our cost in developments." - [CNevd](https://github.com/CNevd) from autohome.com ad platform team: "Distributed XGBoost is used for click through rate prediction in our display advertising, XGBoost is highly efficient and flexible and can be easily used on our distributed platform, our ctr made a great improvement with hundred millions samples and millions features due to this awesome XGBoost" ## Tools using XGBoost - [BayesBoost](https://github.com/mpearmain/BayesBoost) - Bayesian Optimization using xgboost and sklearn API - [FLAML](https://github.com/microsoft/FLAML) - An open source AutoML library designed to automatically produce accurate machine learning models with low computational cost. FLAML includes [XGBoost as one of the default learners](https://github.com/microsoft/FLAML/blob/main/flaml/model.py) and can also be used as a fast hyperparameter tuning tool for XGBoost ([code example](https://microsoft.github.io/FLAML/docs/Examples/AutoML-for-XGBoost)). - [gp_xgboost_gridsearch](https://github.com/vatsan/gp_xgboost_gridsearch) - In-database parallel grid-search for XGBoost on [Greenplum](https://github.com/greenplum-db/gpdb) using PL/Python - [tpot](https://github.com/rhiever/tpot) - A Python tool that automatically creates and optimizes machine learning pipelines using genetic programming. ## Integrations with 3rd party software Open source integrations with XGBoost: * [Neptune.ai](http://neptune.ai/) - Experiment management and collaboration tool for ML/DL/RL specialists. The integration takes the form of an [XGBoost callback](https://docs.neptune.ai/integrations/xgboost.html) that automatically logs training and evaluation metrics, as well as the saved model (booster), feature importance chart and visualized trees. * [Optuna](https://optuna.org/) - An open source hyperparameter optimization framework to automate hyperparameter search. Optuna integrates with XGBoost through the [XGBoostPruningCallback](https://optuna.readthedocs.io/en/stable/reference/integration.html#optuna.integration.XGBoostPruningCallback), which lets users easily prune unpromising trials. * [dtreeviz](https://github.com/parrt/dtreeviz) - A python library for decision tree visualization and model interpretation. Starting from version 1.0, dtreeviz is able to visualize tree ensembles produced by XGBoost.
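As a quick illustration of the Optuna integration described above, the sketch below shows one typical way to wire `XGBoostPruningCallback` into `xgb.train`; the dataset, search space, and trial count are made up for the example and are not from any of the projects listed:

```python
import optuna
from sklearn.datasets import make_regression
from sklearn.model_selection import train_test_split

import xgboost as xgb

# Illustrative synthetic data; replace with your own.
X, y = make_regression(n_samples=1000, n_features=20, random_state=0)
X_tr, X_va, y_tr, y_va = train_test_split(X, y, random_state=0)
dtrain = xgb.DMatrix(X_tr, label=y_tr)
dvalid = xgb.DMatrix(X_va, label=y_va)


def objective(trial):
    params = {
        "objective": "reg:squarederror",
        "max_depth": trial.suggest_int("max_depth", 3, 8),
        "eta": trial.suggest_float("eta", 0.01, 0.3, log=True),
    }
    # Report the validation RMSE to Optuna every round so that
    # unpromising trials can be pruned early.
    pruning = optuna.integration.XGBoostPruningCallback(trial, "valid-rmse")
    history = {}
    xgb.train(params, dtrain, num_boost_round=100,
              evals=[(dvalid, "valid")], evals_result=history,
              verbose_eval=False, callbacks=[pruning])
    return history["valid"]["rmse"][-1]


study = optuna.create_study(direction="minimize")
study.optimize(objective, n_trials=20)
```

A fuller, runnable version of this pattern ships with the repository in [demo/aft_survival/aft_survival_demo_with_optuna.py](../demo/aft_survival/aft_survival_demo_with_optuna.py).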
## Awards - [John Chambers Award](http://stat-computing.org/awards/jmc/winners.html) - 2016 Winner: XGBoost R Package, by Tong He (Simon Fraser University) and Tianqi Chen (University of Washington) - [InfoWorld’s 2019 Technology of the Year Award](https://www.infoworld.com/article/3336072/application-development/infoworlds-2019-technology-of-the-year-award-winners.html) ## Windows Binaries Unofficial windows binaries and instructions on how to use them are hosted on [Guido Tapia's blog](http://www.picnet.com.au/blogs/guido/post/2016/09/22/xgboost-windows-x64-binaries-for-download/) xgboost-3.0.0/demo/aft_survival/000077500000000000000000000000001476511272400166415ustar00rootroot00000000000000xgboost-3.0.0/demo/aft_survival/README.rst000066400000000000000000000003421476511272400203270ustar00rootroot00000000000000Survival Analysis Walkthrough ============================= This is a collection of examples for using the XGBoost Python package for training survival models. For an introduction, see :doc:`/tutorials/aft_survival_analysis` xgboost-3.0.0/demo/aft_survival/aft_survival_demo.py000066400000000000000000000043631476511272400227320ustar00rootroot00000000000000""" Demo for survival analysis (regression). ======================================== Demo for survival analysis (regression). using Accelerated Failure Time (AFT) model. """ import os import numpy as np import pandas as pd from sklearn.model_selection import ShuffleSplit import xgboost as xgb # The Veterans' Administration Lung Cancer Trial # The Statistical Analysis of Failure Time Data by Kalbfleisch J. and Prentice R (1980) CURRENT_DIR = os.path.dirname(__file__) df = pd.read_csv(os.path.join(CURRENT_DIR, '../data/veterans_lung_cancer.csv')) print('Training data:') print(df) # Split features and labels y_lower_bound = df['Survival_label_lower_bound'] y_upper_bound = df['Survival_label_upper_bound'] X = df.drop(['Survival_label_lower_bound', 'Survival_label_upper_bound'], axis=1) # Split data into training and validation sets rs = ShuffleSplit(n_splits=2, test_size=.7, random_state=0) train_index, valid_index = next(rs.split(X)) dtrain = xgb.DMatrix(X.values[train_index, :]) dtrain.set_float_info('label_lower_bound', y_lower_bound[train_index]) dtrain.set_float_info('label_upper_bound', y_upper_bound[train_index]) dvalid = xgb.DMatrix(X.values[valid_index, :]) dvalid.set_float_info('label_lower_bound', y_lower_bound[valid_index]) dvalid.set_float_info('label_upper_bound', y_upper_bound[valid_index]) # Train gradient boosted trees using AFT loss and metric params = {'verbosity': 0, 'objective': 'survival:aft', 'eval_metric': 'aft-nloglik', 'tree_method': 'hist', 'learning_rate': 0.05, 'aft_loss_distribution': 'normal', 'aft_loss_distribution_scale': 1.20, 'max_depth': 6, 'lambda': 0.01, 'alpha': 0.02} bst = xgb.train(params, dtrain, num_boost_round=10000, evals=[(dtrain, 'train'), (dvalid, 'valid')], early_stopping_rounds=50) # Run prediction on the validation set df = pd.DataFrame({'Label (lower bound)': y_lower_bound[valid_index], 'Label (upper bound)': y_upper_bound[valid_index], 'Predicted label': bst.predict(dvalid)}) print(df) # Show only data points with right-censored labels print(df[np.isinf(df['Label (upper bound)'])]) # Save trained model bst.save_model('aft_model.json') xgboost-3.0.0/demo/aft_survival/aft_survival_demo_with_optuna.py000066400000000000000000000071071476511272400253520ustar00rootroot00000000000000""" Demo for survival analysis (regression) with Optuna. 
==================================================== Demo for survival analysis (regression) using Accelerated Failure Time (AFT) model, using Optuna to tune hyperparameters """ import numpy as np import optuna import pandas as pd from sklearn.model_selection import ShuffleSplit import xgboost as xgb # The Veterans' Administration Lung Cancer Trial # The Statistical Analysis of Failure Time Data by Kalbfleisch J. and Prentice R (1980) df = pd.read_csv('../data/veterans_lung_cancer.csv') print('Training data:') print(df) # Split features and labels y_lower_bound = df['Survival_label_lower_bound'] y_upper_bound = df['Survival_label_upper_bound'] X = df.drop(['Survival_label_lower_bound', 'Survival_label_upper_bound'], axis=1) # Split data into training and validation sets rs = ShuffleSplit(n_splits=2, test_size=.7, random_state=0) train_index, valid_index = next(rs.split(X)) dtrain = xgb.DMatrix(X.values[train_index, :]) dtrain.set_float_info('label_lower_bound', y_lower_bound[train_index]) dtrain.set_float_info('label_upper_bound', y_upper_bound[train_index]) dvalid = xgb.DMatrix(X.values[valid_index, :]) dvalid.set_float_info('label_lower_bound', y_lower_bound[valid_index]) dvalid.set_float_info('label_upper_bound', y_upper_bound[valid_index]) # Define hyperparameter search space base_params = {'verbosity': 0, 'objective': 'survival:aft', 'eval_metric': 'aft-nloglik', 'tree_method': 'hist'} # Hyperparameters common to all trials def objective(trial): params = {'learning_rate': trial.suggest_loguniform('learning_rate', 0.01, 1.0), 'aft_loss_distribution': trial.suggest_categorical('aft_loss_distribution', ['normal', 'logistic', 'extreme']), 'aft_loss_distribution_scale': trial.suggest_loguniform('aft_loss_distribution_scale', 0.1, 10.0), 'max_depth': trial.suggest_int('max_depth', 3, 8), 'lambda': trial.suggest_loguniform('lambda', 1e-8, 1.0), 'alpha': trial.suggest_loguniform('alpha', 1e-8, 1.0)} # Search space params.update(base_params) pruning_callback = optuna.integration.XGBoostPruningCallback(trial, 'valid-aft-nloglik') bst = xgb.train(params, dtrain, num_boost_round=10000, evals=[(dtrain, 'train'), (dvalid, 'valid')], early_stopping_rounds=50, verbose_eval=False, callbacks=[pruning_callback]) if bst.best_iteration >= 25: return bst.best_score else: return np.inf # Reject models with < 25 trees # Run hyperparameter search study = optuna.create_study(direction='minimize') study.optimize(objective, n_trials=200) print('Completed hyperparameter tuning with best aft-nloglik = {}.'.format(study.best_trial.value)) params = {} params.update(base_params) params.update(study.best_trial.params) # Re-run training with the best hyperparameter combination print('Re-running the best trial... params = {}'.format(params)) bst = xgb.train(params, dtrain, num_boost_round=10000, evals=[(dtrain, 'train'), (dvalid, 'valid')], early_stopping_rounds=50) # Run prediction on the validation set df = pd.DataFrame({'Label (lower bound)': y_lower_bound[valid_index], 'Label (upper bound)': y_upper_bound[valid_index], 'Predicted label': bst.predict(dvalid)}) print(df) # Show only data points with right-censored labels print(df[np.isinf(df['Label (upper bound)'])]) # Save trained model bst.save_model('aft_best_model.json') xgboost-3.0.0/demo/aft_survival/aft_survival_viz_demo.py000066400000000000000000000101361476511272400236150ustar00rootroot00000000000000""" Visual demo for survival analysis (regression) with Accelerated Failure Time (AFT) model. 
========================================================================================= This demo uses 1D toy data and visualizes how XGBoost fits a tree ensemble. The ensemble model starts out as a flat line and evolves into a step function in order to account for all ranged labels. """ import matplotlib.pyplot as plt import numpy as np import xgboost as xgb plt.rcParams.update({"font.size": 13}) # Function to visualize censored labels def plot_censored_labels( X: np.ndarray, y_lower: np.ndarray, y_upper: np.ndarray ) -> None: def replace_inf(x: np.ndarray, target_value: float) -> np.ndarray: x[np.isinf(x)] = target_value return x plt.plot(X, y_lower, "o", label="y_lower", color="blue") plt.plot(X, y_upper, "o", label="y_upper", color="fuchsia") plt.vlines( X, ymin=replace_inf(y_lower, 0.01), ymax=replace_inf(y_upper, 1000.0), label="Range for y", color="gray", ) # Toy data X = np.array([1, 2, 3, 4, 5]).reshape((-1, 1)) INF = np.inf y_lower = np.array([10, 15, -INF, 30, 100]) y_upper = np.array([INF, INF, 20, 50, INF]) # Visualize toy data plt.figure(figsize=(5, 4)) plot_censored_labels(X, y_lower, y_upper) plt.ylim((6, 200)) plt.legend(loc="lower right") plt.title("Toy data") plt.xlabel("Input feature") plt.ylabel("Label") plt.yscale("log") plt.tight_layout() plt.show(block=True) # Will be used to visualize XGBoost model grid_pts = np.linspace(0.8, 5.2, 1000).reshape((-1, 1)) # Train AFT model using XGBoost dmat = xgb.DMatrix(X) dmat.set_float_info("label_lower_bound", y_lower) dmat.set_float_info("label_upper_bound", y_upper) params = {"max_depth": 3, "objective": "survival:aft", "min_child_weight": 0} accuracy_history = [] class PlotIntermediateModel(xgb.callback.TrainingCallback): """Custom callback to plot intermediate models.""" def __init__(self) -> None: super().__init__() def after_iteration( self, model: xgb.Booster, epoch: int, evals_log: xgb.callback.TrainingCallback.EvalsLog, ) -> bool: """Run after training is finished.""" # Compute y_pred = prediction using the intermediate model, at current boosting # iteration y_pred = model.predict(dmat) # "Accuracy" = the number of data points whose ranged label (y_lower, y_upper) # includes the corresponding predicted label (y_pred) acc = np.sum( np.logical_and(y_pred >= y_lower, y_pred <= y_upper) / len(X) * 100 ) accuracy_history.append(acc) # Plot ranged labels as well as predictions by the model plt.subplot(5, 3, epoch + 1) plot_censored_labels(X, y_lower, y_upper) y_pred_grid_pts = model.predict(xgb.DMatrix(grid_pts)) plt.plot( grid_pts, y_pred_grid_pts, "r-", label="XGBoost AFT model", linewidth=4 ) plt.title("Iteration {}".format(epoch), x=0.5, y=0.8) plt.xlim((0.8, 5.2)) plt.ylim((1 if np.min(y_pred) < 6 else 6, 200)) plt.yscale("log") return False res: xgb.callback.TrainingCallback.EvalsLog = {} plt.figure(figsize=(12, 13)) bst = xgb.train( params, dmat, num_boost_round=15, evals=[(dmat, "train")], evals_result=res, callbacks=[PlotIntermediateModel()], ) plt.tight_layout() plt.legend( loc="lower center", ncol=4, bbox_to_anchor=(0.5, 0), bbox_transform=plt.gcf().transFigure, ) plt.tight_layout() # Plot negative log likelihood over boosting iterations plt.figure(figsize=(8, 3)) plt.subplot(1, 2, 1) plt.plot(res["train"]["aft-nloglik"], "b-o", label="aft-nloglik") plt.xlabel("# Boosting Iterations") plt.legend(loc="best") # Plot "accuracy" over boosting iterations # "Accuracy" = the number of data points whose ranged label (y_lower, y_upper) includes # the corresponding predicted label (y_pred) plt.subplot(1, 2, 2) 
plt.plot(accuracy_history, "r-o", label="Accuracy (%)") plt.xlabel("# Boosting Iterations") plt.legend(loc="best") plt.tight_layout() plt.show() xgboost-3.0.0/demo/c-api/000077500000000000000000000000001476511272400151255ustar00rootroot00000000000000xgboost-3.0.0/demo/c-api/.gitignore000066400000000000000000000000131476511272400171070ustar00rootroot00000000000000c-api-demo xgboost-3.0.0/demo/c-api/CMakeLists.txt000066400000000000000000000010511476511272400176620ustar00rootroot00000000000000cmake_minimum_required(VERSION 3.18) project(xgboost-c-examples) add_subdirectory(basic) add_subdirectory(external-memory) add_subdirectory(inference) enable_testing() add_test( NAME test_xgboost_demo_c_basic COMMAND api-demo WORKING_DIRECTORY ${xgboost-c-examples_BINARY_DIR} ) add_test( NAME test_xgboost_demo_c_external_memory COMMAND external-memory-demo WORKING_DIRECTORY ${xgboost-c-examples_BINARY_DIR} ) add_test( NAME test_xgboost_demo_c_inference COMMAND inference-demo WORKING_DIRECTORY ${xgboost-c-examples_BINARY_DIR} ) xgboost-3.0.0/demo/c-api/basic/000077500000000000000000000000001476511272400162065ustar00rootroot00000000000000xgboost-3.0.0/demo/c-api/basic/CMakeLists.txt000066400000000000000000000006301476511272400207450ustar00rootroot00000000000000project(api-demo LANGUAGES C VERSION 0.0.1) find_package(xgboost REQUIRED) # xgboost is built as static libraries, all cxx dependencies need to be linked into the # executable. if(XGBOOST_BUILD_STATIC_LIB) enable_language(CXX) # find again for those cxx libraries. find_package(xgboost REQUIRED) endif() add_executable(api-demo c-api-demo.c) target_link_libraries(api-demo PRIVATE xgboost::xgboost) xgboost-3.0.0/demo/c-api/basic/Makefile000066400000000000000000000005601476511272400176470ustar00rootroot00000000000000SRC=c-api-demo.c TGT=c-api-demo cc=cc CFLAGS ?=-O3 XGBOOST_ROOT ?=../.. INCLUDE_DIR=-I$(XGBOOST_ROOT)/include -I$(XGBOOST_ROOT)/dmlc-core/include LIB_DIR=-L$(XGBOOST_ROOT)/lib build: $(TGT) $(TGT): $(SRC) Makefile $(cc) $(CFLAGS) $(INCLUDE_DIR) $(LIB_DIR) -o $(TGT) $(SRC) -lxgboost run: $(TGT) LD_LIBRARY_PATH=$(XGBOOST_ROOT)/lib ./$(TGT) clean: rm -f $(TGT) xgboost-3.0.0/demo/c-api/basic/README.md000066400000000000000000000017071476511272400174720ustar00rootroot00000000000000C-APIs === **XGBoost** implements a C API originally designed for various language bindings. For detailed reference, please check xgboost/c_api.h. Here is a demonstration of using the API. # CMake If you use **CMake** for your project, you can either install **XGBoost** somewhere in your system and tell CMake to find it by calling `find_package(xgboost)`, or put **XGBoost** inside your project's source tree and call **CMake** command: `add_subdirectory(xgboost)`. To use `find_package()`, put the following in your **CMakeLists.txt**: ``` CMake find_package(xgboost REQUIRED) add_executable(api-demo c-api-demo.c) target_link_libraries(api-demo xgboost::xgboost) ``` If you want to put XGBoost inside your project (like git submodule), use this instead: ``` CMake add_subdirectory(xgboost) add_executable(api-demo c-api-demo.c) target_link_libraries(api-demo xgboost) ``` # make You can start by modifying the makefile in this directory to fit your need. xgboost-3.0.0/demo/c-api/basic/c-api-demo.c000066400000000000000000000176411476511272400202760ustar00rootroot00000000000000/** * Copyright 2019-2023 by XGBoost contributors * * \file c-api-demo.c * \brief A simple example of using xgboost C API. 
*/ #include <assert.h> #include <math.h> #include <stdint.h> /* uint32_t,uint64_t */ #include <stdio.h> #include <stdlib.h> #include <string.h> #include <xgboost/c_api.h> #define safe_xgboost(call) { \ int err = (call); \ if (err != 0) { \ fprintf(stderr, "%s:%d: error in %s: %s\n", __FILE__, __LINE__, #call, XGBGetLastError()); \ exit(1); \ } \ } /* Make Json encoded array interface. */ static void MakeArrayInterface(size_t data, size_t n, char const* typestr, size_t length, char* out) { static char const kTemplate[] = "{\"data\": [%lu, true], \"shape\": [%lu, %lu], \"typestr\": \"%s\", \"version\": 3}"; memset(out, '\0', length); sprintf(out, kTemplate, data, n, 1ul, typestr); } /* Make Json encoded DMatrix configuration. */ static void MakeConfig(int n_threads, size_t length, char* out) { static char const kTemplate[] = "{\"missing\": NaN, \"nthread\": %d}"; memset(out, '\0', length); sprintf(out, kTemplate, n_threads); } int main() { int silent = 0; int use_gpu = 0; // set to 1 to use the GPU for training // load the data DMatrixHandle dtrain, dtest; safe_xgboost(XGDMatrixCreateFromFile("../../data/agaricus.txt.train?format=libsvm", silent, &dtrain)); safe_xgboost(XGDMatrixCreateFromFile("../../data/agaricus.txt.test?format=libsvm", silent, &dtest)); // create the booster BoosterHandle booster; DMatrixHandle eval_dmats[2] = {dtrain, dtest}; safe_xgboost(XGBoosterCreate(eval_dmats, 2, &booster)); // configure the training // available parameters are described here: // https://xgboost.readthedocs.io/en/latest/parameter.html safe_xgboost(XGBoosterSetParam(booster, "device", use_gpu ? "cuda" : "cpu")); safe_xgboost(XGBoosterSetParam(booster, "objective", "binary:logistic")); safe_xgboost(XGBoosterSetParam(booster, "min_child_weight", "1")); safe_xgboost(XGBoosterSetParam(booster, "gamma", "0.1")); safe_xgboost(XGBoosterSetParam(booster, "max_depth", "3")); safe_xgboost(XGBoosterSetParam(booster, "verbosity", silent ? "0" : "1")); // train and evaluate for 10 iterations int n_trees = 10; const char* eval_names[2] = {"train", "test"}; const char* eval_result = NULL; for (int i = 0; i < n_trees; ++i) { safe_xgboost(XGBoosterUpdateOneIter(booster, i, dtrain)); safe_xgboost(XGBoosterEvalOneIter(booster, i, eval_dmats, eval_names, 2, &eval_result)); printf("%s\n", eval_result); } bst_ulong num_feature = 0; safe_xgboost(XGBoosterGetNumFeature(booster, &num_feature)); printf("num_feature: %lu\n", (unsigned long)(num_feature)); // predict bst_ulong out_len = 0; int n_print = 10; /* Run prediction with DMatrix object. */ char const config[] = "{\"training\": false, \"type\": 0, " "\"iteration_begin\": 0, \"iteration_end\": 0, \"strict_shape\": false}"; /* Shape of output prediction */ uint64_t const* out_shape; /* Dimension of output prediction */ uint64_t out_dim; /* Pointer to a thread local contiguous array, assigned in prediction function.
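* The array is owned by XGBoost and stays valid only until the next XGBoost API call on this thread, so copy the values out if they are needed later.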
*/ float const* out_result = NULL; safe_xgboost( XGBoosterPredictFromDMatrix(booster, dtest, config, &out_shape, &out_dim, &out_result)); printf("y_pred: "); for (int i = 0; i < n_print; ++i) { printf("%1.4f ", out_result[i]); } printf("\n"); // print true labels safe_xgboost(XGDMatrixGetFloatInfo(dtest, "label", &out_len, &out_result)); printf("y_test: "); for (int i = 0; i < n_print; ++i) { printf("%1.4f ", out_result[i]); } printf("\n"); { printf("Dense Matrix Example (XGDMatrixCreateFromMat): "); const float values[] = {0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 1, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 1, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0}; DMatrixHandle dmat; safe_xgboost(XGDMatrixCreateFromMat(values, 1, 127, 0.0, &dmat)); const float* out_result = NULL; safe_xgboost( XGBoosterPredictFromDMatrix(booster, dmat, config, &out_shape, &out_dim, &out_result)); assert(out_dim == 1); assert(out_shape[0] == 1); printf("%1.4f \n", out_result[0]); safe_xgboost(XGDMatrixFree(dmat)); } { printf("Sparse Matrix Example (XGDMatrixCreateFromCSR): "); const uint64_t indptr[] = {0, 22}; const uint32_t indices[] = {1, 9, 19, 21, 24, 34, 36, 39, 42, 53, 56, 65, 69, 77, 86, 88, 92, 95, 102, 106, 117, 122}; const float data[] = {1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0}; DMatrixHandle dmat; char j_indptr[128]; MakeArrayInterface((size_t)indptr, 2ul, " #include #include #include #define safe_xgboost(err) \ if ((err) != 0) { \ fprintf(stderr, "%s:%d: error in %s: %s\n", __FILE__, __LINE__, #err, \ XGBGetLastError()); \ exit(1); \ } #define N_BATCHS 32 #define BATCH_LEN 512 /* Shorthands. */ typedef DMatrixHandle DMatrix; typedef BoosterHandle Booster; typedef struct _DataIter { /* Data of each batch. */ float **data; /* Labels of each batch */ float **labels; /* Length of each batch. */ size_t *lengths; /* Total number of batches. */ size_t n; /* Current iteration. */ size_t cur_it; /* Private fields */ DMatrix _proxy; char _array[128]; } DataIter; #define safe_malloc(ptr) \ if ((ptr) == NULL) { \ fprintf(stderr, "%s:%d: Failed to allocate memory.\n", __FILE__, \ __LINE__); \ exit(1); \ } /** * Initialize with random data for demo. In practice the data should be loaded * from external memory. We just demonstrate how to use the iterator in * XGBoost. * * \param batch_size Number of elements for each batch. The demo here is only using 1 * column. * \param n_batches Number of batches. */ void DataIterator_Init(DataIter *self, size_t batch_size, size_t n_batches) { self->n = n_batches; self->lengths = (size_t *)malloc(self->n * sizeof(size_t)); safe_malloc(self->lengths); for (size_t i = 0; i < self->n; ++i) { self->lengths[i] = batch_size; } self->data = (float **)malloc(self->n * sizeof(float *)); safe_malloc(self->data); self->labels = (float **)malloc(self->n * sizeof(float *)); safe_malloc(self->labels); /* Generate some random data. 
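* The batches are kept alive for the whole lifetime of the iterator, as XGBoost only borrows the pointers that are later handed to the proxy DMatrix.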
*/ for (size_t i = 0; i < self->n; ++i) { self->data[i] = (float *)malloc(self->lengths[i] * sizeof(float)); safe_malloc(self->data[i]); for (size_t j = 0; j < self->lengths[i]; ++j) { float x = (float)rand() / (float)(RAND_MAX); self->data[i][j] = x; } self->labels[i] = (float *)malloc(self->lengths[i] * sizeof(float)); safe_malloc(self->labels[i]); for (size_t j = 0; j < self->lengths[i]; ++j) { float y = (float)rand() / (float)(RAND_MAX); self->labels[i][j] = y; } } self->cur_it = 0; safe_xgboost(XGProxyDMatrixCreate(&self->_proxy)); } void DataIterator_Free(DataIter *self) { for (size_t i = 0; i < self->n; ++i) { free(self->data[i]); free(self->labels[i]); } free(self->data); free(self->lengths); free(self->labels); safe_xgboost(XGDMatrixFree(self->_proxy)); }; int DataIterator_Next(DataIterHandle handle) { DataIter *self = (DataIter *)(handle); if (self->cur_it == self->n) { self->cur_it = 0; return 0; /* At end */ } /* A JSON string encoding array interface (standard from numpy). */ char array[] = "{\"data\": [%lu, false], \"shape\":[%lu, 1], \"typestr\": " "\"<f4\", \"version\": 3}"; memset(self->_array, '\0', sizeof(self->_array)); sprintf(self->_array, array, (size_t)self->data[self->cur_it], self->lengths[self->cur_it]); safe_xgboost(XGProxyDMatrixSetDataDense(self->_proxy, self->_array)); /* The data passed in the iterator must remain valid (not being freed until the next * iteration or reset) */ safe_xgboost(XGDMatrixSetDenseInfo(self->_proxy, "label", self->labels[self->cur_it], self->lengths[self->cur_it], 1)); self->cur_it++; return 1; /* Continue. */ } void DataIterator_Reset(DataIterHandle handle) { DataIter *self = (DataIter *)(handle); self->cur_it = 0; } /** * Train a regression model and save it into JSON model file. */ void TrainModel(DMatrix Xy) { /* Create booster for training. */ Booster booster; DMatrix cache[] = {Xy}; safe_xgboost(XGBoosterCreate(cache, 1, &booster)); /* Use approx or hist for external memory training. */ safe_xgboost(XGBoosterSetParam(booster, "tree_method", "hist")); safe_xgboost(XGBoosterSetParam(booster, "objective", "reg:squarederror")); /* Start training. */ const char *validation_names[1] = {"train"}; const char *validation_result = NULL; size_t n_rounds = 10; for (size_t i = 0; i < n_rounds; ++i) { safe_xgboost(XGBoosterUpdateOneIter(booster, i, Xy)); safe_xgboost(XGBoosterEvalOneIter(booster, i, cache, validation_names, 1, &validation_result)); printf("%s\n", validation_result); } /* Save the model to a JSON file. */ safe_xgboost(XGBoosterSaveModel(booster, "model.json")); safe_xgboost(XGBoosterFree(booster)); } int main() { DataIter iter; DataIterator_Init(&iter, BATCH_LEN, N_BATCHS); /* Create DMatrix from iterator. During training, some cache files with the * prefix "cache-" will be generated in current directory */ char config[] = "{\"missing\": NaN, \"cache_prefix\": \"cache\"}"; DMatrix Xy; safe_xgboost(XGDMatrixCreateFromCallback( &iter, iter._proxy, DataIterator_Reset, DataIterator_Next, config, &Xy)); TrainModel(Xy); safe_xgboost(XGDMatrixFree(Xy)); DataIterator_Free(&iter); return 0; } xgboost-3.0.0/demo/c-api/inference/000077500000000000000000000000001476511272400170635ustar00rootroot00000000000000xgboost-3.0.0/demo/c-api/inference/CMakeLists.txt000066400000000000000000000007161476511272400216270ustar00rootroot00000000000000cmake_minimum_required(VERSION 3.18) project(inference-demo LANGUAGES C VERSION 0.0.1) find_package(xgboost REQUIRED) # xgboost is built as static libraries, all cxx dependencies need to be linked into the # executable.
if(XGBOOST_BUILD_STATIC_LIB) enable_language(CXX) # find again for those cxx libraries. find_package(xgboost REQUIRED) endif() add_executable(inference-demo inference.c) target_link_libraries(inference-demo PRIVATE xgboost::xgboost) xgboost-3.0.0/demo/c-api/inference/inference.c000066400000000000000000000156221476511272400211730ustar00rootroot00000000000000/** * Copyright 2021-2025, XGBoost contributors * * @brief A simple example of using prediction functions. * * See more examples in test_c_api.cc on how to reuse a ProxyDMatrix object for reducing * the latency of DMatrix creation. */ #include <stdio.h> #include <stdlib.h> #include <string.h> #include <xgboost/c_api.h> #define safe_xgboost(err) \ if ((err) != 0) { \ fprintf(stderr, "%s:%d: error in %s: %s\n", __FILE__, __LINE__, #err, \ XGBGetLastError()); \ exit(1); \ } #define safe_malloc(ptr) \ if ((ptr) == NULL) { \ fprintf(stderr, "%s:%d: Failed to allocate memory.\n", __FILE__, \ __LINE__); \ exit(1); \ } #define N_SAMPLES 128 #define N_FEATURES 16 typedef BoosterHandle Booster; typedef DMatrixHandle DMatrix; /* Row-major matrix */ struct _Matrix { float *data; size_t shape[2]; /* private members */ char _array_interface[256]; }; /* A custom data type for demo. */ typedef struct _Matrix *Matrix; /* Initialize matrix, copy data from `data` if it's not NULL. */ void Matrix_Create(Matrix *self, float const *data, size_t n_samples, size_t n_features) { if (self == NULL) { fprintf(stderr, "Invalid pointer to %s\n", __func__); exit(-1); } *self = (Matrix)malloc(sizeof(struct _Matrix)); safe_malloc(*self); (*self)->data = (float *)malloc(n_samples * n_features * sizeof(float)); safe_malloc((*self)->data); (*self)->shape[0] = n_samples; (*self)->shape[1] = n_features; if (data != NULL) { memcpy((*self)->data, data, (*self)->shape[0] * (*self)->shape[1] * sizeof(float)); } } /* Generate random matrix. */ void Matrix_Random(Matrix *self, size_t n_samples, size_t n_features) { Matrix_Create(self, NULL, n_samples, n_features); for (size_t i = 0; i < n_samples * n_features; ++i) { float x = (float)rand() / (float)(RAND_MAX); (*self)->data[i] = x; } } /* Array interface specified by numpy. */ char const *Matrix_ArrayInterface(Matrix self) { char const template[] = "{\"data\": [%lu, true], \"shape\": [%lu, %lu], " "\"typestr\": \"<f4\", \"version\": 3}"; memset(self->_array_interface, '\0', sizeof(self->_array_interface)); sprintf(self->_array_interface, template, (size_t)self->data, self->shape[0], self->shape[1]); return self->_array_interface; } size_t Matrix_NSamples(Matrix self) { return self->shape[0]; } size_t Matrix_NFeatures(Matrix self) { return self->shape[1]; } float Matrix_At(Matrix self, size_t i, size_t j) { return self->data[i * self->shape[1] + j]; } void Matrix_Print(Matrix self) { for (size_t i = 0; i < Matrix_NSamples(self); i++) { for (size_t j = 0; j < Matrix_NFeatures(self); ++j) { printf("%f, ", Matrix_At(self, i, j)); } } printf("\n"); } void Matrix_Free(Matrix self) { if (self != NULL) { if (self->data != NULL) { self->shape[0] = 0; self->shape[1] = 0; free(self->data); self->data = NULL; } free(self); } } int main() { Matrix X; Matrix y; Matrix_Random(&X, N_SAMPLES, N_FEATURES); Matrix_Random(&y, N_SAMPLES, 1); char const *X_interface = Matrix_ArrayInterface(X); char config[] = "{\"nthread\": 16, \"missing\": NaN}"; DMatrix Xy; /* Dense means "dense matrix". */ safe_xgboost(XGDMatrixCreateFromDense(X_interface, config, &Xy)); /* Label must be in a contiguous array.
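* XGDMatrixSetDenseInfo copies the values into the DMatrix, so the label array only needs to stay valid for the duration of the call.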
*/ safe_xgboost(XGDMatrixSetDenseInfo(Xy, "label", y->data, y->shape[0], 1)); DMatrix cache[] = {Xy}; Booster booster; /* Train a booster for demo. */ safe_xgboost(XGBoosterCreate(cache, 1, &booster)); size_t n_rounds = 10; for (size_t i = 0; i < n_rounds; ++i) { safe_xgboost(XGBoosterUpdateOneIter(booster, i, Xy)); } /* Save the trained model in JSON format. */ safe_xgboost(XGBoosterSaveModel(booster, "model.json")); safe_xgboost(XGBoosterFree(booster)); /* Load it back for inference. The save and load is not required, only shown here for * demonstration purpose. */ safe_xgboost(XGBoosterCreate(NULL, 0, &booster)); safe_xgboost(XGBoosterLoadModel(booster, "model.json")); { /* Run prediction with DMatrix object. */ char const config[] = "{\"training\": false, \"type\": 0, " "\"iteration_begin\": 0, \"iteration_end\": 0, \"strict_shape\": true}"; /* Shape of output prediction */ uint64_t const *out_shape; /* Dimension of output prediction */ uint64_t out_dim; /* Pointer to a thread local contiguous array, assigned in prediction function. */ float const *out_results; safe_xgboost(XGBoosterPredictFromDMatrix(booster, Xy, config, &out_shape, &out_dim, &out_results)); if (out_dim != 2 || out_shape[0] != N_SAMPLES || out_shape[1] != 1) { fprintf(stderr, "Regression model should output prediction as vector."); exit(-1); } Matrix predt; /* Always copy output from XGBoost before calling next API function. */ Matrix_Create(&predt, out_results, out_shape[0], out_shape[1]); printf("Results from prediction\n"); Matrix_Print(predt); Matrix_Free(predt); } { /* Run inplace prediction, which is faster and more memory efficient, but supports * only basic inference types. */ char const config[] = "{\"type\": 0, \"iteration_begin\": 0, " "\"iteration_end\": 0, \"strict_shape\": true, " "\"cache_id\": 0, \"missing\": NaN}"; /* Shape of output prediction */ uint64_t const *out_shape; /* Dimension of output prediction */ uint64_t out_dim; /* Pointer to a thread local contiguous array, assigned in prediction function. */ float const *out_results; char const *X_interface = Matrix_ArrayInterface(X); safe_xgboost(XGBoosterPredictFromDense(booster, X_interface, config, NULL, &out_shape, &out_dim, &out_results)); if (out_dim != 2 || out_shape[0] != N_SAMPLES || out_shape[1] != 1) { fprintf(stderr, "Regression model should output prediction as vector, %lu, %lu", out_dim, out_shape[0]); exit(-1); } Matrix predt; /* Always copy output from XGBoost before calling next API function. */ Matrix_Create(&predt, out_results, out_shape[0], out_shape[1]); printf("Results from inplace prediction\n"); Matrix_Print(predt); Matrix_Free(predt); } XGBoosterFree(booster); XGDMatrixFree(Xy); Matrix_Free(X); Matrix_Free(y); return 0; } xgboost-3.0.0/demo/dask/000077500000000000000000000000001476511272400150565ustar00rootroot00000000000000xgboost-3.0.0/demo/dask/README.rst000066400000000000000000000003211476511272400165410ustar00rootroot00000000000000.. _dask-examples: XGBoost Dask Feature Walkthrough ================================ This directory contains some demonstrations for using `dask` with `XGBoost`.
For an overview, see :doc:`/tutorials/dask` xgboost-3.0.0/demo/dask/cpu_survival.py000066400000000000000000000046231476511272400201570ustar00rootroot00000000000000""" Example of training survival model with Dask on CPU =================================================== """ import os import dask.array as da import dask.dataframe as dd from dask.distributed import Client, LocalCluster from xgboost import dask as dxgb from xgboost.dask import DaskDMatrix def main(client: Client) -> da.Array: # Load an example survival data from CSV into a Dask data frame. # The Veterans' Administration Lung Cancer Trial # The Statistical Analysis of Failure Time Data by Kalbfleisch J. and Prentice R (1980) CURRENT_DIR = os.path.dirname(__file__) df = dd.read_csv( os.path.join(CURRENT_DIR, os.pardir, "data", "veterans_lung_cancer.csv") ) # DaskDMatrix acts like normal DMatrix, works as a proxy for local # DMatrix scatter around workers. # For AFT survival, you'd need to extract the lower and upper bounds for the label # and pass them as arguments to DaskDMatrix. y_lower_bound = df["Survival_label_lower_bound"] y_upper_bound = df["Survival_label_upper_bound"] X = df.drop(["Survival_label_lower_bound", "Survival_label_upper_bound"], axis=1) dtrain = DaskDMatrix( client, X, label_lower_bound=y_lower_bound, label_upper_bound=y_upper_bound ) # Use train method from xgboost.dask instead of xgboost. This # distributed version of train returns a dictionary containing the # resulting booster and evaluation history obtained from # evaluation metrics. params = { "verbosity": 1, "objective": "survival:aft", "eval_metric": "aft-nloglik", "learning_rate": 0.05, "aft_loss_distribution_scale": 1.20, "aft_loss_distribution": "normal", "max_depth": 6, "lambda": 0.01, "alpha": 0.02, } output = dxgb.train( client, params, dtrain, num_boost_round=100, evals=[(dtrain, "train")] ) bst = output["booster"] history = output["history"] # you can pass output directly into `predict` too. prediction = dxgb.predict(client, bst, dtrain) print("Evaluation history: ", history) # Uncomment the following line to save the model to the disk # bst.save_model('survival_model.json') return prediction if __name__ == "__main__": # or use other clusters for scaling with LocalCluster(n_workers=7, threads_per_worker=4) as cluster: with Client(cluster) as client: main(client) xgboost-3.0.0/demo/dask/cpu_training.py000066400000000000000000000027161476511272400201200ustar00rootroot00000000000000""" Example of training with Dask on CPU ==================================== """ from dask import array as da from dask.distributed import Client, LocalCluster from xgboost import dask as dxgb from xgboost.dask import DaskDMatrix def main(client: Client) -> None: # generate some random data for demonstration m = 100000 n = 100 rng = da.random.default_rng(1) X = rng.normal(size=(m, n), chunks=(10000, -1)) y = X.sum(axis=1) # DaskDMatrix acts like normal DMatrix, works as a proxy for local # DMatrix scatter around workers. dtrain = DaskDMatrix(client, X, y) # Use train method from xgboost.dask instead of xgboost. This # distributed version of train returns a dictionary containing the # resulting booster and evaluation history obtained from # evaluation metrics. output = dxgb.train( client, {"verbosity": 1, "tree_method": "hist"}, dtrain, num_boost_round=4, evals=[(dtrain, "train")], ) bst = output["booster"] history = output["history"] # you can pass output directly into `predict` too. 
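# Here the booster and the DMatrix are passed explicitly, but passing the
# `output` dict as the model and a dask collection such as `X` as the data
# works as well.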
prediction = dxgb.predict(client, bst, dtrain) print("Evaluation history:", history) print("Error:", da.sqrt((prediction - y) ** 2).mean().compute()) if __name__ == "__main__": # or use other clusters for scaling with LocalCluster(n_workers=7, threads_per_worker=4) as cluster: with Client(cluster) as client: main(client) xgboost-3.0.0/demo/dask/dask_callbacks.py000066400000000000000000000061301476511272400203510ustar00rootroot00000000000000""" Example of using callbacks with Dask ==================================== """ from typing import Any import numpy as np from dask.distributed import Client, LocalCluster from dask_ml.datasets import make_regression from dask_ml.model_selection import train_test_split import xgboost as xgb import xgboost.dask as dxgb from xgboost.dask import DaskDMatrix def probability_for_going_backward(epoch: int) -> float: return 0.999 / (1.0 + 0.05 * np.log(1.0 + epoch)) # All callback functions must inherit from TrainingCallback class CustomEarlyStopping(xgb.callback.TrainingCallback): """A custom early stopping class where early stopping is determined stochastically. In the beginning, allow the metric to become worse with a probability of 0.999. As boosting progresses, the probability should be adjusted downward""" def __init__( self, *, validation_set: str, target_metric: str, maximize: bool, seed: int ) -> None: self.validation_set = validation_set self.target_metric = target_metric self.maximize = maximize self.seed = seed self.rng = np.random.default_rng(seed=seed) if maximize: self.better = lambda x, y: x > y else: self.better = lambda x, y: x < y def after_iteration( self, model: Any, epoch: int, evals_log: xgb.callback.TrainingCallback.EvalsLog ) -> bool: metric_history = evals_log[self.validation_set][self.target_metric] if len(metric_history) < 2 or self.better( metric_history[-1], metric_history[-2] ): return False # continue training p = probability_for_going_backward(epoch) go_backward = self.rng.choice(2, size=(1,), replace=True, p=[1 - p, p]).astype( np.bool_ )[0] print( "The validation metric went into the wrong direction. " + f"Stopping training with probability {1 - p}..." ) if go_backward: return False # continue training else: return True # stop training def main(client: Client) -> None: m = 100000 n = 100 X, y = make_regression(n_samples=m, n_features=n, chunks=200, random_state=0) X_train, X_test, y_train, y_test = train_test_split(X, y, random_state=0) dtrain = DaskDMatrix(client, X_train, y_train) dtest = DaskDMatrix(client, X_test, y_test) output = dxgb.train( client, { "verbosity": 1, "tree_method": "hist", "objective": "reg:squarederror", "eval_metric": "rmse", "max_depth": 6, "learning_rate": 1.0, }, dtrain, num_boost_round=1000, evals=[(dtrain, "train"), (dtest, "test")], callbacks=[ CustomEarlyStopping( validation_set="test", target_metric="rmse", maximize=False, seed=0 ) ], ) if __name__ == "__main__": # or use other clusters for scaling with LocalCluster(n_workers=4, threads_per_worker=1) as cluster: with Client(cluster) as client: main(client) xgboost-3.0.0/demo/dask/dask_learning_to_rank.py000066400000000000000000000150461476511272400217540ustar00rootroot00000000000000""" Learning to rank with the Dask Interface ======================================== .. versionadded:: 3.0.0 This is a demonstration of using XGBoost for learning to rank tasks using the MSLR_10k_letor dataset. For more infomation about the dataset, please visit its `description page `_. 
See :ref:`ltr-dist` for a general description for distributed learning to rank and :ref:`ltr-dask` for Dask-specific features. """ from __future__ import annotations import argparse import os from contextlib import contextmanager from typing import Generator import dask import numpy as np from dask import dataframe as dd from distributed import Client, LocalCluster, wait from sklearn.datasets import load_svmlight_file from xgboost import dask as dxgb def load_mslr_10k( device: str, data_path: str, cache_path: str ) -> tuple[dd.DataFrame, dd.DataFrame, dd.DataFrame]: """Load the MSLR10k dataset from data_path and save parquet files in the cache_path.""" root_path = os.path.expanduser(args.data) cache_path = os.path.expanduser(args.cache) # Use only the Fold1 for demo: # Train, Valid, Test # {S1,S2,S3}, S4, S5 fold = 1 if not os.path.exists(cache_path): os.mkdir(cache_path) fold_path = os.path.join(root_path, f"Fold{fold}") train_path = os.path.join(fold_path, "train.txt") valid_path = os.path.join(fold_path, "vali.txt") test_path = os.path.join(fold_path, "test.txt") X_train, y_train, qid_train = load_svmlight_file( train_path, query_id=True, dtype=np.float32 ) columns = [f"f{i}" for i in range(X_train.shape[1])] X_train = dd.from_array(X_train.toarray(), columns=columns) y_train = y_train.astype(np.int32) qid_train = qid_train.astype(np.int32) X_train["y"] = dd.from_array(y_train) X_train["qid"] = dd.from_array(qid_train) X_train.to_parquet(os.path.join(cache_path, "train"), engine="pyarrow") X_valid, y_valid, qid_valid = load_svmlight_file( valid_path, query_id=True, dtype=np.float32 ) X_valid = dd.from_array(X_valid.toarray(), columns=columns) y_valid = y_valid.astype(np.int32) qid_valid = qid_valid.astype(np.int32) X_valid["y"] = dd.from_array(y_valid) X_valid["qid"] = dd.from_array(qid_valid) X_valid.to_parquet(os.path.join(cache_path, "valid"), engine="pyarrow") X_test, y_test, qid_test = load_svmlight_file( test_path, query_id=True, dtype=np.float32 ) X_test = dd.from_array(X_test.toarray(), columns=columns) y_test = y_test.astype(np.int32) qid_test = qid_test.astype(np.int32) X_test["y"] = dd.from_array(y_test) X_test["qid"] = dd.from_array(qid_test) X_test.to_parquet(os.path.join(cache_path, "test"), engine="pyarrow") df_train = dd.read_parquet( os.path.join(cache_path, "train"), calculate_divisions=True ) df_valid = dd.read_parquet( os.path.join(cache_path, "valid"), calculate_divisions=True ) df_test = dd.read_parquet( os.path.join(cache_path, "test"), calculate_divisions=True ) return df_train, df_valid, df_test def ranking_demo(client: Client, args: argparse.Namespace) -> None: """Learning to rank with data sorted locally.""" df_tr, df_va, _ = load_mslr_10k(args.device, args.data, args.cache) X_train: dd.DataFrame = df_tr[df_tr.columns.difference(["y", "qid"])] y_train = df_tr[["y", "qid"]] Xy_train = dxgb.DaskQuantileDMatrix(client, X_train, y_train.y, qid=y_train.qid) X_valid: dd.DataFrame = df_va[df_va.columns.difference(["y", "qid"])] y_valid = df_va[["y", "qid"]] Xy_valid = dxgb.DaskQuantileDMatrix( client, X_valid, y_valid.y, qid=y_valid.qid, ref=Xy_train ) # Upon training, you will see a performance warning about sorting data based on # query groups. 
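# The warning is emitted because rows belonging to the same query group may be
# scattered across partitions, so each partition has to be sorted by `qid`
# locally before training. See `ranking_wo_split_demo` below for avoiding the
# split with `allow_group_split=False`.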
dxgb.train( client, {"objective": "rank:ndcg", "device": args.device}, Xy_train, evals=[(Xy_train, "Train"), (Xy_valid, "Valid")], num_boost_round=100, ) def ranking_wo_split_demo(client: Client, args: argparse.Namespace) -> None: """Learning to rank with data partitioned according to query groups.""" df_tr, df_va, df_te = load_mslr_10k(args.device, args.data, args.cache) X_tr = df_tr[df_tr.columns.difference(["y", "qid"])] X_va = df_va[df_va.columns.difference(["y", "qid"])] # `allow_group_split=False` makes sure data is partitioned according to the query # groups. ltr = dxgb.DaskXGBRanker(allow_group_split=False, device=args.device) ltr.client = client ltr = ltr.fit( X_tr, df_tr.y, qid=df_tr.qid, eval_set=[(X_tr, df_tr.y), (X_va, df_va.y)], eval_qid=[df_tr.qid, df_va.qid], verbose=True, ) df_te = df_te.persist() wait([df_te]) X_te = df_te[df_te.columns.difference(["y", "qid"])] predt = ltr.predict(X_te) y = client.compute(df_te.y) wait([predt, y]) @contextmanager def gen_client(device: str) -> Generator[Client, None, None]: match device: case "cuda": from dask_cuda import LocalCUDACluster with LocalCUDACluster() as cluster: with Client(cluster) as client: with dask.config.set( { "array.backend": "cupy", "dataframe.backend": "cudf", } ): yield client case "cpu": with LocalCluster() as cluster: with Client(cluster) as client: yield client if __name__ == "__main__": parser = argparse.ArgumentParser( description="Demonstration of learning to rank using XGBoost." ) parser.add_argument( "--data", type=str, help="Root directory of the MSLR-WEB10K data.", required=True, ) parser.add_argument( "--cache", type=str, help="Directory for caching processed data.", required=True, ) parser.add_argument("--device", choices=["cpu", "cuda"], default="cpu") parser.add_argument( "--no-split", action="store_true", help="Flag to indicate query groups should not be split.", ) args = parser.parse_args() with gen_client(args.device) as client: if args.no_split: ranking_wo_split_demo(client, args) else: ranking_demo(client, args) xgboost-3.0.0/demo/dask/forward_logging.py000066400000000000000000000105271476511272400206070ustar00rootroot00000000000000""" Example of forwarding evaluation logs to the client =================================================== The example runs on GPU. Two classes are defined to show how to use Dask builtins to forward the logs to the client process. """ import logging import dask import distributed from dask import array as da from dask_cuda import LocalCUDACluster from distributed import Client from xgboost import dask as dxgb from xgboost.callback import EvaluationMonitor def _get_logger() -> logging.Logger: logger = logging.getLogger("[xgboost.dask]") logger.setLevel(logging.INFO) if not logger.hasHandlers(): handler = logging.StreamHandler() logger.addHandler(handler) return logger class ForwardLoggingMonitor(EvaluationMonitor): def __init__( self, client: Client, rank: int = 0, period: int = 1, ) -> None: """Print the evaluation result at each iteration. The default monitor in the native interface logs the result to the Dask scheduler process. This class can be used to forward the logging to the client process. Important: see the `client` parameter for more info. Parameters ---------- client : Distributed client. This must be the top-level client. The class uses :py:meth:`distributed.Client.forward_logging` in conjunction with the Python :py:mod:`logging` module to forward the evaluation results to the client process. It has undefined behaviour if called in a nested task. 
As a result, client-side logging is not enabled by default. """ client.forward_logging(_get_logger().name) super().__init__( rank=rank, period=period, logger=lambda msg: _get_logger().info(msg.strip()), ) class WorkerEventMonitor(EvaluationMonitor): """Use :py:meth:`distributed.print` to forward the log. A downside is that not only all clients connected to the cluster can see the log, the logs are also printed on the worker. If you use a local cluster, the log is duplicated. """ def __init__(self, rank: int = 0, period: int = 1) -> None: super().__init__( rank=rank, period=period, logger=lambda msg: distributed.print(msg.strip()) ) def hist_train( client: Client, X: da.Array, y: da.Array, monitor: EvaluationMonitor ) -> da.Array: # `DaskQuantileDMatrix` is used instead of `DaskDMatrix`, be careful that it can not # be used for anything else other than as a training DMatrix, unless a reference is # specified. See the `ref` argument of `DaskQuantileDMatrix`. dtrain = dxgb.DaskQuantileDMatrix(client, X, y) output = dxgb.train( client, # Make sure the device is set to CUDA. {"tree_method": "hist", "device": "cuda"}, dtrain, num_boost_round=4, evals=[(dtrain, "train")], # Use the monitor to forward the log. callbacks=[monitor], # Disable the internal logging and prefer the client-side `EvaluationMonitor`. verbose_eval=False, ) bst = output["booster"] history = output["history"] prediction = dxgb.predict(client, bst, X) print("Evaluation history:", history) return prediction if __name__ == "__main__": # `LocalCUDACluster` is used for assigning GPU to XGBoost processes. Here # `n_workers` represents the number of GPUs since we use one GPU per worker process. with LocalCUDACluster(n_workers=2, threads_per_worker=4) as cluster: # Create client from cluster, set the backend to GPU array (cupy). with Client(cluster) as client, dask.config.set({"array.backend": "cupy"}): # Generate some random data for demonstration rng = da.random.default_rng(1) m = 2**18 n = 100 X = rng.uniform(size=(m, n), chunks=(128**2, -1)) y = X.sum(axis=1) # Use forwarding, the client must be the top client. monitor: EvaluationMonitor = ForwardLoggingMonitor(client) hist_train(client, X, y, monitor).compute() # Use distributed.print, the logs in this demo are duplicated as the same # log is printed in all workers along with the client. monitor = WorkerEventMonitor() hist_train(client, X, y, monitor).compute() xgboost-3.0.0/demo/dask/gpu_training.py000066400000000000000000000062361476511272400201250ustar00rootroot00000000000000""" Example of training with Dask on GPU ==================================== """ import dask import dask_cudf from dask import array as da from dask import dataframe as dd from dask.distributed import Client from dask_cuda import LocalCUDACluster from xgboost import dask as dxgb from xgboost.dask import DaskDMatrix def using_dask_matrix(client: Client, X: da.Array, y: da.Array) -> da.Array: # DaskDMatrix acts like normal DMatrix, works as a proxy for local DMatrix scatter # around workers. dtrain = DaskDMatrix(client, X, y) # Use train method from xgboost.dask instead of xgboost. This distributed version # of train returns a dictionary containing the resulting booster and evaluation # history obtained from evaluation metrics. output = dxgb.train( client, # Make sure the device is set to CUDA. {"tree_method": "hist", "device": "cuda"}, dtrain, num_boost_round=4, evals=[(dtrain, "train")], ) bst = output["booster"] history = output["history"] # you can pass output directly into `predict` too. 
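# For dask collections there is also `dxgb.inplace_predict`, which skips
# constructing a DMatrix for prediction.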
prediction = dxgb.predict(client, bst, dtrain) print("Evaluation history:", history) return prediction def using_quantile_device_dmatrix(client: Client, X: da.Array, y: da.Array) -> da.Array: """`DaskQuantileDMatrix` is a data type specialized for `hist` tree methods for reducing memory usage. .. versionadded:: 1.2.0 """ # `DaskQuantileDMatrix` is used instead of `DaskDMatrix`, be careful that it can not # be used for anything else other than training unless a reference is specified. See # the `ref` argument of `DaskQuantileDMatrix`. dtrain = dxgb.DaskQuantileDMatrix(client, X, y) output = dxgb.train( client, # Make sure the device is set to CUDA. {"tree_method": "hist", "device": "cuda"}, dtrain, num_boost_round=4, evals=[(dtrain, "train")], ) prediction = dxgb.predict(client, output, X) return prediction if __name__ == "__main__": # `LocalCUDACluster` is used for assigning GPU to XGBoost processes. Here # `n_workers` represents the number of GPUs since we use one GPU per worker process. with LocalCUDACluster(n_workers=2, threads_per_worker=4) as cluster: # Create client from cluster, set the backend to GPU array (cupy). with Client(cluster) as client, dask.config.set({"array.backend": "cupy"}): # Generate some random data for demonstration rng = da.random.default_rng(1) m = 2**18 n = 100 X = rng.uniform(size=(m, n), chunks=(128**2, -1)) y = X.sum(axis=1) X = dd.from_dask_array(X) y = dd.from_dask_array(y) # XGBoost can take arrays. This is to show that DataFrame uses the GPU # backend as well. assert isinstance(X, dask_cudf.DataFrame) assert isinstance(y, dask_cudf.Series) print("Using DaskQuantileDMatrix") from_ddqdm = using_quantile_device_dmatrix(client, X, y).compute() print("Using DMatrix") from_dmatrix = using_dask_matrix(client, X, y).compute() xgboost-3.0.0/demo/dask/sklearn_cpu_training.py000066400000000000000000000023051476511272400216310ustar00rootroot00000000000000""" Use scikit-learn regressor interface with CPU histogram tree method =================================================================== """ from dask import array as da from dask.distributed import Client, LocalCluster from xgboost import dask as dxgb def main(client: Client) -> dxgb.Booster: # generate some random data for demonstration n = 100 m = 10000 partition_size = 100 X = da.random.random((m, n), partition_size) y = da.random.random(m, partition_size) regressor = dxgb.DaskXGBRegressor(verbosity=1, n_estimators=2) regressor.set_params(tree_method="hist") # assigning client here is optional regressor.client = client regressor.fit(X, y, eval_set=[(X, y)]) prediction = regressor.predict(X) bst = regressor.get_booster() history = regressor.evals_result() print("Evaluation history:", history) # returned prediction is always a dask array. 
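# The array is lazy; call `prediction.compute()` to materialize the values
# locally.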
assert isinstance(prediction, da.Array) return bst # returning the trained model if __name__ == "__main__": # or use other clusters for scaling with LocalCluster(n_workers=4, threads_per_worker=1) as cluster: with Client(cluster) as client: main(client) xgboost-3.0.0/demo/dask/sklearn_gpu_training.py000066400000000000000000000027721476511272400216450ustar00rootroot00000000000000""" Use scikit-learn regressor interface with GPU histogram tree method =================================================================== """ import dask from dask import array as da from dask.distributed import Client # It's recommended to use dask_cuda for GPU assignment from dask_cuda import LocalCUDACluster from xgboost import dask as dxgb def main(client: Client) -> dxgb.Booster: # Generate some random data for demonstration rng = da.random.default_rng(1) m = 2**18 n = 100 X = rng.uniform(size=(m, n), chunks=(128**2, -1)) y = X.sum(axis=1) regressor = dxgb.DaskXGBRegressor(verbosity=1) # Set the device to CUDA regressor.set_params(tree_method="hist", device="cuda") # Assigning client here is optional regressor.client = client regressor.fit(X, y, eval_set=[(X, y)]) prediction = regressor.predict(X) bst = regressor.get_booster() history = regressor.evals_result() print("Evaluation history:", history) # returned prediction is always a dask array. assert isinstance(prediction, da.Array) return bst # returning the trained model if __name__ == "__main__": # With dask cuda, one can scale up XGBoost to arbitrary GPU clusters. # `LocalCUDACluster` used here is only for demonstration purpose. with LocalCUDACluster() as cluster: # Create client from cluster, set the backend to GPU array (cupy). with Client(cluster) as client, dask.config.set({"array.backend": "cupy"}): main(client) xgboost-3.0.0/demo/data/000077500000000000000000000000001476511272400150455ustar00rootroot00000000000000xgboost-3.0.0/demo/data/README.md000066400000000000000000000002041476511272400163200ustar00rootroot00000000000000This folder contains processed example dataset used by the demos. 
Copyright of the dataset belongs to the original copyright holder xgboost-3.0.0/demo/data/agaricus.txt.test000066400000000000000000005464731476511272400204050ustar00rootroot000000000000000 1:1 9:1 19:1 21:1 24:1 34:1 36:1 39:1 42:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 117:1 122:1 1 3:1 9:1 19:1 21:1 30:1 34:1 36:1 40:1 41:1 53:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 118:1 124:1 0 1:1 9:1 20:1 21:1 24:1 34:1 36:1 39:1 41:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 117:1 122:1 0 3:1 9:1 19:1 21:1 24:1 34:1 36:1 39:1 51:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 116:1 122:1 0 4:1 7:1 11:1 22:1 29:1 34:1 36:1 40:1 41:1 53:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 119:1 124:1 0 3:1 10:1 20:1 21:1 23:1 34:1 37:1 40:1 42:1 54:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 1 3:1 9:1 11:1 21:1 30:1 34:1 36:1 40:1 51:1 53:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 117:1 124:1 0 1:1 9:1 20:1 21:1 23:1 34:1 36:1 39:1 42:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 117:1 120:1 1 3:1 9:1 19:1 21:1 30:1 34:1 36:1 40:1 48:1 53:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 118:1 120:1 0 4:1 9:1 20:1 21:1 24:1 34:1 36:1 39:1 51:1 53:1 60:1 65:1 67:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 117:1 123:1 1 3:1 9:1 11:1 21:1 30:1 34:1 36:1 40:1 41:1 53:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 118:1 124:1 0 1:1 9:1 20:1 21:1 23:1 34:1 36:1 39:1 51:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 117:1 122:1 0 4:1 7:1 14:1 22:1 29:1 34:1 37:1 39:1 42:1 54:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 98:1 106:1 114:1 120:1 0 1:1 9:1 19:1 21:1 24:1 34:1 36:1 39:1 42:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 117:1 120:1 0 4:1 10:1 11:1 22:1 29:1 34:1 37:1 39:1 41:1 54:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 98:1 105:1 114:1 120:1 0 4:1 9:1 20:1 21:1 23:1 34:1 36:1 39:1 51:1 53:1 60:1 65:1 67:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 117:1 120:1 0 1:1 10:1 20:1 21:1 23:1 34:1 36:1 39:1 41:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 117:1 120:1 1 3:1 9:1 11:1 21:1 30:1 34:1 36:1 40:1 51:1 53:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 118:1 124:1 0 6:1 7:1 11:1 22:1 29:1 34:1 36:1 40:1 42:1 53:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 118:1 124:1 0 3:1 10:1 20:1 21:1 23:1 34:1 36:1 39:1 42:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 116:1 120:1 0 1:1 10:1 20:1 21:1 24:1 34:1 36:1 39:1 42:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 116:1 120:1 0 4:1 9:1 20:1 21:1 24:1 34:1 36:1 39:1 51:1 53:1 60:1 65:1 67:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 117:1 120:1 0 1:1 9:1 19:1 21:1 24:1 34:1 36:1 39:1 45:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 116:1 120:1 0 3:1 9:1 20:1 21:1 23:1 34:1 36:1 39:1 42:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 116:1 122:1 0 3:1 9:1 20:1 21:1 23:1 34:1 36:1 39:1 51:1 53:1 60:1 65:1 67:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 119:1 120:1 1 3:1 10:1 19:1 21:1 30:1 34:1 36:1 40:1 41:1 53:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 118:1 120:1 0 4:1 10:1 19:1 21:1 23:1 34:1 37:1 40:1 48:1 54:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 0 1:1 9:1 20:1 21:1 24:1 34:1 36:1 39:1 45:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 116:1 122:1 0 3:1 9:1 20:1 21:1 23:1 34:1 36:1 39:1 41:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 116:1 120:1 0 4:1 7:1 19:1 22:1 29:1 34:1 37:1 39:1 41:1 54:1 58:1 
[Tar archive payload: sparse LIBSVM-format data records (binary 0/1 label followed by feature_index:1 pairs, feature indices up to 126), consistent with XGBoost's bundled agaricus demo dataset; raw data omitted.]
95:1 102:1 105:1 119:1 126:1 1 3:1 10:1 14:1 22:1 25:1 34:1 37:1 40:1 42:1 53:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 117:1 126:1 0 4:1 7:1 18:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 75:1 82:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 1 3:1 10:1 16:1 22:1 25:1 34:1 36:1 40:1 42:1 53:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 117:1 126:1 0 4:1 9:1 18:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 75:1 86:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 0 4:1 7:1 18:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 77:1 84:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 1 3:1 7:1 14:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 71:1 84:1 88:1 92:1 95:1 100:1 108:1 118:1 123:1 0 4:1 9:1 18:1 21:1 29:1 34:1 36:1 39:1 49:1 54:1 55:1 65:1 69:1 73:1 86:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 1 3:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 75:1 79:1 88:1 92:1 95:1 100:1 108:1 119:1 123:1 0 4:1 7:1 14:1 21:1 29:1 34:1 36:1 39:1 49:1 54:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 0 4:1 7:1 18:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 73:1 82:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 0 4:1 9:1 11:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 73:1 86:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 0 3:1 7:1 18:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 75:1 82:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 0 4:1 9:1 11:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 77:1 82:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 0 4:1 9:1 14:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 75:1 82:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 0 4:1 9:1 14:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 75:1 84:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 0 4:1 9:1 11:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 75:1 82:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 1 3:1 7:1 14:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 71:1 79:1 88:1 92:1 95:1 100:1 108:1 118:1 120:1 0 4:1 9:1 14:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 73:1 84:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 1 3:1 7:1 14:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 71:1 84:1 88:1 92:1 95:1 100:1 108:1 118:1 120:1 0 4:1 9:1 11:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 77:1 84:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 0 4:1 9:1 18:1 21:1 29:1 34:1 36:1 39:1 49:1 54:1 55:1 65:1 69:1 77:1 84:1 88:1 92:1 95:1 102:1 106:1 119:1 126:1 1 3:1 7:1 14:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 70:1 80:1 88:1 92:1 95:1 100:1 108:1 119:1 120:1 0 4:1 9:1 11:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 77:1 84:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 1 3:1 7:1 14:1 22:1 25:1 34:1 37:1 40:1 42:1 53:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 117:1 126:1 0 4:1 9:1 14:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 77:1 82:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 0 4:1 9:1 11:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 77:1 84:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 0 4:1 7:1 18:1 21:1 29:1 34:1 36:1 39:1 49:1 54:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 0 4:1 9:1 11:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 73:1 86:1 88:1 92:1 95:1 102:1 106:1 119:1 126:1 1 3:1 7:1 14:1 22:1 25:1 34:1 36:1 40:1 49:1 53:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 117:1 126:1 0 4:1 9:1 14:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 119:1 126:1 0 4:1 7:1 18:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 75:1 84:1 88:1 92:1 95:1 102:1 105:1 
118:1 126:1 0 3:1 7:1 11:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 73:1 84:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 1 3:1 7:1 14:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 71:1 80:1 88:1 92:1 95:1 100:1 108:1 118:1 126:1 1 4:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 70:1 80:1 88:1 92:1 95:1 100:1 108:1 118:1 123:1 0 4:1 9:1 11:1 21:1 29:1 34:1 36:1 39:1 49:1 54:1 55:1 65:1 69:1 75:1 82:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 0 4:1 9:1 18:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 75:1 84:1 88:1 92:1 95:1 102:1 106:1 119:1 126:1 0 4:1 9:1 11:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 119:1 126:1 0 3:1 9:1 11:1 21:1 29:1 34:1 36:1 39:1 49:1 54:1 55:1 65:1 69:1 77:1 82:1 88:1 92:1 95:1 102:1 106:1 119:1 126:1 1 4:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 75:1 84:1 88:1 92:1 95:1 100:1 108:1 118:1 123:1 0 4:1 9:1 11:1 21:1 29:1 34:1 36:1 39:1 49:1 54:1 55:1 65:1 69:1 73:1 82:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 0 4:1 9:1 18:1 21:1 29:1 34:1 36:1 39:1 49:1 54:1 55:1 65:1 69:1 75:1 86:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 1 3:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 75:1 84:1 88:1 92:1 95:1 100:1 108:1 118:1 123:1 1 3:1 7:1 14:1 22:1 25:1 34:1 36:1 40:1 42:1 53:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 0 4:1 7:1 18:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 73:1 82:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 0 4:1 7:1 14:1 21:1 29:1 34:1 36:1 39:1 49:1 54:1 55:1 65:1 69:1 75:1 86:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 0 4:1 7:1 14:1 21:1 29:1 34:1 36:1 39:1 49:1 54:1 55:1 65:1 69:1 77:1 84:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 0 4:1 9:1 18:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 77:1 84:1 88:1 92:1 95:1 102:1 106:1 119:1 126:1 0 3:1 7:1 18:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 73:1 86:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 0 4:1 9:1 18:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 77:1 84:1 88:1 92:1 95:1 102:1 106:1 119:1 126:1 0 4:1 9:1 11:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 75:1 84:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 1 3:1 7:1 14:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 75:1 79:1 88:1 92:1 95:1 100:1 108:1 118:1 126:1 0 4:1 9:1 14:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 73:1 82:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 0 3:1 9:1 11:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 73:1 86:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 0 4:1 9:1 14:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 73:1 82:1 88:1 92:1 95:1 102:1 106:1 119:1 126:1 0 4:1 9:1 18:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 75:1 86:1 88:1 92:1 95:1 102:1 106:1 119:1 126:1 0 4:1 9:1 18:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 73:1 86:1 88:1 92:1 95:1 102:1 106:1 119:1 126:1 0 4:1 9:1 18:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 75:1 86:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 0 4:1 9:1 18:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 73:1 82:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 0 4:1 7:1 18:1 21:1 29:1 34:1 36:1 39:1 49:1 54:1 55:1 65:1 69:1 75:1 84:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 1 3:1 10:1 16:1 22:1 25:1 34:1 36:1 40:1 49:1 53:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 1 3:1 7:1 14:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 70:1 84:1 88:1 92:1 95:1 100:1 108:1 119:1 120:1 1 3:1 10:1 16:1 22:1 25:1 34:1 37:1 40:1 45:1 53:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 117:1 126:1 0 4:1 
7:1 14:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 0 3:1 9:1 11:1 21:1 29:1 34:1 36:1 39:1 49:1 54:1 55:1 65:1 69:1 73:1 84:1 88:1 92:1 95:1 102:1 106:1 119:1 126:1 0 4:1 9:1 18:1 21:1 29:1 34:1 36:1 39:1 49:1 54:1 55:1 65:1 69:1 77:1 84:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 0 4:1 9:1 14:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 75:1 86:1 88:1 92:1 95:1 102:1 106:1 119:1 126:1 0 4:1 7:1 18:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 75:1 84:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 0 4:1 7:1 18:1 21:1 29:1 34:1 36:1 39:1 49:1 54:1 55:1 65:1 69:1 75:1 86:1 88:1 92:1 95:1 102:1 106:1 119:1 126:1 0 4:1 9:1 14:1 21:1 29:1 34:1 36:1 39:1 49:1 54:1 55:1 65:1 69:1 73:1 86:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 0 4:1 9:1 14:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 77:1 84:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 0 3:1 7:1 18:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 119:1 126:1 0 3:1 9:1 14:1 21:1 29:1 34:1 36:1 39:1 49:1 54:1 55:1 65:1 69:1 73:1 84:1 88:1 92:1 95:1 102:1 106:1 119:1 126:1 1 3:1 7:1 14:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 70:1 84:1 88:1 92:1 95:1 100:1 108:1 118:1 126:1 1 3:1 7:1 14:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 71:1 84:1 88:1 92:1 95:1 100:1 108:1 118:1 126:1 1 3:1 10:1 14:1 22:1 25:1 34:1 36:1 40:1 42:1 53:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 1 3:1 7:1 14:1 22:1 25:1 34:1 37:1 40:1 48:1 53:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 117:1 126:1 0 4:1 9:1 18:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 73:1 82:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 1 3:1 7:1 14:1 22:1 25:1 34:1 36:1 40:1 42:1 53:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 117:1 126:1 0 4:1 9:1 11:1 21:1 29:1 34:1 36:1 39:1 49:1 54:1 55:1 65:1 69:1 73:1 86:1 88:1 92:1 95:1 102:1 106:1 119:1 126:1 1 3:1 7:1 14:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 70:1 80:1 88:1 92:1 95:1 100:1 108:1 118:1 126:1 0 4:1 7:1 14:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 73:1 82:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 1 4:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 70:1 84:1 88:1 92:1 95:1 100:1 108:1 119:1 123:1 1 3:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 71:1 79:1 88:1 92:1 95:1 100:1 108:1 119:1 126:1 0 4:1 9:1 14:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 77:1 82:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 0 3:1 7:1 14:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 1 4:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 70:1 80:1 88:1 92:1 95:1 100:1 108:1 118:1 126:1 0 4:1 7:1 14:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 73:1 86:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 1 3:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 70:1 79:1 88:1 92:1 95:1 100:1 108:1 118:1 120:1 1 3:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 71:1 80:1 88:1 92:1 95:1 100:1 108:1 118:1 120:1 0 4:1 9:1 14:1 21:1 29:1 34:1 36:1 39:1 49:1 54:1 55:1 65:1 69:1 77:1 82:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 1 4:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 70:1 80:1 88:1 92:1 95:1 100:1 108:1 119:1 120:1 1 4:1 7:1 14:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 75:1 80:1 88:1 92:1 95:1 100:1 108:1 119:1 120:1 1 3:1 10:1 16:1 22:1 25:1 34:1 37:1 40:1 42:1 53:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 117:1 126:1 1 4:1 7:1 20:1 22:1 27:1 
34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 71:1 80:1 88:1 92:1 95:1 100:1 108:1 118:1 123:1 1 3:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 75:1 79:1 88:1 92:1 95:1 100:1 108:1 119:1 123:1 0 4:1 7:1 18:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 77:1 84:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 1 4:1 10:1 19:1 21:1 27:1 34:1 36:1 39:1 48:1 54:1 55:1 62:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 108:1 118:1 120:1 1 3:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 70:1 79:1 88:1 92:1 95:1 100:1 108:1 118:1 123:1 1 4:1 7:1 14:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 70:1 84:1 88:1 92:1 95:1 100:1 108:1 119:1 120:1 1 3:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 75:1 80:1 88:1 92:1 95:1 100:1 108:1 118:1 120:1 1 4:1 7:1 14:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 71:1 80:1 88:1 92:1 95:1 100:1 108:1 118:1 123:1 1 3:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 75:1 79:1 88:1 92:1 95:1 100:1 108:1 118:1 123:1 1 3:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 75:1 80:1 88:1 92:1 95:1 100:1 108:1 119:1 123:1 1 3:1 10:1 14:1 22:1 25:1 34:1 37:1 40:1 49:1 53:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 1 4:1 7:1 14:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 75:1 80:1 88:1 92:1 95:1 100:1 108:1 118:1 120:1 1 3:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 75:1 84:1 88:1 92:1 95:1 100:1 108:1 119:1 120:1 1 3:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 75:1 80:1 88:1 92:1 95:1 100:1 108:1 118:1 123:1 1 3:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 70:1 80:1 88:1 92:1 95:1 100:1 108:1 119:1 126:1 0 4:1 9:1 11:1 21:1 29:1 34:1 36:1 39:1 49:1 54:1 55:1 65:1 69:1 77:1 84:1 88:1 92:1 95:1 102:1 106:1 119:1 126:1 1 3:1 7:1 19:1 22:1 25:1 34:1 36:1 40:1 49:1 53:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 117:1 126:1 1 3:1 7:1 16:1 22:1 25:1 34:1 37:1 40:1 49:1 53:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 1 4:1 7:1 14:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 75:1 79:1 88:1 92:1 95:1 100:1 108:1 118:1 123:1 0 4:1 9:1 18:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 73:1 82:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 1 3:1 10:1 16:1 22:1 25:1 34:1 37:1 40:1 49:1 53:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 0 4:1 7:1 11:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 119:1 126:1 0 4:1 9:1 18:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 1 4:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 75:1 79:1 88:1 92:1 95:1 100:1 108:1 119:1 123:1 1 3:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 70:1 79:1 88:1 92:1 95:1 100:1 108:1 118:1 123:1 1 4:1 7:1 14:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 71:1 79:1 88:1 92:1 95:1 100:1 108:1 118:1 123:1 1 4:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 75:1 80:1 88:1 92:1 95:1 100:1 108:1 118:1 123:1 0 3:1 7:1 14:1 21:1 29:1 34:1 36:1 39:1 49:1 54:1 55:1 65:1 69:1 75:1 82:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 1 3:1 7:1 14:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 71:1 79:1 88:1 92:1 95:1 100:1 108:1 118:1 126:1 1 4:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 71:1 80:1 88:1 92:1 95:1 100:1 108:1 118:1 126:1 0 4:1 7:1 18:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 75:1 86:1 88:1 92:1 95:1 102:1 106:1 119:1 126:1 0 4:1 9:1 18:1 21:1 29:1 34:1 36:1 39:1 
42:1 54:1 55:1 65:1 69:1 73:1 82:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 1 3:1 7:1 14:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 70:1 84:1 88:1 92:1 95:1 100:1 108:1 119:1 123:1 1 4:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 70:1 80:1 88:1 92:1 95:1 100:1 108:1 118:1 120:1 1 4:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 70:1 84:1 88:1 92:1 95:1 100:1 108:1 118:1 120:1 1 3:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 75:1 84:1 88:1 92:1 95:1 100:1 108:1 118:1 123:1 1 4:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 71:1 84:1 88:1 92:1 95:1 100:1 108:1 119:1 123:1 1 4:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 71:1 79:1 88:1 92:1 95:1 100:1 108:1 118:1 123:1 0 4:1 9:1 11:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 75:1 86:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 1 3:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 71:1 79:1 88:1 92:1 95:1 100:1 108:1 118:1 126:1 1 4:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 71:1 79:1 88:1 92:1 95:1 100:1 108:1 118:1 126:1 1 3:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 71:1 80:1 88:1 92:1 95:1 100:1 108:1 119:1 123:1 0 4:1 7:1 18:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 77:1 84:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 0 4:1 9:1 18:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 75:1 86:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 0 4:1 7:1 14:1 21:1 29:1 34:1 36:1 39:1 49:1 54:1 55:1 65:1 69:1 73:1 86:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 0 4:1 7:1 13:1 22:1 29:1 34:1 37:1 40:1 51:1 53:1 55:1 62:1 66:1 77:1 79:1 88:1 92:1 95:1 98:1 112:1 118:1 121:1 1 4:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 71:1 80:1 88:1 92:1 95:1 100:1 108:1 119:1 123:1 0 4:1 9:1 18:1 21:1 29:1 34:1 36:1 39:1 49:1 54:1 55:1 65:1 69:1 75:1 84:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 1 4:1 7:1 14:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 71:1 80:1 88:1 92:1 95:1 100:1 108:1 119:1 126:1 0 4:1 9:1 11:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 75:1 86:1 88:1 92:1 95:1 102:1 106:1 119:1 126:1 1 3:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 70:1 79:1 88:1 92:1 95:1 100:1 108:1 118:1 126:1 1 4:1 7:1 14:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 71:1 80:1 88:1 92:1 95:1 100:1 108:1 119:1 123:1 1 4:1 7:1 14:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 70:1 84:1 88:1 92:1 95:1 100:1 108:1 119:1 123:1 1 4:1 7:1 14:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 70:1 80:1 88:1 92:1 95:1 100:1 108:1 119:1 126:1 1 4:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 70:1 79:1 88:1 92:1 95:1 100:1 108:1 118:1 120:1 1 4:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 75:1 80:1 88:1 92:1 95:1 100:1 108:1 119:1 120:1 1 4:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 75:1 79:1 88:1 92:1 95:1 100:1 108:1 119:1 126:1 0 4:1 9:1 18:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 1 3:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 71:1 84:1 88:1 92:1 95:1 100:1 108:1 118:1 120:1 1 3:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 75:1 84:1 88:1 92:1 95:1 100:1 108:1 118:1 123:1 1 3:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 75:1 84:1 88:1 92:1 95:1 100:1 108:1 119:1 123:1 1 4:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 75:1 80:1 88:1 92:1 95:1 100:1 108:1 119:1 126:1 1 4:1 7:1 14:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 
68:1 71:1 80:1 88:1 92:1 95:1 100:1 108:1 118:1 123:1 1 3:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 70:1 80:1 88:1 92:1 95:1 100:1 108:1 118:1 126:1 1 4:1 7:1 14:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 70:1 80:1 88:1 92:1 95:1 100:1 108:1 119:1 120:1 1 4:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 75:1 80:1 88:1 92:1 95:1 100:1 108:1 118:1 126:1 1 4:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 75:1 84:1 88:1 92:1 95:1 100:1 108:1 119:1 123:1 1 3:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 70:1 79:1 88:1 92:1 95:1 100:1 108:1 119:1 120:1 1 3:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 70:1 80:1 88:1 92:1 95:1 100:1 108:1 118:1 126:1 1 4:1 7:1 14:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 71:1 79:1 88:1 92:1 95:1 100:1 108:1 119:1 123:1 1 3:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 70:1 84:1 88:1 92:1 95:1 100:1 108:1 119:1 123:1 1 4:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 75:1 80:1 88:1 92:1 95:1 100:1 108:1 119:1 120:1 1 4:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 71:1 79:1 88:1 92:1 95:1 100:1 108:1 118:1 120:1 1 3:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 75:1 80:1 88:1 92:1 95:1 100:1 108:1 118:1 120:1 1 3:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 75:1 80:1 88:1 92:1 95:1 100:1 108:1 119:1 123:1 1 4:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 71:1 84:1 88:1 92:1 95:1 100:1 108:1 119:1 126:1 1 4:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 75:1 84:1 88:1 92:1 95:1 100:1 108:1 118:1 120:1 1 4:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 71:1 80:1 88:1 92:1 95:1 100:1 108:1 119:1 120:1 1 4:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 71:1 84:1 88:1 92:1 95:1 100:1 108:1 119:1 126:1 1 3:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 70:1 79:1 88:1 92:1 95:1 100:1 108:1 119:1 120:1 1 4:1 7:1 14:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 75:1 84:1 88:1 92:1 95:1 100:1 108:1 118:1 120:1 1 4:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 71:1 79:1 88:1 92:1 95:1 100:1 108:1 119:1 126:1 1 4:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 70:1 79:1 88:1 92:1 95:1 100:1 108:1 119:1 120:1 0 3:1 10:1 18:1 21:1 29:1 34:1 36:1 39:1 50:1 53:1 61:1 65:1 69:1 77:1 85:1 88:1 92:1 96:1 98:1 112:1 115:1 125:1 1 4:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 70:1 79:1 88:1 92:1 95:1 100:1 108:1 119:1 126:1 1 4:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 70:1 79:1 88:1 92:1 95:1 100:1 108:1 118:1 126:1 1 4:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 71:1 84:1 88:1 92:1 95:1 100:1 108:1 118:1 123:1 1 3:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 71:1 84:1 88:1 92:1 95:1 100:1 108:1 118:1 120:1 1 4:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 75:1 84:1 88:1 92:1 95:1 100:1 108:1 119:1 126:1 1 4:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 75:1 84:1 88:1 92:1 95:1 100:1 108:1 118:1 126:1 0 3:1 9:1 17:1 22:1 29:1 34:1 36:1 40:1 44:1 53:1 61:1 65:1 66:1 77:1 86:1 88:1 92:1 95:1 99:1 108:1 119:1 126:1 1 4:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 70:1 80:1 88:1 92:1 95:1 100:1 108:1 119:1 120:1 1 3:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 75:1 80:1 88:1 92:1 95:1 100:1 108:1 119:1 120:1 1 4:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 70:1 79:1 88:1 
92:1 95:1 100:1 108:1 118:1 126:1 1 3:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 70:1 79:1 88:1 92:1 95:1 100:1 108:1 118:1 126:1 1 3:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 71:1 84:1 88:1 92:1 95:1 100:1 108:1 119:1 126:1 1 3:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 75:1 79:1 88:1 92:1 95:1 100:1 108:1 118:1 123:1 1 3:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 71:1 80:1 88:1 92:1 95:1 100:1 108:1 119:1 123:1 1 3:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 75:1 80:1 88:1 92:1 95:1 100:1 108:1 118:1 120:1 1 4:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 75:1 84:1 88:1 92:1 95:1 100:1 108:1 119:1 123:1 1 3:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 70:1 80:1 88:1 92:1 95:1 100:1 108:1 119:1 120:1 1 4:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 75:1 80:1 88:1 92:1 95:1 100:1 108:1 118:1 120:1 1 4:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 75:1 79:1 88:1 92:1 95:1 100:1 108:1 119:1 126:1 1 4:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 71:1 79:1 88:1 92:1 95:1 100:1 108:1 118:1 120:1 1 4:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 75:1 80:1 88:1 92:1 95:1 100:1 108:1 119:1 126:1 1 4:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 75:1 84:1 88:1 92:1 95:1 100:1 108:1 119:1 126:1 1 4:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 75:1 79:1 88:1 92:1 95:1 100:1 108:1 119:1 120:1 1 3:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 71:1 80:1 88:1 92:1 95:1 100:1 108:1 118:1 126:1 1 4:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 71:1 79:1 88:1 92:1 95:1 100:1 108:1 118:1 123:1 1 3:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 71:1 84:1 88:1 92:1 95:1 100:1 108:1 118:1 120:1 1 4:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 70:1 80:1 88:1 92:1 95:1 100:1 108:1 118:1 123:1 1 4:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 70:1 79:1 88:1 92:1 95:1 100:1 108:1 118:1 120:1 1 4:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 70:1 80:1 88:1 92:1 95:1 100:1 108:1 118:1 120:1 1 3:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 70:1 80:1 88:1 92:1 95:1 100:1 108:1 118:1 126:1 1 4:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 70:1 79:1 88:1 92:1 95:1 100:1 108:1 118:1 120:1 1 3:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 75:1 84:1 88:1 92:1 95:1 100:1 108:1 119:1 123:1 1 4:1 7:1 14:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 71:1 79:1 88:1 92:1 95:1 100:1 108:1 119:1 123:1 1 4:1 7:1 14:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 71:1 84:1 88:1 92:1 95:1 100:1 108:1 119:1 126:1 1 3:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 75:1 84:1 88:1 92:1 95:1 100:1 108:1 119:1 126:1 1 3:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 71:1 84:1 88:1 92:1 95:1 100:1 108:1 118:1 120:1 0 4:1 9:1 12:1 21:1 29:1 34:1 36:1 39:1 51:1 53:1 61:1 65:1 69:1 77:1 86:1 88:1 92:1 96:1 98:1 112:1 115:1 125:1 1 4:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 75:1 80:1 88:1 92:1 95:1 100:1 108:1 118:1 123:1 1 4:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 70:1 80:1 88:1 92:1 95:1 100:1 108:1 118:1 120:1 1 3:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 70:1 80:1 88:1 92:1 95:1 100:1 108:1 118:1 126:1 1 4:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 70:1 79:1 88:1 92:1 95:1 100:1 108:1 
119:1 120:1 1 3:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 75:1 84:1 88:1 92:1 95:1 100:1 108:1 119:1 123:1 1 4:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 75:1 79:1 88:1 92:1 95:1 100:1 108:1 119:1 120:1 1 3:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 75:1 84:1 88:1 92:1 95:1 100:1 108:1 119:1 123:1 1 4:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 70:1 80:1 88:1 92:1 95:1 100:1 108:1 119:1 123:1 1 3:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 75:1 79:1 88:1 92:1 95:1 100:1 108:1 119:1 126:1 1 4:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 70:1 80:1 88:1 92:1 95:1 100:1 108:1 119:1 126:1 1 4:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 75:1 84:1 88:1 92:1 95:1 100:1 108:1 118:1 123:1 1 3:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 71:1 84:1 88:1 92:1 95:1 100:1 108:1 118:1 123:1 1 3:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 70:1 79:1 88:1 92:1 95:1 100:1 108:1 119:1 123:1 1 3:1 10:1 14:1 21:1 27:1 34:1 36:1 39:1 44:1 54:1 55:1 62:1 66:1 77:1 86:1 88:1 92:1 95:1 102:1 108:1 117:1 120:1 1 4:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 70:1 79:1 88:1 92:1 95:1 100:1 108:1 118:1 123:1 1 3:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 70:1 84:1 88:1 92:1 95:1 100:1 108:1 119:1 126:1 1 3:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 75:1 80:1 88:1 92:1 95:1 100:1 108:1 119:1 120:1 1 3:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 71:1 80:1 88:1 92:1 95:1 100:1 108:1 119:1 120:1 1 3:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 71:1 80:1 88:1 92:1 95:1 100:1 108:1 118:1 123:1 1 4:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 75:1 80:1 88:1 92:1 95:1 100:1 108:1 119:1 123:1 1 3:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 71:1 84:1 88:1 92:1 95:1 100:1 108:1 119:1 120:1 1 3:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 75:1 79:1 88:1 92:1 95:1 100:1 108:1 119:1 126:1 1 3:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 70:1 79:1 88:1 92:1 95:1 100:1 108:1 118:1 123:1 1 4:1 7:1 14:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 70:1 80:1 88:1 92:1 95:1 100:1 108:1 118:1 123:1 1 4:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 70:1 79:1 88:1 92:1 95:1 100:1 108:1 119:1 126:1 1 4:1 7:1 14:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 75:1 80:1 88:1 92:1 95:1 100:1 108:1 118:1 123:1 1 4:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 75:1 79:1 88:1 92:1 95:1 100:1 108:1 119:1 126:1 1 4:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 70:1 79:1 88:1 92:1 95:1 100:1 108:1 119:1 123:1 1 4:1 10:1 14:1 21:1 27:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 66:1 77:1 86:1 88:1 92:1 95:1 102:1 108:1 118:1 120:1 1 4:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 70:1 80:1 88:1 92:1 95:1 100:1 108:1 119:1 120:1 0 4:1 9:1 11:1 21:1 29:1 34:1 36:1 39:1 50:1 53:1 61:1 65:1 69:1 76:1 85:1 88:1 92:1 96:1 98:1 112:1 115:1 125:1 1 3:1 9:1 11:1 22:1 31:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 68:1 77:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 123:1 1 4:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 70:1 84:1 88:1 92:1 95:1 100:1 108:1 118:1 126:1 1 4:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 75:1 84:1 88:1 92:1 95:1 100:1 108:1 118:1 126:1 1 3:1 10:1 19:1 22:1 25:1 34:1 36:1 40:1 48:1 53:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 1 3:1 
7:1 20:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 71:1 79:1 88:1 92:1 95:1 100:1 108:1 119:1 120:1 1 4:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 70:1 80:1 88:1 92:1 95:1 100:1 108:1 118:1 123:1 1 4:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 70:1 79:1 88:1 92:1 95:1 100:1 108:1 118:1 126:1 1 3:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 70:1 84:1 88:1 92:1 95:1 100:1 108:1 119:1 123:1 1 3:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 75:1 79:1 88:1 92:1 95:1 100:1 108:1 119:1 126:1 0 4:1 10:1 12:1 21:1 29:1 34:1 36:1 39:1 51:1 53:1 61:1 65:1 69:1 76:1 86:1 88:1 92:1 96:1 98:1 112:1 115:1 125:1 1 3:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 71:1 80:1 88:1 92:1 95:1 100:1 108:1 118:1 126:1 1 4:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 75:1 84:1 88:1 92:1 95:1 100:1 108:1 119:1 120:1 1 4:1 7:1 14:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 71:1 79:1 88:1 92:1 95:1 100:1 108:1 118:1 123:1 0 4:1 9:1 18:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 73:1 84:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 1 4:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 75:1 80:1 88:1 92:1 95:1 100:1 108:1 119:1 120:1 1 1:1 9:1 19:1 21:1 29:1 34:1 36:1 39:1 51:1 53:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 96:1 102:1 109:1 118:1 122:1 1 3:1 7:1 14:1 22:1 25:1 34:1 36:1 40:1 45:1 53:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 1 3:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 70:1 80:1 88:1 92:1 95:1 100:1 108:1 119:1 123:1 1 3:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 70:1 84:1 88:1 92:1 95:1 100:1 108:1 119:1 123:1 1 4:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 75:1 84:1 88:1 92:1 95:1 100:1 108:1 118:1 126:1 1 3:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 75:1 79:1 88:1 92:1 95:1 100:1 108:1 118:1 123:1 1 3:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 75:1 80:1 88:1 92:1 95:1 100:1 108:1 118:1 120:1 1 3:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 75:1 80:1 88:1 92:1 95:1 100:1 108:1 119:1 120:1 1 3:1 7:1 14:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 71:1 84:1 88:1 92:1 95:1 100:1 108:1 118:1 123:1 0 5:1 10:1 16:1 21:1 29:1 34:1 36:1 39:1 51:1 53:1 61:1 65:1 69:1 76:1 85:1 88:1 92:1 96:1 98:1 112:1 115:1 125:1 1 4:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 70:1 79:1 88:1 92:1 95:1 100:1 108:1 118:1 126:1 1 4:1 7:1 14:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 71:1 84:1 88:1 92:1 95:1 100:1 108:1 118:1 120:1 1 4:1 7:1 14:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 70:1 80:1 88:1 92:1 95:1 100:1 108:1 118:1 126:1 1 4:1 7:1 14:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 71:1 80:1 88:1 92:1 95:1 100:1 108:1 118:1 120:1 1 3:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 70:1 79:1 88:1 92:1 95:1 100:1 108:1 118:1 120:1 1 3:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 70:1 84:1 88:1 92:1 95:1 100:1 108:1 118:1 123:1 1 4:1 7:1 14:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 75:1 84:1 88:1 92:1 95:1 100:1 108:1 119:1 123:1 1 4:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 70:1 80:1 88:1 92:1 95:1 100:1 108:1 119:1 126:1 1 4:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 70:1 84:1 88:1 92:1 95:1 100:1 108:1 119:1 120:1 1 4:1 7:1 14:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 75:1 84:1 88:1 92:1 95:1 100:1 108:1 118:1 120:1 0 5:1 10:1 12:1 21:1 29:1 
34:1 36:1 39:1 51:1 53:1 61:1 65:1 69:1 77:1 85:1 88:1 92:1 96:1 98:1 112:1 115:1 125:1 1 4:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 75:1 80:1 88:1 92:1 95:1 100:1 108:1 119:1 126:1 1 3:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 71:1 80:1 88:1 92:1 95:1 100:1 108:1 118:1 123:1 1 3:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 70:1 79:1 88:1 92:1 95:1 100:1 108:1 118:1 126:1 1 4:1 10:1 19:1 21:1 27:1 34:1 36:1 39:1 44:1 54:1 55:1 65:1 66:1 77:1 86:1 88:1 92:1 95:1 102:1 108:1 118:1 124:1 1 3:1 10:1 14:1 22:1 25:1 34:1 36:1 40:1 48:1 53:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 1 4:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 75:1 80:1 88:1 92:1 95:1 100:1 108:1 119:1 126:1 1 3:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 70:1 80:1 88:1 92:1 95:1 100:1 108:1 118:1 123:1 1 4:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 75:1 79:1 88:1 92:1 95:1 100:1 108:1 119:1 123:1 1 4:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 71:1 79:1 88:1 92:1 95:1 100:1 108:1 119:1 126:1 1 3:1 7:1 14:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 75:1 84:1 88:1 92:1 95:1 100:1 108:1 119:1 120:1 1 3:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 75:1 79:1 88:1 92:1 95:1 100:1 108:1 119:1 123:1 1 3:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 75:1 80:1 88:1 92:1 95:1 100:1 108:1 119:1 123:1 1 4:1 10:1 12:1 21:1 27:1 34:1 36:1 39:1 51:1 54:1 55:1 62:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 108:1 118:1 120:1 1 4:1 7:1 14:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 70:1 84:1 88:1 92:1 95:1 100:1 108:1 118:1 126:1 0 5:1 9:1 12:1 21:1 29:1 34:1 36:1 39:1 50:1 53:1 61:1 65:1 69:1 76:1 86:1 88:1 92:1 96:1 98:1 112:1 115:1 125:1 1 4:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 71:1 84:1 88:1 92:1 95:1 100:1 108:1 119:1 120:1 1 3:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 71:1 79:1 88:1 92:1 95:1 100:1 108:1 118:1 123:1 1 3:1 10:1 19:1 21:1 27:1 34:1 36:1 39:1 44:1 54:1 55:1 62:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 108:1 117:1 124:1 0 5:1 7:1 13:1 22:1 29:1 34:1 37:1 40:1 51:1 53:1 55:1 62:1 69:1 77:1 79:1 88:1 92:1 95:1 98:1 112:1 118:1 121:1 1 3:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 70:1 80:1 88:1 92:1 95:1 100:1 108:1 119:1 126:1 1 4:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 70:1 79:1 88:1 92:1 95:1 100:1 108:1 119:1 126:1 1 4:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 71:1 84:1 88:1 92:1 95:1 100:1 108:1 119:1 123:1 0 4:1 9:1 16:1 21:1 29:1 34:1 36:1 39:1 51:1 53:1 61:1 65:1 69:1 77:1 85:1 88:1 92:1 96:1 98:1 112:1 115:1 125:1 1 3:1 10:1 12:1 21:1 27:1 34:1 36:1 39:1 51:1 54:1 55:1 62:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 108:1 118:1 120:1 1 3:1 10:1 19:1 21:1 27:1 34:1 36:1 39:1 44:1 54:1 55:1 65:1 66:1 77:1 86:1 88:1 92:1 95:1 102:1 108:1 118:1 120:1 1 4:1 10:1 12:1 21:1 27:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 108:1 118:1 120:1 1 3:1 10:1 12:1 21:1 27:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 108:1 118:1 120:1 0 5:1 10:1 11:1 21:1 29:1 34:1 36:1 39:1 51:1 53:1 61:1 65:1 69:1 76:1 86:1 88:1 92:1 96:1 98:1 112:1 115:1 125:1 1 3:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 75:1 80:1 88:1 92:1 95:1 100:1 108:1 119:1 120:1 0 4:1 9:1 11:1 21:1 29:1 34:1 36:1 39:1 51:1 53:1 61:1 65:1 69:1 77:1 85:1 88:1 92:1 96:1 98:1 112:1 115:1 125:1 1 3:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 
48:1 53:1 55:1 64:1 68:1 75:1 84:1 88:1 92:1 95:1 100:1 108:1 119:1 120:1 0 3:1 10:1 16:1 21:1 29:1 34:1 36:1 39:1 51:1 53:1 61:1 65:1 69:1 77:1 86:1 88:1 92:1 96:1 98:1 112:1 115:1 125:1 0 4:1 10:1 11:1 21:1 29:1 34:1 36:1 39:1 51:1 53:1 61:1 65:1 69:1 76:1 86:1 88:1 92:1 96:1 98:1 112:1 115:1 125:1 1 4:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 71:1 84:1 88:1 92:1 95:1 100:1 108:1 119:1 126:1 1 3:1 10:1 19:1 21:1 27:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 108:1 117:1 120:1 0 3:1 9:1 19:1 22:1 29:1 34:1 36:1 40:1 44:1 53:1 61:1 65:1 66:1 77:1 86:1 88:1 92:1 95:1 99:1 108:1 118:1 126:1 0 5:1 10:1 16:1 21:1 29:1 34:1 36:1 39:1 51:1 53:1 61:1 65:1 69:1 76:1 86:1 88:1 92:1 96:1 98:1 112:1 115:1 125:1 1 3:1 10:1 12:1 21:1 27:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 66:1 77:1 86:1 88:1 92:1 95:1 102:1 108:1 117:1 120:1 1 4:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 70:1 79:1 88:1 92:1 95:1 100:1 108:1 118:1 126:1 1 4:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 75:1 79:1 88:1 92:1 95:1 100:1 108:1 118:1 120:1 1 3:1 9:1 11:1 22:1 26:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 69:1 75:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 126:1 1 4:1 9:1 16:1 21:1 29:1 34:1 36:1 39:1 51:1 53:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 96:1 102:1 109:1 118:1 122:1 0 5:1 9:1 12:1 21:1 29:1 34:1 36:1 39:1 51:1 53:1 61:1 65:1 69:1 76:1 86:1 88:1 92:1 96:1 98:1 112:1 115:1 125:1 0 5:1 9:1 11:1 21:1 29:1 34:1 36:1 39:1 51:1 53:1 61:1 65:1 69:1 76:1 86:1 88:1 92:1 96:1 98:1 112:1 115:1 125:1 1 3:1 9:1 11:1 22:1 27:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 69:1 75:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 123:1 0 5:1 9:1 16:1 21:1 29:1 34:1 36:1 39:1 51:1 53:1 61:1 65:1 69:1 77:1 85:1 88:1 92:1 96:1 98:1 112:1 115:1 125:1 0 3:1 10:1 11:1 21:1 29:1 34:1 36:1 39:1 51:1 53:1 61:1 65:1 69:1 76:1 86:1 88:1 92:1 96:1 98:1 112:1 115:1 125:1 1 4:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 70:1 80:1 88:1 92:1 95:1 100:1 108:1 118:1 126:1 0 5:1 7:1 11:1 22:1 29:1 34:1 37:1 40:1 51:1 53:1 55:1 62:1 66:1 77:1 79:1 88:1 92:1 95:1 98:1 112:1 118:1 121:1 1 4:1 9:1 20:1 22:1 29:1 34:1 36:1 40:1 51:1 53:1 61:1 64:1 67:1 77:1 87:1 88:1 92:1 95:1 98:1 112:1 118:1 126:1 1 4:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 75:1 84:1 88:1 92:1 95:1 100:1 108:1 118:1 123:1 1 3:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 75:1 84:1 88:1 92:1 95:1 100:1 108:1 118:1 123:1 1 3:1 10:1 19:1 21:1 27:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 66:1 77:1 86:1 88:1 92:1 95:1 102:1 108:1 118:1 120:1 1 1:1 9:1 12:1 21:1 29:1 34:1 36:1 39:1 51:1 53:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 96:1 102:1 109:1 118:1 122:1 1 4:1 10:1 14:1 21:1 27:1 34:1 36:1 39:1 51:1 54:1 55:1 62:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 108:1 117:1 124:1 1 3:1 7:1 20:1 22:1 29:1 34:1 36:1 40:1 51:1 53:1 61:1 64:1 67:1 77:1 87:1 88:1 92:1 95:1 98:1 112:1 118:1 126:1 1 4:1 10:1 12:1 21:1 27:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 66:1 77:1 86:1 88:1 92:1 95:1 102:1 108:1 117:1 124:1 1 4:1 10:1 19:1 21:1 27:1 34:1 36:1 39:1 51:1 54:1 55:1 62:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 108:1 117:1 124:1 1 4:1 10:1 19:1 21:1 27:1 34:1 36:1 39:1 44:1 54:1 55:1 62:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 108:1 118:1 120:1 1 4:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 70:1 80:1 88:1 92:1 95:1 100:1 108:1 119:1 123:1 1 4:1 10:1 14:1 21:1 27:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 108:1 117:1 120:1 1 4:1 10:1 12:1 21:1 27:1 34:1 36:1 39:1 44:1 54:1 55:1 65:1 
69:1 77:1 86:1 88:1 92:1 95:1 102:1 108:1 117:1 120:1 0 4:1 10:1 18:1 21:1 29:1 34:1 36:1 39:1 50:1 53:1 61:1 65:1 69:1 76:1 86:1 88:1 92:1 96:1 98:1 112:1 115:1 125:1 0 3:1 10:1 11:1 21:1 29:1 34:1 36:1 39:1 50:1 53:1 61:1 65:1 69:1 76:1 86:1 88:1 92:1 96:1 98:1 112:1 115:1 125:1 1 4:1 10:1 19:1 21:1 27:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 108:1 117:1 120:1 0 3:1 7:1 13:1 22:1 29:1 34:1 37:1 40:1 51:1 53:1 55:1 65:1 69:1 77:1 79:1 88:1 92:1 95:1 98:1 112:1 118:1 121:1 1 3:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 75:1 80:1 88:1 92:1 95:1 100:1 108:1 118:1 123:1 1 1:1 9:1 19:1 21:1 29:1 34:1 36:1 39:1 45:1 53:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 96:1 102:1 109:1 118:1 122:1 1 3:1 9:1 11:1 22:1 31:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 69:1 77:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 121:1 1 3:1 10:1 14:1 21:1 27:1 34:1 36:1 39:1 44:1 54:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 108:1 117:1 124:1 1 3:1 10:1 19:1 21:1 27:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 108:1 118:1 120:1 1 4:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 70:1 80:1 88:1 92:1 95:1 100:1 108:1 118:1 126:1 1 5:1 9:1 20:1 22:1 29:1 34:1 36:1 40:1 51:1 53:1 61:1 64:1 67:1 77:1 79:1 88:1 92:1 95:1 98:1 112:1 118:1 126:1 0 3:1 9:1 13:1 22:1 29:1 34:1 37:1 40:1 51:1 53:1 55:1 62:1 69:1 77:1 79:1 88:1 92:1 95:1 98:1 112:1 118:1 121:1 0 3:1 7:1 11:1 22:1 29:1 34:1 37:1 40:1 51:1 53:1 55:1 62:1 69:1 77:1 79:1 88:1 92:1 95:1 98:1 112:1 118:1 121:1 1 3:1 9:1 20:1 22:1 29:1 34:1 36:1 40:1 51:1 53:1 61:1 64:1 67:1 77:1 79:1 88:1 92:1 95:1 98:1 112:1 118:1 126:1 0 3:1 9:1 16:1 21:1 29:1 34:1 36:1 39:1 51:1 53:1 61:1 65:1 69:1 77:1 86:1 88:1 92:1 96:1 98:1 112:1 115:1 125:1 0 4:1 9:1 12:1 21:1 29:1 34:1 36:1 39:1 50:1 53:1 61:1 65:1 69:1 76:1 86:1 88:1 92:1 96:1 98:1 112:1 115:1 125:1 0 3:1 7:1 13:1 22:1 29:1 34:1 37:1 40:1 51:1 53:1 55:1 62:1 66:1 77:1 79:1 88:1 92:1 95:1 98:1 112:1 118:1 121:1 0 5:1 10:1 18:1 21:1 29:1 34:1 36:1 39:1 51:1 53:1 61:1 65:1 69:1 77:1 85:1 88:1 92:1 96:1 98:1 112:1 115:1 125:1 1 4:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 75:1 79:1 88:1 92:1 95:1 100:1 108:1 119:1 123:1 1 1:1 9:1 16:1 21:1 29:1 34:1 36:1 39:1 46:1 53:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 96:1 102:1 109:1 118:1 120:1 0 4:1 10:1 18:1 21:1 29:1 34:1 36:1 39:1 51:1 53:1 61:1 65:1 69:1 76:1 86:1 88:1 92:1 96:1 98:1 112:1 115:1 125:1 1 3:1 10:1 12:1 21:1 27:1 34:1 36:1 39:1 51:1 54:1 55:1 62:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 108:1 117:1 120:1 1 3:1 9:1 11:1 22:1 26:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 68:1 75:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 126:1 1 3:1 9:1 11:1 22:1 31:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 68:1 77:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 121:1 0 3:1 9:1 16:1 21:1 29:1 34:1 36:1 39:1 50:1 53:1 61:1 65:1 69:1 76:1 85:1 88:1 92:1 96:1 98:1 112:1 115:1 125:1 1 4:1 10:1 14:1 21:1 27:1 34:1 36:1 39:1 51:1 54:1 55:1 62:1 66:1 77:1 86:1 88:1 92:1 95:1 102:1 108:1 117:1 120:1 1 4:1 10:1 19:1 21:1 27:1 34:1 36:1 39:1 48:1 54:1 55:1 62:1 66:1 77:1 86:1 88:1 92:1 95:1 102:1 108:1 117:1 120:1 1 4:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 70:1 79:1 88:1 92:1 95:1 100:1 108:1 118:1 123:1 1 3:1 9:1 11:1 22:1 26:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 69:1 75:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 123:1 0 5:1 9:1 11:1 21:1 29:1 34:1 36:1 39:1 50:1 53:1 61:1 65:1 69:1 77:1 85:1 88:1 92:1 96:1 98:1 112:1 115:1 125:1 1 3:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 70:1 80:1 88:1 92:1 
95:1 100:1 108:1 118:1 120:1 1 3:1 10:1 19:1 21:1 27:1 34:1 36:1 39:1 51:1 54:1 55:1 62:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 108:1 118:1 120:1 0 5:1 10:1 16:1 21:1 29:1 34:1 36:1 39:1 50:1 53:1 61:1 65:1 69:1 77:1 86:1 88:1 92:1 96:1 98:1 112:1 115:1 125:1 1 4:1 10:1 19:1 21:1 27:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 66:1 77:1 86:1 88:1 92:1 95:1 102:1 108:1 117:1 124:1 1 4:1 9:1 16:1 21:1 29:1 34:1 36:1 39:1 45:1 53:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 96:1 102:1 109:1 118:1 122:1 1 4:1 9:1 19:1 21:1 29:1 34:1 36:1 39:1 51:1 53:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 96:1 102:1 109:1 118:1 122:1 1 3:1 10:1 19:1 21:1 27:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 66:1 77:1 86:1 88:1 92:1 95:1 102:1 108:1 117:1 120:1 1 3:1 10:1 12:1 21:1 27:1 34:1 36:1 39:1 48:1 54:1 55:1 62:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 108:1 117:1 124:1 1 3:1 10:1 14:1 21:1 27:1 34:1 36:1 39:1 44:1 54:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 108:1 118:1 120:1 1 4:1 10:1 19:1 21:1 27:1 34:1 36:1 39:1 48:1 54:1 55:1 62:1 66:1 77:1 86:1 88:1 92:1 95:1 102:1 108:1 118:1 120:1 0 4:1 9:1 19:1 22:1 29:1 34:1 36:1 40:1 44:1 53:1 61:1 65:1 66:1 77:1 86:1 88:1 92:1 95:1 99:1 108:1 119:1 126:1 0 5:1 7:1 11:1 22:1 29:1 34:1 37:1 40:1 51:1 53:1 55:1 65:1 66:1 77:1 79:1 88:1 92:1 95:1 98:1 112:1 118:1 121:1 1 3:1 9:1 11:1 22:1 27:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 69:1 77:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 126:1 1 4:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 75:1 79:1 88:1 92:1 95:1 100:1 108:1 118:1 123:1 0 3:1 9:1 18:1 21:1 29:1 34:1 36:1 39:1 50:1 53:1 61:1 65:1 69:1 76:1 86:1 88:1 92:1 96:1 98:1 112:1 115:1 125:1 1 4:1 10:1 12:1 21:1 29:1 34:1 36:1 39:1 45:1 53:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 96:1 102:1 109:1 118:1 122:1 1 3:1 10:1 14:1 21:1 27:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 66:1 77:1 86:1 88:1 92:1 95:1 102:1 108:1 118:1 120:1 0 3:1 9:1 11:1 21:1 29:1 34:1 36:1 39:1 50:1 53:1 61:1 65:1 69:1 77:1 85:1 88:1 92:1 96:1 98:1 112:1 115:1 125:1 1 3:1 9:1 18:1 22:1 26:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 68:1 75:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 126:1 1 3:1 10:1 12:1 21:1 27:1 34:1 36:1 39:1 44:1 54:1 55:1 65:1 66:1 77:1 86:1 88:1 92:1 95:1 102:1 108:1 117:1 120:1 0 3:1 10:1 16:1 21:1 29:1 34:1 36:1 39:1 51:1 53:1 61:1 65:1 69:1 76:1 86:1 88:1 92:1 96:1 98:1 112:1 115:1 125:1 1 3:1 9:1 11:1 22:1 31:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 68:1 75:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 123:1 1 3:1 9:1 11:1 22:1 31:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 69:1 77:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 126:1 1 3:1 9:1 11:1 22:1 26:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 68:1 75:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 121:1 0 5:1 9:1 11:1 22:1 29:1 34:1 37:1 40:1 51:1 53:1 55:1 65:1 66:1 77:1 79:1 88:1 92:1 95:1 98:1 112:1 118:1 121:1 1 4:1 10:1 19:1 21:1 29:1 34:1 36:1 39:1 46:1 53:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 96:1 102:1 109:1 118:1 120:1 1 4:1 7:1 14:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 70:1 84:1 88:1 92:1 95:1 100:1 108:1 119:1 120:1 1 3:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 75:1 84:1 88:1 92:1 95:1 100:1 108:1 119:1 126:1 1 3:1 9:1 11:1 22:1 31:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 69:1 77:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 121:1 1 3:1 9:1 18:1 22:1 26:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 68:1 77:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 123:1 1 4:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 75:1 79:1 88:1 92:1 95:1 100:1 108:1 118:1 120:1 1 3:1 10:1 12:1 21:1 27:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 66:1 77:1 86:1 88:1 92:1 95:1 102:1 108:1 
[Raw tar data payload, examples elided. This span is the remainder of a LibSVM-format data file (by archive order, xgboost-3.0.0/demo/data/agaricus.txt.test; its own entry header precedes this excerpt), whose one-example-per-line breaks were lost in extraction, followed by the tar entry header for xgboost-3.0.0/demo/data/agaricus.txt.train and the opening examples of that file. Both files carry the UCI mushroom (agaricus) demo dataset in sparse LibSVM text format: one example per line, written as "<label> <feature-index>:<value> ...", with binary 0/1 labels (edible vs. poisonous mushrooms) and every stored value equal to 1, the feature indices being one-hot encodings of the mushroom's categorical attributes. The first training example, verbatim:

1 3:1 10:1 11:1 21:1 30:1 34:1 36:1 40:1 41:1 53:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 117:1 124:1]
86:1 88:1 92:1 95:1 102:1 106:1 119:1 120:1 0 3:1 9:1 19:1 21:1 23:1 34:1 36:1 39:1 41:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 117:1 120:1 0 4:1 9:1 20:1 21:1 24:1 34:1 36:1 39:1 51:1 53:1 60:1 65:1 67:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 119:1 123:1 0 1:1 10:1 19:1 21:1 23:1 34:1 36:1 39:1 51:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 117:1 120:1 0 1:1 10:1 19:1 21:1 23:1 34:1 36:1 39:1 51:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 117:1 122:1 0 1:1 10:1 19:1 21:1 23:1 34:1 36:1 39:1 41:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 117:1 120:1 0 3:1 7:1 20:1 21:1 24:1 34:1 37:1 40:1 42:1 54:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 111:1 118:1 126:1 0 1:1 9:1 20:1 21:1 23:1 34:1 36:1 39:1 51:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 117:1 120:1 0 4:1 9:1 20:1 21:1 24:1 34:1 36:1 39:1 48:1 53:1 60:1 65:1 67:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 117:1 120:1 0 1:1 9:1 19:1 21:1 24:1 34:1 36:1 39:1 51:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 116:1 122:1 0 1:1 9:1 19:1 21:1 23:1 34:1 36:1 39:1 41:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 116:1 122:1 0 1:1 9:1 20:1 21:1 23:1 34:1 36:1 39:1 45:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 117:1 120:1 0 3:1 9:1 20:1 21:1 24:1 34:1 36:1 39:1 45:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 116:1 122:1 0 1:1 10:1 20:1 21:1 24:1 34:1 36:1 39:1 45:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 116:1 120:1 1 3:1 9:1 19:1 21:1 30:1 34:1 36:1 40:1 48:1 53:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 118:1 124:1 0 6:1 7:1 11:1 22:1 29:1 34:1 36:1 40:1 45:1 53:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 119:1 124:1 0 4:1 7:1 11:1 22:1 29:1 34:1 36:1 40:1 45:1 53:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 118:1 124:1 0 3:1 10:1 20:1 21:1 23:1 34:1 36:1 39:1 45:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 117:1 122:1 0 4:1 9:1 11:1 21:1 23:1 34:1 36:1 39:1 48:1 53:1 60:1 65:1 67:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 117:1 123:1 1 3:1 9:1 19:1 21:1 30:1 34:1 36:1 40:1 41:1 53:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 117:1 120:1 0 1:1 10:1 19:1 21:1 24:1 34:1 36:1 39:1 45:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 117:1 122:1 0 4:1 7:1 14:1 22:1 29:1 34:1 36:1 40:1 48:1 53:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 118:1 124:1 0 1:1 9:1 20:1 21:1 24:1 34:1 36:1 39:1 45:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 116:1 120:1 0 3:1 9:1 11:1 21:1 23:1 34:1 36:1 39:1 51:1 53:1 60:1 65:1 67:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 119:1 123:1 0 3:1 7:1 19:1 22:1 29:1 34:1 37:1 39:1 48:1 54:1 58:1 65:1 66:1 77:1 86:1 88:1 92:1 95:1 98:1 105:1 117:1 120:1 0 3:1 10:1 19:1 21:1 24:1 34:1 37:1 40:1 51:1 54:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 0 1:1 10:1 19:1 21:1 24:1 34:1 36:1 39:1 51:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 117:1 122:1 0 4:1 10:1 20:1 21:1 23:1 34:1 37:1 40:1 51:1 54:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 0 3:1 10:1 20:1 21:1 23:1 34:1 36:1 39:1 41:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 116:1 122:1 0 4:1 7:1 14:1 22:1 29:1 34:1 36:1 40:1 45:1 53:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 119:1 124:1 0 3:1 10:1 19:1 21:1 24:1 34:1 36:1 39:1 45:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 116:1 122:1 0 3:1 9:1 19:1 21:1 24:1 34:1 36:1 39:1 42:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 
92:1 95:1 102:1 106:1 117:1 122:1 0 3:1 9:1 20:1 21:1 24:1 34:1 36:1 39:1 51:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 117:1 120:1 0 1:1 10:1 19:1 21:1 24:1 34:1 36:1 39:1 51:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 117:1 120:1 0 3:1 10:1 19:1 21:1 23:1 34:1 36:1 39:1 45:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 116:1 120:1 0 3:1 7:1 19:1 22:1 29:1 34:1 37:1 39:1 44:1 54:1 58:1 62:1 69:1 77:1 86:1 88:1 92:1 95:1 98:1 105:1 117:1 120:1 0 4:1 9:1 11:1 21:1 24:1 34:1 36:1 39:1 42:1 53:1 60:1 65:1 67:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 119:1 123:1 0 1:1 9:1 19:1 21:1 24:1 34:1 36:1 39:1 42:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 116:1 120:1 0 1:1 9:1 19:1 21:1 23:1 34:1 36:1 39:1 51:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 116:1 120:1 0 3:1 10:1 20:1 21:1 23:1 34:1 36:1 39:1 51:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 117:1 120:1 0 1:1 10:1 19:1 21:1 24:1 34:1 36:1 39:1 51:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 116:1 122:1 0 3:1 7:1 20:1 21:1 23:1 34:1 37:1 40:1 42:1 54:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 0 4:1 9:1 20:1 21:1 23:1 34:1 36:1 39:1 42:1 53:1 60:1 65:1 67:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 117:1 120:1 0 1:1 10:1 19:1 21:1 24:1 34:1 36:1 39:1 42:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 116:1 122:1 0 3:1 10:1 20:1 21:1 23:1 34:1 36:1 39:1 45:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 117:1 120:1 0 3:1 9:1 20:1 21:1 23:1 34:1 36:1 39:1 41:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 117:1 120:1 0 6:1 7:1 14:1 22:1 29:1 34:1 36:1 40:1 48:1 53:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 118:1 124:1 0 3:1 10:1 19:1 21:1 23:1 34:1 36:1 39:1 42:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 116:1 122:1 1 3:1 10:1 19:1 21:1 30:1 34:1 36:1 40:1 41:1 53:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 117:1 120:1 0 3:1 9:1 20:1 21:1 23:1 34:1 36:1 39:1 42:1 53:1 60:1 65:1 67:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 117:1 120:1 0 4:1 7:1 19:1 21:1 23:1 34:1 37:1 40:1 48:1 54:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 0 3:1 9:1 19:1 21:1 24:1 34:1 36:1 39:1 45:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 116:1 122:1 0 1:1 9:1 19:1 21:1 24:1 34:1 36:1 39:1 42:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 117:1 120:1 0 3:1 10:1 19:1 21:1 23:1 34:1 36:1 39:1 45:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 116:1 120:1 1 3:1 9:1 11:1 21:1 30:1 34:1 36:1 40:1 48:1 53:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 118:1 124:1 0 3:1 7:1 14:1 22:1 29:1 34:1 36:1 40:1 42:1 53:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 119:1 124:1 1 3:1 9:1 19:1 21:1 30:1 34:1 36:1 40:1 41:1 53:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 117:1 124:1 0 3:1 9:1 20:1 21:1 24:1 34:1 36:1 39:1 42:1 53:1 60:1 65:1 67:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 119:1 123:1 0 4:1 7:1 11:1 22:1 29:1 34:1 36:1 40:1 41:1 53:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 118:1 124:1 0 3:1 9:1 19:1 21:1 24:1 34:1 36:1 39:1 51:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 117:1 120:1 0 1:1 9:1 20:1 21:1 24:1 34:1 36:1 39:1 42:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 117:1 120:1 0 3:1 9:1 20:1 21:1 24:1 34:1 36:1 39:1 42:1 53:1 60:1 65:1 67:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 117:1 120:1 0 4:1 9:1 20:1 21:1 23:1 34:1 36:1 39:1 51:1 53:1 60:1 65:1 67:1 77:1 86:1 88:1 92:1 95:1 
102:1 105:1 119:1 123:1 0 3:1 10:1 19:1 21:1 24:1 34:1 36:1 39:1 51:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 117:1 120:1 0 3:1 10:1 19:1 21:1 23:1 34:1 36:1 39:1 51:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 116:1 122:1 1 3:1 10:1 19:1 21:1 30:1 34:1 36:1 40:1 42:1 53:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 118:1 124:1 0 4:1 7:1 19:1 21:1 23:1 34:1 37:1 40:1 48:1 54:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 111:1 118:1 126:1 0 3:1 10:1 19:1 21:1 24:1 34:1 36:1 39:1 51:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 117:1 120:1 0 3:1 9:1 19:1 21:1 23:1 34:1 36:1 39:1 42:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 117:1 122:1 0 4:1 9:1 20:1 21:1 24:1 34:1 36:1 39:1 51:1 53:1 60:1 65:1 67:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 119:1 123:1 0 3:1 10:1 11:1 22:1 29:1 34:1 37:1 39:1 48:1 54:1 58:1 62:1 69:1 77:1 86:1 88:1 92:1 95:1 98:1 105:1 117:1 120:1 0 4:1 9:1 20:1 21:1 23:1 34:1 36:1 39:1 51:1 53:1 60:1 65:1 67:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 119:1 120:1 1 3:1 10:1 11:1 21:1 30:1 34:1 36:1 40:1 48:1 53:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 117:1 120:1 0 1:1 9:1 20:1 21:1 24:1 34:1 36:1 39:1 51:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 117:1 122:1 0 1:1 10:1 19:1 21:1 24:1 34:1 36:1 39:1 42:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 117:1 120:1 0 1:1 9:1 19:1 21:1 24:1 34:1 36:1 39:1 41:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 116:1 122:1 0 4:1 7:1 11:1 22:1 29:1 34:1 36:1 40:1 41:1 53:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 118:1 124:1 0 3:1 10:1 19:1 21:1 23:1 34:1 36:1 39:1 51:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 116:1 122:1 0 1:1 9:1 19:1 21:1 23:1 34:1 36:1 39:1 45:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 116:1 122:1 1 3:1 9:1 19:1 21:1 30:1 34:1 36:1 40:1 41:1 53:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 118:1 120:1 0 3:1 9:1 19:1 21:1 23:1 34:1 36:1 39:1 42:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 117:1 122:1 0 4:1 7:1 19:1 21:1 24:1 34:1 37:1 40:1 51:1 54:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 111:1 118:1 126:1 0 4:1 7:1 14:1 22:1 29:1 34:1 36:1 40:1 45:1 53:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 118:1 124:1 0 4:1 10:1 14:1 22:1 29:1 34:1 37:1 39:1 48:1 54:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 98:1 105:1 114:1 120:1 0 3:1 9:1 20:1 21:1 23:1 34:1 36:1 39:1 45:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 117:1 122:1 0 1:1 10:1 19:1 21:1 23:1 34:1 36:1 39:1 41:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 117:1 122:1 1 4:1 10:1 11:1 21:1 30:1 34:1 36:1 40:1 51:1 53:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 118:1 124:1 0 3:1 9:1 19:1 21:1 24:1 34:1 36:1 39:1 41:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 117:1 120:1 0 3:1 9:1 11:1 21:1 23:1 34:1 36:1 39:1 51:1 53:1 60:1 65:1 67:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 117:1 123:1 0 4:1 9:1 11:1 21:1 23:1 34:1 36:1 39:1 51:1 53:1 60:1 65:1 67:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 119:1 120:1 0 4:1 7:1 11:1 22:1 29:1 34:1 37:1 39:1 44:1 54:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 98:1 105:1 117:1 120:1 1 3:1 9:1 19:1 21:1 30:1 34:1 36:1 40:1 51:1 53:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 117:1 120:1 0 1:1 9:1 20:1 21:1 23:1 34:1 36:1 39:1 42:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 116:1 122:1 0 6:1 7:1 11:1 22:1 29:1 34:1 36:1 40:1 48:1 53:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 
118:1 124:1 0 4:1 9:1 11:1 21:1 23:1 34:1 36:1 39:1 42:1 53:1 60:1 65:1 67:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 117:1 120:1 0 1:1 9:1 20:1 21:1 23:1 34:1 36:1 39:1 45:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 117:1 122:1 0 1:1 9:1 19:1 21:1 23:1 34:1 36:1 39:1 45:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 117:1 120:1 0 3:1 9:1 11:1 21:1 24:1 34:1 36:1 39:1 42:1 53:1 60:1 65:1 67:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 119:1 123:1 0 4:1 7:1 14:1 22:1 29:1 34:1 36:1 40:1 48:1 53:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 119:1 124:1 0 1:1 9:1 20:1 21:1 24:1 34:1 36:1 39:1 42:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 117:1 120:1 0 3:1 10:1 20:1 21:1 24:1 34:1 36:1 39:1 42:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 117:1 122:1 0 1:1 9:1 19:1 21:1 24:1 34:1 36:1 39:1 41:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 117:1 120:1 0 3:1 10:1 19:1 21:1 23:1 34:1 36:1 39:1 45:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 117:1 122:1 0 1:1 10:1 20:1 21:1 23:1 34:1 36:1 39:1 45:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 116:1 122:1 0 3:1 10:1 20:1 21:1 23:1 34:1 36:1 39:1 41:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 117:1 122:1 0 4:1 10:1 20:1 21:1 23:1 34:1 37:1 40:1 48:1 54:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 0 1:1 9:1 19:1 21:1 23:1 34:1 36:1 39:1 51:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 117:1 120:1 0 3:1 10:1 20:1 21:1 24:1 34:1 37:1 40:1 51:1 54:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 0 1:1 9:1 19:1 21:1 23:1 34:1 36:1 39:1 42:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 117:1 120:1 0 3:1 9:1 19:1 21:1 24:1 34:1 36:1 39:1 45:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 117:1 120:1 0 3:1 7:1 11:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 73:1 84:1 88:1 92:1 95:1 102:1 106:1 119:1 126:1 0 1:1 9:1 19:1 21:1 24:1 34:1 36:1 39:1 41:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 117:1 122:1 0 3:1 10:1 20:1 21:1 24:1 34:1 36:1 39:1 45:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 116:1 122:1 0 4:1 9:1 11:1 21:1 23:1 34:1 36:1 39:1 48:1 53:1 60:1 65:1 67:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 119:1 120:1 0 3:1 7:1 19:1 22:1 29:1 34:1 37:1 39:1 42:1 54:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 98:1 106:1 117:1 120:1 0 3:1 10:1 20:1 21:1 24:1 34:1 36:1 39:1 51:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 117:1 120:1 0 3:1 7:1 19:1 21:1 23:1 34:1 37:1 40:1 48:1 54:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 0 1:1 10:1 20:1 21:1 23:1 34:1 36:1 39:1 45:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 116:1 120:1 0 3:1 10:1 20:1 21:1 23:1 34:1 37:1 40:1 51:1 54:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 0 3:1 9:1 19:1 21:1 23:1 34:1 36:1 39:1 41:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 116:1 122:1 0 3:1 7:1 19:1 21:1 24:1 34:1 37:1 40:1 42:1 54:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 0 4:1 10:1 19:1 21:1 24:1 34:1 37:1 40:1 51:1 54:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 0 3:1 9:1 20:1 21:1 24:1 34:1 36:1 39:1 41:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 117:1 122:1 0 4:1 7:1 20:1 21:1 23:1 34:1 37:1 40:1 51:1 54:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 111:1 118:1 126:1 0 3:1 10:1 19:1 21:1 23:1 34:1 36:1 39:1 51:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 117:1 
120:1 0 3:1 9:1 11:1 21:1 24:1 34:1 36:1 39:1 48:1 53:1 60:1 65:1 67:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 117:1 123:1 0 1:1 9:1 19:1 21:1 24:1 34:1 36:1 39:1 41:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 116:1 120:1 1 3:1 9:1 11:1 21:1 30:1 34:1 36:1 40:1 42:1 53:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 118:1 124:1 0 1:1 9:1 19:1 21:1 23:1 34:1 36:1 39:1 45:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 116:1 120:1 1 3:1 9:1 11:1 21:1 30:1 34:1 36:1 40:1 51:1 53:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 118:1 124:1 0 1:1 10:1 19:1 21:1 24:1 34:1 36:1 39:1 42:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 117:1 122:1 0 1:1 9:1 20:1 21:1 24:1 34:1 36:1 39:1 51:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 116:1 122:1 0 1:1 9:1 20:1 21:1 23:1 34:1 36:1 39:1 42:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 117:1 120:1 0 3:1 9:1 19:1 21:1 24:1 34:1 36:1 39:1 45:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 116:1 120:1 0 3:1 7:1 11:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 75:1 86:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 0 1:1 10:1 20:1 21:1 24:1 34:1 36:1 39:1 42:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 117:1 120:1 0 3:1 7:1 14:1 22:1 29:1 34:1 36:1 40:1 45:1 53:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 118:1 124:1 0 3:1 9:1 11:1 21:1 24:1 34:1 36:1 39:1 42:1 53:1 60:1 65:1 67:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 119:1 120:1 0 3:1 10:1 19:1 21:1 23:1 34:1 36:1 39:1 41:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 117:1 120:1 0 1:1 9:1 19:1 21:1 23:1 34:1 36:1 39:1 42:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 116:1 120:1 0 3:1 10:1 20:1 21:1 23:1 34:1 36:1 39:1 45:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 117:1 122:1 0 4:1 7:1 20:1 21:1 23:1 34:1 37:1 40:1 48:1 54:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 0 1:1 9:1 20:1 21:1 24:1 34:1 36:1 39:1 45:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 117:1 120:1 0 3:1 7:1 11:1 22:1 29:1 34:1 37:1 39:1 42:1 54:1 58:1 65:1 66:1 77:1 86:1 88:1 92:1 95:1 98:1 106:1 114:1 120:1 0 4:1 10:1 14:1 22:1 29:1 34:1 37:1 39:1 44:1 54:1 58:1 62:1 66:1 77:1 86:1 88:1 92:1 95:1 98:1 106:1 114:1 120:1 0 4:1 9:1 11:1 21:1 23:1 34:1 36:1 39:1 48:1 53:1 60:1 65:1 67:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 117:1 123:1 0 1:1 10:1 20:1 21:1 23:1 34:1 36:1 39:1 41:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 117:1 122:1 0 6:1 7:1 14:1 22:1 29:1 34:1 36:1 40:1 45:1 53:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 118:1 124:1 0 4:1 9:1 11:1 21:1 23:1 34:1 36:1 39:1 48:1 53:1 60:1 65:1 67:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 119:1 123:1 1 3:1 9:1 11:1 21:1 30:1 34:1 36:1 40:1 41:1 53:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 117:1 124:1 0 3:1 9:1 20:1 21:1 23:1 34:1 36:1 39:1 48:1 53:1 60:1 65:1 67:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 119:1 123:1 0 3:1 9:1 20:1 21:1 23:1 34:1 36:1 39:1 51:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 117:1 120:1 0 3:1 7:1 11:1 22:1 29:1 34:1 37:1 39:1 48:1 54:1 58:1 62:1 66:1 77:1 86:1 88:1 92:1 95:1 98:1 105:1 114:1 120:1 0 4:1 10:1 11:1 22:1 29:1 34:1 37:1 39:1 41:1 54:1 58:1 62:1 69:1 77:1 86:1 88:1 92:1 95:1 98:1 105:1 114:1 120:1 0 4:1 9:1 11:1 21:1 23:1 34:1 36:1 39:1 42:1 53:1 60:1 65:1 67:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 117:1 123:1 0 4:1 7:1 11:1 22:1 29:1 34:1 36:1 40:1 48:1 53:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 118:1 124:1 0 3:1 9:1 
20:1 21:1 23:1 34:1 36:1 39:1 51:1 53:1 60:1 65:1 67:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 117:1 123:1 0 3:1 7:1 20:1 21:1 23:1 34:1 37:1 40:1 48:1 54:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 111:1 118:1 126:1 0 1:1 9:1 20:1 21:1 23:1 34:1 36:1 39:1 45:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 116:1 120:1 0 3:1 9:1 20:1 21:1 23:1 34:1 36:1 39:1 51:1 53:1 60:1 65:1 67:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 117:1 120:1 0 3:1 10:1 20:1 21:1 23:1 34:1 36:1 39:1 51:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 117:1 122:1 0 1:1 10:1 19:1 21:1 24:1 34:1 36:1 39:1 42:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 116:1 120:1 0 1:1 9:1 20:1 21:1 23:1 34:1 36:1 39:1 41:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 117:1 120:1 1 3:1 9:1 11:1 21:1 30:1 34:1 36:1 40:1 48:1 53:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 117:1 124:1 0 3:1 10:1 19:1 22:1 29:1 34:1 37:1 39:1 41:1 54:1 58:1 62:1 69:1 77:1 86:1 88:1 92:1 95:1 98:1 106:1 117:1 120:1 0 3:1 9:1 19:1 21:1 23:1 34:1 36:1 39:1 51:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 117:1 122:1 0 1:1 10:1 19:1 21:1 23:1 34:1 36:1 39:1 45:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 116:1 120:1 0 3:1 7:1 11:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 73:1 86:1 88:1 92:1 95:1 102:1 106:1 119:1 126:1 1 3:1 10:1 11:1 21:1 30:1 34:1 36:1 40:1 51:1 53:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 117:1 124:1 0 3:1 9:1 11:1 21:1 24:1 34:1 36:1 39:1 51:1 53:1 60:1 65:1 67:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 119:1 123:1 0 3:1 10:1 11:1 22:1 29:1 34:1 37:1 39:1 42:1 54:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 98:1 105:1 114:1 120:1 0 1:1 9:1 20:1 21:1 24:1 34:1 36:1 39:1 42:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 116:1 122:1 0 3:1 10:1 20:1 21:1 23:1 34:1 36:1 39:1 51:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 116:1 122:1 0 1:1 10:1 20:1 21:1 23:1 34:1 36:1 39:1 42:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 117:1 122:1 0 3:1 10:1 20:1 21:1 24:1 34:1 36:1 39:1 45:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 116:1 122:1 0 1:1 9:1 19:1 21:1 23:1 34:1 36:1 39:1 42:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 116:1 120:1 0 3:1 7:1 14:1 22:1 29:1 34:1 36:1 40:1 41:1 53:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 118:1 124:1 0 4:1 9:1 11:1 21:1 23:1 34:1 36:1 39:1 51:1 53:1 60:1 65:1 67:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 117:1 120:1 0 3:1 7:1 14:1 22:1 29:1 34:1 37:1 39:1 44:1 54:1 58:1 62:1 66:1 77:1 86:1 88:1 92:1 95:1 98:1 106:1 114:1 120:1 0 3:1 10:1 19:1 21:1 24:1 34:1 36:1 39:1 41:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 116:1 122:1 0 3:1 9:1 20:1 21:1 24:1 34:1 36:1 39:1 51:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 117:1 122:1 0 3:1 7:1 14:1 22:1 29:1 34:1 36:1 40:1 42:1 53:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 118:1 124:1 0 1:1 9:1 19:1 21:1 24:1 34:1 36:1 39:1 51:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 116:1 120:1 0 1:1 9:1 19:1 21:1 23:1 34:1 36:1 39:1 42:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 116:1 122:1 0 1:1 10:1 20:1 21:1 24:1 34:1 36:1 39:1 51:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 117:1 122:1 0 3:1 9:1 20:1 21:1 24:1 34:1 36:1 39:1 42:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 117:1 122:1 0 1:1 9:1 20:1 21:1 24:1 34:1 36:1 39:1 42:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 116:1 120:1 0 3:1 10:1 19:1 21:1 
23:1 34:1 36:1 39:1 45:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 117:1 122:1 0 3:1 10:1 19:1 21:1 23:1 34:1 36:1 39:1 45:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 117:1 120:1 0 3:1 9:1 11:1 21:1 24:1 34:1 36:1 39:1 51:1 53:1 60:1 65:1 67:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 119:1 123:1 0 3:1 10:1 19:1 21:1 23:1 34:1 36:1 39:1 51:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 117:1 122:1 0 3:1 10:1 11:1 22:1 29:1 34:1 37:1 39:1 42:1 54:1 58:1 62:1 66:1 77:1 86:1 88:1 92:1 95:1 98:1 105:1 117:1 120:1 0 3:1 9:1 20:1 21:1 24:1 34:1 36:1 39:1 42:1 53:1 60:1 65:1 67:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 119:1 120:1 1 3:1 9:1 11:1 21:1 30:1 34:1 36:1 40:1 41:1 53:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 118:1 120:1 0 1:1 9:1 20:1 21:1 23:1 34:1 36:1 39:1 51:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 116:1 120:1 0 3:1 7:1 19:1 21:1 24:1 34:1 37:1 40:1 48:1 54:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 111:1 118:1 126:1 1 3:1 10:1 11:1 21:1 30:1 34:1 36:1 40:1 51:1 53:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 117:1 124:1 0 3:1 9:1 20:1 21:1 24:1 34:1 36:1 39:1 41:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 116:1 120:1 0 4:1 7:1 14:1 22:1 29:1 34:1 37:1 39:1 44:1 54:1 58:1 62:1 66:1 77:1 86:1 88:1 92:1 95:1 98:1 106:1 114:1 120:1 0 3:1 9:1 19:1 21:1 24:1 34:1 36:1 39:1 42:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 117:1 120:1 0 1:1 9:1 19:1 21:1 23:1 34:1 36:1 39:1 51:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 117:1 122:1 1 3:1 9:1 11:1 21:1 30:1 34:1 36:1 40:1 48:1 53:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 117:1 124:1 0 3:1 10:1 19:1 21:1 24:1 34:1 36:1 39:1 42:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 116:1 122:1 0 3:1 10:1 19:1 21:1 23:1 34:1 37:1 40:1 42:1 54:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 0 4:1 9:1 11:1 21:1 23:1 34:1 36:1 39:1 42:1 53:1 60:1 65:1 67:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 119:1 123:1 0 3:1 10:1 19:1 22:1 29:1 34:1 37:1 39:1 48:1 54:1 58:1 62:1 66:1 77:1 86:1 88:1 92:1 95:1 98:1 105:1 114:1 120:1 0 4:1 7:1 14:1 22:1 29:1 34:1 36:1 40:1 42:1 53:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 118:1 124:1 0 3:1 9:1 19:1 21:1 23:1 34:1 36:1 39:1 45:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 117:1 120:1 0 3:1 9:1 20:1 21:1 24:1 34:1 36:1 39:1 42:1 53:1 60:1 65:1 67:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 119:1 120:1 0 1:1 10:1 20:1 21:1 24:1 34:1 36:1 39:1 45:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 117:1 120:1 0 1:1 10:1 20:1 21:1 24:1 34:1 36:1 39:1 51:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 117:1 120:1 0 4:1 10:1 19:1 21:1 24:1 34:1 37:1 40:1 42:1 54:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 0 1:1 10:1 19:1 21:1 23:1 34:1 36:1 39:1 51:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 116:1 122:1 0 3:1 7:1 20:1 21:1 23:1 34:1 37:1 40:1 42:1 54:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 111:1 118:1 126:1 0 1:1 9:1 20:1 21:1 24:1 34:1 36:1 39:1 41:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 116:1 122:1 0 3:1 9:1 11:1 21:1 23:1 34:1 36:1 39:1 48:1 53:1 60:1 65:1 67:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 119:1 123:1 0 3:1 9:1 19:1 21:1 24:1 34:1 36:1 39:1 42:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 117:1 122:1 0 3:1 9:1 19:1 21:1 24:1 34:1 36:1 39:1 45:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 117:1 122:1 0 3:1 9:1 20:1 21:1 23:1 34:1 
36:1 39:1 48:1 53:1 60:1 65:1 67:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 119:1 123:1 0 3:1 9:1 19:1 21:1 24:1 34:1 36:1 39:1 42:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 116:1 122:1 0 3:1 10:1 20:1 21:1 24:1 34:1 36:1 39:1 51:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 116:1 122:1 0 4:1 9:1 11:1 21:1 24:1 34:1 36:1 39:1 42:1 53:1 60:1 65:1 67:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 117:1 120:1 0 1:1 10:1 19:1 21:1 24:1 34:1 36:1 39:1 51:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 117:1 122:1 0 3:1 7:1 11:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 73:1 84:1 88:1 92:1 95:1 102:1 106:1 119:1 126:1 0 3:1 9:1 19:1 21:1 24:1 34:1 36:1 39:1 41:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 117:1 122:1 0 1:1 10:1 19:1 21:1 24:1 34:1 36:1 39:1 45:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 116:1 120:1 0 1:1 10:1 20:1 21:1 24:1 34:1 36:1 39:1 45:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 116:1 122:1 0 3:1 7:1 20:1 21:1 24:1 34:1 37:1 40:1 48:1 54:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 0 3:1 10:1 14:1 22:1 29:1 34:1 37:1 39:1 44:1 54:1 58:1 65:1 66:1 77:1 86:1 88:1 92:1 95:1 98:1 105:1 114:1 120:1 0 1:1 10:1 19:1 21:1 23:1 34:1 36:1 39:1 41:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 116:1 120:1 0 4:1 9:1 20:1 21:1 23:1 34:1 36:1 39:1 42:1 53:1 60:1 65:1 67:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 119:1 123:1 0 4:1 9:1 11:1 21:1 23:1 34:1 36:1 39:1 51:1 53:1 60:1 65:1 67:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 119:1 123:1 0 3:1 10:1 19:1 21:1 23:1 34:1 36:1 39:1 41:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 117:1 122:1 0 4:1 9:1 20:1 21:1 24:1 34:1 36:1 39:1 42:1 53:1 60:1 65:1 67:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 117:1 120:1 0 1:1 10:1 19:1 21:1 23:1 34:1 36:1 39:1 45:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 117:1 120:1 0 3:1 9:1 20:1 21:1 23:1 34:1 36:1 39:1 51:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 116:1 122:1 0 3:1 7:1 11:1 22:1 29:1 34:1 36:1 40:1 41:1 53:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 118:1 124:1 0 4:1 9:1 11:1 21:1 23:1 34:1 36:1 39:1 48:1 53:1 60:1 65:1 67:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 119:1 120:1 0 3:1 10:1 19:1 21:1 23:1 34:1 36:1 39:1 45:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 116:1 122:1 0 3:1 10:1 20:1 21:1 24:1 34:1 36:1 39:1 45:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 116:1 120:1 0 1:1 9:1 20:1 21:1 24:1 34:1 36:1 39:1 45:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 116:1 120:1 0 4:1 9:1 20:1 21:1 23:1 34:1 36:1 39:1 51:1 53:1 60:1 65:1 67:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 117:1 123:1 0 3:1 9:1 19:1 21:1 23:1 34:1 36:1 39:1 51:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 116:1 122:1 0 4:1 7:1 19:1 21:1 23:1 34:1 37:1 40:1 42:1 54:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 111:1 118:1 126:1 0 6:1 7:1 14:1 22:1 29:1 34:1 36:1 40:1 41:1 53:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 119:1 124:1 0 4:1 10:1 20:1 21:1 24:1 34:1 37:1 40:1 42:1 54:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 0 3:1 10:1 19:1 21:1 23:1 34:1 37:1 40:1 48:1 54:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 0 1:1 9:1 20:1 21:1 24:1 34:1 36:1 39:1 42:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 117:1 122:1 0 1:1 9:1 20:1 21:1 23:1 34:1 36:1 39:1 41:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 117:1 122:1 0 1:1 9:1 20:1 21:1 24:1 34:1 36:1 39:1 
45:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 117:1 120:1 0 4:1 7:1 14:1 22:1 29:1 34:1 36:1 40:1 45:1 53:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 119:1 124:1 0 4:1 10:1 19:1 21:1 23:1 34:1 37:1 40:1 51:1 54:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 0 3:1 9:1 19:1 21:1 24:1 34:1 36:1 39:1 51:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 117:1 122:1 0 6:1 7:1 11:1 22:1 29:1 34:1 36:1 40:1 48:1 53:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 119:1 124:1 0 3:1 9:1 11:1 21:1 24:1 34:1 36:1 39:1 42:1 53:1 60:1 65:1 67:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 117:1 123:1 0 3:1 9:1 11:1 21:1 23:1 34:1 36:1 39:1 42:1 53:1 60:1 65:1 67:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 117:1 123:1 1 4:1 10:1 11:1 21:1 30:1 34:1 36:1 40:1 48:1 53:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 117:1 120:1 1 3:1 10:1 11:1 21:1 30:1 34:1 36:1 40:1 41:1 53:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 118:1 124:1 0 4:1 9:1 11:1 21:1 24:1 34:1 36:1 39:1 51:1 53:1 60:1 65:1 67:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 117:1 123:1 0 1:1 10:1 20:1 21:1 23:1 34:1 36:1 39:1 41:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 116:1 122:1 0 4:1 7:1 19:1 22:1 29:1 34:1 37:1 39:1 44:1 54:1 58:1 62:1 69:1 77:1 86:1 88:1 92:1 95:1 98:1 106:1 114:1 120:1 0 3:1 9:1 19:1 21:1 23:1 34:1 36:1 39:1 42:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 117:1 120:1 0 1:1 10:1 20:1 21:1 24:1 34:1 36:1 39:1 51:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 116:1 122:1 0 1:1 9:1 20:1 21:1 23:1 34:1 36:1 39:1 45:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 116:1 120:1 0 1:1 9:1 20:1 21:1 23:1 34:1 36:1 39:1 51:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 117:1 120:1 1 3:1 10:1 11:1 21:1 30:1 34:1 36:1 40:1 42:1 53:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 118:1 120:1 0 3:1 10:1 14:1 22:1 29:1 34:1 37:1 39:1 48:1 54:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 98:1 106:1 117:1 120:1 0 4:1 10:1 11:1 22:1 29:1 34:1 37:1 39:1 48:1 54:1 58:1 62:1 69:1 77:1 86:1 88:1 92:1 95:1 98:1 106:1 114:1 120:1 0 4:1 7:1 14:1 22:1 29:1 34:1 36:1 40:1 48:1 53:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 118:1 124:1 0 4:1 7:1 11:1 22:1 29:1 34:1 36:1 40:1 41:1 53:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 119:1 124:1 0 1:1 9:1 20:1 21:1 23:1 34:1 36:1 39:1 41:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 117:1 120:1 0 3:1 7:1 19:1 21:1 23:1 34:1 37:1 40:1 42:1 54:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 111:1 118:1 126:1 0 6:1 7:1 14:1 22:1 29:1 34:1 36:1 40:1 42:1 53:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 118:1 124:1 0 3:1 7:1 19:1 22:1 29:1 34:1 37:1 39:1 44:1 54:1 58:1 62:1 69:1 77:1 86:1 88:1 92:1 95:1 98:1 106:1 114:1 120:1 0 3:1 9:1 11:1 21:1 24:1 34:1 36:1 39:1 42:1 53:1 60:1 65:1 67:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 119:1 123:1 0 1:1 10:1 20:1 21:1 23:1 34:1 36:1 39:1 41:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 116:1 120:1 0 3:1 9:1 19:1 21:1 24:1 34:1 36:1 39:1 41:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 117:1 120:1 0 3:1 9:1 20:1 21:1 23:1 34:1 36:1 39:1 41:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 116:1 120:1 0 3:1 10:1 19:1 21:1 24:1 34:1 36:1 39:1 51:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 116:1 122:1 0 1:1 10:1 19:1 21:1 24:1 34:1 36:1 39:1 42:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 117:1 120:1 0 4:1 9:1 20:1 21:1 23:1 34:1 36:1 39:1 48:1 53:1 
60:1 65:1 67:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 119:1 123:1 1 3:1 10:1 11:1 21:1 30:1 34:1 36:1 40:1 42:1 53:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 117:1 124:1 0 4:1 7:1 14:1 22:1 29:1 34:1 36:1 40:1 42:1 53:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 119:1 124:1 0 1:1 10:1 20:1 21:1 24:1 34:1 36:1 39:1 51:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 117:1 120:1 0 4:1 9:1 11:1 21:1 23:1 34:1 36:1 39:1 51:1 53:1 60:1 65:1 67:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 117:1 123:1 0 3:1 10:1 20:1 21:1 23:1 34:1 36:1 39:1 41:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 117:1 122:1 0 3:1 7:1 11:1 22:1 29:1 34:1 36:1 40:1 41:1 53:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 119:1 124:1 0 3:1 9:1 11:1 21:1 23:1 34:1 36:1 39:1 42:1 53:1 60:1 65:1 67:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 119:1 123:1 0 3:1 10:1 19:1 21:1 23:1 34:1 36:1 39:1 42:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 117:1 120:1 1 3:1 9:1 19:1 21:1 30:1 34:1 36:1 40:1 42:1 53:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 117:1 120:1 0 4:1 9:1 11:1 21:1 24:1 34:1 36:1 39:1 51:1 53:1 60:1 65:1 67:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 119:1 123:1 1 4:1 10:1 11:1 21:1 30:1 34:1 36:1 40:1 41:1 53:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 117:1 120:1 0 3:1 7:1 19:1 22:1 29:1 34:1 37:1 39:1 41:1 54:1 58:1 62:1 69:1 77:1 86:1 88:1 92:1 95:1 98:1 105:1 117:1 120:1 0 6:1 7:1 11:1 22:1 29:1 34:1 36:1 40:1 48:1 53:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 118:1 124:1 0 4:1 10:1 19:1 21:1 23:1 34:1 37:1 40:1 48:1 54:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 111:1 118:1 126:1 0 3:1 10:1 20:1 21:1 23:1 34:1 36:1 39:1 42:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 117:1 122:1 0 3:1 9:1 20:1 21:1 23:1 34:1 36:1 39:1 48:1 53:1 60:1 65:1 67:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 117:1 120:1 0 4:1 7:1 11:1 22:1 29:1 34:1 37:1 39:1 42:1 54:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 98:1 105:1 114:1 120:1 0 3:1 10:1 19:1 21:1 23:1 34:1 36:1 39:1 51:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 117:1 122:1 0 4:1 7:1 14:1 22:1 29:1 34:1 36:1 40:1 45:1 53:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 118:1 124:1 0 3:1 10:1 20:1 21:1 23:1 34:1 36:1 39:1 45:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 116:1 122:1 0 3:1 7:1 14:1 22:1 29:1 34:1 37:1 39:1 44:1 54:1 58:1 62:1 69:1 77:1 86:1 88:1 92:1 95:1 98:1 106:1 117:1 120:1 0 3:1 9:1 20:1 21:1 24:1 34:1 36:1 39:1 48:1 53:1 60:1 65:1 67:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 117:1 123:1 0 4:1 10:1 19:1 21:1 24:1 34:1 37:1 40:1 48:1 54:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 0 3:1 10:1 20:1 21:1 24:1 34:1 36:1 39:1 41:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 117:1 120:1 0 4:1 9:1 20:1 21:1 23:1 34:1 36:1 39:1 48:1 53:1 60:1 65:1 67:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 119:1 123:1 1 3:1 9:1 11:1 21:1 30:1 34:1 36:1 40:1 41:1 53:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 118:1 124:1 0 3:1 9:1 20:1 21:1 23:1 34:1 36:1 39:1 45:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 116:1 122:1 0 1:1 10:1 20:1 21:1 23:1 34:1 36:1 39:1 45:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 117:1 120:1 0 3:1 10:1 20:1 21:1 23:1 34:1 37:1 40:1 51:1 54:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 111:1 118:1 126:1 0 3:1 10:1 20:1 21:1 23:1 34:1 37:1 40:1 42:1 54:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 111:1 118:1 126:1 0 3:1 7:1 19:1 22:1 29:1 34:1 37:1 39:1 42:1 54:1 58:1 62:1 
69:1 77:1 86:1 88:1 92:1 95:1 98:1 105:1 117:1 120:1 0 1:1 10:1 19:1 21:1 23:1 34:1 36:1 39:1 51:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 117:1 122:1 1 3:1 9:1 19:1 21:1 30:1 34:1 36:1 40:1 51:1 53:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 118:1 120:1 0 3:1 9:1 20:1 21:1 23:1 34:1 36:1 39:1 45:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 116:1 120:1 0 4:1 10:1 14:1 22:1 29:1 34:1 37:1 39:1 41:1 54:1 58:1 65:1 66:1 77:1 86:1 88:1 92:1 95:1 98:1 105:1 117:1 120:1 1 3:1 10:1 11:1 21:1 30:1 34:1 36:1 40:1 42:1 53:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 117:1 120:1 0 1:1 9:1 19:1 21:1 23:1 34:1 36:1 39:1 51:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 116:1 122:1 0 3:1 10:1 19:1 21:1 23:1 34:1 36:1 39:1 41:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 116:1 122:1 0 4:1 7:1 20:1 21:1 24:1 34:1 37:1 40:1 42:1 54:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 0 1:1 9:1 19:1 21:1 24:1 34:1 36:1 39:1 51:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 116:1 120:1 0 3:1 9:1 11:1 21:1 23:1 34:1 36:1 39:1 51:1 53:1 60:1 65:1 67:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 119:1 120:1 0 3:1 10:1 20:1 21:1 24:1 34:1 36:1 39:1 51:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 117:1 120:1 0 3:1 7:1 11:1 22:1 29:1 34:1 36:1 40:1 48:1 53:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 118:1 124:1 0 3:1 10:1 20:1 21:1 24:1 34:1 36:1 39:1 42:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 117:1 120:1 1 3:1 10:1 11:1 21:1 30:1 34:1 36:1 40:1 51:1 53:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 117:1 120:1 0 3:1 10:1 19:1 21:1 23:1 34:1 36:1 39:1 45:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 116:1 122:1 0 3:1 10:1 20:1 21:1 23:1 34:1 36:1 39:1 51:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 117:1 122:1 0 3:1 9:1 20:1 21:1 23:1 34:1 36:1 39:1 42:1 53:1 60:1 65:1 67:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 119:1 120:1 0 4:1 9:1 20:1 21:1 23:1 34:1 36:1 39:1 51:1 53:1 60:1 65:1 67:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 117:1 120:1 0 3:1 9:1 19:1 21:1 23:1 34:1 36:1 39:1 41:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 117:1 122:1 0 4:1 7:1 11:1 22:1 29:1 34:1 36:1 40:1 42:1 53:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 119:1 124:1 0 3:1 9:1 19:1 21:1 23:1 34:1 36:1 39:1 42:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 116:1 122:1 1 3:1 10:1 19:1 21:1 30:1 34:1 36:1 40:1 41:1 53:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 117:1 124:1 0 3:1 10:1 14:1 22:1 29:1 34:1 37:1 39:1 44:1 54:1 58:1 65:1 66:1 77:1 86:1 88:1 92:1 95:1 98:1 105:1 117:1 120:1 0 3:1 9:1 20:1 21:1 24:1 34:1 36:1 39:1 45:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 117:1 120:1 1 3:1 9:1 19:1 21:1 30:1 34:1 36:1 40:1 41:1 53:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 117:1 120:1 0 3:1 10:1 20:1 21:1 23:1 34:1 36:1 39:1 45:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 116:1 122:1 1 3:1 10:1 11:1 21:1 30:1 34:1 36:1 40:1 48:1 53:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 118:1 124:1 0 1:1 10:1 20:1 21:1 24:1 34:1 36:1 39:1 42:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 116:1 122:1 0 1:1 9:1 19:1 21:1 24:1 34:1 36:1 39:1 45:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 117:1 122:1 1 3:1 9:1 19:1 21:1 30:1 34:1 36:1 40:1 48:1 53:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 117:1 120:1 1 3:1 10:1 11:1 21:1 30:1 34:1 36:1 40:1 42:1 53:1 58:1 65:1 69:1 
77:1 86:1 88:1 92:1 95:1 102:1 105:1 117:1 124:1 0 3:1 10:1 14:1 22:1 29:1 34:1 37:1 39:1 42:1 54:1 58:1 65:1 66:1 77:1 86:1 88:1 92:1 95:1 98:1 106:1 114:1 120:1 0 3:1 9:1 11:1 21:1 24:1 34:1 36:1 39:1 51:1 53:1 60:1 65:1 67:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 119:1 120:1 0 4:1 7:1 19:1 22:1 29:1 34:1 37:1 39:1 41:1 54:1 58:1 62:1 66:1 77:1 86:1 88:1 92:1 95:1 98:1 106:1 114:1 120:1 0 4:1 9:1 11:1 21:1 24:1 34:1 36:1 39:1 42:1 53:1 60:1 65:1 67:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 117:1 123:1 0 1:1 9:1 19:1 21:1 24:1 34:1 36:1 39:1 51:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 117:1 122:1 0 1:1 10:1 19:1 21:1 23:1 34:1 36:1 39:1 42:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 116:1 120:1 0 3:1 10:1 14:1 22:1 29:1 34:1 37:1 39:1 42:1 54:1 58:1 65:1 66:1 77:1 86:1 88:1 92:1 95:1 98:1 106:1 117:1 120:1 0 4:1 9:1 11:1 21:1 23:1 34:1 36:1 39:1 42:1 53:1 60:1 65:1 67:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 119:1 120:1 0 4:1 7:1 14:1 22:1 29:1 34:1 36:1 40:1 42:1 53:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 118:1 124:1 0 4:1 10:1 11:1 22:1 29:1 34:1 37:1 39:1 44:1 54:1 58:1 62:1 69:1 77:1 86:1 88:1 92:1 95:1 98:1 106:1 114:1 120:1 0 4:1 10:1 19:1 22:1 29:1 34:1 37:1 39:1 48:1 54:1 58:1 65:1 66:1 77:1 86:1 88:1 92:1 95:1 98:1 105:1 114:1 120:1 1 3:1 10:1 19:1 21:1 30:1 34:1 36:1 40:1 41:1 53:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 117:1 124:1 0 3:1 9:1 20:1 21:1 24:1 34:1 36:1 39:1 48:1 53:1 60:1 65:1 67:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 117:1 120:1 0 1:1 10:1 19:1 21:1 24:1 34:1 36:1 39:1 45:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 117:1 120:1 0 4:1 7:1 14:1 22:1 29:1 34:1 37:1 39:1 48:1 54:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 98:1 105:1 114:1 120:1 0 1:1 9:1 19:1 21:1 24:1 34:1 36:1 39:1 45:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 117:1 120:1 0 1:1 10:1 20:1 21:1 23:1 34:1 36:1 39:1 42:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 116:1 122:1 0 6:1 7:1 14:1 22:1 29:1 34:1 36:1 40:1 42:1 53:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 119:1 124:1 0 4:1 9:1 20:1 21:1 24:1 34:1 36:1 39:1 48:1 53:1 60:1 65:1 67:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 117:1 120:1 0 1:1 10:1 19:1 21:1 24:1 34:1 36:1 39:1 51:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 116:1 120:1 0 3:1 7:1 19:1 21:1 24:1 34:1 37:1 40:1 42:1 54:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 111:1 118:1 126:1 0 4:1 9:1 20:1 21:1 23:1 34:1 36:1 39:1 42:1 53:1 60:1 65:1 67:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 117:1 123:1 0 4:1 7:1 14:1 22:1 29:1 34:1 36:1 40:1 48:1 53:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 119:1 124:1 0 3:1 10:1 19:1 21:1 24:1 34:1 36:1 39:1 45:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 117:1 122:1 0 4:1 7:1 20:1 21:1 24:1 34:1 37:1 40:1 51:1 54:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 111:1 118:1 126:1 0 6:1 7:1 14:1 22:1 29:1 34:1 36:1 40:1 48:1 53:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 119:1 124:1 0 1:1 10:1 19:1 21:1 23:1 34:1 36:1 39:1 41:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 116:1 120:1 0 4:1 7:1 11:1 22:1 29:1 34:1 37:1 39:1 42:1 54:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 98:1 106:1 117:1 120:1 0 1:1 9:1 20:1 21:1 23:1 34:1 36:1 39:1 41:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 116:1 122:1 0 3:1 7:1 11:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 77:1 82:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 0 3:1 9:1 11:1 21:1 24:1 34:1 36:1 39:1 42:1 53:1 60:1 65:1 67:1 77:1 86:1 88:1 
92:1 95:1 102:1 105:1 117:1 120:1 0 3:1 9:1 11:1 21:1 23:1 34:1 36:1 39:1 48:1 53:1 60:1 65:1 67:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 117:1 120:1 0 1:1 9:1 20:1 21:1 23:1 34:1 36:1 39:1 51:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 117:1 122:1 0 4:1 9:1 20:1 21:1 23:1 34:1 36:1 39:1 48:1 53:1 60:1 65:1 67:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 117:1 120:1 0 3:1 9:1 20:1 21:1 24:1 34:1 36:1 39:1 42:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 116:1 122:1 0 3:1 10:1 11:1 22:1 29:1 34:1 37:1 39:1 44:1 54:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 98:1 105:1 117:1 120:1 0 3:1 9:1 19:1 21:1 23:1 34:1 36:1 39:1 45:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 117:1 122:1 0 1:1 9:1 20:1 21:1 24:1 34:1 36:1 39:1 51:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 116:1 122:1 0 3:1 9:1 11:1 21:1 23:1 34:1 36:1 39:1 42:1 53:1 60:1 65:1 67:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 119:1 120:1 0 4:1 9:1 11:1 21:1 24:1 34:1 36:1 39:1 51:1 53:1 60:1 65:1 67:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 117:1 123:1 0 4:1 9:1 20:1 21:1 24:1 34:1 36:1 39:1 42:1 53:1 60:1 65:1 67:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 117:1 123:1 0 3:1 10:1 20:1 21:1 24:1 34:1 37:1 40:1 42:1 54:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 111:1 118:1 126:1 0 1:1 9:1 19:1 21:1 23:1 34:1 36:1 39:1 45:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 117:1 122:1 0 1:1 10:1 19:1 21:1 24:1 34:1 36:1 39:1 41:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 116:1 122:1 0 3:1 9:1 20:1 21:1 23:1 34:1 36:1 39:1 42:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 117:1 120:1 0 4:1 7:1 19:1 21:1 24:1 34:1 37:1 40:1 48:1 54:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 111:1 118:1 126:1 0 4:1 9:1 20:1 21:1 23:1 34:1 36:1 39:1 51:1 53:1 60:1 65:1 67:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 119:1 120:1 1 3:1 9:1 11:1 21:1 30:1 34:1 36:1 40:1 51:1 53:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 117:1 120:1 1 3:1 9:1 19:1 21:1 30:1 34:1 36:1 40:1 51:1 53:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 118:1 124:1 0 3:1 10:1 20:1 21:1 24:1 34:1 36:1 39:1 45:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 117:1 120:1 0 4:1 7:1 19:1 22:1 29:1 34:1 37:1 39:1 41:1 54:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 98:1 106:1 117:1 120:1 0 1:1 10:1 20:1 21:1 23:1 34:1 36:1 39:1 45:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 116:1 120:1 0 3:1 9:1 20:1 21:1 24:1 34:1 36:1 39:1 51:1 53:1 60:1 65:1 67:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 119:1 123:1 0 4:1 7:1 14:1 22:1 29:1 34:1 36:1 40:1 41:1 53:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 119:1 124:1 0 1:1 10:1 19:1 21:1 23:1 34:1 36:1 39:1 42:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 117:1 120:1 0 3:1 10:1 19:1 21:1 24:1 34:1 36:1 39:1 41:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 117:1 122:1 1 3:1 9:1 11:1 21:1 30:1 34:1 36:1 40:1 42:1 53:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 117:1 120:1 1 3:1 10:1 11:1 21:1 30:1 34:1 36:1 40:1 41:1 53:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 117:1 120:1 0 1:1 10:1 19:1 21:1 24:1 34:1 36:1 39:1 45:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 117:1 120:1 0 1:1 10:1 20:1 21:1 23:1 34:1 36:1 39:1 51:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 116:1 122:1 0 4:1 9:1 11:1 21:1 23:1 34:1 36:1 39:1 51:1 53:1 60:1 65:1 67:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 117:1 120:1 0 3:1 9:1 20:1 21:1 24:1 34:1 36:1 39:1 48:1 53:1 60:1 65:1 67:1 77:1 86:1 88:1 92:1 95:1 
102:1 106:1 119:1 120:1 0 3:1 9:1 19:1 21:1 23:1 34:1 36:1 39:1 51:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 116:1 120:1 0 3:1 10:1 20:1 21:1 23:1 34:1 36:1 39:1 41:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 117:1 120:1 0 4:1 7:1 11:1 22:1 29:1 34:1 37:1 39:1 48:1 54:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 98:1 105:1 114:1 120:1 0 3:1 7:1 20:1 21:1 24:1 34:1 37:1 40:1 51:1 54:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 111:1 118:1 126:1 0 4:1 9:1 20:1 21:1 24:1 34:1 36:1 39:1 51:1 53:1 60:1 65:1 67:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 117:1 123:1 0 3:1 9:1 11:1 21:1 24:1 34:1 36:1 39:1 42:1 53:1 60:1 65:1 67:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 119:1 120:1 0 3:1 9:1 20:1 21:1 24:1 34:1 36:1 39:1 51:1 53:1 60:1 65:1 67:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 119:1 123:1 0 3:1 10:1 19:1 21:1 24:1 34:1 37:1 40:1 42:1 54:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 0 3:1 7:1 20:1 21:1 23:1 34:1 37:1 40:1 51:1 54:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 111:1 118:1 126:1 0 1:1 10:1 20:1 21:1 23:1 34:1 36:1 39:1 51:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 116:1 120:1 0 3:1 7:1 11:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 75:1 86:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 0 3:1 9:1 19:1 21:1 23:1 34:1 36:1 39:1 45:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 116:1 122:1 0 1:1 9:1 19:1 21:1 23:1 34:1 36:1 39:1 42:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 117:1 122:1 0 3:1 9:1 20:1 21:1 24:1 34:1 36:1 39:1 51:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 116:1 122:1 0 3:1 10:1 19:1 21:1 23:1 34:1 36:1 39:1 41:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 117:1 120:1 0 1:1 10:1 20:1 21:1 23:1 34:1 36:1 39:1 51:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 117:1 120:1 0 1:1 9:1 19:1 21:1 24:1 34:1 36:1 39:1 41:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 117:1 120:1 0 3:1 9:1 11:1 21:1 23:1 34:1 36:1 39:1 48:1 53:1 60:1 65:1 67:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 117:1 123:1 0 4:1 10:1 14:1 22:1 29:1 34:1 37:1 39:1 41:1 54:1 58:1 62:1 69:1 77:1 86:1 88:1 92:1 95:1 98:1 106:1 114:1 120:1 1 3:1 9:1 11:1 21:1 30:1 34:1 36:1 40:1 48:1 53:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 117:1 120:1 0 1:1 9:1 19:1 21:1 23:1 34:1 36:1 39:1 42:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 117:1 120:1 0 1:1 10:1 20:1 21:1 23:1 34:1 36:1 39:1 51:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 117:1 122:1 1 3:1 10:1 11:1 21:1 30:1 34:1 36:1 40:1 48:1 53:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 118:1 124:1 0 4:1 10:1 11:1 22:1 29:1 34:1 37:1 39:1 42:1 54:1 58:1 62:1 69:1 77:1 86:1 88:1 92:1 95:1 98:1 106:1 114:1 120:1 1 3:1 10:1 11:1 21:1 30:1 34:1 36:1 40:1 51:1 53:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 117:1 120:1 0 4:1 7:1 14:1 22:1 29:1 34:1 37:1 39:1 44:1 54:1 58:1 65:1 66:1 77:1 86:1 88:1 92:1 95:1 98:1 106:1 114:1 120:1 0 3:1 9:1 20:1 21:1 24:1 34:1 36:1 39:1 42:1 53:1 60:1 65:1 67:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 117:1 123:1 0 4:1 9:1 20:1 21:1 24:1 34:1 36:1 39:1 48:1 53:1 60:1 65:1 67:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 117:1 123:1 0 4:1 9:1 20:1 21:1 24:1 34:1 36:1 39:1 48:1 53:1 60:1 65:1 67:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 119:1 123:1 0 1:1 9:1 20:1 21:1 24:1 34:1 36:1 39:1 41:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 116:1 122:1 0 6:1 7:1 11:1 22:1 29:1 34:1 36:1 40:1 48:1 53:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 
119:1 124:1 0 3:1 10:1 19:1 21:1 23:1 34:1 36:1 39:1 41:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 116:1 122:1 0 4:1 9:1 20:1 21:1 23:1 34:1 36:1 39:1 51:1 53:1 60:1 65:1 67:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 117:1 123:1 0 1:1 9:1 20:1 21:1 24:1 34:1 36:1 39:1 51:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 116:1 120:1 0 1:1 9:1 20:1 21:1 24:1 34:1 36:1 39:1 41:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 117:1 120:1 0 1:1 9:1 20:1 21:1 23:1 34:1 36:1 39:1 42:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 117:1 122:1 0 1:1 10:1 19:1 21:1 24:1 34:1 36:1 39:1 41:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 117:1 122:1 0 3:1 9:1 11:1 21:1 23:1 34:1 36:1 39:1 42:1 53:1 60:1 65:1 67:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 117:1 120:1 0 6:1 7:1 11:1 22:1 29:1 34:1 36:1 40:1 45:1 53:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 118:1 124:1 0 4:1 9:1 11:1 21:1 23:1 34:1 36:1 39:1 42:1 53:1 60:1 65:1 67:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 117:1 120:1 0 4:1 10:1 20:1 21:1 24:1 34:1 37:1 40:1 51:1 54:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 0 4:1 9:1 20:1 21:1 23:1 34:1 36:1 39:1 42:1 53:1 60:1 65:1 67:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 117:1 120:1 1 3:1 9:1 11:1 21:1 30:1 34:1 36:1 40:1 41:1 53:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 118:1 120:1 0 3:1 7:1 19:1 22:1 29:1 34:1 37:1 39:1 44:1 54:1 58:1 62:1 69:1 77:1 86:1 88:1 92:1 95:1 98:1 106:1 117:1 120:1 0 1:1 10:1 19:1 21:1 23:1 34:1 36:1 39:1 42:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 117:1 122:1 0 3:1 9:1 19:1 21:1 24:1 34:1 36:1 39:1 51:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 117:1 122:1 0 4:1 9:1 20:1 21:1 24:1 34:1 36:1 39:1 51:1 53:1 60:1 65:1 67:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 119:1 120:1 0 3:1 7:1 11:1 22:1 29:1 34:1 37:1 39:1 42:1 54:1 58:1 62:1 69:1 77:1 86:1 88:1 92:1 95:1 98:1 106:1 117:1 120:1 1 3:1 9:1 19:1 21:1 30:1 34:1 36:1 40:1 48:1 53:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 117:1 124:1 0 3:1 9:1 19:1 21:1 24:1 34:1 36:1 39:1 45:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 116:1 120:1 0 1:1 9:1 19:1 21:1 23:1 34:1 36:1 39:1 51:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 117:1 120:1 0 3:1 7:1 19:1 21:1 23:1 34:1 37:1 40:1 51:1 54:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 1 3:1 9:1 19:1 21:1 30:1 34:1 36:1 40:1 51:1 53:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 117:1 124:1 0 3:1 10:1 19:1 21:1 24:1 34:1 36:1 39:1 45:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 117:1 120:1 0 3:1 7:1 14:1 22:1 29:1 34:1 36:1 40:1 48:1 53:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 118:1 124:1 0 3:1 7:1 11:1 22:1 29:1 34:1 36:1 40:1 48:1 53:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 118:1 124:1 0 1:1 10:1 19:1 21:1 23:1 34:1 36:1 39:1 42:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 117:1 122:1 0 3:1 9:1 11:1 21:1 23:1 34:1 36:1 39:1 48:1 53:1 60:1 65:1 67:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 117:1 120:1 0 1:1 9:1 19:1 21:1 23:1 34:1 36:1 39:1 41:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 116:1 122:1 0 3:1 9:1 19:1 21:1 24:1 34:1 36:1 39:1 42:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 116:1 120:1 0 1:1 9:1 19:1 21:1 24:1 34:1 36:1 39:1 51:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 117:1 120:1 0 1:1 9:1 19:1 21:1 23:1 34:1 36:1 39:1 51:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 117:1 122:1 1 
3:1 10:1 11:1 21:1 30:1 34:1 36:1 40:1 51:1 53:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 118:1 124:1 0 3:1 9:1 11:1 21:1 23:1 34:1 36:1 39:1 42:1 53:1 60:1 65:1 67:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 117:1 123:1 0 1:1 10:1 20:1 21:1 24:1 34:1 36:1 39:1 41:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 116:1 120:1 0 4:1 10:1 19:1 21:1 23:1 34:1 37:1 40:1 42:1 54:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 111:1 118:1 126:1 0 1:1 9:1 20:1 21:1 24:1 34:1 36:1 39:1 51:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 117:1 120:1 0 1:1 9:1 20:1 21:1 23:1 34:1 36:1 39:1 41:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 116:1 122:1 0 1:1 10:1 20:1 21:1 23:1 34:1 36:1 39:1 42:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 116:1 120:1 0 4:1 9:1 20:1 21:1 24:1 34:1 36:1 39:1 42:1 53:1 60:1 65:1 67:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 119:1 120:1 0 3:1 10:1 19:1 22:1 29:1 34:1 37:1 39:1 42:1 54:1 58:1 62:1 66:1 77:1 86:1 88:1 92:1 95:1 98:1 106:1 114:1 120:1 0 1:1 9:1 20:1 21:1 23:1 34:1 36:1 39:1 51:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 116:1 122:1 0 1:1 10:1 19:1 21:1 24:1 34:1 36:1 39:1 45:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 116:1 120:1 0 4:1 9:1 11:1 21:1 24:1 34:1 36:1 39:1 48:1 53:1 60:1 65:1 67:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 119:1 120:1 0 3:1 10:1 19:1 21:1 24:1 34:1 36:1 39:1 45:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 116:1 120:1 0 3:1 7:1 11:1 22:1 29:1 34:1 36:1 40:1 45:1 53:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 118:1 124:1 0 3:1 9:1 11:1 21:1 24:1 34:1 36:1 39:1 51:1 53:1 60:1 65:1 67:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 117:1 123:1 0 4:1 7:1 14:1 22:1 29:1 34:1 37:1 39:1 41:1 54:1 58:1 65:1 66:1 77:1 86:1 88:1 92:1 95:1 98:1 105:1 114:1 120:1 0 4:1 7:1 11:1 22:1 29:1 34:1 36:1 40:1 45:1 53:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 119:1 124:1 0 1:1 9:1 20:1 21:1 23:1 34:1 36:1 39:1 45:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 117:1 122:1 0 3:1 10:1 19:1 21:1 23:1 34:1 36:1 39:1 51:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 116:1 120:1 0 3:1 9:1 20:1 21:1 23:1 34:1 36:1 39:1 41:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 116:1 122:1 0 4:1 7:1 14:1 22:1 29:1 34:1 37:1 39:1 44:1 54:1 58:1 62:1 69:1 77:1 86:1 88:1 92:1 95:1 98:1 105:1 114:1 120:1 0 3:1 9:1 20:1 21:1 23:1 34:1 36:1 39:1 48:1 53:1 60:1 65:1 67:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 117:1 120:1 1 3:1 9:1 11:1 21:1 30:1 34:1 36:1 40:1 41:1 53:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 117:1 124:1 0 3:1 10:1 20:1 21:1 24:1 34:1 36:1 39:1 42:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 116:1 120:1 0 1:1 9:1 19:1 21:1 24:1 34:1 36:1 39:1 51:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 116:1 122:1 0 3:1 9:1 20:1 21:1 23:1 34:1 36:1 39:1 51:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 117:1 122:1 0 3:1 10:1 19:1 21:1 24:1 34:1 36:1 39:1 42:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 117:1 122:1 0 3:1 7:1 11:1 22:1 29:1 34:1 36:1 40:1 42:1 53:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 118:1 124:1 0 3:1 9:1 20:1 21:1 24:1 34:1 36:1 39:1 51:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 116:1 120:1 0 3:1 9:1 20:1 21:1 24:1 34:1 36:1 39:1 48:1 53:1 60:1 65:1 67:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 119:1 123:1 0 1:1 9:1 20:1 21:1 24:1 34:1 36:1 39:1 41:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 117:1 120:1 0 4:1 9:1 11:1 
21:1 24:1 34:1 36:1 39:1 48:1 53:1 60:1 65:1 67:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 119:1 123:1 0 3:1 9:1 19:1 21:1 23:1 34:1 36:1 39:1 41:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 116:1 120:1 1 3:1 9:1 11:1 21:1 30:1 34:1 36:1 40:1 48:1 53:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 118:1 120:1 1 3:1 9:1 11:1 21:1 30:1 34:1 36:1 40:1 42:1 53:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 117:1 124:1 0 1:1 9:1 20:1 21:1 24:1 34:1 36:1 39:1 42:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 116:1 120:1 0 3:1 10:1 19:1 22:1 29:1 34:1 37:1 39:1 44:1 54:1 58:1 62:1 69:1 77:1 86:1 88:1 92:1 95:1 98:1 105:1 117:1 120:1 1 3:1 10:1 19:1 21:1 30:1 34:1 36:1 40:1 41:1 53:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 118:1 120:1 0 1:1 10:1 19:1 21:1 23:1 34:1 36:1 39:1 42:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 116:1 122:1 1 3:1 10:1 11:1 21:1 30:1 34:1 36:1 40:1 42:1 53:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 118:1 124:1 0 1:1 9:1 20:1 21:1 24:1 34:1 36:1 39:1 51:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 116:1 120:1 0 1:1 9:1 19:1 21:1 23:1 34:1 36:1 39:1 41:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 116:1 120:1 0 4:1 9:1 20:1 21:1 24:1 34:1 36:1 39:1 51:1 53:1 60:1 65:1 67:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 117:1 120:1 0 1:1 9:1 19:1 21:1 23:1 34:1 36:1 39:1 41:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 117:1 120:1 0 3:1 9:1 20:1 21:1 23:1 34:1 36:1 39:1 48:1 53:1 60:1 65:1 67:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 119:1 120:1 0 1:1 10:1 20:1 21:1 24:1 34:1 36:1 39:1 42:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 117:1 122:1 0 1:1 10:1 20:1 21:1 23:1 34:1 36:1 39:1 45:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 116:1 122:1 0 1:1 10:1 20:1 21:1 23:1 34:1 36:1 39:1 42:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 116:1 122:1 1 3:1 9:1 19:1 21:1 30:1 34:1 36:1 40:1 51:1 53:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 118:1 120:1 1 3:1 10:1 11:1 21:1 30:1 34:1 36:1 40:1 41:1 53:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 118:1 124:1 0 3:1 9:1 11:1 21:1 23:1 34:1 36:1 39:1 48:1 53:1 60:1 65:1 67:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 119:1 120:1 0 3:1 9:1 20:1 21:1 23:1 34:1 36:1 39:1 42:1 53:1 60:1 65:1 67:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 119:1 120:1 0 3:1 10:1 19:1 21:1 23:1 34:1 37:1 40:1 51:1 54:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 0 1:1 9:1 19:1 21:1 24:1 34:1 36:1 39:1 41:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 116:1 120:1 0 1:1 10:1 20:1 21:1 24:1 34:1 36:1 39:1 51:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 116:1 122:1 0 3:1 7:1 14:1 22:1 29:1 34:1 36:1 40:1 41:1 53:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 118:1 124:1 0 1:1 10:1 20:1 21:1 23:1 34:1 36:1 39:1 42:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 117:1 122:1 0 3:1 10:1 19:1 21:1 24:1 34:1 36:1 39:1 45:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 117:1 120:1 0 3:1 9:1 11:1 21:1 23:1 34:1 36:1 39:1 51:1 53:1 60:1 65:1 67:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 117:1 123:1 0 3:1 10:1 19:1 21:1 24:1 34:1 36:1 39:1 45:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 117:1 122:1 0 4:1 10:1 20:1 21:1 24:1 34:1 37:1 40:1 48:1 54:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 111:1 118:1 126:1 0 6:1 7:1 14:1 22:1 29:1 34:1 36:1 40:1 48:1 53:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 118:1 124:1 0 4:1 7:1 20:1 21:1 
23:1 34:1 37:1 40:1 51:1 54:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 0 3:1 9:1 11:1 21:1 24:1 34:1 36:1 39:1 48:1 53:1 60:1 65:1 67:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 119:1 120:1 0 3:1 9:1 20:1 21:1 24:1 34:1 36:1 39:1 51:1 53:1 60:1 65:1 67:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 117:1 120:1 1 3:1 10:1 11:1 21:1 30:1 34:1 36:1 40:1 41:1 53:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 118:1 120:1 0 3:1 10:1 20:1 21:1 24:1 34:1 36:1 39:1 42:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 117:1 122:1 1 3:1 9:1 19:1 21:1 30:1 34:1 36:1 40:1 48:1 53:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 117:1 124:1 0 3:1 9:1 20:1 21:1 23:1 34:1 36:1 39:1 51:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 117:1 120:1 0 3:1 9:1 19:1 21:1 23:1 34:1 36:1 39:1 51:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 117:1 120:1 0 1:1 10:1 19:1 21:1 23:1 34:1 36:1 39:1 42:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 117:1 120:1 0 6:1 7:1 14:1 22:1 29:1 34:1 36:1 40:1 42:1 53:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 119:1 124:1 0 3:1 10:1 20:1 21:1 23:1 34:1 36:1 39:1 45:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 116:1 120:1 0 3:1 7:1 14:1 22:1 29:1 34:1 37:1 39:1 41:1 54:1 58:1 65:1 66:1 77:1 86:1 88:1 92:1 95:1 98:1 106:1 117:1 120:1 0 3:1 9:1 20:1 21:1 23:1 34:1 36:1 39:1 42:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 117:1 122:1 0 1:1 9:1 19:1 21:1 24:1 34:1 36:1 39:1 45:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 117:1 120:1 0 1:1 10:1 20:1 21:1 24:1 34:1 36:1 39:1 41:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 116:1 122:1 0 3:1 10:1 19:1 21:1 24:1 34:1 37:1 40:1 51:1 54:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 111:1 118:1 126:1 0 1:1 10:1 19:1 21:1 24:1 34:1 36:1 39:1 41:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 117:1 120:1 0 4:1 9:1 11:1 21:1 23:1 34:1 36:1 39:1 48:1 53:1 60:1 65:1 67:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 117:1 120:1 0 6:1 7:1 14:1 22:1 29:1 34:1 36:1 40:1 45:1 53:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 119:1 124:1 0 3:1 7:1 19:1 22:1 29:1 34:1 37:1 39:1 48:1 54:1 58:1 62:1 69:1 77:1 86:1 88:1 92:1 95:1 98:1 106:1 114:1 120:1 0 6:1 7:1 11:1 22:1 29:1 34:1 36:1 40:1 42:1 53:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 118:1 124:1 0 1:1 10:1 20:1 21:1 23:1 34:1 36:1 39:1 42:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 117:1 120:1 0 1:1 10:1 20:1 21:1 23:1 34:1 36:1 39:1 41:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 116:1 122:1 0 4:1 9:1 11:1 21:1 24:1 34:1 36:1 39:1 51:1 53:1 60:1 65:1 67:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 119:1 123:1 0 3:1 10:1 20:1 21:1 23:1 34:1 36:1 39:1 51:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 117:1 120:1 0 4:1 9:1 20:1 21:1 23:1 34:1 36:1 39:1 48:1 53:1 60:1 65:1 67:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 117:1 120:1 0 3:1 9:1 19:1 21:1 23:1 34:1 36:1 39:1 45:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 116:1 120:1 0 3:1 9:1 20:1 21:1 23:1 34:1 36:1 39:1 51:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 117:1 122:1 0 4:1 9:1 20:1 21:1 24:1 34:1 36:1 39:1 48:1 53:1 60:1 65:1 67:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 119:1 123:1 0 3:1 7:1 14:1 22:1 29:1 34:1 36:1 40:1 48:1 53:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 119:1 124:1 0 4:1 9:1 11:1 21:1 23:1 34:1 36:1 39:1 48:1 53:1 60:1 65:1 67:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 119:1 123:1 0 3:1 10:1 20:1 21:1 24:1 34:1 
36:1 39:1 51:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 116:1 120:1 0 3:1 9:1 20:1 21:1 23:1 34:1 36:1 39:1 41:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 117:1 122:1 0 3:1 10:1 19:1 21:1 23:1 34:1 36:1 39:1 42:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 117:1 122:1 0 3:1 9:1 20:1 21:1 24:1 34:1 36:1 39:1 48:1 53:1 60:1 65:1 67:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 117:1 123:1 0 3:1 9:1 19:1 21:1 23:1 34:1 36:1 39:1 41:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 116:1 122:1 0 3:1 10:1 19:1 21:1 24:1 34:1 36:1 39:1 41:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 116:1 120:1 0 4:1 9:1 20:1 21:1 24:1 34:1 36:1 39:1 51:1 53:1 60:1 65:1 67:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 119:1 120:1 0 1:1 10:1 20:1 21:1 24:1 34:1 36:1 39:1 42:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 116:1 120:1 0 4:1 7:1 19:1 21:1 23:1 34:1 37:1 40:1 51:1 54:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 0 4:1 10:1 14:1 22:1 29:1 34:1 37:1 39:1 42:1 54:1 58:1 65:1 66:1 77:1 86:1 88:1 92:1 95:1 98:1 106:1 117:1 120:1 0 3:1 9:1 19:1 21:1 23:1 34:1 36:1 39:1 51:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 116:1 120:1 0 4:1 9:1 20:1 21:1 24:1 34:1 36:1 39:1 42:1 53:1 60:1 65:1 67:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 117:1 120:1 0 3:1 10:1 20:1 21:1 23:1 34:1 36:1 39:1 42:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 117:1 120:1 0 4:1 9:1 20:1 21:1 23:1 34:1 36:1 39:1 42:1 53:1 60:1 65:1 67:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 119:1 120:1 0 3:1 9:1 20:1 21:1 23:1 34:1 36:1 39:1 51:1 53:1 60:1 65:1 67:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 119:1 123:1 0 3:1 10:1 20:1 21:1 24:1 34:1 36:1 39:1 45:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 116:1 120:1 0 4:1 9:1 20:1 21:1 24:1 34:1 36:1 39:1 48:1 53:1 60:1 65:1 67:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 119:1 120:1 0 3:1 10:1 19:1 21:1 23:1 34:1 36:1 39:1 42:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 116:1 120:1 0 1:1 10:1 20:1 21:1 23:1 34:1 36:1 39:1 45:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 117:1 122:1 0 3:1 10:1 20:1 21:1 24:1 34:1 36:1 39:1 41:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 117:1 122:1 0 1:1 9:1 20:1 21:1 23:1 34:1 36:1 39:1 41:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 116:1 120:1 0 3:1 9:1 19:1 21:1 24:1 34:1 36:1 39:1 41:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 117:1 122:1 0 3:1 10:1 20:1 21:1 24:1 34:1 36:1 39:1 42:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 117:1 120:1 0 4:1 9:1 20:1 21:1 24:1 34:1 36:1 39:1 42:1 53:1 60:1 65:1 67:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 119:1 123:1 0 3:1 7:1 19:1 22:1 29:1 34:1 37:1 39:1 42:1 54:1 58:1 62:1 66:1 77:1 86:1 88:1 92:1 95:1 98:1 106:1 114:1 120:1 0 3:1 10:1 11:1 22:1 29:1 34:1 37:1 39:1 48:1 54:1 58:1 62:1 66:1 77:1 86:1 88:1 92:1 95:1 98:1 105:1 117:1 120:1 0 3:1 10:1 19:1 22:1 29:1 34:1 37:1 39:1 44:1 54:1 58:1 62:1 69:1 77:1 86:1 88:1 92:1 95:1 98:1 106:1 117:1 120:1 0 3:1 10:1 20:1 21:1 24:1 34:1 36:1 39:1 51:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 116:1 122:1 0 4:1 7:1 19:1 22:1 29:1 34:1 37:1 39:1 48:1 54:1 58:1 62:1 69:1 77:1 86:1 88:1 92:1 95:1 98:1 105:1 117:1 120:1 1 4:1 10:1 11:1 21:1 30:1 34:1 36:1 40:1 42:1 53:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 117:1 120:1 1 4:1 9:1 11:1 21:1 30:1 34:1 36:1 40:1 48:1 53:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 118:1 124:1 1 3:1 10:1 11:1 21:1 30:1 34:1 36:1 40:1 
51:1 53:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 118:1 120:1 0 3:1 7:1 14:1 22:1 29:1 34:1 37:1 39:1 48:1 54:1 58:1 62:1 66:1 77:1 86:1 88:1 92:1 95:1 98:1 105:1 114:1 120:1 0 3:1 10:1 19:1 21:1 24:1 34:1 36:1 39:1 45:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 116:1 120:1 0 4:1 7:1 14:1 22:1 29:1 34:1 37:1 39:1 42:1 54:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 98:1 105:1 114:1 120:1 0 3:1 10:1 14:1 22:1 29:1 34:1 37:1 39:1 41:1 54:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 98:1 105:1 117:1 120:1 0 3:1 10:1 19:1 21:1 23:1 34:1 36:1 39:1 41:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 116:1 120:1 0 3:1 10:1 19:1 21:1 24:1 34:1 36:1 39:1 42:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 117:1 122:1 0 3:1 9:1 14:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 73:1 86:1 88:1 92:1 95:1 102:1 106:1 119:1 126:1 0 4:1 7:1 11:1 22:1 29:1 34:1 37:1 39:1 41:1 54:1 58:1 62:1 66:1 77:1 86:1 88:1 92:1 95:1 98:1 106:1 114:1 120:1 0 3:1 10:1 19:1 22:1 29:1 34:1 37:1 39:1 44:1 54:1 58:1 65:1 66:1 77:1 86:1 88:1 92:1 95:1 98:1 105:1 117:1 120:1 0 4:1 7:1 11:1 22:1 29:1 34:1 37:1 39:1 48:1 54:1 58:1 62:1 69:1 77:1 86:1 88:1 92:1 95:1 98:1 105:1 114:1 120:1 0 3:1 10:1 11:1 22:1 29:1 34:1 37:1 39:1 48:1 54:1 58:1 62:1 66:1 77:1 86:1 88:1 92:1 95:1 98:1 106:1 114:1 120:1 0 3:1 9:1 20:1 21:1 24:1 34:1 36:1 39:1 42:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 116:1 120:1 0 3:1 10:1 20:1 21:1 24:1 34:1 37:1 40:1 51:1 54:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 111:1 118:1 126:1 0 4:1 7:1 19:1 22:1 29:1 34:1 37:1 39:1 41:1 54:1 58:1 62:1 69:1 77:1 86:1 88:1 92:1 95:1 98:1 106:1 117:1 120:1 0 3:1 10:1 11:1 22:1 29:1 34:1 37:1 39:1 44:1 54:1 58:1 62:1 69:1 77:1 86:1 88:1 92:1 95:1 98:1 106:1 114:1 120:1 0 3:1 10:1 14:1 22:1 29:1 34:1 37:1 39:1 41:1 54:1 58:1 62:1 69:1 77:1 86:1 88:1 92:1 95:1 98:1 105:1 114:1 120:1 1 3:1 9:1 11:1 21:1 30:1 34:1 36:1 40:1 51:1 53:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 117:1 124:1 0 3:1 7:1 14:1 22:1 29:1 34:1 36:1 40:1 41:1 53:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 119:1 124:1 0 1:1 10:1 19:1 21:1 23:1 34:1 36:1 39:1 42:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 116:1 122:1 0 3:1 10:1 14:1 22:1 29:1 34:1 37:1 39:1 44:1 54:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 98:1 105:1 114:1 120:1 0 4:1 7:1 14:1 22:1 29:1 34:1 37:1 39:1 42:1 54:1 58:1 65:1 66:1 77:1 86:1 88:1 92:1 95:1 98:1 106:1 117:1 120:1 1 4:1 10:1 11:1 21:1 30:1 34:1 36:1 40:1 42:1 53:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 118:1 120:1 0 3:1 10:1 20:1 21:1 24:1 34:1 36:1 39:1 41:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 117:1 120:1 0 3:1 7:1 11:1 22:1 29:1 34:1 36:1 40:1 42:1 53:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 119:1 124:1 0 4:1 10:1 19:1 22:1 29:1 34:1 37:1 39:1 48:1 54:1 58:1 65:1 66:1 77:1 86:1 88:1 92:1 95:1 98:1 105:1 117:1 120:1 0 3:1 9:1 20:1 21:1 23:1 34:1 36:1 39:1 51:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 116:1 122:1 0 3:1 7:1 11:1 22:1 29:1 34:1 37:1 39:1 48:1 54:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 98:1 105:1 117:1 120:1 0 4:1 10:1 14:1 22:1 29:1 34:1 37:1 39:1 42:1 54:1 58:1 62:1 69:1 77:1 86:1 88:1 92:1 95:1 98:1 105:1 114:1 120:1 0 3:1 9:1 18:1 21:1 29:1 34:1 36:1 39:1 49:1 54:1 55:1 65:1 69:1 73:1 84:1 88:1 92:1 95:1 102:1 106:1 119:1 126:1 0 1:1 9:1 20:1 21:1 23:1 34:1 36:1 39:1 42:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 116:1 120:1 0 3:1 7:1 19:1 22:1 29:1 34:1 37:1 39:1 42:1 54:1 58:1 62:1 
66:1 77:1 86:1 88:1 92:1 95:1 98:1 105:1 117:1 120:1 0 4:1 7:1 14:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 77:1 82:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 0 4:1 7:1 11:1 22:1 29:1 34:1 37:1 39:1 44:1 54:1 58:1 62:1 66:1 77:1 86:1 88:1 92:1 95:1 98:1 106:1 114:1 120:1 0 1:1 9:1 19:1 21:1 24:1 34:1 36:1 39:1 51:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 117:1 120:1 0 4:1 7:1 11:1 22:1 29:1 34:1 37:1 39:1 42:1 54:1 58:1 62:1 69:1 77:1 86:1 88:1 92:1 95:1 98:1 105:1 114:1 120:1 0 1:1 10:1 20:1 21:1 24:1 34:1 36:1 39:1 42:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 117:1 120:1 0 4:1 10:1 14:1 22:1 29:1 34:1 37:1 39:1 44:1 54:1 58:1 65:1 66:1 77:1 86:1 88:1 92:1 95:1 98:1 106:1 114:1 120:1 0 3:1 7:1 14:1 22:1 29:1 34:1 37:1 39:1 42:1 54:1 58:1 65:1 66:1 77:1 86:1 88:1 92:1 95:1 98:1 106:1 114:1 120:1 0 1:1 10:1 19:1 21:1 24:1 34:1 36:1 39:1 41:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 117:1 122:1 0 3:1 9:1 11:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 75:1 84:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 0 3:1 9:1 20:1 21:1 24:1 34:1 36:1 39:1 41:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 116:1 122:1 0 3:1 7:1 11:1 22:1 29:1 34:1 37:1 39:1 42:1 54:1 58:1 62:1 66:1 77:1 86:1 88:1 92:1 95:1 98:1 105:1 114:1 120:1 0 4:1 7:1 19:1 22:1 29:1 34:1 37:1 39:1 44:1 54:1 58:1 62:1 69:1 77:1 86:1 88:1 92:1 95:1 98:1 105:1 114:1 120:1 0 3:1 7:1 11:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 73:1 84:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 0 3:1 7:1 11:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 75:1 82:1 88:1 92:1 95:1 102:1 106:1 119:1 126:1 0 4:1 10:1 14:1 22:1 29:1 34:1 37:1 39:1 41:1 54:1 58:1 62:1 66:1 77:1 86:1 88:1 92:1 95:1 98:1 105:1 117:1 120:1 1 4:1 10:1 11:1 21:1 30:1 34:1 36:1 40:1 51:1 53:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 117:1 120:1 0 3:1 7:1 11:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 77:1 84:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 0 3:1 7:1 19:1 22:1 29:1 34:1 37:1 39:1 41:1 54:1 58:1 65:1 66:1 77:1 86:1 88:1 92:1 95:1 98:1 105:1 117:1 120:1 0 4:1 7:1 11:1 22:1 29:1 34:1 37:1 39:1 42:1 54:1 58:1 62:1 69:1 77:1 86:1 88:1 92:1 95:1 98:1 106:1 117:1 120:1 0 3:1 7:1 11:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 75:1 86:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 0 4:1 7:1 11:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 75:1 86:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 0 4:1 7:1 11:1 21:1 29:1 34:1 36:1 39:1 49:1 54:1 55:1 65:1 69:1 75:1 86:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 0 3:1 10:1 14:1 22:1 29:1 34:1 37:1 39:1 48:1 54:1 58:1 62:1 66:1 77:1 86:1 88:1 92:1 95:1 98:1 105:1 117:1 120:1 0 1:1 9:1 19:1 21:1 24:1 34:1 36:1 39:1 42:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 116:1 120:1 0 3:1 7:1 11:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 75:1 84:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 0 4:1 7:1 14:1 22:1 29:1 34:1 37:1 39:1 48:1 54:1 58:1 62:1 66:1 77:1 86:1 88:1 92:1 95:1 98:1 105:1 114:1 120:1 0 4:1 7:1 11:1 21:1 29:1 34:1 36:1 39:1 49:1 54:1 55:1 65:1 69:1 75:1 84:1 88:1 92:1 95:1 102:1 106:1 119:1 126:1 0 3:1 10:1 20:1 21:1 24:1 34:1 36:1 39:1 45:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 117:1 122:1 0 4:1 7:1 14:1 22:1 29:1 34:1 37:1 39:1 44:1 54:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 98:1 105:1 117:1 120:1 0 3:1 7:1 11:1 22:1 29:1 34:1 37:1 39:1 41:1 54:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 98:1 106:1 117:1 120:1 1 3:1 10:1 19:1 21:1 30:1 34:1 36:1 40:1 42:1 53:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 
95:1 102:1 105:1 117:1 124:1 0 3:1 7:1 19:1 22:1 29:1 34:1 37:1 39:1 42:1 54:1 58:1 62:1 66:1 77:1 86:1 88:1 92:1 95:1 98:1 106:1 117:1 120:1 0 4:1 7:1 19:1 22:1 29:1 34:1 37:1 39:1 42:1 54:1 58:1 62:1 69:1 77:1 86:1 88:1 92:1 95:1 98:1 105:1 114:1 120:1 0 3:1 10:1 19:1 22:1 29:1 34:1 37:1 39:1 42:1 54:1 58:1 65:1 66:1 77:1 86:1 88:1 92:1 95:1 98:1 106:1 114:1 120:1 0 3:1 10:1 14:1 22:1 29:1 34:1 37:1 39:1 41:1 54:1 58:1 65:1 66:1 77:1 86:1 88:1 92:1 95:1 98:1 105:1 114:1 120:1 0 3:1 9:1 20:1 21:1 24:1 34:1 36:1 39:1 42:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 116:1 120:1 0 3:1 10:1 20:1 21:1 24:1 34:1 36:1 39:1 42:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 116:1 122:1 0 3:1 10:1 14:1 22:1 29:1 34:1 37:1 39:1 48:1 54:1 58:1 65:1 66:1 77:1 86:1 88:1 92:1 95:1 98:1 105:1 114:1 120:1 0 3:1 10:1 11:1 22:1 29:1 34:1 37:1 39:1 48:1 54:1 58:1 62:1 69:1 77:1 86:1 88:1 92:1 95:1 98:1 106:1 117:1 120:1 0 4:1 7:1 11:1 22:1 29:1 34:1 36:1 40:1 42:1 53:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 118:1 124:1 0 4:1 10:1 19:1 22:1 29:1 34:1 37:1 39:1 41:1 54:1 58:1 65:1 66:1 77:1 86:1 88:1 92:1 95:1 98:1 105:1 117:1 120:1 0 4:1 10:1 19:1 22:1 29:1 34:1 37:1 39:1 42:1 54:1 58:1 62:1 66:1 77:1 86:1 88:1 92:1 95:1 98:1 105:1 114:1 120:1 0 3:1 9:1 20:1 21:1 24:1 34:1 36:1 39:1 42:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 117:1 122:1 0 4:1 10:1 19:1 21:1 23:1 34:1 37:1 40:1 51:1 54:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 111:1 118:1 126:1 0 1:1 10:1 19:1 21:1 24:1 34:1 36:1 39:1 51:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 116:1 122:1 1 4:1 9:1 19:1 21:1 30:1 34:1 36:1 40:1 48:1 53:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 118:1 124:1 0 4:1 9:1 11:1 21:1 24:1 34:1 36:1 39:1 48:1 53:1 60:1 65:1 67:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 119:1 120:1 0 4:1 10:1 11:1 22:1 29:1 34:1 37:1 39:1 41:1 54:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 98:1 106:1 117:1 120:1 0 3:1 9:1 11:1 21:1 24:1 34:1 36:1 39:1 48:1 53:1 60:1 65:1 67:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 119:1 120:1 0 3:1 10:1 14:1 22:1 29:1 34:1 37:1 39:1 48:1 54:1 58:1 65:1 66:1 77:1 86:1 88:1 92:1 95:1 98:1 106:1 114:1 120:1 0 4:1 7:1 19:1 22:1 29:1 34:1 37:1 39:1 48:1 54:1 58:1 62:1 69:1 77:1 86:1 88:1 92:1 95:1 98:1 106:1 117:1 120:1 0 3:1 10:1 19:1 21:1 24:1 34:1 36:1 39:1 41:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 117:1 120:1 0 3:1 9:1 19:1 21:1 23:1 34:1 36:1 39:1 45:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 117:1 122:1 0 3:1 10:1 19:1 22:1 29:1 34:1 37:1 39:1 41:1 54:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 98:1 105:1 117:1 120:1 0 4:1 9:1 20:1 21:1 23:1 34:1 36:1 39:1 48:1 53:1 60:1 65:1 67:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 117:1 123:1 0 4:1 7:1 20:1 21:1 23:1 34:1 37:1 40:1 48:1 54:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 111:1 118:1 126:1 0 4:1 9:1 11:1 21:1 24:1 34:1 36:1 39:1 42:1 53:1 60:1 65:1 67:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 117:1 120:1 0 3:1 7:1 11:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 75:1 86:1 88:1 92:1 95:1 102:1 106:1 119:1 126:1 1 4:1 10:1 11:1 21:1 30:1 34:1 36:1 40:1 51:1 53:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 117:1 120:1 0 3:1 7:1 11:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 77:1 84:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 0 4:1 7:1 11:1 22:1 29:1 34:1 37:1 39:1 42:1 54:1 58:1 62:1 66:1 77:1 86:1 88:1 92:1 95:1 98:1 106:1 114:1 120:1 0 1:1 10:1 20:1 21:1 23:1 34:1 36:1 39:1 51:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 
116:1 122:1 0 3:1 9:1 19:1 21:1 23:1 34:1 36:1 39:1 51:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 117:1 122:1 1 4:1 9:1 19:1 21:1 30:1 34:1 36:1 40:1 42:1 53:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 118:1 124:1 1 4:1 9:1 19:1 21:1 30:1 34:1 36:1 40:1 48:1 53:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 117:1 120:1 0 3:1 7:1 11:1 22:1 29:1 34:1 36:1 40:1 42:1 53:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 118:1 124:1 0 3:1 9:1 20:1 21:1 24:1 34:1 36:1 39:1 41:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 116:1 122:1 0 3:1 10:1 19:1 22:1 29:1 34:1 37:1 39:1 41:1 54:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 98:1 105:1 114:1 120:1 0 3:1 7:1 14:1 21:1 29:1 34:1 36:1 39:1 49:1 54:1 55:1 65:1 69:1 73:1 82:1 88:1 92:1 95:1 102:1 106:1 119:1 126:1 1 3:1 9:1 11:1 21:1 30:1 34:1 36:1 40:1 42:1 53:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 118:1 120:1 0 3:1 7:1 14:1 22:1 29:1 34:1 37:1 39:1 48:1 54:1 58:1 65:1 66:1 77:1 86:1 88:1 92:1 95:1 98:1 106:1 117:1 120:1 0 4:1 7:1 11:1 22:1 29:1 34:1 37:1 39:1 48:1 54:1 58:1 65:1 66:1 77:1 86:1 88:1 92:1 95:1 98:1 106:1 117:1 120:1 0 3:1 10:1 19:1 22:1 29:1 34:1 37:1 39:1 48:1 54:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 98:1 106:1 117:1 120:1 0 4:1 7:1 19:1 22:1 29:1 34:1 37:1 39:1 48:1 54:1 58:1 62:1 66:1 77:1 86:1 88:1 92:1 95:1 98:1 105:1 114:1 120:1 0 3:1 9:1 20:1 21:1 23:1 34:1 36:1 39:1 41:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 117:1 122:1 0 4:1 7:1 20:1 21:1 23:1 34:1 37:1 40:1 42:1 54:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 0 3:1 7:1 14:1 22:1 29:1 34:1 36:1 40:1 41:1 53:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 119:1 124:1 1 3:1 10:1 11:1 21:1 30:1 34:1 36:1 40:1 48:1 53:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 117:1 124:1 0 3:1 9:1 20:1 21:1 23:1 34:1 36:1 39:1 51:1 53:1 60:1 65:1 67:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 119:1 123:1 0 4:1 9:1 11:1 21:1 23:1 34:1 36:1 39:1 42:1 53:1 60:1 65:1 67:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 117:1 123:1 0 3:1 7:1 19:1 22:1 29:1 34:1 37:1 39:1 44:1 54:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 98:1 105:1 114:1 120:1 0 3:1 9:1 11:1 21:1 29:1 34:1 36:1 39:1 49:1 54:1 55:1 65:1 69:1 73:1 82:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 0 3:1 10:1 14:1 22:1 29:1 34:1 37:1 39:1 44:1 54:1 58:1 62:1 66:1 77:1 86:1 88:1 92:1 95:1 98:1 106:1 114:1 120:1 0 3:1 10:1 19:1 21:1 23:1 34:1 36:1 39:1 51:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 117:1 120:1 0 3:1 7:1 14:1 22:1 29:1 34:1 37:1 39:1 44:1 54:1 58:1 65:1 66:1 77:1 86:1 88:1 92:1 95:1 98:1 106:1 117:1 120:1 1 4:1 9:1 11:1 21:1 30:1 34:1 36:1 40:1 42:1 53:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 117:1 124:1 0 3:1 7:1 11:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 73:1 86:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 0 3:1 7:1 14:1 22:1 29:1 34:1 37:1 39:1 41:1 54:1 58:1 62:1 66:1 77:1 86:1 88:1 92:1 95:1 98:1 105:1 114:1 120:1 0 3:1 7:1 11:1 21:1 29:1 34:1 36:1 39:1 49:1 54:1 55:1 65:1 69:1 73:1 82:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 0 3:1 7:1 14:1 22:1 29:1 34:1 37:1 39:1 48:1 54:1 58:1 65:1 66:1 77:1 86:1 88:1 92:1 95:1 98:1 106:1 114:1 120:1 0 3:1 7:1 11:1 22:1 29:1 34:1 37:1 39:1 41:1 54:1 58:1 62:1 69:1 77:1 86:1 88:1 92:1 95:1 98:1 105:1 117:1 120:1 0 1:1 10:1 20:1 21:1 24:1 34:1 36:1 39:1 41:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 117:1 120:1 0 3:1 7:1 19:1 21:1 24:1 34:1 37:1 40:1 48:1 54:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 0 4:1 10:1 
14:1 22:1 29:1 34:1 37:1 39:1 41:1 54:1 58:1 65:1 66:1 77:1 86:1 88:1 92:1 95:1 98:1 106:1 117:1 120:1 0 3:1 7:1 11:1 22:1 29:1 34:1 37:1 39:1 44:1 54:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 98:1 106:1 117:1 120:1 0 4:1 10:1 19:1 22:1 29:1 34:1 37:1 39:1 44:1 54:1 58:1 65:1 66:1 77:1 86:1 88:1 92:1 95:1 98:1 106:1 114:1 120:1 0 3:1 10:1 20:1 21:1 24:1 34:1 37:1 40:1 42:1 54:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 0 3:1 7:1 14:1 22:1 29:1 34:1 37:1 39:1 42:1 54:1 58:1 62:1 66:1 77:1 86:1 88:1 92:1 95:1 98:1 106:1 117:1 120:1 1 4:1 9:1 19:1 21:1 30:1 34:1 36:1 40:1 41:1 53:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 117:1 124:1 0 4:1 7:1 20:1 21:1 24:1 34:1 37:1 40:1 48:1 54:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 111:1 118:1 126:1 0 3:1 9:1 19:1 21:1 23:1 34:1 36:1 39:1 41:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 117:1 120:1 0 4:1 7:1 19:1 22:1 29:1 34:1 37:1 39:1 48:1 54:1 58:1 62:1 69:1 77:1 86:1 88:1 92:1 95:1 98:1 106:1 114:1 120:1 0 3:1 7:1 11:1 21:1 29:1 34:1 36:1 39:1 49:1 54:1 55:1 65:1 69:1 73:1 82:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 0 3:1 9:1 11:1 21:1 24:1 34:1 36:1 39:1 42:1 53:1 60:1 65:1 67:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 117:1 120:1 0 3:1 10:1 19:1 22:1 29:1 34:1 37:1 39:1 44:1 54:1 58:1 65:1 66:1 77:1 86:1 88:1 92:1 95:1 98:1 106:1 114:1 120:1 1 4:1 9:1 19:1 21:1 30:1 34:1 36:1 40:1 41:1 53:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 118:1 120:1 0 3:1 10:1 20:1 21:1 23:1 34:1 36:1 39:1 41:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 117:1 120:1 0 4:1 9:1 20:1 21:1 24:1 34:1 36:1 39:1 48:1 53:1 60:1 65:1 67:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 117:1 123:1 0 4:1 10:1 14:1 22:1 29:1 34:1 37:1 39:1 41:1 54:1 58:1 62:1 69:1 77:1 86:1 88:1 92:1 95:1 98:1 105:1 114:1 120:1 0 4:1 7:1 11:1 22:1 29:1 34:1 36:1 40:1 48:1 53:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 119:1 124:1 1 4:1 9:1 19:1 21:1 30:1 34:1 36:1 40:1 42:1 53:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 118:1 120:1 0 4:1 7:1 14:1 22:1 29:1 34:1 37:1 39:1 41:1 54:1 58:1 62:1 69:1 77:1 86:1 88:1 92:1 95:1 98:1 106:1 117:1 120:1 0 4:1 7:1 19:1 22:1 29:1 34:1 37:1 39:1 44:1 54:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 98:1 105:1 114:1 120:1 1 4:1 9:1 11:1 21:1 30:1 34:1 36:1 40:1 41:1 53:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 118:1 124:1 0 4:1 7:1 14:1 22:1 29:1 34:1 37:1 39:1 42:1 54:1 58:1 62:1 66:1 77:1 86:1 88:1 92:1 95:1 98:1 105:1 117:1 120:1 0 1:1 10:1 19:1 21:1 24:1 34:1 36:1 39:1 51:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 116:1 120:1 0 1:1 9:1 19:1 21:1 24:1 34:1 36:1 39:1 42:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 116:1 122:1 1 4:1 9:1 11:1 21:1 30:1 34:1 36:1 40:1 42:1 53:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 118:1 120:1 0 3:1 9:1 20:1 21:1 24:1 34:1 36:1 39:1 51:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 117:1 120:1 0 3:1 9:1 11:1 21:1 23:1 34:1 36:1 39:1 42:1 53:1 60:1 65:1 67:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 119:1 120:1 0 4:1 7:1 14:1 22:1 29:1 34:1 37:1 39:1 42:1 54:1 58:1 65:1 66:1 77:1 86:1 88:1 92:1 95:1 98:1 106:1 114:1 120:1 0 4:1 7:1 20:1 21:1 24:1 34:1 37:1 40:1 42:1 54:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 111:1 118:1 126:1 0 3:1 10:1 14:1 22:1 29:1 34:1 37:1 39:1 48:1 54:1 58:1 62:1 66:1 77:1 86:1 88:1 92:1 95:1 98:1 106:1 114:1 120:1 1 4:1 9:1 11:1 21:1 30:1 34:1 36:1 40:1 42:1 53:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 118:1 124:1 0 3:1 7:1 14:1 22:1 29:1 34:1 37:1 
39:1 48:1 54:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 98:1 105:1 114:1 120:1 0 3:1 10:1 14:1 22:1 29:1 34:1 37:1 39:1 44:1 54:1 58:1 62:1 69:1 77:1 86:1 88:1 92:1 95:1 98:1 105:1 114:1 120:1 0 3:1 7:1 11:1 22:1 29:1 34:1 37:1 39:1 41:1 54:1 58:1 62:1 66:1 77:1 86:1 88:1 92:1 95:1 98:1 106:1 114:1 120:1 0 4:1 10:1 19:1 22:1 29:1 34:1 37:1 39:1 41:1 54:1 58:1 62:1 69:1 77:1 86:1 88:1 92:1 95:1 98:1 106:1 117:1 120:1 0 3:1 7:1 14:1 22:1 29:1 34:1 37:1 39:1 44:1 54:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 98:1 106:1 117:1 120:1 1 3:1 9:1 11:1 21:1 30:1 34:1 36:1 40:1 51:1 53:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 117:1 120:1 0 4:1 10:1 19:1 22:1 29:1 34:1 37:1 39:1 42:1 54:1 58:1 62:1 69:1 77:1 86:1 88:1 92:1 95:1 98:1 105:1 114:1 120:1 0 4:1 7:1 14:1 22:1 29:1 34:1 37:1 39:1 41:1 54:1 58:1 65:1 66:1 77:1 86:1 88:1 92:1 95:1 98:1 106:1 114:1 120:1 0 1:1 9:1 20:1 21:1 24:1 34:1 36:1 39:1 41:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 116:1 120:1 0 3:1 10:1 11:1 22:1 29:1 34:1 37:1 39:1 44:1 54:1 58:1 62:1 66:1 77:1 86:1 88:1 92:1 95:1 98:1 106:1 114:1 120:1 0 4:1 7:1 11:1 22:1 29:1 34:1 37:1 39:1 41:1 54:1 58:1 62:1 69:1 77:1 86:1 88:1 92:1 95:1 98:1 105:1 114:1 120:1 1 4:1 10:1 19:1 21:1 30:1 34:1 36:1 40:1 42:1 53:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 117:1 120:1 0 3:1 9:1 11:1 21:1 23:1 34:1 36:1 39:1 51:1 53:1 60:1 65:1 67:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 117:1 120:1 0 4:1 7:1 19:1 22:1 29:1 34:1 37:1 39:1 41:1 54:1 58:1 62:1 69:1 77:1 86:1 88:1 92:1 95:1 98:1 105:1 114:1 120:1 0 4:1 9:1 11:1 21:1 24:1 34:1 36:1 39:1 48:1 53:1 60:1 65:1 67:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 117:1 120:1 0 4:1 10:1 14:1 22:1 29:1 34:1 37:1 39:1 44:1 54:1 58:1 62:1 66:1 77:1 86:1 88:1 92:1 95:1 98:1 106:1 117:1 120:1 0 3:1 10:1 14:1 22:1 29:1 34:1 37:1 39:1 44:1 54:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 98:1 106:1 117:1 120:1 0 3:1 7:1 19:1 22:1 29:1 34:1 37:1 39:1 48:1 54:1 58:1 62:1 69:1 77:1 86:1 88:1 92:1 95:1 98:1 105:1 117:1 120:1 0 4:1 7:1 19:1 22:1 29:1 34:1 37:1 39:1 41:1 54:1 58:1 65:1 66:1 77:1 86:1 88:1 92:1 95:1 98:1 105:1 114:1 120:1 0 3:1 10:1 11:1 22:1 29:1 34:1 37:1 39:1 48:1 54:1 58:1 62:1 66:1 77:1 86:1 88:1 92:1 95:1 98:1 106:1 117:1 120:1 0 3:1 7:1 11:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 75:1 84:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 0 4:1 10:1 20:1 21:1 23:1 34:1 37:1 40:1 48:1 54:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 111:1 118:1 126:1 0 3:1 7:1 19:1 22:1 29:1 34:1 37:1 39:1 48:1 54:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 98:1 105:1 117:1 120:1 0 1:1 10:1 20:1 21:1 23:1 34:1 36:1 39:1 41:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 116:1 120:1 0 3:1 7:1 14:1 22:1 29:1 34:1 37:1 39:1 41:1 54:1 58:1 65:1 66:1 77:1 86:1 88:1 92:1 95:1 98:1 105:1 114:1 120:1 0 3:1 7:1 14:1 22:1 29:1 34:1 37:1 39:1 42:1 54:1 58:1 62:1 69:1 77:1 86:1 88:1 92:1 95:1 98:1 105:1 114:1 120:1 1 3:1 10:1 11:1 21:1 30:1 34:1 36:1 40:1 48:1 53:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 117:1 120:1 0 4:1 10:1 19:1 22:1 29:1 34:1 37:1 39:1 41:1 54:1 58:1 62:1 66:1 77:1 86:1 88:1 92:1 95:1 98:1 106:1 117:1 120:1 0 3:1 7:1 11:1 22:1 29:1 34:1 37:1 39:1 44:1 54:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 98:1 105:1 117:1 120:1 0 3:1 7:1 11:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 77:1 84:1 88:1 92:1 95:1 102:1 106:1 119:1 126:1 0 1:1 9:1 20:1 21:1 24:1 34:1 36:1 39:1 51:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 117:1 120:1 0 4:1 7:1 19:1 22:1 29:1 34:1 37:1 39:1 44:1 54:1 58:1 65:1 
69:1 77:1 86:1 88:1 92:1 95:1 98:1 106:1 114:1 120:1 0 3:1 10:1 11:1 22:1 29:1 34:1 37:1 39:1 42:1 54:1 58:1 62:1 69:1 77:1 86:1 88:1 92:1 95:1 98:1 106:1 114:1 120:1 0 4:1 7:1 19:1 22:1 29:1 34:1 37:1 39:1 42:1 54:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 98:1 106:1 114:1 120:1 0 3:1 7:1 11:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 77:1 84:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 0 4:1 7:1 19:1 22:1 29:1 34:1 37:1 39:1 44:1 54:1 58:1 65:1 66:1 77:1 86:1 88:1 92:1 95:1 98:1 106:1 117:1 120:1 0 3:1 10:1 19:1 22:1 29:1 34:1 37:1 39:1 41:1 54:1 58:1 65:1 66:1 77:1 86:1 88:1 92:1 95:1 98:1 105:1 114:1 120:1 0 3:1 10:1 11:1 22:1 29:1 34:1 37:1 39:1 42:1 54:1 58:1 62:1 66:1 77:1 86:1 88:1 92:1 95:1 98:1 106:1 114:1 120:1 0 3:1 7:1 19:1 22:1 29:1 34:1 37:1 39:1 42:1 54:1 58:1 62:1 69:1 77:1 86:1 88:1 92:1 95:1 98:1 106:1 117:1 120:1 0 3:1 7:1 14:1 22:1 29:1 34:1 37:1 39:1 42:1 54:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 98:1 106:1 114:1 120:1 0 4:1 10:1 19:1 22:1 29:1 34:1 37:1 39:1 48:1 54:1 58:1 62:1 66:1 77:1 86:1 88:1 92:1 95:1 98:1 105:1 114:1 120:1 0 4:1 9:1 11:1 21:1 23:1 34:1 36:1 39:1 42:1 53:1 60:1 65:1 67:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 119:1 123:1 1 3:1 9:1 19:1 21:1 30:1 34:1 36:1 40:1 41:1 53:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 118:1 120:1 0 4:1 10:1 14:1 22:1 29:1 34:1 37:1 39:1 44:1 54:1 58:1 62:1 69:1 77:1 86:1 88:1 92:1 95:1 98:1 105:1 114:1 120:1 1 3:1 9:1 19:1 21:1 30:1 34:1 36:1 40:1 42:1 53:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 118:1 120:1 0 1:1 9:1 19:1 21:1 23:1 34:1 36:1 39:1 41:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 117:1 122:1 0 3:1 10:1 14:1 22:1 29:1 34:1 37:1 39:1 42:1 54:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 98:1 105:1 117:1 120:1 0 4:1 7:1 14:1 22:1 29:1 34:1 37:1 39:1 44:1 54:1 58:1 65:1 66:1 77:1 86:1 88:1 92:1 95:1 98:1 105:1 114:1 120:1 0 4:1 7:1 19:1 22:1 29:1 34:1 37:1 39:1 42:1 54:1 58:1 65:1 66:1 77:1 86:1 88:1 92:1 95:1 98:1 106:1 117:1 120:1 0 4:1 10:1 14:1 22:1 29:1 34:1 37:1 39:1 44:1 54:1 58:1 62:1 66:1 77:1 86:1 88:1 92:1 95:1 98:1 105:1 117:1 120:1 0 1:1 9:1 19:1 21:1 23:1 34:1 36:1 39:1 45:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 116:1 120:1 0 3:1 7:1 14:1 22:1 29:1 34:1 37:1 39:1 42:1 54:1 58:1 62:1 69:1 77:1 86:1 88:1 92:1 95:1 98:1 105:1 117:1 120:1 0 3:1 9:1 20:1 21:1 23:1 34:1 36:1 39:1 48:1 53:1 60:1 65:1 67:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 117:1 123:1 0 3:1 10:1 19:1 22:1 29:1 34:1 37:1 39:1 41:1 54:1 58:1 62:1 66:1 77:1 86:1 88:1 92:1 95:1 98:1 105:1 117:1 120:1 0 1:1 10:1 19:1 21:1 23:1 34:1 36:1 39:1 51:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 116:1 122:1 1 3:1 10:1 11:1 21:1 30:1 34:1 36:1 40:1 41:1 53:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 118:1 120:1 0 3:1 7:1 11:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 75:1 84:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 0 4:1 10:1 11:1 22:1 29:1 34:1 37:1 39:1 42:1 54:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 98:1 105:1 117:1 120:1 0 1:1 10:1 19:1 21:1 23:1 34:1 36:1 39:1 45:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 116:1 122:1 0 3:1 10:1 14:1 22:1 29:1 34:1 37:1 39:1 42:1 54:1 58:1 62:1 69:1 77:1 86:1 88:1 92:1 95:1 98:1 105:1 117:1 120:1 0 3:1 7:1 11:1 21:1 29:1 34:1 36:1 39:1 49:1 54:1 55:1 65:1 69:1 75:1 82:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 0 4:1 7:1 19:1 22:1 29:1 34:1 37:1 39:1 48:1 54:1 58:1 62:1 66:1 77:1 86:1 88:1 92:1 95:1 98:1 106:1 114:1 120:1 0 3:1 7:1 11:1 22:1 29:1 34:1 36:1 40:1 45:1 53:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 
95:1 102:1 105:1 118:1 124:1 0 3:1 7:1 14:1 22:1 29:1 34:1 36:1 40:1 45:1 53:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 118:1 124:1 0 1:1 9:1 20:1 21:1 24:1 34:1 36:1 39:1 45:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 117:1 122:1 0 4:1 7:1 11:1 22:1 29:1 34:1 36:1 40:1 45:1 53:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 118:1 124:1 0 4:1 9:1 11:1 21:1 23:1 34:1 36:1 39:1 51:1 53:1 60:1 65:1 67:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 117:1 123:1 0 3:1 9:1 20:1 21:1 24:1 34:1 36:1 39:1 48:1 53:1 60:1 65:1 67:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 119:1 120:1 0 3:1 9:1 19:1 21:1 23:1 34:1 36:1 39:1 42:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 116:1 120:1 0 3:1 9:1 19:1 21:1 23:1 34:1 36:1 39:1 42:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 117:1 120:1 0 4:1 10:1 19:1 22:1 29:1 34:1 37:1 39:1 42:1 54:1 58:1 62:1 66:1 77:1 86:1 88:1 92:1 95:1 98:1 106:1 114:1 120:1 0 4:1 10:1 14:1 22:1 29:1 34:1 37:1 39:1 44:1 54:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 98:1 105:1 114:1 120:1 1 4:1 10:1 11:1 21:1 30:1 34:1 36:1 40:1 48:1 53:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 118:1 124:1 0 3:1 9:1 20:1 21:1 24:1 34:1 36:1 39:1 42:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 117:1 120:1 0 3:1 7:1 19:1 22:1 29:1 34:1 37:1 39:1 48:1 54:1 58:1 62:1 66:1 77:1 86:1 88:1 92:1 95:1 98:1 105:1 117:1 120:1 0 3:1 10:1 20:1 21:1 24:1 34:1 36:1 39:1 51:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 117:1 122:1 0 3:1 7:1 18:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 73:1 84:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 0 4:1 9:1 11:1 21:1 24:1 34:1 36:1 39:1 48:1 53:1 60:1 65:1 67:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 117:1 123:1 0 1:1 10:1 19:1 21:1 23:1 34:1 36:1 39:1 45:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 116:1 120:1 0 3:1 7:1 14:1 22:1 29:1 34:1 37:1 39:1 41:1 54:1 58:1 62:1 69:1 77:1 86:1 88:1 92:1 95:1 98:1 105:1 117:1 120:1 0 1:1 9:1 19:1 21:1 23:1 34:1 36:1 39:1 42:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 116:1 122:1 0 3:1 10:1 19:1 21:1 24:1 34:1 36:1 39:1 41:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 116:1 120:1 0 1:1 9:1 20:1 21:1 24:1 34:1 36:1 39:1 41:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 117:1 122:1 0 3:1 10:1 20:1 21:1 23:1 34:1 36:1 39:1 45:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 116:1 120:1 0 1:1 10:1 19:1 21:1 23:1 34:1 36:1 39:1 51:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 116:1 120:1 0 4:1 10:1 11:1 22:1 29:1 34:1 37:1 39:1 41:1 54:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 98:1 105:1 117:1 120:1 1 4:1 10:1 19:1 21:1 30:1 34:1 36:1 40:1 42:1 53:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 117:1 120:1 0 4:1 10:1 11:1 22:1 29:1 34:1 37:1 39:1 44:1 54:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 98:1 106:1 114:1 120:1 1 4:1 9:1 19:1 21:1 30:1 34:1 36:1 40:1 48:1 53:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 117:1 124:1 0 4:1 10:1 19:1 22:1 29:1 34:1 37:1 39:1 44:1 54:1 58:1 62:1 69:1 77:1 86:1 88:1 92:1 95:1 98:1 105:1 114:1 120:1 0 3:1 10:1 11:1 22:1 29:1 34:1 37:1 39:1 48:1 54:1 58:1 65:1 66:1 77:1 86:1 88:1 92:1 95:1 98:1 105:1 114:1 120:1 0 4:1 10:1 11:1 22:1 29:1 34:1 37:1 39:1 48:1 54:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 98:1 105:1 114:1 120:1 0 4:1 9:1 20:1 21:1 23:1 34:1 36:1 39:1 42:1 53:1 60:1 65:1 67:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 119:1 120:1 0 3:1 7:1 14:1 22:1 29:1 34:1 37:1 39:1 42:1 54:1 58:1 62:1 66:1 77:1 86:1 88:1 92:1 95:1 98:1 105:1 
114:1 120:1 0 3:1 7:1 11:1 22:1 29:1 34:1 37:1 39:1 42:1 54:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 98:1 105:1 114:1 120:1 1 4:1 10:1 11:1 21:1 30:1 34:1 36:1 40:1 41:1 53:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 118:1 120:1 0 3:1 10:1 19:1 22:1 29:1 34:1 37:1 39:1 48:1 54:1 58:1 62:1 66:1 77:1 86:1 88:1 92:1 95:1 98:1 106:1 114:1 120:1 0 4:1 7:1 19:1 22:1 29:1 34:1 37:1 39:1 44:1 54:1 58:1 62:1 66:1 77:1 86:1 88:1 92:1 95:1 98:1 106:1 114:1 120:1 0 4:1 7:1 19:1 22:1 29:1 34:1 37:1 39:1 41:1 54:1 58:1 62:1 66:1 77:1 86:1 88:1 92:1 95:1 98:1 106:1 117:1 120:1 0 3:1 9:1 20:1 21:1 23:1 34:1 36:1 39:1 42:1 53:1 60:1 65:1 67:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 117:1 120:1 0 3:1 10:1 14:1 22:1 29:1 34:1 37:1 39:1 41:1 54:1 58:1 62:1 66:1 77:1 86:1 88:1 92:1 95:1 98:1 106:1 117:1 120:1 0 3:1 7:1 14:1 22:1 29:1 34:1 37:1 39:1 41:1 54:1 58:1 62:1 66:1 77:1 86:1 88:1 92:1 95:1 98:1 106:1 114:1 120:1 0 4:1 7:1 11:1 22:1 29:1 34:1 37:1 39:1 44:1 54:1 58:1 62:1 66:1 77:1 86:1 88:1 92:1 95:1 98:1 105:1 117:1 120:1 0 3:1 9:1 18:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 73:1 84:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 0 3:1 7:1 11:1 22:1 29:1 34:1 37:1 39:1 48:1 54:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 98:1 105:1 114:1 120:1 1 3:1 10:1 19:1 21:1 30:1 34:1 36:1 40:1 48:1 53:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 118:1 120:1 0 3:1 7:1 11:1 22:1 29:1 34:1 37:1 39:1 42:1 54:1 58:1 65:1 66:1 77:1 86:1 88:1 92:1 95:1 98:1 106:1 117:1 120:1 0 3:1 7:1 19:1 22:1 29:1 34:1 37:1 39:1 48:1 54:1 58:1 62:1 66:1 77:1 86:1 88:1 92:1 95:1 98:1 106:1 114:1 120:1 0 4:1 10:1 11:1 22:1 29:1 34:1 37:1 39:1 44:1 54:1 58:1 65:1 66:1 77:1 86:1 88:1 92:1 95:1 98:1 105:1 114:1 120:1 0 4:1 7:1 14:1 22:1 29:1 34:1 37:1 39:1 48:1 54:1 58:1 65:1 66:1 77:1 86:1 88:1 92:1 95:1 98:1 105:1 114:1 120:1 0 4:1 10:1 14:1 22:1 29:1 34:1 37:1 39:1 48:1 54:1 58:1 62:1 69:1 77:1 86:1 88:1 92:1 95:1 98:1 105:1 114:1 120:1 0 4:1 7:1 14:1 22:1 29:1 34:1 37:1 39:1 44:1 54:1 58:1 65:1 66:1 77:1 86:1 88:1 92:1 95:1 98:1 106:1 117:1 120:1 0 3:1 10:1 14:1 22:1 29:1 34:1 37:1 39:1 44:1 54:1 58:1 65:1 66:1 77:1 86:1 88:1 92:1 95:1 98:1 106:1 117:1 120:1 0 3:1 10:1 11:1 22:1 29:1 34:1 37:1 39:1 44:1 54:1 58:1 65:1 66:1 77:1 86:1 88:1 92:1 95:1 98:1 105:1 117:1 120:1 1 4:1 10:1 19:1 21:1 30:1 34:1 36:1 40:1 48:1 53:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 118:1 124:1 0 3:1 10:1 14:1 22:1 29:1 34:1 37:1 39:1 41:1 54:1 58:1 62:1 66:1 77:1 86:1 88:1 92:1 95:1 98:1 106:1 114:1 120:1 1 3:1 10:1 19:1 21:1 30:1 34:1 36:1 40:1 48:1 53:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 117:1 120:1 0 4:1 10:1 11:1 22:1 29:1 34:1 37:1 39:1 41:1 54:1 58:1 65:1 66:1 77:1 86:1 88:1 92:1 95:1 98:1 106:1 117:1 120:1 0 3:1 7:1 14:1 22:1 29:1 34:1 37:1 39:1 41:1 54:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 98:1 105:1 114:1 120:1 0 4:1 10:1 14:1 22:1 29:1 34:1 37:1 39:1 44:1 54:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 98:1 106:1 114:1 120:1 0 3:1 10:1 14:1 22:1 29:1 34:1 37:1 39:1 41:1 54:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 98:1 105:1 114:1 120:1 0 4:1 7:1 14:1 22:1 29:1 34:1 37:1 39:1 42:1 54:1 58:1 62:1 69:1 77:1 86:1 88:1 92:1 95:1 98:1 105:1 114:1 120:1 0 3:1 7:1 11:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 0 4:1 10:1 19:1 22:1 29:1 34:1 37:1 39:1 41:1 54:1 58:1 65:1 66:1 77:1 86:1 88:1 92:1 95:1 98:1 106:1 114:1 120:1 1 4:1 9:1 19:1 21:1 30:1 34:1 36:1 40:1 48:1 53:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 118:1 120:1 1 4:1 9:1 11:1 
21:1 30:1 34:1 36:1 40:1 41:1 53:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 117:1 120:1 0 3:1 10:1 19:1 22:1 29:1 34:1 37:1 39:1 48:1 54:1 58:1 62:1 69:1 77:1 86:1 88:1 92:1 95:1 98:1 106:1 114:1 120:1 1 4:1 10:1 11:1 21:1 30:1 34:1 36:1 40:1 41:1 53:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 117:1 120:1 0 4:1 7:1 11:1 22:1 29:1 34:1 37:1 39:1 44:1 54:1 58:1 65:1 66:1 77:1 86:1 88:1 92:1 95:1 98:1 105:1 117:1 120:1 0 4:1 10:1 19:1 22:1 29:1 34:1 37:1 39:1 44:1 54:1 58:1 62:1 69:1 77:1 86:1 88:1 92:1 95:1 98:1 106:1 114:1 120:1 1 4:1 10:1 11:1 21:1 30:1 34:1 36:1 40:1 51:1 53:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 118:1 120:1 1 4:1 9:1 19:1 21:1 30:1 34:1 36:1 40:1 48:1 53:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 118:1 120:1 1 4:1 9:1 11:1 21:1 30:1 34:1 36:1 40:1 51:1 53:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 118:1 120:1 1 4:1 10:1 19:1 21:1 30:1 34:1 36:1 40:1 41:1 53:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 117:1 120:1 0 3:1 10:1 19:1 22:1 29:1 34:1 37:1 39:1 41:1 54:1 58:1 62:1 66:1 77:1 86:1 88:1 92:1 95:1 98:1 105:1 114:1 120:1 0 4:1 10:1 19:1 22:1 29:1 34:1 37:1 39:1 41:1 54:1 58:1 62:1 66:1 77:1 86:1 88:1 92:1 95:1 98:1 106:1 114:1 120:1 0 4:1 7:1 14:1 22:1 29:1 34:1 37:1 39:1 48:1 54:1 58:1 62:1 69:1 77:1 86:1 88:1 92:1 95:1 98:1 105:1 114:1 120:1 1 4:1 9:1 11:1 21:1 30:1 34:1 36:1 40:1 41:1 53:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 118:1 124:1 0 3:1 10:1 19:1 22:1 29:1 34:1 37:1 39:1 41:1 54:1 58:1 62:1 66:1 77:1 86:1 88:1 92:1 95:1 98:1 106:1 117:1 120:1 0 4:1 10:1 14:1 22:1 29:1 34:1 37:1 39:1 42:1 54:1 58:1 62:1 69:1 77:1 86:1 88:1 92:1 95:1 98:1 105:1 117:1 120:1 1 4:1 10:1 19:1 21:1 30:1 34:1 36:1 40:1 48:1 53:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 117:1 120:1 0 4:1 7:1 11:1 22:1 29:1 34:1 37:1 39:1 41:1 54:1 58:1 62:1 69:1 77:1 86:1 88:1 92:1 95:1 98:1 106:1 114:1 120:1 0 3:1 7:1 19:1 22:1 29:1 34:1 37:1 39:1 44:1 54:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 98:1 105:1 117:1 120:1 1 4:1 10:1 19:1 21:1 30:1 34:1 36:1 40:1 41:1 53:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 117:1 124:1 0 3:1 7:1 11:1 22:1 29:1 34:1 37:1 39:1 48:1 54:1 58:1 65:1 66:1 77:1 86:1 88:1 92:1 95:1 98:1 106:1 114:1 120:1 0 4:1 7:1 19:1 22:1 29:1 34:1 37:1 39:1 42:1 54:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 98:1 106:1 117:1 120:1 0 3:1 10:1 19:1 22:1 29:1 34:1 37:1 39:1 44:1 54:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 98:1 105:1 117:1 120:1 0 4:1 10:1 19:1 22:1 29:1 34:1 37:1 39:1 44:1 54:1 58:1 62:1 69:1 77:1 86:1 88:1 92:1 95:1 98:1 106:1 117:1 120:1 0 1:1 9:1 20:1 21:1 23:1 34:1 36:1 39:1 51:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 116:1 122:1 0 3:1 7:1 19:1 22:1 29:1 34:1 37:1 39:1 42:1 54:1 58:1 65:1 66:1 77:1 86:1 88:1 92:1 95:1 98:1 105:1 117:1 120:1 0 3:1 10:1 11:1 22:1 29:1 34:1 37:1 39:1 42:1 54:1 58:1 62:1 69:1 77:1 86:1 88:1 92:1 95:1 98:1 106:1 117:1 120:1 0 3:1 7:1 11:1 22:1 29:1 34:1 37:1 39:1 42:1 54:1 58:1 62:1 69:1 77:1 86:1 88:1 92:1 95:1 98:1 105:1 114:1 120:1 0 4:1 7:1 14:1 22:1 29:1 34:1 37:1 39:1 44:1 54:1 58:1 62:1 66:1 77:1 86:1 88:1 92:1 95:1 98:1 105:1 114:1 120:1 1 3:1 9:1 11:1 21:1 30:1 34:1 36:1 40:1 42:1 53:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 117:1 124:1 0 3:1 7:1 14:1 22:1 29:1 34:1 37:1 39:1 48:1 54:1 58:1 62:1 69:1 77:1 86:1 88:1 92:1 95:1 98:1 106:1 114:1 120:1 0 3:1 7:1 19:1 22:1 29:1 34:1 37:1 39:1 42:1 54:1 58:1 65:1 66:1 77:1 86:1 88:1 92:1 95:1 98:1 105:1 114:1 120:1 1 4:1 10:1 19:1 21:1 30:1 34:1 36:1 40:1 
41:1 53:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 118:1 120:1 0 3:1 10:1 14:1 22:1 29:1 34:1 37:1 39:1 41:1 54:1 58:1 62:1 69:1 77:1 86:1 88:1 92:1 95:1 98:1 106:1 114:1 120:1 0 3:1 10:1 14:1 22:1 29:1 34:1 37:1 39:1 41:1 54:1 58:1 65:1 66:1 77:1 86:1 88:1 92:1 95:1 98:1 105:1 117:1 120:1 0 3:1 9:1 20:1 21:1 23:1 34:1 36:1 39:1 51:1 53:1 60:1 65:1 67:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 117:1 123:1 0 4:1 10:1 19:1 22:1 29:1 34:1 37:1 39:1 42:1 54:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 98:1 106:1 114:1 120:1 0 3:1 10:1 19:1 22:1 29:1 34:1 37:1 39:1 48:1 54:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 98:1 106:1 114:1 120:1 0 3:1 7:1 14:1 22:1 29:1 34:1 37:1 39:1 41:1 54:1 58:1 62:1 69:1 77:1 86:1 88:1 92:1 95:1 98:1 105:1 114:1 120:1 0 4:1 10:1 19:1 22:1 29:1 34:1 37:1 39:1 44:1 54:1 58:1 65:1 66:1 77:1 86:1 88:1 92:1 95:1 98:1 106:1 117:1 120:1 0 3:1 7:1 11:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 119:1 126:1 0 4:1 10:1 14:1 22:1 29:1 34:1 37:1 39:1 41:1 54:1 58:1 65:1 66:1 77:1 86:1 88:1 92:1 95:1 98:1 106:1 114:1 120:1 0 4:1 10:1 14:1 22:1 29:1 34:1 37:1 39:1 44:1 54:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 98:1 106:1 117:1 120:1 0 4:1 7:1 11:1 22:1 29:1 34:1 37:1 39:1 48:1 54:1 58:1 62:1 69:1 77:1 86:1 88:1 92:1 95:1 98:1 106:1 117:1 120:1 0 4:1 10:1 19:1 22:1 29:1 34:1 37:1 39:1 41:1 54:1 58:1 65:1 66:1 77:1 86:1 88:1 92:1 95:1 98:1 106:1 117:1 120:1 0 4:1 10:1 19:1 22:1 29:1 34:1 37:1 39:1 44:1 54:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 98:1 106:1 114:1 120:1 0 4:1 10:1 14:1 22:1 29:1 34:1 37:1 39:1 48:1 54:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 98:1 106:1 117:1 120:1 0 4:1 7:1 19:1 22:1 29:1 34:1 37:1 39:1 42:1 54:1 58:1 62:1 66:1 77:1 86:1 88:1 92:1 95:1 98:1 106:1 117:1 120:1 0 4:1 10:1 11:1 22:1 29:1 34:1 37:1 39:1 48:1 54:1 58:1 65:1 66:1 77:1 86:1 88:1 92:1 95:1 98:1 106:1 114:1 120:1 1 4:1 10:1 19:1 21:1 30:1 34:1 36:1 40:1 41:1 53:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 117:1 120:1 0 3:1 7:1 14:1 22:1 29:1 34:1 37:1 39:1 42:1 54:1 58:1 62:1 69:1 77:1 86:1 88:1 92:1 95:1 98:1 106:1 114:1 120:1 0 1:1 9:1 20:1 21:1 24:1 34:1 36:1 39:1 41:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 116:1 120:1 0 4:1 7:1 11:1 22:1 29:1 34:1 37:1 39:1 41:1 54:1 58:1 62:1 66:1 77:1 86:1 88:1 92:1 95:1 98:1 106:1 117:1 120:1 0 4:1 10:1 14:1 22:1 29:1 34:1 37:1 39:1 41:1 54:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 98:1 106:1 117:1 120:1 0 3:1 10:1 19:1 22:1 29:1 34:1 37:1 39:1 48:1 54:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 98:1 105:1 117:1 120:1 0 4:1 7:1 11:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 77:1 84:1 88:1 92:1 95:1 102:1 106:1 119:1 126:1 1 4:1 9:1 19:1 21:1 30:1 34:1 36:1 40:1 41:1 53:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 117:1 124:1 0 3:1 10:1 11:1 22:1 29:1 34:1 37:1 39:1 41:1 54:1 58:1 62:1 69:1 77:1 86:1 88:1 92:1 95:1 98:1 106:1 114:1 120:1 0 4:1 10:1 11:1 22:1 29:1 34:1 37:1 39:1 42:1 54:1 58:1 65:1 66:1 77:1 86:1 88:1 92:1 95:1 98:1 105:1 114:1 120:1 0 1:1 10:1 20:1 21:1 24:1 34:1 36:1 39:1 45:1 53:1 56:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 116:1 120:1 0 4:1 10:1 14:1 22:1 29:1 34:1 37:1 39:1 41:1 54:1 58:1 62:1 66:1 77:1 86:1 88:1 92:1 95:1 98:1 106:1 114:1 120:1 0 3:1 10:1 14:1 22:1 29:1 34:1 37:1 39:1 41:1 54:1 58:1 65:1 66:1 77:1 86:1 88:1 92:1 95:1 98:1 106:1 114:1 120:1 0 4:1 7:1 14:1 22:1 29:1 34:1 37:1 39:1 41:1 54:1 58:1 62:1 66:1 77:1 86:1 88:1 92:1 95:1 98:1 105:1 117:1 120:1 0 3:1 7:1 14:1 22:1 29:1 34:1 37:1 39:1 41:1 54:1 58:1 65:1 69:1 
[Embedded data file in LIBSVM sparse text format: each row is a binary label (0 or 1) followed by space-separated `feature_index:value` pairs. All values in this file are 1 and feature indices run up to 126, matching the one-hot encoded agaricus (mushroom) demo dataset bundled with XGBoost. The original one-row-per-line layout was lost in extraction, so the rows are not individually recoverable here and the bulk of the data is elided.]
21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 77:1 82:1 88:1 92:1 95:1 102:1 106:1 119:1 126:1 0 3:1 7:1 14:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 75:1 82:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 0 3:1 7:1 18:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 75:1 86:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 0 3:1 9:1 18:1 21:1 29:1 34:1 36:1 39:1 49:1 54:1 55:1 65:1 69:1 77:1 82:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 0 3:1 9:1 11:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 75:1 86:1 88:1 92:1 95:1 102:1 106:1 119:1 126:1 0 3:1 9:1 18:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 0 4:1 9:1 11:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 77:1 82:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 0 4:1 9:1 11:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 75:1 86:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 0 3:1 9:1 11:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 73:1 82:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 1 4:1 10:1 11:1 21:1 30:1 34:1 36:1 40:1 42:1 53:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 118:1 124:1 0 4:1 7:1 18:1 21:1 29:1 34:1 36:1 39:1 49:1 54:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 119:1 126:1 1 3:1 10:1 19:1 21:1 30:1 34:1 36:1 40:1 51:1 53:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 117:1 120:1 0 3:1 7:1 18:1 21:1 29:1 34:1 36:1 39:1 49:1 54:1 55:1 65:1 69:1 73:1 84:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 0 4:1 7:1 11:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 73:1 82:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 0 3:1 9:1 11:1 21:1 29:1 34:1 36:1 39:1 49:1 54:1 55:1 65:1 69:1 75:1 82:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 0 3:1 7:1 14:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 73:1 84:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 0 3:1 9:1 11:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 75:1 82:1 88:1 92:1 95:1 102:1 106:1 119:1 126:1 0 4:1 7:1 11:1 21:1 29:1 34:1 36:1 39:1 49:1 54:1 55:1 65:1 69:1 77:1 82:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 0 4:1 7:1 11:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 75:1 84:1 88:1 92:1 95:1 102:1 106:1 119:1 126:1 0 3:1 7:1 18:1 21:1 29:1 34:1 36:1 39:1 49:1 54:1 55:1 65:1 69:1 77:1 82:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 0 3:1 7:1 11:1 21:1 29:1 34:1 36:1 39:1 49:1 54:1 55:1 65:1 69:1 77:1 82:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 0 4:1 7:1 14:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 73:1 84:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 0 3:1 9:1 11:1 21:1 29:1 34:1 36:1 39:1 49:1 54:1 55:1 65:1 69:1 77:1 84:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 0 3:1 7:1 18:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 77:1 84:1 88:1 92:1 95:1 102:1 106:1 119:1 126:1 0 3:1 9:1 18:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 75:1 86:1 88:1 92:1 95:1 102:1 106:1 119:1 126:1 0 3:1 9:1 14:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 73:1 82:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 0 3:1 9:1 14:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 73:1 84:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 0 4:1 7:1 14:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 73:1 82:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 0 3:1 9:1 18:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 77:1 82:1 88:1 92:1 95:1 102:1 106:1 119:1 126:1 0 4:1 7:1 11:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 73:1 86:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 0 3:1 9:1 18:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 1 3:1 7:1 14:1 22:1 27:1 34:1 
36:1 39:1 48:1 53:1 55:1 64:1 68:1 70:1 84:1 88:1 92:1 95:1 100:1 108:1 119:1 126:1 0 3:1 9:1 14:1 21:1 29:1 34:1 36:1 39:1 49:1 54:1 55:1 65:1 69:1 75:1 86:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 0 3:1 7:1 18:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 75:1 82:1 88:1 92:1 95:1 102:1 106:1 119:1 126:1 0 3:1 9:1 11:1 21:1 29:1 34:1 36:1 39:1 49:1 54:1 55:1 65:1 69:1 73:1 84:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 0 4:1 7:1 11:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 73:1 86:1 88:1 92:1 95:1 102:1 106:1 119:1 126:1 0 4:1 7:1 11:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 73:1 82:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 0 4:1 9:1 11:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 77:1 82:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 0 3:1 7:1 14:1 21:1 29:1 34:1 36:1 39:1 49:1 54:1 55:1 65:1 69:1 73:1 86:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 0 3:1 7:1 11:1 21:1 29:1 34:1 36:1 39:1 49:1 54:1 55:1 65:1 69:1 75:1 86:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 0 3:1 7:1 14:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 77:1 82:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 0 3:1 10:1 14:1 22:1 29:1 34:1 37:1 39:1 41:1 54:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 98:1 106:1 117:1 120:1 0 3:1 9:1 18:1 21:1 29:1 34:1 36:1 39:1 49:1 54:1 55:1 65:1 69:1 75:1 82:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 0 4:1 7:1 14:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 75:1 82:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 0 3:1 9:1 18:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 73:1 86:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 0 4:1 7:1 14:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 73:1 84:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 0 3:1 7:1 14:1 21:1 29:1 34:1 36:1 39:1 49:1 54:1 55:1 65:1 69:1 77:1 84:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 0 3:1 7:1 14:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 75:1 84:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 0 4:1 7:1 14:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 77:1 84:1 88:1 92:1 95:1 102:1 106:1 119:1 126:1 0 3:1 10:1 19:1 22:1 29:1 34:1 37:1 39:1 48:1 54:1 58:1 65:1 66:1 77:1 86:1 88:1 92:1 95:1 98:1 106:1 114:1 120:1 0 3:1 7:1 11:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 73:1 86:1 88:1 92:1 95:1 102:1 106:1 119:1 126:1 0 4:1 7:1 11:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 75:1 84:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 0 4:1 7:1 14:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 73:1 82:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 0 3:1 9:1 14:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 0 3:1 7:1 18:1 21:1 29:1 34:1 36:1 39:1 49:1 54:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 0 3:1 9:1 18:1 21:1 29:1 34:1 36:1 39:1 49:1 54:1 55:1 65:1 69:1 73:1 84:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 0 3:1 7:1 11:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 73:1 84:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 0 3:1 9:1 14:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 75:1 84:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 1 3:1 10:1 16:1 22:1 25:1 34:1 37:1 40:1 45:1 53:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 117:1 126:1 0 4:1 7:1 11:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 75:1 86:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 0 3:1 9:1 11:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 77:1 82:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 0 3:1 7:1 14:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 73:1 86:1 88:1 92:1 95:1 102:1 106:1 119:1 126:1 0 3:1 9:1 11:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 
55:1 65:1 69:1 75:1 86:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 0 3:1 9:1 11:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 75:1 86:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 0 4:1 7:1 14:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 77:1 84:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 0 3:1 9:1 18:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 0 4:1 7:1 11:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 75:1 82:1 88:1 92:1 95:1 102:1 106:1 119:1 126:1 0 3:1 7:1 18:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 75:1 84:1 88:1 92:1 95:1 102:1 106:1 119:1 126:1 0 3:1 7:1 19:1 22:1 29:1 34:1 37:1 39:1 41:1 54:1 58:1 62:1 66:1 77:1 86:1 88:1 92:1 95:1 98:1 106:1 117:1 120:1 0 3:1 9:1 14:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 73:1 82:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 0 3:1 7:1 18:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 77:1 84:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 0 3:1 7:1 14:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 119:1 126:1 0 4:1 9:1 18:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 77:1 82:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 0 4:1 9:1 14:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 75:1 82:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 0 3:1 9:1 18:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 75:1 86:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 0 3:1 9:1 14:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 77:1 82:1 88:1 92:1 95:1 102:1 106:1 119:1 126:1 0 4:1 7:1 14:1 22:1 29:1 34:1 37:1 39:1 44:1 54:1 58:1 65:1 66:1 77:1 86:1 88:1 92:1 95:1 98:1 105:1 117:1 120:1 1 3:1 7:1 14:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 71:1 84:1 88:1 92:1 95:1 100:1 108:1 119:1 123:1 0 3:1 7:1 18:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 73:1 86:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 0 3:1 9:1 18:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 77:1 82:1 88:1 92:1 95:1 102:1 106:1 119:1 126:1 1 4:1 10:1 19:1 21:1 30:1 34:1 36:1 40:1 51:1 53:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 118:1 120:1 0 4:1 9:1 11:1 21:1 29:1 34:1 36:1 39:1 49:1 54:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 0 3:1 7:1 18:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 75:1 86:1 88:1 92:1 95:1 102:1 106:1 119:1 126:1 0 3:1 9:1 18:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 75:1 82:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 0 3:1 7:1 14:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 73:1 86:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 0 3:1 9:1 14:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 73:1 84:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 0 4:1 7:1 18:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 73:1 86:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 0 4:1 7:1 11:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 73:1 84:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 0 4:1 7:1 14:1 22:1 29:1 34:1 37:1 39:1 44:1 54:1 58:1 62:1 66:1 77:1 86:1 88:1 92:1 95:1 98:1 106:1 117:1 120:1 0 4:1 7:1 14:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 75:1 86:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 0 3:1 9:1 11:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 0 4:1 7:1 14:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 75:1 84:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 0 4:1 9:1 11:1 21:1 29:1 34:1 36:1 39:1 49:1 54:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 119:1 126:1 0 3:1 7:1 14:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 75:1 
82:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 0 3:1 9:1 18:1 21:1 29:1 34:1 36:1 39:1 49:1 54:1 55:1 65:1 69:1 75:1 86:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 0 3:1 9:1 11:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 75:1 84:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 0 4:1 10:1 14:1 22:1 29:1 34:1 37:1 39:1 42:1 54:1 58:1 62:1 69:1 77:1 86:1 88:1 92:1 95:1 98:1 106:1 114:1 120:1 0 4:1 7:1 11:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 77:1 82:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 0 3:1 9:1 18:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 75:1 84:1 88:1 92:1 95:1 102:1 106:1 119:1 126:1 0 3:1 9:1 11:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 75:1 86:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 0 4:1 10:1 19:1 22:1 29:1 34:1 37:1 39:1 44:1 54:1 58:1 65:1 66:1 77:1 86:1 88:1 92:1 95:1 98:1 105:1 114:1 120:1 0 3:1 9:1 11:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 73:1 82:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 0 3:1 9:1 18:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 0 3:1 7:1 14:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 75:1 82:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 0 4:1 7:1 14:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 75:1 86:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 0 4:1 7:1 11:1 22:1 29:1 34:1 37:1 39:1 42:1 54:1 58:1 65:1 66:1 77:1 86:1 88:1 92:1 95:1 98:1 105:1 117:1 120:1 0 4:1 7:1 14:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 77:1 84:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 0 3:1 9:1 18:1 21:1 29:1 34:1 36:1 39:1 49:1 54:1 55:1 65:1 69:1 73:1 86:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 0 4:1 7:1 14:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 77:1 82:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 0 3:1 9:1 11:1 21:1 29:1 34:1 36:1 39:1 49:1 54:1 55:1 65:1 69:1 77:1 84:1 88:1 92:1 95:1 102:1 106:1 119:1 126:1 0 3:1 7:1 18:1 21:1 29:1 34:1 36:1 39:1 49:1 54:1 55:1 65:1 69:1 75:1 82:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 0 3:1 7:1 11:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 77:1 84:1 88:1 92:1 95:1 102:1 106:1 119:1 126:1 0 3:1 7:1 14:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 0 4:1 7:1 14:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 73:1 86:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 0 3:1 9:1 11:1 21:1 29:1 34:1 36:1 39:1 49:1 54:1 55:1 65:1 69:1 75:1 82:1 88:1 92:1 95:1 102:1 106:1 119:1 126:1 0 3:1 7:1 18:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 77:1 82:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 0 4:1 7:1 14:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 77:1 82:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 0 3:1 9:1 11:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 77:1 84:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 0 4:1 9:1 14:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 73:1 86:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 0 4:1 7:1 11:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 77:1 82:1 88:1 92:1 95:1 102:1 106:1 119:1 126:1 1 4:1 10:1 11:1 21:1 30:1 34:1 36:1 40:1 48:1 53:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 117:1 124:1 0 3:1 9:1 11:1 21:1 29:1 34:1 36:1 39:1 49:1 54:1 55:1 65:1 69:1 77:1 82:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 0 3:1 9:1 14:1 21:1 29:1 34:1 36:1 39:1 49:1 54:1 55:1 65:1 69:1 73:1 82:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 0 3:1 9:1 18:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 73:1 82:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 0 3:1 9:1 18:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 77:1 82:1 88:1 92:1 95:1 
102:1 105:1 118:1 126:1 0 4:1 10:1 19:1 22:1 29:1 34:1 37:1 39:1 42:1 54:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 98:1 105:1 117:1 120:1 0 3:1 7:1 11:1 22:1 29:1 34:1 37:1 39:1 48:1 54:1 58:1 65:1 66:1 77:1 86:1 88:1 92:1 95:1 98:1 106:1 117:1 120:1 0 3:1 9:1 18:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 77:1 84:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 0 3:1 9:1 11:1 21:1 29:1 34:1 36:1 39:1 49:1 54:1 55:1 65:1 69:1 73:1 84:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 0 3:1 7:1 14:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 75:1 86:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 0 3:1 7:1 14:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 75:1 86:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 0 3:1 9:1 14:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 75:1 82:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 0 3:1 9:1 14:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 75:1 84:1 88:1 92:1 95:1 102:1 106:1 119:1 126:1 0 4:1 9:1 14:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 73:1 82:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 0 3:1 7:1 18:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 0 3:1 7:1 11:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 75:1 86:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 0 3:1 7:1 11:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 75:1 84:1 88:1 92:1 95:1 102:1 106:1 119:1 126:1 0 4:1 7:1 11:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 73:1 82:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 0 4:1 7:1 11:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 75:1 84:1 88:1 92:1 95:1 102:1 106:1 119:1 126:1 0 3:1 7:1 18:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 77:1 82:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 0 4:1 9:1 11:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 75:1 82:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 0 4:1 9:1 11:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 77:1 84:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 0 3:1 9:1 14:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 0 4:1 7:1 11:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 75:1 86:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 0 3:1 9:1 18:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 75:1 84:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 0 3:1 9:1 11:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 77:1 82:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 0 3:1 7:1 14:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 77:1 84:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 0 3:1 7:1 18:1 21:1 29:1 34:1 36:1 39:1 49:1 54:1 55:1 65:1 69:1 77:1 82:1 88:1 92:1 95:1 102:1 106:1 119:1 126:1 0 3:1 7:1 11:1 21:1 29:1 34:1 36:1 39:1 49:1 54:1 55:1 65:1 69:1 75:1 84:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 0 4:1 7:1 11:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 73:1 86:1 88:1 92:1 95:1 102:1 106:1 119:1 126:1 0 3:1 7:1 11:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 75:1 86:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 0 3:1 9:1 18:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 75:1 86:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 0 4:1 7:1 11:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 75:1 82:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 0 3:1 9:1 11:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 73:1 84:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 0 4:1 7:1 11:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 77:1 84:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 0 3:1 9:1 18:1 21:1 29:1 34:1 36:1 39:1 49:1 54:1 55:1 65:1 69:1 77:1 82:1 88:1 92:1 95:1 102:1 106:1 119:1 
126:1 0 3:1 9:1 18:1 21:1 29:1 34:1 36:1 39:1 49:1 54:1 55:1 65:1 69:1 75:1 82:1 88:1 92:1 95:1 102:1 106:1 119:1 126:1 0 3:1 7:1 14:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 73:1 82:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 0 3:1 7:1 14:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 77:1 84:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 0 4:1 7:1 18:1 21:1 29:1 34:1 36:1 39:1 49:1 54:1 55:1 65:1 69:1 77:1 84:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 0 3:1 7:1 11:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 77:1 82:1 88:1 92:1 95:1 102:1 106:1 119:1 126:1 0 3:1 9:1 14:1 21:1 29:1 34:1 36:1 39:1 49:1 54:1 55:1 65:1 69:1 75:1 86:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 0 3:1 9:1 14:1 21:1 29:1 34:1 36:1 39:1 49:1 54:1 55:1 65:1 69:1 77:1 84:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 0 3:1 7:1 18:1 21:1 29:1 34:1 36:1 39:1 49:1 54:1 55:1 65:1 69:1 75:1 86:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 0 4:1 7:1 11:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 0 3:1 9:1 14:1 21:1 29:1 34:1 36:1 39:1 49:1 54:1 55:1 65:1 69:1 73:1 84:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 0 3:1 7:1 19:1 22:1 29:1 34:1 37:1 39:1 44:1 54:1 58:1 65:1 66:1 77:1 86:1 88:1 92:1 95:1 98:1 106:1 117:1 120:1 0 4:1 9:1 14:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 73:1 82:1 88:1 92:1 95:1 102:1 106:1 119:1 126:1 0 4:1 10:1 14:1 22:1 29:1 34:1 37:1 39:1 42:1 54:1 58:1 62:1 66:1 77:1 86:1 88:1 92:1 95:1 98:1 106:1 117:1 120:1 0 4:1 7:1 11:1 21:1 29:1 34:1 36:1 39:1 49:1 54:1 55:1 65:1 69:1 73:1 86:1 88:1 92:1 95:1 102:1 106:1 119:1 126:1 0 3:1 9:1 14:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 77:1 82:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 0 4:1 7:1 11:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 0 3:1 7:1 14:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 73:1 82:1 88:1 92:1 95:1 102:1 106:1 119:1 126:1 0 3:1 7:1 18:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 75:1 82:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 0 4:1 7:1 14:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 73:1 86:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 0 3:1 9:1 18:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 75:1 86:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 0 3:1 9:1 11:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 75:1 82:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 0 3:1 9:1 11:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 119:1 126:1 0 4:1 7:1 11:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 73:1 86:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 0 3:1 9:1 14:1 21:1 29:1 34:1 36:1 39:1 49:1 54:1 55:1 65:1 69:1 73:1 84:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 0 3:1 9:1 11:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 77:1 84:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 0 3:1 7:1 18:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 77:1 82:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 0 4:1 7:1 11:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 75:1 82:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 0 4:1 10:1 19:1 22:1 29:1 34:1 37:1 39:1 41:1 54:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 98:1 106:1 114:1 120:1 0 3:1 9:1 11:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 73:1 86:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 0 3:1 7:1 11:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 75:1 86:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 0 3:1 7:1 18:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 73:1 82:1 88:1 92:1 95:1 102:1 106:1 119:1 126:1 0 4:1 7:1 11:1 
21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 73:1 84:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 0 3:1 7:1 11:1 21:1 29:1 34:1 36:1 39:1 49:1 54:1 55:1 65:1 69:1 77:1 84:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 0 3:1 9:1 11:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 77:1 82:1 88:1 92:1 95:1 102:1 106:1 119:1 126:1 1 4:1 10:1 19:1 21:1 30:1 34:1 36:1 40:1 51:1 53:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 118:1 124:1 0 3:1 7:1 14:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 73:1 82:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 0 4:1 10:1 19:1 22:1 29:1 34:1 37:1 39:1 41:1 54:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 98:1 105:1 117:1 120:1 0 3:1 7:1 14:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 75:1 82:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 0 4:1 7:1 11:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 75:1 82:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 0 3:1 7:1 18:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 73:1 82:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 0 3:1 9:1 14:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 75:1 82:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 0 3:1 7:1 11:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 75:1 84:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 1 3:1 7:1 14:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 70:1 79:1 88:1 92:1 95:1 100:1 108:1 119:1 126:1 0 3:1 9:1 18:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 73:1 82:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 0 3:1 7:1 11:1 22:1 29:1 34:1 37:1 39:1 44:1 54:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 98:1 105:1 114:1 120:1 0 3:1 9:1 14:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 75:1 84:1 88:1 92:1 95:1 102:1 106:1 119:1 126:1 0 3:1 7:1 11:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 73:1 84:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 0 3:1 9:1 14:1 21:1 29:1 34:1 36:1 39:1 49:1 54:1 55:1 65:1 69:1 75:1 86:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 0 3:1 9:1 14:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 75:1 84:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 0 4:1 7:1 11:1 21:1 29:1 34:1 36:1 39:1 49:1 54:1 55:1 65:1 69:1 75:1 84:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 0 4:1 7:1 11:1 21:1 29:1 34:1 36:1 39:1 49:1 54:1 55:1 65:1 69:1 73:1 86:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 0 4:1 7:1 14:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 73:1 84:1 88:1 92:1 95:1 102:1 106:1 119:1 126:1 0 3:1 9:1 18:1 21:1 29:1 34:1 36:1 39:1 49:1 54:1 55:1 65:1 69:1 73:1 82:1 88:1 92:1 95:1 102:1 106:1 119:1 126:1 0 3:1 7:1 18:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 1 3:1 10:1 19:1 21:1 30:1 34:1 36:1 40:1 42:1 53:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 118:1 120:1 0 4:1 7:1 14:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 0 3:1 7:1 14:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 73:1 84:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 0 4:1 9:1 14:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 75:1 84:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 0 3:1 9:1 11:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 0 3:1 7:1 18:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 119:1 126:1 0 3:1 9:1 18:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 73:1 86:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 0 4:1 7:1 14:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 75:1 82:1 88:1 92:1 95:1 102:1 106:1 119:1 126:1 0 4:1 7:1 11:1 21:1 29:1 34:1 36:1 
39:1 48:1 54:1 55:1 65:1 69:1 77:1 84:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 0 3:1 9:1 11:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 75:1 84:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 0 3:1 9:1 14:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 75:1 84:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 0 3:1 7:1 14:1 21:1 29:1 34:1 36:1 39:1 49:1 54:1 55:1 65:1 69:1 75:1 84:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 0 3:1 9:1 11:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 75:1 84:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 0 3:1 9:1 11:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 75:1 82:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 0 4:1 7:1 11:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 75:1 82:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 0 3:1 7:1 11:1 21:1 29:1 34:1 36:1 39:1 49:1 54:1 55:1 65:1 69:1 75:1 82:1 88:1 92:1 95:1 102:1 106:1 119:1 126:1 0 3:1 7:1 11:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 73:1 86:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 0 3:1 9:1 11:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 73:1 84:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 0 4:1 7:1 14:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 75:1 84:1 88:1 92:1 95:1 102:1 106:1 119:1 126:1 0 4:1 7:1 11:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 73:1 82:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 1 3:1 10:1 16:1 22:1 25:1 34:1 36:1 40:1 45:1 53:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 0 3:1 7:1 18:1 21:1 29:1 34:1 36:1 39:1 49:1 54:1 55:1 65:1 69:1 75:1 84:1 88:1 92:1 95:1 102:1 106:1 119:1 126:1 0 4:1 7:1 11:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 75:1 82:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 0 3:1 7:1 11:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 75:1 84:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 0 4:1 7:1 18:1 21:1 29:1 34:1 36:1 39:1 49:1 54:1 55:1 65:1 69:1 75:1 82:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 0 3:1 9:1 18:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 77:1 82:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 0 3:1 7:1 14:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 77:1 82:1 88:1 92:1 95:1 102:1 106:1 119:1 126:1 0 3:1 7:1 14:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 75:1 86:1 88:1 92:1 95:1 102:1 106:1 119:1 126:1 0 3:1 7:1 14:1 21:1 29:1 34:1 36:1 39:1 49:1 54:1 55:1 65:1 69:1 75:1 82:1 88:1 92:1 95:1 102:1 106:1 119:1 126:1 0 3:1 7:1 18:1 21:1 29:1 34:1 36:1 39:1 49:1 54:1 55:1 65:1 69:1 77:1 82:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 0 4:1 9:1 11:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 75:1 82:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 0 4:1 9:1 18:1 21:1 29:1 34:1 36:1 39:1 49:1 54:1 55:1 65:1 69:1 73:1 86:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 0 3:1 9:1 14:1 21:1 29:1 34:1 36:1 39:1 49:1 54:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 119:1 126:1 0 3:1 9:1 14:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 75:1 84:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 0 3:1 7:1 14:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 75:1 86:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 1 3:1 7:1 14:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 71:1 80:1 88:1 92:1 95:1 100:1 108:1 119:1 120:1 0 3:1 9:1 11:1 21:1 29:1 34:1 36:1 39:1 49:1 54:1 55:1 65:1 69:1 77:1 84:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 0 3:1 9:1 18:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 73:1 84:1 88:1 92:1 95:1 102:1 106:1 119:1 126:1 0 3:1 7:1 11:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 75:1 82:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 0 4:1 7:1 11:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 
65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 119:1 126:1 0 4:1 7:1 11:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 75:1 86:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 0 4:1 7:1 14:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 0 4:1 7:1 14:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 75:1 86:1 88:1 92:1 95:1 102:1 106:1 119:1 126:1 0 3:1 9:1 14:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 75:1 84:1 88:1 92:1 95:1 102:1 106:1 119:1 126:1 0 3:1 9:1 14:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 0 3:1 7:1 18:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 77:1 84:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 0 4:1 7:1 11:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 75:1 84:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 0 3:1 9:1 18:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 73:1 86:1 88:1 92:1 95:1 102:1 106:1 119:1 126:1 0 3:1 7:1 14:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 0 3:1 7:1 11:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 73:1 82:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 0 3:1 9:1 18:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 77:1 84:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 0 4:1 7:1 14:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 0 3:1 9:1 14:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 73:1 86:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 0 3:1 9:1 14:1 21:1 29:1 34:1 36:1 39:1 49:1 54:1 55:1 65:1 69:1 77:1 84:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 0 4:1 7:1 11:1 21:1 29:1 34:1 36:1 39:1 49:1 54:1 55:1 65:1 69:1 75:1 86:1 88:1 92:1 95:1 102:1 106:1 119:1 126:1 0 4:1 7:1 11:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 73:1 84:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 0 3:1 9:1 18:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 75:1 86:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 0 3:1 7:1 11:1 21:1 29:1 34:1 36:1 39:1 49:1 54:1 55:1 65:1 69:1 75:1 84:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 0 3:1 9:1 14:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 73:1 84:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 0 4:1 7:1 14:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 77:1 82:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 0 3:1 7:1 14:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 77:1 82:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 0 3:1 7:1 11:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 73:1 82:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 0 3:1 9:1 14:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 75:1 86:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 0 3:1 9:1 14:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 77:1 84:1 88:1 92:1 95:1 102:1 106:1 119:1 126:1 0 3:1 9:1 18:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 77:1 84:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 0 3:1 9:1 11:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 75:1 82:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 0 3:1 7:1 14:1 21:1 29:1 34:1 36:1 39:1 49:1 54:1 55:1 65:1 69:1 73:1 84:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 0 3:1 7:1 18:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 77:1 82:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 1 3:1 10:1 16:1 22:1 25:1 34:1 37:1 40:1 49:1 53:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 117:1 126:1 0 3:1 9:1 11:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 75:1 84:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 0 3:1 9:1 11:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 77:1 86:1 
88:1 92:1 95:1 102:1 106:1 119:1 126:1 0 3:1 9:1 18:1 21:1 29:1 34:1 36:1 39:1 49:1 54:1 55:1 65:1 69:1 75:1 82:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 0 3:1 10:1 19:1 22:1 29:1 34:1 37:1 39:1 41:1 54:1 58:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 98:1 106:1 117:1 120:1 0 4:1 7:1 14:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 73:1 84:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 0 3:1 9:1 14:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 73:1 84:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 0 3:1 9:1 14:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 75:1 84:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 0 3:1 7:1 18:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 73:1 86:1 88:1 92:1 95:1 102:1 106:1 119:1 126:1 0 3:1 9:1 14:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 75:1 86:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 0 3:1 9:1 14:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 77:1 84:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 0 3:1 7:1 18:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 73:1 84:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 0 4:1 7:1 11:1 21:1 29:1 34:1 36:1 39:1 49:1 54:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 119:1 126:1 0 3:1 7:1 18:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 73:1 86:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 0 3:1 9:1 14:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 77:1 82:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 0 4:1 10:1 19:1 22:1 29:1 34:1 37:1 39:1 44:1 54:1 58:1 62:1 66:1 77:1 86:1 88:1 92:1 95:1 98:1 105:1 117:1 120:1 0 4:1 7:1 11:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 75:1 82:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 0 3:1 9:1 18:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 77:1 82:1 88:1 92:1 95:1 102:1 106:1 119:1 126:1 0 3:1 9:1 18:1 21:1 29:1 34:1 36:1 39:1 49:1 54:1 55:1 65:1 69:1 75:1 84:1 88:1 92:1 95:1 102:1 106:1 119:1 126:1 0 3:1 9:1 18:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 75:1 86:1 88:1 92:1 95:1 102:1 106:1 119:1 126:1 0 4:1 7:1 11:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 77:1 84:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 0 4:1 7:1 11:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 77:1 82:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 0 4:1 7:1 18:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 77:1 84:1 88:1 92:1 95:1 102:1 106:1 119:1 126:1 0 4:1 7:1 11:1 21:1 29:1 34:1 36:1 39:1 49:1 54:1 55:1 65:1 69:1 75:1 84:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 0 4:1 7:1 14:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 75:1 84:1 88:1 92:1 95:1 102:1 106:1 119:1 126:1 0 4:1 7:1 11:1 21:1 29:1 34:1 36:1 39:1 49:1 54:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 0 3:1 9:1 11:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 75:1 82:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 0 3:1 7:1 18:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 73:1 84:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 0 4:1 10:1 19:1 22:1 29:1 34:1 37:1 39:1 42:1 54:1 58:1 62:1 69:1 77:1 86:1 88:1 92:1 95:1 98:1 106:1 117:1 120:1 0 3:1 9:1 14:1 21:1 29:1 34:1 36:1 39:1 49:1 54:1 55:1 65:1 69:1 75:1 84:1 88:1 92:1 95:1 102:1 106:1 119:1 126:1 0 4:1 7:1 11:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 77:1 82:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 0 4:1 7:1 11:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 77:1 82:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 0 3:1 9:1 14:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 77:1 84:1 88:1 92:1 95:1 102:1 106:1 119:1 126:1 0 3:1 9:1 11:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 
105:1 118:1 126:1 0 3:1 7:1 14:1 21:1 29:1 34:1 36:1 39:1 49:1 54:1 55:1 65:1 69:1 75:1 86:1 88:1 92:1 95:1 102:1 106:1 119:1 126:1 0 3:1 9:1 11:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 0 3:1 9:1 11:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 77:1 84:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 0 4:1 7:1 11:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 75:1 86:1 88:1 92:1 95:1 102:1 106:1 119:1 126:1 0 3:1 9:1 14:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 77:1 84:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 0 3:1 9:1 18:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 75:1 84:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 1 3:1 7:1 14:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 70:1 80:1 88:1 92:1 95:1 100:1 108:1 119:1 123:1 0 4:1 7:1 14:1 21:1 29:1 34:1 36:1 39:1 49:1 54:1 55:1 65:1 69:1 75:1 84:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 0 3:1 9:1 18:1 21:1 29:1 34:1 36:1 39:1 49:1 54:1 55:1 65:1 69:1 73:1 82:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 0 3:1 9:1 18:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 1 3:1 7:1 14:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 71:1 79:1 88:1 92:1 95:1 100:1 108:1 119:1 126:1 1 3:1 7:1 16:1 22:1 25:1 34:1 37:1 40:1 42:1 53:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 117:1 126:1 0 3:1 7:1 14:1 21:1 29:1 34:1 36:1 39:1 49:1 54:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 0 4:1 7:1 14:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 75:1 82:1 88:1 92:1 95:1 102:1 106:1 119:1 126:1 0 3:1 9:1 14:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 77:1 84:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 0 4:1 7:1 14:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 73:1 82:1 88:1 92:1 95:1 102:1 106:1 119:1 126:1 0 3:1 7:1 18:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 73:1 84:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 0 3:1 9:1 11:1 21:1 29:1 34:1 36:1 39:1 49:1 54:1 55:1 65:1 69:1 75:1 82:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 0 4:1 7:1 11:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 77:1 82:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 0 4:1 7:1 11:1 21:1 29:1 34:1 36:1 39:1 49:1 54:1 55:1 65:1 69:1 75:1 86:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 0 4:1 7:1 14:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 75:1 82:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 0 3:1 7:1 14:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 77:1 82:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 0 4:1 7:1 11:1 21:1 29:1 34:1 36:1 39:1 49:1 54:1 55:1 65:1 69:1 75:1 82:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 0 3:1 9:1 11:1 21:1 29:1 34:1 36:1 39:1 49:1 54:1 55:1 65:1 69:1 75:1 82:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 1 3:1 7:1 19:1 22:1 25:1 34:1 36:1 40:1 45:1 53:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 0 4:1 7:1 11:1 21:1 29:1 34:1 36:1 39:1 49:1 54:1 55:1 65:1 69:1 77:1 84:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 0 3:1 9:1 18:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 75:1 86:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 0 3:1 9:1 11:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 73:1 82:1 88:1 92:1 95:1 102:1 106:1 119:1 126:1 1 3:1 7:1 14:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 71:1 84:1 88:1 92:1 95:1 100:1 108:1 119:1 126:1 0 3:1 9:1 18:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 77:1 84:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 0 4:1 7:1 14:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 73:1 82:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 0 
4:1 9:1 18:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 0 3:1 7:1 18:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 75:1 84:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 1 3:1 7:1 14:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 71:1 84:1 88:1 92:1 95:1 100:1 108:1 118:1 120:1 0 3:1 9:1 11:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 73:1 86:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 0 3:1 9:1 14:1 21:1 29:1 34:1 36:1 39:1 49:1 54:1 55:1 65:1 69:1 77:1 82:1 88:1 92:1 95:1 102:1 106:1 119:1 126:1 0 3:1 9:1 11:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 77:1 82:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 0 3:1 7:1 14:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 73:1 82:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 0 3:1 9:1 18:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 75:1 84:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 0 3:1 9:1 18:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 75:1 86:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 0 3:1 9:1 14:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 75:1 82:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 0 4:1 7:1 18:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 77:1 82:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 0 4:1 7:1 11:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 75:1 84:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 0 4:1 7:1 11:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 77:1 82:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 0 3:1 9:1 14:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 75:1 82:1 88:1 92:1 95:1 102:1 106:1 119:1 126:1 0 3:1 7:1 18:1 21:1 29:1 34:1 36:1 39:1 49:1 54:1 55:1 65:1 69:1 75:1 82:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 0 4:1 7:1 11:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 73:1 82:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 0 3:1 9:1 18:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 73:1 84:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 0 3:1 9:1 11:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 75:1 86:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 0 4:1 7:1 11:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 0 4:1 7:1 11:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 75:1 84:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 1 3:1 7:1 14:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 71:1 79:1 88:1 92:1 95:1 100:1 108:1 118:1 123:1 0 3:1 7:1 14:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 0 3:1 9:1 11:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 73:1 82:1 88:1 92:1 95:1 102:1 106:1 119:1 126:1 0 3:1 7:1 14:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 77:1 82:1 88:1 92:1 95:1 102:1 106:1 119:1 126:1 0 3:1 7:1 14:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 75:1 84:1 88:1 92:1 95:1 102:1 106:1 119:1 126:1 0 3:1 9:1 14:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 73:1 86:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 0 4:1 7:1 11:1 21:1 29:1 34:1 36:1 39:1 49:1 54:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 0 3:1 9:1 11:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 75:1 82:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 0 3:1 7:1 18:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 75:1 86:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 0 4:1 7:1 11:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 73:1 86:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 1 3:1 7:1 14:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 75:1 79:1 88:1 92:1 95:1 100:1 108:1 118:1 123:1 0 3:1 7:1 18:1 21:1 
29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 75:1 86:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 0 4:1 7:1 11:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 73:1 82:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 0 3:1 7:1 18:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 77:1 84:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 0 3:1 9:1 18:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 75:1 84:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 0 4:1 7:1 11:1 21:1 29:1 34:1 36:1 39:1 49:1 54:1 55:1 65:1 69:1 77:1 82:1 88:1 92:1 95:1 102:1 106:1 119:1 126:1 0 3:1 7:1 14:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 73:1 82:1 88:1 92:1 95:1 102:1 106:1 119:1 126:1 0 4:1 7:1 14:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 75:1 82:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 0 3:1 9:1 14:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 119:1 126:1 0 3:1 7:1 11:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 119:1 126:1 0 3:1 9:1 18:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 73:1 82:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 0 4:1 7:1 11:1 21:1 29:1 34:1 36:1 39:1 49:1 54:1 55:1 65:1 69:1 73:1 82:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 0 3:1 9:1 18:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 75:1 84:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 0 4:1 7:1 11:1 21:1 29:1 34:1 36:1 39:1 49:1 54:1 55:1 65:1 69:1 75:1 82:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 0 3:1 9:1 18:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 73:1 82:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 0 3:1 7:1 14:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 77:1 82:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 1 3:1 7:1 14:1 22:1 25:1 34:1 36:1 40:1 45:1 53:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 0 4:1 7:1 14:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 75:1 84:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 0 3:1 7:1 11:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 73:1 82:1 88:1 92:1 95:1 102:1 106:1 119:1 126:1 0 4:1 10:1 11:1 22:1 29:1 34:1 37:1 39:1 48:1 54:1 58:1 65:1 66:1 77:1 86:1 88:1 92:1 95:1 98:1 105:1 114:1 120:1 0 3:1 9:1 14:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 75:1 86:1 88:1 92:1 95:1 102:1 106:1 119:1 126:1 0 3:1 9:1 14:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 0 4:1 9:1 18:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 75:1 82:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 0 3:1 9:1 14:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 75:1 82:1 88:1 92:1 95:1 102:1 106:1 119:1 126:1 0 4:1 7:1 14:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 75:1 82:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 0 4:1 7:1 11:1 21:1 29:1 34:1 36:1 39:1 49:1 54:1 55:1 65:1 69:1 73:1 82:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 0 3:1 9:1 18:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 0 4:1 9:1 11:1 21:1 29:1 34:1 36:1 39:1 49:1 54:1 55:1 65:1 69:1 73:1 82:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 0 3:1 9:1 11:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 75:1 84:1 88:1 92:1 95:1 102:1 106:1 119:1 126:1 0 4:1 7:1 19:1 22:1 29:1 34:1 37:1 39:1 44:1 54:1 58:1 65:1 66:1 77:1 86:1 88:1 92:1 95:1 98:1 105:1 117:1 120:1 0 3:1 9:1 11:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 77:1 82:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 0 3:1 7:1 14:1 21:1 29:1 34:1 36:1 39:1 49:1 54:1 55:1 65:1 69:1 73:1 82:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 0 4:1 9:1 18:1 21:1 29:1 34:1 36:1 39:1 
42:1 54:1 55:1 65:1 69:1 75:1 82:1 88:1 92:1 95:1 102:1 106:1 119:1 126:1 0 4:1 9:1 11:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 75:1 86:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 0 4:1 7:1 11:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 0 3:1 9:1 18:1 21:1 29:1 34:1 36:1 39:1 49:1 54:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 0 3:1 9:1 18:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 73:1 82:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 0 3:1 9:1 18:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 75:1 86:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 0 3:1 7:1 18:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 73:1 82:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 0 4:1 7:1 11:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 73:1 84:1 88:1 92:1 95:1 102:1 106:1 119:1 126:1 0 3:1 7:1 14:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 75:1 86:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 0 3:1 9:1 14:1 21:1 29:1 34:1 36:1 39:1 49:1 54:1 55:1 65:1 69:1 75:1 82:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 0 3:1 9:1 11:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 77:1 82:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 0 3:1 7:1 18:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 73:1 82:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 0 3:1 7:1 18:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 73:1 82:1 88:1 92:1 95:1 102:1 106:1 119:1 126:1 0 3:1 7:1 14:1 21:1 29:1 34:1 36:1 39:1 49:1 54:1 55:1 65:1 69:1 73:1 84:1 88:1 92:1 95:1 102:1 106:1 119:1 126:1 0 3:1 7:1 18:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 75:1 84:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 0 3:1 7:1 18:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 75:1 82:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 0 4:1 7:1 11:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 77:1 84:1 88:1 92:1 95:1 102:1 106:1 119:1 126:1 0 4:1 7:1 11:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 73:1 82:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 0 4:1 7:1 11:1 21:1 29:1 34:1 36:1 39:1 49:1 54:1 55:1 65:1 69:1 75:1 84:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 0 3:1 7:1 18:1 21:1 29:1 34:1 36:1 39:1 49:1 54:1 55:1 65:1 69:1 75:1 84:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 0 3:1 9:1 18:1 21:1 29:1 34:1 36:1 39:1 49:1 54:1 55:1 65:1 69:1 75:1 86:1 88:1 92:1 95:1 102:1 106:1 119:1 126:1 0 4:1 7:1 11:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 75:1 84:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 0 4:1 7:1 11:1 21:1 29:1 34:1 36:1 39:1 49:1 54:1 55:1 65:1 69:1 75:1 82:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 1 3:1 7:1 16:1 22:1 25:1 34:1 36:1 40:1 48:1 53:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 0 3:1 7:1 18:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 0 3:1 7:1 11:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 0 3:1 9:1 18:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 73:1 84:1 88:1 92:1 95:1 102:1 106:1 119:1 126:1 0 4:1 7:1 11:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 73:1 84:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 0 3:1 9:1 14:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 75:1 86:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 0 3:1 9:1 11:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 77:1 84:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 0 3:1 9:1 11:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 75:1 86:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 0 4:1 7:1 14:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 
[Dataset rows omitted: sparse training examples in LIBSVM text format. In the original file each record occupies one line: a binary class label (0 or 1) followed by space-separated index:value feature pairs, with feature indices up to 126 and all values equal to 1. The layout matches the agaricus mushroom demo data distributed with XGBoost (demo/data/agaricus.txt.train / agaricus.txt.test).]
65:1 69:1 73:1 82:1 88:1 92:1 95:1 102:1 106:1 119:1 126:1 1 3:1 7:1 14:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 75:1 84:1 88:1 92:1 95:1 100:1 108:1 119:1 120:1 1 3:1 10:1 16:1 22:1 25:1 34:1 36:1 40:1 48:1 53:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 1 3:1 10:1 14:1 22:1 25:1 34:1 37:1 40:1 48:1 53:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 117:1 126:1 0 3:1 9:1 14:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 73:1 86:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 0 4:1 9:1 11:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 73:1 86:1 88:1 92:1 95:1 102:1 106:1 119:1 126:1 0 4:1 9:1 11:1 21:1 29:1 34:1 36:1 39:1 49:1 54:1 55:1 65:1 69:1 73:1 86:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 0 4:1 7:1 11:1 21:1 29:1 34:1 36:1 39:1 49:1 54:1 55:1 65:1 69:1 73:1 82:1 88:1 92:1 95:1 102:1 106:1 119:1 126:1 1 3:1 10:1 14:1 22:1 25:1 34:1 36:1 40:1 42:1 53:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 117:1 126:1 0 4:1 7:1 14:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 77:1 84:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 0 3:1 7:1 18:1 21:1 29:1 34:1 36:1 39:1 49:1 54:1 55:1 65:1 69:1 75:1 82:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 0 4:1 7:1 14:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 73:1 86:1 88:1 92:1 95:1 102:1 106:1 119:1 126:1 0 4:1 9:1 11:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 75:1 82:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 1 3:1 7:1 14:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 70:1 84:1 88:1 92:1 95:1 100:1 108:1 119:1 120:1 1 3:1 10:1 14:1 22:1 25:1 34:1 36:1 40:1 48:1 53:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 117:1 126:1 0 4:1 9:1 18:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 73:1 82:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 0 3:1 9:1 14:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 75:1 86:1 88:1 92:1 95:1 102:1 106:1 119:1 126:1 1 3:1 7:1 14:1 22:1 25:1 34:1 37:1 40:1 45:1 53:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 0 4:1 9:1 14:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 73:1 86:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 0 4:1 7:1 18:1 21:1 29:1 34:1 36:1 39:1 49:1 54:1 55:1 65:1 69:1 75:1 86:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 1 3:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 70:1 80:1 88:1 92:1 95:1 100:1 108:1 118:1 123:1 0 4:1 9:1 14:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 73:1 86:1 88:1 92:1 95:1 102:1 106:1 119:1 126:1 0 4:1 9:1 11:1 21:1 29:1 34:1 36:1 39:1 49:1 54:1 55:1 65:1 69:1 73:1 82:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 0 4:1 9:1 14:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 75:1 86:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 0 4:1 9:1 14:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 75:1 86:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 1 3:1 7:1 14:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 70:1 79:1 88:1 92:1 95:1 100:1 108:1 118:1 123:1 0 4:1 7:1 18:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 73:1 86:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 1 3:1 7:1 14:1 22:1 25:1 34:1 37:1 40:1 45:1 53:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 0 4:1 9:1 14:1 21:1 29:1 34:1 36:1 39:1 49:1 54:1 55:1 65:1 69:1 77:1 84:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 0 4:1 7:1 18:1 21:1 29:1 34:1 36:1 39:1 49:1 54:1 55:1 65:1 69:1 75:1 82:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 1 3:1 7:1 14:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 71:1 84:1 88:1 92:1 95:1 100:1 108:1 118:1 123:1 0 4:1 9:1 14:1 21:1 29:1 34:1 36:1 39:1 49:1 54:1 55:1 65:1 69:1 77:1 
84:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 1 3:1 7:1 16:1 22:1 25:1 34:1 36:1 40:1 42:1 53:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 117:1 126:1 0 4:1 7:1 18:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 75:1 84:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 1 3:1 10:1 19:1 22:1 25:1 34:1 36:1 40:1 49:1 53:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 117:1 126:1 0 4:1 9:1 14:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 75:1 86:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 1 3:1 7:1 14:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 71:1 80:1 88:1 92:1 95:1 100:1 108:1 119:1 126:1 0 4:1 9:1 14:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 75:1 84:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 1 3:1 7:1 14:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 75:1 80:1 88:1 92:1 95:1 100:1 108:1 118:1 123:1 1 3:1 7:1 19:1 22:1 25:1 34:1 37:1 40:1 49:1 53:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 117:1 126:1 0 4:1 9:1 11:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 73:1 86:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 0 4:1 9:1 14:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 1 3:1 7:1 19:1 22:1 25:1 34:1 37:1 40:1 45:1 53:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 117:1 126:1 0 3:1 9:1 14:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 73:1 86:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 0 4:1 9:1 11:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 77:1 84:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 0 3:1 9:1 18:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 75:1 82:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 0 4:1 9:1 18:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 75:1 82:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 0 4:1 7:1 18:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 73:1 82:1 88:1 92:1 95:1 102:1 106:1 119:1 126:1 1 3:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 70:1 84:1 88:1 92:1 95:1 100:1 108:1 119:1 126:1 0 4:1 7:1 14:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 77:1 84:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 0 4:1 9:1 11:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 75:1 82:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 1 3:1 7:1 14:1 22:1 25:1 34:1 37:1 40:1 48:1 53:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 1 3:1 10:1 16:1 22:1 25:1 34:1 36:1 40:1 42:1 53:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 0 4:1 7:1 18:1 21:1 29:1 34:1 36:1 39:1 49:1 54:1 55:1 65:1 69:1 73:1 84:1 88:1 92:1 95:1 102:1 106:1 119:1 126:1 0 4:1 9:1 14:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 75:1 86:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 0 4:1 7:1 14:1 21:1 29:1 34:1 36:1 39:1 49:1 54:1 55:1 65:1 69:1 73:1 82:1 88:1 92:1 95:1 102:1 106:1 119:1 126:1 1 3:1 7:1 14:1 22:1 25:1 34:1 36:1 40:1 48:1 53:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 117:1 126:1 0 4:1 9:1 11:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 77:1 84:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 1 3:1 7:1 14:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 75:1 84:1 88:1 92:1 95:1 100:1 108:1 118:1 123:1 0 4:1 7:1 18:1 21:1 29:1 34:1 36:1 39:1 49:1 54:1 55:1 65:1 69:1 75:1 82:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 0 4:1 7:1 18:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 77:1 84:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 1 3:1 10:1 19:1 22:1 25:1 34:1 36:1 40:1 49:1 53:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 0 4:1 7:1 18:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 75:1 86:1 88:1 92:1 
95:1 102:1 105:1 118:1 126:1 0 4:1 7:1 18:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 1 3:1 7:1 14:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 70:1 80:1 88:1 92:1 95:1 100:1 108:1 118:1 120:1 0 4:1 9:1 14:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 73:1 82:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 1 3:1 7:1 14:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 75:1 79:1 88:1 92:1 95:1 100:1 108:1 119:1 123:1 0 4:1 7:1 18:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 73:1 84:1 88:1 92:1 95:1 102:1 106:1 119:1 126:1 1 3:1 7:1 14:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 70:1 84:1 88:1 92:1 95:1 100:1 108:1 118:1 120:1 0 4:1 9:1 14:1 21:1 29:1 34:1 36:1 39:1 49:1 54:1 55:1 65:1 69:1 75:1 86:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 0 4:1 9:1 11:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 1 3:1 7:1 14:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 75:1 79:1 88:1 92:1 95:1 100:1 108:1 119:1 126:1 0 4:1 7:1 18:1 21:1 29:1 34:1 36:1 39:1 49:1 54:1 55:1 65:1 69:1 73:1 82:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 0 3:1 7:1 14:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 73:1 82:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 0 4:1 9:1 14:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 75:1 82:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 1 3:1 7:1 19:1 22:1 25:1 34:1 37:1 40:1 49:1 53:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 117:1 126:1 0 4:1 7:1 18:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 77:1 84:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 0 4:1 7:1 18:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 75:1 84:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 0 4:1 9:1 18:1 21:1 29:1 34:1 36:1 39:1 49:1 54:1 55:1 65:1 69:1 77:1 82:1 88:1 92:1 95:1 102:1 106:1 119:1 126:1 0 3:1 7:1 18:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 73:1 84:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 0 4:1 9:1 11:1 21:1 29:1 34:1 36:1 39:1 49:1 54:1 55:1 65:1 69:1 75:1 86:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 1 3:1 7:1 14:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 75:1 79:1 88:1 92:1 95:1 100:1 108:1 118:1 120:1 0 3:1 7:1 14:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 75:1 86:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 1 3:1 7:1 14:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 71:1 79:1 88:1 92:1 95:1 100:1 108:1 119:1 126:1 0 4:1 9:1 14:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 73:1 86:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 1 3:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 71:1 84:1 88:1 92:1 95:1 100:1 108:1 118:1 126:1 0 4:1 9:1 14:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 77:1 84:1 88:1 92:1 95:1 102:1 106:1 119:1 126:1 1 4:1 7:1 14:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 70:1 84:1 88:1 92:1 95:1 100:1 108:1 118:1 126:1 0 3:1 7:1 18:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 73:1 86:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 0 4:1 9:1 14:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 75:1 82:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 1 4:1 7:1 14:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 71:1 84:1 88:1 92:1 95:1 100:1 108:1 118:1 126:1 0 4:1 7:1 14:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 75:1 86:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 0 4:1 7:1 18:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 77:1 82:1 88:1 92:1 95:1 102:1 106:1 119:1 126:1 1 3:1 7:1 16:1 22:1 25:1 34:1 36:1 40:1 49:1 53:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 
118:1 126:1 0 4:1 7:1 11:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 73:1 82:1 88:1 92:1 95:1 102:1 106:1 119:1 126:1 0 4:1 9:1 14:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 75:1 86:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 0 4:1 9:1 18:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 75:1 86:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 0 4:1 9:1 18:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 0 3:1 7:1 14:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 75:1 82:1 88:1 92:1 95:1 102:1 106:1 119:1 126:1 1 3:1 10:1 19:1 22:1 25:1 34:1 37:1 40:1 48:1 53:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 1 3:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 71:1 84:1 88:1 92:1 95:1 100:1 108:1 118:1 126:1 0 4:1 9:1 11:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 75:1 86:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 0 4:1 9:1 11:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 73:1 82:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 0 4:1 9:1 14:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 75:1 86:1 88:1 92:1 95:1 102:1 106:1 119:1 126:1 0 4:1 9:1 18:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 1 3:1 7:1 14:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 75:1 79:1 88:1 92:1 95:1 100:1 108:1 119:1 126:1 0 3:1 9:1 11:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 73:1 84:1 88:1 92:1 95:1 102:1 106:1 119:1 126:1 0 4:1 9:1 18:1 21:1 29:1 34:1 36:1 39:1 49:1 54:1 55:1 65:1 69:1 75:1 82:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 1 3:1 7:1 14:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 75:1 84:1 88:1 92:1 95:1 100:1 108:1 118:1 126:1 0 4:1 9:1 14:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 73:1 86:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 1 3:1 10:1 16:1 22:1 25:1 34:1 36:1 40:1 48:1 53:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 0 4:1 7:1 14:1 21:1 29:1 34:1 36:1 39:1 49:1 54:1 55:1 65:1 69:1 77:1 82:1 88:1 92:1 95:1 102:1 106:1 119:1 126:1 0 4:1 7:1 18:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 0 4:1 7:1 11:1 21:1 29:1 34:1 36:1 39:1 49:1 54:1 55:1 65:1 69:1 73:1 86:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 0 3:1 9:1 14:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 0 3:1 9:1 11:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 77:1 84:1 88:1 92:1 95:1 102:1 106:1 119:1 126:1 0 4:1 9:1 11:1 21:1 29:1 34:1 36:1 39:1 49:1 54:1 55:1 65:1 69:1 73:1 84:1 88:1 92:1 95:1 102:1 106:1 119:1 126:1 1 3:1 7:1 16:1 22:1 25:1 34:1 36:1 40:1 48:1 53:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 1 3:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 70:1 79:1 88:1 92:1 95:1 100:1 108:1 119:1 126:1 1 3:1 10:1 14:1 22:1 25:1 34:1 37:1 40:1 48:1 53:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 0 4:1 9:1 11:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 1 4:1 7:1 14:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 70:1 79:1 88:1 92:1 95:1 100:1 108:1 118:1 126:1 1 3:1 7:1 14:1 22:1 25:1 34:1 37:1 40:1 42:1 53:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 117:1 126:1 0 4:1 9:1 14:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 75:1 82:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 0 4:1 9:1 14:1 21:1 29:1 34:1 36:1 39:1 49:1 54:1 55:1 65:1 69:1 73:1 82:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 0 4:1 
9:1 14:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 73:1 84:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 0 3:1 9:1 11:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 73:1 86:1 88:1 92:1 95:1 102:1 106:1 119:1 126:1 1 3:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 71:1 79:1 88:1 92:1 95:1 100:1 108:1 118:1 120:1 0 4:1 7:1 14:1 21:1 29:1 34:1 36:1 39:1 49:1 54:1 55:1 65:1 69:1 73:1 86:1 88:1 92:1 95:1 102:1 106:1 119:1 126:1 0 4:1 7:1 18:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 75:1 82:1 88:1 92:1 95:1 102:1 106:1 119:1 126:1 1 4:1 7:1 14:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 70:1 84:1 88:1 92:1 95:1 100:1 108:1 119:1 126:1 0 4:1 7:1 18:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 75:1 82:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 0 4:1 7:1 18:1 21:1 29:1 34:1 36:1 39:1 49:1 54:1 55:1 65:1 69:1 73:1 82:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 0 4:1 9:1 18:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 0 4:1 9:1 18:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 73:1 86:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 0 4:1 9:1 11:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 73:1 84:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 0 4:1 7:1 14:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 73:1 82:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 0 4:1 7:1 18:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 75:1 86:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 1 3:1 7:1 14:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 71:1 80:1 88:1 92:1 95:1 100:1 108:1 118:1 120:1 0 4:1 9:1 11:1 21:1 29:1 34:1 36:1 39:1 49:1 54:1 55:1 65:1 69:1 77:1 82:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 1 3:1 7:1 14:1 22:1 25:1 34:1 36:1 40:1 42:1 53:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 0 4:1 9:1 14:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 77:1 82:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 0 4:1 9:1 18:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 75:1 84:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 0 4:1 7:1 14:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 75:1 84:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 0 3:1 9:1 14:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 73:1 82:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 1 3:1 7:1 16:1 22:1 25:1 34:1 36:1 40:1 45:1 53:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 117:1 126:1 0 3:1 9:1 11:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 75:1 84:1 88:1 92:1 95:1 102:1 106:1 119:1 126:1 1 4:1 7:1 14:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 70:1 79:1 88:1 92:1 95:1 100:1 108:1 119:1 120:1 0 4:1 9:1 18:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 73:1 82:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 1 3:1 7:1 14:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 75:1 80:1 88:1 92:1 95:1 100:1 108:1 119:1 120:1 1 3:1 7:1 14:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 75:1 80:1 88:1 92:1 95:1 100:1 108:1 118:1 120:1 0 4:1 7:1 18:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 77:1 84:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 0 3:1 9:1 11:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 73:1 86:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 0 3:1 9:1 14:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 75:1 86:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 0 4:1 9:1 18:1 21:1 29:1 34:1 36:1 39:1 49:1 54:1 55:1 65:1 69:1 77:1 82:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 1 3:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 71:1 79:1 88:1 92:1 95:1 100:1 108:1 118:1 126:1 1 3:1 10:1 14:1 22:1 25:1 
34:1 36:1 40:1 45:1 53:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 1 3:1 7:1 14:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 71:1 80:1 88:1 92:1 95:1 100:1 108:1 118:1 123:1 1 4:1 7:1 14:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 71:1 80:1 88:1 92:1 95:1 100:1 108:1 119:1 123:1 0 4:1 7:1 14:1 21:1 29:1 34:1 36:1 39:1 49:1 54:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 1 3:1 7:1 14:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 71:1 79:1 88:1 92:1 95:1 100:1 108:1 118:1 126:1 0 4:1 9:1 18:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 77:1 84:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 0 4:1 9:1 11:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 75:1 82:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 0 4:1 9:1 11:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 77:1 82:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 1 3:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 70:1 80:1 88:1 92:1 95:1 100:1 108:1 119:1 126:1 0 4:1 9:1 14:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 1 3:1 7:1 16:1 22:1 25:1 34:1 37:1 40:1 49:1 53:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 117:1 126:1 0 4:1 7:1 18:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 119:1 126:1 0 4:1 9:1 11:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 73:1 84:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 0 3:1 9:1 11:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 77:1 82:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 0 4:1 7:1 18:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 119:1 126:1 1 3:1 7:1 14:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 75:1 79:1 88:1 92:1 95:1 100:1 108:1 119:1 120:1 1 3:1 7:1 14:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 75:1 79:1 88:1 92:1 95:1 100:1 108:1 118:1 123:1 0 4:1 7:1 14:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 75:1 82:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 0 3:1 7:1 11:1 21:1 29:1 34:1 36:1 39:1 49:1 54:1 55:1 65:1 69:1 75:1 86:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 1 3:1 7:1 14:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 75:1 84:1 88:1 92:1 95:1 100:1 108:1 118:1 126:1 0 4:1 7:1 14:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 75:1 82:1 88:1 92:1 95:1 102:1 106:1 119:1 126:1 1 3:1 7:1 16:1 22:1 25:1 34:1 36:1 40:1 45:1 53:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 117:1 126:1 0 4:1 9:1 11:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 1 3:1 7:1 14:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 70:1 79:1 88:1 92:1 95:1 100:1 108:1 119:1 123:1 0 3:1 7:1 18:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 73:1 82:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 1 3:1 10:1 14:1 22:1 25:1 34:1 37:1 40:1 49:1 53:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 117:1 126:1 1 3:1 7:1 14:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 70:1 80:1 88:1 92:1 95:1 100:1 108:1 118:1 126:1 0 4:1 9:1 18:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 75:1 82:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 0 4:1 7:1 14:1 21:1 29:1 34:1 36:1 39:1 49:1 54:1 55:1 65:1 69:1 75:1 86:1 88:1 92:1 95:1 102:1 106:1 119:1 126:1 0 4:1 7:1 14:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 73:1 84:1 88:1 92:1 95:1 102:1 106:1 119:1 126:1 0 4:1 9:1 11:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 77:1 84:1 88:1 92:1 95:1 102:1 106:1 119:1 126:1 0 4:1 9:1 14:1 21:1 29:1 34:1 36:1 39:1 48:1 
54:1 55:1 65:1 69:1 73:1 82:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 0 4:1 9:1 18:1 21:1 29:1 34:1 36:1 39:1 49:1 54:1 55:1 65:1 69:1 77:1 82:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 0 4:1 9:1 11:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 75:1 86:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 1 3:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 75:1 79:1 88:1 92:1 95:1 100:1 108:1 119:1 120:1 0 4:1 7:1 14:1 21:1 29:1 34:1 36:1 39:1 49:1 54:1 55:1 65:1 69:1 75:1 84:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 0 4:1 9:1 11:1 21:1 29:1 34:1 36:1 39:1 49:1 54:1 55:1 65:1 69:1 73:1 86:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 1 3:1 7:1 14:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 75:1 80:1 88:1 92:1 95:1 100:1 108:1 118:1 126:1 1 3:1 7:1 14:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 71:1 79:1 88:1 92:1 95:1 100:1 108:1 119:1 123:1 0 4:1 9:1 18:1 21:1 29:1 34:1 36:1 39:1 49:1 54:1 55:1 65:1 69:1 73:1 84:1 88:1 92:1 95:1 102:1 106:1 119:1 126:1 1 3:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 70:1 79:1 88:1 92:1 95:1 100:1 108:1 119:1 126:1 1 3:1 7:1 14:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 75:1 80:1 88:1 92:1 95:1 100:1 108:1 118:1 123:1 1 4:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 71:1 80:1 88:1 92:1 95:1 100:1 108:1 118:1 123:1 0 4:1 9:1 11:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 75:1 86:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 0 4:1 7:1 18:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 75:1 84:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 1 4:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 70:1 80:1 88:1 92:1 95:1 100:1 108:1 119:1 126:1 0 4:1 9:1 11:1 21:1 29:1 34:1 36:1 39:1 49:1 54:1 55:1 65:1 69:1 73:1 84:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 0 3:1 9:1 18:1 21:1 29:1 34:1 36:1 39:1 49:1 54:1 55:1 65:1 69:1 77:1 84:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 1 3:1 7:1 14:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 71:1 80:1 88:1 92:1 95:1 100:1 108:1 119:1 123:1 0 3:1 9:1 11:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 1 3:1 10:1 19:1 22:1 25:1 34:1 36:1 40:1 45:1 53:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 0 4:1 7:1 18:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 75:1 84:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 0 4:1 9:1 14:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 75:1 84:1 88:1 92:1 95:1 102:1 106:1 119:1 126:1 1 3:1 7:1 14:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 75:1 80:1 88:1 92:1 95:1 100:1 108:1 118:1 126:1 0 4:1 9:1 18:1 21:1 29:1 34:1 36:1 39:1 49:1 54:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 0 4:1 9:1 18:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 77:1 82:1 88:1 92:1 95:1 102:1 106:1 119:1 126:1 1 3:1 10:1 16:1 22:1 25:1 34:1 36:1 40:1 45:1 53:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 117:1 126:1 0 4:1 7:1 18:1 21:1 29:1 34:1 36:1 39:1 49:1 54:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 0 4:1 7:1 14:1 21:1 29:1 34:1 36:1 39:1 49:1 54:1 55:1 65:1 69:1 73:1 84:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 0 4:1 7:1 14:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 119:1 126:1 0 4:1 7:1 14:1 21:1 29:1 34:1 36:1 39:1 49:1 54:1 55:1 65:1 69:1 73:1 84:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 1 3:1 7:1 14:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 71:1 80:1 88:1 92:1 95:1 100:1 108:1 118:1 123:1 0 4:1 9:1 11:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 
69:1 75:1 82:1 88:1 92:1 95:1 102:1 106:1 119:1 126:1 0 4:1 9:1 14:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 77:1 82:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 0 4:1 9:1 11:1 21:1 29:1 34:1 36:1 39:1 49:1 54:1 55:1 65:1 69:1 75:1 84:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 1 3:1 10:1 16:1 22:1 25:1 34:1 36:1 40:1 42:1 53:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 117:1 126:1 0 4:1 9:1 18:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 75:1 84:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 0 4:1 9:1 14:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 73:1 84:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 0 4:1 9:1 18:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 77:1 84:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 1 3:1 10:1 19:1 22:1 25:1 34:1 36:1 40:1 49:1 53:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 117:1 126:1 1 3:1 7:1 14:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 70:1 79:1 88:1 92:1 95:1 100:1 108:1 118:1 123:1 1 3:1 7:1 14:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 75:1 84:1 88:1 92:1 95:1 100:1 108:1 118:1 120:1 1 3:1 7:1 19:1 22:1 25:1 34:1 36:1 40:1 42:1 53:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 117:1 126:1 0 4:1 9:1 11:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 73:1 82:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 0 4:1 9:1 14:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 0 4:1 9:1 14:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 0 4:1 9:1 14:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 77:1 84:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 1 3:1 7:1 19:1 22:1 25:1 34:1 37:1 40:1 48:1 53:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 1 3:1 7:1 14:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 75:1 80:1 88:1 92:1 95:1 100:1 108:1 118:1 120:1 0 4:1 9:1 11:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 75:1 82:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 1 3:1 10:1 16:1 22:1 25:1 34:1 36:1 40:1 49:1 53:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 0 4:1 9:1 14:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 77:1 82:1 88:1 92:1 95:1 102:1 106:1 119:1 126:1 1 3:1 10:1 16:1 22:1 25:1 34:1 36:1 40:1 45:1 53:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 0 3:1 9:1 11:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 77:1 84:1 88:1 92:1 95:1 102:1 106:1 119:1 126:1 1 3:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 71:1 80:1 88:1 92:1 95:1 100:1 108:1 119:1 120:1 0 4:1 9:1 18:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 73:1 86:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 1 3:1 7:1 16:1 22:1 25:1 34:1 36:1 40:1 48:1 53:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 117:1 126:1 1 3:1 7:1 14:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 75:1 79:1 88:1 92:1 95:1 100:1 108:1 118:1 123:1 0 4:1 7:1 18:1 21:1 29:1 34:1 36:1 39:1 49:1 54:1 55:1 65:1 69:1 77:1 82:1 88:1 92:1 95:1 102:1 106:1 119:1 126:1 0 4:1 7:1 18:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 73:1 86:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 1 3:1 7:1 14:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 70:1 79:1 88:1 92:1 95:1 100:1 108:1 118:1 120:1 1 3:1 7:1 14:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 70:1 80:1 88:1 92:1 95:1 100:1 108:1 118:1 126:1 0 4:1 9:1 11:1 21:1 29:1 34:1 36:1 39:1 49:1 54:1 55:1 65:1 69:1 77:1 84:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 1 4:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 71:1 79:1 
88:1 92:1 95:1 100:1 108:1 119:1 126:1 0 4:1 9:1 18:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 0 4:1 9:1 18:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 0 4:1 9:1 11:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 73:1 82:1 88:1 92:1 95:1 102:1 106:1 119:1 126:1 1 4:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 75:1 80:1 88:1 92:1 95:1 100:1 108:1 118:1 126:1 0 4:1 9:1 14:1 21:1 29:1 34:1 36:1 39:1 49:1 54:1 55:1 65:1 69:1 73:1 86:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 0 4:1 7:1 18:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 75:1 86:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 0 4:1 9:1 11:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 75:1 84:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 0 3:1 9:1 14:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 75:1 86:1 88:1 92:1 95:1 102:1 106:1 119:1 126:1 0 3:1 9:1 11:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 73:1 86:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 1 3:1 10:1 19:1 22:1 25:1 34:1 37:1 40:1 45:1 53:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 0 4:1 9:1 18:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 75:1 82:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 0 4:1 7:1 18:1 21:1 29:1 34:1 36:1 39:1 49:1 54:1 55:1 65:1 69:1 75:1 82:1 88:1 92:1 95:1 102:1 106:1 119:1 126:1 0 4:1 7:1 18:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 75:1 84:1 88:1 92:1 95:1 102:1 106:1 119:1 126:1 0 4:1 9:1 18:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 73:1 84:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 0 4:1 9:1 11:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 73:1 82:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 0 4:1 9:1 18:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 77:1 82:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 1 3:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 70:1 79:1 88:1 92:1 95:1 100:1 108:1 119:1 120:1 1 3:1 7:1 14:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 75:1 79:1 88:1 92:1 95:1 100:1 108:1 118:1 126:1 1 3:1 7:1 19:1 22:1 25:1 34:1 37:1 40:1 48:1 53:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 117:1 126:1 1 3:1 7:1 14:1 22:1 25:1 34:1 37:1 40:1 48:1 53:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 1 3:1 10:1 14:1 22:1 25:1 34:1 37:1 40:1 42:1 53:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 0 4:1 9:1 18:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 73:1 86:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 0 4:1 9:1 18:1 21:1 29:1 34:1 36:1 39:1 49:1 54:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 119:1 126:1 0 4:1 7:1 18:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 73:1 84:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 1 3:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 71:1 79:1 88:1 92:1 95:1 100:1 108:1 118:1 126:1 0 4:1 7:1 18:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 0 4:1 7:1 18:1 21:1 29:1 34:1 36:1 39:1 49:1 54:1 55:1 65:1 69:1 77:1 84:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 1 3:1 10:1 14:1 22:1 25:1 34:1 36:1 40:1 45:1 53:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 117:1 126:1 1 3:1 7:1 19:1 22:1 25:1 34:1 36:1 40:1 49:1 53:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 1 3:1 7:1 14:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 75:1 80:1 88:1 92:1 95:1 100:1 108:1 119:1 120:1 0 4:1 9:1 14:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 77:1 84:1 88:1 92:1 95:1 
102:1 106:1 118:1 126:1 1 4:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 71:1 84:1 88:1 92:1 95:1 100:1 108:1 119:1 123:1 0 4:1 7:1 18:1 21:1 29:1 34:1 36:1 39:1 49:1 54:1 55:1 65:1 69:1 73:1 86:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 0 4:1 9:1 18:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 77:1 84:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 0 4:1 9:1 18:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 73:1 86:1 88:1 92:1 95:1 102:1 106:1 119:1 126:1 0 4:1 9:1 11:1 21:1 29:1 34:1 36:1 39:1 49:1 54:1 55:1 65:1 69:1 77:1 84:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 0 4:1 9:1 18:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 75:1 86:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 0 4:1 9:1 11:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 73:1 82:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 0 4:1 7:1 18:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 75:1 84:1 88:1 92:1 95:1 102:1 106:1 119:1 126:1 0 4:1 7:1 14:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 77:1 82:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 0 3:1 7:1 14:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 75:1 86:1 88:1 92:1 95:1 102:1 106:1 119:1 126:1 0 4:1 9:1 11:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 77:1 84:1 88:1 92:1 95:1 102:1 106:1 119:1 126:1 0 4:1 9:1 11:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 73:1 84:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 0 4:1 7:1 11:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 77:1 84:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 1 3:1 7:1 14:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 70:1 79:1 88:1 92:1 95:1 100:1 108:1 119:1 123:1 0 3:1 9:1 14:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 77:1 82:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 0 4:1 9:1 11:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 73:1 84:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 0 4:1 9:1 14:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 75:1 82:1 88:1 92:1 95:1 102:1 106:1 119:1 126:1 0 4:1 7:1 18:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 75:1 86:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 1 4:1 7:1 14:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 75:1 80:1 88:1 92:1 95:1 100:1 108:1 118:1 126:1 0 4:1 9:1 14:1 21:1 29:1 34:1 36:1 39:1 49:1 54:1 55:1 65:1 69:1 75:1 84:1 88:1 92:1 95:1 102:1 106:1 119:1 126:1 0 4:1 7:1 14:1 21:1 29:1 34:1 36:1 39:1 49:1 54:1 55:1 65:1 69:1 75:1 86:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 1 4:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 75:1 80:1 88:1 92:1 95:1 100:1 108:1 118:1 123:1 1 4:1 7:1 14:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 71:1 79:1 88:1 92:1 95:1 100:1 108:1 118:1 126:1 0 4:1 9:1 11:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 73:1 86:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 1 3:1 7:1 14:1 22:1 25:1 34:1 37:1 40:1 49:1 53:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 117:1 126:1 1 3:1 10:1 19:1 22:1 25:1 34:1 37:1 40:1 45:1 53:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 117:1 126:1 0 4:1 9:1 11:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 77:1 84:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 1 3:1 10:1 19:1 22:1 25:1 34:1 36:1 40:1 42:1 53:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 0 4:1 9:1 18:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 75:1 84:1 88:1 92:1 95:1 102:1 106:1 119:1 126:1 0 4:1 7:1 14:1 21:1 29:1 34:1 36:1 39:1 49:1 54:1 55:1 65:1 69:1 77:1 82:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 0 4:1 7:1 18:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 75:1 86:1 88:1 92:1 95:1 102:1 105:1 119:1 
126:1 1 3:1 10:1 14:1 22:1 25:1 34:1 36:1 40:1 48:1 53:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 117:1 126:1 1 3:1 7:1 16:1 22:1 25:1 34:1 37:1 40:1 42:1 53:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 0 4:1 7:1 18:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 75:1 82:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 1 4:1 7:1 14:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 71:1 84:1 88:1 92:1 95:1 100:1 108:1 119:1 120:1 0 4:1 9:1 14:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 73:1 84:1 88:1 92:1 95:1 102:1 106:1 119:1 126:1 0 4:1 9:1 11:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 75:1 84:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 0 4:1 9:1 18:1 21:1 29:1 34:1 36:1 39:1 49:1 54:1 55:1 65:1 69:1 75:1 84:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 0 4:1 9:1 11:1 21:1 29:1 34:1 36:1 39:1 49:1 54:1 55:1 65:1 69:1 75:1 84:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 1 3:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 70:1 84:1 88:1 92:1 95:1 100:1 108:1 119:1 123:1 0 4:1 7:1 18:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 73:1 84:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 0 4:1 9:1 14:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 0 4:1 9:1 18:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 73:1 86:1 88:1 92:1 95:1 102:1 106:1 119:1 126:1 0 4:1 9:1 18:1 21:1 29:1 34:1 36:1 39:1 49:1 54:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 0 3:1 7:1 14:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 77:1 84:1 88:1 92:1 95:1 102:1 106:1 119:1 126:1 1 3:1 7:1 14:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 70:1 84:1 88:1 92:1 95:1 100:1 108:1 119:1 126:1 0 4:1 7:1 18:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 75:1 86:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 1 3:1 7:1 19:1 22:1 25:1 34:1 36:1 40:1 42:1 53:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 0 4:1 9:1 18:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 73:1 84:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 0 4:1 9:1 14:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 119:1 126:1 1 3:1 7:1 14:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 70:1 80:1 88:1 92:1 95:1 100:1 108:1 119:1 120:1 1 3:1 10:1 19:1 22:1 25:1 34:1 37:1 40:1 42:1 53:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 1 3:1 10:1 19:1 22:1 25:1 34:1 36:1 40:1 42:1 53:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 117:1 126:1 1 3:1 7:1 14:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 75:1 84:1 88:1 92:1 95:1 100:1 108:1 118:1 126:1 1 3:1 7:1 14:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 71:1 84:1 88:1 92:1 95:1 100:1 108:1 119:1 126:1 0 4:1 9:1 14:1 21:1 29:1 34:1 36:1 39:1 49:1 54:1 55:1 65:1 69:1 73:1 82:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 0 4:1 9:1 14:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 77:1 82:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 0 4:1 9:1 18:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 75:1 86:1 88:1 92:1 95:1 102:1 106:1 119:1 126:1 0 4:1 9:1 18:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 77:1 82:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 1 3:1 7:1 14:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 71:1 79:1 88:1 92:1 95:1 100:1 108:1 118:1 123:1 0 4:1 7:1 18:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 73:1 84:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 1 3:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 70:1 79:1 88:1 92:1 95:1 100:1 108:1 118:1 120:1 0 4:1 7:1 
18:1 21:1 29:1 34:1 36:1 39:1 49:1 54:1 55:1 65:1 69:1 77:1 82:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 1 3:1 7:1 14:1 22:1 25:1 34:1 37:1 40:1 48:1 53:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 117:1 126:1 0 4:1 9:1 18:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 73:1 82:1 88:1 92:1 95:1 102:1 106:1 119:1 126:1 0 4:1 9:1 18:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 75:1 86:1 88:1 92:1 95:1 102:1 106:1 119:1 126:1 0 4:1 9:1 14:1 21:1 29:1 34:1 36:1 39:1 49:1 54:1 55:1 65:1 69:1 75:1 86:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 1 3:1 10:1 16:1 22:1 25:1 34:1 36:1 40:1 48:1 53:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 117:1 126:1 0 4:1 9:1 11:1 21:1 29:1 34:1 36:1 39:1 49:1 54:1 55:1 65:1 69:1 77:1 84:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 1 3:1 7:1 14:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 71:1 79:1 88:1 92:1 95:1 100:1 108:1 119:1 120:1 0 4:1 9:1 18:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 77:1 84:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 0 4:1 9:1 14:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 73:1 86:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 0 4:1 7:1 18:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 75:1 82:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 1 3:1 10:1 19:1 22:1 25:1 34:1 37:1 40:1 49:1 53:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 1 3:1 7:1 19:1 22:1 25:1 34:1 37:1 40:1 42:1 53:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 0 3:1 9:1 11:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 0 4:1 7:1 18:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 75:1 82:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 0 4:1 9:1 18:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 73:1 84:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 1 3:1 7:1 19:1 22:1 25:1 34:1 37:1 40:1 42:1 53:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 0 4:1 9:1 11:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 0 4:1 7:1 18:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 73:1 82:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 1 3:1 7:1 19:1 22:1 25:1 34:1 37:1 40:1 49:1 53:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 1 3:1 10:1 14:1 22:1 25:1 34:1 37:1 40:1 48:1 53:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 117:1 126:1 0 3:1 9:1 18:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 75:1 84:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 1 3:1 7:1 14:1 22:1 25:1 34:1 36:1 40:1 48:1 53:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 1 3:1 7:1 14:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 70:1 84:1 88:1 92:1 95:1 100:1 108:1 118:1 126:1 0 3:1 7:1 14:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 75:1 82:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 1 3:1 7:1 16:1 22:1 25:1 34:1 36:1 40:1 49:1 53:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 117:1 126:1 0 4:1 7:1 18:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 73:1 84:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 1 3:1 10:1 16:1 22:1 25:1 34:1 36:1 40:1 42:1 53:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 1 3:1 10:1 14:1 22:1 25:1 34:1 37:1 40:1 42:1 53:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 0 4:1 9:1 14:1 21:1 29:1 34:1 36:1 39:1 49:1 54:1 55:1 65:1 69:1 77:1 82:1 88:1 92:1 95:1 102:1 106:1 119:1 126:1 1 3:1 7:1 14:1 22:1 25:1 34:1 37:1 40:1 45:1 53:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 117:1 126:1 0 4:1 7:1 18:1 21:1 29:1 
34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 77:1 82:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 1 3:1 10:1 19:1 22:1 25:1 34:1 36:1 40:1 42:1 53:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 117:1 126:1 0 4:1 9:1 11:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 73:1 82:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 1 4:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 71:1 80:1 88:1 92:1 95:1 100:1 108:1 119:1 120:1 0 4:1 9:1 14:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 75:1 82:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 1 3:1 7:1 14:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 70:1 80:1 88:1 92:1 95:1 100:1 108:1 118:1 120:1 0 4:1 9:1 14:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 73:1 86:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 1 3:1 7:1 14:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 70:1 79:1 88:1 92:1 95:1 100:1 108:1 119:1 120:1 1 4:1 7:1 14:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 70:1 84:1 88:1 92:1 95:1 100:1 108:1 119:1 123:1 0 4:1 7:1 18:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 1 3:1 7:1 19:1 22:1 25:1 34:1 36:1 40:1 48:1 53:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 0 4:1 7:1 14:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 119:1 126:1 1 3:1 7:1 14:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 71:1 84:1 88:1 92:1 95:1 100:1 108:1 119:1 123:1 1 3:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 70:1 80:1 88:1 92:1 95:1 100:1 108:1 118:1 126:1 1 3:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 75:1 80:1 88:1 92:1 95:1 100:1 108:1 118:1 120:1 1 3:1 7:1 19:1 22:1 25:1 34:1 37:1 40:1 45:1 53:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 1 3:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 71:1 84:1 88:1 92:1 95:1 100:1 108:1 118:1 126:1 1 3:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 70:1 79:1 88:1 92:1 95:1 100:1 108:1 118:1 123:1 1 4:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 75:1 79:1 88:1 92:1 95:1 100:1 108:1 118:1 126:1 1 4:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 70:1 79:1 88:1 92:1 95:1 100:1 108:1 119:1 126:1 1 3:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 71:1 79:1 88:1 92:1 95:1 100:1 108:1 119:1 120:1 1 4:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 70:1 80:1 88:1 92:1 95:1 100:1 108:1 119:1 123:1 1 4:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 75:1 80:1 88:1 92:1 95:1 100:1 108:1 119:1 120:1 0 4:1 9:1 11:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 77:1 82:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 0 4:1 9:1 18:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 77:1 84:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 1 3:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 70:1 84:1 88:1 92:1 95:1 100:1 108:1 118:1 120:1 1 4:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 71:1 80:1 88:1 92:1 95:1 100:1 108:1 118:1 120:1 0 4:1 9:1 18:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 75:1 86:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 1 4:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 70:1 80:1 88:1 92:1 95:1 100:1 108:1 118:1 120:1 0 4:1 7:1 18:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 73:1 86:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 0 4:1 7:1 14:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 73:1 84:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 1 3:1 7:1 14:1 22:1 27:1 34:1 36:1 39:1 48:1 
53:1 55:1 64:1 68:1 71:1 80:1 88:1 92:1 95:1 100:1 108:1 119:1 126:1 1 3:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 71:1 80:1 88:1 92:1 95:1 100:1 108:1 119:1 120:1 1 4:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 71:1 79:1 88:1 92:1 95:1 100:1 108:1 119:1 120:1 0 4:1 9:1 18:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 73:1 84:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 1 3:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 75:1 79:1 88:1 92:1 95:1 100:1 108:1 119:1 126:1 1 4:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 75:1 80:1 88:1 92:1 95:1 100:1 108:1 119:1 123:1 1 4:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 75:1 84:1 88:1 92:1 95:1 100:1 108:1 118:1 120:1 1 4:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 75:1 80:1 88:1 92:1 95:1 100:1 108:1 118:1 126:1 1 3:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 70:1 79:1 88:1 92:1 95:1 100:1 108:1 119:1 123:1 1 4:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 71:1 80:1 88:1 92:1 95:1 100:1 108:1 118:1 123:1 1 3:1 10:1 16:1 22:1 25:1 34:1 36:1 40:1 45:1 53:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 117:1 126:1 0 4:1 7:1 18:1 21:1 29:1 34:1 36:1 39:1 49:1 54:1 55:1 65:1 69:1 73:1 82:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 0 4:1 9:1 18:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 73:1 84:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 1 3:1 7:1 16:1 22:1 25:1 34:1 36:1 40:1 42:1 53:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 1 4:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 71:1 84:1 88:1 92:1 95:1 100:1 108:1 118:1 120:1 0 3:1 9:1 14:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 77:1 84:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 0 4:1 7:1 18:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 77:1 82:1 88:1 92:1 95:1 102:1 106:1 119:1 126:1 1 4:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 71:1 80:1 88:1 92:1 95:1 100:1 108:1 118:1 126:1 1 3:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 70:1 84:1 88:1 92:1 95:1 100:1 108:1 118:1 126:1 1 4:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 70:1 79:1 88:1 92:1 95:1 100:1 108:1 119:1 120:1 0 4:1 7:1 18:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 73:1 82:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 1 3:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 70:1 79:1 88:1 92:1 95:1 100:1 108:1 118:1 120:1 1 3:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 75:1 84:1 88:1 92:1 95:1 100:1 108:1 118:1 120:1 1 3:1 7:1 14:1 22:1 25:1 34:1 36:1 40:1 45:1 53:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 117:1 126:1 1 3:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 71:1 84:1 88:1 92:1 95:1 100:1 108:1 118:1 123:1 1 3:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 71:1 84:1 88:1 92:1 95:1 100:1 108:1 119:1 123:1 1 4:1 7:1 14:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 75:1 79:1 88:1 92:1 95:1 100:1 108:1 118:1 126:1 0 4:1 9:1 14:1 21:1 29:1 34:1 36:1 39:1 49:1 54:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 1 3:1 10:1 14:1 21:1 27:1 34:1 36:1 39:1 51:1 54:1 55:1 62:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 108:1 118:1 124:1 1 3:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 75:1 80:1 88:1 92:1 95:1 100:1 108:1 119:1 126:1 0 4:1 7:1 14:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 75:1 84:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 1 4:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 
68:1 75:1 80:1 88:1 92:1 95:1 100:1 108:1 119:1 126:1 1 3:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 71:1 80:1 88:1 92:1 95:1 100:1 108:1 119:1 123:1 1 3:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 71:1 79:1 88:1 92:1 95:1 100:1 108:1 119:1 120:1 1 4:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 71:1 84:1 88:1 92:1 95:1 100:1 108:1 119:1 120:1 1 4:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 70:1 80:1 88:1 92:1 95:1 100:1 108:1 119:1 123:1 1 3:1 10:1 19:1 22:1 25:1 34:1 36:1 40:1 45:1 53:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 1 3:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 71:1 79:1 88:1 92:1 95:1 100:1 108:1 118:1 123:1 1 3:1 7:1 14:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 71:1 80:1 88:1 92:1 95:1 100:1 108:1 119:1 123:1 0 4:1 9:1 11:1 21:1 29:1 34:1 36:1 39:1 49:1 54:1 55:1 65:1 69:1 75:1 84:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 0 4:1 7:1 14:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 75:1 86:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 1 3:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 70:1 80:1 88:1 92:1 95:1 100:1 108:1 119:1 123:1 1 3:1 10:1 16:1 22:1 25:1 34:1 36:1 40:1 49:1 53:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 117:1 126:1 1 4:1 7:1 14:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 70:1 79:1 88:1 92:1 95:1 100:1 108:1 118:1 120:1 1 3:1 7:1 14:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 71:1 80:1 88:1 92:1 95:1 100:1 108:1 118:1 120:1 1 3:1 10:1 19:1 22:1 25:1 34:1 36:1 40:1 49:1 53:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 1 4:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 71:1 79:1 88:1 92:1 95:1 100:1 108:1 118:1 120:1 1 3:1 7:1 16:1 22:1 25:1 34:1 37:1 40:1 48:1 53:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 0 3:1 9:1 12:1 21:1 29:1 34:1 36:1 39:1 50:1 53:1 61:1 65:1 69:1 76:1 86:1 88:1 92:1 96:1 98:1 112:1 115:1 125:1 1 3:1 7:1 14:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 75:1 79:1 88:1 92:1 95:1 100:1 108:1 119:1 120:1 1 3:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 70:1 80:1 88:1 92:1 95:1 100:1 108:1 119:1 120:1 1 4:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 75:1 84:1 88:1 92:1 95:1 100:1 108:1 119:1 120:1 0 4:1 7:1 14:1 21:1 29:1 34:1 36:1 39:1 49:1 54:1 55:1 65:1 69:1 75:1 82:1 88:1 92:1 95:1 102:1 106:1 119:1 126:1 1 3:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 75:1 84:1 88:1 92:1 95:1 100:1 108:1 118:1 126:1 1 3:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 75:1 84:1 88:1 92:1 95:1 100:1 108:1 118:1 126:1 1 4:1 7:1 14:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 70:1 79:1 88:1 92:1 95:1 100:1 108:1 119:1 120:1 1 3:1 7:1 14:1 22:1 25:1 34:1 37:1 40:1 49:1 53:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 117:1 126:1 1 3:1 7:1 14:1 22:1 25:1 34:1 36:1 40:1 49:1 53:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 1 3:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 75:1 79:1 88:1 92:1 95:1 100:1 108:1 118:1 120:1 1 3:1 10:1 14:1 21:1 27:1 34:1 36:1 39:1 44:1 54:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 108:1 117:1 120:1 0 4:1 9:1 14:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 75:1 82:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 1 4:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 71:1 80:1 88:1 92:1 95:1 100:1 108:1 118:1 126:1 0 4:1 7:1 18:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 75:1 84:1 
88:1 92:1 95:1 102:1 105:1 118:1 126:1 1 4:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 75:1 80:1 88:1 92:1 95:1 100:1 108:1 119:1 123:1 0 4:1 9:1 11:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 73:1 86:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 0 4:1 7:1 18:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 73:1 84:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 1 3:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 70:1 84:1 88:1 92:1 95:1 100:1 108:1 119:1 123:1 1 3:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 71:1 84:1 88:1 92:1 95:1 100:1 108:1 119:1 126:1 0 4:1 7:1 18:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 73:1 82:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 0 4:1 7:1 14:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 75:1 84:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 0 4:1 9:1 18:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 77:1 82:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 0 4:1 9:1 18:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 73:1 82:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 1 4:1 7:1 14:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 70:1 79:1 88:1 92:1 95:1 100:1 108:1 119:1 123:1 0 3:1 7:1 18:1 21:1 29:1 34:1 36:1 39:1 49:1 54:1 55:1 65:1 69:1 73:1 84:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 1 4:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 75:1 80:1 88:1 92:1 95:1 100:1 108:1 118:1 126:1 1 4:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 71:1 80:1 88:1 92:1 95:1 100:1 108:1 118:1 126:1 1 3:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 70:1 84:1 88:1 92:1 95:1 100:1 108:1 118:1 123:1 1 3:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 70:1 79:1 88:1 92:1 95:1 100:1 108:1 118:1 126:1 0 3:1 7:1 14:1 21:1 29:1 34:1 36:1 39:1 49:1 54:1 55:1 65:1 69:1 73:1 82:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 0 3:1 9:1 14:1 21:1 29:1 34:1 36:1 39:1 49:1 54:1 55:1 65:1 69:1 73:1 86:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 1 4:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 71:1 84:1 88:1 92:1 95:1 100:1 108:1 118:1 120:1 1 4:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 75:1 84:1 88:1 92:1 95:1 100:1 108:1 119:1 126:1 1 3:1 9:1 18:1 22:1 26:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 69:1 77:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 123:1 1 3:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 70:1 79:1 88:1 92:1 95:1 100:1 108:1 119:1 123:1 1 3:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 75:1 84:1 88:1 92:1 95:1 100:1 108:1 118:1 120:1 1 4:1 10:1 12:1 21:1 27:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 66:1 77:1 86:1 88:1 92:1 95:1 102:1 108:1 117:1 120:1 1 3:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 70:1 84:1 88:1 92:1 95:1 100:1 108:1 118:1 126:1 1 3:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 75:1 84:1 88:1 92:1 95:1 100:1 108:1 119:1 126:1 1 3:1 10:1 19:1 22:1 25:1 34:1 37:1 40:1 49:1 53:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 117:1 126:1 1 3:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 75:1 79:1 88:1 92:1 95:1 100:1 108:1 118:1 126:1 0 4:1 7:1 18:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 77:1 84:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 1 3:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 70:1 84:1 88:1 92:1 95:1 100:1 108:1 118:1 126:1 1 4:1 7:1 14:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 75:1 79:1 88:1 92:1 95:1 100:1 108:1 118:1 120:1 0 4:1 7:1 14:1 21:1 29:1 34:1 36:1 39:1 49:1 54:1 55:1 65:1 69:1 73:1 86:1 88:1 92:1 95:1 
102:1 105:1 118:1 126:1 1 3:1 7:1 14:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 75:1 84:1 88:1 92:1 95:1 100:1 108:1 119:1 126:1 1 4:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 71:1 84:1 88:1 92:1 95:1 100:1 108:1 118:1 126:1 1 4:1 7:1 14:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 75:1 84:1 88:1 92:1 95:1 100:1 108:1 119:1 123:1 1 4:1 7:1 14:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 71:1 80:1 88:1 92:1 95:1 100:1 108:1 118:1 126:1 1 3:1 10:1 19:1 22:1 25:1 34:1 37:1 40:1 49:1 53:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 1 3:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 70:1 79:1 88:1 92:1 95:1 100:1 108:1 119:1 126:1 0 4:1 9:1 11:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 73:1 86:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 1 3:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 75:1 80:1 88:1 92:1 95:1 100:1 108:1 118:1 126:1 1 3:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 71:1 79:1 88:1 92:1 95:1 100:1 108:1 119:1 126:1 1 4:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 75:1 84:1 88:1 92:1 95:1 100:1 108:1 119:1 126:1 1 3:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 75:1 80:1 88:1 92:1 95:1 100:1 108:1 118:1 123:1 1 4:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 71:1 84:1 88:1 92:1 95:1 100:1 108:1 118:1 123:1 1 3:1 7:1 14:1 22:1 25:1 34:1 36:1 40:1 45:1 53:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 117:1 126:1 0 3:1 7:1 18:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 77:1 84:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 1 3:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 71:1 80:1 88:1 92:1 95:1 100:1 108:1 118:1 120:1 1 4:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 71:1 84:1 88:1 92:1 95:1 100:1 108:1 119:1 126:1 1 3:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 71:1 79:1 88:1 92:1 95:1 100:1 108:1 119:1 120:1 1 3:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 70:1 84:1 88:1 92:1 95:1 100:1 108:1 118:1 126:1 1 4:1 7:1 14:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 70:1 80:1 88:1 92:1 95:1 100:1 108:1 119:1 126:1 0 4:1 9:1 14:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 75:1 84:1 88:1 92:1 95:1 102:1 106:1 119:1 126:1 1 3:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 71:1 84:1 88:1 92:1 95:1 100:1 108:1 118:1 123:1 1 4:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 71:1 79:1 88:1 92:1 95:1 100:1 108:1 119:1 123:1 0 4:1 7:1 18:1 21:1 29:1 34:1 36:1 39:1 49:1 54:1 55:1 65:1 69:1 77:1 82:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 0 4:1 9:1 11:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 73:1 82:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 1 3:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 75:1 79:1 88:1 92:1 95:1 100:1 108:1 119:1 120:1 0 4:1 9:1 17:1 22:1 29:1 34:1 36:1 40:1 44:1 53:1 61:1 65:1 66:1 77:1 86:1 88:1 92:1 95:1 99:1 108:1 119:1 126:1 0 4:1 9:1 11:1 21:1 29:1 34:1 36:1 39:1 49:1 54:1 55:1 65:1 69:1 75:1 86:1 88:1 92:1 95:1 102:1 106:1 119:1 126:1 1 4:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 75:1 84:1 88:1 92:1 95:1 100:1 108:1 118:1 120:1 1 4:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 71:1 80:1 88:1 92:1 95:1 100:1 108:1 118:1 120:1 1 3:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 70:1 84:1 88:1 92:1 95:1 100:1 108:1 119:1 126:1 1 3:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 70:1 84:1 88:1 92:1 95:1 100:1 108:1 119:1 
123:1 1 3:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 71:1 80:1 88:1 92:1 95:1 100:1 108:1 119:1 126:1 1 3:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 71:1 80:1 88:1 92:1 95:1 100:1 108:1 118:1 123:1 1 3:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 71:1 79:1 88:1 92:1 95:1 100:1 108:1 118:1 123:1 1 3:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 70:1 80:1 88:1 92:1 95:1 100:1 108:1 119:1 123:1 1 4:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 70:1 84:1 88:1 92:1 95:1 100:1 108:1 119:1 126:1 0 4:1 9:1 18:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 73:1 82:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 0 4:1 9:1 11:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 119:1 126:1 0 4:1 7:1 14:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 75:1 82:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 1 4:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 70:1 84:1 88:1 92:1 95:1 100:1 108:1 118:1 120:1 0 3:1 7:1 18:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 75:1 84:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 1 4:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 71:1 80:1 88:1 92:1 95:1 100:1 108:1 118:1 123:1 1 3:1 7:1 19:1 22:1 25:1 34:1 36:1 40:1 49:1 53:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 1 3:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 71:1 79:1 88:1 92:1 95:1 100:1 108:1 119:1 126:1 1 3:1 9:1 18:1 22:1 26:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 69:1 75:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 126:1 1 3:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 71:1 84:1 88:1 92:1 95:1 100:1 108:1 118:1 123:1 1 4:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 71:1 84:1 88:1 92:1 95:1 100:1 108:1 119:1 126:1 0 4:1 9:1 14:1 21:1 29:1 34:1 36:1 39:1 49:1 54:1 55:1 65:1 69:1 77:1 84:1 88:1 92:1 95:1 102:1 106:1 119:1 126:1 1 3:1 9:1 11:1 22:1 27:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 69:1 75:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 121:1 1 1:1 10:1 12:1 21:1 29:1 34:1 36:1 39:1 45:1 53:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 96:1 102:1 109:1 118:1 122:1 0 4:1 7:1 14:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 73:1 82:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 1 4:1 7:1 14:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 71:1 79:1 88:1 92:1 95:1 100:1 108:1 118:1 120:1 0 4:1 9:1 14:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 77:1 82:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 1 4:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 75:1 79:1 88:1 92:1 95:1 100:1 108:1 119:1 123:1 1 4:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 70:1 79:1 88:1 92:1 95:1 100:1 108:1 119:1 120:1 1 4:1 7:1 14:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 70:1 84:1 88:1 92:1 95:1 100:1 108:1 118:1 120:1 0 4:1 7:1 18:1 21:1 29:1 34:1 36:1 39:1 49:1 54:1 55:1 65:1 69:1 75:1 86:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 1 4:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 75:1 80:1 88:1 92:1 95:1 100:1 108:1 118:1 120:1 1 3:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 70:1 79:1 88:1 92:1 95:1 100:1 108:1 118:1 126:1 1 3:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 71:1 80:1 88:1 92:1 95:1 100:1 108:1 119:1 123:1 1 4:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 75:1 79:1 88:1 92:1 95:1 100:1 108:1 118:1 120:1 1 3:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 70:1 80:1 88:1 92:1 95:1 100:1 108:1 118:1 123:1 0 4:1 9:1 18:1 
21:1 29:1 34:1 36:1 39:1 49:1 54:1 55:1 65:1 69:1 73:1 82:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 1 4:1 7:1 14:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 70:1 84:1 88:1 92:1 95:1 100:1 108:1 118:1 126:1 0 4:1 7:1 11:1 21:1 29:1 34:1 36:1 39:1 49:1 54:1 55:1 65:1 69:1 77:1 84:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 1 3:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 71:1 84:1 88:1 92:1 95:1 100:1 108:1 118:1 123:1 0 4:1 9:1 11:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 75:1 84:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 0 4:1 7:1 18:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 73:1 82:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 1 3:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 71:1 80:1 88:1 92:1 95:1 100:1 108:1 118:1 120:1 1 4:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 71:1 80:1 88:1 92:1 95:1 100:1 108:1 118:1 126:1 1 3:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 71:1 80:1 88:1 92:1 95:1 100:1 108:1 118:1 126:1 1 3:1 10:1 14:1 22:1 25:1 34:1 36:1 40:1 45:1 53:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 117:1 126:1 0 4:1 7:1 14:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 73:1 84:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 1 4:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 75:1 84:1 88:1 92:1 95:1 100:1 108:1 119:1 120:1 1 3:1 7:1 14:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 71:1 80:1 88:1 92:1 95:1 100:1 108:1 119:1 126:1 1 3:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 71:1 79:1 88:1 92:1 95:1 100:1 108:1 119:1 126:1 1 4:1 7:1 14:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 75:1 80:1 88:1 92:1 95:1 100:1 108:1 119:1 126:1 0 4:1 9:1 18:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 77:1 82:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 0 4:1 9:1 18:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 75:1 82:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 1 4:1 10:1 14:1 21:1 27:1 34:1 36:1 39:1 48:1 54:1 55:1 62:1 66:1 77:1 86:1 88:1 92:1 95:1 102:1 108:1 118:1 124:1 1 3:1 7:1 14:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 75:1 79:1 88:1 92:1 95:1 100:1 108:1 119:1 123:1 1 4:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 70:1 80:1 88:1 92:1 95:1 100:1 108:1 118:1 123:1 1 3:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 75:1 80:1 88:1 92:1 95:1 100:1 108:1 118:1 123:1 1 4:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 70:1 84:1 88:1 92:1 95:1 100:1 108:1 118:1 123:1 0 4:1 9:1 18:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 119:1 126:1 1 3:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 71:1 79:1 88:1 92:1 95:1 100:1 108:1 118:1 120:1 1 4:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 75:1 84:1 88:1 92:1 95:1 100:1 108:1 118:1 120:1 0 4:1 9:1 18:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 75:1 84:1 88:1 92:1 95:1 102:1 106:1 119:1 126:1 1 4:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 71:1 79:1 88:1 92:1 95:1 100:1 108:1 118:1 120:1 1 4:1 7:1 14:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 71:1 84:1 88:1 92:1 95:1 100:1 108:1 119:1 123:1 0 4:1 9:1 11:1 21:1 29:1 34:1 36:1 39:1 49:1 54:1 55:1 65:1 69:1 73:1 84:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 1 3:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 75:1 80:1 88:1 92:1 95:1 100:1 108:1 119:1 120:1 1 3:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 70:1 84:1 88:1 92:1 95:1 100:1 108:1 118:1 123:1 1 3:1 9:1 20:1 22:1 27:1 34:1 
36:1 39:1 48:1 53:1 55:1 64:1 68:1 75:1 79:1 88:1 92:1 95:1 100:1 108:1 118:1 126:1 1 3:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 70:1 84:1 88:1 92:1 95:1 100:1 108:1 118:1 126:1 1 3:1 7:1 14:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 70:1 79:1 88:1 92:1 95:1 100:1 108:1 118:1 126:1 1 4:1 7:1 14:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 70:1 84:1 88:1 92:1 95:1 100:1 108:1 119:1 120:1 1 3:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 75:1 79:1 88:1 92:1 95:1 100:1 108:1 118:1 120:1 1 4:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 75:1 80:1 88:1 92:1 95:1 100:1 108:1 119:1 120:1 1 3:1 10:1 12:1 21:1 27:1 34:1 36:1 39:1 51:1 54:1 55:1 62:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 108:1 117:1 124:1 1 4:1 7:1 14:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 70:1 79:1 88:1 92:1 95:1 100:1 108:1 118:1 126:1 1 3:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 70:1 84:1 88:1 92:1 95:1 100:1 108:1 118:1 120:1 1 4:1 7:1 14:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 70:1 84:1 88:1 92:1 95:1 100:1 108:1 118:1 123:1 1 4:1 7:1 14:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 75:1 84:1 88:1 92:1 95:1 100:1 108:1 119:1 126:1 0 4:1 7:1 14:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 75:1 86:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 1 3:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 71:1 80:1 88:1 92:1 95:1 100:1 108:1 119:1 123:1 1 3:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 71:1 80:1 88:1 92:1 95:1 100:1 108:1 119:1 123:1 1 4:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 71:1 84:1 88:1 92:1 95:1 100:1 108:1 118:1 123:1 1 3:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 75:1 79:1 88:1 92:1 95:1 100:1 108:1 119:1 126:1 0 4:1 7:1 14:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 1 3:1 7:1 14:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 70:1 84:1 88:1 92:1 95:1 100:1 108:1 118:1 120:1 1 3:1 7:1 14:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 75:1 84:1 88:1 92:1 95:1 100:1 108:1 119:1 123:1 0 4:1 7:1 14:1 21:1 29:1 34:1 36:1 39:1 49:1 54:1 55:1 65:1 69:1 75:1 82:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 1 4:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 75:1 84:1 88:1 92:1 95:1 100:1 108:1 118:1 123:1 1 4:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 70:1 80:1 88:1 92:1 95:1 100:1 108:1 118:1 126:1 1 4:1 7:1 14:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 70:1 84:1 88:1 92:1 95:1 100:1 108:1 118:1 123:1 1 4:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 75:1 79:1 88:1 92:1 95:1 100:1 108:1 119:1 120:1 1 4:1 10:1 12:1 21:1 27:1 34:1 36:1 39:1 51:1 54:1 55:1 62:1 66:1 77:1 86:1 88:1 92:1 95:1 102:1 108:1 117:1 124:1 0 4:1 7:1 18:1 21:1 29:1 34:1 36:1 39:1 49:1 54:1 55:1 65:1 69:1 75:1 84:1 88:1 92:1 95:1 102:1 106:1 119:1 126:1 1 3:1 9:1 11:1 22:1 26:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 68:1 75:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 126:1 0 3:1 9:1 14:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 73:1 86:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 1 3:1 7:1 14:1 22:1 25:1 34:1 37:1 40:1 42:1 53:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 1 3:1 7:1 16:1 22:1 25:1 34:1 37:1 40:1 42:1 53:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 1 3:1 9:1 11:1 22:1 26:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 68:1 75:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 121:1 1 3:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 
55:1 64:1 68:1 71:1 80:1 88:1 92:1 95:1 100:1 108:1 118:1 120:1 1 3:1 10:1 14:1 21:1 27:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 66:1 77:1 86:1 88:1 92:1 95:1 102:1 108:1 117:1 124:1 1 4:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 71:1 84:1 88:1 92:1 95:1 100:1 108:1 119:1 123:1 1 3:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 71:1 80:1 88:1 92:1 95:1 100:1 108:1 119:1 126:1 1 4:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 70:1 84:1 88:1 92:1 95:1 100:1 108:1 119:1 123:1 1 4:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 71:1 80:1 88:1 92:1 95:1 100:1 108:1 119:1 126:1 1 3:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 70:1 84:1 88:1 92:1 95:1 100:1 108:1 119:1 120:1 1 4:1 7:1 14:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 75:1 84:1 88:1 92:1 95:1 100:1 108:1 119:1 120:1 1 3:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 71:1 80:1 88:1 92:1 95:1 100:1 108:1 119:1 120:1 1 3:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 70:1 80:1 88:1 92:1 95:1 100:1 108:1 118:1 120:1 1 3:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 71:1 84:1 88:1 92:1 95:1 100:1 108:1 119:1 120:1 1 3:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 70:1 84:1 88:1 92:1 95:1 100:1 108:1 118:1 120:1 1 4:1 10:1 14:1 21:1 27:1 34:1 36:1 39:1 44:1 54:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 108:1 118:1 124:1 1 3:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 70:1 79:1 88:1 92:1 95:1 100:1 108:1 119:1 120:1 1 4:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 70:1 84:1 88:1 92:1 95:1 100:1 108:1 119:1 126:1 1 4:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 71:1 84:1 88:1 92:1 95:1 100:1 108:1 118:1 126:1 1 3:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 71:1 79:1 88:1 92:1 95:1 100:1 108:1 118:1 123:1 1 3:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 70:1 80:1 88:1 92:1 95:1 100:1 108:1 119:1 123:1 1 4:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 70:1 84:1 88:1 92:1 95:1 100:1 108:1 118:1 126:1 1 4:1 7:1 14:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 71:1 84:1 88:1 92:1 95:1 100:1 108:1 119:1 126:1 1 4:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 70:1 84:1 88:1 92:1 95:1 100:1 108:1 118:1 123:1 1 3:1 7:1 14:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 70:1 80:1 88:1 92:1 95:1 100:1 108:1 119:1 126:1 1 4:1 10:1 19:1 21:1 27:1 34:1 36:1 39:1 48:1 54:1 55:1 62:1 66:1 77:1 86:1 88:1 92:1 95:1 102:1 108:1 117:1 124:1 1 4:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 75:1 79:1 88:1 92:1 95:1 100:1 108:1 118:1 120:1 1 4:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 71:1 84:1 88:1 92:1 95:1 100:1 108:1 118:1 126:1 1 4:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 70:1 79:1 88:1 92:1 95:1 100:1 108:1 119:1 123:1 1 4:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 71:1 79:1 88:1 92:1 95:1 100:1 108:1 118:1 126:1 1 3:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 70:1 80:1 88:1 92:1 95:1 100:1 108:1 119:1 120:1 1 3:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 71:1 84:1 88:1 92:1 95:1 100:1 108:1 119:1 123:1 1 4:1 7:1 14:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 75:1 80:1 88:1 92:1 95:1 100:1 108:1 118:1 120:1 1 3:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 70:1 80:1 88:1 92:1 95:1 100:1 108:1 119:1 126:1 1 4:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 
70:1 79:1 88:1 92:1 95:1 100:1 108:1 118:1 126:1 1 3:1 10:1 19:1 21:1 27:1 34:1 36:1 39:1 44:1 54:1 55:1 62:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 108:1 117:1 120:1 1 3:1 10:1 12:1 21:1 27:1 34:1 36:1 39:1 48:1 54:1 55:1 62:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 108:1 117:1 120:1 1 3:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 70:1 79:1 88:1 92:1 95:1 100:1 108:1 118:1 126:1 1 3:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 70:1 80:1 88:1 92:1 95:1 100:1 108:1 119:1 120:1 1 3:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 70:1 79:1 88:1 92:1 95:1 100:1 108:1 119:1 120:1 1 3:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 75:1 84:1 88:1 92:1 95:1 100:1 108:1 118:1 120:1 0 4:1 9:1 14:1 21:1 29:1 34:1 36:1 39:1 49:1 54:1 55:1 65:1 69:1 73:1 82:1 88:1 92:1 95:1 102:1 106:1 119:1 126:1 1 3:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 75:1 79:1 88:1 92:1 95:1 100:1 108:1 119:1 123:1 1 4:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 75:1 80:1 88:1 92:1 95:1 100:1 108:1 118:1 120:1 1 4:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 70:1 84:1 88:1 92:1 95:1 100:1 108:1 118:1 123:1 1 4:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 70:1 84:1 88:1 92:1 95:1 100:1 108:1 119:1 126:1 1 3:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 75:1 84:1 88:1 92:1 95:1 100:1 108:1 118:1 123:1 1 4:1 7:1 14:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 71:1 80:1 88:1 92:1 95:1 100:1 108:1 119:1 120:1 1 3:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 75:1 84:1 88:1 92:1 95:1 100:1 108:1 119:1 123:1 1 3:1 7:1 16:1 22:1 25:1 34:1 36:1 40:1 48:1 53:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 117:1 126:1 1 3:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 70:1 79:1 88:1 92:1 95:1 100:1 108:1 119:1 126:1 1 4:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 71:1 84:1 88:1 92:1 95:1 100:1 108:1 119:1 120:1 1 3:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 75:1 79:1 88:1 92:1 95:1 100:1 108:1 118:1 120:1 1 4:1 7:1 14:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 75:1 79:1 88:1 92:1 95:1 100:1 108:1 118:1 120:1 1 3:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 75:1 79:1 88:1 92:1 95:1 100:1 108:1 118:1 126:1 1 3:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 71:1 79:1 88:1 92:1 95:1 100:1 108:1 119:1 120:1 1 3:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 71:1 84:1 88:1 92:1 95:1 100:1 108:1 118:1 123:1 1 3:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 75:1 80:1 88:1 92:1 95:1 100:1 108:1 119:1 126:1 1 3:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 70:1 84:1 88:1 92:1 95:1 100:1 108:1 118:1 126:1 1 4:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 71:1 80:1 88:1 92:1 95:1 100:1 108:1 119:1 123:1 1 4:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 75:1 79:1 88:1 92:1 95:1 100:1 108:1 118:1 126:1 1 3:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 71:1 79:1 88:1 92:1 95:1 100:1 108:1 119:1 123:1 0 5:1 9:1 11:1 22:1 29:1 34:1 37:1 40:1 51:1 53:1 55:1 62:1 66:1 77:1 79:1 88:1 92:1 95:1 98:1 112:1 118:1 121:1 1 4:1 7:1 14:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 70:1 79:1 88:1 92:1 95:1 100:1 108:1 119:1 123:1 1 3:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 70:1 84:1 88:1 92:1 95:1 100:1 108:1 119:1 120:1 1 4:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 75:1 84:1 88:1 92:1 
95:1 100:1 108:1 119:1 120:1 1 3:1 7:1 14:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 71:1 84:1 88:1 92:1 95:1 100:1 108:1 119:1 123:1 1 4:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 75:1 84:1 88:1 92:1 95:1 100:1 108:1 118:1 120:1 0 4:1 10:1 16:1 21:1 29:1 34:1 36:1 39:1 50:1 53:1 61:1 65:1 69:1 77:1 86:1 88:1 92:1 96:1 98:1 112:1 115:1 125:1 1 4:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 70:1 80:1 88:1 92:1 95:1 100:1 108:1 118:1 123:1 1 3:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 71:1 80:1 88:1 92:1 95:1 100:1 108:1 119:1 126:1 1 3:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 75:1 80:1 88:1 92:1 95:1 100:1 108:1 119:1 126:1 1 4:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 71:1 84:1 88:1 92:1 95:1 100:1 108:1 119:1 123:1 1 3:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 71:1 80:1 88:1 92:1 95:1 100:1 108:1 118:1 126:1 1 4:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 75:1 79:1 88:1 92:1 95:1 100:1 108:1 119:1 120:1 1 4:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 70:1 80:1 88:1 92:1 95:1 100:1 108:1 118:1 120:1 0 5:1 10:1 16:1 21:1 29:1 34:1 36:1 39:1 50:1 53:1 61:1 65:1 69:1 77:1 85:1 88:1 92:1 96:1 98:1 112:1 115:1 125:1 1 3:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 75:1 79:1 88:1 92:1 95:1 100:1 108:1 118:1 126:1 1 4:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 75:1 79:1 88:1 92:1 95:1 100:1 108:1 118:1 123:1 1 4:1 7:1 14:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 70:1 80:1 88:1 92:1 95:1 100:1 108:1 118:1 120:1 1 4:1 7:1 14:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 70:1 80:1 88:1 92:1 95:1 100:1 108:1 119:1 123:1 1 3:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 75:1 84:1 88:1 92:1 95:1 100:1 108:1 119:1 120:1 1 4:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 71:1 79:1 88:1 92:1 95:1 100:1 108:1 119:1 120:1 1 4:1 7:1 14:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 70:1 79:1 88:1 92:1 95:1 100:1 108:1 119:1 126:1 1 3:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 75:1 84:1 88:1 92:1 95:1 100:1 108:1 118:1 126:1 1 4:1 7:1 14:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 70:1 79:1 88:1 92:1 95:1 100:1 108:1 118:1 120:1 1 4:1 7:1 14:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 75:1 79:1 88:1 92:1 95:1 100:1 108:1 119:1 126:1 0 4:1 9:1 14:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 75:1 86:1 88:1 92:1 95:1 102:1 106:1 119:1 126:1 1 4:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 75:1 80:1 88:1 92:1 95:1 100:1 108:1 119:1 126:1 1 4:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 70:1 84:1 88:1 92:1 95:1 100:1 108:1 118:1 126:1 1 3:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 71:1 79:1 88:1 92:1 95:1 100:1 108:1 119:1 120:1 1 4:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 71:1 80:1 88:1 92:1 95:1 100:1 108:1 119:1 126:1 1 3:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 75:1 79:1 88:1 92:1 95:1 100:1 108:1 118:1 120:1 1 3:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 70:1 80:1 88:1 92:1 95:1 100:1 108:1 119:1 123:1 1 4:1 7:1 14:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 71:1 84:1 88:1 92:1 95:1 100:1 108:1 118:1 120:1 1 3:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 75:1 80:1 88:1 92:1 95:1 100:1 108:1 118:1 126:1 1 4:1 7:1 14:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 71:1 80:1 88:1 92:1 95:1 100:1 108:1 
118:1 120:1 1 4:1 7:1 14:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 70:1 79:1 88:1 92:1 95:1 100:1 108:1 119:1 120:1 1 4:1 7:1 14:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 75:1 80:1 88:1 92:1 95:1 100:1 108:1 119:1 120:1 1 3:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 71:1 84:1 88:1 92:1 95:1 100:1 108:1 119:1 123:1 1 4:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 70:1 80:1 88:1 92:1 95:1 100:1 108:1 119:1 120:1 1 4:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 70:1 80:1 88:1 92:1 95:1 100:1 108:1 118:1 123:1 0 3:1 9:1 15:1 22:1 29:1 34:1 36:1 40:1 48:1 53:1 61:1 65:1 66:1 77:1 86:1 88:1 92:1 95:1 99:1 108:1 118:1 126:1 1 4:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 70:1 80:1 88:1 92:1 95:1 100:1 108:1 119:1 120:1 1 3:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 71:1 84:1 88:1 92:1 95:1 100:1 108:1 119:1 126:1 1 3:1 9:1 11:1 22:1 31:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 69:1 77:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 123:1 1 4:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 71:1 79:1 88:1 92:1 95:1 100:1 108:1 118:1 126:1 1 5:1 9:1 11:1 22:1 29:1 34:1 36:1 40:1 51:1 53:1 61:1 64:1 67:1 77:1 79:1 88:1 92:1 95:1 98:1 112:1 118:1 126:1 1 3:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 71:1 79:1 88:1 92:1 95:1 100:1 108:1 118:1 126:1 1 3:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 71:1 84:1 88:1 92:1 95:1 100:1 108:1 119:1 120:1 1 4:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 70:1 84:1 88:1 92:1 95:1 100:1 108:1 119:1 120:1 1 3:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 75:1 84:1 88:1 92:1 95:1 100:1 108:1 118:1 126:1 1 3:1 7:1 14:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 70:1 84:1 88:1 92:1 95:1 100:1 108:1 118:1 126:1 1 3:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 70:1 80:1 88:1 92:1 95:1 100:1 108:1 118:1 126:1 1 3:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 75:1 79:1 88:1 92:1 95:1 100:1 108:1 119:1 120:1 0 4:1 9:1 18:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 73:1 84:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 1 4:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 70:1 79:1 88:1 92:1 95:1 100:1 108:1 119:1 123:1 1 3:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 75:1 84:1 88:1 92:1 95:1 100:1 108:1 118:1 120:1 1 4:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 70:1 79:1 88:1 92:1 95:1 100:1 108:1 118:1 120:1 1 3:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 71:1 80:1 88:1 92:1 95:1 100:1 108:1 118:1 123:1 1 4:1 7:1 14:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 70:1 80:1 88:1 92:1 95:1 100:1 108:1 119:1 120:1 1 3:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 75:1 84:1 88:1 92:1 95:1 100:1 108:1 119:1 120:1 1 4:1 10:1 12:1 21:1 27:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 108:1 117:1 124:1 1 3:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 75:1 80:1 88:1 92:1 95:1 100:1 108:1 118:1 120:1 1 3:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 70:1 79:1 88:1 92:1 95:1 100:1 108:1 118:1 123:1 1 4:1 7:1 14:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 70:1 79:1 88:1 92:1 95:1 100:1 108:1 118:1 123:1 1 4:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 75:1 80:1 88:1 92:1 95:1 100:1 108:1 118:1 126:1 1 3:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 71:1 79:1 88:1 92:1 95:1 100:1 108:1 118:1 120:1 1 4:1 7:1 
14:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 75:1 84:1 88:1 92:1 95:1 100:1 108:1 118:1 126:1 1 4:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 70:1 84:1 88:1 92:1 95:1 100:1 108:1 119:1 126:1 1 3:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 75:1 84:1 88:1 92:1 95:1 100:1 108:1 119:1 123:1 1 3:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 71:1 80:1 88:1 92:1 95:1 100:1 108:1 119:1 120:1 1 1:1 9:1 19:1 21:1 29:1 34:1 37:1 40:1 51:1 53:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 112:1 115:1 121:1 1 4:1 10:1 12:1 21:1 27:1 34:1 36:1 39:1 44:1 54:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 108:1 118:1 124:1 1 3:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 71:1 84:1 88:1 92:1 95:1 100:1 108:1 119:1 123:1 1 4:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 71:1 80:1 88:1 92:1 95:1 100:1 108:1 119:1 126:1 1 4:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 75:1 84:1 88:1 92:1 95:1 100:1 108:1 118:1 123:1 1 4:1 7:1 14:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 70:1 79:1 88:1 92:1 95:1 100:1 108:1 119:1 126:1 1 3:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 75:1 84:1 88:1 92:1 95:1 100:1 108:1 118:1 126:1 1 4:1 7:1 14:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 71:1 84:1 88:1 92:1 95:1 100:1 108:1 119:1 120:1 1 3:1 10:1 19:1 21:1 27:1 34:1 36:1 39:1 51:1 54:1 55:1 62:1 66:1 77:1 86:1 88:1 92:1 95:1 102:1 108:1 117:1 124:1 0 4:1 9:1 14:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 73:1 84:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 1 3:1 9:1 11:1 22:1 26:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 68:1 77:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 126:1 1 4:1 7:1 14:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 75:1 79:1 88:1 92:1 95:1 100:1 108:1 119:1 120:1 0 4:1 9:1 18:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 75:1 82:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 0 3:1 9:1 12:1 21:1 29:1 34:1 36:1 39:1 50:1 53:1 61:1 65:1 69:1 77:1 85:1 88:1 92:1 96:1 98:1 112:1 115:1 125:1 1 3:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 70:1 79:1 88:1 92:1 95:1 100:1 108:1 119:1 120:1 1 3:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 71:1 79:1 88:1 92:1 95:1 100:1 108:1 119:1 126:1 1 3:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 70:1 80:1 88:1 92:1 95:1 100:1 108:1 118:1 123:1 1 3:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 70:1 79:1 88:1 92:1 95:1 100:1 108:1 119:1 120:1 1 4:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 71:1 79:1 88:1 92:1 95:1 100:1 108:1 119:1 120:1 1 4:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 71:1 80:1 88:1 92:1 95:1 100:1 108:1 119:1 120:1 1 3:1 7:1 14:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 70:1 79:1 88:1 92:1 95:1 100:1 108:1 119:1 120:1 1 4:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 71:1 80:1 88:1 92:1 95:1 100:1 108:1 118:1 120:1 1 3:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 75:1 84:1 88:1 92:1 95:1 100:1 108:1 118:1 120:1 1 3:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 75:1 79:1 88:1 92:1 95:1 100:1 108:1 119:1 120:1 1 4:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 71:1 79:1 88:1 92:1 95:1 100:1 108:1 119:1 123:1 1 4:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 70:1 80:1 88:1 92:1 95:1 100:1 108:1 119:1 120:1 1 3:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 75:1 80:1 88:1 92:1 95:1 100:1 108:1 119:1 126:1 1 4:1 9:1 14:1 22:1 27:1 34:1 
36:1 39:1 48:1 53:1 55:1 64:1 68:1 71:1 79:1 88:1 92:1 95:1 100:1 108:1 119:1 123:1 1 3:1 7:1 14:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 70:1 79:1 88:1 92:1 95:1 100:1 108:1 118:1 120:1 1 4:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 70:1 84:1 88:1 92:1 95:1 100:1 108:1 118:1 120:1 1 4:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 75:1 79:1 88:1 92:1 95:1 100:1 108:1 119:1 120:1 1 3:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 75:1 79:1 88:1 92:1 95:1 100:1 108:1 118:1 120:1 1 4:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 71:1 80:1 88:1 92:1 95:1 100:1 108:1 118:1 120:1 1 4:1 7:1 14:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 75:1 80:1 88:1 92:1 95:1 100:1 108:1 118:1 120:1 1 4:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 71:1 80:1 88:1 92:1 95:1 100:1 108:1 119:1 126:1 1 4:1 7:1 14:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 71:1 79:1 88:1 92:1 95:1 100:1 108:1 118:1 120:1 1 4:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 71:1 79:1 88:1 92:1 95:1 100:1 108:1 119:1 126:1 1 4:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 71:1 79:1 88:1 92:1 95:1 100:1 108:1 118:1 123:1 1 4:1 7:1 14:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 75:1 79:1 88:1 92:1 95:1 100:1 108:1 118:1 123:1 1 4:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 70:1 80:1 88:1 92:1 95:1 100:1 108:1 119:1 126:1 0 4:1 7:1 18:1 21:1 29:1 34:1 36:1 39:1 49:1 54:1 55:1 65:1 69:1 73:1 86:1 88:1 92:1 95:1 102:1 106:1 119:1 126:1 1 4:1 10:1 12:1 21:1 27:1 34:1 36:1 39:1 44:1 54:1 55:1 65:1 66:1 77:1 86:1 88:1 92:1 95:1 102:1 108:1 118:1 124:1 1 4:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 75:1 79:1 88:1 92:1 95:1 100:1 108:1 118:1 126:1 1 4:1 7:1 14:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 75:1 84:1 88:1 92:1 95:1 100:1 108:1 118:1 123:1 1 4:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 75:1 80:1 88:1 92:1 95:1 100:1 108:1 119:1 123:1 0 4:1 9:1 11:1 21:1 29:1 34:1 36:1 39:1 49:1 54:1 55:1 65:1 69:1 77:1 82:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 1 4:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 71:1 79:1 88:1 92:1 95:1 100:1 108:1 119:1 120:1 0 3:1 10:1 12:1 21:1 29:1 34:1 36:1 39:1 50:1 53:1 61:1 65:1 69:1 77:1 86:1 88:1 92:1 96:1 98:1 112:1 115:1 125:1 1 3:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 70:1 84:1 88:1 92:1 95:1 100:1 108:1 119:1 120:1 1 3:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 75:1 80:1 88:1 92:1 95:1 100:1 108:1 119:1 126:1 1 3:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 75:1 79:1 88:1 92:1 95:1 100:1 108:1 119:1 120:1 1 4:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 71:1 80:1 88:1 92:1 95:1 100:1 108:1 118:1 123:1 0 4:1 7:1 18:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 73:1 82:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 1 4:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 75:1 79:1 88:1 92:1 95:1 100:1 108:1 118:1 126:1 1 4:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 71:1 79:1 88:1 92:1 95:1 100:1 108:1 118:1 120:1 1 3:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 70:1 80:1 88:1 92:1 95:1 100:1 108:1 118:1 120:1 1 3:1 9:1 11:1 22:1 26:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 69:1 77:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 126:1 1 4:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 75:1 80:1 88:1 92:1 95:1 100:1 108:1 118:1 123:1 1 3:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 
55:1 64:1 68:1 70:1 80:1 88:1 92:1 95:1 100:1 108:1 118:1 120:1 1 4:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 70:1 79:1 88:1 92:1 95:1 100:1 108:1 118:1 123:1 1 4:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 70:1 80:1 88:1 92:1 95:1 100:1 108:1 118:1 126:1 1 4:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 71:1 84:1 88:1 92:1 95:1 100:1 108:1 118:1 123:1 1 4:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 71:1 80:1 88:1 92:1 95:1 100:1 108:1 118:1 120:1 1 4:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 71:1 79:1 88:1 92:1 95:1 100:1 108:1 118:1 126:1 1 4:1 7:1 14:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 71:1 79:1 88:1 92:1 95:1 100:1 108:1 118:1 120:1 1 3:1 10:1 14:1 22:1 25:1 34:1 36:1 40:1 48:1 53:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 1 3:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 71:1 84:1 88:1 92:1 95:1 100:1 108:1 119:1 120:1 1 4:1 10:1 14:1 21:1 27:1 34:1 36:1 39:1 44:1 54:1 55:1 62:1 66:1 77:1 86:1 88:1 92:1 95:1 102:1 108:1 117:1 124:1 1 3:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 70:1 79:1 88:1 92:1 95:1 100:1 108:1 118:1 120:1 1 3:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 75:1 80:1 88:1 92:1 95:1 100:1 108:1 119:1 123:1 1 3:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 70:1 80:1 88:1 92:1 95:1 100:1 108:1 119:1 126:1 0 4:1 9:1 14:1 21:1 29:1 34:1 36:1 39:1 49:1 54:1 55:1 65:1 69:1 75:1 82:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 1 4:1 7:1 14:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 71:1 79:1 88:1 92:1 95:1 100:1 108:1 119:1 126:1 1 4:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 70:1 84:1 88:1 92:1 95:1 100:1 108:1 118:1 120:1 1 4:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 70:1 80:1 88:1 92:1 95:1 100:1 108:1 119:1 123:1 1 3:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 71:1 80:1 88:1 92:1 95:1 100:1 108:1 119:1 120:1 1 3:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 71:1 84:1 88:1 92:1 95:1 100:1 108:1 119:1 123:1 0 4:1 9:1 11:1 21:1 29:1 34:1 36:1 39:1 42:1 54:1 55:1 65:1 69:1 73:1 82:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 1 3:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 70:1 79:1 88:1 92:1 95:1 100:1 108:1 119:1 120:1 1 4:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 71:1 84:1 88:1 92:1 95:1 100:1 108:1 118:1 120:1 1 3:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 75:1 84:1 88:1 92:1 95:1 100:1 108:1 119:1 120:1 1 4:1 7:1 14:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 70:1 80:1 88:1 92:1 95:1 100:1 108:1 119:1 123:1 1 4:1 7:1 14:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 75:1 80:1 88:1 92:1 95:1 100:1 108:1 119:1 120:1 1 3:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 71:1 79:1 88:1 92:1 95:1 100:1 108:1 119:1 120:1 0 5:1 9:1 12:1 21:1 29:1 34:1 36:1 39:1 50:1 53:1 61:1 65:1 69:1 76:1 85:1 88:1 92:1 96:1 98:1 112:1 115:1 125:1 1 3:1 9:1 11:1 22:1 31:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 68:1 75:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 126:1 1 4:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 70:1 79:1 88:1 92:1 95:1 100:1 108:1 119:1 123:1 1 3:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 70:1 79:1 88:1 92:1 95:1 100:1 108:1 119:1 123:1 1 4:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 71:1 80:1 88:1 92:1 95:1 100:1 108:1 118:1 120:1 1 4:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 75:1 
84:1 88:1 92:1 95:1 100:1 108:1 118:1 126:1 1 4:1 7:1 14:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 71:1 84:1 88:1 92:1 95:1 100:1 108:1 119:1 120:1 1 4:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 70:1 79:1 88:1 92:1 95:1 100:1 108:1 119:1 120:1 1 3:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 70:1 80:1 88:1 92:1 95:1 100:1 108:1 119:1 126:1 1 3:1 10:1 19:1 21:1 27:1 34:1 36:1 39:1 48:1 54:1 55:1 62:1 66:1 77:1 86:1 88:1 92:1 95:1 102:1 108:1 118:1 124:1 1 3:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 75:1 79:1 88:1 92:1 95:1 100:1 108:1 119:1 120:1 1 3:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 70:1 80:1 88:1 92:1 95:1 100:1 108:1 118:1 123:1 1 4:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 71:1 79:1 88:1 92:1 95:1 100:1 108:1 118:1 120:1 1 3:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 71:1 79:1 88:1 92:1 95:1 100:1 108:1 118:1 123:1 1 4:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 75:1 79:1 88:1 92:1 95:1 100:1 108:1 119:1 123:1 1 4:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 71:1 80:1 88:1 92:1 95:1 100:1 108:1 119:1 123:1 1 4:1 7:1 14:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 71:1 80:1 88:1 92:1 95:1 100:1 108:1 118:1 123:1 0 4:1 9:1 11:1 21:1 29:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 77:1 84:1 88:1 92:1 95:1 102:1 105:1 119:1 126:1 1 3:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 71:1 84:1 88:1 92:1 95:1 100:1 108:1 118:1 126:1 1 3:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 70:1 79:1 88:1 92:1 95:1 100:1 108:1 118:1 123:1 1 4:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 71:1 84:1 88:1 92:1 95:1 100:1 108:1 118:1 120:1 1 3:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 71:1 84:1 88:1 92:1 95:1 100:1 108:1 119:1 126:1 1 4:1 7:1 14:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 70:1 80:1 88:1 92:1 95:1 100:1 108:1 118:1 123:1 1 3:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 75:1 84:1 88:1 92:1 95:1 100:1 108:1 118:1 126:1 1 3:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 71:1 79:1 88:1 92:1 95:1 100:1 108:1 118:1 126:1 1 3:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 70:1 79:1 88:1 92:1 95:1 100:1 108:1 119:1 126:1 0 4:1 7:1 13:1 22:1 29:1 34:1 37:1 40:1 51:1 53:1 55:1 65:1 66:1 77:1 79:1 88:1 92:1 95:1 98:1 112:1 118:1 121:1 1 4:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 75:1 80:1 88:1 92:1 95:1 100:1 108:1 118:1 120:1 1 3:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 75:1 79:1 88:1 92:1 95:1 100:1 108:1 119:1 123:1 1 4:1 7:1 20:1 22:1 29:1 34:1 36:1 40:1 51:1 53:1 61:1 64:1 67:1 77:1 87:1 88:1 92:1 95:1 98:1 112:1 118:1 126:1 1 4:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 75:1 80:1 88:1 92:1 95:1 100:1 108:1 119:1 123:1 1 3:1 10:1 14:1 21:1 27:1 34:1 36:1 39:1 48:1 54:1 55:1 62:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 108:1 118:1 120:1 0 4:1 10:1 11:1 21:1 29:1 34:1 36:1 39:1 50:1 53:1 61:1 65:1 69:1 77:1 86:1 88:1 92:1 96:1 98:1 112:1 115:1 125:1 1 3:1 9:1 11:1 22:1 26:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 68:1 75:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 123:1 1 3:1 10:1 14:1 22:1 25:1 34:1 37:1 40:1 49:1 53:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 105:1 118:1 126:1 1 3:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 70:1 79:1 88:1 92:1 95:1 100:1 108:1 119:1 126:1 1 3:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 70:1 80:1 88:1 92:1 95:1 
100:1 108:1 118:1 120:1 1 3:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 71:1 80:1 88:1 92:1 95:1 100:1 108:1 118:1 120:1 1 3:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 71:1 80:1 88:1 92:1 95:1 100:1 108:1 119:1 126:1 1 4:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 71:1 79:1 88:1 92:1 95:1 100:1 108:1 119:1 120:1 1 4:1 7:1 14:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 75:1 80:1 88:1 92:1 95:1 100:1 108:1 119:1 123:1 1 4:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 71:1 84:1 88:1 92:1 95:1 100:1 108:1 119:1 123:1 1 4:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 70:1 79:1 88:1 92:1 95:1 100:1 108:1 119:1 126:1 1 4:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 70:1 79:1 88:1 92:1 95:1 100:1 108:1 118:1 123:1 1 4:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 71:1 84:1 88:1 92:1 95:1 100:1 108:1 118:1 126:1 1 3:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 71:1 80:1 88:1 92:1 95:1 100:1 108:1 118:1 120:1 1 3:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 71:1 79:1 88:1 92:1 95:1 100:1 108:1 118:1 126:1 0 3:1 10:1 12:1 21:1 29:1 34:1 36:1 39:1 51:1 53:1 61:1 65:1 69:1 76:1 86:1 88:1 92:1 96:1 98:1 112:1 115:1 125:1 1 4:1 7:1 14:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 75:1 79:1 88:1 92:1 95:1 100:1 108:1 118:1 126:1 1 3:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 75:1 80:1 88:1 92:1 95:1 100:1 108:1 118:1 126:1 1 3:1 9:1 11:1 22:1 31:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 69:1 75:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 121:1 1 3:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 71:1 80:1 88:1 92:1 95:1 100:1 108:1 119:1 126:1 1 4:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 75:1 79:1 88:1 92:1 95:1 100:1 108:1 119:1 126:1 1 4:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 71:1 84:1 88:1 92:1 95:1 100:1 108:1 119:1 120:1 1 3:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 70:1 79:1 88:1 92:1 95:1 100:1 108:1 118:1 126:1 1 3:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 75:1 80:1 88:1 92:1 95:1 100:1 108:1 118:1 120:1 1 4:1 7:1 14:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 70:1 79:1 88:1 92:1 95:1 100:1 108:1 119:1 123:1 1 3:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 70:1 84:1 88:1 92:1 95:1 100:1 108:1 118:1 123:1 1 4:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 71:1 80:1 88:1 92:1 95:1 100:1 108:1 119:1 120:1 1 3:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 70:1 84:1 88:1 92:1 95:1 100:1 108:1 119:1 120:1 0 3:1 9:1 17:1 22:1 29:1 34:1 36:1 40:1 44:1 53:1 61:1 65:1 66:1 77:1 86:1 88:1 92:1 95:1 99:1 108:1 118:1 126:1 0 4:1 9:1 12:1 21:1 29:1 34:1 36:1 39:1 50:1 53:1 61:1 65:1 69:1 76:1 85:1 88:1 92:1 96:1 98:1 112:1 115:1 125:1 1 4:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 71:1 84:1 88:1 92:1 95:1 100:1 108:1 119:1 126:1 1 3:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 70:1 84:1 88:1 92:1 95:1 100:1 108:1 119:1 123:1 1 3:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 75:1 80:1 88:1 92:1 95:1 100:1 108:1 118:1 120:1 1 4:1 7:1 14:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 75:1 79:1 88:1 92:1 95:1 100:1 108:1 119:1 126:1 1 4:1 7:1 14:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 71:1 80:1 88:1 92:1 95:1 100:1 108:1 119:1 120:1 1 4:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 70:1 79:1 88:1 92:1 95:1 100:1 108:1 118:1 120:1 
1 3:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 75:1 79:1 88:1 92:1 95:1 100:1 108:1 119:1 120:1 1 4:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 75:1 84:1 88:1 92:1 95:1 100:1 108:1 118:1 126:1 1 4:1 7:1 14:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 75:1 84:1 88:1 92:1 95:1 100:1 108:1 118:1 126:1 1 4:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 70:1 84:1 88:1 92:1 95:1 100:1 108:1 119:1 120:1 1 4:1 7:1 14:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 70:1 84:1 88:1 92:1 95:1 100:1 108:1 118:1 120:1 1 4:1 10:1 12:1 21:1 27:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 66:1 77:1 86:1 88:1 92:1 95:1 102:1 108:1 118:1 120:1 1 4:1 10:1 19:1 21:1 29:1 34:1 36:1 39:1 45:1 53:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 96:1 102:1 109:1 118:1 122:1 1 3:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 75:1 79:1 88:1 92:1 95:1 100:1 108:1 119:1 123:1 1 4:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 71:1 80:1 88:1 92:1 95:1 100:1 108:1 118:1 123:1 1 4:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 75:1 84:1 88:1 92:1 95:1 100:1 108:1 118:1 120:1 1 3:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 70:1 84:1 88:1 92:1 95:1 100:1 108:1 118:1 126:1 1 3:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 75:1 79:1 88:1 92:1 95:1 100:1 108:1 118:1 123:1 1 3:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 75:1 80:1 88:1 92:1 95:1 100:1 108:1 119:1 126:1 0 4:1 7:1 14:1 21:1 29:1 34:1 36:1 39:1 49:1 54:1 55:1 65:1 69:1 73:1 82:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 0 3:1 9:1 17:1 22:1 29:1 34:1 36:1 40:1 49:1 53:1 61:1 65:1 66:1 77:1 86:1 88:1 92:1 95:1 99:1 108:1 118:1 126:1 1 3:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 75:1 84:1 88:1 92:1 95:1 100:1 108:1 119:1 126:1 1 4:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 70:1 80:1 88:1 92:1 95:1 100:1 108:1 118:1 120:1 1 4:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 70:1 84:1 88:1 92:1 95:1 100:1 108:1 118:1 126:1 1 3:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 70:1 84:1 88:1 92:1 95:1 100:1 108:1 119:1 123:1 1 3:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 71:1 84:1 88:1 92:1 95:1 100:1 108:1 118:1 126:1 1 3:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 71:1 79:1 88:1 92:1 95:1 100:1 108:1 119:1 123:1 1 3:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 75:1 79:1 88:1 92:1 95:1 100:1 108:1 119:1 123:1 1 4:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 70:1 79:1 88:1 92:1 95:1 100:1 108:1 119:1 120:1 1 4:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 75:1 79:1 88:1 92:1 95:1 100:1 108:1 118:1 123:1 0 4:1 7:1 18:1 21:1 29:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 77:1 82:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 1 3:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 75:1 84:1 88:1 92:1 95:1 100:1 108:1 119:1 126:1 1 4:1 7:1 14:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 70:1 80:1 88:1 92:1 95:1 100:1 108:1 118:1 126:1 1 3:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 71:1 79:1 88:1 92:1 95:1 100:1 108:1 119:1 123:1 1 4:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 71:1 84:1 88:1 92:1 95:1 100:1 108:1 118:1 123:1 1 4:1 7:1 14:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 70:1 80:1 88:1 92:1 95:1 100:1 108:1 119:1 126:1 1 4:1 7:1 14:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 70:1 84:1 88:1 92:1 95:1 100:1 108:1 118:1 120:1 1 4:1 9:1 14:1 22:1 
27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 75:1 79:1 88:1 92:1 95:1 100:1 108:1 119:1 126:1 1 4:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 75:1 80:1 88:1 92:1 95:1 100:1 108:1 118:1 126:1 1 3:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 75:1 84:1 88:1 92:1 95:1 100:1 108:1 119:1 126:1 1 3:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 70:1 79:1 88:1 92:1 95:1 100:1 108:1 118:1 123:1 1 3:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 71:1 79:1 88:1 92:1 95:1 100:1 108:1 119:1 123:1 1 3:1 7:1 19:1 22:1 25:1 34:1 36:1 40:1 45:1 53:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 106:1 118:1 126:1 1 3:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 75:1 84:1 88:1 92:1 95:1 100:1 108:1 118:1 123:1 1 4:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 75:1 84:1 88:1 92:1 95:1 100:1 108:1 118:1 126:1 1 3:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 75:1 79:1 88:1 92:1 95:1 100:1 108:1 118:1 120:1 1 4:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 75:1 79:1 88:1 92:1 95:1 100:1 108:1 118:1 126:1 1 4:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 70:1 80:1 88:1 92:1 95:1 100:1 108:1 118:1 126:1 1 4:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 71:1 79:1 88:1 92:1 95:1 100:1 108:1 119:1 123:1 1 3:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 71:1 84:1 88:1 92:1 95:1 100:1 108:1 118:1 120:1 1 4:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 70:1 80:1 88:1 92:1 95:1 100:1 108:1 118:1 120:1 1 4:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 70:1 84:1 88:1 92:1 95:1 100:1 108:1 119:1 123:1 1 3:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 71:1 84:1 88:1 92:1 95:1 100:1 108:1 118:1 126:1 1 4:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 70:1 80:1 88:1 92:1 95:1 100:1 108:1 118:1 120:1 1 3:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 71:1 79:1 88:1 92:1 95:1 100:1 108:1 119:1 123:1 1 4:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 71:1 80:1 88:1 92:1 95:1 100:1 108:1 119:1 126:1 1 3:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 71:1 80:1 88:1 92:1 95:1 100:1 108:1 118:1 120:1 1 4:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 70:1 79:1 88:1 92:1 95:1 100:1 108:1 119:1 126:1 1 4:1 7:1 14:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 75:1 80:1 88:1 92:1 95:1 100:1 108:1 119:1 126:1 1 4:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 75:1 79:1 88:1 92:1 95:1 100:1 108:1 118:1 120:1 1 4:1 7:1 14:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 71:1 80:1 88:1 92:1 95:1 100:1 108:1 118:1 126:1 1 4:1 7:1 14:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 75:1 79:1 88:1 92:1 95:1 100:1 108:1 119:1 120:1 1 4:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 75:1 79:1 88:1 92:1 95:1 100:1 108:1 118:1 126:1 1 3:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 71:1 84:1 88:1 92:1 95:1 100:1 108:1 118:1 126:1 1 4:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 70:1 79:1 88:1 92:1 95:1 100:1 108:1 119:1 126:1 1 3:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 71:1 80:1 88:1 92:1 95:1 100:1 108:1 118:1 126:1 1 3:1 9:1 11:1 22:1 26:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 69:1 75:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 126:1 1 3:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 71:1 84:1 88:1 92:1 95:1 100:1 108:1 118:1 120:1 1 3:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 
[Embedded data payload: sparse training records in LIBSVM text format. Each record is a binary class label (0 or 1) followed by space-separated feature_index:value pairs; every value in this payload is 1, so the features are one-hot indicators with indices running from 1 to 126. The label mix and feature layout are consistent with the agaricus (mushroom) demo dataset shipped with XGBoost (demo/data/agaricus.txt.train). The original one-record-per-line layout was lost when the archive was flattened into text, so the raw records are summarized here rather than reproduced.]
1 4:1 10:1 19:1 21:1 27:1 34:1 36:1 39:1 48:1 54:1 55:1 62:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 108:1 118:1 124:1 1 4:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 71:1 84:1 88:1 92:1 95:1 100:1 108:1 118:1 126:1 0 4:1 10:1 16:1 21:1 29:1 34:1 36:1 39:1 50:1 53:1 61:1 65:1 69:1 76:1 86:1 88:1 92:1 96:1 98:1 112:1 115:1 125:1 0 3:1 10:1 18:1 21:1 29:1 34:1 36:1 39:1 51:1 53:1 61:1 65:1 69:1 77:1 85:1 88:1 92:1 96:1 98:1 112:1 115:1 125:1 1 4:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 71:1 79:1 88:1 92:1 95:1 100:1 108:1 118:1 126:1 1 1:1 9:1 12:1 21:1 29:1 34:1 36:1 39:1 45:1 53:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 96:1 102:1 109:1 118:1 122:1 1 4:1 10:1 12:1 21:1 27:1 34:1 36:1 39:1 44:1 54:1 55:1 65:1 66:1 77:1 86:1 88:1 92:1 95:1 102:1 108:1 117:1 124:1 1 4:1 10:1 14:1 21:1 27:1 34:1 36:1 39:1 51:1 54:1 55:1 62:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 108:1 117:1 120:1 1 3:1 10:1 12:1 21:1 27:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 108:1 117:1 120:1 1 3:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 75:1 79:1 88:1 92:1 95:1 100:1 108:1 119:1 123:1 1 4:1 10:1 19:1 21:1 27:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 108:1 117:1 120:1 1 3:1 10:1 14:1 21:1 27:1 34:1 36:1 39:1 51:1 54:1 55:1 62:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 108:1 117:1 124:1 0 4:1 9:1 15:1 22:1 29:1 34:1 36:1 40:1 44:1 53:1 61:1 65:1 66:1 77:1 86:1 88:1 92:1 95:1 99:1 108:1 119:1 126:1 1 4:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 71:1 84:1 88:1 92:1 95:1 100:1 108:1 118:1 120:1 1 3:1 9:1 11:1 22:1 27:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 68:1 77:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 126:1 0 3:1 7:1 13:1 22:1 29:1 34:1 37:1 40:1 51:1 53:1 55:1 62:1 69:1 77:1 79:1 88:1 92:1 95:1 98:1 112:1 118:1 121:1 1 3:1 10:1 14:1 21:1 27:1 34:1 36:1 39:1 44:1 54:1 55:1 62:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 108:1 118:1 124:1 0 3:1 9:1 17:1 22:1 29:1 34:1 36:1 40:1 48:1 53:1 61:1 65:1 66:1 77:1 86:1 88:1 92:1 95:1 99:1 108:1 119:1 126:1 1 3:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 71:1 79:1 88:1 92:1 95:1 100:1 108:1 118:1 123:1 1 4:1 7:1 14:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 75:1 84:1 88:1 92:1 95:1 100:1 108:1 118:1 120:1 1 4:1 9:1 19:1 21:1 29:1 34:1 36:1 39:1 45:1 53:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 96:1 102:1 109:1 118:1 122:1 1 3:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 75:1 79:1 88:1 92:1 95:1 100:1 108:1 118:1 123:1 1 3:1 10:1 12:1 21:1 27:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 66:1 77:1 86:1 88:1 92:1 95:1 102:1 108:1 118:1 120:1 1 3:1 9:1 11:1 22:1 26:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 69:1 77:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 123:1 1 3:1 10:1 14:1 21:1 27:1 34:1 36:1 39:1 44:1 54:1 55:1 62:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 108:1 117:1 124:1 1 3:1 9:1 11:1 22:1 26:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 69:1 77:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 123:1 1 3:1 10:1 14:1 21:1 27:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 108:1 118:1 124:1 1 3:1 10:1 14:1 21:1 27:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 66:1 77:1 86:1 88:1 92:1 95:1 102:1 108:1 117:1 120:1 1 4:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 75:1 84:1 88:1 92:1 95:1 100:1 108:1 118:1 123:1 1 4:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 71:1 79:1 88:1 92:1 95:1 100:1 108:1 119:1 120:1 1 3:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 70:1 84:1 88:1 92:1 95:1 100:1 108:1 119:1 120:1 1 3:1 10:1 14:1 
21:1 27:1 34:1 36:1 39:1 51:1 54:1 55:1 62:1 66:1 77:1 86:1 88:1 92:1 95:1 102:1 108:1 117:1 124:1 1 4:1 10:1 12:1 21:1 27:1 34:1 36:1 39:1 48:1 54:1 55:1 62:1 66:1 77:1 86:1 88:1 92:1 95:1 102:1 108:1 117:1 124:1 1 4:1 10:1 19:1 21:1 27:1 34:1 36:1 39:1 44:1 54:1 55:1 62:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 108:1 117:1 124:1 1 4:1 7:1 14:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 75:1 79:1 88:1 92:1 95:1 100:1 108:1 119:1 120:1 1 4:1 10:1 12:1 21:1 27:1 34:1 36:1 39:1 44:1 54:1 55:1 62:1 66:1 77:1 86:1 88:1 92:1 95:1 102:1 108:1 117:1 120:1 1 3:1 9:1 11:1 22:1 31:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 68:1 77:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 126:1 1 3:1 10:1 19:1 21:1 27:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 108:1 117:1 120:1 0 5:1 10:1 18:1 21:1 29:1 34:1 36:1 39:1 50:1 53:1 61:1 65:1 69:1 76:1 86:1 88:1 92:1 96:1 98:1 112:1 115:1 125:1 1 4:1 9:1 12:1 21:1 29:1 34:1 36:1 39:1 51:1 53:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 96:1 102:1 109:1 118:1 122:1 1 4:1 10:1 19:1 21:1 27:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 108:1 118:1 124:1 1 4:1 10:1 19:1 21:1 27:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 108:1 118:1 124:1 1 3:1 9:1 11:1 22:1 26:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 68:1 77:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 126:1 1 3:1 10:1 19:1 21:1 27:1 34:1 36:1 39:1 51:1 54:1 55:1 62:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 108:1 117:1 120:1 1 3:1 9:1 11:1 22:1 26:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 69:1 75:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 121:1 1 1:1 10:1 16:1 21:1 29:1 34:1 36:1 39:1 51:1 53:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 96:1 102:1 109:1 118:1 120:1 1 3:1 9:1 18:1 22:1 26:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 69:1 75:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 126:1 1 4:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 70:1 79:1 88:1 92:1 95:1 100:1 108:1 118:1 123:1 1 3:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 75:1 79:1 88:1 92:1 95:1 100:1 108:1 118:1 126:1 1 4:1 9:1 19:1 21:1 29:1 34:1 36:1 39:1 46:1 53:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 96:1 102:1 109:1 118:1 122:1 1 4:1 10:1 14:1 21:1 27:1 34:1 36:1 39:1 44:1 54:1 55:1 62:1 66:1 77:1 86:1 88:1 92:1 95:1 102:1 108:1 117:1 120:1 0 5:1 9:1 16:1 21:1 29:1 34:1 36:1 39:1 51:1 53:1 61:1 65:1 69:1 76:1 85:1 88:1 92:1 96:1 98:1 112:1 115:1 125:1 1 3:1 9:1 11:1 22:1 26:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 69:1 75:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 123:1 1 3:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 70:1 79:1 88:1 92:1 95:1 100:1 108:1 118:1 126:1 1 4:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 75:1 79:1 88:1 92:1 95:1 100:1 108:1 118:1 123:1 1 3:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 75:1 84:1 88:1 92:1 95:1 100:1 108:1 119:1 123:1 0 3:1 9:1 18:1 21:1 29:1 34:1 36:1 39:1 51:1 53:1 61:1 65:1 69:1 77:1 85:1 88:1 92:1 96:1 98:1 112:1 115:1 125:1 1 4:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 75:1 80:1 88:1 92:1 95:1 100:1 108:1 118:1 123:1 1 3:1 9:1 11:1 22:1 31:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 68:1 75:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 126:1 1 3:1 10:1 12:1 21:1 27:1 34:1 36:1 39:1 51:1 54:1 55:1 62:1 66:1 77:1 86:1 88:1 92:1 95:1 102:1 108:1 118:1 120:1 1 4:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 75:1 80:1 88:1 92:1 95:1 100:1 108:1 119:1 126:1 0 5:1 9:1 18:1 21:1 29:1 34:1 36:1 39:1 50:1 53:1 61:1 65:1 69:1 76:1 86:1 88:1 92:1 96:1 98:1 112:1 115:1 125:1 1 4:1 9:1 20:1 22:1 27:1 34:1 36:1 
39:1 45:1 53:1 55:1 64:1 68:1 71:1 80:1 88:1 92:1 95:1 100:1 108:1 119:1 123:1 1 4:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 75:1 80:1 88:1 92:1 95:1 100:1 108:1 118:1 120:1 0 4:1 9:1 15:1 22:1 29:1 34:1 36:1 40:1 51:1 53:1 61:1 65:1 66:1 77:1 86:1 88:1 92:1 95:1 99:1 108:1 118:1 126:1 1 3:1 10:1 19:1 21:1 27:1 34:1 36:1 39:1 48:1 54:1 55:1 62:1 66:1 77:1 86:1 88:1 92:1 95:1 102:1 108:1 117:1 120:1 1 3:1 9:1 11:1 22:1 26:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 68:1 77:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 123:1 0 5:1 9:1 18:1 21:1 29:1 34:1 36:1 39:1 50:1 53:1 61:1 65:1 69:1 76:1 85:1 88:1 92:1 96:1 98:1 112:1 115:1 125:1 0 5:1 10:1 16:1 21:1 29:1 34:1 36:1 39:1 51:1 53:1 61:1 65:1 69:1 77:1 86:1 88:1 92:1 96:1 98:1 112:1 115:1 125:1 0 3:1 9:1 12:1 21:1 29:1 34:1 36:1 39:1 51:1 53:1 61:1 65:1 69:1 77:1 85:1 88:1 92:1 96:1 98:1 112:1 115:1 125:1 1 4:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 75:1 84:1 88:1 92:1 95:1 100:1 108:1 119:1 120:1 1 3:1 9:1 11:1 22:1 26:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 68:1 75:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 121:1 1 2:1 9:1 19:1 21:1 29:1 34:1 37:1 40:1 51:1 53:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 112:1 115:1 121:1 1 4:1 10:1 14:1 21:1 27:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 66:1 77:1 86:1 88:1 92:1 95:1 102:1 108:1 117:1 120:1 1 3:1 9:1 18:1 22:1 26:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 68:1 77:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 121:1 1 3:1 10:1 14:1 21:1 27:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 108:1 117:1 120:1 1 3:1 9:1 11:1 22:1 31:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 69:1 77:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 126:1 0 3:1 9:1 16:1 21:1 29:1 34:1 36:1 39:1 50:1 53:1 61:1 65:1 69:1 76:1 86:1 88:1 92:1 96:1 98:1 112:1 115:1 125:1 1 4:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 71:1 84:1 88:1 92:1 95:1 100:1 108:1 119:1 120:1 1 3:1 9:1 11:1 22:1 26:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 68:1 75:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 126:1 1 3:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 75:1 79:1 88:1 92:1 95:1 100:1 108:1 119:1 126:1 1 3:1 9:1 11:1 22:1 31:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 69:1 75:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 121:1 1 3:1 10:1 14:1 21:1 27:1 34:1 36:1 39:1 44:1 54:1 55:1 65:1 66:1 77:1 86:1 88:1 92:1 95:1 102:1 108:1 118:1 124:1 1 3:1 9:1 11:1 22:1 31:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 68:1 77:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 126:1 1 3:1 10:1 19:1 21:1 27:1 34:1 36:1 39:1 48:1 54:1 55:1 62:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 108:1 118:1 124:1 1 4:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 70:1 84:1 88:1 92:1 95:1 100:1 108:1 119:1 123:1 0 3:1 9:1 16:1 21:1 29:1 34:1 36:1 39:1 50:1 53:1 61:1 65:1 69:1 77:1 85:1 88:1 92:1 96:1 98:1 112:1 115:1 125:1 1 1:1 9:1 12:1 21:1 29:1 34:1 36:1 39:1 46:1 53:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 96:1 102:1 109:1 118:1 122:1 1 4:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 71:1 80:1 88:1 92:1 95:1 100:1 108:1 118:1 123:1 0 4:1 9:1 15:1 22:1 29:1 34:1 36:1 40:1 44:1 53:1 61:1 65:1 66:1 77:1 86:1 88:1 92:1 95:1 99:1 108:1 118:1 126:1 1 3:1 9:1 11:1 22:1 27:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 121:1 1 3:1 9:1 11:1 22:1 26:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 68:1 77:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 123:1 1 4:1 10:1 12:1 21:1 27:1 34:1 36:1 39:1 48:1 54:1 55:1 62:1 66:1 77:1 86:1 88:1 92:1 95:1 102:1 108:1 118:1 120:1 1 4:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 
70:1 79:1 88:1 92:1 95:1 100:1 108:1 118:1 126:1 0 4:1 9:1 11:1 21:1 29:1 34:1 36:1 39:1 50:1 53:1 61:1 65:1 69:1 77:1 85:1 88:1 92:1 96:1 98:1 112:1 115:1 125:1 1 3:1 9:1 11:1 22:1 27:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 68:1 75:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 121:1 0 4:1 9:1 18:1 21:1 29:1 34:1 36:1 39:1 51:1 53:1 61:1 65:1 69:1 77:1 85:1 88:1 92:1 96:1 98:1 112:1 115:1 125:1 0 3:1 9:1 12:1 21:1 29:1 34:1 36:1 39:1 51:1 53:1 61:1 65:1 69:1 77:1 86:1 88:1 92:1 96:1 98:1 112:1 115:1 125:1 0 3:1 10:1 11:1 21:1 29:1 34:1 36:1 39:1 50:1 53:1 61:1 65:1 69:1 77:1 85:1 88:1 92:1 96:1 98:1 112:1 115:1 125:1 1 4:1 10:1 19:1 21:1 27:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 66:1 77:1 86:1 88:1 92:1 95:1 102:1 108:1 118:1 120:1 0 3:1 9:1 12:1 21:1 29:1 34:1 36:1 39:1 50:1 53:1 61:1 65:1 69:1 77:1 86:1 88:1 92:1 96:1 98:1 112:1 115:1 125:1 1 3:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 70:1 84:1 88:1 92:1 95:1 100:1 108:1 119:1 120:1 1 4:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 70:1 79:1 88:1 92:1 95:1 100:1 108:1 119:1 126:1 1 4:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 70:1 79:1 88:1 92:1 95:1 100:1 108:1 119:1 120:1 0 5:1 9:1 18:1 21:1 29:1 34:1 36:1 39:1 51:1 53:1 61:1 65:1 69:1 77:1 86:1 88:1 92:1 96:1 98:1 112:1 115:1 125:1 1 4:1 10:1 19:1 21:1 27:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 66:1 77:1 86:1 88:1 92:1 95:1 102:1 108:1 118:1 124:1 0 5:1 9:1 13:1 22:1 29:1 34:1 37:1 40:1 51:1 53:1 55:1 65:1 69:1 77:1 79:1 88:1 92:1 95:1 98:1 112:1 118:1 121:1 0 5:1 9:1 11:1 22:1 29:1 34:1 37:1 40:1 51:1 53:1 55:1 65:1 69:1 77:1 79:1 88:1 92:1 95:1 98:1 112:1 118:1 121:1 1 1:1 10:1 16:1 21:1 29:1 34:1 36:1 39:1 46:1 53:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 96:1 102:1 109:1 118:1 122:1 1 3:1 9:1 11:1 22:1 26:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 68:1 75:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 123:1 1 3:1 10:1 12:1 21:1 27:1 34:1 36:1 39:1 44:1 54:1 55:1 65:1 66:1 77:1 86:1 88:1 92:1 95:1 102:1 108:1 117:1 124:1 0 4:1 9:1 15:1 22:1 29:1 34:1 36:1 40:1 48:1 53:1 61:1 65:1 66:1 77:1 86:1 88:1 92:1 95:1 99:1 108:1 118:1 126:1 1 3:1 10:1 12:1 21:1 27:1 34:1 36:1 39:1 51:1 54:1 55:1 62:1 66:1 77:1 86:1 88:1 92:1 95:1 102:1 108:1 117:1 120:1 0 4:1 9:1 17:1 22:1 29:1 34:1 36:1 40:1 48:1 53:1 61:1 65:1 66:1 77:1 86:1 88:1 92:1 95:1 99:1 108:1 119:1 126:1 1 3:1 9:1 11:1 22:1 26:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 68:1 77:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 123:1 1 3:1 10:1 19:1 21:1 27:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 108:1 118:1 124:1 0 3:1 10:1 18:1 21:1 29:1 34:1 36:1 39:1 51:1 53:1 61:1 65:1 69:1 76:1 86:1 88:1 92:1 96:1 98:1 112:1 115:1 125:1 0 4:1 9:1 16:1 21:1 29:1 34:1 36:1 39:1 50:1 53:1 61:1 65:1 69:1 77:1 85:1 88:1 92:1 96:1 98:1 112:1 115:1 125:1 1 3:1 9:1 11:1 22:1 31:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 69:1 75:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 123:1 1 3:1 9:1 11:1 22:1 27:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 68:1 77:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 123:1 1 3:1 10:1 19:1 21:1 27:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 108:1 118:1 120:1 0 3:1 9:1 11:1 21:1 29:1 34:1 36:1 39:1 50:1 53:1 61:1 65:1 69:1 77:1 86:1 88:1 92:1 96:1 98:1 112:1 115:1 125:1 1 3:1 10:1 19:1 21:1 27:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 66:1 77:1 86:1 88:1 92:1 95:1 102:1 108:1 118:1 124:1 1 3:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 71:1 84:1 88:1 92:1 95:1 100:1 108:1 118:1 120:1 1 4:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 71:1 84:1 88:1 92:1 95:1 
100:1 108:1 118:1 123:1 0 4:1 10:1 16:1 21:1 29:1 34:1 36:1 39:1 51:1 53:1 61:1 65:1 69:1 76:1 86:1 88:1 92:1 96:1 98:1 112:1 115:1 125:1 1 1:1 9:1 19:1 21:1 29:1 34:1 36:1 39:1 46:1 53:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 96:1 102:1 109:1 118:1 122:1 1 4:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 71:1 80:1 88:1 92:1 95:1 100:1 108:1 118:1 126:1 1 3:1 10:1 12:1 21:1 27:1 34:1 36:1 39:1 44:1 54:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 108:1 118:1 120:1 1 3:1 9:1 11:1 22:1 31:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 69:1 77:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 123:1 1 1:1 9:1 16:1 21:1 29:1 34:1 36:1 39:1 45:1 53:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 96:1 102:1 109:1 118:1 120:1 0 3:1 9:1 13:1 22:1 29:1 34:1 37:1 40:1 51:1 53:1 55:1 65:1 69:1 77:1 79:1 88:1 92:1 95:1 98:1 112:1 118:1 121:1 1 3:1 9:1 11:1 22:1 27:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 69:1 75:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 126:1 1 3:1 10:1 19:1 21:1 27:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 108:1 117:1 124:1 1 3:1 9:1 11:1 22:1 27:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 68:1 75:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 126:1 0 4:1 9:1 11:1 22:1 29:1 34:1 37:1 40:1 51:1 53:1 55:1 62:1 69:1 77:1 79:1 88:1 92:1 95:1 98:1 112:1 118:1 121:1 1 3:1 9:1 11:1 22:1 26:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 68:1 75:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 123:1 1 4:1 10:1 14:1 21:1 27:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 108:1 117:1 124:1 1 3:1 9:1 18:1 22:1 26:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 69:1 77:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 126:1 0 3:1 9:1 15:1 22:1 29:1 34:1 36:1 40:1 44:1 53:1 61:1 65:1 66:1 77:1 86:1 88:1 92:1 95:1 99:1 108:1 119:1 126:1 1 1:1 9:1 16:1 21:1 29:1 34:1 36:1 39:1 51:1 53:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 96:1 102:1 109:1 118:1 122:1 1 3:1 9:1 11:1 22:1 26:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 68:1 77:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 121:1 1 3:1 9:1 11:1 22:1 27:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 68:1 77:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 123:1 1 4:1 10:1 14:1 21:1 27:1 34:1 36:1 39:1 51:1 54:1 55:1 62:1 66:1 77:1 86:1 88:1 92:1 95:1 102:1 108:1 118:1 120:1 1 4:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 75:1 80:1 88:1 92:1 95:1 100:1 108:1 118:1 123:1 1 4:1 9:1 16:1 21:1 29:1 34:1 36:1 39:1 46:1 53:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 96:1 102:1 109:1 118:1 122:1 0 4:1 10:1 12:1 21:1 29:1 34:1 36:1 39:1 50:1 53:1 61:1 65:1 69:1 76:1 86:1 88:1 92:1 96:1 98:1 112:1 115:1 125:1 1 4:1 10:1 12:1 21:1 27:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 108:1 118:1 124:1 0 3:1 9:1 13:1 22:1 29:1 34:1 37:1 40:1 51:1 53:1 55:1 65:1 66:1 77:1 79:1 88:1 92:1 95:1 98:1 112:1 118:1 121:1 0 3:1 9:1 19:1 22:1 29:1 34:1 36:1 40:1 51:1 53:1 61:1 65:1 66:1 77:1 86:1 88:1 92:1 95:1 99:1 108:1 119:1 126:1 0 4:1 9:1 11:1 22:1 29:1 34:1 37:1 40:1 51:1 53:1 55:1 65:1 69:1 77:1 79:1 88:1 92:1 95:1 98:1 112:1 118:1 121:1 1 3:1 9:1 11:1 22:1 27:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 69:1 77:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 126:1 1 3:1 9:1 11:1 22:1 31:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 69:1 75:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 126:1 0 5:1 9:1 16:1 21:1 29:1 34:1 36:1 39:1 50:1 53:1 61:1 65:1 69:1 76:1 86:1 88:1 92:1 96:1 98:1 112:1 115:1 125:1 1 4:1 10:1 19:1 21:1 29:1 34:1 36:1 39:1 46:1 53:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 96:1 102:1 109:1 118:1 122:1 1 3:1 9:1 11:1 22:1 27:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 69:1 77:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 121:1 1 4:1 
9:1 20:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 70:1 79:1 88:1 92:1 95:1 100:1 108:1 118:1 120:1 1 4:1 10:1 14:1 21:1 27:1 34:1 36:1 39:1 48:1 54:1 55:1 62:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 108:1 118:1 120:1 0 4:1 10:1 11:1 21:1 29:1 34:1 36:1 39:1 51:1 53:1 61:1 65:1 69:1 77:1 85:1 88:1 92:1 96:1 98:1 112:1 115:1 125:1 1 1:1 10:1 19:1 21:1 29:1 34:1 36:1 39:1 45:1 53:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 96:1 102:1 109:1 118:1 120:1 0 4:1 9:1 12:1 21:1 29:1 34:1 36:1 39:1 51:1 53:1 61:1 65:1 69:1 77:1 85:1 88:1 92:1 96:1 98:1 112:1 115:1 125:1 0 3:1 7:1 13:1 22:1 29:1 34:1 37:1 40:1 51:1 53:1 55:1 65:1 66:1 77:1 79:1 88:1 92:1 95:1 98:1 112:1 118:1 121:1 0 5:1 10:1 16:1 21:1 29:1 34:1 36:1 39:1 50:1 53:1 61:1 65:1 69:1 76:1 86:1 88:1 92:1 96:1 98:1 112:1 115:1 125:1 0 3:1 9:1 12:1 21:1 29:1 34:1 36:1 39:1 50:1 53:1 61:1 65:1 69:1 76:1 85:1 88:1 92:1 96:1 98:1 112:1 115:1 125:1 1 3:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 75:1 80:1 88:1 92:1 95:1 100:1 108:1 119:1 120:1 1 4:1 10:1 14:1 21:1 27:1 34:1 36:1 39:1 44:1 54:1 55:1 62:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 108:1 118:1 120:1 1 4:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 71:1 80:1 88:1 92:1 95:1 100:1 108:1 118:1 123:1 0 3:1 9:1 18:1 21:1 29:1 34:1 36:1 39:1 50:1 53:1 61:1 65:1 69:1 77:1 85:1 88:1 92:1 96:1 98:1 112:1 115:1 125:1 0 3:1 9:1 11:1 22:1 29:1 34:1 37:1 40:1 51:1 53:1 55:1 65:1 69:1 77:1 79:1 88:1 92:1 95:1 98:1 112:1 118:1 121:1 1 4:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 75:1 79:1 88:1 92:1 95:1 100:1 108:1 118:1 126:1 1 4:1 7:1 14:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 71:1 79:1 88:1 92:1 95:1 100:1 108:1 118:1 126:1 1 1:1 9:1 12:1 21:1 29:1 34:1 36:1 39:1 46:1 53:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 96:1 102:1 109:1 118:1 120:1 1 3:1 9:1 18:1 22:1 26:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 69:1 77:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 123:1 1 4:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 71:1 79:1 88:1 92:1 95:1 100:1 108:1 119:1 123:1 1 4:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 70:1 84:1 88:1 92:1 95:1 100:1 108:1 118:1 123:1 1 4:1 9:1 19:1 21:1 29:1 34:1 36:1 39:1 45:1 53:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 96:1 102:1 109:1 118:1 120:1 0 5:1 10:1 18:1 21:1 29:1 34:1 36:1 39:1 50:1 53:1 61:1 65:1 69:1 77:1 85:1 88:1 92:1 96:1 98:1 112:1 115:1 125:1 0 5:1 10:1 11:1 21:1 29:1 34:1 36:1 39:1 50:1 53:1 61:1 65:1 69:1 77:1 85:1 88:1 92:1 96:1 98:1 112:1 115:1 125:1 1 4:1 9:1 20:1 22:1 29:1 34:1 36:1 40:1 51:1 53:1 61:1 64:1 67:1 77:1 79:1 88:1 92:1 95:1 98:1 112:1 118:1 126:1 1 4:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 75:1 79:1 88:1 92:1 95:1 100:1 108:1 119:1 123:1 1 4:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 75:1 80:1 88:1 92:1 95:1 100:1 108:1 118:1 120:1 0 4:1 10:1 12:1 21:1 29:1 34:1 36:1 39:1 51:1 53:1 61:1 65:1 69:1 76:1 85:1 88:1 92:1 96:1 98:1 112:1 115:1 125:1 1 4:1 10:1 19:1 21:1 27:1 34:1 36:1 39:1 51:1 54:1 55:1 62:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 108:1 118:1 124:1 1 4:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 75:1 84:1 88:1 92:1 95:1 100:1 108:1 119:1 126:1 0 4:1 9:1 16:1 21:1 29:1 34:1 36:1 39:1 51:1 53:1 61:1 65:1 69:1 76:1 86:1 88:1 92:1 96:1 98:1 112:1 115:1 125:1 1 4:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 70:1 79:1 88:1 92:1 95:1 100:1 108:1 119:1 120:1 1 4:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 70:1 79:1 88:1 92:1 95:1 100:1 108:1 119:1 123:1 0 5:1 10:1 18:1 21:1 29:1 34:1 
36:1 39:1 51:1 53:1 61:1 65:1 69:1 76:1 85:1 88:1 92:1 96:1 98:1 112:1 115:1 125:1 1 3:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 75:1 79:1 88:1 92:1 95:1 100:1 108:1 118:1 120:1 1 3:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 75:1 84:1 88:1 92:1 95:1 100:1 108:1 118:1 126:1 1 4:1 10:1 14:1 21:1 27:1 34:1 36:1 39:1 51:1 54:1 55:1 62:1 66:1 77:1 86:1 88:1 92:1 95:1 102:1 108:1 117:1 124:1 1 4:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 75:1 79:1 88:1 92:1 95:1 100:1 108:1 119:1 120:1 1 4:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 71:1 80:1 88:1 92:1 95:1 100:1 108:1 119:1 120:1 1 3:1 7:1 14:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 75:1 84:1 88:1 92:1 95:1 100:1 108:1 119:1 123:1 0 4:1 9:1 16:1 21:1 29:1 34:1 36:1 39:1 51:1 53:1 61:1 65:1 69:1 77:1 86:1 88:1 92:1 96:1 98:1 112:1 115:1 125:1 1 3:1 10:1 12:1 21:1 27:1 34:1 36:1 39:1 44:1 54:1 55:1 62:1 66:1 77:1 86:1 88:1 92:1 95:1 102:1 108:1 118:1 120:1 1 3:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 71:1 79:1 88:1 92:1 95:1 100:1 108:1 119:1 126:1 1 4:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 75:1 80:1 88:1 92:1 95:1 100:1 108:1 119:1 120:1 0 4:1 9:1 15:1 22:1 29:1 34:1 36:1 40:1 51:1 53:1 61:1 65:1 66:1 77:1 86:1 88:1 92:1 95:1 99:1 108:1 119:1 126:1 1 4:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 70:1 80:1 88:1 92:1 95:1 100:1 108:1 119:1 123:1 1 3:1 10:1 14:1 21:1 27:1 34:1 36:1 39:1 44:1 54:1 55:1 62:1 66:1 77:1 86:1 88:1 92:1 95:1 102:1 108:1 118:1 120:1 1 4:1 10:1 12:1 21:1 27:1 34:1 36:1 39:1 51:1 54:1 55:1 62:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 108:1 117:1 120:1 0 3:1 9:1 11:1 21:1 29:1 34:1 36:1 39:1 51:1 53:1 61:1 65:1 69:1 76:1 85:1 88:1 92:1 96:1 98:1 112:1 115:1 125:1 1 4:1 7:1 20:1 22:1 29:1 34:1 36:1 40:1 51:1 53:1 61:1 64:1 67:1 77:1 79:1 88:1 92:1 95:1 98:1 112:1 118:1 126:1 0 4:1 9:1 17:1 22:1 29:1 34:1 36:1 40:1 51:1 53:1 61:1 65:1 66:1 77:1 86:1 88:1 92:1 95:1 99:1 108:1 118:1 126:1 1 3:1 9:1 11:1 22:1 26:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 69:1 75:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 121:1 0 3:1 10:1 11:1 21:1 29:1 34:1 36:1 39:1 51:1 53:1 61:1 65:1 69:1 77:1 86:1 88:1 92:1 96:1 98:1 112:1 115:1 125:1 1 3:1 10:1 19:1 21:1 27:1 34:1 36:1 39:1 51:1 54:1 55:1 62:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 108:1 118:1 124:1 1 4:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 75:1 80:1 88:1 92:1 95:1 100:1 108:1 118:1 126:1 0 4:1 10:1 12:1 21:1 29:1 34:1 36:1 39:1 50:1 53:1 61:1 65:1 69:1 76:1 85:1 88:1 92:1 96:1 98:1 112:1 115:1 125:1 0 5:1 7:1 13:1 22:1 29:1 34:1 37:1 40:1 51:1 53:1 55:1 65:1 69:1 77:1 79:1 88:1 92:1 95:1 98:1 112:1 118:1 121:1 1 3:1 10:1 14:1 21:1 27:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 66:1 77:1 86:1 88:1 92:1 95:1 102:1 108:1 118:1 124:1 1 4:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 75:1 79:1 88:1 92:1 95:1 100:1 108:1 118:1 123:1 1 3:1 9:1 18:1 22:1 26:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 68:1 75:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 121:1 0 5:1 9:1 16:1 21:1 29:1 34:1 36:1 39:1 51:1 53:1 61:1 65:1 69:1 77:1 86:1 88:1 92:1 96:1 98:1 112:1 115:1 125:1 1 3:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 75:1 80:1 88:1 92:1 95:1 100:1 108:1 118:1 120:1 1 3:1 9:1 11:1 22:1 26:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 68:1 77:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 126:1 0 4:1 10:1 11:1 21:1 29:1 34:1 36:1 39:1 50:1 53:1 61:1 65:1 69:1 76:1 86:1 88:1 92:1 96:1 98:1 112:1 115:1 125:1 1 4:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 
64:1 68:1 75:1 84:1 88:1 92:1 95:1 100:1 108:1 119:1 120:1 1 3:1 9:1 11:1 22:1 31:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 69:1 75:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 126:1 0 4:1 10:1 11:1 21:1 29:1 34:1 36:1 39:1 51:1 53:1 61:1 65:1 69:1 76:1 85:1 88:1 92:1 96:1 98:1 112:1 115:1 125:1 1 4:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 75:1 84:1 88:1 92:1 95:1 100:1 108:1 118:1 126:1 1 4:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 70:1 84:1 88:1 92:1 95:1 100:1 108:1 118:1 123:1 1 4:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 71:1 79:1 88:1 92:1 95:1 100:1 108:1 119:1 123:1 0 4:1 7:1 13:1 22:1 29:1 34:1 37:1 40:1 51:1 53:1 55:1 62:1 69:1 77:1 79:1 88:1 92:1 95:1 98:1 112:1 118:1 121:1 1 1:1 9:1 12:1 21:1 29:1 34:1 36:1 39:1 51:1 53:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 96:1 102:1 109:1 118:1 120:1 0 5:1 9:1 18:1 21:1 29:1 34:1 36:1 39:1 51:1 53:1 61:1 65:1 69:1 76:1 86:1 88:1 92:1 96:1 98:1 112:1 115:1 125:1 1 3:1 10:1 12:1 21:1 27:1 34:1 36:1 39:1 51:1 54:1 55:1 62:1 66:1 77:1 86:1 88:1 92:1 95:1 102:1 108:1 118:1 124:1 1 3:1 10:1 14:1 21:1 27:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 108:1 118:1 120:1 1 4:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 71:1 79:1 88:1 92:1 95:1 100:1 108:1 118:1 120:1 1 3:1 10:1 14:1 21:1 27:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 108:1 117:1 124:1 1 3:1 9:1 18:1 22:1 26:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 68:1 75:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 123:1 1 4:1 10:1 19:1 21:1 27:1 34:1 36:1 39:1 48:1 54:1 55:1 62:1 66:1 77:1 86:1 88:1 92:1 95:1 102:1 108:1 118:1 124:1 0 4:1 10:1 16:1 21:1 29:1 34:1 36:1 39:1 51:1 53:1 61:1 65:1 69:1 77:1 86:1 88:1 92:1 96:1 98:1 112:1 115:1 125:1 1 4:1 10:1 12:1 21:1 27:1 34:1 36:1 39:1 44:1 54:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 108:1 118:1 120:1 1 3:1 9:1 18:1 22:1 26:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 68:1 77:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 123:1 0 4:1 9:1 19:1 22:1 29:1 34:1 36:1 40:1 49:1 53:1 61:1 65:1 66:1 77:1 86:1 88:1 92:1 95:1 99:1 108:1 118:1 126:1 1 3:1 10:1 12:1 21:1 27:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 108:1 118:1 124:1 1 5:1 8:1 19:1 21:1 29:1 34:1 37:1 40:1 51:1 53:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 112:1 115:1 121:1 0 5:1 10:1 18:1 21:1 29:1 34:1 36:1 39:1 51:1 53:1 61:1 65:1 69:1 77:1 86:1 88:1 92:1 96:1 98:1 112:1 115:1 125:1 1 3:1 9:1 11:1 22:1 26:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 69:1 75:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 126:1 1 3:1 9:1 11:1 22:1 31:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 123:1 1 4:1 10:1 19:1 21:1 27:1 34:1 36:1 39:1 44:1 54:1 55:1 62:1 66:1 77:1 86:1 88:1 92:1 95:1 102:1 108:1 117:1 120:1 0 4:1 10:1 18:1 21:1 29:1 34:1 36:1 39:1 50:1 53:1 61:1 65:1 69:1 77:1 85:1 88:1 92:1 96:1 98:1 112:1 115:1 125:1 1 1:1 10:1 19:1 21:1 29:1 34:1 36:1 39:1 46:1 53:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 96:1 102:1 109:1 118:1 122:1 1 4:1 10:1 14:1 21:1 27:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 108:1 118:1 124:1 1 3:1 10:1 19:1 21:1 27:1 34:1 36:1 39:1 44:1 54:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 108:1 118:1 120:1 1 4:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 75:1 79:1 88:1 92:1 95:1 100:1 108:1 118:1 120:1 0 4:1 9:1 11:1 21:1 29:1 34:1 36:1 39:1 51:1 53:1 61:1 65:1 69:1 76:1 86:1 88:1 92:1 96:1 98:1 112:1 115:1 125:1 0 3:1 9:1 15:1 22:1 29:1 34:1 36:1 40:1 49:1 53:1 61:1 65:1 66:1 77:1 86:1 
88:1 92:1 95:1 99:1 108:1 118:1 126:1 0 5:1 10:1 12:1 21:1 29:1 34:1 36:1 39:1 50:1 53:1 61:1 65:1 69:1 76:1 85:1 88:1 92:1 96:1 98:1 112:1 115:1 125:1 1 3:1 10:1 19:1 21:1 27:1 34:1 36:1 39:1 44:1 54:1 55:1 65:1 66:1 77:1 86:1 88:1 92:1 95:1 102:1 108:1 118:1 124:1 1 3:1 9:1 11:1 22:1 26:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 68:1 75:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 126:1 0 3:1 9:1 12:1 21:1 29:1 34:1 36:1 39:1 51:1 53:1 61:1 65:1 69:1 76:1 85:1 88:1 92:1 96:1 98:1 112:1 115:1 125:1 1 4:1 9:1 19:1 21:1 29:1 34:1 36:1 39:1 51:1 53:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 96:1 102:1 109:1 118:1 120:1 1 3:1 10:1 14:1 21:1 27:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 108:1 118:1 120:1 1 4:1 10:1 14:1 21:1 27:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 66:1 77:1 86:1 88:1 92:1 95:1 102:1 108:1 117:1 120:1 0 5:1 9:1 16:1 21:1 29:1 34:1 36:1 39:1 50:1 53:1 61:1 65:1 69:1 76:1 85:1 88:1 92:1 96:1 98:1 112:1 115:1 125:1 1 4:1 9:1 12:1 21:1 29:1 34:1 36:1 39:1 45:1 53:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 96:1 102:1 109:1 118:1 122:1 1 3:1 9:1 11:1 22:1 31:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 69:1 75:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 126:1 1 4:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 75:1 79:1 88:1 92:1 95:1 100:1 108:1 118:1 123:1 0 5:1 7:1 13:1 22:1 29:1 34:1 37:1 40:1 51:1 53:1 55:1 65:1 66:1 77:1 79:1 88:1 92:1 95:1 98:1 112:1 118:1 121:1 1 4:1 10:1 14:1 21:1 27:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 66:1 77:1 86:1 88:1 92:1 95:1 102:1 108:1 118:1 124:1 1 4:1 10:1 19:1 21:1 27:1 34:1 36:1 39:1 51:1 54:1 55:1 62:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 108:1 118:1 120:1 1 1:1 10:1 12:1 21:1 29:1 34:1 36:1 39:1 46:1 53:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 96:1 102:1 109:1 118:1 122:1 1 4:1 10:1 19:1 21:1 27:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 66:1 77:1 86:1 88:1 92:1 95:1 102:1 108:1 117:1 120:1 0 3:1 9:1 11:1 21:1 29:1 34:1 36:1 39:1 51:1 53:1 61:1 65:1 69:1 77:1 85:1 88:1 92:1 96:1 98:1 112:1 115:1 125:1 1 3:1 9:1 11:1 22:1 31:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 68:1 77:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 121:1 1 3:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 71:1 79:1 88:1 92:1 95:1 100:1 108:1 118:1 120:1 0 3:1 10:1 12:1 21:1 29:1 34:1 36:1 39:1 51:1 53:1 61:1 65:1 69:1 77:1 86:1 88:1 92:1 96:1 98:1 112:1 115:1 125:1 1 3:1 9:1 11:1 22:1 27:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 69:1 77:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 121:1 1 3:1 10:1 12:1 21:1 27:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 108:1 117:1 124:1 1 3:1 9:1 11:1 22:1 26:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 69:1 77:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 121:1 1 3:1 10:1 19:1 21:1 27:1 34:1 36:1 39:1 44:1 54:1 55:1 62:1 66:1 77:1 86:1 88:1 92:1 95:1 102:1 108:1 118:1 124:1 1 4:1 10:1 19:1 21:1 27:1 34:1 36:1 39:1 44:1 54:1 55:1 62:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 108:1 117:1 120:1 1 4:1 10:1 19:1 21:1 27:1 34:1 36:1 39:1 48:1 54:1 55:1 62:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 108:1 117:1 124:1 0 3:1 9:1 19:1 22:1 29:1 34:1 36:1 40:1 49:1 53:1 61:1 65:1 66:1 77:1 86:1 88:1 92:1 95:1 99:1 108:1 118:1 126:1 1 3:1 9:1 11:1 22:1 31:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 68:1 75:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 126:1 0 5:1 9:1 12:1 21:1 29:1 34:1 36:1 39:1 51:1 53:1 61:1 65:1 69:1 77:1 86:1 88:1 92:1 96:1 98:1 112:1 115:1 125:1 1 3:1 9:1 11:1 22:1 27:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 68:1 75:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 126:1 1 3:1 10:1 12:1 21:1 27:1 34:1 36:1 39:1 48:1 54:1 55:1 62:1 66:1 77:1 86:1 88:1 92:1 95:1 102:1 
108:1 117:1 124:1 0 4:1 9:1 15:1 22:1 29:1 34:1 36:1 40:1 48:1 53:1 61:1 65:1 66:1 77:1 86:1 88:1 92:1 95:1 99:1 108:1 119:1 126:1 1 1:1 10:1 16:1 21:1 29:1 34:1 36:1 39:1 45:1 53:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 96:1 102:1 109:1 118:1 120:1 1 3:1 10:1 12:1 21:1 27:1 34:1 36:1 39:1 48:1 54:1 55:1 62:1 66:1 77:1 86:1 88:1 92:1 95:1 102:1 108:1 118:1 124:1 0 4:1 9:1 13:1 22:1 29:1 34:1 37:1 40:1 51:1 53:1 55:1 62:1 66:1 77:1 79:1 88:1 92:1 95:1 98:1 112:1 118:1 121:1 0 3:1 9:1 17:1 22:1 29:1 34:1 36:1 40:1 48:1 53:1 61:1 65:1 66:1 77:1 86:1 88:1 92:1 95:1 99:1 108:1 118:1 126:1 1 3:1 10:1 12:1 21:1 27:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 66:1 77:1 86:1 88:1 92:1 95:1 102:1 108:1 118:1 124:1 1 3:1 9:1 11:1 22:1 26:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 68:1 77:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 121:1 0 3:1 10:1 16:1 21:1 29:1 34:1 36:1 39:1 50:1 53:1 61:1 65:1 69:1 76:1 86:1 88:1 92:1 96:1 98:1 112:1 115:1 125:1 1 4:1 10:1 16:1 21:1 29:1 34:1 36:1 39:1 46:1 53:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 96:1 102:1 109:1 118:1 122:1 1 4:1 9:1 11:1 22:1 29:1 34:1 36:1 40:1 51:1 53:1 61:1 64:1 67:1 77:1 87:1 88:1 92:1 95:1 98:1 112:1 118:1 126:1 1 4:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 75:1 84:1 88:1 92:1 95:1 100:1 108:1 119:1 123:1 1 4:1 10:1 19:1 21:1 27:1 34:1 36:1 39:1 44:1 54:1 55:1 65:1 66:1 77:1 86:1 88:1 92:1 95:1 102:1 108:1 118:1 120:1 1 3:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 71:1 80:1 88:1 92:1 95:1 100:1 108:1 118:1 120:1 0 4:1 9:1 18:1 21:1 29:1 34:1 36:1 39:1 51:1 53:1 61:1 65:1 69:1 76:1 85:1 88:1 92:1 96:1 98:1 112:1 115:1 125:1 1 4:1 10:1 12:1 21:1 29:1 34:1 36:1 39:1 51:1 53:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 96:1 102:1 109:1 118:1 120:1 1 1:1 10:1 16:1 21:1 29:1 34:1 36:1 39:1 51:1 53:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 96:1 102:1 109:1 118:1 122:1 1 3:1 9:1 11:1 22:1 31:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 68:1 75:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 121:1 1 3:1 10:1 12:1 21:1 27:1 34:1 36:1 39:1 48:1 54:1 55:1 62:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 108:1 118:1 124:1 1 3:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 75:1 80:1 88:1 92:1 95:1 100:1 108:1 119:1 123:1 1 1:1 9:1 19:1 21:1 29:1 34:1 36:1 39:1 45:1 53:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 96:1 102:1 109:1 118:1 120:1 0 4:1 9:1 11:1 22:1 29:1 34:1 37:1 40:1 51:1 53:1 55:1 65:1 66:1 77:1 79:1 88:1 92:1 95:1 98:1 112:1 118:1 121:1 1 4:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 75:1 84:1 88:1 92:1 95:1 100:1 108:1 119:1 120:1 1 4:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 75:1 80:1 88:1 92:1 95:1 100:1 108:1 118:1 123:1 1 4:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 70:1 84:1 88:1 92:1 95:1 100:1 108:1 118:1 120:1 1 3:1 10:1 14:1 21:1 27:1 34:1 36:1 39:1 44:1 54:1 55:1 65:1 66:1 77:1 86:1 88:1 92:1 95:1 102:1 108:1 117:1 124:1 1 3:1 9:1 18:1 22:1 26:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 68:1 77:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 121:1 1 3:1 9:1 11:1 22:1 27:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 68:1 75:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 123:1 1 4:1 10:1 19:1 21:1 27:1 34:1 36:1 39:1 44:1 54:1 55:1 65:1 66:1 77:1 86:1 88:1 92:1 95:1 102:1 108:1 117:1 120:1 1 4:1 7:1 14:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 71:1 79:1 88:1 92:1 95:1 100:1 108:1 119:1 120:1 0 4:1 7:1 11:1 22:1 29:1 34:1 37:1 40:1 51:1 53:1 55:1 65:1 69:1 77:1 79:1 88:1 92:1 95:1 98:1 112:1 118:1 121:1 1 3:1 10:1 14:1 21:1 27:1 34:1 36:1 39:1 51:1 54:1 55:1 62:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 108:1 117:1 120:1 1 
4:1 10:1 12:1 21:1 27:1 34:1 36:1 39:1 44:1 54:1 55:1 62:1 66:1 77:1 86:1 88:1 92:1 95:1 102:1 108:1 118:1 124:1 0 4:1 10:1 18:1 21:1 29:1 34:1 36:1 39:1 50:1 53:1 61:1 65:1 69:1 76:1 85:1 88:1 92:1 96:1 98:1 112:1 115:1 125:1 1 3:1 10:1 12:1 21:1 27:1 34:1 36:1 39:1 44:1 54:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 108:1 117:1 124:1 1 4:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 71:1 79:1 88:1 92:1 95:1 100:1 108:1 118:1 126:1 1 3:1 9:1 11:1 22:1 27:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 68:1 75:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 126:1 1 4:1 10:1 19:1 21:1 27:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 108:1 118:1 120:1 1 4:1 10:1 14:1 21:1 27:1 34:1 36:1 39:1 44:1 54:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 108:1 117:1 120:1 0 4:1 7:1 13:1 22:1 29:1 34:1 37:1 40:1 51:1 53:1 55:1 65:1 69:1 77:1 79:1 88:1 92:1 95:1 98:1 112:1 118:1 121:1 0 5:1 9:1 18:1 21:1 29:1 34:1 36:1 39:1 50:1 53:1 61:1 65:1 69:1 77:1 85:1 88:1 92:1 96:1 98:1 112:1 115:1 125:1 1 4:1 10:1 19:1 21:1 27:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 66:1 77:1 86:1 88:1 92:1 95:1 102:1 108:1 117:1 120:1 1 3:1 9:1 18:1 22:1 26:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 68:1 75:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 121:1 0 3:1 10:1 11:1 21:1 29:1 34:1 36:1 39:1 51:1 53:1 61:1 65:1 69:1 76:1 85:1 88:1 92:1 96:1 98:1 112:1 115:1 125:1 1 3:1 10:1 19:1 21:1 27:1 34:1 36:1 39:1 51:1 54:1 55:1 62:1 66:1 77:1 86:1 88:1 92:1 95:1 102:1 108:1 118:1 124:1 0 4:1 10:1 18:1 21:1 29:1 34:1 36:1 39:1 51:1 53:1 61:1 65:1 69:1 76:1 85:1 88:1 92:1 96:1 98:1 112:1 115:1 125:1 1 4:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 71:1 84:1 88:1 92:1 95:1 100:1 108:1 118:1 126:1 1 4:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 70:1 84:1 88:1 92:1 95:1 100:1 108:1 118:1 126:1 0 4:1 9:1 18:1 21:1 29:1 34:1 36:1 39:1 50:1 53:1 61:1 65:1 69:1 76:1 86:1 88:1 92:1 96:1 98:1 112:1 115:1 125:1 0 5:1 10:1 11:1 21:1 29:1 34:1 36:1 39:1 50:1 53:1 61:1 65:1 69:1 76:1 86:1 88:1 92:1 96:1 98:1 112:1 115:1 125:1 1 4:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 70:1 80:1 88:1 92:1 95:1 100:1 108:1 119:1 126:1 1 3:1 10:1 19:1 21:1 27:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 66:1 77:1 86:1 88:1 92:1 95:1 102:1 108:1 117:1 120:1 1 3:1 9:1 11:1 22:1 31:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 68:1 75:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 123:1 1 4:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 71:1 79:1 88:1 92:1 95:1 100:1 108:1 119:1 120:1 0 4:1 9:1 19:1 22:1 29:1 34:1 36:1 40:1 51:1 53:1 61:1 65:1 66:1 77:1 86:1 88:1 92:1 95:1 99:1 108:1 118:1 126:1 1 3:1 9:1 11:1 22:1 27:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 69:1 75:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 123:1 1 4:1 10:1 14:1 21:1 27:1 34:1 36:1 39:1 48:1 54:1 55:1 62:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 108:1 117:1 124:1 1 4:1 10:1 19:1 21:1 29:1 34:1 36:1 39:1 51:1 53:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 96:1 102:1 109:1 118:1 120:1 1 3:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 75:1 80:1 88:1 92:1 95:1 100:1 108:1 118:1 126:1 1 4:1 10:1 14:1 21:1 27:1 34:1 36:1 39:1 44:1 54:1 55:1 65:1 66:1 77:1 86:1 88:1 92:1 95:1 102:1 108:1 118:1 120:1 1 4:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 75:1 79:1 88:1 92:1 95:1 100:1 108:1 119:1 123:1 1 3:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 75:1 84:1 88:1 92:1 95:1 100:1 108:1 118:1 120:1 1 3:1 10:1 14:1 21:1 27:1 34:1 36:1 39:1 48:1 54:1 55:1 62:1 66:1 77:1 86:1 88:1 92:1 95:1 102:1 108:1 117:1 120:1 1 4:1 10:1 19:1 
21:1 27:1 34:1 36:1 39:1 51:1 54:1 55:1 62:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 108:1 117:1 120:1 1 3:1 9:1 11:1 22:1 31:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 68:1 75:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 126:1 1 3:1 10:1 14:1 21:1 27:1 34:1 36:1 39:1 51:1 54:1 55:1 62:1 66:1 77:1 86:1 88:1 92:1 95:1 102:1 108:1 117:1 120:1 1 4:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 75:1 84:1 88:1 92:1 95:1 100:1 108:1 119:1 120:1 1 1:1 10:1 19:1 21:1 29:1 34:1 36:1 39:1 51:1 53:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 96:1 102:1 109:1 118:1 122:1 1 3:1 9:1 11:1 22:1 27:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 68:1 75:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 126:1 1 1:1 10:1 12:1 21:1 29:1 34:1 36:1 39:1 51:1 53:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 96:1 102:1 109:1 118:1 122:1 1 4:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 70:1 84:1 88:1 92:1 95:1 100:1 108:1 119:1 120:1 1 3:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 70:1 80:1 88:1 92:1 95:1 100:1 108:1 119:1 126:1 0 3:1 9:1 18:1 21:1 29:1 34:1 36:1 39:1 50:1 53:1 61:1 65:1 69:1 77:1 86:1 88:1 92:1 96:1 98:1 112:1 115:1 125:1 0 5:1 10:1 12:1 21:1 29:1 34:1 36:1 39:1 51:1 53:1 61:1 65:1 69:1 76:1 85:1 88:1 92:1 96:1 98:1 112:1 115:1 125:1 1 3:1 9:1 11:1 22:1 27:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 69:1 75:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 121:1 0 3:1 10:1 18:1 21:1 29:1 34:1 36:1 39:1 50:1 53:1 61:1 65:1 69:1 76:1 85:1 88:1 92:1 96:1 98:1 112:1 115:1 125:1 1 4:1 10:1 12:1 21:1 29:1 34:1 36:1 39:1 46:1 53:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 96:1 102:1 109:1 118:1 122:1 0 4:1 10:1 11:1 21:1 29:1 34:1 36:1 39:1 51:1 53:1 61:1 65:1 69:1 77:1 86:1 88:1 92:1 96:1 98:1 112:1 115:1 125:1 1 3:1 9:1 18:1 22:1 26:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 68:1 75:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 123:1 0 4:1 9:1 12:1 21:1 29:1 34:1 36:1 39:1 51:1 53:1 61:1 65:1 69:1 76:1 85:1 88:1 92:1 96:1 98:1 112:1 115:1 125:1 1 3:1 10:1 14:1 21:1 27:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 108:1 117:1 124:1 1 3:1 9:1 11:1 22:1 31:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 68:1 75:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 121:1 1 3:1 9:1 18:1 22:1 26:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 69:1 77:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 121:1 1 4:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 71:1 80:1 88:1 92:1 95:1 100:1 108:1 119:1 120:1 1 3:1 9:1 11:1 22:1 27:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 68:1 77:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 121:1 1 3:1 9:1 11:1 22:1 31:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 68:1 77:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 126:1 1 4:1 10:1 14:1 21:1 27:1 34:1 36:1 39:1 51:1 54:1 55:1 62:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 108:1 118:1 124:1 0 3:1 9:1 16:1 21:1 29:1 34:1 36:1 39:1 51:1 53:1 61:1 65:1 69:1 76:1 86:1 88:1 92:1 96:1 98:1 112:1 115:1 125:1 0 3:1 9:1 17:1 22:1 29:1 34:1 36:1 40:1 49:1 53:1 61:1 65:1 66:1 77:1 86:1 88:1 92:1 95:1 99:1 108:1 119:1 126:1 1 4:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 71:1 84:1 88:1 92:1 95:1 100:1 108:1 119:1 123:1 1 3:1 10:1 14:1 21:1 27:1 34:1 36:1 39:1 44:1 54:1 55:1 65:1 66:1 77:1 86:1 88:1 92:1 95:1 102:1 108:1 118:1 120:1 1 3:1 9:1 11:1 22:1 31:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 68:1 77:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 121:1 1 4:1 10:1 14:1 21:1 27:1 34:1 36:1 39:1 44:1 54:1 55:1 65:1 66:1 77:1 86:1 88:1 92:1 95:1 102:1 108:1 118:1 124:1 1 3:1 10:1 12:1 21:1 27:1 34:1 36:1 39:1 44:1 54:1 55:1 65:1 66:1 77:1 86:1 88:1 92:1 95:1 102:1 108:1 118:1 120:1 1 3:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 
44:1 53:1 55:1 64:1 68:1 71:1 84:1 88:1 92:1 95:1 100:1 108:1 119:1 120:1 0 5:1 9:1 11:1 21:1 29:1 34:1 36:1 39:1 50:1 53:1 61:1 65:1 69:1 76:1 85:1 88:1 92:1 96:1 98:1 112:1 115:1 125:1 1 3:1 9:1 18:1 22:1 26:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 68:1 77:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 126:1 1 4:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 71:1 79:1 88:1 92:1 95:1 100:1 108:1 118:1 123:1 0 3:1 9:1 12:1 21:1 29:1 34:1 36:1 39:1 51:1 53:1 61:1 65:1 69:1 76:1 86:1 88:1 92:1 96:1 98:1 112:1 115:1 125:1 1 3:1 10:1 12:1 21:1 27:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 66:1 77:1 86:1 88:1 92:1 95:1 102:1 108:1 118:1 120:1 1 4:1 10:1 12:1 21:1 27:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 108:1 117:1 124:1 1 4:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 71:1 80:1 88:1 92:1 95:1 100:1 108:1 119:1 126:1 1 3:1 9:1 11:1 22:1 27:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 69:1 77:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 123:1 1 4:1 10:1 19:1 21:1 27:1 34:1 36:1 39:1 44:1 54:1 55:1 62:1 66:1 77:1 86:1 88:1 92:1 95:1 102:1 108:1 118:1 124:1 0 5:1 9:1 11:1 21:1 29:1 34:1 36:1 39:1 50:1 53:1 61:1 65:1 69:1 77:1 86:1 88:1 92:1 96:1 98:1 112:1 115:1 125:1 0 3:1 10:1 18:1 21:1 29:1 34:1 36:1 39:1 51:1 53:1 61:1 65:1 69:1 76:1 85:1 88:1 92:1 96:1 98:1 112:1 115:1 125:1 0 3:1 9:1 19:1 22:1 29:1 34:1 36:1 40:1 48:1 53:1 61:1 65:1 66:1 77:1 86:1 88:1 92:1 95:1 99:1 108:1 118:1 126:1 1 4:1 10:1 12:1 21:1 27:1 34:1 36:1 39:1 48:1 54:1 55:1 62:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 108:1 117:1 124:1 0 4:1 10:1 18:1 21:1 29:1 34:1 36:1 39:1 50:1 53:1 61:1 65:1 69:1 77:1 86:1 88:1 92:1 96:1 98:1 112:1 115:1 125:1 1 4:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 71:1 84:1 88:1 92:1 95:1 100:1 108:1 119:1 120:1 1 4:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 71:1 79:1 88:1 92:1 95:1 100:1 108:1 119:1 126:1 0 3:1 7:1 11:1 22:1 29:1 34:1 37:1 40:1 51:1 53:1 55:1 65:1 69:1 77:1 79:1 88:1 92:1 95:1 98:1 112:1 118:1 121:1 1 4:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 71:1 79:1 88:1 92:1 95:1 100:1 108:1 119:1 126:1 0 3:1 10:1 16:1 21:1 29:1 34:1 36:1 39:1 51:1 53:1 61:1 65:1 69:1 77:1 85:1 88:1 92:1 96:1 98:1 112:1 115:1 125:1 1 3:1 10:1 14:1 21:1 27:1 34:1 36:1 39:1 51:1 54:1 55:1 62:1 66:1 77:1 86:1 88:1 92:1 95:1 102:1 108:1 118:1 120:1 1 4:1 7:1 14:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 75:1 79:1 88:1 92:1 95:1 100:1 108:1 119:1 123:1 1 3:1 10:1 19:1 21:1 27:1 34:1 36:1 39:1 44:1 54:1 55:1 65:1 66:1 77:1 86:1 88:1 92:1 95:1 102:1 108:1 117:1 120:1 1 3:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 70:1 84:1 88:1 92:1 95:1 100:1 108:1 118:1 123:1 1 4:1 10:1 19:1 21:1 27:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 108:1 117:1 124:1 0 5:1 9:1 11:1 21:1 29:1 34:1 36:1 39:1 51:1 53:1 61:1 65:1 69:1 76:1 85:1 88:1 92:1 96:1 98:1 112:1 115:1 125:1 0 3:1 9:1 16:1 21:1 29:1 34:1 36:1 39:1 50:1 53:1 61:1 65:1 69:1 77:1 86:1 88:1 92:1 96:1 98:1 112:1 115:1 125:1 0 4:1 9:1 12:1 21:1 29:1 34:1 36:1 39:1 50:1 53:1 61:1 65:1 69:1 77:1 86:1 88:1 92:1 96:1 98:1 112:1 115:1 125:1 0 3:1 10:1 18:1 21:1 29:1 34:1 36:1 39:1 51:1 53:1 61:1 65:1 69:1 77:1 86:1 88:1 92:1 96:1 98:1 112:1 115:1 125:1 1 3:1 10:1 14:1 21:1 27:1 34:1 36:1 39:1 48:1 54:1 55:1 62:1 66:1 77:1 86:1 88:1 92:1 95:1 102:1 108:1 118:1 124:1 1 4:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 71:1 84:1 88:1 92:1 95:1 100:1 108:1 118:1 123:1 1 3:1 10:1 12:1 21:1 27:1 34:1 36:1 39:1 44:1 54:1 55:1 65:1 
66:1 77:1 86:1 88:1 92:1 95:1 102:1 108:1 118:1 124:1 1 4:1 10:1 19:1 21:1 27:1 34:1 36:1 39:1 44:1 54:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 108:1 118:1 120:1 1 4:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 71:1 79:1 88:1 92:1 95:1 100:1 108:1 118:1 120:1 1 4:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 70:1 84:1 88:1 92:1 95:1 100:1 108:1 119:1 126:1 1 4:1 9:1 14:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 70:1 84:1 88:1 92:1 95:1 100:1 108:1 119:1 123:1 1 4:1 7:1 14:1 22:1 27:1 34:1 36:1 39:1 45:1 53:1 55:1 64:1 68:1 71:1 79:1 88:1 92:1 95:1 100:1 108:1 118:1 123:1 1 3:1 9:1 11:1 22:1 27:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 69:1 75:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 121:1 0 3:1 9:1 11:1 21:1 29:1 34:1 36:1 39:1 51:1 53:1 61:1 65:1 69:1 77:1 86:1 88:1 92:1 96:1 98:1 112:1 115:1 125:1 1 3:1 9:1 11:1 22:1 26:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 69:1 75:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 123:1 1 3:1 9:1 11:1 22:1 26:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 69:1 77:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 123:1 1 4:1 10:1 12:1 21:1 27:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 108:1 117:1 120:1 1 1:1 9:1 20:1 22:1 29:1 34:1 36:1 40:1 51:1 53:1 61:1 64:1 67:1 77:1 79:1 88:1 92:1 95:1 98:1 112:1 118:1 126:1 0 5:1 10:1 16:1 21:1 29:1 34:1 36:1 39:1 50:1 53:1 61:1 65:1 69:1 76:1 85:1 88:1 92:1 96:1 98:1 112:1 115:1 125:1 0 4:1 9:1 11:1 21:1 29:1 34:1 36:1 39:1 51:1 53:1 61:1 65:1 69:1 76:1 85:1 88:1 92:1 96:1 98:1 112:1 115:1 125:1 1 1:1 9:1 16:1 21:1 29:1 34:1 36:1 39:1 45:1 53:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 96:1 102:1 109:1 118:1 122:1 1 3:1 10:1 14:1 21:1 27:1 34:1 36:1 39:1 51:1 54:1 55:1 65:1 66:1 77:1 86:1 88:1 92:1 95:1 102:1 108:1 118:1 120:1 1 3:1 10:1 14:1 21:1 27:1 34:1 36:1 39:1 48:1 54:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 108:1 117:1 120:1 1 3:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 70:1 79:1 88:1 92:1 95:1 100:1 108:1 118:1 120:1 1 3:1 9:1 11:1 22:1 31:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 69:1 75:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 123:1 1 3:1 10:1 12:1 21:1 27:1 34:1 36:1 39:1 44:1 54:1 55:1 62:1 66:1 77:1 86:1 88:1 92:1 95:1 102:1 108:1 118:1 124:1 1 1:1 10:1 12:1 21:1 29:1 34:1 36:1 39:1 46:1 53:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 96:1 102:1 109:1 118:1 120:1 0 3:1 9:1 11:1 22:1 29:1 34:1 37:1 40:1 51:1 53:1 55:1 62:1 69:1 77:1 79:1 88:1 92:1 95:1 98:1 112:1 118:1 121:1 1 3:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 48:1 53:1 55:1 64:1 68:1 70:1 79:1 88:1 92:1 95:1 100:1 108:1 118:1 123:1 1 4:1 10:1 12:1 21:1 27:1 34:1 36:1 39:1 44:1 54:1 55:1 65:1 66:1 77:1 86:1 88:1 92:1 95:1 102:1 108:1 118:1 120:1 0 5:1 10:1 18:1 21:1 29:1 34:1 36:1 39:1 50:1 53:1 61:1 65:1 69:1 76:1 85:1 88:1 92:1 96:1 98:1 112:1 115:1 125:1 1 4:1 7:1 20:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 75:1 84:1 88:1 92:1 95:1 100:1 108:1 119:1 123:1 1 1:1 7:1 11:1 22:1 29:1 34:1 36:1 40:1 51:1 53:1 61:1 64:1 67:1 77:1 79:1 88:1 92:1 95:1 98:1 112:1 118:1 126:1 1 3:1 9:1 11:1 22:1 31:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 69:1 75:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 123:1 1 4:1 9:1 20:1 22:1 27:1 34:1 36:1 39:1 44:1 53:1 55:1 64:1 68:1 70:1 80:1 88:1 92:1 95:1 100:1 108:1 119:1 126:1 1 3:1 10:1 14:1 21:1 27:1 34:1 36:1 39:1 48:1 54:1 55:1 62:1 69:1 77:1 86:1 88:1 92:1 95:1 102:1 108:1 117:1 120:1 1 3:1 9:1 11:1 22:1 27:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 68:1 77:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 126:1 0 3:1 9:1 11:1 21:1 29:1 34:1 36:1 39:1 51:1 53:1 61:1 65:1 69:1 76:1 86:1 88:1 
[Data file content: sparse training records in LIBSVM text format. Each record is a binary class label (0 or 1) followed by 22 active one-hot features written as index:1 pairs, with feature indices ranging from 1 to 126 — consistent with the one-hot encoded agaricus (mushroom) demo data shipped with XGBoost. The archive extraction collapsed the original one-record-per-line layout, running several hundred records together on a handful of very long lines. Only one representative record is kept here, copied verbatim:

0 4:1 9:1 11:1 22:1 29:1 34:1 37:1 40:1 51:1 53:1 55:1 62:1 66:1 77:1 79:1 88:1 92:1 95:1 98:1 112:1 118:1 121:1

The remaining records follow the same pattern (label, then 22 index:1 pairs) and are omitted.]
22:1 31:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 121:1 1 3:1 10:1 11:1 22:1 26:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 68:1 75:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 126:1 1 4:1 9:1 18:1 22:1 27:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 69:1 75:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 126:1 0 5:1 7:1 14:1 22:1 29:1 34:1 37:1 39:1 48:1 53:1 61:1 64:1 69:1 77:1 86:1 88:1 92:1 96:1 102:1 112:1 117:1 120:1 0 1:1 10:1 11:1 22:1 29:1 32:1 36:1 39:1 52:1 53:1 61:1 65:1 69:1 74:1 83:1 88:1 90:1 95:1 102:1 110:1 115:1 121:1 1 3:1 10:1 11:1 22:1 31:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 69:1 75:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 123:1 1 4:1 9:1 11:1 22:1 31:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 69:1 75:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 121:1 1 4:1 10:1 18:1 22:1 31:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 69:1 77:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 121:1 1 4:1 9:1 11:1 22:1 26:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 69:1 75:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 121:1 1 4:1 10:1 11:1 22:1 27:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 69:1 75:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 126:1 1 4:1 9:1 18:1 22:1 26:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 69:1 75:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 123:1 1 4:1 9:1 18:1 22:1 27:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 68:1 75:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 121:1 1 4:1 10:1 11:1 22:1 31:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 68:1 77:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 121:1 1 3:1 10:1 18:1 22:1 26:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 68:1 77:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 126:1 1 4:1 10:1 11:1 22:1 27:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 69:1 75:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 123:1 1 3:1 10:1 18:1 22:1 26:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 69:1 75:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 121:1 1 3:1 10:1 11:1 22:1 26:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 69:1 75:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 121:1 1 3:1 10:1 11:1 22:1 27:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 68:1 75:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 126:1 1 5:1 9:1 11:1 22:1 26:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 68:1 75:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 126:1 1 4:1 10:1 11:1 22:1 26:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 68:1 77:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 123:1 1 3:1 9:1 18:1 22:1 26:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 69:1 77:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 123:1 1 4:1 10:1 11:1 22:1 26:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 69:1 75:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 123:1 1 4:1 10:1 11:1 22:1 27:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 68:1 77:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 126:1 1 3:1 10:1 11:1 22:1 27:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 69:1 75:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 126:1 1 3:1 9:1 18:1 22:1 31:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 121:1 1 4:1 10:1 18:1 22:1 27:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 69:1 77:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 126:1 1 4:1 10:1 18:1 22:1 31:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 68:1 77:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 121:1 1 4:1 9:1 11:1 22:1 27:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 68:1 77:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 123:1 1 3:1 10:1 18:1 22:1 31:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 68:1 75:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 121:1 1 4:1 10:1 18:1 22:1 31:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 69:1 77:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 121:1 1 4:1 9:1 18:1 22:1 31:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 69:1 75:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 126:1 1 3:1 9:1 18:1 22:1 31:1 34:1 36:1 40:1 43:1 
54:1 61:1 64:1 68:1 77:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 121:1 1 4:1 9:1 11:1 22:1 27:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 68:1 77:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 121:1 1 4:1 10:1 11:1 22:1 27:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 121:1 1 4:1 10:1 11:1 22:1 26:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 69:1 75:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 123:1 1 4:1 10:1 18:1 22:1 26:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 121:1 1 3:1 10:1 18:1 22:1 31:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 69:1 75:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 123:1 1 3:1 9:1 18:1 22:1 26:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 68:1 77:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 123:1 1 4:1 9:1 18:1 22:1 27:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 68:1 77:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 121:1 1 3:1 9:1 18:1 22:1 31:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 68:1 77:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 126:1 1 4:1 9:1 18:1 22:1 27:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 69:1 77:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 121:1 1 4:1 9:1 18:1 22:1 26:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 68:1 75:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 121:1 1 3:1 10:1 18:1 22:1 26:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 68:1 75:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 126:1 1 4:1 10:1 18:1 22:1 27:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 69:1 75:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 121:1 1 4:1 10:1 18:1 22:1 27:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 68:1 75:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 123:1 1 3:1 10:1 18:1 22:1 27:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 123:1 1 4:1 10:1 18:1 22:1 26:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 68:1 77:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 123:1 1 4:1 10:1 18:1 22:1 31:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 123:1 1 4:1 9:1 18:1 22:1 27:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 68:1 75:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 126:1 1 4:1 9:1 11:1 22:1 31:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 68:1 75:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 123:1 1 5:1 10:1 18:1 22:1 27:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 68:1 77:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 121:1 1 4:1 10:1 11:1 22:1 26:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 68:1 75:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 126:1 1 4:1 10:1 18:1 22:1 31:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 69:1 75:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 121:1 1 4:1 9:1 18:1 22:1 26:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 69:1 77:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 121:1 1 5:1 9:1 11:1 22:1 26:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 68:1 75:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 123:1 1 3:1 9:1 18:1 22:1 31:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 68:1 77:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 126:1 1 3:1 10:1 18:1 22:1 27:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 69:1 77:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 121:1 0 3:1 10:1 14:1 22:1 29:1 34:1 37:1 39:1 51:1 53:1 61:1 64:1 69:1 77:1 86:1 88:1 92:1 96:1 102:1 112:1 116:1 120:1 1 5:1 9:1 11:1 22:1 26:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 69:1 75:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 126:1 1 3:1 10:1 11:1 22:1 26:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 69:1 77:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 123:1 1 4:1 10:1 11:1 22:1 31:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 68:1 77:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 123:1 1 4:1 10:1 11:1 22:1 31:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 69:1 77:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 123:1 1 3:1 10:1 18:1 22:1 31:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 68:1 77:1 86:1 
88:1 92:1 95:1 98:1 112:1 118:1 123:1 1 4:1 10:1 18:1 22:1 27:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 68:1 75:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 121:1 1 4:1 9:1 18:1 22:1 27:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 68:1 77:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 121:1 1 4:1 9:1 18:1 22:1 27:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 68:1 77:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 123:1 1 4:1 10:1 18:1 22:1 27:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 69:1 75:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 123:1 1 3:1 10:1 11:1 22:1 27:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 69:1 77:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 123:1 1 4:1 10:1 18:1 22:1 27:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 68:1 75:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 121:1 1 4:1 10:1 18:1 22:1 26:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 69:1 77:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 126:1 1 4:1 9:1 18:1 22:1 27:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 69:1 77:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 123:1 1 5:1 9:1 18:1 22:1 31:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 69:1 75:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 123:1 1 4:1 10:1 11:1 22:1 31:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 123:1 1 5:1 9:1 18:1 22:1 31:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 69:1 77:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 126:1 1 4:1 10:1 18:1 22:1 26:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 68:1 75:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 121:1 1 4:1 9:1 18:1 22:1 31:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 68:1 77:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 121:1 1 4:1 10:1 18:1 22:1 27:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 68:1 75:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 121:1 0 1:1 10:1 11:1 22:1 29:1 32:1 36:1 39:1 42:1 53:1 61:1 65:1 69:1 74:1 83:1 88:1 91:1 95:1 102:1 110:1 115:1 121:1 1 3:1 10:1 11:1 22:1 27:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 68:1 77:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 126:1 1 4:1 9:1 11:1 22:1 31:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 68:1 77:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 126:1 1 4:1 10:1 18:1 22:1 27:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 68:1 75:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 126:1 1 4:1 9:1 11:1 22:1 27:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 68:1 77:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 123:1 1 5:1 9:1 11:1 22:1 28:1 34:1 36:1 39:1 51:1 53:1 56:1 64:1 67:1 72:1 81:1 88:1 92:1 94:1 101:1 112:1 115:1 126:1 1 3:1 10:1 18:1 22:1 27:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 68:1 77:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 123:1 1 4:1 10:1 18:1 22:1 31:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 68:1 77:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 121:1 1 4:1 10:1 18:1 22:1 27:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 69:1 77:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 123:1 1 4:1 9:1 11:1 22:1 26:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 69:1 75:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 121:1 1 4:1 9:1 11:1 22:1 26:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 68:1 75:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 126:1 1 4:1 9:1 11:1 22:1 26:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 69:1 75:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 123:1 1 4:1 10:1 11:1 22:1 31:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 68:1 77:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 126:1 1 4:1 10:1 11:1 22:1 26:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 121:1 1 3:1 10:1 18:1 22:1 27:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 69:1 77:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 126:1 1 3:1 10:1 18:1 22:1 27:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 68:1 75:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 121:1 1 4:1 9:1 18:1 22:1 26:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 68:1 77:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 
126:1 1 3:1 10:1 18:1 22:1 31:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 69:1 77:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 123:1 1 3:1 10:1 11:1 22:1 26:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 68:1 75:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 121:1 1 4:1 9:1 18:1 22:1 26:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 69:1 77:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 126:1 1 3:1 10:1 11:1 22:1 27:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 68:1 77:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 126:1 1 4:1 9:1 18:1 22:1 27:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 68:1 75:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 121:1 1 4:1 9:1 11:1 22:1 26:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 126:1 1 3:1 9:1 18:1 22:1 27:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 68:1 75:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 126:1 0 5:1 7:1 14:1 22:1 29:1 34:1 37:1 39:1 51:1 53:1 61:1 64:1 68:1 77:1 86:1 88:1 92:1 96:1 102:1 112:1 116:1 120:1 1 3:1 10:1 18:1 22:1 26:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 68:1 75:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 126:1 1 5:1 9:1 11:1 22:1 26:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 68:1 75:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 126:1 1 3:1 9:1 18:1 22:1 31:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 69:1 75:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 123:1 1 4:1 10:1 18:1 22:1 27:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 69:1 77:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 121:1 1 3:1 10:1 18:1 22:1 27:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 69:1 77:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 123:1 1 4:1 10:1 11:1 22:1 31:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 69:1 75:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 126:1 1 4:1 9:1 18:1 22:1 31:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 68:1 75:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 123:1 1 3:1 9:1 18:1 22:1 27:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 68:1 75:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 123:1 1 4:1 10:1 18:1 22:1 26:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 69:1 75:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 123:1 1 3:1 10:1 11:1 22:1 26:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 68:1 75:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 126:1 1 4:1 10:1 11:1 22:1 27:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 69:1 75:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 123:1 1 5:1 10:1 11:1 22:1 26:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 68:1 77:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 123:1 1 4:1 10:1 11:1 22:1 26:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 69:1 75:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 121:1 1 5:1 9:1 18:1 22:1 27:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 69:1 77:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 123:1 0 1:1 10:1 11:1 22:1 29:1 32:1 36:1 39:1 42:1 53:1 61:1 65:1 69:1 74:1 83:1 88:1 90:1 95:1 102:1 113:1 115:1 121:1 0 5:1 10:1 14:1 22:1 29:1 34:1 37:1 39:1 51:1 53:1 61:1 65:1 68:1 77:1 86:1 88:1 92:1 96:1 102:1 112:1 116:1 120:1 1 3:1 10:1 11:1 22:1 31:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 68:1 77:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 126:1 1 5:1 9:1 18:1 22:1 27:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 123:1 0 3:1 10:1 11:1 22:1 29:1 32:1 36:1 39:1 47:1 53:1 61:1 65:1 69:1 74:1 83:1 88:1 90:1 95:1 102:1 110:1 115:1 121:1 1 5:1 10:1 18:1 22:1 31:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 68:1 77:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 126:1 1 1:1 9:1 20:1 22:1 29:1 34:1 37:1 40:1 52:1 53:1 56:1 63:1 67:1 78:1 87:1 88:1 93:1 95:1 98:1 112:1 115:1 121:1 1 3:1 10:1 18:1 22:1 27:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 69:1 77:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 126:1 0 1:1 10:1 14:1 22:1 29:1 34:1 37:1 39:1 45:1 53:1 61:1 64:1 68:1 77:1 86:1 88:1 92:1 96:1 102:1 112:1 116:1 120:1 1 5:1 9:1 18:1 22:1 
27:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 68:1 77:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 123:1 1 5:1 10:1 18:1 22:1 31:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 68:1 75:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 123:1 1 4:1 10:1 18:1 22:1 31:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 68:1 77:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 126:1 1 4:1 9:1 11:1 22:1 26:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 69:1 75:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 123:1 0 5:1 7:1 14:1 22:1 29:1 34:1 37:1 39:1 51:1 53:1 61:1 65:1 68:1 77:1 86:1 88:1 92:1 96:1 102:1 112:1 117:1 120:1 1 5:1 10:1 18:1 22:1 26:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 69:1 75:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 126:1 1 4:1 10:1 18:1 22:1 27:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 69:1 77:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 121:1 1 4:1 10:1 11:1 22:1 26:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 69:1 77:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 123:1 0 4:1 10:1 11:1 22:1 29:1 32:1 36:1 39:1 47:1 53:1 61:1 65:1 69:1 74:1 83:1 88:1 90:1 95:1 102:1 106:1 115:1 121:1 0 3:1 9:1 16:1 21:1 29:1 34:1 36:1 39:1 51:1 53:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 96:1 102:1 112:1 119:1 123:1 1 4:1 10:1 11:1 22:1 26:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 68:1 77:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 126:1 1 4:1 10:1 11:1 22:1 31:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 69:1 77:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 121:1 0 1:1 10:1 19:1 22:1 29:1 34:1 37:1 39:1 51:1 53:1 61:1 65:1 68:1 77:1 86:1 88:1 92:1 96:1 102:1 112:1 116:1 120:1 1 3:1 9:1 18:1 22:1 27:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 68:1 75:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 123:1 1 4:1 9:1 18:1 22:1 26:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 69:1 75:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 121:1 1 4:1 10:1 18:1 22:1 26:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 69:1 77:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 121:1 1 5:1 9:1 11:1 22:1 27:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 69:1 75:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 126:1 0 3:1 7:1 19:1 22:1 29:1 34:1 37:1 39:1 48:1 53:1 61:1 65:1 69:1 77:1 86:1 88:1 92:1 96:1 102:1 112:1 117:1 120:1 0 1:1 10:1 19:1 22:1 29:1 34:1 37:1 39:1 48:1 53:1 61:1 65:1 69:1 77:1 86:1 88:1 92:1 96:1 102:1 112:1 116:1 120:1 1 5:1 10:1 18:1 22:1 26:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 68:1 77:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 121:1 1 4:1 9:1 11:1 22:1 26:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 68:1 77:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 121:1 0 4:1 9:1 11:1 21:1 29:1 34:1 36:1 39:1 51:1 53:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 96:1 102:1 112:1 118:1 123:1 1 5:1 10:1 11:1 22:1 31:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 69:1 75:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 126:1 1 5:1 10:1 11:1 22:1 26:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 68:1 77:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 121:1 1 4:1 9:1 11:1 22:1 31:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 68:1 77:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 121:1 1 4:1 9:1 18:1 22:1 28:1 32:1 36:1 39:1 51:1 53:1 56:1 64:1 67:1 72:1 81:1 88:1 92:1 94:1 101:1 112:1 115:1 126:1 1 3:1 10:1 18:1 22:1 27:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 68:1 77:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 126:1 1 5:1 9:1 18:1 22:1 31:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 69:1 75:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 123:1 1 5:1 10:1 18:1 22:1 27:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 68:1 77:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 121:1 1 5:1 9:1 11:1 22:1 27:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 68:1 77:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 121:1 1 4:1 9:1 18:1 22:1 27:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 123:1 0 3:1 7:1 19:1 22:1 29:1 34:1 37:1 39:1 51:1 
53:1 61:1 65:1 69:1 77:1 86:1 88:1 92:1 96:1 102:1 112:1 117:1 120:1 1 4:1 10:1 11:1 22:1 27:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 68:1 75:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 123:1 1 5:1 10:1 18:1 22:1 26:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 68:1 75:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 126:1 1 5:1 9:1 11:1 22:1 31:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 69:1 75:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 121:1 0 5:1 7:1 19:1 22:1 29:1 34:1 37:1 39:1 45:1 53:1 61:1 65:1 69:1 77:1 86:1 88:1 92:1 96:1 102:1 112:1 117:1 120:1 1 5:1 10:1 11:1 22:1 27:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 69:1 75:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 123:1 1 5:1 9:1 11:1 22:1 31:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 69:1 75:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 121:1 1 5:1 10:1 11:1 22:1 31:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 68:1 77:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 121:1 1 5:1 10:1 18:1 22:1 31:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 126:1 1 3:1 10:1 11:1 22:1 26:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 68:1 77:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 121:1 1 3:1 9:1 18:1 22:1 31:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 69:1 75:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 121:1 1 5:1 9:1 11:1 22:1 27:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 69:1 75:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 126:1 0 1:1 10:1 11:1 22:1 29:1 34:1 36:1 39:1 51:1 53:1 55:1 63:1 67:1 70:1 79:1 88:1 92:1 96:1 102:1 112:1 119:1 126:1 1 5:1 9:1 18:1 22:1 27:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 68:1 75:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 126:1 1 4:1 10:1 11:1 22:1 27:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 68:1 77:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 126:1 1 4:1 9:1 11:1 22:1 31:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 68:1 77:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 123:1 0 5:1 7:1 19:1 22:1 29:1 34:1 37:1 39:1 51:1 53:1 61:1 65:1 69:1 77:1 86:1 88:1 92:1 96:1 102:1 112:1 117:1 120:1 1 4:1 9:1 18:1 22:1 27:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 68:1 77:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 126:1 1 3:1 10:1 18:1 22:1 26:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 68:1 75:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 123:1 1 3:1 10:1 11:1 22:1 27:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 69:1 77:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 121:1 1 4:1 10:1 11:1 22:1 27:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 69:1 77:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 123:1 1 5:1 10:1 18:1 22:1 27:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 69:1 77:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 123:1 1 5:1 10:1 18:1 22:1 31:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 69:1 75:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 121:1 1 3:1 10:1 18:1 22:1 27:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 68:1 77:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 126:1 0 1:1 10:1 11:1 22:1 29:1 34:1 36:1 39:1 51:1 53:1 55:1 63:1 67:1 70:1 79:1 88:1 92:1 96:1 102:1 112:1 119:1 123:1 1 5:1 10:1 18:1 22:1 26:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 69:1 75:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 123:1 1 3:1 10:1 11:1 22:1 27:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 123:1 1 4:1 10:1 18:1 22:1 31:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 69:1 75:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 121:1 0 1:1 10:1 11:1 22:1 29:1 32:1 36:1 39:1 42:1 53:1 61:1 65:1 69:1 74:1 83:1 88:1 90:1 95:1 102:1 106:1 115:1 121:1 1 5:1 9:1 18:1 22:1 28:1 34:1 36:1 39:1 52:1 53:1 56:1 64:1 67:1 72:1 81:1 88:1 92:1 94:1 101:1 112:1 115:1 126:1 1 4:1 10:1 18:1 22:1 26:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 68:1 75:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 126:1 0 5:1 10:1 11:1 22:1 29:1 32:1 36:1 39:1 52:1 53:1 61:1 65:1 69:1 
74:1 83:1 88:1 91:1 95:1 102:1 110:1 115:1 121:1 1 4:1 9:1 18:1 22:1 31:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 69:1 75:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 126:1 1 5:1 9:1 18:1 22:1 31:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 68:1 77:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 126:1 0 5:1 7:1 19:1 22:1 29:1 34:1 37:1 39:1 48:1 53:1 61:1 64:1 69:1 77:1 86:1 88:1 92:1 96:1 102:1 112:1 117:1 120:1 0 3:1 10:1 14:1 22:1 29:1 34:1 37:1 39:1 51:1 53:1 61:1 65:1 68:1 77:1 86:1 88:1 92:1 96:1 102:1 112:1 117:1 120:1 0 5:1 10:1 11:1 22:1 29:1 32:1 36:1 39:1 52:1 53:1 61:1 65:1 69:1 74:1 83:1 88:1 91:1 95:1 102:1 110:1 118:1 121:1 1 5:1 9:1 18:1 22:1 31:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 126:1 0 3:1 10:1 19:1 22:1 29:1 34:1 37:1 39:1 51:1 53:1 61:1 64:1 68:1 77:1 86:1 88:1 92:1 96:1 102:1 112:1 117:1 120:1 0 5:1 10:1 19:1 22:1 29:1 34:1 37:1 39:1 45:1 53:1 61:1 64:1 68:1 77:1 86:1 88:1 92:1 96:1 102:1 112:1 116:1 120:1 0 5:1 10:1 19:1 22:1 29:1 34:1 37:1 39:1 48:1 53:1 61:1 64:1 69:1 77:1 86:1 88:1 92:1 96:1 102:1 112:1 117:1 120:1 1 5:1 10:1 18:1 22:1 27:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 69:1 75:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 121:1 1 3:1 10:1 18:1 22:1 31:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 69:1 75:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 123:1 0 5:1 10:1 14:1 22:1 29:1 34:1 37:1 39:1 51:1 53:1 61:1 65:1 68:1 77:1 86:1 88:1 92:1 96:1 102:1 112:1 117:1 120:1 0 3:1 7:1 19:1 22:1 29:1 34:1 37:1 39:1 45:1 53:1 61:1 65:1 68:1 77:1 86:1 88:1 92:1 96:1 102:1 112:1 117:1 120:1 0 1:1 10:1 19:1 22:1 29:1 34:1 37:1 39:1 45:1 53:1 61:1 65:1 69:1 77:1 86:1 88:1 92:1 96:1 102:1 112:1 117:1 120:1 1 5:1 10:1 18:1 22:1 31:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 69:1 77:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 123:1 1 4:1 9:1 11:1 22:1 27:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 68:1 75:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 126:1 1 3:1 10:1 18:1 22:1 27:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 126:1 0 3:1 7:1 19:1 22:1 29:1 34:1 37:1 39:1 45:1 53:1 61:1 64:1 68:1 77:1 86:1 88:1 92:1 96:1 102:1 112:1 117:1 120:1 0 5:1 7:1 19:1 22:1 29:1 34:1 37:1 39:1 45:1 53:1 61:1 64:1 68:1 77:1 86:1 88:1 92:1 96:1 102:1 112:1 116:1 120:1 1 4:1 10:1 18:1 22:1 31:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 68:1 75:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 126:1 1 5:1 10:1 18:1 22:1 26:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 69:1 77:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 126:1 1 3:1 10:1 11:1 22:1 26:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 68:1 77:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 123:1 0 5:1 10:1 11:1 22:1 29:1 32:1 36:1 39:1 52:1 53:1 61:1 65:1 69:1 74:1 83:1 88:1 90:1 95:1 102:1 107:1 115:1 121:1 0 3:1 10:1 11:1 22:1 29:1 32:1 36:1 39:1 42:1 53:1 61:1 65:1 69:1 74:1 83:1 88:1 91:1 95:1 102:1 106:1 115:1 121:1 1 3:1 10:1 11:1 22:1 31:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 69:1 77:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 126:1 1 5:1 10:1 18:1 22:1 26:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 68:1 77:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 126:1 1 3:1 10:1 11:1 22:1 26:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 68:1 77:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 121:1 0 5:1 7:1 19:1 22:1 29:1 34:1 37:1 39:1 45:1 53:1 61:1 64:1 69:1 77:1 86:1 88:1 92:1 96:1 102:1 112:1 116:1 120:1 0 5:1 10:1 11:1 22:1 29:1 32:1 36:1 39:1 47:1 53:1 61:1 65:1 69:1 74:1 83:1 88:1 90:1 95:1 102:1 106:1 115:1 121:1 1 5:1 9:1 13:1 22:1 28:1 34:1 36:1 39:1 51:1 53:1 56:1 64:1 67:1 72:1 81:1 88:1 92:1 94:1 101:1 112:1 115:1 126:1 0 3:1 7:1 14:1 22:1 29:1 34:1 37:1 39:1 45:1 53:1 61:1 65:1 69:1 77:1 86:1 88:1 
92:1 96:1 102:1 112:1 117:1 120:1 0 3:1 10:1 11:1 22:1 29:1 32:1 36:1 39:1 52:1 53:1 61:1 65:1 69:1 74:1 83:1 88:1 91:1 95:1 102:1 107:1 118:1 121:1 1 5:1 9:1 18:1 22:1 31:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 69:1 77:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 123:1 0 5:1 7:1 19:1 22:1 29:1 34:1 37:1 39:1 48:1 53:1 61:1 65:1 69:1 77:1 86:1 88:1 92:1 96:1 102:1 112:1 116:1 120:1 0 3:1 7:1 14:1 22:1 29:1 34:1 37:1 39:1 45:1 53:1 61:1 64:1 69:1 77:1 86:1 88:1 92:1 96:1 102:1 112:1 117:1 120:1 1 5:1 9:1 11:1 22:1 26:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 68:1 77:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 126:1 1 5:1 9:1 11:1 22:1 31:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 69:1 75:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 121:1 1 4:1 9:1 11:1 22:1 31:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 69:1 77:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 126:1 1 4:1 9:1 18:1 22:1 26:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 69:1 77:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 123:1 0 5:1 7:1 14:1 22:1 29:1 34:1 37:1 39:1 48:1 53:1 61:1 65:1 68:1 77:1 86:1 88:1 92:1 96:1 102:1 112:1 116:1 120:1 1 4:1 9:1 11:1 22:1 31:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 69:1 77:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 123:1 0 5:1 10:1 14:1 22:1 29:1 34:1 37:1 39:1 51:1 53:1 61:1 64:1 68:1 77:1 86:1 88:1 92:1 96:1 102:1 112:1 117:1 120:1 1 3:1 10:1 11:1 22:1 31:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 69:1 75:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 123:1 1 5:1 10:1 11:1 22:1 26:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 69:1 77:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 126:1 1 5:1 10:1 11:1 22:1 26:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 68:1 75:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 123:1 1 5:1 10:1 18:1 22:1 31:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 68:1 77:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 121:1 1 4:1 10:1 18:1 22:1 31:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 69:1 77:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 123:1 1 3:1 10:1 18:1 22:1 31:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 69:1 75:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 123:1 1 3:1 10:1 18:1 22:1 26:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 126:1 1 3:1 10:1 11:1 22:1 31:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 69:1 75:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 126:1 1 5:1 9:1 11:1 22:1 26:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 68:1 77:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 126:1 1 4:1 10:1 18:1 22:1 26:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 68:1 77:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 126:1 1 3:1 9:1 11:1 22:1 28:1 32:1 36:1 39:1 51:1 53:1 56:1 64:1 67:1 72:1 81:1 88:1 92:1 94:1 101:1 112:1 115:1 126:1 1 5:1 10:1 11:1 22:1 27:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 121:1 0 5:1 10:1 14:1 22:1 29:1 34:1 37:1 39:1 45:1 53:1 61:1 64:1 68:1 77:1 86:1 88:1 92:1 96:1 102:1 112:1 117:1 120:1 1 4:1 9:1 11:1 22:1 26:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 68:1 75:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 123:1 1 4:1 9:1 18:1 22:1 27:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 69:1 75:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 126:1 0 4:1 10:1 13:1 21:1 29:1 34:1 36:1 39:1 51:1 53:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 96:1 102:1 112:1 119:1 123:1 1 4:1 9:1 11:1 22:1 27:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 68:1 75:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 121:1 1 3:1 10:1 18:1 22:1 26:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 69:1 75:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 121:1 1 4:1 9:1 18:1 22:1 26:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 68:1 75:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 121:1 1 3:1 10:1 11:1 22:1 27:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 69:1 75:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 
123:1 0 4:1 10:1 11:1 22:1 29:1 32:1 36:1 39:1 52:1 53:1 61:1 65:1 69:1 74:1 83:1 88:1 90:1 95:1 102:1 106:1 115:1 121:1 1 4:1 10:1 18:1 22:1 31:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 68:1 75:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 126:1 1 3:1 9:1 18:1 22:1 27:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 68:1 77:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 126:1 1 3:1 10:1 18:1 22:1 31:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 68:1 75:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 123:1 1 5:1 10:1 18:1 22:1 27:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 68:1 75:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 121:1 1 3:1 10:1 18:1 22:1 26:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 69:1 77:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 121:1 0 5:1 7:1 14:1 22:1 29:1 34:1 37:1 39:1 45:1 53:1 61:1 65:1 69:1 77:1 86:1 88:1 92:1 96:1 102:1 112:1 116:1 120:1 0 4:1 10:1 11:1 22:1 29:1 32:1 36:1 39:1 42:1 53:1 61:1 65:1 69:1 74:1 83:1 88:1 90:1 95:1 102:1 106:1 118:1 121:1 1 4:1 9:1 11:1 22:1 28:1 34:1 36:1 39:1 51:1 53:1 56:1 64:1 67:1 72:1 81:1 88:1 92:1 94:1 101:1 112:1 115:1 126:1 1 5:1 10:1 18:1 22:1 31:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 68:1 77:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 126:1 1 4:1 10:1 11:1 22:1 27:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 68:1 77:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 121:1 1 5:1 9:1 11:1 22:1 27:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 68:1 77:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 123:1 1 5:1 9:1 18:1 22:1 27:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 68:1 77:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 123:1 1 3:1 9:1 13:1 22:1 28:1 32:1 36:1 39:1 52:1 53:1 56:1 64:1 67:1 72:1 81:1 88:1 92:1 94:1 101:1 112:1 115:1 126:1 1 5:1 9:1 18:1 22:1 31:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 68:1 75:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 121:1 0 1:1 10:1 19:1 22:1 29:1 34:1 37:1 39:1 48:1 53:1 61:1 65:1 68:1 77:1 86:1 88:1 92:1 96:1 102:1 112:1 117:1 120:1 1 5:1 10:1 11:1 22:1 26:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 68:1 75:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 126:1 1 3:1 10:1 18:1 22:1 26:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 68:1 75:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 123:1 1 4:1 9:1 11:1 22:1 27:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 69:1 75:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 121:1 1 4:1 10:1 18:1 22:1 26:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 68:1 75:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 123:1 1 5:1 9:1 18:1 22:1 27:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 69:1 77:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 126:1 0 1:1 7:1 19:1 22:1 29:1 34:1 37:1 39:1 51:1 53:1 61:1 65:1 69:1 77:1 86:1 88:1 92:1 96:1 102:1 112:1 116:1 120:1 1 5:1 9:1 11:1 22:1 28:1 32:1 36:1 39:1 51:1 53:1 56:1 64:1 67:1 72:1 81:1 88:1 92:1 94:1 101:1 112:1 115:1 126:1 1 4:1 9:1 18:1 22:1 26:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 69:1 75:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 126:1 1 5:1 10:1 11:1 22:1 27:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 68:1 77:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 123:1 1 3:1 10:1 18:1 22:1 31:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 68:1 77:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 121:1 0 3:1 9:1 14:1 21:1 29:1 34:1 36:1 39:1 51:1 53:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 96:1 102:1 112:1 118:1 123:1 1 5:1 9:1 18:1 22:1 26:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 69:1 77:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 123:1 0 5:1 7:1 19:1 22:1 29:1 34:1 37:1 39:1 51:1 53:1 61:1 64:1 68:1 77:1 86:1 88:1 92:1 96:1 102:1 112:1 116:1 120:1 1 5:1 9:1 11:1 22:1 26:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 69:1 75:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 126:1 1 3:1 10:1 18:1 22:1 31:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 69:1 75:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 126:1 0 5:1 10:1 19:1 22:1 
29:1 34:1 37:1 39:1 51:1 53:1 61:1 65:1 68:1 77:1 86:1 88:1 92:1 96:1 102:1 112:1 116:1 120:1 1 4:1 9:1 11:1 22:1 31:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 69:1 77:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 123:1 1 5:1 10:1 18:1 22:1 31:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 68:1 77:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 123:1 1 5:1 10:1 18:1 22:1 26:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 68:1 77:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 123:1 1 4:1 9:1 18:1 22:1 27:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 68:1 75:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 121:1 0 4:1 10:1 11:1 22:1 29:1 32:1 36:1 39:1 42:1 53:1 61:1 65:1 69:1 74:1 83:1 88:1 90:1 95:1 102:1 113:1 118:1 121:1 1 4:1 9:1 11:1 22:1 26:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 68:1 75:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 126:1 1 5:1 10:1 18:1 22:1 26:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 68:1 77:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 126:1 1 5:1 10:1 18:1 22:1 27:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 68:1 75:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 121:1 0 1:1 10:1 11:1 22:1 29:1 32:1 36:1 39:1 47:1 53:1 61:1 65:1 69:1 74:1 83:1 88:1 91:1 95:1 102:1 107:1 118:1 121:1 0 5:1 10:1 14:1 22:1 29:1 34:1 37:1 39:1 48:1 53:1 61:1 64:1 68:1 77:1 86:1 88:1 92:1 96:1 102:1 112:1 117:1 120:1 0 3:1 10:1 11:1 22:1 29:1 32:1 36:1 39:1 42:1 53:1 61:1 65:1 69:1 74:1 83:1 88:1 91:1 95:1 102:1 106:1 118:1 121:1 1 4:1 9:1 18:1 22:1 27:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 68:1 77:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 126:1 1 5:1 10:1 11:1 22:1 31:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 68:1 75:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 126:1 1 5:1 10:1 11:1 22:1 26:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 69:1 75:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 123:1 0 1:1 10:1 19:1 22:1 29:1 34:1 37:1 39:1 48:1 53:1 61:1 64:1 69:1 77:1 86:1 88:1 92:1 96:1 102:1 112:1 116:1 120:1 1 4:1 10:1 11:1 22:1 31:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 69:1 75:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 126:1 0 1:1 7:1 14:1 22:1 29:1 34:1 37:1 39:1 48:1 53:1 61:1 65:1 68:1 77:1 86:1 88:1 92:1 96:1 102:1 112:1 116:1 120:1 1 3:1 10:1 11:1 22:1 27:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 69:1 77:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 126:1 1 3:1 9:1 18:1 22:1 27:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 68:1 77:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 121:1 1 4:1 9:1 13:1 22:1 28:1 34:1 36:1 39:1 51:1 53:1 56:1 64:1 67:1 72:1 81:1 88:1 92:1 94:1 101:1 112:1 115:1 126:1 1 4:1 10:1 11:1 22:1 31:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 69:1 77:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 126:1 1 3:1 9:1 18:1 22:1 31:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 69:1 75:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 121:1 1 4:1 10:1 11:1 22:1 31:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 68:1 77:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 121:1 1 4:1 9:1 18:1 22:1 27:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 69:1 77:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 123:1 1 4:1 10:1 18:1 22:1 27:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 68:1 77:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 121:1 1 3:1 10:1 18:1 22:1 31:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 68:1 77:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 126:1 0 5:1 10:1 14:1 22:1 29:1 34:1 37:1 39:1 51:1 53:1 61:1 64:1 69:1 77:1 86:1 88:1 92:1 96:1 102:1 112:1 117:1 120:1 1 5:1 10:1 11:1 22:1 26:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 69:1 77:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 121:1 1 5:1 9:1 18:1 22:1 31:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 69:1 77:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 121:1 1 3:1 10:1 11:1 22:1 26:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 68:1 77:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 123:1 0 1:1 10:1 14:1 22:1 29:1 34:1 37:1 39:1 
45:1 53:1 61:1 65:1 68:1 77:1 86:1 88:1 92:1 96:1 102:1 112:1 116:1 120:1 1 3:1 10:1 11:1 22:1 31:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 69:1 77:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 121:1 1 4:1 10:1 18:1 22:1 31:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 126:1 1 5:1 10:1 11:1 22:1 31:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 69:1 75:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 126:1 1 3:1 10:1 11:1 22:1 26:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 68:1 75:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 123:1 1 4:1 10:1 18:1 22:1 26:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 68:1 75:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 126:1 1 4:1 9:1 11:1 22:1 28:1 34:1 36:1 39:1 52:1 53:1 56:1 64:1 67:1 72:1 81:1 88:1 92:1 94:1 101:1 112:1 115:1 126:1 0 1:1 7:1 14:1 22:1 29:1 34:1 37:1 39:1 45:1 53:1 61:1 64:1 69:1 77:1 86:1 88:1 92:1 96:1 102:1 112:1 117:1 120:1 1 4:1 10:1 18:1 22:1 27:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 68:1 75:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 123:1 1 5:1 10:1 11:1 22:1 27:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 68:1 75:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 121:1 0 5:1 10:1 14:1 22:1 29:1 34:1 37:1 39:1 48:1 53:1 61:1 65:1 68:1 77:1 86:1 88:1 92:1 96:1 102:1 112:1 116:1 120:1 1 3:1 10:1 18:1 22:1 31:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 68:1 75:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 121:1 1 5:1 9:1 11:1 22:1 27:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 68:1 75:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 121:1 1 5:1 10:1 11:1 22:1 27:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 68:1 75:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 126:1 1 5:1 10:1 11:1 22:1 31:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 69:1 75:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 123:1 1 5:1 10:1 11:1 22:1 31:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 69:1 75:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 123:1 0 4:1 9:1 13:1 21:1 29:1 34:1 36:1 39:1 51:1 53:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 96:1 102:1 112:1 119:1 123:1 0 4:1 10:1 11:1 22:1 29:1 32:1 36:1 39:1 42:1 53:1 61:1 65:1 69:1 74:1 83:1 88:1 90:1 95:1 102:1 106:1 115:1 121:1 1 5:1 10:1 18:1 22:1 31:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 69:1 75:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 123:1 0 1:1 10:1 14:1 22:1 29:1 34:1 37:1 39:1 51:1 53:1 61:1 65:1 69:1 77:1 86:1 88:1 92:1 96:1 102:1 112:1 116:1 120:1 1 5:1 9:1 18:1 22:1 26:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 68:1 75:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 123:1 1 5:1 9:1 11:1 22:1 31:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 126:1 0 3:1 10:1 11:1 22:1 29:1 32:1 36:1 39:1 47:1 53:1 61:1 65:1 69:1 74:1 83:1 88:1 91:1 95:1 102:1 113:1 115:1 121:1 1 3:1 10:1 18:1 22:1 26:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 69:1 77:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 123:1 1 3:1 10:1 11:1 22:1 27:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 69:1 75:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 126:1 1 4:1 10:1 18:1 22:1 26:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 69:1 77:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 121:1 1 4:1 9:1 18:1 22:1 31:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 69:1 75:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 123:1 1 5:1 10:1 18:1 22:1 27:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 68:1 77:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 126:1 0 5:1 9:1 11:1 22:1 29:1 34:1 36:1 39:1 51:1 53:1 55:1 63:1 67:1 70:1 79:1 88:1 92:1 96:1 102:1 112:1 119:1 126:1 1 4:1 10:1 18:1 22:1 27:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 126:1 1 5:1 10:1 11:1 22:1 31:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 68:1 75:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 123:1 1 4:1 9:1 11:1 22:1 26:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 
69:1 77:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 123:1 0 5:1 7:1 14:1 22:1 29:1 34:1 37:1 39:1 51:1 53:1 61:1 65:1 69:1 77:1 86:1 88:1 92:1 96:1 102:1 112:1 116:1 120:1 1 5:1 10:1 11:1 22:1 27:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 68:1 77:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 126:1 0 1:1 7:1 19:1 22:1 29:1 34:1 37:1 39:1 48:1 53:1 61:1 64:1 69:1 77:1 86:1 88:1 92:1 96:1 102:1 112:1 117:1 120:1 0 3:1 10:1 14:1 22:1 29:1 34:1 37:1 39:1 48:1 53:1 61:1 64:1 68:1 77:1 86:1 88:1 92:1 96:1 102:1 112:1 116:1 120:1 1 5:1 9:1 11:1 22:1 27:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 69:1 77:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 121:1 0 5:1 7:1 14:1 22:1 29:1 34:1 37:1 39:1 45:1 53:1 61:1 64:1 68:1 77:1 86:1 88:1 92:1 96:1 102:1 112:1 116:1 120:1 1 3:1 10:1 11:1 22:1 26:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 69:1 75:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 121:1 1 5:1 9:1 11:1 22:1 31:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 69:1 77:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 121:1 0 3:1 7:1 19:1 22:1 29:1 34:1 37:1 39:1 45:1 53:1 61:1 64:1 69:1 77:1 86:1 88:1 92:1 96:1 102:1 112:1 116:1 120:1 1 5:1 9:1 18:1 22:1 27:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 69:1 75:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 123:1 1 5:1 10:1 18:1 22:1 27:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 69:1 75:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 121:1 1 5:1 10:1 18:1 22:1 27:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 68:1 75:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 121:1 1 5:1 9:1 18:1 22:1 31:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 69:1 77:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 123:1 1 5:1 9:1 11:1 22:1 31:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 69:1 75:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 123:1 0 5:1 10:1 14:1 22:1 29:1 34:1 37:1 39:1 45:1 53:1 61:1 64:1 68:1 77:1 86:1 88:1 92:1 96:1 102:1 112:1 116:1 120:1 1 5:1 9:1 11:1 22:1 27:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 68:1 75:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 123:1 0 3:1 10:1 14:1 22:1 29:1 34:1 37:1 39:1 51:1 53:1 61:1 65:1 68:1 77:1 86:1 88:1 92:1 96:1 102:1 112:1 116:1 120:1 0 4:1 10:1 11:1 22:1 29:1 32:1 36:1 39:1 52:1 53:1 61:1 65:1 69:1 74:1 83:1 88:1 91:1 95:1 102:1 113:1 118:1 121:1 0 5:1 10:1 14:1 22:1 29:1 34:1 37:1 39:1 45:1 53:1 61:1 64:1 69:1 77:1 86:1 88:1 92:1 96:1 102:1 112:1 116:1 120:1 1 3:1 10:1 18:1 22:1 31:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 69:1 77:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 123:1 1 5:1 10:1 11:1 22:1 27:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 126:1 1 5:1 10:1 11:1 22:1 31:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 68:1 77:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 123:1 0 3:1 10:1 14:1 22:1 29:1 34:1 37:1 39:1 48:1 53:1 61:1 64:1 69:1 77:1 86:1 88:1 92:1 96:1 102:1 112:1 117:1 120:1 0 4:1 10:1 11:1 22:1 29:1 32:1 36:1 39:1 42:1 53:1 61:1 65:1 69:1 74:1 83:1 88:1 91:1 95:1 102:1 113:1 118:1 121:1 0 1:1 10:1 14:1 22:1 29:1 34:1 37:1 39:1 45:1 53:1 61:1 64:1 68:1 77:1 86:1 88:1 92:1 96:1 102:1 112:1 117:1 120:1 1 4:1 9:1 11:1 22:1 26:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 68:1 77:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 123:1 0 1:1 10:1 11:1 22:1 29:1 32:1 36:1 39:1 47:1 53:1 61:1 65:1 69:1 74:1 83:1 88:1 90:1 95:1 102:1 107:1 115:1 121:1 0 1:1 10:1 14:1 22:1 29:1 34:1 37:1 39:1 51:1 53:1 61:1 64:1 69:1 77:1 86:1 88:1 92:1 96:1 102:1 112:1 116:1 120:1 0 1:1 10:1 14:1 22:1 29:1 34:1 37:1 39:1 48:1 53:1 61:1 64:1 68:1 77:1 86:1 88:1 92:1 96:1 102:1 112:1 116:1 120:1 1 5:1 10:1 11:1 22:1 27:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 69:1 77:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 121:1 1 5:1 9:1 11:1 22:1 26:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 68:1 75:1 86:1 
88:1 92:1 95:1 98:1 112:1 118:1 123:1 1 5:1 10:1 18:1 22:1 26:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 68:1 75:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 123:1 1 5:1 10:1 11:1 22:1 26:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 68:1 75:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 121:1 0 5:1 7:1 14:1 22:1 29:1 34:1 37:1 39:1 51:1 53:1 61:1 64:1 68:1 77:1 86:1 88:1 92:1 96:1 102:1 112:1 117:1 120:1 0 1:1 10:1 11:1 22:1 29:1 32:1 36:1 39:1 47:1 53:1 61:1 65:1 69:1 74:1 83:1 88:1 91:1 95:1 102:1 110:1 115:1 121:1 1 5:1 10:1 11:1 22:1 31:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 69:1 75:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 121:1 1 5:1 9:1 18:1 22:1 31:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 68:1 77:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 121:1 0 4:1 10:1 11:1 22:1 29:1 32:1 36:1 39:1 42:1 53:1 61:1 65:1 69:1 74:1 83:1 88:1 91:1 95:1 102:1 106:1 115:1 121:1 1 5:1 10:1 18:1 22:1 26:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 126:1 0 3:1 10:1 11:1 22:1 29:1 32:1 36:1 39:1 47:1 53:1 61:1 65:1 69:1 74:1 83:1 88:1 91:1 95:1 102:1 110:1 118:1 121:1 0 5:1 10:1 19:1 22:1 29:1 34:1 37:1 39:1 48:1 53:1 61:1 65:1 69:1 77:1 86:1 88:1 92:1 96:1 102:1 112:1 117:1 120:1 1 5:1 9:1 18:1 22:1 27:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 69:1 77:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 126:1 1 5:1 9:1 11:1 22:1 27:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 68:1 75:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 126:1 0 5:1 7:1 14:1 22:1 29:1 34:1 37:1 39:1 45:1 53:1 61:1 65:1 69:1 77:1 86:1 88:1 92:1 96:1 102:1 112:1 117:1 120:1 1 4:1 10:1 11:1 22:1 26:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 69:1 75:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 123:1 1 5:1 9:1 11:1 22:1 27:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 68:1 75:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 123:1 0 3:1 7:1 19:1 22:1 29:1 34:1 37:1 39:1 48:1 53:1 61:1 64:1 68:1 77:1 86:1 88:1 92:1 96:1 102:1 112:1 116:1 120:1 1 5:1 9:1 11:1 22:1 31:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 69:1 77:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 121:1 1 4:1 9:1 11:1 22:1 28:1 32:1 36:1 39:1 51:1 53:1 56:1 64:1 67:1 72:1 81:1 88:1 92:1 94:1 101:1 112:1 115:1 126:1 0 1:1 7:1 14:1 22:1 29:1 34:1 37:1 39:1 51:1 53:1 61:1 65:1 69:1 77:1 86:1 88:1 92:1 96:1 102:1 112:1 116:1 120:1 1 5:1 9:1 18:1 22:1 26:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 68:1 77:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 123:1 1 3:1 9:1 18:1 22:1 31:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 69:1 75:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 126:1 0 5:1 7:1 14:1 22:1 29:1 34:1 37:1 39:1 45:1 53:1 61:1 65:1 68:1 77:1 86:1 88:1 92:1 96:1 102:1 112:1 116:1 120:1 1 5:1 9:1 18:1 22:1 31:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 68:1 77:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 126:1 0 5:1 10:1 14:1 22:1 29:1 34:1 37:1 39:1 45:1 53:1 61:1 65:1 69:1 77:1 86:1 88:1 92:1 96:1 102:1 112:1 116:1 120:1 1 5:1 9:1 11:1 22:1 26:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 69:1 75:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 121:1 0 5:1 10:1 19:1 22:1 29:1 34:1 37:1 39:1 51:1 53:1 61:1 64:1 69:1 77:1 86:1 88:1 92:1 96:1 102:1 112:1 117:1 120:1 1 5:1 9:1 18:1 22:1 26:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 68:1 75:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 121:1 0 1:1 10:1 19:1 22:1 29:1 34:1 37:1 39:1 51:1 53:1 61:1 64:1 69:1 77:1 86:1 88:1 92:1 96:1 102:1 112:1 117:1 120:1 1 5:1 10:1 11:1 22:1 26:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 68:1 75:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 126:1 0 3:1 10:1 19:1 22:1 29:1 34:1 37:1 39:1 51:1 53:1 61:1 65:1 68:1 77:1 86:1 88:1 92:1 96:1 102:1 112:1 116:1 120:1 0 1:1 10:1 11:1 22:1 29:1 32:1 36:1 39:1 42:1 53:1 61:1 65:1 69:1 74:1 83:1 88:1 91:1 95:1 102:1 
106:1 118:1 121:1 1 5:1 10:1 11:1 22:1 31:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 69:1 77:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 126:1 1 5:1 9:1 18:1 22:1 27:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 68:1 75:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 123:1 0 3:1 7:1 19:1 22:1 29:1 34:1 37:1 39:1 51:1 53:1 61:1 65:1 69:1 77:1 86:1 88:1 92:1 96:1 102:1 112:1 116:1 120:1 1 3:1 9:1 18:1 22:1 28:1 32:1 36:1 39:1 52:1 53:1 56:1 64:1 67:1 72:1 81:1 88:1 92:1 94:1 101:1 112:1 115:1 126:1 1 5:1 10:1 11:1 22:1 27:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 68:1 77:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 121:1 1 5:1 10:1 18:1 22:1 27:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 68:1 77:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 126:1 0 5:1 10:1 11:1 22:1 29:1 32:1 36:1 39:1 47:1 53:1 61:1 65:1 69:1 74:1 83:1 88:1 90:1 95:1 102:1 107:1 115:1 121:1 1 5:1 9:1 11:1 22:1 27:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 69:1 75:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 121:1 0 4:1 9:1 11:1 22:1 29:1 34:1 36:1 39:1 51:1 53:1 55:1 63:1 67:1 70:1 79:1 88:1 92:1 96:1 102:1 112:1 119:1 126:1 0 5:1 10:1 14:1 22:1 29:1 34:1 37:1 39:1 51:1 53:1 61:1 65:1 69:1 77:1 86:1 88:1 92:1 96:1 102:1 112:1 116:1 120:1 1 4:1 9:1 18:1 22:1 26:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 68:1 77:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 121:1 1 5:1 9:1 20:1 22:1 29:1 34:1 37:1 40:1 52:1 53:1 56:1 63:1 67:1 78:1 87:1 88:1 93:1 95:1 98:1 112:1 115:1 121:1 1 5:1 9:1 18:1 22:1 26:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 69:1 75:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 123:1 1 5:1 9:1 18:1 22:1 31:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 68:1 75:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 121:1 0 1:1 10:1 11:1 22:1 29:1 32:1 36:1 39:1 47:1 53:1 61:1 65:1 69:1 74:1 83:1 88:1 90:1 95:1 102:1 106:1 115:1 121:1 0 3:1 10:1 14:1 21:1 29:1 34:1 36:1 39:1 51:1 53:1 55:1 65:1 69:1 77:1 86:1 88:1 92:1 96:1 102:1 112:1 119:1 123:1 0 5:1 10:1 14:1 22:1 29:1 34:1 37:1 39:1 48:1 53:1 61:1 64:1 69:1 77:1 86:1 88:1 92:1 96:1 102:1 112:1 116:1 120:1 1 4:1 9:1 18:1 22:1 31:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 68:1 75:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 123:1 1 5:1 10:1 18:1 22:1 31:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 68:1 77:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 123:1 1 5:1 9:1 18:1 22:1 31:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 121:1 0 1:1 7:1 19:1 22:1 29:1 34:1 37:1 39:1 51:1 53:1 61:1 64:1 68:1 77:1 86:1 88:1 92:1 96:1 102:1 112:1 116:1 120:1 0 4:1 10:1 11:1 22:1 29:1 32:1 36:1 39:1 42:1 53:1 61:1 65:1 69:1 74:1 83:1 88:1 91:1 95:1 102:1 110:1 115:1 121:1 1 5:1 10:1 18:1 22:1 27:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 69:1 75:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 126:1 1 5:1 9:1 18:1 22:1 26:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 68:1 77:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 121:1 1 5:1 9:1 11:1 22:1 31:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 68:1 77:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 123:1 0 3:1 10:1 14:1 22:1 29:1 34:1 37:1 39:1 48:1 53:1 61:1 64:1 69:1 77:1 86:1 88:1 92:1 96:1 102:1 112:1 116:1 120:1 0 3:1 10:1 19:1 22:1 29:1 34:1 37:1 39:1 45:1 53:1 61:1 65:1 68:1 77:1 86:1 88:1 92:1 96:1 102:1 112:1 116:1 120:1 1 5:1 9:1 11:1 22:1 27:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 68:1 77:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 126:1 1 5:1 10:1 11:1 22:1 27:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 69:1 77:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 121:1 0 1:1 7:1 19:1 22:1 29:1 34:1 37:1 39:1 48:1 53:1 61:1 64:1 68:1 77:1 86:1 88:1 92:1 96:1 102:1 112:1 116:1 120:1 1 4:1 10:1 11:1 22:1 27:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 68:1 75:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 123:1 1 5:1 
[Embedded sparse training data in LIBSVM text format: each record is a binary label (0 or 1) followed by one-hot feature:value pairs of the form "index:1", with feature indices ranging up to 126. This matches the layout of the mushroom (agaricus) demo dataset that ships with XGBoost.]
11:1 22:1 27:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 69:1 77:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 123:1 1 5:1 10:1 11:1 22:1 31:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 68:1 75:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 123:1 1 5:1 9:1 11:1 22:1 26:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 68:1 75:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 121:1 0 5:1 7:1 19:1 22:1 29:1 34:1 37:1 39:1 51:1 53:1 61:1 64:1 69:1 77:1 86:1 88:1 92:1 96:1 102:1 112:1 117:1 120:1 1 5:1 9:1 18:1 22:1 27:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 69:1 75:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 121:1 0 5:1 10:1 11:1 22:1 29:1 32:1 36:1 39:1 52:1 53:1 61:1 65:1 69:1 74:1 83:1 88:1 91:1 95:1 102:1 107:1 118:1 121:1 0 1:1 10:1 11:1 22:1 29:1 32:1 36:1 39:1 42:1 53:1 61:1 65:1 69:1 74:1 83:1 88:1 91:1 95:1 102:1 107:1 115:1 121:1 0 4:1 10:1 11:1 22:1 29:1 32:1 36:1 39:1 47:1 53:1 61:1 65:1 69:1 74:1 83:1 88:1 91:1 95:1 102:1 107:1 115:1 121:1 0 5:1 10:1 11:1 22:1 29:1 32:1 36:1 39:1 42:1 53:1 61:1 65:1 69:1 74:1 83:1 88:1 90:1 95:1 102:1 106:1 115:1 121:1 1 5:1 9:1 18:1 22:1 31:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 68:1 77:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 121:1 1 5:1 9:1 18:1 22:1 27:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 68:1 77:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 123:1 0 4:1 10:1 11:1 22:1 29:1 32:1 36:1 39:1 47:1 53:1 61:1 65:1 69:1 74:1 83:1 88:1 91:1 95:1 102:1 110:1 115:1 121:1 0 1:1 10:1 11:1 22:1 29:1 32:1 36:1 39:1 47:1 53:1 61:1 65:1 69:1 74:1 83:1 88:1 90:1 95:1 102:1 113:1 115:1 121:1 0 4:1 10:1 11:1 22:1 29:1 32:1 36:1 39:1 47:1 53:1 61:1 65:1 69:1 74:1 83:1 88:1 90:1 95:1 102:1 110:1 118:1 121:1 0 1:1 7:1 14:1 22:1 29:1 34:1 37:1 39:1 45:1 53:1 61:1 65:1 69:1 77:1 86:1 88:1 92:1 96:1 102:1 112:1 117:1 120:1 1 5:1 10:1 11:1 22:1 31:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 68:1 75:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 126:1 1 5:1 9:1 11:1 22:1 27:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 69:1 77:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 123:1 1 5:1 10:1 18:1 22:1 31:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 68:1 75:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 123:1 1 5:1 9:1 11:1 22:1 26:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 69:1 77:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 121:1 0 1:1 7:1 14:1 22:1 29:1 34:1 37:1 39:1 48:1 53:1 61:1 64:1 68:1 77:1 86:1 88:1 92:1 96:1 102:1 112:1 117:1 120:1 0 5:1 7:1 19:1 22:1 29:1 34:1 37:1 39:1 45:1 53:1 61:1 65:1 68:1 77:1 86:1 88:1 92:1 96:1 102:1 112:1 117:1 120:1 0 5:1 10:1 11:1 22:1 29:1 32:1 36:1 39:1 42:1 53:1 61:1 65:1 69:1 74:1 83:1 88:1 90:1 95:1 102:1 110:1 118:1 121:1 1 3:1 10:1 18:1 22:1 27:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 69:1 77:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 123:1 0 5:1 10:1 11:1 22:1 29:1 32:1 36:1 39:1 42:1 53:1 61:1 65:1 69:1 74:1 83:1 88:1 91:1 95:1 102:1 106:1 118:1 121:1 1 5:1 9:1 18:1 22:1 27:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 68:1 77:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 126:1 1 5:1 10:1 11:1 22:1 27:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 69:1 75:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 126:1 1 5:1 9:1 18:1 22:1 27:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 68:1 77:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 123:1 1 5:1 9:1 18:1 22:1 26:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 69:1 75:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 123:1 1 3:1 10:1 11:1 22:1 26:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 68:1 77:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 126:1 1 3:1 9:1 13:1 22:1 28:1 34:1 36:1 39:1 52:1 53:1 56:1 64:1 67:1 72:1 81:1 88:1 92:1 94:1 101:1 112:1 115:1 126:1 1 5:1 9:1 11:1 22:1 31:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 68:1 75:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 121:1 1 5:1 10:1 18:1 22:1 26:1 34:1 
36:1 40:1 43:1 54:1 61:1 64:1 68:1 77:1 84:1 88:1 92:1 95:1 98:1 112:1 118:1 126:1 0 4:1 10:1 11:1 22:1 29:1 32:1 36:1 39:1 47:1 53:1 61:1 65:1 69:1 74:1 83:1 88:1 90:1 95:1 102:1 107:1 118:1 121:1 1 5:1 10:1 18:1 22:1 31:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 69:1 75:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 123:1 0 3:1 10:1 11:1 22:1 29:1 32:1 36:1 39:1 52:1 53:1 61:1 65:1 69:1 74:1 83:1 88:1 90:1 95:1 102:1 106:1 115:1 121:1 0 5:1 10:1 11:1 22:1 29:1 32:1 36:1 39:1 52:1 53:1 61:1 65:1 69:1 74:1 83:1 88:1 90:1 95:1 102:1 110:1 115:1 121:1 0 5:1 10:1 11:1 22:1 29:1 32:1 36:1 39:1 52:1 53:1 61:1 65:1 69:1 74:1 83:1 88:1 91:1 95:1 102:1 106:1 118:1 121:1 0 5:1 10:1 11:1 22:1 29:1 32:1 36:1 39:1 52:1 53:1 61:1 65:1 69:1 74:1 83:1 88:1 90:1 95:1 102:1 113:1 118:1 121:1 0 5:1 10:1 11:1 22:1 29:1 32:1 36:1 39:1 47:1 53:1 61:1 65:1 69:1 74:1 83:1 88:1 91:1 95:1 102:1 106:1 118:1 121:1 0 3:1 10:1 11:1 22:1 29:1 32:1 36:1 39:1 52:1 53:1 61:1 65:1 69:1 74:1 83:1 88:1 91:1 95:1 102:1 106:1 115:1 121:1 1 5:1 9:1 18:1 22:1 26:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 69:1 75:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 121:1 0 1:1 10:1 19:1 22:1 29:1 34:1 37:1 39:1 51:1 53:1 61:1 65:1 69:1 77:1 86:1 88:1 92:1 96:1 102:1 112:1 116:1 120:1 0 3:1 10:1 11:1 22:1 29:1 32:1 36:1 39:1 47:1 53:1 61:1 65:1 69:1 74:1 83:1 88:1 91:1 95:1 102:1 106:1 118:1 121:1 0 5:1 10:1 11:1 22:1 29:1 32:1 36:1 39:1 47:1 53:1 61:1 65:1 69:1 74:1 83:1 88:1 90:1 95:1 102:1 107:1 118:1 121:1 1 4:1 9:1 13:1 22:1 28:1 32:1 36:1 39:1 52:1 53:1 56:1 64:1 67:1 72:1 81:1 88:1 92:1 94:1 101:1 112:1 115:1 126:1 0 3:1 10:1 11:1 22:1 29:1 32:1 36:1 39:1 52:1 53:1 61:1 65:1 69:1 74:1 83:1 88:1 91:1 95:1 102:1 110:1 118:1 121:1 1 5:1 9:1 11:1 22:1 31:1 34:1 36:1 40:1 43:1 54:1 61:1 65:1 68:1 75:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 121:1 1 5:1 10:1 18:1 22:1 26:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 69:1 75:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 126:1 1 5:1 9:1 11:1 22:1 27:1 34:1 36:1 40:1 43:1 54:1 61:1 64:1 69:1 75:1 86:1 88:1 92:1 95:1 98:1 112:1 118:1 126:1 0 3:1 10:1 11:1 22:1 29:1 32:1 36:1 39:1 52:1 53:1 61:1 65:1 69:1 74:1 83:1 88:1 90:1 95:1 102:1 107:1 118:1 121:1 0 4:1 10:1 11:1 22:1 29:1 32:1 36:1 39:1 42:1 53:1 61:1 65:1 69:1 74:1 83:1 88:1 91:1 95:1 102:1 107:1 115:1 121:1 0 3:1 10:1 11:1 22:1 29:1 32:1 36:1 39:1 52:1 53:1 61:1 65:1 69:1 74:1 83:1 88:1 91:1 95:1 102:1 110:1 115:1 121:1 xgboost-3.0.0/demo/data/featmap.txt000066400000000000000000000062031476511272400172240ustar00rootroot000000000000000 cap-shape=bell i 1 cap-shape=conical i 2 cap-shape=convex i 3 cap-shape=flat i 4 cap-shape=knobbed i 5 cap-shape=sunken i 6 cap-surface=fibrous i 7 cap-surface=grooves i 8 cap-surface=scaly i 9 cap-surface=smooth i 10 cap-color=brown i 11 cap-color=buff i 12 cap-color=cinnamon i 13 cap-color=gray i 14 cap-color=green i 15 cap-color=pink i 16 cap-color=purple i 17 cap-color=red i 18 cap-color=white i 19 cap-color=yellow i 20 bruises?=bruises i 21 bruises?=no i 22 odor=almond i 23 odor=anise i 24 odor=creosote i 25 odor=fishy i 26 odor=foul i 27 odor=musty i 28 odor=none i 29 odor=pungent i 30 odor=spicy i 31 gill-attachment=attached i 32 gill-attachment=descending i 33 gill-attachment=free i 34 gill-attachment=notched i 35 gill-spacing=close i 36 gill-spacing=crowded i 37 gill-spacing=distant i 38 gill-size=broad i 39 gill-size=narrow i 40 gill-color=black i 41 gill-color=brown i 42 gill-color=buff i 43 gill-color=chocolate i 44 gill-color=gray i 45 gill-color=green i 46 gill-color=orange i 47 gill-color=pink i 48 gill-color=purple i 49 
gill-color=red i 50 gill-color=white i 51 gill-color=yellow i 52 stalk-shape=enlarging i 53 stalk-shape=tapering i 54 stalk-root=bulbous i 55 stalk-root=club i 56 stalk-root=cup i 57 stalk-root=equal i 58 stalk-root=rhizomorphs i 59 stalk-root=rooted i 60 stalk-root=missing i 61 stalk-surface-above-ring=fibrous i 62 stalk-surface-above-ring=scaly i 63 stalk-surface-above-ring=silky i 64 stalk-surface-above-ring=smooth i 65 stalk-surface-below-ring=fibrous i 66 stalk-surface-below-ring=scaly i 67 stalk-surface-below-ring=silky i 68 stalk-surface-below-ring=smooth i 69 stalk-color-above-ring=brown i 70 stalk-color-above-ring=buff i 71 stalk-color-above-ring=cinnamon i 72 stalk-color-above-ring=gray i 73 stalk-color-above-ring=orange i 74 stalk-color-above-ring=pink i 75 stalk-color-above-ring=red i 76 stalk-color-above-ring=white i 77 stalk-color-above-ring=yellow i 78 stalk-color-below-ring=brown i 79 stalk-color-below-ring=buff i 80 stalk-color-below-ring=cinnamon i 81 stalk-color-below-ring=gray i 82 stalk-color-below-ring=orange i 83 stalk-color-below-ring=pink i 84 stalk-color-below-ring=red i 85 stalk-color-below-ring=white i 86 stalk-color-below-ring=yellow i 87 veil-type=partial i 88 veil-type=universal i 89 veil-color=brown i 90 veil-color=orange i 91 veil-color=white i 92 veil-color=yellow i 93 ring-number=none i 94 ring-number=one i 95 ring-number=two i 96 ring-type=cobwebby i 97 ring-type=evanescent i 98 ring-type=flaring i 99 ring-type=large i 100 ring-type=none i 101 ring-type=pendant i 102 ring-type=sheathing i 103 ring-type=zone i 104 spore-print-color=black i 105 spore-print-color=brown i 106 spore-print-color=buff i 107 spore-print-color=chocolate i 108 spore-print-color=green i 109 spore-print-color=orange i 110 spore-print-color=purple i 111 spore-print-color=white i 112 spore-print-color=yellow i 113 population=abundant i 114 population=clustered i 115 population=numerous i 116 population=scattered i 117 population=several i 118 population=solitary i 119 habitat=grasses i 120 habitat=leaves i 121 habitat=meadows i 122 habitat=paths i 123 habitat=urban i 124 habitat=waste i 125 habitat=woods i xgboost-3.0.0/demo/data/gen_autoclaims.R000066400000000000000000000011611476511272400201610ustar00rootroot00000000000000site <- 'http://cran.r-project.org' if (!require('dummies')) { install.packages('dummies', repos = site) } if (!require('insuranceData')) { install.packages('insuranceData', repos = site) } library(dummies) library(insuranceData) data(AutoClaims) data <- AutoClaims data$STATE <- as.factor(data$STATE) data$CLASS <- as.factor(data$CLASS) data$GENDER <- as.factor(data$GENDER) data.dummy <- dummy.data.frame( data , dummy.class = 'factor' , omit.constants = TRUE ) write.table( data.dummy , 'autoclaims.csv' , sep = ',' , row.names = FALSE , col.names = FALSE , quote = FALSE ) xgboost-3.0.0/demo/data/veterans_lung_cancer.csv000066400000000000000000000133141476511272400217530ustar00rootroot00000000000000Survival_label_lower_bound,Survival_label_upper_bound,Age_in_years,Karnofsky_score,Months_from_Diagnosis,Celltype=adeno,Celltype=large,Celltype=smallcell,Celltype=squamous,Prior_therapy=no,Prior_therapy=yes,Treatment=standard,Treatment=test 72.0,72.0,69.0,60.0,7.0,0,0,0,1,1,0,1,0 411.0,411.0,64.0,70.0,5.0,0,0,0,1,0,1,1,0 228.0,228.0,38.0,60.0,3.0,0,0,0,1,1,0,1,0 126.0,126.0,63.0,60.0,9.0,0,0,0,1,0,1,1,0 118.0,118.0,65.0,70.0,11.0,0,0,0,1,0,1,1,0 10.0,10.0,49.0,20.0,5.0,0,0,0,1,1,0,1,0 82.0,82.0,69.0,40.0,10.0,0,0,0,1,0,1,1,0 110.0,110.0,68.0,80.0,29.0,0,0,0,1,1,0,1,0 
314.0,314.0,43.0,50.0,18.0,0,0,0,1,1,0,1,0 100.0,inf,70.0,70.0,6.0,0,0,0,1,1,0,1,0 42.0,42.0,81.0,60.0,4.0,0,0,0,1,1,0,1,0 8.0,8.0,63.0,40.0,58.0,0,0,0,1,0,1,1,0 144.0,144.0,63.0,30.0,4.0,0,0,0,1,1,0,1,0 25.0,inf,52.0,80.0,9.0,0,0,0,1,0,1,1,0 11.0,11.0,48.0,70.0,11.0,0,0,0,1,0,1,1,0 30.0,30.0,61.0,60.0,3.0,0,0,1,0,1,0,1,0 384.0,384.0,42.0,60.0,9.0,0,0,1,0,1,0,1,0 4.0,4.0,35.0,40.0,2.0,0,0,1,0,1,0,1,0 54.0,54.0,63.0,80.0,4.0,0,0,1,0,0,1,1,0 13.0,13.0,56.0,60.0,4.0,0,0,1,0,1,0,1,0 123.0,inf,55.0,40.0,3.0,0,0,1,0,1,0,1,0 97.0,inf,67.0,60.0,5.0,0,0,1,0,1,0,1,0 153.0,153.0,63.0,60.0,14.0,0,0,1,0,0,1,1,0 59.0,59.0,65.0,30.0,2.0,0,0,1,0,1,0,1,0 117.0,117.0,46.0,80.0,3.0,0,0,1,0,1,0,1,0 16.0,16.0,53.0,30.0,4.0,0,0,1,0,0,1,1,0 151.0,151.0,69.0,50.0,12.0,0,0,1,0,1,0,1,0 22.0,22.0,68.0,60.0,4.0,0,0,1,0,1,0,1,0 56.0,56.0,43.0,80.0,12.0,0,0,1,0,0,1,1,0 21.0,21.0,55.0,40.0,2.0,0,0,1,0,0,1,1,0 18.0,18.0,42.0,20.0,15.0,0,0,1,0,1,0,1,0 139.0,139.0,64.0,80.0,2.0,0,0,1,0,1,0,1,0 20.0,20.0,65.0,30.0,5.0,0,0,1,0,1,0,1,0 31.0,31.0,65.0,75.0,3.0,0,0,1,0,1,0,1,0 52.0,52.0,55.0,70.0,2.0,0,0,1,0,1,0,1,0 287.0,287.0,66.0,60.0,25.0,0,0,1,0,0,1,1,0 18.0,18.0,60.0,30.0,4.0,0,0,1,0,1,0,1,0 51.0,51.0,67.0,60.0,1.0,0,0,1,0,1,0,1,0 122.0,122.0,53.0,80.0,28.0,0,0,1,0,1,0,1,0 27.0,27.0,62.0,60.0,8.0,0,0,1,0,1,0,1,0 54.0,54.0,67.0,70.0,1.0,0,0,1,0,1,0,1,0 7.0,7.0,72.0,50.0,7.0,0,0,1,0,1,0,1,0 63.0,63.0,48.0,50.0,11.0,0,0,1,0,1,0,1,0 392.0,392.0,68.0,40.0,4.0,0,0,1,0,1,0,1,0 10.0,10.0,67.0,40.0,23.0,0,0,1,0,0,1,1,0 8.0,8.0,61.0,20.0,19.0,1,0,0,0,0,1,1,0 92.0,92.0,60.0,70.0,10.0,1,0,0,0,1,0,1,0 35.0,35.0,62.0,40.0,6.0,1,0,0,0,1,0,1,0 117.0,117.0,38.0,80.0,2.0,1,0,0,0,1,0,1,0 132.0,132.0,50.0,80.0,5.0,1,0,0,0,1,0,1,0 12.0,12.0,63.0,50.0,4.0,1,0,0,0,0,1,1,0 162.0,162.0,64.0,80.0,5.0,1,0,0,0,1,0,1,0 3.0,3.0,43.0,30.0,3.0,1,0,0,0,1,0,1,0 95.0,95.0,34.0,80.0,4.0,1,0,0,0,1,0,1,0 177.0,177.0,66.0,50.0,16.0,0,1,0,0,0,1,1,0 162.0,162.0,62.0,80.0,5.0,0,1,0,0,1,0,1,0 216.0,216.0,52.0,50.0,15.0,0,1,0,0,1,0,1,0 553.0,553.0,47.0,70.0,2.0,0,1,0,0,1,0,1,0 278.0,278.0,63.0,60.0,12.0,0,1,0,0,1,0,1,0 12.0,12.0,68.0,40.0,12.0,0,1,0,0,0,1,1,0 260.0,260.0,45.0,80.0,5.0,0,1,0,0,1,0,1,0 200.0,200.0,41.0,80.0,12.0,0,1,0,0,0,1,1,0 156.0,156.0,66.0,70.0,2.0,0,1,0,0,1,0,1,0 182.0,inf,62.0,90.0,2.0,0,1,0,0,1,0,1,0 143.0,143.0,60.0,90.0,8.0,0,1,0,0,1,0,1,0 105.0,105.0,66.0,80.0,11.0,0,1,0,0,1,0,1,0 103.0,103.0,38.0,80.0,5.0,0,1,0,0,1,0,1,0 250.0,250.0,53.0,70.0,8.0,0,1,0,0,0,1,1,0 100.0,100.0,37.0,60.0,13.0,0,1,0,0,0,1,1,0 999.0,999.0,54.0,90.0,12.0,0,0,0,1,0,1,0,1 112.0,112.0,60.0,80.0,6.0,0,0,0,1,1,0,0,1 87.0,inf,48.0,80.0,3.0,0,0,0,1,1,0,0,1 231.0,inf,52.0,50.0,8.0,0,0,0,1,0,1,0,1 242.0,242.0,70.0,50.0,1.0,0,0,0,1,1,0,0,1 991.0,991.0,50.0,70.0,7.0,0,0,0,1,0,1,0,1 111.0,111.0,62.0,70.0,3.0,0,0,0,1,1,0,0,1 1.0,1.0,65.0,20.0,21.0,0,0,0,1,0,1,0,1 587.0,587.0,58.0,60.0,3.0,0,0,0,1,1,0,0,1 389.0,389.0,62.0,90.0,2.0,0,0,0,1,1,0,0,1 33.0,33.0,64.0,30.0,6.0,0,0,0,1,1,0,0,1 25.0,25.0,63.0,20.0,36.0,0,0,0,1,1,0,0,1 357.0,357.0,58.0,70.0,13.0,0,0,0,1,1,0,0,1 467.0,467.0,64.0,90.0,2.0,0,0,0,1,1,0,0,1 201.0,201.0,52.0,80.0,28.0,0,0,0,1,0,1,0,1 1.0,1.0,35.0,50.0,7.0,0,0,0,1,1,0,0,1 30.0,30.0,63.0,70.0,11.0,0,0,0,1,1,0,0,1 44.0,44.0,70.0,60.0,13.0,0,0,0,1,0,1,0,1 283.0,283.0,51.0,90.0,2.0,0,0,0,1,1,0,0,1 15.0,15.0,40.0,50.0,13.0,0,0,0,1,0,1,0,1 25.0,25.0,69.0,30.0,2.0,0,0,1,0,1,0,0,1 103.0,inf,36.0,70.0,22.0,0,0,1,0,0,1,0,1 21.0,21.0,71.0,20.0,4.0,0,0,1,0,1,0,0,1 13.0,13.0,62.0,30.0,2.0,0,0,1,0,1,0,0,1 87.0,87.0,60.0,60.0,2.0,0,0,1,0,1,0,0,1 
2.0,2.0,44.0,40.0,36.0,0,0,1,0,0,1,0,1 20.0,20.0,54.0,30.0,9.0,0,0,1,0,0,1,0,1 7.0,7.0,66.0,20.0,11.0,0,0,1,0,1,0,0,1 24.0,24.0,49.0,60.0,8.0,0,0,1,0,1,0,0,1 99.0,99.0,72.0,70.0,3.0,0,0,1,0,1,0,0,1 8.0,8.0,68.0,80.0,2.0,0,0,1,0,1,0,0,1 99.0,99.0,62.0,85.0,4.0,0,0,1,0,1,0,0,1 61.0,61.0,71.0,70.0,2.0,0,0,1,0,1,0,0,1 25.0,25.0,70.0,70.0,2.0,0,0,1,0,1,0,0,1 95.0,95.0,61.0,70.0,1.0,0,0,1,0,1,0,0,1 80.0,80.0,71.0,50.0,17.0,0,0,1,0,1,0,0,1 51.0,51.0,59.0,30.0,87.0,0,0,1,0,0,1,0,1 29.0,29.0,67.0,40.0,8.0,0,0,1,0,1,0,0,1 24.0,24.0,60.0,40.0,2.0,1,0,0,0,1,0,0,1 18.0,18.0,69.0,40.0,5.0,1,0,0,0,0,1,0,1 83.0,inf,57.0,99.0,3.0,1,0,0,0,1,0,0,1 31.0,31.0,39.0,80.0,3.0,1,0,0,0,1,0,0,1 51.0,51.0,62.0,60.0,5.0,1,0,0,0,1,0,0,1 90.0,90.0,50.0,60.0,22.0,1,0,0,0,0,1,0,1 52.0,52.0,43.0,60.0,3.0,1,0,0,0,1,0,0,1 73.0,73.0,70.0,60.0,3.0,1,0,0,0,1,0,0,1 8.0,8.0,66.0,50.0,5.0,1,0,0,0,1,0,0,1 36.0,36.0,61.0,70.0,8.0,1,0,0,0,1,0,0,1 48.0,48.0,81.0,10.0,4.0,1,0,0,0,1,0,0,1 7.0,7.0,58.0,40.0,4.0,1,0,0,0,1,0,0,1 140.0,140.0,63.0,70.0,3.0,1,0,0,0,1,0,0,1 186.0,186.0,60.0,90.0,3.0,1,0,0,0,1,0,0,1 84.0,84.0,62.0,80.0,4.0,1,0,0,0,0,1,0,1 19.0,19.0,42.0,50.0,10.0,1,0,0,0,1,0,0,1 45.0,45.0,69.0,40.0,3.0,1,0,0,0,1,0,0,1 80.0,80.0,63.0,40.0,4.0,1,0,0,0,1,0,0,1 52.0,52.0,45.0,60.0,4.0,0,1,0,0,1,0,0,1 164.0,164.0,68.0,70.0,15.0,0,1,0,0,0,1,0,1 19.0,19.0,39.0,30.0,4.0,0,1,0,0,0,1,0,1 53.0,53.0,66.0,60.0,12.0,0,1,0,0,1,0,0,1 15.0,15.0,63.0,30.0,5.0,0,1,0,0,1,0,0,1 43.0,43.0,49.0,60.0,11.0,0,1,0,0,0,1,0,1 340.0,340.0,64.0,80.0,10.0,0,1,0,0,0,1,0,1 133.0,133.0,65.0,75.0,1.0,0,1,0,0,1,0,0,1 111.0,111.0,64.0,60.0,5.0,0,1,0,0,1,0,0,1 231.0,231.0,67.0,70.0,18.0,0,1,0,0,0,1,0,1 378.0,378.0,65.0,80.0,4.0,0,1,0,0,1,0,0,1 49.0,49.0,37.0,30.0,3.0,0,1,0,0,1,0,0,1 xgboost-3.0.0/demo/gpu_acceleration/000077500000000000000000000000001476511272400174405ustar00rootroot00000000000000xgboost-3.0.0/demo/gpu_acceleration/README.rst000066400000000000000000000004061476511272400211270ustar00rootroot00000000000000:orphan: GPU Acceleration Demo ===================== This is a collection of demonstration scripts to showcase the basic usage of GPU. Please see :doc:`/gpu/index` for more info. There are other demonstrations for distributed GPU training using dask or spark. xgboost-3.0.0/demo/gpu_acceleration/cover_type.py000066400000000000000000000027501476511272400221750ustar00rootroot00000000000000""" Using xgboost on GPU devices ============================ Shows how to train a model on the `forest cover type `_ dataset using GPU acceleration. The forest cover type dataset has 581,012 rows and 54 features, making it time consuming to process. We compare the run-time and accuracy of the GPU and CPU histogram algorithms. In addition, The demo showcases using GPU with other GPU-related libraries including cupy and cuml. These libraries are not strictly required. 
""" import time import cupy as cp from cuml.model_selection import train_test_split from sklearn.datasets import fetch_covtype import xgboost as xgb # Fetch dataset using sklearn X, y = fetch_covtype(return_X_y=True) X = cp.array(X) y = cp.array(y) y -= y.min() # Create 0.75/0.25 train/test split X_train, X_test, y_train, y_test = train_test_split( X, y, test_size=0.25, train_size=0.75, random_state=42 ) # Specify sufficient boosting iterations to reach a minimum num_round = 3000 # Leave most parameters as default clf = xgb.XGBClassifier(device="cuda", n_estimators=num_round) # Train model start = time.time() clf.fit(X_train, y_train, eval_set=[(X_test, y_test)]) gpu_res = clf.evals_result() print("GPU Training Time: %s seconds" % (str(time.time() - start))) # Repeat for CPU algorithm clf = xgb.XGBClassifier(device="cpu", n_estimators=num_round) start = time.time() cpu_res = clf.evals_result() print("CPU Training Time: %s seconds" % (str(time.time() - start))) xgboost-3.0.0/demo/gpu_acceleration/tree_shap.py000066400000000000000000000025461476511272400217730ustar00rootroot00000000000000""" Use GPU to speedup SHAP value computation ========================================= Demonstrates using GPU acceleration to compute SHAP values for feature importance. """ import shap from sklearn.datasets import fetch_california_housing import xgboost as xgb # Fetch dataset using sklearn data = fetch_california_housing() print(data.DESCR) X = data.data y = data.target num_round = 500 param = { "eta": 0.05, "max_depth": 10, "tree_method": "hist", "device": "cuda", } # GPU accelerated training dtrain = xgb.DMatrix(X, label=y, feature_names=data.feature_names) model = xgb.train(param, dtrain, num_round) # Compute shap values using GPU with xgboost model.set_param({"device": "cuda"}) shap_values = model.predict(dtrain, pred_contribs=True) # Compute shap interaction values using GPU shap_interaction_values = model.predict(dtrain, pred_interactions=True) # shap will call the GPU accelerated version as long as the device parameter is set to # "cuda" explainer = shap.TreeExplainer(model) shap_values = explainer.shap_values(X) # visualize the first prediction's explanation shap.force_plot( explainer.expected_value, shap_values[0, :], X[0, :], feature_names=data.feature_names, matplotlib=True, ) # Show a summary of feature importance shap.summary_plot(shap_values, X, plot_type="bar", feature_names=data.feature_names) xgboost-3.0.0/demo/guide-python/000077500000000000000000000000001476511272400165505ustar00rootroot00000000000000xgboost-3.0.0/demo/guide-python/README.rst000066400000000000000000000002171476511272400202370ustar00rootroot00000000000000XGBoost Python Feature Walkthrough ================================== This is a collection of examples for using the XGBoost Python package. xgboost-3.0.0/demo/guide-python/basic_walkthrough.py000066400000000000000000000043211476511272400226220ustar00rootroot00000000000000""" Getting started with XGBoost ============================ This is a simple example of using the native XGBoost interface, there are other interfaces in the Python package like scikit-learn interface and Dask interface. See :doc:`/python/python_intro` and :doc:`/tutorials/index` for other references. """ import os import pickle import numpy as np from sklearn.datasets import load_svmlight_file import xgboost as xgb # Make sure the demo knows where to load the data. 
CURRENT_DIR = os.path.dirname(os.path.abspath(__file__)) XGBOOST_ROOT_DIR = os.path.dirname(os.path.dirname(CURRENT_DIR)) DEMO_DIR = os.path.join(XGBOOST_ROOT_DIR, "demo") # X is a scipy csr matrix; XGBoost supports many other input types. X, y = load_svmlight_file(os.path.join(DEMO_DIR, "data", "agaricus.txt.train")) dtrain = xgb.DMatrix(X, y) # validation set X_test, y_test = load_svmlight_file(os.path.join(DEMO_DIR, "data", "agaricus.txt.test")) dtest = xgb.DMatrix(X_test, y_test) # specify parameters via map, definitions are the same as the C++ version param = {"max_depth": 2, "eta": 1, "objective": "binary:logistic"} # specify validation sets to watch performance watchlist = [(dtest, "eval"), (dtrain, "train")] # number of boosting rounds num_round = 2 bst = xgb.train(param, dtrain, num_boost_round=num_round, evals=watchlist) # run prediction preds = bst.predict(dtest) labels = dtest.get_label() print( "error=%f" % ( sum(1 for i in range(len(preds)) if int(preds[i] > 0.5) != labels[i]) / float(len(preds)) ) ) bst.save_model("model-0.json") # dump model bst.dump_model("dump.raw.txt") # dump model with feature map bst.dump_model("dump.nice.txt", os.path.join(DEMO_DIR, "data/featmap.txt")) # save dmatrix into binary buffer dtest.save_binary("dtest.dmatrix") # save model bst.save_model("model-1.json") # load model and data back in bst2 = xgb.Booster(model_file="model-1.json") dtest2 = xgb.DMatrix("dtest.dmatrix") preds2 = bst2.predict(dtest2) # assert they are the same assert np.sum(np.abs(preds2 - preds)) == 0 # alternatively, you can pickle the booster pks = pickle.dumps(bst2) # load model and data back in bst3 = pickle.loads(pks) preds3 = bst3.predict(dtest2) # assert they are the same assert np.sum(np.abs(preds3 - preds)) == 0 xgboost-3.0.0/demo/guide-python/boost_from_prediction.py000066400000000000000000000022261476511272400235150ustar00rootroot00000000000000""" Demo for boosting from prediction ================================= """ import os import xgboost as xgb CURRENT_DIR = os.path.dirname(__file__) dtrain = xgb.DMatrix( os.path.join(CURRENT_DIR, "../data/agaricus.txt.train?format=libsvm") ) dtest = xgb.DMatrix( os.path.join(CURRENT_DIR, "../data/agaricus.txt.test?format=libsvm") ) watchlist = [(dtest, "eval"), (dtrain, "train")] ### # advanced: start from an initial base prediction # print("start running example to start from an initial prediction") # specify parameters via map, definitions are the same as the C++ version param = {"max_depth": 2, "eta": 1, "objective": "binary:logistic"} # train xgboost for 1 round bst = xgb.train(param, dtrain, 1, watchlist) # Note: we need the margin value instead of the transformed prediction in # set_base_margin # do predict with output_margin=True, which will always give you margin values # before the logistic transformation ptrain = bst.predict(dtrain, output_margin=True) ptest = bst.predict(dtest, output_margin=True) dtrain.set_base_margin(ptrain) dtest.set_base_margin(ptest) print("this is the result of running from an initial prediction") bst = xgb.train(param, dtrain, 1, watchlist) xgboost-3.0.0/demo/guide-python/callbacks.py000066400000000000000000000115471476511272400210500ustar00rootroot00000000000000""" Demo for using and defining callback functions ============================================== ..
versionadded:: 1.3.0 """ import argparse import os import tempfile from typing import Dict import numpy as np from matplotlib import pyplot as plt from sklearn.datasets import load_breast_cancer from sklearn.model_selection import train_test_split import xgboost as xgb class Plotting(xgb.callback.TrainingCallback): """Plot evaluation result during training. Only for demonstration purpose as it's quite slow to draw using matplotlib. """ def __init__(self, rounds: int) -> None: self.fig = plt.figure() self.ax = self.fig.add_subplot(111) self.rounds = rounds self.lines: Dict[str, plt.Line2D] = {} self.fig.show() self.x = np.linspace(0, self.rounds, self.rounds) plt.ion() def _get_key(self, data: str, metric: str) -> str: return f"{data}-{metric}" def after_iteration( self, model: xgb.Booster, epoch: int, evals_log: Dict[str, dict] ) -> bool: """Update the plot.""" if not self.lines: for data, metric in evals_log.items(): for metric_name, log in metric.items(): key = self._get_key(data, metric_name) expanded = log + [0] * (self.rounds - len(log)) (self.lines[key],) = self.ax.plot(self.x, expanded, label=key) self.ax.legend() else: # https://pythonspot.com/matplotlib-update-plot/ for data, metric in evals_log.items(): for metric_name, log in metric.items(): key = self._get_key(data, metric_name) expanded = log + [0] * (self.rounds - len(log)) self.lines[key].set_ydata(expanded) self.fig.canvas.draw() # False to indicate training should not stop. return False def custom_callback() -> None: """Demo for defining a custom callback function that plots evaluation result during training.""" X, y = load_breast_cancer(return_X_y=True) X_train, X_valid, y_train, y_valid = train_test_split(X, y, random_state=0) D_train = xgb.DMatrix(X_train, y_train) D_valid = xgb.DMatrix(X_valid, y_valid) num_boost_round = 100 plotting = Plotting(num_boost_round) # Pass it to the `callbacks` parameter as a list. xgb.train( { "objective": "binary:logistic", "eval_metric": ["error", "rmse"], "tree_method": "hist", "device": "cuda", }, D_train, evals=[(D_train, "Train"), (D_valid, "Valid")], num_boost_round=num_boost_round, callbacks=[plotting], ) def check_point_callback() -> None: """Demo for using the checkpoint callback. Custom logic for handling output is usually required and users are encouraged to define their own callback for checkpointing operations. The builtin one can be used as a starting point. """ # Only for demo, set a larger value (like 100) in practice as checkpointing is quite # slow. rounds = 2 def check(as_pickle: bool) -> None: for i in range(0, 10, rounds): if i == 0: continue if as_pickle: path = os.path.join(tmpdir, "model_" + str(i) + ".pkl") else: path = os.path.join( tmpdir, f"model_{i}.{xgb.callback.TrainingCheckPoint.default_format}", ) assert os.path.exists(path) X, y = load_breast_cancer(return_X_y=True) m = xgb.DMatrix(X, y) # Check point to a temporary directory for demo with tempfile.TemporaryDirectory() as tmpdir: # Use callback class from xgboost.callback # Feel free to subclass/customize it to suit your need. check_point = xgb.callback.TrainingCheckPoint( directory=tmpdir, interval=rounds, name="model" ) xgb.train( {"objective": "binary:logistic"}, m, num_boost_round=10, verbose_eval=False, callbacks=[check_point], ) check(False) # This version of checkpoint saves everything including parameters and # model. 
See: doc/tutorials/saving_model.rst check_point = xgb.callback.TrainingCheckPoint( directory=tmpdir, interval=rounds, as_pickle=True, name="model" ) xgb.train( {"objective": "binary:logistic"}, m, num_boost_round=10, verbose_eval=False, callbacks=[check_point], ) check(True) if __name__ == "__main__": parser = argparse.ArgumentParser() parser.add_argument("--plot", default=1, type=int) args = parser.parse_args() check_point_callback() if args.plot: custom_callback() xgboost-3.0.0/demo/guide-python/cat_in_the_dat.py000066400000000000000000000076461476511272400220640ustar00rootroot00000000000000""" Train XGBoost with cat_in_the_dat dataset ========================================= A simple demo for categorical data support using dataset from Kaggle categorical data tutorial. The excellent tutorial is at: https://www.kaggle.com/shahules/an-overview-of-encoding-techniques And the data can be found at: https://www.kaggle.com/shahules/an-overview-of-encoding-techniques/data .. versionadded:: 1.6.0 See Also -------- - :doc:`Tutorial ` - :ref:`sphx_glr_python_examples_categorical.py` - :ref:`sphx_glr_python_examples_cat_pipeline.py` """ from __future__ import annotations import os from tempfile import TemporaryDirectory from time import time import pandas as pd from sklearn.metrics import roc_auc_score from sklearn.model_selection import train_test_split import xgboost as xgb def load_cat_in_the_dat() -> tuple[pd.DataFrame, pd.Series]: """Assuming you have already downloaded the data into `input` directory.""" df_train = pd.read_csv("./input/cat-in-the-dat/train.csv") print( "train data set has got {} rows and {} columns".format( df_train.shape[0], df_train.shape[1] ) ) X = df_train.drop(["target"], axis=1) y = df_train["target"] for i in range(0, 5): X["bin_" + str(i)] = X["bin_" + str(i)].astype("category") for i in range(0, 5): X["nom_" + str(i)] = X["nom_" + str(i)].astype("category") for i in range(5, 10): X["nom_" + str(i)] = X["nom_" + str(i)].apply(int, base=16) for i in range(0, 6): X["ord_" + str(i)] = X["ord_" + str(i)].astype("category") print( "train data set has got {} rows and {} columns".format(X.shape[0], X.shape[1]) ) return X, y params = { "tree_method": "hist", "device": "cuda", "n_estimators": 32, "colsample_bylevel": 0.7, } def categorical_model(X: pd.DataFrame, y: pd.Series, output_dir: str) -> None: """Train using builtin categorical data support from XGBoost""" X_train, X_test, y_train, y_test = train_test_split( X, y, random_state=1994, test_size=0.2 ) # Be aware that the encoding for X_train and X_test are the same here. In practice, # we should try to use an encoder like (sklearn OrdinalEncoder) to obtain the # categorical values. # Specify `enable_categorical` to True. clf = xgb.XGBClassifier( **params, eval_metric="auc", enable_categorical=True, max_cat_to_onehot=1, # We use optimal partitioning exclusively ) clf.fit(X_train, y_train, eval_set=[(X_test, y_test), (X_train, y_train)]) clf.save_model(os.path.join(output_dir, "categorical.json")) y_score = clf.predict_proba(X_test)[:, 1] # proba of positive samples auc = roc_auc_score(y_test, y_score) print("AUC of using builtin categorical data support:", auc) def onehot_encoding_model(X: pd.DataFrame, y: pd.Series, output_dir: str) -> None: """Train using one-hot encoded data.""" X_train, X_test, y_train, y_test = train_test_split( X, y, random_state=42, test_size=0.2 ) # Specify `enable_categorical` to False as we are using encoded data. 
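# (Added note, not part of the original demo: with pd.get_dummies applied in
# __main__ below, every categorical column is expanded into one 0/1 indicator
# column per category, so the feature count grows with the cardinality of each
# column -- this blow-up is the memory cost that the builtin categorical support
# avoids.)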
clf = xgb.XGBClassifier(**params, eval_metric="auc", enable_categorical=False) clf.fit( X_train, y_train, eval_set=[(X_test, y_test), (X_train, y_train)], ) clf.save_model(os.path.join(output_dir, "one-hot.json")) y_score = clf.predict_proba(X_test)[:, 1] # proba of positive samples auc = roc_auc_score(y_test, y_score) print("AUC of using onehot encoding:", auc) if __name__ == "__main__": X, y = load_cat_in_the_dat() with TemporaryDirectory() as tmpdir: start = time() categorical_model(X, y, tmpdir) end = time() print("Duration:categorical", end - start) X = pd.get_dummies(X) start = time() onehot_encoding_model(X, y, tmpdir) end = time() print("Duration:onehot", end - start) xgboost-3.0.0/demo/guide-python/cat_pipeline.py000066400000000000000000000114161476511272400215610ustar00rootroot00000000000000""" Feature engineering pipeline for categorical data ================================================= The script showcases how to keep the categorical data encoding consistent across training and inference. There are many ways to attain the same goal, this script can be used as a starting point. See Also -------- - :doc:`Tutorial ` - :ref:`sphx_glr_python_examples_categorical.py` - :ref:`sphx_glr_python_examples_cat_in_the_dat.py` """ from typing import List, Tuple import numpy as np import pandas as pd from sklearn.compose import make_column_selector, make_column_transformer from sklearn.model_selection import train_test_split from sklearn.pipeline import make_pipeline from sklearn.preprocessing import OrdinalEncoder import xgboost as xgb def make_example_data() -> Tuple[pd.DataFrame, pd.Series, List[str]]: """Generate data for demo.""" n_samples = 2048 rng = np.random.default_rng(1994) # We have three categorical features, while the rest are numerical. categorical_features = ["brand_id", "retailer_id", "category_id"] df = pd.DataFrame( np.random.randint(32, 96, size=(n_samples, 3)), columns=categorical_features, ) df["price"] = rng.integers(100, 200, size=(n_samples,)) df["stock_status"] = rng.choice([True, False], n_samples) df["on_sale"] = rng.choice([True, False], n_samples) df["label"] = rng.normal(loc=0.0, scale=1.0, size=n_samples) X = df.drop(["label"], axis=1) y = df["label"] return X, y, categorical_features def native() -> None: """Using the native XGBoost interface.""" X, y, cat_feats = make_example_data() X_train, X_test, y_train, y_test = train_test_split( X, y, random_state=1994, test_size=0.2 ) # Create an encoder based on training data. enc = OrdinalEncoder(handle_unknown="use_encoded_value", unknown_value=np.nan) enc.set_output(transform="pandas") enc = enc.fit(X_train[cat_feats]) def enc_transform(X: pd.DataFrame) -> pd.DataFrame: # don't make change inplace so that we can have demonstrations for encoding X = X.copy() cat_cols = enc.transform(X[cat_feats]) for i, name in enumerate(cat_feats): # create pd.Series based on the encoder cat_cols[name] = pd.Categorical.from_codes( codes=cat_cols[name].astype(np.int32), categories=enc.categories_[i] ) X[cat_feats] = cat_cols return X # Encode the data based on fitted encoder. X_train_enc = enc_transform(X_train) X_test_enc = enc_transform(X_test) # Train XGBoost model using the native interface. Xy_train = xgb.QuantileDMatrix(X_train_enc, y_train, enable_categorical=True) Xy_test = xgb.QuantileDMatrix( X_test_enc, y_test, enable_categorical=True, ref=Xy_train ) booster = xgb.train({}, Xy_train) booster.predict(Xy_test) # Following shows that data are encoded consistently. 
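# (Added note, not part of the original demo: "consistently" here means the
# OrdinalEncoder fitted on X_train maps each category to the same integer code
# no matter which subset of rows is transformed, so predictions on freshly
# encoded slices match predictions on the data encoded at training time.)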
# We first obtain the result from newly encoded data predt0 = booster.inplace_predict(enc_transform(X_train.head(16))) # then we obtain the result from data already encoded during training. predt1 = booster.inplace_predict(X_train_enc.head(16)) np.testing.assert_allclose(predt0, predt1) def pipeline() -> None: """Using the sklearn pipeline.""" X, y, cat_feats = make_example_data() X_train, X_test, y_train, y_test = train_test_split( X, y, random_state=3, test_size=0.2 ) enc = make_column_transformer( ( OrdinalEncoder(handle_unknown="use_encoded_value", unknown_value=np.nan), # all categorical feature names end with "_id" make_column_selector(pattern=".*_id"), ), remainder="passthrough", verbose_feature_names_out=False, ) # No need to set pandas output, we use `feature_types` to indicate the type of # features. # enc.set_output(transform="pandas") feature_types = ["c" if fn in cat_feats else "q" for fn in X_train.columns] reg = xgb.XGBRegressor( feature_types=feature_types, enable_categorical=True, n_estimators=10 ) p = make_pipeline(enc, reg) p.fit(X_train, y_train) # check that XGBoost is using the feature types correctly. model_types = reg.get_booster().feature_types assert model_types is not None for a, b in zip(model_types, feature_types): assert a == b # Following shows that data are encoded consistently. # We first create a slice of data that doesn't contain all the categories predt0 = p.predict(X_train.iloc[:16, :]) # Then we use the dataframe that contains all the categories predt1 = p.predict(X_train)[:16] # The resulting encoding is the same np.testing.assert_allclose(predt0, predt1) if __name__ == "__main__": pipeline() native() xgboost-3.0.0/demo/guide-python/categorical.py000066400000000000000000000057071476511272400214060ustar00rootroot00000000000000""" Getting started with categorical data ===================================== Experimental support for categorical data. Previously, users needed to run an encoder themselves before passing the data into XGBoost, which creates a sparse matrix and potentially increases memory usage. This demo showcases the experimental categorical data support; more advanced features are planned. .. versionadded:: 1.5.0 See Also -------- - :doc:`Tutorial ` - :ref:`sphx_glr_python_examples_cat_in_the_dat.py` - :ref:`sphx_glr_python_examples_cat_pipeline.py` """ from typing import Tuple import numpy as np import pandas as pd import xgboost as xgb def make_categorical( n_samples: int, n_features: int, n_categories: int, onehot: bool ) -> Tuple[pd.DataFrame, pd.Series]: """Make some random data for demo.""" rng = np.random.RandomState(1994) pd_dict = {} for i in range(n_features + 1): c = rng.randint(low=0, high=n_categories, size=n_samples) pd_dict[str(i)] = pd.Series(c, dtype=np.int64) df = pd.DataFrame(pd_dict) label = df.iloc[:, 0] df = df.iloc[:, 1:] for i in range(0, n_features): label += df.iloc[:, i] label += 1 df = df.astype("category") categories = np.arange(0, n_categories) for col in df.columns: df[col] = df[col].cat.set_categories(categories) if onehot: return pd.get_dummies(df), label return df, label def main() -> None: # Use builtin categorical data support # For the scikit-learn interface, the input data should be a pandas DataFrame or cudf # DataFrame with categorical features. If a numpy/cupy array is used instead, the # `feature_types` for `XGBRegressor` should be set accordingly. X, y = make_categorical(100, 10, 4, False) # Specify `enable_categorical` to True, also we use onehot-encoding-based split here # for demonstration.
For details see the document of `max_cat_to_onehot`. reg = xgb.XGBRegressor( tree_method="hist", enable_categorical=True, max_cat_to_onehot=5, device="cuda" ) reg.fit(X, y, eval_set=[(X, y)]) # Pass in already encoded data X_enc, y_enc = make_categorical(100, 10, 4, True) reg_enc = xgb.XGBRegressor(tree_method="hist", device="cuda") reg_enc.fit(X_enc, y_enc, eval_set=[(X_enc, y_enc)]) reg_results = np.array(reg.evals_result()["validation_0"]["rmse"]) reg_enc_results = np.array(reg_enc.evals_result()["validation_0"]["rmse"]) # Check that they have the same results np.testing.assert_allclose(reg_results, reg_enc_results) # Convert to DMatrix for SHAP value booster: xgb.Booster = reg.get_booster() m = xgb.DMatrix(X, enable_categorical=True) # specify categorical data support. SHAP = booster.predict(m, pred_contribs=True) margin = booster.predict(m, output_margin=True) np.testing.assert_allclose( np.sum(SHAP, axis=len(SHAP.shape) - 1), margin, rtol=1e-3 ) if __name__ == "__main__": main() xgboost-3.0.0/demo/guide-python/continuation.py000066400000000000000000000074441476511272400216430ustar00rootroot00000000000000""" Demo for training continuation ============================== """ import os import pickle import tempfile from sklearn.datasets import load_breast_cancer import xgboost def training_continuation(tmpdir: str, use_pickle: bool) -> None: """Basic training continuation.""" # Train 128 iterations in 1 session X, y = load_breast_cancer(return_X_y=True) clf = xgboost.XGBClassifier(n_estimators=128, eval_metric="logloss") clf.fit(X, y, eval_set=[(X, y)]) print("Total boosted rounds:", clf.get_booster().num_boosted_rounds()) # Train 128 iterations in 2 sessions, with the first one running for 32 iterations and # the second one running for 96 iterations clf = xgboost.XGBClassifier(n_estimators=32, eval_metric="logloss") clf.fit(X, y, eval_set=[(X, y)]) assert clf.get_booster().num_boosted_rounds() == 32 # load the model back; this could be a checkpoint if use_pickle: path = os.path.join(tmpdir, "model-first-32.pkl") with open(path, "wb") as fd: pickle.dump(clf, fd) with open(path, "rb") as fd: loaded = pickle.load(fd) else: path = os.path.join(tmpdir, "model-first-32.json") clf.save_model(path) loaded = xgboost.XGBClassifier() loaded.load_model(path) clf = xgboost.XGBClassifier(n_estimators=128 - 32, eval_metric="logloss") clf.fit(X, y, eval_set=[(X, y)], xgb_model=loaded) print("Total boosted rounds:", clf.get_booster().num_boosted_rounds()) assert clf.get_booster().num_boosted_rounds() == 128 def training_continuation_early_stop(tmpdir: str, use_pickle: bool) -> None: """Training continuation with early stopping.""" early_stopping_rounds = 5 early_stop = xgboost.callback.EarlyStopping( rounds=early_stopping_rounds, save_best=True ) n_estimators = 512 X, y = load_breast_cancer(return_X_y=True) clf = xgboost.XGBClassifier( n_estimators=n_estimators, eval_metric="logloss", callbacks=[early_stop] ) clf.fit(X, y, eval_set=[(X, y)]) print("Total boosted rounds:", clf.get_booster().num_boosted_rounds()) best = clf.best_iteration # Train 512 iterations in 2 sessions, with the first one running for 128 iterations and # the second one running until early stopping kicks in.
clf = xgboost.XGBClassifier( n_estimators=128, eval_metric="logloss", callbacks=[early_stop] ) # Reinitialize the early stop callback early_stop = xgboost.callback.EarlyStopping( rounds=early_stopping_rounds, save_best=True ) clf.set_params(callbacks=[early_stop]) clf.fit(X, y, eval_set=[(X, y)]) assert clf.get_booster().num_boosted_rounds() == 128 # load back the model, this could be a checkpoint if use_pickle: path = os.path.join(tmpdir, "model-first-128.pkl") with open(path, "wb") as fd: pickle.dump(clf, fd) with open(path, "rb") as fd: loaded = pickle.load(fd) else: path = os.path.join(tmpdir, "model-first-128.json") clf.save_model(path) loaded = xgboost.XGBClassifier() loaded.load_model(path) early_stop = xgboost.callback.EarlyStopping( rounds=early_stopping_rounds, save_best=True ) clf = xgboost.XGBClassifier( n_estimators=n_estimators - 128, eval_metric="logloss", callbacks=[early_stop] ) clf.fit( X, y, eval_set=[(X, y)], xgb_model=loaded, ) print("Total boosted rounds:", clf.get_booster().num_boosted_rounds()) assert clf.best_iteration == best if __name__ == "__main__": with tempfile.TemporaryDirectory() as tmpdir: training_continuation_early_stop(tmpdir, False) training_continuation_early_stop(tmpdir, True) training_continuation(tmpdir, True) training_continuation(tmpdir, False) xgboost-3.0.0/demo/guide-python/cross_validation.py000066400000000000000000000047511476511272400224740ustar00rootroot00000000000000""" Demo for using cross validation =============================== """ import os import numpy as np import xgboost as xgb # load data in do training CURRENT_DIR = os.path.dirname(__file__) dtrain = xgb.DMatrix( os.path.join(CURRENT_DIR, "../data/agaricus.txt.train?format=libsvm") ) param = {"max_depth": 2, "eta": 1, "objective": "binary:logistic"} num_round = 2 print("running cross validation") # do cross validation, this will print result out as # [iteration] metric_name:mean_value+std_value # std_value is standard deviation of the metric xgb.cv( param, dtrain, num_round, nfold=5, metrics={"error"}, seed=0, callbacks=[xgb.callback.EvaluationMonitor(show_stdv=True)], ) print("running cross validation, disable standard deviation display") # do cross validation, this will print result out as # [iteration] metric_name:mean_value res = xgb.cv( param, dtrain, num_boost_round=10, nfold=5, metrics={"error"}, seed=0, callbacks=[ xgb.callback.EvaluationMonitor(show_stdv=False), xgb.callback.EarlyStopping(3), ], ) print(res) print("running cross validation, with preprocessing function") # define the preprocessing function # used to return the preprocessed training, test data, and parameter # we can use this to do weight rescale, etc. 
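# (Added note, not part of the original demo: scale_pos_weight is conventionally
# set to count(negative) / count(positive); recomputing it inside fpreproc keeps
# the weighting consistent with each fold's actual class balance.)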
# as an example, we try to set scale_pos_weight def fpreproc(dtrain, dtest, param): label = dtrain.get_label() ratio = float(np.sum(label == 0)) / np.sum(label == 1) param["scale_pos_weight"] = ratio return (dtrain, dtest, param) # do cross validation, for each fold # the dtrain, dtest, param will be passed into fpreproc # then the return value of fpreproc will be used to generate # results of that fold xgb.cv(param, dtrain, num_round, nfold=5, metrics={"auc"}, seed=0, fpreproc=fpreproc) ### # you can also do cross validation with a customized loss function # See custom_objective.py ## print("running cross validation, with customized loss function") def logregobj(preds, dtrain): labels = dtrain.get_label() preds = 1.0 / (1.0 + np.exp(-preds)) grad = preds - labels hess = preds * (1.0 - preds) return grad, hess def evalerror(preds, dtrain): labels = dtrain.get_label() preds = 1.0 / (1.0 + np.exp(-preds)) return "error", float(sum(labels != (preds > 0.5))) / len(labels) param = {"max_depth": 2, "eta": 1} # train with customized objective xgb.cv( param, dtrain, num_round, nfold=5, seed=0, obj=logregobj, custom_metric=evalerror ) xgboost-3.0.0/demo/guide-python/custom_rmsle.py000066400000000000000000000144621476511272400216450ustar00rootroot00000000000000""" Demo for defining a custom regression objective and metric ========================================================== Demo for defining a customized metric and objective. Note that for simplicity, weights are not used in the following example. In this script, we implement the Squared Log Error (SLE) objective and RMSLE metric as customized functions, then compare them with the native implementation in XGBoost. See :doc:`/tutorials/custom_metric_obj` for a step by step walkthrough, with other details. The `SLE` objective reduces the impact of outliers in the training dataset, hence here we also compare its performance with the standard squared error. """ import argparse from time import time from typing import Dict, List, Tuple import matplotlib import numpy as np from matplotlib import pyplot as plt import xgboost as xgb # shape of generated data. kRows = 4096 kCols = 16 kOutlier = 10000 # mean of generated outliers kNumberOfOutliers = 64 kRatio = 0.7 kSeed = 1994 kBoostRound = 20 np.random.seed(seed=kSeed) def generate_data() -> Tuple[xgb.DMatrix, xgb.DMatrix]: '''Generate data containing outliers.''' x = np.random.randn(kRows, kCols) y = np.random.randn(kRows) y += np.abs(np.min(y)) # Create outliers for i in range(0, kNumberOfOutliers): ind = np.random.randint(0, len(y)-1) y[ind] += np.random.randint(0, kOutlier) train_portion = int(kRows * kRatio) # rmsle requires all labels to be greater than -1.
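# (Added illustration, not part of the original demo: log1p(x) is only defined
# for x > -1, so labels or predictions at or below -1 would turn the SLE
# objective and the RMSLE metric below into -inf/nan. For instance:
#   np.log1p(-0.5)  # -0.693..., fine
#   np.log1p(-1.0)  # -inf; anything below -1 yields nan
# )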
assert np.all(y > -1.0) train_x: np.ndarray = x[: train_portion] train_y: np.ndarray = y[: train_portion] dtrain = xgb.DMatrix(train_x, label=train_y) test_x = x[train_portion:] test_y = y[train_portion:] dtest = xgb.DMatrix(test_x, label=test_y) return dtrain, dtest def native_rmse(dtrain: xgb.DMatrix, dtest: xgb.DMatrix) -> Dict[str, Dict[str, List[float]]]: '''Train using native implementation of Root Mean Squared Loss.''' print('Squared Error') squared_error = { 'objective': 'reg:squarederror', 'eval_metric': 'rmse', 'tree_method': 'hist', 'seed': kSeed } start = time() results: Dict[str, Dict[str, List[float]]] = {} xgb.train(squared_error, dtrain=dtrain, num_boost_round=kBoostRound, evals=[(dtrain, 'dtrain'), (dtest, 'dtest')], evals_result=results) print('Finished Squared Error in:', time() - start, '\n') return results def native_rmsle(dtrain: xgb.DMatrix, dtest: xgb.DMatrix) -> Dict[str, Dict[str, List[float]]]: '''Train using native implementation of Squared Log Error.''' print('Squared Log Error') results: Dict[str, Dict[str, List[float]]] = {} squared_log_error = { 'objective': 'reg:squaredlogerror', 'eval_metric': 'rmsle', 'tree_method': 'hist', 'seed': kSeed } start = time() xgb.train(squared_log_error, dtrain=dtrain, num_boost_round=kBoostRound, evals=[(dtrain, 'dtrain'), (dtest, 'dtest')], evals_result=results) print('Finished Squared Log Error in:', time() - start) return results def py_rmsle(dtrain: xgb.DMatrix, dtest: xgb.DMatrix) -> Dict: '''Train using Python implementation of Squared Log Error.''' def gradient(predt: np.ndarray, dtrain: xgb.DMatrix) -> np.ndarray: '''Compute the gradient squared log error.''' y = dtrain.get_label() return (np.log1p(predt) - np.log1p(y)) / (predt + 1) def hessian(predt: np.ndarray, dtrain: xgb.DMatrix) -> np.ndarray: '''Compute the hessian for squared log error.''' y = dtrain.get_label() return ((-np.log1p(predt) + np.log1p(y) + 1) / np.power(predt + 1, 2)) def squared_log(predt: np.ndarray, dtrain: xgb.DMatrix) -> Tuple[np.ndarray, np.ndarray]: '''Squared Log Error objective. A simplified version for RMSLE used as objective function. :math:`\frac{1}{2}[log(pred + 1) - log(label + 1)]^2` ''' predt[predt < -1] = -1 + 1e-6 grad = gradient(predt, dtrain) hess = hessian(predt, dtrain) return grad, hess def rmsle(predt: np.ndarray, dtrain: xgb.DMatrix) -> Tuple[str, float]: ''' Root mean squared log error metric. 
:math:`\sqrt{\frac{1}{N}[log(pred + 1) - log(label + 1)]^2}` ''' y = dtrain.get_label() predt[predt < -1] = -1 + 1e-6 elements = np.power(np.log1p(y) - np.log1p(predt), 2) return 'PyRMSLE', float(np.sqrt(np.sum(elements) / len(y))) results: Dict[str, Dict[str, List[float]]] = {} xgb.train({'tree_method': 'hist', 'seed': kSeed, 'disable_default_eval_metric': 1}, dtrain=dtrain, num_boost_round=kBoostRound, obj=squared_log, custom_metric=rmsle, evals=[(dtrain, 'dtrain'), (dtest, 'dtest')], evals_result=results) return results def plot_history(rmse_evals, rmsle_evals, py_rmsle_evals): fig, axs = plt.subplots(3, 1) ax0: matplotlib.axes.Axes = axs[0] ax1: matplotlib.axes.Axes = axs[1] ax2: matplotlib.axes.Axes = axs[2] x = np.arange(0, kBoostRound, 1) ax0.plot(x, rmse_evals['dtrain']['rmse'], label='train-RMSE') ax0.plot(x, rmse_evals['dtest']['rmse'], label='test-RMSE') ax0.legend() ax1.plot(x, rmsle_evals['dtrain']['rmsle'], label='train-native-RMSLE') ax1.plot(x, rmsle_evals['dtest']['rmsle'], label='test-native-RMSLE') ax1.legend() ax2.plot(x, py_rmsle_evals['dtrain']['PyRMSLE'], label='train-PyRMSLE') ax2.plot(x, py_rmsle_evals['dtest']['PyRMSLE'], label='test-PyRMSLE') ax2.legend() def main(args): dtrain, dtest = generate_data() rmse_evals = native_rmse(dtrain, dtest) rmsle_evals = native_rmsle(dtrain, dtest) py_rmsle_evals = py_rmsle(dtrain, dtest) if args.plot != 0: plot_history(rmse_evals, rmsle_evals, py_rmsle_evals) plt.show() if __name__ == "__main__": parser = argparse.ArgumentParser( description='Arguments for custom RMSLE objective function demo.') parser.add_argument( '--plot', type=int, default=1, help='Set to 0 to disable plotting the evaluation history.') args = parser.parse_args() main(args) xgboost-3.0.0/demo/guide-python/custom_softmax.py000066400000000000000000000136731476511272400222070ustar00rootroot00000000000000''' Demo for creating customized multi-class objective function =========================================================== This demo is only applicable after (excluding) XGBoost 1.0.0, as before this version XGBoost returns transformed prediction for multi-class objective function. More details in comments. See :doc:`/tutorials/custom_metric_obj` and :doc:`/tutorials/advanced_custom_obj` for detailed tutorial and notes. ''' import argparse import numpy as np from matplotlib import pyplot as plt import xgboost as xgb np.random.seed(1994) kRows = 100 kCols = 10 kClasses = 4 # number of classes kRounds = 10 # number of boosting rounds. # Generate some random data for demo. X = np.random.randn(kRows, kCols) y = np.random.randint(0, 4, size=kRows) m = xgb.DMatrix(X, y) def softmax(x): '''Softmax function with x as input vector.''' e = np.exp(x) return e / np.sum(e) def softprob_obj(predt: np.ndarray, data: xgb.DMatrix): '''Loss function. Computing the gradient and upper bound on the Hessian with a diagonal structure for XGBoost (note that this is not the true Hessian). Reimplements the `multi:softprob` inside XGBoost. ''' labels = data.get_label() if data.get_weight().size == 0: # Use 1 as weight if we don't have custom weight. weights = np.ones((kRows, 1), dtype=float) else: weights = data.get_weight() # The prediction is of shape (rows, classes), each element in a row # represents a raw prediction (leaf weight, hasn't gone through softmax # yet). In XGBoost 1.0.0, the prediction is transformed by a softmax # function, fixed in later versions. 
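# (Added illustration, not part of the original demo: with p = softmax(x), the
# gradient of the cross-entropy loss -log(p[target]) w.r.t. the raw score x[c]
# is p[c] - 1 for c == target and p[c] otherwise, which is what the loop below
# computes. A quick finite-difference check of the formula:
#   x = np.array([0.5, -0.2, 0.1, 0.3]); t = 2; eps = 1e-6
#   loss = lambda v: -np.log(softmax(v)[t])
#   num = np.array([(loss(x + eps * np.eye(4)[i]) - loss(x)) / eps for i in range(4)])
#   np.testing.assert_allclose(num, softmax(x) - np.eye(4)[t], rtol=1e-3)
# )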
assert predt.shape == (kRows, kClasses) grad = np.zeros((kRows, kClasses), dtype=float) hess = np.zeros((kRows, kClasses), dtype=float) eps = 1e-6 # compute the gradient and hessian upper bound, slow iterations in Python, only # suitable for demo. Also the one in native XGBoost core is more robust to # numeric overflow as we don't do anything to mitigate the `exp` in # `softmax` here. for r in range(predt.shape[0]): target = labels[r] p = softmax(predt[r, :]) for c in range(predt.shape[1]): assert 0 <= target < kClasses g = p[c] - 1.0 if c == target else p[c] g = g * weights[r] h = max((2.0 * p[c] * (1.0 - p[c]) * weights[r]).item(), eps) grad[r, c] = g hess[r, c] = h # After 2.1.0, pass the gradient as it is. return grad, hess def predict(booster: xgb.Booster, X): '''A customized prediction function that converts raw prediction to target class. ''' # Output margin means we want to obtain the raw prediction obtained from # tree leaf weight. predt = booster.predict(X, output_margin=True) out = np.zeros(kRows) for r in range(predt.shape[0]): # the class with maximum prob (not strictly prob as it hasn't gone # through softmax yet so it doesn't sum to 1, but the result is the same # for argmax). i = np.argmax(predt[r]) out[r] = i return out def merror(predt: np.ndarray, dtrain: xgb.DMatrix): y = dtrain.get_label() # Like custom objective, the predt is untransformed leaf weight when custom objective # is provided. # With the use of `custom_metric` parameter in train function, custom metric receives # raw input only when custom objective is also being used. Otherwise custom metric # will receive transformed prediction. assert predt.shape == (kRows, kClasses) out = np.zeros(kRows) for r in range(predt.shape[0]): i = np.argmax(predt[r]) out[r] = i assert y.shape == out.shape errors = np.zeros(kRows) errors[y != out] = 1.0 return 'PyMError', np.sum(errors) / kRows def plot_history(custom_results, native_results): fig, axs = plt.subplots(2, 1) ax0 = axs[0] ax1 = axs[1] pymerror = custom_results['train']['PyMError'] merror = native_results['train']['merror'] x = np.arange(0, kRounds, 1) ax0.plot(x, pymerror, label='Custom objective') ax0.legend() ax1.plot(x, merror, label='multi:softmax') ax1.legend() plt.show() def main(args): custom_results = {} # Use our custom objective function booster_custom = xgb.train({'num_class': kClasses, 'disable_default_eval_metric': True}, m, num_boost_round=kRounds, obj=softprob_obj, custom_metric=merror, evals_result=custom_results, evals=[(m, 'train')]) predt_custom = predict(booster_custom, m) native_results = {} # Use the same objective function defined in XGBoost. booster_native = xgb.train({'num_class': kClasses, "objective": "multi:softmax", 'eval_metric': 'merror'}, m, num_boost_round=kRounds, evals_result=native_results, evals=[(m, 'train')]) predt_native = booster_native.predict(m) # We are reimplementing the loss function in XGBoost, so it should # be the same for normal cases.
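# (Added note, not part of the original demo: exact equality is expected here
# because both boosters see identical data and use the same gradient formulas;
# if floating-point noise ever flipped an argmax, comparing the raw margins with
# a tolerance would be the defensive alternative.)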
assert np.all(predt_custom == predt_native) np.testing.assert_allclose(custom_results['train']['PyMError'], native_results['train']['merror']) if args.plot != 0: plot_history(custom_results, native_results) if __name__ == '__main__': parser = argparse.ArgumentParser( description='Arguments for custom softmax objective function demo.') parser.add_argument( '--plot', type=int, default=1, help='Set to 0 to disable plotting the evaluation history.') args = parser.parse_args() main(args) xgboost-3.0.0/demo/guide-python/distributed_extmem_basic.py000066400000000000000000000171521476511272400241720ustar00rootroot00000000000000""" Experimental support for distributed training with external memory ================================================================== .. versionadded:: 3.0.0 See :doc:`the tutorial ` for more details. To run the example, following packages in addition to XGBoost native dependencies are required: - scikit-learn - loky If `device` is `cuda`, following are also needed: - cupy - python-cuda - rmm """ import argparse import multiprocessing as mp import os import sys import tempfile import traceback from functools import partial, update_wrapper, wraps from typing import Callable, List, ParamSpec, Tuple, TypeVar import numpy as np from loky import get_reusable_executor from sklearn.datasets import make_regression import xgboost from xgboost import collective as coll from xgboost.tracker import RabitTracker def make_batches( n_samples_per_batch: int, n_features: int, n_batches: int, tmpdir: str, rank: int ) -> List[Tuple[str, str]]: files: List[Tuple[str, str]] = [] rng = np.random.RandomState(rank) for i in range(n_batches): X, y = make_regression(n_samples_per_batch, n_features, random_state=rng) X_path = os.path.join(tmpdir, f"X-r{rank}-{i}.npy") y_path = os.path.join(tmpdir, f"y-r{rank}-{i}.npy") np.save(X_path, X) np.save(y_path, y) files.append((X_path, y_path)) return files class Iterator(xgboost.DataIter): """A custom iterator for loading files in batches.""" def __init__(self, device: str, file_paths: List[Tuple[str, str]]) -> None: self.device = device self._file_paths = file_paths self._it = 0 # XGBoost will generate some cache files under the current directory with the # prefix "cache" super().__init__(cache_prefix=os.path.join(".", "cache")) def load_file(self) -> Tuple[np.ndarray, np.ndarray]: """Load a single batch of data.""" X_path, y_path = self._file_paths[self._it] # When the `ExtMemQuantileDMatrix` is used, the device must match. GPU cannot # consume CPU input data and vice-versa. if self.device == "cpu": X = np.load(X_path) y = np.load(y_path) else: X = cp.load(X_path) y = cp.load(y_path) assert X.shape[0] == y.shape[0] return X, y def next(self, input_data: Callable) -> bool: """Advance the iterator by 1 step and pass the data to XGBoost. This function is called by XGBoost during the construction of ``DMatrix`` """ if self._it == len(self._file_paths): # return False to let XGBoost know this is the end of iteration return False # input_data is a keyword-only function passed in by XGBoost and has the similar # signature to the ``DMatrix`` constructor. X, y = self.load_file() input_data(data=X, label=y) self._it += 1 return True def reset(self) -> None: """Reset the iterator to its beginning""" self._it = 0 def setup_rmm() -> None: """Setup RMM for GPU-based external memory training. It's important to use RMM with `CudaAsyncMemoryResource` or `ArenaMemoryResource` for GPU-based external memory to improve performance. 
If XGBoost is not built with RMM support, a warning is raised when constructing the `DMatrix`. """ import rmm from cuda import cudart from rmm.allocators.cupy import rmm_cupy_allocator from rmm.mr import ArenaMemoryResource if not xgboost.build_info()["USE_RMM"]: return status, free, total = cudart.cudaMemGetInfo() if status != cudart.cudaError_t.cudaSuccess: raise RuntimeError(cudart.cudaGetErrorString(status)) mr = rmm.mr.CudaMemoryResource() mr = ArenaMemoryResource(mr, arena_size=int(total * 0.9)) rmm.mr.set_current_device_resource(mr) # Set the allocator for cupy as well. cp.cuda.set_allocator(rmm_cupy_allocator) R = TypeVar("R") P = ParamSpec("P") def try_run(fn: Callable[P, R]) -> Callable[P, R]: """Loky aborts the process without printing out any error message if there's an exception. """ @wraps(fn) def inner(*args: P.args, **kwargs: P.kwargs) -> R: try: return fn(*args, **kwargs) except Exception as e: print(traceback.format_exc(), file=sys.stderr) raise RuntimeError("Running into exception in worker.") from e return inner @try_run def hist_train(worker_idx: int, tmpdir: str, device: str, rabit_args: dict) -> None: """The hist tree method can use a special data structure `ExtMemQuantileDMatrix` for faster initialization and lower memory usage. """ # Make sure XGBoost is using RMM for all allocations. with coll.CommunicatorContext(**rabit_args), xgboost.config_context(use_rmm=True): # Generate the data for demonstration. The synthetic data is sharded by workers. files = make_batches( n_samples_per_batch=4096, n_features=16, n_batches=17, tmpdir=tmpdir, rank=coll.get_rank(), ) # Since we are running two workers on a single node, we should divide the number # of threads between workers. n_threads = os.cpu_count() assert n_threads is not None n_threads = max(n_threads // coll.get_world_size(), 1) it = Iterator(device, files) Xy = xgboost.ExtMemQuantileDMatrix( it, missing=np.nan, enable_categorical=False, nthread=n_threads ) # Check the device is correctly set. if device == "cuda": # Check the first device assert ( int(os.environ["CUDA_VISIBLE_DEVICES"].split(",")[0]) < coll.get_world_size() ) booster = xgboost.train( { "tree_method": "hist", "max_depth": 4, "device": it.device, "nthread": n_threads, }, Xy, evals=[(Xy, "Train")], num_boost_round=10, ) booster.predict(Xy) def main(tmpdir: str, args: argparse.Namespace) -> None: n_workers = 2 tracker = RabitTracker(host_ip="127.0.0.1", n_workers=n_workers) tracker.start() rabit_args = tracker.worker_args() def initializer(device: str) -> None: # Set CUDA device before launching child processes. if device == "cuda": # name: LokyProcess-1 lop, sidx = mp.current_process().name.split("-") idx = int(sidx) - 1 # 1-based indexing from loky # Assuming two workers for demo.
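            # The next line builds a wrapped-around device list starting at this
            # worker's index, so each process sees every GPU but uses a different
            # primary (first) device.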
devices = ",".join([str(idx), str((idx + 1) % n_workers)]) # P0: CUDA_VISIBLE_DEVICES=0,1 # P1: CUDA_VISIBLE_DEVICES=1,0 os.environ["CUDA_VISIBLE_DEVICES"] = devices setup_rmm() with get_reusable_executor( max_workers=n_workers, initargs=(args.device,), initializer=initializer ) as pool: # Poor man's currying fn = update_wrapper( partial( hist_train, tmpdir=tmpdir, device=args.device, rabit_args=rabit_args ), hist_train, ) pool.map(fn, range(n_workers)) if __name__ == "__main__": parser = argparse.ArgumentParser() parser.add_argument("--device", choices=["cpu", "cuda"], default="cpu") args = parser.parse_args() if args.device == "cuda": import cupy as cp with tempfile.TemporaryDirectory() as tmpdir: main(tmpdir, args) else: with tempfile.TemporaryDirectory() as tmpdir: main(tmpdir, args) xgboost-3.0.0/demo/guide-python/evals_result.py000066400000000000000000000021401476511272400216270ustar00rootroot00000000000000""" This script demonstrate how to access the eval metrics ====================================================== """ import os import xgboost as xgb CURRENT_DIR = os.path.dirname(__file__) dtrain = xgb.DMatrix( os.path.join(CURRENT_DIR, "../data/agaricus.txt.train?format=libsvm") ) dtest = xgb.DMatrix( os.path.join(CURRENT_DIR, "../data/agaricus.txt.test?format=libsvm") ) param = [ ("max_depth", 2), ("objective", "binary:logistic"), ("eval_metric", "logloss"), ("eval_metric", "error"), ] num_round = 2 watchlist = [(dtest, "eval"), (dtrain, "train")] evals_result = {} bst = xgb.train(param, dtrain, num_round, watchlist, evals_result=evals_result) print("Access logloss metric directly from evals_result:") print(evals_result["eval"]["logloss"]) print("") print("Access metrics through a loop:") for e_name, e_mtrs in evals_result.items(): print("- {}".format(e_name)) for e_mtr_name, e_mtr_vals in e_mtrs.items(): print(" - {}".format(e_mtr_name)) print(" - {}".format(e_mtr_vals)) print("") print("Access complete dictionary:") print(evals_result) xgboost-3.0.0/demo/guide-python/external_memory.py000066400000000000000000000137711476511272400223450ustar00rootroot00000000000000""" Experimental support for external memory ======================================== This is similar to the one in `quantile_data_iterator.py`, but for external memory instead of Quantile DMatrix. The feature is not ready for production use yet. .. versionadded:: 1.5.0 See :doc:`the tutorial ` for more details. .. versionchanged:: 3.0.0 Added :py:class:`~xgboost.ExtMemQuantileDMatrix`. To run the example, following packages in addition to XGBoost native dependencies are required: - scikit-learn If `device` is `cuda`, following are also needed: - cupy - rmm - python-cuda .. 
seealso:: :ref:`sphx_glr_python_examples_distributed_extmem_basic.py` """ import argparse import os import tempfile from typing import Callable, List, Tuple import numpy as np from sklearn.datasets import make_regression import xgboost def make_batches( n_samples_per_batch: int, n_features: int, n_batches: int, tmpdir: str, ) -> List[Tuple[str, str]]: files: List[Tuple[str, str]] = [] rng = np.random.RandomState(1994) for i in range(n_batches): X, y = make_regression(n_samples_per_batch, n_features, random_state=rng) X_path = os.path.join(tmpdir, "X-" + str(i) + ".npy") y_path = os.path.join(tmpdir, "y-" + str(i) + ".npy") np.save(X_path, X) np.save(y_path, y) files.append((X_path, y_path)) return files class Iterator(xgboost.DataIter): """A custom iterator for loading files in batches.""" def __init__(self, device: str, file_paths: List[Tuple[str, str]]) -> None: self.device = device self._file_paths = file_paths self._it = 0 # XGBoost will generate some cache files under the current directory with the # prefix "cache" super().__init__(cache_prefix=os.path.join(".", "cache")) def load_file(self) -> Tuple[np.ndarray, np.ndarray]: """Load a single batch of data.""" X_path, y_path = self._file_paths[self._it] # When the `ExtMemQuantileDMatrix` is used, the device must match. GPU cannot # consume CPU input data and vice-versa. if self.device == "cpu": X = np.load(X_path) y = np.load(y_path) else: X = cp.load(X_path) y = cp.load(y_path) assert X.shape[0] == y.shape[0] return X, y def next(self, input_data: Callable) -> bool: """Advance the iterator by 1 step and pass the data to XGBoost. This function is called by XGBoost during the construction of ``DMatrix`` """ if self._it == len(self._file_paths): # return False to let XGBoost know this is the end of iteration return False # input_data is a keyword-only function passed in by XGBoost and has the similar # signature to the ``DMatrix`` constructor. X, y = self.load_file() input_data(data=X, label=y) self._it += 1 return True def reset(self) -> None: """Reset the iterator to its beginning""" self._it = 0 def hist_train(it: Iterator) -> None: """The hist tree method can use a special data structure `ExtMemQuantileDMatrix` for faster initialization and lower memory usage. .. versionadded:: 3.0.0 """ # For non-data arguments, specify it here once instead of passing them by the `next` # method. Xy = xgboost.ExtMemQuantileDMatrix(it, missing=np.nan, enable_categorical=False) booster = xgboost.train( {"tree_method": "hist", "max_depth": 4, "device": it.device}, Xy, evals=[(Xy, "Train")], num_boost_round=10, ) booster.predict(Xy) def approx_train(it: Iterator) -> None: """The approx tree method uses the basic `DMatrix`.""" # For non-data arguments, specify it here once instead of passing them by the `next` # method. Xy = xgboost.DMatrix(it, missing=np.nan, enable_categorical=False) # ``approx`` is also supported, but less efficient due to sketching. It's # recommended to use `hist` instead. booster = xgboost.train( {"tree_method": "approx", "max_depth": 4, "device": it.device}, Xy, evals=[(Xy, "Train")], num_boost_round=10, ) booster.predict(Xy) def main(tmpdir: str, args: argparse.Namespace) -> None: """Entry point for training.""" # generate some random data for demo files = make_batches( n_samples_per_batch=1024, n_features=17, n_batches=31, tmpdir=tmpdir ) it = Iterator(args.device, files) hist_train(it) approx_train(it) def setup_rmm() -> None: """Setup RMM for GPU-based external memory training. 
It's important to use RMM with `CudaAsyncMemoryResource` or `ArenaMemoryResource` for GPU-based external memory to improve performance. If XGBoost is not built with RMM support, a warning is raised when constructing the `DMatrix`. """ import rmm from cuda import cudart from rmm.allocators.cupy import rmm_cupy_allocator from rmm.mr import ArenaMemoryResource if not xgboost.build_info()["USE_RMM"]: return status, free, total = cudart.cudaMemGetInfo() if status != cudart.cudaError_t.cudaSuccess: raise RuntimeError(cudart.cudaGetErrorString(status)) mr = rmm.mr.CudaMemoryResource() mr = ArenaMemoryResource(mr, arena_size=int(total * 0.9)) rmm.mr.set_current_device_resource(mr) # Set the allocator for cupy as well. cp.cuda.set_allocator(rmm_cupy_allocator) if __name__ == "__main__": parser = argparse.ArgumentParser() parser.add_argument("--device", choices=["cpu", "cuda"], default="cpu") args = parser.parse_args() if args.device == "cuda": import cupy as cp setup_rmm() # Make sure XGBoost is using RMM for all allocations. with xgboost.config_context(use_rmm=True): with tempfile.TemporaryDirectory() as tmpdir: main(tmpdir, args) else: with tempfile.TemporaryDirectory() as tmpdir: main(tmpdir, args) xgboost-3.0.0/demo/guide-python/feature_weights.py000066400000000000000000000025471476511272400223170ustar00rootroot00000000000000""" Demo for using feature weight to change column sampling ======================================================= .. versionadded:: 1.3.0 """ import argparse import numpy as np from matplotlib import pyplot as plt import xgboost def main(args: argparse.Namespace) -> None: rng = np.random.RandomState(1994) kRows = 4196 kCols = 10 X = rng.randn(kRows, kCols) y = rng.randn(kRows) fw = np.ones(shape=(kCols,)) for i in range(kCols): fw[i] *= float(i) dtrain = xgboost.DMatrix(X, y) dtrain.set_info(feature_weights=fw) # Perform column sampling for each node split evaluation, the sampling process is # weighted by feature weights. bst = xgboost.train( {"tree_method": "hist", "colsample_bynode": 0.2}, dtrain, num_boost_round=10, evals=[(dtrain, "d")], ) feature_map = bst.get_fscore() # feature zero has 0 weight assert feature_map.get("f0", None) is None assert max(feature_map.values()) == feature_map.get("f9") if args.plot: xgboost.plot_importance(bst) plt.show() if __name__ == "__main__": parser = argparse.ArgumentParser() parser.add_argument( "--plot", type=int, default=1, help="Set to 0 to disable plotting the evaluation history.", ) args = parser.parse_args() main(args) xgboost-3.0.0/demo/guide-python/gamma_regression.py000066400000000000000000000021121476511272400224400ustar00rootroot00000000000000""" Demo for gamma regression ========================= """ import numpy as np import xgboost as xgb # this script demonstrates how to fit gamma regression model (with log link function) # in xgboost, before running the demo you need to generate the autoclaims dataset # by running gen_autoclaims.R located in xgboost/demo/data. 
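# Added sketch (helper not part of the original demo): the gamma deviance used
# for evaluation at the end of this script, with predicted mean mu:
# D = 2 * sum((y - mu) / mu - log(y / mu)).
def _gamma_deviance(y, mu):
    """Unit-weight gamma deviance; expanded term by term in the final print."""
    return 2 * np.sum((y - mu) / mu - np.log(y / mu))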
data = np.genfromtxt('../data/autoclaims.csv', delimiter=',') dtrain = xgb.DMatrix(data[0:4741, 0:34], data[0:4741, 34]) dtest = xgb.DMatrix(data[4741:6773, 0:34], data[4741:6773, 34]) # for gamma regression, we need to set the objective to 'reg:gamma'; it is also # suggested to set base_score to a value between 1 and 5 if the number of iterations is small param = {'objective':'reg:gamma', 'booster':'gbtree', 'base_score':3} # the rest of the settings are the same watchlist = [(dtest, 'eval'), (dtrain, 'train')] num_round = 30 # training and evaluation bst = xgb.train(param, dtrain, num_round, watchlist) preds = bst.predict(dtest) labels = dtest.get_label() print('test deviance=%f' % (2 * np.sum((labels - preds) / preds - np.log(labels) + np.log(preds)))) xgboost-3.0.0/demo/guide-python/generalized_linear_model.py000066400000000000000000000026211476511272400241260ustar00rootroot00000000000000""" Demo for GLM ============ """ import os import xgboost as xgb ## # this script demonstrates how to fit a generalized linear model in xgboost # basically, we are using a linear model instead of trees for our boosters ## CURRENT_DIR = os.path.dirname(__file__) dtrain = xgb.DMatrix( os.path.join(CURRENT_DIR, "../data/agaricus.txt.train?format=libsvm") ) dtest = xgb.DMatrix( os.path.join(CURRENT_DIR, "../data/agaricus.txt.test?format=libsvm") ) # change booster to gblinear, so that we are fitting a linear model # alpha is the L1 regularizer # lambda is the L2 regularizer # you can also set lambda_bias which is the L2 regularizer on the bias term param = { "objective": "binary:logistic", "booster": "gblinear", "alpha": 0.0001, "lambda": 1, } # normally, you do not need to set eta (step_size) # XGBoost uses a parallel coordinate descent algorithm (shotgun); # parallelization may affect convergence in certain cases # setting eta to a smaller value, e.g. 0.5, can make the optimization more stable # param['eta'] = 1 ## # the rest of the settings are the same ## watchlist = [(dtest, "eval"), (dtrain, "train")] num_round = 4 bst = xgb.train(param, dtrain, num_round, watchlist) preds = bst.predict(dtest) labels = dtest.get_label() print( "error=%f" % ( sum(1 for i in range(len(preds)) if int(preds[i] > 0.5) != labels[i]) / float(len(preds)) ) ) xgboost-3.0.0/demo/guide-python/individual_trees.py000066400000000000000000000064611476511272400224630ustar00rootroot00000000000000""" Demo for prediction using individual trees and model slices =========================================================== """ import os import numpy as np from scipy.special import logit from sklearn.datasets import load_svmlight_file import xgboost as xgb CURRENT_DIR = os.path.dirname(__file__) train = os.path.join(CURRENT_DIR, "../data/agaricus.txt.train") test = os.path.join(CURRENT_DIR, "../data/agaricus.txt.test") def individual_tree() -> None: """Get prediction from each individual tree and combine them together.""" X_train, y_train = load_svmlight_file(train) X_test, y_test = load_svmlight_file(test) Xy_train = xgb.QuantileDMatrix(X_train, y_train) n_rounds = 4 # Specify the base score, otherwise xgboost will estimate one from the training # data.
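    # How the reconstruction below works (added note): with a fixed base_score,
    # the raw margin accumulates additively across trees, so the full model's
    # output can be rebuilt one tree at a time by feeding the running margin
    # back through `base_margin`.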
base_score = 0.5 params = { "max_depth": 2, "eta": 1, "objective": "reg:logistic", "tree_method": "hist", "base_score": base_score, } booster = xgb.train(params, Xy_train, num_boost_round=n_rounds) # Use logit to invert the base score back to the raw leaf value (margin) scores = np.full((X_test.shape[0],), logit(base_score)) for i in range(n_rounds): # - Use output_margin to get raw leaf values # - Use iteration_range to get prediction for only one tree # - Use previous prediction as base margin for the model Xy_test = xgb.DMatrix(X_test, base_margin=scores) if i == n_rounds - 1: # last round, get the transformed prediction scores = booster.predict( Xy_test, iteration_range=(i, i + 1), output_margin=False ) else: # get raw leaf value for accumulation scores = booster.predict( Xy_test, iteration_range=(i, i + 1), output_margin=True ) full = booster.predict(xgb.DMatrix(X_test), output_margin=False) np.testing.assert_allclose(scores, full) def model_slices() -> None: """Inference with each individual tree using model slices.""" X_train, y_train = load_svmlight_file(train) X_test, y_test = load_svmlight_file(test) Xy_train = xgb.QuantileDMatrix(X_train, y_train) n_rounds = 4 # Specify the base score, otherwise xgboost will estimate one from the training # data. base_score = 0.5 params = { "max_depth": 2, "eta": 1, "objective": "reg:logistic", "tree_method": "hist", "base_score": base_score, } booster = xgb.train(params, Xy_train, num_boost_round=n_rounds) trees = [booster[t] for t in range(n_rounds)] # Use logit to invert the base score back to the raw leaf value (margin) scores = np.full((X_test.shape[0],), logit(base_score)) for i, t in enumerate(trees): # Feed previous scores into base margin. Xy_test = xgb.DMatrix(X_test, base_margin=scores) if i == n_rounds - 1: # last round, get the transformed prediction scores = t.predict(Xy_test, output_margin=False) else: # get raw leaf value for accumulation scores = t.predict(Xy_test, output_margin=True) full = booster.predict(xgb.DMatrix(X_test), output_margin=False) np.testing.assert_allclose(scores, full) if __name__ == "__main__": individual_tree() model_slices() xgboost-3.0.0/demo/guide-python/learning_to_rank.py000066400000000000000000000143121476511272400224370ustar00rootroot00000000000000""" Getting started with learning to rank ===================================== .. versionadded:: 2.0.0 This is a demonstration of using XGBoost for learning to rank tasks using the MSLR_10k_letor dataset. For more information about the dataset, please visit its `description page `_. This is a two-part demo: the first part contains a basic example of using XGBoost to train on relevance degree, and the second part simulates click data and enables position-debiasing training. For an overview of learning to rank in XGBoost, please see :doc:`Learning to Rank `. """ from __future__ import annotations import argparse import json import os import pickle as pkl import numpy as np import pandas as pd from sklearn.datasets import load_svmlight_file import xgboost as xgb from xgboost.testing.data import RelDataCV, simulate_clicks, sort_ltr_samples def load_mslr_10k(data_path: str, cache_path: str) -> RelDataCV: """Load the MSLR10k dataset from data_path and cache a pickle object in cache_path. Returns ------- A list of tuples [(X, y, qid), ...].
""" root_path = os.path.expanduser(args.data) cacheroot_path = os.path.expanduser(args.cache) cache_path = os.path.join(cacheroot_path, "MSLR_10K_LETOR.pkl") # Use only the Fold1 for demo: # Train, Valid, Test # {S1,S2,S3}, S4, S5 fold = 1 if not os.path.exists(cache_path): fold_path = os.path.join(root_path, f"Fold{fold}") train_path = os.path.join(fold_path, "train.txt") valid_path = os.path.join(fold_path, "vali.txt") test_path = os.path.join(fold_path, "test.txt") X_train, y_train, qid_train = load_svmlight_file( train_path, query_id=True, dtype=np.float32 ) y_train = y_train.astype(np.int32) qid_train = qid_train.astype(np.int32) X_valid, y_valid, qid_valid = load_svmlight_file( valid_path, query_id=True, dtype=np.float32 ) y_valid = y_valid.astype(np.int32) qid_valid = qid_valid.astype(np.int32) X_test, y_test, qid_test = load_svmlight_file( test_path, query_id=True, dtype=np.float32 ) y_test = y_test.astype(np.int32) qid_test = qid_test.astype(np.int32) data = RelDataCV( train=(X_train, y_train, qid_train), test=(X_test, y_test, qid_test), max_rel=4, ) with open(cache_path, "wb") as fd: pkl.dump(data, fd) with open(cache_path, "rb") as fd: data = pkl.load(fd) return data def ranking_demo(args: argparse.Namespace) -> None: """Demonstration for learning to rank with relevance degree.""" data = load_mslr_10k(args.data, args.cache) # Sort data according to query index X_train, y_train, qid_train = data.train sorted_idx = np.argsort(qid_train) X_train = X_train[sorted_idx] y_train = y_train[sorted_idx] qid_train = qid_train[sorted_idx] X_test, y_test, qid_test = data.test sorted_idx = np.argsort(qid_test) X_test = X_test[sorted_idx] y_test = y_test[sorted_idx] qid_test = qid_test[sorted_idx] ranker = xgb.XGBRanker( tree_method="hist", device="cuda", lambdarank_pair_method="topk", lambdarank_num_pair_per_sample=13, eval_metric=["ndcg@1", "ndcg@8"], ) ranker.fit( X_train, y_train, qid=qid_train, eval_set=[(X_test, y_test)], eval_qid=[qid_test], verbose=True, ) def click_data_demo(args: argparse.Namespace) -> None: """Demonstration for learning to rank with click data.""" data = load_mslr_10k(args.data, args.cache) train, test = simulate_clicks(data) assert test is not None assert train.X.shape[0] == train.click.size assert test.X.shape[0] == test.click.size assert test.score.dtype == np.float32 assert test.click.dtype == np.int32 X_train, clicks_train, y_train, qid_train = sort_ltr_samples( train.X, train.y, train.qid, train.click, train.pos, ) X_test, clicks_test, y_test, qid_test = sort_ltr_samples( test.X, test.y, test.qid, test.click, test.pos, ) class ShowPosition(xgb.callback.TrainingCallback): def after_iteration( self, model: xgb.Booster, epoch: int, evals_log: xgb.callback.TrainingCallback.EvalsLog, ) -> bool: config = json.loads(model.save_config()) ti_plus = np.array(config["learner"]["objective"]["ti+"]) tj_minus = np.array(config["learner"]["objective"]["tj-"]) df = pd.DataFrame({"ti+": ti_plus, "tj-": tj_minus}) print(df) return False ranker = xgb.XGBRanker( n_estimators=512, tree_method="hist", device="cuda", learning_rate=0.01, reg_lambda=1.5, subsample=0.8, sampling_method="gradient_based", # LTR specific parameters objective="rank:ndcg", # - Enable bias estimation lambdarank_unbiased=True, # - normalization (1 / (norm + 1)) lambdarank_bias_norm=1, # - Focus on the top 12 documents lambdarank_num_pair_per_sample=12, lambdarank_pair_method="topk", ndcg_exp_gain=True, eval_metric=["ndcg@1", "ndcg@3", "ndcg@5", "ndcg@10"], callbacks=[ShowPosition()], ) ranker.fit( X_train, 
clicks_train, qid=qid_train, eval_set=[(X_test, y_test), (X_test, clicks_test)], eval_qid=[qid_test, qid_test], verbose=True, ) ranker.predict(X_test) if __name__ == "__main__": parser = argparse.ArgumentParser( description="Demonstration of learning to rank using XGBoost." ) parser.add_argument( "--data", type=str, help="Root directory of the MSLR-WEB10K data.", required=True, ) parser.add_argument( "--cache", type=str, help="Directory for caching processed data.", required=True, ) args = parser.parse_args() ranking_demo(args) click_data_demo(args) xgboost-3.0.0/demo/guide-python/model_parser.py000066400000000000000000000231421476511272400216000ustar00rootroot00000000000000""" Demonstration for parsing JSON/UBJSON tree model files ====================================================== See :doc:`/tutorials/saving_model` for details about the model serialization. """ import argparse import json from dataclasses import dataclass from enum import IntEnum, unique from typing import Any, Dict, List, Sequence, Union import numpy as np try: import ubjson except ImportError: ubjson = None ParamT = Dict[str, str] def to_integers(data: Union[bytes, List[int]]) -> List[int]: """Convert a sequence of bytes to a list of Python integer""" return [v for v in data] @unique class SplitType(IntEnum): numerical = 0 categorical = 1 @dataclass class Node: # properties left: int right: int parent: int split_idx: int split_cond: float default_left: bool split_type: SplitType categories: List[int] # statistic base_weight: float loss_chg: float sum_hess: float class Tree: """A tree built by XGBoost.""" def __init__(self, tree_id: int, nodes: Sequence[Node]) -> None: self.tree_id = tree_id self.nodes = nodes def loss_change(self, node_id: int) -> float: """Loss gain of a node.""" return self.nodes[node_id].loss_chg def sum_hessian(self, node_id: int) -> float: """Sum Hessian of a node.""" return self.nodes[node_id].sum_hess def base_weight(self, node_id: int) -> float: """Base weight of a node.""" return self.nodes[node_id].base_weight def split_index(self, node_id: int) -> int: """Split feature index of node.""" return self.nodes[node_id].split_idx def split_condition(self, node_id: int) -> float: """Split value of a node.""" return self.nodes[node_id].split_cond def split_categories(self, node_id: int) -> List[int]: """Categories in a node.""" return self.nodes[node_id].categories def is_categorical(self, node_id: int) -> bool: """Whether a node has categorical split.""" return self.nodes[node_id].split_type == SplitType.categorical def is_numerical(self, node_id: int) -> bool: return not self.is_categorical(node_id) def parent(self, node_id: int) -> int: """Parent ID of a node.""" return self.nodes[node_id].parent def left_child(self, node_id: int) -> int: """Left child ID of a node.""" return self.nodes[node_id].left def right_child(self, node_id: int) -> int: """Right child ID of a node.""" return self.nodes[node_id].right def is_leaf(self, node_id: int) -> bool: """Whether a node is leaf.""" return self.nodes[node_id].left == -1 def is_deleted(self, node_id: int) -> bool: """Whether a node is deleted.""" return self.split_index(node_id) == np.iinfo(np.uint32).max def __str__(self) -> str: stack = [0] nodes = [] while stack: node: Dict[str, Union[float, int, List[int]]] = {} nid = stack.pop() node["node id"] = nid node["gain"] = self.loss_change(nid) node["cover"] = self.sum_hessian(nid) nodes.append(node) if not self.is_leaf(nid) and not self.is_deleted(nid): left = self.left_child(nid) right = 
self.right_child(nid) stack.append(left) stack.append(right) categories = self.split_categories(nid) if categories: assert self.is_categorical(nid) node["categories"] = categories else: assert self.is_numerical(nid) node["condition"] = self.split_condition(nid) if self.is_leaf(nid): node["weight"] = self.split_condition(nid) string = "\n".join(map(lambda x: " " + str(x), nodes)) return string class Model: """Gradient boosted tree model.""" def __init__(self, model: dict) -> None: """Construct the Model from a JSON object. parameters ---------- model : A dictionary loaded by json representing a XGBoost boosted tree model. """ # Basic properties of a model self.learner_model_shape: ParamT = model["learner"]["learner_model_param"] self.num_output_group = int(self.learner_model_shape["num_class"]) self.num_feature = int(self.learner_model_shape["num_feature"]) self.base_score = float(self.learner_model_shape["base_score"]) # A field encoding which output group a tree belongs self.tree_info = model["learner"]["gradient_booster"]["model"]["tree_info"] model_shape: ParamT = model["learner"]["gradient_booster"]["model"][ "gbtree_model_param" ] # JSON representation of trees j_trees = model["learner"]["gradient_booster"]["model"]["trees"] # Load the trees self.num_trees = int(model_shape["num_trees"]) trees: List[Tree] = [] for i in range(self.num_trees): tree: Dict[str, Any] = j_trees[i] tree_id = int(tree["id"]) assert tree_id == i, (tree_id, i) # - properties left_children: List[int] = tree["left_children"] right_children: List[int] = tree["right_children"] parents: List[int] = tree["parents"] split_conditions: List[float] = tree["split_conditions"] split_indices: List[int] = tree["split_indices"] # when ubjson is used, this is a byte array with each element as uint8 default_left = to_integers(tree["default_left"]) # - categorical features # when ubjson is used, this is a byte array with each element as uint8 split_types = to_integers(tree["split_type"]) # categories for each node is stored in a CSR style storage with segment as # the begin ptr and the `categories' as values. cat_segments: List[int] = tree["categories_segments"] cat_sizes: List[int] = tree["categories_sizes"] # node index for categorical nodes cat_nodes: List[int] = tree["categories_nodes"] assert len(cat_segments) == len(cat_sizes) == len(cat_nodes) cats = tree["categories"] assert len(left_children) == len(split_types) # The storage for categories is only defined for categorical nodes to # prevent unnecessary overhead for numerical splits, we track the # categorical node that are processed using a counter. cat_cnt = 0 if cat_nodes: last_cat_node = cat_nodes[cat_cnt] else: last_cat_node = -1 node_categories: List[List[int]] = [] for node_id in range(len(left_children)): if node_id == last_cat_node: beg = cat_segments[cat_cnt] size = cat_sizes[cat_cnt] end = beg + size node_cats = cats[beg:end] # categories are unique for each node assert len(set(node_cats)) == len(node_cats) cat_cnt += 1 if cat_cnt == len(cat_nodes): last_cat_node = -1 # continue to process the rest of the nodes else: last_cat_node = cat_nodes[cat_cnt] assert node_cats node_categories.append(node_cats) else: # append an empty node, it's either a numerical node or a leaf. 
node_categories.append([]) # - stats base_weights: List[float] = tree["base_weights"] loss_changes: List[float] = tree["loss_changes"] sum_hessian: List[float] = tree["sum_hessian"] # Construct a list of nodes that have complete information nodes: List[Node] = [ Node( left_children[node_id], right_children[node_id], parents[node_id], split_indices[node_id], split_conditions[node_id], default_left[node_id] == 1, # to boolean SplitType(split_types[node_id]), node_categories[node_id], base_weights[node_id], loss_changes[node_id], sum_hessian[node_id], ) for node_id in range(len(left_children)) ] pytree = Tree(tree_id, nodes) trees.append(pytree) self.trees = trees def print_model(self) -> None: for i, tree in enumerate(self.trees): print("\ntree_id:", i) print(tree) if __name__ == "__main__": parser = argparse.ArgumentParser( description="Demonstration for loading XGBoost JSON/UBJSON model." ) parser.add_argument( "--model", type=str, required=True, help="Path to .json/.ubj model file." ) args = parser.parse_args() if args.model.endswith("json"): # use json format with open(args.model, "r") as fd: model = json.load(fd) elif args.model.endswith("ubj"): if ubjson is None: raise ImportError("ubjson is not installed.") # use ubjson format with open(args.model, "rb") as bfd: model = ubjson.load(bfd) else: raise ValueError( "Unexpected file extension. Supported file extension are json and ubj." ) model = Model(model) model.print_model() xgboost-3.0.0/demo/guide-python/multioutput_regression.py000066400000000000000000000101751476511272400240010ustar00rootroot00000000000000""" A demo for multi-output regression ================================== The demo is adopted from scikit-learn: https://scikit-learn.org/stable/auto_examples/ensemble/plot_random_forest_regression_multioutput.html#sphx-glr-auto-examples-ensemble-plot-random-forest-regression-multioutput-py See :doc:`/tutorials/multioutput` for more information. .. note:: The feature is experimental. For the `multi_output_tree` strategy, many features are missing. """ import argparse from typing import Dict, List, Tuple import numpy as np from matplotlib import pyplot as plt import xgboost as xgb def plot_predt(y: np.ndarray, y_predt: np.ndarray, name: str) -> None: s = 25 plt.scatter(y[:, 0], y[:, 1], c="navy", s=s, edgecolor="black", label="data") plt.scatter( y_predt[:, 0], y_predt[:, 1], c="cornflowerblue", s=s, edgecolor="black" ) plt.xlim([-1, 2]) plt.ylim([-1, 2]) plt.show() def gen_circle() -> Tuple[np.ndarray, np.ndarray]: "Generate a sample dataset that y is a 2 dim circle." 
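    # The two targets are sin(x) and cos(x) of the same input, i.e. points on a
    # circle, so they are strongly coupled -- a useful stress test for the
    # multi-output strategies compared in this demo.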
rng = np.random.RandomState(1994) X = np.sort(200 * rng.rand(100, 1) - 100, axis=0) y = np.array([np.pi * np.sin(X).ravel(), np.pi * np.cos(X).ravel()]).T y[::5, :] += 0.5 - rng.rand(20, 2) y = y - y.min() y = y / y.max() return X, y def rmse_model(plot_result: bool, strategy: str) -> None: """Draw a circle with 2-dim coordinate as target variables.""" X, y = gen_circle() # Train a regressor on it reg = xgb.XGBRegressor( tree_method="hist", n_estimators=128, n_jobs=16, max_depth=8, multi_strategy=strategy, subsample=0.6, ) reg.fit(X, y, eval_set=[(X, y)]) y_predt = reg.predict(X) if plot_result: plot_predt(y, y_predt, "multi") def custom_rmse_model(plot_result: bool, strategy: str) -> None: """Train using Python implementation of Squared Error.""" def gradient(predt: np.ndarray, dtrain: xgb.DMatrix) -> np.ndarray: """Compute the gradient squared error.""" y = dtrain.get_label().reshape(predt.shape) return predt - y def hessian(predt: np.ndarray, dtrain: xgb.DMatrix) -> np.ndarray: """Compute the hessian for squared error.""" return np.ones(predt.shape) def squared_log( predt: np.ndarray, dtrain: xgb.DMatrix ) -> Tuple[np.ndarray, np.ndarray]: grad = gradient(predt, dtrain) hess = hessian(predt, dtrain) # both numpy.ndarray and cupy.ndarray works. return grad, hess def rmse(predt: np.ndarray, dtrain: xgb.DMatrix) -> Tuple[str, float]: y = dtrain.get_label().reshape(predt.shape) v = np.sqrt(np.sum(np.power(y - predt, 2))) return "PyRMSE", v X, y = gen_circle() Xy = xgb.DMatrix(X, y) results: Dict[str, Dict[str, List[float]]] = {} # Make sure the `num_target` is passed to XGBoost when custom objective is used. # When builtin objective is used, XGBoost can figure out the number of targets # automatically. booster = xgb.train( { "tree_method": "hist", "num_target": y.shape[1], "multi_strategy": strategy, }, dtrain=Xy, num_boost_round=128, obj=squared_log, evals=[(Xy, "Train")], evals_result=results, custom_metric=rmse, ) y_predt = booster.inplace_predict(X) if plot_result: plot_predt(y, y_predt, "multi") if __name__ == "__main__": parser = argparse.ArgumentParser() parser.add_argument("--plot", choices=[0, 1], type=int, default=1) args = parser.parse_args() # Train with builtin RMSE objective # - One model per output. rmse_model(args.plot == 1, "one_output_per_tree") # - One model for all outputs, this is still working in progress, many features are # missing. rmse_model(args.plot == 1, "multi_output_tree") # Train with custom objective. # - One model per output. custom_rmse_model(args.plot == 1, "one_output_per_tree") # - One model for all outputs, this is still working in progress, many features are # missing. 
custom_rmse_model(args.plot == 1, "multi_output_tree") xgboost-3.0.0/demo/guide-python/predict_first_ntree.py000066400000000000000000000036261476511272400231670ustar00rootroot00000000000000""" Demo for prediction using number of trees ========================================= """ import os import numpy as np from sklearn.datasets import load_svmlight_file import xgboost as xgb CURRENT_DIR = os.path.dirname(__file__) train = os.path.join(CURRENT_DIR, "../data/agaricus.txt.train") test = os.path.join(CURRENT_DIR, "../data/agaricus.txt.test") def native_interface(): # load data in do training dtrain = xgb.DMatrix(train + "?format=libsvm") dtest = xgb.DMatrix(test + "?format=libsvm") param = {"max_depth": 2, "eta": 1, "objective": "binary:logistic"} watchlist = [(dtest, "eval"), (dtrain, "train")] num_round = 3 bst = xgb.train(param, dtrain, num_round, watchlist) print("start testing prediction from first n trees") # predict using first 1 tree label = dtest.get_label() ypred1 = bst.predict(dtest, iteration_range=(0, 1)) # by default, we predict using all the trees ypred2 = bst.predict(dtest) print("error of ypred1=%f" % (np.sum((ypred1 > 0.5) != label) / float(len(label)))) print("error of ypred2=%f" % (np.sum((ypred2 > 0.5) != label) / float(len(label)))) def sklearn_interface(): X_train, y_train = load_svmlight_file(train) X_test, y_test = load_svmlight_file(test) clf = xgb.XGBClassifier(n_estimators=3, max_depth=2, eta=1) clf.fit(X_train, y_train, eval_set=[(X_test, y_test)]) assert clf.n_classes_ == 2 print("start testing prediction from first n trees") # predict using first 1 tree ypred1 = clf.predict(X_test, iteration_range=(0, 1)) # by default, we predict using all the trees ypred2 = clf.predict(X_test) print( "error of ypred1=%f" % (np.sum((ypred1 > 0.5) != y_test) / float(len(y_test))) ) print( "error of ypred2=%f" % (np.sum((ypred2 > 0.5) != y_test) / float(len(y_test))) ) if __name__ == "__main__": native_interface() sklearn_interface() xgboost-3.0.0/demo/guide-python/predict_leaf_indices.py000066400000000000000000000015221476511272400232410ustar00rootroot00000000000000""" Demo for obtaining leaf index ============================= """ import os import xgboost as xgb # load data in do training CURRENT_DIR = os.path.dirname(__file__) dtrain = xgb.DMatrix( os.path.join(CURRENT_DIR, "../data/agaricus.txt.train?format=libsvm") ) dtest = xgb.DMatrix( os.path.join(CURRENT_DIR, "../data/agaricus.txt.test?format=libsvm") ) param = {"max_depth": 2, "eta": 1, "objective": "binary:logistic"} watchlist = [(dtest, "eval"), (dtrain, "train")] num_round = 3 bst = xgb.train(param, dtrain, num_round, watchlist) print("start testing predict the leaf indices") # predict using first 2 tree leafindex = bst.predict( dtest, iteration_range=(0, 2), pred_leaf=True, strict_shape=True ) print(leafindex.shape) print(leafindex) # predict all trees leafindex = bst.predict(dtest, pred_leaf=True) print(leafindex.shape) xgboost-3.0.0/demo/guide-python/quantile_data_iterator.py000066400000000000000000000077541476511272400236630ustar00rootroot00000000000000""" Demo for using data iterator with Quantile DMatrix ================================================== .. versionadded:: 1.2.0 The demo that defines a customized iterator for passing batches of data into :py:class:`xgboost.QuantileDMatrix` and use this ``QuantileDMatrix`` for training. The feature is primarily designed to reduce the required GPU memory for training on distributed environment. 
After going through the demo, one might ask why we don't use a more native Python iterator. That's because XGBoost requires a `reset` function, while using `itertools.tee` might incur significant memory usage according to: https://docs.python.org/3/library/itertools.html#itertools.tee. .. seealso:: :ref:`sphx_glr_python_examples_external_memory.py` """ from typing import Callable import cupy import numpy import xgboost COLS = 64 ROWS_PER_BATCH = 1000 # data is split by rows BATCHES = 32 class IterForDMatrixDemo(xgboost.core.DataIter): """A data iterator for XGBoost DMatrix. `reset` and `next` are required for any data iterator; other functions here are utilities for demonstration purposes. """ def __init__(self) -> None: """Generate some random data for demonstration. Actual data can be anything that is currently supported by XGBoost. """ self.rows = ROWS_PER_BATCH self.cols = COLS rng = cupy.random.RandomState(numpy.uint64(1994)) self._data = [rng.randn(self.rows, self.cols)] * BATCHES self._labels = [rng.randn(self.rows)] * BATCHES self._weights = [rng.uniform(size=self.rows)] * BATCHES self.it = 0 # set iterator to 0 super().__init__() def as_array(self) -> cupy.ndarray: return cupy.concatenate(self._data) def as_array_labels(self) -> cupy.ndarray: return cupy.concatenate(self._labels) def as_array_weights(self) -> cupy.ndarray: return cupy.concatenate(self._weights) def data(self) -> cupy.ndarray: """Utility function for obtaining current batch of data.""" return self._data[self.it] def labels(self) -> cupy.ndarray: """Utility function for obtaining current batch of label.""" return self._labels[self.it] def weights(self) -> cupy.ndarray: return self._weights[self.it] def reset(self) -> None: """Reset the iterator""" self.it = 0 def next(self, input_data: Callable) -> bool: """Yield the next batch of data.""" if self.it == len(self._data): # Return False to let XGBoost know this is the end of iteration return False # input_data is a keyword-only function passed in by XGBoost and has a similar # signature to the ``DMatrix`` constructor. input_data(data=self.data(), label=self.labels(), weight=self.weights()) self.it += 1 return True def main() -> None: rounds = 100 it = IterForDMatrixDemo() # Use iterator, must be `QuantileDMatrix`. # In this demo, the input batches are created using cupy, and the data processing # (quantile sketching) will be performed on GPU. If data is loaded with CPU based # data structures like numpy or pandas, then the processing step will be performed # on CPU instead. m_with_it = xgboost.QuantileDMatrix(it) # Use regular DMatrix. m = xgboost.DMatrix( it.as_array(), it.as_array_labels(), weight=it.as_array_weights() ) assert m_with_it.num_col() == m.num_col() assert m_with_it.num_row() == m.num_row() # Tree method must be `hist`. reg_with_it = xgboost.train( {"tree_method": "hist", "device": "cuda"}, m_with_it, num_boost_round=rounds, evals=[(m_with_it, "Train")], ) predict_with_it = reg_with_it.predict(m_with_it) reg = xgboost.train( {"tree_method": "hist", "device": "cuda"}, m, num_boost_round=rounds, evals=[(m, "Train")], ) predict = reg.predict(m) if __name__ == "__main__": main() xgboost-3.0.0/demo/guide-python/quantile_regression.py000066400000000000000000000101401476511272400232010ustar00rootroot00000000000000""" Quantile Regression =================== .. versionadded:: 2.0.0 The script is inspired by this awesome example in sklearn: https://scikit-learn.org/stable/auto_examples/ensemble/plot_gradient_boosting_quantile.html ..
note:: The feature is only supported using the Python, R, and C packages. In addition, quantile crossing can happen due to limitation in the algorithm. """ import argparse from typing import Dict import numpy as np from sklearn.model_selection import train_test_split import xgboost as xgb def f(x: np.ndarray) -> np.ndarray: """The function to predict.""" return x * np.sin(x) def quantile_loss(args: argparse.Namespace) -> None: """Train a quantile regression model.""" rng = np.random.RandomState(1994) # Generate a synthetic dataset for demo, the generate process is from the sklearn # example. X = np.atleast_2d(rng.uniform(0, 10.0, size=1000)).T expected_y = f(X).ravel() sigma = 0.5 + X.ravel() / 10.0 noise = rng.lognormal(sigma=sigma) - np.exp(sigma**2.0 / 2.0) y = expected_y + noise # Train on 0.05 and 0.95 quantiles. The model is similar to multi-class and # multi-target models. alpha = np.array([0.05, 0.5, 0.95]) evals_result: Dict[str, Dict] = {} X_train, X_test, y_train, y_test = train_test_split(X, y, random_state=rng) # We will be using the `hist` tree method, quantile DMatrix can be used to preserve # memory (which has nothing to do with quantile regression itself, see its document # for details). # Do not use the `exact` tree method for quantile regression, otherwise the # performance might drop. Xy = xgb.QuantileDMatrix(X_train, y_train) # use Xy as a reference Xy_test = xgb.QuantileDMatrix(X_test, y_test, ref=Xy) booster = xgb.train( { # Use the quantile objective function. "objective": "reg:quantileerror", "tree_method": "hist", "quantile_alpha": alpha, # Let's try not to overfit. "learning_rate": 0.04, "max_depth": 5, }, Xy, num_boost_round=32, early_stopping_rounds=2, # The evaluation result is a weighted average across multiple quantiles. evals=[(Xy, "Train"), (Xy_test, "Test")], evals_result=evals_result, ) xx = np.atleast_2d(np.linspace(0, 10, 1000)).T scores = booster.inplace_predict(xx) # dim 1 is the quantiles assert scores.shape[0] == xx.shape[0] assert scores.shape[1] == alpha.shape[0] y_lower = scores[:, 0] # alpha=0.05 y_med = scores[:, 1] # alpha=0.5, median y_upper = scores[:, 2] # alpha=0.95 # Train a mse model for comparison booster = xgb.train( { "objective": "reg:squarederror", "tree_method": "hist", # Let's try not to overfit. 
"learning_rate": 0.04, "max_depth": 5, }, Xy, num_boost_round=32, early_stopping_rounds=2, evals=[(Xy, "Train"), (Xy_test, "Test")], evals_result=evals_result, ) xx = np.atleast_2d(np.linspace(0, 10, 1000)).T y_pred = booster.inplace_predict(xx) if args.plot: from matplotlib import pyplot as plt fig = plt.figure(figsize=(10, 10)) plt.plot(xx, f(xx), "g:", linewidth=3, label=r"$f(x) = x\,\sin(x)$") plt.plot(X_test, y_test, "b.", markersize=10, label="Test observations") plt.plot(xx, y_med, "r-", label="Predicted median") plt.plot(xx, y_pred, "m-", label="Predicted mean") plt.plot(xx, y_upper, "k-") plt.plot(xx, y_lower, "k-") plt.fill_between( xx.ravel(), y_lower, y_upper, alpha=0.4, label="Predicted 90% interval" ) plt.xlabel("$x$") plt.ylabel("$f(x)$") plt.ylim(-10, 25) plt.legend(loc="upper left") plt.show() if __name__ == "__main__": parser = argparse.ArgumentParser() parser.add_argument( "--plot", action="store_true", help="Specify it to enable plotting the outputs.", ) args = parser.parse_args() quantile_loss(args) xgboost-3.0.0/demo/guide-python/sklearn_evals_result.py000066400000000000000000000024061476511272400233530ustar00rootroot00000000000000""" Demo for accessing the xgboost eval metrics by using sklearn interface ====================================================================== """ import numpy as np from sklearn.datasets import make_hastie_10_2 import xgboost as xgb X, y = make_hastie_10_2(n_samples=2000, random_state=42) # Map labels from {-1, 1} to {0, 1} labels, y = np.unique(y, return_inverse=True) X_train, X_test = X[:1600], X[1600:] y_train, y_test = y[:1600], y[1600:] param_dist = {"objective": "binary:logistic", "n_estimators": 2} clf = xgb.XGBModel( **param_dist, eval_metric="logloss", ) # Or you can use: clf = xgb.XGBClassifier(**param_dist) clf.fit( X_train, y_train, eval_set=[(X_train, y_train), (X_test, y_test)], verbose=True, ) # Load evals result by calling the evals_result() function evals_result = clf.evals_result() print("Access logloss metric directly from validation_0:") print(evals_result["validation_0"]["logloss"]) print("") print("Access metrics through a loop:") for e_name, e_mtrs in evals_result.items(): print("- {}".format(e_name)) for e_mtr_name, e_mtr_vals in e_mtrs.items(): print(" - {}".format(e_mtr_name)) print(" - {}".format(e_mtr_vals)) print("") print("Access complete dict:") print(evals_result) xgboost-3.0.0/demo/guide-python/sklearn_examples.py000066400000000000000000000050751476511272400224660ustar00rootroot00000000000000""" Collection of examples for using sklearn interface ================================================== For an introduction to XGBoost's scikit-learn estimator interface, see :doc:`/python/sklearn_estimator`. 
Created on 1 Apr 2015 @author: Jamie Hall """ import pickle import numpy as np from sklearn.datasets import fetch_california_housing, load_digits, load_iris from sklearn.metrics import confusion_matrix, mean_squared_error from sklearn.model_selection import GridSearchCV, KFold, train_test_split import xgboost as xgb rng = np.random.RandomState(31337) print("Zeros and Ones from the Digits dataset: binary classification") digits = load_digits(n_class=2) y = digits["target"] X = digits["data"] kf = KFold(n_splits=2, shuffle=True, random_state=rng) for train_index, test_index in kf.split(X): xgb_model = xgb.XGBClassifier(n_jobs=1).fit(X[train_index], y[train_index]) predictions = xgb_model.predict(X[test_index]) actuals = y[test_index] print(confusion_matrix(actuals, predictions)) print("Iris: multiclass classification") iris = load_iris() y = iris["target"] X = iris["data"] kf = KFold(n_splits=2, shuffle=True, random_state=rng) for train_index, test_index in kf.split(X): xgb_model = xgb.XGBClassifier(n_jobs=1).fit(X[train_index], y[train_index]) predictions = xgb_model.predict(X[test_index]) actuals = y[test_index] print(confusion_matrix(actuals, predictions)) print("California Housing: regression") X, y = fetch_california_housing(return_X_y=True) kf = KFold(n_splits=2, shuffle=True, random_state=rng) for train_index, test_index in kf.split(X): xgb_model = xgb.XGBRegressor(n_jobs=1).fit(X[train_index], y[train_index]) predictions = xgb_model.predict(X[test_index]) actuals = y[test_index] print(mean_squared_error(actuals, predictions)) print("Parameter optimization") xgb_model = xgb.XGBRegressor(n_jobs=1) clf = GridSearchCV( xgb_model, {"max_depth": [2, 4], "n_estimators": [50, 100]}, verbose=1, n_jobs=1, cv=3, ) clf.fit(X, y) print(clf.best_score_) print(clf.best_params_) # The sklearn API models are picklable print("Pickling sklearn API models") # must open in binary format to pickle pickle.dump(clf, open("best_calif.pkl", "wb")) clf2 = pickle.load(open("best_calif.pkl", "rb")) print(np.allclose(clf.predict(X), clf2.predict(X))) # Early-stopping X = digits["data"] y = digits["target"] X_train, X_test, y_train, y_test = train_test_split(X, y, random_state=0) clf = xgb.XGBClassifier(n_jobs=1, early_stopping_rounds=10, eval_metric="auc") clf.fit(X_train, y_train, eval_set=[(X_test, y_test)]) xgboost-3.0.0/demo/guide-python/sklearn_parallel.py000066400000000000000000000013451476511272400224400ustar00rootroot00000000000000""" Demo for using xgboost with sklearn =================================== """ import multiprocessing from sklearn.datasets import fetch_california_housing from sklearn.model_selection import GridSearchCV import xgboost as xgb if __name__ == "__main__": print("Parallel Parameter optimization") X, y = fetch_california_housing(return_X_y=True) # Make sure the number of threads is balanced. 
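    # Added note (rule of thumb, not from the original demo): keep the product
    # n_jobs(model) * n_jobs(GridSearchCV) at or below the available core count
    # to avoid thread oversubscription; here (cpu_count // 2) * 2 roughly
    # matches the core count.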
xgb_model = xgb.XGBRegressor( n_jobs=multiprocessing.cpu_count() // 2, tree_method="hist" ) clf = GridSearchCV( xgb_model, {"max_depth": [2, 4, 6], "n_estimators": [50, 100, 200]}, verbose=1, n_jobs=2, ) clf.fit(X, y) print(clf.best_score_) print(clf.best_params_) xgboost-3.0.0/demo/guide-python/spark_estimator_examples.py000066400000000000000000000065771476511272400242460ustar00rootroot00000000000000""" Collection of examples for using xgboost.spark estimator interface ================================================================== @author: Weichen Xu """ import sklearn.datasets from pyspark.ml.evaluation import MulticlassClassificationEvaluator, RegressionEvaluator from pyspark.ml.linalg import Vectors from pyspark.sql import SparkSession from pyspark.sql.functions import rand from sklearn.model_selection import train_test_split from xgboost.spark import SparkXGBClassifier, SparkXGBRegressor spark = SparkSession.builder.master("local[*]").getOrCreate() def create_spark_df(X, y): return spark.createDataFrame( spark.sparkContext.parallelize( [(Vectors.dense(features), float(label)) for features, label in zip(X, y)] ), ["features", "label"], ) # load diabetes dataset (regression dataset) diabetes_X, diabetes_y = sklearn.datasets.load_diabetes(return_X_y=True) diabetes_X_train, diabetes_X_test, diabetes_y_train, diabetes_y_test = train_test_split( diabetes_X, diabetes_y, test_size=0.3, shuffle=True ) diabetes_train_spark_df = create_spark_df(diabetes_X_train, diabetes_y_train) diabetes_test_spark_df = create_spark_df(diabetes_X_test, diabetes_y_test) # train xgboost regressor model xgb_regressor = SparkXGBRegressor(max_depth=5) xgb_regressor_model = xgb_regressor.fit(diabetes_train_spark_df) transformed_diabetes_test_spark_df = xgb_regressor_model.transform( diabetes_test_spark_df ) regressor_evaluator = RegressionEvaluator(metricName="rmse") print( f"regressor rmse={regressor_evaluator.evaluate(transformed_diabetes_test_spark_df)}" ) diabetes_train_spark_df2 = diabetes_train_spark_df.withColumn( "validationIndicatorCol", rand(1) > 0.7 ) # train xgboost regressor model with validation dataset xgb_regressor2 = SparkXGBRegressor( max_depth=5, validation_indicator_col="validationIndicatorCol" ) xgb_regressor_model2 = xgb_regressor2.fit(diabetes_train_spark_df2) transformed_diabetes_test_spark_df2 = xgb_regressor_model2.transform( diabetes_test_spark_df ) print( f"regressor2 rmse={regressor_evaluator.evaluate(transformed_diabetes_test_spark_df2)}" ) # load iris dataset (classification dataset) iris_X, iris_y = sklearn.datasets.load_iris(return_X_y=True) iris_X_train, iris_X_test, iris_y_train, iris_y_test = train_test_split( iris_X, iris_y, test_size=0.3, shuffle=True ) iris_train_spark_df = create_spark_df(iris_X_train, iris_y_train) iris_test_spark_df = create_spark_df(iris_X_test, iris_y_test) # train xgboost classifier model xgb_classifier = SparkXGBClassifier(max_depth=5) xgb_classifier_model = xgb_classifier.fit(iris_train_spark_df) transformed_iris_test_spark_df = xgb_classifier_model.transform(iris_test_spark_df) classifier_evaluator = MulticlassClassificationEvaluator(metricName="f1") print(f"classifier f1={classifier_evaluator.evaluate(transformed_iris_test_spark_df)}") iris_train_spark_df2 = iris_train_spark_df.withColumn( "validationIndicatorCol", rand(1) > 0.7 ) # train xgboost classifier model with validation dataset xgb_classifier2 = SparkXGBClassifier( max_depth=5, validation_indicator_col="validationIndicatorCol" ) xgb_classifier_model2 = 
xgb_classifier2.fit(iris_train_spark_df2) transformed_iris_test_spark_df2 = xgb_classifier_model2.transform(iris_test_spark_df) print( f"classifier2 f1={classifier_evaluator.evaluate(transformed_iris_test_spark_df2)}" ) spark.stop() xgboost-3.0.0/demo/guide-python/update_process.py000066400000000000000000000062571476511272400221540ustar00rootroot00000000000000""" Demo for using `process_type` with `prune` and `refresh` ======================================================== Modifying existing trees is not a well established use for XGBoost, so feel free to experiment. """ import numpy as np from sklearn.datasets import fetch_california_housing import xgboost as xgb def main(): n_rounds = 32 X, y = fetch_california_housing(return_X_y=True) # Train a model first X_train = X[: X.shape[0] // 2] y_train = y[: y.shape[0] // 2] Xy = xgb.DMatrix(X_train, y_train) evals_result: xgb.callback.EvaluationMonitor.EvalsLog = {} booster = xgb.train( {"tree_method": "hist", "max_depth": 6, "device": "cuda"}, Xy, num_boost_round=n_rounds, evals=[(Xy, "Train")], evals_result=evals_result, ) SHAP = booster.predict(Xy, pred_contribs=True) # Refresh the leaf value and tree statistic X_refresh = X[X.shape[0] // 2 :] y_refresh = y[y.shape[0] // 2 :] Xy_refresh = xgb.DMatrix(X_refresh, y_refresh) # The model will adapt to other half of the data by changing leaf value (no change in # split condition) with refresh_leaf set to True. refresh_result: xgb.callback.EvaluationMonitor.EvalsLog = {} refreshed = xgb.train( {"process_type": "update", "updater": "refresh", "refresh_leaf": True}, Xy_refresh, num_boost_round=n_rounds, xgb_model=booster, evals=[(Xy, "Original"), (Xy_refresh, "Train")], evals_result=refresh_result, ) # Refresh the model without changing the leaf value, but tree statistic including # cover and weight are refreshed. refresh_result: xgb.callback.EvaluationMonitor.EvalsLog = {} refreshed = xgb.train( {"process_type": "update", "updater": "refresh", "refresh_leaf": False}, Xy_refresh, num_boost_round=n_rounds, xgb_model=booster, evals=[(Xy, "Original"), (Xy_refresh, "Train")], evals_result=refresh_result, ) # Without refreshing the leaf value, resulting trees should be the same with original # model except for accumulated statistic. The rtol is for floating point error in # prediction. np.testing.assert_allclose( refresh_result["Original"]["rmse"], evals_result["Train"]["rmse"], rtol=1e-5 ) # But SHAP value is changed as cover in tree nodes are changed. refreshed_SHAP = refreshed.predict(Xy, pred_contribs=True) assert not np.allclose(SHAP, refreshed_SHAP, rtol=1e-3) # Prune the trees with smaller max_depth X_update = X_train y_update = y_train Xy_update = xgb.DMatrix(X_update, y_update) prune_result: xgb.callback.EvaluationMonitor.EvalsLog = {} pruned = xgb.train( {"process_type": "update", "updater": "prune", "max_depth": 2}, Xy_update, num_boost_round=n_rounds, xgb_model=booster, evals=[(Xy, "Original"), (Xy_update, "Train")], evals_result=prune_result, ) # Have a smaller model, but similar accuracy. np.testing.assert_allclose( np.array(prune_result["Original"]["rmse"]), np.array(prune_result["Train"]["rmse"]), atol=1e-5, ) if __name__ == "__main__": main() xgboost-3.0.0/demo/kaggle-higgs/000077500000000000000000000000001476511272400164655ustar00rootroot00000000000000xgboost-3.0.0/demo/kaggle-higgs/README.md000066400000000000000000000020161476511272400177430ustar00rootroot00000000000000Highlights ===== Higgs challenge ends recently, xgboost is being used by many users. 
This list highlights the xgboost solutions from players:

* Blogpost by phunther: [Winning solution of Kaggle Higgs competition: what a single model can do](http://no2147483647.wordpress.com/2014/09/17/winning-solution-of-kaggle-higgs-competition-what-a-single-model-can-do/)
* The solution by Tianqi Chen and Tong He [Link](https://github.com/hetong007/higgsml)

Guide for Kaggle Higgs Challenge
=====
This folder gives an example of how to use the XGBoost Python module for the Kaggle Higgs competition.
The script achieves an AMS score of about 3.600 on the public leaderboard.

To get started, follow these steps:

1. Compile the XGBoost Python library
```bash
cd ../..
make
```

2. Put training.csv and test.csv in the folder './data' (you can create a symbolic link)

3. Run ./run.sh

Speed
=====
speedtest.py compares xgboost's speed on this dataset with sklearn's GBM (GradientBoostingClassifier).

Using R module
=====
* Alternatively, you can run the R scripts higgs-train.R and higgs-pred.R.
xgboost-3.0.0/demo/kaggle-higgs/higgs-cv.py000077500000000000000000000026341476511272400205560ustar00rootroot00000000000000#!/usr/bin/python
import numpy as np

import xgboost as xgb

### load data and do training
train = np.loadtxt('./data/training.csv', delimiter=',', skiprows=1,
                   converters={32: lambda x: int(x == 's'.encode('utf-8'))})
label = train[:, 32]
data = train[:, 1:31]
weight = train[:, 31]
dtrain = xgb.DMatrix(data, label=label, missing=-999.0, weight=weight)
param = {'max_depth': 6, 'eta': 0.1, 'objective': 'binary:logitraw', 'nthread': 4}
num_round = 120

print('running cross validation, with preprocessing function')


# define the preprocessing function
# used to return the preprocessed training, test data, and parameters
# we can use this to do weight rescaling, etc.
# as an example, we try to set scale_pos_weight
def fpreproc(dtrain, dtest, param):
    label = dtrain.get_label()
    ratio = float(np.sum(label == 0)) / np.sum(label == 1)
    param['scale_pos_weight'] = ratio
    wtrain = dtrain.get_weight()
    wtest = dtest.get_weight()
    sum_weight = sum(wtrain) + sum(wtest)
    wtrain *= sum_weight / sum(wtrain)
    wtest *= sum_weight / sum(wtest)
    dtrain.set_weight(wtrain)
    dtest.set_weight(wtest)
    return (dtrain, dtest, param)


# do cross validation; for each fold
# the dtrain, dtest, param will be passed into fpreproc
# then the return value of fpreproc will be used to generate
# the results of that fold
xgb.cv(param, dtrain, num_round, nfold=5,
       metrics={'ams@0.15', 'auc'}, seed=0, fpreproc=fpreproc)
xgboost-3.0.0/demo/kaggle-higgs/higgs-numpy.py000077500000000000000000000032621476511272400213140ustar00rootroot00000000000000#!/usr/bin/python
# this is the example script to use xgboost to train
import numpy as np

import xgboost as xgb

test_size = 550000

# path to where the data lies
dpath = 'data'

# load in training data, directly use numpy
dtrain = np.loadtxt(dpath + '/training.csv', delimiter=',', skiprows=1,
                    converters={32: lambda x: int(x == 's'.encode('utf-8'))})
print('finish loading from csv ')

label = dtrain[:, 32]
data = dtrain[:, 1:31]
# rescale weight to make it the same as the test set
weight = dtrain[:, 31] * float(test_size) / len(label)

sum_wpos = sum(weight[i] for i in range(len(label)) if label[i] == 1.0)
sum_wneg = sum(weight[i] for i in range(len(label)) if label[i] == 0.0)

# print weight statistics
print('weight statistics: wpos=%g, wneg=%g, ratio=%g' % (sum_wpos, sum_wneg, sum_wneg / sum_wpos))

# construct xgboost.DMatrix from numpy array, treat -999.0 as missing value
xgmat = xgb.DMatrix(data, label=label, missing=-999.0, weight=weight)

# setup parameters for xgboost
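# scale_pos_weight below is set to sum(negative weights) / sum(positive weights) so
# that the weighted positive and negative classes contribute equally to the loss.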
param = {} # use logistic regression loss, use raw prediction before logistic transformation # since we only need the rank param['objective'] = 'binary:logitraw' # scale weight of positive examples param['scale_pos_weight'] = sum_wneg/sum_wpos param['eta'] = 0.1 param['max_depth'] = 6 param['eval_metric'] = 'auc' param['nthread'] = 16 # you can directly throw param in, though we want to watch multiple metrics here plst = list(param.items())+[('eval_metric', 'ams@0.15')] watchlist = [ (xgmat,'train') ] # boost 120 trees num_round = 120 print ('loading data end, start to boost trees') bst = xgb.train( plst, xgmat, num_round, watchlist ); # save out model bst.save_model('higgs.model') print ('finish training') xgboost-3.0.0/demo/kaggle-higgs/higgs-pred.R000066400000000000000000000013221476511272400206370ustar00rootroot00000000000000# install xgboost package, see R-package in root folder require(xgboost) require(methods) modelfile <- "higgs.model" outfile <- "higgs.pred.csv" dtest <- read.csv("data/test.csv", header = TRUE) data <- as.matrix(dtest[2:31]) idx <- dtest[[1]] xgmat <- xgb.DMatrix(data, missing = -999.0) bst <- xgb.load(modelfile = modelfile) ypred <- predict(bst, xgmat) rorder <- rank(ypred, ties.method = "first") threshold <- 0.15 # to be completed ntop <- length(rorder) - as.integer(threshold * length(rorder)) plabel <- ifelse(rorder > ntop, "s", "b") outdata <- list("EventId" = idx, "RankOrder" = rorder, "Class" = plabel) write.csv(outdata, file = outfile, quote = FALSE, row.names = FALSE) xgboost-3.0.0/demo/kaggle-higgs/higgs-pred.py000077500000000000000000000022141476511272400210720ustar00rootroot00000000000000#!/usr/bin/python # make prediction import numpy as np import xgboost as xgb # path to where the data lies dpath = 'data' modelfile = 'higgs.model' outfile = 'higgs.pred.csv' # make top 15% as positive threshold_ratio = 0.15 # load in training data, directly use numpy dtest = np.loadtxt( dpath+'/test.csv', delimiter=',', skiprows=1 ) data = dtest[:,1:31] idx = dtest[:,0] print ('finish loading from csv ') xgmat = xgb.DMatrix( data, missing = -999.0 ) bst = xgb.Booster({'nthread':16}, model_file = modelfile) ypred = bst.predict( xgmat ) res = [ ( int(idx[i]), ypred[i] ) for i in range(len(ypred)) ] rorder = {} for k, v in sorted( res, key = lambda x:-x[1] ): rorder[ k ] = len(rorder) + 1 # write out predictions ntop = int( threshold_ratio * len(rorder ) ) fo = open(outfile, 'w') nhit = 0 ntot = 0 fo.write('EventId,RankOrder,Class\n') for k, v in res: if rorder[k] <= ntop: lb = 's' nhit += 1 else: lb = 'b' # change output rank order to follow Kaggle convention fo.write('%s,%d,%s\n' % ( k, len(rorder)+1-rorder[k], lb ) ) ntot += 1 fo.close() print ('finished writing into prediction file') xgboost-3.0.0/demo/kaggle-higgs/higgs-train.R000066400000000000000000000020661476511272400210300ustar00rootroot00000000000000# install xgboost package, see R-package in root folder require(xgboost) require(methods) testsize <- 550000 dtrain <- read.csv("data/training.csv", header = TRUE) dtrain[33] <- dtrain[33] == "s" label <- as.numeric(dtrain[[33]]) data <- as.matrix(dtrain[2:31]) weight <- as.numeric(dtrain[[32]]) * testsize / length(label) sumwpos <- sum(weight * (label == 1.0)) sumwneg <- sum(weight * (label == 0.0)) print(paste("weight statistics: wpos=", sumwpos, "wneg=", sumwneg, "ratio=", sumwneg / sumwpos)) xgmat <- xgb.DMatrix(data, label = label, weight = weight, missing = -999.0) param <- list("objective" = "binary:logitraw", "scale_pos_weight" = sumwneg / sumwpos, "bst:eta" 
= 0.1, "bst:max_depth" = 6, "eval_metric" = "auc", "eval_metric" = "ams@0.15", "nthread" = 16) watchlist <- list("train" = xgmat) nrounds <- 120 print("loading data end, start to boost trees") bst <- xgb.train(param, xgmat, nrounds, watchlist) # save out model xgb.save(bst, "higgs.model") print('finish training') xgboost-3.0.0/demo/kaggle-higgs/run.sh000077500000000000000000000003451476511272400176320ustar00rootroot00000000000000#!/bin/bash python -u higgs-numpy.py ret=$? if [[ $ret != 0 ]]; then echo "ERROR in higgs-numpy.py" exit $ret fi python -u higgs-pred.py ret=$? if [[ $ret != 0 ]]; then echo "ERROR in higgs-pred.py" exit $ret fi xgboost-3.0.0/demo/kaggle-higgs/speedtest.R000066400000000000000000000036331476511272400206150ustar00rootroot00000000000000# install xgboost package, see R-package in root folder require(xgboost) require(gbm) require(methods) testsize <- 550000 dtrain <- read.csv("data/training.csv", header = TRUE, nrows = 350001) dtrain$Label <- as.numeric(dtrain$Label == 's') # gbm.time = system.time({ # gbm.model <- gbm(Label ~ ., data = dtrain[, -c(1,32)], n.trees = 120, # interaction.depth = 6, shrinkage = 0.1, bag.fraction = 1, # verbose = TRUE) # }) # print(gbm.time) # Test result: 761.48 secs # dtrain[33] <- dtrain[33] == "s" # label <- as.numeric(dtrain[[33]]) data <- as.matrix(dtrain[2:31]) weight <- as.numeric(dtrain[[32]]) * testsize / length(label) sumwpos <- sum(weight * (label == 1.0)) sumwneg <- sum(weight * (label == 0.0)) print(paste("weight statistics: wpos=", sumwpos, "wneg=", sumwneg, "ratio=", sumwneg / sumwpos)) xgboost.time <- list() threads <- c(1, 2, 4, 8, 16) for (i in seq_along(threads)){ thread <- threads[i] xgboost.time[[i]] <- system.time({ xgmat <- xgb.DMatrix(data, label = label, weight = weight, missing = -999.0) param <- list("objective" = "binary:logitraw", "scale_pos_weight" = sumwneg / sumwpos, "bst:eta" = 0.1, "bst:max_depth" = 6, "eval_metric" = "auc", "eval_metric" = "ams@0.15", "nthread" = thread) watchlist <- list("train" = xgmat) nrounds <- 120 print("loading data end, start to boost trees") bst <- xgb.train(param, xgmat, nrounds, watchlist) # save out model xgb.save(bst, "higgs.model") print('finish training') }) } xgboost.time # [[1]] # user system elapsed # 99.015 0.051 98.982 # # [[2]] # user system elapsed # 100.268 0.317 55.473 # # [[3]] # user system elapsed # 111.682 0.777 35.963 # # [[4]] # user system elapsed # 149.396 1.851 32.661 # # [[5]] # user system elapsed # 157.390 5.988 40.949 xgboost-3.0.0/demo/kaggle-higgs/speedtest.py000077500000000000000000000040031476511272400210370ustar00rootroot00000000000000#!/usr/bin/python # this is the example script to use xgboost to train import time import numpy as np from sklearn.ensemble import GradientBoostingClassifier import xgboost as xgb test_size = 550000 # path to where the data lies dpath = 'data' # load in training data, directly use numpy dtrain = np.loadtxt( dpath+'/training.csv', delimiter=',', skiprows=1, converters={32: lambda x:int(x=='s') } ) print ('finish loading from csv ') label = dtrain[:,32] data = dtrain[:,1:31] # rescale weight to make it same as test set weight = dtrain[:,31] * float(test_size) / len(label) sum_wpos = sum( weight[i] for i in range(len(label)) if label[i] == 1.0 ) sum_wneg = sum( weight[i] for i in range(len(label)) if label[i] == 0.0 ) # print weight statistics print ('weight statistics: wpos=%g, wneg=%g, ratio=%g' % ( sum_wpos, sum_wneg, sum_wneg/sum_wpos )) # construct xgboost.DMatrix from numpy array, treat -999.0 as missing value 
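# (entries equal to `missing` are treated as absent values and follow each
# split's learned default direction, rather than being compared as feature values)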
xgmat = xgb.DMatrix(data, label=label, missing=-999.0, weight=weight)

# setup parameters for xgboost
param = {}

# use logistic regression loss
param['objective'] = 'binary:logitraw'
# scale weight of positive examples
param['scale_pos_weight'] = sum_wneg / sum_wpos
param['bst:eta'] = 0.1
param['bst:max_depth'] = 6
param['eval_metric'] = 'auc'
param['nthread'] = 4
# dict.items() returns a view in Python 3, so wrap it in list() before concatenating
plst = list(param.items()) + [('eval_metric', 'ams@0.15')]

watchlist = [(xgmat, 'train')]
# boost 10 trees
num_round = 10
print('loading data end, start to boost trees')

print("training GBM from sklearn")
tmp = time.time()
gbm = GradientBoostingClassifier(n_estimators=num_round, max_depth=6, verbose=2)
gbm.fit(data, label)
print("sklearn.GBM costs: %s seconds" % str(time.time() - tmp))

print("training xgboost")
threads = [1, 2, 4, 16]
for i in threads:
    param['nthread'] = i
    tmp = time.time()
    plst = list(param.items()) + [('eval_metric', 'ams@0.15')]
    bst = xgb.train(plst, xgmat, num_round, watchlist)
    print("XGBoost with %d thread costs: %s seconds" % (i, str(time.time() - tmp)))

print('finish training')
xgboost-3.0.0/demo/kaggle-otto/000077500000000000000000000000001476511272400163515ustar00rootroot00000000000000xgboost-3.0.0/demo/kaggle-otto/README.MD000066400000000000000000000012351476511272400175310ustar00rootroot00000000000000Benchmark for Otto Group Competition
=========
This folder contains the benchmark for the [Otto Group Competition on Kaggle](http://www.kaggle.com/c/otto-group-product-classification-challenge).

## Getting started

1. Put `train.csv` and `test.csv` under the `data` folder
2. Run the script
3. Submit the `submission.csv`

The parameter `nthread` controls the number of cores to run on; please set it to suit your machine (see the sketch at the end of this README).

## R-package

To install the R-package of xgboost, please run

```r
install.packages("xgboost", repos = "https://cran.r-project.org")
```

Windows users may need to install [RTools](http://cran.r-project.org/bin/windows/Rtools/) first.
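As a quick reference, `nthread` is set inside the scripts themselves. A minimal sketch of the parameter list used by `otto_train_pred.R` (the value `8` is just an example; adjust it to your core count):

```r
param <- list("objective" = "multi:softprob",
              "eval_metric" = "mlogloss",
              "num_class" = 9,
              "nthread" = 8)  # number of CPU cores to use
```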
xgboost-3.0.0/demo/kaggle-otto/otto_train_pred.R000066400000000000000000000025121476511272400216700ustar00rootroot00000000000000require(xgboost)
require(methods)

train <- read.csv('data/train.csv', header = TRUE, stringsAsFactors = FALSE)
test <- read.csv('data/test.csv', header = TRUE, stringsAsFactors = FALSE)
train <- train[, -1]
test <- test[, -1]

y <- train[, ncol(train)]
y <- gsub('Class_', '', y, fixed = TRUE)
y <- as.integer(y) - 1  # xgboost takes labels in [0, num_class)

x <- rbind(train[, -ncol(train)], test)
x <- as.matrix(x)
x <- matrix(as.numeric(x), nrow(x), ncol(x))
trind <- seq_along(y)
teind <- (nrow(train) + 1):nrow(x)

# Set the necessary parameters
param <- list("objective" = "multi:softprob",
              "eval_metric" = "mlogloss",
              "num_class" = 9,
              "nthread" = 8)

# Run Cross Validation
cv.nrounds <- 50
bst.cv <- xgb.cv(
  param = param
  , data = x[trind, ]
  , label = y
  , nfold = 3
  , nrounds = cv.nrounds
)

# Train the model
nrounds <- 50
bst <- xgboost(param = param, data = x[trind, ], label = y, nrounds = nrounds)

# Make prediction
pred <- predict(bst, x[teind, ])
pred <- matrix(pred, 9, length(pred) / 9)
pred <- t(pred)

# Output submission
pred <- format(pred, digits = 2, scientific = FALSE)  # shrink the size of the submission
pred <- data.frame(seq_len(nrow(pred)), pred)
names(pred) <- c('id', paste0('Class_', 1:9))
write.csv(pred, file = 'submission.csv', quote = FALSE, row.names = FALSE)
xgboost-3.0.0/demo/kaggle-otto/understandingXGBoostModel.Rmd000066400000000000000000000233171476511272400241170ustar00rootroot00000000000000---
title: "Understanding XGBoost Model on Otto Dataset"
author: "Michaël Benesty"
output:
  rmarkdown::html_vignette:
    css: ../../R-package/vignettes/vignette.css
    number_sections: yes
    toc: yes
---

Introduction
============

**XGBoost** is an implementation of the famous gradient boosting algorithm. This model is often described as a *blackbox*, meaning it works well but it is not trivial to understand how. Indeed, the model is made of hundreds (thousands?) of decision trees. You may wonder how a human could possibly get a general view of such a model.

While XGBoost is known for its speed and accurate predictive power, it also comes with various functions to help you understand the model.
The purpose of this RMarkdown document is to demonstrate how easily we can leverage the functions already implemented in the **XGBoost R** package. Of course, everything shown below can be applied to the dataset you may have to manipulate at work or wherever!

First we will prepare the **Otto** dataset and train a model, then we will generate two visualisations to get a clue of what is important to the model, and finally, we will see how we can leverage this information.

Preparation of the data
=======================

This part is based on the **R** tutorial example by [Tong He](https://github.com/dmlc/xgboost/blob/master/demo/kaggle-otto/otto_train_pred.R)

First, let's load the packages and the dataset.

```{r loading}
require(xgboost)
require(methods)
require(data.table)
require(magrittr)
train <- fread('data/train.csv', header = TRUE, stringsAsFactors = FALSE)
test <- fread('data/test.csv', header = TRUE, stringsAsFactors = FALSE)
```
> `magrittr` and `data.table` are here to make the code cleaner and much faster.

Let's explore the dataset.
```{r explore}
# Train dataset dimensions
dim(train)

# Training content
train[1:6, 1:5, with = FALSE]

# Test dataset dimensions
dim(test)

# Test content
test[1:6, 1:5, with = FALSE]
```
> We only display the first 6 rows and first 5 columns for convenience.

Each *column* represents a feature measured by an `integer`. Each *row* is an **Otto** product.

Obviously the first column (`ID`) doesn't contain any useful information. To let the algorithm focus on real stuff, we will delete it.

```{r clean, results='hide'}
# Delete ID column in training dataset
train[, id := NULL]

# Delete ID column in testing dataset
test[, id := NULL]
```

According to its description, the **Otto** challenge is a multi-class classification challenge. We need to extract the labels (here the names of the different classes) from the dataset. We only have two files (test and training); it seems logical that the training file contains the classes we are looking for. Usually the label is in the first or the last column. We already know what is in the first column, so let's check the content of the last one.

```{r searchLabel}
# Check the content of the last column
train[1:6, ncol(train), with = FALSE]
# Save the name of the last column
nameLastCol <- names(train)[ncol(train)]
```

The classes are provided as character strings in the `r ncol(train)`th column called `r nameLastCol`. As you may know, **XGBoost** doesn't support anything other than numbers. So we will convert the classes to `integer`. Moreover, according to the documentation, the class index should start at `0`.

For that purpose, we will:

* extract the target column
* remove `Class_` from each class name
* convert to `integer`
* subtract `1` from the new value

```{r classToIntegers}
# Convert from classes to numbers
y <- train[, nameLastCol, with = FALSE][[1]] %>%
  gsub('Class_', '', ., fixed = TRUE) %>%
  as.integer %>%
  subtract(., 1)

# Display the first 5 levels
y[1:5]
```

We remove the label column from the training dataset, otherwise **XGBoost** would use it to guess the labels!

```{r deleteCols, results='hide'}
train[, nameLastCol := NULL, with = FALSE]
```

`data.table` is an awesome implementation of data.frame; unfortunately, it is not a format supported natively by **XGBoost**. We need to convert both datasets (training and test) to `numeric` matrix format.

```{r convertToNumericMatrix}
trainMatrix <- train[, lapply(.SD, as.numeric)] %>% as.matrix
testMatrix <- test[, lapply(.SD, as.numeric)] %>% as.matrix
```

Model training
==============

Before training, we will use cross validation to evaluate our error rate.

Basically **XGBoost** will divide the training data into `nfold` parts, then retain the first part to use it as test data and perform a training. Then it will reintegrate the first part, retain the second part, do a training, and so on... You can look at the function documentation for more information.

```{r crossValidation}
numberOfClasses <- max(y) + 1

param <- list("objective" = "multi:softprob",
              "eval_metric" = "mlogloss",
              "num_class" = numberOfClasses)

cv.nrounds <- 5
cv.nfold <- 3

bst.cv <- xgb.cv(param = param, data = trainMatrix, label = y,
                 nfold = cv.nfold, nrounds = cv.nrounds)
```
> As we can see the error rate is low on the test dataset (for a model trained in about 5 minutes).

Finally, we are ready to train the real model!!!
```{r modelTraining}
nrounds <- 50
bst <- xgboost(param = param, data = trainMatrix, label = y, nrounds = nrounds)
```

Model understanding
===================

Feature importance
------------------

So far, we have built a model made of **`r nrounds`** trees.

To build a tree, the dataset is divided recursively several times. At the end of the process, you get groups of observations (here, these observations are properties regarding **Otto** products). Each division operation is called a *split*.

Each group at each division level is called a branch and the deepest level is called a *leaf*.

In the final model, these *leaves* are supposed to be as pure as possible for each tree, meaning in our case that each *leaf* should be made of one class of **Otto** product only (of course this is not fully achievable, but it is what we aim for with a minimum number of splits).

**Not all *splits* are equally important**. Basically the first *split* of a tree will have more impact on the purity than, for instance, the deepest *split*. Intuitively, we understand that the first *split* does most of the work, and the following *splits* focus on smaller parts of the dataset which have been misclassified by the first *tree*.

In the same way, in Boosting we try to optimize the misclassification at each round (this is called the *loss*). So the first *tree* will do the bulk of the work and the following trees will focus on the remainder, on the parts not correctly learned by the previous *trees*.

The improvement brought by each *split* can be measured; it is called the *gain*.

Each *split* is done on one feature only at one value.

Let's see what the model looks like.

```{r modelDump}
model <- xgb.dump(bst, with.stats = TRUE)
model[1:10]
```
> For convenience, we are displaying the first 10 lines of the model only.

Clearly, it is not easy to understand what it means. Basically each line represents a *branch*: there is the *tree* ID, the feature ID, the point where it *splits*, and information regarding the next *branches* (left, right, when the row for this feature is N/A).

Fortunately, **XGBoost** offers a better representation: **feature importance**.

Feature importance is about averaging the *gain* of each feature across all *splits* and all *trees*.

Then we can use the function `xgb.plot.importance`.

```{r importanceFeature, fig.align='center', fig.height=5, fig.width=10}
# Get the feature real names
names <- dimnames(trainMatrix)[[2]]

# Compute feature importance matrix
importance_matrix <- xgb.importance(names, model = bst)

# Nice graph
xgb.plot.importance(importance_matrix[1:10, ])
```
> To make it understandable we first extract the column names from the `Matrix`.

Interpretation
--------------

In the feature importance above, we can see the 10 most important features.

This function gives a color to each bar. These colors represent groups of features. Basically a K-means clustering is applied to group the features by importance.

From here you can take several actions. For instance you can remove the less important features (a feature selection process), or go deeper into the interactions between the most important features and the labels.

Or you can just reason about why these features are so important (in the **Otto** challenge we can't go this way because there is not enough information).

Tree graph
----------

Feature importance gives you feature weight information but not the interactions between features.

The **XGBoost R** package has another useful function for that. Please, scroll to the right to see the tree.
```{r treeGraph, dpi=1500, fig.align='left'}
xgb.plot.tree(feature_names = names, model = bst, n_first_tree = 2)
```

We are just displaying the first two trees here. On simple models the first two trees may be enough. Here, it might not be the case. We can see from the size of the trees that the interactions between features are complicated. Besides, **XGBoost** generates `k` trees at each round for a `k`-class classification problem. Therefore the two trees illustrated here are trying to classify data into different classes.

Going deeper
============

There are 4 documents you may also be interested in:

* [xgboostPresentation.Rmd](https://github.com/dmlc/xgboost/blob/master/R-package/vignettes/xgboostPresentation.Rmd): general presentation
* [discoverYourData.Rmd](https://github.com/dmlc/xgboost/blob/master/R-package/vignettes/discoverYourData.Rmd): explaining feature analysis
* [Feature Importance Analysis with XGBoost in Tax audit](http://fr.slideshare.net/MichaelBENESTY/feature-importance-analysis-with-xgboost-in-tax-audit): use case
* [The Elements of Statistical Learning](http://statweb.stanford.edu/~tibs/ElemStatLearn/): very good book to have a good understanding of the model
xgboost-3.0.0/demo/multiclass_classification/000077500000000000000000000000001476511272400213675ustar00rootroot00000000000000xgboost-3.0.0/demo/multiclass_classification/README.md000066400000000000000000000004531476511272400226500ustar00rootroot00000000000000Demonstrating how to use XGBoost to accomplish a multi-class classification task on the [UCI Dermatology dataset](https://archive.ics.uci.edu/ml/datasets/Dermatology)

Make sure you have built the xgboost Python module in ../../python

1. Run runexp.sh
```bash
./runexp.sh
```

**R version**: please see `train.R`.
xgboost-3.0.0/demo/multiclass_classification/runexp.sh000077500000000000000000000004551476511272400232530ustar00rootroot00000000000000#!/bin/bash
if [ -f dermatology.data ]
then
    echo "use existing data to run multi-class classification"
else
    echo "getting data from uci, make sure you are connected to internet"
    wget https://archive.ics.uci.edu/ml/machine-learning-databases/dermatology/dermatology.data
fi
python train.py
xgboost-3.0.0/demo/multiclass_classification/train.R000066400000000000000000000030231476511272400226250ustar00rootroot00000000000000library(data.table)
library(xgboost)

if (!file.exists("./dermatology.data")) {
  download.file(
    "https://archive.ics.uci.edu/ml/machine-learning-databases/dermatology/dermatology.data",
    "dermatology.data",
    method = "curl"
  )
}

df <- fread("dermatology.data", sep = ",", header = FALSE)

df[, `:=`(V34 = as.integer(ifelse(V34 == "?", 0L, V34)),
          V35 = V35 - 1L)]

idx <- sample(nrow(df), size = round(0.7 * nrow(df)), replace = FALSE)

train <- df[idx, ]
test <- df[-idx, ]

train_x <- train[, 1:34]
train_y <- train[, V35]

test_x <- test[, 1:34]
test_y <- test[, V35]

xg_train <- xgb.DMatrix(data = as.matrix(train_x), label = train_y)
xg_test <- xgb.DMatrix(as.matrix(test_x), label = test_y)

params <- list(
  objective = 'multi:softmax',
  num_class = 6,
  max_depth = 6,
  nthread = 4,
  eta = 0.1
)

watchlist <- list(train = xg_train, test = xg_test)

bst <- xgb.train(
  params = params,
  data = xg_train,
  watchlist = watchlist,
  nrounds = 5
)

pred <- predict(bst, xg_test)
error_rate <- sum(pred != test_y) / length(test_y)
print(paste("Test error using softmax =", error_rate))

# do the same thing again, but output probabilities
params$objective <- 'multi:softprob'
bst <- xgb.train(params, xg_train, nrounds = 5, watchlist)

pred_prob <- predict(bst, xg_test)
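# With multi:softprob, predict() returns one flat vector of length
# nrow(test) * num_class; reshape it row-wise so that each row holds the class
# probabilities of one sample.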
pred_mat <- matrix(pred_prob, ncol = 6, byrow = TRUE)
# validation
# rowSums(pred_mat)

pred_label <- apply(pred_mat, 1, which.max) - 1L
error_rate <- sum(pred_label != test_y) / length(test_y)
print(paste("Test error using softprob =", error_rate))
xgboost-3.0.0/demo/multiclass_classification/train.py000077500000000000000000000030211476511272400230570ustar00rootroot00000000000000#!/usr/bin/python
from __future__ import division

import numpy as np

import xgboost as xgb

# labels need to be in the range 0 to num_class - 1
data = np.loadtxt('./dermatology.data', delimiter=',',
                  converters={33: lambda x: int(x == '?'), 34: lambda x: int(x) - 1})
sz = data.shape

train = data[:int(sz[0] * 0.7), :]
test = data[int(sz[0] * 0.7):, :]

train_X = train[:, :33]
train_Y = train[:, 34]

test_X = test[:, :33]
test_Y = test[:, 34]

xg_train = xgb.DMatrix(train_X, label=train_Y)
xg_test = xgb.DMatrix(test_X, label=test_Y)
# setup parameters for xgboost
param = {}
# use softmax multi-class classification
param['objective'] = 'multi:softmax'
param['eta'] = 0.1
param['max_depth'] = 6
param['nthread'] = 4
param['num_class'] = 6

watchlist = [(xg_train, 'train'), (xg_test, 'test')]
num_round = 5
bst = xgb.train(param, xg_train, num_round, watchlist)
# get prediction
pred = bst.predict(xg_test)
error_rate = np.sum(pred != test_Y) / test_Y.shape[0]
print('Test error using softmax = {}'.format(error_rate))

# do the same thing again, but output probabilities
param['objective'] = 'multi:softprob'
bst = xgb.train(param, xg_train, num_round, watchlist)
# Note: this convention has been changed since xgboost-unity
# get prediction; this is a 1D array, reshape it to (ndata, nclass)
pred_prob = bst.predict(xg_test).reshape(test_Y.shape[0], 6)
pred_label = np.argmax(pred_prob, axis=1)
error_rate = np.sum(pred_label != test_Y) / test_Y.shape[0]
print('Test error using softprob = {}'.format(error_rate))
xgboost-3.0.0/demo/nvflare/000077500000000000000000000000001476511272400155715ustar00rootroot00000000000000xgboost-3.0.0/demo/nvflare/.gitignore000066400000000000000000000000101476511272400175500ustar00rootroot00000000000000!config
xgboost-3.0.0/demo/nvflare/README.md000066400000000000000000000010251476511272400170460ustar00rootroot00000000000000# Experimental Support of Federated XGBoost using NVFlare

This directory contains a demo of Federated Learning using [NVFlare](https://nvidia.github.io/NVFlare/).

## Horizontal Federated XGBoost

For horizontal federated learning using XGBoost (data is split row-wise), check out the `horizontal` directory (see the [README](horizontal/README.md)).

## Vertical Federated XGBoost

For vertical federated learning using XGBoost (data is split column-wise), check out the `vertical` directory (see the [README](vertical/README.md)).
xgboost-3.0.0/demo/nvflare/config/000077500000000000000000000000001476511272400170365ustar00rootroot00000000000000xgboost-3.0.0/demo/nvflare/config/config_fed_client.json000066400000000000000000000007661476511272400233630ustar00rootroot00000000000000{ "format_version": 2, "executors": [ { "tasks": [ "train" ], "executor": { "path": "trainer.XGBoostTrainer", "args": { "server_address": "localhost:9091", "world_size": 2, "server_cert_path": "server-cert.pem", "client_key_path": "client-key.pem", "client_cert_path": "client-cert.pem", "use_gpus": false } } } ], "task_result_filters": [], "task_data_filters": [] } xgboost-3.0.0/demo/nvflare/config/config_fed_server.json000066400000000000000000000007271476511272400234100ustar00rootroot00000000000000{ "format_version": 2, "server": { "heart_beat_timeout": 600 }, "task_data_filters": [], "task_result_filters": [], "workflows": [ { "id": "server_workflow", "path": "controller.XGBoostController", "args": { "port": 9091, "world_size": 2, "server_key_path": "server-key.pem", "server_cert_path": "server-cert.pem", "client_cert_path": "client-cert.pem" } } ], "components": [] } xgboost-3.0.0/demo/nvflare/horizontal/000077500000000000000000000000001476511272400177625ustar00rootroot00000000000000xgboost-3.0.0/demo/nvflare/horizontal/README.md000066400000000000000000000056121476511272400212450ustar00rootroot00000000000000# Experimental Support of Horizontal Federated XGBoost using NVFlare This directory contains a demo of Horizontal Federated Learning using [NVFlare](https://nvidia.github.io/NVFlare/). ## Training with CPU only To run the demo, first build XGBoost with the federated learning plugin enabled (see the [README](../../../plugin/federated/README.md)). Install NVFlare: ```shell pip install nvflare ``` Prepare the data: ```shell ./prepare_data.sh ``` Start the NVFlare federated server: ```shell /tmp/nvflare/poc/server/startup/start.sh ``` In another terminal, start the first worker: ```shell /tmp/nvflare/poc/site-1/startup/start.sh ``` And the second worker: ```shell /tmp/nvflare/poc/site-2/startup/start.sh ``` Then start the admin CLI: ```shell /tmp/nvflare/poc/admin/startup/fl_admin.sh ``` In the admin CLI, run the following command: ```shell submit_job horizontal-xgboost ``` Make a note of the job id: ```console Submitted job: 28309e77-a7c5-45e6-b2bc-c2e3655122d8 ``` On both workers, you should see train and eval losses printed: ```console [10:45:41] [0] eval-logloss:0.22646 train-logloss:0.23316 [10:45:41] [1] eval-logloss:0.13776 train-logloss:0.13654 [10:45:41] [2] eval-logloss:0.08036 train-logloss:0.08243 [10:45:41] [3] eval-logloss:0.05830 train-logloss:0.05645 [10:45:41] [4] eval-logloss:0.03825 train-logloss:0.04148 [10:45:41] [5] eval-logloss:0.02660 train-logloss:0.02958 [10:45:41] [6] eval-logloss:0.01386 train-logloss:0.01918 [10:45:41] [7] eval-logloss:0.01018 train-logloss:0.01331 [10:45:41] [8] eval-logloss:0.00847 train-logloss:0.01112 [10:45:41] [9] eval-logloss:0.00691 train-logloss:0.00662 [10:45:41] [10] eval-logloss:0.00543 train-logloss:0.00503 [10:45:41] [11] eval-logloss:0.00445 train-logloss:0.00420 [10:45:41] [12] eval-logloss:0.00336 train-logloss:0.00355 [10:45:41] [13] eval-logloss:0.00277 train-logloss:0.00280 [10:45:41] [14] eval-logloss:0.00252 train-logloss:0.00244 [10:45:41] [15] eval-logloss:0.00177 train-logloss:0.00193 [10:45:41] [16] eval-logloss:0.00156 train-logloss:0.00161 [10:45:41] [17] eval-logloss:0.00135 train-logloss:0.00142 [10:45:41] [18] eval-logloss:0.00123 train-logloss:0.00125 [10:45:41] 
[19] eval-logloss:0.00106 train-logloss:0.00107
```

Once the training finishes, the model file should be written into
`/tmp/nvflare/poc/site-1/${job_id}/test.model.json` and `/tmp/nvflare/poc/site-2/${job_id}/test.model.json`
respectively, where `job_id` is the UUID printed out when we ran `submit_job`.

Finally, shut down everything from the admin CLI, using `admin` as password:
```shell
shutdown client
shutdown server
```

## Training with GPUs

To run the demo with federated learning on GPUs, make sure your machine has at least 2 GPUs.
Build XGBoost with the federated learning plugin enabled along with CUDA
(see the [README](../../../plugin/federated/README.md)).

Modify `../config/config_fed_client.json` and set `use_gpus` to `true`, then repeat the steps above.
xgboost-3.0.0/demo/nvflare/horizontal/custom/000077500000000000000000000000001476511272400212745ustar00rootroot00000000000000xgboost-3.0.0/demo/nvflare/horizontal/custom/controller.py000066400000000000000000000050331476511272400240320ustar00rootroot00000000000000"""
Example of training controller with NVFlare
===========================================
"""

import multiprocessing

from nvflare.apis.client import Client
from nvflare.apis.fl_context import FLContext
from nvflare.apis.impl.controller import Controller, Task
from nvflare.apis.shareable import Shareable
from nvflare.apis.signal import Signal
from trainer import SupportedTasks

import xgboost.federated


class XGBoostController(Controller):
    def __init__(self, port: int, world_size: int, server_key_path: str,
                 server_cert_path: str, client_cert_path: str):
        """Controller for federated XGBoost.

        Args:
            port: the port for the gRPC server to listen on.
            world_size: the number of sites.
            server_key_path: the path to the server key file.
            server_cert_path: the path to the server certificate file.
            client_cert_path: the path to the client certificate file.
""" super().__init__() self._port = port self._world_size = world_size self._server_key_path = server_key_path self._server_cert_path = server_cert_path self._client_cert_path = client_cert_path self._server = None def start_controller(self, fl_ctx: FLContext): self._server = multiprocessing.Process( target=xgboost.federated.run_federated_server, args=(self._port, self._world_size, self._server_key_path, self._server_cert_path, self._client_cert_path)) self._server.start() def stop_controller(self, fl_ctx: FLContext): if self._server: self._server.terminate() def process_result_of_unknown_task(self, client: Client, task_name: str, client_task_id: str, result: Shareable, fl_ctx: FLContext): self.log_warning(fl_ctx, f"Unknown task: {task_name} from client {client.name}.") def control_flow(self, abort_signal: Signal, fl_ctx: FLContext): self.log_info(fl_ctx, "XGBoost training control flow started.") if abort_signal.triggered: return task = Task(name=SupportedTasks.TRAIN, data=Shareable()) self.broadcast_and_wait( task=task, min_responses=self._world_size, fl_ctx=fl_ctx, wait_time_after_min_received=1, abort_signal=abort_signal, ) if abort_signal.triggered: return self.log_info(fl_ctx, "XGBoost training control flow finished.") xgboost-3.0.0/demo/nvflare/horizontal/custom/trainer.py000066400000000000000000000075621476511272400233240ustar00rootroot00000000000000import os from nvflare.apis.executor import Executor from nvflare.apis.fl_constant import FLContextKey, ReturnCode from nvflare.apis.fl_context import FLContext from nvflare.apis.shareable import Shareable, make_reply from nvflare.apis.signal import Signal import xgboost as xgb from xgboost import callback class SupportedTasks(object): TRAIN = "train" class XGBoostTrainer(Executor): def __init__(self, server_address: str, world_size: int, server_cert_path: str, client_key_path: str, client_cert_path: str, use_gpus: bool): """Trainer for federated XGBoost. Args: server_address: address for the gRPC server to connect to. world_size: the number of sites. server_cert_path: the path to the server certificate file. client_key_path: the path to the client key file. client_cert_path: the path to the client certificate file. """ super().__init__() self._server_address = server_address self._world_size = world_size self._server_cert_path = server_cert_path self._client_key_path = client_key_path self._client_cert_path = client_cert_path self._use_gpus = use_gpus def execute(self, task_name: str, shareable: Shareable, fl_ctx: FLContext, abort_signal: Signal) -> Shareable: self.log_info(fl_ctx, f"Executing {task_name}") try: if task_name == SupportedTasks.TRAIN: self._do_training(fl_ctx) return make_reply(ReturnCode.OK) else: self.log_error(fl_ctx, f"{task_name} is not a supported task.") return make_reply(ReturnCode.TASK_UNKNOWN) except BaseException as e: self.log_exception(fl_ctx, f"Task {task_name} failed. 
Exception: {e.__str__()}") return make_reply(ReturnCode.EXECUTION_EXCEPTION) def _do_training(self, fl_ctx: FLContext): client_name = fl_ctx.get_prop(FLContextKey.CLIENT_NAME) rank = int(client_name.split('-')[1]) - 1 communicator_env = { 'xgboost_communicator': 'federated', 'federated_server_address': self._server_address, 'federated_world_size': self._world_size, 'federated_rank': rank, 'federated_server_cert': self._server_cert_path, 'federated_client_key': self._client_key_path, 'federated_client_cert': self._client_cert_path } with xgb.collective.CommunicatorContext(**communicator_env): # Load file, file will not be sharded in federated mode. dtrain = xgb.DMatrix('agaricus.txt.train?format=libsvm') dtest = xgb.DMatrix('agaricus.txt.test?format=libsvm') # Specify parameters via map, definition are same as c++ version param = {'tree_method': 'hist', 'max_depth': 2, 'eta': 1, 'objective': 'binary:logistic'} if self._use_gpus: self.log_info(fl_ctx, f'Training with GPU {rank}') param['device'] = f"cuda:{rank}" # Specify validations set to watch performance watchlist = [(dtest, 'eval'), (dtrain, 'train')] num_round = 20 # Run training, all the features in training API is available. bst = xgb.train(param, dtrain, num_round, evals=watchlist, early_stopping_rounds=2, verbose_eval=False, callbacks=[callback.EvaluationMonitor(rank=rank)]) # Save the model. workspace = fl_ctx.get_prop(FLContextKey.WORKSPACE_OBJECT) run_number = fl_ctx.get_prop(FLContextKey.CURRENT_RUN) run_dir = workspace.get_run_dir(run_number) bst.save_model(os.path.join(run_dir, "test.model.json")) xgb.collective.communicator_print("Finished training\n") xgboost-3.0.0/demo/nvflare/horizontal/prepare_data.sh000077500000000000000000000022021476511272400227440ustar00rootroot00000000000000#!/bin/bash set -e rm -fr ./agaricus* ./*.pem /tmp/nvflare world_size=2 # Generate server and client certificates. openssl req -x509 -newkey rsa:2048 -days 7 -nodes -keyout server-key.pem -out server-cert.pem -subj "/C=US/CN=localhost" openssl req -x509 -newkey rsa:2048 -days 7 -nodes -keyout client-key.pem -out client-cert.pem -subj "/C=US/CN=localhost" # Split train and test files manually to simulate a federated environment. split -n l/${world_size} --numeric-suffixes=1 -a 1 ../../data/agaricus.txt.train agaricus.txt.train-site- split -n l/${world_size} --numeric-suffixes=1 -a 1 ../../data/agaricus.txt.test agaricus.txt.test-site- nvflare poc -n 2 --prepare mkdir -p /tmp/nvflare/poc/admin/transfer/horizontal-xgboost cp -fr ../config custom /tmp/nvflare/poc/admin/transfer/horizontal-xgboost cp server-*.pem client-cert.pem /tmp/nvflare/poc/server/ for (( site=1; site<=world_size; site++ )); do cp server-cert.pem client-*.pem /tmp/nvflare/poc/site-"$site"/ cp agaricus.txt.train-site-"$site" /tmp/nvflare/poc/site-"$site"/agaricus.txt.train cp agaricus.txt.test-site-"$site" /tmp/nvflare/poc/site-"$site"/agaricus.txt.test done xgboost-3.0.0/demo/nvflare/vertical/000077500000000000000000000000001476511272400174025ustar00rootroot00000000000000xgboost-3.0.0/demo/nvflare/vertical/README.md000066400000000000000000000032741476511272400206670ustar00rootroot00000000000000# Experimental Support of Vertical Federated XGBoost using NVFlare This directory contains a demo of Vertical Federated Learning using [NVFlare](https://nvidia.github.io/NVFlare/). ## Training with CPU only To run the demo, first build XGBoost with the federated learning plugin enabled (see the [README](../../../plugin/federated/README.md)). 
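For reference, a typical build invocation looks like the following (a sketch, assuming
the `PLUGIN_FEDERATED` CMake option described in the plugin README):

```shell
cmake -B build -S . -DPLUGIN_FEDERATED=ON
cmake --build build -j$(nproc)
```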
Install NVFlare:
```shell
pip install nvflare
```

Prepare the data (note that this step will download the HIGGS dataset, which is 2.6GB
compressed and 7.5GB uncompressed, so make sure you have enough disk space and are on
a fast internet connection):
```shell
./prepare_data.sh
```

Start the NVFlare federated server:
```shell
/tmp/nvflare/poc/server/startup/start.sh
```

In another terminal, start the first worker:
```shell
/tmp/nvflare/poc/site-1/startup/start.sh
```

And the second worker:
```shell
/tmp/nvflare/poc/site-2/startup/start.sh
```

Then start the admin CLI:
```shell
/tmp/nvflare/poc/admin/startup/fl_admin.sh
```

In the admin CLI, run the following command:
```shell
submit_job vertical-xgboost
```

Once the training finishes, the model file should be written into
`/tmp/nvflare/poc/site-1/run_1/higgs.model.federated.vertical.json` and
`/tmp/nvflare/poc/site-2/run_1/higgs.model.federated.vertical.json` respectively.

Finally, shut down everything from the admin CLI, using `admin` as password:
```shell
shutdown client
shutdown server
```

## Training with GPUs

To run the demo with vertical federated learning on GPUs, make sure your machine has at
least 2 GPUs. Build XGBoost with the federated learning plugin enabled along with CUDA
(see the [README](../../../plugin/federated/README.md)).

Modify `../config/config_fed_client.json` and set `use_gpus` to `true`, then repeat the
steps above.
xgboost-3.0.0/demo/nvflare/vertical/custom/000077500000000000000000000000001476511272400207145ustar00rootroot00000000000000xgboost-3.0.0/demo/nvflare/vertical/custom/controller.py000066400000000000000000000050331476511272400234520ustar00rootroot00000000000000"""
Example of training controller with NVFlare
===========================================
"""

import multiprocessing

from nvflare.apis.client import Client
from nvflare.apis.fl_context import FLContext
from nvflare.apis.impl.controller import Controller, Task
from nvflare.apis.shareable import Shareable
from nvflare.apis.signal import Signal
from trainer import SupportedTasks

import xgboost.federated


class XGBoostController(Controller):
    def __init__(self, port: int, world_size: int, server_key_path: str,
                 server_cert_path: str, client_cert_path: str):
        """Controller for federated XGBoost.

        Args:
            port: the port for the gRPC server to listen on.
            world_size: the number of sites.
            server_key_path: the path to the server key file.
            server_cert_path: the path to the server certificate file.
            client_cert_path: the path to the client certificate file.
""" super().__init__() self._port = port self._world_size = world_size self._server_key_path = server_key_path self._server_cert_path = server_cert_path self._client_cert_path = client_cert_path self._server = None def start_controller(self, fl_ctx: FLContext): self._server = multiprocessing.Process( target=xgboost.federated.run_federated_server, args=(self._port, self._world_size, self._server_key_path, self._server_cert_path, self._client_cert_path)) self._server.start() def stop_controller(self, fl_ctx: FLContext): if self._server: self._server.terminate() def process_result_of_unknown_task(self, client: Client, task_name: str, client_task_id: str, result: Shareable, fl_ctx: FLContext): self.log_warning(fl_ctx, f"Unknown task: {task_name} from client {client.name}.") def control_flow(self, abort_signal: Signal, fl_ctx: FLContext): self.log_info(fl_ctx, "XGBoost training control flow started.") if abort_signal.triggered: return task = Task(name=SupportedTasks.TRAIN, data=Shareable()) self.broadcast_and_wait( task=task, min_responses=self._world_size, fl_ctx=fl_ctx, wait_time_after_min_received=1, abort_signal=abort_signal, ) if abort_signal.triggered: return self.log_info(fl_ctx, "XGBoost training control flow finished.") xgboost-3.0.0/demo/nvflare/vertical/custom/trainer.py000066400000000000000000000101621476511272400227320ustar00rootroot00000000000000import os from nvflare.apis.executor import Executor from nvflare.apis.fl_constant import FLContextKey, ReturnCode from nvflare.apis.fl_context import FLContext from nvflare.apis.shareable import Shareable, make_reply from nvflare.apis.signal import Signal import xgboost as xgb from xgboost import callback class SupportedTasks(object): TRAIN = "train" class XGBoostTrainer(Executor): def __init__(self, server_address: str, world_size: int, server_cert_path: str, client_key_path: str, client_cert_path: str, use_gpus: bool): """Trainer for federated XGBoost. Args: server_address: address for the gRPC server to connect to. world_size: the number of sites. server_cert_path: the path to the server certificate file. client_key_path: the path to the client key file. client_cert_path: the path to the client certificate file. """ super().__init__() self._server_address = server_address self._world_size = world_size self._server_cert_path = server_cert_path self._client_key_path = client_key_path self._client_cert_path = client_cert_path self._use_gpus = use_gpus def execute(self, task_name: str, shareable: Shareable, fl_ctx: FLContext, abort_signal: Signal) -> Shareable: self.log_info(fl_ctx, f"Executing {task_name}") try: if task_name == SupportedTasks.TRAIN: self._do_training(fl_ctx) return make_reply(ReturnCode.OK) else: self.log_error(fl_ctx, f"{task_name} is not a supported task.") return make_reply(ReturnCode.TASK_UNKNOWN) except BaseException as e: self.log_exception(fl_ctx, f"Task {task_name} failed. 
Exception: {e.__str__()}") return make_reply(ReturnCode.EXECUTION_EXCEPTION) def _do_training(self, fl_ctx: FLContext): client_name = fl_ctx.get_prop(FLContextKey.CLIENT_NAME) rank = int(client_name.split('-')[1]) - 1 communicator_env = { 'xgboost_communicator': 'federated', 'federated_server_address': self._server_address, 'federated_world_size': self._world_size, 'federated_rank': rank, 'federated_server_cert': self._server_cert_path, 'federated_client_key': self._client_key_path, 'federated_client_cert': self._client_cert_path } with xgb.collective.CommunicatorContext(**communicator_env): # Load file, file will not be sharded in federated mode. if rank == 0: label = '&label_column=0' else: label = '' dtrain = xgb.DMatrix(f'higgs.train.csv?format=csv{label}', data_split_mode=1) dtest = xgb.DMatrix(f'higgs.test.csv?format=csv{label}', data_split_mode=1) # specify parameters via map param = { 'validate_parameters': True, 'eta': 0.1, 'gamma': 1.0, 'max_depth': 8, 'min_child_weight': 100, 'tree_method': 'hist', 'grow_policy': 'depthwise', 'objective': 'binary:logistic', 'eval_metric': 'auc', } if self._use_gpus: self.log_info(fl_ctx, f'Training with GPU {rank}') param['device'] = f"cuda:{rank}" # specify validations set to watch performance watchlist = [(dtest, "eval"), (dtrain, "train")] # number of boosting rounds num_round = 10 bst = xgb.train(param, dtrain, num_round, evals=watchlist, early_stopping_rounds=2) # Save the model. workspace = fl_ctx.get_prop(FLContextKey.WORKSPACE_OBJECT) run_number = fl_ctx.get_prop(FLContextKey.CURRENT_RUN) run_dir = workspace.get_run_dir(run_number) bst.save_model(os.path.join(run_dir, "higgs.model.federated.vertical.json")) xgb.collective.communicator_print("Finished training\n") xgboost-3.0.0/demo/nvflare/vertical/prepare_data.sh000077500000000000000000000043351476511272400223750ustar00rootroot00000000000000#!/bin/bash set -e rm -fr ./*.pem /tmp/nvflare/poc world_size=2 # Generate server and client certificates. openssl req -x509 -newkey rsa:2048 -days 7 -nodes -keyout server-key.pem -out server-cert.pem -subj "/C=US/CN=localhost" openssl req -x509 -newkey rsa:2048 -days 7 -nodes -keyout client-key.pem -out client-cert.pem -subj "/C=US/CN=localhost" # Download HIGGS dataset. if [ -f "HIGGS.csv" ]; then echo "HIGGS.csv exists, skipping download." else echo "Downloading HIGGS dataset." wget https://archive.ics.uci.edu/ml/machine-learning-databases/00280/HIGGS.csv.gz gunzip HIGGS.csv.gz fi # Split into train/test. if [[ -f higgs.train.csv && -f higgs.test.csv ]]; then echo "higgs.train.csv and higgs.test.csv exist, skipping split." else echo "Splitting HIGGS dataset into train/test." head -n 10450000 HIGGS.csv > higgs.train.csv tail -n 550000 HIGGS.csv > higgs.test.csv fi # Split train and test files by column to simulate a federated environment. site_files=(higgs.{train,test}.csv-site-*) if [ ${#site_files[@]} -eq $((world_size*2)) ]; then echo "Site files exist, skipping split." else echo "Splitting train/test into site files." 
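  # HIGGS has 28 feature columns plus a leading label column; each site receives a
  # contiguous slice of feature columns, and site 1 additionally keeps the label column.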
  total_cols=28  # plus label
  cols=$((total_cols/world_size))
  echo "Columns per site: $cols"
  for (( site=1; site<=world_size; site++ )); do
    if (( site == 1 )); then
      start=$((cols*(site-1)+1))
    else
      start=$((cols*(site-1)+2))
    fi
    if (( site == world_size )); then
      end=$((total_cols+1))
    else
      end=$((cols*site+1))
    fi
    echo "Site $site, columns $start-$end"
    cut -d, -f${start}-${end} higgs.train.csv > higgs.train.csv-site-"${site}"
    cut -d, -f${start}-${end} higgs.test.csv > higgs.test.csv-site-"${site}"
  done
fi

nvflare poc -n 2 --prepare
mkdir -p /tmp/nvflare/poc/admin/transfer/vertical-xgboost
cp -fr ../config custom /tmp/nvflare/poc/admin/transfer/vertical-xgboost
cp server-*.pem client-cert.pem /tmp/nvflare/poc/server/
for (( site=1; site<=world_size; site++ )); do
  cp server-cert.pem client-*.pem /tmp/nvflare/poc/site-"${site}"/
  ln -s "${PWD}"/higgs.train.csv-site-"${site}" /tmp/nvflare/poc/site-"${site}"/higgs.train.csv
  ln -s "${PWD}"/higgs.test.csv-site-"${site}" /tmp/nvflare/poc/site-"${site}"/higgs.test.csv
done
xgboost-3.0.0/demo/rmm_plugin/000077500000000000000000000000001476511272400163055ustar00rootroot00000000000000xgboost-3.0.0/demo/rmm_plugin/README.rst000066400000000000000000000072441476511272400200030ustar00rootroot00000000000000Using XGBoost with RAPIDS Memory Manager (RMM) plugin
=====================================================

The `RAPIDS Memory Manager (RMM) `__ library provides a collection of efficient memory
allocators for NVIDIA GPUs. It is now possible to use XGBoost with memory allocators
provided by RMM, by enabling the RMM integration plugin.

The demos in this directory highlight one RMM allocator in particular: **the pool
sub-allocator**. This allocator addresses the slow speed of ``cudaMalloc()`` by
allocating a large chunk of memory upfront. Subsequent allocations will draw from the
pool of already allocated memory and thus avoid the overhead of calling ``cudaMalloc()``
directly. See `these GTC talk slides `_ for more details.

Before running the demos, ensure that XGBoost is compiled with the RMM plugin enabled.
To do this, run CMake with option ``-DPLUGIN_RMM=ON`` (``-DUSE_CUDA=ON`` also required):

.. code-block:: sh

  cmake -B build -S . -DUSE_CUDA=ON -DUSE_NCCL=ON -DPLUGIN_RMM=ON
  cmake --build build -j$(nproc)

CMake will attempt to locate the RMM library in your build environment. You may choose
to build RMM from source, or install it using the Conda package manager. If CMake cannot
find RMM, you should specify the location of RMM with the CMake prefix:

.. code-block:: sh

  # If using Conda:
  cmake -B build -S . -DUSE_CUDA=ON -DUSE_NCCL=ON -DPLUGIN_RMM=ON -DCMAKE_PREFIX_PATH=$CONDA_PREFIX
  # If using RMM installed at a custom location
  cmake -B build -S . -DUSE_CUDA=ON -DUSE_NCCL=ON -DPLUGIN_RMM=ON -DCMAKE_PREFIX_PATH=/path/to/rmm

********************************
Informing XGBoost about RMM pool
********************************

When XGBoost is compiled with RMM, most of the large allocations will go through RMM
allocators, but some small allocations in performance-critical areas use a different
caching allocator so that we can have better control over memory allocation behavior.
Users can override this behavior and force the use of RMM for all allocations by setting
the global configuration ``use_rmm``:

.. code-block:: python

  with xgb.config_context(use_rmm=True):
      clf = xgb.XGBClassifier(tree_method="hist", device="cuda")

Depending on the choice of memory pool size and the type of the allocator, this can add
more consistency to memory usage, at the cost of a slight performance impact.

*******************************
No Device Ordinal for Multi-GPU
*******************************

Since with RMM the memory pool is pre-allocated on a specific device, changing the CUDA
device ordinal in XGBoost can result in the memory error ``cudaErrorIllegalAddress``.
Use the ``CUDA_VISIBLE_DEVICES`` environment variable instead of the ``device="cuda:1"``
parameter for selecting the device. For distributed training, distributed computing
frameworks like ``dask-cuda`` are responsible for device management. For Scala-Spark,
see :doc:`/jvm/xgboost4j_spark_gpu_tutorial` for more info.

************************
Memory Over-Subscription
************************

.. warning::

  This feature is still experimental and is under active development.

The newer NVIDIA platforms like `Grace-Hopper `__ use `NVLink-C2C `__, which allows the
CPU and GPU to have a coherent memory model. Users can use the
``SamHeadroomMemoryResource`` in the latest RMM to utilize system memory for storing
data. This can help XGBoost utilize memory from the host for GPU computation, but it may
reduce performance due to slower CPU memory speed and page migration overhead.
xgboost-3.0.0/demo/rmm_plugin/rmm_mgpu_with_dask.py000066400000000000000000000026301476511272400225400ustar00rootroot00000000000000"""
Using rmm with Dask
===================
"""

import dask
import dask.array  # make the dask.array namespace available
from dask.distributed import Client
from dask_cuda import LocalCUDACluster
from sklearn.datasets import make_classification

import xgboost as xgb


def main(client):
    # Optionally force XGBoost to use RMM for all GPU memory allocation, see ./README.md
    # xgb.set_config(use_rmm=True)

    X, y = make_classification(n_samples=10000, n_informative=5, n_classes=3)
    # In practice one should prefer loading the data with dask collections instead of
    # using `from_array`.
    X = dask.array.from_array(X)
    y = dask.array.from_array(y)
    dtrain = xgb.dask.DaskDMatrix(client, X, label=y)

    params = {
        "max_depth": 8,
        "eta": 0.01,
        "objective": "multi:softprob",
        "num_class": 3,
        "tree_method": "hist",
        "eval_metric": "merror",
        "device": "cuda",
    }
    output = xgb.dask.train(
        client, params, dtrain, num_boost_round=100, evals=[(dtrain, "train")]
    )
    bst = output["booster"]
    history = output["history"]
    for i, e in enumerate(history["train"]["merror"]):
        print(f"[{i}] train-merror: {e}")


if __name__ == "__main__":
    # To use the RMM pool allocator with a GPU Dask cluster, just add the rmm_pool_size
    # option to the LocalCUDACluster constructor.
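    # Note: rmm_pool_size is applied per worker, so each GPU worker pre-allocates
    # its own 2GB RMM memory pool at startup.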
with LocalCUDACluster(rmm_pool_size="2GB") as cluster: with Client(cluster) as client: main(client) xgboost-3.0.0/demo/rmm_plugin/rmm_singlegpu.py000066400000000000000000000013521476511272400215300ustar00rootroot00000000000000""" Using rmm on a single node device ================================= """ import rmm from sklearn.datasets import make_classification import xgboost as xgb # Initialize RMM pool allocator rmm.reinitialize(pool_allocator=True) # Optionally force XGBoost to use RMM for all GPU memory allocation, see ./README.md # xgb.set_config(use_rmm=True) X, y = make_classification(n_samples=10000, n_informative=5, n_classes=3) dtrain = xgb.DMatrix(X, label=y) params = { "max_depth": 8, "eta": 0.01, "objective": "multi:softprob", "num_class": 3, "tree_method": "hist", "device": "cuda", } # XGBoost will automatically use the RMM pool allocator bst = xgb.train(params, dtrain, num_boost_round=100, evals=[(dtrain, "train")]) xgboost-3.0.0/dev/000077500000000000000000000000001476511272400137665ustar00rootroot00000000000000xgboost-3.0.0/dev/prepare_jvm_release.py000066400000000000000000000172001476511272400203520ustar00rootroot00000000000000import argparse import errno import glob import os import re import shutil import subprocess import sys import tempfile import zipfile from contextlib import contextmanager from urllib.request import urlretrieve def normpath(path): """Normalize UNIX path to a native path.""" normalized = os.path.join(*path.split("/")) if os.path.isabs(path): return os.path.abspath("/") + normalized else: return normalized def cp(source, target): source = normpath(source) target = normpath(target) print("cp {0} {1}".format(source, target)) shutil.copy(source, target) def maybe_makedirs(path): path = normpath(path) print("mkdir -p " + path) try: os.makedirs(path) except OSError as e: if e.errno != errno.EEXIST: raise @contextmanager def cd(path): path = normpath(path) cwd = os.getcwd() os.chdir(path) print("cd " + path) try: yield path finally: os.chdir(cwd) def run(command, **kwargs): print(command) subprocess.check_call(command, shell=True, **kwargs) def get_current_commit_hash(): out = subprocess.check_output(["git", "rev-parse", "HEAD"]) return out.decode().split("\n")[0] def get_current_git_branch(): out = subprocess.check_output(["git", "log", "-n", "1", "--pretty=%d", "HEAD"]) m = re.search(r"release_[0-9\.]+", out.decode()) if not m: raise ValueError("Expected branch name of form release_xxx") return m.group(0) def retrieve(url, filename=None): print(f"{url} -> {filename}") return urlretrieve(url, filename) def main(): parser = argparse.ArgumentParser() parser.add_argument( "--release-version", type=str, required=True, help="Version of the release being prepared", ) args = parser.parse_args() version = args.release_version commit_hash = get_current_commit_hash() git_branch = get_current_git_branch() print( f"Using commit {commit_hash} of branch {git_branch}" ) with cd("jvm-packages/"): print("====copying pure-Python tracker====") for use_cuda in [True, False]: xgboost4j = "xgboost4j-gpu" if use_cuda else "xgboost4j" cp( "../python-package/xgboost/tracker.py", f"{xgboost4j}/src/main/resources", ) print("====copying resources for testing====") with cd("../demo/CLI/regression"): run(f"{sys.executable} mapfeat.py") run(f"{sys.executable} mknfold.py machine.txt 1") for use_cuda in [True, False]: xgboost4j = "xgboost4j-gpu" if use_cuda else "xgboost4j" xgboost4j_spark = "xgboost4j-spark-gpu" if use_cuda else "xgboost4j-spark" 
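            # Copy the demo datasets into each module's test resources so that the
            # JVM unit tests can run without regenerating the data.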
maybe_makedirs(f"{xgboost4j}/src/test/resources") maybe_makedirs(f"{xgboost4j_spark}/src/test/resources") for file in glob.glob("../demo/data/agaricus.*"): cp(file, f"{xgboost4j}/src/test/resources") cp(file, f"{xgboost4j_spark}/src/test/resources") for file in glob.glob("../demo/CLI/regression/machine.txt.t*"): cp(file, f"{xgboost4j_spark}/src/test/resources") print("====Creating directories to hold native binaries====") for os_ident, arch in [ ("linux", "x86_64"), ("linux", "aarch64"), ("windows", "x86_64"), ("macos", "x86_64"), ("macos", "aarch64"), ]: output_dir = f"xgboost4j/src/main/resources/lib/{os_ident}/{arch}" maybe_makedirs(output_dir) for os_ident, arch in [("linux", "x86_64")]: output_dir = f"xgboost4j-gpu/src/main/resources/lib/{os_ident}/{arch}" maybe_makedirs(output_dir) print("====Downloading native binaries from CI====") nightly_bucket_prefix = ( "https://s3-us-west-2.amazonaws.com/xgboost-nightly-builds" ) maven_repo_prefix = ( "https://s3-us-west-2.amazonaws.com/xgboost-maven-repo/release/ml/dmlc" ) retrieve( url=f"{nightly_bucket_prefix}/{git_branch}/libxgboost4j/xgboost4j_{commit_hash}.dll", filename="xgboost4j/src/main/resources/lib/windows/x86_64/xgboost4j.dll", ) retrieve( url=f"{nightly_bucket_prefix}/{git_branch}/libxgboost4j/libxgboost4j_linux_x86_64_{commit_hash}.so", filename="xgboost4j/src/main/resources/lib/linux/x86_64/libxgboost4j.so", ) retrieve( url=f"{nightly_bucket_prefix}/{git_branch}/libxgboost4j/libxgboost4j_linux_arm64_{commit_hash}.so", filename="xgboost4j/src/main/resources/lib/linux/aarch64/libxgboost4j.so", ) retrieve( url=f"{nightly_bucket_prefix}/{git_branch}/libxgboost4j/libxgboost4j_{commit_hash}.dylib", filename="xgboost4j/src/main/resources/lib/macos/x86_64/libxgboost4j.dylib", ) retrieve( url=f"{nightly_bucket_prefix}/{git_branch}/libxgboost4j/libxgboost4j_m1_{commit_hash}.dylib", filename="xgboost4j/src/main/resources/lib/macos/aarch64/libxgboost4j.dylib", ) with tempfile.TemporaryDirectory() as tempdir: # libxgboost4j.so for Linux x86_64, GPU support zip_path = os.path.join(tempdir, "xgboost4j-gpu_2.12.jar") extract_dir = os.path.join(tempdir, "xgboost4j-gpu") retrieve( url=f"{maven_repo_prefix}/xgboost4j-gpu_2.12/{version}/" f"xgboost4j-gpu_2.12-{version}.jar", filename=zip_path, ) os.mkdir(extract_dir) with zipfile.ZipFile(zip_path, "r") as t: t.extractall(extract_dir) cp( os.path.join(extract_dir, "lib", "linux", "x86_64", "libxgboost4j.so"), "xgboost4j-gpu/src/main/resources/lib/linux/x86_64/libxgboost4j.so", ) print("====Next Steps====") print("1. Gain upload right to Maven Central repo.") print("1-1. Sign up for a JIRA account at Sonatype: ") print( "1-2. File a JIRA ticket: " "https://issues.sonatype.org/secure/CreateIssue.jspa?issuetype=21&pid=10134. Example: " "https://issues.sonatype.org/browse/OSSRH-67724" ) print( "2. Store the Sonatype credentials in .m2/settings.xml. See insturctions in " "https://central.sonatype.org/publish/publish-maven/" ) print( "3. Now on a Linux machine, run the following to build Scala 2.12 artifacts. " "Make sure to use an Internet connection with fast upload speed:" ) print( " # Skip native build, since we have all needed native binaries from CI\n" " GPG_TTY=$(tty) mvn deploy -Prelease -DskipTests -Dskip.native.build=true" ) print( "4. Log into https://oss.sonatype.org/. On the left menu panel, click Staging " "Repositories. Visit the URL https://oss.sonatype.org/content/repositories/mldmlc-xxxx " "to inspect the staged JAR files. 
Finally, press Release button to publish the " "artifacts to the Maven Central repository. The top-level metapackage should be " "named xgboost-jvm_2.12." ) print( "5. Remove the Scala 2.12 artifacts and build Scala 2.13 artifacts:\n" " python ops/script/change_scala_version.py --scala-version 2.13 --purge-artifacts\n" " GPG_TTY=$(tty) mvn deploy -Prelease -DskipTests -Dskip.native.build=true" ) print( "6. Go to https://oss.sonatype.org/ to release the Scala 2.13 artifacts. " "The top-level metapackage should be named xgboost-jvm_2.13." ) if __name__ == "__main__": main() xgboost-3.0.0/dev/query_contributors.py000066400000000000000000000055311476511272400203260ustar00rootroot00000000000000"""Query list of all contributors and reviewers in a release""" import json import re import sys import requests from sh.contrib import git if len(sys.argv) != 5: print(f'Usage: {sys.argv[0]} [starting commit/tag] [ending commit/tag] [GitHub username] ' + '[GitHub password]') sys.exit(1) from_commit = sys.argv[1] to_commit = sys.argv[2] username = sys.argv[3] password = sys.argv[4] contributors = set() reviewers = set() def paginate_request(url, callback): r = requests.get(url, auth=(username, password)) assert r.status_code == requests.codes.ok, f'Code: {r.status_code}, Text: {r.text}' callback(json.loads(r.text)) while 'next' in r.links: r = requests.get(r.links['next']['url'], auth=(username, password)) callback(json.loads(r.text)) for line in git.log(f'{from_commit}..{to_commit}', '--pretty=format:%s', '--reverse', '--first-parent'): m = re.search('\(#([0-9]+)\)$', line.rstrip()) if m: pr_id = m.group(1) print(f'PR #{pr_id}') def process_commit_list(commit_list): try: contributors.update([commit['author']['login'] for commit in commit_list]) except TypeError: prompt = (f'Error fetching contributors for PR #{pr_id}. 
Enter it manually, ' + 'as a space-separated list: ') contributors.update(str(input(prompt)).split(' ')) def process_review_list(review_list): reviewers.update([x['user']['login'] for x in review_list]) def process_comment_list(comment_list): reviewers.update([x['user']['login'] for x in comment_list]) paginate_request(f'https://api.github.com/repos/dmlc/xgboost/pulls/{pr_id}/commits', process_commit_list) paginate_request(f'https://api.github.com/repos/dmlc/xgboost/pulls/{pr_id}/reviews', process_review_list) paginate_request(f'https://api.github.com/repos/dmlc/xgboost/issues/{pr_id}/comments', process_comment_list) print('Contributors: ', end='') for x in sorted(contributors): r = requests.get(f'https://api.github.com/users/{x}', auth=(username, password)) assert r.status_code == requests.codes.ok, f'Code: {r.status_code}, Text: {r.text}' user_info = json.loads(r.text) if user_info['name'] is None: print(f"@{x}, ", end='') else: print(f"{user_info['name']} (@{x}), ", end='') print('\nReviewers: ', end='') for x in sorted(reviewers): r = requests.get(f'https://api.github.com/users/{x}', auth=(username, password)) assert r.status_code == requests.codes.ok, f'Code: {r.status_code}, Text: {r.text}' user_info = json.loads(r.text) if user_info['name'] is None: print(f"@{x}, ", end='') else: print(f"{user_info['name']} (@{x}), ", end='') print('') xgboost-3.0.0/dmlc-core/000077500000000000000000000000001476511272400150555ustar00rootroot00000000000000xgboost-3.0.0/doc/000077500000000000000000000000001476511272400137555ustar00rootroot00000000000000xgboost-3.0.0/doc/.gitignore000066400000000000000000000001451476511272400157450ustar00rootroot00000000000000html latex *.sh _* sg_execution_times.rst doxygen parser.py *.pyc web-data # generated by doxygen tmpxgboost-3.0.0/doc/Doxyfile.in000066400000000000000000003401401476511272400160720ustar00rootroot00000000000000# Doxyfile 1.9.1 # This file describes the settings to be used by the documentation system # doxygen (www.doxygen.org) for a project. # # All text after a double hash (##) is considered a comment and is placed in # front of the TAG it is preceding. # # All text after a single hash (#) is considered a comment and will be ignored. # The format is: # TAG = value [value, ...] # For lists, items can also be appended using: # TAG += value [value, ...] # Values that contain spaces should be placed between quotes (\" \"). #--------------------------------------------------------------------------- # Project related configuration options #--------------------------------------------------------------------------- # This tag specifies the encoding used for all characters in the configuration # file that follow. The default is UTF-8 which is also the encoding used for all # text before the first occurrence of this tag. Doxygen uses libiconv (or the # iconv built into libc) for the transcoding. See # https://www.gnu.org/software/libiconv/ for the list of possible encodings. # The default value is: UTF-8. DOXYFILE_ENCODING = UTF-8 # The PROJECT_NAME tag is a single word (or a sequence of words surrounded by # double-quotes, unless you are using Doxywizard) that should identify the # project for which the documentation is generated. This name is used in the # title of most generated pages and in a few other places. # The default value is: My Project. PROJECT_NAME = xgboost # The PROJECT_NUMBER tag can be used to enter a project or revision number. This # could be handy for archiving the generated documentation or if some version # control system is used. 
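# Note: the @...@ markers in this file (e.g. @XGBOOST_VERSION@ below and
# @PROJECT_SOURCE_DIR@ later on) are presumably substituted by CMake's
# configure_file() when the documentation build is configured; this is an
# inference from the .in suffix, not something the file itself states.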
PROJECT_NUMBER = @XGBOOST_VERSION@ # Using the PROJECT_BRIEF tag one can provide an optional one line description # for a project that appears at the top of each page and should give viewer a # quick idea about the purpose of the project. Keep the description short. PROJECT_BRIEF = # With the PROJECT_LOGO tag one can specify a logo or an icon that is included # in the documentation. The maximum height of the logo should not exceed 55 # pixels and the maximum width should not exceed 200 pixels. Doxygen will copy # the logo to the output directory. PROJECT_LOGO = # The OUTPUT_DIRECTORY tag is used to specify the (relative or absolute) path # into which the generated documentation will be written. If a relative path is # entered, it will be relative to the location where doxygen was started. If # left blank the current directory will be used. OUTPUT_DIRECTORY = @PROJECT_BINARY_DIR@/doc_doxygen # If the CREATE_SUBDIRS tag is set to YES then doxygen will create 4096 sub- # directories (in 2 levels) under the output directory of each output format and # will distribute the generated files over these directories. Enabling this # option can be useful when feeding doxygen a huge amount of source files, where # putting all generated files in the same directory would otherwise causes # performance problems for the file system. # The default value is: NO. CREATE_SUBDIRS = NO # If the ALLOW_UNICODE_NAMES tag is set to YES, doxygen will allow non-ASCII # characters to appear in the names of generated files. If set to NO, non-ASCII # characters will be escaped, for example _xE3_x81_x84 will be used for Unicode # U+3044. # The default value is: NO. ALLOW_UNICODE_NAMES = NO # The OUTPUT_LANGUAGE tag is used to specify the language in which all # documentation generated by doxygen is written. Doxygen will use this # information to generate all constant output in the proper language. # Possible values are: Afrikaans, Arabic, Armenian, Brazilian, Catalan, Chinese, # Chinese-Traditional, Croatian, Czech, Danish, Dutch, English (United States), # Esperanto, Farsi (Persian), Finnish, French, German, Greek, Hungarian, # Indonesian, Italian, Japanese, Japanese-en (Japanese with English messages), # Korean, Korean-en (Korean with English messages), Latvian, Lithuanian, # Macedonian, Norwegian, Persian (Farsi), Polish, Portuguese, Romanian, Russian, # Serbian, Serbian-Cyrillic, Slovak, Slovene, Spanish, Swedish, Turkish, # Ukrainian and Vietnamese. # The default value is: English. OUTPUT_LANGUAGE = English # The OUTPUT_TEXT_DIRECTION tag is used to specify the direction in which all # documentation generated by doxygen is written. Doxygen will use this # information to generate all generated output in the proper direction. # Possible values are: None, LTR, RTL and Context. # The default value is: None. OUTPUT_TEXT_DIRECTION = None # If the BRIEF_MEMBER_DESC tag is set to YES, doxygen will include brief member # descriptions after the members that are listed in the file and class # documentation (similar to Javadoc). Set to NO to disable this. # The default value is: YES. BRIEF_MEMBER_DESC = YES # If the REPEAT_BRIEF tag is set to YES, doxygen will prepend the brief # description of a member or function before the detailed description # # Note: If both HIDE_UNDOC_MEMBERS and BRIEF_MEMBER_DESC are set to NO, the # brief descriptions will be completely suppressed. # The default value is: YES. 
REPEAT_BRIEF = YES # This tag implements a quasi-intelligent brief description abbreviator that is # used to form the text in various listings. Each string in this list, if found # as the leading text of the brief description, will be stripped from the text # and the result, after processing the whole list, is used as the annotated # text. Otherwise, the brief description is used as-is. If left blank, the # following values are used ($name is automatically replaced with the name of # the entity):The $name class, The $name widget, The $name file, is, provides, # specifies, contains, represents, a, an and the. ABBREVIATE_BRIEF = # If the ALWAYS_DETAILED_SEC and REPEAT_BRIEF tags are both set to YES then # doxygen will generate a detailed section even if there is only a brief # description. # The default value is: NO. ALWAYS_DETAILED_SEC = NO # If the INLINE_INHERITED_MEMB tag is set to YES, doxygen will show all # inherited members of a class in the documentation of that class as if those # members were ordinary class members. Constructors, destructors and assignment # operators of the base classes will not be shown. # The default value is: NO. INLINE_INHERITED_MEMB = NO # If the FULL_PATH_NAMES tag is set to YES, doxygen will prepend the full path # before files name in the file list and in the header files. If set to NO the # shortest path that makes the file name unique will be used # The default value is: YES. FULL_PATH_NAMES = YES # The STRIP_FROM_PATH tag can be used to strip a user-defined part of the path. # Stripping is only done if one of the specified strings matches the left-hand # part of the path. The tag can be used to show relative paths in the file list. # If left blank the directory from which doxygen is run is used as the path to # strip. # # Note that you can specify absolute paths here, but also relative paths, which # will be relative from the directory where doxygen is started. # This tag requires that the tag FULL_PATH_NAMES is set to YES. STRIP_FROM_PATH = # The STRIP_FROM_INC_PATH tag can be used to strip a user-defined part of the # path mentioned in the documentation of a class, which tells the reader which # header file to include in order to use a class. If left blank only the name of # the header file containing the class definition is used. Otherwise one should # specify the list of include paths that are normally passed to the compiler # using the -I flag. STRIP_FROM_INC_PATH = # If the SHORT_NAMES tag is set to YES, doxygen will generate much shorter (but # less readable) file names. This can be useful is your file systems doesn't # support long names like on DOS, Mac, or CD-ROM. # The default value is: NO. SHORT_NAMES = NO # If the JAVADOC_AUTOBRIEF tag is set to YES then doxygen will interpret the # first line (until the first dot) of a Javadoc-style comment as the brief # description. If set to NO, the Javadoc-style will behave just like regular Qt- # style comments (thus requiring an explicit @brief command for a brief # description.) # The default value is: NO. JAVADOC_AUTOBRIEF = NO # If the JAVADOC_BANNER tag is set to YES then doxygen will interpret a line # such as # /*************** # as being the beginning of a Javadoc-style comment "banner". If set to NO, the # Javadoc-style will behave just like regular comments and it will not be # interpreted by doxygen. # The default value is: NO. 
JAVADOC_BANNER = NO # If the QT_AUTOBRIEF tag is set to YES then doxygen will interpret the first # line (until the first dot) of a Qt-style comment as the brief description. If # set to NO, the Qt-style will behave just like regular Qt-style comments (thus # requiring an explicit \brief command for a brief description.) # The default value is: NO. QT_AUTOBRIEF = NO # The MULTILINE_CPP_IS_BRIEF tag can be set to YES to make doxygen treat a # multi-line C++ special comment block (i.e. a block of //! or /// comments) as # a brief description. This used to be the default behavior. The new default is # to treat a multi-line C++ comment block as a detailed description. Set this # tag to YES if you prefer the old behavior instead. # # Note that setting this tag to YES also means that rational rose comments are # not recognized any more. # The default value is: NO. MULTILINE_CPP_IS_BRIEF = NO # By default Python docstrings are displayed as preformatted text and doxygen's # special commands cannot be used. By setting PYTHON_DOCSTRING to NO the # doxygen's special commands can be used and the contents of the docstring # documentation blocks is shown as doxygen documentation. # The default value is: YES. PYTHON_DOCSTRING = YES # If the INHERIT_DOCS tag is set to YES then an undocumented member inherits the # documentation from any documented member that it re-implements. # The default value is: YES. INHERIT_DOCS = YES # If the SEPARATE_MEMBER_PAGES tag is set to YES then doxygen will produce a new # page for each member. If set to NO, the documentation of a member will be part # of the file/class/namespace that contains it. # The default value is: NO. SEPARATE_MEMBER_PAGES = NO # The TAB_SIZE tag can be used to set the number of spaces in a tab. Doxygen # uses this value to replace tabs by spaces in code fragments. # Minimum value: 1, maximum value: 16, default value: 4. TAB_SIZE = 8 # This tag can be used to specify a number of aliases that act as commands in # the documentation. An alias has the form: # name=value # For example adding # "sideeffect=@par Side Effects:\n" # will allow you to put the command \sideeffect (or @sideeffect) in the # documentation, which will result in a user-defined paragraph with heading # "Side Effects:". You can put \n's in the value part of an alias to insert # newlines (in the resulting output). You can put ^^ in the value part of an # alias to insert a newline as if a physical newline was in the original file. # When you need a literal { or } or , in the value part of an alias you have to # escape them by means of a backslash (\), this can lead to conflicts with the # commands \{ and \} for these it is advised to use the version @{ and @} or use # a double escape (\\{ and \\}) ALIASES = # Set the OPTIMIZE_OUTPUT_FOR_C tag to YES if your project consists of C sources # only. Doxygen will then generate output that is more tailored for C. For # instance, some of the names that are used will be different. The list of all # members will be omitted, etc. # The default value is: NO. OPTIMIZE_OUTPUT_FOR_C = NO # Set the OPTIMIZE_OUTPUT_JAVA tag to YES if your project consists of Java or # Python sources only. Doxygen will then generate output that is more tailored # for that language. For instance, namespaces will be presented as packages, # qualified scopes will look different, etc. # The default value is: NO. OPTIMIZE_OUTPUT_JAVA = NO # Set the OPTIMIZE_FOR_FORTRAN tag to YES if your project consists of Fortran # sources. 
Doxygen will then generate output that is tailored for Fortran. # The default value is: NO. OPTIMIZE_FOR_FORTRAN = NO # Set the OPTIMIZE_OUTPUT_VHDL tag to YES if your project consists of VHDL # sources. Doxygen will then generate output that is tailored for VHDL. # The default value is: NO. OPTIMIZE_OUTPUT_VHDL = NO # Set the OPTIMIZE_OUTPUT_SLICE tag to YES if your project consists of Slice # sources only. Doxygen will then generate output that is more tailored for that # language. For instance, namespaces will be presented as modules, types will be # separated into more groups, etc. # The default value is: NO. OPTIMIZE_OUTPUT_SLICE = NO # Doxygen selects the parser to use depending on the extension of the files it # parses. With this tag you can assign which parser to use for a given # extension. Doxygen has a built-in mapping, but you can override or extend it # using this tag. The format is ext=language, where ext is a file extension, and # language is one of the parsers supported by doxygen: IDL, Java, JavaScript, # Csharp (C#), C, C++, D, PHP, md (Markdown), Objective-C, Python, Slice, VHDL, # Fortran (fixed format Fortran: FortranFixed, free formatted Fortran: # FortranFree, unknown formatted Fortran: Fortran. In the later case the parser # tries to guess whether the code is fixed or free formatted code, this is the # default for Fortran type files). For instance to make doxygen treat .inc files # as Fortran files (default is PHP), and .f files as C (default is Fortran), # use: inc=Fortran f=C. # # Note: For files without extension you can use no_extension as a placeholder. # # Note that for custom extensions you also need to set FILE_PATTERNS otherwise # the files are not read by doxygen. When specifying no_extension you should add # * to the FILE_PATTERNS. # # Note see also the list of default file extension mappings. EXTENSION_MAPPING = # If the MARKDOWN_SUPPORT tag is enabled then doxygen pre-processes all comments # according to the Markdown format, which allows for more readable # documentation. See https://daringfireball.net/projects/markdown/ for details. # The output of markdown processing is further processed by doxygen, so you can # mix doxygen, HTML, and XML commands with Markdown formatting. Disable only in # case of backward compatibilities issues. # The default value is: YES. MARKDOWN_SUPPORT = YES # When the TOC_INCLUDE_HEADINGS tag is set to a non-zero value, all headings up # to that level are automatically included in the table of contents, even if # they do not have an id attribute. # Note: This feature currently applies only to Markdown headings. # Minimum value: 0, maximum value: 99, default value: 5. # This tag requires that the tag MARKDOWN_SUPPORT is set to YES. TOC_INCLUDE_HEADINGS = 5 # When enabled doxygen tries to link words that correspond to documented # classes, or namespaces to their corresponding documentation. Such a link can # be prevented in individual cases by putting a % sign in front of the word or # globally by setting AUTOLINK_SUPPORT to NO. # The default value is: YES. AUTOLINK_SUPPORT = YES # If you use STL classes (i.e. std::string, std::vector, etc.) but do not want # to include (a tag file for) the STL sources as input, then you should set this # tag to YES in order to let doxygen match functions declarations and # definitions whose arguments contain STL classes (e.g. func(std::string); # versus func(std::string) {}). This also make the inheritance and collaboration # diagrams that involve STL classes more complete and accurate. 
# The default value is: NO. BUILTIN_STL_SUPPORT = NO # If you use Microsoft's C++/CLI language, you should set this option to YES to # enable parsing support. # The default value is: NO. CPP_CLI_SUPPORT = NO # Set the SIP_SUPPORT tag to YES if your project consists of sip (see: # https://www.riverbankcomputing.com/software/sip/intro) sources only. Doxygen # will parse them like normal C++ but will assume all classes use public instead # of private inheritance when no explicit protection keyword is present. # The default value is: NO. SIP_SUPPORT = NO # For Microsoft's IDL there are propget and propput attributes to indicate # getter and setter methods for a property. Setting this option to YES will make # doxygen to replace the get and set methods by a property in the documentation. # This will only work if the methods are indeed getting or setting a simple # type. If this is not the case, or you want to show the methods anyway, you # should set this option to NO. # The default value is: YES. IDL_PROPERTY_SUPPORT = YES # If member grouping is used in the documentation and the DISTRIBUTE_GROUP_DOC # tag is set to YES then doxygen will reuse the documentation of the first # member in the group (if any) for the other members of the group. By default # all members of a group must be documented explicitly. # The default value is: NO. DISTRIBUTE_GROUP_DOC = NO # If one adds a struct or class to a group and this option is enabled, then also # any nested class or struct is added to the same group. By default this option # is disabled and one has to add nested compounds explicitly via \ingroup. # The default value is: NO. GROUP_NESTED_COMPOUNDS = NO # Set the SUBGROUPING tag to YES to allow class member groups of the same type # (for instance a group of public functions) to be put as a subgroup of that # type (e.g. under the Public Functions section). Set it to NO to prevent # subgrouping. Alternatively, this can be done per class using the # \nosubgrouping command. # The default value is: YES. SUBGROUPING = YES # When the INLINE_GROUPED_CLASSES tag is set to YES, classes, structs and unions # are shown inside the group in which they are included (e.g. using \ingroup) # instead of on a separate page (for HTML and Man pages) or section (for LaTeX # and RTF). # # Note that this feature does not work in combination with # SEPARATE_MEMBER_PAGES. # The default value is: NO. INLINE_GROUPED_CLASSES = NO # When the INLINE_SIMPLE_STRUCTS tag is set to YES, structs, classes, and unions # with only public data fields or simple typedef fields will be shown inline in # the documentation of the scope in which they are defined (i.e. file, # namespace, or group documentation), provided this scope is documented. If set # to NO, structs, classes, and unions are shown on a separate page (for HTML and # Man pages) or section (for LaTeX and RTF). # The default value is: NO. INLINE_SIMPLE_STRUCTS = NO # When TYPEDEF_HIDES_STRUCT tag is enabled, a typedef of a struct, union, or # enum is documented as struct, union, or enum with the name of the typedef. So # typedef struct TypeS {} TypeT, will appear in the documentation as a struct # with name TypeT. When disabled the typedef will appear as a member of a file, # namespace, or class. And the struct will be named TypeS. This can typically be # useful for C code in case the coding convention dictates that all compound # types are typedef'ed and only the typedef is referenced, never the tag name. # The default value is: NO. 
TYPEDEF_HIDES_STRUCT = NO # The size of the symbol lookup cache can be set using LOOKUP_CACHE_SIZE. This # cache is used to resolve symbols given their name and scope. Since this can be # an expensive process and often the same symbol appears multiple times in the # code, doxygen keeps a cache of pre-resolved symbols. If the cache is too small # doxygen will become slower. If the cache is too large, memory is wasted. The # cache size is given by this formula: 2^(16+LOOKUP_CACHE_SIZE). The valid range # is 0..9, the default is 0, corresponding to a cache size of 2^16=65536 # symbols. At the end of a run doxygen will report the cache usage and suggest # the optimal cache size from a speed point of view. # Minimum value: 0, maximum value: 9, default value: 0. LOOKUP_CACHE_SIZE = 0 # The NUM_PROC_THREADS specifies the number threads doxygen is allowed to use # during processing. When set to 0 doxygen will based this on the number of # cores available in the system. You can set it explicitly to a value larger # than 0 to get more control over the balance between CPU load and processing # speed. At this moment only the input processing can be done using multiple # threads. Since this is still an experimental feature the default is set to 1, # which efficively disables parallel processing. Please report any issues you # encounter. Generating dot graphs in parallel is controlled by the # DOT_NUM_THREADS setting. # Minimum value: 0, maximum value: 32, default value: 1. NUM_PROC_THREADS = 1 #--------------------------------------------------------------------------- # Build related configuration options #--------------------------------------------------------------------------- # If the EXTRACT_ALL tag is set to YES, doxygen will assume all entities in # documentation are documented, even if no documentation was available. Private # class members and static file members will be hidden unless the # EXTRACT_PRIVATE respectively EXTRACT_STATIC tags are set to YES. # Note: This will also disable the warnings about undocumented members that are # normally produced when WARNINGS is set to YES. # The default value is: NO. EXTRACT_ALL = YES # If the EXTRACT_PRIVATE tag is set to YES, all private members of a class will # be included in the documentation. # The default value is: NO. EXTRACT_PRIVATE = NO # If the EXTRACT_PRIV_VIRTUAL tag is set to YES, documented private virtual # methods of a class will be included in the documentation. # The default value is: NO. EXTRACT_PRIV_VIRTUAL = NO # If the EXTRACT_PACKAGE tag is set to YES, all members with package or internal # scope will be included in the documentation. # The default value is: NO. EXTRACT_PACKAGE = NO # If the EXTRACT_STATIC tag is set to YES, all static members of a file will be # included in the documentation. # The default value is: NO. EXTRACT_STATIC = NO # If the EXTRACT_LOCAL_CLASSES tag is set to YES, classes (and structs) defined # locally in source files will be included in the documentation. If set to NO, # only classes defined in header files are included. Does not have any effect # for Java sources. # The default value is: YES. EXTRACT_LOCAL_CLASSES = YES # This flag is only useful for Objective-C code. If set to YES, local methods, # which are defined in the implementation section but not in the interface are # included in the documentation. If set to NO, only methods in the interface are # included. # The default value is: NO. 
EXTRACT_LOCAL_METHODS = NO # If this flag is set to YES, the members of anonymous namespaces will be # extracted and appear in the documentation as a namespace called # 'anonymous_namespace{file}', where file will be replaced with the base name of # the file that contains the anonymous namespace. By default anonymous namespace # are hidden. # The default value is: NO. EXTRACT_ANON_NSPACES = NO # If this flag is set to YES, the name of an unnamed parameter in a declaration # will be determined by the corresponding definition. By default unnamed # parameters remain unnamed in the output. # The default value is: YES. RESOLVE_UNNAMED_PARAMS = YES # If the HIDE_UNDOC_MEMBERS tag is set to YES, doxygen will hide all # undocumented members inside documented classes or files. If set to NO these # members will be included in the various overviews, but no documentation # section is generated. This option has no effect if EXTRACT_ALL is enabled. # The default value is: NO. HIDE_UNDOC_MEMBERS = NO # If the HIDE_UNDOC_CLASSES tag is set to YES, doxygen will hide all # undocumented classes that are normally visible in the class hierarchy. If set # to NO, these classes will be included in the various overviews. This option # has no effect if EXTRACT_ALL is enabled. # The default value is: NO. HIDE_UNDOC_CLASSES = NO # If the HIDE_FRIEND_COMPOUNDS tag is set to YES, doxygen will hide all friend # declarations. If set to NO, these declarations will be included in the # documentation. # The default value is: NO. HIDE_FRIEND_COMPOUNDS = NO # If the HIDE_IN_BODY_DOCS tag is set to YES, doxygen will hide any # documentation blocks found inside the body of a function. If set to NO, these # blocks will be appended to the function's detailed documentation block. # The default value is: NO. HIDE_IN_BODY_DOCS = NO # The INTERNAL_DOCS tag determines if documentation that is typed after a # \internal command is included. If the tag is set to NO then the documentation # will be excluded. Set it to YES to include the internal documentation. # The default value is: NO. INTERNAL_DOCS = NO # With the correct setting of option CASE_SENSE_NAMES doxygen will better be # able to match the capabilities of the underlying filesystem. In case the # filesystem is case sensitive (i.e. it supports files in the same directory # whose names only differ in casing), the option must be set to YES to properly # deal with such files in case they appear in the input. For filesystems that # are not case sensitive the option should be be set to NO to properly deal with # output files written for symbols that only differ in casing, such as for two # classes, one named CLASS and the other named Class, and to also support # references to files without having to specify the exact matching casing. On # Windows (including Cygwin) and MacOS, users should typically set this option # to NO, whereas on Linux or other Unix flavors it should typically be set to # YES. # The default value is: system dependent. CASE_SENSE_NAMES = YES # If the HIDE_SCOPE_NAMES tag is set to NO then doxygen will show members with # their full class and namespace scopes in the documentation. If set to YES, the # scope will be hidden. # The default value is: NO. HIDE_SCOPE_NAMES = NO # If the HIDE_COMPOUND_REFERENCE tag is set to NO (default) then doxygen will # append additional text to a page's title, such as Class Reference. If set to # YES the compound reference will be hidden. # The default value is: NO. 
HIDE_COMPOUND_REFERENCE= NO # If the SHOW_INCLUDE_FILES tag is set to YES then doxygen will put a list of # the files that are included by a file in the documentation of that file. # The default value is: YES. SHOW_INCLUDE_FILES = YES # If the SHOW_GROUPED_MEMB_INC tag is set to YES then Doxygen will add for each # grouped member an include statement to the documentation, telling the reader # which file to include in order to use the member. # The default value is: NO. SHOW_GROUPED_MEMB_INC = NO # If the FORCE_LOCAL_INCLUDES tag is set to YES then doxygen will list include # files with double quotes in the documentation rather than with sharp brackets. # The default value is: NO. FORCE_LOCAL_INCLUDES = NO # If the INLINE_INFO tag is set to YES then a tag [inline] is inserted in the # documentation for inline members. # The default value is: YES. INLINE_INFO = YES # If the SORT_MEMBER_DOCS tag is set to YES then doxygen will sort the # (detailed) documentation of file and class members alphabetically by member # name. If set to NO, the members will appear in declaration order. # The default value is: YES. SORT_MEMBER_DOCS = YES # If the SORT_BRIEF_DOCS tag is set to YES then doxygen will sort the brief # descriptions of file, namespace and class members alphabetically by member # name. If set to NO, the members will appear in declaration order. Note that # this will also influence the order of the classes in the class list. # The default value is: NO. SORT_BRIEF_DOCS = NO # If the SORT_MEMBERS_CTORS_1ST tag is set to YES then doxygen will sort the # (brief and detailed) documentation of class members so that constructors and # destructors are listed first. If set to NO the constructors will appear in the # respective orders defined by SORT_BRIEF_DOCS and SORT_MEMBER_DOCS. # Note: If SORT_BRIEF_DOCS is set to NO this option is ignored for sorting brief # member documentation. # Note: If SORT_MEMBER_DOCS is set to NO this option is ignored for sorting # detailed member documentation. # The default value is: NO. SORT_MEMBERS_CTORS_1ST = NO # If the SORT_GROUP_NAMES tag is set to YES then doxygen will sort the hierarchy # of group names into alphabetical order. If set to NO the group names will # appear in their defined order. # The default value is: NO. SORT_GROUP_NAMES = NO # If the SORT_BY_SCOPE_NAME tag is set to YES, the class list will be sorted by # fully-qualified names, including namespaces. If set to NO, the class list will # be sorted only by class name, not including the namespace part. # Note: This option is not very useful if HIDE_SCOPE_NAMES is set to YES. # Note: This option applies only to the class list, not to the alphabetical # list. # The default value is: NO. SORT_BY_SCOPE_NAME = NO # If the STRICT_PROTO_MATCHING option is enabled and doxygen fails to do proper # type resolution of all parameters of a function it will reject a match between # the prototype and the implementation of a member function even if there is # only one candidate or it is obvious which candidate to choose by doing a # simple string match. By disabling STRICT_PROTO_MATCHING doxygen will still # accept a match between prototype and implementation in such cases. # The default value is: NO. STRICT_PROTO_MATCHING = NO # The GENERATE_TODOLIST tag can be used to enable (YES) or disable (NO) the todo # list. This list is created by putting \todo commands in the documentation. # The default value is: YES. 
GENERATE_TODOLIST = YES # The GENERATE_TESTLIST tag can be used to enable (YES) or disable (NO) the test # list. This list is created by putting \test commands in the documentation. # The default value is: YES. GENERATE_TESTLIST = YES # The GENERATE_BUGLIST tag can be used to enable (YES) or disable (NO) the bug # list. This list is created by putting \bug commands in the documentation. # The default value is: YES. GENERATE_BUGLIST = YES # The GENERATE_DEPRECATEDLIST tag can be used to enable (YES) or disable (NO) # the deprecated list. This list is created by putting \deprecated commands in # the documentation. # The default value is: YES. GENERATE_DEPRECATEDLIST= YES # The ENABLED_SECTIONS tag can be used to enable conditional documentation # sections, marked by \if ... \endif and \cond # ... \endcond blocks. ENABLED_SECTIONS = # The MAX_INITIALIZER_LINES tag determines the maximum number of lines that the # initial value of a variable or macro / define can have for it to appear in the # documentation. If the initializer consists of more lines than specified here # it will be hidden. Use a value of 0 to hide initializers completely. The # appearance of the value of individual variables and macros / defines can be # controlled using \showinitializer or \hideinitializer command in the # documentation regardless of this setting. # Minimum value: 0, maximum value: 10000, default value: 30. MAX_INITIALIZER_LINES = 30 # Set the SHOW_USED_FILES tag to NO to disable the list of files generated at # the bottom of the documentation of classes and structs. If set to YES, the # list will mention the files that were used to generate the documentation. # The default value is: YES. SHOW_USED_FILES = YES # Set the SHOW_FILES tag to NO to disable the generation of the Files page. This # will remove the Files entry from the Quick Index and from the Folder Tree View # (if specified). # The default value is: YES. SHOW_FILES = YES # Set the SHOW_NAMESPACES tag to NO to disable the generation of the Namespaces # page. This will remove the Namespaces entry from the Quick Index and from the # Folder Tree View (if specified). # The default value is: YES. SHOW_NAMESPACES = YES # The FILE_VERSION_FILTER tag can be used to specify a program or script that # doxygen should invoke to get the current version for each file (typically from # the version control system). Doxygen will invoke the program by executing (via # popen()) the command command input-file, where command is the value of the # FILE_VERSION_FILTER tag, and input-file is the name of an input file provided # by doxygen. Whatever the program writes to standard output is used as the file # version. For an example see the documentation. FILE_VERSION_FILTER = # The LAYOUT_FILE tag can be used to specify a layout file which will be parsed # by doxygen. The layout file controls the global structure of the generated # output files in an output format independent way. To create the layout file # that represents doxygen's defaults, run doxygen with the -l option. You can # optionally specify a file name after the option, if omitted DoxygenLayout.xml # will be used as the name of the layout file. # # Note that if you run doxygen from a directory containing a file called # DoxygenLayout.xml, doxygen will parse it automatically even if the LAYOUT_FILE # tag is left empty. LAYOUT_FILE = # The CITE_BIB_FILES tag can be used to specify one or more bib files containing # the reference definitions. This must be a list of .bib files. 
The .bib # extension is automatically appended if omitted. This requires the bibtex tool # to be installed. See also https://en.wikipedia.org/wiki/BibTeX for more info. # For LaTeX the style of the bibliography can be controlled using # LATEX_BIB_STYLE. To use this feature you need bibtex and perl available in the # search path. See also \cite for info how to create references. CITE_BIB_FILES = #--------------------------------------------------------------------------- # Configuration options related to warning and progress messages #--------------------------------------------------------------------------- # The QUIET tag can be used to turn on/off the messages that are generated to # standard output by doxygen. If QUIET is set to YES this implies that the # messages are off. # The default value is: NO. QUIET = NO # The WARNINGS tag can be used to turn on/off the warning messages that are # generated to standard error (stderr) by doxygen. If WARNINGS is set to YES # this implies that the warnings are on. # # Tip: Turn warnings on while writing the documentation. # The default value is: YES. WARNINGS = YES # If the WARN_IF_UNDOCUMENTED tag is set to YES then doxygen will generate # warnings for undocumented members. If EXTRACT_ALL is set to YES then this flag # will automatically be disabled. # The default value is: YES. WARN_IF_UNDOCUMENTED = YES # If the WARN_IF_DOC_ERROR tag is set to YES, doxygen will generate warnings for # potential errors in the documentation, such as not documenting some parameters # in a documented function, or documenting parameters that don't exist or using # markup commands wrongly. # The default value is: YES. WARN_IF_DOC_ERROR = YES # This WARN_NO_PARAMDOC option can be enabled to get warnings for functions that # are documented, but have no documentation for their parameters or return # value. If set to NO, doxygen will only warn about wrong or incomplete # parameter documentation, but not about the absence of documentation. If # EXTRACT_ALL is set to YES then this flag will automatically be disabled. # The default value is: NO. WARN_NO_PARAMDOC = YES # If the WARN_AS_ERROR tag is set to YES then doxygen will immediately stop when # a warning is encountered. If the WARN_AS_ERROR tag is set to FAIL_ON_WARNINGS # then doxygen will continue running as if WARN_AS_ERROR tag is set to NO, but # at the end of the doxygen process doxygen will return with a non-zero status. # Possible values are: NO, YES and FAIL_ON_WARNINGS. # The default value is: NO. WARN_AS_ERROR = NO # The WARN_FORMAT tag determines the format of the warning messages that doxygen # can produce. The string should contain the $file, $line, and $text tags, which # will be replaced by the file and line number from which the warning originated # and the warning text. Optionally the format may contain $version, which will # be replaced by the version of the file (if it could be obtained via # FILE_VERSION_FILTER) # The default value is: $file:$line: $text. WARN_FORMAT = "$file:$line: $text" # The WARN_LOGFILE tag can be used to specify a file to which warning and error # messages should be written. If left blank the output is written to standard # error (stderr). WARN_LOGFILE = #--------------------------------------------------------------------------- # Configuration options related to the input files #--------------------------------------------------------------------------- # The INPUT tag is used to specify the files and/or directories that contain # documented source files. 
You may enter file names like myfile.cpp or # directories like /usr/src/myproject. Separate the files or directories with # spaces. See also FILE_PATTERNS and EXTENSION_MAPPING # Note: If this tag is empty the current directory is searched. INPUT = @PROJECT_SOURCE_DIR@/include # This tag can be used to specify the character encoding of the source files # that doxygen parses. Internally doxygen uses the UTF-8 encoding. Doxygen uses # libiconv (or the iconv built into libc) for the transcoding. See the libiconv # documentation (see: # https://www.gnu.org/software/libiconv/) for the list of possible encodings. # The default value is: UTF-8. INPUT_ENCODING = UTF-8 # If the value of the INPUT tag contains directories, you can use the # FILE_PATTERNS tag to specify one or more wildcard patterns (like *.cpp and # *.h) to filter out the source-files in the directories. # # Note that for custom extensions or not directly supported extensions you also # need to set EXTENSION_MAPPING for the extension otherwise the files are not # read by doxygen. # # Note the list of default checked file patterns might differ from the list of # default file extension mappings. # # If left blank the following patterns are tested:*.c, *.cc, *.cxx, *.cpp, # *.c++, *.java, *.ii, *.ixx, *.ipp, *.i++, *.inl, *.idl, *.ddl, *.odl, *.h, # *.hh, *.hxx, *.hpp, *.h++, *.cs, *.d, *.php, *.php4, *.php5, *.phtml, *.inc, # *.m, *.markdown, *.md, *.mm, *.dox (to be provided as doxygen C comment), # *.py, *.pyw, *.f90, *.f95, *.f03, *.f08, *.f18, *.f, *.for, *.vhd, *.vhdl, # *.ucf, *.qsf and *.ice. FILE_PATTERNS = *.h # The RECURSIVE tag can be used to specify whether or not subdirectories should # be searched for input files as well. # The default value is: NO. RECURSIVE = YES # The EXCLUDE tag can be used to specify files and/or directories that should be # excluded from the INPUT source files. This way you can easily exclude a # subdirectory from a directory tree whose root is specified with the INPUT tag. # # Note that relative paths are relative to the directory from which doxygen is # run. EXCLUDE = # The EXCLUDE_SYMLINKS tag can be used to select whether or not files or # directories that are symbolic links (a Unix file system feature) are excluded # from the input. # The default value is: NO. EXCLUDE_SYMLINKS = NO # If the value of the INPUT tag contains directories, you can use the # EXCLUDE_PATTERNS tag to specify one or more wildcard patterns to exclude # certain files from those directories. # # Note that the wildcards are matched against the file with absolute path, so to # exclude all test directories for example use the pattern */test/* EXCLUDE_PATTERNS = */test/* \ logging.h # The EXCLUDE_SYMBOLS tag can be used to specify one or more symbol names # (namespaces, classes, functions, etc.) that should be excluded from the # output. The symbol name can be a fully qualified name, a word, or if the # wildcard * is used, a substring. Examples: ANamespace, AClass, # AClass::ANamespace, ANamespace::*Test # # Note that the wildcards are matched against the file with absolute path, so to # exclude all test directories use the pattern */test/* EXCLUDE_SYMBOLS = # The EXAMPLE_PATH tag can be used to specify one or more files or directories # that contain example code fragments that are included (see the \include # command). 
EXAMPLE_PATH = @PROJECT_SOURCE_DIR@/demo/c-api/ # If the value of the EXAMPLE_PATH tag contains directories, you can use the # EXAMPLE_PATTERNS tag to specify one or more wildcard pattern (like *.cpp and # *.h) to filter out the source-files in the directories. If left blank all # files are included. EXAMPLE_PATTERNS = # If the EXAMPLE_RECURSIVE tag is set to YES then subdirectories will be # searched for input files to be used with the \include or \dontinclude commands # irrespective of the value of the RECURSIVE tag. # The default value is: NO. EXAMPLE_RECURSIVE = YES # The IMAGE_PATH tag can be used to specify one or more files or directories # that contain images that are to be included in the documentation (see the # \image command). IMAGE_PATH = # The INPUT_FILTER tag can be used to specify a program that doxygen should # invoke to filter for each input file. Doxygen will invoke the filter program # by executing (via popen()) the command: # # # # where is the value of the INPUT_FILTER tag, and is the # name of an input file. Doxygen will then use the output that the filter # program writes to standard output. If FILTER_PATTERNS is specified, this tag # will be ignored. # # Note that the filter must not add or remove lines; it is applied before the # code is scanned, but not when the output code is generated. If lines are added # or removed, the anchors will not be placed correctly. # # Note that for custom extensions or not directly supported extensions you also # need to set EXTENSION_MAPPING for the extension otherwise the files are not # properly processed by doxygen. INPUT_FILTER = # The FILTER_PATTERNS tag can be used to specify filters on a per file pattern # basis. Doxygen will compare the file name with each pattern and apply the # filter if there is a match. The filters are a list of the form: pattern=filter # (like *.cpp=my_cpp_filter). See INPUT_FILTER for further information on how # filters are used. If the FILTER_PATTERNS tag is empty or if none of the # patterns match the file name, INPUT_FILTER is applied. # # Note that for custom extensions or not directly supported extensions you also # need to set EXTENSION_MAPPING for the extension otherwise the files are not # properly processed by doxygen. FILTER_PATTERNS = # If the FILTER_SOURCE_FILES tag is set to YES, the input filter (if set using # INPUT_FILTER) will also be used to filter the input files that are used for # producing the source files to browse (i.e. when SOURCE_BROWSER is set to YES). # The default value is: NO. FILTER_SOURCE_FILES = NO # The FILTER_SOURCE_PATTERNS tag can be used to specify source filters per file # pattern. A pattern will override the setting for FILTER_PATTERN (if any) and # it is also possible to disable source filtering for a specific pattern using # *.ext= (so without naming a filter). # This tag requires that the tag FILTER_SOURCE_FILES is set to YES. FILTER_SOURCE_PATTERNS = # If the USE_MDFILE_AS_MAINPAGE tag refers to the name of a markdown file that # is part of the input, its contents will be placed on the main page # (index.html). This can be useful if you have a project on for instance GitHub # and want to reuse the introduction page also for the doxygen output. 
USE_MDFILE_AS_MAINPAGE = #--------------------------------------------------------------------------- # Configuration options related to source browsing #--------------------------------------------------------------------------- # If the SOURCE_BROWSER tag is set to YES then a list of source files will be # generated. Documented entities will be cross-referenced with these sources. # # Note: To get rid of all source code in the generated output, make sure that # also VERBATIM_HEADERS is set to NO. # The default value is: NO. SOURCE_BROWSER = NO # Setting the INLINE_SOURCES tag to YES will include the body of functions, # classes and enums directly into the documentation. # The default value is: NO. INLINE_SOURCES = NO # Setting the STRIP_CODE_COMMENTS tag to YES will instruct doxygen to hide any # special comment blocks from generated source code fragments. Normal C, C++ and # Fortran comments will always remain visible. # The default value is: YES. STRIP_CODE_COMMENTS = YES # If the REFERENCED_BY_RELATION tag is set to YES then for each documented # entity all documented functions referencing it will be listed. # The default value is: NO. REFERENCED_BY_RELATION = NO # If the REFERENCES_RELATION tag is set to YES then for each documented function # all documented entities called/used by that function will be listed. # The default value is: NO. REFERENCES_RELATION = NO # If the REFERENCES_LINK_SOURCE tag is set to YES and SOURCE_BROWSER tag is set # to YES then the hyperlinks from functions in REFERENCES_RELATION and # REFERENCED_BY_RELATION lists will link to the source code. Otherwise they will # link to the documentation. # The default value is: YES. REFERENCES_LINK_SOURCE = YES # If SOURCE_TOOLTIPS is enabled (the default) then hovering a hyperlink in the # source code will show a tooltip with additional information such as prototype, # brief description and links to the definition and documentation. Since this # will make the HTML file larger and loading of large files a bit slower, you # can opt to disable this feature. # The default value is: YES. # This tag requires that the tag SOURCE_BROWSER is set to YES. SOURCE_TOOLTIPS = YES # If the USE_HTAGS tag is set to YES then the references to source code will # point to the HTML generated by the htags(1) tool instead of doxygen built-in # source browser. The htags tool is part of GNU's global source tagging system # (see https://www.gnu.org/software/global/global.html). You will need version # 4.8.6 or higher. # # To use it do the following: # - Install the latest version of global # - Enable SOURCE_BROWSER and USE_HTAGS in the configuration file # - Make sure the INPUT points to the root of the source tree # - Run doxygen as normal # # Doxygen will invoke htags (and that will in turn invoke gtags), so these # tools must be available from the command line (i.e. in the search path). # # The result: instead of the source browser generated by doxygen, the links to # source code will now point to the output of htags. # The default value is: NO. # This tag requires that the tag SOURCE_BROWSER is set to YES. USE_HTAGS = NO # If the VERBATIM_HEADERS tag is set the YES then doxygen will generate a # verbatim copy of the header file for each class for which an include is # specified. Set to NO to disable this. # See also: Section \class. # The default value is: YES. 
VERBATIM_HEADERS = YES # If the CLANG_ASSISTED_PARSING tag is set to YES then doxygen will use the # clang parser (see: # http://clang.llvm.org/) for more accurate parsing at the cost of reduced # performance. This can be particularly helpful with template rich C++ code for # which doxygen's built-in parser lacks the necessary type information. # Note: The availability of this option depends on whether or not doxygen was # generated with the -Duse_libclang=ON option for CMake. # The default value is: NO. CLANG_ASSISTED_PARSING = NO # If clang assisted parsing is enabled and the CLANG_ADD_INC_PATHS tag is set to # YES then doxygen will add the directory of each input to the include path. # The default value is: YES. CLANG_ADD_INC_PATHS = YES # If clang assisted parsing is enabled you can provide the compiler with command # line options that you would normally use when invoking the compiler. Note that # the include paths will already be set by doxygen for the files and directories # specified with INPUT and INCLUDE_PATH. # This tag requires that the tag CLANG_ASSISTED_PARSING is set to YES. CLANG_OPTIONS = # If clang assisted parsing is enabled you can provide the clang parser with the # path to the directory containing a file called compile_commands.json. This # file is the compilation database (see: # http://clang.llvm.org/docs/HowToSetupToolingForLLVM.html) containing the # options used when the source files were built. This is equivalent to # specifying the -p option to a clang tool, such as clang-check. These options # will then be passed to the parser. Any options specified with CLANG_OPTIONS # will be added as well. # Note: The availability of this option depends on whether or not doxygen was # generated with the -Duse_libclang=ON option for CMake. CLANG_DATABASE_PATH = #--------------------------------------------------------------------------- # Configuration options related to the alphabetical class index #--------------------------------------------------------------------------- # If the ALPHABETICAL_INDEX tag is set to YES, an alphabetical index of all # compounds will be generated. Enable this if the project contains a lot of # classes, structs, unions or interfaces. # The default value is: YES. ALPHABETICAL_INDEX = YES # In case all classes in a project start with a common prefix, all classes will # be put under the same header in the alphabetical index. The IGNORE_PREFIX tag # can be used to specify a prefix (or a list of prefixes) that should be ignored # while generating the index headers. # This tag requires that the tag ALPHABETICAL_INDEX is set to YES. IGNORE_PREFIX = #--------------------------------------------------------------------------- # Configuration options related to the HTML output #--------------------------------------------------------------------------- # If the GENERATE_HTML tag is set to YES, doxygen will generate HTML output # The default value is: YES. GENERATE_HTML = YES # The HTML_OUTPUT tag is used to specify where the HTML docs will be put. If a # relative path is entered the value of OUTPUT_DIRECTORY will be put in front of # it. # The default directory is: html. # This tag requires that the tag GENERATE_HTML is set to YES. HTML_OUTPUT = html # The HTML_FILE_EXTENSION tag can be used to specify the file extension for each # generated HTML page (for example: .htm, .php, .asp). # The default value is: .html. # This tag requires that the tag GENERATE_HTML is set to YES. 
#---------------------------------------------------------------------------
# Configuration options related to the alphabetical class index
#---------------------------------------------------------------------------

# If the ALPHABETICAL_INDEX tag is set to YES, an alphabetical index of all
# compounds will be generated. Enable this if the project contains a lot of
# classes, structs, unions or interfaces.
# The default value is: YES.

ALPHABETICAL_INDEX     = YES

# In case all classes in a project start with a common prefix, all classes
# will be put under the same header in the alphabetical index. The
# IGNORE_PREFIX tag can be used to specify a prefix (or a list of prefixes)
# that should be ignored while generating the index headers.
# This tag requires that the tag ALPHABETICAL_INDEX is set to YES.

IGNORE_PREFIX          =

#---------------------------------------------------------------------------
# Configuration options related to the HTML output
#---------------------------------------------------------------------------

# If the GENERATE_HTML tag is set to YES, doxygen will generate HTML output.
# The default value is: YES.

GENERATE_HTML          = YES

# The HTML_OUTPUT tag is used to specify where the HTML docs will be put. If a
# relative path is entered the value of OUTPUT_DIRECTORY will be put in front
# of it.
# The default directory is: html.
# This tag requires that the tag GENERATE_HTML is set to YES.

HTML_OUTPUT            = html

# The HTML_FILE_EXTENSION tag can be used to specify the file extension for
# each generated HTML page (for example: .htm, .php, .asp).
# The default value is: .html.
# This tag requires that the tag GENERATE_HTML is set to YES.

HTML_FILE_EXTENSION    = .html

# The HTML_HEADER tag can be used to specify a user-defined HTML header file
# for each generated HTML page. If the tag is left blank doxygen will generate
# a standard header.
#
# To get valid HTML, the header file must include any scripts and style sheets
# that doxygen needs, which depend on the configuration options used (e.g. the
# setting GENERATE_TREEVIEW). It is highly recommended to start with a default
# header using
# doxygen -w html new_header.html new_footer.html new_stylesheet.css
# YourConfigFile
# and then modify the file new_header.html. See also section "Doxygen usage"
# for information on how to generate the default header that doxygen normally
# uses.
# Note: The header is subject to change so you typically have to regenerate
# the default header when upgrading to a newer version of doxygen. For a
# description of the possible markers and block names see the documentation.
# This tag requires that the tag GENERATE_HTML is set to YES.

HTML_HEADER            =

# The HTML_FOOTER tag can be used to specify a user-defined HTML footer for
# each generated HTML page. If the tag is left blank doxygen will generate a
# standard footer. See HTML_HEADER for more information on how to generate a
# default footer and what special commands can be used inside the footer. See
# also section "Doxygen usage" for information on how to generate the default
# footer that doxygen normally uses.
# This tag requires that the tag GENERATE_HTML is set to YES.

HTML_FOOTER            =

# The HTML_STYLESHEET tag can be used to specify a user-defined cascading
# style sheet that is used by each HTML page. It can be used to fine-tune the
# look of the HTML output. If left blank doxygen will generate a default style
# sheet. See also section "Doxygen usage" for information on how to generate
# the style sheet that doxygen normally uses.
# Note: It is recommended to use HTML_EXTRA_STYLESHEET instead of this tag, as
# it is more robust and this tag (HTML_STYLESHEET) will in the future become
# obsolete.
# This tag requires that the tag GENERATE_HTML is set to YES.

HTML_STYLESHEET        =

# The HTML_EXTRA_STYLESHEET tag can be used to specify additional user-defined
# cascading style sheets that are included after the standard style sheets
# created by doxygen. Using this option one can overrule certain style
# aspects. This is preferred over using HTML_STYLESHEET since it does not
# replace the standard style sheet and is therefore more robust against future
# updates. Doxygen will copy the style sheet files to the output directory.
# Note: The order of the extra style sheet files is of importance (e.g. the
# last style sheet in the list overrules the setting of the previous ones in
# the list). For an example see the documentation.
# This tag requires that the tag GENERATE_HTML is set to YES.

HTML_EXTRA_STYLESHEET  =

# The HTML_EXTRA_FILES tag can be used to specify one or more extra images or
# other source files which should be copied to the HTML output directory. Note
# that these files will be copied to the base HTML output directory. Use the
# $relpath^ marker in the HTML_HEADER and/or HTML_FOOTER files to load these
# files. In the HTML_STYLESHEET file, use the file name only. Also note that
# the files will be copied as-is; there are no commands or markers available.
# This tag requires that the tag GENERATE_HTML is set to YES.

HTML_EXTRA_FILES       =
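# A sketch of a common customization workflow (the file names are arbitrary):
# generate the default header, footer and style sheet with
#   doxygen -w html header.html footer.html customdoxygen.css Doxyfile
# edit the copies, and then point the corresponding tags at them:
#   HTML_HEADER           = header.html
#   HTML_FOOTER           = footer.html
#   HTML_EXTRA_STYLESHEET = customdoxygen.css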
# The HTML_COLORSTYLE_HUE tag controls the color of the HTML output. Doxygen
# will adjust the colors in the style sheet and background images according to
# this color. Hue is specified as an angle on a colorwheel, see
# https://en.wikipedia.org/wiki/Hue for more information. For instance the
# value 0 represents red, 60 is yellow, 120 is green, 180 is cyan, 240 is
# blue, 300 is purple, and 360 is red again.
# Minimum value: 0, maximum value: 359, default value: 220.
# This tag requires that the tag GENERATE_HTML is set to YES.

HTML_COLORSTYLE_HUE    = 220

# The HTML_COLORSTYLE_SAT tag controls the purity (or saturation) of the
# colors in the HTML output. For a value of 0 the output will use grayscales
# only. A value of 255 will produce the most vivid colors.
# Minimum value: 0, maximum value: 255, default value: 100.
# This tag requires that the tag GENERATE_HTML is set to YES.

HTML_COLORSTYLE_SAT    = 100

# The HTML_COLORSTYLE_GAMMA tag controls the gamma correction applied to the
# luminance component of the colors in the HTML output. Values below 100
# gradually make the output lighter, whereas values above 100 make the output
# darker. The value divided by 100 is the actual gamma applied, so 80
# represents a gamma of 0.8. The value 220 represents a gamma of 2.2, and 100
# does not change the gamma.
# Minimum value: 40, maximum value: 240, default value: 80.
# This tag requires that the tag GENERATE_HTML is set to YES.

HTML_COLORSTYLE_GAMMA  = 80
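# As a quick worked example (illustrative only), a slightly darker green theme
# could be obtained with:
#   HTML_COLORSTYLE_HUE   = 120
#   HTML_COLORSTYLE_SAT   = 160
#   HTML_COLORSTYLE_GAMMA = 120
# since 120 on the color wheel is green and a gamma value above 100 darkens
# the output.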
# If the HTML_TIMESTAMP tag is set to YES then the footer of each generated
# HTML page will contain the date and time when the page was generated.
# Setting this to YES can help to show when doxygen was last run and thus if
# the documentation is up to date.
# The default value is: NO.
# This tag requires that the tag GENERATE_HTML is set to YES.

HTML_TIMESTAMP         = YES

# If the HTML_DYNAMIC_MENUS tag is set to YES then the generated HTML
# documentation will contain a main index with vertical navigation menus that
# are dynamically created via JavaScript. If disabled, the navigation index
# will consist of multiple levels of tabs that are statically embedded in
# every HTML page. Disable this option to support browsers that do not have
# JavaScript, like the Qt help browser.
# The default value is: YES.
# This tag requires that the tag GENERATE_HTML is set to YES.

HTML_DYNAMIC_MENUS     = YES

# If the HTML_DYNAMIC_SECTIONS tag is set to YES then the generated HTML
# documentation will contain sections that can be hidden and shown after the
# page has loaded.
# The default value is: NO.
# This tag requires that the tag GENERATE_HTML is set to YES.

HTML_DYNAMIC_SECTIONS  = NO

# With HTML_INDEX_NUM_ENTRIES one can control the preferred number of entries
# shown in the various tree structured indices initially; the user can expand
# and collapse entries dynamically later on. Doxygen will expand the tree to
# such a level that at most the specified number of entries are visible
# (unless a fully collapsed tree already exceeds this amount). So setting the
# number of entries to 1 will produce a fully collapsed tree by default. 0 is
# a special value representing an infinite number of entries and will result
# in a fully expanded tree by default.
# Minimum value: 0, maximum value: 9999, default value: 100.
# This tag requires that the tag GENERATE_HTML is set to YES.

HTML_INDEX_NUM_ENTRIES = 100

# If the GENERATE_DOCSET tag is set to YES, additional index files will be
# generated that can be used as input for Apple's Xcode 3 integrated
# development environment (see:
# https://developer.apple.com/xcode/), introduced with OSX 10.5 (Leopard). To
# create a documentation set, doxygen will generate a Makefile in the HTML
# output directory. Running make will produce the docset in that directory and
# running make install will install the docset in
# ~/Library/Developer/Shared/Documentation/DocSets so that Xcode will find it
# at startup. See
# https://developer.apple.com/library/archive/featuredarticles/DoxygenXcode/_index.html
# for more information.
# The default value is: NO.
# This tag requires that the tag GENERATE_HTML is set to YES.

GENERATE_DOCSET        = NO

# This tag determines the name of the docset feed. A documentation feed
# provides an umbrella under which multiple documentation sets from a single
# provider (such as a company or product suite) can be grouped.
# The default value is: Doxygen generated docs.
# This tag requires that the tag GENERATE_DOCSET is set to YES.

DOCSET_FEEDNAME        = "Doxygen generated docs"

# This tag specifies a string that should uniquely identify the documentation
# set bundle. This should be a reverse domain-name style string, e.g.
# com.mycompany.MyDocSet. Doxygen will append .docset to the name.
# The default value is: org.doxygen.Project.
# This tag requires that the tag GENERATE_DOCSET is set to YES.

DOCSET_BUNDLE_ID       = org.doxygen.Project

# The DOCSET_PUBLISHER_ID tag specifies a string that should uniquely identify
# the documentation publisher. This should be a reverse domain-name style
# string, e.g. com.mycompany.MyDocSet.documentation.
# The default value is: org.doxygen.Publisher.
# This tag requires that the tag GENERATE_DOCSET is set to YES.

DOCSET_PUBLISHER_ID    = org.doxygen.Publisher

# The DOCSET_PUBLISHER_NAME tag identifies the documentation publisher.
# The default value is: Publisher.
# This tag requires that the tag GENERATE_DOCSET is set to YES.

DOCSET_PUBLISHER_NAME  = Publisher

# If the GENERATE_HTMLHELP tag is set to YES then doxygen generates three
# additional HTML index files: index.hhp, index.hhc, and index.hhk. The
# index.hhp is a project file that can be read by Microsoft's HTML Help
# Workshop (see:
# https://www.microsoft.com/en-us/download/details.aspx?id=21138) on Windows.
#
# The HTML Help Workshop contains a compiler that can convert all HTML output
# generated by doxygen into a single compiled HTML file (.chm). Compiled HTML
# files are now used as the Windows 98 help format, and will replace the old
# Windows help format (.hlp) on all Windows platforms in the future.
# Compressed HTML files also contain an index, a table of contents, and you
# can search for words in the documentation. The HTML workshop also contains a
# viewer for compressed HTML files.
# The default value is: NO.
# This tag requires that the tag GENERATE_HTML is set to YES.

GENERATE_HTMLHELP      = NO

# The CHM_FILE tag can be used to specify the file name of the resulting .chm
# file. You can add a path in front of the file if the result should not be
# written to the html output directory.
# This tag requires that the tag GENERATE_HTMLHELP is set to YES.

CHM_FILE               =
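# For example (the Windows paths are hypothetical), a compiled xgboost.chm
# could be produced with:
#   GENERATE_HTMLHELP = YES
#   CHM_FILE          = ..\xgboost.chm
#   HHC_LOCATION      = "C:/Program Files (x86)/HTML Help Workshop/hhc.exe"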
# The HHC_LOCATION tag can be used to specify the location (absolute path
# including file name) of the HTML help compiler (hhc.exe). If non-empty,
# doxygen will try to run the HTML help compiler on the generated index.hhp.
# The file has to be specified with full path.
# This tag requires that the tag GENERATE_HTMLHELP is set to YES.

HHC_LOCATION           =

# The GENERATE_CHI flag controls whether a separate .chi index file is
# generated (YES) or included in the main .chm file (NO).
# The default value is: NO.
# This tag requires that the tag GENERATE_HTMLHELP is set to YES.

GENERATE_CHI           = NO

# The CHM_INDEX_ENCODING tag is used to encode HtmlHelp index (hhk), content
# (hhc) and project file content.
# This tag requires that the tag GENERATE_HTMLHELP is set to YES.

CHM_INDEX_ENCODING     =

# The BINARY_TOC flag controls whether a binary table of contents is generated
# (YES) or a normal table of contents (NO) in the .chm file. Furthermore it
# enables the Previous and Next buttons.
# The default value is: NO.
# This tag requires that the tag GENERATE_HTMLHELP is set to YES.

BINARY_TOC             = NO

# The TOC_EXPAND flag can be set to YES to add extra items for group members
# to the table of contents of the HTML help documentation and to the tree
# view.
# The default value is: NO.
# This tag requires that the tag GENERATE_HTMLHELP is set to YES.

TOC_EXPAND             = NO

# If the GENERATE_QHP tag is set to YES and both QHP_NAMESPACE and
# QHP_VIRTUAL_FOLDER are set, an additional index file will be generated that
# can be used as input for Qt's qhelpgenerator to generate a Qt Compressed
# Help (.qch) of the generated HTML documentation.
# The default value is: NO.
# This tag requires that the tag GENERATE_HTML is set to YES.

GENERATE_QHP           = NO

# If the QHG_LOCATION tag is specified, the QCH_FILE tag can be used to
# specify the file name of the resulting .qch file. The path specified is
# relative to the HTML output folder.
# This tag requires that the tag GENERATE_QHP is set to YES.

QCH_FILE               =

# The QHP_NAMESPACE tag specifies the namespace to use when generating Qt Help
# Project output. For more information please see Qt Help Project / Namespace
# (see:
# https://doc.qt.io/archives/qt-4.8/qthelpproject.html#namespace).
# The default value is: org.doxygen.Project.
# This tag requires that the tag GENERATE_QHP is set to YES.

QHP_NAMESPACE          = org.doxygen.Project

# The QHP_VIRTUAL_FOLDER tag specifies the namespace to use when generating Qt
# Help Project output. For more information please see Qt Help Project /
# Virtual Folders (see:
# https://doc.qt.io/archives/qt-4.8/qthelpproject.html#virtual-folders).
# The default value is: doc.
# This tag requires that the tag GENERATE_QHP is set to YES.

QHP_VIRTUAL_FOLDER     = doc

# If the QHP_CUST_FILTER_NAME tag is set, it specifies the name of a custom
# filter to add. For more information please see Qt Help Project / Custom
# Filters (see:
# https://doc.qt.io/archives/qt-4.8/qthelpproject.html#custom-filters).
# This tag requires that the tag GENERATE_QHP is set to YES.

QHP_CUST_FILTER_NAME   =

# The QHP_CUST_FILTER_ATTRS tag specifies the list of the attributes of the
# custom filter to add. For more information please see Qt Help Project /
# Custom Filters (see:
# https://doc.qt.io/archives/qt-4.8/qthelpproject.html#custom-filters).
# This tag requires that the tag GENERATE_QHP is set to YES.

QHP_CUST_FILTER_ATTRS  =
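# A minimal Qt help setup might look like this (sketch; the namespace and the
# qhelpgenerator location are placeholders):
#   GENERATE_QHP       = YES
#   QHP_NAMESPACE      = org.example.xgboost
#   QHP_VIRTUAL_FOLDER = doc
#   QCH_FILE           = ../qch/xgboost.qch
#   QHG_LOCATION       = /usr/bin/qhelpgenerator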
# The QHP_SECT_FILTER_ATTRS tag specifies the list of the attributes this
# project's filter section matches. Qt Help Project / Filter Attributes (see:
# https://doc.qt.io/archives/qt-4.8/qthelpproject.html#filter-attributes).
# This tag requires that the tag GENERATE_QHP is set to YES.

QHP_SECT_FILTER_ATTRS  =

# The QHG_LOCATION tag can be used to specify the location (absolute path
# including file name) of Qt's qhelpgenerator. If non-empty doxygen will try
# to run qhelpgenerator on the generated .qhp file.
# This tag requires that the tag GENERATE_QHP is set to YES.

QHG_LOCATION           =

# If the GENERATE_ECLIPSEHELP tag is set to YES, additional index files will
# be generated that, together with the HTML files, form an Eclipse help
# plugin. To install this plugin and make it available under the help contents
# menu in Eclipse, the contents of the directory containing the HTML and XML
# files need to be copied into the plugins directory of eclipse. The name of
# the directory within the plugins directory should be the same as the
# ECLIPSE_DOC_ID value. After copying, Eclipse needs to be restarted before
# the help appears.
# The default value is: NO.
# This tag requires that the tag GENERATE_HTML is set to YES.

GENERATE_ECLIPSEHELP   = NO

# A unique identifier for the Eclipse help plugin. When installing the plugin
# the directory name containing the HTML and XML files should also have this
# name. Each documentation set should have its own identifier.
# The default value is: org.doxygen.Project.
# This tag requires that the tag GENERATE_ECLIPSEHELP is set to YES.

ECLIPSE_DOC_ID         = org.doxygen.Project

# If you want full control over the layout of the generated HTML pages it
# might be necessary to disable the index and replace it with your own. The
# DISABLE_INDEX tag can be used to turn on/off the condensed index (tabs) at
# the top of each HTML page. A value of NO enables the index and the value YES
# disables it. Since the tabs in the index contain the same information as the
# navigation tree, you can set this option to YES if you also set
# GENERATE_TREEVIEW to YES.
# The default value is: NO.
# This tag requires that the tag GENERATE_HTML is set to YES.

DISABLE_INDEX          = NO

# The GENERATE_TREEVIEW tag is used to specify whether a tree-like index
# structure should be generated to display hierarchical information. If the
# tag value is set to YES, a side panel will be generated containing a
# tree-like index structure (just like the one that is generated for HTML
# Help). For this to work a browser that supports JavaScript, DHTML, CSS and
# frames is required (i.e. any modern browser). Windows users are probably
# better off using the HTML help feature. Via custom style sheets (see
# HTML_EXTRA_STYLESHEET) one can further fine-tune the look of the index. As
# an example, the default style sheet generated by doxygen has an example that
# shows how to put an image at the root of the tree instead of the
# PROJECT_NAME. Since the tree basically has the same information as the tab
# index, you could consider setting DISABLE_INDEX to YES when enabling this
# option.
# The default value is: NO.
# This tag requires that the tag GENERATE_HTML is set to YES.

GENERATE_TREEVIEW      = NO

# The ENUM_VALUES_PER_LINE tag can be used to set the number of enum values
# that doxygen will group on one line in the generated HTML documentation.
#
# Note that a value of 0 will completely suppress the enum values from
# appearing in the overview section.
# Minimum value: 0, maximum value: 20, default value: 4.
# This tag requires that the tag GENERATE_HTML is set to YES.

ENUM_VALUES_PER_LINE   = 4
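# Since the tree view and the tab index carry the same information, a common
# combination (illustrative, not enabled here) is:
#   GENERATE_TREEVIEW = YES
#   DISABLE_INDEX     = YES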
# If the treeview is enabled (see GENERATE_TREEVIEW) then this tag can be used
# to set the initial width (in pixels) of the frame in which the tree is
# shown.
# Minimum value: 0, maximum value: 1500, default value: 250.
# This tag requires that the tag GENERATE_HTML is set to YES.

TREEVIEW_WIDTH         = 250

# If the EXT_LINKS_IN_WINDOW option is set to YES, doxygen will open links to
# external symbols imported via tag files in a separate window.
# The default value is: NO.
# This tag requires that the tag GENERATE_HTML is set to YES.

EXT_LINKS_IN_WINDOW    = NO

# If the HTML_FORMULA_FORMAT option is set to svg, doxygen will use the
# pdf2svg tool (see https://github.com/dawbarton/pdf2svg) or inkscape (see
# https://inkscape.org) to generate formulas as SVG images instead of PNGs for
# the HTML output. These images will generally look nicer at scaled
# resolutions.
# Possible values are: png (the default) and svg (looks nicer but requires the
# pdf2svg or inkscape tool).
# The default value is: png.
# This tag requires that the tag GENERATE_HTML is set to YES.

HTML_FORMULA_FORMAT    = png

# Use this tag to change the font size of LaTeX formulas included as images in
# the HTML documentation. When you change the font size after a successful
# doxygen run you need to manually remove any form_*.png images from the HTML
# output directory to force them to be regenerated.
# Minimum value: 8, maximum value: 50, default value: 10.
# This tag requires that the tag GENERATE_HTML is set to YES.

FORMULA_FONTSIZE       = 10

# Use the FORMULA_TRANSPARENT tag to determine whether or not the images
# generated for formulas are transparent PNGs. Transparent PNGs are not
# supported properly for IE 6.0, but are supported on all modern browsers.
#
# Note that when changing this option you need to delete any form_*.png files
# in the HTML output directory before the changes take effect.
# The default value is: YES.
# This tag requires that the tag GENERATE_HTML is set to YES.

FORMULA_TRANSPARENT    = YES

# The FORMULA_MACROFILE can contain LaTeX \newcommand and \renewcommand
# commands to create new LaTeX commands to be used in formulas as building
# blocks. See the section "Including formulas" for details.

FORMULA_MACROFILE      =

# Enable the USE_MATHJAX option to render LaTeX formulas using MathJax (see
# https://www.mathjax.org) which uses client side JavaScript for the rendering
# instead of using pre-rendered bitmaps. Use this if you do not have LaTeX
# installed or if you want formulas to look prettier in the HTML output. When
# enabled you may also need to install MathJax separately and configure the
# path to it using the MATHJAX_RELPATH option.
# The default value is: NO.
# This tag requires that the tag GENERATE_HTML is set to YES.

USE_MATHJAX            = NO

# When MathJax is enabled you can set the default output format to be used for
# the MathJax output. See the MathJax site (see:
# http://docs.mathjax.org/en/v2.7-latest/output.html) for more details.
# Possible values are: HTML-CSS (which is slower, but has the best
# compatibility), NativeMML (i.e. MathML) and SVG.
# The default value is: HTML-CSS.
# This tag requires that the tag USE_MATHJAX is set to YES.

MATHJAX_FORMAT         = HTML-CSS
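# For example, to render formulas with a locally installed MathJax copy
# (sketch; the relative path is a placeholder), one could set:
#   USE_MATHJAX     = YES
#   MATHJAX_RELPATH = ../mathjax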
# When MathJax is enabled you need to specify the location relative to the
# HTML output directory using the MATHJAX_RELPATH option. The destination
# directory should contain the MathJax.js script. For instance, if the mathjax
# directory is located at the same level as the HTML output directory, then
# MATHJAX_RELPATH should be ../mathjax. The default value points to the
# MathJax Content Delivery Network so you can quickly see the result without
# installing MathJax. However, it is strongly recommended to install a local
# copy of MathJax from https://www.mathjax.org before deployment.
# The default value is: https://cdn.jsdelivr.net/npm/mathjax@2.
# This tag requires that the tag USE_MATHJAX is set to YES.

MATHJAX_RELPATH        = http://www.mathjax.org/mathjax

# The MATHJAX_EXTENSIONS tag can be used to specify one or more MathJax
# extension names that should be enabled during MathJax rendering. For example
# MATHJAX_EXTENSIONS = TeX/AMSmath TeX/AMSsymbols
# This tag requires that the tag USE_MATHJAX is set to YES.

MATHJAX_EXTENSIONS     =

# The MATHJAX_CODEFILE tag can be used to specify a file with javascript
# pieces of code that will be used on startup of the MathJax code. See the
# MathJax site (see:
# http://docs.mathjax.org/en/v2.7-latest/output.html) for more details. For an
# example see the documentation.
# This tag requires that the tag USE_MATHJAX is set to YES.

MATHJAX_CODEFILE       =

# When the SEARCHENGINE tag is enabled doxygen will generate a search box for
# the HTML output. The underlying search engine uses javascript and DHTML and
# should work on any modern browser. Note that when using HTML help
# (GENERATE_HTMLHELP), Qt help (GENERATE_QHP), or docsets (GENERATE_DOCSET)
# there is already a search function so this one should typically be disabled.
# For large projects the javascript based search engine can be slow; in that
# case enabling SERVER_BASED_SEARCH may provide a better solution. It is
# possible to search using the keyboard; to jump to the search box use
# <access key> + S (what the <access key> is depends on the OS and browser,
# but it is typically <CTRL>, <ALT>/<option>