pax_global_header00006660000000000000000000000064147571600210014515gustar00rootroot0000000000000052 comment=c9f8bcf92b170f3752c43d28cc277797faeea581 scitokens-cpp-1.1.3/000077500000000000000000000000001475716002100143015ustar00rootroot00000000000000scitokens-cpp-1.1.3/.clang-format000066400000000000000000000000411475716002100166470ustar00rootroot00000000000000BasedOnStyle: LLVM IndentWidth: 4scitokens-cpp-1.1.3/.github/000077500000000000000000000000001475716002100156415ustar00rootroot00000000000000scitokens-cpp-1.1.3/.github/workflows/000077500000000000000000000000001475716002100176765ustar00rootroot00000000000000scitokens-cpp-1.1.3/.github/workflows/build-rpm.yml000066400000000000000000000006631475716002100223210ustar00rootroot00000000000000name: RPM Build on: pull_request: branches: - master push: branches: - master jobs: build-rpm: runs-on: ubuntu-latest steps: - uses: actions/checkout@v1 with: submodules: recursive # Custom rpm building from Derek to build RPM - uses: djw8605/docker-mock-rpmbuilder@master with: spec-file: rpm/scitokens-cpp.spec mock-config: epel-7-x86_64 scitokens-cpp-1.1.3/.github/workflows/ccpp.yml000066400000000000000000000041431475716002100213500ustar00rootroot00000000000000name: C/C++ CI on: pull_request: branches: - master push: branches: - master env: # Customize the CMake build type here (Release, Debug, RelWithDebInfo, etc.) 
BUILD_TYPE: Release jobs: build: strategy: matrix: external-gtest: [ YES, NO ] os: [ ubuntu-latest, ubuntu-22.04 ] runs-on: ${{ matrix.os }} name: Build with external_gtest=${{ matrix.external-gtest }} on ${{ matrix.os }} steps: - uses: actions/checkout@v1 with: submodules: recursive - name: install deps run: | sudo apt update && sudo apt-get install libssl-dev sqlite3 libsqlite3-dev cmake libcurl4 libcurl4-openssl-dev uuid-dev libgtest-dev - name: Create Build Environment # Some projects don't allow in-source building, so create a separate build directory # We'll use this as our working directory for all subsequent commands run: cmake -E make_directory ${{runner.workspace}}/build - name: Configure CMake # Use a bash shell so we can use the same syntax for environment variable # access regardless of the host operating system shell: bash working-directory: ${{runner.workspace}}/build # Note the current convention is to use the -S and -B options here to specify source # and build directories, but this is only available with CMake 3.13 and higher. # The CMake binaries on the Github Actions machines are (as of this writing) 3.12 run: cmake $GITHUB_WORKSPACE -DCMAKE_BUILD_TYPE=$BUILD_TYPE -DSCITOKENS_BUILD_UNITTESTS=yes -DSCITOKENS_EXTERNAL_GTEST=${{ matrix.external-gtest }} - name: Build working-directory: ${{runner.workspace}}/build shell: bash # Execute the build. You can specify a specific target with "--target " run: cmake --build . --config $BUILD_TYPE - name: Test working-directory: ${{runner.workspace}}/build shell: bash # Execute tests defined by the CMake configuration. 
# See https://cmake.org/cmake/help/latest/manual/ctest.1.html for more detail run: ctest -C $BUILD_TYPE --verbose scitokens-cpp-1.1.3/.github/workflows/linter.yml000066400000000000000000000046131475716002100217220ustar00rootroot00000000000000name: Lint # Linter Action documentation at https://github.com/marketplace/actions/lint-action # One thing to note is that this action is currently configured automatically fix and re-push the linted code to the repo on a pull request. # Because the github token used for authenticating this commit comes from the upstream repo (ie scitokens/scitokens-cpp), those linter changes will not be pushed # to the fork that is providing the pull request. A manual git fetch will have to be run by the fork after the PR is merged to update the fork to the linted code. # The linter does not have authorization to lint any code in the repo's .github/workflows/ directory. # If the linter fails, the PR can still be completed, but none of the linter changes will be made. on: # push: # Can specify more circumstances under which to run the linter. # branches: # Not specifying input to "branches:" causes the action to run on push for all branches. 
push: branches: - master # Trigger the workflow on pull request, # but only for master pull_request_target: branches: - master permissions: checks: write contents: read pull-requests: write jobs: run-linters: name: Run linters runs-on: ubuntu-latest steps: - name: Check out repository (push) if: ${{ github.event_name == 'push' }} uses: actions/checkout@v3 - name: Check out repository (pull_request_target) if: ${{ github.event_name == 'pull_request_target' }} uses: actions/checkout@v3 with: ref: ${{ github.event.pull_request.head.sha }} - name: Install ClangFormat run: sudo apt-get install -y clang-format - name: Run linters uses: wearerequired/lint-action@v2 with: github_token: ${{ secrets.github_token }} # For providing the commit authorization for the auto_fix feature clang_format: true clang_format_auto_fix: true auto_fix: true commit: false continue_on_error: false git_email: github.event.commits[0].author.name # Uses the author's git email instead of the default git email associated with the action ("lint-action@samuelmeuli.com") clang_format_args: -style=file # Any additional arguments for clang_format - name: suggester / lint uses: reviewdog/action-suggester@v1 with: tool_name: lint scitokens-cpp-1.1.3/.gitignore000066400000000000000000000000061475716002100162650ustar00rootroot00000000000000build scitokens-cpp-1.1.3/.gitmodules000066400000000000000000000003271475716002100164600ustar00rootroot00000000000000[submodule "vendor/jwt-cpp"] path = vendor/jwt-cpp url = https://github.com/Thalhammer/jwt-cpp.git [submodule "vendor/gtest"] path = vendor/gtest url = https://github.com/google/googletest.git branch = v1.8.x scitokens-cpp-1.1.3/CMakeLists.txt000066400000000000000000000061771475716002100170540ustar00rootroot00000000000000 cmake_minimum_required( VERSION 3.10) project( scitokens-cpp DESCRIPTION "A C++ Library to interface to scitokens" VERSION 1.0.2 LANGUAGES CXX) option( SCITOKENS_BUILD_UNITTESTS "Build the scitokens-cpp unit tests" OFF ) option( 
SCITOKENS_EXTERNAL_GTEST "Use an external/pre-installed copy of GTest" OFF ) set( CMAKE_MODULE_PATH "${PROJECT_SOURCE_DIR}/cmake;${CMAKE_MODULE_PATH}" ) set( CMAKE_BUILD_TYPE RelWithDebInfo) # -g -O2 set( CMAKE_UNITY_BUILD False) include(GNUInstallDirs) find_package( jwt-cpp REQUIRED ) find_package( CURL REQUIRED ) find_package( UUID REQUIRED ) if( APPLE ) find_package( OpenSSL REQUIRED ) find_package( Sqlite3 REQUIRED ) set(LIBCRYPTO_INCLUDE_DIRS ${OPENSSL_INCLUDE_DIR}) set(LIBCRYPTO_LIBRARIES ${OPENSSL_CRYPTO_LIBRARY}) set(CMAKE_MACOSX_RPATH ON) elseif( UNIX ) include (FindPkgConfig) pkg_check_modules(LIBCRYPTO REQUIRED libcrypto) pkg_check_modules(OPENSSL REQUIRED openssl) pkg_check_modules(SQLITE REQUIRED sqlite3) endif() add_library(SciTokens SHARED src/scitokens.cpp src/scitokens_internal.cpp src/scitokens_cache.cpp) target_compile_features(SciTokens PUBLIC cxx_std_11) # Use at least C++11 for building and when linking to scitokens target_include_directories(SciTokens PUBLIC ${JWT_CPP_INCLUDES} "${PROJECT_SOURCE_DIR}/src" PRIVATE ${CURL_INCLUDE_DIRS} ${OPENSSL_INCLUDE_DIRS} ${LIBCRYPTO_INCLUDE_DIRS} ${SQLITE_INCLUDE_DIRS} ${UUID_INCLUDE_DIRS}) target_link_libraries(SciTokens PUBLIC ${OPENSSL_LIBRARIES} ${LIBCRYPTO_LIBRARIES} ${CURL_LIBRARIES} ${SQLITE_LIBRARIES} ${UUID_LIBRARIES}) if (UNIX) # pkg_check_modules fails to return an absolute path on RHEL7. Set the # link directories accordingly. 
target_link_directories(SciTokens PUBLIC ${OPENSSL_LIBRARY_DIRS} ${LIBCRYPTO_LIBRARY_DIRS}) endif() if ( NOT APPLE AND UNIX ) set_target_properties(SciTokens PROPERTIES LINK_FLAGS "-Wl,--version-script=${PROJECT_SOURCE_DIR}/configs/export-symbols") endif() add_executable(scitokens-test src/test.cpp) target_include_directories(scitokens-test PRIVATE "${PROJECT_SOURCE_DIR}" ${JWT_CPP_INCLUDES} ${LIBCRYPTO_INCLUDE_DIRS}) target_link_libraries(scitokens-test SciTokens) add_executable(scitokens-verify src/verify.cpp) target_link_libraries(scitokens-verify SciTokens) add_executable(scitokens-test-access src/test_access.cpp) target_link_libraries(scitokens-test-access SciTokens) add_executable(scitokens-list-access src/list_access.cpp) target_link_libraries(scitokens-list-access SciTokens) add_executable(scitokens-create src/create.cpp) target_link_libraries(scitokens-create SciTokens) get_directory_property(TARGETS BUILDSYSTEM_TARGETS) install( TARGETS ${TARGETS} LIBRARY DESTINATION ${CMAKE_INSTALL_LIBDIR} RUNTIME DESTINATION ${CMAKE_INSTALL_BINDIR} ) install( FILES src/scitokens.h DESTINATION ${CMAKE_INSTALL_INCLUDEDIR}/scitokens ) set_target_properties( SciTokens PROPERTIES VERSION "0.0.2" SOVERSION "0" ) if( SCITOKENS_BUILD_UNITTESTS ) if( NOT SCITOKENS_EXTERNAL_GTEST ) include(ExternalProject) ExternalProject_Add(gtest PREFIX external/gtest URL ${CMAKE_CURRENT_SOURCE_DIR}/vendor/gtest INSTALL_COMMAND : ) endif() enable_testing() add_subdirectory(test) endif() scitokens-cpp-1.1.3/LICENSE000066400000000000000000000261351475716002100153150ustar00rootroot00000000000000 Apache License Version 2.0, January 2004 http://www.apache.org/licenses/ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 1. Definitions. "License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document. "Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License. 
"Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity. "You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License. "Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files. "Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types. "Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below). "Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof. 
"Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution." "Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work. 2. Grant of Copyright License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form. 3. Grant of Patent License. 
Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed. 4. Redistribution. You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions: (a) You must give any other recipients of the Work or Derivative Works a copy of this License; and (b) You must cause any modified files to carry prominent notices stating that You changed the files; and (c) You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and (d) If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the 
Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License. You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License. 5. Submission of Contributions. Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions. 6. Trademarks. This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file. 7. Disclaimer of Warranty. 
Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License. 8. Limitation of Liability. In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages. 9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability. END OF TERMS AND CONDITIONS APPENDIX: How to apply the Apache License to your work. 
To apply the Apache License to your work, attach the following boilerplate notice, with the fields enclosed by brackets "{}" replaced with your own identifying information. (Don't include the brackets!) The text should be enclosed in the appropriate comment syntax for the file format. We also recommend that a file or class name and description of purpose be included on the same "printed page" as the copyright notice for easier identification within third-party archives. Copyright {yyyy} {name of copyright owner} Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. scitokens-cpp-1.1.3/README.md000066400000000000000000000057341475716002100155710ustar00rootroot00000000000000 SciTokens C++ Library ===================== This repository implements a minimal library for creating and using SciTokens from C or C++. [SciTokens](https://scitokens.org) provide a token format for distributed authorization. The tokens are self-describing, can be verified in a distributed fashion (no need to contact the issuer to determine if the token is valid). This is convenient for a federated environment where several otherwise-independent storage endpoints want to delegate trust for an issuer for managing a storage allocation. Building -------- To build the `scitokens-cpp` library, the following dependencies are needed: - [jwt-cpp] v0.5.0 or later (https://github.com/Thalhammer/jwt-cpp): A header-only C++ library for manipulating JWTs. - OpenSSL 1.0 or later. - `sqlite3` CMake is used for the build system. 
To build, from the source directory: ``` mkdir build cd build JWT_CPP_DIR=~/path/to/jwt-cpp cmake .. make ``` Testing ------- The easiest way to test `scitokens-cpp` is to head to the [SciTokens Demo app](https://demo.scitokens.org) and copy the generated token. Then, from the build directory: ``` ./scitokens-verify eyJ0eXAiOiJKV1QiLCJhbGciOiJSUzI1NiIsImtpZCI6ImtleS1yczI1NiJ9.eyJpc3MiOiJodHRwczovL2RlbW8uc2NpdG9rZW5zLm9yZyIsImV4cCI6MTU0NjQ1NjMwOSwiaWF0IjoxNTQ2NDU1NzA5LCJuYmYiOjE1NDY0NTU3MDksImp0aSI6ImRlYmNkZDRjLTU1MzgtNDkxNS1hY2U2LTgyNTg3NGQwZjEzNyJ9.Vu9TRfDi5WJujeAGl-wP-atvNqh31-gteKqqu_IEcxoCfGYdmoIM3xOOY1GmHcmXfclkrl724ldxBBChsDpcdi_8914N9EGwGVApJLQU0SaPPdtcoCrqvVJE3bD9fs6UKooGwuk_e20ml9g0R4100fTdsD7pkIOABYGTbhxioEb1dP1o-17l2t2kUXpd8KhIyZZjmtmnMdmO5bxaY_V60OOWekwelT8ACK8ao39Ocf_wmUiS0VVX21hD1KqO0bgBU9AsVJ5prAL9ytElr_UB2X5KowPODbj6LPFNhpCwXcoG4w4Gw9VueuxCuIPhlcHBhP83i5LPgtk2YOjygdSahA ``` Replace the given token above with the fresh one you just generated; using the above token should give an expired token error. Instructions for Generating a Release ------------------------------------- SciTokens-cpp includes a submodule, jwt-cpp. Therefore, to create a release, you have to include the submodule into the release. VER=0.3.3 # for example git archive --prefix "scitokens-cpp-$VER/" -o "scitokens-cpp-$VER.tar" v$VER git submodule update --init git submodule foreach --recursive "git archive --prefix=scitokens-cpp-$VER/\$path/ --output=\$sha1.tar HEAD && tar --concatenate --file=$(pwd)/scitokens-cpp-$VER.tar \$sha1.tar && rm \$sha1.tar" gzip "scitokens-cpp-$VER.tar" Before tagging a new release, make sure that the RPM spec file has an updated version number and an associated changelog entry. Also, make sure that the ``debian/changelog`` has an entry that matches the RPM changelog entry. This package is built on the [cvmfs-config OpenSUSE Build Service](https://build.opensuse.org/project/show/home:cvmfs:contrib). 
In order to support that run `debian/obsupdate.sh` whenever the version or release number is changed in `rpm/scitokens-cpp.spec`, and commit the generated `debian/scitokens-cpp.dsc` before tagging the release. scitokens-cpp-1.1.3/cmake/000077500000000000000000000000001475716002100153615ustar00rootroot00000000000000scitokens-cpp-1.1.3/cmake/FindSqlite3.cmake000066400000000000000000000022441475716002100205120ustar00rootroot00000000000000# - Try to find SQLITE3 # # SQLITE3_FOUND - system has SQLITE3 # SQLITE3_INCLUDE_DIRS - the SQLITE3 include directory # SQLITE3_LIBRARIES - Link these to use SQLITE3 # include(CheckSymbolExists) FIND_LIBRARY(SQLITE3_LIBRARY NAMES sqlite) find_path(SQLITE3_INCLUDE_DIR NAMES sqlite3.h PATHS ${SQLITE3_DIR} $ENV{SQLITE3_DIR} ~/Library/Frameworks /Library/Frameworks /usr/local/include /usr/include ) find_library(SQLITE3_LIBRARY NAMES sqlite3 PATHS ${SQLITE_DIR} $ENV{SQLITE_DIR} ~/Library/Frameworks /Library/Frameworks ) INCLUDE(FindPackageHandleStandardArgs) FIND_PACKAGE_HANDLE_STANDARD_ARGS(SQLITE3 DEFAULT_MSG SQLITE3_LIBRARY SQLITE3_INCLUDE_DIR) IF(SQLITE3_FOUND) SET(SQLITE3_LIBRARIES ${SQLITE3_LIBRARY}) SET(SQLITE_LIBRARIES ${SQLITE3_LIBRARY}) SET(SQLITE3_INCLUDE_DIRS ${SQLITE3_INCLUDE_DIR}) SET(SQLITE_INCLUDE_DIRS ${SQLITE3_INCLUDE_DIR}) ELSE() SET(SQLITE_LIBRARIES) SET(SQLITE3_LIBRARIES) SET(SQLITE_INCLUDE_DIRS) SET(SQLITE3_INCLUDE_DIRS) endif() mark_as_advanced(SQLITE3_INCLUDE_DIRS SQLITE3_LIBRARIES SQLITE_LIBRARIES SQLITE_INCLUDE_DIRS) scitokens-cpp-1.1.3/cmake/FindUUID.cmake000066400000000000000000000056301475716002100177360ustar00rootroot00000000000000# - Try to find UUID # Once done this will define # # UUID_FOUND - system has UUID # UUID_INCLUDE_DIRS - the UUID include directory # UUID_LIBRARIES - Link these to use UUID # UUID_DEFINITIONS - Compiler switches required for using UUID # # Copyright (c) 2006 Andreas Schneider # # Redistribution and use is allowed according to the terms of the New # BSD license. 
# For details see the accompanying COPYING-CMAKE-SCRIPTS file. # if (APPLE) include(CheckSymbolExists) # On mac, it can't find uuid library # So, just check for the functions with the default include CHECK_SYMBOL_EXISTS("uuid_generate" "uuid/uuid.h" UUID_SYMBOL) endif (APPLE) if (UUID_SYMBOL) set(UUID_FOUND TRUE) elseif (UUID_LIBRARIES AND UUID_INCLUDE_DIRS) # in cache already set(UUID_FOUND TRUE) else (UUID_LIBRARIES AND UUID_INCLUDE_DIRS) find_path(UUID_INCLUDE_DIR NAMES uuid/uuid.h PATHS ${UUID_DIR}/include $ENV{UUID_DIR}/include $ENV{UUID_DIR} ${DELTA3D_EXT_DIR}/inc $ENV{DELTA_ROOT}/ext/inc $ENV{DELTA_ROOT} ~/Library/Frameworks /Library/Frameworks /usr/local/include /usr/include /usr/include/gdal /sw/include # Fink /opt/local/include # DarwinPorts /opt/csw/include # Blastwave /opt/include [HKEY_LOCAL_MACHINE\\SYSTEM\\CurrentControlSet\\Control\\Session\ Manager\\Environment;OSG_ROOT]/include /usr/freeware/include ) find_library(UUID_LIBRARY NAMES uuid PATHS ${UUID_DIR}/lib $ENV{UUID_DIR}/lib $ENV{UUID_DIR} ${DELTA3D_EXT_DIR}/lib $ENV{DELTA_ROOT}/ext/lib $ENV{DELTA_ROOT} $ENV{OSG_ROOT}/lib ~/Library/Frameworks /Library/Frameworks /usr/local/lib /usr/lib /sw/lib /opt/local/lib /opt/csw/lib /opt/lib /usr/freeware/lib64 ) find_library(UUID_LIBRARY_DEBUG NAMES uuidd PATHS ${UUID_DIR}/lib $ENV{UUID_DIR}/lib $ENV{UUID_DIR} ${DELTA3D_EXT_DIR}/lib $ENV{DELTA_ROOT}/ext/lib $ENV{DELTA_ROOT} $ENV{OSG_ROOT}/lib ~/Library/Frameworks /Library/Frameworks /usr/local/lib /usr/lib /sw/lib /opt/local/lib /opt/csw/lib /opt/lib /usr/freeware/lib64 ) set(UUID_INCLUDE_DIRS ${UUID_INCLUDE_DIR}) set(UUID_LIBRARIES ${UUID_LIBRARY}) if (UUID_INCLUDE_DIRS AND UUID_LIBRARIES) set(UUID_FOUND TRUE) endif (UUID_INCLUDE_DIRS AND UUID_LIBRARIES) if (UUID_FOUND) if (NOT UUID_FIND_QUIETLY) message(STATUS "Found UUID : ${UUID_LIBRARIES}") endif (NOT UUID_FIND_QUIETLY) else (UUID_FOUND) if (UUID_FIND_REQUIRED) message(FATAL_ERROR "Could not find UUID") endif (UUID_FIND_REQUIRED) endif (UUID_FOUND) # 
show the UUID_INCLUDE_DIRS and UUID_LIBRARIES variables only in the advanced view mark_as_advanced(UUID_INCLUDE_DIRS UUID_LIBRARIES) endif (UUID_SYMBOL) scitokens-cpp-1.1.3/cmake/Findjwt-cpp.cmake000066400000000000000000000002371475716002100205520ustar00rootroot00000000000000 FIND_PATH(JWT_CPP_INCLUDES jwt-cpp/jwt.h HINTS ${JWT_CPP_DIR} $ENV{JWT_CPP_DIR} /usr ${PROJECT_SOURCE_DIR}/vendor/jwt-cpp PATH_SUFFIXES include ) scitokens-cpp-1.1.3/configs/000077500000000000000000000000001475716002100157315ustar00rootroot00000000000000scitokens-cpp-1.1.3/configs/export-symbols000066400000000000000000000001341475716002100206610ustar00rootroot00000000000000{ global: scitoken*; validator*; enforcer*; keycache*; config*; local: *; }; scitokens-cpp-1.1.3/debian/000077500000000000000000000000001475716002100155235ustar00rootroot00000000000000scitokens-cpp-1.1.3/debian/README.obs000066400000000000000000000005001475716002100171600ustar00rootroot00000000000000This package is built by cvmfs-contrib on the OpenSUSE Build System https://build.opensuse.org/project/show/home:cvmfs:contrib OBS requires a little help from the package in order to build on Debian. Each time there's a new release, run obsupdate.sh in this directory and commit the newly updated scitokens-cpp.dsc. 
scitokens-cpp-1.1.3/debian/changelog000066400000000000000000000077341475716002100174100ustar00rootroot00000000000000scitokens-cpp (1.1.3-1) stable; urgency=medium * Include cstdint import for jwt library to support newer compilers -- Derek Weitzel Mon, 24 Feb 2025 12:00:00 -0600 scitokens-cpp (1.1.2-1) stable; urgency=medium * Turn off CMAKE unity builds * Add a mutex around requesting public keys to stop overloading issuers -- Derek Weitzel Wed, 30 Oct 2024 12:00:00 -0600 scitokens-cpp (1.1.1-1) stable; urgency=medium * Improve error handling around the sqlite3 library * Fix test failures and compiler warnings -- Derek Weitzel Wed, 28 Feb 2024 12:00:00 -0600 scitokens-cpp (1.1.0-1) stable; urgency=medium * Allow the scitokens library user to setup a custom CA file * Fix typecast errors in scitoken_status_get_*() that caused async queries to fail * Fix logic error in deserialize_continue() that caused async deserialization to fail -- Tim Theisen Tue, 07 Nov 2023 15:46:00 -0600 scitokens-cpp (1.0.2-1) stable; urgency=medium * Add support for API-configurable cache home * Fix enforcer_acl_free logic * scitokens_internal: catch matching exception type after jwt-cpp update -- Derek Weitzel Thu, 15 Jun 2023 12:00:00 -0500 scitokens-cpp (1.0.1-1) stable; urgency=medium * Fix bug in generate acls which would cause a timeout -- Derek Weitzel Wed, 26 Apr 2023 12:00:00 -0500 scitokens-cpp (1.0.0-1) stable; urgency=medium * Add async API for parsing and verifying tokens * Add configuration API * Make nbf claim optional for non-scitokens tokens * Update to OpenSSL 3.0 -- Derek Weitzel Tue, 21 Mar 2023 10:18:59 -0500 scitokens-cpp (0.7.3-1) stable; urgency=medium * Retry failed key renewal every 5 minutes -- Derek Weitzel Tue, 01 Nov 2022 08:29:22 -0500 scitokens-cpp (0.7.2-1) stable; urgency=medium * Add curl timeout of 4 seconds for update, and 30 for expired keys -- Derek Weitzel Mon, 31 Oct 2022 15:35:17 -0500 scitokens-cpp (0.7.1-1) stable; urgency=medium * Add scitokens-* 
binaries to the package * Bug: close sqlite db handle on return -- Derek Weitzel Wed, 22 Jun 2022 11:26:18 -0500 scitokens-cpp (0.7.0-1) stable; urgency=medium * Changes from static analysis * If only one key is available, do not error on no kid * Support at+jwt profile -- Derek Weitzel Fri, 18 Feb 2022 13:16:18 -0600 scitokens-cpp (0.6.3-1) stable; urgency=medium * Add support for building Debian packages on the OpenSUSE Build System * Add patch to jwt-cpp to update its picojson dependency in order to enable it to compile on Debian 11 and Ubuntu 21.04 * Fix el7 build by requiring epel-rpm-macros -- Dave Dykstra Fri, 03 Sep 2021 12:00:00 -0500 scitokens-cpp (0.6.2-2) stable; urgency=medium * Make the build require cmake3 instead of cmake -- Dave Dykstra Thu, 26 Aug 2021 12:00:00 -0500 scitokens-cpp (0.6.2-1) stable; urgency=medium * Correct WLCG compat for condor read permissions -- Dave Dykstra Thu, 26 Aug 2021 12:00:00 -0500 scitokens-cpp (0.6.1-1) stable; urgency=medium * Fix vector resize for el8+ builds -- Derek Weitzel Thu, 20 May 2021 12:00:00 -0500 scitokens-cpp (0.6.0-2) unstable; urgency=medium * Fix empty libscitokens0 package -- Tim Theisen Fri, 26 Mar 2021 08:11:00 -0500 scitokens-cpp (0.6.0-1) unstable; urgency=medium * Fix compilation errors on c++11 * Update to jwt-cpp-0.4.0 vendor * Change scitoken profile name to match spec, scitoken:2.0 -- Derek Weitzel Tue, 09 Mar 2021 13:45:00 -0600 scitokens-cpp (0.5.1-3) unstable; urgency=low * Updated packaging for Debian -- Tim Theisen Sun, 28 Feb 2021 16:02:24 -0600 scitokens-cpp (0.5.1-1) unstable; urgency=low * Initial release. 
-- Tim Theisen Fri, 04 Dec 2020 10:54:24 -0600 scitokens-cpp-1.1.3/debian/compat000066400000000000000000000000031475716002100167220ustar00rootroot0000000000000012 scitokens-cpp-1.1.3/debian/control000066400000000000000000000027671475716002100171420ustar00rootroot00000000000000Source: scitokens-cpp Section: science Priority: optional Maintainer: Tim Theisen Build-Depends: cmake (>=2.6), debhelper (>=9), libcurl4-openssl-dev | libcurl4-gnutls-dev, libsqlite3-dev, libssl-dev, pkg-config, uuid-dev Standards-Version: 3.9.8 Homepage: https://github.com/scitokens/scitokens-cpp Package: libscitokens0 Section: libs Architecture: any Multi-Arch: same Pre-Depends: ${misc:Pre-Depends} Depends: ${misc:Depends}, ${shlibs:Depends} Description: C++ Implementation of the SciTokens Library SciTokens provide a token format for distributed authorization The tokens are self-describing, can be verified in a distributed fashion (no need to contact the issuer to determine if the token is valid). This is convenient for a federated environment where several otherwise-independent storage endpoints want to delegate trust for an issuer for managing a storage allocation. Package: libscitokens-dev Section: libdevel Architecture: any Multi-Arch: same Depends: libscitokens0 (= ${binary:Version}), ${misc:Depends} Description: Header files for the libscitokens public interfaces SciTokens provide a token format for distributed authorization. The tokens are self-describing, can be verified in a distributed fashion (no need to contact the issuer to determine if the token is valid). This is convenient for a federated environment where several otherwise-independent storage endpoints want to delegate trust for an issuer for managing a storage allocation. 
scitokens-cpp-1.1.3/debian/copyright000066400000000000000000000422151475716002100174620ustar00rootroot00000000000000Format: https://www.debian.org/doc/packaging-manuals/copyright-format/1.0/ Upstream-Name: scitokens-cpp Upstream-Contact: discuss@scitokens.org Source: https://github.com/scitokens/scitokens-cpp Files: * Copyright: NONE License: Apache-2.0 Files: ./vendor/gtest/BUILD.bazel ./vendor/gtest/LICENSE ./vendor/gtest/ci/build-linux-bazel.sh ./vendor/gtest/ci/env-linux.sh ./vendor/gtest/ci/env-osx.sh ./vendor/gtest/ci/get-nprocessors.sh ./vendor/gtest/ci/install-linux.sh ./vendor/gtest/ci/install-osx.sh ./vendor/gtest/ci/log-config.sh ./vendor/gtest/googlemock/LICENSE ./vendor/gtest/googlemock/include/gmock/gmock-actions.h ./vendor/gtest/googlemock/include/gmock/gmock-cardinalities.h ./vendor/gtest/googlemock/include/gmock/gmock-function-mocker.h ./vendor/gtest/googlemock/include/gmock/gmock-generated-actions.h ./vendor/gtest/googlemock/include/gmock/gmock-generated-actions.h.pump ./vendor/gtest/googlemock/include/gmock/gmock-generated-function-mockers.h ./vendor/gtest/googlemock/include/gmock/gmock-generated-function-mockers.h.pump ./vendor/gtest/googlemock/include/gmock/gmock-generated-matchers.h ./vendor/gtest/googlemock/include/gmock/gmock-generated-matchers.h.pump ./vendor/gtest/googlemock/include/gmock/gmock-matchers.h ./vendor/gtest/googlemock/include/gmock/gmock-more-actions.h ./vendor/gtest/googlemock/include/gmock/gmock-more-matchers.h ./vendor/gtest/googlemock/include/gmock/gmock-nice-strict.h ./vendor/gtest/googlemock/include/gmock/gmock-spec-builders.h ./vendor/gtest/googlemock/include/gmock/gmock.h ./vendor/gtest/googlemock/include/gmock/internal/custom/gmock-matchers.h ./vendor/gtest/googlemock/include/gmock/internal/custom/gmock-port.h ./vendor/gtest/googlemock/include/gmock/internal/gmock-internal-utils.h ./vendor/gtest/googlemock/include/gmock/internal/gmock-port.h ./vendor/gtest/googlemock/scripts/fuse_gmock_files.py 
./vendor/gtest/googlemock/scripts/pump.py ./vendor/gtest/googlemock/src/gmock-all.cc ./vendor/gtest/googlemock/src/gmock-cardinalities.cc ./vendor/gtest/googlemock/src/gmock-internal-utils.cc ./vendor/gtest/googlemock/src/gmock-matchers.cc ./vendor/gtest/googlemock/src/gmock-spec-builders.cc ./vendor/gtest/googlemock/src/gmock.cc ./vendor/gtest/googlemock/src/gmock_main.cc ./vendor/gtest/googlemock/test/BUILD.bazel ./vendor/gtest/googlemock/test/gmock-actions_test.cc ./vendor/gtest/googlemock/test/gmock-cardinalities_test.cc ./vendor/gtest/googlemock/test/gmock-function-mocker_test.cc ./vendor/gtest/googlemock/test/gmock-generated-actions_test.cc ./vendor/gtest/googlemock/test/gmock-generated-function-mockers_test.cc ./vendor/gtest/googlemock/test/gmock-generated-matchers_test.cc ./vendor/gtest/googlemock/test/gmock-internal-utils_test.cc ./vendor/gtest/googlemock/test/gmock-matchers_test.cc ./vendor/gtest/googlemock/test/gmock-more-actions_test.cc ./vendor/gtest/googlemock/test/gmock-nice-strict_test.cc ./vendor/gtest/googlemock/test/gmock-port_test.cc ./vendor/gtest/googlemock/test/gmock-pp-string_test.cc ./vendor/gtest/googlemock/test/gmock-spec-builders_test.cc ./vendor/gtest/googlemock/test/gmock_all_test.cc ./vendor/gtest/googlemock/test/gmock_ex_test.cc ./vendor/gtest/googlemock/test/gmock_leak_test.py ./vendor/gtest/googlemock/test/gmock_leak_test_.cc ./vendor/gtest/googlemock/test/gmock_link2_test.cc ./vendor/gtest/googlemock/test/gmock_link_test.cc ./vendor/gtest/googlemock/test/gmock_link_test.h ./vendor/gtest/googlemock/test/gmock_output_test.py ./vendor/gtest/googlemock/test/gmock_output_test_.cc ./vendor/gtest/googlemock/test/gmock_stress_test.cc ./vendor/gtest/googlemock/test/gmock_test.cc ./vendor/gtest/googlemock/test/gmock_test_utils.py ./vendor/gtest/googlemock/test/pump_test.py ./vendor/gtest/googletest/LICENSE ./vendor/gtest/googletest/include/gtest/gtest-death-test.h ./vendor/gtest/googletest/include/gtest/gtest-matchers.h 
./vendor/gtest/googletest/include/gtest/gtest-message.h ./vendor/gtest/googletest/include/gtest/gtest-param-test.h ./vendor/gtest/googletest/include/gtest/gtest-printers.h ./vendor/gtest/googletest/include/gtest/gtest-spi.h ./vendor/gtest/googletest/include/gtest/gtest-test-part.h ./vendor/gtest/googletest/include/gtest/gtest-typed-test.h ./vendor/gtest/googletest/include/gtest/gtest.h ./vendor/gtest/googletest/include/gtest/gtest_pred_impl.h ./vendor/gtest/googletest/include/gtest/gtest_prod.h ./vendor/gtest/googletest/include/gtest/internal/custom/gtest-port.h ./vendor/gtest/googletest/include/gtest/internal/custom/gtest-printers.h ./vendor/gtest/googletest/include/gtest/internal/custom/gtest.h ./vendor/gtest/googletest/include/gtest/internal/gtest-death-test-internal.h ./vendor/gtest/googletest/include/gtest/internal/gtest-filepath.h ./vendor/gtest/googletest/include/gtest/internal/gtest-internal.h ./vendor/gtest/googletest/include/gtest/internal/gtest-param-util.h ./vendor/gtest/googletest/include/gtest/internal/gtest-port-arch.h ./vendor/gtest/googletest/include/gtest/internal/gtest-port.h ./vendor/gtest/googletest/include/gtest/internal/gtest-string.h ./vendor/gtest/googletest/include/gtest/internal/gtest-type-util.h ./vendor/gtest/googletest/samples/prime_tables.h ./vendor/gtest/googletest/samples/sample1.cc ./vendor/gtest/googletest/samples/sample1.h ./vendor/gtest/googletest/samples/sample10_unittest.cc ./vendor/gtest/googletest/samples/sample1_unittest.cc ./vendor/gtest/googletest/samples/sample2.cc ./vendor/gtest/googletest/samples/sample2.h ./vendor/gtest/googletest/samples/sample2_unittest.cc ./vendor/gtest/googletest/samples/sample3-inl.h ./vendor/gtest/googletest/samples/sample3_unittest.cc ./vendor/gtest/googletest/samples/sample4.cc ./vendor/gtest/googletest/samples/sample4.h ./vendor/gtest/googletest/samples/sample4_unittest.cc ./vendor/gtest/googletest/samples/sample5_unittest.cc ./vendor/gtest/googletest/samples/sample6_unittest.cc 
./vendor/gtest/googletest/samples/sample7_unittest.cc ./vendor/gtest/googletest/samples/sample8_unittest.cc ./vendor/gtest/googletest/samples/sample9_unittest.cc ./vendor/gtest/googletest/scripts/common.py ./vendor/gtest/googletest/scripts/fuse_gtest_files.py ./vendor/gtest/googletest/scripts/gen_gtest_pred_impl.py ./vendor/gtest/googletest/scripts/release_docs.py ./vendor/gtest/googletest/scripts/run_with_path.py ./vendor/gtest/googletest/scripts/upload.py ./vendor/gtest/googletest/scripts/upload_gtest.py ./vendor/gtest/googletest/src/gtest-all.cc ./vendor/gtest/googletest/src/gtest-death-test.cc ./vendor/gtest/googletest/src/gtest-filepath.cc ./vendor/gtest/googletest/src/gtest-internal-inl.h ./vendor/gtest/googletest/src/gtest-matchers.cc ./vendor/gtest/googletest/src/gtest-port.cc ./vendor/gtest/googletest/src/gtest-printers.cc ./vendor/gtest/googletest/src/gtest-test-part.cc ./vendor/gtest/googletest/src/gtest-typed-test.cc ./vendor/gtest/googletest/src/gtest.cc ./vendor/gtest/googletest/src/gtest_main.cc ./vendor/gtest/googletest/test/BUILD.bazel ./vendor/gtest/googletest/test/googletest-break-on-failure-unittest.py ./vendor/gtest/googletest/test/googletest-break-on-failure-unittest_.cc ./vendor/gtest/googletest/test/googletest-catch-exceptions-test.py ./vendor/gtest/googletest/test/googletest-catch-exceptions-test_.cc ./vendor/gtest/googletest/test/googletest-color-test.py ./vendor/gtest/googletest/test/googletest-color-test_.cc ./vendor/gtest/googletest/test/googletest-death-test-test.cc ./vendor/gtest/googletest/test/googletest-death-test_ex_test.cc ./vendor/gtest/googletest/test/googletest-env-var-test.py ./vendor/gtest/googletest/test/googletest-env-var-test_.cc ./vendor/gtest/googletest/test/googletest-filepath-test.cc ./vendor/gtest/googletest/test/googletest-filter-unittest.py ./vendor/gtest/googletest/test/googletest-filter-unittest_.cc ./vendor/gtest/googletest/test/googletest-json-outfiles-test.py 
./vendor/gtest/googletest/test/googletest-json-output-unittest.py ./vendor/gtest/googletest/test/googletest-list-tests-unittest.py ./vendor/gtest/googletest/test/googletest-list-tests-unittest_.cc ./vendor/gtest/googletest/test/googletest-listener-test.cc ./vendor/gtest/googletest/test/googletest-message-test.cc ./vendor/gtest/googletest/test/googletest-options-test.cc ./vendor/gtest/googletest/test/googletest-output-test.py ./vendor/gtest/googletest/test/googletest-output-test_.cc ./vendor/gtest/googletest/test/googletest-param-test-invalid-name1-test.py ./vendor/gtest/googletest/test/googletest-param-test-invalid-name1-test_.cc ./vendor/gtest/googletest/test/googletest-param-test-invalid-name2-test.py ./vendor/gtest/googletest/test/googletest-param-test-invalid-name2-test_.cc ./vendor/gtest/googletest/test/googletest-param-test-test.cc ./vendor/gtest/googletest/test/googletest-param-test-test.h ./vendor/gtest/googletest/test/googletest-param-test2-test.cc ./vendor/gtest/googletest/test/googletest-port-test.cc ./vendor/gtest/googletest/test/googletest-printers-test.cc ./vendor/gtest/googletest/test/googletest-shuffle-test.py ./vendor/gtest/googletest/test/googletest-shuffle-test_.cc ./vendor/gtest/googletest/test/googletest-test-part-test.cc ./vendor/gtest/googletest/test/googletest-test2_test.cc ./vendor/gtest/googletest/test/googletest-throw-on-failure-test.py ./vendor/gtest/googletest/test/googletest-throw-on-failure-test_.cc ./vendor/gtest/googletest/test/googletest-uninitialized-test.py ./vendor/gtest/googletest/test/googletest-uninitialized-test_.cc ./vendor/gtest/googletest/test/gtest-typed-test2_test.cc ./vendor/gtest/googletest/test/gtest-typed-test_test.cc ./vendor/gtest/googletest/test/gtest-typed-test_test.h ./vendor/gtest/googletest/test/gtest-unittest-api_test.cc ./vendor/gtest/googletest/test/gtest_all_test.cc ./vendor/gtest/googletest/test/gtest_assert_by_exception_test.cc ./vendor/gtest/googletest/test/gtest_environment_test.cc 
./vendor/gtest/googletest/test/gtest_help_test.py ./vendor/gtest/googletest/test/gtest_help_test_.cc ./vendor/gtest/googletest/test/gtest_json_test_utils.py ./vendor/gtest/googletest/test/gtest_list_output_unittest.py ./vendor/gtest/googletest/test/gtest_list_output_unittest_.cc ./vendor/gtest/googletest/test/gtest_main_unittest.cc ./vendor/gtest/googletest/test/gtest_no_test_unittest.cc ./vendor/gtest/googletest/test/gtest_pred_impl_unittest.cc ./vendor/gtest/googletest/test/gtest_premature_exit_test.cc ./vendor/gtest/googletest/test/gtest_prod_test.cc ./vendor/gtest/googletest/test/gtest_repeat_test.cc ./vendor/gtest/googletest/test/gtest_skip_test.cc ./vendor/gtest/googletest/test/gtest_sole_header_test.cc ./vendor/gtest/googletest/test/gtest_stress_test.cc ./vendor/gtest/googletest/test/gtest_test_macro_stack_footprint_test.cc ./vendor/gtest/googletest/test/gtest_test_utils.py ./vendor/gtest/googletest/test/gtest_throw_on_failure_ex_test.cc ./vendor/gtest/googletest/test/gtest_unittest.cc ./vendor/gtest/googletest/test/gtest_xml_outfile1_test_.cc ./vendor/gtest/googletest/test/gtest_xml_outfile2_test_.cc ./vendor/gtest/googletest/test/gtest_xml_outfiles_test.py ./vendor/gtest/googletest/test/gtest_xml_output_unittest.py ./vendor/gtest/googletest/test/gtest_xml_output_unittest_.cc ./vendor/gtest/googletest/test/gtest_xml_test_utils.py ./vendor/gtest/googletest/test/production.cc ./vendor/gtest/googletest/test/production.h Copyright: 2005, Google Inc. 2006, Google Inc. 2007, Google Inc. 2008, Google Inc. 2009, Google Inc. 2010, Google Inc. 2013, Google Inc. 2015, Google Inc. 2017, Google Inc. 2018, Google Inc. License: BSD-3-clause Files: ./vendor/gtest/googlemock/scripts/generator/cpp/ast.py ./vendor/gtest/googlemock/scripts/generator/cpp/gmock_class_test.py ./vendor/gtest/googlemock/scripts/generator/cpp/keywords.py ./vendor/gtest/googlemock/scripts/generator/cpp/tokenize.py ./vendor/gtest/googlemock/scripts/generator/cpp/utils.py Copyright: 2007, Google Inc. 
2007, Neal Norwitz 2009, Google Inc. 2009, Neal Norwitz License: Apache-2.0 Files: ./vendor/gtest/googletest/test/gtest_skip_check_output_test.py ./vendor/gtest/googletest/test/gtest_skip_environment_check_output_test.py ./vendor/gtest/googletest/test/gtest_skip_in_environment_setup_test.cc ./vendor/gtest/googletest/test/gtest_testbridge_test.py ./vendor/gtest/googletest/test/gtest_testbridge_test_.cc Copyright: 2018, Google LLC. 2019, Google LLC. License: BSD-3-clause Files: ./vendor/gtest/googlemock/scripts/generator/cpp/gmock_class.py ./vendor/gtest/googlemock/scripts/generator/gmock_gen.py Copyright: 2008, Google Inc. License: Apache-2.0 Files: ./vendor/jwt-cpp/include/jwt-cpp/picojson.h Copyright: 2009-2010, Cybozu Labs, Inc. 2011-2014, Kazuho Oku License: BSD-2-clause Files: ./vendor/gtest/library.json Copyright: NONE License: BSD-3-clause Files: ./vendor/jwt-cpp/* Copyright: 2018, Dominik Thalhammer License: Expat Files: ./cmake/FindUUID.cmake Copyright: 2006, Andreas Schneider License: BSD-3-clause License: Apache-2.0 Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at . http://www.apache.org/licenses/LICENSE-2.0 . Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. . On Debian systems, the full text of the Apache License, Version 2.0 can be found in the file `/usr/share/common-licenses/Apache-2.0'. License: BSD-2-clause Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: . 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. . 2. 
Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. . THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. License: BSD-3-clause Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: . 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. . 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. . 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. . THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. License: Expat Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: . The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. . THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
scitokens-cpp-1.1.3/debian/get-orig-source.sh000077500000000000000000000020701475716002100210740ustar00rootroot00000000000000# Generate a source tarball including submodules if [ -z "${1}" ] ; then echo No tag or branch given exit 1 fi ver=${1} # Remove initial v from tag name for use in filenames if [ ${ver:0:1} = 'v' ] ; then fver=${ver:1} else fver=${ver} fi if [ -r scitokens-cpp_${fver}.orig.tar.gz ] ; then echo scitokens-cpp_${fver}.orig.tar.gz already exists exit 1 fi curdir=$(pwd) tdir=$(mktemp -d) cd ${tdir} git clone https://github.com/scitokens/scitokens-cpp.git cd scitokens-cpp git checkout ${ver} if [ $? -ne 0 ] ; then echo No such tag or branch: ${ver} cd ${curdir} rm -rf ${tdir} exit 1 fi git archive --prefix scitokens-cpp_${fver}/ ${ver} -o ${tdir}/scitokens-cpp_${fver}.orig.tar git submodule update --init git submodule foreach --recursive "git archive --prefix scitokens-cpp_${fver}/\$path/ \$sha1 -o ${tdir}/\$sha1.tar ; tar -A -f ${tdir}/scitokens-cpp_${fver}.orig.tar ${tdir}/\$sha1.tar ; rm ${tdir}/\$sha1.tar" cd ${tdir} gzip scitokens-cpp_${fver}.orig.tar mv scitokens-cpp_${fver}.orig.tar.gz ${curdir} cd ${curdir} rm -rf ${tdir} scitokens-cpp-1.1.3/debian/libscitokens-dev.install000066400000000000000000000001231475716002100223540ustar00rootroot00000000000000debian/tmp/usr/include/scitokens usr/include/ debian/tmp/usr/lib/*/libSciTokens.so scitokens-cpp-1.1.3/debian/libscitokens0.install000066400000000000000000000001061475716002100216610ustar00rootroot00000000000000debian/tmp/usr/lib/*/libSciTokens.so.* debian/tmp/usr/bin/scitokens-* scitokens-cpp-1.1.3/debian/obsupdate.sh000077500000000000000000000023151475716002100200510ustar00rootroot00000000000000#!/bin/bash # update ../packaging/debian/packageName.dsc # That file is used by build.opensuse.org's Open Build Service # After the file is updated, it needs to be separately committed to git. 
HERE="`dirname $0`" ME="`basename $0`" cd $HERE PKG="`sed -n 's/^Source: //p' control`" SPECFILE="../rpm/$PKG.spec" VERSION="$(grep ^Version: $SPECFILE | awk '{print $2}')" RPMREL="$(grep '^%define release_prefix' $SPECFILE | awk '{print $3}')" if [ -z "$RPMREL" ]; then RPMREL="$(grep '^Release:' $SPECFILE | awk '{print $2}' | cut -d% -f1)" fi # if the version is current, increment the release number, else choose 1 DEBREL="`sed -n "s/^Version: ${VERSION}\.${RPMREL}-//p" $PKG.dsc 2>/dev/null`" if [ -z "$DEBREL" ]; then DEBREL=1 else let DEBREL+=1 fi ( echo "# created by $ME, do not edit by hand" # The following two lines are OBS "magic" to use the tarball from the rpm echo "Debtransform-Tar: ${PKG}-${VERSION}.tar.gz" #echo "Debtransform-Files-Tar: " echo "Format: 3.0" echo "Version: ${VERSION}.${RPMREL}-${DEBREL}" echo "Binary: $PKG" cat control echo "Files:" echo " ffffffffffffffffffffffffffffffff 99999 file1" echo " ffffffffffffffffffffffffffffffff 99999 file2" ) > $PKG.dsc # echo "Updated $PWD/$PKG.dsc" scitokens-cpp-1.1.3/debian/patches/000077500000000000000000000000001475716002100171525ustar00rootroot00000000000000scitokens-cpp-1.1.3/debian/patches/series000066400000000000000000000000001475716002100203550ustar00rootroot00000000000000scitokens-cpp-1.1.3/debian/rules000077500000000000000000000003061475716002100166020ustar00rootroot00000000000000#!/usr/bin/make -f DEB_HOST_MULTIARCH ?= $(shell dpkg-architecture -qDEB_HOST_MULTIARCH) %: dh $@ override_dh_auto_configure: dh_auto_configure -- -DLIB_INSTALL_DIR=lib/$(DEB_HOST_MULTIARCH) scitokens-cpp-1.1.3/debian/scitokens-cpp.dsc000066400000000000000000000033551475716002100210060ustar00rootroot00000000000000# created by obsupdate.sh, do not edit by hand Debtransform-Tar: scitokens-cpp-1.1.3.tar.gz Format: 3.0 Version: 1.1.3.1-2 Binary: scitokens-cpp Source: scitokens-cpp Section: science Priority: optional Maintainer: Tim Theisen Build-Depends: cmake (>=2.6), debhelper (>=9), libcurl4-openssl-dev | 
libcurl4-gnutls-dev, libsqlite3-dev, libssl-dev, pkg-config, uuid-dev Standards-Version: 3.9.8 Homepage: https://github.com/scitokens/scitokens-cpp Package: libscitokens0 Section: libs Architecture: any Multi-Arch: same Pre-Depends: ${misc:Pre-Depends} Depends: ${misc:Depends}, ${shlibs:Depends} Description: C++ Implementation of the SciTokens Library SciTokens provide a token format for distributed authorization The tokens are self-describing, can be verified in a distributed fashion (no need to contact the issuer to determine if the token is valid). This is convenient for a federated environment where several otherwise-independent storage endpoints want to delegate trust for an issuer for managing a storage allocation. Package: libscitokens-dev Section: libdevel Architecture: any Multi-Arch: same Depends: libscitokens0 (= ${binary:Version}), ${misc:Depends} Description: Header files for the libscitokens public interfaces SciTokens provide a token format for distributed authorization. The tokens are self-describing, can be verified in a distributed fashion (no need to contact the issuer to determine if the token is valid). This is convenient for a federated environment where several otherwise-independent storage endpoints want to delegate trust for an issuer for managing a storage allocation. 
Files: ffffffffffffffffffffffffffffffff 99999 file1 ffffffffffffffffffffffffffffffff 99999 file2 scitokens-cpp-1.1.3/debian/source/000077500000000000000000000000001475716002100170235ustar00rootroot00000000000000scitokens-cpp-1.1.3/debian/source/format000066400000000000000000000000141475716002100202310ustar00rootroot000000000000003.0 (quilt) scitokens-cpp-1.1.3/debian/source/local-options000066400000000000000000000000541475716002100215300ustar00rootroot00000000000000#abort-on-upstream-changes #unapply-patches scitokens-cpp-1.1.3/debian/watch000066400000000000000000000000121475716002100165450ustar00rootroot00000000000000version=3 scitokens-cpp-1.1.3/rpm/000077500000000000000000000000001475716002100150775ustar00rootroot00000000000000scitokens-cpp-1.1.3/rpm/scitokens-cpp.spec000066400000000000000000000136401475716002100205410ustar00rootroot00000000000000Name: scitokens-cpp Version: 1.1.3 Release: 1%{?dist} Summary: C++ Implementation of the SciTokens Library License: ASL 2.0 URL: https://github.com/scitokens/scitokens-cpp # Directions to generate a proper release: # VER=0.3.3 # for example # git archive --prefix "scitokens-cpp-$VER/" -o "scitokens-cpp-$VER.tar" v$VER # git submodule update --init # git submodule foreach --recursive "git archive --prefix=scitokens-cpp-$VER/\$path/ --output=\$sha1.tar HEAD && tar --concatenate --file=$(pwd)/scitokens-cpp-$VER.tar \$sha1.tar && rm \$sha1.tar" # gzip "scitokens-cpp-$VER.tar" Source0: https://github.com/scitokens/scitokens-cpp/releases/download/v%{version}/%{name}-%{version}.tar.gz # Scitokens-cpp bundles jwt-cpp, a header only dependency # Since it doesn't create a library that can be used by others, it seems # inappropriate to include a "Provides", as jwt-cpp is not provided # by this package. 
BuildRequires: gcc-c++ BuildRequires: make BuildRequires: cmake3 BuildRequires: sqlite-devel BuildRequires: openssl-devel BuildRequires: libcurl-devel BuildRequires: libuuid-devel %if 0%{?el7} # needed for ldconfig_scriptlets BuildRequires: epel-rpm-macros %endif %description %{summary} %package devel Summary: Header files for the scitokens-cpp public interfaces Requires: %{name}%{?_isa} = %{version} %description devel %{summary} %prep %setup -q %build %cmake3 %cmake3_build %install %cmake3_install # Run the ldconfig %ldconfig_scriptlets %files %{_libdir}/libSciTokens.so.0* %{_bindir}/scitokens-* %license LICENSE %doc README.md %files devel %{_libdir}/libSciTokens.so %{_includedir}/scitokens/scitokens.h %dir %{_includedir}/scitokens %changelog * Mon Feb 24 2025 Derek Weitzel - 1.1.3-1 - Include cstdint import for jwt library to support newer compilers * Wed Oct 30 2024 Derek Weitzel - 1.1.2-1 - Turn off CMAKE unity builds - Add a mutex around requesting public keys to stop overloading issuers * Wed Feb 28 2024 Derek Weitzel - 1.1.1-1 - Improve error handling around the sqlite3 library - Fix test failures and compiler warnings * Tue Nov 07 2023 Derek Weitzel - 1.1.0-1 - Allow the scitokens library user to setup a custom CA file - Fix typecast errors in scitoken_status_get_*() that caused async queries to fail - Fix logic error in deserialize_continue() that caused async deserialization to fail * Thu Jun 15 2023 Derek Weitzel - 1.0.2-1 - Add support for API-configurable cache home - Fix enforcer_acl_free logic - scitokens_internal: catch matching exception type after jwt-cpp update * Wed Apr 26 2023 Derek Weitzel - 1.0.1-1 - Fix bug in generate acls which would cause a timeout * Tue Mar 21 2023 Derek Weitzel - 1.0.0-1 - Add async API for parsing and verifying tokens - Add configuration API - Make nbf claim optional for non-scitokens tokens - Update to OpenSSL 3.0 * Wed Jun 22 2022 Derek Weitzel - 0.7.1-1 - Add scitokens-* binaries to the package - Bug: close sqlite 
db handle on return * Fri Feb 18 2022 Derek Weitzel - 0.7.0-1 - Changes from static analysis - If only one key is available, do not error on no kid - Support at+jwt profile * Fri Sep 03 2021 Dave Dykstra - 0.6.3-1 - Add support for building Debian packages on the OpenSUSE Build System - Add patch to jwt-cpp to update its picojson dependency in order to enable it to compile on Debian 11 and Ubuntu 21.04 - Fix el7 build by requiring epel-rpm-macros * Thu Aug 26 2021 Dave Dykstra - 0.6.2-2 - Make the build require cmake3 instead of cmake * Thu Jun 03 2021 Derek Weitzel - 0.6.2-1 - Correct WLCG compat for condor read permissions * Thu May 20 2021 Derek Weitzel - 0.6.1-1 - Fix vector resize for el8+ builds * Tue May 18 2021 Derek Weitzel - 0.6.0-2 - Add back paren patch * Tue Mar 09 2021 Derek Weitzel - 0.6.0-1 - Fix compilation errors on c++11 - Update to jwt-cpp-0.4.0 vendor - Change scitoken profile name to match spec, scitoken:2.0 * Wed Jun 24 2020 Derek Weitzel - 0.5.1-1 - Add storage.modify as write permission * Fri Feb 28 2020 Derek Weitzel - 0.5.0-1 - Add API for retrieving string list attributes * Fri Nov 08 2019 Derek Weitzel - 0.4.0-1 - Add support for WLCG profile * Fri Nov 08 2019 Derek Weitzel - 0.3.5-1 - Fix EC public key handling * Wed Sep 18 2019 Derek Weitzel - 0.3.4-1 - Fix bugs for support with IAM * Thu Aug 01 2019 Derek Weitzel - 0.3.3-3 - Update the packaging to bring it line with EPEL (fedora) guidelines * Tue Jul 30 2019 Derek Weitzel - 0.3.3-2 - Change the Source URL - Use make_build in the packaging * Thu Jul 25 2019 Derek Weitzel - 0.3.3-1 - Merge OSG changes - Use a newer, still supported version of devtoolset - Fix bug in verifying EC signed tokens #13 * Thu Jul 25 2019 Derek Weitzel - 0.3.2-1 - Update RPM to v0.3.2 of the packaging. - Fix downloading public key bug #12 * Thu Jun 20 2019 Brian Bockelman - 0.3.1-1 - Update RPM to v0.3.1 of the packaging. 
* Wed May 29 2019 Mátyás Selmeci - 0.3.0-4 - Use double layer of const for deserialize (patch from https://github.com/scitokens/scitokens-cpp/commit/ac0b2f0679488fa91c14ed781268efbcdb69ed3c) * Mon May 13 2019 Mátyás Selmeci - 0.3.0-3 - Add Force-aud-test-in-the-validator.patch from https://github.com/scitokens/scitokens-cpp/pull/8 * Fri May 03 2019 Mátyás Selmeci - 0.3.0-2 - Fix requirements * Thu May 02 2019 Mátyás Selmeci - 0.3.0-1 - Update to v0.3.0 - Add dependencies on libcurl-devel, libuuid-devel * Thu Jan 03 2019 Brian Bockelman - 0.1.0-1 - Initial version of the SciTokens C++ RPM. scitokens-cpp-1.1.3/src/000077500000000000000000000000001475716002100150705ustar00rootroot00000000000000scitokens-cpp-1.1.3/src/create.cpp000066400000000000000000000140361475716002100170430ustar00rootroot00000000000000 #include "scitokens.h" #include #include #include #include #include #include #include namespace { const char usage[] = "\n" "Syntax: %s [--cred cred_file] [--key key_file] [--keyid kid]\n" " [--claim key=val] ...\n" "\n" " Options\n" " -h | --help Display usage\n" " -c | --cred File containing signing " "credential.\n" " -k | --key File containing the signing " "private key.\n" " -K | --keyid Name of the token key.\n" " -i | --issuer Issuer for the token.\n" " -p | --profile Token profile (wlcg, scitokens1, " "scitokens2, atjwt).\n" "\n"; const struct option long_options[] = {{"help", no_argument, NULL, 'h'}, {"cred", required_argument, NULL, 'c'}, {"key", required_argument, NULL, 'k'}, {"keyid", required_argument, NULL, 'K'}, {"issuer", required_argument, NULL, 'i'}, {"claim", required_argument, NULL, 'C'}, {"profile", required_argument, NULL, 'p'}, {0, 0, 0, 0}}; const char short_options[] = "hc:k:K:i:C:p:"; std::string g_cred, g_key, g_kid, g_issuer, g_profile; std::vector g_claims; int init_arguments(int argc, char *argv[]) { int arg; while ((arg = getopt_long(argc, argv, short_options, long_options, nullptr)) != -1) { switch (arg) { case 'h': printf(usage, 
argv[0]); exit(0); break; case 'c': g_cred = optarg; break; case 'k': g_key = optarg; break; case 'K': g_kid = optarg; break; case 'i': g_issuer = optarg; break; case 'C': g_claims.emplace_back(optarg); break; case 'p': g_profile = optarg; break; default: fprintf(stderr, usage, argv[0]); exit(1); break; } } if (optind != argc) { fprintf(stderr, "%s: invalid option -- %s\n", argv[0], argv[optind]); fprintf(stderr, usage, argv[0]); exit(1); } if (g_cred.empty()) { fprintf(stderr, "%s: missing --cred option\n", argv[0]); fprintf(stderr, usage, argv[0]); exit(1); } if (g_key.empty()) { fprintf(stderr, "%s: missing --key option\n", argv[0]); fprintf(stderr, usage, argv[0]); exit(1); } if (g_kid.empty()) { fprintf(stderr, "%s: missing --keyid option\n", argv[0]); fprintf(stderr, usage, argv[0]); exit(1); } if (g_issuer.empty()) { fprintf(stderr, "%s: missing --issuer option\n", argv[0]); fprintf(stderr, usage, argv[0]); exit(1); } return 0; } } // namespace int main(int argc, char *argv[]) { int rv = init_arguments(argc, argv); if (rv) { return rv; } std::ifstream priv_ifs(g_key); std::string private_contents((std::istreambuf_iterator(priv_ifs)), (std::istreambuf_iterator())); std::ifstream pub_ifs(g_cred); std::string public_contents((std::istreambuf_iterator(pub_ifs)), (std::istreambuf_iterator())); char *err_msg; auto key_raw = scitoken_key_create(g_kid.c_str(), "ES256", public_contents.c_str(), private_contents.c_str(), &err_msg); std::unique_ptr key( key_raw, scitoken_key_destroy); if (key_raw == nullptr) { fprintf(stderr, "Failed to generate a key: %s\n", err_msg); free(err_msg); return 1; } std::unique_ptr token( scitoken_create(key_raw), scitoken_destroy); if (token.get() == nullptr) { fprintf(stderr, "Failed to generate a new token.\n"); return 1; } rv = scitoken_set_claim_string(token.get(), "iss", g_issuer.c_str(), &err_msg); if (rv) { fprintf(stderr, "Failed to set issuer: %s\n", err_msg); free(err_msg); return 1; } for (const auto &claim : g_claims) { auto 
pos = claim.find("="); if (pos == std::string::npos) { fprintf(stderr, "Claim must contain a '=' character: %s\n", claim.c_str()); return 1; } auto key = claim.substr(0, pos); auto val = claim.substr(pos + 1); rv = scitoken_set_claim_string(token.get(), key.c_str(), val.c_str(), &err_msg); if (rv) { fprintf(stderr, "Failed to set claim (%s=%s): %s\n", key.c_str(), val.c_str(), err_msg); free(err_msg); return 1; } } if (!g_profile.empty()) { SciTokenProfile profile; if (g_profile == "wlcg") { profile = SciTokenProfile::WLCG_1_0; } else if (g_profile == "scitokens1") { profile = SciTokenProfile::SCITOKENS_1_0; } else if (g_profile == "scitokens2") { profile = SciTokenProfile::SCITOKENS_2_0; } else if (g_profile == "atjwt") { profile = SciTokenProfile::AT_JWT; } else { fprintf(stderr, "Unknown token profile: %s\n", g_profile.c_str()); return 1; } scitoken_set_serialize_mode(token.get(), profile); } char *value; rv = scitoken_serialize(token.get(), &value, &err_msg); if (rv) { fprintf(stderr, "Failed to serialize the token: %s\n", err_msg); free(err_msg); return 1; } printf("%s\n", value); } scitokens-cpp-1.1.3/src/list_access.cpp000066400000000000000000000027241475716002100200750ustar00rootroot00000000000000#include #include "scitokens.h" int main(int argc, const char **argv) { if (argc < 4) { std::cerr << "Usage: " << argv[0] << " (TOKEN) (ISSUER) (AUDIENCE)" << std::endl; return 1; } std::string token(argv[1]); std::string issuer(argv[2]); std::string audience(argv[3]); const char *aud_list[2]; aud_list[0] = audience.c_str(); aud_list[1] = nullptr; SciToken scitoken; char *err_msg = nullptr; if (scitoken_deserialize(token.c_str(), &scitoken, nullptr, &err_msg)) { std::cout << "Failed to deserialize a token: " << err_msg << std::endl; return 1; } std::cout << "Token deserialization successful. Checking authorizations." 
<< std::endl; Enforcer enf; if (!(enf = enforcer_create(issuer.c_str(), aud_list, &err_msg))) { std::cout << "Failed to create a new enforcer object: " << err_msg << std::endl; return 1; } Acl *acls; if (enforcer_generate_acls(enf, scitoken, &acls, &err_msg)) { std::cout << "ACL generation failed: " << err_msg << std::endl; return 1; } std::cout << "Start of ACLs:" << std::endl; for (int idx = 0; acls[idx].authz && acls[idx].resource; idx++) { std::cout << "ACL: " << acls[idx].authz << ":" << acls[idx].resource << std::endl; } std::cout << "End of ACLs:" << std::endl; enforcer_destroy(enf); return 0; } scitokens-cpp-1.1.3/src/scitokens.cpp000066400000000000000000000752021475716002100176040ustar00rootroot00000000000000#include #include #include #include #include "scitokens.h" #include "scitokens_internal.h" /** * GLOBALS */ // Cache timeout config std::atomic_int configurer::Configuration::m_next_update_delta{600}; std::atomic_int configurer::Configuration::m_expiry_delta{4 * 24 * 3600}; // SciTokens cache home config std::shared_ptr configurer::Configuration::m_cache_home = std::make_shared(""); std::shared_ptr configurer::Configuration::m_tls_ca_file = std::make_shared(""); SciTokenKey scitoken_key_create(const char *key_id, const char *alg, const char *public_contents, const char *private_contents, char **err_msg) { if (key_id == nullptr) { if (err_msg) { *err_msg = strdup("Key ID cannot be NULL."); } return nullptr; } if (alg == nullptr) { if (err_msg) { *err_msg = strdup("Algorithm cannot be NULL."); } return nullptr; } if (public_contents == nullptr) { if (err_msg) { *err_msg = strdup("Public key contents cannot be NULL."); } return nullptr; } if (private_contents == nullptr) { if (err_msg) { *err_msg = strdup("Private key contents cannot be NULL."); } return nullptr; } return new scitokens::SciTokenKey(key_id, alg, public_contents, private_contents); } void scitoken_key_destroy(SciTokenKey token) { scitokens::SciTokenKey *real_token = reinterpret_cast(token); 
delete real_token; } SciToken scitoken_create(SciTokenKey private_key) { scitokens::SciTokenKey *key = reinterpret_cast(private_key); return new scitokens::SciToken(*key); } void scitoken_destroy(SciToken token) { scitokens::SciToken *real_token = reinterpret_cast(token); delete real_token; } int scitoken_set_claim_string(SciToken token, const char *key, const char *value, char **err_msg) { scitokens::SciToken *real_token = reinterpret_cast(token); if (real_token == nullptr) { if (err_msg) { *err_msg = strdup("Token passed is not initialized."); } return -1; } if (key == nullptr) { if (err_msg) { *err_msg = strdup("Claim key passed is not initialized."); } return -1; } if (value == nullptr) { if (err_msg) { *err_msg = strdup("Claim value passed is not initialized."); } return -1; } try { real_token->set_claim(key, jwt::claim(std::string(value))); } catch (std::exception &exc) { if (err_msg) { *err_msg = strdup(exc.what()); } return -1; } return 0; } void scitoken_set_serialize_profile(SciToken token, SciTokenProfile profile) { scitoken_set_serialize_mode(token, profile); } void scitoken_set_serialize_mode(SciToken token, SciTokenProfile profile) { scitokens::SciToken *real_token = reinterpret_cast(token); if (real_token == nullptr) { return; } real_token->set_serialize_mode( static_cast(profile)); } void scitoken_set_deserialize_profile(SciToken token, SciTokenProfile profile) { scitokens::SciToken *real_token = reinterpret_cast(token); if (real_token == nullptr) { return; } real_token->set_deserialize_mode( static_cast(profile)); } int scitoken_get_claim_string(const SciToken token, const char *key, char **value, char **err_msg) { scitokens::SciToken *real_token = reinterpret_cast(token); std::string claim_str; try { claim_str = real_token->get_claim_string(key); } catch (std::exception &exc) { if (err_msg) { *err_msg = strdup(exc.what()); } return -1; } *value = strdup(claim_str.c_str()); return 0; } int scitoken_set_claim_string_list(const SciToken token, const 
char *key, const char **value, char **err_msg) { auto real_token = reinterpret_cast(token); if (real_token == nullptr) { if (err_msg) *err_msg = strdup( "NULL scitoken passed to scitoken_get_claim_string_list"); return -1; } std::vector claim_list; int idx = 0; while (value[idx++]) { } claim_list.reserve(idx); idx = 0; while (value[idx++]) { claim_list.emplace_back(value[idx - 1]); } real_token->set_claim_list(key, claim_list); return 0; } int scitoken_get_claim_string_list(const SciToken token, const char *key, char ***value, char **err_msg) { auto real_token = reinterpret_cast(token); if (real_token == nullptr) { if (err_msg) *err_msg = strdup( "NULL scitoken passed to scitoken_get_claim_string_list"); return -1; } std::vector claim_list; try { claim_list = real_token->get_claim_list(key); } catch (std::exception &exc) { if (err_msg) { *err_msg = strdup(exc.what()); } return -1; } auto claim_list_c = static_cast(malloc(sizeof(char *) * (claim_list.size() + 1))); claim_list_c[claim_list.size()] = nullptr; int idx = 0; for (const auto &entry : claim_list) { claim_list_c[idx] = strdup(entry.c_str()); if (!claim_list_c[idx]) { scitoken_free_string_list(claim_list_c); if (err_msg) { *err_msg = strdup("Failed to create a copy of string entry in list"); } return -1; } idx++; } *value = claim_list_c; return 0; } void scitoken_free_string_list(char **value) { int idx = 0; do { free(value[idx++]); } while (value[idx]); free(value); } int scitoken_get_expiration(const SciToken token, long long *expiry, char **err_msg) { scitokens::SciToken *real_token = reinterpret_cast(token); if (!real_token->has_claim("exp")) { *expiry = -1; return 0; } long long result; try { result = real_token->get_claim("exp").as_int(); } catch (std::exception &exc) { if (err_msg) { *err_msg = strdup(exc.what()); } return -1; } *expiry = result; return 0; } void scitoken_set_lifetime(SciToken token, int lifetime) { if (token == nullptr) { return; } scitokens::SciToken *real_token = 
reinterpret_cast(token); real_token->set_lifetime(lifetime); } int scitoken_serialize(const SciToken token, char **value, char **err_msg) { if (value == nullptr) { if (err_msg) { *err_msg = strdup("Output variable not provided"); } return -1; } scitokens::SciToken *real_token = reinterpret_cast(token); try { std::string serialized = real_token->serialize(); *value = strdup(serialized.c_str()); } catch (std::exception &exc) { if (err_msg) { *err_msg = strdup(exc.what()); } return -1; } return 0; } int scitoken_deserialize(const char *value, SciToken *token, char const *const *allowed_issuers, char **err_msg) { if (value == nullptr) { if (err_msg) { *err_msg = strdup("Token may not be NULL"); } return -1; } if (token == nullptr) { if (err_msg) { *err_msg = strdup("Output token not provided"); } return -1; } scitokens::SciTokenKey key; scitokens::SciToken *real_token = new scitokens::SciToken(key); int retval = scitoken_deserialize_v2(value, reinterpret_cast(real_token), allowed_issuers, err_msg); if (retval) { delete real_token; } else { *token = real_token; } return retval; } int scitoken_deserialize_v2(const char *value, SciToken token, char const *const *allowed_issuers, char **err_msg) { scitokens::SciToken *real_token = reinterpret_cast(token); std::vector allowed_issuers_vec; if (allowed_issuers != nullptr) { for (int idx = 0; allowed_issuers[idx]; idx++) { allowed_issuers_vec.push_back(allowed_issuers[idx]); } } try { real_token->deserialize(value, allowed_issuers_vec); } catch (std::exception &exc) { if (err_msg) { *err_msg = strdup(exc.what()); } return -1; } return 0; } int scitoken_deserialize_start(const char *value, SciToken *token, char const *const *allowed_issuers, SciTokenStatus *status_out, char **err_msg) { if (value == nullptr) { if (err_msg) { *err_msg = strdup("Token may not be NULL"); } return -1; } if (token == nullptr) { if (err_msg) { *err_msg = strdup("Output token not provided"); } return -1; } scitokens::SciTokenKey key; 
scitokens::SciToken *real_token = new scitokens::SciToken(key); std::vector allowed_issuers_vec; if (allowed_issuers != nullptr) { for (int idx = 0; allowed_issuers[idx]; idx++) { allowed_issuers_vec.push_back(allowed_issuers[idx]); } } std::unique_ptr status; try { status = real_token->deserialize_start(value, allowed_issuers_vec); } catch (std::exception &exc) { if (err_msg) { *err_msg = strdup(exc.what()); } delete real_token; *status_out = nullptr; return -1; } // Check if we're done if (status->m_status->m_done) { *token = real_token; *status_out = nullptr; return 0; } *token = real_token; *status_out = status.release(); return 0; } int scitoken_deserialize_continue(SciToken *token, SciTokenStatus *status, char **err_msg) { if (token == nullptr) { if (err_msg) { *err_msg = strdup("Output token not provided"); } return -1; } scitokens::SciToken *real_token = reinterpret_cast(*token); std::unique_ptr real_status( reinterpret_cast(*status)); if (*status == nullptr || real_status->m_status->m_done) { *status = nullptr; return 0; } try { real_status = real_token->deserialize_continue(std::move(real_status)); } catch (std::exception &exc) { *status = nullptr; if (err_msg) { *err_msg = strdup(exc.what()); } return -1; } if (real_status->m_status->m_done) { *status = nullptr; } else { *status = real_status.release(); } return 0; } int scitoken_store_public_ec_key(const char *issuer, const char *keyid, const char *key, char **err_msg) { bool success; try { success = scitokens::Validator::store_public_ec_key(issuer, keyid, key); } catch (std::exception &exc) { if (err_msg) { *err_msg = strdup(exc.what()); } return -1; } return success ? 
0 : -1; } Validator validator_create() { return new Validator(); } void validator_destroy(Validator validator) { scitokens::Validator *real_validator = reinterpret_cast(validator); delete real_validator; } void validator_set_token_profile(Validator validator, SciTokenProfile profile) { if (validator == nullptr) { return; } auto real_validator = reinterpret_cast(validator); real_validator->set_validate_profile( static_cast(profile)); } int validator_add(Validator validator, const char *claim, StringValidatorFunction validator_func, char **err_msg) { if (validator == nullptr) { if (err_msg) { *err_msg = strdup("Validator may not be a null pointer"); } return -1; } auto real_validator = reinterpret_cast(validator); if (claim == nullptr) { if (err_msg) { *err_msg = strdup("Claim name may not be a null pointer"); } return -1; } if (validator_func == nullptr) { if (err_msg) { *err_msg = strdup("Validator function may not be a null pointer"); } return -1; } real_validator->add_string_validator(claim, validator_func); return 0; } int validator_add_critical_claims(Validator validator, const char **claims, char **err_msg) { if (validator == nullptr) { if (err_msg) { *err_msg = strdup("Validator may not be a null pointer"); } return -1; } auto real_validator = reinterpret_cast(validator); if (claims == nullptr) { if (err_msg) { *err_msg = strdup("Claim list may not be a null pointer"); } return -1; } std::vector claims_vec; for (int idx = 0; claims[idx]; idx++) { claims_vec.push_back(claims[idx]); } real_validator->add_critical_claims(claims_vec); return 0; } int validator_validate(Validator validator, SciToken scitoken, char **err_msg) { if (validator == nullptr) { if (err_msg) { *err_msg = strdup("Validator may not be a null pointer"); } return -1; } auto real_validator = reinterpret_cast(validator); if (scitoken == nullptr) { if (err_msg) { *err_msg = strdup("SciToken may not be a null pointer"); } return -1; } auto real_scitoken = reinterpret_cast(scitoken); try { 
real_validator->verify(*real_scitoken, time(NULL) + 20); } catch (std::exception &exc) { if (err_msg) { *err_msg = strdup(exc.what()); } return -1; } return 0; } int validator_set_time(Validator validator, time_t now, char **err_msg) { if (validator == nullptr) { if (err_msg) { *err_msg = strdup("Validator may not be a null pointer"); } return -1; } auto real_validator = reinterpret_cast(validator); real_validator->set_now(std::chrono::system_clock::from_time_t(now)); return 0; } Enforcer enforcer_create(const char *issuer, const char **audience_list, char **err_msg) { if (issuer == nullptr) { if (err_msg) { *err_msg = strdup("Issuer may not be a null pointer"); } return nullptr; } std::vector aud_list; if (audience_list != nullptr) { for (int idx = 0; audience_list[idx]; idx++) { aud_list.push_back(audience_list[idx]); } } return new scitokens::Enforcer(issuer, aud_list); } void enforcer_destroy(Enforcer enf) { if (enf == nullptr) { return; } auto real_enf = reinterpret_cast(enf); delete real_enf; } void enforcer_acl_free(Acl *acls) { for (int idx = 0; acls[idx].authz != nullptr || acls[idx].resource != nullptr; idx++) { free(const_cast(acls[idx].authz)); free(const_cast(acls[idx].resource)); } free(acls); } void enforcer_set_validate_profile(Enforcer enf, SciTokenProfile profile) { if (enf == nullptr) { return; } auto real_enf = reinterpret_cast(enf); real_enf->set_validate_profile( static_cast(profile)); } namespace { Acl *convert_acls(scitokens::Enforcer::AclsList &acls_list, char **err_msg) { Acl *acl_result = static_cast(malloc((acls_list.size() + 1) * sizeof(Acl))); size_t idx = 0; for (const auto &acl : acls_list) { acl_result[idx].authz = strdup(acl.first.c_str()); acl_result[idx].resource = strdup(acl.second.c_str()); if (acl_result[idx].authz == nullptr) { enforcer_acl_free(acl_result); if (err_msg) { *err_msg = strdup("ACL was generated without an authorization set."); } return nullptr; } if (acl_result[idx].resource == nullptr) { 
enforcer_acl_free(acl_result); if (err_msg) { *err_msg = strdup("ACL was generated without a resource set."); } return nullptr; } idx++; } acl_result[idx].authz = nullptr; acl_result[idx].resource = nullptr; return acl_result; } } // namespace int enforcer_set_time(Enforcer enf, time_t now, char **err_msg) { if (enf == nullptr) { if (err_msg) { *err_msg = strdup("Enforcer may not be a null pointer"); } return -1; } auto real_enf = reinterpret_cast(enf); real_enf->set_now(std::chrono::system_clock::from_time_t(now)); return 0; } int enforcer_generate_acls(const Enforcer enf, const SciToken scitoken, Acl **acls, char **err_msg) { if (enf == nullptr) { if (err_msg) { *err_msg = strdup("Enforcer may not be a null pointer"); } return -1; } auto real_enf = reinterpret_cast(enf); if (scitoken == nullptr) { if (err_msg) { *err_msg = strdup("SciToken may not be a null pointer"); } return -1; } auto real_scitoken = reinterpret_cast(scitoken); scitokens::Enforcer::AclsList acls_list; try { acls_list = real_enf->generate_acls(*real_scitoken); } catch (std::exception &exc) { if (err_msg) { *err_msg = strdup(exc.what()); } return -1; } auto result_acls = convert_acls(acls_list, err_msg); if (!result_acls) { return -1; } *acls = result_acls; return 0; } int enforcer_generate_acls_start(const Enforcer enf, const SciToken scitoken, SciTokenStatus *status_out, Acl **acls, char **err_msg) { if (enf == nullptr) { if (err_msg) { *err_msg = strdup("Enforcer may not be a null pointer"); } return -1; } auto real_enf = reinterpret_cast(enf); if (scitoken == nullptr) { if (err_msg) { *err_msg = strdup("SciToken may not be a null pointer"); } return -1; } auto real_scitoken = reinterpret_cast(scitoken); scitokens::Enforcer::AclsList acls_list; std::unique_ptr status; try { status = real_enf->generate_acls_start(*real_scitoken, acls_list); } catch (std::exception &exc) { if (err_msg) { *err_msg = strdup(exc.what()); } return -1; } if (status->m_done) { auto result_acls = 
convert_acls(acls_list, err_msg); if (!result_acls) { return -1; } *acls = result_acls; *status_out = nullptr; return 0; } *status_out = status.release(); return 0; } int enforcer_generate_acls_continue(const Enforcer enf, SciTokenStatus *status, Acl **acls, char **err_msg) { if (enf == nullptr) { if (err_msg) { *err_msg = strdup("Enforcer may not be a null pointer"); } return -1; } auto real_enf = reinterpret_cast(enf); if (status == nullptr) { if (err_msg) { *err_msg = strdup("Status may not be a null pointer"); } return -1; } scitokens::Enforcer::AclsList acls_list; std::unique_ptr status_internal( reinterpret_cast(*status)); try { status_internal = real_enf->generate_acls_continue( std::move(status_internal), acls_list); } catch (std::exception &exc) { *status = nullptr; if (err_msg) { *err_msg = strdup(exc.what()); } return -1; } if (status_internal->m_done) { auto result_acls = convert_acls(acls_list, err_msg); if (!result_acls) { return -1; } *acls = result_acls; *status = nullptr; return 0; } *status = status_internal.release(); return 0; } int enforcer_test(const Enforcer enf, const SciToken scitoken, const Acl *acl, char **err_msg) { if (enf == nullptr) { if (err_msg) { *err_msg = strdup("Enforcer may not be a null pointer"); } return -1; } auto real_enf = reinterpret_cast(enf); if (scitoken == nullptr) { if (err_msg) { *err_msg = strdup("SciToken may not be a null pointer"); } return -1; } auto real_scitoken = reinterpret_cast(scitoken); if (acl == nullptr) { if (err_msg) { *err_msg = strdup("ACL may not be a null pointer"); } return -1; } try { return real_enf->test(*real_scitoken, acl->authz, acl->resource) == true ? 
0 : -1; } catch (std::exception &exc) { if (err_msg) { *err_msg = strdup(exc.what()); } return -1; } return 0; } void scitoken_status_free(SciTokenStatus status) { std::unique_ptr status_real( reinterpret_cast(status)); } int scitoken_status_get_timeout_val(const SciTokenStatus *status, time_t expiry_time, struct timeval *timeout, char **err_msg) { if (status == nullptr) { if (err_msg) { *err_msg = strdup("Status object may not be a null pointer"); } return -1; } if (timeout == nullptr) { if (err_msg) { *err_msg = strdup("Timeout object may not be a null pointer"); } return -1; } auto real_status = reinterpret_cast(*status); struct timeval timeout_internal = real_status->m_status->get_timeout_val(expiry_time); timeout->tv_sec = timeout_internal.tv_sec; timeout->tv_usec = timeout_internal.tv_usec; return 0; } int scitoken_status_get_read_fd_set(SciTokenStatus *status, fd_set **read_fd_set, char **err_msg) { if (status == nullptr) { if (err_msg) { *err_msg = strdup("Status object may not be a null pointer"); } return -1; } if (read_fd_set == nullptr) { if (err_msg) { *err_msg = strdup("Read fd_set object may not be a null pointer"); } return -1; } auto real_status = reinterpret_cast(*status); *read_fd_set = real_status->m_status->get_read_fd_set(); return 0; } int scitoken_status_get_write_fd_set(SciTokenStatus *status, fd_set **write_fd_set, char **err_msg) { if (status == nullptr) { if (err_msg) { *err_msg = strdup("Status object may not be a null pointer"); } return -1; } if (write_fd_set == nullptr) { if (err_msg) { *err_msg = strdup("Write fd_set object may not be a null pointer"); } return -1; } auto real_status = reinterpret_cast(*status); *write_fd_set = real_status->m_status->get_write_fd_set(); return 0; } int scitoken_status_get_exc_fd_set(SciTokenStatus *status, fd_set **exc_fd_set, char **err_msg) { if (status == nullptr) { if (err_msg) { *err_msg = strdup("Status object may not be a null pointer"); } return -1; } if (exc_fd_set == nullptr) { if 
(err_msg) { *err_msg = strdup("Read fd_set object may not be a null pointer"); } return -1; } auto real_status = reinterpret_cast(*status); *exc_fd_set = real_status->m_status->get_exc_fd_set(); return 0; } int scitoken_status_get_max_fd(const SciTokenStatus *status, int *max_fd, char **err_msg) { if (status == nullptr) { if (err_msg) { *err_msg = strdup("Status object may not be a null pointer"); } return -1; } if (max_fd == nullptr) { if (err_msg) { *err_msg = strdup("Max FD may not be a null pointer"); } return -1; } auto real_status = reinterpret_cast(*status); *max_fd = real_status->m_status->get_max_fd(); return 0; } int keycache_refresh_jwks(const char *issuer, char **err_msg) { if (!issuer) { if (err_msg) { *err_msg = strdup("Issuer may not be a null pointer"); } return -1; } try { if (!scitokens::Validator::refresh_jwks(issuer)) { if (err_msg) { *err_msg = strdup("Failed to refresh JWKS cache for issuer."); } return -1; } } catch (std::exception &exc) { if (err_msg) { *err_msg = strdup(exc.what()); } return -1; } return 0; } int keycache_get_cached_jwks(const char *issuer, char **jwks, char **err_msg) { if (!issuer) { if (err_msg) { *err_msg = strdup("Issuer may not be a null pointer"); } return -1; } if (!jwks) { if (err_msg) { *err_msg = strdup("JWKS output pointer may not be null."); } return -1; } try { *jwks = strdup(scitokens::Validator::get_jwks(issuer).c_str()); } catch (std::exception &exc) { if (err_msg) { *err_msg = strdup(exc.what()); } return -1; } return 0; } int keycache_set_jwks(const char *issuer, const char *jwks, char **err_msg) { if (!issuer) { if (err_msg) { *err_msg = strdup("Issuer may not be a null pointer"); } return -1; } if (!jwks) { if (err_msg) { *err_msg = strdup("JWKS pointer may not be null."); } return -1; } try { if (!scitokens::Validator::store_jwks(issuer, jwks)) { if (err_msg) { *err_msg = strdup("Failed to set the JWKS cache for issuer."); } return -1; } } catch (std::exception &exc) { if (err_msg) { *err_msg = 
strdup(exc.what()); } return -1; } return 0; } int config_set_int(const char *key, int value, char **err_msg) { return scitoken_config_set_int(key, value, err_msg); } int scitoken_config_set_int(const char *key, int value, char **err_msg) { if (!key) { if (err_msg) { *err_msg = strdup("A key must be provided."); } return -1; } std::string _key = key; if (_key == "keycache.update_interval_s") { if (value < 0) { if (err_msg) { *err_msg = strdup("Update interval must be positive."); } return -1; } configurer::Configuration::set_next_update_delta(value); return 0; } else if (_key == "keycache.expiration_interval_s") { if (value < 0) { if (err_msg) { *err_msg = strdup("Expiry interval must be positive."); } return -1; } configurer::Configuration::set_expiry_delta(value); return 0; } else { if (err_msg) { *err_msg = strdup("Key not recognized."); } return -1; } } int config_get_int(const char *key, char **err_msg) { return scitoken_config_get_int(key, err_msg); } int scitoken_config_get_int(const char *key, char **err_msg) { if (!key) { if (err_msg) { *err_msg = strdup("A key must be provided."); } return -1; } std::string _key = key; if (_key == "keycache.update_interval_s") { return configurer::Configuration::get_next_update_delta(); } else if (_key == "keycache.expiration_interval_s") { return configurer::Configuration::get_expiry_delta(); } else { if (err_msg) { *err_msg = strdup("Key not recognized."); } return -1; } } int scitoken_config_set_str(const char *key, const char *value, char **err_msg) { if (!key) { if (err_msg) { *err_msg = strdup("A key must be provided."); } return -1; } std::string _key = key; if (_key == "keycache.cache_home") { auto rp = configurer::Configuration::set_cache_home(value); if (!rp.first) { // There was an error, pass rp.second to err_msg if (err_msg) { *err_msg = strdup(rp.second.c_str()); } return -1; } } else if (_key == "tls.ca_file") { configurer::Configuration::set_tls_ca_file(value ? 
std::string(value) : ""); } else { if (err_msg) { *err_msg = strdup("Key not recognized."); } return -1; } return 0; } int scitoken_config_get_str(const char *key, char **output, char **err_msg) { if (!key) { if (err_msg) { *err_msg = strdup("A key must be provided."); } return -1; } std::string _key = key; if (_key == "keycache.cache_home") { *output = strdup(configurer::Configuration::get_cache_home().c_str()); } else if (_key == "tls.ca_file") { *output = strdup(configurer::Configuration::get_tls_ca_file().c_str()); } else { if (err_msg) { *err_msg = strdup("Key not recognized."); } return -1; } return 0; } scitokens-cpp-1.1.3/src/scitokens.h000066400000000000000000000270061475716002100172500ustar00rootroot00000000000000/** * Public header for the SciTokens C library. * * */ #include #include #ifdef __cplusplus #include extern "C" { #else #include #endif typedef void *SciTokenKey; typedef void *SciToken; typedef void *Validator; typedef void *Enforcer; typedef void *SciTokenStatus; typedef void *Configuration; typedef int (*StringValidatorFunction)(const char *value, char **err_msg); typedef struct Acl_s { const char *authz; const char *resource; } Acl; /** * Determine the mode we will use to validate tokens. * - COMPAT mode (default) indicates any supported token format * is acceptable. Where possible, the scope names are translated into * equivalent SciTokens 1.0 claim names (i.e., storage.read -> read; * storage.write -> write). If a typ header claim is present, use that to deduce * type (RFC8725 Section 3.11). * - SCITOKENS_1_0, SCITOKENS_2_0, WLCG_1_0, AT_JWT: only accept these specific * profiles. No automatic translation is performed. 
*/ typedef enum _profile { COMPAT = 0, SCITOKENS_1_0, SCITOKENS_2_0, WLCG_1_0, AT_JWT } SciTokenProfile; SciTokenKey scitoken_key_create(const char *key_id, const char *algorithm, const char *public_contents, const char *private_contents, char **err_msg); void scitoken_key_destroy(SciTokenKey private_key); SciToken scitoken_create(SciTokenKey private_key); void scitoken_destroy(SciToken token); int scitoken_set_claim_string(SciToken token, const char *key, const char *value, char **err_msg); int scitoken_get_claim_string(const SciToken token, const char *key, char **value, char **err_msg); /** * Given a SciToken object, parse a specific claim's value as a list of strings. * If the JSON value is not actually a list of strings - or the claim is not set * - returns an error and sets the err_msg appropriately. * * The returned value is a list of strings that ends with a nullptr. */ int scitoken_get_claim_string_list(const SciToken token, const char *key, char ***value, char **err_msg); /** * Given a list of strings that was returned by scitoken_get_claim_string_list, * free all the associated memory. */ void scitoken_free_string_list(char **value); /** * Set the value of a claim to a list of strings. */ int scitoken_set_claim_string_list(const SciToken token, const char *key, const char **values, char **err_msg); int scitoken_get_expiration(const SciToken token, long long *value, char **err_msg); void scitoken_set_lifetime(SciToken token, int lifetime); int scitoken_serialize(const SciToken token, char **value, char **err_msg); /** * Set the profile used for serialization; if COMPAT mode is used, then * the library default is utilized (currently, scitokens 1.0). 
*/ void scitoken_set_serialize_profile(SciToken token, SciTokenProfile profile); void scitoken_set_serialize_mode(SciToken token, SciTokenProfile profile); void scitoken_set_deserialize_profile(SciToken token, SciTokenProfile profile); int scitoken_deserialize(const char *value, SciToken *token, char const *const *allowed_issuers, char **err_msg); /** * @brief Start the deserialization process for a token, returning a status * object. * * @param value The serialized token. * @param token Destination for the token object. * @param allowed_issuers List of allowed issuers, or nullptr for no issuer * check. * @param status Destination for the status object. * @param err_msg Destination for error message. * @return int 0 on success, -1 on error. */ int scitoken_deserialize_start(const char *value, SciToken *token, char const *const *allowed_issuers, SciTokenStatus *status, char **err_msg); /** * @brief Continue the deserialization process for a token, updating the status * object. * * If the status object indicates that the token is complete, the token object * will be populated and the status object will be nullptr. * * @param token The token object, returned from scitoken_deserialize_start. * @param status Status object for the deserialize. * @param err_msg Destination for error message. * @return int 0 on success, -1 on error. */ int scitoken_deserialize_continue(SciToken *token, SciTokenStatus *status, char **err_msg); int scitoken_deserialize_v2(const char *value, SciToken token, char const *const *allowed_issuers, char **err_msg); int scitoken_store_public_ec_key(const char *issuer, const char *keyid, const char *value, char **err_msg); Validator validator_create(); /** * Set the profile used for validating the tokens; COMPAT (default) will accept * any known token type while others will only support that specific profile. */ void validator_set_token_profile(Validator, SciTokenProfile profile); /** * Set the time to use with the validator. 
Useful if you want to see if the * token would have been valid at some time in the past. */ int validator_set_time(Validator validator, time_t now, char **err_msg); int validator_add(Validator validator, const char *claim, StringValidatorFunction validator_func, char **err_msg); int validator_add_critical_claims(Validator validator, const char **claims, char **err_msg); int validator_validate(Validator validator, SciToken scitoken, char **err_msg); /** * Destroy a validator object. */ void validator_destroy(Validator); Enforcer enforcer_create(const char *issuer, const char **audience, char **err_msg); void enforcer_destroy(Enforcer); /** * Set the profile used for enforcing ACLs; when set to COMPAT (default), then * the authorizations will be converted to SciTokens 1.0-style authorizations * (so, WLCG's storage.read becomes read). */ void enforcer_set_validate_profile(Enforcer, SciTokenProfile profile); /** * Set the time to use with the enforcer. Useful if you want to see if the * token would have been valid at some time in the past. */ int enforcer_set_time(Enforcer enf, time_t now, char **err_msg); int enforcer_generate_acls(const Enforcer enf, const SciToken scitokens, Acl **acls, char **err_msg); /** * The asynchronous versions of enforcer_generate_acls. */ int enforcer_generate_acls_start(const Enforcer enf, const SciToken scitokens, SciTokenStatus *status, Acl **acls, char **err_msg); int enforcer_generate_acls_continue(const Enforcer enf, SciTokenStatus *status, Acl **acls, char **err_msg); void enforcer_acl_free(Acl *acls); int enforcer_test(const Enforcer enf, const SciToken sci, const Acl *acl, char **err_msg); void scitoken_status_free(SciTokenStatus *status); /** * Get the suggested timeout val. After the timeout value has passed, the * asynchronous operation should continue. * * - `expiry_time`: the expiration time (in Unix epoch seconds) for the * operation in total. 
The returned timeout value will never take the operation * past the expiration time. */ int scitoken_status_get_timeout_val(const SciTokenStatus *status, time_t expiry_time, struct timeval *timeout, char **err_msg); /** * Get the set of read file descriptors. This will return a borrowed pointer * (whose lifetime matches the status object) pointing at a fd_set array of size * FD_SETSIZE. Any file descriptors owned by the status operation will be set * and the returned fd_set can be used for select() operations. * * IMPLEMENTATION NOTE: If the file descriptor monitored by libcurl are too high * to be stored in this set, libcurl should give a corresponding low timeout val * (100ms) and effectively switch to polling. See: * for more information. */ int scitoken_status_get_read_fd_set(SciTokenStatus *status, fd_set **read_fd_set, char **err_msg); /** * Get the set of write FDs; see documentation for * scitoken_status_get_read_fd_set. */ int scitoken_status_get_write_fd_set(SciTokenStatus *status, fd_set **write_fd_set, char **err_msg); /** * Get the set of exception FDs; see documentation for * scitoken_status_get_exc_fd_set. */ int scitoken_status_get_exc_fd_set(SciTokenStatus *status, fd_set **exc_fd_set, char **err_msg); /** * Get the maximum FD in the status set. * * IMPLEMENTATION NOTE: If the max FD is -1 then it implies libcurl is something * that cannot be modelled by a socket. In such a case, the libcurl docs * suggest using a 100ms timeout for select operations. See * . */ int scitoken_status_get_max_fd(const SciTokenStatus *status, int *max_fd, char **err_msg); /** * API for explicity managing the key cache. * * This manipulates the keycache for the current eUID. */ /** * Refresh the JWKS in the keycache for a given issuer; the refresh will occur * even if the JWKS is not otherwise due for updates. * - Returns 0 on success, nonzero on failure. 
*/ int keycache_refresh_jwks(const char *issuer, char **err_msg); /** * Retrieve the JWKS from the keycache for a given issuer. * - Returns 0 if successful, nonzero on failure. * - If the existing JWKS has expired - or does not exist - this does not * trigger a new download of the JWKS from the issuer. Instead, it will return * a JWKS object with an empty set of keys. * - `jwks` is an output variable set to the contents of the JWKS in the key * cache. */ int keycache_get_cached_jwks(const char *issuer, char **jwks, char **err_msg); /** * Replace any existing key cache entry with one provided by the user. * The expiration and next update time of the user-provided JWKS will utilize * the same rules as a download from an issuer with no explicit cache lifetime * directives. * - `jwks` is value that will be set in the cache. */ int keycache_set_jwks(const char *issuer, const char *jwks, char **err_msg); /** * APIs for managing scitokens configuration parameters. */ // On its way to deprecation int config_set_int(const char *key, int value, char **err_msg); /** * Update scitokens int parameters. * Takes in key/value pairs and assigns the input value to whatever * configuration variable is indicated by the key. * Returns 0 on success, and non-zero for invalid keys or values. */ int scitoken_config_set_int(const char *key, int value, char **err_msg); // on its way to deprecation int config_get_int(const char *key, char **err_msg); /** * Get current scitokens int parameters. * Returns the value associated with the supplied input key on success, and -1 * on failure. This assumes there are no keys for which a negative return value * is permissible. */ int scitoken_config_get_int(const char *key, char **err_msg); /** * Set current scitokens str parameters. * Returns 0 on success, nonzero on failure */ int scitoken_config_set_str(const char *key, const char *value, char **err_msg); /** * Get current scitokens str parameters. 
* Returns 0 on success, nonzero on failure, and populates the value associated * with the input key to output. */ int scitoken_config_get_str(const char *key, char **output, char **err_msg); #ifdef __cplusplus } #endif scitokens-cpp-1.1.3/src/scitokens_cache.cpp000066400000000000000000000211411475716002100207200ustar00rootroot00000000000000 #include #include #include #include #include #include #include #ifndef PICOJSON_USE_INT64 #define PICOJSON_USE_INT64 #endif #include #include #include "scitokens_internal.h" namespace { void initialize_cachedb(const std::string &keycache_file) { sqlite3 *db; int rc = sqlite3_open(keycache_file.c_str(), &db); if (rc != SQLITE_OK) { std::cerr << "SQLite key cache creation failed." << std::endl; sqlite3_close(db); return; } char *err_msg = nullptr; rc = sqlite3_exec(db, "CREATE TABLE IF NOT EXISTS keycache (" "issuer text UNIQUE PRIMARY KEY NOT NULL," "keys text NOT NULL)", NULL, 0, &err_msg); if (rc) { std::cerr << "Sqlite table creation failed: " << err_msg << std::endl; sqlite3_free(err_msg); } sqlite3_close(db); } /** * Get the Cache file location * 1. User-defined through config api * 2. $XDG_CACHE_HOME * 3. .cache subdirectory of home directory as returned by the password * database */ std::string get_cache_file() { const char *xdg_cache_home = getenv("XDG_CACHE_HOME"); auto bufsize = sysconf(_SC_GETPW_R_SIZE_MAX); bufsize = (bufsize == -1) ? 16384 : bufsize; std::unique_ptr buf(new char[bufsize]); std::string home_dir; struct passwd pwd, *result = NULL; getpwuid_r(geteuid(), &pwd, buf.get(), bufsize, &result); if (result && result->pw_dir) { home_dir = result->pw_dir; home_dir += "/.cache"; } // Figure out where to plop the cache based on priority std::string cache_dir; std::string configured_cache_dir = configurer::Configuration::get_cache_home(); if (configured_cache_dir.length() > 0) { // The variable has been configured cache_dir = configured_cache_dir; } else { cache_dir = xdg_cache_home ? 
xdg_cache_home : home_dir.c_str();
    }

    // No usable cache directory could be determined; caching is disabled.
    if (cache_dir.size() == 0) {
        return "";
    }

    // Create the cache directory hierarchy (0700: private to the user);
    // an already-existing directory is fine.
    int r = mkdir(cache_dir.c_str(), 0700);
    if ((r < 0) && errno != EEXIST) {
        return "";
    }

    std::string keycache_dir = cache_dir + "/scitokens";
    r = mkdir(keycache_dir.c_str(), 0700);
    if ((r < 0) && errno != EEXIST) {
        return "";
    }

    // NOTE(review): "sqllite" spelling is historical; renaming it would
    // orphan existing caches.
    std::string keycache_file = keycache_dir + "/scitokens_cpp.sqllite";
    initialize_cachedb(keycache_file);
    return keycache_file;
}

// Remove a given issuer from the database. Starts a new transaction
// if `new_transaction` is true.
// If a failure occurs, then this function returns nonzero and closes
// the database handle (so the caller must not touch `db` afterwards).
int remove_issuer_entry(sqlite3 *db, const std::string &issuer,
                        bool new_transaction) {
    int rc;
    if (new_transaction) {
        if ((rc = sqlite3_exec(db, "BEGIN", 0, 0, 0)) != SQLITE_OK) {
            sqlite3_close(db);
            return -1;
        }
    }

    sqlite3_stmt *stmt;
    rc = sqlite3_prepare_v2(db, "DELETE FROM keycache WHERE issuer = ?", -1,
                            &stmt, NULL);
    if (rc != SQLITE_OK) {
        sqlite3_close(db);
        return -1;
    }

    // SQLITE_STATIC is safe: `issuer` outlives the statement.
    if (sqlite3_bind_text(stmt, 1, issuer.c_str(), issuer.size(),
                          SQLITE_STATIC) != SQLITE_OK) {
        sqlite3_finalize(stmt);
        sqlite3_close(db);
        return -1;
    }

    rc = sqlite3_step(stmt);
    if (rc != SQLITE_DONE) {
        sqlite3_finalize(stmt);
        sqlite3_close(db);
        return -1;
    }
    sqlite3_finalize(stmt);

    if (new_transaction) {
        if ((rc = sqlite3_exec(db, "COMMIT", 0, 0, 0)) != SQLITE_OK) {
            sqlite3_close(db);
            return -1;
        }
    }
    return 0;
}

} // namespace

// Look up the cached JWKS for `issuer` in the SQLite key cache.  Returns
// true and fills `keys`/`next_update` only for a non-expired, well-formed
// row; stale or malformed rows are removed (see the SQLITE_ROW branch).
bool scitokens::Validator::get_public_keys_from_db(const std::string issuer,
                                                   int64_t now,
                                                   picojson::value &keys,
                                                   int64_t &next_update) {
    auto cache_fname = get_cache_file();
    if (cache_fname.size() == 0) {
        return false;
    }

    sqlite3 *db;
    int rc = sqlite3_open(cache_fname.c_str(), &db);
    if (rc) {
        sqlite3_close(db);
        return false;
    }

    sqlite3_stmt *stmt;
    rc = sqlite3_prepare_v2(db, "SELECT keys from keycache where issuer = ?",
                            -1, &stmt, NULL);
    if (rc != SQLITE_OK) {
        sqlite3_close(db);
        return false;
    }

    if (sqlite3_bind_text(stmt, 1, issuer.c_str(), issuer.size(),
SQLITE_STATIC) != SQLITE_OK) { sqlite3_finalize(stmt); sqlite3_close(db); return false; } rc = sqlite3_step(stmt); if (rc == SQLITE_ROW) { const unsigned char *data = sqlite3_column_text(stmt, 0); std::string metadata(reinterpret_cast(data)); sqlite3_finalize(stmt); picojson::value json_obj; auto err = picojson::parse(json_obj, metadata); if (!err.empty() || !json_obj.is()) { if (remove_issuer_entry(db, issuer, true) != 0) { return false; } sqlite3_close(db); return false; } auto top_obj = json_obj.get(); auto iter = top_obj.find("jwks"); if (iter == top_obj.end() || !iter->second.is()) { if (remove_issuer_entry(db, issuer, true) != 0) { return false; } sqlite3_close(db); return false; } auto keys_local = iter->second; iter = top_obj.find("expires"); if (iter == top_obj.end() || !iter->second.is()) { if (remove_issuer_entry(db, issuer, true) != 0) { return false; } sqlite3_close(db); return false; } auto expiry = iter->second.get(); if (now > expiry) { if (remove_issuer_entry(db, issuer, true) != 0) { return false; } sqlite3_close(db); return false; } sqlite3_close(db); iter = top_obj.find("next_update"); if (iter == top_obj.end() || !iter->second.is()) { next_update = expiry - 4 * 3600; } else { next_update = iter->second.get(); } keys = keys_local; return true; } else if (rc == SQLITE_DONE) { sqlite3_finalize(stmt); sqlite3_close(db); return false; } else { // TODO: log error? 
sqlite3_finalize(stmt); sqlite3_close(db); return false; } } bool scitokens::Validator::store_public_keys(const std::string &issuer, const picojson::value &keys, int64_t next_update, int64_t expires) { picojson::object top_obj; top_obj["jwks"] = keys; top_obj["next_update"] = picojson::value(next_update); top_obj["expires"] = picojson::value(expires); picojson::value db_value(top_obj); std::string db_str = db_value.serialize(); auto cache_fname = get_cache_file(); if (cache_fname.size() == 0) { return false; } sqlite3 *db; int rc = sqlite3_open(cache_fname.c_str(), &db); if (rc) { sqlite3_close(db); return false; } if ((rc = sqlite3_exec(db, "BEGIN", 0, 0, 0)) != SQLITE_OK) { sqlite3_close(db); return false; } if (remove_issuer_entry(db, issuer, false) != 0) { return false; } sqlite3_stmt *stmt; rc = sqlite3_prepare_v2(db, "INSERT INTO keycache VALUES (?, ?)", -1, &stmt, NULL); if (rc != SQLITE_OK) { sqlite3_close(db); return false; } if (sqlite3_bind_text(stmt, 1, issuer.c_str(), issuer.size(), SQLITE_STATIC) != SQLITE_OK) { sqlite3_finalize(stmt); sqlite3_close(db); return false; } if (sqlite3_bind_text(stmt, 2, db_str.c_str(), db_str.size(), SQLITE_STATIC) != SQLITE_OK) { sqlite3_finalize(stmt); sqlite3_close(db); return false; } rc = sqlite3_step(stmt); if (rc != SQLITE_DONE) { sqlite3_finalize(stmt); sqlite3_close(db); return false; } sqlite3_finalize(stmt); if (sqlite3_exec(db, "COMMIT", 0, 0, 0) != SQLITE_OK) { sqlite3_close(db); return false; } sqlite3_close(db); return true; } scitokens-cpp-1.1.3/src/scitokens_internal.cpp000066400000000000000000001252341475716002100215010ustar00rootroot00000000000000 #include #include #include #include #include #include #include #include #include #if OPENSSL_VERSION_NUMBER >= 0x30000000L #include #include #endif #define EC_NAME NID_X9_62_prime256v1 #include "scitokens_internal.h" using namespace scitokens; namespace { struct CurlRaii { CurlRaii() { curl_global_init(CURL_GLOBAL_DEFAULT); } ~CurlRaii() { 
curl_global_cleanup(); } }; CurlRaii myCurl; std::mutex key_refresh_mutex; } // namespace namespace scitokens { namespace internal { SimpleCurlGet::GetStatus SimpleCurlGet::perform_start(const std::string &url) { m_len = 0; m_curl_multi.reset(curl_multi_init()); if (!m_curl_multi) { throw CurlException("Failed to create a new curl async handle."); } m_curl.reset(curl_easy_init()); if (!m_curl) { throw CurlException("Failed to create a new curl handle."); } if (m_maxbytes > 0) { size_t new_size = std::min(m_maxbytes, 8 * 1024); if (m_data.size() < new_size) { m_data.resize(new_size); } } long timeout = m_timeout > 120 ? 120 : m_timeout; CURLcode rv = curl_easy_setopt(m_curl.get(), CURLOPT_URL, url.c_str()); if (rv != CURLE_OK) { throw CurlException("Failed to set CURLOPT_URL."); } rv = curl_easy_setopt(m_curl.get(), CURLOPT_WRITEFUNCTION, &write_data); if (rv != CURLE_OK) { throw CurlException("Failed to set CURLOPT_WRITEFUNCTION."); } rv = curl_easy_setopt(m_curl.get(), CURLOPT_WRITEDATA, this); if (rv != CURLE_OK) { throw CurlException("Failed to set CURLOPT_WRITEDATA."); } rv = curl_easy_setopt(m_curl.get(), CURLOPT_TIMEOUT, timeout); if (rv != CURLE_OK) { throw CurlException("Failed to set CURLOPT_TIMEOUT."); } rv = curl_easy_setopt(m_curl.get(), CURLOPT_FOLLOWLOCATION, 1L); if (rv != CURLE_OK) { throw CurlException("Failed to set CURLOPT_FOLLOWLOCATION."); } auto ca_file = configurer::Configuration::get_tls_ca_file(); if (!ca_file.empty()) { rv = curl_easy_setopt(m_curl.get(), CURLOPT_CAINFO, ca_file.c_str()); if (rv != CURLE_OK) { throw CurlException("Failed to set CURLOPT_CAINFO."); } } { auto mres = curl_multi_add_handle(m_curl_multi.get(), m_curl.get()); if (mres) { throw CurlException("Failed to add curl handle to async object"); } } return perform_continue(); } std::string SimpleCurlGet::get_url() const { if (!m_curl) { return ""; } char *url = nullptr; auto rv = curl_easy_getinfo(m_curl.get(), CURLINFO_EFFECTIVE_URL, &url); if (rv != CURLE_OK) { return 
""; } return std::string(url); } SimpleCurlGet::GetStatus SimpleCurlGet::perform_continue() { int still_running; auto resm = curl_multi_perform(m_curl_multi.get(), &still_running); if (!resm && still_running) { resm = curl_multi_timeout(m_curl_multi.get(), &m_timeout_ms); if (resm) { throw CurlException(curl_multi_strerror(resm)); } if (m_timeout_ms < 0) { m_timeout_ms = 100; } FD_ZERO(m_read_fd_set); FD_ZERO(m_write_fd_set); FD_ZERO(m_exc_fd_set); resm = curl_multi_fdset(m_curl_multi.get(), m_read_fd_set, m_write_fd_set, m_exc_fd_set, &m_max_fd); if (resm) { throw CurlException(curl_multi_strerror(resm)); } if (m_max_fd < 0) m_timeout_ms = 100; return GetStatus(); } if (resm) { throw CurlException(curl_multi_strerror(resm)); } CURLMsg *msg; CURLcode res = static_cast(-1); do { int msgq = 0; msg = curl_multi_info_read(m_curl_multi.get(), &msgq); if (msg && (msg->msg == CURLMSG_DONE)) { CURL *easy_handle = msg->easy_handle; res = msg->data.result; curl_multi_remove_handle(m_curl_multi.get(), easy_handle); } } while (msg); if (res) { throw CurlException(curl_easy_strerror(res)); } long status_code; res = curl_easy_getinfo(m_curl.get(), CURLINFO_RESPONSE_CODE, &status_code); if (res != CURLE_OK) { throw CurlException(curl_easy_strerror(res)); } GetStatus status; status.m_done = true; status.m_status_code = status_code; return status; } int SimpleCurlGet::perform(const std::string &url, time_t expiry_time) { GetStatus status = perform_start(url); while (!status.m_done) { auto now = time(NULL); int timeout_ms = 1000 * (expiry_time - now); if (timeout_ms < 0) timeout_ms = 0; if (m_timeout_ms < timeout_ms) timeout_ms = m_timeout_ms; struct timeval timeout; timeout.tv_sec = timeout_ms / 1000; timeout.tv_usec = (timeout_ms % 1000) * 1000; // Return value of select is ignored; curl will take care of it. 
select(m_max_fd + 1, m_read_fd_set, m_write_fd_set, m_exc_fd_set,
               &timeout);
        status = perform_continue();
    }
    return status.m_status_code;
}

// Expose the downloaded bytes: `buffer` borrows m_data's storage and `len`
// is the count of valid bytes (m_data may be over-allocated).
void SimpleCurlGet::get_data(char *&buffer, size_t &len) {
    buffer = &m_data[0];
    len = m_len;
}

// libcurl CURLOPT_WRITEFUNCTION callback; `userp` is the owning
// SimpleCurlGet.  Returning 0 when m_maxbytes would be exceeded makes
// libcurl abort the transfer.
// NOTE(review): template arguments (reinterpret_cast<...>, static_cast<...>,
// std::function<...>) appear stripped from this copy by text extraction —
// restore from upstream before compiling.
size_t SimpleCurlGet::write_data(void *buffer, size_t size, size_t nmemb,
                                 void *userp) {
    SimpleCurlGet *myself = reinterpret_cast(userp);
    size_t new_data = size * nmemb;
    size_t new_length = myself->m_len + new_data;
    if (myself->m_maxbytes > 0 &&
        (new_length > static_cast(myself->m_maxbytes))) {
        return 0;
    }
    if (myself->m_data.size() < new_length) {
        myself->m_data.resize(new_length);
    }
    memcpy(&(myself->m_data[myself->m_len]), buffer, new_data);
    myself->m_len = new_length;
    return new_data;
}

} // namespace internal
} // namespace scitokens

namespace {

// Split `url` into lower-cased scheme, lower-cased authority (netloc), and
// path with the query string excluded.  Throws InvalidIssuerException when
// no "://" separator is found.
void parse_url(const std::string &url, std::string &schema,
               std::string &netloc, std::string &path) {
    const std::string prot_end("://");
    std::string::const_iterator prot_iter =
        std::search(url.begin(), url.end(), prot_end.begin(), prot_end.end());
    schema.reserve(distance(url.begin(), prot_iter));
    std::transform(url.begin(), prot_iter, std::back_inserter(schema),
                   std::function(tolower));
    if (prot_iter == url.end()) {
        throw InvalidIssuerException("Issuer URL missing hostname.");
    }

    std::advance(prot_iter, prot_end.length());
    std::string::const_iterator path_iter =
        std::find(prot_iter, url.end(), '/');
    netloc.reserve(std::distance(prot_iter, path_iter));
    std::transform(prot_iter, path_iter, std::back_inserter(netloc),
                   std::function(tolower));

    // Path runs up to the start of the query string (or end of URL).
    std::string::const_iterator query_iter =
        std::find(path_iter, url.end(), '?');
    path.assign(path_iter, query_iter);
}

// Derive both candidate metadata endpoints for an issuer: the OAuth
// authorization-server metadata URL (well-known component inserted before
// the issuer path) and the OpenID configuration URL (appended after it).
// Requires an https issuer; a bare "/" path is treated as empty.
void get_metadata_endpoint(const std::string &issuer,
                           std::string &openid_metadata,
                           std::string &oauth_metadata) {
    std::string schema, netloc, path;
    parse_url(issuer, schema, netloc, path);
    if (schema != "https") {
        throw InvalidIssuerException("Issuer URL must be HTTPS");
    }
    if (path == "/") {
        path = "";
    }
    std::string new_path =
"/.well-known/oauth-authorization-server" + path; oauth_metadata = "https://" + netloc + new_path; openid_metadata = issuer + "/.well-known/openid-configuration"; } /* "keys": [ { "alg": "RS256", "e": "AQAB", "kid": "key-rs256", "kty": "RSA", "n": "uGDGTLXnqh3mfopjys6sFUBvFl3F4Qt6NEYphq_u_aBhtN1X9NEyb78uB_I1KjciJNGLIQU0ECsJiFx6qV1hR9xE1dPyrS3bU92AVtnBrvzUtTU-aUZAmZQiuAC_rC0-z_TOQr6qJkkUgZtxR9n9op55ZBpRfZD5dzhkW4Dm146vfTKt0D4cIMoMNJS5xQx9nibeB4E8hryZDW_fPeD0XZDcpByNyP0jFDYkxdUtQFvyRpz4WMZ4ejUfvW3gf4LRAfGZJtMnsZ7ZW4RfoQbhiXKMfWeBEjQDiXh0r-KuZLykxhYJtpf7fTnPna753IzMgRMmW3F69iQn2LQN3LoSMw==", "use": "sig" }, { "alg": "ES256", "kid": "key-es356", "kty": "EC", "use": "sig", "x": "ncSCrGTBTXXOhNiAOTwNdPjwRz1hVY4saDNiHQK9Bh4=", "y": "sCsFXvx7FAAklwq3CzRCBcghqZOFPB2dKUayS6LY_Lo=" } ] } */ picojson::value::object find_key_id(const picojson::value json, const std::string &kid) { if (!json.is()) { throw JsonException("Top-level JSON is not an object."); } auto top_obj = json.get(); auto iter = top_obj.find("keys"); if (iter == top_obj.end() || (!iter->second.is())) { throw JsonException("Metadata resource is missing 'keys' array value"); } auto keys_array = iter->second.get(); if (kid.empty()) { if (keys_array.size() != 1) { throw JsonException("Key ID empty but multiple keys published."); } auto &key = keys_array.at(0); return key.get(); } else { for (auto &key : keys_array) { if (!key.is()) { continue; } auto key_obj = key.get(); iter = key_obj.find("kid"); if (iter == key_obj.end() || (!iter->second.is())) { continue; } std::string cur_kid = iter->second.get(); if (cur_kid == kid) { return key_obj; } } throw JsonException("Key ID is not published by the issuer."); } } struct local_base64url : public jwt::alphabet::base64url { static const std::string &fill() { static std::string fill = "="; return fill; } }; // Assuming a padding, decode std::string b64url_decode_nopadding(const std::string &input) { std::string result = input; switch (result.size() % 4) { case 1: result += 
"="; // fallthrough case 2: result += "="; // fallthrough case 3: result += "="; // fallthrough default: break; } return jwt::base::decode(result); } // Base64-encode without padding. std::string b64url_encode_nopadding(const std::string &input) { std::string result = jwt::base::encode(input); auto pos = result.find("="); return result.substr(0, pos); } std::string es256_from_coords(const std::string &x_str, const std::string &y_str) { auto x_decode = b64url_decode_nopadding(x_str); auto y_decode = b64url_decode_nopadding(y_str); std::unique_ptr pubkey_bio( BIO_new(BIO_s_mem()), BIO_free_all); std::unique_ptr x_bignum( BN_bin2bn(reinterpret_cast(x_decode.c_str()), x_decode.size(), nullptr), BN_free); std::unique_ptr y_bignum( BN_bin2bn(reinterpret_cast(y_decode.c_str()), y_decode.size(), nullptr), BN_free); #if OPENSSL_VERSION_NUMBER >= 0x30000000L unsigned char *buf; OSSL_PARAM *params; std::unique_ptr ec_group( EC_GROUP_new_by_curve_name(EC_NAME), EC_GROUP_free); if (!ec_group.get()) { throw UnsupportedKeyException("Unable to get OpenSSL EC group"); } std::unique_ptr Q_point( EC_POINT_new(ec_group.get()), EC_POINT_free); if (!Q_point.get()) { throw UnsupportedKeyException("Unable to allocate new EC point"); } if (!EC_POINT_set_affine_coordinates(ec_group.get(), Q_point.get(), x_bignum.get(), y_bignum.get(), NULL)) { throw UnsupportedKeyException("Invalid elliptic curve point in key"); } size_t out_len = EC_POINT_point2buf(ec_group.get(), Q_point.get(), POINT_CONVERSION_UNCOMPRESSED, &buf, NULL); if (out_len == 0) { throw UnsupportedKeyException( "Failed to convert EC point to octet base buffer"); } std::unique_ptr param_build( OSSL_PARAM_BLD_new(), OSSL_PARAM_BLD_free); if (!param_build.get() || !OSSL_PARAM_BLD_push_utf8_string(param_build.get(), "group", "prime256v1", 0) || !OSSL_PARAM_BLD_push_octet_string(param_build.get(), "pub", buf, out_len) || (params = OSSL_PARAM_BLD_to_param(param_build.get())) == NULL) { throw UnsupportedKeyException( "Failed to build 
EC public key parameters"); } EVP_PKEY *pkey = NULL; std::unique_ptr ec_ctx( EVP_PKEY_CTX_new_from_name(NULL, "EC", NULL), EVP_PKEY_CTX_free); if (!ec_ctx.get()) { throw UnsupportedKeyException("Failed to set EC PKEY context"); } if (EVP_PKEY_fromdata_init(ec_ctx.get()) <= 0 || EVP_PKEY_fromdata(ec_ctx.get(), &pkey, EVP_PKEY_PUBLIC_KEY, params) <= 0 || pkey == NULL) { throw UnsupportedKeyException("Failed to set the EC public key"); } if (PEM_write_bio_PUBKEY(pubkey_bio.get(), pkey) == 0) { throw UnsupportedKeyException("Failed to serialize EC public key"); } EVP_PKEY_free(pkey); OSSL_PARAM_free(params); OPENSSL_free(buf); #else std::unique_ptr ec( EC_KEY_new_by_curve_name(EC_NAME), EC_KEY_free); if (!ec.get()) { throw UnsupportedKeyException( "OpenSSL does not support the P-256 curve"); } EC_GROUP *params = (EC_GROUP *)EC_KEY_get0_group(ec.get()); if (!params) { throw UnsupportedKeyException("Unable to get OpenSSL EC group"); } std::unique_ptr Q_point( EC_POINT_new(params), EC_POINT_free); if (!Q_point.get()) { throw UnsupportedKeyException("Unable to allocate new EC point"); } if (EC_POINT_set_affine_coordinates_GFp( params, Q_point.get(), x_bignum.get(), y_bignum.get(), NULL) != 1) { throw UnsupportedKeyException("Invalid elliptic curve point in key"); } if (EC_KEY_set_public_key(ec.get(), Q_point.get()) != 1) { throw UnsupportedKeyException("Unable to set the EC public key"); } if (PEM_write_bio_EC_PUBKEY(pubkey_bio.get(), ec.get()) == 0) { throw UnsupportedKeyException("Failed to serialize EC public key"); } #endif char *mem_data; size_t mem_len = BIO_get_mem_data(pubkey_bio.get(), &mem_data); std::string result = std::string(mem_data, mem_len); return result; } std::string rs256_from_coords(const std::string &e_str, const std::string &n_str) { auto e_decode = b64url_decode_nopadding(e_str); auto n_decode = b64url_decode_nopadding(n_str); std::unique_ptr pubkey_bio( BIO_new(BIO_s_mem()), BIO_free_all); std::unique_ptr e_bignum( 
BN_bin2bn(reinterpret_cast(e_decode.c_str()), e_decode.size(), nullptr), BN_free); std::unique_ptr n_bignum( BN_bin2bn(reinterpret_cast(n_decode.c_str()), n_decode.size(), nullptr), BN_free); #if OPENSSL_VERSION_NUMBER >= 0x30000000L OSSL_PARAM *params; std::unique_ptr rsa_ctx( EVP_PKEY_CTX_new_from_name(NULL, "RSA", NULL), EVP_PKEY_CTX_free); if (!rsa_ctx.get()) { throw UnsupportedKeyException("Failed to set RSA PKEY context"); } std::unique_ptr param_build( OSSL_PARAM_BLD_new(), OSSL_PARAM_BLD_free); if (!param_build.get() || !OSSL_PARAM_BLD_push_BN_pad(param_build.get(), "e", e_bignum.get(), BN_num_bytes(e_bignum.get())) || !OSSL_PARAM_BLD_push_BN_pad(param_build.get(), "n", n_bignum.get(), BN_num_bytes(n_bignum.get())) || (params = OSSL_PARAM_BLD_to_param(param_build.get())) == NULL) { throw UnsupportedKeyException( "Failed to build RSA public key parameters"); } EVP_PKEY *pkey = NULL; if (EVP_PKEY_fromdata_init(rsa_ctx.get()) <= 0 || EVP_PKEY_fromdata(rsa_ctx.get(), &pkey, EVP_PKEY_PUBLIC_KEY, params) <= 0 || pkey == NULL) { throw UnsupportedKeyException("Failed to set the RSA public key"); } if (PEM_write_bio_PUBKEY(pubkey_bio.get(), pkey) == 0) { throw UnsupportedKeyException("Failed to serialize RSA public key"); } EVP_PKEY_free(pkey); OSSL_PARAM_free(params); #else std::unique_ptr rsa(RSA_new(), RSA_free); #if OPENSSL_VERSION_NUMBER < 0x10100000L || defined(LIBRESSL_VERSION_NUMBER) rsa->e = e_bignum.get(); rsa->n = n_bignum.get(); rsa->d = nullptr; #else RSA_set0_key(rsa.get(), n_bignum.get(), e_bignum.get(), nullptr); #endif std::unique_ptr pkey(EVP_PKEY_new(), EVP_PKEY_free); if (EVP_PKEY_set1_RSA(pkey.get(), rsa.get()) != 1) { throw UnsupportedKeyException("Failed to set the public key"); } if (PEM_write_bio_PUBKEY(pubkey_bio.get(), pkey.get()) == 0) { throw UnsupportedKeyException("Failed to serialize RSA public key"); } #endif e_bignum.release(); n_bignum.release(); char *mem_data; size_t mem_len = BIO_get_mem_data(pubkey_bio.get(), &mem_data); 
std::string result = std::string(mem_data, mem_len); return result; } /** * Normalize path: collapse etc. * >>> normalize_path('/a/b///c') * '/a/b/c' */ std::string normalize_absolute_path(const std::string &path) { if ((path == "//") || (path == "/") || (path == "")) { return "/"; } std::vector path_components; auto path_iter = path.begin(); while (path_iter != path.end()) { while (*path_iter == '/') { path_iter++; } auto next_path_iter = std::find(path_iter, path.end(), '/'); std::string component; component.reserve(std::distance(path_iter, next_path_iter)); component.assign(path_iter, next_path_iter); path_components.push_back(component); path_iter = next_path_iter; } std::vector path_components_filtered; path_components_filtered.reserve(path_components.size()); for (const auto &component : path_components) { if (component == "..") { path_components_filtered.pop_back(); } else if (!component.empty() && component != ".") { path_components_filtered.push_back(component); } } std::stringstream ss; for (const auto &component : path_components_filtered) { ss << "/" << component; } std::string result = ss.str(); return result.empty() ? 
"/" : result; } } // namespace void SciToken::deserialize(const std::string &data, const std::vector allowed_issuers) { m_decoded.reset(new jwt::decoded_jwt(data)); scitokens::Validator val; val.add_allowed_issuers(allowed_issuers); val.set_validate_all_claims_scitokens_1(false); val.set_validate_profile(m_deserialize_profile); val.verify(*m_decoded); // Set all the claims m_claims = m_decoded->get_payload_claims(); // Copy over the profile m_profile = val.get_profile(); } std::unique_ptr SciToken::deserialize_start(const std::string &data, const std::vector allowed_issuers) { m_decoded.reset(new jwt::decoded_jwt(data)); std::unique_ptr status(new SciTokenAsyncStatus()); status->m_validator.reset(new scitokens::Validator()); status->m_validator->add_allowed_issuers(allowed_issuers); status->m_validator->set_validate_all_claims_scitokens_1(false); status->m_validator->set_validate_profile(m_deserialize_profile); status->m_status = status->m_validator->verify_async(*m_decoded); return deserialize_continue(std::move(status)); } std::unique_ptr SciToken::deserialize_continue(std::unique_ptr status) { // Check if the status is completed (verification is complete) if (status->m_status->m_done) { // Set all the claims m_claims = m_decoded->get_payload_claims(); // Copy over the profile m_profile = status->m_validator->get_profile(); } else { status->m_status = status->m_validator->verify_async_continue( std::move(status->m_status)); if (status->m_status->m_done) { // Set all the claims m_claims = m_decoded->get_payload_claims(); // Copy over the profile m_profile = status->m_validator->get_profile(); } } return std::move(status); } std::unique_ptr Validator::get_public_keys_from_web(const std::string &issuer, unsigned timeout) { std::string openid_metadata, oauth_metadata; get_metadata_endpoint(issuer, openid_metadata, oauth_metadata); std::unique_ptr status(new AsyncStatus()); status->m_oauth_metadata_url = oauth_metadata; status->m_cget.reset(new 
internal::SimpleCurlGet(1024 * 1024, timeout)); auto cget_status = status->m_cget->perform_start(openid_metadata); status->m_continue_fetch = true; if (!cget_status.m_done) { return status; } return get_public_keys_from_web_continue(std::move(status)); } std::unique_ptr Validator::get_public_keys_from_web_continue( std::unique_ptr status) { char *buffer; size_t len; switch (status->m_state) { case AsyncStatus::DOWNLOAD_METADATA: { auto cget_status = status->m_cget->perform_continue(); if (!cget_status.m_done) { return std::move(status); } if (cget_status.m_status_code != 200) { if (status->m_oauth_fallback) { throw CurlException("Failed to retrieve metadata provider " "information for issuer."); } else { status->m_oauth_fallback = true; status->m_cget.reset(new internal::SimpleCurlGet()); cget_status = status->m_cget->perform_start(status->m_oauth_metadata_url); if (!cget_status.m_done) { return std::move(status); } return get_public_keys_from_web_continue(std::move(status)); } } status->m_cget->get_data(buffer, len); std::string metadata(buffer, len); picojson::value json_obj; auto err = picojson::parse(json_obj, metadata); if (!err.empty()) { throw JsonException( "JSON parse failure when downloading from the metadata URL " + status->m_cget->get_url() + ": " + err); } if (!json_obj.is()) { throw JsonException("Metadata resource " + status->m_cget->get_url() + " contains " "improperly-formatted JSON."); } auto top_obj = json_obj.get(); auto iter = top_obj.find("jwks_uri"); if (iter == top_obj.end() || (!iter->second.is())) { throw JsonException("Metadata resource " + status->m_cget->get_url() + " is missing 'jwks_uri' string value"); } auto jwks_uri = iter->second.get(); status->m_has_metadata = true; status->m_state = AsyncStatus::DOWNLOAD_PUBLIC_KEY; status->m_cget.reset(new internal::SimpleCurlGet()); status->m_cget->perform_start(jwks_uri); // This should also fall through the next state } case AsyncStatus::DOWNLOAD_PUBLIC_KEY: { auto cget_status = 
status->m_cget->perform_continue(); if (!cget_status.m_done) { return std::move(status); } if (cget_status.m_status_code != 200) { throw CurlException("Failed to retrieve the issuer's key set"); } status->m_cget->get_data(buffer, len); auto metadata = std::string(buffer, len); picojson::value json_obj; auto err = picojson::parse(json_obj, metadata); status->m_cget.reset(); if (!err.empty()) { throw JsonException("JSON parse failure when downloading from the " " public key URL " + status->m_cget->get_url() + ": " + err); } auto now = std::time(NULL); // TODO: take expiration time from the cache-control header in the // response. int next_update_delta = configurer::Configuration::get_next_update_delta(); int expiry_delta = configurer::Configuration::get_expiry_delta(); status->m_next_update = now + next_update_delta; status->m_expires = now + expiry_delta; status->m_keys = json_obj; status->m_continue_fetch = false; status->m_done = true; status->m_state = AsyncStatus::DONE; } case AsyncStatus::DONE: status->m_done = true; } // Switch return std::move(status); } std::string Validator::get_jwks(const std::string &issuer) { auto now = std::time(NULL); picojson::value jwks; int64_t next_update; if (get_public_keys_from_db(issuer, now, jwks, next_update)) { return jwks.serialize(); } return std::string("{\"keys\": []}"); } bool Validator::refresh_jwks(const std::string &issuer) { picojson::value keys; std::unique_ptr status = get_public_keys_from_web( issuer, internal::SimpleCurlGet::extended_timeout); while (!status->m_done) { status = get_public_keys_from_web_continue(std::move(status)); } return store_public_keys(issuer, status->m_keys, status->m_next_update, status->m_expires); } bool Validator::store_jwks(const std::string &issuer, const std::string &jwks_str) { picojson::value jwks; std::string err = picojson::parse(jwks, jwks_str); auto now = std::time(NULL); int next_update_delta = configurer::Configuration::get_next_update_delta(); int expiry_delta = 
configurer::Configuration::get_expiry_delta(); int64_t next_update = now + next_update_delta, expires = now + expiry_delta; if (!err.empty()) { throw JsonException(err); } return store_public_keys(issuer, jwks, next_update, expires); } std::unique_ptr Validator::get_public_key_pem(const std::string &issuer, const std::string &kid, std::string &public_pem, std::string &algorithm) { auto now = std::time(NULL); std::unique_ptr result(new AsyncStatus()); if (get_public_keys_from_db(issuer, now, result->m_keys, result->m_next_update)) { std::unique_lock lock(key_refresh_mutex, std::defer_lock); // If refresh is due *and* the key refresh mutex is free, try to update if (now > result->m_next_update && lock.try_lock()) { try { result->m_ignore_error = true; result = get_public_keys_from_web( issuer, internal::SimpleCurlGet::default_timeout); // Hold refresh mutex in the new result result->m_refresh_lock = std::move(lock); } catch (std::runtime_error &) { result->m_do_store = false; // ignore the exception: we have a valid set of keys already } } else { // Got the keys from the DB, and they are still valid. result->m_continue_fetch = false; result->m_do_store = false; result->m_done = true; } } else { // No keys in the DB, or they are expired, so get them from the web. 
result = get_public_keys_from_web( issuer, internal::SimpleCurlGet::default_timeout); } result->m_issuer = issuer; result->m_kid = kid; // Always call the continue because it formats the public_pem and algorithm return get_public_key_pem_continue(std::move(result), public_pem, algorithm); } std::unique_ptr Validator::get_public_key_pem_continue(std::unique_ptr status, std::string &public_pem, std::string &algorithm) { if (status->m_continue_fetch) { status = get_public_keys_from_web_continue(std::move(status)); if (status->m_continue_fetch) { return std::move(status); } } if (status->m_do_store) { store_public_keys(status->m_issuer, status->m_keys, status->m_next_update, status->m_expires); } status->m_done = true; auto key_obj = find_key_id(status->m_keys, status->m_kid); auto iter = key_obj.find("alg"); std::string alg; if (iter == key_obj.end() || (!iter->second.is())) { auto iter2 = key_obj.find("kty"); if (iter2 == key_obj.end() || !iter2->second.is()) { throw JsonException("Key is missing key type"); } else { auto kty = iter2->second.get(); if (kty == "RSA") { alg = "RS256"; } else if (kty == "EC") { auto iter3 = key_obj.find("crv"); if (iter3 == key_obj.end() || !iter3->second.is()) { throw JsonException("EC key is missing curve name"); } auto crv = iter3->second.get(); if (crv == "P-256") { alg = "ES256"; } else { throw JsonException("Unsupported EC curve in public key"); } } else { throw JsonException("Unknown public key type"); } } } else { alg = iter->second.get(); } if (alg != "RS256" and alg != "ES256") { throw UnsupportedKeyException( "Issuer is using an unsupported algorithm"); } std::string pem; if (alg == "ES256") { iter = key_obj.find("x"); if (iter == key_obj.end() || (!iter->second.is())) { throw JsonException("Elliptic curve is missing x-coordinate"); } auto x = iter->second.get(); iter = key_obj.find("y"); if (iter == key_obj.end() || (!iter->second.is())) { throw JsonException("Elliptic curve is missing y-coordinate"); } auto y = 
iter->second.get(); pem = es256_from_coords(x, y); } else { iter = key_obj.find("e"); if (iter == key_obj.end() || (!iter->second.is())) { throw JsonException("Public key is missing exponent"); } auto e = iter->second.get(); iter = key_obj.find("n"); if (iter == key_obj.end() || (!iter->second.is())) { throw JsonException("Public key is missing n-value"); } auto n = iter->second.get(); pem = rs256_from_coords(e, n); } public_pem = pem; algorithm = alg; return std::move(status); } bool scitokens::Validator::store_public_ec_key(const std::string &issuer, const std::string &keyid, const std::string &public_key) { std::unique_ptr pubkey_bio( BIO_new(BIO_s_mem()), BIO_free_all); if ((size_t)BIO_write(pubkey_bio.get(), public_key.data(), public_key.size()) != public_key.size()) { return false; } std::unique_ptr x_bignum(BN_new(), BN_free); std::unique_ptr y_bignum(BN_new(), BN_free); #if OPENSSL_VERSION_NUMBER >= 0x30000000L std::unique_ptr pkey( PEM_read_bio_PUBKEY(pubkey_bio.get(), nullptr, nullptr, nullptr), EVP_PKEY_free); if (!pkey.get()) { return false; } std::unique_ptr ec_group( EC_GROUP_new_by_curve_name(EC_NAME), EC_GROUP_free); if (!ec_group.get()) { throw UnsupportedKeyException("Unable to get OpenSSL EC group"); } std::unique_ptr q_point( EC_POINT_new(ec_group.get()), EC_POINT_free); if (!q_point.get()) { throw UnsupportedKeyException("Unable to get OpenSSL EC point"); } OSSL_PARAM *params; if (!EVP_PKEY_todata(pkey.get(), EVP_PKEY_PUBLIC_KEY, ¶ms)) { throw UnsupportedKeyException( "Unable to get OpenSSL public key parameters"); } void *buf = NULL; size_t buf_len, max_len = 256; OSSL_PARAM *p = OSSL_PARAM_locate(params, "pub"); if (!p || !OSSL_PARAM_get_octet_string(p, &buf, max_len, &buf_len) || !EC_POINT_oct2point(ec_group.get(), q_point.get(), static_cast(buf), buf_len, nullptr)) { throw UnsupportedKeyException( "Failed to to set OpenSSL EC point with public key information"); } if (!EC_POINT_get_affine_coordinates(ec_group.get(), q_point.get(), 
x_bignum.get(), y_bignum.get(), NULL)) { throw UnsupportedKeyException( "Unable to get OpenSSL affine coordinates"); } OSSL_PARAM_free(params); #else std::unique_ptr pkey( PEM_read_bio_EC_PUBKEY(pubkey_bio.get(), nullptr, nullptr, nullptr), EC_KEY_free); if (!pkey) { return false; } EC_GROUP *params = (EC_GROUP *)EC_KEY_get0_group(pkey.get()); if (!params) { throw UnsupportedKeyException("Unable to get OpenSSL EC group"); } const EC_POINT *point = EC_KEY_get0_public_key(pkey.get()); if (!point) { throw UnsupportedKeyException("Unable to get OpenSSL EC point"); } if (!EC_POINT_get_affine_coordinates_GFp(params, point, x_bignum.get(), y_bignum.get(), nullptr)) { throw UnsupportedKeyException( "Unable to get OpenSSL affine coordinates"); } #endif auto x_num = BN_num_bytes(x_bignum.get()); auto y_num = BN_num_bytes(y_bignum.get()); std::vector x_bin; x_bin.resize(x_num); std::vector y_bin; y_bin.resize(y_num); BN_bn2bin(x_bignum.get(), &x_bin[0]); BN_bn2bin(y_bignum.get(), &y_bin[0]); std::string x_str(reinterpret_cast(&x_bin[0]), x_num); std::string y_str(reinterpret_cast(&y_bin[0]), y_num); picojson::object key_obj; key_obj["alg"] = picojson::value("ES256"); key_obj["kid"] = picojson::value(keyid); key_obj["use"] = picojson::value("sig"); key_obj["kty"] = picojson::value("EC"); key_obj["x"] = picojson::value(b64url_encode_nopadding(x_str)); key_obj["y"] = picojson::value(b64url_encode_nopadding(y_str)); std::vector key_list; key_list.emplace_back(key_obj); picojson::object top_obj; top_obj["keys"] = picojson::value(key_list); picojson::value top_value(top_obj); auto now = std::time(NULL); int next_update_delta = configurer::Configuration::get_next_update_delta(); int expiry_delta = configurer::Configuration::get_expiry_delta(); return store_public_keys(issuer, top_value, now + next_update_delta, now + expiry_delta); } bool scitokens::Enforcer::scope_validator(const jwt::claim &claim, void *myself) { auto me = reinterpret_cast(myself); if (claim.get_type() != 
jwt::json::type::string) { return false; } std::string scope = claim.as_string(); std::string requested_path = normalize_absolute_path(me->m_test_path); auto scope_iter = scope.begin(); // std::cout << "Comparing scope " << scope << " against test accesses " << // me->m_test_authz << ":" << requested_path << std::endl; bool compat_modify = false, compat_create = false, compat_cancel = false; while (scope_iter != scope.end()) { while (*scope_iter == ' ') { scope_iter++; } auto next_scope_iter = std::find(scope_iter, scope.end(), ' '); std::string full_authz; full_authz.reserve(std::distance(scope_iter, next_scope_iter)); full_authz.assign(scope_iter, next_scope_iter); auto sep_iter = full_authz.find(':'); std::string authz = full_authz.substr(0, sep_iter); std::string path; if (sep_iter == std::string::npos) { path = "/"; } else { path = full_authz.substr((++sep_iter)); } path = normalize_absolute_path(path); // If we are in compatibility mode and this is a WLCG token, then // translate the authorization names to utilize the SciToken-style // names. 
std::string alt_authz; if (me->m_validate_profile == SciToken::Profile::COMPAT && me->m_validator.get_profile() == SciToken::Profile::WLCG_1_0) { if (authz == "storage.read") { authz = "read"; } else if (authz == "storage.create") { authz = "write"; alt_authz = "create"; } else if (authz == "storage.modify") { authz = "write"; alt_authz = "modify"; } else if (authz == "compute.read") { authz = "condor"; path = "/READ"; } else if (authz == "compute.modify") { compat_modify = true; } else if (authz == "compute.create") { compat_create = true; } else if (authz == "compute.cancel") { compat_cancel = true; } } if (me->m_test_authz.empty()) { me->m_gen_acls.emplace_back(authz, path); if (!alt_authz.empty()) me->m_gen_acls.emplace_back(alt_authz, path); } else if (((me->m_test_authz == authz) || (!alt_authz.empty() && (me->m_test_authz == alt_authz))) && (requested_path.substr(0, path.size()) == path)) { return true; } scope_iter = next_scope_iter; } // Compatibility mode: the combination on compute modify, create, and cancel // mode are equivalent to the condor:/WRITE authorization. 
if (compat_modify && compat_create && compat_cancel) { if (me->m_test_authz.empty()) { me->m_gen_acls.emplace_back("condor", "/WRITE"); } else if ((me->m_test_authz == "condor") && (requested_path.substr(0, 6) == "/WRITE")) { return true; } } return me->m_test_authz.empty(); } // Configuration class functions std::pair configurer::Configuration::set_cache_home(const std::string dir_path) { // If setting to "", then we should treat as though it is unsetting the // config if (dir_path.length() == 0) { // User is configuring to empty string m_cache_home = std::make_shared(dir_path); return std::make_pair(true, ""); } std::vector path_components = path_split(dir_path); // cleans any extraneous /'s std::string cleaned_dir_path; for (const auto &component : path_components) { // add the / back to the path components cleaned_dir_path += "/" + component; } // Check that the cache_home exists, and if not try to create it auto rp = mkdir_and_parents_if_needed( cleaned_dir_path); // Structured bindings not introduced until cpp 17 if (!rp.first) { // std::string err_prefix{ "An issue was encountered with the provided cache home path: "}; return std::make_pair(false, err_prefix + rp.second); } // Now it exists and we can write to it, set the value and let // scitokens_cache handle the rest m_cache_home = std::make_shared(cleaned_dir_path); return std::make_pair(true, ""); } void configurer::Configuration::set_tls_ca_file(const std::string ca_file) { m_tls_ca_file = std::make_shared(ca_file); } std::string configurer::Configuration::get_cache_home() { return *m_cache_home; } std::string configurer::Configuration::get_tls_ca_file() { return *m_tls_ca_file; } // bool configurer::Configuration::check_dir(const std::string dir_path) { // struct stat info; // return stat(dir_path.c_str(), &info) == 0 && (info.st_mode & S_IFDIR); // } std::pair configurer::Configuration::mkdir_and_parents_if_needed( const std::string dir_path) { // SciTokens-cpp already makes assumptions about using 
Linux file paths, // so making that assumption here as well. // Using these perms because that's what the actual cache file uses in // scitokens_cache mode_t mode = 0700; // Maybe these permissions should be configurable? int result; std::string currentLevel; std::vector path_components = path_split(dir_path); for (const auto &component : path_components) { currentLevel += "/" + component; result = mkdir(currentLevel.c_str(), mode); if ((result < 0) && errno != EEXIST) { std::string err_prefix{"There was an error while creating/checking " "the directory: mkdir error: "}; return std::make_pair(false, err_prefix + strerror(errno)); } } return std::make_pair(true, ""); } std::vector configurer::Configuration::path_split(std::string path) { std::vector path_components; std::stringstream ss(path); std::string component; while (std::getline(ss, component, '/')) { if (!component.empty()) { path_components.push_back(component); } } if (path_components[0] == "") { path_components.erase(path_components.begin()); } return path_components; } scitokens-cpp-1.1.3/src/scitokens_internal.h000066400000000000000000001013471475716002100211450ustar00rootroot00000000000000 #include #include #include #include #include #include #include #include #if defined(__GNUC__) #define WARN_UNUSED_RESULT __attribute__((warn_unused_result)) #else #define WARN_UNUSED_RESULT #endif namespace { struct FixedClock { jwt::date m_now; jwt::date now() const { return m_now; } }; } // namespace namespace jwt { template class decoded_jwt; namespace traits { struct kazuho_picojson; } } // namespace jwt namespace configurer { class Configuration { public: Configuration() {} static void set_next_update_delta(int _next_update_delta) { m_next_update_delta = _next_update_delta; } static int get_next_update_delta() { return m_next_update_delta; } static void set_expiry_delta(int _expiry_delta) { m_expiry_delta = _expiry_delta; } static int get_expiry_delta() { return m_expiry_delta; } static std::pair 
set_cache_home(const std::string cache_home); static std::string get_cache_home(); static void set_tls_ca_file(const std::string ca_file); static std::string get_tls_ca_file(); private: static std::atomic_int m_next_update_delta; static std::atomic_int m_expiry_delta; static std::shared_ptr m_cache_home; static std::shared_ptr m_tls_ca_file; // static bool check_dir(const std::string dir_path); static std::pair mkdir_and_parents_if_needed(const std::string dir_path); static std::vector path_split(const std::string dir_path); }; } // namespace configurer namespace scitokens { namespace internal { class SimpleCurlGet { int m_maxbytes{1048576}; unsigned m_timeout; std::vector m_data; size_t m_len{0}; std::unique_ptr m_curl; std::unique_ptr m_curl_multi; fd_set m_read_fd_set[FD_SETSIZE]; fd_set m_write_fd_set[FD_SETSIZE]; fd_set m_exc_fd_set[FD_SETSIZE]; int m_max_fd{-1}; long m_timeout_ms{0}; public: static const unsigned default_timeout = 4; static const unsigned extended_timeout = 30; SimpleCurlGet(int maxbytes = 1024 * 1024, unsigned timeout = 30) : m_maxbytes(maxbytes), m_timeout(timeout), m_curl(nullptr, &curl_easy_cleanup), m_curl_multi(nullptr, &curl_multi_cleanup) {} struct GetStatus { bool m_done{false}; int m_status_code{-1}; }; GetStatus perform_start(const std::string &url); GetStatus perform_continue(); int perform(const std::string &url, time_t expiry_time); void get_data(char *&buffer, size_t &len); std::string get_url() const; long get_timeout_ms() const { return m_timeout_ms; } int get_max_fd() const { return m_max_fd; } fd_set *get_read_fd_set() { return m_read_fd_set; } fd_set *get_write_fd_set() { return m_write_fd_set; } fd_set *get_exc_fd_set() { return m_exc_fd_set; } private: static size_t write_data(void *buffer, size_t size, size_t nmemb, void *userp); }; } // namespace internal class UnsupportedKeyException : public std::runtime_error { public: explicit UnsupportedKeyException(const std::string &msg) : std::runtime_error(msg) {} }; class 
JWTVerificationException : public std::runtime_error { public: explicit JWTVerificationException(const std::string &msg) : std::runtime_error("token verification failed: " + msg) {} }; class CurlException : public std::runtime_error { public: explicit CurlException(const std::string &msg) : std::runtime_error(msg) {} }; class MissingIssuerException : public std::runtime_error { public: MissingIssuerException() : std::runtime_error("Issuer not specified in claims") {} }; class InvalidIssuerException : public std::runtime_error { public: InvalidIssuerException(const std::string &msg) : std::runtime_error(msg) {} }; class JsonException : public std::runtime_error { public: JsonException(const std::string &msg) : std::runtime_error(msg) {} }; class SciTokenKey { public: SciTokenKey() : m_kid("none"), m_name("none") {} SciTokenKey(const std::string &key_id, const std::string &algorithm, const std::string &public_contents, const std::string &private_contents) : m_kid(key_id), m_name(algorithm), m_public(public_contents), m_private(private_contents) {} std::string serialize(jwt::builder &builder) { if (m_kid != "none") { builder.set_key_id(m_kid); } return builder.sign(*this); } std::string sign(const std::string &data, std::error_code &ec) const { if (m_name == "RS256") { return jwt::algorithm::rs256(m_public, m_private).sign(data, ec); } else if (m_name == "ES256") { return jwt::algorithm::es256(m_public, m_private).sign(data, ec); } throw UnsupportedKeyException( "Provided algorithm name is not supported"); } std::string name() const { return m_name; } void verify(const std::string &data, const std::string &signature, std::error_code &ec) const { if (m_name == "RS256") { jwt::algorithm::rs256(m_public, m_private) .verify(data, signature, ec); } else if (m_name == "ES256") { jwt::algorithm::es256(m_public, m_private) .verify(data, signature, ec); } else { throw UnsupportedKeyException( "Provided algorithm is not supported."); } } private: std::string m_kid; std::string 
m_name; std::string m_public; std::string m_private; }; class Validator; class AsyncStatus { public: AsyncStatus() = default; AsyncStatus(const AsyncStatus &) = delete; AsyncStatus &operator=(const AsyncStatus &) = delete; enum AsyncState { DOWNLOAD_METADATA, DOWNLOAD_PUBLIC_KEY, DONE }; bool m_done{false}; bool m_continue_fetch{false}; bool m_ignore_error{false}; bool m_do_store{true}; bool m_has_metadata{false}; bool m_oauth_fallback{false}; AsyncState m_state{DOWNLOAD_METADATA}; std::unique_lock m_refresh_lock; int64_t m_next_update{-1}; int64_t m_expires{-1}; picojson::value m_keys; std::string m_issuer; std::string m_kid; std::string m_oauth_metadata_url; std::unique_ptr m_cget; std::string m_jwt_string; std::string m_public_pem; std::string m_algorithm; struct timeval get_timeout_val(time_t expiry_time) const { auto now = time(NULL); long timeout_ms = 100 * (expiry_time - now); if (m_cget && (m_cget->get_timeout_ms() < timeout_ms)) timeout_ms = m_cget->get_timeout_ms(); struct timeval timeout; timeout.tv_sec = timeout_ms / 1000; timeout.tv_usec = (timeout_ms % 1000) * 1000; return timeout; } int get_max_fd() const { return m_cget ? m_cget->get_max_fd() : -1; } fd_set *get_read_fd_set() { return m_cget ? m_cget->get_read_fd_set() : nullptr; } fd_set *get_write_fd_set() { return m_cget ? m_cget->get_write_fd_set() : nullptr; } fd_set *get_exc_fd_set() { return m_cget ? 
m_cget->get_exc_fd_set() : nullptr; } }; class SciTokenAsyncStatus { public: SciTokenAsyncStatus() = default; SciTokenAsyncStatus(const SciTokenAsyncStatus &) = delete; SciTokenAsyncStatus &operator=(const SciTokenAsyncStatus &) = delete; std::unique_ptr m_validator; std::unique_ptr m_status; }; class SciToken { friend class scitokens::Validator; public: enum class Profile { COMPAT = 0, SCITOKENS_1_0, SCITOKENS_2_0, WLCG_1_0, AT_JWT }; SciToken(SciTokenKey &signing_algorithm) : m_key(signing_algorithm) {} void set_claim(const std::string &key, const jwt::claim &value) { m_claims[key] = value; if (key == "iss") { m_issuer_set = true; } } void set_serialize_mode(Profile profile) { m_serialize_profile = profile; } void set_deserialize_mode(Profile profile) { m_deserialize_profile = profile; } const jwt::claim get_claim(const std::string &key) { return m_claims[key]; } bool has_claim(const std::string &key) const { return m_claims.find(key) != m_claims.end(); } void set_claim_list(const std::string &claim, std::vector &claim_list) { picojson::array array; array.reserve(claim_list.size()); for (const auto &entry : claim_list) { array.emplace_back(entry); } m_claims[claim] = jwt::claim(picojson::value(array)); } // Return a claim as a string // If the claim is not a string, it can throw // a std::bad_cast() exception. 
const std::string get_claim_string(const std::string &key) { return m_claims[key].as_string(); } const std::vector get_claim_list(const std::string &key) { picojson::array array; try { array = m_claims[key].as_array(); } catch (std::bad_cast &) { throw JsonException("Claim's value is not a JSON list"); } std::vector result; for (const auto &value : array) { result.emplace_back(value.get()); } return result; } void set_lifetime(int lifetime) { m_lifetime = lifetime; } std::string serialize() { jwt::builder builder(jwt::create()); if (!m_issuer_set) { throw MissingIssuerException(); } auto time = std::chrono::system_clock::now(); builder.set_issued_at(time); builder.set_not_before(time); builder.set_expires_at(time + std::chrono::seconds(m_lifetime)); if (m_serialize_profile == Profile::AT_JWT) { builder.set_type("at+jwt"); } uuid_t uuid; uuid_generate(uuid); char uuid_str[37]; uuid_unparse_lower(uuid, uuid_str); m_claims["jti"] = jwt::claim(std::string(uuid_str)); if (m_serialize_profile == Profile::SCITOKENS_2_0) { m_claims["ver"] = jwt::claim(std::string("scitoken:2.0")); auto iter = m_claims.find("aud"); if (iter == m_claims.end()) { m_claims["aud"] = jwt::claim(std::string("ANY")); } } else if (m_serialize_profile == Profile::WLCG_1_0) { m_claims["wlcg.ver"] = jwt::claim(std::string("1.0")); auto iter = m_claims.find("aud"); if (iter == m_claims.end()) { m_claims["aud"] = jwt::claim(std::string("https://wlcg.cern.ch/jwt/v1/any")); } } // Set all the payload claims for (auto it : m_claims) { builder.set_payload_claim(it.first, it.second); } return m_key.serialize(builder); } void deserialize(const std::string &data, std::vector allowed_issuers = {}); std::unique_ptr deserialize_start(const std::string &data, std::vector allowed_issuers = {}); std::unique_ptr deserialize_continue(std::unique_ptr status); private: bool m_issuer_set{false}; int m_lifetime{600}; Profile m_profile{Profile::SCITOKENS_1_0}; Profile m_serialize_profile{Profile::COMPAT}; Profile 
m_deserialize_profile{Profile::COMPAT}; std::unordered_map m_claims; std::unique_ptr> m_decoded; SciTokenKey &m_key; }; class Validator { typedef int (*StringValidatorFunction)(const char *value, char **err_msg); typedef bool (*ClaimValidatorFunction)(const jwt::claim &claim_value, void *data); typedef std::map> ClaimStringValidatorMap; typedef std::map>> ClaimValidatorMap; public: Validator() : m_now(std::chrono::system_clock::now()) {} void set_now(std::chrono::system_clock::time_point now) { m_now = now; } std::unique_ptr verify_async(const SciToken &scitoken) { const jwt::decoded_jwt *jwt_decoded = scitoken.m_decoded.get(); if (!jwt_decoded) { throw JWTVerificationException( "Token is not deserialized from string."); } return verify_async(*jwt_decoded); } void verify(const SciToken &scitoken, time_t expiry_time) { auto result = verify_async(scitoken); while (!result->m_done) { auto timeout_val = result->get_timeout_val(expiry_time); select(result->get_max_fd() + 1, result->get_read_fd_set(), result->get_write_fd_set(), result->get_exc_fd_set(), &timeout_val); if (time(NULL) >= expiry_time) { throw CurlException("Timeout when loading the OIDC metadata."); } result = verify_async_continue(std::move(result)); } } void verify(const jwt::decoded_jwt &jwt) { auto result = verify_async(jwt); while (!result->m_done) { result = verify_async_continue(std::move(result)); } } std::unique_ptr verify_async(const jwt::decoded_jwt &jwt) { // If token has a typ header claim (RFC8725 Section 3.11), trust that in // COMPAT mode. 
if (jwt.has_type()) { std::string t_type = jwt.get_type(); if (m_validate_profile == SciToken::Profile::COMPAT) { if (t_type == "at+jwt" || t_type == "application/at+jwt") { m_profile = SciToken::Profile::AT_JWT; } } else if (m_validate_profile == SciToken::Profile::AT_JWT) { if (t_type != "at+jwt" && t_type != "application/at+jwt") { throw JWTVerificationException( "'typ' header claim must be at+jwt"); } m_profile = SciToken::Profile::AT_JWT; } } else { if (m_validate_profile == SciToken::Profile::AT_JWT) { throw JWTVerificationException( "'typ' header claim must be set for at+jwt tokens"); } } if (!jwt.has_payload_claim("iat")) { throw JWTVerificationException("'iat' claim is mandatory"); } if (m_profile == SciToken::Profile::SCITOKENS_1_0 || m_profile == SciToken::Profile::SCITOKENS_2_0) { if (!jwt.has_payload_claim("nbf")) { throw JWTVerificationException("'nbf' claim is mandatory"); } } if (!jwt.has_payload_claim("exp")) { throw JWTVerificationException("'exp' claim is mandatory"); } if (!jwt.has_payload_claim("iss")) { throw JWTVerificationException("'iss' claim is mandatory"); } if (!m_allowed_issuers.empty()) { std::string issuer = jwt.get_issuer(); bool permitted = false; for (const auto &allowed_issuer : m_allowed_issuers) { if (issuer == allowed_issuer) { permitted = true; break; } } if (!permitted) { throw JWTVerificationException( "Token issuer is not in list of allowed issuers."); } } for (const auto &claim : m_critical_claims) { if (!jwt.has_payload_claim(claim)) { std::stringstream ss; ss << "'" << claim << "' claim is mandatory"; throw JWTVerificationException(ss.str()); } } std::string public_pem; std::string algorithm; // Key id is optional in the RFC, set to blank if it doesn't exist std::string key_id; if (jwt.has_key_id()) { key_id = jwt.get_key_id(); } auto status = get_public_key_pem(jwt.get_issuer(), key_id, public_pem, algorithm); status->m_jwt_string = jwt.get_token(); status->m_public_pem = public_pem; status->m_algorithm = algorithm; 
return verify_async_continue(std::move(status)); } std::unique_ptr verify_async_continue(std::unique_ptr status) { if (!status->m_done) { std::string public_pem, algorithm; status = get_public_key_pem_continue(std::move(status), public_pem, algorithm); status->m_public_pem = public_pem; status->m_algorithm = algorithm; if (!status->m_done) { return std::move(status); } } // std::cout << "Public PEM: " << public_pem << std::endl << "Algorithm: // " << algorithm << std::endl; SciTokenKey key(status->m_kid, status->m_algorithm, status->m_public_pem, ""); auto verifier = jwt::verify({m_now}) .allow_algorithm(key); const jwt::decoded_jwt jwt( status->m_jwt_string); verifier.verify(jwt); bool must_verify_everything = true; if (jwt.has_payload_claim("ver")) { const jwt::claim &claim = jwt.get_payload_claim("ver"); if (claim.get_type() != jwt::json::type::string) { throw JWTVerificationException( "'ver' claim value must be a string (if present)"); } std::string ver_string = claim.as_string(); if ((ver_string == "scitokens:2.0") || (ver_string == "scitoken:2.0")) { must_verify_everything = false; if ((m_validate_profile != SciToken::Profile::COMPAT) && (m_validate_profile != SciToken::Profile::SCITOKENS_2_0)) { throw JWTVerificationException( "Invalidate token type; not expecting a SciToken 2.0."); } m_profile = SciToken::Profile::SCITOKENS_2_0; if (!jwt.has_payload_claim("aud")) { throw JWTVerificationException( "'aud' claim required for SciTokens 2.0 profile"); } } else if (ver_string == "scitokens:1.0") { must_verify_everything = m_validate_all_claims; if ((m_validate_profile != SciToken::Profile::COMPAT) && (m_validate_profile != SciToken::Profile::SCITOKENS_1_0)) { throw JWTVerificationException( "Invalidate token type; not expecting a SciToken 1.0."); } m_profile = SciToken::Profile::SCITOKENS_1_0; } else { std::stringstream ss; ss << "Unknown profile version in token: " << ver_string; throw JWTVerificationException(ss.str()); } // Handle WLCG common JWT profile. 
} else if (jwt.has_payload_claim("wlcg.ver")) { if ((m_validate_profile != SciToken::Profile::COMPAT) && (m_validate_profile != SciToken::Profile::WLCG_1_0)) { throw JWTVerificationException( "Invalidate token type; not expecting a WLCG 1.0."); } m_profile = SciToken::Profile::WLCG_1_0; must_verify_everything = false; const jwt::claim &claim = jwt.get_payload_claim("wlcg.ver"); if (claim.get_type() != jwt::json::type::string) { throw JWTVerificationException( "'ver' claim value must be a string (if present)"); } std::string ver_string = claim.as_string(); if (ver_string != "1.0") { std::stringstream ss; ss << "Unknown WLCG profile version in token: " << ver_string; throw JWTVerificationException(ss.str()); } if (!jwt.has_payload_claim("aud")) { throw JWTVerificationException( "Malformed token: 'aud' claim required for WLCG profile"); } } else if (m_profile == SciToken::Profile::AT_JWT) { // detected early above from typ header claim. must_verify_everything = false; } else { if ((m_validate_profile != SciToken::Profile::COMPAT) && (m_validate_profile != SciToken::Profile::SCITOKENS_1_0)) { throw JWTVerificationException( "Invalidate token type; not expecting a SciToken 1.0."); } m_profile = SciToken::Profile::SCITOKENS_1_0; must_verify_everything = m_validate_all_claims; } auto claims = jwt.get_payload_claims(); for (const auto &claim_pair : claims) { if (claim_pair.first == "iat" || claim_pair.first == "nbf" || claim_pair.first == "exp" || claim_pair.first == "ver") { continue; } auto iter = m_validators.find(claim_pair.first); auto iter_claim = m_claim_validators.find(claim_pair.first); if ((iter == m_validators.end() || iter->second.empty()) && (iter_claim == m_claim_validators.end() || iter_claim->second.empty())) { bool is_issuer = claim_pair.first == "iss"; if (is_issuer && !m_allowed_issuers.empty()) { // skip; we verified it above } else if (must_verify_everything) { std::stringstream ss; ss << "'" << claim_pair.first << "' claim verification is mandatory"; 
// std::cout << ss.str() << std::endl; throw JWTVerificationException(ss.str()); } } // std::cout << "Running claim " << claim_pair.first << " through // validation." << std::endl; if (iter != m_validators.end()) for (const auto &verification_func : iter->second) { const jwt::claim &claim = jwt.get_payload_claim(claim_pair.first); if (claim.get_type() != jwt::json::type::string) { std::stringstream ss; ss << "'" << claim_pair.first << "' claim value must be a string to verify."; throw JWTVerificationException(ss.str()); } std::string value = claim.as_string(); char *err_msg = nullptr; if (verification_func(value.c_str(), &err_msg)) { if (err_msg) { throw JWTVerificationException(err_msg); } else { std::stringstream ss; ss << "'" << claim_pair.first << "' claim verification failed."; throw JWTVerificationException(ss.str()); } } } if (iter_claim != m_claim_validators.end()) for (const auto &verification_pair : iter_claim->second) { const jwt::claim &claim = jwt.get_payload_claim(claim_pair.first); if (verification_pair.first( claim, verification_pair.second) == false) { std::stringstream ss; ss << "'" << claim_pair.first << "' claim verification failed."; throw JWTVerificationException(ss.str()); } } } std::unique_ptr result(new AsyncStatus()); result->m_done = true; return result; } void add_critical_claims(const std::vector &claims) { std::copy(claims.begin(), claims.end(), std::back_inserter(m_critical_claims)); } void add_allowed_issuers(const std::vector &allowed_issuers) { std::copy(allowed_issuers.begin(), allowed_issuers.end(), std::back_inserter(m_allowed_issuers)); } void add_string_validator(const std::string &claim, StringValidatorFunction func) { auto result = m_validators.insert( {claim, std::vector()}); result.first->second.push_back(func); } void add_claim_validator(const std::string &claim, ClaimValidatorFunction func, void *data) { auto result = m_claim_validators.insert( {claim, std::vector>()}); result.first->second.push_back({func, data}); } 
void set_validate_all_claims_scitokens_1(bool new_val) { m_validate_all_claims = new_val; } /** * Get the profile of the last validated token. * * If there has been no validation - or the validation failed, * then the return value is unspecified. * * Will not return Profile::COMPAT. */ SciToken::Profile get_profile() const { if (m_profile == SciToken::Profile::COMPAT) { throw JWTVerificationException("Token profile has not been set."); } return m_profile; } /** * Set the profile that will be used for validation; COMPAT indicates any * supported profile is allowable. */ void set_validate_profile(SciToken::Profile profile) { m_validate_profile = profile; } /** * Store the contents of a public EC key for a given issuer. */ static bool store_public_ec_key(const std::string &issuer, const std::string &kid, const std::string &key); /** * Store the contents of a JWKS for a given issuer. */ static bool store_jwks(const std::string &issuer, const std::string &jwks); /** * Trigger a refresh of the JWKS or a given issuer. */ static bool refresh_jwks(const std::string &issuer); /** * Fetch the contents of fa JWKS for a given issuer (do not trigger a * refresh). Will return an empty JWKS if no valid JWKS is available. 
*/ static std::string get_jwks(const std::string &issuer); private: static std::unique_ptr get_public_key_pem(const std::string &issuer, const std::string &kid, std::string &public_pem, std::string &algorithm); static std::unique_ptr get_public_key_pem_continue(std::unique_ptr status, std::string &public_pem, std::string &algorithm); static std::unique_ptr get_public_keys_from_web(const std::string &issuer, unsigned timeout); static std::unique_ptr get_public_keys_from_web_continue(std::unique_ptr status); static bool get_public_keys_from_db(const std::string issuer, int64_t now, picojson::value &keys, int64_t &next_update); static bool store_public_keys(const std::string &issuer, const picojson::value &keys, int64_t next_update, int64_t expires); bool m_validate_all_claims{true}; SciToken::Profile m_profile{SciToken::Profile::COMPAT}; SciToken::Profile m_validate_profile{SciToken::Profile::COMPAT}; ClaimStringValidatorMap m_validators; ClaimValidatorMap m_claim_validators; std::chrono::system_clock::time_point m_now; std::vector m_critical_claims; std::vector m_allowed_issuers; }; class Enforcer { public: typedef std::vector> AclsList; Enforcer(std::string issuer, std::vector audience_list) : m_issuer(issuer), m_audiences(audience_list) { m_validator.add_allowed_issuers({m_issuer}); m_validator.add_claim_validator("jti", &Enforcer::str_validator, nullptr); m_validator.add_claim_validator("sub", &Enforcer::str_validator, nullptr); m_validator.add_claim_validator("opt", &Enforcer::all_validator, nullptr); m_validator.add_claim_validator("aud", &Enforcer::aud_validator, this); m_validator.add_claim_validator("scope", &Enforcer::scope_validator, this); std::vector critical_claims = {"scope"}; // If any audiences are in the given to us, then force the validator to // check it. 
if (!m_audiences.empty()) { critical_claims.push_back("aud"); } m_validator.add_critical_claims(critical_claims); } void set_now(std::chrono::system_clock::time_point now) { m_validator.set_now(now); } void set_validate_profile(SciToken::Profile profile) { m_validate_profile = profile; } bool test(const SciToken &scitoken, const std::string &authz, const std::string &path) { reset_state(); m_test_path = path; m_test_authz = authz; try { m_validator.verify(scitoken, time(NULL) + 20); return true; } catch (std::runtime_error &) { throw; } } AclsList generate_acls(const SciToken &scitoken) { reset_state(); m_validator.verify(scitoken, time(NULL) + 20); return m_gen_acls; } std::unique_ptr generate_acls_start(const SciToken &scitoken, AclsList &acls) { reset_state(); auto status = m_validator.verify_async(scitoken); if (status->m_done) { acls = m_gen_acls; } return status; } std::unique_ptr generate_acls_continue(std::unique_ptr status, AclsList &acls) { auto result = m_validator.verify_async_continue(std::move(status)); if (result->m_done) { acls = m_gen_acls; } return result; } private: static bool all_validator(const jwt::claim &, void *) { return true; } static bool str_validator(const jwt::claim &claim, void *) { return claim.get_type() == jwt::json::type::string; } static bool scope_validator(const jwt::claim &claim, void *myself); static bool aud_validator(const jwt::claim &claim, void *myself) { auto me = reinterpret_cast(myself); std::vector jwt_audiences; if (claim.get_type() == jwt::json::type::string) { const std::string &audience = claim.as_string(); jwt_audiences.push_back(audience); } else if (claim.get_type() == jwt::json::type::array) { const picojson::array &audiences = claim.as_array(); for (const auto &aud_value : audiences) { const std::string &audience = aud_value.get(); jwt_audiences.push_back(audience); } } for (const auto &aud_value : jwt_audiences) { if (((me->m_validator.get_profile() == SciToken::Profile::SCITOKENS_2_0) && (aud_value == 
"ANY")) || ((me->m_validator.get_profile() == SciToken::Profile::WLCG_1_0) && (aud_value == "https://wlcg.cern.ch/jwt/v1/any"))) { return true; } for (const auto &aud : me->m_audiences) { if (aud == aud_value) { return true; } } } return false; } void reset_state() { m_test_path = ""; m_test_authz = ""; m_gen_acls.clear(); m_validator.set_validate_profile(m_validate_profile); } SciToken::Profile m_validate_profile{SciToken::Profile::COMPAT}; std::string m_test_path; std::string m_test_authz; AclsList m_gen_acls; std::string m_issuer; std::vector m_audiences; scitokens::Validator m_validator; }; } // namespace scitokens scitokens-cpp-1.1.3/src/test.cpp000066400000000000000000000070751475716002100165640ustar00rootroot00000000000000#include #include #include #include #include "scitokens.h" int main(int argc, const char **argv) { std::string token = "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXUyJ9.eyJpc3MiOiJhdXRoMCJ9." "AbIJTDMFc7yUa5MhvcP03nJPyCPzZtQcGEp-zWfOkEE"; if (argc == 2) { token = argv[1]; } auto decoded = jwt::decode(token); for (auto &e : decoded.get_payload_claims()) std::cout << e.first << " = " << e.second.to_json() << std::endl; std::ifstream priv_ifs("test.pem"); std::string private_contents((std::istreambuf_iterator(priv_ifs)), (std::istreambuf_iterator())); std::ifstream pub_ifs("test.pem.pub"); std::string public_contents((std::istreambuf_iterator(pub_ifs)), (std::istreambuf_iterator())); char *err_msg; SciTokenKey key = scitoken_key_create("key-es356", "RS256", public_contents.c_str(), private_contents.c_str(), &err_msg); if (!key) { std::cout << "Failed to generate a key: " << err_msg << std::endl; return 1; } SciToken scitoken = scitoken_create(key); if (scitoken_set_claim_string(scitoken, "iss", "https://demo.scitokens.org", &err_msg)) { std::cout << "Failed to set a claim: " << err_msg << std::endl; } // Test setting and getting a claim char *value; if (scitoken_get_claim_string(scitoken, "iss", &value, &err_msg)) { std::cout << "Failed to get a claim: " 
<< err_msg << std::endl; } if (strcmp(value, "https://demo.scitokens.org") != 0) { std::cout << "Failed to get same claim a claim: " << err_msg << std::endl; } if (scitoken_serialize(scitoken, &value, &err_msg)) { std::cout << "Failed to generate a token: " << err_msg << std::endl; return 1; } std::cout << "SciToken: " << value << std::endl; auto decoded2 = jwt::decode(value); for (auto &e : decoded2.get_payload_claims()) std::cout << e.first << " = " << e.second.to_json() << std::endl; scitoken_destroy(scitoken); scitoken_key_destroy(key); // Get updated strings from https://demo.scitokens.org/ // std::string test_value = // "eyJ0eXAiOiJKV1QiLCJhbGciOiJFUzI1NiIsImtpZCI6ImtleS1lczI1NiJ9.eyJpc3MiOiJodHRwczovL2RlbW8uc2NpdG9rZW5zLm9yZyIsImV4cCI6MTU0NjM4OTU5MiwiaWF0IjoxNTQ2Mzg4OTkyLCJuYmYiOjE1NDYzODg5OTIsImp0aSI6IjRkMzM2MTU5LWMxMDEtNGRhYy1iYzI5LWI5NDQ3ZDRkY2IxZSJ9.VfSCPj79IfdVCZHw8n0RJJupbaSU0OqMWxRVAnVUNvk1SCz0Ep3O06Boe5I0SRiZR8_0jzHw9vHZ0YOT_0kPAw"; std::string test_value = "eyJ0eXAiOiJKV1QiLCJhbGciOiJSUzI1NiIsImtpZCI6ImtleS1yczI1NiJ9." 
"eyJpc3MiOiJodHRwczovL2RlbW8uc2NpdG9rZW5zLm9yZyIsImV4cCI6MTU0NjM5MjAwOS" "wiaWF0IjoxNTQ2MzkxNDA5LCJuYmYiOjE1NDYzOTE0MDksImp0aSI6ImFkYTk2MjdiLWEx" "MGYtNGMyYS05Nzc2LTE4ZThkN2JmN2M4NSJ9.cNMG5zI2-JHh7l_" "PUPUAxom5Vi6Q3akKmv6q57CoVKHtxZAZRc47Uoix_" "AH3Xzr42qohr2FPamRTxUMsfZjrAFDJ_4JhJ-kKjJ3cRXXF-" "gj7lbniCDGOBuPXeMsVmeED15nauZ3XKXUHTGLEsg5O6RjS7sGKM_" "e9YiYvcTvWXcdkrkxZ2dPPU-R3IxdK6PtE9OB2XOk85H670OAJT3qimKm8Dk_" "Ri6DEEty1Su_" "1Tov3ac5B19iZkbhhVPMVP0cRolR9UNLhMxQAsbgEmArQOcs046AOzqQz6osOkdYOrVVO7" "lO2owUyMol94mB_39y1M8jcf5WNq3ukMMIzMCAPwA"; if (scitoken_deserialize(test_value.c_str(), &scitoken, nullptr, &err_msg)) { std::cout << "Failed to deserialize a token: " << err_msg << std::endl; return 1; } return 0; } scitokens-cpp-1.1.3/src/test_access.cpp000066400000000000000000000027131475716002100200770ustar00rootroot00000000000000#include #include "scitokens.h" int main(int argc, const char **argv) { if (argc < 6) { std::cerr << "Usage: " << argv[0] << " (TOKEN) (ISSUER) (AUDIENCE) (AUTHZ) (PATH)" << std::endl; return 1; } std::string token(argv[1]); std::string issuer(argv[2]); std::string audience(argv[3]); std::string authz(argv[4]); std::string path(argv[5]); const char *aud_list[2]; aud_list[0] = audience.c_str(); aud_list[1] = nullptr; SciToken scitoken; char *err_msg = nullptr; if (scitoken_deserialize(token.c_str(), &scitoken, nullptr, &err_msg)) { std::cout << "Failed to deserialize a token: " << err_msg << std::endl; return 1; } std::cout << "Token deserialization successful. Checking authorizations." << std::endl; Enforcer enf; if (!(enf = enforcer_create(issuer.c_str(), aud_list, &err_msg))) { std::cout << "Failed to create a new enforcer object: " << err_msg << std::endl; return 1; } const Acl acl{authz.c_str(), path.c_str()}; if (enforcer_test(enf, scitoken, &acl, &err_msg)) { if (err_msg) { std::cout << "Access test failed: " << err_msg << std::endl; } else { std::cout << "Access test failed." 
<< std::endl; } return 1; } std::cout << "Access test successful." << std::endl; enforcer_destroy(enf); return 0; } scitokens-cpp-1.1.3/src/verify.cpp000066400000000000000000000073111475716002100171020ustar00rootroot00000000000000 #include "scitokens.h" #include #include #include namespace { const char usage[] = "\n" "Syntax: %s [--cred cred_file] TOKEN\n" "\n" " Options\n" " -h | --help Display usage\n" " -c | --cred File containing the signing credential.\n" " -i | --issuer Issuer of the token to verify.\n" " -K | --keyid Name of the token key.\n" " -p | --profile Profile to enforce (wlcg, scitokens1, " "scitokens2, atjwt).\n" "\n"; const struct option long_options[] = {{"help", no_argument, NULL, 'h'}, {"cred", required_argument, NULL, 'c'}, {"issuer", required_argument, NULL, 'i'}, {"keyid", required_argument, NULL, 'K'}, {"profile", required_argument, NULL, 'p'}, {0, 0, 0, 0}}; const char short_options[] = "hc:i:K:p:"; std::string g_cred, g_issuer, g_keyid, g_profile; int init_arguments(int argc, char *const argv[]) { int arg; while ((arg = getopt_long(argc, argv, short_options, long_options, nullptr)) != -1) { switch (arg) { case 'h': printf(usage, argv[0]); exit(0); break; case 'c': g_cred = optarg; break; case 'i': g_issuer = optarg; break; case 'K': g_keyid = optarg; break; case 'p': g_profile = optarg; break; default: fprintf(stderr, usage, argv[0]); exit(1); break; } } if (optind < argc - 1) { fprintf(stderr, "%s: invalid option -- %s\n", argv[0], argv[optind]); fprintf(stderr, usage, argv[0]); exit(1); } if (optind == argc) { fprintf(stderr, "%s: Must provide a token as a requirement\n", argv[0]); fprintf(stderr, usage, argv[0]); exit(1); } if ((!g_cred.empty() || !g_issuer.empty() || !g_keyid.empty()) && (g_cred.empty() || g_issuer.empty() || g_keyid.empty())) { fprintf(stderr, "%s: If --cred, --keyid, or --issuer are set, then all must be " "set.\n", argv[0]); fprintf(stderr, usage, argv[0]); exit(1); } return 0; } } // namespace int main(int argc, 
char *const *argv) { if (argc < 2) { fprintf(stderr, "%s: Insufficient arguments; must at least provide a token.\n", argv[0]); fprintf(stderr, usage, argv[0]); return 1; } if (init_arguments(argc, argv)) { return 1; } std::string token(argv[argc - 1]); if (!g_issuer.empty()) { char *err_msg; std::ifstream pub_ifs(g_cred); std::string public_contents((std::istreambuf_iterator(pub_ifs)), (std::istreambuf_iterator())); auto rv = scitoken_store_public_ec_key(g_issuer.c_str(), g_keyid.c_str(), public_contents.c_str(), &err_msg); if (rv) { fprintf(stderr, "%s: %s\n", argv[0], err_msg); free(err_msg); return 1; } } SciToken scitoken; char *err_msg = nullptr; if (scitoken_deserialize(token.c_str(), &scitoken, nullptr, &err_msg)) { std::cout << "Failed to deserialize a token: " << err_msg << std::endl; return 1; } std::cout << "Token deserialization successful." << std::endl; return 0; } scitokens-cpp-1.1.3/test/000077500000000000000000000000001475716002100152605ustar00rootroot00000000000000scitokens-cpp-1.1.3/test/CMakeLists.txt000066400000000000000000000010201475716002100200110ustar00rootroot00000000000000 add_executable(scitokens-gtest main.cpp) if( NOT SCITOKENS_EXTERNAL_GTEST ) add_dependencies(scitokens-gtest gtest) include_directories("${PROJECT_SOURCE_DIR}/vendor/gtest/googletest/include") endif() if(SCITOKENS_EXTERNAL_GTEST) set(LIBGTEST "gtest") else() set(LIBGTEST "${CMAKE_BINARY_DIR}/external/gtest/src/gtest-build/lib/libgtest.a") endif() target_link_libraries(scitokens-gtest SciTokens "${LIBGTEST}" pthread) add_test( NAME unit COMMAND ${CMAKE_CURRENT_BINARY_DIR}/scitokens-gtest ) scitokens-cpp-1.1.3/test/main.cpp000066400000000000000000000665231475716002100167240ustar00rootroot00000000000000#include "../src/scitokens.h" #include #include #include namespace { const char ec_private[] = "-----BEGIN EC PRIVATE KEY-----\n" "MHcCAQEEIESSMxT7PLTR9A/aqd+CM0/6vv6fQWqDm0mNx8uE9EbpoAoGCCqGSM49\n" "AwEHoUQDQgAE1i+ImZ//iQhOPh0OMfZzdbmPH+3G1ouWezolCugQYWIRqNmwq3zR\n" 
"EnTbe4EmymTpJ1MJTPP/tCEUP3G/QqQuhA==\n" "-----END EC PRIVATE KEY-----\n"; const char ec_public[] = "-----BEGIN PUBLIC KEY-----\n" "MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAE1i+ImZ//iQhOPh0OMfZzdbmPH+3G\n" "1ouWezolCugQYWIRqNmwq3zREnTbe4EmymTpJ1MJTPP/tCEUP3G/QqQuhA==\n" "-----END PUBLIC KEY-----\n"; const char ec_private_2[] = "-----BEGIN EC PRIVATE KEY-----\n" "MHcCAQEEIJH6NpWPHcM7wxL/bv89Nezug+KEUQjI9fZxhrBHNA1ioAoGCCqGSM49\n" "AwEHoUQDQgAEb8M7AxRN+DmbfYOoA6DeHCcSeA+kXWCq4E/g2ME/uBOdP8RE0tql\n" "e8fxYcaPikgMcppGq2ycTiLGgEYXgsq2JA==\n" "-----END EC PRIVATE KEY-----\n"; const char ec_public_2[] = "-----BEGIN PUBLIC KEY-----\n" "MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEb8M7AxRN+DmbfYOoA6DeHCcSeA+k\n" "XWCq4E/g2ME/uBOdP8RE0tqle8fxYcaPikgMcppGq2ycTiLGgEYXgsq2JA==\n" "-----END PUBLIC KEY-----\n"; TEST(SciTokenTest, CreateToken) { SciToken token = scitoken_create(nullptr); ASSERT_TRUE(token != nullptr); scitoken_destroy(token); } TEST(SciTokenTest, SignToken) { char *err_msg = nullptr; std::unique_ptr mykey( scitoken_key_create("1", "ES256", ec_public, ec_private, &err_msg), scitoken_key_destroy); ASSERT_TRUE(mykey.get() != nullptr) << err_msg; std::unique_ptr mytoken( scitoken_create(mykey.get()), scitoken_destroy); ASSERT_TRUE(mytoken.get() != nullptr); auto rv = scitoken_set_claim_string( mytoken.get(), "iss", "https://demo.scitokens.org/gtest", &err_msg); ASSERT_TRUE(rv == 0) << err_msg; char *value; rv = scitoken_serialize(mytoken.get(), &value, &err_msg); ASSERT_TRUE(rv == 0) << err_msg; EXPECT_TRUE(value != nullptr); std::unique_ptr value_ptr(value, free); ASSERT_TRUE(strlen(value) > 50); } class SerializeTest : public ::testing::Test { protected: void SetUp() override { char *err_msg = nullptr; m_key = KeyPtr( scitoken_key_create("1", "ES256", ec_public, ec_private, &err_msg), scitoken_key_destroy); ASSERT_TRUE(m_key.get() != nullptr) << err_msg; m_token = TokenPtr(scitoken_create(m_key.get()), scitoken_destroy); ASSERT_TRUE(m_token.get() != nullptr); auto rv = 
scitoken_set_claim_string( m_token.get(), "iss", "https://demo.scitokens.org/gtest", &err_msg); ASSERT_TRUE(rv == 0) << err_msg; rv = scitoken_store_public_ec_key("https://demo.scitokens.org/gtest", "1", ec_public, &err_msg); ASSERT_TRUE(rv == 0) << err_msg; scitoken_set_lifetime(m_token.get(), 60); m_audiences_array.push_back("https://demo.scitokens.org/"); m_audiences_array.push_back(nullptr); const char *groups[3] = {nullptr, nullptr, nullptr}; const char group0[] = "group0"; const char group1[] = "group1"; groups[0] = group0; groups[1] = group1; rv = scitoken_set_claim_string_list(m_token.get(), "groups", groups, &err_msg); ASSERT_TRUE(rv == 0) << err_msg; m_read_token.reset(scitoken_create(nullptr)); ASSERT_TRUE(m_read_token.get() != nullptr); } using KeyPtr = std::unique_ptr; KeyPtr m_key{nullptr, scitoken_key_destroy}; using TokenPtr = std::unique_ptr; TokenPtr m_token{nullptr, scitoken_destroy}; std::vector m_audiences_array; TokenPtr m_read_token{nullptr, scitoken_destroy}; }; TEST_F(SerializeTest, VerifyTest) { char *err_msg = nullptr; char *token_value = nullptr; auto rv = scitoken_serialize(m_token.get(), &token_value, &err_msg); ASSERT_TRUE(rv == 0) << err_msg; std::unique_ptr token_value_ptr(token_value, free); rv = scitoken_deserialize_v2(token_value, m_read_token.get(), nullptr, &err_msg); ASSERT_TRUE(rv == 0) << err_msg; char *value; rv = scitoken_get_claim_string(m_read_token.get(), "iss", &value, &err_msg); ASSERT_TRUE(value != nullptr); ASSERT_TRUE(rv == 0) << err_msg; std::unique_ptr value_ptr(value, free); EXPECT_STREQ(value, "https://demo.scitokens.org/gtest"); value_ptr.reset(); rv = scitoken_get_claim_string(m_read_token.get(), "doesnotexist", &value, &err_msg); EXPECT_FALSE(rv == 0); } TEST_F(SerializeTest, TestStringList) { char *err_msg = nullptr; char **value; auto rv = scitoken_get_claim_string_list(m_token.get(), "groups", &value, &err_msg); ASSERT_TRUE(rv == 0) << err_msg; ASSERT_TRUE(value != nullptr); ASSERT_TRUE(value[0] != 
nullptr); EXPECT_STREQ(value[0], "group0"); ASSERT_TRUE(value[1] != nullptr); EXPECT_STREQ(value[1], "group1"); EXPECT_TRUE(value[2] == nullptr); } TEST_F(SerializeTest, VerifyWLCGTest) { char *err_msg = nullptr; char *token_value = nullptr; scitoken_set_serialize_profile(m_token.get(), SciTokenProfile::WLCG_1_0); auto rv = scitoken_serialize(m_token.get(), &token_value, &err_msg); ASSERT_TRUE(rv == 0) << err_msg; std::unique_ptr token_value_ptr(token_value, free); // Accepts any profile. rv = scitoken_deserialize_v2(token_value, m_read_token.get(), nullptr, &err_msg); ASSERT_TRUE(rv == 0) << err_msg; char *value; rv = scitoken_get_claim_string(m_read_token.get(), "wlcg.ver", &value, &err_msg); ASSERT_TRUE(rv == 0) << err_msg; ASSERT_TRUE(value != nullptr); std::unique_ptr value_ptr(value, free); EXPECT_STREQ(value, "1.0"); value_ptr.reset(); rv = scitoken_get_claim_string(m_read_token.get(), "ver", &value, &err_msg); EXPECT_FALSE(rv == 0); // Accepts only a WLCG token scitoken_set_deserialize_profile(m_read_token.get(), SciTokenProfile::WLCG_1_0); rv = scitoken_deserialize_v2(token_value, m_read_token.get(), nullptr, &err_msg); ASSERT_TRUE(rv == 0) << err_msg; // Accepts only SciToken 1.0; should fail. 
scitoken_set_deserialize_profile(m_read_token.get(), SciTokenProfile::SCITOKENS_1_0); rv = scitoken_deserialize_v2(token_value, m_read_token.get(), nullptr, &err_msg); ASSERT_FALSE(rv == 0); } TEST_F(SerializeTest, FailVerifyToken) { char *err_msg = nullptr; std::unique_ptr mykey( scitoken_key_create("1", "ES256", ec_public_2, ec_private_2, &err_msg), scitoken_key_destroy); ASSERT_TRUE(mykey.get() != nullptr) << err_msg; std::unique_ptr mytoken( scitoken_create(mykey.get()), scitoken_destroy); ASSERT_TRUE(mytoken.get() != nullptr); auto rv = scitoken_set_claim_string( mytoken.get(), "iss", "https://demo.scitokens.org/gtest", &err_msg); ASSERT_TRUE(rv == 0) << err_msg; char *value; rv = scitoken_serialize(mytoken.get(), &value, &err_msg); ASSERT_TRUE(rv == 0) << err_msg; EXPECT_TRUE(value != nullptr); std::unique_ptr value_ptr(value, free); EXPECT_TRUE(strlen(value) > 50); // Should fail; we signed it with the wrong public key. rv = scitoken_deserialize_v2(value, m_read_token.get(), nullptr, &err_msg); EXPECT_FALSE(rv == 0); } TEST_F(SerializeTest, VerifyATJWTTest) { char *err_msg = nullptr; // Serialize as at+jwt token. char *token_value = nullptr; scitoken_set_serialize_profile(m_token.get(), SciTokenProfile::AT_JWT); auto rv = scitoken_serialize(m_token.get(), &token_value, &err_msg); ASSERT_TRUE(rv == 0) << err_msg; std::unique_ptr token_value_ptr(token_value, free); // Accepts any profile. 
rv = scitoken_deserialize_v2(token_value, m_read_token.get(), nullptr, &err_msg); ASSERT_TRUE(rv == 0) << err_msg; // Accepts only an at+jwt token, should work with at+jwt token scitoken_set_deserialize_profile(m_read_token.get(), SciTokenProfile::AT_JWT); rv = scitoken_deserialize_v2(token_value, m_read_token.get(), nullptr, &err_msg); ASSERT_TRUE(rv == 0) << err_msg; // Accepts only SciToken 2.0; should fail scitoken_set_deserialize_profile(m_read_token.get(), SciTokenProfile::SCITOKENS_2_0); rv = scitoken_deserialize_v2(token_value, m_read_token.get(), nullptr, &err_msg); ASSERT_FALSE(rv == 0); } TEST_F(SerializeTest, FailVerifyATJWTTest) { char *err_msg = nullptr; // Serialize as "compat" token. char *token_value = nullptr; scitoken_set_serialize_profile(m_token.get(), SciTokenProfile::COMPAT); auto rv = scitoken_serialize(m_token.get(), &token_value, &err_msg); ASSERT_TRUE(rv == 0) << err_msg; std::unique_ptr token_value_ptr(token_value, free); // Accepts any profile. rv = scitoken_deserialize_v2(token_value, m_read_token.get(), nullptr, &err_msg); ASSERT_TRUE(rv == 0) << err_msg; // Accepts only an at+jwt token, should fail with COMPAT token scitoken_set_deserialize_profile(m_read_token.get(), SciTokenProfile::AT_JWT); rv = scitoken_deserialize_v2(token_value, m_read_token.get(), nullptr, &err_msg); ASSERT_FALSE(rv == 0); } TEST_F(SerializeTest, EnforcerTest) { /* * Test that the enforcer works and returns an err_msg */ char *err_msg = nullptr; auto rv = scitoken_set_claim_string( m_token.get(), "aud", "https://demo.scitokens.org/", &err_msg); ASSERT_TRUE(rv == 0) << err_msg; auto enforcer = enforcer_create("https://demo.scitokens.org/gtest", &m_audiences_array[0], &err_msg); ASSERT_TRUE(enforcer != nullptr) << err_msg; Acl acl; acl.authz = "read"; acl.resource = "/stuff"; rv = scitoken_set_claim_string(m_token.get(), "scope", "read:/blah", &err_msg); ASSERT_TRUE(rv == 0) << err_msg; rv = scitoken_set_claim_string(m_token.get(), "ver", "scitoken:2.0", 
&err_msg); ASSERT_TRUE(rv == 0) << err_msg; char *token_value = nullptr; rv = scitoken_serialize(m_token.get(), &token_value, &err_msg); ASSERT_TRUE(rv == 0) << err_msg; std::unique_ptr token_value_ptr(token_value, free); rv = scitoken_deserialize_v2(token_value, m_read_token.get(), nullptr, &err_msg); ASSERT_TRUE(rv == 0) << err_msg; rv = enforcer_test(enforcer, m_read_token.get(), &acl, &err_msg); ASSERT_STREQ( err_msg, "token verification failed: 'scope' claim verification failed."); ASSERT_TRUE(rv == -1) << err_msg; } TEST_F(SerializeTest, EnforcerScopeTest) { char *err_msg = nullptr; auto rv = scitoken_set_claim_string( m_token.get(), "aud", "https://demo.scitokens.org/", &err_msg); ASSERT_TRUE(rv == 0) << err_msg; auto enforcer = enforcer_create("https://demo.scitokens.org/gtest", &m_audiences_array[0], &err_msg); ASSERT_TRUE(enforcer != nullptr) << err_msg; scitoken_set_serialize_profile(m_token.get(), SciTokenProfile::WLCG_1_0); rv = scitoken_set_claim_string( m_token.get(), "scope", "storage.modify:/ storage.read:/ openid offline_access", &err_msg); ASSERT_TRUE(rv == 0) << err_msg; char *token_value = nullptr; rv = scitoken_serialize(m_token.get(), &token_value, &err_msg); ASSERT_TRUE(rv == 0) << err_msg; rv = scitoken_deserialize_v2(token_value, m_read_token.get(), nullptr, &err_msg); ASSERT_TRUE(rv == 0) << err_msg; Acl *acls; enforcer_generate_acls(enforcer, m_read_token.get(), &acls, &err_msg); ASSERT_TRUE(acls != nullptr) << err_msg; int idx = 0; bool found_read = false; bool found_write = false; while (acls[idx].resource && acls[idx++].authz) { auto resource = acls[idx - 1].resource; auto authz = acls[idx - 1].authz; if (strcmp(authz, "read") == 0) { found_read = true; ASSERT_STREQ(resource, "/"); } else if (strcmp(authz, "write") == 0) { found_write = true; ASSERT_STREQ(resource, "/"); } } ASSERT_TRUE(found_read); ASSERT_TRUE(found_write); } } // namespace TEST_F(SerializeTest, DeserializeAsyncTest) { char *err_msg = nullptr; // Serialize as 
"compat" token. char *token_value = nullptr; scitoken_set_serialize_profile(m_token.get(), SciTokenProfile::COMPAT); auto rv = scitoken_serialize(m_token.get(), &token_value, &err_msg); ASSERT_TRUE(rv == 0) << err_msg; std::unique_ptr token_value_ptr(token_value, free); SciToken scitoken; SciTokenStatus status; // Accepts any profile. rv = scitoken_deserialize_start(token_value, &scitoken, nullptr, &status, &err_msg); ASSERT_TRUE(rv == 0) << err_msg; // Accepts only an at+jwt token, should fail with COMPAT token while (rv == 0 && status) { rv = scitoken_deserialize_continue(&scitoken, &status, &err_msg); ASSERT_TRUE(rv == 0) << err_msg; } } TEST_F(SerializeTest, FailDeserializeAsyncTest) { char *err_msg = nullptr; std::unique_ptr mykey( scitoken_key_create("1", "ES256", ec_public_2, ec_private_2, &err_msg), scitoken_key_destroy); ASSERT_TRUE(mykey.get() != nullptr) << err_msg; std::unique_ptr mytoken( scitoken_create(mykey.get()), scitoken_destroy); ASSERT_TRUE(mytoken.get() != nullptr); auto rv = scitoken_set_claim_string( mytoken.get(), "iss", "https://demo.scitokens.org/gtest", &err_msg); ASSERT_TRUE(rv == 0) << err_msg; char *value; rv = scitoken_serialize(mytoken.get(), &value, &err_msg); ASSERT_TRUE(rv == 0) << err_msg; EXPECT_TRUE(value != nullptr); std::unique_ptr value_ptr(value, free); EXPECT_TRUE(strlen(value) > 50); char *token_value = nullptr; std::unique_ptr token_value_ptr(token_value, free); SciToken scitoken; SciTokenStatus status; // Accepts any profile. 
rv = scitoken_deserialize_start(value, &scitoken, nullptr, &status, &err_msg); EXPECT_FALSE(rv == 0) << err_msg; // Accepts only an at+jwt token, should fail with COMPAT token while (rv == 0 && status) { rv = scitoken_deserialize_continue(&scitoken, &status, &err_msg); EXPECT_FALSE(rv == 0) << err_msg; } } TEST_F(SerializeTest, ExplicitTime) { char *err_msg = nullptr; scitoken_set_serialize_profile(m_token.get(), SciTokenProfile::WLCG_1_0); auto rv = scitoken_set_claim_string(m_token.get(), "scope", "storage.read:/", &err_msg); ASSERT_TRUE(rv == 0) << err_msg; char *token_value = nullptr; rv = scitoken_serialize(m_token.get(), &token_value, &err_msg); ASSERT_TRUE(rv == 0) << err_msg; rv = scitoken_deserialize_v2(token_value, m_read_token.get(), nullptr, &err_msg); ASSERT_TRUE(rv == 0) << err_msg; auto enforcer = enforcer_create("https://demo.scitokens.org/gtest", &m_audiences_array[0], &err_msg); ASSERT_TRUE(enforcer != nullptr) << err_msg; Acl *acls; rv = enforcer_generate_acls(enforcer, m_read_token.get(), &acls, &err_msg); ASSERT_TRUE(rv == 0) << err_msg; ASSERT_TRUE(acls != nullptr); enforcer_set_time(enforcer, time(NULL), &err_msg); rv = enforcer_generate_acls(enforcer, m_read_token.get(), &acls, &err_msg); ASSERT_TRUE(rv == 0) << err_msg; enforcer_set_time(enforcer, time(NULL) + 100, &err_msg); rv = enforcer_generate_acls(enforcer, m_read_token.get(), &acls, &err_msg); ASSERT_FALSE(rv == 0); enforcer_set_time(enforcer, time(NULL) - 100, &err_msg); rv = enforcer_generate_acls(enforcer, m_read_token.get(), &acls, &err_msg); ASSERT_FALSE(rv == 0); enforcer_destroy(enforcer); } class SerializeNoKidTest : public ::testing::Test { protected: void SetUp() override { char *err_msg = nullptr; m_key = KeyPtr(scitoken_key_create("none", "ES256", ec_public, ec_private, &err_msg), scitoken_key_destroy); ASSERT_TRUE(m_key.get() != nullptr) << err_msg; m_token = TokenPtr(scitoken_create(m_key.get()), scitoken_destroy); ASSERT_TRUE(m_token.get() != nullptr); auto rv = 
scitoken_set_claim_string( m_token.get(), "iss", "https://demo.scitokens.org/gtest", &err_msg); ASSERT_TRUE(rv == 0) << err_msg; rv = scitoken_store_public_ec_key("https://demo.scitokens.org/gtest", "1", ec_public, &err_msg); ASSERT_TRUE(rv == 0) << err_msg; scitoken_set_lifetime(m_token.get(), 60); m_audiences_array.push_back("https://demo.scitokens.org/"); m_audiences_array.push_back(nullptr); const char *groups[3] = {nullptr, nullptr, nullptr}; const char group0[] = "group0"; const char group1[] = "group1"; groups[0] = group0; groups[1] = group1; rv = scitoken_set_claim_string_list(m_token.get(), "groups", groups, &err_msg); ASSERT_TRUE(rv == 0) << err_msg; m_read_token.reset(scitoken_create(nullptr)); ASSERT_TRUE(m_read_token.get() != nullptr); } using KeyPtr = std::unique_ptr; KeyPtr m_key{nullptr, scitoken_key_destroy}; using TokenPtr = std::unique_ptr; TokenPtr m_token{nullptr, scitoken_destroy}; std::vector m_audiences_array; TokenPtr m_read_token{nullptr, scitoken_destroy}; }; TEST_F(SerializeNoKidTest, VerifyATJWTTest) { char *err_msg = nullptr; // Serialize as at+jwt token. char *token_value = nullptr; scitoken_set_serialize_profile(m_token.get(), SciTokenProfile::AT_JWT); auto rv = scitoken_serialize(m_token.get(), &token_value, &err_msg); ASSERT_TRUE(rv == 0) << err_msg; std::unique_ptr token_value_ptr(token_value, free); // Accepts any profile. 
rv = scitoken_deserialize_v2(token_value, m_read_token.get(), nullptr, &err_msg); ASSERT_TRUE(rv == 0) << err_msg; // Accepts only an at+jwt token, should work with at+jwt token scitoken_set_deserialize_profile(m_read_token.get(), SciTokenProfile::AT_JWT); rv = scitoken_deserialize_v2(token_value, m_read_token.get(), nullptr, &err_msg); ASSERT_TRUE(rv == 0) << err_msg; // Accepts only SciToken 2.0; should fail scitoken_set_deserialize_profile(m_read_token.get(), SciTokenProfile::SCITOKENS_2_0); rv = scitoken_deserialize_v2(token_value, m_read_token.get(), nullptr, &err_msg); ASSERT_FALSE(rv == 0); } class KeycacheTest : public ::testing::Test { protected: std::string demo_scitokens_url = "https://demo.scitokens.org"; void SetUp() override { char *err_msg = nullptr; auto rv = keycache_set_jwks(demo_scitokens_url.c_str(), demo_scitokens.c_str(), &err_msg); ASSERT_TRUE(rv == 0) << err_msg; } // Reference copy of the keys at https://demo.scitokens.org/oauth2/certs; // may need to be updated periodically. 
std::string demo_scitokens = "{\"keys\":[{\"alg\":\"RS256\",\"e\":\"AQAB\",\"kid\":\"key-rs256\"," "\"kty\":\"RSA\",\"n\":\"uGDGTLXnqh3mfopjys6sFUBvFl3F4Qt6NEYphq_u_" "aBhtN1X9NEyb78uB_" "I1KjciJNGLIQU0ECsJiFx6qV1hR9xE1dPyrS3bU92AVtnBrvzUtTU-aUZAmZQiuAC_rC0-" "z_" "TOQr6qJkkUgZtxR9n9op55ZBpRfZD5dzhkW4Dm146vfTKt0D4cIMoMNJS5xQx9nibeB4E8" "hryZDW_" "fPeD0XZDcpByNyP0jFDYkxdUtQFvyRpz4WMZ4ejUfvW3gf4LRAfGZJtMnsZ7ZW4RfoQbhi" "XKMfWeBEjQDiXh0r-KuZLykxhYJtpf7fTnPna753IzMgRMmW3F69iQn2LQN3LoSMw==\"," "\"use\":\"sig\"},{\"alg\":\"ES256\",\"kid\":\"key-es256\",\"kty\":" "\"EC\",\"use\":\"sig\",\"x\":" "\"ncSCrGTBTXXOhNiAOTwNdPjwRz1hVY4saDNiHQK9Bh4=\",\"y\":" "\"sCsFXvx7FAAklwq3CzRCBcghqZOFPB2dKUayS6LY_Lo=\"}]}"; std::string demo_scitokens2 = "{\"keys\":[{\"alg\":\"ES256\",\"kid\":\"key-es256\",\"kty\":\"EC\"," "\"use\":\"sig\",\"x\":\"ncSCrGTBTXXOhNiAOTwNdPjwRz1hVY4saDNiHQK9Bh4=" "\",\"y\":\"sCsFXvx7FAAklwq3CzRCBcghqZOFPB2dKUayS6LY_Lo=\"}]}"; }; TEST_F(KeycacheTest, RefreshTest) { char *err_msg = nullptr; auto rv = keycache_refresh_jwks(demo_scitokens_url.c_str(), &err_msg); ASSERT_TRUE(rv == 0) << err_msg; char *output_jwks; rv = keycache_get_cached_jwks(demo_scitokens_url.c_str(), &output_jwks, &err_msg); ASSERT_TRUE(rv == 0) << err_msg; ASSERT_TRUE(output_jwks != nullptr); std::string output_jwks_str(output_jwks); free(output_jwks); EXPECT_EQ(demo_scitokens, output_jwks_str); } TEST_F(KeycacheTest, RefreshInvalid) { char *err_msg = nullptr, *jwks; auto rv = keycache_refresh_jwks("https://demo.scitokens.org/invalid", &err_msg); ASSERT_FALSE(rv == 0); rv = keycache_get_cached_jwks("https://demo.scitokens.org/invalid", &jwks, &err_msg); ASSERT_TRUE(rv == 0) << err_msg; ASSERT_TRUE(jwks != nullptr); std::string jwks_str(jwks); free(jwks); EXPECT_EQ(jwks_str, "{\"keys\": []}"); } TEST_F(KeycacheTest, GetInvalid) { char *err_msg = nullptr, *jwks; auto rv = keycache_get_cached_jwks("https://demo.scitokens.org/unknown", &jwks, &err_msg); ASSERT_TRUE(rv == 0) << err_msg; 
ASSERT_TRUE(jwks != nullptr); std::string jwks_str(jwks); free(jwks); } TEST_F(KeycacheTest, GetTest) { char *err_msg = nullptr, *jwks; auto rv = keycache_get_cached_jwks(demo_scitokens_url.c_str(), &jwks, &err_msg); ASSERT_TRUE(rv == 0) << err_msg; ASSERT_TRUE(jwks != nullptr); std::string jwks_str(jwks); free(jwks); EXPECT_EQ(demo_scitokens, jwks_str); } TEST_F(KeycacheTest, SetGetTest) { char *err_msg = nullptr; auto rv = keycache_set_jwks(demo_scitokens_url.c_str(), demo_scitokens2.c_str(), &err_msg); ASSERT_TRUE(rv == 0) << err_msg; char *jwks; rv = keycache_get_cached_jwks(demo_scitokens_url.c_str(), &jwks, &err_msg); ASSERT_TRUE(rv == 0) << err_msg; ASSERT_TRUE(jwks != nullptr); std::string jwks_str(jwks); free(jwks); EXPECT_EQ(demo_scitokens2, jwks_str); } TEST_F(KeycacheTest, SetGetConfiguredCacheHome) { // Set cache home char cache_path[FILENAME_MAX]; ASSERT_TRUE(getcwd(cache_path, sizeof(cache_path)) != nullptr); // Side effect gets cwd char *err_msg = nullptr; std::string key = "keycache.cache_home"; auto rv = scitoken_config_set_str(key.c_str(), cache_path, &err_msg); ASSERT_TRUE(rv == 0) << err_msg; // Set the jwks at the new cache home rv = keycache_set_jwks(demo_scitokens_url.c_str(), demo_scitokens2.c_str(), &err_msg); ASSERT_TRUE(rv == 0) << err_msg; // Fetch the cached jwks from the new cache home char *jwks; rv = keycache_get_cached_jwks(demo_scitokens_url.c_str(), &jwks, &err_msg); ASSERT_TRUE(rv == 0) << err_msg; ASSERT_TRUE(jwks != nullptr); std::string jwks_str(jwks); free(jwks); EXPECT_EQ(demo_scitokens2, jwks_str); // Check that cache home is still what was set char *output; rv = scitoken_config_get_str(key.c_str(), &output, &err_msg); ASSERT_TRUE(rv == 0) << err_msg; EXPECT_EQ(*output, *cache_path); free(output); // Reset cache home to whatever it was before by setting empty config rv = scitoken_config_set_str(key.c_str(), "", &err_msg); ASSERT_TRUE(rv == 0) << err_msg; } TEST_F(KeycacheTest, InvalidConfigKeyTest) { char *err_msg = 
nullptr; int new_update_interval = 400; std::string key = "invalid key"; auto rv = scitoken_config_set_int(key.c_str(), new_update_interval, &err_msg); ASSERT_FALSE(rv == 0); const char *key2 = nullptr; rv = scitoken_config_set_int(key2, new_update_interval, &err_msg); ASSERT_FALSE(rv == 0); } TEST_F(KeycacheTest, SetGetUpdateTest) { char *err_msg = nullptr; int new_update_interval = 400; std::string key = "keycache.update_interval_s"; auto rv = scitoken_config_set_int(key.c_str(), new_update_interval, &err_msg); ASSERT_TRUE(rv == 0) << err_msg; rv = scitoken_config_get_int(key.c_str(), &err_msg); EXPECT_EQ(rv, new_update_interval) << err_msg; } TEST_F(KeycacheTest, SetGetExpirationTest) { char *err_msg = nullptr; int new_expiration_interval = 2 * 24 * 3600; std::string key = "keycache.expiration_interval_s"; auto rv = scitoken_config_set_int(key.c_str(), new_expiration_interval, &err_msg); ASSERT_TRUE(rv == 0) << err_msg; rv = scitoken_config_get_int(key.c_str(), &err_msg); EXPECT_EQ(rv, new_expiration_interval) << err_msg; } TEST_F(KeycacheTest, SetInvalidUpdateTest) { char *err_msg = nullptr; int new_update_interval = -1; std::string key = "keycache.update_interval_s"; auto rv = scitoken_config_set_int(key.c_str(), new_update_interval, &err_msg); ASSERT_FALSE(rv == 0); } TEST_F(KeycacheTest, SetInvalidExpirationTest) { char *err_msg = nullptr; int new_expiration_interval = -2 * 24 * 3600; std::string key = "keycache.expiration_interval_s"; auto rv = scitoken_config_set_int(key.c_str(), new_expiration_interval, &err_msg); ASSERT_FALSE(rv == 0); } TEST_F(KeycacheTest, RefreshExpiredTest) { char *err_msg = nullptr, *jwks; int new_expiration_interval = 0; std::string key = "keycache.expiration_interval_s"; auto rv = scitoken_config_set_int(key.c_str(), new_expiration_interval, &err_msg); ASSERT_TRUE(rv == 0) << err_msg; rv = keycache_refresh_jwks(demo_scitokens_url.c_str(), &err_msg); ASSERT_TRUE(rv == 0) << err_msg; sleep(1); rv = 
keycache_get_cached_jwks(demo_scitokens_url.c_str(), &jwks, &err_msg); ASSERT_TRUE(rv == 0) << err_msg; ASSERT_TRUE(jwks != nullptr); std::string jwks_str(jwks); free(jwks); EXPECT_EQ(jwks_str, "{\"keys\": []}"); } int main(int argc, char **argv) { ::testing::InitGoogleTest(&argc, argv); return RUN_ALL_TESTS(); } scitokens-cpp-1.1.3/vendor/000077500000000000000000000000001475716002100155765ustar00rootroot00000000000000scitokens-cpp-1.1.3/vendor/.clang-format000066400000000000000000000000501475716002100201440ustar00rootroot00000000000000DisableFormat: true SortIncludes: Never scitokens-cpp-1.1.3/vendor/gtest/000077500000000000000000000000001475716002100167245ustar00rootroot00000000000000scitokens-cpp-1.1.3/vendor/jwt-cpp/000077500000000000000000000000001475716002100171625ustar00rootroot00000000000000scitokens-cpp-1.1.3/vendor/jwt-cpp/.gitattributes000066400000000000000000000005721475716002100220610ustar00rootroot00000000000000# Auto detect text files and perform LF normalization * text=auto # Custom for Visual Studio *.cs diff=csharp # Standard to msysgit *.doc diff=astextplain *.DOC diff=astextplain *.docx diff=astextplain *.DOCX diff=astextplain *.dot diff=astextplain *.DOT diff=astextplain *.pdf diff=astextplain *.PDF diff=astextplain *.rtf diff=astextplain *.RTF diff=astextplain scitokens-cpp-1.1.3/vendor/jwt-cpp/.gitignore000066400000000000000000000115101475716002100211500ustar00rootroot00000000000000## Ignore Visual Studio temporary files, build results, and ## files generated by popular Visual Studio add-ons. 
# User-specific files *.suo *.user *.userosscache *.sln.docstates # User-specific files (MonoDevelop/Xamarin Studio) *.userprefs # Build results [Dd]ebug/ [Dd]ebugPublic/ [Rr]elease/ [Rr]eleases/ x64/ x86/ bld/ [Bb]in/ [Oo]bj/ [Ll]og/ # Visual Studio 2015 cache/options directory .vs/ # Uncomment if you have tasks that create the project's static files in wwwroot #wwwroot/ # MSTest test Results [Tt]est[Rr]esult*/ [Bb]uild[Ll]og.* # NUNIT *.VisualState.xml TestResult.xml # Build Results of an ATL Project [Dd]ebugPS/ [Rr]eleasePS/ dlldata.c # DNX project.lock.json artifacts/ *_i.c *_p.c *_i.h *.ilk *.meta *.obj *.pch *.pdb *.pgc *.pgd *.rsp *.sbr *.tlb *.tli *.tlh *.tmp *.tmp_proj *.log *.vspscc *.vssscc .builds *.pidb *.svclog *.scc # Chutzpah Test files _Chutzpah* # Visual C++ cache files ipch/ *.aps *.ncb *.opendb *.opensdf *.sdf *.cachefile *.VC.db *.VC.VC.opendb # Visual Studio profiler *.psess *.vsp *.vspx *.sap # TFS 2012 Local Workspace $tf/ # Guidance Automation Toolkit *.gpState # ReSharper is a .NET coding add-in _ReSharper*/ *.[Rr]e[Ss]harper *.DotSettings.user # JustCode is a .NET coding add-in .JustCode # TeamCity is a build add-in _TeamCity* # DotCover is a Code Coverage Tool *.dotCover # NCrunch _NCrunch_* .*crunch*.local.xml nCrunchTemp_* # MightyMoose *.mm.* AutoTest.Net/ # Web workbench (sass) .sass-cache/ # Installshield output folder [Ee]xpress/ # DocProject is a documentation generator add-in DocProject/buildhelp/ DocProject/Help/*.HxT DocProject/Help/*.HxC DocProject/Help/*.hhc DocProject/Help/*.hhk DocProject/Help/*.hhp DocProject/Help/Html2 DocProject/Help/html # Click-Once directory publish/ # Publish Web Output *.[Pp]ublish.xml *.azurePubxml # TODO: Comment the next line if you want to checkin your web deploy settings # but database connection strings (with potential passwords) will be unencrypted *.pubxml *.publishproj # Microsoft Azure Web App publish settings. 
Comment the next line if you want to # checkin your Azure Web App publish settings, but sensitive information contained # in these scripts will be unencrypted PublishScripts/ # NuGet Packages *.nupkg # The packages folder can be ignored because of Package Restore **/packages/* # except build/, which is used as an MSBuild target. !**/packages/build/ # Uncomment if necessary however generally it will be regenerated when needed #!**/packages/repositories.config # NuGet v3's project.json files produces more ignoreable files *.nuget.props *.nuget.targets # Microsoft Azure Build Output csx/ *.build.csdef # Microsoft Azure Emulator ecf/ rcf/ # Windows Store app package directories and files AppPackages/ BundleArtifacts/ Package.StoreAssociation.xml _pkginfo.txt # Visual Studio cache files # files ending in .cache can be ignored *.[Cc]ache # but keep track of directories ending in .cache !*.[Cc]ache/ # Others ClientBin/ ~$* *~ *.dbmdl *.dbproj.schemaview *.pfx *.publishsettings node_modules/ orleans.codegen.cs # Since there are multiple workflows, uncomment next line to ignore bower_components # (https://github.com/github/gitignore/pull/1529#issuecomment-104372622) #bower_components/ # RIA/Silverlight projects Generated_Code/ # Backup & report files from converting an old project file # to a newer Visual Studio version. 
Backup files are not needed, # because we have git ;-) _UpgradeReport_Files/ Backup*/ UpgradeLog*.XML UpgradeLog*.htm # SQL Server files *.mdf *.ldf # Business Intelligence projects *.rdl.data *.bim.layout *.bim_*.settings # Microsoft Fakes FakesAssemblies/ # GhostDoc plugin setting file *.GhostDoc.xml # Node.js Tools for Visual Studio .ntvs_analysis.dat # Visual Studio 6 build log *.plg # Visual Studio 6 workspace options file *.opt # Visual Studio LightSwitch build output **/*.HTMLClient/GeneratedArtifacts **/*.DesktopClient/GeneratedArtifacts **/*.DesktopClient/ModelManifest.xml **/*.Server/GeneratedArtifacts **/*.Server/ModelManifest.xml _Pvt_Extensions # Paket dependency manager .paket/paket.exe paket-files/ # FAKE - F# Make .fake/ # JetBrains Rider .idea/ *.sln.iml # ========================= # Operating System Files # ========================= # OSX # ========================= .DS_Store .AppleDouble .LSOverride # Thumbnails ._* # Files that might appear in the root of a volume .DocumentRevisions-V100 .fseventsd .Spotlight-V100 .TemporaryItems .Trashes .VolumeIcon.icns # Directories potentially created on remote AFP share .AppleDB .AppleDesktop Network Trash Folder Temporary Items .apdisk # Windows # ========================= # Windows image file caches Thumbs.db ehthumbs.db # Folder config file Desktop.ini # Recycle Bin used on file shares $RECYCLE.BIN/ # Windows Installer files *.cab *.msi *.msm *.msp # Windows shortcuts *.lnk # Linux files test *.o *.o.d .vscode docs build/*scitokens-cpp-1.1.3/vendor/jwt-cpp/.travis.yml000066400000000000000000000005771475716002100213040ustar00rootroot00000000000000language: cpp compiler: gcc os: linux dist: xenial install: - sudo apt update - sudo apt install libgtest-dev - (cd /usr/src/gtest && sudo `which cmake` .) - sudo make -C /usr/src/gtest - sudo ln -s /usr/src/gtest/libgtest.a /usr/lib/libgtest.a - sudo ln -s /usr/src/gtest/libgtest_main.a /usr/lib/libgtest_main.a script: - cmake . 
- make - ./tests/jwt-cpp-test scitokens-cpp-1.1.3/vendor/jwt-cpp/CMakeLists.txt000066400000000000000000000135241475716002100217270ustar00rootroot00000000000000cmake_minimum_required(VERSION 3.12) # HUNTER_ENABLED is always set if this package is included in a project using hunter (HunterGate sets it) In this case # we will use hunter as well to stay consistent. If not the use can supply it on configure to force using hunter. if(HUNTER_ENABLED) include("cmake/HunterGate.cmake") huntergate(URL "https://github.com/cpp-pm/hunter/archive/v0.23.314.tar.gz" SHA1 "95c47c92f68edb091b5d6d18924baabe02a6962a") message(STATUS "jwt-cpp: using hunter for dependency resolution") endif() project(jwt-cpp) option(JWT_BUILD_EXAMPLES "Configure CMake to build examples (or not)" ON) option(JWT_BUILD_TESTS "Configure CMake to build tests (or not)" OFF) option(JWT_ENABLE_COVERAGE "Enable code coverage testing" OFF) option(JWT_ENABLE_FUZZING "Enable fuzz testing" OFF) option(JWT_EXTERNAL_PICOJSON "Use find_package() to locate picojson, provided to integrate with package managers" OFF) option(JWT_DISABLE_BASE64 "Do not include the base64 implementation from this library" OFF) option(JWT_DISABLE_PICOJSON "Do not provide the picojson template specialiaze" OFF) set(JWT_SSL_LIBRARY_OPTIONS OpenSSL LibreSSL wolfSSL) set(JWT_SSL_LIBRARY OpenSSL CACHE STRING "Determines which SSL library to build with") set_property(CACHE JWT_SSL_LIBRARY PROPERTY STRINGS ${JWT_SSL_LIBRARY_OPTIONS}) set(JWT_JSON_TRAITS_OPTIONS boost-json danielaparker-jsoncons kazuho-picojson nlohmann-json) list(APPEND CMAKE_MODULE_PATH "${CMAKE_CURRENT_LIST_DIR}/cmake") if(NOT JWT_SSL_LIBRARY IN_LIST JWT_SSL_LIBRARY_OPTIONS) message(FATAL_ERROR "JWT_SSL_LIBRARY must be one of ${JWT_SSL_LIBRARY_OPTIONS}") endif() # If Hunter is enabled, we configure it to resolve OpenSSL and warn the user if he selected an option not supported by # hunter. We fall back to the system library in this case. 
if(HUNTER_ENABLED) if(${JWT_SSL_LIBRARY} MATCHES "OpenSSL") hunter_add_package(OpenSSL) elseif(${JWT_SSL_LIBRARY} MATCHES "LibreSSL") message(WARNING "Hunter does not support LibreSSL yet, the system library will be used (if available)") elseif(${JWT_SSL_LIBRARY} MATCHES "wolfSSL") message(WARNING "Hunter does not support wolfSSL yet, the system library will be used (if available)") endif() if(JWT_EXTERNAL_PICOJSON) message(WARNING "Hunter does not support picojson yet, the system library will be used (if available)") endif() endif() # Lookup dependencies if(${JWT_SSL_LIBRARY} MATCHES "OpenSSL") find_package(OpenSSL 1.0.1 REQUIRED) elseif(${JWT_SSL_LIBRARY} MATCHES "LibreSSL") find_package(LibreSSL 3.0.0 REQUIRED) elseif(${JWT_SSL_LIBRARY} MATCHES "wolfSSL") find_package(PkgConfig REQUIRED) pkg_check_modules(wolfssl REQUIRED IMPORTED_TARGET wolfssl) list(TRANSFORM wolfssl_INCLUDE_DIRS APPEND "/wolfssl") # This is required to access OpenSSL compatability API endif() if(JWT_EXTERNAL_PICOJSON) find_package(picojson 1.3.0 REQUIRED) endif() set(JWT_INCLUDE_PATH ${CMAKE_CURRENT_SOURCE_DIR}/include) set(JWT_HEADER_FILES ${JWT_INCLUDE_PATH}/jwt-cpp/jwt.h) foreach(traits ${JWT_JSON_TRAITS_OPTIONS}) list(APPEND JWT_HEADER_FILES ${JWT_INCLUDE_PATH}/jwt-cpp/traits/${traits}/defaults.h ${JWT_INCLUDE_PATH}/jwt-cpp/traits/${traits}/traits.h) endforeach() if(NOT JWT_DISABLE_BASE64) list(APPEND JWT_HEADER_FILES ${JWT_INCLUDE_PATH}/jwt-cpp/base.h) endif() add_library(jwt-cpp INTERFACE) add_library(jwt-cpp::jwt-cpp ALIAS jwt-cpp) # To match export target_compile_features(jwt-cpp INTERFACE cxx_std_11) if(JWT_DISABLE_BASE64) target_compile_definitions(jwt-cpp INTERFACE JWT_DISABLE_BASE64) endif() if(JWT_DISABLE_PICOJSON) target_compile_definitions(jwt-cpp INTERFACE JWT_DISABLE_PICOJSON) endif() include(GNUInstallDirs) include(CMakePackageConfigHelpers) target_include_directories(jwt-cpp INTERFACE $ $) if(${JWT_SSL_LIBRARY} MATCHES "OpenSSL") target_link_libraries(jwt-cpp INTERFACE 
OpenSSL::SSL OpenSSL::Crypto) endif() if(${JWT_SSL_LIBRARY} MATCHES "LibreSSL") target_link_libraries(jwt-cpp INTERFACE LibreSSL::TLS) endif() if(${JWT_SSL_LIBRARY} MATCHES "wolfSSL") target_link_libraries(jwt-cpp INTERFACE PkgConfig::wolfssl) # This is required to access OpenSSL compatability API target_include_directories(jwt-cpp INTERFACE ${wolfssl_INCLUDE_DIRS}) target_compile_definitions(jwt-cpp INTERFACE OPENSSL_EXTRA OPENSSL_ALL) endif() if(JWT_EXTERNAL_PICOJSON) target_link_libraries(jwt-cpp INTERFACE picojson::picojson>) endif() # Hunter needs relative paths so the files are palced correctly if(NOT JWT_CMAKE_FILES_INSTALL_DIR) set(JWT_CMAKE_FILES_INSTALL_DIR cmake) endif() configure_package_config_file( ${CMAKE_CURRENT_LIST_DIR}/cmake/jwt-cpp-config.cmake.in ${CMAKE_CURRENT_BINARY_DIR}/jwt-cpp-config.cmake INSTALL_DESTINATION ${JWT_CMAKE_FILES_INSTALL_DIR} PATH_VARS JWT_EXTERNAL_PICOJSON JWT_SSL_LIBRARY) write_basic_package_version_file(${CMAKE_CURRENT_BINARY_DIR}/jwt-cpp-config-version.cmake VERSION 0.6.0 COMPATIBILITY ExactVersion) install(TARGETS jwt-cpp EXPORT jwt-cpp-targets PUBLIC_HEADER DESTINATION ${CMAKE_INSTALL_INCLUDEDIR}) install(EXPORT jwt-cpp-targets NAMESPACE jwt-cpp:: FILE jwt-cpp-targets.cmake DESTINATION ${JWT_CMAKE_FILES_INSTALL_DIR}) install(DIRECTORY ${JWT_INCLUDE_PATH}/jwt-cpp DESTINATION ${CMAKE_INSTALL_INCLUDEDIR}) if(NOT JWT_EXTERNAL_PICOJSON AND NOT JWT_DISABLE_PICOJSON) install(FILES ${JWT_INCLUDE_PATH}/picojson/picojson.h DESTINATION ${CMAKE_INSTALL_INCLUDEDIR}/picojson) endif() install(FILES ${CMAKE_CURRENT_BINARY_DIR}/jwt-cpp-config.cmake ${CMAKE_CURRENT_BINARY_DIR}/jwt-cpp-config-version.cmake DESTINATION ${JWT_CMAKE_FILES_INSTALL_DIR}) if(JWT_BUILD_EXAMPLES) add_subdirectory(example) endif() if(JWT_BUILD_TESTS) add_subdirectory(tests) endif() if(JWT_ENABLE_FUZZING) add_subdirectory(tests/fuzz) endif() scitokens-cpp-1.1.3/vendor/jwt-cpp/Doxyfile000066400000000000000000003243741475716002100207050ustar00rootroot00000000000000# 
Doxyfile 1.8.13 # This file describes the settings to be used by the documentation system # doxygen (www.doxygen.org) for a project. # # All text after a double hash (##) is considered a comment and is placed in # front of the TAG it is preceding. # # All text after a single hash (#) is considered a comment and will be ignored. # The format is: # TAG = value [value, ...] # For lists, items can also be appended using: # TAG += value [value, ...] # Values that contain spaces should be placed between quotes (\" \"). #--------------------------------------------------------------------------- # Project related configuration options #--------------------------------------------------------------------------- # This tag specifies the encoding used for all characters in the config file # that follow. The default is UTF-8 which is also the encoding used for all text # before the first occurrence of this tag. Doxygen uses libiconv (or the iconv # built into libc) for the transcoding. See http://www.gnu.org/software/libiconv # for the list of possible encodings. # The default value is: UTF-8. DOXYFILE_ENCODING = UTF-8 # The PROJECT_NAME tag is a single word (or a sequence of words surrounded by # double-quotes, unless you are using Doxywizard) that should identify the # project for which the documentation is generated. This name is used in the # title of most generated pages and in a few other places. # The default value is: My Project. PROJECT_NAME = JWT-CPP # The PROJECT_NUMBER tag can be used to enter a project or revision number. This # could be handy for archiving the generated documentation or if some version # control system is used. PROJECT_NUMBER = v0.6.0 # Using the PROJECT_BRIEF tag one can provide an optional one line description # for a project that appears at the top of each page and should give viewer a # quick idea about the purpose of the project. Keep the description short. 
PROJECT_BRIEF = "A header only library for creating and validating JSON Web Tokens (JWT) in C++" # With the PROJECT_LOGO tag one can specify a logo or an icon that is included # in the documentation. The maximum height of the logo should not exceed 55 # pixels and the maximum width should not exceed 200 pixels. Doxygen will copy # the logo to the output directory. PROJECT_LOGO = # The OUTPUT_DIRECTORY tag is used to specify the (relative or absolute) path # into which the generated documentation will be written. If a relative path is # entered, it will be relative to the location where doxygen was started. If # left blank the current directory will be used. OUTPUT_DIRECTORY = doxy # If the CREATE_SUBDIRS tag is set to YES then doxygen will create 4096 sub- # directories (in 2 levels) under the output directory of each output format and # will distribute the generated files over these directories. Enabling this # option can be useful when feeding doxygen a huge amount of source files, where # putting all generated files in the same directory would otherwise causes # performance problems for the file system. # The default value is: NO. CREATE_SUBDIRS = NO # If the ALLOW_UNICODE_NAMES tag is set to YES, doxygen will allow non-ASCII # characters to appear in the names of generated files. If set to NO, non-ASCII # characters will be escaped, for example _xE3_x81_x84 will be used for Unicode # U+3044. # The default value is: NO. ALLOW_UNICODE_NAMES = NO # The OUTPUT_LANGUAGE tag is used to specify the language in which all # documentation generated by doxygen is written. Doxygen will use this # information to generate all constant output in the proper language. 
# Possible values are: Afrikaans, Arabic, Armenian, Brazilian, Catalan, Chinese, # Chinese-Traditional, Croatian, Czech, Danish, Dutch, English (United States), # Esperanto, Farsi (Persian), Finnish, French, German, Greek, Hungarian, # Indonesian, Italian, Japanese, Japanese-en (Japanese with English messages), # Korean, Korean-en (Korean with English messages), Latvian, Lithuanian, # Macedonian, Norwegian, Persian (Farsi), Polish, Portuguese, Romanian, Russian, # Serbian, Serbian-Cyrillic, Slovak, Slovene, Spanish, Swedish, Turkish, # Ukrainian and Vietnamese. # The default value is: English. OUTPUT_LANGUAGE = English # If the BRIEF_MEMBER_DESC tag is set to YES, doxygen will include brief member # descriptions after the members that are listed in the file and class # documentation (similar to Javadoc). Set to NO to disable this. # The default value is: YES. BRIEF_MEMBER_DESC = YES # If the REPEAT_BRIEF tag is set to YES, doxygen will prepend the brief # description of a member or function before the detailed description # # Note: If both HIDE_UNDOC_MEMBERS and BRIEF_MEMBER_DESC are set to NO, the # brief descriptions will be completely suppressed. # The default value is: YES. REPEAT_BRIEF = YES # This tag implements a quasi-intelligent brief description abbreviator that is # used to form the text in various listings. Each string in this list, if found # as the leading text of the brief description, will be stripped from the text # and the result, after processing the whole list, is used as the annotated # text. Otherwise, the brief description is used as-is. If left blank, the # following values are used ($name is automatically replaced with the name of # the entity):The $name class, The $name widget, The $name file, is, provides, # specifies, contains, represents, a, an and the. 
ABBREVIATE_BRIEF = "The $name class" \ "The $name widget" \ "The $name file" \ is \ provides \ specifies \ contains \ represents \ a \ an \ the # If the ALWAYS_DETAILED_SEC and REPEAT_BRIEF tags are both set to YES then # doxygen will generate a detailed section even if there is only a brief # description. # The default value is: NO. ALWAYS_DETAILED_SEC = NO # If the INLINE_INHERITED_MEMB tag is set to YES, doxygen will show all # inherited members of a class in the documentation of that class as if those # members were ordinary class members. Constructors, destructors and assignment # operators of the base classes will not be shown. # The default value is: NO. INLINE_INHERITED_MEMB = NO # If the FULL_PATH_NAMES tag is set to YES, doxygen will prepend the full path # before files name in the file list and in the header files. If set to NO the # shortest path that makes the file name unique will be used # The default value is: YES. FULL_PATH_NAMES = YES # The STRIP_FROM_PATH tag can be used to strip a user-defined part of the path. # Stripping is only done if one of the specified strings matches the left-hand # part of the path. The tag can be used to show relative paths in the file list. # If left blank the directory from which doxygen is run is used as the path to # strip. # # Note that you can specify absolute paths here, but also relative paths, which # will be relative from the directory where doxygen is started. # This tag requires that the tag FULL_PATH_NAMES is set to YES. STRIP_FROM_PATH = # The STRIP_FROM_INC_PATH tag can be used to strip a user-defined part of the # path mentioned in the documentation of a class, which tells the reader which # header file to include in order to use a class. If left blank only the name of # the header file containing the class definition is used. Otherwise one should # specify the list of include paths that are normally passed to the compiler # using the -I flag. 
STRIP_FROM_INC_PATH = # If the SHORT_NAMES tag is set to YES, doxygen will generate much shorter (but # less readable) file names. This can be useful is your file systems doesn't # support long names like on DOS, Mac, or CD-ROM. # The default value is: NO. SHORT_NAMES = NO # If the JAVADOC_AUTOBRIEF tag is set to YES then doxygen will interpret the # first line (until the first dot) of a Javadoc-style comment as the brief # description. If set to NO, the Javadoc-style will behave just like regular Qt- # style comments (thus requiring an explicit @brief command for a brief # description.) # The default value is: NO. JAVADOC_AUTOBRIEF = NO # If the QT_AUTOBRIEF tag is set to YES then doxygen will interpret the first # line (until the first dot) of a Qt-style comment as the brief description. If # set to NO, the Qt-style will behave just like regular Qt-style comments (thus # requiring an explicit \brief command for a brief description.) # The default value is: NO. QT_AUTOBRIEF = NO # The MULTILINE_CPP_IS_BRIEF tag can be set to YES to make doxygen treat a # multi-line C++ special comment block (i.e. a block of //! or /// comments) as # a brief description. This used to be the default behavior. The new default is # to treat a multi-line C++ comment block as a detailed description. Set this # tag to YES if you prefer the old behavior instead. # # Note that setting this tag to YES also means that rational rose comments are # not recognized any more. # The default value is: NO. MULTILINE_CPP_IS_BRIEF = NO # If the INHERIT_DOCS tag is set to YES then an undocumented member inherits the # documentation from any documented member that it re-implements. # The default value is: YES. INHERIT_DOCS = YES # If the SEPARATE_MEMBER_PAGES tag is set to YES then doxygen will produce a new # page for each member. If set to NO, the documentation of a member will be part # of the file/class/namespace that contains it. # The default value is: NO. 
SEPARATE_MEMBER_PAGES = NO # The TAB_SIZE tag can be used to set the number of spaces in a tab. Doxygen # uses this value to replace tabs by spaces in code fragments. # Minimum value: 1, maximum value: 16, default value: 4. TAB_SIZE = 4 # This tag can be used to specify a number of aliases that act as commands in # the documentation. An alias has the form: # name=value # For example adding # "sideeffect=@par Side Effects:\n" # will allow you to put the command \sideeffect (or @sideeffect) in the # documentation, which will result in a user-defined paragraph with heading # "Side Effects:". You can put \n's in the value part of an alias to insert # newlines. ALIASES = # This tag can be used to specify a number of word-keyword mappings (TCL only). # A mapping has the form "name=value". For example adding "class=itcl::class" # will allow you to use the command class in the itcl::class meaning. TCL_SUBST = # Set the OPTIMIZE_OUTPUT_FOR_C tag to YES if your project consists of C sources # only. Doxygen will then generate output that is more tailored for C. For # instance, some of the names that are used will be different. The list of all # members will be omitted, etc. # The default value is: NO. OPTIMIZE_OUTPUT_FOR_C = NO # Set the OPTIMIZE_OUTPUT_JAVA tag to YES if your project consists of Java or # Python sources only. Doxygen will then generate output that is more tailored # for that language. For instance, namespaces will be presented as packages, # qualified scopes will look different, etc. # The default value is: NO. OPTIMIZE_OUTPUT_JAVA = NO # Set the OPTIMIZE_FOR_FORTRAN tag to YES if your project consists of Fortran # sources. Doxygen will then generate output that is tailored for Fortran. # The default value is: NO. OPTIMIZE_FOR_FORTRAN = NO # Set the OPTIMIZE_OUTPUT_VHDL tag to YES if your project consists of VHDL # sources. Doxygen will then generate output that is tailored for VHDL. # The default value is: NO. 
OPTIMIZE_OUTPUT_VHDL = NO # Doxygen selects the parser to use depending on the extension of the files it # parses. With this tag you can assign which parser to use for a given # extension. Doxygen has a built-in mapping, but you can override or extend it # using this tag. The format is ext=language, where ext is a file extension, and # language is one of the parsers supported by doxygen: IDL, Java, Javascript, # C#, C, C++, D, PHP, Objective-C, Python, Fortran (fixed format Fortran: # FortranFixed, free formatted Fortran: FortranFree, unknown formatted Fortran: # Fortran. In the later case the parser tries to guess whether the code is fixed # or free formatted code, this is the default for Fortran type files), VHDL. For # instance to make doxygen treat .inc files as Fortran files (default is PHP), # and .f files as C (default is Fortran), use: inc=Fortran f=C. # # Note: For files without extension you can use no_extension as a placeholder. # # Note that for custom extensions you also need to set FILE_PATTERNS otherwise # the files are not read by doxygen. EXTENSION_MAPPING = # If the MARKDOWN_SUPPORT tag is enabled then doxygen pre-processes all comments # according to the Markdown format, which allows for more readable # documentation. See http://daringfireball.net/projects/markdown/ for details. # The output of markdown processing is further processed by doxygen, so you can # mix doxygen, HTML, and XML commands with Markdown formatting. Disable only in # case of backward compatibilities issues. # The default value is: YES. MARKDOWN_SUPPORT = YES # When the TOC_INCLUDE_HEADINGS tag is set to a non-zero value, all headings up # to that level are automatically included in the table of contents, even if # they do not have an id attribute. # Note: This feature currently applies only to Markdown headings. # Minimum value: 0, maximum value: 99, default value: 0. # This tag requires that the tag MARKDOWN_SUPPORT is set to YES. 
TOC_INCLUDE_HEADINGS = 0 # When enabled doxygen tries to link words that correspond to documented # classes, or namespaces to their corresponding documentation. Such a link can # be prevented in individual cases by putting a % sign in front of the word or # globally by setting AUTOLINK_SUPPORT to NO. # The default value is: YES. AUTOLINK_SUPPORT = YES # If you use STL classes (i.e. std::string, std::vector, etc.) but do not want # to include (a tag file for) the STL sources as input, then you should set this # tag to YES in order to let doxygen match functions declarations and # definitions whose arguments contain STL classes (e.g. func(std::string); # versus func(std::string) {}). This also make the inheritance and collaboration # diagrams that involve STL classes more complete and accurate. # The default value is: NO. BUILTIN_STL_SUPPORT = YES # If you use Microsoft's C++/CLI language, you should set this option to YES to # enable parsing support. # The default value is: NO. CPP_CLI_SUPPORT = NO # Set the SIP_SUPPORT tag to YES if your project consists of sip (see: # http://www.riverbankcomputing.co.uk/software/sip/intro) sources only. Doxygen # will parse them like normal C++ but will assume all classes use public instead # of private inheritance when no explicit protection keyword is present. # The default value is: NO. SIP_SUPPORT = NO # For Microsoft's IDL there are propget and propput attributes to indicate # getter and setter methods for a property. Setting this option to YES will make # doxygen to replace the get and set methods by a property in the documentation. # This will only work if the methods are indeed getting or setting a simple # type. If this is not the case, or you want to show the methods anyway, you # should set this option to NO. # The default value is: YES. 
IDL_PROPERTY_SUPPORT = YES # If member grouping is used in the documentation and the DISTRIBUTE_GROUP_DOC # tag is set to YES then doxygen will reuse the documentation of the first # member in the group (if any) for the other members of the group. By default # all members of a group must be documented explicitly. # The default value is: NO. DISTRIBUTE_GROUP_DOC = NO # If one adds a struct or class to a group and this option is enabled, then also # any nested class or struct is added to the same group. By default this option # is disabled and one has to add nested compounds explicitly via \ingroup. # The default value is: NO. GROUP_NESTED_COMPOUNDS = NO # Set the SUBGROUPING tag to YES to allow class member groups of the same type # (for instance a group of public functions) to be put as a subgroup of that # type (e.g. under the Public Functions section). Set it to NO to prevent # subgrouping. Alternatively, this can be done per class using the # \nosubgrouping command. # The default value is: YES. SUBGROUPING = YES # When the INLINE_GROUPED_CLASSES tag is set to YES, classes, structs and unions # are shown inside the group in which they are included (e.g. using \ingroup) # instead of on a separate page (for HTML and Man pages) or section (for LaTeX # and RTF). # # Note that this feature does not work in combination with # SEPARATE_MEMBER_PAGES. # The default value is: NO. INLINE_GROUPED_CLASSES = NO # When the INLINE_SIMPLE_STRUCTS tag is set to YES, structs, classes, and unions # with only public data fields or simple typedef fields will be shown inline in # the documentation of the scope in which they are defined (i.e. file, # namespace, or group documentation), provided this scope is documented. If set # to NO, structs, classes, and unions are shown on a separate page (for HTML and # Man pages) or section (for LaTeX and RTF). # The default value is: NO. 
INLINE_SIMPLE_STRUCTS = NO # When TYPEDEF_HIDES_STRUCT tag is enabled, a typedef of a struct, union, or # enum is documented as struct, union, or enum with the name of the typedef. So # typedef struct TypeS {} TypeT, will appear in the documentation as a struct # with name TypeT. When disabled the typedef will appear as a member of a file, # namespace, or class. And the struct will be named TypeS. This can typically be # useful for C code in case the coding convention dictates that all compound # types are typedef'ed and only the typedef is referenced, never the tag name. # The default value is: NO. TYPEDEF_HIDES_STRUCT = NO # The size of the symbol lookup cache can be set using LOOKUP_CACHE_SIZE. This # cache is used to resolve symbols given their name and scope. Since this can be # an expensive process and often the same symbol appears multiple times in the # code, doxygen keeps a cache of pre-resolved symbols. If the cache is too small # doxygen will become slower. If the cache is too large, memory is wasted. The # cache size is given by this formula: 2^(16+LOOKUP_CACHE_SIZE). The valid range # is 0..9, the default is 0, corresponding to a cache size of 2^16=65536 # symbols. At the end of a run doxygen will report the cache usage and suggest # the optimal cache size from a speed point of view. # Minimum value: 0, maximum value: 9, default value: 0. LOOKUP_CACHE_SIZE = 0 #--------------------------------------------------------------------------- # Build related configuration options #--------------------------------------------------------------------------- # If the EXTRACT_ALL tag is set to YES, doxygen will assume all entities in # documentation are documented, even if no documentation was available. Private # class members and static file members will be hidden unless the # EXTRACT_PRIVATE respectively EXTRACT_STATIC tags are set to YES. 
# Note: This will also disable the warnings about undocumented members that are # normally produced when WARNINGS is set to YES. # The default value is: NO. EXTRACT_ALL = NO # If the EXTRACT_PRIVATE tag is set to YES, all private members of a class will # be included in the documentation. # The default value is: NO. EXTRACT_PRIVATE = NO # If the EXTRACT_PACKAGE tag is set to YES, all members with package or internal # scope will be included in the documentation. # The default value is: NO. EXTRACT_PACKAGE = NO # If the EXTRACT_STATIC tag is set to YES, all static members of a file will be # included in the documentation. # The default value is: NO. EXTRACT_STATIC = NO # If the EXTRACT_LOCAL_CLASSES tag is set to YES, classes (and structs) defined # locally in source files will be included in the documentation. If set to NO, # only classes defined in header files are included. Does not have any effect # for Java sources. # The default value is: YES. EXTRACT_LOCAL_CLASSES = YES # This flag is only useful for Objective-C code. If set to YES, local methods, # which are defined in the implementation section but not in the interface are # included in the documentation. If set to NO, only methods in the interface are # included. # The default value is: NO. EXTRACT_LOCAL_METHODS = NO # If this flag is set to YES, the members of anonymous namespaces will be # extracted and appear in the documentation as a namespace called # 'anonymous_namespace{file}', where file will be replaced with the base name of # the file that contains the anonymous namespace. By default anonymous namespace # are hidden. # The default value is: NO. EXTRACT_ANON_NSPACES = NO # If the HIDE_UNDOC_MEMBERS tag is set to YES, doxygen will hide all # undocumented members inside documented classes or files. If set to NO these # members will be included in the various overviews, but no documentation # section is generated. This option has no effect if EXTRACT_ALL is enabled. # The default value is: NO. 
HIDE_UNDOC_MEMBERS = NO # If the HIDE_UNDOC_CLASSES tag is set to YES, doxygen will hide all # undocumented classes that are normally visible in the class hierarchy. If set # to NO, these classes will be included in the various overviews. This option # has no effect if EXTRACT_ALL is enabled. # The default value is: NO. HIDE_UNDOC_CLASSES = NO # If the HIDE_FRIEND_COMPOUNDS tag is set to YES, doxygen will hide all friend # (class|struct|union) declarations. If set to NO, these declarations will be # included in the documentation. # The default value is: NO. HIDE_FRIEND_COMPOUNDS = NO # If the HIDE_IN_BODY_DOCS tag is set to YES, doxygen will hide any # documentation blocks found inside the body of a function. If set to NO, these # blocks will be appended to the function's detailed documentation block. # The default value is: NO. HIDE_IN_BODY_DOCS = NO # The INTERNAL_DOCS tag determines if documentation that is typed after a # \internal command is included. If the tag is set to NO then the documentation # will be excluded. Set it to YES to include the internal documentation. # The default value is: NO. INTERNAL_DOCS = NO # If the CASE_SENSE_NAMES tag is set to NO then doxygen will only generate file # names in lower-case letters. If set to YES, upper-case letters are also # allowed. This is useful if you have classes or files whose names only differ # in case and if your file system supports case sensitive file names. Windows # and Mac users are advised to set this option to NO. # The default value is: system dependent. CASE_SENSE_NAMES = YES # If the HIDE_SCOPE_NAMES tag is set to NO then doxygen will show members with # their full class and namespace scopes in the documentation. If set to YES, the # scope will be hidden. # The default value is: NO. HIDE_SCOPE_NAMES = NO # If the HIDE_COMPOUND_REFERENCE tag is set to NO (default) then doxygen will # append additional text to a page's title, such as Class Reference. 
If set to # YES the compound reference will be hidden. # The default value is: NO. HIDE_COMPOUND_REFERENCE= NO # If the SHOW_INCLUDE_FILES tag is set to YES then doxygen will put a list of # the files that are included by a file in the documentation of that file. # The default value is: YES. SHOW_INCLUDE_FILES = YES # If the SHOW_GROUPED_MEMB_INC tag is set to YES then Doxygen will add for each # grouped member an include statement to the documentation, telling the reader # which file to include in order to use the member. # The default value is: NO. SHOW_GROUPED_MEMB_INC = NO # If the FORCE_LOCAL_INCLUDES tag is set to YES then doxygen will list include # files with double quotes in the documentation rather than with sharp brackets. # The default value is: NO. FORCE_LOCAL_INCLUDES = NO # If the INLINE_INFO tag is set to YES then a tag [inline] is inserted in the # documentation for inline members. # The default value is: YES. INLINE_INFO = YES # If the SORT_MEMBER_DOCS tag is set to YES then doxygen will sort the # (detailed) documentation of file and class members alphabetically by member # name. If set to NO, the members will appear in declaration order. # The default value is: YES. SORT_MEMBER_DOCS = YES # If the SORT_BRIEF_DOCS tag is set to YES then doxygen will sort the brief # descriptions of file, namespace and class members alphabetically by member # name. If set to NO, the members will appear in declaration order. Note that # this will also influence the order of the classes in the class list. # The default value is: NO. SORT_BRIEF_DOCS = NO # If the SORT_MEMBERS_CTORS_1ST tag is set to YES then doxygen will sort the # (brief and detailed) documentation of class members so that constructors and # destructors are listed first. If set to NO the constructors will appear in the # respective orders defined by SORT_BRIEF_DOCS and SORT_MEMBER_DOCS. # Note: If SORT_BRIEF_DOCS is set to NO this option is ignored for sorting brief # member documentation. 
# Note: If SORT_MEMBER_DOCS is set to NO this option is ignored for sorting # detailed member documentation. # The default value is: NO. SORT_MEMBERS_CTORS_1ST = NO # If the SORT_GROUP_NAMES tag is set to YES then doxygen will sort the hierarchy # of group names into alphabetical order. If set to NO the group names will # appear in their defined order. # The default value is: NO. SORT_GROUP_NAMES = NO # If the SORT_BY_SCOPE_NAME tag is set to YES, the class list will be sorted by # fully-qualified names, including namespaces. If set to NO, the class list will # be sorted only by class name, not including the namespace part. # Note: This option is not very useful if HIDE_SCOPE_NAMES is set to YES. # Note: This option applies only to the class list, not to the alphabetical # list. # The default value is: NO. SORT_BY_SCOPE_NAME = NO # If the STRICT_PROTO_MATCHING option is enabled and doxygen fails to do proper # type resolution of all parameters of a function it will reject a match between # the prototype and the implementation of a member function even if there is # only one candidate or it is obvious which candidate to choose by doing a # simple string match. By disabling STRICT_PROTO_MATCHING doxygen will still # accept a match between prototype and implementation in such cases. # The default value is: NO. STRICT_PROTO_MATCHING = NO # The GENERATE_TODOLIST tag can be used to enable (YES) or disable (NO) the todo # list. This list is created by putting \todo commands in the documentation. # The default value is: YES. GENERATE_TODOLIST = YES # The GENERATE_TESTLIST tag can be used to enable (YES) or disable (NO) the test # list. This list is created by putting \test commands in the documentation. # The default value is: YES. GENERATE_TESTLIST = YES # The GENERATE_BUGLIST tag can be used to enable (YES) or disable (NO) the bug # list. This list is created by putting \bug commands in the documentation. # The default value is: YES. 
GENERATE_BUGLIST = YES # The GENERATE_DEPRECATEDLIST tag can be used to enable (YES) or disable (NO) # the deprecated list. This list is created by putting \deprecated commands in # the documentation. # The default value is: YES. GENERATE_DEPRECATEDLIST= YES # The ENABLED_SECTIONS tag can be used to enable conditional documentation # sections, marked by \if ... \endif and \cond # ... \endcond blocks. ENABLED_SECTIONS = # The MAX_INITIALIZER_LINES tag determines the maximum number of lines that the # initial value of a variable or macro / define can have for it to appear in the # documentation. If the initializer consists of more lines than specified here # it will be hidden. Use a value of 0 to hide initializers completely. The # appearance of the value of individual variables and macros / defines can be # controlled using \showinitializer or \hideinitializer command in the # documentation regardless of this setting. # Minimum value: 0, maximum value: 10000, default value: 30. MAX_INITIALIZER_LINES = 30 # Set the SHOW_USED_FILES tag to NO to disable the list of files generated at # the bottom of the documentation of classes and structs. If set to YES, the # list will mention the files that were used to generate the documentation. # The default value is: YES. SHOW_USED_FILES = YES # Set the SHOW_FILES tag to NO to disable the generation of the Files page. This # will remove the Files entry from the Quick Index and from the Folder Tree View # (if specified). # The default value is: YES. SHOW_FILES = YES # Set the SHOW_NAMESPACES tag to NO to disable the generation of the Namespaces # page. This will remove the Namespaces entry from the Quick Index and from the # Folder Tree View (if specified). # The default value is: YES. SHOW_NAMESPACES = YES # The FILE_VERSION_FILTER tag can be used to specify a program or script that # doxygen should invoke to get the current version for each file (typically from # the version control system). 
Doxygen will invoke the program by executing (via # popen()) the command command input-file, where command is the value of the # FILE_VERSION_FILTER tag, and input-file is the name of an input file provided # by doxygen. Whatever the program writes to standard output is used as the file # version. For an example see the documentation. FILE_VERSION_FILTER = # The LAYOUT_FILE tag can be used to specify a layout file which will be parsed # by doxygen. The layout file controls the global structure of the generated # output files in an output format independent way. To create the layout file # that represents doxygen's defaults, run doxygen with the -l option. You can # optionally specify a file name after the option, if omitted DoxygenLayout.xml # will be used as the name of the layout file. # # Note that if you run doxygen from a directory containing a file called # DoxygenLayout.xml, doxygen will parse it automatically even if the LAYOUT_FILE # tag is left empty. LAYOUT_FILE = # The CITE_BIB_FILES tag can be used to specify one or more bib files containing # the reference definitions. This must be a list of .bib files. The .bib # extension is automatically appended if omitted. This requires the bibtex tool # to be installed. See also http://en.wikipedia.org/wiki/BibTeX for more info. # For LaTeX the style of the bibliography can be controlled using # LATEX_BIB_STYLE. To use this feature you need bibtex and perl available in the # search path. See also \cite for info how to create references. CITE_BIB_FILES = #--------------------------------------------------------------------------- # Configuration options related to warning and progress messages #--------------------------------------------------------------------------- # The QUIET tag can be used to turn on/off the messages that are generated to # standard output by doxygen. If QUIET is set to YES this implies that the # messages are off. # The default value is: NO. 
QUIET = NO # The WARNINGS tag can be used to turn on/off the warning messages that are # generated to standard error (stderr) by doxygen. If WARNINGS is set to YES # this implies that the warnings are on. # # Tip: Turn warnings on while writing the documentation. # The default value is: YES. WARNINGS = YES # If the WARN_IF_UNDOCUMENTED tag is set to YES then doxygen will generate # warnings for undocumented members. If EXTRACT_ALL is set to YES then this flag # will automatically be disabled. # The default value is: YES. WARN_IF_UNDOCUMENTED = YES # If the WARN_IF_DOC_ERROR tag is set to YES, doxygen will generate warnings for # potential errors in the documentation, such as not documenting some parameters # in a documented function, or documenting parameters that don't exist or using # markup commands wrongly. # The default value is: YES. WARN_IF_DOC_ERROR = YES # This WARN_NO_PARAMDOC option can be enabled to get warnings for functions that # are documented, but have no documentation for their parameters or return # value. If set to NO, doxygen will only warn about wrong or incomplete # parameter documentation, but not about the absence of documentation. # The default value is: NO. WARN_NO_PARAMDOC = NO # If the WARN_AS_ERROR tag is set to YES then doxygen will immediately stop when # a warning is encountered. # The default value is: NO. WARN_AS_ERROR = NO # The WARN_FORMAT tag determines the format of the warning messages that doxygen # can produce. The string should contain the $file, $line, and $text tags, which # will be replaced by the file and line number from which the warning originated # and the warning text. Optionally the format may contain $version, which will # be replaced by the version of the file (if it could be obtained via # FILE_VERSION_FILTER) # The default value is: $file:$line: $text. WARN_FORMAT = "$file:$line: $text" # The WARN_LOGFILE tag can be used to specify a file to which warning and error # messages should be written. 
If left blank the output is written to standard # error (stderr). WARN_LOGFILE = #--------------------------------------------------------------------------- # Configuration options related to the input files #--------------------------------------------------------------------------- # The INPUT tag is used to specify the files and/or directories that contain # documented source files. You may enter file names like myfile.cpp or # directories like /usr/src/myproject. Separate the files or directories with # spaces. See also FILE_PATTERNS and EXTENSION_MAPPING # Note: If this tag is empty the current directory is searched. INPUT = include README.md docs # This tag can be used to specify the character encoding of the source files # that doxygen parses. Internally doxygen uses the UTF-8 encoding. Doxygen uses # libiconv (or the iconv built into libc) for the transcoding. See the libiconv # documentation (see: http://www.gnu.org/software/libiconv) for the list of # possible encodings. # The default value is: UTF-8. INPUT_ENCODING = UTF-8 # If the value of the INPUT tag contains directories, you can use the # FILE_PATTERNS tag to specify one or more wildcard patterns (like *.cpp and # *.h) to filter out the source-files in the directories. # # Note that for custom extensions or not directly supported extensions you also # need to set EXTENSION_MAPPING for the extension otherwise the files are not # read by doxygen. # # If left blank the following patterns are tested:*.c, *.cc, *.cxx, *.cpp, # *.c++, *.java, *.ii, *.ixx, *.ipp, *.i++, *.inl, *.idl, *.ddl, *.odl, *.h, # *.hh, *.hxx, *.hpp, *.h++, *.cs, *.d, *.php, *.php4, *.php5, *.phtml, *.inc, # *.m, *.markdown, *.md, *.mm, *.dox, *.py, *.pyw, *.f90, *.f95, *.f03, *.f08, # *.f, *.for, *.tcl, *.vhd, *.vhdl, *.ucf and *.qsf. 
FILE_PATTERNS = *.c \ *.cc \ *.cxx \ *.cpp \ *.c++ \ *.java \ *.ii \ *.ixx \ *.ipp \ *.i++ \ *.inl \ *.idl \ *.ddl \ *.odl \ *.h \ *.hh \ *.hxx \ *.hpp \ *.h++ \ *.cs \ *.d \ *.php \ *.php4 \ *.php5 \ *.phtml \ *.inc \ *.m \ *.markdown \ *.md \ *.mm \ *.dox \ *.py \ *.pyw \ *.f90 \ *.f95 \ *.f03 \ *.f08 \ *.f \ *.for \ *.tcl \ *.vhd \ *.vhdl \ *.ucf \ *.qsf # The RECURSIVE tag can be used to specify whether or not subdirectories should # be searched for input files as well. # The default value is: NO. RECURSIVE = YES # The EXCLUDE tag can be used to specify files and/or directories that should be # excluded from the INPUT source files. This way you can easily exclude a # subdirectory from a directory tree whose root is specified with the INPUT tag. # # Note that relative paths are relative to the directory from which doxygen is # run. EXCLUDE = include/nlohmann/json.hpp include/picojson/picojson.h # The EXCLUDE_SYMLINKS tag can be used to select whether or not files or # directories that are symbolic links (a Unix file system feature) are excluded # from the input. # The default value is: NO. EXCLUDE_SYMLINKS = NO # If the value of the INPUT tag contains directories, you can use the # EXCLUDE_PATTERNS tag to specify one or more wildcard patterns to exclude # certain files from those directories. # # Note that the wildcards are matched against the file with absolute path, so to # exclude all test directories for example use the pattern */test/* EXCLUDE_PATTERNS = # The EXCLUDE_SYMBOLS tag can be used to specify one or more symbol names # (namespaces, classes, functions, etc.) that should be excluded from the # output. The symbol name can be a fully qualified name, a word, or if the # wildcard * is used, a substring. 
Examples: ANamespace, AClass, # AClass::ANamespace, ANamespace::*Test # # Note that the wildcards are matched against the file with absolute path, so to # exclude all test directories use the pattern */test/* EXCLUDE_SYMBOLS = jwt::details, std # The EXAMPLE_PATH tag can be used to specify one or more files or directories # that contain example code fragments that are included (see the \include # command). EXAMPLE_PATH = example # If the value of the EXAMPLE_PATH tag contains directories, you can use the # EXAMPLE_PATTERNS tag to specify one or more wildcard pattern (like *.cpp and # *.h) to filter out the source-files in the directories. If left blank all # files are included. EXAMPLE_PATTERNS = * # If the EXAMPLE_RECURSIVE tag is set to YES then subdirectories will be # searched for input files to be used with the \include or \dontinclude commands # irrespective of the value of the RECURSIVE tag. # The default value is: NO. EXAMPLE_RECURSIVE = YES # The IMAGE_PATH tag can be used to specify one or more files or directories # that contain images that are to be included in the documentation (see the # \image command). IMAGE_PATH = # The INPUT_FILTER tag can be used to specify a program that doxygen should # invoke to filter for each input file. Doxygen will invoke the filter program # by executing (via popen()) the command: # # # # where is the value of the INPUT_FILTER tag, and is the # name of an input file. Doxygen will then use the output that the filter # program writes to standard output. If FILTER_PATTERNS is specified, this tag # will be ignored. # # Note that the filter must not add or remove lines; it is applied before the # code is scanned, but not when the output code is generated. If lines are added # or removed, the anchors will not be placed correctly. # # Note that for custom extensions or not directly supported extensions you also # need to set EXTENSION_MAPPING for the extension otherwise the files are not # properly processed by doxygen. 
INPUT_FILTER = # The FILTER_PATTERNS tag can be used to specify filters on a per file pattern # basis. Doxygen will compare the file name with each pattern and apply the # filter if there is a match. The filters are a list of the form: pattern=filter # (like *.cpp=my_cpp_filter). See INPUT_FILTER for further information on how # filters are used. If the FILTER_PATTERNS tag is empty or if none of the # patterns match the file name, INPUT_FILTER is applied. # # Note that for custom extensions or not directly supported extensions you also # need to set EXTENSION_MAPPING for the extension otherwise the files are not # properly processed by doxygen. FILTER_PATTERNS = # If the FILTER_SOURCE_FILES tag is set to YES, the input filter (if set using # INPUT_FILTER) will also be used to filter the input files that are used for # producing the source files to browse (i.e. when SOURCE_BROWSER is set to YES). # The default value is: NO. FILTER_SOURCE_FILES = NO # The FILTER_SOURCE_PATTERNS tag can be used to specify source filters per file # pattern. A pattern will override the setting for FILTER_PATTERN (if any) and # it is also possible to disable source filtering for a specific pattern using # *.ext= (so without naming a filter). # This tag requires that the tag FILTER_SOURCE_FILES is set to YES. FILTER_SOURCE_PATTERNS = # If the USE_MDFILE_AS_MAINPAGE tag refers to the name of a markdown file that # is part of the input, its contents will be placed on the main page # (index.html). This can be useful if you have a project on for instance GitHub # and want to reuse the introduction page also for the doxygen output. USE_MDFILE_AS_MAINPAGE = README.md #--------------------------------------------------------------------------- # Configuration options related to source browsing #--------------------------------------------------------------------------- # If the SOURCE_BROWSER tag is set to YES then a list of source files will be # generated. 
Documented entities will be cross-referenced with these sources. # # Note: To get rid of all source code in the generated output, make sure that # also VERBATIM_HEADERS is set to NO. # The default value is: NO. SOURCE_BROWSER = NO # Setting the INLINE_SOURCES tag to YES will include the body of functions, # classes and enums directly into the documentation. # The default value is: NO. INLINE_SOURCES = NO # Setting the STRIP_CODE_COMMENTS tag to YES will instruct doxygen to hide any # special comment blocks from generated source code fragments. Normal C, C++ and # Fortran comments will always remain visible. # The default value is: YES. STRIP_CODE_COMMENTS = YES # If the REFERENCED_BY_RELATION tag is set to YES then for each documented # function all documented functions referencing it will be listed. # The default value is: NO. REFERENCED_BY_RELATION = NO # If the REFERENCES_RELATION tag is set to YES then for each documented function # all documented entities called/used by that function will be listed. # The default value is: NO. REFERENCES_RELATION = NO # If the REFERENCES_LINK_SOURCE tag is set to YES and SOURCE_BROWSER tag is set # to YES then the hyperlinks from functions in REFERENCES_RELATION and # REFERENCED_BY_RELATION lists will link to the source code. Otherwise they will # link to the documentation. # The default value is: YES. REFERENCES_LINK_SOURCE = YES # If SOURCE_TOOLTIPS is enabled (the default) then hovering a hyperlink in the # source code will show a tooltip with additional information such as prototype, # brief description and links to the definition and documentation. Since this # will make the HTML file larger and loading of large files a bit slower, you # can opt to disable this feature. # The default value is: YES. # This tag requires that the tag SOURCE_BROWSER is set to YES. 
SOURCE_TOOLTIPS = YES # If the USE_HTAGS tag is set to YES then the references to source code will # point to the HTML generated by the htags(1) tool instead of doxygen built-in # source browser. The htags tool is part of GNU's global source tagging system # (see http://www.gnu.org/software/global/global.html). You will need version # 4.8.6 or higher. # # To use it do the following: # - Install the latest version of global # - Enable SOURCE_BROWSER and USE_HTAGS in the config file # - Make sure the INPUT points to the root of the source tree # - Run doxygen as normal # # Doxygen will invoke htags (and that will in turn invoke gtags), so these # tools must be available from the command line (i.e. in the search path). # # The result: instead of the source browser generated by doxygen, the links to # source code will now point to the output of htags. # The default value is: NO. # This tag requires that the tag SOURCE_BROWSER is set to YES. USE_HTAGS = NO # If the VERBATIM_HEADERS tag is set the YES then doxygen will generate a # verbatim copy of the header file for each class for which an include is # specified. Set to NO to disable this. # See also: Section \class. # The default value is: YES. VERBATIM_HEADERS = YES # If the CLANG_ASSISTED_PARSING tag is set to YES then doxygen will use the # clang parser (see: http://clang.llvm.org/) for more accurate parsing at the # cost of reduced performance. This can be particularly helpful with template # rich C++ code for which doxygen's built-in parser lacks the necessary type # information. # Note: The availability of this option depends on whether or not doxygen was # generated with the -Duse-libclang=ON option for CMake. # The default value is: NO. CLANG_ASSISTED_PARSING = NO # If clang assisted parsing is enabled you can provide the compiler with command # line options that you would normally use when invoking the compiler. 
Note that # the include paths will already be set by doxygen for the files and directories # specified with INPUT and INCLUDE_PATH. # This tag requires that the tag CLANG_ASSISTED_PARSING is set to YES. CLANG_OPTIONS = #--------------------------------------------------------------------------- # Configuration options related to the alphabetical class index #--------------------------------------------------------------------------- # If the ALPHABETICAL_INDEX tag is set to YES, an alphabetical index of all # compounds will be generated. Enable this if the project contains a lot of # classes, structs, unions or interfaces. # The default value is: YES. ALPHABETICAL_INDEX = YES # The COLS_IN_ALPHA_INDEX tag can be used to specify the number of columns in # which the alphabetical index list will be split. # Minimum value: 1, maximum value: 20, default value: 5. # This tag requires that the tag ALPHABETICAL_INDEX is set to YES. COLS_IN_ALPHA_INDEX = 5 # In case all classes in a project start with a common prefix, all classes will # be put under the same header in the alphabetical index. The IGNORE_PREFIX tag # can be used to specify a prefix (or a list of prefixes) that should be ignored # while generating the index headers. # This tag requires that the tag ALPHABETICAL_INDEX is set to YES. IGNORE_PREFIX = #--------------------------------------------------------------------------- # Configuration options related to the HTML output #--------------------------------------------------------------------------- # If the GENERATE_HTML tag is set to YES, doxygen will generate HTML output # The default value is: YES. GENERATE_HTML = YES # The HTML_OUTPUT tag is used to specify where the HTML docs will be put. If a # relative path is entered the value of OUTPUT_DIRECTORY will be put in front of # it. # The default directory is: html. # This tag requires that the tag GENERATE_HTML is set to YES. 
HTML_OUTPUT = html # The HTML_FILE_EXTENSION tag can be used to specify the file extension for each # generated HTML page (for example: .htm, .php, .asp). # The default value is: .html. # This tag requires that the tag GENERATE_HTML is set to YES. HTML_FILE_EXTENSION = .html # The HTML_HEADER tag can be used to specify a user-defined HTML header file for # each generated HTML page. If the tag is left blank doxygen will generate a # standard header. # # To get valid HTML the header file that includes any scripts and style sheets # that doxygen needs, which is dependent on the configuration options used (e.g. # the setting GENERATE_TREEVIEW). It is highly recommended to start with a # default header using # doxygen -w html new_header.html new_footer.html new_stylesheet.css # YourConfigFile # and then modify the file new_header.html. See also section "Doxygen usage" # for information on how to generate the default header that doxygen normally # uses. # Note: The header is subject to change so you typically have to regenerate the # default header when upgrading to a newer version of doxygen. For a description # of the possible markers and block names see the documentation. # This tag requires that the tag GENERATE_HTML is set to YES. HTML_HEADER = # The HTML_FOOTER tag can be used to specify a user-defined HTML footer for each # generated HTML page. If the tag is left blank doxygen will generate a standard # footer. See HTML_HEADER for more information on how to generate a default # footer and what special commands can be used inside the footer. See also # section "Doxygen usage" for information on how to generate the default footer # that doxygen normally uses. # This tag requires that the tag GENERATE_HTML is set to YES. HTML_FOOTER = # The HTML_STYLESHEET tag can be used to specify a user-defined cascading style # sheet that is used by each HTML page. It can be used to fine-tune the look of # the HTML output. 
If left blank doxygen will generate a default style sheet. # See also section "Doxygen usage" for information on how to generate the style # sheet that doxygen normally uses. # Note: It is recommended to use HTML_EXTRA_STYLESHEET instead of this tag, as # it is more robust and this tag (HTML_STYLESHEET) will in the future become # obsolete. # This tag requires that the tag GENERATE_HTML is set to YES. HTML_STYLESHEET = # The HTML_EXTRA_STYLESHEET tag can be used to specify additional user-defined # cascading style sheets that are included after the standard style sheets # created by doxygen. Using this option one can overrule certain style aspects. # This is preferred over using HTML_STYLESHEET since it does not replace the # standard style sheet and is therefore more robust against future updates. # Doxygen will copy the style sheet files to the output directory. # Note: The order of the extra style sheet files is of importance (e.g. the last # style sheet in the list overrules the setting of the previous ones in the # list). For an example see the documentation. # This tag requires that the tag GENERATE_HTML is set to YES. HTML_EXTRA_STYLESHEET = doxygen-awesome.css doxygen-awesome-sidebar-only.css .github/overrides.css # The HTML_EXTRA_FILES tag can be used to specify one or more extra images or # other source files which should be copied to the HTML output directory. Note # that these files will be copied to the base HTML output directory. Use the # $relpath^ marker in the HTML_HEADER and/or HTML_FOOTER files to load these # files. In the HTML_STYLESHEET file, use the file name only. Also note that the # files will be copied as-is; there are no commands or markers available. # This tag requires that the tag GENERATE_HTML is set to YES. HTML_EXTRA_FILES = # The HTML_COLORSTYLE_HUE tag controls the color of the HTML output. Doxygen # will adjust the colors in the style sheet and background images according to # this color. 
Hue is specified as an angle on a colorwheel, see # http://en.wikipedia.org/wiki/Hue for more information. For instance the value # 0 represents red, 60 is yellow, 120 is green, 180 is cyan, 240 is blue, 300 # purple, and 360 is red again. # Minimum value: 0, maximum value: 359, default value: 220. # This tag requires that the tag GENERATE_HTML is set to YES. HTML_COLORSTYLE_HUE = 220 # The HTML_COLORSTYLE_SAT tag controls the purity (or saturation) of the colors # in the HTML output. For a value of 0 the output will use grayscales only. A # value of 255 will produce the most vivid colors. # Minimum value: 0, maximum value: 255, default value: 100. # This tag requires that the tag GENERATE_HTML is set to YES. HTML_COLORSTYLE_SAT = 100 # The HTML_COLORSTYLE_GAMMA tag controls the gamma correction applied to the # luminance component of the colors in the HTML output. Values below 100 # gradually make the output lighter, whereas values above 100 make the output # darker. The value divided by 100 is the actual gamma applied, so 80 represents # a gamma of 0.8, The value 220 represents a gamma of 2.2, and 100 does not # change the gamma. # Minimum value: 40, maximum value: 240, default value: 80. # This tag requires that the tag GENERATE_HTML is set to YES. HTML_COLORSTYLE_GAMMA = 80 # If the HTML_TIMESTAMP tag is set to YES then the footer of each generated HTML # page will contain the date and time when the page was generated. Setting this # to YES can help to show when doxygen was last run and thus if the # documentation is up to date. # The default value is: NO. # This tag requires that the tag GENERATE_HTML is set to YES. HTML_TIMESTAMP = NO # If the HTML_DYNAMIC_SECTIONS tag is set to YES then the generated HTML # documentation will contain sections that can be hidden and shown after the # page has loaded. # The default value is: NO. # This tag requires that the tag GENERATE_HTML is set to YES. 
HTML_DYNAMIC_SECTIONS = NO # With HTML_INDEX_NUM_ENTRIES one can control the preferred number of entries # shown in the various tree structured indices initially; the user can expand # and collapse entries dynamically later on. Doxygen will expand the tree to # such a level that at most the specified number of entries are visible (unless # a fully collapsed tree already exceeds this amount). So setting the number of # entries 1 will produce a full collapsed tree by default. 0 is a special value # representing an infinite number of entries and will result in a full expanded # tree by default. # Minimum value: 0, maximum value: 9999, default value: 100. # This tag requires that the tag GENERATE_HTML is set to YES. HTML_INDEX_NUM_ENTRIES = 100 # If the GENERATE_DOCSET tag is set to YES, additional index files will be # generated that can be used as input for Apple's Xcode 3 integrated development # environment (see: http://developer.apple.com/tools/xcode/), introduced with # OSX 10.5 (Leopard). To create a documentation set, doxygen will generate a # Makefile in the HTML output directory. Running make will produce the docset in # that directory and running make install will install the docset in # ~/Library/Developer/Shared/Documentation/DocSets so that Xcode will find it at # startup. See http://developer.apple.com/tools/creatingdocsetswithdoxygen.html # for more information. # The default value is: NO. # This tag requires that the tag GENERATE_HTML is set to YES. GENERATE_DOCSET = NO # This tag determines the name of the docset feed. A documentation feed provides # an umbrella under which multiple documentation sets from a single provider # (such as a company or product suite) can be grouped. # The default value is: Doxygen generated docs. # This tag requires that the tag GENERATE_DOCSET is set to YES. DOCSET_FEEDNAME = "Doxygen generated docs" # This tag specifies a string that should uniquely identify the documentation # set bundle. 
This should be a reverse domain-name style string, e.g. # com.mycompany.MyDocSet. Doxygen will append .docset to the name. # The default value is: org.doxygen.Project. # This tag requires that the tag GENERATE_DOCSET is set to YES. DOCSET_BUNDLE_ID = org.doxygen.Project # The DOCSET_PUBLISHER_ID tag specifies a string that should uniquely identify # the documentation publisher. This should be a reverse domain-name style # string, e.g. com.mycompany.MyDocSet.documentation. # The default value is: org.doxygen.Publisher. # This tag requires that the tag GENERATE_DOCSET is set to YES. DOCSET_PUBLISHER_ID = org.doxygen.Publisher # The DOCSET_PUBLISHER_NAME tag identifies the documentation publisher. # The default value is: Publisher. # This tag requires that the tag GENERATE_DOCSET is set to YES. DOCSET_PUBLISHER_NAME = Publisher # If the GENERATE_HTMLHELP tag is set to YES then doxygen generates three # additional HTML index files: index.hhp, index.hhc, and index.hhk. The # index.hhp is a project file that can be read by Microsoft's HTML Help Workshop # (see: http://www.microsoft.com/en-us/download/details.aspx?id=21138) on # Windows. # # The HTML Help Workshop contains a compiler that can convert all HTML output # generated by doxygen into a single compiled HTML file (.chm). Compiled HTML # files are now used as the Windows 98 help format, and will replace the old # Windows help format (.hlp) on all Windows platforms in the future. Compressed # HTML files also contain an index, a table of contents, and you can search for # words in the documentation. The HTML workshop also contains a viewer for # compressed HTML files. # The default value is: NO. # This tag requires that the tag GENERATE_HTML is set to YES. GENERATE_HTMLHELP = NO # The CHM_FILE tag can be used to specify the file name of the resulting .chm # file. You can add a path in front of the file if the result should not be # written to the html output directory. 
# This tag requires that the tag GENERATE_HTMLHELP is set to YES. CHM_FILE = # The HHC_LOCATION tag can be used to specify the location (absolute path # including file name) of the HTML help compiler (hhc.exe). If non-empty, # doxygen will try to run the HTML help compiler on the generated index.hhp. # The file has to be specified with full path. # This tag requires that the tag GENERATE_HTMLHELP is set to YES. HHC_LOCATION = # The GENERATE_CHI flag controls if a separate .chi index file is generated # (YES) or that it should be included in the master .chm file (NO). # The default value is: NO. # This tag requires that the tag GENERATE_HTMLHELP is set to YES. GENERATE_CHI = NO # The CHM_INDEX_ENCODING is used to encode HtmlHelp index (hhk), content (hhc) # and project file content. # This tag requires that the tag GENERATE_HTMLHELP is set to YES. CHM_INDEX_ENCODING = # The BINARY_TOC flag controls whether a binary table of contents is generated # (YES) or a normal table of contents (NO) in the .chm file. Furthermore it # enables the Previous and Next buttons. # The default value is: NO. # This tag requires that the tag GENERATE_HTMLHELP is set to YES. BINARY_TOC = NO # The TOC_EXPAND flag can be set to YES to add extra items for group members to # the table of contents of the HTML help documentation and to the tree view. # The default value is: NO. # This tag requires that the tag GENERATE_HTMLHELP is set to YES. TOC_EXPAND = NO # If the GENERATE_QHP tag is set to YES and both QHP_NAMESPACE and # QHP_VIRTUAL_FOLDER are set, an additional index file will be generated that # can be used as input for Qt's qhelpgenerator to generate a Qt Compressed Help # (.qch) of the generated HTML documentation. # The default value is: NO. # This tag requires that the tag GENERATE_HTML is set to YES. GENERATE_QHP = NO # If the QHG_LOCATION tag is specified, the QCH_FILE tag can be used to specify # the file name of the resulting .qch file. 
The path specified is relative to # the HTML output folder. # This tag requires that the tag GENERATE_QHP is set to YES. QCH_FILE = # The QHP_NAMESPACE tag specifies the namespace to use when generating Qt Help # Project output. For more information please see Qt Help Project / Namespace # (see: http://qt-project.org/doc/qt-4.8/qthelpproject.html#namespace). # The default value is: org.doxygen.Project. # This tag requires that the tag GENERATE_QHP is set to YES. QHP_NAMESPACE = org.doxygen.Project # The QHP_VIRTUAL_FOLDER tag specifies the namespace to use when generating Qt # Help Project output. For more information please see Qt Help Project / Virtual # Folders (see: http://qt-project.org/doc/qt-4.8/qthelpproject.html#virtual- # folders). # The default value is: doc. # This tag requires that the tag GENERATE_QHP is set to YES. QHP_VIRTUAL_FOLDER = doc # If the QHP_CUST_FILTER_NAME tag is set, it specifies the name of a custom # filter to add. For more information please see Qt Help Project / Custom # Filters (see: http://qt-project.org/doc/qt-4.8/qthelpproject.html#custom- # filters). # This tag requires that the tag GENERATE_QHP is set to YES. QHP_CUST_FILTER_NAME = # The QHP_CUST_FILTER_ATTRS tag specifies the list of the attributes of the # custom filter to add. For more information please see Qt Help Project / Custom # Filters (see: http://qt-project.org/doc/qt-4.8/qthelpproject.html#custom- # filters). # This tag requires that the tag GENERATE_QHP is set to YES. QHP_CUST_FILTER_ATTRS = # The QHP_SECT_FILTER_ATTRS tag specifies the list of the attributes this # project's filter section matches. Qt Help Project / Filter Attributes (see: # http://qt-project.org/doc/qt-4.8/qthelpproject.html#filter-attributes). # This tag requires that the tag GENERATE_QHP is set to YES. QHP_SECT_FILTER_ATTRS = # The QHG_LOCATION tag can be used to specify the location of Qt's # qhelpgenerator. If non-empty doxygen will try to run qhelpgenerator on the # generated .qhp file. 
# This tag requires that the tag GENERATE_QHP is set to YES. QHG_LOCATION = # If the GENERATE_ECLIPSEHELP tag is set to YES, additional index files will be # generated, together with the HTML files, they form an Eclipse help plugin. To # install this plugin and make it available under the help contents menu in # Eclipse, the contents of the directory containing the HTML and XML files needs # to be copied into the plugins directory of eclipse. The name of the directory # within the plugins directory should be the same as the ECLIPSE_DOC_ID value. # After copying Eclipse needs to be restarted before the help appears. # The default value is: NO. # This tag requires that the tag GENERATE_HTML is set to YES. GENERATE_ECLIPSEHELP = NO # A unique identifier for the Eclipse help plugin. When installing the plugin # the directory name containing the HTML and XML files should also have this # name. Each documentation set should have its own identifier. # The default value is: org.doxygen.Project. # This tag requires that the tag GENERATE_ECLIPSEHELP is set to YES. ECLIPSE_DOC_ID = org.doxygen.Project # If you want full control over the layout of the generated HTML pages it might # be necessary to disable the index and replace it with your own. The # DISABLE_INDEX tag can be used to turn on/off the condensed index (tabs) at top # of each HTML page. A value of NO enables the index and the value YES disables # it. Since the tabs in the index contain the same information as the navigation # tree, you can set this option to YES if you also set GENERATE_TREEVIEW to YES. # The default value is: NO. # This tag requires that the tag GENERATE_HTML is set to YES. DISABLE_INDEX = NO # The GENERATE_TREEVIEW tag is used to specify whether a tree-like index # structure should be generated to display hierarchical information. If the tag # value is set to YES, a side panel will be generated containing a tree-like # index structure (just like the one that is generated for HTML Help). 
For this # to work a browser that supports JavaScript, DHTML, CSS and frames is required # (i.e. any modern browser). Windows users are probably better off using the # HTML help feature. Via custom style sheets (see HTML_EXTRA_STYLESHEET) one can # further fine-tune the look of the index. As an example, the default style # sheet generated by doxygen has an example that shows how to put an image at # the root of the tree instead of the PROJECT_NAME. Since the tree basically has # the same information as the tab index, you could consider setting # DISABLE_INDEX to YES when enabling this option. # The default value is: NO. # This tag requires that the tag GENERATE_HTML is set to YES. GENERATE_TREEVIEW = YES # The ENUM_VALUES_PER_LINE tag can be used to set the number of enum values that # doxygen will group on one line in the generated HTML documentation. # # Note that a value of 0 will completely suppress the enum values from appearing # in the overview section. # Minimum value: 0, maximum value: 20, default value: 4. # This tag requires that the tag GENERATE_HTML is set to YES. ENUM_VALUES_PER_LINE = 4 # If the treeview is enabled (see GENERATE_TREEVIEW) then this tag can be used # to set the initial width (in pixels) of the frame in which the tree is shown. # Minimum value: 0, maximum value: 1500, default value: 250. # This tag requires that the tag GENERATE_HTML is set to YES. TREEVIEW_WIDTH = 250 # If the EXT_LINKS_IN_WINDOW option is set to YES, doxygen will open links to # external symbols imported via tag files in a separate window. # The default value is: NO. # This tag requires that the tag GENERATE_HTML is set to YES. EXT_LINKS_IN_WINDOW = NO # Use this tag to change the font size of LaTeX formulas included as images in # the HTML documentation. When you change the font size after a successful # doxygen run you need to manually remove any form_*.png images from the HTML # output directory to force them to be regenerated. 
# Minimum value: 8, maximum value: 50, default value: 10. # This tag requires that the tag GENERATE_HTML is set to YES. FORMULA_FONTSIZE = 10 # Use the FORMULA_TRANSPARENT tag to determine whether or not the images # generated for formulas are transparent PNGs. Transparent PNGs are not # supported properly for IE 6.0, but are supported on all modern browsers. # # Note that when changing this option you need to delete any form_*.png files in # the HTML output directory before the changes have effect. # The default value is: YES. # This tag requires that the tag GENERATE_HTML is set to YES. FORMULA_TRANSPARENT = YES # Enable the USE_MATHJAX option to render LaTeX formulas using MathJax (see # http://www.mathjax.org) which uses client side Javascript for the rendering # instead of using pre-rendered bitmaps. Use this if you do not have LaTeX # installed or if you want the formulas to look prettier in the HTML output. When # enabled you may also need to install MathJax separately and configure the path # to it using the MATHJAX_RELPATH option. # The default value is: NO. # This tag requires that the tag GENERATE_HTML is set to YES. USE_MATHJAX = NO # When MathJax is enabled you can set the default output format to be used for # the MathJax output. See the MathJax site (see: # http://docs.mathjax.org/en/latest/output.html) for more details. # Possible values are: HTML-CSS (which is slower, but has the best # compatibility), NativeMML (i.e. MathML) and SVG. # The default value is: HTML-CSS. # This tag requires that the tag USE_MATHJAX is set to YES. MATHJAX_FORMAT = HTML-CSS # When MathJax is enabled you need to specify the location relative to the HTML # output directory using the MATHJAX_RELPATH option. The destination directory # should contain the MathJax.js script. For instance, if the mathjax directory # is located at the same level as the HTML output directory, then # MATHJAX_RELPATH should be ../mathjax. 
The default value points to the MathJax # Content Delivery Network so you can quickly see the result without installing # MathJax. However, it is strongly recommended to install a local copy of # MathJax from http://www.mathjax.org before deployment. # The default value is: http://cdn.mathjax.org/mathjax/latest. # This tag requires that the tag USE_MATHJAX is set to YES. MATHJAX_RELPATH = http://cdn.mathjax.org/mathjax/latest # The MATHJAX_EXTENSIONS tag can be used to specify one or more MathJax # extension names that should be enabled during MathJax rendering. For example # MATHJAX_EXTENSIONS = TeX/AMSmath TeX/AMSsymbols # This tag requires that the tag USE_MATHJAX is set to YES. MATHJAX_EXTENSIONS = # The MATHJAX_CODEFILE tag can be used to specify a file with javascript pieces # of code that will be used on startup of the MathJax code. See the MathJax site # (see: http://docs.mathjax.org/en/latest/output.html) for more details. For an # example see the documentation. # This tag requires that the tag USE_MATHJAX is set to YES. MATHJAX_CODEFILE = # When the SEARCHENGINE tag is enabled doxygen will generate a search box for # the HTML output. The underlying search engine uses javascript and DHTML and # should work on any modern browser. Note that when using HTML help # (GENERATE_HTMLHELP), Qt help (GENERATE_QHP), or docsets (GENERATE_DOCSET) # there is already a search function so this one should typically be disabled. # For large projects the javascript based search engine can be slow, then # enabling SERVER_BASED_SEARCH may provide a better solution. It is possible to # search using the keyboard; to jump to the search box use <access key> + S # (what the <access key> is depends on the OS and browser, but it is typically # <CTRL>, <ALT>/