microsoft-onnxscript-284f2fa/.azure-pipelines/_release-template.yml
# Template steps for the release pipeline
steps:
- task: UsePythonVersion@0
  inputs:
    versionSpec: '3.11'
  displayName: 'Set Up Python'
- script: python -m pip install --upgrade pip build wheel
  displayName: 'Install Python build dependencies'
- script: python -m build
  displayName: 'Build ONNX Script wheel'
- task: CopyFiles@2
  displayName: 'Copy Python Wheel to: $(Build.ArtifactStagingDirectory)'
  inputs:
    SourceFolder: 'dist'
    Contents: '*.*'
    TargetFolder: '$(Build.ArtifactStagingDirectory)'
- task: PublishBuildArtifacts@1
  displayName: 'Save build artifacts'
  inputs:
    ArtifactName: onnxscript
microsoft-onnxscript-284f2fa/.azure-pipelines/release-dev.yml
# Build the dev version of the package and publish to artifacts
# To configure triggers, see https://github.com/microsoft/onnx-converters-private/wiki/ONNX-Script-release
trigger: none
pool:
  vmImage: ubuntu-latest
variables:
  CI: 'true'
steps:
- template: _release-template.yml
microsoft-onnxscript-284f2fa/.azure-pipelines/release.yml
# Build the release version of the package and publish to artifacts
trigger: none
pool:
  vmImage: ubuntu-latest
variables:
  CI: 'true'
  # Set the release environment variable to build a release version of the wheel
  ONNX_SCRIPT_RELEASE: 1
steps:
- template: _release-template.yml
# Test the wheels. This needs to happen after PublishBuildArtifacts
# to avoid interference with the artifacts
- script: python -m pip install dist/*.whl --no-deps
  displayName: 'Install wheel'
microsoft-onnxscript-284f2fa/.editorconfig
root = true
[*]
trim_trailing_whitespace = true
insert_final_newline = true
indent_style = space
microsoft-onnxscript-284f2fa/.gitattributes
**/*.pb filter=lfs diff=lfs merge=lfs -text
**/*.onnx filter=lfs diff=lfs merge=lfs -text
microsoft-onnxscript-284f2fa/.github/codeql/codeql-config.yml
query-filters:
  - exclude:
      id: py/import-and-import-from
  - exclude:
      # Module-level cyclic import, although it should be avoided, is not a problem
      # from the perspective of functionality. Follow advice on
      # https://codeql.github.com/codeql-query-help/python/py-cyclic-import/ to break the cycle if needed.
      id: py/cyclic-import
microsoft-onnxscript-284f2fa/.github/dependabot.yaml
# To get started with Dependabot version updates, you'll need to specify which
# package ecosystems to update and where the package manifests are located.
# Please see the documentation for all configuration options:
# https://docs.github.com/github/administering-a-repository/configuration-options-for-dependency-updates
version: 2
updates:
  - package-ecosystem: "pip" # See documentation for possible values
    directory: "/requirements/ci/" # Location of package manifests
    schedule:
      interval: "daily"
    # disable version updates for pip dependencies
    open-pull-requests-limit: 5
  - package-ecosystem: "pip"
    directory: "/requirements/lintrunner/" # Location of package manifests
    schedule:
      interval: "weekly"
    # disable version updates for pip dependencies
    open-pull-requests-limit: 5
  - package-ecosystem: "github-actions"
    # Workflow files stored in the
    # default location of `.github/workflows`
    directory: "/"
    schedule:
      interval: "weekly"
    open-pull-requests-limit: 5
microsoft-onnxscript-284f2fa/.github/workflows/codeql-analysis.yml
# For most projects, this workflow file will not need changing; you simply need
# to commit it to your repository.
#
# You may wish to alter this file to override the set of languages analyzed,
# or to provide custom queries or build logic.
#
# ******** NOTE ********
# We have attempted to detect the languages in your repository. Please check
# the `language` matrix defined below to confirm you have the correct set of
# supported CodeQL languages.
#
name: "CodeQL"

on:
  push:
    branches: [ "main" ]
  pull_request:
    # The branches below must be a subset of the branches above
    branches: [ "main" ]
    types: [opened, synchronize, reopened, ready_for_review]
  schedule:
    - cron: '25 16 * * 4'

jobs:
  analyze:
    name: Analyze
    runs-on: ubuntu-latest
    permissions:
      actions: read
      contents: read
      security-events: write
    # Do not run on drafts to make reviewing easier
    if: github.event.pull_request.draft == false
    strategy:
      fail-fast: false
      matrix:
        language: [ 'python' ]
        # CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python', 'ruby' ]
        # Learn more about CodeQL language support at https://aka.ms/codeql-docs/language-support
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4
      # Initializes the CodeQL tools for scanning.
      - name: Initialize CodeQL
        uses: github/codeql-action/init@v3
        with:
          languages: ${{ matrix.language }}
          # If you wish to specify custom queries, you can do so here or in a config file.
          # By default, queries listed here will override any specified in a config file.
          # Prefix the list here with "+" to use these queries and those in the config file.
          # For details on CodeQL's query packs, refer to: https://docs.github.com/en/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/configuring-code-scanning#using-queries-in-ql-packs
          queries: security-extended,security-and-quality
          config-file: ./.github/codeql/codeql-config.yml
      # Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
      # If this step fails, then you should remove it and run the build manually (see below)
      - name: Autobuild
        uses: github/codeql-action/autobuild@v3
      # ℹ️ Command-line programs to run using the OS shell.
      # 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun
      # If the Autobuild fails above, remove it and uncomment the following three lines.
      # modify them (or add more) to build your code if your project, please refer to the EXAMPLE below for guidance.
      # - run: |
      #     echo "Run, Build Application using script"
      #     ./location_of_script_within_repo/buildscript.sh
      - name: Perform CodeQL Analysis
        uses: github/codeql-action/analyze@v3
microsoft-onnxscript-284f2fa/.github/workflows/lint.yaml
name: Lint

on:
  push:
    branches:
      - main
      - 'gh/**/base' # ghstack base branches
  pull_request:
  merge_group:

concurrency:
  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.sha }}-${{ github.event_name == 'workflow_dispatch' }}
  cancel-in-progress: true

jobs:
  optional-lint:
    name: Optional Lint
    runs-on: ubuntu-latest
    permissions:
      pull-requests: write
    steps:
      - uses: actions/checkout@v4
      - name: misspell # Check spelling
        uses: reviewdog/action-misspell@v1
        with:
          github_token: ${{ secrets.github_token }}
          locale: "US"
          reporter: github-pr-check
          level: info
          filter_mode: diff_context
      - name: shellcheck # Static check shell scripts
        uses: reviewdog/action-shellcheck@v1
        with:
          github_token: ${{ secrets.github_token }}
          reporter: github-pr-check
          level: info
          filter_mode: diff_context

  enforce-style:
    name: Enforce style
    runs-on: ubuntu-latest
    permissions:
      security-events: write
    steps:
      - uses: actions/checkout@v4
      - name: Setup Python
        uses: actions/setup-python@v5
        with:
          # Version range or exact version of Python to use, using SemVer's version range syntax. Reads from .python-version if unset.
          python-version: "3.10"
      - name: Install ONNXScript
        run: |
          # Install dependencies
          python -m pip install --upgrade pip
          python -m pip install --upgrade setuptools
          python -m pip install -r requirements-dev.txt
          # FIXME: numpy 2.2 has some typing changes that break the mypy CI but it's otherwise fine
          python -m pip install "numpy<2.2"
          # Install packages
          python -m pip install -e .
          lintrunner init
      - name: Run lintrunner on all files
        run: |
          set +e
          if ! lintrunner --force-color --all-files --tee-json=lint.json -v; then
            echo ""
            echo -e "\e[1m\e[36mYou can reproduce these results locally by using \`lintrunner\`.\e[0m"
            echo -e "\e[1m\e[36mSee https://github.com/microsoft/onnxscript#coding-style for setup instructions.\e[0m"
            exit 1
          fi
      - name: Produce SARIF
        if: always()
        run: |
          python -m lintrunner_adapters to-sarif lint.json lintrunner.sarif
      - name: Upload SARIF file
        # Use always() to always upload SARIF even if lintrunner returns with error code
        # To toggle linter comments in the files page, press `i` on the keyboard
        if: always()
        continue-on-error: true
        uses: github/codeql-action/upload-sarif@v3
        with:
          # Path to SARIF file relative to the root of the repository
          sarif_file: lintrunner.sarif
          category: lintrunner
          checkout_path: ${{ github.workspace }}
microsoft-onnxscript-284f2fa/.github/workflows/main.yaml
name: CI

on:
  schedule:
    # Run weekly on Mondays and Wednesdays 00:00
    - cron: '00 00 * * MON,WED'
  push:
    branches:
      - main
      - 'gh/**/base' # ghstack base branches
      - rel-*
  pull_request:
  # Allows you to run this workflow manually from the Actions tab
  workflow_dispatch:
  merge_group:

concurrency:
  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.sha }}-${{ github.event_name == 'workflow_dispatch' }}
  cancel-in-progress: true

jobs:
  test:
    strategy:
      fail-fast: false
      matrix:
        os: [ubuntu-latest, windows-latest, macos-latest]
        name:
          - py312
          - py311
          - py311-torch-nightly
          - py311-onnx-weekly
          - py311-ort-nightly
          - py310
        include:
          - name: py312
            python-version: "3.12"
            nox-tag: test build
          - name: py311
            python-version: "3.11"
            nox-tag: test
          - name: py310
            python-version: "3.10"
            nox-tag: test
          - name: py311-torch-nightly
            python-version: "3.11"
            nox-tag: test-torch-nightly
          - name: py311-onnx-weekly
            python-version: "3.11"
            nox-tag: test-onnx-weekly
          - name: py311-ort-nightly
            python-version: "3.11"
            nox-tag: test-ort-nightly
    runs-on: ${{ matrix.os }}
    steps:
      - uses: actions/checkout@v4
      - name: Setup Python ${{ matrix.python-version }}
        uses: actions/setup-python@v5
        with:
          python-version: ${{ matrix.python-version }}
      - name: Install nox
        run: python -m pip install nox
      - name: Pull Test Data
        run: git lfs pull
      - name: Run tests
        run: nox -t ${{ matrix.nox-tag }} --forcecolor -- --cov=onnxscript --cov-report=xml --cov-append --cov-branch -n=auto --junitxml junit.xml
        env:
          CATCH_ORT_SEGFAULT: "${{ matrix.os == 'ubuntu-latest' && '1' || '0' }}"
          CREATE_REPRODUCTION_REPORT: "${{ matrix.os == 'ubuntu-latest' && '1' || '0' }}"
      - name: Upload coverage to Codecov
        if: always()
        uses: codecov/codecov-action@v5
        with:
          token: ${{ secrets.CODECOV_TOKEN }}
      - name: Upload test results to Codecov
        if: ${{ !cancelled() }}
        uses: codecov/test-results-action@v1
        with:
          token: ${{ secrets.CODECOV_TOKEN }}
      - name: Upload torchlib error reports
        if: always()
        uses: actions/upload-artifact@v4
        with:
          name: Error reports (${{ matrix.name }}-${{ matrix.os }})
          path: error_reports

  build_docs:
    strategy:
      fail-fast: false
      matrix:
        os: [ubuntu-latest, windows-latest]
    runs-on: ${{ matrix.os }}
    steps:
      - uses: actions/checkout@v4
      - name: Setup Python
        uses: actions/setup-python@v5
        with:
          python-version: "3.10"
          cache: pip
          cache-dependency-path: "**/requirements-dev.txt"
      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip setuptools wheel
          python -m pip install -r requirements-dev.txt
      - name: Versions
        run: |
          pip list | grep numpy
          pip list | grep onnx
          pip list | grep torch
      - name: Install package
        run: pip install .
      - name: Build documentation
        run: python -m sphinx docs dist/html

  update_readme:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - name: Setup Python
        uses: actions/setup-python@v5
      - name: Update readme
        run: |
          python docs/update_readme.py
          # Fail the job with a hint if README.md is out of date
          if ! git diff --exit-code -- 'README.md'; then
            echo 'Update readme by running `python docs/update_readme.py`'
            exit 1
          fi
microsoft-onnxscript-284f2fa/.github/workflows/pages.yaml
name: Publish Docs

on:
  push:
    branches: ["main"]
  # Allows you to run this workflow manually from the Actions tab
  workflow_dispatch:

# Sets permissions of the GITHUB_TOKEN to allow deployment to GitHub Pages
permissions:
  contents: read
  pages: write
  id-token: write

# Allow one concurrent deployment
concurrency:
  group: "pages"
  cancel-in-progress: true

jobs:
  deploy:
    environment:
      name: github-pages
      url: ${{ steps.deployment.outputs.page_url }}
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v4
      - name: Setup Pages
        uses: actions/configure-pages@v4
      - name: Setup Python
        uses: actions/setup-python@v5
        with:
          python-version: "3.10"
      - uses: actions/checkout@v4
      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip setuptools wheel
          python -m pip install -r requirements-dev.txt
      - name: Install package
        run: pip install .
      - name: Build documentation
        run: python -m sphinx docs dist/html
      - name: Upload documentation archive
        uses: actions/upload-pages-artifact@v3
        with:
          path: 'dist/html'
      - name: Deploy to GitHub Pages
        id: deployment
        uses: actions/deploy-pages@v4
microsoft-onnxscript-284f2fa/.gitignore
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class
# C extensions
*.so
# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST
# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
*.py,cover
.hypothesis/
.pytest_cache/
cover/
test-output.xml
*.sarif
# Sphinx documentation
docs/_build/
# Jupyter Notebook
.ipynb_checkpoints
# IPython
profile_default/
ipython_config.py
# Lock files from package managers
.python-version
Pipfile.lock
poetry.lock
# Environments
.env
.venv
env/
venv/
ENV/
env.bak/
venv.bak/
_venv/
# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
__pypackages__/
# mkdocs documentation
/site
# mypy
.mypy_cache/
.dmypy.json
dmypy.json
# Pyre type checker
.pyre/
# pytype static type analyzer
.pytype/
# Pycharm
.idea/
# VSCode
.vscode/
!.vscode/extensions.json
# Generated files
*.onnx
*.csv
*.xlsx
!testdata/**/*.onnx
*.onnxlib
**/onnx_backend_test_code/**
docs/auto_examples/*
tests/export/*
tests/models/testoutputs/*
tests/mylib.onnxlib
**/serde_test_profiles/*
tools/ort_rewriter_profiling/.logs/*
tools/ort_rewriter_profiling/onnx_models/*
microsoft-onnxscript-284f2fa/.lintrunner.toml
# Configuration for lintrunner https://github.com/suo/lintrunner
merge_base_with = 'main'
[[linter]]
code = 'RUFF'
include_patterns = [
'**/*.py',
'**/*.pyi',
]
exclude_patterns = [
'tests/models/**',
]
command = [
'python',
'-m',
'lintrunner_adapters',
'run',
'ruff_linter',
'--config=pyproject.toml',
'@{{PATHSFILE}}'
]
init_command = [
'python',
'-m',
'lintrunner_adapters',
'run',
'pip_init',
'--dry-run={{DRYRUN}}',
'--requirement=requirements/lintrunner/requirements.txt',
]
is_formatter = true
[[linter]]
code = 'MYPY'
include_patterns = [
'onnxscript/**/*.py',
'onnxscript/**/*.pyi',
]
exclude_patterns = [
'tests/**', # Skip linting test files for speed
# FIXME: Fix typing annotations in these files
'onnxscript/converter_test.py',
'onnxscript/converter.py',
'onnxscript/evaluator_test.py',
'onnxscript/evaluator.py',
'onnxscript/onnx_types.py',
'onnxscript/**/*_test.py', # Skip linting test files for speed
'onnxscript/function_libs/torch_lib/ops/**', # Operators typing do not play well with mypy
'onnxscript/optimizer/_legacy/evaluator.py', # FIXME
'onnxscript/optimizer/_legacy/constant_folding.py', # FIXME
'onnxscript/rewriter/onnxruntime/transformers/fastgelu.py', # FIXME
'onnxscript/rewriter/onnxruntime/instance_to_group_normalization.py', # FIXME
'onnxscript/rewriter/ort_fusions/_smollm_*.py', # onnxscript code
'onnxscript/_legacy_ir/irbuilder.py', # FIXME
'onnxscript/rewriter/onnxruntime/transformers/multihead_attention.py', # FIXME
'onnxscript/tools/function_unittest_producer.py', # FIXME
'onnxscript/_legacy_ir/visitor.py', # FIXME
'onnxscript/rewriter/onnxruntime/transformers/layernorm.py', # FIXME
'onnxscript/rewriter/generic_pattern.py', # FIXME
]
command = [
'python',
'-m',
'lintrunner_adapters',
'run',
'mypy_linter',
'--config=pyproject.toml',
'--show-disable',
'--',
'@{{PATHSFILE}}'
]
init_command = [
'python',
'-m',
'lintrunner_adapters',
'run',
'pip_init',
'--dry-run={{DRYRUN}}',
'--requirement=requirements/lintrunner/requirements.txt',
]
[[linter]]
code = 'RUFF-FORMAT'
include_patterns = [
'**/*.py',
]
exclude_patterns = [
'tests/onnx_backend_test_code/**',
]
command = [
'python',
'-m',
'lintrunner_adapters',
'run',
'ruff_format_linter',
'--',
'@{{PATHSFILE}}'
]
init_command = [
'python',
'-m',
'lintrunner_adapters',
'run',
'pip_init',
'--dry-run={{DRYRUN}}',
'--requirement=requirements/lintrunner/requirements.txt',
]
is_formatter = true
[[linter]]
code = 'PYLINT'
include_patterns = [
'**/*.py',
]
exclude_patterns = [
'examples/**', # TODO: Merge with docs/examples
'docs/examples/**',
'docs/tutorial/examples/**',
'onnxscript/converter_test.py',
'tests/functions/**',
'tests/models/**',
'tests/onnx_backend_test_code/**',
'onnxscript/optimizer/**', # FIXME
'onnxscript/rewriter/**', # FIXME
'onnxscript/_legacy_ir/**', # FIXME
]
command = [
'python',
'-m',
'lintrunner_adapters',
'run',
'pylint_linter',
'--rcfile=pyproject_pylint.toml',
'--show-disable',
'--',
'@{{PATHSFILE}}'
]
init_command = [
'python',
'-m',
'lintrunner_adapters',
'run',
'pip_init',
'--dry-run={{DRYRUN}}',
'--requirement=requirements/lintrunner/requirements.txt',
]
[[linter]]
code = 'EDITORCONFIG-CHECKER'
include_patterns = ['**']
exclude_patterns = [
'**/*.ipynb',
'**/*.onnx',
'**/*.pb'
]
command = [
'python',
'-m',
'lintrunner_adapters',
'run',
'editorconfig_checker_linter',
'--',
'@{{PATHSFILE}}'
]
init_command = [
'python',
'-m',
'lintrunner_adapters',
'run',
'pip_init',
'--dry-run={{DRYRUN}}',
'--requirement=requirements/lintrunner/requirements.txt',
]
microsoft-onnxscript-284f2fa/.vscode/extensions.json
{
  "recommendations": [
    "editorconfig.editorconfig"
  ]
}
microsoft-onnxscript-284f2fa/CODE_OF_CONDUCT.md
# Microsoft Open Source Code of Conduct
This project has adopted the [Microsoft Open Source Code of Conduct](https://opensource.microsoft.com/codeofconduct/).
Resources:
- [Microsoft Open Source Code of Conduct](https://opensource.microsoft.com/codeofconduct/)
- [Microsoft Code of Conduct FAQ](https://opensource.microsoft.com/codeofconduct/faq/)
- Contact [opencode@microsoft.com](mailto:opencode@microsoft.com) with questions or concerns
- Employees can reach out at [aka.ms/opensource/moderation-support](https://aka.ms/opensource/moderation-support)
microsoft-onnxscript-284f2fa/CONTRIBUTING.md
| ⚠️ | NOTE: ONNX Script is in very early and active development and the team anticipates breaking changes as the project evolves. ONNX Script is not ready for production, but early feedback is welcome. | ⚠️ |
|----|----|----|
# Contributing to ONNX Script
We're always looking for your help to improve the product (bug fixes, new features, documentation, etc). Currently ONNX Script is under early and heavy development, so we encourage proposing any major changes by [filing an issue](https://github.com/microsoft/onnxscript/issues) to discuss your idea with the team first.
## Report a security issue
**Please do not report security vulnerabilities through public GitHub issues.**
Please refer to our guidance on filing [Security Issues](SECURITY.md).
## Licensing guidelines
This project welcomes contributions and suggestions. Most contributions require you to
agree to a Contributor License Agreement (CLA) declaring that you have the right to,
and actually do, grant us the rights to use your contribution. For details, visit
https://cla.microsoft.com.
When you submit a pull request, a CLA-bot will automatically determine whether you need
to provide a CLA and decorate the PR appropriately (e.g., label, comment). Simply follow the
instructions provided by the bot. You will only need to do this once across all repositories using our CLA.
## Code of conduct
This project has adopted the [Microsoft Open Source Code of Conduct](https://opensource.microsoft.com/codeofconduct/).
For more information see the [Code of Conduct FAQ](https://opensource.microsoft.com/codeofconduct/faq/)
or contact [opencode@microsoft.com](mailto:opencode@microsoft.com) with any additional questions or comments.
microsoft-onnxscript-284f2fa/LICENSE
MIT License
Copyright (c) Microsoft Corporation
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
microsoft-onnxscript-284f2fa/MANIFEST.in
# include
include *.rst
include LICENSE
include README.md
include VERSION
include MANIFEST.in
# exclude from sdist
recursive-exclude onnxscript *.onnx
recursive-exclude onnxscript *_test.py
prune */__pycache__
microsoft-onnxscript-284f2fa/README.md
# ONNX Script
[](https://github.com/microsoft/onnxscript/actions/workflows/main.yaml)
[](https://aiinfra.visualstudio.com/ONNX%20Converters/_build/latest?definitionId=1258&branchName=main)
[](https://pypi.org/project/onnxscript)
[](https://pypi.org/project/onnxscript)
[](https://github.com/astral-sh/ruff)
[](https://github.com/psf/black)
ONNX Script enables developers to naturally author ONNX functions and
models using a subset of Python. ONNX Script is:
* **Expressive:** enables the authoring of all ONNX functions.
* **Simple and concise:** function code is natural and simple.
* **Debuggable:** allows for eager-mode evaluation that provides for a
more delightful ONNX model debugging experience.
This repo also covers:
* **ONNX IR:** an in-memory IR that supports the full ONNX spec, designed
for graph construction, analysis and transformation.
* **ONNX Script Optimizer:** provides functionality to optimize an ONNX
model by performing optimizations and clean-ups such as constant folding,
dead code elimination, etc.
* **ONNX Rewriter:** provides functionality to replace certain patterns in
an ONNX graph with replacement patterns based on user-defined rewrite rules.
Note however that ONNX Script does **not** intend to support the entirety
of the Python language.
Website: [https://onnxscript.ai/](https://onnxscript.ai/)
## Design Overview
ONNX Script provides a few major capabilities for authoring and debugging
ONNX models and functions:
* A converter which translates a Python ONNX Script function into an
ONNX graph, accomplished by traversing the [Python Abstract Syntax Tree][python-ast] to build an ONNX graph equivalent of the function.
* A converter that operates inversely, translating ONNX models and
functions into ONNX Script. This capability can be used to fully round-trip
ONNX Script ⇄ ONNX graph.
* A runtime shim that allows such functions to be evaluated
(in an "eager mode"). This functionality currently relies on
[ONNX Runtime][onnx-runtime] for executing every [ONNX Operator][onnx-ops],
and there is a Python-only reference runtime for ONNX underway that
will also be supported.
Note that the runtime is intended to help understand and debug function definitions. Performance is not a goal here.
## Installing ONNX Script
```bash
pip install --upgrade onnxscript
```
### Install for Development
```bash
git clone https://github.com/microsoft/onnxscript
cd onnxscript
pip install -r requirements-dev.txt
pip install -e .
```
### Run Unit Tests
```bash
pytest .
```
## Example
```python update-readme
import onnx

# We use ONNX opset 15 to define the function below.
from onnxscript import FLOAT, script
from onnxscript import opset15 as op


# We use the script decorator to indicate that
# this is meant to be translated to ONNX.
@script()
def onnx_hardmax(X, axis: int):
    """Hardmax is similar to ArgMax, with the result being encoded OneHot style."""

    # The type annotation on X indicates that it is a float tensor of
    # unknown rank. The type annotation on axis indicates that it will
    # be treated as an int attribute in ONNX.
    #
    # Invoke ONNX opset 15 op ArgMax.
    # Use unnamed arguments for ONNX input parameters, and named
    # arguments for ONNX attribute parameters.
    argmax = op.ArgMax(X, axis=axis, keepdims=False)
    xshape = op.Shape(X, start=axis)
    # use the Constant operator to create constant tensors
    zero = op.Constant(value_ints=[0])
    depth = op.GatherElements(xshape, zero)
    empty_shape = op.Constant(value_ints=[0])
    depth = op.Reshape(depth, empty_shape)
    values = op.Constant(value_ints=[0, 1])
    cast_values = op.CastLike(values, X)
    return op.OneHot(argmax, depth, cast_values, axis=axis)


# We use the script decorator to indicate that
# this is meant to be translated to ONNX.
@script()
def sample_model(X: FLOAT[64, 128], Wt: FLOAT[128, 10], Bias: FLOAT[10]) -> FLOAT[64, 10]:
    matmul = op.MatMul(X, Wt) + Bias
    return onnx_hardmax(matmul, axis=1)


# onnx_model is an in-memory ModelProto
onnx_model = sample_model.to_model_proto()

# Save the ONNX model at a given path
onnx.save(onnx_model, "sample_model.onnx")

# Check the model
try:
    onnx.checker.check_model(onnx_model)
except onnx.checker.ValidationError as e:
    print(f"The model is invalid: {e}")
else:
    print("The model is valid!")
```
The decorator parses the code of the function, converting it into an
intermediate representation. If it fails, it produces an error message
indicating the line where the error was detected. If it succeeds, the
intermediate representation can be converted into an ONNX graph
structure of type `FunctionProto`:
* `onnx_hardmax.to_function_proto()` returns a `FunctionProto`
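For example, the `FunctionProto` for the `onnx_hardmax` function above can be generated and inspected directly. The snippet below is a small illustrative sketch; it relies only on `to_function_proto()` (shown above) and standard ONNX protobuf fields:

```python
# Convert the ONNX Script function defined above into an ONNX FunctionProto
function_proto = onnx_hardmax.to_function_proto()

# FunctionProto is a plain ONNX protobuf message, so its fields can be inspected
print(function_proto.name)  # the function name, e.g. "onnx_hardmax"
print([node.op_type for node in function_proto.node])  # operators used in the body
```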
### Eager Mode Evaluation
Eager mode is mostly used to debug and validate that intermediate results
are as expected. The function defined above can be called as below,
executing in an eager-evaluation mode:
```python
import numpy as np
v = np.array([[0, 1], [2, 3]], dtype=np.float32)
result = onnx_hardmax(v, axis=1)
```
More examples can be found in the [docs/examples](docs/examples) directory.
## ONNX IR
An in-memory IR that supports the full ONNX spec, designed for graph construction, analysis and transformation.
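As a quick illustration of the intent, the sketch below loads a model into the IR, walks the graph, and saves it back. It assumes the public `onnxscript.ir` entry points (`ir.load`, `ir.save`, and iterating a graph's nodes); treat the exact names as indicative rather than authoritative, and the file paths as placeholders:

```python
from onnxscript import ir

# Load an ONNX model into the in-memory IR (the path is a placeholder).
model = ir.load("model.onnx")

# Traverse the graph topology: each node exposes its op_type, inputs, and outputs.
for node in model.graph:
    print(node.op_type, [v.name for v in node.inputs if v is not None])

# Serialize the (possibly modified) model back to a protobuf file.
ir.save(model, "model_out.onnx")
```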
### Features
* **Full ONNX spec support:** all valid models representable by ONNX protobuf,
and a subset of invalid models (so you can load and fix them).
* **Low memory footprint:** mmap'ed external tensors; unified interface for
ONNX TensorProto, Numpy arrays and PyTorch Tensors etc. No tensor size
limitation. Zero copies.
* **Straightforward access patterns:** Access value information and traverse the
graph topology with ease.
* **Robust mutation:** Create as many iterators as you like on the graph while mutating it.
* **Speed:** Performant graph manipulation, serialization/deserialization to Protobuf.
* **Pythonic and familiar APIs:** Classes define Pythonic APIs and still map to
ONNX protobuf concepts in an intuitive way.
## ONNX Script Tools
### ONNX Optimizer
The ONNX Script Optimizer tool provides the user with the functionality to optimize an ONNX model by performing optimizations and clean-ups such as constant folding, dead code elimination, etc. In order to utilize the optimizer tool:
```python
import onnxscript
onnxscript.optimizer.optimize(onnx_model)
```
For a detailed summary of all the optimizations applied by the optimizer call, refer to the tutorial [Optimizing a Model using the Optimizer](https://onnxscript.ai/tutorial/optimizer/optimize.html)
### ONNX Rewriter
The ONNX Rewriter tool provides the user with the functionality to replace certain patterns in an ONNX graph with another pattern based on user-defined rewrite rules. The rewriter supports two different methods by which patterns in the graph can be rewritten.
### Pattern-based rewriting
For this style of rewriting, the user provides a `target_pattern` that is to be replaced, a `replacement_pattern`, and a `match_condition` (the pattern rewrite will occur only if the match condition is satisfied). A simple example of how to use the pattern-based rewriting tool is as follows:
```python
import math

import onnxscript.rewriter
from onnxscript import ir
from onnxscript.rewriter import pattern


# The target pattern
def erf_gelu_pattern(op, x):
    return 0.5 * (x * (op.Erf(x / math.sqrt(2)) + 1.0))

def erf_gelu_pattern_2(op, x):
    return (x * (op.Erf(x / math.sqrt(2)) + 1.0)) * 0.5

# The replacement pattern
def gelu(op, x: ir.Value):
    return op.Gelu(x, domain="com.microsoft")

# Create multiple rules
rule1 = pattern.RewriteRule(
    erf_gelu_pattern,  # Target Pattern
    gelu,  # Replacement
)
rule2 = pattern.RewriteRule(
    erf_gelu_pattern_2,  # Target Pattern
    gelu,  # Replacement
)
# Create a Rewrite Rule Set with multiple rules.
rewrite_rule_set = pattern.RewriteRuleSet([rule1, rule2])
# Apply rewrites to `model`, the original ONNX model loaded elsewhere
model_with_rewrite_applied = onnxscript.rewriter.rewrite(
    model,  # Original ONNX Model
    pattern_rewrite_rules=rewrite_rule_set,
)
```
For a detailed tutorial on how to create target_pattern, replacement_pattern and match_condition blocks in order to utilize the pattern-based rewriter, refer to the tutorial [Pattern-based Rewrite Using Rules](https://onnxscript.ai/tutorial/rewriter/rewrite_patterns.html)
### Function-based rewriting
This style of rewriting matches a `FUNCTION_KEYWORD` and `PACKAGE_NAME` provided by the user to an existing function within the graph and replaces it with a new function provided by the user.
## Development Guidelines
Every change impacting the converter or the eager evaluation must be
unit tested with class `OnnxScriptTestCase` to ensure both systems do
return the same results with the same inputs.
### Coding Style
We use `ruff`, `black`, `isort`, `mypy`, and other tools to check code formatting, and we use `lintrunner` to run all linters.
You can install the dependencies and initialize with
```sh
pip install lintrunner lintrunner-adapters
lintrunner init
```
This will install lintrunner on your system and download all the necessary dependencies to run linters locally.
If you want to see what lintrunner init will install, run `lintrunner init --dry-run`.
To lint local changes:
```bash
lintrunner
```
To format files:
```bash
lintrunner f
```
To lint all files:
```bash
lintrunner --all-files
```
Use `--output oneline` to produce a compact list of lint errors, useful when
there are many errors to fix.
See all available options with `lintrunner -h`.
To read more about lintrunner, see [wiki](https://github.com/pytorch/pytorch/wiki/lintrunner).
To update an existing linting rule or create a new one, modify `.lintrunner.toml` or create a
new adapter following examples in https://github.com/justinchuby/lintrunner-adapters.
## Contributing
We're always looking for your help to improve the product (bug fixes, new features, documentation, etc). Currently ONNX Script is under early and heavy development, so we encourage proposing any major changes by [filing an issue](https://github.com/microsoft/onnxscript/issues) to discuss your idea with the team first.
### Report a Security Issue
**Please do not report security vulnerabilities through public GitHub issues.**
Please refer to our guidance on filing [Security Issues](SECURITY.md).
### Licensing Guidelines
This project welcomes contributions and suggestions. Most contributions require you to
agree to a Contributor License Agreement (CLA) declaring that you have the right to,
and actually do, grant us the rights to use your contribution. For details, visit
https://cla.microsoft.com.
When you submit a pull request, a CLA-bot will automatically determine whether you need
to provide a CLA and decorate the PR appropriately (e.g., label, comment). Simply follow the
instructions provided by the bot. You will only need to do this once across all repositories using our CLA.
### Code of Conduct
This project has adopted the [Microsoft Open Source Code of Conduct](https://opensource.microsoft.com/codeofconduct/).
For more information see the [Code of Conduct FAQ](https://opensource.microsoft.com/codeofconduct/faq/)
or contact [opencode@microsoft.com](mailto:opencode@microsoft.com) with any additional questions or comments.
## Trademarks
This project may contain trademarks or logos for projects, products, or services. Authorized use of Microsoft
trademarks or logos is subject to and must follow
[Microsoft's Trademark & Brand Guidelines](https://www.microsoft.com/en-us/legal/intellectualproperty/trademarks/usage/general).
Use of Microsoft trademarks or logos in modified versions of this project must not cause confusion or imply Microsoft sponsorship.
Any use of third-party trademarks or logos is subject to those third-party's policies.
[python-ast]: https://docs.python.org/3/library/ast.html
[onnx-runtime]: https://onnxruntime.ai
[onnx-ops]: https://github.com/onnx/onnx/blob/main/docs/Operators.md
[onnxfns1A.py]: https://github.com/microsoft/onnxscript/blob/main/onnxscript/tests/models/onnxfns1A.py
microsoft-onnxscript-284f2fa/ROADMAP.md
| ⚠️ | NOTE: ONNX Script is in very early and active development and the team anticipates breaking changes as the project evolves. ONNX Script is not ready for production, but early feedback is welcome. | ⚠️ |
|----|----|----|
# ONNX Script Roadmap
* Improve error-checking and error-handling to identify use of unsupported
Python features/constructs as early as possible and report it.
* Improve documentation and tutorials
* Features
- Support nested functions, for use as sub-graphs in ops like Scan
- Generalize support for _break_ statements in loops
- Support indexing notation for slicing/gather/scatter operations
- Improve type-annotation support, especially for tensor shapes
- Improve support for non-tensor types (sequences, maps, and optional)
- Improve checking in eager-mode to ensure its semantics is aligned with ONNX semantics
- Support for variadic inputs/outputs
microsoft-onnxscript-284f2fa/SECURITY.md
# Security
Microsoft takes the security of our software products and services seriously, which includes all source code repositories managed through our GitHub organizations, which include [Microsoft](https://github.com/Microsoft), [Azure](https://github.com/Azure), [DotNet](https://github.com/dotnet), [AspNet](https://github.com/aspnet), [Xamarin](https://github.com/xamarin), and [our GitHub organizations](https://opensource.microsoft.com/).
If you believe you have found a security vulnerability in any Microsoft-owned repository that meets [Microsoft's definition of a security vulnerability](https://docs.microsoft.com/en-us/previous-versions/tn-archive/cc751383(v=technet.10)), please report it to us as described below.
## Reporting Security Issues
**Please do not report security vulnerabilities through public GitHub issues.**
Instead, please report them to the Microsoft Security Response Center (MSRC) at [https://msrc.microsoft.com/create-report](https://msrc.microsoft.com/create-report).
If you prefer to submit without logging in, send email to [secure@microsoft.com](mailto:secure@microsoft.com). If possible, encrypt your message with our PGP key; please download it from the [Microsoft Security Response Center PGP Key page](https://www.microsoft.com/en-us/msrc/pgp-key-msrc).
You should receive a response within 24 hours. If for some reason you do not, please follow up via email to ensure we received your original message. Additional information can be found at [microsoft.com/msrc](https://www.microsoft.com/msrc).
Please include the requested information listed below (as much as you can provide) to help us better understand the nature and scope of the possible issue:
* Type of issue (e.g. buffer overflow, SQL injection, cross-site scripting, etc.)
* Full paths of source file(s) related to the manifestation of the issue
* The location of the affected source code (tag/branch/commit or direct URL)
* Any special configuration required to reproduce the issue
* Step-by-step instructions to reproduce the issue
* Proof-of-concept or exploit code (if possible)
* Impact of the issue, including how an attacker might exploit the issue
This information will help us triage your report more quickly.
If you are reporting for a bug bounty, more complete reports can contribute to a higher bounty award. Please visit our [Microsoft Bug Bounty Program](https://microsoft.com/msrc/bounty) page for more details about our active programs.
## Preferred Languages
We prefer all communications to be in English.
## Policy
Microsoft follows the principle of [Coordinated Vulnerability Disclosure](https://www.microsoft.com/en-us/msrc/cvd).
microsoft-onnxscript-284f2fa/SUPPORT.md
# TODO: The maintainer of this repo has not yet edited this file
**REPO OWNER**: Do you want Customer Service & Support (CSS) support for this product/project?
- **No CSS support:** Fill out this template with information about how to file issues and get help.
- **Yes CSS support:** Fill out an intake form at [aka.ms/spot](https://aka.ms/spot). CSS will work with/help you to determine next steps. More details also available at [aka.ms/onboardsupport](https://aka.ms/onboardsupport).
- **Not sure?** Fill out a SPOT intake as though the answer were "Yes". CSS will help you decide.
*Then remove this first heading from this SUPPORT.MD file before publishing your repo.*
# Support
## How to file issues and get help
This project uses GitHub Issues to track bugs and feature requests. Please search the existing
issues before filing new issues to avoid duplicates. For new issues, file your bug or
feature request as a new Issue.
For help and questions about using this project, please **REPO MAINTAINER: INSERT INSTRUCTIONS HERE
FOR HOW TO ENGAGE REPO OWNERS OR COMMUNITY FOR HELP. COULD BE A STACK OVERFLOW TAG OR OTHER
CHANNEL. WHERE WILL YOU HELP PEOPLE?**.
## Microsoft Support Policy
Support for this **PROJECT or PRODUCT** is limited to the resources listed above.
microsoft-onnxscript-284f2fa/VERSION
0.2.0
microsoft-onnxscript-284f2fa/codecov.yml
coverage:
  status:
    project:
      default:
        informational: true
    patch:
      default:
        informational: true
microsoft-onnxscript-284f2fa/docs/_static/css/custom.css
.navbar-brand {
  padding: 4px 0 2px 0;
}

.bi {
  width: 1em;
  height: 1em;
  display: inline-block;
  vertical-align: -0.125em;
  fill: currentcolor;
  flex-shrink: 0;
}

.article-meta {
  display: flex;
  align-items: first baseline;
  opacity: 80%;
}

.article-meta:not(:last-child) {
  margin-bottom: 0.5rem;
}

.article-meta .bi:first-child {
  margin-right: 0.5em;
}

.article-authors > ul {
  display: inline;
  list-style: none;
  margin: 0;
  padding: 0;
}

.article-authors > ul > li {
  display: inline;
}

.article-authors > ul > li:not(:last-child)::after {
  content: " • ";
}

.article-date > span {
  font-style: italic;
  font-size: 0.875rem;
  opacity: 80%;
}

.highlight {
  overflow-x: auto;
  overflow-y: scroll;
}

.code-title {
  text-align: center;
  font-weight: normal;
  font-style: italic;
  margin-top: 1.5em;
}

@media screen {
  .longcode > .highlight {
    max-height: 25em;
  }
}

figure.math {
  text-align: center;
  margin: 1em 0;
}

figure.math svg {
  fill: currentColor;
  max-width: max(60%, 400px);
}
microsoft-onnxscript-284f2fa/docs/_static/logo-dark.png (binary PNG image; data omitted)