.coveragerc
[run]
source =
orangecontrib.spectroscopy
omit =
*/tests/*
*/utils/skimage/*
*/utils/spc/*
[report]
exclude_lines =
pragma: no cover
raise NotImplementedError
if __name__ == .__main__.:
except MemoryError
assert False
raise AssertionError
if (typing\.)?TYPE_CHECKING:
.github/workflows/check_pylint_diff.sh
#!/usr/bin/env bash
# Copyright (C) 2016 Kernc, Google Inc., authors, and contributors
# Licensed under http://www.apache.org/licenses/LICENSE-2.0
# Created By: miha@reciprocitylabs.com
set -o pipefail
set -o nounset
set -o errexit
ARG1=${1:-}
GIT_REPO="$(pwd)"
TMP_REPO="$GIT_REPO/$(mktemp -d pylint_diff.XXXXXXX)"
CACHE_DIR="$GIT_REPO/.pylint_cache"
UNCOMMITED_PATCH="$TMP_REPO/uncommited.patch"
SCRIPT=$(basename "$0")
PYLINT="$(command -v pylint 2>/dev/null || true)"
RADON="$(command -v radon 2>/dev/null || true)"
PYLINT_ARGS="--msg-template='{path}:{line}: [{msg_id}({symbol}), {obj}] {msg}'"
RADON_ARGS='cc --min C --no-assert --show-closures --show-complexity --average'
trap "status=\$?; cd '$GIT_REPO'; rm -rf '$TMP_REPO'; exit \$status" EXIT
mkdir -p "$CACHE_DIR"
print_help ()
{
echo "
Usage: $SCRIPT [TEST_COMMIT | -h]
This script will compare pylint error count from two different commits.
Note: all changes that are not committed will be ignored.
The script will work only if the current commit is a merge commit, or if the
second test_commit argument is provided.
Given the commit tree:
D---E---F---G---H
\\ /
A---B---C
- Running '$SCRIPT' on H will check the diff between G and H.
- Running '$SCRIPT F' on H will check the diff between F and H.
- Running '$SCRIPT F' on C will check the diff between E and C. The E commit is
set by the merge base of the current head and the specified commit F.
"
exit 0
}
case $ARG1 in -h|--help) print_help ; esac
if [ ! "$PYLINT$RADON" ]; then
echo 'Error: pylint and/or radon required'
exit 3
fi
# Make a local clone: prevents copying of objects
# Handle shallow git clones
is_shallow=$([ -f "$GIT_REPO/.git/shallow" ] && echo true || echo)
if [ "$is_shallow" ]; then
mv "$GIT_REPO/.git/shallow" "$GIT_REPO/.git/shallow-bak"
fi
git clone -q --local --depth=50 "$GIT_REPO" "$TMP_REPO" 2>/dev/null
if [ "$is_shallow" ]; then
mv "$GIT_REPO/.git/shallow-bak" "$GIT_REPO/.git/shallow"
cp "$GIT_REPO/.git/shallow" "$TMP_REPO/.git/shallow"
fi
# Move over any modified but uncommitted files ...
if ! git diff-index --quiet HEAD; then
git stash save -q --keep-index
git stash show -p stash@\{0\} > "$UNCOMMITED_PATCH"
git stash pop -q --index
fi
cd "$TMP_REPO"
# ... and commit them
if [ "$(cat "$UNCOMMITED_PATCH" 2>/dev/null || true)" ]; then
git apply "$UNCOMMITED_PATCH"
git commit -a -m 'Commit changed files'
was_dirty='+'
fi >/dev/null 2>&1
git reset --hard -q HEAD
CURRENT_COMMIT=$(git rev-parse HEAD)
if [ "$ARG1" ]; then
PREVIOUS_COMMIT=$(git merge-base HEAD "$ARG1")
else
PREVIOUS_COMMIT=$(git show --pretty=raw HEAD |
awk '/^parent /{ print $2; exit }')
fi
echo
echo "Comparing commits ${CURRENT_COMMIT:0:10}${was_dirty:-} and ${PREVIOUS_COMMIT:0:10}"
CHANGED_FILES=$(git diff --name-only $CURRENT_COMMIT $PREVIOUS_COMMIT |
grep "\.py$" || true )
[ ! "$(command -v md5sum 2>/dev/null)" ] && md5sum() { md5; } # for OS X
CHANGED_FILES_HASH=$(echo "$CHANGED_FILES" | md5sum | cut -d ' ' -f 1)
if [ ! "$CHANGED_FILES" ]; then
echo "No python files changed. Skipping lint checks."
exit 0
fi
echo
echo "Comparing files"
echo "==============="
echo "$CHANGED_FILES"
echo
# Run pylint on the old and the new code to compare their quality.
# When pylint is run multiple times it stores the previous results and reports
# the change in quality: a non-negative number if the code improved or stayed
# the same, and a negative number if more code issues have been introduced.
checkout ()
{
{ git checkout -q "$1"
git reset --hard -q HEAD
} 2>/dev/null
}
Number_of_issues ()
{
cached="$1"
{ cat "$cached" 2>/dev/null ||
echo "$CHANGED_FILES" |
xargs "$PYLINT" "$PYLINT_ARGS" |
tee "$cached"
} | awk -F'[\\. ]' '/^Your code has been rated at /{ print $7 }' || true
}
Cyclomatic_complexity ()
{
cached="$1"
{ cat "$cached" 2>/dev/null ||
echo "$CHANGED_FILES" |
xargs "$RADON" $RADON_ARGS |
tee "$cached"
} | awk -F'[()]' '/ .+\([0-9]+\)$/ { tot += $2 } END { print tot }' || true
}
Get_diffable ()
{
sed -E "/$diff_block_end/,\$d" |
sort |
sed -E "s/$match_line_num/$replace_line_num/"
}
for check in \
'Pylint,Number_of_issues,^Report$,^([^:]+:)[0-9]+:,\\1,^\\+' \
'radon,Cyclomatic_complexity,^[0-9]+ blocks,^( +[MCF]) [0-9:]+,\\1:,^[+-]'
do
IFS=',' read check \
func \
diff_block_end \
match_line_num \
replace_line_num \
show_diff_lines < <(echo "$check")
# If command not available, skip it
if [ ! "$(eval echo \$$(echo $check | tr '[:lower:]' '[:upper:]') )" ]; then
continue
fi
cached_previous="$CACHE_DIR/previous.$check.$PREVIOUS_COMMIT.$CHANGED_FILES_HASH"
cached_current="$CACHE_DIR/current.$check.$CURRENT_COMMIT.$CHANGED_FILES_HASH"
[ -f "$cached_previous" ] || rm -r "$CACHE_DIR/previous."* 2>/dev/null || true
[ -f "$cached_current" ] || rm -r "$CACHE_DIR/current."* 2>/dev/null || true
[ -f "$cached_previous" ] || checkout $PREVIOUS_COMMIT
RESULT_PARENT=$($func "$cached_previous")
[ -f "$cached_current" ] || checkout $CURRENT_COMMIT
RESULT_CURRENT=$($func "$cached_current")
echo
echo "$check result"
echo "================================================================="
cat "$cached_current"
echo
echo
echo "$check diff"
echo "================================================================="
diff --unified=0 --minimal \
<(Get_diffable < "$cached_previous") \
<(Get_diffable < "$cached_current") |
grep -E "$show_diff_lines" | tail -n +3 || true
echo
echo
echo "$check results"
echo "================================================================="
echo "${func//_/ } on parent commit: $RESULT_PARENT"
echo "${func//_/ } on the pull request: $RESULT_CURRENT ($(printf "%+d" $((RESULT_CURRENT - RESULT_PARENT))))"
echo
if awk "BEGIN { exit ${RESULT_CURRENT:-0} > ${RESULT_PARENT:-0} ? 0 : 1 }"; then
echo "FAIL: ${func//_/ } got worse"
exit 1
fi
done
echo "OK"
.github/workflows/lint_workflow.yml
name: Lint workflow
on:
push:
branches:
- master
pull_request:
branches:
- master
jobs:
build:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
with:
fetch-depth: '2'
- name: Setup Python
uses: actions/setup-python@v1
with:
python-version: '3.10'
- name: Install Tox
run: pip install tox
- name: Run Pylint
run: tox -e pylint-ci
.github/workflows/release.yml
name: Release
on:
release:
types: [published]
workflow_dispatch:
jobs:
release:
uses: biolab/orange-ci-cd/.github/workflows/release.yml@master
with:
pure-python: true
secrets:
PYPI_API_TOKEN: ${{ secrets.PYPI_API_TOKEN }}
permissions:
id-token: write
.github/workflows/test.yml
name: Test
on:
push:
branches:
- master
pull_request:
branches:
- master
workflow_dispatch:
jobs:
normal:
name: Usual
uses: biolab/orange-ci-cd/.github/workflows/test-addons.yml@master
dask:
name: Dask
uses: biolab/orange-ci-cd/.github/workflows/test-job.yml@master
with:
os: ubuntu-latest
python-version: '3.10'
tox_env: dask
pyqt: '5.15.*'
.gitignore
# Build files
build
dist
*.egg-info/*
*.so
*.la
*.o
*.py[doc]
MANIFEST
.tox
.eggs
# Cython generated files
__pycache__
# Editor files
.vscode/
.idea
.idea/*
*~
.project
.pydevproject
.settings/*
.DS_Store
# Coverage reports
htmlcov/*
.coverage
# check_pylint_diff
.pylint_cache
# dask lock files
*.lock
*.dirlock
.readthedocs.yaml
# Read the Docs configuration file for Sphinx projects
# See https://docs.readthedocs.io/en/stable/config-file/v2.html for details
# Required
version: 2
# Set the OS, Python version and other tools you might need
build:
os: ubuntu-22.04
tools:
python: "3.12"
# Build documentation in the "docs/" directory with Sphinx
sphinx:
configuration: doc/conf.py
# You can configure Sphinx to use a different builder, for instance use the dirhtml builder for simpler URLs
# builder: "dirhtml"
# Fail on all warnings to avoid broken references
# fail_on_warning: true
# Optionally build your docs in additional formats such as PDF and ePub
# formats:
# - pdf
# - epub
# Optional but recommended, declare the Python requirements required
# to build your documentation
# See https://docs.readthedocs.io/en/stable/guides/reproducible-builds.html
python:
install:
- method: pip
path: .
extra_requirements:
- doc
CONTRIBUTING.md
# How to contribute
Thanks for your interest in contributing to the Orange-Spectroscopy add-on!
The following documents how to get started with a development installation, and
some preferred procedures for getting your contribution included in the project.
## Orange vs Spectroscopy Add-on
The Orange-Spectroscopy add-on extends and enhances the Orange data mining suite
to enable the analysis of spectroscopic data. Your intended enhancement might be
more generally useful or improve on something already present in the main Orange3
code, in which case you might want to work there. Get in touch by opening an issue
with your idea if you are not sure where it belongs.
## Getting Started
You will want a "fork" repository to base your changes on. We follow the
fork -> branch -> pull request -> merge workflow common on GitHub. See
[https://help.github.com/articles/fork-a-repo/](https://help.github.com/articles/fork-a-repo/)
for more details.
Once you have a git checkout of the add-on code, you will want some kind of virtual
environment to keep your development separate from your regular Orange install (and
other Python work/programs you may have). The following describes how to do this using
the Anaconda conda environment system, but the same approach also applies to virtualenvs.
Run "Anaconda Prompt" or similar and:
conda config --add channels conda-forge
conda create --name="orange-spectroscopy" orange3
conda activate orange-spectroscopy
Navigate to your orange-spectroscopy src directory, then install in development mode:
pip install -e .
If all went well, you should be able to run the tests:
python setup.py test
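To run only the spectroscopy test suite (the same module the conda recipe exercises), unittest can also be used directly:
    python -m unittest -v orangecontrib.spectroscopy.tests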
And run Orange (with spectroscopy widgets present):
orange-canvas
Each time you want to use the development version, you must activate the environment first:
conda activate orange-spectroscopy
## Communication
It's a good idea to let the team know what you're working on before embarking,
especially for a large project. We prefer to discuss this in GitHub issues, so please
file one describing your contribution before getting too far in.
For large projects in particular, consider working in a public Pull Request marked
`[WIP]` (work-in-progress) and ask for feedback along the way.
## Making Changes
* Create a topic branch from where you want to base your work.
* This is usually the master branch.
* To quickly create a topic branch based on master, run `git checkout -b
my-contribution master`. Please avoid working directly on the
`master` branch.
* Make commits of logical and atomic units.
* Check for unnecessary whitespace with `git diff --check` before committing.
* Make sure your commit messages are clear and reference the code/module you are changing.
* **Make sure you have added the necessary tests for your changes.**
* Run _all_ the tests to assure nothing else was accidentally broken.
* `python setup.py test`
* Check your code quality with pylint (Does not work on Windows at the moment, see [#188](https://github.com/Quasars/orange-spectroscopy/issues/188)):
* `python setup.py lint`
* Please add appropriate **documentation** for your new or changed feature.
## Submitting Changes
* Push your changes to a topic branch in your fork of the repository.
* Submit a pull request to the repository.
* Automated tests (the same as you ran yourself above) will be run on your branch.
* One or more core team members with the appropriate expertise will review your proposal
and merge it if appropriate.
## Thanks!
LICENSE
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published
by the Free Software Foundation, either version 3 of the License,
or (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <https://www.gnu.org/licenses/>.
MANIFEST.in
# Ref https://docs.python.org/2/distutils/sourcedist.html#commands
recursive-include orangecontrib *.ows icons/* datasets/* *.txt
recursive-include orangecontrib/spectroscopy/datasets matlab/*
recursive-include orangecontrib/spectroscopy/datasets agilent/*
recursive-include orangecontrib/spectroscopy/datasets Hermes_HDF5/*
recursive-include orangecontrib/spectroscopy/datasets emsc/*
recursive-include orangecontrib/spectroscopy/datasets NeaReaderGSF_test/*
recursive-include orangecontrib/spectroscopy/datasets renishaw_test_files/*
recursive-include orangecontrib/spectroscopy/datasets photothermal/*
recursive-include orangecontrib/spectroscopy/datasets perkinelmer/*
recursive-include orangecontrib/spectroscopy/datasets polar/*
global-exclude __pycache__
# Include it in the source package
include README.pypi
include LICENSE
README.md
[DOI](https://zenodo.org/badge/latestdoi/53335377)
Orange toolbox for spectral data analysis
=========================================
This is an add-on for [Orange3](https://orangedatamining.com/) for the analysis
of spectral data.
Installation
------------
To use this add-on, download and install the
[Quasar distribution of Orange](https://quasar.codes/), which comes with
the Orange Spectroscopy add-on pre-installed.
Alternatively, you can install it into a pre-installed [Orange](https://orange.biolab.si/)
data mining suite with the "Add-ons" dialog, which can be opened from the Options menu.
Usage
-----
After the installation, the widgets from this add-on will appear in the toolbox
under the section Spectroscopy.
For an introduction to this add-on, see the following YouTube channels:
* [Getting started with Orange](https://www.youtube.com/playlist?list=PLmNPvQr9Tf-ZSDLwOzxpvY-HrE0yv-8Fy) -
introduces data analysis with Orange
* [Spectral Orange](https://www.youtube.com/playlist?list=PLmNPvQr9Tf-bPWjDJvJBPZJ6us_KTAD5T) -
tutorials that use the Spectroscopy add-on on spectral data
For more, see the widget documentation:
* [Orange widgets](https://orange.biolab.si/toolbox/) - general data analysis widgets
* [Spectroscopy widgets](https://orange-spectroscopy.readthedocs.io/) -
widgets specific to spectroscopy
For developers
--------------
If you would like to install from a cloned git repository, run
pip install .
To register this add-on with Orange, but keep the code in the development
directory (do not copy it to Python's site-packages directory), run
pip install -e .
Further details can be found in [CONTRIBUTING.md](CONTRIBUTING.md)
README.pypi
Spectral Orange
===============
Orange add-on for spectral data analysis. Provides spectra and
hyperspectra plotting, interpolation, preprocessing (cutting, smoothing,
normalization), and integration. Supports common spectral file formats.
See [documentation](https://orange-spectroscopy.readthedocs.io/).
Features
--------
### Load and process data
* Load the spectroscopy data
* Preprocess and interpolate spectra
* Integrate spectra
* Average spectra
* Perform Fast Fourier Transform
### Visualisation
* Visually explore series of spectra
* Plot 2D maps of hyperspectra
RELEASING.md
Making a new release of Orange-Spectroscopy
===========================================
PRECHECKS
---------
1. Check state of tests on the master branch on github
2. Run tests locally.
3. Confirm that any changed requirements from setup.py were also
incorporated into conda/meta.yaml
PREPARE RELEASE
---------------
Prepare changelog:
git log 0.4.1..master --first-parent --format='%b' > changelog.txt
Review and edit the changelog.
Bump version in setup.py
Commit, start the commit message with "Release x.x.x", add the
changelog to the commit message.
git commit -a
Tag the release:
git tag x.x.x
Now, build a conda package. This will test the release quite thoroughly.
conda-build conda/
If conda-build succeeded, you may continue; otherwise throw away the
release commit and tag, fix the problems, and restart.
BUILD PACKAGES
--------------
Continue only if conda-build ended without errors!
Push the release commit to GitHub, and push the tags as well:
git push upstream master
git push upstream --tags
Create a release on GitHub. Packages should be built and uploaded to PyPi automatically.
UPLOAD PACKAGES
---------------
Package is uploaded to PyPi automatically.
Copy the built conda package to the correct static/conda/noarch folder in
the quasar.codes web page. Add the new files and then either push or
make a pull request.
git add . && git commit -a -m "Orange-Spectroscopy x.x.x"
Finally, add a new release on the orange-spectroscopy github page:
https://github.com/quasars/orange-spectroscopy
codecov.yml
comment:
layout: header, diff, changes, sunburst, uncovered
coverage:
status:
patch:
default:
target: '95'
project:
default:
target: auto
conda/meta.yaml
{% set data = load_setup_py_data() %}
{% set version = data['version'] %}
package:
name: orange-spectroscopy
version: {{ data['version'] }}
source:
git_url: ../
build:
number: 0
noarch: python
script:
- cd doc && make htmlhelp && cd ..
- python setup.py install --single-version-externally-managed --record record.txt
requirements:
build:
- python >=3.9
- sphinx
- setuptools
- recommonmark
run:
- python >=3.8
- numpy >=1.24.0
- orange3 >=3.38.0
- orange-canvas-core >=0.2.4
- orange-widget-base >=4.25.0
- scipy >=1.10.0
- scikit-learn >=1.5.1
- spectral >=0.22.3,!=0.23
- setuptools >=51.0.0
- pip >=19.3
- serverfiles >=0.2
- AnyQt >=0.2.0
- pyqtgraph >=0.13.1
- colorcet
- h5py
- extranormal3 >=0.0.3
- renishawWiRE >=0.1.8
- pillow >=9.0.0
- lmfit >=1.3.3
- bottleneck
- pebble
- agilent-format >=0.4.5
test:
imports:
- orangecontrib.spectroscopy
commands:
- test -f $PREFIX/help/orange-spectroscopy/index.html # [unix]
- python -m unittest -v orangecontrib.spectroscopy.tests
about:
home: https://github.com/quasars
license: GPL-3
license_family: GPL
summary: 'Orange-Spectroscopy'
extra:
recipe-maintainers:
- markotoplak
doc/Makefile
# Makefile for Sphinx documentation
#
# You can set these variables from the command line.
SPHINXOPTS =
SPHINXBUILD = sphinx-build
PAPER =
BUILDDIR = build
# User-friendly check for sphinx-build
ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1)
$(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from http://sphinx-doc.org/)
endif
# Internal variables.
PAPEROPT_a4 = -D latex_paper_size=a4
PAPEROPT_letter = -D latex_paper_size=letter
ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
# the i18n builder cannot share the environment and doctrees with the others
I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
.PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest coverage gettext
help:
@echo "Please use \`make ' where is one of"
@echo " html to make standalone HTML files"
@echo " dirhtml to make HTML files named index.html in directories"
@echo " singlehtml to make a single large HTML file"
@echo " pickle to make pickle files"
@echo " json to make JSON files"
@echo " htmlhelp to make HTML files and a HTML help project"
@echo " qthelp to make HTML files and a qthelp project"
@echo " applehelp to make an Apple Help Book"
@echo " devhelp to make HTML files and a Devhelp project"
@echo " epub to make an epub"
@echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter"
@echo " latexpdf to make LaTeX files and run them through pdflatex"
@echo " latexpdfja to make LaTeX files and run them through platex/dvipdfmx"
@echo " text to make text files"
@echo " man to make manual pages"
@echo " texinfo to make Texinfo files"
@echo " info to make Texinfo files and run them through makeinfo"
@echo " gettext to make PO message catalogs"
@echo " changes to make an overview of all changed/added/deprecated items"
@echo " xml to make Docutils-native XML files"
@echo " pseudoxml to make pseudoxml-XML files for display purposes"
@echo " linkcheck to check all external links for integrity"
@echo " doctest to run all doctests embedded in the documentation (if enabled)"
@echo " coverage to run coverage check of the documentation (if enabled)"
clean:
rm -rf $(BUILDDIR)/*
html:
$(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
@echo
@echo "Build finished. The HTML pages are in $(BUILDDIR)/html."
dirhtml:
$(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml
@echo
@echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml."
singlehtml:
$(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml
@echo
@echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml."
pickle:
$(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle
@echo
@echo "Build finished; now you can process the pickle files."
json:
$(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json
@echo
@echo "Build finished; now you can process the JSON files."
htmlhelp:
$(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp
@echo
@echo "Build finished; now you can run HTML Help Workshop with the" \
".hhp project file in $(BUILDDIR)/htmlhelp."
qthelp:
$(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp
@echo
@echo "Build finished; now you can run "qcollectiongenerator" with the" \
".qhcp project file in $(BUILDDIR)/qthelp, like this:"
@echo "# qcollectiongenerator $(BUILDDIR)/qthelp/Orange3ExampleAdd-on.qhcp"
@echo "To view the help file:"
@echo "# assistant -collectionFile $(BUILDDIR)/qthelp/Orange3ExampleAdd-on.qhc"
applehelp:
$(SPHINXBUILD) -b applehelp $(ALLSPHINXOPTS) $(BUILDDIR)/applehelp
@echo
@echo "Build finished. The help book is in $(BUILDDIR)/applehelp."
@echo "N.B. You won't be able to view it unless you put it in" \
"~/Library/Documentation/Help or install it in your application" \
"bundle."
devhelp:
$(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp
@echo
@echo "Build finished."
@echo "To view the help file:"
@echo "# mkdir -p $$HOME/.local/share/devhelp/Orange3ExampleAdd-on"
@echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/Orange3ExampleAdd-on"
@echo "# devhelp"
epub:
$(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub
@echo
@echo "Build finished. The epub file is in $(BUILDDIR)/epub."
latex:
$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
@echo
@echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex."
@echo "Run \`make' in that directory to run these through (pdf)latex" \
"(use \`make latexpdf' here to do that automatically)."
latexpdf:
$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
@echo "Running LaTeX files through pdflatex..."
$(MAKE) -C $(BUILDDIR)/latex all-pdf
@echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
latexpdfja:
$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
@echo "Running LaTeX files through platex and dvipdfmx..."
$(MAKE) -C $(BUILDDIR)/latex all-pdf-ja
@echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
text:
$(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text
@echo
@echo "Build finished. The text files are in $(BUILDDIR)/text."
man:
$(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man
@echo
@echo "Build finished. The manual pages are in $(BUILDDIR)/man."
texinfo:
$(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
@echo
@echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo."
@echo "Run \`make' in that directory to run these through makeinfo" \
"(use \`make info' here to do that automatically)."
info:
$(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
@echo "Running Texinfo files through makeinfo..."
make -C $(BUILDDIR)/texinfo info
@echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo."
gettext:
$(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale
@echo
@echo "Build finished. The message catalogs are in $(BUILDDIR)/locale."
changes:
$(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes
@echo
@echo "The overview file is in $(BUILDDIR)/changes."
linkcheck:
$(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck
@echo
@echo "Link check complete; look for any errors in the above output " \
"or in $(BUILDDIR)/linkcheck/output.txt."
doctest:
$(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest
@echo "Testing of doctests in the sources finished, look at the " \
"results in $(BUILDDIR)/doctest/output.txt."
coverage:
$(SPHINXBUILD) -b coverage $(ALLSPHINXOPTS) $(BUILDDIR)/coverage
@echo "Testing of coverage in the sources finished, look at the " \
"results in $(BUILDDIR)/coverage/python.txt."
xml:
$(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml
@echo
@echo "Build finished. The XML files are in $(BUILDDIR)/xml."
pseudoxml:
$(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml
@echo
@echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml."
doc/conf.py
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# Orange3 Example Add-on documentation build configuration file, created by
# sphinx-quickstart on Fri Aug 21 11:28:31 2015.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
import shlex
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.autodoc',
'recommonmark',
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
# source_suffix = ['.rst', '.md']
source_suffix = ['.rst', '.md']
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = 'Orange-Spectroscopy (Add-on)'
copyright = '2024'
author = 'Quasars'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = ''
# The full version, including alpha/beta/rc tags.
release = ''
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['build']
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'alabaster'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# " v documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Language to be used for generating the HTML full-text search index.
# Sphinx supports the following languages:
# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'
# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr'
#html_search_language = 'en'
# A dictionary with options for the search language support, empty by default.
# Now only 'ja' uses this config value
#html_search_options = {'type': 'default'}
# The name of a javascript file (relative to the configuration directory) that
# implements a search results scorer. If empty, the default will be used.
#html_search_scorer = 'scorer.js'
# Output file base name for HTML help builder.
htmlhelp_basename = 'OrangeSpectroscopyAdd-on'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
# Latex figure (float) alignment
#'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'OrangeSpectroscopyAdd-on', 'Orange-Spectroscopy Add-on Documentation',
'Biolab', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'orangespectroscopyadd-on', 'Orange-SpectroscopyAdd-on Documentation',
[author], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'Orange-Spectroscopy Add-on', 'Orange-Spectroscopy Add-on Documentation',
author, 'Orange-SpectroscopyAdd-on', 'Spectral analysis with Orange.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
def setup(app):
app.add_css_file('style.css')
# prevent smart quotes which introduce strange characters,
# which are not valid utf-8
# sphinx htmlhelp outputs cp1252 instead of utf-8 on my (Marko's) computer
smartquotes = False
html_js_files = [
'https://cdn.jsdelivr.net/npm/mathjax@3/es5/tex-mml-chtml.js'
]
doc/index.rst
Orange-Spectroscopy documentation
=================================
Orange-Spectroscopy is an add-on for Orange for the analysis of spectral data.
To use it, download and install the `Quasar distribution of Orange
<https://quasar.codes/>`_, which comes with Orange-Spectroscopy pre-installed.
You can also install it into your `Orange <https://orange.biolab.si/>`_
data mining suite with the "Add-ons" menu (Options->Add-ons).
For an introduction to Orange and widgets from this add-on, see
the following YouTube channels:
* `Getting started with Orange <https://www.youtube.com/playlist?list=PLmNPvQr9Tf-ZSDLwOzxpvY-HrE0yv-8Fy>`_ -
  introduces data analysis with Orange
* `Spectral Orange <https://www.youtube.com/playlist?list=PLmNPvQr9Tf-bPWjDJvJBPZJ6us_KTAD5T>`_ -
  tutorials that use the Spectroscopy add-on on spectral data
Widgets
-------
.. toctree::
:maxdepth: 1
widgets/spectra
widgets/hyperspectra
widgets/interpolate
widgets/preprocess-spectra
widgets/integrate-spectra
widgets/multifile
widgets/tilefile
widgets/average
widgets/interferogram-to-spectrum
widgets/reshape-map
widgets/pls
widgets/peakfit
widgets/snr
widgets/polar
Indices and tables
------------------
* :ref:`genindex`
* :ref:`modindex`
* :ref:`search`
doc/make.bat
@ECHO OFF
REM Command file for Sphinx documentation
if "%SPHINXBUILD%" == "" (
set SPHINXBUILD=sphinx-build
)
set BUILDDIR=_build
set ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% .
set I18NSPHINXOPTS=%SPHINXOPTS% .
if NOT "%PAPER%" == "" (
set ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS%
set I18NSPHINXOPTS=-D latex_paper_size=%PAPER% %I18NSPHINXOPTS%
)
if "%1" == "" goto help
if "%1" == "help" (
:help
echo.Please use `make ^<target^>` where ^<target^> is one of
echo. html to make standalone HTML files
echo. dirhtml to make HTML files named index.html in directories
echo. singlehtml to make a single large HTML file
echo. pickle to make pickle files
echo. json to make JSON files
echo. htmlhelp to make HTML files and a HTML help project
echo. qthelp to make HTML files and a qthelp project
echo. devhelp to make HTML files and a Devhelp project
echo. epub to make an epub
echo. latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter
echo. text to make text files
echo. man to make manual pages
echo. texinfo to make Texinfo files
echo. gettext to make PO message catalogs
echo. changes to make an overview over all changed/added/deprecated items
echo. xml to make Docutils-native XML files
echo. pseudoxml to make pseudoxml-XML files for display purposes
echo. linkcheck to check all external links for integrity
echo. doctest to run all doctests embedded in the documentation if enabled
echo. coverage to run coverage check of the documentation if enabled
goto end
)
if "%1" == "clean" (
for /d %%i in (%BUILDDIR%\*) do rmdir /q /s %%i
del /q /s %BUILDDIR%\*
goto end
)
REM Check if sphinx-build is available and fallback to Python version if any
%SPHINXBUILD% 2> nul
if errorlevel 9009 goto sphinx_python
goto sphinx_ok
:sphinx_python
set SPHINXBUILD=python -m sphinx.__init__
%SPHINXBUILD% 2> nul
if errorlevel 9009 (
echo.
echo.The 'sphinx-build' command was not found. Make sure you have Sphinx
echo.installed, then set the SPHINXBUILD environment variable to point
echo.to the full path of the 'sphinx-build' executable. Alternatively you
echo.may add the Sphinx directory to PATH.
echo.
echo.If you don't have Sphinx installed, grab it from
echo.http://sphinx-doc.org/
exit /b 1
)
:sphinx_ok
if "%1" == "html" (
%SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html
if errorlevel 1 exit /b 1
echo.
echo.Build finished. The HTML pages are in %BUILDDIR%/html.
goto end
)
if "%1" == "dirhtml" (
%SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% %BUILDDIR%/dirhtml
if errorlevel 1 exit /b 1
echo.
echo.Build finished. The HTML pages are in %BUILDDIR%/dirhtml.
goto end
)
if "%1" == "singlehtml" (
%SPHINXBUILD% -b singlehtml %ALLSPHINXOPTS% %BUILDDIR%/singlehtml
if errorlevel 1 exit /b 1
echo.
echo.Build finished. The HTML pages are in %BUILDDIR%/singlehtml.
goto end
)
if "%1" == "pickle" (
%SPHINXBUILD% -b pickle %ALLSPHINXOPTS% %BUILDDIR%/pickle
if errorlevel 1 exit /b 1
echo.
echo.Build finished; now you can process the pickle files.
goto end
)
if "%1" == "json" (
%SPHINXBUILD% -b json %ALLSPHINXOPTS% %BUILDDIR%/json
if errorlevel 1 exit /b 1
echo.
echo.Build finished; now you can process the JSON files.
goto end
)
if "%1" == "htmlhelp" (
%SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% %BUILDDIR%/htmlhelp
if errorlevel 1 exit /b 1
echo.
echo.Build finished; now you can run HTML Help Workshop with the ^
.hhp project file in %BUILDDIR%/htmlhelp.
goto end
)
if "%1" == "qthelp" (
%SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% %BUILDDIR%/qthelp
if errorlevel 1 exit /b 1
echo.
echo.Build finished; now you can run "qcollectiongenerator" with the ^
.qhcp project file in %BUILDDIR%/qthelp, like this:
echo.^> qcollectiongenerator %BUILDDIR%\qthelp\Orange3ExampleAdd-on.qhcp
echo.To view the help file:
echo.^> assistant -collectionFile %BUILDDIR%\qthelp\Orange3ExampleAdd-on.qhc
goto end
)
if "%1" == "devhelp" (
%SPHINXBUILD% -b devhelp %ALLSPHINXOPTS% %BUILDDIR%/devhelp
if errorlevel 1 exit /b 1
echo.
echo.Build finished.
goto end
)
if "%1" == "epub" (
%SPHINXBUILD% -b epub %ALLSPHINXOPTS% %BUILDDIR%/epub
if errorlevel 1 exit /b 1
echo.
echo.Build finished. The epub file is in %BUILDDIR%/epub.
goto end
)
if "%1" == "latex" (
%SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex
if errorlevel 1 exit /b 1
echo.
echo.Build finished; the LaTeX files are in %BUILDDIR%/latex.
goto end
)
if "%1" == "latexpdf" (
%SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex
cd %BUILDDIR%/latex
make all-pdf
cd %~dp0
echo.
echo.Build finished; the PDF files are in %BUILDDIR%/latex.
goto end
)
if "%1" == "latexpdfja" (
%SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex
cd %BUILDDIR%/latex
make all-pdf-ja
cd %~dp0
echo.
echo.Build finished; the PDF files are in %BUILDDIR%/latex.
goto end
)
if "%1" == "text" (
%SPHINXBUILD% -b text %ALLSPHINXOPTS% %BUILDDIR%/text
if errorlevel 1 exit /b 1
echo.
echo.Build finished. The text files are in %BUILDDIR%/text.
goto end
)
if "%1" == "man" (
%SPHINXBUILD% -b man %ALLSPHINXOPTS% %BUILDDIR%/man
if errorlevel 1 exit /b 1
echo.
echo.Build finished. The manual pages are in %BUILDDIR%/man.
goto end
)
if "%1" == "texinfo" (
%SPHINXBUILD% -b texinfo %ALLSPHINXOPTS% %BUILDDIR%/texinfo
if errorlevel 1 exit /b 1
echo.
echo.Build finished. The Texinfo files are in %BUILDDIR%/texinfo.
goto end
)
if "%1" == "gettext" (
%SPHINXBUILD% -b gettext %I18NSPHINXOPTS% %BUILDDIR%/locale
if errorlevel 1 exit /b 1
echo.
echo.Build finished. The message catalogs are in %BUILDDIR%/locale.
goto end
)
if "%1" == "changes" (
%SPHINXBUILD% -b changes %ALLSPHINXOPTS% %BUILDDIR%/changes
if errorlevel 1 exit /b 1
echo.
echo.The overview file is in %BUILDDIR%/changes.
goto end
)
if "%1" == "linkcheck" (
%SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% %BUILDDIR%/linkcheck
if errorlevel 1 exit /b 1
echo.
echo.Link check complete; look for any errors in the above output ^
or in %BUILDDIR%/linkcheck/output.txt.
goto end
)
if "%1" == "doctest" (
%SPHINXBUILD% -b doctest %ALLSPHINXOPTS% %BUILDDIR%/doctest
if errorlevel 1 exit /b 1
echo.
echo.Testing of doctests in the sources finished, look at the ^
results in %BUILDDIR%/doctest/output.txt.
goto end
)
if "%1" == "coverage" (
%SPHINXBUILD% -b coverage %ALLSPHINXOPTS% %BUILDDIR%/coverage
if errorlevel 1 exit /b 1
echo.
echo.Testing of coverage in the sources finished, look at the ^
results in %BUILDDIR%/coverage/python.txt.
goto end
)
if "%1" == "xml" (
%SPHINXBUILD% -b xml %ALLSPHINXOPTS% %BUILDDIR%/xml
if errorlevel 1 exit /b 1
echo.
echo.Build finished. The XML files are in %BUILDDIR%/xml.
goto end
)
if "%1" == "pseudoxml" (
%SPHINXBUILD% -b pseudoxml %ALLSPHINXOPTS% %BUILDDIR%/pseudoxml
if errorlevel 1 exit /b 1
echo.
echo.Build finished. The pseudo-XML files are in %BUILDDIR%/pseudoxml.
goto end
)
:end
doc/static/style.css
p + dl {
border-top: 2px solid gray;
border-bottom: 2px solid gray;
padding: 12px;
}
p + dl dd:last-of-type {
margin-bottom: 0;
}
p + dl dt {
font-weight: bold;
}
p + dl dt::after {
content: ":";
}
dd dt {
font-weight: bold;
display: inline-block;
}
dd dt::after {
content: ":";
}
dd dd {
display: inline;
margin: 0;
}
dd dd:after{
display: block;
content: '';
}
doc/widgets.json
[
[
"Spectroscopy",
[
{
"text": "Spectra",
"doc": "widgets/spectra.md",
"icon": "../orangecontrib/spectroscopy/widgets/icons/spectra.svg",
"background": "light-blue",
"keywords": [
"curves",
"lines",
"spectrum"
]
},
{
"text": "HyperSpectra",
"doc": "widgets/hyperspectra.md",
"icon": "../orangecontrib/spectroscopy/widgets/icons/hyper.svg",
"background": "light-blue",
"keywords": [
"image",
"spectral",
"chemical",
"imaging"
]
},
{
"text": "Spectral Series",
"doc": null,
"icon": "../orangecontrib/spectroscopy/widgets/icons/spectralseries.svg",
"background": "light-blue",
"keywords": []
},
{
"text": "Interpolate",
"doc": "widgets/interpolate.md",
"icon": "../orangecontrib/spectroscopy/widgets/icons/interpolate.svg",
"background": "light-blue",
"keywords": []
},
{
"text": "Preprocess Spectra",
"doc": "widgets/preprocess-spectra.md",
"icon": "../orangecontrib/spectroscopy/widgets/icons/preprocess.svg",
"background": "light-blue",
"keywords": []
},
{
"text": "Integrate Spectra",
"doc": "widgets/integrate-spectra.md",
"icon": "../orangecontrib/spectroscopy/widgets/icons/integrate.svg",
"background": "light-blue",
"keywords": []
},
{
"text": "Peak Fit",
"doc": "widgets/peakfit.md",
"icon": "../orangecontrib/spectroscopy/widgets/icons/peakfit.svg",
"background": "light-blue",
"keywords": []
},
{
"text": "Multifile",
"doc": "widgets/multifile.md",
"icon": "../orangecontrib/spectroscopy/widgets/icons/multifile.svg",
"background": "light-blue",
"keywords": [
"file",
"files",
"multiple"
]
},
{
"text": "Tile File",
"doc": "widgets/tilefile.md",
"icon": "../orangecontrib/spectroscopy/widgets/icons/tilefile.svg",
"background": "light-blue",
"keywords": []
},
{
"text": "Average Spectra",
"doc": "widgets/average.md",
"icon": "../orangecontrib/spectroscopy/widgets/icons/average.svg",
"background": "light-blue",
"keywords": []
},
{
"text": "Bin",
"doc": null,
"icon": "../orangecontrib/spectroscopy/widgets/icons/bin.svg",
"background": "light-blue",
"keywords": []
},
{
"text": "Interferogram to Spectrum",
"doc": "widgets/interferogram-to-spectrum.md",
"icon": "../orangecontrib/spectroscopy/widgets/icons/fft.svg",
"background": "light-blue",
"keywords": []
},
{
"text": "PLS",
"doc": null,
"icon": "../orangecontrib/spectroscopy/widgets/icons/PLS.svg",
"background": "light-blue",
"keywords": [
"partial least squares"
]
},
{
"text": "Reshape Map",
"doc": "widgets/reshape-map.md",
"icon": "../orangecontrib/spectroscopy/widgets/icons/reshape.svg",
"background": "light-blue",
"keywords": []
},
{
"text": "SNR",
"doc": "widgets/snr.md",
"icon": "../orangecontrib/spectroscopy/widgets/icons/snr.svg",
"background": "light-blue",
"keywords": []
},
{
"text": "Align Stack",
"doc": null,
"icon": "../orangecontrib/spectroscopy/widgets/icons/stackalign.svg",
"background": "light-blue",
"keywords": []
},
{
"text": "4+ Angle Polarisation",
"doc": "widgets/polar.md",
"icon": "../orangecontrib/spectroscopy/widgets/icons/unknown.svg",
"background": "light-blue",
"keywords": []
}
]
]
]
doc/widgets/SpikeRemoval.md
# Spike Removal
The **Spike Removal** preprocessor removes anomalous spikes from Raman
spectra. It works in two steps. First, it finds spectra with a large difference
between neighboring data points (the **cutoff** parameter). Only these spectra are
processed further: points whose z-score exceeds the **threshold** are marked as
spikes and interpolated (a minimal sketch of the procedure follows the parameter list).
1. **Cutoff**: only spectra with a difference between neighboring data points
higher than the cutoff are processed further.
2. **Threshold**: the z-score threshold above which points are marked as spikes.
3. **Distance**: the number of nearby data points (to the left and right) used to
interpolate over spikes.
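The sketch below illustrates this two-step procedure on a single spectrum. It is not the widget's implementation: the function and default parameter values are illustrative, and spikes are replaced by the mean of nearby non-spike points, one simple form of interpolation.
```python
import numpy as np

def despike(y, cutoff=100, threshold=7, distance=5):
    """Illustrative Whitaker-Hayes style despiking of one spectrum."""
    diff = np.diff(y)
    # Step 1: leave the spectrum alone if no neighboring points differ by more than the cutoff
    if np.max(np.abs(diff)) <= cutoff:
        return y
    # Step 2: modified z-score of the point-to-point differences
    med = np.median(diff)
    mad = np.median(np.abs(diff - med))
    z = 0.6745 * (diff - med) / mad
    spikes = np.zeros(len(y), dtype=bool)
    spikes[1:] = np.abs(z) > threshold
    # Replace each spike with the mean of nearby non-spike points
    out = y.copy()
    for i in np.flatnonzero(spikes):
        lo, hi = max(0, i - distance), min(len(y), i + distance + 1)
        neighbors = out[lo:hi][~spikes[lo:hi]]
        if neighbors.size:
            out[i] = neighbors.mean()
    return out
```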
**References**
- Python code from https://towardsdatascience.com/removing-spikes-from-raman-spectra-8a9fdda0ac22
- Whitaker, Darren A., and Kevin Hayes. “A simple algorithm for despiking Raman spectra.” Chemometrics and Intelligent Laboratory Systems 179 (2018): 82–84.
doc/widgets/als.md
# Asymmetric Least Squares Smoothing
The **ALS** tool provides three different least squares smoothing methods. The three methods
share the smoothing constant & factor as well as the number of iterations. In addition,
each method has its own variable used in the calculation.
Variable Descriptions
---------------------
- Smoothing Constant & Factor: Determines the degree of smoothing of the background.
Larger constants lead to stronger smoothing. Values can be entered as 1E+10 or 1E-10 for easy
input of very large or very small numbers.
- Weighting Deviations (Asymmetric Least Squares Smoothing): 0.5 = symmetric, <0.5: negative
deviations are more strongly suppressed
- Weighting Deviations (Asymmetrically Reweighted Penalized Least Squares smoothing):
0 < ratio < 1, smaller values allow fewer negative values
- Order of difference penalties (Adaptive):
integer indicating the order of the difference of penalties
Process Descriptions
--------------------
**Asymmetric Least Squares Smoothing (als)**
Baseline problems in instrument methods can be characterized by a superimposed
signal composed of a series of signal peaks which are either all positive or negative. This method
uses a smoother with an asymmetric weighting of deviations to get a baseline estimator.
In doing so, this preprocessor can quickly estimate and correct a baseline
while retaining the signal peak information.
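A minimal sketch of this basic asymmetric least squares iteration (after Eilers & Boelens) is shown below. It is not the widget's code: the parameter names `lam` (smoothing constant) and `p` (weighting of deviations) and their default values are illustrative.
```python
import numpy as np
from scipy import sparse
from scipy.sparse.linalg import spsolve

def als_baseline(y, lam=1e5, p=0.01, niter=10):
    """Illustrative asymmetric least squares baseline estimate."""
    n = len(y)
    # Second-order difference operator used as the smoothness penalty
    D = sparse.diags([1, -2, 1], [0, -1, -2], shape=(n, n - 2))
    w = np.ones(n)
    for _ in range(niter):
        W = sparse.spdiags(w, 0, n, n)
        # Solve (W + lam * D D^T) z = W y for the baseline z
        z = spsolve((W + lam * D @ D.T).tocsc(), w * y)
        # Asymmetric weights: points above the baseline count with weight p,
        # points below it with weight 1 - p
        w = p * (y > z) + (1 - p) * (y < z)
    return z

# corrected = y - als_baseline(y)
```
The arpls and airPLS variants described below keep the same penalized least squares smoother but update the weights differently at each iteration.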
**Asymmetrically Reweighted Penalized Least Squares smoothing (arpls)**
This method is based on an iterative reweighing of baseline estimation. If a signal is below a previously fitted baseline,
large weight is given. On the other hand, no weight or small weight is given
when a signal is above a fitted baseline as it could be assumed to be a part
of the peak. As noise is distributed above the baseline as well as below the
baseline, however, it is desirable to give the same or similar weights in
either case. For this purpose, the method uses a weighting scheme based on the
generalized logistic function: it estimates the noise level iteratively and
adjusts the weights correspondingly.
**Adaptive iteratively reweighted penalized least squares for baseline fitting (airPLS)**
Baseline drift always blurs or even swamps signals and deteriorates analytical
results, particularly in multivariate analysis. It is necessary to correct
baseline drift to perform further data analysis. Simple or modified polynomial
fitting has been found to be effective to some extent. However, such methods
require user intervention and are prone to variability, especially in low
signal-to-noise ratio environments. The adaptive iteratively
reweighted Penalized Least Squares (airPLS) algorithm doesn't require any
user intervention or prior information, such as detected peaks. It
iteratively changes the weights of the sum of squared errors (SSE) between the fitted
baseline and the original signals, and the weights are obtained adaptively
from the differences between the previously fitted baseline and the original signals.
This baseline estimator is general, fast and flexible in fitting baselines.
**Source Repository**
https://irfpy.irf.se/projects/ica/_modules/irfpy/ica/baseline.html
doc/widgets/atmcorr.md
# Atmospheric gas correction
The **Atmospheric gas correction** preprocessor is designed to remove
H2O and CO2 gas lines from spectra, based on a user-supplied
reference spectrum.
The user defines a set of spectral ranges that will be corrected. The default
ranges are 1330-2100 cm-1 and 3410-3850 cm-1 for H2O
and 2190-2480 cm-1 for CO2.
In each of these ranges (individually), the preprocessor either subtracts
(or adds) as much of the reference spectrum as necessary to maximize the
smoothness of the output (**Correct**), replaces the data with a smooth line
(**Bridge**) or does nothing (**No-op**). Ranges to be corrected must not overlap.
For each range to be **Correct**ed and for each spectrum, the amount of reference
subtracted from (or added to) the spectrum is chosen such that the sum of
squares of the first derivative (differences between consecutive points) is
minimized.
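This amount has a simple closed-form least-squares solution. The sketch below illustrates it for one spectrum and one reference over an already-extracted range; the function name is illustrative and this is not the widget's code.
```python
import numpy as np

def reference_amount(spectrum, reference):
    """Amount of reference to subtract so that the sum of squared first
    differences of the corrected spectrum is minimal (illustrative)."""
    dy = np.diff(spectrum)
    dr = np.diff(reference)
    # Minimize sum((dy - a * dr)^2) over a  ->  a = <dy, dr> / <dr, dr>
    return np.dot(dy, dr) / np.dot(dr, dr)

# corrected = spectrum - reference_amount(spectrum, reference) * reference
```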
If **Use mean of references** is unchecked and multiple reference spectra
are used, the subtracted reference is a weighted sum of all the references.
In practice, this may be a poor replacement for arbitrary linear mixes of the
references; this should be investigated and developed further.
Optionally, the corrected ranges are smoothed with a Savitzky-Golay
filter of user-defined **Savitzky-Golay window size** and polynomial order 3.
For each range to be **Bridge**d, data are replaced with a spline that merges
gradually with the data at its edges. The spline is derived from the level and
slope in a window of **Bridge base window size** points, while the transition
between data and spline follows a Tukey window with alpha=0.2.
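As a rough sketch of the bridging idea, assume a cubic Hermite spline matched to the level and slope estimated from straight-line fits on each side of the range; the widget additionally blends with a Tukey window, which is omitted here, and the names, the number of base points, and the assumption of increasing x values are illustrative.
```python
import numpy as np
from scipy.interpolate import CubicHermiteSpline

def bridge_range(x, y, lo, hi, base=10):
    """Replace y(x) on [lo, hi] with a spline matching level and slope at the edges."""
    inside = (x >= lo) & (x <= hi)
    left = np.flatnonzero(x < lo)[-base:]
    right = np.flatnonzero(x > hi)[:base]
    # Level and slope on each side, from straight-line fits over `base` points
    kl, cl = np.polyfit(x[left], y[left], 1)
    kr, cr = np.polyfit(x[right], y[right], 1)
    xl, xr = x[left][-1], x[right][0]
    spline = CubicHermiteSpline([xl, xr], [kl * xl + cl, kr * xr + cr], [kl, kr])
    out = y.copy()
    out[inside] = spline(x[inside])
    return out
```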
**Reference spectrum**
To generate a suitable reference spectrum for your machine, measure the same
sample at different levels of atmospheric gases (e.g. evacuating with clean air
versus ambient air after breathing in the room), take the difference and pass
it through a background correction such as ALS. (Suggested ALS parameters:
smoothing constant 1000, weighting deviations 0.0001.)
**Publication**
https://doi.org/10.3390/mps3020034
doc/widgets/average.md
Average Spectra
===============
Average spectra.
**Inputs**
- Data: input dataset
**Outputs**
- Averages: averaged dataset
The **Average Spectra** widget enables you to calculate average spectra. It can output the average of the entire dataset, or averages of groups defined by a Categorical feature.
![](images/Average-Spectra-stamped.png)
Use *Group by* to output averages defined by a Categorical feature.
Columns of non-Numerical data get a value only if every row in the group shares the same value; otherwise the result is Unknown.
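As a rough illustration of the group-wise averaging on a plain array (illustrative only; the widget itself operates on Orange Tables and handles non-Numerical columns as described above):
```python
import numpy as np

def group_averages(X, groups):
    """Average the rows of X (one spectrum per row) within each group label."""
    return {g: X[groups == g].mean(axis=0) for g in np.unique(groups)}

# Example: six spectra in two groups
X = np.random.rand(6, 100)
groups = np.array(["a", "a", "b", "b", "b", "a"])
means = group_averages(X, groups)   # means["a"] is the average of rows 0, 1 and 5
```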
doc/widgets/hyperspectra.md
HyperSpectra
============
Plots 2D map of hyperspectra.
**Inputs**
- Data: input dataset
**Outputs**
- Selection: spectra from selected area
- Data: dataset with information whether a spectrum was selected or not
The **HyperSpectra** widget plots hyperspectra read from a *.map* file. To use this widget with infrared spectral data, you need to transform the data with the **Reshape Map** widget.
At the top, **HyperSpectra** shows a 2D map of a slice of the spectra. At the bottom, a spectra plot is shown with the red line indicating the wavenumber slice we are observing at the top.

1. Image values: define the transformation (usually an integral) of the spectra, or choose a feature to use as values for the plot. The transformation can be an *integral from 0*, *integral from baseline*, *peak from 0*, *peak from baseline*, *closest value*, *X-value of maximum from 0* or *X-value of maximum from baseline* (a sketch of one such baseline integral follows this list).
2. The hyperspectral plot of the slice of the spectra.
- Zoom in (Z): zoom in to the area selected from the hyperspectral plot
- Zoom to fit (backspace): return to the original plot
- Select (square) (S): select an area from the plot by clicking at the top left corner and then the bottom right corner of the desired selection area
- Select (polygon) (P): select an area by circumscribing a polygon
- Save graph (Mod + S): save the visualization as a .png, .svg or .pdf file.
- Axis x: define the attribute for the x axis
- Axis y: define the attribute for the y axis
- Color: select the color for the plot
3. The spectral plot of the selected image region. It behaves like the [Spectra](spectra.md) widget.
4. Region selectors for the chosen integration method.
5. Split between image and spectral view: move it to increase the image size.
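For illustration, one common reading of *integral from baseline* over a selected range is a trapezoidal integral of the spectrum above the straight line joining the range endpoints. The sketch below follows that assumption (increasing x values, illustrative names); it is not the widget's exact implementation.
```python
import numpy as np

def integral_from_baseline(x, y, lo, hi):
    """Trapezoidal integral of y over [lo, hi] above a linear baseline (illustrative)."""
    mask = (x >= lo) & (x <= hi)
    xs, ys = x[mask], y[mask]
    # Straight baseline through the two endpoints of the selected range
    baseline = np.interp(xs, [xs[0], xs[-1]], [ys[0], ys[-1]])
    corrected = ys - baseline
    # Trapezoid rule over the baseline-corrected values
    return float(np.sum((corrected[:-1] + corrected[1:]) / 2 * np.diff(xs)))
```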
doc/widgets/images/Average-Spectra-stamped.png
(binary PNG image, contents omitted)