hass-nabucasa-0.101.0/.devcontainer.json
{
"name": "Hass-NabuCasa Dev",
"image": "mcr.microsoft.com/vscode/devcontainers/python:1-3.13",
"postCreateCommand": "python3 -m pip install -e .[test]",
"postStartCommand": "python3 -m pip install -e .",
"containerUser": "vscode",
"containerEnv": {
"GIT_EDITOR": "code --wait"
},
"customizations": {
"vscode": {
"extensions": [
"charliermarsh.ruff",
"esbenp.prettier-vscode",
"ms-python.python",
"ms-python.vscode-pylance",
"visualstudioexptteam.vscodeintellicode"
],
"settings": {
"python.pythonPath": "/usr/local/bin/python",
"python.formatting.provider": "ruff",
"editor.formatOnPaste": false,
"editor.formatOnSave": true,
"editor.formatOnType": true,
"editor.defaultFormatter": "charliermarsh.ruff",
"editor.rulers": [
88
],
"editor.codeActionsOnSave": {
"source.fixAll": "always",
"source.organizeImports": "always"
},
"files.trimTrailingWhitespace": true,
"terminal.integrated.profiles.linux": {
"zsh": {
"path": "/usr/bin/zsh"
}
},
"terminal.integrated.defaultProfile.linux": "zsh"
}
}
}
}
hass-nabucasa-0.101.0/.github/ISSUE_TEMPLATE/bug.yml
name: Report an issue with the hass-nabucasa package
description: Report an issue with the hass-nabucasa package.
body:
- type: textarea
validations:
required: true
attributes:
label: The problem
description: >-
Describe the issue you are experiencing here, to communicate to the
maintainers. Tell us what you were trying to do and what happened.
Provide a clear and concise description of what the problem is.
- type: input
validations:
required: true
attributes:
label: What version of the package has the issue?
- type: input
validations:
required: true
attributes:
label: What Python version are you using?
- type: input
attributes:
label: What was the last working version of the package?
description: >
If known, otherwise leave blank.
- type: textarea
validations:
required: true
attributes:
label: Debug logs?
description: Here you can paste debug logs for the package.
render: txt
hass-nabucasa-0.101.0/.github/ISSUE_TEMPLATE/config.yml
blank_issues_enabled: false
contact_links:
- name: I have an issue with my cloud account
url: https://nabucasa.com/support/
about: If you have issues related to your cloud account, you need to create a ticket on the Nabu Casa website.
- name: I have an issue with Google Assistant
url: https://github.com/home-assistant/core/issues/new?assignees=&labels=&projects=&template=bug_report.yml&integration_name=google_assistant&integration_link=https://www.home-assistant.io/integrations/google_assistant/
about: For issues related to Google Assistant, create a ticket on the Nabu Casa website or open an issue in the Home Assistant core repository.
- name: I have an issue with Alexa
url: https://github.com/home-assistant/core/issues/new?assignees=&labels=&projects=&template=bug_report.yml&integration_name=alexa&integration_link=https://www.home-assistant.io/integrations/alexa/
about: For issues related to Alexa, create a ticket on the Nabu Casa website or open an issue in the Home Assistant core repository.
- name: I have an issue with remote connection
url: https://nabucasa.com/support/
about: For issues related to the remote connection, create a ticket on the Nabu Casa website.
- name: I have an issue with payments
url: https://nabucasa.com/support/
about: For issues related to payments, create a ticket on the Nabu Casa website.
- name: I want to request a feature to be added
url: https://community.home-assistant.io/tags/c/feature-requests/13/cloud
about: Please use the forums for feature requests
hass-nabucasa-0.101.0/.github/dependabot.yml
version: 2
updates:
- package-ecosystem: "github-actions"
directory: "/"
schedule:
interval: daily
open-pull-requests-limit: 10
- package-ecosystem: pip
directory: "/"
schedule:
interval: weekly
open-pull-requests-limit: 10
hass-nabucasa-0.101.0/.github/release-drafter.yml
name-template: '$RESOLVED_VERSION'
tag-template: '$RESOLVED_VERSION'
categories:
- title: "⬆️ Dependencies"
collapse-after: 1
labels:
- "dependencies"
version-resolver:
default: minor
change-template: '- #$NUMBER $TITLE @$AUTHOR'
sort-direction: ascending
template: |
## What's Changed
$CHANGES
hass-nabucasa-0.101.0/.github/workflows/ci.yml
name: CI
on:
push:
branches:
- main
pull_request:
branches:
- main
concurrency:
group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
cancel-in-progress: true
jobs:
lint:
runs-on: ubuntu-latest
name: Lint
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- name: Set up Python 3.13
uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
with:
python-version: "3.13"
cache: "pip"
cache-dependency-path: pyproject.toml
- name: Install dependencies
run: |
python3 -m pip install --upgrade pip
python3 -m pip install -e .[test]
- name: Lint/test with pre-commit
run: SKIP=no-commit-to-branch pre-commit run --all-files
test:
runs-on: ubuntu-latest
name: Run tests with Python ${{ matrix.python-version }}
needs: lint
strategy:
matrix:
python-version:
- "3.13"
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
with:
python-version: ${{ matrix.python-version }}
cache: "pip"
cache-dependency-path: pyproject.toml
- name: Install dependencies
run: |
python3 -m pip install --upgrade pip
python3 -m pip install -e .[test]
- name: Run Tests
run: scripts/test
hass-nabucasa-0.101.0/.github/workflows/pythonpublish.yml
name: Upload Python Package
on:
release:
types:
- published
permissions: {}
jobs:
build:
runs-on: ubuntu-latest
steps:
- name: Make sure tag_name is not empty
run: |
if [[ "${{ github.event.release.tag_name }}" == "" ]]; then
exit 1
fi
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- name: Set up Python
uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
with:
python-version: "3.x"
- name: Install dependencies
run: |
python -m pip install --upgrade pip
pip install setuptools build
- name: Set version ${{ github.event.release.tag_name }}
run: |
sed -i "s/^version = \".*\"/version = \"${{ github.event.release.tag_name }}\"/" pyproject.toml
- name: Build ${{ github.event.release.tag_name }}
run: |
python -m build
- name: Upload dists
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
with:
name: "dist"
path: "dist/"
if-no-files-found: error
retention-days: 5
publish:
name: Upload release to PyPI
runs-on: ubuntu-latest
needs: "build"
environment:
name: release
url: https://pypi.org/p/hass-nabucasa
permissions:
id-token: write
steps:
- name: Download dists
uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0
with:
name: "dist"
path: "dist/"
- name: Publish dists to PyPI
uses: pypa/gh-action-pypi-publish@76f52bc884231f62b9a034ebfe128415bbaabdfc # v1.12.4
hass-nabucasa-0.101.0/.github/workflows/release-drafter.yml
name: Release Drafter
on:
push:
branches:
- main
jobs:
update_release_draft:
runs-on: ubuntu-latest
steps:
# Drafts your next Release notes as Pull Requests are merged into "main"
- uses: release-drafter/release-drafter@b1476f6e6eb133afa41ed8589daba6dc69b4d3f5 # v6.1.0
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
hass-nabucasa-0.101.0/.gitignore
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class
# C extensions
*.so
# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST
# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec
# Installer logs
pip-log.txt
pip-delete-this-directory.txt
# Unit test / coverage reports
htmlcov/
.tox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
.hypothesis/
.pytest_cache/
# Translations
*.mo
*.pot
# Django stuff:
*.log
local_settings.py
db.sqlite3
# Flask stuff:
instance/
.webassets-cache
# Scrapy stuff:
.scrapy
# Sphinx documentation
docs/_build/
# PyBuilder
target/
# Jupyter Notebook
.ipynb_checkpoints
# pyenv
.python-version
# celery beat schedule file
celerybeat-schedule
# SageMath parsed files
*.sage.py
# Environments
bin
.env
.venv
env/
venv/
ENV/
env.bak/
venv.bak/
# Spyder project settings
.spyderproject
.spyproject
# Rope project settings
.ropeproject
# mkdocs documentation
/site
# mypy
.mypy_cache/
# Editors
.vscode/
hass-nabucasa-0.101.0/.pre-commit-config.yaml
repos:
- repo: local
hooks:
- id: ruff-check
name: Ruff Linter
language: system
types: [python]
entry: python3 -m ruff check --fix
require_serial: true
stages: [pre-commit, pre-push, manual]
- id: ruff-format
name: Ruff Formatter
language: system
types: [python]
entry: python3 -m ruff format
require_serial: true
stages: [pre-commit, pre-push, manual]
- id: check-ast
name: Check Python AST
language: system
types: [python]
entry: check-ast
- id: check-case-conflict
name: Check for case conflicts
language: system
entry: check-case-conflict
- id: check-docstring-first
name: Check docstring is first
language: system
types: [python]
entry: check-docstring-first
- id: check-executables-have-shebangs
name: Check that executables have shebangs
language: system
types: [text, executable]
entry: check-executables-have-shebangs
stages: [pre-commit, pre-push, manual]
- id: check-json
name: Check JSON files
language: system
types: [json]
entry: check-json
- id: check-merge-conflict
name: Check for merge conflicts
language: system
types: [text]
entry: check-merge-conflict
- id: check-toml
name: Check TOML files
language: system
types: [toml]
entry: check-toml
- id: codespell
name: Check code for common misspellings
language: system
types: [text]
entry: codespell
exclude: voice_data.py
- id: detect-private-key
name: Detect Private Keys
language: system
types: [text]
entry: detect-private-key
- id: end-of-file-fixer
name: Fix End of Files
language: system
types: [text]
entry: end-of-file-fixer
stages: [pre-commit, pre-push, manual]
- id: no-commit-to-branch
name: Don't commit to the main branch
language: system
entry: no-commit-to-branch
pass_filenames: false
always_run: true
args:
- --branch=main
- id: pylint
name: Check code with pylint
entry: python3 -m pylint
language: system
types: [python]
require_serial: true
files: ^hass_nabucasa/.+\.py$
- id: trailing-whitespace
name: Trim Trailing Whitespace
language: system
types: [text]
entry: trailing-whitespace-fixer
stages: [pre-commit, pre-push, manual]
- id: mypy
name: mypy
entry: python3 -m mypy
language: system
types: [python]
require_serial: true
files: ^hass_nabucasa/.+\.py$
hass-nabucasa-0.101.0/LICENSE
GNU GENERAL PUBLIC LICENSE
Version 3, 29 June 2007
Copyright (C) 2007 Free Software Foundation, Inc. <https://fsf.org/>
Everyone is permitted to copy and distribute verbatim copies
of this license document, but changing it is not allowed.
Preamble
The GNU General Public License is a free, copyleft license for
software and other kinds of works.
The licenses for most software and other practical works are designed
to take away your freedom to share and change the works. By contrast,
the GNU General Public License is intended to guarantee your freedom to
share and change all versions of a program--to make sure it remains free
software for all its users. We, the Free Software Foundation, use the
GNU General Public License for most of our software; it applies also to
any other work released this way by its authors. You can apply it to
your programs, too.
When we speak of free software, we are referring to freedom, not
price. Our General Public Licenses are designed to make sure that you
have the freedom to distribute copies of free software (and charge for
them if you wish), that you receive source code or can get it if you
want it, that you can change the software or use pieces of it in new
free programs, and that you know you can do these things.
To protect your rights, we need to prevent others from denying you
these rights or asking you to surrender the rights. Therefore, you have
certain responsibilities if you distribute copies of the software, or if
you modify it: responsibilities to respect the freedom of others.
For example, if you distribute copies of such a program, whether
gratis or for a fee, you must pass on to the recipients the same
freedoms that you received. You must make sure that they, too, receive
or can get the source code. And you must show them these terms so they
know their rights.
Developers that use the GNU GPL protect your rights with two steps:
(1) assert copyright on the software, and (2) offer you this License
giving you legal permission to copy, distribute and/or modify it.
For the developers' and authors' protection, the GPL clearly explains
that there is no warranty for this free software. For both users' and
authors' sake, the GPL requires that modified versions be marked as
changed, so that their problems will not be attributed erroneously to
authors of previous versions.
Some devices are designed to deny users access to install or run
modified versions of the software inside them, although the manufacturer
can do so. This is fundamentally incompatible with the aim of
protecting users' freedom to change the software. The systematic
pattern of such abuse occurs in the area of products for individuals to
use, which is precisely where it is most unacceptable. Therefore, we
have designed this version of the GPL to prohibit the practice for those
products. If such problems arise substantially in other domains, we
stand ready to extend this provision to those domains in future versions
of the GPL, as needed to protect the freedom of users.
Finally, every program is threatened constantly by software patents.
States should not allow patents to restrict development and use of
software on general-purpose computers, but in those that do, we wish to
avoid the special danger that patents applied to a free program could
make it effectively proprietary. To prevent this, the GPL assures that
patents cannot be used to render the program non-free.
The precise terms and conditions for copying, distribution and
modification follow.
TERMS AND CONDITIONS
0. Definitions.
"This License" refers to version 3 of the GNU General Public License.
"Copyright" also means copyright-like laws that apply to other kinds of
works, such as semiconductor masks.
"The Program" refers to any copyrightable work licensed under this
License. Each licensee is addressed as "you". "Licensees" and
"recipients" may be individuals or organizations.
To "modify" a work means to copy from or adapt all or part of the work
in a fashion requiring copyright permission, other than the making of an
exact copy. The resulting work is called a "modified version" of the
earlier work or a work "based on" the earlier work.
A "covered work" means either the unmodified Program or a work based
on the Program.
To "propagate" a work means to do anything with it that, without
permission, would make you directly or secondarily liable for
infringement under applicable copyright law, except executing it on a
computer or modifying a private copy. Propagation includes copying,
distribution (with or without modification), making available to the
public, and in some countries other activities as well.
To "convey" a work means any kind of propagation that enables other
parties to make or receive copies. Mere interaction with a user through
a computer network, with no transfer of a copy, is not conveying.
An interactive user interface displays "Appropriate Legal Notices"
to the extent that it includes a convenient and prominently visible
feature that (1) displays an appropriate copyright notice, and (2)
tells the user that there is no warranty for the work (except to the
extent that warranties are provided), that licensees may convey the
work under this License, and how to view a copy of this License. If
the interface presents a list of user commands or options, such as a
menu, a prominent item in the list meets this criterion.
1. Source Code.
The "source code" for a work means the preferred form of the work
for making modifications to it. "Object code" means any non-source
form of a work.
A "Standard Interface" means an interface that either is an official
standard defined by a recognized standards body, or, in the case of
interfaces specified for a particular programming language, one that
is widely used among developers working in that language.
The "System Libraries" of an executable work include anything, other
than the work as a whole, that (a) is included in the normal form of
packaging a Major Component, but which is not part of that Major
Component, and (b) serves only to enable use of the work with that
Major Component, or to implement a Standard Interface for which an
implementation is available to the public in source code form. A
"Major Component", in this context, means a major essential component
(kernel, window system, and so on) of the specific operating system
(if any) on which the executable work runs, or a compiler used to
produce the work, or an object code interpreter used to run it.
The "Corresponding Source" for a work in object code form means all
the source code needed to generate, install, and (for an executable
work) run the object code and to modify the work, including scripts to
control those activities. However, it does not include the work's
System Libraries, or general-purpose tools or generally available free
programs which are used unmodified in performing those activities but
which are not part of the work. For example, Corresponding Source
includes interface definition files associated with source files for
the work, and the source code for shared libraries and dynamically
linked subprograms that the work is specifically designed to require,
such as by intimate data communication or control flow between those
subprograms and other parts of the work.
The Corresponding Source need not include anything that users
can regenerate automatically from other parts of the Corresponding
Source.
The Corresponding Source for a work in source code form is that
same work.
2. Basic Permissions.
All rights granted under this License are granted for the term of
copyright on the Program, and are irrevocable provided the stated
conditions are met. This License explicitly affirms your unlimited
permission to run the unmodified Program. The output from running a
covered work is covered by this License only if the output, given its
content, constitutes a covered work. This License acknowledges your
rights of fair use or other equivalent, as provided by copyright law.
You may make, run and propagate covered works that you do not
convey, without conditions so long as your license otherwise remains
in force. You may convey covered works to others for the sole purpose
of having them make modifications exclusively for you, or provide you
with facilities for running those works, provided that you comply with
the terms of this License in conveying all material for which you do
not control copyright. Those thus making or running the covered works
for you must do so exclusively on your behalf, under your direction
and control, on terms that prohibit them from making any copies of
your copyrighted material outside their relationship with you.
Conveying under any other circumstances is permitted solely under
the conditions stated below. Sublicensing is not allowed; section 10
makes it unnecessary.
3. Protecting Users' Legal Rights From Anti-Circumvention Law.
No covered work shall be deemed part of an effective technological
measure under any applicable law fulfilling obligations under article
11 of the WIPO copyright treaty adopted on 20 December 1996, or
similar laws prohibiting or restricting circumvention of such
measures.
When you convey a covered work, you waive any legal power to forbid
circumvention of technological measures to the extent such circumvention
is effected by exercising rights under this License with respect to
the covered work, and you disclaim any intention to limit operation or
modification of the work as a means of enforcing, against the work's
users, your or third parties' legal rights to forbid circumvention of
technological measures.
4. Conveying Verbatim Copies.
You may convey verbatim copies of the Program's source code as you
receive it, in any medium, provided that you conspicuously and
appropriately publish on each copy an appropriate copyright notice;
keep intact all notices stating that this License and any
non-permissive terms added in accord with section 7 apply to the code;
keep intact all notices of the absence of any warranty; and give all
recipients a copy of this License along with the Program.
You may charge any price or no price for each copy that you convey,
and you may offer support or warranty protection for a fee.
5. Conveying Modified Source Versions.
You may convey a work based on the Program, or the modifications to
produce it from the Program, in the form of source code under the
terms of section 4, provided that you also meet all of these conditions:
a) The work must carry prominent notices stating that you modified
it, and giving a relevant date.
b) The work must carry prominent notices stating that it is
released under this License and any conditions added under section
7. This requirement modifies the requirement in section 4 to
"keep intact all notices".
c) You must license the entire work, as a whole, under this
License to anyone who comes into possession of a copy. This
License will therefore apply, along with any applicable section 7
additional terms, to the whole of the work, and all its parts,
regardless of how they are packaged. This License gives no
permission to license the work in any other way, but it does not
invalidate such permission if you have separately received it.
d) If the work has interactive user interfaces, each must display
Appropriate Legal Notices; however, if the Program has interactive
interfaces that do not display Appropriate Legal Notices, your
work need not make them do so.
A compilation of a covered work with other separate and independent
works, which are not by their nature extensions of the covered work,
and which are not combined with it such as to form a larger program,
in or on a volume of a storage or distribution medium, is called an
"aggregate" if the compilation and its resulting copyright are not
used to limit the access or legal rights of the compilation's users
beyond what the individual works permit. Inclusion of a covered work
in an aggregate does not cause this License to apply to the other
parts of the aggregate.
6. Conveying Non-Source Forms.
You may convey a covered work in object code form under the terms
of sections 4 and 5, provided that you also convey the
machine-readable Corresponding Source under the terms of this License,
in one of these ways:
a) Convey the object code in, or embodied in, a physical product
(including a physical distribution medium), accompanied by the
Corresponding Source fixed on a durable physical medium
customarily used for software interchange.
b) Convey the object code in, or embodied in, a physical product
(including a physical distribution medium), accompanied by a
written offer, valid for at least three years and valid for as
long as you offer spare parts or customer support for that product
model, to give anyone who possesses the object code either (1) a
copy of the Corresponding Source for all the software in the
product that is covered by this License, on a durable physical
medium customarily used for software interchange, for a price no
more than your reasonable cost of physically performing this
conveying of source, or (2) access to copy the
Corresponding Source from a network server at no charge.
c) Convey individual copies of the object code with a copy of the
written offer to provide the Corresponding Source. This
alternative is allowed only occasionally and noncommercially, and
only if you received the object code with such an offer, in accord
with subsection 6b.
d) Convey the object code by offering access from a designated
place (gratis or for a charge), and offer equivalent access to the
Corresponding Source in the same way through the same place at no
further charge. You need not require recipients to copy the
Corresponding Source along with the object code. If the place to
copy the object code is a network server, the Corresponding Source
may be on a different server (operated by you or a third party)
that supports equivalent copying facilities, provided you maintain
clear directions next to the object code saying where to find the
Corresponding Source. Regardless of what server hosts the
Corresponding Source, you remain obligated to ensure that it is
available for as long as needed to satisfy these requirements.
e) Convey the object code using peer-to-peer transmission, provided
you inform other peers where the object code and Corresponding
Source of the work are being offered to the general public at no
charge under subsection 6d.
A separable portion of the object code, whose source code is excluded
from the Corresponding Source as a System Library, need not be
included in conveying the object code work.
A "User Product" is either (1) a "consumer product", which means any
tangible personal property which is normally used for personal, family,
or household purposes, or (2) anything designed or sold for incorporation
into a dwelling. In determining whether a product is a consumer product,
doubtful cases shall be resolved in favor of coverage. For a particular
product received by a particular user, "normally used" refers to a
typical or common use of that class of product, regardless of the status
of the particular user or of the way in which the particular user
actually uses, or expects or is expected to use, the product. A product
is a consumer product regardless of whether the product has substantial
commercial, industrial or non-consumer uses, unless such uses represent
the only significant mode of use of the product.
"Installation Information" for a User Product means any methods,
procedures, authorization keys, or other information required to install
and execute modified versions of a covered work in that User Product from
a modified version of its Corresponding Source. The information must
suffice to ensure that the continued functioning of the modified object
code is in no case prevented or interfered with solely because
modification has been made.
If you convey an object code work under this section in, or with, or
specifically for use in, a User Product, and the conveying occurs as
part of a transaction in which the right of possession and use of the
User Product is transferred to the recipient in perpetuity or for a
fixed term (regardless of how the transaction is characterized), the
Corresponding Source conveyed under this section must be accompanied
by the Installation Information. But this requirement does not apply
if neither you nor any third party retains the ability to install
modified object code on the User Product (for example, the work has
been installed in ROM).
The requirement to provide Installation Information does not include a
requirement to continue to provide support service, warranty, or updates
for a work that has been modified or installed by the recipient, or for
the User Product in which it has been modified or installed. Access to a
network may be denied when the modification itself materially and
adversely affects the operation of the network or violates the rules and
protocols for communication across the network.
Corresponding Source conveyed, and Installation Information provided,
in accord with this section must be in a format that is publicly
documented (and with an implementation available to the public in
source code form), and must require no special password or key for
unpacking, reading or copying.
7. Additional Terms.
"Additional permissions" are terms that supplement the terms of this
License by making exceptions from one or more of its conditions.
Additional permissions that are applicable to the entire Program shall
be treated as though they were included in this License, to the extent
that they are valid under applicable law. If additional permissions
apply only to part of the Program, that part may be used separately
under those permissions, but the entire Program remains governed by
this License without regard to the additional permissions.
When you convey a copy of a covered work, you may at your option
remove any additional permissions from that copy, or from any part of
it. (Additional permissions may be written to require their own
removal in certain cases when you modify the work.) You may place
additional permissions on material, added by you to a covered work,
for which you have or can give appropriate copyright permission.
Notwithstanding any other provision of this License, for material you
add to a covered work, you may (if authorized by the copyright holders of
that material) supplement the terms of this License with terms:
a) Disclaiming warranty or limiting liability differently from the
terms of sections 15 and 16 of this License; or
b) Requiring preservation of specified reasonable legal notices or
author attributions in that material or in the Appropriate Legal
Notices displayed by works containing it; or
c) Prohibiting misrepresentation of the origin of that material, or
requiring that modified versions of such material be marked in
reasonable ways as different from the original version; or
d) Limiting the use for publicity purposes of names of licensors or
authors of the material; or
e) Declining to grant rights under trademark law for use of some
trade names, trademarks, or service marks; or
f) Requiring indemnification of licensors and authors of that
material by anyone who conveys the material (or modified versions of
it) with contractual assumptions of liability to the recipient, for
any liability that these contractual assumptions directly impose on
those licensors and authors.
All other non-permissive additional terms are considered "further
restrictions" within the meaning of section 10. If the Program as you
received it, or any part of it, contains a notice stating that it is
governed by this License along with a term that is a further
restriction, you may remove that term. If a license document contains
a further restriction but permits relicensing or conveying under this
License, you may add to a covered work material governed by the terms
of that license document, provided that the further restriction does
not survive such relicensing or conveying.
If you add terms to a covered work in accord with this section, you
must place, in the relevant source files, a statement of the
additional terms that apply to those files, or a notice indicating
where to find the applicable terms.
Additional terms, permissive or non-permissive, may be stated in the
form of a separately written license, or stated as exceptions;
the above requirements apply either way.
8. Termination.
You may not propagate or modify a covered work except as expressly
provided under this License. Any attempt otherwise to propagate or
modify it is void, and will automatically terminate your rights under
this License (including any patent licenses granted under the third
paragraph of section 11).
However, if you cease all violation of this License, then your
license from a particular copyright holder is reinstated (a)
provisionally, unless and until the copyright holder explicitly and
finally terminates your license, and (b) permanently, if the copyright
holder fails to notify you of the violation by some reasonable means
prior to 60 days after the cessation.
Moreover, your license from a particular copyright holder is
reinstated permanently if the copyright holder notifies you of the
violation by some reasonable means, this is the first time you have
received notice of violation of this License (for any work) from that
copyright holder, and you cure the violation prior to 30 days after
your receipt of the notice.
Termination of your rights under this section does not terminate the
licenses of parties who have received copies or rights from you under
this License. If your rights have been terminated and not permanently
reinstated, you do not qualify to receive new licenses for the same
material under section 10.
9. Acceptance Not Required for Having Copies.
You are not required to accept this License in order to receive or
run a copy of the Program. Ancillary propagation of a covered work
occurring solely as a consequence of using peer-to-peer transmission
to receive a copy likewise does not require acceptance. However,
nothing other than this License grants you permission to propagate or
modify any covered work. These actions infringe copyright if you do
not accept this License. Therefore, by modifying or propagating a
covered work, you indicate your acceptance of this License to do so.
10. Automatic Licensing of Downstream Recipients.
Each time you convey a covered work, the recipient automatically
receives a license from the original licensors, to run, modify and
propagate that work, subject to this License. You are not responsible
for enforcing compliance by third parties with this License.
An "entity transaction" is a transaction transferring control of an
organization, or substantially all assets of one, or subdividing an
organization, or merging organizations. If propagation of a covered
work results from an entity transaction, each party to that
transaction who receives a copy of the work also receives whatever
licenses to the work the party's predecessor in interest had or could
give under the previous paragraph, plus a right to possession of the
Corresponding Source of the work from the predecessor in interest, if
the predecessor has it or can get it with reasonable efforts.
You may not impose any further restrictions on the exercise of the
rights granted or affirmed under this License. For example, you may
not impose a license fee, royalty, or other charge for exercise of
rights granted under this License, and you may not initiate litigation
(including a cross-claim or counterclaim in a lawsuit) alleging that
any patent claim is infringed by making, using, selling, offering for
sale, or importing the Program or any portion of it.
11. Patents.
A "contributor" is a copyright holder who authorizes use under this
License of the Program or a work on which the Program is based. The
work thus licensed is called the contributor's "contributor version".
A contributor's "essential patent claims" are all patent claims
owned or controlled by the contributor, whether already acquired or
hereafter acquired, that would be infringed by some manner, permitted
by this License, of making, using, or selling its contributor version,
but do not include claims that would be infringed only as a
consequence of further modification of the contributor version. For
purposes of this definition, "control" includes the right to grant
patent sublicenses in a manner consistent with the requirements of
this License.
Each contributor grants you a non-exclusive, worldwide, royalty-free
patent license under the contributor's essential patent claims, to
make, use, sell, offer for sale, import and otherwise run, modify and
propagate the contents of its contributor version.
In the following three paragraphs, a "patent license" is any express
agreement or commitment, however denominated, not to enforce a patent
(such as an express permission to practice a patent or covenant not to
sue for patent infringement). To "grant" such a patent license to a
party means to make such an agreement or commitment not to enforce a
patent against the party.
If you convey a covered work, knowingly relying on a patent license,
and the Corresponding Source of the work is not available for anyone
to copy, free of charge and under the terms of this License, through a
publicly available network server or other readily accessible means,
then you must either (1) cause the Corresponding Source to be so
available, or (2) arrange to deprive yourself of the benefit of the
patent license for this particular work, or (3) arrange, in a manner
consistent with the requirements of this License, to extend the patent
license to downstream recipients. "Knowingly relying" means you have
actual knowledge that, but for the patent license, your conveying the
covered work in a country, or your recipient's use of the covered work
in a country, would infringe one or more identifiable patents in that
country that you have reason to believe are valid.
If, pursuant to or in connection with a single transaction or
arrangement, you convey, or propagate by procuring conveyance of, a
covered work, and grant a patent license to some of the parties
receiving the covered work authorizing them to use, propagate, modify
or convey a specific copy of the covered work, then the patent license
you grant is automatically extended to all recipients of the covered
work and works based on it.
A patent license is "discriminatory" if it does not include within
the scope of its coverage, prohibits the exercise of, or is
conditioned on the non-exercise of one or more of the rights that are
specifically granted under this License. You may not convey a covered
work if you are a party to an arrangement with a third party that is
in the business of distributing software, under which you make payment
to the third party based on the extent of your activity of conveying
the work, and under which the third party grants, to any of the
parties who would receive the covered work from you, a discriminatory
patent license (a) in connection with copies of the covered work
conveyed by you (or copies made from those copies), or (b) primarily
for and in connection with specific products or compilations that
contain the covered work, unless you entered into that arrangement,
or that patent license was granted, prior to 28 March 2007.
Nothing in this License shall be construed as excluding or limiting
any implied license or other defenses to infringement that may
otherwise be available to you under applicable patent law.
12. No Surrender of Others' Freedom.
If conditions are imposed on you (whether by court order, agreement or
otherwise) that contradict the conditions of this License, they do not
excuse you from the conditions of this License. If you cannot convey a
covered work so as to satisfy simultaneously your obligations under this
License and any other pertinent obligations, then as a consequence you may
not convey it at all. For example, if you agree to terms that obligate you
to collect a royalty for further conveying from those to whom you convey
the Program, the only way you could satisfy both those terms and this
License would be to refrain entirely from conveying the Program.
13. Use with the GNU Affero General Public License.
Notwithstanding any other provision of this License, you have
permission to link or combine any covered work with a work licensed
under version 3 of the GNU Affero General Public License into a single
combined work, and to convey the resulting work. The terms of this
License will continue to apply to the part which is the covered work,
but the special requirements of the GNU Affero General Public License,
section 13, concerning interaction through a network will apply to the
combination as such.
14. Revised Versions of this License.
The Free Software Foundation may publish revised and/or new versions of
the GNU General Public License from time to time. Such new versions will
be similar in spirit to the present version, but may differ in detail to
address new problems or concerns.
Each version is given a distinguishing version number. If the
Program specifies that a certain numbered version of the GNU General
Public License "or any later version" applies to it, you have the
option of following the terms and conditions either of that numbered
version or of any later version published by the Free Software
Foundation. If the Program does not specify a version number of the
GNU General Public License, you may choose any version ever published
by the Free Software Foundation.
If the Program specifies that a proxy can decide which future
versions of the GNU General Public License can be used, that proxy's
public statement of acceptance of a version permanently authorizes you
to choose that version for the Program.
Later license versions may give you additional or different
permissions. However, no additional obligations are imposed on any
author or copyright holder as a result of your choosing to follow a
later version.
15. Disclaimer of Warranty.
THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
16. Limitation of Liability.
IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
SUCH DAMAGES.
17. Interpretation of Sections 15 and 16.
If the disclaimer of warranty and limitation of liability provided
above cannot be given local legal effect according to their terms,
reviewing courts shall apply local law that most closely approximates
an absolute waiver of all civil liability in connection with the
Program, unless a warranty or assumption of liability accompanies a
copy of the Program in return for a fee.
END OF TERMS AND CONDITIONS
How to Apply These Terms to Your New Programs
If you develop a new program, and you want it to be of the greatest
possible use to the public, the best way to achieve this is to make it
free software which everyone can redistribute and change under these terms.
To do so, attach the following notices to the program. It is safest
to attach them to the start of each source file to most effectively
state the exclusion of warranty; and each file should have at least
the "copyright" line and a pointer to where the full notice is found.
<one line to give the program's name and a brief idea of what it does.>
Copyright (C) <year>  <name of author>
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <https://www.gnu.org/licenses/>.
Also add information on how to contact you by electronic and paper mail.
If the program does terminal interaction, make it output a short
notice like this when it starts in an interactive mode:
<program>  Copyright (C) <year>  <name of author>
This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
This is free software, and you are welcome to redistribute it
under certain conditions; type `show c' for details.
The hypothetical commands `show w' and `show c' should show the appropriate
parts of the General Public License. Of course, your program's commands
might be different; for a GUI interface, you would use an "about box".
You should also get your employer (if you work as a programmer) or school,
if any, to sign a "copyright disclaimer" for the program, if necessary.
For more information on this, and how to apply and follow the GNU GPL, see
<https://www.gnu.org/licenses/>.
The GNU General Public License does not permit incorporating your program
into proprietary programs. If your program is a subroutine library, you
may consider it more useful to permit linking proprietary applications with
the library. If this is what you want to do, use the GNU Lesser General
Public License instead of this License. But first, please read
<https://www.gnu.org/licenses/why-not-lgpl.html>.
hass-nabucasa-0.101.0/README.md
# hass-nabucasa
`hass-nabucasa` is the underlying library that enables Home Assistant to connect to and utilize Nabu Casa cloud services.
This library handles a range of cloud-related functionality including:
- Authentication and account management
- Remote UI connections via [SniTun](https://www.github.com/NabuCasa/snitun)
- API interactions with Nabu Casa cloud services
- Voice processing capabilities
- ACME certificate management
- Google Assistant and Alexa integration
- Cloud webhook management
- Cloud file storage and management
## Installation
```bash
python3 -m pip install hass-nabucasa==x.y.z
```
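Once installed, the library is driven through the `Cloud` object, which is constructed with your own `CloudClient` implementation. The snippet below is an illustrative sketch only, not code from this repository: `MyCloudClient` and its module are hypothetical stand-ins for a concrete `CloudClient` subclass that supplies the aiohttp session, event loop, base path, and lifecycle callbacks.
```python
# Minimal usage sketch. Assumption: MyCloudClient is your own concrete
# CloudClient subclass (websession, loop, base_path, lifecycle callbacks).
import asyncio

from hass_nabucasa import Cloud
from my_app.cloud_client import MyCloudClient  # hypothetical module


async def main() -> None:
    cloud = Cloud(MyCloudClient(), mode="production")

    # Load previously stored tokens and start background tasks if possible.
    await cloud.initialize()

    if not cloud.is_logged_in:
        await cloud.login("user@example.com", "your-password")

    print("Logged in:", cloud.is_logged_in)
    print("Connected:", cloud.is_connected)


asyncio.run(main())
```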
## Release process
`hass-nabucasa` is released through GitHub and published to [PyPI].
The release process is automated and triggered through the GitHub UI:
1. Go to the [GitHub Releases page][releases].
2. Find the draft release created by release-drafter.
3. Verify that the tag and name are the expected ones (e.g., `1.2.3`)
4. Publish the release (and set it as the latest release)
Once published, GitHub Actions workflows automatically:
- Build the package
- Publish to [PyPI]
There is no need to manually update version information in the codebase.
## Development and contributing
### Development environment
We recommend using Visual Studio Code with the official Dev Container extension for development. Opening the repository in the dev container automatically sets up a consistent, pre-configured environment with all required dependencies installed.
### Running tests
```bash
scripts/test
```
### Code quality
This project uses pre-commit hooks for code quality checks:
```bash
scripts/lint
```
### Updating voice data
To update the voice data with the latest from Azure:
```bash
python3 -m scripts.update_voice_data
```
You will need to fetch an Azure TTS token. You can generate one by running the [sample key generator server](https://github.com/Azure-Samples/cognitive-services-speech-sdk/tree/master/samples/js/browser/server) and visiting `http://localhost:3001/api/get-speech-token`.
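If you prefer to fetch the token from a script instead of the browser, a small sketch like the following works, assuming the sample server above is running locally on port 3001:
```python
# Sketch: fetch an Azure TTS token from the locally running sample
# key generator server (assumes it listens on http://localhost:3001).
from urllib.request import urlopen

with urlopen("http://localhost:3001/api/get-speech-token") as response:
    print(response.read().decode("utf-8"))
```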
[releases]: https://github.com/NabuCasa/hass-nabucasa/releases
[PyPI]: https://pypi.org/project/hass-nabucasa/
hass-nabucasa-0.101.0/hass_nabucasa/__init__.py
"""Component to integrate the Home Assistant cloud."""
from __future__ import annotations
import asyncio
from collections.abc import Awaitable, Callable, Mapping
from datetime import datetime, timedelta
import json
import logging
from pathlib import Path
import shutil
from typing import Any, Generic, Literal, TypeVar
from aiohttp import ClientSession
from atomicwrites import atomic_write
import jwt
from .account_api import AccountApi
from .auth import CloudError, CognitoAuth
from .client import CloudClient
from .cloud_api import async_subscription_info
from .cloudhooks import Cloudhooks
from .const import (
ACCOUNT_URL,
CONFIG_DIR,
DEFAULT_SERVERS,
DEFAULT_VALUES,
MODE_DEV, # noqa: F401
STATE_CONNECTED,
SubscriptionReconnectionReason,
)
from .files import Files
from .google_report_state import GoogleReportState
from .ice_servers import IceServers
from .instance_api import (
InstanceApi,
InstanceConnectionDetails,
)
from .iot import CloudIoT
from .remote import RemoteUI
from .utils import UTC, gather_callbacks, parse_date, utcnow
from .voice import Voice
from .voice_api import VoiceApi
_ClientT = TypeVar("_ClientT", bound=CloudClient)
_LOGGER = logging.getLogger(__name__)
class AlreadyConnectedError(CloudError):
"""Raised when a connection is already established."""
def __init__(self, *, details: InstanceConnectionDetails) -> None:
"""Initialize an already connected error."""
super().__init__("instance_already_connected")
self.details = details
class Cloud(Generic[_ClientT]):
"""Store the configuration of the cloud connection."""
def __init__(
self,
client: _ClientT,
mode: Literal["development", "production"],
*,
cognito_client_id: str | None = None,
region: str | None = None,
user_pool_id: str | None = None,
account_link_server: str | None = None,
accounts_server: str | None = None,
acme_server: str | None = None,
cloudhook_server: str | None = None,
relayer_server: str | None = None,
remotestate_server: str | None = None,
servicehandlers_server: str | None = None,
**kwargs: Any, # noqa: ARG002
) -> None:
"""Create an instance of Cloud."""
self.client = client
self.mode = mode
self._on_initialized: list[Callable[[], Awaitable[None]]] = []
self._on_start: list[Callable[[], Awaitable[None]]] = []
self._on_stop: list[Callable[[], Awaitable[None]]] = []
self._init_task: asyncio.Task | None = None
self._subscription_reconnection_task: asyncio.Task | None = None
self.access_token: str | None = None
self.id_token: str | None = None
self.refresh_token: str | None = None
self.started: bool | None = None
# Set reference
self.client.cloud = self
_values = DEFAULT_VALUES[mode]
_servers = DEFAULT_SERVERS[mode]
self.cognito_client_id = _values.get("cognito_client_id", cognito_client_id)
self.region = _values.get("region", region)
self.user_pool_id = _values.get("user_pool_id", user_pool_id)
self.account_link_server = _servers.get("account_link", account_link_server)
self.accounts_server = _servers.get("accounts", accounts_server)
self.acme_server = _servers.get("acme", acme_server)
self.cloudhook_server = _servers.get("cloudhook", cloudhook_server)
self.relayer_server = _servers.get("relayer", relayer_server)
self.remotestate_server = _servers.get("remotestate", remotestate_server)
self.servicehandlers_server = _servers.get(
"servicehandlers",
servicehandlers_server,
)
# Needs to be set up before other components
self.iot = CloudIoT(self)
# Set up the rest of the components
self.account = AccountApi(self)
self.auth = CognitoAuth(self)
self.cloudhooks = Cloudhooks(self)
self.files = Files(self)
self.google_report_state = GoogleReportState(self)
self.ice_servers = IceServers(self)
self.instance = InstanceApi(self)
self.remote = RemoteUI(self)
self.voice = Voice(self)
self.voice_api = VoiceApi(self)
@property
def is_logged_in(self) -> bool:
"""Get if cloud is logged in."""
return self.id_token is not None
@property
def is_connected(self) -> bool:
"""Return True if we are connected."""
return self.iot.state == STATE_CONNECTED
@property
def websession(self) -> ClientSession:
"""Return websession for connections."""
return self.client.websession
@property
def subscription_expired(self) -> bool:
"""Return a boolean if the subscription has expired."""
return utcnow() > self.expiration_date + timedelta(days=7)
@property
def valid_subscription(self) -> bool:
"""Return True if the subscription is valid."""
return (
self._subscription_reconnection_task is None
and not self.subscription_expired
)
@property
def expiration_date(self) -> datetime:
"""Return the subscription expiration as a UTC datetime object."""
if (parsed_date := parse_date(self.claims["custom:sub-exp"])) is None:
raise ValueError(
f"Invalid expiration date ({self.claims['custom:sub-exp']})",
)
return datetime.combine(parsed_date, datetime.min.time()).replace(tzinfo=UTC)
@property
def username(self) -> str:
"""Return the subscription username."""
return self.claims["cognito:username"]
@property
def claims(self) -> Mapping[str, str]:
"""Return the claims from the id token."""
return self._decode_claims(str(self.id_token))
@property
def user_info_path(self) -> Path:
"""Get path to the stored auth."""
return self.path(f"{self.mode}_auth.json")
async def ensure_not_connected(
self,
*,
access_token: str,
) -> None:
"""Raise AlreadyConnectedError if already connected."""
try:
connection = await self.instance.connection(
skip_token_check=True,
access_token=access_token,
)
except CloudError:
return
if connection["connected"]:
raise AlreadyConnectedError(details=connection["details"])
async def update_token(
self,
id_token: str,
access_token: str,
refresh_token: str | None = None,
) -> asyncio.Task | None:
"""Update the id and access token."""
self.id_token = id_token
self.access_token = access_token
if refresh_token is not None:
self.refresh_token = refresh_token
await self.run_executor(self._write_user_info)
if self.started is None:
return None
if not self.started and not self.subscription_expired:
self.started = True
return asyncio.create_task(self._start())
if self.started and self.subscription_expired:
self.started = False
await self.stop()
if self.subscription_expired:
self.async_initialize_subscription_reconnection_handler(
SubscriptionReconnectionReason.SUBSCRIPTION_EXPIRED
)
return None
def register_on_initialized(
self,
on_initialized_cb: Callable[[], Awaitable[None]],
) -> None:
"""Register an async on_initialized callback.
on_initialized callbacks are called after all on_start callbacks.
"""
self._on_initialized.append(on_initialized_cb)
def register_on_start(self, on_start_cb: Callable[[], Awaitable[None]]) -> None:
"""Register an async on_start callback."""
self._on_start.append(on_start_cb)
def register_on_stop(self, on_stop_cb: Callable[[], Awaitable[None]]) -> None:
"""Register an async on_stop callback."""
self._on_stop.append(on_stop_cb)
def path(self, *parts: Any) -> Path:
"""Get config path inside cloud dir.
Async friendly.
"""
return Path(self.client.base_path, CONFIG_DIR, *parts)
def run_executor(self, callback: Callable, *args: Any) -> asyncio.Future:
"""Run function inside executore.
Return a awaitable object.
"""
return self.client.loop.run_in_executor(None, callback, *args)
async def login(
self, email: str, password: str, *, check_connection: bool = False
) -> None:
"""Log a user in."""
await self.auth.async_login(email, password, check_connection=check_connection)
async def login_verify_totp(
self,
email: str,
code: str,
mfa_tokens: dict[str, Any],
*,
check_connection: bool = False,
) -> None:
"""Verify TOTP code during login."""
await self.auth.async_login_verify_totp(
email, code, mfa_tokens, check_connection=check_connection
)
async def logout(self) -> None:
"""Close connection and remove all credentials."""
self.id_token = None
self.access_token = None
self.refresh_token = None
self.started = False
await self.stop()
# Cleanup auth data
if self.user_info_path.exists():
await self.run_executor(self.user_info_path.unlink)
await self.client.logout_cleanups()
async def remove_data(self) -> None:
"""Remove all stored data."""
if self.started:
raise ValueError("Cloud not stopped")
try:
await self.remote.reset_acme()
finally:
await self.run_executor(self._remove_data)
def _remove_data(self) -> None:
"""Remove all stored data."""
base_path = self.path()
# Recursively remove .cloud
if base_path.is_dir():
shutil.rmtree(base_path)
# Guard against .cloud not being a directory
if base_path.exists():
base_path.unlink()
def _write_user_info(self) -> None:
"""Write user info to a file."""
base_path = self.path()
if not base_path.exists():
base_path.mkdir()
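# The file contains credentials, so write it atomically and restrict
# permissions to the owner (0o600).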
with atomic_write(self.user_info_path, overwrite=True) as fp:
fp.write(
json.dumps(
{
"id_token": self.id_token,
"access_token": self.access_token,
"refresh_token": self.refresh_token,
},
indent=4,
),
)
self.user_info_path.chmod(0o600)
async def initialize(self) -> None:
"""Initialize the cloud component (load auth and maybe start)."""
def load_config() -> None | dict[str, Any]:
"""Load config."""
# Ensure config dir exists
base_path = self.path()
if not base_path.exists():
base_path.mkdir()
if not self.user_info_path.exists():
return None
try:
content: dict[str, Any] = json.loads(
self.user_info_path.read_text(encoding="utf-8"),
)
except (ValueError, OSError) as err:
path = self.user_info_path.relative_to(self.client.base_path)
self.client.loop.call_soon_threadsafe(
self.client.user_message,
"load_auth_data",
"Home Assistant Cloud error",
f"Unable to load authentication from {path}. "
"[Please login again](/config/cloud)",
)
_LOGGER.warning(
"Error loading cloud authentication info from %s: %s",
path,
err,
)
return None
return content
info = await self.run_executor(load_config)
if info is None:
# No previous token data
self.started = False
return
self.id_token = info["id_token"]
self.access_token = info["access_token"]
self.refresh_token = info["refresh_token"]
self._init_task = asyncio.create_task(self._finish_initialize())
async def _finish_initialize(self) -> None:
"""Finish initializing the cloud component (load auth and maybe start)."""
try:
await self.auth.async_check_token()
except CloudError:
_LOGGER.debug("Failed to check cloud token", exc_info=True)
if await self.async_subscription_is_valid():
await self._start(skip_subscription_check=True)
await gather_callbacks(_LOGGER, "on_initialized", self._on_initialized)
self.started = True
self._init_task = None
async def _start(self, skip_subscription_check: bool = False) -> None:
"""Start the cloud component."""
if skip_subscription_check or await self.async_subscription_is_valid():
await self.client.cloud_started()
await gather_callbacks(_LOGGER, "on_start", self._on_start)
async def stop(self) -> None:
"""Stop the cloud component."""
if self._init_task:
self._init_task.cancel()
self._init_task = None
await self.client.cloud_stopped()
await gather_callbacks(_LOGGER, "on_stop", self._on_stop)
@staticmethod
def _decode_claims(token: str) -> Mapping[str, Any]:
"""Decode the claims in a token."""
decoded: Mapping[str, Any] = jwt.decode(
token,
options={"verify_signature": False},
)
return decoded
def async_initialize_subscription_reconnection_handler(
self,
reason: SubscriptionReconnectionReason,
) -> None:
"""Initialize the subscription reconnection handler."""
if self._subscription_reconnection_task is not None:
_LOGGER.debug("Subscription reconnection handler already running")
return
self._subscription_reconnection_task = asyncio.create_task(
self._subscription_reconnection_handler(reason),
name="subscription_reconnection_handler",
)
async def async_subscription_is_valid(self) -> bool:
"""Verify that the subscription is valid."""
if self._subscription_reconnection_task is not None:
return False
if self.subscription_expired:
self.async_initialize_subscription_reconnection_handler(
SubscriptionReconnectionReason.SUBSCRIPTION_EXPIRED
)
return False
billing_plan_type = await self._async_get_billing_plan_type()
if billing_plan_type is None or billing_plan_type == "no_subscription":
_LOGGER.error("No subscription found")
self.async_initialize_subscription_reconnection_handler(
SubscriptionReconnectionReason.NO_SUBSCRIPTION
)
return False
return True
async def _async_get_billing_plan_type(self) -> str | None:
"""Get the billing_plan_type status."""
billing_plan_type: str | None = None
try:
subscription = await async_subscription_info(self, True)
billing_plan_type = subscription.get("billing_plan_type")
except CloudError as err:
_LOGGER.warning("Could not get subscription info", exc_info=err)
return billing_plan_type
async def _subscription_reconnection_handler(
self, reason: SubscriptionReconnectionReason
) -> None:
"""Handle subscription reconnection."""
issue_identifier = f"{reason.value}_{self.expiration_date}"
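# Retry cadence based on how long ago the subscription expired: every 3 hours
# within the first day, 12 hours within the first week, 24 hours up to ~180
# days, 96 hours up to ~400 days; after that, give up.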
while True:
now_as_utc = utcnow()
sub_expired = self.expiration_date
if sub_expired > (now_as_utc - timedelta(days=1)):
wait_hours = 3
elif sub_expired > (now_as_utc - timedelta(days=7)):
wait_hours = 12
elif sub_expired > (now_as_utc - timedelta(days=180)):
wait_hours = 24
elif sub_expired > (now_as_utc - timedelta(days=400)):
wait_hours = 96
else:
_LOGGER.info(
"Subscription expired at %s, not waiting for activation",
sub_expired.strftime("%Y-%m-%d"),
)
break
_LOGGER.info(
"Subscription expired at %s, waiting %s hours for activation",
sub_expired.strftime("%Y-%m-%d"),
wait_hours,
)
await self.client.async_create_repair_issue(
identifier=issue_identifier,
translation_key=reason.value,
placeholders={"account_url": ACCOUNT_URL},
severity="error",
)
await asyncio.sleep(wait_hours * 60 * 60)
if not self.is_logged_in:
_LOGGER.info("No longer logged in, stopping reconnection handler")
break
await self.auth.async_renew_access_token()
if not self.subscription_expired:
await self.initialize()
break
await self.client.async_delete_repair_issue(identifier=issue_identifier)
_LOGGER.debug("Stopping subscription reconnection handler")
self._subscription_reconnection_task = None
hass-nabucasa-0.101.0/hass_nabucasa/account_api.py 0000664 0000000 0000000 00000002627 15011602407 0022017 0 ustar 00root root 0000000 0000000 """Manage account API."""
from __future__ import annotations
from typing import TYPE_CHECKING, TypedDict
from .api import ApiBase, CloudApiError, api_exception_handler
class AccountApiError(CloudApiError):
"""Exception raised when handling account API."""
class AccountServicesDetails(TypedDict):
"""Details of the services."""
alexa: AccountServiceDetails
google_home: AccountServiceDetails
remote_access: AccountServiceDetails
stt: AccountServiceDetails
storage: AccountStorageServiceDetails
tts: AccountServiceDetails
webhooks: AccountServiceDetails
webrtc: AccountServiceDetails
class AccountServiceDetails(TypedDict):
"""Details of a service."""
available: bool
class AccountStorageServiceDetails(AccountServiceDetails):
"""Details of a service."""
limit_bytes: int
class AccountApi(ApiBase):
"""Class to help communicate with the instance API."""
@property
def hostname(self) -> str:
"""Get the hostname."""
if TYPE_CHECKING:
assert self._cloud.servicehandlers_server is not None
return self._cloud.servicehandlers_server
@api_exception_handler(AccountApiError)
async def services(self) -> AccountServicesDetails:
"""Get the services details."""
details: AccountServicesDetails = await self._call_cloud_api(
path="/account/services",
)
return details
hass-nabucasa-0.101.0/hass_nabucasa/account_link.py 0000664 0000000 0000000 00000010067 15011602407 0022200 0 ustar 00root root 0000000 0000000 """Helpers to help with account linking."""
from __future__ import annotations
import asyncio
import logging
from typing import TYPE_CHECKING, Any
from aiohttp.client_ws import ClientWebSocketResponse
if TYPE_CHECKING:
from . import Cloud, _ClientT
_LOGGER = logging.getLogger(__name__)
# Each function can only be called once.
ERR_ALREADY_CONSUMED = "already_consumed"
# If the specified service is not supported
ERR_UNSUPORTED = "unsupported"
# If authorizing is currently unavailable
ERR_UNAVAILABLE = "unavailable"
# If we try to get tokens without being connected.
ERR_NOT_CONNECTED = "not_connected"
# Unknown error
ERR_UNKNOWN = "unknown"
# This error will be converted to asyncio.TimeoutError
ERR_TIMEOUT = "timeout"
class AccountLinkException(Exception):
"""Base exception for when account link errors happen."""
def __init__(self, code: str) -> None:
"""Initialize the exception."""
super().__init__(code)
self.code = code
def _update_token_response(tokens: dict[str, str], service: str) -> None:
"""Update token response in place."""
tokens["service"] = service
class AuthorizeAccountHelper:
"""Class to help the user authorize a third party account with Home Assistant."""
def __init__(self, cloud: Cloud[_ClientT], service: str) -> None:
"""Initialize the authorize account helper."""
self.cloud = cloud
self.service = service
self._client: ClientWebSocketResponse | None = None
async def async_get_authorize_url(self) -> str:
"""Generate the url where the user can authorize Home Assistant."""
if self._client is not None:
raise AccountLinkException(ERR_ALREADY_CONSUMED)
_LOGGER.debug("Opening connection for %s", self.service)
self._client = await self.cloud.client.websession.ws_connect(
f"https://{self.cloud.account_link_server}/v1",
)
await self._client.send_json({"service": self.service})
try:
response = await self._get_response()
except asyncio.CancelledError:
await self._client.close()
self._client = None
raise
authorize_url: str = response["authorize_url"]
return authorize_url
async def async_get_tokens(self) -> dict[str, str]:
"""Return the tokens when the user finishes authorizing."""
if self._client is None:
raise AccountLinkException(ERR_NOT_CONNECTED)
try:
response = await self._get_response()
finally:
await self._client.close()
self._client = None
_LOGGER.debug("Received tokens for %s", self.service)
tokens: dict[str, str] = response["tokens"]
_update_token_response(tokens, self.service)
return tokens
async def _get_response(self) -> dict[str, Any]:
"""Read a response from the connection and handle errors."""
assert self._client is not None
response: dict[str, Any] = await self._client.receive_json()
if "error" in response:
if response["error"] == ERR_TIMEOUT:
raise TimeoutError
raise AccountLinkException(response["error"])
return response
async def async_fetch_access_token(
cloud: Cloud[_ClientT],
service: str,
refresh_token: str,
) -> dict[str, str]:
"""Fetch access tokens using a refresh token."""
_LOGGER.debug("Fetching tokens for %s", service)
resp = await cloud.client.websession.post(
f"https://{cloud.account_link_server}/refresh_token/{service}",
json={"refresh_token": refresh_token},
)
resp.raise_for_status()
tokens: dict[str, str] = await resp.json()
_update_token_response(tokens, service)
return tokens
async def async_fetch_available_services(
cloud: Cloud[_ClientT],
) -> list[dict[str, Any]]:
"""Fetch available services."""
resp = await cloud.client.websession.get(
f"https://{cloud.account_link_server}/services",
)
resp.raise_for_status()
content: list[dict[str, Any]] = await resp.json()
return content
hass-nabucasa-0.101.0/hass_nabucasa/acme.py 0000664 0000000 0000000 00000050261 15011602407 0020434 0 ustar 00root root 0000000 0000000 """Handle ACME and local certificates."""
from __future__ import annotations
import asyncio
import contextlib
from datetime import datetime, timedelta
import logging
from pathlib import Path
from typing import TYPE_CHECKING
import urllib
from acme import challenges, client, crypto_util, errors, messages
import async_timeout
from atomicwrites import atomic_write
import attr
from cryptography import x509
from cryptography.hazmat.primitives import hashes, serialization
from cryptography.hazmat.primitives.asymmetric import rsa
from cryptography.x509.extensions import SubjectAlternativeName
from cryptography.x509.oid import NameOID
import josepy as jose
import OpenSSL
from . import cloud_api
from .utils import utcnow
FILE_ACCOUNT_KEY = "acme_account.pem"
FILE_PRIVATE_KEY = "remote_private.pem"
FILE_FULLCHAIN = "remote_fullchain.pem"
FILE_REGISTRATION = "acme_reg.json"
ACCOUNT_KEY_SIZE = 2048
PRIVATE_KEY_SIZE = 2048
USER_AGENT = "home-assistant-cloud"
_LOGGER = logging.getLogger(__name__)
if TYPE_CHECKING:
from . import Cloud, _ClientT
class AcmeClientError(Exception):
"""Raise if a acme client error raise."""
class AcmeChallengeError(AcmeClientError):
"""Raise if a challenge fails."""
class AcmeJWSVerificationError(AcmeClientError):
"""Raise if a JWS verification fails."""
class AcmeNabuCasaError(AcmeClientError):
"""Raise errors on nabucasa API."""
@attr.s
class ChallengeHandler:
"""Handle ACME data over a challenge."""
challenge = attr.ib(type=messages.ChallengeBody)
order = attr.ib(type=messages.OrderResource)
response = attr.ib(type=challenges.ChallengeResponse)
validation = attr.ib(type=str)
class AcmeHandler:
"""Class handle a local certification."""
def __init__(self, cloud: Cloud[_ClientT], domains: list[str], email: str) -> None:
"""Initialize local ACME Handler."""
self.cloud = cloud
self._acme_server = f"https://{cloud.acme_server}/directory"
self._account_jwk: jose.JWKRSA | None = None
self._acme_client: client.ClientV2 | None = None
self._x509: x509.Certificate | None = None
self._domains = domains
self._email = email
@property
def email(self) -> str:
"""Return the email."""
return self._email
@property
def domains(self) -> list[str]:
"""Return the domains."""
return self._domains
@property
def path_account_key(self) -> Path:
"""Return path of account key."""
return Path(self.cloud.path(FILE_ACCOUNT_KEY))
@property
def path_private_key(self) -> Path:
"""Return path of private key."""
return Path(self.cloud.path(FILE_PRIVATE_KEY))
@property
def path_fullchain(self) -> Path:
"""Return path of cert fullchain."""
return Path(self.cloud.path(FILE_FULLCHAIN))
@property
def path_registration_info(self) -> Path:
"""Return path of acme client registration file."""
return Path(self.cloud.path(FILE_REGISTRATION))
@property
def certificate_available(self) -> bool:
"""Return True if a certificate is loaded."""
return self._x509 is not None
@property
def is_valid_certificate(self) -> bool:
"""Validate date of a certificate and return True is valid."""
if (expire_date := self.expire_date) is None:
return False
return expire_date > utcnow()
@property
def expire_date(self) -> datetime | None:
"""Return datetime of expire date for certificate."""
if not self._x509:
return None
return self._x509.not_valid_after_utc
@property
def common_name(self) -> str | None:
"""Return CommonName of certificate."""
if not self._x509:
return None
return str(
self._x509.subject.get_attributes_for_oid(NameOID.COMMON_NAME)[0].value,
)
@property
def alternative_names(self) -> list[str] | None:
"""Return alternative names of certificate."""
if not self._x509:
return None
alternative_names = self._x509.extensions.get_extension_for_class(
SubjectAlternativeName,
).value
return [str(entry.value) for entry in alternative_names]
@property
def fingerprint(self) -> str | None:
"""Return SHA1 hex string as fingerprint."""
if not self._x509:
return None
fingerprint = self._x509.fingerprint(hashes.SHA1())
return fingerprint.hex()
def _generate_csr(self) -> bytes:
"""Load or create private key."""
if self.path_private_key.exists():
_LOGGER.debug("Load private keyfile: %s", self.path_private_key)
key_pem = self.path_private_key.read_bytes()
else:
_LOGGER.debug("create private keyfile: %s", self.path_private_key)
key = OpenSSL.crypto.PKey()
key.generate_key(OpenSSL.crypto.TYPE_RSA, PRIVATE_KEY_SIZE)
key_pem = OpenSSL.crypto.dump_privatekey(OpenSSL.crypto.FILETYPE_PEM, key)
self.path_private_key.write_bytes(key_pem)
self.path_private_key.chmod(0o600)
return crypto_util.make_csr(key_pem, self._domains)
def _load_account_key(self) -> None:
"""Load or create account key."""
if self.path_account_key.exists():
_LOGGER.debug("Load account keyfile: %s", self.path_account_key)
pem = self.path_account_key.read_bytes()
key = serialization.load_pem_private_key(pem, password=None)
else:
_LOGGER.debug("Create new RSA keyfile: %s", self.path_account_key)
key = rsa.generate_private_key(
public_exponent=65537,
key_size=ACCOUNT_KEY_SIZE,
)
# Store it to file
pem = key.private_bytes(
encoding=serialization.Encoding.PEM,
format=serialization.PrivateFormat.PKCS8,
encryption_algorithm=serialization.NoEncryption(),
)
self.path_account_key.write_bytes(pem)
self.path_account_key.chmod(0o600)
if TYPE_CHECKING:
assert isinstance(key, rsa.RSAPrivateKey)
self._account_jwk = jose.JWKRSA(key=jose.ComparableRSAKey(key))
def _create_client(self) -> None:
"""Create new ACME client."""
if self.path_registration_info.exists():
_LOGGER.info("Load exists ACME registration")
regr = messages.RegistrationResource.json_loads(
self.path_registration_info.read_text(encoding="utf-8"),
)
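# If the stored registration points at a different ACME server (for example
# after switching between dev and prod), drop it together with the account
# key so a fresh registration is created below.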
acme_url = urllib.parse.urlparse(self._acme_server)
regr_url = urllib.parse.urlparse(regr.uri)
if acme_url[0] != regr_url[0] or acme_url[1] != regr_url[1]:
_LOGGER.info("Reset new ACME registration")
self.path_registration_info.unlink()
self.path_account_key.unlink()
# Make sure that account key is loaded
self._load_account_key()
assert self._account_jwk is not None
# Load an existing registration
if self.path_registration_info.exists():
try:
network = client.ClientNetwork(
self._account_jwk,
account=regr,
user_agent=USER_AGENT,
)
directory = client.ClientV2.get_directory(
url=self._acme_server,
net=network,
)
self._acme_client = client.ClientV2(directory=directory, net=network)
except (errors.Error, ValueError) as err:
# https://github.com/certbot/certbot/blob/63fb97d8dea73ba63964f69fac0b15acfed02b3e/acme/acme/client.py#L670
# The client raises ValueError for RequestException
raise AcmeClientError(f"Can't connect to ACME server: {err}") from err
return
# Create a new registration
try:
network = client.ClientNetwork(self._account_jwk, user_agent=USER_AGENT)
directory = client.ClientV2.get_directory(
url=self._acme_server,
net=network,
)
self._acme_client = client.ClientV2(directory=directory, net=network)
except (errors.Error, ValueError) as err:
raise AcmeClientError(f"Can't connect to ACME server: {err}") from err
try:
_LOGGER.info(
"Register a ACME account with TOS: %s",
self._acme_client.directory.meta.terms_of_service,
)
regr = self._acme_client.new_account(
messages.NewRegistration.from_data(
email=self._email,
terms_of_service_agreed=True,
),
)
except errors.Error as err:
raise AcmeClientError(f"Can't register to ACME server: {err}") from err
# Store registration info
self.path_registration_info.write_text(
regr.json_dumps_pretty(),
encoding="utf-8",
)
self.path_registration_info.chmod(0o600)
def _create_order(self, csr_pem: bytes) -> messages.OrderResource:
"""Initialize domain challenge and return token."""
_LOGGER.info("Initialize challenge for a new ACME certificate")
assert self._acme_client is not None
try:
return self._acme_client.new_order(csr_pem)
except (messages.Error, errors.Error) as err:
if (
isinstance(err, messages.Error)
and err.typ == "urn:ietf:params:acme:error:malformed"
and err.detail == "JWS verification error"
):
raise AcmeJWSVerificationError(
f"JWS verification failed: {err}",
) from None
raise AcmeChallengeError(
f"Can't order a new ACME challenge: {err}",
) from None
def _start_challenge(self, order: messages.OrderResource) -> list[ChallengeHandler]:
"""Initialize domain challenge and return token."""
_LOGGER.info("Start challenge for a new ACME certificate")
# Find DNS challenge
# pylint: disable=not-an-iterable
dns_challenges: list[messages.ChallengeBody] = []
for auth in order.authorizations:
for challenge in auth.body.challenges:
if challenge.typ != "dns-01":
continue
dns_challenges.append(challenge)
if len(dns_challenges) == 0:
raise AcmeChallengeError("No pending ACME challenge")
handlers = []
for dns_challenge in dns_challenges:
try:
response, validation = dns_challenge.response_and_validation(
self._account_jwk,
)
except errors.Error as err:
raise AcmeChallengeError(
f"Can't validate the new ACME challenge: {err}",
) from None
handlers.append(
ChallengeHandler(dns_challenge, order, response, validation),
)
return handlers
def _answer_challenge(self, handler: ChallengeHandler) -> None:
"""Answer challenge."""
_LOGGER.info("Answer challenge for the new ACME certificate")
if TYPE_CHECKING:
assert self._acme_client is not None
try:
self._acme_client.answer_challenge(handler.challenge, handler.response)
except errors.Error as err:
raise AcmeChallengeError(f"Can't accept ACME challenge: {err}") from err
def _finish_challenge(self, order: messages.OrderResource) -> None:
"""Wait until challenge is finished."""
# Wait until the order is authorized, then fetch the certificate
if TYPE_CHECKING:
assert self._acme_client is not None
deadline = datetime.now() + timedelta(seconds=90)
try:
order = self._acme_client.poll_authorizations(order, deadline)
order = self._acme_client.finalize_order(
order,
deadline,
fetch_alternative_chains=True,
)
except errors.Error as err:
raise AcmeChallengeError(f"Wait of ACME challenge fails: {err}") from err
except Exception: # pylint: disable=broad-except
_LOGGER.exception("Unexpected exception while finalizing order")
raise AcmeChallengeError(
"Unexpected exception while finalizing order",
) from None
# Cleanup the old stuff
if self.path_fullchain.exists():
_LOGGER.info("Renew old certificate: %s", self.path_fullchain)
self.path_fullchain.unlink()
else:
_LOGGER.info("Create new certificate: %s", self.path_fullchain)
with atomic_write(self.path_fullchain, overwrite=True) as fp:
fp.write(order.fullchain_pem)
self.path_fullchain.chmod(0o600)
async def load_certificate(self) -> None:
"""Get x509 Cert-Object."""
if self._x509:
# The certificate is already loaded
return
def _load_cert() -> x509.Certificate | None:
"""Load certificate in a thread."""
if not self.path_fullchain.exists():
# The certificate is not available
return None
return x509.load_pem_x509_certificate(self.path_fullchain.read_bytes())
try:
self._x509 = await self.cloud.run_executor(_load_cert)
except Exception: # pylint: disable=broad-except
_LOGGER.exception("Unexpected exception loading certificate")
def _revoke_certificate(self) -> None:
"""Revoke certificate."""
if not self.path_fullchain.exists():
_LOGGER.warning("Can't revoke not exists certificate")
return
if self._acme_client is None:
_LOGGER.error("No acme client")
return
fullchain = jose.ComparableX509(
OpenSSL.crypto.load_certificate(
OpenSSL.crypto.FILETYPE_PEM,
self.path_fullchain.read_bytes(),
),
)
_LOGGER.info("Revoke certificate")
try:
# https://letsencrypt.org/docs/revoking/#specifying-a-reason-code
self._acme_client.revoke(fullchain, 4)
except errors.ConflictError:
pass
except errors.Error as err:
# Ignore errors where certificate did not exist
if "No such certificate" in str(err): # noqa: SIM114
pass
# Ignore errors where certificate has expired
elif "Certificate is expired" in str(err): # noqa: SIM114
pass
# Ignore errors where unrecognized issuer (happens dev/prod switch)
elif "Certificate from unrecognized issuer" in str(err):
pass
else:
raise AcmeClientError(f"Can't revoke certificate: {err}") from err
def _deactivate_account(self) -> None:
"""Deactivate account."""
if not self.path_registration_info.exists() or self._acme_client is None:
return
_LOGGER.info("Load exists ACME registration")
regr = messages.RegistrationResource.json_loads(
self.path_registration_info.read_text(encoding="utf-8"),
)
try:
self._acme_client.deactivate_registration(regr)
except errors.Error as err:
raise AcmeClientError(f"Can't deactivate account: {err}") from err
def _have_any_file(self) -> bool:
return (
self.path_registration_info.exists()
or self.path_account_key.exists()
or self.path_fullchain.exists()
or self.path_private_key.exists()
)
def _remove_files(self) -> None:
self.path_registration_info.unlink(missing_ok=True)
self.path_account_key.unlink(missing_ok=True)
self.path_fullchain.unlink(missing_ok=True)
self.path_private_key.unlink(missing_ok=True)
async def issue_certificate(self) -> None:
"""Create/Update certificate."""
if not self._acme_client:
await self.cloud.run_executor(self._create_client)
# Initialize challenge / new certificate
csr = await self.cloud.run_executor(self._generate_csr)
order = await self.cloud.run_executor(self._create_order, csr)
dns_challenges: list[ChallengeHandler] = await self.cloud.run_executor(
self._start_challenge,
order,
)
try:
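# For each DNS challenge: publish the TXT record through the NabuCasa DNS
# API, give the record time to propagate, then ask the ACME server to
# validate it.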
for challenge in dns_challenges:
# Update DNS
try:
async with async_timeout.timeout(30):
resp = await cloud_api.async_remote_challenge_txt(
self.cloud,
challenge.validation,
)
assert resp.status in (200, 201)
except (TimeoutError, AssertionError):
raise AcmeNabuCasaError(
"Can't set challenge token to NabuCasa DNS!",
) from None
# Answer challenge
try:
_LOGGER.info(
"Waiting 60 seconds for publishing DNS to ACME provider",
)
await asyncio.sleep(60)
await self.cloud.run_executor(self._answer_challenge, challenge)
except AcmeChallengeError as err:
_LOGGER.error("Could not complete answer challenge - %s", err)
# There is no point in continuing here
break
except Exception: # pylint: disable=broad-except
_LOGGER.exception("Unexpected exception while answering challenge")
# There is no point in continuing here
break
finally:
try:
async with async_timeout.timeout(30):
# We only need to cleanup for the last entry
await cloud_api.async_remote_challenge_cleanup(
self.cloud,
dns_challenges[-1].validation,
)
except TimeoutError:
_LOGGER.error("Failed to clean up challenge from NabuCasa DNS!")
# Finish validation
try:
await self.cloud.run_executor(self._finish_challenge, order)
except AcmeChallengeError as err:
raise AcmeNabuCasaError(f"Could not finish challenge - {err}") from err
await self.load_certificate()
async def reset_acme(self) -> None:
"""Revoke and deactivate acme certificate/account."""
_LOGGER.info("Revoke and deactivate ACME user/certificate")
if (
self._acme_client is None
and self._account_jwk is None
and self._x509 is None
and not await self.cloud.run_executor(self._have_any_file)
):
_LOGGER.info("ACME user/certificates already cleaned up")
return
if not self._acme_client:
await self.cloud.run_executor(self._create_client)
try:
with contextlib.suppress(AcmeClientError):
await self.cloud.run_executor(self._revoke_certificate)
with contextlib.suppress(AcmeClientError):
await self.cloud.run_executor(self._deactivate_account)
finally:
self._acme_client = None
self._account_jwk = None
self._x509 = None
await self.cloud.run_executor(self._remove_files)
async def hardening_files(self) -> None:
"""Control permission on files."""
def _control() -> None:
# Set file permission to 0600
if self.path_account_key.exists():
self.path_account_key.chmod(0o600)
if self.path_registration_info.exists():
self.path_registration_info.chmod(0o600)
if self.path_fullchain.exists():
self.path_fullchain.chmod(0o600)
if self.path_private_key.exists():
self.path_private_key.chmod(0o600)
try:
await self.cloud.run_executor(_control)
except OSError:
_LOGGER.warning("Can't check and hardening file permission")
hass-nabucasa-0.101.0/hass_nabucasa/api.py 0000664 0000000 0000000 00000016144 15011602407 0020302 0 ustar 00root root 0000000 0000000 """Define the API base class."""
from __future__ import annotations
from abc import ABC, abstractmethod
from collections.abc import Awaitable, Callable, Coroutine
import contextlib
from functools import wraps
from json import JSONDecodeError
import logging
from typing import TYPE_CHECKING, Any, ParamSpec, TypeVar, final
from aiohttp import (
ClientError,
ClientResponse,
ClientResponseError,
ClientTimeout,
hdrs,
)
from .auth import CloudError, Unauthenticated, UnknownError
if TYPE_CHECKING:
from . import Cloud, _ClientT
P = ParamSpec("P")
T = TypeVar("T")
_LOGGER = logging.getLogger(__name__)
def api_exception_handler(
exception: type[CloudApiError],
) -> Callable[
[Callable[P, Awaitable[T]]],
Callable[P, Coroutine[Any, Any, T]],
]:
"""Handle API exceptions."""
def decorator(
func: Callable[P, Awaitable[T]],
) -> Callable[P, Coroutine[Any, Any, T]]:
@wraps(func)
async def wrapper(*args: P.args, **kwargs: P.kwargs) -> T:
try:
return await func(*args, **kwargs)
except (
CloudApiNonRetryableError,
CloudApiCodedError,
exception,
):
raise
except (CloudApiError, UnknownError, Unauthenticated) as err:
raise exception(err, orig_exc=err) from err
except Exception as err:
raise exception(
f"Unexpected error while calling API: {err}",
orig_exc=err,
) from err
return wrapper
return decorator
class CloudApiError(CloudError):
"""Exception raised when handling cloud API."""
def __init__(
self,
context: str | Exception,
*,
orig_exc: Exception | None = None,
) -> None:
"""Initialize."""
super().__init__(context)
self.orig_exc = orig_exc
class CloudApiCodedError(CloudApiError):
"""Exception raised when handling cloud API."""
def __init__(self, context: str | Exception, *, code: str) -> None:
"""Initialize."""
super().__init__(context)
self.code = code
class CloudApiTimeoutError(CloudApiError):
"""Exception raised when handling cloud API times out."""
class CloudApiClientError(CloudApiError):
"""Exception raised when handling cloud API client error."""
class CloudApiNonRetryableError(CloudApiCodedError):
"""Exception raised when handling cloud API non-retryable error."""
class ApiBase(ABC):
"""Class to help communicate with the cloud API."""
def __init__(self, cloud: Cloud[_ClientT]) -> None:
"""Initialize the API base."""
self._cloud = cloud
@property
@abstractmethod
def hostname(self) -> str:
"""Get the hostname."""
@property
def non_retryable_error_codes(self) -> set[str]:
"""Get the non-retryable error codes."""
return set()
@property
@final
def _non_retryable_error_codes(self) -> set[str]:
"""Get the non-retryable error codes."""
return {"NC-CE-02", "NC-CE-03"}.union(self.non_retryable_error_codes)
def _do_log_response(
self,
resp: ClientResponse,
data: list[Any] | dict[Any, Any] | str | None = None,
) -> None:
"""Log the response."""
isok = resp.status < 400
target = resp.url.path if resp.url.host == self.hostname else ""
_LOGGER.debug(
"Response for %s from %s%s (%s) %s",
resp.method,
resp.url.host,
target,
resp.status,
data["message"]
if not isok and isinstance(data, dict) and "message" in data
else "",
)
async def _call_raw_api(
self,
*,
url: str,
method: str,
client_timeout: ClientTimeout,
headers: dict[str, Any],
jsondata: dict[str, Any] | None = None,
data: Any | None = None,
) -> ClientResponse:
"""Call raw API."""
try:
resp = await self._cloud.websession.request(
method=method,
url=url,
timeout=client_timeout,
headers=headers,
json=jsondata,
data=data,
)
except TimeoutError as err:
raise CloudApiTimeoutError(
"Timeout reached while calling API",
orig_exc=err,
) from err
except ClientResponseError as err:
raise CloudApiClientError(
f"Failed to fetch: ({err.status}) {err.message}",
orig_exc=err,
) from err
except ClientError as err:
raise CloudApiClientError(f"Failed to fetch: {err}", orig_exc=err) from err
except Exception as err:
raise CloudApiError(
f"Unexpected error while calling API: {err}",
orig_exc=err,
) from err
return resp
async def _call_cloud_api(
self,
*,
path: str,
method: str = "GET",
client_timeout: ClientTimeout | None = None,
jsondata: dict[str, Any] | None = None,
headers: dict[str, Any] | None = None,
skip_token_check: bool = False,
) -> Any:
"""Call cloud API."""
data: dict[str, Any] | list[Any] | str | None = None
if not skip_token_check:
await self._cloud.auth.async_check_token()
if TYPE_CHECKING:
assert self._cloud.id_token is not None
resp = await self._call_raw_api(
method=method,
url=f"https://{self.hostname}{path}",
client_timeout=client_timeout or ClientTimeout(total=10),
headers={
hdrs.ACCEPT: "application/json",
hdrs.AUTHORIZATION: self._cloud.id_token,
hdrs.CONTENT_TYPE: "application/json",
hdrs.USER_AGENT: self._cloud.client.client_name,
**(headers or {}),
},
jsondata=jsondata,
)
if resp.status < 500:
with contextlib.suppress(JSONDecodeError):
data = await resp.json()
self._do_log_response(resp, data)
if data is None and resp.method.upper() != "DELETE":
raise CloudApiError("Failed to parse API response") from None
if (
resp.status == 400
and isinstance(data, dict)
and (message := data.get("message"))
and (code := message.split(" ")[0])
and code in self._non_retryable_error_codes
):
raise CloudApiNonRetryableError(message, code=code) from None
if resp.status == 403 and self._cloud.subscription_expired:
raise CloudApiNonRetryableError(
"Subscription has expired",
code="subscription_expired",
) from None
try:
resp.raise_for_status()
except ClientResponseError as err:
raise CloudApiError(
f"Failed to fetch: ({err.status}) {err.message}",
orig_exc=err,
) from err
return data
hass-nabucasa-0.101.0/hass_nabucasa/auth.py 0000664 0000000 0000000 00000027665 15011602407 0020504 0 ustar 00root root 0000000 0000000 """Package to communicate with the authentication API."""
from __future__ import annotations
import asyncio
from functools import lru_cache, partial
import logging
import random
from typing import TYPE_CHECKING, Any
import async_timeout
import boto3
import botocore
from botocore.exceptions import BotoCoreError, ClientError
import pycognito
from pycognito.exceptions import ForceChangePasswordException, MFAChallengeException
from .const import MESSAGE_AUTH_FAIL
if TYPE_CHECKING:
from . import Cloud, _ClientT
_LOGGER = logging.getLogger(__name__)
class CloudError(Exception):
"""Base class for cloud related errors."""
class Unauthenticated(CloudError):
"""Raised when authentication failed."""
class MFARequired(CloudError):
"""Raised when MFA is required."""
_mfa_tokens: dict[str, Any]
def __init__(self, mfa_tokens: dict[str, Any]) -> None:
"""Initialize MFA required error."""
super().__init__("MFA required.")
self._mfa_tokens = mfa_tokens
@property
def mfa_tokens(self) -> dict[str, Any]:
"""Return MFA tokens."""
return self._mfa_tokens
class InvalidTotpCode(CloudError):
"""Raised when the TOTP code is invalid."""
class UserNotFound(CloudError):
"""Raised when a user is not found."""
class UserExists(CloudError):
"""Raised when a username already exists."""
class UserNotConfirmed(CloudError):
"""Raised when a user has not confirmed email yet."""
class PasswordChangeRequired(CloudError):
"""Raised when a password change is required."""
# https://github.com/PyCQA/pylint/issues/1085
# pylint: disable=useless-super-delegation
def __init__(self, message: str = "Password change required.") -> None:
"""Initialize a password change required error."""
super().__init__(message)
class UnknownError(CloudError):
"""Raised when an unknown error occurs."""
AWS_EXCEPTIONS: dict[str, type[CloudError]] = {
"CodeMismatchException": InvalidTotpCode,
"UserNotFoundException": UserNotFound,
"UserNotConfirmedException": UserNotConfirmed,
"UsernameExistsException": UserExists,
"NotAuthorizedException": Unauthenticated,
"PasswordResetRequiredException": PasswordChangeRequired,
}
class CognitoAuth:
"""Handle cloud auth."""
def __init__(self, cloud: Cloud[_ClientT]) -> None:
"""Configure the auth api."""
self.cloud = cloud
self._refresh_task: asyncio.Task | None = None
self._session: boto3.Session | None = None
self._request_lock = asyncio.Lock()
cloud.iot.register_on_connect(self.on_connect)
cloud.iot.register_on_disconnect(self.on_disconnect)
async def _async_handle_token_refresh(self) -> None:
"""Handle Cloud access token refresh."""
sleep_time = random.randint(2400, 3600)
while True:
try:
await asyncio.sleep(sleep_time)
await self.async_renew_access_token()
except CloudError as err:
_LOGGER.error("Can't refresh cloud token: %s", err)
except asyncio.CancelledError:
# Task is canceled, stop it.
break
sleep_time = random.randint(3100, 3600)
async def on_connect(self) -> None:
"""When the instance is connected."""
self._refresh_task = asyncio.create_task(self._async_handle_token_refresh())
async def on_disconnect(self) -> None:
"""When the instance is disconnected."""
if self._refresh_task is not None:
self._refresh_task.cancel()
async def async_register(
self,
email: str,
password: str,
*,
client_metadata: Any | None = None,
) -> None:
"""Register a new account."""
try:
async with self._request_lock:
cognito = await self.cloud.run_executor(
self._create_cognito_client,
)
await self.cloud.run_executor(
partial(
cognito.register,
email.lower(),
password,
client_metadata=client_metadata,
),
)
except ClientError as err:
raise _map_aws_exception(err) from err
except BotoCoreError as err:
raise UnknownError from err
async def async_resend_email_confirm(self, email: str) -> None:
"""Resend email confirmation."""
try:
async with self._request_lock:
cognito = await self.cloud.run_executor(
partial(self._create_cognito_client, username=email),
)
await self.cloud.run_executor(
partial(
cognito.client.resend_confirmation_code,
Username=email,
ClientId=cognito.client_id,
),
)
except ClientError as err:
raise _map_aws_exception(err) from err
except BotoCoreError as err:
raise UnknownError from err
async def async_forgot_password(self, email: str) -> None:
"""Initialize forgotten password flow."""
try:
async with self._request_lock:
cognito = await self.cloud.run_executor(
partial(self._create_cognito_client, username=email),
)
await self.cloud.run_executor(cognito.initiate_forgot_password)
except ClientError as err:
raise _map_aws_exception(err) from err
except BotoCoreError as err:
raise UnknownError from err
async def async_login(
self,
email: str,
password: str,
*,
check_connection: bool = False,
) -> None:
"""Log user in and fetch certificate."""
try:
async with self._request_lock:
assert not self.cloud.is_logged_in, "Cannot login if already logged in."
cognito: pycognito.Cognito = await self.cloud.run_executor(
partial(self._create_cognito_client, username=email),
)
async with async_timeout.timeout(30):
await self.cloud.run_executor(
partial(cognito.authenticate, password=password),
)
if check_connection:
await self.cloud.ensure_not_connected(
access_token=cognito.access_token
)
task = await self.cloud.update_token(
cognito.id_token,
cognito.access_token,
cognito.refresh_token,
)
if task:
await task
except MFAChallengeException as err:
raise MFARequired(err.get_tokens()) from err
except ForceChangePasswordException as err:
raise PasswordChangeRequired from err
except ClientError as err:
raise _map_aws_exception(err) from err
except BotoCoreError as err:
raise UnknownError from err
async def async_login_verify_totp(
self,
email: str,
code: str,
mfa_tokens: dict[str, Any],
*,
check_connection: bool = False,
) -> None:
"""Log user in and fetch certificate if MFA is required."""
try:
async with self._request_lock:
assert not self.cloud.is_logged_in, (
"Cannot verify TOTP if already logged in."
)
cognito: pycognito.Cognito = await self.cloud.run_executor(
partial(self._create_cognito_client, username=email),
)
async with async_timeout.timeout(30):
await self.cloud.run_executor(
partial(
cognito.respond_to_software_token_mfa_challenge,
code=code,
mfa_tokens=mfa_tokens,
),
)
if check_connection:
await self.cloud.ensure_not_connected(
access_token=cognito.access_token
)
task = await self.cloud.update_token(
cognito.id_token,
cognito.access_token,
cognito.refresh_token,
)
if task:
await task
except ClientError as err:
raise _map_aws_exception(err) from err
except BotoCoreError as err:
raise UnknownError from err
async def async_check_token(self) -> None:
"""Check that the token is valid and renew if necessary."""
async with self._request_lock:
cognito = await self._async_authenticated_cognito()
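# check_token(renew=False) is expected to return True when the access token
# has expired; only in that case do we renew it.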
if not cognito.check_token(renew=False):
return
try:
await self._async_renew_access_token()
except (Unauthenticated, UserNotFound) as err:
_LOGGER.error("Unable to refresh token: %s", err)
self.cloud.client.user_message(
"cloud_subscription_expired",
"Home Assistant Cloud",
MESSAGE_AUTH_FAIL,
)
# Don't await it because it could cancel this task
asyncio.create_task(self.cloud.logout())
raise
async def async_renew_access_token(self) -> None:
"""Renew access token."""
async with self._request_lock:
await self._async_renew_access_token()
async def _async_renew_access_token(self) -> None:
"""Renew access token internals.
Does not consume lock.
"""
cognito = await self._async_authenticated_cognito()
try:
await self.cloud.run_executor(cognito.renew_access_token)
await self.cloud.update_token(cognito.id_token, cognito.access_token)
except ClientError as err:
raise _map_aws_exception(err) from err
except BotoCoreError as err:
raise UnknownError from err
async def _async_authenticated_cognito(self) -> pycognito.Cognito:
"""Return an authenticated cognito instance."""
if self.cloud.access_token is None or self.cloud.refresh_token is None:
raise Unauthenticated("No authentication found")
return await self.cloud.run_executor(
partial(
self._create_cognito_client,
access_token=self.cloud.access_token,
refresh_token=self.cloud.refresh_token,
),
)
def _create_cognito_client(self, **kwargs: Any) -> pycognito.Cognito:
"""Create a new cognito client.
NOTE: This will do I/O
"""
if self._session is None:
self._session = boto3.session.Session()
return _cached_cognito(
user_pool_id=self.cloud.user_pool_id,
client_id=self.cloud.cognito_client_id,
user_pool_region=self.cloud.region,
botocore_config=botocore.config.Config(signature_version=botocore.UNSIGNED),
session=self._session,
**kwargs,
)
def _map_aws_exception(err: ClientError) -> CloudError:
"""Map AWS exception to our exceptions."""
ex = AWS_EXCEPTIONS.get(err.response["Error"]["Code"], UnknownError)
return ex(err.response["Error"]["Message"])
@lru_cache(maxsize=2)
def _cached_cognito(
user_pool_id: str,
client_id: str,
user_pool_region: str,
botocore_config: Any,
session: Any,
**kwargs: Any,
) -> pycognito.Cognito:
"""Create a cached cognito client.
NOTE: This will do I/O
"""
return pycognito.Cognito(
user_pool_id=user_pool_id,
client_id=client_id,
user_pool_region=user_pool_region,
botocore_config=botocore_config,
session=session,
**kwargs,
)
hass-nabucasa-0.101.0/hass_nabucasa/client.py 0000664 0000000 0000000 00000010112 15011602407 0020774 0 ustar 00root root 0000000 0000000 """Client interface for Home Assistant to cloud."""
from __future__ import annotations
from abc import ABC, abstractmethod
from asyncio import AbstractEventLoop
from pathlib import Path
from typing import TYPE_CHECKING, Any, Literal
from aiohttp import ClientSession
from aiohttp.web import AppRunner
from .iot import HandlerError
if TYPE_CHECKING:
from . import Cloud
class RemoteActivationNotAllowed(HandlerError):
"""Raised when it's not allowed to remotely activate remote UI."""
def __init__(self) -> None:
"""Initialize Error Message."""
super().__init__("remote_activation_not_allowed")
class CloudClient(ABC):
"""Interface class for Home Assistant."""
cloud: Cloud
@property
@abstractmethod
def base_path(self) -> Path:
"""Return path to base dir."""
@property
@abstractmethod
def loop(self) -> AbstractEventLoop:
"""Return client loop."""
@property
@abstractmethod
def websession(self) -> ClientSession:
"""Return client session for aiohttp."""
@property
@abstractmethod
def client_name(self) -> str:
"""Return name of the client, this will be used as the user-agent."""
@property
@abstractmethod
def aiohttp_runner(self) -> AppRunner | None:
"""Return client webinterface aiohttp application."""
@property
@abstractmethod
def cloudhooks(self) -> dict[str, dict[str, str | bool]]:
"""Return list of cloudhooks."""
@property
@abstractmethod
def remote_autostart(self) -> bool:
"""Return true if we want start a remote connection."""
@abstractmethod
async def cloud_connected(self) -> None:
"""Cloud connected."""
@abstractmethod
async def cloud_disconnected(self) -> None:
"""Cloud disconnected."""
@abstractmethod
async def cloud_started(self) -> None:
"""Cloud started with an active subscription."""
@abstractmethod
async def cloud_stopped(self) -> None:
"""Cloud stopped."""
@abstractmethod
async def logout_cleanups(self) -> None:
"""Cleanup before logout."""
@abstractmethod
async def async_cloud_connect_update(self, connect: bool) -> None:
"""Process cloud remote message to client.
If it's not allowed to remotely enable remote control, the implementation
should raise RemoteActivationNotAllowed
"""
@abstractmethod
async def async_cloud_connection_info(
self,
payload: dict[str, Any],
) -> dict[str, Any]:
"""Process cloud connection info message to client."""
@abstractmethod
async def async_alexa_message(self, payload: dict[str, Any]) -> dict[str, Any]:
"""Process cloud alexa message to client."""
@abstractmethod
async def async_system_message(self, payload: dict[str, Any]) -> None:
"""Process cloud system message to client."""
@abstractmethod
async def async_google_message(self, payload: dict[str, Any]) -> dict[str, Any]:
"""Process cloud google message to client."""
@abstractmethod
async def async_webhook_message(self, payload: dict[str, Any]) -> dict[str, Any]:
"""Process cloud webhook message to client."""
@abstractmethod
async def async_cloudhooks_update(
self,
data: dict[str, dict[str, str | bool]],
) -> None:
"""Update local list of cloudhooks."""
@abstractmethod
def dispatcher_message(self, identifier: str, data: Any = None) -> None:
"""Send data to dispatcher."""
@abstractmethod
def user_message(self, identifier: str, title: str, message: str) -> None:
"""Create a message for user to UI."""
@abstractmethod
async def async_create_repair_issue(
self,
identifier: str,
translation_key: str,
*,
placeholders: dict[str, str] | None = None,
severity: Literal["error", "warning"] = "warning",
) -> None:
"""Create a repair issue."""
@abstractmethod
async def async_delete_repair_issue(self, identifier: str) -> None:
"""Delete a repair issue."""
hass-nabucasa-0.101.0/hass_nabucasa/cloud_api.py 0000664 0000000 0000000 00000025145 15011602407 0021471 0 ustar 00root root 0000000 0000000 """Cloud APIs."""
from __future__ import annotations
from collections.abc import Awaitable, Callable, Coroutine
from functools import wraps
from json import JSONDecodeError
import logging
from typing import (
TYPE_CHECKING,
Any,
Concatenate,
ParamSpec,
TypedDict,
TypeVar,
cast,
)
from aiohttp import ClientResponse, ContentTypeError
from aiohttp.hdrs import AUTHORIZATION, USER_AGENT
_LOGGER = logging.getLogger(__name__)
P = ParamSpec("P")
T = TypeVar("T")
if TYPE_CHECKING:
from . import Cloud, _ClientT
class _FilesHandlerUrlResponse(TypedDict):
"""URL Response from files handler."""
url: str
class FilesHandlerDownloadDetails(_FilesHandlerUrlResponse):
"""Download details from files handler."""
class FilesHandlerUploadDetails(_FilesHandlerUrlResponse):
"""Upload details from files handler."""
headers: dict[str, str]
class FilesHandlerListEntry(TypedDict):
"""List entry for files handlers."""
Key: str
Size: int
LastModified: str
Metadata: dict[str, Any]
def _do_log_response(resp: ClientResponse, content: str = "") -> None:
"""Log the response."""
meth = _LOGGER.debug if resp.status < 400 else _LOGGER.warning
meth("Fetched %s (%s) %s", resp.url, resp.status, content)
def _check_token(
func: Callable[Concatenate[Cloud[_ClientT], P], Awaitable[T]],
) -> Callable[Concatenate[Cloud[_ClientT], P], Coroutine[Any, Any, T]]:
"""Decorate a function to verify valid token."""
@wraps(func)
async def check_token(
cloud: Cloud[_ClientT],
*args: P.args,
**kwargs: P.kwargs,
) -> T:
"""Validate token, then call func."""
await cloud.auth.async_check_token()
return await func(cloud, *args, **kwargs)
return check_token
def _log_response(
func: Callable[Concatenate[P], Awaitable[ClientResponse]],
) -> Callable[Concatenate[P], Coroutine[Any, Any, ClientResponse]]:
"""Decorate a function to log bad responses."""
@wraps(func)
async def log_response(
*args: P.args,
**kwargs: P.kwargs,
) -> ClientResponse:
"""Log response if it's bad."""
resp = await func(*args, **kwargs)
_do_log_response(resp)
return resp
return log_response
@_check_token
@_log_response
async def async_create_cloudhook(cloud: Cloud[_ClientT]) -> ClientResponse:
"""Create a cloudhook."""
if TYPE_CHECKING:
assert cloud.id_token is not None
return await cloud.websession.post(
f"https://{cloud.cloudhook_server}/generate",
headers={AUTHORIZATION: cloud.id_token, USER_AGENT: cloud.client.client_name},
)
@_check_token
@_log_response
async def async_remote_register(cloud: Cloud[_ClientT]) -> ClientResponse:
"""Create/Get a remote URL."""
if TYPE_CHECKING:
assert cloud.id_token is not None
url = f"https://{cloud.servicehandlers_server}/instance/register"
return await cloud.websession.post(
url,
headers={AUTHORIZATION: cloud.id_token, USER_AGENT: cloud.client.client_name},
)
@_check_token
@_log_response
async def async_remote_token(
cloud: Cloud[_ClientT],
aes_key: bytes,
aes_iv: bytes,
) -> ClientResponse:
"""Create a remote snitun token."""
if TYPE_CHECKING:
assert cloud.id_token is not None
url = f"https://{cloud.servicehandlers_server}/instance/snitun_token"
return await cloud.websession.post(
url,
headers={AUTHORIZATION: cloud.id_token, USER_AGENT: cloud.client.client_name},
json={"aes_key": aes_key.hex(), "aes_iv": aes_iv.hex()},
)
@_check_token
@_log_response
async def async_remote_challenge_txt(
cloud: Cloud[_ClientT],
txt: str,
) -> ClientResponse:
"""Set DNS challenge."""
if TYPE_CHECKING:
assert cloud.id_token is not None
url = f"https://{cloud.servicehandlers_server}/instance/dns_challenge_txt"
return await cloud.websession.post(
url,
headers={AUTHORIZATION: cloud.id_token, USER_AGENT: cloud.client.client_name},
json={"txt": txt},
)
@_check_token
@_log_response
async def async_remote_challenge_cleanup(
cloud: Cloud[_ClientT],
txt: str,
) -> ClientResponse:
"""Remove DNS challenge."""
if TYPE_CHECKING:
assert cloud.id_token is not None
url = f"https://{cloud.servicehandlers_server}/instance/dns_challenge_cleanup"
return await cloud.websession.post(
url,
headers={AUTHORIZATION: cloud.id_token, USER_AGENT: cloud.client.client_name},
json={"txt": txt},
)
@_check_token
async def async_alexa_access_token(cloud: Cloud[_ClientT]) -> ClientResponse:
"""Request Alexa access token."""
if TYPE_CHECKING:
assert cloud.id_token is not None
resp = await cloud.websession.post(
f"https://{cloud.servicehandlers_server}/alexa/access_token",
headers={AUTHORIZATION: cloud.id_token, USER_AGENT: cloud.client.client_name},
)
_LOGGER.log(
logging.DEBUG if resp.status < 400 else logging.INFO,
"Fetched %s (%s)",
resp.url,
resp.status,
)
return resp
@_check_token
async def async_files_download_details(
cloud: Cloud[_ClientT],
*,
storage_type: str,
filename: str,
) -> FilesHandlerDownloadDetails:
"""Get files download details."""
if TYPE_CHECKING:
assert cloud.id_token is not None
resp = await cloud.websession.get(
f"https://{cloud.servicehandlers_server}/files"
f"/download_details/{storage_type}/{filename}",
headers={"authorization": cloud.id_token, USER_AGENT: cloud.client.client_name},
)
data: dict[str, Any] = await resp.json()
_do_log_response(
resp,
data["message"] if resp.status == 400 and "message" in data else "",
)
resp.raise_for_status()
return cast("FilesHandlerDownloadDetails", data)
@_check_token
async def async_files_list(
cloud: Cloud[_ClientT],
*,
storage_type: str,
) -> list[FilesHandlerListEntry]:
"""List files for storage type."""
if TYPE_CHECKING:
assert cloud.id_token is not None
resp = await cloud.websession.get(
f"https://{cloud.servicehandlers_server}/files/{storage_type}",
headers={"authorization": cloud.id_token, USER_AGENT: cloud.client.client_name},
)
data: dict[str, Any] | list[dict[str, Any]] = await resp.json()
_do_log_response(
resp,
data["message"]
if resp.status == 400 and isinstance(data, dict) and "message" in data
else "",
)
resp.raise_for_status()
return cast("list[FilesHandlerListEntry]", data)
@_check_token
async def async_files_upload_details(
cloud: Cloud[_ClientT],
*,
storage_type: str,
filename: str,
base64md5hash: str,
size: int,
metadata: dict[str, Any] | None = None,
) -> FilesHandlerUploadDetails:
"""Get files upload details."""
if TYPE_CHECKING:
assert cloud.id_token is not None
resp = await cloud.websession.get(
f"https://{cloud.servicehandlers_server}/files/upload_details",
headers={"authorization": cloud.id_token, USER_AGENT: cloud.client.client_name},
json={
"storage_type": storage_type,
"filename": filename,
"md5": base64md5hash,
"size": size,
"metadata": metadata,
},
)
data: dict[str, Any] = await resp.json()
_do_log_response(
resp,
data["message"] if "message" in data and resp.status == 400 else "",
)
resp.raise_for_status()
return cast("FilesHandlerUploadDetails", data)
@_check_token
async def async_files_delete_file(
cloud: Cloud[_ClientT],
*,
storage_type: str,
filename: str,
) -> None:
"""Delete a file."""
if TYPE_CHECKING:
assert cloud.id_token is not None
resp = await cloud.websession.delete(
f"https://{cloud.servicehandlers_server}/files",
headers={"authorization": cloud.id_token, USER_AGENT: cloud.client.client_name},
json={
"storage_type": storage_type,
"filename": filename,
},
)
# Successful delete returns no content
try:
data: dict[str, Any] = await resp.json()
except (ContentTypeError, JSONDecodeError):
data = {}
_do_log_response(
resp,
data["message"] if resp.status == 400 and "message" in data else "",
)
resp.raise_for_status()
@_check_token
@_log_response
async def async_google_actions_request_sync(cloud: Cloud[_ClientT]) -> ClientResponse:
"""Request a Google Actions sync request."""
return await cloud.websession.post(
f"https://{cloud.remotestate_server}/request_sync",
headers={
AUTHORIZATION: f"Bearer {cloud.id_token}",
USER_AGENT: cloud.client.client_name,
},
)
@_check_token
async def async_subscription_info(
cloud: Cloud[_ClientT], skip_renew: bool = False
) -> dict[str, Any]:
"""Fetch subscription info."""
if TYPE_CHECKING:
assert cloud.id_token is not None
resp = await cloud.websession.get(
f"https://{cloud.accounts_server}/payments/subscription_info",
headers={"authorization": cloud.id_token, USER_AGENT: cloud.client.client_name},
)
_do_log_response(resp)
resp.raise_for_status()
data: dict[str, Any] = await resp.json()
# If subscription info indicates we are subscribed, force a refresh of the token
if data.get("provider") and not cloud.started and not skip_renew:
_LOGGER.debug("Found disconnected account with valid subscription, connecting")
await cloud.auth.async_renew_access_token()
return data
@_check_token
async def async_migrate_paypal_agreement(cloud: Cloud[_ClientT]) -> dict[str, Any]:
"""Migrate a paypal agreement from legacy."""
if TYPE_CHECKING:
assert cloud.id_token is not None
resp = await cloud.websession.post(
f"https://{cloud.accounts_server}/payments/migrate_paypal_agreement",
headers={"authorization": cloud.id_token, USER_AGENT: cloud.client.client_name},
)
_do_log_response(resp)
resp.raise_for_status()
data: dict[str, Any] = await resp.json()
return data
@_check_token
async def async_resolve_cname(cloud: Cloud[_ClientT], hostname: str) -> list[str]:
"""Resolve DNS CNAME."""
if TYPE_CHECKING:
assert cloud.id_token is not None
resp = await cloud.websession.post(
f"https://{cloud.accounts_server}/instance/resolve_dns_cname",
headers={"authorization": cloud.id_token, USER_AGENT: cloud.client.client_name},
json={"hostname": hostname},
)
_do_log_response(resp)
resp.raise_for_status()
data: list[str] = await resp.json()
return data
hass-nabucasa-0.101.0/hass_nabucasa/cloudhooks.py 0000664 0000000 0000000 00000004615 15011602407 0021703 0 ustar 00root root 0000000 0000000 """Manage cloud cloudhooks."""
from __future__ import annotations
from typing import TYPE_CHECKING, Any
import async_timeout
from . import cloud_api
if TYPE_CHECKING:
from . import Cloud, _ClientT
class Cloudhooks:
"""Class to help manage cloudhooks."""
def __init__(self, cloud: Cloud[_ClientT]) -> None:
"""Initialize cloudhooks."""
self.cloud = cloud
cloud.iot.register_on_connect(self.async_publish_cloudhooks)
async def async_publish_cloudhooks(self) -> None:
"""Inform the Relayer of the cloudhooks that we support."""
if not self.cloud.is_connected:
return
cloudhooks = self.cloud.client.cloudhooks
await self.cloud.iot.async_send_message(
"webhook-register",
{"cloudhook_ids": [info["cloudhook_id"] for info in cloudhooks.values()]},
expect_answer=False,
)
async def async_create(self, webhook_id: str, managed: bool) -> dict[str, Any]:
"""Create a cloud webhook."""
cloudhooks = self.cloud.client.cloudhooks
if webhook_id in cloudhooks:
raise ValueError("Hook is already enabled for the cloud.")
if not self.cloud.iot.connected:
raise ValueError("Cloud is not connected")
# Create cloud hook
async with async_timeout.timeout(10):
resp = await cloud_api.async_create_cloudhook(self.cloud)
resp.raise_for_status()
data = await resp.json()
cloudhook_id = data["cloudhook_id"]
cloudhook_url = data["url"]
# Store hook
cloudhooks = dict(cloudhooks)
hook = cloudhooks[webhook_id] = {
"webhook_id": webhook_id,
"cloudhook_id": cloudhook_id,
"cloudhook_url": cloudhook_url,
"managed": managed,
}
await self.cloud.client.async_cloudhooks_update(cloudhooks)
await self.async_publish_cloudhooks()
return hook
async def async_delete(self, webhook_id: str) -> None:
"""Delete a cloud webhook."""
cloudhooks = self.cloud.client.cloudhooks
if webhook_id not in cloudhooks:
raise ValueError("Hook is not enabled for the cloud.")
# Remove hook
cloudhooks = dict(cloudhooks)
cloudhooks.pop(webhook_id)
await self.cloud.client.async_cloudhooks_update(cloudhooks)
await self.async_publish_cloudhooks()
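# Illustrative usage sketch (not part of the library): the typical create/delete
# round trip for a managed cloudhook. `hooks` is assumed to be the Cloudhooks
# helper owned by a connected Cloud instance; async_create raises ValueError if
# the cloud is not connected or the webhook id is already registered.
async def _example_cloudhook_roundtrip(hooks: Cloudhooks, webhook_id: str) -> str:
    """Example: create a managed cloudhook, read its public URL, then remove it."""
    hook = await hooks.async_create(webhook_id, managed=True)
    url: str = hook["cloudhook_url"]
    await hooks.async_delete(webhook_id)
    return url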
hass-nabucasa-0.101.0/hass_nabucasa/const.py 0000664 0000000 0000000 00000004322 15011602407 0020652 0 ustar 00root root 0000000 0000000 """Constants for the hass-nabucasa."""
from __future__ import annotations
from enum import StrEnum
ACCOUNT_URL = "https://account.nabucasa.com/"
CONFIG_DIR = ".cloud"
REQUEST_TIMEOUT = 10
MODE_PROD = "production"
MODE_DEV = "development"
STATE_CONNECTING = "connecting"
STATE_CONNECTED = "connected"
STATE_DISCONNECTED = "disconnected"
DISPATCH_REMOTE_CONNECT = "remote_connect"
DISPATCH_REMOTE_DISCONNECT = "remote_disconnect"
DISPATCH_REMOTE_BACKEND_UP = "remote_backend_up"
DISPATCH_REMOTE_BACKEND_DOWN = "remote_backend_down"
DEFAULT_SERVERS: dict[str, dict[str, str]] = {
"production": {
"account_link": "account-link.nabucasa.com",
"accounts": "accounts.nabucasa.com",
"acme": "acme-v02.api.letsencrypt.org",
"cloudhook": "webhooks-api.nabucasa.com",
"relayer": "cloud.nabucasa.com",
"remotestate": "remotestate.nabucasa.com",
"servicehandlers": "servicehandlers.nabucasa.com",
},
"development": {},
}
DEFAULT_VALUES: dict[str, dict[str, str]] = {
"production": {
"cognito_client_id": "60i2uvhvbiref2mftj7rgcrt9u",
"user_pool_id": "us-east-1_87ll5WOP8",
"region": "us-east-1",
},
"development": {},
}
MESSAGE_EXPIRATION = """
It looks like your Home Assistant Cloud subscription has expired. Please check
your [account page](/config/cloud/account) to continue using the service.
"""
MESSAGE_AUTH_FAIL = """
You have been logged out of Home Assistant Cloud because we have been unable
to verify your credentials. Please [log in](/config/cloud) again to continue
using the service.
"""
MESSAGE_REMOTE_READY = """
Your remote access is now available.
You can manage your connectivity on the
[Cloud panel](/config/cloud) or with our [portal](https://account.nabucasa.com/).
"""
MESSAGE_REMOTE_SETUP = """
Unable to create a certificate. We will automatically
retry it and notify you when it's available.
"""
MESSAGE_LOAD_CERTIFICATE_FAILURE = """
Unable to load the certificate. We will automatically
recreate it and notify you when it's available.
"""
class SubscriptionReconnectionReason(StrEnum):
"""Subscription reconnection reason."""
NO_SUBSCRIPTION = "no_subscription"
SUBSCRIPTION_EXPIRED = "subscription_expired"
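# Illustrative sketch (not part of the library): how the per-mode defaults above
# are typically resolved. For the development mode the dictionaries are
# intentionally empty, so callers must supply explicit values.
def _example_resolve_default_server(mode: str, key: str) -> str | None:
    """Example: look up a default server hostname for a given mode."""
    return DEFAULT_SERVERS.get(mode, {}).get(key)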
hass-nabucasa-0.101.0/hass_nabucasa/files.py 0000664 0000000 0000000 00000015537 15011602407 0020640 0 ustar 00root root 0000000 0000000 """Manage cloud files."""
from __future__ import annotations
import base64
from collections.abc import AsyncIterator, Callable, Coroutine
import contextlib
from enum import StrEnum
import hashlib
import logging
from typing import TYPE_CHECKING, Any, TypedDict
from aiohttp import (
ClientResponseError,
ClientTimeout,
StreamReader,
)
from .api import ApiBase, CloudApiError, CloudApiNonRetryableError
_LOGGER = logging.getLogger(__name__)
_FILE_TRANSFER_TIMEOUT = 43200.0 # 43200s == 12h
class StorageType(StrEnum):
"""Storage types."""
BACKUP = "backup"
class FilesError(CloudApiError):
"""Exception raised when handling files."""
class _FilesHandlerUrlResponse(TypedDict):
"""URL Response from files handler."""
url: str
class FilesHandlerDownloadDetails(_FilesHandlerUrlResponse):
"""Download details from files handler."""
class FilesHandlerUploadDetails(_FilesHandlerUrlResponse):
"""Upload details from files handler."""
headers: dict[str, str]
class StoredFile(TypedDict):
"""Stored file."""
Key: str
Size: int
LastModified: str
Metadata: dict[str, Any]
async def calculate_b64md5(
open_stream: Callable[[], Coroutine[Any, Any, AsyncIterator[bytes]]],
size: int,
) -> str:
"""Calculate the MD5 hash of a file.
Raises FilesError if the number of bytes read from the stream does not match the indicated size.
"""
file_hash = hashlib.md5() # noqa: S324 Disable warning about using md5
bytes_read = 0
stream = await open_stream()
async for chunk in stream:
bytes_read += len(chunk)
file_hash.update(chunk)
if bytes_read != size:
raise FilesError(
f"Indicated size {size} does not match actual size {bytes_read}"
)
return base64.b64encode(file_hash.digest()).decode()
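# Illustrative usage sketch (not part of the library): hashing a small in-memory
# payload with calculate_b64md5. The nested coroutine mirrors the `open_stream`
# callable shape that upload() below expects (a coroutine returning an async
# iterator of bytes).
async def _example_md5_for_bytes(payload: bytes) -> str:
    """Example: compute the base64-encoded MD5 digest of an in-memory payload."""
    async def _iter_payload() -> AsyncIterator[bytes]:
        yield payload

    async def _open_stream() -> AsyncIterator[bytes]:
        return _iter_payload()

    return await calculate_b64md5(_open_stream, len(payload))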
class Files(ApiBase):
"""Class to help manage files."""
@property
def hostname(self) -> str:
"""Get the hostname."""
if TYPE_CHECKING:
assert self._cloud.servicehandlers_server is not None
return self._cloud.servicehandlers_server
@property
def non_retryable_error_codes(self) -> set[str]:
"""Get the non-retryable error codes."""
return {"NC-SH-FH-03"}
async def upload(
self,
*,
storage_type: StorageType,
open_stream: Callable[[], Coroutine[Any, Any, AsyncIterator[bytes]]],
filename: str,
base64md5hash: str,
size: int,
metadata: dict[str, Any] | None = None,
) -> None:
"""Upload a file."""
_LOGGER.debug("Uploading %s file with name %s", storage_type, filename)
try:
details: FilesHandlerUploadDetails = await self._call_cloud_api(
path="/files/upload_details",
jsondata={
"storage_type": storage_type,
"filename": filename,
"md5": base64md5hash,
"size": size,
"metadata": metadata,
},
)
except CloudApiNonRetryableError:
raise
except CloudApiError as err:
raise FilesError(err, orig_exc=err) from err
try:
response = await self._call_raw_api(
method="PUT",
url=details["url"],
data=await open_stream(),
headers=details["headers"] | {"content-length": str(size)},
client_timeout=ClientTimeout(
connect=10.0,
total=_FILE_TRANSFER_TIMEOUT,
),
)
self._do_log_response(response)
if 400 <= (status := response.status) < 500:
# We can try to get some context.
error = await response.text()
if error and "" in error and "" in error:
with contextlib.suppress(AttributeError, IndexError):
# This is ugly but it's the best we can do, we have no control
# over the error message structure, so we try what we can.
error = error.split("")[1].split("")[0]
raise FilesError(
f"Failed to upload: ({status}) {error[:256].replace('\n', ' ')}"
)
response.raise_for_status()
except CloudApiError as err:
raise FilesError(err, orig_exc=err) from err
except ClientResponseError as err:
raise FilesError(
f"Failed to upload: ({err.status}) {err.message}",
orig_exc=err,
) from err
async def download(
self,
storage_type: StorageType,
filename: str,
) -> StreamReader:
"""Download a file."""
_LOGGER.debug("Downloading %s file with name %s", storage_type, filename)
try:
details: FilesHandlerDownloadDetails = await self._call_cloud_api(
path=f"/files/download_details/{storage_type}/{filename}",
)
except CloudApiNonRetryableError:
raise
except CloudApiError as err:
raise FilesError(err, orig_exc=err) from err
try:
response = await self._call_raw_api(
method="GET",
headers={},
url=details["url"],
client_timeout=ClientTimeout(
connect=10.0,
total=_FILE_TRANSFER_TIMEOUT,
),
)
self._do_log_response(response)
response.raise_for_status()
except CloudApiError as err:
raise FilesError(err, orig_exc=err) from err
except ClientResponseError as err:
raise FilesError(
f"Failed to download: ({err.status}) {err.message}",
orig_exc=err,
) from err
return response.content
async def list(
self,
storage_type: StorageType,
) -> list[StoredFile]:
"""List files."""
_LOGGER.debug("Listing %s files", storage_type)
try:
files: list[StoredFile] = await self._call_cloud_api(
path=f"/files/{storage_type}"
)
except CloudApiError as err:
raise FilesError(err, orig_exc=err) from err
return files
async def delete(
self,
storage_type: StorageType,
filename: str,
) -> None:
"""Delete a file."""
_LOGGER.debug("Deleting %s file with name %s", storage_type, filename)
try:
await self._call_cloud_api(
path="/files",
method="DELETE",
jsondata={
"storage_type": storage_type,
"filename": filename,
},
)
except CloudApiError as err:
raise FilesError(err, orig_exc=err) from err
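# Illustrative usage sketch (not part of the library): uploading a small
# in-memory payload to the backup storage bucket via the Files helper. `files`
# is assumed to be the Files instance owned by an authenticated Cloud object;
# real callers stream file contents instead of keeping them in memory.
async def _example_upload_backup(files: Files, filename: str, payload: bytes) -> None:
    """Example: upload an in-memory payload as a backup file."""
    async def _iter_payload() -> AsyncIterator[bytes]:
        yield payload

    async def _open_stream() -> AsyncIterator[bytes]:
        return _iter_payload()

    await files.upload(
        storage_type=StorageType.BACKUP,
        open_stream=_open_stream,
        filename=filename,
        base64md5hash=await calculate_b64md5(_open_stream, len(payload)),
        size=len(payload),
        metadata={"example": True},
    )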
hass-nabucasa-0.101.0/hass_nabucasa/google_report_state.py 0000664 0000000 0000000 00000007547 15011602407 0023607 0 ustar 00root root 0000000 0000000 """Module to handle Google Report State."""
from __future__ import annotations
import asyncio
from asyncio.queues import Queue
from typing import TYPE_CHECKING, Any
import uuid
from . import iot_base
if TYPE_CHECKING:
from . import Cloud, _ClientT
MAX_PENDING = 100
ERR_DISCARD_CODE = "message_discarded"
ERR_DISCARD_MSG = "Message discarded because max messages reached"
class ErrorResponse(Exception):
"""Raised when a request receives a success=false response."""
def __init__(self, code: str, message: str) -> None:
"""Initialize error response."""
super().__init__(code)
self.code = code
self.message = message
class GoogleReportState(iot_base.BaseIoT):
"""Report states to Google.
Uses a queue to send messages.
"""
def __init__(self, cloud: Cloud[_ClientT]) -> None:
"""Initialize Google Report State."""
super().__init__(cloud)
self._connect_lock = asyncio.Lock()
self._to_send: Queue[dict[str, Any]] = Queue(100)
self._message_sender_task: asyncio.Task | None = None
# Local code waiting for a response
self._response_handler: dict[str, asyncio.Future[None]] = {}
self.register_on_connect(self._async_on_connect)
self.register_on_disconnect(self._async_on_disconnect)
# Register start/stop
cloud.register_on_stop(self.disconnect)
@property
def package_name(self) -> str:
"""Return the package name for logging."""
return __name__
@property
def ws_server_url(self) -> str:
"""Server to connect to."""
return f"wss://{self.cloud.remotestate_server}/v1"
async def async_send_message(self, msg: Any) -> None:
"""Send a message."""
msgid: str = uuid.uuid4().hex
# Since connect is async, guard against send_message called twice in parallel.
async with self._connect_lock:
if self.state == iot_base.STATE_DISCONNECTED:
asyncio.create_task(self.connect())
# Give connect time to start up and change state.
await asyncio.sleep(0)
if self._to_send.full():
discard_msg = self._to_send.get_nowait()
self._response_handler.pop(discard_msg["msgid"]).set_exception(
ErrorResponse(ERR_DISCARD_CODE, ERR_DISCARD_MSG),
)
fut = self._response_handler[msgid] = asyncio.Future()
self._to_send.put_nowait({"msgid": msgid, "payload": msg})
try:
return await fut
finally:
self._response_handler.pop(msgid, None)
def async_handle_message(self, msg: dict[str, Any]) -> None:
"""Handle a message."""
response_handler = self._response_handler.get(msg["msgid"])
if response_handler is not None:
if "error" in msg:
response_handler.set_exception(
ErrorResponse(msg["error"], msg["message"]),
)
else:
response_handler.set_result(msg.get("payload"))
return
self._logger.warning("Got unhandled message: %s", msg)
async def _async_on_connect(self) -> None:
"""On Connect handler."""
self._message_sender_task = asyncio.create_task(self._async_message_sender())
async def _async_on_disconnect(self) -> None:
"""On disconnect handler."""
if self._message_sender_task is not None:
self._message_sender_task.cancel()
self._message_sender_task = None
async def _async_message_sender(self) -> None:
"""Start sending messages."""
self._logger.debug("Message sender task activated")
try:
while True:
await self.async_send_json_message(await self._to_send.get())
except asyncio.CancelledError:
pass
self._logger.debug("Message sender task shut down")
hass-nabucasa-0.101.0/hass_nabucasa/ice_servers.py 0000664 0000000 0000000 00000011625 15011602407 0022041 0 ustar 00root root 0000000 0000000 """Manage ICE servers."""
from __future__ import annotations
import asyncio
from collections.abc import Awaitable, Callable
import logging
import random
import time
from typing import TYPE_CHECKING
from aiohttp import ClientResponseError
from aiohttp.hdrs import AUTHORIZATION, USER_AGENT
from webrtc_models import RTCIceServer
if TYPE_CHECKING:
from . import Cloud, _ClientT
_LOGGER = logging.getLogger(__name__)
class IceServers:
"""Class to manage ICE servers."""
def __init__(self, cloud: Cloud[_ClientT]) -> None:
"""Initialize ICE Servers."""
self.cloud = cloud
self._refresh_task: asyncio.Task | None = None
self._ice_servers: list[RTCIceServer] = []
self._ice_servers_listener: Callable[[], Awaitable[None]] | None = None
self._ice_servers_listener_unregister: Callable[[], None] | None = None
async def _async_fetch_ice_servers(self) -> list[RTCIceServer]:
"""Fetch ICE servers."""
if TYPE_CHECKING:
assert self.cloud.id_token is not None
if self.cloud.subscription_expired:
return []
async with self.cloud.websession.get(
f"https://{self.cloud.servicehandlers_server}/webrtc/ice_servers",
headers={
AUTHORIZATION: self.cloud.id_token,
USER_AGENT: self.cloud.client.client_name,
},
) as resp:
resp.raise_for_status()
return [
RTCIceServer(
urls=item["urls"],
username=item["username"],
credential=item["credential"],
)
for item in await resp.json()
]
def _get_refresh_sleep_time(self) -> int:
"""Get the sleep time for refreshing ICE servers."""
timestamps = [
int(server.username.split(":")[0])
for server in self._ice_servers
if server.username is not None and ":" in server.username
]
if not timestamps:
return random.randint(3600, 3600 * 12) # 1-12 hours
if (expiration := min(timestamps) - int(time.time()) - 3600) < 0:
return random.randint(100, 300)
# 1 hour before the earliest expiration
return expiration
async def _async_refresh_ice_servers(self) -> None:
"""Handle ICE server refresh."""
while True:
try:
self._ice_servers = await self._async_fetch_ice_servers()
except ClientResponseError as err:
_LOGGER.error("Can't refresh ICE servers: %s", err.message)
# We should not keep the existing ICE servers with old timestamps
# as that will retrigger a refresh almost immediately.
if err.status in (401, 403):
self._ice_servers = []
except asyncio.CancelledError:
# Task is canceled, stop it.
break
if self._ice_servers_listener is not None:
await self._ice_servers_listener()
sleep_time = self._get_refresh_sleep_time()
await asyncio.sleep(sleep_time)
def _on_add_listener(self) -> None:
"""When the instance is connected."""
self._refresh_task = asyncio.create_task(self._async_refresh_ice_servers())
def _on_remove_listener(self) -> None:
"""When the instance is disconnected."""
if self._refresh_task is not None:
self._refresh_task.cancel()
self._refresh_task = None
async def async_register_ice_servers_listener(
self,
register_ice_server_fn: Callable[
[list[RTCIceServer]],
Awaitable[Callable[[], None]],
],
) -> Callable[[], None]:
"""Register a listener for ICE servers and return unregister function."""
_LOGGER.debug("Registering ICE servers listener")
async def perform_ice_server_update() -> None:
"""Perform ICE server update by unregistering and registering servers."""
_LOGGER.debug("Updating ICE servers")
if self._ice_servers_listener_unregister is not None:
self._ice_servers_listener_unregister()
self._ice_servers_listener_unregister = None
self._ice_servers_listener_unregister = await register_ice_server_fn(
self._ice_servers,
)
_LOGGER.debug("ICE servers updated")
def remove_listener() -> None:
"""Remove listener."""
if self._ice_servers_listener_unregister is not None:
self._ice_servers_listener_unregister()
self._ice_servers_listener_unregister = None
self._ice_servers = []
self._ice_servers_listener = None
self._on_remove_listener()
self._ice_servers_listener = perform_ice_server_update
self._on_add_listener()
return remove_listener
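# Illustrative usage sketch (not part of the library): registering a listener
# that only logs the refreshed ICE servers. `ice_servers` is assumed to be the
# IceServers instance owned by an authenticated Cloud object; the returned
# callable unregisters the listener and stops the refresh task.
async def _example_log_ice_servers(ice_servers: IceServers) -> Callable[[], None]:
    """Example: register a logging listener for ICE server refreshes."""
    async def _on_refresh(servers: list[RTCIceServer]) -> Callable[[], None]:
        _LOGGER.debug("Received %d ICE servers", len(servers))
        return lambda: None

    return await ice_servers.async_register_ice_servers_listener(_on_refresh)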
hass-nabucasa-0.101.0/hass_nabucasa/instance_api.py 0000664 0000000 0000000 00000003457 15011602407 0022171 0 ustar 00root root 0000000 0000000 """Manage instance API."""
from __future__ import annotations
import logging
from typing import TYPE_CHECKING, Literal, TypedDict
from aiohttp import hdrs
from .api import ApiBase, CloudApiError, api_exception_handler
_LOGGER = logging.getLogger(__name__)
class InstanceApiError(CloudApiError):
"""Exception raised when handling instance API."""
class InstanceConnectionDetails(TypedDict):
"""Connection details from instance API."""
connected_at: str
name: str
remote_ip_address: str
version: str
class InstanceConnectionConnnected(TypedDict):
"""Connection details from instance API."""
connected: Literal[True]
details: InstanceConnectionDetails
class InstanceConnectionDisconnected(TypedDict):
"""Connection details from instance API."""
connected: Literal[False]
type InstanceConnection = InstanceConnectionConnnected | InstanceConnectionDisconnected
class InstanceApi(ApiBase):
"""Class to help communicate with the instance API."""
@property
def hostname(self) -> str:
"""Get the hostname."""
if TYPE_CHECKING:
assert self._cloud.servicehandlers_server is not None
return self._cloud.servicehandlers_server
@api_exception_handler(InstanceApiError)
async def connection(
self,
*,
access_token: str | None = None,
skip_token_check: bool = False,
) -> InstanceConnection:
"""Get the connection details."""
_LOGGER.debug("Getting instance connection details")
details: InstanceConnection = await self._call_cloud_api(
path="/instance/connection",
headers={
hdrs.AUTHORIZATION: access_token or self._cloud.access_token,
},
skip_token_check=skip_token_check,
)
return details
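# Illustrative usage sketch (not part of the library): reading the connection
# state of the current instance. `instance` is assumed to be the InstanceApi
# helper owned by an authenticated Cloud object.
async def _example_is_instance_connected(instance: InstanceApi) -> bool:
    """Example: return True when the instance reports an active connection."""
    details = await instance.connection()
    if details["connected"]:
        _LOGGER.debug("Connected since %s", details["details"]["connected_at"])
    return details["connected"]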
hass-nabucasa-0.101.0/hass_nabucasa/iot.py 0000664 0000000 0000000 00000017232 15011602407 0020323 0 ustar 00root root 0000000 0000000 """Module to handle messages from Home Assistant cloud."""
from __future__ import annotations
import asyncio
from contextlib import suppress
import logging
import pprint
import random
from typing import TYPE_CHECKING, Any
import uuid
from . import iot_base
from .utils import Registry
if TYPE_CHECKING:
from . import Cloud, _ClientT
HANDLERS = Registry()
_LOGGER = logging.getLogger(__name__)
class UnknownHandler(Exception):
"""Exception raised when trying to handle unknown handler."""
class ErrorMessage(Exception):
"""Exception raised when there was error handling message in the cloud."""
def __init__(self, error: Any) -> None:
"""Initialize Error Message."""
super().__init__("Error in Cloud")
self.error = error
class HandlerError(Exception):
"""Exception raised when the handler failed."""
def __init__(self, error: str) -> None:
"""Initialize Error Message."""
super().__init__("Error in handler")
self.error = error
class CloudIoT(iot_base.BaseIoT):
"""Class to manage the IoT connection."""
mark_connected_after_first_message: bool = True
def __init__(self, cloud: Cloud[_ClientT]) -> None:
"""Initialize the CloudIoT class."""
super().__init__(cloud)
# Local code waiting for a response
self._response_handler: dict[str, asyncio.Future[Any]] = {}
# Register start/stop
cloud.register_on_start(self.start)
cloud.register_on_stop(self.disconnect)
@property
def package_name(self) -> str:
"""Return the package name for logging."""
return __name__
@property
def ws_heartbeat(self) -> float | None:
"""Server to connect to."""
return 300
@property
def ws_server_url(self) -> str:
"""Server to connect to."""
return f"wss://{self.cloud.relayer_server}/websocket"
async def start(self) -> None:
"""Start the CloudIoT server."""
if self.cloud.subscription_expired:
return
asyncio.create_task(self.connect())
async def async_send_message(
self,
handler: str,
payload: Any,
expect_answer: bool = True,
) -> Any | None:
"""Send a message."""
msgid = uuid.uuid4().hex
fut: asyncio.Future[Any] | None = None
if expect_answer:
fut = self._response_handler[msgid] = asyncio.Future()
try:
await self.async_send_json_message(
{"msgid": msgid, "handler": handler, "payload": payload},
)
if expect_answer and fut is not None:
return await fut
return None
finally:
self._response_handler.pop(msgid, None)
def async_handle_message(self, msg: dict[str, Any]) -> None:
"""Handle a message."""
response_handler = self._response_handler.get(msg["msgid"])
if response_handler is not None:
if "payload" in msg:
response_handler.set_result(msg["payload"])
else:
response_handler.set_exception(ErrorMessage(msg["error"]))
return
asyncio.create_task(self._async_handle_handler_message(msg))
async def _async_handle_handler_message(self, message: dict[str, Any]) -> None:
"""Handle incoming IoT message."""
response = {"msgid": message["msgid"]}
try:
handler = HANDLERS.get(message["handler"])
if handler is None:
raise UnknownHandler
result = await handler(self.cloud, message.get("payload"))
# No response from handler
if result is None:
return
response["payload"] = result
except UnknownHandler:
response["error"] = "unknown-handler"
except HandlerError as err:
self._logger.warning("Error handling message: %s", err.error)
response["error"] = err.error
except Exception: # pylint: disable=broad-except
self._logger.exception("Error handling message")
response["error"] = "exception"
# Client is unset in case the connection has been lost.
if self.client is None:
return
if self._logger.isEnabledFor(logging.DEBUG):
self._logger.debug("Publishing message:\n%s\n", pprint.pformat(response))
# Suppress when client is closing.
with suppress(ConnectionResetError):
await self.client.send_json(response)
async def _connected(self) -> None:
"""Handle connected."""
await super()._connected()
await self.cloud.client.cloud_connected()
async def _disconnected(self) -> None:
"""Handle connected."""
await super()._disconnected()
await self.cloud.client.cloud_disconnected()
@HANDLERS.register("system")
async def async_handle_system(cloud: Cloud[_ClientT], payload: dict[str, Any]) -> None:
"""Handle an incoming IoT message for System."""
return await cloud.client.async_system_message(payload)
@HANDLERS.register("alexa")
async def async_handle_alexa(
cloud: Cloud[_ClientT],
payload: dict[str, Any],
) -> dict[str, Any]:
"""Handle an incoming IoT message for Alexa."""
return await cloud.client.async_alexa_message(payload)
@HANDLERS.register("google_actions")
async def async_handle_google_actions(
cloud: Cloud[_ClientT],
payload: dict[str, Any],
) -> dict[str, Any]:
"""Handle an incoming IoT message for Google Actions."""
return await cloud.client.async_google_message(payload)
@HANDLERS.register("cloud")
async def async_handle_cloud(cloud: Cloud[_ClientT], payload: dict[str, Any]) -> None:
"""Handle an incoming IoT message for cloud component."""
action = payload["action"]
if action == "logout":
# Log out of Home Assistant Cloud
await cloud.logout()
_LOGGER.error(
"You have been logged out from Home Assistant cloud: %s",
payload["reason"],
)
elif action == "disconnect_remote":
# Disconnect Remote connection
await cloud.remote.disconnect(clear_snitun_token=True)
elif action == "evaluate_remote_security":
async def _reconnect() -> None:
"""Reconnect after a random timeout."""
await asyncio.sleep(random.randint(60, 7200))
await cloud.remote.disconnect(clear_snitun_token=True)
await cloud.remote.connect()
# Reconnect to remote frontends
cloud.client.loop.create_task(_reconnect())
elif action in ("user_notification", "critical_user_notification"):
# Send user Notification
cloud.client.user_message(
"homeassistant_cloud_notification",
payload["title"],
payload["message"],
)
else:
_LOGGER.warning("Received unknown cloud action: %s", action)
@HANDLERS.register("remote_sni")
async def async_handle_remote_sni(
cloud: Cloud[_ClientT],
payload: dict[str, Any], # noqa: ARG001
) -> dict[str, Any]:
"""Handle remote UI requests for cloud."""
await cloud.client.async_cloud_connect_update(True)
return {"server": cloud.remote.snitun_server}
@HANDLERS.register("connection_info")
async def async_handle_connection_info(
cloud: Cloud[_ClientT],
payload: dict[str, Any],
) -> dict[str, Any]:
"""Handle connection info requests for cloud."""
return await cloud.client.async_cloud_connection_info(payload)
@HANDLERS.register("webhook")
async def async_handle_webhook(
cloud: Cloud[_ClientT],
payload: dict[str, Any],
) -> dict[str, Any]:
"""Handle an incoming IoT message for cloud webhooks."""
return await cloud.client.async_webhook_message(payload)
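# Illustrative sketch (not part of the library): how an additional relayer
# handler could be registered through the HANDLERS registry. The "example_echo"
# handler name is hypothetical; the cloud does not send such messages.
@HANDLERS.register("example_echo")
async def async_handle_example_echo(
    cloud: Cloud[_ClientT],  # noqa: ARG001
    payload: dict[str, Any],
) -> dict[str, Any]:
    """Example: echo the incoming payload back to the relayer."""
    return {"echo": payload}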
hass-nabucasa-0.101.0/hass_nabucasa/iot_base.py 0000664 0000000 0000000 00000026430 15011602407 0021315 0 ustar 00root root 0000000 0000000 """Base class to keep a websocket connection open to a server."""
from __future__ import annotations
import asyncio
from collections.abc import Awaitable, Callable
import dataclasses
import logging
import pprint
import random
from socket import gaierror
from typing import TYPE_CHECKING, Any
from aiohttp import (
ClientError,
ClientWebSocketResponse,
WSMessage,
WSMsgType,
WSServerHandshakeError,
client_exceptions,
hdrs,
)
from .auth import CloudError
from .const import (
MESSAGE_EXPIRATION,
STATE_CONNECTED,
STATE_CONNECTING,
STATE_DISCONNECTED,
SubscriptionReconnectionReason,
)
from .utils import gather_callbacks
if TYPE_CHECKING:
from . import Cloud, _ClientT
@dataclasses.dataclass
class DisconnectReason:
"""Disconnect reason."""
clean: bool
reason: str
class NotConnected(Exception):
"""Exception raised when trying to handle unknown handler."""
class BaseIoT:
"""Class to manage the IoT connection."""
mark_connected_after_first_message = False
def __init__(self, cloud: Cloud[_ClientT]) -> None:
"""Initialize the CloudIoT class."""
self.cloud = cloud
# The WebSocket client
self.client: ClientWebSocketResponse | None = None
# Scheduled sleep task till next connection retry
self.retry_task: asyncio.Task | None = None
# Boolean to indicate if we wanted the connection to close
self.close_requested: bool = False
# The current number of attempts to connect, impacts wait time
self.tries: int = 0
# Current state of the connection
self.state: str = STATE_DISCONNECTED
self._on_connect: list[Callable[[], Awaitable[None]]] = []
self._on_disconnect: list[Callable[[], Awaitable[None]]] = []
self._logger = logging.getLogger(self.package_name)
self._disconnect_event: asyncio.Event | None = None
self.last_disconnect_reason: DisconnectReason | None = None
@property
def package_name(self) -> str:
"""Return package name for logging."""
raise NotImplementedError
@property
def ws_heartbeat(self) -> float | None:
"""Server to connect to."""
return None
@property
def ws_server_url(self) -> str:
"""Server to connect to."""
raise NotImplementedError
@property
def require_subscription(self) -> bool:
"""If the server requires a valid subscription."""
return True
def async_handle_message(self, msg: dict[str, Any]) -> None:
"""Handle incoming message.
Run all async tasks in a wrapper to log appropriately.
"""
raise NotImplementedError
# --- Do not override after this line ---
def register_on_connect(self, on_connect_cb: Callable[[], Awaitable[None]]) -> None:
"""Register an async on_connect callback."""
self._on_connect.append(on_connect_cb)
def register_on_disconnect(
self,
on_disconnect_cb: Callable[[], Awaitable[None]],
) -> None:
"""Register an async on_disconnect callback."""
self._on_disconnect.append(on_disconnect_cb)
@property
def connected(self) -> bool:
"""Return if we're currently connected."""
return self.state == STATE_CONNECTED
async def async_send_json_message(self, message: dict[str, Any]) -> None:
"""Send a message.
Raises NotConnected if client not connected.
"""
if self.state != STATE_CONNECTED or self.client is None:
raise NotConnected
if self._logger.isEnabledFor(logging.DEBUG):
self._logger.debug("Publishing message:\n%s\n", pprint.pformat(message))
await self.client.send_json(message)
async def connect(self) -> None:
"""Connect to the IoT broker."""
if self.state != STATE_DISCONNECTED:
raise RuntimeError("Connect called while not disconnected")
self.close_requested = False
self.state = STATE_CONNECTING
self.tries = 0
self._disconnect_event = asyncio.Event()
while True:
try:
self._logger.debug("Trying to connect")
await self._handle_connection()
except Exception: # pylint: disable=broad-except
# Safety net. This should never hit.
# Still adding it here to make sure we can always reconnect
self._logger.exception("Unexpected error")
if self.state == STATE_CONNECTED:
await self._disconnected()
if self.close_requested:
break
if self.require_subscription and self.cloud.subscription_expired:
self.cloud.async_initialize_subscription_reconnection_handler(
SubscriptionReconnectionReason.SUBSCRIPTION_EXPIRED,
)
break
self.state = STATE_CONNECTING
self.tries += 1
try:
await self._wait_retry()
except asyncio.CancelledError:
# Happens if disconnect called
break
self.state = STATE_DISCONNECTED
self._disconnect_event.set()
self._disconnect_event = None
async def _wait_retry(self) -> None:
"""Wait until it's time till the next retry."""
# Sleep 2^tries + 0…tries*3 seconds between retries
self.retry_task = asyncio.create_task(
asyncio.sleep(2 ** min(9, self.tries) + random.randint(0, self.tries * 3)),
)
await self.retry_task
self.retry_task = None
async def _handle_connection(self) -> None:
"""Connect to the IoT broker."""
try:
await self.cloud.auth.async_check_token()
except CloudError as err:
self._logger.warning(
"Cannot connect because unable to refresh token: %s",
err,
)
return
if self.require_subscription and self.cloud.subscription_expired:
self._logger.debug("Cloud subscription expired. Cancelling connecting.")
self.cloud.client.user_message(
"cloud_subscription_expired",
"Home Assistant Cloud",
MESSAGE_EXPIRATION,
)
self.close_requested = True
return
disconnect_clean: bool = False
disconnect_reason: (
str
| WSServerHandshakeError
| ClientError
| ConnectionResetError
| gaierror
| None
) = None
try:
self.client = await self.cloud.websession.ws_connect(
self.ws_server_url,
heartbeat=self.ws_heartbeat,
headers={
hdrs.AUTHORIZATION: f"Bearer {self.cloud.id_token}",
hdrs.USER_AGENT: self.cloud.client.client_name,
},
)
if not self.mark_connected_after_first_message:
await self._connected()
while not self.client.closed:
msg: WSMessage | None | str = None
try:
msg = await self.client.receive(55)
except TimeoutError:
# This is logged as info instead of warning because when
# this hits there is not really much that can be done about it.
# But the context is still valuable to have while
# troubleshooting.
self._logger.info("Timeout while waiting to receive message")
await self.client.ping()
continue
if msg.type in (WSMsgType.CLOSE, WSMsgType.CLOSED, WSMsgType.CLOSING):
disconnect_clean = self.state == STATE_CONNECTED
disconnect_reason = f"Closed by server. {msg.extra} ({msg.data})"
break
# Do this inside the loop because if 2 clients are connected,
# it can happen that we get connected with valid auth,
# but then server decides to drop our connection.
if self.state != STATE_CONNECTED:
await self._connected()
if msg.type == WSMsgType.ERROR:
disconnect_reason = "Connection error"
break
if msg.type != WSMsgType.TEXT:
disconnect_reason = f"Received non-Text message: {msg.type}"
break
try:
msg_content: dict[str, Any] = msg.json()
except ValueError:
disconnect_reason = "Received invalid JSON."
break
if self._logger.isEnabledFor(logging.DEBUG):
self._logger.debug(
"Received message:\n%s\n",
pprint.pformat(msg_content),
)
try:
self.async_handle_message(msg_content)
except Exception: # pylint: disable=broad-except
self._logger.exception("Unexpected error handling %s", msg_content)
if self.client.closed:
if self.close_requested:
disconnect_clean = True
disconnect_reason = "Close requested"
elif disconnect_reason is None:
disconnect_reason = "Closed by server. Unknown reason"
except client_exceptions.WSServerHandshakeError as err:
if err.status == 401:
disconnect_reason = "Invalid auth."
self.close_requested = True
# Should we notify user?
else:
disconnect_reason = err
except (client_exceptions.ClientError, ConnectionResetError, gaierror) as err:
disconnect_reason = err
except asyncio.CancelledError:
disconnect_clean = True
disconnect_reason = "Connection Cancelled"
finally:
if self.client:
base_msg = "Connection closed"
await self.client.close()
self.client = None
else:
base_msg = "Unable to connect"
msg = f"{base_msg}: {disconnect_reason}"
self.last_disconnect_reason = DisconnectReason(disconnect_clean, msg)
if self.close_requested or disconnect_clean:
self._logger.info(msg)
else:
self._logger.warning(msg)
async def disconnect(self) -> None:
"""Disconnect the client."""
self.close_requested = True
if self.client is not None:
await self.client.close()
elif self.retry_task is not None:
self.retry_task.cancel()
if self._disconnect_event is not None:
await self._disconnect_event.wait()
async def _connected(self) -> None:
"""Handle connected."""
self.last_disconnect_reason = None
self.tries = 0
self.state = STATE_CONNECTED
self._logger.info("Connected")
if self._on_connect:
await gather_callbacks(self._logger, "on_connect", self._on_connect)
async def _disconnected(self) -> None:
"""Handle connected."""
if self._on_disconnect:
await gather_callbacks(self._logger, "on_disconnect", self._on_disconnect)
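# Illustrative sketch (not part of the library): the minimal surface a concrete
# BaseIoT subclass needs to provide. The "/example" endpoint is hypothetical;
# real subclasses such as CloudIoT derive their URL from the Cloud configuration.
class _ExampleIoT(BaseIoT):
    """Example subclass wiring the required BaseIoT hooks."""

    @property
    def package_name(self) -> str:
        """Return the package name used for logging."""
        return __name__

    @property
    def ws_server_url(self) -> str:
        """Return the websocket endpoint to connect to."""
        return f"wss://{self.cloud.relayer_server}/example"

    def async_handle_message(self, msg: dict[str, Any]) -> None:
        """Log every incoming message."""
        self._logger.debug("Example handler received: %s", msg)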
hass-nabucasa-0.101.0/hass_nabucasa/py.typed 0000664 0000000 0000000 00000000000 15011602407 0020636 0 ustar 00root root 0000000 0000000 hass-nabucasa-0.101.0/hass_nabucasa/remote.py 0000664 0000000 0000000 00000054110 15011602407 0021017 0 ustar 00root root 0000000 0000000 """Manage remote UI connections."""
from __future__ import annotations
import asyncio
from contextvars import ContextVar
from datetime import datetime, timedelta
from enum import Enum
import logging
import random
from ssl import SSLContext, SSLError
from typing import TYPE_CHECKING, cast
import aiohttp
import async_timeout
import attr
from snitun.exceptions import SniTunConnectionError
from snitun.utils.aes import generate_aes_keyset
from snitun.utils.aiohttp_client import SniTunClientAioHttp
from . import cloud_api, const, utils
from .acme import AcmeClientError, AcmeHandler, AcmeJWSVerificationError
from .const import SubscriptionReconnectionReason
if TYPE_CHECKING:
from . import Cloud, _ClientT
_LOGGER = logging.getLogger(__name__)
RENEW_IF_EXPIRES_DAYS = 25
WARN_RENEW_FAILED_DAYS = 18
is_cloud_request = ContextVar("IS_CLOUD_REQUEST", default=False)
class RemoteError(Exception):
"""General remote error."""
class RemoteBackendError(RemoteError):
"""Backend problem with nabucasa API."""
class RemoteInsecureVersion(RemoteError):
"""Raise if you try to connect with an insecure Core version."""
class RemoteForbidden(RemoteError):
"""Raise if remote connection is not allowed."""
class RemoteNotConnected(RemoteError):
"""Raise if a request need connection and we are not ready."""
class SubscriptionExpired(RemoteError):
"""Raise if we cannot connect because subscription expired."""
@attr.s
class SniTunToken:
"""Handle snitun token."""
fernet = attr.ib(type=bytes)
aes_key = attr.ib(type=bytes)
aes_iv = attr.ib(type=bytes)
valid = attr.ib(type=datetime)
throttling = attr.ib(type=int)
@attr.s
class Certificate:
"""Handle certificate details."""
common_name = attr.ib(type=str)
expire_date = attr.ib(type=datetime)
fingerprint = attr.ib(type=str)
alternative_names = attr.ib(type=list[str] | None)
class CertificateStatus(str, Enum):
"""Representation of the remote UI status."""
ERROR = "error"
GENERATING = "generating"
LOADED = "loaded"
LOADING = "loading"
READY = "ready"
class RemoteUI:
"""Class to help manage remote connections."""
def __init__(self, cloud: Cloud[_ClientT]) -> None:
"""Initialize cloudhooks."""
self.cloud = cloud
self._acme: AcmeHandler | None = None
self._snitun: SniTunClientAioHttp | None = None
self._snitun_server: str | None = None
self._instance_domain: str | None = None
self._alias: list[str] | None = None
self._reconnect_task: asyncio.Task | None = None
self._acme_task: asyncio.Task | None = None
self._token: SniTunToken | None = None
self._certificate_status: CertificateStatus | None = None
self._info_loaded = asyncio.Event()
# Register start/stop
cloud.register_on_start(self.start)
cloud.register_on_stop(self.stop)
async def start(self) -> None:
"""Start remote UI loop."""
if self.cloud.subscription_expired:
self.cloud.async_initialize_subscription_reconnection_handler(
SubscriptionReconnectionReason.SUBSCRIPTION_EXPIRED,
)
return
self._acme_task = asyncio.create_task(self._certificate_handler())
await self._info_loaded.wait()
async def stop(self) -> None:
"""Stop remote UI loop."""
if self._acme_task is None:
return
self._acme_task.cancel()
self._acme_task = None
@property
def snitun_server(self) -> str | None:
"""Return connected snitun server."""
return self._snitun_server
@property
def certificate_status(self) -> CertificateStatus | None:
"""Return the certificate status."""
return self._certificate_status
@property
def instance_domain(self) -> str | None:
"""Return instance domain."""
return self._instance_domain
@property
def alias(self) -> list[str] | None:
"""Return alias."""
return self._alias
@property
def is_connected(self) -> bool:
"""Return true if we are ready to connect."""
return bool(False if self._snitun is None else self._snitun.is_connected)
@property
def certificate(self) -> Certificate | None:
"""Return certificate details."""
if (
not self._acme
or not self._acme.certificate_available
or self._acme.common_name is None
or self._acme.expire_date is None
or self._acme.fingerprint is None
):
return None
return Certificate(
self._acme.common_name,
self._acme.expire_date,
self._acme.fingerprint,
alternative_names=self._acme.alternative_names,
)
async def _create_context(self) -> SSLContext:
"""Create SSL context with acme certificate."""
context = utils.server_context_modern()
# We can not get here without this being set, but mypy does not know that.
assert self._acme is not None
await self.cloud.run_executor(
context.load_cert_chain,
self._acme.path_fullchain,
self._acme.path_private_key,
)
return context
async def _recreate_backend(self) -> None:
"""There was a connection error, recreate the backend."""
_LOGGER.info("Recreating backend")
await self.close_backend()
# Wait until backend is cleaned
await asyncio.sleep(5)
await self.load_backend()
async def _recreate_acme(self, domains: list[str], email: str) -> None:
"""Recreate the acme client."""
if self._acme and self._acme.certificate_available:
await self._acme.reset_acme()
self._acme = AcmeHandler(self.cloud, domains, email)
async def load_backend(self) -> bool:
"""Load backend details."""
try:
async with async_timeout.timeout(30):
resp = await cloud_api.async_remote_register(self.cloud)
resp.raise_for_status()
except (TimeoutError, aiohttp.ClientError) as err:
msg = "Can't update remote details from Home Assistant cloud"
if isinstance(err, aiohttp.ClientResponseError):
msg += f" ({err.status})" # pylint: disable=no-member
elif isinstance(err, asyncio.TimeoutError):
msg += " (timeout)"
_LOGGER.error(msg)
return False
data = await resp.json()
# Extract data
_LOGGER.debug("Retrieve instance data: %s", data)
instance_domain = data["domain"]
email = data["email"]
server = data["server"]
# Cache data
self._instance_domain = instance_domain
self._snitun_server = server
self._alias = cast("list[str]", data.get("alias", []))
domains: list[str] = [instance_domain, *self._alias]
# Set instance details for certificate
self._acme = AcmeHandler(self.cloud, domains, email)
# Load existing certificate
self._certificate_status = CertificateStatus.LOADING
await self._acme.load_certificate()
# Domain changed / revoke CA
ca_domains = set(self._acme.alternative_names or [])
if self._acme.common_name:
ca_domains.add(self._acme.common_name)
if not self._acme.certificate_available or (
ca_domains and ca_domains != set(domains)
):
for alias in self.alias or []:
if not await self._custom_domain_dns_configuration_is_valid(
instance_domain,
alias,
):
domains.remove(alias)
if ca_domains != set(domains):
if ca_domains:
_LOGGER.warning(
"Invalid certificate found for: (%s)",
",".join(ca_domains),
)
await self._recreate_acme(domains, email)
self._info_loaded.set()
should_create_cert = await self._should_renew_certificates()
if should_create_cert:
try:
self._certificate_status = CertificateStatus.GENERATING
await self._acme.issue_certificate()
except (AcmeJWSVerificationError, AcmeClientError) as err:
if isinstance(err, AcmeJWSVerificationError):
await self._recreate_acme(domains, email)
self.cloud.client.user_message(
"cloud_remote_acme",
"Home Assistant Cloud",
const.MESSAGE_REMOTE_SETUP,
)
self._certificate_status = CertificateStatus.ERROR
return False
self.cloud.client.user_message(
"cloud_remote_acme",
"Home Assistant Cloud",
const.MESSAGE_REMOTE_READY,
)
self._certificate_status = CertificateStatus.LOADED
await self._acme.hardening_files()
self._certificate_status = CertificateStatus.READY
if self.cloud.client.aiohttp_runner is None:
_LOGGER.debug("Waiting for aiohttp runner to come available")
# aiohttp_runner comes available when Home Assistant has started.
while self.cloud.client.aiohttp_runner is None: # noqa: ASYNC110
await asyncio.sleep(1)
try:
context = await self._create_context()
except SSLError as err:
if err.reason == "KEY_VALUES_MISMATCH":
self.cloud.client.user_message(
"cloud_remote_acme",
"Home Assistant Cloud",
const.MESSAGE_LOAD_CERTIFICATE_FAILURE,
)
await self._recreate_acme(domains, email)
self._certificate_status = CertificateStatus.ERROR
return False
# Setup snitun / aiohttp wrapper
_LOGGER.debug("Initializing SniTun")
self._snitun = SniTunClientAioHttp(
self.cloud.client.aiohttp_runner,
context,
snitun_server=self._snitun_server,
snitun_port=443,
)
_LOGGER.debug("Starting SniTun")
is_cloud_request.set(True)
await self._snitun.start(False, self._recreate_backend)
self.cloud.client.dispatcher_message(const.DISPATCH_REMOTE_BACKEND_UP)
_LOGGER.debug(
"Connecting remote backend: %s",
self.cloud.client.remote_autostart,
)
# Connect to remote is autostart enabled
if self.cloud.client.remote_autostart:
asyncio.create_task(self.connect())
return True
async def close_backend(self) -> None:
"""Close connections and shutdown backend."""
_LOGGER.debug("Closing backend")
# Close reconnect task
if self._reconnect_task:
self._reconnect_task.cancel()
self._reconnect_task = None
# Disconnect snitun
if self._snitun:
await self._snitun.stop()
# Cleanup
self._snitun = None
self._acme = None
self._token = None
self._instance_domain = None
self._alias = None
self._snitun_server = None
self.cloud.client.dispatcher_message(const.DISPATCH_REMOTE_BACKEND_DOWN)
async def handle_connection_requests(self, caller_ip: str) -> None: # noqa: ARG002
"""Handle connection requests."""
if not self._snitun:
raise RemoteNotConnected("Can't handle request-connection without backend")
if self._snitun.is_connected:
return
await self.connect()
async def _refresh_snitun_token(self) -> None:
"""Handle snitun token."""
if self._token and self._token.valid > utils.utcnow():
_LOGGER.debug("Don't need refresh snitun token")
return
if self.cloud.subscription_expired:
raise SubscriptionExpired
# Generate session token
aes_key, aes_iv = generate_aes_keyset()
try:
async with async_timeout.timeout(30):
resp = await cloud_api.async_remote_token(self.cloud, aes_key, aes_iv)
if resp.status == 409:
raise RemoteInsecureVersion
if resp.status == 403:
msg = ""
if "application/json" in (resp.content_type or ""):
msg = (await resp.json()).get("message", "")
raise RemoteForbidden(msg)
if resp.status not in (200, 201):
raise RemoteBackendError
except (TimeoutError, aiohttp.ClientError):
raise RemoteBackendError from None
data = await resp.json()
self._token = SniTunToken(
data["token"].encode(),
aes_key,
aes_iv,
utils.utc_from_timestamp(data["valid"]),
data["throttling"],
)
async def connect(self) -> None:
"""Connect to snitun server."""
if not self._snitun:
raise RemoteNotConnected("Can't handle request-connection without backend")
# Check if we already connected
if self._snitun.is_connected:
return
insecure = False
forbidden = False
try:
_LOGGER.debug("Refresh snitun token")
async with async_timeout.timeout(30):
await self._refresh_snitun_token()
# We can not get here without this being set, but mypy does not know that.
assert self._token is not None
_LOGGER.debug("Attempting connection to %s", self._snitun_server)
async with async_timeout.timeout(30):
await self._snitun.connect(
self._token.fernet,
self._token.aes_key,
self._token.aes_iv,
throttling=self._token.throttling,
)
_LOGGER.debug("Connected")
self.cloud.client.dispatcher_message(const.DISPATCH_REMOTE_CONNECT)
except TimeoutError:
_LOGGER.error("Timeout connecting to snitun server")
except SniTunConnectionError as err:
_LOGGER.log(
logging.ERROR if self._reconnect_task is not None else logging.INFO,
"Connection problem to snitun server (%s)",
err,
)
except RemoteBackendError:
_LOGGER.error("Can't refresh the snitun token")
except RemoteForbidden as err:
_LOGGER.error("Remote connection is not allowed %s", err)
forbidden = True
except RemoteInsecureVersion:
self.cloud.client.user_message(
"connect_remote_insecure",
"Home Assistant Cloud error",
"Remote connection is disabled because this Home Assistant instance "
"is marked as insecure. For more information and to enable it again, "
"visit the [Nabu Casa Account page](https://account.nabucasa.com).",
)
insecure = True
except SubscriptionExpired:
pass
except AttributeError:
pass # Ignore because HA shut down during snitun token refresh
finally:
# start retry task
if (
self._snitun
and not self._reconnect_task
and not (insecure or forbidden)
):
self._reconnect_task = asyncio.create_task(self._reconnect_snitun())
# Disconnect if the instance is marked as insecure and we're in reconnect mode
elif self._reconnect_task and (insecure or forbidden):
asyncio.create_task(self.disconnect())
async def disconnect(self, clear_snitun_token: bool = False) -> None:
"""Disconnect from snitun server."""
if not self._snitun:
raise RemoteNotConnected("Can't handle request-connection without backend")
# Stop reconnect task
if self._reconnect_task:
self._reconnect_task.cancel()
if clear_snitun_token:
self._token = None
# Check if we already connected
if not self._snitun.is_connected:
return
await self._snitun.disconnect()
self.cloud.client.dispatcher_message(const.DISPATCH_REMOTE_DISCONNECT)
async def _reconnect_snitun(self) -> None:
"""Reconnect after disconnect."""
try:
while True:
if self._snitun is not None and self._snitun.is_connected:
await self._snitun.wait()
self.cloud.client.dispatcher_message(const.DISPATCH_REMOTE_DISCONNECT)
await asyncio.sleep(random.randint(1, 15))
await self.connect()
except asyncio.CancelledError:
pass
finally:
_LOGGER.debug("Close remote UI reconnect guard")
self._reconnect_task = None
async def _certificate_handler(self) -> None:
"""Handle certification ACME Tasks."""
while True:
try:
if self._snitun:
_LOGGER.debug("Sleeping until tomorrow")
await asyncio.sleep(utils.next_midnight() + random.randint(1, 3600))
else:
_LOGGER.debug("Initializing backend")
if not await self.load_backend():
await asyncio.sleep(10)
continue
if TYPE_CHECKING:
assert self._acme is not None
# Renew certificate?
if not await self._should_renew_certificates():
continue
# Renew certificate
try:
_LOGGER.debug("Renewing certificate")
self._certificate_status = CertificateStatus.GENERATING
await self._acme.issue_certificate()
self._certificate_status = CertificateStatus.LOADED
await self._recreate_backend()
self._certificate_status = CertificateStatus.READY
except AcmeClientError:
# Only log as warning if we have a certain number of days left
if self._acme.expire_date is None or (
self._acme.expire_date
> utils.utcnow()
< (utils.utcnow() + timedelta(days=WARN_RENEW_FAILED_DAYS))
):
meth = _LOGGER.warning
self._certificate_status = CertificateStatus.ERROR
else:
meth = _LOGGER.debug
self._certificate_status = CertificateStatus.READY
meth("Renewal of ACME certificate failed. Trying again later")
except asyncio.CancelledError:
break
except Exception: # pylint: disable=broad-except
_LOGGER.exception("Unexpected error in Remote UI loop")
raise
_LOGGER.debug("Stopping Remote UI loop")
await self.close_backend()
async def _check_cname(self, hostname: str) -> list[str]:
"""Get CNAME records for hostname."""
try:
return await cloud_api.async_resolve_cname(self.cloud, hostname)
except (TimeoutError, aiohttp.ClientError):
_LOGGER.error("Can't resolve CNAME for %s", hostname)
return []
async def _custom_domain_dns_configuration_is_valid(
self,
instance_domain: str,
custom_domain: str,
) -> bool:
"""Validate custom domain."""
# Check primary entry
if instance_domain not in await self._check_cname(custom_domain):
return False
# Check LE entry
return f"_acme-challenge.{instance_domain}" in await self._check_cname(
f"_acme-challenge.{custom_domain}",
)
async def _should_renew_certificates(self) -> bool:
"""Check if certificates should be renewed."""
bad_alias = []
if TYPE_CHECKING:
assert self._acme is not None
assert self.instance_domain is not None
if not self._acme.certificate_available:
return True
if self._acme.expire_date is None:
return True
if self._acme.expire_date > (
utils.utcnow() + timedelta(days=RENEW_IF_EXPIRES_DAYS)
):
return False
check_alias = [
domain for domain in self._acme.domains if domain != self.instance_domain
]
if not check_alias:
return True
# Check if defined alias is still valid:
for alias in check_alias:
# Check primary entry
if not await self._custom_domain_dns_configuration_is_valid(
self.instance_domain,
alias,
):
bad_alias.append(alias) # noqa: PERF401
if not bad_alias:
# No bad configuration detected
return True
if self._acme.expire_date > (
utils.utcnow() + timedelta(days=WARN_RENEW_FAILED_DAYS)
):
await self.cloud.client.async_create_repair_issue(
identifier=f"warn_bad_custom_domain_configuration_{self._acme.expire_date.timestamp()}",
translation_key="warn_bad_custom_domain_configuration",
placeholders={"custom_domains": ",".join(bad_alias)},
severity="warning",
)
return False
# Recreate the acme client with working domains
await self.cloud.client.async_create_repair_issue(
identifier=f"reset_bad_custom_domain_configuration_{self._acme.expire_date.timestamp()}",
translation_key="reset_bad_custom_domain_configuration",
placeholders={"custom_domains": ",".join(bad_alias)},
severity="error",
)
await self._recreate_acme(
[domain for domain in self._acme.domains if domain not in bad_alias],
self._acme.email,
)
return True
async def reset_acme(self) -> None:
"""Reset the ACME client."""
if not self._acme:
return
await self._acme.reset_acme()
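# Illustrative sketch (not part of the library): the date comparison behind the
# renewal decision in _should_renew_certificates(), without the custom-domain
# DNS validation that follows it there.
def _example_within_renewal_window(expire_date: datetime | None) -> bool:
    """Example: return True when a certificate is due for renewal."""
    if expire_date is None:
        return True
    return expire_date <= utils.utcnow() + timedelta(days=RENEW_IF_EXPIRES_DAYS)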
hass-nabucasa-0.101.0/hass_nabucasa/utils.py 0000664 0000000 0000000 00000005724 15011602407 0020673 0 ustar 00root root 0000000 0000000 """Helper methods to handle the time in Home Assistant."""
from __future__ import annotations
import asyncio
from collections.abc import Awaitable, Callable
import datetime as dt
from logging import Logger
import ssl
from typing import TypeVar
import ciso8601
CALLABLE_T = TypeVar("CALLABLE_T", bound=Callable) # pylint: disable=invalid-name
UTC = dt.UTC
def utcnow() -> dt.datetime:
"""Get now in UTC time."""
return dt.datetime.now(UTC)
def utc_from_timestamp(timestamp: float) -> dt.datetime:
"""Return a UTC time from a timestamp."""
return dt.datetime.fromtimestamp(timestamp, UTC)
def parse_date(dt_str: str) -> dt.date | None:
"""Convert a date string to a date object."""
try:
return ciso8601.parse_datetime(dt_str).date()
except ValueError: # If dt_str did not match our format
return None
def server_context_modern() -> ssl.SSLContext:
"""
Return an SSL context following the Mozilla recommendations.
TLS configuration follows the best-practice guidelines specified here:
https://wiki.mozilla.org/Security/Server_Side_TLS
Modern guidelines are followed.
"""
context = ssl.SSLContext(ssl.PROTOCOL_TLS) # pylint: disable=no-member
context.options |= (
ssl.OP_NO_SSLv2
| ssl.OP_NO_SSLv3
| ssl.OP_NO_TLSv1
| ssl.OP_NO_TLSv1_1
| ssl.OP_CIPHER_SERVER_PREFERENCE
)
if hasattr(ssl, "OP_NO_COMPRESSION"):
context.options |= ssl.OP_NO_COMPRESSION
context.set_ciphers(
"ECDHE-ECDSA-AES256-GCM-SHA384:ECDHE-RSA-AES256-GCM-SHA384:"
"ECDHE-ECDSA-CHACHA20-POLY1305:ECDHE-RSA-CHACHA20-POLY1305:"
"ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-RSA-AES128-GCM-SHA256:"
"ECDHE-ECDSA-AES256-SHA384:ECDHE-RSA-AES256-SHA384:"
"ECDHE-ECDSA-AES128-SHA256:ECDHE-RSA-AES128-SHA256",
)
return context
def next_midnight() -> float:
"""Return the seconds till next local midnight."""
midnight = dt.datetime.now().replace(
hour=0,
minute=0,
second=0,
microsecond=0,
) + dt.timedelta(days=1)
return (midnight - dt.datetime.now()).total_seconds()
async def gather_callbacks(
logger: Logger,
name: str,
callbacks: list[Callable[[], Awaitable[None]]],
) -> None:
"""Gather callbacks and log exceptions."""
results = await asyncio.gather(*[cb() for cb in callbacks], return_exceptions=True)
for result, callback in zip(results, callbacks, strict=False):
if not isinstance(result, Exception):
continue
logger.error("Unexpected error in %s %s", name, callback, exc_info=result)
class Registry(dict):
"""Registry of items."""
def register(self, name: str) -> Callable[[CALLABLE_T], CALLABLE_T]:
"""Return decorator to register item with a specific name."""
def decorator(func: CALLABLE_T) -> CALLABLE_T:
"""Register decorated function."""
self[name] = func
return func
return decorator
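# Illustrative sketch (not part of the library): how Registry maps string names
# to callables, mirroring the HANDLERS registry used by iot.py. The "greet"
# handler below is purely hypothetical.
_EXAMPLE_REGISTRY = Registry()


@_EXAMPLE_REGISTRY.register("greet")
def _example_greet(name: str) -> str:
    """Example handler resolved by name through the registry."""
    return f"Hello {name}"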
hass-nabucasa-0.101.0/hass_nabucasa/voice.py 0000664 0000000 0000000 00000051045 15011602407 0020635 0 ustar 00root root 0000000 0000000 """Voice handler with Azure."""
from __future__ import annotations
from collections.abc import AsyncIterable
from datetime import datetime
from enum import Enum
import logging
from typing import TYPE_CHECKING
from xml.etree import ElementTree as ET
from aiohttp.hdrs import ACCEPT, AUTHORIZATION, CONTENT_TYPE, USER_AGENT
import attr
from .utils import utc_from_timestamp, utcnow
from .voice_api import VoiceApiError
from .voice_data import TTS_VOICES
if TYPE_CHECKING:
from . import Cloud, _ClientT
_LOGGER = logging.getLogger(__name__)
class VoiceError(Exception):
"""General Voice error."""
class VoiceTokenError(VoiceError):
"""Error with token handling."""
class VoiceReturnError(VoiceError):
"""Backend error for voice."""
class Gender(str, Enum):
"""Gender Type for voices."""
MALE = "male"
FEMALE = "female"
class AudioOutput(str, Enum):
"""Gender Type for voices."""
MP3 = "mp3"
RAW = "raw"
STT_LANGUAGES = [
"af-ZA",
"am-ET",
"ar-AE",
"ar-BH",
"ar-DZ",
"ar-EG",
"ar-IL",
"ar-IQ",
"ar-JO",
"ar-KW",
"ar-LB",
"ar-LY",
"ar-MA",
"ar-OM",
"ar-PS",
"ar-QA",
"ar-SA",
"ar-SY",
"ar-TN",
"ar-YE",
"az-AZ",
"bg-BG",
"bn-IN",
"bs-BA",
"ca-ES",
"cs-CZ",
"cy-GB",
"da-DK",
"de-AT",
"de-CH",
"de-DE",
"el-GR",
"en-AU",
"en-CA",
"en-GB",
"en-GH",
"en-HK",
"en-IE",
"en-IN",
"en-KE",
"en-NG",
"en-NZ",
"en-PH",
"en-SG",
"en-TZ",
"en-US",
"en-ZA",
"es-AR",
"es-BO",
"es-CL",
"es-CO",
"es-CR",
"es-CU",
"es-DO",
"es-EC",
"es-ES",
"es-GQ",
"es-GT",
"es-HN",
"es-MX",
"es-NI",
"es-PA",
"es-PE",
"es-PR",
"es-PY",
"es-SV",
"es-US",
"es-UY",
"es-VE",
"et-EE",
"eu-ES",
"fa-IR",
"fi-FI",
"fil-PH",
"fr-BE",
"fr-CA",
"fr-CH",
"fr-FR",
"ga-IE",
"gl-ES",
"gu-IN",
"he-IL",
"hi-IN",
"hr-HR",
"hu-HU",
"hy-AM",
"id-ID",
"is-IS",
"it-CH",
"it-IT",
"ja-JP",
"jv-ID",
"ka-GE",
"kk-KZ",
"km-KH",
"kn-IN",
"ko-KR",
"lo-LA",
"lt-LT",
"lv-LV",
"mk-MK",
"ml-IN",
"mn-MN",
"mr-IN",
"ms-MY",
"mt-MT",
"my-MM",
"nb-NO",
"ne-NP",
"nl-BE",
"nl-NL",
"pl-PL",
"ps-AF",
"pt-BR",
"pt-PT",
"ro-RO",
"ru-RU",
"si-LK",
"sk-SK",
"sl-SI",
"so-SO",
"sq-AL",
"sr-RS",
"sv-SE",
"sw-KE",
"sw-TZ",
"ta-IN",
"te-IN",
"th-TH",
"tr-TR",
"uk-UA",
"uz-UZ",
"vi-VN",
"wuu-CN",
"yue-CN",
"zh-CN",
"zh-CN-shandong",
"zh-CN-sichuan",
"zh-HK",
"zh-TW",
"zu-ZA",
]
# Old. Do not update anymore.
MAP_VOICE = {
("af-ZA", Gender.FEMALE): "AdriNeural",
("af-ZA", Gender.MALE): "WillemNeural",
("am-ET", Gender.FEMALE): "MekdesNeural",
("am-ET", Gender.MALE): "AmehaNeural",
("ar-DZ", Gender.FEMALE): "AminaNeural",
("ar-DZ", Gender.MALE): "IsmaelNeural",
("ar-BH", Gender.FEMALE): "LailaNeural",
("ar-BH", Gender.MALE): "AliNeural",
("ar-EG", Gender.FEMALE): "SalmaNeural",
("ar-EG", Gender.MALE): "ShakirNeural",
("ar-IQ", Gender.FEMALE): "RanaNeural",
("ar-IQ", Gender.MALE): "BasselNeural",
("ar-JO", Gender.FEMALE): "SanaNeural",
("ar-JO", Gender.MALE): "TaimNeural",
("ar-KW", Gender.FEMALE): "NouraNeural",
("ar-KW", Gender.MALE): "FahedNeural",
("ar-LY", Gender.FEMALE): "ImanNeural",
("ar-LY", Gender.MALE): "OmarNeural",
("ar-MA", Gender.FEMALE): "MounaNeural",
("ar-MA", Gender.MALE): "JamalNeural",
("ar-QA", Gender.FEMALE): "AmalNeural",
("ar-QA", Gender.MALE): "MoazNeural",
("ar-SA", Gender.FEMALE): "ZariyahNeural",
("ar-SA", Gender.MALE): "HamedNeural",
("ar-SY", Gender.FEMALE): "AmanyNeural",
("ar-SY", Gender.MALE): "LaithNeural",
("ar-TN", Gender.FEMALE): "ReemNeural",
("ar-TN", Gender.MALE): "HediNeural",
("ar-AE", Gender.FEMALE): "FatimaNeural",
("ar-AE", Gender.MALE): "HamdanNeural",
("ar-YE", Gender.FEMALE): "MaryamNeural",
("ar-YE", Gender.MALE): "SalehNeural",
("bn-BD", Gender.FEMALE): "NabanitaNeural",
("bn-BD", Gender.MALE): "PradeepNeural",
("bn-IN", Gender.FEMALE): "TanishaaNeural",
("bn-IN", Gender.MALE): "BashkarNeural",
("bg-BG", Gender.FEMALE): "KalinaNeural",
("bg-BG", Gender.MALE): "BorislavNeural",
("my-MM", Gender.FEMALE): "NilarNeural",
("my-MM", Gender.MALE): "ThihaNeural",
("ca-ES", Gender.FEMALE): "JoanaNeural",
("ca-ES", Gender.MALE): "EnricNeural",
("zh-HK", Gender.FEMALE): "HiuMaanNeural",
("zh-HK", Gender.MALE): "WanLungNeural",
("zh-CN", Gender.FEMALE): "XiaoxiaoNeural",
("zh-CN", Gender.MALE): "YunyangNeural",
("zh-TW", Gender.FEMALE): "HsiaoChenNeural",
("zh-TW", Gender.MALE): "YunJheNeural",
("hr-HR", Gender.FEMALE): "GabrijelaNeural",
("hr-HR", Gender.MALE): "SreckoNeural",
("cs-CZ", Gender.FEMALE): "VlastaNeural",
("cs-CZ", Gender.MALE): "AntoninNeural",
("da-DK", Gender.FEMALE): "ChristelNeural",
("da-DK", Gender.MALE): "JeppeNeural",
("nl-BE", Gender.FEMALE): "DenaNeural",
("nl-BE", Gender.MALE): "ArnaudNeural",
("nl-NL", Gender.FEMALE): "ColetteNeural",
("nl-NL", Gender.MALE): "MaartenNeural",
("en-AU", Gender.FEMALE): "NatashaNeural",
("en-AU", Gender.MALE): "WilliamNeural",
("en-CA", Gender.FEMALE): "ClaraNeural",
("en-CA", Gender.MALE): "LiamNeural",
("en-HK", Gender.FEMALE): "YanNeural",
("en-HK", Gender.MALE): "SamNeural",
("en-IN", Gender.FEMALE): "NeerjaNeural",
("en-IN", Gender.MALE): "PrabhatNeural",
("en-IE", Gender.FEMALE): "EmilyNeural",
("en-IE", Gender.MALE): "ConnorNeural",
("en-KE", Gender.FEMALE): "AsiliaNeural",
("en-KE", Gender.MALE): "ChilembaNeural",
("en-NZ", Gender.FEMALE): "MollyNeural",
("en-NZ", Gender.MALE): "MitchellNeural",
("en-NG", Gender.FEMALE): "EzinneNeural",
("en-NG", Gender.MALE): "AbeoNeural",
("en-PH", Gender.FEMALE): "RosaNeural",
("en-PH", Gender.MALE): "JamesNeural",
("en-SG", Gender.FEMALE): "LunaNeural",
("en-SG", Gender.MALE): "WayneNeural",
("en-ZA", Gender.FEMALE): "LeahNeural",
("en-ZA", Gender.MALE): "LukeNeural",
("en-TZ", Gender.FEMALE): "ImaniNeural",
("en-TZ", Gender.MALE): "ElimuNeural",
("en-GB", Gender.FEMALE): "LibbyNeural",
("en-GB", Gender.MALE): "RyanNeural",
("en-US", Gender.FEMALE): "JennyNeural",
("en-US", Gender.MALE): "GuyNeural",
("et-EE", Gender.FEMALE): "AnuNeural",
("et-EE", Gender.MALE): "KertNeural",
("fil-PH", Gender.FEMALE): "BlessicaNeural",
("fil-PH", Gender.MALE): "AngeloNeural",
("fi-FI", Gender.FEMALE): "SelmaNeural",
("fi-FI", Gender.MALE): "HarriNeural",
("fr-BE", Gender.FEMALE): "CharlineNeural",
("fr-BE", Gender.MALE): "GerardNeural",
("fr-CA", Gender.FEMALE): "SylvieNeural",
("fr-CA", Gender.MALE): "AntoineNeural",
("fr-FR", Gender.FEMALE): "DeniseNeural",
("fr-FR", Gender.MALE): "HenriNeural",
("fr-CH", Gender.FEMALE): "ArianeNeural",
("fr-CH", Gender.MALE): "FabriceNeural",
("gl-ES", Gender.FEMALE): "SabelaNeural",
("gl-ES", Gender.MALE): "RoiNeural",
("de-AT", Gender.FEMALE): "IngridNeural",
("de-AT", Gender.MALE): "JonasNeural",
("de-DE", Gender.FEMALE): "KatjaNeural",
("de-DE", Gender.MALE): "ConradNeural",
("de-CH", Gender.FEMALE): "LeniNeural",
("de-CH", Gender.MALE): "JanNeural",
("el-GR", Gender.FEMALE): "AthinaNeural",
("el-GR", Gender.MALE): "NestorasNeural",
("gu-IN", Gender.FEMALE): "DhwaniNeural",
("gu-IN", Gender.MALE): "NiranjanNeural",
("he-IL", Gender.FEMALE): "HilaNeural",
("he-IL", Gender.MALE): "AvriNeural",
("hi-IN", Gender.FEMALE): "SwaraNeural",
("hi-IN", Gender.MALE): "MadhurNeural",
("hu-HU", Gender.FEMALE): "NoemiNeural",
("hu-HU", Gender.MALE): "TamasNeural",
("is-IS", Gender.FEMALE): "GudrunNeural",
("is-IS", Gender.MALE): "GunnarNeural",
("id-ID", Gender.FEMALE): "GadisNeural",
("id-ID", Gender.MALE): "ArdiNeural",
("ga-IE", Gender.FEMALE): "OrlaNeural",
("ga-IE", Gender.MALE): "ColmNeural",
("it-IT", Gender.FEMALE): "ElsaNeural",
("it-IT", Gender.MALE): "DiegoNeural",
("ja-JP", Gender.FEMALE): "NanamiNeural",
("ja-JP", Gender.MALE): "KeitaNeural",
("jv-ID", Gender.FEMALE): "SitiNeural",
("jv-ID", Gender.MALE): "DimasNeural",
("kn-IN", Gender.FEMALE): "SapnaNeural",
("kn-IN", Gender.MALE): "GaganNeural",
("kk-KZ", Gender.FEMALE): "AigulNeural",
("kk-KZ", Gender.MALE): "DauletNeural",
("km-KH", Gender.FEMALE): "SreymomNeural",
("km-KH", Gender.MALE): "PisethNeural",
("ko-KR", Gender.FEMALE): "SunHiNeural",
("ko-KR", Gender.MALE): "InJoonNeural",
("lo-LA", Gender.FEMALE): "KeomanyNeural",
("lo-LA", Gender.MALE): "ChanthavongNeural",
("lv-LV", Gender.FEMALE): "EveritaNeural",
("lv-LV", Gender.MALE): "NilsNeural",
("lt-LT", Gender.FEMALE): "OnaNeural",
("lt-LT", Gender.MALE): "LeonasNeural",
("mk-MK", Gender.FEMALE): "MarijaNeural",
("mk-MK", Gender.MALE): "AleksandarNeural",
("ms-MY", Gender.FEMALE): "YasminNeural",
("ms-MY", Gender.MALE): "OsmanNeural",
("ml-IN", Gender.FEMALE): "SobhanaNeural",
("ml-IN", Gender.MALE): "MidhunNeural",
("mt-MT", Gender.FEMALE): "GraceNeural",
("mt-MT", Gender.MALE): "JosephNeural",
("mr-IN", Gender.FEMALE): "AarohiNeural",
("mr-IN", Gender.MALE): "ManoharNeural",
("nb-NO", Gender.FEMALE): "IselinNeural",
("nb-NO", Gender.MALE): "FinnNeural",
("ps-AF", Gender.FEMALE): "LatifaNeural",
("ps-AF", Gender.MALE): "GulNawazNeural",
("fa-IR", Gender.FEMALE): "DilaraNeural",
("fa-IR", Gender.MALE): "FaridNeural",
("pl-PL", Gender.FEMALE): "AgnieszkaNeural",
("pl-PL", Gender.MALE): "MarekNeural",
("pt-BR", Gender.FEMALE): "FranciscaNeural",
("pt-BR", Gender.MALE): "AntonioNeural",
("pt-PT", Gender.FEMALE): "RaquelNeural",
("pt-PT", Gender.MALE): "DuarteNeural",
("ro-RO", Gender.FEMALE): "AlinaNeural",
("ro-RO", Gender.MALE): "EmilNeural",
("ru-RU", Gender.FEMALE): "SvetlanaNeural",
("ru-RU", Gender.MALE): "DmitryNeural",
("sr-RS", Gender.FEMALE): "SophieNeural",
("sr-RS", Gender.MALE): "NicholasNeural",
("si-LK", Gender.FEMALE): "ThiliniNeural",
("si-LK", Gender.MALE): "SameeraNeural",
("sk-SK", Gender.FEMALE): "ViktoriaNeural",
("sk-SK", Gender.MALE): "LukasNeural",
("sl-SI", Gender.FEMALE): "PetraNeural",
("sl-SI", Gender.MALE): "RokNeural",
("so-SO", Gender.FEMALE): "UbaxNeural",
("so-SO", Gender.MALE): "MuuseNeural",
("es-AR", Gender.FEMALE): "ElenaNeural",
("es-AR", Gender.MALE): "TomasNeural",
("es-BO", Gender.FEMALE): "SofiaNeural",
("es-BO", Gender.MALE): "MarceloNeural",
("es-CL", Gender.FEMALE): "CatalinaNeural",
("es-CL", Gender.MALE): "LorenzoNeural",
("es-CO", Gender.FEMALE): "SalomeNeural",
("es-CO", Gender.MALE): "GonzaloNeural",
("es-CR", Gender.FEMALE): "MariaNeural",
("es-CR", Gender.MALE): "JuanNeural",
("es-CU", Gender.FEMALE): "BelkysNeural",
("es-CU", Gender.MALE): "ManuelNeural",
("es-DO", Gender.FEMALE): "RamonaNeural",
("es-DO", Gender.MALE): "EmilioNeural",
("es-EC", Gender.FEMALE): "AndreaNeural",
("es-EC", Gender.MALE): "LuisNeural",
("es-SV", Gender.FEMALE): "LorenaNeural",
("es-SV", Gender.MALE): "RodrigoNeural",
("es-GQ", Gender.FEMALE): "TeresaNeural",
("es-GQ", Gender.MALE): "JavierNeural",
("es-GT", Gender.FEMALE): "MartaNeural",
("es-GT", Gender.MALE): "AndresNeural",
("es-HN", Gender.FEMALE): "KarlaNeural",
("es-HN", Gender.MALE): "CarlosNeural",
("es-MX", Gender.FEMALE): "DaliaNeural",
("es-MX", Gender.MALE): "JorgeNeural",
("es-NI", Gender.FEMALE): "YolandaNeural",
("es-NI", Gender.MALE): "FedericoNeural",
("es-PA", Gender.FEMALE): "MargaritaNeural",
("es-PA", Gender.MALE): "RobertoNeural",
("es-PY", Gender.FEMALE): "TaniaNeural",
("es-PY", Gender.MALE): "MarioNeural",
("es-PE", Gender.FEMALE): "CamilaNeural",
("es-PE", Gender.MALE): "AlexNeural",
("es-PR", Gender.FEMALE): "KarinaNeural",
("es-PR", Gender.MALE): "VictorNeural",
("es-ES", Gender.FEMALE): "ElviraNeural",
("es-ES", Gender.MALE): "AlvaroNeural",
("es-UY", Gender.FEMALE): "ValentinaNeural",
("es-UY", Gender.MALE): "MateoNeural",
("es-US", Gender.FEMALE): "PalomaNeural",
("es-US", Gender.MALE): "AlonsoNeural",
("es-VE", Gender.FEMALE): "PaolaNeural",
("es-VE", Gender.MALE): "SebastianNeural",
("su-ID", Gender.FEMALE): "TutiNeural",
("su-ID", Gender.MALE): "JajangNeural",
("sw-KE", Gender.FEMALE): "ZuriNeural",
("sw-KE", Gender.MALE): "RafikiNeural",
("sw-TZ", Gender.FEMALE): "RehemaNeural",
("sw-TZ", Gender.MALE): "DaudiNeural",
("sv-SE", Gender.FEMALE): "SofieNeural",
("sv-SE", Gender.MALE): "MattiasNeural",
("ta-IN", Gender.FEMALE): "PallaviNeural",
("ta-IN", Gender.MALE): "ValluvarNeural",
("ta-SG", Gender.FEMALE): "VenbaNeural",
("ta-SG", Gender.MALE): "AnbuNeural",
("ta-LK", Gender.FEMALE): "SaranyaNeural",
("ta-LK", Gender.MALE): "KumarNeural",
("te-IN", Gender.FEMALE): "ShrutiNeural",
("te-IN", Gender.MALE): "MohanNeural",
("th-TH", Gender.FEMALE): "AcharaNeural",
("th-TH", Gender.MALE): "NiwatNeural",
("tr-TR", Gender.FEMALE): "EmelNeural",
("tr-TR", Gender.MALE): "AhmetNeural",
("uk-UA", Gender.FEMALE): "PolinaNeural",
("uk-UA", Gender.MALE): "OstapNeural",
("ur-IN", Gender.FEMALE): "GulNeural",
("ur-IN", Gender.MALE): "SalmanNeural",
("ur-PK", Gender.FEMALE): "UzmaNeural",
("ur-PK", Gender.MALE): "AsadNeural",
("uz-UZ", Gender.FEMALE): "MadinaNeural",
("uz-UZ", Gender.MALE): "SardorNeural",
("vi-VN", Gender.FEMALE): "HoaiMyNeural",
("vi-VN", Gender.MALE): "NamMinhNeural",
("cy-GB", Gender.FEMALE): "NiaNeural",
("cy-GB", Gender.MALE): "AledNeural",
("zu-ZA", Gender.FEMALE): "ThandoNeural",
("zu-ZA", Gender.MALE): "ThembaNeural",
}
@attr.s
class STTResponse:
"""Response of STT."""
success: bool = attr.ib()
text: str | None = attr.ib()
class Voice:
"""Class to help manage azure STT and TTS."""
def __init__(self, cloud: Cloud[_ClientT]) -> None:
"""Initialize azure voice."""
self.cloud = cloud
self._token: str | None = None
self._endpoint_tts: str | None = None
self._endpoint_stt: str | None = None
self._valid: datetime | None = None
def _validate_token(self) -> bool:
"""Validate token outside of coroutine."""
return self.cloud.valid_subscription and bool(
self._valid and utcnow() < self._valid
)
async def _update_token(self) -> None:
"""Update token details."""
if not self.cloud.valid_subscription:
raise VoiceTokenError("Invalid subscription")
try:
details = await self.cloud.voice_api.connection_details()
except VoiceApiError as err:
raise VoiceTokenError(err) from err
self._token = details["authorized_key"]
self._endpoint_stt = details["endpoint_stt"]
self._endpoint_tts = details["endpoint_tts"]
self._valid = utc_from_timestamp(float(details["valid"]))
async def process_stt(
self,
*,
stream: AsyncIterable[bytes],
content_type: str,
language: str,
force_token_renewal: bool = False,
) -> STTResponse:
"""Stream Audio to Azure cognitive instance."""
if language not in STT_LANGUAGES:
raise VoiceError(f"Language {language} not supported")
if force_token_renewal or not self._validate_token():
await self._update_token()
# Send request
async with self.cloud.websession.post(
f"{self._endpoint_stt}?language={language}&profanity=raw",
headers={
CONTENT_TYPE: content_type,
AUTHORIZATION: f"Bearer {self._token}",
ACCEPT: "application/json;text/xml",
USER_AGENT: self.cloud.client.client_name,
},
data=stream,
expect100=True,
chunked=True,
) as resp:
if resp.status == 429 and not force_token_renewal:
# By checking the force_token_renewal argument, we limit retries to 1.
_LOGGER.info("Retrying with new token")
return await self.process_stt(
stream=stream,
content_type=content_type,
language=language,
force_token_renewal=True,
)
if resp.status not in (200, 201):
raise VoiceReturnError(
f"Error processing {language} speech: "
f"{resp.status} {await resp.text()}",
)
data = await resp.json()
# Parse Answer
return STTResponse(
data["RecognitionStatus"] == "Success",
data.get("DisplayText"),
)
async def process_tts(
self,
*,
text: str,
language: str,
output: AudioOutput,
voice: str | None = None,
gender: Gender | None = None,
force_token_renewal: bool = False,
style: str | None = None,
) -> bytes:
"""Get Speech from text over Azure."""
if (language_info := TTS_VOICES.get(language)) is None:
raise VoiceError(f"Unsupported language {language}")
# Backwards compatibility for old config
if voice is None and gender is not None:
voice = MAP_VOICE.get((language, gender))
# If no voice picked, pick first one.
if voice is None:
voice = next(iter(language_info))
if (voice_info := language_info.get(voice)) is None:
raise VoiceError(f"Unsupported voice {voice} for language {language}")
if style and (
isinstance(voice_info, str) or style not in voice_info.get("variants", [])
):
raise VoiceError(
f"Unsupported style {style} for voice {voice} in language {language}"
)
if force_token_renewal or not self._validate_token():
await self._update_token()
# SSML
xml_body = ET.Element(
"speak",
attrib={
"version": "1.0",
"xmlns": "http://www.w3.org/2001/10/synthesis",
"xmlns:mstts": "https://www.w3.org/2001/mstts",
"{http://www.w3.org/XML/1998/namespace}lang": language,
},
)
# Add element
voice_el = ET.SubElement(
xml_body, "voice", attrib={"name": f"{language}-{voice}"}
)
if style:
express_el = ET.SubElement(
voice_el,
"mstts:express-as",
attrib={
"style": style,
},
)
target_el = express_el
else:
target_el = voice_el
target_el.text = text[:2048]
        # We cannot get here without this being set, but mypy does not know that.
assert self._endpoint_tts is not None
if output == AudioOutput.RAW:
output_header = "raw-16khz-16bit-mono-pcm"
else:
output_header = "audio-24khz-48kbitrate-mono-mp3"
# Send request
async with self.cloud.websession.post(
self._endpoint_tts,
headers={
CONTENT_TYPE: "application/ssml+xml",
AUTHORIZATION: f"Bearer {self._token}",
"X-Microsoft-OutputFormat": output_header,
USER_AGENT: self.cloud.client.client_name,
},
data=ET.tostring(xml_body),
) as resp:
if resp.status == 429 and not force_token_renewal:
# By checking the force_token_renewal argument, we limit retries to 1.
_LOGGER.info("Retrying with new token")
return await self.process_tts(
text=text,
language=language,
output=output,
voice=voice,
gender=gender,
force_token_renewal=True,
)
if resp.status not in (200, 201):
raise VoiceReturnError(
f"Error receiving TTS with {language}/{voice}: "
f"{resp.status} {await resp.text()}",
)
return await resp.read()
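# Usage sketch (editorial illustration, not part of the library): how a caller
# might synthesize speech with the Voice helper above. Assumes `cloud` is an
# already authenticated Cloud instance with a valid subscription; the function
# name below is hypothetical.
#
#     async def example_tts(cloud) -> bytes:
#         voice_helper = Voice(cloud)
#         return await voice_helper.process_tts(
#             text="Hello from Home Assistant",
#             language="en-US",
#             output=AudioOutput.MP3,
#             voice="JennyNeural",  # optional; defaults to the first voice
#         )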
hass-nabucasa-0.101.0/hass_nabucasa/voice_api.py 0000664 0000000 0000000 00000002063 15011602407 0021462 0 ustar 00root root 0000000 0000000 """This module provides voice API functionalities."""
from __future__ import annotations
from typing import TYPE_CHECKING, TypedDict
from .api import ApiBase, CloudApiError, api_exception_handler
class VoiceApiError(CloudApiError):
"""Exception raised when handling voice API."""
class VoiceConnectionDetails(TypedDict):
"""Voice connection details from voice API."""
authorized_key: str
endpoint_stt: str
endpoint_tts: str
valid: str
class VoiceApi(ApiBase):
"""Class to help communicate with the voice API."""
@property
def hostname(self) -> str:
"""Get the hostname."""
if TYPE_CHECKING:
assert self._cloud.servicehandlers_server is not None
return self._cloud.servicehandlers_server
@api_exception_handler(VoiceApiError)
async def connection_details(self) -> VoiceConnectionDetails:
"""Get the voice connection details."""
details: VoiceConnectionDetails = await self._call_cloud_api(
path="/voice/connection_details"
)
return details
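# Usage sketch (editorial illustration, not part of the library): fetching the
# connection details, assuming `cloud` is an authenticated Cloud instance with
# `servicehandlers_server` set. The field values shown are placeholders.
#
#     async def example_connection_details(cloud) -> None:
#         details = await VoiceApi(cloud).connection_details()
#         # details is a VoiceConnectionDetails mapping, e.g.
#         # {"authorized_key": "...", "endpoint_stt": "...",
#         #  "endpoint_tts": "...", "valid": "<unix timestamp>"}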
hass-nabucasa-0.101.0/hass_nabucasa/voice_data.py 0000664 0000000 0000000 00000062573 15011602407 0021636 0 ustar 00root root 0000000 0000000 """
Available voices for TTS.
Automatically generated file, do not edit this file directly.
Run python3 scripts/update_voice_data.py to update this file.
"""
TTS_VOICES: dict[str, dict[str, dict | str]] = {
"af-ZA": {"AdriNeural": "Adri", "WillemNeural": "Willem"},
"am-ET": {"AmehaNeural": "Ameha", "MekdesNeural": "Mekdes"},
"ar-AE": {"FatimaNeural": "Fatima", "HamdanNeural": "Hamdan"},
"ar-BH": {"AliNeural": "Ali", "LailaNeural": "Laila"},
"ar-DZ": {"AminaNeural": "Amina", "IsmaelNeural": "Ismael"},
"ar-EG": {"SalmaNeural": "Salma", "ShakirNeural": "Shakir"},
"ar-IQ": {"BasselNeural": "Bassel", "RanaNeural": "Rana"},
"ar-JO": {"SanaNeural": "Sana", "TaimNeural": "Taim"},
"ar-KW": {"FahedNeural": "Fahed", "NouraNeural": "Noura"},
"ar-LB": {"LaylaNeural": "Layla", "RamiNeural": "Rami"},
"ar-LY": {"ImanNeural": "Iman", "OmarNeural": "Omar"},
"ar-MA": {"JamalNeural": "Jamal", "MounaNeural": "Mouna"},
"ar-OM": {"AbdullahNeural": "Abdullah", "AyshaNeural": "Aysha"},
"ar-QA": {"AmalNeural": "Amal", "MoazNeural": "Moaz"},
"ar-SA": {"HamedNeural": "Hamed", "ZariyahNeural": "Zariyah"},
"ar-SY": {"AmanyNeural": "Amany", "LaithNeural": "Laith"},
"ar-TN": {"HediNeural": "Hedi", "ReemNeural": "Reem"},
"ar-YE": {"MaryamNeural": "Maryam", "SalehNeural": "Saleh"},
"as-IN": {"PriyomNeural": "Priyom", "YashicaNeural": "Yashica"},
"az-AZ": {"BabekNeural": "Babek", "BanuNeural": "Banu"},
"bg-BG": {"BorislavNeural": "Borislav", "KalinaNeural": "Kalina"},
"bn-BD": {"NabanitaNeural": "Nabanita", "PradeepNeural": "Pradeep"},
"bn-IN": {"BashkarNeural": "Bashkar", "TanishaaNeural": "Tanishaa"},
"bs-BA": {"GoranNeural": "Goran", "VesnaNeural": "Vesna"},
"ca-ES": {"AlbaNeural": "Alba", "EnricNeural": "Enric", "JoanaNeural": "Joana"},
"cs-CZ": {"AntoninNeural": "Antonin", "VlastaNeural": "Vlasta"},
"cy-GB": {"AledNeural": "Aled", "NiaNeural": "Nia"},
"da-DK": {"ChristelNeural": "Christel", "JeppeNeural": "Jeppe"},
"de-AT": {"IngridNeural": "Ingrid", "JonasNeural": "Jonas"},
"de-CH": {"JanNeural": "Jan", "LeniNeural": "Leni"},
"de-DE": {
"AmalaNeural": "Amala",
"BerndNeural": "Bernd",
"ChristophNeural": "Christoph",
"ConradNeural": {"name": "Conrad", "variants": ["cheerful", "sad"]},
"ElkeNeural": "Elke",
"GiselaNeural": "Gisela",
"KasperNeural": "Kasper",
"KatjaNeural": "Katja",
"KillianNeural": "Killian",
"KlarissaNeural": "Klarissa",
"KlausNeural": "Klaus",
"LouisaNeural": "Louisa",
"MajaNeural": "Maja",
"RalfNeural": "Ralf",
"TanjaNeural": "Tanja",
},
"el-GR": {"AthinaNeural": "Athina", "NestorasNeural": "Nestoras"},
"en-AU": {
"AnnetteNeural": "Annette",
"CarlyNeural": "Carly",
"DarrenNeural": "Darren",
"DuncanNeural": "Duncan",
"ElsieNeural": "Elsie",
"FreyaNeural": "Freya",
"JoanneNeural": "Joanne",
"KenNeural": "Ken",
"KimNeural": "Kim",
"NatashaNeural": "Natasha",
"NeilNeural": "Neil",
"TimNeural": "Tim",
"TinaNeural": "Tina",
"WilliamNeural": "William",
},
"en-CA": {"ClaraNeural": "Clara", "LiamNeural": "Liam"},
"en-GB": {
"AbbiNeural": "Abbi",
"AlfieNeural": "Alfie",
"BellaNeural": "Bella",
"ElliotNeural": "Elliot",
"EthanNeural": "Ethan",
"HollieNeural": "Hollie",
"LibbyNeural": "Libby",
"MaisieNeural": "Maisie",
"NoahNeural": "Noah",
"OliverNeural": "Oliver",
"OliviaNeural": "Olivia",
"RyanNeural": {
"name": "Ryan",
"variants": ["cheerful", "chat", "whispering", "sad"],
},
"SoniaNeural": {"name": "Sonia", "variants": ["cheerful", "sad"]},
"ThomasNeural": "Thomas",
},
"en-HK": {"SamNeural": "Sam", "YanNeural": "Yan"},
"en-IE": {"ConnorNeural": "Connor", "EmilyNeural": "Emily"},
"en-IN": {
"AaravNeural": "Aarav",
"AartiNeural": "Aarti",
"AashiNeural": "Aashi",
"AnanyaNeural": "Ananya",
"ArjunNeural": "Arjun",
"KavyaNeural": "Kavya",
"KunalNeural": "Kunal",
"NeerjaNeural": {
"name": "Neerja",
"variants": ["newscast", "cheerful", "empathetic"],
},
"PrabhatNeural": "Prabhat",
"RehaanNeural": "Rehaan",
},
"en-KE": {"AsiliaNeural": "Asilia", "ChilembaNeural": "Chilemba"},
"en-NG": {"AbeoNeural": "Abeo", "EzinneNeural": "Ezinne"},
"en-NZ": {"MitchellNeural": "Mitchell", "MollyNeural": "Molly"},
"en-PH": {"JamesNeural": "James", "RosaNeural": "Rosa"},
"en-SG": {"LunaNeural": "Luna", "WayneNeural": "Wayne"},
"en-TZ": {"ElimuNeural": "Elimu", "ImaniNeural": "Imani"},
"en-US": {
"AmberNeural": "Amber",
"AnaNeural": "Ana",
"AndrewNeural": "Andrew",
"AriaNeural": {
"name": "Aria",
"variants": [
"chat",
"customerservice",
"narration-professional",
"newscast-casual",
"newscast-formal",
"cheerful",
"empathetic",
"angry",
"sad",
"excited",
"friendly",
"terrified",
"shouting",
"unfriendly",
"whispering",
"hopeful",
],
},
"AshleyNeural": "Ashley",
"AvaNeural": "Ava",
"BrandonNeural": "Brandon",
"BrianNeural": "Brian",
"ChristopherNeural": "Christopher",
"CoraNeural": "Cora",
"DavisNeural": {
"name": "Davis",
"variants": [
"chat",
"angry",
"cheerful",
"excited",
"friendly",
"hopeful",
"sad",
"shouting",
"terrified",
"unfriendly",
"whispering",
],
},
"ElizabethNeural": "Elizabeth",
"EmmaNeural": "Emma",
"EricNeural": "Eric",
"GuyNeural": {
"name": "Guy",
"variants": [
"newscast",
"angry",
"cheerful",
"sad",
"excited",
"friendly",
"terrified",
"shouting",
"unfriendly",
"whispering",
"hopeful",
],
},
"JacobNeural": "Jacob",
"JaneNeural": {
"name": "Jane",
"variants": [
"angry",
"cheerful",
"excited",
"friendly",
"hopeful",
"sad",
"shouting",
"terrified",
"unfriendly",
"whispering",
],
},
"JasonNeural": {
"name": "Jason",
"variants": [
"angry",
"cheerful",
"excited",
"friendly",
"hopeful",
"sad",
"shouting",
"terrified",
"unfriendly",
"whispering",
],
},
"JennyNeural": {
"name": "Jenny",
"variants": [
"assistant",
"chat",
"customerservice",
"newscast",
"angry",
"cheerful",
"sad",
"excited",
"friendly",
"terrified",
"shouting",
"unfriendly",
"whispering",
"hopeful",
],
},
"KaiNeural": {"name": "Kai", "variants": ["conversation"]},
"LunaNeural": {"name": "Luna", "variants": ["conversation"]},
"MichelleNeural": "Michelle",
"MonicaNeural": "Monica",
"NancyNeural": {
"name": "Nancy",
"variants": [
"angry",
"cheerful",
"excited",
"friendly",
"hopeful",
"sad",
"shouting",
"terrified",
"unfriendly",
"whispering",
],
},
"RogerNeural": "Roger",
"SaraNeural": {
"name": "Sara",
"variants": [
"angry",
"cheerful",
"excited",
"friendly",
"hopeful",
"sad",
"shouting",
"terrified",
"unfriendly",
"whispering",
],
},
"SteffanNeural": "Steffan",
"TonyNeural": {
"name": "Tony",
"variants": [
"angry",
"cheerful",
"excited",
"friendly",
"hopeful",
"sad",
"shouting",
"terrified",
"unfriendly",
"whispering",
],
},
},
"en-ZA": {"LeahNeural": "Leah", "LukeNeural": "Luke"},
"es-AR": {"ElenaNeural": "Elena", "TomasNeural": "Tomas"},
"es-BO": {"MarceloNeural": "Marcelo", "SofiaNeural": "Sofia"},
"es-CL": {"CatalinaNeural": "Catalina", "LorenzoNeural": "Lorenzo"},
"es-CO": {"GonzaloNeural": "Gonzalo", "SalomeNeural": "Salome"},
"es-CR": {"JuanNeural": "Juan", "MariaNeural": "Maria"},
"es-CU": {"BelkysNeural": "Belkys", "ManuelNeural": "Manuel"},
"es-DO": {"EmilioNeural": "Emilio", "RamonaNeural": "Ramona"},
"es-EC": {"AndreaNeural": "Andrea", "LuisNeural": "Luis"},
"es-ES": {
"AbrilNeural": "Abril",
"AlvaroNeural": {"name": "Alvaro", "variants": ["cheerful", "sad"]},
"ArnauNeural": "Arnau",
"DarioNeural": "Dario",
"EliasNeural": "Elias",
"ElviraNeural": "Elvira",
"EstrellaNeural": "Estrella",
"IreneNeural": "Irene",
"LaiaNeural": "Laia",
"LiaNeural": "Lia",
"NilNeural": "Nil",
"SaulNeural": "Saul",
"TeoNeural": "Teo",
"TrianaNeural": "Triana",
"VeraNeural": "Vera",
"XimenaNeural": "Ximena",
},
"es-GQ": {"JavierNeural": "Javier", "TeresaNeural": "Teresa"},
"es-GT": {"AndresNeural": "Andres", "MartaNeural": "Marta"},
"es-HN": {"CarlosNeural": "Carlos", "KarlaNeural": "Karla"},
"es-MX": {
"BeatrizNeural": "Beatriz",
"CandelaNeural": "Candela",
"CarlotaNeural": "Carlota",
"CecilioNeural": "Cecilio",
"DaliaNeural": {"name": "Dalia", "variants": ["cheerful", "sad", "whispering"]},
"GerardoNeural": "Gerardo",
"JorgeNeural": {
"name": "Jorge",
"variants": ["cheerful", "chat", "whispering", "sad", "excited"],
},
"LarissaNeural": "Larissa",
"LibertoNeural": "Liberto",
"LucianoNeural": "Luciano",
"MarinaNeural": "Marina",
"NuriaNeural": "Nuria",
"PelayoNeural": "Pelayo",
"RenataNeural": "Renata",
"YagoNeural": "Yago",
},
"es-NI": {"FedericoNeural": "Federico", "YolandaNeural": "Yolanda"},
"es-PA": {"MargaritaNeural": "Margarita", "RobertoNeural": "Roberto"},
"es-PE": {"AlexNeural": "Alex", "CamilaNeural": "Camila"},
"es-PR": {"KarinaNeural": "Karina", "VictorNeural": "Victor"},
"es-PY": {"MarioNeural": "Mario", "TaniaNeural": "Tania"},
"es-SV": {"LorenaNeural": "Lorena", "RodrigoNeural": "Rodrigo"},
"es-US": {"AlonsoNeural": "Alonso", "PalomaNeural": "Paloma"},
"es-UY": {"MateoNeural": "Mateo", "ValentinaNeural": "Valentina"},
"es-VE": {"PaolaNeural": "Paola", "SebastianNeural": "Sebastian"},
"et-EE": {"AnuNeural": "Anu", "KertNeural": "Kert"},
"eu-ES": {"AinhoaNeural": "Ainhoa", "AnderNeural": "Ander"},
"fa-IR": {"DilaraNeural": "Dilara", "FaridNeural": "Farid"},
"fi-FI": {"HarriNeural": "Harri", "NooraNeural": "Noora", "SelmaNeural": "Selma"},
"fil-PH": {"AngeloNeural": "Angelo", "BlessicaNeural": "Blessica"},
"fr-BE": {"CharlineNeural": "Charline", "GerardNeural": "Gerard"},
"fr-CA": {
"AntoineNeural": "Antoine",
"JeanNeural": "Jean",
"SylvieNeural": "Sylvie",
"ThierryNeural": "Thierry",
},
"fr-CH": {"ArianeNeural": "Ariane", "FabriceNeural": "Fabrice"},
"fr-FR": {
"AlainNeural": "Alain",
"BrigitteNeural": "Brigitte",
"CelesteNeural": "Celeste",
"ClaudeNeural": "Claude",
"CoralieNeural": "Coralie",
"DeniseNeural": {
"name": "Denise",
"variants": ["cheerful", "sad", "whispering", "excited"],
},
"EloiseNeural": "Eloise",
"HenriNeural": {
"name": "Henri",
"variants": ["cheerful", "sad", "whispering", "excited"],
},
"JacquelineNeural": "Jacqueline",
"JeromeNeural": "Jerome",
"JosephineNeural": "Josephine",
"MauriceNeural": "Maurice",
"YvesNeural": "Yves",
"YvetteNeural": "Yvette",
},
"ga-IE": {"ColmNeural": "Colm", "OrlaNeural": "Orla"},
"gl-ES": {"RoiNeural": "Roi", "SabelaNeural": "Sabela"},
"gu-IN": {"DhwaniNeural": "Dhwani", "NiranjanNeural": "Niranjan"},
"he-IL": {"AvriNeural": "Avri", "HilaNeural": "Hila"},
"hi-IN": {
"AaravNeural": "Aarav",
"AartiNeural": "Aarti",
"AnanyaNeural": "Ananya",
"ArjunNeural": "Arjun",
"KavyaNeural": "Kavya",
"KunalNeural": "Kunal",
"MadhurNeural": "Madhur",
"RehaanNeural": "Rehaan",
"SwaraNeural": {
"name": "Swara",
"variants": ["newscast", "cheerful", "empathetic"],
},
},
"hr-HR": {"GabrijelaNeural": "Gabrijela", "SreckoNeural": "Srecko"},
"hu-HU": {"NoemiNeural": "Noemi", "TamasNeural": "Tamas"},
"hy-AM": {"AnahitNeural": "Anahit", "HaykNeural": "Hayk"},
"id-ID": {"ArdiNeural": "Ardi", "GadisNeural": "Gadis"},
"is-IS": {"GudrunNeural": "Gudrun", "GunnarNeural": "Gunnar"},
"it-IT": {
"BenignoNeural": "Benigno",
"CalimeroNeural": "Calimero",
"CataldoNeural": "Cataldo",
"DiegoNeural": {"name": "Diego", "variants": ["cheerful", "sad", "excited"]},
"ElsaNeural": "Elsa",
"FabiolaNeural": "Fabiola",
"FiammaNeural": "Fiamma",
"GianniNeural": "Gianni",
"GiuseppeNeural": "Giuseppe",
"ImeldaNeural": "Imelda",
"IrmaNeural": "Irma",
"IsabellaNeural": {
"name": "Isabella",
"variants": ["cheerful", "chat", "whispering", "sad", "excited"],
},
"LisandroNeural": "Lisandro",
"PalmiraNeural": "Palmira",
"PierinaNeural": "Pierina",
"RinaldoNeural": "Rinaldo",
},
"iu-Cans-CA": {"SiqiniqNeural": "Siqiniq", "TaqqiqNeural": "Taqqiq"},
"iu-Latn-CA": {"SiqiniqNeural": "Siqiniq", "TaqqiqNeural": "Taqqiq"},
"ja-JP": {
"AoiNeural": "Aoi",
"DaichiNeural": "Daichi",
"KeitaNeural": "Keita",
"MayuNeural": "Mayu",
"NanamiNeural": {
"name": "Nanami",
"variants": ["chat", "customerservice", "cheerful"],
},
"NaokiNeural": "Naoki",
"ShioriNeural": "Shiori",
},
"jv-ID": {"DimasNeural": "Dimas", "SitiNeural": "Siti"},
"ka-GE": {"EkaNeural": "Eka", "GiorgiNeural": "Giorgi"},
"kk-KZ": {"AigulNeural": "Aigul", "DauletNeural": "Daulet"},
"km-KH": {"PisethNeural": "Piseth", "SreymomNeural": "Sreymom"},
"kn-IN": {"GaganNeural": "Gagan", "SapnaNeural": "Sapna"},
"ko-KR": {
"BongJinNeural": "BongJin",
"GookMinNeural": "GookMin",
"HyunsuNeural": "Hyunsu",
"InJoonNeural": {"name": "InJoon", "variants": ["sad"]},
"JiMinNeural": "JiMin",
"SeoHyeonNeural": "SeoHyeon",
"SoonBokNeural": "SoonBok",
"SunHiNeural": "Sun-Hi",
"YuJinNeural": "YuJin",
},
"lo-LA": {"ChanthavongNeural": "Chanthavong", "KeomanyNeural": "Keomany"},
"lt-LT": {"LeonasNeural": "Leonas", "OnaNeural": "Ona"},
"lv-LV": {"EveritaNeural": "Everita", "NilsNeural": "Nils"},
"mk-MK": {"AleksandarNeural": "Aleksandar", "MarijaNeural": "Marija"},
"ml-IN": {"MidhunNeural": "Midhun", "SobhanaNeural": "Sobhana"},
"mn-MN": {"BataaNeural": "Bataa", "YesuiNeural": "Yesui"},
"mr-IN": {"AarohiNeural": "Aarohi", "ManoharNeural": "Manohar"},
"ms-MY": {"OsmanNeural": "Osman", "YasminNeural": "Yasmin"},
"mt-MT": {"GraceNeural": "Grace", "JosephNeural": "Joseph"},
"my-MM": {"NilarNeural": "Nilar", "ThihaNeural": "Thiha"},
"nb-NO": {
"FinnNeural": "Finn",
"IselinNeural": "Iselin",
"PernilleNeural": "Pernille",
},
"ne-NP": {"HemkalaNeural": "Hemkala", "SagarNeural": "Sagar"},
"nl-BE": {"ArnaudNeural": "Arnaud", "DenaNeural": "Dena"},
"nl-NL": {
"ColetteNeural": "Colette",
"FennaNeural": "Fenna",
"MaartenNeural": "Maarten",
},
"or-IN": {"SubhasiniNeural": "Subhasini", "SukantNeural": "Sukant"},
"pa-IN": {"OjasNeural": "Ojas", "VaaniNeural": "Vaani"},
"pl-PL": {
"AgnieszkaNeural": "Agnieszka",
"MarekNeural": "Marek",
"ZofiaNeural": "Zofia",
},
"ps-AF": {"GulNawazNeural": "Gul Nawaz", "LatifaNeural": "Latifa"},
"pt-BR": {
"AntonioNeural": "Antonio",
"BrendaNeural": "Brenda",
"DonatoNeural": "Donato",
"ElzaNeural": "Elza",
"FabioNeural": "Fabio",
"FranciscaNeural": {"name": "Francisca", "variants": ["calm"]},
"GiovannaNeural": "Giovanna",
"HumbertoNeural": "Humberto",
"JulioNeural": "Julio",
"LeilaNeural": "Leila",
"LeticiaNeural": "Leticia",
"ManuelaNeural": "Manuela",
"NicolauNeural": "Nicolau",
"ThalitaNeural": "Thalita",
"ValerioNeural": "Valerio",
"YaraNeural": "Yara",
},
"pt-PT": {
"DuarteNeural": "Duarte",
"FernandaNeural": "Fernanda",
"RaquelNeural": {"name": "Raquel", "variants": ["sad", "whispering"]},
},
"ro-RO": {"AlinaNeural": "Alina", "EmilNeural": "Emil"},
"ru-RU": {
"DariyaNeural": "Dariya",
"DmitryNeural": "Dmitry",
"SvetlanaNeural": "Svetlana",
},
"si-LK": {"SameeraNeural": "Sameera", "ThiliniNeural": "Thilini"},
"sk-SK": {"LukasNeural": "Lukas", "ViktoriaNeural": "Viktoria"},
"sl-SI": {"PetraNeural": "Petra", "RokNeural": "Rok"},
"so-SO": {"MuuseNeural": "Muuse", "UbaxNeural": "Ubax"},
"sq-AL": {"AnilaNeural": "Anila", "IlirNeural": "Ilir"},
"sr-Latn-RS": {"NicholasNeural": "Nicholas", "SophieNeural": "Sophie"},
"sr-RS": {"NicholasNeural": "Nicholas", "SophieNeural": "Sophie"},
"su-ID": {"JajangNeural": "Jajang", "TutiNeural": "Tuti"},
"sv-SE": {
"HilleviNeural": "Hillevi",
"MattiasNeural": "Mattias",
"SofieNeural": "Sofie",
},
"sw-KE": {"RafikiNeural": "Rafiki", "ZuriNeural": "Zuri"},
"sw-TZ": {"DaudiNeural": "Daudi", "RehemaNeural": "Rehema"},
"ta-IN": {"PallaviNeural": "Pallavi", "ValluvarNeural": "Valluvar"},
"ta-LK": {"KumarNeural": "Kumar", "SaranyaNeural": "Saranya"},
"ta-MY": {"KaniNeural": "Kani", "SuryaNeural": "Surya"},
"ta-SG": {"AnbuNeural": "Anbu", "VenbaNeural": "Venba"},
"te-IN": {"MohanNeural": "Mohan", "ShrutiNeural": "Shruti"},
"th-TH": {
"AcharaNeural": "Achara",
"NiwatNeural": "Niwat",
"PremwadeeNeural": "Premwadee",
},
"tr-TR": {"AhmetNeural": "Ahmet", "EmelNeural": "Emel"},
"uk-UA": {"OstapNeural": "Ostap", "PolinaNeural": "Polina"},
"ur-IN": {"GulNeural": "Gul", "SalmanNeural": "Salman"},
"ur-PK": {"AsadNeural": "Asad", "UzmaNeural": "Uzma"},
"uz-UZ": {"MadinaNeural": "Madina", "SardorNeural": "Sardor"},
"vi-VN": {"HoaiMyNeural": "HoaiMy", "NamMinhNeural": "NamMinh"},
"wuu-CN": {"XiaotongNeural": "Xiaotong", "YunzheNeural": "Yunzhe"},
"yue-CN": {"XiaoMinNeural": "XiaoMin", "YunSongNeural": "YunSong"},
"zh-CN": {
"XiaochenNeural": {"name": "Xiaochen", "variants": ["livecommercial"]},
"XiaohanNeural": {
"name": "Xiaohan",
"variants": [
"calm",
"fearful",
"cheerful",
"disgruntled",
"serious",
"angry",
"sad",
"gentle",
"affectionate",
"embarrassed",
],
},
"XiaomengNeural": {"name": "Xiaomeng", "variants": ["chat"]},
"XiaomoNeural": {
"name": "Xiaomo",
"variants": [
"embarrassed",
"calm",
"fearful",
"cheerful",
"disgruntled",
"serious",
"angry",
"sad",
"depressed",
"affectionate",
"gentle",
"envious",
],
},
"XiaoqiuNeural": "Xiaoqiu",
"XiaorouNeural": "Xiaorou",
"XiaoruiNeural": {
"name": "Xiaorui",
"variants": ["calm", "fearful", "angry", "sad"],
},
"XiaoshuangNeural": {"name": "Xiaoshuang", "variants": ["chat"]},
"XiaoxiaoNeural": {
"name": "Xiaoxiao",
"variants": [
"assistant",
"chat",
"customerservice",
"newscast",
"affectionate",
"angry",
"calm",
"cheerful",
"disgruntled",
"fearful",
"gentle",
"lyrical",
"sad",
"serious",
"poetry-reading",
"friendly",
"chat-casual",
"whispering",
"sorry",
"excited",
],
},
"XiaoxiaoDialectsNeural": "Xiaoxiao Dialects",
"XiaoyanNeural": "Xiaoyan",
"XiaoyiNeural": {
"name": "Xiaoyi",
"variants": [
"angry",
"disgruntled",
"affectionate",
"cheerful",
"fearful",
"sad",
"embarrassed",
"serious",
"gentle",
],
},
"XiaoyouNeural": "Xiaoyou",
"XiaozhenNeural": {
"name": "Xiaozhen",
"variants": [
"angry",
"disgruntled",
"cheerful",
"fearful",
"sad",
"serious",
],
},
"YunfengNeural": {
"name": "Yunfeng",
"variants": [
"angry",
"disgruntled",
"cheerful",
"fearful",
"sad",
"serious",
"depressed",
],
},
"YunhaoNeural": {"name": "Yunhao", "variants": ["advertisement-upbeat"]},
"YunjianNeural": {
"name": "Yunjian",
"variants": [
"narration-relaxed",
"sports-commentary",
"sports-commentary-excited",
"angry",
"disgruntled",
"cheerful",
"sad",
"serious",
"depressed",
"documentary-narration",
],
},
"YunjieNeural": "Yunjie",
"YunxiNeural": {
"name": "Yunxi",
"variants": [
"narration-relaxed",
"embarrassed",
"fearful",
"cheerful",
"disgruntled",
"serious",
"angry",
"sad",
"depressed",
"chat",
"assistant",
"newscast",
],
},
"YunxiaNeural": {
"name": "Yunxia",
"variants": ["calm", "fearful", "cheerful", "angry", "sad"],
},
"YunyangNeural": {
"name": "Yunyang",
"variants": [
"customerservice",
"narration-professional",
"newscast-casual",
],
},
"YunyeNeural": {
"name": "Yunye",
"variants": [
"embarrassed",
"calm",
"fearful",
"cheerful",
"disgruntled",
"serious",
"angry",
"sad",
],
},
"YunzeNeural": {
"name": "Yunze",
"variants": [
"calm",
"fearful",
"cheerful",
"disgruntled",
"serious",
"angry",
"sad",
"depressed",
"documentary-narration",
],
},
},
"zh-CN-henan": {"YundengNeural": "Yundeng"},
"zh-CN-shandong": {"YunxiangNeural": "Yunxiang"},
"zh-HK": {
"HiuGaaiNeural": "HiuGaai",
"HiuMaanNeural": "HiuMaan",
"WanLungNeural": "WanLung",
},
"zh-TW": {
"HsiaoChenNeural": "HsiaoChen",
"HsiaoYuNeural": "HsiaoYu",
"YunJheNeural": "YunJhe",
},
"zu-ZA": {"ThandoNeural": "Thando", "ThembaNeural": "Themba"},
}
hass-nabucasa-0.101.0/pyproject.toml 0000664 0000000 0000000 00000011444 15011602407 0017276 0 ustar 00root root 0000000 0000000 [build-system]
build-backend = "setuptools.build_meta"
requires = [
"setuptools>=62.3",
]
[project]
authors = [
{name = "Nabu Casa, Inc.", email = "opensource@nabucasa.com"},
]
classifiers = [
"Intended Audience :: End Users/Desktop",
"Intended Audience :: Developers",
"License :: OSI Approved :: GNU General Public License v3 (GPLv3)",
"Operating System :: OS Independent",
"Topic :: Internet :: Proxy Servers",
"Topic :: Software Development :: Libraries :: Python Modules",
"Development Status :: 5 - Production/Stable",
"Programming Language :: Python :: 3.13",
]
dependencies = [
"acme==3.3.0",
"aiohttp>=3.6.1",
"async_timeout>=4",
"atomicwrites-homeassistant==1.4.1",
"attrs>=19.3",
"ciso8601>=2.3.0",
"cryptography>=42.0.0",
"josepy<2",
"pycognito==2024.5.1",
"PyJWT>=2.8.0",
"snitun==0.40.0",
"webrtc-models<1.0.0",
]
description = "Home Assistant cloud integration by Nabu Casa, Inc."
license = {text = "GPL v3"}
name = "hass-nabucasa"
readme = "README.md"
requires-python = ">=3.13"
version = "0.0.0"
[project.optional-dependencies]
test = [
"codespell==2.4.1",
"mypy==1.15.0",
"pre-commit==4.2.0",
"pre-commit-hooks==5.0.0",
"pylint==3.3.7",
"pytest-aiohttp==1.1.0",
"pytest-timeout==2.4.0",
"pytest==8.3.5",
"ruff==0.11.9",
"types_atomicwrites==1.4.5.1",
"types_pyOpenSSL==24.1.0.20240722",
"xmltodict==0.14.2",
"syrupy==4.9.1",
"tomli==2.2.1",
]
[tool.mypy]
check_untyped_defs = true
disallow_incomplete_defs = true
disallow_subclassing_any = true
disallow_untyped_calls = true
disallow_untyped_decorators = true
disallow_untyped_defs = true
ignore_missing_imports = true
no_implicit_optional = true
show_error_codes = true
strict_equality = true
warn_incomplete_stub = true
warn_redundant_casts = true
warn_return_any = true
warn_unreachable = true
warn_unused_configs = true
warn_unused_ignores = true
[tool.pylint.BASIC]
disable = [
"abstract-method",
"cyclic-import",
"duplicate-code",
"global-statement",
"line-too-long",
"locally-disabled",
"missing-docstring",
"not-context-manager",
"too-few-public-methods",
"too-many-arguments",
"too-many-branches",
"too-many-instance-attributes",
"too-many-lines",
"too-many-locals",
"too-many-public-methods",
"too-many-return-statements",
"too-many-statements",
"unused-argument",
]
extension-pkg-allow-list=[
"ciso8601",
]
generated-members=[
"botocore.errorfactory",
]
good-names= [
"_",
"cb",
"ex",
"fp",
"i",
"id",
"j",
"k",
"Run",
"T",
]
[tool.pylint.EXCEPTIONS]
overgeneral-exceptions=[
"builtins.Exception",
]
[tool.pylint.MAIN]
ignore=[
"tests_*",
]
reports=false
[tool.pytest.ini_options]
asyncio_mode = "auto"
[tool.ruff]
fix = true
line-length = 88
show-fixes = true
target-version = "py311"
[tool.ruff.lint]
ignore = [
"ANN401", # https://docs.astral.sh/ruff/rules/any-type/
"COM812", # https://docs.astral.sh/ruff/rules/missing-trailing-comma/
"DTZ005", # https://docs.astral.sh/ruff/rules/call-datetime-now-without-tzinfo/
"EM101", # https://docs.astral.sh/ruff/rules/raw-string-in-exception/
"EM102", # https://docs.astral.sh/ruff/rules/f-string-in-exception/
"FBT", # https://docs.astral.sh/ruff/rules/#flake8-boolean-trap-fbt
"N818", # https://docs.astral.sh/ruff/rules/error-suffix-on-exception-name/
"PLR2004", # https://docs.astral.sh/ruff/rules/magic-value-comparison/
"RUF006", # https://docs.astral.sh/ruff/rules/asyncio-dangling-task/
"S101", # https://docs.astral.sh/ruff/rules/assert/
"S303", # https://docs.astral.sh/ruff/rules/suspicious-insecure-hash-usage/
"S311", # https://docs.astral.sh/ruff/rules/suspicious-non-cryptographic-random-usage/
"TC002", # https://docs.astral.sh/ruff/rules/typing-only-third-party-import/
"TC003", # https://docs.astral.sh/ruff/rules/typing-only-standard-library-import/
"TRY003", # https://docs.astral.sh/ruff/rules/raise-vanilla-args/
"TRY301", # https://docs.astral.sh/ruff/rules/raise-within-try/
"TRY400", # https://docs.astral.sh/ruff/rules/error-instead-of-exception/
]
select = [
"ALL",
]
[tool.ruff.lint.extend-per-file-ignores]
"py.typed" = [
"D100",
]
[tool.ruff.lint.flake8-pytest-style]
fixture-parentheses = false
mark-parentheses = false
[tool.ruff.lint.isort]
combine-as-imports = true
force-sort-within-sections = true
known-first-party = [
"hass_nabucasa",
]
[tool.ruff.lint.mccabe]
max-complexity = 22
[tool.ruff.lint.pydocstyle]
# Use PEP 257-style docstrings.
convention = "pep257"
[tool.ruff.lint.pylint]
max-args = 15
max-branches = 30
max-returns = 7
max-statements = 80
[tool.setuptools]
include-package-data = true
platforms = [
"any",
]
zip-safe = false
[tool.setuptools.package-data]
hass_nabucasa = [
"py.typed",
]
[tool.setuptools.packages.find]
include = [
"hass_nabucasa*",
]
hass-nabucasa-0.101.0/scripts/ 0000775 0000000 0000000 00000000000 15011602407 0016045 5 ustar 00root root 0000000 0000000 hass-nabucasa-0.101.0/scripts/lint 0000775 0000000 0000000 00000000077 15011602407 0016745 0 ustar 00root root 0000000 0000000 #!/bin/sh
cd "$(dirname "$0")/.."
pre-commit run --all-files
hass-nabucasa-0.101.0/scripts/snapshot-update 0000775 0000000 0000000 00000000110 15011602407 0021102 0 ustar 00root root 0000000 0000000 #!/bin/sh
cd "$(dirname "$0")/.."
python3 -m pytest --snapshot-update
hass-nabucasa-0.101.0/scripts/test 0000775 0000000 0000000 00000000066 15011602407 0016754 0 ustar 00root root 0000000 0000000 #!/bin/sh
cd "$(dirname "$0")/.."
python3 -m pytest
hass-nabucasa-0.101.0/scripts/update_voice_data.py 0000775 0000000 0000000 00000004723 15011602407 0022070 0 ustar 00root root 0000000 0000000 #!/usr/bin/python3
"""Update the voice data."""
# ruff: noqa: T201
import asyncio
from pathlib import Path
import subprocess
import aiohttp
voice_data_path = Path(__file__).parent.parent / "hass_nabucasa/voice_data.py"
REGION = "westus"
LIST_VOICES_URL = (
f"https://{REGION}.tts.speech.microsoft.com/cognitiveservices/voices/list"
)
def main() -> None:
"""Run script."""
token = None
while not token:
token = input("Please enter your Azure token: ").strip()
raw_data = asyncio.run(get_data(token))
data = {}
for voice in raw_data:
if voice["Status"] != "GA":
continue
locale = voice["Locale"]
voice_id = voice["ShortName"][len(locale) + 1 :]
voice_name = voice["DisplayName"]
if voice_name.endswith("Neural"):
            voice_name = voice_name[: -len("Neural")].strip()
# Skip variants
if ":" in voice_id or "Multilingual" in voice_id:
continue
        if style_list := voice.get("StyleList"):
            voice_info = {
                "name": voice_name,
                "variants": style_list,
            }
        else:
            voice_info = voice_name
data.setdefault(locale, {})[voice_id] = voice_info
# Sort the data
for locale, info in data.items():
data[locale] = dict(
sorted(
info.items(),
key=lambda x: x[1]["name"] if isinstance(x[1], dict) else x[1],
)
)
data = dict(sorted(data.items()))
parts = [
'"""',
"Available voices for TTS.",
"",
"Automatically generated file, do not edit this file directly.",
"Run python3 -m scripts/update_voice_data.py to update this file.",
'"""',
"",
f"TTS_VOICES: dict[str, dict[str, dict | str]] = {data}",
]
voice_data_path.write_text("\n".join(parts))
subprocess.run( # noqa: S603
["ruff", "format", voice_data_path], # noqa: S607
check=True,
stdout=subprocess.DEVNULL,
)
print("Updated voice_data.py with new voice data.")
async def get_data(token: str) -> dict:
"""Gather data."""
async with aiohttp.ClientSession() as session:
voices = await session.get(
LIST_VOICES_URL, headers={"Authorization": f"Bearer {token}"}
)
voices.raise_for_status()
return await voices.json()
if __name__ == "__main__":
main()
hass-nabucasa-0.101.0/tests/ 0000775 0000000 0000000 00000000000 15011602407 0015520 5 ustar 00root root 0000000 0000000 hass-nabucasa-0.101.0/tests/__init__.py 0000664 0000000 0000000 00000000045 15011602407 0017630 0 ustar 00root root 0000000 0000000 """Tests for the cloud component."""
hass-nabucasa-0.101.0/tests/__snapshots__/ 0000775 0000000 0000000 00000000000 15011602407 0020336 5 ustar 00root root 0000000 0000000 hass-nabucasa-0.101.0/tests/__snapshots__/test_voice.ambr 0000664 0000000 0000000 00000003012 15011602407 0023341 0 ustar 00root root 0000000 0000000 # serializer version: 1
# name: test_process_tts_with_gender
dict({
'speak': dict({
'@version': '1.0',
'@xml:lang': 'en-US',
'@xmlns': 'http://www.w3.org/2001/10/synthesis',
'@xmlns:mstts': 'https://www.w3.org/2001/mstts',
'voice': dict({
'#text': 'Text for Saying',
'@name': 'en-US-JennyNeural',
}),
}),
})
# ---
# name: test_process_tts_with_voice
dict({
'speak': dict({
'@version': '1.0',
'@xml:lang': 'nl-NL',
'@xmlns': 'http://www.w3.org/2001/10/synthesis',
'@xmlns:mstts': 'https://www.w3.org/2001/mstts',
'voice': dict({
'#text': 'Text for Saying',
'@name': 'nl-NL-FennaNeural',
}),
}),
})
# ---
# name: test_process_tts_with_voice_and_style
dict({
'speak': dict({
'@version': '1.0',
'@xml:lang': 'de-DE',
'@xmlns': 'http://www.w3.org/2001/10/synthesis',
'@xmlns:mstts': 'https://www.w3.org/2001/mstts',
'voice': dict({
'@name': 'de-DE-ConradNeural',
'mstts:express-as': dict({
'#text': 'Text for Saying',
'@style': 'cheerful',
}),
}),
}),
})
# ---
# name: test_process_tts_with_voice_and_style.1
dict({
'speak': dict({
'@version': '1.0',
'@xml:lang': 'en-US',
'@xmlns': 'http://www.w3.org/2001/10/synthesis',
'@xmlns:mstts': 'https://www.w3.org/2001/mstts',
'voice': dict({
'#text': 'Text for Saying 2',
'@name': 'en-US-MichelleNeural',
}),
}),
})
# ---
hass-nabucasa-0.101.0/tests/common.py 0000664 0000000 0000000 00000020011 15011602407 0017354 0 ustar 00root root 0000000 0000000 """Test the helper method for writing tests."""
from __future__ import annotations
import asyncio
from collections.abc import Coroutine
from pathlib import Path
import threading
from typing import Any, Literal
from unittest.mock import Mock
from hass_nabucasa.client import CloudClient
class MockClient(CloudClient):
"""Interface class for Home Assistant."""
def __init__(self, base_path, loop, websession) -> None:
"""Initialize MockClient."""
self._loop = loop
self._websession = websession
self._cloudhooks = {}
self._aiohttp_runner = Mock()
self.prop_remote_autostart = True
self.mock_user = []
self.mock_dispatcher = []
self.mock_alexa = []
self.mock_google = []
self.mock_webhooks = []
self.mock_system = []
self.mock_repairs = []
self.mock_connection_info = []
self.mock_return = []
self._base_path = base_path
self.pref_should_connect = False
@property
def base_path(self) -> Path:
"""Return path to base dir."""
return self._base_path
@property
def loop(self):
"""Return client loop."""
return self._loop
@property
def websession(self):
"""Return client session for aiohttp."""
return self._websession
@property
def client_name(self):
"""Return name of the client, this will be used as the user-agent."""
return "hass-nabucasa/tests"
@property
def aiohttp_runner(self):
"""Return client webinterface aiohttp application."""
return self._aiohttp_runner
@property
def cloudhooks(self):
"""Return list of cloudhooks."""
return self._cloudhooks
@property
def remote_autostart(self) -> bool:
"""Return true if we want start a remote connection."""
return self.prop_remote_autostart
async def cloud_connected(self):
"""Handle cloud connected."""
async def cloud_disconnected(self):
"""Handle cloud disconnected."""
async def cloud_started(self):
"""Handle cloud started."""
async def cloud_stopped(self):
"""Handle stopping."""
async def logout_cleanups(self):
"""Need nothing to do."""
def user_message(self, identifier: str, title: str, message: str) -> None:
"""Create a message for user to UI."""
if self.loop._thread_id != threading.get_ident():
raise RuntimeError(
"`CloudClient.user_message` should be called from the event loop"
)
self.mock_user.append((identifier, title, message))
def dispatcher_message(self, identifier: str, data: Any = None) -> None:
"""Send data to dispatcher."""
self.mock_dispatcher.append((identifier, data))
async def async_cloud_connect_update(self, connect: bool) -> None:
"""Process cloud remote message to client."""
self.pref_should_connect = connect
async def async_alexa_message(self, payload):
"""Process cloud alexa message to client."""
self.mock_alexa.append(payload)
return self.mock_return.pop()
async def async_google_message(self, payload):
"""Process cloud google message to client."""
self.mock_google.append(payload)
return self.mock_return.pop()
async def async_webhook_message(self, payload):
"""Process cloud webhook message to client."""
self.mock_webhooks.append(payload)
return self.mock_return.pop()
async def async_system_message(self, payload):
"""Process cloud system message to client."""
self.mock_system.append(payload)
return self.mock_return.pop()
async def async_cloud_connection_info(self, payload) -> dict[Any, Any]:
"""Process cloud connection info message to client."""
self.mock_connection_info.append(payload)
return self.mock_return.pop()
async def async_cloudhooks_update(self, data):
"""Update internal cloudhooks data."""
self._cloudhooks = data
async def async_create_repair_issue(
self,
identifier: str,
translation_key: str,
*,
placeholders: dict[str, str] | None = None,
severity: Literal["error", "warning"] = "warning",
) -> None:
"""Create a repair issue."""
self.mock_repairs.append(
{
"identifier": identifier,
"translation_key": translation_key,
"placeholders": placeholders,
"severity": severity,
},
)
async def async_delete_repair_issue(self, identifier: str) -> None:
"""Delete a repair issue."""
issue = next(
(issue for issue in self.mock_repairs if issue["identifier"] == identifier),
None,
)
if issue is not None:
self.mock_repairs.remove(issue)
class MockAcme:
"""Mock AcmeHandler."""
def __init__(self) -> None:
"""Initialize MockAcme."""
self.is_valid = True
self.call_issue = False
self.call_reset = False
self.call_load = False
self.call_hardening = False
self.init_args = None
self.common_name = None
self.alternative_names = None
self.expire_date = None
self.fingerprint = None
self.email = "test@nabucasa.inc"
@property
def domains(self):
"""Return all domains."""
return self.alternative_names
def set_false(self):
"""Set certificate as not valid."""
self.is_valid = False
@property
def certificate_available(self) -> bool:
"""Return true if certificate is available."""
return self.common_name is not None
@property
def is_valid_certificate(self) -> bool:
"""Return valid certificate."""
return self.is_valid
async def issue_certificate(self):
"""Issue a certificate."""
self.call_issue = True
async def reset_acme(self):
"""Issue a certificate."""
self.call_reset = True
async def load_certificate(self):
"""Load certificate."""
self.call_load = True
async def hardening_files(self):
"""Hardening files."""
self.call_hardening = True
def __call__(self, *args) -> MockAcme:
"""Init."""
self.init_args = args
return self
class MockSnitun:
"""Mock Snitun client."""
def __init__(self) -> None:
"""Initialize MockAcme."""
self.call_start = False
self.call_stop = False
self.call_connect = False
self.call_disconnect = False
self.init_args = None
self.connect_args = None
self.init_kwarg = None
self.wait_task = asyncio.Event()
self.start_whitelist = None
self.start_endpoint_connection_error_callback = None
@property
def is_connected(self):
"""Return if it is connected."""
return self.call_connect and not self.call_disconnect
def wait(self):
"""Return waitable object."""
return self.wait_task.wait()
async def start(
self,
whitelist: bool = False,
endpoint_connection_error_callback: Coroutine[Any, Any, None] | None = None,
):
"""Start snitun."""
self.start_whitelist = whitelist
self.start_endpoint_connection_error_callback = (
endpoint_connection_error_callback
)
self.call_start = True
async def stop(self):
"""Stop snitun."""
self.call_stop = True
async def connect(
self,
token: bytes,
aes_key: bytes,
aes_iv: bytes,
throttling=None,
):
"""Connect snitun."""
self.call_connect = True
self.connect_args = [token, aes_key, aes_iv, throttling]
async def disconnect(self):
"""Disconnect snitun."""
self.wait_task.set()
self.call_disconnect = True
def __call__(self, *args, **kwarg) -> MockSnitun:
"""Init."""
self.init_args = args
self.init_kwarg = kwarg
return self
hass-nabucasa-0.101.0/tests/conftest.py 0000664 0000000 0000000 00000010424 15011602407 0017720 0 ustar 00root root 0000000 0000000 """Set up some common test helper things."""
import asyncio
import logging
from typing import cast
from unittest.mock import AsyncMock, MagicMock, PropertyMock, patch
from aiohttp import web
import pytest
from .common import MockClient
from .utils.aiohttp import mock_aiohttp_client
logging.basicConfig(level=logging.DEBUG)
@pytest.fixture(name="loop")
async def loop_fixture():
"""Return the event loop."""
return asyncio.get_running_loop()
@pytest.fixture
async def aioclient_mock(loop):
"""Fixture to mock aioclient calls."""
with mock_aiohttp_client(loop) as mock_session:
yield mock_session
@pytest.fixture
async def cloud_mock(loop, aioclient_mock, tmp_path):
"""Yield a simple cloud mock."""
cloud = MagicMock(name="Mock Cloud", is_logged_in=True)
def _executor(call, *args):
"""Run executor."""
return loop.run_in_executor(None, call, *args)
cloud.run_executor = _executor
cloud.websession = aioclient_mock.create_session(loop)
cloud.client = MockClient(tmp_path, loop, cloud.websession)
async def update_token(
id_token,
access_token,
refresh_token=None,
):
cloud.id_token = id_token
cloud.access_token = access_token
if refresh_token is not None:
cloud.refresh_token = refresh_token
cloud.update_token = MagicMock(side_effect=update_token)
cloud.ensure_not_connected = AsyncMock()
yield cloud
await cloud.websession.close()
@pytest.fixture
def auth_cloud_mock(cloud_mock):
"""Return an authenticated cloud instance."""
cloud_mock.auth.async_check_token.side_effect = AsyncMock()
cloud_mock.subscription_expired = False
return cloud_mock
@pytest.fixture
def cloud_client(cloud_mock: MagicMock) -> MockClient:
"""Return cloud client impl."""
return cast("MockClient", cloud_mock.client)
@pytest.fixture
def mock_cognito():
"""Mock warrant."""
with patch("hass_nabucasa.auth.CognitoAuth._create_cognito_client") as mock_cog:
yield mock_cog()
@pytest.fixture
def mock_iot_client(cloud_mock):
"""Mock a base IoT client."""
class Client(MagicMock):
"""Websocket client mock."""
closed = PropertyMock(return_value=False)
def auto_close(self, msg_count=1):
"""If the client should disconnect itself after 1 message."""
Client.closed = PropertyMock(side_effect=msg_count * [False] + [True])
async def close(self):
"""Close the client."""
client = Client()
websession = MagicMock()
# Trigger cancelled error to avoid reconnect.
org_websession = cloud_mock.websession
with patch(
"hass_nabucasa.iot_base.BaseIoT._wait_retry",
side_effect=asyncio.CancelledError,
):
websession.ws_connect.side_effect = AsyncMock(return_value=client)
cloud_mock.websession = websession
yield client
cloud_mock.websession = org_websession
class DisconnectMockServer(Exception):
"""Disconnect the mock server."""
@pytest.fixture
async def ws_server(aiohttp_client):
"""Create a mock WS server to connect to and returns a connected client."""
async def create_client_to_server(handle_server_msg):
"""Create a websocket server."""
logger = logging.getLogger(f"{__name__}.ws_server")
async def websocket_handler(request):
ws = web.WebSocketResponse()
await ws.prepare(request)
# Send a message to trigger IoTBase with
# `mark_connected_after_first_message`
await ws.send_json({"msgid": 0, "handler": "hello"})
async for msg in ws:
logger.debug("Received msg: %s", msg)
try:
resp = await handle_server_msg(msg)
if resp is not None:
logger.debug("Sending msg: %s", msg)
await ws.send_json(resp)
except DisconnectMockServer:
logger.debug("Closing connection (via DisconnectMockServer)")
await ws.close()
return ws
app = web.Application()
app.add_routes([web.get("/ws", websocket_handler)])
client = await aiohttp_client(app)
return await client.ws_connect("/ws")
return create_client_to_server
hass-nabucasa-0.101.0/tests/ruff.toml 0000664 0000000 0000000 00000000372 15011602407 0017361 0 ustar 00root root 0000000 0000000 extend = "../pyproject.toml"
src = [
"tests",
]
[lint]
ignore = [
"ARG001",
"ARG002",
"ANN001",
"ANN002",
"ANN003",
"ANN201",
"ANN202",
"ASYNC109",
"ASYNC230",
"PT006",
"PT007",
"PTH123",
"S105",
"SIM115",
"SLF001",
]
hass-nabucasa-0.101.0/tests/test_account_api.py 0000664 0000000 0000000 00000005336 15011602407 0021425 0 ustar 00root root 0000000 0000000 """Tests for Instance API."""
from typing import Any
from aiohttp import ClientError
import pytest
from hass_nabucasa import Cloud
from hass_nabucasa.account_api import (
AccountApi,
AccountApiError,
AccountServicesDetails,
)
from tests.utils.aiohttp import AiohttpClientMocker
API_HOSTNAME = "example.com"
@pytest.fixture(autouse=True)
def set_hostname(auth_cloud_mock: Cloud):
"""Set API hostname for the mock cloud service."""
auth_cloud_mock.servicehandlers_server = API_HOSTNAME
@pytest.mark.parametrize(
"exception,getmockargs,log_msg,exception_msg",
[
[
AccountApiError,
{"status": 500, "text": "Internal Server Error"},
"Response for get from example.com/account/services (500)",
"Failed to parse API response",
],
[
AccountApiError,
{"status": 429, "text": "Too fast"},
"Response for get from example.com/account/services (429)",
"Failed to parse API response",
],
[
AccountApiError,
{"exc": TimeoutError()},
"",
"Timeout reached while calling API",
],
[
AccountApiError,
{"exc": ClientError("boom!")},
"",
"Failed to fetch: boom!",
],
[
AccountApiError,
{"exc": Exception("boom!")},
"",
"Unexpected error while calling API: boom!",
],
],
)
async def test_problems_getting_services(
aioclient_mock: AiohttpClientMocker,
auth_cloud_mock: Cloud,
exception: Exception,
getmockargs: dict[str, Any],
log_msg: str,
exception_msg: str,
caplog: pytest.LogCaptureFixture,
):
"""Test problems getting account services."""
account_api = AccountApi(auth_cloud_mock)
aioclient_mock.get(
f"https://{API_HOSTNAME}/account/services",
**getmockargs,
)
with pytest.raises(exception, match=exception_msg):
await account_api.services()
assert log_msg in caplog.text
@pytest.mark.parametrize(
"services_response",
[{"alexa": {"available": True}, "storage": {"available": False}}],
)
async def test_getting_services(
aioclient_mock: AiohttpClientMocker,
auth_cloud_mock: Cloud,
services_response: AccountServicesDetails,
caplog: pytest.LogCaptureFixture,
):
"""Test getting account services."""
account_api = AccountApi(auth_cloud_mock)
aioclient_mock.get(
f"https://{API_HOSTNAME}/account/services",
json=services_response,
)
services = await account_api.services()
assert services == services_response
assert "Response for get from example.com/account/services (200)" in caplog.text
hass-nabucasa-0.101.0/tests/test_account_link.py 0000664 0000000 0000000 00000007070 15011602407 0021606 0 ustar 00root root 0000000 0000000 """Test Account Linking tools."""
import asyncio
from unittest.mock import AsyncMock, Mock
from aiohttp import web
import pytest
from hass_nabucasa import account_link
async def create_account_link_server(aiohttp_client, handle_server_msgs):
"""Create a websocket server."""
async def websocket_handler(request):
ws = web.WebSocketResponse()
await ws.prepare(request)
try:
await handle_server_msgs(ws)
finally:
await ws.close()
return ws
app = web.Application()
app.add_routes([web.get("/ws", websocket_handler)])
client = await aiohttp_client(app)
return await client.ws_connect("/ws")
async def create_helper_instance(
aiohttp_client,
handle_server_msgs,
service,
) -> account_link.AuthorizeAccountHelper:
"""Create a auth helper instance."""
client = await create_account_link_server(aiohttp_client, handle_server_msgs)
mock_cloud = Mock(
client=Mock(websession=Mock(ws_connect=AsyncMock(return_value=client))),
)
return account_link.AuthorizeAccountHelper(mock_cloud, service)
async def test_auth_helper_works(aiohttp_client):
"""Test authorize account helper."""
received = []
async def handle_msgs(ws):
"""Handle the messages on the server."""
data = await ws.receive_json()
received.append(data)
await ws.send_json({"authorize_url": "http://mock-url"})
await ws.send_json({"tokens": {"refresh_token": "abcd", "expires_in": 10}})
helper = await create_helper_instance(aiohttp_client, handle_msgs, "mock-service")
assert await helper.async_get_authorize_url() == "http://mock-url"
assert await helper.async_get_tokens() == {
"refresh_token": "abcd",
"expires_in": 10,
"service": "mock-service",
}
assert helper._client is None
assert len(received) == 1
assert received[0] == {"service": "mock-service"}
async def test_auth_helper_unknown_service(aiohttp_client):
"""Test authorize account helper."""
async def handle_msgs(ws):
"""Handle the messages on the server."""
await ws.receive_json()
await ws.send_json({"error": "unknown"})
helper = await create_helper_instance(aiohttp_client, handle_msgs, "mock-service")
with pytest.raises(account_link.AccountLinkException) as err:
await helper.async_get_authorize_url()
assert err.value.code == "unknown"
async def test_auth_helper_token_timeout(aiohttp_client):
"""Test timeout while waiting for tokens."""
async def handle_msgs(ws):
"""Handle the messages on the server."""
await ws.receive_json()
await ws.send_json({"authorize_url": "http://mock-url"})
await ws.send_json({"error": "timeout"})
helper = await create_helper_instance(aiohttp_client, handle_msgs, "mock-service")
await helper.async_get_authorize_url()
with pytest.raises(asyncio.TimeoutError):
await helper.async_get_tokens()
async def test_auth_helper_token_other_error(aiohttp_client):
"""Test error while waiting for tokens."""
async def handle_msgs(ws):
"""Handle the messages on the server."""
await ws.receive_json()
await ws.send_json({"authorize_url": "http://mock-url"})
await ws.send_json({"error": "something"})
helper = await create_helper_instance(aiohttp_client, handle_msgs, "mock-service")
await helper.async_get_authorize_url()
with pytest.raises(account_link.AccountLinkException) as err:
await helper.async_get_tokens()
assert err.value.code == "something"
hass-nabucasa-0.101.0/tests/test_api.py 0000664 0000000 0000000 00000001521 15011602407 0017701 0 ustar 00root root 0000000 0000000 """Test the base API module."""
from __future__ import annotations
import pytest
from hass_nabucasa.api import (
CloudApiError,
CloudApiNonRetryableError,
api_exception_handler,
)
class CustomException(CloudApiError):
"""Custom exception for testing."""
@pytest.mark.parametrize(
"exception,expected",
[
(CloudApiError("Oh no!"), CloudApiError),
(CloudApiNonRetryableError("Oh no!", code="616"), CloudApiNonRetryableError),
(CustomException("Oh no!"), CustomException),
(KeyError("stt"), CustomException),
],
)
async def test_raising_exception(exception, expected) -> None:
"""Test raising a custom exception."""
@api_exception_handler(CustomException)
async def mock_func() -> None:
raise exception
with pytest.raises(expected):
await mock_func()
hass-nabucasa-0.101.0/tests/test_auth.py 0000664 0000000 0000000 00000023575 15011602407 0020106 0 ustar 00root root 0000000 0000000 """Tests for the tools to communicate with the cloud."""
import asyncio
from unittest.mock import MagicMock, patch
from botocore.exceptions import ClientError
from pycognito.exceptions import MFAChallengeException
import pytest
from hass_nabucasa import auth as auth_api
@pytest.fixture
def mock_cloud(cloud_mock):
"""Mock cloud."""
cloud_mock.is_logged_in = False
return cloud_mock
def aws_error(code, message="Unknown", operation_name="fake_operation_name"):
"""Generate AWS error response."""
response = {"Error": {"Code": code, "Message": message}}
return ClientError(response, operation_name)
async def test_login_invalid_auth(mock_cognito, mock_cloud):
"""Test trying to login with invalid credentials."""
auth = auth_api.CognitoAuth(mock_cloud)
mock_cognito.authenticate.side_effect = aws_error("NotAuthorizedException")
with pytest.raises(auth_api.Unauthenticated):
await auth.async_login("user", "pass")
assert len(mock_cloud.update_token.mock_calls) == 0
async def test_login_user_not_found(mock_cognito, mock_cloud):
"""Test trying to login with invalid credentials."""
auth = auth_api.CognitoAuth(mock_cloud)
mock_cognito.authenticate.side_effect = aws_error("UserNotFoundException")
with pytest.raises(auth_api.UserNotFound):
await auth.async_login("user", "pass")
assert len(mock_cloud.update_token.mock_calls) == 0
async def test_login_user_not_confirmed(mock_cognito, mock_cloud):
"""Test trying to login without confirming account."""
auth = auth_api.CognitoAuth(mock_cloud)
mock_cognito.authenticate.side_effect = aws_error("UserNotConfirmedException")
with pytest.raises(auth_api.UserNotConfirmed):
await auth.async_login("user", "pass")
assert len(mock_cloud.update_token.mock_calls) == 0
async def test_login_user_mfa_required(mock_cognito, mock_cloud):
"""Test trying to login without MFA when it is required."""
auth = auth_api.CognitoAuth(mock_cloud)
mock_cognito.authenticate.side_effect = MFAChallengeException("MFA required", {})
with pytest.raises(auth_api.MFARequired):
await auth.async_login("user", "pass")
assert len(mock_cloud.update_token.mock_calls) == 0
async def test_login_user_verify_totp_invalid_code(mock_cognito, mock_cloud):
"""Test trying to login with MFA when it is required."""
auth = auth_api.CognitoAuth(mock_cloud)
mock_cognito.respond_to_software_token_mfa_challenge.side_effect = aws_error(
"CodeMismatchException",
)
with pytest.raises(auth_api.InvalidTotpCode):
await auth.async_login_verify_totp("user", "123456", {"session": "session"})
assert len(mock_cloud.update_token.mock_calls) == 0
async def test_login_user_verify_totp(mock_cognito, mock_cloud):
"""Test trying to login with MFA when it is required."""
auth = auth_api.CognitoAuth(mock_cloud)
mock_cognito.id_token = "test_id_token"
mock_cognito.access_token = "test_access_token"
mock_cognito.refresh_token = "test_refresh_token"
await auth.async_login_verify_totp("user", "123456", {"session": "session"})
assert len(mock_cognito.respond_to_software_token_mfa_challenge.mock_calls) == 1
mock_cloud.update_token.assert_called_once_with(
"test_id_token",
"test_access_token",
"test_refresh_token",
)
async def test_login(mock_cognito, mock_cloud):
"""Test trying to login without confirming account."""
auth = auth_api.CognitoAuth(mock_cloud)
mock_cognito.id_token = "test_id_token"
mock_cognito.access_token = "test_access_token"
mock_cognito.refresh_token = "test_refresh_token"
await auth.async_login("user", "pass")
assert len(mock_cognito.authenticate.mock_calls) == 1
mock_cloud.update_token.assert_called_once_with(
"test_id_token",
"test_access_token",
"test_refresh_token",
)
async def test_login_with_check_connection(mock_cognito, mock_cloud):
"""Test login with connection check."""
auth = auth_api.CognitoAuth(mock_cloud)
mock_cognito.id_token = "test_id_token"
mock_cognito.access_token = "test_access_token"
mock_cognito.refresh_token = "test_refresh_token"
await auth.async_login("user", "pass", check_connection=True)
assert len(mock_cognito.authenticate.mock_calls) == 1
mock_cloud.update_token.assert_called_once_with(
"test_id_token",
"test_access_token",
"test_refresh_token",
)
async def test_register(mock_cognito, cloud_mock):
"""Test registering an account."""
auth = auth_api.CognitoAuth(cloud_mock)
await auth.async_register(
"email@home-assistant.io",
"password",
client_metadata={"test": "metadata"},
)
assert len(mock_cognito.register.mock_calls) == 1
call = mock_cognito.register.mock_calls[0]
result_user, result_password = call.args
assert result_user == "email@home-assistant.io"
assert result_password == "password"
assert call.kwargs["client_metadata"] == {"test": "metadata"}
async def test_register_lowercase_email(mock_cognito, cloud_mock):
"""Test forcing lowercase email when registering an account."""
auth = auth_api.CognitoAuth(cloud_mock)
await auth.async_register("EMAIL@HOME-ASSISTANT.IO", "password")
assert len(mock_cognito.register.mock_calls) == 1
call = mock_cognito.register.mock_calls[0]
result_user = call.args[0]
assert result_user == "email@home-assistant.io"
async def test_register_fails(mock_cognito, cloud_mock):
"""Test registering an account."""
mock_cognito.register.side_effect = aws_error("SomeError")
auth = auth_api.CognitoAuth(cloud_mock)
with pytest.raises(auth_api.CloudError):
await auth.async_register("email@home-assistant.io", "password")
async def test_resend_email_confirm(mock_cognito, cloud_mock):
"""Test starting forgot password flow."""
auth = auth_api.CognitoAuth(cloud_mock)
await auth.async_resend_email_confirm("email@home-assistant.io")
assert len(mock_cognito.client.resend_confirmation_code.mock_calls) == 1
async def test_resend_email_confirm_fails(mock_cognito, cloud_mock):
"""Test failure when starting forgot password flow."""
auth = auth_api.CognitoAuth(cloud_mock)
mock_cognito.client.resend_confirmation_code.side_effect = aws_error("SomeError")
with pytest.raises(auth_api.CloudError):
await auth.async_resend_email_confirm("email@home-assistant.io")
async def test_forgot_password(mock_cognito, cloud_mock):
"""Test starting forgot password flow."""
auth = auth_api.CognitoAuth(cloud_mock)
await auth.async_forgot_password("email@home-assistant.io")
assert len(mock_cognito.initiate_forgot_password.mock_calls) == 1
async def test_forgot_password_fails(mock_cognito, cloud_mock):
"""Test failure when starting forgot password flow."""
auth = auth_api.CognitoAuth(cloud_mock)
mock_cognito.initiate_forgot_password.side_effect = aws_error("SomeError")
with pytest.raises(auth_api.CloudError):
await auth.async_forgot_password("email@home-assistant.io")
async def test_check_token_writes_new_token_on_refresh(mock_cognito, cloud_mock):
"""Test check_token writes new token if refreshed."""
auth = auth_api.CognitoAuth(cloud_mock)
mock_cognito.check_token.return_value = True
mock_cognito.id_token = "new id token"
mock_cognito.access_token = "new access token"
await auth.async_check_token()
assert len(mock_cognito.check_token.mock_calls) == 1
assert cloud_mock.id_token == "new id token"
assert cloud_mock.access_token == "new access token"
cloud_mock.update_token.assert_called_once_with("new id token", "new access token")
async def test_check_token_does_not_write_existing_token(mock_cognito, cloud_mock):
"""Test check_token won't write new token if still valid."""
mock_cognito.check_token.return_value = False
auth = auth_api.CognitoAuth(cloud_mock)
await auth.async_check_token()
assert len(mock_cognito.check_token.mock_calls) == 1
assert cloud_mock.id_token != mock_cognito.id_token
assert cloud_mock.access_token != mock_cognito.access_token
assert len(cloud_mock.update_token.mock_calls) == 0
async def test_check_token_raises(mock_cognito, cloud_mock):
"""Test we raise correct error."""
mock_cognito.renew_access_token.side_effect = aws_error("SomeError")
auth = auth_api.CognitoAuth(cloud_mock)
with pytest.raises(auth_api.CloudError):
await auth.async_check_token()
assert len(mock_cognito.check_token.mock_calls) == 2
assert cloud_mock.id_token != mock_cognito.id_token
assert cloud_mock.access_token != mock_cognito.access_token
assert len(cloud_mock.update_token.mock_calls) == 0
async def test_async_setup(cloud_mock):
"""Test async setup."""
auth_api.CognitoAuth(cloud_mock)
assert len(cloud_mock.iot.mock_calls) == 2
on_connect = cloud_mock.iot.mock_calls[0][1][0]
on_disconnect = cloud_mock.iot.mock_calls[1][1][0]
with (
patch("random.randint", return_value=0),
patch("hass_nabucasa.auth.CognitoAuth.async_renew_access_token") as mock_renew,
):
await on_connect()
# Let handle token sleep once
await asyncio.sleep(0)
# Let handle token refresh token
await asyncio.sleep(0)
assert len(mock_renew.mock_calls) == 1
await on_disconnect()
# Make sure task is no longer being called
await asyncio.sleep(0)
await asyncio.sleep(0)
assert len(mock_renew.mock_calls) == 1
@pytest.mark.parametrize(
"auth_mock_kwargs",
(
{"access_token": None},
{"refresh_token": None},
),
)
async def test_guard_no_login_authenticated_cognito(auth_mock_kwargs: dict[str, None]):
"""Test that not authenticated cognito login raises."""
auth = auth_api.CognitoAuth(MagicMock(**auth_mock_kwargs))
with pytest.raises(auth_api.Unauthenticated):
await auth._async_authenticated_cognito()
hass-nabucasa-0.101.0/tests/test_cloud_api.py 0000664 0000000 0000000 00000032211 15011602407 0021067 0 ustar 00root root 0000000 0000000 """Test cloud API."""
from collections.abc import Generator
from typing import Any
from unittest.mock import AsyncMock, MagicMock, patch
from aiohttp import ClientResponseError
import pytest
from hass_nabucasa import cloud_api
from tests.utils.aiohttp import AiohttpClientMocker
async def test_create_cloudhook(auth_cloud_mock, aioclient_mock):
"""Test creating a cloudhook."""
aioclient_mock.post(
"https://example.com/generate",
json={"cloudhook_id": "mock-webhook", "url": "https://blabla"},
)
auth_cloud_mock.id_token = "mock-id-token"
auth_cloud_mock.cloudhook_server = "example.com"
resp = await cloud_api.async_create_cloudhook(auth_cloud_mock)
assert len(aioclient_mock.mock_calls) == 1
assert await resp.json() == {
"cloudhook_id": "mock-webhook",
"url": "https://blabla",
}
async def test_remote_register(auth_cloud_mock, aioclient_mock):
"""Test creating a cloudhook."""
aioclient_mock.post(
"https://example.com/bla/instance/register",
json={
"domain": "test.dui.nabu.casa",
"email": "test@nabucasa.inc",
"server": "rest-remote.nabu.casa",
},
)
auth_cloud_mock.id_token = "mock-id-token"
auth_cloud_mock.servicehandlers_server = "example.com/bla"
resp = await cloud_api.async_remote_register(auth_cloud_mock)
assert len(aioclient_mock.mock_calls) == 1
assert await resp.json() == {
"domain": "test.dui.nabu.casa",
"email": "test@nabucasa.inc",
"server": "rest-remote.nabu.casa",
}
async def test_remote_token(auth_cloud_mock, aioclient_mock):
"""Test creating a cloudhook."""
aioclient_mock.post(
"https://example.com/instance/snitun_token",
json={
"token": "123456",
"server": "rest-remote.nabu.casa",
"valid": 12345,
"throttling": 400,
},
)
auth_cloud_mock.id_token = "mock-id-token"
auth_cloud_mock.servicehandlers_server = "example.com"
resp = await cloud_api.async_remote_token(auth_cloud_mock, b"aes", b"iv")
assert len(aioclient_mock.mock_calls) == 1
assert await resp.json() == {
"token": "123456",
"server": "rest-remote.nabu.casa",
"valid": 12345,
"throttling": 400,
}
assert aioclient_mock.mock_calls[0][2] == {"aes_iv": "6976", "aes_key": "616573"}
async def test_remote_challenge_txt(auth_cloud_mock, aioclient_mock):
"""Test creating a cloudhook."""
aioclient_mock.post("https://example.com/instance/dns_challenge_txt")
auth_cloud_mock.id_token = "mock-id-token"
auth_cloud_mock.servicehandlers_server = "example.com"
await cloud_api.async_remote_challenge_txt(auth_cloud_mock, "123456")
assert len(aioclient_mock.mock_calls) == 1
assert aioclient_mock.mock_calls[0][2] == {"txt": "123456"}
async def test_remote_challenge_cleanup(auth_cloud_mock, aioclient_mock):
"""Test creating a cloudhook."""
aioclient_mock.post("https://example.com/instance/dns_challenge_cleanup")
auth_cloud_mock.id_token = "mock-id-token"
auth_cloud_mock.servicehandlers_server = "example.com"
await cloud_api.async_remote_challenge_cleanup(auth_cloud_mock, "123456")
assert len(aioclient_mock.mock_calls) == 1
assert aioclient_mock.mock_calls[0][2] == {"txt": "123456"}
async def test_get_access_token(auth_cloud_mock, aioclient_mock):
"""Test creating a cloudhook."""
aioclient_mock.post("https://example.com/alexa/access_token")
auth_cloud_mock.id_token = "mock-id-token"
auth_cloud_mock.servicehandlers_server = "example.com"
await cloud_api.async_alexa_access_token(auth_cloud_mock)
assert len(aioclient_mock.mock_calls) == 1
async def test_subscription_info(auth_cloud_mock, aioclient_mock):
"""Test fetching subscription info."""
aioclient_mock.get(
"https://example.com/payments/subscription_info",
json={
"success": True,
"provider": None,
},
)
auth_cloud_mock.id_token = "mock-id-token"
auth_cloud_mock.accounts_server = "example.com"
with patch.object(
auth_cloud_mock.auth,
"async_renew_access_token",
AsyncMock(),
) as mock_renew:
data = await cloud_api.async_subscription_info(auth_cloud_mock)
assert len(aioclient_mock.mock_calls) == 1
assert data == {
"success": True,
"provider": None,
}
auth_cloud_mock.started = False
aioclient_mock.clear_requests()
aioclient_mock.get(
"https://example.com/payments/subscription_info",
json={
"success": True,
"provider": "mock-provider",
},
)
with patch.object(
auth_cloud_mock.auth,
"async_renew_access_token",
AsyncMock(),
) as mock_renew:
data = await cloud_api.async_subscription_info(auth_cloud_mock)
assert len(aioclient_mock.mock_calls) == 1
assert data == {
"success": True,
"provider": "mock-provider",
}
assert len(mock_renew.mock_calls) == 1
async def test_migrate_paypal_agreement(auth_cloud_mock, aioclient_mock):
"""Test a paypal agreement from legacy."""
aioclient_mock.post(
"https://example.com/payments/migrate_paypal_agreement",
json={
"url": "https://example.com/some/path",
},
)
auth_cloud_mock.id_token = "mock-id-token"
auth_cloud_mock.accounts_server = "example.com"
data = await cloud_api.async_migrate_paypal_agreement(auth_cloud_mock)
assert len(aioclient_mock.mock_calls) == 1
assert data == {
"url": "https://example.com/some/path",
}
async def test_async_files_download_details(
auth_cloud_mock: MagicMock,
aioclient_mock: Generator[AiohttpClientMocker, Any, None],
caplog: pytest.LogCaptureFixture,
):
"""Test the async_files_download_details function."""
aioclient_mock.get(
"https://example.com/files/download_details/test/test.txt",
json={
"url": "https://example.com/some/path",
},
)
auth_cloud_mock.id_token = "mock-id-token"
auth_cloud_mock.servicehandlers_server = "example.com"
details = await cloud_api.async_files_download_details(
cloud=auth_cloud_mock,
storage_type="test",
filename="test.txt",
)
assert len(aioclient_mock.mock_calls) == 1
assert details == {
"url": "https://example.com/some/path",
}
assert (
"Fetched https://example.com/files/download_details/test/test.txt (200)"
in caplog.text
)
async def test_async_files_download_details_error(
auth_cloud_mock: MagicMock,
aioclient_mock: Generator[AiohttpClientMocker, Any, None],
caplog: pytest.LogCaptureFixture,
):
"""Test the async_files_download_details function with error."""
aioclient_mock.get(
"https://example.com/files/download_details/test/test.txt",
status=400,
json={"message": "Boom!"},
)
auth_cloud_mock.id_token = "mock-id-token"
auth_cloud_mock.servicehandlers_server = "example.com"
with pytest.raises(ClientResponseError):
await cloud_api.async_files_download_details(
cloud=auth_cloud_mock,
storage_type="test",
filename="test.txt",
)
assert len(aioclient_mock.mock_calls) == 1
assert (
"Fetched https://example.com/files/download_details/test/test.txt (400) Boom!"
in caplog.text
)
async def test_async_files_list(
auth_cloud_mock: MagicMock,
aioclient_mock: Generator[AiohttpClientMocker, Any, None],
caplog: pytest.LogCaptureFixture,
):
"""Test the async_files_list function."""
aioclient_mock.get(
"https://example.com/files/test",
json=[{"Key": "test.txt", "LastModified": "2021-01-01T00:00:00Z", "Size": 2}],
)
auth_cloud_mock.id_token = "mock-id-token"
auth_cloud_mock.servicehandlers_server = "example.com"
details = await cloud_api.async_files_list(
cloud=auth_cloud_mock,
storage_type="test",
)
assert len(aioclient_mock.mock_calls) == 1
assert details == [
{
"Key": "test.txt",
"LastModified": "2021-01-01T00:00:00Z",
"Size": 2,
},
]
assert "Fetched https://example.com/files/test (200)" in caplog.text
async def test_async_files_list_error(
auth_cloud_mock: MagicMock,
aioclient_mock: Generator[AiohttpClientMocker, Any, None],
caplog: pytest.LogCaptureFixture,
):
"""Test the async_files_list function with error listing files."""
aioclient_mock.get(
"https://example.com/files/test",
status=400,
json={"message": "Boom!"},
)
auth_cloud_mock.id_token = "mock-id-token"
auth_cloud_mock.servicehandlers_server = "example.com"
with pytest.raises(ClientResponseError):
await cloud_api.async_files_list(
cloud=auth_cloud_mock,
storage_type="test",
)
assert len(aioclient_mock.mock_calls) == 1
assert "Fetched https://example.com/files/test (400) Boom!" in caplog.text
async def test_async_files_upload_details(
auth_cloud_mock: MagicMock,
aioclient_mock: Generator[AiohttpClientMocker, Any, None],
caplog: pytest.LogCaptureFixture,
):
"""Test the async_files_upload_details function."""
aioclient_mock.get(
"https://example.com/files/upload_details",
json={
"url": "https://example.com/some/path",
"headers": {"key": "value"},
},
)
auth_cloud_mock.id_token = "mock-id-token"
auth_cloud_mock.servicehandlers_server = "example.com"
base64md5hash = "dGVzdA=="
details = await cloud_api.async_files_upload_details(
cloud=auth_cloud_mock,
storage_type="test",
filename="test.txt",
base64md5hash=base64md5hash,
size=2,
metadata={"homeassistant_version": "1970.1.1"},
)
assert len(aioclient_mock.mock_calls) == 1
# 2 is the body
assert aioclient_mock.mock_calls[0][2] == {
"filename": "test.txt",
"storage_type": "test",
"metadata": {"homeassistant_version": "1970.1.1"},
"md5": base64md5hash,
"size": 2,
}
assert details == {
"url": "https://example.com/some/path",
"headers": {"key": "value"},
}
assert "Fetched https://example.com/files/upload_details (200)" in caplog.text
async def test_async_files_upload_details_error(
auth_cloud_mock: MagicMock,
aioclient_mock: Generator[AiohttpClientMocker, Any, None],
caplog: pytest.LogCaptureFixture,
):
"""Test the async_files_upload_details function with error generating upload URL."""
aioclient_mock.get(
"https://example.com/files/upload_details",
status=400,
json={"message": "Boom!"},
)
auth_cloud_mock.id_token = "mock-id-token"
auth_cloud_mock.servicehandlers_server = "example.com"
base64md5hash = "dGVzdA=="
with pytest.raises(ClientResponseError):
await cloud_api.async_files_upload_details(
cloud=auth_cloud_mock,
storage_type="test",
filename="test.txt",
base64md5hash=base64md5hash,
size=2,
)
assert len(aioclient_mock.mock_calls) == 1
# 2 is the body
assert aioclient_mock.mock_calls[0][2] == {
"filename": "test.txt",
"storage_type": "test",
"md5": base64md5hash,
"size": 2,
"metadata": None,
}
assert "Fetched https://example.com/files/upload_details (400) Boom!" in caplog.text
async def test_async_files_delete_file(
auth_cloud_mock: MagicMock,
aioclient_mock: Generator[AiohttpClientMocker, Any, None],
caplog: pytest.LogCaptureFixture,
):
"""Test the async_files_delete_file function."""
aioclient_mock.delete(
"https://example.com/files",
)
auth_cloud_mock.id_token = "mock-id-token"
auth_cloud_mock.servicehandlers_server = "example.com"
await cloud_api.async_files_delete_file(
cloud=auth_cloud_mock,
storage_type="test",
filename="test.txt",
)
assert len(aioclient_mock.mock_calls) == 1
# 2 is the body
assert aioclient_mock.mock_calls[0][2] == {
"filename": "test.txt",
"storage_type": "test",
}
assert "Fetched https://example.com/files (200)" in caplog.text
async def test_async_files_delete_file_error(
auth_cloud_mock: MagicMock,
aioclient_mock: Generator[AiohttpClientMocker, Any, None],
caplog: pytest.LogCaptureFixture,
):
"""Test the async_files_delete_file function with error."""
aioclient_mock.delete(
"https://example.com/files",
status=400,
json={"message": "Boom!"},
)
auth_cloud_mock.id_token = "mock-id-token"
auth_cloud_mock.servicehandlers_server = "example.com"
with pytest.raises(ClientResponseError):
await cloud_api.async_files_delete_file(
cloud=auth_cloud_mock,
storage_type="test",
filename="test.txt",
)
assert len(aioclient_mock.mock_calls) == 1
# 2 is the body
assert aioclient_mock.mock_calls[0][2] == {
"filename": "test.txt",
"storage_type": "test",
}
assert "Fetched https://example.com/files (400) Boom!" in caplog.text
hass-nabucasa-0.101.0/tests/test_cloudhooks.py 0000664 0000000 0000000 00000005565 15011602407 0021316 0 ustar 00root root 0000000 0000000 """Test cloud cloudhooks."""
from unittest.mock import AsyncMock, Mock
import pytest
from hass_nabucasa import cloudhooks
@pytest.fixture
def mock_cloudhooks(auth_cloud_mock):
"""Mock cloudhooks class."""
auth_cloud_mock.run_executor = AsyncMock()
auth_cloud_mock.iot = Mock(async_send_message=AsyncMock())
auth_cloud_mock.cloudhook_server = "webhook-create.url"
return cloudhooks.Cloudhooks(auth_cloud_mock)
async def test_enable(mock_cloudhooks, aioclient_mock):
"""Test enabling cloudhooks."""
aioclient_mock.post(
"https://webhook-create.url/generate",
json={
"cloudhook_id": "mock-cloud-id",
"url": "https://hooks.nabu.casa/ZXCZCXZ",
},
)
hook = {
"webhook_id": "mock-webhook-id",
"cloudhook_id": "mock-cloud-id",
"cloudhook_url": "https://hooks.nabu.casa/ZXCZCXZ",
"managed": False,
}
assert hook == await mock_cloudhooks.async_create("mock-webhook-id", False)
assert mock_cloudhooks.cloud.client.cloudhooks == {"mock-webhook-id": hook}
publish_calls = mock_cloudhooks.cloud.iot.async_send_message.mock_calls
assert len(publish_calls) == 1
assert publish_calls[0][1][0] == "webhook-register"
assert publish_calls[0][1][1] == {"cloudhook_ids": ["mock-cloud-id"]}
async def test_disable(mock_cloudhooks):
"""Test disabling cloudhooks."""
mock_cloudhooks.cloud.client._cloudhooks = {
"mock-webhook-id": {
"webhook_id": "mock-webhook-id",
"cloudhook_id": "mock-cloud-id",
"cloudhook_url": "https://hooks.nabu.casa/ZXCZCXZ",
},
}
await mock_cloudhooks.async_delete("mock-webhook-id")
assert mock_cloudhooks.cloud.client.cloudhooks == {}
publish_calls = mock_cloudhooks.cloud.iot.async_send_message.mock_calls
assert len(publish_calls) == 1
assert publish_calls[0][1][0] == "webhook-register"
assert publish_calls[0][1][1] == {"cloudhook_ids": []}
async def test_create_without_connected(mock_cloudhooks, aioclient_mock):
"""Test we don't publish a hook if not connected."""
mock_cloudhooks.cloud.is_connected = False
# Make sure we fail test when we send a message.
mock_cloudhooks.cloud.iot.async_send_message.side_effect = ValueError
aioclient_mock.post(
"https://webhook-create.url/generate",
json={
"cloudhook_id": "mock-cloud-id",
"url": "https://hooks.nabu.casa/ZXCZCXZ",
},
)
hook = {
"webhook_id": "mock-webhook-id",
"cloudhook_id": "mock-cloud-id",
"cloudhook_url": "https://hooks.nabu.casa/ZXCZCXZ",
"managed": True,
}
assert hook == await mock_cloudhooks.async_create("mock-webhook-id", True)
assert mock_cloudhooks.cloud.client.cloudhooks == {"mock-webhook-id": hook}
assert len(mock_cloudhooks.cloud.iot.async_send_message.mock_calls) == 0
hass-nabucasa-0.101.0/tests/test_files.py 0000664 0000000 0000000 00000041720 15011602407 0020237 0 ustar 00root root 0000000 0000000 """Tests for Files."""
from collections.abc import AsyncIterator, Iterable
import re
from typing import Any
from unittest.mock import AsyncMock
from aiohttp import ClientError
import pytest
from hass_nabucasa import Cloud
from hass_nabucasa.api import CloudApiNonRetryableError
from hass_nabucasa.files import Files, FilesError, calculate_b64md5
from tests.utils.aiohttp import AiohttpClientMocker
API_HOSTNAME = "example.com"
FILES_API_URL = "https://files.api.fakeurl/path?X-Amz-Algorithm=blah"
STORED_BACKUP = {
"Key": "backup.tar",
"Size": 1024,
"LastModified": "2021-07-01T12:00:00Z",
"Metadata": {"beer": "me"},
}
@pytest.fixture(autouse=True)
def set_hostname(auth_cloud_mock: Cloud):
"""Set API hostname for the mock cloud service."""
auth_cloud_mock.servicehandlers_server = API_HOSTNAME
@pytest.mark.parametrize(
"exception,msg",
[
[TimeoutError, "Timeout reached while calling API"],
[ClientError, "Failed to fetch"],
[Exception, "Unexpected error while calling API"],
],
)
async def test_upload_exceptions_while_getting_details(
aioclient_mock: AiohttpClientMocker,
auth_cloud_mock: Cloud,
exception: Exception,
msg: str,
):
"""Test handling exceptions when fetching upload details."""
files = Files(auth_cloud_mock)
aioclient_mock.get(
f"https://{API_HOSTNAME}/files/upload_details",
exc=exception("Boom!"),
)
with pytest.raises(FilesError, match=msg):
await files.upload(
storage_type="test",
open_stream=AsyncMock(),
filename="lorem.ipsum",
base64md5hash="hash",
size=1337,
metadata={"awesome": True},
)
@pytest.mark.parametrize(
"putmockargs,msg",
[
[{"exc": TimeoutError("Boom!")}, "Timeout reached while calling API"],
[{"exc": ClientError("Boom!")}, "Failed to fetch: Boom!"],
[{"exc": Exception("Boom!")}, "Unexpected error while calling API: Boom!"],
[{"status": 400}, "Failed to upload: (400) "],
[
{"status": 400, "text": "Unknown error structure"},
"Failed to upload: (400) Unknown error structure",
],
[
{
"status": 400,
"text": "Pretty error\nWith a linebreak",
},
"Failed to upload: (400) Pretty error With a linebreak",
],
[
{
"status": 400,
"text": "What is this?",
},
"Failed to upload: (400) What is this?",
],
[
{
"status": 400,
"text": f"{'a' * 512}",
},
f"Failed to upload: (400) {'a' * 256}",
],
[
{
"status": 403,
"text": "Pretty error\nWith a linebreak",
},
"Failed to upload: (403) Pretty error With a linebreak",
],
[
{
"status": 500,
"text": "Pretty error\nWith a linebreak",
},
"Failed to upload: (500) ",
],
],
)
async def test_upload_exceptions_while_uploading(
aioclient_mock: AiohttpClientMocker,
auth_cloud_mock: Cloud,
putmockargs: dict[str, Any],
msg: str,
):
"""Test handling exceptions during file upload."""
files = Files(auth_cloud_mock)
aioclient_mock.get(
f"https://{API_HOSTNAME}/files/upload_details",
json={"url": FILES_API_URL, "headers": {}},
)
aioclient_mock.put(FILES_API_URL, **putmockargs)
with pytest.raises(FilesError, match=f"^{re.escape(msg)}$"):
await files.upload(
storage_type="test",
open_stream=AsyncMock(),
filename="lorem.ipsum",
base64md5hash="hash",
size=1337,
metadata={"awesome": True},
)
@pytest.mark.parametrize(
"exception,getmockargs,log_msg",
[
[
FilesError,
{"status": 400, "json": {"message": "NC-CE-01"}},
"Response for get from example.com/files/upload_details (400) NC-CE-01",
],
[
CloudApiNonRetryableError,
{"status": 400, "json": {"message": "NC-CE-03"}},
"Response for get from example.com/files/upload_details (400) NC-CE-03",
],
[
FilesError,
{"status": 500, "text": "Internal Server Error"},
"Response for get from example.com/files/upload_details (500)",
],
],
)
async def test_upload_bad_status_while_getting_upload_details(
aioclient_mock: AiohttpClientMocker,
auth_cloud_mock: Cloud,
exception: Exception,
getmockargs: dict[str, Any],
log_msg: str,
caplog: pytest.LogCaptureFixture,
):
"""Test handling bad status codes when fetching upload details."""
files = Files(auth_cloud_mock)
aioclient_mock.get(
f"https://{API_HOSTNAME}/files/upload_details",
**getmockargs,
)
with pytest.raises(exception):
await files.upload(
storage_type="test",
open_stream=AsyncMock(),
filename="lorem.ipsum",
base64md5hash="hash",
size=1337,
metadata={"awesome": True},
)
assert log_msg in caplog.text
async def test_upload_returning_403_and_expired_subscription(
aioclient_mock: AiohttpClientMocker,
auth_cloud_mock: Cloud,
caplog: pytest.LogCaptureFixture,
):
"""Test handling 403 when the subscription is expired."""
auth_cloud_mock.subscription_expired = True
files = Files(auth_cloud_mock)
aioclient_mock.get(
f"https://{API_HOSTNAME}/files/upload_details",
status=403,
json={"message": "Forbidden"},
)
with pytest.raises(CloudApiNonRetryableError, match="Subscription has expired"):
await files.upload(
storage_type="test",
open_stream=AsyncMock(),
filename="lorem.ipsum",
base64md5hash="hash",
size=1337,
metadata={"awesome": True},
)
assert (
"Response for get from example.com/files/upload_details (403) Forbidden"
in caplog.text
)
@pytest.mark.parametrize(
"exception,putmockargs,log_msg",
[
[
FilesError,
{"status": 400, "json": {"message": "Oh no!"}},
"Response for put from files.api.fakeurl (400)",
],
[
FilesError,
{"status": 500, "text": "Internal Server Error"},
"Response for put from files.api.fakeurl (500)",
],
],
)
async def test_upload_bad_status_while_uploading(
aioclient_mock: AiohttpClientMocker,
auth_cloud_mock: Cloud,
exception: Exception,
putmockargs: dict[str, Any],
log_msg: str,
caplog: pytest.LogCaptureFixture,
):
"""Test handling bad status codes during file upload."""
files = Files(auth_cloud_mock)
aioclient_mock.get(
f"https://{API_HOSTNAME}/files/upload_details",
json={"url": FILES_API_URL, "headers": {}},
)
aioclient_mock.put(FILES_API_URL, **putmockargs)
with pytest.raises(exception):
await files.upload(
storage_type="test",
open_stream=AsyncMock(),
filename="lorem.ipsum",
base64md5hash="hash",
size=1337,
metadata={"awesome": True},
)
assert log_msg in caplog.text
async def test_upload(
aioclient_mock: AiohttpClientMocker,
auth_cloud_mock: Cloud,
caplog: pytest.LogCaptureFixture,
):
"""Test successful file upload."""
files = Files(auth_cloud_mock)
aioclient_mock.get(
f"https://{API_HOSTNAME}/files/upload_details",
json={"url": FILES_API_URL, "headers": {}},
)
aioclient_mock.put(FILES_API_URL, status=200)
await files.upload(
storage_type="test",
open_stream=AsyncMock(),
filename="lorem.ipsum",
base64md5hash="hash",
size=1337,
metadata={"awesome": True},
)
assert "Uploading test file with name lorem.ipsum" in caplog.text
assert "Response for get from example.com/files/upload_details (200)" in caplog.text
assert "Response for put from files.api.fakeurl (200)" in caplog.text
@pytest.mark.parametrize(
"exception,msg",
[
[TimeoutError, "Timeout reached while calling API"],
[ClientError, "Failed to fetch"],
[Exception, "Unexpected error while calling API"],
],
)
async def test_download_exceptions_while_getting_details(
aioclient_mock: AiohttpClientMocker,
auth_cloud_mock: Cloud,
exception: Exception,
msg: str,
):
"""Test handling exceptions when fetching download details."""
files = Files(auth_cloud_mock)
aioclient_mock.get(
f"https://{API_HOSTNAME}/files/download_details/test/lorem.ipsum",
exc=exception("Boom!"),
)
with pytest.raises(FilesError, match=msg):
await files.download(
storage_type="test",
filename="lorem.ipsum",
)
@pytest.mark.parametrize(
"exception,msg",
[
[TimeoutError, "Timeout reached while calling API"],
[ClientError, "Failed to fetch"],
[Exception, "Unexpected error while calling API"],
],
)
async def test_download_exceptions_while_downloading(
aioclient_mock: AiohttpClientMocker,
auth_cloud_mock: Cloud,
exception: Exception,
msg: str,
):
"""Test handling exceptions during file download."""
files = Files(auth_cloud_mock)
aioclient_mock.get(
f"https://{API_HOSTNAME}/files/download_details/test/lorem.ipsum",
json={"url": FILES_API_URL},
)
aioclient_mock.get(FILES_API_URL, exc=exception("Boom!"))
with pytest.raises(FilesError, match=msg):
await files.download(
storage_type="test",
filename="lorem.ipsum",
)
@pytest.mark.parametrize(
"exception,getmockargs,log_msg",
[
[
CloudApiNonRetryableError,
{"status": 400, "json": {"message": "NC-SH-FH-03 (abc-123)"}},
"Response for get from example.com/files/download_details/test/lorem.ipsum "
"(400) NC-SH-FH-03 (abc-123)",
],
[
CloudApiNonRetryableError,
{"status": 400, "json": {"message": "NC-CE-03"}},
"Response for get from example.com/files/download_details/test/lorem.ipsum "
"(400) NC-CE-03",
],
[
FilesError,
{"status": 400, "json": {"message": "NC-CE-01"}},
"Response for get from example.com/files/download_details/test/lorem.ipsum "
"(400) NC-CE-01",
],
[
FilesError,
{"status": 500, "text": "Internal Server Error"},
"Response for get from example.com/files/download_details/test/lorem.ipsum"
" (500)",
],
],
)
async def test_download_bad_status_while_getting_download_details(
aioclient_mock: AiohttpClientMocker,
auth_cloud_mock: Cloud,
exception: Exception,
getmockargs: dict[str, Any],
log_msg: str,
caplog: pytest.LogCaptureFixture,
):
"""Test handling bad status codes when fetching download details."""
files = Files(auth_cloud_mock)
aioclient_mock.get(
f"https://{API_HOSTNAME}/files/download_details/test/lorem.ipsum",
**getmockargs,
)
with pytest.raises(exception):
await files.download(
storage_type="test",
filename="lorem.ipsum",
)
assert log_msg in caplog.text
@pytest.mark.parametrize(
"exception,getmockargs,log_msg",
[
[
FilesError,
{"status": 400, "json": {"message": "Oh no!"}},
"Response for get from files.api.fakeurl (400)",
],
[
FilesError,
{"status": 500, "text": "Internal Server Error"},
"Response for get from files.api.fakeurl (500)",
],
],
)
async def test_download_bad_status_while_downloading(
aioclient_mock: AiohttpClientMocker,
auth_cloud_mock: Cloud,
exception: Exception,
getmockargs: dict[str, Any],
log_msg: str,
caplog: pytest.LogCaptureFixture,
):
"""Test handling bad status codes during file download."""
files = Files(auth_cloud_mock)
aioclient_mock.get(
f"https://{API_HOSTNAME}/files/download_details/test/lorem.ipsum",
json={"url": FILES_API_URL},
)
aioclient_mock.get(FILES_API_URL, **getmockargs)
with pytest.raises(exception):
await files.download(
storage_type="test",
filename="lorem.ipsum",
)
assert log_msg in caplog.text
async def test_download(
aioclient_mock: AiohttpClientMocker,
auth_cloud_mock: Cloud,
caplog: pytest.LogCaptureFixture,
):
"""Test successful file download."""
files = Files(auth_cloud_mock)
aioclient_mock.get(
f"https://{API_HOSTNAME}/files/download_details/test/lorem.ipsum",
json={"url": FILES_API_URL},
)
aioclient_mock.get(FILES_API_URL, status=200)
await files.download(
storage_type="test",
filename="lorem.ipsum",
)
assert "Downloading test file with name lorem.ipsum" in caplog.text
assert len(aioclient_mock.mock_calls) == 2
assert (
"Response for get from example.com/files/download_details/test/lorem.ipsum "
"(200)" in caplog.text
)
assert "Response for get from files.api.fakeurl (200)" in caplog.text
async def test_list(
aioclient_mock: AiohttpClientMocker,
auth_cloud_mock: Cloud,
caplog: pytest.LogCaptureFixture,
):
"""Test listing files."""
files = Files(auth_cloud_mock)
aioclient_mock.get(
f"https://{API_HOSTNAME}/files/test",
json=[STORED_BACKUP],
)
stored_files = await files.list(storage_type="test")
assert stored_files[0] == STORED_BACKUP
assert len(aioclient_mock.mock_calls) == 1
assert "Listing test files" in caplog.text
assert "Response for get from example.com/files/test (200)" in caplog.text
@pytest.mark.parametrize(
"exception,msg",
[
[TimeoutError, "Timeout reached while calling API"],
[ClientError, "Failed to fetch"],
[Exception, "Unexpected error while calling API"],
],
)
async def test_exceptions_while_listing(
aioclient_mock: AiohttpClientMocker,
auth_cloud_mock: Cloud,
exception: Exception,
msg: str,
):
"""Test handling exceptions during file download."""
files = Files(auth_cloud_mock)
aioclient_mock.get(f"https://{API_HOSTNAME}/files/test", exc=exception("Boom!"))
with pytest.raises(FilesError, match=msg):
await files.list(storage_type="test")
assert len(aioclient_mock.mock_calls) == 1
async def test_delete(
aioclient_mock: AiohttpClientMocker,
auth_cloud_mock: Cloud,
caplog: pytest.LogCaptureFixture,
):
"""Test listing files."""
files = Files(auth_cloud_mock)
aioclient_mock.delete(f"https://{API_HOSTNAME}/files")
await files.delete(storage_type="test", filename="lorem.ipsum")
assert len(aioclient_mock.mock_calls) == 1
assert aioclient_mock.mock_calls[0][2] == {
"filename": "lorem.ipsum",
"storage_type": "test",
}
assert "Deleting test file with name lorem.ipsum" in caplog.text
assert "Response for delete from example.com/files (200)" in caplog.text
@pytest.mark.parametrize(
"exception_msg,deletemockargs,log_msg",
[
[
"Failed to fetch: (400) ",
{"status": 400, "json": {"message": "NC-CE-01"}},
"Response for delete from example.com/files (400) NC-CE-01",
],
[
"Failed to fetch: (500) ",
{"status": 500, "text": "Internal Server Error"},
"Response for delete from example.com/files (500)",
],
],
)
async def test_exceptions_while_deleting(
aioclient_mock: AiohttpClientMocker,
auth_cloud_mock: Cloud,
exception_msg: str,
log_msg: str,
deletemockargs: dict[str, Any],
caplog: pytest.LogCaptureFixture,
):
"""Test handling exceptions during file download."""
files = Files(auth_cloud_mock)
aioclient_mock.delete(f"https://{API_HOSTNAME}/files", **deletemockargs)
with pytest.raises(FilesError, match=re.escape(exception_msg)):
await files.delete(storage_type="test", filename="lorem.ipsum")
assert len(aioclient_mock.mock_calls) == 1
assert log_msg in caplog.text
async def aiter_from_iter(iterable: Iterable) -> AsyncIterator:
"""Convert an iterable to an async iterator."""
for i in iterable:
yield i
async def test_calculate_b64md5():
"""Test calculating base64 md5 hash."""
async def open_stream() -> AsyncIterator[bytes]:
"""Mock open stream."""
return aiter_from_iter((b"backup", b"data"))
assert await calculate_b64md5(open_stream, 10) == "p17gbFrsI2suQNBhkdO1Gw=="
with pytest.raises(
FilesError,
match="Indicated size 9 does not match actual size 10",
):
await calculate_b64md5(open_stream, 9)
hass-nabucasa-0.101.0/tests/test_google_report_state.py 0000664 0000000 0000000 00000007034 15011602407 0023204 0 ustar 00root root 0000000 0000000 """Tests for Google Report State."""
import asyncio
from unittest.mock import AsyncMock, Mock, patch
from hass_nabucasa import iot_base
from hass_nabucasa.google_report_state import ErrorResponse, GoogleReportState
from .common import MockClient
async def create_grs(ws_server, server_msg_handler) -> GoogleReportState:
"""Create a grs instance."""
client = await ws_server(server_msg_handler)
mock_cloud = Mock(
subscription_expired=False,
remotestate_server="mock-report-state-url.com",
auth=Mock(async_check_token=AsyncMock()),
websession=Mock(ws_connect=AsyncMock(return_value=client)),
client=Mock(spec_set=MockClient),
)
return GoogleReportState(mock_cloud)
async def test_ws_server_url():
"""Test generating ws server url."""
assert (
GoogleReportState(Mock(remotestate_server="example.com")).ws_server_url
== "wss://example.com/v1"
)
async def test_send_messages(ws_server):
"""Test that we connect if we are not connected."""
server_msgs = []
async def handle_server_msg(msg):
"""Handle a server msg."""
incoming = msg.json()
server_msgs.append(incoming["payload"])
# First msg is ok
if incoming["payload"]["hello"] == 0:
return {"msgid": incoming["msgid"], "payload": "mock-response"}
# Second msg is error
return {
"msgid": incoming["msgid"],
"error": "mock-code",
"message": "mock-message",
}
grs = await create_grs(ws_server, handle_server_msg)
assert grs.state == iot_base.STATE_DISCONNECTED
# Test we can handle two simultaneous messages while disconnected
responses = await asyncio.gather(
*[grs.async_send_message({"hello": 0}), grs.async_send_message({"hello": 1})],
return_exceptions=True,
)
assert grs.state == iot_base.STATE_CONNECTED
assert len(responses) == 2
assert responses[0] == "mock-response"
assert isinstance(responses[1], ErrorResponse)
assert responses[1].code == "mock-code"
assert responses[1].message == "mock-message"
assert sorted(server_msgs, key=lambda val: val["hello"]) == [
{"hello": 0},
{"hello": 1},
]
await grs.disconnect()
assert grs.state == iot_base.STATE_DISCONNECTED
assert grs._message_sender_task is None
async def test_max_queue_message(ws_server):
"""Test that we connect if we are not connected."""
server_msgs = []
async def handle_server_msg(msg):
"""Handle a server msg."""
incoming = msg.json()
server_msgs.append(incoming["payload"])
return {"msgid": incoming["msgid"], "payload": incoming["payload"]["hello"]}
grs = await create_grs(ws_server, handle_server_msg)
# Test we can handle sending more messages than queue fits
with patch.object(grs, "_async_message_sender"):
gather_task = asyncio.gather(
*[grs.async_send_message({"hello": i}) for i in range(150)],
return_exceptions=True,
)
# One per message
for _ in range(150):
await asyncio.sleep(0)
# Start handling messages.
await grs._async_on_connect()
# One per message
for _ in range(150):
await asyncio.sleep(0)
assert len(server_msgs) == 100
results = await gather_task
assert len(results) == 150
assert sum(isinstance(result, ErrorResponse) for result in results) == 50
await grs.disconnect()
assert grs.state == iot_base.STATE_DISCONNECTED
assert grs._message_sender_task is None
hass-nabucasa-0.101.0/tests/test_ice_servers.py 0000664 0000000 0000000 00000017755 15011602407 0021461 0 ustar 00root root 0000000 0000000 """Test the ICE servers module."""
import asyncio
import time
import pytest
from webrtc_models import RTCIceServer
from hass_nabucasa import ice_servers
from tests.utils.aiohttp import AiohttpClientMocker
@pytest.fixture
def ice_servers_api(auth_cloud_mock) -> ice_servers.IceServers:
"""ICE servers API fixture."""
auth_cloud_mock.servicehandlers_server = "example.com/test"
auth_cloud_mock.id_token = "mock-id-token"
return ice_servers.IceServers(auth_cloud_mock)
@pytest.fixture
def mock_ice_servers(aioclient_mock: AiohttpClientMocker):
"""Mock ICE servers."""
aioclient_mock.get(
"https://example.com/test/webrtc/ice_servers",
json=[
{
"urls": "turn:example.com:80",
"username": "12345678:test-user",
"credential": "secret-value",
},
],
)
async def test_ice_servers_listener_registration_triggers_periodic_ice_servers_update(
ice_servers_api: ice_servers.IceServers,
mock_ice_servers,
):
"""Test that registering an ICE servers listener triggers a periodic update."""
times_register_called_successfully = 0
ice_servers_api._get_refresh_sleep_time = lambda: 0
async def register_ice_servers(ice_servers: list[RTCIceServer]):
nonlocal times_register_called_successfully
# These asserts will silently fail and variable will not be incremented
assert len(ice_servers) == 1
assert ice_servers[0].urls == "turn:example.com:80"
assert ice_servers[0].username == "12345678:test-user"
assert ice_servers[0].credential == "secret-value"
times_register_called_successfully += 1
def unregister():
pass
return unregister
unregister = await ice_servers_api.async_register_ice_servers_listener(
register_ice_servers,
)
# Let the periodic update run once
await asyncio.sleep(0)
# Let the periodic update run again
await asyncio.sleep(0)
assert times_register_called_successfully == 2
unregister()
# The periodic update should not run again
await asyncio.sleep(0)
assert times_register_called_successfully == 2
assert ice_servers_api._refresh_task is None
assert ice_servers_api._ice_servers == []
assert ice_servers_api._ice_servers_listener is None
assert ice_servers_api._ice_servers_listener_unregister is None
async def test_ice_server_refresh_sets_ice_server_list_empty_on_expired_subscription(
ice_servers_api: ice_servers.IceServers,
aioclient_mock: AiohttpClientMocker,
):
"""Test that the ICE server list is set to empty when the subscription expires."""
times_register_called_successfully = 0
ice_servers_api._get_refresh_sleep_time = lambda: 0
ice_servers_api.cloud.subscription_expired = True
async def register_ice_servers(ice_servers: list[RTCIceServer]):
nonlocal times_register_called_successfully
# This assert will silently fail and variable will not be incremented
assert len(ice_servers) == 0
times_register_called_successfully += 1
def unregister():
pass
return unregister
await ice_servers_api.async_register_ice_servers_listener(register_ice_servers)
# Let the periodic update run once
await asyncio.sleep(0)
assert ice_servers_api._ice_servers == []
assert len(aioclient_mock.mock_calls) == 0
assert times_register_called_successfully == 1
assert ice_servers_api._refresh_task is not None
assert ice_servers_api._ice_servers_listener is not None
assert ice_servers_api._ice_servers_listener_unregister is not None
async def test_ice_server_refresh_sets_ice_server_list_empty_on_401_403_client_error(
ice_servers_api: ice_servers.IceServers,
aioclient_mock: AiohttpClientMocker,
):
"""Test that ICE server list is empty when server returns 401 or 403 errors."""
aioclient_mock.get(
"https://example.com/test/webrtc/ice_servers",
status=403,
json={"message": "Boom!"},
)
times_register_called_successfully = 0
ice_servers_api._get_refresh_sleep_time = lambda: 0
ice_servers_api._ice_servers = [
RTCIceServer(
urls="turn:example.com:80",
username="12345678:test-user",
credential="secret-value",
),
]
async def register_ice_servers(ice_servers: list[RTCIceServer]):
nonlocal times_register_called_successfully
# This assert will silently fail and variable will not be incremented
assert len(ice_servers) == 0
times_register_called_successfully += 1
def unregister():
pass
return unregister
await ice_servers_api.async_register_ice_servers_listener(register_ice_servers)
# Let the periodic update run once
await asyncio.sleep(0)
assert ice_servers_api._ice_servers == []
assert times_register_called_successfully == 1
assert ice_servers_api._refresh_task is not None
assert ice_servers_api._ice_servers_listener is not None
assert ice_servers_api._ice_servers_listener_unregister is not None
async def test_ice_server_refresh_keeps_ice_server_list_on_other_client_errors(
ice_servers_api: ice_servers.IceServers,
aioclient_mock,
):
"""Test that ICE server list is not set to empty when server returns an error."""
aioclient_mock.get(
"https://example.com/test/webrtc/ice_servers",
status=500,
json={"message": "Boom!"},
)
times_register_called_successfully = 0
ice_servers_api._get_refresh_sleep_time = lambda: 0
ice_servers_api._ice_servers = [
RTCIceServer(
urls="turn:example.com:80",
username="12345678:test-user",
credential="secret-value",
),
]
async def register_ice_servers(ice_servers: list[RTCIceServer]):
nonlocal times_register_called_successfully
# These asserts will silently fail and variable will not be incremented
assert len(ice_servers) == 1
assert ice_servers[0].urls == "turn:example.com:80"
assert ice_servers[0].username == "12345678:test-user"
assert ice_servers[0].credential == "secret-value"
times_register_called_successfully += 1
def unregister():
pass
return unregister
await ice_servers_api.async_register_ice_servers_listener(register_ice_servers)
# Let the periodic update run once
await asyncio.sleep(0)
assert ice_servers_api._ice_servers != []
assert times_register_called_successfully == 1
assert ice_servers_api._refresh_task is not None
assert ice_servers_api._ice_servers_listener is not None
assert ice_servers_api._ice_servers_listener_unregister is not None
def test_get_refresh_sleep_time(ice_servers_api: ice_servers.IceServers):
"""Test get refresh sleep time."""
min_timestamp = 8888888888
ice_servers_api._ice_servers = [
RTCIceServer(urls="turn:example.com:80", username="9999999999:test-user"),
RTCIceServer(
urls="turn:example.com:80",
username=f"{min_timestamp!s}:test-user",
),
]
assert (
ice_servers_api._get_refresh_sleep_time()
== min_timestamp - int(time.time()) - 3600
)
def test_get_refresh_sleep_time_no_turn_servers(
ice_servers_api: ice_servers.IceServers,
):
"""Test get refresh sleep time."""
refresh_time = ice_servers_api._get_refresh_sleep_time()
assert refresh_time >= 3600
assert refresh_time <= 43200
def test_get_refresh_sleep_time_expiration_less_than_one_hour(
ice_servers_api: ice_servers.IceServers,
):
"""Test get refresh sleep time."""
min_timestamp = 10
ice_servers_api._ice_servers = [
RTCIceServer(urls="turn:example.com:80", username="12345678:test-user"),
RTCIceServer(
urls="turn:example.com:80",
username=f"{min_timestamp!s}:test-user",
),
]
refresh_time = ice_servers_api._get_refresh_sleep_time()
assert refresh_time >= 100
assert refresh_time <= 300
hass-nabucasa-0.101.0/tests/test_init.py 0000664 0000000 0000000 00000035505 15011602407 0020104 0 ustar 00root root 0000000 0000000 """Test the cloud component."""
import asyncio
from datetime import timedelta
import json
from unittest.mock import AsyncMock, MagicMock, Mock, PropertyMock, patch
import pytest
import hass_nabucasa as cloud
from hass_nabucasa.const import SubscriptionReconnectionReason
from hass_nabucasa.utils import utcnow
from .common import MockClient
@pytest.fixture(autouse=True)
def mock_subscription_info(aioclient_mock):
"""Mock subscription info."""
aioclient_mock.get(
"https://example.com/payments/subscription_info",
json={
"success": True,
"billing_plan_type": "mock-plan",
},
)
@pytest.fixture
def cl(cloud_client) -> cloud.Cloud:
"""Mock cloud client."""
return cloud.Cloud(cloud_client, cloud.MODE_DEV, accounts_server="example.com")
def test_constructor_loads_info_from_constant(cloud_client):
"""Test non-dev mode loads info from SERVERS constant."""
with (
patch.dict(
cloud.DEFAULT_VALUES,
{
"beer": {
"cognito_client_id": "test-cognito_client_id",
"user_pool_id": "test-user_pool_id",
"region": "test-region",
},
},
),
patch.dict(
cloud.DEFAULT_SERVERS,
{
"beer": {
"relayer": "test-relayer",
"accounts": "test-subscription-info-url",
"cloudhook": "test-cloudhook_server",
"acme": "test-acme-directory-server",
"remotestate": "test-google-actions-report-state-url",
"account_link": "test-account-link-url",
"servicehandlers": "test-servicehandlers-url",
},
},
),
):
cl = cloud.Cloud(cloud_client, "beer")
assert cl.mode == "beer"
assert cl.cognito_client_id == "test-cognito_client_id"
assert cl.user_pool_id == "test-user_pool_id"
assert cl.region == "test-region"
assert cl.relayer_server == "test-relayer"
assert cl.accounts_server == "test-subscription-info-url"
assert cl.cloudhook_server == "test-cloudhook_server"
assert cl.acme_server == "test-acme-directory-server"
assert cl.remotestate_server == "test-google-actions-report-state-url"
assert cl.account_link_server == "test-account-link-url"
async def test_initialize_loads_info(cl: cloud.Cloud) -> None:
"""Test initialize will load info from config file.
Also tests that on_initialized callbacks are called when initialization finishes.
"""
assert len(cl._on_start) == 2
cl._on_start.clear()
assert len(cl._on_stop) == 3
cl._on_stop.clear()
info_file = MagicMock(
read_text=Mock(
return_value=json.dumps(
{
"id_token": "test-id-token",
"access_token": "test-access-token",
"refresh_token": "test-refresh-token",
},
),
),
exists=Mock(return_value=True),
)
cl.iot = MagicMock()
cl.iot.connect = AsyncMock()
cl.remote = MagicMock()
cl.remote.connect = AsyncMock()
start_done_event = asyncio.Event()
async def start_done():
start_done_event.set()
cl._on_start.extend([cl.iot.connect, cl.remote.connect])
cl.register_on_initialized(start_done)
with (
patch(
"hass_nabucasa.Cloud._decode_claims",
return_value={"custom:sub-exp": "2080-01-01"},
),
patch(
"hass_nabucasa.Cloud.user_info_path",
new_callable=PropertyMock(return_value=info_file),
),
patch("hass_nabucasa.auth.CognitoAuth.async_check_token"),
):
await cl.initialize()
await start_done_event.wait()
assert cl.id_token == "test-id-token"
assert cl.access_token == "test-access-token"
assert cl.refresh_token == "test-refresh-token"
assert len(cl.iot.connect.mock_calls) == 1
assert len(cl.remote.connect.mock_calls) == 1
async def test_initialize_loads_invalid_info(
cloud_client: MockClient,
cl: cloud.Cloud,
caplog: pytest.LogCaptureFixture,
) -> None:
"""Test initialize load invalid info from config file."""
info_file = MagicMock(
read_text=Mock(return_value="invalid json"),
exists=Mock(return_value=True),
relative_to=Mock(return_value=".cloud/production_auth.json"),
)
cl.iot = MagicMock()
cl.iot.connect = AsyncMock()
cl.remote = MagicMock()
cl.remote.connect = AsyncMock()
cl._on_start.extend([cl.iot.connect, cl.remote.connect])
with (
patch("hass_nabucasa.Cloud._decode_claims"),
patch(
"hass_nabucasa.Cloud.user_info_path",
new_callable=PropertyMock(return_value=info_file),
),
):
await cl.initialize()
await asyncio.sleep(0) # Flush out scheduled callbacks
assert cl.id_token is None
assert len(cl.iot.connect.mock_calls) == 0
assert len(cl.remote.connect.mock_calls) == 0
assert (
"Error loading cloud authentication info from .cloud/production_auth.json: "
"Expecting value: line 1 column 1 (char 0)" in caplog.text
)
assert cloud_client.mock_user
assert cloud_client.mock_user[0] == (
"load_auth_data",
"Home Assistant Cloud error",
(
"Unable to load authentication from .cloud/production_auth.json. "
"[Please login again](/config/cloud)"
),
)
async def test_logout_clears_info(cl: cloud.Cloud):
"""Test logging out disconnects and removes info."""
assert len(cl._on_start) == 2
cl._on_start.clear()
assert len(cl._on_stop) == 3
cl._on_stop.clear()
info_file = MagicMock(
exists=Mock(return_value=True),
unlink=Mock(return_value=True),
)
cl.id_token = "id_token"
cl.access_token = "access_token"
cl.refresh_token = "refresh_token"
cl.iot = MagicMock()
cl.iot.disconnect = AsyncMock()
cl.google_report_state = MagicMock()
cl.google_report_state.disconnect = AsyncMock()
cl.remote = MagicMock()
cl.remote.disconnect = AsyncMock()
cl._on_stop.extend(
[cl.iot.disconnect, cl.remote.disconnect, cl.google_report_state.disconnect],
)
with patch(
"hass_nabucasa.Cloud.user_info_path",
new_callable=PropertyMock(return_value=info_file),
):
await cl.logout()
assert len(cl.iot.disconnect.mock_calls) == 1
assert len(cl.google_report_state.disconnect.mock_calls) == 1
assert len(cl.remote.disconnect.mock_calls) == 1
assert cl.id_token is None
assert cl.access_token is None
assert cl.refresh_token is None
assert info_file.unlink.called
async def test_remove_data(cloud_client: MockClient, cl: cloud.Cloud) -> None:
"""Test removing data."""
cloud_dir = cloud_client.base_path / ".cloud"
cloud_dir.mkdir()
open(cloud_dir / "unexpected_file", "w")
await cl.remove_data()
assert not cloud_dir.exists()
async def test_remove_data_file(cloud_client: MockClient, cl: cloud.Cloud) -> None:
"""Test removing data when .cloud is not a directory."""
cloud_dir = cloud_client.base_path / ".cloud"
open(cloud_dir, "w")
await cl.remove_data()
assert not cloud_dir.exists()
async def test_remove_data_started(cloud_client: MockClient, cl: cloud.Cloud) -> None:
"""Test removing data when cloud is started."""
cloud_dir = cloud_client.base_path / ".cloud"
cloud_dir.mkdir()
cl.started = True
with pytest.raises(ValueError, match="Cloud not stopped"):
await cl.remove_data()
assert cloud_dir.exists()
cloud_dir.rmdir()
def test_write_user_info(cl: cloud.Cloud):
"""Test writing user info works."""
cl.id_token = "test-id-token"
cl.access_token = "test-access-token"
cl.refresh_token = "test-refresh-token"
with patch("pathlib.Path.chmod"), patch("hass_nabucasa.atomic_write") as mock_write:
cl._write_user_info()
mock_file = mock_write.return_value.__enter__.return_value
assert mock_file.write.called
data = json.loads(mock_file.write.mock_calls[0][1][0])
assert data == {
"access_token": "test-access-token",
"id_token": "test-id-token",
"refresh_token": "test-refresh-token",
}
def test_subscription_expired(cl: cloud.Cloud):
"""Test subscription being expired after 3 days of expiration."""
token_val = {"custom:sub-exp": "2017-11-13"}
with (
patch.object(cl, "_decode_claims", return_value=token_val),
patch(
"hass_nabucasa.utcnow",
return_value=utcnow().replace(year=2017, month=11, day=13),
),
):
assert not cl.subscription_expired
with (
patch.object(cl, "_decode_claims", return_value=token_val),
patch(
"hass_nabucasa.utcnow",
return_value=utcnow().replace(
year=2017,
month=11,
day=19,
hour=23,
minute=59,
second=59,
),
),
):
assert not cl.subscription_expired
with (
patch.object(cl, "_decode_claims", return_value=token_val),
patch(
"hass_nabucasa.utcnow",
return_value=utcnow().replace(
year=2017,
month=11,
day=20,
hour=0,
minute=0,
second=0,
),
),
):
assert cl.subscription_expired
def test_subscription_not_expired(cl: cloud.Cloud):
"""Test subscription not being expired."""
token_val = {"custom:sub-exp": "2017-11-13"}
with (
patch.object(cl, "_decode_claims", return_value=token_val),
patch(
"hass_nabucasa.utcnow",
return_value=utcnow().replace(year=2017, month=11, day=9),
),
):
assert not cl.subscription_expired
async def test_claims_decoding(cl: cloud.Cloud):
"""Test decoding claims."""
payload = {"cognito:username": "abc123", "some": "value"}
encoded_token = cloud.jwt.encode(payload, key="secret")
await cl.update_token(encoded_token, None)
assert cl.claims == payload
assert cl.username == "abc123"
@pytest.mark.parametrize(
("since_expired", "expected_sleep_hours"),
[
(timedelta(hours=1), 3),
(timedelta(days=1), 12),
(timedelta(days=8), 24),
(timedelta(days=31), 24),
(timedelta(days=180), 96),
],
)
async def test_subscription_reconnection_handler_renews_and_starts(
cl: cloud.Cloud,
since_expired: timedelta,
expected_sleep_hours: int,
caplog: pytest.LogCaptureFixture,
):
"""Test the subscription expired handler."""
basedate = utcnow()
_decode_claims_mocker = Mock(
return_value={
"custom:sub-exp": (basedate - since_expired).strftime("%Y-%m-%d")
},
)
async def async_renew_access_token(*args, **kwargs):
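# Simulate a successful renewal: after this, the decoded claims report
# today's date and the subscription is no longer expired.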
_decode_claims_mocker.return_value = {
"custom:sub-exp": basedate.strftime("%Y-%m-%d"),
}
with (
patch("hass_nabucasa.Cloud.initialize", AsyncMock()) as _initialize_mocker,
patch(
"hass_nabucasa.CognitoAuth.async_renew_access_token",
side_effect=async_renew_access_token,
),
patch("hass_nabucasa.asyncio.sleep", AsyncMock()) as sleep_mock,
patch(
"hass_nabucasa.Cloud._decode_claims",
_decode_claims_mocker,
),
patch(
"hass_nabucasa.Cloud.is_logged_in",
return_value=True,
),
):
await cl._subscription_reconnection_handler(
SubscriptionReconnectionReason.SUBSCRIPTION_EXPIRED
)
sleep_mock.assert_called_with(expected_sleep_hours * 60 * 60)
_initialize_mocker.assert_awaited_once()
assert "Stopping subscription reconnection handler" in caplog.text
async def test_subscription_reconnection_handler_aborts(
cl: cloud.Cloud,
caplog: pytest.LogCaptureFixture,
):
"""Test the subscription expired handler abort."""
basedate = utcnow()
with (
patch("hass_nabucasa.Cloud._start", AsyncMock()) as start_mock,
patch("hass_nabucasa.remote.RemoteUI.start", AsyncMock()) as remote_start_mock,
patch("hass_nabucasa.asyncio.sleep", AsyncMock()) as sleep_mock,
patch(
"hass_nabucasa.Cloud._decode_claims",
return_value={
"custom:sub-exp": (basedate - timedelta(days=450)).strftime("%Y-%m-%d")
},
),
):
await cl._subscription_reconnection_handler(
SubscriptionReconnectionReason.SUBSCRIPTION_EXPIRED
)
sleep_mock.assert_not_awaited()
sleep_mock.assert_not_called()
start_mock.assert_not_awaited()
start_mock.assert_not_called()
remote_start_mock.assert_not_awaited()
remote_start_mock.assert_not_called()
assert "Stopping subscription reconnection handler" in caplog.text
async def test_subscription_reconnect_for_no_subscription(
cl: cloud.Cloud,
caplog: pytest.LogCaptureFixture,
):
"""Test the subscription expired handler for no subscription."""
cl._on_start.clear()
cl._on_stop.clear()
info_file = MagicMock(
read_text=Mock(
return_value=json.dumps(
{
"id_token": "test-id-token",
"access_token": "test-access-token",
"refresh_token": "test-refresh-token",
},
),
),
exists=Mock(return_value=True),
)
cl.iot = MagicMock()
cl.iot.connect = AsyncMock()
cl.remote = MagicMock()
cl.remote.connect = AsyncMock()
start_done_event = asyncio.Event()
async def start_done():
start_done_event.set()
cl._on_start.extend([cl.iot.connect, cl.remote.connect])
cl.register_on_initialized(start_done)
def subscription_info_mock(billing_plan_type):
return {"billing_plan_type": billing_plan_type}
with (
patch(
"hass_nabucasa.Cloud._decode_claims",
return_value={"custom:sub-exp": "2080-01-01"},
),
patch(
"hass_nabucasa.Cloud.user_info_path",
new_callable=PropertyMock(return_value=info_file),
),
patch("hass_nabucasa.auth.CognitoAuth.async_check_token"),
patch(
"hass_nabucasa.CognitoAuth.async_renew_access_token",
),
patch("hass_nabucasa.asyncio.sleep", AsyncMock()),
patch(
"hass_nabucasa.async_subscription_info",
side_effect=[
subscription_info_mock("no_subscription"),
subscription_info_mock("mock-plan"),
],
),
):
await cl.initialize()
await start_done_event.wait()
assert "No subscription found" in caplog.text
assert "Stopping subscription reconnection handler" in caplog.text
hass-nabucasa-0.101.0/tests/test_instance_api.py 0000664 0000000 0000000 00000005344 15011602407 0021574 0 ustar 00root root 0000000 0000000 """Tests for Instance API."""
from typing import Any
from aiohttp import ClientError
import pytest
from hass_nabucasa import Cloud
from hass_nabucasa.instance_api import (
InstanceApi,
InstanceApiError,
InstanceConnection,
)
from tests.utils.aiohttp import AiohttpClientMocker
API_HOSTNAME = "example.com"
@pytest.fixture(autouse=True)
def set_hostname(auth_cloud_mock: Cloud):
"""Set API hostname for the mock cloud service."""
auth_cloud_mock.servicehandlers_server = API_HOSTNAME
@pytest.mark.parametrize(
"exception,getmockargs,log_msg,exception_msg",
[
[
InstanceApiError,
{"status": 500, "text": "Internal Server Error"},
"Response for get from example.com/instance/connection (500)",
"Failed to parse API response",
],
[
InstanceApiError,
{"status": 429, "text": "Too fast"},
"Response for get from example.com/instance/connection (429)",
"Failed to parse API response",
],
[
InstanceApiError,
{"exc": TimeoutError()},
"",
"Timeout reached while calling API",
],
[
InstanceApiError,
{"exc": ClientError("boom!")},
"",
"Failed to fetch: boom!",
],
[
InstanceApiError,
{"exc": Exception("boom!")},
"",
"Unexpected error while calling API: boom!",
],
],
)
async def test_problems_getting_connection(
aioclient_mock: AiohttpClientMocker,
auth_cloud_mock: Cloud,
exception: Exception,
getmockargs: dict[str, Any],
log_msg: str,
exception_msg: str,
caplog: pytest.LogCaptureFixture,
):
"""Test problems getting connection details."""
instance_api = InstanceApi(auth_cloud_mock)
aioclient_mock.get(
f"https://{API_HOSTNAME}/instance/connection",
**getmockargs,
)
with pytest.raises(exception, match=exception_msg):
await instance_api.connection()
assert log_msg in caplog.text
@pytest.mark.parametrize("connection_response", [{"connected": True, "details": {}}])
async def test_getting_connection(
aioclient_mock: AiohttpClientMocker,
auth_cloud_mock: Cloud,
connection_response: InstanceConnection,
caplog: pytest.LogCaptureFixture,
):
"""Test getting connection details."""
instance_api = InstanceApi(auth_cloud_mock)
aioclient_mock.get(
f"https://{API_HOSTNAME}/instance/connection",
json=connection_response,
)
connection = await instance_api.connection()
assert connection == connection_response
assert "Response for get from example.com/instance/connection (200)" in caplog.text
hass-nabucasa-0.101.0/tests/test_iot.py 0000664 0000000 0000000 00000024126 15011602407 0017731 0 ustar 00root root 0000000 0000000 """Test the cloud.iot module."""
import asyncio
from unittest.mock import AsyncMock, MagicMock, Mock, call, patch
from aiohttp import WSMsgType
import pytest
from hass_nabucasa import iot, iot_base
@pytest.fixture
def cloud_mock_iot(auth_cloud_mock):
"""Mock cloud class."""
auth_cloud_mock.subscription_expired = False
return auth_cloud_mock
def mock_handler_message(conn, mock_iot_client, msg):
"""Send a message to a handler."""
handler_respond_set = asyncio.Event()
messages = [
msg,
MagicMock(
type=WSMsgType.CLOSE,
),
]
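# receive_mock delivers the test message first and holds back the CLOSE
# frame until send_json has been called, so the handler response is always
# observed before the connection closes.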
async def receive_mock(_timeout):
if len(messages) == 1:
await handler_respond_set.wait()
return messages.pop(0)
mock_iot_client.receive = receive_mock
mock_iot_client.send_json = AsyncMock(
side_effect=lambda _: handler_respond_set.set(),
)
@pytest.mark.parametrize(
"message",
(
{
"msgid": "test-msg-id",
"handler": "test-handler",
"payload": "test-payload",
},
{
"msgid": "test-msg-id",
"handler": "test-handler",
},
),
)
async def test_cloud_calling_handler(mock_iot_client, cloud_mock_iot, message):
"""Test we call handle message with correct info."""
conn = iot.CloudIoT(cloud_mock_iot)
mock_handler_message(
conn,
mock_iot_client,
MagicMock(
type=WSMsgType.text,
json=MagicMock(return_value=message),
),
)
mock_handler = AsyncMock(return_value="response")
with patch.dict(iot.HANDLERS, {"test-handler": mock_handler}, clear=True):
await conn.connect()
# Check that we sent message to handler correctly
assert len(mock_handler.mock_calls) == 1
cloud, payload = mock_handler.mock_calls[0][1]
assert cloud is cloud_mock_iot
assert payload == message.get("payload")
# Check that we forwarded response from handler to cloud
assert len(mock_iot_client.send_json.mock_calls) == 1
assert mock_iot_client.send_json.mock_calls[0][1][0] == {
"msgid": "test-msg-id",
"payload": "response",
}
async def test_connection_msg_for_unknown_handler(mock_iot_client, cloud_mock_iot):
"""Test a msg for an unknown handler."""
conn = iot.CloudIoT(cloud_mock_iot)
mock_handler_message(
conn,
mock_iot_client,
MagicMock(
type=WSMsgType.text,
json=MagicMock(
return_value={
"msgid": "test-msg-id",
"handler": "non-existing-test-handler",
"payload": "test-payload",
},
),
),
)
await conn.connect()
# Check that we sent the correct error
assert len(mock_iot_client.send_json.mock_calls) == 1
assert mock_iot_client.send_json.mock_calls[0][1][0] == {
"msgid": "test-msg-id",
"error": "unknown-handler",
}
async def test_connection_msg_for_handler_raising_handler_error(
mock_iot_client,
cloud_mock_iot,
):
"""Test we sent error when handler raises HandlerError exception."""
conn = iot.CloudIoT(cloud_mock_iot)
mock_handler_message(
conn,
mock_iot_client,
MagicMock(
type=WSMsgType.text,
json=MagicMock(
return_value={
"msgid": "test-msg-id",
"handler": "test-handler",
"payload": "test-payload",
},
),
),
)
with patch.dict(
iot.HANDLERS,
{"test-handler": Mock(side_effect=iot.HandlerError("specific_error"))},
):
await conn.connect()
# Check that we sent the correct error
assert len(mock_iot_client.send_json.mock_calls) == 1
assert mock_iot_client.send_json.mock_calls[0][1][0] == {
"msgid": "test-msg-id",
"error": "specific_error",
}
async def test_connection_msg_for_handler_raising(mock_iot_client, cloud_mock_iot):
"""Test we sent error when handler raises exception."""
conn = iot.CloudIoT(cloud_mock_iot)
mock_handler_message(
conn,
mock_iot_client,
MagicMock(
type=WSMsgType.text,
json=MagicMock(
return_value={
"msgid": "test-msg-id",
"handler": "test-handler",
"payload": "test-payload",
},
),
),
)
with patch.dict(
iot.HANDLERS,
{"test-handler": Mock(side_effect=Exception("Broken"))},
):
await conn.connect()
# Check that we sent the correct error
assert len(mock_iot_client.send_json.mock_calls) == 1
assert mock_iot_client.send_json.mock_calls[0][1][0] == {
"msgid": "test-msg-id",
"error": "exception",
}
async def test_handling_core_messages_logout(cloud_mock_iot):
"""Test handling core messages."""
cloud_mock_iot.logout = AsyncMock()
await iot.async_handle_cloud(
cloud_mock_iot,
{"action": "logout", "reason": "Logged in at two places."},
)
assert len(cloud_mock_iot.logout.mock_calls) == 1
async def test_handler_alexa(cloud_mock):
"""Test handler Alexa."""
cloud_mock.client.mock_return.append({"test": 5})
resp = await iot.async_handle_alexa(cloud_mock, {"test-discovery": True})
assert len(cloud_mock.client.mock_alexa) == 1
assert resp == {"test": 5}
async def test_handler_google(cloud_mock):
"""Test handler Google."""
cloud_mock.client.mock_return.append({"test": 5})
resp = await iot.async_handle_google_actions(cloud_mock, {"test-discovery": True})
assert len(cloud_mock.client.mock_google) == 1
assert resp == {"test": 5}
async def test_handler_webhook(cloud_mock):
"""Test handler Webhook."""
cloud_mock.client.mock_return.append({"test": 5})
resp = await iot.async_handle_webhook(cloud_mock, {"test-discovery": True})
assert len(cloud_mock.client.mock_webhooks) == 1
assert resp == {"test": 5}
async def test_handler_system(cloud_mock):
"""Test handler system."""
cloud_mock.client.mock_return.append({"test": 5})
resp = await iot.async_handle_system(cloud_mock, {"test-discovery": True})
assert len(cloud_mock.client.mock_system) == 1
assert resp == {"test": 5}
async def test_handler_remote_sni(cloud_mock):
"""Test handler Webhook."""
assert not cloud_mock.client.pref_should_connect
cloud_mock.remote.snitun_server = "1.1.1.1"
resp = await iot.async_handle_remote_sni(cloud_mock, {"ip_address": "8.8.8.8"})
assert cloud_mock.client.pref_should_connect
assert resp == {"server": "1.1.1.1"}
async def test_handler_connection_info(cloud_mock):
"""Test handler connection info."""
cloud_mock.client.mock_return.append({"test": 5})
resp = await iot.async_handle_connection_info(cloud_mock, {})
assert resp == {"test": 5}
async def test_send_message_no_answer(cloud_mock_iot):
"""Test sending a message that expects no answer."""
cloud_iot = iot.CloudIoT(cloud_mock_iot)
cloud_iot.state = iot_base.STATE_CONNECTED
cloud_iot.client = MagicMock(send_json=AsyncMock())
await cloud_iot.async_send_message("webhook", {"msg": "yo"}, expect_answer=False)
assert not cloud_iot._response_handler
assert len(cloud_iot.client.send_json.mock_calls) == 1
msg = cloud_iot.client.send_json.mock_calls[0][1][0]
assert msg["handler"] == "webhook"
assert msg["payload"] == {"msg": "yo"}
async def test_send_message_answer(cloud_mock_iot):
"""Test sending a message that expects an answer."""
cloud_iot = iot.CloudIoT(cloud_mock_iot)
cloud_iot.state = iot_base.STATE_CONNECTED
cloud_iot.client = MagicMock(send_json=AsyncMock())
uuid = 5
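# uuid.uuid4 is patched below so the generated message id (.hex) is this
# fixed value, making the _response_handler key predictable.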
with patch("hass_nabucasa.iot.uuid.uuid4", return_value=MagicMock(hex=uuid)):
send_task = asyncio.create_task(
cloud_iot.async_send_message("webhook", {"msg": "yo"}),
)
await asyncio.sleep(0)
assert len(cloud_iot.client.send_json.mock_calls) == 1
assert len(cloud_iot._response_handler) == 1
msg = cloud_iot.client.send_json.mock_calls[0][1][0]
assert msg["handler"] == "webhook"
assert msg["payload"] == {"msg": "yo"}
cloud_iot._response_handler[uuid].set_result({"response": True})
response = await send_task
assert response == {"response": True}
async def test_handling_core_messages_user_notification(cloud_mock_iot):
"""Test handling core messages."""
cloud_mock_iot.client.user_message = MagicMock()
await iot.async_handle_cloud(
cloud_mock_iot,
{"action": "user_notification", "title": "Test", "message": "My message"},
)
assert len(cloud_mock_iot.client.user_message.mock_calls) == 1
async def test_handling_core_messages_critical_user_notification(cloud_mock_iot):
"""Test handling core messages."""
cloud_mock_iot.client.user_message = MagicMock()
await iot.async_handle_cloud(
cloud_mock_iot,
{
"action": "critical_user_notification",
"title": "Test",
"message": "My message",
},
)
assert len(cloud_mock_iot.client.user_message.mock_calls) == 1
async def test_handling_core_messages_remote_disconnect(cloud_mock_iot):
"""Test handling core messages."""
cloud_mock_iot.remote.disconnect = AsyncMock()
await iot.async_handle_cloud(
cloud_mock_iot,
{"action": "disconnect_remote"},
)
assert len(cloud_mock_iot.remote.disconnect.mock_calls) == 1
async def test_handling_core_messages_evaluate_remote_security(cloud_mock_iot):
"""Test handling core messages."""
cloud_mock_iot.remote.connect = AsyncMock()
cloud_mock_iot.remote.disconnect = AsyncMock()
with patch("hass_nabucasa.iot.random.randint", return_value=0):
await iot.async_handle_cloud(
cloud_mock_iot,
{"action": "evaluate_remote_security"},
)
await asyncio.sleep(0.1)
assert cloud_mock_iot.remote.disconnect.call_count == 1
assert cloud_mock_iot.remote.disconnect.call_args == call(clear_snitun_token=True)
assert cloud_mock_iot.remote.connect.call_count == 1
hass-nabucasa-0.101.0/tests/test_iot_base.py 0000664 0000000 0000000 00000015416 15011602407 0020725 0 ustar 00root root 0000000 0000000 """Test the cloud.iot_base module."""
from unittest.mock import AsyncMock, MagicMock, Mock
from aiohttp import WSMessage, WSMsgType, client_exceptions
import pytest
from hass_nabucasa import auth as auth_api, iot_base
class MockIoT(iot_base.BaseIoT):
"""Mock class for IoT."""
def __init__(self, cloud, require_subscription=True) -> None:
"""Initialize test IoT class."""
super().__init__(cloud)
self.received = []
self._require_subscription = require_subscription
@property
def package_name(self) -> str:
"""Return package name for logging."""
return __name__
@property
def ws_server_url(self) -> str:
"""Server to connect to."""
return "http://example.com"
@property
def require_subscription(self) -> bool:
"""If the server requires a valid subscription."""
return self._require_subscription
def async_handle_message(self, msg) -> None:
"""Handle incoming message.
Run all async tasks in a wrapper to log appropriately.
"""
@pytest.fixture
def cloud_mock_iot(auth_cloud_mock):
"""Mock cloud class."""
auth_cloud_mock.subscription_expired = False
return auth_cloud_mock
@pytest.mark.parametrize(
"require_first_message,messages,disconnect_reason",
[
(
False,
[
WSMessage(
type=WSMsgType.CLOSING,
data=4002,
extra="Another instance connected",
),
],
iot_base.DisconnectReason(
True,
"Connection closed: Closed by server. "
"Another instance connected (4002)",
),
),
(
True,
[
WSMessage(
type=WSMsgType.CLOSING,
data=4002,
extra="Another instance connected",
),
],
iot_base.DisconnectReason(
False,
"Connection closed: Closed by server. "
"Another instance connected (4002)",
),
),
(
True,
[
WSMessage(
type=WSMsgType.TEXT,
data='{"msgid": "1", "handler": "system"}',
extra=None,
),
WSMessage(
type=WSMsgType.CLOSING,
data=4002,
extra="Another instance connected",
),
],
iot_base.DisconnectReason(
True,
"Connection closed: Closed by server. "
"Another instance connected (4002)",
),
),
],
)
async def test_cloud_getting_disconnected_by_server(
mock_iot_client,
caplog,
cloud_mock_iot,
require_first_message,
messages,
disconnect_reason,
):
"""Test server disconnecting instance."""
conn = MockIoT(cloud_mock_iot)
conn.mark_connected_after_first_message = require_first_message
mock_iot_client.receive = AsyncMock(side_effect=messages)
await conn.connect()
assert "Connection closed" in caplog.text
assert conn.last_disconnect_reason == disconnect_reason
async def test_cloud_receiving_bytes(mock_iot_client, caplog, cloud_mock_iot):
"""Test server disconnecting instance."""
conn = MockIoT(cloud_mock_iot)
mock_iot_client.receive = AsyncMock(return_value=MagicMock(type=WSMsgType.BINARY))
await conn.connect()
assert "Connection closed: Received non-Text message" in caplog.text
async def test_cloud_sending_invalid_json(mock_iot_client, caplog, cloud_mock_iot):
"""Test cloud sending invalid JSON."""
conn = MockIoT(cloud_mock_iot)
mock_iot_client.receive = AsyncMock(
return_value=MagicMock(
type=WSMsgType.TEXT,
json=MagicMock(side_effect=ValueError),
),
)
await conn.connect()
assert "Connection closed: Received invalid JSON." in caplog.text
async def test_cloud_check_token_raising(mock_iot_client, caplog, cloud_mock_iot):
"""Test cloud unable to check token."""
conn = MockIoT(cloud_mock_iot)
cloud_mock_iot.auth.async_check_token.side_effect = auth_api.CloudError("BLA")
await conn.connect()
assert "Cannot connect because unable to refresh token: BLA" in caplog.text
async def test_cloud_connect_invalid_auth(mock_iot_client, caplog, cloud_mock_iot):
"""Test invalid auth detected by server."""
conn = MockIoT(cloud_mock_iot)
request_info = Mock(real_url="http://example.com")
mock_iot_client.receive.side_effect = client_exceptions.WSServerHandshakeError(
request_info=request_info,
history=None,
status=401,
)
await conn.connect()
assert "Connection closed: Invalid auth." in caplog.text
async def test_cloud_unable_to_connect(
cloud_mock,
caplog,
cloud_mock_iot,
mock_iot_client,
):
"""Test unable to connect error."""
conn = MockIoT(cloud_mock_iot)
cloud_mock.websession.ws_connect.side_effect = client_exceptions.ClientError(
"SSL Verification failed",
)
await conn.connect()
assert conn.last_disconnect_reason == iot_base.DisconnectReason(
False,
"Unable to connect: SSL Verification failed",
)
assert "Unable to connect:" in caplog.text
async def test_cloud_connection_reset_exception(
mock_iot_client,
caplog,
cloud_mock_iot,
):
"""Test connection reset exception."""
conn = MockIoT(cloud_mock_iot)
mock_iot_client.receive.side_effect = ConnectionResetError(
"Cannot write to closing transport",
)
await conn.connect()
assert conn.last_disconnect_reason == iot_base.DisconnectReason(
False,
"Connection closed: Cannot write to closing transport",
)
assert "Cannot write to closing transport" in caplog.text
async def test_cloud_random_exception(mock_iot_client, caplog, cloud_mock_iot):
"""Test random exception."""
conn = MockIoT(cloud_mock_iot)
mock_iot_client.receive.side_effect = Exception
await conn.connect()
assert "Unexpected error" in caplog.text
async def test_refresh_token_before_expiration_fails(auth_cloud_mock):
"""Test that we don't connect if token is expired."""
auth_cloud_mock.subscription_expired = True
conn = MockIoT(auth_cloud_mock)
await conn.connect()
assert len(auth_cloud_mock.auth.async_check_token.mock_calls) == 1
assert len(auth_cloud_mock.client.mock_user) == 1
async def test_send_message_not_connected(cloud_mock_iot):
"""Test sending a message that expects no answer."""
cloud_iot = MockIoT(cloud_mock_iot)
with pytest.raises(iot_base.NotConnected):
await cloud_iot.async_send_json_message({"msg": "yo"})
hass-nabucasa-0.101.0/tests/test_remote.py 0000664 0000000 0000000 00000073661 15011602407 0020441 0 ustar 00root root 0000000 0000000 """Test remote sni handler."""
import asyncio
from datetime import timedelta
from ssl import SSLError
from unittest.mock import patch
from acme import client, messages
import pytest
from hass_nabucasa import utils
from hass_nabucasa.acme import AcmeHandler
from hass_nabucasa.const import (
DISPATCH_REMOTE_BACKEND_DOWN,
DISPATCH_REMOTE_BACKEND_UP,
DISPATCH_REMOTE_CONNECT,
DISPATCH_REMOTE_DISCONNECT,
)
from hass_nabucasa.remote import (
RENEW_IF_EXPIRES_DAYS,
WARN_RENEW_FAILED_DAYS,
CertificateStatus,
RemoteUI,
SubscriptionExpired,
)
from hass_nabucasa.utils import utcnow
from .common import MockAcme, MockSnitun
# pylint: disable=protected-access
@pytest.fixture(autouse=True)
def ignore_context():
"""Ignore ssl context."""
with patch(
"hass_nabucasa.remote.RemoteUI._create_context",
return_value=None,
) as context:
yield context
@pytest.fixture
def acme_mock():
"""Mock ACME client."""
with patch("hass_nabucasa.remote.AcmeHandler", new_callable=MockAcme) as acme:
yield acme
@pytest.fixture
def valid_acme_mock(acme_mock):
"""Mock ACME client with valid cert."""
acme_mock.common_name = "test.dui.nabu.casa"
acme_mock.alternative_names = ["test.dui.nabu.casa"]
acme_mock.expire_date = utcnow() + timedelta(days=60)
return acme_mock
@pytest.fixture
async def snitun_mock():
"""Mock ACME client."""
with patch("hass_nabucasa.remote.SniTunClientAioHttp", MockSnitun()) as snitun:
yield snitun
def test_init_remote(auth_cloud_mock):
"""Init remote object."""
RemoteUI(auth_cloud_mock)
assert len(auth_cloud_mock.register_on_start.mock_calls) == 1
assert len(auth_cloud_mock.register_on_stop.mock_calls) == 1
async def test_load_backend_exists_cert(
auth_cloud_mock,
valid_acme_mock,
mock_cognito,
aioclient_mock,
snitun_mock,
):
"""Initialize backend."""
valid = utcnow() + timedelta(days=1)
auth_cloud_mock.servicehandlers_server = "test.local"
remote = RemoteUI(auth_cloud_mock)
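# Mock the two service handler endpoints used while loading the backend:
# instance registration and snitun token issuance.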
aioclient_mock.post(
"https://test.local/instance/register",
json={
"domain": "test.dui.nabu.casa",
"email": "test@nabucasa.inc",
"server": "rest-remote.nabu.casa",
},
)
aioclient_mock.post(
"https://test.local/instance/snitun_token",
json={
"token": "test-token",
"server": "rest-remote.nabu.casa",
"valid": valid.timestamp(),
"throttling": 400,
},
)
assert remote.certificate_status is None
assert not remote.is_connected
await remote.start()
await remote._info_loaded.wait()
assert remote.snitun_server == "rest-remote.nabu.casa"
assert remote.instance_domain == "test.dui.nabu.casa"
assert not valid_acme_mock.call_issue
assert valid_acme_mock.init_args == (
auth_cloud_mock,
["test.dui.nabu.casa"],
"test@nabucasa.inc",
)
assert valid_acme_mock.call_hardening
assert snitun_mock.call_start
assert snitun_mock.init_args == (auth_cloud_mock.client.aiohttp_runner, None)
assert snitun_mock.init_kwarg == {
"snitun_server": "rest-remote.nabu.casa",
"snitun_port": 443,
}
assert snitun_mock.start_whitelist is not None
assert snitun_mock.start_endpoint_connection_error_callback is not None
await asyncio.sleep(0.1)
assert snitun_mock.call_connect
assert snitun_mock.connect_args[0] == b"test-token"
assert snitun_mock.connect_args[3] == 400
assert remote.is_connected
assert remote._acme_task
assert remote._reconnect_task
assert auth_cloud_mock.client.mock_dispatcher[0][0] == DISPATCH_REMOTE_BACKEND_UP
assert auth_cloud_mock.client.mock_dispatcher[1][0] == DISPATCH_REMOTE_CONNECT
await remote.stop()
await asyncio.sleep(0.1)
assert not remote._acme_task
assert remote.certificate_status == CertificateStatus.READY
async def test_load_backend_not_exists_cert(
auth_cloud_mock,
acme_mock,
mock_cognito,
aioclient_mock,
snitun_mock,
):
"""Initialize backend."""
valid = utcnow() + timedelta(days=1)
auth_cloud_mock.servicehandlers_server = "test.local"
remote = RemoteUI(auth_cloud_mock)
aioclient_mock.post(
"https://test.local/instance/register",
json={
"domain": "test.dui.nabu.casa",
"email": "test@nabucasa.inc",
"server": "rest-remote.nabu.casa",
},
)
aioclient_mock.post(
"https://test.local/instance/snitun_token",
json={
"token": "test-token",
"server": "rest-remote.nabu.casa",
"valid": valid.timestamp(),
"throttling": 400,
},
)
acme_mock.set_false()
await remote.start()
await asyncio.sleep(0.1)
assert remote.snitun_server == "rest-remote.nabu.casa"
assert acme_mock.call_issue
assert acme_mock.init_args == (
auth_cloud_mock,
["test.dui.nabu.casa"],
"test@nabucasa.inc",
)
assert acme_mock.call_hardening
assert snitun_mock.call_start
assert snitun_mock.init_args == (auth_cloud_mock.client.aiohttp_runner, None)
assert snitun_mock.init_kwarg == {
"snitun_server": "rest-remote.nabu.casa",
"snitun_port": 443,
}
assert snitun_mock.call_connect
assert snitun_mock.connect_args[0] == b"test-token"
assert snitun_mock.connect_args[3] == 400
assert remote._acme_task
assert remote._reconnect_task
await remote.stop()
await asyncio.sleep(0.1)
assert not remote._acme_task
async def test_load_and_unload_backend(
auth_cloud_mock,
valid_acme_mock,
mock_cognito,
aioclient_mock,
snitun_mock,
):
"""Initialize backend."""
valid = utcnow() + timedelta(days=1)
auth_cloud_mock.servicehandlers_server = "test.local"
remote = RemoteUI(auth_cloud_mock)
aioclient_mock.post(
"https://test.local/instance/register",
json={
"domain": "test.dui.nabu.casa",
"email": "test@nabucasa.inc",
"server": "rest-remote.nabu.casa",
},
)
aioclient_mock.post(
"https://test.local/instance/snitun_token",
json={
"token": "test-token",
"server": "rest-remote.nabu.casa",
"valid": valid.timestamp(),
"throttling": 400,
},
)
await remote.start()
await asyncio.sleep(0.1)
assert remote.snitun_server == "rest-remote.nabu.casa"
assert not valid_acme_mock.call_issue
assert valid_acme_mock.init_args == (
auth_cloud_mock,
["test.dui.nabu.casa"],
"test@nabucasa.inc",
)
assert valid_acme_mock.call_hardening
assert snitun_mock.call_start
assert not snitun_mock.call_stop
assert snitun_mock.init_args == (auth_cloud_mock.client.aiohttp_runner, None)
assert snitun_mock.init_kwarg == {
"snitun_server": "rest-remote.nabu.casa",
"snitun_port": 443,
}
assert remote._acme_task
assert remote._reconnect_task
await remote.stop()
await asyncio.sleep(0.1)
assert snitun_mock.call_stop
assert not remote._acme_task
assert not remote._reconnect_task
assert auth_cloud_mock.client.mock_dispatcher[-1][0] == DISPATCH_REMOTE_BACKEND_DOWN
async def test_load_backend_exists_wrong_cert(
auth_cloud_mock,
valid_acme_mock,
mock_cognito,
aioclient_mock,
snitun_mock,
):
"""Initialize backend."""
valid = utcnow() + timedelta(days=1)
auth_cloud_mock.servicehandlers_server = "test.local"
remote = RemoteUI(auth_cloud_mock)
aioclient_mock.post(
"https://test.local/instance/register",
json={
"domain": "test.dui.nabu.casa",
"email": "test@nabucasa.inc",
"server": "rest-remote.nabu.casa",
"alias": ["example.com"],
},
)
aioclient_mock.post(
"https://test.local/instance/snitun_token",
json={
"token": "test-token",
"server": "rest-remote.nabu.casa",
"valid": valid.timestamp(),
"throttling": 400,
},
)
aioclient_mock.post(
"https://example.com/instance/resolve_dns_cname",
json=["test.dui.nabu.casa", "_acme-challenge.test.dui.nabu.casa"],
)
auth_cloud_mock.accounts_server = "example.com"
valid_acme_mock.common_name = "test.dui.nabu.casa"
valid_acme_mock.alternative_names = ["test.dui.nabu.casa"]
await remote.load_backend()
await asyncio.sleep(0.1)
assert remote.snitun_server == "rest-remote.nabu.casa"
assert valid_acme_mock.call_reset
assert valid_acme_mock.init_args == (
auth_cloud_mock,
["test.dui.nabu.casa", "example.com"],
"test@nabucasa.inc",
)
assert valid_acme_mock.call_hardening
assert snitun_mock.call_start
assert snitun_mock.init_args == (auth_cloud_mock.client.aiohttp_runner, None)
assert snitun_mock.init_kwarg == {
"snitun_server": "rest-remote.nabu.casa",
"snitun_port": 443,
}
assert snitun_mock.call_connect
assert snitun_mock.connect_args[0] == b"test-token"
assert snitun_mock.connect_args[3] == 400
await remote.disconnect()
await asyncio.sleep(0.1)
assert snitun_mock.call_disconnect
async def test_call_disconnect(
auth_cloud_mock,
acme_mock,
mock_cognito,
aioclient_mock,
snitun_mock,
):
"""Initialize backend."""
valid = utcnow() + timedelta(days=1)
auth_cloud_mock.servicehandlers_server = "test.local"
remote = RemoteUI(auth_cloud_mock)
aioclient_mock.post(
"https://test.local/instance/register",
json={
"domain": "test.dui.nabu.casa",
"email": "test@nabucasa.inc",
"server": "rest-remote.nabu.casa",
},
)
aioclient_mock.post(
"https://test.local/instance/snitun_token",
json={
"token": "test-token",
"server": "rest-remote.nabu.casa",
"valid": valid.timestamp(),
"throttling": 400,
},
)
assert not remote.is_connected
await remote.load_backend()
await asyncio.sleep(0.1)
assert remote.is_connected
await remote.disconnect()
assert snitun_mock.call_disconnect
assert not remote.is_connected
assert remote._token
assert auth_cloud_mock.client.mock_dispatcher[-1][0] == DISPATCH_REMOTE_DISCONNECT
async def test_load_backend_no_autostart(
auth_cloud_mock,
valid_acme_mock,
mock_cognito,
aioclient_mock,
snitun_mock,
):
"""Initialize backend."""
valid = utcnow() + timedelta(days=1)
auth_cloud_mock.servicehandlers_server = "test.local"
remote = RemoteUI(auth_cloud_mock)
aioclient_mock.post(
"https://test.local/instance/register",
json={
"domain": "test.dui.nabu.casa",
"email": "test@nabucasa.inc",
"server": "rest-remote.nabu.casa",
},
)
aioclient_mock.post(
"https://test.local/instance/snitun_token",
json={
"token": "test-token",
"server": "rest-remote.nabu.casa",
"valid": valid.timestamp(),
"throttling": 400,
},
)
auth_cloud_mock.client.prop_remote_autostart = False
await remote.load_backend()
await asyncio.sleep(0.1)
assert remote.snitun_server == "rest-remote.nabu.casa"
assert not valid_acme_mock.call_issue
assert valid_acme_mock.call_hardening
assert snitun_mock.call_start
assert not snitun_mock.call_connect
await remote.connect()
assert snitun_mock.call_connect
assert snitun_mock.connect_args[0] == b"test-token"
assert snitun_mock.connect_args[3] == 400
assert auth_cloud_mock.client.mock_dispatcher[-1][0] == DISPATCH_REMOTE_CONNECT
await remote.disconnect()
await asyncio.sleep(0.1)
assert snitun_mock.call_disconnect
async def test_get_certificate_details(
auth_cloud_mock,
acme_mock,
mock_cognito,
aioclient_mock,
snitun_mock,
):
"""Initialize backend."""
valid = utcnow() + timedelta(days=1)
auth_cloud_mock.servicehandlers_server = "test.local"
remote = RemoteUI(auth_cloud_mock)
assert remote.certificate is None
aioclient_mock.post(
"https://test.local/instance/register",
json={
"domain": "test.dui.nabu.casa",
"email": "test@nabucasa.inc",
"server": "rest-remote.nabu.casa",
},
)
aioclient_mock.post(
"https://test.local/instance/snitun_token",
json={
"token": "test-token",
"server": "rest-remote.nabu.casa",
"valid": valid.timestamp(),
"throttling": 400,
},
)
auth_cloud_mock.client.prop_remote_autostart = False
await remote.load_backend()
await asyncio.sleep(0.1)
assert remote.certificate is None
acme_mock.common_name = "test"
acme_mock.alternative_names = ["test"]
acme_mock.expire_date = valid
acme_mock.fingerprint = "ffff"
certificate = remote.certificate
assert certificate.common_name == "test"
assert certificate.alternative_names == ["test"]
assert certificate.expire_date == valid
assert certificate.fingerprint == "ffff"
async def test_certificate_task_no_backend(
auth_cloud_mock,
acme_mock,
mock_cognito,
aioclient_mock,
snitun_mock,
):
"""Initialize backend."""
valid = utcnow() + timedelta(days=1)
auth_cloud_mock.servicehandlers_server = "test.local"
remote = RemoteUI(auth_cloud_mock)
aioclient_mock.post(
"https://test.local/instance/register",
json={
"domain": "test.dui.nabu.casa",
"email": "test@nabucasa.inc",
"server": "rest-remote.nabu.casa",
},
)
aioclient_mock.post(
"https://test.local/instance/snitun_token",
json={
"token": "test-token",
"server": "rest-remote.nabu.casa",
"valid": valid.timestamp(),
"throttling": 400,
},
)
acme_mock.expire_date = valid
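# Patching next_midnight and randint to 0 below makes the certificate
# handler run immediately instead of waiting for its scheduled time.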
with (
patch("hass_nabucasa.utils.next_midnight", return_value=0),
patch("random.randint", return_value=0),
):
acme_task = remote._acme_task = asyncio.create_task(
remote._certificate_handler(),
)
await asyncio.sleep(0.1)
assert acme_mock.call_issue
assert snitun_mock.call_start
await remote.stop()
await asyncio.sleep(0.1)
assert acme_task.done()
async def test_certificate_task_renew_cert(
auth_cloud_mock,
acme_mock,
mock_cognito,
aioclient_mock,
snitun_mock,
):
"""Initialize backend."""
valid = utcnow() + timedelta(days=1)
auth_cloud_mock.servicehandlers_server = "test.local"
remote = RemoteUI(auth_cloud_mock)
aioclient_mock.post(
"https://test.local/instance/register",
json={
"domain": "test.dui.nabu.casa",
"email": "test@nabucasa.inc",
"server": "rest-remote.nabu.casa",
},
)
aioclient_mock.post(
"https://test.local/instance/snitun_token",
json={
"token": "test-token",
"server": "rest-remote.nabu.casa",
"valid": valid.timestamp(),
"throttling": 400,
},
)
acme_mock.expire_date = utcnow() + timedelta(days=-40)
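# An expiry date 40 days in the past puts the certificate well past the
# renewal threshold, so the handler must issue a new one.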
with (
patch("hass_nabucasa.utils.next_midnight", return_value=0),
patch("random.randint", return_value=0),
):
acme_task = remote._acme_task = asyncio.create_task(
remote._certificate_handler(),
)
await remote.load_backend()
await asyncio.sleep(0.1)
assert acme_mock.call_issue
await remote.stop()
await asyncio.sleep(0.1)
assert acme_task.done()
async def test_refresh_token_no_sub(auth_cloud_mock):
"""Test that we rais SubscriptionExpired if expired sub."""
auth_cloud_mock.subscription_expired = True
with pytest.raises(SubscriptionExpired):
await RemoteUI(auth_cloud_mock)._refresh_snitun_token()
async def test_load_connect_insecure(
auth_cloud_mock,
valid_acme_mock,
mock_cognito,
aioclient_mock,
snitun_mock,
):
"""Initialize backend."""
valid = utcnow() + timedelta(days=1)
auth_cloud_mock.servicehandlers_server = "test.local"
remote = RemoteUI(auth_cloud_mock)
aioclient_mock.post(
"https://test.local/instance/register",
json={
"domain": "test.dui.nabu.casa",
"email": "test@nabucasa.inc",
"server": "rest-remote.nabu.casa",
},
)
aioclient_mock.post(
"https://test.local/instance/snitun_token",
json={
"token": "test-token",
"server": "rest-remote.nabu.casa",
"valid": valid.timestamp(),
"throttling": 400,
},
status=409,
)
auth_cloud_mock.client.prop_remote_autostart = True
await remote.load_backend()
await asyncio.sleep(0.1)
assert remote.snitun_server == "rest-remote.nabu.casa"
assert not valid_acme_mock.call_issue
assert valid_acme_mock.call_hardening
assert snitun_mock.call_start
assert not snitun_mock.call_connect
assert auth_cloud_mock.client.mock_dispatcher[-1][0] == DISPATCH_REMOTE_BACKEND_UP
async def test_load_connect_forbidden(
auth_cloud_mock,
valid_acme_mock,
mock_cognito,
aioclient_mock,
snitun_mock,
caplog,
):
"""Initialize backend."""
auth_cloud_mock.servicehandlers_server = "test.local"
remote = RemoteUI(auth_cloud_mock)
aioclient_mock.post(
"https://test.local/instance/register",
json={
"domain": "test.dui.nabu.casa",
"email": "test@nabucasa.inc",
"server": "rest-remote.nabu.casa",
},
)
aioclient_mock.post(
"https://test.local/instance/snitun_token",
json={
"message": "lorem_ipsum",
},
status=403,
headers={"content-type": "application/json; charset=utf-8"},
)
auth_cloud_mock.client.prop_remote_autostart = True
await remote.load_backend()
await asyncio.sleep(0.1)
assert remote.snitun_server == "rest-remote.nabu.casa"
assert not valid_acme_mock.call_issue
assert valid_acme_mock.call_hardening
assert snitun_mock.call_start
assert not snitun_mock.call_connect
assert "Remote connection is not allowed lorem_ipsum" in caplog.text
async def test_call_disconnect_clean_token(
auth_cloud_mock,
acme_mock,
mock_cognito,
aioclient_mock,
snitun_mock,
):
"""Initialize backend."""
valid = utcnow() + timedelta(days=1)
auth_cloud_mock.servicehandlers_server = "test.local"
remote = RemoteUI(auth_cloud_mock)
aioclient_mock.post(
"https://test.local/instance/register",
json={
"domain": "test.dui.nabu.casa",
"email": "test@nabucasa.inc",
"server": "rest-remote.nabu.casa",
},
)
aioclient_mock.post(
"https://test.local/instance/snitun_token",
json={
"token": "test-token",
"server": "rest-remote.nabu.casa",
"valid": valid.timestamp(),
"throttling": 400,
},
)
assert not remote.is_connected
await remote.load_backend()
await asyncio.sleep(0.1)
assert remote.is_connected
assert remote._token
await remote.disconnect(clear_snitun_token=True)
assert snitun_mock.call_disconnect
assert not remote.is_connected
assert remote._token is None
assert auth_cloud_mock.client.mock_dispatcher[-1][0] == DISPATCH_REMOTE_DISCONNECT
async def test_recreating_old_certificate_with_bad_dns_config(
auth_cloud_mock,
valid_acme_mock,
mock_cognito,
aioclient_mock,
snitun_mock,
):
"""Test recreating old certificate with bad DNS config for alias."""
valid = utcnow() + timedelta(days=1)
auth_cloud_mock.servicehandlers_server = "test.local"
remote = RemoteUI(auth_cloud_mock)
aioclient_mock.post(
"https://test.local/instance/register",
json={
"domain": "test.dui.nabu.casa",
"email": "test@nabucasa.inc",
"server": "rest-remote.nabu.casa",
"alias": ["example.com"],
},
)
aioclient_mock.post(
"https://test.local/instance/snitun_token",
json={
"token": "test-token",
"server": "rest-remote.nabu.casa",
"valid": valid.timestamp(),
"throttling": 400,
},
)
aioclient_mock.post(
"https://example.com/instance/resolve_dns_cname",
json=["test.dui.nabu.casa"],
)
auth_cloud_mock.accounts_server = "example.com"
valid_acme_mock.common_name = "test.dui.nabu.casa"
valid_acme_mock.alternative_names = ["test.dui.nabu.casa", "example.com"]
valid_acme_mock.expire_date = utils.utcnow() + timedelta(
days=WARN_RENEW_FAILED_DAYS,
)
await remote.load_backend()
await asyncio.sleep(0.1)
assert remote.snitun_server == "rest-remote.nabu.casa"
assert valid_acme_mock.call_reset
assert valid_acme_mock.init_args == (
auth_cloud_mock,
["test.dui.nabu.casa"],
"test@nabucasa.inc",
)
assert valid_acme_mock.call_hardening
assert snitun_mock.call_start
assert snitun_mock.init_args == (auth_cloud_mock.client.aiohttp_runner, None)
assert snitun_mock.init_kwarg == {
"snitun_server": "rest-remote.nabu.casa",
"snitun_port": 443,
}
assert len(auth_cloud_mock.client.mock_repairs) == 1
repair = auth_cloud_mock.client.mock_repairs[0]
assert set(repair.keys()) == {
"identifier",
"translation_key",
"severity",
"placeholders",
}
assert repair["identifier"].startswith("reset_bad_custom_domain_configuration_")
assert repair["translation_key"] == "reset_bad_custom_domain_configuration"
assert repair["severity"] == "error"
assert repair["placeholders"] == {"custom_domains": "example.com"}
assert snitun_mock.call_connect
assert snitun_mock.connect_args[0] == b"test-token"
assert snitun_mock.connect_args[3] == 400
await remote.disconnect()
await asyncio.sleep(0.1)
assert snitun_mock.call_disconnect
async def test_warn_about_bad_dns_config_for_old_certificate(
auth_cloud_mock,
valid_acme_mock,
mock_cognito,
aioclient_mock,
snitun_mock,
):
"""Test warn about old certificate with bad DNS config for alias."""
valid = utcnow() + timedelta(days=1)
auth_cloud_mock.servicehandlers_server = "test.local"
remote = RemoteUI(auth_cloud_mock)
aioclient_mock.post(
"https://test.local/instance/register",
json={
"domain": "test.dui.nabu.casa",
"email": "test@nabucasa.inc",
"server": "rest-remote.nabu.casa",
"alias": ["example.com"],
},
)
aioclient_mock.post(
"https://test.local/instance/snitun_token",
json={
"token": "test-token",
"server": "rest-remote.nabu.casa",
"valid": valid.timestamp(),
"throttling": 400,
},
)
aioclient_mock.post(
"https://example.com/instance/resolve_dns_cname",
status=400,
)
auth_cloud_mock.accounts_server = "example.com"
valid_acme_mock.common_name = "test.dui.nabu.casa"
valid_acme_mock.alternative_names = ["test.dui.nabu.casa", "example.com"]
valid_acme_mock.expire_date = utils.utcnow() + timedelta(days=RENEW_IF_EXPIRES_DAYS)
await remote.load_backend()
await asyncio.sleep(0.1)
assert remote.snitun_server == "rest-remote.nabu.casa"
assert not valid_acme_mock.call_reset
assert snitun_mock.call_start
assert snitun_mock.init_args == (auth_cloud_mock.client.aiohttp_runner, None)
assert snitun_mock.init_kwarg == {
"snitun_server": "rest-remote.nabu.casa",
"snitun_port": 443,
}
assert len(auth_cloud_mock.client.mock_repairs) == 1
repair = auth_cloud_mock.client.mock_repairs[0]
assert set(repair.keys()) == {
"identifier",
"translation_key",
"severity",
"placeholders",
}
assert repair["identifier"].startswith("warn_bad_custom_domain_configuration_")
assert repair["translation_key"] == "warn_bad_custom_domain_configuration"
assert repair["severity"] == "warning"
assert repair["placeholders"] == {"custom_domains": "example.com"}
assert snitun_mock.call_connect
assert snitun_mock.connect_args[0] == b"test-token"
assert snitun_mock.connect_args[3] == 400
await remote.disconnect()
await asyncio.sleep(0.1)
assert snitun_mock.call_disconnect
async def test_regeneration_without_warning_for_good_dns_config(
auth_cloud_mock,
valid_acme_mock,
mock_cognito,
aioclient_mock,
snitun_mock,
):
"""Test no warning for good dns config."""
valid = utcnow() + timedelta(days=1)
auth_cloud_mock.servicehandlers_server = "test.local"
remote = RemoteUI(auth_cloud_mock)
aioclient_mock.post(
"https://test.local/instance/register",
json={
"domain": "test.dui.nabu.casa",
"email": "test@nabucasa.inc",
"server": "rest-remote.nabu.casa",
"alias": ["example.com"],
},
)
aioclient_mock.post(
"https://test.local/instance/snitun_token",
json={
"token": "test-token",
"server": "rest-remote.nabu.casa",
"valid": valid.timestamp(),
"throttling": 400,
},
)
aioclient_mock.post(
"https://example.com/instance/resolve_dns_cname",
json=["test.dui.nabu.casa", "_acme-challenge.test.dui.nabu.casa"],
)
auth_cloud_mock.accounts_server = "example.com"
valid_acme_mock.common_name = "test.dui.nabu.casa"
valid_acme_mock.alternative_names = ["test.dui.nabu.casa", "example.com"]
valid_acme_mock.expire_date = utils.utcnow() + timedelta(days=RENEW_IF_EXPIRES_DAYS)
await remote.load_backend()
await asyncio.sleep(0.1)
assert remote.snitun_server == "rest-remote.nabu.casa"
assert not valid_acme_mock.call_reset
assert valid_acme_mock.call_issue
assert snitun_mock.call_start
assert snitun_mock.init_args == (auth_cloud_mock.client.aiohttp_runner, None)
assert snitun_mock.init_kwarg == {
"snitun_server": "rest-remote.nabu.casa",
"snitun_port": 443,
}
assert len(auth_cloud_mock.client.mock_repairs) == 0
assert snitun_mock.call_connect
assert snitun_mock.connect_args[0] == b"test-token"
assert snitun_mock.connect_args[3] == 400
await remote.disconnect()
await asyncio.sleep(0.1)
assert snitun_mock.call_disconnect
@pytest.mark.parametrize(
("json_error", "should_reset"),
(
(
{
"type": "urn:ietf:params:acme:error:malformed",
"detail": "JWS verification error",
},
True,
),
(
{
"type": "urn:ietf:params:acme:error:malformed",
"detail": "Some other malformed reason",
},
False,
),
(
{
"type": "about:blank",
"detail": "Boom",
},
False,
),
),
)
async def test_acme_client_new_order_errors(
auth_cloud_mock,
mock_cognito,
aioclient_mock,
snitun_mock,
json_error,
should_reset,
):
"""Initialize backend."""
auth_cloud_mock.servicehandlers_server = "test.local"
class _MockAcmeClient(client.ClientV2):
def __init__(self) -> None:
pass
def new_order(self, _):
raise messages.Error.from_json(json_error)
class _MockAcme(AcmeHandler):
call_reset = False
cloud = auth_cloud_mock
@property
def certificate_available(self):
return True
@property
def alternative_names(self):
return ["test.dui.nabu.casa"]
def _generate_csr(self):
return b""
def _create_client(self):
self._acme_client = _MockAcmeClient()
async def reset_acme(self):
self.call_reset = True
remote = RemoteUI(auth_cloud_mock)
aioclient_mock.post(
"https://test.local/instance/register",
json={
"domain": "test.dui.nabu.casa",
"email": "test@nabucasa.inc",
"server": "rest-remote.nabu.casa",
},
)
with patch(
"hass_nabucasa.remote.AcmeHandler",
return_value=_MockAcme(auth_cloud_mock, [], "test@nabucasa.inc"),
):
assert remote._certificate_status is None
await remote.load_backend()
await asyncio.sleep(0.1)
assert remote._acme.call_reset == should_reset
assert remote._certificate_status is CertificateStatus.ERROR
await remote.stop()
@pytest.mark.parametrize(
("reason", "should_reset"),
(
(
"KEY_VALUES_MISMATCH",
True,
),
(
"Boom",
False,
),
),
)
async def test_context_error_handling(
auth_cloud_mock,
mock_cognito,
valid_acme_mock,
aioclient_mock,
snitun_mock,
reason,
should_reset,
):
"""Test that we reset if we hit an error reason that require resetting."""
auth_cloud_mock.servicehandlers_server = "test.local"
remote = RemoteUI(auth_cloud_mock)
aioclient_mock.post(
"https://test.local/instance/register",
json={
"domain": "test.dui.nabu.casa",
"email": "test@nabucasa.inc",
"server": "rest-remote.nabu.casa",
},
)
ssl_error = SSLError()
ssl_error.reason = reason
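# Per the parametrize above, only the KEY_VALUES_MISMATCH SSL error reason
# should trigger an ACME reset.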
with patch(
"hass_nabucasa.remote.RemoteUI._create_context",
side_effect=ssl_error,
):
assert remote._certificate_status is None
await remote.load_backend()
await asyncio.sleep(0.1)
assert remote._acme.call_reset == should_reset
assert remote._certificate_status is CertificateStatus.ERROR
await remote.stop()
hass-nabucasa-0.101.0/tests/test_utils.py 0000664 0000000 0000000 00000002227 15011602407 0020274 0 ustar 00root root 0000000 0000000 """Tests for hass_nabucasa utils."""
import pytest
from hass_nabucasa import utils
@pytest.mark.parametrize(
"input_str",
[
"2020-02-30",
"2019-02-29",
"2021-04-31",
"2023-06-31",
"2018-09-31",
"2015-11-31",
"2022-02-30",
"2020-04-31",
"2021-06-31",
"2017-09-31",
"2019-04-31",
"2023-11-31",
"2020-06-31",
"2016-02-30",
"2021-11-31",
"invalid",
"2023/12/12",
],
)
def test_parse_date_with_invalid_dates(input_str):
"""Test the parse_date util."""
assert utils.parse_date(input_str) is None
@pytest.mark.parametrize(
"input_str",
[
"2020-02-29",
"2019-03-15",
"2021-04-30",
"2023-06-15",
"2018-09-30",
"2015-12-25",
"2022-02-28",
"2020-07-04",
"2021-08-21",
"2017-10-31",
"2019-01-01",
"2023-11-30",
"2020-05-05",
"2016-12-12",
"2021-03-14",
],
)
def test_parse_date_with_valid_dates(input_str):
"""Test the parse_date util."""
assert utils.parse_date(input_str) is not None
hass-nabucasa-0.101.0/tests/test_voice.py 0000664 0000000 0000000 00000020306 15011602407 0020237 0 ustar 00root root 0000000 0000000 """Tests for voice functions."""
from datetime import timedelta
import pytest
import xmltodict
from hass_nabucasa import voice
from hass_nabucasa.auth import Unauthenticated
from hass_nabucasa.voice_api import VoiceApi
@pytest.fixture
def voice_api(auth_cloud_mock):
"""Voice api fixture."""
auth_cloud_mock.servicehandlers_server = "test.local"
auth_cloud_mock.voice_api = VoiceApi(auth_cloud_mock)
return voice.Voice(auth_cloud_mock)
@pytest.fixture(autouse=True)
def mock_voice_connection_details(aioclient_mock):
"""Mock voice connection details."""
aioclient_mock.get(
"https://test.local/voice/connection_details",
json={
"authorized_key": "test-key",
"endpoint_stt": "stt-url",
"endpoint_tts": "tts-url",
"valid": f"{(voice.utcnow() + timedelta(minutes=9)).timestamp()}",
},
)
async def test_token_handling(voice_api, aioclient_mock, mock_voice_connection_details):
"""Test handling around token."""
assert not voice_api._validate_token()
await voice_api._update_token()
assert voice_api._validate_token()
assert voice_api._endpoint_stt == "stt-url"
assert voice_api._endpoint_tts == "tts-url"
assert voice_api._token == "test-key"
async def test_process_stt(voice_api, aioclient_mock, mock_voice_connection_details):
"""Test handling around stt."""
aioclient_mock.post(
"stt-url?language=en-US",
json={"RecognitionStatus": "Success", "DisplayText": "My Text"},
)
result = await voice_api.process_stt(
stream=b"feet",
content_type="video=test",
language="en-US",
)
assert result.success
assert result.text == "My Text"
async def test_process_stt_bad_language(voice_api):
"""Test language handling around stt."""
with pytest.raises(voice.VoiceError, match="Language en-BAD not supported"):
await voice_api.process_stt(
stream=b"feet",
content_type="video=test",
language="en-BAD",
)
async def test_process_tts_with_gender(
voice_api,
aioclient_mock,
mock_voice_connection_details,
snapshot,
):
"""Test handling around tts."""
aioclient_mock.post(
"tts-url",
content=b"My sound",
)
result = await voice_api.process_tts(
text="Text for Saying",
language="en-US",
gender=voice.Gender.FEMALE,
output=voice.AudioOutput.MP3,
)
assert result == b"My sound"
assert aioclient_mock.mock_calls[1][3] == {
"Authorization": "Bearer test-key",
"Content-Type": "application/ssml+xml",
"X-Microsoft-OutputFormat": "audio-24khz-48kbitrate-mono-mp3",
"User-Agent": "hass-nabucasa/tests",
}
assert xmltodict.parse(aioclient_mock.mock_calls[1][2]) == snapshot
async def test_process_tts_with_voice(
voice_api,
aioclient_mock,
mock_voice_connection_details,
snapshot,
):
"""Test handling around tts."""
aioclient_mock.post(
"tts-url",
content=b"My sound",
)
result = await voice_api.process_tts(
text="Text for Saying",
language="nl-NL",
voice="FennaNeural",
output=voice.AudioOutput.RAW,
)
assert result == b"My sound"
assert aioclient_mock.mock_calls[1][3] == {
"Authorization": "Bearer test-key",
"Content-Type": "application/ssml+xml",
"X-Microsoft-OutputFormat": "raw-16khz-16bit-mono-pcm",
"User-Agent": "hass-nabucasa/tests",
}
assert xmltodict.parse(aioclient_mock.mock_calls[1][2]) == snapshot
async def test_process_tts_with_voice_and_style(
voice_api,
aioclient_mock,
mock_voice_connection_details,
snapshot,
):
"""Test handling around tts."""
aioclient_mock.post(
"tts-url",
content=b"My sound",
)
# Voice with variants
result = await voice_api.process_tts(
text="Text for Saying",
language="de-DE",
voice="ConradNeural",
style="cheerful",
output=voice.AudioOutput.RAW,
)
assert result == b"My sound"
assert aioclient_mock.mock_calls[1][3] == {
"Authorization": "Bearer test-key",
"Content-Type": "application/ssml+xml",
"X-Microsoft-OutputFormat": "raw-16khz-16bit-mono-pcm",
"User-Agent": "hass-nabucasa/tests",
}
assert xmltodict.parse(aioclient_mock.mock_calls[1][2]) == snapshot
with pytest.raises(
voice.VoiceError,
match="Unsupported style non-existing-style "
"for voice ConradNeural in language de-DE",
):
await voice_api.process_tts(
text="Text for Saying",
language="de-DE",
voice="ConradNeural",
style="non-existing-style",
output=voice.AudioOutput.RAW,
)
# Voice without variants
result = await voice_api.process_tts(
text="Text for Saying 2",
language="en-US",
voice="MichelleNeural",
output=voice.AudioOutput.RAW,
)
assert result == b"My sound"
assert aioclient_mock.mock_calls[1][3] == {
"Authorization": "Bearer test-key",
"Content-Type": "application/ssml+xml",
"X-Microsoft-OutputFormat": "raw-16khz-16bit-mono-pcm",
"User-Agent": "hass-nabucasa/tests",
}
assert xmltodict.parse(aioclient_mock.mock_calls[2][2]) == snapshot
with pytest.raises(
voice.VoiceError,
match="Unsupported style non-existing-style "
"for voice MichelleNeural in language en-US",
):
await voice_api.process_tts(
text="Text for Saying 2",
language="en-US",
voice="MichelleNeural",
style="non-existing-style",
output=voice.AudioOutput.RAW,
)
async def test_process_tts_bad_language(voice_api):
"""Test language error handling around tts."""
with pytest.raises(voice.VoiceError, match="Unsupported language en-BAD"):
await voice_api.process_tts(
text="Text for Saying",
language="en-BAD",
output=voice.AudioOutput.MP3,
)
async def test_process_tts_bad_voice(voice_api):
"""Test voice error handling around tts."""
with pytest.raises(
voice.VoiceError, match="Unsupported voice Not a US voice for language en-US"
):
await voice_api.process_tts(
text="Text for Saying",
language="en-US",
voice="Not a US voice",
output=voice.AudioOutput.MP3,
)
async def test_process_tts_429(
voice_api,
mock_voice_connection_details,
aioclient_mock,
caplog,
):
"""Test handling of voice with 429."""
aioclient_mock.post(
"tts-url",
status=429,
)
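# A 429 from the TTS endpoint triggers one token refresh and one retry,
# hence the four mocked calls asserted below.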
with pytest.raises(
voice.VoiceError, match="Error receiving TTS with en-US/JennyNeural: 429 "
):
await voice_api.process_tts(
text="Text for Saying",
language="en-US",
gender=voice.Gender.FEMALE,
output=voice.AudioOutput.MP3,
)
assert len(aioclient_mock.mock_calls) == 4
assert "Retrying with new token" in caplog.text
async def test_process_stt_429(
voice_api,
mock_voice_connection_details,
aioclient_mock,
caplog,
):
"""Test handling of voice with 429."""
aioclient_mock.post(
"stt-url",
status=429,
)
with pytest.raises(voice.VoiceError, match="Error processing en-US speech: 429 "):
await voice_api.process_stt(
stream=b"feet",
content_type="video=test",
language="en-US",
)
assert len(aioclient_mock.mock_calls) == 4
assert "Retrying with new token" in caplog.text
async def test_process_stt_without_authentication(
    voice_api: voice.Voice,
):
    """Test handling of STT without authentication."""
async def async_check_token(*args, **kwargs):
"""Mock token check."""
raise Unauthenticated("No authentication")
voice_api.cloud.auth.async_check_token = async_check_token
    with pytest.raises(
        voice.VoiceError,
        match="No authentication",
    ):
await voice_api.process_stt(
stream=b"feet",
content_type="video=test",
language="en-US",
)
hass-nabucasa-0.101.0/tests/test_voice_api.py 0000664 0000000 0000000 00000005774 15011602407 0021104 0 ustar 00root root 0000000 0000000 """Test the voice connection details API."""
from __future__ import annotations
from typing import TYPE_CHECKING, Any
from aiohttp import ClientError
import pytest
from hass_nabucasa.voice_api import (
VoiceApi,
VoiceApiError,
VoiceConnectionDetails,
)
if TYPE_CHECKING:
from hass_nabucasa import Cloud
from tests.utils.aiohttp import AiohttpClientMocker
API_HOSTNAME = "example.com"
@pytest.fixture(autouse=True)
def set_hostname(auth_cloud_mock):
"""Set API hostname for the mock cloud service."""
auth_cloud_mock.servicehandlers_server = API_HOSTNAME
@pytest.mark.parametrize(
"exception,getmockargs,log_msg,exception_msg",
[
[
VoiceApiError,
{"status": 500, "text": "Internal Server Error"},
"Response for get from example.com/voice/connection_details (500)",
"Failed to parse API response",
],
[
VoiceApiError,
{"status": 429, "text": "Too fast"},
"Response for get from example.com/voice/connection_details (429)",
"Failed to parse API response",
],
[
VoiceApiError,
{"exc": TimeoutError()},
"",
"Timeout reached while calling API",
],
[
VoiceApiError,
{"exc": ClientError("boom!")},
"",
"Failed to fetch: boom!",
],
[
VoiceApiError,
{"exc": Exception("boom!")},
"",
"Unexpected error while calling API: boom!",
],
],
)
async def test_problems_getting_connection_details(
aioclient_mock: AiohttpClientMocker,
auth_cloud_mock: Cloud,
exception: Exception,
getmockargs,
log_msg,
exception_msg,
caplog: pytest.LogCaptureFixture,
):
"""Test problems getting connection details."""
voice_api = VoiceApi(auth_cloud_mock)
aioclient_mock.get(
f"https://{API_HOSTNAME}/voice/connection_details",
**getmockargs,
)
with pytest.raises(exception, match=exception_msg):
await voice_api.connection_details()
if log_msg:
assert log_msg in caplog.text
@pytest.mark.parametrize(
"response",
[
{
"valid": "123456789",
"authorized_key": "test_key",
"endpoint_stt": "http://example.com/stt",
"endpoint_tts": "http://example.com/tts",
}
],
)
async def test_getting_connection_details(
aioclient_mock: AiohttpClientMocker,
auth_cloud_mock: Cloud,
response: dict[str, Any],
caplog: pytest.LogCaptureFixture,
):
"""Test getting connection details."""
voice_api = VoiceApi(auth_cloud_mock)
aioclient_mock.get(
f"https://{API_HOSTNAME}/voice/connection_details",
json=response,
)
details = await voice_api.connection_details()
assert details == VoiceConnectionDetails(**response)
assert (
"Response for get from example.com/voice/connection_details (200)"
in caplog.text
)
hass-nabucasa-0.101.0/tests/utils/ 0000775 0000000 0000000 00000000000 15011602407 0016660 5 ustar 00root root 0000000 0000000 hass-nabucasa-0.101.0/tests/utils/__init__.py 0000664 0000000 0000000 00000000044 15011602407 0020767 0 ustar 00root root 0000000 0000000 """Tests for the test utilities."""
hass-nabucasa-0.101.0/tests/utils/aiohttp.py 0000664 0000000 0000000 00000016570 15011602407 0020713 0 ustar 00root root 0000000 0000000 """Aiohttp test utils."""
from contextlib import contextmanager
import json as _json
import re
from types import TracebackType
from typing import Self
from unittest import mock
from urllib.parse import parse_qs
from aiohttp import ClientSession, RequestInfo
from aiohttp.client_exceptions import ClientResponseError
from aiohttp.streams import StreamReader
import pytest
from yarl import URL
retype = type(re.compile(""))
def mock_stream(data):
"""Mock a stream with data."""
protocol = mock.Mock(_reading_paused=False)
stream = StreamReader(protocol, 1024)
stream.feed_data(data)
stream.feed_eof()
return stream
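# Usage sketch (illustrative, run inside an active event loop):
#
#     stream = mock_stream(b"payload")
#     assert await stream.read() == b"payload"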
class AiohttpClientMocker:
"""Mock Aiohttp client requests."""
def __init__(self) -> None:
"""Initialize the request mocker."""
self._mocks = []
self._cookies = {}
self.mock_calls = []
def request(
self,
method,
url,
*,
auth=None,
status=200,
text=None,
data=None,
content=None,
json=None,
params=None,
headers=None,
exc=None,
cookies=None,
):
"""Mock a request."""
if json is not None:
text = _json.dumps(json)
if text is not None:
content = text.encode("utf-8")
if content is None:
content = b""
if not isinstance(url, retype):
url = URL(url)
if params:
url = url.with_query(params)
self._mocks.append(
AiohttpClientMockResponse(
method,
url,
status,
content,
cookies,
exc,
headers or {},
),
)
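    # Registration sketch (hypothetical URLs, not used by the real tests):
    #
    #     mocker = AiohttpClientMocker()
    #     mocker.get("https://example.com/api", json={"ok": True})
    #     mocker.post("https://example.com/api", exc=TimeoutError())
    #
    # `json` takes precedence over `text`, which takes precedence over
    # `content`; whichever is given becomes the response body bytes.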
def get(self, *args, **kwargs):
"""Register a mock get request."""
self.request("get", *args, **kwargs)
def put(self, *args, **kwargs):
"""Register a mock put request."""
self.request("put", *args, **kwargs)
def post(self, *args, **kwargs):
"""Register a mock post request."""
self.request("post", *args, **kwargs)
def delete(self, *args, **kwargs):
"""Register a mock delete request."""
self.request("delete", *args, **kwargs)
def options(self, *args, **kwargs):
"""Register a mock options request."""
self.request("options", *args, **kwargs)
@property
def call_count(self):
"""Return the number of requests made."""
return len(self.mock_calls)
def clear_requests(self):
"""Reset mock calls."""
self._mocks.clear()
self._cookies.clear()
self.mock_calls.clear()
def create_session(self, loop):
"""Create a ClientSession that is bound to this mocker."""
session = ClientSession(loop=loop)
# Setting directly on `session` will raise deprecation warning
object.__setattr__(session, "_request", self.match_request)
return session
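    # Usage sketch (assuming a running loop supplied by the test framework):
    #
    #     session = mocker.create_session(loop)
    #     resp = await session.get("https://example.com/api")
    #
    # Every request made through `session` is routed to `match_request`
    # below instead of the network.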
async def match_request(
self,
method,
url,
*,
data=None,
auth=None,
params=None,
headers=None,
allow_redirects=None,
timeout=None,
json=None,
expect100=None,
chunked=None,
):
"""Match a request against pre-registered requests."""
data = data or json
url = URL(url)
if params:
url = url.with_query(params)
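        # Each matched call is recorded as a (method, url, data, headers)
        # tuple; tests index into these positions, e.g. mock_calls[1][3]
        # for the headers of the second request.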
for response in self._mocks:
if response.match_request(method, url, params):
self.mock_calls.append((method, url, data, headers))
if response.exc:
raise response.exc
return response
pytest.fail(f"No mock registered for {method.upper()} {url} {params}")
class AiohttpClientMockResponse:
"""Mock Aiohttp client response."""
def __init__(
self,
method,
url,
status,
response,
cookies=None,
exc=None,
headers=None,
) -> None:
"""Initialize a fake response."""
self.method = method
self._url = url
self.status = status
self.response = response
self.exc = exc
self._headers = headers or {}
self._cookies = {}
if cookies:
for name, data in cookies.items():
cookie = mock.MagicMock()
cookie.value = data
self._cookies[name] = cookie
def match_request(self, method, url, params=None):
"""Test if response answers request."""
if method.lower() != self.method.lower():
return False
# regular expression matching
if isinstance(self._url, retype):
return self._url.search(str(url)) is not None
if (
self._url.scheme != url.scheme
or self._url.host != url.host
or self._url.path != url.path
):
return False
# Ensure all query components in matcher are present in the request
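        # Illustration (hypothetical values): a matcher registered with
        # "?key=a" matches a request for "?key=a&other=b", while a request
        # missing "key=a" does not match.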
request_qs = parse_qs(url.query_string)
matcher_qs = parse_qs(self._url.query_string)
for key, vals in matcher_qs.items():
for val in vals:
try:
request_qs.get(key, []).remove(val)
except ValueError:
return False
return True
@property
def headers(self):
"""Return content_type."""
return self._headers
@property
def cookies(self):
"""Return dict of cookies."""
return self._cookies
@property
def url(self):
"""Return yarl of URL."""
return self._url
@property
def content_type(self):
"""Return yarl of URL."""
return self._headers.get("content-type")
@property
def content(self):
"""Return content."""
return mock_stream(self.response)
async def read(self):
"""Return mock response."""
return self.response
async def text(self, encoding="utf-8"):
"""Return mock response as a string."""
return self.response.decode(encoding)
async def json(self, encoding="utf-8"):
"""Return mock response as a json."""
return _json.loads(self.response.decode(encoding))
def release(self):
"""Mock release."""
def raise_for_status(self):
"""Raise error if status is 400 or higher."""
if self.status >= 400:
raise ClientResponseError(
RequestInfo(self.url, self.method, headers=self.headers),
None,
status=self.status,
headers=self.headers,
)
def close(self):
"""Mock close."""
async def wait_for_close(self):
"""Mock wait_for_close."""
async def __aenter__(self) -> Self:
"""Enter the context manager."""
return self
async def __aexit__(
self,
exc_type: type[BaseException] | None,
exc_val: BaseException | None,
exc_tb: TracebackType | None,
) -> None:
"""Exit the context manager."""
@contextmanager
def mock_aiohttp_client(loop):
"""Context manager to mock aiohttp client."""
mocker = AiohttpClientMocker()
with mock.patch(
"hass_nabucasa.Cloud.websession",
new_callable=mock.PropertyMock,
) as mock_websession:
session = mocker.create_session(loop)
mock_websession.return_value = session
yield mocker
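# Usage sketch (illustrative; the `event_loop` name is an assumption and not
# part of this module):
#
#     with mock_aiohttp_client(event_loop) as aioclient_mock:
#         aioclient_mock.get("https://example.com/ping", json={"ok": True})
#         # Code exercising Cloud.websession now hits the mock instead of
#         # the network.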