diff --git a/.github/workflows/black.yml b/.github/workflows/black.yml
index 9065b5e..4a3a46b 100644
--- a/.github/workflows/black.yml
+++ b/.github/workflows/black.yml
@@ -6,5 +6,6 @@ jobs:
lint:
runs-on: ubuntu-latest
steps:
- - uses: actions/checkout@v3
+      - uses: actions/checkout@v4
+      - uses: actions/setup-python@v4
- uses: psf/black@stable
diff --git a/.github/workflows/pre-commit.yml b/.github/workflows/pre-commit.yml
new file mode 100644
index 0000000..8a4a9bb
--- /dev/null
+++ b/.github/workflows/pre-commit.yml
@@ -0,0 +1,14 @@
+name: pre-commit
+
+on:
+ [push, pull_request]
+
+jobs:
+ pre-commit:
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v4
+ - uses: actions/setup-python@v4
+ with:
+ python-version: "3.11"
+ - uses: pre-commit/action@v3.0.0
diff --git a/.gitignore b/.gitignore
index 1c513bc..77b4864 100644
--- a/.gitignore
+++ b/.gitignore
@@ -10,3 +10,4 @@ __pycache__
.mypy_cache
*.swp
.vscode/settings.json
+.coverage
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 12b7bac..6a073f5 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -4,6 +4,11 @@ repos:
hooks:
- id: trailing-whitespace
- id: end-of-file-fixer
+- repo: https://github.com/asottile/pyupgrade
+ rev: v3.15.0
+ hooks:
+ - id: pyupgrade
+ args: [--py39-plus]
- repo: https://github.com/psf/black
rev: 24.3.0
hooks:
@@ -32,3 +37,4 @@ repos:
rev: 'v0.3.4'
hooks:
- id: ruff
+
diff --git a/.vscode/launch.json b/.vscode/launch.json
index fbe98a3..68224d6 100644
--- a/.vscode/launch.json
+++ b/.vscode/launch.json
@@ -4,6 +4,15 @@
// For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
"version": "0.2.0",
"configurations": [
+ {
+ "name": "Python: Debug Tests",
+ "type": "python",
+ "request": "launch",
+ "program": "${file}",
+ "purpose": ["debug-test"],
+ "console": "integratedTerminal",
+ "justMyCode": false
+ },
{
"name": "Python: Current File",
"type": "python",
diff --git a/.vscode/settings.json b/.vscode/settings.json
index ac7a5e5..6107dc4 100644
--- a/.vscode/settings.json
+++ b/.vscode/settings.json
@@ -1,4 +1,9 @@
{
"python.analysis.typeCheckingMode": "basic",
- "python.terminal.activateEnvironment": true
+ "python.terminal.activateEnvironment": true,
+ "python.testing.pytestArgs": [
+ "tests"
+ ],
+ "python.testing.unittestEnabled": false,
+ "python.testing.pytestEnabled": true
}
diff --git a/.vscode/tasks.json b/.vscode/tasks.json
new file mode 100644
index 0000000..4c070f7
--- /dev/null
+++ b/.vscode/tasks.json
@@ -0,0 +1,35 @@
+{
+    // See https://go.microsoft.com/fwlink/?LinkId=733558
+    // for the documentation about the tasks.json format
+    // .vscode/tasks.json
+    "version": "2.0.0",
+    "tasks": [
+        // run pytest against the tests directory, with coverage reporting
+        {
+ "label": "Run pytest with coverage",
+ "type": "shell",
+ "command": "pytest",
+ "args": [
+ "--cov=pyadtpulse",
+ "--cov-report=html",
+ "${workspaceFolder}/tests"
+ ],
+ "group": {
+ "kind": "test",
+ "isDefault": false
+ }
+ },
+ {
+ "label": "Run pytest without coverage",
+ "type": "shell",
+ "command": "pytest",
+ "args": [
+ "${workspaceFolder}/tests"
+ ],
+ "group": {
+ "kind": "test",
+ "isDefault": true
+ }
+ }
+ ]
+ }
diff --git a/CHANGELOG.md b/CHANGELOG.md
index d589094..e7c48ef 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,3 +1,64 @@
+## 1.2.9 (2024-04-21)
+
+* ignore query string in check_login_errors(). This should fix a bug where the task was logged out
+ but not correctly being identified
+* remove unnecessary warning in alarm status check
+* add arm night
+* refactor update_alarm_from_etree()
+* bump to newer user agent
+* skip sync check if it will back off
+* fix linter issue in _initialize_sites
+
+## 1.2.8 (2024-03-07)
+
+* add more detail to "invalid sync check" error logging
+* don't exit sync check task on service temporarily unavailable or invalid login
+* don't use empty site id for logins
+
+## 1.2.7 (2024-02-23)
+
+* catch site is None on logout to prevent "have you logged in" errors
+* speed improvements via aiohttp-zlib-ng
+
+## 1.2.6 (2024-02-23)
+
+Performance improvements including:
+
+* switch from BeautifulSoup to lxml for faster parsing
+* optimize zone parsing to only update zones which have changed
+* change wait_for_update() to pass the changed zones/alarm state to caller
+
+## 1.2.5 (2024-02-10)
+
+* don't raise not logged in exception when sync check task logs out
+* change full logout interval to approximately every 6 hours
+
+## 1.2.4 (2024-02-08)
+
+* change yarl dependencies
+
+## 1.2.3 (2024-02-08)
+
+* change aiohttp dependencies
+
+## 1.2.2 (2024-02-07)
+
+* add yarl as dependency
+
+## 1.2.1 (2024-02-07)
+
+* add timing logging for zone/site updates
+* do full logout once per day
+* have keepalive task wait for sync check task to sleep before logging out
+
+## 1.2.0 (2024-01-30)
+
+* add exceptions and exception handling
+* make code more robust for error handling
+* refactor code into smaller objects
+* add testing framework
+* add poetry
+
## 1.1.5 (2023-12-22)
* fix more zone html parsing due to changes in Pulse v27
diff --git a/README.md b/README.md
index 7a01fe4..b5a8019 100644
--- a/README.md
+++ b/README.md
@@ -98,9 +98,12 @@ ADT Pulse requires 2 factor authentication to log into their site. When you perf
Internally, ADT uses some Javascript code to create a browser fingerprint. This (very long) string is used to check that the browser has been saved upon subsequent logins. It is the "fingerprint" parameter required to be passed in to the PyADTPulse object constructor.
-### Note:
+### Notes:
+
+The browser fingerprint will change with a browser/OS upgrade. While it is not strictly necessary to create a separate username/password for logging in through pyadtpulse, it is recommended to do so.
+
+**Warning: If another connection is made to the Pulse portal with the same fingerprint, the first connection will be logged out. For this reason it is recommended to use a browser/machine you would not normally use to log into the Pulse web site to generate the fingerprint.**
-The browser fingerprint will change with a browser/OS upgrade. For this reason, it is recommended to create a separate username in ADT Pulse just for monitoring.
There are 2 ways to determine this fingerprint:
diff --git a/conftest.py b/conftest.py
new file mode 100644
index 0000000..da932af
--- /dev/null
+++ b/conftest.py
@@ -0,0 +1,458 @@
+"""Pulse Test Configuration."""
+
+import os
+import re
+import sys
+from collections.abc import Generator
+from datetime import datetime
+from enum import Enum
+from pathlib import Path
+from typing import Any
+from unittest.mock import AsyncMock, patch
+from urllib import parse
+
+import freezegun
+import pytest
+from aiohttp import client_exceptions, web
+from aioresponses import aioresponses
+
+# Get the root directory of your project
+project_root = Path(__file__).resolve().parent
+
+# Modify sys.path to include the project root
+sys.path.insert(0, str(project_root))
+test_file_dir = project_root / "tests" / "data_files"
+# pylint: disable=wrong-import-position
+# ruff: noqa: E402
+# flake8: noqa: E402
+from pyadtpulse.const import (
+ ADT_DEVICE_URI,
+ ADT_GATEWAY_URI,
+ ADT_LOGIN_URI,
+ ADT_LOGOUT_URI,
+ ADT_MFA_FAIL_URI,
+ ADT_ORB_URI,
+ ADT_SUMMARY_URI,
+ ADT_SYNC_CHECK_URI,
+ ADT_SYSTEM_SETTINGS,
+ ADT_SYSTEM_URI,
+ DEFAULT_API_HOST,
+)
+from pyadtpulse.pulse_connection_properties import PulseConnectionProperties
+from pyadtpulse.util import remove_prefix
+
+MOCKED_API_VERSION = "27.0.0-140"
+
+
+class LoginType(Enum):
+ """Login Types."""
+
+ SUCCESS = "signin.html"
+ MFA = "mfa.html"
+ FAIL = "signin_fail.html"
+ LOCKED = "signin_locked.html"
+ NOT_SIGNED_IN = "not_signed_in.html"
+
+
+@pytest.fixture
+def read_file():
+ """Fixture to read a file.
+
+ Args:
+ file_name (str): Name of the file to read
+ """
+
+ def _read_file(file_name: str) -> str:
+ file_path = test_file_dir / file_name
+ return file_path.read_text(encoding="utf-8")
+
+ return _read_file
+
+
+@pytest.fixture
+def mock_sleep(mocker):
+ """Fixture to mock asyncio.sleep."""
+ return mocker.patch("asyncio.sleep", new_callable=AsyncMock)
+
+
+@pytest.fixture
+def freeze_time_to_now():
+ """Fixture to freeze time to now."""
+ current_time = datetime.now()
+ with freezegun.freeze_time(current_time) as frozen_time:
+ yield frozen_time
+
+
+@pytest.fixture
+def get_mocked_connection_properties() -> PulseConnectionProperties:
+ """Fixture to get the test connection properties."""
+ p = PulseConnectionProperties(DEFAULT_API_HOST)
+ p.api_version = MOCKED_API_VERSION
+ return p
+
+
+@pytest.fixture
+def mock_server_down():
+ """Fixture to mock server down."""
+ with aioresponses() as m:
+ m.get(
+ DEFAULT_API_HOST,
+ status=500,
+ exception=client_exceptions.ServerConnectionError(),
+ repeat=True,
+ )
+ yield m
+
+
+@pytest.fixture
+def mock_server_temporarily_down(get_mocked_url, read_file):
+ """Fixture to mock server temporarily down."""
+ with aioresponses() as responses:
+ responses.get(
+ DEFAULT_API_HOST,
+ status=500,
+ exception=client_exceptions.ServerConnectionError(),
+ )
+ responses.get(
+ DEFAULT_API_HOST,
+ status=500,
+ exception=client_exceptions.ServerConnectionError(),
+ )
+ responses.get(
+ DEFAULT_API_HOST,
+ status=302,
+ headers={"Location": get_mocked_url(ADT_LOGIN_URI)},
+ )
+ responses.get(
+ f"{DEFAULT_API_HOST}/{ADT_LOGIN_URI}",
+ status=307,
+ headers={"Location": get_mocked_url(ADT_LOGIN_URI)},
+ repeat=True,
+ )
+ responses.get(
+ get_mocked_url(ADT_LOGIN_URI),
+ body=read_file("signin.html"),
+ content_type="text/html",
+ )
+
+ yield responses
+
+
+@pytest.fixture
+def get_mocked_url(get_mocked_connection_properties):
+ """Fixture to get the test url."""
+
+ def _get_mocked_url(path: str) -> str:
+ return get_mocked_connection_properties.make_url(path)
+
+ return _get_mocked_url
+
+
+@pytest.fixture
+def get_relative_mocked_url(get_mocked_connection_properties):
+ def _get_relative_mocked_url(path: str) -> str:
+ return remove_prefix(
+ get_mocked_connection_properties.make_url(path), DEFAULT_API_HOST
+ )
+
+ return _get_relative_mocked_url
+
+
+@pytest.fixture
+def get_mocked_mapped_static_responses(get_mocked_url) -> dict[str, str]:
+ """Fixture to get the test mapped responses."""
+ return {
+ get_mocked_url(ADT_SUMMARY_URI): "summary.html",
+ get_mocked_url(ADT_SYSTEM_URI): "system.html",
+ get_mocked_url(ADT_GATEWAY_URI): "gateway.html",
+ get_mocked_url(ADT_MFA_FAIL_URI): "mfa.html",
+ }
+
+
+@pytest.fixture
+def extract_ids_from_data_directory() -> list[str]:
+ """Extract the device ids all the device files in the data directory."""
+ id_pattern = re.compile(r"device_(\d{1,})\.html")
+ ids = set()
+ for file_name in os.listdir(test_file_dir):
+ match = id_pattern.match(file_name)
+ if match:
+ ids.add(match.group(1))
+ return list(ids)
+
+
+@pytest.fixture
+def mocked_server_responses(
+ get_mocked_mapped_static_responses: dict[str, str],
+ read_file,
+ get_mocked_url,
+ extract_ids_from_data_directory: list[str],
+) -> Generator[aioresponses, Any, None]:
+ """Fixture to get the test mapped responses."""
+ static_responses = get_mocked_mapped_static_responses
+ with aioresponses() as responses:
+ for url, file_name in static_responses.items():
+ responses.get(
+ url, body=read_file(file_name), content_type="text/html", repeat=True
+ )
+
+ # device id rewriting
+ for device_id in extract_ids_from_data_directory:
+ responses.get(
+ f"{get_mocked_url(ADT_DEVICE_URI)}?id={device_id}",
+ body=read_file(f"device_{device_id}.html"),
+ content_type="text/html",
+ )
+ # redirects
+ responses.get(
+ get_mocked_url(ADT_LOGIN_URI),
+ body=read_file("signin.html"),
+ content_type="text/html",
+ )
+ responses.get(
+ DEFAULT_API_HOST,
+ status=302,
+ headers={"Location": get_mocked_url(ADT_LOGIN_URI)},
+ repeat=True,
+ )
+ responses.get(
+ f"{DEFAULT_API_HOST}/",
+ status=302,
+ headers={"Location": get_mocked_url(ADT_LOGIN_URI)},
+ repeat=True,
+ )
+ responses.get(
+ f"{DEFAULT_API_HOST}/{ADT_LOGIN_URI}",
+ status=307,
+ headers={"Location": get_mocked_url(ADT_LOGIN_URI)},
+ repeat=True,
+ )
+ # login/logout
+
+ logout_pattern = re.compile(
+ rf"{re.escape(get_mocked_url(ADT_LOGOUT_URI))}/?.*$"
+ )
+ responses.get(
+ logout_pattern,
+ status=302,
+ headers={"Location": get_mocked_url(ADT_LOGIN_URI)},
+ repeat=True,
+ )
+
+ # not doing default sync check response or keepalive
+ # because we need to set it on each test
+ yield responses
+
+
+def add_custom_response(
+ mocked_server_responses,
+ read_file,
+ url: str,
+ method: str = "GET",
+ status: int = 200,
+ file_name: str | None = None,
+ headers: dict[str, Any] | None = None,
+):
+ if method.upper() not in ("GET", "POST"):
+ raise ValueError("Unsupported HTTP method. Only GET and POST are supported.")
+
+ mocked_server_responses.add(
+ url,
+ method,
+ status=status,
+ body=read_file(file_name) if file_name else "",
+ content_type="text/html",
+ headers=headers,
+ )
+
+
+def add_signin(
+ signin_type: LoginType, mocked_server_responses, get_mocked_url, read_file
+):
+ if signin_type != LoginType.SUCCESS:
+ add_custom_response(
+ mocked_server_responses,
+ read_file,
+ get_mocked_url(ADT_LOGIN_URI),
+ file_name=signin_type.value,
+ )
+ redirect = get_mocked_url(ADT_LOGIN_URI)
+ if signin_type == LoginType.MFA:
+ redirect = get_mocked_url(ADT_MFA_FAIL_URI)
+ if signin_type == LoginType.SUCCESS:
+ redirect = get_mocked_url(ADT_SUMMARY_URI)
+ add_custom_response(
+ mocked_server_responses,
+ read_file,
+ get_mocked_url(ADT_LOGIN_URI),
+ status=307,
+ method="POST",
+ headers={"Location": redirect},
+ )
+
+
+def add_logout(mocked_server_responses, get_mocked_url, read_file):
+ add_custom_response(
+ mocked_server_responses,
+ read_file,
+ get_mocked_url(ADT_LOGOUT_URI),
+ file_name=LoginType.SUCCESS.value,
+ )
+
+
+@pytest.fixture
+def patched_sync_task_sleep() -> Generator[AsyncMock, Any, Any]:
+ """Fixture to patch asyncio.sleep in async_query()."""
+ a = AsyncMock()
+ with patch(
+        "asyncio.sleep", side_effect=a
+ ) as mock:
+ yield mock
+
+
+# not using this currently
+class PulseMockedWebServer:
+ """Mocked Pulse Web Server."""
+
+ def __init__(self, pulse_properties: PulseConnectionProperties):
+ """Initialize the PulseMockedWebServer."""
+ self.app = web.Application()
+ self.logged_in = False
+ self.status_code = 200
+ self.retry_after_header: str | None = None
+ self.pcp = pulse_properties
+ self.uri_mapping: dict[str, list[str]] = {
+ "/": ["signin.html"],
+ self._make_local_prefix(ADT_LOGIN_URI): ["signin.html"],
+ self._make_local_prefix(ADT_LOGOUT_URI): ["signout.html"],
+ self._make_local_prefix(ADT_SUMMARY_URI): ["summary.html"],
+ self._make_local_prefix(ADT_SYSTEM_URI): ["system.html"],
+ self._make_local_prefix(ADT_SYNC_CHECK_URI): ["sync_check.html"],
+ self._make_local_prefix(ADT_ORB_URI): ["orb.html"],
+ self._make_local_prefix(ADT_SYSTEM_SETTINGS): ["system_settings.html"],
+ }
+ super().__init__()
+ self.app.router.add_route("*", "/{path_info:.*}", self.handler)
+
+ def _make_local_prefix(self, uri: str) -> str:
+ return remove_prefix(self.pcp.make_url(uri), "https://")
+
+ async def handler(self, request: web.Request) -> web.Response | web.FileResponse:
+ """Handler for the PulseMockedWebServer."""
+ path = request.path
+
+ # Check if there is a query parameter for retry_after
+ query_params = parse.parse_qs(request.query_string)
+ retry_after_param = query_params.get("retry_after")
+
+ def serve_file(filename: str) -> web.Response | web.FileResponse:
+ try:
+ return web.FileResponse(filename)
+ except FileNotFoundError:
+ return web.Response(text="Not found", status=404)
+
+ # Function to parse the retry_after parameter
+ def parse_retry_after(value) -> int | datetime:
+ try:
+ return int(value)
+ except ValueError:
+ return datetime.fromisoformat(value)
+
+ # Simulate service unavailable for a specific path
+ def handle_service_unavailable(path: str) -> web.Response | None:
+ if path == "/service_unavailable" or self.status_code == 503:
+                ra = retry_after_param[0] if retry_after_param else None
+                self.retry_after_header = str(parse_retry_after(ra)) if ra else None
+ self.status_code = 503
+ return web.Response(
+ text="Service Unavailable",
+ status=self.status_code,
+ headers=(
+ {"Retry-After": self.retry_after_header}
+ if self.retry_after_header
+ else None
+ ),
+ )
+ return None
+
+ def handle_rate_limit_exceeded(path: str) -> web.Response | None:
+ # Simulate rate limit exceeded for a specific path
+ if path == "/rate_limit_exceeded" or self.status_code == 429:
+                ra = retry_after_param[0] if retry_after_param else None
+                self.retry_after_header = str(parse_retry_after(ra)) if ra else None
+ self.status_code = 429
+ return web.Response(
+ text="Rate Limit Exceeded",
+ status=self.status_code,
+ headers=(
+ {"Retry-After": self.retry_after_header}
+ if self.retry_after_header
+ else None
+ ),
+ )
+ return None
+
+ def handle_clear_status(path: str) -> web.Response | None:
+ # Simulate clear status for a specific path
+ if path == "/clear_status":
+ self.status_code = 200
+ self.retry_after_header = None
+ return web.Response(text="Default Response", status=self.status_code)
+ return None
+
+ def handle_add_response(
+ path: str, query_string: dict[str, list[str]]
+ ) -> web.Response | None:
+ if path == "/add_response":
+ patched_uri = query_string["uri"][0]
+ if patched_uri in self.uri_mapping:
+ files = query_string.get("files")
+ if files is not None:
+ self.uri_mapping[patched_uri].extend(files)
+ return web.Response(
+ text="Default Response",
+ status=200,
+ headers={"Content-Type": "text/plain"},
+ )
+ return web.Response(text="URI not found", status=404)
+
+ if (retval := handle_rate_limit_exceeded(path)) is not None:
+ return retval
+ if (retval := handle_service_unavailable(path)) is not None:
+ return retval
+ if (retval := handle_clear_status(path)) is not None:
+ return retval
+ if (retval := handle_add_response(path, query_params)) is not None:
+ return retval
+ # do the actual request handling
+ if (
+ path == self._make_local_prefix(ADT_LOGIN_URI)
+ ) and request.method == "POST":
+ self.logged_in = True
+ raise web.HTTPFound(ADT_SUMMARY_URI)
+ if (
+ path == self._make_local_prefix(ADT_LOGOUT_URI)
+ ) and request.method == "POST":
+ self.logged_in = False
+ raise web.HTTPFound(ADT_LOGIN_URI)
+ if not self.logged_in:
+ return serve_file("signin_fail.html")
+ if path == self._make_local_prefix(ADT_DEVICE_URI):
+ device_id = query_params["id"][0]
+ return serve_file(f"device-{device_id}.html")
+ files_to_serve = self.uri_mapping.get(path)
+ if not files_to_serve:
+ return web.Response(text="URI not found", status=404)
+ file_to_serve = files_to_serve[0]
+ if len(files_to_serve) > 1:
+ file_to_serve = self.uri_mapping[path].pop(1)
+ return serve_file(file_to_serve)
+
+
+@pytest.fixture
+def mocked_pulse_server(
+    get_mocked_connection_properties,
+) -> PulseMockedWebServer:
+    """Fixture to create a mocked Pulse server."""
+    # request the properties fixture as a parameter; calling it directly fails
+    return PulseMockedWebServer(get_mocked_connection_properties)
diff --git a/example-client.py b/example-client.py
index 56030d4..4657b78 100755
--- a/example-client.py
+++ b/example-client.py
@@ -7,8 +7,7 @@
import json
import sys
from pprint import pprint
-from time import sleep
-from typing import Dict, Optional
+from time import sleep, time
from pyadtpulse import PyADTPulse
from pyadtpulse.const import (
@@ -18,8 +17,17 @@
API_HOST_CA,
DEFAULT_API_HOST,
)
+from pyadtpulse.exceptions import (
+ PulseAuthenticationError,
+ PulseClientConnectionError,
+ PulseConnectionError,
+ PulseGatewayOfflineError,
+ PulseLoginException,
+ PulseServerConnectionError,
+ PulseServiceTemporarilyUnavailableError,
+)
+from pyadtpulse.pyadtpulse_async import PyADTPulseAsync
from pyadtpulse.site import ADTPulseSite
-from pyadtpulse.util import AuthenticationException
USER = "adtpulse_user"
PASSWD = "adtpulse_password"
@@ -33,14 +41,22 @@
RELOGIN_INTERVAL = "relogin_interval"
SERVICE_HOST = "service_host"
POLL_INTERVAL = "poll_interval"
-
-BOOLEAN_PARAMS = {USE_ASYNC, DEBUG_LOCKS, PULSE_DEBUG, TEST_ALARM}
+DETAILED_DEBUG_LOGGING = "detailed_debug_logging"
+
+BOOLEAN_PARAMS = {
+ USE_ASYNC,
+ DEBUG_LOCKS,
+ PULSE_DEBUG,
+ TEST_ALARM,
+ DETAILED_DEBUG_LOGGING,
+}
INT_PARAMS = {SLEEP_INTERVAL, KEEPALIVE_INTERVAL, RELOGIN_INTERVAL}
FLOAT_PARAMS = {POLL_INTERVAL}
# Default values
DEFAULT_USE_ASYNC = True
DEFAULT_DEBUG = False
+DEFAULT_DETAILED_DEBUG_LOGGING = False
DEFAULT_TEST_ALARM = False
DEFAULT_SLEEP_INTERVAL = 5
DEFAULT_DEBUG_LOCKS = False
@@ -96,6 +112,12 @@ def handle_args() -> argparse.Namespace:
default=None,
help="Set True to enable debugging",
)
+ parser.add_argument(
+ f"--{DETAILED_DEBUG_LOGGING}",
+ type=bool,
+ default=None,
+ help="Set True to enable detailed debug logging",
+ )
parser.add_argument(
f"--{TEST_ALARM}",
type=bool,
@@ -162,6 +184,11 @@ def handle_args() -> argparse.Namespace:
args.debug_locks if args.debug_locks is not None else DEFAULT_DEBUG_LOCKS
)
args.debug = args.debug if args.debug is not None else DEFAULT_DEBUG
+ args.detailed_debug_logging = (
+ args.detailed_debug_logging
+ if args.detailed_debug_logging is not None
+ else DEFAULT_DETAILED_DEBUG_LOGGING
+ )
args.test_alarm = (
args.test_alarm if args.test_alarm is not None else DEFAULT_TEST_ALARM
)
@@ -189,7 +216,7 @@ def handle_args() -> argparse.Namespace:
return args
-def load_parameters_from_json(json_file: str) -> Optional[Dict]:
+def load_parameters_from_json(json_file: str) -> dict | None:
"""Load parameters from a JSON file.
Args:
@@ -353,6 +380,7 @@ def sync_example(
poll_interval: float,
keepalive_interval: int,
relogin_interval: int,
+ detailed_debug_logging: bool,
) -> None:
"""Run example of sync pyadtpulse calls.
@@ -365,23 +393,37 @@ def sync_example(
debug_locks: bool: True to enable thread lock debugging
keepalive_interval (int): keepalive interval in minutes
relogin_interval (int): relogin interval in minutes
+ detailed_debug_logging (bool): True to enable detailed debug logging
"""
- try:
- adt = PyADTPulse(
- username,
- password,
- fingerprint,
- debug_locks=debug_locks,
- keepalive_interval=keepalive_interval,
- relogin_interval=relogin_interval,
- )
- except AuthenticationException:
- print("Invalid credentials for ADT Pulse site")
- sys.exit()
- except BaseException as e:
- print("Received exception logging into ADT Pulse site")
- print(f"{e}")
- sys.exit()
+ while True:
+ try:
+ adt = PyADTPulse(
+ username,
+ password,
+ fingerprint,
+ debug_locks=debug_locks,
+ keepalive_interval=keepalive_interval,
+ relogin_interval=relogin_interval,
+ detailed_debug_logging=detailed_debug_logging,
+ )
+ break
+ except PulseLoginException as e:
+ print(f"ADT Pulse login failed with authentication error: {e}")
+ return
+ except (PulseClientConnectionError, PulseServerConnectionError) as e:
+ backoff_interval = e.backoff.get_current_backoff_interval()
+ print(
+ f"ADT Pulse login failed with connection error: {e}, retrying in {backoff_interval} seconds"
+ )
+ sleep(backoff_interval)
+ continue
+ except PulseServiceTemporarilyUnavailableError as e:
+ backoff_interval = e.backoff.expiration_time - time()
+ print(
+ f"ADT Pulse login failed with service unavailable error: {e}, retrying in {backoff_interval} seconds"
+ )
+ sleep(backoff_interval)
+ continue
if not adt.is_connected:
print("Error: Could not log into ADT Pulse site")
@@ -406,15 +448,33 @@ def sync_example(
test_alarm(adt.site, adt)
done = False
+ have_exception = False
while not done:
try:
- print_site(adt.site)
- print("----")
- if not adt.site.zones:
- print("Error, no zones exist, exiting...")
+ if not have_exception:
+ print_site(adt.site)
+ print("----")
+ if not adt.site.zones:
+ print("Error, no zones exist, exiting...")
+ done = True
+ break
+ have_updates = False
+ try:
+ have_updates = adt.updates_exist
+ have_exception = False
+ except PulseGatewayOfflineError:
+ print("ADT Pulse gateway is offline, re-polling")
+ have_exception = True
+ continue
+ except PulseConnectionError as ex:
+                print(f"ADT Pulse connection error: {ex.args[0]}, re-polling")
+ have_exception = True
+ continue
+ except PulseAuthenticationError as ex:
+                print(f"ADT Pulse authentication error: {ex.args[0]}, exiting...")
done = True
break
- if adt.updates_exist:
+ if have_updates and not have_exception:
print("Updates exist, refreshing")
# Don't need to explicitly call update() anymore
# Background thread will already have updated
@@ -450,7 +510,8 @@ async def async_test_alarm(adt: PyADTPulse) -> None:
print("Arming stay pending check succeeded")
else:
print(
- f"FAIL: Arming home pending check failed {adt.site.alarm_control_panel} "
+ "FAIL: Arming home pending check failed "
+ f"{adt.site.alarm_control_panel} "
)
await adt.wait_for_update()
if adt.site.alarm_control_panel.is_home:
@@ -459,7 +520,6 @@ async def async_test_alarm(adt: PyADTPulse) -> None:
while not adt.site.alarm_control_panel.is_home:
pprint(f"FAIL: Arm stay value incorrect {adt.site.alarm_control_panel}")
await adt.wait_for_update()
-
print("Testing invalid alarm state change from armed home to armed away")
if await adt.site.async_arm_away():
print(
@@ -498,7 +558,7 @@ async def async_test_alarm(adt: PyADTPulse) -> None:
f"{adt.site.alarm_control_panel}"
)
await adt.wait_for_update()
- print("Test finally succeeded")
+ print("Test finally succeeded")
print("Testing disarming twice")
if await adt.site.async_disarm():
print("Double disarm call succeeded")
@@ -521,7 +581,7 @@ async def async_test_alarm(adt: PyADTPulse) -> None:
f"{adt.site.alarm_control_panel}"
)
await adt.wait_for_update()
- print("Test finally succeeded")
+ print("Test finally succeeded")
else:
print("Disarming failed")
print("Arming alarm away")
@@ -541,7 +601,7 @@ async def async_test_alarm(adt: PyADTPulse) -> None:
"f{adt.site.alarm_control_panel}"
)
await adt.wait_for_update()
- print("Test finally succeeded")
+ print("Test finally succeeded")
else:
print("Arm away failed")
await adt.site.async_disarm()
@@ -557,6 +617,7 @@ async def async_example(
poll_interval: float,
keepalive_interval: int,
relogin_interval: int,
+ detailed_debug_logging: bool,
) -> None:
"""Run example of pytadtpulse async usage.
@@ -569,20 +630,39 @@ async def async_example(
poll_interval (float): polling interval in seconds
keepalive_interval (int): keepalive interval in minutes
relogin_interval (int): relogin interval in minutes
+ detailed_debug_logging (bool): enable detailed debug logging
"""
- adt = PyADTPulse(
+ adt = PyADTPulseAsync(
username,
password,
fingerprint,
- do_login=False,
debug_locks=debug_locks,
keepalive_interval=keepalive_interval,
relogin_interval=relogin_interval,
+ detailed_debug_logging=detailed_debug_logging,
)
- if not await adt.async_login():
- print("ADT Pulse login failed")
- return
+ while True:
+ try:
+ await adt.async_login()
+ break
+ except PulseLoginException as e:
+ print(f"ADT Pulse login failed with authentication error: {e}")
+ return
+ except (PulseClientConnectionError, PulseServerConnectionError) as e:
+ backoff_interval = e.backoff.get_current_backoff_interval()
+ print(
+ f"ADT Pulse login failed with connection error: {e}, retrying in {backoff_interval} seconds"
+ )
+ await asyncio.sleep(backoff_interval)
+ continue
+ except PulseServiceTemporarilyUnavailableError as e:
+ backoff_interval = e.backoff.expiration_time - time()
+ print(
+ f"ADT Pulse login failed with service unavailable error: {e}, retrying in {backoff_interval} seconds"
+ )
+ await asyncio.sleep(backoff_interval)
+ continue
if not adt.is_connected:
print("Error: could not log into ADT Pulse site")
@@ -604,20 +684,44 @@ async def async_example(
await async_test_alarm(adt)
done = False
+ have_exception = False
while not done:
+ updated_zones: set[int] = set()
+ alarm_updated = False
try:
- print(f"Gateway online: {adt.site.gateway.is_online}")
- print_site(adt.site)
- print("----")
- if not adt.site.zones:
- print("No zones exist, exiting...")
+ if not have_exception:
+ print(f"Gateway online: {adt.site.gateway.is_online}")
+ print_site(adt.site)
+ print("----")
+ if not adt.site.zones:
+ print("No zones exist, exiting...")
+ done = True
+ break
+ print("\nZones:")
+ pprint(adt.site.zones, compact=True)
+ try:
+ (alarm_updated, updated_zones) = await adt.wait_for_update()
+ have_exception = False
+ except PulseGatewayOfflineError as ex:
+ print(
+ f"ADT Pulse gateway is offline, re-polling in {ex.backoff.get_current_backoff_interval()}"
+ )
+ have_exception = True
+ continue
+ except (PulseClientConnectionError, PulseServerConnectionError) as ex:
+ print(
+ f"ADT Pulse connection error: {ex.args[0]}, re-polling in {ex.backoff.get_current_backoff_interval()}"
+ )
+ have_exception = True
+ continue
+ except PulseAuthenticationError as ex:
+                print(f"ADT Pulse authentication error: {ex.args[0]}, exiting...")
done = True
break
- print("\nZones:")
- pprint(adt.site.zones, compact=True)
- await adt.wait_for_update()
- print("Updates exist, refreshing")
- # no need to call an update method
+ print(
+ f"Updates exist: alarm: {alarm_updated}, zones: {updated_zones}, refreshing",
+ )
+ # no need to call an update method
except KeyboardInterrupt:
print("exiting...")
done = True
@@ -656,6 +760,7 @@ def main():
args.poll_interval,
args.keepalive_interval,
args.relogin_interval,
+ args.detailed_debug_logging,
)
else:
asyncio.run(
@@ -668,6 +773,7 @@ def main():
args.poll_interval,
args.keepalive_interval,
args.relogin_interval,
+ args.detailed_debug_logging,
)
)
diff --git a/poetry.lock b/poetry.lock
new file mode 100644
index 0000000..93a0ef3
--- /dev/null
+++ b/poetry.lock
@@ -0,0 +1,1621 @@
+# This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand.
+
+[[package]]
+name = "aiohttp"
+version = "3.9.3"
+description = "Async http client/server framework (asyncio)"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "aiohttp-3.9.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:939677b61f9d72a4fa2a042a5eee2a99a24001a67c13da113b2e30396567db54"},
+ {file = "aiohttp-3.9.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1f5cd333fcf7590a18334c90f8c9147c837a6ec8a178e88d90a9b96ea03194cc"},
+ {file = "aiohttp-3.9.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:82e6aa28dd46374f72093eda8bcd142f7771ee1eb9d1e223ff0fa7177a96b4a5"},
+ {file = "aiohttp-3.9.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f56455b0c2c7cc3b0c584815264461d07b177f903a04481dfc33e08a89f0c26b"},
+ {file = "aiohttp-3.9.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bca77a198bb6e69795ef2f09a5f4c12758487f83f33d63acde5f0d4919815768"},
+ {file = "aiohttp-3.9.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e083c285857b78ee21a96ba1eb1b5339733c3563f72980728ca2b08b53826ca5"},
+ {file = "aiohttp-3.9.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ab40e6251c3873d86ea9b30a1ac6d7478c09277b32e14745d0d3c6e76e3c7e29"},
+ {file = "aiohttp-3.9.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:df822ee7feaaeffb99c1a9e5e608800bd8eda6e5f18f5cfb0dc7eeb2eaa6bbec"},
+ {file = "aiohttp-3.9.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:acef0899fea7492145d2bbaaaec7b345c87753168589cc7faf0afec9afe9b747"},
+ {file = "aiohttp-3.9.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:cd73265a9e5ea618014802ab01babf1940cecb90c9762d8b9e7d2cc1e1969ec6"},
+ {file = "aiohttp-3.9.3-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:a78ed8a53a1221393d9637c01870248a6f4ea5b214a59a92a36f18151739452c"},
+ {file = "aiohttp-3.9.3-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:6b0e029353361f1746bac2e4cc19b32f972ec03f0f943b390c4ab3371840aabf"},
+ {file = "aiohttp-3.9.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:7cf5c9458e1e90e3c390c2639f1017a0379a99a94fdfad3a1fd966a2874bba52"},
+ {file = "aiohttp-3.9.3-cp310-cp310-win32.whl", hash = "sha256:3e59c23c52765951b69ec45ddbbc9403a8761ee6f57253250c6e1536cacc758b"},
+ {file = "aiohttp-3.9.3-cp310-cp310-win_amd64.whl", hash = "sha256:055ce4f74b82551678291473f66dc9fb9048a50d8324278751926ff0ae7715e5"},
+ {file = "aiohttp-3.9.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:6b88f9386ff1ad91ace19d2a1c0225896e28815ee09fc6a8932fded8cda97c3d"},
+ {file = "aiohttp-3.9.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c46956ed82961e31557b6857a5ca153c67e5476972e5f7190015018760938da2"},
+ {file = "aiohttp-3.9.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:07b837ef0d2f252f96009e9b8435ec1fef68ef8b1461933253d318748ec1acdc"},
+ {file = "aiohttp-3.9.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dad46e6f620574b3b4801c68255492e0159d1712271cc99d8bdf35f2043ec266"},
+ {file = "aiohttp-3.9.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5ed3e046ea7b14938112ccd53d91c1539af3e6679b222f9469981e3dac7ba1ce"},
+ {file = "aiohttp-3.9.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:039df344b45ae0b34ac885ab5b53940b174530d4dd8a14ed8b0e2155b9dddccb"},
+ {file = "aiohttp-3.9.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7943c414d3a8d9235f5f15c22ace69787c140c80b718dcd57caaade95f7cd93b"},
+ {file = "aiohttp-3.9.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:84871a243359bb42c12728f04d181a389718710129b36b6aad0fc4655a7647d4"},
+ {file = "aiohttp-3.9.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:5eafe2c065df5401ba06821b9a054d9cb2848867f3c59801b5d07a0be3a380ae"},
+ {file = "aiohttp-3.9.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:9d3c9b50f19704552f23b4eaea1fc082fdd82c63429a6506446cbd8737823da3"},
+ {file = "aiohttp-3.9.3-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:f033d80bc6283092613882dfe40419c6a6a1527e04fc69350e87a9df02bbc283"},
+ {file = "aiohttp-3.9.3-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:2c895a656dd7e061b2fd6bb77d971cc38f2afc277229ce7dd3552de8313a483e"},
+ {file = "aiohttp-3.9.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1f5a71d25cd8106eab05f8704cd9167b6e5187bcdf8f090a66c6d88b634802b4"},
+ {file = "aiohttp-3.9.3-cp311-cp311-win32.whl", hash = "sha256:50fca156d718f8ced687a373f9e140c1bb765ca16e3d6f4fe116e3df7c05b2c5"},
+ {file = "aiohttp-3.9.3-cp311-cp311-win_amd64.whl", hash = "sha256:5fe9ce6c09668063b8447f85d43b8d1c4e5d3d7e92c63173e6180b2ac5d46dd8"},
+ {file = "aiohttp-3.9.3-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:38a19bc3b686ad55804ae931012f78f7a534cce165d089a2059f658f6c91fa60"},
+ {file = "aiohttp-3.9.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:770d015888c2a598b377bd2f663adfd947d78c0124cfe7b959e1ef39f5b13869"},
+ {file = "aiohttp-3.9.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ee43080e75fc92bf36219926c8e6de497f9b247301bbf88c5c7593d931426679"},
+ {file = "aiohttp-3.9.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:52df73f14ed99cee84865b95a3d9e044f226320a87af208f068ecc33e0c35b96"},
+ {file = "aiohttp-3.9.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dc9b311743a78043b26ffaeeb9715dc360335e5517832f5a8e339f8a43581e4d"},
+ {file = "aiohttp-3.9.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b955ed993491f1a5da7f92e98d5dad3c1e14dc175f74517c4e610b1f2456fb11"},
+ {file = "aiohttp-3.9.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:504b6981675ace64c28bf4a05a508af5cde526e36492c98916127f5a02354d53"},
+ {file = "aiohttp-3.9.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a6fe5571784af92b6bc2fda8d1925cccdf24642d49546d3144948a6a1ed58ca5"},
+ {file = "aiohttp-3.9.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:ba39e9c8627edc56544c8628cc180d88605df3892beeb2b94c9bc857774848ca"},
+ {file = "aiohttp-3.9.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:e5e46b578c0e9db71d04c4b506a2121c0cb371dd89af17a0586ff6769d4c58c1"},
+ {file = "aiohttp-3.9.3-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:938a9653e1e0c592053f815f7028e41a3062e902095e5a7dc84617c87267ebd5"},
+ {file = "aiohttp-3.9.3-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:c3452ea726c76e92f3b9fae4b34a151981a9ec0a4847a627c43d71a15ac32aa6"},
+ {file = "aiohttp-3.9.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:ff30218887e62209942f91ac1be902cc80cddb86bf00fbc6783b7a43b2bea26f"},
+ {file = "aiohttp-3.9.3-cp312-cp312-win32.whl", hash = "sha256:38f307b41e0bea3294a9a2a87833191e4bcf89bb0365e83a8be3a58b31fb7f38"},
+ {file = "aiohttp-3.9.3-cp312-cp312-win_amd64.whl", hash = "sha256:b791a3143681a520c0a17e26ae7465f1b6f99461a28019d1a2f425236e6eedb5"},
+ {file = "aiohttp-3.9.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:0ed621426d961df79aa3b963ac7af0d40392956ffa9be022024cd16297b30c8c"},
+ {file = "aiohttp-3.9.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:7f46acd6a194287b7e41e87957bfe2ad1ad88318d447caf5b090012f2c5bb528"},
+ {file = "aiohttp-3.9.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:feeb18a801aacb098220e2c3eea59a512362eb408d4afd0c242044c33ad6d542"},
+ {file = "aiohttp-3.9.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f734e38fd8666f53da904c52a23ce517f1b07722118d750405af7e4123933511"},
+ {file = "aiohttp-3.9.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b40670ec7e2156d8e57f70aec34a7216407848dfe6c693ef131ddf6e76feb672"},
+ {file = "aiohttp-3.9.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fdd215b7b7fd4a53994f238d0f46b7ba4ac4c0adb12452beee724ddd0743ae5d"},
+ {file = "aiohttp-3.9.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:017a21b0df49039c8f46ca0971b3a7fdc1f56741ab1240cb90ca408049766168"},
+ {file = "aiohttp-3.9.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e99abf0bba688259a496f966211c49a514e65afa9b3073a1fcee08856e04425b"},
+ {file = "aiohttp-3.9.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:648056db9a9fa565d3fa851880f99f45e3f9a771dd3ff3bb0c048ea83fb28194"},
+ {file = "aiohttp-3.9.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:8aacb477dc26797ee089721536a292a664846489c49d3ef9725f992449eda5a8"},
+ {file = "aiohttp-3.9.3-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:522a11c934ea660ff8953eda090dcd2154d367dec1ae3c540aff9f8a5c109ab4"},
+ {file = "aiohttp-3.9.3-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:5bce0dc147ca85caa5d33debc4f4d65e8e8b5c97c7f9f660f215fa74fc49a321"},
+ {file = "aiohttp-3.9.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:4b4af9f25b49a7be47c0972139e59ec0e8285c371049df1a63b6ca81fdd216a2"},
+ {file = "aiohttp-3.9.3-cp38-cp38-win32.whl", hash = "sha256:298abd678033b8571995650ccee753d9458dfa0377be4dba91e4491da3f2be63"},
+ {file = "aiohttp-3.9.3-cp38-cp38-win_amd64.whl", hash = "sha256:69361bfdca5468c0488d7017b9b1e5ce769d40b46a9f4a2eed26b78619e9396c"},
+ {file = "aiohttp-3.9.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:0fa43c32d1643f518491d9d3a730f85f5bbaedcbd7fbcae27435bb8b7a061b29"},
+ {file = "aiohttp-3.9.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:835a55b7ca49468aaaac0b217092dfdff370e6c215c9224c52f30daaa735c1c1"},
+ {file = "aiohttp-3.9.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:06a9b2c8837d9a94fae16c6223acc14b4dfdff216ab9b7202e07a9a09541168f"},
+ {file = "aiohttp-3.9.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:abf151955990d23f84205286938796c55ff11bbfb4ccfada8c9c83ae6b3c89a3"},
+ {file = "aiohttp-3.9.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:59c26c95975f26e662ca78fdf543d4eeaef70e533a672b4113dd888bd2423caa"},
+ {file = "aiohttp-3.9.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f95511dd5d0e05fd9728bac4096319f80615aaef4acbecb35a990afebe953b0e"},
+ {file = "aiohttp-3.9.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:595f105710293e76b9dc09f52e0dd896bd064a79346234b521f6b968ffdd8e58"},
+ {file = "aiohttp-3.9.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c7c8b816c2b5af5c8a436df44ca08258fc1a13b449393a91484225fcb7545533"},
+ {file = "aiohttp-3.9.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:f1088fa100bf46e7b398ffd9904f4808a0612e1d966b4aa43baa535d1b6341eb"},
+ {file = "aiohttp-3.9.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f59dfe57bb1ec82ac0698ebfcdb7bcd0e99c255bd637ff613760d5f33e7c81b3"},
+ {file = "aiohttp-3.9.3-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:361a1026c9dd4aba0109e4040e2aecf9884f5cfe1b1b1bd3d09419c205e2e53d"},
+ {file = "aiohttp-3.9.3-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:363afe77cfcbe3a36353d8ea133e904b108feea505aa4792dad6585a8192c55a"},
+ {file = "aiohttp-3.9.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8e2c45c208c62e955e8256949eb225bd8b66a4c9b6865729a786f2aa79b72e9d"},
+ {file = "aiohttp-3.9.3-cp39-cp39-win32.whl", hash = "sha256:f7217af2e14da0856e082e96ff637f14ae45c10a5714b63c77f26d8884cf1051"},
+ {file = "aiohttp-3.9.3-cp39-cp39-win_amd64.whl", hash = "sha256:27468897f628c627230dba07ec65dc8d0db566923c48f29e084ce382119802bc"},
+ {file = "aiohttp-3.9.3.tar.gz", hash = "sha256:90842933e5d1ff760fae6caca4b2b3edba53ba8f4b71e95dacf2818a2aca06f7"},
+]
+
+[package.dependencies]
+aiosignal = ">=1.1.2"
+attrs = ">=17.3.0"
+frozenlist = ">=1.1.1"
+multidict = ">=4.5,<7.0"
+yarl = ">=1.0,<2.0"
+
+[package.extras]
+speedups = ["Brotli", "aiodns", "brotlicffi"]
+
+[[package]]
+name = "aiohttp-zlib-ng"
+version = "0.3.1"
+description = "Enable zlib_ng on aiohttp"
+optional = false
+python-versions = ">=3.8,<4.0"
+files = [
+ {file = "aiohttp_zlib_ng-0.3.1-py3-none-any.whl", hash = "sha256:aaf6de6ba3d6e0ec083adee45e437818965f19567973f51f0832721892ec3aaf"},
+ {file = "aiohttp_zlib_ng-0.3.1.tar.gz", hash = "sha256:e8ac72b855a194da4c869e89f69a9dc9339229d2366931dfea34cff93fb960fa"},
+]
+
+[package.dependencies]
+aiohttp = ">=3.8.5"
+zlib-ng = ">=0.3.0"
+
+[package.extras]
+isal = ["isal (>=1.5.3)"]
+
+[[package]]
+name = "aioresponses"
+version = "0.7.6"
+description = "Mock out requests made by ClientSession from aiohttp package"
+optional = false
+python-versions = "*"
+files = [
+ {file = "aioresponses-0.7.6-py2.py3-none-any.whl", hash = "sha256:d2c26defbb9b440ea2685ec132e90700907fd10bcca3e85ec2f157219f0d26f7"},
+ {file = "aioresponses-0.7.6.tar.gz", hash = "sha256:f795d9dbda2d61774840e7e32f5366f45752d1adc1b74c9362afd017296c7ee1"},
+]
+
+[package.dependencies]
+aiohttp = ">=3.3.0,<4.0.0"
+
+[[package]]
+name = "aiosignal"
+version = "1.3.1"
+description = "aiosignal: a list of registered asynchronous callbacks"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "aiosignal-1.3.1-py3-none-any.whl", hash = "sha256:f8376fb07dd1e86a584e4fcdec80b36b7f81aac666ebc724e2c090300dd83b17"},
+ {file = "aiosignal-1.3.1.tar.gz", hash = "sha256:54cd96e15e1649b75d6c87526a6ff0b6c1b0dd3459f43d9ca11d48c339b68cfc"},
+]
+
+[package.dependencies]
+frozenlist = ">=1.1.0"
+
+[[package]]
+name = "astroid"
+version = "3.0.3"
+description = "An abstract syntax tree for Python with inference support."
+optional = false
+python-versions = ">=3.8.0"
+files = [
+ {file = "astroid-3.0.3-py3-none-any.whl", hash = "sha256:92fcf218b89f449cdf9f7b39a269f8d5d617b27be68434912e11e79203963a17"},
+ {file = "astroid-3.0.3.tar.gz", hash = "sha256:4148645659b08b70d72460ed1921158027a9e53ae8b7234149b1400eddacbb93"},
+]
+
+[[package]]
+name = "attrs"
+version = "23.2.0"
+description = "Classes Without Boilerplate"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "attrs-23.2.0-py3-none-any.whl", hash = "sha256:99b87a485a5820b23b879f04c2305b44b951b502fd64be915879d77a7e8fc6f1"},
+ {file = "attrs-23.2.0.tar.gz", hash = "sha256:935dc3b529c262f6cf76e50877d35a4bd3c1de194fd41f47a2b7ae8f19971f30"},
+]
+
+[package.extras]
+cov = ["attrs[tests]", "coverage[toml] (>=5.3)"]
+dev = ["attrs[tests]", "pre-commit"]
+docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope-interface"]
+tests = ["attrs[tests-no-zope]", "zope-interface"]
+tests-mypy = ["mypy (>=1.6)", "pytest-mypy-plugins"]
+tests-no-zope = ["attrs[tests-mypy]", "cloudpickle", "hypothesis", "pympler", "pytest (>=4.3.0)", "pytest-xdist[psutil]"]
+
+[[package]]
+name = "black"
+version = "23.12.1"
+description = "The uncompromising code formatter."
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "black-23.12.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e0aaf6041986767a5e0ce663c7a2f0e9eaf21e6ff87a5f95cbf3675bfd4c41d2"},
+ {file = "black-23.12.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c88b3711d12905b74206227109272673edce0cb29f27e1385f33b0163c414bba"},
+ {file = "black-23.12.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a920b569dc6b3472513ba6ddea21f440d4b4c699494d2e972a1753cdc25df7b0"},
+ {file = "black-23.12.1-cp310-cp310-win_amd64.whl", hash = "sha256:3fa4be75ef2a6b96ea8d92b1587dd8cb3a35c7e3d51f0738ced0781c3aa3a5a3"},
+ {file = "black-23.12.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:8d4df77958a622f9b5a4c96edb4b8c0034f8434032ab11077ec6c56ae9f384ba"},
+ {file = "black-23.12.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:602cfb1196dc692424c70b6507593a2b29aac0547c1be9a1d1365f0d964c353b"},
+ {file = "black-23.12.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c4352800f14be5b4864016882cdba10755bd50805c95f728011bcb47a4afd59"},
+ {file = "black-23.12.1-cp311-cp311-win_amd64.whl", hash = "sha256:0808494f2b2df923ffc5723ed3c7b096bd76341f6213989759287611e9837d50"},
+ {file = "black-23.12.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:25e57fd232a6d6ff3f4478a6fd0580838e47c93c83eaf1ccc92d4faf27112c4e"},
+ {file = "black-23.12.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2d9e13db441c509a3763a7a3d9a49ccc1b4e974a47be4e08ade2a228876500ec"},
+ {file = "black-23.12.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d1bd9c210f8b109b1762ec9fd36592fdd528485aadb3f5849b2740ef17e674e"},
+ {file = "black-23.12.1-cp312-cp312-win_amd64.whl", hash = "sha256:ae76c22bde5cbb6bfd211ec343ded2163bba7883c7bc77f6b756a1049436fbb9"},
+ {file = "black-23.12.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1fa88a0f74e50e4487477bc0bb900c6781dbddfdfa32691e780bf854c3b4a47f"},
+ {file = "black-23.12.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a4d6a9668e45ad99d2f8ec70d5c8c04ef4f32f648ef39048d010b0689832ec6d"},
+ {file = "black-23.12.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b18fb2ae6c4bb63eebe5be6bd869ba2f14fd0259bda7d18a46b764d8fb86298a"},
+ {file = "black-23.12.1-cp38-cp38-win_amd64.whl", hash = "sha256:c04b6d9d20e9c13f43eee8ea87d44156b8505ca8a3c878773f68b4e4812a421e"},
+ {file = "black-23.12.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3e1b38b3135fd4c025c28c55ddfc236b05af657828a8a6abe5deec419a0b7055"},
+ {file = "black-23.12.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4f0031eaa7b921db76decd73636ef3a12c942ed367d8c3841a0739412b260a54"},
+ {file = "black-23.12.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:97e56155c6b737854e60a9ab1c598ff2533d57e7506d97af5481141671abf3ea"},
+ {file = "black-23.12.1-cp39-cp39-win_amd64.whl", hash = "sha256:dd15245c8b68fe2b6bd0f32c1556509d11bb33aec9b5d0866dd8e2ed3dba09c2"},
+ {file = "black-23.12.1-py3-none-any.whl", hash = "sha256:78baad24af0f033958cad29731e27363183e140962595def56423e626f4bee3e"},
+ {file = "black-23.12.1.tar.gz", hash = "sha256:4ce3ef14ebe8d9509188014d96af1c456a910d5b5cbf434a09fef7e024b3d0d5"},
+]
+
+[package.dependencies]
+click = ">=8.0.0"
+mypy-extensions = ">=0.4.3"
+packaging = ">=22.0"
+pathspec = ">=0.9.0"
+platformdirs = ">=2"
+
+[package.extras]
+colorama = ["colorama (>=0.4.3)"]
+d = ["aiohttp (>=3.7.4)", "aiohttp (>=3.7.4,!=3.9.0)"]
+jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"]
+uvloop = ["uvloop (>=0.15.2)"]
+
+[[package]]
+name = "cfgv"
+version = "3.4.0"
+description = "Validate configuration and produce human readable error messages."
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "cfgv-3.4.0-py2.py3-none-any.whl", hash = "sha256:b7265b1f29fd3316bfcd2b330d63d024f2bfd8bcb8b0272f8e19a504856c48f9"},
+ {file = "cfgv-3.4.0.tar.gz", hash = "sha256:e52591d4c5f5dead8e0f673fb16db7949d2cfb3f7da4582893288f0ded8fe560"},
+]
+
+[[package]]
+name = "click"
+version = "8.1.7"
+description = "Composable command line interface toolkit"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"},
+ {file = "click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"},
+]
+
+[package.dependencies]
+colorama = {version = "*", markers = "platform_system == \"Windows\""}
+
+[[package]]
+name = "colorama"
+version = "0.4.6"
+description = "Cross-platform colored terminal text."
+optional = false
+python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7"
+files = [
+ {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"},
+ {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"},
+]
+
+[[package]]
+name = "coverage"
+version = "7.4.1"
+description = "Code coverage measurement for Python"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "coverage-7.4.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:077d366e724f24fc02dbfe9d946534357fda71af9764ff99d73c3c596001bbd7"},
+ {file = "coverage-7.4.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0193657651f5399d433c92f8ae264aff31fc1d066deee4b831549526433f3f61"},
+ {file = "coverage-7.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d17bbc946f52ca67adf72a5ee783cd7cd3477f8f8796f59b4974a9b59cacc9ee"},
+ {file = "coverage-7.4.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a3277f5fa7483c927fe3a7b017b39351610265308f5267ac6d4c2b64cc1d8d25"},
+ {file = "coverage-7.4.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6dceb61d40cbfcf45f51e59933c784a50846dc03211054bd76b421a713dcdf19"},
+ {file = "coverage-7.4.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:6008adeca04a445ea6ef31b2cbaf1d01d02986047606f7da266629afee982630"},
+ {file = "coverage-7.4.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:c61f66d93d712f6e03369b6a7769233bfda880b12f417eefdd4f16d1deb2fc4c"},
+ {file = "coverage-7.4.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b9bb62fac84d5f2ff523304e59e5c439955fb3b7f44e3d7b2085184db74d733b"},
+ {file = "coverage-7.4.1-cp310-cp310-win32.whl", hash = "sha256:f86f368e1c7ce897bf2457b9eb61169a44e2ef797099fb5728482b8d69f3f016"},
+ {file = "coverage-7.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:869b5046d41abfea3e381dd143407b0d29b8282a904a19cb908fa24d090cc018"},
+ {file = "coverage-7.4.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b8ffb498a83d7e0305968289441914154fb0ef5d8b3157df02a90c6695978295"},
+ {file = "coverage-7.4.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3cacfaefe6089d477264001f90f55b7881ba615953414999c46cc9713ff93c8c"},
+ {file = "coverage-7.4.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d6850e6e36e332d5511a48a251790ddc545e16e8beaf046c03985c69ccb2676"},
+ {file = "coverage-7.4.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:18e961aa13b6d47f758cc5879383d27b5b3f3dcd9ce8cdbfdc2571fe86feb4dd"},
+ {file = "coverage-7.4.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dfd1e1b9f0898817babf840b77ce9fe655ecbe8b1b327983df485b30df8cc011"},
+ {file = "coverage-7.4.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:6b00e21f86598b6330f0019b40fb397e705135040dbedc2ca9a93c7441178e74"},
+ {file = "coverage-7.4.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:536d609c6963c50055bab766d9951b6c394759190d03311f3e9fcf194ca909e1"},
+ {file = "coverage-7.4.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:7ac8f8eb153724f84885a1374999b7e45734bf93a87d8df1e7ce2146860edef6"},
+ {file = "coverage-7.4.1-cp311-cp311-win32.whl", hash = "sha256:f3771b23bb3675a06f5d885c3630b1d01ea6cac9e84a01aaf5508706dba546c5"},
+ {file = "coverage-7.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:9d2f9d4cc2a53b38cabc2d6d80f7f9b7e3da26b2f53d48f05876fef7956b6968"},
+ {file = "coverage-7.4.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f68ef3660677e6624c8cace943e4765545f8191313a07288a53d3da188bd8581"},
+ {file = "coverage-7.4.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:23b27b8a698e749b61809fb637eb98ebf0e505710ec46a8aa6f1be7dc0dc43a6"},
+ {file = "coverage-7.4.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e3424c554391dc9ef4a92ad28665756566a28fecf47308f91841f6c49288e66"},
+ {file = "coverage-7.4.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e0860a348bf7004c812c8368d1fc7f77fe8e4c095d661a579196a9533778e156"},
+ {file = "coverage-7.4.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fe558371c1bdf3b8fa03e097c523fb9645b8730399c14fe7721ee9c9e2a545d3"},
+ {file = "coverage-7.4.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:3468cc8720402af37b6c6e7e2a9cdb9f6c16c728638a2ebc768ba1ef6f26c3a1"},
+ {file = "coverage-7.4.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:02f2edb575d62172aa28fe00efe821ae31f25dc3d589055b3fb64d51e52e4ab1"},
+ {file = "coverage-7.4.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:ca6e61dc52f601d1d224526360cdeab0d0712ec104a2ce6cc5ccef6ed9a233bc"},
+ {file = "coverage-7.4.1-cp312-cp312-win32.whl", hash = "sha256:ca7b26a5e456a843b9b6683eada193fc1f65c761b3a473941efe5a291f604c74"},
+ {file = "coverage-7.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:85ccc5fa54c2ed64bd91ed3b4a627b9cce04646a659512a051fa82a92c04a448"},
+ {file = "coverage-7.4.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8bdb0285a0202888d19ec6b6d23d5990410decb932b709f2b0dfe216d031d218"},
+ {file = "coverage-7.4.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:918440dea04521f499721c039863ef95433314b1db00ff826a02580c1f503e45"},
+ {file = "coverage-7.4.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:379d4c7abad5afbe9d88cc31ea8ca262296480a86af945b08214eb1a556a3e4d"},
+ {file = "coverage-7.4.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b094116f0b6155e36a304ff912f89bbb5067157aff5f94060ff20bbabdc8da06"},
+ {file = "coverage-7.4.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f2f5968608b1fe2a1d00d01ad1017ee27efd99b3437e08b83ded9b7af3f6f766"},
+ {file = "coverage-7.4.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:10e88e7f41e6197ea0429ae18f21ff521d4f4490aa33048f6c6f94c6045a6a75"},
+ {file = "coverage-7.4.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a4a3907011d39dbc3e37bdc5df0a8c93853c369039b59efa33a7b6669de04c60"},
+ {file = "coverage-7.4.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6d224f0c4c9c98290a6990259073f496fcec1b5cc613eecbd22786d398ded3ad"},
+ {file = "coverage-7.4.1-cp38-cp38-win32.whl", hash = "sha256:23f5881362dcb0e1a92b84b3c2809bdc90db892332daab81ad8f642d8ed55042"},
+ {file = "coverage-7.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:a07f61fc452c43cd5328b392e52555f7d1952400a1ad09086c4a8addccbd138d"},
+ {file = "coverage-7.4.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8e738a492b6221f8dcf281b67129510835461132b03024830ac0e554311a5c54"},
+ {file = "coverage-7.4.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:46342fed0fff72efcda77040b14728049200cbba1279e0bf1188f1f2078c1d70"},
+ {file = "coverage-7.4.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9641e21670c68c7e57d2053ddf6c443e4f0a6e18e547e86af3fad0795414a628"},
+ {file = "coverage-7.4.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:aeb2c2688ed93b027eb0d26aa188ada34acb22dceea256d76390eea135083950"},
+ {file = "coverage-7.4.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d12c923757de24e4e2110cf8832d83a886a4cf215c6e61ed506006872b43a6d1"},
+ {file = "coverage-7.4.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0491275c3b9971cdbd28a4595c2cb5838f08036bca31765bad5e17edf900b2c7"},
+ {file = "coverage-7.4.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:8dfc5e195bbef80aabd81596ef52a1277ee7143fe419efc3c4d8ba2754671756"},
+ {file = "coverage-7.4.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:1a78b656a4d12b0490ca72651fe4d9f5e07e3c6461063a9b6265ee45eb2bdd35"},
+ {file = "coverage-7.4.1-cp39-cp39-win32.whl", hash = "sha256:f90515974b39f4dea2f27c0959688621b46d96d5a626cf9c53dbc653a895c05c"},
+ {file = "coverage-7.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:64e723ca82a84053dd7bfcc986bdb34af8d9da83c521c19d6b472bc6880e191a"},
+ {file = "coverage-7.4.1-pp38.pp39.pp310-none-any.whl", hash = "sha256:32a8d985462e37cfdab611a6f95b09d7c091d07668fdc26e47a725ee575fe166"},
+ {file = "coverage-7.4.1.tar.gz", hash = "sha256:1ed4b95480952b1a26d863e546fa5094564aa0065e1e5f0d4d0041f293251d04"},
+]
+
+[package.extras]
+toml = ["tomli"]
+
+[[package]]
+name = "dill"
+version = "0.3.8"
+description = "serialize all of Python"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "dill-0.3.8-py3-none-any.whl", hash = "sha256:c36ca9ffb54365bdd2f8eb3eff7d2a21237f8452b57ace88b1ac615b7e815bd7"},
+ {file = "dill-0.3.8.tar.gz", hash = "sha256:3ebe3c479ad625c4553aca177444d89b486b1d84982eeacded644afc0cf797ca"},
+]
+
+[package.extras]
+graph = ["objgraph (>=1.7.2)"]
+profile = ["gprof2dot (>=2022.7.29)"]
+
+[[package]]
+name = "distlib"
+version = "0.3.8"
+description = "Distribution utilities"
+optional = false
+python-versions = "*"
+files = [
+ {file = "distlib-0.3.8-py2.py3-none-any.whl", hash = "sha256:034db59a0b96f8ca18035f36290806a9a6e6bd9d1ff91e45a7f172eb17e51784"},
+ {file = "distlib-0.3.8.tar.gz", hash = "sha256:1530ea13e350031b6312d8580ddb6b27a104275a31106523b8f123787f494f64"},
+]
+
+[[package]]
+name = "execnet"
+version = "2.0.2"
+description = "execnet: rapid multi-Python deployment"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "execnet-2.0.2-py3-none-any.whl", hash = "sha256:88256416ae766bc9e8895c76a87928c0012183da3cc4fc18016e6f050e025f41"},
+ {file = "execnet-2.0.2.tar.gz", hash = "sha256:cc59bc4423742fd71ad227122eb0dd44db51efb3dc4095b45ac9a08c770096af"},
+]
+
+[package.extras]
+testing = ["hatch", "pre-commit", "pytest", "tox"]
+
+[[package]]
+name = "filelock"
+version = "3.13.1"
+description = "A platform independent file lock."
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "filelock-3.13.1-py3-none-any.whl", hash = "sha256:57dbda9b35157b05fb3e58ee91448612eb674172fab98ee235ccb0b5bee19a1c"},
+ {file = "filelock-3.13.1.tar.gz", hash = "sha256:521f5f56c50f8426f5e03ad3b281b490a87ef15bc6c526f168290f0c7148d44e"},
+]
+
+[package.extras]
+docs = ["furo (>=2023.9.10)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.24)"]
+testing = ["covdefaults (>=2.3)", "coverage (>=7.3.2)", "diff-cover (>=8)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)", "pytest-timeout (>=2.2)"]
+typing = ["typing-extensions (>=4.8)"]
+
+[[package]]
+name = "freezegun"
+version = "1.4.0"
+description = "Let your Python tests travel through time"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "freezegun-1.4.0-py3-none-any.whl", hash = "sha256:55e0fc3c84ebf0a96a5aa23ff8b53d70246479e9a68863f1fcac5a3e52f19dd6"},
+ {file = "freezegun-1.4.0.tar.gz", hash = "sha256:10939b0ba0ff5adaecf3b06a5c2f73071d9678e507c5eaedb23c761d56ac774b"},
+]
+
+[package.dependencies]
+python-dateutil = ">=2.7"
+
+[[package]]
+name = "frozenlist"
+version = "1.4.1"
+description = "A list-like structure which implements collections.abc.MutableSequence"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "frozenlist-1.4.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:f9aa1878d1083b276b0196f2dfbe00c9b7e752475ed3b682025ff20c1c1f51ac"},
+ {file = "frozenlist-1.4.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:29acab3f66f0f24674b7dc4736477bcd4bc3ad4b896f5f45379a67bce8b96868"},
+ {file = "frozenlist-1.4.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:74fb4bee6880b529a0c6560885fce4dc95936920f9f20f53d99a213f7bf66776"},
+ {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:590344787a90ae57d62511dd7c736ed56b428f04cd8c161fcc5e7232c130c69a"},
+ {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:068b63f23b17df8569b7fdca5517edef76171cf3897eb68beb01341131fbd2ad"},
+ {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5c849d495bf5154cd8da18a9eb15db127d4dba2968d88831aff6f0331ea9bd4c"},
+ {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9750cc7fe1ae3b1611bb8cfc3f9ec11d532244235d75901fb6b8e42ce9229dfe"},
+ {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a9b2de4cf0cdd5bd2dee4c4f63a653c61d2408055ab77b151c1957f221cabf2a"},
+ {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0633c8d5337cb5c77acbccc6357ac49a1770b8c487e5b3505c57b949b4b82e98"},
+ {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:27657df69e8801be6c3638054e202a135c7f299267f1a55ed3a598934f6c0d75"},
+ {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:f9a3ea26252bd92f570600098783d1371354d89d5f6b7dfd87359d669f2109b5"},
+ {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:4f57dab5fe3407b6c0c1cc907ac98e8a189f9e418f3b6e54d65a718aaafe3950"},
+ {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:e02a0e11cf6597299b9f3bbd3f93d79217cb90cfd1411aec33848b13f5c656cc"},
+ {file = "frozenlist-1.4.1-cp310-cp310-win32.whl", hash = "sha256:a828c57f00f729620a442881cc60e57cfcec6842ba38e1b19fd3e47ac0ff8dc1"},
+ {file = "frozenlist-1.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:f56e2333dda1fe0f909e7cc59f021eba0d2307bc6f012a1ccf2beca6ba362439"},
+ {file = "frozenlist-1.4.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:a0cb6f11204443f27a1628b0e460f37fb30f624be6051d490fa7d7e26d4af3d0"},
+ {file = "frozenlist-1.4.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b46c8ae3a8f1f41a0d2ef350c0b6e65822d80772fe46b653ab6b6274f61d4a49"},
+ {file = "frozenlist-1.4.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:fde5bd59ab5357e3853313127f4d3565fc7dad314a74d7b5d43c22c6a5ed2ced"},
+ {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:722e1124aec435320ae01ee3ac7bec11a5d47f25d0ed6328f2273d287bc3abb0"},
+ {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2471c201b70d58a0f0c1f91261542a03d9a5e088ed3dc6c160d614c01649c106"},
+ {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c757a9dd70d72b076d6f68efdbb9bc943665ae954dad2801b874c8c69e185068"},
+ {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f146e0911cb2f1da549fc58fc7bcd2b836a44b79ef871980d605ec392ff6b0d2"},
+ {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f9c515e7914626b2a2e1e311794b4c35720a0be87af52b79ff8e1429fc25f19"},
+ {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:c302220494f5c1ebeb0912ea782bcd5e2f8308037b3c7553fad0e48ebad6ad82"},
+ {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:442acde1e068288a4ba7acfe05f5f343e19fac87bfc96d89eb886b0363e977ec"},
+ {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:1b280e6507ea8a4fa0c0a7150b4e526a8d113989e28eaaef946cc77ffd7efc0a"},
+ {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:fe1a06da377e3a1062ae5fe0926e12b84eceb8a50b350ddca72dc85015873f74"},
+ {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:db9e724bebd621d9beca794f2a4ff1d26eed5965b004a97f1f1685a173b869c2"},
+ {file = "frozenlist-1.4.1-cp311-cp311-win32.whl", hash = "sha256:e774d53b1a477a67838a904131c4b0eef6b3d8a651f8b138b04f748fccfefe17"},
+ {file = "frozenlist-1.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:fb3c2db03683b5767dedb5769b8a40ebb47d6f7f45b1b3e3b4b51ec8ad9d9825"},
+ {file = "frozenlist-1.4.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:1979bc0aeb89b33b588c51c54ab0161791149f2461ea7c7c946d95d5f93b56ae"},
+ {file = "frozenlist-1.4.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:cc7b01b3754ea68a62bd77ce6020afaffb44a590c2289089289363472d13aedb"},
+ {file = "frozenlist-1.4.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c9c92be9fd329ac801cc420e08452b70e7aeab94ea4233a4804f0915c14eba9b"},
+ {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c3894db91f5a489fc8fa6a9991820f368f0b3cbdb9cd8849547ccfab3392d86"},
+ {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ba60bb19387e13597fb059f32cd4d59445d7b18b69a745b8f8e5db0346f33480"},
+ {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8aefbba5f69d42246543407ed2461db31006b0f76c4e32dfd6f42215a2c41d09"},
+ {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:780d3a35680ced9ce682fbcf4cb9c2bad3136eeff760ab33707b71db84664e3a"},
+ {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9acbb16f06fe7f52f441bb6f413ebae6c37baa6ef9edd49cdd567216da8600cd"},
+ {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:23b701e65c7b36e4bf15546a89279bd4d8675faabc287d06bbcfac7d3c33e1e6"},
+ {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:3e0153a805a98f5ada7e09826255ba99fb4f7524bb81bf6b47fb702666484ae1"},
+ {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:dd9b1baec094d91bf36ec729445f7769d0d0cf6b64d04d86e45baf89e2b9059b"},
+ {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:1a4471094e146b6790f61b98616ab8e44f72661879cc63fa1049d13ef711e71e"},
+ {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:5667ed53d68d91920defdf4035d1cdaa3c3121dc0b113255124bcfada1cfa1b8"},
+ {file = "frozenlist-1.4.1-cp312-cp312-win32.whl", hash = "sha256:beee944ae828747fd7cb216a70f120767fc9f4f00bacae8543c14a6831673f89"},
+ {file = "frozenlist-1.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:64536573d0a2cb6e625cf309984e2d873979709f2cf22839bf2d61790b448ad5"},
+ {file = "frozenlist-1.4.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:20b51fa3f588ff2fe658663db52a41a4f7aa6c04f6201449c6c7c476bd255c0d"},
+ {file = "frozenlist-1.4.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:410478a0c562d1a5bcc2f7ea448359fcb050ed48b3c6f6f4f18c313a9bdb1826"},
+ {file = "frozenlist-1.4.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:c6321c9efe29975232da3bd0af0ad216800a47e93d763ce64f291917a381b8eb"},
+ {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:48f6a4533887e189dae092f1cf981f2e3885175f7a0f33c91fb5b7b682b6bab6"},
+ {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6eb73fa5426ea69ee0e012fb59cdc76a15b1283d6e32e4f8dc4482ec67d1194d"},
+ {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fbeb989b5cc29e8daf7f976b421c220f1b8c731cbf22b9130d8815418ea45887"},
+ {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:32453c1de775c889eb4e22f1197fe3bdfe457d16476ea407472b9442e6295f7a"},
+ {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:693945278a31f2086d9bf3df0fe8254bbeaef1fe71e1351c3bd730aa7d31c41b"},
+ {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:1d0ce09d36d53bbbe566fe296965b23b961764c0bcf3ce2fa45f463745c04701"},
+ {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:3a670dc61eb0d0eb7080890c13de3066790f9049b47b0de04007090807c776b0"},
+ {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:dca69045298ce5c11fd539682cff879cc1e664c245d1c64da929813e54241d11"},
+ {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:a06339f38e9ed3a64e4c4e43aec7f59084033647f908e4259d279a52d3757d09"},
+ {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b7f2f9f912dca3934c1baec2e4585a674ef16fe00218d833856408c48d5beee7"},
+ {file = "frozenlist-1.4.1-cp38-cp38-win32.whl", hash = "sha256:e7004be74cbb7d9f34553a5ce5fb08be14fb33bc86f332fb71cbe5216362a497"},
+ {file = "frozenlist-1.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:5a7d70357e7cee13f470c7883a063aae5fe209a493c57d86eb7f5a6f910fae09"},
+ {file = "frozenlist-1.4.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:bfa4a17e17ce9abf47a74ae02f32d014c5e9404b6d9ac7f729e01562bbee601e"},
+ {file = "frozenlist-1.4.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b7e3ed87d4138356775346e6845cccbe66cd9e207f3cd11d2f0b9fd13681359d"},
+ {file = "frozenlist-1.4.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c99169d4ff810155ca50b4da3b075cbde79752443117d89429595c2e8e37fed8"},
+ {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:edb678da49d9f72c9f6c609fbe41a5dfb9a9282f9e6a2253d5a91e0fc382d7c0"},
+ {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6db4667b187a6742b33afbbaf05a7bc551ffcf1ced0000a571aedbb4aa42fc7b"},
+ {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:55fdc093b5a3cb41d420884cdaf37a1e74c3c37a31f46e66286d9145d2063bd0"},
+ {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:82e8211d69a4f4bc360ea22cd6555f8e61a1bd211d1d5d39d3d228b48c83a897"},
+ {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:89aa2c2eeb20957be2d950b85974b30a01a762f3308cd02bb15e1ad632e22dc7"},
+ {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9d3e0c25a2350080e9319724dede4f31f43a6c9779be48021a7f4ebde8b2d742"},
+ {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7268252af60904bf52c26173cbadc3a071cece75f873705419c8681f24d3edea"},
+ {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:0c250a29735d4f15321007fb02865f0e6b6a41a6b88f1f523ca1596ab5f50bd5"},
+ {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:96ec70beabbd3b10e8bfe52616a13561e58fe84c0101dd031dc78f250d5128b9"},
+ {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:23b2d7679b73fe0e5a4560b672a39f98dfc6f60df63823b0a9970525325b95f6"},
+ {file = "frozenlist-1.4.1-cp39-cp39-win32.whl", hash = "sha256:a7496bfe1da7fb1a4e1cc23bb67c58fab69311cc7d32b5a99c2007b4b2a0e932"},
+ {file = "frozenlist-1.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:e6a20a581f9ce92d389a8c7d7c3dd47c81fd5d6e655c8dddf341e14aa48659d0"},
+ {file = "frozenlist-1.4.1-py3-none-any.whl", hash = "sha256:04ced3e6a46b4cfffe20f9ae482818e34eba9b5fb0ce4056e4cc9b6e212d09b7"},
+ {file = "frozenlist-1.4.1.tar.gz", hash = "sha256:c037a86e8513059a2613aaba4d817bb90b9d9b6b69aace3ce9c877e8c8ed402b"},
+]
+
+[[package]]
+name = "identify"
+version = "2.5.33"
+description = "File identification library for Python"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "identify-2.5.33-py2.py3-none-any.whl", hash = "sha256:d40ce5fcd762817627670da8a7d8d8e65f24342d14539c59488dc603bf662e34"},
+ {file = "identify-2.5.33.tar.gz", hash = "sha256:161558f9fe4559e1557e1bff323e8631f6a0e4837f7497767c1782832f16b62d"},
+]
+
+[package.extras]
+license = ["ukkonen"]
+
+[[package]]
+name = "idna"
+version = "3.6"
+description = "Internationalized Domain Names in Applications (IDNA)"
+optional = false
+python-versions = ">=3.5"
+files = [
+ {file = "idna-3.6-py3-none-any.whl", hash = "sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f"},
+ {file = "idna-3.6.tar.gz", hash = "sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca"},
+]
+
+[[package]]
+name = "iniconfig"
+version = "2.0.0"
+description = "brain-dead simple config-ini parsing"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"},
+ {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"},
+]
+
+[[package]]
+name = "isort"
+version = "5.13.2"
+description = "A Python utility / library to sort Python imports."
+optional = false
+python-versions = ">=3.8.0"
+files = [
+ {file = "isort-5.13.2-py3-none-any.whl", hash = "sha256:8ca5e72a8d85860d5a3fa69b8745237f2939afe12dbf656afbcb47fe72d947a6"},
+ {file = "isort-5.13.2.tar.gz", hash = "sha256:48fdfcb9face5d58a4f6dde2e72a1fb8dcaf8ab26f95ab49fab84c2ddefb0109"},
+]
+
+[package.extras]
+colors = ["colorama (>=0.4.6)"]
+
+[[package]]
+name = "libcst"
+version = "1.1.0"
+description = "A concrete syntax tree with AST-like properties for Python 3.5, 3.6, 3.7, 3.8, 3.9, and 3.10 programs."
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "libcst-1.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:63f75656fd733dc20354c46253fde3cf155613e37643c3eaf6f8818e95b7a3d1"},
+ {file = "libcst-1.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8ae11eb1ea55a16dc0cdc61b41b29ac347da70fec14cc4381248e141ee2fbe6c"},
+ {file = "libcst-1.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4bc745d0c06420fe2644c28d6ddccea9474fb68a2135904043676deb4fa1e6bc"},
+ {file = "libcst-1.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8c1f2da45f1c45634090fd8672c15e0159fdc46853336686959b2d093b6e10fa"},
+ {file = "libcst-1.1.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:003e5e83a12eed23542c4ea20fdc8de830887cc03662432bb36f84f8c4841b81"},
+ {file = "libcst-1.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:3ebbb9732ae3cc4ae7a0e97890bed0a57c11d6df28790c2b9c869f7da653c7c7"},
+ {file = "libcst-1.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d68c34e3038d3d1d6324eb47744cbf13f2c65e1214cf49db6ff2a6603c1cd838"},
+ {file = "libcst-1.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9dffa1795c2804d183efb01c0f1efd20a7831db6a21a0311edf90b4100d67436"},
+ {file = "libcst-1.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cc9b6ac36d7ec9db2f053014ea488086ca2ed9c322be104fbe2c71ca759da4bb"},
+ {file = "libcst-1.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:78b7a38ec4c1c009ac39027d51558b52851fb9234669ba5ba62283185963a31c"},
+ {file = "libcst-1.1.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5297a16e575be8173185e936b7765c89a3ca69d4ae217a4af161814a0f9745a7"},
+ {file = "libcst-1.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:7ccaf53925f81118aeaadb068a911fac8abaff608817d7343da280616a5ca9c1"},
+ {file = "libcst-1.1.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:75816647736f7e09c6120bdbf408456f99b248d6272277eed9a58cf50fb8bc7d"},
+ {file = "libcst-1.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c8f26250f87ca849a7303ed7a4fd6b2c7ac4dec16b7d7e68ca6a476d7c9bfcdb"},
+ {file = "libcst-1.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2d37326bd6f379c64190a28947a586b949de3a76be00176b0732c8ee87d67ebe"},
+ {file = "libcst-1.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e3d8cf974cfa2487b28f23f56c4bff90d550ef16505e58b0dca0493d5293784b"},
+ {file = "libcst-1.1.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:82d1271403509b0a4ee6ff7917c2d33b5a015f44d1e208abb1da06ba93b2a378"},
+ {file = "libcst-1.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:bca1841693941fdd18371824bb19a9702d5784cd347cb8231317dbdc7062c5bc"},
+ {file = "libcst-1.1.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:f36f592e035ef84f312a12b75989dde6a5f6767fe99146cdae6a9ee9aff40dd0"},
+ {file = "libcst-1.1.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:f561c9a84eca18be92f4ad90aa9bd873111efbea995449301719a1a7805dbc5c"},
+ {file = "libcst-1.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:97fbc73c87e9040e148881041fd5ffa2a6ebf11f64b4ccb5b52e574b95df1a15"},
+ {file = "libcst-1.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:99fdc1929703fd9e7408aed2e03f58701c5280b05c8911753a8d8619f7dfdda5"},
+ {file = "libcst-1.1.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0bf69cbbab5016d938aac4d3ae70ba9ccb3f90363c588b3b97be434e6ba95403"},
+ {file = "libcst-1.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:fe41b33aa73635b1651f64633f429f7aa21f86d2db5748659a99d9b7b1ed2a90"},
+ {file = "libcst-1.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:73c086705ed34dbad16c62c9adca4249a556c1b022993d511da70ea85feaf669"},
+ {file = "libcst-1.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3a07ecfabbbb8b93209f952a365549e65e658831e9231649f4f4e4263cad24b1"},
+ {file = "libcst-1.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c653d9121d6572d8b7f8abf20f88b0a41aab77ff5a6a36e5a0ec0f19af0072e8"},
+ {file = "libcst-1.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5f1cd308a4c2f71d5e4eec6ee693819933a03b78edb2e4cc5e3ad1afd5fb3f07"},
+ {file = "libcst-1.1.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8afb6101b8b3c86c5f9cec6b90ab4da16c3c236fe7396f88e8b93542bb341f7c"},
+ {file = "libcst-1.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:d22d1abfe49aa60fc61fa867e10875a9b3024ba5a801112f4d7ba42d8d53242e"},
+ {file = "libcst-1.1.0.tar.gz", hash = "sha256:0acbacb9a170455701845b7e940e2d7b9519db35a86768d86330a0b0deae1086"},
+]
+
+[package.dependencies]
+pyyaml = ">=5.2"
+typing-extensions = ">=3.7.4.2"
+typing-inspect = ">=0.4.0"
+
+[package.extras]
+dev = ["Sphinx (>=5.1.1)", "black (==23.9.1)", "build (>=0.10.0)", "coverage (>=4.5.4)", "fixit (==2.0.0.post1)", "flake8 (>=3.7.8,<5)", "hypothesis (>=4.36.0)", "hypothesmith (>=0.0.4)", "jinja2 (==3.1.2)", "jupyter (>=1.0.0)", "maturin (>=0.8.3,<0.16)", "nbsphinx (>=0.4.2)", "prompt-toolkit (>=2.0.9)", "pyre-check (==0.9.18)", "setuptools-rust (>=1.5.2)", "setuptools-scm (>=6.0.1)", "slotscheck (>=0.7.1)", "sphinx-rtd-theme (>=0.4.3)", "ufmt (==2.2.0)", "usort (==1.0.7)"]
+
+[[package]]
+name = "lxml"
+version = "5.1.0"
+description = "Powerful and Pythonic XML processing library combining libxml2/libxslt with the ElementTree API."
+optional = false
+python-versions = ">=3.6"
+files = [
+ {file = "lxml-5.1.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:704f5572ff473a5f897745abebc6df40f22d4133c1e0a1f124e4f2bd3330ff7e"},
+ {file = "lxml-5.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9d3c0f8567ffe7502d969c2c1b809892dc793b5d0665f602aad19895f8d508da"},
+ {file = "lxml-5.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5fcfbebdb0c5d8d18b84118842f31965d59ee3e66996ac842e21f957eb76138c"},
+ {file = "lxml-5.1.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2f37c6d7106a9d6f0708d4e164b707037b7380fcd0b04c5bd9cae1fb46a856fb"},
+ {file = "lxml-5.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2befa20a13f1a75c751f47e00929fb3433d67eb9923c2c0b364de449121f447c"},
+ {file = "lxml-5.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:22b7ee4c35f374e2c20337a95502057964d7e35b996b1c667b5c65c567d2252a"},
+ {file = "lxml-5.1.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:bf8443781533b8d37b295016a4b53c1494fa9a03573c09ca5104550c138d5c05"},
+ {file = "lxml-5.1.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:82bddf0e72cb2af3cbba7cec1d2fd11fda0de6be8f4492223d4a268713ef2147"},
+ {file = "lxml-5.1.0-cp310-cp310-win32.whl", hash = "sha256:b66aa6357b265670bb574f050ffceefb98549c721cf28351b748be1ef9577d93"},
+ {file = "lxml-5.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:4946e7f59b7b6a9e27bef34422f645e9a368cb2be11bf1ef3cafc39a1f6ba68d"},
+ {file = "lxml-5.1.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:14deca1460b4b0f6b01f1ddc9557704e8b365f55c63070463f6c18619ebf964f"},
+ {file = "lxml-5.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ed8c3d2cd329bf779b7ed38db176738f3f8be637bb395ce9629fc76f78afe3d4"},
+ {file = "lxml-5.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:436a943c2900bb98123b06437cdd30580a61340fbdb7b28aaf345a459c19046a"},
+ {file = "lxml-5.1.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:acb6b2f96f60f70e7f34efe0c3ea34ca63f19ca63ce90019c6cbca6b676e81fa"},
+ {file = "lxml-5.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:af8920ce4a55ff41167ddbc20077f5698c2e710ad3353d32a07d3264f3a2021e"},
+ {file = "lxml-5.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7cfced4a069003d8913408e10ca8ed092c49a7f6cefee9bb74b6b3e860683b45"},
+ {file = "lxml-5.1.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:9e5ac3437746189a9b4121db2a7b86056ac8786b12e88838696899328fc44bb2"},
+ {file = "lxml-5.1.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:f4c9bda132ad108b387c33fabfea47866af87f4ea6ffb79418004f0521e63204"},
+ {file = "lxml-5.1.0-cp311-cp311-win32.whl", hash = "sha256:bc64d1b1dab08f679fb89c368f4c05693f58a9faf744c4d390d7ed1d8223869b"},
+ {file = "lxml-5.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:a5ab722ae5a873d8dcee1f5f45ddd93c34210aed44ff2dc643b5025981908cda"},
+ {file = "lxml-5.1.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:9aa543980ab1fbf1720969af1d99095a548ea42e00361e727c58a40832439114"},
+ {file = "lxml-5.1.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:6f11b77ec0979f7e4dc5ae081325a2946f1fe424148d3945f943ceaede98adb8"},
+ {file = "lxml-5.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a36c506e5f8aeb40680491d39ed94670487ce6614b9d27cabe45d94cd5d63e1e"},
+ {file = "lxml-5.1.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f643ffd2669ffd4b5a3e9b41c909b72b2a1d5e4915da90a77e119b8d48ce867a"},
+ {file = "lxml-5.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:16dd953fb719f0ffc5bc067428fc9e88f599e15723a85618c45847c96f11f431"},
+ {file = "lxml-5.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:16018f7099245157564d7148165132c70adb272fb5a17c048ba70d9cc542a1a1"},
+ {file = "lxml-5.1.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:82cd34f1081ae4ea2ede3d52f71b7be313756e99b4b5f829f89b12da552d3aa3"},
+ {file = "lxml-5.1.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:19a1bc898ae9f06bccb7c3e1dfd73897ecbbd2c96afe9095a6026016e5ca97b8"},
+ {file = "lxml-5.1.0-cp312-cp312-win32.whl", hash = "sha256:13521a321a25c641b9ea127ef478b580b5ec82aa2e9fc076c86169d161798b01"},
+ {file = "lxml-5.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:1ad17c20e3666c035db502c78b86e58ff6b5991906e55bdbef94977700c72623"},
+ {file = "lxml-5.1.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:24ef5a4631c0b6cceaf2dbca21687e29725b7c4e171f33a8f8ce23c12558ded1"},
+ {file = "lxml-5.1.0-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8d2900b7f5318bc7ad8631d3d40190b95ef2aa8cc59473b73b294e4a55e9f30f"},
+ {file = "lxml-5.1.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:601f4a75797d7a770daed8b42b97cd1bb1ba18bd51a9382077a6a247a12aa38d"},
+ {file = "lxml-5.1.0-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b4b68c961b5cc402cbd99cca5eb2547e46ce77260eb705f4d117fd9c3f932b95"},
+ {file = "lxml-5.1.0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:afd825e30f8d1f521713a5669b63657bcfe5980a916c95855060048b88e1adb7"},
+ {file = "lxml-5.1.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:262bc5f512a66b527d026518507e78c2f9c2bd9eb5c8aeeb9f0eb43fcb69dc67"},
+ {file = "lxml-5.1.0-cp36-cp36m-win32.whl", hash = "sha256:e856c1c7255c739434489ec9c8aa9cdf5179785d10ff20add308b5d673bed5cd"},
+ {file = "lxml-5.1.0-cp36-cp36m-win_amd64.whl", hash = "sha256:c7257171bb8d4432fe9d6fdde4d55fdbe663a63636a17f7f9aaba9bcb3153ad7"},
+ {file = "lxml-5.1.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b9e240ae0ba96477682aa87899d94ddec1cc7926f9df29b1dd57b39e797d5ab5"},
+ {file = "lxml-5.1.0-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a96f02ba1bcd330807fc060ed91d1f7a20853da6dd449e5da4b09bfcc08fdcf5"},
+ {file = "lxml-5.1.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e3898ae2b58eeafedfe99e542a17859017d72d7f6a63de0f04f99c2cb125936"},
+ {file = "lxml-5.1.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:61c5a7edbd7c695e54fca029ceb351fc45cd8860119a0f83e48be44e1c464862"},
+ {file = "lxml-5.1.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:3aeca824b38ca78d9ee2ab82bd9883083d0492d9d17df065ba3b94e88e4d7ee6"},
+ {file = "lxml-5.1.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8f52fe6859b9db71ee609b0c0a70fea5f1e71c3462ecf144ca800d3f434f0764"},
+ {file = "lxml-5.1.0-cp37-cp37m-win32.whl", hash = "sha256:d42e3a3fc18acc88b838efded0e6ec3edf3e328a58c68fbd36a7263a874906c8"},
+ {file = "lxml-5.1.0-cp37-cp37m-win_amd64.whl", hash = "sha256:eac68f96539b32fce2c9b47eb7c25bb2582bdaf1bbb360d25f564ee9e04c542b"},
+ {file = "lxml-5.1.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:ae15347a88cf8af0949a9872b57a320d2605ae069bcdf047677318bc0bba45b1"},
+ {file = "lxml-5.1.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c26aab6ea9c54d3bed716b8851c8bfc40cb249b8e9880e250d1eddde9f709bf5"},
+ {file = "lxml-5.1.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:342e95bddec3a698ac24378d61996b3ee5ba9acfeb253986002ac53c9a5f6f84"},
+ {file = "lxml-5.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:725e171e0b99a66ec8605ac77fa12239dbe061482ac854d25720e2294652eeaa"},
+ {file = "lxml-5.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d184e0d5c918cff04cdde9dbdf9600e960161d773666958c9d7b565ccc60c45"},
+ {file = "lxml-5.1.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:98f3f020a2b736566c707c8e034945c02aa94e124c24f77ca097c446f81b01f1"},
+ {file = "lxml-5.1.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6d48fc57e7c1e3df57be5ae8614bab6d4e7b60f65c5457915c26892c41afc59e"},
+ {file = "lxml-5.1.0-cp38-cp38-win32.whl", hash = "sha256:7ec465e6549ed97e9f1e5ed51c657c9ede767bc1c11552f7f4d022c4df4a977a"},
+ {file = "lxml-5.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:b21b4031b53d25b0858d4e124f2f9131ffc1530431c6d1321805c90da78388d1"},
+ {file = "lxml-5.1.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:52427a7eadc98f9e62cb1368a5079ae826f94f05755d2d567d93ee1bc3ceb354"},
+ {file = "lxml-5.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6a2a2c724d97c1eb8cf966b16ca2915566a4904b9aad2ed9a09c748ffe14f969"},
+ {file = "lxml-5.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:843b9c835580d52828d8f69ea4302537337a21e6b4f1ec711a52241ba4a824f3"},
+ {file = "lxml-5.1.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9b99f564659cfa704a2dd82d0684207b1aadf7d02d33e54845f9fc78e06b7581"},
+ {file = "lxml-5.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4f8b0c78e7aac24979ef09b7f50da871c2de2def043d468c4b41f512d831e912"},
+ {file = "lxml-5.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9bcf86dfc8ff3e992fed847c077bd875d9e0ba2fa25d859c3a0f0f76f07f0c8d"},
+ {file = "lxml-5.1.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:49a9b4af45e8b925e1cd6f3b15bbba2c81e7dba6dce170c677c9cda547411e14"},
+ {file = "lxml-5.1.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:280f3edf15c2a967d923bcfb1f8f15337ad36f93525828b40a0f9d6c2ad24890"},
+ {file = "lxml-5.1.0-cp39-cp39-win32.whl", hash = "sha256:ed7326563024b6e91fef6b6c7a1a2ff0a71b97793ac33dbbcf38f6005e51ff6e"},
+ {file = "lxml-5.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:8d7b4beebb178e9183138f552238f7e6613162a42164233e2bda00cb3afac58f"},
+ {file = "lxml-5.1.0-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:9bd0ae7cc2b85320abd5e0abad5ccee5564ed5f0cc90245d2f9a8ef330a8deae"},
+ {file = "lxml-5.1.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d8c1d679df4361408b628f42b26a5d62bd3e9ba7f0c0e7969f925021554755aa"},
+ {file = "lxml-5.1.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:2ad3a8ce9e8a767131061a22cd28fdffa3cd2dc193f399ff7b81777f3520e372"},
+ {file = "lxml-5.1.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:304128394c9c22b6569eba2a6d98392b56fbdfbad58f83ea702530be80d0f9df"},
+ {file = "lxml-5.1.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d74fcaf87132ffc0447b3c685a9f862ffb5b43e70ea6beec2fb8057d5d2a1fea"},
+ {file = "lxml-5.1.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:8cf5877f7ed384dabfdcc37922c3191bf27e55b498fecece9fd5c2c7aaa34c33"},
+ {file = "lxml-5.1.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:877efb968c3d7eb2dad540b6cabf2f1d3c0fbf4b2d309a3c141f79c7e0061324"},
+ {file = "lxml-5.1.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3f14a4fb1c1c402a22e6a341a24c1341b4a3def81b41cd354386dcb795f83897"},
+ {file = "lxml-5.1.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:25663d6e99659544ee8fe1b89b1a8c0aaa5e34b103fab124b17fa958c4a324a6"},
+ {file = "lxml-5.1.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:8b9f19df998761babaa7f09e6bc169294eefafd6149aaa272081cbddc7ba4ca3"},
+ {file = "lxml-5.1.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5e53d7e6a98b64fe54775d23a7c669763451340c3d44ad5e3a3b48a1efbdc96f"},
+ {file = "lxml-5.1.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:c3cd1fc1dc7c376c54440aeaaa0dcc803d2126732ff5c6b68ccd619f2e64be4f"},
+ {file = "lxml-5.1.0.tar.gz", hash = "sha256:3eea6ed6e6c918e468e693c41ef07f3c3acc310b70ddd9cc72d9ef84bc9564ca"},
+]
+
+[package.extras]
+cssselect = ["cssselect (>=0.7)"]
+html5 = ["html5lib"]
+htmlsoup = ["BeautifulSoup4"]
+source = ["Cython (>=3.0.7)"]
+
+[[package]]
+name = "mccabe"
+version = "0.7.0"
+description = "McCabe checker, plugin for flake8"
+optional = false
+python-versions = ">=3.6"
+files = [
+ {file = "mccabe-0.7.0-py2.py3-none-any.whl", hash = "sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e"},
+ {file = "mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325"},
+]
+
+[[package]]
+name = "multidict"
+version = "6.0.5"
+description = "multidict implementation"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "multidict-6.0.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:228b644ae063c10e7f324ab1ab6b548bdf6f8b47f3ec234fef1093bc2735e5f9"},
+ {file = "multidict-6.0.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:896ebdcf62683551312c30e20614305f53125750803b614e9e6ce74a96232604"},
+ {file = "multidict-6.0.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:411bf8515f3be9813d06004cac41ccf7d1cd46dfe233705933dd163b60e37600"},
+ {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1d147090048129ce3c453f0292e7697d333db95e52616b3793922945804a433c"},
+ {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:215ed703caf15f578dca76ee6f6b21b7603791ae090fbf1ef9d865571039ade5"},
+ {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c6390cf87ff6234643428991b7359b5f59cc15155695deb4eda5c777d2b880f"},
+ {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21fd81c4ebdb4f214161be351eb5bcf385426bf023041da2fd9e60681f3cebae"},
+ {file = "multidict-6.0.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3cc2ad10255f903656017363cd59436f2111443a76f996584d1077e43ee51182"},
+ {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:6939c95381e003f54cd4c5516740faba40cf5ad3eeff460c3ad1d3e0ea2549bf"},
+ {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:220dd781e3f7af2c2c1053da9fa96d9cf3072ca58f057f4c5adaaa1cab8fc442"},
+ {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:766c8f7511df26d9f11cd3a8be623e59cca73d44643abab3f8c8c07620524e4a"},
+ {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:fe5d7785250541f7f5019ab9cba2c71169dc7d74d0f45253f8313f436458a4ef"},
+ {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c1c1496e73051918fcd4f58ff2e0f2f3066d1c76a0c6aeffd9b45d53243702cc"},
+ {file = "multidict-6.0.5-cp310-cp310-win32.whl", hash = "sha256:7afcdd1fc07befad18ec4523a782cde4e93e0a2bf71239894b8d61ee578c1319"},
+ {file = "multidict-6.0.5-cp310-cp310-win_amd64.whl", hash = "sha256:99f60d34c048c5c2fabc766108c103612344c46e35d4ed9ae0673d33c8fb26e8"},
+ {file = "multidict-6.0.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:f285e862d2f153a70586579c15c44656f888806ed0e5b56b64489afe4a2dbfba"},
+ {file = "multidict-6.0.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:53689bb4e102200a4fafa9de9c7c3c212ab40a7ab2c8e474491914d2305f187e"},
+ {file = "multidict-6.0.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:612d1156111ae11d14afaf3a0669ebf6c170dbb735e510a7438ffe2369a847fd"},
+ {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7be7047bd08accdb7487737631d25735c9a04327911de89ff1b26b81745bd4e3"},
+ {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de170c7b4fe6859beb8926e84f7d7d6c693dfe8e27372ce3b76f01c46e489fcf"},
+ {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:04bde7a7b3de05732a4eb39c94574db1ec99abb56162d6c520ad26f83267de29"},
+ {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:85f67aed7bb647f93e7520633d8f51d3cbc6ab96957c71272b286b2f30dc70ed"},
+ {file = "multidict-6.0.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:425bf820055005bfc8aa9a0b99ccb52cc2f4070153e34b701acc98d201693733"},
+ {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d3eb1ceec286eba8220c26f3b0096cf189aea7057b6e7b7a2e60ed36b373b77f"},
+ {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:7901c05ead4b3fb75113fb1dd33eb1253c6d3ee37ce93305acd9d38e0b5f21a4"},
+ {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:e0e79d91e71b9867c73323a3444724d496c037e578a0e1755ae159ba14f4f3d1"},
+ {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:29bfeb0dff5cb5fdab2023a7a9947b3b4af63e9c47cae2a10ad58394b517fddc"},
+ {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e030047e85cbcedbfc073f71836d62dd5dadfbe7531cae27789ff66bc551bd5e"},
+ {file = "multidict-6.0.5-cp311-cp311-win32.whl", hash = "sha256:2f4848aa3baa109e6ab81fe2006c77ed4d3cd1e0ac2c1fbddb7b1277c168788c"},
+ {file = "multidict-6.0.5-cp311-cp311-win_amd64.whl", hash = "sha256:2faa5ae9376faba05f630d7e5e6be05be22913782b927b19d12b8145968a85ea"},
+ {file = "multidict-6.0.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:51d035609b86722963404f711db441cf7134f1889107fb171a970c9701f92e1e"},
+ {file = "multidict-6.0.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:cbebcd5bcaf1eaf302617c114aa67569dd3f090dd0ce8ba9e35e9985b41ac35b"},
+ {file = "multidict-6.0.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2ffc42c922dbfddb4a4c3b438eb056828719f07608af27d163191cb3e3aa6cc5"},
+ {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ceb3b7e6a0135e092de86110c5a74e46bda4bd4fbfeeb3a3bcec79c0f861e450"},
+ {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:79660376075cfd4b2c80f295528aa6beb2058fd289f4c9252f986751a4cd0496"},
+ {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e4428b29611e989719874670fd152b6625500ad6c686d464e99f5aaeeaca175a"},
+ {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d84a5c3a5f7ce6db1f999fb9438f686bc2e09d38143f2d93d8406ed2dd6b9226"},
+ {file = "multidict-6.0.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:76c0de87358b192de7ea9649beb392f107dcad9ad27276324c24c91774ca5271"},
+ {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:79a6d2ba910adb2cbafc95dad936f8b9386e77c84c35bc0add315b856d7c3abb"},
+ {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:92d16a3e275e38293623ebf639c471d3e03bb20b8ebb845237e0d3664914caef"},
+ {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:fb616be3538599e797a2017cccca78e354c767165e8858ab5116813146041a24"},
+ {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:14c2976aa9038c2629efa2c148022ed5eb4cb939e15ec7aace7ca932f48f9ba6"},
+ {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:435a0984199d81ca178b9ae2c26ec3d49692d20ee29bc4c11a2a8d4514c67eda"},
+ {file = "multidict-6.0.5-cp312-cp312-win32.whl", hash = "sha256:9fe7b0653ba3d9d65cbe7698cca585bf0f8c83dbbcc710db9c90f478e175f2d5"},
+ {file = "multidict-6.0.5-cp312-cp312-win_amd64.whl", hash = "sha256:01265f5e40f5a17f8241d52656ed27192be03bfa8764d88e8220141d1e4b3556"},
+ {file = "multidict-6.0.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:19fe01cea168585ba0f678cad6f58133db2aa14eccaf22f88e4a6dccadfad8b3"},
+ {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6bf7a982604375a8d49b6cc1b781c1747f243d91b81035a9b43a2126c04766f5"},
+ {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:107c0cdefe028703fb5dafe640a409cb146d44a6ae201e55b35a4af8e95457dd"},
+ {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:403c0911cd5d5791605808b942c88a8155c2592e05332d2bf78f18697a5fa15e"},
+ {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aeaf541ddbad8311a87dd695ed9642401131ea39ad7bc8cf3ef3967fd093b626"},
+ {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e4972624066095e52b569e02b5ca97dbd7a7ddd4294bf4e7247d52635630dd83"},
+ {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d946b0a9eb8aaa590df1fe082cee553ceab173e6cb5b03239716338629c50c7a"},
+ {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:b55358304d7a73d7bdf5de62494aaf70bd33015831ffd98bc498b433dfe5b10c"},
+ {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:a3145cb08d8625b2d3fee1b2d596a8766352979c9bffe5d7833e0503d0f0b5e5"},
+ {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:d65f25da8e248202bd47445cec78e0025c0fe7582b23ec69c3b27a640dd7a8e3"},
+ {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:c9bf56195c6bbd293340ea82eafd0071cb3d450c703d2c93afb89f93b8386ccc"},
+ {file = "multidict-6.0.5-cp37-cp37m-win32.whl", hash = "sha256:69db76c09796b313331bb7048229e3bee7928eb62bab5e071e9f7fcc4879caee"},
+ {file = "multidict-6.0.5-cp37-cp37m-win_amd64.whl", hash = "sha256:fce28b3c8a81b6b36dfac9feb1de115bab619b3c13905b419ec71d03a3fc1423"},
+ {file = "multidict-6.0.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:76f067f5121dcecf0d63a67f29080b26c43c71a98b10c701b0677e4a065fbd54"},
+ {file = "multidict-6.0.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b82cc8ace10ab5bd93235dfaab2021c70637005e1ac787031f4d1da63d493c1d"},
+ {file = "multidict-6.0.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:5cb241881eefd96b46f89b1a056187ea8e9ba14ab88ba632e68d7a2ecb7aadf7"},
+ {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e8e94e6912639a02ce173341ff62cc1201232ab86b8a8fcc05572741a5dc7d93"},
+ {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:09a892e4a9fb47331da06948690ae38eaa2426de97b4ccbfafbdcbe5c8f37ff8"},
+ {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:55205d03e8a598cfc688c71ca8ea5f66447164efff8869517f175ea632c7cb7b"},
+ {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:37b15024f864916b4951adb95d3a80c9431299080341ab9544ed148091b53f50"},
+ {file = "multidict-6.0.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f2a1dee728b52b33eebff5072817176c172050d44d67befd681609b4746e1c2e"},
+ {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:edd08e6f2f1a390bf137080507e44ccc086353c8e98c657e666c017718561b89"},
+ {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:60d698e8179a42ec85172d12f50b1668254628425a6bd611aba022257cac1386"},
+ {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:3d25f19500588cbc47dc19081d78131c32637c25804df8414463ec908631e453"},
+ {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:4cc0ef8b962ac7a5e62b9e826bd0cd5040e7d401bc45a6835910ed699037a461"},
+ {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:eca2e9d0cc5a889850e9bbd68e98314ada174ff6ccd1129500103df7a94a7a44"},
+ {file = "multidict-6.0.5-cp38-cp38-win32.whl", hash = "sha256:4a6a4f196f08c58c59e0b8ef8ec441d12aee4125a7d4f4fef000ccb22f8d7241"},
+ {file = "multidict-6.0.5-cp38-cp38-win_amd64.whl", hash = "sha256:0275e35209c27a3f7951e1ce7aaf93ce0d163b28948444bec61dd7badc6d3f8c"},
+ {file = "multidict-6.0.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:e7be68734bd8c9a513f2b0cfd508802d6609da068f40dc57d4e3494cefc92929"},
+ {file = "multidict-6.0.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1d9ea7a7e779d7a3561aade7d596649fbecfa5c08a7674b11b423783217933f9"},
+ {file = "multidict-6.0.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ea1456df2a27c73ce51120fa2f519f1bea2f4a03a917f4a43c8707cf4cbbae1a"},
+ {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cf590b134eb70629e350691ecca88eac3e3b8b3c86992042fb82e3cb1830d5e1"},
+ {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5c0631926c4f58e9a5ccce555ad7747d9a9f8b10619621f22f9635f069f6233e"},
+ {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dce1c6912ab9ff5f179eaf6efe7365c1f425ed690b03341911bf4939ef2f3046"},
+ {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0868d64af83169e4d4152ec612637a543f7a336e4a307b119e98042e852ad9c"},
+ {file = "multidict-6.0.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:141b43360bfd3bdd75f15ed811850763555a251e38b2405967f8e25fb43f7d40"},
+ {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7df704ca8cf4a073334e0427ae2345323613e4df18cc224f647f251e5e75a527"},
+ {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:6214c5a5571802c33f80e6c84713b2c79e024995b9c5897f794b43e714daeec9"},
+ {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:cd6c8fca38178e12c00418de737aef1261576bd1b6e8c6134d3e729a4e858b38"},
+ {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:e02021f87a5b6932fa6ce916ca004c4d441509d33bbdbeca70d05dff5e9d2479"},
+ {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ebd8d160f91a764652d3e51ce0d2956b38efe37c9231cd82cfc0bed2e40b581c"},
+ {file = "multidict-6.0.5-cp39-cp39-win32.whl", hash = "sha256:04da1bb8c8dbadf2a18a452639771951c662c5ad03aefe4884775454be322c9b"},
+ {file = "multidict-6.0.5-cp39-cp39-win_amd64.whl", hash = "sha256:d6f6d4f185481c9669b9447bf9d9cf3b95a0e9df9d169bbc17e363b7d5487755"},
+ {file = "multidict-6.0.5-py3-none-any.whl", hash = "sha256:0d63c74e3d7ab26de115c49bffc92cc77ed23395303d496eae515d4204a625e7"},
+ {file = "multidict-6.0.5.tar.gz", hash = "sha256:f7e301075edaf50500f0b341543c41194d8df3ae5caf4702f2095f3ca73dd8da"},
+]
+
+[[package]]
+name = "mypy"
+version = "1.8.0"
+description = "Optional static typing for Python"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "mypy-1.8.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:485a8942f671120f76afffff70f259e1cd0f0cfe08f81c05d8816d958d4577d3"},
+ {file = "mypy-1.8.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:df9824ac11deaf007443e7ed2a4a26bebff98d2bc43c6da21b2b64185da011c4"},
+ {file = "mypy-1.8.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2afecd6354bbfb6e0160f4e4ad9ba6e4e003b767dd80d85516e71f2e955ab50d"},
+ {file = "mypy-1.8.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8963b83d53ee733a6e4196954502b33567ad07dfd74851f32be18eb932fb1cb9"},
+ {file = "mypy-1.8.0-cp310-cp310-win_amd64.whl", hash = "sha256:e46f44b54ebddbeedbd3d5b289a893219065ef805d95094d16a0af6630f5d410"},
+ {file = "mypy-1.8.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:855fe27b80375e5c5878492f0729540db47b186509c98dae341254c8f45f42ae"},
+ {file = "mypy-1.8.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4c886c6cce2d070bd7df4ec4a05a13ee20c0aa60cb587e8d1265b6c03cf91da3"},
+ {file = "mypy-1.8.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d19c413b3c07cbecf1f991e2221746b0d2a9410b59cb3f4fb9557f0365a1a817"},
+ {file = "mypy-1.8.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9261ed810972061388918c83c3f5cd46079d875026ba97380f3e3978a72f503d"},
+ {file = "mypy-1.8.0-cp311-cp311-win_amd64.whl", hash = "sha256:51720c776d148bad2372ca21ca29256ed483aa9a4cdefefcef49006dff2a6835"},
+ {file = "mypy-1.8.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:52825b01f5c4c1c4eb0db253ec09c7aa17e1a7304d247c48b6f3599ef40db8bd"},
+ {file = "mypy-1.8.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f5ac9a4eeb1ec0f1ccdc6f326bcdb464de5f80eb07fb38b5ddd7b0de6bc61e55"},
+ {file = "mypy-1.8.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afe3fe972c645b4632c563d3f3eff1cdca2fa058f730df2b93a35e3b0c538218"},
+ {file = "mypy-1.8.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:42c6680d256ab35637ef88891c6bd02514ccb7e1122133ac96055ff458f93fc3"},
+ {file = "mypy-1.8.0-cp312-cp312-win_amd64.whl", hash = "sha256:720a5ca70e136b675af3af63db533c1c8c9181314d207568bbe79051f122669e"},
+ {file = "mypy-1.8.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:028cf9f2cae89e202d7b6593cd98db6759379f17a319b5faf4f9978d7084cdc6"},
+ {file = "mypy-1.8.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4e6d97288757e1ddba10dd9549ac27982e3e74a49d8d0179fc14d4365c7add66"},
+ {file = "mypy-1.8.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f1478736fcebb90f97e40aff11a5f253af890c845ee0c850fe80aa060a267c6"},
+ {file = "mypy-1.8.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:42419861b43e6962a649068a61f4a4839205a3ef525b858377a960b9e2de6e0d"},
+ {file = "mypy-1.8.0-cp38-cp38-win_amd64.whl", hash = "sha256:2b5b6c721bd4aabaadead3a5e6fa85c11c6c795e0c81a7215776ef8afc66de02"},
+ {file = "mypy-1.8.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5c1538c38584029352878a0466f03a8ee7547d7bd9f641f57a0f3017a7c905b8"},
+ {file = "mypy-1.8.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4ef4be7baf08a203170f29e89d79064463b7fc7a0908b9d0d5114e8009c3a259"},
+ {file = "mypy-1.8.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7178def594014aa6c35a8ff411cf37d682f428b3b5617ca79029d8ae72f5402b"},
+ {file = "mypy-1.8.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ab3c84fa13c04aeeeabb2a7f67a25ef5d77ac9d6486ff33ded762ef353aa5592"},
+ {file = "mypy-1.8.0-cp39-cp39-win_amd64.whl", hash = "sha256:99b00bc72855812a60d253420d8a2eae839b0afa4938f09f4d2aa9bb4654263a"},
+ {file = "mypy-1.8.0-py3-none-any.whl", hash = "sha256:538fd81bb5e430cc1381a443971c0475582ff9f434c16cd46d2c66763ce85d9d"},
+ {file = "mypy-1.8.0.tar.gz", hash = "sha256:6ff8b244d7085a0b425b56d327b480c3b29cafbd2eff27316a004f9a7391ae07"},
+]
+
+[package.dependencies]
+mypy-extensions = ">=1.0.0"
+typing-extensions = ">=4.1.0"
+
+[package.extras]
+dmypy = ["psutil (>=4.0)"]
+install-types = ["pip"]
+mypyc = ["setuptools (>=50)"]
+reports = ["lxml"]
+
+[[package]]
+name = "mypy-extensions"
+version = "1.0.0"
+description = "Type system extensions for programs checked with the mypy type checker."
+optional = false
+python-versions = ">=3.5"
+files = [
+ {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"},
+ {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"},
+]
+
+[[package]]
+name = "nodeenv"
+version = "1.8.0"
+description = "Node.js virtual environment builder"
+optional = false
+python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*"
+files = [
+ {file = "nodeenv-1.8.0-py2.py3-none-any.whl", hash = "sha256:df865724bb3c3adc86b3876fa209771517b0cfe596beff01a92700e0e8be4cec"},
+ {file = "nodeenv-1.8.0.tar.gz", hash = "sha256:d51e0c37e64fbf47d017feac3145cdbb58836d7eee8c6f6d3b6880c5456227d2"},
+]
+
+[package.dependencies]
+setuptools = "*"
+
+[[package]]
+name = "packaging"
+version = "23.2"
+description = "Core utilities for Python packages"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"},
+ {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"},
+]
+
+[[package]]
+name = "pathspec"
+version = "0.12.1"
+description = "Utility library for gitignore style pattern matching of file paths."
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08"},
+ {file = "pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712"},
+]
+
+[[package]]
+name = "platformdirs"
+version = "4.2.0"
+description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"."
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "platformdirs-4.2.0-py3-none-any.whl", hash = "sha256:0614df2a2f37e1a662acbd8e2b25b92ccf8632929bc6d43467e17fe89c75e068"},
+ {file = "platformdirs-4.2.0.tar.gz", hash = "sha256:ef0cc731df711022c174543cb70a9b5bd22e5a9337c8624ef2c2ceb8ddad8768"},
+]
+
+[package.extras]
+docs = ["furo (>=2023.9.10)", "proselint (>=0.13)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"]
+test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)"]
+
+[[package]]
+name = "pluggy"
+version = "1.4.0"
+description = "plugin and hook calling mechanisms for python"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "pluggy-1.4.0-py3-none-any.whl", hash = "sha256:7db9f7b503d67d1c5b95f59773ebb58a8c1c288129a88665838012cfb07b8981"},
+ {file = "pluggy-1.4.0.tar.gz", hash = "sha256:8c85c2876142a764e5b7548e7d9a0e0ddb46f5185161049a79b7e974454223be"},
+]
+
+[package.extras]
+dev = ["pre-commit", "tox"]
+testing = ["pytest", "pytest-benchmark"]
+
+[[package]]
+name = "pre-commit"
+version = "3.6.0"
+description = "A framework for managing and maintaining multi-language pre-commit hooks."
+optional = false
+python-versions = ">=3.9"
+files = [
+ {file = "pre_commit-3.6.0-py2.py3-none-any.whl", hash = "sha256:c255039ef399049a5544b6ce13d135caba8f2c28c3b4033277a788f434308376"},
+ {file = "pre_commit-3.6.0.tar.gz", hash = "sha256:d30bad9abf165f7785c15a21a1f46da7d0677cb00ee7ff4c579fd38922efe15d"},
+]
+
+[package.dependencies]
+cfgv = ">=2.0.0"
+identify = ">=1.0.0"
+nodeenv = ">=0.11.1"
+pyyaml = ">=5.1"
+virtualenv = ">=20.10.0"
+
+[[package]]
+name = "pycln"
+version = "2.4.0"
+description = "A formatter for finding and removing unused import statements."
+optional = false
+python-versions = ">=3.7.0,<4"
+files = [
+ {file = "pycln-2.4.0-py3-none-any.whl", hash = "sha256:d1bf648df17077306100815d255d45430035b36f66bac635df04a323c61ba126"},
+ {file = "pycln-2.4.0.tar.gz", hash = "sha256:1f3eefb7be18a9ee06c3bdd0ba2e91218cd39317e20130325f107e96eb84b9f6"},
+]
+
+[package.dependencies]
+libcst = ">=0.3.10"
+pathspec = ">=0.9.0"
+pyyaml = ">=5.3.1"
+tomlkit = ">=0.11.1"
+typer = ">=0.4.1"
+
+[[package]]
+name = "pylint"
+version = "3.0.3"
+description = "python code static checker"
+optional = false
+python-versions = ">=3.8.0"
+files = [
+ {file = "pylint-3.0.3-py3-none-any.whl", hash = "sha256:7a1585285aefc5165db81083c3e06363a27448f6b467b3b0f30dbd0ac1f73810"},
+ {file = "pylint-3.0.3.tar.gz", hash = "sha256:58c2398b0301e049609a8429789ec6edf3aabe9b6c5fec916acd18639c16de8b"},
+]
+
+[package.dependencies]
+astroid = ">=3.0.1,<=3.1.0-dev0"
+colorama = {version = ">=0.4.5", markers = "sys_platform == \"win32\""}
+dill = [
+ {version = ">=0.3.7", markers = "python_version >= \"3.12\""},
+ {version = ">=0.3.6", markers = "python_version >= \"3.11\" and python_version < \"3.12\""},
+]
+isort = ">=4.2.5,<5.13.0 || >5.13.0,<6"
+mccabe = ">=0.6,<0.8"
+platformdirs = ">=2.2.0"
+tomlkit = ">=0.10.1"
+
+[package.extras]
+spelling = ["pyenchant (>=3.2,<4.0)"]
+testutils = ["gitpython (>3)"]
+
+[[package]]
+name = "pytest"
+version = "7.4.4"
+description = "pytest: simple powerful testing with Python"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "pytest-7.4.4-py3-none-any.whl", hash = "sha256:b090cdf5ed60bf4c45261be03239c2c1c22df034fbffe691abe93cd80cea01d8"},
+ {file = "pytest-7.4.4.tar.gz", hash = "sha256:2cf0005922c6ace4a3e2ec8b4080eb0d9753fdc93107415332f50ce9e7994280"},
+]
+
+[package.dependencies]
+colorama = {version = "*", markers = "sys_platform == \"win32\""}
+iniconfig = "*"
+packaging = "*"
+pluggy = ">=0.12,<2.0"
+
+[package.extras]
+testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"]
+
+[[package]]
+name = "pytest-aiohttp"
+version = "1.0.5"
+description = "Pytest plugin for aiohttp support"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "pytest-aiohttp-1.0.5.tar.gz", hash = "sha256:880262bc5951e934463b15e3af8bb298f11f7d4d3ebac970aab425aff10a780a"},
+ {file = "pytest_aiohttp-1.0.5-py3-none-any.whl", hash = "sha256:63a5360fd2f34dda4ab8e6baee4c5f5be4cd186a403cabd498fced82ac9c561e"},
+]
+
+[package.dependencies]
+aiohttp = ">=3.8.1"
+pytest = ">=6.1.0"
+pytest-asyncio = ">=0.17.2"
+
+[package.extras]
+testing = ["coverage (==6.2)", "mypy (==0.931)"]
+
+[[package]]
+name = "pytest-asyncio"
+version = "0.21.1"
+description = "Pytest support for asyncio"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "pytest-asyncio-0.21.1.tar.gz", hash = "sha256:40a7eae6dded22c7b604986855ea48400ab15b069ae38116e8c01238e9eeb64d"},
+ {file = "pytest_asyncio-0.21.1-py3-none-any.whl", hash = "sha256:8666c1c8ac02631d7c51ba282e0c69a8a452b211ffedf2599099845da5c5c37b"},
+]
+
+[package.dependencies]
+pytest = ">=7.0.0"
+
+[package.extras]
+docs = ["sphinx (>=5.3)", "sphinx-rtd-theme (>=1.0)"]
+testing = ["coverage (>=6.2)", "flaky (>=3.5.0)", "hypothesis (>=5.7.1)", "mypy (>=0.931)", "pytest-trio (>=0.7.0)"]
+
+[[package]]
+name = "pytest-cov"
+version = "4.1.0"
+description = "Pytest plugin for measuring coverage."
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "pytest-cov-4.1.0.tar.gz", hash = "sha256:3904b13dfbfec47f003b8e77fd5b589cd11904a21ddf1ab38a64f204d6a10ef6"},
+ {file = "pytest_cov-4.1.0-py3-none-any.whl", hash = "sha256:6ba70b9e97e69fcc3fb45bfeab2d0a138fb65c4d0d6a41ef33983ad114be8c3a"},
+]
+
+[package.dependencies]
+coverage = {version = ">=5.2.1", extras = ["toml"]}
+pytest = ">=4.6"
+
+[package.extras]
+testing = ["fields", "hunter", "process-tests", "pytest-xdist", "six", "virtualenv"]
+
+[[package]]
+name = "pytest-cover"
+version = "3.0.0"
+description = "Pytest plugin for measuring coverage. Forked from `pytest-cov`."
+optional = false
+python-versions = "*"
+files = [
+ {file = "pytest-cover-3.0.0.tar.gz", hash = "sha256:5bdb6c1cc3dd75583bb7bc2c57f5e1034a1bfcb79d27c71aceb0b16af981dbf4"},
+ {file = "pytest_cover-3.0.0-py2.py3-none-any.whl", hash = "sha256:578249955eb3b5f3991209df6e532bb770b647743b7392d3d97698dc02f39ebb"},
+]
+
+[package.dependencies]
+pytest-cov = ">=2.0"
+
+[[package]]
+name = "pytest-coverage"
+version = "0.0"
+description = "Pytest plugin for measuring coverage. Forked from `pytest-cov`."
+optional = false
+python-versions = "*"
+files = [
+ {file = "pytest-coverage-0.0.tar.gz", hash = "sha256:db6af2cbd7e458c7c9fd2b4207cee75258243c8a81cad31a7ee8cfad5be93c05"},
+ {file = "pytest_coverage-0.0-py2.py3-none-any.whl", hash = "sha256:dedd084c5e74d8e669355325916dc011539b190355021b037242514dee546368"},
+]
+
+[package.dependencies]
+pytest-cover = "*"
+
+[[package]]
+name = "pytest-mock"
+version = "3.12.0"
+description = "Thin-wrapper around the mock package for easier use with pytest"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "pytest-mock-3.12.0.tar.gz", hash = "sha256:31a40f038c22cad32287bb43932054451ff5583ff094bca6f675df2f8bc1a6e9"},
+ {file = "pytest_mock-3.12.0-py3-none-any.whl", hash = "sha256:0972719a7263072da3a21c7f4773069bcc7486027d7e8e1f81d98a47e701bc4f"},
+]
+
+[package.dependencies]
+pytest = ">=5.0"
+
+[package.extras]
+dev = ["pre-commit", "pytest-asyncio", "tox"]
+
+[[package]]
+name = "pytest-timeout"
+version = "2.2.0"
+description = "pytest plugin to abort hanging tests"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "pytest-timeout-2.2.0.tar.gz", hash = "sha256:3b0b95dabf3cb50bac9ef5ca912fa0cfc286526af17afc806824df20c2f72c90"},
+ {file = "pytest_timeout-2.2.0-py3-none-any.whl", hash = "sha256:bde531e096466f49398a59f2dde76fa78429a09a12411466f88a07213e220de2"},
+]
+
+[package.dependencies]
+pytest = ">=5.0.0"
+
+[[package]]
+name = "pytest-xdist"
+version = "3.5.0"
+description = "pytest xdist plugin for distributed testing, most importantly across multiple CPUs"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "pytest-xdist-3.5.0.tar.gz", hash = "sha256:cbb36f3d67e0c478baa57fa4edc8843887e0f6cfc42d677530a36d7472b32d8a"},
+ {file = "pytest_xdist-3.5.0-py3-none-any.whl", hash = "sha256:d075629c7e00b611df89f490a5063944bee7a4362a5ff11c7cc7824a03dfce24"},
+]
+
+[package.dependencies]
+execnet = ">=1.1"
+pytest = ">=6.2.0"
+
+[package.extras]
+psutil = ["psutil (>=3.0)"]
+setproctitle = ["setproctitle"]
+testing = ["filelock"]
+
+[[package]]
+name = "python-dateutil"
+version = "2.8.2"
+description = "Extensions to the standard Python datetime module"
+optional = false
+python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7"
+files = [
+ {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"},
+ {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"},
+]
+
+[package.dependencies]
+six = ">=1.5"
+
+[[package]]
+name = "pyupgrade"
+version = "3.15.0"
+description = "A tool to automatically upgrade syntax for newer versions."
+optional = false
+python-versions = ">=3.8.1"
+files = [
+ {file = "pyupgrade-3.15.0-py2.py3-none-any.whl", hash = "sha256:8dc8ebfaed43566e2c65994162795017c7db11f531558a74bc8aa077907bc305"},
+ {file = "pyupgrade-3.15.0.tar.gz", hash = "sha256:a7fde381060d7c224f55aef7a30fae5ac93bbc428367d27e70a603bc2acd4f00"},
+]
+
+[package.dependencies]
+tokenize-rt = ">=5.2.0"
+
+[[package]]
+name = "pyyaml"
+version = "6.0.1"
+description = "YAML parser and emitter for Python"
+optional = false
+python-versions = ">=3.6"
+files = [
+ {file = "PyYAML-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a"},
+ {file = "PyYAML-6.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f"},
+ {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"},
+ {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"},
+ {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"},
+ {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"},
+ {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"},
+ {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"},
+ {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"},
+ {file = "PyYAML-6.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab"},
+ {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"},
+ {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"},
+ {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"},
+ {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"},
+ {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"},
+ {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"},
+ {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"},
+ {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"},
+ {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"},
+ {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"},
+ {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"},
+ {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"},
+ {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"},
+ {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"},
+ {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"},
+ {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd"},
+ {file = "PyYAML-6.0.1-cp36-cp36m-win32.whl", hash = "sha256:fca0e3a251908a499833aa292323f32437106001d436eca0e6e7833256674585"},
+ {file = "PyYAML-6.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:f22ac1c3cac4dbc50079e965eba2c1058622631e526bd9afd45fedd49ba781fa"},
+ {file = "PyYAML-6.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3"},
+ {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18aeb1bf9a78867dc38b259769503436b7c72f7a1f1f4c93ff9a17de54319b27"},
+ {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:596106435fa6ad000c2991a98fa58eeb8656ef2325d7e158344fb33864ed87e3"},
+ {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:baa90d3f661d43131ca170712d903e6295d1f7a0f595074f151c0aed377c9b9c"},
+ {file = "PyYAML-6.0.1-cp37-cp37m-win32.whl", hash = "sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba"},
+ {file = "PyYAML-6.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:4fb147e7a67ef577a588a0e2c17b6db51dda102c71de36f8549b6816a96e1867"},
+ {file = "PyYAML-6.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1d4c7e777c441b20e32f52bd377e0c409713e8bb1386e1099c2415f26e479595"},
+ {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"},
+ {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"},
+ {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"},
+ {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"},
+ {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"},
+ {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"},
+ {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"},
+ {file = "PyYAML-6.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859"},
+ {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"},
+ {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"},
+ {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"},
+ {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"},
+ {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"},
+ {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"},
+ {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"},
+]
+
+[[package]]
+name = "refurb"
+version = "1.28.0"
+description = "A tool for refurbish and modernize Python codebases"
+optional = false
+python-versions = ">=3.10"
+files = [
+ {file = "refurb-1.28.0-py3-none-any.whl", hash = "sha256:c89516b77745df67a1fc099e876bf31fe41ac48b80d1e7c8ced29aae902343ae"},
+ {file = "refurb-1.28.0.tar.gz", hash = "sha256:9fb4dc340e475818a4d2a7f274c77b81861d2ed8115a3f4fabff53e0ee350aa1"},
+]
+
+[package.dependencies]
+mypy = ">=0.981"
+
+[[package]]
+name = "ruff"
+version = "0.1.15"
+description = "An extremely fast Python linter and code formatter, written in Rust."
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "ruff-0.1.15-py3-none-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:5fe8d54df166ecc24106db7dd6a68d44852d14eb0729ea4672bb4d96c320b7df"},
+ {file = "ruff-0.1.15-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:6f0bfbb53c4b4de117ac4d6ddfd33aa5fc31beeaa21d23c45c6dd249faf9126f"},
+ {file = "ruff-0.1.15-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e0d432aec35bfc0d800d4f70eba26e23a352386be3a6cf157083d18f6f5881c8"},
+ {file = "ruff-0.1.15-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9405fa9ac0e97f35aaddf185a1be194a589424b8713e3b97b762336ec79ff807"},
+ {file = "ruff-0.1.15-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c66ec24fe36841636e814b8f90f572a8c0cb0e54d8b5c2d0e300d28a0d7bffec"},
+ {file = "ruff-0.1.15-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:6f8ad828f01e8dd32cc58bc28375150171d198491fc901f6f98d2a39ba8e3ff5"},
+ {file = "ruff-0.1.15-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:86811954eec63e9ea162af0ffa9f8d09088bab51b7438e8b6488b9401863c25e"},
+ {file = "ruff-0.1.15-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fd4025ac5e87d9b80e1f300207eb2fd099ff8200fa2320d7dc066a3f4622dc6b"},
+ {file = "ruff-0.1.15-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b17b93c02cdb6aeb696effecea1095ac93f3884a49a554a9afa76bb125c114c1"},
+ {file = "ruff-0.1.15-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:ddb87643be40f034e97e97f5bc2ef7ce39de20e34608f3f829db727a93fb82c5"},
+ {file = "ruff-0.1.15-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:abf4822129ed3a5ce54383d5f0e964e7fef74a41e48eb1dfad404151efc130a2"},
+ {file = "ruff-0.1.15-py3-none-musllinux_1_2_i686.whl", hash = "sha256:6c629cf64bacfd136c07c78ac10a54578ec9d1bd2a9d395efbee0935868bf852"},
+ {file = "ruff-0.1.15-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:1bab866aafb53da39c2cadfb8e1c4550ac5340bb40300083eb8967ba25481447"},
+ {file = "ruff-0.1.15-py3-none-win32.whl", hash = "sha256:2417e1cb6e2068389b07e6fa74c306b2810fe3ee3476d5b8a96616633f40d14f"},
+ {file = "ruff-0.1.15-py3-none-win_amd64.whl", hash = "sha256:3837ac73d869efc4182d9036b1405ef4c73d9b1f88da2413875e34e0d6919587"},
+ {file = "ruff-0.1.15-py3-none-win_arm64.whl", hash = "sha256:9a933dfb1c14ec7a33cceb1e49ec4a16b51ce3c20fd42663198746efc0427360"},
+ {file = "ruff-0.1.15.tar.gz", hash = "sha256:f6dfa8c1b21c913c326919056c390966648b680966febcb796cc9d1aaab8564e"},
+]
+
+[[package]]
+name = "setuptools"
+version = "69.0.3"
+description = "Easily download, build, install, upgrade, and uninstall Python packages"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "setuptools-69.0.3-py3-none-any.whl", hash = "sha256:385eb4edd9c9d5c17540511303e39a147ce2fc04bc55289c322b9e5904fe2c05"},
+ {file = "setuptools-69.0.3.tar.gz", hash = "sha256:be1af57fc409f93647f2e8e4573a142ed38724b8cdd389706a867bb4efcf1e78"},
+]
+
+[package.extras]
+docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"]
+testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"]
+testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.1)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"]
+
+[[package]]
+name = "six"
+version = "1.16.0"
+description = "Python 2 and 3 compatibility utilities"
+optional = false
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*"
+files = [
+ {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"},
+ {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"},
+]
+
+[[package]]
+name = "tokenize-rt"
+version = "5.2.0"
+description = "A wrapper around the stdlib `tokenize` which roundtrips."
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "tokenize_rt-5.2.0-py2.py3-none-any.whl", hash = "sha256:b79d41a65cfec71285433511b50271b05da3584a1da144a0752e9c621a285289"},
+ {file = "tokenize_rt-5.2.0.tar.gz", hash = "sha256:9fe80f8a5c1edad2d3ede0f37481cc0cc1538a2f442c9c2f9e4feacd2792d054"},
+]
+
+[[package]]
+name = "tomlkit"
+version = "0.12.3"
+description = "Style preserving TOML library"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "tomlkit-0.12.3-py3-none-any.whl", hash = "sha256:b0a645a9156dc7cb5d3a1f0d4bab66db287fcb8e0430bdd4664a095ea16414ba"},
+ {file = "tomlkit-0.12.3.tar.gz", hash = "sha256:75baf5012d06501f07bee5bf8e801b9f343e7aac5a92581f20f80ce632e6b5a4"},
+]
+
+[[package]]
+name = "typeguard"
+version = "4.1.5"
+description = "Run-time type checker for Python"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "typeguard-4.1.5-py3-none-any.whl", hash = "sha256:8923e55f8873caec136c892c3bed1f676eae7be57cdb94819281b3d3bc9c0953"},
+ {file = "typeguard-4.1.5.tar.gz", hash = "sha256:ea0a113bbc111bcffc90789ebb215625c963411f7096a7e9062d4e4630c155fd"},
+]
+
+[package.dependencies]
+typing-extensions = {version = ">=4.7.0", markers = "python_version < \"3.12\""}
+
+[package.extras]
+doc = ["Sphinx (>=7)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)"]
+test = ["coverage[toml] (>=7)", "mypy (>=1.2.0)", "pytest (>=7)"]
+
+[[package]]
+name = "typer"
+version = "0.9.0"
+description = "Typer, build great CLIs. Easy to code. Based on Python type hints."
+optional = false
+python-versions = ">=3.6"
+files = [
+ {file = "typer-0.9.0-py3-none-any.whl", hash = "sha256:5d96d986a21493606a358cae4461bd8cdf83cbf33a5aa950ae629ca3b51467ee"},
+ {file = "typer-0.9.0.tar.gz", hash = "sha256:50922fd79aea2f4751a8e0408ff10d2662bd0c8bbfa84755a699f3bada2978b2"},
+]
+
+[package.dependencies]
+click = ">=7.1.1,<9.0.0"
+typing-extensions = ">=3.7.4.3"
+
+[package.extras]
+all = ["colorama (>=0.4.3,<0.5.0)", "rich (>=10.11.0,<14.0.0)", "shellingham (>=1.3.0,<2.0.0)"]
+dev = ["autoflake (>=1.3.1,<2.0.0)", "flake8 (>=3.8.3,<4.0.0)", "pre-commit (>=2.17.0,<3.0.0)"]
+doc = ["cairosvg (>=2.5.2,<3.0.0)", "mdx-include (>=1.4.1,<2.0.0)", "mkdocs (>=1.1.2,<2.0.0)", "mkdocs-material (>=8.1.4,<9.0.0)", "pillow (>=9.3.0,<10.0.0)"]
+test = ["black (>=22.3.0,<23.0.0)", "coverage (>=6.2,<7.0)", "isort (>=5.0.6,<6.0.0)", "mypy (==0.910)", "pytest (>=4.4.0,<8.0.0)", "pytest-cov (>=2.10.0,<5.0.0)", "pytest-sugar (>=0.9.4,<0.10.0)", "pytest-xdist (>=1.32.0,<4.0.0)", "rich (>=10.11.0,<14.0.0)", "shellingham (>=1.3.0,<2.0.0)"]
+
+[[package]]
+name = "types-beautifulsoup4"
+version = "4.12.0.20240106"
+description = "Typing stubs for beautifulsoup4"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "types-beautifulsoup4-4.12.0.20240106.tar.gz", hash = "sha256:98d628985b71b140bd3bc22a8cb0ab603c2f2d08f20d37925965eb4a21739be8"},
+ {file = "types_beautifulsoup4-4.12.0.20240106-py3-none-any.whl", hash = "sha256:cbdd60ab8aeac737ac014431b6e921b43e84279c0405fdd25a6900bb0e71da5b"},
+]
+
+[package.dependencies]
+types-html5lib = "*"
+
+[[package]]
+name = "types-html5lib"
+version = "1.1.11.20240106"
+description = "Typing stubs for html5lib"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "types-html5lib-1.1.11.20240106.tar.gz", hash = "sha256:fc3a1b18eb601b3eeaf92c900bd67675c0a4fa1dd1d2a2893ebdb46923547ee9"},
+ {file = "types_html5lib-1.1.11.20240106-py3-none-any.whl", hash = "sha256:61993cb89220107481e0f1da65c388ff8cf3d8c5f6e8483c97559639a596b697"},
+]
+
+[[package]]
+name = "types-lxml"
+version = "2024.2.9"
+description = "Complete lxml external type annotation"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "types-lxml-2024.2.9.tar.gz", hash = "sha256:f584856fe84cc05f7f5fd0da00308101f75a94a1d4e48ca6e6ab9eade0052d49"},
+ {file = "types_lxml-2024.2.9-py3-none-any.whl", hash = "sha256:740e3f09ba8e264c81fe0e1ea79171157f42489fab235c92b0d03ca00963b238"},
+]
+
+[package.dependencies]
+types-beautifulsoup4 = "*"
+typing-extensions = ">=4.5,<5.0"
+
+[package.extras]
+dev = ["tox (>=4.0,<5.0)"]
+
+[[package]]
+name = "typing-extensions"
+version = "4.9.0"
+description = "Backported and Experimental Type Hints for Python 3.8+"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "typing_extensions-4.9.0-py3-none-any.whl", hash = "sha256:af72aea155e91adfc61c3ae9e0e342dbc0cba726d6cba4b6c72c1f34e47291cd"},
+ {file = "typing_extensions-4.9.0.tar.gz", hash = "sha256:23478f88c37f27d76ac8aee6c905017a143b0b1b886c3c9f66bc2fd94f9f5783"},
+]
+
+[[package]]
+name = "typing-inspect"
+version = "0.9.0"
+description = "Runtime inspection utilities for typing module."
+optional = false
+python-versions = "*"
+files = [
+ {file = "typing_inspect-0.9.0-py3-none-any.whl", hash = "sha256:9ee6fc59062311ef8547596ab6b955e1b8aa46242d854bfc78f4f6b0eff35f9f"},
+ {file = "typing_inspect-0.9.0.tar.gz", hash = "sha256:b23fc42ff6f6ef6954e4852c1fb512cdd18dbea03134f91f856a95ccc9461f78"},
+]
+
+[package.dependencies]
+mypy-extensions = ">=0.3.0"
+typing-extensions = ">=3.7.4"
+
+[[package]]
+name = "uvloop"
+version = "0.19.0"
+description = "Fast implementation of asyncio event loop on top of libuv"
+optional = false
+python-versions = ">=3.8.0"
+files = [
+ {file = "uvloop-0.19.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:de4313d7f575474c8f5a12e163f6d89c0a878bc49219641d49e6f1444369a90e"},
+ {file = "uvloop-0.19.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5588bd21cf1fcf06bded085f37e43ce0e00424197e7c10e77afd4bbefffef428"},
+ {file = "uvloop-0.19.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7b1fd71c3843327f3bbc3237bedcdb6504fd50368ab3e04d0410e52ec293f5b8"},
+ {file = "uvloop-0.19.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5a05128d315e2912791de6088c34136bfcdd0c7cbc1cf85fd6fd1bb321b7c849"},
+ {file = "uvloop-0.19.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:cd81bdc2b8219cb4b2556eea39d2e36bfa375a2dd021404f90a62e44efaaf957"},
+ {file = "uvloop-0.19.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:5f17766fb6da94135526273080f3455a112f82570b2ee5daa64d682387fe0dcd"},
+ {file = "uvloop-0.19.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:4ce6b0af8f2729a02a5d1575feacb2a94fc7b2e983868b009d51c9a9d2149bef"},
+ {file = "uvloop-0.19.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:31e672bb38b45abc4f26e273be83b72a0d28d074d5b370fc4dcf4c4eb15417d2"},
+ {file = "uvloop-0.19.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:570fc0ed613883d8d30ee40397b79207eedd2624891692471808a95069a007c1"},
+ {file = "uvloop-0.19.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5138821e40b0c3e6c9478643b4660bd44372ae1e16a322b8fc07478f92684e24"},
+ {file = "uvloop-0.19.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:91ab01c6cd00e39cde50173ba4ec68a1e578fee9279ba64f5221810a9e786533"},
+ {file = "uvloop-0.19.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:47bf3e9312f63684efe283f7342afb414eea4d3011542155c7e625cd799c3b12"},
+ {file = "uvloop-0.19.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:da8435a3bd498419ee8c13c34b89b5005130a476bda1d6ca8cfdde3de35cd650"},
+ {file = "uvloop-0.19.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:02506dc23a5d90e04d4f65c7791e65cf44bd91b37f24cfc3ef6cf2aff05dc7ec"},
+ {file = "uvloop-0.19.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2693049be9d36fef81741fddb3f441673ba12a34a704e7b4361efb75cf30befc"},
+ {file = "uvloop-0.19.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7010271303961c6f0fe37731004335401eb9075a12680738731e9c92ddd96ad6"},
+ {file = "uvloop-0.19.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:5daa304d2161d2918fa9a17d5635099a2f78ae5b5960e742b2fcfbb7aefaa593"},
+ {file = "uvloop-0.19.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:7207272c9520203fea9b93843bb775d03e1cf88a80a936ce760f60bb5add92f3"},
+ {file = "uvloop-0.19.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:78ab247f0b5671cc887c31d33f9b3abfb88d2614b84e4303f1a63b46c046c8bd"},
+ {file = "uvloop-0.19.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:472d61143059c84947aa8bb74eabbace30d577a03a1805b77933d6bd13ddebbd"},
+ {file = "uvloop-0.19.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45bf4c24c19fb8a50902ae37c5de50da81de4922af65baf760f7c0c42e1088be"},
+ {file = "uvloop-0.19.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:271718e26b3e17906b28b67314c45d19106112067205119dddbd834c2b7ce797"},
+ {file = "uvloop-0.19.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:34175c9fd2a4bc3adc1380e1261f60306344e3407c20a4d684fd5f3be010fa3d"},
+ {file = "uvloop-0.19.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:e27f100e1ff17f6feeb1f33968bc185bf8ce41ca557deee9d9bbbffeb72030b7"},
+ {file = "uvloop-0.19.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:13dfdf492af0aa0a0edf66807d2b465607d11c4fa48f4a1fd41cbea5b18e8e8b"},
+ {file = "uvloop-0.19.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6e3d4e85ac060e2342ff85e90d0c04157acb210b9ce508e784a944f852a40e67"},
+ {file = "uvloop-0.19.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8ca4956c9ab567d87d59d49fa3704cf29e37109ad348f2d5223c9bf761a332e7"},
+ {file = "uvloop-0.19.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f467a5fd23b4fc43ed86342641f3936a68ded707f4627622fa3f82a120e18256"},
+ {file = "uvloop-0.19.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:492e2c32c2af3f971473bc22f086513cedfc66a130756145a931a90c3958cb17"},
+ {file = "uvloop-0.19.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:2df95fca285a9f5bfe730e51945ffe2fa71ccbfdde3b0da5772b4ee4f2e770d5"},
+ {file = "uvloop-0.19.0.tar.gz", hash = "sha256:0246f4fd1bf2bf702e06b0d45ee91677ee5c31242f39aab4ea6fe0c51aedd0fd"},
+]
+
+[package.extras]
+docs = ["Sphinx (>=4.1.2,<4.2.0)", "sphinx-rtd-theme (>=0.5.2,<0.6.0)", "sphinxcontrib-asyncio (>=0.3.0,<0.4.0)"]
+test = ["Cython (>=0.29.36,<0.30.0)", "aiohttp (==3.9.0b0)", "aiohttp (>=3.8.1)", "flake8 (>=5.0,<6.0)", "mypy (>=0.800)", "psutil", "pyOpenSSL (>=23.0.0,<23.1.0)", "pycodestyle (>=2.9.0,<2.10.0)"]
+
+[[package]]
+name = "virtualenv"
+version = "20.25.0"
+description = "Virtual Python Environment builder"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "virtualenv-20.25.0-py3-none-any.whl", hash = "sha256:4238949c5ffe6876362d9c0180fc6c3a824a7b12b80604eeb8085f2ed7460de3"},
+ {file = "virtualenv-20.25.0.tar.gz", hash = "sha256:bf51c0d9c7dd63ea8e44086fa1e4fb1093a31e963b86959257378aef020e1f1b"},
+]
+
+[package.dependencies]
+distlib = ">=0.3.7,<1"
+filelock = ">=3.12.2,<4"
+platformdirs = ">=3.9.1,<5"
+
+[package.extras]
+docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.2)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.6)"]
+test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23.1)", "pytest (>=7.4)", "pytest-env (>=0.8.2)", "pytest-freezer (>=0.4.8)", "pytest-mock (>=3.11.1)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)", "setuptools (>=68)", "time-machine (>=2.10)"]
+
+[[package]]
+name = "yarl"
+version = "1.9.4"
+description = "Yet another URL library"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "yarl-1.9.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a8c1df72eb746f4136fe9a2e72b0c9dc1da1cbd23b5372f94b5820ff8ae30e0e"},
+ {file = "yarl-1.9.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a3a6ed1d525bfb91b3fc9b690c5a21bb52de28c018530ad85093cc488bee2dd2"},
+ {file = "yarl-1.9.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c38c9ddb6103ceae4e4498f9c08fac9b590c5c71b0370f98714768e22ac6fa66"},
+ {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d9e09c9d74f4566e905a0b8fa668c58109f7624db96a2171f21747abc7524234"},
+ {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b8477c1ee4bd47c57d49621a062121c3023609f7a13b8a46953eb6c9716ca392"},
+ {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d5ff2c858f5f6a42c2a8e751100f237c5e869cbde669a724f2062d4c4ef93551"},
+ {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:357495293086c5b6d34ca9616a43d329317feab7917518bc97a08f9e55648455"},
+ {file = "yarl-1.9.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:54525ae423d7b7a8ee81ba189f131054defdb122cde31ff17477951464c1691c"},
+ {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:801e9264d19643548651b9db361ce3287176671fb0117f96b5ac0ee1c3530d53"},
+ {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e516dc8baf7b380e6c1c26792610230f37147bb754d6426462ab115a02944385"},
+ {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:7d5aaac37d19b2904bb9dfe12cdb08c8443e7ba7d2852894ad448d4b8f442863"},
+ {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:54beabb809ffcacbd9d28ac57b0db46e42a6e341a030293fb3185c409e626b8b"},
+ {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bac8d525a8dbc2a1507ec731d2867025d11ceadcb4dd421423a5d42c56818541"},
+ {file = "yarl-1.9.4-cp310-cp310-win32.whl", hash = "sha256:7855426dfbddac81896b6e533ebefc0af2f132d4a47340cee6d22cac7190022d"},
+ {file = "yarl-1.9.4-cp310-cp310-win_amd64.whl", hash = "sha256:848cd2a1df56ddbffeb375535fb62c9d1645dde33ca4d51341378b3f5954429b"},
+ {file = "yarl-1.9.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:35a2b9396879ce32754bd457d31a51ff0a9d426fd9e0e3c33394bf4b9036b099"},
+ {file = "yarl-1.9.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4c7d56b293cc071e82532f70adcbd8b61909eec973ae9d2d1f9b233f3d943f2c"},
+ {file = "yarl-1.9.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d8a1c6c0be645c745a081c192e747c5de06e944a0d21245f4cf7c05e457c36e0"},
+ {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4b3c1ffe10069f655ea2d731808e76e0f452fc6c749bea04781daf18e6039525"},
+ {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:549d19c84c55d11687ddbd47eeb348a89df9cb30e1993f1b128f4685cd0ebbf8"},
+ {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a7409f968456111140c1c95301cadf071bd30a81cbd7ab829169fb9e3d72eae9"},
+ {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e23a6d84d9d1738dbc6e38167776107e63307dfc8ad108e580548d1f2c587f42"},
+ {file = "yarl-1.9.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d8b889777de69897406c9fb0b76cdf2fd0f31267861ae7501d93003d55f54fbe"},
+ {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:03caa9507d3d3c83bca08650678e25364e1843b484f19986a527630ca376ecce"},
+ {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:4e9035df8d0880b2f1c7f5031f33f69e071dfe72ee9310cfc76f7b605958ceb9"},
+ {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:c0ec0ed476f77db9fb29bca17f0a8fcc7bc97ad4c6c1d8959c507decb22e8572"},
+ {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:ee04010f26d5102399bd17f8df8bc38dc7ccd7701dc77f4a68c5b8d733406958"},
+ {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:49a180c2e0743d5d6e0b4d1a9e5f633c62eca3f8a86ba5dd3c471060e352ca98"},
+ {file = "yarl-1.9.4-cp311-cp311-win32.whl", hash = "sha256:81eb57278deb6098a5b62e88ad8281b2ba09f2f1147c4767522353eaa6260b31"},
+ {file = "yarl-1.9.4-cp311-cp311-win_amd64.whl", hash = "sha256:d1d2532b340b692880261c15aee4dc94dd22ca5d61b9db9a8a361953d36410b1"},
+ {file = "yarl-1.9.4-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0d2454f0aef65ea81037759be5ca9947539667eecebca092733b2eb43c965a81"},
+ {file = "yarl-1.9.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:44d8ffbb9c06e5a7f529f38f53eda23e50d1ed33c6c869e01481d3fafa6b8142"},
+ {file = "yarl-1.9.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:aaaea1e536f98754a6e5c56091baa1b6ce2f2700cc4a00b0d49eca8dea471074"},
+ {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3777ce5536d17989c91696db1d459574e9a9bd37660ea7ee4d3344579bb6f129"},
+ {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9fc5fc1eeb029757349ad26bbc5880557389a03fa6ada41703db5e068881e5f2"},
+ {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ea65804b5dc88dacd4a40279af0cdadcfe74b3e5b4c897aa0d81cf86927fee78"},
+ {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa102d6d280a5455ad6a0f9e6d769989638718e938a6a0a2ff3f4a7ff8c62cc4"},
+ {file = "yarl-1.9.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:09efe4615ada057ba2d30df871d2f668af661e971dfeedf0c159927d48bbeff0"},
+ {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:008d3e808d03ef28542372d01057fd09168419cdc8f848efe2804f894ae03e51"},
+ {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:6f5cb257bc2ec58f437da2b37a8cd48f666db96d47b8a3115c29f316313654ff"},
+ {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:992f18e0ea248ee03b5a6e8b3b4738850ae7dbb172cc41c966462801cbf62cf7"},
+ {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:0e9d124c191d5b881060a9e5060627694c3bdd1fe24c5eecc8d5d7d0eb6faabc"},
+ {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:3986b6f41ad22988e53d5778f91855dc0399b043fc8946d4f2e68af22ee9ff10"},
+ {file = "yarl-1.9.4-cp312-cp312-win32.whl", hash = "sha256:4b21516d181cd77ebd06ce160ef8cc2a5e9ad35fb1c5930882baff5ac865eee7"},
+ {file = "yarl-1.9.4-cp312-cp312-win_amd64.whl", hash = "sha256:a9bd00dc3bc395a662900f33f74feb3e757429e545d831eef5bb280252631984"},
+ {file = "yarl-1.9.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:63b20738b5aac74e239622d2fe30df4fca4942a86e31bf47a81a0e94c14df94f"},
+ {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7d7f7de27b8944f1fee2c26a88b4dabc2409d2fea7a9ed3df79b67277644e17"},
+ {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c74018551e31269d56fab81a728f683667e7c28c04e807ba08f8c9e3bba32f14"},
+ {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ca06675212f94e7a610e85ca36948bb8fc023e458dd6c63ef71abfd482481aa5"},
+ {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5aef935237d60a51a62b86249839b51345f47564208c6ee615ed2a40878dccdd"},
+ {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2b134fd795e2322b7684155b7855cc99409d10b2e408056db2b93b51a52accc7"},
+ {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d25039a474c4c72a5ad4b52495056f843a7ff07b632c1b92ea9043a3d9950f6e"},
+ {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:f7d6b36dd2e029b6bcb8a13cf19664c7b8e19ab3a58e0fefbb5b8461447ed5ec"},
+ {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:957b4774373cf6f709359e5c8c4a0af9f6d7875db657adb0feaf8d6cb3c3964c"},
+ {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:d7eeb6d22331e2fd42fce928a81c697c9ee2d51400bd1a28803965883e13cead"},
+ {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:6a962e04b8f91f8c4e5917e518d17958e3bdee71fd1d8b88cdce74dd0ebbf434"},
+ {file = "yarl-1.9.4-cp37-cp37m-win32.whl", hash = "sha256:f3bc6af6e2b8f92eced34ef6a96ffb248e863af20ef4fde9448cc8c9b858b749"},
+ {file = "yarl-1.9.4-cp37-cp37m-win_amd64.whl", hash = "sha256:ad4d7a90a92e528aadf4965d685c17dacff3df282db1121136c382dc0b6014d2"},
+ {file = "yarl-1.9.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:ec61d826d80fc293ed46c9dd26995921e3a82146feacd952ef0757236fc137be"},
+ {file = "yarl-1.9.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8be9e837ea9113676e5754b43b940b50cce76d9ed7d2461df1af39a8ee674d9f"},
+ {file = "yarl-1.9.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:bef596fdaa8f26e3d66af846bbe77057237cb6e8efff8cd7cc8dff9a62278bbf"},
+ {file = "yarl-1.9.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2d47552b6e52c3319fede1b60b3de120fe83bde9b7bddad11a69fb0af7db32f1"},
+ {file = "yarl-1.9.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:84fc30f71689d7fc9168b92788abc977dc8cefa806909565fc2951d02f6b7d57"},
+ {file = "yarl-1.9.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4aa9741085f635934f3a2583e16fcf62ba835719a8b2b28fb2917bb0537c1dfa"},
+ {file = "yarl-1.9.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:206a55215e6d05dbc6c98ce598a59e6fbd0c493e2de4ea6cc2f4934d5a18d130"},
+ {file = "yarl-1.9.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07574b007ee20e5c375a8fe4a0789fad26db905f9813be0f9fef5a68080de559"},
+ {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5a2e2433eb9344a163aced6a5f6c9222c0786e5a9e9cac2c89f0b28433f56e23"},
+ {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:6ad6d10ed9b67a382b45f29ea028f92d25bc0bc1daf6c5b801b90b5aa70fb9ec"},
+ {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:6fe79f998a4052d79e1c30eeb7d6c1c1056ad33300f682465e1b4e9b5a188b78"},
+ {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:a825ec844298c791fd28ed14ed1bffc56a98d15b8c58a20e0e08c1f5f2bea1be"},
+ {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8619d6915b3b0b34420cf9b2bb6d81ef59d984cb0fde7544e9ece32b4b3043c3"},
+ {file = "yarl-1.9.4-cp38-cp38-win32.whl", hash = "sha256:686a0c2f85f83463272ddffd4deb5e591c98aac1897d65e92319f729c320eece"},
+ {file = "yarl-1.9.4-cp38-cp38-win_amd64.whl", hash = "sha256:a00862fb23195b6b8322f7d781b0dc1d82cb3bcac346d1e38689370cc1cc398b"},
+ {file = "yarl-1.9.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:604f31d97fa493083ea21bd9b92c419012531c4e17ea6da0f65cacdcf5d0bd27"},
+ {file = "yarl-1.9.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8a854227cf581330ffa2c4824d96e52ee621dd571078a252c25e3a3b3d94a1b1"},
+ {file = "yarl-1.9.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ba6f52cbc7809cd8d74604cce9c14868306ae4aa0282016b641c661f981a6e91"},
+ {file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a6327976c7c2f4ee6816eff196e25385ccc02cb81427952414a64811037bbc8b"},
+ {file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8397a3817d7dcdd14bb266283cd1d6fc7264a48c186b986f32e86d86d35fbac5"},
+ {file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e0381b4ce23ff92f8170080c97678040fc5b08da85e9e292292aba67fdac6c34"},
+ {file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:23d32a2594cb5d565d358a92e151315d1b2268bc10f4610d098f96b147370136"},
+ {file = "yarl-1.9.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ddb2a5c08a4eaaba605340fdee8fc08e406c56617566d9643ad8bf6852778fc7"},
+ {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:26a1dc6285e03f3cc9e839a2da83bcbf31dcb0d004c72d0730e755b33466c30e"},
+ {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:18580f672e44ce1238b82f7fb87d727c4a131f3a9d33a5e0e82b793362bf18b4"},
+ {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:29e0f83f37610f173eb7e7b5562dd71467993495e568e708d99e9d1944f561ec"},
+ {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:1f23e4fe1e8794f74b6027d7cf19dc25f8b63af1483d91d595d4a07eca1fb26c"},
+ {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:db8e58b9d79200c76956cefd14d5c90af54416ff5353c5bfd7cbe58818e26ef0"},
+ {file = "yarl-1.9.4-cp39-cp39-win32.whl", hash = "sha256:c7224cab95645c7ab53791022ae77a4509472613e839dab722a72abe5a684575"},
+ {file = "yarl-1.9.4-cp39-cp39-win_amd64.whl", hash = "sha256:824d6c50492add5da9374875ce72db7a0733b29c2394890aef23d533106e2b15"},
+ {file = "yarl-1.9.4-py3-none-any.whl", hash = "sha256:928cecb0ef9d5a7946eb6ff58417ad2fe9375762382f1bf5c55e61645f2c43ad"},
+ {file = "yarl-1.9.4.tar.gz", hash = "sha256:566db86717cf8080b99b58b083b773a908ae40f06681e87e589a976faf8246bf"},
+]
+
+[package.dependencies]
+idna = ">=2.0"
+multidict = ">=4.0"
+
+[[package]]
+name = "zlib-ng"
+version = "0.4.0"
+description = "Drop-in replacement for zlib and gzip modules using zlib-ng"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "zlib-ng-0.4.0.tar.gz", hash = "sha256:3b730881aaeb86f9a4995de5e22499406ccf92f8508b5c017c343d27570a8c0a"},
+ {file = "zlib_ng-0.4.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9b53458b4baa0554df93430bfda71a6861510d6641ac75192e6b9c2485d01a3a"},
+ {file = "zlib_ng-0.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8ac2405b42420abd9dccfe5d5f05c052aaf88ee66aec0d3fb4ee171826846d8a"},
+ {file = "zlib_ng-0.4.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70e192695fd7bac2c3db529a873f57e10a8d42383223b0c5dc281793be4b1b83"},
+ {file = "zlib_ng-0.4.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:9b49346d528da8e13575bb8bfa2ee5f74398422e81d4be6001afb5c0621dc412"},
+ {file = "zlib_ng-0.4.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:55a47ce2249581bc00decc5fc4aadf1f48c5edde770ff5aa649c2f0b782c9aba"},
+ {file = "zlib_ng-0.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:ec04f0a21e711a654a9ea1dc5966c29231301625cfc199ca1ec0cdedbf921377"},
+ {file = "zlib_ng-0.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:096d319b94454c174dde78886a8d4a0f488186a4fbd006bd1819360e0e8b5348"},
+ {file = "zlib_ng-0.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:07694a956028a66133c52ebf802d6185c6e986fbec5c4e403b997e044b30db8d"},
+ {file = "zlib_ng-0.4.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cd4c9d4945f366a0f295e9356dd9ef291544adbe42cabcc121a28b202dd8809b"},
+ {file = "zlib_ng-0.4.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:39f69f92c7f8d107f406d981c1383c749894d737699116138de14497b0e0b041"},
+ {file = "zlib_ng-0.4.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:6db621bdd34ef500ec1b44a5190fe5e967eee9386140be6bc8769ec15e355c4b"},
+ {file = "zlib_ng-0.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:8b2a635d018b3dbed6844ceca08c0f9a170ebdcc9299ab080e4f63b757faaeae"},
+ {file = "zlib_ng-0.4.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:274da13e42dc2197f8c24e321cc9cc4d1eef790512485462d72832343fe8f72a"},
+ {file = "zlib_ng-0.4.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0b87aba7e64de1efb5a965d51551b63efc24d9cc77671b7ea28f336162edc733"},
+ {file = "zlib_ng-0.4.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e926e2f8a31a3bfd0e1e2ffc3fb9956126ee17b4477ee98aa4e51b7bdc7ad41d"},
+ {file = "zlib_ng-0.4.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:ba917d5e574ae67b3984835791b5887592d0cb2877d5bfe22c3ab7ef30a28979"},
+ {file = "zlib_ng-0.4.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:e1e122967cacce2f1f04b5ee1ea89642997f8a312be6b527dc4a8e92deb834dd"},
+ {file = "zlib_ng-0.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:57c53634157142b208b6dd4dbb21b6b67392afb7b181be0e97a09bfc7201819b"},
+ {file = "zlib_ng-0.4.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8faa04e00261afd0e532392f70e74428687d00a37b6c3e63e6eb27ad8a81a629"},
+ {file = "zlib_ng-0.4.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7dca9af5f84edcc98408af60c4fd220fe2ba3f6e7324b6b97483ac430e1ba89b"},
+ {file = "zlib_ng-0.4.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5f338be6e62e05636467b89c26e0404d0e3d726da74aa3510be1e19e7681832b"},
+ {file = "zlib_ng-0.4.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:4edeb933440e94d2c47331314edfda20f51a3640eb8e12a7a478859874d35a4a"},
+ {file = "zlib_ng-0.4.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:676221d0abb724d97a1b693b99b63fe164b65cd419c31556f5bf538f5a950031"},
+ {file = "zlib_ng-0.4.0-cp38-cp38-win_amd64.whl", hash = "sha256:08a52fb23236870b956d02400a372f1c3a8adef298552466b6476a05ba061027"},
+ {file = "zlib_ng-0.4.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:690d1ee223a75c61bb628b7203d06d08ea4e10e88c822a4fe4fa8bdad0955608"},
+ {file = "zlib_ng-0.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2b53d72a2787ad5170a1c4b2444f14f064017bdc575ac43547054fdf0e8f8c4e"},
+ {file = "zlib_ng-0.4.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:67d37a39ed6521dfc31230f78ba095141d2317ad41bed9270eddfd1a37b9f076"},
+ {file = "zlib_ng-0.4.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:209a4a2bb0797598f49aa7b7a9e8714b9f69a64777957eba476209d26bfec17b"},
+ {file = "zlib_ng-0.4.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:b585e8ddd357fe4677c0c738e5962ca867e157257f3c33f2fa8965e04bdb5836"},
+ {file = "zlib_ng-0.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:4a05cafa6a15284406a8c92eed06faa439dfd26b6c9c697719be450b919b459d"},
+ {file = "zlib_ng-0.4.0-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:d62b1eaee0ae8fd6f544e199b4de90c018deaf1572f5e0c67ea5eb1adac7bfd3"},
+ {file = "zlib_ng-0.4.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a1ec74451bf33678a77ebbb9f24364390469396d6a1be69114063343dc654167"},
+ {file = "zlib_ng-0.4.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7ae20fde7487931146ea1d95b5ea524012c2b20d26d8a8458bf6befff1feaf1b"},
+ {file = "zlib_ng-0.4.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:ab9cf1fca3128da63a1b67490fb4c753b6a880b183d826d49d4cd0c61951d0fa"},
+ {file = "zlib_ng-0.4.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:a3731a1e44290a2ca568690b8c6b62994377d929fd9b2808e60ea371f21781f4"},
+ {file = "zlib_ng-0.4.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8430bbbca7689ce87735970bc3b9dcb1b6d1453aa6c01f5a6850a7c323e100c4"},
+ {file = "zlib_ng-0.4.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9480339df63dc83dbe17ed01ac2fcac2e1e3fcf95811d86f9118823b6434ac58"},
+ {file = "zlib_ng-0.4.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:1d4df3e5d779ded9451c3e14686233d0a75762512f2eaa74386eadf8fbb2850d"},
+ {file = "zlib_ng-0.4.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:6cf9e47962c86973543fd5c8fe46148063950fbb591da5775cde54abf6aa95aa"},
+ {file = "zlib_ng-0.4.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0261b43542d1525dfd3475a9e083b624b61dfc5b7fec2a3a7be5908af867fd1b"},
+ {file = "zlib_ng-0.4.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7fd8a3cd1c108e714b08263a9b62d85d2bb1ba91ede319267ed998b6ac73bac8"},
+ {file = "zlib_ng-0.4.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:959f8951cb7a44df190cbd69327c3ea467b6d6398c448727ecdbd520b6c4ba14"},
+]
+
+[metadata]
+lock-version = "2.0"
+python-versions = "^3.11"
+content-hash = "e1f1bbb3f330fbacc4c516ee0497263741e2ac02d891a02112ea779d41f966ed"
diff --git a/pyadtpulse/__init__.py b/pyadtpulse/__init__.py
index b4d3f06..f017636 100644
--- a/pyadtpulse/__init__.py
+++ b/pyadtpulse/__init__.py
@@ -2,96 +2,30 @@
import logging
import asyncio
-import datetime
-import re
import time
-from contextlib import suppress
-from random import randint
from threading import RLock, Thread
-from typing import List, Optional, Union
from warnings import warn
+import aiohttp_zlib_ng
import uvloop
-from aiohttp import ClientResponse, ClientSession
-from bs4 import BeautifulSoup
-from .alarm_panel import ADT_ALARM_UNKNOWN
from .const import (
- ADT_DEFAULT_HTTP_HEADERS,
+ ADT_DEFAULT_HTTP_USER_AGENT,
ADT_DEFAULT_KEEPALIVE_INTERVAL,
ADT_DEFAULT_RELOGIN_INTERVAL,
- ADT_GATEWAY_STRING,
- ADT_LOGIN_URI,
- ADT_LOGOUT_URI,
- ADT_MAX_KEEPALIVE_INTERVAL,
- ADT_MIN_RELOGIN_INTERVAL,
- ADT_SUMMARY_URI,
- ADT_SYNC_CHECK_URI,
- ADT_TIMEOUT_URI,
- API_HOST_CA,
DEFAULT_API_HOST,
)
-from .pulse_connection import ADTPulseConnection
-from .site import ADTPulseSite
-from .util import (
- AuthenticationException,
- DebugRLock,
- close_response,
- handle_response,
- make_soup,
-)
+from .pyadtpulse_async import SYNC_CHECK_TASK_NAME, PyADTPulseAsync
+from .util import DebugRLock, set_debug_lock
+aiohttp_zlib_ng.enable_zlib_ng()
LOG = logging.getLogger(__name__)
-SYNC_CHECK_TASK_NAME = "ADT Pulse Sync Check Task"
-KEEPALIVE_TASK_NAME = "ADT Pulse Keepalive Task"
-
-class PyADTPulse:
+class PyADTPulse(PyADTPulseAsync):
"""Base object for ADT Pulse service."""
- __slots__ = (
- "_pulse_connection",
- "_sync_task",
- "_timeout_task",
- "_authenticated",
- "_updates_exist",
- "_session_thread",
- "_attribute_lock",
- "_last_login_time",
- "_site",
- "_username",
- "_password",
- "_fingerprint",
- "_login_exception",
- "_relogin_interval",
- "_keepalive_interval",
- )
-
- @staticmethod
- def _check_service_host(service_host: str) -> None:
- if service_host is None or service_host == "":
- raise ValueError("Service host is mandatory")
- if service_host not in (DEFAULT_API_HOST, API_HOST_CA):
- raise ValueError(
- "Service host must be one of {DEFAULT_API_HOST}" f" or {API_HOST_CA}"
- )
-
- @staticmethod
- def _check_keepalive_interval(keepalive_interval: int) -> None:
- if keepalive_interval > ADT_MAX_KEEPALIVE_INTERVAL or keepalive_interval <= 0:
- raise ValueError(
- f"keepalive interval ({keepalive_interval}) must be "
- f"greater than 0 and less than {ADT_MAX_KEEPALIVE_INTERVAL}"
- )
-
- @staticmethod
- def _check_relogin_interval(relogin_interval: int) -> None:
- if relogin_interval < ADT_MIN_RELOGIN_INTERVAL:
- raise ValueError(
- f"relogin interval ({relogin_interval}) must be "
- f"greater than {ADT_MIN_RELOGIN_INTERVAL}"
- )
+ __slots__ = ("_session_thread", "_p_attribute_lock", "_login_exception")
def __init__(
self,
@@ -99,381 +33,125 @@ def __init__(
password: str,
fingerprint: str,
service_host: str = DEFAULT_API_HOST,
- user_agent=ADT_DEFAULT_HTTP_HEADERS["User-Agent"],
- websession: Optional[ClientSession] = None,
+ user_agent=ADT_DEFAULT_HTTP_USER_AGENT["User-Agent"],
do_login: bool = True,
debug_locks: bool = False,
- keepalive_interval: Optional[int] = ADT_DEFAULT_KEEPALIVE_INTERVAL,
- relogin_interval: Optional[int] = ADT_DEFAULT_RELOGIN_INTERVAL,
+ keepalive_interval: int = ADT_DEFAULT_KEEPALIVE_INTERVAL,
+ relogin_interval: int = ADT_DEFAULT_RELOGIN_INTERVAL,
+ detailed_debug_logging: bool = False,
):
- """Create a PyADTPulse object.
-
- Args:
- username (str): Username.
- password (str): Password.
- fingerprint (str): 2FA fingerprint.
- service_host (str, optional): host prefix to use
- i.e. https://portal.adtpulse.com or
- https://portal-ca.adtpulse.com
- user_agent (str, optional): User Agent.
- Defaults to ADT_DEFAULT_HTTP_HEADERS["User-Agent"].
- websession (ClientSession, optional): an initialized
- aiohttp.ClientSession to use, defaults to None
- do_login (bool, optional): login synchronously when creating object
- Should be set to False for asynchronous usage
- and async_login() should be called instead
- Setting websession will override this
- and not login
- Defaults to True
- debug_locks: (bool, optional): use debugging locks
- Defaults to False
- keepalive_interval (int, optional): number of minutes between
- keepalive checks, defaults to ADT_DEFAULT_KEEPALIVE_INTERVAL,
- maxiumum is ADT_MAX_KEEPALIVE_INTERVAL
- relogin_interval (int, optional): number of minutes between relogin checks
- defaults to ADT_DEFAULT_RELOGIN_INTERVAL,
- minimum is ADT_MIN_RELOGIN_INTERVAL
- """
- self._check_service_host(service_host)
- self._init_login_info(username, password, fingerprint)
- self._pulse_connection = ADTPulseConnection(
+ self._p_attribute_lock = set_debug_lock(
+ debug_locks, "pyadtpulse._p_attribute_lockattribute_lock"
+ )
+ warn(
+ "PyADTPulse is deprecated, please use PyADTPulseAsync instead",
+ DeprecationWarning,
+ stacklevel=2,
+ )
+ super().__init__(
+ username,
+ password,
+ fingerprint,
service_host,
- session=websession,
- user_agent=user_agent,
- debug_locks=debug_locks,
+ user_agent,
+ debug_locks,
+ keepalive_interval,
+ relogin_interval,
+ detailed_debug_logging,
)
-
- self._sync_task: Optional[asyncio.Task] = None
- self._timeout_task: Optional[asyncio.Task] = None
-
- # FIXME use thread event/condition, regular condition?
- # defer initialization to make sure we have an event loop
- self._authenticated: Optional[asyncio.locks.Event] = None
- self._login_exception: Optional[BaseException] = None
-
- self._updates_exist: Optional[asyncio.locks.Event] = None
-
- self._session_thread: Optional[Thread] = None
- self._attribute_lock: Union[RLock, DebugRLock]
- if not debug_locks:
- self._attribute_lock = RLock()
- else:
- self._attribute_lock = DebugRLock("PyADTPulse._attribute_lock")
- self._last_login_time: int = 0
-
- self._site: Optional[ADTPulseSite] = None
- self.keepalive_interval = keepalive_interval
- self.relogin_interval = relogin_interval
-
- # authenticate the user
- if do_login and websession is None:
+ self._session_thread: Thread | None = None
+ self._login_exception: Exception | None = None
+ if do_login:
self.login()
- def _init_login_info(self, username: str, password: str, fingerprint: str) -> None:
- if username is None or username == "":
- raise ValueError("Username is mandatory")
-
- pattern = r"\b[A-Za-z0-9._%+-]+@[A-Za-z0-9.-]+\.[A-Z|a-z]{2,}\b"
- if not re.match(pattern, username):
- raise ValueError("Username must be an email address")
- self._username = username
-
- if password is None or password == "":
- raise ValueError("Password is mandatory")
- self._password = password
-
- if fingerprint is None or fingerprint == "":
- raise ValueError("Fingerprint is required")
- self._fingerprint = fingerprint
-
def __repr__(self) -> str:
"""Object representation."""
- return f"<{self.__class__.__name__}: {self._username}>"
+ return (
+ f"<{self.__class__.__name__}: {self._authentication_properties.username}>"
+ )
# ADTPulse API endpoint is configurable (besides default US ADT Pulse endpoint) to
# support testing as well as alternative ADT Pulse endpoints such as
# portal-ca.adtpulse.com
- @property
- def service_host(self) -> str:
- """Get the Pulse host.
-
- Returns: (str): the ADT Pulse endpoint host
- """
- return self._pulse_connection.service_host
-
- @service_host.setter
- def service_host(self, host: str) -> None:
- """Override the Pulse host (i.e. to use portal-ca.adpulse.com).
-
- Args:
- host (str): name of Pulse endpoint host
- """
- self._check_service_host(host)
- with self._attribute_lock:
- self._pulse_connection.service_host = host
-
- def set_service_host(self, host: str) -> None:
- """Backward compatibility for service host property setter."""
- self.service_host = host
-
- @property
- def username(self) -> str:
- """Get username.
-
- Returns:
- str: the username
- """
- with self._attribute_lock:
- return self._username
-
- @property
- def version(self) -> str:
- """Get the ADT Pulse site version.
-
- Returns:
- str: a string containing the version
- """
- return self._pulse_connection.api_version
-
- @property
- def relogin_interval(self) -> int:
- """Get re-login interval.
-
- Returns:
- int: number of minutes to re-login to Pulse
- 0 means disabled
- """
- with self._attribute_lock:
- return self._relogin_interval
-
- @relogin_interval.setter
- def relogin_interval(self, interval: Optional[int]) -> None:
- """Set re-login interval.
-
- Args:
- interval (int): The number of minutes between logins.
- If set to None, resets to ADT_DEFAULT_RELOGIN_INTERVAL
-
- Raises:
- ValueError: if a relogin interval of less than 10 minutes
- is specified
- """
- if interval is None:
- interval = ADT_DEFAULT_RELOGIN_INTERVAL
- else:
- self._check_relogin_interval(interval)
- with self._attribute_lock:
- self._relogin_interval = interval
- LOG.debug("relogin interval set to %d", self._relogin_interval)
-
- @property
- def keepalive_interval(self) -> int:
- """Get the keepalive interval in minutes.
-
- Returns:
- int: the keepalive interval
+ def _pulse_session_thread(self) -> None:
"""
- with self._attribute_lock:
- return self._keepalive_interval
+ Pulse the session thread.
- @keepalive_interval.setter
- def keepalive_interval(self, interval: Optional[int]) -> None:
- """Set the keepalive interval in minutes.
-
- If set to None, resets to ADT_DEFAULT_KEEPALIVE_INTERVAL
+ Acquires the attribute lock and creates a background thread for the ADT
+ Pulse API. The thread runs the synchronous loop `_sync_loop()` until completion.
+ Once the loop finishes, the thread is closed, the pulse connection's event loop
+ is set to `None`, and the session thread is set to `None`.
"""
- if interval is None:
- interval = ADT_DEFAULT_KEEPALIVE_INTERVAL
- else:
- self._check_keepalive_interval(interval)
- with self._attribute_lock:
- self._keepalive_interval = interval
- LOG.debug("keepalive interval set to %d", self._keepalive_interval)
-
- async def _update_sites(self, soup: BeautifulSoup) -> None:
- with self._attribute_lock:
- if self._site is None:
- await self._initialize_sites(soup)
- if self._site is None:
- raise RuntimeError("pyadtpulse could not retrieve site")
- self._site.alarm_control_panel._update_alarm_from_soup(soup)
- self._site._update_zone_from_soup(soup)
-
- async def _initialize_sites(self, soup: BeautifulSoup) -> None:
- # typically, ADT Pulse accounts have only a single site (premise/location)
- singlePremise = soup.find("span", {"id": "p_singlePremise"})
- if singlePremise:
- site_name = singlePremise.text
-
- # FIXME: this code works, but it doesn't pass the linter
- signout_link = str(
- soup.find("a", {"class": "p_signoutlink"}).get("href") # type: ignore
- )
- if signout_link:
- m = re.search("networkid=(.+)&", signout_link)
- if m and m.group(1) and m.group(1):
- site_id = m.group(1)
- LOG.debug("Discovered site id %s: %s", site_id, site_name)
- new_site = ADTPulseSite(self._pulse_connection, site_id, site_name)
-
- # fetch zones first, so that we can have the status
- # updated with _update_alarm_status
- if not await new_site._fetch_devices(None):
- LOG.error("Could not fetch zones from ADT site")
- new_site.alarm_control_panel._update_alarm_from_soup(soup)
- if new_site.alarm_control_panel.status == ADT_ALARM_UNKNOWN:
- new_site.gateway.is_online = False
- new_site._update_zone_from_soup(soup)
- with self._attribute_lock:
- self._site = new_site
- return
- else:
- LOG.warning(
- "Couldn't find site id for %s in %s", site_name, signout_link
- )
- else:
- LOG.error("ADT Pulse accounts with MULTIPLE sites not supported!!!")
-
- # ...and current network id from:
- #
- #
- # ... or perhaps better, just extract all from /system/settings.jsp
-
- def _check_retry_after(
- self, response: Optional[ClientResponse], task_name: str
- ) -> int:
- if response is None:
- return 0
- header_value = response.headers.get("Retry-After")
- if header_value is None:
- return 0
- if header_value.isnumeric():
- retval = int(header_value)
- else:
- try:
- retval = (
- datetime.datetime.strptime(header_value, "%a, %d %b %G %T %Z")
- - datetime.datetime.now()
- ).seconds
- except ValueError:
- return 0
- reason = "Unknown"
- if response.status == 429:
- reason = "Too many requests"
- elif response.status == 503:
- reason = "Service unavailable"
- LOG.warning(
- "Task %s received Retry-After %s due to %s", task_name, retval, reason
- )
- return retval
-
- async def _keepalive_task(self) -> None:
- retry_after = 0
- response: ClientResponse | None = None
- if self._timeout_task is not None:
- task_name = self._timeout_task.get_name()
- else:
- task_name = f"{KEEPALIVE_TASK_NAME} - possible internal error"
- LOG.debug("creating %s", task_name)
- with self._attribute_lock:
- if self._authenticated is None:
- raise RuntimeError(
- "Keepalive task is running without an authenticated event"
- )
- while self._authenticated.is_set():
- relogin_interval = self.relogin_interval * 60
- if relogin_interval != 0 and time.time() - self._last_login_time > randint(
- int(0.75 * relogin_interval), relogin_interval
- ):
- LOG.info("Login timeout reached, re-logging in")
- # FIXME?: should we just pause the task?
- with self._attribute_lock:
- if self._sync_task is not None:
- self._sync_task.cancel()
- with suppress(Exception):
- await self._sync_task
- await self._do_logout_query()
- if not await self.async_quick_relogin():
- LOG.error("%s could not re-login, exiting", task_name)
- return
- if self._sync_task is not None:
- coro = self._sync_check_task()
- self._sync_task = asyncio.create_task(
- coro, name=f"{SYNC_CHECK_TASK_NAME}: Async session"
- )
- try:
- await asyncio.sleep(self.keepalive_interval * 60.0 + retry_after)
- LOG.debug("Resetting timeout")
- response = await self._pulse_connection.async_query(
- ADT_TIMEOUT_URI, "POST"
- )
- if not handle_response(
- response, logging.INFO, "Failed resetting ADT Pulse cloud timeout"
- ):
- retry_after = self._check_retry_after(response, "Keepalive task")
- close_response(response)
- continue
- close_response(response)
- if self.site.gateway.next_update < time.time():
- await self.site._set_device(ADT_GATEWAY_STRING)
- except asyncio.CancelledError:
- LOG.debug("%s cancelled", task_name)
- close_response(response)
- return
-
- def _pulse_session_thread(self) -> None:
# lock is released in sync_loop()
- self._attribute_lock.acquire()
+ self._p_attribute_lock.acquire()
LOG.debug("Creating ADT Pulse background thread")
asyncio.set_event_loop_policy(uvloop.EventLoopPolicy())
loop = asyncio.new_event_loop()
- self._pulse_connection.loop = loop
+ self._pulse_connection_properties.loop = loop
loop.run_until_complete(self._sync_loop())
loop.close()
- self._pulse_connection.loop = None
+ self._pulse_connection_properties.loop = None
self._session_thread = None
async def _sync_loop(self) -> None:
- result = await self.async_login()
- self._attribute_lock.release()
- if result:
- if self._timeout_task is not None:
- task_list = (self._timeout_task,)
- try:
- await asyncio.wait(task_list)
- except asyncio.CancelledError:
- pass
- except Exception as e: # pylint: disable=broad-except
- LOG.exception(
- "Received exception while waiting for ADT Pulse service %s", e
- )
- else:
- # we should never get here
- raise RuntimeError("Background pyadtpulse tasks not created")
- if self._authenticated is not None:
- while self._authenticated.is_set():
- # busy wait until logout is done
- await asyncio.sleep(0.5)
+ """
+ Asynchronous function that represents the main loop of the synchronization
+ process.
+
+ This function is responsible for executing the synchronization logic. It starts
+ by calling the `async_login` method to perform the login operation. After that,
+ it releases the `_p_attribute_lock` to allow other tasks to access the
+ attributes.
+ If the login operation was successful, it waits for the `_timeout_task` to
+ complete using the `asyncio.wait` function. If the `_timeout_task` is not set,
+ it raises a `RuntimeError` to indicate that background tasks were not created.
+
+ After the waiting process, it enters a while loop that continues as long as the
+ `_authenticated` event is set. Inside the loop, it waits for 0.5 seconds using
+ the `asyncio.sleep` function. This wait allows the logout process to complete
+ before continuing with the synchronization logic.
+ """
+ try:
+ await self.async_login()
+ except Exception as e:
+ self._login_exception = e
+ self._p_attribute_lock.release()
+ if self._login_exception is not None:
+ return
+ if self._timeout_task is not None:
+ task_list = (self._timeout_task,)
+ try:
+ await asyncio.wait(task_list)
+ except asyncio.CancelledError:
+ pass
+ except Exception as e: # pylint: disable=broad-except
+ LOG.exception(
+ "Received exception while waiting for ADT Pulse service %s", e
+ )
+ else:
+ # we should never get here
+ raise RuntimeError("Background pyadtpulse tasks not created")
+ while self._pulse_connection_status.authenticated_flag.is_set():
+ # busy wait until logout is done
+ await asyncio.sleep(0.5)
def login(self) -> None:
"""Login to ADT Pulse and generate access token.
Raises:
- AuthenticationException if could not login
+ Exception from async_login
"""
- self._attribute_lock.acquire()
+ self._p_attribute_lock.acquire()
# probably shouldn't be a daemon thread
self._session_thread = thread = Thread(
target=self._pulse_session_thread,
name="PyADTPulse Session",
daemon=True,
)
- self._attribute_lock.release()
+ self._p_attribute_lock.release()
self._session_thread.start()
time.sleep(1)
@@ -481,174 +159,11 @@ def login(self) -> None:
# thread will unlock after async_login, so attempt to obtain
# lock to block current thread until then
# if it's still alive, no exception
- self._attribute_lock.acquire()
- self._attribute_lock.release()
+ self._p_attribute_lock.acquire()
+ self._p_attribute_lock.release()
if not thread.is_alive():
- raise AuthenticationException(self._username)
-
- @property
- def attribute_lock(self) -> Union[RLock, DebugRLock]:
- """Get attribute lock for PyADTPulse object.
-
- Returns:
- RLock: thread Rlock
- """
- return self._attribute_lock
-
- @property
- def loop(self) -> Optional[asyncio.AbstractEventLoop]:
- """Get event loop.
-
- Returns:
- Optional[asyncio.AbstractEventLoop]: the event loop object or
- None if no thread is running
- """
- return self._pulse_connection.loop
-
- async def async_quick_relogin(self) -> bool:
- """Quickly re-login to Pulse.
-
- Doesn't do device queries or set connected event unless a failure occurs.
- FIXME: Should probably just re-work login logic."""
- response = await self._do_login_query()
- if not handle_response(response, logging.ERROR, "Could not re-login to Pulse"):
- await self.async_logout()
- return False
- return True
-
- def quick_relogin(self) -> bool:
- """Perform quick_relogin synchronously."""
- coro = self.async_quick_relogin()
- return asyncio.run_coroutine_threadsafe(
- coro,
- self._pulse_connection.check_sync(
- "Attempting to do call sync quick re-login from async"
- ),
- ).result()
-
- async def _do_login_query(self, timeout: int = 30) -> ClientResponse | None:
- try:
- retval = await self._pulse_connection.async_query(
- ADT_LOGIN_URI,
- method="POST",
- extra_params={
- "partner": "adt",
- "e": "ns",
- "usernameForm": self.username,
- "passwordForm": self._password,
- "fingerprint": self._fingerprint,
- "sun": "yes",
- },
- timeout=timeout,
- )
- except Exception as e: # pylint: disable=broad-except
- LOG.error("Could not log into Pulse site: %s", e)
- return None
- if retval is None:
- LOG.error("Could not log into Pulse site.")
- return None
- if not handle_response(
- retval,
- logging.ERROR,
- "Error encountered communicating with Pulse site on login",
- ):
- close_response(retval)
- return None
- self._last_login_time = int(time.time())
- return retval
-
- async def _do_logout_query(self) -> None:
- params = {}
- network: ADTPulseSite = self.site
- if network is not None:
- params.update({"network": str(network.id)})
- params.update({"partner": "adt"})
- await self._pulse_connection.async_query(
- ADT_LOGOUT_URI, extra_params=params, timeout=10
- )
-
- async def async_login(self) -> bool:
- """Login asynchronously to ADT.
-
- Returns: True if login successful
- """
- if self._authenticated is None:
- self._authenticated = asyncio.locks.Event()
- else:
- self._authenticated.clear()
-
- LOG.debug("Authenticating to ADT Pulse cloud service as %s", self._username)
- await self._pulse_connection.async_fetch_version()
-
- response = await self._do_login_query()
- if response is None:
- return False
- if self._pulse_connection.make_url(ADT_SUMMARY_URI) != str(response.url):
- # more specifically:
- # redirect to signin.jsp = username/password error
- # redirect to mfaSignin.jsp = fingerprint error
- LOG.error("Authentication error encountered logging into ADT Pulse")
- close_response(response)
- return False
-
- soup = await make_soup(
- response, logging.ERROR, "Could not log into ADT Pulse site"
- )
- if soup is None:
- return False
-
- # FIXME: should probably raise exceptions
- error = soup.find("div", {"id": "warnMsgContents"})
- if error:
- LOG.error("Invalid ADT Pulse username/password: %s", error)
- return False
- error = soup.find("div", "responsiveContainer")
- if error:
- LOG.error(
- "2FA authentiation required for ADT pulse username %s: %s",
- self.username,
- error,
- )
- return False
- # need to set authenticated here to prevent login loop
- self._authenticated.set()
- await self._update_sites(soup)
- if self._site is None:
- LOG.error("Could not retrieve any sites, login failed")
- self._authenticated.clear()
- return False
-
- # since we received fresh data on the status of the alarm, go ahead
- # and update the sites with the alarm status.
-
- if self._timeout_task is None:
- self._timeout_task = asyncio.create_task(
- self._keepalive_task(), name=f"{KEEPALIVE_TASK_NAME}"
- )
- if self._updates_exist is None:
- self._updates_exist = asyncio.locks.Event()
- await asyncio.sleep(0)
- return True
-
- async def async_logout(self) -> None:
- """Logout of ADT Pulse async."""
- LOG.info("Logging %s out of ADT Pulse", self._username)
- if self._timeout_task is not None:
- try:
- self._timeout_task.cancel()
- except asyncio.CancelledError:
- LOG.debug("%s successfully cancelled", KEEPALIVE_TASK_NAME)
- await self._timeout_task
- if self._sync_task is not None:
- try:
- self._sync_task.cancel()
- except asyncio.CancelledError:
- LOG.debug("%s successfully cancelled", SYNC_CHECK_TASK_NAME)
- await self._sync_task
- self._timeout_task = self._sync_task = None
- await self._do_logout_query()
- if self._authenticated is not None:
- self._authenticated.clear()
+ if self._login_exception is not None:
+ raise self._login_exception
def logout(self) -> None:
"""Log out of ADT Pulse."""
@@ -662,79 +177,24 @@ def logout(self) -> None:
if sync_thread is not None:
sync_thread.join()
- async def _sync_check_task(self) -> None:
- # this should never be true
- if self._sync_task is not None:
- task_name = self._sync_task.get_name()
- else:
- task_name = f"{SYNC_CHECK_TASK_NAME} - possible internal error"
-
- LOG.debug("creating %s", task_name)
- response = None
- retry_after = 0
- last_sync_text = "0-0-0"
- if self._updates_exist is None:
- raise RuntimeError(f"{task_name} started without update event initialized")
- have_updates = False
- while True:
- try:
- self.site.gateway.adjust_backoff_poll_interval()
- if not have_updates:
- pi = self.site.gateway.poll_interval
- else:
- pi = 0.0
- if retry_after == 0:
- await asyncio.sleep(pi)
- else:
- await asyncio.sleep(retry_after)
- response = await self._pulse_connection.async_query(
- ADT_SYNC_CHECK_URI,
- extra_params={"ts": str(int(time.time() * 1000))},
- )
+ @property
+ def attribute_lock(self) -> "RLock| DebugRLock":
+ """Get attribute lock for PyADTPulse object.
- if response is None:
- continue
- retry_after = self._check_retry_after(response, f"{task_name}")
- if retry_after != 0:
- close_response(response)
- continue
- text = await response.text()
- if not handle_response(
- response, logging.ERROR, "Error querying ADT sync"
- ):
- close_response(response)
- continue
- close_response(response)
- pattern = r"\d+[-]\d+[-]\d+"
- if not re.match(pattern, text):
- LOG.warning(
- "Unexpected sync check format (%s), forcing re-auth", pattern
- )
- LOG.debug("Received %s from ADT Pulse site", text)
- await self._do_logout_query()
- if not await self.async_quick_relogin():
- LOG.error("%s couldn't re-login, exiting.", task_name)
- continue
- if text != last_sync_text:
- LOG.debug("Updates exist: %s, requerying", text)
- last_sync_text = text
- have_updates = True
- continue
- if have_updates:
- have_updates = False
- if await self.async_update() is False:
- LOG.debug("Pulse data update from %s failed", task_name)
- continue
- self._updates_exist.set()
- else:
- LOG.debug(
- "Sync token %s indicates no remote updates to process", text
- )
+ Returns:
+ RLock: thread Rlock
+ """
+ return self._p_attribute_lock
- except asyncio.CancelledError:
- LOG.debug("%s cancelled", task_name)
- close_response(response)
- return
+ @property
+ def loop(self) -> asyncio.AbstractEventLoop | None:
+ """Get event loop.
+
+ Returns:
+ Optional[asyncio.AbstractEventLoop]: the event loop object or
+ None if no thread is running
+ """
+ return self._pulse_connection_properties.loop
@property
def updates_exist(self) -> bool:
@@ -743,9 +203,9 @@ def updates_exist(self) -> bool:
Returns:
bool: True if updated data exists
"""
- with self._attribute_lock:
+ with self._p_attribute_lock:
if self._sync_task is None:
- loop = self._pulse_connection.loop
+ loop = self._pulse_connection_properties.loop
if loop is None:
raise RuntimeError(
"ADT pulse sync function updates_exist() "
@@ -755,64 +215,11 @@ def updates_exist(self) -> bool:
self._sync_task = loop.create_task(
coro, name=f"{SYNC_CHECK_TASK_NAME}: Sync session"
)
- if self._updates_exist is None:
- return False
-
- if self._updates_exist.is_set():
- self._updates_exist.clear()
+ if self._pulse_properties.updates_exist.is_set():
+ self._pulse_properties.updates_exist.clear()
return True
return False
- async def wait_for_update(self) -> None:
- """Wait for update.
-
- Blocks current async task until Pulse system
- signals an update
- """
- with self._attribute_lock:
- if self._sync_task is None:
- coro = self._sync_check_task()
- self._sync_task = asyncio.create_task(
- coro, name=f"{SYNC_CHECK_TASK_NAME}: Async session"
- )
- if self._updates_exist is None:
- raise RuntimeError("Update event does not exist")
-
- await self._updates_exist.wait()
- self._updates_exist.clear()
-
- @property
- def is_connected(self) -> bool:
- """Check if connected to ADT Pulse.
-
- Returns:
- bool: True if connected
- """
- with self._attribute_lock:
- if self._authenticated is None:
- return False
- return self._authenticated.is_set()
-
- # FIXME? might have to move this to site for multiple sites
-
- async def async_update(self) -> bool:
- """Update ADT Pulse data.
-
- Returns:
- bool: True if update succeeded.
- """
- LOG.debug("Checking ADT Pulse cloud service for updates")
-
- # FIXME will have to query other URIs for camera/zwave/etc
- soup = await self._pulse_connection.query_orb(
- logging.INFO, "Error returned from ADT Pulse service check"
- )
- if soup is not None:
- await self._update_sites(soup)
- return True
-
- return False
-
def update(self) -> bool:
"""Update ADT Pulse data.
@@ -827,27 +234,20 @@ def update(self) -> bool:
),
).result()
- @property
- def sites(self) -> List[ADTPulseSite]:
- """Return all sites for this ADT Pulse account."""
- warn(
- "multiple sites being removed, use pyADTPulse.site instead",
- PendingDeprecationWarning,
- stacklevel=2,
+ async def async_login(self) -> None:
+ self._pulse_connection_properties.check_async(
+ "Cannot login asynchronously with a synchronous session"
)
- with self._attribute_lock:
- if self._site is None:
- raise RuntimeError(
- "No sites have been retrieved, have you logged in yet?"
- )
- return [self._site]
+ await super().async_login()
- @property
- def site(self) -> ADTPulseSite:
- """Return the site associated with the Pulse login."""
- with self._attribute_lock:
- if self._site is None:
- raise RuntimeError(
- "No sites have been retrieved, have you logged in yet?"
- )
- return self._site
+ async def async_logout(self) -> None:
+ self._pulse_connection_properties.check_async(
+ "Cannot logout asynchronously with a synchronous session"
+ )
+ await super().async_logout()
+
+ async def async_update(self) -> bool:
+ self._pulse_connection_properties.check_async(
+ "Cannot update asynchronously with a synchronous session"
+ )
+ return await super().async_update()
diff --git a/pyadtpulse/alarm_panel.py b/pyadtpulse/alarm_panel.py
index 2c6a6cc..96d561d 100644
--- a/pyadtpulse/alarm_panel.py
+++ b/pyadtpulse/alarm_panel.py
@@ -7,11 +7,12 @@
from threading import RLock
from time import time
-from bs4 import BeautifulSoup
+from lxml import html
+from typeguard import typechecked
from .const import ADT_ARM_DISARM_URI
-from .pulse_connection import ADTPulseConnection
-from .util import make_soup
+from .pulse_connection import PulseConnection
+from .util import make_etree
LOG = logging.getLogger(__name__)
ADT_ALARM_AWAY = "away"
@@ -20,6 +21,24 @@
ADT_ALARM_UNKNOWN = "unknown"
ADT_ALARM_ARMING = "arming"
ADT_ALARM_DISARMING = "disarming"
+ADT_ALARM_NIGHT = "night"
+
+ALARM_STATUSES = (
+ ADT_ALARM_AWAY,
+ ADT_ALARM_HOME,
+ ADT_ALARM_OFF,
+ ADT_ALARM_UNKNOWN,
+ ADT_ALARM_ARMING,
+ ADT_ALARM_DISARMING,
+ ADT_ALARM_NIGHT,
+)
+
+ALARM_POSSIBLE_STATUS_MAP = {
+ "Disarmed": (ADT_ALARM_OFF, ADT_ALARM_ARMING),
+ "Armed Away": (ADT_ALARM_AWAY, ADT_ALARM_DISARMING),
+ "Armed Stay": (ADT_ALARM_HOME, ADT_ALARM_DISARMING),
+ "Armed Night": (ADT_ALARM_NIGHT, ADT_ALARM_DISARMING),
+}
ADT_ARM_DISARM_TIMEOUT: float = 20
@@ -47,6 +66,18 @@ def status(self) -> str:
with self._state_lock:
return self._status
+ @status.setter
+ def status(self, new_status: str) -> None:
+ """Set alarm status.
+
+ Args:
+ new_status (str): the new alarm status
+ """
+ with self._state_lock:
+ if new_status not in ALARM_STATUSES:
+ raise ValueError(f"Alarm status must be one of {ALARM_STATUSES}")
+ self._status = new_status
+
@property
def is_away(self) -> bool:
"""Return wheter the system is armed away.
@@ -107,6 +138,16 @@ def is_disarming(self) -> bool:
with self._state_lock:
return self._status == ADT_ALARM_DISARMING
+ @property
+ def is_armed_night(self) -> bool:
+ """Return if system is in night mode.
+
+ Returns:
+ bool: True if system is in night mode
+ """
+ with self._state_lock:
+ return self._status == ADT_ALARM_NIGHT
+
@property
def last_update(self) -> float:
"""Return last update time.
@@ -117,8 +158,9 @@ def last_update(self) -> float:
with self._state_lock:
return self._last_arm_disarm
+ @typechecked
async def _arm(
- self, connection: ADTPulseConnection, mode: str, force_arm: bool
+ self, connection: PulseConnection, mode: str, force_arm: bool
) -> bool:
"""Set arm status.
@@ -161,19 +203,21 @@ async def _arm(
timeout=10,
)
- soup = await make_soup(
- response,
+ tree = make_etree(
+ response[0],
+ response[1],
+ response[2],
logging.WARNING,
f"Failed updating ADT Pulse alarm {self._sat} to {mode}",
)
- if soup is None:
+ if tree is None:
return False
- arm_result = soup.find("div", {"class": "p_armDisarmWrapper"})
+ arm_result = tree.find(".//div[@class='p_armDisarmWrapper']")
if arm_result is not None:
- error_block = arm_result.find("div")
+ error_block = arm_result.find(".//div")
if error_block is not None:
- error_text = arm_result.get_text().replace(
+ error_text = arm_result.text_content().replace(
"Arm AnywayCancel\n\n", ""
)
LOG.warning(
@@ -188,9 +232,10 @@ async def _arm(
self._last_arm_disarm = int(time())
return True
+ @typechecked
def _sync_set_alarm_mode(
self,
- connection: ADTPulseConnection,
+ connection: PulseConnection,
mode: str,
force_arm: bool = False,
) -> bool:
@@ -202,7 +247,8 @@ def _sync_set_alarm_mode(
),
).result()
- def arm_away(self, connection: ADTPulseConnection, force_arm: bool = False) -> bool:
+ @typechecked
+ def arm_away(self, connection: PulseConnection, force_arm: bool = False) -> bool:
"""Arm the alarm in Away mode.
Args:
@@ -213,7 +259,20 @@ def arm_away(self, connection: ADTPulseConnection, force_arm: bool = False) -> b
"""
return self._sync_set_alarm_mode(connection, ADT_ALARM_AWAY, force_arm)
- def arm_home(self, connection: ADTPulseConnection, force_arm: bool = False) -> bool:
+ @typechecked
+ def arm_night(self, connection: PulseConnection, force_arm: bool = False) -> bool:
+ """Arm the alarm in Night mode.
+
+ Args:
+ force_arm (bool, Optional): force system to arm
+
+ Returns:
+ bool: True if arm succeeded
+ """
+ return self._sync_set_alarm_mode(connection, ADT_ALARM_NIGHT, force_arm)
+
+ @typechecked
+ def arm_home(self, connection: PulseConnection, force_arm: bool = False) -> bool:
"""Arm the alarm in Home mode.
Args:
@@ -224,7 +283,8 @@ def arm_home(self, connection: ADTPulseConnection, force_arm: bool = False) -> b
"""
return self._sync_set_alarm_mode(connection, ADT_ALARM_HOME, force_arm)
- def disarm(self, connection: ADTPulseConnection) -> bool:
+ @typechecked
+ def disarm(self, connection: PulseConnection) -> bool:
"""Disarm the alarm.
Returns:
@@ -232,8 +292,9 @@ def disarm(self, connection: ADTPulseConnection) -> bool:
"""
return self._sync_set_alarm_mode(connection, ADT_ALARM_OFF, False)
+ @typechecked
async def async_arm_away(
- self, connection: ADTPulseConnection, force_arm: bool = False
+ self, connection: PulseConnection, force_arm: bool = False
) -> bool:
"""Arm alarm away async.
@@ -245,8 +306,9 @@ async def async_arm_away(
"""
return await self._arm(connection, ADT_ALARM_AWAY, force_arm)
+ @typechecked
async def async_arm_home(
- self, connection: ADTPulseConnection, force_arm: bool = False
+ self, connection: PulseConnection, force_arm: bool = False
) -> bool:
"""Arm alarm home async.
@@ -257,7 +319,21 @@ async def async_arm_home(
"""
return await self._arm(connection, ADT_ALARM_HOME, force_arm)
- async def async_disarm(self, connection: ADTPulseConnection) -> bool:
+ @typechecked
+ async def async_arm_night(
+ self, connection: PulseConnection, force_arm: bool = False
+ ) -> bool:
+ """Arm alarm night async.
+
+ Args:
+ force_arm (bool, Optional): force system to arm
+ Returns:
+ bool: True if arm succeeded
+ """
+ return await self._arm(connection, ADT_ALARM_NIGHT, force_arm)
+
+ @typechecked
+ async def async_disarm(self, connection: PulseConnection) -> bool:
"""Disarm alarm async.
Returns:
@@ -265,60 +341,62 @@ async def async_disarm(self, connection: ADTPulseConnection) -> bool:
"""
return await self._arm(connection, ADT_ALARM_OFF, False)
- def _update_alarm_from_soup(self, summary_html_soup: BeautifulSoup) -> None:
+ @typechecked
+ def update_alarm_from_etree(self, summary_html_etree: html.HtmlElement) -> None:
+ """
+ Updates the alarm status based on the information extracted from the provided
+ lxml etree
+
+ Args:
+ summary_html_etree: html.HtmlElement: the parsed response tree.
+
+ Returns:
+ None: This function does not return anything.
+ """
LOG.debug("Updating alarm status")
- value = summary_html_soup.find("span", {"class": "p_boldNormalTextLarge"})
+ value = summary_html_etree.find(".//span[@class='p_boldNormalTextLarge']")
sat_location = "security_button_0"
with self._state_lock:
- if value:
- text = value.text
- last_updated = int(time())
-
- if re.match("Disarmed", text):
- if (
- self._status != ADT_ALARM_ARMING
- or last_updated - self._last_arm_disarm > ADT_ARM_DISARM_TIMEOUT
- ):
- self._status = ADT_ALARM_OFF
- self._last_arm_disarm = last_updated
- elif re.match("Armed Away", text):
- if (
- self._status != ADT_ALARM_DISARMING
- or last_updated - self._last_arm_disarm > ADT_ARM_DISARM_TIMEOUT
- ):
- self._status = ADT_ALARM_AWAY
- self._last_arm_disarm = last_updated
- elif re.match("Armed Stay", text):
- if (
- self._status != ADT_ALARM_DISARMING
- or last_updated - self._last_arm_disarm > ADT_ARM_DISARM_TIMEOUT
- ):
- self._status = ADT_ALARM_HOME
- self._last_arm_disarm = last_updated
- else:
+ status_found = False
+ last_updated = int(time())
+ if value is not None:
+ text = value.text_content().lstrip().splitlines()[0]
+
+ for (
+ current_status,
+ possible_statuses,
+ ) in ALARM_POSSIBLE_STATUS_MAP.items():
+ if text.startswith(current_status):
+ status_found = True
+ if (
+ self._status != possible_statuses[1]
+ or last_updated - self._last_arm_disarm
+ > ADT_ARM_DISARM_TIMEOUT
+ ):
+ self._status = possible_statuses[0]
+ self._last_arm_disarm = last_updated
+ break
+
+ if value is None or not status_found:
+ if not text.startswith("Status Unavailable"):
LOG.warning("Failed to get alarm status from '%s'", text)
- self._status = ADT_ALARM_UNKNOWN
- self._last_arm_disarm = last_updated
- return
- LOG.debug("Alarm status = %s", self._status)
-
- if self._sat == "":
- sat_button = summary_html_soup.find(
- "input", {"type": "button", "id": sat_location}
- )
- if sat_button and sat_button.has_attr("onclick"):
- on_click = sat_button["onclick"]
- match = re.search(r"sat=([a-z0-9\-]+)", on_click)
- if match:
- self._sat = match.group(1)
- elif len(self._sat) == 0:
- LOG.warning("No sat recorded and was unable extract sat.")
-
- if len(self._sat) > 0:
- LOG.debug("Extracted sat = %s", self._sat)
- else:
- LOG.warning("Unable to extract sat")
-
+ self._status = ADT_ALARM_UNKNOWN
+ self._last_arm_disarm = last_updated
+ return
+ LOG.debug("Alarm status = %s", self._status)
+ sat_string = f'.//input[@id="{sat_location}"]'
+ sat_button = summary_html_etree.find(sat_string)
+ if sat_button is not None and "onclick" in sat_button.attrib:
+ on_click = sat_button.attrib["onclick"]
+ match = re.search(r"sat=([a-z0-9\-]+)", on_click)
+ if match:
+ self._sat = match.group(1)
+ if not self._sat:
+ LOG.warning("No sat recorded and was unable to extract sat.")
+ else:
+ LOG.debug("Extracted sat = %s", self._sat)
+
+ @typechecked
def set_alarm_attributes(self, alarm_attributes: dict[str, str]) -> None:
"""
Set alarm attributes including model, manufacturer, and online status.
diff --git a/pyadtpulse/const.py b/pyadtpulse/const.py
index 22a2a0c..efb6292 100644
--- a/pyadtpulse/const.py
+++ b/pyadtpulse/const.py
@@ -1,5 +1,7 @@
"""Constants for pyadtpulse."""
-__version__ = "1.1.5"
+
+__version__ = "1.2.9"
+
DEFAULT_API_HOST = "https://portal.adtpulse.com"
API_HOST_CA = "https://portal-ca.adtpulse.com" # Canada
@@ -7,6 +9,7 @@
ADT_LOGIN_URI = "/access/signin.jsp"
ADT_LOGOUT_URI = "/access/signout.jsp"
+ADT_MFA_FAIL_URI = "/mfa/mfaSignIn.jsp?workflow=challenge"
ADT_SUMMARY_URI = "/summary/summary.jsp"
ADT_ZONES_URI = "/ajax/homeViewDevAjax.jsp"
@@ -14,6 +17,7 @@
ADT_SYSTEM_URI = "/system/system.jsp"
ADT_DEVICE_URI = "/system/device.jsp"
ADT_STATES_URI = "/ajax/currentStates.jsp"
+ADT_GATEWAY_URI = "/system/gateway.jsp"
ADT_SYNC_CHECK_URI = "/Ajax/SyncCheckServ"
ADT_TIMEOUT_URI = "/KeepAlive"
# Intervals are all in minutes
@@ -26,24 +30,36 @@
# ADT sets their keepalive to 1 second, so poll a little more often
# than that
ADT_DEFAULT_POLL_INTERVAL = 2.0
-ADT_GATEWAY_OFFLINE_POLL_INTERVAL = 90.0
-ADT_DEFAULT_HTTP_HEADERS = {
+ADT_GATEWAY_MAX_OFFLINE_POLL_INTERVAL = 600.0
+ADT_MAX_BACKOFF: float = 5.0 * 60.0
+ADT_DEFAULT_HTTP_USER_AGENT = {
"User-Agent": (
- "Mozilla/5.0 (Windows NT 10.0; Win64; x64) "
+ "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) "
"AppleWebKit/537.36 (KHTML, like Gecko) "
- "Chrome/100.0.4896.127 Safari/537.36 Edg/100.0.1185.44"
- ),
- "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8",
+ "Chrome/122.0.0.0 Safari/537.36"
+ )
}
+ADT_DEFAULT_HTTP_ACCEPT_HEADERS = {
+ "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,"
+ "image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.7"
+}
+ADT_DEFAULT_SEC_FETCH_HEADERS = {
+ "Sec-Fetch-User": "?1",
+ "Sec-Ch-Ua-Mobile": "?0",
+ "Sec-Fetch-Site": "same-origin",
+ "Sec-Fetch-Mode": "navigate",
+ "Upgrade-Insecure-Requests": "1",
+}
+ADT_OTHER_HTTP_ACCEPT_HEADERS = {
+ "Accept": "*/*",
+}
ADT_ARM_URI = "/quickcontrol/serv/RunRRACommand"
ADT_ARM_DISARM_URI = "/quickcontrol/armDisarm.jsp"
ADT_SYSTEM_SETTINGS = "/system/settings.jsp"
-ADT_DEFAULT_VERSION = "24.0.0-117"
-
-ADT_HTTP_REFERER_URIS = (ADT_LOGIN_URI, ADT_DEVICE_URI, ADT_SUMMARY_URI, ADT_SYSTEM_URI)
+ADT_HTTP_BACKGROUND_URIS = (ADT_ORB_URI, ADT_SYNC_CHECK_URI)
STATE_OK = "OK"
STATE_OPEN = "Open"
STATE_MOTION = "Motion"
@@ -58,3 +74,5 @@
ADT_SENSOR_SMOKE = "smoke"
ADT_SENSOR_CO = "co"
ADT_SENSOR_ALARM = "alarm"
+
+ADT_DEFAULT_LOGIN_TIMEOUT = 30
diff --git a/pyadtpulse/exceptions.py b/pyadtpulse/exceptions.py
new file mode 100644
index 0000000..aef984d
--- /dev/null
+++ b/pyadtpulse/exceptions.py
@@ -0,0 +1,144 @@
+"""Pulse exceptions."""
+
+import datetime
+from time import time
+
+from .pulse_backoff import PulseBackoff
+
+
+def compute_retry_time(retry_time: float | None) -> str:
+ """Compute the retry time."""
+ if not retry_time:
+ return "indefinitely"
+ return str(datetime.datetime.fromtimestamp(retry_time))
+
+
+class PulseExceptionWithBackoff(Exception):
+ """Exception with backoff."""
+
+ def __init__(self, message: str, backoff: PulseBackoff):
+ """Initialize exception."""
+ super().__init__(message)
+ self.backoff = backoff
+ self.backoff.increment_backoff()
+
+ def __str__(self):
+ """Return a string representation of the exception."""
+ return f"{self.__class__.__name__}: {self.args[0]}"
+
+ def __repr__(self):
+ """Return a string representation of the exception."""
+ return f"{self.__class__.__name__}(message='{self.args[0]}', backoff={self.backoff})"
+
+
+class PulseExceptionWithRetry(PulseExceptionWithBackoff):
+ """Exception with backoff
+
+ If retry_time is None, or is in the past, then just the backoff count will be incremented.
+ """
+
+ def __init__(self, message: str, backoff: PulseBackoff, retry_time: float | None):
+ """Initialize exception."""
+ # super.__init__ will increment the backoff count
+ super().__init__(message, backoff)
+ self.retry_time = retry_time
+ if retry_time and retry_time > time():
+ # set the absolute backoff time will remove the backoff count
+ self.backoff.set_absolute_backoff_time(retry_time)
+ return
+
+ def __str__(self):
+ """Return a string representation of the exception."""
+ return f"{self.__class__.__name__}: {self.args[0]}"
+
+ def __repr__(self):
+ """Return a string representation of the exception."""
+ return f"{self.__class__.__name__}(message='{self.args[0]}', backoff={self.backoff}, retry_time={self.retry_time})"
+
+
+class PulseConnectionError(Exception):
+ """Base class for connection errors"""
+
+
+class PulseServerConnectionError(PulseExceptionWithBackoff, PulseConnectionError):
+ """Server error."""
+
+ def __init__(self, message: str, backoff: PulseBackoff):
+ """Initialize Pulse server error exception."""
+ super().__init__(f"Pulse server error: {message}", backoff)
+
+
+class PulseClientConnectionError(PulseExceptionWithBackoff, PulseConnectionError):
+ """Client error."""
+
+ def __init__(self, message: str, backoff: PulseBackoff):
+ """Initialize Pulse client error exception."""
+ super().__init__(f"Client error connecting to Pulse: {message}", backoff)
+
+
+class PulseServiceTemporarilyUnavailableError(
+ PulseExceptionWithRetry, PulseConnectionError
+):
+ """Service temporarily unavailable error.
+
+ For HTTP 503 and 429 errors.
+ """
+
+ def __init__(self, backoff: PulseBackoff, retry_time: float | None = None):
+ """Initialize Pusle service temporarily unavailable error exception."""
+ super().__init__(
+ f"Pulse service temporarily unavailable until {compute_retry_time(retry_time)}",
+ backoff,
+ retry_time,
+ )
+
+
+class PulseLoginException(Exception):
+ """Login exceptions.
+
+ Base class for catching all login exceptions."""
+
+
+class PulseAuthenticationError(PulseLoginException):
+ """Authentication error."""
+
+ def __init__(self):
+ """Initialize Pulse Authentication error exception."""
+ super().__init__("Error authenticating to Pulse")
+
+
+class PulseAccountLockedError(PulseExceptionWithRetry, PulseLoginException):
+ """Account locked error."""
+
+ def __init__(self, backoff: PulseBackoff, retry: float):
+ """Initialize Pulse Account locked error exception."""
+ super().__init__(
+ f"Pulse Account is locked until {compute_retry_time(retry)}", backoff, retry
+ )
+
+
+class PulseGatewayOfflineError(PulseExceptionWithBackoff):
+ """Gateway offline error."""
+
+ def __init__(self, backoff: PulseBackoff):
+ """Initialize Pulse Gateway offline error exception."""
+ super().__init__("Gateway is offline", backoff)
+
+
+class PulseMFARequiredError(PulseLoginException):
+ """MFA required error."""
+
+ def __init__(self):
+ """Initialize Pulse MFA required error exception."""
+ super().__init__("Authentication failed because MFA is required")
+
+
+class PulseNotLoggedInError(PulseLoginException):
+ """Exception to indicate that the application code is not logged in.
+
+ Used for signalling waiters.
+ """
+
+ def __init__(self):
+ """Initialize Pulse Not logged in error exception."""
+ super().__init__("Not logged into Pulse")
diff --git a/pyadtpulse/gateway.py b/pyadtpulse/gateway.py
index dee0393..cb8c67b 100644
--- a/pyadtpulse/gateway.py
+++ b/pyadtpulse/gateway.py
@@ -1,12 +1,16 @@
"""ADT Pulse Gateway Dataclass."""
import logging
+import re
from dataclasses import dataclass
from ipaddress import IPv4Address, IPv6Address, ip_address
from threading import RLock
-from typing import Any, Optional
+from typing import Any
-from .const import ADT_DEFAULT_POLL_INTERVAL, ADT_GATEWAY_OFFLINE_POLL_INTERVAL
+from typeguard import typechecked
+
+from .const import ADT_DEFAULT_POLL_INTERVAL, ADT_GATEWAY_MAX_OFFLINE_POLL_INTERVAL
+from .pulse_backoff import PulseBackoff
from .util import parse_pulse_datetime
LOG = logging.getLogger(__name__)
@@ -41,25 +45,26 @@ class ADTPulseGateway:
manufacturer: str = "Unknown"
_status_text: str = "OFFLINE"
- _current_poll_interval: float = ADT_DEFAULT_POLL_INTERVAL
- _initial_poll_interval: float = ADT_DEFAULT_POLL_INTERVAL
+ backoff = PulseBackoff(
+ "Gateway", ADT_DEFAULT_POLL_INTERVAL, ADT_GATEWAY_MAX_OFFLINE_POLL_INTERVAL
+ )
_attribute_lock = RLock()
- model: Optional[str] = None
- serial_number: Optional[str] = None
+ model: str | None = None
+ serial_number: str | None = None
next_update: int = 0
last_update: int = 0
- firmware_version: Optional[str] = None
- hardware_version: Optional[str] = None
- primary_connection_type: Optional[str] = None
- broadband_connection_status: Optional[str] = None
- cellular_connection_status: Optional[str] = None
- cellular_connection_signal_strength: float = 0.0
- broadband_lan_ip_address: Optional[IPv4Address | IPv6Address] = None
- broadband_lan_mac: Optional[str] = None
- device_lan_ip_address: Optional[IPv4Address | IPv6Address] = None
- device_lan_mac: Optional[str] = None
- router_lan_ip_address: Optional[IPv4Address | IPv6Address] = None
- router_wan_ip_address: Optional[IPv4Address | IPv6Address] = None
+ firmware_version: str | None = None
+ hardware_version: str | None = None
+ primary_connection_type: str | None = None
+ broadband_connection_status: str | None = None
+ cellular_connection_status: str | None = None
+ _cellular_connection_signal_strength: float = 0.0
+ broadband_lan_ip_address: IPv4Address | IPv6Address | None = None
+ _broadband_lan_mac: str | None = None
+ device_lan_ip_address: IPv4Address | IPv6Address | None = None
+ _device_lan_mac: str | None = None
+ router_lan_ip_address: IPv4Address | IPv6Address | None = None
+ router_wan_ip_address: IPv4Address | IPv6Address | None = None
@property
def is_online(self) -> bool:
@@ -72,87 +77,100 @@ def is_online(self) -> bool:
return self._status_text == "ONLINE"
@is_online.setter
+ @typechecked
def is_online(self, status: bool) -> None:
"""Set gateway status.
Args:
status (bool): True if gateway is online
-
- Also changes the polling intervals
"""
with self._attribute_lock:
if status == self.is_online:
return
-
+ old_status = self._status_text
self._status_text = "ONLINE"
if not status:
self._status_text = "OFFLINE"
- self._current_poll_interval = ADT_GATEWAY_OFFLINE_POLL_INTERVAL
- else:
- self._current_poll_interval = self._initial_poll_interval
LOG.info(
- "ADT Pulse gateway %s, poll interval=%f",
+ "ADT Pulse gateway %s",
self._status_text,
- self._current_poll_interval,
+ )
+ if old_status == "OFFLINE":
+ self.backoff.reset_backoff()
+ LOG.debug(
+ "Gateway poll interval: %d",
+ (
+ self.backoff.initial_backoff_interval
+ if self._status_text == "ONLINE"
+ else self.backoff.get_current_backoff_interval()
+ ),
)
@property
def poll_interval(self) -> float:
- """Set polling interval.
-
- Returns:
- float: number of seconds between polls
- """
+ """Get initial poll interval."""
with self._attribute_lock:
- return self._current_poll_interval
+ return self.backoff.initial_backoff_interval
@poll_interval.setter
- def poll_interval(self, new_interval: Optional[float]) -> None:
- """Set polling interval.
-
- Args:
- new_interval (float): polling interval if gateway is online,
- if set to None, resets to ADT_DEFAULT_POLL_INTERVAL
-
- Raises:
- ValueError: if new_interval is less than 0
- """
- if new_interval is None:
- new_interval = ADT_DEFAULT_POLL_INTERVAL
- elif new_interval < 0.0:
- raise ValueError("ADT Pulse polling interval must be greater than 0")
+ @typechecked
+ def poll_interval(self, new_interval: float) -> None:
with self._attribute_lock:
- self._initial_poll_interval = new_interval
- if self._current_poll_interval != ADT_GATEWAY_OFFLINE_POLL_INTERVAL:
- self._current_poll_interval = new_interval
- LOG.debug("Set poll interval to %f", self._initial_poll_interval)
+ self.backoff.initial_backoff_interval = new_interval
- def adjust_backoff_poll_interval(self) -> None:
- """Calculates the backoff poll interval.
+ @staticmethod
+ def _check_mac_address(mac_address: str) -> bool:
+ pattern = r"^([0-9A-Fa-f]{2}[:-]){5}([0-9A-Fa-f]{2})$"
+ return re.match(pattern, mac_address) is not None
- Each call will adjust current_poll interval with exponential backoff,
- unless gateway is online, in which case, poll interval will be reset to
- initial_poll interval."""
+ @property
+ def broadband_lan_mac(self) -> str | None:
+ """Get current gateway MAC address."""
+ return self._broadband_lan_mac
+
+ @broadband_lan_mac.setter
+ @typechecked
+ def broadband_lan_mac(self, new_mac: str | None) -> None:
+ """Set gateway MAC address."""
+ if new_mac is not None and not self._check_mac_address(new_mac):
+ raise ValueError("Invalid MAC address")
+ self._broadband_lan_mac = new_mac
- with self._attribute_lock:
- if self.is_online:
- self._current_poll_interval = self._initial_poll_interval
- return
- # use an exponential backoff
- self._current_poll_interval = self._current_poll_interval * 2
- if self._current_poll_interval > ADT_GATEWAY_OFFLINE_POLL_INTERVAL:
- self._current_poll_interval = ADT_DEFAULT_POLL_INTERVAL
- LOG.debug(
- "Setting current poll interval to %f", self._current_poll_interval
- )
+ @property
+ def device_lan_mac(self) -> str | None:
+ """Get current gateway MAC address."""
+ return self._device_lan_mac
+
+ @device_lan_mac.setter
+ @typechecked
+ def device_lan_mac(self, new_mac: str | None) -> None:
+ """Set gateway MAC address."""
+ if new_mac is not None and not self._check_mac_address(new_mac):
+ raise ValueError("Invalid MAC address")
+ self._device_lan_mac = new_mac
+
+ @property
+ def cellular_connection_signal_strength(self) -> float:
+ """Get current gateway MAC address."""
+ return self._cellular_connection_signal_strength
+
+ @cellular_connection_signal_strength.setter
+ @typechecked
+ def cellular_connection_signal_strength(
+ self, new_signal_strength: float | None
+ ) -> None:
+ """Set gateway MAC address."""
+ if not new_signal_strength:
+ new_signal_strength = 0.0
+ self._cellular_connection_signal_strength = new_signal_strength
def set_gateway_attributes(self, gateway_attributes: dict[str, str]) -> None:
"""Set gateway attributes from dictionary.
Args:
gateway_attributes (dict[str,str]): dictionary of gateway attributes
- """ """"""
+ """
for i in (
STRING_UPDATEABLE_FIELDS
+ IPADDR_UPDATEABLE_FIELDS
@@ -174,4 +192,5 @@ def set_gateway_attributes(self, gateway_attributes: dict[str, str]) -> None:
temp = int(parse_pulse_datetime(temp).timestamp())
except ValueError:
temp = None
- setattr(self, i, temp)
+ if hasattr(self, i):
+ setattr(self, i, temp)
diff --git a/pyadtpulse/pulse_authentication_properties.py b/pyadtpulse/pulse_authentication_properties.py
new file mode 100644
index 0000000..5d0f98f
--- /dev/null
+++ b/pyadtpulse/pulse_authentication_properties.py
@@ -0,0 +1,135 @@
+"""Pulse Authentication Properties."""
+
+from re import match
+
+from typeguard import typechecked
+
+from .util import set_debug_lock
+
+
+class PulseAuthenticationProperties:
+ """Pulse Authentication Properties."""
+
+ __slots__ = (
+ "_username",
+ "_password",
+ "_fingerprint",
+ "_paa_attribute_lock",
+ "_last_login_time",
+ "_site_id",
+ )
+
+ @staticmethod
+ def check_username(username: str) -> None:
+ """Check if username is valid.
+
+ Raises ValueError if a login parameter is not valid."""
+ if not username:
+ raise ValueError("Username is mandatory")
+ pattern = r"\b[A-Za-z0-9._%+-]+@[A-Za-z0-9.-]+\.[A-Z|a-z]{2,}\b"
+ if not match(pattern, username):
+ raise ValueError("Username must be an email address")
+
+ @staticmethod
+ @typechecked
+ def check_password(password: str) -> None:
+ """Check if password is valid.
+
+ Raises ValueError if password is not valid.
+ """
+ if not password:
+ raise ValueError("Password is mandatory")
+
+ @staticmethod
+ @typechecked
+ def check_fingerprint(fingerprint: str) -> None:
+ """Check if fingerprint is valid.
+
+ Raises ValueError if password is not valid.
+ """
+ if not fingerprint:
+ raise ValueError("Fingerprint is required")
+
+ @typechecked
+ def __init__(
+ self,
+ username: str,
+ password: str,
+ fingerprint: str,
+ debug_locks: bool = False,
+ ) -> None:
+ """Initialize Pulse Authentication Properties."""
+ self.check_username(username)
+ self.check_password(password)
+ self.check_fingerprint(fingerprint)
+ self._username = username
+ self._password = password
+ self._fingerprint = fingerprint
+ self._paa_attribute_lock = set_debug_lock(
+ debug_locks, "pyadtpulse.paa_attribute_lock"
+ )
+ self._last_login_time = 0
+ self._site_id = ""
+
+ @property
+ def last_login_time(self) -> int:
+ """Get the last login time."""
+ with self._paa_attribute_lock:
+ return self._last_login_time
+
+ @last_login_time.setter
+ @typechecked
+ def last_login_time(self, login_time: int) -> None:
+ with self._paa_attribute_lock:
+ self._last_login_time = login_time
+
+ @property
+ def username(self) -> str:
+ """Get the username."""
+ with self._paa_attribute_lock:
+ return self._username
+
+ @username.setter
+ @typechecked
+ def username(self, username: str) -> None:
+ self.check_username(username)
+ with self._paa_attribute_lock:
+ self._username = username
+
+ @property
+ def password(self) -> str:
+ """Get the password."""
+ with self._paa_attribute_lock:
+ return self._password
+
+ @password.setter
+ @typechecked
+ def password(self, password: str) -> None:
+ self.check_password(password)
+ with self._paa_attribute_lock:
+ self._password = password
+
+ @property
+ def fingerprint(self) -> str:
+ """Get the fingerprint."""
+ with self._paa_attribute_lock:
+ return self._fingerprint
+
+ @fingerprint.setter
+ @typechecked
+ def fingerprint(self, fingerprint: str) -> None:
+ self.check_fingerprint(fingerprint)
+ with self._paa_attribute_lock:
+ self._fingerprint = fingerprint
+
+ @property
+ def site_id(self) -> str:
+ """Get the site ID."""
+ with self._paa_attribute_lock:
+ return self._site_id
+
+ @site_id.setter
+ @typechecked
+ def site_id(self, site_id: str) -> None:
+ with self._paa_attribute_lock:
+ self._site_id = site_id
diff --git a/pyadtpulse/pulse_backoff.py b/pyadtpulse/pulse_backoff.py
new file mode 100644
index 0000000..9c3278c
--- /dev/null
+++ b/pyadtpulse/pulse_backoff.py
@@ -0,0 +1,192 @@
+"""Pulse backoff object."""
+
+import asyncio
+import datetime
+from logging import getLogger
+from time import time
+
+from typeguard import typechecked
+
+from .const import ADT_MAX_BACKOFF
+from .util import set_debug_lock
+
+LOG = getLogger(__name__)
+
+
+class PulseBackoff:
+ """Pulse backoff object."""
+
+ __slots__ = (
+ "_b_lock",
+ "_initial_backoff_interval",
+ "_max_backoff_interval",
+ "_backoff_count",
+ "_expiration_time",
+ "_name",
+ "_detailed_debug_logging",
+ "_threshold",
+ )
+
+ @typechecked
+ def __init__(
+ self,
+ name: str,
+ initial_backoff_interval: float,
+ max_backoff_interval: float = ADT_MAX_BACKOFF,
+ threshold: int = 0,
+ debug_locks: bool = False,
+ detailed_debug_logging=False,
+ ) -> None:
+ """Initialize backoff.
+
+ Args:
+ name (str): Name of the backoff.
+ initial_backoff_interval (float): Initial backoff interval in seconds.
+ max_backoff_interval (float, optional): Maximum backoff interval in seconds.
+ Defaults to ADT_MAX_BACKOFF.
+ threshold (int, optional): Threshold for backoff. Defaults to 0.
+ debug_locks (bool, optional): Enable debug locks. Defaults to False.
+ detailed_debug_logging (bool, optional): Enable detailed debug logging.
+ Defaults to False.
+ """
+ self._check_intervals(initial_backoff_interval, max_backoff_interval)
+ self._b_lock = set_debug_lock(debug_locks, "pyadtpulse._b_lock")
+ self._initial_backoff_interval = initial_backoff_interval
+ self._max_backoff_interval = max_backoff_interval
+ self._backoff_count = 0
+ self._expiration_time = 0.0
+ self._name = name
+ self._detailed_debug_logging = detailed_debug_logging
+ self._threshold = threshold
+
+ def _calculate_backoff_interval(self) -> float:
+ """Calculate backoff time."""
+ if self._backoff_count == 0:
+ return 0.0
+ if self._backoff_count <= (self._threshold + 1):
+ return self._initial_backoff_interval
+ return min(
+ self._initial_backoff_interval
+ * 2 ** (self._backoff_count - self._threshold - 1),
+ self._max_backoff_interval,
+ )
+
+ @staticmethod
+ def _check_intervals(
+ initial_backoff_interval: float, max_backoff_interval: float
+ ) -> None:
+ """Check max_backoff_interval is >= initial_backoff_interval
+        and that both intervals are positive."""
+ if initial_backoff_interval <= 0:
+ raise ValueError("initial_backoff_interval must be greater than 0")
+ if max_backoff_interval < initial_backoff_interval:
+ raise ValueError("max_backoff_interval must be >= initial_backoff_interval")
+
+ def get_current_backoff_interval(self) -> float:
+ """Return current backoff time."""
+ with self._b_lock:
+ return self._calculate_backoff_interval()
+
+ def increment_backoff(self) -> None:
+ """Increment backoff."""
+ with self._b_lock:
+ self._backoff_count += 1
+ if self._detailed_debug_logging:
+ LOG.debug(
+ "Pulse backoff %s: incremented to %s",
+ self._name,
+ self._backoff_count,
+ )
+
+ def reset_backoff(self) -> None:
+ """Reset backoff."""
+ with self._b_lock:
+ if self._expiration_time < time():
+ if self._detailed_debug_logging and self._backoff_count != 0:
+ LOG.debug("Pulse backoff %s reset", self._name)
+ self._backoff_count = 0
+ self._expiration_time = 0.0
+
+ @typechecked
+ def set_absolute_backoff_time(self, backoff_time: float) -> None:
+ """Set absolute backoff time."""
+ curr_time = time()
+ if backoff_time < curr_time:
+ raise ValueError("Absolute backoff time must be greater than current time")
+ with self._b_lock:
+ if self._detailed_debug_logging:
+ LOG.debug(
+ "Pulse backoff %s: set to %s",
+ self._name,
+ datetime.datetime.fromtimestamp(backoff_time).strftime(
+ "%m/%d/%Y %H:%M:%S"
+ ),
+ )
+ self._expiration_time = backoff_time
+ self._backoff_count = 0
+
+ async def wait_for_backoff(self) -> None:
+ """Wait for backoff."""
+ with self._b_lock:
+ curr_time = time()
+ if self._expiration_time < curr_time:
+            if self._backoff_count == 0:
+ return
+ diff = self._calculate_backoff_interval()
+ else:
+ diff = self._expiration_time - curr_time
+ if diff > 0:
+ if self._detailed_debug_logging:
+ LOG.debug("Backoff %s: waiting for %s", self._name, diff)
+ await asyncio.sleep(diff)
+
+ def will_backoff(self) -> bool:
+ """Return if backoff is needed."""
+ with self._b_lock:
+ return (
+ self._backoff_count > self._threshold or self._expiration_time >= time()
+ )
+
+ @property
+ def backoff_count(self) -> int:
+ """Return backoff count."""
+ with self._b_lock:
+ return self._backoff_count
+
+ @property
+ def expiration_time(self) -> float:
+ """Return backoff expiration time."""
+ with self._b_lock:
+ return self._expiration_time
+
+ @property
+ def initial_backoff_interval(self) -> float:
+ """Return initial backoff interval."""
+ with self._b_lock:
+ return self._initial_backoff_interval
+
+ @initial_backoff_interval.setter
+ @typechecked
+ def initial_backoff_interval(self, new_interval: float) -> None:
+ """Set initial backoff interval."""
+ with self._b_lock:
+ self._check_intervals(new_interval, self._max_backoff_interval)
+ self._initial_backoff_interval = new_interval
+
+ @property
+ def name(self) -> str:
+ """Return name."""
+ return self._name
+
+ @property
+ def detailed_debug_logging(self) -> bool:
+ """Return detailed debug logging."""
+ with self._b_lock:
+ return self._detailed_debug_logging
+
+ @detailed_debug_logging.setter
+ @typechecked
+ def detailed_debug_logging(self, new_value: bool) -> None:
+ """Set detailed debug logging."""
+ with self._b_lock:
+ self._detailed_debug_logging = new_value
diff --git a/pyadtpulse/pulse_connection.py b/pyadtpulse/pulse_connection.py
index 7eb950f..472ecc1 100644
--- a/pyadtpulse/pulse_connection.py
+++ b/pyadtpulse/pulse_connection.py
@@ -1,330 +1,337 @@
-"""ADT Pulse connection. End users should probably not call this directly."""
+"""ADT Pulse connection. End users should probably not call this directly.
+
+This is the main interface to the http functions to access ADT Pulse.
+"""
import logging
-import asyncio
import re
-from random import uniform
-from threading import Lock, RLock
-from typing import Dict, Optional, Union
-
-from aiohttp import (
- ClientConnectionError,
- ClientConnectorError,
- ClientResponse,
- ClientResponseError,
- ClientSession,
-)
-from bs4 import BeautifulSoup
+from asyncio import AbstractEventLoop
+from time import time
+
+from lxml import html
+from typeguard import typechecked
+from yarl import URL
from .const import (
- ADT_DEFAULT_HTTP_HEADERS,
- ADT_DEFAULT_VERSION,
- ADT_DEVICE_URI,
- ADT_HTTP_REFERER_URIS,
+ ADT_DEFAULT_LOGIN_TIMEOUT,
ADT_LOGIN_URI,
- ADT_ORB_URI,
- ADT_SYSTEM_URI,
- API_PREFIX,
+ ADT_LOGOUT_URI,
+ ADT_MFA_FAIL_URI,
+ ADT_SUMMARY_URI,
)
-from .util import DebugRLock, close_response, make_soup
+from .exceptions import (
+ PulseAccountLockedError,
+ PulseAuthenticationError,
+ PulseClientConnectionError,
+ PulseMFARequiredError,
+ PulseNotLoggedInError,
+ PulseServerConnectionError,
+ PulseServiceTemporarilyUnavailableError,
+)
+from .pulse_authentication_properties import PulseAuthenticationProperties
+from .pulse_backoff import PulseBackoff
+from .pulse_connection_properties import PulseConnectionProperties
+from .pulse_connection_status import PulseConnectionStatus
+from .pulse_query_manager import PulseQueryManager
+from .util import make_etree, set_debug_lock
-RECOVERABLE_ERRORS = [429, 500, 502, 503, 504]
LOG = logging.getLogger(__name__)
-class ADTPulseConnection:
- """ADT Pulse connection related attributes."""
+SESSION_COOKIES = {"X-mobile-browser": "false", "ICLocal": "en_US"}
- _api_version = ADT_DEFAULT_VERSION
- _class_threadlock = Lock()
+
+class PulseConnection(PulseQueryManager):
+ """ADT Pulse connection related attributes."""
__slots__ = (
- "_api_host",
- "_allocated_session",
- "_session",
- "_attribute_lock",
- "_loop",
+ "_pc_attribute_lock",
+ "_authentication_properties",
+ "_login_backoff",
+ "_login_in_progress",
)
+ @typechecked
def __init__(
self,
- host: str,
- session: Optional[ClientSession] = None,
- user_agent: str = ADT_DEFAULT_HTTP_HEADERS["User-Agent"],
+ pulse_connection_status: PulseConnectionStatus,
+ pulse_connection_properties: PulseConnectionProperties,
+ pulse_authentication: PulseAuthenticationProperties,
debug_locks: bool = False,
):
"""Initialize ADT Pulse connection."""
- self._api_host = host
- self._allocated_session = False
- if session is None:
- self._allocated_session = True
- self._session = ClientSession()
- else:
- self._session = session
- self._session.headers.update({"User-Agent": user_agent})
- self._attribute_lock: Union[RLock, DebugRLock]
- if not debug_locks:
- self._attribute_lock = RLock()
- else:
- self._attribute_lock = DebugRLock("ADTPulseConnection._attribute_lock")
- self._loop: Optional[asyncio.AbstractEventLoop] = None
-
- def __del__(self):
- """Destructor for ADTPulseConnection."""
- if self._allocated_session and self._session is not None:
- self._session.detach()
- @property
- def api_version(self) -> str:
- """Get the API version."""
- with self._class_threadlock:
- return self._api_version
+ # need to initialize this after the session since we set cookies
+ # based on it
+ super().__init__(
+ pulse_connection_status,
+ pulse_connection_properties,
+ debug_locks,
+ )
+ self._pc_attribute_lock = set_debug_lock(
+ debug_locks, "pyadtpulse.pc_attribute_lock"
+ )
+ self._connection_properties = pulse_connection_properties
+ self._connection_status = pulse_connection_status
+ self._authentication_properties = pulse_authentication
+ self._login_backoff = PulseBackoff(
+ "Login",
+ pulse_connection_status._backoff.initial_backoff_interval,
+ detailed_debug_logging=self._connection_properties.detailed_debug_logging,
+ )
+ self._login_in_progress = False
+ self._debug_locks = debug_locks
- @property
- def service_host(self) -> str:
- """Get the host prefix for connections."""
- with self._attribute_lock:
- return self._api_host
-
- @service_host.setter
- def service_host(self, host: str) -> None:
- """Set the host prefix for connections."""
- with self._attribute_lock:
- self._session.headers.update({"Host": host})
- self._api_host = host
+ @typechecked
+ def check_login_errors(
+ self, response: tuple[int, str | None, URL | None]
+ ) -> html.HtmlElement:
+ """Check response for login errors.
- @property
- def loop(self) -> Optional[asyncio.AbstractEventLoop]:
- """Get the event loop."""
- with self._attribute_lock:
- return self._loop
-
- @loop.setter
- def loop(self, loop: Optional[asyncio.AbstractEventLoop]) -> None:
- """Set the event loop."""
- with self._attribute_lock:
- self._loop = loop
-
- def check_sync(self, message: str) -> asyncio.AbstractEventLoop:
- """Checks if sync login was performed.
-
- Returns the loop to use for run_coroutine_threadsafe if so.
- Raises RuntimeError with given message if not."""
- with self._attribute_lock:
- if self._loop is None:
- raise RuntimeError(message)
- return self._loop
-
- async def async_query(
- self,
- uri: str,
- method: str = "GET",
- extra_params: Optional[Dict[str, str]] = None,
- extra_headers: Optional[Dict[str, str]] = None,
- timeout=1,
- ) -> Optional[ClientResponse]:
- """Query ADT Pulse async.
+ Will handle setting backoffs and raising exceptions.
Args:
- uri (str): URI to query
- method (str, optional): method to use. Defaults to "GET".
- extra_params (Optional[Dict], optional): query parameters. Defaults to None.
- extra_headers (Optional[Dict], optional): extra HTTP headers.
- Defaults to None.
- timeout (int, optional): timeout in seconds. Defaults to 1.
+ response (tuple[int, str | None, URL | None]): The response
Returns:
- Optional[ClientResponse]: aiohttp.ClientResponse object
- None on failure
- ClientResponse will already be closed.
+ html.HtmlElement: the parsed response tree
+
+ Raises:
+ PulseAuthenticationError: if login fails due to incorrect username/password
+ PulseServerConnectionError: if login fails due to server error
+ PulseAccountLockedError: if login fails due to account locked
+ PulseMFARequiredError: if login fails due to MFA required
+ PulseNotLoggedInError: if login fails due to not logged in
"""
- response = None
- with ADTPulseConnection._class_threadlock:
- if ADTPulseConnection._api_version == ADT_DEFAULT_VERSION:
- await self.async_fetch_version()
- url = self.make_url(uri)
- if uri in ADT_HTTP_REFERER_URIS:
- new_headers = {"Accept": ADT_DEFAULT_HTTP_HEADERS["Accept"]}
- else:
- new_headers = {"Accept": "*/*"}
-
- LOG.debug("Updating HTTP headers: %s", new_headers)
- self._session.headers.update(new_headers)
-
- LOG.debug(
- "Attempting %s %s params=%s timeout=%d", method, uri, extra_params, timeout
- )
- # FIXME: reauthenticate if received:
- # "You have not yet signed in or you
- # have been signed out due to inactivity."
-
- # define connection method
- retry = 0
- max_retries = 3
- while retry < max_retries:
- try:
- if method == "GET":
- async with self._session.get(
- url, headers=extra_headers, params=extra_params, timeout=timeout
- ) as response:
- await response.text()
- elif method == "POST":
- async with self._session.post(
- url, headers=extra_headers, data=extra_params, timeout=timeout
- ) as response:
- await response.text()
+ def extract_seconds_from_string(s: str) -> int:
+ seconds = 0
+ match = re.search(r"\d+", s)
+ if match:
+ seconds = int(match.group())
+ if "minutes" in s:
+ seconds *= 60
+ return seconds
+
+ def determine_error_type():
+ """Determine what type of error we have from the url and the parsed page.
+
+ Will raise the appropriate exception.
+ """
+ self._login_in_progress = False
+ url = self._connection_properties.make_url(ADT_LOGIN_URI)
+ if response_url_string.startswith(url):
+ error = tree.find(".//div[@id='warnMsgContents']")
+ if error is not None:
+ error_text = error.text_content()
+ LOG.error("Error logging into pulse: %s", error_text)
+ if "Try again in" in error_text:
+ if (retry_after := extract_seconds_from_string(error_text)) > 0:
+ raise PulseAccountLockedError(
+ self._login_backoff,
+ retry_after + time(),
+ )
+ elif "You have not yet signed in" in error_text:
+ raise PulseNotLoggedInError()
+ elif "Sign In Unsuccessful" in error_text:
+ raise PulseAuthenticationError()
else:
- LOG.error("Invalid request method %s", method)
- return None
-
- if response.status in RECOVERABLE_ERRORS:
- retry = retry + 1
- LOG.info(
- "query returned recoverable error code %s, "
- "retrying (count = %d)",
- response.status,
- retry,
- )
- if retry == max_retries:
- LOG.warning(
- "Exceeded max retries of %d, giving up", max_retries
- )
- response.raise_for_status()
- await asyncio.sleep(2**retry + uniform(0.0, 1.0))
- continue
-
- response.raise_for_status()
- # success, break loop
- retry = 4
- except (
- asyncio.TimeoutError,
- ClientConnectionError,
- ClientConnectorError,
- ) as ex:
- LOG.debug(
- "Error %s occurred making %s request to %s, retrying",
- ex.args,
- method,
- url,
- exc_info=True,
- )
- await asyncio.sleep(2**retry + uniform(0.0, 1.0))
- continue
- except ClientResponseError as err:
- code = err.code
- LOG.exception(
- "Received HTTP error code %i in request to ADT Pulse", code
- )
- return None
-
- # success!
- # FIXME? login uses redirects so final url is wrong
- if uri in ADT_HTTP_REFERER_URIS:
- if uri == ADT_DEVICE_URI:
- referer = self.make_url(ADT_SYSTEM_URI)
+ LOG.error("Unknown error logging into pulse: no message given")
+ raise PulseNotLoggedInError()
else:
- if response is not None and response.url is not None:
- referer = str(response.url)
- LOG.debug("Setting Referer to: %s", referer)
- self._session.headers.update({"Referer": referer})
-
- return response
-
- def query(
- self,
- uri: str,
- method: str = "GET",
- extra_params: Optional[Dict[str, str]] = None,
- extra_headers: Optional[Dict[str, str]] = None,
- timeout=1,
- ) -> Optional[ClientResponse]:
- """Query ADT Pulse async.
+ url = self._connection_properties.make_url(ADT_MFA_FAIL_URI)
+ if url == response_url_string:
+ raise PulseMFARequiredError()
+
+ tree = make_etree(
+ response[0],
+ response[1],
+ response[2],
+ logging.ERROR,
+ "Could not log into ADT Pulse site",
+ )
+ # this probably should have been handled by async_query()
+ if tree is None:
+ raise PulseServerConnectionError(
+ f"Could not log into ADT Pulse site: code {response[0]}: "
+ f"URL: {response[2]}, response: {response[1]}",
+ self._login_backoff,
+ )
+ url = self._connection_properties.make_url(ADT_SUMMARY_URI)
+ response_url_string = str(response[2])
+ if url != response_url_string:
+ determine_error_type()
+ # if we get here we can't determine the error
+ # raise a generic authentication error
+ LOG.error(
+ "Login received unexpected response from login query: %s",
+ response_url_string,
+ )
+ raise PulseAuthenticationError()
+ return tree
- Args:
- uri (str): URI to query
- method (str, optional): method to use. Defaults to "GET".
- extra_params (Optional[Dict], optional): query parameters. Defaults to None.
- extra_headers (Optional[Dict], optional): extra HTTP headers.
- Defaults to None.
- timeout (int, optional): timeout in seconds. Defaults to 1.
- Returns:
- Optional[ClientResponse]: aiohttp.ClientResponse object
- None on failure
- ClientResponse will already be closed.
+ @typechecked
+ async def async_do_login_query(
+ self, timeout: int = ADT_DEFAULT_LOGIN_TIMEOUT
+ ) -> html.HtmlElement | None:
"""
- coro = self.async_query(uri, method, extra_params, extra_headers, timeout)
- return asyncio.run_coroutine_threadsafe(
- coro, self.check_sync("Attempting to run sync query from async login")
- ).result()
+ Performs a login query to the Pulse site.
- async def query_orb(
- self, level: int, error_message: str
- ) -> Optional[BeautifulSoup]:
- """Query ADT Pulse ORB.
+ Will backoff on login failures.
+
+ Will set login in progress flag.
Args:
- level (int): error level to log on failure
- error_message (str): error message to use on failure
+ timeout (int, optional): The timeout value for the query in seconds.
+ Defaults to ADT_DEFAULT_LOGIN_TIMEOUT.
Returns:
- Optional[BeautifulSoup]: A Beautiful Soup object, or None if failure
+ tree (html.HtmlElement, optional): the parsed response tree for
+ summary.jsp, or None if failure
+ Raises:
+ ValueError: if login parameters are not correct
+ PulseAuthenticationError: if login fails due to incorrect username/password
+ PulseServerConnectionError: if login fails due to server error
+ PulseServiceTemporarilyUnavailableError: if login fails due to too many requests or
+ server is temporarily unavailable
+ PulseAccountLockedError: if login fails due to account locked
+ PulseMFARequiredError: if login fails due to MFA required
+ PulseNotLoggedInError: if login fails due to not logged in
+ (which is probably an internal error)
"""
- response = await self.async_query(ADT_ORB_URI)
- return await make_soup(response, level, error_message)
+ if self.login_in_progress:
+ return None
+ await self.quick_logout()
+ # just raise exceptions if we're not going to be able to log in
+ lockout_time = self._login_backoff.expiration_time
+ if lockout_time > time():
+ raise PulseAccountLockedError(self._login_backoff, lockout_time)
+ cs_backoff = self._connection_status.get_backoff()
+ lockout_time = cs_backoff.expiration_time
+ if lockout_time > time():
+ raise PulseServiceTemporarilyUnavailableError(cs_backoff, lockout_time)
+ self.login_in_progress = True
+ data = {
+ "usernameForm": self._authentication_properties.username,
+ "passwordForm": self._authentication_properties.password,
+ "fingerprint": self._authentication_properties.fingerprint,
+ }
+ if self._authentication_properties.site_id:
+ data["networkid"] = self._authentication_properties.site_id
+ await self._login_backoff.wait_for_backoff()
+ try:
+ response = await self.async_query(
+ ADT_LOGIN_URI,
+ "POST",
+ extra_params=data,
+ timeout=timeout,
+ requires_authentication=False,
+ )
+ except (
+ PulseClientConnectionError,
+ PulseServerConnectionError,
+ PulseServiceTemporarilyUnavailableError,
+ ) as e:
+ LOG.error("Could not log into Pulse site: %s", e)
+ self.login_in_progress = False
+ raise
+ tree = self.check_login_errors(response)
+ self._connection_status.authenticated_flag.set()
+ self._authentication_properties.last_login_time = int(time())
+ self._login_backoff.reset_backoff()
+ self.login_in_progress = False
+ return tree
+
+ @typechecked
+ async def async_do_logout_query(self, site_id: str | None = None) -> None:
+ """Performs a logout query to the ADT Pulse site."""
+ params = {}
+ si = ""
+ self._connection_status.authenticated_flag.clear()
+ if site_id is not None and site_id != "":
+ self._authentication_properties.site_id = site_id
+ si = site_id
+ params.update({"networkid": si})
+
+ params.update({"partner": "adt"})
+ try:
+ await self.async_query(
+ ADT_LOGOUT_URI,
+ extra_params=params,
+ timeout=10,
+ requires_authentication=False,
+ )
+ # FIXME: do we care if this raises exceptions?
+ except (
+ PulseClientConnectionError,
+ PulseServiceTemporarilyUnavailableError,
+ PulseServerConnectionError,
+ ) as e:
+ LOG.debug("Could not logout from Pulse site: %s", e)
- def make_url(self, uri: str) -> str:
- """Create a URL to service host from a URI.
+ @property
+ def is_connected(self) -> bool:
+ """Check if ADT Pulse is connected."""
+ return (
+ self._connection_status.authenticated_flag.is_set()
+ and not self._login_in_progress
+ )
- Args:
- uri (str): the URI to convert
+ @property
+ def login_backoff(self) -> PulseBackoff:
+ """Return backoff object."""
+ with self._pc_attribute_lock:
+ return self._login_backoff
- Returns:
- str: the converted string
+ def check_sync(self, message: str) -> AbstractEventLoop:
+ """Convenience method to check if running from sync context."""
+ return self._connection_properties.check_sync(message)
+
+ @property
+ def debug_locks(self):
+ """Return debug locks."""
+ return self._debug_locks
+
+ @property
+ def login_in_progress(self) -> bool:
+ """Return login in progress."""
+ with self._pc_attribute_lock:
+ return self._login_in_progress
+
+ @login_in_progress.setter
+ @typechecked
+ def login_in_progress(self, value: bool) -> None:
+ """Set login in progress."""
+ with self._pc_attribute_lock:
+ self._login_in_progress = value
+
+ async def quick_logout(self) -> None:
+ """Quickly logout.
+
+ This just resets the authenticated flag and clears the ClientSession.
"""
- with self._attribute_lock:
- return f"{self._api_host}{API_PREFIX}{ADTPulseConnection._api_version}{uri}"
-
- async def async_fetch_version(self) -> None:
- """Fetch ADT Pulse version."""
- with ADTPulseConnection._class_threadlock:
- if ADTPulseConnection._api_version != ADT_DEFAULT_VERSION:
- return
- response = None
- signin_url = f"{self.service_host}/myhome{ADT_LOGIN_URI}"
- if self._session:
- try:
- async with self._session.get(signin_url) as response:
- # we only need the headers here, don't parse response
- response.raise_for_status()
- except (ClientResponseError, ClientConnectionError):
- LOG.warning(
- "Error occurred during API version fetch, defaulting to %s",
- ADT_DEFAULT_VERSION,
- )
- close_response(response)
- return
-
- if response is None:
- LOG.warning(
- "Error occurred during API version fetch, defaulting to %s",
- ADT_DEFAULT_VERSION,
- )
- return
-
- m = re.search("/myhome/(.+)/[a-z]*/", response.real_url.path)
- close_response(response)
- if m is not None:
- ADTPulseConnection._api_version = m.group(1)
- LOG.debug(
- "Discovered ADT Pulse version %s at %s",
- ADTPulseConnection._api_version,
- self.service_host,
- )
- return
-
- LOG.warning(
- "Couldn't auto-detect ADT Pulse version, defaulting to %s",
- ADT_DEFAULT_VERSION,
- )
+ LOG.debug("Resetting session")
+ self._connection_status.authenticated_flag.clear()
+ await self._connection_properties.clear_session()
+
+ @property
+ def detailed_debug_logging(self) -> bool:
+ """Return detailed debug logging."""
+ return (
+ self._login_backoff.detailed_debug_logging
+ and self._connection_properties.detailed_debug_logging
+ and self._connection_status.detailed_debug_logging
+ )
+
+ @detailed_debug_logging.setter
+ @typechecked
+ def detailed_debug_logging(self, value: bool):
+ with self._pc_attribute_lock:
+ self._login_backoff.detailed_debug_logging = value
+ self._connection_properties.detailed_debug_logging = value
+ self._connection_status.detailed_debug_logging = value
+
+ def get_login_backoff(self) -> PulseBackoff:
+ """Return login backoff."""
+ return self._login_backoff
diff --git a/pyadtpulse/pulse_connection_properties.py b/pyadtpulse/pulse_connection_properties.py
new file mode 100644
index 0000000..6342ff0
--- /dev/null
+++ b/pyadtpulse/pulse_connection_properties.py
@@ -0,0 +1,238 @@
+"""Pulse connection info."""
+
+from asyncio import AbstractEventLoop
+from re import search
+
+from aiohttp import ClientSession
+from typeguard import typechecked
+
+from .const import (
+ ADT_DEFAULT_HTTP_ACCEPT_HEADERS,
+ ADT_DEFAULT_HTTP_USER_AGENT,
+ ADT_DEFAULT_SEC_FETCH_HEADERS,
+ API_HOST_CA,
+ API_PREFIX,
+ DEFAULT_API_HOST,
+)
+from .util import set_debug_lock
+
+
+class PulseConnectionProperties:
+ """Pulse connection info."""
+
+ __slots__ = (
+ "_api_host",
+ "_session",
+ "_user_agent",
+ "_loop",
+ "_api_version",
+ "_pci_attribute_lock",
+ "_detailed_debug_logging",
+ "_debug_locks",
+ )
+
+ @staticmethod
+ @typechecked
+ def check_service_host(service_host: str) -> None:
+ """Check if service host is valid."""
+ if service_host is None or service_host == "":
+ raise ValueError("Service host is mandatory")
+ if service_host not in (DEFAULT_API_HOST, API_HOST_CA):
+ raise ValueError(
+ f"Service host must be one of {DEFAULT_API_HOST}" f" or {API_HOST_CA}"
+ )
+
+ @staticmethod
+ def get_api_version(response_path: str) -> str | None:
+        """Regex used to extract the API version.
+
+ Use for testing.
+ """
+ version: str | None = None
+ if not response_path:
+ return None
+ m = search(f"{API_PREFIX}(.+)/[a-z]*/", response_path)
+ if m is not None:
+ version = m.group(1)
+ return version
+
+ def __init__(
+ self,
+ host: str,
+ user_agent=ADT_DEFAULT_HTTP_USER_AGENT["User-Agent"],
+ detailed_debug_logging=False,
+ debug_locks=False,
+ ) -> None:
+ """Initialize Pulse connection information."""
+ self._pci_attribute_lock = set_debug_lock(
+ debug_locks, "pyadtpulse.pci_attribute_lock"
+ )
+ self.debug_locks = debug_locks
+ self.detailed_debug_logging = detailed_debug_logging
+ self._loop: AbstractEventLoop | None = None
+ self._session: ClientSession | None = None
+ self.service_host = host
+ self._api_version = ""
+ self._user_agent = user_agent
+
+ def __del__(self):
+ """Destructor for ADTPulseConnection."""
+ if self._session is not None and not self._session.closed:
+ self._session.detach()
+
+ def _set_headers(self) -> None:
+ if self._session is not None:
+ self._session.headers.update(ADT_DEFAULT_HTTP_ACCEPT_HEADERS)
+ self._session.headers.update(ADT_DEFAULT_SEC_FETCH_HEADERS)
+ self._session.headers.update({"User-Agent": self._user_agent})
+
+ @property
+ def service_host(self) -> str:
+ """Get the service host."""
+ with self._pci_attribute_lock:
+ return self._api_host
+
+ @service_host.setter
+ @typechecked
+ def service_host(self, host: str):
+ """Set the service host.
+
+ Raises:
+ ValueError if host is not valid.
+ """
+ self.check_service_host(host)
+ with self._pci_attribute_lock:
+ self._api_host = host
+
+ @property
+ def detailed_debug_logging(self) -> bool:
+ """Get the detailed debug logging flag."""
+ with self._pci_attribute_lock:
+ return self._detailed_debug_logging
+
+ @detailed_debug_logging.setter
+ @typechecked
+ def detailed_debug_logging(self, value: bool):
+ """Set the detailed debug logging flag."""
+ with self._pci_attribute_lock:
+ self._detailed_debug_logging = value
+
+ @property
+ def debug_locks(self) -> bool:
+ """Get the debug locks flag."""
+ with self._pci_attribute_lock:
+ return self._debug_locks
+
+ @debug_locks.setter
+ @typechecked
+ def debug_locks(self, value: bool):
+ """Set the debug locks flag."""
+ with self._pci_attribute_lock:
+ self._debug_locks = value
+
+ @typechecked
+ def check_sync(self, message: str) -> AbstractEventLoop:
+ """Checks if sync login was performed.
+
+ Returns the loop to use for run_coroutine_threadsafe if so.
+ Raises RuntimeError with given message if not.
+ """
+ with self._pci_attribute_lock:
+ if self._loop is None:
+ raise RuntimeError(message)
+ return self._loop
+
+ @typechecked
+ def check_async(self, message: str) -> None:
+ """Checks if async login was performed.
+
+ Raises RuntimeError with given message if not.
+ """
+ with self._pci_attribute_lock:
+ if self._loop is not None:
+ raise RuntimeError(message)
+
+ @property
+ def loop(self) -> AbstractEventLoop | None:
+ """Get the event loop."""
+ with self._pci_attribute_lock:
+ return self._loop
+
+ @loop.setter
+ @typechecked
+ def loop(self, loop: AbstractEventLoop | None):
+ """Set the event loop."""
+ with self._pci_attribute_lock:
+ self._loop = loop
+
+ @property
+ def session(self) -> ClientSession:
+ """Get the session."""
+ with self._pci_attribute_lock:
+ if self._session is None:
+ self._session = ClientSession()
+ self._set_headers()
+ return self._session
+
+ @property
+ def api_version(self) -> str:
+ """Get the API version."""
+ with self._pci_attribute_lock:
+ return self._api_version
+
+ @api_version.setter
+ @typechecked
+ def api_version(self, version: str):
+ """Set the API version.
+
+ Raises:
+ ValueError: if version is not in the form major.minor.patch-subpatch
+ """
+
+        def check_version_string(value: str):
+            """Validate an API version string.
+
+            Must be major.minor.patch-subpatch with all-numeric components
+            and major >= 26; raises ValueError otherwise.
+            """
+            parts = value.split("-")
+            version_parts = parts[0].split(".")
+            if not (
+                len(parts) == 2
+                and len(version_parts) == 3
+                and version_parts[0].isdigit()
+                and version_parts[1].isdigit()
+                and version_parts[2].isdigit()
+                and parts[1].isdigit()
+            ):
+                raise ValueError(
+                    "API version must be in the form major.minor.patch-subpatch"
+                )
+            major_version = int(version_parts[0])
+            if major_version < 26:
+                raise ValueError("API version is numeric but less than 26")
+
+ with self._pci_attribute_lock:
+ check_version_string(version)
+ self._api_version = version
+
+ @typechecked
+ def make_url(self, uri: str) -> str:
+ """Create a URL to service host from a URI.
+
+ Args:
+ uri (str): the URI to convert
+
+ Returns:
+ str: the converted string
+ """
+ with self._pci_attribute_lock:
+ return f"{self._api_host}{API_PREFIX}{self._api_version}{uri}"
+
+ async def clear_session(self):
+ """Clear the session."""
+ with self._pci_attribute_lock:
+ old_session = self._session
+ self._session = None
+ if old_session:
+ await old_session.close()
diff --git a/pyadtpulse/pulse_connection_status.py b/pyadtpulse/pulse_connection_status.py
new file mode 100644
index 0000000..288a8b0
--- /dev/null
+++ b/pyadtpulse/pulse_connection_status.py
@@ -0,0 +1,73 @@
+"""Pulse Connection Status."""
+
+from asyncio import Event
+
+from typeguard import typechecked
+
+from .pulse_backoff import PulseBackoff
+from .util import set_debug_lock
+
+
+class PulseConnectionStatus:
+ """Pulse Connection Status."""
+
+ __slots__ = (
+ "_backoff",
+ "_authenticated_flag",
+ "_pcs_attribute_lock",
+ )
+
+ @typechecked
+ def __init__(self, debug_locks: bool = False, detailed_debug_logging=False):
+        """Initialize the connection status object.
+
+        Args:
+            debug_locks (bool, optional): Enable debug locks. Defaults to False.
+            detailed_debug_logging (bool, optional): Enable detailed debug
+                logging for the backoff. Defaults to False.
+        """
+        self._pcs_attribute_lock = set_debug_lock(
+            debug_locks, "pyadtpulse.pcs_attribute_lock"
+        )
+ self._backoff = PulseBackoff(
+ "Connection Status",
+ initial_backoff_interval=1,
+ detailed_debug_logging=detailed_debug_logging,
+ )
+ self._authenticated_flag = Event()
+
+ @property
+ def authenticated_flag(self) -> Event:
+ """Get the authenticated flag."""
+ with self._pcs_attribute_lock:
+ return self._authenticated_flag
+
+ @property
+ def retry_after(self) -> float:
+ """Get the number of seconds to wait before retrying HTTP requests."""
+ with self._pcs_attribute_lock:
+ return self._backoff.expiration_time
+
+ @retry_after.setter
+ @typechecked
+ def retry_after(self, seconds: float) -> None:
+ """Set time after which HTTP requests can be retried."""
+ with self._pcs_attribute_lock:
+ self._backoff.set_absolute_backoff_time(seconds)
+
+ def get_backoff(self) -> PulseBackoff:
+ """Get the backoff object."""
+ return self._backoff
+
+ @property
+ def detailed_debug_logging(self) -> bool:
+ """Get the detailed debug logging flag."""
+ with self._pcs_attribute_lock:
+ return self._backoff.detailed_debug_logging
+
+ @detailed_debug_logging.setter
+ @typechecked
+ def detailed_debug_logging(self, value: bool):
+ """Set the detailed debug logging flag."""
+ with self._pcs_attribute_lock:
+ self._backoff.detailed_debug_logging = value
diff --git a/pyadtpulse/pulse_query_manager.py b/pyadtpulse/pulse_query_manager.py
new file mode 100644
index 0000000..72fd919
--- /dev/null
+++ b/pyadtpulse/pulse_query_manager.py
@@ -0,0 +1,442 @@
+"""Pulse Query Manager."""
+
+from logging import getLogger
+from asyncio import wait_for
+from datetime import datetime
+from http import HTTPStatus
+from time import time
+
+from aiohttp import (
+ ClientConnectionError,
+ ClientConnectorError,
+ ClientError,
+ ClientResponse,
+ ClientResponseError,
+ ServerConnectionError,
+ ServerDisconnectedError,
+ ServerTimeoutError,
+)
+from lxml import html
+from typeguard import typechecked
+from yarl import URL
+
+from .const import (
+ ADT_DEFAULT_LOGIN_TIMEOUT,
+ ADT_HTTP_BACKGROUND_URIS,
+ ADT_ORB_URI,
+ ADT_OTHER_HTTP_ACCEPT_HEADERS,
+)
+from .exceptions import (
+ PulseClientConnectionError,
+ PulseNotLoggedInError,
+ PulseServerConnectionError,
+ PulseServiceTemporarilyUnavailableError,
+)
+from .pulse_backoff import PulseBackoff
+from .pulse_connection_properties import PulseConnectionProperties
+from .pulse_connection_status import PulseConnectionStatus
+from .util import make_etree, set_debug_lock
+
LOG = getLogger(__name__)

# HTTP statuses considered transient and worth retrying before giving up
RECOVERABLE_ERRORS = {
    HTTPStatus.INTERNAL_SERVER_ERROR,
    HTTPStatus.BAD_GATEWAY,
    HTTPStatus.GATEWAY_TIMEOUT,
}

# maximum attempts for a single query when no global backoff is active
MAX_REQUERY_RETRIES = 3
+
+
class PulseQueryManager:
    """Pulse Query Manager.

    Issues HTTP queries to the ADT Pulse service with retry/backoff handling
    and translates low-level aiohttp errors into pyadtpulse exceptions.
    """

    __slots__ = (
        "_pqm_attribute_lock",
        "_connection_properties",
        "_connection_status",
        "_debug_locks",
    )

    @staticmethod
    @typechecked
    def _get_http_status_description(status_code: int) -> str:
        """Get HTTP status description.

        Raises:
            ValueError: if status_code is not a standard HTTPStatus value.
        """
        status = HTTPStatus(status_code)
        return status.description

    @typechecked
    def __init__(
        self,
        connection_status: PulseConnectionStatus,
        connection_properties: PulseConnectionProperties,
        debug_locks: bool = False,
    ) -> None:
        """Initialize Pulse Query Manager.

        Args:
            connection_status (PulseConnectionStatus): shared connection status
            connection_properties (PulseConnectionProperties): shared
                connection properties (session, host, API version)
            debug_locks (bool, optional): enable debug locks. Defaults to False.
        """
        self._pqm_attribute_lock = set_debug_lock(
            debug_locks, "pyadtpulse.pqm_attribute_lock"
        )
        self._connection_status = connection_status
        self._connection_properties = connection_properties
        self._debug_locks = debug_locks

    @staticmethod
    @typechecked
    async def _handle_query_response(
        response: ClientResponse | None,
    ) -> tuple[int, str | None, URL | None, str | None]:
        """Read a response into (status, text, url, Retry-After header value).

        Returns (0, None, None, None) if response is None.
        """
        if response is None:
            return 0, None, None, None
        response_text = await response.text()

        return (
            response.status,
            response_text,
            response.url,
            response.headers.get("Retry-After"),
        )

    @typechecked
    def _handle_http_errors(
        self, return_value: tuple[int, str | None, URL | None, str | None]
    ) -> None:
        """Handle HTTP errors.  Always raises.

        Parameters:
            return_value (tuple[int, str | None, URL | None, str | None]):
                The return value from _handle_query_response.

        Raises:
            PulseServerConnectionError: If the server returns an error code.
            PulseServiceTemporarilyUnavailableError: If the server returns a
                HTTP status code of 429 or 503.
        """

        def get_retry_after(retry_after: str) -> int | None:
            """
            Parse the value of the "Retry-After" header.

            Parameters:
                retry_after (str): The value of the "Retry-After" header

            Returns:
                int | None: The timestamp in seconds to wait before retrying,
                    or None if the header is invalid.
            """
            # Retry-After is either delta-seconds or an HTTP-date
            if retry_after.isnumeric():
                retval = int(retry_after) + int(time())
            else:
                try:
                    retval = int(
                        datetime.strptime(
                            retry_after, "%a, %d %b %Y %H:%M:%S %Z"
                        ).timestamp()
                    )
                except ValueError:
                    return None
            return retval

        if return_value[0] in (
            HTTPStatus.TOO_MANY_REQUESTS,
            HTTPStatus.SERVICE_UNAVAILABLE,
        ):
            retry = None
            if return_value[3]:
                retry = get_retry_after(return_value[3])
            raise PulseServiceTemporarilyUnavailableError(
                self._connection_status.get_backoff(),
                retry,
            )
        raise PulseServerConnectionError(
            f"HTTP error {return_value[0]}: {return_value[1]} connecting to {return_value[2]}",
            self._connection_status.get_backoff(),
        )

    @typechecked
    def _handle_network_errors(self, e: Exception) -> None:
        """Translate a network exception into a pyadtpulse exception.

        Server-side failures raise PulseServerConnectionError; everything
        else raises PulseClientConnectionError.  Always raises.
        """
        if type(e) in (
            ServerConnectionError,
            ServerTimeoutError,
            ServerDisconnectedError,
        ):
            raise PulseServerConnectionError(
                str(e), self._connection_status.get_backoff()
            )
        # fixed: the original expression parsed as
        # "(isinstance(...) and 'Connection refused' in str(e)) or 'timed out' in str(e)"
        # so ANY exception mentioning "timed out" was classified as a server
        # error regardless of its type; the stray parentheses around
        # ("timed out") indicate the grouping below was intended
        if isinstance(e, ClientConnectionError) and (
            "Connection refused" in str(e) or "timed out" in str(e)
        ):
            raise PulseServerConnectionError(
                str(e), self._connection_status.get_backoff()
            )
        if isinstance(e, ClientConnectorError) and e.os_error not in (
            TimeoutError,
            BrokenPipeError,
        ):
            raise PulseServerConnectionError(
                str(e), self._connection_status.get_backoff()
            )
        raise PulseClientConnectionError(str(e), self._connection_status.get_backoff())

    @typechecked
    async def async_query(
        self,
        uri: str,
        method: str = "GET",
        extra_params: dict[str, str] | None = None,
        extra_headers: dict[str, str] | None = None,
        timeout: int = 1,
        requires_authentication: bool = True,
    ) -> tuple[int, str | None, URL | None]:
        """
        Query ADT Pulse async.

        Args:
            uri (str): URI to query
            method (str, optional): method to use. Defaults to "GET".
            extra_params (Optional[Dict], optional): query/body parameters.
                Defaults to None.
            extra_headers (Optional[Dict], optional): extra HTTP headers.
                Defaults to None.
            timeout (int, optional): timeout in seconds. Defaults to 1.
            requires_authentication (bool, optional): True if authentication is
                required to perform query.
                Defaults to True.
                If true and authenticated flag not
                set, will wait for flag to be set.

        Returns:
            tuple with integer return code, optional response text, and optional URL of
            response

        Raises:
            PulseClientConnectionError: If the client cannot connect
            PulseServerConnectionError: If there is a server error
            PulseServiceTemporarilyUnavailableError: If the server returns an HTTP status code of 429 or 503
            PulseNotLoggedInError: if not logged in and task is waiting for longer than
                ADT_DEFAULT_LOGIN_TIMEOUT seconds
        """

        async def setup_query():
            # validate the method, honor any global backoff, and make sure
            # the API version has been determined before querying
            if method not in ("GET", "POST"):
                raise ValueError("method must be GET or POST")
            await self._connection_status.get_backoff().wait_for_backoff()
            if not self._connection_properties.api_version:
                await self.async_fetch_version()
            if not self._connection_properties.api_version:
                raise ValueError("Could not determine API version for connection")

        # bail out early if a Retry-After window is still active
        retry_after = self._connection_status.retry_after
        now = time()
        if retry_after > now:
            raise PulseServiceTemporarilyUnavailableError(
                self._connection_status.get_backoff(), retry_after
            )
        await setup_query()
        url = self._connection_properties.make_url(uri)
        # fixed: copy the caller's dict so setdefault() below never mutates it
        headers = dict(extra_headers) if extra_headers is not None else {}
        if uri in ADT_HTTP_BACKGROUND_URIS:
            headers.setdefault("Accept", ADT_OTHER_HTTP_ACCEPT_HEADERS["Accept"])
        if self._connection_properties.detailed_debug_logging:
            LOG.debug(
                "Attempting %s %s params=%s timeout=%d",
                method,
                url,
                extra_params,
                timeout,
            )
        retry = 0
        return_value: tuple[int, str | None, URL | None, str | None] = (
            HTTPStatus.OK.value,
            None,
            None,
            None,
        )
        # per-query backoff, independent of the global connection backoff
        query_backoff = PulseBackoff(
            f"Query:{method} {uri}",
            self._connection_status.get_backoff().initial_backoff_interval,
            threshold=0,
            debug_locks=self._debug_locks,
            detailed_debug_logging=self._connection_properties.detailed_debug_logging,
        )
        # don't retry at all if the global backoff is already active
        max_retries = (
            MAX_REQUERY_RETRIES
            if not self._connection_status.get_backoff().will_backoff()
            else 1
        )
        while retry < max_retries:
            try:
                await query_backoff.wait_for_backoff()
                retry += 1
                if (
                    requires_authentication
                    and not self._connection_status.authenticated_flag.is_set()
                ):
                    if self._connection_properties.detailed_debug_logging:
                        LOG.debug(
                            "%s for %s waiting for authenticated flag to be set",
                            method,
                            uri,
                        )
                    # wait for authenticated flag to be set
                    # use a timeout to prevent waiting forever
                    try:
                        await wait_for(
                            self._connection_status.authenticated_flag.wait(),
                            ADT_DEFAULT_LOGIN_TIMEOUT,
                        )
                    # NOTE(review): assumes Python >= 3.11 where
                    # asyncio.TimeoutError aliases builtin TimeoutError
                    except TimeoutError as ex:
                        LOG.warning(
                            "%s for %s timed out waiting for authenticated flag to be set",
                            method,
                            uri,
                        )
                        raise PulseNotLoggedInError() from ex
                async with self._connection_properties.session.request(
                    method,
                    url,
                    # fixed: previously passed extra_headers here, which
                    # silently dropped the Accept header merged above for
                    # background URIs
                    headers=headers,
                    params=extra_params if method == "GET" else None,
                    data=extra_params if method == "POST" else None,
                    timeout=timeout,
                ) as response:
                    return_value = await self._handle_query_response(response)

                    if return_value[0] in RECOVERABLE_ERRORS:
                        LOG.debug(
                            "query returned recoverable error code %s: %s,"
                            "retrying (count = %d)",
                            return_value[0],
                            self._get_http_status_description(return_value[0]),
                            retry,
                        )
                        if max_retries > 1 and retry == max_retries:
                            LOG.debug(
                                "Exceeded max retries of %d, giving up", max_retries
                            )
                        else:
                            query_backoff.increment_backoff()
                            # NOTE(review): raise_for_status() raises
                            # ClientResponseError, which the handler below
                            # converts to a Pulse exception and re-raises,
                            # so recoverable errors do not actually loop;
                            # confirm intent before restructuring
                            response.raise_for_status()
                            continue

                    response.raise_for_status()
                    break

            except ClientResponseError:
                self._handle_http_errors(return_value)
            except (
                ClientConnectorError,
                ServerTimeoutError,
                ClientError,
                ServerConnectionError,
                ServerDisconnectedError,
            ) as ex:
                LOG.debug(
                    "Error %s occurred making %s request to %s",
                    ex.args,
                    method,
                    url,
                    exc_info=True,
                )
                if retry == max_retries:
                    self._handle_network_errors(ex)
                query_backoff.increment_backoff()
                continue
            except TimeoutError as ex:
                if retry == max_retries:
                    LOG.debug("Exceeded max retries of %d, giving up", max_retries)
                    raise PulseServerConnectionError(
                        "Timeout error",
                        self._connection_status.get_backoff(),
                    ) from ex
                query_backoff.increment_backoff()
                continue
        # success
        self._connection_status.get_backoff().reset_backoff()
        return (return_value[0], return_value[1], return_value[2])

    async def query_orb(
        self, level: int, error_message: str
    ) -> html.HtmlElement | None:
        """Query ADT Pulse ORB.

        Args:
            level (int): error level to log on failure
            error_message (str): error message to use on failure

        Returns:
            Optional[html.HtmlElement]: the parsed response tree

        Raises:
            PulseClientConnectionError: If the client cannot connect
            PulseServerConnectionError: If there is a server error
            PulseServiceTemporarilyUnavailableError: If the server returns a Retry-After header
        """
        code, response, url = await self.async_query(
            ADT_ORB_URI,
            extra_headers={"Sec-Fetch-Mode": "cors", "Sec-Fetch-Dest": "empty"},
        )

        return make_etree(code, response, url, level, error_message)

    async def async_fetch_version(self) -> None:
        """Fetch ADT Pulse version.

        Exceptions are passed through to the caller since if this fails, there is
        probably some underlying connection issue.
        """
        response_values: tuple[int, str | None, URL | None, str | None] = (
            HTTPStatus.OK.value,
            None,
            None,
            None,
        )
        # already determined; nothing to do
        if self._connection_properties.api_version:
            return

        signin_url = self._connection_properties.service_host
        try:
            async with self._connection_properties.session.get(
                signin_url, timeout=10
            ) as response:
                response_values = await self._handle_query_response(response)
                response.raise_for_status()

        except ClientResponseError as ex:
            LOG.error(
                "Error %s occurred determining Pulse API version",
                ex.args,
                exc_info=True,
            )
            self._handle_http_errors(response_values)
            # unreachable: _handle_http_errors always raises
            return
        except (
            ClientConnectorError,
            ServerTimeoutError,
            ClientError,
            ServerConnectionError,
        ) as ex:
            LOG.error(
                "Error %s occurred determining Pulse API version",
                ex.args,
                exc_info=True,
            )
            self._handle_network_errors(ex)
        except TimeoutError as ex:
            LOG.error(
                "Timeout occurred determining Pulse API version %s",
                ex.args,
                exc_info=True,
            )
            raise PulseServerConnectionError(
                "Timeout occurred determining Pulse API version",
                self._connection_status.get_backoff(),
            ) from ex
        # the version is derived from the redirect URL of the signin page
        version = self._connection_properties.get_api_version(str(response_values[2]))
        if version is not None:
            self._connection_properties.api_version = version
            LOG.debug(
                "Discovered ADT Pulse version %s at %s",
                self._connection_properties.api_version,
                self._connection_properties.service_host,
            )
            self._connection_status.get_backoff().reset_backoff()
diff --git a/pyadtpulse/pyadtpulse_async.py b/pyadtpulse/pyadtpulse_async.py
new file mode 100644
index 0000000..1e95aa0
--- /dev/null
+++ b/pyadtpulse/pyadtpulse_async.py
@@ -0,0 +1,820 @@
+"""ADT Pulse Async API."""
+
+import logging
+import asyncio
+import re
+import time
+from random import randint
+from warnings import warn
+
+from lxml import html
+from typeguard import typechecked
+from yarl import URL
+
+from .alarm_panel import ADT_ALARM_UNKNOWN
+from .const import (
+ ADT_DEFAULT_HTTP_USER_AGENT,
+ ADT_DEFAULT_KEEPALIVE_INTERVAL,
+ ADT_DEFAULT_RELOGIN_INTERVAL,
+ ADT_GATEWAY_STRING,
+ ADT_SYNC_CHECK_URI,
+ ADT_TIMEOUT_URI,
+ DEFAULT_API_HOST,
+)
+from .exceptions import (
+ PulseAccountLockedError,
+ PulseAuthenticationError,
+ PulseClientConnectionError,
+ PulseGatewayOfflineError,
+ PulseMFARequiredError,
+ PulseNotLoggedInError,
+ PulseServerConnectionError,
+ PulseServiceTemporarilyUnavailableError,
+)
+from .pulse_authentication_properties import PulseAuthenticationProperties
+from .pulse_connection import PulseConnection
+from .pulse_connection_properties import PulseConnectionProperties
+from .pulse_connection_status import PulseConnectionStatus
+from .pyadtpulse_properties import PyADTPulseProperties
+from .site import ADTPulseSite
+from .util import handle_response, set_debug_lock
+
LOG = logging.getLogger(__name__)
# names used for the background asyncio tasks
SYNC_CHECK_TASK_NAME = "ADT Pulse Sync Check Task"
KEEPALIVE_TASK_NAME = "ADT Pulse Keepalive Task"
# backoff time before warning in wait_for_update()
WARN_TRANSIENT_FAILURE_THRESHOLD = 2
# seconds between forced full (rather than quick) logouts: 6 hours
FULL_LOGOUT_INTERVAL = 6 * 60 * 60
+
+
+class PyADTPulseAsync:
+ """ADT Pulse Async API."""
+
+ __slots__ = (
+ "_sync_task",
+ "_timeout_task",
+ "_pa_attribute_lock",
+ "_pulse_properties",
+ "_authentication_properties",
+ "_pulse_connection_properties",
+ "_pulse_connection",
+ "_pulse_connection_status",
+ "_site",
+ "_detailed_debug_logging",
+ "_sync_check_exception",
+ "_sync_check_sleeping",
+ "_updated_zones",
+ )
+
+ @typechecked
+ def __init__(
+ self,
+ username: str,
+ password: str,
+ fingerprint: str,
+ service_host: str = DEFAULT_API_HOST,
+ user_agent=ADT_DEFAULT_HTTP_USER_AGENT["User-Agent"],
+ debug_locks: bool = False,
+ keepalive_interval: int = ADT_DEFAULT_KEEPALIVE_INTERVAL,
+ relogin_interval: int = ADT_DEFAULT_RELOGIN_INTERVAL,
+ detailed_debug_logging: bool = False,
+ ) -> None:
+ """Create a PyADTPulse object.
+ Args:
+ username (str): Username.
+ password (str): Password.
+ fingerprint (str): 2FA fingerprint.
+ service_host (str, optional): host prefix to use
+ i.e. https://portal.adtpulse.com or
+ https://portal-ca.adtpulse.com
+ user_agent (str, optional): User Agent.
+ Defaults to ADT_DEFAULT_HTTP_HEADERS["User-Agent"].
+ debug_locks: (bool, optional): use debugging locks
+ Defaults to False
+ keepalive_interval (int, optional): number of minutes between
+ keepalive checks, defaults to ADT_DEFAULT_KEEPALIVE_INTERVAL,
+ maxiumum is ADT_MAX_KEEPALIVE_INTERVAL
+ relogin_interval (int, optional): number of minutes between relogin checks
+ defaults to ADT_DEFAULT_RELOGIN_INTERVAL,
+ minimum is ADT_MIN_RELOGIN_INTERVAL
+ detailed_debug_logging (bool, optional): enable detailed debug logging
+ """
+ self._pa_attribute_lock = set_debug_lock(
+ debug_locks, "pyadtpulse.pa_attribute_lock"
+ )
+ self._pulse_connection_properties = PulseConnectionProperties(
+ service_host, user_agent, detailed_debug_logging, debug_locks
+ )
+ self._authentication_properties = PulseAuthenticationProperties(
+ username=username,
+ password=password,
+ fingerprint=fingerprint,
+ debug_locks=debug_locks,
+ )
+ self._pulse_connection_status = PulseConnectionStatus(
+ debug_locks=debug_locks, detailed_debug_logging=detailed_debug_logging
+ )
+ self._pulse_properties = PyADTPulseProperties(
+ keepalive_interval=keepalive_interval,
+ relogin_interval=relogin_interval,
+ debug_locks=debug_locks,
+ )
+ self._pulse_connection = PulseConnection(
+ self._pulse_connection_status,
+ self._pulse_connection_properties,
+ self._authentication_properties,
+ debug_locks,
+ )
+ self._sync_task: asyncio.Task | None = None
+ self._timeout_task: asyncio.Task | None = None
+ self._site: ADTPulseSite | None = None
+ self._detailed_debug_logging = detailed_debug_logging
+ pc_backoff = self._pulse_connection.get_login_backoff()
+ self._sync_check_exception: Exception | None = PulseNotLoggedInError()
+ pc_backoff.reset_backoff()
+ self._sync_check_sleeping = asyncio.Event()
+ self._updated_zones: set[int] = set()
+
+ def __repr__(self) -> str:
+ """Object representation."""
+ return (
+ f"<{self.__class__.__name__}: {self._authentication_properties.username}>"
+ )
+
+ async def _update_site(self, tree: html.HtmlElement) -> None:
+ with self._pa_attribute_lock:
+ start_time = 0.0
+ if self._pulse_connection.detailed_debug_logging:
+ start_time = time.time()
+ if self._site is None:
+ await self._initialize_sites(tree)
+ if self._site is None:
+ raise RuntimeError("pyadtpulse could not retrieve site")
+ self._site.alarm_control_panel.update_alarm_from_etree(tree)
+ updated_zones = self._site.update_zone_from_etree(tree)
+ self._updated_zones.update(updated_zones)
+ if self._pulse_connection.detailed_debug_logging:
+ LOG.debug(
+ "Updated site %s in %s seconds",
+ self._site.id,
+ time.time() - start_time,
+ )
+
+ async def _initialize_sites(self, tree: html.HtmlElement) -> None:
+ """
+ Initializes the sites in the ADT Pulse account.
+
+ Args:
+ tree html.HtmlElement: the parsed response tree
+ Raises:
+ PulseGatewayOfflineError: if the gateway is offline
+ """
+ # typically, ADT Pulse accounts have only a single site (premise/location)
+ single_premise = tree.find(".//span[@id='p_singlePremise']")
+ if single_premise is not None and single_premise.text:
+ site_name = single_premise.text
+ start_time = 0.0
+ if self._pulse_connection.detailed_debug_logging:
+ start_time = time.time()
+ temp = tree.find(".//a[@class='p_signoutlink']")
+ signout_link = None
+ if temp is not None:
+ signout_link = str(temp.get("href"))
+ if signout_link:
+ m = re.search("networkid=(.+)&", signout_link)
+ if m and m.group(1) and m.group(1):
+ site_id = m.group(1)
+ LOG.debug("Discovered site id %s: %s", site_id, site_name)
+ new_site = ADTPulseSite(self._pulse_connection, site_id, site_name)
+
+ # fetch zones first, so that we can have the status
+ # updated with _update_alarm_status
+ if not await new_site.fetch_devices(None):
+ LOG.error("Could not fetch zones from ADT site")
+ new_site.alarm_control_panel.update_alarm_from_etree(tree)
+ if new_site.alarm_control_panel.status == ADT_ALARM_UNKNOWN:
+ new_site.gateway.is_online = False
+ new_site.update_zone_from_etree(tree)
+ self._site = new_site
+ if self._pulse_connection.detailed_debug_logging:
+ LOG.debug(
+ "Initialized site %s in %s seconds",
+ self._site.id,
+ time.time() - start_time,
+ )
+ return
+ else:
+ LOG.warning(
+ "Couldn't find site id for %s in %s", site_name, signout_link
+ )
+ else:
+ LOG.error("ADT Pulse accounts with MULTIPLE sites not supported!!!")
+
    # TODO: the current network id could also be extracted here; or perhaps
    # better, extract everything from /system/settings.jsp
+
+ def _get_task_name(self, task: asyncio.Task | None, default_name) -> str:
+ """
+ Get the name of a task.
+
+ Parameters:
+ task (Task): The task object.
+ default_name (str): The default name to use if the task is None.
+
+ Returns:
+ str: The name of the task if it is not None, otherwise the default name
+ with a suffix indicating a possible internal error.
+ """
+ if task is not None:
+ return task.get_name()
+ return f"{default_name} - possible internal error"
+
    def _get_sync_task_name(self) -> str:
        """Return the sync check task's name (or a default noting an error)."""
        return self._get_task_name(self._sync_task, SYNC_CHECK_TASK_NAME)
+
    def _get_timeout_task_name(self) -> str:
        """Return the keepalive task's name (or a default noting an error)."""
        return self._get_task_name(self._timeout_task, KEEPALIVE_TASK_NAME)
+
    def _set_update_exception(self, e: Exception | None) -> None:
        """Record e for wait_for_update() and wake any waiter.

        e may be None to signal a successful update with no error.
        """
        self.sync_check_exception = e
        self._pulse_properties.updates_exist.set()
+
    async def _keepalive_task(self) -> None:
        """
        Asynchronous function that runs a keepalive task to maintain the connection
        with the ADT Pulse cloud.
        """

        async def reset_pulse_cloud_timeout() -> tuple[int, str | None, URL | None]:
            # POSTing to the timeout URI resets the server-side session timer
            return await self._pulse_connection.async_query(ADT_TIMEOUT_URI, "POST")

        async def update_gateway_device_if_needed() -> None:
            # refresh gateway device attributes once its next_update time passes
            if self.site.gateway.next_update < time.time():
                await self.site.set_device(ADT_GATEWAY_STRING)

        def should_relogin(relogin_interval: int) -> bool:
            # relogin at a randomized point between 75% and 100% of the
            # interval so multiple clients don't relogin in lockstep
            return (
                relogin_interval != 0
                and time.time() - self._authentication_properties.last_login_time
                > randint(int(0.75 * relogin_interval), relogin_interval)
            )

        # the full-logout time is randomized for the same reason
        next_full_logout_time = time.time() + randint(
            int(0.75 * FULL_LOGOUT_INTERVAL), FULL_LOGOUT_INTERVAL
        )
        response: str | None
        task_name: str = self._get_task_name(self._timeout_task, KEEPALIVE_TASK_NAME)
        LOG.debug("creating %s", task_name)

        while True:
            relogin_interval = self._pulse_properties.relogin_interval * 60
            try:
                await asyncio.sleep(self._pulse_properties.keepalive_interval * 60)
                # skip this iteration if queries would back off anyway
                if (
                    self._pulse_connection_status.retry_after > time.time()
                    or self._pulse_connection_status.get_backoff().backoff_count
                    > WARN_TRANSIENT_FAILURE_THRESHOLD
                ):
                    LOG.debug(
                        "%s: Skipping actions because query will backoff", task_name
                    )
                    continue
                if not self._pulse_connection.is_connected:
                    LOG.debug("%s: Skipping relogin because not connected", task_name)
                    continue
                if should_relogin(relogin_interval):
                    # "quick" logout keeps session state; a "full" logout is
                    # forced periodically (see next_full_logout_time)
                    msg = "quick"
                    if time.time() > next_full_logout_time:
                        msg = "full"
                    with self._pa_attribute_lock:
                        if self._sync_task:
                            if self._detailed_debug_logging:
                                LOG.debug(
                                    "%s: waiting for sync check task to sleep",
                                    task_name,
                                )
                            # don't log out while the sync task is mid-query
                            await self._sync_check_sleeping.wait()
                        if msg == "full":
                            next_full_logout_time = time.time() + randint(
                                int(0.75 * FULL_LOGOUT_INTERVAL), FULL_LOGOUT_INTERVAL
                            )
                            await self.async_logout()
                        else:
                            await self._pulse_connection.quick_logout()
                    LOG.debug("%s: performing %s logout", task_name, msg)
                    try:
                        await self._login_looped(task_name)
                    except (PulseAuthenticationError, PulseMFARequiredError) as ex:
                        # unrecoverable without user action: stop the task
                        LOG.error("%s task exiting due to %s", task_name, ex.args[0])
                        return
                    continue
                LOG.debug("Resetting timeout")
                try:
                    code, response, url = await reset_pulse_cloud_timeout()
                except (
                    PulseServiceTemporarilyUnavailableError,
                    PulseClientConnectionError,
                    PulseServerConnectionError,
                ) as ex:
                    LOG.debug(
                        "Could not reset ADT Pulse cloud timeout due to %s, skipping",
                        ex.args[0],
                    )
                    continue
                if (
                    not handle_response(
                        code,
                        url,
                        logging.WARNING,
                        "Could not reset ADT Pulse cloud timeout",
                    )
                    or response is None
                ):
                    continue
                await update_gateway_device_if_needed()

            except asyncio.CancelledError:
                LOG.debug("%s cancelled", task_name)
                return
+
+ async def _clean_done_tasks(self) -> None:
+ with self._pa_attribute_lock:
+ if self._sync_task is not None and self._sync_task.done():
+ await self._sync_task
+ self._sync_task = None
+ if self._timeout_task is not None and self._timeout_task.done():
+ await self._timeout_task
+ self._timeout_task = None
+
+ async def _cancel_task(self, task: asyncio.Task | None) -> None:
+ """
+ Cancel a given asyncio task.
+
+ Args:
+ task (asyncio.Task | None): The task to be cancelled.
+ """
+ await self._clean_done_tasks()
+ if task is None:
+ return
+ task_name = task.get_name()
+ LOG.debug("cancelling %s", task_name)
+ task.cancel()
+ try:
+ await task
+ except asyncio.CancelledError:
+ pass
+ if task == self._sync_task:
+ with self._pa_attribute_lock:
+ self._sync_task = None
+ else:
+ with self._pa_attribute_lock:
+ self._timeout_task = None
+ LOG.debug("%s successfully cancelled", task_name)
+
+ async def _login_looped(self, task_name: str) -> None:
+ """
+ Logs in and loops until successful.
+
+ Args:
+ None.
+ Returns:
+ None
+ """
+ count = 0
+ log_level = logging.DEBUG
+
+ while True:
+ count += 1
+ if count > 5:
+ log_level = logging.WARNING
+ LOG.log(log_level, "%s performming loop login", task_name)
+ try:
+ await self.async_login()
+ except (
+ PulseClientConnectionError,
+ PulseServerConnectionError,
+ ) as ex:
+ LOG.log(
+ log_level,
+ "loop login in task %s received exception %s, retrying",
+ task_name,
+ ex.args[0],
+ )
+ if (
+ log_level == logging.WARNING
+ and self._sync_check_exception is None
+ or self._sync_check_exception != ex
+ ):
+ self._set_update_exception(ex)
+ continue
+ # success, return
+ return
+
    async def _sync_check_task(self) -> None:
        """Asynchronous function that performs a synchronization check task."""

        async def perform_sync_check_query():
            # the "ts" param is a millisecond timestamp cache-buster
            return await self._pulse_connection.async_query(
                ADT_SYNC_CHECK_URI,
                extra_headers={"Sec-Fetch-Mode": "iframe"},
                extra_params={"ts": str(int(time.time() * 1000))},
            )

        task_name = self._get_sync_task_name()
        LOG.debug("creating %s", task_name)

        # state shared with the closures below
        response_text: str | None = None
        code: int = 200
        have_updates = False
        url: URL | None = None

        def check_sync_check_response() -> bool:
            """
            Validates the sync check response received from the ADT Pulse site.
            Returns:
                bool: True if the sync check response indicates updates, False otherwise

            Raises:
                PulseAccountLockedError if the account is locked and no retry time is available.
                PulseAuthenticationError if the ADT Pulse site returns an authentication error.
                PulseMFAError if the ADT Pulse site returns an MFA error.
                PulseNotLoggedInError if the ADT Pulse site returns a not logged in error.
            """
            if response_text is None:
                LOG.warning("Internal Error: response_text is None")
                return False
            # valid sync tokens look like "<int>-<int>-<int>"
            pattern = r"\d+[-]\d+[-]\d+"
            if not re.match(pattern, response_text):
                warning_msg = "Unexpected sync check format"
                try:
                    # an unexpected body may be a login/error page; let the
                    # connection object raise the appropriate exception
                    self._pulse_connection.check_login_errors(
                        (code, response_text, url)
                    )
                except Exception as ex:
                    warning_msg += f": {ex}"
                    raise
                else:
                    warning_msg += ": skipping"
                finally:
                    LOG.warning(warning_msg)
                return False
            split_text = response_text.split("-")
            # NOTE(review): tokens whose first two fields exceed 9 are treated
            # as "no updates" -- confirm against the Pulse sync protocol
            if int(split_text[0]) > 9 or int(split_text[1]) > 9:
                return False
            return True

        async def handle_no_updates_exist() -> None:
            # if updates were flagged, apply them; otherwise just report status
            if have_updates:
                try:
                    success = await self.async_update()
                except (
                    PulseClientConnectionError,
                    PulseServerConnectionError,
                    PulseGatewayOfflineError,
                ) as e:
                    LOG.debug("Pulse update failed in task %s due to %s", task_name, e)
                    self._set_update_exception(e)
                    return
                except PulseNotLoggedInError:
                    LOG.info(
                        "Pulse update failed in task %s due to not logged in, relogging in...",
                        task_name,
                    )
                    await self._pulse_connection.quick_logout()
                    await self._login_looped(task_name)
                    return
                if not success:
                    LOG.debug("Pulse data update failed in task %s", task_name)
                    return
                self._set_update_exception(None)
            else:
                additional_msg = ""
                if not self.site.gateway.is_online:
                    # bump backoff and resignal since offline and nothing updated
                    self._set_update_exception(
                        PulseGatewayOfflineError(self.site.gateway.backoff)
                    )
                    additional_msg = ", gateway offline so backoff incremented"
                if self._detailed_debug_logging:
                    LOG.debug(
                        "Sync token %s indicates no remote updates to process %s ",
                        response_text,
                        additional_msg,
                    )

        async def shutdown_task(ex: Exception):
            # log out, stop the keepalive task, and surface ex to waiters
            await self._pulse_connection.quick_logout()
            await self._cancel_task(self._timeout_task)
            self._set_update_exception(ex)

        while True:
            try:
                # signal the keepalive task that it is safe to log out
                self._sync_check_sleeping.set()
                if not have_updates and not self.site.gateway.is_online:
                    # gateway going back online will trigger a sync check of 1-0-0
                    await self.site.gateway.backoff.wait_for_backoff()
                else:
                    await asyncio.sleep(
                        self.site.gateway.poll_interval if not have_updates else 0.0
                    )
                self._sync_check_sleeping.clear()
                try:
                    code, response_text, url = await perform_sync_check_query()
                except (
                    PulseClientConnectionError,
                    PulseServerConnectionError,
                ) as e:
                    # temporarily unavailable errors should be reported immediately
                    # since the next query will sleep until the retry-after is over
                    msg = ""
                    if e.backoff.backoff_count > WARN_TRANSIENT_FAILURE_THRESHOLD:
                        self._set_update_exception(e)
                    else:
                        msg = ", ignoring..."
                    LOG.debug("Pulse sync check query failed due to %s%s", e, msg)
                    continue
                except PulseServiceTemporarilyUnavailableError as e:
                    LOG.error("Pulse sync check query failed due to %s", e)
                    self._set_update_exception(e)
                    continue
                except PulseNotLoggedInError:
                    LOG.info(
                        "Pulse sync check query failed due to not logged in, relogging in..."
                    )
                    await self._pulse_connection.quick_logout()
                    await self._login_looped(task_name)
                    continue
                if not handle_response(
                    code, url, logging.WARNING, "Error querying ADT sync"
                ):
                    continue
                if response_text is None:
                    LOG.warning("Sync check received no response from ADT Pulse site")
                    continue
                more_updates = True
                try:
                    if have_updates:
                        more_updates = check_sync_check_response()
                    else:
                        have_updates = check_sync_check_response()
                except PulseNotLoggedInError:
                    LOG.info(
                        "Pulse sync check text indicates logged out, re-logging in...."
                    )
                    await self._pulse_connection.quick_logout()
                    await self._login_looped(task_name)
                except (
                    PulseAuthenticationError,
                    PulseMFARequiredError,
                    PulseAccountLockedError,
                ) as ex:
                    LOG.error(
                        "Task %s exiting due to error: %s",
                        task_name,
                        ex.args[0],
                    )
                    await shutdown_task(ex)
                    return
                if have_updates and more_updates:
                    LOG.debug("Updates exist: %s, requerying", response_text)
                    continue
                await handle_no_updates_exist()
                have_updates = False
                continue
            except asyncio.CancelledError:
                LOG.debug("%s cancelled", task_name)
                return
+
    async def async_login(self) -> None:
        """Login asynchronously to ADT.

        Returns: None

        Raises:
            PulseClientConnectionError: if client connection fails
            PulseServerConnectionError: if server connection fails
            PulseServiceTemporarilyUnavailableError: if server returns a Retry-After header
            PulseAuthenticationError: if authentication fails
            PulseAccountLockedError: if account is locked
            PulseMFARequiredError: if MFA is required
            PulseNotLoggedInError: if login fails
        """
        if self._pulse_connection.login_in_progress:
            LOG.debug("Login already in progress, returning")
            return
        LOG.debug(
            "Authenticating to ADT Pulse cloud service as %s",
            self._authentication_properties.username,
        )
        await self._pulse_connection.async_fetch_version()
        tree = await self._pulse_connection.async_do_login_query()
        if tree is None:
            # login query produced no parsable page: treat as not logged in
            await self._pulse_connection.quick_logout()
            ex = PulseNotLoggedInError()
            self.sync_check_exception = ex
            raise ex
        self.sync_check_exception = None
        # if tasks are started, we've already logged in before
        # clean up completed tasks first
        await self._clean_done_tasks()
        if self._timeout_task is not None:
            return
        if not self._site:
            await self._update_site(tree)
        if self._site is None:
            LOG.error("Could not retrieve any sites, login failed")
            await self._pulse_connection.quick_logout()
            ex = PulseNotLoggedInError()
            self.sync_check_exception = ex
            raise ex
        self.sync_check_exception = None
        # start the keepalive task; the sync check task is started lazily
        # by wait_for_update()
        self._timeout_task = asyncio.create_task(
            self._keepalive_task(), name=KEEPALIVE_TASK_NAME
        )
        # yield so the new task gets scheduled
        await asyncio.sleep(0)
+
+ async def async_logout(self) -> None:
+ """Logout of ADT Pulse async."""
+ if self._pulse_connection.login_in_progress:
+ LOG.debug("Login in progress, returning")
+ return
+ LOG.info(
+ "Logging %s out of ADT Pulse", self._authentication_properties.username
+ )
+ if asyncio.current_task() not in (self._sync_task, self._timeout_task):
+ self._set_update_exception(PulseNotLoggedInError())
+ await self._cancel_task(self._timeout_task)
+ await self._cancel_task(self._sync_task)
+ try:
+ site_id = self.site.id
+ except (RuntimeError, ValueError):
+ site_id = None
+ await self._pulse_connection.async_do_logout_query(site_id)
+
+ async def async_update(self) -> bool:
+ """Update ADT Pulse data.
+
+ Returns:
+ bool: True if update succeeded.
+
+ Raises:
+ PulseGatewayOfflineError: if the gateway is offline
+ """
+ LOG.debug("Checking ADT Pulse cloud service for updates")
+
+ # FIXME will have to query other URIs for camera/zwave/etc
+ tree = await self._pulse_connection.query_orb(
+ logging.INFO, "Error returned from ADT Pulse service check"
+ )
+ if tree is not None:
+ await self._update_site(tree)
+ return True
+
+ return False
+
+ async def wait_for_update(self) -> tuple[bool, set[int]]:
+ """Wait for update.
+
+ Blocks current async task until Pulse system
+ signals an update
+
+ Returns:
+ tuple: (bool, set[int]): (True if an update was detected, set of zone ids that were updated)
+ Raises:
+ Every exception from exceptions.py are possible
+ """
+ # FIXME?: This code probably won't work with multiple waiters.
+ await self._clean_done_tasks()
+ if self.sync_check_exception:
+ raise self.sync_check_exception
+ with self._pa_attribute_lock:
+ if self._timeout_task is None:
+ raise PulseNotLoggedInError()
+ if self._sync_task is None:
+ coro = self._sync_check_task()
+ self._sync_task = asyncio.create_task(
+ coro, name=f"{SYNC_CHECK_TASK_NAME}: Async session"
+ )
+ await asyncio.sleep(0)
+ old_alarm_status = self.site.alarm_control_panel.status
+ await self._pulse_properties.updates_exist.wait()
+ self._pulse_properties.updates_exist.clear()
+ curr_exception = self.sync_check_exception
+ self.sync_check_exception = None
+ if curr_exception:
+ raise curr_exception
+ updated_zones = self._updated_zones
+ self._updated_zones = set()
+ return (self.site.alarm_control_panel.status != old_alarm_status, updated_zones)
+
+ @property
+ def sites(self) -> list[ADTPulseSite]:
+ """Return all sites for this ADT Pulse account."""
+ warn(
+ "multiple sites being removed, use pyADTPulse.site instead",
+ PendingDeprecationWarning,
+ stacklevel=2,
+ )
+ with self._pa_attribute_lock:
+ if self._site is None:
+ raise RuntimeError(
+ "No sites have been retrieved, have you logged in yet?"
+ )
+ return [self._site]
+
+ @property
+ def site(self) -> ADTPulseSite:
+ """Return the site associated with the Pulse login."""
+ with self._pa_attribute_lock:
+ if self._site is None:
+ raise RuntimeError(
+ "No sites have been retrieved, have you logged in yet?"
+ )
+ return self._site
+
+ @property
+ def is_connected(self) -> bool:
+ """Convenience method to return whether ADT Pulse is connected."""
+ return self._pulse_connection.is_connected
+
+ @property
+ def detailed_debug_logging(self) -> bool:
+ """Return detailed debug logging."""
+ return self._pulse_connection.detailed_debug_logging
+
+ @detailed_debug_logging.setter
+ @typechecked
+ def detailed_debug_logging(self, value: bool) -> None:
+ """Set detailed debug logging."""
+ self._pulse_connection.detailed_debug_logging = value
+
+ @property
+ def keepalive_interval(self) -> int:
+ """Get the keepalive interval in minutes.
+
+ Returns:
+ int: the keepalive interval
+ """
+ return self._pulse_properties.keepalive_interval
+
+ @keepalive_interval.setter
+ @typechecked
+ def keepalive_interval(self, interval: int | None) -> None:
+ """Set the keepalive interval in minutes.
+
+ Args:
+ interval (int|None): The number of minutes between keepalive calls
+ If set to None, resets to ADT_DEFAULT_KEEPALIVE_INTERVAL
+
+ Raises:
+ ValueError: if a keepalive interval of greater than ADT_MAX_KEEPALIVE_INTERVAL
+ minutes is specified
+ """
+ self._pulse_properties.keepalive_interval = interval
+
+ @property
+ def relogin_interval(self) -> int:
+ """Get the relogin interval in minutes.
+
+ Returns:
+ int: the relogin interval
+ """
+ return self._pulse_properties.relogin_interval
+
+ @relogin_interval.setter
+ @typechecked
+ def relogin_interval(self, interval: int | None) -> None:
+ """Set the relogin interval in minutes.
+
+ If set to None, resets to ADT_DEFAULT_RELOGIN_INTERVAL
+ """
+ self._pulse_properties.relogin_interval = interval
+
+ @property
+ def sync_check_exception(self) -> Exception | None:
+ """Return sync check exception.
+
+ This should not be used by external code.
+
+ Returns:
+ Exception: sync check exception
+ """
+ with self._pa_attribute_lock:
+ return self._sync_check_exception
+
+ @sync_check_exception.setter
+ @typechecked
+ def sync_check_exception(self, value: Exception | None) -> None:
+ """Set sync check exception.
+
+ This should not be used by external code.
+
+ Args:
+ value (Exception): sync check exception
+ """
+ with self._pa_attribute_lock:
+ self._sync_check_exception = value
diff --git a/pyadtpulse/pyadtpulse_properties.py b/pyadtpulse/pyadtpulse_properties.py
new file mode 100644
index 0000000..d4a1a73
--- /dev/null
+++ b/pyadtpulse/pyadtpulse_properties.py
@@ -0,0 +1,173 @@
+"""PyADTPulse Properties."""
+
+import logging
+import asyncio
+from warnings import warn
+
+from typeguard import typechecked
+
+from .const import (
+ ADT_DEFAULT_KEEPALIVE_INTERVAL,
+ ADT_DEFAULT_RELOGIN_INTERVAL,
+ ADT_MAX_KEEPALIVE_INTERVAL,
+ ADT_MIN_RELOGIN_INTERVAL,
+)
+from .site import ADTPulseSite
+from .util import set_debug_lock
+
+LOG = logging.getLogger(__name__)
+
+
+class PyADTPulseProperties:
+ """PyADTPulse Properties."""
+
+ __slots__ = (
+ "_updates_exist",
+ "_pp_attribute_lock",
+ "_relogin_interval",
+ "_keepalive_interval",
+ "_site",
+ )
+
+ @staticmethod
+ @typechecked
+ def _check_keepalive_interval(keepalive_interval: int) -> None:
+ if keepalive_interval > ADT_MAX_KEEPALIVE_INTERVAL or keepalive_interval <= 0:
+ raise ValueError(
+ f"keepalive interval ({keepalive_interval}) must be "
+ f"greater than 0 and less than {ADT_MAX_KEEPALIVE_INTERVAL}"
+ )
+
+ @staticmethod
+ @typechecked
+ def _check_relogin_interval(relogin_interval: int) -> None:
+ if relogin_interval < ADT_MIN_RELOGIN_INTERVAL:
+ raise ValueError(
+ f"relogin interval ({relogin_interval}) must be "
+ f"greater than {ADT_MIN_RELOGIN_INTERVAL}"
+ )
+
+ @typechecked
+ def __init__(
+ self,
+ keepalive_interval: int = ADT_DEFAULT_KEEPALIVE_INTERVAL,
+ relogin_interval: int = ADT_DEFAULT_RELOGIN_INTERVAL,
+ debug_locks: bool = False,
+ ) -> None:
+ """Create a PyADTPulse properties object.
+        Args:
+            keepalive_interval (int): minutes between keepalive calls
+            relogin_interval (int): minutes between logins to Pulse
+            debug_locks (bool): use debug locks for the attribute lock
+        """
+ # FIXME use thread event/condition, regular condition?
+ # defer initialization to make sure we have an event loop
+
+ self._updates_exist = asyncio.locks.Event()
+
+ self._pp_attribute_lock = set_debug_lock(
+ debug_locks, "pyadtpulse.async_attribute_lock"
+ )
+
+ self._site: ADTPulseSite | None = None
+ self.keepalive_interval = keepalive_interval
+ self.relogin_interval = relogin_interval
+
+ @property
+ def relogin_interval(self) -> int:
+ """Get re-login interval.
+
+ Returns:
+ int: number of minutes to re-login to Pulse
+ 0 means disabled
+ """
+ with self._pp_attribute_lock:
+ return self._relogin_interval
+
+ @relogin_interval.setter
+ @typechecked
+ def relogin_interval(self, interval: int | None) -> None:
+ """Set re-login interval.
+
+ Args:
+ interval (int|None): The number of minutes between logins.
+ If set to None, resets to ADT_DEFAULT_RELOGIN_INTERVAL
+
+ Raises:
+ ValueError: if a relogin interval of less than ADT_MIN_RELOGIN_INTERVAL
+ minutes is specified
+ """
+ if interval is None:
+ interval = ADT_DEFAULT_RELOGIN_INTERVAL
+ else:
+ self._check_relogin_interval(interval)
+ with self._pp_attribute_lock:
+ self._relogin_interval = interval
+ LOG.debug("relogin interval set to %d", self._relogin_interval)
+
+ @property
+ def keepalive_interval(self) -> int:
+ """Get the keepalive interval in minutes.
+
+ Returns:
+ int: the keepalive interval
+ """
+ with self._pp_attribute_lock:
+ return self._keepalive_interval
+
+ @keepalive_interval.setter
+ @typechecked
+ def keepalive_interval(self, interval: int | None) -> None:
+ """Set the keepalive interval in minutes.
+
+ Args:
+ interval (int|None): The number of minutes between keepalive calls
+ If set to None, resets to ADT_DEFAULT_KEEPALIVE_INTERVAL
+
+ Raises:
+ ValueError: if a keepalive interval of greater than ADT_MAX_KEEPALIVE_INTERVAL
+ minutes is specified
+ """
+ if interval is None:
+ interval = ADT_DEFAULT_KEEPALIVE_INTERVAL
+ else:
+ self._check_keepalive_interval(interval)
+ with self._pp_attribute_lock:
+ self._keepalive_interval = interval
+ LOG.debug("keepalive interval set to %d", self._keepalive_interval)
+
+ @property
+ def sites(self) -> list[ADTPulseSite]:
+ """Return all sites for this ADT Pulse account."""
+ warn(
+ "multiple sites being removed, use pyADTPulse.site instead",
+ PendingDeprecationWarning,
+ stacklevel=2,
+ )
+ with self._pp_attribute_lock:
+ if self._site is None:
+ raise RuntimeError(
+ "No sites have been retrieved, have you logged in yet?"
+ )
+ return [self._site]
+
+ @property
+ def site(self) -> ADTPulseSite:
+ """Return the site associated with the Pulse login."""
+ with self._pp_attribute_lock:
+ if self._site is None:
+ raise RuntimeError(
+ "No sites have been retrieved, have you logged in yet?"
+ )
+ return self._site
+
+ def set_update_status(self) -> None:
+ """Sets updates_exist to notify wait_for_update."""
+ with self._pp_attribute_lock:
+ self.updates_exist.set()
+
+ @property
+ def updates_exist(self) -> asyncio.locks.Event:
+ """Check if updates exist."""
+ with self._pp_attribute_lock:
+ return self._updates_exist
diff --git a/pyadtpulse/site.py b/pyadtpulse/site.py
index b010e86..386d6a9 100644
--- a/pyadtpulse/site.py
+++ b/pyadtpulse/site.py
@@ -1,194 +1,94 @@
"""Module representing an ADT Pulse Site."""
+
import logging
import re
from asyncio import Task, create_task, gather, get_event_loop, run_coroutine_threadsafe
from datetime import datetime
-from threading import RLock
from time import time
-from typing import List, Optional, Union
-from warnings import warn
-
-# import dateparser
-from bs4 import BeautifulSoup
-from .alarm_panel import ADTPulseAlarmPanel
-from .const import ADT_DEVICE_URI, ADT_GATEWAY_STRING, ADT_SYSTEM_URI
-from .gateway import ADTPulseGateway
-from .pulse_connection import ADTPulseConnection
-from .util import DebugRLock, make_soup, parse_pulse_datetime, remove_prefix
+from lxml import html
+from typeguard import typechecked
+
+from .const import ADT_DEVICE_URI, ADT_GATEWAY_STRING, ADT_GATEWAY_URI, ADT_SYSTEM_URI
+from .exceptions import (
+ PulseClientConnectionError,
+ PulseGatewayOfflineError,
+ PulseServerConnectionError,
+ PulseServiceTemporarilyUnavailableError,
+)
+from .pulse_connection import PulseConnection
+from .site_properties import ADTPulseSiteProperties
+from .util import make_etree, parse_pulse_datetime, remove_prefix
from .zones import ADTPulseFlattendZone, ADTPulseZones
LOG = logging.getLogger(__name__)
+SECURITY_PANEL_ID = "1"
+SECURITY_PANEL_NAME = "Security Panel"
+
-class ADTPulseSite:
+class ADTPulseSite(ADTPulseSiteProperties):
"""Represents an individual ADT Pulse site."""
- __slots__ = (
- "_pulse_connection",
- "_id",
- "_name",
- "_last_updated",
- "_alarm_panel",
- "_zones",
- "_site_lock",
- "_gateway",
- )
-
- def __init__(self, pulse_connection: ADTPulseConnection, site_id: str, name: str):
+ __slots__ = ("_pulse_connection", "_trouble_zones", "_tripped_zones")
+
+ @typechecked
+ def __init__(self, pulse_connection: PulseConnection, site_id: str, name: str):
"""Initialize.
Args:
- adt_service (PyADTPulse): a PyADTPulse object
- site_id (str): site ID
- name (str): site name
+ pulse_connection (PulseConnection): Pulse connection.
+ site_id (str): Site ID.
+ name (str): Site name.
"""
self._pulse_connection = pulse_connection
- self._id = site_id
- self._name = name
- self._last_updated: int = 0
- self._zones = ADTPulseZones()
- self._site_lock: Union[RLock, DebugRLock]
- if isinstance(self._pulse_connection._attribute_lock, DebugRLock):
- self._site_lock = DebugRLock("ADTPulseSite._site_lock")
- else:
- self._site_lock = RLock()
- self._alarm_panel = ADTPulseAlarmPanel()
- self._gateway = ADTPulseGateway()
-
- @property
- def id(self) -> str:
- """Get site id.
-
- Returns:
- str: the site id
- """
- return self._id
-
- @property
- def name(self) -> str:
- """Get site name.
-
- Returns:
- str: the site name
- """
- return self._name
-
- # FIXME: should this actually return if the alarm is going off!? How do we
- # return state that shows the site is compromised??
-
- @property
- def last_updated(self) -> int:
- """Return time site last updated.
-
- Returns:
- int: the time site last updated as datetime
- """
- with self._site_lock:
- return self._last_updated
-
- @property
- def site_lock(self) -> Union[RLock, DebugRLock]:
- """Get thread lock for site data.
-
- Not needed for async
-
- Returns:
- RLock: thread RLock
- """
- return self._site_lock
+ super().__init__(site_id, name, pulse_connection.debug_locks)
+ self._trouble_zones: set[int] | None = None
+ self._tripped_zones: set[int] = set()
+ @typechecked
def arm_home(self, force_arm: bool = False) -> bool:
"""Arm system home."""
- if self.alarm_control_panel is None:
- raise RuntimeError("Cannot arm system home, no control panels exist")
return self.alarm_control_panel.arm_home(
self._pulse_connection, force_arm=force_arm
)
+ @typechecked
def arm_away(self, force_arm: bool = False) -> bool:
"""Arm system away."""
- if self.alarm_control_panel is None:
- raise RuntimeError("Cannot arm system away, no control panels exist")
return self.alarm_control_panel.arm_away(
self._pulse_connection, force_arm=force_arm
)
def disarm(self) -> bool:
"""Disarm system."""
- if self.alarm_control_panel is None:
- raise RuntimeError("Cannot disarm system, no control panels exist")
return self.alarm_control_panel.disarm(self._pulse_connection)
+ @typechecked
async def async_arm_home(self, force_arm: bool = False) -> bool:
"""Arm system home async."""
- if self.alarm_control_panel is None:
- raise RuntimeError("Cannot arm system home, no control panels exist")
return await self.alarm_control_panel.async_arm_home(
self._pulse_connection, force_arm=force_arm
)
+ @typechecked
async def async_arm_away(self, force_arm: bool = False) -> bool:
"""Arm system away async."""
- if self.alarm_control_panel is None:
- raise RuntimeError("Cannot arm system away, no control panels exist")
return await self.alarm_control_panel.async_arm_away(
self._pulse_connection, force_arm=force_arm
)
+ @typechecked
+ async def async_arm_night(self, force_arm: bool = False) -> bool:
+        """Arm system night async."""
+ return await self.alarm_control_panel.async_arm_night(
+ self._pulse_connection, force_arm=force_arm
+ )
+
async def async_disarm(self) -> bool:
"""Disarm system async."""
- if self.alarm_control_panel is None:
- raise RuntimeError("Cannot disarm system, no control panels exist")
return await self.alarm_control_panel.async_disarm(self._pulse_connection)
- @property
- def zones(self) -> Optional[List[ADTPulseFlattendZone]]:
- """Return all zones registered with the ADT Pulse account.
-
- (cached copy of last fetch)
- See Also fetch_zones()
- """
- with self._site_lock:
- if not self._zones:
- raise RuntimeError("No zones exist")
- return self._zones.flatten()
-
- @property
- def zones_as_dict(self) -> Optional[ADTPulseZones]:
- """Return zone information in dictionary form.
-
- Returns:
- ADTPulseZones: all zone information
- """
- with self._site_lock:
- if not self._zones:
- raise RuntimeError("No zones exist")
- return self._zones
-
- @property
- def alarm_control_panel(self) -> ADTPulseAlarmPanel:
- """Return the alarm panel object for the site.
-
- Returns:
- Optional[ADTPulseAlarmPanel]: the alarm panel object
- """
- return self._alarm_panel
-
- @property
- def gateway(self) -> ADTPulseGateway:
- """Get gateway device object.
-
- Returns:
- ADTPulseGateway: Gateway device
- """
- return self._gateway
-
- @property
- def history(self):
- """Return log of history for this zone (NOT IMPLEMENTED)."""
- raise NotImplementedError
-
# status_orb = summary_html_soup.find('canvas', {'id': 'ic_orb'})
# if status_orb:
# self._status = status_orb['orb']
@@ -200,50 +100,70 @@ def history(self):
# if we should also update the zone details, force a fresh fetch
# of data from ADT Pulse
- async def _get_device_attributes(self, device_id: str) -> Optional[dict[str, str]]:
+ async def _get_device_attributes(self, device_id: str) -> dict[str, str] | None:
+ """
+ Retrieves the attributes of a device.
+
+ Args:
+ device_id (str): The ID of the device to retrieve attributes for.
+
+ Returns:
+            dict[str, str] | None: A dictionary of attribute names and their
+ corresponding values,
+ or None if the device response lxml tree is None.
+ """
result: dict[str, str] = {}
if device_id == ADT_GATEWAY_STRING:
- deviceResponse = await self._pulse_connection.async_query(
- "/system/gateway.jsp", timeout=10
+ device_response = await self._pulse_connection.async_query(
+ ADT_GATEWAY_URI, timeout=10
)
else:
- deviceResponse = await self._pulse_connection.async_query(
+ device_response = await self._pulse_connection.async_query(
ADT_DEVICE_URI, extra_params={"id": device_id}
)
- deviceResponseSoup = await make_soup(
- deviceResponse,
+ device_response_etree = make_etree(
+ device_response[0],
+ device_response[1],
+ device_response[2],
logging.DEBUG,
"Failed loading device attributes from ADT Pulse service",
)
- if deviceResponseSoup is None:
+ if device_response_etree is None:
return None
- for devInfoRow in deviceResponseSoup.find_all(
- "td", {"class", "InputFieldDescriptionL"}
+ for dev_info_row in device_response_etree.findall(
+ ".//td[@class='InputFieldDescriptionL']"
):
- identityText = (
- str(devInfoRow.get_text())
+ identity_text = (
+ str(dev_info_row.text_content())
.lower()
.strip()
.rstrip(":")
.replace(" ", "_")
.replace("/", "_")
)
- sibling = devInfoRow.find_next_sibling()
- if not sibling:
+ sibling = dev_info_row.getnext()
+ if sibling is None:
value = "Unknown"
else:
- value = str(sibling.get_text()).strip()
- result.update({identityText: value})
+ value = str(sibling.text_content().strip())
+ result.update({identity_text: value})
return result
- async def _set_device(self, device_id: str) -> None:
+ @typechecked
+ async def set_device(self, device_id: str) -> None:
+ """
+ Sets the device attributes for the given device ID.
+
+ Args:
+ device_id (str): The ID of the device.
+ """
dev_attr = await self._get_device_attributes(device_id)
if dev_attr is None:
return
if device_id == ADT_GATEWAY_STRING:
self._gateway.set_gateway_attributes(dev_attr)
return
- if device_id == "1":
+ if device_id == SECURITY_PANEL_ID:
self._alarm_panel.set_alarm_attributes(dev_attr)
return
if device_id.isdigit():
@@ -251,202 +171,335 @@ async def _set_device(self, device_id: str) -> None:
else:
LOG.debug("Zone %s is not an integer, skipping", device_id)
- async def _fetch_devices(self, soup: Optional[BeautifulSoup]) -> bool:
- """Fetch devices for a site.
+ @typechecked
+ async def fetch_devices(self, tree: html.HtmlElement | None) -> bool:
+ """
+ Fetches the devices from the given lxml etree and updates
+ the zone attributes.
Args:
- soup (BeautifulSoup, Optional): a BS4 object with data fetched from
- ADT Pulse web site
- Returns:
- ADTPulseZones
+            tree (html.HtmlElement | None): The lxml etree containing
+ the devices.
- None if an error occurred
+ Returns:
+ bool: True if the devices were fetched and zone attributes were updated
+ successfully, False otherwise.
"""
+ regex_device = r"goToUrl\('device.jsp\?id=(\d*)'\);"
task_list: list[Task] = []
- if not soup:
+ zone_id: str | None = None
+
+ def add_zone_from_row(row_tds: list[html.HtmlElement]) -> str | None:
+ """Adds a zone from an HtmlElement row.
+
+ Returns None if successful, otherwise the zone ID if present.
+ """
+ zone_id: str | None = None
+ if row_tds and len(row_tds) > 4:
+ zone_name: str = row_tds[1].text_content().strip()
+ zone_id = row_tds[2].text_content().strip()
+ zone_type: str = row_tds[4].text_content().strip()
+ zone_status = "Unknown"
+ zs_temp = row_tds[0].find("canvas")
+ if (
+ zs_temp is not None
+ and zs_temp.get("title") is not None
+ and zs_temp.get("title") != ""
+ ):
+ zone_status = zs_temp.get("title")
+ if (
+ zone_id is not None
+ and zone_id.isdecimal()
+ and zone_name
+ and zone_type
+ ):
+ self._zones.update_zone_attributes(
+ {
+ "name": zone_name,
+ "zone": zone_id,
+ "type_model": zone_type,
+ "status": zone_status.strip(),
+ }
+ )
+ return None
+ return zone_id
+
+ def check_panel_or_gateway(
+ regex_device: str,
+ device_name: str,
+ zone_id: str | None,
+ on_click_value_text: str,
+ ) -> Task | None:
+ result = re.findall(regex_device, on_click_value_text)
+ if result:
+ device_id = result[0]
+ if device_id == SECURITY_PANEL_ID or device_name == SECURITY_PANEL_NAME:
+ return create_task(self.set_device(device_id))
+ if zone_id and zone_id.isdecimal():
+ return create_task(self.set_device(device_id))
+ LOG.debug("Skipping %s as it doesn't have an ID", device_name)
+ return None
+
+ if tree is None:
response = await self._pulse_connection.async_query(ADT_SYSTEM_URI)
- soup = await make_soup(
- response,
+ tree = make_etree(
+ response[0],
+ response[1],
+ response[2],
logging.WARNING,
"Failed loading zone status from ADT Pulse service",
)
- if not soup:
+ if tree is None:
return False
-
- regexDevice = r"goToUrl\('device.jsp\?id=(\d*)'\);"
with self._site_lock:
- for row in soup.find_all("tr", {"class": "p_listRow", "onclick": True}):
- device_name = row.find("a").get_text()
- row_tds = row.find_all("td")
- zone_id = None
- # see if we can create a zone without calling device.jsp
- if row_tds is not None and len(row_tds) > 4:
- zone_name = row_tds[1].get_text().strip()
- zone_id = row_tds[2].get_text().strip()
- zone_type = row_tds[4].get_text().strip()
- zone_status = row_tds[0].find("canvas").get("title").strip()
- if (
- zone_id.isdecimal()
- and zone_name is not None
- and zone_type is not None
- ):
- self._zones.update_zone_attributes(
- {
- "name": zone_name,
- "zone": zone_id,
- "type_model": zone_type,
- "status": zone_status,
- }
- )
- continue
- onClickValueText = row.get("onclick")
- if (
- onClickValueText == "goToUrl('gateway.jsp');"
- or device_name == "Gateway"
- ):
- task_list.append(create_task(self._set_device(ADT_GATEWAY_STRING)))
+ for row in tree.findall(".//tr[@class='p_listRow'][@onclick]"):
+ tmp_device_name = row.find(".//a")
+ if tmp_device_name is None:
+ LOG.debug("Skipping device as it has no name")
continue
- result = re.findall(regexDevice, onClickValueText)
-
- # only proceed if regex succeeded, as some users have onClick
- # links that include gateway.jsp
- if not result:
+ device_name = tmp_device_name.text_content().strip()
+ row_tds = row.findall("td")
+ zone_id = add_zone_from_row(row_tds)
+ if zone_id is None:
+ continue
+ on_click_value_text = row.get("onclick")
+ if on_click_value_text is None:
LOG.debug(
- "Failed regex match #%s on #%s "
- "from ADT Pulse service, ignoring",
- regexDevice,
- onClickValueText,
+ "Skipping device %s as it has no onclick value", device_name
)
continue
- # alarm panel case
- if result[0] == "1" or device_name == "Security Panel":
- task_list.append(create_task(self._set_device(result[0])))
- continue
- # zone case if we couldn't just call update_zone_attributes
- if zone_id is not None and zone_id.isdecimal():
- task_list.append(create_task(self._set_device(result[0])))
- continue
- else:
- LOG.debug("Skipping %s as it doesn't have an ID", device_name)
-
- await gather(*task_list)
- self._last_updated = int(time())
- return True
+ if (
+ on_click_value_text in ("goToUrl('gateway.jsp');", "Gateway")
+ or device_name == "Gateway"
+ ):
+ task_list.append(create_task(self.set_device(ADT_GATEWAY_STRING)))
+ elif (
+ result := check_panel_or_gateway(
+ regex_device,
+ device_name,
+ zone_id,
+ on_click_value_text,
+ )
+ ) is not None:
+ task_list.append(result)
- # FIXME: ensure the zones for the correct site are being loaded!!!
+ await gather(*task_list)
+ self._last_updated = int(time())
+ return True
async def _async_update_zones_as_dict(
- self, soup: Optional[BeautifulSoup]
- ) -> Optional[ADTPulseZones]:
+ self, tree: html.HtmlElement | None
+ ) -> ADTPulseZones | None:
"""Update zone status information asynchronously.
Returns:
ADTPulseZones: a dictionary of zones with status
None if an error occurred
+
+ Raises:
+            PulseGatewayOfflineError: If the gateway is offline.
"""
with self._site_lock:
if self._zones is None:
self._site_lock.release()
raise RuntimeError("No zones exist")
LOG.debug("fetching zones for site %s", self._id)
- if not soup:
+ if not tree:
# call ADT orb uri
- soup = await self._pulse_connection.query_orb(
- logging.WARNING, "Could not fetch zone status updates"
- )
- if soup is None:
+ try:
+ tree = await self._pulse_connection.query_orb(
+ logging.WARNING, "Could not fetch zone status updates"
+ )
+ except (
+ PulseServiceTemporarilyUnavailableError,
+ PulseServerConnectionError,
+ PulseClientConnectionError,
+ ) as ex:
+ LOG.warning(
+ "Could not fetch zone status updates from orb: %s", ex.args[0]
+ )
+ return None
+ if tree is None:
return None
- return self._update_zone_from_soup(soup)
+ self.update_zone_from_etree(tree)
+ return self._zones
- def _update_zone_from_soup(self, soup: BeautifulSoup) -> Optional[ADTPulseZones]:
- # parse ADT's convulated html to get sensor status
- with self._site_lock:
- gateway_online = False
- for row in soup.find_all("tr", {"class": "p_listRow"}):
- temp = row.find("div", {"class": "p_grayNormalText"})
- # v26 and lower: temp = row.find("span", {"class": "p_grayNormalText"})
- if temp is None:
- break
- try:
- zone = int(
- remove_prefix(
- temp.get_text(),
- "Zone",
- )
+ def update_zone_from_etree(self, tree: html.HtmlElement) -> set[int]:
+ """
+ Updates the zone information based on the provided lxml etree.
+
+ Args:
+            tree (html.HtmlElement): the parsed response tree
+
+ Returns:
+ set[int]: a set of zone ids that were updated
+
+ Raises:
+            PulseGatewayOfflineError: If the gateway is offline.
+ """
+
+ def get_zone_id(zone_row: html.HtmlElement) -> int | None:
+ try:
+ zone = int(
+ remove_prefix(
+ zone_row.find(
+ ".//div[@class='p_grayNormalText']"
+ ).text_content(),
+ "Zone",
)
- except ValueError:
- LOG.debug("skipping row due to zone not being an integer")
- continue
- # parse out last activity (required dealing with "Yesterday 1:52Â PM")
- temp = row.find("span", {"class": "devStatIcon"})
- if temp is None:
- break
- last_update = datetime(1970, 1, 1)
- try:
- last_update = parse_pulse_datetime(
- remove_prefix(temp.get("title"), "Last Event:")
- .lstrip()
- .rstrip()
+ )
+ except AttributeError:
+ LOG.debug("skipping row due to no zone id")
+ return None
+ except ValueError:
+ LOG.debug("skipping row due to zone not being an integer")
+ return None
+ return zone
+
+ def get_zone_last_update(zone_row: html.HtmlElement, zone: int) -> datetime:
+ try:
+ last_update = parse_pulse_datetime(
+ remove_prefix(
+ zone_row.find(".//span[@class='devStatIcon']").get("title"),
+ "Last Event:",
)
- except ValueError:
- last_update = datetime(1970, 1, 1)
- # name = row.find("a", {"class": "p_deviceNameText"}).get_text()
+ )
+ except (AttributeError, ValueError):
+ LOG.debug(
+ "Unable to set last event time for zone %d due to malformed html",
+ zone,
+ )
+ last_update = datetime(1970, 1, 1)
+ return last_update
+ def get_zone_state(zone_row: html.HtmlElement, zone: int) -> str:
+ try:
state = remove_prefix(
- row.find("canvas", {"class": "p_ic_icon_device"}).get("icon"),
+ zone_row.find(".//canvas[@class='p_ic_icon_device']").get("icon"),
"devStat",
)
- temp_status = row.find("td", {"class": "p_listRow"}).find_next(
- "td", {"class": "p_listRow"}
+ except (AttributeError, ValueError):
+ LOG.debug("Unable to set state for zone %d due to malformed html", zone)
+ return "Unknown"
+ return state
+
+ def get_zone_status(zone_row: html.HtmlElement, zone: int) -> str:
+ try:
+ status = (
+ zone_row.find(".//td[@class='p_listRow']").getnext().text_content()
)
-
- status = "Unknown"
- if temp_status is not None:
- temp_status = temp_status.get_text()
- if temp_status is not None:
- temp_status = str(temp_status.replace("\xa0", ""))
- if temp_status.startswith("Trouble"):
- trouble_code = str(temp_status).split()
- if len(trouble_code) > 1:
- status = " ".join(trouble_code[1:])
- else:
- status = "Unknown trouble code"
- else:
- status = "Online"
-
- # parse out last activity (required dealing with "Yesterday 1:52Â PM")
- # last_activity = time.time()
-
- # id: [integer]
- # name: device name
- # tags: sensor,[doorWindow,motion,glass,co,fire]
- # timestamp: timestamp of last activity
- # state: OK (device okay)
- # Open (door/window opened)
- # Motion (detected motion)
- # Tamper (glass broken or device tamper)
- # Alarm (detected CO/Smoke)
- # Unknown (device offline)
-
- # update device state from ORB info
- if not self._zones:
- LOG.warning("No zones exist")
- return None
- if state != "Unknown":
- gateway_online = True
- self._zones.update_device_info(zone, state, status, last_update)
+ status = status.replace("\xa0", "")
+ if status.startswith("Trouble"):
+ trouble_code = status.split()
+ if len(trouble_code) > 1:
+ status = " ".join(trouble_code[1:])
+ else:
+ status = "Unknown trouble code"
+ else:
+ status = "Online"
+ except (ValueError, AttributeError):
LOG.debug(
- "Set zone %d - to %s, status %s with timestamp %s",
- zone,
- state,
- status,
- last_update,
+ "Unable to set status for zone %s because html malformed", zone
)
- self._gateway.is_online = gateway_online
+ status = "Unknown"
+ return status
+
+ def update_zone_from_row(
+ zone: int,
+ state: str,
+ status: str,
+ last_update: datetime,
+ ) -> None:
+ # id: [integer]
+ # name: device name
+ # tags: sensor,[doorWindow,motion,glass,co,fire]
+ # timestamp: timestamp of last activity
+ # state: OK (device okay)
+ # Open (door/window opened)
+ # Motion (detected motion)
+ # Tamper (glass broken or device tamper)
+ # Alarm (detected CO/Smoke)
+ # Unknown (device offline)
+
+ # update device state from ORB info
+ if not self._zones:
+ LOG.warning("No zones exist")
+ return
+ self._zones.update_device_info(zone, state, status, last_update)
+ LOG.debug(
+ "Set zone %d - to %s, status %s with timestamp %s",
+ zone,
+ state,
+ status,
+ last_update,
+ )
+ retval.add(zone)
+
+ retval: set[int] = set()
+ start_time = 0.0
+ if self._pulse_connection.detailed_debug_logging:
+ start_time = time()
+        # parse ADT's convoluted html to get sensor status
+ with self._site_lock:
+ try:
+ orb_status = tree.find(".//canvas[@id='ic_orb']").get("orb")
+ if orb_status == "offline":
+ self.gateway.is_online = False
+ raise PulseGatewayOfflineError(self.gateway.backoff)
+ else:
+ self.gateway.is_online = True
+ self.gateway.backoff.reset_backoff()
+
+ except (AttributeError, ValueError):
+ LOG.error("Failed to retrieve alarm status from orb!")
+ first_pass = False
+ if self._trouble_zones is None:
+ first_pass = True
+ self._trouble_zones = set()
+ original_non_default_zones = self._trouble_zones | self._tripped_zones
+ # v26 and lower: temp = row.find("span", {"class": "p_grayNormalText"})
+ for row in tree.findall(".//tr[@class='p_listRow']"):
+ zone_id = get_zone_id(row)
+ if not zone_id:
+ continue
+ status = get_zone_status(row, zone_id)
+ state = get_zone_state(row, zone_id)
+ last_update = get_zone_last_update(row, zone_id)
+ # we know that orb sorts with trouble first, tripped next, then ok
+ if status != "Online":
+ self._trouble_zones.add(zone_id)
+ if zone_id in self._tripped_zones:
+ self._tripped_zones.remove(zone_id)
+ update_zone_from_row(zone_id, state, status, last_update)
+ continue
+ # this should be trouble or OK sensors
+ if state != "OK":
+ self._tripped_zones.add(zone_id)
+ if zone_id in self._trouble_zones:
+ self._trouble_zones.remove(zone_id)
+ update_zone_from_row(zone_id, state, status, last_update)
+ continue
+ # everything here is OK, so we just need to check if anything in tripped or trouble states have
+ # returned to normal
+ if first_pass:
+ update_zone_from_row(zone_id, state, status, last_update)
+ continue
+ if not original_non_default_zones:
+ break
+ if zone_id in original_non_default_zones:
+ update_zone_from_row(zone_id, state, status, last_update)
+ original_non_default_zones.remove(zone_id)
+ if not original_non_default_zones:
+ break
+ continue
+
self._last_updated = int(time())
- return self._zones
- async def _async_update_zones(self) -> Optional[List[ADTPulseFlattendZone]]:
+ if self._pulse_connection.detailed_debug_logging:
+ LOG.debug("Updated zones in %f seconds", time() - start_time)
+ return retval
+
+ async def _async_update_zones(self) -> list[ADTPulseFlattendZone] | None:
"""Update zones asynchronously.
Returns:
@@ -462,7 +515,7 @@ async def _async_update_zones(self) -> Optional[List[ADTPulseFlattendZone]]:
return None
return zonelist.flatten()
- def update_zones(self) -> Optional[List[ADTPulseFlattendZone]]:
+ def update_zones(self) -> list[ADTPulseFlattendZone] | None:
"""Update zone status information.
Returns:
@@ -470,47 +523,3 @@ def update_zones(self) -> Optional[List[ADTPulseFlattendZone]]:
"""
coro = self._async_update_zones()
return run_coroutine_threadsafe(coro, get_event_loop()).result()
-
- @property
- def updates_may_exist(self) -> bool:
- """Query whether updated sensor data exists.
-
- Deprecated, use method on pyADTPulse object instead
- """
- # FIXME: this should actually capture the latest version
- # and compare if different!!!
- # ...this doesn't actually work if other components are also checking
- # if updates exist
- warn(
- "updates_may_exist on site object is deprecated, "
- "use method on pyADTPulse object instead",
- DeprecationWarning,
- stacklevel=2,
- )
- return False
-
- async def async_update(self) -> bool:
- """Force update site/zone data async with current data.
-
- Deprecated, use method on pyADTPulse object instead
- """
- warn(
- "updating zones from site object is deprecated, "
- "use method on pyADTPulse object instead",
- DeprecationWarning,
- stacklevel=2,
- )
- return False
-
- def update(self) -> bool:
- """Force update site/zones with current data.
-
- Deprecated, use method on pyADTPulse object instead
- """
- warn(
- "updating zones from site object is deprecated, "
- "use method on pyADTPulse object instead",
- DeprecationWarning,
- stacklevel=2,
- )
- return False
diff --git a/pyadtpulse/site_properties.py b/pyadtpulse/site_properties.py
new file mode 100644
index 0000000..4313c84
--- /dev/null
+++ b/pyadtpulse/site_properties.py
@@ -0,0 +1,164 @@
+"""Pulse Site Properties."""
+
+from threading import RLock
+from warnings import warn
+
+from typeguard import typechecked
+
+from .alarm_panel import ADTPulseAlarmPanel
+from .gateway import ADTPulseGateway
+from .util import DebugRLock, set_debug_lock
+from .zones import ADTPulseFlattendZone, ADTPulseZones
+
+
+class ADTPulseSiteProperties:
+ """Pulse Site Properties."""
+
+ __slots__ = (
+ "_id",
+ "_name",
+ "_last_updated",
+ "_alarm_panel",
+ "_zones",
+ "_site_lock",
+ "_gateway",
+ )
+
+ @typechecked
+ def __init__(self, site_id: str, name: str, debug_locks: bool = False):
+ self._id = site_id
+ self._name = name
+ self._last_updated: int = 0
+ self._zones = ADTPulseZones()
+ self._site_lock: RLock | DebugRLock
+ self._site_lock = set_debug_lock(debug_locks, "pyadtpulse.site_property_lock")
+ self._alarm_panel = ADTPulseAlarmPanel()
+ self._gateway = ADTPulseGateway()
+
+ @property
+ def id(self) -> str:
+ """Get site id.
+
+ Returns:
+ str: the site id
+ """
+ return self._id
+
+ @property
+ def name(self) -> str:
+ """Get site name.
+
+ Returns:
+ str: the site name
+ """
+ return self._name
+
+ # FIXME: should this actually return if the alarm is going off!? How do we
+ # return state that shows the site is compromised??
+
+ @property
+ def last_updated(self) -> int:
+ """Return time site last updated.
+
+ Returns:
+ int: the time site last updated as datetime
+ """
+ with self._site_lock:
+ return self._last_updated
+
+ @property
+ def site_lock(self) -> "RLock | DebugRLock":
+ """Get thread lock for site data.
+
+ Not needed for async
+
+ Returns:
+ RLock: thread RLock
+ """
+ return self._site_lock
+
+ @property
+ def zones(self) -> list[ADTPulseFlattendZone] | None:
+ """Return all zones registered with the ADT Pulse account.
+
+ (cached copy of last fetch)
+ See Also fetch_zones()
+ """
+ with self._site_lock:
+ if not self._zones:
+ raise RuntimeError("No zones exist")
+ return self._zones.flatten()
+
+ @property
+ def zones_as_dict(self) -> ADTPulseZones | None:
+ """Return zone information in dictionary form.
+
+ Returns:
+ ADTPulseZones: all zone information
+ """
+ with self._site_lock:
+ if not self._zones:
+ raise RuntimeError("No zones exist")
+ return self._zones
+
+ @property
+ def alarm_control_panel(self) -> ADTPulseAlarmPanel:
+ """Return the alarm panel object for the site.
+
+ Returns:
+ ADTPulseAlarmPanel: the alarm panel object
+ """
+ return self._alarm_panel
+
+ @property
+ def gateway(self) -> ADTPulseGateway:
+ """Get gateway device object.
+
+ Returns:
+ ADTPulseGateway: Gateway device
+ """
+ return self._gateway
+
+ @property
+ def updates_may_exist(self) -> bool:
+ """Query whether updated sensor data exists.
+
+ Deprecated, use method on pyADTPulse object instead
+ """
+ # FIXME: this should actually capture the latest version
+ # and compare if different!!!
+ # ...this doesn't actually work if other components are also checking
+ # if updates exist
+ warn(
+ "updates_may_exist on site object is deprecated, "
+ "use method on pyADTPulse object instead",
+ DeprecationWarning,
+ stacklevel=2,
+ )
+ return False
+
+ async def async_update(self) -> bool:
+ """Force update site/zone data async with current data.
+
+ Deprecated, use method on pyADTPulse object instead
+ """
+ warn(
+ "updating zones from site object is deprecated, "
+ "use method on pyADTPulse object instead",
+ DeprecationWarning,
+ stacklevel=2,
+ )
+ return False
+
+ def update(self) -> bool:
+ """Force update site/zones with current data.
+
+ Deprecated, use method on pyADTPulse object instead
+ """
+ warn(
+ "updating zones from site object is deprecated, "
+ "use method on pyADTPulse object instead",
+ DeprecationWarning,
+ stacklevel=2,
+ )
+ return False
diff --git a/pyadtpulse/util.py b/pyadtpulse/util.py
index 15fa564..a122f6c 100644
--- a/pyadtpulse/util.py
+++ b/pyadtpulse/util.py
@@ -1,4 +1,5 @@
"""Utility functions for pyadtpulse."""
+
import logging
import string
import sys
@@ -7,83 +8,67 @@
from pathlib import Path
from random import randint
from threading import RLock, current_thread
-from typing import Optional
-from aiohttp import ClientResponse
-from bs4 import BeautifulSoup
+from lxml import html
+from yarl import URL
LOG = logging.getLogger(__name__)
-def handle_response(
- response: Optional[ClientResponse], level: int, error_message: str
-) -> bool:
- """Handle the response from query().
+def remove_prefix(text: str, prefix: str) -> str:
+ """Remove prefix from a string.
Args:
- response (Optional[Response]): the response from the query()
- level (int): Level to log on error (i.e. INFO, DEBUG)
- error_message (str): the error message
+ text (str): original text
+ prefix (str): prefix to remove
Returns:
- bool: True if no error occurred.
- """
- if response is None:
- LOG.log(level, "%s", error_message)
- return False
-
- if response.ok:
- return True
-
- LOG.log(level, "%s: error code = %s", error_message, response.status)
-
- return False
-
-
-def close_response(response: Optional[ClientResponse]) -> None:
- """Close a response object, handles None.
-
- Args:
- response (Optional[ClientResponse]): ClientResponse object to close
+ str: modified string
"""
- if response is not None and not response.closed:
- response.close()
+ return text[text.startswith(prefix) and len(prefix) :]
-def remove_prefix(text: str, prefix: str) -> str:
- """Remove prefix from a string.
+def handle_response(code: int, url: URL | None, level: int, error_message: str) -> bool:
+ """Handle the response from query().
Args:
- text (str): original text
- prefix (str): prefix to remove
+ code (int): the return code
+ level (int): Level to log on error (i.e. INFO, DEBUG)
+ error_message (str): the error message
Returns:
- str: modified string
+ bool: True if no error occurred.
"""
- return text[text.startswith(prefix) and len(prefix) :]
+ if code >= 400:
+ LOG.log(level, "%s: error code = %s from %s", error_message, code, url)
+ return False
+ return True
-async def make_soup(
- response: Optional[ClientResponse], level: int, error_message: str
-) -> Optional[BeautifulSoup]:
- """Make a BS object from a Response.
+def make_etree(
+ code: int,
+ response_text: str | None,
+ url: URL | None,
+ level: int,
+ error_message: str,
+) -> html.HtmlElement | None:
+ """Make a parsed HTML tree from a Response using lxml.
Args:
- response (Optional[Response]): the response
+ code (int): the return code
+ response_text (Optional[str]): the response text
level (int): the logging level on error
error_message (str): the error message
Returns:
- Optional[BeautifulSoup]: a BS object, or None on failure
+ Optional[html.HtmlElement]: a parsed HTML tree, or None on failure
"""
- if not handle_response(response, level, error_message):
+ if not handle_response(code, url, level, error_message):
return None
-
- if response is None: # shut up type checker
+ if response_text is None:
+ LOG.log(level, "%s: no response received from %s", error_message, url)
return None
- body_text = await response.text()
- response.close()
- return BeautifulSoup(body_text, "html.parser")
+ return html.fromstring(response_text)
FINGERPRINT_LENGTH = 2292
@@ -227,7 +212,8 @@ def parse_pulse_datetime(datestring: str) -> datetime:
Returns:
datetime: time value of given string
"""
- split_string = datestring.split("\xa0")
+ datestring = datestring.replace("\xa0", " ").rstrip()
+ split_string = [s for s in datestring.split(" ") if s.strip()]
if len(split_string) < 3:
raise ValueError("Invalid datestring")
t = datetime.today()
@@ -239,7 +225,7 @@ def parse_pulse_datetime(datestring: str) -> datetime:
tempdate = f"{split_string[0]}/{t.year}"
last_update = datetime.strptime(tempdate, "%m/%d/%Y")
if last_update > t:
- last_update = last_update.replace(year=t.year)
+ last_update = last_update.replace(year=t.year - 1)
update_time = datetime.time(
datetime.strptime(split_string[1] + split_string[2], "%I:%M%p")
)
@@ -247,13 +233,16 @@ def parse_pulse_datetime(datestring: str) -> datetime:
return last_update
-class AuthenticationException(RuntimeError):
- """Raised when a login failed."""
+def set_debug_lock(debug_lock: bool, name: str) -> "RLock | DebugRLock":
+ """Set lock or debug lock.
- def __init__(self, username: str):
- """Create the exception.
+ Args:
+ debug_lock (bool): set a debug lock
+ name (str): debug lock name
- Args:
- username (str): Username used to login
- """
- super().__init__(f"Could not log into ADT site with username {username}")
+ Returns:
+ RLock | DebugRLock: lock object to return
+ """
+ if debug_lock:
+ return DebugRLock(name)
+ return RLock()
diff --git a/pyadtpulse/zones.py b/pyadtpulse/zones.py
index 1402b7e..b018b84 100644
--- a/pyadtpulse/zones.py
+++ b/pyadtpulse/zones.py
@@ -1,11 +1,14 @@
"""ADT Pulse zone info."""
+
import logging
from collections import UserDict
from dataclasses import dataclass
from datetime import datetime
-from typing import List, Tuple, TypedDict
+from typing import TypedDict
+
+from typeguard import typechecked
-ADT_NAME_TO_DEFAULT_TAGS = {
+ADT_NAME_TO_DEFAULT_TAGS: dict[str, tuple[str, str]] = {
"Door": ("sensor", "doorWindow"),
"Window": ("sensor", "doorWindow"),
"Motion": ("sensor", "motion"),
@@ -38,10 +41,34 @@ class ADTPulseZoneData:
name: str
id_: str
- tags: Tuple = ADT_NAME_TO_DEFAULT_TAGS["Window"]
+ _tags: tuple[str, str] = ADT_NAME_TO_DEFAULT_TAGS["Window"]
status: str = "Unknown"
state: str = "Unknown"
- last_activity_timestamp: int = 0
+ _last_activity_timestamp: int = 0
+
+ @property
+ def last_activity_timestamp(self) -> int:
+ """Return the last activity timestamp."""
+ return self._last_activity_timestamp
+
+ @last_activity_timestamp.setter
+ @typechecked
+ def last_activity_timestamp(self, value: int) -> None:
+ """Set the last activity timestamp."""
+ self._last_activity_timestamp = value
+
+ @property
+ def tags(self) -> tuple[str, str]:
+ """Return the tags."""
+ return self._tags
+
+ @tags.setter
+ @typechecked
+ def tags(self, value: tuple[str, str]) -> None:
+ """Set the tags."""
+ if value not in ADT_NAME_TO_DEFAULT_TAGS.values():
+ raise ValueError("tags must be one of: " + str(ADT_NAME_TO_DEFAULT_TAGS))
+ self._tags = value
class ADTPulseFlattendZone(TypedDict):
@@ -60,7 +87,7 @@ class ADTPulseFlattendZone(TypedDict):
zone: int
name: str
id_: str
- tags: Tuple
+ tags: tuple
status: str
state: str
last_activity_timestamp: int
@@ -112,6 +139,7 @@ def __setitem__(self, key: int, value: ADTPulseZoneData) -> None:
value.name = "Sensor for Zone " + str(key)
super().__setitem__(key, value)
+ @typechecked
def update_status(self, key: int, status: str) -> None:
"""Update zone status.
@@ -123,6 +151,7 @@ def update_status(self, key: int, status: str) -> None:
temp.status = status
self.__setitem__(key, temp)
+ @typechecked
def update_state(self, key: int, state: str) -> None:
"""Update zone state.
@@ -134,6 +163,7 @@ def update_state(self, key: int, state: str) -> None:
temp.state = state
self.__setitem__(key, temp)
+ @typechecked
def update_last_activity_timestamp(self, key: int, dt: datetime) -> None:
"""Update timestamp.
@@ -145,6 +175,7 @@ def update_last_activity_timestamp(self, key: int, dt: datetime) -> None:
temp.last_activity_timestamp = int(dt.timestamp())
self.__setitem__(key, temp)
+ @typechecked
def update_device_info(
self,
key: int,
@@ -170,13 +201,13 @@ def update_device_info(
temp.last_activity_timestamp = int(last_activity.timestamp())
self.__setitem__(key, temp)
- def flatten(self) -> List[ADTPulseFlattendZone]:
+ def flatten(self) -> list[ADTPulseFlattendZone]:
"""Flattens ADTPulseZones into a list of ADTPulseFlattenedZones.
Returns:
List[ADTPulseFlattendZone]
"""
- result: List[ADTPulseFlattendZone] = []
+ result: list[ADTPulseFlattendZone] = []
for k, i in self.items():
if not isinstance(i, ADTPulseZoneData):
raise ValueError("Invalid Zone data in ADTPulseZones")
@@ -193,41 +224,42 @@ def flatten(self) -> List[ADTPulseFlattendZone]:
)
return result
+ @typechecked
def update_zone_attributes(self, dev_attr: dict[str, str]) -> None:
"""Update zone attributes."""
- dName = dev_attr.get("name", "Unknown")
- dType = dev_attr.get("type_model", "Unknown")
- dZone = dev_attr.get("zone", "Unknown")
- dStatus = dev_attr.get("status", "Unknown")
+ d_name = dev_attr.get("name", "Unknown")
+ d_type = dev_attr.get("type_model", "Unknown")
+ d_zone = dev_attr.get("zone", "Unknown")
+ d_status = dev_attr.get("status", "Unknown")
- if dZone != "Unknown":
+ if d_zone != "Unknown":
tags = None
for search_term, default_tags in ADT_NAME_TO_DEFAULT_TAGS.items():
# convert to uppercase first
- if search_term.upper() in dType.upper():
+ if search_term.upper() in d_type.upper():
tags = default_tags
break
if not tags:
LOG.warning(
- "Unknown sensor type for '%s', defaulting to doorWindow", dType
+ "Unknown sensor type for '%s', defaulting to doorWindow", d_type
)
tags = ("sensor", "doorWindow")
LOG.debug(
"Retrieved sensor %s id: sensor-%s Status: %s, tags %s",
- dName,
- dZone,
- dStatus,
+ d_name,
+ d_zone,
+ d_status,
tags,
)
- if "Unknown" in (dName, dStatus, dZone) or not dZone.isdecimal():
+ if "Unknown" in (d_name, d_status, d_zone) or not d_zone.isdecimal():
LOG.debug("Zone data incomplete, skipping...")
else:
- tmpzone = ADTPulseZoneData(dName, f"sensor-{dZone}", tags, dStatus)
- self.update({int(dZone): tmpzone})
+ tmpzone = ADTPulseZoneData(d_name, f"sensor-{d_zone}", tags, d_status)
+ self.update({int(d_zone): tmpzone})
else:
LOG.debug(
"Skipping incomplete zone name: %s, zone: %s status: %s",
- dName,
- dZone,
- dStatus,
+ d_name,
+ d_zone,
+ d_status,
)
diff --git a/pyproject.toml b/pyproject.toml
index b770167..cc6040c 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,30 +1,60 @@
-[build-system]
-requires = ["setuptools>=61.0"]
-build-backend = "setuptools.build_meta"
-
-[project]
+[tool.poetry]
name = "pyadtpulse"
-dynamic = ["version"]
-description="Python interface for ADT Pulse security systems"
+version = "1.2.9"
+description = "Python interface for ADT Pulse security systems"
+authors = ["Ryan Snodgrass"]
+maintainers = ["Robert Lippmann"]
+license = "Apache-2.0"
readme = "README.md"
-authors = [{name = "Ryan Snodgrass"}]
-maintainers = [{name = "Robert Lippmann"}]
-license = {file = "LICENSE.md"}
-dependencies = ["aiohttp>=3.8.1", "uvloop>=0.17.0", "beautifulsoup4>=4.11.1"]
-keywords = ["security system", "adt", "home automation", "security alarm"]
+repository = "https://github.com/rlippmann/pyadtpulse"
classifiers = [
"Programming Language :: Python :: 3",
"License :: OSI Approved :: Apache Software License",
- "Operating System :: OS Independent",
+ "Operating System :: OS Independent"
]
-[project.urls]
+
+[tool.poetry.dependencies]
+python = "^3.11"
+aiohttp = ">=3.8.5, < 4.0"
+uvloop = "^0.19.0"
+typeguard = "^4.1.5"
+yarl = ">=1.9, < 2.0"
+lxml = "^5.1.0"
+aiohttp-zlib-ng = ">=0.1.1"
+
+
+[tool.poetry.urls]
"Changelog" = "https://github.com/rlippmann/pyadtpulse/blob/master/CHANGELOG.md"
-"Source" = "https://github.com/rlippmann/pyadtpulse"
"Issues" = "https://github.com/rlippmann/pyadtpulse/issues"
-[tool.setuptools.dynamic]
-version = {attr = "pyadtpulse.const.__version__"}
+[tool.poetry.group.test.dependencies]
+pytest = "^7.4.3"
+pytest-asyncio = "^0.21.1"
+pytest-mock = "^3.12.0"
+pytest-aiohttp = "^1.0.5"
+pytest-timeout = "^2.2.0"
+aioresponses = "^0.7.6"
+freezegun = "^1.2.2"
+pytest-coverage = "^0.0"
+pytest-xdist = "^3.5.0"
+
+
+[tool.poetry.group.dev.dependencies]
+pre-commit = "^3.5.0"
+ruff = "^0.1.4"
+pycln = "^2.3.0"
+pyupgrade = "^3.15.0"
+isort = "^5.12.0"
+black = "^23.10.1"
+mypy = "^1.6.1"
+pylint = "^3.0.2"
+refurb = "^1.22.1"
+types-lxml = "^2024.2.9"
+
+[build-system]
+requires = ["poetry-core"]
+build-backend = "poetry.core.masonry.api"
[tool.isort]
profile = "black"
@@ -36,3 +66,9 @@ line-length = 90
[tool.pycln]
all = true
+
+[tool.pytest.ini_options]
+timeout = 30
+# addopts = "--cov=pyadtpulse --cov-report=html"
+
+[tool.pyright]
diff --git a/requirements.txt b/requirements.txt
index 50b37e6..d1fb5bb 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,3 +1,5 @@
-beautifulsoup4>=4.11.1
-aiohttp>=3.8.1
+lxml>=5.1.0
+aiohttp>=3.9.1
uvloop>=0.17.0
+typeguard>=4.1.5
+aiohttp-zlib-ng>=0.1.1
diff --git a/setup.py b/setup.py
index 7cfa7a8..f6e086f 100644
--- a/setup.py
+++ b/setup.py
@@ -29,7 +29,14 @@
author="",
author_email="",
license="Apache Software License",
- install_requires=["aiohttp>=3.8.1", "uvloop>=0.17.0", "beautifulsoup4>=4.11.1"],
+ install_requires=[
+ "aiohttp>=3.8.5",
+ "uvloop>=0.17.0",
+ "lxml>=5.1.0",
+ "typeguard>=2.13.3",
+ "yarl>=1.8.2",
+ "aiohttp-zlib-ng>=0.1.1",
+ ],
keywords=["security system", "adt", "home automation", "security alarm"],
zip_safe=True,
classifiers=[
diff --git a/tests/data_files/device_1.html b/tests/data_files/device_1.html
new file mode 100644
index 0000000..f248389
--- /dev/null
+++ b/tests/data_files/device_1.html
@@ -0,0 +1,458 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ ADT Pulse(TM) Interactive Solutions - System - Robert Lippmann
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/tests/data_files/device_10.html b/tests/data_files/device_10.html
new file mode 100644
index 0000000..a1ad892
--- /dev/null
+++ b/tests/data_files/device_10.html
@@ -0,0 +1,435 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ ADT Pulse(TM) Interactive Solutions - System - Robert Lippmann
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/tests/data_files/device_11.html b/tests/data_files/device_11.html
new file mode 100644
index 0000000..ba700c6
--- /dev/null
+++ b/tests/data_files/device_11.html
@@ -0,0 +1,435 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ ADT Pulse(TM) Interactive Solutions - System - Robert Lippmann
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/tests/data_files/device_16.html b/tests/data_files/device_16.html
new file mode 100644
index 0000000..2d9be60
--- /dev/null
+++ b/tests/data_files/device_16.html
@@ -0,0 +1,435 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ ADT Pulse(TM) Interactive Solutions - System - Robert Lippmann
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/tests/data_files/device_2.html b/tests/data_files/device_2.html
new file mode 100644
index 0000000..ca7507d
--- /dev/null
+++ b/tests/data_files/device_2.html
@@ -0,0 +1,435 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ ADT Pulse(TM) Interactive Solutions - System - Robert Lippmann
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/tests/data_files/device_24.html b/tests/data_files/device_24.html
new file mode 100644
index 0000000..52085d6
--- /dev/null
+++ b/tests/data_files/device_24.html
@@ -0,0 +1,453 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ ADT Pulse(TM) Interactive Solutions - System - Robert Lippmann
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/tests/data_files/device_25.html b/tests/data_files/device_25.html
new file mode 100644
index 0000000..deb9552
--- /dev/null
+++ b/tests/data_files/device_25.html
@@ -0,0 +1,435 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ ADT Pulse(TM) Interactive Solutions - System - Robert Lippmann
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/tests/data_files/device_26.html b/tests/data_files/device_26.html
new file mode 100644
index 0000000..3b4393a
--- /dev/null
+++ b/tests/data_files/device_26.html
@@ -0,0 +1,435 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ ADT Pulse(TM) Interactive Solutions - System - Robert Lippmann
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/tests/data_files/device_27.html b/tests/data_files/device_27.html
new file mode 100644
index 0000000..cac9597
--- /dev/null
+++ b/tests/data_files/device_27.html
@@ -0,0 +1,435 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ ADT Pulse(TM) Interactive Solutions - System - Robert Lippmann
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/tests/data_files/device_28.html b/tests/data_files/device_28.html
new file mode 100644
index 0000000..8a02013
--- /dev/null
+++ b/tests/data_files/device_28.html
@@ -0,0 +1,435 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ ADT Pulse(TM) Interactive Solutions - System - Robert Lippmann
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/tests/data_files/device_29.html b/tests/data_files/device_29.html
new file mode 100644
index 0000000..2ba6fd7
--- /dev/null
+++ b/tests/data_files/device_29.html
@@ -0,0 +1,435 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ ADT Pulse(TM) Interactive Solutions - System - Robert Lippmann
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/tests/data_files/device_3.html b/tests/data_files/device_3.html
new file mode 100644
index 0000000..f4da4b7
--- /dev/null
+++ b/tests/data_files/device_3.html
@@ -0,0 +1,435 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ ADT Pulse(TM) Interactive Solutions - System - Robert Lippmann
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/tests/data_files/device_30.html b/tests/data_files/device_30.html
new file mode 100644
index 0000000..c178404
--- /dev/null
+++ b/tests/data_files/device_30.html
@@ -0,0 +1,435 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ ADT Pulse(TM) Interactive Solutions - System - Robert Lippmann
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/tests/data_files/device_34.html b/tests/data_files/device_34.html
new file mode 100644
index 0000000..d5ffb17
--- /dev/null
+++ b/tests/data_files/device_34.html
@@ -0,0 +1,441 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ ADT Pulse(TM) Interactive Solutions - System - Robert Lippmann
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/tests/data_files/device_69.html b/tests/data_files/device_69.html
new file mode 100644
index 0000000..95f2122
--- /dev/null
+++ b/tests/data_files/device_69.html
@@ -0,0 +1,435 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ ADT Pulse(TM) Interactive Solutions - System - Robert Lippmann
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/tests/data_files/device_70.html b/tests/data_files/device_70.html
new file mode 100644
index 0000000..68f5766
--- /dev/null
+++ b/tests/data_files/device_70.html
@@ -0,0 +1,435 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ ADT Pulse(TM) Interactive Solutions - System - Robert Lippmann
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/tests/data_files/gateway.html b/tests/data_files/gateway.html
new file mode 100644
index 0000000..a85f6d8
--- /dev/null
+++ b/tests/data_files/gateway.html
@@ -0,0 +1,395 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ ADT Pulse(TM) Interactive Solutions - System - Robert Lippmann
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ |
+ |
+ |
+
+
+
+
+
+
+
+ |
+
+
+
+
+
+
+
+ |
+ |
+ |
+
+
+ |
+
+
+
+
+
+
+ |
+
+
+ Status: |
+ |
+
+ Manufacturer: | ADT Pulse Gateway |
+
+ Model: | PGZNG1 |
+ Serial Number: | 5U020CN3007E3 |
+ Next Update: | Today 1:21 AM |
+ Last Update: | Today 7:21 PM |
+ Firmware Version: | 24.0.0-9 |
+ Hardware Version: | HW=3, BL=1.1.9b, PL=9.4.0.32.5, SKU=PGZNG1-2ADNAS |
+
+ |
+ Communication Link Status |
+ Primary Connection Type: | Broadband |
+ Broadband Connection Status: | Active |
+ Cellular Connection Status: | N/A |
+ Cellular Signal Strength: | N/A |
+ |
+ Network Address Information |
+ Broadband LAN IP Address: | 192.168.1.31 |
+ Broadband LAN MAC: | a4:11:62:35:07:96 |
+ Device LAN IP Address: | 192.168.107.1 |
+ Device LAN MAC: | a4:11:62:35:07:97 |
+ Router LAN IP Address: | 192.168.1.1 |
+ Router WAN IP Address: | |
+
+
+ |
+ |
+
+
+
+ |
+ |
+
+
+ |
+ |
+ |
+
+
+
+
+
+
+ |
+ |
+
+
+
+
+
+
+
+ |
+ |
+ |
+
+
+
+
+ |
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/tests/data_files/mfa.html b/tests/data_files/mfa.html
new file mode 100644
index 0000000..5087001
--- /dev/null
+++ b/tests/data_files/mfa.html
@@ -0,0 +1,150 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ ADT Pulse(TM) Interactive Solutions - Multi-factor Authentication
+
+
+
+
+
+
+
+
+
diff --git a/tests/data_files/not_signed_in.html b/tests/data_files/not_signed_in.html
new file mode 100644
index 0000000..9608e41
--- /dev/null
+++ b/tests/data_files/not_signed_in.html
@@ -0,0 +1,177 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ ADT Pulse(TM) Interactive Solutions - Sign In
+
+
+
+
+
+
+
+
+
+
+
diff --git a/tests/data_files/orb.html b/tests/data_files/orb.html
new file mode 100644
index 0000000..57c808d
--- /dev/null
+++ b/tests/data_files/orb.html
@@ -0,0 +1,323 @@
+
+
+
+
+
+
+
+
+ Disarmed.
+ All Quiet.
+
+
+
+
+
+
+
+
+
+
+ |
+
+
+
+
+
+
+
+
+ |
+
+ |
+
+ Back Door
+ Zone 14
+
+ |
+ Closed |
+
+
+
+ |
+
+
+ |
+
+
+ |
+
+
+ |
+
+
+
+
+
+
+
+ |
+
+ |
+
+ Foyer Motion
+ Zone 15
+
+ |
+ No Motion |
+
+
+ |
+
+
+
+
+
+
+
+ |
+
+ |
+
+ Front Door
+ Zone 9
+
+ |
+ Closed |
+
+
+ |
+
+
+
+
+
+
+
+ |
+
+ |
+
+ Garage Door
+ Zone 10
+
+ |
+ Closed |
+
+
+ |
+
+
+ |
+
+
+
+
+
+
+
+ |
+
+ |
+
+ Main Gas
+ Zone 23
+
+ |
+ Okay |
+
+
+ |
+
+
+
+
+
+
+
+ |
+
+ |
+
+ Patio Door
+ Zone 11
+
+ |
+ Closed |
+
+
+ |
+
+
+ |
+
+
+ | |
+
diff --git a/tests/data_files/orb_garage.html b/tests/data_files/orb_garage.html
new file mode 100644
index 0000000..13a3cc4
--- /dev/null
+++ b/tests/data_files/orb_garage.html
@@ -0,0 +1,323 @@
+
+
+
+
+
+
+
+
+ Disarmed.
+ 1 Sensor Open.
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ |
+
+ |
+
+ Garage Door
+ Zone 10
+
+ |
+ Open |
+
+
+
+ |
+
+
+
+
+ |
+
+
+
+
+
+
+
+ |
+
+ |
+
+ Back Door
+ Zone 14
+
+ |
+ Closed |
+
+
+ |
+
+
+ |
+
+
+ |
+
+
+ |
+
+
+
+
+
+
+
+ |
+
+ |
+
+ Foyer Motion
+ Zone 15
+
+ |
+ No Motion |
+
+
+ |
+
+
+
+
+
+
+
+ |
+
+ |
+
+ Front Door
+ Zone 9
+
+ |
+ Closed |
+
+
+ |
+
+
+ |
+
+
+
+
+
+
+
+ |
+
+ |
+
+ Main Gas
+ Zone 23
+
+ |
+ Okay |
+
+
+ |
+
+
+
+
+
+
+
+ |
+
+ |
+
+ Patio Door
+ Zone 11
+
+ |
+ Closed |
+
+
+ |
+
+
+ |
+
+
+ | |
+
diff --git a/tests/data_files/orb_gateway_offline.html b/tests/data_files/orb_gateway_offline.html
new file mode 100644
index 0000000..2083506
--- /dev/null
+++ b/tests/data_files/orb_gateway_offline.html
@@ -0,0 +1,293 @@
+
+
+
+
+
+
+
+
+ Status Unavailable.
+
+
+
+
+
+
+
+
+
+
+
+
+ |
+
+
+
+
+
+
+
+
+ |
+
+ |
+
+ Back Door
+ Zone 14
+
+ |
+ Unknown |
+
+
+
+ |
+
+
+ |
+
+
+ |
+
+
+ |
+
+
+
+
+
+
+
+ |
+
+ |
+
+ Foyer Motion
+ Zone 15
+
+ |
+ Unknown |
+
+
+ |
+
+
+
+
+
+
+
+ |
+
+ |
+
+ Front Door
+ Zone 9
+
+ |
+ Unknown |
+
+
+ |
+
+
+
+
+
+
+
+ |
+
+ |
+
+ Garage Door
+ Zone 10
+
+ |
+ Unknown |
+
+
+ |
+
+
+ |
+
+
+
+
+
+
+
+ |
+
+ |
+
+ Main Gas
+ Zone 23
+
+ |
+ Unknown |
+
+
+ |
+
+
+
+
+
+
+
+ |
+
+ |
+
+ Patio Door
+ Zone 11
+
+ |
+ Unknown |
+
+
+ |
+
+
+ |
+
+
+ | |
+
diff --git a/tests/data_files/orb_patio_garage.html b/tests/data_files/orb_patio_garage.html
new file mode 100644
index 0000000..0c42926
--- /dev/null
+++ b/tests/data_files/orb_patio_garage.html
@@ -0,0 +1,323 @@
+
+
+
+
+
+
+
+
+ Disarmed.
+ 2 Sensors Open.
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ |
+
+ |
+
+ Garage Door
+ Zone 10
+
+ |
+ Open |
+
+
+
+ |
+
+
+
+
+
+
+
+
+ |
+
+ |
+
+ Patio Door
+ Zone 11
+
+ |
+ Open |
+
+
+
+ |
+
+
+ |
+
+
+
+
+
+
+
+ |
+
+ |
+
+ Back Door
+ Zone 14
+
+ |
+ Closed |
+
+
+ |
+
+
+ |
+
+
+ |
+
+
+ |
+
+
+
+
+
+
+
+ |
+
+ |
+
+ Foyer Motion
+ Zone 15
+
+ |
+ No Motion |
+
+
+ |
+
+
+
+
+
+
+
+ |
+
+ |
+
+ Front Door
+ Zone 9
+
+ |
+ Closed |
+
+
+ |
+
+
+ |
+
+
+
+
+
+
+
+ |
+
+ |
+
+ Main Gas
+ Zone 23
+
+ |
+ Okay |
+
+
+ |
+
+
+ |
+
+
+ | |
+
diff --git a/tests/data_files/orb_patio_opened.html b/tests/data_files/orb_patio_opened.html
new file mode 100644
index 0000000..dfdd36f
--- /dev/null
+++ b/tests/data_files/orb_patio_opened.html
@@ -0,0 +1,323 @@
+
+
+
+
+
+
+
+
+ Disarmed.
+ 1 Sensor Open.
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ |
+
+ |
+
+ Patio Door
+ Zone 11
+
+ |
+ Open |
+
+
+
+ |
+
+
+
+
+ |
+
+
+
+
+
+
+
+ |
+
+ |
+
+ Back Door
+ Zone 14
+
+ |
+ Closed |
+
+
+ |
+
+
+ |
+
+
+ |
+
+
+ |
+
+
+
+
+
+
+
+ |
+
+ |
+
+ Foyer Motion
+ Zone 15
+
+ |
+ No Motion |
+
+
+ |
+
+
+
+
+
+
+
+ |
+
+ |
+
+ Front Door
+ Zone 9
+
+ |
+ Closed |
+
+
+ |
+
+
+
+
+
+
+
+ |
+
+ |
+
+ Garage Door
+ Zone 10
+
+ |
+ Closed |
+
+
+ |
+
+
+ |
+
+
+
+
+
+
+
+ |
+
+ |
+
+ Main Gas
+ Zone 23
+
+ |
+ Okay |
+
+
+ |
+
+
+ |
+
+
+ | |
+
diff --git a/tests/data_files/signin.html b/tests/data_files/signin.html
new file mode 100644
index 0000000..c82a460
--- /dev/null
+++ b/tests/data_files/signin.html
@@ -0,0 +1,177 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ ADT Pulse(TM) Interactive Solutions - Sign In
+
+
+
+
+
+
+
+
+
+
+
diff --git a/tests/data_files/signin_fail.html b/tests/data_files/signin_fail.html
new file mode 100644
index 0000000..d7b52a4
--- /dev/null
+++ b/tests/data_files/signin_fail.html
@@ -0,0 +1,176 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ ADT Pulse(TM) Interactive Solutions - Sign In
+
+
+
+
+
+
+
+
+
+
+
diff --git a/tests/data_files/signin_locked.html b/tests/data_files/signin_locked.html
new file mode 100644
index 0000000..027079f
--- /dev/null
+++ b/tests/data_files/signin_locked.html
@@ -0,0 +1,176 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ ADT Pulse(TM) Interactive Solutions - Sign In
+
+
+
+
+
+
+
+
+
+
+
diff --git a/tests/data_files/summary.html b/tests/data_files/summary.html
new file mode 100644
index 0000000..e61e087
--- /dev/null
+++ b/tests/data_files/summary.html
@@ -0,0 +1,534 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ ADT Pulse(TM) Interactive Solutions - Summary - Robert Lippmann
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ADT Pulse Home
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Disarmed. All Quiet.
+
+
+
+
+
+
+
+ |
+ |
+ |
+ |
+ |
+ |
+ |
+ |
+ |
+ |
+ |
+ |
+ |
+
+
+
+ |
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ No other devices installed. |
|
+ |
+ |
+
+
+
+ |
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ |
+ |
+
+
+
+
+
+
+
+
+
+
+ |
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ No pictures or clips.
+
+
+
+
+
+
+
+ |
+
+
+
+
+
+
+
+
+
+
+
+
+ |
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/tests/data_files/summary_gateway_offline.html b/tests/data_files/summary_gateway_offline.html
new file mode 100644
index 0000000..266fc06
--- /dev/null
+++ b/tests/data_files/summary_gateway_offline.html
@@ -0,0 +1,533 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ ADT Pulse(TM) Interactive Solutions - Summary - Robert Lippmann
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ADT Pulse Home
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Status Unavailable.
+
+
+
+
+
+
+
+ |
+ |
+ |
+ |
+ |
+ |
+ |
+ |
+ |
+ |
+ |
+ |
+ |
+
+
+
+ |
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ |
+ |
+
+
+
+
+
+
+
+
+
+
+ |
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ No pictures or clips.
+
+
+
+
+
+
+
+ |
+
+
+
+
+
+
+
+
+
+
+
+
+ |
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/tests/data_files/system.html b/tests/data_files/system.html
new file mode 100644
index 0000000..736f9e5
--- /dev/null
+++ b/tests/data_files/system.html
@@ -0,0 +1,530 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ ADT Pulse(TM) Interactive Solutions - System - Robert Lippmann
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Devices
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ |
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/tests/test_backoff.py b/tests/test_backoff.py
new file mode 100644
index 0000000..29fa88e
--- /dev/null
+++ b/tests/test_backoff.py
@@ -0,0 +1,879 @@
+"""Test for pulse_backoff."""
+
+from time import time
+
+import pytest
+
+from pyadtpulse.pulse_backoff import PulseBackoff
+
+
+# Test that the PulseBackoff class can be initialized with valid parameters.
+def test_initialize_backoff_valid_parameters():
+ """
+ Test that the PulseBackoff class can be initialized with valid parameters.
+ """
+ # Arrange
+ name = "test_backoff"
+ initial_backoff_interval = 1.0
+ max_backoff_interval = 10.0
+ threshold = 0
+ debug_locks = False
+ detailed_debug_logging = False
+
+ # Store the current time
+ current_time = time()
+
+ # Act
+ backoff = PulseBackoff(
+ name,
+ initial_backoff_interval,
+ max_backoff_interval,
+ threshold,
+ debug_locks,
+ detailed_debug_logging,
+ )
+
+ # Assert
+ assert backoff.name == name
+ assert backoff.initial_backoff_interval == initial_backoff_interval
+ assert backoff._max_backoff_interval == max_backoff_interval
+ assert backoff._backoff_count == 0
+ assert backoff._expiration_time == 0.0
+
+
+# Get current backoff interval
+def test_get_current_backoff_interval():
+ """
+ Test that the get_current_backoff_interval method returns the correct current backoff interval.
+ """
+ # Arrange
+ name = "test_backoff"
+ initial_backoff_interval = 1.0
+ max_backoff_interval = 10.0
+ threshold = 0
+ debug_locks = False
+ detailed_debug_logging = False
+ backoff = PulseBackoff(
+ name,
+ initial_backoff_interval,
+ max_backoff_interval,
+ threshold,
+ debug_locks,
+ detailed_debug_logging,
+ )
+
+ # Act
+ current_backoff_interval = backoff.get_current_backoff_interval()
+ assert current_backoff_interval == 0.0
+ backoff.increment_backoff()
+ current_backoff_interval = backoff.get_current_backoff_interval()
+ # Assert
+ assert current_backoff_interval == initial_backoff_interval
+ backoff.increment_backoff()
+ current_backoff_interval = backoff.get_current_backoff_interval()
+ assert current_backoff_interval == initial_backoff_interval * 2
+
+
+# Increment backoff
+def test_increment_backoff():
+ """
+ Test that the increment_backoff method increments the backoff count.
+ """
+ # Arrange
+ name = "test_backoff"
+ initial_backoff_interval = 1.0
+ max_backoff_interval = 10.0
+ threshold = 0
+ debug_locks = False
+ detailed_debug_logging = False
+ backoff = PulseBackoff(
+ name,
+ initial_backoff_interval,
+ max_backoff_interval,
+ threshold,
+ debug_locks,
+ detailed_debug_logging,
+ )
+
+ # Act
+ backoff.increment_backoff()
+
+ # Assert
+ assert backoff._backoff_count == 1
+ backoff.increment_backoff()
+ assert backoff._backoff_count == 2
+
+
+# Reset backoff
+def test_reset_backoff():
+ """
+ Test that the reset_backoff method resets the backoff count and expiration time.
+ """
+ # Arrange
+ name = "test_backoff"
+ initial_backoff_interval = 1.0
+ max_backoff_interval = 10.0
+ threshold = 0
+ debug_locks = False
+ detailed_debug_logging = False
+ backoff = PulseBackoff(
+ name,
+ initial_backoff_interval,
+ max_backoff_interval,
+ threshold,
+ debug_locks,
+ detailed_debug_logging,
+ )
+ backoff.increment_backoff()
+
+ # Act
+ backoff.reset_backoff()
+
+ # Assert
+ assert backoff._backoff_count == 0
+
+
+# Test that the wait_for_backoff method waits for the correct amount of time.
+@pytest.mark.asyncio
+async def test_wait_for_backoff2(mock_sleep):
+ """
+ Test that the wait_for_backoff method waits for the correct amount of time.
+ """
+ # Arrange
+
+ name = "test_backoff"
+ initial_backoff_interval = 1.0
+ max_backoff_interval = 10.0
+ threshold = 0
+ debug_locks = False
+ detailed_debug_logging = False
+ backoff = PulseBackoff(
+ name,
+ initial_backoff_interval,
+ max_backoff_interval,
+ threshold,
+ debug_locks,
+ detailed_debug_logging,
+ )
+
+ # Act
+ await backoff.wait_for_backoff()
+ assert mock_sleep.call_count == 0
+ backoff.increment_backoff()
+ await backoff.wait_for_backoff()
+ assert mock_sleep.await_args[0][0] == pytest.approx(initial_backoff_interval)
+
+
+# Check if backoff is needed
+def test_will_backoff():
+ """
+ Test that the will_backoff method returns True if backoff is needed, False otherwise.
+ """
+ # Arrange
+ name = "test_backoff"
+ initial_backoff_interval = 1.0
+ max_backoff_interval = 10.0
+ threshold = 0
+ debug_locks = False
+ detailed_debug_logging = False
+ backoff = PulseBackoff(
+ name,
+ initial_backoff_interval,
+ max_backoff_interval,
+ threshold,
+ debug_locks,
+ detailed_debug_logging,
+ )
+
+ # Act and Assert
+ assert not backoff.will_backoff()
+
+ backoff.increment_backoff()
+ assert backoff.will_backoff()
+
+
+# Initialize backoff with invalid initial_backoff_interval
+def test_initialize_backoff_invalid_initial_interval():
+ """
+ Test that initializing the PulseBackoff class with an invalid
+ initial_backoff_interval raises a ValueError.
+ """
+ # Arrange
+ name = "test_backoff"
+ initial_backoff_interval = -1.0
+ max_backoff_interval = 10.0
+ threshold = 0
+ debug_locks = False
+ detailed_debug_logging = False
+
+ # Act and Assert
+ with pytest.raises(ValueError):
+ PulseBackoff(
+ name,
+ initial_backoff_interval,
+ max_backoff_interval,
+ threshold,
+ debug_locks,
+ detailed_debug_logging,
+ )
+
+
+# Initialize backoff with invalid max_backoff_interval
+def test_initialize_backoff_invalid_max_interval():
+ """
+ Test that initializing the PulseBackoff class with an invalid
+ max_backoff_interval raises a ValueError.
+ """
+ # Arrange
+ name = "test_backoff"
+ initial_backoff_interval = 1.0
+ max_backoff_interval = 0.5
+ threshold = 0
+ debug_locks = False
+ detailed_debug_logging = False
+
+ # Act and Assert
+ with pytest.raises(ValueError):
+ PulseBackoff(
+ name,
+ initial_backoff_interval,
+ max_backoff_interval,
+ threshold,
+ debug_locks,
+ detailed_debug_logging,
+ )
+
+
+# Test that setting the absolute backoff time with an invalid backoff_time raises a ValueError.
+def test_set_absolute_backoff_time_invalid_time():
+ """
+ Test that setting the absolute backoff time with an invalid backoff_time raises a ValueError.
+ """
+ # Arrange
+ backoff = PulseBackoff(
+ name="test_backoff",
+ initial_backoff_interval=1.0,
+ max_backoff_interval=10.0,
+ threshold=0,
+ debug_locks=False,
+ detailed_debug_logging=False,
+ )
+
+ # Act and Assert
+ with pytest.raises(
+ ValueError, match="Absolute backoff time must be greater than current time"
+ ):
+ backoff.set_absolute_backoff_time(time() - 1)
+
+
+def test_set_absolute_backoff_time_valid_time():
+ """
+ Test that setting the absolute backoff time with a valid backoff_time works.
+ """
+ # Arrange
+ backoff = PulseBackoff(
+ name="test_backoff",
+ initial_backoff_interval=1.0,
+ max_backoff_interval=10.0,
+ threshold=0,
+ debug_locks=False,
+ detailed_debug_logging=False,
+ )
+
+ # Act and Assert
+ backoff_time = time() + 10
+ backoff.set_absolute_backoff_time(backoff_time)
+ assert backoff._expiration_time == backoff_time
+
+
+# Initialize backoff with valid parameters
+def test_initialize_backoff_valid_parameters2():
+ """
+ Test that the PulseBackoff class can be initialized with valid parameters.
+ """
+ # Arrange
+ name = "test_backoff"
+ initial_backoff_interval = 1.0
+ max_backoff_interval = 10.0
+ threshold = 0
+ debug_locks = False
+ detailed_debug_logging = False
+
+ # Act
+ backoff = PulseBackoff(
+ name,
+ initial_backoff_interval,
+ max_backoff_interval,
+ threshold,
+ debug_locks,
+ detailed_debug_logging,
+ )
+
+ # Assert
+ assert backoff.name == name
+ assert backoff.initial_backoff_interval == initial_backoff_interval
+ assert backoff._max_backoff_interval == max_backoff_interval
+ assert backoff._backoff_count == 0
+ assert backoff._expiration_time == 0.0
+
+
+# Increment backoff
+def test_increment_backoff2():
+ """
+ Test that the backoff count is incremented correctly when calling the
+ increment_backoff method.
+ """
+ # Arrange
+ name = "test_backoff"
+ initial_backoff_interval = 1.0
+ max_backoff_interval = 10.0
+ threshold = 0
+ debug_locks = False
+ detailed_debug_logging = False
+ backoff = PulseBackoff(
+ name,
+ initial_backoff_interval,
+ max_backoff_interval,
+ threshold,
+ debug_locks,
+ detailed_debug_logging,
+ )
+
+ # Act
+ backoff.increment_backoff()
+
+ # Assert
+ assert backoff.backoff_count == 1
+
+
+# Reset backoff
+def test_reset_backoff2():
+ """
+ Test that the backoff count and expiration time are not reset when calling
+ the reset_backoff method where expiration time is in the future.
+ """
+ # Arrange
+ name = "test_backoff"
+ initial_backoff_interval = 1.0
+ max_backoff_interval = 10.0
+ threshold = 0
+ debug_locks = False
+ detailed_debug_logging = False
+ backoff = PulseBackoff(
+ name,
+ initial_backoff_interval,
+ max_backoff_interval,
+ threshold,
+ debug_locks,
+ detailed_debug_logging,
+ )
+ now = time()
+ backoff._backoff_count = 5
+ backoff._expiration_time = now + 10.0
+
+ # Act
+ backoff.reset_backoff()
+
+ # Assert
+ assert backoff._backoff_count == 5
+ assert backoff._expiration_time == now + 10.0
+ assert backoff.expiration_time == now + 10.0
+
+
+# Check if backoff is needed
+def test_backoff_needed():
+ """
+ Test that the 'will_backoff' method returns the correct value when
+ backoff is needed.
+ """
+ # Arrange
+ name = "test_backoff"
+ initial_backoff_interval = 1.0
+ max_backoff_interval = 10.0
+ threshold = 0
+ debug_locks = False
+ detailed_debug_logging = False
+
+ backoff = PulseBackoff(
+ name,
+ initial_backoff_interval,
+ max_backoff_interval,
+ threshold,
+ debug_locks,
+ detailed_debug_logging,
+ )
+
+ # Act
+ backoff.increment_backoff()
+
+ # Assert
+ assert backoff.will_backoff() is True
+
+
+# Wait for backoff
+@pytest.mark.asyncio
+async def test_wait_for_backoff(mocker):
+    """
+    Test that wait_for_backoff returns immediately when no backoff is pending.
+
+    Also verifies that increment_backoff by itself does not set an absolute
+    expiration time (expiration_time remains 0.0).
+    """
+    # Arrange
+    name = "test_backoff"
+    initial_backoff_interval = 1.0
+    max_backoff_interval = 10.0
+    threshold = 0
+    debug_locks = False
+    detailed_debug_logging = False
+
+    backoff = PulseBackoff(
+        name,
+        initial_backoff_interval,
+        max_backoff_interval,
+        threshold,
+        debug_locks,
+        detailed_debug_logging,
+    )
+
+    # Act
+    # No backoff is pending, so this should return without sleeping.
+    await backoff.wait_for_backoff()
+    assert backoff.expiration_time == 0.0
+    backoff.increment_backoff()
+
+    # Assert
+    assert backoff.expiration_time == 0.0
+
+
+# Set initial backoff interval
+def test_set_initial_backoff_interval():
+ """
+ Test that the initial backoff interval can be set.
+ """
+ # Arrange
+ name = "test_backoff"
+ initial_backoff_interval = 1.0
+ max_backoff_interval = 10.0
+ threshold = 0
+ debug_locks = False
+ detailed_debug_logging = False
+ backoff = PulseBackoff(
+ name,
+ initial_backoff_interval,
+ max_backoff_interval,
+ threshold,
+ debug_locks,
+ detailed_debug_logging,
+ )
+
+ # Act
+ new_interval = 2.0
+ backoff.initial_backoff_interval = new_interval
+
+ # Assert
+ assert backoff.initial_backoff_interval == new_interval
+
+
+# Initialize backoff with invalid max_backoff_interval
+def test_initialize_backoff_invalid_max_interval2():
+ """
+ Test that the PulseBackoff class raises a ValueError when initialized
+ with an invalid max_backoff_interval.
+ """
+ # Arrange
+ name = "test_backoff"
+ initial_backoff_interval = 1.0
+ max_backoff_interval = 0.5
+ threshold = 0
+ debug_locks = False
+ detailed_debug_logging = False
+
+ # Act & Assert
+ with pytest.raises(ValueError):
+ PulseBackoff(
+ name,
+ initial_backoff_interval,
+ max_backoff_interval,
+ threshold,
+ debug_locks,
+ detailed_debug_logging,
+ )
+
+
+def test_initialize_backoff_invalid_initial_interval2():
+ """
+ Test that the PulseBackoff class raises a ValueError when initialized with an
+ invalid initial_backoff_interval.
+ """
+ # Arrange
+ name = "test_backoff"
+ initial_backoff_interval = -1.0
+ max_backoff_interval = 10.0
+ threshold = 0
+ debug_locks = False
+ detailed_debug_logging = False
+
+ # Act & Assert
+ with pytest.raises(ValueError):
+ PulseBackoff(
+ name,
+ initial_backoff_interval,
+ max_backoff_interval,
+ threshold,
+ debug_locks,
+ detailed_debug_logging,
+ )
+
+
+# Set absolute backoff time with invalid backoff_time
+def test_set_absolute_backoff_time_invalid_backoff_time():
+ """
+ Test that set_absolute_backoff_time raises a ValueError when given an invalid backoff_time.
+ """
+ # Arrange
+ name = "test_backoff"
+ initial_backoff_interval = 1.0
+ max_backoff_interval = 10.0
+ threshold = 0
+ debug_locks = False
+ detailed_debug_logging = False
+ backoff = PulseBackoff(
+ name,
+ initial_backoff_interval,
+ max_backoff_interval,
+ threshold,
+ debug_locks,
+ detailed_debug_logging,
+ )
+
+ # Act and Assert
+ invalid_backoff_time = time() - 1
+ with pytest.raises(ValueError):
+ backoff.set_absolute_backoff_time(invalid_backoff_time)
+
+
+# Wait for backoff with negative diff
+@pytest.mark.asyncio
+async def test_wait_for_backoff_with_negative_diff(mocker):
+    """
+    Test that the wait_for_backoff method handles a negative diff correctly.
+    """
+    # Arrange
+    name = "test_backoff"
+    initial_backoff_interval = 1.0
+    max_backoff_interval = 10.0
+    threshold = 0
+    debug_locks = False
+    detailed_debug_logging = False
+
+    backoff = PulseBackoff(
+        name,
+        initial_backoff_interval,
+        max_backoff_interval,
+        threshold,
+        debug_locks,
+        detailed_debug_logging,
+    )
+
+    # Set the expiration time to a past time
+    backoff._expiration_time = time() - 1
+
+    # Act
+    # An already-expired backoff should not block the caller.
+    await backoff.wait_for_backoff()
+
+    # Assert
+    # NOTE(review): _expiration_time is an absolute epoch timestamp, so this
+    # lower bound is very weak — consider asserting against time() instead.
+    assert backoff._expiration_time >= initial_backoff_interval
+
+
+# Calculate backoff interval with backoff_count <= threshold
+def test_calculate_backoff_interval_with_backoff_count_less_than_threshold():
+ """
+ Test that the calculate_backoff_interval method returns 0
+ when the backoff count is less than or equal to the threshold.
+ """
+ # Arrange
+ name = "test_backoff"
+ initial_backoff_interval = 1.0
+ max_backoff_interval = 10.0
+ threshold = 5
+ debug_locks = False
+ detailed_debug_logging = False
+
+ backoff = PulseBackoff(
+ name,
+ initial_backoff_interval,
+ max_backoff_interval,
+ threshold,
+ debug_locks,
+ detailed_debug_logging,
+ )
+
+ # Act
+ result = backoff._calculate_backoff_interval()
+
+ # Assert
+ assert result == 0.0
+
+
+# Calculate backoff interval with backoff_count > threshold and exceeds max_backoff_interval
+@pytest.mark.asyncio
+async def test_calculate_backoff_interval_exceeds_max(mocker):
+ """
+ Test that the calculate_backoff_interval method returns the correct backoff interval
+ when backoff_count is greater than threshold and exceeds max_backoff_interval.
+ """
+ # Arrange
+ name = "test_backoff"
+ initial_backoff_interval = 1.0
+ max_backoff_interval = 10.0
+ threshold = 0
+ debug_locks = False
+ detailed_debug_logging = False
+
+ backoff = PulseBackoff(
+ name,
+ initial_backoff_interval,
+ max_backoff_interval,
+ threshold,
+ debug_locks,
+ detailed_debug_logging,
+ )
+
+ backoff._backoff_count = 2
+
+ # Act
+ result = backoff._calculate_backoff_interval()
+
+ # Assert
+ assert result == 2.0
+ backoff._backoff_count = 3
+ result = backoff._calculate_backoff_interval()
+ assert result == 4.0
+ backoff._backoff_count = 4
+ result = backoff._calculate_backoff_interval()
+ assert result == 8.0
+ backoff._backoff_count = 5
+ result = backoff._calculate_backoff_interval()
+ assert result == max_backoff_interval
+ backoff._backoff_count = 6
+ result = backoff._calculate_backoff_interval()
+ assert result == max_backoff_interval
+
+ name = "test_backoff"
+ initial_backoff_interval = 1.0
+ max_backoff_interval = 10.0
+ threshold = 3
+ debug_locks = False
+ detailed_debug_logging = False
+
+ backoff = PulseBackoff(
+ name,
+ initial_backoff_interval,
+ max_backoff_interval,
+ threshold,
+ debug_locks,
+ detailed_debug_logging,
+ )
+
+ backoff._backoff_count = 2
+
+ # Act
+ result = backoff._calculate_backoff_interval()
+
+ # Assert
+ assert result == 1.0
+ backoff._backoff_count = 3
+ result = backoff._calculate_backoff_interval()
+ assert result == 1.0
+ backoff._backoff_count = 4
+ result = backoff._calculate_backoff_interval()
+ assert result == initial_backoff_interval
+ backoff._backoff_count = 5
+ result = backoff._calculate_backoff_interval()
+ assert result == initial_backoff_interval * 2
+ backoff._backoff_count = 6
+ result = backoff._calculate_backoff_interval()
+ assert result == initial_backoff_interval * 4
+ backoff._backoff_count = 7
+ result = backoff._calculate_backoff_interval()
+ assert result == initial_backoff_interval * 8
+ backoff._backoff_count = 8
+ result = backoff._calculate_backoff_interval()
+ assert result == max_backoff_interval
+ backoff._backoff_count = 9
+ result = backoff._calculate_backoff_interval()
+ assert result == max_backoff_interval
+
+
+# Increment backoff and update expiration_time
+def test_increment_backoff_and_update_expiration_time():
+    """
+    Test that increment_backoff increments the backoff count and that
+    reset_backoff returns it to zero when no expiration is pending.
+    """
+    # Arrange
+    name = "test_backoff"
+    initial_backoff_interval = 1.0
+    max_backoff_interval = 10.0
+    threshold = 0
+    debug_locks = False
+    detailed_debug_logging = False
+    backoff = PulseBackoff(
+        name,
+        initial_backoff_interval,
+        max_backoff_interval,
+        threshold,
+        debug_locks,
+        detailed_debug_logging,
+    )
+
+    # Sanity-check the freshly constructed backoff before acting on it.
+    assert backoff.backoff_count == 0
+    assert backoff._expiration_time == 0.0
+
+    # Act
+    backoff.increment_backoff()
+
+    # Assert
+    assert backoff.backoff_count == 1
+
+    # A second increment keeps counting upward.
+    backoff.increment_backoff()
+    assert backoff.backoff_count == 2
+
+    # With no future expiration pending, reset_backoff zeroes the count.
+    backoff.reset_backoff()
+    assert backoff.backoff_count == 0
+
+
+# Calculate backoff interval with backoff_count > threshold
+def test_calculate_backoff_interval_with_backoff_count_greater_than_threshold():
+ """
+ Test the calculation of backoff interval when backoff_count is greater than threshold.
+ """
+ # Arrange
+ name = "test_backoff"
+ initial_backoff_interval = 1.0
+ max_backoff_interval = 10.0
+ threshold = 0
+ debug_locks = False
+ detailed_debug_logging = False
+ backoff_count = 5
+
+ backoff = PulseBackoff(
+ name,
+ initial_backoff_interval,
+ max_backoff_interval,
+ threshold,
+ debug_locks,
+ detailed_debug_logging,
+ )
+ backoff._backoff_count = backoff_count
+
+ # Act
+ calculated_interval = backoff._calculate_backoff_interval()
+
+ # Assert
+ expected_interval = initial_backoff_interval * (2 ** (backoff_count - threshold))
+ assert calculated_interval == min(expected_interval, max_backoff_interval)
+
+
+# Test that calling increment backoff 4 times followed by wait for backoff
+# will sleep for 8 seconds with an initial backoff of 1, max backoff of 10.
+# And that an additional call to increment backoff followed by a wait for backoff will wait for 10.
+
+
+@pytest.mark.asyncio
+async def test_increment_backoff_and_wait_for_backoff(mock_sleep):
+ """
+ Test that calling increment backoff 4 times followed by wait for backoff will
+ sleep for 8 seconds with an initial backoff of 1, max backoff of 10.
+ And that an additional call to increment backoff followed by a wait
+ for backoff will wait for 10.
+ """
+ # Arrange
+ name = "test_backoff"
+ initial_backoff_interval = 1.0
+ max_backoff_interval = 10.0
+ threshold = 0
+ debug_locks = False
+ detailed_debug_logging = False
+
+ # Create a PulseBackoff object
+ backoff = PulseBackoff(
+ name,
+ initial_backoff_interval,
+ max_backoff_interval,
+ threshold,
+ debug_locks,
+ detailed_debug_logging,
+ )
+
+ # Act
+ await backoff.wait_for_backoff()
+ assert mock_sleep.call_count == 0
+ backoff.increment_backoff()
+
+ await backoff.wait_for_backoff()
+ assert mock_sleep.call_count == 1
+ assert mock_sleep.call_args_list[0][0][0] == initial_backoff_interval
+ backoff.increment_backoff()
+
+ await backoff.wait_for_backoff()
+ assert mock_sleep.call_count == 2
+ assert mock_sleep.call_args_list[1][0][0] == 2 * initial_backoff_interval
+ backoff.increment_backoff()
+
+ await backoff.wait_for_backoff()
+ assert mock_sleep.call_count == 3
+ assert mock_sleep.call_args_list[2][0][0] == 4 * initial_backoff_interval
+ backoff.increment_backoff()
+
+ # Additional call after 4 iterations
+ await backoff.wait_for_backoff()
+ assert mock_sleep.call_count == 4
+ assert mock_sleep.call_args_list[3][0][0] == 8 * initial_backoff_interval
+ backoff.increment_backoff()
+
+ await backoff.wait_for_backoff()
+ assert mock_sleep.call_count == 5
+ assert mock_sleep.call_args_list[4][0][0] == max_backoff_interval
+ backoff.increment_backoff()
+ await backoff.wait_for_backoff()
+ assert mock_sleep.call_count == 6
+ assert mock_sleep.call_args_list[4][0][0] == max_backoff_interval
+
+
+@pytest.mark.asyncio
+async def test_absolute_backoff_time(mock_sleep, freeze_time_to_now):
+ """
+ Test that the absolute backoff time is calculated correctly.
+ """
+ # Arrange
+ name = "test_backoff"
+ initial_backoff_interval = 1.0
+ max_backoff_interval = 10.0
+ threshold = 0
+ debug_locks = False
+ detailed_debug_logging = False
+ backoff = PulseBackoff(
+ name,
+ initial_backoff_interval,
+ max_backoff_interval,
+ threshold,
+ debug_locks,
+ detailed_debug_logging,
+ )
+
+ # Act
+ backoff.set_absolute_backoff_time(time() + 100)
+ assert backoff._backoff_count == 0
+ backoff.reset_backoff()
+ # make sure backoff can't be reset
+ assert backoff.expiration_time == time() + 100
+ await backoff.wait_for_backoff()
+ assert mock_sleep.call_count == 1
+ assert mock_sleep.call_args_list[0][0][0] == 100
diff --git a/tests/test_exceptions.py b/tests/test_exceptions.py
new file mode 100644
index 0000000..3c4fc81
--- /dev/null
+++ b/tests/test_exceptions.py
@@ -0,0 +1,159 @@
+# Generated by CodiumAI
+from time import time
+
+import pytest
+
+from pyadtpulse.exceptions import (
+ PulseAccountLockedError,
+ PulseAuthenticationError,
+ PulseClientConnectionError,
+ PulseConnectionError,
+ PulseExceptionWithBackoff,
+ PulseExceptionWithRetry,
+ PulseLoginException,
+ PulseNotLoggedInError,
+ PulseServerConnectionError,
+ PulseServiceTemporarilyUnavailableError,
+)
+from pyadtpulse.pulse_backoff import PulseBackoff
+
+
+class TestCodeUnderTest:
+ # PulseExceptionWithBackoff can be initialized with a message and a PulseBackoff object
+ def test_pulse_exception_with_backoff_initialization(self):
+ backoff = PulseBackoff("test", 1.0)
+ exception = PulseExceptionWithBackoff("error", backoff)
+ assert str(exception) == "PulseExceptionWithBackoff: error"
+ assert exception.backoff == backoff
+ assert backoff.backoff_count == 1
+
+ # PulseExceptionWithBackoff increments the backoff count when initialized
+ def test_pulse_exception_with_backoff_increment(self):
+ backoff = PulseBackoff("test", 1.0)
+ exception = PulseExceptionWithBackoff("error", backoff)
+ assert backoff.backoff_count == 1
+
+ # PulseExceptionWithRetry can be initialized with a message, a PulseBackoff object, and a retry time
+ def test_pulse_exception_with_retry_initialization(self):
+ backoff = PulseBackoff("test", 1.0)
+ retry_time = time() + 10
+ exception = PulseExceptionWithRetry("error", backoff, retry_time)
+ assert str(exception) == "PulseExceptionWithRetry: error"
+ assert exception.backoff == backoff
+ assert exception.retry_time == retry_time
+
+ # PulseExceptionWithRetry resets the backoff count and sets an absolute backoff time if retry time is in the future
+ def test_pulse_exception_with_retry_reset_and_set_absolute_backoff_time(self):
+ backoff = PulseBackoff("test", 1.0)
+ backoff.increment_backoff()
+ retry_time = time() + 10
+ exception = PulseExceptionWithRetry("error", backoff, retry_time)
+ assert backoff.backoff_count == 0
+ assert backoff.expiration_time == retry_time
+
+ # PulseServerConnectionError is a subclass of PulseExceptionWithBackoff and PulseConnectionError
+ def test_pulse_server_connection_error_inheritance_fixed(self):
+ assert issubclass(PulseServerConnectionError, PulseExceptionWithBackoff)
+ assert issubclass(PulseServerConnectionError, PulseConnectionError)
+
+ # PulseClientConnectionError is a subclass of PulseExceptionWithBackoff and PulseConnectionError
+ def test_pulse_client_connection_error_inheritance_fixed(self):
+ assert issubclass(PulseClientConnectionError, PulseExceptionWithBackoff)
+ assert issubclass(PulseClientConnectionError, PulseConnectionError)
+
+ # PulseExceptionWithBackoff raises an exception if initialized with an invalid message or non-PulseBackoff object
+ def test_pulse_exception_with_backoff_invalid_initialization(self):
+ with pytest.raises(Exception):
+ PulseExceptionWithBackoff(123, "backoff")
+
+ # PulseExceptionWithRetry raises an exception if initialized with an invalid message, non-PulseBackoff object, or invalid retry time
+ def test_pulse_exception_with_retry_invalid_initialization(self):
+ backoff = PulseBackoff("test", 1.0)
+ with pytest.raises(Exception):
+ PulseExceptionWithRetry(123, backoff, "retry")
+ with pytest.raises(Exception):
+ PulseExceptionWithRetry("error", "backoff", time() + 10)
+ with pytest.raises(Exception):
+ PulseExceptionWithRetry("error", backoff, "retry")
+
+ # PulseExceptionWithRetry does not reset the backoff count or set an absolute backoff time if retry time is in the past
+ def test_pulse_exception_with_retry_past_retry_time(self):
+ backoff = PulseBackoff("test", 1.0)
+ backoff.increment_backoff()
+ retry_time = time() - 10
+ with pytest.raises(PulseExceptionWithRetry):
+ raise PulseExceptionWithRetry(
+ "retry must be in the future", backoff, retry_time
+ )
+ # 1 backoff for increment
+ assert backoff.backoff_count == 2
+ assert backoff.expiration_time == 0.0
+
+ # PulseServiceTemporarilyUnavailableError does not reset the backoff count or set an absolute backoff time if retry time is in the past
+ def test_pulse_service_temporarily_unavailable_error_past_retry_time_fixed(self):
+ backoff = PulseBackoff("test", 1.0)
+ backoff.increment_backoff()
+ retry_time = time() - 10
+ with pytest.raises(PulseServiceTemporarilyUnavailableError):
+ raise PulseServiceTemporarilyUnavailableError(backoff, retry_time)
+ assert backoff.backoff_count == 2
+ assert backoff.expiration_time == 0.0
+
+ # PulseAuthenticationError is a subclass of PulseLoginException
+ def test_pulse_authentication_error_inheritance(self):
+ backoff = PulseBackoff("test", 1.0)
+ exception = PulseAuthenticationError()
+ assert isinstance(exception, PulseLoginException)
+
+ # PulseServiceTemporarilyUnavailableError is a subclass of PulseExceptionWithRetry and PulseConnectionError
+ def test_pulse_service_temporarily_unavailable_error(self):
+ backoff = PulseBackoff("test", 1.0)
+ exception = PulseServiceTemporarilyUnavailableError(
+ backoff, retry_time=time() + 10.0
+ )
+ assert backoff.backoff_count == 0
+ assert isinstance(exception, PulseExceptionWithRetry)
+ assert isinstance(exception, PulseConnectionError)
+
+ # PulseAccountLockedError is a subclass of PulseExceptionWithRetry and PulseLoginException
+ def test_pulse_account_locked_error_inheritance(self):
+ backoff = PulseBackoff("test", 1.0)
+ exception = PulseAccountLockedError(backoff, time() + 10.0)
+ assert backoff.backoff_count == 0
+ assert isinstance(exception, PulseExceptionWithRetry)
+ assert isinstance(exception, PulseLoginException)
+
+ # PulseExceptionWithBackoff string representation includes the class name and message
+ def test_pulse_exception_with_backoff_string_representation(self):
+ backoff = PulseBackoff("test", 1.0)
+ exception = PulseExceptionWithBackoff("error", backoff)
+ assert str(exception) == "PulseExceptionWithBackoff: error"
+
+    # PulseExceptionWithBackoff string representation includes the backoff object
+    def test_pulse_exception_with_backoff_includes_backoff_object(self):
+        backoff = PulseBackoff("test", 1.0)
+        exception = PulseExceptionWithBackoff("error", backoff)
+        assert str(exception) == "PulseExceptionWithBackoff: error"
+        assert exception.backoff == backoff
+        assert backoff.backoff_count == 1
+
+ # PulseExceptionWithRetry string representation includes the class name, message, backoff object, and retry time
+ def test_pulse_exception_with_retry_string_representation_fixed(self):
+ backoff = PulseBackoff("test", 1.0)
+ exception = PulseExceptionWithRetry("error", backoff, time() + 10)
+ expected_string = "PulseExceptionWithRetry: error"
+ assert str(exception) == expected_string
+
+ # PulseNotLoggedInError is a subclass of PulseLoginException
+ def test_pulse_not_logged_in_error_inheritance(self):
+ backoff = PulseBackoff("test", 1.0)
+ exception = PulseNotLoggedInError()
+ assert isinstance(exception, PulseLoginException)
+
+ # PulseExceptionWithRetry string representation does not include the backoff count if retry time is set
+ def test_pulse_exception_with_retry_string_representation(self):
+ backoff = PulseBackoff("test", 1.0)
+ exception = PulseExceptionWithRetry("error", backoff, time() + 10)
+ assert str(exception) == "PulseExceptionWithRetry: error"
+ assert exception.backoff == backoff
+ assert backoff.backoff_count == 0
diff --git a/tests/test_gateway.py b/tests/test_gateway.py
new file mode 100644
index 0000000..d608128
--- /dev/null
+++ b/tests/test_gateway.py
@@ -0,0 +1,378 @@
+# Generated by CodiumAI
+from ipaddress import IPv4Address
+
+import pytest
+
+from pyadtpulse.const import (
+ ADT_DEFAULT_POLL_INTERVAL,
+ ADT_GATEWAY_MAX_OFFLINE_POLL_INTERVAL,
+)
+from pyadtpulse.gateway import ADTPulseGateway
+
+
+# ADTPulseGateway object can be created with default values
+def test_default_values():
+ """
+ Test that ADTPulseGateway object can be created with default values
+ """
+ gateway = ADTPulseGateway()
+ assert gateway.manufacturer == "Unknown"
+ assert gateway._status_text == "OFFLINE"
+ assert gateway.backoff._name == "Gateway"
+ assert gateway.backoff._initial_backoff_interval == ADT_DEFAULT_POLL_INTERVAL
+ assert (
+ gateway.backoff._max_backoff_interval == ADT_GATEWAY_MAX_OFFLINE_POLL_INTERVAL
+ )
+    assert gateway.backoff._backoff_count == 0
+    assert gateway.backoff._expiration_time == 0.0
+    assert gateway.backoff._detailed_debug_logging is False
+    assert gateway.backoff._threshold == 0
+    assert gateway.model is None
+    assert gateway.serial_number is None
+    assert gateway.next_update == 0
+    assert gateway.last_update == 0
+    assert gateway.firmware_version is None
+    assert gateway.hardware_version is None
+    assert gateway.primary_connection_type is None
+    assert gateway.broadband_connection_status is None
+    assert gateway.cellular_connection_status is None
+    assert gateway._cellular_connection_signal_strength == 0.0
+    assert gateway.broadband_lan_ip_address is None
+    assert gateway._broadband_lan_mac is None
+    assert gateway.device_lan_ip_address is None
+    assert gateway._device_lan_mac is None
+    assert gateway.router_lan_ip_address is None
+    assert gateway.router_wan_ip_address is None
+
+
+# is_online property returns correct online status
+def test_is_online_property():
+ """
+ Test that is_online property returns correct online status
+ """
+ gateway = ADTPulseGateway()
+    assert gateway.is_online is False
+    gateway.is_online = True
+    assert gateway.is_online is True
+    gateway.is_online = False
+    assert gateway.is_online is False
+
+
+# poll_interval property can be set and returns correct value
+def test_poll_interval_property():
+ """
+ Test that poll_interval property can be set and returns correct value
+ """
+ gateway = ADTPulseGateway()
+ assert gateway.poll_interval == ADT_DEFAULT_POLL_INTERVAL
+ gateway.poll_interval = 60.0
+ assert gateway.poll_interval == 60.0
+
+
+# gateway MAC addresses can be set and retrieved
+def test_gateway_mac_addresses():
+ """
+ Test that gateway MAC addresses can be set and retrieved
+ """
+ gateway = ADTPulseGateway()
+ gateway.broadband_lan_mac = "00:11:22:33:44:55"
+ assert gateway.broadband_lan_mac == "00:11:22:33:44:55"
+ gateway.device_lan_mac = "AA:BB:CC:DD:EE:FF"
+ assert gateway.device_lan_mac == "AA:BB:CC:DD:EE:FF"
+
+
+# cellular connection signal strength can be set and retrieved
+def test_cellular_connection_signal_strength():
+ """
+ Test that cellular connection signal strength can be set and retrieved
+ """
+ gateway = ADTPulseGateway()
+ gateway.cellular_connection_signal_strength = -70.5
+ assert gateway.cellular_connection_signal_strength == -70.5
+
+
+# set_gateway_attributes method sets attributes correctly
+def test_set_gateway_attributes_sets_attributes_correctly():
+ """
+ Test that set_gateway_attributes method sets attributes correctly
+ """
+ gateway = ADTPulseGateway()
+ attributes = {
+ "manufacturer": "ADT",
+ "model": "1234",
+ "serial_number": "5678",
+ "firmware_version": "1.0",
+ "hardware_version": "2.0",
+ "primary_connection_type": "Ethernet",
+ "broadband_connection_status": "Connected",
+ "cellular_connection_status": "Connected",
+ "broadband_lan_mac": "00:11:22:33:44:55",
+ "device_lan_mac": "AA:BB:CC:DD:EE:FF",
+ "cellular_connection_signal_strength": 4.5,
+ }
+
+ gateway.set_gateway_attributes(attributes)
+
+ assert gateway.manufacturer == "ADT"
+ assert gateway.model == "1234"
+ assert gateway.serial_number == "5678"
+ assert gateway.firmware_version == "1.0"
+ assert gateway.hardware_version == "2.0"
+ assert gateway.primary_connection_type == "Ethernet"
+ assert gateway.broadband_connection_status == "Connected"
+ assert gateway.cellular_connection_status == "Connected"
+ assert gateway.broadband_lan_mac == "00:11:22:33:44:55"
+ assert gateway.device_lan_mac == "AA:BB:CC:DD:EE:FF"
+ assert gateway.cellular_connection_signal_strength == 4.5
+
+
+# backoff object can be created with default values and current backoff interval can be retrieved
+def test_default_values2():
+ """
+    Test backoff object default values exposed via public accessors on a default gateway
+ """
+ gateway = ADTPulseGateway()
+ assert gateway.manufacturer == "Unknown"
+ assert gateway._status_text == "OFFLINE"
+ assert gateway.backoff.name == "Gateway"
+ assert gateway.backoff.initial_backoff_interval == ADT_DEFAULT_POLL_INTERVAL
+ assert (
+ gateway.backoff._max_backoff_interval == ADT_GATEWAY_MAX_OFFLINE_POLL_INTERVAL
+ )
+ assert gateway.backoff.backoff_count == 0
+ assert gateway.backoff.expiration_time == 0.0
+    assert gateway.backoff._detailed_debug_logging is False
+    assert gateway.backoff._threshold == 0
+    assert gateway.model is None
+    assert gateway.serial_number is None
+    assert gateway.next_update == 0
+    assert gateway.last_update == 0
+    assert gateway.firmware_version is None
+    assert gateway.hardware_version is None
+    assert gateway.primary_connection_type is None
+    assert gateway.broadband_connection_status is None
+    assert gateway.cellular_connection_status is None
+    assert gateway._cellular_connection_signal_strength == 0.0
+    assert gateway.broadband_lan_ip_address is None
+    assert gateway._broadband_lan_mac is None
+    assert gateway.device_lan_ip_address is None
+    assert gateway._device_lan_mac is None
+    assert gateway.router_lan_ip_address is None
+    assert gateway.router_wan_ip_address is None
+
+
+# backoff object can be incremented and reset correctly
+def test_backoff_increment_and_reset():
+ """
+ Test that backoff object can be incremented and reset correctly
+ """
+ gateway = ADTPulseGateway()
+
+ # Increment backoff count
+ gateway.backoff.increment_backoff()
+ assert gateway.backoff._backoff_count == 1
+
+ # Reset backoff count
+ gateway.backoff.reset_backoff()
+ assert gateway.backoff._backoff_count == 0
+
+
+# is_online property returns correct offline status when set to False
+def test_is_online_returns_correct_offline_status_when_set_to_false():
+ """
+ Test that is_online property returns correct offline status when set to False
+ """
+ gateway = ADTPulseGateway()
+ gateway.is_online = False
+    assert gateway.is_online is False
+
+
+# poll_interval property raises ValueError when set to 0
+def test_poll_interval_raises_value_error_when_set_to_0():
+ """
+ Test that poll_interval property raises ValueError when set to 0
+ """
+ gateway = ADTPulseGateway()
+ with pytest.raises(ValueError):
+ gateway.poll_interval = 0
+
+
+# backoff object can wait for correct amount of time before returning
+@pytest.mark.asyncio
+async def test_backoff_wait_time():
+ """
+ Test that backoff object can wait for correct amount of time before returning
+ """
+ import time # Import the 'time' module
+
+ # Arrange
+ gateway = ADTPulseGateway()
+ gateway.backoff._backoff_count = 1
+ gateway.backoff._threshold = 0
+ gateway.backoff._initial_backoff_interval = 1.0
+ gateway.backoff._max_backoff_interval = 10.0
+    gateway.backoff._expiration_time = time.time() + 0.5
+
+    # Act
+    start_time = time.time()
+    await gateway.backoff.wait_for_backoff()
+
+    # Assert
+    end_time = time.time()
+    assert end_time - start_time >= 0.5
+
+
+# Test that set_gateway_attributes method sets attributes to None when given an empty string
+def test_set_gateway_attributes_empty_string_fixed():
+ """
+ Test that set_gateway_attributes method sets attributes to None when given an empty string
+ """
+ gateway = ADTPulseGateway()
+ gateway.set_gateway_attributes(
+ {"model": "", "serial_number": "", "firmware_version": ""}
+ )
+ assert gateway.model is None
+ assert gateway.serial_number is None
+ assert gateway.firmware_version is None
+
+
+# cellular connection signal strength can be set to 0.0
+def test_cellular_connection_signal_strength_to_zero():
+ """
+ Test that cellular connection signal strength can be set to 0.0
+ """
+ gateway = ADTPulseGateway()
+ gateway.cellular_connection_signal_strength = 0.0
+ assert gateway.cellular_connection_signal_strength == 0.0
+
+
+# poll_interval property raises ValueError when set to a value greater than ADT_GATEWAY_MAX_OFFLINE_POLL_INTERVAL
+def test_poll_interval_raises_value_error():
+ """
+ Test that poll_interval property raises ValueError when set to a value greater than ADT_GATEWAY_MAX_OFFLINE_POLL_INTERVAL
+ """
+ gateway = ADTPulseGateway()
+ with pytest.raises(ValueError):
+ gateway.poll_interval = ADT_GATEWAY_MAX_OFFLINE_POLL_INTERVAL + 1
+
+
+# Test that set_gateway_attributes method sets attributes to a valid value when given a valid value
+def test_set_gateway_attributes_valid_value():
+ """
+ Test that set_gateway_attributes method sets attributes to a valid value when given a valid value
+ """
+ gateway = ADTPulseGateway()
+ gateway.set_gateway_attributes({"broadband_lan_mac": "00:0a:95:9d:68:16"})
+ assert gateway.broadband_lan_mac == "00:0a:95:9d:68:16"
+
+
+# Test that set_gateway_attributes method sets IP address attributes to None when given an invalid IP address
+def test_set_gateway_attributes_invalid_ip():
+ """
+ Test that set_gateway_attributes method sets IP address attributes to None when given an invalid IP address
+ """
+ gateway = ADTPulseGateway()
+ gateway.set_gateway_attributes({"broadband_lan_ip_address": "invalid_ip"})
+ assert gateway.broadband_lan_ip_address is None
+ gateway.set_gateway_attributes({"device_lan_ip_address": "invalid_ip"})
+ assert gateway.device_lan_ip_address is None
+ gateway.set_gateway_attributes({"router_lan_ip_address": "invalid_ip"})
+ assert gateway.router_lan_ip_address is None
+ gateway.set_gateway_attributes({"router_wan_ip_address": "invalid_ip"})
+ assert gateway.router_wan_ip_address is None
+
+
+# gateway MAC addresses raise ValueError when set to an invalid MAC address
+def test_gateway_mac_address_invalid():
+ """
+ Test that setting an invalid MAC address raises a ValueError
+ """
+ gateway = ADTPulseGateway()
+ with pytest.raises(ValueError):
+ gateway.broadband_lan_mac = "00:00:00:00:00:00:00"
+ with pytest.raises(ValueError):
+ gateway.device_lan_mac = "00:00:00:00:00:00:00"
+
+
+# is_online property can be set to True and False
+def test_is_online_property_true_and_false():
+ """
+ Test that is_online property can be set to True and False
+ """
+ gateway = ADTPulseGateway()
+
+ # Test setting is_online to True
+ gateway.is_online = True
+    assert gateway.is_online is True
+    assert gateway._status_text == "ONLINE"
+
+    # Test setting is_online to False
+    gateway.is_online = False
+    assert gateway.is_online is False
+ assert gateway._status_text == "OFFLINE"
+
+
+# poll_interval property can be set to a custom value
+def test_poll_interval_custom_value():
+ """
+ Test that poll_interval property can be set to a custom value
+ """
+ gateway = ADTPulseGateway()
+ custom_interval = 10.0
+ gateway.poll_interval = custom_interval
+ assert gateway.poll_interval == custom_interval
+
+
+# ADTPulseGateway object can be created with custom values
+def test_custom_values():
+ """
+ Test that ADTPulseGateway object can be created with custom values
+ """
+ gateway = ADTPulseGateway(
+ manufacturer="Custom Manufacturer",
+ _status_text="CUSTOM_STATUS",
+ model="Custom Model",
+ serial_number="Custom Serial Number",
+ next_update=1234567890,
+ last_update=9876543210,
+ firmware_version="Custom Firmware Version",
+ hardware_version="Custom Hardware Version",
+ primary_connection_type="Custom Connection Type",
+ broadband_connection_status="Custom Broadband Status",
+ cellular_connection_status="Custom Cellular Status",
+ _cellular_connection_signal_strength=0.5,
+ broadband_lan_ip_address=IPv4Address("192.168.0.1"),
+ _broadband_lan_mac="00:11:22:33:44:55",
+ device_lan_ip_address=IPv4Address("192.168.0.2"),
+ _device_lan_mac="AA:BB:CC:DD:EE:FF",
+ router_lan_ip_address=IPv4Address("192.168.1.1"),
+ router_wan_ip_address=IPv4Address("10.0.0.1"),
+ )
+
+ assert gateway.manufacturer == "Custom Manufacturer"
+ assert gateway._status_text == "CUSTOM_STATUS"
+ assert gateway.backoff._name == "Gateway"
+ assert gateway.backoff._initial_backoff_interval == ADT_DEFAULT_POLL_INTERVAL
+ assert (
+ gateway.backoff._max_backoff_interval == ADT_GATEWAY_MAX_OFFLINE_POLL_INTERVAL
+ )
+ assert gateway.backoff._backoff_count == 0
+ assert gateway.backoff._expiration_time == 0.0
+    assert gateway.backoff._detailed_debug_logging is False
+ assert gateway.backoff._threshold == 0
+ assert gateway.model == "Custom Model"
+ assert gateway.serial_number == "Custom Serial Number"
+ assert gateway.next_update == 1234567890
+ assert gateway.last_update == 9876543210
+ assert gateway.firmware_version == "Custom Firmware Version"
+ assert gateway.hardware_version == "Custom Hardware Version"
+ assert gateway.primary_connection_type == "Custom Connection Type"
+ assert gateway.broadband_connection_status == "Custom Broadband Status"
+ assert gateway.cellular_connection_status == "Custom Cellular Status"
+ assert gateway._cellular_connection_signal_strength == 0.5
+ assert gateway.broadband_lan_ip_address == IPv4Address("192.168.0.1")
+ assert gateway._broadband_lan_mac == "00:11:22:33:44:55"
+ assert gateway.device_lan_ip_address == IPv4Address("192.168.0.2")
+ assert gateway._device_lan_mac == "AA:BB:CC:DD:EE:FF"
+ assert gateway.router_lan_ip_address == IPv4Address("192.168.1.1")
+ assert gateway.router_wan_ip_address == IPv4Address("10.0.0.1")
diff --git a/tests/test_paa_codium.py b/tests/test_paa_codium.py
new file mode 100644
index 0000000..0293e21
--- /dev/null
+++ b/tests/test_paa_codium.py
@@ -0,0 +1,259 @@
+# Generated by CodiumAI
+
+import pytest
+from lxml import html
+
+from conftest import LoginType, add_signin
+from pyadtpulse.exceptions import PulseAuthenticationError, PulseNotLoggedInError
+from pyadtpulse.pyadtpulse_async import PyADTPulseAsync
+from pyadtpulse.site import ADTPulseSite
+
+
+class TestPyADTPulseAsync:
+ # The class can be instantiated with the required parameters (username, password, fingerprint) and optional parameters (service_host, user_agent, debug_locks, keepalive_interval, relogin_interval, detailed_debug_logging).
+ @pytest.mark.asyncio
+ async def test_instantiation_with_parameters(self):
+ pulse = PyADTPulseAsync(
+ username="valid_email@example.com",
+ password="your_password",
+ fingerprint="your_fingerprint",
+ service_host="https://portal.adtpulse.com",
+ user_agent="Your User Agent",
+ debug_locks=False,
+ keepalive_interval=5,
+ relogin_interval=60,
+ detailed_debug_logging=True,
+ )
+ assert isinstance(pulse, PyADTPulseAsync)
+
+ # The __repr__ method returns a string representation of the class.
+ @pytest.mark.asyncio
+ async def test_repr_method_with_valid_email(self):
+ pulse = PyADTPulseAsync(
+ username="your_username@example.com",
+ password="your_password",
+ fingerprint="your_fingerprint",
+ )
+ assert repr(pulse) == ""
+
+ # The async_login method successfully authenticates the user to the ADT Pulse cloud service using a valid email address as the username.
+ @pytest.mark.asyncio
+ async def test_async_login_success_with_valid_email(
+ self, mocked_server_responses, get_mocked_url, read_file
+ ):
+ pulse = PyADTPulseAsync(
+ username="valid_email@example.com",
+ password="your_password",
+ fingerprint="your_fingerprint",
+ )
+ add_signin(
+ LoginType.SUCCESS, mocked_server_responses, get_mocked_url, read_file
+ )
+ await pulse.async_login()
+
+ # The class is instantiated without the required parameters (username, password, fingerprint) and raises an exception.
+ @pytest.mark.asyncio
+ async def test_instantiation_without_parameters(self):
+ with pytest.raises(TypeError):
+            PyADTPulseAsync()
+
+ # The async_login method fails to authenticate the user to the ADT Pulse cloud service and raises a PulseAuthenticationError.
+ @pytest.mark.asyncio
+ async def test_async_login_failure_with_valid_username(self):
+ pulse = PyADTPulseAsync(
+ username="valid_email@example.com",
+ password="invalid_password",
+ fingerprint="invalid_fingerprint",
+ )
+ with pytest.raises(PulseAuthenticationError):
+ await pulse.async_login()
+
+ # The async_logout method is called without being logged in and returns without any action.
+ @pytest.mark.asyncio
+ async def test_async_logout_without_login_with_valid_email_fixed(self):
+ pulse = PyADTPulseAsync(
+ username="valid_username@example.com",
+ password="valid_password",
+ fingerprint="valid_fingerprint",
+ )
+ with pytest.raises(RuntimeError):
+ await pulse.async_logout()
+
+ # The async_logout method successfully logs the user out of the ADT Pulse cloud service.
+ @pytest.mark.asyncio
+ async def test_async_logout_successfully_logs_out(
+ self, mocked_server_responses, get_mocked_url, read_file
+ ):
+ # Arrange
+ pulse = PyADTPulseAsync(
+ username="test_user@example.com",
+ password="test_password",
+ fingerprint="test_fingerprint",
+ )
+ add_signin(
+ LoginType.SUCCESS, mocked_server_responses, get_mocked_url, read_file
+ )
+ # Act
+ await pulse.async_login()
+ await pulse.async_logout()
+
+ # Assert
+ assert not pulse.is_connected
+
+ # The site property returns an ADTPulseSite object after logging in.
+ @pytest.mark.asyncio
+ async def test_site_property_returns_ADTPulseSite_object_with_login(
+ self, mocked_server_responses, get_mocked_url, read_file
+ ):
+ # Arrange
+ pulse = PyADTPulseAsync("test@example.com", "valid_password", "fingerprint")
+ add_signin(
+ LoginType.SUCCESS, mocked_server_responses, get_mocked_url, read_file
+ )
+ # Act
+ await pulse.async_login()
+ site = pulse.site
+
+ # Assert
+ assert isinstance(site, ADTPulseSite)
+
+ # The is_connected property returns True if the class is connected to the ADT Pulse cloud service.
+ @pytest.mark.asyncio
+ async def test_is_connected_property_returns_true(
+ self, mocked_server_responses, get_mocked_url, read_file
+ ):
+ pulse = PyADTPulseAsync(
+ username="valid_username@example.com",
+ password="valid_password",
+ fingerprint="valid_fingerprint",
+ )
+ add_signin(
+ LoginType.SUCCESS, mocked_server_responses, get_mocked_url, read_file
+ )
+ await pulse.async_login()
+        assert pulse.is_connected is True
+
+ # The site property is accessed without being logged in and raises an exception.
+ @pytest.mark.asyncio
+ async def test_site_property_without_login_raises_exception(self):
+ pulse = PyADTPulseAsync(
+ username="test@example.com",
+ password="your_password",
+ fingerprint="your_fingerprint",
+ service_host="https://portal.adtpulse.com",
+ user_agent="Your User Agent",
+ debug_locks=False,
+ keepalive_interval=5,
+ relogin_interval=60,
+ detailed_debug_logging=True,
+ )
+ with pytest.raises(RuntimeError):
+ pulse.site
+
+ # The sites property returns a list of ADTPulseSite objects.
+ @pytest.mark.asyncio
+ async def test_sites_property_returns_list_of_objects(
+ self, mocked_server_responses, get_mocked_url, read_file
+ ):
+ # Arrange
+ pulse = PyADTPulseAsync(
+ "test@example.com", "valid_password", "valid_fingerprint"
+ )
+ add_signin(
+ LoginType.SUCCESS, mocked_server_responses, get_mocked_url, read_file
+ )
+ # Act
+ await pulse.async_login()
+ sites = pulse.sites
+
+ # Assert
+ assert isinstance(sites, list)
+ for site in sites:
+ assert isinstance(site, ADTPulseSite)
+
+ # The is_connected property returns False if the class is not connected to the ADT Pulse cloud service.
+ @pytest.mark.asyncio
+ async def test_is_connected_property_returns_false_when_not_connected(self):
+ pulse = PyADTPulseAsync(
+ username="your_username@example.com",
+ password="your_password",
+ fingerprint="your_fingerprint",
+ )
+        assert pulse.is_connected is False
+
+ # The sites property is accessed without being logged in and raises an exception.
+ @pytest.mark.asyncio
+ async def test_sites_property_without_login_raises_exception(self):
+ pulse = PyADTPulseAsync(
+ username="your_username@example.com",
+ password="your_password",
+ fingerprint="your_fingerprint",
+ service_host="https://portal.adtpulse.com",
+ user_agent="Your User Agent",
+ debug_locks=False,
+ keepalive_interval=5,
+ relogin_interval=60,
+ detailed_debug_logging=True,
+ )
+ with pytest.raises(RuntimeError):
+ pulse.sites
+
+ # The wait_for_update method is called without being logged in and raises an exception.
+ @pytest.mark.asyncio
+ async def test_wait_for_update_without_login_raises_exception(self):
+ pulse = PyADTPulseAsync(
+ username="your_username@example.com",
+ password="your_password",
+ fingerprint="your_fingerprint",
+ service_host="https://portal.adtpulse.com",
+ user_agent="Your User Agent",
+ debug_locks=False,
+ keepalive_interval=5,
+ relogin_interval=60,
+ detailed_debug_logging=True,
+ )
+
+ with pytest.raises(PulseNotLoggedInError):
+ await pulse.wait_for_update()
+
+ # The _initialize_sites method retrieves the site id and name from the lxml
+ # etree and creates a new ADTPulseSite object.
+ @pytest.mark.asyncio
+ async def test_initialize_sites_method_with_valid_service_host(
+ self, mocker, read_file
+ ):
+ # Arrange
+ username = "test@example.com"
+ password = "test_password"
+ fingerprint = "test_fingerprint"
+ service_host = "https://portal.adtpulse.com"
+ user_agent = "Test User Agent"
+ debug_locks = False
+ keepalive_interval = 10
+ relogin_interval = 30
+ detailed_debug_logging = True
+
+ pulse = PyADTPulseAsync(
+ username=username,
+ password=password,
+ fingerprint=fingerprint,
+ service_host=service_host,
+ user_agent=user_agent,
+ debug_locks=debug_locks,
+ keepalive_interval=keepalive_interval,
+ relogin_interval=relogin_interval,
+ detailed_debug_logging=detailed_debug_logging,
+ )
+
+ tree = html.fromstring(read_file("summary.html"))
+
+ # Mock the fetch_devices method to always return True
+ # mocker.patch.object(ADTPulseSite, "fetch_devices", return_value=True)
+
+ # Act
+ await pulse._initialize_sites(tree)
+
+ # Assert
+ assert pulse._site is not None
+ assert pulse._site.id == "160301za524548"
+ assert pulse._site.name == "Robert Lippmann"
diff --git a/tests/test_pap.py b/tests/test_pap.py
new file mode 100644
index 0000000..010fec2
--- /dev/null
+++ b/tests/test_pap.py
@@ -0,0 +1,314 @@
+# Generated by CodiumAI
+
+import pytest
+from typeguard import TypeCheckError
+
+from pyadtpulse.pulse_authentication_properties import PulseAuthenticationProperties
+
+
+class TestPulseAuthenticationProperties:
+ # Initialize object with valid username, password, and fingerprint
+ def test_initialize_with_valid_credentials(self):
+ """
+ Test initializing PulseAuthenticationProperties with valid username, password, and fingerprint
+ """
+ # Arrange
+ username = "test@example.com"
+ password = "password123"
+ fingerprint = "fingerprint123"
+
+ # Act
+ properties = PulseAuthenticationProperties(username, password, fingerprint)
+
+ # Assert
+ assert properties.username == username
+ assert properties.password == password
+ assert properties.fingerprint == fingerprint
+
+ # Get and set username, password, fingerprint, site_id, and last_login_time properties
+ def test_get_and_set_properties(self):
+ """
+ Test getting and setting username, password, fingerprint, site_id, and last_login_time properties
+ """
+ # Arrange
+ username = "test@example.com"
+ password = "password123"
+ fingerprint = "fingerprint123"
+ site_id = "site123"
+ last_login_time = 123456789
+
+ properties = PulseAuthenticationProperties(username, password, fingerprint)
+
+ # Act
+ properties.username = "new_username@example.com"
+ properties.password = "new_password"
+ properties.fingerprint = "new_fingerprint"
+ properties.site_id = site_id
+ properties.last_login_time = last_login_time
+
+ # Assert
+ assert properties.username == "new_username@example.com"
+ assert properties.password == "new_password"
+ assert properties.fingerprint == "new_fingerprint"
+ assert properties.site_id == site_id
+ assert properties.last_login_time == last_login_time
+
+ # Get last_login_time property after setting it
+ def test_get_last_login_time_after_setting(self):
+ """
+ Test getting last_login_time property after setting it
+ """
+ # Arrange
+ username = "test@example.com"
+ password = "password123"
+ fingerprint = "fingerprint123"
+ last_login_time = 123456789
+
+ properties = PulseAuthenticationProperties(username, password, fingerprint)
+
+ # Act
+ properties.last_login_time = last_login_time
+
+ # Assert
+ assert properties.last_login_time == last_login_time
+
+ # Set username, password, fingerprint, site_id properties with valid values
+ def test_set_properties_with_valid_values(self):
+ """
+ Test setting username, password, fingerprint, site_id properties with valid values
+ """
+ # Arrange
+ username = "test@example.com"
+ password = "password123"
+ fingerprint = "fingerprint123"
+ site_id = "site123"
+
+ properties = PulseAuthenticationProperties(username, password, fingerprint)
+
+ # Act
+ properties.site_id = site_id
+
+ # Assert
+ assert properties.username == username
+ assert properties.password == password
+ assert properties.fingerprint == fingerprint
+ assert properties.site_id == site_id
+
+ # Set username, password, fingerprint properties with non-empty fingerprint
+ def test_set_properties_with_non_empty_fingerprint(self):
+ """
+ Test setting username, password, fingerprint properties with non-empty fingerprint
+ """
+ # Arrange
+ username = "test@example.com"
+ password = "password123"
+ fingerprint = "fingerprint123"
+
+ properties = PulseAuthenticationProperties(username, password, fingerprint)
+
+ # Act
+ properties.username = username
+ properties.password = password
+ properties.fingerprint = fingerprint
+
+ # Assert
+ assert properties.username == username
+ assert properties.password == password
+ assert properties.fingerprint == fingerprint
+
+ # Set site_id property with empty string
+ def test_set_site_id_with_empty_string(self):
+ """
+ Test setting site_id property with empty string
+ """
+ # Arrange
+ site_id = ""
+
+ properties = PulseAuthenticationProperties(
+ "test@example.com", "password123", "fingerprint123"
+ )
+
+ # Act
+ properties.site_id = site_id
+
+ # Assert
+ assert properties.site_id == site_id
+
+ # Initialize object with empty username, password, or fingerprint
+ def test_initialize_with_empty_credentials(self):
+ """
+ Test initializing PulseAuthenticationProperties with empty username, password, or fingerprint
+ """
+ # Arrange
+ username = ""
+ password = ""
+ fingerprint = ""
+
+ # Act and Assert
+ with pytest.raises(ValueError):
+ PulseAuthenticationProperties(username, password, fingerprint)
+
+ # Initialize object with invalid username or password
+ def test_initialize_with_invalid_credentials1(self):
+ """
+ Test initializing PulseAuthenticationProperties with invalid username or password
+ """
+ # Arrange
+ username = "invalid_username"
+ password = "invalid_password"
+ fingerprint = "fingerprint123"
+
+ # Act and Assert
+ with pytest.raises(ValueError):
+ PulseAuthenticationProperties(username, password, fingerprint)
+
+ # Set username, password, fingerprint properties with invalid values
+ def test_set_properties_with_invalid_values(self):
+ """
+ Test setting username, password, fingerprint properties with invalid values
+ """
+ # Arrange
+ username = "invalid_username"
+ password = ""
+ fingerprint = ""
+
+ properties = PulseAuthenticationProperties(
+ "test@example.com", "password123", "fingerprint123"
+ )
+
+ # Act and Assert
+ with pytest.raises(ValueError):
+ properties.username = username
+
+ with pytest.raises(ValueError):
+ properties.password = password
+
+ with pytest.raises(ValueError):
+ properties.fingerprint = fingerprint
+
+ # Set last_login_time property with non-integer value
+ def test_set_last_login_time_with_non_integer_value(self):
+ """
+ Test setting last_login_time property with non-integer value
+ """
+ # Arrange
+ username = "test@example.com"
+ password = "password123"
+ fingerprint = "fingerprint123"
+ last_login_time = "invalid_time"
+
+ properties = PulseAuthenticationProperties(username, password, fingerprint)
+
+ # Act and Assert
+ with pytest.raises(TypeCheckError) as exc_info:
+ properties.last_login_time = last_login_time
+
+ # Assert
+ assert (
+ str(exc_info.value)
+ == 'argument "login_time" (str) is not an instance of int'
+ )
+
+ # Set site_id property with non-string value
+ def test_set_site_id_with_non_string_value(self):
+ """
+ Test setting site_id property with non-string value
+ """
+ # Arrange
+ username = "test@example.com"
+ password = "password123"
+ fingerprint = "fingerprint123"
+ site_id = 12345 # Fix: Set a non-string value
+
+ properties = PulseAuthenticationProperties(username, password, fingerprint)
+
+ # Act
+ with pytest.raises(TypeCheckError):
+ properties.site_id = site_id
+
+ # Assert
+ assert not properties.site_id
+
+ # Set last_login_time property with integer value
+ def test_set_last_login_time_with_integer_value(self):
+ """
+ Test setting last_login_time property with integer value
+ """
+ # Arrange
+ username = "test@example.com"
+ password = "password123"
+ fingerprint = "fingerprint123"
+ last_login_time = 123456789
+
+ properties = PulseAuthenticationProperties(username, password, fingerprint)
+
+ # Act
+ properties.last_login_time = last_login_time
+
+ # Assert
+ assert properties.last_login_time == last_login_time
+
+ # Raise ValueError when initializing object with invalid username or password
+ def test_initialize_with_invalid_credentials(self):
+ """
+ Test initializing PulseAuthenticationProperties with invalid username or password
+ """
+ # Arrange
+ username = "invalid_username"
+ password = ""
+ fingerprint = "valid_fingerprint"
+
+ # Act and Assert
+ with pytest.raises(ValueError):
+            PulseAuthenticationProperties(username, password, fingerprint)
+
+ # Raise TypeError when setting site_id property with non-string value
+ def test_raise_type_error_when_setting_site_id_with_non_string_value(self):
+ """
+ Test that a TypeError is raised when setting the site_id property with a non-string value
+ """
+ # Arrange
+ properties = PulseAuthenticationProperties(
+ "test@example.com", "password123", "fingerprint123"
+ )
+
+ # Act and Assert
+ with pytest.raises(TypeCheckError):
+ properties.site_id = 123
+
+ # Raise ValueError when setting username, password, fingerprint properties with invalid values
+ def test_invalid_properties(self):
+ """
+ Test that ValueError is raised when setting invalid username, password, and fingerprint properties
+ """
+ # Arrange
+ username = "test@example.com"
+ password = "password123"
+ fingerprint = "fingerprint123"
+ properties = PulseAuthenticationProperties(username, password, fingerprint)
+
+ # Act and Assert
+ with pytest.raises(ValueError):
+ properties.username = ""
+ with pytest.raises(ValueError):
+ properties.password = ""
+ with pytest.raises(ValueError):
+ properties.fingerprint = ""
+
+ # Raise TypeCheckError when setting last_login_time property with non-integer value
+ def test_raise_type_check_error_when_setting_last_login_time_with_non_integer_value(
+ self,
+ ):
+ """
+ Test that a TypeCheckError is raised when setting the last_login_time property with a non-integer value
+ """
+ import typeguard
+
+ # Arrange
+ properties = PulseAuthenticationProperties(
+ "test@example.com", "password123", "fingerprint123"
+ )
+
+ # Act and Assert
+ with pytest.raises(typeguard.TypeCheckError):
+ properties.last_login_time = "invalid_time"
diff --git a/tests/test_pqm_codium.py b/tests/test_pqm_codium.py
new file mode 100644
index 0000000..435d01e
--- /dev/null
+++ b/tests/test_pqm_codium.py
@@ -0,0 +1,508 @@
+# Generated by CodiumAI
+
+# Dependencies:
+# pip install pytest-mock
+from time import time
+
+import pytest
+from aiohttp.client_exceptions import ClientConnectionError, ServerDisconnectedError
+from aiohttp.client_reqrep import ConnectionKey
+from yarl import URL
+
+from conftest import MOCKED_API_VERSION
+from pyadtpulse.exceptions import (
+ PulseClientConnectionError,
+ PulseNotLoggedInError,
+ PulseServerConnectionError,
+ PulseServiceTemporarilyUnavailableError,
+)
+from pyadtpulse.pulse_backoff import PulseBackoff
+from pyadtpulse.pulse_connection_properties import PulseConnectionProperties
+from pyadtpulse.pulse_connection_status import PulseConnectionStatus
+from pyadtpulse.pulse_query_manager import PulseQueryManager
+
+
+class TestPulseQueryManager:
+ # can successfully make a GET request to a given URI with a valid service host
+ @pytest.mark.asyncio
+ async def test_get_request_success_with_valid_service_host(self, mocker):
+ # Given
+ connection_status = PulseConnectionStatus()
+ connection_properties = PulseConnectionProperties("https://portal.adtpulse.com")
+ query_manager = PulseQueryManager(connection_status, connection_properties)
+ expected_response = (200, "Response", URL("http://example.com"))
+
+ async def mock_async_query(
+ uri,
+ method="GET",
+ extra_params=None,
+ extra_headers=None,
+ timeout=1,
+ requires_authentication=True,
+ ):
+ return expected_response
+
+ mocker.patch.object(
+ PulseQueryManager, "async_query", side_effect=mock_async_query
+ )
+
+ # When
+ response = await query_manager.async_query("/api/data")
+
+ # Then
+ assert response == expected_response
+
+ # can successfully make a POST request to a given URI
+ @pytest.mark.asyncio
+ async def test_post_request_success(self, mocker):
+ # Given
+ connection_status = PulseConnectionStatus()
+ connection_properties = PulseConnectionProperties("https://portal.adtpulse.com")
+ query_manager = PulseQueryManager(connection_status, connection_properties)
+ expected_response = (200, "Response", URL("http://example.com"))
+
+ async def mock_async_query(
+ uri,
+ method,
+ extra_params=None,
+ extra_headers=None,
+ timeout=1,
+ requires_authentication=True,
+ ):
+ return expected_response
+
+ mocker.patch.object(
+ PulseQueryManager, "async_query", side_effect=mock_async_query
+ )
+
+ # When
+ response = await query_manager.async_query("/api/data", method="POST")
+
+ # Then
+ assert response == expected_response
+
+ # can handle HTTP 200 OK response with a valid service host
+ @pytest.mark.asyncio
+ async def test_handle_http_200_ok_with_valid_service_host(self, mocker):
+ # Given
+ connection_status = PulseConnectionStatus()
+ connection_properties = PulseConnectionProperties("https://portal.adtpulse.com")
+ query_manager = PulseQueryManager(connection_status, connection_properties)
+ expected_response = (200, "Response", URL("http://example.com"))
+
+ async def mock_async_query(
+ uri,
+ method="GET",
+ extra_params=None,
+ extra_headers=None,
+ timeout=1,
+ requires_authentication=True,
+ ):
+ return expected_response
+
+ mocker.patch.object(
+ PulseQueryManager, "async_query", side_effect=mock_async_query
+ )
+
+ # When
+ response = await query_manager.async_query("/api/data")
+
+ # Then
+ assert response == expected_response
+
+ # can handle HTTP 503 Service Unavailable response with fixed mock function
+ @pytest.mark.asyncio
+ async def test_handle_http_503_service_unavailable_fixed_fixed(self, mocker):
+ # Given
+ connection_status = PulseConnectionStatus()
+ connection_properties = PulseConnectionProperties("https://portal.adtpulse.com")
+ query_manager = PulseQueryManager(connection_status, connection_properties)
+
+ async def mock_async_query(
+ uri,
+ method="GET",
+ extra_params=None,
+ extra_headers=None,
+ timeout=1,
+ requires_authentication=True,
+ ):
+ backoff = PulseBackoff(
+ "Query:GET /api/data",
+ connection_status.get_backoff().initial_backoff_interval,
+ threshold=0,
+ debug_locks=query_manager._debug_locks,
+ detailed_debug_logging=connection_properties.detailed_debug_logging,
+ )
+ retry_time = await backoff.wait_for_backoff()
+ if retry_time is None:
+ retry_time = time() + 1 # Set a future time for retry_time
+ else:
+ retry_time += time() + 1
+ raise PulseServiceTemporarilyUnavailableError(backoff, retry_time)
+
+ mocker.patch.object(
+ PulseQueryManager, "async_query", side_effect=mock_async_query
+ )
+
+ # When
+ with pytest.raises(PulseServiceTemporarilyUnavailableError):
+ await query_manager.async_query("/api/data")
+
+ # Then
+ # PulseServiceTemporarilyUnavailableError should be raised
+
+ # can handle HTTP 429 Too Many Requests response with the recommended fix
+ @pytest.mark.asyncio
+ async def test_handle_http_429_with_fix(
+ self, mocker, mocked_server_responses, get_mocked_url
+ ):
+ # Given
+ connection_status = PulseConnectionStatus()
+ connection_properties = PulseConnectionProperties("https://portal.adtpulse.com")
+ url = get_mocked_url("/api/data")
+ query_manager = PulseQueryManager(connection_status, connection_properties)
+ expected_response = (429, "Too Many Requests", URL(url))
+ mocked_server_responses.get(url, status=expected_response[0])
+ # When
+ with pytest.raises(PulseServiceTemporarilyUnavailableError) as exc_info:
+ await query_manager.async_query("/api/data", requires_authentication=False)
+
+ # Then
+ assert "Pulse service temporarily unavailable until indefinitely" in str(
+ exc_info.value
+ )
+ assert exc_info.value.backoff == connection_status.get_backoff()
+
+ # can handle ClientConnectionError with 'Connection refused' message using default parameter values
+ @pytest.mark.asyncio
+ async def test_handle_client_connection_error_with_default_values_fixed_fixed(
+ self, mocked_server_responses, get_mocked_url
+ ):
+ # Given
+ connection_status = PulseConnectionStatus()
+ connection_properties = PulseConnectionProperties("https://portal.adtpulse.com")
+ query_manager = PulseQueryManager(connection_status, connection_properties)
+ expected_error_message = "Connection refused"
+
+ expected_response = (None, None, None, None)
+
+ # When
+ with pytest.raises(PulseServerConnectionError) as exc_info:
+ await query_manager.async_query("/api/data", requires_authentication=False)
+
+ # Then
+ assert (
+ str(exc_info.value)
+ == f"PulseServerConnectionError: Pulse server error: {expected_error_message}: GET {get_mocked_url('/api/data')}"
+ )
+
+ # can handle ClientConnectorError with non-TimeoutError or BrokenPipeError os_error
+ @pytest.mark.asyncio
+ async def test_handle_client_connector_error_with_fix(
+ self, mocked_server_responses, get_mocked_url
+ ):
+ connection_status = PulseConnectionStatus()
+ connection_properties = PulseConnectionProperties("https://portal.adtpulse.com")
+ query_manager = PulseQueryManager(connection_status, connection_properties)
+ expected_error = PulseServerConnectionError(
+ "Error occurred", connection_status.get_backoff()
+ )
+ ck = ConnectionKey("portal.adtpulse.com", 443, True, None, None, None, None)
+ url = get_mocked_url("/api/data")
+ mocked_server_responses.get(url, exception=expected_error)
+ # When, Then
+ with pytest.raises(PulseServerConnectionError) as ex:
+ await query_manager.async_query("/api/data", requires_authentication=False)
+ assert str(ex.value) == str(expected_error)
+
+ # can handle Retry-After header in HTTP response
+ @pytest.mark.timeout(70)
+ @pytest.mark.asyncio
+ async def test_handle_retry_after_header(
+ self, mocked_server_responses, get_mocked_url, freeze_time_to_now
+ ):
+ # Given
+ connection_status = PulseConnectionStatus()
+ connection_properties = PulseConnectionProperties("https://portal.adtpulse.com")
+ query_manager = PulseQueryManager(connection_status, connection_properties)
+ url = get_mocked_url("/api/data")
+ expected_response = (429, "Too Many Requests", URL(url))
+ expected_retry_after = "60"
+ mocked_server_responses.get(
+ url,
+ status=expected_response[0],
+ headers={"Retry-After": expected_retry_after},
+ )
+
+ # When
+ with pytest.raises(PulseServiceTemporarilyUnavailableError) as exc_info:
+ await query_manager.async_query("/api/data", requires_authentication=False)
+
+ # Then
+ assert exc_info.value.backoff == connection_status.get_backoff()
+ assert exc_info.value.retry_time == int(expected_retry_after) + int(time())
+
+ # can handle ServerTimeoutError
+ @pytest.mark.asyncio
+ async def test_handle_server_timeout_error(self, mocker):
+ # Given
+ connection_status = PulseConnectionStatus()
+ connection_properties = PulseConnectionProperties("https://portal.adtpulse.com")
+ query_manager = PulseQueryManager(connection_status, connection_properties)
+
+ async def mock_async_query(
+ uri,
+ method="GET",
+ extra_params=None,
+ extra_headers=None,
+ timeout=1,
+ requires_authentication=True,
+ ):
+ raise PulseServerConnectionError("message", connection_status.get_backoff())
+
+ mocker.patch.object(
+ PulseQueryManager, "async_query", side_effect=mock_async_query
+ )
+
+ # When
+ with pytest.raises(PulseServerConnectionError):
+ await query_manager.async_query(
+ "/api/data",
+ method="GET",
+ extra_params=None,
+ extra_headers=None,
+ timeout=1,
+ requires_authentication=True,
+ )
+
+ # can handle missing API version
+ @pytest.mark.asyncio
+ async def test_handle_missing_api_version(self, mocker):
+ # Given
+ connection_status = PulseConnectionStatus()
+ connection_properties = PulseConnectionProperties("https://portal.adtpulse.com")
+ query_manager = PulseQueryManager(connection_status, connection_properties)
+ expected_response = (200, "Response", URL("http://example.com"))
+
+ async def mock_async_query(
+ uri, method, extra_params, extra_headers, timeout, requires_authentication
+ ):
+ return expected_response
+
+ mocker.patch.object(
+ PulseQueryManager, "async_query", side_effect=mock_async_query
+ )
+
+ # When
+ await query_manager.async_fetch_version()
+
+ # Then
+ assert connection_properties.api_version is not None
+
+ # can handle valid method parameter
+ @pytest.mark.asyncio
+ async def test_valid_method_parameter(
+ self, mocked_server_responses, get_mocked_url, mocker
+ ):
+ # Given
+ connection_status = PulseConnectionStatus()
+ connection_properties = PulseConnectionProperties("https://portal.adtpulse.com")
+ query_manager = PulseQueryManager(connection_status, connection_properties)
+ expected_response = (200, "Response", URL(get_mocked_url("/api/data")))
+
+ mocked_server_responses.get(
+ get_mocked_url("/api/data"), status=200, body="Response"
+ )
+ # When
+ result = await query_manager.async_query(
+ "/api/data", requires_authentication=False
+ )
+
+ # Then
+ assert result == expected_response
+
+ assert query_manager._connection_properties.api_version == MOCKED_API_VERSION
+
+ # can handle ClientResponseError and include backoff in the raised exception
+ @pytest.mark.asyncio
+ async def test_handle_client_response_error_with_backoff(
+ self, mocked_server_responses, get_mocked_url
+ ):
+ # Given
+ connection_status = PulseConnectionStatus()
+ connection_properties = PulseConnectionProperties("https://portal.adtpulse.com")
+ query_manager = PulseQueryManager(connection_status, connection_properties)
+ expected_response = (429, "Too Many Requests", URL("http://example.com"))
+ mocked_server_responses.get(get_mocked_url("/api/data"), status=429)
+ # When
+ with pytest.raises(PulseServiceTemporarilyUnavailableError) as exc_info:
+ await query_manager.async_query("/api/data", requires_authentication=False)
+
+ # can handle invalid Retry-After header value format
+ @pytest.mark.asyncio
+ async def test_handle_invalid_retry_after_header_format(self, mocker):
+ # Given
+ connection_status = PulseConnectionStatus()
+ connection_properties = PulseConnectionProperties("https://portal.adtpulse.com")
+ query_manager = PulseQueryManager(connection_status, connection_properties)
+ expected_response = (503, "Service Unavailable", URL("http://example.com"))
+ retry_after_header = "invalid_format"
+
+ async def mock_async_query(
+ uri, method, extra_params, extra_headers, timeout, requires_authentication
+ ):
+ return expected_response
+
+ mocker.patch.object(
+ PulseQueryManager, "async_query", side_effect=mock_async_query
+ )
+
+ # When
+ response = await query_manager.async_query(
+ "/api/data",
+ method="GET",
+ extra_params=None,
+ extra_headers={"Retry-After": retry_after_header},
+ timeout=1,
+ requires_authentication=True,
+ )
+
+ # Then
+ assert response == expected_response
+
+ # can handle non-numeric Retry-After header value
+ @pytest.mark.asyncio
+ async def test_handle_non_numeric_retry_after_header_value(self, mocker):
+ # Given
+ connection_status = PulseConnectionStatus()
+ connection_properties = PulseConnectionProperties("https://portal.adtpulse.com")
+ query_manager = PulseQueryManager(connection_status, connection_properties)
+ expected_response = (503, "Service Unavailable", URL("http://example.com"))
+ retry_after_header = "Thu, 01 Jan 1970 00:00:00 GMT"
+
+ async def mock_async_query(
+ uri,
+ method="GET",
+ extra_params=None,
+ extra_headers=None,
+ timeout=1,
+ requires_authentication=True,
+ ):
+ return expected_response
+
+ mocker.patch.object(
+ PulseQueryManager, "async_query", side_effect=mock_async_query
+ )
+
+ # When
+ response = await query_manager.async_query(
+ "/api/data", extra_headers={"Retry-After": retry_after_header}
+ )
+
+ # Then
+ assert response == expected_response
+
+ # can handle TimeoutError
+ @pytest.mark.asyncio
+ async def test_handle_timeout_error_fixed(self, mocker):
+ # Given
+ connection_status = PulseConnectionStatus()
+ connection_properties = PulseConnectionProperties("https://portal.adtpulse.com")
+ query_manager = PulseQueryManager(connection_status, connection_properties)
+ expected_response = (0, None, None, None)
+
+ async def mock_async_query(
+ uri,
+ method="GET",
+ extra_params=None,
+ extra_headers=None,
+ timeout=1,
+ requires_authentication=True,
+ ):
+ raise TimeoutError()
+
+ mocker.patch.object(
+ PulseQueryManager, "async_query", side_effect=mock_async_query
+ )
+
+ # When
+ with pytest.raises(TimeoutError):
+ await query_manager.async_query(
+ "/api/data",
+ method="GET",
+ extra_params=None,
+ extra_headers=None,
+ timeout=1,
+ requires_authentication=True,
+ )
+
+ # Then
+ assert True
+
+ # can handle PulseClientConnectionError
+ @pytest.mark.asyncio
+ async def test_handle_pulse_client_connection_error(
+ self, mocked_server_responses, get_mocked_url
+ ):
+ # Given
+ connection_status = PulseConnectionStatus()
+ connection_properties = PulseConnectionProperties("https://portal.adtpulse.com")
+ query_manager = PulseQueryManager(connection_status, connection_properties)
+ mocked_server_responses.get(
+ get_mocked_url("/api/data"),
+ exception=ClientConnectionError("Network error"),
+ repeat=True,
+ )
+ # When
+ with pytest.raises(PulseClientConnectionError):
+ await query_manager.async_query("/api/data", requires_authentication=False)
+
+ # can handle ServerDisconnectedError
+ @pytest.mark.asyncio
+ async def test_handle_server_disconnected_error(
+ self, mocked_server_responses, get_mocked_url
+ ):
+ # Given
+ connection_status = PulseConnectionStatus()
+ connection_properties = PulseConnectionProperties("https://portal.adtpulse.com")
+ query_manager = PulseQueryManager(connection_status, connection_properties)
+ mocked_server_responses.get(
+ get_mocked_url("/api/data"), exception=ServerDisconnectedError
+ )
+ # When
+ with pytest.raises(PulseServerConnectionError):
+ await query_manager.async_query("/api/data", requires_authentication=False)
+
+ # can handle PulseNotLoggedInError
+ @pytest.mark.asyncio
+ async def test_handle_pulse_not_logged_in_error(self, mocker):
+ # Given
+ connection_status = PulseConnectionStatus()
+ connection_properties = PulseConnectionProperties("https://portal.adtpulse.com")
+ query_manager = PulseQueryManager(connection_status, connection_properties)
+ expected_response = (200, "Response", URL("http://example.com"))
+
+ async def mock_async_query(
+ uri,
+ method="GET",
+ extra_params=None,
+ extra_headers=None,
+ timeout=1,
+ requires_authentication=True,
+ ):
+ backoff = PulseBackoff(
+ "Query:GET /api/data",
+ connection_status.get_backoff().initial_backoff_interval,
+ threshold=0,
+ debug_locks=query_manager._debug_locks,
+ detailed_debug_logging=connection_properties.detailed_debug_logging,
+ )
+ raise PulseNotLoggedInError()
+
+ mocker.patch.object(
+ PulseQueryManager, "async_query", side_effect=mock_async_query
+ )
+
+ # When, Then
+ with pytest.raises(PulseNotLoggedInError):
+ await query_manager.async_query("/api/data")
diff --git a/tests/test_pulse_async.py b/tests/test_pulse_async.py
new file mode 100644
index 0000000..63aba61
--- /dev/null
+++ b/tests/test_pulse_async.py
@@ -0,0 +1,691 @@
+"""Test Pulse Query Manager."""
+
+import asyncio
+import re
+from collections.abc import Generator
+from http.client import responses
+from typing import Any, Callable, Literal
+from unittest.mock import AsyncMock, patch
+
+import aiohttp
+import pytest
+from aioresponses import aioresponses
+from pytest_mock import MockerFixture
+
+from conftest import LoginType, add_custom_response, add_logout, add_signin
+from pyadtpulse.const import (
+ ADT_DEFAULT_POLL_INTERVAL,
+ ADT_DEVICE_URI,
+ ADT_LOGIN_URI,
+ ADT_LOGOUT_URI,
+ ADT_MFA_FAIL_URI,
+ ADT_ORB_URI,
+ ADT_SUMMARY_URI,
+ ADT_SYNC_CHECK_URI,
+ ADT_TIMEOUT_URI,
+ DEFAULT_API_HOST,
+)
+from pyadtpulse.exceptions import (
+ PulseAuthenticationError,
+ PulseConnectionError,
+ PulseGatewayOfflineError,
+ PulseMFARequiredError,
+ PulseNotLoggedInError,
+ PulseServerConnectionError,
+)
+from pyadtpulse.pulse_authentication_properties import PulseAuthenticationProperties
+from pyadtpulse.pyadtpulse_async import PyADTPulseAsync
+
+DEFAULT_SYNC_CHECK = "234532-456432-0"
+NEXT_SYNC_CHECK = "234533-456432-0"
+
+
+def set_keepalive(get_mocked_url, mocked_server_responses, repeat: bool = False):
+ m = mocked_server_responses
+ m.post(
+ get_mocked_url(ADT_TIMEOUT_URI),
+ body="",
+ content_type="text/html",
+ repeat=repeat,
+ )
+
+
+@pytest.mark.asyncio
+async def test_mocked_responses(
+ read_file: Callable[..., str],
+ mocked_server_responses: aioresponses,
+ get_mocked_mapped_static_responses: dict[str, str],
+ get_mocked_url: Callable[..., str],
+ extract_ids_from_data_directory: list[str],
+):
+ """Fixture to test mocked responses."""
+ static_responses = get_mocked_mapped_static_responses
+ m = mocked_server_responses
+ async with aiohttp.ClientSession() as session:
+ for url, file_name in static_responses.items():
+ # Make an HTTP request to the URL
+ response = await session.get(url)
+
+ # Assert the status code is 200
+ assert response.status == 200
+
+ # Assert the content matches the content of the file
+ expected_content = read_file(file_name)
+ actual_content = await response.text()
+ assert actual_content == expected_content
+ devices = extract_ids_from_data_directory
+ for device_id in devices:
+ response = await session.get(
+ f"{get_mocked_url(ADT_DEVICE_URI)}?id={device_id}"
+ )
+ assert response.status == 200
+ expected_content = read_file(f"device_{device_id}.html")
+ actual_content = await response.text()
+ assert actual_content == expected_content
+
+ # redirects
+ add_custom_response(
+ mocked_server_responses,
+ read_file,
+ get_mocked_url(ADT_LOGIN_URI),
+ file_name="signin.html",
+ )
+ response = await session.get(f"{DEFAULT_API_HOST}/", allow_redirects=True)
+ assert response.status == 200
+ actual_content = await response.text()
+ expected_content = read_file("signin.html")
+ assert actual_content == expected_content
+ add_custom_response(
+ mocked_server_responses,
+ read_file,
+ get_mocked_url(ADT_LOGIN_URI),
+ file_name="signin.html",
+ )
+ response = await session.get(get_mocked_url(ADT_LOGOUT_URI))
+ assert response.status == 200
+ expected_content = read_file("signin.html")
+ actual_content = await response.text()
+ assert actual_content == expected_content
+ add_signin(
+ LoginType.SUCCESS, mocked_server_responses, get_mocked_url, read_file
+ )
+ response = await session.post(get_mocked_url(ADT_LOGIN_URI))
+ assert response.status == 200
+ expected_content = read_file(static_responses[get_mocked_url(ADT_SUMMARY_URI)])
+ actual_content = await response.text()
+ assert actual_content == expected_content
+ pattern = re.compile(rf"{re.escape(get_mocked_url(ADT_SYNC_CHECK_URI))}/?.*$")
+ m.get(pattern, status=200, body="1-0-0", content_type="text/html")
+ response = await session.get(
+ get_mocked_url(ADT_SYNC_CHECK_URI), params={"ts": "first call"}
+ )
+ assert response.status == 200
+ actual_content = await response.text()
+ expected_content = "1-0-0"
+ assert actual_content == expected_content
+ set_keepalive(get_mocked_url, m)
+ response = await session.post(get_mocked_url(ADT_TIMEOUT_URI))
+
+
+# Wrapper fixture exposing wait_for_update as an AsyncMock for call inspection; currently unused — candidate for removal
+@pytest.fixture
+def wrap_wait_for_update():
+ with patch.object(
+ PyADTPulseAsync,
+ "wait_for_update",
+ new_callable=AsyncMock,
+ spec=PyADTPulseAsync,
+ wraps=PyADTPulseAsync.wait_for_update,
+ ) as wait_for_update:
+ yield wait_for_update
+
+
+@pytest.fixture
+@pytest.mark.asyncio
+async def adt_pulse_instance(
+ mocked_server_responses: aioresponses,
+ extract_ids_from_data_directory: list[str],
+ get_mocked_url: Callable[..., str],
+ read_file: Callable[..., str],
+):
+ """Create an instance of PyADTPulseAsync and login."""
+ p = PyADTPulseAsync("testuser@example.com", "testpassword", "testfingerprint")
+ add_signin(LoginType.SUCCESS, mocked_server_responses, get_mocked_url, read_file)
+ await p.async_login()
+ # Assertions after login
+ assert p._pulse_connection_status.authenticated_flag.is_set()
+ assert p._pulse_connection_status.get_backoff().backoff_count == 0
+ assert p._pulse_connection.login_in_progress is False
+ assert p._pulse_connection.login_backoff.backoff_count == 0
+ assert p.site.name == "Robert Lippmann"
+ assert p._timeout_task is not None
+ assert p._timeout_task.get_name() == p._get_timeout_task_name()
+ assert p._sync_task is None
+ assert p.site.zones_as_dict is not None
+ assert len(p.site.zones_as_dict) == len(extract_ids_from_data_directory) - 3
+ return p, mocked_server_responses
+
+
+@pytest.mark.asyncio
+async def test_login(
+ adt_pulse_instance: tuple[PyADTPulseAsync, Any],
+ extract_ids_from_data_directory: list[str],
+ get_mocked_url: Callable[..., str],
+ read_file: Callable[..., str],
+):
+ """Fixture to test login."""
+ p, response = await adt_pulse_instance
+ # make sure everything is there on logout
+
+ assert p._pulse_connection_status.get_backoff().backoff_count == 0
+ assert p._pulse_connection.login_in_progress is False
+ assert p._pulse_connection.login_backoff.backoff_count == 0
+ add_logout(response, get_mocked_url, read_file)
+ add_custom_response(
+ response,
+ read_file,
+ get_mocked_url(ADT_LOGIN_URI),
+ file_name=LoginType.SUCCESS.value,
+ )
+ await p.async_logout()
+ assert not p._pulse_connection_status.authenticated_flag.is_set()
+ assert p._pulse_connection_status.get_backoff().backoff_count == 0
+ assert p._pulse_connection.login_in_progress is False
+ assert p._pulse_connection.login_backoff.backoff_count == 0
+ assert p.site.name == "Robert Lippmann"
+ assert p.site.zones_as_dict is not None
+ assert len(p.site.zones_as_dict) == len(extract_ids_from_data_directory) - 3
+ assert p._timeout_task is None
+
+
+@pytest.mark.asyncio
+@pytest.mark.parametrize(
+ "test_type",
+ (
+ (LoginType.FAIL, PulseAuthenticationError),
+ (LoginType.NOT_SIGNED_IN, PulseNotLoggedInError),
+ (LoginType.MFA, PulseMFARequiredError),
+ ),
+)
+async def test_login_failures(
+ adt_pulse_instance: tuple[PyADTPulseAsync, Any],
+ get_mocked_url: Callable[..., str],
+ read_file: Callable[..., str],
+ test_type: Literal[LoginType.FAIL, LoginType.NOT_SIGNED_IN, LoginType.MFA],
+):
+ p, response = await adt_pulse_instance
+ assert p._pulse_connection.login_backoff.backoff_count == 0, "initial"
+ add_logout(response, get_mocked_url, read_file)
+ await p.async_logout()
+ assert p._pulse_connection.login_backoff.backoff_count == 0, "post logout"
+
+ assert p._pulse_connection.login_backoff.backoff_count == 0, str(test_type[0])
+ add_signin(test_type[0], response, get_mocked_url, read_file)
+ with pytest.raises(test_type[1]):
+ await p.async_login()
+ await asyncio.sleep(1)
+ assert p._timeout_task is None or p._timeout_task.done()
+ assert p._pulse_connection.login_backoff.backoff_count == 0, str(test_type)
+ add_signin(LoginType.SUCCESS, response, get_mocked_url, read_file)
+ await p.async_login()
+ assert p._pulse_connection.login_backoff.backoff_count == 0
+
+
+async def do_wait_for_update(p: PyADTPulseAsync, shutdown_event: asyncio.Event):
+ while not shutdown_event.is_set():
+ try:
+ await p.wait_for_update()
+ except asyncio.CancelledError:
+ break
+
+
+@pytest.mark.asyncio
+async def test_wait_for_update(
+ adt_pulse_instance: tuple[PyADTPulseAsync, Any],
+ get_mocked_url: Callable[..., str],
+ read_file: Callable[..., str],
+):
+ p, responses = await adt_pulse_instance
+ shutdown_event = asyncio.Event()
+ task = asyncio.create_task(do_wait_for_update(p, shutdown_event))
+ await p.async_logout()
+ assert p._sync_task is None
+ assert p.site.name == "Robert Lippmann"
+ with pytest.raises(PulseNotLoggedInError):
+ await task
+
+ # test exceptions
+ # check we can't wait for update if not logged in
+ with pytest.raises(PulseNotLoggedInError):
+ await p.wait_for_update()
+
+ add_signin(LoginType.SUCCESS, responses, get_mocked_url, read_file)
+ await p.async_login()
+ await p.async_logout()
+
+
+def make_sync_check_pattern(get_mocked_url):
+ return re.compile(rf"{re.escape(get_mocked_url(ADT_SYNC_CHECK_URI))}/?.*$")
+
+
+@pytest.mark.asyncio
+@pytest.mark.parametrize("test_requests", (False, True))
+@pytest.mark.timeout(60)
+async def test_orb_update(
+ adt_pulse_instance: tuple[PyADTPulseAsync, Any],
+ get_mocked_url: Callable[..., str],
+ read_file: Callable[..., str],
+ test_requests: bool,
+):
+ p, response = await adt_pulse_instance
+ pattern = make_sync_check_pattern(get_mocked_url)
+
+ def signal_status_change():
+ response.get(
+ pattern,
+ body=DEFAULT_SYNC_CHECK,
+ content_type="text/html",
+ )
+ response.get(pattern, body="1-0-0", content_type="text/html")
+ response.get(pattern, body="2-0-0", content_type="text/html")
+ response.get(
+ pattern,
+ body=NEXT_SYNC_CHECK,
+ content_type="text/html",
+ )
+ response.get(
+ pattern,
+ body=NEXT_SYNC_CHECK,
+ content_type="text/html",
+ )
+
+ def open_patio():
+ response.get(
+ get_mocked_url(ADT_ORB_URI),
+ body=read_file("orb_patio_opened.html"),
+ content_type="text/html",
+ )
+ signal_status_change()
+
+ def close_all():
+ response.get(
+ get_mocked_url(ADT_ORB_URI),
+ body=read_file("orb.html"),
+ content_type="text/html",
+ )
+ signal_status_change()
+
+ def open_garage():
+ response.get(
+ get_mocked_url(ADT_ORB_URI),
+ body=read_file("orb_garage.html"),
+ content_type="text/html",
+ )
+ signal_status_change()
+
+ def open_both_garage_and_patio():
+ response.get(
+ get_mocked_url(ADT_ORB_URI),
+ body=read_file("orb_patio_garage.html"),
+ content_type="text/html",
+ )
+ signal_status_change()
+
+ def setup_sync_check():
+ open_patio()
+ close_all()
+
+ async def test_sync_check_and_orb():
+ code, content, _ = await p._pulse_connection.async_query(
+ ADT_ORB_URI, requires_authentication=False
+ )
+ assert code == 200
+ assert content == read_file("orb_patio_opened.html")
+ await asyncio.sleep(1)
+ code, content, _ = await p._pulse_connection.async_query(
+ ADT_ORB_URI, requires_authentication=False
+ )
+ assert code == 200
+ assert content == read_file("orb.html")
+ await asyncio.sleep(1)
+ for _ in range(1):
+ code, content, _ = await p._pulse_connection.async_query(
+ ADT_SYNC_CHECK_URI, requires_authentication=False
+ )
+ assert code == 200
+ assert content == DEFAULT_SYNC_CHECK
+ code, content, _ = await p._pulse_connection.async_query(
+ ADT_SYNC_CHECK_URI, requires_authentication=False
+ )
+ assert code == 200
+ assert content == "1-0-0"
+ code, content, _ = await p._pulse_connection.async_query(
+ ADT_SYNC_CHECK_URI, requires_authentication=False
+ )
+ assert code == 200
+ assert content == "2-0-0"
+ code, content, _ = await p._pulse_connection.async_query(
+ ADT_SYNC_CHECK_URI, requires_authentication=False
+ )
+ assert code == 200
+ assert content == NEXT_SYNC_CHECK
+ code, content, _ = await p._pulse_connection.async_query(
+ ADT_SYNC_CHECK_URI, requires_authentication=False
+ )
+ assert code == 200
+ assert content == NEXT_SYNC_CHECK
+
+    # do a first run through to make sure aioresponses will work ok
+ if not test_requests:
+ setup_sync_check()
+ await test_sync_check_and_orb()
+ await p.async_logout()
+ assert p._sync_task is None
+ assert p._timeout_task is None
+ return
+ await p.async_logout()
+ for j in range(2):
+ if j == 0:
+ zone = 11
+ else:
+ zone = 10
+ for i in range(2):
+ if i == 0:
+ if j == 0:
+ open_patio()
+ else:
+ open_garage()
+ state = "Open"
+ else:
+ close_all()
+ state = "OK"
+ add_signin(LoginType.SUCCESS, response, get_mocked_url, read_file)
+ await p.async_login()
+ await p.wait_for_update()
+ await p.async_logout()
+ assert len(p.site.zones) == 13
+ assert p.site.zones_as_dict[zone].state == state
+ assert p._sync_task is not None
+
+
+@pytest.mark.asyncio
+async def test_keepalive_check(
+ adt_pulse_instance: tuple[PyADTPulseAsync, Any],
+ get_mocked_url: Callable[..., str],
+ read_file: Callable[..., str],
+):
+ p, response = await adt_pulse_instance
+ assert p._timeout_task is not None
+ await asyncio.sleep(0)
+
+
+@pytest.mark.asyncio
+async def test_infinite_sync_check(
+ adt_pulse_instance: tuple[PyADTPulseAsync, Any],
+ get_mocked_url: Callable[..., str],
+ read_file: Callable[..., str],
+):
+ p, response = await adt_pulse_instance
+ pattern = re.compile(rf"{re.escape(get_mocked_url(ADT_SYNC_CHECK_URI))}/?.*$")
+ response.get(
+ pattern,
+ body=DEFAULT_SYNC_CHECK,
+ content_type="text/html",
+ repeat=True,
+ )
+ shutdown_event = asyncio.Event()
+ shutdown_event.clear()
+ task = asyncio.create_task(do_wait_for_update(p, shutdown_event))
+ await asyncio.sleep(5)
+ shutdown_event.set()
+ task.cancel()
+ await task
+
+
+@pytest.mark.asyncio
+async def test_sync_check_errors(
+ adt_pulse_instance: tuple[PyADTPulseAsync, Any],
+ get_mocked_url: Callable[..., str],
+ read_file: Callable[..., str],
+ mocker: Callable[..., Generator[MockerFixture, None, None]],
+):
+ p, response = await adt_pulse_instance
+ pattern = re.compile(rf"{re.escape(get_mocked_url(ADT_SYNC_CHECK_URI))}/?.*$")
+
+ shutdown_event = asyncio.Event()
+ shutdown_event.clear()
+ for test_type in (
+ (LoginType.FAIL, PulseAuthenticationError),
+ (LoginType.NOT_SIGNED_IN, PulseNotLoggedInError),
+ (LoginType.MFA, PulseMFARequiredError),
+ ):
+ redirect = ADT_LOGIN_URI
+ if test_type[0] == LoginType.MFA:
+ redirect = ADT_MFA_FAIL_URI
+ response.get(
+ pattern, status=302, headers={"Location": get_mocked_url(redirect)}
+ )
+ add_signin(test_type[0], response, get_mocked_url, read_file)
+ task = asyncio.create_task(do_wait_for_update(p, shutdown_event))
+ with pytest.raises(test_type[1]):
+ await task
+ await asyncio.sleep(0.5)
+ assert p._sync_task is None or p._sync_task.done()
+ assert p._timeout_task is None or p._timeout_task.done()
+ if test_type[0] == LoginType.MFA:
+ # pop the post MFA redirect from the responses
+ with pytest.raises(PulseMFARequiredError):
+ await p.async_login()
+ add_signin(LoginType.SUCCESS, response, get_mocked_url, read_file)
+ if test_type[0] != LoginType.LOCKED:
+ await p.async_login()
+
+
+@pytest.mark.asyncio
+async def test_multiple_login(
+    adt_pulse_instance: tuple[PyADTPulseAsync, Any],
+    extract_ids_from_data_directory: list[str],
+    get_mocked_url: Callable[..., str],
+    read_file: Callable[..., str],
+):
+    """Verify zone data stays intact across repeated login/logout cycles.
+
+    The zone dictionary must remain populated and keep the same size after
+    logout and after each subsequent re-login.
+    """
+    p, response = await adt_pulse_instance
+    add_signin(LoginType.SUCCESS, response, get_mocked_url, read_file)
+    await p.async_login()
+    assert p.site.zones_as_dict is not None
+    # presumably 3 ids in the data directory are not zones — TODO confirm
+    assert len(p.site.zones_as_dict) == len(extract_ids_from_data_directory) - 3
+    add_logout(response, get_mocked_url, read_file)
+    await p.async_logout()
+    # zone data must survive logout
+    assert p.site.zones_as_dict is not None
+    assert len(p.site.zones_as_dict) == len(extract_ids_from_data_directory) - 3
+    add_signin(LoginType.SUCCESS, response, get_mocked_url, read_file)
+    await p.async_login()
+    assert p.site.zones_as_dict is not None
+    assert len(p.site.zones_as_dict) == len(extract_ids_from_data_directory) - 3
+    # NOTE(review): this signin response is queued but no login follows —
+    # verify whether a final async_login() call was intended here
+    add_signin(LoginType.SUCCESS, response, get_mocked_url, read_file)
+    assert p.site.zones_as_dict is not None
+    assert len(p.site.zones_as_dict) == len(extract_ids_from_data_directory) - 3
+
+
+@pytest.mark.timeout(180)
+@pytest.mark.asyncio
+async def test_gateway_offline(
+ get_mocked_url: Callable[..., str],
+ read_file: Callable[..., str],
+ adt_pulse_instance: tuple[PyADTPulseAsync, Any],
+):
+ p, response = await adt_pulse_instance
+ pattern = make_sync_check_pattern(get_mocked_url)
+ response.get(
+ get_mocked_url(ADT_ORB_URI), body=read_file("orb_gateway_offline.html")
+ )
+ response.get(
+ pattern,
+ body=DEFAULT_SYNC_CHECK,
+ content_type="text/html",
+ )
+ response.get(
+ pattern,
+ body="1-0-0",
+ content_type="text/html",
+ )
+ response.get(
+ pattern,
+ body=DEFAULT_SYNC_CHECK,
+ content_type="text/html",
+ )
+ response.get(
+ pattern,
+ body=DEFAULT_SYNC_CHECK,
+ content_type="text/html",
+ )
+ num_backoffs = 3
+ for i in range(3):
+ response.get(
+ pattern,
+ body=DEFAULT_SYNC_CHECK,
+ content_type="text/html",
+ )
+ # success case
+ response.get(get_mocked_url(ADT_ORB_URI), body=read_file("orb.html"), repeat=True)
+ response.get(
+ pattern,
+ body="1-0-0",
+ content_type="text/html",
+ )
+ response.get(
+ pattern, body=DEFAULT_SYNC_CHECK, content_type="text/html", repeat=True
+ )
+ add_logout(response, get_mocked_url, read_file)
+ assert p.site.gateway.poll_interval == 2.0
+ # FIXME: why + 2?
+ for i in range(num_backoffs + 2):
+ with pytest.raises(PulseGatewayOfflineError):
+ await p.wait_for_update()
+
+ await p.wait_for_update()
+ assert p.site.gateway.is_online
+ assert p.site.gateway.backoff.get_current_backoff_interval() == 0
+
+ await p.async_logout()
+
+
+@pytest.mark.asyncio
+async def test_not_logged_in(
+    mocked_server_responses: aioresponses,
+    get_mocked_url: Callable[..., str],
+    read_file: Callable[..., str],
+):
+    """Verify wait_for_update raises PulseNotLoggedInError when logged out.
+
+    The error must be raised before any login, after an explicit logout,
+    and for a wait_for_update task that is running when logout happens.
+    """
+    p = PyADTPulseAsync("testuser@example.com", "testpassword", "testfingerprint")
+    add_signin(LoginType.SUCCESS, mocked_server_responses, get_mocked_url, read_file)
+    add_logout(mocked_server_responses, get_mocked_url, read_file)
+    # never logged in yet: must raise, repeatedly
+    with pytest.raises(PulseNotLoggedInError):
+        await p.wait_for_update()
+    with pytest.raises(PulseNotLoggedInError):
+        await p.wait_for_update()
+    await p.async_login()
+    await p.async_logout()
+    # logged out again: must raise, repeatedly
+    with pytest.raises(PulseNotLoggedInError):
+        await p.wait_for_update()
+    with pytest.raises(PulseNotLoggedInError):
+        await p.wait_for_update()
+    add_signin(LoginType.SUCCESS, mocked_server_responses, get_mocked_url, read_file)
+    add_logout(mocked_server_responses, get_mocked_url, read_file)
+    pattern = make_sync_check_pattern(get_mocked_url)
+    mocked_server_responses.get(
+        pattern, body=DEFAULT_SYNC_CHECK, content_type="text/html", repeat=True
+    )
+    mocked_server_responses.get(
+        get_mocked_url(ADT_ORB_URI), body=read_file("orb.html"), repeat=True
+    )
+    await p.async_login()
+    task = asyncio.create_task(do_wait_for_update(p, asyncio.Event()))
+    # let the background task run through several poll cycles
+    await asyncio.sleep(ADT_DEFAULT_POLL_INTERVAL * 5)
+    await p.async_logout()
+    # the in-flight wait task must also observe the logout
+    with pytest.raises(PulseNotLoggedInError):
+        await task
+    with pytest.raises(PulseNotLoggedInError):
+        await p.wait_for_update()
+    await asyncio.sleep(ADT_DEFAULT_POLL_INTERVAL * 2)
+    with pytest.raises(PulseNotLoggedInError):
+        await p.wait_for_update()
+
+
+@pytest.mark.asyncio
+@pytest.mark.timeout(120)
+async def test_connection_fails_wait_for_update(
+    mocked_server_responses: aioresponses,
+    get_mocked_url: Callable[..., str],
+    read_file: Callable[..., str],
+):
+    """Verify wait_for_update raises PulseConnectionError on connection loss.
+
+    No sync-check responses are queued, so the background poll has nothing
+    to hit and must surface a connection error.
+    """
+    p = PyADTPulseAsync("testuser@example.com", "testpassword", "testfingerprint")
+    add_signin(LoginType.SUCCESS, mocked_server_responses, get_mocked_url, read_file)
+    add_logout(mocked_server_responses, get_mocked_url, read_file)
+    mocked_server_responses.get(
+        get_mocked_url(ADT_ORB_URI), body=read_file("orb.html"), repeat=True
+    )
+    await p.async_login()
+    with pytest.raises(PulseConnectionError):
+        await do_wait_for_update(p, asyncio.Event())
+    await p.async_logout()
+
+
+@pytest.mark.timeout(180)
+@pytest.mark.asyncio
+async def test_sync_check_disconnect(
+    adt_pulse_instance: tuple[PyADTPulseAsync, Any],
+    read_file: Callable[..., str],
+    get_mocked_url: Callable[..., str],
+):
+    """Verify connection backoff grows on sync-check failure, then recovers.
+
+    Only one sync-check response is queued; subsequent polls fail and must
+    keep raising PulseServerConnectionError while the connection backoff
+    interval grows. Once responses resume, wait_for_update must succeed.
+    """
+    p, responses = await adt_pulse_instance
+    add_logout(responses, get_mocked_url, read_file)
+    pattern = make_sync_check_pattern(get_mocked_url)
+    responses.get(pattern, body=DEFAULT_SYNC_CHECK, content_type="text/html")
+    responses.get(get_mocked_url(ADT_ORB_URI), body=read_file("orb.html"), repeat=True)
+    # fail until the backoff interval has grown to at least 15 seconds
+    while p._pulse_connection_status.get_backoff().get_current_backoff_interval() < 15:
+        with pytest.raises(PulseServerConnectionError):
+            await p.wait_for_update()
+    # check recovery
+    responses.get(pattern, body="1-0-0", content_type="text/html")
+    responses.get(
+        pattern, body=DEFAULT_SYNC_CHECK, content_type="text/html", repeat=True
+    )
+    await p.wait_for_update()
+    await p.async_logout()
+
+
+@pytest.mark.asyncio
+@pytest.mark.timeout(60)
+async def test_sync_check_relogin(
+    adt_pulse_instance: tuple[PyADTPulseAsync, Any],
+    get_mocked_url: Callable[..., str],
+    read_file: Callable[..., str],
+):
+    """Verify a sync-check redirect to the login page triggers re-login.
+
+    After a 307 redirect to the login URI during sync check, the client must
+    log in again transparently; success is observed via an increased
+    last_login_time.
+    """
+    p, responses = await adt_pulse_instance
+    pa: PulseAuthenticationProperties = p._authentication_properties
+    login_time = pa.last_login_time
+    # fail redirect
+    add_signin(LoginType.NOT_SIGNED_IN, responses, get_mocked_url, read_file)
+    # successful login afterward
+    add_signin(LoginType.SUCCESS, responses, get_mocked_url, read_file)
+    add_logout(responses, get_mocked_url, read_file)
+    pattern = make_sync_check_pattern(get_mocked_url)
+    # a few normal polls before the redirect is served
+    for _ in range(3):
+        responses.get(pattern, body=DEFAULT_SYNC_CHECK, content_type="text/html")
+    responses.get(
+        pattern,
+        body="",
+        content_type="text/html",
+        status=307,
+        headers={"Location": get_mocked_url(ADT_LOGIN_URI)},
+    )
+    # resume normal operation
+    # signal update to stop wait for update
+    responses.get(pattern, body="1-0-0", content_type="text/html")
+    responses.get(
+        pattern, body=DEFAULT_SYNC_CHECK, content_type="text/html", repeat=True
+    )
+    responses.get(get_mocked_url(ADT_ORB_URI), body=read_file("orb.html"), repeat=True)
+
+    await p.wait_for_update()
+    # a fresh login must have happened behind the scenes
+    assert p._authentication_properties.last_login_time > login_time
+    await p.async_logout()
diff --git a/tests/test_pulse_connection.py b/tests/test_pulse_connection.py
new file mode 100644
index 0000000..9bb67f0
--- /dev/null
+++ b/tests/test_pulse_connection.py
@@ -0,0 +1,222 @@
+"""Test Pulse Connection."""
+
+import asyncio
+import datetime
+
+import pytest
+from lxml import html
+
+from conftest import LoginType, add_custom_response, add_signin
+from pyadtpulse.const import ADT_LOGIN_URI, DEFAULT_API_HOST
+from pyadtpulse.exceptions import (
+ PulseAccountLockedError,
+ PulseAuthenticationError,
+ PulseMFARequiredError,
+ PulseServerConnectionError,
+)
+from pyadtpulse.pulse_authentication_properties import PulseAuthenticationProperties
+from pyadtpulse.pulse_connection import PulseConnection
+from pyadtpulse.pulse_connection_properties import PulseConnectionProperties
+from pyadtpulse.pulse_connection_status import PulseConnectionStatus
+from pyadtpulse.pulse_query_manager import MAX_REQUERY_RETRIES
+
+
+def setup_pulse_connection() -> PulseConnection:
+    """Build a PulseConnection wired to fresh status/properties objects.
+
+    Returns:
+        PulseConnection using the default API host and dummy credentials.
+    """
+    s = PulseConnectionStatus()
+    pcp = PulseConnectionProperties(DEFAULT_API_HOST)
+    pa = PulseAuthenticationProperties(
+        "test@example.com", "testpassword", "testfingerprint"
+    )
+    pc = PulseConnection(s, pcp, pa)
+    return pc
+
+
+@pytest.mark.asyncio
+async def test_login(mocked_server_responses, read_file, mock_sleep, get_mocked_url):
+    """Test a successful login/logout round trip through PulseConnection.
+
+    Login must return the summary page, set the authenticated flag, and
+    never sleep or back off; logout must clear the authenticated flag.
+    """
+    pc = setup_pulse_connection()
+    add_signin(LoginType.SUCCESS, mocked_server_responses, get_mocked_url, read_file)
+    # first call to signin post is successful in conftest.py
+    result = await pc.async_do_login_query()
+    assert result is not None
+    assert html.tostring(result) == read_file("summary.html")
+    assert mock_sleep.call_count == 0
+    assert pc.login_in_progress is False
+    assert pc._login_backoff.backoff_count == 0
+    assert pc._connection_status.authenticated_flag.is_set()
+    # so logout won't fail
+    add_custom_response(
+        mocked_server_responses, read_file, get_mocked_url(ADT_LOGIN_URI)
+    )
+    await pc.async_do_logout_query()
+    assert not pc._connection_status.authenticated_flag.is_set()
+    assert mock_sleep.call_count == 0
+    assert pc._login_backoff.backoff_count == 0
+
+
+@pytest.mark.asyncio
+async def test_login_failure_server_down(mock_server_down):
+    """Verify login raises PulseServerConnectionError when the server is down.
+
+    The login backoff must not be incremented by a server-side failure.
+    """
+    pc = setup_pulse_connection()
+    with pytest.raises(PulseServerConnectionError):
+        await pc.async_do_login_query()
+    assert pc.login_in_progress is False
+    assert pc._login_backoff.backoff_count == 0
+
+
+@pytest.mark.asyncio
+async def test_multiple_login(
+    mocked_server_responses, get_mocked_url, read_file, mock_sleep
+):
+    """Verify repeated logins and the connection backoff accounting.
+
+    Two successful logins must not sleep; logins with no queued response
+    must raise PulseServerConnectionError and grow the connection backoff
+    (never the login backoff); a later success must clear the state again.
+    """
+    pc = setup_pulse_connection()
+    add_signin(LoginType.SUCCESS, mocked_server_responses, get_mocked_url, read_file)
+    result = await pc.async_do_login_query()
+    assert result is not None
+    assert html.tostring(result) == read_file("summary.html")
+    assert mock_sleep.call_count == 0
+    assert pc.login_in_progress is False
+    assert pc._login_backoff.backoff_count == 0
+    assert pc._connection_status.authenticated_flag.is_set()
+    add_signin(LoginType.SUCCESS, mocked_server_responses, get_mocked_url, read_file)
+    await pc.async_do_login_query()
+    assert mock_sleep.call_count == 0
+    assert pc.login_in_progress is False
+    assert pc._login_backoff.backoff_count == 0
+    assert pc._connection_status.get_backoff().backoff_count == 0
+    assert pc._connection_status.authenticated_flag.is_set()
+    # this should fail
+    with pytest.raises(PulseServerConnectionError):
+        await pc.async_do_login_query()
+    # retried MAX_REQUERY_RETRIES - 1 times before giving up
+    assert mock_sleep.call_count == MAX_REQUERY_RETRIES - 1
+    assert pc.login_in_progress is False
+    assert pc._login_backoff.backoff_count == 0
+    assert pc._connection_status.get_backoff().backoff_count == 1
+    assert not pc._connection_status.authenticated_flag.is_set()
+    assert not pc.is_connected
+    with pytest.raises(PulseServerConnectionError):
+        await pc.async_do_login_query()
+    assert pc._login_backoff.backoff_count == 0
+    # 2 retries first time, 1 for the connection backoff
+    assert mock_sleep.call_count == MAX_REQUERY_RETRIES
+    assert pc.login_in_progress is False
+
+    assert pc._connection_status.get_backoff().backoff_count == 2
+    assert not pc._connection_status.authenticated_flag.is_set()
+    assert not pc.is_connected
+    add_signin(LoginType.SUCCESS, mocked_server_responses, get_mocked_url, read_file)
+    await pc.async_do_login_query()
+    # will just to a connection backoff
+    assert mock_sleep.call_count == MAX_REQUERY_RETRIES + 1
+    assert pc.login_in_progress is False
+    assert pc._login_backoff.backoff_count == 0
+    assert pc._connection_status.authenticated_flag.is_set()
+
+    add_signin(LoginType.SUCCESS, mocked_server_responses, get_mocked_url, read_file)
+    await pc.async_do_login_query()
+    # shouldn't sleep at all
+    assert mock_sleep.call_count == MAX_REQUERY_RETRIES + 1
+    assert pc.login_in_progress is False
+    assert pc._login_backoff.backoff_count == 0
+    assert pc._connection_status.authenticated_flag.is_set()
+
+
+@pytest.mark.asyncio
+async def test_account_lockout(
+    mocked_server_responses, mock_sleep, get_mocked_url, read_file, freeze_time_to_now
+):
+    """Verify account-lockout handling around the 30-minute lockout window.
+
+    A locked account must raise PulseAccountLockedError without sleeping or
+    incrementing the login backoff; after the lockout window elapses
+    (simulated via frozen time), login must succeed again.
+    """
+    pc = setup_pulse_connection()
+    add_signin(LoginType.SUCCESS, mocked_server_responses, get_mocked_url, read_file)
+    await pc.async_do_login_query()
+    assert mock_sleep.call_count == 0
+    assert pc.login_in_progress is False
+    assert pc._login_backoff.backoff_count == 0
+    assert pc.is_connected
+    assert pc._connection_status.authenticated_flag.is_set()
+    add_signin(LoginType.LOCKED, mocked_server_responses, get_mocked_url, read_file)
+    with pytest.raises(PulseAccountLockedError):
+        await pc.async_do_login_query()
+    # won't sleep yet
+    assert not pc.is_connected
+    assert not pc._connection_status.authenticated_flag.is_set()
+    # don't set backoff on locked account, just set expiration time on backoff
+    assert pc._login_backoff.backoff_count == 0
+    assert mock_sleep.call_count == 0
+    # advance past the 30-minute lockout window
+    freeze_time_to_now.tick(delta=datetime.timedelta(seconds=(60 * 30) + 1))
+    add_signin(LoginType.SUCCESS, mocked_server_responses, get_mocked_url, read_file)
+    await pc.async_do_login_query()
+    assert mock_sleep.call_count == 0
+    assert pc.is_connected
+    assert pc._connection_status.authenticated_flag.is_set()
+    freeze_time_to_now.tick(delta=datetime.timedelta(seconds=60 * 30 + 1))
+    add_signin(LoginType.LOCKED, mocked_server_responses, get_mocked_url, read_file)
+    with pytest.raises(PulseAccountLockedError):
+        await pc.async_do_login_query()
+    assert pc._login_backoff.backoff_count == 0
+    assert mock_sleep.call_count == 0
+
+
+@pytest.mark.asyncio
+async def test_invalid_credentials(
+    mocked_server_responses, mock_sleep, get_mocked_url, read_file
+):
+    """Verify bad credentials raise PulseAuthenticationError with no backoff.
+
+    Authentication failures must neither sleep nor increment the login
+    backoff, on repeated attempts.
+    """
+    pc = setup_pulse_connection()
+    add_signin(LoginType.SUCCESS, mocked_server_responses, get_mocked_url, read_file)
+    await pc.async_do_login_query()
+    assert mock_sleep.call_count == 0
+    assert pc.login_in_progress is False
+    assert pc._login_backoff.backoff_count == 0
+    add_signin(LoginType.FAIL, mocked_server_responses, get_mocked_url, read_file)
+    with pytest.raises(PulseAuthenticationError):
+        await pc.async_do_login_query()
+    assert pc._login_backoff.backoff_count == 0
+    assert mock_sleep.call_count == 0
+    add_signin(LoginType.FAIL, mocked_server_responses, get_mocked_url, read_file)
+
+    with pytest.raises(PulseAuthenticationError):
+        await pc.async_do_login_query()
+    assert pc._login_backoff.backoff_count == 0
+    assert mock_sleep.call_count == 0
+    # NOTE(review): this signin response is queued but no login follows —
+    # verify whether a final async_do_login_query() call was intended
+    add_signin(LoginType.SUCCESS, mocked_server_responses, get_mocked_url, read_file)
+    assert pc._login_backoff.backoff_count == 0
+    assert mock_sleep.call_count == 0
+
+
+@pytest.mark.asyncio
+async def test_mfa_failure(mocked_server_responses, get_mocked_url, read_file):
+    """Verify MFA-required responses raise PulseMFARequiredError, no backoff.
+
+    Repeated MFA failures must not increment the login backoff, and a
+    subsequent normal login must still succeed.
+    """
+    pc = setup_pulse_connection()
+    add_signin(LoginType.SUCCESS, mocked_server_responses, get_mocked_url, read_file)
+    await pc.async_do_login_query()
+    assert pc.login_in_progress is False
+    assert pc._login_backoff.backoff_count == 0
+    add_signin(LoginType.MFA, mocked_server_responses, get_mocked_url, read_file)
+    with pytest.raises(PulseMFARequiredError):
+        await pc.async_do_login_query()
+    assert pc._login_backoff.backoff_count == 0
+    add_signin(LoginType.MFA, mocked_server_responses, get_mocked_url, read_file)
+    with pytest.raises(PulseMFARequiredError):
+        await pc.async_do_login_query()
+    assert pc._login_backoff.backoff_count == 0
+    add_signin(LoginType.SUCCESS, mocked_server_responses, get_mocked_url, read_file)
+    await pc.async_do_login_query()
+    assert pc._login_backoff.backoff_count == 0
+
+
+@pytest.mark.asyncio
+async def test_only_single_login(mocked_server_responses, get_mocked_url, read_file):
+    """Verify only one login runs at a time when attempts are concurrent.
+
+    With an artificial login backoff delaying the first task, the second
+    task must complete while login_in_progress is still set for the first;
+    only after the first task finishes is the connection established.
+    """
+    async def login_task():
+        await pc.async_do_login_query()
+
+    pc = setup_pulse_connection()
+    add_signin(LoginType.SUCCESS, mocked_server_responses, get_mocked_url, read_file)
+    # delay one task for a little bit
+    for i in range(4):
+        pc._login_backoff.increment_backoff()
+    task1 = asyncio.create_task(login_task())
+    task2 = asyncio.create_task(login_task())
+    await task2
+    # first task still holds the login while it waits out the backoff
+    assert pc.login_in_progress
+    assert not pc.is_connected
+    assert not task1.done()
+    await task1
+    assert not pc.login_in_progress
+    assert pc.is_connected
diff --git a/tests/test_pulse_connection_properties.py b/tests/test_pulse_connection_properties.py
new file mode 100644
index 0000000..0051325
--- /dev/null
+++ b/tests/test_pulse_connection_properties.py
@@ -0,0 +1,374 @@
+# Generated by CodiumAI
+from asyncio import AbstractEventLoop
+
+import pytest
+from aiohttp import ClientSession
+
+from pyadtpulse.const import ADT_DEFAULT_HTTP_USER_AGENT, API_HOST_CA, DEFAULT_API_HOST
+from pyadtpulse.pulse_connection_properties import PulseConnectionProperties
+
+
+class TestPulseConnectionProperties:
+ # Initialize PulseConnectionProperties with valid host
+ @pytest.mark.asyncio
+ async def test_initialize_with_valid_host(self):
+ # Arrange
+ host = "https://portal.adtpulse.com"
+ user_agent = ADT_DEFAULT_HTTP_USER_AGENT["User-Agent"]
+ detailed_debug_logging = False
+ debug_locks = False
+
+ # Act
+ connection_properties = PulseConnectionProperties(
+ host, user_agent, detailed_debug_logging, debug_locks
+ )
+
+ # Assert
+ assert connection_properties.service_host == host
+ assert connection_properties._user_agent == user_agent
+ assert connection_properties._detailed_debug_logging == detailed_debug_logging
+ assert connection_properties._debug_locks == debug_locks
+
+ # Set service host to default API host
+ @pytest.mark.asyncio
+ async def test_set_service_host_to_default_api_host(self):
+ # Arrange
+ host = DEFAULT_API_HOST
+ user_agent = ADT_DEFAULT_HTTP_USER_AGENT["User-Agent"]
+ detailed_debug_logging = False
+ debug_locks = False
+ connection_properties = PulseConnectionProperties(
+ host, user_agent, detailed_debug_logging, debug_locks
+ )
+
+ # Act
+ connection_properties.service_host = DEFAULT_API_HOST
+
+ # Assert
+ assert connection_properties.service_host == DEFAULT_API_HOST
+
+ # Set service host to API host CA
+ @pytest.mark.asyncio
+ async def test_set_service_host_to_api_host_ca(self):
+ # Arrange
+ host = DEFAULT_API_HOST
+ user_agent = ADT_DEFAULT_HTTP_USER_AGENT["User-Agent"]
+ detailed_debug_logging = False
+ debug_locks = False
+ connection_properties = PulseConnectionProperties(
+ host, user_agent, detailed_debug_logging, debug_locks
+ )
+
+ # Act
+ connection_properties.service_host = API_HOST_CA
+
+ # Assert
+ assert connection_properties.service_host == API_HOST_CA
+
+ # Get the service host
+ @pytest.mark.asyncio
+ async def test_get_service_host(self):
+ # Arrange
+ host = "https://portal.adtpulse.com"
+ user_agent = ADT_DEFAULT_HTTP_USER_AGENT["User-Agent"]
+ detailed_debug_logging = False
+ debug_locks = False
+ connection_properties = PulseConnectionProperties(
+ host, user_agent, detailed_debug_logging, debug_locks
+ )
+
+ # Act & Assert
+ assert connection_properties.service_host == host
+
+ # Set detailed debug logging to True
+ @pytest.mark.asyncio
+ async def test_set_detailed_debug_logging_to_true(self):
+ # Arrange
+ host = "https://portal.adtpulse.com"
+ user_agent = ADT_DEFAULT_HTTP_USER_AGENT["User-Agent"]
+ detailed_debug_logging = False
+ debug_locks = False
+ connection_properties = PulseConnectionProperties(
+ host, user_agent, detailed_debug_logging, debug_locks
+ )
+
+ # Act
+ connection_properties.detailed_debug_logging = True
+
+ # Assert
+ assert connection_properties.detailed_debug_logging is True
+
+ # Set detailed debug logging to False
+ @pytest.mark.asyncio
+ async def test_set_detailed_debug_logging_to_false(self):
+ # Arrange
+ host = "https://portal.adtpulse.com"
+ user_agent = ADT_DEFAULT_HTTP_USER_AGENT["User-Agent"]
+ detailed_debug_logging = True
+ debug_locks = False
+ connection_properties = PulseConnectionProperties(
+ host, user_agent, detailed_debug_logging, debug_locks
+ )
+
+ # Act
+ connection_properties.detailed_debug_logging = False
+
+ # Assert
+ assert connection_properties.detailed_debug_logging is False
+
+ # Initialize PulseConnectionProperties with invalid host raises ValueError
+ @pytest.mark.asyncio
+ async def test_initialize_with_invalid_host_raises_value_error(self):
+ # Arrange
+ host = ""
+ user_agent = ADT_DEFAULT_HTTP_USER_AGENT["User-Agent"]
+ detailed_debug_logging = False
+ debug_locks = False
+
+ # Act & Assert
+ with pytest.raises(ValueError):
+ PulseConnectionProperties(
+ host, user_agent, detailed_debug_logging, debug_locks
+ )
+
+ # Set service host to valid host does not raise ValueError
+ @pytest.mark.asyncio
+ async def test_set_service_host_to_valid_host_does_not_raise_value_error(self):
+ # Arrange
+ host = "https://portal.adtpulse.com"
+ user_agent = ADT_DEFAULT_HTTP_USER_AGENT["User-Agent"]
+ detailed_debug_logging = False
+ debug_locks = False
+ connection_properties = PulseConnectionProperties(
+ host, user_agent, detailed_debug_logging, debug_locks
+ )
+
+ # Act & Assert
+ connection_properties.service_host = host
+
+ # Set API version to invalid version raises ValueError
+ @pytest.mark.asyncio
+ async def test_set_api_version_to_invalid_version_raises_value_error(self):
+ # Arrange
+ host = "https://portal.adtpulse.com"
+ user_agent = ADT_DEFAULT_HTTP_USER_AGENT["User-Agent"]
+ detailed_debug_logging = False
+ debug_locks = False
+ connection_properties = PulseConnectionProperties(
+ host, user_agent, detailed_debug_logging, debug_locks
+ )
+
+ # Act & Assert
+ with pytest.raises(ValueError):
+ connection_properties.api_version = "1.0"
+
+ # Check sync without setting the event loop raises RuntimeError
+ @pytest.mark.asyncio
+ async def test_check_sync_without_setting_event_loop_raises_runtime_error(self):
+ # Arrange
+ host = "https://portal.adtpulse.com"
+ user_agent = ADT_DEFAULT_HTTP_USER_AGENT["User-Agent"]
+ detailed_debug_logging = False
+ debug_locks = False
+ connection_properties = PulseConnectionProperties(
+ host, user_agent, detailed_debug_logging, debug_locks
+ )
+
+ # Act & Assert
+ with pytest.raises(RuntimeError):
+ connection_properties.check_sync("Sync login was not performed")
+
+ # Get the detailed debug logging flag
+ @pytest.mark.asyncio
+ async def test_get_detailed_debug_logging_flag(self):
+ # Arrange
+ host = "https://portal.adtpulse.com"
+ user_agent = ADT_DEFAULT_HTTP_USER_AGENT["User-Agent"]
+ detailed_debug_logging = True
+ debug_locks = False
+ connection_properties = PulseConnectionProperties(
+ host, user_agent, detailed_debug_logging, debug_locks
+ )
+
+ # Act
+ result = connection_properties.detailed_debug_logging
+
+ # Assert
+ assert result == detailed_debug_logging
+
+ # Set debug locks to True with a valid service host
+ @pytest.mark.asyncio
+ async def test_set_debug_locks_to_true_with_valid_service_host(self):
+ # Arrange
+ host = "https://portal.adtpulse.com"
+ user_agent = ADT_DEFAULT_HTTP_USER_AGENT["User-Agent"]
+ detailed_debug_logging = False
+ debug_locks = True
+
+ # Act
+ connection_properties = PulseConnectionProperties(
+ host, user_agent, detailed_debug_logging, debug_locks
+ )
+
+ # Assert
+ assert connection_properties.service_host == host
+ assert connection_properties._user_agent == user_agent
+ assert connection_properties._detailed_debug_logging == detailed_debug_logging
+ assert connection_properties._debug_locks == debug_locks
+
+ # Get the debug locks flag
+ @pytest.mark.asyncio
+ async def test_get_debug_locks_flag(self):
+ # Arrange
+ host = "https://portal.adtpulse.com"
+ user_agent = ADT_DEFAULT_HTTP_USER_AGENT["User-Agent"]
+ detailed_debug_logging = False
+ debug_locks = True
+
+ # Act
+ connection_properties = PulseConnectionProperties(
+ host, user_agent, detailed_debug_logging, debug_locks
+ )
+
+ # Assert
+ assert connection_properties.debug_locks == debug_locks
+
+ # Set debug locks to False with a valid service host
+ @pytest.mark.asyncio
+ async def test_set_debug_locks_to_false_with_valid_service_host(self):
+ # Arrange
+ host = "https://portal.adtpulse.com"
+ user_agent = ADT_DEFAULT_HTTP_USER_AGENT["User-Agent"]
+ detailed_debug_logging = False
+ debug_locks = False
+
+ # Act
+ connection_properties = PulseConnectionProperties(
+ host, user_agent, detailed_debug_logging, debug_locks
+ )
+
+ # Assert
+ assert connection_properties.debug_locks == debug_locks
+
+ # Set the event loop
+ @pytest.mark.asyncio
+ async def test_set_event_loop(self):
+ # Arrange
+ host = "https://portal.adtpulse.com"
+ user_agent = ADT_DEFAULT_HTTP_USER_AGENT["User-Agent"]
+ detailed_debug_logging = False
+ debug_locks = False
+ loop = AbstractEventLoop()
+
+ # Act
+ connection_properties = PulseConnectionProperties(
+ host, user_agent, detailed_debug_logging, debug_locks
+ )
+ connection_properties.loop = loop
+
+ # Assert
+ assert connection_properties.loop == loop
+
+ # Get the event loop
+ @pytest.mark.asyncio
+ async def test_get_event_loop(self):
+ # Arrange
+ host = "https://portal.adtpulse.com"
+ user_agent = ADT_DEFAULT_HTTP_USER_AGENT["User-Agent"]
+ detailed_debug_logging = False
+ debug_locks = False
+ connection_properties = PulseConnectionProperties(
+ host, user_agent, detailed_debug_logging, debug_locks
+ )
+
+ # Act
+ event_loop = connection_properties.loop
+
+ # Assert
+ assert event_loop is None
+
+ # Set the API version
+ @pytest.mark.asyncio
+ async def test_set_api_version(self):
+ # Arrange
+ host = "https://portal.adtpulse.com"
+ user_agent = ADT_DEFAULT_HTTP_USER_AGENT["User-Agent"]
+ detailed_debug_logging = False
+ debug_locks = False
+ version = "26.0.0-subpatch"
+
+ connection_properties = PulseConnectionProperties(
+ host, user_agent, detailed_debug_logging, debug_locks
+ )
+
+ # Act
+ with pytest.raises(ValueError):
+ connection_properties.api_version = version
+ version = "26.0.0"
+ with pytest.raises(ValueError):
+ connection_properties.api_version = version
+ version = "25.0.0-22"
+ with pytest.raises(ValueError):
+ connection_properties.api_version = version
+ version = "26.0.0-22"
+ connection_properties.api_version = version
+ # Assert
+ assert connection_properties.api_version == version
+
+ # Get the API version
+ @pytest.mark.asyncio
+ async def test_get_api_version(self):
+ # Arrange
+ host = "https://portal.adtpulse.com"
+ user_agent = ADT_DEFAULT_HTTP_USER_AGENT["User-Agent"]
+ detailed_debug_logging = False
+ debug_locks = False
+ response_path = "example.com/api/v1"
+
+ connection_properties = PulseConnectionProperties(
+ host, user_agent, detailed_debug_logging, debug_locks
+ )
+
+ # Act
+ api_version = connection_properties.get_api_version(response_path)
+
+ # Assert
+ assert api_version is None
+
+ # Get the session with a valid host
+ @pytest.mark.asyncio
+ async def test_get_session_with_valid_host(self):
+ # Arrange
+ host = "https://portal.adtpulse.com"
+ user_agent = ADT_DEFAULT_HTTP_USER_AGENT["User-Agent"]
+ detailed_debug_logging = False
+ debug_locks = False
+ connection_properties = PulseConnectionProperties(
+ host, user_agent, detailed_debug_logging, debug_locks
+ )
+
+ # Act
+ session = connection_properties.session
+
+ # Assert
+ assert isinstance(session, ClientSession)
+ assert connection_properties._session == session
+
+ # Check async after setting the event loop raises RuntimeError
+ @pytest.mark.asyncio
+ async def test_check_async_after_setting_event_loop_raises_runtime_error(self):
+ # Arrange
+ host = "https://portal.adtpulse.com"
+ user_agent = ADT_DEFAULT_HTTP_USER_AGENT["User-Agent"]
+ detailed_debug_logging = False
+ debug_locks = False
+ connection_properties = PulseConnectionProperties(
+ host, user_agent, detailed_debug_logging, debug_locks
+ )
+
+ # Act
+ connection_properties.loop = AbstractEventLoop()
+
+ # Assert
+ with pytest.raises(RuntimeError):
+ connection_properties.check_async("Async login not performed")
diff --git a/tests/test_pulse_connection_status.py b/tests/test_pulse_connection_status.py
new file mode 100644
index 0000000..7a0260d
--- /dev/null
+++ b/tests/test_pulse_connection_status.py
@@ -0,0 +1,186 @@
+# Generated by CodiumAI
+import pytest
+
+from pyadtpulse.pulse_backoff import PulseBackoff
+from pyadtpulse.pulse_connection_status import PulseConnectionStatus
+
+
+class TestPulseConnectionStatus:
+ # PulseConnectionStatus can be initialized without errors
+ def test_initialized_without_errors(self):
+ """
+ Test that PulseConnectionStatus can be initialized without errors.
+ """
+ pcs = PulseConnectionStatus()
+ assert pcs is not None
+
+ # authenticated_flag can be accessed without errors
+ def test_access_authenticated_flag(self):
+ """
+ Test that authenticated_flag can be accessed without errors.
+ """
+ pcs = PulseConnectionStatus()
+ authenticated_flag = pcs.authenticated_flag
+ assert authenticated_flag is not None
+
+ # retry_after can be accessed without errors
+ def test_access_retry_after(self):
+ """
+ Test that retry_after can be accessed without errors.
+ """
+ pcs = PulseConnectionStatus()
+ retry_after = pcs.retry_after
+ assert retry_after is not None
+
+ # retry_after can be set without errors
+ def test_set_retry_after(self):
+ """
+ Test that retry_after can be set without errors.
+ """
+ import time
+
+ pcs = PulseConnectionStatus()
+ current_time = time.time()
+ retry_time = current_time + 1000
+ pcs.retry_after = retry_time
+ assert pcs.retry_after == retry_time
+
+ # get_backoff returns a PulseBackoff object
+ def test_get_backoff(self):
+ """
+ Test that get_backoff returns a PulseBackoff object.
+ """
+ pcs = PulseConnectionStatus()
+ backoff = pcs.get_backoff()
+ assert isinstance(backoff, PulseBackoff)
+
+ # increment_backoff can be called without errors
+ def test_increment_backoff(self):
+ """
+ Test that increment_backoff can be called without errors.
+ """
+ pcs = PulseConnectionStatus()
+ pcs.get_backoff().increment_backoff()
+
+ # retry_after can be set to a time in the future
+ def test_set_retry_after_past_time_fixed(self):
+ """
+        Test that setting retry_after to a time in the past raises ValueError.
+ """
+ import time
+
+ pcs = PulseConnectionStatus()
+ current_time = time.time()
+ past_time = current_time - 10.0
+ with pytest.raises(ValueError):
+ pcs.retry_after = past_time
+
+ # retry_after can be set to a time in the future
+ def test_set_retry_after_future_time_fixed(self):
+ """
+ Test that retry_after can be set to a time in the future.
+ """
+ import time
+
+ pcs = PulseConnectionStatus()
+ pcs.retry_after = time.time() + 10.0
+ assert pcs.retry_after > time.time()
+
+ # retry_after can be set to a positive value greater than the current time
+ def test_set_retry_after_negative_value_fixed(self):
+ """
+ Test that retry_after can be set to a positive value greater than the current time.
+ """
+ from time import time
+
+ pcs = PulseConnectionStatus()
+ retry_after_time = time() + 10.0
+ pcs.retry_after = retry_after_time
+ assert pcs.retry_after == retry_after_time
+
+ # retry_after can be set to a very large value
+ def test_set_retry_after_large_value(self):
+ """
+ Test that retry_after can be set to a very large value.
+ """
+ pcs = PulseConnectionStatus()
+ pcs.retry_after = float("inf")
+ assert pcs.retry_after == float("inf")
+
+ # retry_after can be set to a non-numeric value
+ def test_set_retry_after_non_numeric_value_fixed(self):
+ """
+        Test that retry_after can be set to a numeric timestamp in the future.
+ """
+ import time
+
+ pcs = PulseConnectionStatus()
+ retry_after_time = time.time() + 5.0
+ pcs.retry_after = retry_after_time
+ assert pcs.retry_after == retry_after_time
+
+ # reset_backoff can be called without errors
+ def test_reset_backoff(self):
+ """
+ Test that reset_backoff can be called without errors.
+ """
+ pcs = PulseConnectionStatus()
+ pcs.get_backoff().reset_backoff()
+
+ # authenticated_flag can be set to True
+ def test_authenticated_flag_set_to_true(self):
+ """
+ Test that authenticated_flag can be set to True.
+ """
+ pcs = PulseConnectionStatus()
+ pcs.authenticated_flag.set()
+ assert pcs.authenticated_flag.is_set()
+
+ # authenticated_flag can be set to False
+ def test_authenticated_flag_false(self):
+ """
+ Test that authenticated_flag can be set to False.
+ """
+ pcs = PulseConnectionStatus()
+ pcs.authenticated_flag.clear()
+ assert not pcs.authenticated_flag.is_set()
+
+ # Test that get_backoff returns the same PulseBackoff object every time it is called.
+ def test_get_backoff_returns_same_object(self):
+ """
+ Test that get_backoff returns the same PulseBackoff object every time it is called.
+ Arrange:
+ - Create an instance of PulseConnectionStatus
+ Act:
+ - Call get_backoff method twice
+ Assert:
+ - The returned PulseBackoff objects are the same
+ """
+ pcs = PulseConnectionStatus()
+ backoff1 = pcs.get_backoff()
+ backoff2 = pcs.get_backoff()
+ assert backoff1 is backoff2
+
+ # increment_backoff increases the backoff count by 1
+ def test_increment_backoff2(self):
+ """
+ Test that increment_backoff increases the backoff count by 1.
+ """
+ pcs = PulseConnectionStatus()
+ backoff = pcs.get_backoff()
+ initial_backoff_count = backoff.backoff_count
+ backoff.increment_backoff()
+ new_backoff_count = backoff.backoff_count
+ assert new_backoff_count == initial_backoff_count + 1
+
+ # reset_backoff sets the backoff count to 0 and the expiration time to 0.0
+ def test_reset_backoff_sets_backoff_count_and_expiration_time(self):
+ """
+ Test that reset_backoff sets the backoff count to 0 and the expiration time to 0.0.
+ """
+ pcs = PulseConnectionStatus()
+ backoff = pcs.get_backoff()
+ backoff.increment_backoff()
+ backoff.reset_backoff()
+ assert backoff.backoff_count == 0
+ assert backoff.expiration_time == 0.0
diff --git a/tests/test_pulse_query_manager.py b/tests/test_pulse_query_manager.py
new file mode 100644
index 0000000..29cb56e
--- /dev/null
+++ b/tests/test_pulse_query_manager.py
@@ -0,0 +1,397 @@
+"""Test Pulse Query Manager."""
+
+import logging
+import asyncio
+import time
+from datetime import datetime, timedelta
+from typing import Any, Callable
+
+import pytest
+from aiohttp import client_exceptions, client_reqrep
+from aioresponses import aioresponses
+from freezegun.api import FrozenDateTimeFactory, StepTickTimeFactory
+from lxml import html
+
+from conftest import MOCKED_API_VERSION
+from pyadtpulse.const import ADT_ORB_URI, DEFAULT_API_HOST
+from pyadtpulse.exceptions import (
+ PulseClientConnectionError,
+ PulseConnectionError,
+ PulseServerConnectionError,
+ PulseServiceTemporarilyUnavailableError,
+)
+from pyadtpulse.pulse_connection_properties import PulseConnectionProperties
+from pyadtpulse.pulse_connection_status import PulseConnectionStatus
+from pyadtpulse.pulse_query_manager import MAX_REQUERY_RETRIES, PulseQueryManager
+
+
+@pytest.mark.asyncio
+async def test_fetch_version(mocked_server_responses: aioresponses):
+ """Test fetch version."""
+ s = PulseConnectionStatus()
+ cp = PulseConnectionProperties(DEFAULT_API_HOST)
+ p = PulseQueryManager(s, cp)
+ await p.async_fetch_version()
+ assert cp.api_version == MOCKED_API_VERSION
+
+
+@pytest.mark.asyncio
+async def test_fetch_version_fail(mock_server_down: aioresponses):
+ """Test fetch version."""
+ s = PulseConnectionStatus()
+ cp = PulseConnectionProperties(DEFAULT_API_HOST)
+ p = PulseQueryManager(s, cp)
+ with pytest.raises(PulseServerConnectionError):
+ await p.async_fetch_version()
+ assert s.get_backoff().backoff_count == 1
+ with pytest.raises(PulseServerConnectionError):
+ await p.async_query(ADT_ORB_URI, requires_authentication=False)
+ assert s.get_backoff().backoff_count == 2
+ assert s.get_backoff().get_current_backoff_interval() == 2.0
+
+
+@pytest.mark.asyncio
+async def test_fetch_version_eventually_succeeds(
+ mock_server_temporarily_down: aioresponses,
+):
+ """Test fetch version."""
+ s = PulseConnectionStatus()
+ cp = PulseConnectionProperties(DEFAULT_API_HOST)
+ p = PulseQueryManager(s, cp)
+ with pytest.raises(PulseServerConnectionError):
+ await p.async_fetch_version()
+ assert s.get_backoff().backoff_count == 1
+ with pytest.raises(PulseServerConnectionError):
+ await p.async_query(ADT_ORB_URI, requires_authentication=False)
+ assert s.get_backoff().backoff_count == 2
+ assert s.get_backoff().get_current_backoff_interval() == 2.0
+ await p.async_fetch_version()
+ assert s.get_backoff().backoff_count == 0
+
+
+@pytest.mark.asyncio
+async def test_query_orb(
+ mocked_server_responses: aioresponses,
+ read_file: Callable[..., str],
+ mock_sleep: Any,
+ get_mocked_connection_properties: PulseConnectionProperties,
+):
+ """Test query orb.
+
+ We also check that it waits for authenticated flag.
+ """
+
+ async def query_orb_task():
+ return await p.query_orb(logging.DEBUG, "Failed to query orb")
+
+ s = PulseConnectionStatus()
+ cp = get_mocked_connection_properties
+ p = PulseQueryManager(s, cp)
+ orb_file = read_file("orb.html")
+ mocked_server_responses.get(
+ cp.make_url(ADT_ORB_URI), status=200, content_type="text/html", body=orb_file
+ )
+ task = asyncio.create_task(query_orb_task())
+ await asyncio.sleep(2)
+ assert not task.done()
+ s.authenticated_flag.set()
+ await task
+ assert task.done()
+ result_etree = task.result()
+ assert result_etree is not None
+ assert html.tostring(result_etree) == orb_file
+ assert mock_sleep.call_count == 1 # from the asyncio.sleep call above
+ mocked_server_responses.get(cp.make_url(ADT_ORB_URI), status=404)
+ with pytest.raises(PulseServerConnectionError):
+ result = await query_orb_task()
+ assert mock_sleep.call_count == 1
+ assert s.get_backoff().backoff_count == 1
+ mocked_server_responses.get(
+ cp.make_url(ADT_ORB_URI), status=200, content_type="text/html", body=orb_file
+ )
+ result = await query_orb_task()
+ assert result is not None
+ assert html.tostring(result) == orb_file
+ assert mock_sleep.call_count == 2
+
+
+@pytest.mark.asyncio
+async def test_retry_after(
+ mocked_server_responses: aioresponses,
+ freeze_time_to_now: FrozenDateTimeFactory | StepTickTimeFactory,
+ get_mocked_connection_properties: PulseConnectionProperties,
+ mock_sleep: Any,
+):
+ """Test retry after."""
+
+ retry_after_time = 120
+ frozen_time = freeze_time_to_now
+ now = time.time()
+
+ s = PulseConnectionStatus()
+ cp = get_mocked_connection_properties
+ p = PulseQueryManager(s, cp)
+
+ mocked_server_responses.get(
+ cp.make_url(ADT_ORB_URI),
+ status=429,
+ headers={"Retry-After": str(retry_after_time)},
+ )
+ with pytest.raises(PulseServiceTemporarilyUnavailableError):
+ await p.async_query(ADT_ORB_URI, requires_authentication=False)
+ # make sure we can't override the retry
+ s.get_backoff().reset_backoff()
+ assert s.get_backoff().expiration_time == int(now + float(retry_after_time))
+ with pytest.raises(PulseServiceTemporarilyUnavailableError):
+ await p.async_query(ADT_ORB_URI, requires_authentication=False)
+ frozen_time.tick(timedelta(seconds=retry_after_time + 1))
+ mocked_server_responses.get(
+ cp.make_url(ADT_ORB_URI),
+ status=200,
+ )
+ # this should succeed
+ await p.async_query(ADT_ORB_URI, requires_authentication=False)
+
+ now = time.time()
+ retry_date = now + float(retry_after_time)
+ retry_date_str = datetime.fromtimestamp(retry_date).strftime(
+ "%a, %d %b %Y %H:%M:%S GMT"
+ )
+ # need to get the new retry after time since it doesn't have fractions of seconds
+ new_retry_after = (
+ datetime.strptime(retry_date_str, "%a, %d %b %Y %H:%M:%S GMT").timestamp() - now
+ )
+ mocked_server_responses.get(
+ cp.make_url(ADT_ORB_URI),
+ status=503,
+ headers={"Retry-After": retry_date_str},
+ )
+ with pytest.raises(PulseServiceTemporarilyUnavailableError):
+ await p.async_query(ADT_ORB_URI, requires_authentication=False)
+
+ frozen_time.tick(timedelta(seconds=new_retry_after - 1))
+ with pytest.raises(PulseServiceTemporarilyUnavailableError):
+ await p.async_query(ADT_ORB_URI, requires_authentication=False)
+ frozen_time.tick(timedelta(seconds=2))
+ mocked_server_responses.get(
+ cp.make_url(ADT_ORB_URI),
+ status=200,
+ )
+ # should succeed
+ await p.async_query(ADT_ORB_URI, requires_authentication=False)
+ # unavailable with no retry after
+ mocked_server_responses.get(
+ cp.make_url(ADT_ORB_URI),
+ status=503,
+ )
+ frozen_time.tick(timedelta(seconds=retry_after_time + 1))
+ with pytest.raises(PulseServiceTemporarilyUnavailableError):
+ await p.async_query(ADT_ORB_URI, requires_authentication=False)
+ mocked_server_responses.get(
+ cp.make_url(ADT_ORB_URI),
+ status=200,
+ )
+ # should succeed
+ frozen_time.tick(timedelta(seconds=1))
+ await p.async_query(ADT_ORB_URI, requires_authentication=False)
+
+ # retry after in the past
+ mocked_server_responses.get(
+ cp.make_url(ADT_ORB_URI),
+ status=503,
+ headers={"Retry-After": retry_date_str},
+ )
+ with pytest.raises(PulseServiceTemporarilyUnavailableError):
+ await p.async_query(ADT_ORB_URI, requires_authentication=False)
+ mocked_server_responses.get(
+ cp.make_url(ADT_ORB_URI),
+ status=200,
+ )
+ frozen_time.tick(timedelta(seconds=1))
+ # should succeed
+ await p.async_query(ADT_ORB_URI, requires_authentication=False)
+
+
+async def run_query_exception_test(
+ mocked_server_responses,
+ mock_sleep,
+ get_mocked_connection_properties,
+ aiohttp_exception: client_exceptions.ClientError,
+ pulse_exception: PulseConnectionError,
+):
+ s = PulseConnectionStatus()
+ cp = get_mocked_connection_properties
+ p = PulseQueryManager(s, cp)
+ # need to do ClientConnectorError, but it requires initialization
+ for _ in range(MAX_REQUERY_RETRIES + 1):
+ mocked_server_responses.get(
+ cp.make_url(ADT_ORB_URI),
+ exception=aiohttp_exception,
+ )
+ mocked_server_responses.get(
+ cp.make_url(ADT_ORB_URI),
+ status=200,
+ )
+ with pytest.raises(pulse_exception):
+ await p.async_query(
+ ADT_ORB_URI,
+ requires_authentication=False,
+ )
+
+ # only MAX_REQUERY_RETRIES - 1 sleeps since first call won't sleep
+ assert (
+ mock_sleep.call_count == MAX_REQUERY_RETRIES - 1
+ ), f"Failure on exception {aiohttp_exception.__name__}"
+ for i in range(MAX_REQUERY_RETRIES - 1):
+ assert mock_sleep.call_args_list[i][0][0] == 1 * 2 ** (
+ i
+ ), f"Failure on exception sleep count {i} on exception {aiohttp_exception.__name__}"
+ assert (
+ s.get_backoff().backoff_count == 1
+ ), f"Failure on exception {aiohttp_exception.__name__}"
+ with pytest.raises(pulse_exception):
+ await p.async_query(ADT_ORB_URI, requires_authentication=False)
+ # pqm backoff should trigger here
+
+ # MAX_REQUERY_RETRIES - 1 backoff for query, 1 for connection backoff
+ assert mock_sleep.call_count == MAX_REQUERY_RETRIES
+ assert (
+ mock_sleep.call_args_list[MAX_REQUERY_RETRIES - 1][0][0]
+ == s.get_backoff().initial_backoff_interval
+ )
+ mocked_server_responses.get(
+ cp.make_url(ADT_ORB_URI),
+ status=200,
+ )
+ # this should trigger a sleep
+ await p.async_query(ADT_ORB_URI, requires_authentication=False)
+ assert mock_sleep.call_count == MAX_REQUERY_RETRIES + 1
+ assert (
+ mock_sleep.call_args_list[MAX_REQUERY_RETRIES][0][0]
+ == s.get_backoff().initial_backoff_interval * 2
+ )
+ # this shouldn't trigger a sleep
+ await p.async_query(ADT_ORB_URI, requires_authentication=False)
+ assert mock_sleep.call_count == MAX_REQUERY_RETRIES + 1
+
+
+@pytest.mark.asyncio
+@pytest.mark.parametrize(
+ "test_exception",
+ (
+ (client_exceptions.ClientConnectionError, PulseClientConnectionError),
+ (client_exceptions.ClientError, PulseClientConnectionError),
+ (client_exceptions.ClientOSError, PulseClientConnectionError),
+ (client_exceptions.ServerDisconnectedError, PulseServerConnectionError),
+ (client_exceptions.ServerTimeoutError, PulseServerConnectionError),
+ (client_exceptions.ServerConnectionError, PulseServerConnectionError),
+ (asyncio.TimeoutError, PulseServerConnectionError),
+ ),
+)
+async def test_async_query_exceptions(
+ mocked_server_responses: aioresponses,
+ mock_sleep: Any,
+ get_mocked_connection_properties: PulseConnectionProperties,
+ test_exception,
+):
+ await run_query_exception_test(
+ mocked_server_responses,
+ mock_sleep,
+ get_mocked_connection_properties,
+ *test_exception,
+ )
+
+
+@pytest.mark.asyncio
+@pytest.mark.parametrize(
+ "test_exception",
+ (
+ (ConnectionRefusedError, PulseServerConnectionError),
+ (ConnectionResetError, PulseServerConnectionError),
+ (TimeoutError, PulseClientConnectionError),
+ (BrokenPipeError, PulseClientConnectionError),
+ ),
+)
+async def test_async_query_connector_errors(
+ mocked_server_responses: aioresponses,
+ mock_sleep: Any,
+ get_mocked_connection_properties: PulseConnectionProperties,
+ test_exception,
+):
+ aiohttp_exception = client_exceptions.ClientConnectorError(
+ client_reqrep.ConnectionKey(
+ DEFAULT_API_HOST,
+ 443,
+ is_ssl=True,
+ ssl=True,
+ proxy=None,
+ proxy_auth=None,
+ proxy_headers_hash=None,
+ ),
+ os_error=test_exception[0],
+ )
+ await run_query_exception_test(
+ mocked_server_responses,
+ mock_sleep,
+ get_mocked_connection_properties,
+ aiohttp_exception,
+ test_exception[1],
+ )
+
+@pytest.mark.asyncio
+async def test_wait_for_authentication_flag(
+ mocked_server_responses: aioresponses,
+ get_mocked_connection_properties: PulseConnectionProperties,
+ read_file: Callable[..., str],
+):
+ async def query_orb_task(lock: asyncio.Lock):
+ async with lock:
+ try:
+ result = await p.query_orb(logging.DEBUG, "Failed to query orb")
+ except asyncio.CancelledError:
+ result = None
+ return result
+
+ s = PulseConnectionStatus()
+ cp = get_mocked_connection_properties
+ p = PulseQueryManager(s, cp)
+ mocked_server_responses.get(
+ cp.make_url(ADT_ORB_URI),
+ status=200,
+ body=read_file("orb.html"),
+ )
+ lock = asyncio.Lock()
+ task = asyncio.create_task(query_orb_task(lock))
+ try:
+ await asyncio.wait_for(query_orb_task(lock), 10)
+ except asyncio.TimeoutError:
+ task.cancel()
+ await task
+ # if we time out, the test has passed
+ else:
+ pytest.fail("Query should have timed out")
+ await lock.acquire()
+ task = asyncio.create_task(query_orb_task(lock))
+ lock.release()
+ await asyncio.sleep(1)
+ assert not task.done()
+ await asyncio.sleep(3)
+ assert not task.done()
+ s.authenticated_flag.set()
+ result = await task
+ assert result is not None
+ assert html.tostring(result) == read_file("orb.html")
+
+ # test query with retry will wait for authentication
+ # don't set an orb response so that we will backoff on the query
+ await lock.acquire()
+ task = asyncio.create_task(query_orb_task(lock))
+ lock.release()
+ await asyncio.sleep(0.5)
+ assert not task.done()
+ s.authenticated_flag.clear()
+ await asyncio.sleep(5)
+ assert not task.done()
+ s.authenticated_flag.set()
+ with pytest.raises(PulseServerConnectionError):
+ await task
diff --git a/tests/test_site_properties.py b/tests/test_site_properties.py
new file mode 100644
index 0000000..034baa4
--- /dev/null
+++ b/tests/test_site_properties.py
@@ -0,0 +1,303 @@
+# Generated by CodiumAI
+from multiprocessing import RLock
+from time import time
+
+# Dependencies:
+# pip install pytest-mock
+import pytest
+
+from pyadtpulse.alarm_panel import ADTPulseAlarmPanel
+from pyadtpulse.const import DEFAULT_API_HOST
+from pyadtpulse.gateway import ADTPulseGateway
+from pyadtpulse.pulse_authentication_properties import PulseAuthenticationProperties
+from pyadtpulse.pulse_connection import PulseConnection
+from pyadtpulse.pulse_connection_properties import PulseConnectionProperties
+from pyadtpulse.pulse_connection_status import PulseConnectionStatus
+from pyadtpulse.site_properties import ADTPulseSiteProperties
+from pyadtpulse.zones import ADTPulseFlattendZone, ADTPulseZoneData, ADTPulseZones
+
+
+class TestADTPulseSiteProperties:
+ # Retrieve site id and name
+ def test_retrieve_site_id_and_name(self):
+ # Arrange
+ site_id = "12345"
+ site_name = "My ADT Pulse Site"
+ site_properties = ADTPulseSiteProperties(site_id, site_name)
+
+ # Act
+ retrieved_id = site_properties.id
+ retrieved_name = site_properties.name
+
+ # Assert
+ assert retrieved_id == site_id
+ assert retrieved_name == site_name
+
+ # Retrieve last update time
+ def test_retrieve_last_update_time(self):
+ # Arrange
+ site_id = "12345"
+ site_name = "My ADT Pulse Site"
+ site_properties = ADTPulseSiteProperties(site_id, site_name)
+
+ # Act
+ last_updated = site_properties.last_updated
+
+ # Assert
+ assert isinstance(last_updated, int)
+
+ # Retrieve all zones registered with ADT Pulse account when zones exist
+ def test_retrieve_all_zones_with_zones_fixed(self):
+ # Arrange
+
+ site_id = "12345"
+ site_name = "My ADT Pulse Site"
+ site_properties = ADTPulseSiteProperties(site_id, site_name)
+
+ # Add some zones to the site_properties instance
+ zone1 = ADTPulseZoneData(id_=1, name="Front Door")
+ zone2 = ADTPulseZoneData(id_=2, name="Back Door")
+
+ site_properties._zones[1] = zone1
+ site_properties._zones[2] = zone2
+
+ # Act
+ zones = site_properties.zones
+
+ # Assert
+ assert isinstance(zones, list)
+ assert len(zones) == 2
+
+ # Retrieve zone information in dictionary form
+ def test_retrieve_zone_information_as_dict(self):
+ # Arrange
+
+ site_id = "12345"
+ site_name = "My ADT Pulse Site"
+ site_properties = ADTPulseSiteProperties(site_id, site_name)
+ site_properties._zones = ADTPulseZones()
+ zone = ADTPulseZoneData(id_=1, name="Zone1") # Provide the 'id_' argument
+ site_properties._zones[1] = zone
+
+ # Act
+ zones_dict = site_properties.zones_as_dict
+
+ # Assert
+ assert isinstance(zones_dict, ADTPulseZones)
+
+ # Retrieve alarm panel object for the site
+ def test_retrieve_alarm_panel_object(self):
+ # Arrange
+ site_id = "12345"
+ site_name = "My ADT Pulse Site"
+ site_properties = ADTPulseSiteProperties(site_id, site_name)
+
+ # Act
+ alarm_panel = site_properties.alarm_control_panel
+
+ # Assert
+ assert isinstance(alarm_panel, ADTPulseAlarmPanel)
+
+ # Retrieve gateway device object
+ def test_retrieve_gateway_device_object(self):
+ # Arrange
+ site_id = "12345"
+ site_name = "My ADT Pulse Site"
+ site_properties = ADTPulseSiteProperties(site_id, site_name)
+
+ # Act
+ gateway = site_properties.gateway
+
+ # Assert
+ assert isinstance(gateway, ADTPulseGateway)
+
+ # No zones exist
+ def test_no_zones_exist(self):
+ # Arrange
+ site_id = "12345"
+ site_name = "My ADT Pulse Site"
+ site_properties = ADTPulseSiteProperties(site_id, site_name)
+
+ # Act & Assert
+ with pytest.raises(RuntimeError):
+ site_properties.zones
+
+ # Attempting to retrieve site data while another thread is modifying it
+ def test_retrieve_site_data_while_modifying(self, mocker):
+ # Arrange
+ site_id = "12345"
+ site_name = "My ADT Pulse Site"
+ site_properties = ADTPulseSiteProperties(site_id, site_name)
+
+ def modify_site_data():
+ with site_properties.site_lock:
+                # NOTE: time.sleep is unavailable here ("from time import time" binds the function); helper is never invoked in this test
+ site_properties._last_updated = int(time())
+
+ mocker.patch.object(site_properties, "_last_updated", 0)
+ mocker.patch.object(site_properties, "_site_lock", RLock())
+
+ # Act
+ with site_properties.site_lock:
+ retrieved_last_updated = site_properties.last_updated
+
+ # Assert
+ assert retrieved_last_updated == 0
+
+ # Attempting to set alarm status to existing status
+ def test_set_alarm_status_to_existing_status(self, mocker):
+ # Arrange
+ site_id = "12345"
+ site_name = "My ADT Pulse Site"
+ site_properties = ADTPulseSiteProperties(site_id, site_name)
+
+ mocker.patch.object(site_properties._alarm_panel, "_status", "Armed Away")
+
+ # Check if updates exist
+ def test_check_updates_exist(self, mocker):
+ # Arrange
+ from time import time
+
+ site_properties = ADTPulseSiteProperties("12345", "My ADT Pulse Site")
+ mocker.patch.object(site_properties, "_last_updated", return_value=time())
+
+ # Act
+ result = site_properties.updates_may_exist
+
+ # Assert
+ assert result is False
+
+ # Update site/zone data async with current data
+ @pytest.mark.asyncio
+ async def test_update_site_zone_data_async(self, mocker):
+ # Arrange
+ site_id = "12345"
+ site_name = "My ADT Pulse Site"
+ site_properties = ADTPulseSiteProperties(site_id, site_name)
+ mock_zones = mocker.Mock()
+ mock_zones.flatten.return_value = [ADTPulseFlattendZone()]
+ site_properties._zones = mock_zones
+
+ # Act
+ result = await site_properties.async_update()
+
+ # Assert
+        assert result is False
+
+ # Cannot set alarm status from one state to another
+ @pytest.mark.asyncio
+ async def test_cannot_set_alarm_status(self, mocker):
+ # Arrange
+ site_id = "12345"
+ site_name = "My ADT Pulse Site"
+ site_properties = ADTPulseSiteProperties(site_id, site_name)
+ cp = PulseConnectionProperties(DEFAULT_API_HOST)
+ cs = PulseConnectionStatus()
+ pa = PulseAuthenticationProperties(
+ "test@example.com", "testpassword", "testfingerprint"
+ )
+
+ connection = PulseConnection(cs, cp, pa)
+
+ # Act
+ result = await site_properties._alarm_panel._arm(
+ connection, "Armed Home", False
+ )
+
+ # Assert
+        assert result is False
+
+ # Failed updating ADT Pulse alarm to new mode
+ @pytest.mark.asyncio
+ async def test_failed_updating_alarm_mode(self, mocker):
+ # Arrange
+ site_id = "12345"
+ site_name = "My ADT Pulse Site"
+ site_properties = ADTPulseSiteProperties(site_id, site_name)
+
+ # Mock the _arm method to return False
+ async def mock_arm(*args, **kwargs):
+ return False
+
+ mocker.patch.object(ADTPulseAlarmPanel, "_arm", side_effect=mock_arm)
+
+ # Act
+ result = await site_properties.alarm_control_panel._arm(None, "new_mode", False)
+
+ # Assert
+        assert result is False
+
+ # Retrieve last update time with invalid input
+ def test_retrieve_last_update_invalid_input(self):
+ # Arrange
+ site_id = "12345"
+ site_name = "My ADT Pulse Site"
+ site_properties = ADTPulseSiteProperties(site_id, site_name)
+
+ # Act
+ last_updated = site_properties.last_updated
+
+ # Assert
+ assert last_updated == 0
+
+ # Retrieve site id and name with invalid input
+ def test_retrieve_site_id_and_name_with_invalid_input(self):
+ # Arrange
+ site_id = "12345"
+ site_name = "My ADT Pulse Site"
+ site_properties = ADTPulseSiteProperties(site_id, site_name)
+
+ # Act
+ retrieved_id = site_properties.id
+ retrieved_name = site_properties.name
+
+ # Assert
+ assert retrieved_id == site_id
+ assert retrieved_name == site_name
+
+ # Retrieve zone information in dictionary form with invalid input
+ def test_retrieve_zone_info_invalid_input(self, mocker):
+ # Arrange
+ site_id = "12345"
+ site_name = "My ADT Pulse Site"
+ site_properties = ADTPulseSiteProperties(site_id, site_name)
+ mocker.patch.object(site_properties, "_zones", None)
+
+ # Act and Assert
+ with pytest.raises(RuntimeError):
+ site_properties.zones
+
+ with pytest.raises(RuntimeError):
+ site_properties.zones_as_dict
+
+ # Retrieve all zones registered with ADT Pulse account with invalid input
+ def test_retrieve_zones_with_invalid_input(self, mocker):
+ # Arrange
+ site_id = "12345"
+ site_name = "My ADT Pulse Site"
+ site_properties = ADTPulseSiteProperties(site_id, site_name)
+ mocker.patch.object(site_properties, "_zones", None)
+
+ # Act and Assert
+ with pytest.raises(RuntimeError):
+ _ = site_properties.zones
+
+ with pytest.raises(RuntimeError):
+ _ = site_properties.zones_as_dict
+
+ # Retrieve alarm panel object for the site with invalid input
+ def test_retrieve_alarm_panel_invalid_input(self, mocker):
+ # Arrange
+ site_id = "12345"
+ site_name = "My ADT Pulse Site"
+ site_properties = ADTPulseSiteProperties(site_id, site_name)
+
+ # Mock the ADTPulseAlarmPanel object
+ mock_alarm_panel = mocker.Mock()
+ site_properties._alarm_panel = mock_alarm_panel
+
+ # Act
+ retrieved_alarm_panel = site_properties.alarm_control_panel
+
+ # Assert
+ assert retrieved_alarm_panel == mock_alarm_panel
diff --git a/tests/test_zones.py b/tests/test_zones.py
new file mode 100644
index 0000000..b7e19db
--- /dev/null
+++ b/tests/test_zones.py
@@ -0,0 +1,1221 @@
+# Generated by CodiumAI
+from datetime import datetime
+
+import pytest
+from typeguard import TypeCheckError
+
+from pyadtpulse.zones import (
+ ADT_NAME_TO_DEFAULT_TAGS,
+ ADTPulseFlattendZone,
+ ADTPulseZoneData,
+ ADTPulseZones,
+)
+
+
+class TestADTPulseZoneData:
+ # Creating an instance of ADTPulseZoneData with required parameters should succeed.
+ def test_create_instance_with_required_parameters(self):
+ """
+ Test that creating an instance of ADTPulseZoneData with required parameters succeeds.
+ """
+ # Arrange
+ name = "Zone 1"
+ id_ = "zone1"
+
+ # Act
+ zone_data = ADTPulseZoneData(name, id_)
+
+ # Assert
+ assert zone_data.name == name
+ assert zone_data.id_ == id_
+ assert zone_data.tags == ADT_NAME_TO_DEFAULT_TAGS["Window"]
+ assert zone_data.status == "Unknown"
+ assert zone_data.state == "Unknown"
+ assert zone_data.last_activity_timestamp == 0
+
+ # Setting the last_activity_timestamp with a value greater than or equal to 1420070400 should succeed.
+ def test_set_last_activity_timestamp_greater_than_or_equal_to_1420070400(self):
+ """
+ Test that setting the last_activity_timestamp with a value greater than or equal to 1420070400 succeeds.
+ """
+ # Arrange
+ zone_data = ADTPulseZoneData("Zone 1", "zone1")
+ timestamp = 1420070400
+
+ # Act
+ zone_data.last_activity_timestamp = timestamp
+
+ # Assert
+ assert zone_data.last_activity_timestamp == timestamp
+
+ # Setting the tags with a valid value should succeed.
+ def test_set_tags_with_valid_value(self):
+ """
+ Test that setting the tags with a valid value succeeds.
+ """
+ # Arrange
+ zone_data = ADTPulseZoneData("Zone 1", "zone1")
+ tags = ("sensor", "doorWindow")
+
+ # Act
+ zone_data.tags = tags
+
+ # Assert
+ assert zone_data.tags == tags
+
+ # Getting the last_activity_timestamp should return the correct value.
+ def test_get_last_activity_timestamp(self):
+ """
+ Test that getting the last_activity_timestamp returns the correct value.
+ """
+ # Arrange
+ timestamp = 1420070400
+ zone_data = ADTPulseZoneData("Zone 1", "zone1")
+ zone_data.last_activity_timestamp = timestamp
+
+ # Act
+ result = zone_data.last_activity_timestamp
+
+ # Assert
+ assert result == timestamp
+
+ # Getting the tags should return the correct value.
+ def test_get_tags_fixed(self):
+ """
+ Test that getting the tags returns the correct value.
+ """
+ # Arrange
+ tags = ("sensor", "doorWindow")
+ zone_data = ADTPulseZoneData("Zone 1", "zone1")
+ zone_data.tags = tags
+
+ # Act
+ result = zone_data.tags
+
+ # Assert
+ assert result == tags
+
+ # ADT_NAME_TO_DEFAULT_TAGS should be a valid dictionary.
+ def test_ADT_NAME_TO_DEFAULT_TAGS_is_valid_dictionary(self):
+ """
+ Test that ADT_NAME_TO_DEFAULT_TAGS is a valid dictionary.
+ """
+ # Arrange
+
+ # Act
+
+ # Assert
+ assert isinstance(ADT_NAME_TO_DEFAULT_TAGS, dict)
+
+ # Creating an instance of ADTPulseZoneData without required parameters should fail.
+ def test_create_instance_without_required_parameters(self):
+ """
+ Test that creating an instance of ADTPulseZoneData without required parameters fails.
+ """
+ # Arrange
+
+ # Act and Assert
+ with pytest.raises(TypeError):
+ ADTPulseZoneData()
+
+ # Setting the last_activity_timestamp with a value less than 1420070400 should raise a ValueError.
+ def test_set_last_activity_timestamp_less_than_1420070400(self):
+ """
+ Test that setting the last_activity_timestamp with a value less than 1420070400 raises a ValueError.
+ """
+ # Arrange
+ zone_data = ADTPulseZoneData("Zone 1", "zone1")
+ timestamp = 1419999999
+
+ # Act and Assert
+ with pytest.raises(ValueError):
+ zone_data.last_activity_timestamp = timestamp
+
+ # Setting the tags with an invalid value should raise a ValueError.
+ def test_set_tags_with_invalid_value(self):
+ """
+ Test that setting the tags with an invalid value raises a ValueError.
+ """
+ # Arrange
+ zone_data = ADTPulseZoneData("Zone 1", "zone1")
+ tags = ("InvalidSensor", "InvalidType")
+
+ # Act and Assert
+ with pytest.raises(ValueError):
+ zone_data.tags = tags
+
+ # Getting the name should return the correct value.
+ def test_get_name(self):
+ """
+ Test that getting the name returns the correct value.
+ """
+ # Arrange
+ name = "Zone 1"
+ zone_data = ADTPulseZoneData(name, "zone1")
+
+ # Act
+ result = zone_data.name
+
+ # Assert
+ assert result == name
+
+ # Getting the id_ should return the correct value.
+ def test_get_id(self):
+ """
+ Test that getting the id_ returns the correct value.
+ """
+ # Arrange
+ id_ = "zone1"
+ zone_data = ADTPulseZoneData("Zone 1", id_)
+
+ # Act
+ result = zone_data.id_
+
+ # Assert
+ assert result == id_
+
+ # Setting the status with a valid value should succeed.
+ def test_set_status_with_valid_value(self):
+ """
+ Test that setting the status with a valid value succeeds.
+ """
+ # Arrange
+ zone_data = ADTPulseZoneData("Zone 1", "zone1")
+ status = "Online"
+
+ # Act
+ zone_data.status = status
+
+ # Assert
+ assert zone_data.status == status
+
+ # Setting the state with a valid value should succeed.
+ def test_setting_state_with_valid_value(self):
+ """
+ Test that setting the state with a valid value succeeds.
+ """
+ # Arrange
+ name = "Zone 1"
+ id_ = "zone1"
+ state = "Opened"
+
+ # Act
+ zone_data = ADTPulseZoneData(name, id_)
+ zone_data.state = state
+
+ # Assert
+ assert zone_data.state == state
+
+ # Getting the status should return the correct value.
+ def test_getting_status(self):
+ """
+ Test that getting the status returns the correct value.
+ """
+ # Arrange
+ name = "Zone 1"
+ id_ = "zone1"
+ status = "Online"
+
+ # Act
+ zone_data = ADTPulseZoneData(name, id_)
+ zone_data.status = status
+
+ # Assert
+ assert zone_data.status == status
+
+ # Getting the state should return the correct value.
+ def test_getting_state_returns_correct_value(self):
+ """
+ Test that getting the state returns the correct value.
+ """
+ # Arrange
+ name = "Zone 1"
+ id_ = "zone1"
+ state = "Opened"
+
+ zone_data = ADTPulseZoneData(name, id_)
+ zone_data.state = state
+
+ # Act
+ result = zone_data.state
+
+ # Assert
+ assert result == state
+
+
+class TestADTPulseFlattendZone:
+ # Creating a new instance of ADTPulseFlattendZone with valid parameters should successfully create an object with the correct attributes.
+ def test_valid_parameters(self):
+ """
+ Test that creating a new instance of ADTPulseFlattendZone with valid parameters successfully creates an object with the correct attributes.
+ """
+ # Arrange
+ zone = 1
+ name = "Zone 1"
+ id_ = "zone1"
+ tags = ("sensor1", "type1")
+ status = "Online"
+ state = "Opened"
+ last_activity_timestamp = 1234567890
+
+ # Act
+ zone_obj = ADTPulseFlattendZone(
+ zone=zone,
+ name=name,
+ id_=id_,
+ tags=tags,
+ status=status,
+ state=state,
+ last_activity_timestamp=last_activity_timestamp,
+ )
+
+ # Assert
+ assert zone_obj["zone"] == zone
+ assert zone_obj["name"] == name
+ assert zone_obj["id_"] == id_
+ assert zone_obj["tags"] == tags
+ assert zone_obj["status"] == status
+ assert zone_obj["state"] == state
+ assert zone_obj["last_activity_timestamp"] == last_activity_timestamp
+
+ # Accessing any attribute of an instance of ADTPulseFlattendZone should return the expected value.
+ def test_access_attributes(self):
+ """
+ Test that accessing any attribute of an instance of ADTPulseFlattendZone returns the expected value.
+ """
+ # Arrange
+ zone = 1
+ name = "Zone 1"
+ id_ = "zone1"
+ tags = ("sensor1", "type1")
+ status = "Online"
+ state = "Opened"
+ last_activity_timestamp = 1234567890
+ zone_obj = ADTPulseFlattendZone(
+ zone=zone,
+ name=name,
+ id_=id_,
+ tags=tags,
+ status=status,
+ state=state,
+ last_activity_timestamp=last_activity_timestamp,
+ )
+
+ # Act & Assert
+ assert zone_obj["zone"] == zone
+ assert zone_obj["name"] == name
+ assert zone_obj["id_"] == id_
+ assert zone_obj["tags"] == tags
+ assert zone_obj["status"] == status
+ assert zone_obj["state"] == state
+ assert zone_obj["last_activity_timestamp"] == last_activity_timestamp
+
+ # Modifying any attribute of an instance of ADTPulseFlattendZone should successfully update the attribute with the new value.
+ def test_modify_attributes_fixed(self):
+ """
+ Test that modifying any attribute of an instance of ADTPulseFlattendZone successfully updates the attribute with the new value.
+ """
+ # Arrange
+ zone = 1
+ name = "Zone 1"
+ id_ = "zone1"
+ tags = ("sensor1", "type1")
+ status = "Online"
+ state = "Opened"
+ last_activity_timestamp = 1234567890
+ zone_obj = ADTPulseFlattendZone(
+ zone=zone,
+ name=name,
+ id_=id_,
+ tags=tags,
+ status=status,
+ state=state,
+ last_activity_timestamp=last_activity_timestamp,
+ )
+
+ # Act
+ new_zone = 2
+ new_name = "Zone 2"
+ new_id = "zone2"
+ new_tags = ("sensor2", "type2")
+ new_status = "Offline"
+ new_state = "Closed"
+ new_last_activity_timestamp = 9876543210
+
+ zone_obj["zone"] = new_zone
+ zone_obj["name"] = new_name
+ zone_obj["id_"] = new_id
+ zone_obj["tags"] = new_tags
+ zone_obj["status"] = new_status
+ zone_obj["state"] = new_state
+ zone_obj["last_activity_timestamp"] = new_last_activity_timestamp
+
+ # Assert
+ assert zone_obj["zone"] == new_zone
+ assert zone_obj["name"] == new_name
+ assert zone_obj["id_"] == new_id
+ assert zone_obj["tags"] == new_tags
+ assert zone_obj["status"] == new_status
+ assert zone_obj["state"] == new_state
+ assert zone_obj["last_activity_timestamp"] == new_last_activity_timestamp
+
+ # Creating a new instance of ADTPulseFlattendZone with a non-integer value for 'zone' should not raise a TypeError.
+ def test_non_integer_zone(self):
+ """
+ Test that creating a new instance of ADTPulseFlattendZone with a non-integer value for 'zone' does not raise a TypeError.
+ """
+ # Arrange
+ zone = "1"
+ name = "Zone 1"
+ id_ = "zone1"
+ tags = ("sensor1", "type1")
+ status = "Online"
+ state = "Opened"
+ last_activity_timestamp = 1234567890
+
+ # Act & Assert
+ ADTPulseFlattendZone(
+ zone=zone,
+ name=name,
+ id_=id_,
+ tags=tags,
+ status=status,
+ state=state,
+ last_activity_timestamp=last_activity_timestamp,
+ )
+
+ # Creating a new instance of ADTPulseFlattendZone with an empty string for 'name' should not raise a ValueError.
+ def test_empty_name(self):
+ """
+ Test that creating a new instance of ADTPulseFlattendZone with an empty string for 'name' does not raise a ValueError.
+ """
+ # Arrange
+ zone = 1
+ name = ""
+ id_ = "zone1"
+ tags = ("sensor1", "type1")
+ status = "Online"
+ state = "Opened"
+ last_activity_timestamp = 1234567890
+
+ # Act & Assert
+ ADTPulseFlattendZone(
+ zone=zone,
+ name=name,
+ id_=id_,
+ tags=tags,
+ status=status,
+ state=state,
+ last_activity_timestamp=last_activity_timestamp,
+ )
+
+ # Creating a new instance of ADTPulseFlattendZone with an empty string for 'id_' should not raise a ValueError.
+ def test_empty_id_fixed(self):
+ """
+ Test that creating a new instance of ADTPulseFlattendZone with an empty string for 'id_' does not raise a ValueError.
+ """
+ # Arrange
+ zone = 1
+ name = "Zone 1"
+ id_ = ""
+ tags = ("sensor1", "type1")
+ status = "Online"
+ state = "Opened"
+ last_activity_timestamp = 1234567890
+
+ # Act
+ ADTPulseFlattendZone(
+ zone=zone,
+ name=name,
+ id_=id_,
+ tags=tags,
+ status=status,
+ state=state,
+ last_activity_timestamp=last_activity_timestamp,
+ )
+
+ # Assert
+ assert True
+
+ # Creating a new instance of ADTPulseFlattendZone with a tuple that contains non-string values for 'tags' should not raise a TypeError.
+ def test_non_string_tags(self):
+ """
+ Test that creating a new instance of ADTPulseFlattendZone with a tuple that contains non-string values for 'tags' does not raise a TypeError.
+ """
+ # Arrange
+ zone = 1
+ name = "Zone 1"
+ id_ = "zone1"
+ tags = ("sensor1", 2)
+ status = "Online"
+ state = "Opened"
+ last_activity_timestamp = 1234567890
+
+ # Act & Assert
+ ADTPulseFlattendZone(
+ zone=zone,
+ name=name,
+ id_=id_,
+ tags=tags,
+ status=status,
+ state=state,
+ last_activity_timestamp=last_activity_timestamp,
+ )
+
+ # Creating a new instance of ADTPulseFlattendZone with a non-string value for 'status' should not raise a TypeError.
+ def test_non_string_status(self):
+ """
+ Test that creating a new instance of ADTPulseFlattendZone with a non-string value for 'status' does not raise a TypeError.
+ """
+ # Arrange
+ zone = 1
+ name = "Zone 1"
+ id_ = "zone1"
+ tags = ("sensor1", "type1")
+ status = 1
+ state = "Opened"
+ last_activity_timestamp = 1234567890
+
+ # Act & Assert
+ ADTPulseFlattendZone(
+ zone=zone,
+ name=name,
+ id_=id_,
+ tags=tags,
+ status=status,
+ state=state,
+ last_activity_timestamp=last_activity_timestamp,
+ )
+
+ # Creating a new instance of ADTPulseFlattendZone with a non-string value for 'state' should not raise a TypeError.
+ def test_non_string_state(self):
+ """
+ Test that creating a new instance of ADTPulseFlattendZone with a non-string value for 'state' does not raise a TypeError.
+ """
+ # Arrange
+ zone = 1
+ name = "Zone 1"
+ id_ = "zone1"
+ tags = ("sensor1", "type1")
+ status = "Online"
+ state = 1
+ last_activity_timestamp = 1234567890
+
+ # Act & Assert
+ ADTPulseFlattendZone(
+ zone=zone,
+ name=name,
+ id_=id_,
+ tags=tags,
+ status=status,
+ state=state,
+ last_activity_timestamp=last_activity_timestamp,
+ )
+
+ # Creating a new instance of ADTPulseFlattendZone with a non-integer value for 'last_activity_timestamp' should not raise a TypeError.
+ def test_non_integer_last_activity_timestamp(self):
+ """
+ Test that creating a new instance of ADTPulseFlattendZone with a non-integer value for 'last_activity_timestamp' does not raise a TypeError.
+ """
+ # Arrange
+ zone = 1
+ name = "Zone 1"
+ id_ = "zone1"
+ tags = ("sensor1", "type1")
+ status = "Online"
+ state = "Opened"
+ last_activity_timestamp = "1234567890"
+
+ # Act & Assert
+ ADTPulseFlattendZone(
+ zone=zone,
+ name=name,
+ id_=id_,
+ tags=tags,
+ status=status,
+ state=state,
+ last_activity_timestamp=last_activity_timestamp,
+ )
+
+ # Creating a new instance of ADTPulseFlattendZone with a very large integer value for 'zone' should successfully create an object with the correct attributes.
+ def test_large_zone_fixed(self):
+ """
+ Test that creating a new instance of ADTPulseFlattendZone with a very large integer value for 'zone' successfully creates an object with the correct attributes.
+ """
+ # Arrange
+ zone = 9999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999
+ name = "Zone 1"
+ id_ = "zone1"
+ tags = ("sensor1", "type1")
+ status = "Online"
+ state = "Opened"
+ last_activity_timestamp = 1234567890
+
+ # Act
+ zone_obj = ADTPulseFlattendZone(
+ zone=zone,
+ name=name,
+ id_=id_,
+ tags=tags,
+ status=status,
+ state=state,
+ last_activity_timestamp=last_activity_timestamp,
+ )
+
+ # Assert
+ assert zone_obj["zone"] == zone
+ assert zone_obj["name"] == name
+ assert zone_obj["id_"] == id_
+ assert zone_obj["tags"] == tags
+ assert zone_obj["status"] == status
+ assert zone_obj["state"] == state
+ assert zone_obj["last_activity_timestamp"] == last_activity_timestamp
+
+ # Creating a new instance of ADTPulseFlattendZone with a very long string for 'name' should successfully create an object with the correct attributes.
+ def test_long_name_fixed(self):
+ """
+ Test that creating a new instance of ADTPulseFlattendZone with a very long string for 'name' successfully creates an object with the correct attributes.
+ """
+ # Arrange
+ zone = 1
+ name = "This is a very long name that exceeds the maximum length allowed for the 'name' attribute in ADTPulseFlattendZone"
+ id_ = "zone1"
+ tags = ("sensor1", "type1")
+ status = "Online"
+ state = "Opened"
+ last_activity_timestamp = 1234567890
+
+ # Act
+ zone_obj = ADTPulseFlattendZone(
+ zone=zone,
+ name=name,
+ id_=id_,
+ tags=tags,
+ status=status,
+ state=state,
+ last_activity_timestamp=last_activity_timestamp,
+ )
+
+ # Assert
+ assert zone_obj["zone"] == zone
+ assert zone_obj["name"] == name
+ assert zone_obj["id_"] == id_
+ assert zone_obj["tags"] == tags
+ assert zone_obj["status"] == status
+ assert zone_obj["state"] == state
+ assert zone_obj["last_activity_timestamp"] == last_activity_timestamp
+
+ # Creating a new instance of ADTPulseFlattendZone with a very long string for 'id_' should successfully create an object with the correct attributes.
+ def test_long_id_fixed(self):
+ """
+ Test that creating a new instance of ADTPulseFlattendZone with a very long string for 'id_' successfully creates an object with the correct attributes.
+ """
+ # Arrange
+ zone = 1
+ name = "Zone 1"
+ id_ = "a" * 1000 # Very long string for 'id_'
+ tags = ("sensor1", "type1")
+ status = "Online"
+ state = "Opened"
+ last_activity_timestamp = 1234567890
+
+ # Act
+ zone_obj = ADTPulseFlattendZone(
+ zone=zone,
+ name=name,
+ id_=id_,
+ tags=tags,
+ status=status,
+ state=state,
+ last_activity_timestamp=last_activity_timestamp,
+ )
+
+ # Assert
+ assert zone_obj["zone"] == zone
+ assert zone_obj["name"] == name
+ assert zone_obj["id_"] == id_
+ assert zone_obj["tags"] == tags
+ assert zone_obj["status"] == status
+ assert zone_obj["state"] == state
+ assert zone_obj["last_activity_timestamp"] == last_activity_timestamp
+
+ # Creating a new instance of ADTPulseFlattendZone with a tuple that contains multiple strings for 'tags' should successfully create an object with the correct attributes.
+ def test_create_instance_with_multiple_tags_fixed(self):
+ """
+ Test that creating a new instance of ADTPulseFlattendZone with a tuple that contains multiple strings for 'tags' successfully creates an object with the correct attributes.
+ """
+ # Arrange
+ zone = 1
+ name = "Zone 1"
+ id_ = "zone1"
+ tags = ("sensor1", "type1", "sensor2", "type2")
+ status = "Online"
+ state = "Opened"
+ last_activity_timestamp = 1234567890
+
+ # Act
+ zone_obj = ADTPulseFlattendZone(
+ zone=zone,
+ name=name,
+ id_=id_,
+ tags=tags,
+ status=status,
+ state=state,
+ last_activity_timestamp=last_activity_timestamp,
+ )
+
+ # Assert
+ assert zone_obj["zone"] == zone
+ assert zone_obj["name"] == name
+ assert zone_obj["id_"] == id_
+ assert zone_obj["tags"] == tags
+ assert zone_obj["status"] == status
+ assert zone_obj["state"] == state
+ assert zone_obj["last_activity_timestamp"] == last_activity_timestamp
+
+ # Creating a new instance of ADTPulseFlattendZone with a very long string for 'status' should successfully create an object with the correct attributes.
+ def test_long_status_string_fixed(self):
+ """
+ Test that creating a new instance of ADTPulseFlattendZone with a very long string for 'status' successfully creates an object with the correct attributes.
+ """
+ # Arrange
+ zone = 1
+ name = "Zone 1"
+ id_ = "zone1"
+ tags = ("sensor1", "type1")
+ status = "Very long status string" * 1000
+ state = "Opened"
+ last_activity_timestamp = 1234567890
+
+ # Act
+ zone_obj = ADTPulseFlattendZone(
+ zone=zone,
+ name=name,
+ id_=id_,
+ tags=tags,
+ status=status,
+ state=state,
+ last_activity_timestamp=last_activity_timestamp,
+ )
+
+ # Assert
+ assert zone_obj["zone"] == zone
+ assert zone_obj["name"] == name
+ assert zone_obj["id_"] == id_
+ assert zone_obj["tags"] == tags
+ assert zone_obj["status"] == status
+ assert zone_obj["state"] == state
+ assert zone_obj["last_activity_timestamp"] == last_activity_timestamp
+
+ # Creating a new instance of ADTPulseFlattendZone with a very long string for 'state' should successfully create an object with the correct attributes.
+ def test_long_state_string_fixed(self):
+ """
+ Test that creating a new instance of ADTPulseFlattendZone with a very long string for 'state' successfully creates an object with the correct attributes.
+ """
+ # Arrange
+ zone = 1
+ name = "Zone 1"
+ id_ = "zone1"
+ tags = ("sensor1", "type1")
+ status = "Online"
+ state = "a" * 1000 # Very long string for 'state'
+ last_activity_timestamp = 1234567890
+
+ # Act
+ zone_obj = ADTPulseFlattendZone(
+ zone=zone,
+ name=name,
+ id_=id_,
+ tags=tags,
+ status=status,
+ state=state,
+ last_activity_timestamp=last_activity_timestamp,
+ )
+
+ # Assert
+ assert zone_obj["zone"] == zone
+ assert zone_obj["name"] == name
+ assert zone_obj["id_"] == id_
+ assert zone_obj["tags"] == tags
+ assert zone_obj["status"] == status
+ assert zone_obj["state"] == state
+ assert zone_obj["last_activity_timestamp"] == last_activity_timestamp
+
+ # Creating a new instance of ADTPulseFlattendZone with a very large integer value for 'last_activity_timestamp' should successfully create an object with the correct attributes.
+ def test_large_last_activity_timestamp_fixed(self):
+ """
+ Test that creating a new instance of ADTPulseFlattendZone with a very large integer value for 'last_activity_timestamp' successfully creates an object with the correct attributes.
+ """
+ # Arrange
+ zone = 1
+ name = "Zone 1"
+ id_ = "zone1"
+ tags = ("sensor1", "type1")
+ status = "Online"
+ state = "Opened"
+ last_activity_timestamp = 999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999
+
+ # Act
+ zone_obj = ADTPulseFlattendZone(
+ zone=zone,
+ name=name,
+ id_=id_,
+ tags=tags,
+ status=status,
+ state=state,
+ last_activity_timestamp=last_activity_timestamp,
+ )
+
+ # Assert
+ assert zone_obj["zone"] == zone
+ assert zone_obj["name"] == name
+ assert zone_obj["id_"] == id_
+ assert zone_obj["tags"] == tags
+ assert zone_obj["status"] == status
+ assert zone_obj["state"] == state
+ assert zone_obj["last_activity_timestamp"] == last_activity_timestamp
+
+
+class TestADTPulseZones:
+ # ADTPulseZones can be initialized with a dictionary containing ADTPulseZoneData with zone as the key
+ def test_initialized_with_dictionary(self):
+ """
+ Test that ADTPulseZones can be initialized with a dictionary containing ADTPulseZoneData with zone as the key
+ """
+ # Arrange
+ data = {
+ 1: ADTPulseZoneData("Zone 1", "sensor-1"),
+ 2: ADTPulseZoneData("Zone 2", "sensor-2"),
+ 3: ADTPulseZoneData("Zone 3", "sensor-3"),
+ }
+
+ # Act
+ zones = ADTPulseZones(data)
+
+ # Assert
+ assert len(zones) == 3
+ assert zones[1].name == "Zone 1"
+ assert zones[2].name == "Zone 2"
+ assert zones[3].name == "Zone 3"
+
+ # ADTPulseZones can get a Zone by its id
+ def test_get_zone_by_id(self):
+ """
+ Test that ADTPulseZones can get a Zone by its id
+ """
+ # Arrange
+ zones = ADTPulseZones(
+ {
+ 1: ADTPulseZoneData("Zone 1", "sensor-1"),
+ 2: ADTPulseZoneData("Zone 2", "sensor-2"),
+ 3: ADTPulseZoneData("Zone 3", "sensor-3"),
+ }
+ )
+
+ # Act
+ zone_1 = zones[1]
+ zone_2 = zones[2]
+ zone_3 = zones[3]
+
+ # Assert
+ assert zone_1.name == "Zone 1"
+ assert zone_2.name == "Zone 2"
+ assert zone_3.name == "Zone 3"
+
+ # ADTPulseZones can set a Zone by its id
+ def test_set_zone_by_id(self):
+ """
+ Test that ADTPulseZones can set a Zone by its id
+ """
+ # Arrange
+ zones = ADTPulseZones()
+
+ # Act
+ zones[1] = ADTPulseZoneData("Zone 1", "sensor-1")
+ zones[2] = ADTPulseZoneData("Zone 2", "sensor-2")
+ zones[3] = ADTPulseZoneData("Zone 3", "sensor-3")
+
+ # Assert
+ assert len(zones) == 3
+ assert zones[1].name == "Zone 1"
+ assert zones[2].name == "Zone 2"
+ assert zones[3].name == "Zone 3"
+
+ # ADTPulseZones can update zone status by its id
+ def test_update_zone_status(self):
+ """
+ Test that ADTPulseZones can update zone status by its id
+ """
+ # Arrange
+ zones = ADTPulseZones(
+ {
+ 1: ADTPulseZoneData("Zone 1", "sensor-1"),
+ 2: ADTPulseZoneData("Zone 2", "sensor-2"),
+ 3: ADTPulseZoneData("Zone 3", "sensor-3"),
+ }
+ )
+
+ # Act
+ zones.update_status(1, "Online")
+ zones.update_status(2, "Low Battery")
+ zones.update_status(3, "Offline")
+
+ # Assert
+ assert zones[1].status == "Online"
+ assert zones[2].status == "Low Battery"
+ assert zones[3].status == "Offline"
+
+ # ADTPulseZones can update zone state by its id
+ def test_update_zone_state(self):
+ """
+ Test that ADTPulseZones can update zone state by its id
+ """
+ # Arrange
+ zones = ADTPulseZones(
+ {
+ 1: ADTPulseZoneData("Zone 1", "sensor-1"),
+ 2: ADTPulseZoneData("Zone 2", "sensor-2"),
+ 3: ADTPulseZoneData("Zone 3", "sensor-3"),
+ }
+ )
+
+ # Act
+ zones.update_state(1, "Opened")
+ zones.update_state(2, "Closed")
+ zones.update_state(3, "Unknown")
+
+ # Assert
+ assert zones[1].state == "Opened"
+ assert zones[2].state == "Closed"
+ assert zones[3].state == "Unknown"
+
+ # ADTPulseZones can update last activity timestamp by its id
+ def test_update_last_activity_timestamp(self):
+ """
+ Test that ADTPulseZones can update last activity timestamp by its id
+ """
+ # Arrange
+ zones = ADTPulseZones(
+ {
+ 1: ADTPulseZoneData("Zone 1", "sensor-1"),
+ 2: ADTPulseZoneData("Zone 2", "sensor-2"),
+ 3: ADTPulseZoneData("Zone 3", "sensor-3"),
+ }
+ )
+
+ # Act
+ dt_1 = datetime(2022, 1, 1, 12, 0, 0)
+ dt_2 = datetime(2022, 1, 2, 12, 0, 0)
+ dt_3 = datetime(2022, 1, 3, 12, 0, 0)
+
+ zones.update_last_activity_timestamp(1, dt_1)
+ zones.update_last_activity_timestamp(2, dt_2)
+ zones.update_last_activity_timestamp(3, dt_3)
+
+ # Assert
+ assert zones[1].last_activity_timestamp == int(dt_1.timestamp())
+ assert zones[2].last_activity_timestamp == int(dt_2.timestamp())
+ assert zones[3].last_activity_timestamp == int(dt_3.timestamp())
+
+ # ADTPulseZones can update device info by its id
+ def test_update_device_info_by_id(self):
+ """
+ Test that ADTPulseZones can update device info by its id
+ """
+ # Arrange
+ zones = ADTPulseZones()
+ zones[1] = ADTPulseZoneData("Zone 1", "sensor-1")
+
+ # Act
+ zones.update_device_info(1, "Opened", "Low Battery")
+
+ # Assert
+ assert zones[1].state == "Opened"
+ assert zones[1].status == "Low Battery"
+
+ # ADTPulseZones can update zone attributes with a dictionary containing zone attributes
+ def test_update_zone_attributes_with_dictionary(self):
+ """
+ Test that ADTPulseZones can update zone attributes with a dictionary containing zone attributes
+ """
+ # Arrange
+ zones = ADTPulseZones()
+ dev_attr = {
+ "name": "Zone 1",
+ "type_model": "Window Sensor",
+ "zone": "1",
+ "status": "Online",
+ }
+
+ # Act
+ zones.update_zone_attributes(dev_attr)
+
+ # Assert
+ assert len(zones) == 1
+ assert zones[1].name == "Zone 1"
+ assert zones[1].id_ == "sensor-1"
+ assert zones[1].tags == ADT_NAME_TO_DEFAULT_TAGS["Window"]
+ assert zones[1].status == "Online"
+ assert zones[1].state == "Unknown"
+ assert zones[1].last_activity_timestamp == 0
+
+ # ADTPulseZones raises a KeyError when a Zone is looked up with a non-int key
+ def test_key_not_int(self):
+ """
+ Test that ADTPulseZones raises a KeyError when a Zone is looked up with a non-int key
+ """
+ # Arrange
+ zones = ADTPulseZones()
+ valid_key = 1
+ invalid_key = "1"
+ value = ADTPulseZoneData("Zone 1", "sensor-1")
+
+ # Act
+ zones[valid_key] = value
+
+ # Assert
+ with pytest.raises(KeyError):
+ zones[invalid_key]
+
+ # ADTPulseZones can flatten its data into a list of ADTPulseFlattendZone
+ def test_flatten_method(self):
+ """
+ Test that ADTPulseZones can flatten its data into a list of ADTPulseFlattendZone
+ """
+ # Arrange
+ zones = ADTPulseZones()
+ zones[1] = ADTPulseZoneData("Zone 1", "sensor-1")
+ zones[2] = ADTPulseZoneData("Zone 2", "sensor-2")
+ zones[3] = ADTPulseZoneData("Zone 3", "sensor-3")
+
+ # Act
+ flattened_zones = zones.flatten()
+
+ # Assert
+ assert len(flattened_zones) == 3
+ assert flattened_zones[0]["zone"] == 1
+ assert flattened_zones[0]["name"] == "Zone 1"
+ assert flattened_zones[0]["id_"] == "sensor-1"
+ assert flattened_zones[1]["zone"] == 2
+ assert flattened_zones[1]["name"] == "Zone 2"
+ assert flattened_zones[1]["id_"] == "sensor-2"
+ assert flattened_zones[2]["zone"] == 3
+ assert flattened_zones[2]["name"] == "Zone 3"
+ assert flattened_zones[2]["id_"] == "sensor-3"
+
+ # ADTPulseZones raises a ValueError if the value is not ADTPulseZoneData when setting a Zone
+ def test_raises_value_error_if_value_not_adtpulsezonedata(self):
+ """
+ Test that ADTPulseZones raises a ValueError if the value is not ADTPulseZoneData when setting a Zone
+ """
+ # Arrange
+ zones = ADTPulseZones()
+
+ # Act and Assert
+ with pytest.raises(ValueError):
+ zones[1] = "Invalid Zone Data"
+
+ # ADTPulseZones raises a ValueError when setting a Zone with a non-ADTPulseZoneData value
+ def test_raises_value_error_when_setting_zone_with_non_adtpulsezonedata_value(self):
+ """
+ Test that ADTPulseZones raises a ValueError when setting a Zone with a non-ADTPulseZoneData value
+ """
+ # Arrange
+ zones = ADTPulseZones()
+ key = 1
+ value = "Not ADTPulseZoneData"
+
+ # Act & Assert
+ with pytest.raises(ValueError):
+ zones[key] = value
+
+ # ADTPulseZones raises a ValueError when setting a Zone with a string value
+ def test_raises_value_error_when_setting_zone_with_string_value(self):
+ """
+ Test that ADTPulseZones raises a ValueError when setting a Zone with a string value
+ """
+ # Arrange
+ zones = ADTPulseZones()
+
+ # Act and Assert
+ with pytest.raises(ValueError):
+ zones[1] = "Zone 1"
+
+ # ADTPulseZones raises a ValueError when setting a Zone with a list value
+ def test_raises_value_error_when_setting_zone_with_list_value(self):
+ """
+ Test that ADTPulseZones raises a ValueError when setting a Zone with a list value
+ """
+ # Arrange
+ zones = ADTPulseZones()
+ key = 1
+ value = [1, 2, 3]
+
+ # Act & Assert
+ with pytest.raises(ValueError):
+ zones[key] = value
+
+ # ADTPulseZones sets default values for ADTPulseZoneData.id_ and name if not set when setting a Zone
+ def test_default_values_for_id_and_name(self):
+ """
+ Test that ADTPulseZones sets default values for ADTPulseZoneData.id_ and name if not set when setting a Zone
+ """
+ # Arrange
+ zones = ADTPulseZones()
+
+ # Act
+ zones[1] = ADTPulseZoneData("", "")
+
+ # Assert
+ assert zones[1].id_ == "sensor-1"
+ assert zones[1].name == "Sensor for Zone 1"
+
+ # Assigning a non-tuple value to a zone's tags raises a TypeCheckError
+ def test_invalid_zone_data_in_flattening(self):
+ """
+ Test that assigning a non-tuple value to a zone's tags raises a TypeCheckError
+ """
+ # Arrange
+ zones = ADTPulseZones()
+ zones[1] = ADTPulseZoneData("Zone 1", "sensor-1")
+ zones[2] = ADTPulseZoneData("Zone 2", "sensor-2")
+ zones[3] = ADTPulseZoneData("Zone 3", "sensor-3")
+ with pytest.raises(TypeCheckError):
+ zones[3].tags = "Invalid Tags"
+
+ # ADTPulseZones adds a zone when updating attributes from a complete dev_attr dictionary
+ def test_skips_incomplete_zone_data(self):
+ """
+ Test that ADTPulseZones adds a zone when updating attributes from a complete dev_attr dictionary
+ """
+ # Arrange
+ zones = ADTPulseZones()
+ dev_attr = {
+ "name": "Zone 1",
+ "type_model": "Window Sensor",
+ "zone": "1",
+ "status": "Online",
+ }
+
+ # Act
+ zones.update_zone_attributes(dev_attr)
+
+ # Assert
+ assert len(zones) == 1
+ assert zones[1].name == "Zone 1"
+ assert zones[1].id_ == "sensor-1"
+ assert zones[1].tags == ADT_NAME_TO_DEFAULT_TAGS["Window"]
+ assert zones[1].status == "Online"
+ assert zones[1].state == "Unknown"
+ assert zones[1].last_activity_timestamp == 0
+
+ # ADTPulseZones can handle unknown sensor types when updating zone attributes
+ def test_handle_unknown_sensor_types(self):
+ """
+ Test that ADTPulseZones can handle unknown sensor types when updating zone attributes
+ """
+ # Arrange
+ zones = ADTPulseZones()
+ dev_attr = {
+ "name": "Sensor 1",
+ "type_model": "Unknown Sensor Type",
+ "zone": "1",
+ "status": "Online",
+ }
+
+ # Act
+ zones.update_zone_attributes(dev_attr)
+
+ # Assert
+ assert len(zones) == 1
+ assert zones[1].name == "Sensor 1"
+ assert zones[1].id_ == "sensor-1"
+ assert zones[1].tags == ("sensor", "doorWindow")
+ assert zones[1].status == "Online"
+ assert zones[1].state == "Unknown"
+ assert zones[1].last_activity_timestamp == 0
+
+ # ADTPulseZones skips a zone whose status is "Unknown" when updating zone attributes
+ def test_missing_status_handling_fixed(self):
+ """
+ Test that ADTPulseZones does not add a zone when dev_attr has status "Unknown"
+ """
+ # Arrange
+ zones = ADTPulseZones()
+ dev_attr = {
+ "name": "Zone 1",
+ "type_model": "Window Sensor",
+ "zone": "1",
+ "status": "Unknown", # Added status key with value "Unknown"
+ }
+
+ # Act
+ zones.update_zone_attributes(dev_attr)
+
+ # Assert
+ assert len(zones) == 0
+
+ # Parsing an invalid datetime string raises ValueError before the timestamp is updated
+ def test_handle_invalid_datetime(self):
+ """
+ Test that an invalid datetime string raises ValueError and the zone's timestamp stays 0
+ """
+ # Arrange
+ zones = ADTPulseZones()
+ zones[1] = ADTPulseZoneData("name", "id")
+ key = 1
+ invalid_dt = "2022-13-01 12:00:00" # Invalid datetime format
+
+ # Act
+ with pytest.raises(ValueError):
+ dt = datetime.strptime(invalid_dt, "%Y-%m-%d %H:%M:%S")
+ zones.update_last_activity_timestamp(key, dt)
+
+ # Assert
+ assert zones[key].last_activity_timestamp == 0
+
+ # ADTPulseZones skips a zone whose name is "Unknown" when updating zone attributes
+ def test_handle_missing_name_when_updating_zone_attributes(self):
+ """
+ Test that ADTPulseZones does not add a zone when dev_attr has name "Unknown"
+ """
+ # Arrange
+ zones = ADTPulseZones()
+ dev_attr = {
+ "name": "Unknown",
+ "type_model": "Window Sensor",
+ "zone": "1",
+ "status": "Online",
+ }
+
+ # Act
+ zones.update_zone_attributes(dev_attr)
+
+ # Assert
+ assert len(zones) == 0
+
+ # ADTPulseZones adds a zone when dev_attr contains a valid zone number
+ def test_handle_missing_zone(self):
+ """
+ Test that ADTPulseZones adds a zone when dev_attr contains a valid zone number
+ """
+ # Arrange
+ zones = ADTPulseZones()
+ dev_attr = {
+ "name": "Sensor 1",
+ "type_model": "Window Sensor",
+ "zone": "1",
+ "status": "Online",
+ }
+
+ # Act
+ zones.update_zone_attributes(dev_attr)
+
+ # Assert
+ assert len(zones) == 1
+ assert zones[1].name == "Sensor 1"
+ assert zones[1].id_ == "sensor-1"
+ assert zones[1].tags == ADT_NAME_TO_DEFAULT_TAGS["Window"]
+ assert zones[1].status == "Online"
+ assert zones[1].state == "Unknown"
+ assert zones[1].last_activity_timestamp == 0