From 41c244e903fdf68661fa7bd22a0eb1cebd49a5c2 Mon Sep 17 00:00:00 2001 From: "bparodi@lezzo.org" Date: Sat, 14 Dec 2024 14:55:37 +0100 Subject: [PATCH] first commit --- .gitignore | 168 ++ LICENSE | 674 ++++++ bleak/__init__.py | 924 ++++++++ bleak/assigned_numbers.py | 38 + bleak/backends/__init__.py | 7 + bleak/backends/_manufacturers.py | 1936 +++++++++++++++++ bleak/backends/bluezdbus/__init__.py | 1 + .../bluezdbus/advertisement_monitor.py | 120 + bleak/backends/bluezdbus/characteristic.py | 107 + bleak/backends/bluezdbus/client.py | 993 +++++++++ bleak/backends/bluezdbus/defs.py | 166 ++ bleak/backends/bluezdbus/descriptor.py | 44 + bleak/backends/bluezdbus/manager.py | 1060 +++++++++ bleak/backends/bluezdbus/scanner.py | 286 +++ bleak/backends/bluezdbus/service.py | 44 + bleak/backends/bluezdbus/signals.py | 202 ++ bleak/backends/bluezdbus/utils.py | 69 + bleak/backends/bluezdbus/version.py | 62 + bleak/backends/characteristic.py | 133 ++ bleak/backends/client.py | 274 +++ .../corebluetooth/CentralManagerDelegate.py | 371 ++++ .../corebluetooth/PeripheralDelegate.py | 629 ++++++ bleak/backends/corebluetooth/__init__.py | 11 + .../backends/corebluetooth/characteristic.py | 121 ++ bleak/backends/corebluetooth/client.py | 389 ++++ bleak/backends/corebluetooth/descriptor.py | 43 + bleak/backends/corebluetooth/scanner.py | 185 ++ bleak/backends/corebluetooth/service.py | 42 + bleak/backends/corebluetooth/utils.py | 42 + bleak/backends/descriptor.py | 143 ++ bleak/backends/device.py | 73 + bleak/backends/p4android/__init__.py | 0 bleak/backends/p4android/characteristic.py | 96 + bleak/backends/p4android/client.py | 545 +++++ bleak/backends/p4android/defs.py | 91 + bleak/backends/p4android/descriptor.py | 37 + .../bleak/PythonBluetoothGattCallback.java | 84 + .../hbldh/bleak/PythonScanCallback.java | 41 + .../p4android/recipes/bleak/__init__.py | 58 + .../p4android/recipes/bleak/fix_setup.py | 10 + bleak/backends/p4android/scanner.py | 297 +++ bleak/backends/p4android/service.py | 36 + bleak/backends/p4android/utils.py | 94 + bleak/backends/scanner.py | 335 +++ bleak/backends/service.py | 214 ++ bleak/backends/winrt/__init__.py | 0 bleak/backends/winrt/characteristic.py | 142 ++ bleak/backends/winrt/client.py | 1134 ++++++++++ bleak/backends/winrt/descriptor.py | 43 + bleak/backends/winrt/scanner.py | 300 +++ bleak/backends/winrt/service.py | 42 + bleak/backends/winrt/util.py | 223 ++ bleak/exc.py | 184 ++ bleak/py.typed | 0 bleak/uuids.py | 1274 +++++++++++ dbus_fast/__init__.py | 82 + dbus_fast/__version__.py | 10 + dbus_fast/_private/__init__.py | 0 dbus_fast/_private/_cython_compat.py | 12 + dbus_fast/_private/address.pxd | 15 + dbus_fast/_private/address.py | 116 + dbus_fast/_private/constants.py | 18 + dbus_fast/_private/marshaller.pxd | 110 + dbus_fast/_private/marshaller.py | 229 ++ dbus_fast/_private/unmarshaller.pxd | 241 ++ dbus_fast/_private/unmarshaller.py | 811 +++++++ dbus_fast/_private/util.py | 172 ++ dbus_fast/aio/__init__.py | 2 + dbus_fast/aio/message_bus.py | 553 +++++ dbus_fast/aio/message_reader.pxd | 13 + dbus_fast/aio/message_reader.py | 45 + dbus_fast/aio/proxy_object.py | 205 ++ dbus_fast/auth.py | 127 ++ dbus_fast/constants.py | 135 ++ dbus_fast/errors.py | 84 + dbus_fast/glib/__init__.py | 2 + dbus_fast/glib/message_bus.py | 513 +++++ dbus_fast/glib/proxy_object.py | 320 +++ dbus_fast/introspection.py | 597 +++++ dbus_fast/main.py | 2 + dbus_fast/message.pxd | 56 + dbus_fast/message.py | 319 +++ dbus_fast/message_bus.pxd | 70 + dbus_fast/message_bus.py | 1299 
+++++++++++ dbus_fast/proxy_object.py | 341 +++ dbus_fast/py.typed | 0 dbus_fast/send_reply.py | 59 + dbus_fast/service.pxd | 51 + dbus_fast/service.py | 659 ++++++ dbus_fast/signature.pxd | 26 + dbus_fast/signature.py | 456 ++++ dbus_fast/unpack.pxd | 13 + dbus_fast/unpack.py | 24 + dbus_fast/validators.py | 199 ++ idasen/cli.py | 330 +++ idasen/desk.py | 466 ++++ macparse/macaddress.py | 389 ++++ main.py | 78 + voluptuous/__init__.py | 88 + voluptuous/error.py | 219 ++ voluptuous/humanize.py | 57 + voluptuous/py.typed | 0 voluptuous/schema_builder.py | 1315 +++++++++++ voluptuous/util.py | 149 ++ voluptuous/validators.py | 1248 +++++++++++ 105 files changed, 26962 insertions(+) create mode 100644 .gitignore create mode 100644 LICENSE create mode 100644 bleak/__init__.py create mode 100644 bleak/assigned_numbers.py create mode 100644 bleak/backends/__init__.py create mode 100644 bleak/backends/_manufacturers.py create mode 100644 bleak/backends/bluezdbus/__init__.py create mode 100644 bleak/backends/bluezdbus/advertisement_monitor.py create mode 100644 bleak/backends/bluezdbus/characteristic.py create mode 100644 bleak/backends/bluezdbus/client.py create mode 100644 bleak/backends/bluezdbus/defs.py create mode 100644 bleak/backends/bluezdbus/descriptor.py create mode 100644 bleak/backends/bluezdbus/manager.py create mode 100644 bleak/backends/bluezdbus/scanner.py create mode 100644 bleak/backends/bluezdbus/service.py create mode 100644 bleak/backends/bluezdbus/signals.py create mode 100644 bleak/backends/bluezdbus/utils.py create mode 100644 bleak/backends/bluezdbus/version.py create mode 100644 bleak/backends/characteristic.py create mode 100644 bleak/backends/client.py create mode 100644 bleak/backends/corebluetooth/CentralManagerDelegate.py create mode 100644 bleak/backends/corebluetooth/PeripheralDelegate.py create mode 100644 bleak/backends/corebluetooth/__init__.py create mode 100644 bleak/backends/corebluetooth/characteristic.py create mode 100644 bleak/backends/corebluetooth/client.py create mode 100644 bleak/backends/corebluetooth/descriptor.py create mode 100644 bleak/backends/corebluetooth/scanner.py create mode 100644 bleak/backends/corebluetooth/service.py create mode 100644 bleak/backends/corebluetooth/utils.py create mode 100644 bleak/backends/descriptor.py create mode 100644 bleak/backends/device.py create mode 100644 bleak/backends/p4android/__init__.py create mode 100644 bleak/backends/p4android/characteristic.py create mode 100644 bleak/backends/p4android/client.py create mode 100644 bleak/backends/p4android/defs.py create mode 100644 bleak/backends/p4android/descriptor.py create mode 100644 bleak/backends/p4android/java/com/github/hbldh/bleak/PythonBluetoothGattCallback.java create mode 100644 bleak/backends/p4android/java/com/github/hbldh/bleak/PythonScanCallback.java create mode 100644 bleak/backends/p4android/recipes/bleak/__init__.py create mode 100644 bleak/backends/p4android/recipes/bleak/fix_setup.py create mode 100644 bleak/backends/p4android/scanner.py create mode 100644 bleak/backends/p4android/service.py create mode 100644 bleak/backends/p4android/utils.py create mode 100644 bleak/backends/scanner.py create mode 100644 bleak/backends/service.py create mode 100644 bleak/backends/winrt/__init__.py create mode 100644 bleak/backends/winrt/characteristic.py create mode 100644 bleak/backends/winrt/client.py create mode 100644 bleak/backends/winrt/descriptor.py create mode 100644 bleak/backends/winrt/scanner.py create mode 100644 bleak/backends/winrt/service.py 
create mode 100644 bleak/backends/winrt/util.py create mode 100644 bleak/exc.py create mode 100644 bleak/py.typed create mode 100644 bleak/uuids.py create mode 100644 dbus_fast/__init__.py create mode 100644 dbus_fast/__version__.py create mode 100644 dbus_fast/_private/__init__.py create mode 100644 dbus_fast/_private/_cython_compat.py create mode 100644 dbus_fast/_private/address.pxd create mode 100644 dbus_fast/_private/address.py create mode 100644 dbus_fast/_private/constants.py create mode 100644 dbus_fast/_private/marshaller.pxd create mode 100644 dbus_fast/_private/marshaller.py create mode 100644 dbus_fast/_private/unmarshaller.pxd create mode 100644 dbus_fast/_private/unmarshaller.py create mode 100644 dbus_fast/_private/util.py create mode 100644 dbus_fast/aio/__init__.py create mode 100644 dbus_fast/aio/message_bus.py create mode 100644 dbus_fast/aio/message_reader.pxd create mode 100644 dbus_fast/aio/message_reader.py create mode 100644 dbus_fast/aio/proxy_object.py create mode 100644 dbus_fast/auth.py create mode 100644 dbus_fast/constants.py create mode 100644 dbus_fast/errors.py create mode 100644 dbus_fast/glib/__init__.py create mode 100644 dbus_fast/glib/message_bus.py create mode 100644 dbus_fast/glib/proxy_object.py create mode 100644 dbus_fast/introspection.py create mode 100644 dbus_fast/main.py create mode 100644 dbus_fast/message.pxd create mode 100644 dbus_fast/message.py create mode 100644 dbus_fast/message_bus.pxd create mode 100644 dbus_fast/message_bus.py create mode 100644 dbus_fast/proxy_object.py create mode 100644 dbus_fast/py.typed create mode 100644 dbus_fast/send_reply.py create mode 100644 dbus_fast/service.pxd create mode 100644 dbus_fast/service.py create mode 100644 dbus_fast/signature.pxd create mode 100644 dbus_fast/signature.py create mode 100644 dbus_fast/unpack.pxd create mode 100644 dbus_fast/unpack.py create mode 100644 dbus_fast/validators.py create mode 100755 idasen/cli.py create mode 100644 idasen/desk.py create mode 100644 macparse/macaddress.py create mode 100644 main.py create mode 100644 voluptuous/__init__.py create mode 100644 voluptuous/error.py create mode 100644 voluptuous/humanize.py create mode 100644 voluptuous/py.typed create mode 100644 voluptuous/schema_builder.py create mode 100644 voluptuous/util.py create mode 100644 voluptuous/validators.py diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..c2fb773 --- /dev/null +++ b/.gitignore @@ -0,0 +1,168 @@ +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] +*$py.class + +# C extensions +*.so + +# Distribution / packaging +.Python +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +wheels/ +share/python-wheels/ +*.egg-info/ +.installed.cfg +*.egg +MANIFEST + +# PyInstaller +# Usually these files are written by a python script from a template +# before PyInstaller builds the exe, so as to inject date/other infos into it. 
+*.manifest +*.spec + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt + +# Unit test / coverage reports +htmlcov/ +.tox/ +.nox/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*.cover +*.py,cover +.hypothesis/ +.pytest_cache/ +cover/ + +# Translations +*.mo +*.pot + +# Django stuff: +*.log +local_settings.py +db.sqlite3 +db.sqlite3-journal + +# Flask stuff: +instance/ +.webassets-cache + +# Scrapy stuff: +.scrapy + +# Sphinx documentation +docs/_build/ + +# PyBuilder +.pybuilder/ +target/ + +# Jupyter Notebook +.ipynb_checkpoints + +# IPython +profile_default/ +ipython_config.py + +# pyenv +# For a library or package, you might want to ignore these files since the code is +# intended to run in multiple environments; otherwise, check them in: +# .python-version + +# pipenv +# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. +# However, in case of collaboration, if having platform-specific dependencies or dependencies +# having no cross-platform support, pipenv may install dependencies that don't work, or not +# install all needed dependencies. +#Pipfile.lock + +# UV +# Similar to Pipfile.lock, it is generally recommended to include uv.lock in version control. +# This is especially recommended for binary packages to ensure reproducibility, and is more +# commonly ignored for libraries. +#uv.lock + +# poetry +# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control. +# This is especially recommended for binary packages to ensure reproducibility, and is more +# commonly ignored for libraries. +# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control +#poetry.lock + +# pdm +# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control. +#pdm.lock +# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it +# in version control. +# https://pdm.fming.dev/latest/usage/project/#working-with-version-control +.pdm.toml +.pdm-python +.pdm-build/ + +# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm +__pypackages__/ + +# Celery stuff +celerybeat-schedule +celerybeat.pid + +# SageMath parsed files +*.sage.py + +# Environments +.env +.venv +env/ +venv/ +ENV/ +env.bak/ +venv.bak/ + +# Spyder project settings +.spyderproject +.spyproject + +# Rope project settings +.ropeproject + +# mkdocs documentation +/site + +# mypy +.mypy_cache/ +.dmypy.json +dmypy.json + +# Pyre type checker +.pyre/ + +# pytype static type analyzer +.pytype/ + +# Cython debug symbols +cython_debug/ + +# PyCharm +# JetBrains specific template is maintained in a separate JetBrains.gitignore that can +# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore +# and can be added to the global gitignore or merged into this file. For a more nuclear +# option (not recommended) you can uncomment the following to ignore the entire idea folder. +#.idea/ diff --git a/LICENSE b/LICENSE new file mode 100644 index 0000000..94a9ed0 --- /dev/null +++ b/LICENSE @@ -0,0 +1,674 @@ + GNU GENERAL PUBLIC LICENSE + Version 3, 29 June 2007 + + Copyright (C) 2007 Free Software Foundation, Inc. + Everyone is permitted to copy and distribute verbatim copies + of this license document, but changing it is not allowed. + + Preamble + + The GNU General Public License is a free, copyleft license for +software and other kinds of works. 
+ + The licenses for most software and other practical works are designed +to take away your freedom to share and change the works. By contrast, +the GNU General Public License is intended to guarantee your freedom to +share and change all versions of a program--to make sure it remains free +software for all its users. We, the Free Software Foundation, use the +GNU General Public License for most of our software; it applies also to +any other work released this way by its authors. You can apply it to +your programs, too. + + When we speak of free software, we are referring to freedom, not +price. Our General Public Licenses are designed to make sure that you +have the freedom to distribute copies of free software (and charge for +them if you wish), that you receive source code or can get it if you +want it, that you can change the software or use pieces of it in new +free programs, and that you know you can do these things. + + To protect your rights, we need to prevent others from denying you +these rights or asking you to surrender the rights. Therefore, you have +certain responsibilities if you distribute copies of the software, or if +you modify it: responsibilities to respect the freedom of others. + + For example, if you distribute copies of such a program, whether +gratis or for a fee, you must pass on to the recipients the same +freedoms that you received. You must make sure that they, too, receive +or can get the source code. And you must show them these terms so they +know their rights. + + Developers that use the GNU GPL protect your rights with two steps: +(1) assert copyright on the software, and (2) offer you this License +giving you legal permission to copy, distribute and/or modify it. + + For the developers' and authors' protection, the GPL clearly explains +that there is no warranty for this free software. For both users' and +authors' sake, the GPL requires that modified versions be marked as +changed, so that their problems will not be attributed erroneously to +authors of previous versions. + + Some devices are designed to deny users access to install or run +modified versions of the software inside them, although the manufacturer +can do so. This is fundamentally incompatible with the aim of +protecting users' freedom to change the software. The systematic +pattern of such abuse occurs in the area of products for individuals to +use, which is precisely where it is most unacceptable. Therefore, we +have designed this version of the GPL to prohibit the practice for those +products. If such problems arise substantially in other domains, we +stand ready to extend this provision to those domains in future versions +of the GPL, as needed to protect the freedom of users. + + Finally, every program is threatened constantly by software patents. +States should not allow patents to restrict development and use of +software on general-purpose computers, but in those that do, we wish to +avoid the special danger that patents applied to a free program could +make it effectively proprietary. To prevent this, the GPL assures that +patents cannot be used to render the program non-free. + + The precise terms and conditions for copying, distribution and +modification follow. + + TERMS AND CONDITIONS + + 0. Definitions. + + "This License" refers to version 3 of the GNU General Public License. + + "Copyright" also means copyright-like laws that apply to other kinds of +works, such as semiconductor masks. + + "The Program" refers to any copyrightable work licensed under this +License. 
Each licensee is addressed as "you". "Licensees" and +"recipients" may be individuals or organizations. + + To "modify" a work means to copy from or adapt all or part of the work +in a fashion requiring copyright permission, other than the making of an +exact copy. The resulting work is called a "modified version" of the +earlier work or a work "based on" the earlier work. + + A "covered work" means either the unmodified Program or a work based +on the Program. + + To "propagate" a work means to do anything with it that, without +permission, would make you directly or secondarily liable for +infringement under applicable copyright law, except executing it on a +computer or modifying a private copy. Propagation includes copying, +distribution (with or without modification), making available to the +public, and in some countries other activities as well. + + To "convey" a work means any kind of propagation that enables other +parties to make or receive copies. Mere interaction with a user through +a computer network, with no transfer of a copy, is not conveying. + + An interactive user interface displays "Appropriate Legal Notices" +to the extent that it includes a convenient and prominently visible +feature that (1) displays an appropriate copyright notice, and (2) +tells the user that there is no warranty for the work (except to the +extent that warranties are provided), that licensees may convey the +work under this License, and how to view a copy of this License. If +the interface presents a list of user commands or options, such as a +menu, a prominent item in the list meets this criterion. + + 1. Source Code. + + The "source code" for a work means the preferred form of the work +for making modifications to it. "Object code" means any non-source +form of a work. + + A "Standard Interface" means an interface that either is an official +standard defined by a recognized standards body, or, in the case of +interfaces specified for a particular programming language, one that +is widely used among developers working in that language. + + The "System Libraries" of an executable work include anything, other +than the work as a whole, that (a) is included in the normal form of +packaging a Major Component, but which is not part of that Major +Component, and (b) serves only to enable use of the work with that +Major Component, or to implement a Standard Interface for which an +implementation is available to the public in source code form. A +"Major Component", in this context, means a major essential component +(kernel, window system, and so on) of the specific operating system +(if any) on which the executable work runs, or a compiler used to +produce the work, or an object code interpreter used to run it. + + The "Corresponding Source" for a work in object code form means all +the source code needed to generate, install, and (for an executable +work) run the object code and to modify the work, including scripts to +control those activities. However, it does not include the work's +System Libraries, or general-purpose tools or generally available free +programs which are used unmodified in performing those activities but +which are not part of the work. 
For example, Corresponding Source +includes interface definition files associated with source files for +the work, and the source code for shared libraries and dynamically +linked subprograms that the work is specifically designed to require, +such as by intimate data communication or control flow between those +subprograms and other parts of the work. + + The Corresponding Source need not include anything that users +can regenerate automatically from other parts of the Corresponding +Source. + + The Corresponding Source for a work in source code form is that +same work. + + 2. Basic Permissions. + + All rights granted under this License are granted for the term of +copyright on the Program, and are irrevocable provided the stated +conditions are met. This License explicitly affirms your unlimited +permission to run the unmodified Program. The output from running a +covered work is covered by this License only if the output, given its +content, constitutes a covered work. This License acknowledges your +rights of fair use or other equivalent, as provided by copyright law. + + You may make, run and propagate covered works that you do not +convey, without conditions so long as your license otherwise remains +in force. You may convey covered works to others for the sole purpose +of having them make modifications exclusively for you, or provide you +with facilities for running those works, provided that you comply with +the terms of this License in conveying all material for which you do +not control copyright. Those thus making or running the covered works +for you must do so exclusively on your behalf, under your direction +and control, on terms that prohibit them from making any copies of +your copyrighted material outside their relationship with you. + + Conveying under any other circumstances is permitted solely under +the conditions stated below. Sublicensing is not allowed; section 10 +makes it unnecessary. + + 3. Protecting Users' Legal Rights From Anti-Circumvention Law. + + No covered work shall be deemed part of an effective technological +measure under any applicable law fulfilling obligations under article +11 of the WIPO copyright treaty adopted on 20 December 1996, or +similar laws prohibiting or restricting circumvention of such +measures. + + When you convey a covered work, you waive any legal power to forbid +circumvention of technological measures to the extent such circumvention +is effected by exercising rights under this License with respect to +the covered work, and you disclaim any intention to limit operation or +modification of the work as a means of enforcing, against the work's +users, your or third parties' legal rights to forbid circumvention of +technological measures. + + 4. Conveying Verbatim Copies. + + You may convey verbatim copies of the Program's source code as you +receive it, in any medium, provided that you conspicuously and +appropriately publish on each copy an appropriate copyright notice; +keep intact all notices stating that this License and any +non-permissive terms added in accord with section 7 apply to the code; +keep intact all notices of the absence of any warranty; and give all +recipients a copy of this License along with the Program. + + You may charge any price or no price for each copy that you convey, +and you may offer support or warranty protection for a fee. + + 5. Conveying Modified Source Versions. 
+ + You may convey a work based on the Program, or the modifications to +produce it from the Program, in the form of source code under the +terms of section 4, provided that you also meet all of these conditions: + + a) The work must carry prominent notices stating that you modified + it, and giving a relevant date. + + b) The work must carry prominent notices stating that it is + released under this License and any conditions added under section + 7. This requirement modifies the requirement in section 4 to + "keep intact all notices". + + c) You must license the entire work, as a whole, under this + License to anyone who comes into possession of a copy. This + License will therefore apply, along with any applicable section 7 + additional terms, to the whole of the work, and all its parts, + regardless of how they are packaged. This License gives no + permission to license the work in any other way, but it does not + invalidate such permission if you have separately received it. + + d) If the work has interactive user interfaces, each must display + Appropriate Legal Notices; however, if the Program has interactive + interfaces that do not display Appropriate Legal Notices, your + work need not make them do so. + + A compilation of a covered work with other separate and independent +works, which are not by their nature extensions of the covered work, +and which are not combined with it such as to form a larger program, +in or on a volume of a storage or distribution medium, is called an +"aggregate" if the compilation and its resulting copyright are not +used to limit the access or legal rights of the compilation's users +beyond what the individual works permit. Inclusion of a covered work +in an aggregate does not cause this License to apply to the other +parts of the aggregate. + + 6. Conveying Non-Source Forms. + + You may convey a covered work in object code form under the terms +of sections 4 and 5, provided that you also convey the +machine-readable Corresponding Source under the terms of this License, +in one of these ways: + + a) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by the + Corresponding Source fixed on a durable physical medium + customarily used for software interchange. + + b) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by a + written offer, valid for at least three years and valid for as + long as you offer spare parts or customer support for that product + model, to give anyone who possesses the object code either (1) a + copy of the Corresponding Source for all the software in the + product that is covered by this License, on a durable physical + medium customarily used for software interchange, for a price no + more than your reasonable cost of physically performing this + conveying of source, or (2) access to copy the + Corresponding Source from a network server at no charge. + + c) Convey individual copies of the object code with a copy of the + written offer to provide the Corresponding Source. This + alternative is allowed only occasionally and noncommercially, and + only if you received the object code with such an offer, in accord + with subsection 6b. + + d) Convey the object code by offering access from a designated + place (gratis or for a charge), and offer equivalent access to the + Corresponding Source in the same way through the same place at no + further charge. 
You need not require recipients to copy the + Corresponding Source along with the object code. If the place to + copy the object code is a network server, the Corresponding Source + may be on a different server (operated by you or a third party) + that supports equivalent copying facilities, provided you maintain + clear directions next to the object code saying where to find the + Corresponding Source. Regardless of what server hosts the + Corresponding Source, you remain obligated to ensure that it is + available for as long as needed to satisfy these requirements. + + e) Convey the object code using peer-to-peer transmission, provided + you inform other peers where the object code and Corresponding + Source of the work are being offered to the general public at no + charge under subsection 6d. + + A separable portion of the object code, whose source code is excluded +from the Corresponding Source as a System Library, need not be +included in conveying the object code work. + + A "User Product" is either (1) a "consumer product", which means any +tangible personal property which is normally used for personal, family, +or household purposes, or (2) anything designed or sold for incorporation +into a dwelling. In determining whether a product is a consumer product, +doubtful cases shall be resolved in favor of coverage. For a particular +product received by a particular user, "normally used" refers to a +typical or common use of that class of product, regardless of the status +of the particular user or of the way in which the particular user +actually uses, or expects or is expected to use, the product. A product +is a consumer product regardless of whether the product has substantial +commercial, industrial or non-consumer uses, unless such uses represent +the only significant mode of use of the product. + + "Installation Information" for a User Product means any methods, +procedures, authorization keys, or other information required to install +and execute modified versions of a covered work in that User Product from +a modified version of its Corresponding Source. The information must +suffice to ensure that the continued functioning of the modified object +code is in no case prevented or interfered with solely because +modification has been made. + + If you convey an object code work under this section in, or with, or +specifically for use in, a User Product, and the conveying occurs as +part of a transaction in which the right of possession and use of the +User Product is transferred to the recipient in perpetuity or for a +fixed term (regardless of how the transaction is characterized), the +Corresponding Source conveyed under this section must be accompanied +by the Installation Information. But this requirement does not apply +if neither you nor any third party retains the ability to install +modified object code on the User Product (for example, the work has +been installed in ROM). + + The requirement to provide Installation Information does not include a +requirement to continue to provide support service, warranty, or updates +for a work that has been modified or installed by the recipient, or for +the User Product in which it has been modified or installed. Access to a +network may be denied when the modification itself materially and +adversely affects the operation of the network or violates the rules and +protocols for communication across the network. 
+ + Corresponding Source conveyed, and Installation Information provided, +in accord with this section must be in a format that is publicly +documented (and with an implementation available to the public in +source code form), and must require no special password or key for +unpacking, reading or copying. + + 7. Additional Terms. + + "Additional permissions" are terms that supplement the terms of this +License by making exceptions from one or more of its conditions. +Additional permissions that are applicable to the entire Program shall +be treated as though they were included in this License, to the extent +that they are valid under applicable law. If additional permissions +apply only to part of the Program, that part may be used separately +under those permissions, but the entire Program remains governed by +this License without regard to the additional permissions. + + When you convey a copy of a covered work, you may at your option +remove any additional permissions from that copy, or from any part of +it. (Additional permissions may be written to require their own +removal in certain cases when you modify the work.) You may place +additional permissions on material, added by you to a covered work, +for which you have or can give appropriate copyright permission. + + Notwithstanding any other provision of this License, for material you +add to a covered work, you may (if authorized by the copyright holders of +that material) supplement the terms of this License with terms: + + a) Disclaiming warranty or limiting liability differently from the + terms of sections 15 and 16 of this License; or + + b) Requiring preservation of specified reasonable legal notices or + author attributions in that material or in the Appropriate Legal + Notices displayed by works containing it; or + + c) Prohibiting misrepresentation of the origin of that material, or + requiring that modified versions of such material be marked in + reasonable ways as different from the original version; or + + d) Limiting the use for publicity purposes of names of licensors or + authors of the material; or + + e) Declining to grant rights under trademark law for use of some + trade names, trademarks, or service marks; or + + f) Requiring indemnification of licensors and authors of that + material by anyone who conveys the material (or modified versions of + it) with contractual assumptions of liability to the recipient, for + any liability that these contractual assumptions directly impose on + those licensors and authors. + + All other non-permissive additional terms are considered "further +restrictions" within the meaning of section 10. If the Program as you +received it, or any part of it, contains a notice stating that it is +governed by this License along with a term that is a further +restriction, you may remove that term. If a license document contains +a further restriction but permits relicensing or conveying under this +License, you may add to a covered work material governed by the terms +of that license document, provided that the further restriction does +not survive such relicensing or conveying. + + If you add terms to a covered work in accord with this section, you +must place, in the relevant source files, a statement of the +additional terms that apply to those files, or a notice indicating +where to find the applicable terms. + + Additional terms, permissive or non-permissive, may be stated in the +form of a separately written license, or stated as exceptions; +the above requirements apply either way. 
+ + 8. Termination. + + You may not propagate or modify a covered work except as expressly +provided under this License. Any attempt otherwise to propagate or +modify it is void, and will automatically terminate your rights under +this License (including any patent licenses granted under the third +paragraph of section 11). + + However, if you cease all violation of this License, then your +license from a particular copyright holder is reinstated (a) +provisionally, unless and until the copyright holder explicitly and +finally terminates your license, and (b) permanently, if the copyright +holder fails to notify you of the violation by some reasonable means +prior to 60 days after the cessation. + + Moreover, your license from a particular copyright holder is +reinstated permanently if the copyright holder notifies you of the +violation by some reasonable means, this is the first time you have +received notice of violation of this License (for any work) from that +copyright holder, and you cure the violation prior to 30 days after +your receipt of the notice. + + Termination of your rights under this section does not terminate the +licenses of parties who have received copies or rights from you under +this License. If your rights have been terminated and not permanently +reinstated, you do not qualify to receive new licenses for the same +material under section 10. + + 9. Acceptance Not Required for Having Copies. + + You are not required to accept this License in order to receive or +run a copy of the Program. Ancillary propagation of a covered work +occurring solely as a consequence of using peer-to-peer transmission +to receive a copy likewise does not require acceptance. However, +nothing other than this License grants you permission to propagate or +modify any covered work. These actions infringe copyright if you do +not accept this License. Therefore, by modifying or propagating a +covered work, you indicate your acceptance of this License to do so. + + 10. Automatic Licensing of Downstream Recipients. + + Each time you convey a covered work, the recipient automatically +receives a license from the original licensors, to run, modify and +propagate that work, subject to this License. You are not responsible +for enforcing compliance by third parties with this License. + + An "entity transaction" is a transaction transferring control of an +organization, or substantially all assets of one, or subdividing an +organization, or merging organizations. If propagation of a covered +work results from an entity transaction, each party to that +transaction who receives a copy of the work also receives whatever +licenses to the work the party's predecessor in interest had or could +give under the previous paragraph, plus a right to possession of the +Corresponding Source of the work from the predecessor in interest, if +the predecessor has it or can get it with reasonable efforts. + + You may not impose any further restrictions on the exercise of the +rights granted or affirmed under this License. For example, you may +not impose a license fee, royalty, or other charge for exercise of +rights granted under this License, and you may not initiate litigation +(including a cross-claim or counterclaim in a lawsuit) alleging that +any patent claim is infringed by making, using, selling, offering for +sale, or importing the Program or any portion of it. + + 11. Patents. 
+ + A "contributor" is a copyright holder who authorizes use under this +License of the Program or a work on which the Program is based. The +work thus licensed is called the contributor's "contributor version". + + A contributor's "essential patent claims" are all patent claims +owned or controlled by the contributor, whether already acquired or +hereafter acquired, that would be infringed by some manner, permitted +by this License, of making, using, or selling its contributor version, +but do not include claims that would be infringed only as a +consequence of further modification of the contributor version. For +purposes of this definition, "control" includes the right to grant +patent sublicenses in a manner consistent with the requirements of +this License. + + Each contributor grants you a non-exclusive, worldwide, royalty-free +patent license under the contributor's essential patent claims, to +make, use, sell, offer for sale, import and otherwise run, modify and +propagate the contents of its contributor version. + + In the following three paragraphs, a "patent license" is any express +agreement or commitment, however denominated, not to enforce a patent +(such as an express permission to practice a patent or covenant not to +sue for patent infringement). To "grant" such a patent license to a +party means to make such an agreement or commitment not to enforce a +patent against the party. + + If you convey a covered work, knowingly relying on a patent license, +and the Corresponding Source of the work is not available for anyone +to copy, free of charge and under the terms of this License, through a +publicly available network server or other readily accessible means, +then you must either (1) cause the Corresponding Source to be so +available, or (2) arrange to deprive yourself of the benefit of the +patent license for this particular work, or (3) arrange, in a manner +consistent with the requirements of this License, to extend the patent +license to downstream recipients. "Knowingly relying" means you have +actual knowledge that, but for the patent license, your conveying the +covered work in a country, or your recipient's use of the covered work +in a country, would infringe one or more identifiable patents in that +country that you have reason to believe are valid. + + If, pursuant to or in connection with a single transaction or +arrangement, you convey, or propagate by procuring conveyance of, a +covered work, and grant a patent license to some of the parties +receiving the covered work authorizing them to use, propagate, modify +or convey a specific copy of the covered work, then the patent license +you grant is automatically extended to all recipients of the covered +work and works based on it. + + A patent license is "discriminatory" if it does not include within +the scope of its coverage, prohibits the exercise of, or is +conditioned on the non-exercise of one or more of the rights that are +specifically granted under this License. 
You may not convey a covered +work if you are a party to an arrangement with a third party that is +in the business of distributing software, under which you make payment +to the third party based on the extent of your activity of conveying +the work, and under which the third party grants, to any of the +parties who would receive the covered work from you, a discriminatory +patent license (a) in connection with copies of the covered work +conveyed by you (or copies made from those copies), or (b) primarily +for and in connection with specific products or compilations that +contain the covered work, unless you entered into that arrangement, +or that patent license was granted, prior to 28 March 2007. + + Nothing in this License shall be construed as excluding or limiting +any implied license or other defenses to infringement that may +otherwise be available to you under applicable patent law. + + 12. No Surrender of Others' Freedom. + + If conditions are imposed on you (whether by court order, agreement or +otherwise) that contradict the conditions of this License, they do not +excuse you from the conditions of this License. If you cannot convey a +covered work so as to satisfy simultaneously your obligations under this +License and any other pertinent obligations, then as a consequence you may +not convey it at all. For example, if you agree to terms that obligate you +to collect a royalty for further conveying from those to whom you convey +the Program, the only way you could satisfy both those terms and this +License would be to refrain entirely from conveying the Program. + + 13. Use with the GNU Affero General Public License. + + Notwithstanding any other provision of this License, you have +permission to link or combine any covered work with a work licensed +under version 3 of the GNU Affero General Public License into a single +combined work, and to convey the resulting work. The terms of this +License will continue to apply to the part which is the covered work, +but the special requirements of the GNU Affero General Public License, +section 13, concerning interaction through a network will apply to the +combination as such. + + 14. Revised Versions of this License. + + The Free Software Foundation may publish revised and/or new versions of +the GNU General Public License from time to time. Such new versions will +be similar in spirit to the present version, but may differ in detail to +address new problems or concerns. + + Each version is given a distinguishing version number. If the +Program specifies that a certain numbered version of the GNU General +Public License "or any later version" applies to it, you have the +option of following the terms and conditions either of that numbered +version or of any later version published by the Free Software +Foundation. If the Program does not specify a version number of the +GNU General Public License, you may choose any version ever published +by the Free Software Foundation. + + If the Program specifies that a proxy can decide which future +versions of the GNU General Public License can be used, that proxy's +public statement of acceptance of a version permanently authorizes you +to choose that version for the Program. + + Later license versions may give you additional or different +permissions. However, no additional obligations are imposed on any +author or copyright holder as a result of your choosing to follow a +later version. + + 15. Disclaimer of Warranty. 
+ + THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY +APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT +HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY +OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, +THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR +PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM +IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF +ALL NECESSARY SERVICING, REPAIR OR CORRECTION. + + 16. Limitation of Liability. + + IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING +WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS +THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY +GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE +USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF +DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD +PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), +EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF +SUCH DAMAGES. + + 17. Interpretation of Sections 15 and 16. + + If the disclaimer of warranty and limitation of liability provided +above cannot be given local legal effect according to their terms, +reviewing courts shall apply local law that most closely approximates +an absolute waiver of all civil liability in connection with the +Program, unless a warranty or assumption of liability accompanies a +copy of the Program in return for a fee. + + END OF TERMS AND CONDITIONS + + How to Apply These Terms to Your New Programs + + If you develop a new program, and you want it to be of the greatest +possible use to the public, the best way to achieve this is to make it +free software which everyone can redistribute and change under these terms. + + To do so, attach the following notices to the program. It is safest +to attach them to the start of each source file to most effectively +state the exclusion of warranty; and each file should have at least +the "copyright" line and a pointer to where the full notice is found. + + <one line to give the program's name and a brief idea of what it does.> + Copyright (C) <year> <name of author> + + This program is free software: you can redistribute it and/or modify + it under the terms of the GNU General Public License as published by + the Free Software Foundation, either version 3 of the License, or + (at your option) any later version. + + This program is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + GNU General Public License for more details. + + You should have received a copy of the GNU General Public License + along with this program. If not, see <https://www.gnu.org/licenses/>. + +Also add information on how to contact you by electronic and paper mail. + + If the program does terminal interaction, make it output a short +notice like this when it starts in an interactive mode: + + <program> Copyright (C) <year> <name of author> + This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'. + This is free software, and you are welcome to redistribute it + under certain conditions; type `show c' for details. + +The hypothetical commands `show w' and `show c' should show the appropriate +parts of the General Public License. Of course, your program's commands +might be different; for a GUI interface, you would use an "about box". 
+ + You should also get your employer (if you work as a programmer) or school, +if any, to sign a "copyright disclaimer" for the program, if necessary. +For more information on this, and how to apply and follow the GNU GPL, see +. + + The GNU General Public License does not permit incorporating your program +into proprietary programs. If your program is a subroutine library, you +may consider it more useful to permit linking proprietary applications with +the library. If this is what you want to do, use the GNU Lesser General +Public License instead of this License. But first, please read +. diff --git a/bleak/__init__.py b/bleak/__init__.py new file mode 100644 index 0000000..68e1781 --- /dev/null +++ b/bleak/__init__.py @@ -0,0 +1,924 @@ +# -*- coding: utf-8 -*- + +"""Top-level package for bleak.""" + +from __future__ import annotations + +__author__ = """Henrik Blidh""" +__email__ = "henrik.blidh@gmail.com" + +import asyncio +import functools +import inspect +import logging +import os +import sys +import uuid +from types import TracebackType +from typing import ( + TYPE_CHECKING, + AsyncGenerator, + Awaitable, + Callable, + Dict, + Iterable, + List, + Literal, + Optional, + Set, + Tuple, + Type, + TypedDict, + Union, + overload, +) +from warnings import warn + +if sys.version_info < (3, 12): + from typing_extensions import Buffer +else: + from collections.abc import Buffer + +if sys.version_info < (3, 11): + from async_timeout import timeout as async_timeout + from typing_extensions import Unpack +else: + from asyncio import timeout as async_timeout + from typing import Unpack + +from .backends.characteristic import BleakGATTCharacteristic +from .backends.client import BaseBleakClient, get_platform_client_backend_type +from .backends.device import BLEDevice +from .backends.scanner import ( + AdvertisementData, + AdvertisementDataCallback, + AdvertisementDataFilter, + BaseBleakScanner, + get_platform_scanner_backend_type, +) +from .backends.service import BleakGATTServiceCollection +from .exc import BleakCharacteristicNotFoundError, BleakError +from .uuids import normalize_uuid_str + +if TYPE_CHECKING: + from .backends.bluezdbus.scanner import BlueZScannerArgs + from .backends.corebluetooth.scanner import CBScannerArgs + from .backends.winrt.client import WinRTClientArgs + + +_logger = logging.getLogger(__name__) +_logger.addHandler(logging.NullHandler()) +if bool(os.environ.get("BLEAK_LOGGING", False)): + FORMAT = "%(asctime)-15s %(name)-8s %(threadName)s %(levelname)s: %(message)s" + handler = logging.StreamHandler(sys.stdout) + handler.setLevel(logging.DEBUG) + handler.setFormatter(logging.Formatter(fmt=FORMAT)) + _logger.addHandler(handler) + _logger.setLevel(logging.DEBUG) + + +# prevent tasks from being garbage collected +_background_tasks: Set[asyncio.Task] = set() + + +class BleakScanner: + """ + Interface for Bleak Bluetooth LE Scanners. + + The scanner will listen for BLE advertisements, optionally filtering on advertised services or + other conditions, and collect a list of :class:`BLEDevice` objects. These can subsequently be used to + connect to the corresponding BLE server. + + A :class:`BleakScanner` can be used as an asynchronous context manager in which case it automatically + starts and stops scanning. + + Args: + detection_callback: + Optional function that will be called each time a device is + discovered or advertising data has changed. + service_uuids: + Optional list of service UUIDs to filter on. 
Only advertisements + containing this advertising data will be received. Required on + macOS >= 12.0, < 12.3 (unless you create an app with ``py2app``). + scanning_mode: + Set to ``"passive"`` to avoid the ``"active"`` scanning mode. + Passive scanning is not supported on macOS! Will raise + :class:`BleakError` if set to ``"passive"`` on macOS. + bluez: + Dictionary of arguments specific to the BlueZ backend. + cb: + Dictionary of arguments specific to the CoreBluetooth backend. + backend: + Used to override the automatically selected backend (i.e. for a + custom backend). + **kwargs: + Additional args for backwards compatibility. + + .. tip:: The first received advertisement in ``detection_callback`` may or + may not include scan response data if the remote device supports it. + Be sure to take this into account when handing the callback. For example, + the scan response often contains the local name of the device so if you + are matching a device based on other data but want to display the local + name to the user, be sure to wait for ``adv_data.local_name is not None``. + + .. versionchanged:: 0.15 + ``detection_callback``, ``service_uuids`` and ``scanning_mode`` are no longer keyword-only. + Added ``bluez`` parameter. + + .. versionchanged:: 0.18 + No longer is alias for backend type and no longer inherits from :class:`BaseBleakScanner`. + Added ``backend`` parameter. + """ + + def __init__( + self, + detection_callback: Optional[AdvertisementDataCallback] = None, + service_uuids: Optional[List[str]] = None, + scanning_mode: Literal["active", "passive"] = "active", + *, + bluez: BlueZScannerArgs = {}, + cb: CBScannerArgs = {}, + backend: Optional[Type[BaseBleakScanner]] = None, + **kwargs, + ) -> None: + PlatformBleakScanner = ( + get_platform_scanner_backend_type() if backend is None else backend + ) + + self._backend = PlatformBleakScanner( + detection_callback, + service_uuids, + scanning_mode, + bluez=bluez, + cb=cb, + **kwargs, + ) + + async def __aenter__(self) -> BleakScanner: + await self._backend.start() + return self + + async def __aexit__( + self, + exc_type: Type[BaseException], + exc_val: BaseException, + exc_tb: TracebackType, + ) -> None: + await self._backend.stop() + + def register_detection_callback( + self, callback: Optional[AdvertisementDataCallback] + ) -> None: + """ + Register a callback that is called when a device is discovered or has a property changed. + + .. deprecated:: 0.17.0 + This method will be removed in a future version of Bleak. Pass + the callback directly to the :class:`BleakScanner` constructor instead. + + Args: + callback: A function, coroutine or ``None``. + + + """ + warn( + "This method will be removed in a future version of Bleak. Use the detection_callback of the BleakScanner constructor instead.", + FutureWarning, + stacklevel=2, + ) + + try: + unregister = getattr(self, "_unregister_") + except AttributeError: + pass + else: + unregister() + + if callback is not None: + unregister = self._backend.register_detection_callback(callback) + setattr(self, "_unregister_", unregister) + + async def start(self) -> None: + """Start scanning for devices""" + await self._backend.start() + + async def stop(self) -> None: + """Stop scanning for devices""" + await self._backend.stop() + + def set_scanning_filter(self, **kwargs) -> None: + """ + Set scanning filter for the BleakScanner. + + .. deprecated:: 0.17.0 + This method will be removed in a future version of Bleak. Pass + arguments directly to the :class:`BleakScanner` constructor instead. 
+ + Args: + **kwargs: The filter details. + + """ + warn( + "This method will be removed in a future version of Bleak. Use BleakScanner constructor args instead.", + FutureWarning, + stacklevel=2, + ) + self._backend.set_scanning_filter(**kwargs) + + async def advertisement_data( + self, + ) -> AsyncGenerator[Tuple[BLEDevice, AdvertisementData], None]: + """ + Yields devices and associated advertising data packets as they are discovered. + + .. note:: + Ensure that scanning is started before calling this method. + + Returns: + An async iterator that yields tuples (:class:`BLEDevice`, :class:`AdvertisementData`). + + .. versionadded:: 0.21 + """ + devices = asyncio.Queue() + + unregister_callback = self._backend.register_detection_callback( + lambda bd, ad: devices.put_nowait((bd, ad)) + ) + try: + while True: + yield await devices.get() + finally: + unregister_callback() + + class ExtraArgs(TypedDict, total=False): + """ + Keyword args from :class:`~bleak.BleakScanner` that can be passed to + other convenience methods. + """ + + service_uuids: List[str] + """ + Optional list of service UUIDs to filter on. Only advertisements + containing this advertising data will be received. Required on + macOS >= 12.0, < 12.3 (unless you create an app with ``py2app``). + """ + scanning_mode: Literal["active", "passive"] + """ + Set to ``"passive"`` to avoid the ``"active"`` scanning mode. + Passive scanning is not supported on macOS! Will raise + :class:`BleakError` if set to ``"passive"`` on macOS. + """ + bluez: BlueZScannerArgs + """ + Dictionary of arguments specific to the BlueZ backend. + """ + cb: CBScannerArgs + """ + Dictionary of arguments specific to the CoreBluetooth backend. + """ + backend: Type[BaseBleakScanner] + """ + Used to override the automatically selected backend (i.e. for a + custom backend). + """ + + @overload + @classmethod + async def discover( + cls, timeout: float = 5.0, *, return_adv: Literal[False] = False, **kwargs + ) -> List[BLEDevice]: ... + + @overload + @classmethod + async def discover( + cls, timeout: float = 5.0, *, return_adv: Literal[True], **kwargs + ) -> Dict[str, Tuple[BLEDevice, AdvertisementData]]: ... + + @classmethod + async def discover( + cls, timeout=5.0, *, return_adv=False, **kwargs: Unpack[ExtraArgs] + ): + """ + Scan continuously for ``timeout`` seconds and return discovered devices. + + Args: + timeout: + Time, in seconds, to scan for. + return_adv: + If ``True``, the return value will include advertising data. + **kwargs: + Additional arguments will be passed to the :class:`BleakScanner` + constructor. + + Returns: + The value of :attr:`discovered_devices_and_advertisement_data` if + ``return_adv`` is ``True``, otherwise the value of :attr:`discovered_devices`. + + .. versionchanged:: 0.19 + Added ``return_adv`` parameter. + """ + async with cls(**kwargs) as scanner: + await asyncio.sleep(timeout) + + if return_adv: + return scanner.discovered_devices_and_advertisement_data + + return scanner.discovered_devices + + @property + def discovered_devices(self) -> List[BLEDevice]: + """ + Gets list of the devices that the scanner has discovered during the scanning. + + If you also need advertisement data, use :attr:`discovered_devices_and_advertisement_data` instead. 
+ """ + return [d for d, _ in self._backend.seen_devices.values()] + + @property + def discovered_devices_and_advertisement_data( + self, + ) -> Dict[str, Tuple[BLEDevice, AdvertisementData]]: + """ + Gets a map of device address to tuples of devices and the most recently + received advertisement data for that device. + + The address keys are useful to compare the discovered devices to a set + of known devices. If you don't need to do that, consider using + ``discovered_devices_and_advertisement_data.values()`` to just get the + values instead. + + .. versionadded:: 0.19 + """ + return self._backend.seen_devices + + async def get_discovered_devices(self) -> List[BLEDevice]: + """Gets the devices registered by the BleakScanner. + + .. deprecated:: 0.11.0 + This method will be removed in a future version of Bleak. Use the + :attr:`.discovered_devices` property instead. + + Returns: + A list of the devices that the scanner has discovered during the scanning. + + """ + warn( + "This method will be removed in a future version of Bleak. Use the `discovered_devices` property instead.", + FutureWarning, + stacklevel=2, + ) + return self.discovered_devices + + @classmethod + async def find_device_by_address( + cls, device_identifier: str, timeout: float = 10.0, **kwargs: Unpack[ExtraArgs] + ) -> Optional[BLEDevice]: + """Obtain a ``BLEDevice`` for a BLE server specified by Bluetooth address or (macOS) UUID address. + + Args: + device_identifier: The Bluetooth/UUID address of the Bluetooth peripheral sought. + timeout: Optional timeout to wait for detection of specified peripheral before giving up. Defaults to 10.0 seconds. + **kwargs: additional args passed to the :class:`BleakScanner` constructor. + + Returns: + The ``BLEDevice`` sought or ``None`` if not detected. + + """ + device_identifier = device_identifier.lower() + return await cls.find_device_by_filter( + lambda d, ad: d.address.lower() == device_identifier, + timeout=timeout, + **kwargs, + ) + + @classmethod + async def find_device_by_name( + cls, name: str, timeout: float = 10.0, **kwargs: Unpack[ExtraArgs] + ) -> Optional[BLEDevice]: + """Obtain a ``BLEDevice`` for a BLE server specified by the local name in the advertising data. + + Args: + name: The name sought. + timeout: Optional timeout to wait for detection of specified peripheral before giving up. Defaults to 10.0 seconds. + **kwargs: additional args passed to the :class:`BleakScanner` constructor. + + Returns: + The ``BLEDevice`` sought or ``None`` if not detected. + + .. versionadded:: 0.20 + """ + return await cls.find_device_by_filter( + lambda d, ad: ad.local_name == name, + timeout=timeout, + **kwargs, + ) + + @classmethod + async def find_device_by_filter( + cls, + filterfunc: AdvertisementDataFilter, + timeout: float = 10.0, + **kwargs: Unpack[ExtraArgs], + ) -> Optional[BLEDevice]: + """Obtain a ``BLEDevice`` for a BLE server that matches a given filter function. + + This can be used to find a BLE server by other identifying information than its address, + for example its name. + + Args: + filterfunc: + A function that is called for every BLEDevice found. It should + return ``True`` only for the wanted device. + timeout: + Optional timeout to wait for detection of specified peripheral + before giving up. Defaults to 10.0 seconds. + **kwargs: + Additional arguments to be passed to the :class:`BleakScanner` + constructor. + + Returns: + The :class:`BLEDevice` sought or ``None`` if not detected before + the timeout. 
+ + """ + async with cls(**kwargs) as scanner: + try: + async with async_timeout(timeout): + async for bd, ad in scanner.advertisement_data(): + if filterfunc(bd, ad): + return bd + except asyncio.TimeoutError: + return None + + +class BleakClient: + """The Client interface for connecting to a specific BLE GATT server and communicating with it. + + A BleakClient can be used as an asynchronous context manager in which case it automatically + connects and disconnects. + + How many BLE connections can be active simultaneously, and whether connections can be active while + scanning depends on the Bluetooth adapter hardware. + + Args: + address_or_ble_device: + A :class:`BLEDevice` received from a :class:`BleakScanner` or a + Bluetooth address (device UUID on macOS). + disconnected_callback: + Callback that will be scheduled in the event loop when the client is + disconnected. The callable must take one argument, which will be + this client object. + services: + Optional list of services to filter. If provided, only these services + will be resolved. This may or may not reduce the time needed to + enumerate the services depending on if the OS supports such filtering + in the Bluetooth stack or not (should affect Windows and Mac). + These can be 16-bit or 128-bit UUIDs. + timeout: + Timeout in seconds passed to the implicit ``discover`` call when + ``address_or_ble_device`` is not a :class:`BLEDevice`. Defaults to 10.0. + winrt: + Dictionary of WinRT/Windows platform-specific options. + backend: + Used to override the automatically selected backend (i.e. for a + custom backend). + **kwargs: + Additional keyword arguments for backwards compatibility. + + .. warning:: Although example code frequently initializes :class:`BleakClient` + with a Bluetooth address for simplicity, it is not recommended to do so + for more complex use cases. There are several known issues with providing + a Bluetooth address as the ``address_or_ble_device`` argument. + + 1. macOS does not provide access to the Bluetooth address for privacy/ + security reasons. Instead it creates a UUID for each Bluetooth + device which is used in place of the address on this platform. + 2. Providing an address or UUID instead of a :class:`BLEDevice` causes + the :meth:`connect` method to implicitly call :meth:`BleakScanner.discover`. + This is known to cause problems when trying to connect to multiple + devices at the same time. + + .. versionchanged:: 0.15 + ``disconnected_callback`` is no longer keyword-only. Added ``winrt`` parameter. + + .. versionchanged:: 0.18 + No longer is alias for backend type and no longer inherits from :class:`BaseBleakClient`. + Added ``backend`` parameter. 
+ """ + + def __init__( + self, + address_or_ble_device: Union[BLEDevice, str], + disconnected_callback: Optional[Callable[[BleakClient], None]] = None, + services: Optional[Iterable[str]] = None, + *, + timeout: float = 10.0, + winrt: WinRTClientArgs = {}, + backend: Optional[Type[BaseBleakClient]] = None, + **kwargs, + ) -> None: + PlatformBleakClient = ( + get_platform_client_backend_type() if backend is None else backend + ) + + self._backend = PlatformBleakClient( + address_or_ble_device, + disconnected_callback=( + None + if disconnected_callback is None + else functools.partial(disconnected_callback, self) + ), + services=( + None if services is None else set(map(normalize_uuid_str, services)) + ), + timeout=timeout, + winrt=winrt, + **kwargs, + ) + + # device info + + @property + def address(self) -> str: + """ + Gets the Bluetooth address of this device (UUID on macOS). + """ + return self._backend.address + + @property + def mtu_size(self) -> int: + """ + Gets the negotiated MTU size in bytes for the active connection. + + Consider using :attr:`bleak.backends.characteristic.BleakGATTCharacteristic.max_write_without_response_size` instead. + + .. warning:: The BlueZ backend will always return 23 (the minimum MTU size). + See the ``mtu_size.py`` example for a way to hack around this. + + """ + return self._backend.mtu_size + + def __str__(self) -> str: + return f"{self.__class__.__name__}, {self.address}" + + def __repr__(self) -> str: + return f"<{self.__class__.__name__}, {self.address}, {type(self._backend)}>" + + # Async Context managers + + async def __aenter__(self) -> BleakClient: + await self.connect() + return self + + async def __aexit__( + self, + exc_type: Type[BaseException], + exc_val: BaseException, + exc_tb: TracebackType, + ) -> None: + await self.disconnect() + + # Connectivity methods + + def set_disconnected_callback( + self, callback: Optional[Callable[[BleakClient], None]], **kwargs + ) -> None: + """Set the disconnect callback. + + .. deprecated:: 0.17.0 + This method will be removed in a future version of Bleak. + Pass the callback to the :class:`BleakClient` constructor instead. + + Args: + callback: callback to be called on disconnection. + + """ + warn( + "This method will be removed future version, pass the callback to the BleakClient constructor instead.", + FutureWarning, + stacklevel=2, + ) + self._backend.set_disconnected_callback( + None if callback is None else functools.partial(callback, self), **kwargs + ) + + async def connect(self, **kwargs) -> bool: + """Connect to the specified GATT server. + + Args: + **kwargs: For backwards compatibility - should not be used. + + Returns: + Always returns ``True`` for backwards compatibility. + + """ + return await self._backend.connect(**kwargs) + + async def disconnect(self) -> bool: + """Disconnect from the specified GATT server. + + Returns: + Always returns ``True`` for backwards compatibility. + + """ + return await self._backend.disconnect() + + async def pair(self, *args, **kwargs) -> bool: + """ + Pair with the specified GATT server. + + This method is not available on macOS. Instead of manually initiating + paring, the user will be prompted to pair the device the first time + that a characteristic that requires authentication is read or written. + This method may have backend-specific additional keyword arguments. + + Returns: + Always returns ``True`` for backwards compatibility. 
+ + """ + return await self._backend.pair(*args, **kwargs) + + async def unpair(self) -> bool: + """ + Unpair from the specified GATT server. + + Unpairing will also disconnect the device. + + This method is only available on Windows and Linux and will raise an + exception on other platforms. + + Returns: + Always returns ``True`` for backwards compatibility. + """ + return await self._backend.unpair() + + @property + def is_connected(self) -> bool: + """ + Check connection status between this client and the GATT server. + + Returns: + Boolean representing connection status. + + """ + return self._backend.is_connected + + # GATT services methods + + async def get_services(self, **kwargs) -> BleakGATTServiceCollection: + """Get all services registered for this GATT server. + + .. deprecated:: 0.17.0 + This method will be removed in a future version of Bleak. + Use the :attr:`services` property instead. + + Returns: + A :class:`bleak.backends.service.BleakGATTServiceCollection` with this device's services tree. + + """ + warn( + "This method will be removed future version, use the services property instead.", + FutureWarning, + stacklevel=2, + ) + return await self._backend.get_services(**kwargs) + + @property + def services(self) -> BleakGATTServiceCollection: + """ + Gets the collection of GATT services available on the device. + + The returned value is only valid as long as the device is connected. + + Raises: + BleakError: if service discovery has not been performed yet during this connection. + """ + if not self._backend.services: + raise BleakError("Service Discovery has not been performed yet") + + return self._backend.services + + # I/O methods + + async def read_gatt_char( + self, + char_specifier: Union[BleakGATTCharacteristic, int, str, uuid.UUID], + **kwargs, + ) -> bytearray: + """ + Perform read operation on the specified GATT characteristic. + + Args: + char_specifier: + The characteristic to read from, specified by either integer + handle, UUID or directly by the BleakGATTCharacteristic object + representing it. + + Returns: + The read data. + + """ + return await self._backend.read_gatt_char(char_specifier, **kwargs) + + async def write_gatt_char( + self, + char_specifier: Union[BleakGATTCharacteristic, int, str, uuid.UUID], + data: Buffer, + response: bool = None, + ) -> None: + """ + Perform a write operation on the specified GATT characteristic. + + There are two possible kinds of writes. *Write with response* (sometimes + called a *Request*) will write the data then wait for a response from + the remote device. *Write without response* (sometimes called *Command*) + will queue data to be written and return immediately. + + Each characteristic may support one kind or the other or both or neither. + Consult the device's documentation or inspect the properties of the + characteristic to find out which kind of writes are supported. + + .. tip:: Explicit is better than implicit. Best practice is to always + include an explicit ``response=True`` or ``response=False`` + when calling this method. + + Args: + char_specifier: + The characteristic to write to, specified by either integer + handle, UUID or directly by the :class:`~bleak.backends.characteristic.BleakGATTCharacteristic` + object representing it. If a device has more than one characteristic + with the same UUID, then attempting to use the UUID wil fail and + a characteristic object must be used instead. + data: + The data to send. 
When a write-with-response operation is used, + the length of the data is limited to 512 bytes. When a + write-without-response operation is used, the length of the + data is limited to :attr:`~bleak.backends.characteristic.BleakGATTCharacteristic.max_write_without_response_size`. + Any type that supports the buffer protocol can be passed. + response: + If ``True``, a write-with-response operation will be used. If + ``False``, a write-without-response operation will be used. + If omitted or ``None``, the "best" operation will be used + based on the reported properties of the characteristic. + + .. versionchanged:: 0.21 + The default behavior when ``response=`` is omitted was changed. + + Example:: + + MY_CHAR_UUID = "1234" + ... + await client.write_gatt_char(MY_CHAR_UUID, b"\x00\x01\x02\x03", response=True) + """ + if isinstance(char_specifier, BleakGATTCharacteristic): + characteristic = char_specifier + else: + characteristic = self.services.get_characteristic(char_specifier) + + if not characteristic: + raise BleakCharacteristicNotFoundError(char_specifier) + + if response is None: + # if not specified, prefer write-with-response over write-without- + # response if it is available since it is the more reliable write. + response = "write" in characteristic.properties + + await self._backend.write_gatt_char(characteristic, data, response) + + async def start_notify( + self, + char_specifier: Union[BleakGATTCharacteristic, int, str, uuid.UUID], + callback: Callable[ + [BleakGATTCharacteristic, bytearray], Union[None, Awaitable[None]] + ], + **kwargs, + ) -> None: + """ + Activate notifications/indications on a characteristic. + + Callbacks must accept two inputs. The first will be the characteristic + and the second will be a ``bytearray`` containing the data received. + + .. code-block:: python + + def callback(sender: BleakGATTCharacteristic, data: bytearray): + print(f"{sender}: {data}") + + client.start_notify(char_uuid, callback) + + Args: + char_specifier: + The characteristic to activate notifications/indications on a + characteristic, specified by either integer handle, + UUID or directly by the BleakGATTCharacteristic object representing it. + callback: + The function to be called on notification. Can be regular + function or async function. + + + .. versionchanged:: 0.18 + The first argument of the callback is now a :class:`BleakGATTCharacteristic` + instead of an ``int``. + """ + if not self.is_connected: + raise BleakError("Not connected") + + if not isinstance(char_specifier, BleakGATTCharacteristic): + characteristic = self.services.get_characteristic(char_specifier) + else: + characteristic = char_specifier + + if not characteristic: + raise BleakCharacteristicNotFoundError(char_specifier) + + if inspect.iscoroutinefunction(callback): + + def wrapped_callback(data: bytearray) -> None: + task = asyncio.create_task(callback(characteristic, data)) + _background_tasks.add(task) + task.add_done_callback(_background_tasks.discard) + + else: + wrapped_callback = functools.partial(callback, characteristic) + + await self._backend.start_notify(characteristic, wrapped_callback, **kwargs) + + async def stop_notify( + self, char_specifier: Union[BleakGATTCharacteristic, int, str, uuid.UUID] + ) -> None: + """ + Deactivate notification/indication on a specified characteristic. + + Args: + char_specifier: + The characteristic to deactivate notification/indication on, + specified by either integer handle, UUID or directly by the + BleakGATTCharacteristic object representing it. + + .. 
tip:: Notifications are stopped automatically on disconnect, so this + method does not need to be called unless notifications need to be + stopped some time before the device disconnects. + """ + await self._backend.stop_notify(char_specifier) + + async def read_gatt_descriptor(self, handle: int, **kwargs) -> bytearray: + """ + Perform read operation on the specified GATT descriptor. + + Args: + handle: The handle of the descriptor to read from. + + Returns: + The read data. + + """ + return await self._backend.read_gatt_descriptor(handle, **kwargs) + + async def write_gatt_descriptor(self, handle: int, data: Buffer) -> None: + """ + Perform a write operation on the specified GATT descriptor. + + Args: + handle: + The handle of the descriptor to read from. + data: + The data to send. + + """ + await self._backend.write_gatt_descriptor(handle, data) + + +# for backward compatibility +def discover(*args, **kwargs): + """ + .. deprecated:: 0.17.0 + This method will be removed in a future version of Bleak. + Use :meth:`BleakScanner.discover` instead. + """ + warn( + "The discover function will removed in a future version, use BleakScanner.discover instead.", + FutureWarning, + stacklevel=2, + ) + return BleakScanner.discover(*args, **kwargs) + + +def cli() -> None: + import argparse + + parser = argparse.ArgumentParser( + description="Perform Bluetooth Low Energy device scan" + ) + parser.add_argument("-i", dest="adapter", default=None, help="HCI device") + parser.add_argument( + "-t", dest="timeout", type=int, default=5, help="Duration to scan for" + ) + args = parser.parse_args() + + out = asyncio.run(discover(adapter=args.adapter, timeout=float(args.timeout))) + for o in out: + print(str(o)) + + +if __name__ == "__main__": + cli() diff --git a/bleak/assigned_numbers.py b/bleak/assigned_numbers.py new file mode 100644 index 0000000..d52220f --- /dev/null +++ b/bleak/assigned_numbers.py @@ -0,0 +1,38 @@ +""" +Bluetooth Assigned Numbers +-------------------------- + +This module contains useful assigned numbers from the Bluetooth spec. + +See . +""" + +from enum import IntEnum + + +class AdvertisementDataType(IntEnum): + """ + Generic Access Profile advertisement data types. + + `Source `. + + .. 
versionadded:: 0.15 + """ + + FLAGS = 0x01 + INCOMPLETE_LIST_SERVICE_UUID16 = 0x02 + COMPLETE_LIST_SERVICE_UUID16 = 0x03 + INCOMPLETE_LIST_SERVICE_UUID32 = 0x04 + COMPLETE_LIST_SERVICE_UUID32 = 0x05 + INCOMPLETE_LIST_SERVICE_UUID128 = 0x06 + COMPLETE_LIST_SERVICE_UUID128 = 0x07 + SHORTENED_LOCAL_NAME = 0x08 + COMPLETE_LOCAL_NAME = 0x09 + TX_POWER_LEVEL = 0x0A + CLASS_OF_DEVICE = 0x0D + + SERVICE_DATA_UUID16 = 0x16 + SERVICE_DATA_UUID32 = 0x20 + SERVICE_DATA_UUID128 = 0x21 + + MANUFACTURER_SPECIFIC_DATA = 0xFF diff --git a/bleak/backends/__init__.py b/bleak/backends/__init__.py new file mode 100644 index 0000000..007bc01 --- /dev/null +++ b/bleak/backends/__init__.py @@ -0,0 +1,7 @@ +# -*- coding: utf-8 -*- +""" +__init__.py + +Created on 2017-11-19 by hbldh + +""" diff --git a/bleak/backends/_manufacturers.py b/bleak/backends/_manufacturers.py new file mode 100644 index 0000000..4a16bee --- /dev/null +++ b/bleak/backends/_manufacturers.py @@ -0,0 +1,1936 @@ +""" +Manufacturer data retrieved from https://www.bluetooth.com/specifications/assigned-numbers/company-identifiers +""" + +MANUFACTURERS = { + 0x0000: "Ericsson Technology Licensing", + 0x0001: "Nokia Mobile Phones", + 0x0002: "Intel Corp.", + 0x0003: "IBM Corp.", + 0x0004: "Toshiba Corp.", + 0x0005: "3Com", + 0x0006: "Microsoft", + 0x0007: "Lucent", + 0x0008: "Motorola", + 0x0009: "Infineon Technologies AG", + 0x000A: "Qualcomm Technologies International, Ltd. (QTIL)", + 0x000B: "Silicon Wave", + 0x000C: "Digianswer A/S", + 0x000D: "Texas Instruments Inc.", + 0x000E: "Parthus Technologies Inc.", + 0x000F: "Broadcom Corporation", + 0x0010: "Mitel Semiconductor", + 0x0011: "Widcomm, Inc.", + 0x0012: "Zeevo, Inc.", + 0x0013: "Atmel Corporation", + 0x0014: "Mitsubishi Electric Corporation", + 0x0015: "RTX Telecom A/S", + 0x0016: "KC Technology Inc.", + 0x0017: "Newlogic", + 0x0018: "Transilica, Inc.", + 0x0019: "Rohde & Schwarz GmbH & Co. KG", + 0x001A: "TTPCom Limited", + 0x001B: "Signia Technologies, Inc.", + 0x001C: "Conexant Systems Inc.", + 0x001D: "Qualcomm", + 0x001E: "Inventel", + 0x001F: "AVM Berlin", + 0x0020: "BandSpeed, Inc.", + 0x0021: "Mansella Ltd", + 0x0022: "NEC Corporation", + 0x0023: "WavePlus Technology Co., Ltd.", + 0x0024: "Alcatel", + 0x0025: "NXP Semiconductors (formerly Philips Semiconductors)", + 0x0026: "C Technologies", + 0x0027: "Open Interface", + 0x0028: "R F Micro Devices", + 0x0029: "Hitachi Ltd", + 0x002A: "Symbol Technologies, Inc.", + 0x002B: "Tenovis", + 0x002C: "Macronix International Co. 
Ltd.", + 0x002D: "GCT Semiconductor", + 0x002E: "Norwood Systems", + 0x002F: "MewTel Technology Inc.", + 0x0030: "ST Microelectronics", + 0x0031: "Synopsys, Inc.", + 0x0032: "Red-M (Communications) Ltd", + 0x0033: "Commil Ltd", + 0x0034: "Computer Access Technology Corporation (CATC)", + 0x0035: "Eclipse (HQ Espana) S.L.", + 0x0036: "Renesas Electronics Corporation", + 0x0037: "Mobilian Corporation", + 0x0038: "Syntronix Corporation", + 0x0039: "Integrated System Solution Corp.", + 0x003A: "Matsushita Electric Industrial Co., Ltd.", + 0x003B: "Gennum Corporation", + 0x003C: "BlackBerry Limited (formerly Research In Motion)", + 0x003D: "IPextreme, Inc.", + 0x003E: "Systems and Chips, Inc", + 0x003F: "Bluetooth SIG, Inc", + 0x0040: "Seiko Epson Corporation", + 0x0041: "Integrated Silicon Solution Taiwan, Inc.", + 0x0042: "CONWISE Technology Corporation Ltd", + 0x0043: "PARROT AUTOMOTIVE SAS", + 0x0044: "Socket Mobile", + 0x0045: "Atheros Communications, Inc.", + 0x0046: "MediaTek, Inc.", + 0x0047: "Bluegiga", + 0x0048: "Marvell Technology Group Ltd.", + 0x0049: "3DSP Corporation", + 0x004A: "Accel Semiconductor Ltd.", + 0x004B: "Continental Automotive Systems", + 0x004C: "Apple, Inc.", + 0x004D: "Staccato Communications, Inc.", + 0x004E: "Avago Technologies", + 0x004F: "APT Ltd.", + 0x0050: "SiRF Technology, Inc.", + 0x0051: "Tzero Technologies, Inc.", + 0x0052: "J&M Corporation", + 0x0053: "Free2move AB", + 0x0054: "3DiJoy Corporation", + 0x0055: "Plantronics, Inc.", + 0x0056: "Sony Ericsson Mobile Communications", + 0x0057: "Harman International Industries, Inc.", + 0x0058: "Vizio, Inc.", + 0x0059: "Nordic Semiconductor ASA", + 0x005A: "EM Microelectronic-Marin SA", + 0x005B: "Ralink Technology Corporation", + 0x005C: "Belkin International, Inc.", + 0x005D: "Realtek Semiconductor Corporation", + 0x005E: "Stonestreet One, LLC", + 0x005F: "Wicentric, Inc.", + 0x0060: "RivieraWaves S.A.S", + 0x0061: "RDA Microelectronics", + 0x0062: "Gibson Guitars", + 0x0063: "MiCommand Inc.", + 0x0064: "Band XI International, LLC", + 0x0065: "Hewlett-Packard Company", + 0x0066: "9Solutions Oy", + 0x0067: "GN Netcom A/S", + 0x0068: "General Motors", + 0x0069: "A&D Engineering, Inc.", + 0x006A: "MindTree Ltd.", + 0x006B: "Polar Electro OY", + 0x006C: "Beautiful Enterprise Co., Ltd.", + 0x006D: "BriarTek, Inc", + 0x006E: "Summit Data Communications, Inc.", + 0x006F: "Sound ID", + 0x0070: "Monster, LLC", + 0x0071: "connectBlue AB", + 0x0072: "ShangHai Super Smart Electronics Co. Ltd.", + 0x0073: "Group Sense Ltd.", + 0x0074: "Zomm, LLC", + 0x0075: "Samsung Electronics Co. Ltd.", + 0x0076: "Creative Technology Ltd.", + 0x0077: "Laird Technologies", + 0x0078: "Nike, Inc.", + 0x0079: "lesswire AG", + 0x007A: "MStar Semiconductor, Inc.", + 0x007B: "Hanlynn Technologies", + 0x007C: "A & R Cambridge", + 0x007D: "Seers Technology Co., Ltd.", + 0x007E: "Sports Tracking Technologies Ltd.", + 0x007F: "Autonet Mobile", + 0x0080: "DeLorme Publishing Company, Inc.", + 0x0081: "WuXi Vimicro", + 0x0082: "Sennheiser Communications A/S", + 0x0083: "TimeKeeping Systems, Inc.", + 0x0084: "Ludus Helsinki Ltd.", + 0x0085: "BlueRadios, Inc.", + 0x0086: "Equinux AG", + 0x0087: "Garmin International, Inc.", + 0x0088: "Ecotest", + 0x0089: "GN ReSound A/S", + 0x008A: "Jawbone", + 0x008B: "Topcon Positioning Systems, LLC", + 0x008C: "Gimbal Inc. (formerly Qualcomm Labs, Inc. 
and Qualcomm Retail Solutions, Inc.)", + 0x008D: "Zscan Software", + 0x008E: "Quintic Corp", + 0x008F: "Telit Wireless Solutions GmbH (formerly Stollmann E+V GmbH)", + 0x0090: "Funai Electric Co., Ltd.", + 0x0091: "Advanced PANMOBIL systems GmbH & Co. KG", + 0x0092: "ThinkOptics, Inc.", + 0x0093: "Universal Electronics, Inc.", + 0x0094: "Airoha Technology Corp.", + 0x0095: "NEC Lighting, Ltd.", + 0x0096: "ODM Technology, Inc.", + 0x0097: "ConnecteDevice Ltd.", + 0x0098: "zero1.tv GmbH", + 0x0099: "i.Tech Dynamic Global Distribution Ltd.", + 0x009A: "Alpwise", + 0x009B: "Jiangsu Toppower Automotive Electronics Co., Ltd.", + 0x009C: "Colorfy, Inc.", + 0x009D: "Geoforce Inc.", + 0x009E: "Bose Corporation", + 0x009F: "Suunto Oy", + 0x00A0: "Kensington Computer Products Group", + 0x00A1: "SR-Medizinelektronik", + 0x00A2: "Vertu Corporation Limited", + 0x00A3: "Meta Watch Ltd.", + 0x00A4: "LINAK A/S", + 0x00A5: "OTL Dynamics LLC", + 0x00A6: "Panda Ocean Inc.", + 0x00A7: "Visteon Corporation", + 0x00A8: "ARP Devices Limited", + 0x00A9: "Magneti Marelli S.p.A", + 0x00AA: "CAEN RFID srl", + 0x00AB: "Ingenieur-Systemgruppe Zahn GmbH", + 0x00AC: "Green Throttle Games", + 0x00AD: "Peter Systemtechnik GmbH", + 0x00AE: "Omegawave Oy", + 0x00AF: "Cinetix", + 0x00B0: "Passif Semiconductor Corp", + 0x00B1: "Saris Cycling Group, Inc", + 0x00B2: "Bekey A/S", + 0x00B3: "Clarinox Technologies Pty. Ltd.", + 0x00B4: "BDE Technology Co., Ltd.", + 0x00B5: "Swirl Networks", + 0x00B6: "Meso international", + 0x00B7: "TreLab Ltd", + 0x00B8: "Qualcomm Innovation Center, Inc. (QuIC)", + 0x00B9: "Johnson Controls, Inc.", + 0x00BA: "Starkey Laboratories Inc.", + 0x00BB: "S-Power Electronics Limited", + 0x00BC: "Ace Sensor Inc", + 0x00BD: "Aplix Corporation", + 0x00BE: "AAMP of America", + 0x00BF: "Stalmart Technology Limited", + 0x00C0: "AMICCOM Electronics Corporation", + 0x00C1: "Shenzhen Excelsecu Data Technology Co.,Ltd", + 0x00C2: "Geneq Inc.", + 0x00C3: "adidas AG", + 0x00C4: "LG Electronics", + 0x00C5: "Onset Computer Corporation", + 0x00C6: "Selfly BV", + 0x00C7: "Quuppa Oy.", + 0x00C8: "GeLo Inc", + 0x00C9: "Evluma", + 0x00CA: "MC10", + 0x00CB: "Binauric SE", + 0x00CC: "Beats Electronics", + 0x00CD: "Microchip Technology Inc.", + 0x00CE: "Elgato Systems GmbH", + 0x00CF: "ARCHOS SA", + 0x00D0: "Dexcom, Inc.", + 0x00D1: "Polar Electro Europe B.V.", + 0x00D2: "Dialog Semiconductor B.V.", + 0x00D3: "Taixingbang Technology (HK) Co,. LTD.", + 0x00D4: "Kawantech", + 0x00D5: "Austco Communication Systems", + 0x00D6: "Timex Group USA, Inc.", + 0x00D7: "Qualcomm Technologies, Inc.", + 0x00D8: "Qualcomm Connected Experiences, Inc.", + 0x00D9: "Voyetra Turtle Beach", + 0x00DA: "txtr GmbH", + 0x00DB: "Biosentronics", + 0x00DC: "Procter & Gamble", + 0x00DD: "Hosiden Corporation", + 0x00DE: "Muzik LLC", + 0x00DF: "Misfit Wearables Corp", + 0x00E0: "Google", + 0x00E1: "Danlers Ltd", + 0x00E2: "Semilink Inc", + 0x00E3: "inMusic Brands, Inc", + 0x00E4: "L.S. 
Research Inc.", + 0x00E5: "Eden Software Consultants Ltd.", + 0x00E6: "Freshtemp", + 0x00E7: "KS Technologies", + 0x00E8: "ACTS Technologies", + 0x00E9: "Vtrack Systems", + 0x00EA: "Nielsen-Kellerman Company", + 0x00EB: "Server Technology Inc.", + 0x00EC: "BioResearch Associates", + 0x00ED: "Jolly Logic, LLC", + 0x00EE: "Above Average Outcomes, Inc.", + 0x00EF: "Bitsplitters GmbH", + 0x00F0: "PayPal, Inc.", + 0x00F1: "Witron Technology Limited", + 0x00F2: "Morse Project Inc.", + 0x00F3: "Kent Displays Inc.", + 0x00F4: "Nautilus Inc.", + 0x00F5: "Smartifier Oy", + 0x00F6: "Elcometer Limited", + 0x00F7: "VSN Technologies, Inc.", + 0x00F8: "AceUni Corp., Ltd.", + 0x00F9: "StickNFind", + 0x00FA: "Crystal Code AB", + 0x00FB: "KOUKAAM a.s.", + 0x00FC: "Delphi Corporation", + 0x00FD: "ValenceTech Limited", + 0x00FE: "Stanley Black and Decker", + 0x00FF: "Typo Products, LLC", + 0x0100: "TomTom International BV", + 0x0101: "Fugoo, Inc.", + 0x0102: "Keiser Corporation", + 0x0103: "Bang & Olufsen A/S", + 0x0104: "PLUS Location Systems Pty Ltd", + 0x0105: "Ubiquitous Computing Technology Corporation", + 0x0106: "Innovative Yachtter Solutions", + 0x0107: "William Demant Holding A/S", + 0x0108: "Chicony Electronics Co., Ltd.", + 0x0109: "Atus BV", + 0x010A: "Codegate Ltd", + 0x010B: "ERi, Inc", + 0x010C: "Transducers Direct, LLC", + 0x010D: "Fujitsu Ten LImited", + 0x010E: "Audi AG", + 0x010F: "HiSilicon Technologies Col, Ltd.", + 0x0110: "Nippon Seiki Co., Ltd.", + 0x0111: "Steelseries ApS", + 0x0112: "Visybl Inc.", + 0x0113: "Openbrain Technologies, Co., Ltd.", + 0x0114: "Xensr", + 0x0115: "e.solutions", + 0x0116: "10AK Technologies", + 0x0117: "Wimoto Technologies Inc", + 0x0118: "Radius Networks, Inc.", + 0x0119: "Wize Technology Co., Ltd.", + 0x011A: "Qualcomm Labs, Inc.", + 0x011B: "Hewlett Packard Enterprise", + 0x011C: "Baidu", + 0x011D: "Arendi AG", + 0x011E: "Skoda Auto a.s.", + 0x011F: "Volkswagen AG", + 0x0120: "Porsche AG", + 0x0121: "Sino Wealth Electronic Ltd.", + 0x0122: "AirTurn, Inc.", + 0x0123: "Kinsa, Inc", + 0x0124: "HID Global", + 0x0125: "SEAT es", + 0x0126: "Promethean Ltd.", + 0x0127: "Salutica Allied Solutions", + 0x0128: "GPSI Group Pty Ltd", + 0x0129: "Nimble Devices Oy", + 0x012A: "Changzhou Yongse Infotech Co., Ltd.", + 0x012B: "SportIQ", + 0x012C: "TEMEC Instruments B.V.", + 0x012D: "Sony Corporation", + 0x012E: "ASSA ABLOY", + 0x012F: "Clarion Co. 
Inc.", + 0x0130: "Warehouse Innovations", + 0x0131: "Cypress Semiconductor", + 0x0132: "MADS Inc", + 0x0133: "Blue Maestro Limited", + 0x0134: "Resolution Products, Ltd.", + 0x0135: "Aireware LLC", + 0x0136: "Silvair, Inc.", + 0x0137: "Prestigio Plaza Ltd.", + 0x0138: "NTEO Inc.", + 0x0139: "Focus Systems Corporation", + 0x013A: "Tencent Holdings Ltd.", + 0x013B: "Allegion", + 0x013C: "Murata Manufacturing Co., Ltd.", + 0x013D: "WirelessWERX", + 0x013E: "Nod, Inc.", + 0x013F: "B&B Manufacturing Company", + 0x0140: "Alpine Electronics (China) Co., Ltd", + 0x0141: "FedEx Services", + 0x0142: "Grape Systems Inc.", + 0x0143: "Bkon Connect", + 0x0144: "Lintech GmbH", + 0x0145: "Novatel Wireless", + 0x0146: "Ciright", + 0x0147: "Mighty Cast, Inc.", + 0x0148: "Ambimat Electronics", + 0x0149: "Perytons Ltd.", + 0x014A: "Tivoli Audio, LLC", + 0x014B: "Master Lock", + 0x014C: "Mesh-Net Ltd", + 0x014D: "HUIZHOU DESAY SV AUTOMOTIVE CO., LTD.", + 0x014E: "Tangerine, Inc.", + 0x014F: "B&W Group Ltd.", + 0x0150: "Pioneer Corporation", + 0x0151: "OnBeep", + 0x0152: "Vernier Software & Technology", + 0x0153: "ROL Ergo", + 0x0154: "Pebble Technology", + 0x0155: "NETATMO", + 0x0156: "Accumulate AB", + 0x0157: "Anhui Huami Information Technology Co., Ltd.", + 0x0158: "Inmite s.r.o.", + 0x0159: "ChefSteps, Inc.", + 0x015A: "micas AG", + 0x015B: "Biomedical Research Ltd.", + 0x015C: "Pitius Tec S.L.", + 0x015D: "Estimote, Inc.", + 0x015E: "Unikey Technologies, Inc.", + 0x015F: "Timer Cap Co.", + 0x0160: "AwoX", + 0x0161: "yikes", + 0x0162: "MADSGlobalNZ Ltd.", + 0x0163: "PCH International", + 0x0164: "Qingdao Yeelink Information Technology Co., Ltd.", + 0x0165: "Milwaukee Tool (Formally Milwaukee Electric Tools)", + 0x0166: "MISHIK Pte Ltd", + 0x0167: "Ascensia Diabetes Care US Inc.", + 0x0168: "Spicebox LLC", + 0x0169: "emberlight", + 0x016A: "Cooper-Atkins Corporation", + 0x016B: "Qblinks", + 0x016C: "MYSPHERA", + 0x016D: "LifeScan Inc", + 0x016E: "Volantic AB", + 0x016F: "Podo Labs, Inc", + 0x0170: "Roche Diabetes Care AG", + 0x0171: "Amazon Fulfillment Service", + 0x0172: "Connovate Technology Private Limited", + 0x0173: "Kocomojo, LLC", + 0x0174: "Everykey Inc.", + 0x0175: "Dynamic Controls", + 0x0176: "SentriLock", + 0x0177: "I-SYST inc.", + 0x0178: "CASIO COMPUTER CO., LTD.", + 0x0179: "LAPIS Semiconductor Co., Ltd.", + 0x017A: "Telemonitor, Inc.", + 0x017B: "taskit GmbH", + 0x017C: "Daimler AG", + 0x017D: "BatAndCat", + 0x017E: "BluDotz Ltd", + 0x017F: "XTel Wireless ApS", + 0x0180: "Gigaset Communications GmbH", + 0x0181: "Gecko Health Innovations, Inc.", + 0x0182: "HOP Ubiquitous", + 0x0183: "Walt Disney", + 0x0184: "Nectar", + 0x0185: "bel'apps LLC", + 0x0186: "CORE Lighting Ltd", + 0x0187: "Seraphim Sense Ltd", + 0x0188: "Unico RBC", + 0x0189: "Physical Enterprises Inc.", + 0x018A: "Able Trend Technology Limited", + 0x018B: "Konica Minolta, Inc.", + 0x018C: "Wilo SE", + 0x018D: "Extron Design Services", + 0x018E: "Fitbit, Inc.", + 0x018F: "Fireflies Systems", + 0x0190: "Intelletto Technologies Inc.", + 0x0191: "FDK CORPORATION", + 0x0192: "Cloudleaf, Inc", + 0x0193: "Maveric Automation LLC", + 0x0194: "Acoustic Stream Corporation", + 0x0195: "Zuli", + 0x0196: "Paxton Access Ltd", + 0x0197: "WiSilica Inc.", + 0x0198: "VENGIT Korlatolt Felelossegu Tarsasag", + 0x0199: "SALTO SYSTEMS S.L.", + 0x019A: "TRON Forum (formerly T-Engine Forum)", + 0x019B: "CUBETECH s.r.o.", + 0x019C: "Cokiya Incorporated", + 0x019D: "CVS Health", + 0x019E: "Ceruus", + 0x019F: "Strainstall Ltd", + 0x01A0: "Channel Enterprises 
(HK) Ltd.", + 0x01A1: "FIAMM", + 0x01A2: "GIGALANE.CO.,LTD", + 0x01A3: "EROAD", + 0x01A4: "Mine Safety Appliances", + 0x01A5: "Icon Health and Fitness", + 0x01A6: "Wille Engineering (formely as Asandoo GmbH)", + 0x01A7: "ENERGOUS CORPORATION", + 0x01A8: "Taobao", + 0x01A9: "Canon Inc.", + 0x01AA: "Geophysical Technology Inc.", + 0x01AB: "Facebook, Inc.", + 0x01AC: "Trividia Health, Inc.", + 0x01AD: "FlightSafety International", + 0x01AE: "Earlens Corporation", + 0x01AF: "Sunrise Micro Devices, Inc.", + 0x01B0: "Star Micronics Co., Ltd.", + 0x01B1: "Netizens Sp. z o.o.", + 0x01B2: "Nymi Inc.", + 0x01B3: "Nytec, Inc.", + 0x01B4: "Trineo Sp. z o.o.", + 0x01B5: "Nest Labs Inc.", + 0x01B6: "LM Technologies Ltd", + 0x01B7: "General Electric Company", + 0x01B8: "i+D3 S.L.", + 0x01B9: "HANA Micron", + 0x01BA: "Stages Cycling LLC", + 0x01BB: "Cochlear Bone Anchored Solutions AB", + 0x01BC: "SenionLab AB", + 0x01BD: "Syszone Co., Ltd", + 0x01BE: "Pulsate Mobile Ltd.", + 0x01BF: "Hong Kong HunterSun Electronic Limited", + 0x01C0: "pironex GmbH", + 0x01C1: "BRADATECH Corp.", + 0x01C2: "Transenergooil AG", + 0x01C3: "Bunch", + 0x01C4: "DME Microelectronics", + 0x01C5: "Bitcraze AB", + 0x01C6: "HASWARE Inc.", + 0x01C7: "Abiogenix Inc.", + 0x01C8: "Poly-Control ApS", + 0x01C9: "Avi-on", + 0x01CA: "Laerdal Medical AS", + 0x01CB: "Fetch My Pet", + 0x01CC: "Sam Labs Ltd.", + 0x01CD: "Chengdu Synwing Technology Ltd", + 0x01CE: "HOUWA SYSTEM DESIGN, k.k.", + 0x01CF: "BSH", + 0x01D0: "Primus Inter Pares Ltd", + 0x01D1: "August Home, Inc", + 0x01D2: "Gill Electronics", + 0x01D3: "Sky Wave Design", + 0x01D4: "Newlab S.r.l.", + 0x01D5: "ELAD srl", + 0x01D6: "G-wearables inc.", + 0x01D7: "Squadrone Systems Inc.", + 0x01D8: "Code Corporation", + 0x01D9: "Savant Systems LLC", + 0x01DA: "Logitech International SA", + 0x01DB: "Innblue Consulting", + 0x01DC: "iParking Ltd.", + 0x01DD: "Koninklijke Philips Electronics N.V.", + 0x01DE: "Minelab Electronics Pty Limited", + 0x01DF: "Bison Group Ltd.", + 0x01E0: "Widex A/S", + 0x01E1: "Jolla Ltd", + 0x01E2: "Lectronix, Inc.", + 0x01E3: "Caterpillar Inc", + 0x01E4: "Freedom Innovations", + 0x01E5: "Dynamic Devices Ltd", + 0x01E6: "Technology Solutions (UK) Ltd", + 0x01E7: "IPS Group Inc.", + 0x01E8: "STIR", + 0x01E9: "Sano, Inc.", + 0x01EA: "Advanced Application Design, Inc.", + 0x01EB: "AutoMap LLC", + 0x01EC: "Spreadtrum Communications Shanghai Ltd", + 0x01ED: "CuteCircuit LTD", + 0x01EE: "Valeo Service", + 0x01EF: "Fullpower Technologies, Inc.", + 0x01F0: "KloudNation", + 0x01F1: "Zebra Technologies Corporation", + 0x01F2: "Itron, Inc.", + 0x01F3: "The University of Tokyo", + 0x01F4: "UTC Fire and Security", + 0x01F5: "Cool Webthings Limited", + 0x01F6: "DJO Global", + 0x01F7: "Gelliner Limited", + 0x01F8: "Anyka (Guangzhou) Microelectronics Technology Co, LTD", + 0x01F9: "Medtronic Inc.", + 0x01FA: "Gozio Inc.", + 0x01FB: "Form Lifting, LLC", + 0x01FC: "Wahoo Fitness, LLC", + 0x01FD: "Kontakt Micro-Location Sp. z o.o.", + 0x01FE: "Radio Systems Corporation", + 0x01FF: "Freescale Semiconductor, Inc.", + 0x0200: "Verifone Systems Pte Ltd. 
Taiwan Branch", + 0x0201: "AR Timing", + 0x0202: "Rigado LLC", + 0x0203: "Kemppi Oy", + 0x0204: "Tapcentive Inc.", + 0x0205: "Smartbotics Inc.", + 0x0206: "Otter Products, LLC", + 0x0207: "STEMP Inc.", + 0x0208: "LumiGeek LLC", + 0x0209: "InvisionHeart Inc.", + 0x020A: "Macnica Inc.", + 0x020B: "Jaguar Land Rover Limited", + 0x020C: "CoroWare Technologies, Inc", + 0x020D: "Simplo Technology Co., LTD", + 0x020E: "Omron Healthcare Co., LTD", + 0x020F: "Comodule GMBH", + 0x0210: "ikeGPS", + 0x0211: "Telink Semiconductor Co. Ltd", + 0x0212: "Interplan Co., Ltd", + 0x0213: "Wyler AG", + 0x0214: "IK Multimedia Production srl", + 0x0215: "Lukoton Experience Oy", + 0x0216: "MTI Ltd", + 0x0217: "Tech4home, Lda", + 0x0218: "Hiotech AB", + 0x0219: "DOTT Limited", + 0x021A: "Blue Speck Labs, LLC", + 0x021B: "Cisco Systems, Inc", + 0x021C: "Mobicomm Inc", + 0x021D: "Edamic", + 0x021E: "Goodnet, Ltd", + 0x021F: "Luster Leaf Products Inc", + 0x0220: "Manus Machina BV", + 0x0221: "Mobiquity Networks Inc", + 0x0222: "Praxis Dynamics", + 0x0223: "Philip Morris Products S.A.", + 0x0224: "Comarch SA", + 0x0225: "Nestl Nespresso S.A.", + 0x0226: "Merlinia A/S", + 0x0227: "LifeBEAM Technologies", + 0x0228: "Twocanoes Labs, LLC", + 0x0229: "Muoverti Limited", + 0x022A: "Stamer Musikanlagen GMBH", + 0x022B: "Tesla Motors", + 0x022C: "Pharynks Corporation", + 0x022D: "Lupine", + 0x022E: "Siemens AG", + 0x022F: "Huami (Shanghai) Culture Communication CO., LTD", + 0x0230: "Foster Electric Company, Ltd", + 0x0231: "ETA SA", + 0x0232: "x-Senso Solutions Kft", + 0x0233: "Shenzhen SuLong Communication Ltd", + 0x0234: "FengFan (BeiJing) Technology Co, Ltd", + 0x0235: "Qrio Inc", + 0x0236: "Pitpatpet Ltd", + 0x0237: "MSHeli s.r.l.", + 0x0238: "Trakm8 Ltd", + 0x0239: "JIN CO, Ltd", + 0x023A: "Alatech Tehnology", + 0x023B: "Beijing CarePulse Electronic Technology Co, Ltd", + 0x023C: "Awarepoint", + 0x023D: "ViCentra B.V.", + 0x023E: "Raven Industries", + 0x023F: "WaveWare Technologies Inc.", + 0x0240: "Argenox Technologies", + 0x0241: "Bragi GmbH", + 0x0242: "16Lab Inc", + 0x0243: "Masimo Corp", + 0x0244: "Iotera Inc", + 0x0245: "Endress+Hauser", + 0x0246: "ACKme Networks, Inc.", + 0x0247: "FiftyThree Inc.", + 0x0248: "Parker Hannifin Corp", + 0x0249: "Transcranial Ltd", + 0x024A: "Uwatec AG", + 0x024B: "Orlan LLC", + 0x024C: "Blue Clover Devices", + 0x024D: "M-Way Solutions GmbH", + 0x024E: "Microtronics Engineering GmbH", + 0x024F: "Schneider Schreibgerte GmbH", + 0x0250: "Sapphire Circuits LLC", + 0x0251: "Lumo Bodytech Inc.", + 0x0252: "UKC Technosolution", + 0x0253: "Xicato Inc.", + 0x0254: "Playbrush", + 0x0255: "Dai Nippon Printing Co., Ltd.", + 0x0256: "G24 Power Limited", + 0x0257: "AdBabble Local Commerce Inc.", + 0x0258: "Devialet SA", + 0x0259: "ALTYOR", + 0x025A: "University of Applied Sciences Valais/Haute Ecole Valaisanne", + 0x025B: "Five Interactive, LLC dba Zendo", + 0x025C: "NetEaseHangzhouNetwork co.Ltd.", + 0x025D: "Lexmark International Inc.", + 0x025E: "Fluke Corporation", + 0x025F: "Yardarm Technologies", + 0x0260: "SensaRx", + 0x0261: "SECVRE GmbH", + 0x0262: "Glacial Ridge Technologies", + 0x0263: "Identiv, Inc.", + 0x0264: "DDS, Inc.", + 0x0265: "SMK Corporation", + 0x0266: "Schawbel Technologies LLC", + 0x0267: "XMI Systems SA", + 0x0268: "Cerevo", + 0x0269: "Torrox GmbH & Co KG", + 0x026A: "Gemalto", + 0x026B: "DEKA Research & Development Corp.", + 0x026C: "Domster Tadeusz Szydlowski", + 0x026D: "Technogym SPA", + 0x026E: "FLEURBAEY BVBA", + 0x026F: "Aptcode Solutions", + 0x0270: "LSI ADL 
Technology", + 0x0271: "Animas Corp", + 0x0272: "Alps Electric Co., Ltd.", + 0x0273: "OCEASOFT", + 0x0274: "Motsai Research", + 0x0275: "Geotab", + 0x0276: "E.G.O. Elektro-Gertebau GmbH", + 0x0277: "bewhere inc", + 0x0278: "Johnson Outdoors Inc", + 0x0279: "steute Schaltgerate GmbH & Co. KG", + 0x027A: "Ekomini inc.", + 0x027B: "DEFA AS", + 0x027C: "Aseptika Ltd", + 0x027D: "HUAWEI Technologies Co., Ltd. ( )", + 0x027E: "HabitAware, LLC", + 0x027F: "ruwido austria gmbh", + 0x0280: "ITEC corporation", + 0x0281: "StoneL", + 0x0282: "Sonova AG", + 0x0283: "Maven Machines, Inc.", + 0x0284: "Synapse Electronics", + 0x0285: "Standard Innovation Inc.", + 0x0286: "RF Code, Inc.", + 0x0287: "Wally Ventures S.L.", + 0x0288: "Willowbank Electronics Ltd", + 0x0289: "SK Telecom", + 0x028A: "Jetro AS", + 0x028B: "Code Gears LTD", + 0x028C: "NANOLINK APS", + 0x028D: "IF, LLC", + 0x028E: "RF Digital Corp", + 0x028F: "Church & Dwight Co., Inc", + 0x0290: "Multibit Oy", + 0x0291: "CliniCloud Inc", + 0x0292: "SwiftSensors", + 0x0293: "Blue Bite", + 0x0294: "ELIAS GmbH", + 0x0295: "Sivantos GmbH", + 0x0296: "Petzl", + 0x0297: "storm power ltd", + 0x0298: "EISST Ltd", + 0x0299: "Inexess Technology Simma KG", + 0x029A: "Currant, Inc.", + 0x029B: "C2 Development, Inc.", + 0x029C: "Blue Sky Scientific, LLC", + 0x029D: "ALOTTAZS LABS, LLC", + 0x029E: "Kupson spol. s r.o.", + 0x029F: "Areus Engineering GmbH", + 0x02A0: "Impossible Camera GmbH", + 0x02A1: "InventureTrack Systems", + 0x02A2: "LockedUp", + 0x02A3: "Itude", + 0x02A4: "Pacific Lock Company", + 0x02A5: "Tendyron Corporation ( )", + 0x02A6: "Robert Bosch GmbH", + 0x02A7: "Illuxtron international B.V.", + 0x02A8: "miSport Ltd.", + 0x02A9: "Chargelib", + 0x02AA: "Doppler Lab", + 0x02AB: "BBPOS Limited", + 0x02AC: "RTB Elektronik GmbH & Co. KG", + 0x02AD: "Rx Networks, Inc.", + 0x02AE: "WeatherFlow, Inc.", + 0x02AF: "Technicolor USA Inc.", + 0x02B0: "Bestechnic(Shanghai),Ltd", + 0x02B1: "Raden Inc", + 0x02B2: "JouZen Oy", + 0x02B3: "CLABER S.P.A.", + 0x02B4: "Hyginex, Inc.", + 0x02B5: "HANSHIN ELECTRIC RAILWAY CO.,LTD.", + 0x02B6: "Schneider Electric", + 0x02B7: "Oort Technologies LLC", + 0x02B8: "Chrono Therapeutics", + 0x02B9: "Rinnai Corporation", + 0x02BA: "Swissprime Technologies AG", + 0x02BB: "Koha.,Co.Ltd", + 0x02BC: "Genevac Ltd", + 0x02BD: "Chemtronics", + 0x02BE: "Seguro Technology Sp. z o.o.", + 0x02BF: "Redbird Flight Simulations", + 0x02C0: "Dash Robotics", + 0x02C1: "LINE Corporation", + 0x02C2: "Guillemot Corporation", + 0x02C3: "Techtronic Power Tools Technology Limited", + 0x02C4: "Wilson Sporting Goods", + 0x02C5: "Lenovo (Singapore) Pte Ltd. 
( )", + 0x02C6: "Ayatan Sensors", + 0x02C7: "Electronics Tomorrow Limited", + 0x02C8: "VASCO Data Security International, Inc.", + 0x02C9: "PayRange Inc.", + 0x02CA: "ABOV Semiconductor", + 0x02CB: "AINA-Wireless Inc.", + 0x02CC: "Eijkelkamp Soil & Water", + 0x02CD: "BMA ergonomics b.v.", + 0x02CE: "Teva Branded Pharmaceutical Products R&D, Inc.", + 0x02CF: "Anima", + 0x02D0: "3M", + 0x02D1: "Empatica Srl", + 0x02D2: "Afero, Inc.", + 0x02D3: "Powercast Corporation", + 0x02D4: "Secuyou ApS", + 0x02D5: "OMRON Corporation", + 0x02D6: "Send Solutions", + 0x02D7: "NIPPON SYSTEMWARE CO.,LTD.", + 0x02D8: "Neosfar", + 0x02D9: "Fliegl Agrartechnik GmbH", + 0x02DA: "Gilvader", + 0x02DB: "Digi International Inc (R)", + 0x02DC: "DeWalch Technologies, Inc.", + 0x02DD: "Flint Rehabilitation Devices, LLC", + 0x02DE: "Samsung SDS Co., Ltd.", + 0x02DF: "Blur Product Development", + 0x02E0: "University of Michigan", + 0x02E1: "Victron Energy BV", + 0x02E2: "NTT docomo", + 0x02E3: "Carmanah Technologies Corp.", + 0x02E4: "Bytestorm Ltd.", + 0x02E5: "Espressif Incorporated ( () )", + 0x02E6: "Unwire", + 0x02E7: "Connected Yard, Inc.", + 0x02E8: "American Music Environments", + 0x02E9: "Sensogram Technologies, Inc.", + 0x02EA: "Fujitsu Limited", + 0x02EB: "Ardic Technology", + 0x02EC: "Delta Systems, Inc", + 0x02ED: "HTC Corporation", + 0x02EE: "Citizen Holdings Co., Ltd.", + 0x02EF: "SMART-INNOVATION.inc", + 0x02F0: "Blackrat Software", + 0x02F1: "The Idea Cave, LLC", + 0x02F2: "GoPro, Inc.", + 0x02F3: "AuthAir, Inc", + 0x02F4: "Vensi, Inc.", + 0x02F5: "Indagem Tech LLC", + 0x02F6: "Intemo Technologies", + 0x02F7: "DreamVisions co., Ltd.", + 0x02F8: "Runteq Oy Ltd", + 0x02F9: "IMAGINATION TECHNOLOGIES LTD", + 0x02FA: "CoSTAR TEchnologies", + 0x02FB: "Clarius Mobile Health Corp.", + 0x02FC: "Shanghai Frequen Microelectronics Co., Ltd.", + 0x02FD: "Uwanna, Inc.", + 0x02FE: "Lierda Science & Technology Group Co., Ltd.", + 0x02FF: "Silicon Laboratories", + 0x0300: "World Moto Inc.", + 0x0301: "Giatec Scientific Inc.", + 0x0302: "Loop Devices, Inc", + 0x0303: "IACA electronique", + 0x0304: "Proxy Technologies, Inc.", + 0x0305: "Swipp ApS", + 0x0306: "Life Laboratory Inc.", + 0x0307: "FUJI INDUSTRIAL CO.,LTD.", + 0x0308: "Surefire, LLC", + 0x0309: "Dolby Labs", + 0x030A: "Ellisys", + 0x030B: "Magnitude Lighting Converters", + 0x030C: "Hilti AG", + 0x030D: "Devdata S.r.l.", + 0x030E: "Deviceworx", + 0x030F: "Shortcut Labs", + 0x0310: "SGL Italia S.r.l.", + 0x0311: "PEEQ DATA", + 0x0312: "Ducere Technologies Pvt Ltd", + 0x0313: "DiveNav, Inc.", + 0x0314: "RIIG AI Sp. z o.o.", + 0x0315: "Thermo Fisher Scientific", + 0x0316: "AG Measurematics Pvt. Ltd.", + 0x0317: "CHUO Electronics CO., LTD.", + 0x0318: "Aspenta International", + 0x0319: "Eugster Frismag AG", + 0x031A: "Amber wireless GmbH", + 0x031B: "HQ Inc", + 0x031C: "Lab Sensor Solutions", + 0x031D: "Enterlab ApS", + 0x031E: "Eyefi, Inc.", + 0x031F: "MetaSystem S.p.A.", + 0x0320: "SONO ELECTRONICS. 
CO., LTD", + 0x0321: "Jewelbots", + 0x0322: "Compumedics Limited", + 0x0323: "Rotor Bike Components", + 0x0324: "Astro, Inc.", + 0x0325: "Amotus Solutions", + 0x0326: "Healthwear Technologies (Changzhou)Ltd", + 0x0327: "Essex Electronics", + 0x0328: "Grundfos A/S", + 0x0329: "Eargo, Inc.", + 0x032A: "Electronic Design Lab", + 0x032B: "ESYLUX", + 0x032C: "NIPPON SMT.CO.,Ltd", + 0x032D: "BM innovations GmbH", + 0x032E: "indoormap", + 0x032F: "OttoQ Inc", + 0x0330: "North Pole Engineering", + 0x0331: "3flares Technologies Inc.", + 0x0332: "Electrocompaniet A.S.", + 0x0333: "Mul-T-Lock", + 0x0334: "Corentium AS", + 0x0335: "Enlighted Inc", + 0x0336: "GISTIC", + 0x0337: "AJP2 Holdings, LLC", + 0x0338: "COBI GmbH", + 0x0339: "Blue Sky Scientific, LLC", + 0x033A: "Appception, Inc.", + 0x033B: "Courtney Thorne Limited", + 0x033C: "Virtuosys", + 0x033D: "TPV Technology Limited", + 0x033E: "Monitra SA", + 0x033F: "Automation Components, Inc.", + 0x0340: "Letsense s.r.l.", + 0x0341: "Etesian Technologies LLC", + 0x0342: "GERTEC BRASIL LTDA.", + 0x0343: "Drekker Development Pty. Ltd.", + 0x0344: "Whirl Inc", + 0x0345: "Locus Positioning", + 0x0346: "Acuity Brands Lighting, Inc", + 0x0347: "Prevent Biometrics", + 0x0348: "Arioneo", + 0x0349: "VersaMe", + 0x034A: "Vaddio", + 0x034B: "Libratone A/S", + 0x034C: "HM Electronics, Inc.", + 0x034D: "TASER International, Inc.", + 0x034E: "SafeTrust Inc.", + 0x034F: "Heartland Payment Systems", + 0x0350: "Bitstrata Systems Inc.", + 0x0351: "Pieps GmbH", + 0x0352: "iRiding(Xiamen)Technology Co.,Ltd.", + 0x0353: "Alpha Audiotronics, Inc.", + 0x0354: "TOPPAN FORMS CO.,LTD.", + 0x0355: "Sigma Designs, Inc.", + 0x0356: "Spectrum Brands, Inc.", + 0x0357: "Polymap Wireless", + 0x0358: "MagniWare Ltd.", + 0x0359: "Novotec Medical GmbH", + 0x035A: "Medicom Innovation Partner a/s", + 0x035B: "Matrix Inc.", + 0x035C: "Eaton Corporation", + 0x035D: "KYS", + 0x035E: "Naya Health, Inc.", + 0x035F: "Acromag", + 0x0360: "Insulet Corporation", + 0x0361: "Wellinks Inc.", + 0x0362: "ON Semiconductor", + 0x0363: "FREELAP SA", + 0x0364: "Favero Electronics Srl", + 0x0365: "BioMech Sensor LLC", + 0x0366: "BOLTT Sports technologies Private limited", + 0x0367: "Saphe International", + 0x0368: "Metormote AB", + 0x0369: "littleBits", + 0x036A: "SetPoint Medical", + 0x036B: "BRControls Products BV", + 0x036C: "Zipcar", + 0x036D: "AirBolt Pty Ltd", + 0x036E: "KeepTruckin Inc", + 0x036F: "Motiv, Inc.", + 0x0370: "Wazombi Labs O", + 0x0371: "ORBCOMM", + 0x0372: "Nixie Labs, Inc.", + 0x0373: "AppNearMe Ltd", + 0x0374: "Holman Industries", + 0x0375: "Expain AS", + 0x0376: "Electronic Temperature Instruments Ltd", + 0x0377: "Plejd AB", + 0x0378: "Propeller Health", + 0x0379: "Shenzhen iMCO Electronic Technology Co.,Ltd", + 0x037A: "Algoria", + 0x037B: "Apption Labs Inc.", + 0x037C: "Cronologics Corporation", + 0x037D: "MICRODIA Ltd.", + 0x037E: "lulabytes S.L.", + 0x037F: "Nestec S.A.", + 0x0380: 'LLC "MEGA-F service"', + 0x0381: "Sharp Corporation", + 0x0382: "Precision Outcomes Ltd", + 0x0383: "Kronos Incorporated", + 0x0384: "OCOSMOS Co., Ltd.", + 0x0385: "Embedded Electronic Solutions Ltd. 
dba e2Solutions", + 0x0386: "Aterica Inc.", + 0x0387: "BluStor PMC, Inc.", + 0x0388: "Kapsch TrafficCom AB", + 0x0389: "ActiveBlu Corporation", + 0x038A: "Kohler Mira Limited", + 0x038B: "Noke", + 0x038C: "Appion Inc.", + 0x038D: "Resmed Ltd", + 0x038E: "Crownstone B.V.", + 0x038F: "Xiaomi Inc.", + 0x0390: "INFOTECH s.r.o.", + 0x0391: "Thingsquare AB", + 0x0392: "T&D", + 0x0393: "LAVAZZA S.p.A.", + 0x0394: "Netclearance Systems, Inc.", + 0x0395: "SDATAWAY", + 0x0396: "BLOKS GmbH", + 0x0397: "LEGO System A/S", + 0x0398: "Thetatronics Ltd", + 0x0399: "Nikon Corporation", + 0x039A: "NeST", + 0x039B: "South Silicon Valley Microelectronics", + 0x039C: "ALE International", + 0x039D: "CareView Communications, Inc.", + 0x039E: "SchoolBoard Limited", + 0x039F: "Molex Corporation", + 0x03A0: "BARROT TECHNOLOGY LIMITED (formerly IVT Wireless Limited)", + 0x03A1: "Alpine Labs LLC", + 0x03A2: "Candura Instruments", + 0x03A3: "SmartMovt Technology Co., Ltd", + 0x03A4: "Token Zero Ltd", + 0x03A5: "ACE CAD Enterprise Co., Ltd. (ACECAD)", + 0x03A6: "Medela, Inc", + 0x03A7: "AeroScout", + 0x03A8: "Esrille Inc.", + 0x03A9: "THINKERLY SRL", + 0x03AA: "Exon Sp. z o.o.", + 0x03AB: "Meizu Technology Co., Ltd.", + 0x03AC: "Smablo LTD", + 0x03AD: "XiQ", + 0x03AE: "Allswell Inc.", + 0x03AF: "Comm-N-Sense Corp DBA Verigo", + 0x03B0: "VIBRADORM GmbH", + 0x03B1: "Otodata Wireless Network Inc.", + 0x03B2: "Propagation Systems Limited", + 0x03B3: "Midwest Instruments & Controls", + 0x03B4: "Alpha Nodus, inc.", + 0x03B5: "petPOMM, Inc", + 0x03B6: "Mattel", + 0x03B7: "Airbly Inc.", + 0x03B8: "A-Safe Limited", + 0x03B9: "FREDERIQUE CONSTANT SA", + 0x03BA: "Maxscend Microelectronics Company Limited", + 0x03BB: "Abbott Diabetes Care", + 0x03BC: "ASB Bank Ltd", + 0x03BD: "amadas", + 0x03BE: "Applied Science, Inc.", + 0x03BF: "iLumi Solutions Inc.", + 0x03C0: "Arch Systems Inc.", + 0x03C1: "Ember Technologies, Inc.", + 0x03C2: "Snapchat Inc", + 0x03C3: "Casambi Technologies Oy", + 0x03C4: "Pico Technology Inc.", + 0x03C5: "St. Jude Medical, Inc.", + 0x03C6: "Intricon", + 0x03C7: "Structural Health Systems, Inc.", + 0x03C8: "Avvel International", + 0x03C9: "Gallagher Group", + 0x03CA: "In2things Automation Pvt. Ltd.", + 0x03CB: "SYSDEV Srl", + 0x03CC: "Vonkil Technologies Ltd", + 0x03CD: "Wynd Technologies, Inc.", + 0x03CE: "CONTRINEX S.A.", + 0x03CF: "MIRA, Inc.", + 0x03D0: "Watteam Ltd", + 0x03D1: "Density Inc.", + 0x03D2: "IOT Pot India Private Limited", + 0x03D3: "Sigma Connectivity AB", + 0x03D4: "PEG PEREGO SPA", + 0x03D5: "Wyzelink Systems Inc.", + 0x03D6: "Yota Devices LTD", + 0x03D7: "FINSECUR", + 0x03D8: "Zen-Me Labs Ltd", + 0x03D9: "3IWare Co., Ltd.", + 0x03DA: "EnOcean GmbH", + 0x03DB: "Instabeat, Inc", + 0x03DC: "Nima Labs", + 0x03DD: "Andreas Stihl AG & Co. KG", + 0x03DE: "Nathan Rhoades LLC", + 0x03DF: "Grob Technologies, LLC", + 0x03E0: "Actions (Zhuhai) Technology Co., Limited", + 0x03E1: "SPD Development Company Ltd", + 0x03E2: "Sensoan Oy", + 0x03E3: "Qualcomm Life Inc", + 0x03E4: "Chip-ing AG", + 0x03E5: "ffly4u", + 0x03E6: "IoT Instruments Oy", + 0x03E7: "TRUE Fitness Technology", + 0x03E8: "Reiner Kartengeraete GmbH & Co. 
KG.", + 0x03E9: "SHENZHEN LEMONJOY TECHNOLOGY CO., LTD.", + 0x03EA: "Hello Inc.", + 0x03EB: "Evollve Inc.", + 0x03EC: "Jigowatts Inc.", + 0x03ED: "BASIC MICRO.COM,INC.", + 0x03EE: "CUBE TECHNOLOGIES", + 0x03EF: "foolography GmbH", + 0x03F0: "CLINK", + 0x03F1: "Hestan Smart Cooking Inc.", + 0x03F2: "WindowMaster A/S", + 0x03F3: "Flowscape AB", + 0x03F4: "PAL Technologies Ltd", + 0x03F5: "WHERE, Inc.", + 0x03F6: "Iton Technology Corp.", + 0x03F7: "Owl Labs Inc.", + 0x03F8: "Rockford Corp.", + 0x03F9: "Becon Technologies Co.,Ltd.", + 0x03FA: "Vyassoft Technologies Inc", + 0x03FB: "Nox Medical", + 0x03FC: "Kimberly-Clark", + 0x03FD: "Trimble Navigation Ltd.", + 0x03FE: "Littelfuse", + 0x03FF: "Withings", + 0x0400: "i-developer IT Beratung UG", + 0x0401: "", + 0x0402: "Sears Holdings Corporation", + 0x0403: "Gantner Electronic GmbH", + 0x0404: "Authomate Inc", + 0x0405: "Vertex International, Inc.", + 0x0406: "Airtago", + 0x0407: "Swiss Audio SA", + 0x0408: "ToGetHome Inc.", + 0x0409: "AXIS", + 0x040A: "Openmatics", + 0x040B: "Jana Care Inc.", + 0x040C: "Senix Corporation", + 0x040D: "NorthStar Battery Company, LLC", + 0x040E: "SKF (U.K.) Limited", + 0x040F: "CO-AX Technology, Inc.", + 0x0410: "Fender Musical Instruments", + 0x0411: "Luidia Inc", + 0x0412: "SEFAM", + 0x0413: "Wireless Cables Inc", + 0x0414: "Lightning Protection International Pty Ltd", + 0x0415: "Uber Technologies Inc", + 0x0416: "SODA GmbH", + 0x0417: "Fatigue Science", + 0x0418: "Alpine Electronics Inc.", + 0x0419: "Novalogy LTD", + 0x041A: "Friday Labs Limited", + 0x041B: "OrthoAccel Technologies", + 0x041C: "WaterGuru, Inc.", + 0x041D: "Benning Elektrotechnik und Elektronik GmbH & Co. KG", + 0x041E: "Dell Computer Corporation", + 0x041F: "Kopin Corporation", + 0x0420: "TecBakery GmbH", + 0x0421: "Backbone Labs, Inc.", + 0x0422: "DELSEY SA", + 0x0423: "Chargifi Limited", + 0x0424: "Trainesense Ltd.", + 0x0425: "Unify Software and Solutions GmbH & Co. KG", + 0x0426: "Husqvarna AB", + 0x0427: "Focus fleet and fuel management inc", + 0x0428: "SmallLoop, LLC", + 0x0429: "Prolon Inc.", + 0x042A: "BD Medical", + 0x042B: "iMicroMed Incorporated", + 0x042C: "Ticto N.V.", + 0x042D: "Meshtech AS", + 0x042E: "MemCachier Inc.", + 0x042F: "Danfoss A/S", + 0x0430: "SnapStyk Inc.", + 0x0431: "Amway Corporation", + 0x0432: "Silk Labs, Inc.", + 0x0433: "Pillsy Inc.", + 0x0434: "Hatch Baby, Inc.", + 0x0435: "Blocks Wearables Ltd.", + 0x0436: "Drayson Technologies (Europe) Limited", + 0x0437: "eBest IOT Inc.", + 0x0438: "Helvar Ltd", + 0x0439: "Radiance Technologies", + 0x043A: "Nuheara Limited", + 0x043B: "Appside co., ltd.", + 0x043C: "DeLaval", + 0x043D: "Coiler Corporation", + 0x043E: "Thermomedics, Inc.", + 0x043F: "Tentacle Sync GmbH", + 0x0440: "Valencell, Inc.", + 0x0441: "iProtoXi Oy", + 0x0442: "SECOM CO., LTD.", + 0x0443: "Tucker International LLC", + 0x0444: "Metanate Limited", + 0x0445: "Kobian Canada Inc.", + 0x0446: "NETGEAR, Inc.", + 0x0447: "Fabtronics Australia Pty Ltd", + 0x0448: "Grand Centrix GmbH", + 0x0449: "1UP USA.com llc", + 0x044A: "SHIMANO INC.", + 0x044B: "Nain Inc.", + 0x044C: "LifeStyle Lock, LLC", + 0x044D: "VEGA Grieshaber KG", + 0x044E: "Xtrava Inc.", + 0x044F: "TTS Tooltechnic Systems AG & Co. 
KG", + 0x0450: "Teenage Engineering AB", + 0x0451: "Tunstall Nordic AB", + 0x0452: "Svep Design Center AB", + 0x0453: "GreenPeak Technologies BV", + 0x0454: "Sphinx Electronics GmbH & Co KG", + 0x0455: "Atomation", + 0x0456: "Nemik Consulting Inc", + 0x0457: "RF INNOVATION", + 0x0458: "Mini Solution Co., Ltd.", + 0x0459: "Lumenetix, Inc", + 0x045A: "2048450 Ontario Inc", + 0x045B: "SPACEEK LTD", + 0x045C: "Delta T Corporation", + 0x045D: "Boston Scientific Corporation", + 0x045E: "Nuviz, Inc.", + 0x045F: "Real Time Automation, Inc.", + 0x0460: "Kolibree", + 0x0461: "vhf elektronik GmbH", + 0x0462: "Bonsai Systems GmbH", + 0x0463: "Fathom Systems Inc.", + 0x0464: "Bellman & Symfon", + 0x0465: "International Forte Group LLC", + 0x0466: "CycleLabs Solutions inc.", + 0x0467: "Codenex Oy", + 0x0468: "Kynesim Ltd", + 0x0469: "Palago AB", + 0x046A: "INSIGMA INC.", + 0x046B: "PMD Solutions", + 0x046C: "Qingdao Realtime Technology Co., Ltd.", + 0x046D: "BEGA Gantenbrink-Leuchten KG", + 0x046E: "Pambor Ltd.", + 0x046F: "Develco Products A/S", + 0x0470: "iDesign s.r.l.", + 0x0471: "TiVo Corp", + 0x0472: "Control-J Pty Ltd", + 0x0473: "Steelcase, Inc.", + 0x0474: "iApartment co., ltd.", + 0x0475: "Icom inc.", + 0x0476: "Oxstren Wearable Technologies Private Limited", + 0x0477: "Blue Spark Technologies", + 0x0478: "FarSite Communications Limited", + 0x0479: "mywerk system GmbH", + 0x047A: "Sinosun Technology Co., Ltd.", + 0x047B: "MIYOSHI ELECTRONICS CORPORATION", + 0x047C: "POWERMAT LTD", + 0x047D: "Occly LLC", + 0x047E: "OurHub Dev IvS", + 0x047F: "Pro-Mark, Inc.", + 0x0480: "Dynometrics Inc.", + 0x0481: "Quintrax Limited", + 0x0482: "POS Tuning Udo Vosshenrich GmbH & Co. KG", + 0x0483: "Multi Care Systems B.V.", + 0x0484: "Revol Technologies Inc", + 0x0485: "SKIDATA AG", + 0x0486: "DEV TECNOLOGIA INDUSTRIA, COMERCIO E MANUTENCAO DE EQUIPAMENTOS LTDA. - ME", + 0x0487: "Centrica Connected Home", + 0x0488: "Automotive Data Solutions Inc", + 0x0489: "Igarashi Engineering", + 0x048A: "Taelek Oy", + 0x048B: "CP Electronics Limited", + 0x048C: "Vectronix AG", + 0x048D: "S-Labs Sp. z o.o.", + 0x048E: "Companion Medical, Inc.", + 0x048F: "BlueKitchen GmbH", + 0x0490: "Matting AB", + 0x0491: "SOREX - Wireless Solutions GmbH", + 0x0492: "ADC Technology, Inc.", + 0x0493: "Lynxemi Pte Ltd", + 0x0494: "SENNHEISER electronic GmbH & Co. KG", + 0x0495: "LMT Mercer Group, Inc", + 0x0496: "Polymorphic Labs LLC", + 0x0497: "Cochlear Limited", + 0x0498: "METER Group, Inc. USA", + 0x0499: "Ruuvi Innovations Ltd.", + 0x049A: "Situne AS", + 0x049B: "nVisti, LLC", + 0x049C: "DyOcean", + 0x049D: "Uhlmann & Zacher GmbH", + 0x049E: "AND!XOR LLC", + 0x049F: "tictote AB", + 0x04A0: "Vypin, LLC", + 0x04A1: "PNI Sensor Corporation", + 0x04A2: "ovrEngineered, LLC", + 0x04A3: "GT-tronics HK Ltd", + 0x04A4: "Herbert Waldmann GmbH & Co. KG", + 0x04A5: "Guangzhou FiiO Electronics Technology Co.,Ltd", + 0x04A6: "Vinetech Co., Ltd", + 0x04A7: "Dallas Logic Corporation", + 0x04A8: "BioTex, Inc.", + 0x04A9: "DISCOVERY SOUND TECHNOLOGY, LLC", + 0x04AA: "LINKIO SAS", + 0x04AB: "Harbortronics, Inc.", + 0x04AC: "Undagrid B.V.", + 0x04AD: "Shure Inc", + 0x04AE: "ERM Electronic Systems LTD", + 0x04AF: "BIOROWER Handelsagentur GmbH", + 0x04B0: "Weba Sport und Med. Artikel GmbH", + 0x04B1: "Kartographers Technologies Pvt. 
Ltd.", + 0x04B2: "The Shadow on the Moon", + 0x04B3: "mobike (Hong Kong) Limited", + 0x04B4: "Inuheat Group AB", + 0x04B5: "Swiftronix AB", + 0x04B6: "Diagnoptics Technologies", + 0x04B7: "Analog Devices, Inc.", + 0x04B8: "Soraa Inc.", + 0x04B9: "CSR Building Products Limited", + 0x04BA: "Crestron Electronics, Inc.", + 0x04BB: "Neatebox Ltd", + 0x04BC: "Draegerwerk AG & Co. KGaA", + 0x04BD: "AlbynMedical", + 0x04BE: "Averos FZCO", + 0x04BF: "VIT Initiative, LLC", + 0x04C0: "Statsports International", + 0x04C1: "Sospitas, s.r.o.", + 0x04C2: "Dmet Products Corp.", + 0x04C3: "Mantracourt Electronics Limited", + 0x04C4: "TeAM Hutchins AB", + 0x04C5: "Seibert Williams Glass, LLC", + 0x04C6: "Insta GmbH", + 0x04C7: "Svantek Sp. z o.o.", + 0x04C8: "Shanghai Flyco Electrical Appliance Co., Ltd.", + 0x04C9: "Thornwave Labs Inc", + 0x04CA: "Steiner-Optik GmbH", + 0x04CB: "Novo Nordisk A/S", + 0x04CC: "Enflux Inc.", + 0x04CD: "Safetech Products LLC", + 0x04CE: "GOOOLED S.R.L.", + 0x04CF: "DOM Sicherheitstechnik GmbH & Co. KG", + 0x04D0: "Olympus Corporation", + 0x04D1: "KTS GmbH", + 0x04D2: "Anloq Technologies Inc.", + 0x04D3: "Queercon, Inc", + 0x04D4: "5th Element Ltd", + 0x04D5: "Gooee Limited", + 0x04D6: "LUGLOC LLC", + 0x04D7: "Blincam, Inc.", + 0x04D8: "FUJIFILM Corporation", + 0x04D9: "RandMcNally", + 0x04DA: "Franceschi Marina snc", + 0x04DB: "Engineered Audio, LLC.", + 0x04DC: "IOTTIVE (OPC) PRIVATE LIMITED", + 0x04DD: "4MOD Technology", + 0x04DE: "Lutron Electronics Co., Inc.", + 0x04DF: "Emerson", + 0x04E0: "Guardtec, Inc.", + 0x04E1: "REACTEC LIMITED", + 0x04E2: "EllieGrid", + 0x04E3: "Under Armour", + 0x04E4: "Woodenshark", + 0x04E5: "Avack Oy", + 0x04E6: "Smart Solution Technology, Inc.", + 0x04E7: "REHABTRONICS INC.", + 0x04E8: "STABILO International", + 0x04E9: "Busch Jaeger Elektro GmbH", + 0x04EA: "Pacific Bioscience Laboratories, Inc", + 0x04EB: "Bird Home Automation GmbH", + 0x04EC: "Motorola Solutions", + 0x04ED: "R9 Technology, Inc.", + 0x04EE: "Auxivia", + 0x04EF: "DaisyWorks, Inc", + 0x04F0: "Kosi Limited", + 0x04F1: "Theben AG", + 0x04F2: "InDreamer Techsol Private Limited", + 0x04F3: "Cerevast Medical", + 0x04F4: "ZanCompute Inc.", + 0x04F5: "Pirelli Tyre S.P.A.", + 0x04F6: "McLear Limited", + 0x04F7: "Shenzhen Huiding Technology Co.,Ltd.", + 0x04F8: "Convergence Systems Limited", + 0x04F9: "Interactio", + 0x04FA: "Androtec GmbH", + 0x04FB: "Benchmark Drives GmbH & Co. KG", + 0x04FC: "SwingLync L. L. C.", + 0x04FD: "Tapkey GmbH", + 0x04FE: "Woosim Systems Inc.", + 0x04FF: "Microsemi Corporation", + 0x0500: "Wiliot LTD.", + 0x0501: "Polaris IND", + 0x0502: "Specifi-Kali LLC", + 0x0503: "Locoroll, Inc", + 0x0504: "PHYPLUS Inc", + 0x0505: "Inplay Technologies LLC", + 0x0506: "Hager", + 0x0507: "Yellowcog", + 0x0508: "Axes System sp. z o. 
o.", + 0x0509: "myLIFTER Inc.", + 0x050A: "Shake-on B.V.", + 0x050B: "Vibrissa Inc.", + 0x050C: "OSRAM GmbH", + 0x050D: "TRSystems GmbH", + 0x050E: "Yichip Microelectronics (Hangzhou) Co.,Ltd.", + 0x050F: "Foundation Engineering LLC", + 0x0510: "UNI-ELECTRONICS, INC.", + 0x0511: "Brookfield Equinox LLC", + 0x0512: "Soprod SA", + 0x0513: "9974091 Canada Inc.", + 0x0514: "FIBRO GmbH", + 0x0515: "RB Controls Co., Ltd.", + 0x0516: "Footmarks", + 0x0517: "Amtronic Sverige AB (formerly Amcore AB)", + 0x0518: "MAMORIO.inc", + 0x0519: "Tyto Life LLC", + 0x051A: "Leica Camera AG", + 0x051B: "Angee Technologies Ltd.", + 0x051C: "EDPS", + 0x051D: "OFF Line Co., Ltd.", + 0x051E: "Detect Blue Limited", + 0x051F: "Setec Pty Ltd", + 0x0520: "Target Corporation", + 0x0521: "IAI Corporation", + 0x0522: "NS Tech, Inc.", + 0x0523: "MTG Co., Ltd.", + 0x0524: "Hangzhou iMagic Technology Co., Ltd", + 0x0525: "HONGKONG NANO IC TECHNOLOGIES CO., LIMITED", + 0x0526: "Honeywell International Inc.", + 0x0527: "Albrecht JUNG", + 0x0528: "Lunera Lighting Inc.", + 0x0529: "Lumen UAB", + 0x052A: "Keynes Controls Ltd", + 0x052B: "Novartis AG", + 0x052C: "Geosatis SA", + 0x052D: "EXFO, Inc.", + 0x052E: "LEDVANCE GmbH", + 0x052F: "Center ID Corp.", + 0x0530: "Adolene, Inc.", + 0x0531: "D&M Holdings Inc.", + 0x0532: "CRESCO Wireless, Inc.", + 0x0533: "Nura Operations Pty Ltd", + 0x0534: "Frontiergadget, Inc.", + 0x0535: "Smart Component Technologies Limited", + 0x0536: "ZTR Control Systems LLC", + 0x0537: "MetaLogics Corporation", + 0x0538: "Medela AG", + 0x0539: "OPPLE Lighting Co., Ltd", + 0x053A: "Savitech Corp.,", + 0x053B: "prodigy", + 0x053C: "Screenovate Technologies Ltd", + 0x053D: "TESA SA", + 0x053E: "CLIM8 LIMITED", + 0x053F: "Silergy Corp", + 0x0540: "SilverPlus, Inc", + 0x0541: "Sharknet srl", + 0x0542: "Mist Systems, Inc.", + 0x0543: "MIWA LOCK CO.,Ltd", + 0x0544: "OrthoSensor, Inc.", + 0x0545: "Candy Hoover Group s.r.l", + 0x0546: "Apexar Technologies S.A.", + 0x0547: "LOGICDATA d.o.o.", + 0x0548: "Knick Elektronische Messgeraete GmbH & Co. KG", + 0x0549: "Smart Technologies and Investment Limited", + 0x054A: "Linough Inc.", + 0x054B: "Advanced Electronic Designs, Inc.", + 0x054C: "Carefree Scott Fetzer Co Inc", + 0x054D: "Sensome", + 0x054E: "FORTRONIK storitve d.o.o.", + 0x054F: "Sinnoz", + 0x0550: "Versa Networks, Inc.", + 0x0551: "Sylero", + 0x0552: "Avempace SARL", + 0x0553: "Nintendo Co., Ltd.", + 0x0554: "National Instruments", + 0x0555: "KROHNE Messtechnik GmbH", + 0x0556: "Otodynamics Ltd", + 0x0557: "Arwin Technology Limited", + 0x0558: "benegear, inc.", + 0x0559: "Newcon Optik", + 0x055A: "CANDY HOUSE, Inc.", + 0x055B: "FRANKLIN TECHNOLOGY INC", + 0x055C: "Lely", + 0x055D: "Valve Corporation", + 0x055E: "Hekatron Vertriebs GmbH", + 0x055F: "PROTECH S.A.S. 
DI GIRARDI ANDREA & C.", + 0x0560: "Sarita CareTech APS (formerly Sarita CareTech IVS)", + 0x0561: "Finder S.p.A.", + 0x0562: "Thalmic Labs Inc.", + 0x0563: "Steinel Vertrieb GmbH", + 0x0564: "Beghelli Spa", + 0x0565: "Beijing Smartspace Technologies Inc.", + 0x0566: "CORE TRANSPORT TECHNOLOGIES NZ LIMITED", + 0x0567: "Xiamen Everesports Goods Co., Ltd", + 0x0568: "Bodyport Inc.", + 0x0569: "Audionics System, INC.", + 0x056A: "Flipnavi Co.,Ltd.", + 0x056B: "Rion Co., Ltd.", + 0x056C: "Long Range Systems, LLC", + 0x056D: "Redmond Industrial Group LLC", + 0x056E: "VIZPIN INC.", + 0x056F: "BikeFinder AS", + 0x0570: "Consumer Sleep Solutions LLC", + 0x0571: "PSIKICK, INC.", + 0x0572: "AntTail.com", + 0x0573: "Lighting Science Group Corp.", + 0x0574: "AFFORDABLE ELECTRONICS INC", + 0x0575: "Integral Memroy Plc", + 0x0576: "Globalstar, Inc.", + 0x0577: "True Wearables, Inc.", + 0x0578: "Wellington Drive Technologies Ltd", + 0x0579: "Ensemble Tech Private Limited", + 0x057A: "OMNI Remotes", + 0x057B: "Duracell U.S. Operations Inc.", + 0x057C: "Toor Technologies LLC", + 0x057D: "Instinct Performance", + 0x057E: "Beco, Inc", + 0x057F: "Scuf Gaming International, LLC", + 0x0580: "ARANZ Medical Limited", + 0x0581: "LYS TECHNOLOGIES LTD", + 0x0582: "Breakwall Analytics, LLC", + 0x0583: "Code Blue Communications", + 0x0584: "Gira Giersiepen GmbH & Co. KG", + 0x0585: "Hearing Lab Technology", + 0x0586: "LEGRAND", + 0x0587: "Derichs GmbH", + 0x0588: "ALT-TEKNIK LLC", + 0x0589: "Star Technologies", + 0x058A: "START TODAY CO.,LTD.", + 0x058B: "Maxim Integrated Products", + 0x058C: "MERCK Kommanditgesellschaft auf Aktien", + 0x058D: "Jungheinrich Aktiengesellschaft", + 0x058E: "Oculus VR, LLC", + 0x058F: "HENDON SEMICONDUCTORS PTY LTD", + 0x0590: "Pur3 Ltd", + 0x0591: "Viasat Group S.p.A.", + 0x0592: "IZITHERM", + 0x0593: "Spaulding Clinical Research", + 0x0594: "Kohler Company", + 0x0595: "Inor Process AB", + 0x0596: "My Smart Blinds", + 0x0597: "RadioPulse Inc", + 0x0598: "rapitag GmbH", + 0x0599: "Lazlo326, LLC.", + 0x059A: "Teledyne Lecroy, Inc.", + 0x059B: "Dataflow Systems Limited", + 0x059C: "Macrogiga Electronics", + 0x059D: "Tandem Diabetes Care", + 0x059E: "Polycom, Inc.", + 0x059F: "Fisher & Paykel Healthcare", + 0x05A0: "RCP Software Oy", + 0x05A1: "Shanghai Xiaoyi Technology Co.,Ltd.", + 0x05A2: "ADHERIUM(NZ) LIMITED", + 0x05A3: "Axiomware Systems Incorporated", + 0x05A4: "O. E. M. Controls, Inc.", + 0x05A5: "Kiiroo BV", + 0x05A6: "Telecon Mobile Limited", + 0x05A7: "Sonos Inc", + 0x05A8: "Tom Allebrandi Consulting", + 0x05A9: "Monidor", + 0x05AA: "Tramex Limited", + 0x05AB: "Nofence AS", + 0x05AC: "GoerTek Dynaudio Co., Ltd.", + 0x05AD: "INIA", + 0x05AE: "CARMATE MFG.CO.,LTD", + 0x05AF: "ONvocal", + 0x05B0: "NewTec GmbH", + 0x05B1: "Medallion Instrumentation Systems", + 0x05B2: "CAREL INDUSTRIES S.P.A.", + 0x05B3: "Parabit Systems, Inc.", + 0x05B4: "White Horse Scientific ltd", + 0x05B5: "verisilicon", + 0x05B6: "Elecs Industry Co.,Ltd.", + 0x05B7: "Beijing Pinecone Electronics Co.,Ltd.", + 0x05B8: "Ambystoma Labs Inc.", + 0x05B9: "Suzhou Pairlink Network Technology", + 0x05BA: "igloohome", + 0x05BB: "Oxford Metrics plc", + 0x05BC: "Leviton Mfg. Co., Inc.", + 0x05BD: "ULC Robotics Inc.", + 0x05BE: "RFID Global by Softwork SrL", + 0x05BF: "Real-World-Systems Corporation", + 0x05C0: "Nalu Medical, Inc.", + 0x05C1: "P.I.Engineering", + 0x05C2: "Grote Industries", + 0x05C3: "Runtime, Inc.", + 0x05C4: "Codecoup sp. z o.o. sp. k.", + 0x05C5: "SELVE GmbH & Co. 
KG", + 0x05C6: "Smart Animal Training Systems, LLC", + 0x05C7: "Lippert Components, INC", + 0x05C8: "SOMFY SAS", + 0x05C9: "TBS Electronics B.V.", + 0x05CA: "MHL Custom Inc", + 0x05CB: "LucentWear LLC", + 0x05CC: "WATTS ELECTRONICS", + 0x05CD: "RJ Brands LLC", + 0x05CE: "V-ZUG Ltd", + 0x05CF: "Biowatch SA", + 0x05D0: "Anova Applied Electronics", + 0x05D1: "Lindab AB", + 0x05D2: "frogblue TECHNOLOGY GmbH", + 0x05D3: "Acurable Limited", + 0x05D4: "LAMPLIGHT Co., Ltd.", + 0x05D5: "TEGAM, Inc.", + 0x05D6: "Zhuhai Jieli technology Co.,Ltd", + 0x05D7: "modum.io AG", + 0x05D8: "Farm Jenny LLC", + 0x05D9: "Toyo Electronics Corporation", + 0x05DA: "Applied Neural Research Corp", + 0x05DB: "Avid Identification Systems, Inc.", + 0x05DC: "Petronics Inc.", + 0x05DD: "essentim GmbH", + 0x05DE: "QT Medical INC.", + 0x05DF: "VIRTUALCLINIC.DIRECT LIMITED", + 0x05E0: "Viper Design LLC", + 0x05E1: "Human, Incorporated", + 0x05E2: "stAPPtronics GmbH", + 0x05E3: "Elemental Machines, Inc.", + 0x05E4: "Taiyo Yuden Co., Ltd", + 0x05E5: "INEO ENERGY& SYSTEMS", + 0x05E6: "Motion Instruments Inc.", + 0x05E7: "PressurePro", + 0x05E8: "COWBOY", + 0x05E9: "iconmobile GmbH", + 0x05EA: "ACS-Control-System GmbH", + 0x05EB: "Bayerische Motoren Werke AG", + 0x05EC: "Gycom Svenska AB", + 0x05ED: "Fuji Xerox Co., Ltd", + 0x05EE: "Glide Inc.", + 0x05EF: "SIKOM AS", + 0x05F0: "beken", + 0x05F1: "The Linux Foundation", + 0x05F2: "Try and E CO.,LTD.", + 0x05F3: "SeeScan", + 0x05F4: "Clearity, LLC", + 0x05F5: "GS TAG", + 0x05F6: "DPTechnics", + 0x05F7: "TRACMO, INC.", + 0x05F8: "Anki Inc.", + 0x05F9: "Hagleitner Hygiene International GmbH", + 0x05FA: "Konami Sports Life Co., Ltd.", + 0x05FB: "Arblet Inc.", + 0x05FC: "Masbando GmbH", + 0x05FD: "Innoseis", + 0x05FE: "Niko", + 0x05FF: "Wellnomics Ltd", + 0x0600: "iRobot Corporation", + 0x0601: "Schrader Electronics", + 0x0602: "Geberit International AG", + 0x0603: "Fourth Evolution Inc", + 0x0604: "Cell2Jack LLC", + 0x0605: "FMW electronic Futterer u. Maier-Wolf OHG", + 0x0606: "John Deere", + 0x0607: "Rookery Technology Ltd", + 0x0608: "KeySafe-Cloud", + 0x0609: "BUCHI Labortechnik AG", + 0x060A: "IQAir AG", + 0x060B: "Triax Technologies Inc", + 0x060C: "Vuzix Corporation", + 0x060D: "TDK Corporation", + 0x060E: "Blueair AB", + 0x060F: "Signify Netherlands (formerlyPhilips Lighting B.V.)", + 0x0610: "ADH GUARDIAN USA LLC", + 0x0611: "Beurer GmbH", + 0x0612: "Playfinity AS", + 0x0613: "Hans Dinslage GmbH", + 0x0614: "OnAsset Intelligence, Inc.", + 0x0615: "INTER ACTION Corporation", + 0x0616: "OS42 UG (haftungsbeschraenkt)", + 0x0617: "WIZCONNECTED COMPANY LIMITED", + 0x0618: "Audio-Technica Corporation", + 0x0619: "Six Guys Labs, s.r.o.", + 0x061A: "R.W. Beckett Corporation", + 0x061B: "silex technology, inc.", + 0x061C: "Univations Limited", + 0x061D: "SENS Innovation ApS", + 0x061E: "Diamond Kinetics, Inc.", + 0x061F: "Phrame Inc.", + 0x0620: "Forciot Oy", + 0x0621: "Noordung d.o.o.", + 0x0622: "Beam Labs, LLC", + 0x0623: "Philadelphia Scientific (U.K.) 
Limited", + 0x0624: "Biovotion AG", + 0x0625: "Square Panda, Inc.", + 0x0626: "Amplifico", + 0x0627: "WEG S.A.", + 0x0628: "Ensto Oy", + 0x0629: "PHONEPE PVT LTD", + 0x062A: "Lunatico Astronomia SL", + 0x062B: "MinebeaMitsumi Inc.", + 0x062C: "ASPion GmbH", + 0x062D: "Vossloh-Schwabe Deutschland GmbH", + 0x062E: "Procept", + 0x062F: "ONKYO Corporation", + 0x0630: "Asthrea D.O.O.", + 0x0631: "Fortiori Design LLC", + 0x0632: "Hugo Muller GmbH & Co KG", + 0x0633: "Wangi Lai PLT", + 0x0634: "Fanstel Corp", + 0x0635: "Crookwood", + 0x0636: "ELECTRONICA INTEGRAL DE SONIDO S.A.", + 0x0637: "GiP Innovation Tools GmbH", + 0x0638: "LX SOLUTIONS PTY LIMITED", + 0x0639: "Shenzhen Minew Technologies Co., Ltd.", + 0x063A: "Prolojik Limited", + 0x063B: "Kromek Group Plc", + 0x063C: "Contec Medical Systems Co., Ltd.", + 0x063D: "Xradio Technology Co.,Ltd.", + 0x063E: "The Indoor Lab, LLC", + 0x063F: "LDL TECHNOLOGY", + 0x0640: "Parkifi", + 0x0641: "Revenue Collection Systems FRANCE SAS", + 0x0642: "Bluetrum Technology Co.,Ltd", + 0x0643: "makita corporation", + 0x0644: "Apogee Instruments", + 0x0645: "BM3", + 0x0646: "SGV Group Holding GmbH & Co. KG", + 0x0647: "MED-EL", + 0x0648: "Ultune Technologies", + 0x0649: "Ryeex Technology Co.,Ltd.", + 0x064A: "Open Research Institute, Inc.", + 0x064B: "Scale-Tec, Ltd", + 0x064C: "Zumtobel Group AG", + 0x064D: "iLOQ Oy", + 0x064E: "KRUXWorks Technologies Private Limited", + 0x064F: "Digital Matter Pty Ltd", + 0x0650: "Coravin, Inc.", + 0x0651: "Stasis Labs, Inc.", + 0x0652: "ITZ Innovations- und Technologiezentrum GmbH", + 0x0653: "Meggitt SA", + 0x0654: "Ledlenser GmbH & Co. KG", + 0x0655: "Renishaw PLC", + 0x0656: "ZhuHai AdvanPro Technology Company Limited", + 0x0657: "Meshtronix Limited", + 0x0658: "Payex Norge AS", + 0x0659: "UnSeen Technologies Oy", + 0x065A: "Zound Industries International AB", + 0x065B: "Sesam Solutions BV", + 0x065C: "PixArt Imaging Inc.", + 0x065D: "Panduit Corp.", + 0x065E: "Alo AB", + 0x065F: "Ricoh Company Ltd", + 0x0660: "RTC Industries, Inc.", + 0x0661: "Mode Lighting Limited", + 0x0662: "Particle Industries, Inc.", + 0x0663: "Advanced Telemetry Systems, Inc.", + 0x0664: "RHA TECHNOLOGIES LTD", + 0x0665: "Pure International Limited", + 0x0666: "WTO Werkzeug-Einrichtungen GmbH", + 0x0667: "Spark Technology Labs Inc.", + 0x0668: "Bleb Technology srl", + 0x0669: "Livanova USA, Inc.", + 0x066A: "Brady Worldwide Inc.", + 0x066B: "DewertOkin GmbH", + 0x066C: "Ztove ApS", + 0x066D: "Venso EcoSolutions AB", + 0x066E: "Eurotronik Kranj d.o.o.", + 0x066F: "Hug Technology Ltd", + 0x0670: "Gema Switzerland GmbH", + 0x0671: "Buzz Products Ltd.", + 0x0672: "Kopi", + 0x0673: "Innova Ideas Limited", + 0x0674: "BeSpoon", + 0x0675: "Deco Enterprises, Inc.", + 0x0676: "Expai Solutions Private Limited", + 0x0677: "Innovation First, Inc.", + 0x0678: "SABIK Offshore GmbH", + 0x0679: "4iiii Innovations Inc.", + 0x067A: "The Energy Conservatory, Inc.", + 0x067B: "I.FARM, INC.", + 0x067C: "Tile, Inc.", + 0x067D: "Form Athletica Inc.", + 0x067E: "MbientLab Inc", + 0x067F: "NETGRID S.N.C. 
DI BISSOLI MATTEO, CAMPOREALE SIMONE, TOGNETTI FEDERICO", + 0x0680: "Mannkind Corporation", + 0x0681: "Trade FIDES a.s.", + 0x0682: "Photron Limited", + 0x0683: "Eltako GmbH", + 0x0684: "Dermalapps, LLC", + 0x0685: "Greenwald Industries", + 0x0686: "inQs Co., Ltd.", + 0x0687: "Cherry GmbH", + 0x0688: "Amsted Digital Solutions Inc.", + 0x0689: "Tacx b.v.", + 0x068A: "Raytac Corporation", + 0x068B: "Jiangsu Teranovo Tech Co., Ltd.", + 0x068C: "Changzhou Sound Dragon Electronics and Acoustics Co., Ltd", + 0x068D: "JetBeep Inc.", + 0x068E: "Razer Inc.", + 0x068F: "JRM Group Limited", + 0x0690: "Eccrine Systems, Inc.", + 0x0691: "Curie Point AB", + 0x0692: "Georg Fischer AG", + 0x0693: "Hach - Danaher", + 0x0694: "T&A Laboratories LLC", + 0x0695: "Koki Holdings Co., Ltd.", + 0x0696: "Gunakar Private Limited", + 0x0697: "Stemco Products Inc", + 0x0698: "Wood IT Security, LLC", + 0x0699: "RandomLab SAS", + 0x069A: "Adero, Inc. (formerly as TrackR, Inc.)", + 0x069B: "Dragonchip Limited", + 0x069C: "Noomi AB", + 0x069D: "Vakaros LLC", + 0x069E: "Delta Electronics, Inc.", + 0x069F: "FlowMotion Technologies AS", + 0x06A0: "OBIQ Location Technology Inc.", + 0x06A1: "Cardo Systems, Ltd", + 0x06A2: "Globalworx GmbH", + 0x06A3: "Nymbus, LLC", + 0x06A4: "Sanyo Techno Solutions Tottori Co., Ltd.", + 0x06A5: "TEKZITEL PTY LTD", + 0x06A6: "Roambee Corporation", + 0x06A7: "Chipsea Technologies (ShenZhen) Corp.", + 0x06A8: "GD Midea Air-Conditioning Equipment Co., Ltd.", + 0x06A9: "Soundmax Electronics Limited", + 0x06AA: "Produal Oy", + 0x06AB: "HMS Industrial Networks AB", + 0x06AC: "Ingchips Technology Co., Ltd.", + 0x06AD: "InnovaSea Systems Inc.", + 0x06AE: "SenseQ Inc.", + 0x06AF: "Shoof Technologies", + 0x06B0: "BRK Brands, Inc.", + 0x06B1: "SimpliSafe, Inc.", + 0x06B2: "Tussock Innovation 2013 Limited", + 0x06B3: "The Hablab ApS", + 0x06B4: "Sencilion Oy", + 0x06B5: "Wabilogic Ltd.", + 0x06B6: "Sociometric Solutions, Inc.", + 0x06B7: "iCOGNIZE GmbH", + 0x06B8: "ShadeCraft, Inc", + 0x06B9: "Beflex Inc.", + 0x06BA: "Beaconzone Ltd", + 0x06BB: "Leaftronix Analogic Solutions Private Limited", + 0x06BC: "TWS Srl", + 0x06BD: "ABB Oy", + 0x06BE: "HitSeed Oy", + 0x06BF: "Delcom Products Inc.", + 0x06C0: "CAME S.p.A.", + 0x06C1: "Alarm.com Holdings, Inc", + 0x06C2: "Measurlogic Inc.", + 0x06C3: "King I Electronics.Co.,Ltd", + 0x06C4: "Dream Labs GmbH", + 0x06C5: "Urban Compass, Inc", + 0x06C6: "Simm Tronic Limited", + 0x06C7: "Somatix Inc", + 0x06C8: "Storz & Bickel GmbH & Co. 
KG", + 0x06C9: "MYLAPS B.V.", + 0x06CA: "Shenzhen Zhongguang Infotech Technology Development Co., Ltd", + 0x06CB: "Dyeware, LLC", + 0x06CC: "Dongguan SmartAction Technology Co.,Ltd.", + 0x06CD: "DIG Corporation", + 0x06CE: "FIOR & GENTZ", + 0x06CF: "Belparts N.V.", + 0x06D0: "Etekcity Corporation", + 0x06D1: "Meyer Sound Laboratories, Incorporated", + 0x06D2: "CeoTronics AG", + 0x06D3: "TriTeq Lock and Security, LLC", + 0x06D4: "DYNAKODE TECHNOLOGY PRIVATE LIMITED", + 0x06D5: "Sensirion AG", + 0x06D6: "JCT Healthcare Pty Ltd", + 0x06D7: "FUBA Automotive Electronics GmbH", + 0x06D8: "AW Company", + 0x06D9: "Shanghai Mountain View Silicon Co.,Ltd.", + 0x06DA: "Zliide Technologies ApS", + 0x06DB: "Automatic Labs, Inc.", + 0x06DC: "Industrial Network Controls, LLC", + 0x06DD: "Intellithings Ltd.", + 0x06DE: "Navcast, Inc.", + 0x06DF: "Hubbell Lighting, Inc.", + 0x06E0: "Avaya", + 0x06E1: "Milestone AV Technologies LLC", + 0x06E2: "Alango Technologies Ltd", + 0x06E3: "Spinlock Ltd", + 0x06E4: "Aluna", + 0x06E5: "OPTEX CO.,LTD.", + 0x06E6: "NIHON DENGYO KOUSAKU", + 0x06E7: "VELUX A/S", + 0x06E8: "Almendo Technologies GmbH", + 0x06E9: "Zmartfun Electronics, Inc.", + 0x06EA: "SafeLine Sweden AB", + 0x06EB: "Houston Radar LLC", + 0x06EC: "Sigur", + 0x06ED: "J Neades Ltd", + 0x06EE: "Avantis Systems Limited", + 0x06EF: "ALCARE Co., Ltd.", + 0x06F0: "Chargy Technologies, SL", + 0x06F1: "Shibutani Co., Ltd.", + 0x06F2: "Trapper Data AB", + 0x06F3: "Alfred International Inc.", + 0x06F4: "Near Field Solutions Ltd", + 0x06F5: "Vigil Technologies Inc.", + 0x06F6: "Vitulo Plus BV", + 0x06F7: "WILKA Schliesstechnik GmbH", + 0x06F8: "BodyPlus Technology Co.,Ltd", + 0x06F9: "happybrush GmbH", + 0x06FA: "Enequi AB", + 0x06FB: "Sartorius AG", + 0x06FC: "Tom Communication Industrial Co.,Ltd.", + 0x06FD: "ESS Embedded System Solutions Inc.", + 0x06FE: "Mahr GmbH", + 0x06FF: "Redpine Signals Inc", + 0x0700: "TraqFreq LLC", + 0x0701: "PAFERS TECH", + 0x0702: 'Akciju sabiedriba "SAF TEHNIKA"', + 0x0703: "Beijing Jingdong Century Trading Co., Ltd.", + 0x0704: "JBX Designs Inc.", + 0x0705: "AB Electrolux", + 0x0706: "Wernher von Braun Center for ASdvanced Research", + 0x0707: "Essity Hygiene and Health Aktiebolag", + 0x0708: "Be Interactive Co., Ltd", + 0x0709: "Carewear Corp.", + 0x070A: "Huf Hlsbeck & Frst GmbH & Co. KG", + 0x070B: "Element Products, Inc.", + 0x070C: "Beijing Winner Microelectronics Co.,Ltd", + 0x070D: "SmartSnugg Pty Ltd", + 0x070E: "FiveCo Sarl", + 0x070F: "California Things Inc.", + 0x0710: "Audiodo AB", + 0x0711: "ABAX AS", + 0x0712: "Bull Group Company Limited", + 0x0713: "Respiri Limited", + 0x0714: "MindPeace Safety LLC", + 0x0715: "MBARC LABS Inc (formerly Vgyan Solutions)", + 0x0716: "Altonics", + 0x0717: "iQsquare BV", + 0x0718: "IDIBAIX enginneering", + 0x0719: "ECSG", + 0x071A: "REVSMART WEARABLE HK CO LTD", + 0x071B: "Precor", + 0x071C: "F5 Sports, Inc", + 0x071D: "exoTIC Systems", + 0x071E: "DONGGUAN HELE ELECTRONICS CO., LTD", + 0x071F: "Dongguan Liesheng Electronic Co.Ltd", + 0x0720: "Oculeve, Inc.", + 0x0721: "Clover Network, Inc.", + 0x0722: "Xiamen Eholder Electronics Co.Ltd", + 0x0723: "Ford Motor Company", + 0x0724: "Guangzhou SuperSound Information Technology Co.,Ltd", + 0x0725: "Tedee Sp. 
z o.o.", + 0x0726: "PHC Corporation", + 0x0727: "STALKIT AS", + 0x0728: "Eli Lilly and Company", + 0x0729: "SwaraLink Technologies", + 0x072A: "JMR embedded systems GmbH", + 0x072B: "Bitkey Inc.", + 0x072C: "GWA Hygiene GmbH", + 0x072D: "Safera Oy", + 0x072E: "Open Platform Systems LLC", + 0x072F: "OnePlus Electronics (Shenzhen) Co., Ltd.", + 0x0730: "Wildlife Acoustics, Inc.", + 0x0731: "ABLIC Inc.", + 0x0732: "Dairy Tech, Inc.", + 0x0733: "Iguanavation, Inc.", + 0x0734: "DiUS Computing Pty Ltd", + 0x0735: "UpRight Technologies LTD", + 0x0736: "FrancisFund, LLC", + 0x0737: "LLC Navitek", + 0x0738: "Glass Security Pte Ltd", + 0x0739: "Jiangsu Qinheng Co., Ltd.", + 0x073A: "Chandler Systems Inc.", + 0x073B: "Fantini Cosmi s.p.a.", + 0x073C: "Acubit ApS", + 0x073D: "Beijing Hao Heng Tian Tech Co., Ltd.", + 0x073E: "Bluepack S.R.L.", + 0x073F: "Beijing Unisoc Technologies Co., Ltd.", + 0x0740: "HITIQ LIMITED", + 0x0741: "MAC SRL", + 0x0742: "DML LLC", + 0x0743: "Sanofi", + 0x0744: "SOCOMEC", + 0x0745: "WIZNOVA, Inc.", + 0x0746: "Seitec Elektronik GmbH", + 0x0747: "OR Technologies Pty Ltd", + 0x0748: "GuangZhou KuGou Computer Technology Co.Ltd", + 0x0749: "DIAODIAO (Beijing) Technology Co., Ltd.", + 0x074A: "Illusory Studios LLC", + 0x074B: "Sarvavid Software Solutions LLP", + 0x074C: "iopool s.a.", + 0x074D: "Amtech Systems, LLC", + 0x074E: "EAGLE DETECTION SA", + 0x074F: "MEDIATECH S.R.L.", + 0x0750: "Hamilton Professional Services of Canada Incorporated", + 0x0751: "Changsha JEMO IC Design Co.,Ltd", + 0x0752: "Elatec GmbH", + 0x0753: "JLG Industries, Inc.", + 0x0754: "Michael Parkin", + 0x0755: "Brother Industries, Ltd", + 0x0756: "Lumens For Less, Inc", + 0x0757: "ELA Innovation", + 0x0758: "umanSense AB", + 0x0759: "Shanghai InGeek Cyber Security Co., Ltd.", + 0x075A: "HARMAN CO.,LTD.", + 0x075B: "Smart Sensor Devices AB", + 0x075C: "Antitronics Inc.", + 0x075D: "RHOMBUS SYSTEMS, INC.", + 0x075E: "Katerra Inc.", + 0x075F: "Remote Solution Co., LTD.", + 0x0760: "Vimar SpA", + 0x0761: "Mantis Tech LLC", + 0x0762: "TerOpta Ltd", + 0x0763: "PIKOLIN S.L.", + 0x0764: "WWZN Information Technology Company Limited", + 0x0765: "Voxx International", + 0x0766: "ART AND PROGRAM, INC.", + 0x0767: "NITTO DENKO ASIA TECHNICAL CENTRE PTE. LTD.", + 0x0768: "Peloton Interactive Inc.", + 0x0769: "Force Impact Technologies", + 0x076A: "Dmac Mobile Developments, LLC", + 0x076B: "Engineered Medical Technologies", + 0x076C: "Noodle Technology inc", + 0x076D: "Graesslin GmbH", + 0x076E: "WuQi technologies, Inc.", + 0x076F: "Successful Endeavours Pty Ltd", + 0x0770: "InnoCon Medical ApS", + 0x0771: "Corvex Connected Safety", + 0x0772: "Thirdwayv Inc.", + 0x0773: "Echoflex Solutions Inc.", + 0x0774: "C-MAX Asia Limited", + 0x0775: "4eBusiness GmbH", + 0x0776: "Cyber Transport Control GmbH", + 0x0777: "Cue", + 0x0778: "KOAMTAC INC.", + 0x0779: "Loopshore Oy", + 0x077A: "Niruha Systems Private Limited", + 0x077B: "AmaterZ, Inc.", + 0x077C: "radius co., ltd.", + 0x077D: "Sensority, s.r.o.", + 0x077E: "Sparkage Inc.", + 0x077F: "Glenview Software Corporation", + 0x0780: "Finch Technologies Ltd.", + 0x0781: "Qingping Technology (Beijing) Co., Ltd.", + 0x0782: "DeviceDrive AS", + 0x0783: "ESEMBER LIMITED LIABILITY COMPANY", + 0x0784: "audifon GmbH & Co. KG", + 0x0785: "O2 Micro, Inc.", + 0x0786: "HLP Controls Pty Limited", + 0x0787: "Pangaea Solution", + 0x0788: "BubblyNet, LLC", + 0xFFFF: "This value has special meaning depending on the context in which it used. 
Link Manager Protocol (LMP): This value may be used in the internal and interoperability tests before a Company ID has been assigned. This value shall not be used in shipping end products. Device ID Profile: This value is reserved as the default vendor ID when no Device ID service record is present in a remote device.", +} diff --git a/bleak/backends/bluezdbus/__init__.py b/bleak/backends/bluezdbus/__init__.py new file mode 100644 index 0000000..688b9df --- /dev/null +++ b/bleak/backends/bluezdbus/__init__.py @@ -0,0 +1 @@ +"""BlueZ backend.""" diff --git a/bleak/backends/bluezdbus/advertisement_monitor.py b/bleak/backends/bluezdbus/advertisement_monitor.py new file mode 100644 index 0000000..922ffab --- /dev/null +++ b/bleak/backends/bluezdbus/advertisement_monitor.py @@ -0,0 +1,120 @@ +""" +Advertisement Monitor +--------------------- + +This module contains types associated with the BlueZ D-Bus `advertisement +monitor api `. +""" + +import logging +from typing import Iterable, NamedTuple, Tuple, Union, no_type_check + +from dbus_fast.service import PropertyAccess, ServiceInterface, dbus_property, method + +from ...assigned_numbers import AdvertisementDataType +from . import defs + +logger = logging.getLogger(__name__) + + +class OrPattern(NamedTuple): + """ + BlueZ advertisement monitor or-pattern. + + https://github.com/bluez/bluez/blob/master/doc/org.bluez.AdvertisementMonitor.rst#arrayuint8-uint8-arraybyte-patterns-read-only-optional + """ + + start_position: int + ad_data_type: AdvertisementDataType + content_of_pattern: bytes + + +# Windows has a similar structure, so we allow generic tuple for cross-platform compatibility +OrPatternLike = Union[OrPattern, Tuple[int, AdvertisementDataType, bytes]] + + +class AdvertisementMonitor(ServiceInterface): + """ + Implementation of the org.bluez.AdvertisementMonitor1 D-Bus interface. + + The BlueZ advertisement monitor API design seems to be just for device + presence (is it in range or out of range), but this isn't really what + we want in Bleak, we want to monitor changes in advertisement data, just + like in active scanning. + + So the only thing we are using here is the "or_patterns" since it is + currently required, but really we don't need that either. Hopefully an + "all" "Type" could be added to BlueZ in the future. + """ + + def __init__( + self, + or_patterns: Iterable[OrPatternLike], + ): + """ + Args: + or_patterns: + List of or patterns that will be returned by the ``Patterns`` property. + """ + super().__init__(defs.ADVERTISEMENT_MONITOR_INTERFACE) + # dbus_fast marshaling requires list instead of tuple + self._or_patterns = [list(p) for p in or_patterns] + + @method() + def Release(self): + logger.debug("Release") + + @method() + def Activate(self): + logger.debug("Activate") + + # REVISIT: mypy is broke, so we have to add redundant @no_type_check + # https://github.com/python/mypy/issues/6583 + + @method() + @no_type_check + def DeviceFound(self, device: "o"): # noqa: F821 + if logger.isEnabledFor(logging.DEBUG): + logger.debug("DeviceFound %s", device) + + @method() + @no_type_check + def DeviceLost(self, device: "o"): # noqa: F821 + if logger.isEnabledFor(logging.DEBUG): + logger.debug("DeviceLost %s", device) + + @dbus_property(PropertyAccess.READ) + @no_type_check + def Type(self) -> "s": # noqa: F821 + # this is currently the only type supported in BlueZ + return "or_patterns" + + @dbus_property(PropertyAccess.READ, disabled=True) + @no_type_check + def RSSILowThreshold(self) -> "n": # noqa: F821 + ... 
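For illustration only, a minimal sketch of how the or-patterns consumed by this interface might be built. It assumes the AdvertisementDataType enum from bleak/assigned_numbers.py (added earlier in this commit) exposes FLAGS and MANUFACTURER_SPECIFIC_DATA members; the company ID and flags byte are arbitrary example values.

    from bleak.assigned_numbers import AdvertisementDataType
    from bleak.backends.bluezdbus.advertisement_monitor import OrPattern

    or_patterns = [
        # Match manufacturer-specific data (AD type 0xFF) whose first two
        # bytes are the little-endian company ID 0x004C, starting at offset 0.
        OrPattern(0, AdvertisementDataType.MANUFACTURER_SPECIFIC_DATA, b"\x4c\x00"),
        # Plain tuples are accepted as well, per the OrPatternLike alias above.
        (0, AdvertisementDataType.FLAGS, b"\x06"),
    ]

These patterns are what the BlueZ passive-scanning backend registers via the Patterns property returned at the end of this class.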
+ + @dbus_property(PropertyAccess.READ, disabled=True) + @no_type_check + def RSSIHighThreshold(self) -> "n": # noqa: F821 + ... + + @dbus_property(PropertyAccess.READ, disabled=True) + @no_type_check + def RSSILowTimeout(self) -> "q": # noqa: F821 + ... + + @dbus_property(PropertyAccess.READ, disabled=True) + @no_type_check + def RSSIHighTimeout(self) -> "q": # noqa: F821 + ... + + @dbus_property(PropertyAccess.READ, disabled=True) + @no_type_check + def RSSISamplingPeriod(self) -> "q": # noqa: F821 + ... + + @dbus_property(PropertyAccess.READ) + @no_type_check + def Patterns(self) -> "a(yyay)": # noqa: F821 + return self._or_patterns diff --git a/bleak/backends/bluezdbus/characteristic.py b/bleak/backends/bluezdbus/characteristic.py new file mode 100644 index 0000000..ab70782 --- /dev/null +++ b/bleak/backends/bluezdbus/characteristic.py @@ -0,0 +1,107 @@ +from typing import Callable, List, Union +from uuid import UUID + +from ..characteristic import BleakGATTCharacteristic +from ..descriptor import BleakGATTDescriptor +from .defs import GattCharacteristic1 +from .utils import extract_service_handle_from_path + +_GattCharacteristicsFlagsEnum = { + 0x0001: "broadcast", + 0x0002: "read", + 0x0004: "write-without-response", + 0x0008: "write", + 0x0010: "notify", + 0x0020: "indicate", + 0x0040: "authenticated-signed-writes", + 0x0080: "extended-properties", + 0x0100: "reliable-write", + 0x0200: "writable-auxiliaries", + # "encrypt-read" + # "encrypt-write" + # "encrypt-authenticated-read" + # "encrypt-authenticated-write" + # "secure-read" #(Server only) + # "secure-write" #(Server only) + # "authorize" +} + + +class BleakGATTCharacteristicBlueZDBus(BleakGATTCharacteristic): + """GATT Characteristic implementation for the BlueZ DBus backend""" + + def __init__( + self, + obj: GattCharacteristic1, + object_path: str, + service_uuid: str, + service_handle: int, + max_write_without_response_size: Callable[[], int], + ): + super(BleakGATTCharacteristicBlueZDBus, self).__init__( + obj, max_write_without_response_size + ) + self.__descriptors = [] + self.__path = object_path + self.__service_uuid = service_uuid + self.__service_handle = service_handle + self._handle = extract_service_handle_from_path(object_path) + + @property + def service_uuid(self) -> str: + """The uuid of the Service containing this characteristic""" + return self.__service_uuid + + @property + def service_handle(self) -> int: + """The handle of the Service containing this characteristic""" + return self.__service_handle + + @property + def handle(self) -> int: + """The handle of this characteristic""" + return self._handle + + @property + def uuid(self) -> str: + """The uuid of this characteristic""" + return self.obj.get("UUID") + + @property + def properties(self) -> List[str]: + """Properties of this characteristic + + Returns the characteristics `Flags` present in the DBus API. 
+ """ + return self.obj["Flags"] + + @property + def descriptors(self) -> List[BleakGATTDescriptor]: + """List of descriptors for this service""" + return self.__descriptors + + def get_descriptor( + self, specifier: Union[int, str, UUID] + ) -> Union[BleakGATTDescriptor, None]: + """Get a descriptor by handle (int) or UUID (str or uuid.UUID)""" + try: + if isinstance(specifier, int): + return next(filter(lambda x: x.handle == specifier, self.descriptors)) + else: + return next( + filter(lambda x: x.uuid == str(specifier), self.descriptors) + ) + except StopIteration: + return None + + def add_descriptor(self, descriptor: BleakGATTDescriptor) -> None: + """Add a :py:class:`~BleakGATTDescriptor` to the characteristic. + + Should not be used by end user, but rather by `bleak` itself. + """ + self.__descriptors.append(descriptor) + + @property + def path(self) -> str: + """The DBus path. Mostly needed by `bleak`, not by end user""" + return self.__path diff --git a/bleak/backends/bluezdbus/client.py b/bleak/backends/bluezdbus/client.py new file mode 100644 index 0000000..ec22558 --- /dev/null +++ b/bleak/backends/bluezdbus/client.py @@ -0,0 +1,993 @@ +# -*- coding: utf-8 -*- +""" +BLE Client for BlueZ on Linux +""" +import asyncio +import logging +import os +import sys +import warnings +from typing import Callable, Dict, Optional, Set, Union, cast +from uuid import UUID + +if sys.version_info < (3, 12): + from typing_extensions import Buffer +else: + from collections.abc import Buffer + +if sys.version_info < (3, 11): + from async_timeout import timeout as async_timeout +else: + from asyncio import timeout as async_timeout + +from dbus_fast.aio import MessageBus +from dbus_fast.constants import BusType, ErrorType, MessageType +from dbus_fast.message import Message +from dbus_fast.signature import Variant + +from ... import BleakScanner +from ...exc import ( + BleakCharacteristicNotFoundError, + BleakDBusError, + BleakDeviceNotFoundError, + BleakError, +) +from ..characteristic import BleakGATTCharacteristic +from ..client import BaseBleakClient, NotifyCallback +from ..device import BLEDevice +from ..service import BleakGATTServiceCollection +from . import defs +from .characteristic import BleakGATTCharacteristicBlueZDBus +from .manager import get_global_bluez_manager +from .scanner import BleakScannerBlueZDBus +from .utils import assert_reply, get_dbus_authenticator +from .version import BlueZFeatures + +logger = logging.getLogger(__name__) + +# prevent tasks from being garbage collected +_background_tasks: Set[asyncio.Task] = set() + + +class BleakClientBlueZDBus(BaseBleakClient): + """A native Linux Bleak Client + + Implemented by using the `BlueZ DBUS API `_. + + Args: + address_or_ble_device (`BLEDevice` or str): The Bluetooth address of the BLE peripheral to connect to or the `BLEDevice` object representing it. + services: Optional list of service UUIDs that will be used. + + Keyword Args: + timeout (float): Timeout for required ``BleakScanner.find_device_by_address`` call. Defaults to 10.0. + disconnected_callback (callable): Callback that will be scheduled in the + event loop when the client is disconnected. The callable must take one + argument, which will be this client object. + adapter (str): Bluetooth adapter to use for discovery. 
+ """ + + def __init__( + self, + address_or_ble_device: Union[BLEDevice, str], + services: Optional[Set[str]] = None, + **kwargs, + ): + super(BleakClientBlueZDBus, self).__init__(address_or_ble_device, **kwargs) + # kwarg "device" is for backwards compatibility + self._adapter: Optional[str] = kwargs.get("adapter", kwargs.get("device")) + + # Backend specific, D-Bus objects and data + if isinstance(address_or_ble_device, BLEDevice): + self._device_path = address_or_ble_device.details["path"] + self._device_info = address_or_ble_device.details.get("props") + else: + self._device_path = None + self._device_info = None + + self._requested_services = services + + # D-Bus message bus + self._bus: Optional[MessageBus] = None + # tracks device watcher subscription + self._remove_device_watcher: Optional[Callable] = None + # private backing for is_connected property + self._is_connected = False + # indicates disconnect request in progress when not None + self._disconnecting_event: Optional[asyncio.Event] = None + # used to ensure device gets disconnected if event loop crashes + self._disconnect_monitor_event: Optional[asyncio.Event] = None + # map of characteristic D-Bus object path to notification callback + self._notification_callbacks: Dict[str, NotifyCallback] = {} + + # used to override mtu_size property + self._mtu_size: Optional[int] = None + + # Connectivity methods + + async def connect(self, dangerous_use_bleak_cache: bool = False, **kwargs) -> bool: + """Connect to the specified GATT server. + + Keyword Args: + timeout (float): Timeout for required ``BleakScanner.find_device_by_address`` call. Defaults to 10.0. + + Returns: + Boolean representing connection status. + + Raises: + BleakError: If the device is already connected or if the device could not be found. + BleakDBusError: If there was a D-Bus error + asyncio.TimeoutError: If the connection timed out + """ + logger.debug("Connecting to device @ %s", self.address) + + if self.is_connected: + raise BleakError("Client is already connected") + + if not BlueZFeatures.checked_bluez_version: + await BlueZFeatures.check_bluez_version() + if not BlueZFeatures.supported_version: + raise BleakError("Bleak requires BlueZ >= 5.43.") + # A Discover must have been run before connecting to any devices. + # Find the desired device before trying to connect. + timeout = kwargs.get("timeout", self._timeout) + if self._device_path is None: + device = await BleakScanner.find_device_by_address( + self.address, + timeout=timeout, + adapter=self._adapter, + backend=BleakScannerBlueZDBus, + ) + + if device: + self._device_info = device.details.get("props") + self._device_path = device.details["path"] + else: + raise BleakDeviceNotFoundError( + self.address, f"Device with address {self.address} was not found." + ) + + manager = await get_global_bluez_manager() + + async with async_timeout(timeout): + while True: + # Each BLE connection session needs a new D-Bus connection to avoid a + # BlueZ quirk where notifications are automatically enabled on reconnect. 
+ self._bus = await MessageBus( + bus_type=BusType.SYSTEM, + negotiate_unix_fd=True, + auth=get_dbus_authenticator(), + ).connect() + + def on_connected_changed(connected: bool) -> None: + if not connected: + logger.debug("Device disconnected (%s)", self._device_path) + + self._is_connected = False + + if self._disconnect_monitor_event: + self._disconnect_monitor_event.set() + self._disconnect_monitor_event = None + + self._cleanup_all() + if self._disconnected_callback is not None: + self._disconnected_callback() + disconnecting_event = self._disconnecting_event + if disconnecting_event: + disconnecting_event.set() + + def on_value_changed(char_path: str, value: bytes) -> None: + callback = self._notification_callbacks.get(char_path) + + if callback: + callback(bytearray(value)) + + watcher = manager.add_device_watcher( + self._device_path, on_connected_changed, on_value_changed + ) + self._remove_device_watcher = lambda: manager.remove_device_watcher( + watcher + ) + + self._disconnect_monitor_event = local_disconnect_monitor_event = ( + asyncio.Event() + ) + + try: + try: + # + # The BlueZ backend does not disconnect devices when the + # application closes or crashes. This can cause problems + # when trying to reconnect to the same device. To work + # around this, we check if the device is already connected. + # + # For additional details see https://github.com/bluez/bluez/issues/89 + # + if manager.is_connected(self._device_path): + logger.debug( + 'skipping calling "Connect" since %s is already connected', + self._device_path, + ) + else: + logger.debug( + "Connecting to BlueZ path %s", self._device_path + ) + reply = await self._bus.call( + Message( + destination=defs.BLUEZ_SERVICE, + interface=defs.DEVICE_INTERFACE, + path=self._device_path, + member="Connect", + ) + ) + + assert reply is not None + + if reply.message_type == MessageType.ERROR: + # This error is often caused by RF interference + # from other Bluetooth or Wi-Fi devices. In many + # cases, retrying will connect successfully. + # Note: this error was added in BlueZ 6.62. + if ( + reply.error_name == "org.bluez.Error.Failed" + and reply.body + and reply.body[0] == "le-connection-abort-by-local" + ): + logger.debug( + "retry due to le-connection-abort-by-local" + ) + + # When this error occurs, BlueZ actually + # connected so we get "Connected" property changes + # that we need to wait for before attempting + # to connect again. + await local_disconnect_monitor_event.wait() + + # Jump way back to the `while True:`` to retry. + continue + + if reply.error_name == ErrorType.UNKNOWN_OBJECT.value: + raise BleakDeviceNotFoundError( + self.address, + f"Device with address {self.address} was not found. It may have been removed from BlueZ when scanning stopped.", + ) + + assert_reply(reply) + + self._is_connected = True + + # Create a task that runs until the device is disconnected. + task = asyncio.create_task( + self._disconnect_monitor( + self._bus, + self._device_path, + local_disconnect_monitor_event, + ) + ) + _background_tasks.add(task) + task.add_done_callback(_background_tasks.discard) + + # + # We will try to use the cache if it exists and `dangerous_use_bleak_cache` + # is True. + # + await self.get_services( + dangerous_use_bleak_cache=dangerous_use_bleak_cache + ) + + return True + except BaseException: + # Calling Disconnect cancels any pending connect request. Also, + # if connection was successful but get_services() raises (e.g. 
+ # because task was cancelled), the we still need to disconnect + # before passing on the exception. + if self._bus: + # If disconnected callback already fired, this will be a no-op + # since self._bus will be None and the _cleanup_all call will + # have already disconnected. + try: + reply = await self._bus.call( + Message( + destination=defs.BLUEZ_SERVICE, + interface=defs.DEVICE_INTERFACE, + path=self._device_path, + member="Disconnect", + ) + ) + try: + assert_reply(reply) + except BleakDBusError as e: + # if the object no longer exists, then we know we + # are disconnected for sure, so don't need to log a + # warning about it + if e.dbus_error != ErrorType.UNKNOWN_OBJECT.value: + raise + except Exception as e: + logger.warning( + f"Failed to cancel connection ({self._device_path}): {e}" + ) + + raise + except BaseException: + # this effectively cancels the disconnect monitor in case the event + # was not triggered by a D-Bus callback + local_disconnect_monitor_event.set() + self._cleanup_all() + raise + + @staticmethod + async def _disconnect_monitor( + bus: MessageBus, device_path: str, disconnect_monitor_event: asyncio.Event + ) -> None: + # This task runs until the device is disconnected. If the task is + # cancelled, it probably means that the event loop crashed so we + # try to disconnected the device. Otherwise BlueZ will keep the device + # connected even after Python exits. This will only work if the event + # loop is called with asyncio.run() or otherwise runs pending tasks + # after the original event loop stops. This will also cause an exception + # if a run loop is stopped before the device is disconnected since this + # task will still be running and asyncio complains if a loop with running + # tasks is stopped. + try: + await disconnect_monitor_event.wait() + except asyncio.CancelledError: + try: + # by using send() instead of call(), we ensure that the message + # gets sent, but we don't wait for a reply, which could take + # over one second while the device disconnects. + await bus.send( + Message( + destination=defs.BLUEZ_SERVICE, + path=device_path, + interface=defs.DEVICE_INTERFACE, + member="Disconnect", + ) + ) + except Exception: + pass + + def _cleanup_all(self) -> None: + """ + Free all the allocated resource in DBus. Use this method to + eventually cleanup all otherwise leaked resources. + """ + logger.debug("_cleanup_all(%s)", self._device_path) + + if self._remove_device_watcher: + self._remove_device_watcher() + self._remove_device_watcher = None + + if not self._bus: + logger.debug("already disconnected (%s)", self._device_path) + return + + # Try to disconnect the System Bus. + try: + self._bus.disconnect() + except Exception as e: + logger.error( + "Attempt to disconnect system bus failed (%s): %s", + self._device_path, + e, + ) + else: + # Critical to remove the `self._bus` object here to since it was + # closed above. If not, calls made to it later could lead to + # a stuck client. + self._bus = None + + # Reset all stored services. + self.services = None + + async def disconnect(self) -> bool: + """Disconnect from the specified GATT server. + + Returns: + Boolean representing if device is disconnected. + + Raises: + BleakDBusError: If there was a D-Bus error + asyncio.TimeoutError if the device was not disconnected within 10 seconds + """ + logger.debug("Disconnecting ({%s})", self._device_path) + + if self._bus is None: + # No connection exists. 
Either one hasn't been created or + # we have already called disconnect and closed the D-Bus + # connection. + logger.debug("already disconnected ({%s})", self._device_path) + return True + + if self._disconnecting_event: + # another call to disconnect() is already in progress + logger.debug("already in progress ({%s})", self._device_path) + async with async_timeout(10): + await self._disconnecting_event.wait() + elif self.is_connected: + self._disconnecting_event = asyncio.Event() + try: + # Try to disconnect the actual device/peripheral + reply = await self._bus.call( + Message( + destination=defs.BLUEZ_SERVICE, + path=self._device_path, + interface=defs.DEVICE_INTERFACE, + member="Disconnect", + ) + ) + assert_reply(reply) + async with async_timeout(10): + await self._disconnecting_event.wait() + finally: + self._disconnecting_event = None + + # sanity check to make sure _cleanup_all() was triggered by the + # "PropertiesChanged" signal handler and that it completed successfully + assert self._bus is None + + return True + + async def pair(self, *args, **kwargs) -> bool: + """Pair with the peripheral. + + You can use ConnectDevice method if you already know the MAC address of the device. + Else you need to StartDiscovery, Trust, Pair and Connect in sequence. + + Returns: + Boolean regarding success of pairing. + + """ + # See if it is already paired. + reply = await self._bus.call( + Message( + destination=defs.BLUEZ_SERVICE, + path=self._device_path, + interface=defs.PROPERTIES_INTERFACE, + member="Get", + signature="ss", + body=[defs.DEVICE_INTERFACE, "Paired"], + ) + ) + assert_reply(reply) + if reply.body[0].value: + logger.debug("BLE device @ %s is already paired", self.address) + return True + + # Set device as trusted. + reply = await self._bus.call( + Message( + destination=defs.BLUEZ_SERVICE, + path=self._device_path, + interface=defs.PROPERTIES_INTERFACE, + member="Set", + signature="ssv", + body=[defs.DEVICE_INTERFACE, "Trusted", Variant("b", True)], + ) + ) + assert_reply(reply) + + logger.debug("Pairing to BLE device @ %s", self.address) + + reply = await self._bus.call( + Message( + destination=defs.BLUEZ_SERVICE, + path=self._device_path, + interface=defs.DEVICE_INTERFACE, + member="Pair", + ) + ) + assert_reply(reply) + + reply = await self._bus.call( + Message( + destination=defs.BLUEZ_SERVICE, + path=self._device_path, + interface=defs.PROPERTIES_INTERFACE, + member="Get", + signature="ss", + body=[defs.DEVICE_INTERFACE, "Paired"], + ) + ) + assert_reply(reply) + + return reply.body[0].value + + async def unpair(self) -> bool: + """Unpair with the peripheral. + + Returns: + Boolean regarding success of unpairing. + + """ + adapter_path = await self._get_adapter_path() + device_path = await self._get_device_path() + manager = await get_global_bluez_manager() + + logger.debug( + "Removing BlueZ device path %s from adapter path %s", + device_path, + adapter_path, + ) + + # If this client object wants to connect again, BlueZ needs the device + # to follow Discovery process again - so reset the local connection + # state. + # + # (This is true even if the request to RemoveDevice fails, + # so clear it before.) 
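A short sketch, under the assumption that the device address is known, of the pair/unpair flow these methods implement when reached through the public API on Linux:

    from bleak import BleakClient

    async def pair_then_forget(address: str) -> None:
        async with BleakClient(address) as client:
            # Maps to BleakClientBlueZDBus.pair(): sets Trusted and calls Pair.
            print("paired:", await client.pair())
            # ... access characteristics that require encryption here ...
            # unpair() asks BlueZ to RemoveDevice, which also drops the connection
            # and forces a fresh discovery before any reconnect.
            await client.unpair()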
+ self._device_path = None + self._device_info = None + self._is_connected = False + + try: + reply = await manager._bus.call( + Message( + destination=defs.BLUEZ_SERVICE, + path=adapter_path, + interface=defs.ADAPTER_INTERFACE, + member="RemoveDevice", + signature="o", + body=[device_path], + ) + ) + assert_reply(reply) + except BleakDBusError as e: + if e.dbus_error == "org.bluez.Error.DoesNotExist": + raise BleakDeviceNotFoundError( + self.address, f"Device with address {self.address} was not found." + ) from e + raise + + return True + + @property + def is_connected(self) -> bool: + """Check connection status between this client and the server. + + Returns: + Boolean representing connection status. + + """ + return self._DeprecatedIsConnectedReturn( + False if self._bus is None else self._is_connected + ) + + async def _acquire_mtu(self) -> None: + """Acquires the MTU for this device by calling the "AcquireWrite" or + "AcquireNotify" method of the first characteristic that has such a method. + + This method only needs to be called once, after connecting to the device + but before accessing the ``mtu_size`` property. + + If a device uses encryption on characteristics, it will need to be bonded + first before calling this method. + """ + # This will try to get the "best" characteristic for getting the MTU. + # We would rather not start notifications if we don't have to. + try: + method = "AcquireWrite" + char = next( + c + for c in self.services.characteristics.values() + if "write-without-response" in c.properties + ) + except StopIteration: + method = "AcquireNotify" + char = next( + c + for c in self.services.characteristics.values() + if "notify" in c.properties + ) + + reply = await self._bus.call( + Message( + destination=defs.BLUEZ_SERVICE, + path=char.path, + interface=defs.GATT_CHARACTERISTIC_INTERFACE, + member=method, + signature="a{sv}", + body=[{}], + ) + ) + assert_reply(reply) + + # we aren't actually using the write or notify, we just want the MTU + os.close(reply.unix_fds[0]) + self._mtu_size = reply.body[1] + + async def _get_adapter_path(self) -> str: + """Private coroutine to return the BlueZ path to the adapter this client is assigned to. + + Can be called even if no connection has been established yet. + """ + if self._device_info: + # If we have a BlueZ DBus object with _device_info, use what it tell us + return self._device_info["Adapter"] + if self._adapter: + # If the adapter name was set in the constructor, convert to a BlueZ path + return f"/org/bluez/{self._adapter}" + + # Fall back to the system's default Bluetooth adapter + manager = await get_global_bluez_manager() + return manager.get_default_adapter() + + async def _get_device_path(self) -> str: + """Private coroutine to return the BlueZ path to the device address this client is assigned to. + + Unlike the _device_path property, this function can be called even if discovery process has not + started and/or connection has not been established yet. + """ + if self._device_path: + # If we have a BlueZ DBus object, return its device path + return self._device_path + + # Otherwise, build a new path using the adapter path and the BLE address + adapter_path = await self._get_adapter_path() + bluez_address = self.address.upper().replace(":", "_") + return f"{adapter_path}/dev_{bluez_address}" + + @property + def mtu_size(self) -> int: + """Get ATT MTU size for active connection""" + if self._mtu_size is None: + warnings.warn( + "Using default MTU value. 
Call _acquire_mtu() or set _mtu_size first to avoid this warning." + ) + return 23 + + return self._mtu_size + + # GATT services methods + + async def get_services( + self, dangerous_use_bleak_cache: bool = False, **kwargs + ) -> BleakGATTServiceCollection: + """Get all services registered for this GATT server. + + Args: + dangerous_use_bleak_cache (bool): Use cached services if available. + + Returns: + A :py:class:`bleak.backends.service.BleakGATTServiceCollection` with this device's services tree. + + """ + if not self.is_connected: + raise BleakError("Not connected") + + if self.services is not None: + return self.services + + manager = await get_global_bluez_manager() + + self.services = await manager.get_services( + self._device_path, dangerous_use_bleak_cache, self._requested_services + ) + + return self.services + + # IO methods + + async def read_gatt_char( + self, + char_specifier: Union[BleakGATTCharacteristicBlueZDBus, int, str, UUID], + **kwargs, + ) -> bytearray: + """Perform read operation on the specified GATT characteristic. + + Args: + char_specifier (BleakGATTCharacteristicBlueZDBus, int, str or UUID): The characteristic to read from, + specified by either integer handle, UUID or directly by the + BleakGATTCharacteristicBlueZDBus object representing it. + + Returns: + (bytearray) The read data. + + """ + if not self.is_connected: + raise BleakError("Not connected") + + if not isinstance(char_specifier, BleakGATTCharacteristicBlueZDBus): + characteristic = self.services.get_characteristic(char_specifier) + else: + characteristic = char_specifier + + if not characteristic: + # Special handling for BlueZ >= 5.48, where Battery Service (0000180f-0000-1000-8000-00805f9b34fb:) + # has been moved to interface org.bluez.Battery1 instead of as a regular service. + if ( + str(char_specifier) == "00002a19-0000-1000-8000-00805f9b34fb" + and BlueZFeatures.hides_battery_characteristic + ): + reply = await self._bus.call( + Message( + destination=defs.BLUEZ_SERVICE, + path=self._device_path, + interface=defs.PROPERTIES_INTERFACE, + member="GetAll", + signature="s", + body=[defs.BATTERY_INTERFACE], + ) + ) + assert_reply(reply) + # Simulate regular characteristics read to be consistent over all platforms. + value = bytearray([reply.body[0]["Percentage"].value]) + logger.debug( + "Read Battery Level {0} | {1}: {2}".format( + char_specifier, self._device_path, value + ) + ) + return value + if ( + str(char_specifier) == "00002a00-0000-1000-8000-00805f9b34fb" + and BlueZFeatures.hides_device_name_characteristic + ): + # Simulate regular characteristics read to be consistent over all platforms. + manager = await get_global_bluez_manager() + value = bytearray(manager.get_device_name(self._device_path).encode()) + logger.debug( + "Read Device Name {0} | {1}: {2}".format( + char_specifier, self._device_path, value + ) + ) + return value + + raise BleakCharacteristicNotFoundError(char_specifier) + + while True: + assert self._bus + + reply = await self._bus.call( + Message( + destination=defs.BLUEZ_SERVICE, + path=characteristic.path, + interface=defs.GATT_CHARACTERISTIC_INTERFACE, + member="ReadValue", + signature="a{sv}", + body=[{}], + ) + ) + + assert reply + + if reply.error_name == "org.bluez.Error.InProgress": + logger.debug("retrying characteristic ReadValue due to InProgress") + # Avoid calling in a tight loop. There is no dbus signal to + # indicate ready, so unfortunately, we have to poll. 
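A minimal read sketch through the public wrapper; the Battery Level UUID shown is the one given special handling above on BlueZ versions that hide it behind org.bluez.Battery1.

    from bleak import BleakClient

    BATTERY_LEVEL = "00002a19-0000-1000-8000-00805f9b34fb"

    async def read_battery(address: str) -> int:
        async with BleakClient(address) as client:
            value = await client.read_gatt_char(BATTERY_LEVEL)
            return value[0]  # battery percentage, 0-100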
+ await asyncio.sleep(0.01) + continue + + assert_reply(reply) + break + + value = bytearray(reply.body[0]) + + logger.debug( + "Read Characteristic {0} | {1}: {2}".format( + characteristic.uuid, characteristic.path, value + ) + ) + return value + + async def read_gatt_descriptor(self, handle: int, **kwargs) -> bytearray: + """Perform read operation on the specified GATT descriptor. + + Args: + handle (int): The handle of the descriptor to read from. + + Returns: + (bytearray) The read data. + + """ + if not self.is_connected: + raise BleakError("Not connected") + + descriptor = self.services.get_descriptor(handle) + if not descriptor: + raise BleakError("Descriptor with handle {0} was not found!".format(handle)) + + while True: + assert self._bus + + reply = await self._bus.call( + Message( + destination=defs.BLUEZ_SERVICE, + path=descriptor.path, + interface=defs.GATT_DESCRIPTOR_INTERFACE, + member="ReadValue", + signature="a{sv}", + body=[{}], + ) + ) + + assert reply + + if reply.error_name == "org.bluez.Error.InProgress": + logger.debug("retrying descriptor ReadValue due to InProgress") + # Avoid calling in a tight loop. There is no dbus signal to + # indicate ready, so unfortunately, we have to poll. + await asyncio.sleep(0.01) + continue + + assert_reply(reply) + break + + value = bytearray(reply.body[0]) + + logger.debug("Read Descriptor %s | %s: %s", handle, descriptor.path, value) + return value + + async def write_gatt_char( + self, + characteristic: BleakGATTCharacteristic, + data: Buffer, + response: bool, + ) -> None: + if not self.is_connected: + raise BleakError("Not connected") + + # See docstring for details about this handling. + if not response and not BlueZFeatures.can_write_without_response: + raise BleakError("Write without response requires at least BlueZ 5.46") + + if response or not BlueZFeatures.write_without_response_workaround_needed: + while True: + assert self._bus + + reply = await self._bus.call( + Message( + destination=defs.BLUEZ_SERVICE, + path=characteristic.path, + interface=defs.GATT_CHARACTERISTIC_INTERFACE, + member="WriteValue", + signature="aya{sv}", + body=[ + bytes(data), + { + "type": Variant( + "s", "request" if response else "command" + ) + }, + ], + ) + ) + + assert reply + + if reply.error_name == "org.bluez.Error.InProgress": + logger.debug("retrying characteristic WriteValue due to InProgress") + # Avoid calling in a tight loop. There is no dbus signal to + # indicate ready, so unfortunately, we have to poll. + await asyncio.sleep(0.01) + continue + + assert_reply(reply) + break + else: + # Older versions of BlueZ don't have the "type" option, so we have + # to write the hard way. This isn't the most efficient way of doing + # things, but it works. + reply = await self._bus.call( + Message( + destination=defs.BLUEZ_SERVICE, + path=characteristic.path, + interface=defs.GATT_CHARACTERISTIC_INTERFACE, + member="AcquireWrite", + signature="a{sv}", + body=[{}], + ) + ) + assert_reply(reply) + fd = reply.unix_fds[0] + try: + os.write(fd, data) + finally: + os.close(fd) + + logger.debug( + "Write Characteristic %s | %s: %s", + characteristic.uuid, + characteristic.path, + data, + ) + + async def write_gatt_descriptor(self, handle: int, data: Buffer) -> None: + """Perform a write operation on the specified GATT descriptor. + + Args: + handle: The handle of the descriptor to read from. + data: The data to send (any bytes-like object). 
+ + """ + if not self.is_connected: + raise BleakError("Not connected") + + descriptor = self.services.get_descriptor(handle) + + if not descriptor: + raise BleakError(f"Descriptor with handle {handle} was not found!") + + while True: + assert self._bus + + reply = await self._bus.call( + Message( + destination=defs.BLUEZ_SERVICE, + path=descriptor.path, + interface=defs.GATT_DESCRIPTOR_INTERFACE, + member="WriteValue", + signature="aya{sv}", + body=[bytes(data), {"type": Variant("s", "command")}], + ) + ) + + assert reply + + if reply.error_name == "org.bluez.Error.InProgress": + logger.debug("retrying descriptor WriteValue due to InProgress") + # Avoid calling in a tight loop. There is no dbus signal to + # indicate ready, so unfortunately, we have to poll. + await asyncio.sleep(0.01) + continue + + assert_reply(reply) + break + + logger.debug("Write Descriptor %s | %s: %s", handle, descriptor.path, data) + + async def start_notify( + self, + characteristic: BleakGATTCharacteristic, + callback: NotifyCallback, + **kwargs, + ) -> None: + """ + Activate notifications/indications on a characteristic. + """ + characteristic = cast(BleakGATTCharacteristicBlueZDBus, characteristic) + + self._notification_callbacks[characteristic.path] = callback + + assert self._bus is not None + + reply = await self._bus.call( + Message( + destination=defs.BLUEZ_SERVICE, + path=characteristic.path, + interface=defs.GATT_CHARACTERISTIC_INTERFACE, + member="StartNotify", + ) + ) + assert_reply(reply) + + async def stop_notify( + self, + char_specifier: Union[BleakGATTCharacteristicBlueZDBus, int, str, UUID], + ) -> None: + """Deactivate notification/indication on a specified characteristic. + + Args: + char_specifier (BleakGATTCharacteristicBlueZDBus, int, str or UUID): The characteristic to deactivate + notification/indication on, specified by either integer handle, UUID or + directly by the BleakGATTCharacteristicBlueZDBus object representing it. 
+ + """ + if not self.is_connected: + raise BleakError("Not connected") + + if not isinstance(char_specifier, BleakGATTCharacteristicBlueZDBus): + characteristic = self.services.get_characteristic(char_specifier) + else: + characteristic = char_specifier + if not characteristic: + raise BleakCharacteristicNotFoundError(char_specifier) + + reply = await self._bus.call( + Message( + destination=defs.BLUEZ_SERVICE, + path=characteristic.path, + interface=defs.GATT_CHARACTERISTIC_INTERFACE, + member="StopNotify", + ) + ) + assert_reply(reply) + + self._notification_callbacks.pop(characteristic.path, None) diff --git a/bleak/backends/bluezdbus/defs.py b/bleak/backends/bluezdbus/defs.py new file mode 100644 index 0000000..18ed0c3 --- /dev/null +++ b/bleak/backends/bluezdbus/defs.py @@ -0,0 +1,166 @@ +# -*- coding: utf-8 -*- + +from typing import Dict, List, Literal, Tuple, TypedDict + +# DBus Interfaces +OBJECT_MANAGER_INTERFACE = "org.freedesktop.DBus.ObjectManager" +PROPERTIES_INTERFACE = "org.freedesktop.DBus.Properties" + +# Bluez specific DBUS +BLUEZ_SERVICE = "org.bluez" +ADAPTER_INTERFACE = "org.bluez.Adapter1" +ADVERTISEMENT_MONITOR_INTERFACE = "org.bluez.AdvertisementMonitor1" +ADVERTISEMENT_MONITOR_MANAGER_INTERFACE = "org.bluez.AdvertisementMonitorManager1" +DEVICE_INTERFACE = "org.bluez.Device1" +BATTERY_INTERFACE = "org.bluez.Battery1" + +# GATT interfaces +GATT_MANAGER_INTERFACE = "org.bluez.GattManager1" +GATT_PROFILE_INTERFACE = "org.bluez.GattProfile1" +GATT_SERVICE_INTERFACE = "org.bluez.GattService1" +GATT_CHARACTERISTIC_INTERFACE = "org.bluez.GattCharacteristic1" +GATT_DESCRIPTOR_INTERFACE = "org.bluez.GattDescriptor1" + + +# D-Bus properties for interfaces +# https://github.com/bluez/bluez/blob/master/doc/org.bluez.Adapter.rst + + +class Adapter1(TypedDict): + Address: str + Name: str + Alias: str + Class: int + Powered: bool + Discoverable: bool + Pairable: bool + PairableTimeout: int + DiscoverableTimeout: int + Discovering: int + UUIDs: List[str] + Modalias: str + Roles: List[str] + ExperimentalFeatures: List[str] + + +# https://github.com/bluez/bluez/blob/master/doc/org.bluez.AdvertisementMonitor.rst + + +class AdvertisementMonitor1(TypedDict): + Type: str + RSSILowThreshold: int + RSSIHighThreshold: int + RSSILowTimeout: int + RSSIHighTimeout: int + RSSISamplingPeriod: int + Patterns: List[Tuple[int, int, bytes]] + + +# https://github.com/bluez/bluez/blob/master/doc/org.bluez.AdvertisementMonitorManager.rst + + +class AdvertisementMonitorManager1(TypedDict): + SupportedMonitorTypes: List[str] + SupportedFeatures: List[str] + + +# https://github.com/bluez/bluez/blob/master/doc/org.bluez.Battery.rst + + +class Battery1(TypedDict): + SupportedMonitorTypes: List[str] + SupportedFeatures: List[str] + + +# https://github.com/bluez/bluez/blob/master/doc/org.bluez.Device.rst + + +class Device1(TypedDict): + Address: str + AddressType: str + Name: str + Icon: str + Class: int + Appearance: int + UUIDs: List[str] + Paired: bool + Bonded: bool + Connected: bool + Trusted: bool + Blocked: bool + WakeAllowed: bool + Alias: str + Adapter: str + LegacyPairing: bool + Modalias: str + RSSI: int + TxPower: int + ManufacturerData: Dict[int, bytes] + ServiceData: Dict[str, bytes] + ServicesResolved: bool + AdvertisingFlags: bytes + AdvertisingData: Dict[int, bytes] + + +# https://github.com/bluez/bluez/blob/master/doc/org.bluez.GattService.rst + + +class GattService1(TypedDict): + UUID: str + Primary: bool + Device: str + Includes: List[str] + # Handle is server-only and not available in 
Bleak + + +class GattCharacteristic1(TypedDict): + UUID: str + Service: str + Value: bytes + WriteAcquired: bool + NotifyAcquired: bool + Notifying: bool + Flags: List[ + Literal[ + "broadcast", + "read", + "write-without-response", + "write", + "notify", + "indicate", + "authenticated-signed-writes", + "extended-properties", + "reliable-write", + "writable-auxiliaries", + "encrypt-read", + "encrypt-write", + # "encrypt-notify" and "encrypt-indicate" are server-only + "encrypt-authenticated-read", + "encrypt-authenticated-write", + # "encrypt-authenticated-notify", "encrypt-authenticated-indicate", + # "secure-read", "secure-write", "secure-notify", "secure-indicate" + # are server-only + "authorize", + ] + ] + MTU: int + # Handle is server-only and not available in Bleak + + +class GattDescriptor1(TypedDict): + UUID: str + Characteristic: str + Value: bytes + Flags: List[ + Literal[ + "read", + "write", + "encrypt-read", + "encrypt-write", + "encrypt-authenticated-read", + "encrypt-authenticated-write", + # "secure-read" and "secure-write" are server-only and not available in Bleak + "authorize", + ] + ] + # Handle is server-only and not available in Bleak diff --git a/bleak/backends/bluezdbus/descriptor.py b/bleak/backends/bluezdbus/descriptor.py new file mode 100644 index 0000000..bf35079 --- /dev/null +++ b/bleak/backends/bluezdbus/descriptor.py @@ -0,0 +1,44 @@ +from ..descriptor import BleakGATTDescriptor +from .defs import GattDescriptor1 + + +class BleakGATTDescriptorBlueZDBus(BleakGATTDescriptor): + """GATT Descriptor implementation for BlueZ DBus backend""" + + def __init__( + self, + obj: GattDescriptor1, + object_path: str, + characteristic_uuid: str, + characteristic_handle: int, + ): + super(BleakGATTDescriptorBlueZDBus, self).__init__(obj) + self.__path = object_path + self.__characteristic_uuid = characteristic_uuid + self.__characteristic_handle = characteristic_handle + self.__handle = int(self.path.split("/")[-1].replace("desc", ""), 16) + + @property + def characteristic_handle(self) -> int: + """Handle for the characteristic that this descriptor belongs to""" + return self.__characteristic_handle + + @property + def characteristic_uuid(self) -> str: + """UUID for the characteristic that this descriptor belongs to""" + return self.__characteristic_uuid + + @property + def uuid(self) -> str: + """UUID for this descriptor""" + return self.obj["UUID"] + + @property + def handle(self) -> int: + """Integer handle for this descriptor""" + return self.__handle + + @property + def path(self) -> str: + """The DBus path. Mostly needed by `bleak`, not by end user""" + return self.__path diff --git a/bleak/backends/bluezdbus/manager.py b/bleak/backends/bluezdbus/manager.py new file mode 100644 index 0000000..cedbd64 --- /dev/null +++ b/bleak/backends/bluezdbus/manager.py @@ -0,0 +1,1060 @@ +""" +BlueZ D-Bus manager module +-------------------------- + +This module contains code for the global BlueZ D-Bus object manager that is +used internally by Bleak. +""" + +import asyncio +import contextlib +import logging +import os +from collections import defaultdict +from typing import ( + Any, + Callable, + Coroutine, + Dict, + List, + MutableMapping, + NamedTuple, + Optional, + Set, + cast, +) +from weakref import WeakKeyDictionary + +from dbus_fast import BusType, Message, MessageType, Variant, unpack_variants +from dbus_fast.aio.message_bus import MessageBus + +from ...exc import BleakDBusError, BleakError +from ..service import BleakGATTServiceCollection +from . 
import defs +from .advertisement_monitor import AdvertisementMonitor, OrPatternLike +from .characteristic import BleakGATTCharacteristicBlueZDBus +from .defs import Device1, GattCharacteristic1, GattDescriptor1, GattService1 +from .descriptor import BleakGATTDescriptorBlueZDBus +from .service import BleakGATTServiceBlueZDBus +from .signals import MatchRules, add_match +from .utils import ( + assert_reply, + device_path_from_characteristic_path, + get_dbus_authenticator, +) + +logger = logging.getLogger(__name__) + +AdvertisementCallback = Callable[[str, Device1], None] +""" +A callback that is called when advertisement data is received. + +Args: + arg0: The D-Bus object path of the device. + arg1: The D-Bus properties of the device object. +""" + + +DevicePropertiesChangedCallback = Callable[[Optional[Any]], None] +""" +A callback that is called when the properties of a device change in BlueZ. + +Args: + arg0: The new property value. +""" + + +class DeviceConditionCallback(NamedTuple): + """ + Encapsulates a :data:`DevicePropertiesChangedCallback` and the property name being watched. + """ + + callback: DevicePropertiesChangedCallback + """ + The callback. + """ + + property_name: str + """ + The name of the property to watch. + """ + + +DeviceRemovedCallback = Callable[[str], None] +""" +A callback that is called when a device is removed from BlueZ. + +Args: + arg0: The D-Bus object path of the device. +""" + + +class DeviceRemovedCallbackAndState(NamedTuple): + """ + Encapsulates an :data:`DeviceRemovedCallback` and some state. + """ + + callback: DeviceRemovedCallback + """ + The callback. + """ + + adapter_path: str + """ + The D-Bus object path of the adapter associated with the callback. + """ + + +DeviceConnectedChangedCallback = Callable[[bool], None] +""" +A callback that is called when a device's "Connected" property changes. + +Args: + arg0: The current value of the "Connected" property. +""" + +CharacteristicValueChangedCallback = Callable[[str, bytes], None] +""" +A callback that is called when a characteristics's "Value" property changes. + +Args: + arg0: The D-Bus object path of the characteristic. + arg1: The current value of the "Value" property. +""" + + +class DeviceWatcher(NamedTuple): + device_path: str + """ + The D-Bus object path of the device. + """ + + on_connected_changed: DeviceConnectedChangedCallback + """ + A callback that is called when a device's "Connected" property changes. + """ + + on_characteristic_value_changed: CharacteristicValueChangedCallback + """ + A callback that is called when a characteristics's "Value" property changes. + """ + + +# set of org.bluez.Device1 property names that come from advertising data +_ADVERTISING_DATA_PROPERTIES = { + "AdvertisingData", + "AdvertisingFlags", + "ManufacturerData", + "Name", + "ServiceData", + "UUIDs", +} + + +class BlueZManager: + """ + BlueZ D-Bus object manager. + + Use :func:`bleak.backends.bluezdbus.get_global_bluez_manager` to get the global instance. + """ + + def __init__(self): + self._bus: Optional[MessageBus] = None + self._bus_lock = asyncio.Lock() + + # dict of object path: dict of interface name: dict of property name: property value + self._properties: Dict[str, Dict[str, Dict[str, Any]]] = {} + + # set of available adapters for quick lookup + self._adapters: Set[str] = set() + + # The BlueZ APIs only maps children to parents, so we need to keep maps + # to quickly find the children of a parent D-Bus object. 
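        # Example shape (illustrative paths only, not part of the original patch):
        #   _service_map["/org/bluez/hci0/dev_AA_BB_CC_DD_EE_FF"]
        #       == {".../dev_AA_BB_CC_DD_EE_FF/service000c", ...}
        #   _characteristic_map[".../service000c"] == {".../service000c/char000d", ...}
        #   _descriptor_map[".../char000d"] == {".../char000d/desc000f", ...}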
+ + # map of device d-bus object paths to set of service d-bus object paths + self._service_map: Dict[str, Set[str]] = {} + # map of service d-bus object paths to set of characteristic d-bus object paths + self._characteristic_map: Dict[str, Set[str]] = {} + # map of characteristic d-bus object paths to set of descriptor d-bus object paths + self._descriptor_map: Dict[str, Set[str]] = {} + + self._advertisement_callbacks: defaultdict[str, List[AdvertisementCallback]] = ( + defaultdict(list) + ) + self._device_removed_callbacks: List[DeviceRemovedCallbackAndState] = [] + self._device_watchers: Dict[str, Set[DeviceWatcher]] = {} + self._condition_callbacks: Dict[str, Set[DeviceConditionCallback]] = {} + self._services_cache: Dict[str, BleakGATTServiceCollection] = {} + + def _check_adapter(self, adapter_path: str) -> None: + """ + Raises: + BleakError: if adapter is not present in BlueZ + """ + if adapter_path not in self._properties: + raise BleakError(f"adapter '{adapter_path.split('/')[-1]}' not found") + + def _check_device(self, device_path: str) -> None: + """ + Raises: + BleakError: if device is not present in BlueZ + """ + if device_path not in self._properties: + raise BleakError(f"device '{device_path.split('/')[-1]}' not found") + + def _get_device_property( + self, device_path: str, interface: str, property_name: str + ) -> Any: + self._check_device(device_path) + device_properties = self._properties[device_path] + + try: + interface_properties = device_properties[interface] + except KeyError: + raise BleakError( + f"Interface {interface} not found for device '{device_path}'" + ) + + try: + value = interface_properties[property_name] + except KeyError: + raise BleakError( + f"Property '{property_name}' not found for '{interface}' in '{device_path}'" + ) + + return value + + async def async_init(self) -> None: + """ + Connects to the D-Bus message bus and begins monitoring signals. + + It is safe to call this method multiple times. If the bus is already + connected, no action is performed. + """ + async with self._bus_lock: + if self._bus and self._bus.connected: + return + + self._services_cache = {} + + # We need to create a new MessageBus each time as + # dbus-next will destroy the underlying file descriptors + # when the previous one is closed in its finalizer. 
+ bus = MessageBus(bus_type=BusType.SYSTEM, auth=get_dbus_authenticator()) + await bus.connect() + + try: + # Add signal listeners + + bus.add_message_handler(self._parse_msg) + + rules = MatchRules( + interface=defs.OBJECT_MANAGER_INTERFACE, + member="InterfacesAdded", + arg0path="/org/bluez/", + ) + reply = await add_match(bus, rules) + assert_reply(reply) + + rules = MatchRules( + interface=defs.OBJECT_MANAGER_INTERFACE, + member="InterfacesRemoved", + arg0path="/org/bluez/", + ) + reply = await add_match(bus, rules) + assert_reply(reply) + + rules = MatchRules( + interface=defs.PROPERTIES_INTERFACE, + member="PropertiesChanged", + path_namespace="/org/bluez", + ) + reply = await add_match(bus, rules) + assert_reply(reply) + + # get existing objects after adding signal handlers to avoid + # race condition + + reply = await bus.call( + Message( + destination=defs.BLUEZ_SERVICE, + path="/", + member="GetManagedObjects", + interface=defs.OBJECT_MANAGER_INTERFACE, + ) + ) + assert_reply(reply) + + # dictionaries are cleared in case AddInterfaces was received first + # or there was a bus reset and we are reconnecting + self._properties.clear() + self._service_map.clear() + self._characteristic_map.clear() + self._descriptor_map.clear() + + for path, interfaces in reply.body[0].items(): + props = unpack_variants(interfaces) + self._properties[path] = props + + if defs.ADAPTER_INTERFACE in props: + self._adapters.add(path) + + service_props = cast( + GattService1, props.get(defs.GATT_SERVICE_INTERFACE) + ) + + if service_props: + self._service_map.setdefault( + service_props["Device"], set() + ).add(path) + + char_props = cast( + GattCharacteristic1, + props.get(defs.GATT_CHARACTERISTIC_INTERFACE), + ) + + if char_props: + self._characteristic_map.setdefault( + char_props["Service"], set() + ).add(path) + + desc_props = cast( + GattDescriptor1, props.get(defs.GATT_DESCRIPTOR_INTERFACE) + ) + + if desc_props: + self._descriptor_map.setdefault( + desc_props["Characteristic"], set() + ).add(path) + + if logger.isEnabledFor(logging.DEBUG): + logger.debug("initial properties: %s", self._properties) + + except BaseException: + # if setup failed, disconnect + bus.disconnect() + raise + + # Everything is setup, so save the bus + self._bus = bus + + def get_default_adapter(self) -> str: + """ + Gets the D-Bus object path of of the first powered Bluetooth adapter. + + Returns: + Name of the first found powered adapter on the system, i.e. "/org/bluez/hciX". + + Raises: + BleakError: + if there are no Bluetooth adapters or if none of the adapters are powered + """ + if not any(self._adapters): + raise BleakError("No Bluetooth adapters found.") + + for adapter_path in self._adapters: + if cast( + defs.Adapter1, self._properties[adapter_path][defs.ADAPTER_INTERFACE] + )["Powered"]: + return adapter_path + + raise BleakError("No powered Bluetooth adapters found.") + + async def active_scan( + self, + adapter_path: str, + filters: Dict[str, Variant], + advertisement_callback: AdvertisementCallback, + device_removed_callback: DeviceRemovedCallback, + ) -> Callable[[], Coroutine]: + """ + Configures the advertisement data filters and starts scanning. + + Args: + adapter_path: The D-Bus object path of the adapter to use for scanning. + filters: A dictionary of filters to pass to ``SetDiscoveryFilter``. + advertisement_callback: + A callable that will be called when new advertisement data is received. + device_removed_callback: + A callable that will be called when a device is removed from BlueZ. 
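                The ``filters`` values must be D-Bus ``Variant`` objects, for
                example (illustrative values mirroring the defaults set by the
                scanner backend, not part of the original patch)::

                    filters = {
                        "Transport": Variant("s", "le"),
                        "DuplicateData": Variant("b", False),
                    }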
+ + Returns: + An async function that is used to stop scanning and remove the filters. + + Raises: + BleakError: if the adapter is not present in BlueZ + """ + async with self._bus_lock: + # If the adapter doesn't exist, then the message calls below would + # fail with "method not found". This provides a more informative + # error message. + self._check_adapter(adapter_path) + + self._advertisement_callbacks[adapter_path].append(advertisement_callback) + + device_removed_callback_and_state = DeviceRemovedCallbackAndState( + device_removed_callback, adapter_path + ) + self._device_removed_callbacks.append(device_removed_callback_and_state) + + try: + # Apply the filters + reply = await self._bus.call( + Message( + destination=defs.BLUEZ_SERVICE, + path=adapter_path, + interface=defs.ADAPTER_INTERFACE, + member="SetDiscoveryFilter", + signature="a{sv}", + body=[filters], + ) + ) + assert_reply(reply) + + # Start scanning + reply = await self._bus.call( + Message( + destination=defs.BLUEZ_SERVICE, + path=adapter_path, + interface=defs.ADAPTER_INTERFACE, + member="StartDiscovery", + ) + ) + assert_reply(reply) + + async def stop() -> None: + # need to remove callbacks first, otherwise we get TxPower + # and RSSI properties removed during stop which causes + # incorrect advertisement data callbacks + self._advertisement_callbacks[adapter_path].remove( + advertisement_callback + ) + self._device_removed_callbacks.remove( + device_removed_callback_and_state + ) + + async with self._bus_lock: + reply = await self._bus.call( + Message( + destination=defs.BLUEZ_SERVICE, + path=adapter_path, + interface=defs.ADAPTER_INTERFACE, + member="StopDiscovery", + ) + ) + + try: + assert_reply(reply) + except BleakDBusError as ex: + if ex.dbus_error != "org.bluez.Error.NotReady": + raise + else: + # remove the filters + reply = await self._bus.call( + Message( + destination=defs.BLUEZ_SERVICE, + path=adapter_path, + interface=defs.ADAPTER_INTERFACE, + member="SetDiscoveryFilter", + signature="a{sv}", + body=[{}], + ) + ) + assert_reply(reply) + + return stop + except BaseException: + # if starting scanning failed, don't leak the callbacks + self._advertisement_callbacks[adapter_path].remove( + advertisement_callback + ) + self._device_removed_callbacks.remove(device_removed_callback_and_state) + raise + + async def passive_scan( + self, + adapter_path: str, + filters: List[OrPatternLike], + advertisement_callback: AdvertisementCallback, + device_removed_callback: DeviceRemovedCallback, + ) -> Callable[[], Coroutine]: + """ + Configures the advertisement data filters and starts scanning. + + Args: + adapter_path: The D-Bus object path of the adapter to use for scanning. + filters: A list of "or patterns" to pass to ``org.bluez.AdvertisementMonitor1``. + advertisement_callback: + A callable that will be called when new advertisement data is received. + device_removed_callback: + A callable that will be called when a device is removed from BlueZ. + + Returns: + An async function that is used to stop scanning and remove the filters. + + Raises: + BleakError: if the adapter is not present in BlueZ + """ + async with self._bus_lock: + # If the adapter doesn't exist, then the message calls below would + # fail with "method not found". This provides a more informative + # error message. 
+ self._check_adapter(adapter_path) + + self._advertisement_callbacks[adapter_path].append(advertisement_callback) + + device_removed_callback_and_state = DeviceRemovedCallbackAndState( + device_removed_callback, adapter_path + ) + self._device_removed_callbacks.append(device_removed_callback_and_state) + + try: + monitor = AdvertisementMonitor(filters) + + # this should be a unique path to allow multiple python interpreters + # running bleak and multiple scanners within a single interpreter + monitor_path = f"/org/bleak/{os.getpid()}/{id(monitor)}" + + reply = await self._bus.call( + Message( + destination=defs.BLUEZ_SERVICE, + path=adapter_path, + interface=defs.ADVERTISEMENT_MONITOR_MANAGER_INTERFACE, + member="RegisterMonitor", + signature="o", + body=[monitor_path], + ) + ) + + if ( + reply.message_type == MessageType.ERROR + and reply.error_name == "org.freedesktop.DBus.Error.UnknownMethod" + ): + raise BleakError( + "passive scanning on Linux requires BlueZ >= 5.56 with --experimental enabled and Linux kernel >= 5.10" + ) + + assert_reply(reply) + + # It is important to export after registering, otherwise BlueZ + # won't use the monitor + self._bus.export(monitor_path, monitor) + + async def stop() -> None: + # need to remove callbacks first, otherwise we get TxPower + # and RSSI properties removed during stop which causes + # incorrect advertisement data callbacks + self._advertisement_callbacks[adapter_path].remove( + advertisement_callback + ) + self._device_removed_callbacks.remove( + device_removed_callback_and_state + ) + + async with self._bus_lock: + self._bus.unexport(monitor_path, monitor) + + reply = await self._bus.call( + Message( + destination=defs.BLUEZ_SERVICE, + path=adapter_path, + interface=defs.ADVERTISEMENT_MONITOR_MANAGER_INTERFACE, + member="UnregisterMonitor", + signature="o", + body=[monitor_path], + ) + ) + assert_reply(reply) + + return stop + + except BaseException: + # if starting scanning failed, don't leak the callbacks + self._advertisement_callbacks[adapter_path].remove( + advertisement_callback + ) + self._device_removed_callbacks.remove(device_removed_callback_and_state) + raise + + def add_device_watcher( + self, + device_path: str, + on_connected_changed: DeviceConnectedChangedCallback, + on_characteristic_value_changed: CharacteristicValueChangedCallback, + ) -> DeviceWatcher: + """ + Registers a device watcher to receive callbacks when device state + changes or events are received. + + Args: + device_path: + The D-Bus object path of the device. + on_connected_changed: + A callback that is called when the device's "Connected" + state changes. + on_characteristic_value_changed: + A callback that is called whenever a characteristic receives + a notification/indication. + + Returns: + A device watcher object that acts a token to unregister the watcher. + + Raises: + BleakError: if the device is not present in BlueZ + """ + self._check_device(device_path) + + watcher = DeviceWatcher( + device_path, on_connected_changed, on_characteristic_value_changed + ) + + self._device_watchers.setdefault(device_path, set()).add(watcher) + return watcher + + def remove_device_watcher(self, watcher: DeviceWatcher) -> None: + """ + Unregisters a device watcher. + + Args: + The device watcher token that was returned by + :meth:`add_device_watcher`. 
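        Example (illustrative sketch; the callback names are hypothetical and
        not part of the original patch):

        .. code-block:: python

            watcher = manager.add_device_watcher(
                device_path, on_connected_changed, on_value_changed
            )
            try:
                ...  # use the connection
            finally:
                manager.remove_device_watcher(watcher)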
+ """ + device_path = watcher.device_path + self._device_watchers[device_path].remove(watcher) + if not self._device_watchers[device_path]: + del self._device_watchers[device_path] + + async def get_services( + self, device_path: str, use_cached: bool, requested_services: Optional[Set[str]] + ) -> BleakGATTServiceCollection: + """ + Builds a new :class:`BleakGATTServiceCollection` from the current state. + + Args: + device_path: + The D-Bus object path of the Bluetooth device. + use_cached: + When ``True`` if there is a cached :class:`BleakGATTServiceCollection`, + the method will not wait for ``"ServicesResolved"`` to become true + and instead return the cached service collection immediately. + requested_services: + When given, only return services with UUID that is in the list + of requested services. + + Returns: + A new :class:`BleakGATTServiceCollection`. + + Raises: + BleakError: if the device is not present in BlueZ + """ + self._check_device(device_path) + + if use_cached: + services = self._services_cache.get(device_path) + if services is not None: + logger.debug("Using cached services for %s", device_path) + return services + + await self._wait_for_services_discovery(device_path) + + services = BleakGATTServiceCollection() + + for service_path in self._service_map.get(device_path, set()): + service_props = cast( + GattService1, + self._properties[service_path][defs.GATT_SERVICE_INTERFACE], + ) + + service = BleakGATTServiceBlueZDBus(service_props, service_path) + + if ( + requested_services is not None + and service.uuid not in requested_services + ): + continue + + services.add_service(service) + + for char_path in self._characteristic_map.get(service_path, set()): + char_props = cast( + GattCharacteristic1, + self._properties[char_path][defs.GATT_CHARACTERISTIC_INTERFACE], + ) + + char = BleakGATTCharacteristicBlueZDBus( + char_props, + char_path, + service.uuid, + service.handle, + # "MTU" property was added in BlueZ 5.62, otherwise fall + # back to minimum MTU according to Bluetooth spec. + lambda: char_props.get("MTU", 23) - 3, + ) + + services.add_characteristic(char) + + for desc_path in self._descriptor_map.get(char_path, set()): + desc_props = cast( + GattDescriptor1, + self._properties[desc_path][defs.GATT_DESCRIPTOR_INTERFACE], + ) + + desc = BleakGATTDescriptorBlueZDBus( + desc_props, + desc_path, + char.uuid, + char.handle, + ) + + services.add_descriptor(desc) + + self._services_cache[device_path] = services + + return services + + def get_device_name(self, device_path: str) -> str: + """ + Gets the value of the "Name" property for a device. + + Args: + device_path: The D-Bus object path of the device. + + Returns: + The current property value. + + Raises: + BleakError: if the device is not present in BlueZ + """ + return self._get_device_property(device_path, defs.DEVICE_INTERFACE, "Name") + + def is_connected(self, device_path: str) -> bool: + """ + Gets the value of the "Connected" property for a device. + + Args: + device_path: The D-Bus object path of the device. + + Returns: + The current property value or ``False`` if the device does not exist in BlueZ. + """ + try: + return self._properties[device_path][defs.DEVICE_INTERFACE]["Connected"] + except KeyError: + return False + + async def _wait_for_services_discovery(self, device_path: str) -> None: + """ + Waits for the device services to be discovered. + + If a disconnect happens before the completion a BleakError exception is raised. 
+ + Raises: + BleakError: if the device is not present in BlueZ + """ + self._check_device(device_path) + + with contextlib.ExitStack() as stack: + services_discovered_wait_task = asyncio.create_task( + self._wait_condition(device_path, "ServicesResolved", True) + ) + stack.callback(services_discovered_wait_task.cancel) + + device_disconnected_wait_task = asyncio.create_task( + self._wait_condition(device_path, "Connected", False) + ) + stack.callback(device_disconnected_wait_task.cancel) + + # in some cases, we can get "InterfaceRemoved" without the + # "Connected" property changing, so we need to race against both + # conditions + device_removed_wait_task = asyncio.create_task( + self._wait_removed(device_path) + ) + stack.callback(device_removed_wait_task.cancel) + + done, _ = await asyncio.wait( + { + services_discovered_wait_task, + device_disconnected_wait_task, + device_removed_wait_task, + }, + return_when=asyncio.FIRST_COMPLETED, + ) + + # check for exceptions + for task in done: + task.result() + + if not done.isdisjoint( + {device_disconnected_wait_task, device_removed_wait_task} + ): + raise BleakError("failed to discover services, device disconnected") + + async def _wait_removed(self, device_path: str) -> None: + """ + Waits for the device interface to be removed. + + If the device is not present in BlueZ, this returns immediately. + + Args: + device_path: The D-Bus object path of a Bluetooth device. + """ + if device_path not in self._properties: + return + + event = asyncio.Event() + + def callback(o: str) -> None: + if o == device_path: + event.set() + + device_removed_callback_and_state = DeviceRemovedCallbackAndState( + callback, self._properties[device_path][defs.DEVICE_INTERFACE]["Adapter"] + ) + + with contextlib.ExitStack() as stack: + self._device_removed_callbacks.append(device_removed_callback_and_state) + stack.callback( + self._device_removed_callbacks.remove, device_removed_callback_and_state + ) + await event.wait() + + async def _wait_condition( + self, device_path: str, property_name: str, property_value: Any + ) -> None: + """ + Waits for a condition to become true. + + Args: + device_path: The D-Bus object path of a Bluetooth device. + property_name: The name of the property to test. + property_value: A value to compare the current property value to. + + Raises: + BleakError: if the device is not present in BlueZ + """ + value = self._get_device_property( + device_path, defs.DEVICE_INTERFACE, property_name + ) + + if value == property_value: + return + + event = asyncio.Event() + + def _wait_condition_callback(new_value: Optional[Any]) -> None: + """Callback for when a property changes.""" + if new_value == property_value: + event.set() + + condition_callbacks = self._condition_callbacks + device_callbacks = condition_callbacks.setdefault(device_path, set()) + callback = DeviceConditionCallback(_wait_condition_callback, property_name) + device_callbacks.add(callback) + + try: + # can be canceled + await event.wait() + finally: + device_callbacks.remove(callback) + if not device_callbacks: + del condition_callbacks[device_path] + + def _parse_msg(self, message: Message) -> None: + """ + Handles callbacks from dbus_fast. 
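        Only three signal members are acted on (matching the branches below):
        ``InterfacesAdded``, ``InterfacesRemoved`` and ``PropertiesChanged``;
        anything else is ignored.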
+ """ + + if message.message_type != MessageType.SIGNAL: + return + + if logger.isEnabledFor(logging.DEBUG): + logger.debug( + "received D-Bus signal: %s.%s (%s): %s", + message.interface, + message.member, + message.path, + message.body, + ) + + # type hints + obj_path: str + interfaces_and_props: Dict[str, Dict[str, Variant]] + interfaces: List[str] + interface: str + changed: Dict[str, Variant] + invalidated: List[str] + + if message.member == "InterfacesAdded": + obj_path, interfaces_and_props = message.body + + for interface, props in interfaces_and_props.items(): + unpacked_props = unpack_variants(props) + self._properties.setdefault(obj_path, {})[interface] = unpacked_props + + if interface == defs.GATT_SERVICE_INTERFACE: + service_props = cast(GattService1, unpacked_props) + self._service_map.setdefault(service_props["Device"], set()).add( + obj_path + ) + elif interface == defs.GATT_CHARACTERISTIC_INTERFACE: + char_props = cast(GattCharacteristic1, unpacked_props) + self._characteristic_map.setdefault( + char_props["Service"], set() + ).add(obj_path) + elif interface == defs.GATT_DESCRIPTOR_INTERFACE: + desc_props = cast(GattDescriptor1, unpacked_props) + self._descriptor_map.setdefault( + desc_props["Characteristic"], set() + ).add(obj_path) + + elif interface == defs.ADAPTER_INTERFACE: + self._adapters.add(obj_path) + + # If this is a device and it has advertising data properties, + # then it should mean that this device just started advertising. + # Previously, we just relied on RSSI updates to determine if + # a device was actually advertising, but we were missing "slow" + # devices that only advertise once and then go to sleep for a while. + elif interface == defs.DEVICE_INTERFACE: + self._run_advertisement_callbacks( + obj_path, cast(Device1, unpacked_props) + ) + elif message.member == "InterfacesRemoved": + obj_path, interfaces = message.body + + for interface in interfaces: + try: + del self._properties[obj_path][interface] + except KeyError: + pass + + if interface == defs.ADAPTER_INTERFACE: + try: + self._adapters.remove(obj_path) + except KeyError: + pass + elif interface == defs.DEVICE_INTERFACE: + self._services_cache.pop(obj_path, None) + try: + del self._service_map[obj_path] + except KeyError: + pass + + for callback, adapter_path in self._device_removed_callbacks: + if obj_path.startswith(adapter_path): + callback(obj_path) + elif interface == defs.GATT_SERVICE_INTERFACE: + try: + del self._characteristic_map[obj_path] + except KeyError: + pass + elif interface == defs.GATT_CHARACTERISTIC_INTERFACE: + try: + del self._descriptor_map[obj_path] + except KeyError: + pass + + # Remove empty properties when all interfaces have been removed. + # This avoids wasting memory for people who have noisy devices + # with private addresses that change frequently. + if obj_path in self._properties and not self._properties[obj_path]: + del self._properties[obj_path] + elif message.member == "PropertiesChanged": + interface, changed, invalidated = message.body + message_path = message.path + assert message_path is not None + + try: + self_interface = self._properties[message.path][interface] + except KeyError: + # This can happen during initialization. The "PropertiesChanged" + # handler is attached before "GetManagedObjects" is called + # and so self._properties may not yet be populated. + # This is not a problem. We just discard the property value + # since "GetManagedObjects" will return a newer value. 
+ pass + else: + # update self._properties first + + self_interface.update(unpack_variants(changed)) + + for name in invalidated: + try: + del self_interface[name] + except KeyError: + # sometimes there BlueZ tries to remove properties + # that were never added + pass + + # then call any callbacks so they will be called with the + # updated state + + if interface == defs.DEVICE_INTERFACE: + # handle advertisement watchers + device_path = message_path + + self._run_advertisement_callbacks( + device_path, cast(Device1, self_interface) + ) + + # handle device condition watchers + callbacks = self._condition_callbacks.get(device_path) + if callbacks: + for callback in callbacks: + name = callback.property_name + if name in changed: + callback.callback(self_interface.get(name)) + + # handle device connection change watchers + if "Connected" in changed: + new_connected = self_interface["Connected"] + watchers = self._device_watchers.get(device_path) + if watchers: + # callbacks may remove the watcher, hence the copy + for watcher in watchers.copy(): + watcher.on_connected_changed(new_connected) + + elif interface == defs.GATT_CHARACTERISTIC_INTERFACE: + # handle characteristic value change watchers + if "Value" in changed: + new_value = self_interface["Value"] + device_path = device_path_from_characteristic_path(message_path) + watchers = self._device_watchers.get(device_path) + if watchers: + for watcher in watchers: + watcher.on_characteristic_value_changed( + message_path, new_value + ) + + def _run_advertisement_callbacks(self, device_path: str, device: Device1) -> None: + """ + Runs any registered advertisement callbacks. + + Args: + device_path: The D-Bus object path of the remote device. + device: The current D-Bus properties of the device. + """ + adapter_path = device["Adapter"] + for callback in self._advertisement_callbacks[adapter_path]: + callback(device_path, device.copy()) + + +_global_instances: MutableMapping[Any, BlueZManager] = WeakKeyDictionary() + + +async def get_global_bluez_manager() -> BlueZManager: + """ + Gets an existing initialized global BlueZ manager instance associated with the current event loop, + or initializes a new instance. + """ + + loop = asyncio.get_running_loop() + try: + instance = _global_instances[loop] + except KeyError: + instance = _global_instances[loop] = BlueZManager() + + await instance.async_init() + + return instance diff --git a/bleak/backends/bluezdbus/scanner.py b/bleak/backends/bluezdbus/scanner.py new file mode 100644 index 0000000..186e7d4 --- /dev/null +++ b/bleak/backends/bluezdbus/scanner.py @@ -0,0 +1,286 @@ +import logging +from typing import Callable, Coroutine, Dict, List, Literal, Optional, TypedDict +from warnings import warn + +from dbus_fast import Variant + +from ...exc import BleakError +from ..scanner import AdvertisementData, AdvertisementDataCallback, BaseBleakScanner +from .advertisement_monitor import OrPatternLike +from .defs import Device1 +from .manager import get_global_bluez_manager +from .utils import bdaddr_from_device_path + +logger = logging.getLogger(__name__) + + +class BlueZDiscoveryFilters(TypedDict, total=False): + """ + Dictionary of arguments for the ``org.bluez.Adapter1.SetDiscoveryFilter`` + D-Bus method. + + https://github.com/bluez/bluez/blob/master/doc/org.bluez.Adapter.rst#void-setdiscoveryfilterdict-filter + """ + + UUIDs: List[str] + """ + Filter by service UUIDs, empty means match _any_ UUID. + + Normally, the ``service_uuids`` argument of :class:`bleak.BleakScanner` + is used instead. 
+ """ + RSSI: int + """ + RSSI threshold value. + """ + Pathloss: int + """ + Pathloss threshold value. + """ + Transport: str + """ + Transport parameter determines the type of scan. + + This should not be used since it is required to be set to ``"le"``. + """ + DuplicateData: bool + """ + Disables duplicate detection of advertisement data. + + This does not affect the ``Filter Duplicates`` parameter of the ``LE Set Scan Enable`` + HCI command to the Bluetooth adapter! + + Although the default value for BlueZ is ``True``, Bleak sets this to ``False`` by default. + """ + Discoverable: bool + """ + Make adapter discoverable while discovering, + if the adapter is already discoverable setting + this filter won't do anything. + """ + Pattern: str + """ + Discover devices where the pattern matches + either the prefix of the address or + device name which is convenient way to limited + the number of device objects created during a + discovery. + """ + + +class BlueZScannerArgs(TypedDict, total=False): + """ + :class:`BleakScanner` args that are specific to the BlueZ backend. + """ + + filters: BlueZDiscoveryFilters + """ + Filters to pass to the adapter SetDiscoveryFilter D-Bus method. + + Only used for active scanning. + """ + + or_patterns: List[OrPatternLike] + """ + Or patterns to pass to the AdvertisementMonitor1 D-Bus interface. + + Only used for passive scanning. + """ + + +class BleakScannerBlueZDBus(BaseBleakScanner): + """The native Linux Bleak BLE Scanner. + + For possible values for `filters`, see the parameters to the + ``SetDiscoveryFilter`` method in the `BlueZ docs + `_ + + Args: + detection_callback: + Optional function that will be called each time a device is + discovered or advertising data has changed. + service_uuids: + Optional list of service UUIDs to filter on. Only advertisements + containing this advertising data will be received. Specifying this + also enables scanning while the screen is off on Android. + scanning_mode: + Set to ``"passive"`` to avoid the ``"active"`` scanning mode. + **bluez: + Dictionary of arguments specific to the BlueZ backend. + **adapter (str): + Bluetooth adapter to use for discovery. 
+ """ + + def __init__( + self, + detection_callback: Optional[AdvertisementDataCallback], + service_uuids: Optional[List[str]], + scanning_mode: Literal["active", "passive"], + *, + bluez: BlueZScannerArgs, + **kwargs, + ): + super(BleakScannerBlueZDBus, self).__init__(detection_callback, service_uuids) + + self._scanning_mode = scanning_mode + + # kwarg "device" is for backwards compatibility + self._adapter: Optional[str] = kwargs.get("adapter", kwargs.get("device")) + + # callback from manager for stopping scanning if it has been started + self._stop: Optional[Callable[[], Coroutine]] = None + + # Discovery filters + + self._filters: Dict[str, Variant] = {} + + self._filters["Transport"] = Variant("s", "le") + self._filters["DuplicateData"] = Variant("b", False) + + if self._service_uuids: + self._filters["UUIDs"] = Variant("as", self._service_uuids) + + filters = kwargs.get("filters") + + if filters is None: + filters = bluez.get("filters") + else: + warn( + "the 'filters' kwarg is deprecated, use 'bluez' kwarg instead", + FutureWarning, + stacklevel=2, + ) + + if filters is not None: + self.set_scanning_filter(filters=filters) + + self._or_patterns = bluez.get("or_patterns") + + if self._scanning_mode == "passive" and service_uuids: + logger.warning( + "service uuid filtering is not implemented for passive scanning, use bluez or_patterns as a workaround" + ) + + if self._scanning_mode == "passive" and not self._or_patterns: + raise BleakError("passive scanning mode requires bluez or_patterns") + + async def start(self) -> None: + manager = await get_global_bluez_manager() + + if self._adapter: + adapter_path = f"/org/bluez/{self._adapter}" + else: + adapter_path = manager.get_default_adapter() + + self.seen_devices = {} + + if self._scanning_mode == "passive": + self._stop = await manager.passive_scan( + adapter_path, + self._or_patterns, + self._handle_advertising_data, + self._handle_device_removed, + ) + else: + self._stop = await manager.active_scan( + adapter_path, + self._filters, + self._handle_advertising_data, + self._handle_device_removed, + ) + + async def stop(self) -> None: + if self._stop: + # avoid reentrancy + stop, self._stop = self._stop, None + + await stop() + + def set_scanning_filter(self, **kwargs) -> None: + """Sets OS level scanning filters for the BleakScanner. + + For possible values for `filters`, see the parameters to the + ``SetDiscoveryFilter`` method in the `BlueZ docs + `_ + + See variant types here: + + Keyword Args: + filters (dict): A dict of filters to be applied on discovery. + + """ + for k, v in kwargs.get("filters", {}).items(): + if k == "UUIDs": + self._filters[k] = Variant("as", v) + elif k == "RSSI": + self._filters[k] = Variant("n", v) + elif k == "Pathloss": + self._filters[k] = Variant("n", v) + elif k == "Transport": + self._filters[k] = Variant("s", v) + elif k == "DuplicateData": + self._filters[k] = Variant("b", v) + elif k == "Discoverable": + self._filters[k] = Variant("b", v) + elif k == "Pattern": + self._filters[k] = Variant("s", v) + else: + logger.warning("Filter '%s' is not currently supported." % k) + + # Helper methods + + def _handle_advertising_data(self, path: str, props: Device1) -> None: + """ + Handles advertising data received from the BlueZ manager instance. + + Args: + path: The D-Bus object path of the device. + props: The D-Bus object properties of the device. 
+ """ + _service_uuids = props.get("UUIDs", []) + + if not self.is_allowed_uuid(_service_uuids): + return + + # Get all the information wanted to pack in the advertisement data + _local_name = props.get("Name") + _manufacturer_data = { + k: bytes(v) for k, v in props.get("ManufacturerData", {}).items() + } + _service_data = {k: bytes(v) for k, v in props.get("ServiceData", {}).items()} + + # Get tx power data + tx_power = props.get("TxPower") + + # Pack the advertisement data + advertisement_data = AdvertisementData( + local_name=_local_name, + manufacturer_data=_manufacturer_data, + service_data=_service_data, + service_uuids=_service_uuids, + tx_power=tx_power, + rssi=props.get("RSSI", -127), + platform_data=(path, props), + ) + + device = self.create_or_update_device( + props["Address"], + props["Alias"], + {"path": path, "props": props}, + advertisement_data, + ) + + self.call_detection_callbacks(device, advertisement_data) + + def _handle_device_removed(self, device_path: str) -> None: + """ + Handles a device being removed from BlueZ. + """ + try: + bdaddr = bdaddr_from_device_path(device_path) + del self.seen_devices[bdaddr] + except KeyError: + # The device will not have been added to self.seen_devices if no + # advertising data was received, so this is expected to happen + # occasionally. + pass diff --git a/bleak/backends/bluezdbus/service.py b/bleak/backends/bluezdbus/service.py new file mode 100644 index 0000000..a1a8d3c --- /dev/null +++ b/bleak/backends/bluezdbus/service.py @@ -0,0 +1,44 @@ +from typing import Any, List + +from ..service import BleakGATTService +from .characteristic import BleakGATTCharacteristicBlueZDBus +from .utils import extract_service_handle_from_path + + +class BleakGATTServiceBlueZDBus(BleakGATTService): + """GATT Service implementation for the BlueZ DBus backend""" + + def __init__(self, obj: Any, path: str): + super().__init__(obj) + self.__characteristics = [] + self.__path = path + self.__handle = extract_service_handle_from_path(path) + + @property + def uuid(self) -> str: + """The UUID to this service""" + return self.obj["UUID"] + + @property + def handle(self) -> int: + """The integer handle of this service""" + return self.__handle + + @property + def characteristics(self) -> List[BleakGATTCharacteristicBlueZDBus]: + """List of characteristics for this service""" + return self.__characteristics + + def add_characteristic( + self, characteristic: BleakGATTCharacteristicBlueZDBus + ) -> None: + """Add a :py:class:`~BleakGATTCharacteristicBlueZDBus` to the service. + + Should not be used by end user, but rather by `bleak` itself. + """ + self.__characteristics.append(characteristic) + + @property + def path(self) -> str: + """The DBus path. 
Mostly needed by `bleak`, not by end user""" + return self.__path diff --git a/bleak/backends/bluezdbus/signals.py b/bleak/backends/bluezdbus/signals.py new file mode 100644 index 0000000..6a2ce35 --- /dev/null +++ b/bleak/backends/bluezdbus/signals.py @@ -0,0 +1,202 @@ +# -*- coding: utf-8 -*- +from __future__ import annotations + +import re +from typing import Any, Coroutine, Dict, Optional + +from dbus_fast.aio.message_bus import MessageBus +from dbus_fast.errors import InvalidObjectPathError +from dbus_fast.message import Message +from dbus_fast.validators import ( + assert_interface_name_valid, + assert_member_name_valid, + assert_object_path_valid, +) + +# TODO: this stuff should be improved and submitted upstream to dbus-next +# https://github.com/altdesktop/python-dbus-next/issues/53 + +_message_types = ["signal", "method_call", "method_return", "error"] + + +class InvalidMessageTypeError(TypeError): + def __init__(self, type: str): + super().__init__(f"invalid message type: {type}") + + +def is_message_type_valid(type: str) -> bool: + """Whether this is a valid message type. + + .. seealso:: https://dbus.freedesktop.org/doc/dbus-specification.html#message-bus-routing-match-rules + + :param type: The message type to validate. + :type name: str + + :returns: Whether the name is a valid message type. + :rtype: bool + """ + return type in _message_types + + +def assert_bus_name_valid(type: str) -> None: + """Raise an error if this is not a valid message type. + + .. seealso:: https://dbus.freedesktop.org/doc/dbus-specification.html#message-bus-routing-match-rules + + :param type: The message type to validate. + :type name: str + + :raises: + - :class:`InvalidMessageTypeError` - If this is not a valid message type. + """ + if not is_message_type_valid(type): + raise InvalidMessageTypeError(type) + + +class MatchRules: + """D-Bus signal match rules. + + .. 
seealso:: https://dbus.freedesktop.org/doc/dbus-specification.html#message-bus-routing-match-rules + """ + + def __init__( + self, + type: str = "signal", + sender: Optional[str] = None, + interface: Optional[str] = None, + member: Optional[str] = None, + path: Optional[str] = None, + path_namespace: Optional[str] = None, + destination: Optional[str] = None, + arg0namespace: Optional[str] = None, + **kwargs, + ): + assert_bus_name_valid(type) + self.type: str = type + + if sender: + assert_bus_name_valid(sender) + self.sender: str = sender + else: + self.sender = None + + if interface: + assert_interface_name_valid(interface) + self.interface: str = interface + else: + self.interface = None + + if member: + assert_member_name_valid(member) + self.member: str = member + else: + self.member = None + + if path: + assert_object_path_valid(path) + self.path: str = path + else: + self.path = None + + if path_namespace: + assert_object_path_valid(path_namespace) + self.path_namespace: str = path_namespace + else: + self.path_namespace = None + + if path and path_namespace: + raise TypeError( + "message rules cannot have both 'path' and 'path_namespace' at the same time" + ) + + if destination: + assert_bus_name_valid(destination) + self.destination: str = destination + else: + self.destination = None + + if arg0namespace: + assert_bus_name_valid(arg0namespace) + self.arg0namespace: str = arg0namespace + else: + self.arg0namespace = None + + if kwargs: + for k, v in kwargs.items(): + if re.match(r"^arg\d+$", k): + if not isinstance(v, str): + raise TypeError(f"kwarg '{k}' must have a str value") + elif re.match(r"^arg\d+path$", k): + if not isinstance(v, str): + raise InvalidObjectPathError(v) + assert_object_path_valid(v[:-1] if v.endswith("/") else v) + else: + raise ValueError("kwargs must be in the form 'arg0' or 'arg0path'") + self.args: Dict[str, str] = kwargs + else: + self.args = None + + @staticmethod + def parse(rules: str) -> MatchRules: + return MatchRules(**dict(r.split("=") for r in rules.split(","))) + + def __str__(self) -> str: + rules = [f"type={self.type}"] + + if self.sender: + rules.append(f"sender={self.sender}") + + if self.interface: + rules.append(f"interface={self.interface}") + + if self.member: + rules.append(f"member={self.member}") + + if self.path: + rules.append(f"path={self.path}") + + if self.path_namespace: + rules.append(f"path_namespace={self.path_namespace}") + + if self.destination: + rules.append(f"destination={self.destination}") + + if self.args: + for k, v in self.args.items(): + rules.append(f"{k}={v}") + + if self.arg0namespace: + rules.append(f"arg0namespace={self.arg0namespace}") + + return ",".join(rules) + + def __repr__(self) -> str: + return f"MatchRules({self})" + + +def add_match(bus: MessageBus, rules: MatchRules) -> Coroutine[Any, Any, Message]: + """Calls org.freedesktop.DBus.AddMatch using ``rules``.""" + return bus.call( + Message( + destination="org.freedesktop.DBus", + interface="org.freedesktop.DBus", + path="/org/freedesktop/DBus", + member="AddMatch", + signature="s", + body=[str(rules)], + ) + ) + + +def remove_match(bus: MessageBus, rules: MatchRules) -> Coroutine[Any, Any, Message]: + """Calls org.freedesktop.DBus.RemoveMatch using ``rules``.""" + return bus.call( + Message( + destination="org.freedesktop.DBus", + interface="org.freedesktop.DBus", + path="/org/freedesktop/DBus", + member="RemoveMatch", + signature="s", + body=[str(rules)], + ) + ) diff --git a/bleak/backends/bluezdbus/utils.py b/bleak/backends/bluezdbus/utils.py new 
file mode 100644 index 0000000..e1acf99 --- /dev/null +++ b/bleak/backends/bluezdbus/utils.py @@ -0,0 +1,69 @@ +# -*- coding: utf-8 -*- +import os +from typing import Optional + +from dbus_fast.auth import AuthExternal +from dbus_fast.constants import MessageType +from dbus_fast.message import Message + +from ...exc import BleakDBusError, BleakError + + +def assert_reply(reply: Message) -> None: + """Checks that a D-Bus message is a valid reply. + + Raises: + BleakDBusError: if the message type is ``MessageType.ERROR`` + AssertionError: if the message type is not ``MessageType.METHOD_RETURN`` + """ + if reply.message_type == MessageType.ERROR: + raise BleakDBusError(reply.error_name, reply.body) + assert reply.message_type == MessageType.METHOD_RETURN + + +def extract_service_handle_from_path(path: str) -> int: + try: + return int(path[-4:], 16) + except Exception as e: + raise BleakError(f"Could not parse service handle from path: {path}") from e + + +def bdaddr_from_device_path(device_path: str) -> str: + """ + Scrape the Bluetooth address from a D-Bus device path. + + Args: + device_path: The D-Bus object path of the device. + + Returns: + A Bluetooth address as a string. + """ + return ":".join(device_path[-17:].split("_")) + + +def device_path_from_characteristic_path(characteristic_path: str) -> str: + """ + Scrape the device path from a D-Bus characteristic path. + + Args: + characteristic_path: The D-Bus object path of the characteristic. + + Returns: + A D-Bus object path of the device. + """ + # /org/bluez/hci1/dev_FA_23_9D_AA_45_46/service000c/char000d + return characteristic_path[:37] + + +def get_dbus_authenticator() -> Optional[AuthExternal]: + uid = None + try: + uid = int(os.environ.get("BLEAK_DBUS_AUTH_UID", "")) + except ValueError: + pass + + auth = None + if uid is not None: + auth = AuthExternal(uid=uid) + + return auth diff --git a/bleak/backends/bluezdbus/version.py b/bleak/backends/bluezdbus/version.py new file mode 100644 index 0000000..40842cb --- /dev/null +++ b/bleak/backends/bluezdbus/version.py @@ -0,0 +1,62 @@ +import asyncio +import contextlib +import logging +import re +from typing import Optional + +logger = logging.getLogger(__name__) + + +async def _get_bluetoothctl_version() -> Optional[re.Match]: + """Get the version of bluetoothctl.""" + with contextlib.suppress(Exception): + proc = await asyncio.create_subprocess_exec( + "bluetoothctl", "--version", stdout=asyncio.subprocess.PIPE + ) + out = await proc.stdout.read() + version = re.search(b"(\\d+).(\\d+)", out.strip(b"'")) + await proc.wait() + return version + return None + + +class BlueZFeatures: + """Check which features are supported by the BlueZ backend.""" + + checked_bluez_version = False + supported_version = True + can_write_without_response = True + write_without_response_workaround_needed = False + hides_battery_characteristic = True + hides_device_name_characteristic = True + _check_bluez_event: Optional[asyncio.Event] = None + + @classmethod + async def check_bluez_version(cls) -> None: + """Check the bluez version.""" + if cls._check_bluez_event: + # If there is already a check in progress + # it wins, wait for it instead + await cls._check_bluez_event.wait() + return + cls._check_bluez_event = asyncio.Event() + version_output = await _get_bluetoothctl_version() + if version_output: + major, minor = tuple(map(int, version_output.groups())) + cls.supported_version = major == 5 and minor >= 34 + cls.can_write_without_response = major == 5 and minor >= 46 + 
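            # (Editor's note, illustrative only, not part of the original patch.)
            # These gates mirror the checks in client.py: write-without-response
            # needs BlueZ >= 5.46, and the AcquireWrite workaround is still
            # required before 5.51.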
cls.write_without_response_workaround_needed = not ( + major == 5 and minor >= 51 + ) + cls.hides_battery_characteristic = major == 5 and minor >= 48 and minor < 55 + cls.hides_device_name_characteristic = major == 5 and minor >= 48 + else: + # Its possible they may be running inside a container where + # bluetoothctl is not available and they only have access to the + # BlueZ D-Bus API. + logging.warning( + "Could not determine BlueZ version, bluetoothctl not available, assuming 5.51+" + ) + + cls._check_bluez_event.set() + cls.checked_bluez_version = True diff --git a/bleak/backends/characteristic.py b/bleak/backends/characteristic.py new file mode 100644 index 0000000..eca52d5 --- /dev/null +++ b/bleak/backends/characteristic.py @@ -0,0 +1,133 @@ +# -*- coding: utf-8 -*- +""" +Interface class for the Bleak representation of a GATT Characteristic + +Created on 2019-03-19 by hbldh + +""" +import abc +import enum +from typing import Any, Callable, List, Union +from uuid import UUID + +from ..uuids import uuidstr_to_str +from .descriptor import BleakGATTDescriptor + + +class GattCharacteristicsFlags(enum.Enum): + broadcast = 0x0001 + read = 0x0002 + write_without_response = 0x0004 + write = 0x0008 + notify = 0x0010 + indicate = 0x0020 + authenticated_signed_writes = 0x0040 + extended_properties = 0x0080 + reliable_write = 0x0100 + writable_auxiliaries = 0x0200 + + +class BleakGATTCharacteristic(abc.ABC): + """Interface for the Bleak representation of a GATT Characteristic""" + + def __init__(self, obj: Any, max_write_without_response_size: Callable[[], int]): + """ + Args: + obj: + A platform-specific object for this characteristic. + max_write_without_response_size: + The maximum size in bytes that can be written to the + characteristic in a single write without response command. + """ + self.obj = obj + self._max_write_without_response_size = max_write_without_response_size + + def __str__(self): + return f"{self.uuid} (Handle: {self.handle}): {self.description}" + + @property + @abc.abstractmethod + def service_uuid(self) -> str: + """The UUID of the Service containing this characteristic""" + raise NotImplementedError() + + @property + @abc.abstractmethod + def service_handle(self) -> int: + """The integer handle of the Service containing this characteristic""" + raise NotImplementedError() + + @property + @abc.abstractmethod + def handle(self) -> int: + """The handle for this characteristic""" + raise NotImplementedError() + + @property + @abc.abstractmethod + def uuid(self) -> str: + """The UUID for this characteristic""" + raise NotImplementedError() + + @property + def description(self) -> str: + """Description for this characteristic""" + return uuidstr_to_str(self.uuid) + + @property + @abc.abstractmethod + def properties(self) -> List[str]: + """Properties of this characteristic""" + raise NotImplementedError() + + @property + def max_write_without_response_size(self) -> int: + """ + Gets the maximum size in bytes that can be used for the *data* argument + of :meth:`BleakClient.write_gatt_char()` when ``response=False``. + + In rare cases, a device may take a long time to update this value, so + reading this property may return the default value of ``20`` and reading + it again after a some time may return the expected higher value. + + If you *really* need to wait for a higher value, you can do something + like this: + + .. code-block:: python + + async with asyncio.timeout(10): + while char.max_write_without_response_size == 20: + await asyncio.sleep(0.5) + + .. 
warning:: Linux quirk: For BlueZ versions < 5.62, this property + will always return ``20``. + + .. versionadded:: 0.16 + """ + + # for backwards compatibility + if isinstance(self._max_write_without_response_size, int): + return self._max_write_without_response_size + + return self._max_write_without_response_size() + + @property + @abc.abstractmethod + def descriptors(self) -> List[BleakGATTDescriptor]: + """List of descriptors for this service""" + raise NotImplementedError() + + @abc.abstractmethod + def get_descriptor( + self, specifier: Union[int, str, UUID] + ) -> Union[BleakGATTDescriptor, None]: + """Get a descriptor by handle (int) or UUID (str or uuid.UUID)""" + raise NotImplementedError() + + @abc.abstractmethod + def add_descriptor(self, descriptor: BleakGATTDescriptor) -> None: + """Add a :py:class:`~BleakGATTDescriptor` to the characteristic. + + Should not be used by end user, but rather by `bleak` itself. + """ + raise NotImplementedError() diff --git a/bleak/backends/client.py b/bleak/backends/client.py new file mode 100644 index 0000000..ddf77f2 --- /dev/null +++ b/bleak/backends/client.py @@ -0,0 +1,274 @@ +# -*- coding: utf-8 -*- +""" +Base class for backend clients. + +Created on 2018-04-23 by hbldh + +""" +import abc +import asyncio +import os +import platform +import sys +import uuid +from typing import Callable, Optional, Type, Union +from warnings import warn + +if sys.version_info < (3, 12): + from typing_extensions import Buffer +else: + from collections.abc import Buffer + +from ..exc import BleakError +from .characteristic import BleakGATTCharacteristic +from .device import BLEDevice +from .service import BleakGATTServiceCollection + +NotifyCallback = Callable[[bytearray], None] + + +class BaseBleakClient(abc.ABC): + """The Client Interface for Bleak Backend implementations to implement. + + The documentation of this interface should thus be safe to use as a reference for your implementation. + + Args: + address_or_ble_device (`BLEDevice` or str): The Bluetooth address of the BLE peripheral to connect to or the `BLEDevice` object representing it. + + Keyword Args: + timeout (float): Timeout for required ``discover`` call. Defaults to 10.0. + disconnected_callback (callable): Callback that will be scheduled in the + event loop when the client is disconnected. The callable must take one + argument, which will be this client object. + """ + + def __init__(self, address_or_ble_device: Union[BLEDevice, str], **kwargs): + if isinstance(address_or_ble_device, BLEDevice): + self.address = address_or_ble_device.address + else: + self.address = address_or_ble_device + + self.services: Optional[BleakGATTServiceCollection] = None + + self._timeout = kwargs.get("timeout", 10.0) + self._disconnected_callback: Optional[Callable[[], None]] = kwargs.get( + "disconnected_callback" + ) + + @property + @abc.abstractmethod + def mtu_size(self) -> int: + """Gets the negotiated MTU.""" + raise NotImplementedError + + # Connectivity methods + + def set_disconnected_callback( + self, callback: Optional[Callable[[], None]], **kwargs + ) -> None: + """Set the disconnect callback. + The callback will only be called on unsolicited disconnect event. + + Set the callback to ``None`` to remove any existing callback. + + Args: + callback: callback to be called on disconnection. + + """ + self._disconnected_callback = callback + + @abc.abstractmethod + async def connect(self, **kwargs) -> bool: + """Connect to the specified GATT server. + + Returns: + Boolean representing connection status. 
+ + """ + raise NotImplementedError() + + @abc.abstractmethod + async def disconnect(self) -> bool: + """Disconnect from the specified GATT server. + + Returns: + Boolean representing connection status. + + """ + raise NotImplementedError() + + @abc.abstractmethod + async def pair(self, *args, **kwargs) -> bool: + """Pair with the peripheral.""" + raise NotImplementedError() + + @abc.abstractmethod + async def unpair(self) -> bool: + """Unpair with the peripheral.""" + raise NotImplementedError() + + @property + @abc.abstractmethod + def is_connected(self) -> bool: + """Check connection status between this client and the server. + + Returns: + Boolean representing connection status. + + """ + raise NotImplementedError() + + class _DeprecatedIsConnectedReturn: + """Wrapper for ``is_connected`` return value to provide deprecation warning.""" + + def __init__(self, value: bool): + self._value = value + + def __bool__(self): + return self._value + + def __call__(self) -> bool: + warn( + "is_connected has been changed to a property. Calling it as an async method will be removed in a future version", + FutureWarning, + stacklevel=2, + ) + f = asyncio.Future() + f.set_result(self._value) + return f + + def __repr__(self) -> str: + return repr(self._value) + + # GATT services methods + + @abc.abstractmethod + async def get_services(self, **kwargs) -> BleakGATTServiceCollection: + """Get all services registered for this GATT server. + + Returns: + A :py:class:`bleak.backends.service.BleakGATTServiceCollection` with this device's services tree. + + """ + raise NotImplementedError() + + # I/O methods + + @abc.abstractmethod + async def read_gatt_char( + self, + char_specifier: Union[BleakGATTCharacteristic, int, str, uuid.UUID], + **kwargs, + ) -> bytearray: + """Perform read operation on the specified GATT characteristic. + + Args: + char_specifier (BleakGATTCharacteristic, int, str or UUID): The characteristic to read from, + specified by either integer handle, UUID or directly by the + BleakGATTCharacteristic object representing it. + + Returns: + (bytearray) The read data. + + """ + raise NotImplementedError() + + @abc.abstractmethod + async def read_gatt_descriptor(self, handle: int, **kwargs) -> bytearray: + """Perform read operation on the specified GATT descriptor. + + Args: + handle (int): The handle of the descriptor to read from. + + Returns: + (bytearray) The read data. + + """ + raise NotImplementedError() + + @abc.abstractmethod + async def write_gatt_char( + self, + characteristic: BleakGATTCharacteristic, + data: Buffer, + response: bool, + ) -> None: + """ + Perform a write operation on the specified GATT characteristic. + + Args: + characteristic: The characteristic to write to. + data: The data to send. + response: If write-with-response operation should be done. + """ + raise NotImplementedError() + + @abc.abstractmethod + async def write_gatt_descriptor(self, handle: int, data: Buffer) -> None: + """Perform a write operation on the specified GATT descriptor. + + Args: + handle: The handle of the descriptor to read from. + data: The data to send (any bytes-like object). + + """ + raise NotImplementedError() + + @abc.abstractmethod + async def start_notify( + self, + characteristic: BleakGATTCharacteristic, + callback: NotifyCallback, + **kwargs, + ) -> None: + """ + Activate notifications/indications on a characteristic. + + Implementers should call the OS function to enable notifications or + indications on the characteristic. 
+ + To keep things the same cross-platform, notifications should be preferred + over indications if possible when a characteristic supports both. + """ + raise NotImplementedError() + + @abc.abstractmethod + async def stop_notify( + self, char_specifier: Union[BleakGATTCharacteristic, int, str, uuid.UUID] + ) -> None: + """Deactivate notification/indication on a specified characteristic. + + Args: + char_specifier (BleakGATTCharacteristic, int, str or UUID): The characteristic to deactivate + notification/indication on, specified by either integer handle, UUID or + directly by the BleakGATTCharacteristic object representing it. + + """ + raise NotImplementedError() + + +def get_platform_client_backend_type() -> Type[BaseBleakClient]: + """ + Gets the platform-specific :class:`BaseBleakClient` type. + """ + if os.environ.get("P4A_BOOTSTRAP") is not None: + from bleak.backends.p4android.client import BleakClientP4Android + + return BleakClientP4Android + + if platform.system() == "Linux": + from bleak.backends.bluezdbus.client import BleakClientBlueZDBus + + return BleakClientBlueZDBus + + if platform.system() == "Darwin": + from bleak.backends.corebluetooth.client import BleakClientCoreBluetooth + + return BleakClientCoreBluetooth + + if platform.system() == "Windows": + from bleak.backends.winrt.client import BleakClientWinRT + + return BleakClientWinRT + + raise BleakError(f"Unsupported platform: {platform.system()}") diff --git a/bleak/backends/corebluetooth/CentralManagerDelegate.py b/bleak/backends/corebluetooth/CentralManagerDelegate.py new file mode 100644 index 0000000..6b87664 --- /dev/null +++ b/bleak/backends/corebluetooth/CentralManagerDelegate.py @@ -0,0 +1,371 @@ +""" +CentralManagerDelegate will implement the CBCentralManagerDelegate protocol to +manage CoreBluetooth services and resources on the Central End + +Created on June, 25 2019 by kevincar + +""" + +import asyncio +import logging +import sys +import threading +from typing import Any, Callable, Dict, List, Optional + +if sys.version_info < (3, 11): + from async_timeout import timeout as async_timeout +else: + from asyncio import timeout as async_timeout + +import objc +from CoreBluetooth import ( + CBUUID, + CBCentralManager, + CBManagerStatePoweredOff, + CBManagerStatePoweredOn, + CBManagerStateResetting, + CBManagerStateUnauthorized, + CBManagerStateUnknown, + CBManagerStateUnsupported, + CBPeripheral, +) +from Foundation import ( + NSUUID, + NSArray, + NSDictionary, + NSError, + NSKeyValueChangeNewKey, + NSKeyValueObservingOptionNew, + NSNumber, + NSObject, + NSString, +) +from libdispatch import DISPATCH_QUEUE_SERIAL, dispatch_queue_create + +from ...exc import BleakError + +logger = logging.getLogger(__name__) +CBCentralManagerDelegate = objc.protocolNamed("CBCentralManagerDelegate") + + +DisconnectCallback = Callable[[], None] + + +class CentralManagerDelegate(NSObject): + """macOS conforming python class for managing the CentralManger for BLE""" + + ___pyobjc_protocols__ = [CBCentralManagerDelegate] + + def init(self) -> Optional["CentralManagerDelegate"]: + """macOS init function for NSObject""" + self = objc.super(CentralManagerDelegate, self).init() + + if self is None: + return None + + self.event_loop = asyncio.get_running_loop() + self._connect_futures: Dict[NSUUID, asyncio.Future] = {} + + self.callbacks: Dict[ + int, Callable[[CBPeripheral, Dict[str, Any], int], None] + ] = {} + self._disconnect_callbacks: Dict[NSUUID, DisconnectCallback] = {} + self._disconnect_futures: Dict[NSUUID, asyncio.Future] = {} 
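+ # Connect/disconnect futures and callbacks are keyed by the peripheral's NSUUID so requests to different peripherals do not collide.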
+ + self._did_update_state_event = threading.Event() + self.central_manager = CBCentralManager.alloc().initWithDelegate_queue_( + self, dispatch_queue_create(b"bleak.corebluetooth", DISPATCH_QUEUE_SERIAL) + ) + + # according to CoreBluetooth docs, it is not valid to call CBCentral + # methods until the centralManagerDidUpdateState_() delegate method + # is called and the current state is CBManagerStatePoweredOn. + # It doesn't take long for the callback to occur, so we should be able + # to do a blocking wait here without anyone complaining. + self._did_update_state_event.wait(1) + + if self.central_manager.state() == CBManagerStateUnsupported: + raise BleakError("BLE is unsupported") + + if self.central_manager.state() == CBManagerStateUnauthorized: + raise BleakError("BLE is not authorized - check macOS privacy settings") + + if self.central_manager.state() != CBManagerStatePoweredOn: + raise BleakError("Bluetooth device is turned off") + + # isScanning property was added in 10.13 + if objc.macos_available(10, 13): + self.central_manager.addObserver_forKeyPath_options_context_( + self, "isScanning", NSKeyValueObservingOptionNew, 0 + ) + self._did_start_scanning_event: Optional[asyncio.Event] = None + self._did_stop_scanning_event: Optional[asyncio.Event] = None + + return self + + def __del__(self) -> None: + if objc.macos_available(10, 13): + try: + self.central_manager.removeObserver_forKeyPath_(self, "isScanning") + except IndexError: + # If self.init() raised an exception before calling + # addObserver_forKeyPath_options_context_, attempting + # to remove the observer will fail with IndexError + pass + + # User defined functions + + @objc.python_method + async def start_scan(self, service_uuids: Optional[List[str]]) -> None: + service_uuids = ( + NSArray.alloc().initWithArray_( + list(map(CBUUID.UUIDWithString_, service_uuids)) + ) + if service_uuids + else None + ) + + self.central_manager.scanForPeripheralsWithServices_options_( + service_uuids, None + ) + + # The `isScanning` property was added in macOS 10.13, so before that + # just waiting some will have to do. + if objc.macos_available(10, 13): + event = asyncio.Event() + self._did_start_scanning_event = event + if not self.central_manager.isScanning(): + await event.wait() + else: + await asyncio.sleep(0.1) + + @objc.python_method + async def stop_scan(self) -> None: + self.central_manager.stopScan() + + # The `isScanning` property was added in macOS 10.13, so before that + # just waiting some will have to do. 
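+ # On 10.13+ the event below is set from observeValueForKeyPath_ofObject_change_context_ once isScanning flips to False.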
+ if objc.macos_available(10, 13): + event = asyncio.Event() + self._did_stop_scanning_event = event + if self.central_manager.isScanning(): + await event.wait() + else: + await asyncio.sleep(0.1) + + @objc.python_method + async def connect( + self, + peripheral: CBPeripheral, + disconnect_callback: DisconnectCallback, + timeout: float = 10.0, + ) -> None: + try: + self._disconnect_callbacks[peripheral.identifier()] = disconnect_callback + future = self.event_loop.create_future() + + self._connect_futures[peripheral.identifier()] = future + try: + self.central_manager.connectPeripheral_options_(peripheral, None) + async with async_timeout(timeout): + await future + finally: + del self._connect_futures[peripheral.identifier()] + + except asyncio.TimeoutError: + logger.debug(f"Connection timed out after {timeout} seconds.") + del self._disconnect_callbacks[peripheral.identifier()] + future = self.event_loop.create_future() + + self._disconnect_futures[peripheral.identifier()] = future + try: + self.central_manager.cancelPeripheralConnection_(peripheral) + await future + finally: + del self._disconnect_futures[peripheral.identifier()] + + raise + + @objc.python_method + async def disconnect(self, peripheral: CBPeripheral) -> None: + future = self.event_loop.create_future() + + self._disconnect_futures[peripheral.identifier()] = future + try: + self.central_manager.cancelPeripheralConnection_(peripheral) + await future + finally: + del self._disconnect_futures[peripheral.identifier()] + + @objc.python_method + def _changed_is_scanning(self, is_scanning: bool) -> None: + if is_scanning: + if self._did_start_scanning_event: + self._did_start_scanning_event.set() + else: + if self._did_stop_scanning_event: + self._did_stop_scanning_event.set() + + def observeValueForKeyPath_ofObject_change_context_( + self, keyPath: NSString, object: Any, change: NSDictionary, context: int + ) -> None: + logger.debug("'%s' changed", keyPath) + + if keyPath != "isScanning": + return + + is_scanning = bool(change[NSKeyValueChangeNewKey]) + self.event_loop.call_soon_threadsafe(self._changed_is_scanning, is_scanning) + + # Protocol Functions + + def centralManagerDidUpdateState_(self, centralManager: CBCentralManager) -> None: + logger.debug("centralManagerDidUpdateState_") + if centralManager.state() == CBManagerStateUnknown: + logger.debug("Cannot detect bluetooth device") + elif centralManager.state() == CBManagerStateResetting: + logger.debug("Bluetooth is resetting") + elif centralManager.state() == CBManagerStateUnsupported: + logger.debug("Bluetooth is unsupported") + elif centralManager.state() == CBManagerStateUnauthorized: + logger.debug("Bluetooth is unauthorized") + elif centralManager.state() == CBManagerStatePoweredOff: + logger.debug("Bluetooth powered off") + elif centralManager.state() == CBManagerStatePoweredOn: + logger.debug("Bluetooth powered on") + + self._did_update_state_event.set() + + @objc.python_method + def did_discover_peripheral( + self, + central: CBCentralManager, + peripheral: CBPeripheral, + advertisementData: NSDictionary, + RSSI: NSNumber, + ) -> None: + # Note: this function might be called several times for same device. + # This can happen for instance when an active scan is done, and the + # second call with contain the data from the BLE scan response. + # Example a first time with the following keys in advertisementData: + # ['kCBAdvDataLocalName', 'kCBAdvDataIsConnectable', 'kCBAdvDataChannel'] + # ... 
and later a second time with other keys (and values) such as: + # ['kCBAdvDataServiceUUIDs', 'kCBAdvDataIsConnectable', 'kCBAdvDataChannel'] + # + # i.e it is best not to trust advertisementData for later use and data + # from it should be copied. + # + # This behaviour could be affected by the + # CBCentralManagerScanOptionAllowDuplicatesKey global setting. + + uuid_string = peripheral.identifier().UUIDString() + + for callback in self.callbacks.values(): + if callback: + callback(peripheral, advertisementData, RSSI) + + logger.debug( + "Discovered device %s: %s @ RSSI: %d (kCBAdvData %r) and Central: %r", + uuid_string, + peripheral.name(), + RSSI, + advertisementData.keys(), + central, + ) + + def centralManager_didDiscoverPeripheral_advertisementData_RSSI_( + self, + central: CBCentralManager, + peripheral: CBPeripheral, + advertisementData: NSDictionary, + RSSI: NSNumber, + ) -> None: + logger.debug("centralManager_didDiscoverPeripheral_advertisementData_RSSI_") + self.event_loop.call_soon_threadsafe( + self.did_discover_peripheral, + central, + peripheral, + advertisementData, + RSSI, + ) + + @objc.python_method + def did_connect_peripheral( + self, central: CBCentralManager, peripheral: CBPeripheral + ) -> None: + future = self._connect_futures.get(peripheral.identifier(), None) + if future is not None: + future.set_result(True) + + def centralManager_didConnectPeripheral_( + self, central: CBCentralManager, peripheral: CBPeripheral + ) -> None: + logger.debug("centralManager_didConnectPeripheral_") + self.event_loop.call_soon_threadsafe( + self.did_connect_peripheral, + central, + peripheral, + ) + + @objc.python_method + def did_fail_to_connect_peripheral( + self, + centralManager: CBCentralManager, + peripheral: CBPeripheral, + error: Optional[NSError], + ) -> None: + future = self._connect_futures.get(peripheral.identifier(), None) + if future is not None: + if error is not None: + future.set_exception(BleakError(f"failed to connect: {error}")) + else: + future.set_result(False) + + def centralManager_didFailToConnectPeripheral_error_( + self, + centralManager: CBCentralManager, + peripheral: CBPeripheral, + error: Optional[NSError], + ) -> None: + logger.debug("centralManager_didFailToConnectPeripheral_error_") + self.event_loop.call_soon_threadsafe( + self.did_fail_to_connect_peripheral, + centralManager, + peripheral, + error, + ) + + @objc.python_method + def did_disconnect_peripheral( + self, + central: CBCentralManager, + peripheral: CBPeripheral, + error: Optional[NSError], + ) -> None: + logger.debug("Peripheral Device disconnected!") + + future = self._disconnect_futures.get(peripheral.identifier(), None) + if future is not None: + if error is not None: + future.set_exception(BleakError(f"disconnect failed: {error}")) + else: + future.set_result(None) + + callback = self._disconnect_callbacks.pop(peripheral.identifier(), None) + + if callback is not None: + callback() + + def centralManager_didDisconnectPeripheral_error_( + self, + central: CBCentralManager, + peripheral: CBPeripheral, + error: Optional[NSError], + ) -> None: + logger.debug("centralManager_didDisconnectPeripheral_error_") + self.event_loop.call_soon_threadsafe( + self.did_disconnect_peripheral, + central, + peripheral, + error, + ) diff --git a/bleak/backends/corebluetooth/PeripheralDelegate.py b/bleak/backends/corebluetooth/PeripheralDelegate.py new file mode 100644 index 0000000..d48f477 --- /dev/null +++ b/bleak/backends/corebluetooth/PeripheralDelegate.py @@ -0,0 +1,629 @@ +""" + +PeripheralDelegate 
+ +Created by kevincar + +""" + +from __future__ import annotations + +import asyncio +import itertools +import logging +import sys +from typing import Any, Dict, Iterable, NewType, Optional + +if sys.version_info < (3, 11): + from async_timeout import timeout as async_timeout +else: + from asyncio import timeout as async_timeout + +import objc +from CoreBluetooth import ( + CBCharacteristic, + CBCharacteristicWriteWithResponse, + CBDescriptor, + CBPeripheral, + CBService, +) +from Foundation import NSUUID, NSArray, NSData, NSError, NSNumber, NSObject, NSString + +from ...exc import BleakError +from ..client import NotifyCallback + +# logging.basicConfig(level=logging.DEBUG) +logger = logging.getLogger(__name__) + +CBPeripheralDelegate = objc.protocolNamed("CBPeripheralDelegate") + +CBCharacteristicWriteType = NewType("CBCharacteristicWriteType", int) + + +class PeripheralDelegate(NSObject): + """macOS conforming python class for managing the PeripheralDelegate for BLE""" + + ___pyobjc_protocols__ = [CBPeripheralDelegate] + + def initWithPeripheral_( + self, peripheral: CBPeripheral + ) -> Optional[PeripheralDelegate]: + """macOS init function for NSObject""" + self = objc.super(PeripheralDelegate, self).init() + + if self is None: + return None + + self.peripheral = peripheral + self.peripheral.setDelegate_(self) + + self._event_loop = asyncio.get_running_loop() + self._services_discovered_future = self._event_loop.create_future() + + self._service_characteristic_discovered_futures: Dict[int, asyncio.Future] = {} + self._characteristic_descriptor_discover_futures: Dict[int, asyncio.Future] = {} + + self._characteristic_read_futures: Dict[int, asyncio.Future] = {} + self._characteristic_write_futures: Dict[int, asyncio.Future] = {} + + self._descriptor_read_futures: Dict[int, asyncio.Future] = {} + self._descriptor_write_futures: Dict[int, asyncio.Future] = {} + + self._characteristic_notify_change_futures: Dict[int, asyncio.Future] = {} + self._characteristic_notify_callbacks: Dict[int, NotifyCallback] = {} + + self._read_rssi_futures: Dict[NSUUID, asyncio.Future] = {} + + return self + + @objc.python_method + def futures(self) -> Iterable[asyncio.Future]: + """ + Gets all futures for this delegate. + + These can be used to handle any pending futures when a peripheral is disconnected. 
+ """ + services_discovered_future = ( + (self._services_discovered_future,) + if hasattr(self, "_services_discovered_future") + else () + ) + + return itertools.chain( + services_discovered_future, + self._service_characteristic_discovered_futures.values(), + self._characteristic_descriptor_discover_futures.values(), + self._characteristic_read_futures.values(), + self._characteristic_write_futures.values(), + self._descriptor_read_futures.values(), + self._descriptor_write_futures.values(), + self._characteristic_notify_change_futures.values(), + self._read_rssi_futures.values(), + ) + + @objc.python_method + async def discover_services(self, services: Optional[NSArray]) -> NSArray: + future = self._event_loop.create_future() + + self._services_discovered_future = future + try: + self.peripheral.discoverServices_(services) + return await future + finally: + del self._services_discovered_future + + @objc.python_method + async def discover_characteristics(self, service: CBService) -> NSArray: + future = self._event_loop.create_future() + + self._service_characteristic_discovered_futures[service.startHandle()] = future + try: + self.peripheral.discoverCharacteristics_forService_(None, service) + return await future + finally: + del self._service_characteristic_discovered_futures[service.startHandle()] + + @objc.python_method + async def discover_descriptors(self, characteristic: CBCharacteristic) -> NSArray: + future = self._event_loop.create_future() + + self._characteristic_descriptor_discover_futures[characteristic.handle()] = ( + future + ) + try: + self.peripheral.discoverDescriptorsForCharacteristic_(characteristic) + await future + finally: + del self._characteristic_descriptor_discover_futures[ + characteristic.handle() + ] + + return characteristic.descriptors() + + @objc.python_method + async def read_characteristic( + self, + characteristic: CBCharacteristic, + use_cached: bool = True, + timeout: int = 20, + ) -> NSData: + if characteristic.value() is not None and use_cached: + return characteristic.value() + + future = self._event_loop.create_future() + + self._characteristic_read_futures[characteristic.handle()] = future + try: + self.peripheral.readValueForCharacteristic_(characteristic) + async with async_timeout(timeout): + return await future + finally: + del self._characteristic_read_futures[characteristic.handle()] + + @objc.python_method + async def read_descriptor( + self, descriptor: CBDescriptor, use_cached: bool = True + ) -> Any: + if descriptor.value() is not None and use_cached: + return descriptor.value() + + future = self._event_loop.create_future() + + self._descriptor_read_futures[descriptor.handle()] = future + try: + self.peripheral.readValueForDescriptor_(descriptor) + return await future + finally: + del self._descriptor_read_futures[descriptor.handle()] + + @objc.python_method + async def write_characteristic( + self, + characteristic: CBCharacteristic, + value: NSData, + response: CBCharacteristicWriteType, + ) -> None: + # in CoreBluetooth there is no indication of success or failure of + # CBCharacteristicWriteWithoutResponse + if response == CBCharacteristicWriteWithResponse: + future = self._event_loop.create_future() + + self._characteristic_write_futures[characteristic.handle()] = future + try: + self.peripheral.writeValue_forCharacteristic_type_( + value, characteristic, response + ) + await future + finally: + del self._characteristic_write_futures[characteristic.handle()] + else: + self.peripheral.writeValue_forCharacteristic_type_( + value, 
characteristic, response + ) + + @objc.python_method + async def write_descriptor(self, descriptor: CBDescriptor, value: NSData) -> None: + future = self._event_loop.create_future() + + self._descriptor_write_futures[descriptor.handle()] = future + try: + self.peripheral.writeValue_forDescriptor_(value, descriptor) + await future + finally: + del self._descriptor_write_futures[descriptor.handle()] + + @objc.python_method + async def start_notifications( + self, characteristic: CBCharacteristic, callback: NotifyCallback + ) -> None: + c_handle = characteristic.handle() + if c_handle in self._characteristic_notify_callbacks: + raise ValueError("Characteristic notifications already started") + + self._characteristic_notify_callbacks[c_handle] = callback + + future = self._event_loop.create_future() + + self._characteristic_notify_change_futures[c_handle] = future + try: + self.peripheral.setNotifyValue_forCharacteristic_(True, characteristic) + await future + finally: + del self._characteristic_notify_change_futures[c_handle] + + @objc.python_method + async def stop_notifications(self, characteristic: CBCharacteristic) -> None: + c_handle = characteristic.handle() + if c_handle not in self._characteristic_notify_callbacks: + raise ValueError("Characteristic notification never started") + + future = self._event_loop.create_future() + + self._characteristic_notify_change_futures[c_handle] = future + try: + self.peripheral.setNotifyValue_forCharacteristic_(False, characteristic) + await future + finally: + del self._characteristic_notify_change_futures[c_handle] + + self._characteristic_notify_callbacks.pop(c_handle) + + @objc.python_method + async def read_rssi(self) -> NSNumber: + future = self._event_loop.create_future() + + self._read_rssi_futures[self.peripheral.identifier()] = future + try: + self.peripheral.readRSSI() + return await future + finally: + del self._read_rssi_futures[self.peripheral.identifier()] + + # Protocol Functions + + @objc.python_method + def did_discover_services( + self, peripheral: CBPeripheral, services: NSArray, error: Optional[NSError] + ) -> None: + future = self._services_discovered_future + if error is not None: + exception = BleakError(f"Failed to discover services {error}") + future.set_exception(exception) + else: + logger.debug("Services discovered") + future.set_result(services) + + def peripheral_didDiscoverServices_( + self, peripheral: CBPeripheral, error: Optional[NSError] + ) -> None: + logger.debug("peripheral_didDiscoverServices_") + self._event_loop.call_soon_threadsafe( + self.did_discover_services, + peripheral, + peripheral.services(), + error, + ) + + @objc.python_method + def did_discover_characteristics_for_service( + self, + peripheral: CBPeripheral, + service: CBService, + characteristics: NSArray, + error: Optional[NSError], + ) -> None: + future = self._service_characteristic_discovered_futures.get( + service.startHandle() + ) + if not future: + logger.debug( + f"Unexpected event didDiscoverCharacteristicsForService for {service.startHandle()}" + ) + return + if error is not None: + exception = BleakError( + f"Failed to discover characteristics for service {service.startHandle()}: {error}" + ) + future.set_exception(exception) + else: + logger.debug("Characteristics discovered") + future.set_result(characteristics) + + def peripheral_didDiscoverCharacteristicsForService_error_( + self, peripheral: CBPeripheral, service: CBService, error: Optional[NSError] + ) -> None: + 
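# Delegate callbacks arrive on the CoreBluetooth dispatch queue, so they are marshalled onto the asyncio event loop. +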
logger.debug("peripheral_didDiscoverCharacteristicsForService_error_") + self._event_loop.call_soon_threadsafe( + self.did_discover_characteristics_for_service, + peripheral, + service, + service.characteristics(), + error, + ) + + @objc.python_method + def did_discover_descriptors_for_characteristic( + self, + peripheral: CBPeripheral, + characteristic: CBCharacteristic, + error: Optional[NSError], + ) -> None: + future = self._characteristic_descriptor_discover_futures.get( + characteristic.handle() + ) + if not future: + logger.warning( + f"Unexpected event didDiscoverDescriptorsForCharacteristic for {characteristic.handle()}" + ) + return + if error is not None: + exception = BleakError( + f"Failed to discover descriptors for characteristic {characteristic.handle()}: {error}" + ) + future.set_exception(exception) + else: + logger.debug(f"Descriptor discovered {characteristic.handle()}") + future.set_result(None) + + def peripheral_didDiscoverDescriptorsForCharacteristic_error_( + self, + peripheral: CBPeripheral, + characteristic: CBCharacteristic, + error: Optional[NSError], + ) -> None: + logger.debug("peripheral_didDiscoverDescriptorsForCharacteristic_error_") + self._event_loop.call_soon_threadsafe( + self.did_discover_descriptors_for_characteristic, + peripheral, + characteristic, + error, + ) + + @objc.python_method + def did_update_value_for_characteristic( + self, + peripheral: CBPeripheral, + characteristic: CBCharacteristic, + value: NSData, + error: Optional[NSError], + ) -> None: + c_handle = characteristic.handle() + + future = self._characteristic_read_futures.get(c_handle) + + # If there is no pending read request, then this must be a notification + # (the same delegate callback is used by both). + if not future: + if error is None: + notify_callback = self._characteristic_notify_callbacks.get(c_handle) + + if notify_callback: + notify_callback(bytearray(value)) + return + + if error is not None: + exception = BleakError(f"Failed to read characteristic {c_handle}: {error}") + future.set_exception(exception) + else: + logger.debug("Read characteristic value") + future.set_result(value) + + def peripheral_didUpdateValueForCharacteristic_error_( + self, + peripheral: CBPeripheral, + characteristic: CBCharacteristic, + error: Optional[NSError], + ) -> None: + logger.debug("peripheral_didUpdateValueForCharacteristic_error_") + self._event_loop.call_soon_threadsafe( + self.did_update_value_for_characteristic, + peripheral, + characteristic, + characteristic.value(), + error, + ) + + @objc.python_method + def did_update_value_for_descriptor( + self, + peripheral: CBPeripheral, + descriptor: CBDescriptor, + value: NSObject, + error: Optional[NSError], + ) -> None: + future = self._descriptor_read_futures.get(descriptor.handle()) + if not future: + logger.warning("Unexpected event didUpdateValueForDescriptor") + return + if error is not None: + exception = BleakError( + f"Failed to read descriptor {descriptor.handle()}: {error}" + ) + future.set_exception(exception) + else: + logger.debug("Read descriptor value") + future.set_result(value) + + def peripheral_didUpdateValueForDescriptor_error_( + self, + peripheral: CBPeripheral, + descriptor: CBDescriptor, + error: Optional[NSError], + ) -> None: + logger.debug("peripheral_didUpdateValueForDescriptor_error_") + self._event_loop.call_soon_threadsafe( + self.did_update_value_for_descriptor, + peripheral, + descriptor, + descriptor.value(), + error, + ) + + @objc.python_method + def did_write_value_for_characteristic( + self, + 
peripheral: CBPeripheral, + characteristic: CBCharacteristic, + error: Optional[NSError], + ) -> None: + future = self._characteristic_write_futures.get(characteristic.handle(), None) + if not future: + return # event only expected on write with response + if error is not None: + exception = BleakError( + f"Failed to write characteristic {characteristic.handle()}: {error}" + ) + future.set_exception(exception) + else: + logger.debug("Write Characteristic Value") + future.set_result(None) + + def peripheral_didWriteValueForCharacteristic_error_( + self, + peripheral: CBPeripheral, + characteristic: CBCharacteristic, + error: Optional[NSError], + ) -> None: + logger.debug("peripheral_didWriteValueForCharacteristic_error_") + self._event_loop.call_soon_threadsafe( + self.did_write_value_for_characteristic, + peripheral, + characteristic, + error, + ) + + @objc.python_method + def did_write_value_for_descriptor( + self, + peripheral: CBPeripheral, + descriptor: CBDescriptor, + error: Optional[NSError], + ) -> None: + future = self._descriptor_write_futures.get(descriptor.handle()) + if not future: + logger.warning("Unexpected event didWriteValueForDescriptor") + return + if error is not None: + exception = BleakError( + f"Failed to write descriptor {descriptor.handle()}: {error}" + ) + future.set_exception(exception) + else: + logger.debug("Write Descriptor Value") + future.set_result(None) + + def peripheral_didWriteValueForDescriptor_error_( + self, + peripheral: CBPeripheral, + descriptor: CBDescriptor, + error: Optional[NSError], + ) -> None: + logger.debug("peripheral_didWriteValueForDescriptor_error_") + self._event_loop.call_soon_threadsafe( + self.did_write_value_for_descriptor, + peripheral, + descriptor, + error, + ) + + @objc.python_method + def did_update_notification_for_characteristic( + self, + peripheral: CBPeripheral, + characteristic: CBCharacteristic, + error: Optional[NSError], + ) -> None: + c_handle = characteristic.handle() + future = self._characteristic_notify_change_futures.get(c_handle) + if not future: + logger.warning( + "Unexpected event didUpdateNotificationStateForCharacteristic" + ) + return + if error is not None: + exception = BleakError( + f"Failed to update the notification status for characteristic {c_handle}: {error}" + ) + future.set_exception(exception) + else: + logger.debug("Character Notify Update") + future.set_result(None) + + def peripheral_didUpdateNotificationStateForCharacteristic_error_( + self, + peripheral: CBPeripheral, + characteristic: CBCharacteristic, + error: Optional[NSError], + ) -> None: + logger.debug("peripheral_didUpdateNotificationStateForCharacteristic_error_") + self._event_loop.call_soon_threadsafe( + self.did_update_notification_for_characteristic, + peripheral, + characteristic, + error, + ) + + @objc.python_method + def did_read_rssi( + self, peripheral: CBPeripheral, rssi: NSNumber, error: Optional[NSError] + ) -> None: + future = self._read_rssi_futures.get(peripheral.identifier(), None) + + if not future: + logger.warning("Unexpected event did_read_rssi") + return + + if error is not None: + exception = BleakError(f"Failed to read RSSI: {error}") + future.set_exception(exception) + else: + future.set_result(rssi) + + # peripheral_didReadRSSI_error_ method is added dynamically later + + # Bleak currently doesn't use the callbacks below other than for debug logging + + @objc.python_method + def did_update_name(self, peripheral: CBPeripheral, name: NSString) -> None: + logger.debug(f"name of {peripheral.identifier()} 
changed to {name}") + + def peripheralDidUpdateName_(self, peripheral: CBPeripheral) -> None: + logger.debug("peripheralDidUpdateName_") + self._event_loop.call_soon_threadsafe( + self.did_update_name, peripheral, peripheral.name() + ) + + @objc.python_method + def did_modify_services( + self, peripheral: CBPeripheral, invalidated_services: NSArray + ) -> None: + logger.debug( + f"{peripheral.identifier()} invalidated services: {invalidated_services}" + ) + + def peripheral_didModifyServices_( + self, peripheral: CBPeripheral, invalidatedServices: NSArray + ) -> None: + logger.debug("peripheral_didModifyServices_") + self._event_loop.call_soon_threadsafe( + self.did_modify_services, peripheral, invalidatedServices + ) + + +# peripheralDidUpdateRSSI:error: was deprecated and replaced with +# peripheral:didReadRSSI:error: in macOS 10.13 +if objc.macos_available(10, 13): + + def peripheral_didReadRSSI_error_( + self: PeripheralDelegate, + peripheral: CBPeripheral, + rssi: NSNumber, + error: Optional[NSError], + ) -> None: + logger.debug("peripheral_didReadRSSI_error_") + self._event_loop.call_soon_threadsafe( + self.did_read_rssi, peripheral, rssi, error + ) + + objc.classAddMethod( + PeripheralDelegate, + b"peripheral:didReadRSSI:error:", + peripheral_didReadRSSI_error_, + ) + + +else: + + def peripheralDidUpdateRSSI_error_( + self: PeripheralDelegate, peripheral: CBPeripheral, error: Optional[NSError] + ) -> None: + logger.debug("peripheralDidUpdateRSSI_error_") + self._event_loop.call_soon_threadsafe( + self.did_read_rssi, peripheral, peripheral.RSSI(), error + ) + + objc.classAddMethod( + PeripheralDelegate, + b"peripheralDidUpdateRSSI:error:", + peripheralDidUpdateRSSI_error_, + ) diff --git a/bleak/backends/corebluetooth/__init__.py b/bleak/backends/corebluetooth/__init__.py new file mode 100644 index 0000000..ed160a3 --- /dev/null +++ b/bleak/backends/corebluetooth/__init__.py @@ -0,0 +1,11 @@ +# -*- coding: utf-8 -*- +""" +__init__.py + +Created on 2017-11-19 by hbldh + +""" + +import objc + +objc.options.verbose = True diff --git a/bleak/backends/corebluetooth/characteristic.py b/bleak/backends/corebluetooth/characteristic.py new file mode 100644 index 0000000..4bf6171 --- /dev/null +++ b/bleak/backends/corebluetooth/characteristic.py @@ -0,0 +1,121 @@ +""" +Interface class for the Bleak representation of a GATT Characteristic + +Created on 2019-06-28 by kevincar + +""" + +from enum import Enum +from typing import Callable, Dict, List, Optional, Tuple, Union + +from CoreBluetooth import CBCharacteristic + +from ..characteristic import BleakGATTCharacteristic +from ..descriptor import BleakGATTDescriptor +from .descriptor import BleakGATTDescriptorCoreBluetooth +from .utils import cb_uuid_to_str + + +class CBCharacteristicProperties(Enum): + BROADCAST = 0x1 + READ = 0x2 + WRITE_WITHOUT_RESPONSE = 0x4 + WRITE = 0x8 + NOTIFY = 0x10 + INDICATE = 0x20 + AUTHENTICATED_SIGNED_WRITES = 0x40 + EXTENDED_PROPERTIES = 0x80 + NOTIFY_ENCRYPTION_REQUIRED = 0x100 + INDICATE_ENCRYPTION_REQUIRED = 0x200 + + +_GattCharacteristicsPropertiesEnum: Dict[Optional[int], Tuple[str, str]] = { + None: ("None", "The characteristic doesn’t have any properties that apply"), + 1: ("Broadcast".lower(), "The characteristic supports broadcasting"), + 2: ("Read".lower(), "The characteristic is readable"), + 4: ( + "Write-Without-Response".lower(), + "The characteristic supports Write Without Response", + ), + 8: ("Write".lower(), "The characteristic is writable"), + 16: ("Notify".lower(), "The characteristic is 
notifiable"), + 32: ("Indicate".lower(), "The characteristic is indicatable"), + 64: ( + "Authenticated-Signed-Writes".lower(), + "The characteristic supports signed writes", + ), + 128: ( + "Extended-Properties".lower(), + "The ExtendedProperties Descriptor is present", + ), + 256: ("Reliable-Writes".lower(), "The characteristic supports reliable writes"), + 512: ( + "Writable-Auxiliaries".lower(), + "The characteristic has writable auxiliaries", + ), +} + + +class BleakGATTCharacteristicCoreBluetooth(BleakGATTCharacteristic): + """GATT Characteristic implementation for the CoreBluetooth backend""" + + def __init__( + self, obj: CBCharacteristic, max_write_without_response_size: Callable[[], int] + ): + super().__init__(obj, max_write_without_response_size) + self.__descriptors: List[BleakGATTDescriptorCoreBluetooth] = [] + # self.__props = obj.properties() + self.__props: List[str] = [ + _GattCharacteristicsPropertiesEnum[v][0] + for v in [2**n for n in range(10)] + if (self.obj.properties() & v) + ] + self._uuid: str = cb_uuid_to_str(self.obj.UUID()) + + @property + def service_uuid(self) -> str: + """The uuid of the Service containing this characteristic""" + return cb_uuid_to_str(self.obj.service().UUID()) + + @property + def service_handle(self) -> int: + return int(self.obj.service().startHandle()) + + @property + def handle(self) -> int: + """Integer handle for this characteristic""" + return int(self.obj.handle()) + + @property + def uuid(self) -> str: + """The uuid of this characteristic""" + return self._uuid + + @property + def properties(self) -> List[str]: + """Properties of this characteristic""" + return self.__props + + @property + def descriptors(self) -> List[BleakGATTDescriptor]: + """List of descriptors for this service""" + return self.__descriptors + + def get_descriptor(self, specifier) -> Union[BleakGATTDescriptor, None]: + """Get a descriptor by handle (int) or UUID (str or uuid.UUID)""" + try: + if isinstance(specifier, int): + return next(filter(lambda x: x.handle == specifier, self.descriptors)) + else: + return next( + filter(lambda x: x.uuid == str(specifier), self.descriptors) + ) + except StopIteration: + return None + + def add_descriptor(self, descriptor: BleakGATTDescriptor): + """Add a :py:class:`~BleakGATTDescriptor` to the characteristic. + + Should not be used by end user, but rather by `bleak` itself. + """ + self.__descriptors.append(descriptor) diff --git a/bleak/backends/corebluetooth/client.py b/bleak/backends/corebluetooth/client.py new file mode 100644 index 0000000..a682dad --- /dev/null +++ b/bleak/backends/corebluetooth/client.py @@ -0,0 +1,389 @@ +""" +BLE Client for CoreBluetooth on macOS + +Created on 2019-06-26 by kevincar +""" + +import asyncio +import logging +import sys +import uuid +from typing import Optional, Set, Union + +if sys.version_info < (3, 12): + from typing_extensions import Buffer +else: + from collections.abc import Buffer + +from CoreBluetooth import ( + CBUUID, + CBCharacteristicWriteWithoutResponse, + CBCharacteristicWriteWithResponse, + CBPeripheral, + CBPeripheralStateConnected, +) +from Foundation import NSArray, NSData + +from ... 
import BleakScanner +from ...exc import ( + BleakCharacteristicNotFoundError, + BleakDeviceNotFoundError, + BleakError, +) +from ..characteristic import BleakGATTCharacteristic +from ..client import BaseBleakClient, NotifyCallback +from ..device import BLEDevice +from ..service import BleakGATTServiceCollection +from .CentralManagerDelegate import CentralManagerDelegate +from .characteristic import BleakGATTCharacteristicCoreBluetooth +from .descriptor import BleakGATTDescriptorCoreBluetooth +from .PeripheralDelegate import PeripheralDelegate +from .scanner import BleakScannerCoreBluetooth +from .service import BleakGATTServiceCoreBluetooth +from .utils import cb_uuid_to_str + +logger = logging.getLogger(__name__) + + +class BleakClientCoreBluetooth(BaseBleakClient): + """CoreBluetooth class interface for BleakClient + + Args: + address_or_ble_device (`BLEDevice` or str): The Bluetooth address of the BLE peripheral to connect to or the `BLEDevice` object representing it. + services: Optional set of service UUIDs that will be used. + + Keyword Args: + timeout (float): Timeout for required ``BleakScanner.find_device_by_address`` call. Defaults to 10.0. + + """ + + def __init__( + self, + address_or_ble_device: Union[BLEDevice, str], + services: Optional[Set[str]] = None, + **kwargs, + ): + super(BleakClientCoreBluetooth, self).__init__(address_or_ble_device, **kwargs) + + self._peripheral: Optional[CBPeripheral] = None + self._delegate: Optional[PeripheralDelegate] = None + self._central_manager_delegate: Optional[CentralManagerDelegate] = None + + if isinstance(address_or_ble_device, BLEDevice): + ( + self._peripheral, + self._central_manager_delegate, + ) = address_or_ble_device.details + + self._requested_services = ( + NSArray.alloc().initWithArray_(list(map(CBUUID.UUIDWithString_, services))) + if services + else None + ) + + def __str__(self) -> str: + return "BleakClientCoreBluetooth ({})".format(self.address) + + async def connect(self, **kwargs) -> bool: + """Connect to a specified Peripheral + + Keyword Args: + timeout (float): Timeout for required ``BleakScanner.find_device_by_address`` call. Defaults to 10.0. + + Returns: + Boolean representing connection status. + + """ + timeout = kwargs.get("timeout", self._timeout) + if self._peripheral is None: + device = await BleakScanner.find_device_by_address( + self.address, timeout=timeout, backend=BleakScannerCoreBluetooth + ) + + if device: + self._peripheral, self._central_manager_delegate = device.details + else: + raise BleakDeviceNotFoundError( + self.address, f"Device with address {self.address} was not found" + ) + + if self._delegate is None: + self._delegate = PeripheralDelegate.alloc().initWithPeripheral_( + self._peripheral + ) + + def disconnect_callback() -> None: + # Ensure that `get_services` retrieves services again, rather + # than using the cached object + self.services = None + + # If there are any pending futures waiting for delegate callbacks, we + # need to raise an exception since the callback will no longer be + # called because the device is disconnected. 
+ for future in self._delegate.futures(): + try: + future.set_exception(BleakError("disconnected")) + except asyncio.InvalidStateError: + # the future was already done + pass + + if self._disconnected_callback: + self._disconnected_callback() + + manager = self._central_manager_delegate + logger.debug("CentralManagerDelegate at {}".format(manager)) + logger.debug("Connecting to BLE device @ {}".format(self.address)) + await manager.connect(self._peripheral, disconnect_callback, timeout=timeout) + + # Now get services + await self.get_services() + + return True + + async def disconnect(self) -> bool: + """Disconnect from the peripheral device""" + if ( + self._peripheral is None + or self._peripheral.state() != CBPeripheralStateConnected + ): + return True + + await self._central_manager_delegate.disconnect(self._peripheral) + + return True + + @property + def is_connected(self) -> bool: + """Checks for current active connection""" + return self._DeprecatedIsConnectedReturn( + False + if self._peripheral is None + else self._peripheral.state() == CBPeripheralStateConnected + ) + + @property + def mtu_size(self) -> int: + """Get ATT MTU size for active connection""" + # Use type CBCharacteristicWriteWithoutResponse to get maximum write + # value length based on the negotiated ATT MTU size. Add the ATT header + # length (+3) to get the actual ATT MTU size. + return ( + self._peripheral.maximumWriteValueLengthForType_( + CBCharacteristicWriteWithoutResponse + ) + + 3 + ) + + async def pair(self, *args, **kwargs) -> bool: + """Attempt to pair with a peripheral. + + .. note:: + + This is not available on macOS since there is not explicit method to do a pairing, Instead the docs + state that it "auto-pairs" when trying to read a characteristic that requires encryption, something + Bleak cannot do apparently. + + Reference: + + - `Apple Docs `_ + - `Stack Overflow post #1 `_ + - `Stack Overflow post #2 `_ + + Returns: + Boolean regarding success of pairing. + + """ + raise NotImplementedError("Pairing is not available in Core Bluetooth.") + + async def unpair(self) -> bool: + """ + + Returns: + + """ + raise NotImplementedError("Pairing is not available in Core Bluetooth.") + + async def get_services(self, **kwargs) -> BleakGATTServiceCollection: + """Get all services registered for this GATT server. + + Returns: + A :py:class:`bleak.backends.service.BleakGATTServiceCollection` with this device's services tree. 
+ + """ + if self.services is not None: + return self.services + + services = BleakGATTServiceCollection() + + logger.debug("Retrieving services...") + cb_services = await self._delegate.discover_services(self._requested_services) + + for service in cb_services: + serviceUUID = service.UUID().UUIDString() + logger.debug( + "Retrieving characteristics for service {}".format(serviceUUID) + ) + characteristics = await self._delegate.discover_characteristics(service) + + services.add_service(BleakGATTServiceCoreBluetooth(service)) + + for characteristic in characteristics: + cUUID = characteristic.UUID().UUIDString() + logger.debug( + "Retrieving descriptors for characteristic {}".format(cUUID) + ) + descriptors = await self._delegate.discover_descriptors(characteristic) + + services.add_characteristic( + BleakGATTCharacteristicCoreBluetooth( + characteristic, + lambda: self._peripheral.maximumWriteValueLengthForType_( + CBCharacteristicWriteWithoutResponse + ), + ) + ) + for descriptor in descriptors: + services.add_descriptor( + BleakGATTDescriptorCoreBluetooth( + descriptor, + cb_uuid_to_str(characteristic.UUID()), + int(characteristic.handle()), + ) + ) + logger.debug("Services resolved for %s", str(self)) + self.services = services + return self.services + + async def read_gatt_char( + self, + char_specifier: Union[BleakGATTCharacteristic, int, str, uuid.UUID], + use_cached: bool = False, + **kwargs, + ) -> bytearray: + """Perform read operation on the specified GATT characteristic. + + Args: + char_specifier (BleakGATTCharacteristic, int, str or UUID): The characteristic to read from, + specified by either integer handle, UUID or directly by the + BleakGATTCharacteristic object representing it. + use_cached (bool): `False` forces macOS to read the value from the + device again and not use its own cached value. Defaults to `False`. + + Returns: + (bytearray) The read data. + + """ + if not isinstance(char_specifier, BleakGATTCharacteristic): + characteristic = self.services.get_characteristic(char_specifier) + else: + characteristic = char_specifier + if not characteristic: + raise BleakCharacteristicNotFoundError(char_specifier) + + output = await self._delegate.read_characteristic( + characteristic.obj, use_cached=use_cached + ) + value = bytearray(output) + logger.debug("Read Characteristic {0} : {1}".format(characteristic.uuid, value)) + return value + + async def read_gatt_descriptor( + self, handle: int, use_cached: bool = False, **kwargs + ) -> bytearray: + """Perform read operation on the specified GATT descriptor. + + Args: + handle (int): The handle of the descriptor to read from. + use_cached (bool): `False` forces Windows to read the value from the + device again and not use its own cached value. Defaults to `False`. + + Returns: + (bytearray) The read data. + """ + descriptor = self.services.get_descriptor(handle) + if not descriptor: + raise BleakError("Descriptor {} was not found!".format(handle)) + + output = await self._delegate.read_descriptor( + descriptor.obj, use_cached=use_cached + ) + if isinstance( + output, str + ): # Sometimes a `pyobjc_unicode`or `__NSCFString` is returned and they can be used as regular Python strings. 
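+ # Encode the Cocoa string back to bytes so callers always receive a bytearray.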
+ value = bytearray(output.encode("utf-8")) + else: # _NSInlineData + value = bytearray(output) # value.getBytes_length_(None, len(value)) + logger.debug("Read Descriptor {0} : {1}".format(handle, value)) + return value + + async def write_gatt_char( + self, + characteristic: BleakGATTCharacteristic, + data: Buffer, + response: bool, + ) -> None: + value = NSData.alloc().initWithBytes_length_(data, len(data)) + await self._delegate.write_characteristic( + characteristic.obj, + value, + ( + CBCharacteristicWriteWithResponse + if response + else CBCharacteristicWriteWithoutResponse + ), + ) + logger.debug(f"Write Characteristic {characteristic.uuid} : {data}") + + async def write_gatt_descriptor(self, handle: int, data: Buffer) -> None: + """Perform a write operation on the specified GATT descriptor. + + Args: + handle: The handle of the descriptor to read from. + data: The data to send (any bytes-like object). + + """ + descriptor = self.services.get_descriptor(handle) + if not descriptor: + raise BleakError("Descriptor {} was not found!".format(handle)) + + value = NSData.alloc().initWithBytes_length_(data, len(data)) + await self._delegate.write_descriptor(descriptor.obj, value) + logger.debug("Write Descriptor {0} : {1}".format(handle, data)) + + async def start_notify( + self, + characteristic: BleakGATTCharacteristic, + callback: NotifyCallback, + **kwargs, + ) -> None: + """ + Activate notifications/indications on a characteristic. + """ + assert self._delegate is not None + + await self._delegate.start_notifications(characteristic.obj, callback) + + async def stop_notify( + self, char_specifier: Union[BleakGATTCharacteristic, int, str, uuid.UUID] + ) -> None: + """Deactivate notification/indication on a specified characteristic. + + Args: + char_specifier (BleakGATTCharacteristic, int, str or UUID): The characteristic to deactivate + notification/indication on, specified by either integer handle, UUID or + directly by the BleakGATTCharacteristic object representing it. 
+ + + """ + if not isinstance(char_specifier, BleakGATTCharacteristic): + characteristic = self.services.get_characteristic(char_specifier) + else: + characteristic = char_specifier + if not characteristic: + raise BleakCharacteristicNotFoundError(char_specifier) + + await self._delegate.stop_notifications(characteristic.obj) + + async def get_rssi(self) -> int: + """To get RSSI value in dBm of the connected Peripheral""" + return int(await self._delegate.read_rssi()) diff --git a/bleak/backends/corebluetooth/descriptor.py b/bleak/backends/corebluetooth/descriptor.py new file mode 100644 index 0000000..646a160 --- /dev/null +++ b/bleak/backends/corebluetooth/descriptor.py @@ -0,0 +1,43 @@ +""" +Interface class for the Bleak representation of a GATT Descriptor + +Created on 2019-06-28 by kevincar + +""" + +from CoreBluetooth import CBDescriptor + +from ..corebluetooth.utils import cb_uuid_to_str +from ..descriptor import BleakGATTDescriptor + + +class BleakGATTDescriptorCoreBluetooth(BleakGATTDescriptor): + """GATT Descriptor implementation for CoreBluetooth backend""" + + def __init__( + self, obj: CBDescriptor, characteristic_uuid: str, characteristic_handle: int + ): + super(BleakGATTDescriptorCoreBluetooth, self).__init__(obj) + self.obj: CBDescriptor = obj + self.__characteristic_uuid: str = characteristic_uuid + self.__characteristic_handle: int = characteristic_handle + + @property + def characteristic_handle(self) -> int: + """handle for the characteristic that this descriptor belongs to""" + return self.__characteristic_handle + + @property + def characteristic_uuid(self) -> str: + """UUID for the characteristic that this descriptor belongs to""" + return self.__characteristic_uuid + + @property + def uuid(self) -> str: + """UUID for this descriptor""" + return cb_uuid_to_str(self.obj.UUID()) + + @property + def handle(self) -> int: + """Integer handle for this descriptor""" + return int(self.obj.handle()) diff --git a/bleak/backends/corebluetooth/scanner.py b/bleak/backends/corebluetooth/scanner.py new file mode 100644 index 0000000..3491577 --- /dev/null +++ b/bleak/backends/corebluetooth/scanner.py @@ -0,0 +1,185 @@ +import logging +from typing import Any, Dict, List, Literal, Optional, TypedDict + +import objc +from CoreBluetooth import CBPeripheral +from Foundation import NSBundle + +from ...exc import BleakError +from ..scanner import AdvertisementData, AdvertisementDataCallback, BaseBleakScanner +from .CentralManagerDelegate import CentralManagerDelegate +from .utils import cb_uuid_to_str + +logger = logging.getLogger(__name__) + + +class CBScannerArgs(TypedDict, total=False): + """ + Platform-specific :class:`BleakScanner` args for the CoreBluetooth backend. + """ + + use_bdaddr: bool + """ + If true, use Bluetooth address instead of UUID. + + .. warning:: This uses an undocumented IOBluetooth API to get the Bluetooth + address and may break in the future macOS releases. `It is known to not + work on macOS 10.15 `_. + """ + + +class BleakScannerCoreBluetooth(BaseBleakScanner): + """The native macOS Bleak BLE Scanner. + + Documentation: + https://developer.apple.com/documentation/corebluetooth/cbcentralmanager + + CoreBluetooth doesn't explicitly use Bluetooth addresses to identify peripheral + devices because private devices may obscure their Bluetooth addresses. To cope + with this, CoreBluetooth utilizes UUIDs for each peripheral. Bleak uses + this for the BLEDevice address on macOS. 
+ + Args: + detection_callback: + Optional function that will be called each time a device is + discovered or advertising data has changed. + service_uuids: + Optional list of service UUIDs to filter on. Only advertisements + containing this advertising data will be received. Required on + macOS >= 12.0, < 12.3 (unless you create an app with ``py2app``). + scanning_mode: + Set to ``"passive"`` to avoid the ``"active"`` scanning mode. Not + supported on macOS! Will raise :class:`BleakError` if set to + ``"passive"`` + **timeout (float): + The scanning timeout to be used, in case of missing + ``stopScan_`` method. + """ + + def __init__( + self, + detection_callback: Optional[AdvertisementDataCallback], + service_uuids: Optional[List[str]], + scanning_mode: Literal["active", "passive"], + *, + cb: CBScannerArgs, + **kwargs + ): + super(BleakScannerCoreBluetooth, self).__init__( + detection_callback, service_uuids + ) + + self._use_bdaddr = cb.get("use_bdaddr", False) + + if scanning_mode == "passive": + raise BleakError("macOS does not support passive scanning") + + self._manager = CentralManagerDelegate.alloc().init() + self._timeout: float = kwargs.get("timeout", 5.0) + if ( + objc.macos_available(12, 0) + and not objc.macos_available(12, 3) + and not self._service_uuids + ): + # See https://github.com/hbldh/bleak/issues/720 + if NSBundle.mainBundle().bundleIdentifier() == "org.python.python": + logger.error( + "macOS 12.0, 12.1 and 12.2 require non-empty service_uuids kwarg, otherwise no advertisement data will be received" + ) + + async def start(self) -> None: + self.seen_devices = {} + + def callback(p: CBPeripheral, a: Dict[str, Any], r: int) -> None: + + service_uuids = [ + cb_uuid_to_str(u) for u in a.get("kCBAdvDataServiceUUIDs", []) + ] + + if not self.is_allowed_uuid(service_uuids): + return + + # Process service data + service_data_dict_raw = a.get("kCBAdvDataServiceData", {}) + service_data = { + cb_uuid_to_str(k): bytes(v) for k, v in service_data_dict_raw.items() + } + + # Process manufacturer data into a more friendly format + manufacturer_binary_data = a.get("kCBAdvDataManufacturerData") + manufacturer_data = {} + if manufacturer_binary_data: + manufacturer_id = int.from_bytes( + manufacturer_binary_data[0:2], byteorder="little" + ) + manufacturer_value = bytes(manufacturer_binary_data[2:]) + manufacturer_data[manufacturer_id] = manufacturer_value + + # set tx_power data if available + tx_power = a.get("kCBAdvDataTxPowerLevel") + + advertisement_data = AdvertisementData( + local_name=a.get("kCBAdvDataLocalName"), + manufacturer_data=manufacturer_data, + service_data=service_data, + service_uuids=service_uuids, + tx_power=tx_power, + rssi=r, + platform_data=(p, a, r), + ) + + if self._use_bdaddr: + # HACK: retrieveAddressForPeripheral_ is undocumented but seems to do the trick + address_bytes: bytes = ( + self._manager.central_manager.retrieveAddressForPeripheral_(p) + ) + if address_bytes is None: + logger.debug( + "Could not get Bluetooth address for %s. 
Ignoring this device.", + p.identifier().UUIDString(), + ) + address = address_bytes.hex(":").upper() + else: + address = p.identifier().UUIDString() + + device = self.create_or_update_device( + address, + p.name(), + (p, self._manager.central_manager.delegate()), + advertisement_data, + ) + + self.call_detection_callbacks(device, advertisement_data) + + self._manager.callbacks[id(self)] = callback + await self._manager.start_scan(self._service_uuids) + + async def stop(self) -> None: + await self._manager.stop_scan() + self._manager.callbacks.pop(id(self), None) + + def set_scanning_filter(self, **kwargs) -> None: + """Set scanning filter for the scanner. + + .. note:: + + This is not implemented for macOS yet. + + Raises: + + ``NotImplementedError`` + + """ + raise NotImplementedError( + "Need to evaluate which macOS versions to support first..." + ) + + # macOS specific methods + + @property + def is_scanning(self): + # TODO: Evaluate if newer macOS than 10.11 has isScanning. + try: + return self._manager.isScanning_ + except Exception: + return None diff --git a/bleak/backends/corebluetooth/service.py b/bleak/backends/corebluetooth/service.py new file mode 100644 index 0000000..14a35a7 --- /dev/null +++ b/bleak/backends/corebluetooth/service.py @@ -0,0 +1,42 @@ +from typing import List + +from CoreBluetooth import CBService + +from ..service import BleakGATTService +from .characteristic import BleakGATTCharacteristicCoreBluetooth +from .utils import cb_uuid_to_str + + +class BleakGATTServiceCoreBluetooth(BleakGATTService): + """GATT Characteristic implementation for the CoreBluetooth backend""" + + def __init__(self, obj: CBService): + super().__init__(obj) + self.__characteristics: List[BleakGATTCharacteristicCoreBluetooth] = [] + # N.B. the `startHandle` method of the CBService is an undocumented Core Bluetooth feature, + # which Bleak takes advantage of in order to have a service handle to use. + self.__handle: int = int(self.obj.startHandle()) + + @property + def handle(self) -> int: + """The integer handle of this service""" + return self.__handle + + @property + def uuid(self) -> str: + """UUID for this service.""" + return cb_uuid_to_str(self.obj.UUID()) + + @property + def characteristics(self) -> List[BleakGATTCharacteristicCoreBluetooth]: + """List of characteristics for this service""" + return self.__characteristics + + def add_characteristic( + self, characteristic: BleakGATTCharacteristicCoreBluetooth + ) -> None: + """Add a :py:class:`~BleakGATTCharacteristicCoreBluetooth` to the service. + + Should not be used by end user, but rather by `bleak` itself. + """ + self.__characteristics.append(characteristic) diff --git a/bleak/backends/corebluetooth/utils.py b/bleak/backends/corebluetooth/utils.py new file mode 100644 index 0000000..9209547 --- /dev/null +++ b/bleak/backends/corebluetooth/utils.py @@ -0,0 +1,42 @@ +from CoreBluetooth import CBUUID +from Foundation import NSData + +from ...uuids import normalize_uuid_str + + +def cb_uuid_to_str(uuid: CBUUID) -> str: + """Converts a CoreBluetooth UUID to a Python string. + + If ``uuid`` is a 16-bit UUID, it is assumed to be a Bluetooth GATT UUID + (``0000xxxx-0000-1000-8000-00805f9b34fb``). + + Args + uuid: The UUID. 
+ + Returns: + The UUID as a lower case Python string (``xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxx``) + """ + return normalize_uuid_str(uuid.UUIDString()) + + +def _is_uuid_16bit_compatible(_uuid: str) -> bool: + test_uuid = "0000ffff-0000-1000-8000-00805f9b34fb" + test_int = _convert_uuid_to_int(test_uuid) + uuid_int = _convert_uuid_to_int(_uuid) + result_int = uuid_int & test_int + return uuid_int == result_int + + +def _convert_uuid_to_int(_uuid: str) -> int: + UUID_cb = CBUUID.alloc().initWithString_(_uuid) + UUID_data = UUID_cb.data() + UUID_bytes = UUID_data.getBytes_length_(None, len(UUID_data)) + UUID_int = int.from_bytes(UUID_bytes, byteorder="big") + return UUID_int + + +def _convert_int_to_uuid(i: int) -> str: + UUID_bytes = i.to_bytes(length=16, byteorder="big") + UUID_data = NSData.alloc().initWithBytes_length_(UUID_bytes, len(UUID_bytes)) + UUID_cb = CBUUID.alloc().initWithData_(UUID_data) + return UUID_cb.UUIDString().lower() diff --git a/bleak/backends/descriptor.py b/bleak/backends/descriptor.py new file mode 100644 index 0000000..828ead5 --- /dev/null +++ b/bleak/backends/descriptor.py @@ -0,0 +1,143 @@ +# -*- coding: utf-8 -*- +""" +Interface class for the Bleak representation of a GATT Descriptor + +Created on 2019-03-19 by hbldh + +""" +import abc +from typing import Any + +from ..uuids import normalize_uuid_16 + +_descriptor_descriptions = { + normalize_uuid_16(0x2905): [ + "Characteristic Aggregate Format", + "org.bluetooth.descriptor.gatt.characteristic_aggregate_format", + "0x2905", + "GSS", + ], + normalize_uuid_16(0x2900): [ + "Characteristic Extended Properties", + "org.bluetooth.descriptor.gatt.characteristic_extended_properties", + "0x2900", + "GSS", + ], + normalize_uuid_16(0x2904): [ + "Characteristic Presentation Format", + "org.bluetooth.descriptor.gatt.characteristic_presentation_format", + "0x2904", + "GSS", + ], + normalize_uuid_16(0x2901): [ + "Characteristic User Description", + "org.bluetooth.descriptor.gatt.characteristic_user_description", + "0x2901", + "GSS", + ], + normalize_uuid_16(0x2902): [ + "Client Characteristic Configuration", + "org.bluetooth.descriptor.gatt.client_characteristic_configuration", + "0x2902", + "GSS", + ], + normalize_uuid_16(0x290B): [ + "Environmental Sensing Configuration", + "org.bluetooth.descriptor.es_configuration", + "0x290B", + "GSS", + ], + normalize_uuid_16(0x290C): [ + "Environmental Sensing Measurement", + "org.bluetooth.descriptor.es_measurement", + "0x290C", + "GSS", + ], + normalize_uuid_16(0x290D): [ + "Environmental Sensing Trigger Setting", + "org.bluetooth.descriptor.es_trigger_setting", + "0x290D", + "GSS", + ], + normalize_uuid_16(0x2907): [ + "External Report Reference", + "org.bluetooth.descriptor.external_report_reference", + "0x2907", + "GSS", + ], + normalize_uuid_16(0x2909): [ + "Number of Digitals", + "org.bluetooth.descriptor.number_of_digitals", + "0x2909", + "GSS", + ], + normalize_uuid_16(0x2908): [ + "Report Reference", + "org.bluetooth.descriptor.report_reference", + "0x2908", + "GSS", + ], + normalize_uuid_16(0x2903): [ + "Server Characteristic Configuration", + "org.bluetooth.descriptor.gatt.server_characteristic_configuration", + "0x2903", + "GSS", + ], + normalize_uuid_16(0x290E): [ + "Time Trigger Setting", + "org.bluetooth.descriptor.time_trigger_setting", + "0x290E", + "GSS", + ], + normalize_uuid_16(0x2906): [ + "Valid Range", + "org.bluetooth.descriptor.valid_range", + "0x2906", + "GSS", + ], + normalize_uuid_16(0x290A): [ + "Value Trigger Setting", + 
"org.bluetooth.descriptor.value_trigger_setting", + "0x290A", + "GSS", + ], +} + + +class BleakGATTDescriptor(abc.ABC): + """Interface for the Bleak representation of a GATT Descriptor""" + + def __init__(self, obj: Any): + self.obj = obj + + def __str__(self): + return f"{self.uuid} (Handle: {self.handle}): {self.description}" + + @property + @abc.abstractmethod + def characteristic_uuid(self) -> str: + """UUID for the characteristic that this descriptor belongs to""" + raise NotImplementedError() + + @property + @abc.abstractmethod + def characteristic_handle(self) -> int: + """handle for the characteristic that this descriptor belongs to""" + raise NotImplementedError() + + @property + @abc.abstractmethod + def uuid(self) -> str: + """UUID for this descriptor""" + raise NotImplementedError() + + @property + @abc.abstractmethod + def handle(self) -> int: + """Integer handle for this descriptor""" + raise NotImplementedError() + + @property + def description(self) -> str: + """A text description of what this descriptor represents""" + return _descriptor_descriptions.get(self.uuid, ["Unknown"])[0] diff --git a/bleak/backends/device.py b/bleak/backends/device.py new file mode 100644 index 0000000..5ce5c89 --- /dev/null +++ b/bleak/backends/device.py @@ -0,0 +1,73 @@ +# -*- coding: utf-8 -*- +""" +Wrapper class for Bluetooth LE servers returned from calling +:py:meth:`bleak.discover`. + +Created on 2018-04-23 by hbldh + +""" + + +from typing import Any, Optional +from warnings import warn + + +class BLEDevice: + """ + A simple wrapper class representing a BLE server detected during scanning. + """ + + __slots__ = ("address", "name", "details", "_rssi", "_metadata") + + def __init__( + self, address: str, name: Optional[str], details: Any, rssi: int, **kwargs + ): + #: The Bluetooth address of the device on this machine (UUID on macOS). + self.address = address + #: The operating system name of the device (not necessarily the local name + #: from the advertising data), suitable for display to the user. + self.name = name + #: The OS native details required for connecting to the device. + self.details = details + + # for backwards compatibility + self._rssi = rssi + self._metadata = kwargs + + @property + def rssi(self) -> int: + """ + Gets the RSSI of the last received advertisement. + + .. deprecated:: 0.19.0 + Use :class:`AdvertisementData` from detection callback or + :attr:`BleakScanner.discovered_devices_and_advertisement_data` instead. + """ + warn( + "BLEDevice.rssi is deprecated and will be removed in a future version of Bleak, use AdvertisementData.rssi instead", + FutureWarning, + stacklevel=2, + ) + return self._rssi + + @property + def metadata(self) -> dict: + """ + Gets additional advertisement data for the device. + + .. deprecated:: 0.19.0 + Use :class:`AdvertisementData` from detection callback or + :attr:`BleakScanner.discovered_devices_and_advertisement_data` instead. 
+ """ + warn( + "BLEDevice.metadata is deprecated and will be removed in a future version of Bleak, use AdvertisementData instead", + FutureWarning, + stacklevel=2, + ) + return self._metadata + + def __str__(self): + return f"{self.address}: {self.name}" + + def __repr__(self): + return f"BLEDevice({self.address}, {self.name})" diff --git a/bleak/backends/p4android/__init__.py b/bleak/backends/p4android/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/bleak/backends/p4android/characteristic.py b/bleak/backends/p4android/characteristic.py new file mode 100644 index 0000000..d9f6f19 --- /dev/null +++ b/bleak/backends/p4android/characteristic.py @@ -0,0 +1,96 @@ +from typing import Callable, List, Union +from uuid import UUID + +from ...exc import BleakError +from ..characteristic import BleakGATTCharacteristic +from ..descriptor import BleakGATTDescriptor +from . import defs + + +class BleakGATTCharacteristicP4Android(BleakGATTCharacteristic): + """GATT Characteristic implementation for the python-for-android backend""" + + def __init__( + self, + java, + service_uuid: str, + service_handle: int, + max_write_without_response_size: Callable[[], int], + ): + super(BleakGATTCharacteristicP4Android, self).__init__( + java, max_write_without_response_size + ) + self.__uuid = self.obj.getUuid().toString() + self.__handle = self.obj.getInstanceId() + self.__service_uuid = service_uuid + self.__service_handle = service_handle + self.__descriptors = [] + self.__notification_descriptor = None + + self.__properties = [ + name + for flag, name in defs.CHARACTERISTIC_PROPERTY_DBUS_NAMES.items() + if flag & self.obj.getProperties() + ] + + @property + def service_uuid(self) -> str: + """The uuid of the Service containing this characteristic""" + return self.__service_uuid + + @property + def service_handle(self) -> int: + """The integer handle of the Service containing this characteristic""" + return int(self.__service_handle) + + @property + def handle(self) -> int: + """The handle of this characteristic""" + return self.__handle + + @property + def uuid(self) -> str: + """The uuid of this characteristic""" + return self.__uuid + + @property + def properties(self) -> List[str]: + """Properties of this characteristic""" + return self.__properties + + @property + def descriptors(self) -> List[BleakGATTDescriptor]: + """List of descriptors for this service""" + return self.__descriptors + + def get_descriptor( + self, specifier: Union[str, UUID] + ) -> Union[BleakGATTDescriptor, None]: + """Get a descriptor by UUID (str or uuid.UUID)""" + if isinstance(specifier, int): + raise BleakError( + "The Android Bluetooth API does not provide access to descriptor handles." + ) + + matches = [ + descriptor + for descriptor in self.descriptors + if descriptor.uuid == str(specifier) + ] + if len(matches) == 0: + return None + return matches[0] + + def add_descriptor(self, descriptor: BleakGATTDescriptor): + """Add a :py:class:`~BleakGATTDescriptor` to the characteristic. + + Should not be used by end user, but rather by `bleak` itself. + """ + self.__descriptors.append(descriptor) + if descriptor.uuid == defs.CLIENT_CHARACTERISTIC_CONFIGURATION_UUID: + self.__notification_descriptor = descriptor + + @property + def notification_descriptor(self) -> BleakGATTDescriptor: + """The notification descriptor. 
Mostly needed by `bleak`, not by end user""" + return self.__notification_descriptor diff --git a/bleak/backends/p4android/client.py b/bleak/backends/p4android/client.py new file mode 100644 index 0000000..f1bca4d --- /dev/null +++ b/bleak/backends/p4android/client.py @@ -0,0 +1,545 @@ +# -*- coding: utf-8 -*- +""" +BLE Client for python-for-android +""" +import asyncio +import logging +import uuid +import warnings +from typing import Optional, Set, Union + +from android.broadcast import BroadcastReceiver +from jnius import java_method + +from ...exc import BleakCharacteristicNotFoundError, BleakError +from ..characteristic import BleakGATTCharacteristic +from ..client import BaseBleakClient, NotifyCallback +from ..device import BLEDevice +from ..service import BleakGATTServiceCollection +from . import defs, utils +from .characteristic import BleakGATTCharacteristicP4Android +from .descriptor import BleakGATTDescriptorP4Android +from .service import BleakGATTServiceP4Android + +logger = logging.getLogger(__name__) + + +class BleakClientP4Android(BaseBleakClient): + """A python-for-android Bleak Client + + Args: + address_or_ble_device: + The Bluetooth address of the BLE peripheral to connect to or the + :class:`BLEDevice` object representing it. + services: + Optional set of services UUIDs to filter. + """ + + def __init__( + self, + address_or_ble_device: Union[BLEDevice, str], + services: Optional[Set[uuid.UUID]], + **kwargs, + ): + super(BleakClientP4Android, self).__init__(address_or_ble_device, **kwargs) + self._requested_services = ( + set(map(defs.UUID.fromString, services)) if services else None + ) + # kwarg "device" is for backwards compatibility + self.__adapter = kwargs.get("adapter", kwargs.get("device", None)) + self.__gatt = None + self.__mtu = 23 + + def __del__(self): + if self.__gatt is not None: + self.__gatt.close() + self.__gatt = None + + # Connectivity methods + + async def connect(self, **kwargs) -> bool: + """Connect to the specified GATT server. + + Returns: + Boolean representing connection status. 
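
# Illustrative sketch (bleak's public API; the MAC address is a placeholder):
# connecting through BleakClient drives the backend connect() documented here,
# which on Android also requests a larger MTU and discovers services before
# returning, so mtu_size and the service tree are available right after connect.
import asyncio

from bleak import BleakClient


async def dump_services(address: str) -> None:
    async with BleakClient(address) as client:
        print("negotiated MTU:", client.mtu_size)
        for service in client.services:
            print(service.uuid, service.description)


# asyncio.run(dump_services("24:71:89:CC:09:05"))  # placeholder Bluetooth address
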
+ + """ + loop = asyncio.get_running_loop() + + self.__adapter = defs.BluetoothAdapter.getDefaultAdapter() + if self.__adapter is None: + raise BleakError("Bluetooth is not supported on this hardware platform") + if self.__adapter.getState() != defs.BluetoothAdapter.STATE_ON: + raise BleakError("Bluetooth is not turned on") + + self.__device = self.__adapter.getRemoteDevice(self.address) + + self.__callbacks = _PythonBluetoothGattCallback(self, loop) + + self._subscriptions = {} + + logger.debug(f"Connecting to BLE device @ {self.address}") + + (self.__gatt,) = await self.__callbacks.perform_and_wait( + dispatchApi=self.__device.connectGatt, + dispatchParams=( + defs.context, + False, + self.__callbacks.java, + defs.BluetoothDevice.TRANSPORT_LE, + ), + resultApi="onConnectionStateChange", + resultExpected=(defs.BluetoothProfile.STATE_CONNECTED,), + return_indicates_status=False, + ) + + try: + logger.debug("Connection successful.") + + # unlike other backends, Android doesn't automatically negotiate + # the MTU, so we request the largest size possible like BlueZ + logger.debug("requesting mtu...") + (self.__mtu,) = await self.__callbacks.perform_and_wait( + dispatchApi=self.__gatt.requestMtu, + dispatchParams=(517,), + resultApi="onMtuChanged", + ) + + logger.debug("discovering services...") + await self.__callbacks.perform_and_wait( + dispatchApi=self.__gatt.discoverServices, + dispatchParams=(), + resultApi="onServicesDiscovered", + ) + + await self.get_services() + except BaseException: + # if connecting is canceled or one of the above fails, we need to + # disconnect + try: + await self.disconnect() + except Exception: + pass + raise + + return True + + async def disconnect(self) -> bool: + """Disconnect from the specified GATT server. + + Returns: + Boolean representing if device is disconnected. + + """ + logger.debug("Disconnecting from BLE device...") + if self.__gatt is None: + # No connection exists. Either one hasn't been created or + # we have already called disconnect and closed the gatt + # connection. + logger.debug("already disconnected") + return True + + # Try to disconnect the actual device/peripheral + try: + await self.__callbacks.perform_and_wait( + dispatchApi=self.__gatt.disconnect, + dispatchParams=(), + resultApi="onConnectionStateChange", + resultExpected=(defs.BluetoothProfile.STATE_DISCONNECTED,), + unless_already=True, + return_indicates_status=False, + ) + self.__gatt.close() + except Exception as e: + logger.error(f"Attempt to disconnect device failed: {e}") + + self.__gatt = None + self.__callbacks = None + + # Reset all stored services. + self.services = None + + return True + + async def pair(self, *args, **kwargs) -> bool: + """Pair with the peripheral. + + You can use ConnectDevice method if you already know the MAC address of the device. + Else you need to StartDiscovery, Trust, Pair and Connect in sequence. + + Returns: + Boolean regarding success of pairing. + + """ + loop = asyncio.get_running_loop() + + bondedFuture = loop.create_future() + + def handleBondStateChanged(context, intent): + bond_state = intent.getIntExtra(defs.BluetoothDevice.EXTRA_BOND_STATE, -1) + if bond_state == -1: + loop.call_soon_threadsafe( + bondedFuture.set_exception, + BleakError(f"Unexpected bond state {bond_state}"), + ) + elif bond_state == defs.BluetoothDevice.BOND_NONE: + loop.call_soon_threadsafe( + bondedFuture.set_exception, + BleakError( + f"Device with address {self.address} could not be paired with." 
+ ), + ) + elif bond_state == defs.BluetoothDevice.BOND_BONDED: + loop.call_soon_threadsafe(bondedFuture.set_result, True) + + receiver = BroadcastReceiver( + handleBondStateChanged, + actions=[defs.BluetoothDevice.ACTION_BOND_STATE_CHANGED], + ) + receiver.start() + try: + # See if it is already paired. + bond_state = self.__device.getBondState() + if bond_state == defs.BluetoothDevice.BOND_BONDED: + return True + elif bond_state == defs.BluetoothDevice.BOND_NONE: + logger.debug(f"Pairing to BLE device @ {self.address}") + if not self.__device.createBond(): + raise BleakError( + f"Could not initiate bonding with device @ {self.address}" + ) + return await bondedFuture + finally: + await receiver.stop() + + async def unpair(self) -> bool: + """Unpair with the peripheral. + + Returns: + Boolean regarding success of unpairing. + + """ + warnings.warn( + "Unpairing is seemingly unavailable in the Android API at the moment." + ) + return False + + @property + def is_connected(self) -> bool: + """Check connection status between this client and the server. + + Returns: + Boolean representing connection status. + + """ + return ( + self.__callbacks is not None + and self.__callbacks.states["onConnectionStateChange"][1] + == defs.BluetoothProfile.STATE_CONNECTED + ) + + @property + def mtu_size(self) -> Optional[int]: + return self.__mtu + + # GATT services methods + + async def get_services(self) -> BleakGATTServiceCollection: + """Get all services registered for this GATT server. + + Returns: + A :py:class:`bleak.backends.service.BleakGATTServiceCollection` with this device's services tree. + + """ + if self.services is not None: + return self.services + + services = BleakGATTServiceCollection() + + logger.debug("Get Services...") + for java_service in self.__gatt.getServices(): + if ( + self._requested_services is not None + and java_service.getUuid() not in self._requested_services + ): + continue + + service = BleakGATTServiceP4Android(java_service) + services.add_service(service) + + for java_characteristic in java_service.getCharacteristics(): + + characteristic = BleakGATTCharacteristicP4Android( + java_characteristic, + service.uuid, + service.handle, + lambda: self.__mtu - 3, + ) + services.add_characteristic(characteristic) + + for descriptor_index, java_descriptor in enumerate( + java_characteristic.getDescriptors() + ): + + descriptor = BleakGATTDescriptorP4Android( + java_descriptor, + characteristic.uuid, + characteristic.handle, + descriptor_index, + ) + services.add_descriptor(descriptor) + + self.services = services + return self.services + + # IO methods + + async def read_gatt_char( + self, + char_specifier: Union[BleakGATTCharacteristicP4Android, int, str, uuid.UUID], + **kwargs, + ) -> bytearray: + """Perform read operation on the specified GATT characteristic. + + Args: + char_specifier (BleakGATTCharacteristicP4Android, int, str or UUID): The characteristic to read from, + specified by either integer handle, UUID or directly by the + BleakGATTCharacteristicP4Android object representing it. + + Returns: + (bytearray) The read data. 
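
# Illustrative sketch (bleak's public API; the address is a placeholder): a read
# issued through BleakClient ends up in the backend read_gatt_char() shown here
# and yields a bytearray.
import asyncio

from bleak import BleakClient

BATTERY_LEVEL = "00002a19-0000-1000-8000-00805f9b34fb"  # standard Battery Level characteristic


async def read_battery(address: str) -> None:
    async with BleakClient(address) as client:
        payload = await client.read_gatt_char(BATTERY_LEVEL)
        print("battery:", int(payload[0]), "%")


# asyncio.run(read_battery("24:71:89:CC:09:05"))  # placeholder Bluetooth address
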
+ + """ + if not isinstance(char_specifier, BleakGATTCharacteristicP4Android): + characteristic = self.services.get_characteristic(char_specifier) + else: + characteristic = char_specifier + + if not characteristic: + raise BleakCharacteristicNotFoundError(char_specifier) + + (value,) = await self.__callbacks.perform_and_wait( + dispatchApi=self.__gatt.readCharacteristic, + dispatchParams=(characteristic.obj,), + resultApi=("onCharacteristicRead", characteristic.handle), + ) + value = bytearray(value) + logger.debug( + f"Read Characteristic {characteristic.uuid} | {characteristic.handle}: {value}" + ) + return value + + async def read_gatt_descriptor( + self, + desc_specifier: Union[BleakGATTDescriptorP4Android, str, uuid.UUID], + **kwargs, + ) -> bytearray: + """Perform read operation on the specified GATT descriptor. + + Args: + desc_specifier (BleakGATTDescriptorP4Android, str or UUID): The descriptor to read from, + specified by either UUID or directly by the + BleakGATTDescriptorP4Android object representing it. + + Returns: + (bytearray) The read data. + + """ + if not isinstance(desc_specifier, BleakGATTDescriptorP4Android): + descriptor = self.services.get_descriptor(desc_specifier) + else: + descriptor = desc_specifier + + if not descriptor: + raise BleakError(f"Descriptor with UUID {desc_specifier} was not found!") + + (value,) = await self.__callbacks.perform_and_wait( + dispatchApi=self.__gatt.readDescriptor, + dispatchParams=(descriptor.obj,), + resultApi=("onDescriptorRead", descriptor.uuid), + ) + value = bytearray(value) + + logger.debug( + f"Read Descriptor {descriptor.uuid} | {descriptor.handle}: {value}" + ) + + return value + + async def write_gatt_char( + self, + characteristic: BleakGATTCharacteristic, + data: bytearray, + response: bool, + ) -> None: + if response: + characteristic.obj.setWriteType( + defs.BluetoothGattCharacteristic.WRITE_TYPE_DEFAULT + ) + else: + characteristic.obj.setWriteType( + defs.BluetoothGattCharacteristic.WRITE_TYPE_NO_RESPONSE + ) + + characteristic.obj.setValue(data) + + await self.__callbacks.perform_and_wait( + dispatchApi=self.__gatt.writeCharacteristic, + dispatchParams=(characteristic.obj,), + resultApi=("onCharacteristicWrite", characteristic.handle), + ) + + logger.debug( + f"Write Characteristic {characteristic.uuid} | {characteristic.handle}: {data}" + ) + + async def write_gatt_descriptor( + self, + desc_specifier: Union[BleakGATTDescriptorP4Android, str, uuid.UUID], + data: bytearray, + ) -> None: + """Perform a write operation on the specified GATT descriptor. + + Args: + desc_specifier (BleakGATTDescriptorP4Android, str or UUID): The descriptor to write + to, specified by either UUID or directly by the + BleakGATTDescriptorP4Android object representing it. + data (bytes or bytearray): The data to send. 
+ + """ + if not isinstance(desc_specifier, BleakGATTDescriptorP4Android): + descriptor = self.services.get_descriptor(desc_specifier) + else: + descriptor = desc_specifier + + if not descriptor: + raise BleakError(f"Descriptor {desc_specifier} was not found!") + + descriptor.obj.setValue(data) + + await self.__callbacks.perform_and_wait( + dispatchApi=self.__gatt.writeDescriptor, + dispatchParams=(descriptor.obj,), + resultApi=("onDescriptorWrite", descriptor.uuid), + ) + + logger.debug( + f"Write Descriptor {descriptor.uuid} | {descriptor.handle}: {data}" + ) + + async def start_notify( + self, + characteristic: BleakGATTCharacteristic, + callback: NotifyCallback, + **kwargs, + ) -> None: + """ + Activate notifications/indications on a characteristic. + """ + self._subscriptions[characteristic.handle] = callback + + assert self.__gatt is not None + + if not self.__gatt.setCharacteristicNotification(characteristic.obj, True): + raise BleakError( + f"Failed to enable notification for characteristic {characteristic.uuid}" + ) + + await self.write_gatt_descriptor( + characteristic.notification_descriptor, + defs.BluetoothGattDescriptor.ENABLE_NOTIFICATION_VALUE, + ) + + async def stop_notify( + self, + char_specifier: Union[BleakGATTCharacteristicP4Android, int, str, uuid.UUID], + ) -> None: + """Deactivate notification/indication on a specified characteristic. + + Args: + char_specifier (BleakGATTCharacteristicP4Android, int, str or UUID): The characteristic to deactivate + notification/indication on, specified by either integer handle, UUID or + directly by the BleakGATTCharacteristicP4Android object representing it. + + """ + if not isinstance(char_specifier, BleakGATTCharacteristicP4Android): + characteristic = self.services.get_characteristic(char_specifier) + else: + characteristic = char_specifier + if not characteristic: + raise BleakCharacteristicNotFoundError(char_specifier) + + await self.write_gatt_descriptor( + characteristic.notification_descriptor, + defs.BluetoothGattDescriptor.DISABLE_NOTIFICATION_VALUE, + ) + + if not self.__gatt.setCharacteristicNotification(characteristic.obj, False): + raise BleakError( + f"Failed to disable notification for characteristic {characteristic.uuid}" + ) + del self._subscriptions[characteristic.handle] + + +class _PythonBluetoothGattCallback(utils.AsyncJavaCallbacks): + __javainterfaces__ = [ + "com.github.hbldh.bleak.PythonBluetoothGattCallback$Interface" + ] + + def __init__(self, client, loop): + super().__init__(loop) + self._client = client + self.java = defs.PythonBluetoothGattCallback(self) + + def result_state(self, status, resultApi, *data): + if status == defs.BluetoothGatt.GATT_SUCCESS: + failure_str = None + else: + failure_str = defs.GATT_STATUS_STRINGS.get(status, status) + self._loop.call_soon_threadsafe( + self._result_state_unthreadsafe, failure_str, resultApi, data + ) + + @java_method("(II)V") + def onConnectionStateChange(self, status, new_state): + try: + self.result_state(status, "onConnectionStateChange", new_state) + except BleakError: + pass + if ( + new_state == defs.BluetoothProfile.STATE_DISCONNECTED + and self._client._disconnected_callback is not None + ): + self._client._disconnected_callback() + + @java_method("(II)V") + def onMtuChanged(self, mtu, status): + self.result_state(status, "onMtuChanged", mtu) + + @java_method("(I)V") + def onServicesDiscovered(self, status): + self.result_state(status, "onServicesDiscovered") + + @java_method("(I[B)V") + def onCharacteristicChanged(self, handle, value): + 
self._loop.call_soon_threadsafe( + self._client._subscriptions[handle], bytearray(value.tolist()) + ) + + @java_method("(II[B)V") + def onCharacteristicRead(self, handle, status, value): + self.result_state( + status, ("onCharacteristicRead", handle), bytes(value.tolist()) + ) + + @java_method("(II)V") + def onCharacteristicWrite(self, handle, status): + self.result_state(status, ("onCharacteristicWrite", handle)) + + @java_method("(Ljava/lang/String;I[B)V") + def onDescriptorRead(self, uuid, status, value): + self.result_state(status, ("onDescriptorRead", uuid), bytes(value.tolist())) + + @java_method("(Ljava/lang/String;I)V") + def onDescriptorWrite(self, uuid, status): + self.result_state(status, ("onDescriptorWrite", uuid)) diff --git a/bleak/backends/p4android/defs.py b/bleak/backends/p4android/defs.py new file mode 100644 index 0000000..832f92b --- /dev/null +++ b/bleak/backends/p4android/defs.py @@ -0,0 +1,91 @@ +# -*- coding: utf-8 -*- + +import enum + +from jnius import autoclass, cast + +import bleak.exc +from bleak.uuids import normalize_uuid_16 + +# caching constants avoids unnecessary extra use of the jni-python interface, which can be slow + +List = autoclass("java.util.ArrayList") +UUID = autoclass("java.util.UUID") +BluetoothAdapter = autoclass("android.bluetooth.BluetoothAdapter") +ScanCallback = autoclass("android.bluetooth.le.ScanCallback") +ScanFilter = autoclass("android.bluetooth.le.ScanFilter") +ScanFilterBuilder = autoclass("android.bluetooth.le.ScanFilter$Builder") +ScanSettings = autoclass("android.bluetooth.le.ScanSettings") +ScanSettingsBuilder = autoclass("android.bluetooth.le.ScanSettings$Builder") +BluetoothDevice = autoclass("android.bluetooth.BluetoothDevice") +BluetoothGatt = autoclass("android.bluetooth.BluetoothGatt") +BluetoothGattCharacteristic = autoclass("android.bluetooth.BluetoothGattCharacteristic") +BluetoothGattDescriptor = autoclass("android.bluetooth.BluetoothGattDescriptor") +BluetoothProfile = autoclass("android.bluetooth.BluetoothProfile") + +PythonActivity = autoclass("org.kivy.android.PythonActivity") +ParcelUuid = autoclass("android.os.ParcelUuid") +activity = cast("android.app.Activity", PythonActivity.mActivity) +context = cast("android.content.Context", activity.getApplicationContext()) + +ScanResult = autoclass("android.bluetooth.le.ScanResult") + +BLEAK_JNI_NAMESPACE = "com.github.hbldh.bleak" +PythonScanCallback = autoclass(BLEAK_JNI_NAMESPACE + ".PythonScanCallback") +PythonBluetoothGattCallback = autoclass( + BLEAK_JNI_NAMESPACE + ".PythonBluetoothGattCallback" +) + + +class ScanFailed(enum.IntEnum): + ALREADY_STARTED = ScanCallback.SCAN_FAILED_ALREADY_STARTED + APPLICATION_REGISTRATION_FAILED = ( + ScanCallback.SCAN_FAILED_APPLICATION_REGISTRATION_FAILED + ) + FEATURE_UNSUPPORTED = ScanCallback.SCAN_FAILED_FEATURE_UNSUPPORTED + INTERNAL_ERROR = ScanCallback.SCAN_FAILED_INTERNAL_ERROR + + +GATT_SUCCESS = 0x0000 +# TODO: we may need different lookups, e.g. 
one for bleak.exc.CONTROLLER_ERROR_CODES +GATT_STATUS_STRINGS = { + # https://developer.android.com/reference/android/bluetooth/BluetoothGatt + # https://android.googlesource.com/platform/external/bluetooth/bluedroid/+/5738f83aeb59361a0a2eda2460113f6dc9194271/stack/include/gatt_api.h + # https://android.googlesource.com/platform/system/bt/+/master/stack/include/gatt_api.h + # https://www.bluetooth.com/specifications/bluetooth-core-specification/ + **bleak.exc.PROTOCOL_ERROR_CODES, + 0x007F: "Too Short", + 0x0080: "No Resources", + 0x0081: "Internal Error", + 0x0082: "Wrong State", + 0x0083: "DB Full", + 0x0084: "Busy", + 0x0085: "Error", + 0x0086: "Command Started", + 0x0087: "Illegal Parameter", + 0x0088: "Pending", + 0x0089: "Auth Failure", + 0x008A: "More", + 0x008B: "Invalid Configuration", + 0x008C: "Service Started", + 0x008D: "Encrypted No MITM", + 0x008E: "Not Encrypted", + 0x008F: "Congested", + 0x0090: "Duplicate Reg", + 0x0091: "Already Open", + 0x0092: "Cancel", + 0x0101: "Failure", +} + +CHARACTERISTIC_PROPERTY_DBUS_NAMES = { + BluetoothGattCharacteristic.PROPERTY_BROADCAST: "broadcast", + BluetoothGattCharacteristic.PROPERTY_EXTENDED_PROPS: "extended-properties", + BluetoothGattCharacteristic.PROPERTY_INDICATE: "indicate", + BluetoothGattCharacteristic.PROPERTY_NOTIFY: "notify", + BluetoothGattCharacteristic.PROPERTY_READ: "read", + BluetoothGattCharacteristic.PROPERTY_SIGNED_WRITE: "authenticated-signed-writes", + BluetoothGattCharacteristic.PROPERTY_WRITE: "write", + BluetoothGattCharacteristic.PROPERTY_WRITE_NO_RESPONSE: "write-without-response", +} + +CLIENT_CHARACTERISTIC_CONFIGURATION_UUID = normalize_uuid_16(0x2902) diff --git a/bleak/backends/p4android/descriptor.py b/bleak/backends/p4android/descriptor.py new file mode 100644 index 0000000..844316e --- /dev/null +++ b/bleak/backends/p4android/descriptor.py @@ -0,0 +1,37 @@ +from ..descriptor import BleakGATTDescriptor + + +class BleakGATTDescriptorP4Android(BleakGATTDescriptor): + """GATT Descriptor implementation for python-for-android backend""" + + def __init__( + self, java, characteristic_uuid: str, characteristic_handle: int, index: int + ): + super(BleakGATTDescriptorP4Android, self).__init__(java) + self.__uuid = self.obj.getUuid().toString() + self.__characteristic_uuid = characteristic_uuid + self.__characteristic_handle = characteristic_handle + # many devices have sequential handles and this formula will mysteriously work for them + # it's possible this formula could make duplicate handles on other devices. + self.__fake_handle = self.__characteristic_handle + 1 + index + + @property + def characteristic_handle(self) -> int: + """handle for the characteristic that this descriptor belongs to""" + return self.__characteristic_handle + + @property + def characteristic_uuid(self) -> str: + """UUID for the characteristic that this descriptor belongs to""" + return self.__characteristic_uuid + + @property + def uuid(self) -> str: + """UUID for this descriptor""" + return self.__uuid + + @property + def handle(self) -> int: + """Integer handle for this descriptor""" + # 2021-01 The Android Bluetooth API does not appear to provide access to descriptor handles. 
+ return self.__fake_handle diff --git a/bleak/backends/p4android/java/com/github/hbldh/bleak/PythonBluetoothGattCallback.java b/bleak/backends/p4android/java/com/github/hbldh/bleak/PythonBluetoothGattCallback.java new file mode 100644 index 0000000..76434ea --- /dev/null +++ b/bleak/backends/p4android/java/com/github/hbldh/bleak/PythonBluetoothGattCallback.java @@ -0,0 +1,84 @@ +package com.github.hbldh.bleak; + +import java.net.ConnectException; +import java.util.concurrent.CompletableFuture; +import java.util.concurrent.CancellationException; +import java.util.concurrent.ExecutionException; +import java.util.HashMap; +import java.util.UUID; + +import android.bluetooth.BluetoothGatt; +import android.bluetooth.BluetoothGattCallback; +import android.bluetooth.BluetoothGattCharacteristic; +import android.bluetooth.BluetoothGattDescriptor; +import android.bluetooth.BluetoothProfile; + + +public final class PythonBluetoothGattCallback extends BluetoothGattCallback +{ + public interface Interface + { + public void onConnectionStateChange(int status, int newState); + public void onMtuChanged(int mtu, int status); + public void onServicesDiscovered(int status); + public void onCharacteristicChanged(int handle, byte[] value); + public void onCharacteristicRead(int handle, int status, byte[] value); + public void onCharacteristicWrite(int handle, int status); + public void onDescriptorRead(String uuid, int status, byte[] value); + public void onDescriptorWrite(String uuid, int status); + } + private Interface callback; + + public PythonBluetoothGattCallback(Interface pythonCallback) + { + callback = pythonCallback; + } + + @Override + public void onConnectionStateChange(BluetoothGatt gatt, int status, int newState) + { + callback.onConnectionStateChange(status, newState); + } + + @Override + public void onMtuChanged(BluetoothGatt gatt, int mtu, int status) + { + callback.onMtuChanged(mtu, status); + } + + @Override + public void onServicesDiscovered(BluetoothGatt gatt, int status) + { + callback.onServicesDiscovered(status); + } + + @Override + public void onCharacteristicRead(BluetoothGatt gatt, BluetoothGattCharacteristic characteristic, int status) + { + callback.onCharacteristicRead(characteristic.getInstanceId(), status, characteristic.getValue()); + } + + @Override + public void onCharacteristicWrite(BluetoothGatt gatt, BluetoothGattCharacteristic characteristic, int status) + { + callback.onCharacteristicWrite(characteristic.getInstanceId(), status); + } + + @Override + public void onCharacteristicChanged(BluetoothGatt gatt, BluetoothGattCharacteristic characteristic) + { + callback.onCharacteristicChanged(characteristic.getInstanceId(), characteristic.getValue()); + } + + @Override + public void onDescriptorRead(BluetoothGatt gatt, BluetoothGattDescriptor descriptor, int status) + { + callback.onDescriptorRead(descriptor.getUuid().toString(), status, descriptor.getValue()); + } + + @Override + public void onDescriptorWrite(BluetoothGatt gatt, BluetoothGattDescriptor descriptor, int status) + { + callback.onDescriptorWrite(descriptor.getUuid().toString(), status); + } +} diff --git a/bleak/backends/p4android/java/com/github/hbldh/bleak/PythonScanCallback.java b/bleak/backends/p4android/java/com/github/hbldh/bleak/PythonScanCallback.java new file mode 100644 index 0000000..8b873b2 --- /dev/null +++ b/bleak/backends/p4android/java/com/github/hbldh/bleak/PythonScanCallback.java @@ -0,0 +1,41 @@ +package com.github.hbldh.bleak; + +import java.util.List; + +import 
android.bluetooth.le.ScanCallback; +import android.bluetooth.le.ScanResult; + +public final class PythonScanCallback extends ScanCallback +{ + public interface Interface + { + public void onScanFailed(int code); + public void onScanResult(ScanResult result); + } + private Interface callback; + + public PythonScanCallback(Interface pythonCallback) + { + callback = pythonCallback; + } + + @Override + public void onBatchScanResults(List results) + { + for (ScanResult result : results) { + callback.onScanResult(result); + } + } + + @Override + public void onScanFailed(int errorCode) + { + callback.onScanFailed(errorCode); + } + + @Override + public void onScanResult(int callbackType, ScanResult result) + { + callback.onScanResult(result); + } +} diff --git a/bleak/backends/p4android/recipes/bleak/__init__.py b/bleak/backends/p4android/recipes/bleak/__init__.py new file mode 100644 index 0000000..5c10397 --- /dev/null +++ b/bleak/backends/p4android/recipes/bleak/__init__.py @@ -0,0 +1,58 @@ +import os +from os.path import join + +import sh +from pythonforandroid.recipe import PythonRecipe +from pythonforandroid.toolchain import info, shprint + + +class BleakRecipe(PythonRecipe): + version = None # Must be none for p4a to correctly clone repo + fix_setup_py_version = "bleak develop branch" + url = "git+https://github.com/hbldh/bleak.git" + name = "bleak" + + depends = ["pyjnius"] + call_hostpython_via_targetpython = False + + fix_setup_filename = "fix_setup.py" + + def prepare_build_dir(self, arch): + super().prepare_build_dir(arch) # Unpack the url file to the get_build_dir + build_dir = self.get_build_dir(arch) + + setup_py_path = join(build_dir, "setup.py") + if not os.path.exists(setup_py_path): + # Perform the p4a temporary fix + # At the moment, p4a recipe installing requires setup.py to be present + # So, we create a setup.py file only for android + + fix_setup_py_path = join(self.get_recipe_dir(), self.fix_setup_filename) + with open(fix_setup_py_path, "r") as f: + contents = f.read() + + # Write to the correct location and fill in the version number + with open(setup_py_path, "w") as f: + f.write(contents.replace("[VERSION]", self.fix_setup_py_version)) + else: + info("setup.py found in bleak directory, are you installing older version?") + + def get_recipe_env(self, arch=None, with_flags_in_cc=True): + env = super().get_recipe_env(arch, with_flags_in_cc) + # to find jnius and identify p4a + env["PYJNIUS_PACKAGES"] = self.ctx.get_site_packages_dir(arch) + return env + + def postbuild_arch(self, arch): + super().postbuild_arch(arch) + + info("Copying java files") + dest_dir = self.ctx.javaclass_dir + path = join( + self.get_build_dir(arch.arch), "bleak", "backends", "p4android", "java", "." 
+ ) + + shprint(sh.cp, "-a", path, dest_dir) + + +recipe = BleakRecipe() diff --git a/bleak/backends/p4android/recipes/bleak/fix_setup.py b/bleak/backends/p4android/recipes/bleak/fix_setup.py new file mode 100644 index 0000000..b43d2c1 --- /dev/null +++ b/bleak/backends/p4android/recipes/bleak/fix_setup.py @@ -0,0 +1,10 @@ +from setuptools import find_packages, setup + +VERSION = "[VERSION]" # Version will be filled in by the bleak recipe +NAME = "bleak" + +setup( + name=NAME, + version=VERSION, + packages=find_packages(exclude=("tests", "examples", "docs")), +) diff --git a/bleak/backends/p4android/scanner.py b/bleak/backends/p4android/scanner.py new file mode 100644 index 0000000..fb3be74 --- /dev/null +++ b/bleak/backends/p4android/scanner.py @@ -0,0 +1,297 @@ +# -*- coding: utf-8 -*- + +import asyncio +import logging +import sys +import warnings +from typing import List, Literal, Optional + +if sys.version_info < (3, 11): + from async_timeout import timeout as async_timeout +else: + from asyncio import timeout as async_timeout + +from android.broadcast import BroadcastReceiver +from android.permissions import Permission, request_permissions +from jnius import cast, java_method + +from ...exc import BleakError +from ..scanner import AdvertisementData, AdvertisementDataCallback, BaseBleakScanner +from . import defs, utils + +logger = logging.getLogger(__name__) + + +class BleakScannerP4Android(BaseBleakScanner): + """ + The python-for-android Bleak BLE Scanner. + + Args: + detection_callback: + Optional function that will be called each time a device is + discovered or advertising data has changed. + service_uuids: + Optional list of service UUIDs to filter on. Only advertisements + containing this advertising data will be received. Specifying this + also enables scanning while the screen is off on Android. + scanning_mode: + Set to ``"passive"`` to avoid the ``"active"`` scanning mode. 
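
# Illustrative sketch (bleak's public API): scanning on Android with a service
# UUID filter, which, as noted above, also allows scanning while the screen is
# off. The service UUID is only an example (standard Heart Rate service).
import asyncio

from bleak import BleakScanner


async def scan_android() -> None:
    devices = await BleakScanner.discover(
        timeout=10.0,
        service_uuids=["0000180d-0000-1000-8000-00805f9b34fb"],
    )
    for device in devices:
        print(device.address, device.name)


# asyncio.run(scan_android())
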
+ """ + + __scanner = None + + def __init__( + self, + detection_callback: Optional[AdvertisementDataCallback], + service_uuids: Optional[List[str]], + scanning_mode: Literal["active", "passive"], + **kwargs, + ): + super(BleakScannerP4Android, self).__init__(detection_callback, service_uuids) + + if scanning_mode == "passive": + self.__scan_mode = defs.ScanSettings.SCAN_MODE_OPPORTUNISTIC + else: + self.__scan_mode = defs.ScanSettings.SCAN_MODE_LOW_LATENCY + + self.__adapter = None + self.__javascanner = None + self.__callback = None + + def __del__(self) -> None: + self.__stop() + + async def start(self) -> None: + if BleakScannerP4Android.__scanner is not None: + raise BleakError("A BleakScanner is already scanning on this adapter.") + + logger.debug("Starting BTLE scan") + + loop = asyncio.get_running_loop() + + if self.__javascanner is None: + if self.__callback is None: + self.__callback = _PythonScanCallback(self, loop) + + permission_acknowledged = loop.create_future() + + def handle_permissions(permissions, grantResults): + if any(grantResults): + loop.call_soon_threadsafe( + permission_acknowledged.set_result, grantResults + ) + else: + loop.call_soon_threadsafe( + permission_acknowledged.set_exception( + BleakError("User denied access to " + str(permissions)) + ) + ) + + request_permissions( + [ + Permission.ACCESS_FINE_LOCATION, + Permission.ACCESS_COARSE_LOCATION, + "android.permission.ACCESS_BACKGROUND_LOCATION", + ], + handle_permissions, + ) + await permission_acknowledged + + self.__adapter = defs.BluetoothAdapter.getDefaultAdapter() + if self.__adapter is None: + raise BleakError("Bluetooth is not supported on this hardware platform") + if self.__adapter.getState() != defs.BluetoothAdapter.STATE_ON: + raise BleakError("Bluetooth is not turned on") + + self.__javascanner = self.__adapter.getBluetoothLeScanner() + + BleakScannerP4Android.__scanner = self + + filters = cast("java.util.List", defs.List()) + if self._service_uuids: + for uuid in self._service_uuids: + filters.add( + defs.ScanFilterBuilder() + .setServiceUuid(defs.ParcelUuid.fromString(uuid)) + .build() + ) + + scanfuture = self.__callback.perform_and_wait( + dispatchApi=self.__javascanner.startScan, + dispatchParams=( + filters, + defs.ScanSettingsBuilder() + .setScanMode(self.__scan_mode) + .setReportDelay(0) + .setPhy(defs.ScanSettings.PHY_LE_ALL_SUPPORTED) + .setNumOfMatches(defs.ScanSettings.MATCH_NUM_MAX_ADVERTISEMENT) + .setMatchMode(defs.ScanSettings.MATCH_MODE_AGGRESSIVE) + .setCallbackType(defs.ScanSettings.CALLBACK_TYPE_ALL_MATCHES) + .build(), + self.__callback.java, + ), + resultApi="onScan", + return_indicates_status=False, + ) + self.__javascanner.flushPendingScanResults(self.__callback.java) + + try: + async with async_timeout(0.2): + await scanfuture + except asyncio.exceptions.TimeoutError: + pass + except BleakError as bleakerror: + await self.stop() + if bleakerror.args != ( + "onScan", + "SCAN_FAILED_APPLICATION_REGISTRATION_FAILED", + ): + raise bleakerror + else: + # there might be a clearer solution to this if android source and vendor + # documentation are reviewed for the meaning of the error + # https://stackoverflow.com/questions/27516399/solution-for-ble-scans-scan-failed-application-registration-failed + warnings.warn( + "BT API gave SCAN_FAILED_APPLICATION_REGISTRATION_FAILED. Resetting adapter." 
+ ) + + def handlerWaitingForState(state, stateFuture): + def handleAdapterStateChanged(context, intent): + adapter_state = intent.getIntExtra( + defs.BluetoothAdapter.EXTRA_STATE, + defs.BluetoothAdapter.STATE_ERROR, + ) + if adapter_state == defs.BluetoothAdapter.STATE_ERROR: + loop.call_soon_threadsafe( + stateOffFuture.set_exception, + BleakError(f"Unexpected adapter state {adapter_state}"), + ) + elif adapter_state == state: + loop.call_soon_threadsafe( + stateFuture.set_result, adapter_state + ) + + return handleAdapterStateChanged + + logger.info( + "disabling bluetooth adapter to handle SCAN_FAILED_APPLICATION_REGSTRATION_FAILED ..." + ) + stateOffFuture = loop.create_future() + receiver = BroadcastReceiver( + handlerWaitingForState( + defs.BluetoothAdapter.STATE_OFF, stateOffFuture + ), + actions=[defs.BluetoothAdapter.ACTION_STATE_CHANGED], + ) + receiver.start() + try: + self.__adapter.disable() + await stateOffFuture + finally: + receiver.stop() + + logger.info("re-enabling bluetooth adapter ...") + stateOnFuture = loop.create_future() + receiver = BroadcastReceiver( + handlerWaitingForState( + defs.BluetoothAdapter.STATE_ON, stateOnFuture + ), + actions=[defs.BluetoothAdapter.ACTION_STATE_CHANGED], + ) + receiver.start() + try: + self.__adapter.enable() + await stateOnFuture + finally: + receiver.stop() + logger.debug("restarting scan ...") + + return await self.start() + + def __stop(self) -> None: + if self.__javascanner is not None: + logger.debug("Stopping BTLE scan") + self.__javascanner.stopScan(self.__callback.java) + BleakScannerP4Android.__scanner = None + self.__javascanner = None + else: + logger.debug("BTLE scan already stopped") + + async def stop(self) -> None: + self.__stop() + + def set_scanning_filter(self, **kwargs) -> None: + # If we do end up implementing this, this should accept List + # and ScanSettings java objects to pass to startScan(). 
+ raise NotImplementedError("not implemented in Android backend") + + def _handle_scan_result(self, result) -> None: + native_device = result.getDevice() + record = result.getScanRecord() + + service_uuids = record.getServiceUuids() + if service_uuids is not None: + service_uuids = [service_uuid.toString() for service_uuid in service_uuids] + + if not self.is_allowed_uuid(service_uuids): + return + + manufacturer_data = record.getManufacturerSpecificData() + manufacturer_data = { + manufacturer_data.keyAt(index): bytes(manufacturer_data.valueAt(index)) + for index in range(manufacturer_data.size()) + } + + service_data = { + entry.getKey().toString(): bytes(entry.getValue()) + for entry in record.getServiceData().entrySet() + } + tx_power = record.getTxPowerLevel() + + # change "not present" value to None to match other backends + if tx_power == -2147483648: # Integer#MIN_VALUE + tx_power = None + + advertisement = AdvertisementData( + local_name=record.getDeviceName(), + manufacturer_data=manufacturer_data, + service_data=service_data, + service_uuids=service_uuids, + tx_power=tx_power, + rssi=result.getRssi(), + platform_data=(result,), + ) + + device = self.create_or_update_device( + native_device.getAddress(), + native_device.getName(), + native_device, + advertisement, + ) + + self.call_detection_callbacks(device, advertisement) + + +class _PythonScanCallback(utils.AsyncJavaCallbacks): + __javainterfaces__ = ["com.github.hbldh.bleak.PythonScanCallback$Interface"] + + def __init__(self, scanner: BleakScannerP4Android, loop: asyncio.AbstractEventLoop): + super().__init__(loop) + self._scanner = scanner + self.java = defs.PythonScanCallback(self) + + def result_state(self, status_str, name, *data): + self._loop.call_soon_threadsafe( + self._result_state_unthreadsafe, status_str, name, data + ) + + @java_method("(I)V") + def onScanFailed(self, errorCode): + self.result_state(defs.ScanFailed(errorCode).name, "onScan") + + @java_method("(Landroid/bluetooth/le/ScanResult;)V") + def onScanResult(self, result): + self._loop.call_soon_threadsafe(self._scanner._handle_scan_result, result) + + if "onScan" not in self.states: + self.result_state(None, "onScan", result) diff --git a/bleak/backends/p4android/service.py b/bleak/backends/p4android/service.py new file mode 100644 index 0000000..aab1fc2 --- /dev/null +++ b/bleak/backends/p4android/service.py @@ -0,0 +1,36 @@ +from typing import List + +from ..service import BleakGATTService +from .characteristic import BleakGATTCharacteristicP4Android + + +class BleakGATTServiceP4Android(BleakGATTService): + """GATT Service implementation for the python-for-android backend""" + + def __init__(self, java): + super().__init__(java) + self.__uuid = self.obj.getUuid().toString() + self.__handle = self.obj.getInstanceId() + self.__characteristics = [] + + @property + def uuid(self) -> str: + """The UUID to this service""" + return self.__uuid + + @property + def handle(self) -> int: + """A unique identifier for this service""" + return self.__handle + + @property + def characteristics(self) -> List[BleakGATTCharacteristicP4Android]: + """List of characteristics for this service""" + return self.__characteristics + + def add_characteristic(self, characteristic: BleakGATTCharacteristicP4Android): + """Add a :py:class:`~BleakGATTCharacteristicP4Android` to the service. + + Should not be used by end user, but rather by `bleak` itself. 
+ """ + self.__characteristics.append(characteristic) diff --git a/bleak/backends/p4android/utils.py b/bleak/backends/p4android/utils.py new file mode 100644 index 0000000..a4fafda --- /dev/null +++ b/bleak/backends/p4android/utils.py @@ -0,0 +1,94 @@ +# -*- coding: utf-8 -*- + +import asyncio +import logging +import warnings + +from jnius import PythonJavaClass + +from ...exc import BleakError + +logger = logging.getLogger(__name__) + + +class AsyncJavaCallbacks(PythonJavaClass): + __javacontext__ = "app" + + def __init__(self, loop: asyncio.AbstractEventLoop): + self._loop = loop + self.states = {} + self.futures = {} + + @staticmethod + def _if_expected(result, expected): + if result[: len(expected)] == expected[:]: + return result[len(expected) :] + else: + return None + + async def perform_and_wait( + self, + dispatchApi, + dispatchParams, + resultApi, + resultExpected=(), + unless_already=False, + return_indicates_status=True, + ): + result2 = None + if unless_already: + if resultApi in self.states: + result2 = self._if_expected(self.states[resultApi][1:], resultExpected) + result1 = True + + if result2 is not None: + logger.debug( + f"Not waiting for android api {resultApi} because found {resultExpected}" + ) + else: + logger.debug(f"Waiting for android api {resultApi}") + + state = self._loop.create_future() + self.futures[resultApi] = state + result1 = dispatchApi(*dispatchParams) + if return_indicates_status and not result1: + del self.futures[resultApi] + raise BleakError(f"api call failed, not waiting for {resultApi}") + data = await state + result2 = self._if_expected(data, resultExpected) + if result2 is None: + raise BleakError("Expected", resultExpected, "got", data) + + logger.debug(f"{resultApi} succeeded {result2}") + + if return_indicates_status: + return result2 + else: + return (result1, *result2) + + def _result_state_unthreadsafe(self, failure_str, source, data): + logger.debug(f"Java state transfer {source} error={failure_str} data={data}") + self.states[source] = (failure_str, *data) + future = self.futures.get(source, None) + if future is not None and not future.done(): + if failure_str is None: + future.set_result(data) + else: + future.set_exception(BleakError(source, failure_str, *data)) + else: + if failure_str is not None: + # an error happened with nothing waiting for it + exception = BleakError(source, failure_str, *data) + namedfutures = [ + namedfuture + for namedfuture in self.futures.items() + if not namedfuture[1].done() + ] + if len(namedfutures): + # send it on existing requests + for name, future in namedfutures: + warnings.warn(f"Redirecting error without home to {name}") + future.set_exception(exception) + else: + # send it on the event thread + raise exception diff --git a/bleak/backends/scanner.py b/bleak/backends/scanner.py new file mode 100644 index 0000000..eb0d71f --- /dev/null +++ b/bleak/backends/scanner.py @@ -0,0 +1,335 @@ +import abc +import asyncio +import inspect +import os +import platform +from typing import ( + Any, + Callable, + Coroutine, + Dict, + Hashable, + List, + NamedTuple, + Optional, + Set, + Tuple, + Type, +) + +from ..exc import BleakError +from .device import BLEDevice + +# prevent tasks from being garbage collected +_background_tasks: Set[asyncio.Task] = set() + + +class AdvertisementData(NamedTuple): + """ + Wrapper around the advertisement data that each platform returns upon discovery + """ + + local_name: Optional[str] + """ + The local name of the device or ``None`` if not included in advertising data. 
+ """ + + manufacturer_data: Dict[int, bytes] + """ + Dictionary of manufacturer data in bytes from the received advertisement data or empty dict if not present. + + The keys are Bluetooth SIG assigned Company Identifiers and the values are bytes. + + https://www.bluetooth.com/specifications/assigned-numbers/company-identifiers/ + """ + + service_data: Dict[str, bytes] + """ + Dictionary of service data from the received advertisement data or empty dict if not present. + """ + + service_uuids: List[str] + """ + List of service UUIDs from the received advertisement data or empty list if not present. + """ + + tx_power: Optional[int] + """ + TX Power Level of the remote device from the received advertising data or ``None`` if not present. + + .. versionadded:: 0.17 + """ + + rssi: int + """ + The Radio Receive Signal Strength (RSSI) in dBm. + + .. versionadded:: 0.19 + """ + + platform_data: Tuple + """ + Tuple of platform specific data. + + This is not a stable API. The actual values may change between releases. + """ + + def __repr__(self) -> str: + kwargs = [] + if self.local_name: + kwargs.append(f"local_name={repr(self.local_name)}") + if self.manufacturer_data: + kwargs.append(f"manufacturer_data={repr(self.manufacturer_data)}") + if self.service_data: + kwargs.append(f"service_data={repr(self.service_data)}") + if self.service_uuids: + kwargs.append(f"service_uuids={repr(self.service_uuids)}") + if self.tx_power is not None: + kwargs.append(f"tx_power={repr(self.tx_power)}") + kwargs.append(f"rssi={repr(self.rssi)}") + return f"AdvertisementData({', '.join(kwargs)})" + + +AdvertisementDataCallback = Callable[ + [BLEDevice, AdvertisementData], + Optional[Coroutine[Any, Any, None]], +] +""" +Type alias for callback called when advertisement data is received. +""" + +AdvertisementDataFilter = Callable[ + [BLEDevice, AdvertisementData], + bool, +] +""" +Type alias for an advertisement data filter function. + +Implementations should return ``True`` for matches, otherwise ``False``. +""" + + +class BaseBleakScanner(abc.ABC): + """ + Interface for Bleak Bluetooth LE Scanners + + Args: + detection_callback: + Optional function that will be called each time a device is + discovered or advertising data has changed. + service_uuids: + Optional list of service UUIDs to filter on. Only advertisements + containing this advertising data will be received. + """ + + seen_devices: Dict[str, Tuple[BLEDevice, AdvertisementData]] + """ + Map of device identifier to BLEDevice and most recent advertisement data. + + This map must be cleared when scanning starts. + """ + + def __init__( + self, + detection_callback: Optional[AdvertisementDataCallback], + service_uuids: Optional[List[str]], + ): + super(BaseBleakScanner, self).__init__() + + self._ad_callbacks: Dict[ + Hashable, Callable[[BLEDevice, AdvertisementData], None] + ] = {} + """ + List of callbacks to call when an advertisement is received. + """ + + if detection_callback is not None: + self.register_detection_callback(detection_callback) + + self._service_uuids: Optional[List[str]] = ( + [u.lower() for u in service_uuids] if service_uuids is not None else None + ) + + self.seen_devices = {} + + def register_detection_callback( + self, callback: Optional[AdvertisementDataCallback] + ) -> Callable[[], None]: + """ + Register a callback that is called when an advertisement event from the + OS is received. + + The ``callback`` is a function or coroutine that takes two arguments: :class:`BLEDevice` + and :class:`AdvertisementData`. 
+ + Args: + callback: A function, coroutine or ``None``. + + Returns: + A method that can be called to unregister the callback. + """ + error_text = "callback must be callable with 2 parameters" + + if not callable(callback): + raise TypeError(error_text) + + handler_signature = inspect.signature(callback) + + if len(handler_signature.parameters) != 2: + raise TypeError(error_text) + + if inspect.iscoroutinefunction(callback): + + def detection_callback(s: BLEDevice, d: AdvertisementData) -> None: + task = asyncio.create_task(callback(s, d)) + _background_tasks.add(task) + task.add_done_callback(_background_tasks.discard) + + else: + detection_callback = callback + + token = object() + + self._ad_callbacks[token] = detection_callback + + def remove() -> None: + self._ad_callbacks.pop(token, None) + + return remove + + def is_allowed_uuid(self, service_uuids: Optional[List[str]]) -> bool: + """ + Check if the advertisement data contains any of the service UUIDs + matching the filter. If no filter is set, this will always return + ``True``. + + Args: + service_uuids: The service UUIDs from the advertisement data. + + Returns: + ``True`` if the advertisement data should be allowed or ``False`` + if the advertisement data should be filtered out. + """ + # Backends will make best effort to filter out advertisements that + # don't match the service UUIDs, but if other apps are scanning at the + # same time or something like that, we may still receive advertisements + # that don't match. So we need to do more filtering here to get the + # expected behavior. + + if not self._service_uuids: + # if there is no filter, everything is allowed + return True + + if not service_uuids: + # if there is a filter the advertisement data doesn't contain any + # service UUIDs, filter it out + return False + + for uuid in service_uuids: + if uuid in self._service_uuids: + # match was found, keep this advertisement + return True + + # there were no matching service uuids, filter this one out + return False + + def call_detection_callbacks( + self, device: BLEDevice, advertisement_data: AdvertisementData + ) -> None: + """ + Calls all registered detection callbacks. + + Backend implementations should call this method when an advertisement + event is received from the OS. + """ + + for callback in self._ad_callbacks.values(): + callback(device, advertisement_data) + + def create_or_update_device( + self, address: str, name: str, details: Any, adv: AdvertisementData + ) -> BLEDevice: + """ + Creates or updates a device in :attr:`seen_devices`. + + Args: + address: The Bluetooth address of the device (UUID on macOS). + name: The OS display name for the device. + details: The platform-specific handle for the device. + adv: The most recent advertisement data received. + + Returns: + The updated device. 
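+
+        A sketch of the typical backend usage, paired with
+        :meth:`call_detection_callbacks` (names are illustrative)::
+
+            device = self.create_or_update_device(address, name, native_details, adv)
+            self.call_detection_callbacks(device, adv)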
+ """ + + # for backwards compatibility, see https://github.com/hbldh/bleak/issues/1025 + metadata = dict( + uuids=adv.service_uuids, + manufacturer_data=adv.manufacturer_data, + ) + + try: + device, _ = self.seen_devices[address] + + device.name = name + device._rssi = adv.rssi + device._metadata = metadata + except KeyError: + device = BLEDevice( + address, + name, + details, + adv.rssi, + **metadata, + ) + + self.seen_devices[address] = (device, adv) + + return device + + @abc.abstractmethod + async def start(self) -> None: + """Start scanning for devices""" + raise NotImplementedError() + + @abc.abstractmethod + async def stop(self) -> None: + """Stop scanning for devices""" + raise NotImplementedError() + + @abc.abstractmethod + def set_scanning_filter(self, **kwargs) -> None: + """Set scanning filter for the BleakScanner. + + Args: + **kwargs: The filter details. This will differ a lot between backend implementations. + + """ + raise NotImplementedError() + + +def get_platform_scanner_backend_type() -> Type[BaseBleakScanner]: + """ + Gets the platform-specific :class:`BaseBleakScanner` type. + """ + if os.environ.get("P4A_BOOTSTRAP") is not None: + from bleak.backends.p4android.scanner import BleakScannerP4Android + + return BleakScannerP4Android + + if platform.system() == "Linux": + from bleak.backends.bluezdbus.scanner import BleakScannerBlueZDBus + + return BleakScannerBlueZDBus + + if platform.system() == "Darwin": + from bleak.backends.corebluetooth.scanner import BleakScannerCoreBluetooth + + return BleakScannerCoreBluetooth + + if platform.system() == "Windows": + from bleak.backends.winrt.scanner import BleakScannerWinRT + + return BleakScannerWinRT + + raise BleakError(f"Unsupported platform: {platform.system()}") diff --git a/bleak/backends/service.py b/bleak/backends/service.py new file mode 100644 index 0000000..09c503c --- /dev/null +++ b/bleak/backends/service.py @@ -0,0 +1,214 @@ +# -*- coding: utf-8 -*- +""" +Gatt Service Collection class and interface class for the Bleak representation of a GATT Service. + +Created on 2019-03-19 by hbldh + +""" +import abc +import logging +from typing import Any, Dict, Iterator, List, Optional, Union +from uuid import UUID + +from ..exc import BleakError +from ..uuids import normalize_uuid_str, uuidstr_to_str +from .characteristic import BleakGATTCharacteristic +from .descriptor import BleakGATTDescriptor + +logger = logging.getLogger(__name__) + + +class BleakGATTService(abc.ABC): + """Interface for the Bleak representation of a GATT Service.""" + + def __init__(self, obj: Any) -> None: + self.obj = obj + + def __str__(self) -> str: + return f"{self.uuid} (Handle: {self.handle}): {self.description}" + + @property + @abc.abstractmethod + def handle(self) -> int: + """The handle of this service""" + raise NotImplementedError() + + @property + @abc.abstractmethod + def uuid(self) -> str: + """The UUID to this service""" + raise NotImplementedError() + + @property + def description(self) -> str: + """String description for this service""" + return uuidstr_to_str(self.uuid) + + @property + @abc.abstractmethod + def characteristics(self) -> List[BleakGATTCharacteristic]: + """List of characteristics for this service""" + raise NotImplementedError() + + @abc.abstractmethod + def add_characteristic(self, characteristic: BleakGATTCharacteristic) -> None: + """Add a :py:class:`~BleakGATTCharacteristic` to the service. + + Should not be used by end user, but rather by `bleak` itself. 
+ """ + raise NotImplementedError() + + def get_characteristic( + self, uuid: Union[str, UUID] + ) -> Union[BleakGATTCharacteristic, None]: + """Get a characteristic by UUID. + + Args: + uuid: The UUID to match. + + Returns: + The first characteristic matching ``uuid`` or ``None`` if no + matching characteristic was found. + """ + uuid = normalize_uuid_str(str(uuid)) + + try: + return next(filter(lambda x: x.uuid == uuid, self.characteristics)) + except StopIteration: + return None + + +class BleakGATTServiceCollection: + """Simple data container for storing the peripheral's service complement.""" + + def __init__(self) -> None: + self.__services = {} + self.__characteristics = {} + self.__descriptors = {} + + def __getitem__( + self, item: Union[str, int, UUID] + ) -> Optional[ + Union[BleakGATTService, BleakGATTCharacteristic, BleakGATTDescriptor] + ]: + """Get a service, characteristic or descriptor from uuid or handle""" + return ( + self.get_service(item) + or self.get_characteristic(item) + or self.get_descriptor(item) + ) + + def __iter__(self) -> Iterator[BleakGATTService]: + """Returns an iterator over all BleakGATTService objects""" + return iter(self.services.values()) + + @property + def services(self) -> Dict[int, BleakGATTService]: + """Returns dictionary of handles mapping to BleakGATTService""" + return self.__services + + @property + def characteristics(self) -> Dict[int, BleakGATTCharacteristic]: + """Returns dictionary of handles mapping to BleakGATTCharacteristic""" + return self.__characteristics + + @property + def descriptors(self) -> Dict[int, BleakGATTDescriptor]: + """Returns a dictionary of integer handles mapping to BleakGATTDescriptor""" + return self.__descriptors + + def add_service(self, service: BleakGATTService) -> None: + """Add a :py:class:`~BleakGATTService` to the service collection. + + Should not be used by end user, but rather by `bleak` itself. + """ + if service.handle not in self.__services: + self.__services[service.handle] = service + else: + logger.error( + "The service '%s' is already present in this BleakGATTServiceCollection!", + service.handle, + ) + + def get_service( + self, specifier: Union[int, str, UUID] + ) -> Optional[BleakGATTService]: + """Get a service by handle (int) or UUID (str or uuid.UUID)""" + if isinstance(specifier, int): + return self.services.get(specifier) + + uuid = normalize_uuid_str(str(specifier)) + + x = list( + filter( + lambda x: x.uuid == uuid, + self.services.values(), + ) + ) + + if len(x) > 1: + raise BleakError( + "Multiple Services with this UUID, refer to your desired service by the `handle` attribute instead." + ) + + return x[0] if x else None + + def add_characteristic(self, characteristic: BleakGATTCharacteristic) -> None: + """Add a :py:class:`~BleakGATTCharacteristic` to the service collection. + + Should not be used by end user, but rather by `bleak` itself. 
+ """ + if characteristic.handle not in self.__characteristics: + self.__characteristics[characteristic.handle] = characteristic + self.__services[characteristic.service_handle].add_characteristic( + characteristic + ) + else: + logger.error( + "The characteristic '%s' is already present in this BleakGATTServiceCollection!", + characteristic.handle, + ) + + def get_characteristic( + self, specifier: Union[int, str, UUID] + ) -> Optional[BleakGATTCharacteristic]: + """Get a characteristic by handle (int) or UUID (str or uuid.UUID)""" + if isinstance(specifier, int): + return self.characteristics.get(specifier) + + uuid = normalize_uuid_str(str(specifier)) + + # Assume uuid usage. + x = list( + filter( + lambda x: x.uuid == uuid, + self.characteristics.values(), + ) + ) + + if len(x) > 1: + raise BleakError( + "Multiple Characteristics with this UUID, refer to your desired characteristic by the `handle` attribute instead." + ) + + return x[0] if x else None + + def add_descriptor(self, descriptor: BleakGATTDescriptor) -> None: + """Add a :py:class:`~BleakGATTDescriptor` to the service collection. + + Should not be used by end user, but rather by `bleak` itself. + """ + if descriptor.handle not in self.__descriptors: + self.__descriptors[descriptor.handle] = descriptor + self.__characteristics[descriptor.characteristic_handle].add_descriptor( + descriptor + ) + else: + logger.error( + "The descriptor '%s' is already present in this BleakGATTServiceCollection!", + descriptor.handle, + ) + + def get_descriptor(self, handle: int) -> Optional[BleakGATTDescriptor]: + """Get a descriptor by integer handle""" + return self.descriptors.get(handle) diff --git a/bleak/backends/winrt/__init__.py b/bleak/backends/winrt/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/bleak/backends/winrt/characteristic.py b/bleak/backends/winrt/characteristic.py new file mode 100644 index 0000000..6a576bf --- /dev/null +++ b/bleak/backends/winrt/characteristic.py @@ -0,0 +1,142 @@ +# -*- coding: utf-8 -*- +import sys +from typing import Callable, List, Union +from uuid import UUID + +if sys.version_info >= (3, 12): + from winrt.windows.devices.bluetooth.genericattributeprofile import ( + GattCharacteristic, + GattCharacteristicProperties, + ) +else: + from bleak_winrt.windows.devices.bluetooth.genericattributeprofile import ( + GattCharacteristic, + GattCharacteristicProperties, + ) + +from ..characteristic import BleakGATTCharacteristic +from ..descriptor import BleakGATTDescriptor + +_GattCharacteristicsPropertiesMap = { + GattCharacteristicProperties.NONE: ( + "None", + "The characteristic doesn’t have any properties that apply", + ), + GattCharacteristicProperties.BROADCAST: ( + "Broadcast".lower(), + "The characteristic supports broadcasting", + ), + GattCharacteristicProperties.READ: ( + "Read".lower(), + "The characteristic is readable", + ), + GattCharacteristicProperties.WRITE_WITHOUT_RESPONSE: ( + "Write-Without-Response".lower(), + "The characteristic supports Write Without Response", + ), + GattCharacteristicProperties.WRITE: ( + "Write".lower(), + "The characteristic is writable", + ), + GattCharacteristicProperties.NOTIFY: ( + "Notify".lower(), + "The characteristic is notifiable", + ), + GattCharacteristicProperties.INDICATE: ( + "Indicate".lower(), + "The characteristic is indicatable", + ), + GattCharacteristicProperties.AUTHENTICATED_SIGNED_WRITES: ( + "Authenticated-Signed-Writes".lower(), + "The characteristic supports signed writes", + ), + 
GattCharacteristicProperties.EXTENDED_PROPERTIES: ( + "Extended-Properties".lower(), + "The ExtendedProperties Descriptor is present", + ), + GattCharacteristicProperties.RELIABLE_WRITES: ( + "Reliable-Writes".lower(), + "The characteristic supports reliable writes", + ), + GattCharacteristicProperties.WRITABLE_AUXILIARIES: ( + "Writable-Auxiliaries".lower(), + "The characteristic has writable auxiliaries", + ), +} + + +class BleakGATTCharacteristicWinRT(BleakGATTCharacteristic): + """GATT Characteristic implementation for the .NET backend, implemented with WinRT""" + + def __init__( + self, + obj: GattCharacteristic, + max_write_without_response_size: Callable[[], int], + ): + super().__init__(obj, max_write_without_response_size) + self.__descriptors = [] + self.__props = [ + _GattCharacteristicsPropertiesMap[v][0] + for v in [2**n for n in range(10)] + if (self.obj.characteristic_properties & v) + ] + + @property + def service_uuid(self) -> str: + """The uuid of the Service containing this characteristic""" + return str(self.obj.service.uuid) + + @property + def service_handle(self) -> int: + """The integer handle of the Service containing this characteristic""" + return int(self.obj.service.attribute_handle) + + @property + def handle(self) -> int: + """The handle of this characteristic""" + return int(self.obj.attribute_handle) + + @property + def uuid(self) -> str: + """The uuid of this characteristic""" + return str(self.obj.uuid) + + @property + def description(self) -> str: + """Description for this characteristic""" + return ( + self.obj.user_description + if self.obj.user_description + else super().description + ) + + @property + def properties(self) -> List[str]: + """Properties of this characteristic""" + return self.__props + + @property + def descriptors(self) -> List[BleakGATTDescriptor]: + """List of descriptors for this characteristic""" + return self.__descriptors + + def get_descriptor( + self, specifier: Union[int, str, UUID] + ) -> Union[BleakGATTDescriptor, None]: + """Get a descriptor by handle (int) or UUID (str or uuid.UUID)""" + try: + if isinstance(specifier, int): + return next(filter(lambda x: x.handle == specifier, self.descriptors)) + else: + return next( + filter(lambda x: x.uuid == str(specifier), self.descriptors) + ) + except StopIteration: + return None + + def add_descriptor(self, descriptor: BleakGATTDescriptor): + """Add a :py:class:`~BleakGATTDescriptor` to the characteristic. + + Should not be used by end user, but rather by `bleak` itself. + """ + self.__descriptors.append(descriptor) diff --git a/bleak/backends/winrt/client.py b/bleak/backends/winrt/client.py new file mode 100644 index 0000000..a04ec7c --- /dev/null +++ b/bleak/backends/winrt/client.py @@ -0,0 +1,1134 @@ +# -*- coding: utf-8 -*- +""" +BLE Client for Windows 10 systems, implemented with WinRT. 
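+
+A minimal connect-and-read sketch (the address, characteristic UUID and the
+empty ``winrt`` dict are placeholders; most users go through the high-level
+``bleak.BleakClient`` instead)::
+
+    client = BleakClientWinRT("24:71:89:CC:09:05", winrt={})
+    await client.connect()
+    try:
+        value = await client.read_gatt_char("00002a19-0000-1000-8000-00805f9b34fb")
+    finally:
+        await client.disconnect()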
+ +Created on 2020-08-19 by hbldh +""" + +import asyncio +import logging +import sys +import uuid +import warnings +from ctypes import WinError +from typing import ( + Any, + Dict, + List, + Literal, + Optional, + Protocol, + Sequence, + Set, + TypedDict, + Union, + cast, +) + +if sys.version_info < (3, 12): + from typing_extensions import Buffer +else: + from collections.abc import Buffer + +if sys.version_info < (3, 11): + from async_timeout import timeout as async_timeout +else: + from asyncio import timeout as async_timeout + +if sys.version_info >= (3, 12): + from winrt.windows.devices.bluetooth import ( + BluetoothAddressType, + BluetoothCacheMode, + BluetoothError, + BluetoothLEDevice, + ) + from winrt.windows.devices.bluetooth.genericattributeprofile import ( + GattCharacteristic, + GattCharacteristicProperties, + GattClientCharacteristicConfigurationDescriptorValue, + GattCommunicationStatus, + GattDescriptor, + GattDeviceService, + GattSession, + GattSessionStatus, + GattSessionStatusChangedEventArgs, + GattValueChangedEventArgs, + GattWriteOption, + ) + from winrt.windows.devices.enumeration import ( + DeviceInformation, + DevicePairingKinds, + DevicePairingResultStatus, + DeviceUnpairingResultStatus, + ) + from winrt.windows.foundation import ( + AsyncStatus, + EventRegistrationToken, + IAsyncOperation, + ) + from winrt.windows.storage.streams import Buffer as WinBuffer +else: + from bleak_winrt.windows.devices.bluetooth import ( + BluetoothAddressType, + BluetoothCacheMode, + BluetoothError, + BluetoothLEDevice, + ) + from bleak_winrt.windows.devices.bluetooth.genericattributeprofile import ( + GattCharacteristic, + GattCharacteristicProperties, + GattClientCharacteristicConfigurationDescriptorValue, + GattCommunicationStatus, + GattDescriptor, + GattDeviceService, + GattSession, + GattSessionStatus, + GattSessionStatusChangedEventArgs, + GattValueChangedEventArgs, + GattWriteOption, + ) + from bleak_winrt.windows.devices.enumeration import ( + DeviceInformation, + DevicePairingKinds, + DevicePairingResultStatus, + DeviceUnpairingResultStatus, + ) + from bleak_winrt.windows.foundation import ( + AsyncStatus, + EventRegistrationToken, + IAsyncOperation, + ) + from bleak_winrt.windows.storage.streams import Buffer as WinBuffer + +from ... import BleakScanner +from ...exc import ( + PROTOCOL_ERROR_CODES, + BleakCharacteristicNotFoundError, + BleakDeviceNotFoundError, + BleakError, +) +from ..characteristic import BleakGATTCharacteristic +from ..client import BaseBleakClient, NotifyCallback +from ..device import BLEDevice +from ..service import BleakGATTServiceCollection +from .characteristic import BleakGATTCharacteristicWinRT +from .descriptor import BleakGATTDescriptorWinRT +from .scanner import BleakScannerWinRT +from .service import BleakGATTServiceWinRT + +logger = logging.getLogger(__name__) + + +class _Result(Protocol): + status: GattCommunicationStatus + protocol_error: int + + +def _address_to_int(address: str) -> int: + """Converts the Bluetooth device address string to its representing integer + + Args: + address (str): Bluetooth device address to convert + + Returns: + int: integer representation of the given Bluetooth device address + """ + _address_separators = [":", "-"] + for char in _address_separators: + address = address.replace(char, "") + + return int(address, base=16) + + +def _ensure_success(result: _Result, attr: Optional[str], fail_msg: str) -> Any: + """ + Ensures that *status* is ``GattCommunicationStatus.SUCCESS``, otherwise + raises ``BleakError``. 
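+
+    A sketch of how the helper is used at the call sites in this module::
+
+        services = _ensure_success(result, "services", "Could not get GATT services")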
+ + Args: + result: The result returned by a WinRT API method. + attr: The name of the attribute containing the result. + fail_msg: A message to include in the exception. + """ + status = result.status if hasattr(result, "status") else result + + if status == GattCommunicationStatus.SUCCESS: + return None if attr is None else getattr(result, attr) + + if status == GattCommunicationStatus.PROTOCOL_ERROR: + err = PROTOCOL_ERROR_CODES.get(result.protocol_error, "Unknown") + raise BleakError( + f"{fail_msg}: Protocol Error 0x{result.protocol_error:02X}: {err}" + ) + + if status == GattCommunicationStatus.ACCESS_DENIED: + raise BleakError(f"{fail_msg}: Access Denied") + + if status == GattCommunicationStatus.UNREACHABLE: + raise BleakError(f"{fail_msg}: Unreachable") + + raise BleakError(f"{fail_msg}: Unexpected status code 0x{status:02X}") + + +class WinRTClientArgs(TypedDict, total=False): + """ + Windows-specific arguments for :class:`BleakClient`. + """ + + address_type: Literal["public", "random"] + """ + Can either be ``"public"`` or ``"random"``, depending on the required address + type needed to connect to your device. + """ + + use_cached_services: bool + """ + ``True`` allows Windows to fetch the services, characteristics and descriptors + from the Windows cache instead of reading them from the device. Can be very + much faster for known, unchanging devices, but not recommended for DIY peripherals + where the GATT layout can change between connections. + + ``False`` will force the attribute database to be read from the remote device + instead of using the OS cache. + + If omitted, the OS Bluetooth stack will do what it thinks is best. + """ + + +class BleakClientWinRT(BaseBleakClient): + """Native Windows Bleak Client. + + Args: + address_or_ble_device (str or BLEDevice): The Bluetooth address of the BLE peripheral + to connect to or the ``BLEDevice`` object representing it. + services: Optional set of service UUIDs that will be used. + winrt (dict): A dictionary of Windows-specific configuration values. + **timeout (float): Timeout for required ``BleakScanner.find_device_by_address`` call. Defaults to 10.0. + """ + + def __init__( + self, + address_or_ble_device: Union[BLEDevice, str], + services: Optional[Set[str]] = None, + *, + winrt: WinRTClientArgs, + **kwargs, + ): + super(BleakClientWinRT, self).__init__(address_or_ble_device, **kwargs) + + # Backend specific. WinRT objects. 
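+        # _device_info caches the device's Bluetooth address as a raw integer,
+        # taken from a previously received advertisement, so that connect()
+        # can skip the BleakScanner.find_device_by_address() lookup.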
+ if isinstance(address_or_ble_device, BLEDevice): + data = address_or_ble_device.details + self._device_info = (data.adv or data.scan).bluetooth_address + else: + self._device_info = None + self._requested_services = ( + [uuid.UUID(s) for s in services] if services else None + ) + self._requester: Optional[BluetoothLEDevice] = None + self._services_changed_events: List[asyncio.Event] = [] + self._session_active_events: List[asyncio.Event] = [] + self._session_closed_events: List[asyncio.Event] = [] + self._session: GattSession = None + self._notification_callbacks: Dict[int, NotifyCallback] = {} + + if "address_type" in kwargs: + warnings.warn( + "The address_type keyword arg will in a future version be moved into the win dict input instead.", + PendingDeprecationWarning, + stacklevel=2, + ) + + # os-specific options + self._use_cached_services = winrt.get("use_cached_services") + self._address_type = winrt.get("address_type", kwargs.get("address_type")) + self._retry_on_services_changed = False + + self._session_services_changed_token: Optional[EventRegistrationToken] = None + self._session_status_changed_token: Optional[EventRegistrationToken] = None + self._max_pdu_size_changed_token: Optional[EventRegistrationToken] = None + + def __str__(self): + return f"{type(self).__name__} ({self.address})" + + # Connectivity methods + + async def _create_requester(self, bluetooth_address: int) -> BluetoothLEDevice: + args = [ + bluetooth_address, + ] + if self._address_type is not None: + args.append( + BluetoothAddressType.PUBLIC + if self._address_type == "public" + else BluetoothAddressType.RANDOM + ) + requester = await BluetoothLEDevice.from_bluetooth_address_async(*args) + + # https://github.com/microsoft/Windows-universal-samples/issues/1089#issuecomment-487586755 + if requester is None: + raise BleakDeviceNotFoundError( + self.address, f"Device with address {self.address} was not found." + ) + return requester + + async def connect(self, **kwargs) -> bool: + """Connect to the specified GATT server. + + Keyword Args: + timeout (float): Timeout for required ``BleakScanner.find_device_by_address`` call. Defaults to 10.0. + + Returns: + Boolean representing connection status. + + """ + # Try to find the desired device. + timeout = kwargs.get("timeout", self._timeout) + if self._device_info is None: + device = await BleakScanner.find_device_by_address( + self.address, timeout=timeout, backend=BleakScannerWinRT + ) + + if device is None: + raise BleakDeviceNotFoundError( + self.address, f"Device with address {self.address} was not found." + ) + + data = device.details + self._device_info = (data.adv or data.scan).bluetooth_address + + logger.debug("Connecting to BLE device @ %s", self.address) + + loop = asyncio.get_running_loop() + + self._requester = await self._create_requester(self._device_info) + + def handle_services_changed(): + if not self._services_changed_events: + logger.warning("%s: unhandled services changed event", self.address) + else: + for event in self._services_changed_events: + event.set() + + def services_changed_handler(sender, args): + logger.debug("%s: services changed", self.address) + loop.call_soon_threadsafe(handle_services_changed) + + self._services_changed_token = self._requester.add_gatt_services_changed( + services_changed_handler + ) + + # Called on disconnect event or on failure to connect. 
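+        # It unhooks the WinRT event handlers and closes both the
+        # BluetoothLEDevice requester and the GattSession.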
+ def handle_disconnect(): + if self._requester: + if self._services_changed_token: + self._requester.remove_gatt_services_changed( + self._services_changed_token + ) + self._services_changed_token = None + + logger.debug("closing requester") + self._requester.close() + self._requester = None + + if self._session: + if self._session_status_changed_token: + self._session.remove_session_status_changed( + self._session_status_changed_token + ) + self._session_status_changed_token = None + + if self._max_pdu_size_changed_token: + self._session.remove_max_pdu_size_changed( + self._max_pdu_size_changed_token + ) + self._max_pdu_size_changed_token = None + + logger.debug("closing session") + self._session.close() + self._session = None + + is_connect_complete = False + + def handle_session_status_changed( + args: GattSessionStatusChangedEventArgs, + ): + if args.error != BluetoothError.SUCCESS: + logger.error("Unhandled GATT error %r", args.error) + + if args.status == GattSessionStatus.ACTIVE: + for e in self._session_active_events: + e.set() + + # Don't run this if we have not exited from the connect method yet. + # Cleanup is handled by the connect method in that case. + elif args.status == GattSessionStatus.CLOSED and is_connect_complete: + if self._disconnected_callback: + self._disconnected_callback() + + for e in self._session_closed_events: + e.set() + + handle_disconnect() + + # this is the WinRT event handler will be called on another thread + def session_status_changed_event_handler( + sender: GattSession, args: GattSessionStatusChangedEventArgs + ): + logger.debug( + "session_status_changed_event_handler: id: %s, error: %r, status: %r", + sender.device_id.id, + args.error, + args.status, + ) + loop.call_soon_threadsafe(handle_session_status_changed, args) + + def max_pdu_size_changed_handler(sender: GattSession, args): + try: + max_pdu_size = sender.max_pdu_size + except OSError: + # There is a race condition where this event was already + # queued when the GattSession object was closed. In that + # case, we get a Windows error which we can just ignore. + return + + logger.debug("max_pdu_size_changed_handler: %d", max_pdu_size) + + # Start a GATT Session to connect + event = asyncio.Event() + self._session_active_events.append(event) + try: + self._session = await GattSession.from_device_id_async( + self._requester.bluetooth_device_id + ) + + if not self._session.can_maintain_connection: + raise BleakError("device does not support GATT sessions") + + self._session_status_changed_token = ( + self._session.add_session_status_changed( + session_status_changed_event_handler + ) + ) + + self._max_pdu_size_changed_token = self._session.add_max_pdu_size_changed( + max_pdu_size_changed_handler + ) + + services_changed_event = asyncio.Event() + self._services_changed_events.append(services_changed_event) + + try: + # Windows does not support explicitly connecting to a device. + # Instead it has the concept of a GATT session that is owned + # by the calling program. + self._session.maintain_connection = True + # This keeps the device connected until we set maintain_connection = False. 
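+                # cache_mode stays None unless use_cached_services was given,
+                # in which case the OS attribute cache is explicitly forced on
+                # (CACHED) or off (UNCACHED) for the service discovery below.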
+ + cache_mode = None + + if self._use_cached_services is not None: + cache_mode = ( + BluetoothCacheMode.CACHED + if self._use_cached_services + else BluetoothCacheMode.UNCACHED + ) + + # if we receive a services changed event before get_gatt_services_async() + # finishes, we need to call it again with BluetoothCacheMode.CACHED + # to ensure we have the correct services as described in + # https://learn.microsoft.com/en-us/uwp/api/windows.devices.bluetooth.bluetoothledevice.gattserviceschanged + service_cache_mode = cache_mode + + async with async_timeout(timeout): + if self._retry_on_services_changed: + while True: + services_changed_event.clear() + services_changed_event_task = asyncio.create_task( + services_changed_event.wait() + ) + + get_services_task = asyncio.create_task( + self.get_services( + service_cache_mode=service_cache_mode, + cache_mode=cache_mode, + ) + ) + + _, pending = await asyncio.wait( + [services_changed_event_task, get_services_task], + return_when=asyncio.FIRST_COMPLETED, + ) + + for p in pending: + p.cancel() + + if not services_changed_event.is_set(): + # services did not change while getting services, + # so this is the final result + self.services = get_services_task.result() + break + + logger.debug( + "%s: restarting get services due to services changed event", + self.address, + ) + service_cache_mode = BluetoothCacheMode.CACHED + + # ensure the task ran to completion to avoid OSError + # on next call to get_services() + try: + await get_services_task + except OSError: + pass + except asyncio.CancelledError: + pass + else: + self.services = await self.get_services( + service_cache_mode=service_cache_mode, + cache_mode=cache_mode, + ) + + # a connection may not be made until we request info from the + # device, so we have to get services before the GATT session + # is set to active + await event.wait() + is_connect_complete = True + finally: + self._services_changed_events.remove(services_changed_event) + + except BaseException: + handle_disconnect() + raise + finally: + self._session_active_events.remove(event) + + return True + + async def disconnect(self) -> bool: + """Disconnect from the specified GATT server. + + Returns: + Boolean representing if device is disconnected. + + """ + logger.debug("Disconnecting from BLE device...") + # Remove notifications. + for handle, event_handler_token in list(self._notification_callbacks.items()): + char = self.services.get_characteristic(handle) + char.obj.remove_value_changed(event_handler_token) + self._notification_callbacks.clear() + + # Dispose all service components that we have requested and created. + if self.services: + # HACK: sometimes GattDeviceService.Close() hangs forever, so we + # add a delay to give the Windows Bluetooth stack some time to + # "settle" before closing the services + await asyncio.sleep(0.1) + + for service in self.services: + service.obj.close() + self.services = None + + # Without this, disposing the BluetoothLEDevice won't disconnect it + if self._session: + self._session.maintain_connection = False + # calling self._session.close() here prevents any further GATT + # session status events, so we defer that until after the session + # is no longer active + + # Dispose of the BluetoothLEDevice and see that the session + # status is now closed. 
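+        # handle_session_status_changed() sets the event below once Windows
+        # reports the GATT session as CLOSED.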
+ if self._requester: + event = asyncio.Event() + self._session_closed_events.append(event) + try: + self._requester.close() + # sometimes it can take over one minute before Windows decides + # to end the GATT session/disconnect the device + async with async_timeout(120): + await event.wait() + finally: + self._session_closed_events.remove(event) + + return True + + @property + def is_connected(self) -> bool: + """Check connection status between this client and the server. + + Returns: + Boolean representing connection status. + + """ + return self._DeprecatedIsConnectedReturn( + False + if self._session is None + else self._session.session_status == GattSessionStatus.ACTIVE + ) + + @property + def mtu_size(self) -> int: + """Get ATT MTU size for active connection""" + return self._session.max_pdu_size + + async def pair(self, protection_level: int = None, **kwargs) -> bool: + """Attempts to pair with the device. + + Keyword Args: + protection_level (int): A ``DevicePairingProtectionLevel`` enum value: + + 1. None - Pair the device using no levels of protection. + 2. Encryption - Pair the device using encryption. + 3. EncryptionAndAuthentication - Pair the device using + encryption and authentication. (This will not work in Bleak...) + + Returns: + Boolean regarding success of pairing. + + """ + # New local device information object created since the object from the requester isn't updated + device_information = await DeviceInformation.create_from_id_async( + self._requester.device_information.id + ) + if ( + device_information.pairing.can_pair + and not device_information.pairing.is_paired + ): + # Currently only supporting Just Works solutions... + ceremony = DevicePairingKinds.CONFIRM_ONLY + custom_pairing = device_information.pairing.custom + + def handler(sender, args): + args.accept() + + pairing_requested_token = custom_pairing.add_pairing_requested(handler) + try: + if protection_level: + pairing_result = await custom_pairing.pair_async( + ceremony, protection_level + ) + else: + pairing_result = await custom_pairing.pair_async(ceremony) + + except Exception as e: + raise BleakError("Failure trying to pair with device!") from e + finally: + custom_pairing.remove_pairing_requested(pairing_requested_token) + + if pairing_result.status not in ( + DevicePairingResultStatus.PAIRED, + DevicePairingResultStatus.ALREADY_PAIRED, + ): + raise BleakError(f"Could not pair with device: {pairing_result.status}") + else: + logger.info( + "Paired to device with protection level %r.", + pairing_result.protection_level_used, + ) + return True + else: + return device_information.pairing.is_paired + + async def unpair(self) -> bool: + """Attempts to unpair from the device. + + N.B. unpairing also leads to disconnection in the Windows backend. + + Returns: + Boolean on whether the unparing was successful. 
+ + """ + device = await self._create_requester( + self._device_info + if self._device_info is not None + else _address_to_int(self.address) + ) + + try: + unpairing_result = await device.device_information.pairing.unpair_async() + if unpairing_result.status not in ( + DeviceUnpairingResultStatus.UNPAIRED, + DeviceUnpairingResultStatus.ALREADY_UNPAIRED, + ): + raise BleakError( + f"Could not unpair with device: {unpairing_result.status}" + ) + logger.info("Unpaired with device.") + finally: + device.close() + + return True + + # GATT services methods + + async def get_services( + self, + *, + service_cache_mode: Optional[BluetoothCacheMode] = None, + cache_mode: Optional[BluetoothCacheMode] = None, + **kwargs, + ) -> BleakGATTServiceCollection: + """Get all services registered for this GATT server. + + Returns: + A :py:class:`bleak.backends.service.BleakGATTServiceCollection` with this device's services tree. + + """ + + # Return the Service Collection. + if self.services is not None: + return self.services + + logger.debug( + "getting services (service_cache_mode=%r, cache_mode=%r)...", + service_cache_mode, + cache_mode, + ) + + new_services = BleakGATTServiceCollection() + + # Each of the get_serv/char/desc_async() methods has two forms, one + # with no args and one with a cache_mode argument + srv_args = [] + args = [] + + # If the os-specific use_cached_services arg was given when BleakClient + # was created, the we use the second form with explicit cache mode. + # Otherwise we use the first form with no explicit cache mode which + # allows the OS Bluetooth stack to decide what is best. + + if service_cache_mode is not None: + srv_args.append(service_cache_mode) + + if cache_mode is not None: + args.append(cache_mode) + + def dispose_on_cancel(future): + if future._cancel_requested and future._result is not None: + logger.debug("disposing services object because of cancel") + for service in future._result: + service.close() + + services: Sequence[GattDeviceService] + + if self._requested_services is None: + future = FutureLike(self._requester.get_gatt_services_async(*srv_args)) + future.add_done_callback(dispose_on_cancel) + + services = _ensure_success( + await FutureLike(self._requester.get_gatt_services_async(*srv_args)), + "services", + "Could not get GATT services", + ) + else: + services = [] + # REVISIT: should properly dispose services on cancel or protect from cancellation + + for s in self._requested_services: + services.extend( + _ensure_success( + await FutureLike( + self._requester.get_gatt_services_for_uuid_async( + s, *srv_args + ) + ), + "services", + "Could not get GATT services", + ) + ) + + try: + for service in services: + result = await FutureLike(service.get_characteristics_async(*args)) + + if result.status == GattCommunicationStatus.ACCESS_DENIED: + # Windows does not allow access to services "owned" by the + # OS. This includes services like HID and Bond Manager. 
+ logger.debug( + "skipping service %s due to access denied", service.uuid + ) + continue + + characteristics: Sequence[GattCharacteristic] = _ensure_success( + result, + "characteristics", + f"Could not get GATT characteristics for service {service.uuid} ({service.attribute_handle})", + ) + + new_services.add_service(BleakGATTServiceWinRT(service)) + + for characteristic in characteristics: + descriptors: Sequence[GattDescriptor] = _ensure_success( + await FutureLike(characteristic.get_descriptors_async(*args)), + "descriptors", + f"Could not get GATT descriptors for characteristic {characteristic.uuid} ({characteristic.attribute_handle})", + ) + + new_services.add_characteristic( + BleakGATTCharacteristicWinRT( + characteristic, lambda: self._session.max_pdu_size - 3 + ) + ) + + for descriptor in descriptors: + new_services.add_descriptor( + BleakGATTDescriptorWinRT( + descriptor, + str(characteristic.uuid), + characteristic.attribute_handle, + ) + ) + + return new_services + except BaseException: + # Don't leak services. WinRT is quite particular about services + # being closed. + logger.debug("disposing service objects") + + # HACK: sometimes GattDeviceService.Close() hangs forever, so we + # add a delay to give the Windows Bluetooth stack some time to + # "settle" before closing the services + await asyncio.sleep(0.1) + + for service in services: + service.close() + raise + + # I/O methods + + async def read_gatt_char( + self, + char_specifier: Union[BleakGATTCharacteristic, int, str, uuid.UUID], + **kwargs, + ) -> bytearray: + """Perform read operation on the specified GATT characteristic. + + Args: + char_specifier (BleakGATTCharacteristic, int, str or UUID): The characteristic to read from, + specified by either integer handle, UUID or directly by the + BleakGATTCharacteristic object representing it. + + Keyword Args: + use_cached (bool): ``False`` forces Windows to read the value from the + device again and not use its own cached value. Defaults to ``False``. + + Returns: + (bytearray) The read data. + + """ + if not self.is_connected: + raise BleakError("Not connected") + + use_cached = kwargs.get("use_cached", False) + + if not isinstance(char_specifier, BleakGATTCharacteristic): + characteristic = self.services.get_characteristic(char_specifier) + else: + characteristic = char_specifier + if not characteristic: + raise BleakCharacteristicNotFoundError(char_specifier) + + value = bytearray( + _ensure_success( + await characteristic.obj.read_value_async( + BluetoothCacheMode.CACHED + if use_cached + else BluetoothCacheMode.UNCACHED + ), + "value", + f"Could not read characteristic handle {characteristic.handle}", + ) + ) + + logger.debug("Read Characteristic %04X : %s", characteristic.handle, value) + + return value + + async def read_gatt_descriptor(self, handle: int, **kwargs) -> bytearray: + """Perform read operation on the specified GATT descriptor. + + Args: + handle (int): The handle of the descriptor to read from. + + Keyword Args: + use_cached (bool): `False` forces Windows to read the value from the + device again and not use its own cached value. Defaults to `False`. + + Returns: + (bytearray) The read data. 
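+
+        A short sketch (``client`` and the UUID are placeholders)::
+
+            value = await client.read_gatt_char("00002a19-0000-1000-8000-00805f9b34fb")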
+ + """ + if not self.is_connected: + raise BleakError("Not connected") + + use_cached = kwargs.get("use_cached", False) + + descriptor = self.services.get_descriptor(handle) + if not descriptor: + raise BleakError(f"Descriptor with handle {handle} was not found!") + + value = bytearray( + _ensure_success( + await descriptor.obj.read_value_async( + BluetoothCacheMode.CACHED + if use_cached + else BluetoothCacheMode.UNCACHED + ), + "value", + f"Could not read Descriptor value for {handle:04X}", + ) + ) + + logger.debug("Read Descriptor %04X : %s", handle, value) + + return value + + async def write_gatt_char( + self, + characteristic: BleakGATTCharacteristic, + data: Buffer, + response: bool, + ) -> None: + if not self.is_connected: + raise BleakError("Not connected") + + response = ( + GattWriteOption.WRITE_WITH_RESPONSE + if response + else GattWriteOption.WRITE_WITHOUT_RESPONSE + ) + buf = WinBuffer(len(data)) + buf.length = buf.capacity + with memoryview(buf) as mv: + mv[:] = data + _ensure_success( + await characteristic.obj.write_value_with_result_async(buf, response), + None, + f"Could not write value {data} to characteristic {characteristic.handle:04X}", + ) + + async def write_gatt_descriptor(self, handle: int, data: Buffer) -> None: + """Perform a write operation on the specified GATT descriptor. + + Args: + handle: The handle of the descriptor to read from. + data: The data to send (any bytes-like object). + + """ + if not self.is_connected: + raise BleakError("Not connected") + + descriptor = self.services.get_descriptor(handle) + if not descriptor: + raise BleakError(f"Descriptor with handle {handle} was not found!") + + buf = WinBuffer(len(data)) + buf.length = buf.capacity + with memoryview(buf) as mv: + mv[:] = data + _ensure_success( + await descriptor.obj.write_value_with_result_async(buf), + None, + f"Could not write value {data!r} to descriptor {handle:04X}", + ) + + logger.debug("Write Descriptor %04X : %s", handle, data) + + async def start_notify( + self, + characteristic: BleakGATTCharacteristic, + callback: NotifyCallback, + **kwargs, + ) -> None: + """ + Activate notifications/indications on a characteristic. + + Keyword Args: + force_indicate (bool): If this is set to True, then Bleak will set up a indication request instead of a + notification request, given that the characteristic supports notifications as well as indications. + """ + winrt_char = cast(GattCharacteristic, characteristic.obj) + + # If we want to force indicate even when notify is available, also check if the device + # actually supports indicate as well. 
+ if not kwargs.get("force_indicate", False) and ( + winrt_char.characteristic_properties & GattCharacteristicProperties.NOTIFY + ): + cccd = GattClientCharacteristicConfigurationDescriptorValue.NOTIFY + elif ( + winrt_char.characteristic_properties & GattCharacteristicProperties.INDICATE + ): + cccd = GattClientCharacteristicConfigurationDescriptorValue.INDICATE + else: + raise BleakError( + "characteristic does not support notifications or indications" + ) + + loop = asyncio.get_running_loop() + + def handle_value_changed( + sender: GattCharacteristic, args: GattValueChangedEventArgs + ): + value = bytearray(args.characteristic_value) + return loop.call_soon_threadsafe(callback, value) + + event_handler_token = winrt_char.add_value_changed(handle_value_changed) + self._notification_callbacks[characteristic.handle] = event_handler_token + + try: + _ensure_success( + await winrt_char.write_client_characteristic_configuration_descriptor_async( + cccd + ), + None, + f"Could not start notify on {characteristic.handle:04X}", + ) + except BaseException: + # This usually happens when a device reports that it supports indicate, + # but it actually doesn't. + if characteristic.handle in self._notification_callbacks: + event_handler_token = self._notification_callbacks.pop( + characteristic.handle + ) + winrt_char.remove_value_changed(event_handler_token) + + raise + + async def stop_notify( + self, char_specifier: Union[BleakGATTCharacteristic, int, str, uuid.UUID] + ) -> None: + """Deactivate notification/indication on a specified characteristic. + + Args: + char_specifier (BleakGATTCharacteristic, int, str or UUID): The characteristic to deactivate + notification/indication on, specified by either integer handle, UUID or + directly by the BleakGATTCharacteristic object representing it. + + """ + if not self.is_connected: + raise BleakError("Not connected") + + if not isinstance(char_specifier, BleakGATTCharacteristic): + characteristic = self.services.get_characteristic(char_specifier) + else: + characteristic = char_specifier + if not characteristic: + raise BleakCharacteristicNotFoundError(char_specifier) + + _ensure_success( + await characteristic.obj.write_client_characteristic_configuration_descriptor_async( + GattClientCharacteristicConfigurationDescriptorValue.NONE + ), + None, + f"Could not stop notify on {characteristic.handle:04X}", + ) + + event_handler_token = self._notification_callbacks.pop(characteristic.handle) + characteristic.obj.remove_value_changed(event_handler_token) + + +class FutureLike: + """ + Wraps a WinRT IAsyncOperation in a "future-like" object so that it can + be passed to Python APIs. 
+ + Needed until https://github.com/pywinrt/pywinrt/issues/14 + """ + + _asyncio_future_blocking = False + + def __init__(self, op: IAsyncOperation) -> None: + self._op = op + self._callbacks = [] + self._loop = asyncio.get_running_loop() + self._cancel_requested = False + self._result = None + + def call_callbacks(): + for c in self._callbacks: + c(self) + + def call_callbacks_threadsafe(op: IAsyncOperation, status: AsyncStatus): + if status == AsyncStatus.COMPLETED: + # have to get result on this thread, otherwise it may not return correct value + self._result = op.get_results() + + self._loop.call_soon_threadsafe(call_callbacks) + + op.completed = call_callbacks_threadsafe + + def result(self) -> Any: + if self._op.status == AsyncStatus.STARTED: + raise asyncio.InvalidStateError + + if self._op.status == AsyncStatus.COMPLETED: + if self._cancel_requested: + raise asyncio.CancelledError + + return self._result + + if self._op.status == AsyncStatus.CANCELED: + raise asyncio.CancelledError + + if self._op.status == AsyncStatus.ERROR: + if self._cancel_requested: + raise asyncio.CancelledError + + error_code = self._op.error_code.value + raise WinError(error_code) + + def done(self) -> bool: + return self._op.status != AsyncStatus.STARTED + + def cancelled(self) -> bool: + return self._cancel_requested or self._op.status == AsyncStatus.CANCELED + + def add_done_callback(self, callback, *, context=None) -> None: + self._callbacks.append(callback) + + def remove_done_callback(self, callback) -> None: + self._callbacks.remove(callback) + + def cancel(self, msg=None) -> bool: + if self._cancel_requested or self._op.status != AsyncStatus.STARTED: + return False + + self._cancel_requested = True + self._op.cancel() + + return True + + def exception(self) -> Optional[Exception]: + if self._op.status == AsyncStatus.STARTED: + raise asyncio.InvalidStateError + + if self._op.status == AsyncStatus.COMPLETED: + if self._cancel_requested: + raise asyncio.CancelledError + + return None + + if self._op.status == AsyncStatus.CANCELED: + raise asyncio.CancelledError + + if self._op.status == AsyncStatus.ERROR: + if self._cancel_requested: + raise asyncio.CancelledError + + error_code = self._op.error_code.value + + return WinError(error_code) + + def get_loop(self) -> asyncio.AbstractEventLoop: + return self._loop + + def __await__(self): + if not self.done(): + self._asyncio_future_blocking = True + yield self # This tells Task to wait for completion. + + if not self.done(): + raise RuntimeError("await wasn't used with future") + + return self.result() # May raise too. 
diff --git a/bleak/backends/winrt/descriptor.py b/bleak/backends/winrt/descriptor.py new file mode 100644 index 0000000..1203b75 --- /dev/null +++ b/bleak/backends/winrt/descriptor.py @@ -0,0 +1,43 @@ +# -*- coding: utf-8 -*- +import sys + +if sys.version_info >= (3, 12): + from winrt.windows.devices.bluetooth.genericattributeprofile import GattDescriptor +else: + from bleak_winrt.windows.devices.bluetooth.genericattributeprofile import ( + GattDescriptor, + ) + +from ..descriptor import BleakGATTDescriptor + + +class BleakGATTDescriptorWinRT(BleakGATTDescriptor): + """GATT Descriptor implementation for .NET backend, implemented with WinRT""" + + def __init__( + self, obj: GattDescriptor, characteristic_uuid: str, characteristic_handle: int + ): + super(BleakGATTDescriptorWinRT, self).__init__(obj) + self.obj = obj + self.__characteristic_uuid = characteristic_uuid + self.__characteristic_handle = characteristic_handle + + @property + def characteristic_handle(self) -> int: + """handle for the characteristic that this descriptor belongs to""" + return self.__characteristic_handle + + @property + def characteristic_uuid(self) -> str: + """UUID for the characteristic that this descriptor belongs to""" + return self.__characteristic_uuid + + @property + def uuid(self) -> str: + """UUID for this descriptor""" + return str(self.obj.uuid) + + @property + def handle(self) -> int: + """Integer handle for this descriptor""" + return self.obj.attribute_handle diff --git a/bleak/backends/winrt/scanner.py b/bleak/backends/winrt/scanner.py new file mode 100644 index 0000000..723ae1f --- /dev/null +++ b/bleak/backends/winrt/scanner.py @@ -0,0 +1,300 @@ +import asyncio +import logging +import sys +from typing import Dict, List, Literal, NamedTuple, Optional +from uuid import UUID + +from .util import assert_mta + +if sys.version_info >= (3, 12): + from winrt.windows.devices.bluetooth.advertisement import ( + BluetoothLEAdvertisementReceivedEventArgs, + BluetoothLEAdvertisementType, + BluetoothLEAdvertisementWatcher, + BluetoothLEAdvertisementWatcherStatus, + BluetoothLEScanningMode, + ) +else: + from bleak_winrt.windows.devices.bluetooth.advertisement import ( + BluetoothLEAdvertisementReceivedEventArgs, + BluetoothLEAdvertisementType, + BluetoothLEAdvertisementWatcher, + BluetoothLEAdvertisementWatcherStatus, + BluetoothLEScanningMode, + ) + +from ...assigned_numbers import AdvertisementDataType +from ...exc import BleakError +from ...uuids import normalize_uuid_str +from ..scanner import AdvertisementData, AdvertisementDataCallback, BaseBleakScanner + +logger = logging.getLogger(__name__) + + +def _format_bdaddr(a: int) -> str: + return ":".join(f"{x:02X}" for x in a.to_bytes(6, byteorder="big")) + + +def _format_event_args(e: BluetoothLEAdvertisementReceivedEventArgs) -> str: + try: + return f"{_format_bdaddr(e.bluetooth_address)}: {e.advertisement.local_name}" + except Exception: + return _format_bdaddr(e.bluetooth_address) + + +class _RawAdvData(NamedTuple): + """ + Platform-specific advertisement data. + + Windows does not combine advertising data with type SCAN_RSP with other + advertising data like other platforms, so se have to do it ourselves. + """ + + adv: Optional[BluetoothLEAdvertisementReceivedEventArgs] + """ + The advertisement data received from the BluetoothLEAdvertisementWatcher.Received event. + """ + scan: Optional[BluetoothLEAdvertisementReceivedEventArgs] + """ + The scan response for the same device as *adv*. 
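+
+    ``None`` until a SCAN_RSP advertisement has been received for the device.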
+ """ + + +class BleakScannerWinRT(BaseBleakScanner): + """The native Windows Bleak BLE Scanner. + + Implemented using `Python/WinRT `_. + + Args: + detection_callback: + Optional function that will be called each time a device is + discovered or advertising data has changed. + service_uuids: + Optional list of service UUIDs to filter on. Only advertisements + containing this advertising data will be received. + scanning_mode: + Set to ``"passive"`` to avoid the ``"active"`` scanning mode. + + """ + + def __init__( + self, + detection_callback: Optional[AdvertisementDataCallback], + service_uuids: Optional[List[str]], + scanning_mode: Literal["active", "passive"], + **kwargs, + ): + super(BleakScannerWinRT, self).__init__(detection_callback, service_uuids) + + self.watcher: Optional[BluetoothLEAdvertisementWatcher] = None + self._advertisement_pairs: Dict[int, _RawAdvData] = {} + self._stopped_event = None + + # case insensitivity is for backwards compatibility on Windows only + if scanning_mode.lower() == "passive": + self._scanning_mode = BluetoothLEScanningMode.PASSIVE + else: + self._scanning_mode = BluetoothLEScanningMode.ACTIVE + + # Unfortunately, due to the way Windows handles filtering, we can't + # make use of the service_uuids filter here. If we did we would only + # get the advertisement data or the scan data, but not both, so would + # miss out on other essential data. Advanced users can pass their own + # filters though if they want to. + self._signal_strength_filter = kwargs.get("SignalStrengthFilter", None) + self._advertisement_filter = kwargs.get("AdvertisementFilter", None) + + self._received_token = None + self._stopped_token = None + + def _received_handler( + self, + sender: BluetoothLEAdvertisementWatcher, + event_args: BluetoothLEAdvertisementReceivedEventArgs, + ): + """Callback for AdvertisementWatcher.Received""" + # TODO: Cannot check for if sender == self.watcher in winrt? + logger.debug("Received %s.", _format_event_args(event_args)) + + # REVISIT: if scanning filters with BluetoothSignalStrengthFilter.OutOfRangeTimeout + # are in place, an RSSI of -127 means that the device has gone out of range and should + # be removed from the list of seen devices instead of processing the advertisement data. + # https://learn.microsoft.com/en-us/uwp/api/windows.devices.bluetooth.bluetoothsignalstrengthfilter.outofrangetimeout + + bdaddr = _format_bdaddr(event_args.bluetooth_address) + + # Unlike other platforms, Windows does not combine advertising data for + # us (regular advertisement + scan response) so we have to do it manually. 
+ + # get the previous advertising data/scan response pair or start a new one + raw_data = self._advertisement_pairs.get(bdaddr, _RawAdvData(None, None)) + + # update the advertising data depending on the advertising data type + if event_args.advertisement_type == BluetoothLEAdvertisementType.SCAN_RESPONSE: + raw_data = _RawAdvData(raw_data.adv, event_args) + else: + raw_data = _RawAdvData(event_args, raw_data.scan) + + self._advertisement_pairs[bdaddr] = raw_data + + uuids = [] + mfg_data = {} + service_data = {} + local_name = None + tx_power = None + + for args in filter(lambda d: d is not None, raw_data): + for u in args.advertisement.service_uuids: + uuids.append(str(u)) + + for m in args.advertisement.manufacturer_data: + mfg_data[m.company_id] = bytes(m.data) + + # local name is empty string rather than None if not present + if args.advertisement.local_name: + local_name = args.advertisement.local_name + + try: + if args.transmit_power_level_in_d_bm is not None: + tx_power = args.transmit_power_level_in_d_bm + except AttributeError: + # the transmit_power_level_in_d_bm property was introduce in + # Windows build 19041 so we have a fallback for older versions + for section in args.advertisement.get_sections_by_type( + AdvertisementDataType.TX_POWER_LEVEL + ): + tx_power = bytes(section.data)[0] + + # Decode service data + for section in args.advertisement.get_sections_by_type( + AdvertisementDataType.SERVICE_DATA_UUID16 + ): + data = bytes(section.data) + service_data[normalize_uuid_str(f"{data[1]:02x}{data[0]:02x}")] = data[ + 2: + ] + for section in args.advertisement.get_sections_by_type( + AdvertisementDataType.SERVICE_DATA_UUID32 + ): + data = bytes(section.data) + service_data[ + normalize_uuid_str( + f"{data[3]:02x}{data[2]:02x}{data[1]:02x}{data[0]:02x}" + ) + ] = data[4:] + for section in args.advertisement.get_sections_by_type( + AdvertisementDataType.SERVICE_DATA_UUID128 + ): + data = bytes(section.data) + service_data[str(UUID(bytes=bytes(data[15::-1])))] = data[16:] + + if not self.is_allowed_uuid(uuids): + return + + # Use the BLEDevice to populate all the fields for the advertisement data to return + advertisement_data = AdvertisementData( + local_name=local_name, + manufacturer_data=mfg_data, + service_data=service_data, + service_uuids=uuids, + tx_power=tx_power, + rssi=event_args.raw_signal_strength_in_d_bm, + platform_data=(sender, raw_data), + ) + + device = self.create_or_update_device( + bdaddr, local_name, raw_data, advertisement_data + ) + + self.call_detection_callbacks(device, advertisement_data) + + def _stopped_handler(self, sender, e): + logger.debug( + "%s devices found. Watcher status: %r.", + len(self.seen_devices), + sender.status, + ) + self._stopped_event.set() + + async def start(self) -> None: + if self.watcher: + raise BleakError("Scanner already started") + + # Callbacks for WinRT async methods will never happen in STA mode if + # there is nothing pumping a Windows message loop. 
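# assert_mta() (defined in .util) raises BleakError up front rather than
# letting the scan hang forever when the calling thread is a COM STA
# apartment without a running message pump.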
+ await assert_mta() + + # start with fresh list of discovered devices + self.seen_devices = {} + self._advertisement_pairs.clear() + + self.watcher = BluetoothLEAdvertisementWatcher() + self.watcher.scanning_mode = self._scanning_mode + + event_loop = asyncio.get_running_loop() + self._stopped_event = asyncio.Event() + + self._received_token = self.watcher.add_received( + lambda s, e: event_loop.call_soon_threadsafe(self._received_handler, s, e) + ) + self._stopped_token = self.watcher.add_stopped( + lambda s, e: event_loop.call_soon_threadsafe(self._stopped_handler, s, e) + ) + + if self._signal_strength_filter is not None: + self.watcher.signal_strength_filter = self._signal_strength_filter + if self._advertisement_filter is not None: + self.watcher.advertisement_filter = self._advertisement_filter + + self.watcher.start() + + # no events for status changes, so we have to poll :-( + while self.watcher.status == BluetoothLEAdvertisementWatcherStatus.CREATED: + await asyncio.sleep(0.01) + + if self.watcher.status == BluetoothLEAdvertisementWatcherStatus.ABORTED: + raise BleakError("Failed to start scanner. Is Bluetooth turned on?") + + if self.watcher.status != BluetoothLEAdvertisementWatcherStatus.STARTED: + raise BleakError(f"Unexpected watcher status: {self.watcher.status.name}") + + async def stop(self) -> None: + self.watcher.stop() + + if self.watcher.status == BluetoothLEAdvertisementWatcherStatus.STOPPING: + await self._stopped_event.wait() + else: + logger.debug( + "skipping waiting for stop because status is %r", + self.watcher.status, + ) + + try: + self.watcher.remove_received(self._received_token) + self.watcher.remove_stopped(self._stopped_token) + except Exception as e: + logger.debug("Could not remove event handlers: %s", e) + + self._stopped_token = None + self._received_token = None + + self.watcher = None + + def set_scanning_filter(self, **kwargs) -> None: + """Set a scanning filter for the BleakScanner. + + Keyword Args: + SignalStrengthFilter (``Windows.Devices.Bluetooth.BluetoothSignalStrengthFilter``): A + BluetoothSignalStrengthFilter object used for configuration of Bluetooth + LE advertisement filtering that uses signal strength-based filtering. + AdvertisementFilter (Windows.Devices.Bluetooth.Advertisement.BluetoothLEAdvertisementFilter): A + BluetoothLEAdvertisementFilter object used for configuration of Bluetooth LE + advertisement filtering that uses payload section-based filtering. 
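Example (an illustrative sketch only; the snake_case property names of
``BluetoothSignalStrengthFilter`` in the Python WinRT projection are
assumed here by analogy with the other WinRT types used in this module,
and on Python < 3.12 the import would come from ``bleak_winrt`` instead)::

    from winrt.windows.devices.bluetooth import BluetoothSignalStrengthFilter

    def callback(device, advertisement_data):
        print(device.address, advertisement_data.rssi)

    scanner = BleakScannerWinRT(callback, None, "active")

    rssi_filter = BluetoothSignalStrengthFilter()
    rssi_filter.in_range_threshold_in_d_bm = -70      # assumed property name
    rssi_filter.out_of_range_threshold_in_d_bm = -80  # assumed property name
    scanner.set_scanning_filter(SignalStrengthFilter=rssi_filter)

    # the filter is picked up the next time start() is awaited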
+ + """ + if "SignalStrengthFilter" in kwargs: + # TODO: Handle SignalStrengthFilter parameters + self._signal_strength_filter = kwargs["SignalStrengthFilter"] + if "AdvertisementFilter" in kwargs: + # TODO: Handle AdvertisementFilter parameters + self._advertisement_filter = kwargs["AdvertisementFilter"] diff --git a/bleak/backends/winrt/service.py b/bleak/backends/winrt/service.py new file mode 100644 index 0000000..dde2d4e --- /dev/null +++ b/bleak/backends/winrt/service.py @@ -0,0 +1,42 @@ +import sys +from typing import List + +if sys.version_info >= (3, 12): + from winrt.windows.devices.bluetooth.genericattributeprofile import ( + GattDeviceService, + ) +else: + from bleak_winrt.windows.devices.bluetooth.genericattributeprofile import ( + GattDeviceService, + ) + +from ..service import BleakGATTService +from ..winrt.characteristic import BleakGATTCharacteristicWinRT + + +class BleakGATTServiceWinRT(BleakGATTService): + """GATT Characteristic implementation for the .NET backend, implemented with WinRT""" + + def __init__(self, obj: GattDeviceService): + super().__init__(obj) + self.__characteristics = [] + + @property + def uuid(self) -> str: + return str(self.obj.uuid) + + @property + def handle(self) -> int: + return self.obj.attribute_handle + + @property + def characteristics(self) -> List[BleakGATTCharacteristicWinRT]: + """List of characteristics for this service""" + return self.__characteristics + + def add_characteristic(self, characteristic: BleakGATTCharacteristicWinRT): + """Add a :py:class:`~BleakGATTCharacteristicWinRT` to the service. + + Should not be used by end user, but rather by `bleak` itself. + """ + self.__characteristics.append(characteristic) diff --git a/bleak/backends/winrt/util.py b/bleak/backends/winrt/util.py new file mode 100644 index 0000000..9053794 --- /dev/null +++ b/bleak/backends/winrt/util.py @@ -0,0 +1,223 @@ +import asyncio +import ctypes +import sys +from ctypes import wintypes +from enum import IntEnum +from typing import Tuple + +from ...exc import BleakError + +if sys.version_info < (3, 11): + from async_timeout import timeout as async_timeout +else: + from asyncio import timeout as async_timeout + + +def _check_result(result, func, args): + if not result: + raise ctypes.WinError() + + return args + + +def _check_hresult(result, func, args): + if result: + raise ctypes.WinError(result) + + return args + + +# not defined in wintypes +_UINT_PTR = wintypes.WPARAM + +# https://learn.microsoft.com/en-us/windows/win32/api/winuser/nc-winuser-timerproc +_TIMERPROC = ctypes.WINFUNCTYPE( + None, wintypes.HWND, _UINT_PTR, wintypes.UINT, wintypes.DWORD +) + +# https://learn.microsoft.com/en-us/windows/win32/api/winuser/nf-winuser-settimer +_SET_TIMER_PROTOTYPE = ctypes.WINFUNCTYPE( + _UINT_PTR, wintypes.HWND, _UINT_PTR, wintypes.UINT, _TIMERPROC +) +_SET_TIMER_PARAM_FLAGS = ( + (1, "hwnd", None), + (1, "nidevent"), + (1, "uelapse"), + (1, "lptimerfunc", None), +) +_SetTimer = _SET_TIMER_PROTOTYPE( + ("SetTimer", ctypes.windll.user32), _SET_TIMER_PARAM_FLAGS +) +_SetTimer.errcheck = _check_result + +# https://learn.microsoft.com/en-us/windows/win32/api/winuser/nf-winuser-killtimer +_KILL_TIMER_PROTOTYPE = ctypes.WINFUNCTYPE(wintypes.BOOL, wintypes.HWND, _UINT_PTR) +_KILL_TIMER_PARAM_FLAGS = ( + (1, "hwnd", None), + (1, "uidevent"), +) +_KillTimer = _KILL_TIMER_PROTOTYPE( + ("KillTimer", ctypes.windll.user32), _KILL_TIMER_PARAM_FLAGS +) + +# https://learn.microsoft.com/en-us/windows/win32/api/combaseapi/nf-combaseapi-cogetapartmenttype 
+_CO_GET_APARTMENT_TYPE_PROTOTYPE = ctypes.WINFUNCTYPE( + ctypes.c_int, + ctypes.POINTER(ctypes.c_int), + ctypes.POINTER(ctypes.c_int), +) +_CO_GET_APARTMENT_TYPE_PARAM_FLAGS = ( + (1, "papttype", None), + (1, "paptqualifier", None), +) +_CoGetApartmentType = _CO_GET_APARTMENT_TYPE_PROTOTYPE( + ("CoGetApartmentType", ctypes.windll.ole32), _CO_GET_APARTMENT_TYPE_PARAM_FLAGS +) +_CoGetApartmentType.errcheck = _check_hresult + +_CO_E_NOTINITIALIZED = -2147221008 + + +# https://learn.microsoft.com/en-us/windows/win32/api/objidl/ne-objidl-apttype +class _AptType(IntEnum): + CURRENT = -1 + STA = 0 + MTA = 1 + NA = 2 + MAIN_STA = 3 + + +# https://learn.microsoft.com/en-us/windows/win32/api/objidl/ne-objidl-apttypequalifier +class _AptQualifierType(IntEnum): + NONE = 0 + IMPLICIT_MTA = 1 + NA_ON_MTA = 2 + NA_ON_STA = 3 + NA_ON_IMPLICIT_STA = 4 + NA_ON_MAIN_STA = 5 + APPLICATION_STA = 6 + RESERVED_1 = 7 + + +def _get_apartment_type() -> Tuple[_AptType, _AptQualifierType]: + """ + Calls CoGetApartmentType to get the current apartment type and qualifier. + + Returns: + The current apartment type and qualifier. + Raises: + OSError: If the call to CoGetApartmentType fails. + """ + api_type = ctypes.c_int() + api_type_qualifier = ctypes.c_int() + _CoGetApartmentType(ctypes.byref(api_type), ctypes.byref(api_type_qualifier)) + return _AptType(api_type.value), _AptQualifierType(api_type_qualifier.value) + + +async def assert_mta() -> None: + """ + Asserts that the current apartment type is MTA. + + Raises: + BleakError: + If the current apartment type is not MTA and there is no Windows + message loop running. + + .. versionadded:: 0.22 + + .. versionchanged:: 0.22.2 + + Function is now async and will not raise if the current apartment type + is STA and the Windows message loop is running. + """ + if hasattr(allow_sta, "_allowed"): + return + + try: + apt_type, _ = _get_apartment_type() + except OSError as e: + # All is OK if not initialized yet. WinRT will initialize it. + if e.winerror == _CO_E_NOTINITIALIZED: + return + + raise + + if apt_type == _AptType.MTA: + # if we get here, WinRT probably set the apartment type to MTA and all + # is well, we don't need to check again + setattr(allow_sta, "_allowed", True) + return + + event = asyncio.Event() + + def wait_event(*_): + event.set() + + # have to keep a reference to the callback or it will be garbage collected + # before it is called + callback = _TIMERPROC(wait_event) + + # set a timer to see if we get a callback to ensure the windows event loop + # is running + timer = _SetTimer(None, 1, 0, callback) + + try: + async with async_timeout(0.5): + await event.wait() + except asyncio.TimeoutError: + raise BleakError( + "Thread is configured for Windows GUI but callbacks are not working." + + ( + " Suspect unwanted side effects from importing 'pythoncom'." + if "pythoncom" in sys.modules + else "" + ) + ) + else: + # if the windows event loop is running, we assume it is going to keep + # running and we don't need to check again + setattr(allow_sta, "_allowed", True) + finally: + _KillTimer(None, timer) + + +def allow_sta(): + """ + Suppress check for MTA thread type and allow STA. + + Bleak will hang forever if the current thread is not MTA - unless there is + a Windows event loop running that is properly integrated with asyncio in + Python. + + If your program meets that condition, you must call this function do disable + the check for MTA. If your program doesn't have a graphical user interface + you probably shouldn't call this function. 
and use ``uninitialize_sta()`` + instead. + + .. versionadded:: 0.22.1 + """ + allow_sta._allowed = True + + +def uninitialize_sta(): + """ + Uninitialize the COM library on the current thread if it was not initialized + as MTA. + + This is intended to undo the implicit initialization of the COM library as STA + by packages like pywin32. + + It should be called as early as possible in your application after the + offending package has been imported. + + .. versionadded:: 0.22 + """ + + try: + _get_apartment_type() + except OSError as e: + # All is OK if not initialized yet. WinRT will initialize it. + if e.winerror == _CO_E_NOTINITIALIZED: + return + else: + ctypes.windll.ole32.CoUninitialize() diff --git a/bleak/exc.py b/bleak/exc.py new file mode 100644 index 0000000..d03d203 --- /dev/null +++ b/bleak/exc.py @@ -0,0 +1,184 @@ +# -*- coding: utf-8 -*- +import uuid +from typing import Optional, Union + + +class BleakError(Exception): + """Base Exception for bleak.""" + + pass + + +class BleakCharacteristicNotFoundError(BleakError): + """ + Exception which is raised if a device does not support a characteristic. + + .. versionadded: 0.22 + """ + + char_specifier: Union[int, str, uuid.UUID] + + def __init__(self, char_specifier: Union[int, str, uuid.UUID]) -> None: + """ + Args: + characteristic (str): handle or UUID of the characteristic which was not found + """ + super().__init__(f"Characteristic {char_specifier} was not found!") + self.char_specifier = char_specifier + + +class BleakDeviceNotFoundError(BleakError): + """ + Exception which is raised if a device can not be found by ``connect``, ``pair`` and ``unpair``. + This is the case if the OS Bluetooth stack has never seen this device or it was removed and forgotten. + + .. versionadded: 0.19 + """ + + identifier: str + + def __init__(self, identifier: str, *args: object) -> None: + """ + Args: + identifier (str): device identifier (Bluetooth address or UUID) of the device which was not found + """ + super().__init__(*args) + self.identifier = identifier + + +class BleakDBusError(BleakError): + """Specialized exception type for D-Bus errors.""" + + def __init__(self, dbus_error: str, error_body: list): + """ + Args: + dbus_error (str): The D-Bus error, e.g. ``org.freedesktop.DBus.Error.UnknownObject``. + error_body (list): Body of the D-Bus error, sometimes containing error description or details. + """ + super().__init__(dbus_error, *error_body) + + @property + def dbus_error(self) -> str: + """Gets the D-Bus error name, e.g. ``org.freedesktop.DBus.Error.UnknownObject``.""" + return self.args[0] + + @property + def dbus_error_details(self) -> Optional[str]: + """Gets the optional D-Bus error details, e.g. 
'Invalid UUID'.""" + if len(self.args) > 1: + details = self.args[1] + # Some error descriptions can be further parsed to be even more helpful + if "ATT error: 0x" in details: + more_detail = PROTOCOL_ERROR_CODES.get( + int(details.rsplit("x")[1], 16), "Unknown code" + ) + details += f" ({more_detail})" + return details + return None + + def __str__(self) -> str: + name = f"[{self.dbus_error}]" + details = self.dbus_error_details + return (name + " " + details) if details else name + + +CONTROLLER_ERROR_CODES = { + 0x00: "Success", + 0x01: "Unknown HCI Command", + 0x02: "Unknown Connection Identifier", + 0x03: "Hardware Failure", + 0x04: "Page Timeout", + 0x05: "Authentication Failure", + 0x06: "PIN or Key Missing", + 0x07: "Memory Capacity Exceeded", + 0x08: "Connection Timeout", + 0x09: "Connection Limit Exceeded", + 0x0A: "Synchronous Connection Limit To A Device Exceeded", + 0x0B: "Connection Already Exists", + 0x0C: "Command Disallowed", + 0x0D: "Connection Rejected due to Limited Resources", + 0x0E: "Connection Rejected Due To Security Reasons", + 0x0F: "Connection Rejected due to Unacceptable BD_ADDR", + 0x10: "Connection Accept Timeout Exceeded", + 0x11: "Unsupported Feature or Parameter Value", + 0x12: "Invalid HCI Command Parameters", + 0x13: "Remote User Terminated Connection", + 0x14: "Remote Device Terminated Connection due to Low Resources", + 0x15: "Remote Device Terminated Connection due to Power Off", + 0x16: "Connection Terminated By Local Host", + 0x17: "Repeated Attempts", + 0x18: "Pairing Not Allowed", + 0x19: "Unknown LMP PDU", + 0x1A: "Unsupported Remote Feature / Unsupported LMP Feature", + 0x1B: "SCO Offset Rejected", + 0x1C: "SCO Interval Rejected", + 0x1D: "SCO Air Mode Rejected", + 0x1E: "Invalid LMP Parameters / Invalid LL Parameters", + 0x1F: "Unspecified Error", + 0x20: "Unsupported LMP Parameter Value / Unsupported LL Parameter Value", + 0x21: "Role Change Not Allowed", + 0x22: "LMP Response Timeout / LL Response Timeout", + 0x23: "LMP Error Transaction Collision / LL Procedure Collision", + 0x24: "LMP PDU Not Allowed", + 0x25: "Encryption Mode Not Acceptable", + 0x26: "Link Key cannot be Changed", + 0x27: "Requested QoS Not Supported", + 0x28: "Instant Passed", + 0x29: "Pairing With Unit Key Not Supported", + 0x2A: "Different Transaction Collision", + 0x2B: "Reserved for future use", + 0x2C: "QoS Unacceptable Parameter", + 0x2D: "QoS Rejected", + 0x2E: "Channel Classification Not Supported", + 0x2F: "Insufficient Security", + 0x30: "Parameter Out Of Mandatory Range", + 0x31: "Reserved for future use", + 0x32: "Role Switch Pending", + 0x33: "Reserved for future use", + 0x34: "Reserved Slot Violation", + 0x35: "Role Switch Failed", + 0x36: "Extended Inquiry Response Too Large", + 0x37: "Secure Simple Pairing Not Supported By Host", + 0x38: "Host Busy - Pairing", + 0x39: "Connection Rejected due to No Suitable Channel Found", + 0x3A: "Controller Busy", + 0x3B: "Unacceptable Connection Parameters", + 0x3C: "Advertising Timeout", + 0x3D: "Connection Terminated due to MIC Failure", + 0x3E: "Connection Failed to be Established / Synchronization Timeout", + 0x3F: "MAC Connection Failed", + 0x40: "Coarse Clock Adjustment Rejected but Will Try to Adjust Using Clock", + 0x41: "Type0 Submap Not Defined", + 0x42: "Unknown Advertising Identifier", + 0x43: "Limit Reached", + 0x44: "Operation Cancelled by Host", + 0x45: "Packet Too Long", +} + +# as defined in Bluetooth Core Specification v5.2, volume 3, part F, section 3.4.1.1, table 3.4. 
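# For example, a BlueZ failure whose error body contains "ATT error: 0x0e" is
# rendered by BleakDBusError.dbus_error_details above as "... (Unlikely Error)".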
+PROTOCOL_ERROR_CODES = { + 0x01: "Invalid Handle", + 0x02: "Read Not Permitted", + 0x03: "Write Not Permitted", + 0x04: "Invalid PDU", + 0x05: "Insufficient Authentication", + 0x06: "Request Not Supported", + 0x07: "Invalid Offset", + 0x08: "Insufficient Authorization", + 0x09: "Prepare Queue Full", + 0x0A: "Attribute Not Found", + 0x0B: "Attribute Not Long", + 0x0C: "Insufficient Encryption Key Size", + 0x0D: "Invalid Attribute Value Length", + 0x0E: "Unlikely Error", + 0x0F: "Insufficient Authentication", + 0x10: "Unsupported Group Type", + 0x11: "Insufficient Resource", + 0x12: "Database Out Of Sync", + 0x13: "Value Not Allowed", + # REVISIT: do we need Application Errors 0x80-0x9F? + 0xFC: "Write Request Rejected", + 0xFD: "Client Characteristic Configuration Descriptor Improperly Configured", + 0xFE: "Procedure Already in Progress", + 0xFF: "Out of Range", +} diff --git a/bleak/py.typed b/bleak/py.typed new file mode 100644 index 0000000..e69de29 diff --git a/bleak/uuids.py b/bleak/uuids.py new file mode 100644 index 0000000..1e91aac --- /dev/null +++ b/bleak/uuids.py @@ -0,0 +1,1274 @@ +# -*- coding: utf-8 -*- + +from typing import Dict +from uuid import UUID + +uuid16_dict: Dict[int, str] = { + 0x0001: "SDP", + 0x0003: "RFCOMM", + 0x0005: "TCS-BIN", + 0x0007: "ATT", + 0x0008: "OBEX", + 0x000F: "BNEP", + 0x0010: "UPNP", + 0x0011: "HIDP", + 0x0012: "Hardcopy Control Channel", + 0x0014: "Hardcopy Data Channel", + 0x0016: "Hardcopy Notification", + 0x0017: "AVCTP", + 0x0019: "AVDTP", + 0x001B: "CMTP", + 0x001E: "MCAP Control Channel", + 0x001F: "MCAP Data Channel", + 0x0100: "L2CAP", + # 0x0101 to 0x0fff undefined */ + 0x1000: "Service Discovery Server Service Class", + 0x1001: "Browse Group Descriptor Service Class", + 0x1002: "Public Browse Root", + # 0x1003 to 0x1100 undefined */ + 0x1101: "Serial Port", + 0x1102: "LAN Access Using PPP", + 0x1103: "Dialup Networking", + 0x1104: "IrMC Sync", + 0x1105: "OBEX Object Push", + 0x1106: "OBEX File Transfer", + 0x1107: "IrMC Sync Command", + 0x1108: "Headset", + 0x1109: "Cordless Telephony", + 0x110A: "Audio Source", + 0x110B: "Audio Sink", + 0x110C: "A/V Remote Control Target", + 0x110D: "Advanced Audio Distribution", + 0x110E: "A/V Remote Control", + 0x110F: "A/V Remote Control Controller", + 0x1110: "Intercom", + 0x1111: "Fax", + 0x1112: "Headset AG", + 0x1113: "WAP", + 0x1114: "WAP Client", + 0x1115: "PANU", + 0x1116: "NAP", + 0x1117: "GN", + 0x1118: "Direct Printing", + 0x1119: "Reference Printing", + 0x111A: "Basic Imaging Profile", + 0x111B: "Imaging Responder", + 0x111C: "Imaging Automatic Archive", + 0x111D: "Imaging Referenced Objects", + 0x111E: "Handsfree", + 0x111F: "Handsfree Audio Gateway", + 0x1120: "Direct Printing Refrence Objects Service", + 0x1121: "Reflected UI", + 0x1122: "Basic Printing", + 0x1123: "Printing Status", + 0x1124: "Human Interface Device Service", + 0x1125: "Hardcopy Cable Replacement", + 0x1126: "HCR Print", + 0x1127: "HCR Scan", + 0x1128: "Common ISDN Access", + # 0x1129 and 0x112a undefined */ + 0x112D: "SIM Access", + 0x112E: "Phonebook Access Client", + 0x112F: "Phonebook Access Server", + 0x1130: "Phonebook Access", + 0x1131: "Headset HS", + 0x1132: "Message Access Server", + 0x1133: "Message Notification Server", + 0x1134: "Message Access Profile", + 0x1135: "GNSS", + 0x1136: "GNSS Server", + 0x1137: "3D Display", + 0x1138: "3D Glasses", + 0x1139: "3D Synchronization", + 0x113A: "MPS Profile", + 0x113B: "MPS Service", + 0x113C: "CTN Access Service", + 0x113D: "CTN Notification Service", + 0x113E: 
"CTN Profile", + # 0x113f to 0x11ff undefined */ + 0x1200: "PnP Information", + 0x1201: "Generic Networking", + 0x1202: "Generic File Transfer", + 0x1203: "Generic Audio", + 0x1204: "Generic Telephony", + 0x1205: "UPNP Service", + 0x1206: "UPNP IP Service", + 0x1300: "UPNP IP PAN", + 0x1301: "UPNP IP LAP", + 0x1302: "UPNP IP L2CAP", + 0x1303: "Video Source", + 0x1304: "Video Sink", + 0x1305: "Video Distribution", + # 0x1306 to 0x13ff undefined */ + 0x1400: "HDP", + 0x1401: "HDP Source", + 0x1402: "HDP Sink", + # 0x1403 to 0x17ff undefined */ + 0x1800: "Generic Access Profile", + 0x1801: "Generic Attribute Profile", + 0x1802: "Immediate Alert", + 0x1803: "Link Loss", + 0x1804: "Tx Power", + 0x1805: "Current Time Service", + 0x1806: "Reference Time Update Service", + 0x1807: "Next DST Change Service", + 0x1808: "Glucose", + 0x1809: "Health Thermometer", + 0x180A: "Device Information", + # 0x180b and 0x180c undefined */ + 0x180D: "Heart Rate", + 0x180E: "Phone Alert Status Service", + 0x180F: "Battery Service", + 0x1810: "Blood Pressure", + 0x1811: "Alert Notification Service", + 0x1812: "Human Interface Device", + 0x1813: "Scan Parameters", + 0x1814: "Running Speed and Cadence", + 0x1815: "Automation IO", + 0x1816: "Cycling Speed and Cadence", + # 0x1817 undefined */ + 0x1818: "Cycling Power", + 0x1819: "Location and Navigation", + 0x181A: "Environmental Sensing", + 0x181B: "Body Composition", + 0x181C: "User Data", + 0x181D: "Weight Scale", + 0x181E: "Bond Management", + 0x181F: "Continuous Glucose Monitoring", + 0x1820: "Internet Protocol Support", + 0x1821: "Indoor Positioning", + 0x1822: "Pulse Oximeter", + 0x1823: "HTTP Proxy", + 0x1824: "Transport Discovery", + 0x1825: "Object Transfer", + 0x1826: "Fitness Machine", + 0x1827: "Mesh Provisioning", + 0x1828: "Mesh Proxy", + 0x1829: "Reconnection Configuration", + # 0x182a-0x1839 undefined + 0x183A: "Insulin Delivery", + 0x183B: "Binary Sensor", + 0x183C: "Emergency Configuration", + 0x183D: "Authorization Control", + 0x183E: "Physical Activity Monitor", + 0x183F: "Elapsed Time", + 0x1840: "Generic Health Sensor", + 0x1843: "Audio Input Control", + 0x1844: "Volume Control", + 0x1845: "Volume Offset Control", + 0x1846: "Coordinated Set Identification Service", + 0x1847: "Device Time", + 0x1848: "Media Control Service", + 0x1849: "Generic Media Control Service", + 0x184A: "Constant Tone Extension", + 0x184B: "Telephone Bearer Service", + 0x184C: "Generic Telephone Bearer Service", + 0x184D: "Microphone Control", + 0x184E: "Audio Stream Control Service", + 0x184F: "Broadcast Audio Scan Service", + 0x1850: "Published Audio Capabilities Service", + 0x1851: "Basic Audio Announcement Service", + 0x1852: "Broadcast Audio Announcement Service", + 0x1853: "Common Audio", + 0x1854: "Hearing Access", + 0x1855: "Telephony and Media Audio", + 0x1856: "Public Broadcast Announcement", + 0x1857: "Electronic Shelf Label", + 0x1859: "Mesh Proxy Solicitation", + # 0x185A to 0x26ff undefined */ + # 0x2700.. 
GATT Units + 0x2700: "unitless", + 0x2701: "length (metre)", + 0x2702: "mass (kilogram)", + 0x2703: "time (second)", + 0x2704: "electric current (ampere)", + 0x2705: "thermodynamic temperature (kelvin)", + 0x2706: "amount of substance (mole)", + 0x2707: "luminous intensity (candela)", + 0x2710: "area (square metres)", + 0x2711: "volume (cubic metres)", + 0x2712: "velocity (metres per second)", + 0x2713: "acceleration (metres per second squared)", + 0x2714: "wavenumber (reciprocal metre)", + 0x2715: "density (kilogram per cubic metre)", + 0x2716: "surface density (kilogram per square metre)", + 0x2717: "specific volume (cubic metre per kilogram)", + 0x2718: "current density (ampere per square metre)", + 0x2719: "magnetic field strength (ampere per metre)", + 0x271A: "amount concentration (mole per cubic metre)", + 0x271B: "mass concentration (kilogram per cubic metre)", + 0x271C: "luminance (candela per square metre)", + 0x271D: "refractive index", + 0x271E: "relative permeability", + 0x2720: "plane angle (radian)", + 0x2721: "solid angle (steradian)", + 0x2722: "frequency (hertz)", + 0x2723: "force (newton)", + 0x2724: "pressure (pascal)", + 0x2725: "energy (joule)", + 0x2726: "power (watt)", + 0x2727: "electric charge (coulomb)", + 0x2728: "electric potential difference (volt)", + 0x2729: "capacitance (farad)", + 0x272A: "electric resistance (ohm)", + 0x272B: "electric conductance (siemens)", + 0x272C: "magnetic flux (weber)", + 0x272D: "magnetic flux density (tesla)", + 0x272E: "inductance (henry)", + 0x272F: "Celsius temperature (degree Celsius)", + 0x2730: "luminous flux (lumen)", + 0x2731: "illuminance (lux)", + 0x2732: "activity referred to a radionuclide (becquerel)", + 0x2733: "absorbed dose (gray)", + 0x2734: "dose equivalent (sievert)", + 0x2735: "catalytic activity (katal)", + 0x2740: "dynamic viscosity (pascal second)", + 0x2741: "moment of force (newton metre)", + 0x2742: "surface tension (newton per metre)", + 0x2743: "angular velocity (radian per second)", + 0x2744: "angular acceleration (radian per second squared)", + 0x2745: "heat flux density (watt per square metre)", + 0x2746: "heat capacity (joule per kelvin)", + 0x2747: "specific heat capacity (joule per kilogram kelvin)", + 0x2748: "specific energy (joule per kilogram)", + 0x2749: "thermal conductivity (watt per metre kelvin)", + 0x274A: "energy density (joule per cubic metre)", + 0x274B: "electric field strength (volt per metre)", + 0x274C: "electric charge density (coulomb per cubic metre)", + 0x274D: "surface charge density (coulomb per square metre)", + 0x274E: "electric flux density (coulomb per square metre)", + 0x274F: "permittivity (farad per metre)", + 0x2750: "permeability (henry per metre)", + 0x2751: "molar energy (joule per mole)", + 0x2752: "molar entropy (joule per mole kelvin)", + 0x2753: "exposure (coulomb per kilogram)", + 0x2754: "absorbed dose rate (gray per second)", + 0x2755: "radiant intensity (watt per steradian)", + 0x2756: "radiance (watt per square metre steradian)", + 0x2757: "catalytic activity concentration (katal per cubic metre)", + 0x2760: "time (minute)", + 0x2761: "time (hour)", + 0x2762: "time (day)", + 0x2763: "plane angle (degree)", + 0x2764: "plane angle (minute)", + 0x2765: "plane angle (second)", + 0x2766: "area (hectare)", + 0x2767: "volume (litre)", + 0x2768: "mass (tonne)", + 0x2780: "pressure (bar)", + 0x2781: "pressure (millimetre of mercury)", + 0x2782: "length (ångström)", + 0x2783: "length (nautical mile)", + 0x2784: "area (barn)", + 0x2785: "velocity (knot)", + 0x2786: 
"logarithmic radio quantity (neper)", + 0x2787: "logarithmic radio quantity (bel)", + 0x27A0: "length (yard)", + 0x27A1: "length (parsec)", + 0x27A2: "length (inch)", + 0x27A3: "length (foot)", + 0x27A4: "length (mile)", + 0x27A5: "pressure (pound-force per square inch)", + 0x27A6: "velocity (kilometre per hour)", + 0x27A7: "velocity (mile per hour)", + 0x27A8: "angular velocity (revolution per minute)", + 0x27A9: "energy (gram calorie)", + 0x27AA: "energy (kilogram calorie)", + 0x27AB: "energy (kilowatt hour)", + 0x27AC: "thermodynamic temperature (degree Fahrenheit)", + 0x27AD: "percentage", + 0x27AE: "per mille", + 0x27AF: "period (beats per minute)", + 0x27B0: "electric charge (ampere hours)", + 0x27B1: "mass density (milligram per decilitre)", + 0x27B2: "mass density (millimole per litre)", + 0x27B3: "time (year)", + 0x27B4: "time (month)", + 0x27B5: "concentration (count per cubic metre)", + 0x27B6: "irradiance (watt per square metre)", + 0x27B7: "milliliter (per kilogram per minute)", + 0x27B8: "mass (pound)", + 0x27B9: "metabolic equivalent", + 0x27BA: "step (per minute)", + 0x27BC: "stroke (per minute)", + 0x27BD: "pace (kilometre per minute)", + 0x27BE: "luminous efficacy (lumen per watt)", + 0x27BF: "luminous energy (lumen hour)", + 0x27C0: "luminous exposure (lux hour)", + 0x27C1: "mass flow (gram per second)", + 0x27C2: "volume flow (litre per second)", + 0x27C3: "sound pressure (decible)", + 0x27C4: "parts per million", + 0x27C5: "parts per billion", + 0x2800: "Primary Service", + 0x2801: "Secondary Service", + 0x2802: "Include", + 0x2803: "Characteristic", + # 0x2804 to 0x28ff undefined */ + # Descriptors (SIG) + 0x2900: "Characteristic Extended Properties", + 0x2901: "Characteristic User Description", + 0x2902: "Client Characteristic Configuration", + 0x2903: "Server Characteristic Configuration", + 0x2904: "Characteristic Presentation Format", + 0x2905: "Characteristic Aggregate Format", + 0x2906: "Valid Range", + 0x2907: "External Report Reference", + 0x2908: "Report Reference", + 0x2909: "Number of Digitals", + 0x290A: "Value Trigger Setting", + 0x290B: "Environmental Sensing Configuration", + 0x290C: "Environmental Sensing Measurement", + 0x290D: "Environmental Sensing Trigger Setting", + 0x290E: "Time Trigger Setting", + 0x290F: "Complete BR-EDR Transport Block Data", + # 0x2910 to 0x29ff undefined */ + # 0x2a00.. 
GATT characteristic and Object Types + 0x2A00: "Device Name", + 0x2A01: "Appearance", + 0x2A02: "Peripheral Privacy Flag", + 0x2A03: "Reconnection Address", + 0x2A04: "Peripheral Preferred Connection Parameters", + 0x2A05: "Service Changed", + 0x2A06: "Alert Level", + 0x2A07: "Tx Power Level", + 0x2A08: "Date Time", + 0x2A09: "Day of Week", + 0x2A0A: "Day Date Time", + 0x2A0B: "Exact Time 100", + 0x2A0C: "Exact Time 256", + 0x2A0D: "DST Offset", + 0x2A0E: "Time Zone", + 0x2A0F: "Local Time Information", + 0x2A10: "Secondary Time Zone", + 0x2A11: "Time with DST", + 0x2A12: "Time Accuracy", + 0x2A13: "Time Source", + 0x2A14: "Reference Time Information", + 0x2A15: "Time Broadcast", + 0x2A16: "Time Update Control Point", + 0x2A17: "Time Update State", + 0x2A18: "Glucose Measurement", + 0x2A19: "Battery Level", + 0x2A1A: "Battery Power State", + 0x2A1B: "Battery Level State", + 0x2A1C: "Temperature Measurement", + 0x2A1D: "Temperature Type", + 0x2A1E: "Intermediate Temperature", + 0x2A1F: "Temperature Celsius", + 0x2A20: "Temperature Fahrenheit", + 0x2A21: "Measurement Interval", + 0x2A22: "Boot Keyboard Input Report", + 0x2A23: "System ID", + 0x2A24: "Model Number String", + 0x2A25: "Serial Number String", + 0x2A26: "Firmware Revision String", + 0x2A27: "Hardware Revision String", + 0x2A28: "Software Revision String", + 0x2A29: "Manufacturer Name String", + 0x2A2A: "IEEE 11073-20601 Regulatory Cert. Data List", + 0x2A2B: "Current Time", + 0x2A2C: "Magnetic Declination", + # 0x2a2d to 0x2a2e undefined */ + 0x2A2F: "Position 2D", + 0x2A30: "Position 3D", + 0x2A31: "Scan Refresh", + 0x2A32: "Boot Keyboard Output Report", + 0x2A33: "Boot Mouse Input Report", + 0x2A34: "Glucose Measurement Context", + 0x2A35: "Blood Pressure Measurement", + 0x2A36: "Intermediate Cuff Pressure", + 0x2A37: "Heart Rate Measurement", + 0x2A38: "Body Sensor Location", + 0x2A39: "Heart Rate Control Point", + 0x2A3A: "Removable", + 0x2A3B: "Service Required", + 0x2A3C: "Scientific Temperature Celsius", + 0x2A3D: "String", + 0x2A3E: "Network Availability", + 0x2A3F: "Alert Status", + 0x2A40: "Ringer Control Point", + 0x2A41: "Ringer Setting", + 0x2A42: "Alert Category ID Bit Mask", + 0x2A43: "Alert Category ID", + 0x2A44: "Alert Notification Control Point", + 0x2A45: "Unread Alert Status", + 0x2A46: "New Alert", + 0x2A47: "Supported New Alert Category", + 0x2A48: "Supported Unread Alert Category", + 0x2A49: "Blood Pressure Feature", + 0x2A4A: "HID Information", + 0x2A4B: "Report Map", + 0x2A4C: "HID Control Point", + 0x2A4D: "Report", + 0x2A4E: "Protocol Mode", + 0x2A4F: "Scan Interval Window", + 0x2A50: "PnP ID", + 0x2A51: "Glucose Feature", + 0x2A52: "Record Access Control Point", + 0x2A53: "RSC Measurement", + 0x2A54: "RSC Feature", + 0x2A55: "SC Control Point", + 0x2A56: "Digital", + 0x2A57: "Digital Output", + 0x2A58: "Analog", + 0x2A59: "Analog Output", + 0x2A5A: "Aggregate", + 0x2A5B: "CSC Measurement", + 0x2A5C: "CSC Feature", + 0x2A5D: "Sensor Location", + 0x2A5E: "PLX Spot-Check Measurement", + 0x2A5F: "PLX Continuous Measurement Characteristic", + 0x2A60: "PLX Features", + 0x2A62: "Pulse Oximetry Control Point", + 0x2A63: "Cycling Power Measurement", + 0x2A64: "Cycling Power Vector", + 0x2A65: "Cycling Power Feature", + 0x2A66: "Cycling Power Control Point", + 0x2A67: "Location and Speed", + 0x2A68: "Navigation", + 0x2A69: "Position Quality", + 0x2A6A: "LN Feature", + 0x2A6B: "LN Control Point", + 0x2A6C: "Elevation", + 0x2A6D: "Pressure", + 0x2A6E: "Temperature", + 0x2A6F: "Humidity", + 0x2A70: "True Wind 
Speed", + 0x2A71: "True Wind Direction", + 0x2A72: "Apparent Wind Speed", + 0x2A73: "Apparent Wind Direction", + 0x2A74: "Gust Factor", + 0x2A75: "Pollen Concentration", + 0x2A76: "UV Index", + 0x2A77: "Irradiance", + 0x2A78: "Rainfall", + 0x2A79: "Wind Chill", + 0x2A7A: "Heat Index", + 0x2A7B: "Dew Point", + 0x2A7C: "Trend", + 0x2A7D: "Descriptor Value Changed", + 0x2A7E: "Aerobic Heart Rate Lower Limit", + 0x2A7F: "Aerobic Threshold", + 0x2A80: "Age", + 0x2A81: "Anaerobic Heart Rate Lower Limit", + 0x2A82: "Anaerobic Heart Rate Upper Limit", + 0x2A83: "Anaerobic Threshold", + 0x2A84: "Aerobic Heart Rate Upper Limit", + 0x2A85: "Date of Birth", + 0x2A86: "Date of Threshold Assessment", + 0x2A87: "Email Address", + 0x2A88: "Fat Burn Heart Rate Lower Limit", + 0x2A89: "Fat Burn Heart Rate Upper Limit", + 0x2A8A: "First Name", + 0x2A8B: "Five Zone Heart Rate Limits", + 0x2A8C: "Gender", + 0x2A8D: "Heart Rate Max", + 0x2A8E: "Height", + 0x2A8F: "Hip Circumference", + 0x2A90: "Last Name", + 0x2A91: "Maximum Recommended Heart Rate", + 0x2A92: "Resting Heart Rate", + 0x2A93: "Sport Type for Aerobic/Anaerobic Thresholds", + 0x2A94: "Three Zone Heart Rate Limits", + 0x2A95: "Two Zone Heart Rate Limit", + 0x2A96: "VO2 Max", + 0x2A97: "Waist Circumference", + 0x2A98: "Weight", + 0x2A99: "Database Change Increment", + 0x2A9A: "User Index", + 0x2A9B: "Body Composition Feature", + 0x2A9C: "Body Composition Measurement", + 0x2A9D: "Weight Measurement", + 0x2A9E: "Weight Scale Feature", + 0x2A9F: "User Control Point", + 0x2AA0: "Magnetic Flux Density - 2D", + 0x2AA1: "Magnetic Flux Density - 3D", + 0x2AA2: "Language", + 0x2AA3: "Barometric Pressure Trend", + 0x2AA4: "Bond Management Control Point", + 0x2AA5: "Bond Management Feature", + 0x2AA6: "Central Address Resolution", + 0x2AA7: "CGM Measurement", + 0x2AA8: "CGM Feature", + 0x2AA9: "CGM Status", + 0x2AAA: "CGM Session Start Time", + 0x2AAB: "CGM Session Run Time", + 0x2AAC: "CGM Specific Ops Control Point", + 0x2AAD: "Indoor Positioning Configuration", + 0x2AAE: "Latitude", + 0x2AAF: "Longitude", + 0x2AB0: "Local North Coordinate", + 0x2AB1: "Local East Coordinate", + 0x2AB2: "Floor Number", + 0x2AB3: "Altitude", + 0x2AB4: "Uncertainty", + 0x2AB5: "Location Name", + 0x2AB6: "URI", + 0x2AB7: "HTTP Headers", + 0x2AB8: "HTTP Status Code", + 0x2AB9: "HTTP Entity Body", + 0x2ABA: "HTTP Control Point", + 0x2ABB: "HTTPS Security", + 0x2ABC: "TDS Control Point", + 0x2ABD: "OTS Feature", + 0x2ABE: "Object Name", + 0x2ABF: "Object Type", + 0x2AC0: "Object Size", + 0x2AC1: "Object First-Created", + 0x2AC2: "Object Last-Modified", + 0x2AC3: "Object ID", + 0x2AC4: "Object Properties", + 0x2AC5: "Object Action Control Point", + 0x2AC6: "Object List Control Point", + 0x2AC7: "Object List Filter", + 0x2AC8: "Object Changed", + 0x2AC9: "Resolvable Private Address Only", + # 0x2aca and 0x2acb undefined */ + 0x2ACC: "Fitness Machine Feature", + 0x2ACD: "Treadmill Data", + 0x2ACE: "Cross Trainer Data", + 0x2ACF: "Step Climber Data", + 0x2AD0: "Stair Climber Data", + 0x2AD1: "Rower Data", + 0x2AD2: "Indoor Bike Data", + 0x2AD3: "Training Status", + 0x2AD4: "Supported Speed Range", + 0x2AD5: "Supported Inclination Range", + 0x2AD6: "Supported Resistance Level Range", + 0x2AD7: "Supported Heart Rate Range", + 0x2AD8: "Supported Power Range", + 0x2AD9: "Fitness Machine Control Point", + 0x2ADA: "Fitness Machine Status", + 0x2ADB: "Mesh Provisioning Data In", + 0x2ADC: "Mesh Provisioning Data Out", + 0x2ADD: "Mesh Proxy Data In", + 0x2ADE: "Mesh Proxy Data Out", + 0x2AE0: 
"Average Current", + 0x2AE1: "Average Voltage", + 0x2AE2: "Boolean", + 0x2AE3: "Chromatic Distance From Planckian", + 0x2AE4: "Chromaticity Coordinates", + 0x2AE5: "Chromaticity In CCT And Duv Values", + 0x2AE6: "Chromaticity Tolerance", + 0x2AE7: "CIE 13.3-1995 Color Rendering Index", + 0x2AE8: "Coefficient", + 0x2AE9: "Correlated Color Temperature", + 0x2AEA: "Count 16", + 0x2AEB: "Count 24", + 0x2AEC: "Country Code", + 0x2AED: "Date UTC", + 0x2AEE: "Electric Current", + 0x2AEF: "Electric Current Range", + 0x2AF0: "Electric Current Specification", + 0x2AF1: "Electric Current Statistics", + 0x2AF2: "Energy", + 0x2AF3: "Energy In A Period Of Day", + 0x2AF4: "Event Statistics", + 0x2AF5: "Fixed String 16", + 0x2AF6: "Fixed String 24", + 0x2AF7: "Fixed String 36", + 0x2AF8: "Fixed String 8", + 0x2AF9: "Generic Level", + 0x2AFA: "Global Trade Item Number", + 0x2AFB: "Illuminance", + 0x2AFC: "Luminous Efficacy", + 0x2AFD: "Luminous Energy", + 0x2AFE: "Luminous Exposure", + 0x2AFF: "Luminous Flux", + 0x2B00: "Luminous Flux Range", + 0x2B01: "Luminous Intensity", + 0x2B02: "Mass Flow", + 0x2B03: "Perceived Lightness", + 0x2B04: "Percentage 8", + 0x2B05: "Power", + 0x2B06: "Power Specification", + 0x2B07: "Relative Runtime In A Current Range", + 0x2B08: "Relative Runtime In A Generic Level Range", + 0x2B09: "Relative Value In A Voltage Range", + 0x2B0A: "Relative Value In An Illuminance Range", + 0x2B0B: "Relative Value In A Period of Day", + 0x2B0C: "Relative Value In A Temperature Range", + 0x2B0D: "Temperature 8", + 0x2B0E: "Temperature 8 In A Period Of Day", + 0x2B0F: "Temperature 8 Statistics", + 0x2B10: "Temperature Range", + 0x2B11: "Temperature Statistics", + 0x2B12: "Time Decihour 8", + 0x2B13: "Time Exponential 8", + 0x2B14: "Time Hour 24", + 0x2B15: "Time Millisecond 24", + 0x2B16: "Time Second 16", + 0x2B17: "Time Second 8", + 0x2B18: "Voltage", + 0x2B19: "Voltage Specification", + 0x2B1A: "Voltage Statistics", + 0x2B1B: "Volume Flow", + 0x2B1C: "Chromaticity Coordinate", + 0x2B1D: "RC Feature", + 0x2B1E: "RC Settings", + 0x2B1F: "Reconnection Configuration Control Point", + 0x2B20: "IDD Status Changed", + 0x2B21: "IDD Status", + 0x2B22: "IDD Annunciation Status", + 0x2B23: "IDD Features", + 0x2B24: "IDD Status Reader Control Point", + 0x2B25: "IDD Command Control Point", + 0x2B26: "IDD Command Data", + 0x2B27: "IDD Record Access Control Point", + 0x2B28: "IDD History Data", + 0x2B29: "Client Supported Features", + 0x2B2A: "Database Hash", + 0x2B2B: "BSS Control Point", + 0x2B2C: "BSS Response", + 0x2B2D: "Emergency ID", + 0x2B2E: "Emergency Text", + 0x2B2F: "ACS Status", + 0x2B30: "ACS Data In", + 0x2B31: "ACS Data Out Notify", + 0x2B32: "ACS Data Out Indicate", + 0x2B33: "ACS Control Point", + 0x2B34: "Enhanced Blood Pressure Measurement", + 0x2B35: "Enhanced Intermediate Cuff Pressure", + 0x2B36: "Blood Pressure Record", + 0x2B37: "Registered User", + 0x2B38: "BR-EDR Handover Data", + 0x2B39: "Bluetooth SIG Data", + 0x2B3A: "Server Supported Features", + 0x2B3B: "Physical Activity Monitor Features", + 0x2B3C: "General Activity Instantaneous Data", + 0x2B3D: "General Activity Summary Data", + 0x2B3E: "CardioRespiratory Activity Instantaneous Data", + 0x2B3F: "CardioRespiratory Activity Summary Data", + 0x2B40: "Step Counter Activity Summary Data", + 0x2B41: "Sleep Activity Instantaneous Data", + 0x2B42: "Sleep Activity Summary Data", + 0x2B43: "Physical Activity Monitor Control Point", + 0x2B44: "Current Session", + 0x2B45: "Session", + 0x2B46: "Preferred Units", + 0x2B47: "High 
Resolution Height", + 0x2B48: "Middle Name", + 0x2B49: "Stride Length", + 0x2B4A: "Handedness", + 0x2B4B: "Device Wearing Position", + 0x2B4C: "Four Zone Heart Rate Limits", + 0x2B4D: "High Intensity Exercise Threshold", + 0x2B4E: "Activity Goal", + 0x2B4F: "Sedentary Interval Notification", + 0x2B50: "Caloric Intake", + 0x2B51: "TMAP Role", + 0x2B77: "Audio Input State", + 0x2B78: "Gain Settings Attribute", + 0x2B79: "Audio Input Type", + 0x2B7A: "Audio Input Status", + 0x2B7B: "Audio Input Control Point", + 0x2B7C: "Audio Input Description", + 0x2B7D: "Volume State", + 0x2B7E: "Volume Control Point", + 0x2B7F: "Volume Flags", + 0x2B80: "Offset State", + 0x2B81: "Audio Location", + 0x2B82: "Volume Offset Control Point", + 0x2B83: "Audio Output Description", + 0x2B84: "Set Identity Resolving Key Characteristic", + 0x2B85: "Size Characteristic", + 0x2B86: "Lock Characteristic", + 0x2B87: "Rank Characteristic", + 0x2B88: "Encrypted Data Key Material", + 0x2B89: "Apparent Energy 32", + 0x2B8A: "Apparent Power", + 0x2B8B: "Live Health Observations", + 0x2B8C: "CO\textsubscript{2} Concentration", + 0x2B8D: "Cosine of the Angle", + 0x2B8E: "Device Time Feature", + 0x2B8F: "Device Time Parameters", + 0x2B90: "Device Time", + 0x2B91: "Device Time Control Point", + 0x2B92: "Time Change Log Data", + 0x2B93: "Media Player Name", + 0x2B94: "Media Player Icon Object ID", + 0x2B95: "Media Player Icon URL", + 0x2B96: "Track Changed", + 0x2B97: "Track Title", + 0x2B98: "Track Duration", + 0x2B99: "Track Position", + 0x2B9A: "Playback Speed", + 0x2B9B: "Seeking Speed", + 0x2B9C: "Current Track Segments Object ID", + 0x2B9D: "Current Track Object ID", + 0x2B9E: "Next Track Object ID", + 0x2B9F: "Parent Group Object ID", + 0x2BA0: "Current Group Object ID", + 0x2BA1: "Playing Order", + 0x2BA2: "Playing Orders Supported", + 0x2BA3: "Media State", + 0x2BA4: "Media Control Point", + 0x2BA5: "Media Control Point Opcodes Supported", + 0x2BA6: "Search Results Object ID", + 0x2BA7: "Search Control Point", + 0x2BA8: "Energy 32", + 0x2BA9: "Media Player Icon Object Type", + 0x2BAA: "Track Segments Object Type", + 0x2BAB: "Track Object Type", + 0x2BAC: "Group Object Type", + 0x2BAD: "Constant Tone Extension Enable", + 0x2BAE: "Advertising Constant Tone Extension Minimum Length", + 0x2BAF: "Advertising Constant Tone Extension Minimum Transmit Count", + 0x2BB0: "Advertising Constant Tone Extension Transmit Duration", + 0x2BB1: "Advertising Constant Tone Extension Interval", + 0x2BB2: "Advertising Constant Tone Extension PHY", + 0x2BB3: "Bearer Provider Name", + 0x2BB4: "Bearer UCI", + 0x2BB5: "Bearer Technology", + 0x2BB6: "Bearer URI Schemes Supported List", + 0x2BB7: "Bearer Signal Strength", + 0x2BB8: "Bearer Signal Strength Reporting Interval", + 0x2BB9: "Bearer List Current Calls", + 0x2BBA: "Content Control ID", + 0x2BBB: "Status Flags", + 0x2BBC: "Incoming Call Target Bearer URI", + 0x2BBD: "Call State", + 0x2BBE: "Call Control Point", + 0x2BBF: "Call Control Point Optional Opcodes", + 0x2BC0: "Termination Reason", + 0x2BC1: "Incoming Call", + 0x2BC2: "Call Friendly Name", + 0x2BC3: "Mute", + 0x2BC4: "Sink ASE", + 0x2BC5: "Source ASE", + 0x2BC6: "ASE Control Point", + 0x2BC7: "Broadcast Audio Scan Control Point", + 0x2BC8: "Broadcast Receive State", + 0x2BC9: "Sink PAC", + 0x2BCA: "Sink Audio Locations", + 0x2BCB: "Source PAC", + 0x2BCC: "Source Audio Locations", + 0x2BCD: "Available Audio Contexts", + 0x2BCE: "Supported Audio Contexts", + 0x2BCF: "Ammonia Concentration", + 0x2BD0: "Carbon Monoxide 
Concentration", + 0x2BD1: "Methane Concentration", + 0x2BD2: "Nitrogen Dioxide Concentration", + 0x2BD3: "Non-Methane Volatile Organic Compounds Concentration", + 0x2BD4: "Ozone Concentration", + 0x2BD5: "Particulate Matter - PM1 Concentration", + 0x2BD6: "Particulate Matter - PM2.5 Concentration", + 0x2BD7: "Particulate Matter - PM10 Concentration", + 0x2BD8: "Sulfur Dioxide Concentration", + 0x2BD9: "Sulfur Hexafluoride Concentration", + 0x2BDA: "Hearing Aid Features", + 0x2BDB: "Hearing Aid Preset Control Point", + 0x2BDC: "Active Preset Index", + 0x2BDD: "Stored Health Observations", + 0x2BDE: "Fixed String 64", + 0x2BDF: "High Temperature", + 0x2BE0: "High Voltage", + 0x2BE1: "Light Distribution", + 0x2BE2: "Light Output", + 0x2BE3: "Light Source Type", + 0x2BE4: "Noise", + 0x2BE5: "Relative Runtime in a Correlated Color Temperature Range", + 0x2BE6: "Time Second 32", + 0x2BE7: "VOC Concentration", + 0x2BE8: "Voltage Frequency", + 0x2BE9: "Battery Critical Status", + 0x2BEA: "Battery Health Status", + 0x2BEB: "Battery Health Information", + 0x2BEC: "Battery Information", + 0x2BED: "Battery Level Status", + 0x2BEE: "Battery Time Status", + 0x2BEF: "Estimated Service Date", + 0x2BF0: "Battery Energy Status", + 0x2BF1: "Observation Schedule Changed", + 0x2BF2: "Current Elapsed Time", + 0x2BF3: "Health Sensor Features", + 0x2BF4: "GHS Control Point", + 0x2BF5: "LE GATT Security Levels", + 0x2BF6: "ESL Address", + 0x2BF7: "AP Sync Key Material", + 0x2BF8: "ESL Response Key Material", + 0x2BF9: "ESL Current Absolute Time", + 0x2BFA: "ESL Display Information", + 0x2BFB: "ESL Image Information", + 0x2BFC: "ESL Sensor Information", + 0x2BFD: "ESL LED Information", + 0x2BFE: "ESL Control Point", + 0x2BFF: "UDI for Medical Devices", + 0xFE1C: "NetMedia: Inc.", + 0xFE1D: "Illuminati Instrument Corporation", + 0xFE1E: "Smart Innovations Co.: Ltd", + 0xFE1F: "Garmin International: Inc.", + 0xFE20: "Emerson", + 0xFE21: "Bose Corporation", + 0xFE22: "Zoll Medical Corporation", + 0xFE23: "Zoll Medical Corporation", + 0xFE24: "August Home Inc", + 0xFE25: "Apple: Inc.", + 0xFE26: "Google Inc.", + 0xFE27: "Google Inc.", + 0xFE28: "Ayla Network", + 0xFE29: "Gibson Innovations", + 0xFE2A: "DaisyWorks: Inc.", + 0xFE2B: "ITT Industries", + 0xFE2C: "Google Inc.", + 0xFE2D: "SMART INNOVATION Co.,Ltd", + 0xFE2E: "ERi,Inc.", + 0xFE2F: "CRESCO Wireless: Inc", + 0xFE30: "Volkswagen AG", + 0xFE31: "Volkswagen AG", + 0xFE32: "Pro-Mark: Inc.", + 0xFE33: "CHIPOLO d.o.o.", + 0xFE34: "SmallLoop LLC", + 0xFE35: "HUAWEI Technologies Co.: Ltd", + 0xFE36: "HUAWEI Technologies Co.: Ltd", + 0xFE37: "Spaceek LTD", + 0xFE38: "Spaceek LTD", + 0xFE39: "TTS Tooltechnic Systems AG & Co. KG", + 0xFE3A: "TTS Tooltechnic Systems AG & Co. KG", + 0xFE3B: "Dolby Laboratories", + 0xFE3C: "Alibaba", + 0xFE3D: "BD Medical", + 0xFE3E: "BD Medical", + 0xFE3F: "Friday Labs Limited", + 0xFE40: "Inugo Systems Limited", + 0xFE41: "Inugo Systems Limited", + 0xFE42: "Nets A/S", + 0xFE43: "Andreas Stihl AG & Co. 
KG", + 0xFE44: "SK Telecom", + 0xFE45: "Snapchat Inc", + 0xFE46: "B&O Play A/S", + 0xFE47: "General Motors", + 0xFE48: "General Motors", + 0xFE49: "SenionLab AB", + 0xFE4A: "OMRON HEALTHCARE Co.: Ltd.", + 0xFE4B: "Koninklijke Philips N.V.", + 0xFE4C: "Volkswagen AG", + 0xFE4D: "Casambi Technologies Oy", + 0xFE4E: "NTT docomo", + 0xFE4F: "Molekule: Inc.", + 0xFE50: "Google Inc.", + 0xFE51: "SRAM", + 0xFE52: "SetPoint Medical", + 0xFE53: "3M", + 0xFE54: "Motiv: Inc.", + 0xFE55: "Google Inc.", + 0xFE56: "Google Inc.", + 0xFE57: "Dotted Labs", + 0xFE58: "Nordic Semiconductor ASA", + 0xFE59: "Nordic Semiconductor ASA", + 0xFE5A: "Chronologics Corporation", + 0xFE5B: "GT-tronics HK Ltd", + 0xFE5C: "million hunters GmbH", + 0xFE5D: "Grundfos A/S", + 0xFE5E: "Plastc Corporation", + 0xFE5F: "Eyefi: Inc.", + 0xFE60: "Lierda Science & Technology Group Co.: Ltd.", + 0xFE61: "Logitech International SA", + 0xFE62: "Indagem Tech LLC", + 0xFE63: "Connected Yard: Inc.", + 0xFE64: "Siemens AG", + 0xFE65: "CHIPOLO d.o.o.", + 0xFE66: "Intel Corporation", + 0xFE67: "Lab Sensor Solutions", + 0xFE68: "Qualcomm Life Inc", + 0xFE69: "Qualcomm Life Inc", + 0xFE6A: "Kontakt Micro-Location Sp. z o.o.", + 0xFE6B: "TASER International: Inc.", + 0xFE6C: "TASER International: Inc.", + 0xFE6D: "The University of Tokyo", + 0xFE6E: "The University of Tokyo", + 0xFE6F: "LINE Corporation", + 0xFE70: "Beijing Jingdong Century Trading Co.: Ltd.", + 0xFE71: "Plume Design Inc", + 0xFE72: "St. Jude Medical: Inc.", + 0xFE73: "St. Jude Medical: Inc.", + 0xFE74: "unwire", + 0xFE75: "TangoMe", + 0xFE76: "TangoMe", + 0xFE77: "Hewlett-Packard Company", + 0xFE78: "Hewlett-Packard Company", + 0xFE79: "Zebra Technologies", + 0xFE7A: "Bragi GmbH", + 0xFE7B: "Orion Labs: Inc.", + 0xFE7C: "Stollmann E+V GmbH", + 0xFE7D: "Aterica Health Inc.", + 0xFE7E: "Awear Solutions Ltd", + 0xFE7F: "Doppler Lab", + 0xFE80: "Doppler Lab", + 0xFE81: "Medtronic Inc.", + 0xFE82: "Medtronic Inc.", + 0xFE83: "Blue Bite", + 0xFE84: "RF Digital Corp", + 0xFE85: "RF Digital Corp", + 0xFE86: "HUAWEI Technologies Co.: Ltd.", + 0xFE87: "Qingdao Yeelink Information Technology Co.: Ltd.", + 0xFE88: "SALTO SYSTEMS S.L.", + 0xFE89: "B&O Play A/S", + 0xFE8A: "Apple: Inc.", + 0xFE8B: "Apple: Inc.", + 0xFE8C: "TRON Forum", + 0xFE8D: "Interaxon Inc.", + 0xFE8E: "ARM Ltd", + 0xFE8F: "CSR", + 0xFE90: "JUMA", + 0xFE91: "Shanghai Imilab Technology Co.,Ltd", + 0xFE92: "Jarden Safety & Security", + 0xFE93: "OttoQ Inc.", + 0xFE94: "OttoQ Inc.", + 0xFE95: "Xiaomi Inc.", + 0xFE96: "Tesla Motor Inc.", + 0xFE97: "Tesla Motor Inc.", + 0xFE98: "Currant: Inc.", + 0xFE99: "Currant: Inc.", + 0xFE9A: "Estimote", + 0xFE9B: "Samsara Networks: Inc", + 0xFE9C: "GSI Laboratories: Inc.", + 0xFE9D: "Mobiquity Networks Inc", + 0xFE9E: "Dialog Semiconductor B.V.", + 0xFE9F: "Google", + 0xFEA0: "Google", + 0xFEA1: "Intrepid Control Systems: Inc.", + 0xFEA2: "Intrepid Control Systems: Inc.", + 0xFEA3: "ITT Industries", + 0xFEA4: "Paxton Access Ltd", + 0xFEA5: "GoPro: Inc.", + 0xFEA6: "GoPro: Inc.", + 0xFEA7: "UTC Fire and Security", + 0xFEA8: "Savant Systems LLC", + 0xFEA9: "Savant Systems LLC", + 0xFEAA: "Google", + 0xFEAB: "Nokia Corporation", + 0xFEAC: "Nokia Corporation", + 0xFEAD: "Nokia Corporation", + 0xFEAE: "Nokia Corporation", + 0xFEAF: "Nest Labs Inc.", + 0xFEB0: "Nest Labs Inc.", + 0xFEB1: "Electronics Tomorrow Limited", + 0xFEB2: "Microsoft Corporation", + 0xFEB3: "Taobao", + 0xFEB4: "WiSilica Inc.", + 0xFEB5: "WiSilica Inc.", + 0xFEB6: "Vencer Co: Ltd", + 0xFEB7: "Facebook: Inc.", + 
0xFEB8: "Facebook: Inc.", + 0xFEB9: "LG Electronics", + 0xFEBA: "Tencent Holdings Limited", + 0xFEBB: "adafruit industries", + 0xFEBC: "Dexcom: Inc.", + 0xFEBD: "Clover Network: Inc.", + 0xFEBE: "Bose Corporation", + 0xFEBF: "Nod: Inc.", + 0xFEC0: "KDDI Corporation", + 0xFEC1: "KDDI Corporation", + 0xFEC2: "Blue Spark Technologies: Inc.", + 0xFEC3: "360fly: Inc.", + 0xFEC4: "PLUS Location Systems", + 0xFEC5: "Realtek Semiconductor Corp.", + 0xFEC6: "Kocomojo: LLC", + 0xFEC7: "Apple: Inc.", + 0xFEC8: "Apple: Inc.", + 0xFEC9: "Apple: Inc.", + 0xFECA: "Apple: Inc.", + 0xFECB: "Apple: Inc.", + 0xFECC: "Apple: Inc.", + 0xFECD: "Apple: Inc.", + 0xFECE: "Apple: Inc.", + 0xFECF: "Apple: Inc.", + 0xFED0: "Apple: Inc.", + 0xFED1: "Apple: Inc.", + 0xFED2: "Apple: Inc.", + 0xFED3: "Apple: Inc.", + 0xFED4: "Apple: Inc.", + 0xFED5: "Plantronics Inc.", + 0xFED6: "Broadcom Corporation", + 0xFED7: "Broadcom Corporation", + 0xFED8: "Google", + 0xFED9: "Pebble Technology Corporation", + 0xFEDA: "ISSC Technologies Corporation", + 0xFEDB: "Perka: Inc.", + 0xFEDC: "Jawbone", + 0xFEDD: "Jawbone", + 0xFEDE: "Coin: Inc.", + 0xFEDF: "Design SHIFT", + 0xFEE0: "Anhui Huami Information Technology Co.", + 0xFEE1: "Anhui Huami Information Technology Co.", + 0xFEE2: "Anki: Inc.", + 0xFEE3: "Anki: Inc.", + 0xFEE4: "Nordic Semiconductor ASA", + 0xFEE5: "Nordic Semiconductor ASA", + 0xFEE6: "Seed Labs: Inc.", + 0xFEE7: "Tencent Holdings Limited", + 0xFEE8: "Quintic Corp.", + 0xFEE9: "Quintic Corp.", + 0xFEEA: "Swirl Networks: Inc.", + 0xFEEB: "Swirl Networks: Inc.", + 0xFEEC: "Tile: Inc.", + 0xFEED: "Tile: Inc.", + 0xFEEE: "Polar Electro Oy", + 0xFEEF: "Polar Electro Oy", + 0xFEF0: "Intel", + 0xFEF1: "CSR", + 0xFEF2: "CSR", + 0xFEF3: "Google", + 0xFEF4: "Google", + 0xFEF5: "Dialog Semiconductor GmbH", + 0xFEF6: "Wicentric: Inc.", + 0xFEF7: "Aplix Corporation", + 0xFEF8: "Aplix Corporation", + 0xFEF9: "PayPal: Inc.", + 0xFEFA: "PayPal: Inc.", + 0xFEFB: "Stollmann E+V GmbH", + 0xFEFC: "Gimbal: Inc.", + 0xFEFD: "Gimbal: Inc.", + 0xFEFE: "GN ReSound A/S", + 0xFEFF: "GN Netcom", + 0xFFFC: "AirFuel Alliance", + 0xFFFD: "Fast IDentity Online Alliance (FIDO)", + 0xFFFE: "Alliance for Wireless Power (A4WP)", +} + +uuid128_dict: Dict[str, str] = { + "a3c87500-8ed3-4bdf-8a39-a01bebede295": "Eddystone Configuration Service", + "a3c87501-8ed3-4bdf-8a39-a01bebede295": "Capabilities", + "a3c87502-8ed3-4bdf-8a39-a01bebede295": "Active Slot", + "a3c87503-8ed3-4bdf-8a39-a01bebede295": "Advertising Interval", + "a3c87504-8ed3-4bdf-8a39-a01bebede295": "Radio Tx Power", + "a3c87505-8ed3-4bdf-8a39-a01bebede295": "(Advanced) Advertised Tx Power", + "a3c87506-8ed3-4bdf-8a39-a01bebede295": "Lock State", + "a3c87507-8ed3-4bdf-8a39-a01bebede295": "Unlock", + "a3c87508-8ed3-4bdf-8a39-a01bebede295": "Public ECDH Key", + "a3c87509-8ed3-4bdf-8a39-a01bebede295": "EID Identity Key", + "a3c8750a-8ed3-4bdf-8a39-a01bebede295": "ADV Slot Data", + "a3c8750b-8ed3-4bdf-8a39-a01bebede295": "(Advanced) Factory reset", + "a3c8750c-8ed3-4bdf-8a39-a01bebede295": "(Advanced) Remain Connectable", + # BBC micro:bit Bluetooth Profiles */ + "e95d0753-251d-470a-a062-fa1922dfa9a8": "MicroBit Accelerometer Service", + "e95dca4b-251d-470a-a062-fa1922dfa9a8": "MicroBit Accelerometer Data", + "e95dfb24-251d-470a-a062-fa1922dfa9a8": "MicroBit Accelerometer Period", + "e95df2d8-251d-470a-a062-fa1922dfa9a8": "MicroBit Magnetometer Service", + "e95dfb11-251d-470a-a062-fa1922dfa9a8": "MicroBit Magnetometer Data", + "e95d386c-251d-470a-a062-fa1922dfa9a8": "MicroBit Magnetometer 
Period", + "e95d9715-251d-470a-a062-fa1922dfa9a8": "MicroBit Magnetometer Bearing", + "e95d9882-251d-470a-a062-fa1922dfa9a8": "MicroBit Button Service", + "e95dda90-251d-470a-a062-fa1922dfa9a8": "MicroBit Button A State", + "e95dda91-251d-470a-a062-fa1922dfa9a8": "MicroBit Button B State", + "e95d127b-251d-470a-a062-fa1922dfa9a8": "MicroBit IO PIN Service", + "e95d8d00-251d-470a-a062-fa1922dfa9a8": "MicroBit PIN Data", + "e95d5899-251d-470a-a062-fa1922dfa9a8": "MicroBit PIN AD Configuration", + "e95dd822-251d-470a-a062-fa1922dfa9a8": "MicroBit PWM Control", + "e95dd91d-251d-470a-a062-fa1922dfa9a8": "MicroBit LED Service", + "e95d7b77-251d-470a-a062-fa1922dfa9a8": "MicroBit LED Matrix state", + "e95d93ee-251d-470a-a062-fa1922dfa9a8": "MicroBit LED Text", + "e95d0d2d-251d-470a-a062-fa1922dfa9a8": "MicroBit Scrolling Delay", + "e95d93af-251d-470a-a062-fa1922dfa9a8": "MicroBit Event Service", + "e95db84c-251d-470a-a062-fa1922dfa9a8": "MicroBit Requirements", + "e95d9775-251d-470a-a062-fa1922dfa9a8": "MicroBit Event Data", + "e95d23c4-251d-470a-a062-fa1922dfa9a8": "MicroBit Client Requirements", + "e95d5404-251d-470a-a062-fa1922dfa9a8": "MicroBit Client Events", + "e95d93b0-251d-470a-a062-fa1922dfa9a8": "MicroBit DFU Control Service", + "e95d93b1-251d-470a-a062-fa1922dfa9a8": "MicroBit DFU Control", + "e95d6100-251d-470a-a062-fa1922dfa9a8": "MicroBit Temperature Service", + "e95d1b25-251d-470a-a062-fa1922dfa9a8": "MicroBit Temperature Period", + # Nordic UART Port Emulation */ + "6e400001-b5a3-f393-e0a9-e50e24dcca9e": "Nordic UART Service", + "6e400003-b5a3-f393-e0a9-e50e24dcca9e": "Nordic UART TX", + "6e400002-b5a3-f393-e0a9-e50e24dcca9e": "Nordic UART RX", + # LEGO + "00001623-1212-efde-1623-785feabcd123": "LEGO Wireless Protocol v3 Hub Service", + "00001624-1212-efde-1623-785feabcd123": "LEGO Wireless Protocol v3 Hub Characteristic", + "00001625-1212-efde-1623-785feabcd123": "LEGO Wireless Protocol v3 Bootloader Service", + "00001626-1212-efde-1623-785feabcd123": "LEGO Wireless Protocol v3 Bootloader Characteristic", + "c5f50001-8280-46da-89f4-6d8051e4aeef": "Pybricks Service", + "c5f50002-8280-46da-89f4-6d8051e4aeef": "Pybricks Characteristic", + # from nRF connect + "be15bee0-6186-407e-8381-0bd89c4d8df4": "Anki Drive Vehicle Service READ", + "be15bee1-6186-407e-8381-0bd89c4d8df4": "Anki Drive Vehicle Service WRITE", + "955a1524-0fe2-f5aa-a094-84b8d4f3e8ad": "Beacon UUID", + "00001524-1212-efde-1523-785feabcd123": "Button", + "8ec90003-f315-4f60-9fb8-838830daea50": "Buttonless DFU", + "955a1525-0fe2-f5aa-a094-84b8d4f3e8ad": "Calibration", + "a6c31338-6c07-453e-961a-d8a8a41bf368": "Candy Control Point", + "955a1528-0fe2-f5aa-a094-84b8d4f3e8ad": "Connection Interval", + "00001531-1212-efde-1523-785feabcd123": "DFU Control Point", + "8ec90001-f315-4f60-9fb8-838830daea50": "DFU Control Point", + "00001532-1212-efde-1523-785feabcd123": "DFU Packet", + "8ec90002-f315-4f60-9fb8-838830daea50": "DFU Packet", + "00001534-1212-efde-1523-785feabcd123": "DFU Version", + "ee0c2084-8786-40ba-ab96-99b91ac981d8": "Data", + "b35d7da9-eed4-4d59-8f89-f6573edea967": "Data Length", + "b35d7da7-eed4-4d59-8f89-f6573edea967": "Data One", + "22eac6e9-24d6-4bb5-be44-b36ace7c7bfb": "Data Source", + "b35d7da8-eed4-4d59-8f89-f6573edea967": "Data Two", + "c6b2f38c-23ab-46d8-a6ab-a3a870bbd5d7": "Entity Attribute", + "2f7cabce-808d-411f-9a0c-bb92ba96c102": "Entity Update", + "ee0c2085-8786-40ba-ab96-99b91ac981d8": "Flags", + "88400002-e95a-844e-c53f-fbec32ed5e54": "Fly Button Characteristic", + 
"00001525-1212-efde-1523-785feabcd123": "LED", + "955a1529-0fe2-f5aa-a094-84b8d4f3e8ad": "LED Config", + "ee0c2082-8786-40ba-ab96-99b91ac981d8": "Lock", + "ee0c2081-8786-40ba-ab96-99b91ac981d8": "Lock State", + "955a1526-0fe2-f5aa-a094-84b8d4f3e8ad": "Major & Minor", + "955a1527-0fe2-f5aa-a094-84b8d4f3e8ad": "Manufacturer ID", + "9fbf120d-6301-42d9-8c58-25e699a21dbd": "Notification Source", + "ee0c2088-8786-40ba-ab96-99b91ac981d8": "Period", + "ee0c2086-8786-40ba-ab96-99b91ac981d8": "Power Levels", + "ee0c2087-8786-40ba-ab96-99b91ac981d8": "Power Mode", + "9b3c81d8-57b1-4a8a-b8df-0e56f7ca51c2": "Remote Command", + "ee0c2089-8786-40ba-ab96-99b91ac981d8": "Reset", + "da2e7828-fbce-4e01-ae9e-261174997c48": "SMP Characteristic", + "8ec90004-f315-4f60-9fb8-838830daea50": "Secure Buttonless DFU", + "ef680102-9b35-4933-9b10-52ffa9740042": "Thingy Advertising Parameters Characteristic", + "ef680204-9b35-4933-9b10-52ffa9740042": "Thingy Air Quality Characteristic", + "ef680302-9b35-4933-9b10-52ffa9740042": "Thingy Button Characteristic", + "ef680106-9b35-4933-9b10-52ffa9740042": "Thingy Cloud Token Characteristic", + "ef680104-9b35-4933-9b10-52ffa9740042": "Thingy Connection Parameters Characteristic", + "ef680105-9b35-4933-9b10-52ffa9740042": "Thingy Eddystone URL Characteristic", + "ef680206-9b35-4933-9b10-52ffa9740042": "Thingy Environment Configuration Characteristic", + "ef680407-9b35-4933-9b10-52ffa9740042": "Thingy Euler Characteristic", + "ef680303-9b35-4933-9b10-52ffa9740042": "Thingy External Pin Characteristic", + "ef680107-9b35-4933-9b10-52ffa9740042": "Thingy FW Version Characteristic", + "ef68040a-9b35-4933-9b10-52ffa9740042": "Thingy Gravity Vector Characteristic", + "ef680409-9b35-4933-9b10-52ffa9740042": "Thingy Heading Characteristic", + "ef680203-9b35-4933-9b10-52ffa9740042": "Thingy Humidity Characteristic", + "ef680301-9b35-4933-9b10-52ffa9740042": "Thingy LED Characteristic", + "ef680205-9b35-4933-9b10-52ffa9740042": "Thingy Light Intensity Characteristic", + "ef680108-9b35-4933-9b10-52ffa9740042": "Thingy MTU Request Characteristic", + "ef680504-9b35-4933-9b10-52ffa9740042": "Thingy Microphone Characteristic", + "ef680401-9b35-4933-9b10-52ffa9740042": "Thingy Motion Configuration Characteristic", + "ef680101-9b35-4933-9b10-52ffa9740042": "Thingy Name Characteristic", + "ef680403-9b35-4933-9b10-52ffa9740042": "Thingy Orientation Characteristic", + "ef680405-9b35-4933-9b10-52ffa9740042": "Thingy Pedometer Characteristic", + "ef680202-9b35-4933-9b10-52ffa9740042": "Thingy Pressure Characteristic", + "ef680404-9b35-4933-9b10-52ffa9740042": "Thingy Quaternion Characteristic", + "ef680406-9b35-4933-9b10-52ffa9740042": "Thingy Raw Data Characteristic", + "ef680408-9b35-4933-9b10-52ffa9740042": "Thingy Rotation Characteristic", + "ef680501-9b35-4933-9b10-52ffa9740042": "Thingy Sound Configuration Characteristic", + "ef680502-9b35-4933-9b10-52ffa9740042": "Thingy Speaker Data Characteristic", + "ef680503-9b35-4933-9b10-52ffa9740042": "Thingy Speaker Status Characteristic", + "ef680402-9b35-4933-9b10-52ffa9740042": "Thingy Tap Characteristic", + "ef680201-9b35-4933-9b10-52ffa9740042": "Thingy Temperature Characteristic", + "ee0c2083-8786-40ba-ab96-99b91ac981d8": "Unlock", + "e95db9fe-251d-470a-a062-fa1922dfa9a8": "micro:bit Pin IO Configuration", + "e95d9250-251d-470a-a062-fa1922dfa9a8": "micro:bit Temperature", + "be15beef-6186-407e-8381-0bd89c4d8df4": "Anki Drive Vehicle Service", + "7905f431-b5ce-4e99-a40f-4b1e122d00d0": "Apple Notification Center Service", + 
"d0611e78-bbb4-4591-a5f8-487910ae4366": "Apple Continuity Service", + "8667556c-9a37-4c91-84ed-54ee27d90049": "Apple Continuity Characteristic", + "9fa480e0-4967-4542-9390-d343dc5d04ae": "Apple Nearby Service", + "af0badb1-5b99-43cd-917a-a77bc549e3cc": "Nearby Characteristic", + "69d1d8f3-45e1-49a8-9821-9bbdfdaad9d9": "Control Point", + "89d3502b-0f36-433a-8ef4-c502ad55f8dc": "Apple Media Service", + "955a1523-0fe2-f5aa-a094-84b8d4f3e8ad": "Beacon Config", + "a6c31337-6c07-453e-961a-d8a8a41bf368": "Candy Dispenser Service", + "00001530-1212-efde-1523-785feabcd123": "Device Firmware Update Service", + "88400001-e95a-844e-c53f-fbec32ed5e54": "Digital Bird Service", + "ee0c2080-8786-40ba-ab96-99b91ac981d8": "Eddystone-URL Configuration Service", + "8e400001-f315-4f60-9fb8-838830daea50": "Experimental Buttonless DFU Service", + "00001523-1212-efde-1523-785feabcd123": "Nordic LED Button Service", + "8d53dc1d-1db7-4cd3-868b-8a527460aa84": "SMP Service", + "ef680100-9b35-4933-9b10-52ffa9740042": "Thingy Configuration Service", + "ef680200-9b35-4933-9b10-52ffa9740042": "Thingy Environment Service", + "ef680400-9b35-4933-9b10-52ffa9740042": "Thingy Motion Service", + "ef680500-9b35-4933-9b10-52ffa9740042": "Thingy Sound Service", + "ef680300-9b35-4933-9b10-52ffa9740042": "Thingy User Interface Service", + "b35d7da6-eed4-4d59-8f89-f6573edea967": "URI Beacon Config (V1)", +} + + +def uuidstr_to_str(uuid_: str) -> str: + uuid_ = uuid_.lower() + s = uuid128_dict.get(uuid_) + if s: + return s + + if not s and uuid_.endswith("-0000-1000-8000-00805f9b34fb"): + s = "Vendor specific" + v = int(uuid_[:8], 16) + if (v & 0xFFFF0000) == 0x0000: + s = uuid16_dict.get(v & 0x0000FFFF, s) + if not s: + return "Unknown" + + return s + + +def register_uuids(uuids_to_descriptions: Dict[str, str]) -> None: + """Add or modify the mapping of 128-bit UUIDs for services and characteristics to descriptions. + + Args: + uuids_to_descriptions: A dictionary of new mappings + + """ + uuid128_dict.update(uuids_to_descriptions) + + +def normalize_uuid_str(uuid: str) -> str: + """ + Normaizes a UUID to the format used by Bleak. + + - Converted to lower case. + - 16-bit and 32-bit UUIDs are expanded to 128-bit. + + Example:: + + # 16-bit + uuid1 = normalize_uuid_str("1234") + # uuid1 == "00001234-0000-1000-8000-00805f9b34fb" + + # 32-bit + uuid2 = normalize_uuid_str("12345678") + # uuid2 == "12345678-0000-1000-8000-00805f9b34fb" + + # 128-bit + uuid3 = normalize_uuid_str("12345678-0000-1234-1234-1234567890ABC") + # uuid3 == "12345678-0000-1234-1234-1234567890abc" + + .. versionadded:: 0.20 + .. versionchanged:: 0.21 + Added support for 32-bit UUIDs. + """ + # See: BLUETOOTH CORE SPECIFICATION Version 5.4 | Vol 3, Part B - Section 2.5.1 + if len(uuid) == 4: + # Bluetooth SIG registered 16-bit UUIDs + uuid = f"0000{uuid}-0000-1000-8000-00805f9b34fb" + elif len(uuid) == 8: + # Bluetooth SIG registered 32-bit UUIDs + uuid = f"{uuid}-0000-1000-8000-00805f9b34fb" + + # let UUID class do the validation and conversion to lower case + return str(UUID(uuid)) + + +def normalize_uuid_16(uuid: int) -> str: + """ + Normaizes a 16-bit integer UUID to the format used by Bleak. + + Returns: + 128-bit UUID as string with the format ``"0000xxxx-0000-1000-8000-00805f9b34fb"``. + + Example:: + + uuid = normalize_uuid_16(0x1234) + # uuid == "00001234-0000-1000-8000-00805f9b34fb" + + .. 
versionadded:: 0.21 + """ + return normalize_uuid_str(f"{uuid:04X}") + + +def normalize_uuid_32(uuid: int) -> str: + """ + Normaizes a 32-bit integer UUID to the format used by Bleak. + + Returns: + 128-bit UUID as string with the format ``"xxxxxxxx-0000-1000-8000-00805f9b34fb"``. + + Example:: + + uuid = normalize_uuid_32(0x12345678) + # uuid == "12345678-0000-1000-8000-00805f9b34fb" + + .. versionadded:: 0.21 + """ + return normalize_uuid_str(f"{uuid:08X}") diff --git a/dbus_fast/__init__.py b/dbus_fast/__init__.py new file mode 100644 index 0000000..28fbe47 --- /dev/null +++ b/dbus_fast/__init__.py @@ -0,0 +1,82 @@ +from . import introspection, message_bus, proxy_object, service +from .constants import ( + ArgDirection, + BusType, + ErrorType, + MessageFlag, + MessageType, + NameFlag, + PropertyAccess, + ReleaseNameReply, + RequestNameReply, +) +from .errors import ( + AuthError, + DBusError, + InterfaceNotFoundError, + InvalidAddressError, + InvalidBusNameError, + InvalidInterfaceNameError, + InvalidIntrospectionError, + InvalidMemberNameError, + InvalidMessageError, + InvalidObjectPathError, + InvalidSignatureError, + SignalDisabledError, + SignatureBodyMismatchError, +) +from .message import Message +from .signature import SignatureTree, SignatureType, Variant +from .unpack import unpack_variants +from .validators import ( + assert_bus_name_valid, + assert_interface_name_valid, + assert_member_name_valid, + assert_object_path_valid, + is_bus_name_valid, + is_interface_name_valid, + is_member_name_valid, + is_object_path_valid, +) + +__all__ = [ + "introspection", + "message_bus", + "proxy_object", + "service", + "ArgDirection", + "BusType", + "ErrorType", + "MessageFlag", + "MessageType", + "NameFlag", + "PropertyAccess", + "ReleaseNameReply", + "RequestNameReply", + "AuthError", + "DBusError", + "InterfaceNotFoundError", + "InvalidAddressError", + "InvalidBusNameError", + "InvalidInterfaceNameError", + "InvalidIntrospectionError", + "InvalidMemberNameError", + "InvalidMessageError", + "InvalidObjectPathError", + "InvalidSignatureError", + "SignalDisabledError", + "SignatureBodyMismatchError", + "Message", + "SignatureTree", + "SignatureType", + "Variant", + "assert_bus_name_valid", + "assert_interface_name_valid", + "assert_member_name_valid", + "assert_object_path_valid", + "is_bus_name_valid", + "is_interface_name_valid", + "is_member_name_valid", + "is_object_path_valid", + "unpack_variants", +] diff --git a/dbus_fast/__version__.py b/dbus_fast/__version__.py new file mode 100644 index 0000000..aa25254 --- /dev/null +++ b/dbus_fast/__version__.py @@ -0,0 +1,10 @@ +__title__ = "dbus_fast" +__description__ = ( + "A performant zero-dependency DBus library for Python with asyncio support" +) +__url__ = "https://github.com/bluetooth-devices/dbus-fast" +__version__ = "2.24.4" +__author__ = "Bluetooth Devices authors, Tony Crisci" +__author_email__ = "bluetooth@koston.org" +__license__ = "MIT" +__copyright__ = "Copyright 2022 Bluetooth Devices authors, 2019 Tony Crisci" diff --git a/dbus_fast/_private/__init__.py b/dbus_fast/_private/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/dbus_fast/_private/_cython_compat.py b/dbus_fast/_private/_cython_compat.py new file mode 100644 index 0000000..27c9626 --- /dev/null +++ b/dbus_fast/_private/_cython_compat.py @@ -0,0 +1,12 @@ +"""Stub for when Cython is not available.""" + + +class FakeCython: + """Stub for when Cython is not available.""" + + @property + def compiled(self) -> bool: + return False + + +FAKE_CYTHON = 
FakeCython() diff --git a/dbus_fast/_private/address.pxd b/dbus_fast/_private/address.pxd new file mode 100644 index 0000000..ddec3b5 --- /dev/null +++ b/dbus_fast/_private/address.pxd @@ -0,0 +1,15 @@ +"""cdefs for address.py""" + +import cython + + +cdef object unquote + +@cython.locals(kv=cython.str, opt_string=cython.str, address=cython.str) +cpdef parse_address(cython.str address_str) + +cpdef get_bus_address(object bus_type) + +cpdef get_session_bus_address() + +cpdef get_system_bus_address() diff --git a/dbus_fast/_private/address.py b/dbus_fast/_private/address.py new file mode 100644 index 0000000..9201e0f --- /dev/null +++ b/dbus_fast/_private/address.py @@ -0,0 +1,116 @@ +import os +import re +from typing import Dict, List, Optional, Tuple +from urllib.parse import unquote + +from ..constants import BusType +from ..errors import InvalidAddressError + +invalid_address_chars_re = re.compile(r"[^-0-9A-Za-z_/.%]") + +str_ = str + + +def parse_address(address_str: str_) -> List[Tuple[str, Dict[str, str]]]: + """Parse a dbus address string into a list of addresses.""" + addresses: List[Tuple[str, Dict[str, str]]] = [] + + for address in address_str.split(";"): + if not address: + continue + if address.find(":") == -1: + raise InvalidAddressError("address did not contain a transport") + + transport, opt_string = address.split(":", 1) + options: Dict[str, str] = {} + + for kv in opt_string.split(","): + if not kv: + continue + if kv.find("=") == -1: + raise InvalidAddressError("address option did not contain a value") + k, v = kv.split("=", 1) + if invalid_address_chars_re.search(v): + raise InvalidAddressError("address contains invalid characters") + # XXX the actual unquote rules are simpler than this + options[k] = unquote(v) + + addresses.append((transport, options)) + + if not addresses: + raise InvalidAddressError( + f'address string contained no addresses: "{address_str}"' + ) + + return addresses + + +def get_system_bus_address() -> str: + """Get the system bus address from the environment or return the default.""" + return ( + os.environ.get("DBUS_SYSTEM_BUS_ADDRESS") + or "unix:path=/var/run/dbus/system_bus_socket" + ) + + +display_re = re.compile(r".*:([0-9]+)\.?.*") +remove_quotes_re = re.compile(r"""^['"]?(.*?)['"]?$""") + + +def get_session_bus_address() -> str: + """Get the session bus address from the environment or return the default.""" + dbus_session_bus_address = os.environ.get("DBUS_SESSION_BUS_ADDRESS") + if dbus_session_bus_address: + return dbus_session_bus_address + + home = os.environ["HOME"] + if "DISPLAY" not in os.environ: + raise InvalidAddressError( + "DBUS_SESSION_BUS_ADDRESS not set and could not get DISPLAY environment variable to get bus address" + ) + + display = os.environ["DISPLAY"] + try: + display = display_re.search(display).group(1) + except Exception: + raise InvalidAddressError( + f"DBUS_SESSION_BUS_ADDRESS not set and could not parse DISPLAY environment variable to get bus address: {display}" + ) + + # XXX: this will block but they're very small files and fs operations + # should be fairly reliable. fix this by passing in an async func to read + # the file for each io backend. 
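+ # Illustrative example of the fallback path built below: with DISPLAY=":0"
+ # the regex above reduces the display to "0"; the machine id is then read
+ # from /var/lib/dbus/machine-id, and the per-session file
+ # ~/.dbus/session-bus/<machine-id>-0 is parsed for its
+ # DBUS_SESSION_BUS_ADDRESS= line.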
+ machine_id = None + with open("/var/lib/dbus/machine-id") as f: + machine_id = f.read().rstrip() + + dbus_info_file_name = f"{home}/.dbus/session-bus/{machine_id}-{display}" + dbus_info: Optional[str] = None + try: + with open(dbus_info_file_name) as f: + dbus_info = f.read().rstrip() + except Exception: + raise InvalidAddressError( + f"could not open dbus info file: {dbus_info_file_name}" + ) + + for line in dbus_info.split("\n"): + if line.strip().startswith("DBUS_SESSION_BUS_ADDRESS="): + _, addr = line.split("=", 1) + if not addr: + raise InvalidAddressError( + f"DBUS_SESSION_BUS_ADDRESS variable not set correctly in dbus info file: {dbus_info_file_name}" + ) + addr = remove_quotes_re.search(addr).group(1) + return addr + + raise InvalidAddressError("could not find dbus session bus address") + + +def get_bus_address(bus_type: BusType) -> str: + """Get the address of the bus specified by the bus type.""" + if bus_type == BusType.SESSION: + return get_session_bus_address() + if bus_type == BusType.SYSTEM: + return get_system_bus_address() + raise Exception(f"got unknown bus type: {bus_type}") diff --git a/dbus_fast/_private/constants.py b/dbus_fast/_private/constants.py new file mode 100644 index 0000000..605c3cf --- /dev/null +++ b/dbus_fast/_private/constants.py @@ -0,0 +1,18 @@ +from enum import Enum + +PROTOCOL_VERSION = 1 + +LITTLE_ENDIAN = ord("l") +BIG_ENDIAN = ord("B") + + +class HeaderField(Enum): + PATH = 1 + INTERFACE = 2 + MEMBER = 3 + ERROR_NAME = 4 + REPLY_SERIAL = 5 + DESTINATION = 6 + SENDER = 7 + SIGNATURE = 8 + UNIX_FDS = 9 diff --git a/dbus_fast/_private/marshaller.pxd b/dbus_fast/_private/marshaller.pxd new file mode 100644 index 0000000..f87c389 --- /dev/null +++ b/dbus_fast/_private/marshaller.pxd @@ -0,0 +1,110 @@ +"""cdefs for marshaller.py""" + +import cython + +from ..signature cimport SignatureTree, SignatureType, Variant + + +cdef object PACK_UINT32 + +cdef bytes PACKED_UINT32_ZERO +cdef bytes PACKED_BOOL_TRUE +cdef bytes PACKED_BOOL_FALSE + +cdef get_signature_tree + +cdef class Marshaller: + + cdef SignatureTree signature_tree + cdef bytearray _buf + cdef cython.list body + + cdef _buffer(self) + + cpdef align(self, unsigned int n) + + @cython.locals( + offset=cython.ulong, + ) + cdef unsigned int _align(self, unsigned int n) + + cpdef write_boolean(self, object boolean, SignatureType type_) + + @cython.locals( + written=cython.uint, + ) + cdef unsigned int _write_boolean(self, object boolean) + + cpdef write_string(self, object value, SignatureType type_) + + @cython.locals( + buf=cython.bytearray, + value_len=cython.uint, + signature_len=cython.uint, + written=cython.uint, + ) + cdef unsigned int _write_string(self, object value) + + @cython.locals( + signature_len=cython.uint, + ) + cdef unsigned int _write_signature(self, bytes signature_bytes) + + cpdef write_array(self, object array, SignatureType type_) + + @cython.locals( + array_len=cython.uint, + buf=cython.bytearray, + written=cython.uint, + token=cython.str, + child_type=SignatureType, + array_len_packed=cython.bytes, + size=cython.uint, + writer=cython.object, + packer=cython.object, + i=cython.uint, + ) + cdef unsigned int _write_array(self, object array, SignatureType type_) + + cpdef write_struct(self, object array, SignatureType type_) + + @cython.locals( + written=cython.uint, + i=cython.uint, + ) + cdef unsigned int _write_struct(self, object array, SignatureType type_) + + cpdef write_variant(self, Variant variant, SignatureType type_) + + @cython.locals( + written=cython.uint, + 
signature=cython.str, + signature_bytes=cython.bytes, + ) + cdef unsigned int _write_variant(self, Variant variant, SignatureType type_) + + @cython.locals( + written=cython.uint, + size=cython.uint, + ) + cdef unsigned int _write_single(self, SignatureType type_, object body) + + @cython.locals( + written=cython.uint, + t=cython.str, + ) + cpdef write_dict_entry(self, cython.list dict_entry, SignatureType type_) + + cpdef marshall(self) + + cdef _marshall(self) + + @cython.locals( + offset=cython.ulong, + t=cython.str, + size=cython.uint, + writer=cython.object, + packer=cython.object, + type_=SignatureType, + ) + cdef _construct_buffer(self) diff --git a/dbus_fast/_private/marshaller.py b/dbus_fast/_private/marshaller.py new file mode 100644 index 0000000..9c1aa23 --- /dev/null +++ b/dbus_fast/_private/marshaller.py @@ -0,0 +1,229 @@ +from struct import Struct, error +from typing import Any, Callable, Dict, List, Optional, Tuple, Union + +from ..signature import SignatureType, Variant, get_signature_tree + +PACK_LITTLE_ENDIAN = "<" + +PACK_UINT32 = Struct(f"{PACK_LITTLE_ENDIAN}I").pack +PACKED_UINT32_ZERO = PACK_UINT32(0) +PACKED_BOOL_FALSE = PACK_UINT32(int(0)) +PACKED_BOOL_TRUE = PACK_UINT32(int(1)) + +_int = int +_bytes = bytes +_str = str + + +class Marshaller: + """Marshall data for Dbus.""" + + __slots__ = ("signature_tree", "_buf", "body") + + def __init__(self, signature: str, body: List[Any]) -> None: + """Marshaller constructor.""" + self.signature_tree = get_signature_tree(signature) + self._buf = bytearray() + self.body = body + + @property + def buffer(self) -> bytearray: + return self._buf + + def _buffer(self) -> bytearray: + return self._buf + + def align(self, n: _int) -> int: + return self._align(n) + + def _align(self, n: _int) -> _int: + offset = n - len(self._buf) % n + if offset == 0 or offset == n: + return 0 + for _ in range(offset): + self._buf.append(0) + return offset + + def write_boolean(self, boolean: bool, type_: SignatureType) -> int: + return self._write_boolean(boolean) + + def _write_boolean(self, boolean: bool) -> int: + written = self._align(4) + self._buf += PACKED_BOOL_TRUE if boolean else PACKED_BOOL_FALSE + return written + 4 + + def write_signature(self, signature: str, type_: SignatureType) -> int: + return self._write_signature(signature.encode()) + + def _write_signature(self, signature_bytes: _bytes) -> int: + signature_len = len(signature_bytes) + buf = self._buf + buf.append(signature_len) + buf += signature_bytes + buf.append(0) + return signature_len + 2 + + def write_string(self, value: _str, type_: SignatureType) -> int: + return self._write_string(value) + + def _write_string(self, value: _str) -> int: + value_bytes = value.encode() + value_len = len(value_bytes) + written = self._align(4) + 4 + buf = self._buf + buf += PACK_UINT32(value_len) + buf += value_bytes + written += value_len + buf.append(0) + written += 1 + return written + + def write_variant(self, variant: Variant, type_: SignatureType) -> int: + return self._write_variant(variant, type_) + + def _write_variant(self, variant: Variant, type_: SignatureType) -> int: + signature = variant.signature + signature_bytes = signature.encode() + written = self._write_signature(signature_bytes) + written += self._write_single(variant.type, variant.value) # type: ignore[has-type] + return written + + def write_array( + self, array: Union[List[Any], Dict[Any, Any]], type_: SignatureType + ) -> int: + return self._write_array(array, type_) + + def _write_array( + self, array: 
Union[List[Any], Dict[Any, Any]], type_: SignatureType + ) -> int: + # TODO max array size is 64MiB (67108864 bytes) + written = self._align(4) + # length placeholder + buf = self._buf + offset = len(buf) + written += self._align(4) + 4 + buf += PACKED_UINT32_ZERO + child_type = type_.children[0] + token = child_type.token + + if token in "xtd{(": + # the first alignment is not included in array size + written += self._align(8) + + array_len = 0 + if token == "{": + for key, value in array.items(): # type: ignore[union-attr] + array_len += self.write_dict_entry([key, value], child_type) + elif token == "y": + array_len = len(array) + buf += array + elif token == "(": + for value in array: + array_len += self._write_struct(value, child_type) + else: + writer, packer, size = self._writers[token] + if not writer: + for value in array: + array_len += self._align(size) + size + buf += packer(value) # type: ignore[misc] + else: + for value in array: + array_len += writer(self, value, child_type) + + array_len_packed = PACK_UINT32(array_len) + for i in range(offset, offset + 4): + buf[i] = array_len_packed[i - offset] + + return written + array_len + + def write_struct( + self, array: Union[Tuple[Any], List[Any]], type_: SignatureType + ) -> int: + return self._write_struct(array, type_) + + def _write_struct( + self, array: Union[Tuple[Any], List[Any]], type_: SignatureType + ) -> int: + written = self._align(8) + for i, value in enumerate(array): + written += self._write_single(type_.children[i], value) + return written + + def write_dict_entry(self, dict_entry: List[Any], type_: SignatureType) -> int: + written = self._align(8) + written += self._write_single(type_.children[0], dict_entry[0]) + written += self._write_single(type_.children[1], dict_entry[1]) + return written + + def _write_single(self, type_: SignatureType, body: Any) -> int: + t = type_.token + if t == "y": + self._buf.append(body) + return 1 + elif t == "u": + written = self._align(4) + self._buf += PACK_UINT32(body) + return written + 4 + elif t == "a": + return self._write_array(body, type_) + elif t == "s" or t == "o": + return self._write_string(body) + elif t == "v": + return self._write_variant(body, type_) + elif t == "b": + return self._write_boolean(body) + else: + writer, packer, size = self._writers[t] + if not writer: + written = self._align(size) + self._buf += packer(body) # type: ignore[misc] + return written + size + return writer(self, body, type_) + + def marshall(self) -> bytearray: + """Marshalls the body into a byte array""" + return self._marshall() + + def _marshall(self) -> bytearray: + """Marshalls the body into a byte array""" + try: + return self._construct_buffer() + except KeyError as ex: + raise NotImplementedError(f'type is not implemented yet: "{ex.args}"') + except error: + self.signature_tree.verify(self.body) + raise RuntimeError("should not reach here") + + def _construct_buffer(self) -> bytearray: + self._buf.clear() + body = self.body + for i, type_ in enumerate(self.signature_tree.types): + self._write_single(type_, body[i]) + return self._buf + + _writers: Dict[ + str, + Tuple[ + Optional[Callable[[Any, Any, SignatureType], int]], + Optional[Callable[[Any], bytes]], + int, + ], + ] = { + "y": (None, Struct(f"{PACK_LITTLE_ENDIAN}B").pack, 1), + "b": (write_boolean, None, 0), + "n": (None, Struct(f"{PACK_LITTLE_ENDIAN}h").pack, 2), + "q": (None, Struct(f"{PACK_LITTLE_ENDIAN}H").pack, 2), + "i": (None, Struct(f"{PACK_LITTLE_ENDIAN}i").pack, 4), + "u": (None, PACK_UINT32, 4), + "x": (None, 
Struct(f"{PACK_LITTLE_ENDIAN}q").pack, 8), + "t": (None, Struct(f"{PACK_LITTLE_ENDIAN}Q").pack, 8), + "d": (None, Struct(f"{PACK_LITTLE_ENDIAN}d").pack, 8), + "h": (None, Struct(f"{PACK_LITTLE_ENDIAN}I").pack, 4), + "o": (write_string, None, 0), + "s": (write_string, None, 0), + "g": (write_signature, None, 0), + "a": (write_array, None, 0), + "(": (write_struct, None, 0), + "{": (write_dict_entry, None, 0), + "v": (write_variant, None, 0), + } diff --git a/dbus_fast/_private/unmarshaller.pxd b/dbus_fast/_private/unmarshaller.pxd new file mode 100644 index 0000000..94ae1c5 --- /dev/null +++ b/dbus_fast/_private/unmarshaller.pxd @@ -0,0 +1,241 @@ +"""cdefs for unmarshaller.py""" + +import cython + +from ..message cimport Message +from ..signature cimport SignatureTree, SignatureType, Variant + + +cdef object MAX_UNIX_FDS_SIZE +cdef object ARRAY +cdef object UNIX_FDS_CMSG_LENGTH +cdef object SOL_SOCKET +cdef object SCM_RIGHTS +cdef object MESSAGE_FLAG_INTENUM + +cdef unsigned int UINT32_SIZE +cdef unsigned int INT16_SIZE +cdef unsigned int UINT16_SIZE + +cdef unsigned int HEADER_ARRAY_OF_STRUCT_SIGNATURE_POSITION +cdef unsigned int HEADER_SIGNATURE_SIZE +cdef unsigned int LITTLE_ENDIAN +cdef unsigned int BIG_ENDIAN +cdef unsigned int PROTOCOL_VERSION +cdef unsigned int HEADER_UNIX_FDS_IDX +cdef cython.list HEADER_IDX_TO_ARG_NAME + +cdef str UINT32_CAST +cdef str INT16_CAST +cdef str UINT16_CAST + +cdef bint SYS_IS_LITTLE_ENDIAN +cdef bint SYS_IS_BIG_ENDIAN + +cdef object UNPACK_HEADER_LITTLE_ENDIAN +cdef object UNPACK_HEADER_BIG_ENDIAN + +cdef object UINT32_UNPACK_LITTLE_ENDIAN +cdef object UINT32_UNPACK_BIG_ENDIAN + +cdef object INT16_UNPACK_LITTLE_ENDIAN +cdef object INT16_UNPACK_BIG_ENDIAN + +cdef object UINT16_UNPACK_LITTLE_ENDIAN +cdef object UINT16_UNPACK_BIG_ENDIAN + +cdef cython.dict MESSAGE_TYPE_MAP +cdef cython.dict MESSAGE_FLAG_MAP +cdef dict HEADER_MESSAGE_ARG_NAME + +cdef SignatureTree SIGNATURE_TREE_EMPTY +cdef SignatureTree SIGNATURE_TREE_B +cdef SignatureTree SIGNATURE_TREE_N +cdef SignatureTree SIGNATURE_TREE_O +cdef SignatureTree SIGNATURE_TREE_S +cdef SignatureTree SIGNATURE_TREE_U +cdef SignatureTree SIGNATURE_TREE_Y + +cdef SignatureTree SIGNATURE_TREE_AS +cdef SignatureType SIGNATURE_TREE_AS_TYPES_0 +cdef SignatureTree SIGNATURE_TREE_AO +cdef SignatureType SIGNATURE_TREE_AO_TYPES_0 +cdef SignatureTree SIGNATURE_TREE_A_SV +cdef SignatureType SIGNATURE_TREE_A_SV_TYPES_0 +cdef SignatureTree SIGNATURE_TREE_SA_SV_AS +cdef SignatureType SIGNATURE_TREE_SA_SV_AS_TYPES_1 +cdef SignatureType SIGNATURE_TREE_SA_SV_AS_TYPES_2 +cdef SignatureTree SIGNATURE_TREE_OAS +cdef SignatureType SIGNATURE_TREE_OAS_TYPES_1 +cdef SignatureTree SIGNATURE_TREE_OA_SA_SV +cdef SignatureType SIGNATURE_TREE_OA_SA_SV_TYPES_1 +cdef SignatureTree SIGNATURE_TREE_AY +cdef SignatureType SIGNATURE_TREE_AY_TYPES_0 +cdef SignatureTree SIGNATURE_TREE_A_QV +cdef SignatureType SIGNATURE_TREE_A_QV_TYPES_0 +cdef SignatureTree SIGNATURE_TREE_A_OA_SA_SV +cdef SignatureType SIGNATURE_TREE_A_OA_SA_SV_TYPES_0 + +cdef unsigned int TOKEN_B_AS_INT +cdef unsigned int TOKEN_U_AS_INT +cdef unsigned int TOKEN_Y_AS_INT +cdef unsigned int TOKEN_A_AS_INT +cdef unsigned int TOKEN_O_AS_INT +cdef unsigned int TOKEN_S_AS_INT +cdef unsigned int TOKEN_G_AS_INT +cdef unsigned int TOKEN_N_AS_INT +cdef unsigned int TOKEN_X_AS_INT +cdef unsigned int TOKEN_T_AS_INT +cdef unsigned int TOKEN_D_AS_INT +cdef unsigned int TOKEN_Q_AS_INT +cdef unsigned int TOKEN_V_AS_INT +cdef unsigned int TOKEN_LEFT_CURLY_AS_INT +cdef unsigned int 
TOKEN_LEFT_PAREN_AS_INT + +cdef object MARSHALL_STREAM_END_ERROR +cdef object DEFAULT_BUFFER_SIZE + +cdef cython.uint EAGAIN +cdef cython.uint EWOULDBLOCK + +cdef get_signature_tree + + +cdef inline unsigned long _cast_uint32_native(const char * payload, unsigned int offset): + cdef unsigned long *u32p = &payload[offset] + return u32p[0] + +cdef inline short _cast_int16_native(const char * payload, unsigned int offset): + cdef short *s16p = &payload[offset] + return s16p[0] + +cdef inline unsigned short _cast_uint16_native(const char * payload, unsigned int offset): + cdef unsigned short *u16p = &payload[offset] + return u16p[0] + + + +cdef class Unmarshaller: + + cdef object _unix_fds + cdef bytearray _buf + cdef unsigned int _pos + cdef object _stream + cdef object _sock + cdef object _message + cdef object _readers + cdef unsigned int _body_len + cdef unsigned int _serial + cdef unsigned int _header_len + cdef object _message_type + cdef object _flag + cdef unsigned int _msg_len + cdef unsigned int _is_native + cdef object _uint32_unpack + cdef object _int16_unpack + cdef object _uint16_unpack + cdef object _stream_reader + cdef object _sock_reader + cdef bint _negotiate_unix_fd + cdef bint _read_complete + cdef unsigned int _endian + + cdef _next_message(self) + + cdef bint _has_another_message_in_buffer(self) + + @cython.locals( + msg=cython.bytes, + recv=cython.tuple, + errno=cython.uint + ) + cdef void _read_sock_with_fds(self, unsigned int pos, unsigned int missing_bytes) + + @cython.locals( + data=cython.bytes, + errno=cython.uint + ) + cdef void _read_sock_without_fds(self, unsigned int pos) + + @cython.locals( + data=cython.bytes + ) + cdef void _read_stream(self, unsigned int pos, unsigned int missing_bytes) + + cdef void _read_to_pos(self, unsigned int pos) + + cpdef read_boolean(self, SignatureType type_) + + cdef _read_boolean(self) + + cpdef read_uint32_unpack(self, SignatureType type_) + + cdef unsigned int _read_uint32_unpack(self) + + cpdef read_int16_unpack(self, SignatureType type_) + + cdef int _read_int16_unpack(self) + + cpdef read_uint16_unpack(self, SignatureType type_) + + cdef unsigned int _read_uint16_unpack(self) + + cpdef read_string_unpack(self, SignatureType type_) + + @cython.locals( + str_start=cython.uint, + ) + cdef str _read_string_unpack(self) + + @cython.locals( + tree=SignatureTree, + token_as_int=cython.uint, + ) + cdef Variant _read_variant(self) + + @cython.locals( + beginning_pos=cython.ulong, + array_length=cython.uint, + children=cython.list, + child_type=SignatureType, + child_0=SignatureType, + child_1=SignatureType, + token_as_int=cython.uint, + ) + cpdef object read_array(self, SignatureType type_) + + cpdef read_signature(self, SignatureType type_) + + @cython.locals( + o=cython.ulong, + signature_len=cython.uint, + ) + cdef str _read_signature(self) + + @cython.locals( + endian=cython.uint, + buffer=cython.bytearray, + protocol_version=cython.uint, + key=cython.str, + ) + cdef _read_header(self) + + @cython.locals( + body=cython.list, + header_fields=cython.dict, + token_as_int=cython.uint, + signature=cython.str, + ) + cdef _read_body(self) + + cdef _unmarshall(self) + + cpdef unmarshall(self) + + @cython.locals( + beginning_pos=cython.ulong, + o=cython.ulong, + token_as_int=cython.uint, + signature_len=cython.uint, + ) + cdef cython.dict _header_fields(self, unsigned int header_length) diff --git a/dbus_fast/_private/unmarshaller.py b/dbus_fast/_private/unmarshaller.py new file mode 100644 index 0000000..c9e7b66 --- /dev/null +++ 
b/dbus_fast/_private/unmarshaller.py @@ -0,0 +1,811 @@ +import array +import errno +import io +import socket +import sys +from struct import Struct +from typing import Any, Callable, Dict, Iterable, List, Optional, Tuple, Union + +from ..constants import MESSAGE_FLAG_MAP, MESSAGE_TYPE_MAP, MessageFlag +from ..errors import InvalidMessageError +from ..message import Message +from ..signature import SignatureType, Variant, get_signature_tree +from .constants import BIG_ENDIAN, LITTLE_ENDIAN, PROTOCOL_VERSION + +MESSAGE_FLAG_INTENUM = MessageFlag + +MAX_UNIX_FDS = 16 +MAX_UNIX_FDS_SIZE = array.array("i").itemsize +UNIX_FDS_CMSG_LENGTH = socket.CMSG_LEN(MAX_UNIX_FDS_SIZE) + +UNPACK_SYMBOL = {LITTLE_ENDIAN: "<", BIG_ENDIAN: ">"} + +UINT32_CAST = "I" +UINT32_SIZE = 4 +UINT32_DBUS_TYPE = "u" + +INT16_CAST = "h" +INT16_SIZE = 2 +INT16_DBUS_TYPE = "n" + +UINT16_CAST = "H" +UINT16_SIZE = 2 +UINT16_DBUS_TYPE = "q" + +SYS_IS_LITTLE_ENDIAN = sys.byteorder == "little" +SYS_IS_BIG_ENDIAN = sys.byteorder == "big" + +DBUS_TO_CTYPE = { + "y": ("B", 1), # byte + INT16_DBUS_TYPE: (INT16_CAST, INT16_SIZE), # int16 + UINT16_DBUS_TYPE: (UINT16_CAST, UINT16_SIZE), # uint16 + "i": ("i", 4), # int32 + UINT32_DBUS_TYPE: (UINT32_CAST, UINT32_SIZE), # uint32 + "x": ("q", 8), # int64 + "t": ("Q", 8), # uint64 + "d": ("d", 8), # double + "h": (UINT32_CAST, UINT32_SIZE), # uint32 +} + +UNPACK_HEADER_LITTLE_ENDIAN = Struct("III").unpack_from + +UINT32_UNPACK_LITTLE_ENDIAN = Struct(f"<{UINT32_CAST}").unpack_from +UINT32_UNPACK_BIG_ENDIAN = Struct(f">{UINT32_CAST}").unpack_from + +INT16_UNPACK_LITTLE_ENDIAN = Struct(f"<{INT16_CAST}").unpack_from +INT16_UNPACK_BIG_ENDIAN = Struct(f">{INT16_CAST}").unpack_from + +UINT16_UNPACK_LITTLE_ENDIAN = Struct(f"<{UINT16_CAST}").unpack_from +UINT16_UNPACK_BIG_ENDIAN = Struct(f">{UINT16_CAST}").unpack_from + +HEADER_SIGNATURE_SIZE = 16 +HEADER_ARRAY_OF_STRUCT_SIGNATURE_POSITION = 12 + + +SIGNATURE_TREE_EMPTY = get_signature_tree("") +SIGNATURE_TREE_B = get_signature_tree("b") +SIGNATURE_TREE_N = get_signature_tree("n") +SIGNATURE_TREE_S = get_signature_tree("s") +SIGNATURE_TREE_O = get_signature_tree("o") +SIGNATURE_TREE_U = get_signature_tree("u") +SIGNATURE_TREE_Y = get_signature_tree("y") + +SIGNATURE_TREE_AY = get_signature_tree("ay") +SIGNATURE_TREE_AS = get_signature_tree("as") +SIGNATURE_TREE_AS_TYPES_0 = SIGNATURE_TREE_AS.types[0] +SIGNATURE_TREE_A_SV = get_signature_tree("a{sv}") +SIGNATURE_TREE_A_SV_TYPES_0 = SIGNATURE_TREE_A_SV.types[0] + +SIGNATURE_TREE_AO = get_signature_tree("ao") +SIGNATURE_TREE_AO_TYPES_0 = SIGNATURE_TREE_AO.types[0] + +SIGNATURE_TREE_OAS = get_signature_tree("oas") +SIGNATURE_TREE_OAS_TYPES_1 = SIGNATURE_TREE_OAS.types[1] + +SIGNATURE_TREE_AY_TYPES_0 = SIGNATURE_TREE_AY.types[0] +SIGNATURE_TREE_A_QV = get_signature_tree("a{qv}") +SIGNATURE_TREE_A_QV_TYPES_0 = SIGNATURE_TREE_A_QV.types[0] + +SIGNATURE_TREE_SA_SV_AS = get_signature_tree("sa{sv}as") +SIGNATURE_TREE_SA_SV_AS_TYPES_1 = SIGNATURE_TREE_SA_SV_AS.types[1] +SIGNATURE_TREE_SA_SV_AS_TYPES_2 = SIGNATURE_TREE_SA_SV_AS.types[2] + +SIGNATURE_TREE_OA_SA_SV = get_signature_tree("oa{sa{sv}}") +SIGNATURE_TREE_OA_SA_SV_TYPES_1 = SIGNATURE_TREE_OA_SA_SV.types[1] + +SIGNATURE_TREE_A_OA_SA_SV = get_signature_tree("a{oa{sa{sv}}}") +SIGNATURE_TREE_A_OA_SA_SV_TYPES_0 = SIGNATURE_TREE_A_OA_SA_SV.types[0] + +TOKEN_B_AS_INT = ord("b") +TOKEN_U_AS_INT = ord("u") +TOKEN_Y_AS_INT = ord("y") +TOKEN_A_AS_INT = ord("a") +TOKEN_O_AS_INT = ord("o") +TOKEN_S_AS_INT = ord("s") +TOKEN_G_AS_INT = ord("g") +TOKEN_N_AS_INT = 
ord("n") +TOKEN_X_AS_INT = ord("x") +TOKEN_T_AS_INT = ord("t") +TOKEN_D_AS_INT = ord("d") +TOKEN_Q_AS_INT = ord("q") +TOKEN_V_AS_INT = ord("v") +TOKEN_LEFT_CURLY_AS_INT = ord("{") +TOKEN_LEFT_PAREN_AS_INT = ord("(") + + +ARRAY = array.array +SOL_SOCKET = socket.SOL_SOCKET +SCM_RIGHTS = socket.SCM_RIGHTS + +EAGAIN = errno.EAGAIN +EWOULDBLOCK = errno.EWOULDBLOCK + +HEADER_IDX_TO_ARG_NAME = [ + "", + "path", + "interface", + "member", + "error_name", + "reply_serial", + "destination", + "sender", + "signature", + "unix_fds", +] +HEADER_UNIX_FDS_IDX = HEADER_IDX_TO_ARG_NAME.index("unix_fds") + +_SignatureType = SignatureType +_int = int + +READER_TYPE = Callable[["Unmarshaller", SignatureType], Any] + +MARSHALL_STREAM_END_ERROR = BlockingIOError + +DEFAULT_BUFFER_SIZE = io.DEFAULT_BUFFER_SIZE + + +def unpack_parser_factory(unpack_from: Callable, size: int) -> READER_TYPE: + """Build a parser that unpacks the bytes using the given unpack_from function.""" + + def _unpack_from_parser(self: "Unmarshaller", signature: SignatureType) -> Any: + self._pos += size + (-self._pos & (size - 1)) # align + return unpack_from(self._buf, self._pos - size)[0] + + return _unpack_from_parser + + +def build_simple_parsers( + endian: int, +) -> Dict[str, Callable[["Unmarshaller", SignatureType], Any]]: + """Build a dict of parsers for simple types.""" + parsers: Dict[str, READER_TYPE] = {} + for dbus_type, ctype_size in DBUS_TO_CTYPE.items(): + ctype, size = ctype_size + size = ctype_size[1] + parsers[dbus_type] = unpack_parser_factory( + Struct(f"{UNPACK_SYMBOL[endian]}{ctype}").unpack_from, size + ) + return parsers + + +try: + import cython +except ImportError: + from ._cython_compat import FAKE_CYTHON as cython + + +# +# Alignment padding is handled with the following formula below +# +# For any align value, the correct padding formula is: +# +# (align - (pos % align)) % align +# +# However, if align is a power of 2 (always the case here), the slow MOD +# operator can be replaced by a bitwise AND: +# +# (align - (pos & (align - 1))) & (align - 1) +# +# Which can be simplified to: +# +# (-pos) & (align - 1) +# +# +class Unmarshaller: + """Unmarshall messages from a stream. + + When calling with sock and _negotiate_unix_fd False, the unmashaller must + be called continuously for each new message as it will buffer the data + until a complete message is available. 
+ """ + + __slots__ = ( + "_unix_fds", + "_buf", + "_pos", + "_stream", + "_sock", + "_message", + "_readers", + "_body_len", + "_serial", + "_header_len", + "_message_type", + "_flag", + "_msg_len", + "_uint32_unpack", + "_int16_unpack", + "_uint16_unpack", + "_is_native", + "_stream_reader", + "_sock_reader", + "_negotiate_unix_fd", + "_read_complete", + "_endian", + ) + + def __init__( + self, + stream: Optional[io.BufferedRWPair] = None, + sock: Optional[socket.socket] = None, + negotiate_unix_fd: bool = True, + ) -> None: + self._unix_fds: List[int] = [] + self._buf = bytearray() # Actual buffer + self._stream = stream + self._sock = sock + self._message: Optional[Message] = None + self._readers: Dict[str, READER_TYPE] = {} + self._pos = 0 + self._body_len = 0 + self._serial = 0 + self._header_len = 0 + self._message_type = 0 + self._flag = 0 + self._msg_len = 0 + self._is_native = 0 + self._uint32_unpack: Optional[Callable] = None + self._int16_unpack: Optional[Callable] = None + self._uint16_unpack: Optional[Callable] = None + self._stream_reader: Optional[Callable] = None + self._negotiate_unix_fd = negotiate_unix_fd + self._read_complete = False + if stream: + if isinstance(stream, io.BufferedRWPair) and hasattr(stream, "reader"): + self._stream_reader = stream.reader.read # type: ignore[attr-defined] + self._stream_reader = stream.read + elif self._negotiate_unix_fd: + self._sock_reader = self._sock.recvmsg + else: + self._sock_reader = self._sock.recv + self._endian = 0 + + def _next_message(self) -> None: + """Reset the unmarshaller to its initial state. + + Call this before processing a new message. + """ + self._unix_fds = [] + to_clear = HEADER_SIGNATURE_SIZE + self._msg_len + if len(self._buf) == to_clear: + self._buf = bytearray() + else: + del self._buf[:to_clear] + self._msg_len = 0 # used to check if we have ready the header + self._read_complete = False # used to check if we have ready the message + # No need to reset the unpack functions, they are set in _read_header + # every time a new message is processed. + + @property + def message(self) -> Optional[Message]: + """Return the message that has been unmarshalled.""" + if self._read_complete: + return self._message + return None + + def _has_another_message_in_buffer(self) -> bool: + """Check if there is another message in the buffer.""" + return len(self._buf) > HEADER_SIGNATURE_SIZE + self._msg_len + + def _read_sock_with_fds(self, pos: _int, missing_bytes: _int) -> None: + """reads from the socket, storing any fds sent and handling errors + from the read itself. + + This function is greedy and will read as much data as possible + from the underlying socket. + """ + # This will raise BlockingIOError if there is no data to read + # which we store in the MARSHALL_STREAM_END_ERROR object + try: + recv = self._sock_reader(missing_bytes, UNIX_FDS_CMSG_LENGTH) # type: ignore[union-attr] + except OSError as e: + errno = e.errno + if errno == EAGAIN or errno == EWOULDBLOCK: + raise MARSHALL_STREAM_END_ERROR + raise + msg = recv[0] + ancdata = recv[1] + if ancdata: + for level, type_, data in ancdata: + if not (level == SOL_SOCKET and type_ == SCM_RIGHTS): + continue + self._unix_fds.extend( + ARRAY("i", data[: len(data) - (len(data) % MAX_UNIX_FDS_SIZE)]) + ) + if not msg: + raise EOFError() + self._buf += msg + if len(self._buf) < pos: + raise MARSHALL_STREAM_END_ERROR + + def _read_sock_without_fds(self, pos: _int) -> None: + """reads from the socket and handling errors from the read itself. 
+ + This function is greedy and will read as much data as possible + from the underlying socket. + """ + # This will raise BlockingIOError if there is no data to read + # which we store in the MARSHALL_STREAM_END_ERROR object + while True: + try: + data = self._sock_reader(DEFAULT_BUFFER_SIZE) # type: ignore[union-attr] + except OSError as e: + errno = e.errno + if errno == EAGAIN or errno == EWOULDBLOCK: + raise MARSHALL_STREAM_END_ERROR + raise + if not data: + raise EOFError() + self._buf += data + if len(self._buf) >= pos: + return + + def _read_stream(self, pos: _int, missing_bytes: _int) -> bytes: + """Read from the stream.""" + data = self._stream_reader(missing_bytes) # type: ignore[misc] + if data is None: + raise MARSHALL_STREAM_END_ERROR + if not data: + raise EOFError() + self._buf += data + if len(self._buf) < pos: + raise MARSHALL_STREAM_END_ERROR + + def _read_to_pos(self, pos: _int) -> None: + """ + Read from underlying socket into buffer. + + Raises BlockingIOError if there is not enough data to be read. + + :arg pos: + The pos to read to. If not enough bytes are available in the + buffer, read more from it. + + :returns: + None + """ + missing_bytes = pos - len(self._buf) + if missing_bytes <= 0: + return + if self._sock is None: + self._read_stream(pos, missing_bytes) + elif self._negotiate_unix_fd: + self._read_sock_with_fds(pos, missing_bytes) + else: + self._read_sock_without_fds(pos) + + def read_uint32_unpack(self, type_: _SignatureType) -> int: + return self._read_uint32_unpack() + + def _read_uint32_unpack(self) -> int: + self._pos += UINT32_SIZE + (-self._pos & (UINT32_SIZE - 1)) # align + if self._is_native and cython.compiled: + return _cast_uint32_native( # type: ignore[name-defined] # pragma: no cover + self._buf, self._pos - UINT32_SIZE + ) + return self._uint32_unpack(self._buf, self._pos - UINT32_SIZE)[0] # type: ignore[misc] + + def read_uint16_unpack(self, type_: _SignatureType) -> int: + return self._read_uint16_unpack() + + def _read_uint16_unpack(self) -> int: + self._pos += UINT16_SIZE + (-self._pos & (UINT16_SIZE - 1)) # align + if self._is_native and cython.compiled: + return _cast_uint16_native( # type: ignore[name-defined] # pragma: no cover + self._buf, self._pos - UINT16_SIZE + ) + return self._uint16_unpack(self._buf, self._pos - UINT16_SIZE)[0] # type: ignore[misc] + + def read_int16_unpack(self, type_: _SignatureType) -> int: + return self._read_int16_unpack() + + def _read_int16_unpack(self) -> int: + self._pos += INT16_SIZE + (-self._pos & (INT16_SIZE - 1)) # align + if self._is_native and cython.compiled: + return _cast_int16_native( # type: ignore[name-defined] # pragma: no cover + self._buf, self._pos - INT16_SIZE + ) + return self._int16_unpack(self._buf, self._pos - INT16_SIZE)[0] # type: ignore[misc] + + def read_boolean(self, type_: _SignatureType) -> bool: + return self._read_boolean() + + def _read_boolean(self) -> bool: + return bool(self._read_uint32_unpack()) + + def read_string_unpack(self, type_: _SignatureType) -> str: + return self._read_string_unpack() + + def _read_string_unpack(self) -> str: + """Read a string using unpack.""" + self._pos += UINT32_SIZE + (-self._pos & (UINT32_SIZE - 1)) # align + str_start = self._pos + # read terminating '\0' byte as well (str_length + 1) + if self._is_native and cython.compiled: + self._pos += ( # pragma: no cover + _cast_uint32_native(self._buf, str_start - UINT32_SIZE) + 1 # type: ignore[name-defined] + ) + else: + self._pos += self._uint32_unpack(self._buf, str_start - 
UINT32_SIZE)[0] + 1 # type: ignore[misc] + return self._buf[str_start : self._pos - 1].decode() + + def read_signature(self, type_: _SignatureType) -> str: + return self._read_signature() + + def _read_signature(self) -> str: + signature_len = self._buf[self._pos] # byte + o = self._pos + 1 + # read terminating '\0' byte as well (str_length + 1) + self._pos = o + signature_len + 1 + return self._buf[o : o + signature_len].decode() + + def read_variant(self, type_: _SignatureType) -> Variant: + return self._read_variant() + + def _read_variant(self) -> Variant: + signature = self._read_signature() + token_as_int = ord(signature[0]) + # verify in Variant is only useful on construction not unmarshalling + if len(signature) == 1: + if token_as_int == TOKEN_N_AS_INT: + return Variant(SIGNATURE_TREE_N, self._read_int16_unpack(), False) + if token_as_int == TOKEN_S_AS_INT: + return Variant(SIGNATURE_TREE_S, self._read_string_unpack(), False) + if token_as_int == TOKEN_B_AS_INT: + return Variant(SIGNATURE_TREE_B, self._read_boolean(), False) + if token_as_int == TOKEN_O_AS_INT: + return Variant(SIGNATURE_TREE_O, self._read_string_unpack(), False) + if token_as_int == TOKEN_U_AS_INT: + return Variant(SIGNATURE_TREE_U, self._read_uint32_unpack(), False) + if token_as_int == TOKEN_Y_AS_INT: + self._pos += 1 + return Variant(SIGNATURE_TREE_Y, self._buf[self._pos - 1], False) + elif token_as_int == TOKEN_A_AS_INT: + if signature == "ay": + return Variant( + SIGNATURE_TREE_AY, self.read_array(SIGNATURE_TREE_AY_TYPES_0), False + ) + if signature == "a{qv}": + return Variant( + SIGNATURE_TREE_A_QV, + self.read_array(SIGNATURE_TREE_A_QV_TYPES_0), + False, + ) + if signature == "as": + return Variant( + SIGNATURE_TREE_AS, self.read_array(SIGNATURE_TREE_AS_TYPES_0), False + ) + if signature == "a{sv}": + return Variant( + SIGNATURE_TREE_A_SV, + self.read_array(SIGNATURE_TREE_A_SV_TYPES_0), + False, + ) + if signature == "ao": + return Variant( + SIGNATURE_TREE_AO, self.read_array(SIGNATURE_TREE_AO_TYPES_0), False + ) + tree = get_signature_tree(signature) + signature_type = tree.types[0] + return Variant( + tree, + self._readers[signature_type.token](self, signature_type), + False, + ) + + def read_struct(self, type_: _SignatureType) -> List[Any]: + self._pos += -self._pos & 7 # align 8 + readers = self._readers + return [ + readers[child_type.token](self, child_type) for child_type in type_.children + ] + + def read_dict_entry(self, type_: _SignatureType) -> Tuple[Any, Any]: + self._pos += -self._pos & 7 # align 8 + return self._readers[type_.children[0].token]( + self, type_.children[0] + ), self._readers[type_.children[1].token](self, type_.children[1]) + + def read_array(self, type_: _SignatureType) -> Iterable[Any]: + self._pos += -self._pos & 3 # align 4 for the array + self._pos += ( + -self._pos & (UINT32_SIZE - 1) + ) + UINT32_SIZE # align for the uint32 + if self._is_native and cython.compiled: + array_length = _cast_uint32_native( # type: ignore[name-defined] # pragma: no cover + self._buf, self._pos - UINT32_SIZE + ) + else: + array_length = self._uint32_unpack(self._buf, self._pos - UINT32_SIZE)[0] # type: ignore[misc] + + child_type: SignatureType = type_.children[0] + token_as_int = ord(child_type.token[0]) + + if ( + token_as_int == TOKEN_X_AS_INT + or token_as_int == TOKEN_T_AS_INT + or token_as_int == TOKEN_D_AS_INT + or token_as_int == TOKEN_LEFT_CURLY_AS_INT + or token_as_int == TOKEN_LEFT_PAREN_AS_INT + ): + # the first alignment is not included in the array size + self._pos += 
-self._pos & 7 # align 8 + + if token_as_int == TOKEN_Y_AS_INT: + self._pos += array_length + return self._buf[self._pos - array_length : self._pos] + + if token_as_int == TOKEN_LEFT_CURLY_AS_INT: + result_dict: Dict[Any, Any] = {} + beginning_pos = self._pos + children = child_type.children + child_0 = children[0] + child_1 = children[1] + child_0_token_as_int = ord(child_0.token[0]) + child_1_token_as_int = ord(child_1.token[0]) + # Strings with variant values are the most common case + # so we optimize for that by inlining the string reading + # and the variant reading here + if ( + child_0_token_as_int == TOKEN_O_AS_INT + or child_0_token_as_int == TOKEN_S_AS_INT + ) and child_1_token_as_int == TOKEN_V_AS_INT: + while self._pos - beginning_pos < array_length: + self._pos += -self._pos & 7 # align 8 + key: Union[str, int] = self._read_string_unpack() + result_dict[key] = self._read_variant() + elif ( + child_0_token_as_int == TOKEN_Q_AS_INT + and child_1_token_as_int == TOKEN_V_AS_INT + ): + while self._pos - beginning_pos < array_length: + self._pos += -self._pos & 7 # align 8 + key = self._read_uint16_unpack() + result_dict[key] = self._read_variant() + if ( + child_0_token_as_int == TOKEN_O_AS_INT + or child_0_token_as_int == TOKEN_S_AS_INT + ) and child_1_token_as_int == TOKEN_A_AS_INT: + while self._pos - beginning_pos < array_length: + self._pos += -self._pos & 7 # align 8 + key = self._read_string_unpack() + result_dict[key] = self.read_array(child_1) + else: + reader_1 = self._readers[child_1.token] + reader_0 = self._readers[child_0.token] + while self._pos - beginning_pos < array_length: + self._pos += -self._pos & 7 # align 8 + key = reader_0(self, child_0) + result_dict[key] = reader_1(self, child_1) + + return result_dict + + if array_length == 0: + return [] + + result_list = [] + beginning_pos = self._pos + if token_as_int == TOKEN_O_AS_INT or token_as_int == TOKEN_S_AS_INT: + while self._pos - beginning_pos < array_length: + result_list.append(self._read_string_unpack()) + return result_list + reader = self._readers[child_type.token] + while self._pos - beginning_pos < array_length: + result_list.append(reader(self, child_type)) + return result_list + + def _header_fields(self, header_length: _int) -> Dict[str, Any]: + """Header fields are always a(yv).""" + beginning_pos = self._pos + headers = {} + buf = self._buf + readers = self._readers + while self._pos - beginning_pos < header_length: + # Now read the y (byte) of struct (yv) + self._pos += (-self._pos & 7) + 1 # align 8 + 1 for 'y' byte + field_0 = buf[self._pos - 1] + + # Now read the v (variant) of struct (yv) + # first we read the signature + signature_len = buf[self._pos] # byte + o = self._pos + 1 + self._pos += signature_len + 2 # one for the byte, one for the '\0' + if field_0 == HEADER_UNIX_FDS_IDX: # defined by self._unix_fds + continue + token_as_int = buf[o] + # Now that we have the token we can read the variant value + key = HEADER_IDX_TO_ARG_NAME[field_0] + # Strings and signatures are the most common types + # so we inline them for performance + if token_as_int == TOKEN_O_AS_INT or token_as_int == TOKEN_S_AS_INT: + headers[key] = self._read_string_unpack() + elif token_as_int == TOKEN_G_AS_INT: + headers[key] = self._read_signature() + else: + token = buf[o : o + signature_len].decode() + # There shouldn't be any other types in the header + # but just in case, we'll read it using the slow path + headers[key] = readers[token](self, get_signature_tree(token).types[0]) + return headers + + def 
_read_header(self) -> None: + """Read the header of the message.""" + # Signature is of the header is + # BYTE, BYTE, BYTE, BYTE, UINT32, UINT32, ARRAY of STRUCT of (BYTE,VARIANT) + self._read_to_pos(HEADER_SIGNATURE_SIZE) + buffer = self._buf + endian = buffer[0] + self._message_type = buffer[1] + self._flag = buffer[2] + protocol_version = buffer[3] + + if protocol_version != PROTOCOL_VERSION: + raise InvalidMessageError( + f"got unknown protocol version: {protocol_version}" + ) + + if cython.compiled and ( + (endian == LITTLE_ENDIAN and SYS_IS_LITTLE_ENDIAN) + or (endian == BIG_ENDIAN and SYS_IS_BIG_ENDIAN) + ): + self._is_native = 1 # pragma: no cover + self._body_len = _cast_uint32_native( # type: ignore[name-defined] # pragma: no cover + buffer, 4 + ) + self._serial = _cast_uint32_native( # type: ignore[name-defined] # pragma: no cover + buffer, 8 + ) + self._header_len = _cast_uint32_native( # type: ignore[name-defined] # pragma: no cover + buffer, 12 + ) + elif endian == LITTLE_ENDIAN: + ( + self._body_len, + self._serial, + self._header_len, + ) = UNPACK_HEADER_LITTLE_ENDIAN(buffer, 4) + self._uint32_unpack = UINT32_UNPACK_LITTLE_ENDIAN + self._int16_unpack = INT16_UNPACK_LITTLE_ENDIAN + self._uint16_unpack = UINT16_UNPACK_LITTLE_ENDIAN + elif endian == BIG_ENDIAN: + self._body_len, self._serial, self._header_len = UNPACK_HEADER_BIG_ENDIAN( + buffer, 4 + ) + self._uint32_unpack = UINT32_UNPACK_BIG_ENDIAN + self._int16_unpack = INT16_UNPACK_BIG_ENDIAN + self._uint16_unpack = UINT16_UNPACK_BIG_ENDIAN + else: + raise InvalidMessageError( + f"Expecting endianness as the first byte, got {endian} from {buffer}" + ) + + self._msg_len = ( + self._header_len + (-self._header_len & 7) + self._body_len + ) # align 8 + if self._endian != endian: + self._readers = self._readers_by_type[endian] + self._endian = endian + + def _read_body(self) -> None: + """Read the body of the message.""" + self._read_to_pos(HEADER_SIGNATURE_SIZE + self._msg_len) + self._pos = HEADER_ARRAY_OF_STRUCT_SIGNATURE_POSITION + header_fields = self._header_fields(self._header_len) + self._pos += -self._pos & 7 # align 8 + signature = header_fields.pop("signature", "") + if not self._body_len: + tree = SIGNATURE_TREE_EMPTY + body: List[Any] = [] + else: + token_as_int = ord(signature[0]) + if len(signature) == 1: + if token_as_int == TOKEN_O_AS_INT: + tree = SIGNATURE_TREE_O + body = [self._read_string_unpack()] + elif token_as_int == TOKEN_S_AS_INT: + tree = SIGNATURE_TREE_S + body = [self._read_string_unpack()] + else: + tree = get_signature_tree(signature) + body = [self._readers[t.token](self, t) for t in tree.types] + elif token_as_int == TOKEN_S_AS_INT and signature == "sa{sv}as": + tree = SIGNATURE_TREE_SA_SV_AS + body = [ + self._read_string_unpack(), + self.read_array(SIGNATURE_TREE_SA_SV_AS_TYPES_1), + self.read_array(SIGNATURE_TREE_SA_SV_AS_TYPES_2), + ] + elif token_as_int == TOKEN_O_AS_INT and signature == "oa{sa{sv}}": + tree = SIGNATURE_TREE_OA_SA_SV + body = [ + self._read_string_unpack(), + self.read_array(SIGNATURE_TREE_OA_SA_SV_TYPES_1), + ] + elif token_as_int == TOKEN_O_AS_INT and signature == "oas": + tree = SIGNATURE_TREE_OAS + body = [ + self._read_string_unpack(), + self.read_array(SIGNATURE_TREE_OAS_TYPES_1), + ] + elif token_as_int == TOKEN_A_AS_INT and signature == "a{oa{sa{sv}}}": + tree = SIGNATURE_TREE_A_OA_SA_SV + body = [self.read_array(SIGNATURE_TREE_A_OA_SA_SV_TYPES_0)] + else: + tree = get_signature_tree(signature) + body = [self._readers[t.token](self, t) for t in tree.types] + + 
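+         # Resolve the raw flag byte via the precomputed MESSAGE_FLAG_MAP;
+         # any combination not present in the map falls back to constructing
+         # MessageFlag (MESSAGE_FLAG_INTENUM) directly from the byte.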
flags = MESSAGE_FLAG_MAP.get(self._flag) + if flags is None: + flags = MESSAGE_FLAG_INTENUM(self._flag) + self._message = Message( + message_type=MESSAGE_TYPE_MAP[self._message_type], + flags=flags, + unix_fds=self._unix_fds, + signature=tree, + body=body, + serial=self._serial, + # The D-Bus implementation already validates the message, + # so we don't need to do it again. + validate=False, + **header_fields, + ) + self._read_complete = True + + def unmarshall(self) -> Optional[Message]: + """Unmarshall the message. + + The underlying read function will raise BlockingIOError if the + if there are not enough bytes in the buffer. This allows unmarshall + to be resumed when more data comes in over the wire. + """ + return self._unmarshall() + + def _unmarshall(self) -> Optional[Message]: + """Unmarshall the message. + + The underlying read function will raise BlockingIOError if the + if there are not enough bytes in the buffer. This allows unmarshall + to be resumed when more data comes in over the wire. + """ + if self._read_complete: + self._next_message() + try: + if not self._msg_len: + self._read_header() + self._read_body() + except MARSHALL_STREAM_END_ERROR: + return None + return self._message + + _complex_parsers_unpack: Dict[ + str, Callable[["Unmarshaller", SignatureType], Any] + ] = { + "b": read_boolean, + "o": read_string_unpack, + "s": read_string_unpack, + "g": read_signature, + "a": read_array, + "(": read_struct, + "{": read_dict_entry, + "v": read_variant, + "h": read_uint32_unpack, + UINT32_DBUS_TYPE: read_uint32_unpack, + INT16_DBUS_TYPE: read_int16_unpack, + UINT16_DBUS_TYPE: read_uint16_unpack, + } + + _ctype_by_endian: Dict[int, Dict[str, READER_TYPE]] = { + endian: build_simple_parsers(endian) for endian in (LITTLE_ENDIAN, BIG_ENDIAN) + } + + _readers_by_type: Dict[int, Dict[str, READER_TYPE]] = { + LITTLE_ENDIAN: { + **_ctype_by_endian[LITTLE_ENDIAN], + **_complex_parsers_unpack, + }, + BIG_ENDIAN: { + **_ctype_by_endian[BIG_ENDIAN], + **_complex_parsers_unpack, + }, + } diff --git a/dbus_fast/_private/util.py b/dbus_fast/_private/util.py new file mode 100644 index 0000000..72e3b89 --- /dev/null +++ b/dbus_fast/_private/util.py @@ -0,0 +1,172 @@ +import ast +import inspect +from typing import Any, List, Tuple, Union + +from ..signature import SignatureTree, Variant, get_signature_tree + + +def signature_contains_type( + signature: Union[str, SignatureTree], body: List[Any], token: str +) -> bool: + """For a given signature and body, check to see if it contains any members + with the given token""" + if type(signature) is str: + signature = get_signature_tree(signature) + + queue = [] + contains_variants = False + for st in signature.types: + queue.append(st) + + while True: + if not queue: + break + st = queue.pop() + if st.token == token: + return True + elif st.token == "v": + contains_variants = True + queue.extend(st.children) + + if not contains_variants: + return False + + for member in body: + queue.append(member) + + while True: + if not queue: + return False + member = queue.pop() + if type(member) is Variant and signature_contains_type( + member.signature, [member.value], token + ): + return True + elif type(member) is list: + queue.extend(member) + elif type(member) is dict: + queue.extend(member.values()) + + +def replace_fds_with_idx( + signature: Union[str, SignatureTree], body: List[Any] +) -> Tuple[List[Any], List[int]]: + """Take the high level body format and convert it into the low level body + format. 
Type 'h' refers directly to the fd in the body. Replace that with + an index and return the corresponding list of unix fds that can be set on + the Message""" + if type(signature) is str: + signature = get_signature_tree(signature) + + if not signature_contains_type(signature, body, "h"): + return body, [] + + unix_fds = [] + + def _replace(fd): + try: + return unix_fds.index(fd) + except ValueError: + unix_fds.append(fd) + return len(unix_fds) - 1 + + _replace_fds(body, signature.types, _replace) + + return body, unix_fds + + +def replace_idx_with_fds( + signature: Union[str, SignatureTree], body: List[Any], unix_fds: List[int] +) -> List[Any]: + """Take the low level body format and return the high level body format. + Type 'h' refers to an index in the unix_fds array. Replace those with the + actual file descriptor or `None` if one does not exist.""" + if type(signature) is str: + signature = get_signature_tree(signature) + + if not signature_contains_type(signature, body, "h"): + return body + + def _replace(idx): + try: + return unix_fds[idx] + except IndexError: + return None + + _replace_fds(body, signature.types, _replace) + + return body + + +def parse_annotation(annotation: str) -> str: + """ + Because of PEP 563, if `from __future__ import annotations` is used in code + or on Python version >=3.10 where this is the default, return annotations + from the `inspect` module will return annotations as "forward definitions". + In this case, we must eval the result which we do only when given a string + constant. + """ + + def raise_value_error(): + raise ValueError( + f"service annotations must be a string constant (got {annotation})" + ) + + if not annotation or annotation is inspect.Signature.empty: + return "" + if type(annotation) is not str: + raise_value_error() + try: + body = ast.parse(annotation).body + if len(body) == 1 and type(body[0].value) is ast.Constant: + if type(body[0].value.value) is not str: + raise_value_error() + return body[0].value.value + except SyntaxError: + pass + + return annotation + + +def _replace_fds(body_obj: List[Any], children, replace_fn): + """Replace any type 'h' with the value returned by replace_fn() given the + value of the fd field. 
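+
+    For example, :func:`replace_fds_with_idx` and :func:`replace_idx_with_fds`
+    above are inverses of each other (sketch; ``7`` stands in for a real file
+    descriptor):
+
+    .. code-block:: python3
+
+        body, unix_fds = replace_fds_with_idx("hs", [7, "x"])
+        assert body == [0, "x"] and unix_fds == [7]
+        assert replace_idx_with_fds("hs", body, unix_fds) == [7, "x"]
+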
This is used by the high level interfaces which + allow type 'h' to be the fd directly instead of an index in an external + array such as in the spec.""" + for index, st in enumerate(children): + if not any(sig in st.signature for sig in "hv"): + continue + if st.signature == "h": + body_obj[index] = replace_fn(body_obj[index]) + elif st.token == "a": + if st.children[0].token == "{": + _replace_fds(body_obj[index], st.children, replace_fn) + else: + for i, child in enumerate(body_obj[index]): + if st.signature == "ah": + body_obj[index][i] = replace_fn(child) + else: + _replace_fds([child], st.children, replace_fn) + elif st.token in "(": + _replace_fds(body_obj[index], st.children, replace_fn) + elif st.token in "{": + for key, value in list(body_obj.items()): + body_obj.pop(key) + if st.children[0].signature == "h": + key = replace_fn(key) + if st.children[1].signature == "h": + value = replace_fn(value) + else: + _replace_fds([value], [st.children[1]], replace_fn) + body_obj[key] = value + + elif st.signature == "v": + if body_obj[index].signature == "h": + body_obj[index].value = replace_fn(body_obj[index].value) + else: + _replace_fds( + [body_obj[index].value], [body_obj[index].type], replace_fn + ) + + elif st.children: + _replace_fds(body_obj[index], st.children, replace_fn) diff --git a/dbus_fast/aio/__init__.py b/dbus_fast/aio/__init__.py new file mode 100644 index 0000000..020dcc7 --- /dev/null +++ b/dbus_fast/aio/__init__.py @@ -0,0 +1,2 @@ +from .message_bus import MessageBus +from .proxy_object import ProxyInterface, ProxyObject diff --git a/dbus_fast/aio/message_bus.py b/dbus_fast/aio/message_bus.py new file mode 100644 index 0000000..10081d2 --- /dev/null +++ b/dbus_fast/aio/message_bus.py @@ -0,0 +1,553 @@ +import array +import asyncio +import contextlib +import logging +import socket +from collections import deque +from copy import copy +from functools import partial +from typing import Any, Callable, List, Optional, Set, Tuple + +from .. 
import introspection as intr +from ..auth import Authenticator, AuthExternal +from ..constants import ( + BusType, + MessageFlag, + MessageType, + NameFlag, + ReleaseNameReply, + RequestNameReply, +) +from ..errors import AuthError +from ..message import Message +from ..message_bus import BaseMessageBus, _block_unexpected_reply +from ..service import ServiceInterface +from .message_reader import build_message_reader +from .proxy_object import ProxyObject + +NO_REPLY_EXPECTED_VALUE = MessageFlag.NO_REPLY_EXPECTED.value + + +def _generate_hello_serialized(next_serial: int) -> bytes: + return Message( + destination="org.freedesktop.DBus", + path="/org/freedesktop/DBus", + interface="org.freedesktop.DBus", + member="Hello", + serial=next_serial, + )._marshall(False) + + +HELLO_1_SERIALIZED = _generate_hello_serialized(1) + + +def _future_set_exception(fut: asyncio.Future, exc: Exception) -> None: + if fut is not None and not fut.done(): + fut.set_exception(exc) + + +def _future_set_result(fut: asyncio.Future, result: Any) -> None: + if fut is not None and not fut.done(): + fut.set_result(result) + + +class _MessageWriter: + """A class to handle writing messages to the message bus.""" + + def __init__(self, bus: "MessageBus") -> None: + """A class to handle writing messages to the message bus.""" + self.messages: deque[ + Tuple[bytearray, Optional[List[int]], Optional[asyncio.Future]] + ] = deque() + self.negotiate_unix_fd = bus._negotiate_unix_fd + self.bus = bus + self.sock = bus._sock + self.loop = bus._loop + self.buf: Optional[memoryview] = None + self.fd = bus._fd + self.offset = 0 + self.unix_fds: Optional[List[int]] = None + self.fut: Optional[asyncio.Future] = None + + def write_callback(self, remove_writer: bool = True) -> None: + """The callback to write messages to the message bus.""" + sock = self.sock + try: + while True: + if self.buf is None: + # If there is no buffer, get the next message + if not self.messages: + # nothing more to write + if remove_writer: + self.loop.remove_writer(self.fd) + return + + # Get the next message + buf, unix_fds, fut = self.messages.popleft() + self.unix_fds = unix_fds + self.buf = memoryview(buf) + self.offset = 0 + self.fut = fut + + if self.unix_fds and self.negotiate_unix_fd: + ancdata = [ + ( + socket.SOL_SOCKET, + socket.SCM_RIGHTS, + array.array("i", self.unix_fds), + ) + ] + self.offset += sock.sendmsg([self.buf[self.offset :]], ancdata) + self.unix_fds = None + else: + self.offset += sock.send(self.buf[self.offset :]) + + if self.offset < len(self.buf): + # wait for writable + return + + # finished writing + self.buf = None + _future_set_result(self.fut, None) + except Exception as e: + if self.bus._user_disconnect: + _future_set_result(self.fut, None) + else: + _future_set_exception(self.fut, e) + self.bus._finalize(e) + + def buffer_message( + self, msg: Message, future: Optional[asyncio.Future] = None + ) -> None: + """Buffer a message to be sent later.""" + unix_fds = msg.unix_fds + self.messages.append( + ( + msg._marshall(self.negotiate_unix_fd), + copy(unix_fds) if unix_fds else None, + future, + ) + ) + + def _write_without_remove_writer(self) -> None: + """Call the write callback without removing the writer.""" + self.write_callback(remove_writer=False) + + def schedule_write( + self, msg: Optional[Message] = None, future: Optional[asyncio.Future] = None + ) -> None: + """Schedule a message to be written.""" + queue_is_empty = not self.messages + if msg is not None: + self.buffer_message(msg, future) + + if self.bus.unique_name: 
+ # Optimization: try to send now if the queue + # is empty. With bleak this usually means we + # can send right away 99% of the time which + # is a huge improvement in latency. + if queue_is_empty: + self._write_without_remove_writer() + + if ( + self.buf is not None + or self.messages + or not self.fut + or not self.fut.done() + ): + self.loop.add_writer(self.fd, self.write_callback) + + +class MessageBus(BaseMessageBus): + """The message bus implementation for use with asyncio. + + The message bus class is the entry point into all the features of the + library. It sets up a connection to the DBus daemon and exposes an + interface to send and receive messages and expose services. + + You must call :func:`connect() ` before + using this message bus. + + :param bus_type: The type of bus to connect to. Affects the search path for + the bus address. + :type bus_type: :class:`BusType ` + :param bus_address: A specific bus address to connect to. Should not be + used under normal circumstances. + :param auth: The authenticator to use, defaults to an instance of + :class:`AuthExternal `. + :type auth: :class:`Authenticator ` + :param negotiate_unix_fd: Allow the bus to send and receive Unix file + descriptors (DBus type 'h'). This must be supported by the transport. + :type negotiate_unix_fd: bool + + :ivar unique_name: The unique name of the message bus connection. It will + be :class:`None` until the message bus connects. + :vartype unique_name: str + :ivar connected: True if this message bus is expected to be able to send + and receive messages. + :vartype connected: bool + """ + + __slots__ = ("_loop", "_auth", "_writer", "_disconnect_future", "_pending_futures") + + def __init__( + self, + bus_address: Optional[str] = None, + bus_type: BusType = BusType.SESSION, + auth: Optional[Authenticator] = None, + negotiate_unix_fd: bool = False, + ) -> None: + super().__init__(bus_address, bus_type, ProxyObject, negotiate_unix_fd) + self._loop = asyncio.get_running_loop() + + self._writer = _MessageWriter(self) + + if auth is None: + self._auth = AuthExternal() + else: + self._auth = auth + + self._disconnect_future = self._loop.create_future() + self._pending_futures: Set[asyncio.Future] = set() + + async def connect(self) -> "MessageBus": + """Connect this message bus to the DBus daemon. + + This method must be called before the message bus can be used. + + :returns: This message bus for convenience. + :rtype: :class:`MessageBus ` + + :raises: + - :class:`AuthError ` - If authorization to \ + the DBus daemon failed. + - :class:`Exception` - If there was a connection error. 
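+
+        A minimal usage sketch; ``ListNames`` on the bus daemon is a standard
+        method, so no service of your own is needed to try it:
+
+        .. code-block:: python3
+
+            from dbus_fast.constants import BusType, MessageType
+            from dbus_fast.message import Message
+            from dbus_fast.aio import MessageBus
+
+            bus = await MessageBus(bus_type=BusType.SESSION).connect()
+            reply = await bus.call(
+                Message(
+                    destination="org.freedesktop.DBus",
+                    path="/org/freedesktop/DBus",
+                    interface="org.freedesktop.DBus",
+                    member="ListNames",
+                )
+            )
+            assert reply.message_type == MessageType.METHOD_RETURN
+            print(reply.body[0])  # names currently owned on the bus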
+ """ + await self._authenticate() + + future = self._loop.create_future() + + self._loop.add_reader( + self._fd, + build_message_reader( + self._sock, + self._process_message, + self._finalize, + self._negotiate_unix_fd, + ), + ) + + def on_hello(reply, err): + try: + if err: + raise err + self.unique_name = reply.body[0] + self._writer.schedule_write() + _future_set_result(future, self) + except Exception as e: + _future_set_exception(future, e) + self.disconnect() + self._finalize(err) + + next_serial = self.next_serial() + self._method_return_handlers[next_serial] = on_hello + if next_serial == 1: + serialized = HELLO_1_SERIALIZED + else: + serialized = _generate_hello_serialized(next_serial) + self._stream.write(serialized) + self._stream.flush() + return await future + + async def introspect( + self, bus_name: str, path: str, timeout: float = 30.0 + ) -> intr.Node: + """Get introspection data for the node at the given path from the given + bus name. + + Calls the standard ``org.freedesktop.DBus.Introspectable.Introspect`` + on the bus for the path. + + :param bus_name: The name to introspect. + :type bus_name: str + :param path: The path to introspect. + :type path: str + :param timeout: The timeout to introspect. + :type timeout: float + + :returns: The introspection data for the name at the path. + :rtype: :class:`Node ` + + :raises: + - :class:`InvalidObjectPathError ` \ + - If the given object path is not valid. + - :class:`InvalidBusNameError ` - If \ + the given bus name is not valid. + - :class:`DBusError ` - If the service threw \ + an error for the method call or returned an invalid result. + - :class:`Exception` - If a connection error occurred. + - :class:`asyncio.TimeoutError` - Waited for future but time run out. + """ + future = self._loop.create_future() + + super().introspect( + bus_name, + path, + partial(self._reply_handler, future), + check_callback_type=False, + ) + + timer_handle = self._loop.call_later( + timeout, _future_set_exception, future, asyncio.TimeoutError + ) + try: + return await future + finally: + timer_handle.cancel() + + async def request_name( + self, name: str, flags: NameFlag = NameFlag.NONE + ) -> RequestNameReply: + """Request that this message bus owns the given name. + + :param name: The name to request. + :type name: str + :param flags: Name flags that affect the behavior of the name request. + :type flags: :class:`NameFlag ` + + :returns: The reply to the name request. + :rtype: :class:`RequestNameReply ` + + :raises: + - :class:`InvalidBusNameError ` - If \ + the given bus name is not valid. + - :class:`DBusError ` - If the service threw \ + an error for the method call or returned an invalid result. + - :class:`Exception` - If a connection error occurred. + """ + future = self._loop.create_future() + + super().request_name( + name, flags, partial(self._reply_handler, future), check_callback_type=False + ) + + return await future + + async def release_name(self, name: str) -> ReleaseNameReply: + """Request that this message bus release the given name. + + :param name: The name to release. + :type name: str + + :returns: The reply to the release request. + :rtype: :class:`ReleaseNameReply ` + + :raises: + - :class:`InvalidBusNameError ` - If \ + the given bus name is not valid. + - :class:`DBusError ` - If the service threw \ + an error for the method call or returned an invalid result. + - :class:`Exception` - If a connection error occurred. 
+ """ + future = self._loop.create_future() + + super().release_name( + name, partial(self._reply_handler, future), check_callback_type=False + ) + + return await future + + async def call(self, msg: Message) -> Optional[Message]: + """Send a method call and wait for a reply from the DBus daemon. + + :param msg: The method call message to send. + :type msg: :class:`Message ` + + :returns: A message in reply to the message sent. If the message does + not expect a reply based on the message flags or type, returns + ``None`` after the message is sent. + :rtype: :class:`Message ` or :class:`None` if no reply is expected. + + :raises: + - :class:`Exception` - If a connection error occurred. + """ + if ( + msg.flags.value & NO_REPLY_EXPECTED_VALUE + or msg.message_type is not MessageType.METHOD_CALL + ): + await self.send(msg) + return None + + future = self._loop.create_future() + + self._call(msg, partial(self._reply_handler, future)) + + await future + + return future.result() + + def send(self, msg: Message) -> asyncio.Future: + """Asynchronously send a message on the message bus. + + .. note:: This method may change to a couroutine function in the 1.0 + release of the library. + + :param msg: The message to send. + :type msg: :class:`Message ` + + :returns: A future that resolves when the message is sent or a + connection error occurs. + :rtype: :class:`Future ` + """ + if not msg.serial: + msg.serial = self.next_serial() + + future = self._loop.create_future() + self._writer.schedule_write(msg, future) + return future + + def get_proxy_object( + self, bus_name: str, path: str, introspection: intr.Node + ) -> ProxyObject: + return super().get_proxy_object(bus_name, path, introspection) + + async def wait_for_disconnect(self): + """Wait for the message bus to disconnect. + + :returns: :class:`None` when the message bus has disconnected. + :rtype: :class:`None` + + :raises: + - :class:`Exception` - If connection was terminated unexpectedly or \ + an internal error occurred in the library. + """ + return await self._disconnect_future + + def _make_method_handler(self, interface, method): + if not asyncio.iscoroutinefunction(method.fn): + return super()._make_method_handler(interface, method) + + negotiate_unix_fd = self._negotiate_unix_fd + msg_body_to_args = ServiceInterface._msg_body_to_args + fn_result_to_body = ServiceInterface._fn_result_to_body + + def _coroutine_method_handler( + msg: Message, send_reply: Callable[[Message], None] + ) -> None: + """A coroutine method handler.""" + args = msg_body_to_args(msg) if msg.unix_fds else msg.body + fut = asyncio.ensure_future(method.fn(interface, *args)) + # Hold a strong reference to the future to ensure + # it is not garbage collected before it is done. 
+ self._pending_futures.add(fut) + if ( + send_reply is _block_unexpected_reply + or msg.flags.value & NO_REPLY_EXPECTED_VALUE + ): + fut.add_done_callback(self._pending_futures.discard) + return + + # We only create the closure function if we are actually going to reply + def _done(fut: asyncio.Future) -> None: + """The callback for when the method is done.""" + with send_reply: + result = fut.result() + body, unix_fds = fn_result_to_body( + result, method.out_signature_tree, replace_fds=negotiate_unix_fd + ) + send_reply( + Message.new_method_return( + msg, method.out_signature, body, unix_fds + ) + ) + + fut.add_done_callback(_done) + # Discard the future only after running the done callback + fut.add_done_callback(self._pending_futures.discard) + + return _coroutine_method_handler + + async def _auth_readline(self) -> str: + buf = b"" + while buf[-2:] != b"\r\n": + # The auth protocol is line based, so we can read until we get a + # newline. + buf += await self._loop.sock_recv(self._sock, 1024) + return buf[:-2].decode() + + async def _authenticate(self) -> None: + await self._loop.sock_sendall(self._sock, b"\0") + + first_line = self._auth._authentication_start( + negotiate_unix_fd=self._negotiate_unix_fd + ) + + if first_line is not None: + if type(first_line) is not str: + raise AuthError("authenticator gave response not type str") + await self._loop.sock_sendall( + self._sock, Authenticator._format_line(first_line) + ) + + while True: + response = self._auth._receive_line(await self._auth_readline()) + if response is not None: + await self._loop.sock_sendall( + self._sock, Authenticator._format_line(response) + ) + self._stream.flush() + if response == "BEGIN": + # The first octet received by the server after the \r\n of the BEGIN command + # from the client must be the first octet of the authenticated/encrypted stream + # of D-Bus messages. + break + + def disconnect(self) -> None: + """Disconnect the message bus by closing the underlying connection asynchronously. + + All pending and future calls will error with a connection error. + """ + super().disconnect() + try: + self._sock.close() + except Exception: + logging.warning("could not close socket", exc_info=True) + + def _finalize(self, err: Optional[Exception] = None) -> None: + try: + self._loop.remove_reader(self._fd) + except Exception: + logging.warning("could not remove message reader", exc_info=True) + try: + self._loop.remove_writer(self._fd) + except Exception: + logging.warning("could not remove message writer", exc_info=True) + + had_handlers = bool(self._method_return_handlers or self._user_message_handlers) + + super()._finalize(err) + + if self._disconnect_future.done(): + return + + if err and not self._user_disconnect: + _future_set_exception(self._disconnect_future, err) + # If this happens during a reply, the message handlers + # will have the exception set and wait_for_disconnect will + # never be called so we need to manually set the exception + # as retrieved to avoid asyncio warnings when the future + # is garbage collected. 
+ if had_handlers: + with contextlib.suppress(Exception): + self._disconnect_future.exception() + else: + _future_set_result(self._disconnect_future, None) + + def _reply_handler( + self, future: asyncio.Future, reply: Optional[Any], err: Optional[Exception] + ) -> None: + """The reply handler for method calls.""" + if err: + _future_set_exception(future, err) + else: + _future_set_result(future, reply) diff --git a/dbus_fast/aio/message_reader.pxd b/dbus_fast/aio/message_reader.pxd new file mode 100644 index 0000000..c570fab --- /dev/null +++ b/dbus_fast/aio/message_reader.pxd @@ -0,0 +1,13 @@ +"""cdefs for message_reader.py""" + +import cython + +from .._private.unmarshaller cimport Unmarshaller + + +cpdef _message_reader( + Unmarshaller unmarshaller, + object process, + object finalize, + bint negotiate_unix_fd +) diff --git a/dbus_fast/aio/message_reader.py b/dbus_fast/aio/message_reader.py new file mode 100644 index 0000000..3964d1b --- /dev/null +++ b/dbus_fast/aio/message_reader.py @@ -0,0 +1,45 @@ +import logging +import socket +from functools import partial +from typing import Callable, Optional + +from .._private.unmarshaller import Unmarshaller +from ..message import Message + + +def _message_reader( + unmarshaller: Unmarshaller, + process: Callable[[Message], None], + finalize: Callable[[Optional[Exception]], None], + negotiate_unix_fd: bool, +) -> None: + """Reads messages from the unmarshaller and passes them to the process function.""" + try: + while True: + message = unmarshaller._unmarshall() + if message is None: + return + try: + process(message) + except Exception: + logging.error("Unexpected error processing message: %s", exc_info=True) + # If we are not negotiating unix fds, we can stop reading as soon as we have + # the buffer is empty as asyncio will call us again when there is more data. + if ( + not negotiate_unix_fd + and not unmarshaller._has_another_message_in_buffer() + ): + return + except Exception as e: + finalize(e) + + +def build_message_reader( + sock: Optional[socket.socket], + process: Callable[[Message], None], + finalize: Callable[[Optional[Exception]], None], + negotiate_unix_fd: bool, +) -> Callable[[], None]: + """Build a callable that reads messages from the unmarshaller and passes them to the process function.""" + unmarshaller = Unmarshaller(None, sock, negotiate_unix_fd) + return partial(_message_reader, unmarshaller, process, finalize, negotiate_unix_fd) diff --git a/dbus_fast/aio/proxy_object.py b/dbus_fast/aio/proxy_object.py new file mode 100644 index 0000000..35f1ddb --- /dev/null +++ b/dbus_fast/aio/proxy_object.py @@ -0,0 +1,205 @@ +import xml.etree.ElementTree as ET +from typing import TYPE_CHECKING, Any, List, Union + +from .. import introspection as intr +from .._private.util import replace_fds_with_idx, replace_idx_with_fds +from ..constants import ErrorType, MessageFlag +from ..errors import DBusError +from ..message import Message +from ..message_bus import BaseMessageBus +from ..proxy_object import BaseProxyInterface, BaseProxyObject +from ..signature import Variant +from ..unpack import unpack_variants as unpack + +if TYPE_CHECKING: + from .message_bus import MessageBus as AioMessageBus + +NO_REPLY_EXPECTED_VALUE = MessageFlag.NO_REPLY_EXPECTED.value + + +class ProxyInterface(BaseProxyInterface): + """A class representing a proxy to an interface exported on the bus by + another client for the asyncio :class:`MessageBus + ` implementation. + + This class is not meant to be constructed directly by the user. 
Use + :func:`ProxyObject.get_interface() + ` on a asyncio proxy object to get + a proxy interface. + + This class exposes methods to call DBus methods, listen to signals, and get + and set properties on the interface that are created dynamically based on + the introspection data passed to the proxy object that made this proxy + interface. + + A *method call* takes this form: + + .. code-block:: python3 + + result = await interface.call_[METHOD](*args) + + Where ``METHOD`` is the name of the method converted to snake case. + + DBus methods are exposed as coroutines that take arguments that correpond + to the *in args* of the interface method definition and return a ``result`` + that corresponds to the *out arg*. If the method has more than one out arg, + they are returned within a :class:`list`. + + To *listen to a signal* use this form: + + .. code-block:: python3 + + interface.on_[SIGNAL](callback) + + To *stop listening to a signal* use this form: + + .. code-block:: python3 + + interface.off_[SIGNAL](callback) + + Where ``SIGNAL`` is the name of the signal converted to snake case. + + DBus signals are exposed with an event-callback interface. The provided + ``callback`` will be called when the signal is emitted with arguments that + correspond to the *out args* of the interface signal definition. + + To *get or set a property* use this form: + + .. code-block:: python3 + + value = await interface.get_[PROPERTY]() + await interface.set_[PROPERTY](value) + + Where ``PROPERTY`` is the name of the property converted to snake case. + + DBus property getters and setters are exposed as coroutines. The ``value`` + must correspond to the type of the property in the interface definition. + + If the service returns an error for a DBus call, a :class:`DBusError + ` will be raised with information about the error. 
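+
+    Putting the pieces together, a sketch against the bus daemon itself (any
+    other introspectable name and path could be substituted):
+
+    .. code-block:: python3
+
+        from dbus_fast.aio import MessageBus
+
+        bus = await MessageBus().connect()
+        introspection = await bus.introspect(
+            "org.freedesktop.DBus", "/org/freedesktop/DBus"
+        )
+        obj = bus.get_proxy_object(
+            "org.freedesktop.DBus", "/org/freedesktop/DBus", introspection
+        )
+        interface = obj.get_interface("org.freedesktop.DBus")
+
+        # "ListNames" becomes call_list_names() per the snake-case rule above
+        names = await interface.call_list_names()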
+ """ + + bus: "AioMessageBus" + + def _add_method(self, intr_method: intr.Method) -> None: + async def method_fn( + *args, flags=MessageFlag.NONE, unpack_variants: bool = False + ): + input_body, unix_fds = replace_fds_with_idx( + intr_method.in_signature, list(args) + ) + + msg = await self.bus.call( + Message( + destination=self.bus_name, + path=self.path, + interface=self.introspection.name, + member=intr_method.name, + signature=intr_method.in_signature, + body=input_body, + flags=flags, + unix_fds=unix_fds, + ) + ) + + if flags is not None and flags.value & NO_REPLY_EXPECTED_VALUE: + return None + + BaseProxyInterface._check_method_return(msg, intr_method.out_signature) + + out_len = len(intr_method.out_args) + + body = replace_idx_with_fds(msg.signature_tree, msg.body, msg.unix_fds) + + if not out_len: + return None + + if unpack_variants: + body = unpack(body) + + if out_len == 1: + return body[0] + return body + + method_name = f"call_{BaseProxyInterface._to_snake_case(intr_method.name)}" + setattr(self, method_name, method_fn) + + def _add_property( + self, + intr_property: intr.Property, + ) -> None: + async def property_getter( + *, flags=MessageFlag.NONE, unpack_variants: bool = False + ): + msg = await self.bus.call( + Message( + destination=self.bus_name, + path=self.path, + interface="org.freedesktop.DBus.Properties", + member="Get", + signature="ss", + body=[self.introspection.name, intr_property.name], + ) + ) + + BaseProxyInterface._check_method_return(msg, "v") + variant = msg.body[0] + if variant.signature != intr_property.signature: + raise DBusError( + ErrorType.CLIENT_ERROR, + f'property returned unexpected signature "{variant.signature}"', + msg, + ) + + body = replace_idx_with_fds("v", msg.body, msg.unix_fds)[0].value + + if unpack_variants: + return unpack(body) + return body + + async def property_setter(val: Any) -> None: + variant = Variant(intr_property.signature, val) + + body, unix_fds = replace_fds_with_idx( + "ssv", [self.introspection.name, intr_property.name, variant] + ) + + msg = await self.bus.call( + Message( + destination=self.bus_name, + path=self.path, + interface="org.freedesktop.DBus.Properties", + member="Set", + signature="ssv", + body=body, + unix_fds=unix_fds, + ) + ) + + BaseProxyInterface._check_method_return(msg) + + snake_case = BaseProxyInterface._to_snake_case(intr_property.name) + setattr(self, f"get_{snake_case}", property_getter) + setattr(self, f"set_{snake_case}", property_setter) + + +class ProxyObject(BaseProxyObject): + """The proxy object implementation for the GLib :class:`MessageBus `. + + For more information, see the :class:`BaseProxyObject `. + """ + + def __init__( + self, + bus_name: str, + path: str, + introspection: Union[intr.Node, str, ET.Element], + bus: BaseMessageBus, + ) -> None: + super().__init__(bus_name, path, introspection, bus, ProxyInterface) + + def get_interface(self, name: str) -> ProxyInterface: + return super().get_interface(name) + + def get_children(self) -> List["ProxyObject"]: + return super().get_children() diff --git a/dbus_fast/auth.py b/dbus_fast/auth.py new file mode 100644 index 0000000..4c77ca6 --- /dev/null +++ b/dbus_fast/auth.py @@ -0,0 +1,127 @@ +import enum +import os +from typing import List, Optional, Tuple + +from .errors import AuthError + +UID_NOT_SPECIFIED = -1 + +# The auth interface here is unstable. I would like to eventually open this up +# for people to define their own custom authentication protocols, but I'm not +# familiar with what's needed for that exactly. 
To work with any message bus +# implementation would require abstracting out all the IO. Async operations +# might be challenging because different IO backends have different ways of +# doing that. I might just end up giving the raw socket and leaving it all up +# to the user, but it would be nice to have a little guidance in the interface +# since a lot of it is strongly specified. If you have a need for this, contact +# the project maintainer to help stabilize this interface. + + +class _AuthResponse(enum.Enum): + OK = "OK" + REJECTED = "REJECTED" + DATA = "DATA" + ERROR = "ERROR" + AGREE_UNIX_FD = "AGREE_UNIX_FD" + + @classmethod + def parse(klass, line: str) -> Tuple["_AuthResponse", List[str]]: + args = line.split(" ") + response = klass(args[0]) + return response, args[1:] + + +# UNSTABLE +class Authenticator: + """The base class for authenticators for :class:`MessageBus ` authentication. + + In the future, the library may allow extending this class for custom authentication protocols. + + :seealso: https://dbus.freedesktop.org/doc/dbus-specification.html#auth-protocol + """ + + def _authentication_start(self, negotiate_unix_fd: bool = False) -> str: + raise NotImplementedError( + "authentication_start() must be implemented in the inheriting class" + ) + + def _receive_line(self, line: str) -> str: + raise NotImplementedError( + "receive_line() must be implemented in the inheriting class" + ) + + @staticmethod + def _format_line(line: str) -> bytes: + return f"{line}\r\n".encode() + + +class AuthExternal(Authenticator): + """An authenticator class for the external auth protocol for use with the + :class:`MessageBus `. + + :param uid: The uid to use when connecting to the message bus. Use UID_NOT_SPECIFIED to use the uid known to the kernel. + :vartype uid: int + + :sealso: https://dbus.freedesktop.org/doc/dbus-specification.html#auth-protocol + """ + + def __init__(self, uid: Optional[int] = None) -> None: + self.negotiate_unix_fd: bool = False + self.negotiating_fds: bool = False + self.uid: Optional[int] = uid + + def _authentication_start(self, negotiate_unix_fd: bool = False) -> str: + self.negotiate_unix_fd = negotiate_unix_fd + uid = self.uid + if uid == UID_NOT_SPECIFIED: + return "AUTH EXTERNAL" + if uid is None: + uid = os.getuid() + hex_uid = str(uid).encode().hex() + return f"AUTH EXTERNAL {hex_uid}" + + def _receive_line(self, line: str) -> str: + response, args = _AuthResponse.parse(line) + + if response is _AuthResponse.OK: + if self.negotiate_unix_fd: + self.negotiating_fds = True + return "NEGOTIATE_UNIX_FD" + else: + return "BEGIN" + + if response is _AuthResponse.AGREE_UNIX_FD: + return "BEGIN" + + if response is _AuthResponse.DATA and self.uid == UID_NOT_SPECIFIED: + return "DATA" + + raise AuthError(f"authentication failed: {response.value}: {args}") + + +class AuthAnonymous(Authenticator): + """An authenticator class for the anonymous auth protocol for use with the + :class:`MessageBus `. 
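+
+    A sketch of picking an authenticator explicitly; note that most bus
+    daemons only accept anonymous authentication when configured to allow it:
+
+    .. code-block:: python3
+
+        from dbus_fast.aio import MessageBus
+        from dbus_fast.auth import AuthAnonymous
+
+        bus = await MessageBus(auth=AuthAnonymous()).connect()
+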
+ + :sealso: https://dbus.freedesktop.org/doc/dbus-specification.html#auth-protocol + """ + + def _authentication_start(self, negotiate_unix_fd: bool = False) -> str: + if negotiate_unix_fd: + raise AuthError( + "anonymous authentication does not support negotiating unix fds right now" + ) + + return "AUTH ANONYMOUS" + + def _receive_line(self, line: str) -> str: + response, args = _AuthResponse.parse(line) + + if response != _AuthResponse.OK: + raise AuthError(f"authentication failed: {response.value}: {args}") + + return "BEGIN" + + +# The following line provides backwards compatibility, remove at some point? --jrd +AuthAnnonymous = AuthAnonymous diff --git a/dbus_fast/constants.py b/dbus_fast/constants.py new file mode 100644 index 0000000..fe70b6e --- /dev/null +++ b/dbus_fast/constants.py @@ -0,0 +1,135 @@ +from enum import Enum, IntFlag + + +class BusType(Enum): + """An enum that indicates a type of bus. On most systems, there are + normally two different kinds of buses running. + """ + + SESSION = 1 #: A bus for the current graphical user session. + SYSTEM = 2 #: A persistent bus for the whole machine. + + +class MessageType(Enum): + """An enum that indicates a type of message.""" + + METHOD_CALL = 1 #: An outgoing method call. + METHOD_RETURN = 2 #: A return to a previously sent method call + ERROR = 3 #: A return to a method call that has failed + SIGNAL = 4 #: A broadcast signal to subscribed connections + + +MESSAGE_TYPE_MAP = {field.value: field for field in MessageType} + + +class MessageFlag(IntFlag): + """Flags that affect the behavior of sent and received messages""" + + NONE = 0 + NO_REPLY_EXPECTED = 1 #: The method call does not expect a method return. + NO_AUTOSTART = 2 + ALLOW_INTERACTIVE_AUTHORIZATION = 4 + + +# This is written out because of https://github.com/python/cpython/issues/98976 +MESSAGE_FLAG_MAP = { + 0: MessageFlag.NONE, + 1: MessageFlag.NO_REPLY_EXPECTED, + 2: MessageFlag.NO_AUTOSTART, + 4: MessageFlag.ALLOW_INTERACTIVE_AUTHORIZATION, +} + + +class NameFlag(IntFlag): + """A flag that affects the behavior of a name request.""" + + NONE = 0 + ALLOW_REPLACEMENT = 1 #: If another client requests this name, let them have it. + REPLACE_EXISTING = 2 #: If another client owns this name, try to take it. + DO_NOT_QUEUE = 4 #: Name requests normally queue and wait for the owner to release the name. Do not enter this queue. + + +class RequestNameReply(Enum): + """An enum that describes the result of a name request.""" + + PRIMARY_OWNER = 1 #: The bus owns the name. + IN_QUEUE = 2 #: The bus is in a queue and may receive the name after it is relased by the primary owner. + EXISTS = 3 #: The name has an owner and NameFlag.DO_NOT_QUEUE was given. + ALREADY_OWNER = 4 #: The bus already owns the name. + + +class ReleaseNameReply(Enum): + """An enum that describes the result of a name release request""" + + RELEASED = 1 + NON_EXISTENT = 2 + NOT_OWNER = 3 + + +class PropertyAccess(Enum): + """An enum that describes whether a DBus property can be gotten or set with + the ``org.freedesktop.DBus.Properties`` interface. + """ + + READ = "read" #: The property is readonly. + WRITE = "write" #: The property is writeonly. + READWRITE = "readwrite" #: The property can be read or written to. 
+ + def readable(self) -> bool: + """Get whether the property can be read.""" + return self == PropertyAccess.READ or self == PropertyAccess.READWRITE + + def writable(self) -> bool: + """Get whether the property can be written to.""" + return self == PropertyAccess.WRITE or self == PropertyAccess.READWRITE + + +class ArgDirection(Enum): + """For an introspected argument, indicates whether it is an input parameter or a return value.""" + + IN = "in" + OUT = "out" + + +class ErrorType(str, Enum): + """An enum for the type of an error for a message reply. + + :seealso: http://man7.org/linux/man-pages/man3/sd-bus-errors.3.html + """ + + SERVICE_ERROR = "com.dubstepdish.dbus.next.ServiceError" #: A custom error to indicate an exported service threw an exception. + INTERNAL_ERROR = "com.dubstepdish.dbus.next.InternalError" #: A custom error to indicate something went wrong with the library. + CLIENT_ERROR = "com.dubstepdish.dbus.next.ClientError" #: A custom error to indicate something went wrong with the client. + + FAILED = "org.freedesktop.DBus.Error.Failed" + NO_MEMORY = "org.freedesktop.DBus.Error.NoMemory" + SERVICE_UNKNOWN = "org.freedesktop.DBus.Error.ServiceUnknown" + NAME_HAS_NO_OWNER = "org.freedesktop.DBus.Error.NameHasNoOwner" + NO_REPLY = "org.freedesktop.DBus.Error.NoReply" + IO_ERROR = "org.freedesktop.DBus.Error.IOError" + BAD_ADDRESS = "org.freedesktop.DBus.Error.BadAddress" + NOT_SUPPORTED = "org.freedesktop.DBus.Error.NotSupported" + LIMITS_EXCEEDED = "org.freedesktop.DBus.Error.LimitsExceeded" + ACCESS_DENIED = "org.freedesktop.DBus.Error.AccessDenied" + AUTH_FAILED = "org.freedesktop.DBus.Error.AuthFailed" + NO_SERVER = "org.freedesktop.DBus.Error.NoServer" + TIMEOUT = "org.freedesktop.DBus.Error.Timeout" + NO_NETWORK = "org.freedesktop.DBus.Error.NoNetwork" + ADDRESS_IN_USE = "org.freedesktop.DBus.Error.AddressInUse" + DISCONNECTED = "org.freedesktop.DBus.Error.Disconnected" + INVALID_ARGS = "org.freedesktop.DBus.Error.InvalidArgs" + FILE_NOT_FOUND = "org.freedesktop.DBus.Error.FileNotFound" + FILE_EXISTS = "org.freedesktop.DBus.Error.FileExists" + UNKNOWN_METHOD = "org.freedesktop.DBus.Error.UnknownMethod" + UNKNOWN_OBJECT = "org.freedesktop.DBus.Error.UnknownObject" + UNKNOWN_INTERFACE = "org.freedesktop.DBus.Error.UnknownInterface" + UNKNOWN_PROPERTY = "org.freedesktop.DBus.Error.UnknownProperty" + PROPERTY_READ_ONLY = "org.freedesktop.DBus.Error.PropertyReadOnly" + UNIX_PROCESS_ID_UNKNOWN = "org.freedesktop.DBus.Error.UnixProcessIdUnknown" + INVALID_SIGNATURE = "org.freedesktop.DBus.Error.InvalidSignature" + INCONSISTENT_MESSAGE = "org.freedesktop.DBus.Error.InconsistentMessage" + MATCH_RULE_NOT_FOUND = "org.freedesktop.DBus.Error.MatchRuleNotFound" + MATCH_RULE_INVALID = "org.freedesktop.DBus.Error.MatchRuleInvalid" + INTERACTIVE_AUTHORIZATION_REQUIRED = ( + "org.freedesktop.DBus.Error.InteractiveAuthorizationRequired" + ) diff --git a/dbus_fast/errors.py b/dbus_fast/errors.py new file mode 100644 index 0000000..0ef39c1 --- /dev/null +++ b/dbus_fast/errors.py @@ -0,0 +1,84 @@ +from typing import Optional, Union + + +class SignatureBodyMismatchError(ValueError): + pass + + +class InvalidSignatureError(ValueError): + pass + + +class InvalidAddressError(ValueError): + pass + + +class AuthError(Exception): + pass + + +class InvalidMessageError(ValueError): + pass + + +class InvalidIntrospectionError(ValueError): + pass + + +class InterfaceNotFoundError(Exception): + pass + + +class SignalDisabledError(Exception): + pass + + +class InvalidBusNameError(TypeError): + def 
__init__(self, name: str) -> None: + super().__init__(f"invalid bus name: {name}") + + +class InvalidObjectPathError(TypeError): + def __init__(self, path: str) -> None: + super().__init__(f"invalid object path: {path}") + + +class InvalidInterfaceNameError(TypeError): + def __init__(self, name: str) -> None: + super().__init__(f"invalid interface name: {name}") + + +class InvalidMemberNameError(TypeError): + def __init__(self, member: str) -> None: + super().__init__(f"invalid member name: {member}") + + +from .constants import ErrorType, MessageType +from .message import Message +from .validators import assert_interface_name_valid + + +class DBusError(Exception): + def __init__( + self, type_: Union[ErrorType, str], text: str, reply: Optional[Message] = None + ) -> None: + super().__init__(text) + + if type(type_) is ErrorType: + type_ = type_.value + + assert_interface_name_valid(type_) # type: ignore[arg-type] + if reply is not None and type(reply) is not Message: + raise TypeError("reply must be of type Message") + + self.type = type_ + self.text = text + self.reply = reply + + @staticmethod + def _from_message(msg: Message) -> "DBusError": + assert msg.message_type == MessageType.ERROR + return DBusError(msg.error_name or "unknown", msg.body[0], reply=msg) + + def _as_message(self, msg: Message) -> Message: + return Message.new_error(msg, self.type, self.text) diff --git a/dbus_fast/glib/__init__.py b/dbus_fast/glib/__init__.py new file mode 100644 index 0000000..020dcc7 --- /dev/null +++ b/dbus_fast/glib/__init__.py @@ -0,0 +1,2 @@ +from .message_bus import MessageBus +from .proxy_object import ProxyInterface, ProxyObject diff --git a/dbus_fast/glib/message_bus.py b/dbus_fast/glib/message_bus.py new file mode 100644 index 0000000..42de20d --- /dev/null +++ b/dbus_fast/glib/message_bus.py @@ -0,0 +1,513 @@ +import io +import logging +import traceback +from typing import Callable, Optional + +from .. 
import introspection as intr +from .._private.unmarshaller import Unmarshaller +from ..auth import Authenticator, AuthExternal +from ..constants import ( + BusType, + MessageFlag, + MessageType, + NameFlag, + ReleaseNameReply, + RequestNameReply, +) +from ..errors import AuthError +from ..message import Message +from ..message_bus import BaseMessageBus +from .proxy_object import ProxyObject + +# glib is optional +_import_error = None +try: + from gi.repository import GLib + + _GLibSource = GLib.Source +except ImportError as e: + _import_error = e + + class _GLibSource: + pass + + +class _MessageSource(_GLibSource): + def __init__(self, bus): + self.unmarshaller = None + self.bus = bus + + def prepare(self): + return (False, -1) + + def check(self): + return False + + def dispatch(self, callback, user_data): + try: + while self.bus._stream.readable(): + if not self.unmarshaller: + self.unmarshaller = Unmarshaller(self.bus._stream) + + message = self.unmarshaller.unmarshall() + if message: + callback(message) + self.unmarshaller = None + else: + break + except Exception as e: + self.bus.disconnect() + self.bus._finalize(e) + return GLib.SOURCE_REMOVE + + return GLib.SOURCE_CONTINUE + + +class _MessageWritableSource(_GLibSource): + def __init__(self, bus): + self.bus = bus + self.buf = b"" + self.message_stream = None + self.chunk_size = 128 + + def prepare(self): + return (False, -1) + + def check(self): + return False + + def dispatch(self, callback, user_data): + try: + if self.buf: + self.bus._stream.write(self.buf) + self.buf = b"" + + if self.message_stream: + while True: + self.buf = self.message_stream.read(self.chunk_size) + if self.buf == b"": + break + self.bus._stream.write(self.buf) + if len(self.buf) < self.chunk_size: + self.buf = b"" + break + self.buf = b"" + + self.bus._stream.flush() + + if not self.bus._buffered_messages: + return GLib.SOURCE_REMOVE + else: + message = self.bus._buffered_messages.pop(0) + self.message_stream = io.BytesIO(message._marshall(False)) + return GLib.SOURCE_CONTINUE + except BlockingIOError: + return GLib.SOURCE_CONTINUE + except Exception as e: + self.bus._finalize(e) + return GLib.SOURCE_REMOVE + + +class _AuthLineSource(_GLibSource): + def __init__(self, stream): + self.stream = stream + self.buf = b"" + + def prepare(self): + return (False, -1) + + def check(self): + return False + + def dispatch(self, callback, user_data): + self.buf += self.stream.read() + if self.buf[-2:] == b"\r\n": + resp = callback(self.buf.decode()[:-2]) + if resp: + return GLib.SOURCE_REMOVE + + return GLib.SOURCE_CONTINUE + + +class MessageBus(BaseMessageBus): + """The message bus implementation for use with the GLib main loop. + + The message bus class is the entry point into all the features of the + library. It sets up a connection to the DBus daemon and exposes an + interface to send and receive messages and expose services. + + You must call :func:`connect() ` or + :func:`connect_sync() ` before + using this message bus. + + :param bus_type: The type of bus to connect to. Affects the search path for + the bus address. + :type bus_type: :class:`BusType ` + :param bus_address: A specific bus address to connect to. Should not be + used under normal circumstances. + :param auth: The authenticator to use, defaults to an instance of + :class:`AuthExternal `. + :type auth: :class:`Authenticator ` + + :ivar connected: True if this message bus is expected to be able to send + and receive messages. 
+ :vartype connected: bool + :ivar unique_name: The unique name of the message bus connection. It will + be :class:`None` until the message bus connects. + :vartype unique_name: str + """ + + def __init__( + self, + bus_address: Optional[str] = None, + bus_type: BusType = BusType.SESSION, + auth: Optional[Authenticator] = None, + ): + if _import_error: + raise _import_error + + super().__init__(bus_address, bus_type, ProxyObject) + self._main_context = GLib.main_context_default() + # buffer messages until connect + self._buffered_messages = [] + + if auth is None: + self._auth = AuthExternal() + else: + self._auth = auth + + def _on_message(self, msg: Message) -> None: + try: + self._process_message(msg) + except Exception as e: + logging.error( + f"got unexpected error processing a message: {e}.\n{traceback.format_exc()}" + ) + + def connect( + self, + connect_notify: Optional[ + Callable[["MessageBus", Optional[Exception]], None] + ] = None, + ): + """Connect this message bus to the DBus daemon. + + This method or the synchronous version must be called before the + message bus can be used. + + :param connect_notify: A callback that will be called with this message + bus. May return an :class:`Exception` on connection errors or + :class:`AuthError ` on authorization errors. + :type callback: :class:`Callable` + """ + + def authenticate_notify(exc): + if exc is not None: + if connect_notify is not None: + connect_notify(None, exc) + return + self.message_source = _MessageSource(self) + self.message_source.set_callback(self._on_message) + self.message_source.attach(self._main_context) + + self.writable_source = None + + self.message_source.add_unix_fd(self._fd, GLib.IO_IN) + + def on_hello(reply, err): + if err: + if connect_notify: + connect_notify(reply, err) + return + + self.unique_name = reply.body[0] + + for m in self._buffered_messages: + self.send(m) + + if connect_notify: + connect_notify(self, err) + + hello_msg = Message( + destination="org.freedesktop.DBus", + path="/org/freedesktop/DBus", + interface="org.freedesktop.DBus", + member="Hello", + serial=self.next_serial(), + ) + + self._method_return_handlers[hello_msg.serial] = on_hello + self._stream.write(hello_msg._marshall(False)) + self._stream.flush() + + self._authenticate(authenticate_notify) + + def connect_sync(self) -> "MessageBus": + """Connect this message bus to the DBus daemon. + + This method or the asynchronous version must be called before the + message bus can be used. + + :returns: This message bus for convenience. + :rtype: :class:`MessageBus ` + + :raises: + - :class:`AuthError ` - If authorization to \ + the DBus daemon failed. + - :class:`Exception` - If there was a connection error. + """ + main = GLib.MainLoop() + connection_error = None + + def connect_notify(bus, err): + nonlocal connection_error + connection_error = err + main.quit() + + self.connect(connect_notify) + main.run() + + if connection_error: + raise connection_error + + return self + + def call( + self, + msg: Message, + reply_notify: Optional[ + Callable[[Optional[Message], Optional[Exception]], None] + ] = None, + ): + """Send a method call and asynchronously wait for a reply from the DBus + daemon. + + :param msg: The method call message to send. + :type msg: :class:`Message ` + :param reply_notify: A callback that will be called with the reply to + this message. May return an :class:`Exception` on connection errors. 
+ :type reply_notify: Callable + """ + BaseMessageBus._check_callback_type(reply_notify) + self._call(msg, reply_notify) + + def call_sync(self, msg: Message) -> Optional[Message]: + """Send a method call and synchronously wait for a reply from the DBus + daemon. + + :param msg: The method call message to send. + :type msg: :class:`Message ` + + :returns: A message in reply to the message sent. If the message does + not expect a reply based on the message flags or type, returns + ``None`` immediately. + :rtype: :class:`Message ` + + :raises: + - :class:`DBusError ` - If the service threw \ + an error for the method call or returned an invalid result. + - :class:`Exception` - If a connection error occurred. + """ + if ( + msg.flags & MessageFlag.NO_REPLY_EXPECTED + or msg.message_type is not MessageType.METHOD_CALL + ): + self.send(msg) + return None + + if not msg.serial: + msg.serial = self.next_serial() + + main = GLib.MainLoop() + handler_reply = None + connection_error = None + + def reply_handler(reply, err): + nonlocal handler_reply + nonlocal connection_error + + handler_reply = reply + connection_error = err + + main.quit() + + self._method_return_handlers[msg.serial] = reply_handler + self.send(msg) + main.run() + + if connection_error: + raise connection_error + + return handler_reply + + def introspect_sync(self, bus_name: str, path: str) -> intr.Node: + """Get introspection data for the node at the given path from the given + bus name. + + Calls the standard ``org.freedesktop.DBus.Introspectable.Introspect`` + on the bus for the path. + + :param bus_name: The name to introspect. + :type bus_name: str + :param path: The path to introspect. + :type path: str + + :returns: The introspection data for the name at the path. + :rtype: :class:`Node ` + + :raises: + - :class:`InvalidObjectPathError ` \ + - If the given object path is not valid. + - :class:`InvalidBusNameError ` - If \ + the given bus name is not valid. + - :class:`DBusError ` - If the service threw \ + an error for the method call or returned an invalid result. + - :class:`Exception` - If a connection error occurred. + """ + main = GLib.MainLoop() + request_result = None + request_error = None + + def reply_notify(result, err): + nonlocal request_result + nonlocal request_error + + request_result = result + request_error = err + + main.quit() + + super().introspect(bus_name, path, reply_notify) + main.run() + + if request_error: + raise request_error + + return request_result + + def request_name_sync( + self, name: str, flags: NameFlag = NameFlag.NONE + ) -> RequestNameReply: + """Request that this message bus owns the given name. + + :param name: The name to request. + :type name: str + :param flags: Name flags that affect the behavior of the name request. + :type flags: :class:`NameFlag ` + + :returns: The reply to the name request. + :rtype: :class:`RequestNameReply ` + + :raises: + - :class:`InvalidBusNameError ` - If \ + the given bus name is not valid. + - :class:`DBusError ` - If the service threw \ + an error for the method call or returned an invalid result. + - :class:`Exception` - If a connection error occurred. 
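+
+        A synchronous sketch (``com.example.name`` is an illustrative name):
+
+        .. code-block:: python3
+
+            from dbus_fast.constants import NameFlag, RequestNameReply
+            from dbus_fast.glib import MessageBus
+
+            bus = MessageBus().connect_sync()
+            reply = bus.request_name_sync(
+                "com.example.name", NameFlag.DO_NOT_QUEUE
+            )
+            if reply is not RequestNameReply.PRIMARY_OWNER:
+                raise RuntimeError(f"could not own the name: {reply!r}")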
+ """ + main = GLib.MainLoop() + request_result = None + request_error = None + + def reply_notify(result, err): + nonlocal request_result + nonlocal request_error + + request_result = result + request_error = err + + main.quit() + + super().request_name(name, flags, reply_notify) + main.run() + + if request_error: + raise request_error + + return request_result + + def release_name_sync(self, name: str) -> ReleaseNameReply: + """Request that this message bus release the given name. + + :param name: The name to release. + :type name: str + + :returns: The reply to the release request. + :rtype: :class:`ReleaseNameReply ` + + :raises: + - :class:`InvalidBusNameError ` - If \ + the given bus name is not valid. + - :class:`DBusError ` - If the service threw \ + an error for the method call or returned an invalid result. + - :class:`Exception` - If a connection error occurred. + """ + main = GLib.MainLoop() + release_result = None + release_error = None + + def reply_notify(result, err): + nonlocal release_result + nonlocal release_error + + release_result = result + release_error = err + + main.quit() + + super().release_name(name, reply_notify) + main.run() + + if release_error: + raise release_error + + return release_result + + def send(self, msg: Message): + if not msg.serial: + msg.serial = self.next_serial() + + self._buffered_messages.append(msg) + + if self.unique_name: + self._schedule_write() + + def get_proxy_object( + self, bus_name: str, path: str, introspection: intr.Node + ) -> ProxyObject: + return super().get_proxy_object(bus_name, path, introspection) + + def _schedule_write(self): + if self.writable_source is None or self.writable_source.is_destroyed(): + self.writable_source = _MessageWritableSource(self) + self.writable_source.attach(self._main_context) + self.writable_source.add_unix_fd(self._fd, GLib.IO_OUT) + + def _authenticate(self, authenticate_notify): + self._stream.write(b"\0") + first_line = self._auth._authentication_start() + if first_line is not None: + if type(first_line) is not str: + raise AuthError("authenticator gave response not type str") + self._stream.write(f"{first_line}\r\n".encode()) + self._stream.flush() + + def line_notify(line): + try: + resp = self._auth._receive_line(line) + self._stream.write(Authenticator._format_line(resp)) + self._stream.flush() + if resp == "BEGIN": + self._readline_source = None + authenticate_notify(None) + return True + except Exception as e: + authenticate_notify(e) + return True + + readline_source = _AuthLineSource(self._stream) + readline_source.set_callback(line_notify) + readline_source.add_unix_fd(self._fd, GLib.IO_IN) + readline_source.attach(self._main_context) + # make sure it doesnt get cleaned up + self._readline_source = readline_source diff --git a/dbus_fast/glib/proxy_object.py b/dbus_fast/glib/proxy_object.py new file mode 100644 index 0000000..ba9f472 --- /dev/null +++ b/dbus_fast/glib/proxy_object.py @@ -0,0 +1,320 @@ +import xml.etree.ElementTree as ET +from typing import List, Union + +from .. 
import introspection as intr +from ..constants import ErrorType +from ..errors import DBusError +from ..message import Message +from ..message_bus import BaseMessageBus +from ..proxy_object import BaseProxyInterface, BaseProxyObject +from ..signature import Variant +from ..unpack import unpack_variants as unpack + +# glib is optional +try: + from gi.repository import GLib +except ImportError: + pass + + +class ProxyInterface(BaseProxyInterface): + """A class representing a proxy to an interface exported on the bus by + another client for the GLib :class:`MessageBus ` + implementation. + + This class is not meant to be constructed directly by the user. Use + :func:`ProxyObject.get_interface() + ` on a GLib proxy + object to get a proxy interface. + + This class exposes methods to call DBus methods, listen to signals, and get + and set properties on the interface that are created dynamically based on + the introspection data passed to the proxy object that made this proxy + interface. + + A *method call* takes this form: + + .. code-block:: python3 + + def callback(error: Exception, result: list(Any)): + pass + + interface.call_[METHOD](*args, callback) + result = interface.call_[METHOD]_sync(*args) + + Where ``METHOD`` is the name of the method converted to snake case. + + To call a method, provide ``*args`` that correspond to the *in args* of the + introspection method definition. + + To *asynchronously* call a method, provide a callback that takes an error + as the first argument and a list as the second argument. If the call + completed successfully, ``error`` will be :class:`None`. If the service + returns an error, it will be a :class:`DBusError ` + with information about the error returned from the bus. The result will be + a list of values that correspond to the *out args* of the introspection + method definition. + + To *synchronously* call a method, use the ``call_[METHOD]_sync()`` form. + The ``result`` corresponds to the *out arg* of the introspection method + definition. If the method has more than one otu arg, they are returned + within a :class:`list`. + + To *listen to a signal* use this form: + + .. code-block:: python3 + + interface.on_[SIGNAL](callback) + + To *stop listening to a signal* use this form: + + .. code-block:: python3 + + interface.off_[SIGNAL](callback) + + Where ``SIGNAL`` is the name of the signal converted to snake case. + + DBus signals are exposed with an event-callback interface. The provided + ``callback`` will be called when the signal is emitted with arguments that + correspond to the *out args* of the interface signal definition. + + To *get or set a property* use this form: + + .. code-block:: python3 + + def get_callback(error: Exception, value: Any): + pass + + def set_callback(error: Exception) + pass + + interface.get_[PROPERTY](get_callback) + value: Any = interface.get_[PROPERTY]_sync() + + interface.set_[PROPERTY](set_callback) + interface.set_[PROPERTY]_sync(value) + + Where ``PROPERTY`` is the name of the property converted to snake case. + + The ``value`` must correspond to the type of the property in the interface + definition. + + To asynchronously get or set a property, provide a callback that takes an + :class:`Exception` as the first argument. If the call completed + successfully, ``error`` will be :class:`None`. If the service returns an + error, it will be a :class:`DBusError ` with + information about the error returned from the bus. 
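+
+    A synchronous sketch against the bus daemon (any other introspectable
+    name and path could be substituted):
+
+    .. code-block:: python3
+
+        from dbus_fast.glib import MessageBus
+
+        bus = MessageBus().connect_sync()
+        introspection = bus.introspect_sync(
+            "org.freedesktop.DBus", "/org/freedesktop/DBus"
+        )
+        obj = bus.get_proxy_object(
+            "org.freedesktop.DBus", "/org/freedesktop/DBus", introspection
+        )
+        interface = obj.get_interface("org.freedesktop.DBus")
+
+        # "ListNames" becomes call_list_names_sync() in the synchronous form
+        names = interface.call_list_names_sync()
+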
+ + If the service returns an error for a synchronous DBus call, a + :class:`DBusError ` will be raised with information + about the error. + """ + + def _add_method(self, intr_method): + in_len = len(intr_method.in_args) + out_len = len(intr_method.out_args) + + def method_fn(*args, unpack_variants: bool = False): + if len(args) != in_len + 1: + raise TypeError( + f"method {intr_method.name} expects {in_len} arguments and a callback (got {len(args)} args)" + ) + + args = list(args) + # TODO type check: this callback takes two parameters + # (MessageBus.check_callback(cb)) + callback = args.pop() + + def call_notify(msg, err): + if err: + callback([], err) + return + + try: + BaseProxyInterface._check_method_return( + msg, intr_method.out_signature + ) + except DBusError as e: + err = e + + if unpack_variants: + callback(unpack(msg.body), err) + else: + callback(msg.body, err) + + self.bus.call( + Message( + destination=self.bus_name, + path=self.path, + interface=self.introspection.name, + member=intr_method.name, + signature=intr_method.in_signature, + body=list(args), + ), + call_notify, + ) + + def method_fn_sync(*args, unpack_variants: bool = False): + main = GLib.MainLoop() + call_error = None + call_body = None + + def callback(body, err): + nonlocal call_error + nonlocal call_body + call_error = err + call_body = body + main.quit() + + method_fn(*args, callback) + + main.run() + + if call_error: + raise call_error + + if not out_len: + return None + + if unpack_variants: + call_body = unpack(call_body) + + if out_len == 1: + return call_body[0] + return call_body + + method_name = f"call_{BaseProxyInterface._to_snake_case(intr_method.name)}" + method_name_sync = f"{method_name}_sync" + + setattr(self, method_name, method_fn) + setattr(self, method_name_sync, method_fn_sync) + + def _add_property(self, intr_property): + def property_getter(callback, *, unpack_variants: bool = False): + def call_notify(msg, err): + if err: + callback(None, err) + return + + try: + BaseProxyInterface._check_method_return(msg) + except Exception as e: + callback(None, e) + return + + variant = msg.body[0] + if variant.signature != intr_property.signature: + err = DBusError( + ErrorType.CLIENT_ERROR, + 'property returned unexpected signature "{variant.signature}"', + msg, + ) + callback(None, err) + return + if unpack_variants: + callback(unpack(variant.value), None) + else: + callback(variant.value, None) + + self.bus.call( + Message( + destination=self.bus_name, + path=self.path, + interface="org.freedesktop.DBus.Properties", + member="Get", + signature="ss", + body=[self.introspection.name, intr_property.name], + ), + call_notify, + ) + + def property_getter_sync(*, unpack_variants: bool = False): + property_value = None + reply_error = None + + main = GLib.MainLoop() + + def callback(value, err): + nonlocal property_value + nonlocal reply_error + property_value = value + reply_error = err + main.quit() + + property_getter(callback) + main.run() + if reply_error: + raise reply_error + if unpack_variants: + return unpack(property_value) + return property_value + + def property_setter(value, callback): + def call_notify(msg, err): + if err: + callback(None, err) + return + try: + BaseProxyInterface._check_method_return(msg) + except Exception as e: + callback(None, e) + return + + return callback(None, None) + + variant = Variant(intr_property.signature, value) + self.bus.call( + Message( + destination=self.bus_name, + path=self.path, + interface="org.freedesktop.DBus.Properties", + member="Set", + 
signature="ssv", + body=[self.introspection.name, intr_property.name, variant], + ), + call_notify, + ) + + def property_setter_sync(val): + reply_error = None + + main = GLib.MainLoop() + + def callback(value, err): + nonlocal reply_error + reply_error = err + main.quit() + + property_setter(val, callback) + main.run() + if reply_error: + raise reply_error + return None + + snake_case = super()._to_snake_case(intr_property.name) + setattr(self, f"get_{snake_case}", property_getter) + setattr(self, f"get_{snake_case}_sync", property_getter_sync) + setattr(self, f"set_{snake_case}", property_setter) + setattr(self, f"set_{snake_case}_sync", property_setter_sync) + + +class ProxyObject(BaseProxyObject): + """The proxy object implementation for the asyncio :class:`MessageBus `. + + For more information, see the :class:`BaseProxyObject `. + """ + + def __init__( + self, + bus_name: str, + path: str, + introspection: Union[intr.Node, str, ET.Element], + bus: BaseMessageBus, + ): + super().__init__(bus_name, path, introspection, bus, ProxyInterface) + + def get_interface(self, name: str) -> ProxyInterface: + return super().get_interface(name) + + def get_children(self) -> List["ProxyObject"]: + return super().get_children() diff --git a/dbus_fast/introspection.py b/dbus_fast/introspection.py new file mode 100644 index 0000000..135a741 --- /dev/null +++ b/dbus_fast/introspection.py @@ -0,0 +1,597 @@ +import xml.etree.ElementTree as ET +from typing import List, Optional, Union + +from .constants import ArgDirection, PropertyAccess +from .errors import InvalidIntrospectionError +from .signature import SignatureType, get_signature_tree +from .validators import assert_interface_name_valid, assert_member_name_valid + +# https://dbus.freedesktop.org/doc/dbus-specification.html#introspection-format +# TODO annotations + + +class Arg: + """A class that represents an input or output argument to a signal or a method. + + :ivar name: The name of this arg. + :vartype name: str + :ivar direction: Whether this is an input or an output argument. + :vartype direction: :class:`ArgDirection ` + :ivar type: The parsed signature type of this argument. + :vartype type: :class:`SignatureType ` + :ivar signature: The signature string of this argument. + :vartype signature: str + + :raises: + - :class:`InvalidMemberNameError ` - If the name of the arg is not valid. + - :class:`InvalidSignatureError ` - If the signature is not valid. + - :class:`InvalidIntrospectionError ` - If the signature is not a single complete type. + """ + + def __init__( + self, + signature: Union[SignatureType, str], + direction: Optional[List[ArgDirection]] = None, + name: Optional[str] = None, + ): + if name is not None: + assert_member_name_valid(name) + + type_ = None + if type(signature) is SignatureType: + type_ = signature + signature = signature.signature + else: + tree = get_signature_tree(signature) + if len(tree.types) != 1: + raise InvalidIntrospectionError( + f"an argument must have a single complete type. (has {len(tree.types)} types)" + ) + type_ = tree.types[0] + + self.type = type_ + self.signature = signature + self.name = name + self.direction = direction + + def from_xml(element: ET.Element, direction: ArgDirection) -> "Arg": + """Convert a :class:`xml.etree.ElementTree.Element` into a + :class:`Arg`. + + The element must be valid DBus introspection XML for an ``arg``. + + :param element: The parsed XML element. + :type element: :class:`xml.etree.ElementTree.Element` + :param direction: The direction of this arg. 
Must be specified because it can default to different values depending on if it's in a method or signal. + :type direction: :class:`ArgDirection ` + + :raises: + - :class:`InvalidIntrospectionError ` - If the XML tree is not valid introspection data. + """ + name = element.attrib.get("name") + signature = element.attrib.get("type") + + if not signature: + raise InvalidIntrospectionError( + 'a method argument must have a "type" attribute' + ) + + return Arg(signature, direction, name) + + def to_xml(self) -> ET.Element: + """Convert this :class:`Arg` into an :class:`xml.etree.ElementTree.Element`.""" + element = ET.Element("arg") + if self.name: + element.set("name", self.name) + + if self.direction: + element.set("direction", self.direction.value) + element.set("type", self.signature) + + return element + + +class Signal: + """A class that represents a signal exposed on an interface. + + :ivar name: The name of this signal + :vartype name: str + :ivar args: A list of output arguments for this signal. + :vartype args: list(Arg) + :ivar signature: The collected signature of the output arguments. + :vartype signature: str + + :raises: + - :class:`InvalidMemberNameError ` - If the name of the signal is not a valid member name. + """ + + def __init__(self, name: Optional[str], args: Optional[List[Arg]] = None): + if name is not None: + assert_member_name_valid(name) + + self.name = name + self.args = args or [] + self.signature = "".join(arg.signature for arg in self.args) + + def from_xml(element): + """Convert an :class:`xml.etree.ElementTree.Element` to a :class:`Signal`. + + The element must be valid DBus introspection XML for a ``signal``. + + :param element: The parsed XML element. + :type element: :class:`xml.etree.ElementTree.Element` + :param is_root: Whether this is the root node + :type is_root: bool + + :raises: + - :class:`InvalidIntrospectionError ` - If the XML tree is not valid introspection data. + """ + name = element.attrib.get("name") + if not name: + raise InvalidIntrospectionError('signals must have a "name" attribute') + + args = [] + for child in element: + if child.tag == "arg": + args.append(Arg.from_xml(child, ArgDirection.OUT)) + + signal = Signal(name, args) + + return signal + + def to_xml(self) -> ET.Element: + """Convert this :class:`Signal` into an :class:`xml.etree.ElementTree.Element`.""" + element = ET.Element("signal") + element.set("name", self.name) + + for arg in self.args: + element.append(arg.to_xml()) + + return element + + +class Method: + """A class that represents a method exposed on an :class:`Interface`. + + :ivar name: The name of this method. + :vartype name: str + :ivar in_args: A list of input arguments to this method. + :vartype in_args: list(Arg) + :ivar out_args: A list of output arguments to this method. + :vartype out_args: list(Arg) + :ivar in_signature: The collected signature string of the input arguments. + :vartype in_signature: str + :ivar out_signature: The collected signature string of the output arguments. + :vartype out_signature: str + + :raises: + - :class:`InvalidMemberNameError ` - If the name of this method is not valid. 
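+
+    As a small illustrative sketch (the method name here is made up), a
+    ``Method`` can be built directly from :class:`Arg` instances and will
+    collect the in/out signatures:
+
+    .. code-block:: python3
+
+        method = Method(
+            "Echo",
+            in_args=[Arg("s", ArgDirection.IN, "message")],
+            out_args=[Arg("s", ArgDirection.OUT, "reply")],
+        )
+        assert method.in_signature == "s"
+        assert method.out_signature == "s"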
+ """ + + def __init__(self, name: str, in_args: List[Arg] = [], out_args: List[Arg] = []): + assert_member_name_valid(name) + + self.name = name + self.in_args = in_args + self.out_args = out_args + self.in_signature = "".join(arg.signature for arg in in_args) + self.out_signature = "".join(arg.signature for arg in out_args) + + def from_xml(element: ET.Element) -> "Method": + """Convert an :class:`xml.etree.ElementTree.Element` to a :class:`Method`. + + The element must be valid DBus introspection XML for a ``method``. + + :param element: The parsed XML element. + :type element: :class:`xml.etree.ElementTree.Element` + :param is_root: Whether this is the root node + :type is_root: bool + + :raises: + - :class:`InvalidIntrospectionError ` - If the XML tree is not valid introspection data. + """ + name = element.attrib.get("name") + if not name: + raise InvalidIntrospectionError('interfaces must have a "name" attribute') + + in_args = [] + out_args = [] + + for child in element: + if child.tag == "arg": + direction = ArgDirection(child.attrib.get("direction", "in")) + arg = Arg.from_xml(child, direction) + if direction == ArgDirection.IN: + in_args.append(arg) + elif direction == ArgDirection.OUT: + out_args.append(arg) + + return Method(name, in_args, out_args) + + def to_xml(self) -> ET.Element: + """Convert this :class:`Method` into an :class:`xml.etree.ElementTree.Element`.""" + element = ET.Element("method") + element.set("name", self.name) + + for arg in self.in_args: + element.append(arg.to_xml()) + for arg in self.out_args: + element.append(arg.to_xml()) + + return element + + +class Property: + """A class that represents a DBus property exposed on an + :class:`Interface`. + + :ivar name: The name of this property. + :vartype name: str + :ivar signature: The signature string for this property. Must be a single complete type. + :vartype signature: str + :ivar access: Whether this property is readable and writable. + :vartype access: :class:`PropertyAccess ` + :ivar type: The parsed type of this property. + :vartype type: :class:`SignatureType ` + + :raises: + - :class:`InvalidIntrospectionError ` - If the property is not a single complete type. + - :class `InvalidSignatureError ` - If the given signature is not valid. + - :class: `InvalidMemberNameError ` - If the member name is not valid. + """ + + def __init__( + self, + name: str, + signature: str, + access: PropertyAccess = PropertyAccess.READWRITE, + ): + assert_member_name_valid(name) + + tree = get_signature_tree(signature) + if len(tree.types) != 1: + raise InvalidIntrospectionError( + f"properties must have a single complete type. (has {len(tree.types)} types)" + ) + + self.name = name + self.signature = signature + self.access = access + self.type = tree.types[0] + + def from_xml(element): + """Convert an :class:`xml.etree.ElementTree.Element` to a :class:`Property`. + + The element must be valid DBus introspection XML for a ``property``. + + :param element: The parsed XML element. + :type element: :class:`xml.etree.ElementTree.Element` + + :raises: + - :class:`InvalidIntrospectionError ` - If the XML tree is not valid introspection data. 
+ """ + name = element.attrib.get("name") + signature = element.attrib.get("type") + access = PropertyAccess(element.attrib.get("access", "readwrite")) + + if not name: + raise InvalidIntrospectionError('properties must have a "name" attribute') + if not signature: + raise InvalidIntrospectionError('properties must have a "type" attribute') + + return Property(name, signature, access) + + def to_xml(self) -> ET.Element: + """Convert this :class:`Property` into an :class:`xml.etree.ElementTree.Element`.""" + element = ET.Element("property") + element.set("name", self.name) + element.set("type", self.signature) + element.set("access", self.access.value) + return element + + +class Interface: + """A class that represents a DBus interface exported on on object path. + + Contains information about the methods, signals, and properties exposed on + this interface. + + :ivar name: The name of this interface. + :vartype name: str + :ivar methods: A list of methods exposed on this interface. + :vartype methods: list(:class:`Method`) + :ivar signals: A list of signals exposed on this interface. + :vartype signals: list(:class:`Signal`) + :ivar properties: A list of properties exposed on this interface. + :vartype properties: list(:class:`Property`) + + :raises: + - :class:`InvalidInterfaceNameError ` - If the name is not a valid interface name. + """ + + def __init__( + self, + name: str, + methods: Optional[List[Method]] = None, + signals: Optional[List[Signal]] = None, + properties: Optional[List[Property]] = None, + ): + assert_interface_name_valid(name) + + self.name = name + self.methods = methods if methods is not None else [] + self.signals = signals if signals is not None else [] + self.properties = properties if properties is not None else [] + + @staticmethod + def from_xml(element: ET.Element) -> "Interface": + """Convert a :class:`xml.etree.ElementTree.Element` into a + :class:`Interface`. + + The element must be valid DBus introspection XML for an ``interface``. + + :param element: The parsed XML element. + :type element: :class:`xml.etree.ElementTree.Element` + + :raises: + - :class:`InvalidIntrospectionError ` - If the XML tree is not valid introspection data. + """ + name = element.attrib.get("name") + if not name: + raise InvalidIntrospectionError('interfaces must have a "name" attribute') + + interface = Interface(name) + + for child in element: + if child.tag == "method": + interface.methods.append(Method.from_xml(child)) + elif child.tag == "signal": + interface.signals.append(Signal.from_xml(child)) + elif child.tag == "property": + interface.properties.append(Property.from_xml(child)) + + return interface + + def to_xml(self) -> ET.Element: + """Convert this :class:`Interface` into an :class:`xml.etree.ElementTree.Element`.""" + element = ET.Element("interface") + element.set("name", self.name) + + for method in self.methods: + element.append(method.to_xml()) + for signal in self.signals: + element.append(signal.to_xml()) + for prop in self.properties: + element.append(prop.to_xml()) + + return element + + +class Node: + """A class that represents a node in an object path in introspection data. + + A node contains information about interfaces exported on this path and + child nodes. A node can be converted to and from introspection XML exposed + through the ``org.freedesktop.DBus.Introspectable`` standard DBus + interface. + + This class is an essential building block for a high-level DBus interface. + This is the underlying data structure for the :class:`ProxyObject + `. 
A :class:`ServiceInterface + ` definition is converted to this class + to expose XML on the introspectable interface. + + :ivar interfaces: A list of interfaces exposed on this node. + :vartype interfaces: list(:class:`Interface `) + :ivar nodes: A list of child nodes. + :vartype nodes: list(:class:`Node`) + :ivar name: The object path of this node. + :vartype name: str + :ivar is_root: Whether this is the root node. False if it is a child node. + :vartype is_root: bool + + :raises: + - :class:`InvalidIntrospectionError ` - If the name is not a valid node name. + """ + + def __init__( + self, + name: Optional[str] = None, + interfaces: Optional[List[Interface]] = None, + is_root: bool = True, + ): + if not is_root and not name: + raise InvalidIntrospectionError('child nodes must have a "name" attribute') + + self.interfaces = interfaces if interfaces is not None else [] + self.nodes = [] + self.name = name + self.is_root = is_root + + @staticmethod + def from_xml(element: ET.Element, is_root: bool = False): + """Convert an :class:`xml.etree.ElementTree.Element` to a :class:`Node`. + + The element must be valid DBus introspection XML for a ``node``. + + :param element: The parsed XML element. + :type element: :class:`xml.etree.ElementTree.Element` + :param is_root: Whether this is the root node + :type is_root: bool + + :raises: + - :class:`InvalidIntrospectionError ` - If the XML tree is not valid introspection data. + """ + node = Node(element.attrib.get("name"), is_root=is_root) + + for child in element: + if child.tag == "interface": + node.interfaces.append(Interface.from_xml(child)) + elif child.tag == "node": + node.nodes.append(Node.from_xml(child)) + + return node + + @staticmethod + def parse(data: str) -> "Node": + """Parse XML data as a string into a :class:`Node`. + + The string must be valid DBus introspection XML. + + :param data: The XMl string. + :type data: str + + :raises: + - :class:`InvalidIntrospectionError ` - If the string is not valid introspection data. + """ + element = ET.fromstring(data) + if element.tag != "node": + raise InvalidIntrospectionError( + 'introspection data must have a "node" for the root element' + ) + + return Node.from_xml(element, is_root=True) + + def to_xml(self) -> ET.Element: + """Convert this :class:`Node` into an :class:`xml.etree.ElementTree.Element`.""" + element = ET.Element("node") + + if self.name: + element.set("name", self.name) + + for interface in self.interfaces: + element.append(interface.to_xml()) + for node in self.nodes: + element.append(node.to_xml()) + + return element + + def tostring(self) -> str: + """Convert this :class:`Node` into a DBus introspection XML string.""" + header = '\n' + + def indent(elem, level=0): + i = "\n" + level * " " + if len(elem): + if not elem.text or not elem.text.strip(): + elem.text = i + " " + if not elem.tail or not elem.tail.strip(): + elem.tail = i + for elem in elem: + indent(elem, level + 1) + if not elem.tail or not elem.tail.strip(): + elem.tail = i + else: + if level and (not elem.tail or not elem.tail.strip()): + elem.tail = i + + xml = self.to_xml() + indent(xml) + return header + ET.tostring(xml, encoding="unicode").rstrip() + + @staticmethod + def default(name: Optional[str] = None) -> "Node": + """Create a :class:`Node` with the default interfaces supported by this library. 
+ + The default interfaces include: + + * ``org.freedesktop.DBus.Introspectable`` + * ``org.freedesktop.DBus.Peer`` + * ``org.freedesktop.DBus.Properties`` + * ``org.freedesktop.DBus.ObjectManager`` + """ + return Node( + name, + is_root=True, + interfaces=[ + Interface( + "org.freedesktop.DBus.Introspectable", + methods=[ + Method( + "Introspect", out_args=[Arg("s", ArgDirection.OUT, "data")] + ) + ], + ), + Interface( + "org.freedesktop.DBus.Peer", + methods=[ + Method( + "GetMachineId", + out_args=[Arg("s", ArgDirection.OUT, "machine_uuid")], + ), + Method("Ping"), + ], + ), + Interface( + "org.freedesktop.DBus.Properties", + methods=[ + Method( + "Get", + in_args=[ + Arg("s", ArgDirection.IN, "interface_name"), + Arg("s", ArgDirection.IN, "property_name"), + ], + out_args=[Arg("v", ArgDirection.OUT, "value")], + ), + Method( + "Set", + in_args=[ + Arg("s", ArgDirection.IN, "interface_name"), + Arg("s", ArgDirection.IN, "property_name"), + Arg("v", ArgDirection.IN, "value"), + ], + ), + Method( + "GetAll", + in_args=[Arg("s", ArgDirection.IN, "interface_name")], + out_args=[Arg("a{sv}", ArgDirection.OUT, "props")], + ), + ], + signals=[ + Signal( + "PropertiesChanged", + args=[ + Arg("s", ArgDirection.OUT, "interface_name"), + Arg("a{sv}", ArgDirection.OUT, "changed_properties"), + Arg("as", ArgDirection.OUT, "invalidated_properties"), + ], + ) + ], + ), + Interface( + "org.freedesktop.DBus.ObjectManager", + methods=[ + Method( + "GetManagedObjects", + out_args=[ + Arg( + "a{oa{sa{sv}}}", + ArgDirection.OUT, + "objpath_interfaces_and_properties", + ) + ], + ), + ], + signals=[ + Signal( + "InterfacesAdded", + args=[ + Arg("o", ArgDirection.OUT, "object_path"), + Arg( + "a{sa{sv}}", + ArgDirection.OUT, + "interfaces_and_properties", + ), + ], + ), + Signal( + "InterfacesRemoved", + args=[ + Arg("o", ArgDirection.OUT, "object_path"), + Arg("as", ArgDirection.OUT, "interfaces"), + ], + ), + ], + ), + ], + ) diff --git a/dbus_fast/main.py b/dbus_fast/main.py new file mode 100644 index 0000000..f2a4348 --- /dev/null +++ b/dbus_fast/main.py @@ -0,0 +1,2 @@ +def add(n1: int, n2: int) -> int: + return n1 + n2 diff --git a/dbus_fast/message.pxd b/dbus_fast/message.pxd new file mode 100644 index 0000000..891c8ce --- /dev/null +++ b/dbus_fast/message.pxd @@ -0,0 +1,56 @@ +"""cdefs for message.py""" + +import cython + +from ._private.marshaller cimport Marshaller +from .signature cimport Variant + + +cdef object ErrorType +cdef object SignatureTree +cdef object SignatureType +cdef object MessageType + + +cdef object HEADER_PATH +cdef object HEADER_INTERFACE +cdef object HEADER_MEMBER +cdef object HEADER_ERROR_NAME +cdef object HEADER_REPLY_SERIAL +cdef object HEADER_DESTINATION +cdef object HEADER_SENDER +cdef object HEADER_SIGNATURE +cdef object HEADER_UNIX_FDS + + +cdef object LITTLE_ENDIAN +cdef object PROTOCOL_VERSION + +cdef object MESSAGE_FLAG +cdef object MESSAGE_FLAG_NONE +cdef object MESSAGE_TYPE_METHOD_CALL + +cdef get_signature_tree + +cdef class Message: + + cdef public object destination + cdef public object path + cdef public object interface + cdef public object member + cdef public object message_type + cdef public object flags + cdef public object error_name + cdef public object reply_serial + cdef public object sender + cdef public cython.list unix_fds + cdef public object signature + cdef public object signature_tree + cdef public object body + cdef public object serial + + @cython.locals( + body_buffer=cython.bytearray, + header_buffer=cython.bytearray + ) + cpdef 
_marshall(self, object negotiate_unix_fd) diff --git a/dbus_fast/message.py b/dbus_fast/message.py new file mode 100644 index 0000000..ecdb519 --- /dev/null +++ b/dbus_fast/message.py @@ -0,0 +1,319 @@ +from typing import Any, List, Optional, Union + +from ._private.constants import LITTLE_ENDIAN, PROTOCOL_VERSION, HeaderField +from ._private.marshaller import Marshaller +from .constants import ErrorType, MessageFlag, MessageType +from .errors import InvalidMessageError +from .signature import SignatureTree, Variant, get_signature_tree +from .validators import ( + assert_bus_name_valid, + assert_interface_name_valid, + assert_member_name_valid, + assert_object_path_valid, +) + +REQUIRED_FIELDS = { + MessageType.METHOD_CALL.value: ("path", "member"), + MessageType.SIGNAL.value: ("path", "member", "interface"), + MessageType.ERROR.value: ("error_name", "reply_serial"), + MessageType.METHOD_RETURN.value: ("reply_serial",), +} + +HEADER_PATH = HeaderField.PATH.value +HEADER_INTERFACE = HeaderField.INTERFACE.value +HEADER_MEMBER = HeaderField.MEMBER.value +HEADER_ERROR_NAME = HeaderField.ERROR_NAME.value +HEADER_REPLY_SERIAL = HeaderField.REPLY_SERIAL.value +HEADER_DESTINATION = HeaderField.DESTINATION.value +HEADER_SIGNATURE = HeaderField.SIGNATURE.value +HEADER_UNIX_FDS = HeaderField.UNIX_FDS.value + +MESSAGE_FLAG = MessageFlag + +MESSAGE_FLAG_NONE = MessageFlag.NONE +MESSAGE_TYPE_METHOD_CALL = MessageType.METHOD_CALL + + +class Message: + """A class for sending and receiving messages through the + :class:`MessageBus ` with the + low-level api. + + A ``Message`` can be constructed by the user to send over the message bus. + When messages are received, such as from method calls or signal emissions, + they will use this class as well. + + :ivar destination: The address of the client for which this message is intended. + :vartype destination: str + :ivar path: The intended object path exported on the destination bus. + :vartype path: str + :ivar interface: The intended interface on the object path. + :vartype interface: str + :ivar member: The intended member on the interface. + :vartype member: str + :ivar message_type: The type of this message. A method call, signal, method return, or error. + :vartype message_type: :class:`MessageType` + :ivar flags: Flags that affect the behavior of this message. + :vartype flags: :class:`MessageFlag` + :ivar error_name: If this message is an error, the name of this error. Must be a valid interface name. + :vartype error_name: str + :ivar reply_serial: If this is a return type, the serial this message is in reply to. + :vartype reply_serial: int + :ivar sender: The address of the sender of this message. Will be a unique name. + :vartype sender: str + :ivar unix_fds: A list of unix fds that were sent in the header of this message. + :vartype unix_fds: list(int) + :ivar signature: The signature of the body of this message. + :vartype signature: str + :ivar signature_tree: The signature parsed as a signature tree. + :vartype signature_tree: :class:`SignatureTree` + :ivar body: The body of this message. Must match the signature. + :vartype body: list(Any) + :ivar serial: The serial of the message. Will be automatically set during message sending if not present. Use the ``new_serial()`` method of the bus to generate a serial. + :vartype serial: int + + :raises: + - :class:`InvalidMessageError` - If the message is malformed or missing fields for the message type. + - :class:`InvalidSignatureError` - If the given signature is not valid. 
+ - :class:`InvalidObjectPathError` - If ``path`` is not a valid object path. + - :class:`InvalidBusNameError` - If ``destination`` is not a valid bus name. + - :class:`InvalidMemberNameError` - If ``member`` is not a valid member name. + - :class:`InvalidInterfaceNameError` - If ``error_name`` or ``interface`` is not a valid interface name. + """ + + __slots__ = ( + "destination", + "path", + "interface", + "member", + "message_type", + "flags", + "error_name", + "reply_serial", + "sender", + "unix_fds", + "signature", + "signature_tree", + "body", + "serial", + ) + + def __init__( + self, + destination: Optional[str] = None, + path: Optional[str] = None, + interface: Optional[str] = None, + member: Optional[str] = None, + message_type: MessageType = MESSAGE_TYPE_METHOD_CALL, + flags: Union[MessageFlag, int] = MESSAGE_FLAG_NONE, + error_name: Optional[Union[str, ErrorType]] = None, + reply_serial: int = 0, + sender: Optional[str] = None, + unix_fds: List[int] = [], + signature: Optional[Union[SignatureTree, str]] = None, + body: List[Any] = [], + serial: int = 0, + validate: bool = True, + ) -> None: + self.destination = destination + self.path = path + self.interface = interface + self.member = member + self.message_type = message_type + self.flags = flags if type(flags) is MESSAGE_FLAG else MESSAGE_FLAG(flags) + self.error_name = ( + str(error_name.value) if type(error_name) is ErrorType else error_name + ) + self.reply_serial = reply_serial or 0 + self.sender = sender + self.unix_fds = unix_fds + if type(signature) is SignatureTree: + self.signature = signature.signature + self.signature_tree = signature + else: + self.signature = signature or "" # type: ignore[assignment] + self.signature_tree = get_signature_tree(signature or "") + self.body = body + self.serial = serial or 0 + + if not validate: + return + if self.destination is not None: + assert_bus_name_valid(self.destination) + if self.interface is not None: + assert_interface_name_valid(self.interface) + if self.path is not None: + assert_object_path_valid(self.path) + if self.member is not None: + assert_member_name_valid(self.member) + if self.error_name is not None: + assert_interface_name_valid(self.error_name) # type: ignore[arg-type] + + required_fields = REQUIRED_FIELDS.get(self.message_type.value) + if not required_fields: + raise InvalidMessageError(f"got unknown message type: {self.message_type}") + for field in required_fields: + if not getattr(self, field): + raise InvalidMessageError(f"missing required field: {field}") + + def __repr__(self) -> str: + """Return a string representation of this message.""" + return ( + f"" + ) + + @staticmethod + def new_error( + msg: "Message", error_name: Union[str, ErrorType], error_text: str + ) -> "Message": + """A convenience constructor to create an error message in reply to the given message. + + :param msg: The message this error is in reply to. + :type msg: :class:`Message` + :param error_name: The name of this error. Must be a valid interface name. + :type error_name: str + :param error_text: Human-readable text for the error. + + :returns: The error message. + :rtype: :class:`Message` + + :raises: + - :class:`InvalidInterfaceNameError` - If the error_name is not a valid interface name. 
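+
+        A minimal illustrative sketch (``bus``, ``incoming_call``, and the
+        error name are all hypothetical stand-ins):
+
+        .. code-block:: python3
+
+            reply = Message.new_error(
+                incoming_call, "com.example.Error.NotReady", "try again later"
+            )
+            bus.send(reply)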
+ """ + return Message( + message_type=MessageType.ERROR, + reply_serial=msg.serial, + destination=msg.sender, + error_name=error_name, + signature="s", + body=[error_text], + ) + + @staticmethod + def new_method_return( + msg: "Message", + signature: str = "", + body: List[Any] = [], + unix_fds: List[int] = [], + ) -> "Message": + """A convenience constructor to create a method return to the given method call message. + + :param msg: The method call message this is a reply to. + :type msg: :class:`Message` + :param signature: The signature for the message body. + :type signature: str + :param body: The body of this message. Must match the signature. + :type body: list(Any) + :param unix_fds: List integer file descriptors to send with this message. + :type body: list(int) + + :returns: The method return message + :rtype: :class:`Message` + + :raises: + - :class:`InvalidSignatureError` - If the signature is not a valid signature. + """ + return Message( + message_type=MessageType.METHOD_RETURN, + reply_serial=msg.serial, + destination=msg.sender, + signature=signature, + body=body, + unix_fds=unix_fds, + ) + + @staticmethod + def new_signal( + path: str, + interface: str, + member: str, + signature: str = "", + body: Optional[List[Any]] = None, + unix_fds: Optional[List[int]] = None, + ) -> "Message": + """A convenience constructor to create a new signal message. + + :param path: The path of this signal. + :type path: str + :param interface: The interface of this signal. + :type interface: str + :param member: The member name of this signal. + :type member: str + :param signature: The signature of the signal body. + :type signature: str + :param body: The body of this signal message. + :type body: list(Any) + :param unix_fds: List integer file descriptors to send with this message. + :type body: list(int) + + :returns: The signal message. + :rtype: :class:`Message` + + :raises: + - :class:`InvalidSignatureError` - If the signature is not a valid signature. + - :class:`InvalidObjectPathError` - If ``path`` is not a valid object path. + - :class:`InvalidInterfaceNameError` - If ``interface`` is not a valid interface name. + - :class:`InvalidMemberNameError` - If ``member`` is not a valid member name. + """ + return Message( + message_type=MessageType.SIGNAL, + interface=interface, + path=path, + member=member, + signature=signature, + body=body or [], + unix_fds=unix_fds or [], + ) + + def _marshall(self, negotiate_unix_fd: bool) -> bytearray: + """Marshall this message into a byte array.""" + # TODO maximum message size is 134217728 (128 MiB) + body_block = Marshaller(self.signature, self.body) + body_buffer = body_block._marshall() + + fields = [] + + # No verify here since the marshaller will raise an exception if the + # Variant is invalid. 
+ + if self.path: + fields.append([HEADER_PATH, Variant("o", self.path, False)]) + if self.interface: + fields.append([HEADER_INTERFACE, Variant("s", self.interface, False)]) + if self.member: + fields.append([HEADER_MEMBER, Variant("s", self.member, False)]) + if self.error_name: + fields.append([HEADER_ERROR_NAME, Variant("s", self.error_name, False)]) + if self.reply_serial: + fields.append([HEADER_REPLY_SERIAL, Variant("u", self.reply_serial, False)]) + if self.destination: + fields.append([HEADER_DESTINATION, Variant("s", self.destination, False)]) + if self.signature: + fields.append([HEADER_SIGNATURE, Variant("g", self.signature, False)]) + if self.unix_fds and negotiate_unix_fd: + fields.append([HEADER_UNIX_FDS, Variant("u", len(self.unix_fds), False)]) + + header_body = [ + LITTLE_ENDIAN, + self.message_type.value, + self.flags.value, + PROTOCOL_VERSION, + len(body_buffer), + self.serial, + fields, + ] + header_block = Marshaller("yyyyuua(yv)", header_body) + header_block._marshall() + header_block._align(8) + header_buffer = header_block._buffer() + return header_buffer + body_buffer diff --git a/dbus_fast/message_bus.pxd b/dbus_fast/message_bus.pxd new file mode 100644 index 0000000..9c88836 --- /dev/null +++ b/dbus_fast/message_bus.pxd @@ -0,0 +1,70 @@ +import cython + +from ._private.address cimport get_bus_address, parse_address +from .message cimport Message +from .service cimport ServiceInterface, _Method + + +cdef object MessageType +cdef object DBusError +cdef object MessageFlag + +cdef object MESSAGE_TYPE_CALL +cdef object MESSAGE_TYPE_SIGNAL +cdef cython.uint NO_REPLY_EXPECTED_VALUE +cdef object NONE +cdef object NO_REPLY_EXPECTED + +cdef object BLOCK_UNEXPECTED_REPLY +cdef object assert_object_path_valid +cdef object assert_bus_name_valid + +@cython.locals(flag_value=cython.uint) +cdef bint _expects_reply(Message msg) + + +cdef class BaseMessageBus: + + cdef public object unique_name + cdef public bint _disconnected + cdef public object _user_disconnect + cdef public cython.dict _method_return_handlers + cdef public object _serial + cdef public cython.dict _path_exports + cdef public cython.list _user_message_handlers + cdef public cython.dict _name_owners + cdef public object _bus_address + cdef public object _name_owner_match_rule + cdef public cython.dict _match_rules + cdef public object _high_level_client_initialized + cdef public object _ProxyObject + cdef public object _machine_id + cdef public object _negotiate_unix_fd + cdef public object _sock + cdef public object _stream + cdef public object _fd + + cpdef _process_message(self, Message msg) + + @cython.locals( + methods=cython.list, + method=_Method, + interface=ServiceInterface, + interfaces=cython.list, + ) + cdef _find_message_handler(self, Message msg) + + cdef _setup_socket(self) + + @cython.locals(no_reply_expected=bint) + cpdef _call(self, Message msg, object callback) + + cpdef next_serial(self) + + cpdef void _callback_method_handler( + self, + ServiceInterface interface, + _Method method, + Message msg, + object send_reply + ) diff --git a/dbus_fast/message_bus.py b/dbus_fast/message_bus.py new file mode 100644 index 0000000..14776fb --- /dev/null +++ b/dbus_fast/message_bus.py @@ -0,0 +1,1299 @@ +import inspect +import logging +import socket +import traceback +import xml.etree.ElementTree as ET +from functools import partial +from typing import Any, Callable, Dict, List, Optional, Type, Union + +from . 
import introspection as intr +from ._private.address import get_bus_address, parse_address +from ._private.util import replace_fds_with_idx, replace_idx_with_fds +from .constants import ( + BusType, + ErrorType, + MessageFlag, + MessageType, + NameFlag, + ReleaseNameReply, + RequestNameReply, +) +from .errors import DBusError, InvalidAddressError +from .message import Message +from .proxy_object import BaseProxyObject +from .send_reply import SendReply +from .service import ServiceInterface, _Method +from .signature import Variant +from .validators import assert_bus_name_valid, assert_object_path_valid + +MESSAGE_TYPE_CALL = MessageType.METHOD_CALL +MESSAGE_TYPE_SIGNAL = MessageType.SIGNAL +NO_REPLY_EXPECTED_VALUE = MessageFlag.NO_REPLY_EXPECTED.value +NO_REPLY_EXPECTED = MessageFlag.NO_REPLY_EXPECTED +NONE = MessageFlag.NONE +_LOGGER = logging.getLogger(__name__) + + +_Message = Message + + +def _expects_reply(msg: _Message) -> bool: + """Whether a message expects a reply.""" + if msg.flags is NO_REPLY_EXPECTED: + return False + if msg.flags is NONE: + return True + # Slow check for NO_REPLY_EXPECTED + flag_value = msg.flags.value + return not (flag_value & NO_REPLY_EXPECTED_VALUE) + + +def _block_unexpected_reply(reply: _Message) -> None: + """Block a reply if it's not expected. + + Previously we silently ignored replies that were not expected, but this + lead to implementation errors that were hard to debug. Now we log a + debug message instead. + """ + _LOGGER.debug( + "Blocked attempt to send a reply from handler " + "that received a message with flag " + "MessageFlag.NO_REPLY_EXPECTED: %s", + reply, + ) + + +BLOCK_UNEXPECTED_REPLY = _block_unexpected_reply + + +class BaseMessageBus: + """An abstract class to manage a connection to a DBus message bus. + + The message bus class is the entry point into all the features of the + library. It sets up a connection to the DBus daemon and exposes an + interface to send and receive messages and expose services. + + This class is not meant to be used directly by users. For more information, + see the documentation for the implementation of the message bus you plan to + use. + + :param bus_type: The type of bus to connect to. Affects the search path for + the bus address. + :type bus_type: :class:`BusType ` + :param bus_address: A specific bus address to connect to. Should not be + used under normal circumstances. + :type bus_address: str + :param ProxyObject: The proxy object implementation for this message bus. + Must be passed in by an implementation that supports the high-level client. + :type ProxyObject: Type[:class:`BaseProxyObject + `] + + :ivar unique_name: The unique name of the message bus connection. It will + be :class:`None` until the message bus connects. + :vartype unique_name: str + :ivar connected: True if this message bus is expected to be able to send + and receive messages. 
+ :vartype connected: bool + """ + + __slots__ = ( + "unique_name", + "_disconnected", + "_user_disconnect", + "_method_return_handlers", + "_serial", + "_user_message_handlers", + "_name_owners", + "_path_exports", + "_bus_address", + "_name_owner_match_rule", + "_match_rules", + "_high_level_client_initialized", + "_ProxyObject", + "_machine_id", + "_negotiate_unix_fd", + "_sock", + "_stream", + "_fd", + ) + + def __init__( + self, + bus_address: Optional[str] = None, + bus_type: BusType = BusType.SESSION, + ProxyObject: Optional[Type[BaseProxyObject]] = None, + negotiate_unix_fd: bool = False, + ) -> None: + self.unique_name: Optional[str] = None + self._disconnected = False + self._negotiate_unix_fd = negotiate_unix_fd + + # True if the user disconnected himself, so don't throw errors out of + # the main loop. + self._user_disconnect = False + + self._method_return_handlers: Dict[ + int, Callable[[Optional[Message], Optional[Exception]], None] + ] = {} + self._serial = 0 + self._user_message_handlers: List[ + Callable[[Message], Union[Message, bool, None]] + ] = [] + # the key is the name and the value is the unique name of the owner. + # This cache is kept up to date by the NameOwnerChanged signal and is + # used to route messages to the correct proxy object. (used for the + # high level client only) + self._name_owners: Dict[str, str] = {} + # used for the high level service + self._path_exports: Dict[str, list[ServiceInterface]] = {} + self._bus_address = ( + parse_address(bus_address) + if bus_address + else parse_address(get_bus_address(bus_type)) + ) + # the bus implementations need this rule for the high level client to + # work correctly. + self._name_owner_match_rule = "sender='org.freedesktop.DBus',interface='org.freedesktop.DBus',path='/org/freedesktop/DBus',member='NameOwnerChanged'" + # _match_rules: the keys are match rules and the values are ref counts + # (used for the high level client only) + self._match_rules: Dict[str, int] = {} + self._high_level_client_initialized = False + self._ProxyObject = ProxyObject + + # machine id is lazy loaded + self._machine_id: Optional[int] = None + self._sock: Optional[socket.socket] = None + self._fd: Optional[int] = None + self._stream: Optional[Any] = None + + self._setup_socket() + + @property + def connected(self) -> bool: + if self.unique_name is None or self._disconnected or self._user_disconnect: + return False + return True + + def export(self, path: str, interface: ServiceInterface) -> None: + """Export the service interface on this message bus to make it available + to other clients. + + :param path: The object path to export this interface on. + :type path: str + :param interface: The service interface to export. + :type interface: :class:`ServiceInterface + ` + + :raises: + - :class:`InvalidObjectPathError ` - If the given object path is not valid. 
+ - :class:`ValueError` - If an interface with this name is already exported on the message bus at this path + """ + assert_object_path_valid(path) + if not isinstance(interface, ServiceInterface): + raise TypeError("interface must be a ServiceInterface") + + if path not in self._path_exports: + self._path_exports[path] = [] + + for f in self._path_exports[path]: + if f.name == interface.name: + raise ValueError( + f'An interface with this name is already exported on this bus at path "{path}": "{interface.name}"' + ) + + self._path_exports[path].append(interface) + ServiceInterface._add_bus(interface, self, self._make_method_handler) + self._emit_interface_added(path, interface) + + def unexport( + self, path: str, interface: Optional[Union[ServiceInterface, str]] = None + ) -> None: + """Unexport the path or service interface to make it no longer + available to clients. + + :param path: The object path to unexport. + :type path: str + :param interface: The interface instance or the name of the interface + to unexport. If ``None``, unexport every interface on the path. + :type interface: :class:`ServiceInterface + ` or str or None + + :raises: + - :class:`InvalidObjectPathError ` - If the given object path is not valid. + """ + assert_object_path_valid(path) + if type(interface) not in [str, type(None)] and not isinstance( + interface, ServiceInterface + ): + raise TypeError("interface must be a ServiceInterface or interface name") + + if path not in self._path_exports: + return + + exports = self._path_exports[path] + + if type(interface) is str: + try: + interface = next(iface for iface in exports if iface.name == interface) + except StopIteration: + return + + removed_interfaces = [] + if interface is None: + del self._path_exports[path] + for iface in filter(lambda e: not self._has_interface(e), exports): + removed_interfaces.append(iface.name) + ServiceInterface._remove_bus(iface, self) + else: + for i, iface in enumerate(exports): + if iface is interface: + removed_interfaces.append(iface.name) + del self._path_exports[path][i] + if not self._path_exports[path]: + del self._path_exports[path] + if not self._has_interface(iface): + ServiceInterface._remove_bus(iface, self) + break + self._emit_interface_removed(path, removed_interfaces) + + def introspect( + self, + bus_name: str, + path: str, + callback: Callable[[Optional[intr.Node], Optional[Exception]], None], + check_callback_type: bool = True, + ) -> None: + """Get introspection data for the node at the given path from the given + bus name. + + Calls the standard ``org.freedesktop.DBus.Introspectable.Introspect`` + on the bus for the path. + + :param bus_name: The name to introspect. + :type bus_name: str + :param path: The path to introspect. + :type path: str + :param callback: A callback that will be called with the introspection + data as a :class:`Node `. + :type callback: :class:`Callable` + + :raises: + - :class:`InvalidObjectPathError ` - If the given object path is not valid. + - :class:`InvalidBusNameError ` - If the given bus name is not valid. 
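+
+        A minimal illustrative sketch (the bus name and path are made up;
+        ``bus`` is a connected message bus instance):
+
+        .. code-block:: python3
+
+            def on_introspect(node, err):
+                if err is not None:
+                    print(f"introspection failed: {err}")
+                    return
+                print([iface.name for iface in node.interfaces])
+
+            bus.introspect("com.example.Service", "/com/example/Service", on_introspect)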
+ """ + if check_callback_type: + BaseMessageBus._check_callback_type(callback) + + def reply_notify(reply: Optional[Message], err: Optional[Exception]) -> None: + try: + BaseMessageBus._check_method_return(reply, err, "s") + result = intr.Node.parse(reply.body[0]) # type: ignore[union-attr] + except Exception as e: + callback(None, e) + return + + callback(result, None) + + self._call( + Message( + destination=bus_name, + path=path, + interface="org.freedesktop.DBus.Introspectable", + member="Introspect", + ), + reply_notify, + ) + + def _emit_interface_added(self, path: str, interface: ServiceInterface) -> None: + """Emit the ``org.freedesktop.DBus.ObjectManager.InterfacesAdded`` signal. + + This signal is intended to be used to alert clients when + a new interface has been added. + + :param path: Path of exported object. + :type path: str + :param interface: Exported service interface. + :type interface: :class:`ServiceInterface + ` + """ + if self._disconnected: + return + + def get_properties_callback( + interface: ServiceInterface, + result: Any, + user_data: Any, + e: Optional[Exception], + ) -> None: + if e is not None: + try: + raise e + except Exception: + logging.error( + "An exception ocurred when emitting ObjectManager.InterfacesAdded for %s. " + "Some properties will not be included in the signal.", + interface.name, + exc_info=True, + ) + + body = {interface.name: result} + + self.send( + Message.new_signal( + path=path, + interface="org.freedesktop.DBus.ObjectManager", + member="InterfacesAdded", + signature="oa{sa{sv}}", + body=[path, body], + ) + ) + + ServiceInterface._get_all_property_values(interface, get_properties_callback) + + def _emit_interface_removed(self, path: str, removed_interfaces: List[str]) -> None: + """Emit the ``org.freedesktop.DBus.ObjectManager.InterfacesRemoved` signal. + + This signal is intended to be used to alert clients when + a interface has been removed. + + :param path: Path of removed (unexported) object. + :type path: str + :param removed_interfaces: List of unexported service interfaces. + :type removed_interfaces: list[str] + """ + if self._disconnected: + return + + self.send( + Message.new_signal( + path=path, + interface="org.freedesktop.DBus.ObjectManager", + member="InterfacesRemoved", + signature="oas", + body=[path, removed_interfaces], + ) + ) + + def request_name( + self, + name: str, + flags: NameFlag = NameFlag.NONE, + callback: Optional[ + Callable[[Optional[RequestNameReply], Optional[Exception]], None] + ] = None, + check_callback_type: bool = True, + ) -> None: + """Request that this message bus owns the given name. + + :param name: The name to request. + :type name: str + :param flags: Name flags that affect the behavior of the name request. + :type flags: :class:`NameFlag ` + :param callback: A callback that will be called with the reply of the + request as a :class:`RequestNameReply `. + :type callback: :class:`Callable` + + :raises: + - :class:`InvalidBusNameError ` - If the given bus name is not valid. 
+ """ + assert_bus_name_valid(name) + + if callback is not None and check_callback_type: + BaseMessageBus._check_callback_type(callback) + + if type(flags) is not NameFlag: + flags = NameFlag(flags) + + message = Message( + destination="org.freedesktop.DBus", + path="/org/freedesktop/DBus", + interface="org.freedesktop.DBus", + member="RequestName", + signature="su", + body=[name, flags], + ) + + if callback is None: + self._call(message, None) + return + + def reply_notify(reply: Optional[Message], err: Optional[Exception]) -> None: + try: + BaseMessageBus._check_method_return(reply, err, "u") + result = RequestNameReply(reply.body[0]) # type: ignore[union-attr] + except Exception as e: + callback(None, e) # type: ignore[misc] + return + + callback(result, None) # type: ignore[misc] + + self._call(message, reply_notify) + + def release_name( + self, + name: str, + callback: Optional[ + Callable[[Optional[ReleaseNameReply], Optional[Exception]], None] + ] = None, + check_callback_type: bool = True, + ) -> None: + """Request that this message bus release the given name. + + :param name: The name to release. + :type name: str + :param callback: A callback that will be called with the reply of the + release request as a :class:`ReleaseNameReply + `. + :type callback: :class:`Callable` + + :raises: + - :class:`InvalidBusNameError ` - If the given bus name is not valid. + """ + assert_bus_name_valid(name) + + if callback is not None and check_callback_type: + BaseMessageBus._check_callback_type(callback) + + message = Message( + destination="org.freedesktop.DBus", + path="/org/freedesktop/DBus", + interface="org.freedesktop.DBus", + member="ReleaseName", + signature="s", + body=[name], + ) + + if callback is None: + self._call(message, None) + return + + def reply_notify(reply: Optional[Message], err: Optional[Exception]) -> None: + try: + BaseMessageBus._check_method_return(reply, err, "u") + result = ReleaseNameReply(reply.body[0]) # type: ignore[union-attr] + except Exception as e: + callback(None, e) # type: ignore[misc] + return + + callback(result, None) # type: ignore[misc] + + self._call(message, reply_notify) + + def get_proxy_object( + self, bus_name: str, path: str, introspection: Union[intr.Node, str, ET.Element] + ) -> BaseProxyObject: + """Get a proxy object for the path exported on the bus that owns the + name. The object is expected to export the interfaces and nodes + specified in the introspection data. + + This is the entry point into the high-level client. + + :param bus_name: The name on the bus to get the proxy object for. + :type bus_name: str + :param path: The path on the client for the proxy object. + :type path: str + :param introspection: XML introspection data used to build the + interfaces on the proxy object. + :type introspection: :class:`Node ` or str or :class:`ElementTree` + + :returns: A proxy object for the given path on the given name. + :rtype: :class:`BaseProxyObject ` + + :raises: + - :class:`InvalidBusNameError ` - If the given bus name is not valid. + - :class:`InvalidObjectPathError ` - If the given object path is not valid. + - :class:`InvalidIntrospectionError ` - If the introspection data for the node is not valid. 
+ """ + if self._ProxyObject is None: + raise Exception( + "the message bus implementation did not provide a proxy object class" + ) + + self._init_high_level_client() + + return self._ProxyObject(bus_name, path, introspection, self) + + def disconnect(self) -> None: + """Disconnect the message bus by closing the underlying connection asynchronously. + + All pending and future calls will error with a connection error. + """ + self._user_disconnect = True + try: + self._sock.shutdown(socket.SHUT_RDWR) + except Exception: + logging.warning("could not shut down socket", exc_info=True) + + def next_serial(self) -> int: + """Get the next serial for this bus. This can be used as the ``serial`` + attribute of a :class:`Message ` to manually handle + the serial of messages. + + :returns: The next serial for the bus. + :rtype: int + """ + self._serial += 1 + return self._serial + + def add_message_handler( + self, handler: Callable[[Message], Optional[Union[Message, bool]]] + ) -> None: + """Add a custom message handler for incoming messages. + + The handler should be a callable that takes a :class:`Message + `. If the message is a method call, you may return + another Message as a reply and it will be marked as handled. You may + also return ``True`` to mark the message as handled without sending a + reply. + + :param handler: A handler that will be run for every message the bus + connection received. + :type handler: :class:`Callable` or None + """ + error_text = "a message handler must be callable with a single parameter" + if not callable(handler): + raise TypeError(error_text) + + handler_signature = inspect.signature(handler) + if len(handler_signature.parameters) != 1: + raise TypeError(error_text) + + self._user_message_handlers.append(handler) + + def remove_message_handler( + self, handler: Callable[[Message], Optional[Union[Message, bool]]] + ) -> None: + """Remove a message handler that was previously added by + :func:`add_message_handler() + `. + + :param handler: A message handler. + :type handler: :class:`Callable` + """ + for i, h in enumerate(self._user_message_handlers): + if h == handler: + del self._user_message_handlers[i] + return + + def send(self, msg: Message) -> None: + """Asynchronously send a message on the message bus. + + :param msg: The message to send. 
+ :type msg: :class:`Message ` + """ + raise NotImplementedError( + 'the "send" method must be implemented in the inheriting class' + ) + + def _finalize(self, err: Optional[Exception]) -> None: + """should be called after the socket disconnects with the disconnection + error to clean up resources and put the bus in a disconnected state""" + if self._disconnected: + return + + self._disconnected = True + + for handler in self._method_return_handlers.values(): + try: + handler(None, err) + except Exception: + logging.warning( + "a message handler threw an exception on shutdown", exc_info=True + ) + + self._method_return_handlers.clear() + + for path in list(self._path_exports): + self.unexport(path) + + self._user_message_handlers.clear() + + def _has_interface(self, interface: ServiceInterface) -> bool: + for _, exports in self._path_exports.items(): + for iface in exports: + if iface is interface: + return True + + return False + + def _interface_signal_notify( + self, + interface: ServiceInterface, + interface_name: str, + member: str, + signature: str, + body: List[Any], + unix_fds: List[int] = [], + ) -> None: + path = None + for p, ifaces in self._path_exports.items(): + for i in ifaces: + if i is interface: + path = p + + if path is None: + raise Exception( + "Could not find interface on bus (this is a bug in dbus-fast)" + ) + + self.send( + Message.new_signal( + path=path, + interface=interface_name, + member=member, + signature=signature, + body=body, + unix_fds=unix_fds, + ) + ) + + def _introspect_export_path(self, path: str) -> intr.Node: + assert_object_path_valid(path) + + if path in self._path_exports: + node = intr.Node.default(path) + for interface in self._path_exports[path]: + node.interfaces.append(interface.introspect()) + else: + node = intr.Node(path) + + children = set() + + for export_path in self._path_exports: + if not export_path.startswith(path): + continue + + child_path = export_path.split(path, maxsplit=1)[1] + if path != "/" and child_path and child_path[0] != "/": + continue + + child_path = child_path.lstrip("/") + child_name = child_path.split("/", maxsplit=1)[0] + + children.add(child_name) + + node.nodes = [intr.Node(name) for name in children if name] + + return node + + def _setup_socket(self) -> None: + err = None + + for transport, options in self._bus_address: + filename = None + ip_addr = "" + ip_port = 0 + + if transport == "unix": + self._sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM) + self._stream = self._sock.makefile("rwb") + self._fd = self._sock.fileno() + + if "path" in options: + filename = options["path"] + elif "abstract" in options: + filename = b"\0" + options["abstract"].encode() + else: + raise InvalidAddressError( + "got unix transport with unknown path specifier" + ) + + try: + self._sock.connect(filename) + self._sock.setblocking(False) + break + except Exception as e: + err = e + + elif transport == "tcp": + self._sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) + self._stream = self._sock.makefile("rwb") + self._fd = self._sock.fileno() + + if "host" in options: + ip_addr = options["host"] + if "port" in options: + ip_port = int(options["port"]) + + try: + self._sock.connect((ip_addr, ip_port)) + self._sock.setblocking(False) + break + except Exception as e: + err = e + + else: + raise InvalidAddressError(f"got unknown address transport: {transport}") + + if err: + raise err + + def _reply_notify( + self, + msg: Message, + callback: Optional[Callable[[Optional[Message], Optional[Exception]], None]], + reply: 
Optional[Message], + err: Optional[Exception], + ) -> None: + """Callback on reply.""" + if reply and msg.destination and reply.sender: + self._name_owners[msg.destination] = reply.sender + callback(reply, err) + + def _call( + self, + msg: Message, + callback: Optional[Callable[[Optional[Message], Optional[Exception]], None]], + ) -> None: + if not msg.serial: + msg.serial = self.next_serial() + + reply_expected = _expects_reply(msg) + # Make sure the return reply handler is installed + # before sending the message to avoid a race condition + # where the reply is lost in case the backend can + # send it right away. + if reply_expected: + self._method_return_handlers[msg.serial] = partial( + self._reply_notify, msg, callback + ) + + self.send(msg) + + if not reply_expected: + callback(None, None) + + @staticmethod + def _check_callback_type(callback: Callable) -> None: + """Raise a TypeError if the user gives an invalid callback as a parameter""" + + text = "a callback must be callable with two parameters" + + if not callable(callback): + raise TypeError(text) + + fn_signature = inspect.signature(callback) + if len(fn_signature.parameters) != 2: + raise TypeError(text) + + @staticmethod + def _check_method_return( + msg: Optional[Message], err: Optional[Exception], signature: str + ) -> None: + if err: + raise err + elif msg is None: + raise DBusError( + ErrorType.INTERNAL_ERROR, "invalid message type for method call", msg + ) + elif ( + msg.message_type == MessageType.METHOD_RETURN and msg.signature == signature + ): + return + elif msg.message_type == MessageType.ERROR: + raise DBusError._from_message(msg) + else: + raise DBusError( + ErrorType.INTERNAL_ERROR, "invalid message type for method call", msg + ) + + def _process_message(self, msg: _Message) -> None: + """Process a message received from the message bus.""" + handled = False + for user_handler in self._user_message_handlers: + try: + result = user_handler(msg) + if result: + if type(result) is Message: + self.send(result) + handled = True + break + except DBusError as e: + if msg.message_type is MESSAGE_TYPE_CALL: + self.send(e._as_message(msg)) + handled = True + break + else: + logging.exception("A message handler raised an exception: %s", e) + except Exception as e: + logging.exception("A message handler raised an exception: %s", e) + if msg.message_type is MESSAGE_TYPE_CALL: + self.send( + Message.new_error( + msg, + ErrorType.INTERNAL_ERROR, + f"An internal error occurred: {e}.\n{traceback.format_exc()}", + ) + ) + handled = True + break + + if msg.message_type is MESSAGE_TYPE_SIGNAL: + if ( + msg.member == "NameOwnerChanged" + and msg.sender == "org.freedesktop.DBus" + and msg.path == "/org/freedesktop/DBus" + and msg.interface == "org.freedesktop.DBus" + ): + [name, old_owner, new_owner] = msg.body + if new_owner: + self._name_owners[name] = new_owner + elif name in self._name_owners: + del self._name_owners[name] + return + + if msg.message_type is MESSAGE_TYPE_CALL: + if not handled: + handler = self._find_message_handler(msg) + if _expects_reply(msg) is False: + if handler: + handler(msg, BLOCK_UNEXPECTED_REPLY) + else: + _LOGGER.error( + '"%s.%s" with signature "%s" could not be found', + msg.interface, + msg.member, + msg.signature, + ) + return + + send_reply = SendReply(self, msg) + with send_reply: + if handler: + handler(msg, send_reply) + else: + send_reply( + Message.new_error( + msg, + ErrorType.UNKNOWN_METHOD, + f"{msg.interface}.{msg.member} with signature " + f'"{msg.signature}" could not be found', + ) + 
) + return + + # An ERROR or a METHOD_RETURN + return_handler = self._method_return_handlers.get(msg.reply_serial) + if return_handler is not None: + if not handled: + return_handler(msg, None) + del self._method_return_handlers[msg.reply_serial] + + def _callback_method_handler( + self, + interface: ServiceInterface, + method: _Method, + msg: Message, + send_reply: Callable[[Message], None], + ) -> None: + """This is the callback that will be called when a method call is.""" + args = ServiceInterface._c_msg_body_to_args(msg) if msg.unix_fds else msg.body + result = method.fn(interface, *args) + if send_reply is BLOCK_UNEXPECTED_REPLY or _expects_reply(msg) is False: + return + body, fds = ServiceInterface._c_fn_result_to_body( + result, + signature_tree=method.out_signature_tree, + replace_fds=self._negotiate_unix_fd, + ) + send_reply( + Message( + message_type=MessageType.METHOD_RETURN, + reply_serial=msg.serial, + destination=msg.sender, + signature=method.out_signature, + body=body, + unix_fds=fds, + ) + ) + + def _make_method_handler( + self, interface: ServiceInterface, method: _Method + ) -> Callable[[Message, Callable[[Message], None]], None]: + return partial(self._callback_method_handler, interface, method) + + def _find_message_handler( + self, msg: _Message + ) -> Optional[Callable[[Message, Callable[[Message], None]], None]]: + if "org.freedesktop.DBus." in msg.interface: + if ( + msg.interface == "org.freedesktop.DBus.Introspectable" + and msg.member == "Introspect" + and msg.signature == "" + ): + return self._default_introspect_handler + + if msg.interface == "org.freedesktop.DBus.Properties": + return self._default_properties_handler + + if msg.interface == "org.freedesktop.DBus.Peer": + if msg.member == "Ping" and msg.signature == "": + return self._default_ping_handler + elif msg.member == "GetMachineId" and msg.signature == "": + return self._default_get_machine_id_handler + + if ( + msg.interface == "org.freedesktop.DBus.ObjectManager" + and msg.member == "GetManagedObjects" + ): + return self._default_get_managed_objects_handler + + msg_path = msg.path + if msg_path: + interfaces = self._path_exports.get(msg_path) + if not interfaces: + return None + for interface in interfaces: + methods = ServiceInterface._c_get_methods(interface) + for method in methods: + if method.disabled: + continue + + if ( + msg.interface == interface.name + and msg.member == method.name + and msg.signature == method.in_signature + ): + return ServiceInterface._c_get_handler(interface, method, self) + + return None + + def _default_introspect_handler( + self, msg: Message, send_reply: Callable[[Message], None] + ) -> None: + introspection = self._introspect_export_path(msg.path).tostring() + send_reply(Message.new_method_return(msg, "s", [introspection])) + + def _default_ping_handler( + self, msg: Message, send_reply: Callable[[Message], None] + ) -> None: + send_reply(Message.new_method_return(msg)) + + def _default_get_machine_id_handler( + self, msg: Message, send_reply: Callable[[Message], None] + ) -> None: + if self._machine_id: + send_reply(Message.new_method_return(msg, "s", self._machine_id)) + return + + def reply_handler(reply, err): + if err: + # the bus has been disconnected, cannot send a reply + return + + if reply.message_type == MessageType.METHOD_RETURN: + self._machine_id = reply.body[0] + send_reply(Message.new_method_return(msg, "s", [self._machine_id])) + elif reply.message_type == MessageType.ERROR: + send_reply(Message.new_error(msg, reply.error_name, reply.body)) + 
else: + send_reply( + Message.new_error(msg, ErrorType.FAILED, "could not get machine_id") + ) + + self._call( + Message( + destination="org.freedesktop.DBus", + path="/org/freedesktop/DBus", + interface="org.freedesktop.DBus.Peer", + member="GetMachineId", + ), + reply_handler, + ) + + def _default_get_managed_objects_handler( + self, msg: Message, send_reply: Callable[[Message], None] + ) -> None: + result = {} + result_signature = "a{oa{sa{sv}}}" + error_handled = False + + def is_result_complete(): + if not result: + return True + for n, interfaces in result.items(): + for value in interfaces.values(): + if value is None: + return False + + return True + + nodes = [ + node + for node in self._path_exports + if msg.path == "/" or node.startswith(msg.path + "/") + ] + + # first build up the result object to know when it's complete + for node in nodes: + result[node] = {} + for interface in self._path_exports[node]: + result[node][interface.name] = None + + if is_result_complete(): + send_reply(Message.new_method_return(msg, result_signature, [result])) + return + + def get_all_properties_callback(interface, values, node, err): + nonlocal error_handled + if err is not None: + if not error_handled: + error_handled = True + send_reply.send_error(err) + return + + result[node][interface.name] = values + + if is_result_complete(): + send_reply(Message.new_method_return(msg, result_signature, [result])) + + for node in nodes: + for interface in self._path_exports[node]: + ServiceInterface._get_all_property_values( + interface, get_all_properties_callback, node + ) + + def _default_properties_handler( + self, msg: Message, send_reply: Callable[[Message], None] + ) -> None: + methods = {"Get": "ss", "Set": "ssv", "GetAll": "s"} + if msg.member not in methods or methods[msg.member] != msg.signature: + raise DBusError( + ErrorType.UNKNOWN_METHOD, + f'properties interface doesn\'t have method "{msg.member}" with signature "{msg.signature}"', + ) + + interface_name = msg.body[0] + if interface_name == "": + raise DBusError( + ErrorType.NOT_SUPPORTED, + "getting and setting properties with an empty interface string is not supported yet", + ) + + elif msg.path not in self._path_exports: + raise DBusError( + ErrorType.UNKNOWN_OBJECT, f'no interfaces at path: "{msg.path}"' + ) + + match = [ + iface + for iface in self._path_exports[msg.path] + if iface.name == interface_name + ] + if not match: + if interface_name in [ + "org.freedesktop.DBus.Properties", + "org.freedesktop.DBus.Introspectable", + "org.freedesktop.DBus.Peer", + "org.freedesktop.DBus.ObjectManager", + ]: + # the standard interfaces do not have properties + if msg.member == "Get" or msg.member == "Set": + prop_name = msg.body[1] + raise DBusError( + ErrorType.UNKNOWN_PROPERTY, + f'interface "{interface_name}" does not have property "{prop_name}"', + ) + elif msg.member == "GetAll": + send_reply(Message.new_method_return(msg, "a{sv}", [{}])) + return + else: + assert False + raise DBusError( + ErrorType.UNKNOWN_INTERFACE, + f'could not find an interface "{interface_name}" at path: "{msg.path}"', + ) + + interface = match[0] + properties = ServiceInterface._get_properties(interface) + + if msg.member == "Get" or msg.member == "Set": + prop_name = msg.body[1] + match = [ + prop + for prop in properties + if prop.name == prop_name and not prop.disabled + ] + if not match: + raise DBusError( + ErrorType.UNKNOWN_PROPERTY, + f'interface "{interface_name}" does not have property "{prop_name}"', + ) + + prop = match[0] + if msg.member == "Get": + 
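+                # "Get": check read access, fetch the current value through the
+                # registered getter callback, and reply with the value boxed in a
+                # Variant (any unix fds in the value are replaced with indexes first).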
if not prop.access.readable(): + raise DBusError( + ErrorType.UNKNOWN_PROPERTY, + "the property does not have read access", + ) + + def get_property_callback(interface, prop, prop_value, err): + try: + if err is not None: + send_reply.send_error(err) + return + + body, unix_fds = replace_fds_with_idx( + prop.signature, [prop_value] + ) + + send_reply( + Message.new_method_return( + msg, + "v", + [Variant(prop.signature, body[0])], + unix_fds=unix_fds, + ) + ) + except Exception as e: + send_reply.send_error(e) + + ServiceInterface._get_property_value( + interface, prop, get_property_callback + ) + + elif msg.member == "Set": + if not prop.access.writable(): + raise DBusError( + ErrorType.PROPERTY_READ_ONLY, "the property is readonly" + ) + value = msg.body[2] + if value.signature != prop.signature: + raise DBusError( + ErrorType.INVALID_SIGNATURE, + f'wrong signature for property. expected "{prop.signature}"', + ) + assert prop.prop_setter + + def set_property_callback(interface, prop, err): + if err is not None: + send_reply.send_error(err) + return + send_reply(Message.new_method_return(msg)) + + body = replace_idx_with_fds( + value.signature, [value.value], msg.unix_fds + ) + ServiceInterface._set_property_value( + interface, prop, body[0], set_property_callback + ) + + elif msg.member == "GetAll": + + def get_all_properties_callback(interface, values, user_data, err): + if err is not None: + send_reply.send_error(err) + return + body, unix_fds = replace_fds_with_idx("a{sv}", [values]) + send_reply( + Message.new_method_return(msg, "a{sv}", body, unix_fds=unix_fds) + ) + + ServiceInterface._get_all_property_values( + interface, get_all_properties_callback + ) + + else: + assert False + + def _init_high_level_client(self) -> None: + """The high level client is initialized when the first proxy object is + gotten. Currently just sets up the match rules for the name owner cache + so signals can be routed to the right objects.""" + if self._high_level_client_initialized: + return + self._high_level_client_initialized = True + + def add_match_notify(msg, err): + if err: + logging.error( + f'add match request failed. match="{self._name_owner_match_rule}", {err}' + ) + elif msg.message_type == MessageType.ERROR: + logging.error( + f'add match request failed. match="{self._name_owner_match_rule}", {msg.body[0]}' + ) + + self._call( + Message( + destination="org.freedesktop.DBus", + interface="org.freedesktop.DBus", + path="/org/freedesktop/DBus", + member="AddMatch", + signature="s", + body=[self._name_owner_match_rule], + ), + add_match_notify, + ) + + def _add_match_rule(self, match_rule): + """Add a match rule. Match rules added by this function are refcounted + and must be removed by _remove_match_rule(). This is for use in the + high level client only.""" + if match_rule == self._name_owner_match_rule: + return + + if match_rule in self._match_rules: + self._match_rules[match_rule] += 1 + return + + self._match_rules[match_rule] = 1 + + def add_match_notify(msg: Message, err: Optional[Exception]) -> None: + if err: + logging.error(f'add match request failed. match="{match_rule}", {err}') + elif msg.message_type == MessageType.ERROR: + logging.error( + f'add match request failed. 
match="{match_rule}", {msg.body[0]}' + ) + + self._call( + Message( + destination="org.freedesktop.DBus", + interface="org.freedesktop.DBus", + path="/org/freedesktop/DBus", + member="AddMatch", + signature="s", + body=[match_rule], + ), + add_match_notify, + ) + + def _remove_match_rule(self, match_rule): + """Remove a match rule added with _add_match_rule(). This is for use in + the high level client only.""" + if match_rule == self._name_owner_match_rule: + return + + if match_rule in self._match_rules: + self._match_rules[match_rule] -= 1 + if self._match_rules[match_rule] > 0: + return + + del self._match_rules[match_rule] + + def remove_match_notify(msg, err): + if self._disconnected: + return + + if err: + logging.error( + f'remove match request failed. match="{match_rule}", {err}' + ) + elif msg.message_type == MessageType.ERROR: + logging.error( + f'remove match request failed. match="{match_rule}", {msg.body[0]}' + ) + + self._call( + Message( + destination="org.freedesktop.DBus", + interface="org.freedesktop.DBus", + path="/org/freedesktop/DBus", + member="RemoveMatch", + signature="s", + body=[match_rule], + ), + remove_match_notify, + ) diff --git a/dbus_fast/proxy_object.py b/dbus_fast/proxy_object.py new file mode 100644 index 0000000..0aef097 --- /dev/null +++ b/dbus_fast/proxy_object.py @@ -0,0 +1,341 @@ +import asyncio +import inspect +import logging +import re +import xml.etree.ElementTree as ET +from dataclasses import dataclass +from functools import lru_cache +from typing import Callable, Coroutine, Dict, List, Optional, Type, Union + +from . import introspection as intr +from . import message_bus +from ._private.util import replace_idx_with_fds +from .constants import ErrorType, MessageType +from .errors import DBusError, InterfaceNotFoundError +from .message import Message +from .unpack import unpack_variants as unpack +from .validators import assert_bus_name_valid, assert_object_path_valid + + +@dataclass +class SignalHandler: + """Signal handler.""" + + fn: Callable + unpack_variants: bool + + +class BaseProxyInterface: + """An abstract class representing a proxy to an interface exported on the bus by another client. + + Implementations of this class are not meant to be constructed directly by + users. Use :func:`BaseProxyObject.get_interface` to get a proxy interface. + Each message bus implementation provides its own proxy interface + implementation that will be returned by that method. + + Proxy interfaces can be used to call methods, get properties, and listen to + signals on the interface. Proxy interfaces are created dynamically with a + family of methods for each of these operations based on what members the + interface exposes. Each proxy interface implementation exposes these + members in a different way depending on the features of the backend. See + the documentation of the proxy interface implementation you use for more + details. + + :ivar bus_name: The name of the bus this interface is exported on. + :vartype bus_name: str + :ivar path: The object path exported on the client that owns the bus name. + :vartype path: str + :ivar introspection: Parsed introspection data for the proxy interface. + :vartype introspection: :class:`Node ` + :ivar bus: The message bus this proxy interface is connected to. 
+ :vartype bus: :class:`BaseMessageBus ` + """ + + def __init__( + self, + bus_name: str, + path: str, + introspection: intr.Interface, + bus: "message_bus.BaseMessageBus", + ) -> None: + self.bus_name = bus_name + self.path = path + self.introspection = introspection + self.bus = bus + self._signal_handlers: Dict[str, List[SignalHandler]] = {} + self._signal_match_rule = f"type='signal',sender={bus_name},interface={introspection.name},path={path}" + + _underscorer1 = re.compile(r"(.)([A-Z][a-z]+)") + _underscorer2 = re.compile(r"([a-z0-9])([A-Z])") + + @staticmethod + @lru_cache(maxsize=128) + def _to_snake_case(member: str) -> str: + subbed = BaseProxyInterface._underscorer1.sub(r"\1_\2", member) + return BaseProxyInterface._underscorer2.sub(r"\1_\2", subbed).lower() + + @staticmethod + def _check_method_return(msg: Message, signature: Optional[str] = None): + if msg.message_type == MessageType.ERROR: + raise DBusError._from_message(msg) + elif msg.message_type != MessageType.METHOD_RETURN: + raise DBusError( + ErrorType.CLIENT_ERROR, "method call didnt return a method return", msg + ) + elif signature is not None and msg.signature != signature: + raise DBusError( + ErrorType.CLIENT_ERROR, + f'method call returned unexpected signature: "{msg.signature}"', + msg, + ) + + def _add_method(self, intr_method: intr.Method) -> None: + raise NotImplementedError("this must be implemented in the inheriting class") + + def _add_property(self, intr_property: intr.Property) -> None: + raise NotImplementedError("this must be implemented in the inheriting class") + + def _message_handler(self, msg: Message) -> None: + if ( + msg.message_type != MessageType.SIGNAL + or msg.interface != self.introspection.name + or msg.path != self.path + or msg.member not in self._signal_handlers + ): + return + + if ( + msg.sender != self.bus_name + and self.bus._name_owners.get(self.bus_name, "") != msg.sender + ): + # The sender is always a unique name, but the bus name given might + # be a well known name. If the sender isn't an exact match, check + # to see if it owns the bus_name we were given from the cache kept + # on the bus for this purpose. 
+ return + + match = [s for s in self.introspection.signals if s.name == msg.member] + if not len(match): + return + intr_signal = match[0] + if intr_signal.signature != msg.signature: + logging.warning( + f'got signal "{self.introspection.name}.{msg.member}" with unexpected signature "{msg.signature}"' + ) + return + + body = replace_idx_with_fds(msg.signature, msg.body, msg.unix_fds) + no_sig = None + for handler in self._signal_handlers[msg.member]: + if handler.unpack_variants: + if not no_sig: + no_sig = unpack(body) + data = no_sig + else: + data = body + + cb_result = handler.fn(*data) + if isinstance(cb_result, Coroutine): + asyncio.create_task(cb_result) + + def _add_signal(self, intr_signal: intr.Signal, interface: intr.Interface) -> None: + def on_signal_fn(fn: Callable, *, unpack_variants: bool = False): + fn_signature = inspect.signature(fn) + if 0 < len( + [ + par + for par in fn_signature.parameters.values() + if par.kind == inspect.Parameter.KEYWORD_ONLY + and par.default == inspect.Parameter.empty + ] + ): + raise TypeError( + "reply_notify cannot have required keyword only parameters" + ) + + positional_params = [ + par.kind + for par in fn_signature.parameters.values() + if par.kind + not in [inspect.Parameter.KEYWORD_ONLY, inspect.Parameter.VAR_KEYWORD] + ] + if len(positional_params) != len(intr_signal.args) and ( + inspect.Parameter.VAR_POSITIONAL not in positional_params + or len(positional_params) - 1 > len(intr_signal.args) + ): + raise TypeError( + f"reply_notify must be a function with {len(intr_signal.args)} positional parameters" + ) + + if not self._signal_handlers: + self.bus._add_match_rule(self._signal_match_rule) + self.bus.add_message_handler(self._message_handler) + + if intr_signal.name not in self._signal_handlers: + self._signal_handlers[intr_signal.name] = [] + + self._signal_handlers[intr_signal.name].append( + SignalHandler(fn, unpack_variants) + ) + + def off_signal_fn(fn: Callable, *, unpack_variants: bool = False) -> None: + try: + i = self._signal_handlers[intr_signal.name].index( + SignalHandler(fn, unpack_variants) + ) + del self._signal_handlers[intr_signal.name][i] + if not self._signal_handlers[intr_signal.name]: + del self._signal_handlers[intr_signal.name] + except (KeyError, ValueError): + return + + if not self._signal_handlers: + self.bus._remove_match_rule(self._signal_match_rule) + self.bus.remove_message_handler(self._message_handler) + + snake_case = BaseProxyInterface._to_snake_case(intr_signal.name) + setattr(interface, f"on_{snake_case}", on_signal_fn) + setattr(interface, f"off_{snake_case}", off_signal_fn) + + +class BaseProxyObject: + """An abstract class representing a proxy to an object exported on the bus by another client. + + Implementations of this class are not meant to be constructed directly. Use + :func:`BaseMessageBus.get_proxy_object() + ` to get a proxy + object. Each message bus implementation provides its own proxy object + implementation that will be returned by that method. + + The primary use of the proxy object is to select a proxy interface to act + on. Information on what interfaces are available is provided by + introspection data provided to this class. This introspection data can + either be included in your project as an XML file (recommended) or + retrieved from the ``org.freedesktop.DBus.Introspectable`` interface at + runtime. + + :ivar bus_name: The name of the bus this object is exported on. + :vartype bus_name: str + :ivar path: The object path exported on the client that owns the bus name. 
+ :vartype path: str + :ivar introspection: Parsed introspection data for the proxy object. + :vartype introspection: :class:`Node ` + :ivar bus: The message bus this proxy object is connected to. + :vartype bus: :class:`BaseMessageBus ` + :ivar ~.ProxyInterface: The proxy interface class this proxy object uses. + :vartype ~.ProxyInterface: Type[:class:`BaseProxyInterface `] + :ivar child_paths: A list of absolute object paths of the children of this object. + :vartype child_paths: list(str) + + :raises: + - :class:`InvalidBusNameError ` - If the given bus name is not valid. + - :class:`InvalidObjectPathError ` - If the given object path is not valid. + - :class:`InvalidIntrospectionError ` - If the introspection data for the node is not valid. + """ + + def __init__( + self, + bus_name: str, + path: str, + introspection: Union[intr.Node, str, ET.Element], + bus: "message_bus.BaseMessageBus", + ProxyInterface: Type[BaseProxyInterface], + ) -> None: + assert_object_path_valid(path) + assert_bus_name_valid(bus_name) + + if not isinstance(bus, message_bus.BaseMessageBus): + raise TypeError("bus must be an instance of BaseMessageBus") + if not issubclass(ProxyInterface, BaseProxyInterface): + raise TypeError("ProxyInterface must be an instance of BaseProxyInterface") + + if type(introspection) is intr.Node: + self.introspection = introspection + elif type(introspection) is str: + self.introspection = intr.Node.parse(introspection) + elif type(introspection) is ET.Element: + self.introspection = intr.Node.from_xml(introspection) + else: + raise TypeError( + "introspection must be xml node introspection or introspection.Node class" + ) + + self.bus_name = bus_name + self.path = path + self.bus = bus + self.ProxyInterface = ProxyInterface + self.child_paths = [f"{path}/{n.name}" for n in self.introspection.nodes] + + self._interfaces = {} + + # lazy loaded by get_children() + self._children = None + + def get_interface(self, name: str) -> BaseProxyInterface: + """Get an interface exported on this proxy object and connect it to the bus. + + :param name: The name of the interface to retrieve. + :type name: str + + :raises: + - :class:`InterfaceNotFoundError ` - If there is no interface by this name exported on the bus. 
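+
+        :example: (illustrative sketch only; the bus name, object path, interface
+            name, and signal name below are hypothetical, and ``introspection`` is
+            assumed to have been obtained separately, e.g. from an XML file)
+
+        ::
+
+            obj = bus.get_proxy_object("org.example.Service", "/org/example/Object", introspection)
+            iface = obj.get_interface("org.example.Interface")
+
+            # signal members are generated as on_<snake_case>/off_<snake_case>
+            iface.on_something_happened(lambda value: print(value))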
+ """ + if name in self._interfaces: + return self._interfaces[name] + + try: + intr_interface = next( + i for i in self.introspection.interfaces if i.name == name + ) + except StopIteration: + raise InterfaceNotFoundError(f"interface not found on this object: {name}") + + interface = self.ProxyInterface( + self.bus_name, self.path, intr_interface, self.bus + ) + + for intr_method in intr_interface.methods: + interface._add_method(intr_method) + for intr_property in intr_interface.properties: + interface._add_property(intr_property) + for intr_signal in intr_interface.signals: + interface._add_signal(intr_signal, interface) + + def get_owner_notify(msg: Message, err: Optional[Exception]) -> None: + if err: + logging.error(f'getting name owner for "{name}" failed, {err}') + return + if msg.message_type == MessageType.ERROR: + if msg.error_name != ErrorType.NAME_HAS_NO_OWNER.value: + logging.error( + f'getting name owner for "{name}" failed, {msg.body[0]}' + ) + return + + self.bus._name_owners[self.bus_name] = msg.body[0] + + if self.bus_name[0] != ":" and not self.bus._name_owners.get(self.bus_name, ""): + self.bus._call( + Message( + destination="org.freedesktop.DBus", + interface="org.freedesktop.DBus", + path="/org/freedesktop/DBus", + member="GetNameOwner", + signature="s", + body=[self.bus_name], + ), + get_owner_notify, + ) + + self._interfaces[name] = interface + return interface + + def get_children(self) -> List["BaseProxyObject"]: + """Get the child nodes of this proxy object according to the introspection data.""" + if self._children is None: + self._children = [ + self.__class__(self.bus_name, self.path, child, self.bus) + for child in self.introspection.nodes + ] + + return self._children diff --git a/dbus_fast/py.typed b/dbus_fast/py.typed new file mode 100644 index 0000000..e69de29 diff --git a/dbus_fast/send_reply.py b/dbus_fast/send_reply.py new file mode 100644 index 0000000..e53f3da --- /dev/null +++ b/dbus_fast/send_reply.py @@ -0,0 +1,59 @@ +import traceback +from types import TracebackType +from typing import TYPE_CHECKING, Optional, Type + +from .constants import ErrorType +from .errors import DBusError +from .message import Message + +if TYPE_CHECKING: + from .message_bus import BaseMessageBus + + +class SendReply: + """A context manager to send a reply to a message.""" + + __slots__ = ("_bus", "_msg") + + def __init__(self, bus: "BaseMessageBus", msg: Message) -> None: + """Create a new reply context manager.""" + self._bus = bus + self._msg = msg + + def __enter__(self): + return self + + def __call__(self, reply: Message) -> None: + self._bus.send(reply) + + def _exit( + self, + exc_type: Optional[Type[Exception]], + exc_value: Optional[Exception], + tb: Optional[TracebackType], + ) -> bool: + if exc_value: + if isinstance(exc_value, DBusError): + self(exc_value._as_message(self._msg)) + else: + self( + Message.new_error( + self._msg, + ErrorType.SERVICE_ERROR, + f"The service interface raised an error: {exc_value}.\n{traceback.format_tb(tb)}", + ) + ) + return True + + return False + + def __exit__( + self, + exc_type: Optional[Type[Exception]], + exc_value: Optional[Exception], + tb: Optional[TracebackType], + ) -> bool: + return self._exit(exc_type, exc_value, tb) + + def send_error(self, exc: Exception) -> None: + self._exit(exc.__class__, exc, exc.__traceback__) diff --git a/dbus_fast/service.pxd b/dbus_fast/service.pxd new file mode 100644 index 0000000..6a2d637 --- /dev/null +++ b/dbus_fast/service.pxd @@ -0,0 +1,51 @@ +"""cdefs for service.py""" + +import 
cython + +from .message cimport Message +from .signature cimport SignatureTree + + +cdef class _Method: + + cdef public str name + cdef public object fn + cdef public bint disabled + cdef public object introspection + cdef public str in_signature + cdef public str out_signature + cdef public SignatureTree in_signature_tree + cdef public SignatureTree out_signature_tree + + + +cdef tuple _real_fn_result_to_body( + object result, + SignatureTree signature_tree, + bint replace_fds +) + +cdef class ServiceInterface: + + cdef public str name + cdef list __methods + cdef list __properties + cdef list __signals + cdef set __buses + cdef dict __handlers + + @staticmethod + cdef list _c_get_methods(ServiceInterface interface) + + @staticmethod + cdef object _c_get_handler(ServiceInterface interface, _Method method, object bus) + + @staticmethod + cdef list _c_msg_body_to_args(Message msg) + + @staticmethod + cdef tuple _c_fn_result_to_body( + object result, + SignatureTree signature_tree, + bint replace_fds, + ) diff --git a/dbus_fast/service.py b/dbus_fast/service.py new file mode 100644 index 0000000..16489cf --- /dev/null +++ b/dbus_fast/service.py @@ -0,0 +1,659 @@ +import asyncio +import copy +import inspect +from functools import wraps +from typing import TYPE_CHECKING, Any, Callable, Dict, List, Optional, Set, Tuple + +from . import introspection as intr +from ._private.util import ( + parse_annotation, + replace_fds_with_idx, + replace_idx_with_fds, + signature_contains_type, +) +from .constants import PropertyAccess +from .errors import SignalDisabledError +from .message import Message +from .signature import ( + SignatureBodyMismatchError, + SignatureTree, + Variant, + get_signature_tree, +) + +if TYPE_CHECKING: + from .message_bus import BaseMessageBus + + +class _Method: + def __init__(self, fn, name: str, disabled=False): + in_signature = "" + out_signature = "" + + inspection = inspect.signature(fn) + + in_args = [] + for i, param in enumerate(inspection.parameters.values()): + if i == 0: + # first is self + continue + annotation = parse_annotation(param.annotation) + if not annotation: + raise ValueError( + "method parameters must specify the dbus type string as an annotation" + ) + in_args.append(intr.Arg(annotation, intr.ArgDirection.IN, param.name)) + in_signature += annotation + + out_args = [] + out_signature = parse_annotation(inspection.return_annotation) + if out_signature: + for type_ in get_signature_tree(out_signature).types: + out_args.append(intr.Arg(type_, intr.ArgDirection.OUT)) + + self.name = name + self.fn = fn + self.disabled = disabled + self.introspection = intr.Method(name, in_args, out_args) + self.in_signature = in_signature + self.out_signature = out_signature + self.in_signature_tree = get_signature_tree(in_signature) + self.out_signature_tree = get_signature_tree(out_signature) + + +def method(name: Optional[str] = None, disabled: bool = False): + """A decorator to mark a class method of a :class:`ServiceInterface` to be a DBus service method. + + The parameters and return value must each be annotated with a signature + string of a single complete DBus type. + + This class method will be called when a client calls the method on the DBus + interface. The parameters given to the function come from the calling + client and will conform to the dbus-fast type system. The parameters + returned will be returned to the calling client and must conform to the + dbus-fast type system. 
If multiple parameters are returned, they must be + contained within a :class:`list`. + + The decorated method may raise a :class:`DBusError ` + to return an error to the client. + + :param name: The member name that DBus clients will use to call this method. Defaults to the name of the class method. + :type name: str + :param disabled: If set to true, the method will not be visible to clients. + :type disabled: bool + + :example: + + :: + + @method() + def echo(self, val: 's') -> 's': + return val + + @method() + def echo_two(self, val1: 's', val2: 'u') -> 'su': + return [val1, val2] + """ + if name is not None and type(name) is not str: + raise TypeError("name must be a string") + if type(disabled) is not bool: + raise TypeError("disabled must be a bool") + + def decorator(fn): + @wraps(fn) + def wrapped(*args, **kwargs): + fn(*args, **kwargs) + + fn_name = name if name else fn.__name__ + wrapped.__dict__["__DBUS_METHOD"] = _Method(fn, fn_name, disabled=disabled) + + return wrapped + + return decorator + + +class _Signal: + def __init__(self, fn, name, disabled=False): + inspection = inspect.signature(fn) + + args = [] + signature = "" + signature_tree = None + + return_annotation = parse_annotation(inspection.return_annotation) + + if return_annotation: + signature = return_annotation + signature_tree = get_signature_tree(signature) + for type_ in signature_tree.types: + args.append(intr.Arg(type_, intr.ArgDirection.OUT)) + else: + signature = "" + signature_tree = get_signature_tree("") + + self.signature = signature + self.signature_tree = signature_tree + self.name = name + self.disabled = disabled + self.introspection = intr.Signal(self.name, args) + + +def signal(name: Optional[str] = None, disabled: bool = False): + """A decorator to mark a class method of a :class:`ServiceInterface` to be a DBus signal. + + The signal is broadcast on the bus when the decorated class method is + called by the user. + + If the signal has an out argument, the class method must have a return type + annotation with a signature string of a single complete DBus type and the + return value of the class method must conform to the dbus-fast type system. + If the signal has multiple out arguments, they must be returned within a + ``list``. + + :param name: The member name that will be used for this signal. Defaults to + the name of the class method. + :type name: str + :param disabled: If set to true, the signal will not be visible to clients. 
+ :type disabled: bool + + :example: + + :: + + @signal() + def string_signal(self, val) -> 's': + return val + + @signal() + def two_strings_signal(self, val1, val2) -> 'ss': + return [val1, val2] + """ + if name is not None and type(name) is not str: + raise TypeError("name must be a string") + if type(disabled) is not bool: + raise TypeError("disabled must be a bool") + + def decorator(fn): + fn_name = name if name else fn.__name__ + signal = _Signal(fn, fn_name, disabled) + + @wraps(fn) + def wrapped(self, *args, **kwargs): + if signal.disabled: + raise SignalDisabledError("Tried to call a disabled signal") + result = fn(self, *args, **kwargs) + ServiceInterface._handle_signal(self, signal, result) + return result + + wrapped.__dict__["__DBUS_SIGNAL"] = signal + + return wrapped + + return decorator + + +class _Property(property): + def set_options(self, options): + self.options = getattr(self, "options", {}) + for k, v in options.items(): + self.options[k] = v + + if "name" in options and options["name"] is not None: + self.name = options["name"] + else: + self.name = self.prop_getter.__name__ + + if "access" in options: + self.access = PropertyAccess(options["access"]) + else: + self.access = PropertyAccess.READWRITE + + if "disabled" in options: + self.disabled = options["disabled"] + else: + self.disabled = False + + self.introspection = intr.Property(self.name, self.signature, self.access) + + self.__dict__["__DBUS_PROPERTY"] = True + + def __init__(self, fn, *args, **kwargs): + self.prop_getter = fn + self.prop_setter = None + + inspection = inspect.signature(fn) + if len(inspection.parameters) != 1: + raise ValueError('the property must only have the "self" input parameter') + + return_annotation = parse_annotation(inspection.return_annotation) + + if not return_annotation: + raise ValueError( + "the property must specify the dbus type string as a return annotation string" + ) + + self.signature = return_annotation + tree = get_signature_tree(return_annotation) + + if len(tree.types) != 1: + raise ValueError("the property signature must be a single complete type") + + self.type = tree.types[0] + + if "options" in kwargs: + options = kwargs["options"] + self.set_options(options) + del kwargs["options"] + + super().__init__(fn, *args, **kwargs) + + def setter(self, fn, **kwargs): + # XXX The setter decorator seems to be recreating the class in the list + # of class members and clobbering the options so we need to reset them. + # Why does it do that? + result = super().setter(fn, **kwargs) + result.prop_setter = fn + result.set_options(self.options) + return result + + +def dbus_property( + access: PropertyAccess = PropertyAccess.READWRITE, + name: Optional[str] = None, + disabled: bool = False, +): + """A decorator to mark a class method of a :class:`ServiceInterface` to be a DBus property. + + The class method must be a Python getter method with a return annotation + that is a signature string of a single complete DBus type. When a client + gets the property through the ``org.freedesktop.DBus.Properties`` + interface, the getter will be called and the resulting value will be + returned to the client. + + If the property is writable, it must have a setter method that takes a + single parameter that is annotated with the same signature. When a client + sets the property through the ``org.freedesktop.DBus.Properties`` + interface, the setter will be called with the value from the calling + client. 
+ + The parameters of the getter and the setter must conform to the dbus-fast + type system. The getter or the setter may raise a :class:`DBusError + ` to return an error to the client. + + :param name: The name that DBus clients will use to interact with this + property on the bus. + :type name: str + :param disabled: If set to true, the property will not be visible to + clients. + :type disabled: bool + + :example: + + :: + + @dbus_property() + def string_prop(self) -> 's': + return self._string_prop + + @string_prop.setter + def string_prop(self, val: 's'): + self._string_prop = val + """ + if type(access) is not PropertyAccess: + raise TypeError("access must be a PropertyAccess class") + if name is not None and type(name) is not str: + raise TypeError("name must be a string") + if type(disabled) is not bool: + raise TypeError("disabled must be a bool") + + def decorator(fn): + options = {"name": name, "access": access, "disabled": disabled} + return _Property(fn, options=options) + + return decorator + + +def _real_fn_result_to_body( + result: Optional[Any], + signature_tree: SignatureTree, + replace_fds: bool, +) -> Tuple[List[Any], List[int]]: + out_len = len(signature_tree.types) + if result is None: + final_result = [] + else: + if out_len == 1: + final_result = [result] + else: + result_type = type(result) + if result_type is not list and result_type is not tuple: + raise SignatureBodyMismatchError( + "Expected signal to return a list or tuple of arguments" + ) + final_result = result + + if out_len != len(final_result): + raise SignatureBodyMismatchError( + f"Signature and function return mismatch, expected {len(signature_tree.types)} arguments but got {len(result)}" + ) + + if not replace_fds: + return final_result, [] + return replace_fds_with_idx(signature_tree, final_result) + + +class ServiceInterface: + """An abstract class that can be extended by the user to define DBus services. + + Instances of :class:`ServiceInterface` can be exported on a path of the bus + with the :class:`export ` + method of a :class:`MessageBus `. + + Use the :func:`@method `, :func:`@dbus_property + `, and :func:`@signal + ` decorators to mark class methods as DBus + methods, properties, and signals respectively. + + :ivar name: The name of this interface as it appears to clients. Must be a + valid interface name. + :vartype name: str + """ + + def __init__(self, name: str) -> None: + # TODO cannot be overridden by a dbus member + self.name = name + self.__methods: List[_Method] = [] + self.__properties: List[_Property] = [] + self.__signals: List[_Signal] = [] + self.__buses = set() + self.__handlers: Dict[ + BaseMessageBus, + Dict[_Method, Callable[[Message, Callable[[Message], None]], None]], + ] = {} + + for name, member in inspect.getmembers(type(self)): + member_dict = getattr(member, "__dict__", {}) + if type(member) is _Property: + # XXX The getter and the setter may show up as different + # members if they have different names. But if they have the + # same name, they will be the same member. So we try to merge + # them together here. I wish we could make this cleaner. 
+ found = False + for prop in self.__properties: + if prop.prop_getter is member.prop_getter: + found = True + if member.prop_setter is not None: + prop.prop_setter = member.prop_setter + + if not found: + self.__properties.append(member) + elif "__DBUS_METHOD" in member_dict: + method = member_dict["__DBUS_METHOD"] + assert type(method) is _Method + self.__methods.append(method) + elif "__DBUS_SIGNAL" in member_dict: + signal = member_dict["__DBUS_SIGNAL"] + assert type(signal) is _Signal + self.__signals.append(signal) + + # validate that writable properties have a setter + for prop in self.__properties: + if prop.access.writable() and prop.prop_setter is None: + raise ValueError( + f'property "{prop.name}" is writable but does not have a setter' + ) + + def emit_properties_changed( + self, changed_properties: Dict[str, Any], invalidated_properties: List[str] = [] + ): + """Emit the ``org.freedesktop.DBus.Properties.PropertiesChanged`` signal. + + This signal is intended to be used to alert clients when a property of + the interface has changed. + + :param changed_properties: The keys must be the names of properties exposed by this bus. The values must be valid for the signature of those properties. + :type changed_properties: dict(str, Any) + :param invalidated_properties: A list of names of properties that are now invalid (presumably for clients who cache the value). + :type invalidated_properties: list(str) + """ + # TODO cannot be overridden by a dbus member + variant_dict = {} + + for prop in ServiceInterface._get_properties(self): + if prop.name in changed_properties: + variant_dict[prop.name] = Variant( + prop.signature, changed_properties[prop.name] + ) + + body = [self.name, variant_dict, invalidated_properties] + for bus in ServiceInterface._get_buses(self): + bus._interface_signal_notify( + self, + "org.freedesktop.DBus.Properties", + "PropertiesChanged", + "sa{sv}as", + body, + ) + + def introspect(self) -> intr.Interface: + """Get introspection information for this interface. + + This might be useful for creating clients for the interface or examining the introspection output of an interface. + + :returns: The introspection data for the interface. 
+ :rtype: :class:`dbus_fast.introspection.Interface` + """ + # TODO cannot be overridden by a dbus member + return intr.Interface( + self.name, + methods=[ + method.introspection + for method in ServiceInterface._get_methods(self) + if not method.disabled + ], + signals=[ + signal.introspection + for signal in ServiceInterface._get_signals(self) + if not signal.disabled + ], + properties=[ + prop.introspection + for prop in ServiceInterface._get_properties(self) + if not prop.disabled + ], + ) + + @staticmethod + def _get_properties(interface: "ServiceInterface") -> List[_Property]: + return interface.__properties + + @staticmethod + def _get_methods(interface: "ServiceInterface") -> List[_Method]: + return interface.__methods + + @staticmethod + def _c_get_methods(interface: "ServiceInterface") -> List[_Method]: + # _c_get_methods is used by the C code to get the methods for an + # interface + # https://github.com/cython/cython/issues/3327 + return interface.__methods + + @staticmethod + def _get_signals(interface: "ServiceInterface") -> List[_Signal]: + return interface.__signals + + @staticmethod + def _get_buses(interface: "ServiceInterface") -> Set["BaseMessageBus"]: + return interface.__buses + + @staticmethod + def _get_handler( + interface: "ServiceInterface", method: _Method, bus: "BaseMessageBus" + ) -> Callable[[Message, Callable[[Message], None]], None]: + return interface.__handlers[bus][method] + + @staticmethod + def _c_get_handler( + interface: "ServiceInterface", method: _Method, bus: "BaseMessageBus" + ) -> Callable[[Message, Callable[[Message], None]], None]: + # _c_get_handler is used by the C code to get the handler for a method + # https://github.com/cython/cython/issues/3327 + return interface.__handlers[bus][method] + + @staticmethod + def _add_bus( + interface: "ServiceInterface", + bus: "BaseMessageBus", + maker: Callable[ + ["ServiceInterface", _Method], + Callable[[Message, Callable[[Message], None]], None], + ], + ) -> None: + interface.__buses.add(bus) + interface.__handlers[bus] = { + method: maker(interface, method) for method in interface.__methods + } + + @staticmethod + def _remove_bus(interface: "ServiceInterface", bus: "BaseMessageBus") -> None: + interface.__buses.remove(bus) + del interface.__handlers[bus] + + @staticmethod + def _msg_body_to_args(msg: Message) -> List[Any]: + return ServiceInterface._c_msg_body_to_args(msg) + + @staticmethod + def _c_msg_body_to_args(msg: Message) -> List[Any]: + # https://github.com/cython/cython/issues/3327 + if not signature_contains_type(msg.signature_tree, msg.body, "h"): + return msg.body + + # XXX: This deep copy could be expensive if messages are very + # large. We could optimize this by only copying what we change + # here. + return replace_idx_with_fds( + msg.signature_tree, copy.deepcopy(msg.body), msg.unix_fds + ) + + @staticmethod + def _fn_result_to_body( + result: Optional[Any], + signature_tree: SignatureTree, + replace_fds: bool = True, + ) -> Tuple[List[Any], List[int]]: + return _real_fn_result_to_body(result, signature_tree, replace_fds) + + @staticmethod + def _c_fn_result_to_body( + result: Optional[Any], + signature_tree: SignatureTree, + replace_fds: bool, + ) -> Tuple[List[Any], List[int]]: + """The high level interfaces may return single values which may be + wrapped in a list to be a message body. 
Also they may return fds + directly for type 'h' which need to be put into an external list.""" + # https://github.com/cython/cython/issues/3327 + return _real_fn_result_to_body(result, signature_tree, replace_fds) + + @staticmethod + def _handle_signal( + interface: "ServiceInterface", signal: _Signal, result: Optional[Any] + ) -> None: + body, fds = ServiceInterface._fn_result_to_body(result, signal.signature_tree) + for bus in ServiceInterface._get_buses(interface): + bus._interface_signal_notify( + interface, interface.name, signal.name, signal.signature, body, fds + ) + + @staticmethod + def _get_property_value(interface: "ServiceInterface", prop: _Property, callback): + # XXX MUST CHECK TYPE RETURNED BY GETTER + try: + if asyncio.iscoroutinefunction(prop.prop_getter): + task = asyncio.ensure_future(prop.prop_getter(interface)) + + def get_property_callback(task): + try: + result = task.result() + except Exception as e: + callback(interface, prop, None, e) + return + + callback(interface, prop, result, None) + + task.add_done_callback(get_property_callback) + return + + callback( + interface, prop, getattr(interface, prop.prop_getter.__name__), None + ) + except Exception as e: + callback(interface, prop, None, e) + + @staticmethod + def _set_property_value(interface: "ServiceInterface", prop, value, callback): + # XXX MUST CHECK TYPE TO SET + try: + if asyncio.iscoroutinefunction(prop.prop_setter): + task = asyncio.ensure_future(prop.prop_setter(interface, value)) + + def set_property_callback(task): + try: + task.result() + except Exception as e: + callback(interface, prop, e) + return + + callback(interface, prop, None) + + task.add_done_callback(set_property_callback) + return + + setattr(interface, prop.prop_setter.__name__, value) + callback(interface, prop, None) + except Exception as e: + callback(interface, prop, e) + + @staticmethod + def _get_all_property_values( + interface: "ServiceInterface", callback, user_data=None + ): + result = {} + result_error = None + + for prop in ServiceInterface._get_properties(interface): + if prop.disabled or not prop.access.readable(): + continue + result[prop.name] = None + + if not result: + callback(interface, result, user_data, None) + return + + def get_property_callback( + interface: "ServiceInterface", + prop: _Property, + value: Any, + e: Optional[Exception], + ) -> None: + nonlocal result_error + if e is not None: + result_error = e + del result[prop.name] + else: + try: + result[prop.name] = Variant(prop.signature, value) + except SignatureBodyMismatchError as e: + result_error = e + del result[prop.name] + + if any(v is None for v in result.values()): + return + + callback(interface, result, user_data, result_error) + + for prop in ServiceInterface._get_properties(interface): + if prop.disabled or not prop.access.readable(): + continue + ServiceInterface._get_property_value(interface, prop, get_property_callback) diff --git a/dbus_fast/signature.pxd b/dbus_fast/signature.pxd new file mode 100644 index 0000000..faf8ea0 --- /dev/null +++ b/dbus_fast/signature.pxd @@ -0,0 +1,26 @@ +"""cdefs for signature.py""" + +import cython + + +cdef class SignatureType: + + cdef public str token + cdef public list children + cdef str _signature + + +cdef class SignatureTree: + + cdef public str signature + cdef public list types + + +cdef class Variant: + + cdef public SignatureType type + cdef public str signature + cdef public object value + + @cython.locals(signature_tree=SignatureTree) + cdef _init_variant(self, object signature, object 
value, bint verify) diff --git a/dbus_fast/signature.py b/dbus_fast/signature.py new file mode 100644 index 0000000..e6368a2 --- /dev/null +++ b/dbus_fast/signature.py @@ -0,0 +1,456 @@ +from functools import lru_cache +from typing import Any, Callable, Dict, List, Optional, Tuple, Union + +from .errors import InvalidSignatureError, SignatureBodyMismatchError +from .validators import is_object_path_valid + + +class SignatureType: + """A class that represents a single complete type within a signature. + + This class is not meant to be constructed directly. Use the :class:`SignatureTree` + class to parse signatures. + + :ivar ~.signature: The signature of this complete type. + :vartype ~.signature: str + + :ivar children: A list of child types if this is a container type. Arrays \ + have one child type, dict entries have two child types (key and value), and \ + structs have child types equal to the number of struct members. + :vartype children: list(:class:`SignatureType`) + """ + + _tokens = "ybnqiuxtdsogavh({" + __slots__ = ("token", "children", "_signature") + + def __init__(self, token: str) -> None: + """Init a new SignatureType.""" + self.token: str = token + self.children: List[SignatureType] = [] + self._signature: Optional[str] = None + + def __eq__(self, other: Any) -> bool: + """Compare this type to another type or signature string.""" + if type(other) is SignatureType: + return self.signature == other.signature + return super().__eq__(other) + + def _collapse(self) -> str: + """Collapse this type into a signature string.""" + if self.token not in "a({": + return self.token + + signature = [self.token] + + for child in self.children: + signature.append(child._collapse()) + + if self.token == "(": + signature.append(")") + elif self.token == "{": + signature.append("}") + + return "".join(signature) + + @property + def signature(self) -> str: + if self._signature is not None: + return self._signature + self._signature = self._collapse() + return self._signature + + @staticmethod + def _parse_next(signature: str) -> Tuple["SignatureType", str]: + if not signature: + raise InvalidSignatureError("Cannot parse an empty signature") + + token = signature[0] + + if token not in SignatureType._tokens: + raise InvalidSignatureError(f'got unexpected token: "{token}"') + + # container types + if token == "a": + self = SignatureType("a") + (child, signature) = SignatureType._parse_next(signature[1:]) + if not child: + raise InvalidSignatureError("missing type for array") + self.children.append(child) + return (self, signature) + elif token == "(": + self = SignatureType("(") + signature = signature[1:] + while True: + (child, signature) = SignatureType._parse_next(signature) + if not signature: + raise InvalidSignatureError('missing closing ")" for struct') + self.children.append(child) + if signature[0] == ")": + return (self, signature[1:]) + elif token == "{": + self = SignatureType("{") + signature = signature[1:] + (key_child, signature) = SignatureType._parse_next(signature) + if not key_child or len(key_child.children): + raise InvalidSignatureError("expected a simple type for dict entry key") + self.children.append(key_child) + (value_child, signature) = SignatureType._parse_next(signature) + if not value_child: + raise InvalidSignatureError("expected a value for dict entry") + if not signature or signature[0] != "}": + raise InvalidSignatureError('missing closing "}" for dict entry') + self.children.append(value_child) + return (self, signature[1:]) + + # basic type + return 
(SignatureType(token), signature[1:]) + + def _verify_byte(self, body: Any) -> None: + BYTE_MIN = 0x00 + BYTE_MAX = 0xFF + if not isinstance(body, int): + raise SignatureBodyMismatchError( + f'DBus BYTE type "y" must be Python type "int", got {type(body)}' + ) + if body < BYTE_MIN or body > BYTE_MAX: + raise SignatureBodyMismatchError( + f"DBus BYTE type must be between {BYTE_MIN} and {BYTE_MAX}" + ) + + def _verify_boolean(self, body: Any) -> None: + if not isinstance(body, bool): + raise SignatureBodyMismatchError( + f'DBus BOOLEAN type "b" must be Python type "bool", got {type(body)}' + ) + + def _verify_int16(self, body: Any) -> None: + INT16_MIN = -0x7FFF - 1 + INT16_MAX = 0x7FFF + if not isinstance(body, int): + raise SignatureBodyMismatchError( + f'DBus INT16 type "n" must be Python type "int", got {type(body)}' + ) + elif body > INT16_MAX or body < INT16_MIN: + raise SignatureBodyMismatchError( + f'DBus INT16 type "n" must be between {INT16_MIN} and {INT16_MAX}' + ) + + def _verify_uint16(self, body: Any) -> None: + UINT16_MIN = 0 + UINT16_MAX = 0xFFFF + if not isinstance(body, int): + raise SignatureBodyMismatchError( + f'DBus UINT16 type "q" must be Python type "int", got {type(body)}' + ) + elif body > UINT16_MAX or body < UINT16_MIN: + raise SignatureBodyMismatchError( + f'DBus UINT16 type "q" must be between {UINT16_MIN} and {UINT16_MAX}' + ) + + def _verify_int32(self, body: int) -> None: + INT32_MIN = -0x7FFFFFFF - 1 + INT32_MAX = 0x7FFFFFFF + if not isinstance(body, int): + raise SignatureBodyMismatchError( + f'DBus INT32 type "i" must be Python type "int", got {type(body)}' + ) + elif body > INT32_MAX or body < INT32_MIN: + raise SignatureBodyMismatchError( + f'DBus INT32 type "i" must be between {INT32_MIN} and {INT32_MAX}' + ) + + def _verify_uint32(self, body: Any) -> None: + UINT32_MIN = 0 + UINT32_MAX = 0xFFFFFFFF + if not isinstance(body, int): + raise SignatureBodyMismatchError( + f'DBus UINT32 type "u" must be Python type "int", got {type(body)}' + ) + elif body > UINT32_MAX or body < UINT32_MIN: + raise SignatureBodyMismatchError( + f'DBus UINT32 type "u" must be between {UINT32_MIN} and {UINT32_MAX}' + ) + + def _verify_int64(self, body: Any) -> None: + INT64_MAX = 9223372036854775807 + INT64_MIN = -INT64_MAX - 1 + if not isinstance(body, int): + raise SignatureBodyMismatchError( + f'DBus INT64 type "x" must be Python type "int", got {type(body)}' + ) + elif body > INT64_MAX or body < INT64_MIN: + raise SignatureBodyMismatchError( + f'DBus INT64 type "x" must be between {INT64_MIN} and {INT64_MAX}' + ) + + def _verify_uint64(self, body: Any) -> None: + UINT64_MIN = 0 + UINT64_MAX = 18446744073709551615 + if not isinstance(body, int): + raise SignatureBodyMismatchError( + f'DBus UINT64 type "t" must be Python type "int", got {type(body)}' + ) + elif body > UINT64_MAX or body < UINT64_MIN: + raise SignatureBodyMismatchError( + f'DBus UINT64 type "t" must be between {UINT64_MIN} and {UINT64_MAX}' + ) + + def _verify_double(self, body: Any) -> None: + if not isinstance(body, (float, int)): + raise SignatureBodyMismatchError( + f'DBus DOUBLE type "d" must be Python type "float" or "int", got {type(body)}' + ) + + def _verify_unix_fd(self, body: Any) -> None: + try: + self._verify_uint32(body) + except SignatureBodyMismatchError: + raise SignatureBodyMismatchError( + 'DBus UNIX_FD type "h" must be a valid UINT32' + ) + + def _verify_object_path(self, body: Any) -> None: + if not is_object_path_valid(body): + raise SignatureBodyMismatchError( + 'DBus OBJECT_PATH type 
"o" must be a valid object path' + ) + + def _verify_string(self, body: Any) -> None: + if not isinstance(body, str): + raise SignatureBodyMismatchError( + f'DBus STRING type "s" must be Python type "str", got {type(body)}' + ) + + def _verify_signature(self, body: Any) -> None: + # I guess we could run it through the SignatureTree parser instead + if not isinstance(body, str): + raise SignatureBodyMismatchError( + f'DBus SIGNATURE type "g" must be Python type "str", got {type(body)}' + ) + if len(body.encode()) > 0xFF: + raise SignatureBodyMismatchError( + 'DBus SIGNATURE type "g" must be less than 256 bytes' + ) + + def _verify_array(self, body: Any) -> None: + child_type = self.children[0] + + if child_type.token == "{": + if not isinstance(body, dict): + raise SignatureBodyMismatchError( + f'DBus ARRAY type "a" with DICT_ENTRY child must be Python type "dict", got {type(body)}' + ) + for key, value in body.items(): + child_type.children[0].verify(key) + child_type.children[1].verify(value) + elif child_type.token == "y": + if not isinstance(body, (bytearray, bytes)): + raise SignatureBodyMismatchError( + f'DBus ARRAY type "a" with BYTE child must be Python type "bytes", got {type(body)}' + ) + # no need to verify children + else: + if not isinstance(body, list): + raise SignatureBodyMismatchError( + f'DBus ARRAY type "a" must be Python type "list", got {type(body)}' + ) + for member in body: + child_type.verify(member) + + def _verify_struct(self, body: Any) -> None: + if not isinstance(body, (list, tuple)): + raise SignatureBodyMismatchError( + f'DBus STRUCT type "(" must be Python type "list" or "tuple", got {type(body)}' + ) + + if len(body) != len(self.children): + raise SignatureBodyMismatchError( + 'DBus STRUCT type "(" must have Python list members equal to the number of struct type members' + ) + + for i, member in enumerate(body): + self.children[i].verify(member) + + def _verify_variant(self, body: Any) -> None: + # a variant signature and value is valid by construction + if not isinstance(body, Variant): + raise SignatureBodyMismatchError( + f'DBus VARIANT type "v" must be Python type "Variant", got {type(body)}' + ) + + def verify(self, body: Any) -> bool: + """Verify that the body matches this type. + + :returns: True if the body matches this type. + :raises: + :class:`SignatureBodyMismatchError` if the body does not match this type. + """ + if body is None: + raise SignatureBodyMismatchError('Cannot serialize Python type "None"') + validator = self.validators.get(self.token) + if validator: + validator(self, body) + else: + raise Exception(f"cannot verify type with token {self.token}") + + return True + + validators: Dict[str, Callable[["SignatureType", Any], None]] = { + "y": _verify_byte, + "b": _verify_boolean, + "n": _verify_int16, + "q": _verify_uint16, + "i": _verify_int32, + "u": _verify_uint32, + "x": _verify_int64, + "t": _verify_uint64, + "d": _verify_double, + "h": _verify_uint32, + "o": _verify_string, + "s": _verify_string, + "g": _verify_signature, + "a": _verify_array, + "(": _verify_struct, + "v": _verify_variant, + } + + +class SignatureTree: + """A class that represents a signature as a tree structure for conveniently + working with DBus signatures. + + This class will not normally be used directly by the user. + + :ivar types: A list of parsed complete types. + :vartype types: list(:class:`SignatureType`) + + :ivar ~.signature: The signature of this signature tree. 
+ :vartype ~.signature: str + + :raises: + :class:`InvalidSignatureError` if the given signature is not valid. + """ + + __slots__ = ("signature", "types") + + def __init__(self, signature: str = "") -> None: + self.signature = signature + + self.types: List[SignatureType] = [] + + if len(signature) > 0xFF: + raise InvalidSignatureError("A signature must be less than 256 characters") + + while signature: + (type_, signature) = SignatureType._parse_next(signature) + self.types.append(type_) + + def __eq__(self, other: Any) -> bool: + if type(other) is SignatureTree: + return self.signature == other.signature + return super().__eq__(other) + + def verify(self, body: List[Any]) -> bool: + """Verifies that the give body matches this signature tree + + :param body: the body to verify for this tree + :type body: list(Any) + + :returns: True if the signature matches the body or an exception if not. + + :raises: + :class:`SignatureBodyMismatchError` if the signature does not match the body. + """ + if not isinstance(body, list): + raise SignatureBodyMismatchError( + f"The body must be a list (got {type(body)})" + ) + if len(body) != len(self.types): + raise SignatureBodyMismatchError( + f"The body has the wrong number of types (got {len(body)}, expected {len(self.types)})" + ) + for i, type_ in enumerate(self.types): + type_.verify(body[i]) + + return True + + +class Variant: + """A class to represent a DBus variant (type "v"). + + This class is used in message bodies to represent variants. The user can + expect a value in the body with type "v" to use this class and can + construct this class directly for use in message bodies sent over the bus. + + :ivar signature: The signature for this variant. Must be a single complete type. + :vartype signature: str or SignatureTree or SignatureType + + :ivar value: The value of this variant. Must correspond to the signature. + :vartype value: Any + + :raises: + :class:`InvalidSignatureError` if the signature is not valid. + :class:`SignatureBodyMismatchError` if the signature does not match the body. 
+ """ + + __slots__ = ("type", "signature", "value") + + def __init__( + self, + signature: Union[str, SignatureTree, SignatureType], + value: Any, + verify: bool = True, + ) -> None: + """Init a new Variant.""" + self._init_variant(signature, value, verify) + + def _init_variant( + self, + signature: Union[str, SignatureTree, SignatureType], + value: Any, + verify: bool, + ) -> None: + if type(signature) is SignatureTree: + signature_tree = signature + self.signature = signature_tree.signature + self.type = signature_tree.types[0] + elif type(signature) is SignatureType: + signature_tree = None + self.signature = signature.signature + self.type = signature + elif type(signature) is str: + signature_tree = get_signature_tree(signature) + self.signature = signature + self.type = signature_tree.types[0] + else: + raise TypeError( + "signature must be a SignatureTree, SignatureType, or a string" + ) + self.value = value + if verify: + if signature_tree and len(signature_tree.types) != 1: + raise ValueError( + "variants must have a signature for a single complete type" + ) + self.type.verify(value) + + def __eq__(self, other: Any) -> bool: + if type(other) is Variant: + return self.signature == other.signature and self.value == other.value + return super().__eq__(other) + + def __repr__(self) -> str: + return "".format( + self.type.signature, self.value + ) + + +get_signature_tree = lru_cache(maxsize=None)(SignatureTree) +"""Get a signature tree for the given signature. + +:param signature: The signature to get a tree for. +:type signature: str + +:returns: The signature tree for the given signature. +:rtype: :class:`SignatureTree` +""" diff --git a/dbus_fast/unpack.pxd b/dbus_fast/unpack.pxd new file mode 100644 index 0000000..55ebef1 --- /dev/null +++ b/dbus_fast/unpack.pxd @@ -0,0 +1,13 @@ +"""cdefs for unpack.py""" + +import cython + +from .signature cimport Variant + + +cpdef unpack_variants(object data) + +@cython.locals( + var=Variant +) +cdef _unpack_variants(object data) diff --git a/dbus_fast/unpack.py b/dbus_fast/unpack.py new file mode 100644 index 0000000..9680ff3 --- /dev/null +++ b/dbus_fast/unpack.py @@ -0,0 +1,24 @@ +from typing import Any + +from .signature import Variant + + +def unpack_variants(data: Any) -> Any: + """Unpack variants and remove signature info. + + This function should only be used to unpack + unmarshalled data as the checks are not + idiomatic. + """ + return _unpack_variants(data) + + +def _unpack_variants(data: Any) -> Any: + if type(data) is dict: + return {k: _unpack_variants(v) for k, v in data.items()} + if type(data) is list: + return [_unpack_variants(item) for item in data] + if type(data) is Variant: + var = data + return _unpack_variants(var.value) + return data diff --git a/dbus_fast/validators.py b/dbus_fast/validators.py new file mode 100644 index 0000000..f35ccd4 --- /dev/null +++ b/dbus_fast/validators.py @@ -0,0 +1,199 @@ +import re +from functools import lru_cache + +from .errors import ( + InvalidBusNameError, + InvalidInterfaceNameError, + InvalidMemberNameError, + InvalidObjectPathError, +) + +_bus_name_re = re.compile(r"^[A-Za-z_-][A-Za-z0-9_-]*$") +_path_re = re.compile(r"^[A-Za-z0-9_]+$") +_element_re = re.compile(r"^[A-Za-z_][A-Za-z0-9_]*$") +_member_re = re.compile(r"^[A-Za-z_][A-Za-z0-9_-]*$") + + +@lru_cache(maxsize=32) +def is_bus_name_valid(name: str) -> bool: + """Whether this is a valid bus name. + + .. 
seealso:: https://dbus.freedesktop.org/doc/dbus-specification.html#message-protocol-names-bus + + :param name: The bus name to validate. + :type name: str + + :returns: Whether the name is a valid bus name. + :rtype: bool + """ + if not isinstance(name, str): + return False # type: ignore[unreachable] + + if not name or len(name) > 255: + return False + + if name.startswith(":"): + # a unique bus name + return True + + if name.startswith("."): + return False + + if name.find(".") == -1: + return False + + for element in name.split("."): + if _bus_name_re.search(element) is None: + return False + + return True + + +@lru_cache(maxsize=1024) +def is_object_path_valid(path: str) -> bool: + """Whether this is a valid object path. + + .. seealso:: https://dbus.freedesktop.org/doc/dbus-specification.html#message-protocol-marshaling-object-path + + :param path: The object path to validate. + :type path: str + + :returns: Whether the object path is valid. + :rtype: bool + """ + if not isinstance(path, str): + return False # type: ignore[unreachable] + + if not path: + return False + + if not path.startswith("/"): + return False + + if len(path) == 1: + return True + + for element in path[1:].split("/"): + if _path_re.search(element) is None: + return False + + return True + + +@lru_cache(maxsize=32) +def is_interface_name_valid(name: str) -> bool: + """Whether this is a valid interface name. + + .. seealso:: https://dbus.freedesktop.org/doc/dbus-specification.html#message-protocol-names-interface + + :param name: The interface name to validate. + :type name: str + + :returns: Whether the name is a valid interface name. + :rtype: bool + """ + if not isinstance(name, str): + return False # type: ignore[unreachable] + + if not name or len(name) > 255: + return False + + if name.startswith("."): + return False + + if name.find(".") == -1: + return False + + for element in name.split("."): + if _element_re.search(element) is None: + return False + + return True + + +@lru_cache(maxsize=512) +def is_member_name_valid(member: str) -> bool: + """Whether this is a valid member name. + + .. seealso:: https://dbus.freedesktop.org/doc/dbus-specification.html#message-protocol-names-member + + :param member: The member name to validate. + :type member: str + + :returns: Whether the name is a valid member name. + :rtype: bool + """ + if not isinstance(member, str): + return False # type: ignore[unreachable] + + if not member or len(member) > 255: + return False + + if _member_re.search(member) is None: + return False + + return True + + +@lru_cache(maxsize=32) +def assert_bus_name_valid(name: str) -> None: + """Raise an error if this is not a valid bus name. + + .. seealso:: https://dbus.freedesktop.org/doc/dbus-specification.html#message-protocol-names-bus + + :param name: The bus name to validate. + :type name: str + + :raises: + - :class:`InvalidBusNameError` - If this is not a valid bus name. + """ + if not is_bus_name_valid(name): + raise InvalidBusNameError(name) + + +@lru_cache(maxsize=1024) +def assert_object_path_valid(path: str) -> None: + """Raise an error if this is not a valid object path. + + .. seealso:: https://dbus.freedesktop.org/doc/dbus-specification.html#message-protocol-marshaling-object-path + + :param path: The object path to validate. + :type path: str + + :raises: + - :class:`InvalidObjectPathError` - If this is not a valid object path. 
+ """ + if not is_object_path_valid(path): + raise InvalidObjectPathError(path) + + +@lru_cache(maxsize=32) +def assert_interface_name_valid(name: str) -> None: + """Raise an error if this is not a valid interface name. + + .. seealso:: https://dbus.freedesktop.org/doc/dbus-specification.html#message-protocol-names-interface + + :param name: The interface name to validate. + :type name: str + + :raises: + - :class:`InvalidInterfaceNameError` - If this is not a valid object path. + """ + if not is_interface_name_valid(name): + raise InvalidInterfaceNameError(name) + + +@lru_cache(maxsize=512) +def assert_member_name_valid(member: str) -> None: + """Raise an error if this is not a valid member name. + + .. seealso:: https://dbus.freedesktop.org/doc/dbus-specification.html#message-protocol-names-member + + :param member: The member name to validate. + :type member: str + + :raises: + - :class:`InvalidMemberNameError` - If this is not a valid object path. + """ + if not is_member_name_valid(member): + raise InvalidMemberNameError(member) diff --git a/idasen/cli.py b/idasen/cli.py new file mode 100755 index 0000000..2c461a6 --- /dev/null +++ b/idasen/cli.py @@ -0,0 +1,330 @@ +#!/usr/bin/python3 +import functools + +from desk import IdasenDesk +from typing import Any +from typing import Callable +from typing import Dict +from typing import List +from typing import Optional +import argparse +import asyncio +import importlib.metadata +import logging +import os +import sys +import voluptuous as vol +import yaml +import platform + +HOME = os.path.expanduser("~") +IDASEN_CONFIG_DIRECTORY = os.path.join(HOME, ".config", "idasen") +IDASEN_CONFIG_PATH = os.path.join(IDASEN_CONFIG_DIRECTORY, "idasen.yaml") + +DEFAULT_CONFIG: Dict[str, Any] = { + "positions": {"stand": 1.1, "sit": 0.75}, + "mac_address": "AA:AA:AA:AA:AA:AA", +} + +CONFIG_SCHEMA = vol.Schema( + { + "mac_address": vol.All(str, vol.Length(min=17, max=36)), + "positions": { + str: vol.All( + vol.Any(float, int), + vol.Range(min=IdasenDesk.MIN_HEIGHT, max=IdasenDesk.MAX_HEIGHT), + ) + }, + }, + extra=False, +) + +RESERVED_NAMES = {"init", "pair", "monitor", "height", "speed", "save", "delete"} + + +def save_config(config: dict, path: str = IDASEN_CONFIG_PATH): + with open(path, "w") as f: + yaml.dump(config, f) + + +def load_config(path: str = IDASEN_CONFIG_PATH) -> dict: + """Load user config.""" + try: + with open(path, "r") as f: + config = yaml.load(f, Loader=yaml.FullLoader) + except FileNotFoundError: + return {} + + # convert old config file format + if "positions" not in config: + config["positions"] = dict() + config["positions"]["sit"] = config.pop( + "sit_height", DEFAULT_CONFIG["positions"]["sit"] + ) + config["positions"]["stand"] = config.pop( + "stand_height", DEFAULT_CONFIG["positions"]["stand"] + ) + + save_config(config, path) + + try: + config = CONFIG_SCHEMA(config) + except vol.Invalid as e: + print(f"Invalid configuration: {e}", file=sys.stderr) + sys.exit(1) + else: + for position in config["positions"]: + if position in RESERVED_NAMES: + print( + "Invalid configuration, " + f"position with name '{position}' is a reserved name.", + file=sys.stderr, + ) + sys.exit(1) + + return config + + +def add_common_args(parser: argparse.ArgumentParser): + parser.add_argument( + "--mac-address", + dest="mac_address", + type=str, + help="MAC address of the Idasen desk.", + ) + parser.add_argument( + "--verbose", "-v", action="count", default=0, help="Increase logging verbosity." 
+ ) + parser.add_argument( + "--version", action="store_true", help="Prints version information." + ) + + +def get_parser(config: dict) -> argparse.ArgumentParser: + parser = argparse.ArgumentParser(description="ikea IDÅSEN desk control") + add_common_args(parser) + sub = parser.add_subparsers(dest="sub", help="Subcommands", required=False) + + height_parser = sub.add_parser("height", help="Get the desk height.") + speed_parser = sub.add_parser("speed", help="Get the desk speed.") + monitor_parser = sub.add_parser("monitor", help="Monitor the desk position.") + init_parser = sub.add_parser("init", help="Initialize a new configuration file.") + save_parser = sub.add_parser("save", help="Save current desk position.") + pair_parser = sub.add_parser("pair", help="Pair with device.") + save_parser.add_argument("name", help="Position name") + delete_parser = sub.add_parser("delete", help="Remove position with given name.") + delete_parser.add_argument("name", help="Position name") + + positions = config.get("positions", {}) + for name, value in positions.items(): + subcommand = sub.add_parser(name, help=f"Move the desk to {value}m.") + add_common_args(subcommand) + + init_parser.add_argument( + "-f", + "--force", + action="store_true", + help="Overwrite any existing configuration files.", + ) + + add_common_args(init_parser) + add_common_args(pair_parser) + add_common_args(height_parser) + add_common_args(speed_parser) + add_common_args(monitor_parser) + add_common_args(save_parser) + add_common_args(delete_parser) + + return parser + + +async def init(args: argparse.Namespace) -> int: + if not args.force and os.path.isfile(IDASEN_CONFIG_PATH): + print("Configuration file already exists.", file=sys.stderr) + print("Use --force to overwrite existing configuration.", file=sys.stderr) + return 1 + else: + mac = await IdasenDesk.discover() + if mac is not None: + print(f"Discovered desk's MAC address: {mac}", file=sys.stderr) + DEFAULT_CONFIG["mac_address"] = str(mac) + else: + print("Failed to discover desk's MAC address", file=sys.stderr) + os.makedirs(IDASEN_CONFIG_DIRECTORY, exist_ok=True) + with open(IDASEN_CONFIG_PATH, "w") as f: + f.write("# https://newam.github.io/idasen/index.html#configuration\n") + yaml.dump(DEFAULT_CONFIG, f) + print( + f"Created new configuration file at: {IDASEN_CONFIG_PATH}", file=sys.stderr + ) + print("'idasen pair' can be used to pair to desk.") + + return 0 + + +async def pair(args: argparse.Namespace) -> Optional[int]: + try: + async with IdasenDesk(args.mac_address, exit_on_fail=True) as desk: + await desk.pair() + except NotImplementedError as e: + if platform.system() == "Darwin": + print( + "The pair subcommand does not function reliably on macOS.\n" + "A pairing dialogue is shown if the OS deems that pairing is needed.\n" + "Retrying can help.\n\n" + "See docs at https://bleak.readthedocs.io/en/latest/backends/macos.html" + ) + return 1 + else: + raise e + return None + + +async def monitor(args: argparse.Namespace) -> None: + try: + async with IdasenDesk(args.mac_address, exit_on_fail=True) as desk: + + async def printer(height: float, speed: float): + print(f"{height:.3f} meters - {speed:.3f} meters/second", flush=True) + + await desk.monitor(printer) + while True: + await asyncio.sleep(1000000) + except (KeyboardInterrupt, asyncio.exceptions.CancelledError): + pass + + +async def height(args: argparse.Namespace): + async with IdasenDesk(args.mac_address, exit_on_fail=True) as desk: + height = await desk.get_height() + print(f"{height:.3f} meters") + + 
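+# Illustrative only: the YAML file written by save_config and read by load_config +# above would look roughly like this (the MAC address is the placeholder value from +# DEFAULT_CONFIG, and the positions are the defaults): +# +# mac_address: AA:AA:AA:AA:AA:AA +# positions: +# sit: 0.75 +# stand: 1.1 + +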
+async def speed(args: argparse.Namespace): + async with IdasenDesk(args.mac_address, exit_on_fail=True) as desk: + speed = await desk.get_speed() + print(f"{speed:.3f} meters/second") + + +async def move_to(args: argparse.Namespace, position: float) -> None: + async with IdasenDesk(args.mac_address, exit_on_fail=True) as desk: + await desk.move_to_target(target=position) + + +async def save(args: argparse.Namespace, config: dict) -> int: + if args.name in RESERVED_NAMES: + print(f"Position with name '{args.name}' is a reserved name.", file=sys.stderr) + return 1 + + async with IdasenDesk(args.mac_address, exit_on_fail=True) as desk: + height = await desk.get_height() + + config["positions"][args.name] = height + save_config(config) + + print(f"Saved position '{args.name}' with height: {height}m.") + return 0 + + +async def delete(args: argparse.Namespace, config: dict) -> int: + position = config["positions"].pop(args.name, None) + if args.name in RESERVED_NAMES: + print(f"Position with name '{args.name}' is a reserved name.", file=sys.stderr) + return 1 + elif position is None: + print(f"Position with name '{args.name}' doesn't exist.", file=sys.stderr) + else: + save_config(config) + print(f"Position with name '{args.name}' removed.") + + return 0 + + +def from_config( + args: argparse.Namespace, + config: dict, + parser: argparse.ArgumentParser, + key: str, + raise_error: bool = True, +): + if hasattr(args, key) and getattr(args, key) is None: + if key in config: + setattr(args, key, config[key]) + elif raise_error: + parser.error(f"{key} must be provided via the CLI or the config file") + + +def count_to_level(count: int) -> int: + if count == 1: + return logging.ERROR + elif count == 2: + return logging.WARNING + elif count == 3: + return logging.INFO + elif count >= 4: + return logging.DEBUG + + return logging.CRITICAL + + +def subcommand_to_callable(sub: str, config: dict) -> Callable: + if sub == "init": + return init + elif sub == "pair": + return pair + elif sub == "monitor": + return monitor + elif sub == "height": + return height + elif sub == "speed": + return speed + elif sub == "save": + return functools.partial(save, config=config) + elif sub == "delete": + return functools.partial(delete, config=config) + elif sub in config.get("positions", {}): + position = config["positions"][sub] + return functools.partial(move_to, position=position) + else: + raise AssertionError(f"internal error, please report this bug {sub=}") + + +def main(argv: Optional[List[str]] = None): + config = load_config() + parser = get_parser(config) + args = parser.parse_args(argv) + + from_config(args, config, parser, "mac_address", raise_error=args.sub != "init") + + level = count_to_level(args.verbose) + + root_logger = logging.getLogger() + + handler = logging.StreamHandler(stream=sys.stderr) + handler.setLevel(level) + formatter = logging.Formatter("{levelname} {name} {message}", style="{") + handler.setFormatter(formatter) + root_logger.addHandler(handler) + root_logger.setLevel(level) + + if args.version: + version = importlib.metadata.version("idasen") + print(version) + sys.exit(0) + elif args.sub is None: + print("A subcommand is required") + parser.print_usage() + sys.exit(1) + else: + func = subcommand_to_callable(args.sub, config) + + rc = asyncio.run(func(args)) + + if rc is None: + rc = 0 + + sys.exit(rc) + + +if __name__ == "__main__": + main() diff --git a/idasen/desk.py b/idasen/desk.py new file mode 100644 index 0000000..dcb15ad --- /dev/null +++ b/idasen/desk.py @@ -0,0 +1,466 @@ 
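+# Minimal usage sketch (illustrative; the MAC address is a placeholder and the +# import path mirrors how main.py in this repository imports this class): +# +# import asyncio +# from idasen.desk import IdasenDesk +# +# async def example() -> float: +# async with IdasenDesk(mac="AA:AA:AA:AA:AA:AA") as desk: +# return await desk.get_height() +# +# print(asyncio.run(example())) +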
+from bleak import BleakClient +from bleak import BleakScanner +from bleak import BleakGATTCharacteristic +from bleak.backends.device import BLEDevice +from bleak.backends.scanner import AdvertisementData +from typing import Any, Awaitable, Callable +from typing import MutableMapping +from typing import Optional +from typing import Tuple +from typing import Union +from inspect import signature +import asyncio +import logging +import struct +import sys + + +_UUID_HEIGHT: str = "99fa0021-338a-1024-8a49-009c0215f78a" +_UUID_COMMAND: str = "99fa0002-338a-1024-8a49-009c0215f78a" +_UUID_REFERENCE_INPUT: str = "99fa0031-338a-1024-8a49-009c0215f78a" +_UUID_ADV_SVC: str = "99fa0001-338a-1024-8a49-009c0215f78a" +_UUID_DPG: str = "99fa0011-338a-1024-8a49-009c0215f78a" + +_COMMAND_REFERENCE_INPUT_STOP: bytearray = bytearray([0x01, 0x80]) +_COMMAND_UP: bytearray = bytearray([0x47, 0x00]) +_COMMAND_DOWN: bytearray = bytearray([0x46, 0x00]) +_COMMAND_STOP: bytearray = bytearray([0xFF, 0x00]) +_COMMAND_WAKEUP: bytearray = bytearray([0xFE, 0x00]) + + +# height calculation offset in meters, assumed to be the same for all desks +def _bytes_to_meters_and_speed(raw: bytearray) -> Tuple[float, float]: + """Converts a value read from the desk in bytes to height in meters and speed.""" + raw_len = len(raw) + expected_len = 4 + assert ( + raw_len == expected_len + ), f"Expected raw value to be {expected_len} bytes long, got {raw_len} bytes" + + int_raw, speed_raw = struct.unpack("<Hh", raw) + meters = float(int_raw) / 10000 + IdasenDesk.MIN_HEIGHT + speed = float(speed_raw) / 10000 + + return meters, speed + + +def _meters_to_bytes(meters: float) -> bytearray: + """Converts meters to bytes for setting the position on the desk""" + int_raw: int = int((meters - IdasenDesk.MIN_HEIGHT) * 10000) + return bytearray(struct.pack("<H", int_raw)) + + +def _is_desk(device: BLEDevice, adv: AdvertisementData) -> bool: + return _UUID_ADV_SVC in adv.service_uuids + + +class _DeskLoggingAdapter(logging.LoggerAdapter): + """Prepends logging messages with the desk MAC address.""" + + def process( + self, msg: str, kwargs: MutableMapping[str, Any] + ) -> Tuple[str, MutableMapping[str, Any]]: + return f"[{self.extra['mac']}] {msg}", kwargs # type: ignore + + +class IdasenDesk: + """ + Idasen desk. + + Args: + mac: Bluetooth MAC address of the desk, or an instance of a BLEDevice. + exit_on_fail: If set to True, failing to connect will call ``sys.exit(1)``, + otherwise the exception will be raised. + disconnected_callback: + Callback that will be scheduled in the event loop when the client is + disconnected. The callable must take one argument, which will be + this client object. + + Note: + There is no locking to prevent you from running multiple movement + coroutines simultaneously. + + Example: + Basic Usage:: + + from idasen import IdasenDesk + + + async with IdasenDesk(mac="AA:AA:AA:AA:AA:AA") as desk: + # call methods here... + """ + + #: Minimum desk height in meters. + MIN_HEIGHT: float = 0.62 + + #: Maximum desk height in meters. + MAX_HEIGHT: float = 1.27 + + #: Number of times to retry upon failure to connect. 
+ RETRY_COUNT: int = 3 + + def __init__( + self, + mac: Union[BLEDevice, str], + exit_on_fail: bool = False, + disconnected_callback: Optional[Callable[[BleakClient], None]] = None, + ): + self._exit_on_fail = exit_on_fail + self._client = BleakClient( + address_or_ble_device=mac, + disconnected_callback=disconnected_callback, + ) + self._mac = mac.address if isinstance(mac, BLEDevice) else mac + self._logger = _DeskLoggingAdapter( + logger=logging.getLogger(__name__), extra={"mac": self.mac} + ) + self._moving = False + self._move_task: Optional[asyncio.Task] = None + + async def __aenter__(self): + await self.connect() + return self + + async def __aexit__(self, *args, **kwargs): + await self.disconnect() + + async def pair(self): + """ + Pair with the desk. + + This method is not available on macOS. Instead of manually initiating + paring, the user will be prompted to pair automatically as soon as it + is required. + + See :py:meth:`bleak.BleakClient.pair` for more information. + """ + await self._client.pair() + + async def connect(self): + """ + Connect to the desk. + + This method is an alternative to the context manager. + When possible the context manager is preferred. + + >>> async def example() -> bool: + ... desk = IdasenDesk(mac="AA:AA:AA:AA:AA:AA") + ... await desk.connect() # don't forget to call disconnect later! + ... return desk.is_connected + >>> asyncio.run(example()) + True + """ + i = 0 + while True: + try: + await self._client.connect() + await self.wakeup() + return + except Exception: + if i >= self.RETRY_COUNT: + self._logger.critical("Connection failed") + if self._exit_on_fail: + sys.exit(1) + raise + i += 1 + self._logger.warning( + f"Failed to connect, retrying ({i}/{self.RETRY_COUNT})..." + ) + await asyncio.sleep(0.3 * i) + + async def disconnect(self): + """ + Disconnect from the desk. + + This method is an alternative to the context manager. + When possible the context manager is preferred. + + >>> async def example() -> bool: + ... desk = IdasenDesk(mac="AA:AA:AA:AA:AA:AA") + ... await desk.connect() + ... await desk.disconnect() + ... 
return desk.is_connected + >>> asyncio.run(example()) + False + """ + await self._client.disconnect() + + async def monitor(self, callback: Callable[..., Awaitable[None]]): + output_service_uuid = "99fa0020-338a-1024-8a49-009c0215f78a" + output_char_uuid = "99fa0021-338a-1024-8a49-009c0215f78a" + + # Determine the amount of callback parameters + # 1st one is height, optional 2nd one is speed, more is not supported + callback_param_count = len(signature(callback).parameters) + if callback_param_count != 1 and callback_param_count != 2: + raise ValueError( + "Invalid callback provided, only 1 or 2 parameters are supported" + ) + + return_speed_value = callback_param_count == 2 + previous_height = 0.0 + previous_speed = 0.0 + + async def output_listener(char: BleakGATTCharacteristic, data: bytearray): + height, speed = _bytes_to_meters_and_speed(data) + self._logger.debug(f"Got data: {height}m {speed}m/s") + + nonlocal previous_height + nonlocal previous_speed + if abs(height - previous_height) < 0.001 and ( + not return_speed_value or abs(speed - previous_speed) < 0.001 + ): + return + previous_height = height + previous_speed = speed + + if return_speed_value: + await callback(height, speed) + else: + await callback(height) + + for service in self._client.services: + if service.uuid != output_service_uuid: + continue + + chr_output = service.get_characteristic(output_char_uuid) + if chr_output is None: + self._logger.error("No output characteristic found") + return + + self._logger.debug("Starting notify") + await self._client.start_notify(chr_output, output_listener) + return + + self._logger.error("Output service not found") + + @property + def is_connected(self) -> bool: + """ + ``True`` if the desk is connected. + + >>> async def example() -> bool: + ... async with IdasenDesk(mac="AA:AA:AA:AA:AA:AA") as desk: + ... return desk.is_connected + >>> asyncio.run(example()) + True + """ + return self._client.is_connected + + @property + def is_moving(self) -> bool: + """ + ``True`` if the desk is currently being moved by this class. + """ + return self._moving + + @property + def mac(self) -> str: + """ + Desk MAC address. + + >>> async def example() -> str: + ... async with IdasenDesk(mac="AA:AA:AA:AA:AA:AA") as desk: + ... return desk.mac + >>> asyncio.run(example()) + 'AA:AA:AA:AA:AA:AA' + """ + return self._mac + + async def wakeup(self): + """ + Wakeup the controller from sleep. + + This exists for compatibility with the Linak DPG1C controller, + it is not necessary with the original idasen controller. + + >>> async def example(): + ... async with IdasenDesk(mac="AA:AA:AA:AA:AA:AA") as desk: + ... await desk.wakeup() + >>> asyncio.run(example()) + """ + # https://github.com/rhyst/linak-controller/issues/32#issuecomment-1784055470 + await self._client.write_gatt_char(_UUID_DPG, b"\x7F\x86\x00") + await self._client.write_gatt_char( + _UUID_DPG, + b"\x7F\x86\x80\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0A\x0B\x0C\x0D" + b"\x0E\x0F\x10\x11", + ) + await self._client.write_gatt_char(_UUID_COMMAND, _COMMAND_WAKEUP) + + async def move_up(self): + """ + Move the desk upwards. + + This command moves the desk upwards for a fixed duration + (approximately one second) as set by your desk controller. + + >>> async def example(): + ... async with IdasenDesk(mac="AA:AA:AA:AA:AA:AA") as desk: + ... await desk.move_up() + >>> asyncio.run(example()) + """ + await self._client.write_gatt_char(_UUID_COMMAND, _COMMAND_UP, response=False) + + async def move_down(self): + """ + Move the desk downwards. 
+ + This command moves the desk downwards for a fixed duration + (approximately one second) as set by your desk controller. + + >>> async def example(): + ... async with IdasenDesk(mac="AA:AA:AA:AA:AA:AA") as desk: + ... await desk.move_down() + >>> asyncio.run(example()) + """ + await self._client.write_gatt_char(_UUID_COMMAND, _COMMAND_DOWN, response=False) + + async def move_to_target(self, target: float): + """ + Move the desk to the target position. + + Args: + target: Target position in meters. + + Raises: + ValueError: Target exceeds maximum or minimum limits. + + >>> async def example(): + ... async with IdasenDesk(mac="AA:AA:AA:AA:AA:AA") as desk: + ... await desk.move_to_target(1.1) + >>> asyncio.run(example()) + """ + if target > self.MAX_HEIGHT: + raise ValueError( + f"target position of {target:.3f} meters exceeds maximum of " + f"{self.MAX_HEIGHT:.3f}" + ) + elif target < self.MIN_HEIGHT: + raise ValueError( + f"target position of {target:.3f} meters exceeds minimum of " + f"{self.MIN_HEIGHT:.3f}" + ) + + if self._moving: + self._logger.error("Already moving") + return + self._moving = True + + async def do_move() -> None: + current_height = await self.get_height() + if current_height == target: + return + + # Wakeup and stop commands are needed in order to + # start the reference input for setting the position + await self._client.write_gatt_char(_UUID_COMMAND, _COMMAND_WAKEUP) + await self._client.write_gatt_char(_UUID_COMMAND, _COMMAND_STOP) + + data = _meters_to_bytes(target) + + while self._moving: + await self._client.write_gatt_char(_UUID_REFERENCE_INPUT, data) + await asyncio.sleep(0.2) + + # Stop as soon as the speed is 0, + # which means the desk has reached the target position + speed = await self.get_speed() + if speed == 0: + break + + self._move_task = asyncio.create_task(do_move()) + await self._move_task + self._moving = False + + async def stop(self): + """Stop desk movement.""" + self._moving = False + if self._move_task: + self._logger.debug("Desk was moving, waiting for it to stop") + await self._move_task + + await self._stop() + + async def _stop(self): + """Send stop commands""" + self._logger.debug("Sending stop commands") + await asyncio.gather( + self._client.write_gatt_char(_UUID_COMMAND, _COMMAND_STOP, response=False), + self._client.write_gatt_char( + _UUID_REFERENCE_INPUT, _COMMAND_REFERENCE_INPUT_STOP, response=False + ), + ) + + async def get_height(self) -> float: + """ + Get the desk height in meters. + + Returns: + Desk height in meters. + + >>> async def example() -> float: + ... async with IdasenDesk(mac="AA:AA:AA:AA:AA:AA") as desk: + ... await desk.move_to_target(1.0) + ... return await desk.get_height() + >>> asyncio.run(example()) + 1.0 + """ + height, _ = await self.get_height_and_speed() + return height + + async def get_speed(self) -> float: + """ + Get the desk speed in meters per second. + + Returns: + Desk speed in meters per second. + + >>> async def example() -> float: + ... async with IdasenDesk(mac="AA:AA:AA:AA:AA:AA") as desk: + ... await desk.move_to_target(1.0) + ... return await desk.get_speed() + >>> asyncio.run(example()) + 0.0 + """ + _, speed = await self.get_height_and_speed() + return speed + + async def get_height_and_speed(self) -> Tuple[float, float]: + """ + Get the desk height in meters and speed in meters per second. + + Returns: + Tuple of desk height in meters and speed in meters per second. + + >>> async def example() -> [float, float]: + ... async with IdasenDesk(mac="AA:AA:AA:AA:AA:AA") as desk: + ... 
await desk.move_to_target(1.0) + ... return await desk.get_height_and_speed() + >>> asyncio.run(example()) + (1.0, 0.0) + """ + raw = await self._client.read_gatt_char(_UUID_HEIGHT) + return _bytes_to_meters_and_speed(raw) + + @staticmethod + async def discover() -> Optional[str]: + """ + Try to find the desk's MAC address by discovering currently connected devices. + + Returns: + MAC address if found, ``None`` if not found. + + >>> asyncio.run(IdasenDesk.discover()) + 'AA:AA:AA:AA:AA:AA' + """ + try: + device = await BleakScanner.find_device_by_filter(_is_desk) + except Exception as e: + raise e + return None + + if device is None: + return None + + return device.address diff --git a/macparse/macaddress.py b/macparse/macaddress.py new file mode 100644 index 0000000..225b5c8 --- /dev/null +++ b/macparse/macaddress.py @@ -0,0 +1,389 @@ +# SPDX-License-Identifier: 0BSD +# Copyright 2021 Alexander Kozhevnikov + +"""Like ``ipaddress``, but for hardware identifiers such as MAC addresses.""" + +__all__ = ( + 'HWAddress', + 'OUI', + 'CDI32', 'CDI40', + 'MAC', + 'EUI48', 'EUI60', 'EUI64', + 'parse', +) +__version__ = '2.0.2' + + +from functools import total_ordering as _total_ordering + + +_HEX_DIGITS = "0123456789ABCDEFabcdef" + + +def _name(obj): + return type(obj).__name__ + + +def _class_names_in_proper_english(classes): + class_names = [cls.__name__ for cls in classes] + number_of_classes = len(classes) + if number_of_classes < 2: + return class_names[0] + elif number_of_classes == 2: + return ' or '.join(class_names) + else: + class_names[-1] = 'or ' + class_names[-1] + return ', '.join(class_names) + + +def _type_error(value, *classes): + class_names = _class_names_in_proper_english(classes) + return TypeError(repr(value) + ' has wrong type for ' + class_names) + + +def _value_error(value, error, *classes): + class_names = _class_names_in_proper_english(classes) + return ValueError(repr(value) + ' ' + error + ' ' + class_names) + + +@_total_ordering +class HWAddress: + """Base class for hardware addresses. + + Can be subclassed to create new address types + by just defining a couple class attribures. + + Attributes: + size: An integer defined by each subclass to specify the size + (in bits) of the hardware address. + formats: A sequence of format strings defined by each subclass + to specify what formats the class can parse. The first + format string is also used for ``repr`` and ``str`` output. + Each "x" in each format string stands for one hexadecimal + digit. All other characters are literal. For example, for + MAC addresses, the format strings are "xx-xx-xx-xx-xx-xx", + "xx:xx:xx:xx:xx:xx", "xxxx.xxxx.xxxx", and "xxxxxxxxxxxx". + """ + + __slots__ = ('_address', '__weakref__') + + formats = () + + def __init__(self, address): + """Initialize the hardware address object with the address given. + + Arguments: + address: An ``int``, ``bytes``, or ``str`` representation of + the address, or another instance of an address which is + either the same class, a subclass, or a superclass. If a + string, the ``formats`` attribute of the class is used + to parse it. If a byte string, it is read in big-endian. + If an integer, its value bytes in big-endian are used as + the address bytes. + + Raises: + TypeError: If ``address`` is not one of the valid types. + ValueError: If ``address`` is a string but does not match + one of the formats, if ``address`` is a byte string + but does not match the size, or if ``address`` is an + integer with a value that is negative or too big. 
+ """ + if isinstance(address, int): + overflow = 1 << type(self).size + if address >= overflow: + raise _value_error(address, 'is too big for', type(self)) + if address < 0: + raise ValueError('hardware address cannot be negative') + self._address = address + elif isinstance(address, bytes): + length = len(address) + size_in_bytes = (type(self).size + 7) >> 3 + if length != size_in_bytes: + raise _value_error(address, 'has wrong length for', type(self)) + offset = (8 - type(self).size) & 7 + self._address = int.from_bytes(address, 'big') >> offset + elif isinstance(address, str) and len(type(self).formats): + self._address, _ = _parse(address, type(self)) + # Subclass being "cast" to superclass: + elif isinstance(address, type(self)): + self._address = int(address) + # Superclass being "cast" to subclass: + elif (isinstance(address, HWAddress) + and isinstance(self, type(address))): + self._address = int(address) + else: + raise _type_error(address, type(self)) + + def __repr__(self): + """Represent the hardware address as an unambiguous string.""" + try: + address = repr(str(self)) + except TypeError: + address = _hex(int(self), type(self).size) + return _name(self) + '(' + address + ')' + + def __str__(self): + """Get the canonical human-readable string of this hardware address.""" + formats = type(self).formats + if not len(formats): + raise TypeError(_name(self) + ' has no string format') + result = [] + offset = (4 - type(self).size) & 3 + unconsumed_address_value = int(self) << offset + for character in reversed(formats[0]): + if character == 'x': + nibble = unconsumed_address_value & 0xf + result.append(_HEX_DIGITS[nibble]) + unconsumed_address_value >>= 4 + else: + result.append(character) + return ''.join(reversed(result)) + + def __bytes__(self): + """Get the big-endian byte string of this hardware address.""" + offset = (8 - type(self).size) & 7 + size_in_bytes = (type(self).size + 7) >> 3 + return (int(self) << offset).to_bytes(size_in_bytes, 'big') + + def __int__(self): + """Get the raw integer value of this hardware address.""" + return self._address + + def __eq__(self, other): + """Check if this hardware address is equal to another. + + Hardware addresses are equal if they are instances of the + same class, and their raw bit strings are the same. + """ + if not isinstance(other, HWAddress): + return NotImplemented + return type(self) == type(other) and int(self) == int(other) + + def __lt__(self, other): + """Check if this hardware address is before another. + + Hardware addresses are sorted by their raw bit strings, + regardless of the exact hardware address class or size. + + For example: ``OUI('00-00-00') < CDI32('00-00-00-00')``, + and they both are less than ``OUI('00-00-01')``. + + This order intuitively groups address prefixes like OUIs + with (and just in front of) addresses like MAC addresses + which have that prefix when sorting a list of them. + """ + if not isinstance(other, HWAddress): + return NotImplemented + class1 = type(self) + class2 = type(other) + size1 = class1.size + size2 = class2.size + bits1 = int(self) + bits2 = int(other) + if size1 > size2: + bits2 <<= size1 - size2 + else: + bits1 <<= size2 - size1 + return (bits1, size1, id(class1)) < (bits2, size2, id(class2)) + + def __hash__(self): + """Get the hash of this hardware address.""" + return hash((type(self), int(self))) + + +def _hex(integer, bits): + # Like the built-in function ``hex`` but pads the + # output to ``bits`` worth of hex characters. 
+ # + # Examples: + # (integer=5, bits=32) -> '0x00000005' + # (integer=0x1234, bits=32) -> '0x00001234' + # (integer=0x1234, bits=16) -> '0x1234' + return '0x' + hex((1 << (bits+3)) | integer)[3:] + + +class OUI(HWAddress): + """Organizationally Unique Identifier.""" + + __slots__ = () + + size = 24 + + formats = ( + 'xx-xx-xx', + 'xx:xx:xx', + 'xxxxxx', + ) + + +class _StartsWithOUI(HWAddress): + __slots__ = () + + @property + def oui(self): + """Get the OUI part of this hardware address.""" + return OUI(int(self) >> (type(self).size - OUI.size)) + + +class CDI32(_StartsWithOUI): + """32-bit Context Dependent Identifier (CDI-32).""" + + __slots__ = () + + size = 32 + + formats = ( + 'xx-xx-xx-xx', + 'xx:xx:xx:xx', + 'xxxxxxxx', + ) + + +class CDI40(_StartsWithOUI): + """40-bit Context Dependent Identifier (CDI-40).""" + + __slots__ = () + + size = 40 + + formats = ( + 'xx-xx-xx-xx-xx', + 'xx:xx:xx:xx:xx', + 'xxxxxxxxxx', + ) + + +class EUI48(_StartsWithOUI): + """48-Bit Extended Unique Identifier (EUI-48). + + EUI-48 is also the modern official name for what + many people are used to calling a "MAC address". + """ + + __slots__ = () + + size = 48 + + formats = ( + 'xx-xx-xx-xx-xx-xx', + 'xx:xx:xx:xx:xx:xx', + 'xxxx.xxxx.xxxx', + 'xxxxxxxxxxxx', + ) + + +MAC = EUI48 + + +class EUI60(_StartsWithOUI): + """60-Bit Extended Unique Identifier (EUI-60).""" + + __slots__ = () + + size = 60 + + formats = ( + 'x.x.x.x.x.x.x.x.x.x.x.x.x.x.x', + 'xx-xx-xx.x.x.x.x.x.x.x.x.x', + 'xxxxxxxxxxxxxxx', + ) + + +class EUI64(_StartsWithOUI): + """64-Bit Extended Unique Identifier (EUI-64).""" + + __slots__ = () + + size = 64 + + formats = ( + 'xx-xx-xx-xx-xx-xx-xx-xx', + 'xx:xx:xx:xx:xx:xx:xx:xx', + 'xxxx.xxxx.xxxx.xxxx', + 'xxxxxxxxxxxxxxxx', + ) + + +def parse(value, *classes): + """Try parsing a value as several hardware address classes at once. + + This lets you just write + + address = macaddress.parse(user_input, EUI64, EUI48, ...) + + instead of all of this: + + try: + address = macaddress.EUI64(user_input) + except ValueError: + try: + address = macaddress.EUI48(user_input) + except ValueError: + ... + + Arguments: + value: The value to parse as a hardware address. Either a + string, byte string, or an instance of one of the classes. + *classes: HWAddress subclasses to try to parse the string as. + If the input address could parse as more than one of the + classes, it is parsed as the first one. + + Returns: + HWAddress: The parsed hardware address if the value argument + was a string or byte string, or the value argument itself + if it was already an instance of one of the classes. + + Raises: + TypeError: If the value is not one of the valid types, + or if no classes were passed in. + ValueError: If the value could not be parsed as any + of the given classes. 
+ """ + if not classes: + raise TypeError('parse() requires at least one class argument') + if isinstance(value, str): + address, cls = _parse(value, *classes) + return cls(address) + elif isinstance(value, bytes): + max_size = len(value) * 8 + min_size = max_size - 7 + for cls in classes: + if min_size <= cls.size <= max_size: + return cls(value) + raise _value_error(value, 'has wrong length for', *classes) + elif isinstance(value, classes): + return value + raise _type_error(value, *classes) + + +def _parse(string, *classes): + length = len(string) + if length < 1: + raise ValueError('hardware address cannot be an empty string') + candidates = {} + for cls in classes: + for format_ in cls.formats: + if len(format_) == length: + candidates.setdefault(format_, cls) + candidates = sorted(candidates.items()) + address = 0 + start = 0 + end = len(candidates) + for index in range(length): + character = string[index] + if character in _HEX_DIGITS: + address <<= 4 + address += int(character, 16) + character = 'x' + elif character == 'x': + character = '' + while start < end and candidates[start][0][index] < character: + start += 1 + while start < end and candidates[end - 1][0][index] > character: + end -= 1 + if start >= end: + raise _value_error(string, 'cannot be parsed as', *classes) + _, cls = candidates[start] + offset = (4 - cls.size) & 3 + address >>= offset + return address, cls diff --git a/main.py b/main.py new file mode 100644 index 0000000..6ae689e --- /dev/null +++ b/main.py @@ -0,0 +1,78 @@ +import idasen as ida +from idasen.desk import IdasenDesk +from macparse import macaddress + +import asyncio +import argparse +import sys + +VERSION = "idasit 0.0.1" + +def parse_args(): + parser = argparse.ArgumentParser(description="CLI for controlling a desk.") + + parser.add_argument('-m', '--mac', required=True, help="MAC address of the desk") + parser.add_argument('--monitor', action='store_true', help="Print height and speed while you move the desk") + parser.add_argument('--height', action='store_true', help="Print the desk height") + parser.add_argument('--move', type=float, help="Move the desk to a specific position") + + parser.add_argument('--discover', action='store_true', help="Print the desk MAC") + parser.add_argument('--pair', action='store_true', help="Pair with the desk") + + parser.add_argument('-v', '--version', action='version', version=VERSION) + + args = parser.parse_args() + return vars(args) + + + +# MAC = 'F8:9D:F4:10:90:DF' +# MAC = 'F8:9D:F4:10:90:D4' +SIT = 0.7793 +STAND = 1.2116 + +async def pair_desk(MAC): + async with IdasenDesk(MAC, exit_on_fail=True) as desk: + await desk.pair() + print('paired') + +async def discover_desk(): + mac = await IdasenDesk.discover() + print(mac) + + +async def monitor(MAC) -> None: + try: + async with IdasenDesk(MAC, exit_on_fail=True) as desk: + + async def printer(height: float, speed: float): + print(f"{height:.3f} meters - {speed:.3f} meters/second", flush=True) + + await desk.monitor(printer) + while True: + await asyncio.sleep(1000000) + except (KeyboardInterrupt, asyncio.exceptions.CancelledError): + pass + +async def height(MAC): + async with IdasenDesk(MAC, exit_on_fail=True) as desk: + height = await desk.get_height() + print(f"{height:.3f} meters") + +async def move_to(MAC, position: float) -> None: + async with IdasenDesk(MAC, exit_on_fail=True) as desk: + await desk.move_to_target(target=position) +def validate(mac): + try: + macaddress.MAC(mac) + return True + except: + return False + +if __name__ == '__main__': + args 
= parse_args() + mac = args['mac'] + if not validate(mac): + print(f'Not a valid MAC address. Use --discover to find the correct MAC address.') + sys.exit(2) + asyncio.run(discover_desk()) diff --git a/voluptuous/__init__.py b/voluptuous/__init__.py new file mode 100644 index 0000000..d030b35 --- /dev/null +++ b/voluptuous/__init__.py @@ -0,0 +1,88 @@ +"""Schema validation for Python data structures. + +Given eg. a nested data structure like this: + + { + 'exclude': ['Users', 'Uptime'], + 'include': [], + 'set': { + 'snmp_community': 'public', + 'snmp_timeout': 15, + 'snmp_version': '2c', + }, + 'targets': { + 'localhost': { + 'exclude': ['Uptime'], + 'features': { + 'Uptime': { + 'retries': 3, + }, + 'Users': { + 'snmp_community': 'monkey', + 'snmp_port': 15, + }, + }, + 'include': ['Users'], + 'set': { + 'snmp_community': 'monkeys', + }, + }, + }, + } + +A schema like this: + + >>> settings = { + ... 'snmp_community': str, + ... 'retries': int, + ... 'snmp_version': All(Coerce(str), Any('3', '2c', '1')), + ... } + >>> features = ['Ping', 'Uptime', 'Http'] + >>> schema = Schema({ + ... 'exclude': features, + ... 'include': features, + ... 'set': settings, + ... 'targets': { + ... 'exclude': features, + ... 'include': features, + ... 'features': { + ... str: settings, + ... }, + ... }, + ... }) + +Validate like so: + + >>> schema({ + ... 'set': { + ... 'snmp_community': 'public', + ... 'snmp_version': '2c', + ... }, + ... 'targets': { + ... 'exclude': ['Ping'], + ... 'features': { + ... 'Uptime': {'retries': 3}, + ... 'Users': {'snmp_community': 'monkey'}, + ... }, + ... }, + ... }) == { + ... 'set': {'snmp_version': '2c', 'snmp_community': 'public'}, + ... 'targets': { + ... 'exclude': ['Ping'], + ... 'features': {'Uptime': {'retries': 3}, + ... 'Users': {'snmp_community': 'monkey'}}}} + True +""" + +# flake8: noqa +# fmt: off +from voluptuous.schema_builder import * +from voluptuous.util import * +from voluptuous.validators import * + +from voluptuous.error import * # isort: skip + +# fmt: on + +__version__ = '0.15.2' +__author__ = 'alecthomas' diff --git a/voluptuous/error.py b/voluptuous/error.py new file mode 100644 index 0000000..9dab943 --- /dev/null +++ b/voluptuous/error.py @@ -0,0 +1,219 @@ +# fmt: off +import typing + +# fmt: on + + +class Error(Exception): + """Base validation exception.""" + + +class SchemaError(Error): + """An error was encountered in the schema.""" + + +class Invalid(Error): + """The data was invalid. + + :attr msg: The error message. + :attr path: The path to the error, as a list of keys in the source data. + :attr error_message: The actual error message that was raised, as a + string. 
+ + """ + + def __init__( + self, + message: str, + path: typing.Optional[typing.List[typing.Hashable]] = None, + error_message: typing.Optional[str] = None, + error_type: typing.Optional[str] = None, + ) -> None: + Error.__init__(self, message) + self._path = path or [] + self._error_message = error_message or message + self.error_type = error_type + + @property + def msg(self) -> str: + return self.args[0] + + @property + def path(self) -> typing.List[typing.Hashable]: + return self._path + + @property + def error_message(self) -> str: + return self._error_message + + def __str__(self) -> str: + path = ' @ data[%s]' % ']['.join(map(repr, self.path)) if self.path else '' + output = Exception.__str__(self) + if self.error_type: + output += ' for ' + self.error_type + return output + path + + def prepend(self, path: typing.List[typing.Hashable]) -> None: + self._path = path + self.path + + +class MultipleInvalid(Invalid): + def __init__(self, errors: typing.Optional[typing.List[Invalid]] = None) -> None: + self.errors = errors[:] if errors else [] + + def __repr__(self) -> str: + return 'MultipleInvalid(%r)' % self.errors + + @property + def msg(self) -> str: + return self.errors[0].msg + + @property + def path(self) -> typing.List[typing.Hashable]: + return self.errors[0].path + + @property + def error_message(self) -> str: + return self.errors[0].error_message + + def add(self, error: Invalid) -> None: + self.errors.append(error) + + def __str__(self) -> str: + return str(self.errors[0]) + + def prepend(self, path: typing.List[typing.Hashable]) -> None: + for error in self.errors: + error.prepend(path) + + +class RequiredFieldInvalid(Invalid): + """Required field was missing.""" + + +class ObjectInvalid(Invalid): + """The value we found was not an object.""" + + +class DictInvalid(Invalid): + """The value found was not a dict.""" + + +class ExclusiveInvalid(Invalid): + """More than one value found in exclusion group.""" + + +class InclusiveInvalid(Invalid): + """Not all values found in inclusion group.""" + + +class SequenceTypeInvalid(Invalid): + """The type found is not a sequence type.""" + + +class TypeInvalid(Invalid): + """The value was not of required type.""" + + +class ValueInvalid(Invalid): + """The value was found invalid by evaluation function.""" + + +class ContainsInvalid(Invalid): + """List does not contain item""" + + +class ScalarInvalid(Invalid): + """Scalars did not match.""" + + +class CoerceInvalid(Invalid): + """Impossible to coerce value to type.""" + + +class AnyInvalid(Invalid): + """The value did not pass any validator.""" + + +class AllInvalid(Invalid): + """The value did not pass all validators.""" + + +class MatchInvalid(Invalid): + """The value does not match the given regular expression.""" + + +class RangeInvalid(Invalid): + """The value is not in given range.""" + + +class TrueInvalid(Invalid): + """The value is not True.""" + + +class FalseInvalid(Invalid): + """The value is not False.""" + + +class BooleanInvalid(Invalid): + """The value is not a boolean.""" + + +class UrlInvalid(Invalid): + """The value is not a URL.""" + + +class EmailInvalid(Invalid): + """The value is not an email address.""" + + +class FileInvalid(Invalid): + """The value is not a file.""" + + +class DirInvalid(Invalid): + """The value is not a directory.""" + + +class PathInvalid(Invalid): + """The value is not a path.""" + + +class LiteralInvalid(Invalid): + """The literal values do not match.""" + + +class LengthInvalid(Invalid): + pass + + +class DatetimeInvalid(Invalid): + 
"""The value is not a formatted datetime string.""" + + +class DateInvalid(Invalid): + """The value is not a formatted date string.""" + + +class InInvalid(Invalid): + pass + + +class NotInInvalid(Invalid): + pass + + +class ExactSequenceInvalid(Invalid): + pass + + +class NotEnoughValid(Invalid): + """The value did not pass enough validations.""" + + pass + + +class TooManyValid(Invalid): + """The value passed more than expected validations.""" + + pass diff --git a/voluptuous/humanize.py b/voluptuous/humanize.py new file mode 100644 index 0000000..eabfd02 --- /dev/null +++ b/voluptuous/humanize.py @@ -0,0 +1,57 @@ +# fmt: off +import typing + +from voluptuous import Invalid, MultipleInvalid +from voluptuous.error import Error +from voluptuous.schema_builder import Schema + +# fmt: on + +MAX_VALIDATION_ERROR_ITEM_LENGTH = 500 + + +def _nested_getitem( + data: typing.Any, path: typing.List[typing.Hashable] +) -> typing.Optional[typing.Any]: + for item_index in path: + try: + data = data[item_index] + except (KeyError, IndexError, TypeError): + # The index is not present in the dictionary, list or other + # indexable or data is not subscriptable + return None + return data + + +def humanize_error( + data, + validation_error: Invalid, + max_sub_error_length: int = MAX_VALIDATION_ERROR_ITEM_LENGTH, +) -> str: + """Provide a more helpful + complete validation error message than that provided automatically + Invalid and MultipleInvalid do not include the offending value in error messages, + and MultipleInvalid.__str__ only provides the first error. + """ + if isinstance(validation_error, MultipleInvalid): + return '\n'.join( + sorted( + humanize_error(data, sub_error, max_sub_error_length) + for sub_error in validation_error.errors + ) + ) + else: + offending_item_summary = repr(_nested_getitem(data, validation_error.path)) + if len(offending_item_summary) > max_sub_error_length: + offending_item_summary = ( + offending_item_summary[: max_sub_error_length - 3] + '...' + ) + return '%s. Got %s' % (validation_error, offending_item_summary) + + +def validate_with_humanized_errors( + data, schema: Schema, max_sub_error_length: int = MAX_VALIDATION_ERROR_ITEM_LENGTH +) -> typing.Any: + try: + return schema(data) + except (Invalid, MultipleInvalid) as e: + raise Error(humanize_error(data, e, max_sub_error_length)) diff --git a/voluptuous/py.typed b/voluptuous/py.typed new file mode 100644 index 0000000..e69de29 diff --git a/voluptuous/schema_builder.py b/voluptuous/schema_builder.py new file mode 100644 index 0000000..cdeb514 --- /dev/null +++ b/voluptuous/schema_builder.py @@ -0,0 +1,1315 @@ +# fmt: off +from __future__ import annotations + +import collections +import inspect +import itertools +import re +import sys +import typing +from collections.abc import Generator +from contextlib import contextmanager +from functools import cache, wraps + +from voluptuous import error as er +from voluptuous.error import Error + +# fmt: on + +# options for extra keys +PREVENT_EXTRA = 0 # any extra key not in schema will raise an error +ALLOW_EXTRA = 1 # extra keys not in schema will be included in output +REMOVE_EXTRA = 2 # extra keys not in schema will be excluded from output + + +def _isnamedtuple(obj): + return isinstance(obj, tuple) and hasattr(obj, '_fields') + + +class Undefined(object): + def __nonzero__(self): + return False + + def __repr__(self): + return '...' 
+ + +UNDEFINED = Undefined() + + +def Self() -> None: + raise er.SchemaError('"Self" should never be called') + + +DefaultFactory = typing.Union[Undefined, typing.Callable[[], typing.Any]] + + +def default_factory(value) -> DefaultFactory: + if value is UNDEFINED or callable(value): + return value + return lambda: value + + +@contextmanager +def raises( + exc, msg: typing.Optional[str] = None, regex: typing.Optional[re.Pattern] = None +) -> Generator[None, None, None]: + try: + yield + except exc as e: + if msg is not None: + assert str(e) == msg, '%r != %r' % (str(e), msg) + if regex is not None: + assert re.search(regex, str(e)), '%r does not match %r' % (str(e), regex) + else: + raise AssertionError(f"Did not raise exception {exc.__name__}") + + +def Extra(_) -> None: + """Allow keys in the data that are not present in the schema.""" + raise er.SchemaError('"Extra" should never be called') + + +# As extra() is never called there's no way to catch references to the +# deprecated object, so we just leave an alias here instead. +extra = Extra + +primitive_types = (bool, bytes, int, str, float, complex) + +# fmt: off +Schemable = typing.Union[ + 'Schema', 'Object', + collections.abc.Mapping, + list, tuple, frozenset, set, + bool, bytes, int, str, float, complex, + type, object, dict, None, typing.Callable +] +# fmt: on + + +class Schema(object): + """A validation schema. + + The schema is a Python tree-like structure where nodes are pattern + matched against corresponding trees of values. + + Nodes can be values, in which case a direct comparison is used, types, + in which case an isinstance() check is performed, or callables, which will + validate and optionally convert the value. + + We can equate schemas also. + + For Example: + + >>> v = Schema({Required('a'): str}) + >>> v1 = Schema({Required('a'): str}) + >>> v2 = Schema({Required('b'): str}) + >>> assert v == v1 + >>> assert v != v2 + + """ + + _extra_to_name = { + REMOVE_EXTRA: 'REMOVE_EXTRA', + ALLOW_EXTRA: 'ALLOW_EXTRA', + PREVENT_EXTRA: 'PREVENT_EXTRA', + } + + def __init__( + self, schema: Schemable, required: bool = False, extra: int = PREVENT_EXTRA + ) -> None: + """Create a new Schema. + + :param schema: Validation schema. See :module:`voluptuous` for details. + :param required: Keys defined in the schema must be in the data. + :param extra: Specify how extra keys in the data are treated: + - :const:`~voluptuous.PREVENT_EXTRA`: to disallow any undefined + extra keys (raise ``Invalid``). + - :const:`~voluptuous.ALLOW_EXTRA`: to include undefined extra + keys in the output. + - :const:`~voluptuous.REMOVE_EXTRA`: to exclude undefined extra keys + from the output. + - Any value other than the above defaults to + :const:`~voluptuous.PREVENT_EXTRA` + """ + self.schema: typing.Any = schema + self.required = required + self.extra = int(extra) # ensure the value is an integer + self._compiled = self._compile(schema) + + @classmethod + def infer(cls, data, **kwargs) -> Schema: + """Create a Schema from concrete data (e.g. an API response). + + For example, this will take a dict like: + + { + 'foo': 1, + 'bar': { + 'a': True, + 'b': False + }, + 'baz': ['purple', 'monkey', 'dishwasher'] + } + + And return a Schema: + + { + 'foo': int, + 'bar': { + 'a': bool, + 'b': bool + }, + 'baz': [str] + } + + Note: only very basic inference is supported. 
+ """ + + def value_to_schema_type(value): + if isinstance(value, dict): + if len(value) == 0: + return dict + return {k: value_to_schema_type(v) for k, v in value.items()} + if isinstance(value, list): + if len(value) == 0: + return list + else: + return [value_to_schema_type(v) for v in value] + return type(value) + + return cls(value_to_schema_type(data), **kwargs) + + def __eq__(self, other): + if not isinstance(other, Schema): + return False + return other.schema == self.schema + + def __ne__(self, other): + return not (self == other) + + def __str__(self): + return str(self.schema) + + def __repr__(self): + return "" % ( + self.schema, + self._extra_to_name.get(self.extra, '??'), + self.required, + id(self), + ) + + def __call__(self, data): + """Validate data against this schema.""" + try: + return self._compiled([], data) + except er.MultipleInvalid: + raise + except er.Invalid as e: + raise er.MultipleInvalid([e]) + # return self.validate([], self.schema, data) + + def _compile(self, schema): + if schema is Extra: + return lambda _, v: v + if schema is Self: + return lambda p, v: self._compiled(p, v) + elif hasattr(schema, "__voluptuous_compile__"): + return schema.__voluptuous_compile__(self) + if isinstance(schema, Object): + return self._compile_object(schema) + if isinstance(schema, collections.abc.Mapping): + return self._compile_dict(schema) + elif isinstance(schema, list): + return self._compile_list(schema) + elif isinstance(schema, tuple): + return self._compile_tuple(schema) + elif isinstance(schema, (frozenset, set)): + return self._compile_set(schema) + type_ = type(schema) + if inspect.isclass(schema): + type_ = schema + if type_ in (*primitive_types, object, type(None)) or callable(schema): + return _compile_scalar(schema) + raise er.SchemaError('unsupported schema data type %r' % type(schema).__name__) + + def _compile_mapping(self, schema, invalid_msg=None): + """Create validator for given mapping.""" + invalid_msg = invalid_msg or 'mapping value' + + # Keys that may be required + all_required_keys = set( + key + for key in schema + if key is not Extra + and ( + (self.required and not isinstance(key, (Optional, Remove))) + or isinstance(key, Required) + ) + ) + + # Keys that may have defaults + all_default_keys = set( + key + for key in schema + if isinstance(key, Required) or isinstance(key, Optional) + ) + + _compiled_schema = {} + for skey, svalue in schema.items(): + new_key = self._compile(skey) + new_value = self._compile(svalue) + _compiled_schema[skey] = (new_key, new_value) + + candidates = list(_iterate_mapping_candidates(_compiled_schema)) + + # After we have the list of candidates in the correct order, we want to apply some optimization so that each + # key in the data being validated will be matched against the relevant schema keys only. + # No point in matching against different keys + additional_candidates = [] + candidates_by_key = {} + for skey, (ckey, cvalue) in candidates: + if type(skey) in primitive_types: + candidates_by_key.setdefault(skey, []).append((skey, (ckey, cvalue))) + elif isinstance(skey, Marker) and type(skey.schema) in primitive_types: + candidates_by_key.setdefault(skey.schema, []).append( + (skey, (ckey, cvalue)) + ) + else: + # These are wildcards such as 'int', 'str', 'Remove' and others which should be applied to all keys + additional_candidates.append((skey, (ckey, cvalue))) + + def validate_mapping(path, iterable, out): + required_keys = all_required_keys.copy() + + # Build a map of all provided key-value pairs. 
+ # The type(out) is used to retain ordering in case a ordered + # map type is provided as input. + key_value_map = type(out)() + for key, value in iterable: + key_value_map[key] = value + + # Insert default values for non-existing keys. + for key in all_default_keys: + if ( + not isinstance(key.default, Undefined) + and key.schema not in key_value_map + ): + # A default value has been specified for this missing + # key, insert it. + key_value_map[key.schema] = key.default() + + errors = [] + for key, value in key_value_map.items(): + key_path = path + [key] + remove_key = False + + # Optimization. Validate against the matching key first, then fallback to the rest + relevant_candidates = itertools.chain( + candidates_by_key.get(key, []), additional_candidates + ) + + # compare each given key/value against all compiled key/values + # schema key, (compiled key, compiled value) + error = None + for skey, (ckey, cvalue) in relevant_candidates: + try: + new_key = ckey(key_path, key) + except er.Invalid as e: + if len(e.path) > len(key_path): + raise + if not error or len(e.path) > len(error.path): + error = e + continue + # Backtracking is not performed once a key is selected, so if + # the value is invalid we immediately throw an exception. + exception_errors = [] + # check if the key is marked for removal + is_remove = new_key is Remove + try: + cval = cvalue(key_path, value) + # include if it's not marked for removal + if not is_remove: + out[new_key] = cval + else: + remove_key = True + continue + except er.MultipleInvalid as e: + exception_errors.extend(e.errors) + except er.Invalid as e: + exception_errors.append(e) + + if exception_errors: + if is_remove or remove_key: + continue + for err in exception_errors: + if len(err.path) <= len(key_path): + err.error_type = invalid_msg + errors.append(err) + # If there is a validation error for a required + # key, this means that the key was provided. + # Discard the required key so it does not + # create an additional, noisy exception. + required_keys.discard(skey) + break + + # Key and value okay, mark as found in case it was + # a Required() field. + required_keys.discard(skey) + + break + else: + if remove_key: + # remove key + continue + elif self.extra == ALLOW_EXTRA: + out[key] = value + elif error: + errors.append(error) + elif self.extra != REMOVE_EXTRA: + errors.append(er.Invalid('extra keys not allowed', key_path)) + # else REMOVE_EXTRA: ignore the key so it's removed from output + + # for any required keys left that weren't found and don't have defaults: + for key in required_keys: + msg = ( + key.msg + if hasattr(key, 'msg') and key.msg + else 'required key not provided' + ) + errors.append(er.RequiredFieldInvalid(msg, path + [key])) + if errors: + raise er.MultipleInvalid(errors) + + return out + + return validate_mapping + + def _compile_object(self, schema): + """Validate an object. + + Has the same behavior as dictionary validator but work with object + attributes. + + For example: + + >>> class Structure(object): + ... def __init__(self, one=None, three=None): + ... self.one = one + ... self.three = three + ... + >>> validate = Schema(Object({'one': 'two', 'three': 'four'}, cls=Structure)) + >>> with raises(er.MultipleInvalid, "not a valid value for object value @ data['one']"): + ... 
validate(Structure(one='three')) + + """ + base_validate = self._compile_mapping(schema, invalid_msg='object value') + + def validate_object(path, data): + if schema.cls is not UNDEFINED and not isinstance(data, schema.cls): + raise er.ObjectInvalid('expected a {0!r}'.format(schema.cls), path) + iterable = _iterate_object(data) + iterable = filter(lambda item: item[1] is not None, iterable) + out = base_validate(path, iterable, {}) + return type(data)(**out) + + return validate_object + + def _compile_dict(self, schema): + """Validate a dictionary. + + A dictionary schema can contain a set of values, or at most one + validator function/type. + + A dictionary schema will only validate a dictionary: + + >>> validate = Schema({}) + >>> with raises(er.MultipleInvalid, 'expected a dictionary'): + ... validate([]) + + An invalid dictionary value: + + >>> validate = Schema({'one': 'two', 'three': 'four'}) + >>> with raises(er.MultipleInvalid, "not a valid value for dictionary value @ data['one']"): + ... validate({'one': 'three'}) + + An invalid key: + + >>> with raises(er.MultipleInvalid, "extra keys not allowed @ data['two']"): + ... validate({'two': 'three'}) + + + Validation function, in this case the "int" type: + + >>> validate = Schema({'one': 'two', 'three': 'four', int: str}) + + Valid integer input: + + >>> validate({10: 'twenty'}) + {10: 'twenty'} + + By default, a "type" in the schema (in this case "int") will be used + purely to validate that the corresponding value is of that type. It + will not Coerce the value: + + >>> with raises(er.MultipleInvalid, "extra keys not allowed @ data['10']"): + ... validate({'10': 'twenty'}) + + Wrap them in the Coerce() function to achieve this: + >>> from voluptuous import Coerce + >>> validate = Schema({'one': 'two', 'three': 'four', + ... Coerce(int): str}) + >>> validate({'10': 'twenty'}) + {10: 'twenty'} + + Custom message for required key + + >>> validate = Schema({Required('one', 'required'): 'two'}) + >>> with raises(er.MultipleInvalid, "required @ data['one']"): + ... validate({}) + + (This is to avoid unexpected surprises.) + + Multiple errors for nested field in a dict: + + >>> validate = Schema({ + ... 'adict': { + ... 'strfield': str, + ... 'intfield': int + ... } + ... }) + >>> try: + ... validate({ + ... 'adict': { + ... 'strfield': 123, + ... 'intfield': 'one' + ... } + ... }) + ... except er.MultipleInvalid as e: + ... 
print(sorted(str(i) for i in e.errors)) # doctest: +NORMALIZE_WHITESPACE + ["expected int for dictionary value @ data['adict']['intfield']", + "expected str for dictionary value @ data['adict']['strfield']"] + + """ + base_validate = self._compile_mapping(schema, invalid_msg='dictionary value') + + groups_of_exclusion = {} + groups_of_inclusion = {} + for node in schema: + if isinstance(node, Exclusive): + g = groups_of_exclusion.setdefault(node.group_of_exclusion, []) + g.append(node) + elif isinstance(node, Inclusive): + g = groups_of_inclusion.setdefault(node.group_of_inclusion, []) + g.append(node) + + def validate_dict(path, data): + if not isinstance(data, dict): + raise er.DictInvalid('expected a dictionary', path) + + errors = [] + for label, group in groups_of_exclusion.items(): + exists = False + for exclusive in group: + if exclusive.schema in data: + if exists: + msg = ( + exclusive.msg + if hasattr(exclusive, 'msg') and exclusive.msg + else "two or more values in the same group of exclusion '%s'" + % label + ) + next_path = path + [VirtualPathComponent(label)] + errors.append(er.ExclusiveInvalid(msg, next_path)) + break + exists = True + + if errors: + raise er.MultipleInvalid(errors) + + for label, group in groups_of_inclusion.items(): + included = [node.schema in data for node in group] + if any(included) and not all(included): + msg = ( + "some but not all values in the same group of inclusion '%s'" + % label + ) + for g in group: + if hasattr(g, 'msg') and g.msg: + msg = g.msg + break + next_path = path + [VirtualPathComponent(label)] + errors.append(er.InclusiveInvalid(msg, next_path)) + break + + if errors: + raise er.MultipleInvalid(errors) + + out = data.__class__() + return base_validate(path, data.items(), out) + + return validate_dict + + def _compile_sequence(self, schema, seq_type): + """Validate a sequence type. + + This is a sequence of valid values or validators tried in order. + + >>> validator = Schema(['one', 'two', int]) + >>> validator(['one']) + ['one'] + >>> with raises(er.MultipleInvalid, 'expected int @ data[0]'): + ... validator([3.5]) + >>> validator([1]) + [1] + """ + _compiled = [self._compile(s) for s in schema] + seq_type_name = seq_type.__name__ + + def validate_sequence(path, data): + if not isinstance(data, seq_type): + raise er.SequenceTypeInvalid('expected a %s' % seq_type_name, path) + + # Empty seq schema, reject any data. + if not schema: + if data: + raise er.MultipleInvalid( + [er.ValueInvalid('not a valid value', path if path else data)] + ) + return data + + out = [] + invalid = None + errors = [] + index_path = UNDEFINED + for i, value in enumerate(data): + index_path = path + [i] + invalid = None + for validate in _compiled: + try: + cval = validate(index_path, value) + if cval is not Remove: # do not include Remove values + out.append(cval) + break + except er.Invalid as e: + if len(e.path) > len(index_path): + raise + invalid = e + else: + errors.append(invalid) + if errors: + raise er.MultipleInvalid(errors) + + if _isnamedtuple(data): + return type(data)(*out) + else: + return type(data)(out) + + return validate_sequence + + def _compile_tuple(self, schema): + """Validate a tuple. + + A tuple is a sequence of valid values or validators tried in order. + + >>> validator = Schema(('one', 'two', int)) + >>> validator(('one',)) + ('one',) + >>> with raises(er.MultipleInvalid, 'expected int @ data[0]'): + ... 
validator((3.5,)) + >>> validator((1,)) + (1,) + """ + return self._compile_sequence(schema, tuple) + + def _compile_list(self, schema): + """Validate a list. + + A list is a sequence of valid values or validators tried in order. + + >>> validator = Schema(['one', 'two', int]) + >>> validator(['one']) + ['one'] + >>> with raises(er.MultipleInvalid, 'expected int @ data[0]'): + ... validator([3.5]) + >>> validator([1]) + [1] + """ + return self._compile_sequence(schema, list) + + def _compile_set(self, schema): + """Validate a set. + + A set is an unordered collection of unique elements. + + >>> validator = Schema({int}) + >>> validator(set([42])) == set([42]) + True + >>> with raises(er.Invalid, 'expected a set'): + ... validator(42) + >>> with raises(er.MultipleInvalid, 'invalid value in set'): + ... validator(set(['a'])) + """ + type_ = type(schema) + type_name = type_.__name__ + + def validate_set(path, data): + if not isinstance(data, type_): + raise er.Invalid('expected a %s' % type_name, path) + + _compiled = [self._compile(s) for s in schema] + errors = [] + for value in data: + for validate in _compiled: + try: + validate(path, value) + break + except er.Invalid: + pass + else: + invalid = er.Invalid('invalid value in %s' % type_name, path) + errors.append(invalid) + + if errors: + raise er.MultipleInvalid(errors) + + return data + + return validate_set + + def extend( + self, + schema: Schemable, + required: typing.Optional[bool] = None, + extra: typing.Optional[int] = None, + ) -> Schema: + """Create a new `Schema` by merging this and the provided `schema`. + + Neither this `Schema` nor the provided `schema` are modified. The + resulting `Schema` inherits the `required` and `extra` parameters of + this, unless overridden. + + Both schemas must be dictionary-based. 
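An illustrative sketch of extend in use (a sketch, not from the vendored file; names are invented for the example):

>>> base = Schema({'name': str}, required=True)
>>> extended = base.extend({'age': int})
>>> sorted(extended.schema) == ['age', 'name']
True
>>> extended({'name': 'bob', 'age': 30}) == {'name': 'bob', 'age': 30}
True
>>> extended.required
True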
+ + :param schema: dictionary to extend this `Schema` with + :param required: if set, overrides `required` of this `Schema` + :param extra: if set, overrides `extra` of this `Schema` + """ + + assert isinstance(self.schema, dict) and isinstance( + schema, dict + ), 'Both schemas must be dictionary-based' + + result = self.schema.copy() + + # returns the key that may have been passed as an argument to Marker constructor + def key_literal(key): + return key.schema if isinstance(key, Marker) else key + + # build a map that takes the key literals to the needed objects + # literal -> Required|Optional|literal + result_key_map = dict((key_literal(key), key) for key in result) + + # for each item in the extension schema, replace duplicates + # or add new keys + for key, value in schema.items(): + # if the key is already in the dictionary, we need to replace it + # transform key to literal before checking presence + if key_literal(key) in result_key_map: + result_key = result_key_map[key_literal(key)] + result_value = result[result_key] + + # if both are dictionaries, we need to extend recursively + # create the new extended sub schema, then remove the old key and add the new one + if isinstance(result_value, dict) and isinstance(value, dict): + new_value = Schema(result_value).extend(value).schema + del result[result_key] + result[key] = new_value + # one or the other or both are not sub-schemas, simple replacement is fine + # remove old key and add new one + else: + del result[result_key] + result[key] = value + + # key is new and can simply be added + else: + result[key] = value + + # recompile and send old object + result_cls = type(self) + result_required = required if required is not None else self.required + result_extra = extra if extra is not None else self.extra + return result_cls(result, required=result_required, extra=result_extra) + + +def _compile_scalar(schema): + """A scalar value. + + The schema can either be a value or a type. + + >>> _compile_scalar(int)([], 1) + 1 + >>> with raises(er.Invalid, 'expected float'): + ... _compile_scalar(float)([], '1') + + Callables have + >>> _compile_scalar(lambda v: float(v))([], '1') + 1.0 + + As a convenience, ValueError's are trapped: + + >>> with raises(er.Invalid, 'not a valid value'): + ... _compile_scalar(lambda v: float(v))([], 'a') + """ + if inspect.isclass(schema): + + def validate_instance(path, data): + if isinstance(data, schema): + return data + else: + msg = 'expected %s' % schema.__name__ + raise er.TypeInvalid(msg, path) + + return validate_instance + + if callable(schema): + + def validate_callable(path, data): + try: + return schema(data) + except ValueError: + raise er.ValueInvalid('not a valid value', path) + except er.Invalid as e: + e.prepend(path) + raise + + return validate_callable + + def validate_value(path, data): + if data != schema: + raise er.ScalarInvalid('not a valid value', path) + return data + + return validate_value + + +def _compile_itemsort(): + '''return sort function of mappings''' + + def is_extra(key_): + return key_ is Extra + + def is_remove(key_): + return isinstance(key_, Remove) + + def is_marker(key_): + return isinstance(key_, Marker) + + def is_type(key_): + return inspect.isclass(key_) + + def is_callable(key_): + return callable(key_) + + # priority list for map sorting (in order of checking) + # We want Extra to match last, because it's a catch-all. 
On the other hand, + # Remove markers should match first (since invalid values will not + # raise an Error, instead the validator will check if other schemas match + # the same value). + priority = [ + (1, is_remove), # Remove highest priority after values + (2, is_marker), # then other Markers + (4, is_type), # types/classes lowest before Extra + (3, is_callable), # callables after markers + (5, is_extra), # Extra lowest priority + ] + + def item_priority(item_): + key_ = item_[0] + for i, check_ in priority: + if check_(key_): + return i + # values have highest priorities + return 0 + + return item_priority + + +_sort_item = _compile_itemsort() + + +def _iterate_mapping_candidates(schema): + """Iterate over schema in a meaningful order.""" + # Without this, Extra might appear first in the iterator, and fail to + # validate a key even though it's a Required that has its own validation, + # generating a false positive. + return sorted(schema.items(), key=_sort_item) + + +def _iterate_object(obj): + """Return iterator over object attributes. Respect objects with + defined __slots__. + + """ + d = {} + try: + d = vars(obj) + except TypeError: + # maybe we have named tuple here? + if hasattr(obj, '_asdict'): + d = obj._asdict() + for item in d.items(): + yield item + try: + slots = obj.__slots__ + except AttributeError: + pass + else: + for key in slots: + if key != '__dict__': + yield (key, getattr(obj, key)) + + +class Msg(object): + """Report a user-friendly message if a schema fails to validate. + + >>> validate = Schema( + ... Msg(['one', 'two', int], + ... 'should be one of "one", "two" or an integer')) + >>> with raises(er.MultipleInvalid, 'should be one of "one", "two" or an integer'): + ... validate(['three']) + + Messages are only applied to invalid direct descendants of the schema: + + >>> validate = Schema(Msg([['one', 'two', int]], 'not okay!')) + >>> with raises(er.MultipleInvalid, 'expected int @ data[0][0]'): + ... validate([['three']]) + + The type which is thrown can be overridden but needs to be a subclass of Invalid + + >>> with raises(er.SchemaError, 'Msg can only use subclases of Invalid as custom class'): + ... validate = Schema(Msg([int], 'should be int', cls=KeyError)) + + If you do use a subclass of Invalid, that error will be thrown (wrapped in a MultipleInvalid) + + >>> validate = Schema(Msg([['one', 'two', int]], 'not okay!', cls=er.RangeInvalid)) + >>> try: + ... validate(['three']) + ... except er.MultipleInvalid as e: + ... 
assert isinstance(e.errors[0], er.RangeInvalid) + """ + + def __init__( + self, + schema: Schemable, + msg: str, + cls: typing.Optional[typing.Type[Error]] = None, + ) -> None: + if cls and not issubclass(cls, er.Invalid): + raise er.SchemaError( + "Msg can only use subclases of Invalid as custom class" + ) + self._schema = schema + self.schema = Schema(schema) + self.msg = msg + self.cls = cls + + def __call__(self, v): + try: + return self.schema(v) + except er.Invalid as e: + if len(e.path) > 1: + raise e + else: + raise (self.cls or er.Invalid)(self.msg) + + def __repr__(self): + return 'Msg(%s, %s, cls=%s)' % (self._schema, self.msg, self.cls) + + +class Object(dict): + """Indicate that we should work with attributes, not keys.""" + + def __init__(self, schema: typing.Any, cls: object = UNDEFINED) -> None: + self.cls = cls + super(Object, self).__init__(schema) + + +class VirtualPathComponent(str): + def __str__(self): + return '<' + self + '>' + + def __repr__(self): + return self.__str__() + + +class Marker(object): + """Mark nodes for special treatment. + + `description` is an optional field, unused by Voluptuous itself, but can be + introspected by any external tool, for example to generate schema documentation. + """ + + __slots__ = ('schema', '_schema', 'msg', 'description', '__hash__') + + def __init__( + self, + schema_: Schemable, + msg: typing.Optional[str] = None, + description: typing.Any | None = None, + ) -> None: + self.schema: typing.Any = schema_ + self._schema = Schema(schema_) + self.msg = msg + self.description = description + self.__hash__ = cache(lambda: hash(schema_)) # type: ignore[method-assign] + + def __call__(self, v): + try: + return self._schema(v) + except er.Invalid as e: + if not self.msg or len(e.path) > 1: + raise + raise er.Invalid(self.msg) + + def __str__(self): + return str(self.schema) + + def __repr__(self): + return repr(self.schema) + + def __lt__(self, other): + if isinstance(other, Marker): + return self.schema < other.schema + return self.schema < other + + def __eq__(self, other): + return self.schema == other + + def __ne__(self, other): + return not (self.schema == other) + + +class Optional(Marker): + """Mark a node in the schema as optional, and optionally provide a default + + >>> schema = Schema({Optional('key'): str}) + >>> schema({}) + {} + >>> schema = Schema({Optional('key', default='value'): str}) + >>> schema({}) + {'key': 'value'} + >>> schema = Schema({Optional('key', default=list): list}) + >>> schema({}) + {'key': []} + + If 'required' flag is set for an entire schema, optional keys aren't required + + >>> schema = Schema({ + ... Optional('key'): str, + ... 'key2': str + ... }, required=True) + >>> schema({'key2':'value'}) + {'key2': 'value'} + """ + + def __init__( + self, + schema: Schemable, + msg: typing.Optional[str] = None, + default: typing.Any = UNDEFINED, + description: typing.Any | None = None, + ) -> None: + super(Optional, self).__init__(schema, msg=msg, description=description) + self.default = default_factory(default) + + +class Exclusive(Optional): + """Mark a node in the schema as exclusive. + + Exclusive keys inherited from Optional: + + >>> schema = Schema({Exclusive('alpha', 'angles'): int, Exclusive('beta', 'angles'): int}) + >>> schema({'alpha': 30}) + {'alpha': 30} + + Keys inside a same group of exclusion cannot be together, it only makes sense for dictionaries: + + >>> with raises(er.MultipleInvalid, "two or more values in the same group of exclusion 'angles' @ data[]"): + ... 
schema({'alpha': 30, 'beta': 45}) + + For example, API can provides multiple types of authentication, but only one works in the same time: + + >>> msg = 'Please, use only one type of authentication at the same time.' + >>> schema = Schema({ + ... Exclusive('classic', 'auth', msg=msg):{ + ... Required('email'): str, + ... Required('password'): str + ... }, + ... Exclusive('internal', 'auth', msg=msg):{ + ... Required('secret_key'): str + ... }, + ... Exclusive('social', 'auth', msg=msg):{ + ... Required('social_network'): str, + ... Required('token'): str + ... } + ... }) + + >>> with raises(er.MultipleInvalid, "Please, use only one type of authentication at the same time. @ data[]"): + ... schema({'classic': {'email': 'foo@example.com', 'password': 'bar'}, + ... 'social': {'social_network': 'barfoo', 'token': 'tEMp'}}) + """ + + def __init__( + self, + schema: Schemable, + group_of_exclusion: str, + msg: typing.Optional[str] = None, + description: typing.Any | None = None, + ) -> None: + super(Exclusive, self).__init__(schema, msg=msg, description=description) + self.group_of_exclusion = group_of_exclusion + + +class Inclusive(Optional): + """Mark a node in the schema as inclusive. + + Inclusive keys inherited from Optional: + + >>> schema = Schema({ + ... Inclusive('filename', 'file'): str, + ... Inclusive('mimetype', 'file'): str + ... }) + >>> data = {'filename': 'dog.jpg', 'mimetype': 'image/jpeg'} + >>> data == schema(data) + True + + Keys inside a same group of inclusive must exist together, it only makes sense for dictionaries: + + >>> with raises(er.MultipleInvalid, "some but not all values in the same group of inclusion 'file' @ data[]"): + ... schema({'filename': 'dog.jpg'}) + + If none of the keys in the group are present, it is accepted: + + >>> schema({}) + {} + + For example, API can return 'height' and 'width' together, but not separately. + + >>> msg = "Height and width must exist together" + >>> schema = Schema({ + ... Inclusive('height', 'size', msg=msg): int, + ... Inclusive('width', 'size', msg=msg): int + ... }) + + >>> with raises(er.MultipleInvalid, msg + " @ data[]"): + ... schema({'height': 100}) + + >>> with raises(er.MultipleInvalid, msg + " @ data[]"): + ... schema({'width': 100}) + + >>> data = {'height': 100, 'width': 100} + >>> data == schema(data) + True + """ + + def __init__( + self, + schema: Schemable, + group_of_inclusion: str, + msg: typing.Optional[str] = None, + description: typing.Any | None = None, + default: typing.Any = UNDEFINED, + ) -> None: + super(Inclusive, self).__init__( + schema, msg=msg, default=default, description=description + ) + self.group_of_inclusion = group_of_inclusion + + +class Required(Marker): + """Mark a node in the schema as being required, and optionally provide a default value. + + >>> schema = Schema({Required('key'): str}) + >>> with raises(er.MultipleInvalid, "required key not provided @ data['key']"): + ... schema({}) + + >>> schema = Schema({Required('key', default='value'): str}) + >>> schema({}) + {'key': 'value'} + >>> schema = Schema({Required('key', default=list): list}) + >>> schema({}) + {'key': []} + """ + + def __init__( + self, + schema: Schemable, + msg: typing.Optional[str] = None, + default: typing.Any = UNDEFINED, + description: typing.Any | None = None, + ) -> None: + super(Required, self).__init__(schema, msg=msg, description=description) + self.default = default_factory(default) + + +class Remove(Marker): + """Mark a node in the schema to be removed and excluded from the validated + output. 
Keys that fail validation will not raise ``Invalid``. Instead, these + keys will be treated as extras. + + >>> schema = Schema({str: int, Remove(int): str}) + >>> with raises(er.MultipleInvalid, "extra keys not allowed @ data[1]"): + ... schema({'keep': 1, 1: 1.0}) + >>> schema({1: 'red', 'red': 1, 2: 'green'}) + {'red': 1} + >>> schema = Schema([int, Remove(float), Extra]) + >>> schema([1, 2, 3, 4.0, 5, 6.0, '7']) + [1, 2, 3, 5, '7'] + """ + + def __init__( + self, + schema_: Schemable, + msg: typing.Optional[str] = None, + description: typing.Any | None = None, + ) -> None: + super().__init__(schema_, msg, description) + self.__hash__ = cache(lambda: object.__hash__(self)) # type: ignore[method-assign] + + def __call__(self, schema: Schemable): + super(Remove, self).__call__(schema) + return self.__class__ + + def __repr__(self): + return "Remove(%r)" % (self.schema,) + + +def message( + default: typing.Optional[str] = None, + cls: typing.Optional[typing.Type[Error]] = None, +) -> typing.Callable: + """Convenience decorator to allow functions to provide a message. + + Set a default message: + + >>> @message('not an integer') + ... def isint(v): + ... return int(v) + + >>> validate = Schema(isint()) + >>> with raises(er.MultipleInvalid, 'not an integer'): + ... validate('a') + + The message can be overridden on a per validator basis: + + >>> validate = Schema(isint('bad')) + >>> with raises(er.MultipleInvalid, 'bad'): + ... validate('a') + + The class thrown too: + + >>> class IntegerInvalid(er.Invalid): pass + >>> validate = Schema(isint('bad', clsoverride=IntegerInvalid)) + >>> try: + ... validate('a') + ... except er.MultipleInvalid as e: + ... assert isinstance(e.errors[0], IntegerInvalid) + """ + if cls and not issubclass(cls, er.Invalid): + raise er.SchemaError( + "message can only use subclases of Invalid as custom class" + ) + + def decorator(f): + @wraps(f) + def check(msg=None, clsoverride=None): + @wraps(f) + def wrapper(*args, **kwargs): + try: + return f(*args, **kwargs) + except ValueError: + raise (clsoverride or cls or er.ValueInvalid)( + msg or default or 'invalid value' + ) + + return wrapper + + return check + + return decorator + + +def _args_to_dict(func, args): + """Returns argument names as values as key-value pairs.""" + if sys.version_info >= (3, 0): + arg_count = func.__code__.co_argcount + arg_names = func.__code__.co_varnames[:arg_count] + else: + arg_count = func.func_code.co_argcount + arg_names = func.func_code.co_varnames[:arg_count] + + arg_value_list = list(args) + arguments = dict( + (arg_name, arg_value_list[i]) + for i, arg_name in enumerate(arg_names) + if i < len(arg_value_list) + ) + return arguments + + +def _merge_args_with_kwargs(args_dict, kwargs_dict): + """Merge args with kwargs.""" + ret = args_dict.copy() + ret.update(kwargs_dict) + return ret + + +def validate(*a, **kw) -> typing.Callable: + """Decorator for validating arguments of a function against a given schema. + + Set restrictions for arguments: + + >>> @validate(arg1=int, arg2=int) + ... def foo(arg1, arg2): + ... return arg1 * arg2 + + Set restriction for returned value: + + >>> @validate(arg=int, __return__=int) + ... def bar(arg1): + ... 
return arg1 * 2 + + """ + RETURNS_KEY = '__return__' + + def validate_schema_decorator(func): + returns_defined = False + returns = None + + schema_args_dict = _args_to_dict(func, a) + schema_arguments = _merge_args_with_kwargs(schema_args_dict, kw) + + if RETURNS_KEY in schema_arguments: + returns_defined = True + returns = schema_arguments[RETURNS_KEY] + del schema_arguments[RETURNS_KEY] + + input_schema = ( + Schema(schema_arguments, extra=ALLOW_EXTRA) + if len(schema_arguments) != 0 + else lambda x: x + ) + output_schema = Schema(returns) if returns_defined else lambda x: x + + @wraps(func) + def func_wrapper(*args, **kwargs): + args_dict = _args_to_dict(func, args) + arguments = _merge_args_with_kwargs(args_dict, kwargs) + validated_arguments = input_schema(arguments) + output = func(**validated_arguments) + return output_schema(output) + + return func_wrapper + + return validate_schema_decorator diff --git a/voluptuous/util.py b/voluptuous/util.py new file mode 100644 index 0000000..0bf9302 --- /dev/null +++ b/voluptuous/util.py @@ -0,0 +1,149 @@ +# F401: "imported but unused" +# fmt: off +import typing + +from voluptuous import validators # noqa: F401 +from voluptuous.error import Invalid, LiteralInvalid, TypeInvalid # noqa: F401 +from voluptuous.schema_builder import DefaultFactory # noqa: F401 +from voluptuous.schema_builder import Schema, default_factory, raises # noqa: F401 + +# fmt: on + +__author__ = 'tusharmakkar08' + + +def Lower(v: str) -> str: + """Transform a string to lower case. + + >>> s = Schema(Lower) + >>> s('HI') + 'hi' + """ + return str(v).lower() + + +def Upper(v: str) -> str: + """Transform a string to upper case. + + >>> s = Schema(Upper) + >>> s('hi') + 'HI' + """ + return str(v).upper() + + +def Capitalize(v: str) -> str: + """Capitalise a string. + + >>> s = Schema(Capitalize) + >>> s('hello world') + 'Hello world' + """ + return str(v).capitalize() + + +def Title(v: str) -> str: + """Title case a string. + + >>> s = Schema(Title) + >>> s('hello world') + 'Hello World' + """ + return str(v).title() + + +def Strip(v: str) -> str: + """Strip whitespace from a string. + + >>> s = Schema(Strip) + >>> s(' hello world ') + 'hello world' + """ + return str(v).strip() + + +class DefaultTo(object): + """Sets a value to default_value if none provided. + + >>> s = Schema(DefaultTo(42)) + >>> s(None) + 42 + >>> s = Schema(DefaultTo(list)) + >>> s(None) + [] + """ + + def __init__(self, default_value, msg: typing.Optional[str] = None) -> None: + self.default_value = default_factory(default_value) + self.msg = msg + + def __call__(self, v): + if v is None: + v = self.default_value() + return v + + def __repr__(self): + return 'DefaultTo(%s)' % (self.default_value(),) + + +class SetTo(object): + """Set a value, ignoring any previous value. + + >>> s = Schema(validators.Any(int, SetTo(42))) + >>> s(2) + 2 + >>> s("foo") + 42 + """ + + def __init__(self, value) -> None: + self.value = default_factory(value) + + def __call__(self, v): + return self.value() + + def __repr__(self): + return 'SetTo(%s)' % (self.value(),) + + +class Set(object): + """Convert a list into a set. + + >>> s = Schema(Set()) + >>> s([]) == set([]) + True + >>> s([1, 2]) == set([1, 2]) + True + >>> with raises(Invalid, regex="^cannot be presented as set: "): + ... 
s([set([1, 2]), set([3, 4])]) + """ + + def __init__(self, msg: typing.Optional[str] = None) -> None: + self.msg = msg + + def __call__(self, v): + try: + set_v = set(v) + except Exception as e: + raise TypeInvalid(self.msg or 'cannot be presented as set: {0}'.format(e)) + return set_v + + def __repr__(self): + return 'Set()' + + +class Literal(object): + def __init__(self, lit) -> None: + self.lit = lit + + def __call__(self, value, msg: typing.Optional[str] = None): + if self.lit != value: + raise LiteralInvalid(msg or '%s not match for %s' % (value, self.lit)) + else: + return self.lit + + def __str__(self): + return str(self.lit) + + def __repr__(self): + return repr(self.lit) diff --git a/voluptuous/validators.py b/voluptuous/validators.py new file mode 100644 index 0000000..d385260 --- /dev/null +++ b/voluptuous/validators.py @@ -0,0 +1,1248 @@ +# fmt: off +from __future__ import annotations + +import datetime +import os +import re +import sys +import typing +from decimal import Decimal, InvalidOperation +from functools import wraps + +from voluptuous.error import ( + AllInvalid, AnyInvalid, BooleanInvalid, CoerceInvalid, ContainsInvalid, DateInvalid, + DatetimeInvalid, DirInvalid, EmailInvalid, ExactSequenceInvalid, FalseInvalid, + FileInvalid, InInvalid, Invalid, LengthInvalid, MatchInvalid, MultipleInvalid, + NotEnoughValid, NotInInvalid, PathInvalid, RangeInvalid, TooManyValid, TrueInvalid, + TypeInvalid, UrlInvalid, +) + +# F401: flake8 complains about 'raises' not being used, but it is used in doctests +from voluptuous.schema_builder import Schema, Schemable, message, raises # noqa: F401 + +if typing.TYPE_CHECKING: + from _typeshed import SupportsAllComparisons + +# fmt: on + + +Enum: typing.Union[type, None] +try: + from enum import Enum +except ImportError: + Enum = None + + +if sys.version_info >= (3,): + import urllib.parse as urlparse + + basestring = str +else: + import urlparse + +# Taken from https://github.com/kvesteri/validators/blob/master/validators/email.py +# fmt: off +USER_REGEX = re.compile( + # start anchor, because fullmatch is not available in python 2.7 + "(?:" + # dot-atom + r"(^[-!#$%&'*+/=?^_`{}|~0-9A-Z]+" + r"(\.[-!#$%&'*+/=?^_`{}|~0-9A-Z]+)*$" + # quoted-string + r'|^"([\001-\010\013\014\016-\037!#-\[\]-\177]|' + r"""\\[\001-\011\013\014\016-\177])*"$)""" + # end anchor, because fullmatch is not available in python 2.7 + r")\Z", + re.IGNORECASE, +) +DOMAIN_REGEX = re.compile( + # start anchor, because fullmatch is not available in python 2.7 + "(?:" + # domain + r'(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)+' + # tld + r'(?:[A-Z]{2,6}\.?|[A-Z0-9-]{2,}\.?$)' + # literal form, ipv4 address (SMTP 4.1.3) + r'|^\[(25[0-5]|2[0-4]\d|[0-1]?\d?\d)' + r'(\.(25[0-5]|2[0-4]\d|[0-1]?\d?\d)){3}\]$' + # end anchor, because fullmatch is not available in python 2.7 + r")\Z", + re.IGNORECASE, +) +# fmt: on + +__author__ = 'tusharmakkar08' + + +def truth(f: typing.Callable) -> typing.Callable: + """Convenience decorator to convert truth functions into validators. + + >>> @truth + ... def isdir(v): + ... return os.path.isdir(v) + >>> validate = Schema(isdir) + >>> validate('/') + '/' + >>> with raises(MultipleInvalid, 'not a valid value'): + ... validate('/notavaliddir') + """ + + @wraps(f) + def check(v): + t = f(v) + if not t: + raise ValueError + return v + + return check + + +class Coerce(object): + """Coerce a value to a type. + + If the type constructor throws a ValueError or TypeError, the value + will be marked as Invalid. 
+ + Default behavior: + + >>> validate = Schema(Coerce(int)) + >>> with raises(MultipleInvalid, 'expected int'): + ... validate(None) + >>> with raises(MultipleInvalid, 'expected int'): + ... validate('foo') + + With custom message: + + >>> validate = Schema(Coerce(int, "moo")) + >>> with raises(MultipleInvalid, 'moo'): + ... validate('foo') + """ + + def __init__( + self, + type: typing.Union[type, typing.Callable], + msg: typing.Optional[str] = None, + ) -> None: + self.type = type + self.msg = msg + self.type_name = type.__name__ + + def __call__(self, v): + try: + return self.type(v) + except (ValueError, TypeError, InvalidOperation): + msg = self.msg or ('expected %s' % self.type_name) + if not self.msg and Enum and issubclass(self.type, Enum): + msg += " or one of %s" % str([e.value for e in self.type])[1:-1] + raise CoerceInvalid(msg) + + def __repr__(self): + return 'Coerce(%s, msg=%r)' % (self.type_name, self.msg) + + +@message('value was not true', cls=TrueInvalid) +@truth +def IsTrue(v): + """Assert that a value is true, in the Python sense. + + >>> validate = Schema(IsTrue()) + + "In the Python sense" means that implicitly false values, such as empty + lists, dictionaries, etc. are treated as "false": + + >>> with raises(MultipleInvalid, "value was not true"): + ... validate([]) + >>> validate([1]) + [1] + >>> with raises(MultipleInvalid, "value was not true"): + ... validate(False) + + ...and so on. + + >>> try: + ... validate([]) + ... except MultipleInvalid as e: + ... assert isinstance(e.errors[0], TrueInvalid) + """ + return v + + +@message('value was not false', cls=FalseInvalid) +def IsFalse(v): + """Assert that a value is false, in the Python sense. + + (see :func:`IsTrue` for more detail) + + >>> validate = Schema(IsFalse()) + >>> validate([]) + [] + >>> with raises(MultipleInvalid, "value was not false"): + ... validate(True) + + >>> try: + ... validate(True) + ... except MultipleInvalid as e: + ... assert isinstance(e.errors[0], FalseInvalid) + """ + if v: + raise ValueError + return v + + +@message('expected boolean', cls=BooleanInvalid) +def Boolean(v): + """Convert human-readable boolean values to a bool. + + Accepted values are 1, true, yes, on, enable, and their negatives. + Non-string values are cast to bool. + + >>> validate = Schema(Boolean()) + >>> validate(True) + True + >>> validate("1") + True + >>> validate("0") + False + >>> with raises(MultipleInvalid, "expected boolean"): + ... validate('moo') + >>> try: + ... validate('moo') + ... except MultipleInvalid as e: + ... assert isinstance(e.errors[0], BooleanInvalid) + """ + if isinstance(v, basestring): + v = v.lower() + if v in ('1', 'true', 'yes', 'on', 'enable'): + return True + if v in ('0', 'false', 'no', 'off', 'disable'): + return False + raise ValueError + return bool(v) + + +class _WithSubValidators(object): + """Base class for validators that use sub-validators. + + Special class to use as a parent class for validators using sub-validators. + This class provides the `__voluptuous_compile__` method so the + sub-validators are compiled by the parent `Schema`. 
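Referring back to the Enum branch in Coerce.__call__ above, a small illustrative sketch (the Color enum and the quoted message are invented for the example, not part of the vendored file):

from enum import Enum
from voluptuous import Schema, Coerce, MultipleInvalid

class Color(Enum):
    RED = 'red'
    BLUE = 'blue'

validate = Schema(Coerce(Color))
assert validate('red') is Color.RED

try:
    validate('green')
except MultipleInvalid as exc:
    # The coercion error lists the allowed enum values, roughly:
    #   expected Color or one of 'red', 'blue'
    print(exc)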
+ """ + + def __init__( + self, *validators, msg=None, required=False, discriminant=None, **kwargs + ) -> None: + self.validators = validators + self.msg = msg + self.required = required + self.discriminant = discriminant + + def __voluptuous_compile__(self, schema: Schema) -> typing.Callable: + self._compiled = [] + old_required = schema.required + self.schema = schema + for v in self.validators: + schema.required = self.required + self._compiled.append(schema._compile(v)) + schema.required = old_required + return self._run + + def _run(self, path: typing.List[typing.Hashable], value): + if self.discriminant is not None: + self._compiled = [ + self.schema._compile(v) + for v in self.discriminant(value, self.validators) + ] + + return self._exec(self._compiled, value, path) + + def __call__(self, v): + return self._exec((Schema(val) for val in self.validators), v) + + def __repr__(self): + return '%s(%s, msg=%r)' % ( + self.__class__.__name__, + ", ".join(repr(v) for v in self.validators), + self.msg, + ) + + def _exec( + self, + funcs: typing.Iterable, + v, + path: typing.Optional[typing.List[typing.Hashable]] = None, + ): + raise NotImplementedError() + + +class Any(_WithSubValidators): + """Use the first validated value. + + :param msg: Message to deliver to user if validation fails. + :param kwargs: All other keyword arguments are passed to the sub-schema constructors. + :returns: Return value of the first validator that passes. + + >>> validate = Schema(Any('true', 'false', + ... All(Any(int, bool), Coerce(bool)))) + >>> validate('true') + 'true' + >>> validate(1) + True + >>> with raises(MultipleInvalid, "not a valid value"): + ... validate('moo') + + msg argument is used + + >>> validate = Schema(Any(1, 2, 3, msg="Expected 1 2 or 3")) + >>> validate(1) + 1 + >>> with raises(MultipleInvalid, "Expected 1 2 or 3"): + ... validate(4) + """ + + def _exec(self, funcs, v, path=None): + error = None + for func in funcs: + try: + if path is None: + return func(v) + else: + return func(path, v) + except Invalid as e: + if error is None or len(e.path) > len(error.path): + error = e + else: + if error: + raise error if self.msg is None else AnyInvalid(self.msg, path=path) + raise AnyInvalid(self.msg or 'no valid value found', path=path) + + +# Convenience alias +Or = Any + + +class Union(_WithSubValidators): + """Use the first validated value among those selected by discriminant. + + :param msg: Message to deliver to user if validation fails. + :param discriminant(value, validators): Returns the filtered list of validators based on the value. + :param kwargs: All other keyword arguments are passed to the sub-schema constructors. + :returns: Return value of the first validator that passes. + + >>> validate = Schema(Union({'type':'a', 'a_val':'1'},{'type':'b', 'b_val':'2'}, + ... discriminant=lambda val, alt: filter( + ... lambda v : v['type'] == val['type'] , alt))) + >>> validate({'type':'a', 'a_val':'1'}) == {'type':'a', 'a_val':'1'} + True + >>> with raises(MultipleInvalid, "not a valid value for dictionary value @ data['b_val']"): + ... 
validate({'type':'b', 'b_val':'5'}) + + ```discriminant({'type':'b', 'a_val':'5'}, [{'type':'a', 'a_val':'1'},{'type':'b', 'b_val':'2'}])``` is invoked + + Without the discriminant, the exception would be "extra keys not allowed @ data['b_val']" + """ + + def _exec(self, funcs, v, path=None): + error = None + for func in funcs: + try: + if path is None: + return func(v) + else: + return func(path, v) + except Invalid as e: + if error is None or len(e.path) > len(error.path): + error = e + else: + if error: + raise error if self.msg is None else AnyInvalid(self.msg, path=path) + raise AnyInvalid(self.msg or 'no valid value found', path=path) + + +# Convenience alias +Switch = Union + + +class All(_WithSubValidators): + """Value must pass all validators. + + The output of each validator is passed as input to the next. + + :param msg: Message to deliver to user if validation fails. + :param kwargs: All other keyword arguments are passed to the sub-schema constructors. + + >>> validate = Schema(All('10', Coerce(int))) + >>> validate('10') + 10 + """ + + def _exec(self, funcs, v, path=None): + try: + for func in funcs: + if path is None: + v = func(v) + else: + v = func(path, v) + except Invalid as e: + raise e if self.msg is None else AllInvalid(self.msg, path=path) + return v + + +# Convenience alias +And = All + + +class Match(object): + """Value must be a string that matches the regular expression. + + >>> validate = Schema(Match(r'^0x[A-F0-9]+$')) + >>> validate('0x123EF4') + '0x123EF4' + >>> with raises(MultipleInvalid, 'does not match regular expression ^0x[A-F0-9]+$'): + ... validate('123EF4') + + >>> with raises(MultipleInvalid, 'expected string or buffer'): + ... validate(123) + + Pattern may also be a compiled regular expression: + + >>> validate = Schema(Match(re.compile(r'0x[A-F0-9]+', re.I))) + >>> validate('0x123ef4') + '0x123ef4' + """ + + def __init__( + self, pattern: typing.Union[re.Pattern, str], msg: typing.Optional[str] = None + ) -> None: + if isinstance(pattern, basestring): + pattern = re.compile(pattern) + self.pattern = pattern + self.msg = msg + + def __call__(self, v): + try: + match = self.pattern.match(v) + except TypeError: + raise MatchInvalid("expected string or buffer") + if not match: + raise MatchInvalid( + self.msg + or 'does not match regular expression {}'.format(self.pattern.pattern) + ) + return v + + def __repr__(self): + return 'Match(%r, msg=%r)' % (self.pattern.pattern, self.msg) + + +class Replace(object): + """Regex substitution. + + >>> validate = Schema(All(Replace('you', 'I'), + ... Replace('hello', 'goodbye'))) + >>> validate('you say hello') + 'I say goodbye' + """ + + def __init__( + self, + pattern: typing.Union[re.Pattern, str], + substitution: str, + msg: typing.Optional[str] = None, + ) -> None: + if isinstance(pattern, basestring): + pattern = re.compile(pattern) + self.pattern = pattern + self.substitution = substitution + self.msg = msg + + def __call__(self, v): + return self.pattern.sub(self.substitution, v) + + def __repr__(self): + return 'Replace(%r, %r, msg=%r)' % ( + self.pattern.pattern, + self.substitution, + self.msg, + ) + + +def _url_validation(v: str) -> urlparse.ParseResult: + parsed = urlparse.urlparse(v) + if not parsed.scheme or not parsed.netloc: + raise UrlInvalid("must have a URL scheme and host") + return parsed + + +@message('expected an email address', cls=EmailInvalid) +def Email(v): + """Verify that the value is an email address or not. 
+ + >>> s = Schema(Email()) + >>> with raises(MultipleInvalid, 'expected an email address'): + ... s("a.com") + >>> with raises(MultipleInvalid, 'expected an email address'): + ... s("a@.com") + >>> with raises(MultipleInvalid, 'expected an email address'): + ... s("a@.com") + >>> s('t@x.com') + 't@x.com' + """ + try: + if not v or "@" not in v: + raise EmailInvalid("Invalid email address") + user_part, domain_part = v.rsplit('@', 1) + + if not (USER_REGEX.match(user_part) and DOMAIN_REGEX.match(domain_part)): + raise EmailInvalid("Invalid email address") + return v + except: # noqa: E722 + raise ValueError + + +@message('expected a fully qualified domain name URL', cls=UrlInvalid) +def FqdnUrl(v): + """Verify that the value is a fully qualified domain name URL. + + >>> s = Schema(FqdnUrl()) + >>> with raises(MultipleInvalid, 'expected a fully qualified domain name URL'): + ... s("http://localhost/") + >>> s('http://w3.org') + 'http://w3.org' + """ + try: + parsed_url = _url_validation(v) + if "." not in parsed_url.netloc: + raise UrlInvalid("must have a domain name in URL") + return v + except: # noqa: E722 + raise ValueError + + +@message('expected a URL', cls=UrlInvalid) +def Url(v): + """Verify that the value is a URL. + + >>> s = Schema(Url()) + >>> with raises(MultipleInvalid, 'expected a URL'): + ... s(1) + >>> s('http://w3.org') + 'http://w3.org' + """ + try: + _url_validation(v) + return v + except: # noqa: E722 + raise ValueError + + +@message('Not a file', cls=FileInvalid) +@truth +def IsFile(v): + """Verify the file exists. + + >>> os.path.basename(IsFile()(__file__)).startswith('validators.py') + True + >>> with raises(FileInvalid, 'Not a file'): + ... IsFile()("random_filename_goes_here.py") + >>> with raises(FileInvalid, 'Not a file'): + ... IsFile()(None) + """ + try: + if v: + v = str(v) + return os.path.isfile(v) + else: + raise FileInvalid('Not a file') + except TypeError: + raise FileInvalid('Not a file') + + +@message('Not a directory', cls=DirInvalid) +@truth +def IsDir(v): + """Verify the directory exists. + + >>> IsDir()('/') + '/' + >>> with raises(DirInvalid, 'Not a directory'): + ... IsDir()(None) + """ + try: + if v: + v = str(v) + return os.path.isdir(v) + else: + raise DirInvalid("Not a directory") + except TypeError: + raise DirInvalid("Not a directory") + + +@message('path does not exist', cls=PathInvalid) +@truth +def PathExists(v): + """Verify the path exists, regardless of its type. + + >>> os.path.basename(PathExists()(__file__)).startswith('validators.py') + True + >>> with raises(Invalid, 'path does not exist'): + ... PathExists()("random_filename_goes_here.py") + >>> with raises(PathInvalid, 'Not a Path'): + ... PathExists()(None) + """ + try: + if v: + v = str(v) + return os.path.exists(v) + else: + raise PathInvalid("Not a Path") + except TypeError: + raise PathInvalid("Not a Path") + + +def Maybe(validator: Schemable, msg: typing.Optional[str] = None): + """Validate that the object matches given validator or is None. + + :raises Invalid: If the value does not match the given validator and is not + None. + + >>> s = Schema(Maybe(int)) + >>> s(10) + 10 + >>> with raises(Invalid): + ... s("string") + + """ + return Any(None, validator, msg=msg) + + +class Range(object): + """Limit a value to a range. + + Either min or max may be omitted. + Either min or max can be excluded from the range of accepted values. + + :raises Invalid: If the value is outside the range. 
+ + >>> s = Schema(Range(min=1, max=10, min_included=False)) + >>> s(5) + 5 + >>> s(10) + 10 + >>> with raises(MultipleInvalid, 'value must be at most 10'): + ... s(20) + >>> with raises(MultipleInvalid, 'value must be higher than 1'): + ... s(1) + >>> with raises(MultipleInvalid, 'value must be lower than 10'): + ... Schema(Range(max=10, max_included=False))(20) + """ + + def __init__( + self, + min: SupportsAllComparisons | None = None, + max: SupportsAllComparisons | None = None, + min_included: bool = True, + max_included: bool = True, + msg: typing.Optional[str] = None, + ) -> None: + self.min = min + self.max = max + self.min_included = min_included + self.max_included = max_included + self.msg = msg + + def __call__(self, v): + try: + if self.min_included: + if self.min is not None and not v >= self.min: + raise RangeInvalid( + self.msg or 'value must be at least %s' % self.min + ) + else: + if self.min is not None and not v > self.min: + raise RangeInvalid( + self.msg or 'value must be higher than %s' % self.min + ) + if self.max_included: + if self.max is not None and not v <= self.max: + raise RangeInvalid( + self.msg or 'value must be at most %s' % self.max + ) + else: + if self.max is not None and not v < self.max: + raise RangeInvalid( + self.msg or 'value must be lower than %s' % self.max + ) + + return v + + # Objects that lack a partial ordering, e.g. None or strings will raise TypeError + except TypeError: + raise RangeInvalid( + self.msg or 'invalid value or type (must have a partial ordering)' + ) + + def __repr__(self): + return 'Range(min=%r, max=%r, min_included=%r, max_included=%r, msg=%r)' % ( + self.min, + self.max, + self.min_included, + self.max_included, + self.msg, + ) + + +class Clamp(object): + """Clamp a value to a range. + + Either min or max may be omitted. + + >>> s = Schema(Clamp(min=0, max=1)) + >>> s(0.5) + 0.5 + >>> s(5) + 1 + >>> s(-1) + 0 + """ + + def __init__( + self, + min: SupportsAllComparisons | None = None, + max: SupportsAllComparisons | None = None, + msg: typing.Optional[str] = None, + ) -> None: + self.min = min + self.max = max + self.msg = msg + + def __call__(self, v): + try: + if self.min is not None and v < self.min: + v = self.min + if self.max is not None and v > self.max: + v = self.max + return v + + # Objects that lack a partial ordering, e.g. None or strings will raise TypeError + except TypeError: + raise RangeInvalid( + self.msg or 'invalid value or type (must have a partial ordering)' + ) + + def __repr__(self): + return 'Clamp(min=%s, max=%s)' % (self.min, self.max) + + +class Length(object): + """The length of a value must be in a certain range.""" + + def __init__( + self, + min: SupportsAllComparisons | None = None, + max: SupportsAllComparisons | None = None, + msg: typing.Optional[str] = None, + ) -> None: + self.min = min + self.max = max + self.msg = msg + + def __call__(self, v): + try: + if self.min is not None and len(v) < self.min: + raise LengthInvalid( + self.msg or 'length of value must be at least %s' % self.min + ) + if self.max is not None and len(v) > self.max: + raise LengthInvalid( + self.msg or 'length of value must be at most %s' % self.max + ) + return v + + # Objects that have no length e.g. 
None or strings will raise TypeError + except TypeError: + raise RangeInvalid(self.msg or 'invalid value or type') + + def __repr__(self): + return 'Length(min=%s, max=%s)' % (self.min, self.max) + + +class Datetime(object): + """Validate that the value matches the datetime format.""" + + DEFAULT_FORMAT = '%Y-%m-%dT%H:%M:%S.%fZ' + + def __init__( + self, format: typing.Optional[str] = None, msg: typing.Optional[str] = None + ) -> None: + self.format = format or self.DEFAULT_FORMAT + self.msg = msg + + def __call__(self, v): + try: + datetime.datetime.strptime(v, self.format) + except (TypeError, ValueError): + raise DatetimeInvalid( + self.msg or 'value does not match expected format %s' % self.format + ) + return v + + def __repr__(self): + return 'Datetime(format=%s)' % self.format + + +class Date(Datetime): + """Validate that the value matches the date format.""" + + DEFAULT_FORMAT = '%Y-%m-%d' + + def __call__(self, v): + try: + datetime.datetime.strptime(v, self.format) + except (TypeError, ValueError): + raise DateInvalid( + self.msg or 'value does not match expected format %s' % self.format + ) + return v + + def __repr__(self): + return 'Date(format=%s)' % self.format + + +class In(object): + """Validate that a value is in a collection.""" + + def __init__( + self, container: typing.Container, msg: typing.Optional[str] = None + ) -> None: + self.container = container + self.msg = msg + + def __call__(self, v): + try: + check = v not in self.container + except TypeError: + check = True + if check: + try: + raise InInvalid( + self.msg or f'value must be one of {sorted(self.container)}' + ) + except TypeError: + raise InInvalid( + self.msg + or f'value must be one of {sorted(self.container, key=str)}' + ) + return v + + def __repr__(self): + return 'In(%s)' % (self.container,) + + +class NotIn(object): + """Validate that a value is not in a collection.""" + + def __init__( + self, container: typing.Iterable, msg: typing.Optional[str] = None + ) -> None: + self.container = container + self.msg = msg + + def __call__(self, v): + try: + check = v in self.container + except TypeError: + check = True + if check: + try: + raise NotInInvalid( + self.msg or f'value must not be one of {sorted(self.container)}' + ) + except TypeError: + raise NotInInvalid( + self.msg + or f'value must not be one of {sorted(self.container, key=str)}' + ) + return v + + def __repr__(self): + return 'NotIn(%s)' % (self.container,) + + +class Contains(object): + """Validate that the given schema element is in the sequence being validated. + + >>> s = Contains(1) + >>> s([3, 2, 1]) + [3, 2, 1] + >>> with raises(ContainsInvalid, 'value is not allowed'): + ... s([3, 2]) + """ + + def __init__(self, item, msg: typing.Optional[str] = None) -> None: + self.item = item + self.msg = msg + + def __call__(self, v): + try: + check = self.item not in v + except TypeError: + check = True + if check: + raise ContainsInvalid(self.msg or 'value is not allowed') + return v + + def __repr__(self): + return 'Contains(%s)' % (self.item,) + + +class ExactSequence(object): + """Matches each element in a sequence against the corresponding element in + the validators. + + :param msg: Message to deliver to user if validation fails. + :param kwargs: All other keyword arguments are passed to the sub-schema + constructors. 
+ + >>> from voluptuous import Schema, ExactSequence + >>> validate = Schema(ExactSequence([str, int, list, list])) + >>> validate(['hourly_report', 10, [], []]) + ['hourly_report', 10, [], []] + >>> validate(('hourly_report', 10, [], [])) + ('hourly_report', 10, [], []) + """ + + def __init__( + self, + validators: typing.Iterable[Schemable], + msg: typing.Optional[str] = None, + **kwargs, + ) -> None: + self.validators = validators + self.msg = msg + self._schemas = [Schema(val, **kwargs) for val in validators] + + def __call__(self, v): + if not isinstance(v, (list, tuple)) or len(v) != len(self._schemas): + raise ExactSequenceInvalid(self.msg) + try: + v = type(v)(schema(x) for x, schema in zip(v, self._schemas)) + except Invalid as e: + raise e if self.msg is None else ExactSequenceInvalid(self.msg) + return v + + def __repr__(self): + return 'ExactSequence([%s])' % ", ".join(repr(v) for v in self.validators) + + +class Unique(object): + """Ensure an iterable does not contain duplicate items. + + Only iterables convertible to a set are supported (native types and + objects with correct __eq__). + + JSON does not support set, so they need to be presented as arrays. + Unique allows ensuring that such array does not contain dupes. + + >>> s = Schema(Unique()) + >>> s([]) + [] + >>> s([1, 2]) + [1, 2] + >>> with raises(Invalid, 'contains duplicate items: [1]'): + ... s([1, 1, 2]) + >>> with raises(Invalid, "contains duplicate items: ['one']"): + ... s(['one', 'two', 'one']) + >>> with raises(Invalid, regex="^contains unhashable elements: "): + ... s([set([1, 2]), set([3, 4])]) + >>> s('abc') + 'abc' + >>> with raises(Invalid, regex="^contains duplicate items: "): + ... s('aabbc') + """ + + def __init__(self, msg: typing.Optional[str] = None) -> None: + self.msg = msg + + def __call__(self, v): + try: + set_v = set(v) + except TypeError as e: + raise TypeInvalid(self.msg or 'contains unhashable elements: {0}'.format(e)) + if len(set_v) != len(v): + seen = set() + dupes = list(set(x for x in v if x in seen or seen.add(x))) + raise Invalid(self.msg or 'contains duplicate items: {0}'.format(dupes)) + return v + + def __repr__(self): + return 'Unique()' + + +class Equal(object): + """Ensure that value matches target. + + >>> s = Schema(Equal(1)) + >>> s(1) + 1 + >>> with raises(Invalid): + ... s(2) + + Validators are not supported, match must be exact: + + >>> s = Schema(Equal(str)) + >>> with raises(Invalid): + ... s('foo') + """ + + def __init__(self, target, msg: typing.Optional[str] = None) -> None: + self.target = target + self.msg = msg + + def __call__(self, v): + if v != self.target: + raise Invalid( + self.msg + or 'Values are not equal: value:{} != target:{}'.format(v, self.target) + ) + return v + + def __repr__(self): + return 'Equal({})'.format(self.target) + + +class Unordered(object): + """Ensures sequence contains values in unspecified order. 
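Before the Unordered doctests that follow, here is a similar illustrative sketch (again not part of the patch, under the same top-level import assumption) for the fixed-shape validators just defined: ExactSequence, Unique, and Equal.

# Illustrative sketch only -- not part of the patch.
from voluptuous import Schema, Invalid, ExactSequence, Unique, Equal

# ExactSequence validates position-by-position and preserves the input type.
assert Schema(ExactSequence([str, int]))(['height_mm', 1040]) == ['height_mm', 1040]

# Unique rejects duplicate entries in an iterable.
assert Schema(Unique())([1, 2, 3]) == [1, 2, 3]
try:
    Schema(Unique())([1, 1, 2])
except Invalid as exc:
    print(exc)  # -> contains duplicate items: [1]

# Equal is a literal comparison; validators/types are not interpreted.
assert Schema(Equal('on'))('on') == 'on'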
+ + >>> s = Schema(Unordered([2, 1])) + >>> s([2, 1]) + [2, 1] + >>> s([1, 2]) + [1, 2] + >>> s = Schema(Unordered([str, int])) + >>> s(['foo', 1]) + ['foo', 1] + >>> s([1, 'foo']) + [1, 'foo'] + """ + + def __init__( + self, + validators: typing.Iterable[Schemable], + msg: typing.Optional[str] = None, + **kwargs, + ) -> None: + self.validators = validators + self.msg = msg + self._schemas = [Schema(val, **kwargs) for val in validators] + + def __call__(self, v): + if not isinstance(v, (list, tuple)): + raise Invalid(self.msg or 'Value {} is not sequence!'.format(v)) + + if len(v) != len(self._schemas): + raise Invalid( + self.msg + or 'List lengths differ, value:{} != target:{}'.format( + len(v), len(self._schemas) + ) + ) + + consumed = set() + missing = [] + for index, value in enumerate(v): + found = False + for i, s in enumerate(self._schemas): + if i in consumed: + continue + try: + s(value) + except Invalid: + pass + else: + found = True + consumed.add(i) + break + if not found: + missing.append((index, value)) + + if len(missing) == 1: + el = missing[0] + raise Invalid( + self.msg + or 'Element #{} ({}) is not valid against any validator'.format( + el[0], el[1] + ) + ) + elif missing: + raise MultipleInvalid( + [ + Invalid( + self.msg + or 'Element #{} ({}) is not valid against any validator'.format( + el[0], el[1] + ) + ) + for el in missing + ] + ) + return v + + def __repr__(self): + return 'Unordered([{}])'.format(", ".join(repr(v) for v in self.validators)) + + +class Number(object): + """ + Verify the number of digits that are present in the number(Precision), + and the decimal places(Scale). + + :raises Invalid: If the value does not match the provided Precision and Scale. + + >>> schema = Schema(Number(precision=6, scale=2)) + >>> schema('1234.01') + '1234.01' + >>> schema = Schema(Number(precision=6, scale=2, yield_decimal=True)) + >>> schema('1234.01') + Decimal('1234.01') + """ + + def __init__( + self, + precision: typing.Optional[int] = None, + scale: typing.Optional[int] = None, + msg: typing.Optional[str] = None, + yield_decimal: bool = False, + ) -> None: + self.precision = precision + self.scale = scale + self.msg = msg + self.yield_decimal = yield_decimal + + def __call__(self, v): + """ + :param v: is a number enclosed with string + :return: Decimal number + """ + precision, scale, decimal_num = self._get_precision_scale(v) + + if ( + self.precision is not None + and self.scale is not None + and precision != self.precision + and scale != self.scale + ): + raise Invalid( + self.msg + or "Precision must be equal to %s, and Scale must be equal to %s" + % (self.precision, self.scale) + ) + else: + if self.precision is not None and precision != self.precision: + raise Invalid( + self.msg or "Precision must be equal to %s" % self.precision + ) + + if self.scale is not None and scale != self.scale: + raise Invalid(self.msg or "Scale must be equal to %s" % self.scale) + + if self.yield_decimal: + return decimal_num + else: + return v + + def __repr__(self): + return 'Number(precision=%s, scale=%s, msg=%s)' % ( + self.precision, + self.scale, + self.msg, + ) + + def _get_precision_scale(self, number) -> typing.Tuple[int, int, Decimal]: + """ + :param number: + :return: tuple(precision, scale, decimal_number) + """ + try: + decimal_num = Decimal(number) + except InvalidOperation: + raise Invalid(self.msg or 'Value must be a number enclosed with string') + + exp = decimal_num.as_tuple().exponent + if isinstance(exp, int): + return (len(decimal_num.as_tuple().digits), -exp, 
decimal_num) + else: + # TODO: handle infinity and NaN + # raise Invalid(self.msg or 'Value has no precision') + raise TypeError("infinity and NaN have no precision") + + +class SomeOf(_WithSubValidators): + """Value must pass at least some validations, determined by the given parameter. + Optionally, number of passed validations can be capped. + + The output of each validator is passed as input to the next. + + :param min_valid: Minimum number of valid schemas. + :param validators: List of schemas or validators to match input against. + :param max_valid: Maximum number of valid schemas. + :param msg: Message to deliver to user if validation fails. + :param kwargs: All other keyword arguments are passed to the sub-schema constructors. + + :raises NotEnoughValid: If the minimum number of validations isn't met. + :raises TooManyValid: If the maximum number of validations is exceeded. + + >>> validate = Schema(SomeOf(min_valid=2, validators=[Range(1, 5), Any(float, int), 6.6])) + >>> validate(6.6) + 6.6 + >>> validate(3) + 3 + >>> with raises(MultipleInvalid, 'value must be at most 5, not a valid value'): + ... validate(6.2) + """ + + def __init__( + self, + validators: typing.List[Schemable], + min_valid: typing.Optional[int] = None, + max_valid: typing.Optional[int] = None, + **kwargs, + ) -> None: + assert min_valid is not None or max_valid is not None, ( + 'when using "%s" you should specify at least one of min_valid and max_valid' + % (type(self).__name__,) + ) + self.min_valid = min_valid or 0 + self.max_valid = max_valid or len(validators) + super(SomeOf, self).__init__(*validators, **kwargs) + + def _exec(self, funcs, v, path=None): + errors = [] + funcs = list(funcs) + for func in funcs: + try: + if path is None: + v = func(v) + else: + v = func(path, v) + except Invalid as e: + errors.append(e) + + passed_count = len(funcs) - len(errors) + if self.min_valid <= passed_count <= self.max_valid: + return v + + msg = self.msg + if not msg: + msg = ', '.join(map(str, errors)) + + if passed_count > self.max_valid: + raise TooManyValid(msg) + raise NotEnoughValid(msg) + + def __repr__(self): + return 'SomeOf(min_valid=%s, validators=[%s], max_valid=%s, msg=%r)' % ( + self.min_valid, + ", ".join(repr(v) for v in self.validators), + self.max_valid, + self.msg, + )
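To round out the section, a usage sketch for the remaining validators (Unordered, Number, SomeOf). It is illustrative only, mirrors the doctests above, and assumes the same top-level re-exports as upstream voluptuous.

# Illustrative sketch only -- not part of the patch.
from voluptuous import Schema, Any, Range, Unordered, Number, SomeOf

# Unordered matches each element against a distinct validator, in any order.
assert Schema(Unordered([str, int]))([1040, 'stand']) == [1040, 'stand']

# Number checks total digits (precision) and decimal places (scale) of a
# string-encoded number; yield_decimal=True would return the parsed Decimal.
assert Schema(Number(precision=6, scale=2))('1234.56') == '1234.56'

# SomeOf requires between min_valid and max_valid sub-validators to pass.
# Here Range(1, 5) and Any(float, int) accept 3, the literal 6.6 does not,
# so 2 of 3 validators pass and min_valid=2 is satisfied.
validate = Schema(SomeOf(min_valid=2, validators=[Range(1, 5), Any(float, int), 6.6]))
assert validate(3) == 3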