mirror of https://github.com/Yubico/python-fido2
Compare commits
203 Commits
Author | SHA1 | Date |
---|---|---|
Dain Nilsson | edcb00bc32 | |
Joost van Dijk | 8b979313e9 | |
Dain Nilsson | ee9bf59783 | |
Dain Nilsson | cfffe17e18 | |
Dain Nilsson | 77893c2fd5 | |
Dain Nilsson | 82f9d0765b | |
Dain Nilsson | 9240d6e53b | |
Dain Nilsson | b5686b3faf | |
Dain Nilsson | 6d13bd7b91 | |
Dain Nilsson | f1952aebb9 | |
Dain Nilsson | de1be496fe | |
Dain Nilsson | 1be5c0b5f6 | |
Dain Nilsson | bccf760c73 | |
Dain Nilsson | 4c6f7b68a2 | |
Dain Nilsson | 7a16dc4913 | |
Dain Nilsson | 8253b96f8e | |
Dain Nilsson | 98a57ab968 | |
Dain Nilsson | 34e354e508 | |
Dain Nilsson | 1c3e05f82d | |
Dain Nilsson | 2ce01289d8 | |
Dain Nilsson | 7179b439d3 | |
Dain Nilsson | 54569a37cc | |
Dain Nilsson | f36cade5e9 | |
Dain Nilsson | 9d8e8d167d | |
Dain Nilsson | a7f3c51aca | |
Dain Nilsson | a5357dd35c | |
Dain Nilsson | a5bd97c529 | |
Pol Henarejos | 4abb4a577f | |
Alexandre Detiste | 526e1ab7a0 | |
Dain Nilsson | 032dd8b853 | |
Dain Nilsson | 8c9f3f0200 | |
Dain Nilsson | aa3c5cd8e8 | |
Pol Henarejos | a40850adaf | |
Pol Henarejos | c8fd18d4b2 | |
Pol Henarejos | 2d7e9e1610 | |
Pol Henarejos | e82f231da9 | |
Dain Nilsson | 963eae041a | |
Dain Nilsson | be2e8904e8 | |
Dain Nilsson | 8067e90f89 | |
Dain Nilsson | d7304fa49f | |
Dain Nilsson | 5575d5838c | |
Dain Nilsson | ed9f50a117 | |
Dain Nilsson | f523839dab | |
Dain Nilsson | c7ebd878c0 | |
Dain Nilsson | 2d6c067689 | |
Dain Nilsson | cbe72665e1 | |
Dain Nilsson | 1143d471ef | |
Dain Nilsson | 54cee2216a | |
Dain Nilsson | e7eb53a73e | |
Dain Nilsson | c7f09de1b6 | |
Dain Nilsson | 50d0306a2d | |
Dain Nilsson | fc46e83f7c | |
Taylor R Campbell | fb6f77c5e4 | |
Dain Nilsson | 5dffcaa838 | |
Dain Nilsson | 46c095fc21 | |
Dain Nilsson | 737fd76a27 | |
Dain Nilsson | d52024ebef | |
Fabian Kaczmarczyck | 9c980040da | |
Dain Nilsson | 75977a9468 | |
Dain Nilsson | c6c3a68da0 | |
Dain Nilsson | bcefd244e6 | |
Dain Nilsson | 4d1782880c | |
Dain Nilsson | 60a309eea0 | |
Dain Nilsson | 8fdf098520 | |
Jon Janzen | 589e2f1c4b | |
Dain Nilsson | 667ff5588b | |
Dain Nilsson | b915870e72 | |
Dain Nilsson | 08e1c45c93 | |
Dain Nilsson | d3ec6174ae | |
Dain Nilsson | cfd7f6b0c0 | |
Dain Nilsson | 71a317b12d | |
Dain Nilsson | 026c6a7f8d | |
Dain Nilsson | 8c00d04945 | |
Dain Nilsson | e21341312c | |
Dain Nilsson | fabb844bce | |
Dain Nilsson | 791ef6eba9 | |
Dain Nilsson | 27cd3dda54 | |
Dain Nilsson | cd9e6cbd59 | |
Dain Nilsson | 103df1b456 | |
Dain Nilsson | ae048d06ff | |
Dain Nilsson | 15ec60a3c4 | |
Dain Nilsson | 4a07a4d004 | |
Dain Nilsson | 354672b9ce | |
Dain Nilsson | 709599f98c | |
Dain Nilsson | 8debc41942 | |
Dain Nilsson | f7e8c59649 | |
Dain Nilsson | fa5e9fcce5 | |
Dain Nilsson | 19bc5ce15a | |
Dain Nilsson | b674c2b5ac | |
Dain Nilsson | 5e40339a7c | |
Michael Gmelin | e1050b575b | |
Dain Nilsson | d8542e82de | |
Markus Meissner | 35db7f2f5c | |
Dain Nilsson | b546c0d629 | |
Dain Nilsson | e327e33fe0 | |
Dain Nilsson | 0f89a0901f | |
Dain Nilsson | ea1bdf3589 | |
Dain Nilsson | dcfd82464f | |
Dain Nilsson | 1dd112a340 | |
Dain Nilsson | dfdae68031 | |
Dain Nilsson | abe66f1385 | |
Dain Nilsson | 70808bd093 | |
Dain Nilsson | 2a202d0e19 | |
Michael Gmelin | 952b5b568b | |
Michael Gmelin | 1ce6df67d6 | |
Michael Gmelin | 1655099499 | |
Dain Nilsson | ceb6deffe3 | |
Dain Nilsson | 8c1ca676c3 | |
Dain Nilsson | 5d87ebe714 | |
Dain Nilsson | 6c50014762 | |
Dain Nilsson | ad64143cb9 | |
Dain Nilsson | fa5b2e3d2c | |
Dain Nilsson | 8afa330918 | |
Dain Nilsson | d995c21b97 | |
Dain Nilsson | db24c8b6bf | |
Dain Nilsson | a046a5a9e8 | |
Dain Nilsson | 452a02dd06 | |
Dain Nilsson | 77b7fb6990 | |
Dain Nilsson | 30ee871c3a | |
Dain Nilsson | 221b822b12 | |
Dain Nilsson | 69ad253dc9 | |
Dain Nilsson | 31148ab930 | |
Dain Nilsson | 966e38ee72 | |
Dain Nilsson | 1d7e2b5a5e | |
Dain Nilsson | 37224ea93c | |
Dain Nilsson | 53a32a4412 | |
Dain Nilsson | 7e77545e36 | |
Dain Nilsson | f18f815a47 | |
Dain Nilsson | 7985a4966a | |
Dain Nilsson | 90d0f63379 | |
Dain Nilsson | f2237d4f65 | |
Dain Nilsson | 0e961839f1 | |
Dain Nilsson | c77d30d456 | |
Dain Nilsson | 0436ab9026 | |
Dain Nilsson | 15c9e37534 | |
Dain Nilsson | d2a0d71be6 | |
Dain Nilsson | fd30409b03 | |
Dain Nilsson | 83625ddf39 | |
Dain Nilsson | 15d3107c5e | |
Dain Nilsson | 7c46413675 | |
Dain Nilsson | 9926168e76 | |
Dain Nilsson | f9090c8e1e | |
Dain Nilsson | 18968a99da | |
Dain Nilsson | 36ea6fcc9d | |
Dain Nilsson | b039820fad | |
Dain Nilsson | dd5662ca7b | |
Dain Nilsson | 80e79cc048 | |
Dain Nilsson | 8f78af003d | |
Dain Nilsson | 58471d4af1 | |
Dain Nilsson | 5cd89c999a | |
Dain Nilsson | 3bfd130bb1 | |
Dain Nilsson | b411555ea4 | |
Dain Nilsson | 4f4c48b9d5 | |
Dain Nilsson | 0b51e8e687 | |
Rasmus Précenth | e78efbfe93 | |
Dain Nilsson | e42eb2206c | |
Dain Nilsson | 26a590008f | |
Dain Nilsson | 1fa32fddcb | |
Dain Nilsson | 0ca5ca14b5 | |
Dain Nilsson | 79b74aa383 | |
Rasmus Précenth | fe6ac8bbab | |
Dain Nilsson | 30590991cf | |
Dain Nilsson | 5a96c18db1 | |
Dain Nilsson | b84dc08286 | |
Dain Nilsson | 07a65065e9 | |
Dain Nilsson | 6ce5291469 | |
Dain Nilsson | 9749d5a3f5 | |
Dain Nilsson | d82c513cd7 | |
Dain Nilsson | ad373825ea | |
Dain Nilsson | f7ddaa2e6f | |
Dain Nilsson | 1dc8f0dfc3 | |
Dain Nilsson | fbf1596e54 | |
Dain Nilsson | 5f7295bfcb | |
Dain Nilsson | 995d75b4e6 | |
Dain Nilsson | 11e0a8dd2f | |
Dain Nilsson | fed257922c | |
Dain Nilsson | f12247ed51 | |
Dain Nilsson | b6060940eb | |
Dain Nilsson | eae65b57a0 | |
Dain Nilsson | a243139778 | |
Dain Nilsson | 00062bab5c | |
Dain Nilsson | daeebf184a | |
Dain Nilsson | ef8e3c91f8 | |
Dain Nilsson | b7409245a3 | |
Dain Nilsson | 0e7fd9a7d5 | |
Dain Nilsson | 69815e4017 | |
Dain Nilsson | dc6e24e7f8 | |
Jonathan Morrison | 1769dc1982 | |
Dain Nilsson | 9c0d468419 | |
Dain Nilsson | e289d58f6d | |
Dain Nilsson | a64b1caf1b | |
Dain Nilsson | d697287e6b | |
Dain Nilsson | 525550120c | |
Dain Nilsson | 1c45c611fb | |
Dain Nilsson | 6749f8cfe9 | |
Dain Nilsson | b11b9f4647 | |
Dain Nilsson | 55fc2033cd | |
Dain Nilsson | b2bdfd191d | |
Dain Nilsson | 88124589cb | |
Dain Nilsson | 42de497e17 | |
Dain Nilsson | 69970e301e | |
Dain Nilsson | 450c70d525 | |
Dain Nilsson | 091277c623 |
|
@ -1,6 +1,3 @@
|
|||
[bdist_wheel]
|
||||
universal = 1
|
||||
|
||||
[flake8]
|
||||
max-line-length = 88
|
||||
ignore = E203, W503
|
|
@ -0,0 +1,10 @@
|
|||
version: 2
|
||||
updates:
|
||||
- package-ecosystem: "github-actions"
|
||||
directory: "/"
|
||||
schedule:
|
||||
interval: "monthly"
|
||||
groups:
|
||||
github-actions:
|
||||
patterns:
|
||||
- "*"
|
|
@ -9,17 +9,13 @@ jobs:
|
|||
strategy:
|
||||
matrix:
|
||||
os: [ubuntu-latest, windows-latest, macos-latest]
|
||||
python: [2.7, 3.5, 3.6, 3.7, 3.8, pypy2, pypy3]
|
||||
python: ['3.8', '3.9', '3.10', '3.11', '3.12', 'pypy3.9']
|
||||
architecture: [x86, x64]
|
||||
exclude:
|
||||
- os: ubuntu-latest
|
||||
architecture: x86
|
||||
- os: macos-latest
|
||||
architecture: x86
|
||||
- os: windows-latest
|
||||
python: pypy2
|
||||
- os: macos-latest
|
||||
python: pypy2
|
||||
- os: windows-latest
|
||||
python: pypy3
|
||||
- os: macos-latest
|
||||
|
@ -27,27 +23,27 @@ jobs:
|
|||
|
||||
name: ${{ matrix.os }} Py ${{ matrix.python }} ${{ matrix.architecture }}
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Set up Python
|
||||
uses: actions/setup-python@v1
|
||||
uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: ${{ matrix.python }}
|
||||
architecture: ${{ matrix.architecture }}
|
||||
|
||||
- name: Install python dependencies
|
||||
run: |
|
||||
python -m pip install --upgrade pip setuptools pipenv
|
||||
pipenv install --skip-lock --dev
|
||||
python -m pip install --upgrade pip setuptools poetry
|
||||
poetry update
|
||||
|
||||
- name: Run pre-commit
|
||||
if: "!startsWith(matrix.python, 'pypy') && matrix.python != '3.5'"
|
||||
if: "!startsWith(matrix.python, 'pypy') && !startsWith(matrix.python, '3.7')"
|
||||
run: |
|
||||
python -m pip install pre-commit
|
||||
pre-commit run --all-files
|
||||
|
||||
- name: Run unit tests
|
||||
run: pipenv run test
|
||||
run: poetry run pytest
|
||||
|
||||
build:
|
||||
#needs: test
|
||||
|
@ -55,73 +51,20 @@ jobs:
|
|||
name: Build Python source .tar.gz
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Set up Python
|
||||
uses: actions/setup-python@v1
|
||||
uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: 3.x
|
||||
|
||||
- name: Build source package
|
||||
run: |
|
||||
python -m pip install --upgrade pip setuptools
|
||||
python setup.py sdist
|
||||
python -m pip install --upgrade pip setuptools poetry
|
||||
poetry build
|
||||
|
||||
- name: Upload source package
|
||||
uses: actions/upload-artifact@v1
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: fido2-python-sdist
|
||||
path: dist
|
||||
|
||||
deb:
|
||||
#needs: test
|
||||
runs-on: ubuntu-latest
|
||||
name: Build .deb
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
|
||||
- name: Check Debian version
|
||||
run: |
|
||||
export PY_VER=$(awk '/__version__/{print $NF}' fido2/__init__.py | tr -d '"')
|
||||
export DEB_VER=$(dpkg-parsechangelog --show-field Version)
|
||||
case "$PY_VER" in
|
||||
*-dev*)
|
||||
if [[ ! $DEB_VER =~ "+git" ]]
|
||||
then
|
||||
echo "Debian package version mismatch, must contain +git!"
|
||||
exit 1
|
||||
fi
|
||||
;;
|
||||
*)
|
||||
if [ $PY_VER != $DEB_VER ]
|
||||
then
|
||||
echo "Debian package version mismatch, expected $PY_VER!"
|
||||
exit 1
|
||||
fi
|
||||
;;
|
||||
esac
|
||||
|
||||
- name: Build .deb
|
||||
run: |
|
||||
sudo apt install -y devscripts equivs
|
||||
yes | sudo mk-build-deps -i debian/control
|
||||
debuild -us -uc
|
||||
mkdir dist
|
||||
mv ../python-fido2_* ../python3-fido2_* dist/
|
||||
|
||||
- name: Install .deb
|
||||
run: |
|
||||
sudo apt install python
|
||||
! python2 -c "import fido2"
|
||||
sudo dpkg -i dist/python-fido2_*.deb
|
||||
python2 -c "import fido2"
|
||||
! python3 -c "import fido2"
|
||||
sudo dpkg -i dist/python3-fido2_*.deb
|
||||
python3 -c "import fido2"
|
||||
|
||||
- name: Upload Debian packages
|
||||
uses: actions/upload-artifact@v1
|
||||
with:
|
||||
name: python2-deb-files
|
||||
path: dist
|
||||
|
|
|
@ -1,35 +0,0 @@
|
|||
name: "Code scanning - action"
|
||||
|
||||
on:
|
||||
push:
|
||||
pull_request:
|
||||
schedule:
|
||||
- cron: '0 12 * * 4'
|
||||
|
||||
jobs:
|
||||
CodeQL-Build:
|
||||
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v2
|
||||
with:
|
||||
# We must fetch at least the immediate parents so that if this is
|
||||
# a pull request then we can checkout the head.
|
||||
fetch-depth: 2
|
||||
|
||||
# If this run was triggered by a pull request event, then checkout
|
||||
# the head of the pull request instead of the merge commit.
|
||||
- run: git checkout HEAD^2
|
||||
if: ${{ github.event_name == 'pull_request' }}
|
||||
|
||||
# Initializes the CodeQL tools for scanning.
|
||||
- name: Initialize CodeQL
|
||||
uses: github/codeql-action/init@v1
|
||||
|
||||
- name: Autobuild
|
||||
uses: github/codeql-action/autobuild@v1
|
||||
|
||||
- name: Perform CodeQL Analysis
|
||||
uses: github/codeql-action/analyze@v1
|
|
@ -8,6 +8,7 @@ dist/
|
|||
.ropeproject/
|
||||
ChangeLog
|
||||
man/*.1
|
||||
poetry.lock
|
||||
|
||||
# Unit test / coverage reports
|
||||
htmlcov/
|
||||
|
|
|
@ -1,14 +1,19 @@
|
|||
repos:
|
||||
- repo: https://gitlab.com/pycqa/flake8
|
||||
rev: 3.7.9
|
||||
- repo: https://github.com/PyCQA/flake8
|
||||
rev: 7.0.0
|
||||
hooks:
|
||||
- id: flake8
|
||||
- repo: https://github.com/psf/black
|
||||
rev: 19.10b0
|
||||
rev: 24.2.0
|
||||
hooks:
|
||||
- id: black
|
||||
- repo: https://github.com/PyCQA/bandit
|
||||
rev: 1.6.2
|
||||
rev: 1.7.8
|
||||
hooks:
|
||||
- id: bandit
|
||||
exclude: ^test/
|
||||
exclude: ^tests/
|
||||
- repo: https://github.com/pre-commit/mirrors-mypy
|
||||
rev: v1.9.0
|
||||
hooks:
|
||||
- id: mypy
|
||||
files: ^fido2/
|
||||
|
|
12
MANIFEST.in
12
MANIFEST.in
|
@ -1,12 +0,0 @@
|
|||
include COPYING*
|
||||
include README.adoc
|
||||
include NEWS
|
||||
include ChangeLog
|
||||
include fido2/public_suffix_list.dat
|
||||
include examples/*.py
|
||||
include examples/server/*.py
|
||||
include examples/server/README.adoc
|
||||
include examples/server/Pipfile
|
||||
include examples/server/Pipfile.lock
|
||||
include examples/server/static/*
|
||||
recursive-include test *.py
|
46
NEWS
46
NEWS
|
@ -1,3 +1,49 @@
|
|||
* Version 1.1.3 (released 2024-03-13)
|
||||
** Fix USB HID issue on MacOS that sometimes caused a pause while waiting for a
|
||||
timeout.
|
||||
** Fix argument to CredProp extension where an enum value was required instead of
|
||||
also allowing a string.
|
||||
** Fix parsing of some key types (ES384, ES512) causing signature verification to fail.
|
||||
** Deprecation: Calling websafe_decode with a bytes argument instead of str.
|
||||
This will raise a TypeError in the next major version of the library.
|
||||
|
||||
* Version 1.1.2 (released 2023-07-06)
|
||||
** Fix ClientPin usage for Authenticators that do not support passing a PIN.
|
||||
** Fix: Handle correct CTAP response codes in authenticatorSelection.
|
||||
|
||||
* Version 1.1.1 (released 2023-04-05)
|
||||
** Add community provided support for NetBSD.
|
||||
** Bugfix: Don't set length for largeBlob when offset is 0.
|
||||
** Bugfix: Remove print statement in webauthn parsing.
|
||||
|
||||
* Version 1.1.0 (released 2022-10-17)
|
||||
** Bugfix: Fix name of "crossOrigin" in CollectedClientData.create().
|
||||
** Bugfix: Some incorrect type hints in the MDS3 classes were fixed.
|
||||
** Stricter checking of dataclass field types.
|
||||
** Add support for JSON-serialization of WebAuthn data classes.
|
||||
This changes the objects dict representation to align with new additions in the
|
||||
WebAuthn specification. As this may break compatibility, the new behavior
|
||||
requires explicit opt-in until python-fido2 2.0 is released.
|
||||
** Update server example to use JSON serialization.
|
||||
** Server: Add support for passing RegistrationResponse/AuthenticationResponse (or
|
||||
their deserialized JSON data) to register_complete/authenticate_complete.
|
||||
** Add new "hybrid" AuthenticatorTransport.
|
||||
** Add new AuthenticatorData flags, and use 2-letter names as in the WebAuthn spec
|
||||
(long names are still available as aliases).
|
||||
|
||||
* Version 1.0.0 (released 2022-06-08)
|
||||
** First stable release.
|
||||
|
||||
* Version 1.0.0rc1 (released 2022-05-02)
|
||||
** Release Candidate 1 of first stable release.
|
||||
** Require Python 3.7 or later.
|
||||
** APIs have updated to align with WebAuthn level 2.
|
||||
** Several CTAP 2.1 features have been implemented.
|
||||
|
||||
* Version 0.9.3 (released 2021-11-09)
|
||||
** Bugfix: Linux - Don't fail device discovery when hidraw doesn't support
|
||||
HIDIOCGRAWUNIQ (Linux kernels before 5.6).
|
||||
|
||||
* Version 0.9.2 (released 2021-10-14)
|
||||
** Support the latest Windows webauthn.h API (included in Windows 11).
|
||||
** Add product name and serial number to HidDescriptors.
|
||||
|
|
15
Pipfile
15
Pipfile
|
@ -1,15 +0,0 @@
|
|||
[[source]]
|
||||
verify_ssl = true
|
||||
name = "pypi"
|
||||
url = "https://pypi.org/simple"
|
||||
|
||||
[dev-packages]
|
||||
"mock" = "<4"
|
||||
pyfakefs = {markers = "platform_system == 'Linux'",version = ">=3.4"}
|
||||
fido2 = {editable = true, path = "."}
|
||||
|
||||
[packages]
|
||||
fido2 = {editable = true, path = "."}
|
||||
|
||||
[scripts]
|
||||
test = "python -m unittest discover -v"
|
|
@ -1,229 +0,0 @@
|
|||
{
|
||||
"_meta": {
|
||||
"hash": {
|
||||
"sha256": "8e32c5ef0708bc05c4ddda1b5d39710a28af3a4c039dcf1c7671d15ca8a56c25"
|
||||
},
|
||||
"pipfile-spec": 6,
|
||||
"requires": {},
|
||||
"sources": [
|
||||
{
|
||||
"name": "pypi",
|
||||
"url": "https://pypi.org/simple",
|
||||
"verify_ssl": true
|
||||
}
|
||||
]
|
||||
},
|
||||
"default": {
|
||||
"cffi": {
|
||||
"hashes": [
|
||||
"sha256:06c54a68935738d206570b20da5ef2b6b6d92b38ef3ec45c5422c0ebaf338d4d",
|
||||
"sha256:0c0591bee64e438883b0c92a7bed78f6290d40bf02e54c5bf0978eaf36061771",
|
||||
"sha256:19ca0dbdeda3b2615421d54bef8985f72af6e0c47082a8d26122adac81a95872",
|
||||
"sha256:22b9c3c320171c108e903d61a3723b51e37aaa8c81255b5e7ce102775bd01e2c",
|
||||
"sha256:26bb2549b72708c833f5abe62b756176022a7b9a7f689b571e74c8478ead51dc",
|
||||
"sha256:33791e8a2dc2953f28b8d8d300dde42dd929ac28f974c4b4c6272cb2955cb762",
|
||||
"sha256:3c8d896becff2fa653dc4438b54a5a25a971d1f4110b32bd3068db3722c80202",
|
||||
"sha256:4373612d59c404baeb7cbd788a18b2b2a8331abcc84c3ba40051fcd18b17a4d5",
|
||||
"sha256:487d63e1454627c8e47dd230025780e91869cfba4c753a74fda196a1f6ad6548",
|
||||
"sha256:48916e459c54c4a70e52745639f1db524542140433599e13911b2f329834276a",
|
||||
"sha256:4922cd707b25e623b902c86188aca466d3620892db76c0bdd7b99a3d5e61d35f",
|
||||
"sha256:55af55e32ae468e9946f741a5d51f9896da6b9bf0bbdd326843fec05c730eb20",
|
||||
"sha256:57e555a9feb4a8460415f1aac331a2dc833b1115284f7ded7278b54afc5bd218",
|
||||
"sha256:5d4b68e216fc65e9fe4f524c177b54964af043dde734807586cf5435af84045c",
|
||||
"sha256:64fda793737bc4037521d4899be780534b9aea552eb673b9833b01f945904c2e",
|
||||
"sha256:6d6169cb3c6c2ad50db5b868db6491a790300ade1ed5d1da29289d73bbe40b56",
|
||||
"sha256:7bcac9a2b4fdbed2c16fa5681356d7121ecabf041f18d97ed5b8e0dd38a80224",
|
||||
"sha256:80b06212075346b5546b0417b9f2bf467fea3bfe7352f781ffc05a8ab24ba14a",
|
||||
"sha256:818014c754cd3dba7229c0f5884396264d51ffb87ec86e927ef0be140bfdb0d2",
|
||||
"sha256:8eb687582ed7cd8c4bdbff3df6c0da443eb89c3c72e6e5dcdd9c81729712791a",
|
||||
"sha256:99f27fefe34c37ba9875f224a8f36e31d744d8083e00f520f133cab79ad5e819",
|
||||
"sha256:9f3e33c28cd39d1b655ed1ba7247133b6f7fc16fa16887b120c0c670e35ce346",
|
||||
"sha256:a8661b2ce9694ca01c529bfa204dbb144b275a31685a075ce123f12331be790b",
|
||||
"sha256:a9da7010cec5a12193d1af9872a00888f396aba3dc79186604a09ea3ee7c029e",
|
||||
"sha256:aedb15f0a5a5949ecb129a82b72b19df97bbbca024081ed2ef88bd5c0a610534",
|
||||
"sha256:b315d709717a99f4b27b59b021e6207c64620790ca3e0bde636a6c7f14618abb",
|
||||
"sha256:ba6f2b3f452e150945d58f4badd92310449876c4c954836cfb1803bdd7b422f0",
|
||||
"sha256:c33d18eb6e6bc36f09d793c0dc58b0211fccc6ae5149b808da4a62660678b156",
|
||||
"sha256:c9a875ce9d7fe32887784274dd533c57909b7b1dcadcc128a2ac21331a9765dd",
|
||||
"sha256:c9e005e9bd57bc987764c32a1bee4364c44fdc11a3cc20a40b93b444984f2b87",
|
||||
"sha256:d2ad4d668a5c0645d281dcd17aff2be3212bc109b33814bbb15c4939f44181cc",
|
||||
"sha256:d950695ae4381ecd856bcaf2b1e866720e4ab9a1498cba61c602e56630ca7195",
|
||||
"sha256:e22dcb48709fc51a7b58a927391b23ab37eb3737a98ac4338e2448bef8559b33",
|
||||
"sha256:e8c6a99be100371dbb046880e7a282152aa5d6127ae01783e37662ef73850d8f",
|
||||
"sha256:e9dc245e3ac69c92ee4c167fbdd7428ec1956d4e754223124991ef29eb57a09d",
|
||||
"sha256:eb687a11f0a7a1839719edd80f41e459cc5366857ecbed383ff376c4e3cc6afd",
|
||||
"sha256:eb9e2a346c5238a30a746893f23a9535e700f8192a68c07c0258e7ece6ff3728",
|
||||
"sha256:ed38b924ce794e505647f7c331b22a693bee1538fdf46b0222c4717b42f744e7",
|
||||
"sha256:f0010c6f9d1a4011e429109fda55a225921e3206e7f62a0c22a35344bfd13cca",
|
||||
"sha256:f0c5d1acbfca6ebdd6b1e3eded8d261affb6ddcf2186205518f1428b8569bb99",
|
||||
"sha256:f10afb1004f102c7868ebfe91c28f4a712227fe4cb24974350ace1f90e1febbf",
|
||||
"sha256:f174135f5609428cc6e1b9090f9268f5c8935fddb1b25ccb8255a2d50de6789e",
|
||||
"sha256:f3ebe6e73c319340830a9b2825d32eb6d8475c1dac020b4f0aa774ee3b898d1c",
|
||||
"sha256:f627688813d0a4140153ff532537fbe4afea5a3dffce1f9deb7f91f848a832b5",
|
||||
"sha256:fd4305f86f53dfd8cd3522269ed7fc34856a8ee3709a5e28b2836b2db9d4cd69"
|
||||
],
|
||||
"version": "==1.14.6"
|
||||
},
|
||||
"cryptography": {
|
||||
"hashes": [
|
||||
"sha256:07bb7fbfb5de0980590ddfc7f13081520def06dc9ed214000ad4372fb4e3c7f6",
|
||||
"sha256:18d90f4711bf63e2fb21e8c8e51ed8189438e6b35a6d996201ebd98a26abbbe6",
|
||||
"sha256:1ed82abf16df40a60942a8c211251ae72858b25b7421ce2497c2eb7a1cee817c",
|
||||
"sha256:22a38e96118a4ce3b97509443feace1d1011d0571fae81fc3ad35f25ba3ea999",
|
||||
"sha256:2d69645f535f4b2c722cfb07a8eab916265545b3475fdb34e0be2f4ee8b0b15e",
|
||||
"sha256:4a2d0e0acc20ede0f06ef7aa58546eee96d2592c00f450c9acb89c5879b61992",
|
||||
"sha256:54b2605e5475944e2213258e0ab8696f4f357a31371e538ef21e8d61c843c28d",
|
||||
"sha256:7075b304cd567694dc692ffc9747f3e9cb393cc4aa4fb7b9f3abd6f5c4e43588",
|
||||
"sha256:7b7ceeff114c31f285528ba8b390d3e9cfa2da17b56f11d366769a807f17cbaa",
|
||||
"sha256:7eba2cebca600a7806b893cb1d541a6e910afa87e97acf2021a22b32da1df52d",
|
||||
"sha256:928185a6d1ccdb816e883f56ebe92e975a262d31cc536429041921f8cb5a62fd",
|
||||
"sha256:9933f28f70d0517686bd7de36166dda42094eac49415459d9bdf5e7df3e0086d",
|
||||
"sha256:a688ebcd08250eab5bb5bca318cc05a8c66de5e4171a65ca51db6bd753ff8953",
|
||||
"sha256:abb5a361d2585bb95012a19ed9b2c8f412c5d723a9836418fab7aaa0243e67d2",
|
||||
"sha256:c10c797ac89c746e488d2ee92bd4abd593615694ee17b2500578b63cad6b93a8",
|
||||
"sha256:ced40344e811d6abba00295ced98c01aecf0c2de39481792d87af4fa58b7b4d6",
|
||||
"sha256:d57e0cdc1b44b6cdf8af1d01807db06886f10177469312fbde8f44ccbb284bc9",
|
||||
"sha256:d99915d6ab265c22873f1b4d6ea5ef462ef797b4140be4c9d8b179915e0985c6",
|
||||
"sha256:eb80e8a1f91e4b7ef8b33041591e6d89b2b8e122d787e87eeb2b08da71bb16ad",
|
||||
"sha256:ebeddd119f526bcf323a89f853afb12e225902a24d29b55fe18dd6fcb2838a76"
|
||||
],
|
||||
"markers": "python_version >= '3.6'",
|
||||
"version": "==35.0.0"
|
||||
},
|
||||
"fido2": {
|
||||
"editable": true,
|
||||
"path": "."
|
||||
},
|
||||
"pycparser": {
|
||||
"hashes": [
|
||||
"sha256:2d475327684562c3a96cc71adf7dc8c4f0565175cf86b6d7a404ff4c771f15f0",
|
||||
"sha256:7582ad22678f0fcd81102833f60ef8d0e57288b6b5fb00323d101be910e35705"
|
||||
],
|
||||
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",
|
||||
"version": "==2.20"
|
||||
},
|
||||
"six": {
|
||||
"hashes": [
|
||||
"sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926",
|
||||
"sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"
|
||||
],
|
||||
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",
|
||||
"version": "==1.16.0"
|
||||
}
|
||||
},
|
||||
"develop": {
|
||||
"cffi": {
|
||||
"hashes": [
|
||||
"sha256:06c54a68935738d206570b20da5ef2b6b6d92b38ef3ec45c5422c0ebaf338d4d",
|
||||
"sha256:0c0591bee64e438883b0c92a7bed78f6290d40bf02e54c5bf0978eaf36061771",
|
||||
"sha256:19ca0dbdeda3b2615421d54bef8985f72af6e0c47082a8d26122adac81a95872",
|
||||
"sha256:22b9c3c320171c108e903d61a3723b51e37aaa8c81255b5e7ce102775bd01e2c",
|
||||
"sha256:26bb2549b72708c833f5abe62b756176022a7b9a7f689b571e74c8478ead51dc",
|
||||
"sha256:33791e8a2dc2953f28b8d8d300dde42dd929ac28f974c4b4c6272cb2955cb762",
|
||||
"sha256:3c8d896becff2fa653dc4438b54a5a25a971d1f4110b32bd3068db3722c80202",
|
||||
"sha256:4373612d59c404baeb7cbd788a18b2b2a8331abcc84c3ba40051fcd18b17a4d5",
|
||||
"sha256:487d63e1454627c8e47dd230025780e91869cfba4c753a74fda196a1f6ad6548",
|
||||
"sha256:48916e459c54c4a70e52745639f1db524542140433599e13911b2f329834276a",
|
||||
"sha256:4922cd707b25e623b902c86188aca466d3620892db76c0bdd7b99a3d5e61d35f",
|
||||
"sha256:55af55e32ae468e9946f741a5d51f9896da6b9bf0bbdd326843fec05c730eb20",
|
||||
"sha256:57e555a9feb4a8460415f1aac331a2dc833b1115284f7ded7278b54afc5bd218",
|
||||
"sha256:5d4b68e216fc65e9fe4f524c177b54964af043dde734807586cf5435af84045c",
|
||||
"sha256:64fda793737bc4037521d4899be780534b9aea552eb673b9833b01f945904c2e",
|
||||
"sha256:6d6169cb3c6c2ad50db5b868db6491a790300ade1ed5d1da29289d73bbe40b56",
|
||||
"sha256:7bcac9a2b4fdbed2c16fa5681356d7121ecabf041f18d97ed5b8e0dd38a80224",
|
||||
"sha256:80b06212075346b5546b0417b9f2bf467fea3bfe7352f781ffc05a8ab24ba14a",
|
||||
"sha256:818014c754cd3dba7229c0f5884396264d51ffb87ec86e927ef0be140bfdb0d2",
|
||||
"sha256:8eb687582ed7cd8c4bdbff3df6c0da443eb89c3c72e6e5dcdd9c81729712791a",
|
||||
"sha256:99f27fefe34c37ba9875f224a8f36e31d744d8083e00f520f133cab79ad5e819",
|
||||
"sha256:9f3e33c28cd39d1b655ed1ba7247133b6f7fc16fa16887b120c0c670e35ce346",
|
||||
"sha256:a8661b2ce9694ca01c529bfa204dbb144b275a31685a075ce123f12331be790b",
|
||||
"sha256:a9da7010cec5a12193d1af9872a00888f396aba3dc79186604a09ea3ee7c029e",
|
||||
"sha256:aedb15f0a5a5949ecb129a82b72b19df97bbbca024081ed2ef88bd5c0a610534",
|
||||
"sha256:b315d709717a99f4b27b59b021e6207c64620790ca3e0bde636a6c7f14618abb",
|
||||
"sha256:ba6f2b3f452e150945d58f4badd92310449876c4c954836cfb1803bdd7b422f0",
|
||||
"sha256:c33d18eb6e6bc36f09d793c0dc58b0211fccc6ae5149b808da4a62660678b156",
|
||||
"sha256:c9a875ce9d7fe32887784274dd533c57909b7b1dcadcc128a2ac21331a9765dd",
|
||||
"sha256:c9e005e9bd57bc987764c32a1bee4364c44fdc11a3cc20a40b93b444984f2b87",
|
||||
"sha256:d2ad4d668a5c0645d281dcd17aff2be3212bc109b33814bbb15c4939f44181cc",
|
||||
"sha256:d950695ae4381ecd856bcaf2b1e866720e4ab9a1498cba61c602e56630ca7195",
|
||||
"sha256:e22dcb48709fc51a7b58a927391b23ab37eb3737a98ac4338e2448bef8559b33",
|
||||
"sha256:e8c6a99be100371dbb046880e7a282152aa5d6127ae01783e37662ef73850d8f",
|
||||
"sha256:e9dc245e3ac69c92ee4c167fbdd7428ec1956d4e754223124991ef29eb57a09d",
|
||||
"sha256:eb687a11f0a7a1839719edd80f41e459cc5366857ecbed383ff376c4e3cc6afd",
|
||||
"sha256:eb9e2a346c5238a30a746893f23a9535e700f8192a68c07c0258e7ece6ff3728",
|
||||
"sha256:ed38b924ce794e505647f7c331b22a693bee1538fdf46b0222c4717b42f744e7",
|
||||
"sha256:f0010c6f9d1a4011e429109fda55a225921e3206e7f62a0c22a35344bfd13cca",
|
||||
"sha256:f0c5d1acbfca6ebdd6b1e3eded8d261affb6ddcf2186205518f1428b8569bb99",
|
||||
"sha256:f10afb1004f102c7868ebfe91c28f4a712227fe4cb24974350ace1f90e1febbf",
|
||||
"sha256:f174135f5609428cc6e1b9090f9268f5c8935fddb1b25ccb8255a2d50de6789e",
|
||||
"sha256:f3ebe6e73c319340830a9b2825d32eb6d8475c1dac020b4f0aa774ee3b898d1c",
|
||||
"sha256:f627688813d0a4140153ff532537fbe4afea5a3dffce1f9deb7f91f848a832b5",
|
||||
"sha256:fd4305f86f53dfd8cd3522269ed7fc34856a8ee3709a5e28b2836b2db9d4cd69"
|
||||
],
|
||||
"version": "==1.14.6"
|
||||
},
|
||||
"cryptography": {
|
||||
"hashes": [
|
||||
"sha256:07bb7fbfb5de0980590ddfc7f13081520def06dc9ed214000ad4372fb4e3c7f6",
|
||||
"sha256:18d90f4711bf63e2fb21e8c8e51ed8189438e6b35a6d996201ebd98a26abbbe6",
|
||||
"sha256:1ed82abf16df40a60942a8c211251ae72858b25b7421ce2497c2eb7a1cee817c",
|
||||
"sha256:22a38e96118a4ce3b97509443feace1d1011d0571fae81fc3ad35f25ba3ea999",
|
||||
"sha256:2d69645f535f4b2c722cfb07a8eab916265545b3475fdb34e0be2f4ee8b0b15e",
|
||||
"sha256:4a2d0e0acc20ede0f06ef7aa58546eee96d2592c00f450c9acb89c5879b61992",
|
||||
"sha256:54b2605e5475944e2213258e0ab8696f4f357a31371e538ef21e8d61c843c28d",
|
||||
"sha256:7075b304cd567694dc692ffc9747f3e9cb393cc4aa4fb7b9f3abd6f5c4e43588",
|
||||
"sha256:7b7ceeff114c31f285528ba8b390d3e9cfa2da17b56f11d366769a807f17cbaa",
|
||||
"sha256:7eba2cebca600a7806b893cb1d541a6e910afa87e97acf2021a22b32da1df52d",
|
||||
"sha256:928185a6d1ccdb816e883f56ebe92e975a262d31cc536429041921f8cb5a62fd",
|
||||
"sha256:9933f28f70d0517686bd7de36166dda42094eac49415459d9bdf5e7df3e0086d",
|
||||
"sha256:a688ebcd08250eab5bb5bca318cc05a8c66de5e4171a65ca51db6bd753ff8953",
|
||||
"sha256:abb5a361d2585bb95012a19ed9b2c8f412c5d723a9836418fab7aaa0243e67d2",
|
||||
"sha256:c10c797ac89c746e488d2ee92bd4abd593615694ee17b2500578b63cad6b93a8",
|
||||
"sha256:ced40344e811d6abba00295ced98c01aecf0c2de39481792d87af4fa58b7b4d6",
|
||||
"sha256:d57e0cdc1b44b6cdf8af1d01807db06886f10177469312fbde8f44ccbb284bc9",
|
||||
"sha256:d99915d6ab265c22873f1b4d6ea5ef462ef797b4140be4c9d8b179915e0985c6",
|
||||
"sha256:eb80e8a1f91e4b7ef8b33041591e6d89b2b8e122d787e87eeb2b08da71bb16ad",
|
||||
"sha256:ebeddd119f526bcf323a89f853afb12e225902a24d29b55fe18dd6fcb2838a76"
|
||||
],
|
||||
"markers": "python_version >= '3.6'",
|
||||
"version": "==35.0.0"
|
||||
},
|
||||
"fido2": {
|
||||
"editable": true,
|
||||
"path": "."
|
||||
},
|
||||
"mock": {
|
||||
"hashes": [
|
||||
"sha256:83657d894c90d5681d62155c82bda9c1187827525880eda8ff5df4ec813437c3",
|
||||
"sha256:d157e52d4e5b938c550f39eb2fd15610db062441a9c2747d3dbfa9298211d0f8"
|
||||
],
|
||||
"index": "pypi",
|
||||
"version": "==3.0.5"
|
||||
},
|
||||
"pycparser": {
|
||||
"hashes": [
|
||||
"sha256:2d475327684562c3a96cc71adf7dc8c4f0565175cf86b6d7a404ff4c771f15f0",
|
||||
"sha256:7582ad22678f0fcd81102833f60ef8d0e57288b6b5fb00323d101be910e35705"
|
||||
],
|
||||
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",
|
||||
"version": "==2.20"
|
||||
},
|
||||
"pyfakefs": {
|
||||
"hashes": [
|
||||
"sha256:002a065dcbf59c2caa039e4fc4ba01d1d636aa63ee9c794d4c9fc01f0e2d6dc0",
|
||||
"sha256:03a0e8e34e7250a458a640ca72c4c2c569bafc7e51a4c2d6c4ac62a426f60301"
|
||||
],
|
||||
"index": "pypi",
|
||||
"markers": "platform_system == 'Linux'",
|
||||
"version": "==4.5.1"
|
||||
},
|
||||
"six": {
|
||||
"hashes": [
|
||||
"sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926",
|
||||
"sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"
|
||||
],
|
||||
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",
|
||||
"version": "==1.16.0"
|
||||
}
|
||||
}
|
||||
}
|
40
README.adoc
40
README.adoc
|
@ -1,17 +1,10 @@
|
|||
== python-fido2
|
||||
image:https://github.com/Yubico/python-fido2/workflows/build/badge.svg["Github actions build", link="https://github.com/Yubico/python-fido2/actions"]
|
||||
|
||||
|
||||
Provides library functionality for communicating with a FIDO device over USB as
|
||||
well as verifying attestation and assertion signatures.
|
||||
|
||||
WARNING: This project is in beta. Expect things to change or break at any time!
|
||||
|
||||
WARNING: Version 0.9 is the last planned version of this library which will
|
||||
support Python 2. The next major version planned is 1.0, which will require
|
||||
Python 3 or later.
|
||||
|
||||
This library aims to support the FIDO U2F and FIDO 2.0 protocols for
|
||||
This library aims to support the FIDO U2F and FIDO 2 protocols for
|
||||
communicating with a USB authenticator via the Client-to-Authenticator Protocol
|
||||
(CTAP 1 and 2). In addition to this low-level device access, classes defined in
|
||||
the `fido2.client` and `fido2.server` modules implement higher level operations
|
||||
|
@ -48,17 +41,18 @@ or the _COPYING.MPLv2_ file for the full license text.
|
|||
|
||||
|
||||
=== Requirements
|
||||
fido2 is compatible with CPython 2.7 (2.7.6 and up), 3.5 onwards, and is tested
|
||||
on Windows, MacOS, and Linux. Support for OpenBSD and FreeBSD is provided as-is
|
||||
and relies on community contributions.
|
||||
fido2 is compatible with Python 3.7 and later, and is tested on Windows, MacOS,
|
||||
and Linux. Support for OpenBSD, FreeBSD, and NetBSD is provided as-is and
|
||||
relies on community contributions.
|
||||
|
||||
|
||||
=== Installation
|
||||
|
||||
fido2 is installable by running the following command:
|
||||
|
||||
pip install fido2
|
||||
|
||||
To install the dependencies required for communication with NFC Authenticators,
|
||||
To install the dependencies required for communication with NFC authenticators,
|
||||
instead use:
|
||||
|
||||
pip install fido2[pcsc]
|
||||
|
@ -68,6 +62,7 @@ requires running as Administrator. This library can still be used when running
|
|||
as non-administrator, via the `fido.client.WindowsClient` class. An example of
|
||||
this is included in the file `examples/credential.py`.
|
||||
|
||||
|
||||
Under Linux you will need to add a Udev rule to be able to access the FIDO
|
||||
device, or run as root. For example, the Udev rule may contain the following:
|
||||
|
||||
|
@ -78,31 +73,38 @@ KERNEL=="hidraw*", SUBSYSTEM=="hidraw", \
|
|||
MODE="0664", GROUP="plugdev", ATTRS{idVendor}=="1050"
|
||||
----
|
||||
|
||||
There may be a package already available for your distribution that does this
|
||||
for you, see:
|
||||
https://support.yubico.com/hc/en-us/articles/360013708900-Using-Your-U2F-YubiKey-with-Linux
|
||||
|
||||
Under FreeBSD you will either need to run as root or add rules for your device
|
||||
to /etc/devd.conf, which can be automated by installing security/u2f-devd:
|
||||
|
||||
# pkg install u2f-devd
|
||||
|
||||
|
||||
=== Dependencies
|
||||
==== Dependencies
|
||||
This project depends on Cryptography. For instructions on installing this
|
||||
dependency, see https://cryptography.io/en/latest/installation/.
|
||||
|
||||
NFC support is optionally available via PCSC, using the pyscard library. For
|
||||
NFC support is optionally available via PC/SC, using the pyscard library. For
|
||||
instructions on installing this dependency, see
|
||||
https://github.com/LudovicRousseau/pyscard/blob/master/INSTALL.md.
|
||||
|
||||
|
||||
=== Development
|
||||
For development of the library, we recommend using `pipenv`. To set up the dev
|
||||
environment, run this command in the root directory of the repository:
|
||||
For development of the library we use https://python-poetry.org/[poetry]. To
|
||||
set up the dev environment, run this command in the root directory of the
|
||||
repository:
|
||||
|
||||
pipenv install --dev
|
||||
poetry install
|
||||
|
||||
We also use https://pre-commit.com/[pre-commit] to run some scans on the code
|
||||
prior to committing.
|
||||
|
||||
|
||||
==== Running tests
|
||||
While many tests can run on their own, some require a connected U2F or FIDO2
|
||||
device to run.
|
||||
|
||||
pipenv run test
|
||||
poetry run pytest
|
||||
|
||||
|
|
35
RELEASE.adoc
35
RELEASE.adoc
|
@ -3,9 +3,8 @@
|
|||
|
||||
$ git checkout -b release/x.y.z
|
||||
|
||||
* Update the version in fido/__init__.py and make sure the NEWS file has an
|
||||
entry for it, and the correct release date.
|
||||
* Update the debian/changelog file with the correct version.
|
||||
* Update the version in pyproject.toml and fido/__init__.py and make sure the
|
||||
NEWS file has an entry for it, and the correct release date.
|
||||
* Commit the changes, and push the new branch.
|
||||
|
||||
$ git push -u origin release/x.y.z
|
||||
|
@ -18,37 +17,37 @@
|
|||
|
||||
* Build the release:
|
||||
|
||||
$ python setup.py sdist
|
||||
$ poetry build
|
||||
|
||||
* Sign the release, creating both binary and ascii signatures:
|
||||
* Sign the release:
|
||||
|
||||
$ gpg --detach-sign -a dist/fido2-x.y.z.tar.gz
|
||||
$ gpg --output dist/fido2-x.y.z.tar.gz.sig --dearmor dist/fido2-x.y.z.tar.gz.asc
|
||||
$ gpg --detach-sign -a dist/fido2-x.y.z-py3-none-any.whl
|
||||
|
||||
* Upload the release to PyPI:
|
||||
|
||||
$ twine upload dist/fido2-x.y.z.tar.gz dist/fido2-x.y.z.tar.gz.asc
|
||||
$ poetry publish
|
||||
|
||||
* Add the .tar.gz and .sig files to a new Github release, using the latest NEWS
|
||||
entry as description.
|
||||
* Add the .tar.gz, the .whl and .sig files to a new Github release, using the
|
||||
latest NEWS entry as description.
|
||||
|
||||
* Merge (using --ff) and delete the release branch, and push the tag:
|
||||
* Merge and delete the release branch, and push the tag:
|
||||
|
||||
$ git checkout master
|
||||
$ git checkout main
|
||||
$ git merge --ff release/x.y.z
|
||||
$ git branch -d release/x.y.z
|
||||
$ git push && git push --tags
|
||||
$ git push origin :release/x.y.z
|
||||
|
||||
* Bump the version number by incrementing the PATCH version and appending -dev0
|
||||
in fido2/__init__.py and add a new entry (unreleased) to the NEWS file.
|
||||
* Bump the version number by incrementing the PATCH version and appending -dev.0
|
||||
in pyproject.toml and fido2/__init__.py and add a new entry (unreleased) to the
|
||||
NEWS file.
|
||||
|
||||
__version__ = 'x.y.q-dev0'
|
||||
# pyproject.toml:
|
||||
version = "x.y.q-dev.0"
|
||||
|
||||
* Bump the version in debian/changelog by appending `+git` to it (don't
|
||||
increment PATCH).
|
||||
|
||||
(x.y.z+git)
|
||||
# fido2/__init__.py:
|
||||
__version__ = 'x.y.q-dev.0'
|
||||
|
||||
* Commit and push the change:
|
||||
|
||||
|
|
|
@ -1,5 +0,0 @@
|
|||
python-fido2 (0.9.2) xenial; urgency=low
|
||||
|
||||
* Build for ppa
|
||||
|
||||
-- Dain Nilsson <dain@yubico.com> Thu, 14 Oct 2021 12:31:19 +0100
|
|
@ -1 +0,0 @@
|
|||
9
|
|
@ -1,52 +0,0 @@
|
|||
Source: python-fido2
|
||||
Maintainer: Debian Authentication Maintainers <pkg-auth-maintainers@lists.alioth.debian.org>
|
||||
Uploaders: Dag Heyman <dag@yubico.com>,
|
||||
Dain Nilsson <dain@yubico.com>,
|
||||
Emil Lundberg <emil@yubico.com>
|
||||
Section: python
|
||||
Priority: optional
|
||||
Standards-Version: 4.1.1
|
||||
Build-Depends: debhelper (>= 9),
|
||||
dh-python,
|
||||
python-all,
|
||||
python3-all,
|
||||
python-cryptography,
|
||||
python3-cryptography,
|
||||
python-enum34,
|
||||
python-setuptools,
|
||||
python3-setuptools
|
||||
Homepage: https://www.github.com/python-fido2/
|
||||
X-Python-Version: >= 2.7
|
||||
X-Python3-Version: >= 3.4
|
||||
|
||||
Package: python-fido2
|
||||
Architecture: all
|
||||
Section: python
|
||||
Depends: ${misc:Depends},
|
||||
python,
|
||||
python-cryptography,
|
||||
python-setuptools,
|
||||
python-six,
|
||||
Recommends: libu2f-udev,
|
||||
python-pyscard,
|
||||
Description: Python library for implementing FIDO 2.0
|
||||
A Python library for communicating with a FIDO device over USB HID as
|
||||
well as verifying attestation and assertion signatures.
|
||||
Supports FIDO U2F and FIDO 2.0.
|
||||
This is the Python 2 version of the package.
|
||||
|
||||
|
||||
Package: python3-fido2
|
||||
Architecture: all
|
||||
Section: python
|
||||
Depends: ${misc:Depends},
|
||||
python3,
|
||||
python3-cryptography,
|
||||
python3-setuptools,
|
||||
python3-six,
|
||||
Recommends: libu2f-udev,
|
||||
python3-pyscard,
|
||||
Description: Python library for implementing FIDO 2.0
|
||||
A Python library for communicating with a FIDO device over USB HID as
|
||||
well as verifying attestation and assertion signatures.
|
||||
This is the Python 3 version of the package.
|
|
@ -1,52 +0,0 @@
|
|||
Format: https://www.debian.org/doc/packaging-manuals/copyright-format/1.0/
|
||||
Upstream-Name: python-fido2
|
||||
Source: https://developers.yubico.com/python-fido2
|
||||
|
||||
Files: *
|
||||
Copyright: Copyright (c) 2018 Yubico AB
|
||||
License: BSD-2-clause
|
||||
|
||||
Files: debian/*
|
||||
Copyright: Copyright (c) 2018 Yubico AB
|
||||
Copyright (c) 2018 Nicolas Braud-Santoni
|
||||
License: BSD-2-clause
|
||||
|
||||
Files: fido2/_pyu2f/*
|
||||
Copyright: Copyright (c) 2016 Google Inc.
|
||||
License: Apache-2.0
|
||||
On Debian systems the full text of the Apache-2.0 license can be found in
|
||||
/usr/share/common-licenses/Apache-2.0.
|
||||
|
||||
Files: fido2/public_suffix_list.dat
|
||||
Copyright: Copyright (c) 2007-16 Mozilla Foundation
|
||||
License: MPL-2.0
|
||||
On Debian systems the full text of the MPL-2.0 license can be found in
|
||||
/usr/share/common-licenses/MPL-2.0.
|
||||
|
||||
|
||||
License: BSD-2-clause
|
||||
All rights reserved.
|
||||
.
|
||||
Redistribution and use in source and binary forms, with or without
|
||||
modification, are permitted provided that the following conditions are
|
||||
met:
|
||||
.
|
||||
* Redistributions of source code must retain the above copyright
|
||||
notice, this list of conditions and the following disclaimer.
|
||||
.
|
||||
* Redistributions in binary form must reproduce the above
|
||||
copyright notice, this list of conditions and the following
|
||||
disclaimer in the documentation and/or other materials provided
|
||||
with the distribution.
|
||||
.
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
|
@ -1 +0,0 @@
|
|||
fido2 python-fido2; PEP386
|
|
@ -1 +0,0 @@
|
|||
fido2 python-fido2; PEP386
|
|
@ -1,7 +0,0 @@
|
|||
#!/usr/bin/make -f
|
||||
|
||||
export PYBUILD_NAME=fido2
|
||||
export PYBUILD_DISABLE=test
|
||||
|
||||
%:
|
||||
dh $@ --with python2,python3 --buildsystem=pybuild
|
|
@ -1 +0,0 @@
|
|||
3.0 (native)
|
|
@ -1,11 +0,0 @@
|
|||
FROM ubuntu:xenial
|
||||
RUN mkdir /deb
|
||||
RUN apt-get -qq update && apt-get -qq upgrade && apt-get install -y git devscripts equivs
|
||||
COPY debian/control /python-fido2/debian/control
|
||||
RUN yes | mk-build-deps -i /python-fido2/debian/control
|
||||
|
||||
COPY . /python-fido2
|
||||
RUN cd /python-fido2 && debuild -us -uc
|
||||
|
||||
RUN mv /python-fido2_* /python3-fido2_* /deb
|
||||
RUN tar czf /python-fido2-debian-packages.tar.gz /deb
|
|
@ -34,7 +34,6 @@ www.acs.com.hk/en/driver/100/acr122u-nfc-reader-with-sam-slot-proprietary/
|
|||
"""
|
||||
|
||||
import time
|
||||
import six
|
||||
|
||||
from fido2.utils import sha256
|
||||
from fido2.ctap1 import CTAP1
|
||||
|
@ -78,7 +77,7 @@ class Acr122uSamPcscDevice(CtapPcscDevice):
|
|||
"""
|
||||
|
||||
# print('>> %s' % b2a_hex(apdu))
|
||||
resp, sw1, sw2 = self._conn.transmit(list(six.iterbytes(apdu)), protocol)
|
||||
resp, sw1, sw2 = self._conn.transmit(list(iter(apdu)), protocol)
|
||||
response = bytes(bytearray(resp))
|
||||
# print('<< [0x%04x] %s' % (sw1 * 0x100 + sw2, b2a_hex(response)))
|
||||
|
||||
|
@ -286,7 +285,7 @@ dev = next(Acr122uSamPcscDevice.list_devices())
|
|||
|
||||
print("CONNECT: %s" % dev)
|
||||
print("version: %s" % dev.reader_version())
|
||||
print("atr: %s" % bytes(dev.get_atr()).hex())
|
||||
print("atr: %s" % dev.get_atr().hex())
|
||||
print("ats: %s" % dev.ats.hex())
|
||||
|
||||
# uncomment if you want to see parameters from card's selection
|
||||
|
|
|
@ -32,10 +32,10 @@ fingerprint. This requires that a PIN is already set.
|
|||
NOTE: This uses a draft bio enrollment specification which is not yet final.
|
||||
Consider this highly experimental.
|
||||
"""
|
||||
from __future__ import print_function, absolute_import, unicode_literals
|
||||
|
||||
from fido2.hid import CtapHidDevice
|
||||
from fido2.ctap2 import Ctap2, ClientPin, FPBioEnrollment, CaptureError
|
||||
from fido2.ctap2 import Ctap2, FPBioEnrollment, CaptureError
|
||||
from fido2.ctap2.pin import ClientPin
|
||||
from fido2.ctap2.bio import BioEnrollment
|
||||
from getpass import getpass
|
||||
import sys
|
||||
|
||||
|
@ -45,7 +45,7 @@ uv = "discouraged"
|
|||
for dev in CtapHidDevice.list_devices():
|
||||
try:
|
||||
ctap = Ctap2(dev)
|
||||
if "bioEnroll" in ctap.info.options:
|
||||
if BioEnrollment.is_supported(ctap.info):
|
||||
break
|
||||
except Exception: # nosec
|
||||
continue
|
||||
|
@ -76,4 +76,6 @@ while template_id is None:
|
|||
print(enroller.remaining, "more scans needed.")
|
||||
except CaptureError as e:
|
||||
print(e)
|
||||
bio.set_name(template_id, "Example")
|
||||
|
||||
print("Fingerprint registered successfully with ID:", template_id)
|
||||
|
|
|
@ -29,10 +29,8 @@
|
|||
Connects to the first FIDO device found which supports the CredBlob extension,
|
||||
creates a new credential for it with the extension enabled, and stores some data.
|
||||
"""
|
||||
from __future__ import print_function, absolute_import, unicode_literals
|
||||
|
||||
from fido2.hid import CtapHidDevice
|
||||
from fido2.client import Fido2Client
|
||||
from fido2.client import Fido2Client, UserInteraction
|
||||
from fido2.server import Fido2Server
|
||||
from getpass import getpass
|
||||
import sys
|
||||
|
@ -52,38 +50,52 @@ def enumerate_devices():
|
|||
yield dev
|
||||
|
||||
|
||||
# Handle user interaction
|
||||
class CliInteraction(UserInteraction):
|
||||
def prompt_up(self):
|
||||
print("\nTouch your authenticator device now...\n")
|
||||
|
||||
def request_pin(self, permissions, rd_id):
|
||||
return getpass("Enter PIN: ")
|
||||
|
||||
def request_uv(self, permissions, rd_id):
|
||||
print("User Verification required.")
|
||||
return True
|
||||
|
||||
|
||||
# Locate a device
|
||||
for dev in enumerate_devices():
|
||||
client = Fido2Client(dev, "https://example.com")
|
||||
client = Fido2Client(dev, "https://example.com", user_interaction=CliInteraction())
|
||||
if "credBlob" in client.info.extensions:
|
||||
break
|
||||
else:
|
||||
print("No Authenticator with the CredBlob extension found!")
|
||||
sys.exit(1)
|
||||
|
||||
use_nfc = CtapPcscDevice and isinstance(dev, CtapPcscDevice)
|
||||
# Prefer UV token if supported
|
||||
uv = "discouraged"
|
||||
if client.info.options.get("pinUvAuthToken") or client.info.options.get("uv"):
|
||||
uv = "preferred"
|
||||
print("Authenticator supports UV token")
|
||||
|
||||
|
||||
# Prepare parameters for makeCredential
|
||||
server = Fido2Server({"id": "example.com", "name": "Example RP"})
|
||||
user = {"id": b"user_id", "name": "A. User"}
|
||||
create_options, state = server.register_begin(user, resident_key=True)
|
||||
# Prepare parameters for makeCredential
|
||||
create_options, state = server.register_begin(
|
||||
user,
|
||||
resident_key_requirement="required",
|
||||
user_verification=uv,
|
||||
authenticator_attachment="cross-platform",
|
||||
)
|
||||
|
||||
# Add CredBlob extension, attach data
|
||||
blob = os.urandom(32) # 32 random bytes
|
||||
create_options["publicKey"]["extensions"] = {"credBlob": blob}
|
||||
|
||||
# Prompt for PIN if needed
|
||||
pin = None
|
||||
if client.info.options.get("clientPin"):
|
||||
pin = getpass("Please enter PIN:")
|
||||
else:
|
||||
print("no pin")
|
||||
options = dict(create_options["publicKey"])
|
||||
options["extensions"] = {"credBlob": blob}
|
||||
|
||||
# Create a credential
|
||||
if not use_nfc:
|
||||
print("\nTouch your authenticator device now...\n")
|
||||
|
||||
result = client.make_credential(create_options["publicKey"], pin=pin)
|
||||
result = client.make_credential(options)
|
||||
|
||||
# Complete registration
|
||||
auth_data = server.register_complete(
|
||||
|
@ -101,16 +113,12 @@ print("New credential created, with the CredBlob extension.")
|
|||
|
||||
# Prepare parameters for getAssertion
|
||||
request_options, state = server.authenticate_begin()
|
||||
request_options["publicKey"]["extensions"] = {
|
||||
"getCredBlob": True,
|
||||
}
|
||||
options = dict(request_options["publicKey"])
|
||||
options["extensions"] = {"getCredBlob": True}
|
||||
|
||||
# Authenticate the credential
|
||||
if not use_nfc:
|
||||
print("\nTouch your authenticator device now...\n")
|
||||
|
||||
# Only one cred in allowCredentials, only one response.
|
||||
result = client.get_assertion(request_options["publicKey"], pin=pin).get_response(0)
|
||||
result = client.get_assertion(options).get_response(0)
|
||||
|
||||
blob_res = result.authenticator_data.extensions.get("credBlob")
|
||||
|
||||
|
|
|
@ -31,21 +31,26 @@ creates a new credential for it, and authenticates the credential.
|
|||
This works with both FIDO 2.0 devices as well as with U2F devices.
|
||||
On Windows, the native WebAuthn API will be used.
|
||||
"""
|
||||
from __future__ import print_function, absolute_import, unicode_literals
|
||||
|
||||
from fido2.hid import CtapHidDevice
|
||||
from fido2.ctap import CtapError, STATUS
|
||||
from fido2.client import Fido2Client, WindowsClient, PinRequiredError
|
||||
from fido2.client import Fido2Client, WindowsClient, UserInteraction
|
||||
from fido2.server import Fido2Server
|
||||
from getpass import getpass
|
||||
import sys
|
||||
import ctypes
|
||||
|
||||
|
||||
def on_keepalive(status):
|
||||
if status == STATUS.UPNEEDED: # Waiting for touch
|
||||
# Handle user interaction
|
||||
class CliInteraction(UserInteraction):
|
||||
def prompt_up(self):
|
||||
print("\nTouch your authenticator device now...\n")
|
||||
|
||||
def request_pin(self, permissions, rd_id):
|
||||
return getpass("Enter PIN: ")
|
||||
|
||||
def request_uv(self, permissions, rd_id):
|
||||
print("User Verification required.")
|
||||
return True
|
||||
|
||||
|
||||
uv = "discouraged"
|
||||
|
||||
|
@ -71,10 +76,10 @@ else:
|
|||
sys.exit(1)
|
||||
|
||||
# Set up a FIDO 2 client using the origin https://example.com
|
||||
client = Fido2Client(dev, "https://example.com")
|
||||
client = Fido2Client(dev, "https://example.com", user_interaction=CliInteraction())
|
||||
|
||||
# Prefer UV if supported and configured
|
||||
if client.info.options.get("uv"):
|
||||
if client.info.options.get("uv") or client.info.options.get("pinUvAuthToken"):
|
||||
uv = "preferred"
|
||||
print("Authenticator supports User Verification")
|
||||
|
||||
|
@ -83,24 +88,14 @@ server = Fido2Server({"id": "example.com", "name": "Example RP"}, attestation="d
|
|||
|
||||
user = {"id": b"user_id", "name": "A. User"}
|
||||
|
||||
|
||||
# Prepare parameters for makeCredential
|
||||
create_options, state = server.register_begin(
|
||||
user, user_verification=uv, authenticator_attachment="cross-platform"
|
||||
)
|
||||
|
||||
# Create a credential
|
||||
try:
|
||||
result = client.make_credential(
|
||||
create_options["publicKey"], on_keepalive=on_keepalive
|
||||
)
|
||||
except PinRequiredError as e:
|
||||
if isinstance(e.cause, CtapError):
|
||||
print(e.cause)
|
||||
result = client.make_credential(
|
||||
create_options["publicKey"],
|
||||
on_keepalive=on_keepalive,
|
||||
pin=getpass("Enter PIN: "),
|
||||
)
|
||||
result = client.make_credential(create_options["publicKey"])
|
||||
|
||||
# Complete registration
|
||||
auth_data = server.register_complete(
|
||||
|
@ -120,18 +115,7 @@ print("CREDENTIAL DATA:", auth_data.credential_data)
|
|||
request_options, state = server.authenticate_begin(credentials, user_verification=uv)
|
||||
|
||||
# Authenticate the credential
|
||||
try:
|
||||
result = client.get_assertion(
|
||||
request_options["publicKey"], on_keepalive=on_keepalive
|
||||
)
|
||||
except PinRequiredError as e:
|
||||
if isinstance(e.cause, CtapError):
|
||||
print(e.cause)
|
||||
result = client.get_assertion(
|
||||
request_options["publicKey"],
|
||||
on_keepalive=on_keepalive,
|
||||
pin=getpass("Enter PIN: "),
|
||||
)
|
||||
result = client.get_assertion(request_options["publicKey"])
|
||||
|
||||
# Only one cred in allowCredentials, only one response.
|
||||
result = result.get_response(0)
|
||||
|
|
|
@ -30,10 +30,9 @@ Connects to each attached FIDO device, and:
|
|||
1. If the device supports CBOR commands, perform a getInfo command.
|
||||
2. If the device supports WINK, perform the wink command.
|
||||
"""
|
||||
from __future__ import print_function, absolute_import, unicode_literals
|
||||
|
||||
from fido2.hid import CtapHidDevice, CAPABILITY
|
||||
from fido2.ctap2 import CTAP2
|
||||
from fido2.ctap2 import Ctap2
|
||||
|
||||
try:
|
||||
from fido2.pcsc import CtapPcscDevice
|
||||
|
@ -56,7 +55,7 @@ for dev in enumerate_devices():
|
|||
print("CTAPHID protocol version: %d" % dev.version)
|
||||
|
||||
if dev.capabilities & CAPABILITY.CBOR:
|
||||
ctap2 = CTAP2(dev)
|
||||
ctap2 = Ctap2(dev)
|
||||
info = ctap2.get_info()
|
||||
print("DEVICE INFO: %s" % info)
|
||||
else:
|
||||
|
|
|
@ -30,12 +30,9 @@ Connects to the first FIDO device found which supports the HmacSecret extension,
|
|||
creates a new credential for it with the extension enabled, and uses it to
|
||||
derive two separate secrets.
|
||||
"""
|
||||
from __future__ import print_function, absolute_import, unicode_literals
|
||||
|
||||
from fido2.hid import CtapHidDevice
|
||||
from fido2.client import Fido2Client
|
||||
from fido2.client import Fido2Client, UserInteraction
|
||||
from getpass import getpass
|
||||
from binascii import b2a_hex
|
||||
import sys
|
||||
import os
|
||||
|
||||
|
@ -53,32 +50,34 @@ def enumerate_devices():
|
|||
yield dev
|
||||
|
||||
|
||||
# Handle user interaction
|
||||
class CliInteraction(UserInteraction):
|
||||
def prompt_up(self):
|
||||
print("\nTouch your authenticator device now...\n")
|
||||
|
||||
def request_pin(self, permissions, rd_id):
|
||||
return getpass("Enter PIN: ")
|
||||
|
||||
def request_uv(self, permissions, rd_id):
|
||||
print("User Verification required.")
|
||||
return True
|
||||
|
||||
|
||||
# Locate a device
|
||||
for dev in enumerate_devices():
|
||||
client = Fido2Client(dev, "https://example.com")
|
||||
client = Fido2Client(dev, "https://example.com", user_interaction=CliInteraction())
|
||||
if "hmac-secret" in client.info.extensions:
|
||||
break
|
||||
else:
|
||||
print("No Authenticator with the HmacSecret extension found!")
|
||||
sys.exit(1)
|
||||
|
||||
use_nfc = CtapPcscDevice and isinstance(dev, CtapPcscDevice)
|
||||
|
||||
# Prepare parameters for makeCredential
|
||||
rp = {"id": "example.com", "name": "Example RP"}
|
||||
user = {"id": b"user_id", "name": "A. User"}
|
||||
challenge = b"Y2hhbGxlbmdl"
|
||||
|
||||
# Prompt for PIN if needed
|
||||
pin = None
|
||||
if client.info.options.get("clientPin"):
|
||||
pin = getpass("Please enter PIN:")
|
||||
else:
|
||||
print("no pin")
|
||||
|
||||
# Create a credential with a HmacSecret
|
||||
if not use_nfc:
|
||||
print("\nTouch your authenticator device now...\n")
|
||||
result = client.make_credential(
|
||||
{
|
||||
"rp": rp,
|
||||
|
@ -87,7 +86,6 @@ result = client.make_credential(
|
|||
"pubKeyCredParams": [{"type": "public-key", "alg": -7}],
|
||||
"extensions": {"hmacCreateSecret": True},
|
||||
},
|
||||
pin=pin,
|
||||
)
|
||||
|
||||
# HmacSecret result:
|
||||
|
@ -104,12 +102,9 @@ allow_list = [{"type": "public-key", "id": credential.credential_id}]
|
|||
|
||||
# Generate a salt for HmacSecret:
|
||||
salt = os.urandom(32)
|
||||
print("Authenticate with salt:", b2a_hex(salt))
|
||||
print("Authenticate with salt:", salt.hex())
|
||||
|
||||
# Authenticate the credential
|
||||
if not use_nfc:
|
||||
print("\nTouch your authenticator device now...\n")
|
||||
|
||||
result = client.get_assertion(
|
||||
{
|
||||
"rpId": rp["id"],
|
||||
|
@ -117,22 +112,18 @@ result = client.get_assertion(
|
|||
"allowCredentials": allow_list,
|
||||
"extensions": {"hmacGetSecret": {"salt1": salt}},
|
||||
},
|
||||
pin=pin,
|
||||
).get_response(
|
||||
0
|
||||
) # Only one cred in allowList, only one response.
|
||||
|
||||
output1 = result.extension_results["hmacGetSecret"]["output1"]
|
||||
print("Authenticated, secret:", b2a_hex(output1))
|
||||
print("Authenticated, secret:", output1.hex())
|
||||
|
||||
# Authenticate again, using two salts to generate two secrets:
|
||||
|
||||
# Generate a second salt for HmacSecret:
|
||||
salt2 = os.urandom(32)
|
||||
print("Authenticate with second salt:", b2a_hex(salt2))
|
||||
|
||||
if not use_nfc:
|
||||
print("\nTouch your authenticator device now...\n")
|
||||
print("Authenticate with second salt:", salt2.hex())
|
||||
|
||||
# The first salt is reused, which should result in the same secret.
|
||||
result = client.get_assertion(
|
||||
|
@ -142,11 +133,10 @@ result = client.get_assertion(
|
|||
"allowCredentials": allow_list,
|
||||
"extensions": {"hmacGetSecret": {"salt1": salt, "salt2": salt2}},
|
||||
},
|
||||
pin=pin,
|
||||
).get_response(
|
||||
0
|
||||
) # One cred in allowCredentials, single response.
|
||||
|
||||
output = result.extension_results["hmacGetSecret"]
|
||||
print("Old secret:", b2a_hex(output["output1"]))
|
||||
print("New secret:", b2a_hex(output["output2"]))
|
||||
print("Old secret:", output["output1"].hex())
|
||||
print("New secret:", output["output2"].hex())
|
||||
|
|
|
@ -31,11 +31,8 @@ creates a new credential for it, and authenticates the credential.
|
|||
This works with both FIDO 2.0 devices as well as with U2F devices.
|
||||
On Windows, the native WebAuthn API will be used.
|
||||
"""
|
||||
from __future__ import print_function, absolute_import, unicode_literals
|
||||
|
||||
from fido2.hid import CtapHidDevice
|
||||
from fido2.ctap2 import ClientPin, LargeBlobs
|
||||
from fido2.client import Fido2Client
|
||||
from fido2.client import Fido2Client, UserInteraction
|
||||
from fido2.server import Fido2Server
|
||||
from getpass import getpass
|
||||
import sys
|
||||
|
@ -55,19 +52,28 @@ def enumerate_devices():
|
|||
yield dev
|
||||
|
||||
|
||||
# Handle user interaction
|
||||
class CliInteraction(UserInteraction):
|
||||
def prompt_up(self):
|
||||
print("\nTouch your authenticator device now...\n")
|
||||
|
||||
def request_pin(self, permissions, rd_id):
|
||||
return getpass("Enter PIN: ")
|
||||
|
||||
def request_uv(self, permissions, rd_id):
|
||||
print("User Verification required.")
|
||||
return True
|
||||
|
||||
|
||||
# Locate a device
|
||||
for dev in enumerate_devices():
|
||||
client = Fido2Client(dev, "https://example.com")
|
||||
client = Fido2Client(dev, "https://example.com", user_interaction=CliInteraction())
|
||||
if "largeBlobKey" in client.info.extensions:
|
||||
break
|
||||
else:
|
||||
print("No Authenticator with the largeBlobKey extension found!")
|
||||
sys.exit(1)
|
||||
|
||||
use_nfc = CtapPcscDevice and isinstance(dev, CtapPcscDevice)
|
||||
|
||||
pin = None
|
||||
uv = "discouraged"
|
||||
|
||||
if not client.info.options.get("largeBlobs"):
|
||||
print("Authenticator does not support large blobs!")
|
||||
|
@ -75,37 +81,31 @@ if not client.info.options.get("largeBlobs"):
|
|||
|
||||
|
||||
# Prefer UV token if supported
|
||||
if client.info.options.get("pinUvAuthToken") and client.info.options.get("uv"):
|
||||
uv = "discouraged"
|
||||
if client.info.options.get("pinUvAuthToken") or client.info.options.get("uv"):
|
||||
uv = "preferred"
|
||||
print("Authenticator supports UV token")
|
||||
elif client.info.options.get("clientPin"):
|
||||
# Prompt for PIN if needed
|
||||
pin = getpass("Please enter PIN: ")
|
||||
else:
|
||||
print("PIN not set, won't use")
|
||||
|
||||
|
||||
server = Fido2Server({"id": "example.com", "name": "Example RP"}, attestation="direct")
|
||||
|
||||
server = Fido2Server({"id": "example.com", "name": "Example RP"})
|
||||
user = {"id": b"user_id", "name": "A. User"}
|
||||
|
||||
# Prepare parameters for makeCredential
|
||||
create_options, state = server.register_begin(
|
||||
user,
|
||||
resident_key=True,
|
||||
resident_key_requirement="required",
|
||||
user_verification=uv,
|
||||
authenticator_attachment="cross-platform",
|
||||
)
|
||||
|
||||
# Enable largeBlobKey
|
||||
options = create_options["publicKey"]
|
||||
options.extensions = {"largeBlobKey": True}
|
||||
print("Creating a credential with LargeBlob support...")
|
||||
|
||||
# Enable largeBlob
|
||||
options = dict(create_options["publicKey"])
|
||||
options["extensions"] = {"largeBlob": {"support": "required"}}
|
||||
|
||||
# Create a credential
|
||||
print("\nTouch your authenticator device now...\n")
|
||||
|
||||
result = client.make_credential(options, pin=pin)
|
||||
key = result.attestation_object.large_blob_key
|
||||
result = client.make_credential(options)
|
||||
|
||||
# Complete registration
|
||||
auth_data = server.register_complete(
|
||||
|
@ -113,46 +113,37 @@ auth_data = server.register_complete(
|
|||
)
|
||||
credentials = [auth_data.credential_data]
|
||||
|
||||
print("New credential created!")
|
||||
print("Large Blob Key:", key)
|
||||
if not result.extension_results.get("supported"):
|
||||
print("Credential does not support largeBlob, failure!")
|
||||
sys.exit(1)
|
||||
|
||||
client_pin = ClientPin(client.ctap2)
|
||||
if pin:
|
||||
token = client_pin.get_pin_token(pin, ClientPin.PERMISSION.LARGE_BLOB_WRITE)
|
||||
else:
|
||||
token = client_pin.get_uv_token(ClientPin.PERMISSION.LARGE_BLOB_WRITE)
|
||||
large_blobs = LargeBlobs(client.ctap2, client_pin.protocol, token)
|
||||
|
||||
# Write a large blob
|
||||
print("Writing a large blob...")
|
||||
large_blobs.put_blob(key, b"Here is some data to store!")
|
||||
print("Credential created! Writing a blob...")
|
||||
|
||||
# Prepare parameters for getAssertion
|
||||
request_options, state = server.authenticate_begin(user_verification=uv)
|
||||
|
||||
# Enable largeBlobKey
|
||||
options = request_options["publicKey"]
|
||||
options.extensions = {"largeBlobKey": True}
|
||||
# Write a large blob
|
||||
options = dict(request_options["publicKey"])
|
||||
options["extensions"] = {"largeBlob": {"write": b"Here is some data to store!"}}
|
||||
|
||||
# Authenticate the credential
|
||||
print("\nTouch your authenticator device now...\n")
|
||||
selection = client.get_assertion(options)
|
||||
|
||||
selection = client.get_assertion(options, pin=pin)
|
||||
# Only one cred in allowCredentials, only one response.
|
||||
assertion = selection.get_assertions()[0]
|
||||
result = selection.get_response(0)
|
||||
if not result.extension_results.get("written"):
|
||||
print("Failed to write blob!")
|
||||
sys.exit(1)
|
||||
|
||||
# This should match the key from MakeCredential.
|
||||
key = assertion.large_blob_key
|
||||
print("Blob written! Reading back the blob...")
|
||||
|
||||
# Get a fresh PIN token
|
||||
if pin:
|
||||
token = client_pin.get_pin_token(pin, ClientPin.PERMISSION.LARGE_BLOB_WRITE)
|
||||
else:
|
||||
token = client_pin.get_uv_token(ClientPin.PERMISSION.LARGE_BLOB_WRITE)
|
||||
large_blobs = LargeBlobs(client.ctap2, client_pin.protocol, token)
|
||||
# Read the blob
|
||||
options = dict(request_options["publicKey"])
|
||||
options["extensions"] = {"largeBlob": {"read": True}}
|
||||
|
||||
blob = large_blobs.get_blob(key)
|
||||
print("Read blob", blob)
|
||||
# Authenticate the credential
|
||||
selection = client.get_assertion(options)
|
||||
|
||||
# Clean up
|
||||
large_blobs.delete_blob(key)
|
||||
# Only one cred in allowCredentials, only one response.
|
||||
result = selection.get_response(0)
|
||||
print("Read blob:", result.extension_results.get("blob"))
|
||||
|
|
|
@ -30,11 +30,10 @@ Connects to each FIDO device found, and causes them all to blink until the user
|
|||
triggers one to select it. A new credential is created for that authenticator,
|
||||
and the operation is cancelled for the others.
|
||||
"""
|
||||
from __future__ import print_function, absolute_import, unicode_literals
|
||||
|
||||
from fido2.hid import CtapHidDevice, STATUS
|
||||
from fido2.client import Fido2Client, ClientError
|
||||
from fido2.hid import CtapHidDevice
|
||||
from fido2.client import Fido2Client, ClientError, UserInteraction
|
||||
from threading import Event, Thread
|
||||
from getpass import getpass
|
||||
import sys
|
||||
|
||||
# Locate a device
|
||||
|
@ -43,58 +42,73 @@ if not devs:
|
|||
print("No FIDO device found")
|
||||
sys.exit(1)
|
||||
|
||||
clients = [Fido2Client(d, "https://example.com") for d in devs]
|
||||
|
||||
# Handle user interaction
|
||||
class CliInteraction(UserInteraction):
|
||||
def prompt_up(self):
|
||||
print("\nTouch your authenticator device now...\n")
|
||||
|
||||
def request_pin(self, permissions, rd_id):
|
||||
return getpass("Enter PIN: ")
|
||||
|
||||
def request_uv(self, permissions, rd_id):
|
||||
print("User Verification required.")
|
||||
return True
|
||||
|
||||
|
||||
cli_interaction = CliInteraction()
|
||||
clients = [
|
||||
Fido2Client(d, "https://example.com", user_interaction=cli_interaction)
|
||||
for d in devs
|
||||
]
|
||||
|
||||
# Prepare parameters for makeCredential
|
||||
rp = {"id": "example.com", "name": "Example RP"}
|
||||
user = {"id": b"user_id", "name": "A. User"}
|
||||
challenge = b"Y2hhbGxlbmdl"
|
||||
cancel = Event()
|
||||
attestation, client_data = None, None
|
||||
|
||||
has_prompted = False
|
||||
selected = None
|
||||
|
||||
|
||||
def on_keepalive(status):
|
||||
global has_prompted # Don't prompt for each device.
|
||||
if status == STATUS.UPNEEDED and not has_prompted:
|
||||
print("\nTouch your authenticator device now...\n")
|
||||
has_prompted = True
|
||||
|
||||
|
||||
def work(client):
|
||||
global attestation, client_data
|
||||
def select(client):
|
||||
global selected
|
||||
try:
|
||||
attestation, client_data = client.make_credential(
|
||||
{
|
||||
"rp": rp,
|
||||
"user": user,
|
||||
"challenge": challenge,
|
||||
"pubKeyCredParams": [{"type": "public-key", "alg": -7}],
|
||||
},
|
||||
event=cancel,
|
||||
on_keepalive=on_keepalive,
|
||||
)
|
||||
client.selection(cancel)
|
||||
selected = client
|
||||
except ClientError as e:
|
||||
if e.code != ClientError.ERR.TIMEOUT:
|
||||
raise
|
||||
else:
|
||||
return
|
||||
cancel.set()
|
||||
print("New credential created!")
|
||||
print("ATTESTATION OBJECT:", attestation)
|
||||
print()
|
||||
print("CREDENTIAL DATA:", attestation.auth_data.credential_data)
|
||||
|
||||
|
||||
print("\nTouch the authenticator you wish to use...\n")
|
||||
|
||||
threads = []
|
||||
for client in clients:
|
||||
t = Thread(target=work, args=(client,))
|
||||
t = Thread(target=select, args=(client,))
|
||||
threads.append(t)
|
||||
t.start()
|
||||
|
||||
for t in threads:
|
||||
t.join()
|
||||
|
||||
if not cancel.is_set():
|
||||
if cancel.is_set():
|
||||
print("Authenticator selected, making credential...")
|
||||
|
||||
result = selected.make_credential(
|
||||
{
|
||||
"rp": rp,
|
||||
"user": user,
|
||||
"challenge": challenge,
|
||||
"pubKeyCredParams": [{"type": "public-key", "alg": -7}],
|
||||
},
|
||||
)
|
||||
|
||||
print("New credential created!")
|
||||
print("ATTESTATION OBJECT:", result.attestation_object)
|
||||
print()
|
||||
print("CREDENTIAL DATA:", result.attestation_object.auth_data.credential_data)
|
||||
else:
|
||||
print("Operation timed out!")
|
||||
|
|
|
@ -31,10 +31,8 @@ creates a new credential for it, and authenticates the credential.
|
|||
This works with both FIDO 2.0 devices as well as with U2F devices.
|
||||
On Windows, the native WebAuthn API will be used.
|
||||
"""
|
||||
from __future__ import print_function, absolute_import, unicode_literals
|
||||
|
||||
from fido2.hid import CtapHidDevice
|
||||
from fido2.client import Fido2Client, WindowsClient
|
||||
from fido2.client import Fido2Client, WindowsClient, UserInteraction
|
||||
from fido2.server import Fido2Server
|
||||
from getpass import getpass
|
||||
import sys
|
||||
|
@ -54,8 +52,19 @@ def enumerate_devices():
|
|||
yield dev
|
||||
|
||||
|
||||
use_prompt = False
|
||||
pin = None
|
||||
# Handle user interaction
|
||||
class CliInteraction(UserInteraction):
|
||||
def prompt_up(self):
|
||||
print("\nTouch your authenticator device now...\n")
|
||||
|
||||
def request_pin(self, permissions, rd_id):
|
||||
return getpass("Enter PIN: ")
|
||||
|
||||
def request_uv(self, permissions, rd_id):
|
||||
print("User Verification required.")
|
||||
return True
|
||||
|
||||
|
||||
uv = "discouraged"
|
||||
|
||||
if WindowsClient.is_available() and not ctypes.windll.shell32.IsUserAnAdmin():
|
||||
|
@ -64,9 +73,10 @@ if WindowsClient.is_available() and not ctypes.windll.shell32.IsUserAnAdmin():
|
|||
else:
|
||||
# Locate a device
|
||||
for dev in enumerate_devices():
|
||||
client = Fido2Client(dev, "https://example.com")
|
||||
client = Fido2Client(
|
||||
dev, "https://example.com", user_interaction=CliInteraction()
|
||||
)
|
||||
if client.info.options.get("rk"):
|
||||
use_prompt = not (CtapPcscDevice and isinstance(dev, CtapPcscDevice))
|
||||
break
|
||||
else:
|
||||
print("No Authenticator with support for resident key found!")
|
||||
|
@ -76,11 +86,6 @@ else:
|
|||
if client.info.options.get("uv"):
|
||||
uv = "preferred"
|
||||
print("Authenticator supports User Verification")
|
||||
elif client.info.options.get("clientPin"):
|
||||
# Prompt for PIN if needed
|
||||
pin = getpass("Please enter PIN: ")
|
||||
else:
|
||||
print("PIN not set, won't use")
|
||||
|
||||
|
||||
server = Fido2Server({"id": "example.com", "name": "Example RP"}, attestation="direct")
|
||||
|
@ -90,16 +95,13 @@ user = {"id": b"user_id", "name": "A. User"}
|
|||
# Prepare parameters for makeCredential
|
||||
create_options, state = server.register_begin(
|
||||
user,
|
||||
resident_key=True,
|
||||
resident_key_requirement="required",
|
||||
user_verification=uv,
|
||||
authenticator_attachment="cross-platform",
|
||||
)
|
||||
|
||||
# Create a credential
|
||||
if use_prompt:
|
||||
print("\nTouch your authenticator device now...\n")
|
||||
|
||||
result = client.make_credential(create_options["publicKey"], pin=pin)
|
||||
result = client.make_credential(create_options["publicKey"])
|
||||
|
||||
|
||||
# Complete registration
|
||||
|
@ -120,10 +122,7 @@ print("CREDENTIAL DATA:", auth_data.credential_data)
|
|||
request_options, state = server.authenticate_begin(user_verification=uv)
|
||||
|
||||
# Authenticate the credential
|
||||
if use_prompt:
|
||||
print("\nTouch your authenticator device now...\n")
|
||||
|
||||
selection = client.get_assertion(request_options["publicKey"], pin=pin)
|
||||
selection = client.get_assertion(request_options["publicKey"])
|
||||
result = selection.get_response(0) # There may be multiple responses, get the first.
|
||||
|
||||
print("USER ID:", result.user_handle)
|
||||
|
|
|
@ -1,13 +0,0 @@
|
|||
[[source]]
|
||||
verify_ssl = true
|
||||
name = "pypi"
|
||||
url = "https://pypi.org/simple"
|
||||
|
||||
[packages]
|
||||
flask = "*"
|
||||
pyOpenSSL = "*"
|
||||
"5448283" = {editable = true, path = "./../.."}
|
||||
|
||||
[scripts]
|
||||
server = "python server.py"
|
||||
server-u2f = "python server-u2f.py"
|
|
@ -1,232 +0,0 @@
|
|||
{
|
||||
"_meta": {
|
||||
"hash": {
|
||||
"sha256": "bf2fa9b63243b7172b84ae28c7fc3abc291ed1bb91f4919f2b2c47bfc0ccdd4d"
|
||||
},
|
||||
"pipfile-spec": 6,
|
||||
"requires": {},
|
||||
"sources": [
|
||||
{
|
||||
"name": "pypi",
|
||||
"url": "https://pypi.org/simple",
|
||||
"verify_ssl": true
|
||||
}
|
||||
]
|
||||
},
|
||||
"default": {
|
||||
"5448283": {
|
||||
"editable": true,
|
||||
"path": "./../.."
|
||||
},
|
||||
"cffi": {
|
||||
"hashes": [
|
||||
"sha256:00c878c90cb53ccfaae6b8bc18ad05d2036553e6d9d1d9dbcf323bbe83854ca3",
|
||||
"sha256:0104fb5ae2391d46a4cb082abdd5c69ea4eab79d8d44eaaf79f1b1fd806ee4c2",
|
||||
"sha256:06c48159c1abed75c2e721b1715c379fa3200c7784271b3c46df01383b593636",
|
||||
"sha256:0808014eb713677ec1292301ea4c81ad277b6cdf2fdd90fd540af98c0b101d20",
|
||||
"sha256:10dffb601ccfb65262a27233ac273d552ddc4d8ae1bf93b21c94b8511bffe728",
|
||||
"sha256:14cd121ea63ecdae71efa69c15c5543a4b5fbcd0bbe2aad864baca0063cecf27",
|
||||
"sha256:17771976e82e9f94976180f76468546834d22a7cc404b17c22df2a2c81db0c66",
|
||||
"sha256:181dee03b1170ff1969489acf1c26533710231c58f95534e3edac87fff06c443",
|
||||
"sha256:23cfe892bd5dd8941608f93348c0737e369e51c100d03718f108bf1add7bd6d0",
|
||||
"sha256:263cc3d821c4ab2213cbe8cd8b355a7f72a8324577dc865ef98487c1aeee2bc7",
|
||||
"sha256:2756c88cbb94231c7a147402476be2c4df2f6078099a6f4a480d239a8817ae39",
|
||||
"sha256:27c219baf94952ae9d50ec19651a687b826792055353d07648a5695413e0c605",
|
||||
"sha256:2a23af14f408d53d5e6cd4e3d9a24ff9e05906ad574822a10563efcef137979a",
|
||||
"sha256:31fb708d9d7c3f49a60f04cf5b119aeefe5644daba1cd2a0fe389b674fd1de37",
|
||||
"sha256:3415c89f9204ee60cd09b235810be700e993e343a408693e80ce7f6a40108029",
|
||||
"sha256:3773c4d81e6e818df2efbc7dd77325ca0dcb688116050fb2b3011218eda36139",
|
||||
"sha256:3b96a311ac60a3f6be21d2572e46ce67f09abcf4d09344c49274eb9e0bf345fc",
|
||||
"sha256:3f7d084648d77af029acb79a0ff49a0ad7e9d09057a9bf46596dac9514dc07df",
|
||||
"sha256:41d45de54cd277a7878919867c0f08b0cf817605e4eb94093e7516505d3c8d14",
|
||||
"sha256:4238e6dab5d6a8ba812de994bbb0a79bddbdf80994e4ce802b6f6f3142fcc880",
|
||||
"sha256:45db3a33139e9c8f7c09234b5784a5e33d31fd6907800b316decad50af323ff2",
|
||||
"sha256:45e8636704eacc432a206ac7345a5d3d2c62d95a507ec70d62f23cd91770482a",
|
||||
"sha256:4958391dbd6249d7ad855b9ca88fae690783a6be9e86df65865058ed81fc860e",
|
||||
"sha256:4a306fa632e8f0928956a41fa8e1d6243c71e7eb59ffbd165fc0b41e316b2474",
|
||||
"sha256:57e9ac9ccc3101fac9d6014fba037473e4358ef4e89f8e181f8951a2c0162024",
|
||||
"sha256:59888172256cac5629e60e72e86598027aca6bf01fa2465bdb676d37636573e8",
|
||||
"sha256:5e069f72d497312b24fcc02073d70cb989045d1c91cbd53979366077959933e0",
|
||||
"sha256:64d4ec9f448dfe041705426000cc13e34e6e5bb13736e9fd62e34a0b0c41566e",
|
||||
"sha256:6dc2737a3674b3e344847c8686cf29e500584ccad76204efea14f451d4cc669a",
|
||||
"sha256:74fdfdbfdc48d3f47148976f49fab3251e550a8720bebc99bf1483f5bfb5db3e",
|
||||
"sha256:75e4024375654472cc27e91cbe9eaa08567f7fbdf822638be2814ce059f58032",
|
||||
"sha256:786902fb9ba7433aae840e0ed609f45c7bcd4e225ebb9c753aa39725bb3e6ad6",
|
||||
"sha256:8b6c2ea03845c9f501ed1313e78de148cd3f6cad741a75d43a29b43da27f2e1e",
|
||||
"sha256:91d77d2a782be4274da750752bb1650a97bfd8f291022b379bb8e01c66b4e96b",
|
||||
"sha256:91ec59c33514b7c7559a6acda53bbfe1b283949c34fe7440bcf917f96ac0723e",
|
||||
"sha256:920f0d66a896c2d99f0adbb391f990a84091179542c205fa53ce5787aff87954",
|
||||
"sha256:a5263e363c27b653a90078143adb3d076c1a748ec9ecc78ea2fb916f9b861962",
|
||||
"sha256:abb9a20a72ac4e0fdb50dae135ba5e77880518e742077ced47eb1499e29a443c",
|
||||
"sha256:c2051981a968d7de9dd2d7b87bcb9c939c74a34626a6e2f8181455dd49ed69e4",
|
||||
"sha256:c21c9e3896c23007803a875460fb786118f0cdd4434359577ea25eb556e34c55",
|
||||
"sha256:c2502a1a03b6312837279c8c1bd3ebedf6c12c4228ddbad40912d671ccc8a962",
|
||||
"sha256:d4d692a89c5cf08a8557fdeb329b82e7bf609aadfaed6c0d79f5a449a3c7c023",
|
||||
"sha256:da5db4e883f1ce37f55c667e5c0de439df76ac4cb55964655906306918e7363c",
|
||||
"sha256:e7022a66d9b55e93e1a845d8c9eba2a1bebd4966cd8bfc25d9cd07d515b33fa6",
|
||||
"sha256:ef1f279350da2c586a69d32fc8733092fd32cc8ac95139a00377841f59a3f8d8",
|
||||
"sha256:f54a64f8b0c8ff0b64d18aa76675262e1700f3995182267998c31ae974fbc382",
|
||||
"sha256:f5c7150ad32ba43a07c4479f40241756145a1f03b43480e058cfd862bf5041c7",
|
||||
"sha256:f6f824dc3bce0edab5f427efcfb1d63ee75b6fcb7282900ccaf925be84efb0fc",
|
||||
"sha256:fd8a250edc26254fe5b33be00402e6d287f562b6a5b2152dec302fa15bb3e997",
|
||||
"sha256:ffaa5c925128e29efbde7301d8ecaf35c8c60ffbcd6a1ffd3a552177c8e5e796"
|
||||
],
|
||||
"version": "==1.15.0"
|
||||
},
|
||||
"click": {
|
||||
"hashes": [
|
||||
"sha256:353f466495adaeb40b6b5f592f9f91cb22372351c84caeb068132442a4518ef3",
|
||||
"sha256:410e932b050f5eed773c4cda94de75971c89cdb3155a72a0831139a79e5ecb5b"
|
||||
],
|
||||
"markers": "python_version >= '3.6'",
|
||||
"version": "==8.0.3"
|
||||
},
|
||||
"cryptography": {
|
||||
"hashes": [
|
||||
"sha256:07bb7fbfb5de0980590ddfc7f13081520def06dc9ed214000ad4372fb4e3c7f6",
|
||||
"sha256:18d90f4711bf63e2fb21e8c8e51ed8189438e6b35a6d996201ebd98a26abbbe6",
|
||||
"sha256:1ed82abf16df40a60942a8c211251ae72858b25b7421ce2497c2eb7a1cee817c",
|
||||
"sha256:22a38e96118a4ce3b97509443feace1d1011d0571fae81fc3ad35f25ba3ea999",
|
||||
"sha256:2d69645f535f4b2c722cfb07a8eab916265545b3475fdb34e0be2f4ee8b0b15e",
|
||||
"sha256:4a2d0e0acc20ede0f06ef7aa58546eee96d2592c00f450c9acb89c5879b61992",
|
||||
"sha256:54b2605e5475944e2213258e0ab8696f4f357a31371e538ef21e8d61c843c28d",
|
||||
"sha256:7075b304cd567694dc692ffc9747f3e9cb393cc4aa4fb7b9f3abd6f5c4e43588",
|
||||
"sha256:7b7ceeff114c31f285528ba8b390d3e9cfa2da17b56f11d366769a807f17cbaa",
|
||||
"sha256:7eba2cebca600a7806b893cb1d541a6e910afa87e97acf2021a22b32da1df52d",
|
||||
"sha256:928185a6d1ccdb816e883f56ebe92e975a262d31cc536429041921f8cb5a62fd",
|
||||
"sha256:9933f28f70d0517686bd7de36166dda42094eac49415459d9bdf5e7df3e0086d",
|
||||
"sha256:a688ebcd08250eab5bb5bca318cc05a8c66de5e4171a65ca51db6bd753ff8953",
|
||||
"sha256:abb5a361d2585bb95012a19ed9b2c8f412c5d723a9836418fab7aaa0243e67d2",
|
||||
"sha256:c10c797ac89c746e488d2ee92bd4abd593615694ee17b2500578b63cad6b93a8",
|
||||
"sha256:ced40344e811d6abba00295ced98c01aecf0c2de39481792d87af4fa58b7b4d6",
|
||||
"sha256:d57e0cdc1b44b6cdf8af1d01807db06886f10177469312fbde8f44ccbb284bc9",
|
||||
"sha256:d99915d6ab265c22873f1b4d6ea5ef462ef797b4140be4c9d8b179915e0985c6",
|
||||
"sha256:eb80e8a1f91e4b7ef8b33041591e6d89b2b8e122d787e87eeb2b08da71bb16ad",
|
||||
"sha256:ebeddd119f526bcf323a89f853afb12e225902a24d29b55fe18dd6fcb2838a76"
|
||||
],
|
||||
"markers": "python_version >= '3.6'",
|
||||
"version": "==35.0.0"
|
||||
},
|
||||
"fido2": {
|
||||
"editable": true,
|
||||
"path": "./../.."
|
||||
},
|
||||
"flask": {
|
||||
"hashes": [
|
||||
"sha256:7b2fb8e934ddd50731893bdcdb00fc8c0315916f9fcd50d22c7cc1a95ab634e2",
|
||||
"sha256:cb90f62f1d8e4dc4621f52106613488b5ba826b2e1e10a33eac92f723093ab6a"
|
||||
],
|
||||
"index": "pypi",
|
||||
"version": "==2.0.2"
|
||||
},
|
||||
"itsdangerous": {
|
||||
"hashes": [
|
||||
"sha256:5174094b9637652bdb841a3029700391451bd092ba3db90600dea710ba28e97c",
|
||||
"sha256:9e724d68fc22902a1435351f84c3fb8623f303fffcc566a4cb952df8c572cff0"
|
||||
],
|
||||
"markers": "python_version >= '3.6'",
|
||||
"version": "==2.0.1"
|
||||
},
|
||||
"jinja2": {
|
||||
"hashes": [
|
||||
"sha256:827a0e32839ab1600d4eb1c4c33ec5a8edfbc5cb42dafa13b81f182f97784b45",
|
||||
"sha256:8569982d3f0889eed11dd620c706d39b60c36d6d25843961f33f77fb6bc6b20c"
|
||||
],
|
||||
"markers": "python_version >= '3.6'",
|
||||
"version": "==3.0.2"
|
||||
},
|
||||
"markupsafe": {
|
||||
"hashes": [
|
||||
"sha256:01a9b8ea66f1658938f65b93a85ebe8bc016e6769611be228d797c9d998dd298",
|
||||
"sha256:023cb26ec21ece8dc3907c0e8320058b2e0cb3c55cf9564da612bc325bed5e64",
|
||||
"sha256:0446679737af14f45767963a1a9ef7620189912317d095f2d9ffa183a4d25d2b",
|
||||
"sha256:0717a7390a68be14b8c793ba258e075c6f4ca819f15edfc2a3a027c823718567",
|
||||
"sha256:0955295dd5eec6cb6cc2fe1698f4c6d84af2e92de33fbcac4111913cd100a6ff",
|
||||
"sha256:0d4b31cc67ab36e3392bbf3862cfbadac3db12bdd8b02a2731f509ed5b829724",
|
||||
"sha256:10f82115e21dc0dfec9ab5c0223652f7197feb168c940f3ef61563fc2d6beb74",
|
||||
"sha256:168cd0a3642de83558a5153c8bd34f175a9a6e7f6dc6384b9655d2697312a646",
|
||||
"sha256:1d609f577dc6e1aa17d746f8bd3c31aa4d258f4070d61b2aa5c4166c1539de35",
|
||||
"sha256:1f2ade76b9903f39aa442b4aadd2177decb66525062db244b35d71d0ee8599b6",
|
||||
"sha256:2a7d351cbd8cfeb19ca00de495e224dea7e7d919659c2841bbb7f420ad03e2d6",
|
||||
"sha256:2d7d807855b419fc2ed3e631034685db6079889a1f01d5d9dac950f764da3dad",
|
||||
"sha256:2ef54abee730b502252bcdf31b10dacb0a416229b72c18b19e24a4509f273d26",
|
||||
"sha256:36bc903cbb393720fad60fc28c10de6acf10dc6cc883f3e24ee4012371399a38",
|
||||
"sha256:37205cac2a79194e3750b0af2a5720d95f786a55ce7df90c3af697bfa100eaac",
|
||||
"sha256:3c112550557578c26af18a1ccc9e090bfe03832ae994343cfdacd287db6a6ae7",
|
||||
"sha256:3dd007d54ee88b46be476e293f48c85048603f5f516008bee124ddd891398ed6",
|
||||
"sha256:47ab1e7b91c098ab893b828deafa1203de86d0bc6ab587b160f78fe6c4011f75",
|
||||
"sha256:49e3ceeabbfb9d66c3aef5af3a60cc43b85c33df25ce03d0031a608b0a8b2e3f",
|
||||
"sha256:4efca8f86c54b22348a5467704e3fec767b2db12fc39c6d963168ab1d3fc9135",
|
||||
"sha256:53edb4da6925ad13c07b6d26c2a852bd81e364f95301c66e930ab2aef5b5ddd8",
|
||||
"sha256:5855f8438a7d1d458206a2466bf82b0f104a3724bf96a1c781ab731e4201731a",
|
||||
"sha256:594c67807fb16238b30c44bdf74f36c02cdf22d1c8cda91ef8a0ed8dabf5620a",
|
||||
"sha256:5bb28c636d87e840583ee3adeb78172efc47c8b26127267f54a9c0ec251d41a9",
|
||||
"sha256:60bf42e36abfaf9aff1f50f52644b336d4f0a3fd6d8a60ca0d054ac9f713a864",
|
||||
"sha256:611d1ad9a4288cf3e3c16014564df047fe08410e628f89805e475368bd304914",
|
||||
"sha256:6557b31b5e2c9ddf0de32a691f2312a32f77cd7681d8af66c2692efdbef84c18",
|
||||
"sha256:693ce3f9e70a6cf7d2fb9e6c9d8b204b6b39897a2c4a1aa65728d5ac97dcc1d8",
|
||||
"sha256:6a7fae0dd14cf60ad5ff42baa2e95727c3d81ded453457771d02b7d2b3f9c0c2",
|
||||
"sha256:6c4ca60fa24e85fe25b912b01e62cb969d69a23a5d5867682dd3e80b5b02581d",
|
||||
"sha256:6fcf051089389abe060c9cd7caa212c707e58153afa2c649f00346ce6d260f1b",
|
||||
"sha256:7d91275b0245b1da4d4cfa07e0faedd5b0812efc15b702576d103293e252af1b",
|
||||
"sha256:905fec760bd2fa1388bb5b489ee8ee5f7291d692638ea5f67982d968366bef9f",
|
||||
"sha256:97383d78eb34da7e1fa37dd273c20ad4320929af65d156e35a5e2d89566d9dfb",
|
||||
"sha256:984d76483eb32f1bcb536dc27e4ad56bba4baa70be32fa87152832cdd9db0833",
|
||||
"sha256:99df47edb6bda1249d3e80fdabb1dab8c08ef3975f69aed437cb69d0a5de1e28",
|
||||
"sha256:a30e67a65b53ea0a5e62fe23682cfe22712e01f453b95233b25502f7c61cb415",
|
||||
"sha256:ab3ef638ace319fa26553db0624c4699e31a28bb2a835c5faca8f8acf6a5a902",
|
||||
"sha256:add36cb2dbb8b736611303cd3bfcee00afd96471b09cda130da3581cbdc56a6d",
|
||||
"sha256:b2f4bf27480f5e5e8ce285a8c8fd176c0b03e93dcc6646477d4630e83440c6a9",
|
||||
"sha256:b7f2d075102dc8c794cbde1947378051c4e5180d52d276987b8d28a3bd58c17d",
|
||||
"sha256:baa1a4e8f868845af802979fcdbf0bb11f94f1cb7ced4c4b8a351bb60d108145",
|
||||
"sha256:be98f628055368795d818ebf93da628541e10b75b41c559fdf36d104c5787066",
|
||||
"sha256:bf5d821ffabf0ef3533c39c518f3357b171a1651c1ff6827325e4489b0e46c3c",
|
||||
"sha256:c47adbc92fc1bb2b3274c4b3a43ae0e4573d9fbff4f54cd484555edbf030baf1",
|
||||
"sha256:d7f9850398e85aba693bb640262d3611788b1f29a79f0c93c565694658f4071f",
|
||||
"sha256:d8446c54dc28c01e5a2dbac5a25f071f6653e6e40f3a8818e8b45d790fe6ef53",
|
||||
"sha256:e0f138900af21926a02425cf736db95be9f4af72ba1bb21453432a07f6082134",
|
||||
"sha256:e9936f0b261d4df76ad22f8fee3ae83b60d7c3e871292cd42f40b81b70afae85",
|
||||
"sha256:f5653a225f31e113b152e56f154ccbe59eeb1c7487b39b9d9f9cdb58e6c79dc5",
|
||||
"sha256:f826e31d18b516f653fe296d967d700fddad5901ae07c622bb3705955e1faa94",
|
||||
"sha256:f8ba0e8349a38d3001fae7eadded3f6606f0da5d748ee53cc1dab1d6527b9509",
|
||||
"sha256:f9081981fe268bd86831e5c75f7de206ef275defcb82bc70740ae6dc507aee51",
|
||||
"sha256:fa130dd50c57d53368c9d59395cb5526eda596d3ffe36666cd81a44d56e48872"
|
||||
],
|
||||
"markers": "python_version >= '3.6'",
|
||||
"version": "==2.0.1"
|
||||
},
|
||||
"pycparser": {
|
||||
"hashes": [
|
||||
"sha256:2d475327684562c3a96cc71adf7dc8c4f0565175cf86b6d7a404ff4c771f15f0",
|
||||
"sha256:7582ad22678f0fcd81102833f60ef8d0e57288b6b5fb00323d101be910e35705"
|
||||
],
|
||||
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",
|
||||
"version": "==2.20"
|
||||
},
|
||||
"pyopenssl": {
|
||||
"hashes": [
|
||||
"sha256:5e2d8c5e46d0d865ae933bef5230090bdaf5506281e9eec60fa250ee80600cb3",
|
||||
"sha256:8935bd4920ab9abfebb07c41a4f58296407ed77f04bd1a92914044b848ba1ed6"
|
||||
],
|
||||
"index": "pypi",
|
||||
"version": "==21.0.0"
|
||||
},
|
||||
"six": {
|
||||
"hashes": [
|
||||
"sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926",
|
||||
"sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"
|
||||
],
|
||||
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",
|
||||
"version": "==1.16.0"
|
||||
},
|
||||
"werkzeug": {
|
||||
"hashes": [
|
||||
"sha256:63d3dc1cf60e7b7e35e97fa9861f7397283b75d765afcaefd993d6046899de8f",
|
||||
"sha256:aa2bb6fc8dee8d6c504c0ac1e7f5f7dc5810a9903e793b6f715a9f015bdadb9a"
|
||||
],
|
||||
"markers": "python_version >= '3.6'",
|
||||
"version": "==2.0.2"
|
||||
}
|
||||
},
|
||||
"develop": {}
|
||||
}
|
|
@ -4,24 +4,26 @@ WebAuthn credential registration, and use.
|
|||
|
||||
|
||||
=== Running
|
||||
To run this sample, you will need `pipenv`. For instructions on installing
|
||||
`pipenv`, see https://docs.pipenv.org.
|
||||
To run this sample, you will need `poetry`. For instructions on installing
|
||||
`poetry`, see https://python-poetry.org/.
|
||||
|
||||
Run the following command in the `examples/server` directory to set up the
|
||||
example:
|
||||
|
||||
$ pipenv install
|
||||
$ poetry install
|
||||
|
||||
Once the environment has been created, you can run the server by running:
|
||||
|
||||
$ pipenv run server
|
||||
$ poetry run server
|
||||
|
||||
When the server is running, use a browser supporting WebAuthn and open
|
||||
https://localhost:5000 to access the website.
|
||||
http://localhost:5000 to access the website.
|
||||
|
||||
NOTE: As this server uses a self-signed certificate, you will get warnings in
|
||||
your browser about the connection not being secure. This is expected, and you
|
||||
can safely proceed to the site.
|
||||
NOTE: Webauthn requires a secure context (HTTPS), which involves
|
||||
obtaining a valid TLS certificate. However, most browsers also treat
|
||||
http://localhost as a secure context. This example runs without TLS
|
||||
as a demo, but otherwise you should always use HTTPS with a valid
|
||||
certificate when using Webauthn.
|
||||
|
||||
=== Using the website
|
||||
The site allows you to register a WebAuthn credential, and to authenticate it.
|
||||
|
@ -49,11 +51,12 @@ a slightly altered version of the example server which uses this class to
|
|||
authenticate U2F credentials as well as WebAuthn credentials. To run this
|
||||
version of the server, run:
|
||||
|
||||
$ pipenv run server-u2f
|
||||
$ poetry run server-u2f
|
||||
|
||||
This version allows registration both using the newer WebAuthn APIs and by using
|
||||
the legacy U2F APIs, so that you can test authentication using both credential
|
||||
types. The source code for this version of the server is in `server-u2f.py`.
|
||||
types. The source code for this version of the server is in
|
||||
`server/server_u2f.py`.
|
||||
|
||||
NOTE: There should be no need to support registration of new U2F credentials as
|
||||
new registrations should be using the WebAuthn APIs, even for existing users.
|
||||
|
|
|
@ -0,0 +1,361 @@
|
|||
# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand.
|
||||
|
||||
[[package]]
|
||||
name = "blinker"
|
||||
version = "1.7.0"
|
||||
description = "Fast, simple object-to-object and broadcast signaling"
|
||||
optional = false
|
||||
python-versions = ">=3.8"
|
||||
files = [
|
||||
{file = "blinker-1.7.0-py3-none-any.whl", hash = "sha256:c3f865d4d54db7abc53758a01601cf343fe55b84c1de4e3fa910e420b438d5b9"},
|
||||
{file = "blinker-1.7.0.tar.gz", hash = "sha256:e6820ff6fa4e4d1d8e2747c2283749c3f547e4fee112b98555cdcdae32996182"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "cffi"
|
||||
version = "1.16.0"
|
||||
description = "Foreign Function Interface for Python calling C code."
|
||||
optional = false
|
||||
python-versions = ">=3.8"
|
||||
files = [
|
||||
{file = "cffi-1.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6b3d6606d369fc1da4fd8c357d026317fbb9c9b75d36dc16e90e84c26854b088"},
|
||||
{file = "cffi-1.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ac0f5edd2360eea2f1daa9e26a41db02dd4b0451b48f7c318e217ee092a213e9"},
|
||||
{file = "cffi-1.16.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7e61e3e4fa664a8588aa25c883eab612a188c725755afff6289454d6362b9673"},
|
||||
{file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a72e8961a86d19bdb45851d8f1f08b041ea37d2bd8d4fd19903bc3083d80c896"},
|
||||
{file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5b50bf3f55561dac5438f8e70bfcdfd74543fd60df5fa5f62d94e5867deca684"},
|
||||
{file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7651c50c8c5ef7bdb41108b7b8c5a83013bfaa8a935590c5d74627c047a583c7"},
|
||||
{file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4108df7fe9b707191e55f33efbcb2d81928e10cea45527879a4749cbe472614"},
|
||||
{file = "cffi-1.16.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:32c68ef735dbe5857c810328cb2481e24722a59a2003018885514d4c09af9743"},
|
||||
{file = "cffi-1.16.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:673739cb539f8cdaa07d92d02efa93c9ccf87e345b9a0b556e3ecc666718468d"},
|
||||
{file = "cffi-1.16.0-cp310-cp310-win32.whl", hash = "sha256:9f90389693731ff1f659e55c7d1640e2ec43ff725cc61b04b2f9c6d8d017df6a"},
|
||||
{file = "cffi-1.16.0-cp310-cp310-win_amd64.whl", hash = "sha256:e6024675e67af929088fda399b2094574609396b1decb609c55fa58b028a32a1"},
|
||||
{file = "cffi-1.16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b84834d0cf97e7d27dd5b7f3aca7b6e9263c56308ab9dc8aae9784abb774d404"},
|
||||
{file = "cffi-1.16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1b8ebc27c014c59692bb2664c7d13ce7a6e9a629be20e54e7271fa696ff2b417"},
|
||||
{file = "cffi-1.16.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ee07e47c12890ef248766a6e55bd38ebfb2bb8edd4142d56db91b21ea68b7627"},
|
||||
{file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8a9d3ebe49f084ad71f9269834ceccbf398253c9fac910c4fd7053ff1386936"},
|
||||
{file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e70f54f1796669ef691ca07d046cd81a29cb4deb1e5f942003f401c0c4a2695d"},
|
||||
{file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5bf44d66cdf9e893637896c7faa22298baebcd18d1ddb6d2626a6e39793a1d56"},
|
||||
{file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7b78010e7b97fef4bee1e896df8a4bbb6712b7f05b7ef630f9d1da00f6444d2e"},
|
||||
{file = "cffi-1.16.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c6a164aa47843fb1b01e941d385aab7215563bb8816d80ff3a363a9f8448a8dc"},
|
||||
{file = "cffi-1.16.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e09f3ff613345df5e8c3667da1d918f9149bd623cd9070c983c013792a9a62eb"},
|
||||
{file = "cffi-1.16.0-cp311-cp311-win32.whl", hash = "sha256:2c56b361916f390cd758a57f2e16233eb4f64bcbeee88a4881ea90fca14dc6ab"},
|
||||
{file = "cffi-1.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:db8e577c19c0fda0beb7e0d4e09e0ba74b1e4c092e0e40bfa12fe05b6f6d75ba"},
|
||||
{file = "cffi-1.16.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:fa3a0128b152627161ce47201262d3140edb5a5c3da88d73a1b790a959126956"},
|
||||
{file = "cffi-1.16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:68e7c44931cc171c54ccb702482e9fc723192e88d25a0e133edd7aff8fcd1f6e"},
|
||||
{file = "cffi-1.16.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:abd808f9c129ba2beda4cfc53bde801e5bcf9d6e0f22f095e45327c038bfe68e"},
|
||||
{file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:88e2b3c14bdb32e440be531ade29d3c50a1a59cd4e51b1dd8b0865c54ea5d2e2"},
|
||||
{file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fcc8eb6d5902bb1cf6dc4f187ee3ea80a1eba0a89aba40a5cb20a5087d961357"},
|
||||
{file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b7be2d771cdba2942e13215c4e340bfd76398e9227ad10402a8767ab1865d2e6"},
|
||||
{file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e715596e683d2ce000574bae5d07bd522c781a822866c20495e52520564f0969"},
|
||||
{file = "cffi-1.16.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2d92b25dbf6cae33f65005baf472d2c245c050b1ce709cc4588cdcdd5495b520"},
|
||||
{file = "cffi-1.16.0-cp312-cp312-win32.whl", hash = "sha256:b2ca4e77f9f47c55c194982e10f058db063937845bb2b7a86c84a6cfe0aefa8b"},
|
||||
{file = "cffi-1.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:68678abf380b42ce21a5f2abde8efee05c114c2fdb2e9eef2efdb0257fba1235"},
|
||||
{file = "cffi-1.16.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0c9ef6ff37e974b73c25eecc13952c55bceed9112be2d9d938ded8e856138bcc"},
|
||||
{file = "cffi-1.16.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a09582f178759ee8128d9270cd1344154fd473bb77d94ce0aeb2a93ebf0feaf0"},
|
||||
{file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e760191dd42581e023a68b758769e2da259b5d52e3103c6060ddc02c9edb8d7b"},
|
||||
{file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:80876338e19c951fdfed6198e70bc88f1c9758b94578d5a7c4c91a87af3cf31c"},
|
||||
{file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a6a14b17d7e17fa0d207ac08642c8820f84f25ce17a442fd15e27ea18d67c59b"},
|
||||
{file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6602bc8dc6f3a9e02b6c22c4fc1e47aa50f8f8e6d3f78a5e16ac33ef5fefa324"},
|
||||
{file = "cffi-1.16.0-cp38-cp38-win32.whl", hash = "sha256:131fd094d1065b19540c3d72594260f118b231090295d8c34e19a7bbcf2e860a"},
|
||||
{file = "cffi-1.16.0-cp38-cp38-win_amd64.whl", hash = "sha256:31d13b0f99e0836b7ff893d37af07366ebc90b678b6664c955b54561fc36ef36"},
|
||||
{file = "cffi-1.16.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:582215a0e9adbe0e379761260553ba11c58943e4bbe9c36430c4ca6ac74b15ed"},
|
||||
{file = "cffi-1.16.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b29ebffcf550f9da55bec9e02ad430c992a87e5f512cd63388abb76f1036d8d2"},
|
||||
{file = "cffi-1.16.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dc9b18bf40cc75f66f40a7379f6a9513244fe33c0e8aa72e2d56b0196a7ef872"},
|
||||
{file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9cb4a35b3642fc5c005a6755a5d17c6c8b6bcb6981baf81cea8bfbc8903e8ba8"},
|
||||
{file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b86851a328eedc692acf81fb05444bdf1891747c25af7529e39ddafaf68a4f3f"},
|
||||
{file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c0f31130ebc2d37cdd8e44605fb5fa7ad59049298b3f745c74fa74c62fbfcfc4"},
|
||||
{file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f8e709127c6c77446a8c0a8c8bf3c8ee706a06cd44b1e827c3e6a2ee6b8c098"},
|
||||
{file = "cffi-1.16.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:748dcd1e3d3d7cd5443ef03ce8685043294ad6bd7c02a38d1bd367cfd968e000"},
|
||||
{file = "cffi-1.16.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8895613bcc094d4a1b2dbe179d88d7fb4a15cee43c052e8885783fac397d91fe"},
|
||||
{file = "cffi-1.16.0-cp39-cp39-win32.whl", hash = "sha256:ed86a35631f7bfbb28e108dd96773b9d5a6ce4811cf6ea468bb6a359b256b1e4"},
|
||||
{file = "cffi-1.16.0-cp39-cp39-win_amd64.whl", hash = "sha256:3686dffb02459559c74dd3d81748269ffb0eb027c39a6fc99502de37d501faa8"},
|
||||
{file = "cffi-1.16.0.tar.gz", hash = "sha256:bcb3ef43e58665bbda2fb198698fcae6776483e0c4a631aa5647806c25e02cc0"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
pycparser = "*"
|
||||
|
||||
[[package]]
|
||||
name = "click"
|
||||
version = "8.1.7"
|
||||
description = "Composable command line interface toolkit"
|
||||
optional = false
|
||||
python-versions = ">=3.7"
|
||||
files = [
|
||||
{file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"},
|
||||
{file = "click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
colorama = {version = "*", markers = "platform_system == \"Windows\""}
|
||||
|
||||
[[package]]
|
||||
name = "colorama"
|
||||
version = "0.4.6"
|
||||
description = "Cross-platform colored terminal text."
|
||||
optional = false
|
||||
python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7"
|
||||
files = [
|
||||
{file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"},
|
||||
{file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "cryptography"
|
||||
version = "42.0.5"
|
||||
description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers."
|
||||
optional = false
|
||||
python-versions = ">=3.7"
|
||||
files = [
|
||||
{file = "cryptography-42.0.5-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:a30596bae9403a342c978fb47d9b0ee277699fa53bbafad14706af51fe543d16"},
|
||||
{file = "cryptography-42.0.5-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:b7ffe927ee6531c78f81aa17e684e2ff617daeba7f189f911065b2ea2d526dec"},
|
||||
{file = "cryptography-42.0.5-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2424ff4c4ac7f6b8177b53c17ed5d8fa74ae5955656867f5a8affaca36a27abb"},
|
||||
{file = "cryptography-42.0.5-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:329906dcc7b20ff3cad13c069a78124ed8247adcac44b10bea1130e36caae0b4"},
|
||||
{file = "cryptography-42.0.5-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:b03c2ae5d2f0fc05f9a2c0c997e1bc18c8229f392234e8a0194f202169ccd278"},
|
||||
{file = "cryptography-42.0.5-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:f8837fe1d6ac4a8052a9a8ddab256bc006242696f03368a4009be7ee3075cdb7"},
|
||||
{file = "cryptography-42.0.5-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:0270572b8bd2c833c3981724b8ee9747b3ec96f699a9665470018594301439ee"},
|
||||
{file = "cryptography-42.0.5-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:b8cac287fafc4ad485b8a9b67d0ee80c66bf3574f655d3b97ef2e1082360faf1"},
|
||||
{file = "cryptography-42.0.5-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:16a48c23a62a2f4a285699dba2e4ff2d1cff3115b9df052cdd976a18856d8e3d"},
|
||||
{file = "cryptography-42.0.5-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:2bce03af1ce5a5567ab89bd90d11e7bbdff56b8af3acbbec1faded8f44cb06da"},
|
||||
{file = "cryptography-42.0.5-cp37-abi3-win32.whl", hash = "sha256:b6cd2203306b63e41acdf39aa93b86fb566049aeb6dc489b70e34bcd07adca74"},
|
||||
{file = "cryptography-42.0.5-cp37-abi3-win_amd64.whl", hash = "sha256:98d8dc6d012b82287f2c3d26ce1d2dd130ec200c8679b6213b3c73c08b2b7940"},
|
||||
{file = "cryptography-42.0.5-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:5e6275c09d2badf57aea3afa80d975444f4be8d3bc58f7f80d2a484c6f9485c8"},
|
||||
{file = "cryptography-42.0.5-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e4985a790f921508f36f81831817cbc03b102d643b5fcb81cd33df3fa291a1a1"},
|
||||
{file = "cryptography-42.0.5-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7cde5f38e614f55e28d831754e8a3bacf9ace5d1566235e39d91b35502d6936e"},
|
||||
{file = "cryptography-42.0.5-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:7367d7b2eca6513681127ebad53b2582911d1736dc2ffc19f2c3ae49997496bc"},
|
||||
{file = "cryptography-42.0.5-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:cd2030f6650c089aeb304cf093f3244d34745ce0cfcc39f20c6fbfe030102e2a"},
|
||||
{file = "cryptography-42.0.5-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:a2913c5375154b6ef2e91c10b5720ea6e21007412f6437504ffea2109b5a33d7"},
|
||||
{file = "cryptography-42.0.5-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:c41fb5e6a5fe9ebcd58ca3abfeb51dffb5d83d6775405305bfa8715b76521922"},
|
||||
{file = "cryptography-42.0.5-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:3eaafe47ec0d0ffcc9349e1708be2aaea4c6dd4978d76bf6eb0cb2c13636c6fc"},
|
||||
{file = "cryptography-42.0.5-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:1b95b98b0d2af784078fa69f637135e3c317091b615cd0905f8b8a087e86fa30"},
|
||||
{file = "cryptography-42.0.5-cp39-abi3-win32.whl", hash = "sha256:1f71c10d1e88467126f0efd484bd44bca5e14c664ec2ede64c32f20875c0d413"},
|
||||
{file = "cryptography-42.0.5-cp39-abi3-win_amd64.whl", hash = "sha256:a011a644f6d7d03736214d38832e030d8268bcff4a41f728e6030325fea3e400"},
|
||||
{file = "cryptography-42.0.5-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:9481ffe3cf013b71b2428b905c4f7a9a4f76ec03065b05ff499bb5682a8d9ad8"},
|
||||
{file = "cryptography-42.0.5-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:ba334e6e4b1d92442b75ddacc615c5476d4ad55cc29b15d590cc6b86efa487e2"},
|
||||
{file = "cryptography-42.0.5-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:ba3e4a42397c25b7ff88cdec6e2a16c2be18720f317506ee25210f6d31925f9c"},
|
||||
{file = "cryptography-42.0.5-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:111a0d8553afcf8eb02a4fea6ca4f59d48ddb34497aa8706a6cf536f1a5ec576"},
|
||||
{file = "cryptography-42.0.5-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:cd65d75953847815962c84a4654a84850b2bb4aed3f26fadcc1c13892e1e29f6"},
|
||||
{file = "cryptography-42.0.5-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:e807b3188f9eb0eaa7bbb579b462c5ace579f1cedb28107ce8b48a9f7ad3679e"},
|
||||
{file = "cryptography-42.0.5-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:f12764b8fffc7a123f641d7d049d382b73f96a34117e0b637b80643169cec8ac"},
|
||||
{file = "cryptography-42.0.5-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:37dd623507659e08be98eec89323469e8c7b4c1407c85112634ae3dbdb926fdd"},
|
||||
{file = "cryptography-42.0.5.tar.gz", hash = "sha256:6fe07eec95dfd477eb9530aef5bead34fec819b3aaf6c5bd6d20565da607bfe1"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
cffi = {version = ">=1.12", markers = "platform_python_implementation != \"PyPy\""}
|
||||
|
||||
[package.extras]
|
||||
docs = ["sphinx (>=5.3.0)", "sphinx-rtd-theme (>=1.1.1)"]
|
||||
docstest = ["pyenchant (>=1.6.11)", "readme-renderer", "sphinxcontrib-spelling (>=4.0.1)"]
|
||||
nox = ["nox"]
|
||||
pep8test = ["check-sdist", "click", "mypy", "ruff"]
|
||||
sdist = ["build"]
|
||||
ssh = ["bcrypt (>=3.1.5)"]
|
||||
test = ["certifi", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"]
|
||||
test-randomorder = ["pytest-randomly"]
|
||||
|
||||
[[package]]
|
||||
name = "fido2"
|
||||
version = "1.1.3-dev.0"
|
||||
description = "FIDO2/WebAuthn library for implementing clients and servers."
|
||||
optional = false
|
||||
python-versions = "^3.8"
|
||||
files = []
|
||||
develop = false
|
||||
|
||||
[package.dependencies]
|
||||
cryptography = ">=2.6, !=35, <45"
|
||||
|
||||
[package.extras]
|
||||
pcsc = ["pyscard (>=1.9,<3)"]
|
||||
|
||||
[package.source]
|
||||
type = "directory"
|
||||
url = "../.."
|
||||
|
||||
[[package]]
|
||||
name = "flask"
|
||||
version = "2.3.3"
|
||||
description = "A simple framework for building complex web applications."
|
||||
optional = false
|
||||
python-versions = ">=3.8"
|
||||
files = [
|
||||
{file = "flask-2.3.3-py3-none-any.whl", hash = "sha256:f69fcd559dc907ed196ab9df0e48471709175e696d6e698dd4dbe940f96ce66b"},
|
||||
{file = "flask-2.3.3.tar.gz", hash = "sha256:09c347a92aa7ff4a8e7f3206795f30d826654baf38b873d0744cd571ca609efc"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
blinker = ">=1.6.2"
|
||||
click = ">=8.1.3"
|
||||
importlib-metadata = {version = ">=3.6.0", markers = "python_version < \"3.10\""}
|
||||
itsdangerous = ">=2.1.2"
|
||||
Jinja2 = ">=3.1.2"
|
||||
Werkzeug = ">=2.3.7"
|
||||
|
||||
[package.extras]
|
||||
async = ["asgiref (>=3.2)"]
|
||||
dotenv = ["python-dotenv"]
|
||||
|
||||
[[package]]
|
||||
name = "importlib-metadata"
|
||||
version = "7.0.2"
|
||||
description = "Read metadata from Python packages"
|
||||
optional = false
|
||||
python-versions = ">=3.8"
|
||||
files = [
|
||||
{file = "importlib_metadata-7.0.2-py3-none-any.whl", hash = "sha256:f4bc4c0c070c490abf4ce96d715f68e95923320370efb66143df00199bb6c100"},
|
||||
{file = "importlib_metadata-7.0.2.tar.gz", hash = "sha256:198f568f3230878cb1b44fbd7975f87906c22336dba2e4a7f05278c281fbd792"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
zipp = ">=0.5"
|
||||
|
||||
[package.extras]
|
||||
docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"]
|
||||
perf = ["ipython"]
|
||||
testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-perf (>=0.9.2)", "pytest-ruff (>=0.2.1)"]
|
||||
|
||||
[[package]]
|
||||
name = "itsdangerous"
|
||||
version = "2.1.2"
|
||||
description = "Safely pass data to untrusted environments and back."
|
||||
optional = false
|
||||
python-versions = ">=3.7"
|
||||
files = [
|
||||
{file = "itsdangerous-2.1.2-py3-none-any.whl", hash = "sha256:2c2349112351b88699d8d4b6b075022c0808887cb7ad10069318a8b0bc88db44"},
|
||||
{file = "itsdangerous-2.1.2.tar.gz", hash = "sha256:5dbbc68b317e5e42f327f9021763545dc3fc3bfe22e6deb96aaf1fc38874156a"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "jinja2"
|
||||
version = "3.1.3"
|
||||
description = "A very fast and expressive template engine."
|
||||
optional = false
|
||||
python-versions = ">=3.7"
|
||||
files = [
|
||||
{file = "Jinja2-3.1.3-py3-none-any.whl", hash = "sha256:7d6d50dd97d52cbc355597bd845fabfbac3f551e1f99619e39a35ce8c370b5fa"},
|
||||
{file = "Jinja2-3.1.3.tar.gz", hash = "sha256:ac8bd6544d4bb2c9792bf3a159e80bba8fda7f07e81bc3aed565432d5925ba90"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
MarkupSafe = ">=2.0"
|
||||
|
||||
[package.extras]
|
||||
i18n = ["Babel (>=2.7)"]
|
||||
|
||||
[[package]]
|
||||
name = "markupsafe"
|
||||
version = "2.1.5"
|
||||
description = "Safely add untrusted strings to HTML/XML markup."
|
||||
optional = false
|
||||
python-versions = ">=3.7"
|
||||
files = [
|
||||
{file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc"},
|
||||
{file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5"},
|
||||
{file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46"},
|
||||
{file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f"},
|
||||
{file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900"},
|
||||
{file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff"},
|
||||
{file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad"},
|
||||
{file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd"},
|
||||
{file = "MarkupSafe-2.1.5-cp310-cp310-win32.whl", hash = "sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4"},
|
||||
{file = "MarkupSafe-2.1.5-cp310-cp310-win_amd64.whl", hash = "sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5"},
|
||||
{file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f"},
|
||||
{file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2"},
|
||||
{file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced"},
|
||||
{file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5"},
|
||||
{file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c"},
|
||||
{file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f"},
|
||||
{file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a"},
|
||||
{file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f"},
|
||||
{file = "MarkupSafe-2.1.5-cp311-cp311-win32.whl", hash = "sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906"},
|
||||
{file = "MarkupSafe-2.1.5-cp311-cp311-win_amd64.whl", hash = "sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617"},
|
||||
{file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1"},
|
||||
{file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4"},
|
||||
{file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee"},
|
||||
{file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5"},
|
||||
{file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b"},
|
||||
{file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a"},
|
||||
{file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f"},
|
||||
{file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169"},
|
||||
{file = "MarkupSafe-2.1.5-cp312-cp312-win32.whl", hash = "sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad"},
|
||||
{file = "MarkupSafe-2.1.5-cp312-cp312-win_amd64.whl", hash = "sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb"},
|
||||
{file = "MarkupSafe-2.1.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c8b29db45f8fe46ad280a7294f5c3ec36dbac9491f2d1c17345be8e69cc5928f"},
|
||||
{file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec6a563cff360b50eed26f13adc43e61bc0c04d94b8be985e6fb24b81f6dcfdf"},
|
||||
{file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a549b9c31bec33820e885335b451286e2969a2d9e24879f83fe904a5ce59d70a"},
|
||||
{file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4f11aa001c540f62c6166c7726f71f7573b52c68c31f014c25cc7901deea0b52"},
|
||||
{file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7b2e5a267c855eea6b4283940daa6e88a285f5f2a67f2220203786dfa59b37e9"},
|
||||
{file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:2d2d793e36e230fd32babe143b04cec8a8b3eb8a3122d2aceb4a371e6b09b8df"},
|
||||
{file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ce409136744f6521e39fd8e2a24c53fa18ad67aa5bc7c2cf83645cce5b5c4e50"},
|
||||
{file = "MarkupSafe-2.1.5-cp37-cp37m-win32.whl", hash = "sha256:4096e9de5c6fdf43fb4f04c26fb114f61ef0bf2e5604b6ee3019d51b69e8c371"},
|
||||
{file = "MarkupSafe-2.1.5-cp37-cp37m-win_amd64.whl", hash = "sha256:4275d846e41ecefa46e2015117a9f491e57a71ddd59bbead77e904dc02b1bed2"},
|
||||
{file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:656f7526c69fac7f600bd1f400991cc282b417d17539a1b228617081106feb4a"},
|
||||
{file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:97cafb1f3cbcd3fd2b6fbfb99ae11cdb14deea0736fc2b0952ee177f2b813a46"},
|
||||
{file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f3fbcb7ef1f16e48246f704ab79d79da8a46891e2da03f8783a5b6fa41a9532"},
|
||||
{file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa9db3f79de01457b03d4f01b34cf91bc0048eb2c3846ff26f66687c2f6d16ab"},
|
||||
{file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffee1f21e5ef0d712f9033568f8344d5da8cc2869dbd08d87c84656e6a2d2f68"},
|
||||
{file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5dedb4db619ba5a2787a94d877bc8ffc0566f92a01c0ef214865e54ecc9ee5e0"},
|
||||
{file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:30b600cf0a7ac9234b2638fbc0fb6158ba5bdcdf46aeb631ead21248b9affbc4"},
|
||||
{file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8dd717634f5a044f860435c1d8c16a270ddf0ef8588d4887037c5028b859b0c3"},
|
||||
{file = "MarkupSafe-2.1.5-cp38-cp38-win32.whl", hash = "sha256:daa4ee5a243f0f20d528d939d06670a298dd39b1ad5f8a72a4275124a7819eff"},
|
||||
{file = "MarkupSafe-2.1.5-cp38-cp38-win_amd64.whl", hash = "sha256:619bc166c4f2de5caa5a633b8b7326fbe98e0ccbfacabd87268a2b15ff73a029"},
|
||||
{file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7a68b554d356a91cce1236aa7682dc01df0edba8d043fd1ce607c49dd3c1edcf"},
|
||||
{file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:db0b55e0f3cc0be60c1f19efdde9a637c32740486004f20d1cff53c3c0ece4d2"},
|
||||
{file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e53af139f8579a6d5f7b76549125f0d94d7e630761a2111bc431fd820e163b8"},
|
||||
{file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17b950fccb810b3293638215058e432159d2b71005c74371d784862b7e4683f3"},
|
||||
{file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c31f53cdae6ecfa91a77820e8b151dba54ab528ba65dfd235c80b086d68a465"},
|
||||
{file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bff1b4290a66b490a2f4719358c0cdcd9bafb6b8f061e45c7a2460866bf50c2e"},
|
||||
{file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bc1667f8b83f48511b94671e0e441401371dfd0f0a795c7daa4a3cd1dde55bea"},
|
||||
{file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5049256f536511ee3f7e1b3f87d1d1209d327e818e6ae1365e8653d7e3abb6a6"},
|
||||
{file = "MarkupSafe-2.1.5-cp39-cp39-win32.whl", hash = "sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf"},
|
||||
{file = "MarkupSafe-2.1.5-cp39-cp39-win_amd64.whl", hash = "sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5"},
|
||||
{file = "MarkupSafe-2.1.5.tar.gz", hash = "sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "pycparser"
|
||||
version = "2.21"
|
||||
description = "C parser in Python"
|
||||
optional = false
|
||||
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
|
||||
files = [
|
||||
{file = "pycparser-2.21-py2.py3-none-any.whl", hash = "sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9"},
|
||||
{file = "pycparser-2.21.tar.gz", hash = "sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "werkzeug"
|
||||
version = "3.0.1"
|
||||
description = "The comprehensive WSGI web application library."
|
||||
optional = false
|
||||
python-versions = ">=3.8"
|
||||
files = [
|
||||
{file = "werkzeug-3.0.1-py3-none-any.whl", hash = "sha256:90a285dc0e42ad56b34e696398b8122ee4c681833fb35b8334a095d82c56da10"},
|
||||
{file = "werkzeug-3.0.1.tar.gz", hash = "sha256:507e811ecea72b18a404947aded4b3390e1db8f826b494d76550ef45bb3b1dcc"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
MarkupSafe = ">=2.1.1"
|
||||
|
||||
[package.extras]
|
||||
watchdog = ["watchdog (>=2.3)"]
|
||||
|
||||
[[package]]
|
||||
name = "zipp"
|
||||
version = "3.17.0"
|
||||
description = "Backport of pathlib-compatible object wrapper for zip files"
|
||||
optional = false
|
||||
python-versions = ">=3.8"
|
||||
files = [
|
||||
{file = "zipp-3.17.0-py3-none-any.whl", hash = "sha256:0e923e726174922dce09c53c59ad483ff7bbb8e572e00c7f7c46b88556409f31"},
|
||||
{file = "zipp-3.17.0.tar.gz", hash = "sha256:84e64a1c28cf7e91ed2078bb8cc8c259cb19b76942096c8d7b84947690cabaf0"},
|
||||
]
|
||||
|
||||
[package.extras]
|
||||
docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"]
|
||||
testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy (>=0.9.1)", "pytest-ruff"]
|
||||
|
||||
[metadata]
|
||||
lock-version = "2.0"
|
||||
python-versions = "^3.8"
|
||||
content-hash = "2fce33bd11a195af8dd3d95f62169819676ed4bba09e10863a6e61caa33a74a8"
|
|
@ -0,0 +1,23 @@
|
|||
[tool.poetry]
|
||||
name = "fido2-example-server"
|
||||
version = "0.1.0"
|
||||
description = "Example server for python-fido2"
|
||||
authors = ["Dain Nilsson <dain@yubico.com>"]
|
||||
license = "Apache-2"
|
||||
packages = [
|
||||
{ include = "server" },
|
||||
]
|
||||
|
||||
[tool.poetry.dependencies]
|
||||
python = "^3.8"
|
||||
Flask = "^2.0"
|
||||
fido2 = {path = "../.."}
|
||||
|
||||
[tool.poetry.dev-dependencies]
|
||||
|
||||
[build-system]
|
||||
requires = ["poetry-core>=1.0.0"]
|
||||
build-backend = "poetry.core.masonry.api"
|
||||
|
||||
[tool.poetry.scripts]
|
||||
server = "server.server:main"
|
|
@ -1,184 +0,0 @@
|
|||
# Copyright (c) 2018 Yubico AB
|
||||
# All rights reserved.
|
||||
#
|
||||
# Redistribution and use in source and binary forms, with or
|
||||
# without modification, are permitted provided that the following
|
||||
# conditions are met:
|
||||
#
|
||||
# 1. Redistributions of source code must retain the above copyright
|
||||
# notice, this list of conditions and the following disclaimer.
|
||||
# 2. Redistributions in binary form must reproduce the above
|
||||
# copyright notice, this list of conditions and the following
|
||||
# disclaimer in the documentation and/or other materials provided
|
||||
# with the distribution.
|
||||
#
|
||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
|
||||
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
|
||||
# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
|
||||
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
|
||||
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
|
||||
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
|
||||
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
|
||||
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
|
||||
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
||||
# POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
"""
|
||||
Example demo server to use a supported web browser to call the WebAuthn APIs
|
||||
to register and use a credential.
|
||||
|
||||
See the file README.adoc in this directory for details.
|
||||
|
||||
Navigate to https://localhost:5000 in a supported web browser.
|
||||
"""
|
||||
from __future__ import print_function, absolute_import, unicode_literals
|
||||
|
||||
from fido2.webauthn import PublicKeyCredentialRpEntity
|
||||
from fido2.client import ClientData
|
||||
from fido2.server import U2FFido2Server
|
||||
from fido2.ctap2 import AttestationObject, AuthenticatorData
|
||||
from fido2.ctap1 import RegistrationData
|
||||
from fido2.utils import sha256, websafe_encode
|
||||
from fido2 import cbor
|
||||
from flask import Flask, session, request, redirect, abort
|
||||
|
||||
import os
|
||||
|
||||
|
||||
app = Flask(__name__, static_url_path="")
|
||||
app.secret_key = os.urandom(32) # Used for session.
|
||||
|
||||
rp = PublicKeyCredentialRpEntity("localhost", "Demo server")
|
||||
# By using the U2FFido2Server class, we can support existing credentials
|
||||
# registered by the legacy u2f.register API for an appId.
|
||||
server = U2FFido2Server("https://localhost:5000", rp)
|
||||
|
||||
# Registered credentials are stored globally, in memory only. Single user
|
||||
# support, state is lost when the server terminates.
|
||||
credentials = []
|
||||
|
||||
|
||||
@app.route("/")
|
||||
def index():
|
||||
return redirect("/index-u2f.html")
|
||||
|
||||
|
||||
@app.route("/api/register/begin", methods=["POST"])
|
||||
def register_begin():
|
||||
registration_data, state = server.register_begin(
|
||||
{
|
||||
"id": b"user_id",
|
||||
"name": "a_user",
|
||||
"displayName": "A. User",
|
||||
"icon": "https://example.com/image.png",
|
||||
},
|
||||
credentials,
|
||||
)
|
||||
|
||||
session["state"] = state
|
||||
print("\n\n\n\n")
|
||||
print(registration_data)
|
||||
print("\n\n\n\n")
|
||||
return cbor.encode(registration_data)
|
||||
|
||||
|
||||
@app.route("/api/register/complete", methods=["POST"])
|
||||
def register_complete():
|
||||
data = cbor.decode(request.get_data())
|
||||
client_data = ClientData(data["clientDataJSON"])
|
||||
att_obj = AttestationObject(data["attestationObject"])
|
||||
print("clientData", client_data)
|
||||
print("AttestationObject:", att_obj)
|
||||
|
||||
auth_data = server.register_complete(session["state"], client_data, att_obj)
|
||||
|
||||
credentials.append(auth_data.credential_data)
|
||||
print("REGISTERED CREDENTIAL:", auth_data.credential_data)
|
||||
return cbor.encode({"status": "OK"})
|
||||
|
||||
|
||||
@app.route("/api/authenticate/begin", methods=["POST"])
|
||||
def authenticate_begin():
|
||||
if not credentials:
|
||||
abort(404)
|
||||
|
||||
auth_data, state = server.authenticate_begin(credentials)
|
||||
session["state"] = state
|
||||
return cbor.encode(auth_data)
|
||||
|
||||
|
||||
@app.route("/api/authenticate/complete", methods=["POST"])
|
||||
def authenticate_complete():
|
||||
if not credentials:
|
||||
abort(404)
|
||||
|
||||
data = cbor.decode(request.get_data())
|
||||
credential_id = data["credentialId"]
|
||||
client_data = ClientData(data["clientDataJSON"])
|
||||
auth_data = AuthenticatorData(data["authenticatorData"])
|
||||
signature = data["signature"]
|
||||
print("clientData", client_data)
|
||||
print("AuthenticatorData", auth_data)
|
||||
|
||||
server.authenticate_complete(
|
||||
session.pop("state"),
|
||||
credentials,
|
||||
credential_id,
|
||||
client_data,
|
||||
auth_data,
|
||||
signature,
|
||||
)
|
||||
print("ASSERTION OK")
|
||||
return cbor.encode({"status": "OK"})
|
||||
|
||||
|
||||
###############################################################################
|
||||
# WARNING!
|
||||
#
|
||||
# The below functions allow the registration of legacy U2F credentials.
|
||||
# This is provided FOR TESTING PURPOSES ONLY. New credentials should be
|
||||
# registered using the WebAuthn APIs.
|
||||
###############################################################################
|
||||
|
||||
|
||||
@app.route("/api/u2f/begin", methods=["POST"])
|
||||
def u2f_begin():
|
||||
registration_data, state = server.register_begin(
|
||||
{
|
||||
"id": b"user_id",
|
||||
"name": "a_user",
|
||||
"displayName": "A. User",
|
||||
"icon": "https://example.com/image.png",
|
||||
},
|
||||
credentials,
|
||||
)
|
||||
|
||||
session["state"] = state
|
||||
print("\n\n\n\n")
|
||||
print(registration_data)
|
||||
print("\n\n\n\n")
|
||||
return cbor.encode(websafe_encode(registration_data["publicKey"]["challenge"]))
|
||||
|
||||
|
||||
@app.route("/api/u2f/complete", methods=["POST"])
|
||||
def u2f_complete():
|
||||
data = cbor.decode(request.get_data())
|
||||
client_data = ClientData.from_b64(data["clientData"])
|
||||
reg_data = RegistrationData.from_b64(data["registrationData"])
|
||||
print("clientData", client_data)
|
||||
print("U2F RegistrationData:", reg_data)
|
||||
att_obj = AttestationObject.from_ctap1(sha256(b"https://localhost:5000"), reg_data)
|
||||
print("AttestationObject:", att_obj)
|
||||
|
||||
auth_data = att_obj.auth_data
|
||||
|
||||
credentials.append(auth_data.credential_data)
|
||||
print("REGISTERED U2F CREDENTIAL:", auth_data.credential_data)
|
||||
return cbor.encode({"status": "OK"})
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
print(__doc__)
|
||||
app.run(ssl_context="adhoc", debug=False)
|
|
@ -31,24 +31,22 @@ to register and use a credential.
|
|||
|
||||
See the file README.adoc in this directory for details.
|
||||
|
||||
Navigate to https://localhost:5000 in a supported web browser.
|
||||
Navigate to http://localhost:5000 in a supported web browser.
|
||||
"""
|
||||
from __future__ import print_function, absolute_import, unicode_literals
|
||||
|
||||
from fido2.webauthn import PublicKeyCredentialRpEntity
|
||||
from fido2.client import ClientData
|
||||
from fido2.webauthn import PublicKeyCredentialRpEntity, PublicKeyCredentialUserEntity
|
||||
from fido2.server import Fido2Server
|
||||
from fido2.ctap2 import AttestationObject, AuthenticatorData
|
||||
from fido2 import cbor
|
||||
from flask import Flask, session, request, redirect, abort
|
||||
from flask import Flask, session, request, redirect, abort, jsonify
|
||||
|
||||
import os
|
||||
import fido2.features
|
||||
|
||||
fido2.features.webauthn_json_mapping.enabled = True
|
||||
|
||||
|
||||
app = Flask(__name__, static_url_path="")
|
||||
app.secret_key = os.urandom(32) # Used for session.
|
||||
|
||||
rp = PublicKeyCredentialRpEntity("localhost", "Demo server")
|
||||
rp = PublicKeyCredentialRpEntity(name="Demo server", id="localhost")
|
||||
server = Fido2Server(rp)
|
||||
|
||||
|
||||
|
@ -64,13 +62,12 @@ def index():
|
|||
|
||||
@app.route("/api/register/begin", methods=["POST"])
|
||||
def register_begin():
|
||||
registration_data, state = server.register_begin(
|
||||
{
|
||||
"id": b"user_id",
|
||||
"name": "a_user",
|
||||
"displayName": "A. User",
|
||||
"icon": "https://example.com/image.png",
|
||||
},
|
||||
options, state = server.register_begin(
|
||||
PublicKeyCredentialUserEntity(
|
||||
id=b"user_id",
|
||||
name="a_user",
|
||||
display_name="A. User",
|
||||
),
|
||||
credentials,
|
||||
user_verification="discouraged",
|
||||
authenticator_attachment="cross-platform",
|
||||
|
@ -78,24 +75,21 @@ def register_begin():
|
|||
|
||||
session["state"] = state
|
||||
print("\n\n\n\n")
|
||||
print(registration_data)
|
||||
print(options)
|
||||
print("\n\n\n\n")
|
||||
return cbor.encode(registration_data)
|
||||
|
||||
return jsonify(dict(options))
|
||||
|
||||
|
||||
@app.route("/api/register/complete", methods=["POST"])
|
||||
def register_complete():
|
||||
data = cbor.decode(request.get_data())
|
||||
client_data = ClientData(data["clientDataJSON"])
|
||||
att_obj = AttestationObject(data["attestationObject"])
|
||||
print("clientData", client_data)
|
||||
print("AttestationObject:", att_obj)
|
||||
|
||||
auth_data = server.register_complete(session["state"], client_data, att_obj)
|
||||
response = request.json
|
||||
print("RegistrationResponse:", response)
|
||||
auth_data = server.register_complete(session["state"], response)
|
||||
|
||||
credentials.append(auth_data.credential_data)
|
||||
print("REGISTERED CREDENTIAL:", auth_data.credential_data)
|
||||
return cbor.encode({"status": "OK"})
|
||||
return jsonify({"status": "OK"})
|
||||
|
||||
|
||||
@app.route("/api/authenticate/begin", methods=["POST"])
|
||||
|
@ -103,9 +97,10 @@ def authenticate_begin():
|
|||
if not credentials:
|
||||
abort(404)
|
||||
|
||||
auth_data, state = server.authenticate_begin(credentials)
|
||||
options, state = server.authenticate_begin(credentials)
|
||||
session["state"] = state
|
||||
return cbor.encode(auth_data)
|
||||
|
||||
return jsonify(dict(options))
|
||||
|
||||
|
||||
@app.route("/api/authenticate/complete", methods=["POST"])
|
||||
|
@ -113,26 +108,24 @@ def authenticate_complete():
|
|||
if not credentials:
|
||||
abort(404)
|
||||
|
||||
data = cbor.decode(request.get_data())
|
||||
credential_id = data["credentialId"]
|
||||
client_data = ClientData(data["clientDataJSON"])
|
||||
auth_data = AuthenticatorData(data["authenticatorData"])
|
||||
signature = data["signature"]
|
||||
print("clientData", client_data)
|
||||
print("AuthenticatorData", auth_data)
|
||||
|
||||
response = request.json
|
||||
print("AuthenticationResponse:", response)
|
||||
server.authenticate_complete(
|
||||
session.pop("state"),
|
||||
credentials,
|
||||
credential_id,
|
||||
client_data,
|
||||
auth_data,
|
||||
signature,
|
||||
response,
|
||||
)
|
||||
print("ASSERTION OK")
|
||||
return cbor.encode({"status": "OK"})
|
||||
return jsonify({"status": "OK"})
|
||||
|
||||
|
||||
def main():
|
||||
print(__doc__)
|
||||
# Note: using localhost without TLS, as some browsers do
|
||||
# not allow Webauthn in case of TLS certificate errors.
|
||||
# See https://lists.w3.org/Archives/Public/public-webauthn/2022Nov/0135.html
|
||||
app.run(host="localhost", debug=False)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
print(__doc__)
|
||||
app.run(ssl_context="adhoc", debug=False)
|
||||
main()
|
|
@ -0,0 +1,60 @@
|
|||
<html>
|
||||
<head>
|
||||
<title>Fido 2.0 webauthn demo</title>
|
||||
|
||||
<script type="module">
|
||||
import {
|
||||
get,
|
||||
parseRequestOptionsFromJSON,
|
||||
} from '/webauthn-json.browser-ponyfill.js';
|
||||
|
||||
async function start() {
|
||||
let request = await fetch('/api/authenticate/begin', {
|
||||
method: 'POST',
|
||||
});
|
||||
if(!request.ok) {
|
||||
throw new Error('No credential available to authenticate!');
|
||||
}
|
||||
let json = await request.json();
|
||||
let options = parseRequestOptionsFromJSON(json);
|
||||
|
||||
let response = await get(options);
|
||||
let result = await fetch('/api/authenticate/complete', {
|
||||
method: 'POST',
|
||||
headers: {'Content-Type': 'application/json'},
|
||||
body: JSON.stringify(response),
|
||||
});
|
||||
|
||||
let stat = result.ok ? 'successful' : 'unsuccessful';
|
||||
alert('Authentication ' + stat + ' More details in server log...');
|
||||
window.location = '/';
|
||||
}
|
||||
|
||||
window.start = start;
|
||||
</script>
|
||||
|
||||
|
||||
|
||||
<style>
|
||||
body { font-family: sans-serif; line-height: 1.5em; padding: 2em 10em; }
|
||||
h1, h2 { color: #325F74; }
|
||||
a { color: #0080ac; font-weight: bold; text-decoration: none;}
|
||||
a:hover { text-decoration: underline; }
|
||||
</style>
|
||||
</head>
|
||||
<body>
|
||||
<h1>WebAuthn demo using python-fido2</h1>
|
||||
<p>This demo requires a browser supporting the WebAuthn API!</p>
|
||||
<hr>
|
||||
|
||||
<h2>Authenticate using a credential</h2>
|
||||
<div id="initial">
|
||||
<button onclick="start();">Click here to start</button>
|
||||
</div>
|
||||
<div id="started", style="display: none;">
|
||||
<p>Touch your authenticator device now...</p>
|
||||
<a href="/">Cancel</a>
|
||||
</div>
|
||||
|
||||
</body>
|
||||
</html>
|
|
@ -1,7 +1,6 @@
|
|||
<html>
|
||||
<head>
|
||||
<title>Fido 2.0 webauthn demo</title>
|
||||
<script src="/cbor.js"></script>
|
||||
<style>
|
||||
body { font-family: sans-serif; line-height: 1.5em; padding: 2em 10em; }
|
||||
h1, h2 { color: #325F74; }
|
|
@ -0,0 +1,57 @@
|
|||
<html>
|
||||
<head>
|
||||
<title>Fido 2.0 webauthn demo</title>
|
||||
|
||||
<script type="module">
|
||||
import {
|
||||
create,
|
||||
parseCreationOptionsFromJSON,
|
||||
} from '/webauthn-json.browser-ponyfill.js';
|
||||
|
||||
async function start() {
|
||||
let request = await fetch('/api/register/begin', {
|
||||
method: 'POST',
|
||||
});
|
||||
let json = await request.json();
|
||||
let options = parseCreationOptionsFromJSON(json);
|
||||
document.getElementById('initial').style.display = 'none';
|
||||
document.getElementById('started').style.display = 'block';
|
||||
|
||||
let response = await create(options);
|
||||
let result = await fetch('/api/register/complete', {
|
||||
method: 'POST',
|
||||
headers: {'Content-Type': 'application/json'},
|
||||
body: JSON.stringify(response),
|
||||
});
|
||||
|
||||
let stat = result.ok ? 'successful' : 'unsuccessful';
|
||||
alert('Registration ' + stat + ' More details in server log...');
|
||||
window.location = '/';
|
||||
}
|
||||
|
||||
window.start = start;
|
||||
</script>
|
||||
|
||||
<style>
|
||||
body { font-family: sans-serif; line-height: 1.5em; padding: 2em 10em; }
|
||||
h1, h2 { color: #325F74; }
|
||||
a { color: #0080ac; font-weight: bold; text-decoration: none;}
|
||||
a:hover { text-decoration: underline; }
|
||||
</style>
|
||||
</head>
|
||||
<body>
|
||||
<h1>WebAuthn demo using python-fido2</h1>
|
||||
<p>This demo requires a browser supporting the WebAuthn API!</p>
|
||||
<hr>
|
||||
|
||||
<h2>Register a credential</h2>
|
||||
<div id="initial">
|
||||
<button onclick="start();">Click here to start</button>
|
||||
</div>
|
||||
<div id="started", style="display: none;">
|
||||
<p>Touch your authenticator device now...</p>
|
||||
<a href="/">Cancel</a>
|
||||
</div>
|
||||
|
||||
</body>
|
||||
</html>
|
|
@ -0,0 +1,193 @@
|
|||
// src/webauthn-json/base64url.ts
|
||||
function base64urlToBuffer(baseurl64String) {
|
||||
const padding = "==".slice(0, (4 - baseurl64String.length % 4) % 4);
|
||||
const base64String = baseurl64String.replace(/-/g, "+").replace(/_/g, "/") + padding;
|
||||
const str = atob(base64String);
|
||||
const buffer = new ArrayBuffer(str.length);
|
||||
const byteView = new Uint8Array(buffer);
|
||||
for (let i = 0; i < str.length; i++) {
|
||||
byteView[i] = str.charCodeAt(i);
|
||||
}
|
||||
return buffer;
|
||||
}
|
||||
function bufferToBase64url(buffer) {
|
||||
const byteView = new Uint8Array(buffer);
|
||||
let str = "";
|
||||
for (const charCode of byteView) {
|
||||
str += String.fromCharCode(charCode);
|
||||
}
|
||||
const base64String = btoa(str);
|
||||
const base64urlString = base64String.replace(/\+/g, "-").replace(/\//g, "_").replace(/=/g, "");
|
||||
return base64urlString;
|
||||
}
|
||||
|
||||
// src/webauthn-json/convert.ts
|
||||
var copyValue = "copy";
|
||||
var convertValue = "convert";
|
||||
function convert(conversionFn, schema, input) {
|
||||
if (schema === copyValue) {
|
||||
return input;
|
||||
}
|
||||
if (schema === convertValue) {
|
||||
return conversionFn(input);
|
||||
}
|
||||
if (schema instanceof Array) {
|
||||
return input.map((v) => convert(conversionFn, schema[0], v));
|
||||
}
|
||||
if (schema instanceof Object) {
|
||||
const output = {};
|
||||
for (const [key, schemaField] of Object.entries(schema)) {
|
||||
if (schemaField.derive) {
|
||||
const v = schemaField.derive(input);
|
||||
if (v !== void 0) {
|
||||
input[key] = v;
|
||||
}
|
||||
}
|
||||
if (!(key in input)) {
|
||||
if (schemaField.required) {
|
||||
throw new Error(`Missing key: ${key}`);
|
||||
}
|
||||
continue;
|
||||
}
|
||||
if (input[key] == null) {
|
||||
output[key] = null;
|
||||
continue;
|
||||
}
|
||||
output[key] = convert(conversionFn, schemaField.schema, input[key]);
|
||||
}
|
||||
return output;
|
||||
}
|
||||
}
|
||||
function derived(schema, derive) {
|
||||
return {
|
||||
required: true,
|
||||
schema,
|
||||
derive
|
||||
};
|
||||
}
|
||||
function required(schema) {
|
||||
return {
|
||||
required: true,
|
||||
schema
|
||||
};
|
||||
}
|
||||
function optional(schema) {
|
||||
return {
|
||||
required: false,
|
||||
schema
|
||||
};
|
||||
}
|
||||
|
||||
// src/webauthn-json/basic/schema.ts
|
||||
var publicKeyCredentialDescriptorSchema = {
|
||||
type: required(copyValue),
|
||||
id: required(convertValue),
|
||||
transports: optional(copyValue)
|
||||
};
|
||||
var simplifiedExtensionsSchema = {
|
||||
appid: optional(copyValue),
|
||||
appidExclude: optional(copyValue),
|
||||
credProps: optional(copyValue)
|
||||
};
|
||||
var simplifiedClientExtensionResultsSchema = {
|
||||
appid: optional(copyValue),
|
||||
appidExclude: optional(copyValue),
|
||||
credProps: optional(copyValue)
|
||||
};
|
||||
var credentialCreationOptions = {
|
||||
publicKey: required({
|
||||
rp: required(copyValue),
|
||||
user: required({
|
||||
id: required(convertValue),
|
||||
name: required(copyValue),
|
||||
displayName: required(copyValue)
|
||||
}),
|
||||
challenge: required(convertValue),
|
||||
pubKeyCredParams: required(copyValue),
|
||||
timeout: optional(copyValue),
|
||||
excludeCredentials: optional([publicKeyCredentialDescriptorSchema]),
|
||||
authenticatorSelection: optional(copyValue),
|
||||
attestation: optional(copyValue),
|
||||
extensions: optional(simplifiedExtensionsSchema)
|
||||
}),
|
||||
signal: optional(copyValue)
|
||||
};
|
||||
var publicKeyCredentialWithAttestation = {
|
||||
type: required(copyValue),
|
||||
id: required(copyValue),
|
||||
rawId: required(convertValue),
|
||||
authenticatorAttachment: optional(copyValue),
|
||||
response: required({
|
||||
clientDataJSON: required(convertValue),
|
||||
attestationObject: required(convertValue),
|
||||
transports: derived(copyValue, (response) => {
|
||||
var _a;
|
||||
return ((_a = response.getTransports) == null ? void 0 : _a.call(response)) || [];
|
||||
})
|
||||
}),
|
||||
clientExtensionResults: derived(simplifiedClientExtensionResultsSchema, (pkc) => pkc.getClientExtensionResults())
|
||||
};
|
||||
var credentialRequestOptions = {
|
||||
mediation: optional(copyValue),
|
||||
publicKey: required({
|
||||
challenge: required(convertValue),
|
||||
timeout: optional(copyValue),
|
||||
rpId: optional(copyValue),
|
||||
allowCredentials: optional([publicKeyCredentialDescriptorSchema]),
|
||||
userVerification: optional(copyValue),
|
||||
extensions: optional(simplifiedExtensionsSchema)
|
||||
}),
|
||||
signal: optional(copyValue)
|
||||
};
|
||||
var publicKeyCredentialWithAssertion = {
|
||||
type: required(copyValue),
|
||||
id: required(copyValue),
|
||||
rawId: required(convertValue),
|
||||
authenticatorAttachment: optional(copyValue),
|
||||
response: required({
|
||||
clientDataJSON: required(convertValue),
|
||||
authenticatorData: required(convertValue),
|
||||
signature: required(convertValue),
|
||||
userHandle: required(convertValue)
|
||||
}),
|
||||
clientExtensionResults: derived(simplifiedClientExtensionResultsSchema, (pkc) => pkc.getClientExtensionResults())
|
||||
};
|
||||
|
||||
// src/webauthn-json/basic/api.ts
|
||||
function createRequestFromJSON(requestJSON) {
|
||||
return convert(base64urlToBuffer, credentialCreationOptions, requestJSON);
|
||||
}
|
||||
function createResponseToJSON(credential) {
|
||||
return convert(bufferToBase64url, publicKeyCredentialWithAttestation, credential);
|
||||
}
|
||||
function getRequestFromJSON(requestJSON) {
|
||||
return convert(base64urlToBuffer, credentialRequestOptions, requestJSON);
|
||||
}
|
||||
function getResponseToJSON(credential) {
|
||||
return convert(bufferToBase64url, publicKeyCredentialWithAssertion, credential);
|
||||
}
|
||||
|
||||
// src/webauthn-json/basic/supported.ts
|
||||
function supported() {
|
||||
return !!(navigator.credentials && navigator.credentials.create && navigator.credentials.get && window.PublicKeyCredential);
|
||||
}
|
||||
|
||||
// src/webauthn-json/browser-ponyfill.ts
|
||||
async function create(options) {
|
||||
const response = await navigator.credentials.create(options);
|
||||
response.toJSON = () => createResponseToJSON(response);
|
||||
return response;
|
||||
}
|
||||
async function get(options) {
|
||||
const response = await navigator.credentials.get(options);
|
||||
response.toJSON = () => getResponseToJSON(response);
|
||||
return response;
|
||||
}
|
||||
export {
|
||||
create,
|
||||
get,
|
||||
createRequestFromJSON as parseCreationOptionsFromJSON,
|
||||
getRequestFromJSON as parseRequestOptionsFromJSON,
|
||||
supported
|
||||
};
|
||||
//# sourceMappingURL=webauthn-json.browser-ponyfill.js.map
|
File diff suppressed because one or more lines are too long
|
@ -1,51 +0,0 @@
|
|||
<html>
|
||||
<head>
|
||||
<title>Fido 2.0 webauthn demo</title>
|
||||
<script src="/cbor.js"></script>
|
||||
<style>
|
||||
body { font-family: sans-serif; line-height: 1.5em; padding: 2em 10em; }
|
||||
h1, h2 { color: #325F74; }
|
||||
a { color: #0080ac; font-weight: bold; text-decoration: none;}
|
||||
a:hover { text-decoration: underline; }
|
||||
</style>
|
||||
</head>
|
||||
<body>
|
||||
<h1>WebAuthn demo using python-fido2</h1>
|
||||
<p>This demo requires a browser supporting the WebAuthn API!</p>
|
||||
<hr>
|
||||
|
||||
<h2>Authenticate using a credential</h2>
|
||||
<p>Touch your authenticator device now...</p>
|
||||
<a href="/">Cancel</a>
|
||||
|
||||
<script>
|
||||
fetch('/api/authenticate/begin', {
|
||||
method: 'POST',
|
||||
}).then(function(response) {
|
||||
if(response.ok) return response.arrayBuffer();
|
||||
throw new Error('No credential available to authenticate!');
|
||||
}).then(CBOR.decode).then(function(options) {
|
||||
return navigator.credentials.get(options);
|
||||
}).then(function(assertion) {
|
||||
return fetch('/api/authenticate/complete', {
|
||||
method: 'POST',
|
||||
headers: {'Content-Type': 'application/cbor'},
|
||||
body: CBOR.encode({
|
||||
"credentialId": new Uint8Array(assertion.rawId),
|
||||
"authenticatorData": new Uint8Array(assertion.response.authenticatorData),
|
||||
"clientDataJSON": new Uint8Array(assertion.response.clientDataJSON),
|
||||
"signature": new Uint8Array(assertion.response.signature)
|
||||
})
|
||||
})
|
||||
}).then(function(response) {
|
||||
var stat = response.ok ? 'successful' : 'unsuccessful';
|
||||
alert('Authentication ' + stat + ' More details in server log...');
|
||||
}, function(reason) {
|
||||
alert(reason);
|
||||
}).then(function() {
|
||||
window.location = '/';
|
||||
});
|
||||
</script>
|
||||
|
||||
</body>
|
||||
</html>
|
|
@ -1,406 +0,0 @@
|
|||
/*
|
||||
* The MIT License (MIT)
|
||||
*
|
||||
* Copyright (c) 2014-2016 Patrick Gansterer <paroga@paroga.com>
|
||||
*
|
||||
* Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
* of this software and associated documentation files (the "Software"), to deal
|
||||
* in the Software without restriction, including without limitation the rights
|
||||
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
* copies of the Software, and to permit persons to whom the Software is
|
||||
* furnished to do so, subject to the following conditions:
|
||||
*
|
||||
* The above copyright notice and this permission notice shall be included in all
|
||||
* copies or substantial portions of the Software.
|
||||
*
|
||||
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
* SOFTWARE.
|
||||
*/
|
||||
|
||||
(function(global, undefined) { "use strict";
|
||||
var POW_2_24 = 5.960464477539063e-8,
|
||||
POW_2_32 = 4294967296,
|
||||
POW_2_53 = 9007199254740992;
|
||||
|
||||
function encode(value) {
|
||||
var data = new ArrayBuffer(256);
|
||||
var dataView = new DataView(data);
|
||||
var lastLength;
|
||||
var offset = 0;
|
||||
|
||||
function prepareWrite(length) {
|
||||
var newByteLength = data.byteLength;
|
||||
var requiredLength = offset + length;
|
||||
while (newByteLength < requiredLength)
|
||||
newByteLength <<= 1;
|
||||
if (newByteLength !== data.byteLength) {
|
||||
var oldDataView = dataView;
|
||||
data = new ArrayBuffer(newByteLength);
|
||||
dataView = new DataView(data);
|
||||
var uint32count = (offset + 3) >> 2;
|
||||
for (var i = 0; i < uint32count; ++i)
|
||||
dataView.setUint32(i << 2, oldDataView.getUint32(i << 2));
|
||||
}
|
||||
|
||||
lastLength = length;
|
||||
return dataView;
|
||||
}
|
||||
function commitWrite() {
|
||||
offset += lastLength;
|
||||
}
|
||||
function writeFloat64(value) {
|
||||
commitWrite(prepareWrite(8).setFloat64(offset, value));
|
||||
}
|
||||
function writeUint8(value) {
|
||||
commitWrite(prepareWrite(1).setUint8(offset, value));
|
||||
}
|
||||
function writeUint8Array(value) {
|
||||
var dataView = prepareWrite(value.length);
|
||||
for (var i = 0; i < value.length; ++i)
|
||||
dataView.setUint8(offset + i, value[i]);
|
||||
commitWrite();
|
||||
}
|
||||
function writeUint16(value) {
|
||||
commitWrite(prepareWrite(2).setUint16(offset, value));
|
||||
}
|
||||
function writeUint32(value) {
|
||||
commitWrite(prepareWrite(4).setUint32(offset, value));
|
||||
}
|
||||
function writeUint64(value) {
|
||||
var low = value % POW_2_32;
|
||||
var high = (value - low) / POW_2_32;
|
||||
var dataView = prepareWrite(8);
|
||||
dataView.setUint32(offset, high);
|
||||
dataView.setUint32(offset + 4, low);
|
||||
commitWrite();
|
||||
}
|
||||
function writeTypeAndLength(type, length) {
|
||||
if (length < 24) {
|
||||
writeUint8(type << 5 | length);
|
||||
} else if (length < 0x100) {
|
||||
writeUint8(type << 5 | 24);
|
||||
writeUint8(length);
|
||||
} else if (length < 0x10000) {
|
||||
writeUint8(type << 5 | 25);
|
||||
writeUint16(length);
|
||||
} else if (length < 0x100000000) {
|
||||
writeUint8(type << 5 | 26);
|
||||
writeUint32(length);
|
||||
} else {
|
||||
writeUint8(type << 5 | 27);
|
||||
writeUint64(length);
|
||||
}
|
||||
}
|
||||
|
||||
function encodeItem(value) {
|
||||
var i;
|
||||
|
||||
if (value === false)
|
||||
return writeUint8(0xf4);
|
||||
if (value === true)
|
||||
return writeUint8(0xf5);
|
||||
if (value === null)
|
||||
return writeUint8(0xf6);
|
||||
if (value === undefined)
|
||||
return writeUint8(0xf7);
|
||||
|
||||
switch (typeof value) {
|
||||
case "number":
|
||||
if (Math.floor(value) === value) {
|
||||
if (0 <= value && value <= POW_2_53)
|
||||
return writeTypeAndLength(0, value);
|
||||
if (-POW_2_53 <= value && value < 0)
|
||||
return writeTypeAndLength(1, -(value + 1));
|
||||
}
|
||||
writeUint8(0xfb);
|
||||
return writeFloat64(value);
|
||||
|
||||
case "string":
|
||||
var utf8data = [];
|
||||
for (i = 0; i < value.length; ++i) {
|
||||
var charCode = value.charCodeAt(i);
|
||||
if (charCode < 0x80) {
|
||||
utf8data.push(charCode);
|
||||
} else if (charCode < 0x800) {
|
||||
utf8data.push(0xc0 | charCode >> 6);
|
||||
utf8data.push(0x80 | charCode & 0x3f);
|
||||
} else if (charCode < 0xd800) {
|
||||
utf8data.push(0xe0 | charCode >> 12);
|
||||
utf8data.push(0x80 | (charCode >> 6) & 0x3f);
|
||||
utf8data.push(0x80 | charCode & 0x3f);
|
||||
} else {
|
||||
charCode = (charCode & 0x3ff) << 10;
|
||||
charCode |= value.charCodeAt(++i) & 0x3ff;
|
||||
charCode += 0x10000;
|
||||
|
||||
utf8data.push(0xf0 | charCode >> 18);
|
||||
utf8data.push(0x80 | (charCode >> 12) & 0x3f);
|
||||
utf8data.push(0x80 | (charCode >> 6) & 0x3f);
|
||||
utf8data.push(0x80 | charCode & 0x3f);
|
||||
}
|
||||
}
|
||||
|
||||
writeTypeAndLength(3, utf8data.length);
|
||||
return writeUint8Array(utf8data);
|
||||
|
||||
default:
|
||||
var length;
|
||||
if (Array.isArray(value)) {
|
||||
length = value.length;
|
||||
writeTypeAndLength(4, length);
|
||||
for (i = 0; i < length; ++i)
|
||||
encodeItem(value[i]);
|
||||
} else if (value instanceof Uint8Array) {
|
||||
writeTypeAndLength(2, value.length);
|
||||
writeUint8Array(value);
|
||||
} else {
|
||||
var keys = Object.keys(value);
|
||||
length = keys.length;
|
||||
writeTypeAndLength(5, length);
|
||||
for (i = 0; i < length; ++i) {
|
||||
var key = keys[i];
|
||||
encodeItem(key);
|
||||
encodeItem(value[key]);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
encodeItem(value);
|
||||
|
||||
if ("slice" in data)
|
||||
return data.slice(0, offset);
|
||||
|
||||
var ret = new ArrayBuffer(offset);
|
||||
var retView = new DataView(ret);
|
||||
for (var i = 0; i < offset; ++i)
|
||||
retView.setUint8(i, dataView.getUint8(i));
|
||||
return ret;
|
||||
}
|
||||
|
||||
function decode(data, tagger, simpleValue) {
|
||||
var dataView = new DataView(data);
|
||||
var offset = 0;
|
||||
|
||||
if (typeof tagger !== "function")
|
||||
tagger = function(value) { return value; };
|
||||
if (typeof simpleValue !== "function")
|
||||
simpleValue = function() { return undefined; };
|
||||
|
||||
function commitRead(length, value) {
|
||||
offset += length;
|
||||
return value;
|
||||
}
|
||||
function readArrayBuffer(length) {
|
||||
return commitRead(length, new Uint8Array(data, offset, length));
|
||||
}
|
||||
function readFloat16() {
|
||||
var tempArrayBuffer = new ArrayBuffer(4);
|
||||
var tempDataView = new DataView(tempArrayBuffer);
|
||||
var value = readUint16();
|
||||
|
||||
var sign = value & 0x8000;
|
||||
var exponent = value & 0x7c00;
|
||||
var fraction = value & 0x03ff;
|
||||
|
||||
if (exponent === 0x7c00)
|
||||
exponent = 0xff << 10;
|
||||
else if (exponent !== 0)
|
||||
exponent += (127 - 15) << 10;
|
||||
else if (fraction !== 0)
|
||||
return (sign ? -1 : 1) * fraction * POW_2_24;
|
||||
|
||||
tempDataView.setUint32(0, sign << 16 | exponent << 13 | fraction << 13);
|
||||
return tempDataView.getFloat32(0);
|
||||
}
|
||||
function readFloat32() {
|
||||
return commitRead(4, dataView.getFloat32(offset));
|
||||
}
|
||||
function readFloat64() {
|
||||
return commitRead(8, dataView.getFloat64(offset));
|
||||
}
|
||||
function readUint8() {
|
||||
return commitRead(1, dataView.getUint8(offset));
|
||||
}
|
||||
function readUint16() {
|
||||
return commitRead(2, dataView.getUint16(offset));
|
||||
}
|
||||
function readUint32() {
|
||||
return commitRead(4, dataView.getUint32(offset));
|
||||
}
|
||||
function readUint64() {
|
||||
return readUint32() * POW_2_32 + readUint32();
|
||||
}
|
||||
function readBreak() {
|
||||
if (dataView.getUint8(offset) !== 0xff)
|
||||
return false;
|
||||
offset += 1;
|
||||
return true;
|
||||
}
|
||||
function readLength(additionalInformation) {
|
||||
if (additionalInformation < 24)
|
||||
return additionalInformation;
|
||||
if (additionalInformation === 24)
|
||||
return readUint8();
|
||||
if (additionalInformation === 25)
|
||||
return readUint16();
|
||||
if (additionalInformation === 26)
|
||||
return readUint32();
|
||||
if (additionalInformation === 27)
|
||||
return readUint64();
|
||||
if (additionalInformation === 31)
|
||||
return -1;
|
||||
throw "Invalid length encoding";
|
||||
}
|
||||
function readIndefiniteStringLength(majorType) {
|
||||
var initialByte = readUint8();
|
||||
if (initialByte === 0xff)
|
||||
return -1;
|
||||
var length = readLength(initialByte & 0x1f);
|
||||
if (length < 0 || (initialByte >> 5) !== majorType)
|
||||
throw "Invalid indefinite length element";
|
||||
return length;
|
||||
}
|
||||
|
||||
function appendUtf16Data(utf16data, length) {
|
||||
for (var i = 0; i < length; ++i) {
|
||||
var value = readUint8();
|
||||
if (value & 0x80) {
|
||||
if (value < 0xe0) {
|
||||
value = (value & 0x1f) << 6
|
||||
| (readUint8() & 0x3f);
|
||||
length -= 1;
|
||||
} else if (value < 0xf0) {
|
||||
value = (value & 0x0f) << 12
|
||||
| (readUint8() & 0x3f) << 6
|
||||
| (readUint8() & 0x3f);
|
||||
length -= 2;
|
||||
} else {
|
||||
value = (value & 0x0f) << 18
|
||||
| (readUint8() & 0x3f) << 12
|
||||
| (readUint8() & 0x3f) << 6
|
||||
| (readUint8() & 0x3f);
|
||||
length -= 3;
|
||||
}
|
||||
}
|
||||
|
||||
if (value < 0x10000) {
|
||||
utf16data.push(value);
|
||||
} else {
|
||||
value -= 0x10000;
|
||||
utf16data.push(0xd800 | (value >> 10));
|
||||
utf16data.push(0xdc00 | (value & 0x3ff));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function decodeItem() {
|
||||
var initialByte = readUint8();
|
||||
var majorType = initialByte >> 5;
|
||||
var additionalInformation = initialByte & 0x1f;
|
||||
var i;
|
||||
var length;
|
||||
|
||||
if (majorType === 7) {
|
||||
switch (additionalInformation) {
|
||||
case 25:
|
||||
return readFloat16();
|
||||
case 26:
|
||||
return readFloat32();
|
||||
case 27:
|
||||
return readFloat64();
|
||||
}
|
||||
}
|
||||
|
||||
length = readLength(additionalInformation);
|
||||
if (length < 0 && (majorType < 2 || 6 < majorType))
|
||||
throw "Invalid length";
|
||||
|
||||
switch (majorType) {
|
||||
case 0:
|
||||
return length;
|
||||
case 1:
|
||||
return -1 - length;
|
||||
case 2:
|
||||
if (length < 0) {
|
||||
var elements = [];
|
||||
var fullArrayLength = 0;
|
||||
while ((length = readIndefiniteStringLength(majorType)) >= 0) {
|
||||
fullArrayLength += length;
|
||||
elements.push(readArrayBuffer(length));
|
||||
}
|
||||
var fullArray = new Uint8Array(fullArrayLength);
|
||||
var fullArrayOffset = 0;
|
||||
for (i = 0; i < elements.length; ++i) {
|
||||
fullArray.set(elements[i], fullArrayOffset);
|
||||
fullArrayOffset += elements[i].length;
|
||||
}
|
||||
return fullArray;
|
||||
}
|
||||
return readArrayBuffer(length);
|
||||
case 3:
|
||||
var utf16data = [];
|
||||
if (length < 0) {
|
||||
while ((length = readIndefiniteStringLength(majorType)) >= 0)
|
||||
appendUtf16Data(utf16data, length);
|
||||
} else
|
||||
appendUtf16Data(utf16data, length);
|
||||
return String.fromCharCode.apply(null, utf16data);
|
||||
case 4:
|
||||
var retArray;
|
||||
if (length < 0) {
|
||||
retArray = [];
|
||||
while (!readBreak())
|
||||
retArray.push(decodeItem());
|
||||
} else {
|
||||
retArray = new Array(length);
|
||||
for (i = 0; i < length; ++i)
|
||||
retArray[i] = decodeItem();
|
||||
}
|
||||
return retArray;
|
||||
case 5:
|
||||
var retObject = {};
|
||||
for (i = 0; i < length || length < 0 && !readBreak(); ++i) {
|
||||
var key = decodeItem();
|
||||
retObject[key] = decodeItem();
|
||||
}
|
||||
return retObject;
|
||||
case 6:
|
||||
return tagger(decodeItem(), length);
|
||||
case 7:
|
||||
switch (length) {
|
||||
case 20:
|
||||
return false;
|
||||
case 21:
|
||||
return true;
|
||||
case 22:
|
||||
return null;
|
||||
case 23:
|
||||
return undefined;
|
||||
default:
|
||||
return simpleValue(length);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
var ret = decodeItem();
|
||||
if (offset !== data.byteLength)
|
||||
throw "Remaining bytes";
|
||||
return ret;
|
||||
}
|
||||
|
||||
var obj = { encode: encode, decode: decode };
|
||||
|
||||
if (typeof define === "function" && define.amd)
|
||||
define("cbor/cbor", obj);
|
||||
else if (typeof module !== "undefined" && module.exports)
|
||||
module.exports = obj;
|
||||
else if (!global.CBOR)
|
||||
global.CBOR = obj;
|
||||
|
||||
})(this);
|
|
@ -1,27 +0,0 @@
|
|||
<html>
|
||||
<head>
|
||||
<title>Fido 2.0 webauthn demo</title>
|
||||
<script src="/cbor.js"></script>
|
||||
<style>
|
||||
body { font-family: sans-serif; line-height: 1.5em; padding: 2em 10em; }
|
||||
h1, h2 { color: #325F74; }
|
||||
a { color: #0080ac; font-weight: bold; text-decoration: none;}
|
||||
a:hover { text-decoration: underline; }
|
||||
</style>
|
||||
</head>
|
||||
<body>
|
||||
<h1>WebAuthn demo using python-fido2</h1>
|
||||
<p>This demo requires a browser supporting the WebAuthn API!</p>
|
||||
<hr>
|
||||
|
||||
<h2>Available actions</h2>
|
||||
<a href="/register.html">Register</a><br>
|
||||
<a href="/authenticate.html">Authenticate</a><br>
|
||||
|
||||
<p>
|
||||
To allow the testing of authenticating with legacy U2F credentials, you can
|
||||
also register a U2F credential:
|
||||
<a href="/u2f.html">Register U2F</a>
|
||||
</p>
|
||||
</body>
|
||||
</html>
|
|
@ -1,49 +0,0 @@
|
|||
<html>
|
||||
<head>
|
||||
<title>Fido 2.0 webauthn demo</title>
|
||||
<script src="/cbor.js"></script>
|
||||
<style>
|
||||
body { font-family: sans-serif; line-height: 1.5em; padding: 2em 10em; }
|
||||
h1, h2 { color: #325F74; }
|
||||
a { color: #0080ac; font-weight: bold; text-decoration: none;}
|
||||
a:hover { text-decoration: underline; }
|
||||
</style>
|
||||
</head>
|
||||
<body>
|
||||
<h1>WebAuthn demo using python-fido2</h1>
|
||||
<p>This demo requires a browser supporting the WebAuthn API!</p>
|
||||
<hr>
|
||||
|
||||
<h2>Register a credential</h2>
|
||||
<p>Touch your authenticator device now...</p>
|
||||
<a href="/">Cancel</a>
|
||||
|
||||
<script>
|
||||
fetch('/api/register/begin', {
|
||||
method: 'POST',
|
||||
}).then(function(response) {
|
||||
if(response.ok) return response.arrayBuffer();
|
||||
throw new Error('Error getting registration data!');
|
||||
}).then(CBOR.decode).then(function(options) {
|
||||
return navigator.credentials.create(options);
|
||||
}).then(function(attestation) {
|
||||
return fetch('/api/register/complete', {
|
||||
method: 'POST',
|
||||
headers: {'Content-Type': 'application/cbor'},
|
||||
body: CBOR.encode({
|
||||
"attestationObject": new Uint8Array(attestation.response.attestationObject),
|
||||
"clientDataJSON": new Uint8Array(attestation.response.clientDataJSON),
|
||||
})
|
||||
});
|
||||
}).then(function(response) {
|
||||
var stat = response.ok ? 'successful' : 'unsuccessful';
|
||||
alert('Registration ' + stat + ' More details in server log...');
|
||||
}, function(reason) {
|
||||
alert(reason);
|
||||
}).then(function() {
|
||||
window.location = '/';
|
||||
});
|
||||
</script>
|
||||
|
||||
</body>
|
||||
</html>
|
|
@ -1,748 +0,0 @@
|
|||
//Copyright 2014-2015 Google Inc. All rights reserved.
|
||||
|
||||
//Use of this source code is governed by a BSD-style
|
||||
//license that can be found in the LICENSE file or at
|
||||
//https://developers.google.com/open-source/licenses/bsd
|
||||
|
||||
/**
|
||||
* @fileoverview The U2F api.
|
||||
*/
|
||||
'use strict';
|
||||
|
||||
|
||||
/**
|
||||
* Namespace for the U2F api.
|
||||
* @type {Object}
|
||||
*/
|
||||
var u2f = u2f || {};
|
||||
|
||||
/**
|
||||
* FIDO U2F Javascript API Version
|
||||
* @number
|
||||
*/
|
||||
var js_api_version;
|
||||
|
||||
/**
|
||||
* The U2F extension id
|
||||
* @const {string}
|
||||
*/
|
||||
// The Chrome packaged app extension ID.
|
||||
// Uncomment this if you want to deploy a server instance that uses
|
||||
// the package Chrome app and does not require installing the U2F Chrome extension.
|
||||
u2f.EXTENSION_ID = 'kmendfapggjehodndflmmgagdbamhnfd';
|
||||
// The U2F Chrome extension ID.
|
||||
// Uncomment this if you want to deploy a server instance that uses
|
||||
// the U2F Chrome extension to authenticate.
|
||||
// u2f.EXTENSION_ID = 'pfboblefjcgdjicmnffhdgionmgcdmne';
|
||||
|
||||
|
||||
/**
|
||||
* Message types for messsages to/from the extension
|
||||
* @const
|
||||
* @enum {string}
|
||||
*/
|
||||
u2f.MessageTypes = {
|
||||
'U2F_REGISTER_REQUEST': 'u2f_register_request',
|
||||
'U2F_REGISTER_RESPONSE': 'u2f_register_response',
|
||||
'U2F_SIGN_REQUEST': 'u2f_sign_request',
|
||||
'U2F_SIGN_RESPONSE': 'u2f_sign_response',
|
||||
'U2F_GET_API_VERSION_REQUEST': 'u2f_get_api_version_request',
|
||||
'U2F_GET_API_VERSION_RESPONSE': 'u2f_get_api_version_response'
|
||||
};
|
||||
|
||||
|
||||
/**
|
||||
* Response status codes
|
||||
* @const
|
||||
* @enum {number}
|
||||
*/
|
||||
u2f.ErrorCodes = {
|
||||
'OK': 0,
|
||||
'OTHER_ERROR': 1,
|
||||
'BAD_REQUEST': 2,
|
||||
'CONFIGURATION_UNSUPPORTED': 3,
|
||||
'DEVICE_INELIGIBLE': 4,
|
||||
'TIMEOUT': 5
|
||||
};
|
||||
|
||||
|
||||
/**
|
||||
* A message for registration requests
|
||||
* @typedef {{
|
||||
* type: u2f.MessageTypes,
|
||||
* appId: ?string,
|
||||
* timeoutSeconds: ?number,
|
||||
* requestId: ?number
|
||||
* }}
|
||||
*/
|
||||
u2f.U2fRequest;
|
||||
|
||||
|
||||
/**
|
||||
* A message for registration responses
|
||||
* @typedef {{
|
||||
* type: u2f.MessageTypes,
|
||||
* responseData: (u2f.Error | u2f.RegisterResponse | u2f.SignResponse),
|
||||
* requestId: ?number
|
||||
* }}
|
||||
*/
|
||||
u2f.U2fResponse;
|
||||
|
||||
|
||||
/**
|
||||
* An error object for responses
|
||||
* @typedef {{
|
||||
* errorCode: u2f.ErrorCodes,
|
||||
* errorMessage: ?string
|
||||
* }}
|
||||
*/
|
||||
u2f.Error;
|
||||
|
||||
/**
|
||||
* Data object for a single sign request.
|
||||
* @typedef {enum {BLUETOOTH_RADIO, BLUETOOTH_LOW_ENERGY, USB, NFC}}
|
||||
*/
|
||||
u2f.Transport;
|
||||
|
||||
|
||||
/**
|
||||
* Data object for a single sign request.
|
||||
* @typedef {Array<u2f.Transport>}
|
||||
*/
|
||||
u2f.Transports;
|
||||
|
||||
/**
|
||||
* Data object for a single sign request.
|
||||
* @typedef {{
|
||||
* version: string,
|
||||
* challenge: string,
|
||||
* keyHandle: string,
|
||||
* appId: string
|
||||
* }}
|
||||
*/
|
||||
u2f.SignRequest;
|
||||
|
||||
|
||||
/**
|
||||
* Data object for a sign response.
|
||||
* @typedef {{
|
||||
* keyHandle: string,
|
||||
* signatureData: string,
|
||||
* clientData: string
|
||||
* }}
|
||||
*/
|
||||
u2f.SignResponse;
|
||||
|
||||
|
||||
/**
|
||||
* Data object for a registration request.
|
||||
* @typedef {{
|
||||
* version: string,
|
||||
* challenge: string
|
||||
* }}
|
||||
*/
|
||||
u2f.RegisterRequest;
|
||||
|
||||
|
||||
/**
|
||||
* Data object for a registration response.
|
||||
* @typedef {{
|
||||
* version: string,
|
||||
* keyHandle: string,
|
||||
* transports: Transports,
|
||||
* appId: string
|
||||
* }}
|
||||
*/
|
||||
u2f.RegisterResponse;
|
||||
|
||||
|
||||
/**
|
||||
* Data object for a registered key.
|
||||
* @typedef {{
|
||||
* version: string,
|
||||
* keyHandle: string,
|
||||
* transports: ?Transports,
|
||||
* appId: ?string
|
||||
* }}
|
||||
*/
|
||||
u2f.RegisteredKey;
|
||||
|
||||
|
||||
/**
|
||||
* Data object for a get API register response.
|
||||
* @typedef {{
|
||||
* js_api_version: number
|
||||
* }}
|
||||
*/
|
||||
u2f.GetJsApiVersionResponse;
|
||||
|
||||
|
||||
//Low level MessagePort API support
|
||||
|
||||
/**
|
||||
* Sets up a MessagePort to the U2F extension using the
|
||||
* available mechanisms.
|
||||
* @param {function((MessagePort|u2f.WrappedChromeRuntimePort_))} callback
|
||||
*/
|
||||
u2f.getMessagePort = function(callback) {
|
||||
if (typeof chrome != 'undefined' && chrome.runtime) {
|
||||
// The actual message here does not matter, but we need to get a reply
|
||||
// for the callback to run. Thus, send an empty signature request
|
||||
// in order to get a failure response.
|
||||
var msg = {
|
||||
type: u2f.MessageTypes.U2F_SIGN_REQUEST,
|
||||
signRequests: []
|
||||
};
|
||||
chrome.runtime.sendMessage(u2f.EXTENSION_ID, msg, function() {
|
||||
if (!chrome.runtime.lastError) {
|
||||
// We are on a whitelisted origin and can talk directly
|
||||
// with the extension.
|
||||
u2f.getChromeRuntimePort_(callback);
|
||||
} else {
|
||||
// chrome.runtime was available, but we couldn't message
|
||||
// the extension directly, use iframe
|
||||
u2f.getIframePort_(callback);
|
||||
}
|
||||
});
|
||||
} else if (u2f.isAndroidChrome_()) {
|
||||
u2f.getAuthenticatorPort_(callback);
|
||||
} else if (u2f.isIosChrome_()) {
|
||||
u2f.getIosPort_(callback);
|
||||
} else {
|
||||
// chrome.runtime was not available at all, which is normal
|
||||
// when this origin doesn't have access to any extensions.
|
||||
u2f.getIframePort_(callback);
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Detect chrome running on android based on the browser's useragent.
|
||||
* @private
|
||||
*/
|
||||
u2f.isAndroidChrome_ = function() {
|
||||
var userAgent = navigator.userAgent;
|
||||
return userAgent.indexOf('Chrome') != -1 &&
|
||||
userAgent.indexOf('Android') != -1;
|
||||
};
|
||||
|
||||
/**
|
||||
* Detect chrome running on iOS based on the browser's platform.
|
||||
* @private
|
||||
*/
|
||||
u2f.isIosChrome_ = function() {
|
||||
return ["iPhone", "iPad", "iPod"].indexOf(navigator.platform) > -1;
|
||||
};
|
||||
|
||||
/**
|
||||
* Connects directly to the extension via chrome.runtime.connect.
|
||||
* @param {function(u2f.WrappedChromeRuntimePort_)} callback
|
||||
* @private
|
||||
*/
|
||||
u2f.getChromeRuntimePort_ = function(callback) {
|
||||
var port = chrome.runtime.connect(u2f.EXTENSION_ID,
|
||||
{'includeTlsChannelId': true});
|
||||
setTimeout(function() {
|
||||
callback(new u2f.WrappedChromeRuntimePort_(port));
|
||||
}, 0);
|
||||
};
|
||||
|
||||
/**
|
||||
* Return a 'port' abstraction to the Authenticator app.
|
||||
* @param {function(u2f.WrappedAuthenticatorPort_)} callback
|
||||
* @private
|
||||
*/
|
||||
u2f.getAuthenticatorPort_ = function(callback) {
|
||||
setTimeout(function() {
|
||||
callback(new u2f.WrappedAuthenticatorPort_());
|
||||
}, 0);
|
||||
};
|
||||
|
||||
/**
|
||||
* Return a 'port' abstraction to the iOS client app.
|
||||
* @param {function(u2f.WrappedIosPort_)} callback
|
||||
* @private
|
||||
*/
|
||||
u2f.getIosPort_ = function(callback) {
|
||||
setTimeout(function() {
|
||||
callback(new u2f.WrappedIosPort_());
|
||||
}, 0);
|
||||
};
|
||||
|
||||
/**
|
||||
* A wrapper for chrome.runtime.Port that is compatible with MessagePort.
|
||||
* @param {Port} port
|
||||
* @constructor
|
||||
* @private
|
||||
*/
|
||||
u2f.WrappedChromeRuntimePort_ = function(port) {
|
||||
this.port_ = port;
|
||||
};
|
||||
|
||||
/**
|
||||
* Format and return a sign request compliant with the JS API version supported by the extension.
|
||||
* @param {Array<u2f.SignRequest>} signRequests
|
||||
* @param {number} timeoutSeconds
|
||||
* @param {number} reqId
|
||||
* @return {Object}
|
||||
*/
|
||||
u2f.formatSignRequest_ =
|
||||
function(appId, challenge, registeredKeys, timeoutSeconds, reqId) {
|
||||
if (js_api_version === undefined || js_api_version < 1.1) {
|
||||
// Adapt request to the 1.0 JS API
|
||||
var signRequests = [];
|
||||
for (var i = 0; i < registeredKeys.length; i++) {
|
||||
signRequests[i] = {
|
||||
version: registeredKeys[i].version,
|
||||
challenge: challenge,
|
||||
keyHandle: registeredKeys[i].keyHandle,
|
||||
appId: appId
|
||||
};
|
||||
}
|
||||
return {
|
||||
type: u2f.MessageTypes.U2F_SIGN_REQUEST,
|
||||
signRequests: signRequests,
|
||||
timeoutSeconds: timeoutSeconds,
|
||||
requestId: reqId
|
||||
};
|
||||
}
|
||||
// JS 1.1 API
|
||||
return {
|
||||
type: u2f.MessageTypes.U2F_SIGN_REQUEST,
|
||||
appId: appId,
|
||||
challenge: challenge,
|
||||
registeredKeys: registeredKeys,
|
||||
timeoutSeconds: timeoutSeconds,
|
||||
requestId: reqId
|
||||
};
|
||||
};
|
||||
|
||||
/**
|
||||
* Format and return a register request compliant with the JS API version supported by the extension..
|
||||
* @param {Array<u2f.SignRequest>} signRequests
|
||||
* @param {Array<u2f.RegisterRequest>} signRequests
|
||||
* @param {number} timeoutSeconds
|
||||
* @param {number} reqId
|
||||
* @return {Object}
|
||||
*/
|
||||
u2f.formatRegisterRequest_ =
|
||||
function(appId, registeredKeys, registerRequests, timeoutSeconds, reqId) {
|
||||
if (js_api_version === undefined || js_api_version < 1.1) {
|
||||
// Adapt request to the 1.0 JS API
|
||||
for (var i = 0; i < registerRequests.length; i++) {
|
||||
registerRequests[i].appId = appId;
|
||||
}
|
||||
var signRequests = [];
|
||||
for (var i = 0; i < registeredKeys.length; i++) {
|
||||
signRequests[i] = {
|
||||
version: registeredKeys[i].version,
|
||||
challenge: registerRequests[0],
|
||||
keyHandle: registeredKeys[i].keyHandle,
|
||||
appId: appId
|
||||
};
|
||||
}
|
||||
return {
|
||||
type: u2f.MessageTypes.U2F_REGISTER_REQUEST,
|
||||
signRequests: signRequests,
|
||||
registerRequests: registerRequests,
|
||||
timeoutSeconds: timeoutSeconds,
|
||||
requestId: reqId
|
||||
};
|
||||
}
|
||||
// JS 1.1 API
|
||||
return {
|
||||
type: u2f.MessageTypes.U2F_REGISTER_REQUEST,
|
||||
appId: appId,
|
||||
registerRequests: registerRequests,
|
||||
registeredKeys: registeredKeys,
|
||||
timeoutSeconds: timeoutSeconds,
|
||||
requestId: reqId
|
||||
};
|
||||
};
|
||||
|
||||
|
||||
/**
|
||||
* Posts a message on the underlying channel.
|
||||
* @param {Object} message
|
||||
*/
|
||||
u2f.WrappedChromeRuntimePort_.prototype.postMessage = function(message) {
|
||||
this.port_.postMessage(message);
|
||||
};
|
||||
|
||||
|
||||
/**
|
||||
* Emulates the HTML 5 addEventListener interface. Works only for the
|
||||
* onmessage event, which is hooked up to the chrome.runtime.Port.onMessage.
|
||||
* @param {string} eventName
|
||||
* @param {function({data: Object})} handler
|
||||
*/
|
||||
u2f.WrappedChromeRuntimePort_.prototype.addEventListener =
|
||||
function(eventName, handler) {
|
||||
var name = eventName.toLowerCase();
|
||||
if (name == 'message' || name == 'onmessage') {
|
||||
this.port_.onMessage.addListener(function(message) {
|
||||
// Emulate a minimal MessageEvent object
|
||||
handler({'data': message});
|
||||
});
|
||||
} else {
|
||||
console.error('WrappedChromeRuntimePort only supports onMessage');
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Wrap the Authenticator app with a MessagePort interface.
|
||||
* @constructor
|
||||
* @private
|
||||
*/
|
||||
u2f.WrappedAuthenticatorPort_ = function() {
|
||||
this.requestId_ = -1;
|
||||
this.requestObject_ = null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Launch the Authenticator intent.
|
||||
* @param {Object} message
|
||||
*/
|
||||
u2f.WrappedAuthenticatorPort_.prototype.postMessage = function(message) {
|
||||
var intentUrl =
|
||||
u2f.WrappedAuthenticatorPort_.INTENT_URL_BASE_ +
|
||||
';S.request=' + encodeURIComponent(JSON.stringify(message)) +
|
||||
';end';
|
||||
document.location = intentUrl;
|
||||
};
|
||||
|
||||
/**
|
||||
* Tells what type of port this is.
|
||||
* @return {String} port type
|
||||
*/
|
||||
u2f.WrappedAuthenticatorPort_.prototype.getPortType = function() {
|
||||
return "WrappedAuthenticatorPort_";
|
||||
};
|
||||
|
||||
|
||||
/**
|
||||
* Emulates the HTML 5 addEventListener interface.
|
||||
* @param {string} eventName
|
||||
* @param {function({data: Object})} handler
|
||||
*/
|
||||
u2f.WrappedAuthenticatorPort_.prototype.addEventListener = function(eventName, handler) {
|
||||
var name = eventName.toLowerCase();
|
||||
if (name == 'message') {
|
||||
var self = this;
|
||||
/* Register a callback to that executes when
|
||||
* chrome injects the response. */
|
||||
window.addEventListener(
|
||||
'message', self.onRequestUpdate_.bind(self, handler), false);
|
||||
} else {
|
||||
console.error('WrappedAuthenticatorPort only supports message');
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Callback invoked when a response is received from the Authenticator.
|
||||
* @param function({data: Object}) callback
|
||||
* @param {Object} message message Object
|
||||
*/
|
||||
u2f.WrappedAuthenticatorPort_.prototype.onRequestUpdate_ =
|
||||
function(callback, message) {
|
||||
var messageObject = JSON.parse(message.data);
|
||||
var intentUrl = messageObject['intentURL'];
|
||||
|
||||
var errorCode = messageObject['errorCode'];
|
||||
var responseObject = null;
|
||||
if (messageObject.hasOwnProperty('data')) {
|
||||
responseObject = /** @type {Object} */ (
|
||||
JSON.parse(messageObject['data']));
|
||||
}
|
||||
|
||||
callback({'data': responseObject});
|
||||
};
|
||||
|
||||
/**
|
||||
* Base URL for intents to Authenticator.
|
||||
* @const
|
||||
* @private
|
||||
*/
|
||||
u2f.WrappedAuthenticatorPort_.INTENT_URL_BASE_ =
|
||||
'intent:#Intent;action=com.google.android.apps.authenticator.AUTHENTICATE';
|
||||
|
||||
/**
|
||||
* Wrap the iOS client app with a MessagePort interface.
|
||||
* @constructor
|
||||
* @private
|
||||
*/
|
||||
u2f.WrappedIosPort_ = function() {};
|
||||
|
||||
/**
|
||||
* Launch the iOS client app request
|
||||
* @param {Object} message
|
||||
*/
|
||||
u2f.WrappedIosPort_.prototype.postMessage = function(message) {
|
||||
var str = JSON.stringify(message);
|
||||
var url = "u2f://auth?" + encodeURI(str);
|
||||
location.replace(url);
|
||||
};
|
||||
|
||||
/**
|
||||
* Tells what type of port this is.
|
||||
* @return {String} port type
|
||||
*/
|
||||
u2f.WrappedIosPort_.prototype.getPortType = function() {
|
||||
return "WrappedIosPort_";
|
||||
};
|
||||
|
||||
/**
|
||||
* Emulates the HTML 5 addEventListener interface.
|
||||
* @param {string} eventName
|
||||
* @param {function({data: Object})} handler
|
||||
*/
|
||||
u2f.WrappedIosPort_.prototype.addEventListener = function(eventName, handler) {
|
||||
var name = eventName.toLowerCase();
|
||||
if (name !== 'message') {
|
||||
console.error('WrappedIosPort only supports message');
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Sets up an embedded trampoline iframe, sourced from the extension.
|
||||
* @param {function(MessagePort)} callback
|
||||
* @private
|
||||
*/
|
||||
u2f.getIframePort_ = function(callback) {
|
||||
// Create the iframe
|
||||
var iframeOrigin = 'chrome-extension://' + u2f.EXTENSION_ID;
|
||||
var iframe = document.createElement('iframe');
|
||||
iframe.src = iframeOrigin + '/u2f-comms.html';
|
||||
iframe.setAttribute('style', 'display:none');
|
||||
document.body.appendChild(iframe);
|
||||
|
||||
var channel = new MessageChannel();
|
||||
var ready = function(message) {
|
||||
if (message.data == 'ready') {
|
||||
channel.port1.removeEventListener('message', ready);
|
||||
callback(channel.port1);
|
||||
} else {
|
||||
console.error('First event on iframe port was not "ready"');
|
||||
}
|
||||
};
|
||||
channel.port1.addEventListener('message', ready);
|
||||
channel.port1.start();
|
||||
|
||||
iframe.addEventListener('load', function() {
|
||||
// Deliver the port to the iframe and initialize
|
||||
iframe.contentWindow.postMessage('init', iframeOrigin, [channel.port2]);
|
||||
});
|
||||
};
|
||||
|
||||
|
||||
//High-level JS API
|
||||
|
||||
/**
|
||||
* Default extension response timeout in seconds.
|
||||
* @const
|
||||
*/
|
||||
u2f.EXTENSION_TIMEOUT_SEC = 30;
|
||||
|
||||
/**
|
||||
* A singleton instance for a MessagePort to the extension.
|
||||
* @type {MessagePort|u2f.WrappedChromeRuntimePort_}
|
||||
* @private
|
||||
*/
|
||||
u2f.port_ = null;
|
||||
|
||||
/**
|
||||
* Callbacks waiting for a port
|
||||
* @type {Array<function((MessagePort|u2f.WrappedChromeRuntimePort_))>}
|
||||
* @private
|
||||
*/
|
||||
u2f.waitingForPort_ = [];
|
||||
|
||||
/**
|
||||
* A counter for requestIds.
|
||||
* @type {number}
|
||||
* @private
|
||||
*/
|
||||
u2f.reqCounter_ = 0;
|
||||
|
||||
/**
|
||||
* A map from requestIds to client callbacks
|
||||
* @type {Object.<number,(function((u2f.Error|u2f.RegisterResponse))
|
||||
* |function((u2f.Error|u2f.SignResponse)))>}
|
||||
* @private
|
||||
*/
|
||||
u2f.callbackMap_ = {};
|
||||
|
||||
/**
|
||||
* Creates or retrieves the MessagePort singleton to use.
|
||||
* @param {function((MessagePort|u2f.WrappedChromeRuntimePort_))} callback
|
||||
* @private
|
||||
*/
|
||||
u2f.getPortSingleton_ = function(callback) {
|
||||
if (u2f.port_) {
|
||||
callback(u2f.port_);
|
||||
} else {
|
||||
if (u2f.waitingForPort_.length == 0) {
|
||||
u2f.getMessagePort(function(port) {
|
||||
u2f.port_ = port;
|
||||
u2f.port_.addEventListener('message',
|
||||
/** @type {function(Event)} */ (u2f.responseHandler_));
|
||||
|
||||
// Careful, here be async callbacks. Maybe.
|
||||
while (u2f.waitingForPort_.length)
|
||||
u2f.waitingForPort_.shift()(u2f.port_);
|
||||
});
|
||||
}
|
||||
u2f.waitingForPort_.push(callback);
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Handles response messages from the extension.
|
||||
* @param {MessageEvent.<u2f.Response>} message
|
||||
* @private
|
||||
*/
|
||||
u2f.responseHandler_ = function(message) {
|
||||
var response = message.data;
|
||||
var reqId = response['requestId'];
|
||||
if (!reqId || !u2f.callbackMap_[reqId]) {
|
||||
console.error('Unknown or missing requestId in response.');
|
||||
return;
|
||||
}
|
||||
var cb = u2f.callbackMap_[reqId];
|
||||
delete u2f.callbackMap_[reqId];
|
||||
cb(response['responseData']);
|
||||
};
|
||||
|
||||
/**
|
||||
* Dispatches an array of sign requests to available U2F tokens.
|
||||
* If the JS API version supported by the extension is unknown, it first sends a
|
||||
* message to the extension to find out the supported API version and then it sends
|
||||
* the sign request.
|
||||
* @param {string=} appId
|
||||
* @param {string=} challenge
|
||||
* @param {Array<u2f.RegisteredKey>} registeredKeys
|
||||
* @param {function((u2f.Error|u2f.SignResponse))} callback
|
||||
* @param {number=} opt_timeoutSeconds
|
||||
*/
|
||||
u2f.sign = function(appId, challenge, registeredKeys, callback, opt_timeoutSeconds) {
|
||||
if (js_api_version === undefined) {
|
||||
// Send a message to get the extension to JS API version, then send the actual sign request.
|
||||
u2f.getApiVersion(
|
||||
function (response) {
|
||||
js_api_version = response['js_api_version'] === undefined ? 0 : response['js_api_version'];
|
||||
console.log("Extension JS API Version: ", js_api_version);
|
||||
u2f.sendSignRequest(appId, challenge, registeredKeys, callback, opt_timeoutSeconds);
|
||||
});
|
||||
} else {
|
||||
// We know the JS API version. Send the actual sign request in the supported API version.
|
||||
u2f.sendSignRequest(appId, challenge, registeredKeys, callback, opt_timeoutSeconds);
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Dispatches an array of sign requests to available U2F tokens.
|
||||
* @param {string=} appId
|
||||
* @param {string=} challenge
|
||||
* @param {Array<u2f.RegisteredKey>} registeredKeys
|
||||
* @param {function((u2f.Error|u2f.SignResponse))} callback
|
||||
* @param {number=} opt_timeoutSeconds
|
||||
*/
|
||||
u2f.sendSignRequest = function(appId, challenge, registeredKeys, callback, opt_timeoutSeconds) {
|
||||
u2f.getPortSingleton_(function(port) {
|
||||
var reqId = ++u2f.reqCounter_;
|
||||
u2f.callbackMap_[reqId] = callback;
|
||||
var timeoutSeconds = (typeof opt_timeoutSeconds !== 'undefined' ?
|
||||
opt_timeoutSeconds : u2f.EXTENSION_TIMEOUT_SEC);
|
||||
var req = u2f.formatSignRequest_(appId, challenge, registeredKeys, timeoutSeconds, reqId);
|
||||
port.postMessage(req);
|
||||
});
|
||||
};
|
||||
|
||||
/**
|
||||
* Dispatches register requests to available U2F tokens. An array of sign
|
||||
* requests identifies already registered tokens.
|
||||
* If the JS API version supported by the extension is unknown, it first sends a
|
||||
* message to the extension to find out the supported API version and then it sends
|
||||
* the register request.
|
||||
* @param {string=} appId
|
||||
* @param {Array<u2f.RegisterRequest>} registerRequests
|
||||
* @param {Array<u2f.RegisteredKey>} registeredKeys
|
||||
* @param {function((u2f.Error|u2f.RegisterResponse))} callback
|
||||
* @param {number=} opt_timeoutSeconds
|
||||
*/
|
||||
u2f.register = function(appId, registerRequests, registeredKeys, callback, opt_timeoutSeconds) {
|
||||
if (js_api_version === undefined) {
|
||||
// Send a message to get the extension to JS API version, then send the actual register request.
|
||||
u2f.getApiVersion(
|
||||
function (response) {
|
||||
js_api_version = response['js_api_version'] === undefined ? 0: response['js_api_version'];
|
||||
console.log("Extension JS API Version: ", js_api_version);
|
||||
u2f.sendRegisterRequest(appId, registerRequests, registeredKeys,
|
||||
callback, opt_timeoutSeconds);
|
||||
});
|
||||
} else {
|
||||
// We know the JS API version. Send the actual register request in the supported API version.
|
||||
u2f.sendRegisterRequest(appId, registerRequests, registeredKeys,
|
||||
callback, opt_timeoutSeconds);
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Dispatches register requests to available U2F tokens. An array of sign
|
||||
* requests identifies already registered tokens.
|
||||
* @param {string=} appId
|
||||
* @param {Array<u2f.RegisterRequest>} registerRequests
|
||||
* @param {Array<u2f.RegisteredKey>} registeredKeys
|
||||
* @param {function((u2f.Error|u2f.RegisterResponse))} callback
|
||||
* @param {number=} opt_timeoutSeconds
|
||||
*/
|
||||
u2f.sendRegisterRequest = function(appId, registerRequests, registeredKeys, callback, opt_timeoutSeconds) {
|
||||
u2f.getPortSingleton_(function(port) {
|
||||
var reqId = ++u2f.reqCounter_;
|
||||
u2f.callbackMap_[reqId] = callback;
|
||||
var timeoutSeconds = (typeof opt_timeoutSeconds !== 'undefined' ?
|
||||
opt_timeoutSeconds : u2f.EXTENSION_TIMEOUT_SEC);
|
||||
var req = u2f.formatRegisterRequest_(
|
||||
appId, registeredKeys, registerRequests, timeoutSeconds, reqId);
|
||||
port.postMessage(req);
|
||||
});
|
||||
};
|
||||
|
||||
|
||||
/**
|
||||
* Dispatches a message to the extension to find out the supported
|
||||
* JS API version.
|
||||
* If the user is on a mobile phone and is thus using Google Authenticator instead
|
||||
* of the Chrome extension, don't send the request and simply return 0.
|
||||
* @param {function((u2f.Error|u2f.GetJsApiVersionResponse))} callback
|
||||
* @param {number=} opt_timeoutSeconds
|
||||
*/
|
||||
u2f.getApiVersion = function(callback, opt_timeoutSeconds) {
|
||||
u2f.getPortSingleton_(function(port) {
|
||||
// If we are using Android Google Authenticator or iOS client app,
|
||||
// do not fire an intent to ask which JS API version to use.
|
||||
if (port.getPortType) {
|
||||
var apiVersion;
|
||||
switch (port.getPortType()) {
|
||||
case 'WrappedIosPort_':
|
||||
case 'WrappedAuthenticatorPort_':
|
||||
apiVersion = 1.1;
|
||||
break;
|
||||
|
||||
default:
|
||||
apiVersion = 0;
|
||||
break;
|
||||
}
|
||||
callback({ 'js_api_version': apiVersion });
|
||||
return;
|
||||
}
|
||||
var reqId = ++u2f.reqCounter_;
|
||||
u2f.callbackMap_[reqId] = callback;
|
||||
var req = {
|
||||
type: u2f.MessageTypes.U2F_GET_API_VERSION_REQUEST,
|
||||
timeoutSeconds: (typeof opt_timeoutSeconds !== 'undefined' ?
|
||||
opt_timeoutSeconds : u2f.EXTENSION_TIMEOUT_SEC),
|
||||
requestId: reqId
|
||||
};
|
||||
port.postMessage(req);
|
||||
});
|
||||
};
|
|
@ -1,58 +0,0 @@
|
|||
<html>
|
||||
<head>
|
||||
<title>Fido 2.0 webauthn demo</title>
|
||||
<script src="/cbor.js"></script>
|
||||
<script src="/u2f-api.js"></script>
|
||||
<style>
|
||||
body { font-family: sans-serif; line-height: 1.5em; padding: 2em 10em; }
|
||||
h1, h2 { color: #325F74; }
|
||||
a { color: #0080ac; font-weight: bold; text-decoration: none;}
|
||||
a:hover { text-decoration: underline; }
|
||||
</style>
|
||||
</head>
|
||||
<body>
|
||||
<h1>WebAuthn demo using python-fido2</h1>
|
||||
<p>This demo requires a browser supporting the WebAuthn API!</p>
|
||||
<hr>
|
||||
|
||||
<h2>Register a U2F credential</h2>
|
||||
<p>Touch your authenticator device now...</p>
|
||||
<a href="/">Cancel</a>
|
||||
|
||||
<script>
|
||||
fetch('/api/u2f/begin', {
|
||||
method: 'POST',
|
||||
}).then(function(response) {
|
||||
if(response.ok) return response.arrayBuffer();
|
||||
throw new Error('Error getting registration data!');
|
||||
}).then(CBOR.decode).then(function(challenge) {
|
||||
return new Promise(function(resolve, reject) {
|
||||
u2f.register('https://localhost:5000', [{
|
||||
challenge: challenge,
|
||||
version: 'U2F_V2'
|
||||
}], [], function(resp) {
|
||||
if(resp.errorCode) {
|
||||
reject(new Error('Error: ' + resp.errorCode));
|
||||
} else {
|
||||
resolve(resp);
|
||||
}
|
||||
});
|
||||
});
|
||||
}).then(function(attestation) {
|
||||
return fetch('/api/u2f/complete', {
|
||||
method: 'POST',
|
||||
headers: {'Content-Type': 'application/cbor'},
|
||||
body: CBOR.encode(attestation)
|
||||
});
|
||||
}).then(function(response) {
|
||||
var stat = response.ok ? 'successful' : 'unsuccessful';
|
||||
alert('Registration ' + stat + ' More details in server log...');
|
||||
}, function(reason) {
|
||||
alert(reason);
|
||||
}).then(function() {
|
||||
window.location = '/';
|
||||
});
|
||||
</script>
|
||||
|
||||
</body>
|
||||
</html>
|
|
@ -1,6 +1,6 @@
|
|||
from fido2.pcsc import CtapPcscDevice
|
||||
from fido2.utils import sha256
|
||||
from fido2.ctap1 import CTAP1
|
||||
from fido2.ctap1 import Ctap1
|
||||
import sys
|
||||
|
||||
|
||||
|
@ -12,7 +12,7 @@ if not dev:
|
|||
chal = sha256(b"AAA")
|
||||
appid = sha256(b"BBB")
|
||||
|
||||
ctap1 = CTAP1(dev)
|
||||
ctap1 = Ctap1(dev)
|
||||
|
||||
print("version:", ctap1.get_version())
|
||||
|
||||
|
|
|
@ -34,14 +34,12 @@ creates a new credential for it, and verifies that attestation is signed by the
|
|||
Yubico FIDO root CA (this will only work for Yubico devices).
|
||||
On Windows, the native WebAuthn API will be used.
|
||||
"""
|
||||
from __future__ import print_function, absolute_import, unicode_literals
|
||||
|
||||
from fido2.hid import CtapHidDevice
|
||||
from fido2.client import Fido2Client, WindowsClient
|
||||
from fido2.server import Fido2Server, AttestationVerifier
|
||||
from fido2.client import Fido2Client, WindowsClient, UserInteraction
|
||||
from fido2.server import Fido2Server
|
||||
from fido2.attestation import AttestationVerifier
|
||||
from base64 import b64decode
|
||||
from getpass import getpass
|
||||
from binascii import b2a_hex
|
||||
import sys
|
||||
import ctypes
|
||||
|
||||
|
@ -79,13 +77,25 @@ class YubicoAttestationVerifier(AttestationVerifier):
|
|||
"""
|
||||
|
||||
def ca_lookup(self, result, auth_data):
|
||||
return [YUBICO_CA]
|
||||
return YUBICO_CA
|
||||
|
||||
|
||||
use_prompt = False
|
||||
pin = None
|
||||
uv = "discouraged"
|
||||
|
||||
|
||||
# Handle user interaction
|
||||
class CliInteraction(UserInteraction):
|
||||
def prompt_up(self):
|
||||
print("\nTouch your authenticator device now...\n")
|
||||
|
||||
def request_pin(self, permissions, rd_id):
|
||||
return getpass("Enter PIN: ")
|
||||
|
||||
def request_uv(self, permissions, rd_id):
|
||||
print("User Verification required.")
|
||||
return True
|
||||
|
||||
|
||||
if WindowsClient.is_available() and not ctypes.windll.shell32.IsUserAnAdmin():
|
||||
# Use the Windows WebAuthn API if available, and we're not running as admin
|
||||
client = WindowsClient("https://example.com")
|
||||
|
@ -94,7 +104,6 @@ else:
|
|||
dev = next(CtapHidDevice.list_devices(), None)
|
||||
if dev is not None:
|
||||
print("Use USB HID channel.")
|
||||
use_prompt = True
|
||||
else:
|
||||
try:
|
||||
from fido2.pcsc import CtapPcscDevice
|
||||
|
@ -109,17 +118,12 @@ else:
|
|||
sys.exit(1)
|
||||
|
||||
# Set up a FIDO 2 client using the origin https://example.com
|
||||
client = Fido2Client(dev, "https://example.com")
|
||||
client = Fido2Client(dev, "https://example.com", user_interaction=CliInteraction())
|
||||
|
||||
# Prefer UV if supported
|
||||
if client.info.options.get("uv"):
|
||||
uv = "preferred"
|
||||
print("Authenticator supports User Verification")
|
||||
elif client.info.options.get("clientPin"):
|
||||
# Prompt for PIN if needed
|
||||
pin = getpass("Please enter PIN: ")
|
||||
else:
|
||||
print("PIN not set, won't use")
|
||||
|
||||
|
||||
server = Fido2Server(
|
||||
|
@ -136,10 +140,7 @@ create_options, state = server.register_begin(
|
|||
)
|
||||
|
||||
# Create a credential
|
||||
if use_prompt:
|
||||
print("\nTouch your authenticator device now...\n")
|
||||
|
||||
result = client.make_credential(create_options["publicKey"], pin=pin)
|
||||
result = client.make_credential(create_options["publicKey"])
|
||||
|
||||
# Complete registration
|
||||
auth_data = server.register_complete(
|
||||
|
@ -148,4 +149,4 @@ auth_data = server.register_complete(
|
|||
credentials = [auth_data.credential_data]
|
||||
|
||||
print("New credential created, attestation verified!")
|
||||
print("Yubico device AAGUID:", b2a_hex(auth_data.credential_data.aaguid))
|
||||
print("Yubico device AAGUID:", auth_data.credential_data.aaguid.hex())
|
||||
|
|
|
@ -0,0 +1,166 @@
|
|||
# Copyright (c) 2021 Yubico AB
|
||||
# All rights reserved.
|
||||
#
|
||||
# Redistribution and use in source and binary forms, with or
|
||||
# without modification, are permitted provided that the following
|
||||
# conditions are met:
|
||||
#
|
||||
# 1. Redistributions of source code must retain the above copyright
|
||||
# notice, this list of conditions and the following disclaimer.
|
||||
# 2. Redistributions in binary form must reproduce the above
|
||||
# copyright notice, this list of conditions and the following
|
||||
# disclaimer in the documentation and/or other materials provided
|
||||
# with the distribution.
|
||||
#
|
||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
|
||||
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
|
||||
# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
|
||||
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
|
||||
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
|
||||
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
|
||||
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
|
||||
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
|
||||
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
||||
# POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
"""
|
||||
This example shows how to use the FIDO MDS to only allow authenticators for which
|
||||
metadata is available.
|
||||
|
||||
It connects to the first FIDO device found (starts from USB, then looks into NFC),
|
||||
creates a new credential for it, and verifies that attestation is correctly signed
|
||||
and valid according to its metadata statement.
|
||||
|
||||
On Windows, the native WebAuthn API will be used.
|
||||
|
||||
NOTE: You need to retrieve a MDS3 blob to run this example.
|
||||
See https://fidoalliance.org/metadata/ for more info.
|
||||
"""
|
||||
from fido2.hid import CtapHidDevice
|
||||
from fido2.client import Fido2Client, WindowsClient, UserInteraction
|
||||
from fido2.server import Fido2Server
|
||||
from fido2.attestation import UntrustedAttestation
|
||||
from fido2.mds3 import parse_blob, MdsAttestationVerifier
|
||||
from base64 import b64decode
|
||||
from getpass import getpass
|
||||
import sys
|
||||
import ctypes
|
||||
|
||||
# Load the root CA used to sign the Metadata Statement blob
|
||||
ca = b64decode(
|
||||
"""
|
||||
MIIDXzCCAkegAwIBAgILBAAAAAABIVhTCKIwDQYJKoZIhvcNAQELBQAwTDEgMB4G
|
||||
A1UECxMXR2xvYmFsU2lnbiBSb290IENBIC0gUjMxEzARBgNVBAoTCkdsb2JhbFNp
|
||||
Z24xEzARBgNVBAMTCkdsb2JhbFNpZ24wHhcNMDkwMzE4MTAwMDAwWhcNMjkwMzE4
|
||||
MTAwMDAwWjBMMSAwHgYDVQQLExdHbG9iYWxTaWduIFJvb3QgQ0EgLSBSMzETMBEG
|
||||
A1UEChMKR2xvYmFsU2lnbjETMBEGA1UEAxMKR2xvYmFsU2lnbjCCASIwDQYJKoZI
|
||||
hvcNAQEBBQADggEPADCCAQoCggEBAMwldpB5BngiFvXAg7aEyiie/QV2EcWtiHL8
|
||||
RgJDx7KKnQRfJMsuS+FggkbhUqsMgUdwbN1k0ev1LKMPgj0MK66X17YUhhB5uzsT
|
||||
gHeMCOFJ0mpiLx9e+pZo34knlTifBtc+ycsmWQ1z3rDI6SYOgxXG71uL0gRgykmm
|
||||
KPZpO/bLyCiR5Z2KYVc3rHQU3HTgOu5yLy6c+9C7v/U9AOEGM+iCK65TpjoWc4zd
|
||||
QQ4gOsC0p6Hpsk+QLjJg6VfLuQSSaGjlOCZgdbKfd/+RFO+uIEn8rUAVSNECMWEZ
|
||||
XriX7613t2Saer9fwRPvm2L7DWzgVGkWqQPabumDk3F2xmmFghcCAwEAAaNCMEAw
|
||||
DgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFI/wS3+o
|
||||
LkUkrk1Q+mOai97i3Ru8MA0GCSqGSIb3DQEBCwUAA4IBAQBLQNvAUKr+yAzv95ZU
|
||||
RUm7lgAJQayzE4aGKAczymvmdLm6AC2upArT9fHxD4q/c2dKg8dEe3jgr25sbwMp
|
||||
jjM5RcOO5LlXbKr8EpbsU8Yt5CRsuZRj+9xTaGdWPoO4zzUhw8lo/s7awlOqzJCK
|
||||
6fBdRoyV3XpYKBovHd7NADdBj+1EbddTKJd+82cEHhXXipa0095MJ6RMG3NzdvQX
|
||||
mcIfeg7jLQitChws/zyrVQ4PkX4268NXSb7hLi18YIvDQVETI53O9zJrlAGomecs
|
||||
Mx86OyXShkDOOyyGeMlhLxS67ttVb9+E7gUJTb0o2HLO02JQZR7rkpeDMdmztcpH
|
||||
WD9f"""
|
||||
)
|
||||
|
||||
# Parse the MDS3 blob
|
||||
if len(sys.argv) != 2:
|
||||
print("This example requires a FIDO MDS3 metadata blob, which you can get here:")
|
||||
print("https://fidoalliance.org/metadata/")
|
||||
print()
|
||||
print("USAGE: python verify_attestation_mds3.py blob.jwt")
|
||||
sys.exit(1)
|
||||
|
||||
with open(sys.argv[1], "rb") as f:
|
||||
metadata = parse_blob(f.read(), ca)
|
||||
|
||||
# The verifier is used to query for data in the blob and to verify attestation.
|
||||
# We could optionally pass a filter function to only allow specific authenticators.
|
||||
mds = MdsAttestationVerifier(metadata)
|
||||
|
||||
uv = "discouraged"
|
||||
|
||||
|
||||
# Handle user interaction
|
||||
class CliInteraction(UserInteraction):
|
||||
def prompt_up(self):
|
||||
print("\nTouch your authenticator device now...\n")
|
||||
|
||||
def request_pin(self, permissions, rd_id):
|
||||
return getpass("Enter PIN: ")
|
||||
|
||||
def request_uv(self, permissions, rd_id):
|
||||
print("User Verification required.")
|
||||
return True
|
||||
|
||||
|
||||
if WindowsClient.is_available() and not ctypes.windll.shell32.IsUserAnAdmin():
|
||||
# Use the Windows WebAuthn API if available, and we're not running as admin
|
||||
client = WindowsClient("https://example.com")
|
||||
else:
|
||||
# Locate a device
|
||||
dev = next(CtapHidDevice.list_devices(), None)
|
||||
if dev is not None:
|
||||
print("Use USB HID channel.")
|
||||
else:
|
||||
try:
|
||||
from fido2.pcsc import CtapPcscDevice
|
||||
|
||||
dev = next(CtapPcscDevice.list_devices(), None)
|
||||
print("Use NFC channel.")
|
||||
except Exception as e:
|
||||
print("NFC channel search error:", e)
|
||||
|
||||
if not dev:
|
||||
print("No FIDO device found")
|
||||
sys.exit(1)
|
||||
|
||||
# Set up a FIDO 2 client using the origin https://example.com
|
||||
client = Fido2Client(dev, "https://example.com", user_interaction=CliInteraction())
|
||||
|
||||
# Prefer UV if supported
|
||||
if client.info.options.get("uv"):
|
||||
uv = "preferred"
|
||||
print("Authenticator supports User Verification")
|
||||
|
||||
|
||||
# The MDS verifier is passed to the server to verify that new credentials registered
|
||||
# exist in the MDS blob, else the registration will fail.
|
||||
server = Fido2Server(
|
||||
{"id": "example.com", "name": "Example RP"},
|
||||
attestation="direct",
|
||||
verify_attestation=mds,
|
||||
)
|
||||
|
||||
user = {"id": b"user_id", "name": "A. User"}
|
||||
|
||||
# Prepare parameters for makeCredential
|
||||
create_options, state = server.register_begin(
|
||||
user, user_verification=uv, authenticator_attachment="cross-platform"
|
||||
)
|
||||
|
||||
# Create a credential
|
||||
result = client.make_credential(create_options["publicKey"])
|
||||
|
||||
# Complete registration
|
||||
try:
|
||||
auth_data = server.register_complete(
|
||||
state, result.client_data, result.attestation_object
|
||||
)
|
||||
print("Registration completed")
|
||||
|
||||
# mds can also be used to get the metadata for the Authenticator,
|
||||
# regardless of if it was used to verify the attestation or not:
|
||||
entry = mds.find_entry(result.attestation_object, result.client_data.hash)
|
||||
print("Authenticator description:", entry.metadata_statement.description)
|
||||
except UntrustedAttestation:
|
||||
print("Authenticator metadata not found")
|
|
@ -25,17 +25,5 @@
|
|||
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
||||
# POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
import abc
|
||||
import six
|
||||
|
||||
|
||||
if six.PY2:
|
||||
|
||||
@six.add_metaclass(abc.ABCMeta)
|
||||
class ABC(object):
|
||||
pass
|
||||
|
||||
abc.ABC = ABC
|
||||
|
||||
|
||||
__version__ = "0.9.2"
|
||||
__version__ = "1.1.4-dev.0"
|
||||
|
|
|
@ -25,8 +25,6 @@
|
|||
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
||||
# POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
from __future__ import absolute_import
|
||||
|
||||
from .base import ( # noqa: F401
|
||||
Attestation,
|
||||
NoneAttestation,
|
||||
|
@ -38,6 +36,7 @@ from .base import ( # noqa: F401
|
|||
UnsupportedAttestation,
|
||||
UntrustedAttestation,
|
||||
verify_x509_chain,
|
||||
AttestationVerifier,
|
||||
)
|
||||
from .apple import AppleAttestation # noqa: F401
|
||||
from .android import AndroidSafetynetAttestation # noqa: F401
|
||||
|
|
|
@ -25,7 +25,7 @@
|
|||
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
||||
# POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
from __future__ import absolute_import, unicode_literals
|
||||
from __future__ import annotations
|
||||
|
||||
from .base import (
|
||||
Attestation,
|
||||
|
@ -47,7 +47,7 @@ import json
|
|||
class AndroidSafetynetAttestation(Attestation):
|
||||
FORMAT = "android-safetynet"
|
||||
|
||||
def __init__(self, allow_rooted=False):
|
||||
def __init__(self, allow_rooted: bool = False):
|
||||
self.allow_rooted = allow_rooted
|
||||
|
||||
@catch_builtins
|
||||
|
|
|
@ -25,7 +25,7 @@
|
|||
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
||||
# POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
from __future__ import absolute_import, unicode_literals
|
||||
from __future__ import annotations
|
||||
|
||||
from .base import (
|
||||
Attestation,
|
||||
|
|
|
@ -25,13 +25,17 @@
|
|||
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
||||
# POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
from enum import Enum
|
||||
from __future__ import annotations
|
||||
|
||||
from ..webauthn import AuthenticatorData, AttestationObject
|
||||
from enum import IntEnum, unique
|
||||
from cryptography import x509
|
||||
from cryptography.hazmat.backends import default_backend
|
||||
from cryptography.hazmat.primitives.asymmetric import padding, ec, rsa
|
||||
from cryptography.exceptions import InvalidSignature as _InvalidSignature
|
||||
from collections import namedtuple
|
||||
from dataclasses import dataclass
|
||||
from functools import wraps
|
||||
from typing import List, Type, Mapping, Sequence, Optional, Any
|
||||
|
||||
import abc
|
||||
|
||||
|
@ -54,8 +58,8 @@ class UntrustedAttestation(InvalidAttestation):
|
|||
|
||||
class UnsupportedType(InvalidAttestation):
|
||||
def __init__(self, auth_data, fmt=None):
|
||||
super(UnsupportedType, self).__init__(
|
||||
'Attestation format "{}" is not supported'.format(fmt)
|
||||
super().__init__(
|
||||
f'Attestation format "{fmt}" is not supported'
|
||||
if fmt
|
||||
else "This attestation format is not supported!"
|
||||
)
|
||||
|
@ -63,10 +67,8 @@ class UnsupportedType(InvalidAttestation):
|
|||
self.fmt = fmt
|
||||
|
||||
|
||||
AttestationResult = namedtuple("AttestationResult", ["attestation_type", "trust_path"])
|
||||
|
||||
|
||||
class AttestationType(Enum):
|
||||
@unique
|
||||
class AttestationType(IntEnum):
|
||||
BASIC = 1
|
||||
SELF = 2
|
||||
ATT_CA = 3
|
||||
|
@ -74,6 +76,12 @@ class AttestationType(Enum):
|
|||
NONE = 0
|
||||
|
||||
|
||||
@dataclass
class AttestationResult:
    """The outcome of a successfully verified attestation statement."""

    # The kind of attestation performed (e.g. BASIC, SELF, ATT_CA, NONE).
    attestation_type: AttestationType
    # DER-encoded X.509 certificates, leaf first (per verify_x509_chain's
    # ordering), not including the trust root — verify_attestation appends
    # the CA before chain validation.
    trust_path: List[bytes]
|
||||
|
||||
|
||||
def catch_builtins(f):
|
||||
@wraps(f)
|
||||
def inner(*args, **kwargs):
|
||||
|
@ -86,7 +94,12 @@ def catch_builtins(f):
|
|||
|
||||
|
||||
@catch_builtins
|
||||
def verify_x509_chain(chain):
|
||||
def verify_x509_chain(chain: List[bytes]) -> None:
|
||||
"""Verifies a chain of certificates.
|
||||
|
||||
Checks that the first item in the chain is signed by the next, and so on.
|
||||
The first item is the leaf, the last is the root.
|
||||
"""
|
||||
certs = [x509.load_der_x509_certificate(der, default_backend()) for der in chain]
|
||||
cert = certs.pop(0)
|
||||
while certs:
|
||||
|
@ -113,21 +126,26 @@ def verify_x509_chain(chain):
|
|||
|
||||
class Attestation(abc.ABC):
|
||||
@abc.abstractmethod
|
||||
def verify(self, statement, auth_data, client_data_hash):
|
||||
def verify(
|
||||
self,
|
||||
statement: Mapping[str, Any],
|
||||
auth_data: AuthenticatorData,
|
||||
client_data_hash: bytes,
|
||||
) -> AttestationResult:
|
||||
"""Verifies attestation statement.
|
||||
|
||||
:return: An AttestationResult if successful.
|
||||
"""
|
||||
|
||||
@staticmethod
|
||||
def for_type(fmt):
|
||||
def for_type(fmt: str) -> Type[Attestation]:
|
||||
for cls in Attestation.__subclasses__():
|
||||
if getattr(cls, "FORMAT", None) == fmt:
|
||||
return cls
|
||||
|
||||
class TypedUnsupportedAttestation(UnsupportedAttestation):
|
||||
def __init__(self):
|
||||
super(TypedUnsupportedAttestation, self).__init__(fmt)
|
||||
super().__init__(fmt)
|
||||
|
||||
return TypedUnsupportedAttestation
|
||||
|
||||
|
@ -159,3 +177,70 @@ def _validate_cert_common(cert):
|
|||
raise InvalidData("Attestation certificate must have CA=false!")
|
||||
except x509.ExtensionNotFound:
|
||||
raise InvalidData("Attestation certificate must have Basic Constraints!")
|
||||
|
||||
|
||||
def _default_attestations() -> List[Attestation]:
    """Instantiate one verifier for every registered Attestation subclass,
    excluding the "none" format (and any subclass without a FORMAT)."""
    return [
        cls()  # type: ignore
        for cls in Attestation.__subclasses__()
        if getattr(cls, "FORMAT", "none") != "none"
    ]
|
||||
|
||||
|
||||
class AttestationVerifier(abc.ABC):
    """Base class for verifying attestation.

    Override the ca_lookup method to provide a trusted root certificate used
    to verify the trust path from the attestation.
    """

    def __init__(self, attestation_types: Optional[Sequence[Attestation]] = None):
        # NOTE: `or` tests truthiness, so an empty sequence (not only None)
        # also falls back to the default set of Attestation subclasses.
        self._attestation_types = attestation_types or _default_attestations()

    @abc.abstractmethod
    def ca_lookup(
        self, attestation_result: AttestationResult, auth_data: AuthenticatorData
    ) -> Optional[bytes]:
        """Lookup a CA certificate to be used to verify a trust path.

        Return the certificate bytes, or None if no trusted root is known
        (verify_attestation then raises UntrustedAttestation).

        :param attestation_result: The result of the attestation
        :param auth_data: The AuthenticatorData from the registration
        """
        raise NotImplementedError()

    def verify_attestation(
        self, attestation_object: AttestationObject, client_data_hash: bytes
    ) -> None:
        """Verify attestation.

        Returns None on success; raises on any failure (unsupported format,
        invalid statement, no CA found, or untrusted chain).

        :param attestation_object: dict containing attestation data.
        :param client_data_hash: SHA256 hash of the ClientData bytes.
        """
        # Default to an UnsupportedAttestation for the given format; it is
        # only used if no registered attestation type matches below.
        att_verifier: Attestation = UnsupportedAttestation(attestation_object.fmt)
        for at in self._attestation_types:
            if getattr(at, "FORMAT", None) == attestation_object.fmt:
                att_verifier = at
                break
        # An unsupported format causes an exception to be thrown, which
        # includes the auth_data. The caller may choose to handle this case
        # and allow the registration.
        result = att_verifier.verify(
            attestation_object.att_stmt,
            attestation_object.auth_data,
            client_data_hash,
        )

        # Lookup CA to use for trust path verification
        ca = self.ca_lookup(result, attestation_object.auth_data)
        if not ca:
            raise UntrustedAttestation("No root found for Authenticator")

        # Validate the trust chain (leaf first, CA root appended last)
        try:
            verify_x509_chain(result.trust_path + [ca])
        except InvalidSignature as e:
            raise UntrustedAttestation(e)

    def __call__(self, *args):
        """Allows passing an instance to Fido2Server as verify_attestation"""
        self.verify_attestation(*args)
|
||||
|
|
|
@ -25,7 +25,7 @@
|
|||
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
||||
# POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
from __future__ import absolute_import, unicode_literals
|
||||
from __future__ import annotations
|
||||
|
||||
from .base import (
|
||||
Attestation,
|
||||
|
|
|
@ -27,7 +27,7 @@
|
|||
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
||||
# POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
from __future__ import absolute_import, unicode_literals
|
||||
from __future__ import annotations
|
||||
|
||||
from .base import (
|
||||
Attestation,
|
||||
|
@ -41,32 +41,23 @@ from .base import (
|
|||
from ..cose import CoseKey
|
||||
from ..utils import bytes2int, ByteBuffer
|
||||
|
||||
from enum import IntEnum
|
||||
from collections import namedtuple
|
||||
from enum import IntEnum, unique
|
||||
from cryptography.hazmat.backends import default_backend
|
||||
from cryptography.hazmat.primitives.asymmetric import rsa, ec
|
||||
from cryptography.hazmat.primitives import hashes
|
||||
from cryptography import x509
|
||||
from cryptography.exceptions import InvalidSignature as _InvalidSignature
|
||||
from dataclasses import dataclass
|
||||
from typing import Tuple, Union, cast
|
||||
|
||||
import struct
|
||||
import six
|
||||
|
||||
|
||||
if six.PY2:
|
||||
# Workaround for int max size on Python 2.
|
||||
from enum import Enum
|
||||
|
||||
class _LongEnum(long, Enum): # noqa F821
|
||||
"""Like IntEnum, but supports larger values"""
|
||||
|
||||
IntEnum = _LongEnum # Use instead of IntEnum # noqa F811
|
||||
|
||||
|
||||
TPM_ALG_NULL = 0x0010
|
||||
OID_AIK_CERTIFICATE = x509.ObjectIdentifier("2.23.133.8.3")
|
||||
|
||||
|
||||
@unique
|
||||
class TpmRsaScheme(IntEnum):
|
||||
RSASSA = 0x0014
|
||||
RSAPSS = 0x0016
|
||||
|
@ -74,18 +65,20 @@ class TpmRsaScheme(IntEnum):
|
|||
RSAES = 0x0015
|
||||
|
||||
|
||||
@unique
|
||||
class TpmAlgAsym(IntEnum):
|
||||
RSA = 0x0001
|
||||
ECC = 0x0023
|
||||
|
||||
|
||||
@unique
|
||||
class TpmAlgHash(IntEnum):
|
||||
SHA1 = 0x0004
|
||||
SHA256 = 0x000B
|
||||
SHA384 = 0x000C
|
||||
SHA512 = 0x000D
|
||||
|
||||
def _hash_alg(self):
|
||||
def _hash_alg(self) -> hashes.Hash:
|
||||
if self == TpmAlgHash.SHA1:
|
||||
return hashes.SHA1() # nosec
|
||||
elif self == TpmAlgHash.SHA256:
|
||||
|
@ -95,15 +88,21 @@ class TpmAlgHash(IntEnum):
|
|||
elif self == TpmAlgHash.SHA512:
|
||||
return hashes.SHA512()
|
||||
|
||||
return NotImplementedError(
|
||||
"_hash_alg is not implemented for {0!r}".format(self)
|
||||
)
|
||||
return NotImplementedError(f"_hash_alg is not implemented for {self!r}")
|
||||
|
||||
|
||||
TpmsCertifyInfo = namedtuple("TpmsCertifyInfo", "name qualified_name")
|
||||
@dataclass
class TpmsCertifyInfo:
    """TPMS_CERTIFY_INFO, parsed from the attested field of a TPMS_ATTEST blob."""

    # NOTE(review): both fields appear to be length-prefixed TPM2B_NAME byte
    # strings read by TpmAttestationFormat.parse — confirm against TPMv2-Part2.
    name: bytes
    qualified_name: bytes
|
||||
|
||||
|
||||
class TpmAttestationFormat(object):
|
||||
TPM_GENERATED_VALUE = b"\xffTCG"
|
||||
TPM_ST_ATTEST_CERTIFY = b"\x80\x17"
|
||||
|
||||
|
||||
@dataclass
|
||||
class TpmAttestationFormat:
|
||||
"""the signature data is defined by [TPMv2-Part2] Section 10.12.8 (TPMS_ATTEST)
|
||||
as:
|
||||
TPM_GENERATED_VALUE (0xff544347 aka "\xffTCG")
|
||||
|
@ -130,25 +129,28 @@ class TpmAttestationFormat(object):
|
|||
https://www.trustedcomputinggroup.org/wp-content/uploads/TPM-Rev-2.0-Part-3-Commands-01.38.pdf
|
||||
"""
|
||||
|
||||
TPM_GENERATED_VALUE = b"\xffTCG"
|
||||
TPM_ST_ATTEST_CERTIFY = b"\x80\x17"
|
||||
name: bytes
|
||||
data: bytes
|
||||
clock_info: Tuple[int, int, int, bool]
|
||||
firmware_version: int
|
||||
attested: TpmsCertifyInfo
|
||||
|
||||
@classmethod
|
||||
def parse(cls, data):
|
||||
def parse(cls, data: bytes) -> TpmAttestationFormat:
|
||||
reader = ByteBuffer(data)
|
||||
generated_value = reader.read(4)
|
||||
|
||||
# Verify that magic is set to TPM_GENERATED_VALUE.
|
||||
# see https://w3c.github.io/webauthn/#sctn-tpm-attestation
|
||||
# verification procedure
|
||||
if generated_value != cls.TPM_GENERATED_VALUE:
|
||||
if generated_value != TPM_GENERATED_VALUE:
|
||||
raise ValueError("generated value field is invalid")
|
||||
|
||||
# Verify that type is set to TPM_ST_ATTEST_CERTIFY.
|
||||
# see https://w3c.github.io/webauthn/#sctn-tpm-attestation
|
||||
# verification procedure
|
||||
tpmi_st_attest = reader.read(2)
|
||||
if tpmi_st_attest != cls.TPM_ST_ATTEST_CERTIFY:
|
||||
if tpmi_st_attest != TPM_ST_ATTEST_CERTIFY:
|
||||
raise ValueError("tpmi_st_attest field is invalid")
|
||||
|
||||
try:
|
||||
|
@ -160,7 +162,7 @@ class TpmAttestationFormat(object):
|
|||
restart_count = reader.unpack("!L")
|
||||
safe_value = reader.unpack("B")
|
||||
if safe_value not in (0, 1):
|
||||
raise ValueError("invalid value 0x{0:x} for boolean".format(safe_value))
|
||||
raise ValueError(f"invalid value 0x{safe_value:x} for boolean")
|
||||
safe = safe_value == 1
|
||||
|
||||
firmware_version = reader.unpack("!Q")
|
||||
|
@ -180,26 +182,9 @@ class TpmAttestationFormat(object):
|
|||
),
|
||||
)
|
||||
|
||||
def __init__(self, name, data, clock_info, firmware_version, attested):
|
||||
self.name = name
|
||||
self.data = data
|
||||
self.clock_info = clock_info
|
||||
self.firmware_version = firmware_version
|
||||
self.attested = attested
|
||||
|
||||
def __repr__(self):
|
||||
return (
|
||||
"<TpmAttestationFormat"
|
||||
" data={self.data}"
|
||||
" name={self.name}"
|
||||
" clock_info={self.clock_info}"
|
||||
" firmware_version=0x{self.firmware_version:x}"
|
||||
" attested={self.attested}"
|
||||
">".format(self=self)
|
||||
)
|
||||
|
||||
|
||||
class TpmsRsaParms(object):
|
||||
@dataclass
|
||||
class TpmsRsaParms:
|
||||
"""Parse TPMS_RSA_PARMS struct
|
||||
|
||||
See:
|
||||
|
@ -207,10 +192,13 @@ class TpmsRsaParms(object):
|
|||
section 12.2.3.5
|
||||
"""
|
||||
|
||||
symmetric: int
|
||||
scheme: int
|
||||
key_bits: int
|
||||
exponent: int
|
||||
|
||||
@classmethod
|
||||
def parse(cls, reader, attributes):
|
||||
ATTRIBUTES = TpmPublicFormat.ATTRIBUTES
|
||||
|
||||
symmetric = reader.unpack("!H")
|
||||
|
||||
restricted_decryption = attributes & (
|
||||
|
@ -275,35 +263,18 @@ class TpmsRsaParms(object):
|
|||
exponent = reader.unpack("!L")
|
||||
if exponent == 0:
|
||||
# When zero, indicates that the exponent is the default of 2^16 + 1
|
||||
exponent = (2 ** 16) + 1
|
||||
exponent = (2**16) + 1
|
||||
|
||||
return cls(symmetric, scheme, key_bits, exponent)
|
||||
|
||||
def __init__(self, symmetric, scheme, key_bits, exponent):
|
||||
self.symmetric = symmetric
|
||||
self.scheme = scheme
|
||||
self.key_bits = key_bits
|
||||
self.exponent = exponent
|
||||
|
||||
def __repr__(self):
|
||||
return (
|
||||
"<TpmsRsaParms"
|
||||
" symmetric=0x{self.symmetric:x}"
|
||||
" scheme=0x{self.scheme:x}"
|
||||
" key_bits={self.key_bits}"
|
||||
" exponent={self.exponent}"
|
||||
">".format(self=self)
|
||||
)
|
||||
|
||||
|
||||
class Tpm2bPublicKeyRsa(bytes):
|
||||
@classmethod
|
||||
def parse(cls, reader):
|
||||
buffer = reader.read(reader.unpack("!H"))
|
||||
|
||||
return cls(buffer)
|
||||
def parse(cls, reader: ByteBuffer) -> Tpm2bPublicKeyRsa:
|
||||
return cls(reader.read(reader.unpack("!H")))
|
||||
|
||||
|
||||
@unique
|
||||
class TpmEccCurve(IntEnum):
|
||||
"""TPM_ECC_CURVE
|
||||
https://www.trustedcomputinggroup.org/wp-content/uploads/TPM-Rev-2.0-Part-2-Structures-01.38.pdf
|
||||
|
@ -320,7 +291,7 @@ class TpmEccCurve(IntEnum):
|
|||
BN_P638 = 0x0011
|
||||
SM2_P256 = 0x0020
|
||||
|
||||
def to_curve(self):
|
||||
def to_curve(self) -> ec.EllipticCurve:
|
||||
if self == TpmEccCurve.NONE:
|
||||
raise ValueError("No such curve")
|
||||
elif self == TpmEccCurve.NIST_P192:
|
||||
|
@ -337,6 +308,7 @@ class TpmEccCurve(IntEnum):
|
|||
raise ValueError("curve is not supported", self)
|
||||
|
||||
|
||||
@unique
|
||||
class TpmiAlgKdf(IntEnum):
|
||||
"""TPMI_ALG_KDF
|
||||
https://www.trustedcomputinggroup.org/wp-content/uploads/TPM-Rev-2.0-Part-2-Structures-01.38.pdf
|
||||
|
@ -349,9 +321,15 @@ class TpmiAlgKdf(IntEnum):
|
|||
KDF1_SP800_108 = 0x0022
|
||||
|
||||
|
||||
class TpmsEccParms(object):
|
||||
@dataclass
|
||||
class TpmsEccParms:
|
||||
symmetric: int
|
||||
scheme: int
|
||||
curve_id: TpmEccCurve
|
||||
kdf: TpmiAlgKdf
|
||||
|
||||
@classmethod
|
||||
def parse(cls, reader):
|
||||
def parse(cls, reader: ByteBuffer) -> TpmsEccParms:
|
||||
symmetric = reader.unpack("!H")
|
||||
scheme = reader.unpack("!H")
|
||||
if symmetric != TPM_ALG_NULL:
|
||||
|
@ -364,46 +342,62 @@ class TpmsEccParms(object):
|
|||
|
||||
return cls(symmetric, scheme, curve_id, kdf_scheme)
|
||||
|
||||
def __init__(self, symmetric, scheme, curve_id, kdf):
|
||||
self.symmetric = symmetric
|
||||
self.scheme = scheme
|
||||
self.curve_id = curve_id
|
||||
self.kdf = kdf
|
||||
|
||||
def __repr__(self):
|
||||
return (
|
||||
"<TpmsEccParms"
|
||||
" symmetric=0x{self.symmetric:x}"
|
||||
" scheme=0x{self.scheme:x}"
|
||||
" curve_id={self.curve_id!r}"
|
||||
" kdf={self.kdf!r}"
|
||||
">".format(self=self)
|
||||
)
|
||||
|
||||
|
||||
class TpmsEccPoint(object):
|
||||
@dataclass
|
||||
class TpmsEccPoint:
|
||||
"""TPMS_ECC_POINT
|
||||
https://www.trustedcomputinggroup.org/wp-content/uploads/TPM-Rev-2.0-Part-2-Structures-01.38.pdf
|
||||
Section 11.2.5.2
|
||||
"""
|
||||
|
||||
x: bytes
|
||||
y: bytes
|
||||
|
||||
@classmethod
|
||||
def parse(cls, reader):
|
||||
def parse(cls, reader: ByteBuffer) -> TpmsEccPoint:
|
||||
x = reader.read(reader.unpack("!H"))
|
||||
y = reader.read(reader.unpack("!H"))
|
||||
|
||||
return cls(x, y)
|
||||
|
||||
def __init__(self, x, y):
|
||||
self.x = y
|
||||
self.y = y
|
||||
|
||||
def __repr__(self):
|
||||
return "<TpmsEccPoint" " x={self.x}" " y={self.y}" ">".format(self=self)
|
||||
@unique
|
||||
class ATTRIBUTES(IntEnum):
|
||||
"""Object attributes
|
||||
see section 8.3
|
||||
https://www.trustedcomputinggroup.org/wp-content/uploads/TPM-Rev-2.0-Part-2-Structures-01.38.pdf
|
||||
"""
|
||||
|
||||
FIXED_TPM = 1 << 1
|
||||
ST_CLEAR = 1 << 2
|
||||
FIXED_PARENT = 1 << 4
|
||||
SENSITIVE_DATA_ORIGIN = 1 << 5
|
||||
USER_WITH_AUTH = 1 << 6
|
||||
ADMIN_WITH_POLICY = 1 << 7
|
||||
NO_DA = 1 << 10
|
||||
ENCRYPTED_DUPLICATION = 1 << 11
|
||||
RESTRICTED = 1 << 16
|
||||
DECRYPT = 1 << 17
|
||||
SIGN_ENCRYPT = 1 << 18
|
||||
|
||||
SHALL_BE_ZERO = (
|
||||
(1 << 0) # 0 Reserved
|
||||
| (1 << 3) # 3 Reserved
|
||||
| (0x3 << 8) # 9:8 Reserved
|
||||
| (0xF << 12) # 15:12 Reserved
|
||||
| ((0xFFFFFFFF << 19) & (2**32 - 1)) # 31:19 Reserved
|
||||
)
|
||||
|
||||
|
||||
class TpmPublicFormat(object):
|
||||
"""the public area structure is defined by [TPMv2-Part2] Section 12.2.4 (TPMT_PUBLIC)
|
||||
_PublicKey = Union[rsa.RSAPublicKey, ec.EllipticCurvePublicKey]
|
||||
_Parameters = Union[TpmsRsaParms, TpmsEccParms]
|
||||
_Unique = Union[Tpm2bPublicKeyRsa, TpmsEccPoint]
|
||||
|
||||
|
||||
@dataclass
|
||||
class TpmPublicFormat:
|
||||
"""the public area structure is defined by [TPMv2-Part2] Section 12.2.4
|
||||
(TPMT_PUBLIC)
|
||||
as:
|
||||
TPMI_ALG_PUBLIC - type
|
||||
TPMI_ALG_HASH - nameAlg
|
||||
|
@ -416,56 +410,34 @@ class TpmPublicFormat(object):
|
|||
https://www.trustedcomputinggroup.org/wp-content/uploads/TPM-Rev-2.0-Part-2-Structures-01.38.pdf
|
||||
"""
|
||||
|
||||
class ATTRIBUTES(IntEnum):
|
||||
"""Object attributes
|
||||
see section 8.3
|
||||
https://www.trustedcomputinggroup.org/wp-content/uploads/TPM-Rev-2.0-Part-2-Structures-01.38.pdf
|
||||
"""
|
||||
|
||||
FIXED_TPM = 1 << 1
|
||||
ST_CLEAR = 1 << 2
|
||||
FIXED_PARENT = 1 << 4
|
||||
SENSITIVE_DATA_ORIGIN = 1 << 5
|
||||
USER_WITH_AUTH = 1 << 6
|
||||
ADMIN_WITH_POLICY = 1 << 7
|
||||
NO_DA = 1 << 10
|
||||
ENCRYPTED_DUPLICATION = 1 << 11
|
||||
RESTRICTED = 1 << 16
|
||||
DECRYPT = 1 << 17
|
||||
SIGN_ENCRYPT = 1 << 18
|
||||
|
||||
SHALL_BE_ZERO = (
|
||||
(1 << 0) # 0 Reserved
|
||||
| (1 << 3) # 3 Reserved
|
||||
| (0x3 << 8) # 9:8 Reserved
|
||||
| (0xF << 12) # 15:12 Reserved
|
||||
| ((0xFFFFFFFF << 19) & (2 ** 32 - 1)) # 31:19 Reserved
|
||||
)
|
||||
sign_alg: TpmAlgAsym
|
||||
name_alg: TpmAlgHash
|
||||
attributes: int
|
||||
auth_policy: bytes
|
||||
parameters: _Parameters
|
||||
unique: _Unique
|
||||
data: bytes
|
||||
|
||||
@classmethod
|
||||
def parse(cls, data):
|
||||
def parse(cls, data: bytes) -> TpmPublicFormat:
|
||||
reader = ByteBuffer(data)
|
||||
sign_alg = TpmAlgAsym(reader.unpack("!H"))
|
||||
name_alg = TpmAlgHash(reader.unpack("!H"))
|
||||
|
||||
attributes = reader.unpack("!L")
|
||||
if attributes & TpmPublicFormat.ATTRIBUTES.SHALL_BE_ZERO != 0:
|
||||
raise ValueError(
|
||||
"attributes is not formated correctly: 0x{:x}".format(attributes)
|
||||
)
|
||||
if attributes & ATTRIBUTES.SHALL_BE_ZERO != 0:
|
||||
raise ValueError(f"attributes is not formated correctly: 0x{attributes:x}")
|
||||
|
||||
auth_policy = reader.read(reader.unpack("!H"))
|
||||
|
||||
if sign_alg == TpmAlgAsym.RSA:
|
||||
parameters = TpmsRsaParms.parse(reader, attributes)
|
||||
unique = Tpm2bPublicKeyRsa.parse(reader)
|
||||
parameters: _Parameters = TpmsRsaParms.parse(reader, attributes)
|
||||
unique: _Unique = Tpm2bPublicKeyRsa.parse(reader)
|
||||
elif sign_alg == TpmAlgAsym.ECC:
|
||||
parameters = TpmsEccParms.parse(reader)
|
||||
unique = TpmsEccPoint.parse(reader)
|
||||
else:
|
||||
raise NotImplementedError(
|
||||
"sign alg {:x} is not " "supported".format(sign_alg)
|
||||
)
|
||||
raise NotImplementedError(f"sign alg {sign_alg:x} is not supported")
|
||||
|
||||
rest = reader.read()
|
||||
if len(rest) != 0:
|
||||
|
@ -475,46 +447,22 @@ class TpmPublicFormat(object):
|
|||
sign_alg, name_alg, attributes, auth_policy, parameters, unique, data
|
||||
)
|
||||
|
||||
def __init__(
|
||||
self, sign_alg, name_alg, attributes, auth_policy, parameters, unique, data
|
||||
):
|
||||
self.sign_alg = sign_alg
|
||||
self.name_alg = name_alg
|
||||
self.attributes = attributes
|
||||
self.auth_policy = auth_policy
|
||||
self.parameters = parameters
|
||||
self.unique = unique
|
||||
self.data = data
|
||||
|
||||
def __repr__(self):
|
||||
return (
|
||||
"<TpmPublicFormat"
|
||||
" sign_alg=0x{self.sign_alg:x}"
|
||||
" name_alg=0x{self.name_alg:x}"
|
||||
" attributes=0x{self.attributes:x}({self.attributes!r})"
|
||||
" auth_policy={self.auth_policy}"
|
||||
" parameters={self.parameters}"
|
||||
" unique={self.unique}"
|
||||
">".format(self=self)
|
||||
)
|
||||
|
||||
def public_key(self):
|
||||
def public_key(self) -> _PublicKey:
|
||||
if self.sign_alg == TpmAlgAsym.RSA:
|
||||
exponent = self.parameters.exponent
|
||||
modulus = bytes2int(self.unique)
|
||||
exponent = cast(TpmsRsaParms, self.parameters).exponent
|
||||
modulus = bytes2int(cast(Tpm2bPublicKeyRsa, self.unique))
|
||||
return rsa.RSAPublicNumbers(exponent, modulus).public_key(default_backend())
|
||||
elif self.sign_alg == TpmAlgAsym.ECC:
|
||||
unique = cast(TpmsEccPoint, self.unique)
|
||||
return ec.EllipticCurvePublicNumbers(
|
||||
bytes2int(self.unique.x),
|
||||
bytes2int(self.unique.y),
|
||||
self.parameters.to_curve(),
|
||||
bytes2int(unique.x),
|
||||
bytes2int(unique.y),
|
||||
cast(TpmsEccParms, self.parameters).curve_id.to_curve(),
|
||||
).public_key(default_backend())
|
||||
|
||||
raise NotImplementedError(
|
||||
"public_key not implemented for {0!r}".format(self.sign_alg)
|
||||
)
|
||||
raise NotImplementedError(f"public_key not implemented for {self.sign_alg!r}")
|
||||
|
||||
def name(self):
|
||||
def name(self) -> bytes:
|
||||
"""
|
||||
Computing Entity Names
|
||||
|
||||
|
@ -601,7 +549,8 @@ class TpmAttestation(Attestation):
|
|||
# Verify that extraData is set to the hash of attToBeSigned
|
||||
# using the hash algorithm employed in "alg".
|
||||
att_to_be_signed = auth_data + client_data_hash
|
||||
digest = hashes.Hash(pub_key._HASH_ALG, backend=default_backend())
|
||||
hash_alg = pub_key._HASH_ALG # type: ignore
|
||||
digest = hashes.Hash(hash_alg, backend=default_backend())
|
||||
digest.update(att_to_be_signed)
|
||||
data = digest.finalize()
|
||||
|
||||
|
|
|
@ -25,7 +25,7 @@
|
|||
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
||||
# POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
from __future__ import absolute_import, unicode_literals
|
||||
from __future__ import annotations
|
||||
|
||||
from .base import (
|
||||
Attestation,
|
||||
|
|
|
@ -31,18 +31,23 @@ Minimal CBOR implementation supporting a subset of functionality and types
|
|||
required for FIDO 2 CTAP.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import struct
|
||||
import six
|
||||
from typing import Any, Tuple, Union, Sequence, Mapping, Type, Callable
|
||||
|
||||
|
||||
def dump_int(data, mt=0):
|
||||
CborType = Union[int, bool, str, bytes, Sequence[Any], Mapping[Any, Any]]
|
||||
|
||||
|
||||
def dump_int(data: int, mt: int = 0) -> bytes:
|
||||
if data < 0:
|
||||
mt = 1
|
||||
data = -1 - data
|
||||
|
||||
mt = mt << 5
|
||||
if data <= 23:
|
||||
args = (">B", mt | data)
|
||||
args: Any = (">B", mt | data)
|
||||
elif data <= 0xFF:
|
||||
args = (">BB", mt | 24, data)
|
||||
elif data <= 0xFFFF:
|
||||
|
@ -54,56 +59,56 @@ def dump_int(data, mt=0):
|
|||
return struct.pack(*args)
|
||||
|
||||
|
||||
def dump_bool(data: bool) -> bytes:
    """Encode a boolean as its one-byte CBOR simple value (0xf5/0xf4)."""
    if data:
        return b"\xf5"
    return b"\xf4"
|
||||
|
||||
|
||||
def dump_list(data):
|
||||
def dump_list(data: Sequence[CborType]) -> bytes:
|
||||
return dump_int(len(data), mt=4) + b"".join([encode(x) for x in data])
|
||||
|
||||
|
||||
def _sort_keys(entry):
|
||||
key = entry[0]
|
||||
return six.indexbytes(key, 0), len(key), key
|
||||
return key[0], len(key), key
|
||||
|
||||
|
||||
def dump_dict(data: "Mapping[CborType, CborType]") -> bytes:
    """Encode a mapping as a CBOR map (major type 5) with canonically
    ordered keys."""
    encoded_pairs = sorted(
        ((encode(key), encode(value)) for key, value in data.items()),
        key=_sort_keys,
    )
    header = dump_int(len(encoded_pairs), mt=5)
    return header + b"".join(k + v for k, v in encoded_pairs)
|
||||
|
||||
|
||||
def dump_bytes(data: bytes) -> bytes:
    """Encode a byte string: major type 2 length header followed by the bytes."""
    header = dump_int(len(data), mt=2)
    return header + data
|
||||
|
||||
|
||||
def dump_text(data: str) -> bytes:
    """Encode a text string as UTF-8 with a major type 3 length header."""
    encoded = data.encode("utf8")
    header = dump_int(len(encoded), mt=3)
    return header + encoded
|
||||
|
||||
|
||||
_SERIALIZERS = [
|
||||
_SERIALIZERS: Sequence[Tuple[Type, Callable[[Any], bytes]]] = [
|
||||
(bool, dump_bool),
|
||||
(six.integer_types, dump_int),
|
||||
(dict, dump_dict),
|
||||
(list, dump_list),
|
||||
(six.text_type, dump_text),
|
||||
(six.binary_type, dump_bytes),
|
||||
(int, dump_int),
|
||||
(str, dump_text),
|
||||
(bytes, dump_bytes),
|
||||
(Mapping, dump_dict),
|
||||
(Sequence, dump_list),
|
||||
]
|
||||
|
||||
|
||||
def encode(data: "CborType") -> bytes:
    """Serialize a supported Python value to CBOR bytes.

    Dispatches on the first matching (type, serializer) entry; raises
    ValueError for unsupported values.
    """
    for supported_type, serializer in _SERIALIZERS:
        if isinstance(data, supported_type):
            return serializer(data)
    raise ValueError(f"Unsupported value: {data!r}")
|
||||
|
||||
|
||||
def load_int(ai, data):
|
||||
def load_int(ai: int, data: bytes) -> Tuple[int, bytes]:
|
||||
if ai < 24:
|
||||
return ai, data
|
||||
elif ai == 24:
|
||||
return six.indexbytes(data, 0), data[1:]
|
||||
return data[0], data[1:]
|
||||
elif ai == 25:
|
||||
return struct.unpack_from(">H", data)[0], data[2:]
|
||||
elif ai == 26:
|
||||
|
@ -113,26 +118,26 @@ def load_int(ai, data):
|
|||
raise ValueError("Invalid additional information")
|
||||
|
||||
|
||||
def load_nint(ai, data):
|
||||
def load_nint(ai: int, data: bytes) -> Tuple[int, bytes]:
|
||||
val, rest = load_int(ai, data)
|
||||
return -1 - val, rest
|
||||
|
||||
|
||||
def load_bool(ai, data):
|
||||
def load_bool(ai: int, data: bytes) -> Tuple[bool, bytes]:
|
||||
return ai == 21, data
|
||||
|
||||
|
||||
def load_bytes(ai, data):
|
||||
def load_bytes(ai: int, data: bytes) -> Tuple[bytes, bytes]:
|
||||
l, data = load_int(ai, data)
|
||||
return data[:l], data[l:]
|
||||
|
||||
|
||||
def load_text(ai, data):
|
||||
def load_text(ai: int, data: bytes) -> Tuple[str, bytes]:
|
||||
enc, rest = load_bytes(ai, data)
|
||||
return enc.decode("utf8"), rest
|
||||
|
||||
|
||||
def load_array(ai, data):
|
||||
def load_array(ai: int, data: bytes) -> Tuple[Sequence[CborType], bytes]:
|
||||
l, data = load_int(ai, data)
|
||||
values = []
|
||||
for i in range(l):
|
||||
|
@ -141,7 +146,7 @@ def load_array(ai, data):
|
|||
return values, data
|
||||
|
||||
|
||||
def load_map(ai, data):
|
||||
def load_map(ai: int, data: bytes) -> Tuple[Mapping[CborType, CborType], bytes]:
|
||||
l, data = load_int(ai, data)
|
||||
values = {}
|
||||
for i in range(l):
|
||||
|
@ -162,12 +167,12 @@ _DESERIALIZERS = {
|
|||
}
|
||||
|
||||
|
||||
def decode_from(data):
|
||||
fb = six.indexbytes(data, 0)
|
||||
def decode_from(data: bytes) -> Tuple[Any, bytes]:
|
||||
fb = data[0]
|
||||
return _DESERIALIZERS[fb >> 5](fb & 0b11111, data[1:])
|
||||
|
||||
|
||||
def decode(data):
|
||||
def decode(data) -> CborType:
|
||||
value, rest = decode_from(data)
|
||||
if rest != b"":
|
||||
raise ValueError("Extraneous data")
|
||||
|
|
997
fido2/client.py
997
fido2/client.py
File diff suppressed because it is too large
Load Diff
127
fido2/cose.py
127
fido2/cose.py
|
@ -25,17 +25,13 @@
|
|||
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
||||
# POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
from __future__ import absolute_import, unicode_literals
|
||||
from __future__ import annotations
|
||||
|
||||
from .utils import bytes2int, int2bytes
|
||||
from cryptography.hazmat.backends import default_backend
|
||||
from cryptography.hazmat.primitives import hashes, serialization
|
||||
from cryptography.hazmat.primitives.asymmetric import ec, rsa, padding
|
||||
|
||||
try:
|
||||
from cryptography.hazmat.primitives.asymmetric import ed25519
|
||||
except ImportError: # EdDSA requires Cryptography >= 2.6.
|
||||
ed25519 = None
|
||||
from cryptography.hazmat.primitives.asymmetric import ec, rsa, padding, ed25519
|
||||
from typing import Sequence, Type, Mapping, Any, Union, TypeVar
|
||||
|
||||
|
||||
class CoseKey(dict):
|
||||
|
@ -45,9 +41,9 @@ class CoseKey(dict):
|
|||
:cvar ALGORITHM: COSE algorithm identifier.
|
||||
"""
|
||||
|
||||
ALGORITHM = None
|
||||
ALGORITHM: int = None # type: ignore
|
||||
|
||||
def verify(self, message, signature):
|
||||
def verify(self, message: bytes, signature: bytes) -> None:
|
||||
"""Validates a digital signature over a given message.
|
||||
|
||||
:param message: The message which was signed.
|
||||
|
@ -56,7 +52,10 @@ class CoseKey(dict):
|
|||
raise NotImplementedError("Signature verification not supported.")
|
||||
|
||||
@classmethod
|
||||
def from_cryptography_key(cls, public_key):
|
||||
def from_cryptography_key(
|
||||
cls: Type[T_CoseKey],
|
||||
public_key: Union[rsa.RSAPublicKey, ec.EllipticCurvePublicKey],
|
||||
) -> T_CoseKey:
|
||||
"""Converts a PublicKey object from Cryptography into a COSE key.
|
||||
|
||||
:param public_key: Either an EC or RSA public key.
|
||||
|
@ -65,22 +64,19 @@ class CoseKey(dict):
|
|||
raise NotImplementedError("Creation from cryptography not supported.")
|
||||
|
||||
@staticmethod
|
||||
def for_alg(alg):
|
||||
def for_alg(alg: int) -> Type[CoseKey]:
|
||||
"""Get a subclass of CoseKey corresponding to an algorithm identifier.
|
||||
|
||||
:param alg: The COSE identifier of the algorithm.
|
||||
:return: A CoseKey.
|
||||
"""
|
||||
if alg == EdDSA.ALGORITHM and ed25519 is None:
|
||||
# EdDSA requires Cryptography >= 2.6.
|
||||
return UnsupportedKey
|
||||
for cls in CoseKey.__subclasses__():
|
||||
if cls.ALGORITHM == alg:
|
||||
return cls
|
||||
return UnsupportedKey
|
||||
|
||||
@staticmethod
|
||||
def for_name(name):
|
||||
def for_name(name: str) -> Type[CoseKey]:
|
||||
"""Get a subclass of CoseKey corresponding to an algorithm identifier.
|
||||
|
||||
:param alg: The COSE identifier of the algorithm.
|
||||
|
@ -92,7 +88,7 @@ class CoseKey(dict):
|
|||
return UnsupportedKey
|
||||
|
||||
@staticmethod
|
||||
def parse(cose):
|
||||
def parse(cose: Mapping[int, Any]) -> CoseKey:
|
||||
"""Create a CoseKey from a dict"""
|
||||
alg = cose.get(3)
|
||||
if not alg:
|
||||
|
@ -100,15 +96,23 @@ class CoseKey(dict):
|
|||
return CoseKey.for_alg(alg)(cose)
|
||||
|
||||
@staticmethod
|
||||
def supported_algorithms():
|
||||
def supported_algorithms() -> Sequence[int]:
|
||||
"""Get a list of all supported algorithm identifiers"""
|
||||
if ed25519:
|
||||
algs = (ES256, EdDSA, PS256, RS256)
|
||||
else:
|
||||
algs = (ES256, PS256, RS256)
|
||||
algs: Sequence[Type[CoseKey]] = [
|
||||
ES256,
|
||||
EdDSA,
|
||||
ES384,
|
||||
ES512,
|
||||
PS256,
|
||||
RS256,
|
||||
ES256K,
|
||||
]
|
||||
return [cls.ALGORITHM for cls in algs]
|
||||
|
||||
|
||||
T_CoseKey = TypeVar("T_CoseKey", bound=CoseKey)
|
||||
|
||||
|
||||
class UnsupportedKey(CoseKey):
|
||||
"""A COSE key with an unsupported algorithm."""
|
||||
|
||||
|
@ -149,6 +153,60 @@ class ES256(CoseKey):
|
|||
return cls({1: 2, 3: cls.ALGORITHM, -1: 1, -2: data[1:33], -3: data[33:65]})
|
||||
|
||||
|
||||
class ES384(CoseKey):
|
||||
ALGORITHM = -35
|
||||
_HASH_ALG = hashes.SHA384()
|
||||
|
||||
def verify(self, message, signature):
|
||||
if self[-1] != 2:
|
||||
raise ValueError("Unsupported elliptic curve")
|
||||
ec.EllipticCurvePublicNumbers(
|
||||
bytes2int(self[-2]), bytes2int(self[-3]), ec.SECP384R1()
|
||||
).public_key(default_backend()).verify(
|
||||
signature, message, ec.ECDSA(self._HASH_ALG)
|
||||
)
|
||||
|
||||
@classmethod
|
||||
def from_cryptography_key(cls, public_key):
|
||||
pn = public_key.public_numbers()
|
||||
return cls(
|
||||
{
|
||||
1: 2,
|
||||
3: cls.ALGORITHM,
|
||||
-1: 2,
|
||||
-2: int2bytes(pn.x, 48),
|
||||
-3: int2bytes(pn.y, 48),
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
class ES512(CoseKey):
|
||||
ALGORITHM = -36
|
||||
_HASH_ALG = hashes.SHA512()
|
||||
|
||||
def verify(self, message, signature):
|
||||
if self[-1] != 3:
|
||||
raise ValueError("Unsupported elliptic curve")
|
||||
ec.EllipticCurvePublicNumbers(
|
||||
bytes2int(self[-2]), bytes2int(self[-3]), ec.SECP521R1()
|
||||
).public_key(default_backend()).verify(
|
||||
signature, message, ec.ECDSA(self._HASH_ALG)
|
||||
)
|
||||
|
||||
@classmethod
|
||||
def from_cryptography_key(cls, public_key):
|
||||
pn = public_key.public_numbers()
|
||||
return cls(
|
||||
{
|
||||
1: 2,
|
||||
3: cls.ALGORITHM,
|
||||
-1: 3,
|
||||
-2: int2bytes(pn.x, 64),
|
||||
-3: int2bytes(pn.y, 64),
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
class RS256(CoseKey):
|
||||
ALGORITHM = -257
|
||||
_HASH_ALG = hashes.SHA256()
|
||||
|
@ -221,3 +279,30 @@ class RS1(CoseKey):
|
|||
def from_cryptography_key(cls, public_key):
|
||||
pn = public_key.public_numbers()
|
||||
return cls({1: 3, 3: cls.ALGORITHM, -1: int2bytes(pn.n), -2: int2bytes(pn.e)})
|
||||
|
||||
|
||||
class ES256K(CoseKey):
|
||||
ALGORITHM = -47
|
||||
_HASH_ALG = hashes.SHA256()
|
||||
|
||||
def verify(self, message, signature):
|
||||
if self[-1] != 8:
|
||||
raise ValueError("Unsupported elliptic curve")
|
||||
ec.EllipticCurvePublicNumbers(
|
||||
bytes2int(self[-2]), bytes2int(self[-3]), ec.SECP256K1()
|
||||
).public_key(default_backend()).verify(
|
||||
signature, message, ec.ECDSA(self._HASH_ALG)
|
||||
)
|
||||
|
||||
@classmethod
|
||||
def from_cryptography_key(cls, public_key):
|
||||
pn = public_key.public_numbers()
|
||||
return cls(
|
||||
{
|
||||
1: 2,
|
||||
3: cls.ALGORITHM,
|
||||
-1: 8,
|
||||
-2: int2bytes(pn.x, 32),
|
||||
-3: int2bytes(pn.y, 32),
|
||||
}
|
||||
)
|
||||
|
|
|
@ -25,10 +25,13 @@
|
|||
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
||||
# POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
from __future__ import absolute_import
|
||||
from __future__ import annotations
|
||||
|
||||
from enum import IntEnum, unique
|
||||
import abc
|
||||
from enum import IntEnum, unique
|
||||
from threading import Event
|
||||
|
||||
from typing import Optional, Callable, Iterator
|
||||
|
||||
|
||||
@unique
|
||||
|
@ -43,8 +46,19 @@ class CtapDevice(abc.ABC):
|
|||
list_devices, which should return a generator over discoverable devices.
|
||||
"""
|
||||
|
||||
@property
|
||||
@abc.abstractmethod
|
||||
def call(self, cmd, data=b"", event=None, on_keepalive=None):
|
||||
def capabilities(self) -> int:
|
||||
"""Get device capabilities"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def call(
|
||||
self,
|
||||
cmd: int,
|
||||
data: bytes = b"",
|
||||
event: Optional[Event] = None,
|
||||
on_keepalive: Optional[Callable[[int], None]] = None,
|
||||
) -> bytes:
|
||||
"""Sends a command to the authenticator, and reads the response.
|
||||
|
||||
:param cmd: The integer value of the command.
|
||||
|
@ -57,7 +71,7 @@ class CtapDevice(abc.ABC):
|
|||
:return: The response from the authenticator.
|
||||
"""
|
||||
|
||||
def close(self):
|
||||
def close(self) -> None:
|
||||
"""Close the device, releasing any held resources."""
|
||||
|
||||
def __enter__(self):
|
||||
|
@ -68,7 +82,7 @@ class CtapDevice(abc.ABC):
|
|||
|
||||
@classmethod
|
||||
@abc.abstractmethod
|
||||
def list_devices(cls):
|
||||
def list_devices(cls) -> Iterator[CtapDevice]:
|
||||
"""Generates instances of cls for discoverable devices."""
|
||||
|
||||
|
||||
|
@ -77,14 +91,14 @@ class CtapError(Exception):
|
|||
name = "UNKNOWN_ERR"
|
||||
|
||||
@property
|
||||
def value(self):
|
||||
def value(self) -> int:
|
||||
return int(self)
|
||||
|
||||
def __repr__(self):
|
||||
return "<ERR.UNKNOWN: %d>" % self
|
||||
|
||||
def __str__(self):
|
||||
return "0x%02X - UNKNOWN" % self
|
||||
return f"0x{self:02X} - UNKNOWN"
|
||||
|
||||
@unique
|
||||
class ERR(IntEnum):
|
||||
|
@ -145,12 +159,11 @@ class CtapError(Exception):
|
|||
VENDOR_LAST = 0xFF
|
||||
|
||||
def __str__(self):
|
||||
return "0x%02X - %s" % (self.value, self.name)
|
||||
return f"0x{self.value:02X} - {self.name}"
|
||||
|
||||
def __init__(self, code):
|
||||
def __init__(self, code: int):
|
||||
try:
|
||||
code = CtapError.ERR(code)
|
||||
self.code = CtapError.ERR(code)
|
||||
except ValueError:
|
||||
code = CtapError.UNKNOWN_ERR(code)
|
||||
self.code = code
|
||||
super(CtapError, self).__init__("CTAP error: %s" % code)
|
||||
self.code = CtapError.UNKNOWN_ERR(code) # type: ignore
|
||||
super().__init__(f"CTAP error: {self.code}")
|
||||
|
|
114
fido2/ctap1.py
114
fido2/ctap1.py
|
@ -25,16 +25,16 @@
|
|||
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
||||
# POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
from __future__ import absolute_import, unicode_literals
|
||||
from __future__ import annotations
|
||||
|
||||
from .hid import CTAPHID
|
||||
from .ctap import CtapDevice
|
||||
from .utils import websafe_encode, websafe_decode, bytes2int, ByteBuffer
|
||||
from .cose import ES256
|
||||
from .attestation import FidoU2FAttestation
|
||||
from enum import IntEnum, unique
|
||||
from binascii import b2a_hex
|
||||
from dataclasses import dataclass
|
||||
import struct
|
||||
import six
|
||||
|
||||
|
||||
@unique
|
||||
|
@ -55,16 +55,15 @@ class ApduError(Exception):
|
|||
|
||||
"""
|
||||
|
||||
def __init__(self, code, data=b""):
|
||||
def __init__(self, code: int, data: bytes = b""):
|
||||
self.code = code
|
||||
self.data = data
|
||||
|
||||
def __repr__(self):
|
||||
return "APDU error: 0x{:04X} {:d} bytes of data".format(
|
||||
self.code, len(self.data)
|
||||
)
|
||||
return f"APDU error: 0x{self.code:04X} {len(self.data):d} bytes of data"
|
||||
|
||||
|
||||
@dataclass(init=False)
|
||||
class RegistrationData(bytes):
|
||||
"""Binary response data for a CTAP1 registration.
|
||||
|
||||
|
@ -76,33 +75,37 @@ class RegistrationData(bytes):
|
|||
:ivar signature: Attestation signature.
|
||||
"""
|
||||
|
||||
def __init__(self, _):
|
||||
super(RegistrationData, self).__init__()
|
||||
public_key: bytes
|
||||
key_handle: bytes
|
||||
certificate: bytes
|
||||
signature: bytes
|
||||
|
||||
if six.indexbytes(self, 0) != 0x05:
|
||||
def __init__(self, _):
|
||||
super().__init__()
|
||||
|
||||
reader = ByteBuffer(self)
|
||||
if reader.unpack("B") != 0x05:
|
||||
raise ValueError("Reserved byte != 0x05")
|
||||
|
||||
self.public_key = self[1:66]
|
||||
kh_len = six.indexbytes(self, 66)
|
||||
self.key_handle = self[67 : 67 + kh_len]
|
||||
self.public_key = reader.read(65)
|
||||
self.key_handle = reader.read(reader.unpack("B"))
|
||||
|
||||
cert_offs = 67 + kh_len
|
||||
cert_len = six.indexbytes(self, cert_offs + 1)
|
||||
if cert_len > 0x80:
|
||||
cert_buf = reader.read(2) # Tag and first length byte
|
||||
cert_len = cert_buf[1]
|
||||
if cert_len > 0x80: # Multi-byte length
|
||||
n_bytes = cert_len - 0x80
|
||||
cert_len = (
|
||||
bytes2int(self[cert_offs + 2 : cert_offs + 2 + n_bytes]) + n_bytes
|
||||
)
|
||||
cert_len += 2
|
||||
self.certificate = self[cert_offs : cert_offs + cert_len]
|
||||
self.signature = self[cert_offs + cert_len :]
|
||||
len_bytes = reader.read(n_bytes)
|
||||
cert_buf += len_bytes
|
||||
cert_len = bytes2int(len_bytes)
|
||||
self.certificate = cert_buf + reader.read(cert_len)
|
||||
self.signature = reader.read()
|
||||
|
||||
@property
|
||||
def b64(self):
|
||||
def b64(self) -> str:
|
||||
"""Websafe base64 encoded string of the RegistrationData."""
|
||||
return websafe_encode(self)
|
||||
|
||||
def verify(self, app_param, client_param):
|
||||
def verify(self, app_param: bytes, client_param: bytes) -> None:
|
||||
"""Verify the included signature with regard to the given app and client
|
||||
params.
|
||||
|
||||
|
@ -118,25 +121,8 @@ class RegistrationData(bytes):
|
|||
self.signature,
|
||||
)
|
||||
|
||||
def __repr__(self):
|
||||
return (
|
||||
"RegistrationData(public_key: h'%s', key_handle: h'%s', "
|
||||
"certificate: h'%s', signature: h'%s')"
|
||||
) % tuple(
|
||||
b2a_hex(x).decode()
|
||||
for x in (
|
||||
self.public_key,
|
||||
self.key_handle,
|
||||
self.certificate,
|
||||
self.signature,
|
||||
)
|
||||
)
|
||||
|
||||
def __str__(self):
|
||||
return "%r" % self
|
||||
|
||||
@classmethod
|
||||
def from_b64(cls, data):
|
||||
def from_b64(cls, data: str) -> RegistrationData:
|
||||
"""Parse a RegistrationData from a websafe base64 encoded string.
|
||||
|
||||
:param data: Websafe base64 encoded string.
|
||||
|
@ -145,6 +131,7 @@ class RegistrationData(bytes):
|
|||
return cls(websafe_decode(data))
|
||||
|
||||
|
||||
@dataclass(init=False)
|
||||
class SignatureData(bytes):
|
||||
"""Binary response data for a CTAP1 authentication.
|
||||
|
||||
|
@ -154,8 +141,12 @@ class SignatureData(bytes):
|
|||
:ivar signature: Cryptographic signature.
|
||||
"""
|
||||
|
||||
user_presence: int
|
||||
counter: int
|
||||
signature: bytes
|
||||
|
||||
def __init__(self, _):
|
||||
super(SignatureData, self).__init__()
|
||||
super().__init__()
|
||||
|
||||
reader = ByteBuffer(self)
|
||||
self.user_presence = reader.unpack("B")
|
||||
|
@ -163,11 +154,11 @@ class SignatureData(bytes):
|
|||
self.signature = reader.read()
|
||||
|
||||
@property
|
||||
def b64(self):
|
||||
def b64(self) -> str:
|
||||
"""str: Websafe base64 encoded string of the SignatureData."""
|
||||
return websafe_encode(self)
|
||||
|
||||
def verify(self, app_param, client_param, public_key):
|
||||
def verify(self, app_param: bytes, client_param: bytes, public_key: bytes) -> None:
|
||||
"""Verify the included signature with regard to the given app and client
|
||||
params, using the given public key.
|
||||
|
||||
|
@ -178,16 +169,8 @@ class SignatureData(bytes):
|
|||
m = app_param + self[:5] + client_param
|
||||
ES256.from_ctap1(public_key).verify(m, self.signature)
|
||||
|
||||
def __repr__(self):
|
||||
return (
|
||||
"SignatureData(user_presence: 0x%02x, counter: %d, " "signature: h'%s'"
|
||||
) % (self.user_presence, self.counter, b2a_hex(self.signature))
|
||||
|
||||
def __str__(self):
|
||||
return "%r" % self
|
||||
|
||||
@classmethod
|
||||
def from_b64(cls, data):
|
||||
def from_b64(cls, data: str) -> SignatureData:
|
||||
"""Parse a SignatureData from a websafe base64 encoded string.
|
||||
|
||||
:param data: Websafe base64 encoded string.
|
||||
|
@ -196,7 +179,7 @@ class SignatureData(bytes):
|
|||
return cls(websafe_decode(data))
|
||||
|
||||
|
||||
class Ctap1(object):
|
||||
class Ctap1:
|
||||
"""Implementation of the CTAP1 specification.
|
||||
|
||||
:param device: A CtapHidDevice handle supporting CTAP1.
|
||||
|
@ -208,10 +191,12 @@ class Ctap1(object):
|
|||
AUTHENTICATE = 0x02
|
||||
VERSION = 0x03
|
||||
|
||||
def __init__(self, device):
|
||||
def __init__(self, device: CtapDevice):
|
||||
self.device = device
|
||||
|
||||
def send_apdu(self, cla=0, ins=0, p1=0, p2=0, data=b""):
|
||||
def send_apdu(
|
||||
self, cla: int = 0, ins: int = 0, p1: int = 0, p2: int = 0, data: bytes = b""
|
||||
) -> bytes:
|
||||
"""Packs and sends an APDU for use in CTAP1 commands.
|
||||
This is a low-level method mainly used internally. Avoid calling it
|
||||
directly if possible, and use the get_version, register, and
|
||||
|
@ -234,7 +219,7 @@ class Ctap1(object):
|
|||
raise ApduError(status, data)
|
||||
return data
|
||||
|
||||
def get_version(self):
|
||||
def get_version(self) -> str:
|
||||
"""Get the U2F version implemented by the authenticator.
|
||||
The only version specified is "U2F_V2".
|
||||
|
||||
|
@ -242,7 +227,7 @@ class Ctap1(object):
|
|||
"""
|
||||
return self.send_apdu(ins=Ctap1.INS.VERSION).decode()
|
||||
|
||||
def register(self, client_param, app_param):
|
||||
def register(self, client_param: bytes, app_param: bytes) -> RegistrationData:
|
||||
"""Register a new U2F credential.
|
||||
|
||||
:param client_param: SHA256 hash of the ClientData used for the request.
|
||||
|
@ -253,7 +238,13 @@ class Ctap1(object):
|
|||
response = self.send_apdu(ins=Ctap1.INS.REGISTER, data=data)
|
||||
return RegistrationData(response)
|
||||
|
||||
def authenticate(self, client_param, app_param, key_handle, check_only=False):
|
||||
def authenticate(
|
||||
self,
|
||||
client_param: bytes,
|
||||
app_param: bytes,
|
||||
key_handle: bytes,
|
||||
check_only: bool = False,
|
||||
) -> SignatureData:
|
||||
"""Authenticate a previously registered credential.
|
||||
|
||||
:param client_param: SHA256 hash of the ClientData used for the request.
|
||||
|
@ -269,6 +260,3 @@ class Ctap1(object):
|
|||
p1 = 0x07 if check_only else 0x03
|
||||
response = self.send_apdu(ins=Ctap1.INS.AUTHENTICATE, p1=p1, data=data)
|
||||
return SignatureData(response)
|
||||
|
||||
|
||||
CTAP1 = Ctap1
|
||||
|
|
|
@ -25,14 +25,10 @@
|
|||
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
||||
# POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
from __future__ import absolute_import
|
||||
|
||||
from .base import ( # noqa
|
||||
Ctap2,
|
||||
Info,
|
||||
AuthenticatorData,
|
||||
AttestedCredentialData,
|
||||
AttestationObject,
|
||||
AttestationResponse,
|
||||
AssertionResponse,
|
||||
)
|
||||
|
||||
|
@ -41,6 +37,3 @@ from .credman import CredentialManagement # noqa
|
|||
from .bio import FPBioEnrollment, CaptureError # noqa
|
||||
from .blob import LargeBlobs # noqa
|
||||
from .config import Config # noqa
|
||||
|
||||
# Alias for compatibility, this will be going away
|
||||
CTAP2 = Ctap2
|
||||
|
|
File diff suppressed because it is too large
Load Diff
|
@ -25,16 +25,23 @@
|
|||
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
||||
# POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
from __future__ import absolute_import, unicode_literals
|
||||
from __future__ import annotations
|
||||
|
||||
from .. import cbor
|
||||
from ..ctap import CtapError
|
||||
from .base import Ctap2, Info
|
||||
from .pin import PinProtocol
|
||||
|
||||
from enum import IntEnum, unique
|
||||
from threading import Event
|
||||
from typing import Optional, Callable, Mapping, Any, Tuple
|
||||
import struct
|
||||
import logging
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class BioEnrollment(object):
|
||||
class BioEnrollment:
|
||||
@unique
|
||||
class RESULT(IntEnum):
|
||||
MODALITY = 0x01
|
||||
|
@ -55,26 +62,27 @@ class BioEnrollment(object):
|
|||
FINGERPRINT = 0x01
|
||||
|
||||
@staticmethod
|
||||
def is_supported(info):
|
||||
def is_supported(info: Info) -> bool:
|
||||
if "bioEnroll" in info.options:
|
||||
return True
|
||||
# We also support the Prototype command
|
||||
if "FIDO_2_1_PRE" in info.versions and info.options.get(
|
||||
"credentialMgmtPreview"
|
||||
if (
|
||||
"FIDO_2_1_PRE" in info.versions
|
||||
and "userVerificationMgmtPreview" in info.options
|
||||
):
|
||||
return True
|
||||
return False
|
||||
|
||||
def __init__(self, ctap, modality):
|
||||
def __init__(self, ctap: Ctap2, modality: MODALITY):
|
||||
if not self.is_supported(ctap.info):
|
||||
raise ValueError("Authenticator does not support BioEnroll")
|
||||
|
||||
self.ctap = ctap
|
||||
self.modality = self.get_modality()
|
||||
if modality != self.modality:
|
||||
raise ValueError("Device does not support {:s}".format(modality))
|
||||
raise ValueError(f"Device does not support {modality:s}")
|
||||
|
||||
def get_modality(self):
|
||||
def get_modality(self) -> int:
|
||||
"""Get bio modality.
|
||||
|
||||
:return: The type of modality supported by the authenticator.
|
||||
|
@ -85,12 +93,12 @@ class BioEnrollment(object):
|
|||
|
||||
|
||||
class CaptureError(Exception):
|
||||
def __init__(self, code):
|
||||
def __init__(self, code: int):
|
||||
self.code = code
|
||||
super(CaptureError, self).__init__("Fingerprint capture error: %s" % code)
|
||||
super().__init__(f"Fingerprint capture error: {code}")
|
||||
|
||||
|
||||
class FPEnrollmentContext(object):
|
||||
class FPEnrollmentContext:
|
||||
"""Helper object to perform fingerprint enrollment.
|
||||
|
||||
:param bio: An instance of FPBioEnrollment.
|
||||
|
@ -100,13 +108,17 @@ class FPEnrollmentContext(object):
|
|||
sample has been captured).
|
||||
"""
|
||||
|
||||
def __init__(self, bio, timeout=None):
|
||||
def __init__(self, bio: "FPBioEnrollment", timeout: Optional[int] = None):
|
||||
self._bio = bio
|
||||
self.timeout = timeout
|
||||
self.template_id = None
|
||||
self.remaining = None
|
||||
self.template_id: Optional[bytes] = None
|
||||
self.remaining: Optional[int] = None
|
||||
|
||||
def capture(self, event=None, on_keepalive=None):
|
||||
def capture(
|
||||
self,
|
||||
event: Optional[Event] = None,
|
||||
on_keepalive: Optional[Callable[[int], None]] = None,
|
||||
) -> Optional[bytes]:
|
||||
"""Capture a fingerprint sample.
|
||||
|
||||
This call will block for up to timeout milliseconds (or indefinitely, if
|
||||
|
@ -130,7 +142,7 @@ class FPEnrollmentContext(object):
|
|||
return self.template_id
|
||||
return None
|
||||
|
||||
def cancel(self):
|
||||
def cancel(self) -> None:
|
||||
"""Cancels ongoing enrollment."""
|
||||
self._bio.enroll_cancel()
|
||||
self.template_id = None
|
||||
|
@ -183,10 +195,10 @@ class FPBioEnrollment(BioEnrollment):
|
|||
NO_UP_TRANSITION = 0x0E
|
||||
|
||||
def __str__(self):
|
||||
return "0x%02X - %s" % (self.value, self.name)
|
||||
return f"0x{self.value:02X} - {self.name}"
|
||||
|
||||
def __init__(self, ctap, pin_uv_protocol, pin_uv_token):
|
||||
super(FPBioEnrollment, self).__init__(ctap, BioEnrollment.MODALITY.FINGERPRINT)
|
||||
def __init__(self, ctap: Ctap2, pin_uv_protocol: PinProtocol, pin_uv_token: bytes):
|
||||
super().__init__(ctap, BioEnrollment.MODALITY.FINGERPRINT)
|
||||
self.pin_uv_protocol = pin_uv_protocol
|
||||
self.pin_uv_token = pin_uv_token
|
||||
|
||||
|
@ -210,14 +222,19 @@ class FPBioEnrollment(BioEnrollment):
|
|||
)
|
||||
return self.ctap.bio_enrollment(**kwargs)
|
||||
|
||||
def get_fingerprint_sensor_info(self):
|
||||
def get_fingerprint_sensor_info(self) -> Mapping[int, Any]:
|
||||
"""Get fingerprint sensor info.
|
||||
|
||||
:return: A dict containing FINGERPRINT_KIND and MAX_SAMPLES_REQUIRES.
|
||||
"""
|
||||
return self._call(FPBioEnrollment.CMD.GET_SENSOR_INFO, auth=False)
|
||||
|
||||
def enroll_begin(self, timeout=None, event=None, on_keepalive=None):
|
||||
def enroll_begin(
|
||||
self,
|
||||
timeout: Optional[int] = None,
|
||||
event: Optional[Event] = None,
|
||||
on_keepalive: Optional[Callable[[int], None]] = None,
|
||||
) -> Tuple[bytes, FPBioEnrollment.FEEDBACK, int]:
|
||||
"""Start fingerprint enrollment.
|
||||
|
||||
Starts the process of enrolling a new fingerprint, and will wait for the user
|
||||
|
@ -227,12 +244,14 @@ class FPBioEnrollment(BioEnrollment):
|
|||
:return: A tuple containing the new template ID, the sample status, and the
|
||||
number of samples remaining to complete the enrollment.
|
||||
"""
|
||||
logger.debug(f"Starting fingerprint enrollment (timeout={timeout})")
|
||||
result = self._call(
|
||||
FPBioEnrollment.CMD.ENROLL_BEGIN,
|
||||
{FPBioEnrollment.PARAM.TIMEOUT_MS: timeout},
|
||||
event=event,
|
||||
on_keepalive=on_keepalive,
|
||||
)
|
||||
logger.debug(f"Sample capture result: {result}")
|
||||
return (
|
||||
result[BioEnrollment.RESULT.TEMPLATE_ID],
|
||||
FPBioEnrollment.FEEDBACK(result[BioEnrollment.RESULT.LAST_SAMPLE_STATUS]),
|
||||
|
@ -240,8 +259,12 @@ class FPBioEnrollment(BioEnrollment):
|
|||
)
|
||||
|
||||
def enroll_capture_next(
|
||||
self, template_id, timeout=None, event=None, on_keepalive=None
|
||||
):
|
||||
self,
|
||||
template_id: bytes,
|
||||
timeout: Optional[int] = None,
|
||||
event: Optional[Event] = None,
|
||||
on_keepalive: Optional[Callable[[int], None]] = None,
|
||||
) -> Tuple[FPBioEnrollment.FEEDBACK, int]:
|
||||
"""Continue fingerprint enrollment.
|
||||
|
||||
Continues enrolling a new fingerprint and will wait for the user to scan their
|
||||
|
@ -253,6 +276,7 @@ class FPBioEnrollment(BioEnrollment):
|
|||
:return: A tuple containing the sample status, and the number of samples
|
||||
remaining to complete the enrollment.
|
||||
"""
|
||||
logger.debug(f"Capturing next sample with (timeout={timeout})")
|
||||
result = self._call(
|
||||
FPBioEnrollment.CMD.ENROLL_CAPTURE_NEXT,
|
||||
{
|
||||
|
@ -262,16 +286,18 @@ class FPBioEnrollment(BioEnrollment):
|
|||
event=event,
|
||||
on_keepalive=on_keepalive,
|
||||
)
|
||||
logger.debug(f"Sample capture result: {result}")
|
||||
return (
|
||||
FPBioEnrollment.FEEDBACK(result[BioEnrollment.RESULT.LAST_SAMPLE_STATUS]),
|
||||
result[BioEnrollment.RESULT.REMAINING_SAMPLES],
|
||||
)
|
||||
|
||||
def enroll_cancel(self):
|
||||
def enroll_cancel(self) -> None:
|
||||
"""Cancel any ongoing fingerprint enrollment."""
|
||||
logger.debug("Cancelling fingerprint enrollment.")
|
||||
self._call(FPBioEnrollment.CMD.ENROLL_CANCEL, auth=False)
|
||||
|
||||
def enroll(self, timeout=None):
|
||||
def enroll(self, timeout: Optional[int] = None) -> FPEnrollmentContext:
|
||||
"""Convenience wrapper for doing fingerprint enrollment.
|
||||
|
||||
See FPEnrollmentContext for details.
|
||||
|
@ -279,7 +305,7 @@ class FPBioEnrollment(BioEnrollment):
|
|||
"""
|
||||
return FPEnrollmentContext(self, timeout)
|
||||
|
||||
def enumerate_enrollments(self):
|
||||
def enumerate_enrollments(self) -> Mapping[bytes, Optional[str]]:
|
||||
"""Get a dict of enrolled fingerprint templates which maps template ID's to
|
||||
their friendly names.
|
||||
|
||||
|
@ -297,12 +323,13 @@ class FPBioEnrollment(BioEnrollment):
|
|||
return {}
|
||||
raise
|
||||
|
||||
def set_name(self, template_id, name):
|
||||
def set_name(self, template_id: bytes, name: str) -> None:
|
||||
"""Set/Change the friendly name of a previously enrolled fingerprint template.
|
||||
|
||||
:param template_id: The ID of the template to change.
|
||||
:param name: A friendly name to give the template.
|
||||
"""
|
||||
logger.debug(f"Changing name of template: {template_id.hex()} to {name}")
|
||||
self._call(
|
||||
FPBioEnrollment.CMD.SET_NAME,
|
||||
{
|
||||
|
@ -310,13 +337,16 @@ class FPBioEnrollment(BioEnrollment):
|
|||
BioEnrollment.TEMPLATE_INFO.NAME: name,
|
||||
},
|
||||
)
|
||||
logger.info("Fingerprint template renamed")
|
||||
|
||||
def remove_enrollment(self, template_id):
|
||||
def remove_enrollment(self, template_id: bytes) -> None:
|
||||
"""Remove a previously enrolled fingerprint template.
|
||||
|
||||
:param template_id: The Id of the template to remove.
|
||||
"""
|
||||
logger.debug(f"Deleting template: {template_id.hex()}")
|
||||
self._call(
|
||||
FPBioEnrollment.CMD.REMOVE_ENROLLMENT,
|
||||
{BioEnrollment.TEMPLATE_INFO.ID: template_id},
|
||||
)
|
||||
logger.info("Fingerprint template deleted")
|
||||
|
|
|
@ -25,20 +25,32 @@
|
|||
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
||||
# POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
from __future__ import absolute_import, unicode_literals
|
||||
from __future__ import annotations
|
||||
|
||||
from .. import cbor
|
||||
from ..utils import sha256
|
||||
from .base import Ctap2, Info
|
||||
from .pin import PinProtocol, _PinUv
|
||||
|
||||
from cryptography.hazmat.backends import default_backend
|
||||
from cryptography.hazmat.primitives.ciphers import Cipher, algorithms, modes
|
||||
from cryptography.hazmat.primitives.ciphers.aead import AESGCM
|
||||
from cryptography.exceptions import InvalidTag
|
||||
|
||||
from typing import Optional, Any, Sequence, Mapping, cast
|
||||
import struct
|
||||
import zlib
|
||||
import os
|
||||
|
||||
|
||||
def _compress(data):
|
||||
o = zlib.compressobj(wbits=-zlib.MAX_WBITS)
|
||||
return o.compress(data) + o.flush()
|
||||
|
||||
|
||||
def _decompress(data):
|
||||
o = zlib.decompressobj(wbits=-zlib.MAX_WBITS)
|
||||
return o.decompress(data) + o.flush()
|
||||
|
||||
|
||||
def _lb_ad(orig_size):
|
||||
return b"blob" + struct.pack("<Q", orig_size)
|
||||
|
||||
|
@ -46,13 +58,12 @@ def _lb_ad(orig_size):
|
|||
def _lb_pack(key, data):
|
||||
orig_size = len(data)
|
||||
nonce = os.urandom(12)
|
||||
cipher = Cipher(algorithms.AES(key), modes.GCM(nonce), default_backend())
|
||||
encryptor = cipher.encryptor()
|
||||
encryptor.authenticate_additional_data(_lb_ad(orig_size))
|
||||
ciphertext = encryptor.update(zlib.compress(data)) + encryptor.finalize()
|
||||
aesgcm = AESGCM(key)
|
||||
|
||||
ciphertext = aesgcm.encrypt(nonce, _compress(data), _lb_ad(orig_size))
|
||||
|
||||
return {
|
||||
1: ciphertext + encryptor.tag,
|
||||
1: ciphertext,
|
||||
2: nonce,
|
||||
3: orig_size,
|
||||
}
|
||||
|
@ -60,23 +71,19 @@ def _lb_pack(key, data):
|
|||
|
||||
def _lb_unpack(key, entry):
|
||||
try:
|
||||
ciphertext, tag = entry[1][:-16], entry[1][-16:]
|
||||
ciphertext = entry[1]
|
||||
nonce = entry[2]
|
||||
orig_size = entry[3]
|
||||
cipher = Cipher(algorithms.AES(key), modes.GCM(nonce, tag), default_backend())
|
||||
decryptor = cipher.decryptor()
|
||||
decryptor.authenticate_additional_data(_lb_ad(orig_size))
|
||||
aesgcm = AESGCM(key)
|
||||
compressed = aesgcm.decrypt(nonce, ciphertext, _lb_ad(orig_size))
|
||||
return compressed, orig_size
|
||||
except (TypeError, IndexError, KeyError):
|
||||
raise ValueError("Invalid entry")
|
||||
|
||||
try:
|
||||
compressed = decryptor.update(ciphertext) + decryptor.finalize()
|
||||
return compressed, orig_size
|
||||
except InvalidTag:
|
||||
raise ValueError("Wrong key")
|
||||
|
||||
|
||||
class LargeBlobs(object):
|
||||
class LargeBlobs:
|
||||
"""Implementation of the CTAP2.1 Large Blobs API.
|
||||
|
||||
Getting a largeBlobKey for a credential is done via the LargeBlobKey extension.
|
||||
|
@ -87,19 +94,27 @@ class LargeBlobs(object):
|
|||
"""
|
||||
|
||||
@staticmethod
|
||||
def is_supported(info):
|
||||
return info.options.get("largeBlobs")
|
||||
def is_supported(info: Info) -> bool:
|
||||
return info.options.get("largeBlobs") is True
|
||||
|
||||
def __init__(self, ctap, pin_uv_protocol=None, pin_uv_token=None):
|
||||
def __init__(
|
||||
self,
|
||||
ctap: Ctap2,
|
||||
pin_uv_protocol: Optional[PinProtocol] = None,
|
||||
pin_uv_token: Optional[bytes] = None,
|
||||
):
|
||||
if not self.is_supported(ctap.info):
|
||||
raise ValueError("Authenticator does not support LargeBlobs")
|
||||
|
||||
self.ctap = ctap
|
||||
self.max_fragment_length = self.ctap.info.max_msg_size - 64
|
||||
self.pin_uv_protocol = pin_uv_protocol
|
||||
self.pin_uv_token = pin_uv_token
|
||||
self.pin_uv = (
|
||||
_PinUv(pin_uv_protocol, pin_uv_token)
|
||||
if pin_uv_protocol and pin_uv_token
|
||||
else None
|
||||
)
|
||||
|
||||
def read_blob_array(self):
|
||||
def read_blob_array(self) -> Sequence[Mapping[int, Any]]:
|
||||
"""Gets the entire contents of the Large Blobs array.
|
||||
|
||||
:return: The CBOR decoded list of Large Blobs.
|
||||
|
@ -116,9 +131,9 @@ class LargeBlobs(object):
|
|||
data, check = buf[:-16], buf[-16:]
|
||||
if check != sha256(data)[:-16]:
|
||||
return []
|
||||
return cbor.decode(data)
|
||||
return cast(Sequence[Mapping[int, Any]], cbor.decode(data))
|
||||
|
||||
def write_blob_array(self, blob_array):
|
||||
def write_blob_array(self, blob_array: Sequence[Mapping[int, Any]]) -> None:
|
||||
"""Writes the entire Large Blobs array.
|
||||
|
||||
:param blob_array: A list to write to the Authenticator.
|
||||
|
@ -131,48 +146,50 @@ class LargeBlobs(object):
|
|||
offset = 0
|
||||
size = len(data)
|
||||
|
||||
pin_uv_param = None
|
||||
pin_uv_protocol = self.pin_uv_protocol.VERSION if self.pin_uv_token else None
|
||||
|
||||
while offset < size:
|
||||
ln = min(size - offset, self.max_fragment_length)
|
||||
_set = data[offset : offset + ln]
|
||||
|
||||
if self.pin_uv_token:
|
||||
if self.pin_uv:
|
||||
msg = (
|
||||
b"\xff" * 32
|
||||
+ b"\x0c\x00"
|
||||
+ struct.pack("<I", offset)
|
||||
+ sha256(_set)
|
||||
)
|
||||
pin_uv_param = self.pin_uv_protocol.authenticate(self.pin_uv_token, msg)
|
||||
pin_uv_protocol = self.pin_uv.protocol.VERSION
|
||||
pin_uv_param = self.pin_uv.protocol.authenticate(self.pin_uv.token, msg)
|
||||
else:
|
||||
pin_uv_param = None
|
||||
pin_uv_protocol = None
|
||||
|
||||
self.ctap.large_blobs(
|
||||
offset,
|
||||
set=_set,
|
||||
length=ln,
|
||||
length=size if offset == 0 else None,
|
||||
pin_uv_protocol=pin_uv_protocol,
|
||||
pin_uv_param=pin_uv_param,
|
||||
)
|
||||
|
||||
offset += ln
|
||||
|
||||
def get_blob(self, large_blob_key):
|
||||
def get_blob(self, large_blob_key: bytes) -> Optional[bytes]:
|
||||
"""Gets the Large Blob stored for a single credential.
|
||||
|
||||
:param large_blob_key: The largeBlobKey for the credential.
|
||||
:param large_blob_key: The largeBlobKey for the credential, or None.
|
||||
:returns: The decrypted and deflated value stored for the credential.
|
||||
"""
|
||||
for entry in self.read_blob_array():
|
||||
try:
|
||||
compressed, orig_size = _lb_unpack(large_blob_key, entry)
|
||||
decompressed = zlib.decompress(compressed)
|
||||
decompressed = _decompress(compressed)
|
||||
if len(decompressed) == orig_size:
|
||||
return decompressed
|
||||
except (ValueError, zlib.error):
|
||||
continue
|
||||
return None
|
||||
|
||||
def put_blob(self, large_blob_key, data):
|
||||
def put_blob(self, large_blob_key: bytes, data: Optional[bytes]) -> None:
|
||||
"""Stores a Large Blob for a single credential.
|
||||
|
||||
Any existing entries for the same credential will be replaced.
|
||||
|
@ -196,7 +213,7 @@ class LargeBlobs(object):
|
|||
if modified:
|
||||
self.write_blob_array(entries)
|
||||
|
||||
def delete_blob(self, large_blob_key):
|
||||
def delete_blob(self, large_blob_key: bytes) -> None:
|
||||
"""Deletes any Large Blob(s) stored for a single credential.
|
||||
|
||||
:param large_blob_key: The largeBlobKey for the credential.
|
||||
|
|
|
@ -25,15 +25,18 @@
|
|||
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
||||
# POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
from __future__ import absolute_import, unicode_literals
|
||||
from __future__ import annotations
|
||||
|
||||
from .. import cbor
|
||||
from .base import Ctap2, Info
|
||||
from .pin import PinProtocol, _PinUv
|
||||
|
||||
from typing import Optional, List
|
||||
from enum import IntEnum, unique
|
||||
import struct
|
||||
|
||||
|
||||
class Config(object):
|
||||
class Config:
|
||||
"""Implementation of the CTAP2.1 Authenticator Config API.
|
||||
|
||||
:param ctap: An instance of a CTAP2 object.
|
||||
|
@ -55,37 +58,52 @@ class Config(object):
|
|||
FORCE_CHANGE_PIN = 0x03
|
||||
|
||||
@staticmethod
|
||||
def is_supported(info):
|
||||
return info.options.get("authnrCfg")
|
||||
def is_supported(info: Info) -> bool:
|
||||
return info.options.get("authnrCfg") is True
|
||||
|
||||
def __init__(self, ctap, pin_uv_protocol=None, pin_uv_token=None):
|
||||
def __init__(
|
||||
self,
|
||||
ctap: Ctap2,
|
||||
pin_uv_protocol: Optional[PinProtocol] = None,
|
||||
pin_uv_token: Optional[bytes] = None,
|
||||
):
|
||||
if not self.is_supported(ctap.info):
|
||||
raise ValueError("Authenticator does not support Config")
|
||||
|
||||
self.ctap = ctap
|
||||
self.pin_uv_protocol = pin_uv_protocol
|
||||
self.pin_uv_token = pin_uv_token
|
||||
self.pin_uv = (
|
||||
_PinUv(pin_uv_protocol, pin_uv_token)
|
||||
if pin_uv_protocol and pin_uv_token
|
||||
else None
|
||||
)
|
||||
|
||||
def _call(self, sub_cmd, params=None):
|
||||
if params:
|
||||
params = {k: v for k, v in params.items() if v is not None}
|
||||
else:
|
||||
params = None
|
||||
if self.pin_uv_protocol:
|
||||
if self.pin_uv:
|
||||
msg = (
|
||||
b"\xff" * 32
|
||||
+ b"\x0d"
|
||||
+ struct.pack("<b", sub_cmd)
|
||||
+ struct.pack("<B", sub_cmd)
|
||||
+ (cbor.encode(params) if params else b"")
|
||||
)
|
||||
pin_uv_protocol = self.pin_uv_protocol.VERSION
|
||||
pin_uv_param = self.pin_uv_protocol.authenticate(self.pin_uv_token, msg)
|
||||
pin_uv_protocol = self.pin_uv.protocol.VERSION
|
||||
pin_uv_param = self.pin_uv.protocol.authenticate(self.pin_uv.token, msg)
|
||||
else:
|
||||
pin_uv_protocol = None
|
||||
pin_uv_param = None
|
||||
return self.ctap.config(sub_cmd, params, pin_uv_protocol, pin_uv_param)
|
||||
|
||||
def toggle_always_uv(self):
|
||||
def enable_enterprise_attestation(self) -> None:
|
||||
"""Enables Enterprise Attestation.
|
||||
|
||||
If already enabled, this command is ignored.
|
||||
"""
|
||||
self._call(Config.CMD.ENABLE_ENTERPRISE_ATT)
|
||||
|
||||
def toggle_always_uv(self) -> None:
|
||||
"""Toggle the alwaysUV setting.
|
||||
|
||||
When true, the Authenticator always requires UV for credential assertion.
|
||||
|
@ -93,8 +111,11 @@ class Config(object):
|
|||
self._call(Config.CMD.TOGGLE_ALWAYS_UV)
|
||||
|
||||
def set_min_pin_length(
|
||||
self, min_pin_length=None, rp_ids=None, force_change_pin=None
|
||||
):
|
||||
self,
|
||||
min_pin_length: Optional[int] = None,
|
||||
rp_ids: Optional[List[str]] = None,
|
||||
force_change_pin: bool = False,
|
||||
) -> None:
|
||||
"""Set the minimum PIN length allowed when setting/changing the PIN.
|
||||
|
||||
:param min_pin_length: The minimum PIN length the Authenticator should allow.
|
||||
|
|
|
@ -25,16 +25,24 @@
|
|||
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
||||
# POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
from __future__ import absolute_import, unicode_literals
|
||||
from __future__ import annotations
|
||||
|
||||
from .. import cbor
|
||||
from ..ctap import CtapError
|
||||
from ..webauthn import PublicKeyCredentialDescriptor, PublicKeyCredentialUserEntity
|
||||
from .base import Ctap2, Info
|
||||
from .pin import PinProtocol, _PinUv
|
||||
|
||||
from enum import IntEnum, unique
|
||||
from typing import Mapping, Sequence, Any
|
||||
|
||||
import struct
|
||||
import logging
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class CredentialManagement(object):
|
||||
class CredentialManagement:
|
||||
"""Implementation of a draft specification of the Credential Management API.
|
||||
WARNING: This specification is not final and this class is likely to change.
|
||||
|
||||
|
@ -74,23 +82,25 @@ class CredentialManagement(object):
|
|||
LARGE_BLOB_KEY = 0x0B
|
||||
|
||||
@staticmethod
|
||||
def is_supported(info):
|
||||
def is_supported(info: Info) -> bool:
|
||||
if info.options.get("credMgmt"):
|
||||
return True
|
||||
# We also support the Prototype command
|
||||
if "FIDO_2_1_PRE" in info.versions and info.options.get(
|
||||
"credentialMgmtPreview"
|
||||
):
|
||||
if "FIDO_2_1_PRE" in info.versions and "credentialMgmtPreview" in info.options:
|
||||
return True
|
||||
return False
|
||||
|
||||
def __init__(self, ctap, pin_uv_protocol, pin_uv_token):
|
||||
def __init__(
|
||||
self,
|
||||
ctap: Ctap2,
|
||||
pin_uv_protocol: PinProtocol,
|
||||
pin_uv_token: bytes,
|
||||
):
|
||||
if not self.is_supported(ctap.info):
|
||||
raise ValueError("Authenticator does not support Credential Management")
|
||||
|
||||
self.ctap = ctap
|
||||
self.pin_uv_protocol = pin_uv_protocol
|
||||
self.pin_uv_token = pin_uv_token
|
||||
self.pin_uv = _PinUv(pin_uv_protocol, pin_uv_token)
|
||||
|
||||
def _call(self, sub_cmd, params=None, auth=True):
|
||||
kwargs = {"sub_cmd": sub_cmd, "sub_cmd_params": params}
|
||||
|
@ -98,13 +108,13 @@ class CredentialManagement(object):
|
|||
msg = struct.pack(">B", sub_cmd)
|
||||
if params is not None:
|
||||
msg += cbor.encode(params)
|
||||
kwargs["pin_uv_protocol"] = self.pin_uv_protocol.VERSION
|
||||
kwargs["pin_uv_param"] = self.pin_uv_protocol.authenticate(
|
||||
self.pin_uv_token, msg
|
||||
kwargs["pin_uv_protocol"] = self.pin_uv.protocol.VERSION
|
||||
kwargs["pin_uv_param"] = self.pin_uv.protocol.authenticate(
|
||||
self.pin_uv.token, msg
|
||||
)
|
||||
return self.ctap.credential_mgmt(**kwargs)
|
||||
|
||||
def get_metadata(self):
|
||||
def get_metadata(self) -> Mapping[int, Any]:
|
||||
"""Get credentials metadata.
|
||||
|
||||
This returns the existing resident credentials count, and the max
|
||||
|
@ -115,7 +125,7 @@ class CredentialManagement(object):
|
|||
"""
|
||||
return self._call(CredentialManagement.CMD.GET_CREDS_METADATA)
|
||||
|
||||
def enumerate_rps_begin(self):
|
||||
def enumerate_rps_begin(self) -> Mapping[int, Any]:
|
||||
"""Start enumeration of RP entities of resident credentials.
|
||||
|
||||
This will begin enumeration of stored RP entities, returning the first
|
||||
|
@ -125,7 +135,7 @@ class CredentialManagement(object):
|
|||
"""
|
||||
return self._call(CredentialManagement.CMD.ENUMERATE_RPS_BEGIN)
|
||||
|
||||
def enumerate_rps_next(self):
|
||||
def enumerate_rps_next(self) -> Mapping[int, Any]:
|
||||
"""Get the next RP entity stored.
|
||||
|
||||
This continues enumeration of stored RP entities, returning the next
|
||||
|
@ -135,7 +145,7 @@ class CredentialManagement(object):
|
|||
"""
|
||||
return self._call(CredentialManagement.CMD.ENUMERATE_RPS_NEXT, auth=False)
|
||||
|
||||
def enumerate_rps(self):
|
||||
def enumerate_rps(self) -> Sequence[Mapping[int, Any]]:
|
||||
"""Convenience method to enumerate all RPs.
|
||||
|
||||
See enumerate_rps_begin and enumerate_rps_next for details.
|
||||
|
@ -152,7 +162,7 @@ class CredentialManagement(object):
|
|||
rest = [self.enumerate_rps_next() for _ in range(1, n_rps)]
|
||||
return [first] + rest
|
||||
|
||||
def enumerate_creds_begin(self, rp_id_hash):
|
||||
def enumerate_creds_begin(self, rp_id_hash: bytes) -> Mapping[int, Any]:
|
||||
"""Start enumeration of resident credentials.
|
||||
|
||||
This will begin enumeration of resident credentials for a given RP,
|
||||
|
@ -168,7 +178,7 @@ class CredentialManagement(object):
|
|||
{CredentialManagement.PARAM.RP_ID_HASH: rp_id_hash},
|
||||
)
|
||||
|
||||
def enumerate_creds_next(self):
|
||||
def enumerate_creds_next(self) -> Mapping[int, Any]:
|
||||
"""Get the next resident credential stored.
|
||||
|
||||
This continues enumeration of resident credentials, returning the next
|
||||
|
@ -178,7 +188,7 @@ class CredentialManagement(object):
|
|||
"""
|
||||
return self._call(CredentialManagement.CMD.ENUMERATE_CREDS_NEXT, auth=False)
|
||||
|
||||
def enumerate_creds(self, *args, **kwargs):
|
||||
def enumerate_creds(self, *args, **kwargs) -> Sequence[Mapping[int, Any]]:
|
||||
"""Convenience method to enumerate all resident credentials for an RP.
|
||||
|
||||
See enumerate_creds_begin and enumerate_creds_next for details.
|
||||
|
@ -197,26 +207,34 @@ class CredentialManagement(object):
|
|||
]
|
||||
return [first] + rest
|
||||
|
||||
def delete_cred(self, cred_id):
|
||||
def delete_cred(self, cred_id: PublicKeyCredentialDescriptor) -> None:
|
||||
"""Delete a resident credential.
|
||||
|
||||
:param cred_id: The PublicKeyCredentialDescriptor of the credential to delete.
|
||||
"""
|
||||
return self._call(
|
||||
logger.debug(f"Deleting credential with ID: {cred_id['id'].hex()}")
|
||||
self._call(
|
||||
CredentialManagement.CMD.DELETE_CREDENTIAL,
|
||||
{CredentialManagement.PARAM.CREDENTIAL_ID: cred_id},
|
||||
)
|
||||
logger.info("Credential deleted")
|
||||
|
||||
def update_user_info(self, cred_id, user_info):
|
||||
def update_user_info(
|
||||
self,
|
||||
cred_id: PublicKeyCredentialDescriptor,
|
||||
user_info: PublicKeyCredentialUserEntity,
|
||||
) -> None:
|
||||
"""Update the user entity of a resident key.
|
||||
|
||||
:param cred_id: The PublicKeyCredentialDescriptor of the credential to update.
|
||||
:param user_info: The user info update.
|
||||
"""
|
||||
return self._call(
|
||||
logger.debug(f"Updating credential: {cred_id} with user info: {user_info}")
|
||||
self._call(
|
||||
CredentialManagement.CMD.UPDATE_USER_INFO,
|
||||
{
|
||||
CredentialManagement.PARAM.CREDENTIAL_ID: cred_id,
|
||||
CredentialManagement.PARAM.USER: user_info,
|
||||
},
|
||||
)
|
||||
logger.info("Credential user info updated")
|
||||
|
|
|
@ -25,10 +25,13 @@
|
|||
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
||||
# POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
from __future__ import absolute_import, unicode_literals
|
||||
from __future__ import annotations
|
||||
|
||||
from .pin import ClientPin
|
||||
from .base import AttestationResponse, AssertionResponse, Ctap2
|
||||
from .pin import ClientPin, PinProtocol
|
||||
from .blob import LargeBlobs
|
||||
from enum import Enum, unique
|
||||
from typing import Dict, Tuple, Any, Optional
|
||||
import abc
|
||||
|
||||
|
||||
|
@ -38,30 +41,53 @@ class Ctap2Extension(abc.ABC):
|
|||
the extension.
|
||||
"""
|
||||
|
||||
def __init__(self, ctap):
|
||||
NAME: str = None # type: ignore
|
||||
|
||||
def __init__(self, ctap: Ctap2):
|
||||
self.ctap = ctap
|
||||
|
||||
def is_supported(self):
|
||||
def is_supported(self) -> bool:
|
||||
"""Whether or not the extension is supported by the authenticator."""
|
||||
return self.NAME in self.ctap.info.extensions
|
||||
|
||||
def process_create_input(self, inputs):
|
||||
def process_create_input(self, inputs: Dict[str, Any]) -> Any:
|
||||
"""Returns a value to include in the authenticator extension input,
|
||||
or None.
|
||||
"""
|
||||
return None
|
||||
|
||||
def process_create_output(self, auth_data):
|
||||
"""Return client extension output given auth_data, or None."""
|
||||
def process_create_input_with_permissions(
|
||||
self, inputs: Dict[str, Any]
|
||||
) -> Tuple[Any, ClientPin.PERMISSION]:
|
||||
return self.process_create_input(inputs), ClientPin.PERMISSION(0)
|
||||
|
||||
def process_create_output(
|
||||
self,
|
||||
attestation_response: AttestationResponse,
|
||||
token: Optional[str],
|
||||
pin_protocol: Optional[PinProtocol],
|
||||
) -> Optional[Dict[str, Any]]:
|
||||
"""Return client extension output given attestation_response, or None."""
|
||||
return None
|
||||
|
||||
def process_get_input(self, inputs):
|
||||
def process_get_input(self, inputs: Dict[str, Any]) -> Any:
|
||||
"""Returns a value to include in the authenticator extension input,
|
||||
or None.
|
||||
"""
|
||||
return None
|
||||
|
||||
def process_get_output(self, auth_data):
|
||||
"""Return client extension output given auth_data, or None."""
|
||||
def process_get_input_with_permissions(
|
||||
self, inputs: Dict[str, Any]
|
||||
) -> Tuple[Any, ClientPin.PERMISSION]:
|
||||
return self.process_get_input(inputs), ClientPin.PERMISSION(0)
|
||||
|
||||
def process_get_output(
|
||||
self,
|
||||
assertion_response: AssertionResponse,
|
||||
token: Optional[str],
|
||||
pin_protocol: Optional[PinProtocol],
|
||||
) -> Optional[Dict[str, Any]]:
|
||||
"""Return client extension output given assertion_response, or None."""
|
||||
return None
|
||||
|
||||
|
||||
|
@ -74,15 +100,15 @@ class HmacSecretExtension(Ctap2Extension):
|
|||
SALT_LEN = 32
|
||||
|
||||
def __init__(self, ctap, pin_protocol=None):
|
||||
super(HmacSecretExtension, self).__init__(ctap)
|
||||
super().__init__(ctap)
|
||||
self.pin_protocol = pin_protocol
|
||||
|
||||
def process_create_input(self, inputs):
|
||||
if self.is_supported() and inputs.get("hmacCreateSecret") is True:
|
||||
return True
|
||||
|
||||
def process_create_output(self, auth_data):
|
||||
if auth_data.extensions.get(self.NAME):
|
||||
def process_create_output(self, attestation_response, *args):
|
||||
if attestation_response.auth_data.extensions.get(self.NAME):
|
||||
return {"hmacCreateSecret": True}
|
||||
|
||||
def process_get_input(self, inputs):
|
||||
|
@ -113,8 +139,8 @@ class HmacSecretExtension(Ctap2Extension):
|
|||
4: self.pin_protocol.VERSION,
|
||||
}
|
||||
|
||||
def process_get_output(self, auth_data):
|
||||
value = auth_data.extensions.get(self.NAME)
|
||||
def process_get_output(self, assertion_response, *args):
|
||||
value = assertion_response.auth_data.extensions.get(self.NAME)
|
||||
|
||||
decrypted = self.pin_protocol.decrypt(self.shared_secret, value)
|
||||
output1 = decrypted[: HmacSecretExtension.SALT_LEN]
|
||||
|
@ -126,20 +152,54 @@ class HmacSecretExtension(Ctap2Extension):
|
|||
return {"hmacGetSecret": outputs}
|
||||
|
||||
|
||||
class LargeBlobKeyExtension(Ctap2Extension):
|
||||
class LargeBlobExtension(Ctap2Extension):
|
||||
"""
|
||||
Implements the Large Blob Key CTAP2 extension.
|
||||
Implements the Large Blob WebAuthn extension.
|
||||
"""
|
||||
|
||||
NAME = "largeBlobKey"
|
||||
|
||||
def is_supported(self):
|
||||
return super().is_supported() and self.ctap.info.options.get("largeBlobs")
|
||||
|
||||
def process_create_input(self, inputs):
|
||||
if self.is_supported() and inputs.get("largeBlobKey") is True:
|
||||
data = inputs.get("largeBlob", {})
|
||||
if data:
|
||||
if "read" in data or "write" in data:
|
||||
raise ValueError("Invalid set of parameters")
|
||||
is_supported = self.is_supported()
|
||||
if data.get("support") == "required" and not is_supported:
|
||||
raise ValueError("Authenticator does not support large blob storage")
|
||||
return True
|
||||
|
||||
def process_get_input(self, inputs):
|
||||
if self.is_supported() and inputs.get("largeBlobKey") is True:
|
||||
return True
|
||||
def process_create_output(self, attestation_response, *args):
|
||||
return {"supported": attestation_response.large_blob_key is not None}
|
||||
|
||||
def process_get_input_with_permissions(self, inputs):
|
||||
data = inputs.get("largeBlob", {})
|
||||
permissions = ClientPin.PERMISSION(0)
|
||||
if data:
|
||||
if "support" in data or ("read" in data and "write" in data):
|
||||
raise ValueError("Invalid set of parameters")
|
||||
if not self.is_supported():
|
||||
raise ValueError("Authenticator does not support large blob storage")
|
||||
if data.get("read") is True:
|
||||
self._action = True
|
||||
else:
|
||||
self._action = data.get("write")
|
||||
permissions = ClientPin.PERMISSION.LARGE_BLOB_WRITE
|
||||
return True if data else None, permissions
|
||||
|
||||
def process_get_output(self, assertion_response, token, pin_protocol):
|
||||
blob_key = assertion_response.large_blob_key
|
||||
if self._action is True: # Read
|
||||
large_blobs = LargeBlobs(self.ctap)
|
||||
blob = large_blobs.get_blob(blob_key)
|
||||
return {"blob": blob}
|
||||
elif self._action: # Write
|
||||
large_blobs = LargeBlobs(self.ctap, pin_protocol, token)
|
||||
large_blobs.put_blob(blob_key, self._action)
|
||||
return {"written": True}
|
||||
|
||||
|
||||
class CredBlobExtension(Ctap2Extension):
|
||||
|
@ -150,9 +210,11 @@ class CredBlobExtension(Ctap2Extension):
|
|||
NAME = "credBlob"
|
||||
|
||||
def process_create_input(self, inputs):
|
||||
blob = self.is_supported() and inputs.get("credBlob")
|
||||
if blob and len(blob) <= self.ctap.info.max_cred_blob_length:
|
||||
return blob
|
||||
if self.is_supported():
|
||||
blob = inputs.get("credBlob")
|
||||
assert self.ctap.info.max_cred_blob_length is not None # nosec
|
||||
if blob and len(blob) <= self.ctap.info.max_cred_blob_length:
|
||||
return blob
|
||||
|
||||
def process_get_input(self, inputs):
|
||||
if self.is_supported() and inputs.get("getCredBlob") is True:
|
||||
|
@ -176,7 +238,9 @@ class CredProtectExtension(Ctap2Extension):
|
|||
def process_create_input(self, inputs):
|
||||
policy = inputs.get("credentialProtectionPolicy")
|
||||
if policy:
|
||||
index = list(CredProtectExtension.POLICY).index(policy)
|
||||
index = list(CredProtectExtension.POLICY).index(
|
||||
CredProtectExtension.POLICY(policy)
|
||||
)
|
||||
enforce = inputs.get("enforceCredentialProtectionPolicy", False)
|
||||
if enforce and not self.is_supported() and index > 0:
|
||||
raise ValueError("Authenticator does not support Credential Protection")
|
||||
|
@ -190,6 +254,9 @@ class MinPinLengthExtension(Ctap2Extension):
|
|||
|
||||
NAME = "minPinLength"
|
||||
|
||||
def is_supported(self): # NB: There is no key in the extensions field.
|
||||
return "setMinPINLength" in self.ctap.info.options
|
||||
|
||||
def process_create_input(self, inputs):
|
||||
if self.is_supported() and inputs.get(self.NAME) is True:
|
||||
return True
|
||||
|
|
|
@ -25,9 +25,11 @@
|
|||
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
||||
# POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
from __future__ import absolute_import, unicode_literals
|
||||
from __future__ import annotations
|
||||
|
||||
from ..utils import sha256, hmac_sha256, bytes2int, int2bytes
|
||||
from ..cose import CoseKey
|
||||
from .base import Ctap2
|
||||
|
||||
from cryptography.hazmat.backends import default_backend
|
||||
from cryptography.hazmat.primitives import hashes
|
||||
|
@ -35,24 +37,65 @@ from cryptography.hazmat.primitives.asymmetric import ec
|
|||
from cryptography.hazmat.primitives.ciphers import Cipher, algorithms, modes
|
||||
from cryptography.hazmat.primitives.kdf.hkdf import HKDF
|
||||
|
||||
from enum import IntEnum, unique
|
||||
import six
|
||||
from enum import IntEnum, IntFlag, unique
|
||||
from dataclasses import dataclass
|
||||
from threading import Event
|
||||
from typing import Optional, Any, Mapping, ClassVar, Tuple, Callable
|
||||
|
||||
import abc
|
||||
import os
|
||||
import logging
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def _pad_pin(pin):
|
||||
if not isinstance(pin, six.string_types):
|
||||
raise ValueError("PIN of wrong type, expecting %s" % six.string_types)
|
||||
def _pad_pin(pin: str) -> bytes:
|
||||
if not isinstance(pin, str):
|
||||
raise ValueError(f"PIN of wrong type, expecting {str}")
|
||||
if len(pin) < 4:
|
||||
raise ValueError("PIN must be >= 4 characters")
|
||||
pin = pin.encode("utf8").ljust(64, b"\0")
|
||||
pin += b"\0" * (-(len(pin) - 16) % 16)
|
||||
if len(pin) > 255:
|
||||
pin_padded = pin.encode().ljust(64, b"\0")
|
||||
pin_padded += b"\0" * (-(len(pin_padded) - 16) % 16)
|
||||
if len(pin_padded) > 255:
|
||||
raise ValueError("PIN must be <= 255 bytes")
|
||||
return pin
|
||||
return pin_padded
|
||||
|
||||
|
||||
class PinProtocolV1(object):
|
||||
class PinProtocol(abc.ABC):
|
||||
VERSION: ClassVar[int]
|
||||
|
||||
@abc.abstractmethod
|
||||
def encapsulate(self, peer_cose_key: CoseKey) -> Tuple[Mapping[int, Any], bytes]:
|
||||
"""Generates an encapsulation of the public key.
|
||||
Returns the message to transmit and the shared secret.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def encrypt(self, key: bytes, plaintext: bytes) -> bytes:
|
||||
"""Encrypts data"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def decrypt(self, key: bytes, ciphertext: bytes) -> bytes:
|
||||
"""Decrypts encrypted data"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def authenticate(self, key: bytes, message: bytes) -> bytes:
|
||||
"""Computes a MAC of the given message."""
|
||||
|
||||
@abc.abstractmethod
|
||||
def validate_token(self, token: bytes) -> bytes:
|
||||
"""Validates that a token is well-formed.
|
||||
Returns the token, or if invalid, raises a ValueError.
|
||||
"""
|
||||
|
||||
|
||||
@dataclass
|
||||
class _PinUv:
|
||||
protocol: PinProtocol
|
||||
token: bytes
|
||||
|
||||
|
||||
class PinProtocolV1(PinProtocol):
|
||||
"""Implementation of the CTAP2 PIN/UV protocol v1.
|
||||
|
||||
:param ctap: An instance of a CTAP2 object.
|
||||
|
@ -63,7 +106,7 @@ class PinProtocolV1(object):
|
|||
VERSION = 1
|
||||
IV = b"\x00" * 16
|
||||
|
||||
def kdf(self, z):
|
||||
def kdf(self, z: bytes) -> bytes:
|
||||
return sha256(z)
|
||||
|
||||
def encapsulate(self, peer_cose_key):
|
||||
|
@ -167,7 +210,7 @@ class PinProtocolV2(PinProtocolV1):
|
|||
return token
|
||||
|
||||
|
||||
class ClientPin(object):
|
||||
class ClientPin:
|
||||
"""Implementation of the CTAP2 Client PIN API.
|
||||
|
||||
:param ctap: An instance of a CTAP2 object.
|
||||
|
@ -176,10 +219,7 @@ class ClientPin(object):
|
|||
will be used.
|
||||
"""
|
||||
|
||||
PROTOCOLS = [
|
||||
PinProtocolV2,
|
||||
PinProtocolV1,
|
||||
]
|
||||
PROTOCOLS = [PinProtocolV2, PinProtocolV1]
|
||||
|
||||
@unique
|
||||
class CMD(IntEnum):
|
||||
|
@ -201,7 +241,7 @@ class ClientPin(object):
|
|||
UV_RETRIES = 0x05
|
||||
|
||||
@unique
|
||||
class PERMISSION(IntEnum):
|
||||
class PERMISSION(IntFlag):
|
||||
MAKE_CREDENTIAL = 0x01
|
||||
GET_ASSERTION = 0x02
|
||||
CREDENTIAL_MGMT = 0x04
|
||||
|
@ -211,22 +251,29 @@ class ClientPin(object):
|
|||
|
||||
@staticmethod
|
||||
def is_supported(info):
|
||||
"""Checks if ClientPin functionality is supported.
|
||||
|
||||
Note that the ClientPin function is still usable without support for client
|
||||
PIN functionality, as UV token may still be supported.
|
||||
"""
|
||||
return "clientPin" in info.options
|
||||
|
||||
def __init__(self, ctap, protocol=None):
|
||||
if not self.is_supported(ctap.info):
|
||||
raise ValueError("Authenticator does not support ClientPin")
|
||||
@staticmethod
|
||||
def is_token_supported(info):
|
||||
"""Checks if pinUvAuthToken is supported."""
|
||||
return info.options.get("pinUvAuthToken") is True
|
||||
|
||||
def __init__(self, ctap: Ctap2, protocol: Optional[PinProtocol] = None):
|
||||
self.ctap = ctap
|
||||
if protocol is None:
|
||||
for proto in ClientPin.PROTOCOLS:
|
||||
if proto.VERSION in ctap.info.pin_uv_protocols:
|
||||
protocol = proto()
|
||||
self.protocol: PinProtocol = proto()
|
||||
break
|
||||
else:
|
||||
raise ValueError("No compatible PIN/UV protocols supported!")
|
||||
self.protocol = protocol
|
||||
self._supports_permissions = ctap.info.options.get("pinUvAuthToken")
|
||||
else:
|
||||
self.protocol = protocol
|
||||
|
||||
def _get_shared_secret(self):
|
||||
resp = self.ctap.client_pin(
|
||||
|
@ -236,7 +283,12 @@ class ClientPin(object):
|
|||
|
||||
return self.protocol.encapsulate(pk)
|
||||
|
||||
def get_pin_token(self, pin, permissions=None, permissions_rpid=None):
|
||||
def get_pin_token(
|
||||
self,
|
||||
pin: str,
|
||||
permissions: Optional[ClientPin.PERMISSION] = None,
|
||||
permissions_rpid: Optional[str] = None,
|
||||
) -> bytes:
|
||||
"""Get a PIN/UV token from the authenticator using PIN.
|
||||
|
||||
:param pin: The PIN of the authenticator.
|
||||
|
@ -244,12 +296,15 @@ class ClientPin(object):
|
|||
:param permissions_rpid: The permissions RPID to associate with the token.
|
||||
:return: A PIN/UV token.
|
||||
"""
|
||||
if not ClientPin.is_supported(self.ctap.info):
|
||||
raise ValueError("Authenticator does not support get_pin_token")
|
||||
|
||||
key_agreement, shared_secret = self._get_shared_secret()
|
||||
|
||||
pin_hash = sha256(pin.encode())[:16]
|
||||
pin_hash_enc = self.protocol.encrypt(shared_secret, pin_hash)
|
||||
|
||||
if self._supports_permissions and permissions:
|
||||
if ClientPin.is_token_supported(self.ctap.info) and permissions:
|
||||
cmd = ClientPin.CMD.GET_TOKEN_USING_PIN
|
||||
else:
|
||||
cmd = ClientPin.CMD.GET_TOKEN_USING_PIN_LEGACY
|
||||
|
@ -266,13 +321,18 @@ class ClientPin(object):
|
|||
permissions_rpid=permissions_rpid,
|
||||
)
|
||||
pin_token_enc = resp[ClientPin.RESULT.PIN_UV_TOKEN]
|
||||
logger.debug(f"Got PIN token for permissions: {permissions}")
|
||||
return self.protocol.validate_token(
|
||||
self.protocol.decrypt(shared_secret, pin_token_enc)
|
||||
)
|
||||
|
||||
def get_uv_token(
|
||||
self, permissions, permissions_rpid=None, event=None, on_keepalive=None
|
||||
):
|
||||
self,
|
||||
permissions: Optional[ClientPin.PERMISSION] = None,
|
||||
permissions_rpid: Optional[str] = None,
|
||||
event: Optional[Event] = None,
|
||||
on_keepalive: Optional[Callable[[int], None]] = None,
|
||||
) -> bytes:
|
||||
"""Get a PIN/UV token from the authenticator using built-in UV.
|
||||
|
||||
:param permissions: The permissions to associate with the token.
|
||||
|
@ -284,7 +344,7 @@ class ClientPin(object):
|
|||
consecutive keep-alive messages with the same status.
|
||||
:return: A PIN/UV token.
|
||||
"""
|
||||
if not self.ctap.info.options.get("pinUvAuthToken"):
|
||||
if not ClientPin.is_token_supported(self.ctap.info):
|
||||
raise ValueError("Authenticator does not support get_uv_token")
|
||||
|
||||
key_agreement, shared_secret = self._get_shared_secret()
|
||||
|
@ -300,11 +360,12 @@ class ClientPin(object):
|
|||
)
|
||||
|
||||
pin_token_enc = resp[ClientPin.RESULT.PIN_UV_TOKEN]
|
||||
logger.debug(f"Got UV token for permissions: {permissions}")
|
||||
return self.protocol.validate_token(
|
||||
self.protocol.decrypt(shared_secret, pin_token_enc)
|
||||
)
|
||||
|
||||
def get_pin_retries(self):
|
||||
def get_pin_retries(self) -> Tuple[int, Optional[int]]:
|
||||
"""Get the number of PIN retries remaining.
|
||||
|
||||
:return: A tuple of the number of PIN attempts remaining until the
|
||||
|
@ -318,19 +379,16 @@ class ClientPin(object):
|
|||
resp.get(ClientPin.RESULT.POWER_CYCLE_STATE),
|
||||
)
|
||||
|
||||
def get_uv_retries(self):
|
||||
def get_uv_retries(self) -> int:
|
||||
"""Get the number of UV retries remaining.
|
||||
|
||||
:return: A tuple of the number of UV attempts remaining until the
|
||||
authenticator is locked, and the power cycle state, if available.
|
||||
"""
|
||||
resp = self.ctap.client_pin(self.protocol.VERSION, ClientPin.CMD.GET_UV_RETRIES)
|
||||
return (
|
||||
resp[ClientPin.RESULT.UV_RETRIES],
|
||||
resp.get(ClientPin.RESULT.POWER_CYCLE_STATE),
|
||||
)
|
||||
return resp[ClientPin.RESULT.UV_RETRIES]
|
||||
|
||||
def set_pin(self, pin):
|
||||
def set_pin(self, pin: str) -> None:
|
||||
"""Set the PIN of the autenticator.
|
||||
|
||||
This only works when no PIN is set. To change the PIN when set, use
|
||||
|
@ -338,10 +396,12 @@ class ClientPin(object):
|
|||
|
||||
:param pin: A PIN to set.
|
||||
"""
|
||||
pin = _pad_pin(pin)
|
||||
if not ClientPin.is_supported(self.ctap.info):
|
||||
raise ValueError("Authenticator does not support ClientPin")
|
||||
|
||||
key_agreement, shared_secret = self._get_shared_secret()
|
||||
|
||||
pin_enc = self.protocol.encrypt(shared_secret, pin)
|
||||
pin_enc = self.protocol.encrypt(shared_secret, _pad_pin(pin))
|
||||
pin_uv_param = self.protocol.authenticate(shared_secret, pin_enc)
|
||||
self.ctap.client_pin(
|
||||
self.protocol.VERSION,
|
||||
|
@ -350,8 +410,9 @@ class ClientPin(object):
|
|||
new_pin_enc=pin_enc,
|
||||
pin_uv_param=pin_uv_param,
|
||||
)
|
||||
logger.info("PIN has been set")
|
||||
|
||||
def change_pin(self, old_pin, new_pin):
|
||||
def change_pin(self, old_pin: str, new_pin: str) -> None:
|
||||
"""Change the PIN of the authenticator.
|
||||
|
||||
This only works when a PIN is already set. If no PIN is set, use
|
||||
|
@ -360,12 +421,14 @@ class ClientPin(object):
|
|||
:param old_pin: The currently set PIN.
|
||||
:param new_pin: The new PIN to set.
|
||||
"""
|
||||
new_pin = _pad_pin(new_pin)
|
||||
if not ClientPin.is_supported(self.ctap.info):
|
||||
raise ValueError("Authenticator does not support ClientPin")
|
||||
|
||||
key_agreement, shared_secret = self._get_shared_secret()
|
||||
|
||||
pin_hash = sha256(old_pin.encode())[:16]
|
||||
pin_hash_enc = self.protocol.encrypt(shared_secret, pin_hash)
|
||||
new_pin_enc = self.protocol.encrypt(shared_secret, new_pin)
|
||||
new_pin_enc = self.protocol.encrypt(shared_secret, _pad_pin(new_pin))
|
||||
pin_uv_param = self.protocol.authenticate(
|
||||
shared_secret, new_pin_enc + pin_hash_enc
|
||||
)
|
||||
|
@ -377,3 +440,4 @@ class ClientPin(object):
|
|||
new_pin_enc=new_pin_enc,
|
||||
pin_uv_param=pin_uv_param,
|
||||
)
|
||||
logger.info("PIN has been changed")
|
||||
|
|
|
@ -0,0 +1,95 @@
|
|||
# Copyright (c) 2022 Yubico AB
|
||||
# All rights reserved.
|
||||
#
|
||||
# Redistribution and use in source and binary forms, with or
|
||||
# without modification, are permitted provided that the following
|
||||
# conditions are met:
|
||||
#
|
||||
# 1. Redistributions of source code must retain the above copyright
|
||||
# notice, this list of conditions and the following disclaimer.
|
||||
# 2. Redistributions in binary form must reproduce the above
|
||||
# copyright notice, this list of conditions and the following
|
||||
# disclaimer in the documentation and/or other materials provided
|
||||
# with the distribution.
|
||||
#
|
||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
|
||||
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
|
||||
# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
|
||||
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
|
||||
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
|
||||
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
|
||||
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
|
||||
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
|
||||
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
||||
# POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
|
||||
from typing import Optional
|
||||
|
||||
import warnings
|
||||
|
||||
|
||||
class FeatureNotEnabledError(Exception):
|
||||
pass
|
||||
|
||||
|
||||
class _Feature:
|
||||
def __init__(self, name: str, desc: str):
|
||||
self._enabled: Optional[bool] = None
|
||||
self._name = name
|
||||
self._desc = desc
|
||||
|
||||
@property
|
||||
def enabled(self) -> bool:
|
||||
self.warn()
|
||||
return self._enabled is True
|
||||
|
||||
@enabled.setter
|
||||
def enabled(self, value: bool) -> None:
|
||||
if self._enabled is not None:
|
||||
raise ValueError(
|
||||
f"{self._name} has already been configured with {self._enabled}"
|
||||
)
|
||||
self._enabled = value
|
||||
|
||||
def require(self, state=True) -> None:
|
||||
if self._enabled != state:
|
||||
self.warn()
|
||||
raise FeatureNotEnabledError(
|
||||
f"Usage requires {self._name}.enabled = {state}"
|
||||
)
|
||||
|
||||
def warn(self) -> None:
|
||||
if self._enabled is None:
|
||||
warnings.warn(
|
||||
f"""Deprecated use of {self._name}.
|
||||
|
||||
You are using deprecated functionality which will change in the next major version of
|
||||
python-fido2. You can opt-in to use the new functionality now by adding the following
|
||||
to your code somewhere where it gets executed prior to using the affected functionality:
|
||||
|
||||
import fido2.features
|
||||
fido2.features.{self._name}.enabled = True
|
||||
|
||||
To silence this warning but retain the current behavior, instead set enabled to False:
|
||||
fido2.features.{self._name}.enabled = False
|
||||
|
||||
{self._desc}
|
||||
""",
|
||||
DeprecationWarning,
|
||||
)
|
||||
|
||||
|
||||
webauthn_json_mapping = _Feature(
|
||||
"webauthn_json_mapping",
|
||||
"""JSON values for WebAuthn data class Mapping interface.
|
||||
|
||||
This changes the keys and values used by the webauthn data classes when accessed using
|
||||
the Mapping (dict) interface (eg. user_entity["id"] and the from_dict() methods) to be
|
||||
JSON-friendly and align with the current draft of the next WebAuthn Level specification.
|
||||
For the most part, this means that binary values (bytes) are represented as URL-safe
|
||||
base64 encoded strings instead.
|
||||
""",
|
||||
)
|
|
@ -25,13 +25,14 @@
|
|||
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
||||
# POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from __future__ import absolute_import
|
||||
|
||||
from .base import HidDescriptor
|
||||
from ..ctap import CtapDevice, CtapError, STATUS
|
||||
from ..utils import LOG_LEVEL_TRAFFIC
|
||||
from threading import Event
|
||||
from enum import IntEnum, unique
|
||||
from binascii import b2a_hex as _b2a_hex
|
||||
from enum import IntEnum, IntFlag, unique
|
||||
from typing import Tuple, Optional, Callable, Iterator
|
||||
import struct
|
||||
import sys
|
||||
import os
|
||||
|
@ -48,16 +49,14 @@ elif sys.platform.startswith("darwin"):
|
|||
from . import macos as backend
|
||||
elif sys.platform.startswith("freebsd"):
|
||||
from . import freebsd as backend
|
||||
elif sys.platform.startswith("netbsd"):
|
||||
from . import netbsd as backend
|
||||
elif sys.platform.startswith("openbsd"):
|
||||
from . import openbsd as backend
|
||||
else:
|
||||
raise Exception("Unsupported platform")
|
||||
|
||||
|
||||
def b2a_hex(data):
|
||||
return _b2a_hex(data).decode("ascii")
|
||||
|
||||
|
||||
list_descriptors = backend.list_descriptors
|
||||
get_descriptor = backend.get_descriptor
|
||||
open_connection = backend.open_connection
|
||||
|
@ -80,13 +79,13 @@ class CTAPHID(IntEnum):
|
|||
|
||||
|
||||
@unique
|
||||
class CAPABILITY(IntEnum):
|
||||
class CAPABILITY(IntFlag):
|
||||
WINK = 0x01
|
||||
LOCK = 0x02 # Not used
|
||||
CBOR = 0x04
|
||||
NMSG = 0x08
|
||||
|
||||
def supported(self, flags):
|
||||
def supported(self, flags: CAPABILITY) -> bool:
|
||||
return bool(flags & self)
|
||||
|
||||
|
||||
|
@ -100,8 +99,9 @@ class CtapHidDevice(CtapDevice):
|
|||
:cvar descriptor: Device descriptor.
|
||||
"""
|
||||
|
||||
def __init__(self, descriptor, connection):
|
||||
def __init__(self, descriptor: HidDescriptor, connection):
|
||||
self.descriptor = descriptor
|
||||
self._packet_size = descriptor.report_size_out
|
||||
self._connection = connection
|
||||
|
||||
nonce = os.urandom(8)
|
||||
|
@ -121,83 +121,89 @@ class CtapHidDevice(CtapDevice):
|
|||
self._device_version = (v1, v2, v3)
|
||||
|
||||
def __repr__(self):
|
||||
return "CtapHidDevice(%s)" % self.descriptor.path
|
||||
return f"CtapHidDevice({self.descriptor.path!r})"
|
||||
|
||||
@property
|
||||
def version(self):
|
||||
"""CTAP HID protocol version.
|
||||
|
||||
:rtype: int
|
||||
"""
|
||||
def version(self) -> int:
|
||||
"""CTAP HID protocol version."""
|
||||
return self._u2fhid_version
|
||||
|
||||
@property
|
||||
def device_version(self):
|
||||
def device_version(self) -> Tuple[int, int, int]:
|
||||
"""Device version number."""
|
||||
return self._device_version
|
||||
|
||||
@property
|
||||
def capabilities(self):
|
||||
def capabilities(self) -> int:
|
||||
"""Capabilities supported by the device."""
|
||||
return self._capabilities
|
||||
|
||||
@property
|
||||
def product_name(self):
|
||||
def product_name(self) -> Optional[str]:
|
||||
"""Product name of device."""
|
||||
return self.descriptor.product_name
|
||||
|
||||
@property
|
||||
def serial_number(self):
|
||||
def serial_number(self) -> Optional[str]:
|
||||
"""Serial number of device."""
|
||||
return self.descriptor.serial_number
|
||||
|
||||
def call(self, cmd, data=b"", event=None, on_keepalive=None):
|
||||
def _send_cancel(self):
|
||||
packet = struct.pack(">IB", self._channel_id, TYPE_INIT | CTAPHID.CANCEL).ljust(
|
||||
self._packet_size, b"\0"
|
||||
)
|
||||
logger.log(LOG_LEVEL_TRAFFIC, "SEND: %s", packet.hex())
|
||||
self._connection.write_packet(packet)
|
||||
|
||||
def call(
|
||||
self,
|
||||
cmd: int,
|
||||
data: bytes = b"",
|
||||
event: Optional[Event] = None,
|
||||
on_keepalive: Optional[Callable[[int], None]] = None,
|
||||
) -> bytes:
|
||||
event = event or Event()
|
||||
remaining = data
|
||||
packet_size = self.descriptor.report_size_out
|
||||
seq = 0
|
||||
|
||||
# Send request
|
||||
header = struct.pack(">IBH", self._channel_id, TYPE_INIT | cmd, len(remaining))
|
||||
while remaining or seq == 0:
|
||||
size = min(len(remaining), packet_size - len(header))
|
||||
size = min(len(remaining), self._packet_size - len(header))
|
||||
body, remaining = remaining[:size], remaining[size:]
|
||||
packet = header + body
|
||||
logger.debug("SEND: %s", b2a_hex(packet))
|
||||
self._connection.write_packet(packet.ljust(packet_size, b"\0"))
|
||||
logger.log(LOG_LEVEL_TRAFFIC, "SEND: %s", packet.hex())
|
||||
self._connection.write_packet(packet.ljust(self._packet_size, b"\0"))
|
||||
header = struct.pack(">IB", self._channel_id, 0x7F & seq)
|
||||
seq += 1
|
||||
|
||||
try:
|
||||
# Read response
|
||||
seq = 0
|
||||
response = None
|
||||
response = b""
|
||||
last_ka = None
|
||||
while True:
|
||||
if event.is_set():
|
||||
# Cancel
|
||||
logger.debug("Sending cancel...")
|
||||
packet = struct.pack(
|
||||
">IB", self._channel_id, TYPE_INIT | CTAPHID.CANCEL
|
||||
).ljust(packet_size, b"\0")
|
||||
self._connection.write_packet(packet)
|
||||
self._send_cancel()
|
||||
|
||||
recv = self._connection.read_packet()
|
||||
logger.debug("RECV: %s", b2a_hex(recv))
|
||||
logger.log(LOG_LEVEL_TRAFFIC, "RECV: %s", recv.hex())
|
||||
|
||||
r_channel = struct.unpack_from(">I", recv)[0]
|
||||
recv = recv[4:]
|
||||
if r_channel != self._channel_id:
|
||||
raise Exception("Wrong channel")
|
||||
|
||||
if response is None: # Initialization packet
|
||||
if not response: # Initialization packet
|
||||
r_cmd, r_len = struct.unpack_from(">BH", recv)
|
||||
recv = recv[3:]
|
||||
if r_cmd == TYPE_INIT | cmd:
|
||||
response = b""
|
||||
pass # first data packet
|
||||
elif r_cmd == TYPE_INIT | CTAPHID.KEEPALIVE:
|
||||
ka_status = struct.unpack_from(">B", recv)[0]
|
||||
logger.debug("Got keepalive status: %02x" % ka_status)
|
||||
logger.debug(f"Got keepalive status: {ka_status:02x}")
|
||||
if on_keepalive and ka_status != last_ka:
|
||||
try:
|
||||
ka_status = STATUS(ka_status)
|
||||
|
@ -223,19 +229,16 @@ class CtapHidDevice(CtapDevice):
|
|||
|
||||
return response[:r_len]
|
||||
except KeyboardInterrupt:
|
||||
logger.debug("Keyboard interrupt, sending cancel...")
|
||||
packet = struct.pack(
|
||||
">IB", self._channel_id, TYPE_INIT | CTAPHID.CANCEL
|
||||
).ljust(packet_size, b"\0")
|
||||
self._connection.write_packet(packet)
|
||||
logger.debug("Keyboard interrupt, cancelling...")
|
||||
self._send_cancel()
|
||||
|
||||
raise
|
||||
|
||||
def wink(self):
|
||||
def wink(self) -> None:
|
||||
"""Causes the authenticator to blink."""
|
||||
self.call(CTAPHID.WINK)
|
||||
|
||||
def ping(self, msg=b"Hello FIDO"):
|
||||
def ping(self, msg: bytes = b"Hello FIDO") -> bytes:
|
||||
"""Sends data to the authenticator, which echoes it back.
|
||||
|
||||
:param msg: The data to send.
|
||||
|
@ -243,25 +246,25 @@ class CtapHidDevice(CtapDevice):
|
|||
"""
|
||||
return self.call(CTAPHID.PING, msg)
|
||||
|
||||
def lock(self, lock_time=10):
|
||||
def lock(self, lock_time: int = 10) -> None:
|
||||
"""Locks the channel."""
|
||||
self.call(CTAPHID.LOCK, struct.pack(">B", lock_time))
|
||||
|
||||
def close(self):
|
||||
def close(self) -> None:
|
||||
if self._connection:
|
||||
self._connection.close()
|
||||
self._connection = None
|
||||
|
||||
@classmethod
|
||||
def list_devices(cls):
|
||||
def list_devices(cls) -> Iterator[CtapHidDevice]:
|
||||
for d in list_descriptors():
|
||||
yield cls(d, open_connection(d))
|
||||
|
||||
|
||||
def list_devices():
|
||||
def list_devices() -> Iterator[CtapHidDevice]:
|
||||
return CtapHidDevice.list_devices()
|
||||
|
||||
|
||||
def open_device(path):
|
||||
def open_device(path) -> CtapHidDevice:
|
||||
descriptor = get_descriptor(path)
|
||||
return CtapHidDevice(descriptor, open_connection(descriptor))
|
||||
|
|
|
@ -25,10 +25,10 @@
|
|||
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
||||
# POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from __future__ import absolute_import
|
||||
|
||||
from collections import namedtuple
|
||||
from dataclasses import dataclass
|
||||
from typing import Tuple, Union, Optional
|
||||
import struct
|
||||
import abc
|
||||
import os
|
||||
|
@ -37,34 +37,28 @@ FIDO_USAGE_PAGE = 0xF1D0
|
|||
FIDO_USAGE = 0x1
|
||||
|
||||
|
||||
class HidDescriptor(
|
||||
namedtuple(
|
||||
"HidDescriptor",
|
||||
[
|
||||
"path",
|
||||
"vid",
|
||||
"pid",
|
||||
"report_size_in",
|
||||
"report_size_out",
|
||||
"product_name",
|
||||
"serial_number",
|
||||
],
|
||||
)
|
||||
):
|
||||
__slots__ = ()
|
||||
@dataclass
|
||||
class HidDescriptor:
|
||||
path: Union[str, bytes]
|
||||
vid: int
|
||||
pid: int
|
||||
report_size_in: int
|
||||
report_size_out: int
|
||||
product_name: Optional[str]
|
||||
serial_number: Optional[str]
|
||||
|
||||
|
||||
class CtapHidConnection(abc.ABC):
|
||||
@abc.abstractmethod
|
||||
def read_packet(self):
|
||||
def read_packet(self) -> bytes:
|
||||
"""Reads a CTAP HID packet"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def write_packet(self, data):
|
||||
def write_packet(self, data: bytes) -> None:
|
||||
"""Writes a CTAP HID packet"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def close(self):
|
||||
def close(self) -> None:
|
||||
"""Closes the connection"""
|
||||
|
||||
|
||||
|
@ -97,7 +91,7 @@ USAGE_PAGE = 0x04
|
|||
USAGE = 0x08
|
||||
|
||||
|
||||
def parse_report_descriptor(data):
|
||||
def parse_report_descriptor(data: bytes) -> Tuple[int, int]:
|
||||
# Parse report descriptor data
|
||||
usage, usage_page = None, None
|
||||
max_input_size, max_output_size = None, None
|
||||
|
@ -136,6 +130,6 @@ def parse_report_descriptor(data):
|
|||
report_size = value
|
||||
|
||||
if not remaining and usage_page == FIDO_USAGE_PAGE and usage == FIDO_USAGE:
|
||||
return max_input_size, max_output_size
|
||||
return max_input_size, max_output_size # type: ignore
|
||||
|
||||
raise ValueError("Not a FIDO device")
|
||||
|
|
|
@ -15,18 +15,38 @@
|
|||
# Modified work Copyright 2020 Yubico AB. All Rights Reserved.
|
||||
# This file, with modifications, is licensed under the above Apache License.
|
||||
|
||||
# FreeBSD HID driver.
|
||||
#
|
||||
# There are two options to access UHID on FreeBSD:
|
||||
#
|
||||
# hidraw(4) - New method, not enabled by default
|
||||
# on FreeBSD 13.x and earlier
|
||||
# uhid(4) - Classic method, default option on
|
||||
# FreeBSD 13.x and earlier
|
||||
#
|
||||
# hidraw is available since FreeBSD 13 and can be activated by adding
|
||||
# `hw.usb.usbhid.enable="1"` to `/boot/loader.conf`. The actual kernel
|
||||
# module is loaded with `kldload hidraw`.
|
||||
|
||||
from __future__ import absolute_import
|
||||
from __future__ import annotations
|
||||
|
||||
from ctypes.util import find_library
|
||||
import ctypes
|
||||
import fcntl
|
||||
import glob
|
||||
import re
|
||||
import struct
|
||||
import os
|
||||
from array import array
|
||||
|
||||
from .base import HidDescriptor, parse_report_descriptor, FileCtapHidConnection
|
||||
|
||||
import logging
|
||||
import sys
|
||||
from typing import Dict, Optional, Set, Union
|
||||
|
||||
# Don't typecheck this file on Windows
|
||||
assert sys.platform != "win32" # nosec
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
@ -39,8 +59,16 @@ sernum_re = re.compile('sernum="([^"]+)')
|
|||
|
||||
libc = ctypes.CDLL(find_library("c"))
|
||||
|
||||
# /usr/include/dev/usb/usb_ioctl.h
|
||||
USB_GET_REPORT_DESC = 0xC0205515
|
||||
|
||||
# /usr/include/dev/hid/hidraw.h>
|
||||
HIDIOCGRAWINFO = 0x40085520
|
||||
HIDIOCGRDESC = 0x2000551F
|
||||
HIDIOCGRDESCSIZE = 0x4004551E
|
||||
HIDIOCGRAWNAME_128 = 0x40805521
|
||||
HIDIOCGRAWUNIQ_64 = 0x40405525
|
||||
|
||||
|
||||
class usb_gen_descriptor(ctypes.Structure):
|
||||
_fields_ = [
|
||||
|
@ -62,8 +90,17 @@ class usb_gen_descriptor(ctypes.Structure):
|
|||
]
|
||||
|
||||
|
||||
class HidrawCtapHidConnection(FileCtapHidConnection):
|
||||
def write_packet(self, packet):
|
||||
# Prepend the report ID
|
||||
super(HidrawCtapHidConnection, self).write_packet(b"\0" + packet)
|
||||
|
||||
|
||||
def open_connection(descriptor):
|
||||
return FileCtapHidConnection(descriptor)
|
||||
if descriptor.path.find(devdir + "hidraw") == 0:
|
||||
return HidrawCtapHidConnection(descriptor)
|
||||
else:
|
||||
return FileCtapHidConnection(descriptor)
|
||||
|
||||
|
||||
def _get_report_data(fd, report_type):
|
||||
|
@ -71,7 +108,7 @@ def _get_report_data(fd, report_type):
|
|||
desc = usb_gen_descriptor(
|
||||
ugd_data=ctypes.addressof(data),
|
||||
ugd_maxlen=ctypes.sizeof(data),
|
||||
report_type=report_type,
|
||||
ugd_report_type=report_type,
|
||||
)
|
||||
ret = libc.ioctl(fd, USB_GET_REPORT_DESC, ctypes.byref(desc))
|
||||
if ret != 0:
|
||||
|
@ -89,7 +126,6 @@ def _read_descriptor(vid, pid, name, serial, path):
|
|||
|
||||
def _enumerate():
|
||||
for uhid in glob.glob(devdir + "uhid?*"):
|
||||
|
||||
index = uhid[len(devdir) + len("uhid") :]
|
||||
if not index.isdigit():
|
||||
continue
|
||||
|
@ -104,16 +140,16 @@ def _enumerate():
|
|||
if retval != 0:
|
||||
continue
|
||||
|
||||
dev = {}
|
||||
dev: Dict[str, Optional[Union[str, int]]] = {}
|
||||
dev["name"] = uhid[len(devdir) :]
|
||||
dev["path"] = uhid
|
||||
|
||||
value = ovalue.value[: olen.value].decode()
|
||||
m = vendor_re.search(value)
|
||||
dev["vendor_id"] = m.group(1) if m else None
|
||||
dev["vendor_id"] = int(m.group(1), 16) if m else None
|
||||
|
||||
m = product_re.search(value)
|
||||
dev["product_id"] = m.group(1) if m else None
|
||||
dev["product_id"] = int(m.group(1), 16) if m else None
|
||||
|
||||
m = sernum_re.search(value)
|
||||
dev["serial_number"] = m.group(1) if m else None
|
||||
|
@ -126,7 +162,49 @@ def _enumerate():
|
|||
yield dev
|
||||
|
||||
|
||||
def get_hidraw_descriptor(path):
|
||||
with open(path, "rb") as f:
|
||||
# Read VID, PID
|
||||
buf = array("B", [0] * (4 + 2 + 2))
|
||||
fcntl.ioctl(f, HIDIOCGRAWINFO, buf, True)
|
||||
_, vid, pid = struct.unpack("<IHH", buf)
|
||||
|
||||
# FreeBSD's hidraw(4) does not return string length for
|
||||
# HIDIOCGRAWNAME and HIDIOCGRAWUNIQ, see https://reviews.freebsd.org/D35233
|
||||
|
||||
# Read product
|
||||
buf = array("B", [0] * 129)
|
||||
fcntl.ioctl(f, HIDIOCGRAWNAME_128, buf, True)
|
||||
length = buf.index(0) + 1 # emulate ioctl return value
|
||||
name = bytearray(buf[: (length - 1)]).decode("utf-8") if length > 1 else None
|
||||
|
||||
# Read unique ID
|
||||
try:
|
||||
buf = array("B", [0] * 65)
|
||||
fcntl.ioctl(f, HIDIOCGRAWUNIQ_64, buf, True)
|
||||
length = buf.index(0) + 1 # emulate ioctl return value
|
||||
serial = (
|
||||
bytearray(buf[: (length - 1)]).decode("utf-8") if length > 1 else None
|
||||
)
|
||||
except OSError:
|
||||
serial = None
|
||||
|
||||
# Read report descriptor
|
||||
buf = array("B", [0] * 4)
|
||||
fcntl.ioctl(f, HIDIOCGRDESCSIZE, buf, True)
|
||||
size = struct.unpack("<I", buf)[0]
|
||||
buf += array("B", [0] * size)
|
||||
fcntl.ioctl(f, HIDIOCGRDESC, buf, True)
|
||||
|
||||
data = bytearray(buf[4:])
|
||||
max_in_size, max_out_size = parse_report_descriptor(data)
|
||||
return HidDescriptor(path, vid, pid, max_in_size, max_out_size, name, serial)
|
||||
|
||||
|
||||
def get_descriptor(path):
|
||||
if path.find(devdir + "hidraw") == 0:
|
||||
return get_hidraw_descriptor(path)
|
||||
|
||||
for dev in _enumerate():
|
||||
if dev["path"] == path:
|
||||
vid = dev["vendor_id"]
|
||||
|
@ -137,21 +215,50 @@ def get_descriptor(path):
|
|||
raise ValueError("Device not found")
|
||||
|
||||
|
||||
# Cache for continuously failing devices
|
||||
_failed_cache: Set[str] = set()
|
||||
|
||||
|
||||
def list_descriptors():
|
||||
stale = set(_failed_cache)
|
||||
descriptors = []
|
||||
for dev in _enumerate():
|
||||
for hidraw in glob.glob(devdir + "hidraw?*"):
|
||||
stale.discard(hidraw)
|
||||
try:
|
||||
name = dev["product_desc"] or None
|
||||
serial = (dev["serial_number"] if "serial_number" in dev else None) or None
|
||||
descriptors.append(
|
||||
_read_descriptor(
|
||||
dev["vendor_id"], dev["product_id"], name, serial, dev["path"],
|
||||
)
|
||||
)
|
||||
logger.debug("Found CTAP device: %s", dev["path"])
|
||||
descriptors.append(get_descriptor(hidraw))
|
||||
except ValueError:
|
||||
pass # Not a CTAP device, ignore
|
||||
except Exception as e:
|
||||
logger.debug("Failed opening HID device", exc_info=e)
|
||||
except Exception:
|
||||
if hidraw not in _failed_cache:
|
||||
logger.debug("Failed opening device %s", hidraw, exc_info=True)
|
||||
_failed_cache.add(hidraw)
|
||||
|
||||
if not descriptors:
|
||||
for dev in _enumerate():
|
||||
path = dev["path"]
|
||||
stale.discard(path)
|
||||
try:
|
||||
name = dev["product_desc"] or None
|
||||
serial = (
|
||||
dev["serial_number"] if "serial_number" in dev else None
|
||||
) or None
|
||||
descriptors.append(
|
||||
_read_descriptor(
|
||||
dev["vendor_id"],
|
||||
dev["product_id"],
|
||||
name,
|
||||
serial,
|
||||
path,
|
||||
)
|
||||
)
|
||||
except ValueError:
|
||||
pass # Not a CTAP device, ignore
|
||||
except Exception:
|
||||
if path not in _failed_cache:
|
||||
logger.debug("Failed opening HID device %s", path, exc_info=True)
|
||||
_failed_cache.add(path)
|
||||
|
||||
# Remove entries from the cache that were not seen
|
||||
_failed_cache.difference_update(stale)
|
||||
|
||||
return descriptors
|
||||
|
|
|
@ -15,8 +15,7 @@
|
|||
# Modified work Copyright 2020 Yubico AB. All Rights Reserved.
|
||||
# This file, with modifications, is licensed under the above Apache License.
|
||||
|
||||
|
||||
from __future__ import absolute_import
|
||||
from __future__ import annotations
|
||||
|
||||
from .base import HidDescriptor, FileCtapHidConnection, parse_report_descriptor
|
||||
|
||||
|
@ -24,8 +23,13 @@ import glob
|
|||
import fcntl
|
||||
import struct
|
||||
from array import array
|
||||
from typing import Set
|
||||
|
||||
import logging
|
||||
import sys
|
||||
|
||||
# Don't typecheck this file on Windows
|
||||
assert sys.platform != "win32" # nosec
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
@ -40,7 +44,7 @@ HIDIOCGRAWUNIQ = 0x90044808
|
|||
class LinuxCtapHidConnection(FileCtapHidConnection):
|
||||
def write_packet(self, packet):
|
||||
# Prepend the report ID
|
||||
super(LinuxCtapHidConnection, self).write_packet(b"\0" + packet)
|
||||
super().write_packet(b"\0" + packet)
|
||||
|
||||
|
||||
def open_connection(descriptor):
|
||||
|
@ -60,9 +64,14 @@ def get_descriptor(path):
|
|||
name = bytearray(buf[: (length - 1)]).decode("utf-8") if length > 1 else None
|
||||
|
||||
# Read unique ID
|
||||
buf = array("B", [0] * 64)
|
||||
length = fcntl.ioctl(f, HIDIOCGRAWUNIQ, buf, True)
|
||||
serial = bytearray(buf[: (length - 1)]).decode("utf-8") if length > 1 else None
|
||||
try:
|
||||
buf = array("B", [0] * 64)
|
||||
length = fcntl.ioctl(f, HIDIOCGRAWUNIQ, buf, True)
|
||||
serial = (
|
||||
bytearray(buf[: (length - 1)]).decode("utf-8") if length > 1 else None
|
||||
)
|
||||
except OSError:
|
||||
serial = None
|
||||
|
||||
# Read report descriptor
|
||||
buf = array("B", [0] * 4)
|
||||
|
@ -76,16 +85,25 @@ def get_descriptor(path):
|
|||
return HidDescriptor(path, vid, pid, max_in_size, max_out_size, name, serial)
|
||||
|
||||
|
||||
# Cache for continuously failing devices
|
||||
_failed_cache: Set[str] = set()
|
||||
|
||||
|
||||
def list_descriptors():
|
||||
stale = set(_failed_cache)
|
||||
devices = []
|
||||
for hidraw in glob.glob("/dev/hidraw*"):
|
||||
stale.discard(hidraw)
|
||||
try:
|
||||
devices.append(get_descriptor(hidraw))
|
||||
logger.debug("Found CTAP device: %s", hidraw)
|
||||
except ValueError:
|
||||
pass # Not a CTAP device, ignore.
|
||||
except OSError as e:
|
||||
logger.debug("Skip device: %s", e)
|
||||
except Exception as e:
|
||||
logger.debug("Failed opening device", exc_info=e)
|
||||
except Exception:
|
||||
if hidraw not in _failed_cache:
|
||||
logger.debug("Failed opening device %s", hidraw, exc_info=True)
|
||||
_failed_cache.add(hidraw)
|
||||
|
||||
# Remove entries from the cache that were not seen
|
||||
_failed_cache.difference_update(stale)
|
||||
|
||||
return devices
|
||||
|
|
|
@ -15,15 +15,14 @@
|
|||
# Modified work Copyright 2020 Yubico AB. All Rights Reserved.
|
||||
# This file, with modifications, is licensed under the above Apache License.
|
||||
|
||||
|
||||
from __future__ import absolute_import
|
||||
from __future__ import annotations
|
||||
|
||||
from .base import HidDescriptor, CtapHidConnection, FIDO_USAGE_PAGE, FIDO_USAGE
|
||||
|
||||
import ctypes
|
||||
import ctypes.util
|
||||
import threading
|
||||
from six.moves.queue import Queue, Empty
|
||||
from queue import Queue, Empty
|
||||
|
||||
import logging
|
||||
|
||||
|
@ -277,10 +276,10 @@ class MacCtapHidConnection(CtapHidConnection):
|
|||
# Open device
|
||||
result = iokit.IOHIDDeviceOpen(self.handle, 0)
|
||||
if result != K_IO_RETURN_SUCCESS:
|
||||
raise OSError("Failed to open device for communication: {}".format(result))
|
||||
raise OSError(f"Failed to open device for communication: {result}")
|
||||
|
||||
# Create read queue
|
||||
self.read_queue = Queue()
|
||||
self.read_queue: Queue = Queue()
|
||||
|
||||
# Create and start read thread
|
||||
self.run_loop_ref = None
|
||||
|
@ -306,21 +305,28 @@ class MacCtapHidConnection(CtapHidConnection):
|
|||
|
||||
def write_packet(self, packet):
|
||||
result = iokit.IOHIDDeviceSetReport(
|
||||
self.handle, K_IO_HID_REPORT_TYPE_OUTPUT, 0, packet, len(packet),
|
||||
self.handle,
|
||||
K_IO_HID_REPORT_TYPE_OUTPUT,
|
||||
0,
|
||||
packet,
|
||||
len(packet),
|
||||
)
|
||||
|
||||
# Non-zero status indicates failure
|
||||
if result != K_IO_RETURN_SUCCESS:
|
||||
raise OSError("Failed to write report to device: {}".format(result))
|
||||
raise OSError(f"Failed to write report to device: {result}")
|
||||
|
||||
def read_packet(self):
|
||||
read_thread = threading.Thread(target=_dev_read_thread, args=(self,))
|
||||
read_thread.start()
|
||||
read_thread.join()
|
||||
try:
|
||||
return self.read_queue.get(False)
|
||||
except Empty:
|
||||
raise OSError("Failed reading a response")
|
||||
read_thread = threading.Thread(target=_dev_read_thread, args=(self,))
|
||||
read_thread.start()
|
||||
read_thread.join()
|
||||
try:
|
||||
return self.read_queue.get(False)
|
||||
except Empty:
|
||||
raise OSError("Failed reading a response")
|
||||
|
||||
|
||||
def get_int_property(dev, key):
|
||||
|
@ -332,7 +338,7 @@ def get_int_property(dev, key):
|
|||
return None
|
||||
|
||||
if cf.CFGetTypeID(type_ref) != cf.CFNumberGetTypeID():
|
||||
raise OSError("Expected number type, got {}".format(cf.CFGetTypeID(type_ref)))
|
||||
raise OSError(f"Expected number type, got {cf.CFGetTypeID(type_ref)}")
|
||||
|
||||
out = ctypes.c_int32()
|
||||
ret = cf.CFNumberGetValue(type_ref, K_CF_NUMBER_SINT32_TYPE, ctypes.byref(out))
|
||||
|
@ -351,7 +357,7 @@ def get_string_property(dev, key):
|
|||
return None
|
||||
|
||||
if cf.CFGetTypeID(type_ref) != cf.CFStringGetTypeID():
|
||||
raise OSError("Expected string type, got {}".format(cf.CFGetTypeID(type_ref)))
|
||||
raise OSError(f"Expected string type, got {cf.CFGetTypeID(type_ref)}")
|
||||
|
||||
out = ctypes.create_string_buffer(128)
|
||||
ret = cf.CFStringGetCString(
|
||||
|
@ -382,7 +388,7 @@ def get_device_id(handle):
|
|||
io_service_obj, ctypes.byref(entry_id)
|
||||
)
|
||||
if result != K_IO_RETURN_SUCCESS:
|
||||
raise OSError("Failed to obtain IORegistry entry ID: {}".format(result))
|
||||
raise OSError(f"Failed to obtain IORegistry entry ID: {result}")
|
||||
|
||||
return entry_id.value
|
||||
|
||||
|
@ -395,9 +401,7 @@ def _handle_from_path(path):
|
|||
K_IO_MASTER_PORT_DEFAULT, matching_dict
|
||||
)
|
||||
if not device_entry:
|
||||
raise OSError(
|
||||
"Device ID {} does not match any HID device on the system".format(path)
|
||||
)
|
||||
raise OSError(f"Device ID {path} does not match any HID device on the system")
|
||||
|
||||
return iokit.IOHIDDeviceCreate(K_CF_ALLOCATOR_DEFAULT, device_entry)
|
||||
|
||||
|
@ -450,7 +454,6 @@ def list_descriptors():
|
|||
try:
|
||||
descriptor = _get_descriptor_from_handle(handle)
|
||||
descriptors.append(descriptor)
|
||||
logger.debug("Found CTAP device: %s", descriptor.path)
|
||||
except ValueError:
|
||||
continue # Not a CTAP device, ignore it
|
||||
return descriptors
|
||||
|
|
|
@ -0,0 +1,173 @@
|
|||
# Copyright 2016 Google Inc. All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
"""Implements raw HID interface on NetBSD."""
|
||||
|
||||
from __future__ import absolute_import
|
||||
|
||||
import errno
|
||||
import logging
|
||||
import os
|
||||
import select
|
||||
import struct
|
||||
import sys
|
||||
|
||||
from ctypes import (
|
||||
Structure,
|
||||
c_char,
|
||||
c_int,
|
||||
c_ubyte,
|
||||
c_uint16,
|
||||
c_uint32,
|
||||
c_uint8,
|
||||
)
|
||||
from typing import Set
|
||||
|
||||
from . import base
|
||||
|
||||
# Don't typecheck this file on Windows
|
||||
assert sys.platform != "win32" # nosec
|
||||
|
||||
from fcntl import ioctl # noqa: E402
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
USB_MAX_DEVNAMELEN = 16
|
||||
USB_MAX_DEVNAMES = 4
|
||||
USB_MAX_STRING_LEN = 128
|
||||
USB_MAX_ENCODED_STRING_LEN = USB_MAX_STRING_LEN * 3
|
||||
|
||||
|
||||
class usb_ctl_report_desc(Structure):
|
||||
_fields_ = [
|
||||
("ucrd_size", c_int),
|
||||
("ucrd_data", c_ubyte * 1024),
|
||||
]
|
||||
|
||||
|
||||
class usb_device_info(Structure):
    # Mirrors struct usb_device_info from NetBSD's <dev/usb/usb.h> —
    # TODO confirm field layout against the target NetBSD release.
    # Filled in by the USB_GET_DEVICE_INFO ioctl.
    _fields_ = [
        ("udi_bus", c_uint8),
        ("udi_addr", c_uint8),
        ("udi_pad0", c_uint8 * 2),
        ("udi_cookie", c_uint32),
        # Product/vendor/serial strings; decoded as UTF-8 by get_descriptor.
        ("udi_product", c_char * USB_MAX_ENCODED_STRING_LEN),
        ("udi_vendor", c_char * USB_MAX_ENCODED_STRING_LEN),
        ("udi_release", c_char * 8),
        ("udi_serial", c_char * USB_MAX_ENCODED_STRING_LEN),
        # Numeric product/vendor IDs used for the HidDescriptor.
        ("udi_productNo", c_uint16),
        ("udi_vendorNo", c_uint16),
        ("udi_releaseNo", c_uint16),
        ("udi_class", c_uint8),
        ("udi_subclass", c_uint8),
        ("udi_protocol", c_uint8),
        ("udi_config", c_uint8),
        ("udi_speed", c_uint8),
        ("udi_pad1", c_uint8),
        ("udi_power", c_int),
        ("udi_nports", c_int),
        ("udi_devnames", c_char * USB_MAX_DEVNAMES * USB_MAX_DEVNAMELEN),
        ("udi_ports", c_uint8 * 16),
    ]
|
||||
|
||||
|
||||
USB_GET_DEVICE_INFO = 0x44F45570 # _IOR('U', 112, struct usb_device_info)
|
||||
USB_GET_REPORT_DESC = 0x44045515 # _IOR('U', 21, struct usb_ctl_report_desc)
|
||||
USB_HID_SET_RAW = 0x80046802 # _IOW('h', 2, int)
|
||||
|
||||
|
||||
# Cache for continuously failing devices
|
||||
# XXX not thread-safe
|
||||
_failed_cache: Set[str] = set()
|
||||
|
||||
|
||||
def list_descriptors():
    """Enumerate HID descriptors for connected uhid(4) devices.

    Probes /dev/uhid0 .. /dev/uhid99 in order, stopping at the first
    missing node (ENOENT).  Devices that fail to open or parse are
    remembered in _failed_cache so each failure is only logged once;
    cached entries for nodes that have disappeared are dropped.

    :return: a list of HidDescriptor for the devices found.
    """
    stale = set(_failed_cache)
    descriptors = []

    for i in range(100):
        path = "/dev/uhid%d" % (i,)
        stale.discard(path)
        try:
            desc = get_descriptor(path)
        except Exception as e:
            # Device nodes are numbered consecutively, so the first
            # missing node marks the end of the enumeration.
            if isinstance(e, OSError) and e.errno == errno.ENOENT:
                break
            # Previously two identical except-branches duplicated this
            # log-once-and-cache logic; merged into one handler.
            if path not in _failed_cache:
                logger.debug("Failed opening FIDO device %s", path, exc_info=True)
                _failed_cache.add(path)
            continue
        descriptors.append(desc)

    # Forget cached failures for devices that no longer exist.
    _failed_cache.difference_update(stale)
    return descriptors
|
||||
|
||||
|
||||
def get_descriptor(path):
    """Build a HidDescriptor for the uhid(4) device node at *path*.

    Opens the device read-only, queries its USB device info and HID
    report descriptor via ioctl, and closes it again.

    :raises OSError: if the device cannot be opened or an ioctl fails.
    """
    handle = None
    try:
        handle = os.open(path, os.O_RDONLY | os.O_CLOEXEC)

        info = usb_device_info()
        ioctl(handle, USB_GET_DEVICE_INFO, info)

        report = usb_ctl_report_desc()
        ioctl(handle, USB_GET_REPORT_DESC, report)
        raw_desc = bytearray(report.ucrd_data[: report.ucrd_size])
        max_in_size, max_out_size = base.parse_report_descriptor(raw_desc)

        def decode_or_none(raw):
            # Device strings are not guaranteed to be valid UTF-8.
            try:
                return raw.decode("utf-8")
            except UnicodeDecodeError:
                return None

        return base.HidDescriptor(
            path,
            info.udi_vendorNo,
            info.udi_productNo,
            max_in_size,
            max_out_size,
            decode_or_none(info.udi_product),
            decode_or_none(info.udi_serial),
        )
    finally:
        # handle stays None if os.open itself raised.
        if handle is not None:
            os.close(handle)
|
||||
|
||||
|
||||
def open_connection(descriptor):
    # Open a raw CTAP HID connection to the device described by *descriptor*.
    return NetBSDCtapHidConnection(descriptor)
|
||||
|
||||
|
||||
class NetBSDCtapHidConnection(base.FileCtapHidConnection):
    """CTAP HID connection over a NetBSD uhid(4) device.

    Switches the device into raw report mode and verifies it actually
    responds to a CTAPHID ping before handing the connection to callers.
    """

    def __init__(self, descriptor):
        # XXX racy -- device can change identity now that it has been
        # closed
        super().__init__(descriptor)
        try:
            # Enable raw HID reports (USB_HID_SET_RAW, value 1).
            ioctl(self.handle, USB_HID_SET_RAW, struct.pack("@i", 1))
            # CTAPHID PING (cmd 0x81) on the broadcast channel
            # 0xFFFFFFFF with a 1-byte payload.
            ping = bytearray(64)
            ping[0:7] = bytearray([0xFF, 0xFF, 0xFF, 0xFF, 0x81, 0, 1])
            # Retry up to 10 times, waiting 100 ms for a reply each time.
            for i in range(10):
                self.write_packet(ping)
                poll = select.poll()
                poll.register(self.handle, select.POLLIN)
                if poll.poll(100):
                    self.read_packet()
                    break
            else:
                raise Exception("u2f ping timeout")
        except Exception:
            # Don't leak the open handle if initialization fails.
            self.close()
            raise
|
|
@ -15,17 +15,23 @@
|
|||
# Modified work Copyright 2020 Yubico AB. All Rights Reserved.
|
||||
# This file, with modifications, is licensed under the above Apache License.
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import fcntl
|
||||
import select
|
||||
import os
|
||||
import os.path
|
||||
import sys
|
||||
|
||||
from ctypes import Structure, c_char, c_int, c_uint8, c_uint16, c_uint32
|
||||
|
||||
from .base import HidDescriptor, FileCtapHidConnection
|
||||
|
||||
import logging
|
||||
from typing import Set
|
||||
|
||||
# Don't typecheck this file on Windows
|
||||
assert sys.platform != "win32" # nosec
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
@ -64,7 +70,7 @@ class UsbDeviceInfo(Structure):
|
|||
|
||||
class OpenBsdCtapHidConnection(FileCtapHidConnection):
|
||||
def __init__(self, descriptor):
|
||||
super(OpenBsdCtapHidConnection, self).__init__(descriptor)
|
||||
super().__init__(descriptor)
|
||||
try:
|
||||
self._terrible_ping_kludge()
|
||||
except Exception:
|
||||
|
@ -99,7 +105,7 @@ def get_descriptor(path):
|
|||
dev_info = UsbDeviceInfo()
|
||||
|
||||
try:
|
||||
fcntl.ioctl(f, USB_GET_DEVICEINFO, dev_info)
|
||||
fcntl.ioctl(f, USB_GET_DEVICEINFO, dev_info) # type: ignore
|
||||
finally:
|
||||
os.close(f)
|
||||
|
||||
|
@ -111,13 +117,20 @@ def get_descriptor(path):
|
|||
return HidDescriptor(path, vid, pid, MAX_U2F_HIDLEN, MAX_U2F_HIDLEN, name, serial)
|
||||
|
||||
|
||||
# Cache for continuously failing devices
|
||||
_failed_cache: Set[str] = set()
|
||||
|
||||
|
||||
def list_descriptors():
|
||||
stale = set(_failed_cache)
|
||||
descriptors = []
|
||||
for dev in os.listdir(FIDO_DEVS):
|
||||
path = os.path.join(FIDO_DEVS, dev)
|
||||
stale.discard(path)
|
||||
try:
|
||||
descriptors.append(get_descriptor(path))
|
||||
logger.debug("Found CTAP device: %s", path)
|
||||
except Exception as e:
|
||||
logger.debug("Failed opening FIDO device", exc_info=e)
|
||||
except Exception:
|
||||
if path not in _failed_cache:
|
||||
logger.debug("Failed opening FIDO device %s", path, exc_info=True)
|
||||
_failed_cache.add(path)
|
||||
return descriptors
|
||||
|
|
|
@ -15,16 +15,21 @@
|
|||
# Modified work Copyright 2020 Yubico AB. All Rights Reserved.
|
||||
# This file, with modifications, is licensed under the above Apache License.
|
||||
|
||||
|
||||
from __future__ import absolute_import
|
||||
from __future__ import annotations
|
||||
|
||||
from .base import HidDescriptor, CtapHidConnection, FIDO_USAGE_PAGE, FIDO_USAGE
|
||||
|
||||
from ctypes import wintypes, LibraryLoader
|
||||
from typing import Dict, cast
|
||||
|
||||
import ctypes
|
||||
import platform
|
||||
from ctypes import wintypes, LibraryLoader, WinDLL
|
||||
|
||||
import logging
|
||||
import sys
|
||||
|
||||
# Only typecheck this file on Windows
|
||||
assert sys.platform == "win32" # nosec
|
||||
from ctypes import WinDLL, WinError # noqa: E402
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
@ -56,7 +61,7 @@ if platform.architecture()[0] == "64bit":
|
|||
elif platform.architecture()[0] == "32bit":
|
||||
SETUPAPI_PACK = 1
|
||||
else:
|
||||
raise OSError("Unknown architecture: %s" % platform.architecture()[0])
|
||||
raise OSError(f"Unknown architecture: {platform.architecture()[0]}")
|
||||
|
||||
|
||||
class DeviceInterfaceData(ctypes.Structure):
|
||||
|
@ -101,7 +106,8 @@ HANDLE = ctypes.c_void_p
|
|||
PHIDP_PREPARSED_DATA = ctypes.c_void_p # pylint: disable=invalid-name
|
||||
|
||||
# This is a HANDLE.
|
||||
INVALID_HANDLE_VALUE = 0xFFFFFFFF
|
||||
# INVALID_HANDLE_VALUE = 0xFFFFFFFF
|
||||
INVALID_HANDLE_VALUE = (1 << 8 * ctypes.sizeof(ctypes.c_void_p)) - 1
|
||||
|
||||
# Status codes
|
||||
FILE_SHARE_READ = 0x00000001
|
||||
|
@ -195,7 +201,7 @@ class WinCtapHidConnection(CtapHidConnection):
|
|||
None,
|
||||
)
|
||||
if self.handle == INVALID_HANDLE_VALUE:
|
||||
raise ctypes.WinError()
|
||||
raise WinError()
|
||||
|
||||
def close(self):
|
||||
kernel32.CloseHandle(self.handle)
|
||||
|
@ -207,7 +213,7 @@ class WinCtapHidConnection(CtapHidConnection):
|
|||
self.handle, out, len(out), ctypes.byref(num_written), None
|
||||
)
|
||||
if not ret:
|
||||
raise ctypes.WinError()
|
||||
raise WinError()
|
||||
if num_written.value != len(out):
|
||||
raise OSError(
|
||||
"Failed to write complete packet. "
|
||||
|
@ -221,7 +227,7 @@ class WinCtapHidConnection(CtapHidConnection):
|
|||
self.handle, buf, len(buf), ctypes.byref(num_read), None
|
||||
)
|
||||
if not ret:
|
||||
raise ctypes.WinError()
|
||||
raise WinError()
|
||||
|
||||
if num_read.value != self.descriptor.report_size_in + 1:
|
||||
raise OSError("Failed to read full length report from device.")
|
||||
|
@ -233,7 +239,7 @@ def get_vid_pid(device):
|
|||
attributes = HidAttributes()
|
||||
result = hid.HidD_GetAttributes(device, ctypes.byref(attributes))
|
||||
if not result:
|
||||
raise ctypes.WinError()
|
||||
raise WinError()
|
||||
|
||||
return attributes.VendorID, attributes.ProductID
|
||||
|
||||
|
@ -262,22 +268,28 @@ def get_serial(device):
|
|||
|
||||
def get_descriptor(path):
|
||||
device = kernel32.CreateFileA(
|
||||
path, 0, FILE_SHARE_READ | FILE_SHARE_WRITE, None, OPEN_EXISTING, 0, None,
|
||||
path,
|
||||
0,
|
||||
FILE_SHARE_READ | FILE_SHARE_WRITE,
|
||||
None,
|
||||
OPEN_EXISTING,
|
||||
0,
|
||||
None,
|
||||
)
|
||||
if device == INVALID_HANDLE_VALUE:
|
||||
raise ctypes.WinError()
|
||||
raise WinError()
|
||||
try:
|
||||
preparsed_data = PHIDP_PREPARSED_DATA(0)
|
||||
ret = hid.HidD_GetPreparsedData(device, ctypes.byref(preparsed_data))
|
||||
if not ret:
|
||||
raise ctypes.WinError()
|
||||
raise WinError()
|
||||
|
||||
try:
|
||||
caps = HidCapabilities()
|
||||
ret = hid.HidP_GetCaps(preparsed_data, ctypes.byref(caps))
|
||||
|
||||
if ret != HIDP_STATUS_SUCCESS:
|
||||
raise ctypes.WinError()
|
||||
raise WinError()
|
||||
|
||||
if caps.UsagePage == FIDO_USAGE_PAGE and caps.Usage == FIDO_USAGE:
|
||||
vid, pid = get_vid_pid(device)
|
||||
|
@ -301,7 +313,12 @@ def open_connection(descriptor):
|
|||
return WinCtapHidConnection(descriptor)
|
||||
|
||||
|
||||
_SKIP = cast(HidDescriptor, object())
|
||||
_descriptor_cache: Dict[bytes, HidDescriptor] = {}
|
||||
|
||||
|
||||
def list_descriptors():
|
||||
stale = set(_descriptor_cache)
|
||||
descriptors = []
|
||||
|
||||
hid_guid = GUID()
|
||||
|
@ -327,19 +344,19 @@ def list_descriptors():
|
|||
if not result:
|
||||
break
|
||||
|
||||
detail_len = wintypes.DWORD()
|
||||
dw_detail_len = wintypes.DWORD()
|
||||
result = setupapi.SetupDiGetDeviceInterfaceDetailA(
|
||||
collection,
|
||||
ctypes.byref(interface_info),
|
||||
None,
|
||||
0,
|
||||
ctypes.byref(detail_len),
|
||||
ctypes.byref(dw_detail_len),
|
||||
None,
|
||||
)
|
||||
if result:
|
||||
raise ctypes.WinError()
|
||||
raise WinError()
|
||||
|
||||
detail_len = detail_len.value
|
||||
detail_len = dw_detail_len.value
|
||||
if detail_len == 0:
|
||||
# skip this device, some kind of error
|
||||
continue
|
||||
|
@ -357,18 +374,35 @@ def list_descriptors():
|
|||
None,
|
||||
)
|
||||
if not result:
|
||||
raise ctypes.WinError()
|
||||
raise WinError()
|
||||
|
||||
path = ctypes.string_at(interface_detail.DevicePath)
|
||||
stale.discard(path)
|
||||
|
||||
# Check if path already cached
|
||||
desc = _descriptor_cache.get(path)
|
||||
if desc:
|
||||
if desc is not _SKIP:
|
||||
descriptors.append(desc)
|
||||
continue
|
||||
|
||||
try:
|
||||
descriptors.append(get_descriptor(path))
|
||||
logger.debug("Found CTAP device: %s", path)
|
||||
descriptor = get_descriptor(path)
|
||||
_descriptor_cache[path] = descriptor
|
||||
descriptors.append(descriptor)
|
||||
continue
|
||||
except ValueError:
|
||||
pass
|
||||
except Exception as e:
|
||||
logger.debug("Failed reading HID descriptor: %s", e)
|
||||
pass # Not a CTAP device
|
||||
except Exception:
|
||||
logger.debug(
|
||||
"Failed reading HID descriptor for %s", path, exc_info=True
|
||||
)
|
||||
_descriptor_cache[path] = _SKIP
|
||||
finally:
|
||||
setupapi.SetupDiDestroyDeviceInfoList(collection)
|
||||
|
||||
# Remove entries from the cache that were not seen
|
||||
for path in stale:
|
||||
del _descriptor_cache[path]
|
||||
|
||||
return descriptors
|
||||
|
|
|
@ -0,0 +1,497 @@
|
|||
# Copyright (c) 2022 Yubico AB
|
||||
# All rights reserved.
|
||||
#
|
||||
# Redistribution and use in source and binary forms, with or
|
||||
# without modification, are permitted provided that the following
|
||||
# conditions are met:
|
||||
#
|
||||
# 1. Redistributions of source code must retain the above copyright
|
||||
# notice, this list of conditions and the following disclaimer.
|
||||
# 2. Redistributions in binary form must reproduce the above
|
||||
# copyright notice, this list of conditions and the following
|
||||
# disclaimer in the documentation and/or other materials provided
|
||||
# with the distribution.
|
||||
#
|
||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
|
||||
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
|
||||
# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
|
||||
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
|
||||
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
|
||||
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
|
||||
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
|
||||
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
|
||||
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
||||
# POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from .webauthn import AttestationObject, Aaguid
|
||||
from .attestation import (
|
||||
Attestation,
|
||||
UntrustedAttestation,
|
||||
verify_x509_chain,
|
||||
AttestationVerifier,
|
||||
)
|
||||
from .utils import websafe_decode, _CamelCaseDataObject
|
||||
from .cose import CoseKey
|
||||
|
||||
from cryptography import x509
|
||||
from cryptography.hazmat.backends import default_backend
|
||||
from dataclasses import dataclass, field
|
||||
from enum import Enum, unique
|
||||
from datetime import date
|
||||
from base64 import b64decode, b64encode
|
||||
from contextvars import ContextVar
|
||||
from typing import Sequence, Mapping, Any, Optional, Callable
|
||||
|
||||
import json
|
||||
import logging
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@dataclass(eq=False, frozen=True)
class Version(_CamelCaseDataObject):
    """Major/minor version pair, as used in metadata statements."""

    major: int
    minor: int
|
||||
|
||||
|
||||
@dataclass(eq=False, frozen=True)
class RogueListEntry(_CamelCaseDataObject):
    """Entry of a rogue (compromised key) list referenced by MDS metadata."""

    sk: bytes
    date: int
|
||||
|
||||
|
||||
@dataclass(eq=False, frozen=True)
class BiometricStatusReport(_CamelCaseDataObject):
    """Certification status of an authenticator's biometric component."""

    cert_level: int
    modality: str
    effective_date: int
    certification_descriptor: str
    certificate_number: str
    certification_policy_version: str
    certification_requirements_version: str
|
||||
|
||||
|
||||
@dataclass(eq=False, frozen=True)
class CodeAccuracyDescriptor(_CamelCaseDataObject):
    """Accuracy/complexity of a code-based (e.g. PIN) verification method."""

    base: int
    min_length: int
    max_retries: Optional[int] = None
    block_slowdown: Optional[int] = None
|
||||
|
||||
|
||||
@dataclass(eq=False, frozen=True)
class BiometricAccuracyDescriptor(_CamelCaseDataObject):
    """Accuracy of a biometric verification method."""

    # Explicit JSON names: the spec uses FRR/FAR in all-caps, which the
    # default camelCase conversion would not produce.
    self_attested_frr: Optional[float] = field(
        default=None, metadata=dict(name="selfAttestedFRR")
    )
    self_attested_far: Optional[float] = field(
        default=None, metadata=dict(name="selfAttestedFAR")
    )
    max_templates: Optional[int] = None
    max_retries: Optional[int] = None
    block_slowdown: Optional[int] = None
|
||||
|
||||
|
||||
@dataclass(eq=False, frozen=True)
class PatternAccuracyDescriptor(_CamelCaseDataObject):
    """Accuracy/complexity of a pattern-based verification method."""

    min_complexity: int
    max_retries: Optional[int] = None
    block_slowdown: Optional[int] = None
|
||||
|
||||
|
||||
@dataclass(eq=False, frozen=True)
class VerificationMethodDescriptor(_CamelCaseDataObject):
    """A single user verification method and its accuracy descriptor."""

    user_verification_method: Optional[str] = None
    ca_desc: Optional[CodeAccuracyDescriptor] = None
    ba_desc: Optional[BiometricAccuracyDescriptor] = None
    pa_desc: Optional[PatternAccuracyDescriptor] = None
|
||||
|
||||
|
||||
@dataclass(eq=False, frozen=True)
class RgbPaletteEntry(_CamelCaseDataObject):
    """One RGB palette entry of a PNG icon description."""

    r: int
    g: int
    b: int
|
||||
|
||||
|
||||
@dataclass(eq=False, frozen=True)
class DisplayPngCharacteristicsDescriptor(_CamelCaseDataObject):
    """PNG image characteristics of an authenticator's display."""

    width: int
    height: int
    bit_depth: int
    color_type: int
    compression: int
    filter: int
    interlace: int
    plte: Optional[Sequence[RgbPaletteEntry]] = None
|
||||
|
||||
|
||||
@dataclass(eq=False, frozen=True)
class EcdaaTrustAnchor(_CamelCaseDataObject):
    """ECDAA trust anchor used for ECDAA attestation."""

    # Explicit JSON names: the spec capitalizes X, Y and G1Curve, which
    # the default camelCase conversion would not produce.
    x: str = field(metadata=dict(name="X"))
    y: str = field(metadata=dict(name="Y"))
    c: str
    sx: str
    sy: str
    g1_curve: str = field(metadata=dict(name="G1Curve"))
|
||||
|
||||
|
||||
@unique
class AuthenticatorStatus(str, Enum):
    """Status values used in MDS StatusReport entries.

    str-valued so members compare equal to their JSON representation.
    """

    NOT_FIDO_CERTIFIED = "NOT_FIDO_CERTIFIED"
    FIDO_CERTIFIED = "FIDO_CERTIFIED"
    USER_VERIFICATION_BYPASS = "USER_VERIFICATION_BYPASS"
    ATTESTATION_KEY_COMPROMISE = "ATTESTATION_KEY_COMPROMISE"
    USER_KEY_REMOTE_COMPROMISE = "USER_KEY_REMOTE_COMPROMISE"
    USER_KEY_PHYSICAL_COMPROMISE = "USER_KEY_PHYSICAL_COMPROMISE"
    UPDATE_AVAILABLE = "UPDATE_AVAILABLE"
    REVOKED = "REVOKED"
    SELF_ASSERTION_SUBMITTED = "SELF_ASSERTION_SUBMITTED"
    FIDO_CERTIFIED_L1 = "FIDO_CERTIFIED_L1"
    FIDO_CERTIFIED_L1plus = "FIDO_CERTIFIED_L1plus"
    FIDO_CERTIFIED_L2 = "FIDO_CERTIFIED_L2"
    FIDO_CERTIFIED_L2plus = "FIDO_CERTIFIED_L2plus"
    FIDO_CERTIFIED_L3 = "FIDO_CERTIFIED_L3"
    FIDO_CERTIFIED_L3plus = "FIDO_CERTIFIED_L3plus"
|
||||
|
||||
|
||||
@dataclass(eq=False, frozen=True)
class StatusReport(_CamelCaseDataObject):
    """One certification status report for an authenticator."""

    status: AuthenticatorStatus
    # Serialized as an ISO 8601 date string in JSON.
    effective_date: Optional[date] = field(
        metadata=dict(
            deserialize=date.fromisoformat,
            serialize=lambda x: x.isoformat(),
        ),
        default=None,
    )
    authenticator_version: Optional[int] = None
    # DER certificate, base64-encoded in JSON; compared against the
    # attestation trust chain by filter_attestation_key_compromised.
    certificate: Optional[bytes] = field(
        metadata=dict(deserialize=b64decode, serialize=lambda x: b64encode(x).decode()),
        default=None,
    )
    url: Optional[str] = None
    certification_descriptor: Optional[str] = None
    certificate_number: Optional[str] = None
    certification_policy_version: Optional[str] = None
    certification_requirements_version: Optional[str] = None
|
||||
|
||||
|
||||
@dataclass(eq=False, frozen=True)
class ExtensionDescriptor(_CamelCaseDataObject):
    """An extension supported by the authenticator."""

    # Explicit JSON name: the spec uses snake_case "fail_if_unknown"
    # here, unlike the camelCase used elsewhere.
    fail_if_unknown: bool = field(metadata=dict(name="fail_if_unknown"))
    id: str
    tag: Optional[int] = None
    data: Optional[str] = None
|
||||
|
||||
|
||||
@dataclass(eq=False, frozen=True)
class MetadataStatement(_CamelCaseDataObject):
    """A FIDO metadata statement describing a single authenticator model."""

    description: str
    authenticator_version: int
    schema: int
    upv: Sequence[Version]
    attestation_types: Sequence[str]
    # Outer sequence: alternative verification combinations (OR);
    # inner sequence: methods used together (AND).
    user_verification_details: Sequence[Sequence[VerificationMethodDescriptor]] = field(
        metadata=dict(serialize=lambda xss: [[dict(x) for x in xs] for xs in xss])
    )
    key_protection: Sequence[str]
    matcher_protection: Sequence[str]
    attachment_hint: Sequence[str]
    tc_display: Sequence[str]
    # DER certificates, base64-encoded in JSON; candidate trust roots
    # for attestation verification (see MdsAttestationVerifier.ca_lookup).
    attestation_root_certificates: Sequence[bytes] = field(
        metadata=dict(
            deserialize=lambda xs: [b64decode(x) for x in xs],
            serialize=lambda xs: [b64encode(x).decode() for x in xs],
        )
    )
    legal_header: Optional[str] = None
    aaid: Optional[str] = None
    aaguid: Optional[Aaguid] = field(
        metadata=dict(
            deserialize=Aaguid.parse,
            serialize=lambda x: str(x),
        ),
        default=None,
    )
    # Hex-encoded subject key identifiers, for authenticators without an AAGUID.
    attestation_certificate_key_identifiers: Optional[Sequence[bytes]] = field(
        metadata=dict(
            deserialize=lambda xs: [bytes.fromhex(x) for x in xs],
            serialize=lambda xs: [x.hex() for x in xs],
        ),
        default=None,
    )
    alternative_descriptions: Optional[Mapping[str, str]] = None
    protocol_family: Optional[str] = None
    authentication_algorithms: Optional[Sequence[str]] = None
    public_key_alg_and_encodings: Optional[Sequence[str]] = None
    is_key_restricted: Optional[bool] = None
    is_fresh_user_verification_required: Optional[bool] = None
    crypto_strength: Optional[int] = None
    operating_env: Optional[str] = None
    tc_display_content_type: Optional[str] = None
    # Explicit JSON name: "PNG" is all-caps in the spec.
    tc_display_png_characteristics: Optional[
        Sequence[DisplayPngCharacteristicsDescriptor]
    ] = field(
        metadata=dict(name="tcDisplayPNGCharacteristics"),
        default=None,
    )
    ecdaa_trust_anchors: Optional[Sequence[EcdaaTrustAnchor]] = None
    icon: Optional[str] = None
    supported_extensions: Optional[Sequence[ExtensionDescriptor]] = None
    authenticator_get_info: Optional[Mapping[str, Any]] = None
|
||||
|
||||
|
||||
@dataclass(eq=False, frozen=True)
class MetadataBlobPayloadEntry(_CamelCaseDataObject):
    """One entry of the MDS blob payload, describing a single authenticator."""

    status_reports: Sequence[StatusReport]
    # Serialized as an ISO 8601 date string in JSON.
    time_of_last_status_change: date = field(
        metadata=dict(
            deserialize=date.fromisoformat,
            serialize=lambda x: x.isoformat(),
        )
    )
    aaid: Optional[str] = None
    aaguid: Optional[Aaguid] = field(
        metadata=dict(
            deserialize=Aaguid.parse,
            serialize=lambda x: str(x),
        ),
        default=None,
    )
    # Hex-encoded subject key identifiers; used for lookup when no
    # AAGUID is available (see MdsAttestationVerifier.find_entry_by_chain).
    attestation_certificate_key_identifiers: Optional[Sequence[bytes]] = field(
        metadata=dict(
            deserialize=lambda xs: [bytes.fromhex(x) for x in xs],
            serialize=lambda xs: [x.hex() for x in xs],
        ),
        default=None,
    )
    metadata_statement: Optional[MetadataStatement] = None
    biometric_status_reports: Optional[Sequence[BiometricStatusReport]] = None
    # Explicit JSON name: "URL" is all-caps in the spec.
    rogue_list_url: Optional[str] = field(
        metadata=dict(name="rogueListURL"), default=None
    )
    rogue_list_hash: Optional[bytes] = field(
        metadata=dict(
            deserialize=bytes.fromhex,
            serialize=lambda x: x.hex(),
        ),
        default=None,
    )
|
||||
|
||||
|
||||
@dataclass(eq=False, frozen=True)
class MetadataBlobPayload(_CamelCaseDataObject):
    """Top-level payload of a FIDO MDS3 metadata blob (see parse_blob)."""

    legal_header: str
    # Monotonically increasing serial number of the blob.
    no: int
    # Serialized as an ISO 8601 date string in JSON.
    next_update: date = field(
        metadata=dict(
            deserialize=date.fromisoformat,
            serialize=lambda x: x.isoformat(),
        )
    )
    entries: Sequence[MetadataBlobPayloadEntry]
|
||||
|
||||
|
||||
EntryFilter = Callable[[MetadataBlobPayloadEntry], bool]
|
||||
LookupFilter = Callable[[MetadataBlobPayloadEntry, Sequence[bytes]], bool]
|
||||
|
||||
|
||||
def filter_revoked(entry: MetadataBlobPayloadEntry) -> bool:
    """Filters out any revoked metadata entry.

    This filter will remove any metadata entry which has a status_report with
    the REVOKED status.
    """
    for report in entry.status_reports:
        if report.status == AuthenticatorStatus.REVOKED:
            return False
    return True
|
||||
|
||||
|
||||
def filter_attestation_key_compromised(
    entry: MetadataBlobPayloadEntry, certificate_chain: Sequence[bytes]
) -> bool:
    """Denies any attestation that has a compromised attestation key.

    This filter checks the status reports of a metadata entry and ensures the
    attestation isn't signed by a key which is marked as compromised.
    """
    return not any(
        report.status == AuthenticatorStatus.ATTESTATION_KEY_COMPROMISE
        and report.certificate in certificate_chain
        for report in entry.status_reports
    )
|
||||
|
||||
|
||||
_last_entry: ContextVar[Optional[MetadataBlobPayloadEntry]] = ContextVar("_last_entry")
|
||||
|
||||
|
||||
class MdsAttestationVerifier(AttestationVerifier):
    """MDS3 implementation of an AttestationVerifier.

    The entry_filter is an optional predicate used to filter which metadata entries to
    include in the lookup for verification. By default, a filter that removes any
    entries that have a status report indicating the authenticator is REVOKED is used.
    See: filter_revoked

    The attestation_filter is an optional predicate used to filter metadata entries
    while performing attestation validation, and may take into account the
    Authenticators attestation trust_chain. By default, a filter that will fail any
    verification that has a trust_chain where one of the certificates is marked as
    compromised by the metadata statement is used.
    See: filter_attestation_key_compromised

    NOTE: The attestation_filter is not used when calling find_entry_by_aaguid nor
    find_entry_by_chain as no attestation is being verified!

    Setting either filter (including setting it to None) will replace it, removing
    the default behavior.

    :param blob: The MetadataBlobPayload to query for device metadata.
    :param entry_filter: An optional filter to exclude entries from lookup.
    :param attestation_filter: An optional filter to fail verification for a given
        attestation.
    :param attestation_types: A list of Attestation types to support.
    """

    def __init__(
        self,
        blob: MetadataBlobPayload,
        entry_filter: Optional[EntryFilter] = filter_revoked,
        attestation_filter: Optional[LookupFilter] = filter_attestation_key_compromised,
        attestation_types: Optional[Sequence[Attestation]] = None,
    ):
        super().__init__(attestation_types)
        self._attestation_filter = attestation_filter or (
            lambda a, b: True
        )  # No-op for None

        entries = (
            [e for e in blob.entries if entry_filter(e)]
            if entry_filter
            else blob.entries
        )
        # Index the (filtered) entries for O(1) lookup by AAGUID and by
        # attestation certificate subject key identifier.
        self._aaguid_table = {e.aaguid: e for e in entries if e.aaguid}
        self._ski_table = {
            ski: e
            for e in entries
            for ski in e.attestation_certificate_key_identifiers or []
        }

    def find_entry_by_aaguid(
        self, aaguid: Aaguid
    ) -> Optional[MetadataBlobPayloadEntry]:
        """Find an entry by AAGUID.

        Returns a MetadataBlobPayloadEntry with a matching aaguid field, if found.
        This method does not take the attestation_filter into account.
        """
        return self._aaguid_table.get(aaguid)

    def find_entry_by_chain(
        self, certificate_chain: Sequence[bytes]
    ) -> Optional[MetadataBlobPayloadEntry]:
        """Find an entry by trust chain.

        Returns a MetadataBlobPayloadEntry containing an
        attestationCertificateKeyIdentifier which matches one of the certificates in the
        given chain, if found.
        This method does not take the attestation_filter into account.
        """
        for der in certificate_chain:
            cert = x509.load_der_x509_certificate(der, default_backend())
            ski = x509.SubjectKeyIdentifier.from_public_key(cert.public_key()).digest
            if ski in self._ski_table:
                return self._ski_table[ski]
        return None

    def ca_lookup(self, result, auth_data):
        # Locate a matching metadata entry and return the attestation
        # root certificate (DER) whose subject matches the issuer of the
        # last certificate in the attestation trust path, or None.
        aaguid = auth_data.credential_data.aaguid
        if aaguid:
            # Fix: use the module logger (not the root logging module)
            # and lazy %-style arguments, consistent with the rest of
            # this module.
            logger.debug("Using AAGUID: %s to look up metadata", aaguid)
            entry = self.find_entry_by_aaguid(aaguid)
        else:
            logger.debug("Using trust_path chain to look up metadata")
            entry = self.find_entry_by_chain(result.trust_path)

        if entry:
            logger.debug("Found entry: %s", entry)

            # Check attestation filter
            if not self._attestation_filter(entry, result.trust_path):
                logger.debug("Matched entry did not pass attestation filter")
                return None

            # Figure out which root to use
            if not entry.metadata_statement:
                logger.warning(
                    "Matched entry has no metadata_statement, can't validate!"
                )
                return None

            issuer = x509.load_der_x509_certificate(
                result.trust_path[-1], default_backend()
            ).issuer

            for root in entry.metadata_statement.attestation_root_certificates:
                subject = x509.load_der_x509_certificate(
                    root, default_backend()
                ).subject
                if subject == issuer:
                    # Remember which entry matched so find_entry can
                    # retrieve it after verification succeeds.
                    _last_entry.set(entry)
                    return root
            # Fix: the original logged `subject` here, which is unbound
            # when the entry has no root certificates (UnboundLocalError)
            # and otherwise only reflects the last root tried; log the
            # issuer we were searching for instead.
            logger.info("No attestation root matching issuer: %s", issuer)
        return None

    def find_entry(
        self, attestation_object: AttestationObject, client_data_hash: bytes
    ) -> Optional[MetadataBlobPayloadEntry]:
        """Lookup a Metadata entry based on an Attestation.

        Returns the first Metadata entry matching the given attestation and verifies it,
        including checking it against the attestation_filter.
        """
        token = _last_entry.set(None)
        try:
            self.verify_attestation(attestation_object, client_data_hash)
            return _last_entry.get()
        except UntrustedAttestation:
            return None
        finally:
            # Always restore the previous context value.
            _last_entry.reset(token)
|
||||
|
||||
|
||||
def parse_blob(blob: bytes, trust_root: Optional[bytes]) -> MetadataBlobPayload:
    """Parse a FIDO MDS3 blob and verifies its signature.

    See https://fidoalliance.org/metadata/ for details on obtaining the blob, as well as
    the CA certificate used to sign it.

    The resulting MetadataBlobPayload can be used to lookup metadata entries for
    specific Authenticators, or used with the MdsAttestationVerifier to verify that the
    attestation from a WebAuthn registration is valid and included in the metadata blob.

    NOTE: If trust_root is None, the signature of the blob will NOT be verified!

    :param blob: The raw JWT blob bytes, as downloaded from the MDS.
    :param trust_root: DER-encoded CA certificate to anchor the blob's
        certificate chain, or None to skip verification.
    :raises InvalidSignature: if signature verification fails.
    """
    # The blob is a JWT: header.payload.signature, base64url-encoded.
    message, signature_b64 = blob.rsplit(b".", 1)
    signature = websafe_decode(signature_b64)
    header, payload = (json.loads(websafe_decode(x)) for x in message.split(b"."))

    if trust_root is not None:
        # Verify trust chain
        chain = [b64decode(c) for c in header.get("x5c", [])]
        chain += [trust_root]
        verify_x509_chain(chain)

        # Verify blob signature using leaf
        leaf = x509.load_der_x509_certificate(chain[0], default_backend())
        public_key = CoseKey.for_name(header["alg"]).from_cryptography_key(
            leaf.public_key()
        )
        public_key.verify(message, signature)
    else:
        # Fix: Logger.warn is a deprecated alias of Logger.warning.
        logger.warning(
            "Parsing MDS blob without trust anchor, CONTENT IS NOT VERIFIED!"
        )

    return MetadataBlobPayload.from_dict(payload)
|
171
fido2/nfc.py
171
fido2/nfc.py
|
@ -1,171 +0,0 @@
|
|||
# Copyright (c) 2019 Yubico AB
|
||||
# Copyright (c) 2019 Oleg Moiseenko
|
||||
# All rights reserved.
|
||||
#
|
||||
# Redistribution and use in source and binary forms, with or
|
||||
# without modification, are permitted provided that the following
|
||||
# conditions are met:
|
||||
#
|
||||
# 1. Redistributions of source code must retain the above copyright
|
||||
# notice, this list of conditions and the following disclaimer.
|
||||
# 2. Redistributions in binary form must reproduce the above
|
||||
# copyright notice, this list of conditions and the following
|
||||
# disclaimer in the documentation and/or other materials provided
|
||||
# with the distribution.
|
||||
#
|
||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
|
||||
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
|
||||
# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
|
||||
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
|
||||
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
|
||||
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
|
||||
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
|
||||
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
|
||||
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
||||
# POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
from __future__ import absolute_import, unicode_literals
|
||||
|
||||
from .ctap import CtapDevice, CtapError, STATUS
|
||||
from .hid import CAPABILITY, CTAPHID
|
||||
from .pcsc import PCSCDevice
|
||||
from smartcard.Exceptions import CardConnectionException
|
||||
from threading import Event
|
||||
import struct
|
||||
import six
|
||||
|
||||
|
||||
AID_FIDO = b"\xa0\x00\x00\x06\x47\x2f\x00\x01"
|
||||
SW_SUCCESS = (0x90, 0x00)
|
||||
SW_UPDATE = (0x91, 0x00)
|
||||
SW1_MORE_DATA = 0x61
|
||||
|
||||
|
||||
class CardSelectException(Exception):
|
||||
"""can't select u2f/fido2 application on the card"""
|
||||
|
||||
pass
|
||||
|
||||
|
||||
class CtapNfcDevice(CtapDevice):
|
||||
"""
|
||||
CtapDevice implementation using the pcsc NFC transport.
|
||||
"""
|
||||
|
||||
def __init__(self, dev):
|
||||
self._dev = dev
|
||||
self._dev.connect()
|
||||
self._capabilities = 0
|
||||
|
||||
result, sw1, sw2 = self._dev.select_applet(AID_FIDO)
|
||||
if (sw1, sw2) != SW_SUCCESS:
|
||||
raise CardSelectException("Select error")
|
||||
|
||||
if result == b"U2F_V2":
|
||||
self._capabilities |= CAPABILITY.NMSG
|
||||
try: # Probe for CTAP2 by calling GET_INFO
|
||||
self.call(CTAPHID.CBOR, b"\x04")
|
||||
self._capabilities |= CAPABILITY.CBOR
|
||||
except CtapError:
|
||||
pass
|
||||
|
||||
@property
|
||||
def pcsc_device(self):
|
||||
return self._dev
|
||||
|
||||
def __repr__(self):
|
||||
return "CtapNfcDevice(%s)" % self._dev.reader.name
|
||||
|
||||
@property
|
||||
def version(self):
|
||||
"""CTAP NFC protocol version.
|
||||
:rtype: int
|
||||
"""
|
||||
return 2 if self._capabilities & CAPABILITY.CBOR else 1
|
||||
|
||||
@property
|
||||
def capabilities(self):
|
||||
"""Capabilities supported by the device."""
|
||||
return self._capabilities
|
||||
|
||||
def _chain_apdus(self, cla, ins, p1, p2, data=b""):
|
||||
while len(data) > 250:
|
||||
to_send, data = data[:250], data[250:]
|
||||
header = struct.pack("!BBBBB", 0x90, ins, p1, p2, len(to_send))
|
||||
resp, sw1, sw2 = self._dev.apdu_exchange(header + to_send)
|
||||
if (sw1, sw2) != SW_SUCCESS:
|
||||
return resp, sw1, sw2
|
||||
apdu = struct.pack("!BBBB", cla, ins, p1, p2)
|
||||
if data:
|
||||
apdu += struct.pack("!B", len(data)) + data
|
||||
resp, sw1, sw2 = self._dev.apdu_exchange(apdu + b"\x00")
|
||||
while sw1 == SW1_MORE_DATA:
|
||||
apdu = b"\x00\xc0\x00\x00" + struct.pack("!B", sw2) # sw2 == le
|
||||
lres, sw1, sw2 = self._dev.apdu_exchange(apdu)
|
||||
resp += lres
|
||||
return resp, sw1, sw2
|
||||
|
||||
def _call_apdu(self, apdu):
|
||||
if len(apdu) >= 7 and six.indexbytes(apdu, 4) == 0:
|
||||
# Extended APDU
|
||||
data_len = struct.unpack("!H", apdu[5:7])[0]
|
||||
data = apdu[7 : 7 + data_len]
|
||||
else:
|
||||
# Short APDU
|
||||
data_len = six.indexbytes(apdu, 4)
|
||||
data = apdu[5 : 5 + data_len]
|
||||
(cla, ins, p1, p2) = six.iterbytes(apdu[:4])
|
||||
|
||||
resp, sw1, sw2 = self._chain_apdus(cla, ins, p1, p2, data)
|
||||
return resp + struct.pack("!BB", sw1, sw2)
|
||||
|
||||
def _call_cbor(self, data=b"", event=None, on_keepalive=None):
|
||||
event = event or Event()
|
||||
# NFCCTAP_MSG
|
||||
resp, sw1, sw2 = self._chain_apdus(0x80, 0x10, 0x80, 0x00, data)
|
||||
last_ka = None
|
||||
|
||||
while not event.is_set():
|
||||
while (sw1, sw2) == SW_UPDATE:
|
||||
ka_status = six.indexbytes(resp, 0)
|
||||
if on_keepalive and last_ka != ka_status:
|
||||
try:
|
||||
ka_status = STATUS(ka_status)
|
||||
except ValueError:
|
||||
pass # Unknown status value
|
||||
last_ka = ka_status
|
||||
on_keepalive(ka_status)
|
||||
|
||||
# NFCCTAP_GETRESPONSE
|
||||
resp, sw1, sw2 = self._chain_apdus(0x80, 0x11, 0x00, 0x00, b"")
|
||||
|
||||
if (sw1, sw2) != SW_SUCCESS:
|
||||
raise CtapError(CtapError.ERR.OTHER) # TODO: Map from SW error
|
||||
|
||||
return resp
|
||||
|
||||
raise CtapError(CtapError.ERR.KEEPALIVE_CANCEL)
|
||||
|
||||
def call(self, cmd, data=b"", event=None, on_keepalive=None):
|
||||
if cmd == CTAPHID.MSG:
|
||||
return self._call_apdu(data)
|
||||
elif cmd == CTAPHID.CBOR:
|
||||
return self._call_cbor(data, event, on_keepalive)
|
||||
else:
|
||||
raise CtapError(CtapError.ERR.INVALID_COMMAND)
|
||||
|
||||
@classmethod # selector='CL'
|
||||
def list_devices(cls, selector="", pcsc_device=PCSCDevice):
|
||||
"""
|
||||
Returns list of readers in the system. Iterator.
|
||||
:param selector:
|
||||
:param pcsc_device: device to work with. PCSCDevice by default.
|
||||
:return: iterator. next reader
|
||||
"""
|
||||
for d in pcsc_device.list_devices(selector):
|
||||
try:
|
||||
yield cls(d)
|
||||
except CardConnectionException:
|
||||
pass
|
|
@ -26,18 +26,19 @@
|
|||
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
||||
# POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
from __future__ import absolute_import, unicode_literals
|
||||
from __future__ import annotations
|
||||
|
||||
from .ctap import CtapDevice, CtapError, STATUS
|
||||
from .hid import CAPABILITY, CTAPHID
|
||||
from .utils import LOG_LEVEL_TRAFFIC
|
||||
from smartcard import System
|
||||
from smartcard.CardConnection import CardConnection
|
||||
from smartcard.pcsc.PCSCExceptions import ListReadersException
|
||||
from smartcard.pcsc.PCSCContext import PCSCContext
|
||||
|
||||
from binascii import b2a_hex as _b2a_hex
|
||||
from threading import Event
|
||||
from typing import Tuple, Optional, Callable, Iterator
|
||||
import struct
|
||||
import six
|
||||
import logging
|
||||
|
||||
|
||||
|
@ -50,10 +51,6 @@ SW1_MORE_DATA = 0x61
|
|||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def b2a_hex(data):
|
||||
return _b2a_hex(data).decode("ascii")
|
||||
|
||||
|
||||
class CtapPcscDevice(CtapDevice):
|
||||
"""
|
||||
CtapDevice implementation using pyscard (PCSC).
|
||||
|
@ -61,8 +58,8 @@ class CtapPcscDevice(CtapDevice):
|
|||
This class is intended for use with NFC readers.
|
||||
"""
|
||||
|
||||
def __init__(self, connection, name):
|
||||
self._capabilities = 0
|
||||
def __init__(self, connection: CardConnection, name: str):
|
||||
self._capabilities = CAPABILITY(0)
|
||||
self.use_ext_apdu = False
|
||||
self._conn = connection
|
||||
self._conn.connect()
|
||||
|
@ -73,53 +70,55 @@ class CtapPcscDevice(CtapDevice):
|
|||
self.call(CTAPHID.CBOR, b"\x04")
|
||||
self._capabilities |= CAPABILITY.CBOR
|
||||
except CtapError:
|
||||
if self._capabilities == 0:
|
||||
if not self._capabilities:
|
||||
raise ValueError("Unsupported device")
|
||||
|
||||
def __repr__(self):
|
||||
return "CtapPcscDevice(%s)" % self._name
|
||||
return f"CtapPcscDevice({self._name})"
|
||||
|
||||
@property
|
||||
def version(self):
|
||||
"""CTAPHID protocol version.
|
||||
:rtype: int
|
||||
"""
|
||||
return 2 if self._capabilities & CAPABILITY.CBOR else 1
|
||||
def version(self) -> int:
|
||||
"""CTAPHID protocol version."""
|
||||
return 2 if CAPABILITY.CBOR in self._capabilities else 1
|
||||
|
||||
@property
|
||||
def capabilities(self):
|
||||
def capabilities(self) -> CAPABILITY:
|
||||
"""Capabilities supported by the device."""
|
||||
return self._capabilities
|
||||
|
||||
@property
|
||||
def product_name(self):
|
||||
def product_name(self) -> Optional[str]:
|
||||
"""Product name of device."""
|
||||
return None
|
||||
|
||||
@property
|
||||
def serial_number(self):
|
||||
def serial_number(self) -> Optional[int]:
|
||||
"""Serial number of device."""
|
||||
return None
|
||||
|
||||
def get_atr(self):
|
||||
def get_atr(self) -> bytes:
|
||||
"""Get the ATR/ATS of the connected card."""
|
||||
return self._conn.getATR()
|
||||
return bytes(self._conn.getATR())
|
||||
|
||||
def apdu_exchange(self, apdu, protocol=None):
|
||||
def apdu_exchange(
|
||||
self, apdu: bytes, protocol: Optional[int] = None
|
||||
) -> Tuple[bytes, int, int]:
|
||||
"""Exchange data with smart card.
|
||||
|
||||
:param apdu: byte string. data to exchange with card
|
||||
:return: byte string. response from card
|
||||
"""
|
||||
|
||||
logger.debug("SEND: %s", b2a_hex(apdu))
|
||||
resp, sw1, sw2 = self._conn.transmit(list(six.iterbytes(apdu)), protocol)
|
||||
response = bytes(bytearray(resp))
|
||||
logger.debug("RECV: %s SW=%04X", b2a_hex(response), sw1 << 8 + sw2)
|
||||
logger.log(LOG_LEVEL_TRAFFIC, "SEND: %s", apdu.hex())
|
||||
resp, sw1, sw2 = self._conn.transmit(list(apdu), protocol)
|
||||
response = bytes(resp)
|
||||
logger.log(
|
||||
LOG_LEVEL_TRAFFIC, "RECV: %s SW=%04X", response.hex(), sw1 << 8 + sw2
|
||||
)
|
||||
|
||||
return response, sw1, sw2
|
||||
|
||||
def control_exchange(self, control_code, control_data=b""):
|
||||
def control_exchange(self, control_code: int, control_data: bytes = b"") -> bytes:
|
||||
"""Sends control sequence to reader's driver.
|
||||
|
||||
:param control_code: int. code to send to reader driver.
|
||||
|
@ -127,22 +126,24 @@ class CtapPcscDevice(CtapDevice):
|
|||
:return: byte string. response
|
||||
"""
|
||||
|
||||
logger.debug("control %s", b2a_hex(control_data))
|
||||
response = self._conn.control(control_code, list(six.iterbytes(control_data)))
|
||||
response = bytes(bytearray(response))
|
||||
logger.debug("response %s", b2a_hex(response))
|
||||
logger.log(LOG_LEVEL_TRAFFIC, "Send control: %s", control_data.hex())
|
||||
response = self._conn.control(control_code, list(control_data))
|
||||
response = bytes(response)
|
||||
logger.log(LOG_LEVEL_TRAFFIC, "Control response: %s", response.hex())
|
||||
|
||||
return response
|
||||
|
||||
def _select(self):
|
||||
def _select(self) -> None:
|
||||
apdu = b"\x00\xa4\x04\x00" + struct.pack("!B", len(AID_FIDO)) + AID_FIDO
|
||||
resp, sw1, sw2 = self.apdu_exchange(apdu)
|
||||
if (sw1, sw2) != SW_SUCCESS:
|
||||
raise ValueError("FIDO applet selection failure.")
|
||||
if resp == b"U2F_V2":
|
||||
self._capabilities |= 0x08
|
||||
self._capabilities |= CAPABILITY.NMSG
|
||||
|
||||
def _chain_apdus(self, cla, ins, p1, p2, data=b""):
|
||||
def _chain_apdus(
|
||||
self, cla: int, ins: int, p1: int, p2: int, data: bytes = b""
|
||||
) -> Tuple[bytes, int, int]:
|
||||
if self.use_ext_apdu:
|
||||
header = struct.pack("!BBBBBH", cla, ins, p1, p2, 0x00, len(data))
|
||||
resp, sw1, sw2 = self.apdu_exchange(header + data)
|
||||
|
@ -164,8 +165,8 @@ class CtapPcscDevice(CtapDevice):
|
|||
resp += lres
|
||||
return resp, sw1, sw2
|
||||
|
||||
def _call_apdu(self, apdu):
|
||||
if len(apdu) >= 7 and six.indexbytes(apdu, 4) == 0:
|
||||
def _call_apdu(self, apdu: bytes) -> bytes:
|
||||
if len(apdu) >= 7 and apdu[4] == 0:
|
||||
# Extended APDU
|
||||
data_len = struct.unpack("!H", apdu[5:7])[0]
|
||||
data = apdu[7 : 7 + data_len]
|
||||
|
@ -173,14 +174,19 @@ class CtapPcscDevice(CtapDevice):
|
|||
data = b""
|
||||
else:
|
||||
# Short APDU
|
||||
data_len = six.indexbytes(apdu, 4)
|
||||
data_len = apdu[4]
|
||||
data = apdu[5 : 5 + data_len]
|
||||
(cla, ins, p1, p2) = six.iterbytes(apdu[:4])
|
||||
(cla, ins, p1, p2) = apdu[:4]
|
||||
|
||||
resp, sw1, sw2 = self._chain_apdus(cla, ins, p1, p2, data)
|
||||
return resp + struct.pack("!BB", sw1, sw2)
|
||||
|
||||
def _call_cbor(self, data=b"", event=None, on_keepalive=None):
|
||||
def _call_cbor(
|
||||
self,
|
||||
data: bytes = b"",
|
||||
event: Optional[Event] = None,
|
||||
on_keepalive: Optional[Callable[[int], None]] = None,
|
||||
) -> bytes:
|
||||
event = event or Event()
|
||||
# NFCCTAP_MSG
|
||||
resp, sw1, sw2 = self._chain_apdus(0x80, 0x10, 0x80, 0x00, data)
|
||||
|
@ -188,7 +194,7 @@ class CtapPcscDevice(CtapDevice):
|
|||
|
||||
while not event.is_set():
|
||||
while (sw1, sw2) == SW_UPDATE:
|
||||
ka_status = six.indexbytes(resp, 0)
|
||||
ka_status = resp[0]
|
||||
if on_keepalive and last_ka != ka_status:
|
||||
try:
|
||||
ka_status = STATUS(ka_status)
|
||||
|
@ -207,7 +213,13 @@ class CtapPcscDevice(CtapDevice):
|
|||
|
||||
raise CtapError(CtapError.ERR.KEEPALIVE_CANCEL)
|
||||
|
||||
def call(self, cmd, data=b"", event=None, on_keepalive=None):
|
||||
def call(
|
||||
self,
|
||||
cmd: int,
|
||||
data: bytes = b"",
|
||||
event: Optional[Event] = None,
|
||||
on_keepalive: Optional[Callable[[int], None]] = None,
|
||||
) -> bytes:
|
||||
if cmd == CTAPHID.CBOR:
|
||||
return self._call_cbor(data, event, on_keepalive)
|
||||
elif cmd == CTAPHID.MSG:
|
||||
|
@ -215,11 +227,11 @@ class CtapPcscDevice(CtapDevice):
|
|||
else:
|
||||
raise CtapError(CtapError.ERR.INVALID_COMMAND)
|
||||
|
||||
def close(self):
|
||||
def close(self) -> None:
|
||||
self._conn.disconnect()
|
||||
|
||||
@classmethod
|
||||
def list_devices(cls, name=""):
|
||||
def list_devices(cls, name: str = "") -> Iterator[CtapPcscDevice]:
|
||||
for reader in _list_readers():
|
||||
if name in reader.name:
|
||||
try:
|
||||
|
|
File diff suppressed because it is too large
Load Diff
|
@ -35,11 +35,10 @@ Advanced APP_ID values pointing to JSON files containing valid facets are not
|
|||
supported by this implementation.
|
||||
"""
|
||||
|
||||
from __future__ import absolute_import, unicode_literals
|
||||
from __future__ import annotations
|
||||
|
||||
import os
|
||||
import six
|
||||
from six.moves.urllib.parse import urlparse
|
||||
from urllib.parse import urlparse
|
||||
|
||||
|
||||
tld_fname = os.path.join(os.path.dirname(__file__), "public_suffix_list.dat")
|
||||
|
@ -51,41 +50,25 @@ with open(tld_fname, "rb") as f:
|
|||
]
|
||||
|
||||
|
||||
def verify_rp_id(rp_id, origin):
|
||||
def verify_rp_id(rp_id: str, origin: str) -> bool:
|
||||
"""Checks if a Webauthn RP ID is usable for a given origin.
|
||||
|
||||
:param rp_id: The RP ID to validate.
|
||||
:param origin: The origin of the request.
|
||||
:return: True if the RP ID is usable by the origin, False if not.
|
||||
"""
|
||||
if isinstance(rp_id, six.binary_type):
|
||||
rp_id = rp_id.decode()
|
||||
if not rp_id:
|
||||
return False
|
||||
if isinstance(origin, six.binary_type):
|
||||
origin = origin.decode()
|
||||
|
||||
url = urlparse(origin)
|
||||
if url.scheme != "https":
|
||||
return False
|
||||
host = url.hostname
|
||||
# Note that Webauthn requires a secure context, i.e. an origin with https scheme.
|
||||
# However, most browsers also treat http://localhost as a secure context. See
|
||||
# https://groups.google.com/a/chromium.org/g/blink-dev/c/RC9dSw-O3fE/m/E3_0XaT0BAAJ
|
||||
if url.scheme != "https" and (url.scheme, host) != ("http", "localhost"):
|
||||
return False
|
||||
if host == rp_id:
|
||||
return True
|
||||
if host.endswith("." + rp_id) and rp_id not in suffixes:
|
||||
if host and host.endswith("." + rp_id) and rp_id not in suffixes:
|
||||
return True
|
||||
return False
|
||||
|
||||
|
||||
def verify_app_id(app_id, origin):
|
||||
"""Checks if a FIDO U2F App ID is usable for a given origin.
|
||||
|
||||
:param app_id: The App ID to validate.
|
||||
:param origin: The origin of the request.
|
||||
:return: True if the App ID is usable by the origin, False if not.
|
||||
"""
|
||||
if isinstance(app_id, six.binary_type):
|
||||
app_id = app_id.decode()
|
||||
url = urlparse(app_id)
|
||||
if url.scheme != "https":
|
||||
return False
|
||||
return verify_rp_id(url.hostname, origin)
|
||||
|
|
386
fido2/server.py
386
fido2/server.py
|
@ -25,23 +25,19 @@
|
|||
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
||||
# POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
from __future__ import absolute_import, unicode_literals
|
||||
from __future__ import annotations
|
||||
|
||||
from .rpid import verify_rp_id, verify_app_id
|
||||
from .rpid import verify_rp_id
|
||||
from .cose import CoseKey
|
||||
from .ctap2 import AttestedCredentialData
|
||||
from .client import WEBAUTHN_TYPE
|
||||
from .attestation import (
|
||||
Attestation,
|
||||
UnsupportedAttestation,
|
||||
UntrustedAttestation,
|
||||
InvalidSignature,
|
||||
verify_x509_chain,
|
||||
)
|
||||
from .utils import websafe_encode, websafe_decode
|
||||
from .webauthn import (
|
||||
CollectedClientData,
|
||||
AuthenticatorData,
|
||||
AttestationObject,
|
||||
AttestedCredentialData,
|
||||
AttestationConveyancePreference,
|
||||
PublicKeyCredentialRpEntity,
|
||||
PublicKeyCredentialUserEntity,
|
||||
AuthenticatorSelectionCriteria,
|
||||
PublicKeyCredentialDescriptor,
|
||||
PublicKeyCredentialType,
|
||||
|
@ -49,20 +45,35 @@ from .webauthn import (
|
|||
PublicKeyCredentialCreationOptions,
|
||||
PublicKeyCredentialRequestOptions,
|
||||
UserVerificationRequirement,
|
||||
ResidentKeyRequirement,
|
||||
AuthenticatorAttachment,
|
||||
RegistrationResponse,
|
||||
AuthenticationResponse,
|
||||
CredentialCreationOptions,
|
||||
CredentialRequestOptions,
|
||||
)
|
||||
|
||||
|
||||
import os
|
||||
import abc
|
||||
from cryptography.hazmat.primitives import constant_time
|
||||
from cryptography.exceptions import InvalidSignature as _InvalidSignature
|
||||
from dataclasses import replace
|
||||
from urllib.parse import urlparse
|
||||
from typing import Sequence, Mapping, Optional, Callable, Union, Tuple, Any, overload
|
||||
|
||||
import os
|
||||
import logging
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def _verify_origin_for_rp(rp_id):
|
||||
VerifyAttestation = Callable[[AttestationObject, bytes], None]
|
||||
VerifyOrigin = Callable[[str], bool]
|
||||
|
||||
|
||||
def _verify_origin_for_rp(rp_id: str) -> VerifyOrigin:
|
||||
return lambda o: verify_rp_id(rp_id, o)
|
||||
|
||||
|
||||
def _validata_challenge(challenge):
|
||||
def _validata_challenge(challenge: Optional[bytes]) -> bytes:
|
||||
if challenge is None:
|
||||
challenge = os.urandom(32)
|
||||
else:
|
||||
|
@ -73,7 +84,9 @@ def _validata_challenge(challenge):
|
|||
return challenge
|
||||
|
||||
|
||||
def to_descriptor(credential, transports=None):
|
||||
def to_descriptor(
|
||||
credential: AttestedCredentialData, transports=None
|
||||
) -> PublicKeyCredentialDescriptor:
|
||||
"""Converts an AttestedCredentialData to a PublicKeyCredentialDescriptor.
|
||||
|
||||
:param credential: AttestedCredentialData containing the credential ID to use.
|
||||
|
@ -88,84 +101,30 @@ def to_descriptor(credential, transports=None):
|
|||
)
|
||||
|
||||
|
||||
def _wrap_credentials(creds):
|
||||
def _wrap_credentials(
|
||||
creds: Optional[
|
||||
Sequence[Union[AttestedCredentialData, PublicKeyCredentialDescriptor]]
|
||||
],
|
||||
) -> Optional[Sequence[PublicKeyCredentialDescriptor]]:
|
||||
if creds is None:
|
||||
return None
|
||||
return [
|
||||
to_descriptor(c)
|
||||
if isinstance(c, AttestedCredentialData)
|
||||
else PublicKeyCredentialDescriptor._wrap(c)
|
||||
(
|
||||
to_descriptor(c)
|
||||
if isinstance(c, AttestedCredentialData)
|
||||
else PublicKeyCredentialDescriptor.from_dict(c)
|
||||
)
|
||||
for c in creds
|
||||
]
|
||||
|
||||
|
||||
def _ignore_attestation(attestation_object, client_data_hash):
|
||||
def _ignore_attestation(
|
||||
attestation_object: AttestationObject, client_data_hash: bytes
|
||||
) -> None:
|
||||
"""Ignore attestation."""
|
||||
|
||||
|
||||
def _default_attestations():
|
||||
return [
|
||||
cls()
|
||||
for cls in Attestation.__subclasses__()
|
||||
if getattr(cls, "FORMAT", "none") != "none"
|
||||
]
|
||||
|
||||
|
||||
class AttestationVerifier(abc.ABC):
|
||||
"""Base class for verifying attestation.
|
||||
|
||||
Override the ca_lookup method to provide a trusted root certificate (or chain) used
|
||||
to verify the trust path from the attestation.
|
||||
"""
|
||||
|
||||
def __init__(self, attestation_types=None):
|
||||
self._attestation_types = attestation_types or _default_attestations()
|
||||
|
||||
@abc.abstractmethod
|
||||
def ca_lookup(self, attestation_result, auth_data):
|
||||
"""Lookup a CA certificate to be used to verify a trust path.
|
||||
|
||||
:param attestation_result: The result of the attestation
|
||||
"""
|
||||
raise NotImplementedError()
|
||||
|
||||
def verify_attestation(self, attestation_object, client_data_hash):
|
||||
"""Verify attestation.
|
||||
|
||||
:param attestation_object: dict containing attestation data.
|
||||
:param client_data_hash: SHA256 hash of the ClientData bytes.
|
||||
"""
|
||||
att_verifier = UnsupportedAttestation(attestation_object.fmt)
|
||||
for at in self._attestation_types:
|
||||
if getattr(at, "FORMAT", None) == attestation_object.fmt:
|
||||
att_verifier = at
|
||||
break
|
||||
# An unsupported format causes an exception to be thrown, which
|
||||
# includes the auth_data. The caller may choose to handle this case
|
||||
# and allow the registration.
|
||||
result = att_verifier.verify(
|
||||
attestation_object.att_statement,
|
||||
attestation_object.auth_data,
|
||||
client_data_hash,
|
||||
)
|
||||
|
||||
# Lookup CA to use for trust path verification
|
||||
ca = self.ca_lookup(result, attestation_object.auth_data)
|
||||
if ca is None:
|
||||
raise UntrustedAttestation("No root found for Authneticator")
|
||||
|
||||
# Validate the trust chain
|
||||
try:
|
||||
verify_x509_chain(result.trust_path + ca)
|
||||
except InvalidSignature as e:
|
||||
raise UntrustedAttestation(e)
|
||||
|
||||
def __call__(self, *args):
|
||||
"""Allows passing an instance to Fido2Server as verify_attestation"""
|
||||
self.verify_attestation(*args)
|
||||
|
||||
|
||||
class Fido2Server(object):
|
||||
class Fido2Server:
|
||||
"""FIDO2 server.
|
||||
|
||||
:param rp: Relying party data as `PublicKeyCredentialRpEntity` instance.
|
||||
|
@ -178,27 +137,35 @@ class Fido2Server(object):
|
|||
"""
|
||||
|
||||
def __init__(
|
||||
self, rp, attestation=None, verify_origin=None, verify_attestation=None
|
||||
self,
|
||||
rp: PublicKeyCredentialRpEntity,
|
||||
attestation: Optional[AttestationConveyancePreference] = None,
|
||||
verify_origin: Optional[VerifyOrigin] = None,
|
||||
verify_attestation: Optional[VerifyAttestation] = None,
|
||||
):
|
||||
self.rp = PublicKeyCredentialRpEntity._wrap(rp)
|
||||
self.rp = PublicKeyCredentialRpEntity.from_dict(rp)
|
||||
self._verify = verify_origin or _verify_origin_for_rp(self.rp.id)
|
||||
self.timeout = None
|
||||
self.attestation = AttestationConveyancePreference._wrap(attestation)
|
||||
self.attestation = AttestationConveyancePreference(attestation)
|
||||
self.allowed_algorithms = [
|
||||
PublicKeyCredentialParameters("public-key", alg)
|
||||
PublicKeyCredentialParameters(PublicKeyCredentialType.PUBLIC_KEY, alg)
|
||||
for alg in CoseKey.supported_algorithms()
|
||||
]
|
||||
self._verify_attestation = verify_attestation or _ignore_attestation
|
||||
logger.debug(f"Fido2Server initialized for RP: {self.rp}")
|
||||
|
||||
def register_begin(
|
||||
self,
|
||||
user,
|
||||
credentials=None,
|
||||
resident_key=None,
|
||||
user_verification=None,
|
||||
authenticator_attachment=None,
|
||||
challenge=None,
|
||||
):
|
||||
user: PublicKeyCredentialUserEntity,
|
||||
credentials: Optional[
|
||||
Sequence[Union[AttestedCredentialData, PublicKeyCredentialDescriptor]]
|
||||
] = None,
|
||||
resident_key_requirement: Optional[ResidentKeyRequirement] = None,
|
||||
user_verification: Optional[UserVerificationRequirement] = None,
|
||||
authenticator_attachment: Optional[AuthenticatorAttachment] = None,
|
||||
challenge: Optional[bytes] = None,
|
||||
extensions=None,
|
||||
) -> Tuple[CredentialCreationOptions, Any]:
|
||||
"""Return a PublicKeyCredentialCreationOptions registration object and
|
||||
the internal state dictionary that needs to be passed as is to the
|
||||
corresponding `register_complete` call.
|
||||
|
@ -206,7 +173,7 @@ class Fido2Server(object):
|
|||
:param user: The dict containing the user data.
|
||||
:param credentials: The list of previously registered credentials, these can be
|
||||
of type AttestedCredentialData, or PublicKeyCredentialDescriptor.
|
||||
:param resident_key: True to request a resident credential.
|
||||
:param resident_key_requirement: The desired RESIDENT_KEY_REQUIREMENT level.
|
||||
:param user_verification: The desired USER_VERIFICATION level.
|
||||
:param authenticator_attachment: The desired AUTHENTICATOR_ATTACHMENT
|
||||
or None to not provide a preference (and get both types).
|
||||
|
@ -217,30 +184,62 @@ class Fido2Server(object):
|
|||
raise ValueError("Server has no allowed algorithms.")
|
||||
|
||||
challenge = _validata_challenge(challenge)
|
||||
|
||||
descriptors = _wrap_credentials(credentials)
|
||||
state = self._make_internal_state(challenge, user_verification)
|
||||
logger.debug(
|
||||
"Starting new registration, existing credentials: "
|
||||
+ ", ".join(d.id.hex() for d in descriptors or [])
|
||||
)
|
||||
|
||||
return (
|
||||
{
|
||||
"publicKey": PublicKeyCredentialCreationOptions(
|
||||
CredentialCreationOptions(
|
||||
PublicKeyCredentialCreationOptions(
|
||||
self.rp,
|
||||
user,
|
||||
challenge,
|
||||
self.allowed_algorithms,
|
||||
self.timeout,
|
||||
_wrap_credentials(credentials),
|
||||
AuthenticatorSelectionCriteria(
|
||||
authenticator_attachment, resident_key, user_verification
|
||||
)
|
||||
if any((authenticator_attachment, resident_key, user_verification))
|
||||
else None,
|
||||
descriptors,
|
||||
(
|
||||
AuthenticatorSelectionCriteria(
|
||||
authenticator_attachment,
|
||||
resident_key_requirement,
|
||||
user_verification,
|
||||
)
|
||||
if any(
|
||||
(
|
||||
authenticator_attachment,
|
||||
resident_key_requirement,
|
||||
user_verification,
|
||||
)
|
||||
)
|
||||
else None
|
||||
),
|
||||
self.attestation,
|
||||
extensions,
|
||||
)
|
||||
},
|
||||
),
|
||||
state,
|
||||
)
|
||||
|
||||
def register_complete(self, state, client_data, attestation_object):
|
||||
@overload
|
||||
def register_complete(
|
||||
self,
|
||||
state,
|
||||
response: Union[RegistrationResponse, Mapping[str, Any]],
|
||||
) -> AuthenticatorData:
|
||||
pass
|
||||
|
||||
@overload
|
||||
def register_complete(
|
||||
self,
|
||||
state,
|
||||
client_data: CollectedClientData,
|
||||
attestation_object: AttestationObject,
|
||||
) -> AuthenticatorData:
|
||||
pass
|
||||
|
||||
def register_complete(self, state, *args, **kwargs):
|
||||
"""Verify the correctness of the registration data received from
|
||||
the client.
|
||||
|
||||
|
@ -248,11 +247,30 @@ class Fido2Server(object):
|
|||
`register_begin`.
|
||||
:param client_data: The client data.
|
||||
:param attestation_object: The attestation object.
|
||||
:return: The authenticator data"""
|
||||
if client_data.get("type") != WEBAUTHN_TYPE.MAKE_CREDENTIAL:
|
||||
raise ValueError("Incorrect type in ClientData.")
|
||||
if not self._verify(client_data.get("origin")):
|
||||
raise ValueError("Invalid origin in ClientData.")
|
||||
:return: The authenticator data
|
||||
"""
|
||||
response = None
|
||||
if len(args) == 1 and not kwargs:
|
||||
response = args[0]
|
||||
elif set(kwargs) == {"response"} and not args:
|
||||
response = kwargs["response"]
|
||||
if response:
|
||||
registration = RegistrationResponse.from_dict(response)
|
||||
client_data = registration.response.client_data
|
||||
attestation_object = registration.response.attestation_object
|
||||
else:
|
||||
names = ["client_data", "attestation_object"]
|
||||
pos = dict(zip(names, args))
|
||||
data = {**kwargs, **pos}
|
||||
if set(kwargs) & set(pos) or set(data) != set(names):
|
||||
raise TypeError("incorrect arguments passed to register_complete()")
|
||||
client_data = data[names[0]]
|
||||
attestation_object = data[names[1]]
|
||||
|
||||
if client_data.type != CollectedClientData.TYPE.CREATE:
|
||||
raise ValueError("Incorrect type in CollectedClientData.")
|
||||
if not self._verify(client_data.origin):
|
||||
raise ValueError("Invalid origin in CollectedClientData.")
|
||||
if not constant_time.bytes_eq(
|
||||
websafe_decode(state["challenge"]), client_data.challenge
|
||||
):
|
||||
|
@ -273,15 +291,28 @@ class Fido2Server(object):
|
|||
)
|
||||
|
||||
if self.attestation not in (None, AttestationConveyancePreference.NONE):
|
||||
logger.debug(f"Verifying attestation of type {attestation_object.fmt}")
|
||||
self._verify_attestation(attestation_object, client_data.hash)
|
||||
# We simply ignore attestation if self.attestation == 'none', as not all
|
||||
# clients strip the attestation.
|
||||
|
||||
return attestation_object.auth_data
|
||||
auth_data = attestation_object.auth_data
|
||||
assert auth_data.credential_data is not None # nosec
|
||||
logger.info(
|
||||
"New credential registered: "
|
||||
+ auth_data.credential_data.credential_id.hex()
|
||||
)
|
||||
return auth_data
|
||||
|
||||
def authenticate_begin(
|
||||
self, credentials=None, user_verification=None, challenge=None
|
||||
):
|
||||
self,
|
||||
credentials: Optional[
|
||||
Sequence[Union[AttestedCredentialData, PublicKeyCredentialDescriptor]]
|
||||
] = None,
|
||||
user_verification: Optional[UserVerificationRequirement] = None,
|
||||
challenge: Optional[bytes] = None,
|
||||
extensions=None,
|
||||
) -> Tuple[CredentialRequestOptions, Any]:
|
||||
"""Return a PublicKeyCredentialRequestOptions assertion object and the internal
|
||||
state dictionary that needs to be passed as is to the corresponding
|
||||
`authenticate_complete` call.
|
||||
|
@ -293,25 +324,52 @@ class Fido2Server(object):
|
|||
OS-specific random bytes.
|
||||
:return: Assertion data, internal state."""
|
||||
challenge = _validata_challenge(challenge)
|
||||
|
||||
descriptors = _wrap_credentials(credentials)
|
||||
state = self._make_internal_state(challenge, user_verification)
|
||||
if descriptors is None:
|
||||
logger.debug("Starting new authentication without credentials")
|
||||
else:
|
||||
logger.debug(
|
||||
"Starting new authentication, for credentials: "
|
||||
+ ", ".join(d.id.hex() for d in descriptors)
|
||||
)
|
||||
|
||||
return (
|
||||
{
|
||||
"publicKey": PublicKeyCredentialRequestOptions(
|
||||
CredentialRequestOptions(
|
||||
PublicKeyCredentialRequestOptions(
|
||||
challenge,
|
||||
self.timeout,
|
||||
self.rp.id,
|
||||
_wrap_credentials(credentials),
|
||||
descriptors,
|
||||
user_verification,
|
||||
extensions,
|
||||
)
|
||||
},
|
||||
),
|
||||
state,
|
||||
)
|
||||
|
||||
@overload
|
||||
def authenticate_complete(
|
||||
self, state, credentials, credential_id, client_data, auth_data, signature
|
||||
):
|
||||
self,
|
||||
state,
|
||||
credentials: Sequence[AttestedCredentialData],
|
||||
response: Union[AuthenticationResponse, Mapping[str, Any]],
|
||||
) -> AttestedCredentialData:
|
||||
pass
|
||||
|
||||
@overload
|
||||
def authenticate_complete(
|
||||
self,
|
||||
state,
|
||||
credentials: Sequence[AttestedCredentialData],
|
||||
credential_id: bytes,
|
||||
client_data: CollectedClientData,
|
||||
auth_data: AuthenticatorData,
|
||||
signature: bytes,
|
||||
) -> AttestedCredentialData:
|
||||
pass
|
||||
|
||||
def authenticate_complete(self, state, credentials, *args, **kwargs):
|
||||
"""Verify the correctness of the assertion data received from
|
||||
the client.
|
||||
|
||||
|
@ -322,10 +380,33 @@ class Fido2Server(object):
|
|||
:param client_data: The client data.
|
||||
:param auth_data: The authenticator data.
|
||||
:param signature: The signature provided by the client."""
|
||||
if client_data.get("type") != WEBAUTHN_TYPE.GET_ASSERTION:
|
||||
raise ValueError("Incorrect type in ClientData.")
|
||||
if not self._verify(client_data.get("origin")):
|
||||
raise ValueError("Invalid origin in ClientData.")
|
||||
|
||||
response = None
|
||||
if len(args) == 1 and not kwargs:
|
||||
response = args[0]
|
||||
elif set(kwargs) == {"response"} and not args:
|
||||
response = kwargs["response"]
|
||||
if response:
|
||||
authentication = AuthenticationResponse.from_dict(response)
|
||||
credential_id = authentication.id
|
||||
client_data = authentication.response.client_data
|
||||
auth_data = authentication.response.authenticator_data
|
||||
signature = authentication.response.signature
|
||||
else:
|
||||
names = ["credential_id", "client_data", "auth_data", "signature"]
|
||||
pos = dict(zip(names, args))
|
||||
data = {**kwargs, **pos}
|
||||
if set(kwargs) & set(pos) or set(data) != set(names):
|
||||
raise TypeError("incorrect arguments passed to authenticate_complete()")
|
||||
credential_id = data[names[0]]
|
||||
client_data = data[names[1]]
|
||||
auth_data = data[names[2]]
|
||||
signature = data[names[3]]
|
||||
|
||||
if client_data.type != CollectedClientData.TYPE.GET:
|
||||
raise ValueError("Incorrect type in CollectedClientData.")
|
||||
if not self._verify(client_data.origin):
|
||||
raise ValueError("Invalid origin in CollectedClientData.")
|
||||
if websafe_decode(state["challenge"]) != client_data.challenge:
|
||||
raise ValueError("Wrong challenge in response.")
|
||||
if not constant_time.bytes_eq(self.rp.id_hash, auth_data.rp_id_hash):
|
||||
|
@ -347,17 +428,39 @@ class Fido2Server(object):
|
|||
cred.public_key.verify(auth_data + client_data.hash, signature)
|
||||
except _InvalidSignature:
|
||||
raise ValueError("Invalid signature.")
|
||||
logger.info(f"Credential authenticated: {credential_id.hex()}")
|
||||
return cred
|
||||
raise ValueError("Unknown credential ID.")
|
||||
|
||||
@staticmethod
|
||||
def _make_internal_state(challenge, user_verification):
|
||||
def _make_internal_state(
|
||||
challenge: bytes, user_verification: Optional[UserVerificationRequirement]
|
||||
):
|
||||
return {
|
||||
"challenge": websafe_encode(challenge),
|
||||
"user_verification": user_verification,
|
||||
}
|
||||
|
||||
|
||||
def verify_app_id(app_id: str, origin: str) -> bool:
|
||||
"""Checks if a FIDO U2F App ID is usable for a given origin.
|
||||
|
||||
:param app_id: The App ID to validate.
|
||||
:param origin: The origin of the request.
|
||||
:return: True if the App ID is usable by the origin, False if not.
|
||||
"""
|
||||
url = urlparse(app_id)
|
||||
hostname = url.hostname
|
||||
# Note that FIDO U2F requires a secure context, i.e. an origin with https scheme.
|
||||
# However, most browsers also treat http://localhost as a secure context. See
|
||||
# https://groups.google.com/a/chromium.org/g/blink-dev/c/RC9dSw-O3fE/m/E3_0XaT0BAAJ
|
||||
if url.scheme != "https" and (url.scheme, hostname) != ("http", "localhost"):
|
||||
return False
|
||||
if not hostname:
|
||||
return False
|
||||
return verify_rp_id(hostname, origin)
|
||||
|
||||
|
||||
class U2FFido2Server(Fido2Server):
|
||||
"""Fido2Server which can be used with existing U2F credentials.
|
||||
|
||||
|
@ -367,33 +470,42 @@ class U2FFido2Server(Fido2Server):
|
|||
|
||||
:param app_id: The appId which was used for U2F registration.
|
||||
:param verify_u2f_origin: (optional) Alternative function to validate an
|
||||
origin for U2F credentials..
|
||||
origin for U2F credentials.
|
||||
For other parameters, see Fido2Server.
|
||||
"""
|
||||
|
||||
def __init__(self, app_id, rp, verify_u2f_origin=None, *args, **kwargs):
|
||||
super(U2FFido2Server, self).__init__(rp, *args, **kwargs)
|
||||
def __init__(
|
||||
self,
|
||||
app_id: str,
|
||||
rp: PublicKeyCredentialRpEntity,
|
||||
verify_u2f_origin: Optional[VerifyOrigin] = None,
|
||||
*args,
|
||||
**kwargs,
|
||||
):
|
||||
super().__init__(rp, *args, **kwargs)
|
||||
if verify_u2f_origin:
|
||||
kwargs["verify_origin"] = verify_u2f_origin
|
||||
else:
|
||||
kwargs["verify_origin"] = lambda o: verify_app_id(app_id, o)
|
||||
self._app_id = app_id
|
||||
self._app_id_server = Fido2Server(
|
||||
PublicKeyCredentialRpEntity(app_id, self.rp.name), *args, **kwargs
|
||||
replace(PublicKeyCredentialRpEntity.from_dict(rp), id=app_id),
|
||||
*args,
|
||||
**kwargs,
|
||||
)
|
||||
|
||||
def register_begin(self, *args, **kwargs):
|
||||
req, state = super(U2FFido2Server, self).register_begin(*args, **kwargs)
|
||||
req["publicKey"].setdefault("extensions", {})["appidExclude"] = self._app_id
|
||||
kwargs.setdefault("extensions", {})["appidExclude"] = self._app_id
|
||||
req, state = super().register_begin(*args, **kwargs)
|
||||
return req, state
|
||||
|
||||
def authenticate_begin(self, *args, **kwargs):
|
||||
req, state = super(U2FFido2Server, self).authenticate_begin(*args, **kwargs)
|
||||
req["publicKey"].setdefault("extensions", {})["appid"] = self._app_id
|
||||
kwargs.setdefault("extensions", {})["appid"] = self._app_id
|
||||
req, state = super().authenticate_begin(*args, **kwargs)
|
||||
return req, state
|
||||
|
||||
def authenticate_complete(self, *args, **kwargs):
|
||||
try:
|
||||
return super(U2FFido2Server, self).authenticate_complete(*args, **kwargs)
|
||||
return super().authenticate_complete(*args, **kwargs)
|
||||
except ValueError:
|
||||
return self._app_id_server.authenticate_complete(*args, **kwargs)
|
||||
|
|
182
fido2/utils.py
182
fido2/utils.py
|
@ -30,13 +30,26 @@
|
|||
This module contains various functions used throughout the rest of the project.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from base64 import urlsafe_b64decode, urlsafe_b64encode
|
||||
from cryptography.hazmat.backends import default_backend
|
||||
from cryptography.hazmat.primitives import hmac, hashes
|
||||
from binascii import b2a_hex
|
||||
from io import BytesIO
|
||||
import six
|
||||
from dataclasses import fields, Field
|
||||
from abc import abstractmethod
|
||||
from typing import (
|
||||
Union,
|
||||
Optional,
|
||||
Sequence,
|
||||
Mapping,
|
||||
Any,
|
||||
TypeVar,
|
||||
Hashable,
|
||||
get_type_hints,
|
||||
)
|
||||
import struct
|
||||
import warnings
|
||||
|
||||
__all__ = [
|
||||
"websafe_encode",
|
||||
|
@ -48,7 +61,10 @@ __all__ = [
|
|||
]
|
||||
|
||||
|
||||
def sha256(data):
|
||||
LOG_LEVEL_TRAFFIC = 5
|
||||
|
||||
|
||||
def sha256(data: bytes) -> bytes:
|
||||
"""Produces a SHA256 hash of the input.
|
||||
|
||||
:param data: The input data to hash.
|
||||
|
@ -59,7 +75,7 @@ def sha256(data):
|
|||
return h.finalize()
|
||||
|
||||
|
||||
def hmac_sha256(key, data):
|
||||
def hmac_sha256(key: bytes, data: bytes) -> bytes:
|
||||
"""Performs an HMAC-SHA256 operation on the given data, using the given key.
|
||||
|
||||
:param key: The key to use.
|
||||
|
@ -71,16 +87,16 @@ def hmac_sha256(key, data):
|
|||
return h.finalize()
|
||||
|
||||
|
||||
def bytes2int(value):
|
||||
def bytes2int(value: bytes) -> int:
|
||||
"""Parses an arbitrarily sized integer from a byte string.
|
||||
|
||||
:param value: A byte string encoding a big endian unsigned integer.
|
||||
:return: The parsed int.
|
||||
"""
|
||||
return int(b2a_hex(value), 16)
|
||||
return int.from_bytes(value, "big")
|
||||
|
||||
|
||||
def int2bytes(value, minlen=-1):
|
||||
def int2bytes(value: int, minlen: int = -1) -> bytes:
|
||||
"""Encodes an int as a byte string.
|
||||
|
||||
:param value: The integer value to encode.
|
||||
|
@ -93,24 +109,31 @@ def int2bytes(value, minlen=-1):
|
|||
value >>= 8
|
||||
ba.append(value)
|
||||
ba.extend([0] * (minlen - len(ba)))
|
||||
return bytes(bytearray(reversed(ba)))
|
||||
return bytes(reversed(ba))
|
||||
|
||||
|
||||
def websafe_decode(data):
|
||||
"""Decodes a websafe-base64 encoded string (bytes or str).
|
||||
def websafe_decode(data: Union[str, bytes]) -> bytes:
|
||||
"""Decodes a websafe-base64 encoded string.
|
||||
See: "Base 64 Encoding with URL and Filename Safe Alphabet" from Section 5
|
||||
in RFC4648 without padding.
|
||||
|
||||
:param data: The input to decode.
|
||||
:return: The decoded bytes.
|
||||
"""
|
||||
if isinstance(data, six.text_type):
|
||||
if isinstance(data, str):
|
||||
data = data.encode("ascii")
|
||||
else:
|
||||
warnings.warn(
|
||||
"Calling websafe_decode on a byte value is deprecated, "
|
||||
"and will no longer be allowed starting in python-fido2 2.0",
|
||||
DeprecationWarning,
|
||||
)
|
||||
|
||||
data += b"=" * (-len(data) % 4)
|
||||
return urlsafe_b64decode(data)
|
||||
|
||||
|
||||
def websafe_encode(data):
|
||||
def websafe_encode(data: bytes) -> str:
|
||||
"""Encodes a byte string into websafe-base64 encoding.
|
||||
|
||||
:param data: The input to encode.
|
||||
|
@ -122,7 +145,7 @@ def websafe_encode(data):
|
|||
class ByteBuffer(BytesIO):
|
||||
"""BytesIO-like object with the ability to unpack values."""
|
||||
|
||||
def unpack(self, fmt):
|
||||
def unpack(self, fmt: str):
|
||||
"""Reads and unpacks a value from the buffer.
|
||||
|
||||
:param fmt: A struct format string yielding a single value.
|
||||
|
@ -131,13 +154,140 @@ class ByteBuffer(BytesIO):
|
|||
s = struct.Struct(fmt)
|
||||
return s.unpack(self.read(s.size))[0]
|
||||
|
||||
def read(self, size=-1):
|
||||
def read(self, size: Optional[int] = -1) -> bytes:
|
||||
"""Like BytesIO.read(), but checks the number of bytes read and raises an error
|
||||
if fewer bytes were read than expected.
|
||||
"""
|
||||
data = super(ByteBuffer, self).read(size)
|
||||
if size > 0 and len(data) != size:
|
||||
data = super().read(size)
|
||||
if size is not None and size > 0 and len(data) != size:
|
||||
raise ValueError(
|
||||
"Not enough data to read (need: %d, had: %d)." % (size, len(data))
|
||||
)
|
||||
return data
|
||||
|
||||
|
||||
def _snake2camel(name: str) -> str:
|
||||
parts = name.split("_")
|
||||
return parts[0] + "".join(p.title() for p in parts[1:])
|
||||
|
||||
|
||||
def _parse_value(t, value):
|
||||
if value is None:
|
||||
return None
|
||||
|
||||
if Optional[t] == t: # Optional, get the type
|
||||
t = t.__args__[0]
|
||||
|
||||
# Handle list of values
|
||||
if issubclass(getattr(t, "__origin__", object), Sequence):
|
||||
t = t.__args__[0]
|
||||
return [_parse_value(t, v) for v in value]
|
||||
|
||||
# Handle Mappings
|
||||
if issubclass(getattr(t, "__origin__", object), Mapping) and isinstance(
|
||||
value, Mapping
|
||||
):
|
||||
return value
|
||||
|
||||
# Check if type is already correct
|
||||
try:
|
||||
if isinstance(value, t):
|
||||
return value
|
||||
except TypeError:
|
||||
pass
|
||||
|
||||
# Check for subclass of _DataClassMapping
|
||||
try:
|
||||
is_dataclass = issubclass(t, _DataClassMapping)
|
||||
except TypeError:
|
||||
is_dataclass = False
|
||||
|
||||
if is_dataclass:
|
||||
# Recursively call from_dict for nested _DataClassMappings
|
||||
return t.from_dict(value)
|
||||
|
||||
# Convert to enum values, other wrappers
|
||||
return t(value)
|
||||
|
||||
|
||||
_T = TypeVar("_T", bound=Hashable)
|
||||
|
||||
|
||||
class _DataClassMapping(Mapping[_T, Any]):
|
||||
# TODO: This requires Python 3.9, and fixes the tpye errors we now ignore
|
||||
# __dataclass_fields__: ClassVar[Dict[str, Field[Any]]]
|
||||
|
||||
def __post_init__(self):
|
||||
hints = get_type_hints(type(self))
|
||||
for f in fields(self): # type: ignore
|
||||
value = getattr(self, f.name)
|
||||
if value is None:
|
||||
continue
|
||||
try:
|
||||
value = _parse_value(hints[f.name], value)
|
||||
except (TypeError, KeyError, ValueError):
|
||||
raise ValueError(
|
||||
f"Error parsing field {f.name} for {self.__class__.__name__}"
|
||||
)
|
||||
object.__setattr__(self, f.name, value)
|
||||
|
||||
@classmethod
|
||||
@abstractmethod
|
||||
def _get_field_key(cls, field: Field) -> _T:
|
||||
raise NotImplementedError()
|
||||
|
||||
def __getitem__(self, key):
|
||||
for f in fields(self): # type: ignore
|
||||
if key == self._get_field_key(f):
|
||||
value = getattr(self, f.name)
|
||||
serialize = f.metadata.get("serialize")
|
||||
if serialize:
|
||||
return serialize(value)
|
||||
if isinstance(value, _DataClassMapping):
|
||||
return dict(value)
|
||||
if isinstance(value, Sequence) and all(
|
||||
isinstance(x, _DataClassMapping) for x in value
|
||||
):
|
||||
return [dict(x) for x in value]
|
||||
return value
|
||||
raise KeyError(key)
|
||||
|
||||
def __iter__(self):
|
||||
return (
|
||||
self._get_field_key(f)
|
||||
for f in fields(self) # type: ignore
|
||||
if getattr(self, f.name) is not None
|
||||
)
|
||||
|
||||
def __len__(self):
|
||||
return len(list(iter(self)))
|
||||
|
||||
@classmethod
|
||||
def from_dict(cls, data: Optional[Mapping[_T, Any]]):
|
||||
if data is None:
|
||||
return None
|
||||
if isinstance(data, cls):
|
||||
return data
|
||||
if not isinstance(data, Mapping):
|
||||
raise TypeError(
|
||||
f"{cls.__name__}.from_dict called with non-Mapping data of type"
|
||||
f"{type(data)}"
|
||||
)
|
||||
|
||||
kwargs = {}
|
||||
for f in fields(cls): # type: ignore
|
||||
key = cls._get_field_key(f)
|
||||
if key in data:
|
||||
value = data[key]
|
||||
if value is not None:
|
||||
deserialize = f.metadata.get("deserialize")
|
||||
if deserialize:
|
||||
value = deserialize(value)
|
||||
kwargs[f.name] = value
|
||||
return cls(**kwargs)
|
||||
|
||||
|
||||
class _CamelCaseDataObject(_DataClassMapping[str]):
|
||||
@classmethod
|
||||
def _get_field_key(cls, field: Field) -> str:
|
||||
return field.metadata.get("name", _snake2camel(field.name))
|
||||
|
|
|
@ -25,12 +25,23 @@
|
|||
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
||||
# POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
from __future__ import absolute_import, unicode_literals
|
||||
from __future__ import annotations
|
||||
|
||||
from .utils import sha256
|
||||
from enum import Enum, unique
|
||||
import six
|
||||
import re
|
||||
from . import cbor
|
||||
from .cose import CoseKey, ES256
|
||||
from .utils import (
|
||||
sha256,
|
||||
websafe_decode,
|
||||
websafe_encode,
|
||||
ByteBuffer,
|
||||
_CamelCaseDataObject,
|
||||
)
|
||||
from .features import webauthn_json_mapping
|
||||
from enum import Enum, EnumMeta, unique, IntFlag
|
||||
from dataclasses import dataclass, field
|
||||
from typing import Any, Mapping, Optional, Sequence, Tuple, Union, cast
|
||||
import struct
|
||||
import json
|
||||
|
||||
"""
|
||||
Data classes based on the W3C WebAuthn specification (https://www.w3.org/TR/webauthn/).
|
||||
|
@ -38,13 +49,370 @@ Data classes based on the W3C WebAuthn specification (https://www.w3.org/TR/weba
|
|||
See the specification for a description and details on their usage.
|
||||
"""
|
||||
|
||||
# Binary types
|
||||
|
||||
|
||||
class Aaguid(bytes):
|
||||
def __init__(self, data: bytes):
|
||||
if len(self) != 16:
|
||||
raise ValueError("AAGUID must be 16 bytes")
|
||||
|
||||
def __bool__(self):
|
||||
return self != Aaguid.NONE
|
||||
|
||||
def __str__(self):
|
||||
h = self.hex()
|
||||
return f"{h[:8]}-{h[8:12]}-{h[12:16]}-{h[16:20]}-{h[20:]}"
|
||||
|
||||
def __repr__(self):
|
||||
return f"AAGUID({str(self)})"
|
||||
|
||||
class _StringEnum(six.text_type, Enum):
|
||||
@classmethod
|
||||
def _wrap(cls, value):
|
||||
if value is None:
|
||||
return None
|
||||
return cls(value)
|
||||
def parse(cls, value: str) -> Aaguid:
|
||||
return cls.fromhex(value.replace("-", ""))
|
||||
|
||||
NONE: Aaguid
|
||||
|
||||
|
||||
# Special instance of AAGUID used when there is no AAGUID
|
||||
Aaguid.NONE = Aaguid(b"\0" * 16)
|
||||
|
||||
|
||||
@dataclass(init=False, frozen=True)
|
||||
class AttestedCredentialData(bytes):
|
||||
aaguid: Aaguid
|
||||
credential_id: bytes
|
||||
public_key: CoseKey
|
||||
|
||||
def __init__(self, _: bytes):
|
||||
super().__init__()
|
||||
|
||||
parsed = AttestedCredentialData._parse(self)
|
||||
object.__setattr__(self, "aaguid", parsed[0])
|
||||
object.__setattr__(self, "credential_id", parsed[1])
|
||||
object.__setattr__(self, "public_key", parsed[2])
|
||||
if parsed[3]:
|
||||
raise ValueError("Wrong length")
|
||||
|
||||
def __str__(self): # Override default implementation from bytes.
|
||||
return repr(self)
|
||||
|
||||
@staticmethod
|
||||
def _parse(data: bytes) -> Tuple[bytes, bytes, CoseKey, bytes]:
|
||||
"""Parse the components of an AttestedCredentialData from a binary
|
||||
string, and return them.
|
||||
|
||||
:param data: A binary string containing an attested credential data.
|
||||
:return: AAGUID, credential ID, public key, and remaining data.
|
||||
"""
|
||||
reader = ByteBuffer(data)
|
||||
aaguid = Aaguid(reader.read(16))
|
||||
cred_id = reader.read(reader.unpack(">H"))
|
||||
pub_key, rest = cbor.decode_from(reader.read())
|
||||
return aaguid, cred_id, CoseKey.parse(pub_key), rest
|
||||
|
||||
@classmethod
|
||||
def create(
|
||||
cls, aaguid: bytes, credential_id: bytes, public_key: CoseKey
|
||||
) -> AttestedCredentialData:
|
||||
"""Create an AttestedCredentialData by providing its components.
|
||||
|
||||
:param aaguid: The AAGUID of the authenticator.
|
||||
:param credential_id: The binary ID of the credential.
|
||||
:param public_key: A COSE formatted public key.
|
||||
:return: The attested credential data.
|
||||
"""
|
||||
return cls(
|
||||
aaguid
|
||||
+ struct.pack(">H", len(credential_id))
|
||||
+ credential_id
|
||||
+ cbor.encode(public_key)
|
||||
)
|
||||
|
||||
@classmethod
|
||||
def unpack_from(cls, data: bytes) -> Tuple[AttestedCredentialData, bytes]:
|
||||
"""Unpack an AttestedCredentialData from a byte string, returning it and
|
||||
any remaining data.
|
||||
|
||||
:param data: A binary string containing an attested credential data.
|
||||
:return: The parsed AttestedCredentialData, and any remaining data from
|
||||
the input.
|
||||
"""
|
||||
aaguid, cred_id, pub_key, rest = cls._parse(data)
|
||||
return cls.create(aaguid, cred_id, pub_key), rest
|
||||
|
||||
@classmethod
|
||||
def from_ctap1(cls, key_handle: bytes, public_key: bytes) -> AttestedCredentialData:
|
||||
"""Create an AttestatedCredentialData from a CTAP1 RegistrationData instance.
|
||||
|
||||
:param key_handle: The CTAP1 credential key_handle.
|
||||
:type key_handle: bytes
|
||||
:param public_key: The CTAP1 65 byte public key.
|
||||
:type public_key: bytes
|
||||
:return: The credential data, using an all-zero AAGUID.
|
||||
:rtype: AttestedCredentialData
|
||||
"""
|
||||
return cls.create(Aaguid.NONE, key_handle, ES256.from_ctap1(public_key))
|
||||
|
||||
|
||||
@dataclass(init=False, frozen=True)
|
||||
class AuthenticatorData(bytes):
|
||||
"""Binary encoding of the authenticator data.
|
||||
|
||||
:param _: The binary representation of the authenticator data.
|
||||
:ivar rp_id_hash: SHA256 hash of the RP ID.
|
||||
:ivar flags: The flags of the authenticator data, see
|
||||
AuthenticatorData.FLAG.
|
||||
:ivar counter: The signature counter of the authenticator.
|
||||
:ivar credential_data: Attested credential data, if available.
|
||||
:ivar extensions: Authenticator extensions, if available.
|
||||
"""
|
||||
|
||||
class FLAG(IntFlag):
|
||||
"""Authenticator data flags
|
||||
|
||||
See https://www.w3.org/TR/webauthn/#sec-authenticator-data for details
|
||||
"""
|
||||
|
||||
# Names used in WebAuthn
|
||||
UP = 0x01
|
||||
UV = 0x04
|
||||
BE = 0x08
|
||||
BS = 0x10
|
||||
AT = 0x40
|
||||
ED = 0x80
|
||||
|
||||
# Aliases (for historical purposes)
|
||||
USER_PRESENT = 0x01
|
||||
USER_VERIFIED = 0x04
|
||||
BACKUP_ELIGIBILITY = 0x08
|
||||
BACKUP_STATE = 0x10
|
||||
ATTESTED = 0x40
|
||||
EXTENSION_DATA = 0x80
|
||||
|
||||
rp_id_hash: bytes
|
||||
flags: AuthenticatorData.FLAG
|
||||
counter: int
|
||||
credential_data: Optional[AttestedCredentialData]
|
||||
extensions: Optional[Mapping]
|
||||
|
||||
def __init__(self, _: bytes):
|
||||
super().__init__()
|
||||
|
||||
reader = ByteBuffer(self)
|
||||
object.__setattr__(self, "rp_id_hash", reader.read(32))
|
||||
object.__setattr__(self, "flags", reader.unpack("B"))
|
||||
object.__setattr__(self, "counter", reader.unpack(">I"))
|
||||
rest = reader.read()
|
||||
|
||||
if self.flags & AuthenticatorData.FLAG.AT:
|
||||
credential_data, rest = AttestedCredentialData.unpack_from(rest)
|
||||
else:
|
||||
credential_data = None
|
||||
object.__setattr__(self, "credential_data", credential_data)
|
||||
|
||||
if self.flags & AuthenticatorData.FLAG.ED:
|
||||
extensions, rest = cbor.decode_from(rest)
|
||||
else:
|
||||
extensions = None
|
||||
object.__setattr__(self, "extensions", extensions)
|
||||
|
||||
if rest:
|
||||
raise ValueError("Wrong length")
|
||||
|
||||
def __str__(self): # Override default implementation from bytes.
|
||||
return repr(self)
|
||||
|
||||
@classmethod
|
||||
def create(
|
||||
cls,
|
||||
rp_id_hash: bytes,
|
||||
flags: AuthenticatorData.FLAG,
|
||||
counter: int,
|
||||
credential_data: bytes = b"",
|
||||
extensions: Optional[Mapping] = None,
|
||||
):
|
||||
"""Create an AuthenticatorData instance.
|
||||
|
||||
:param rp_id_hash: SHA256 hash of the RP ID.
|
||||
:param flags: Flags of the AuthenticatorData.
|
||||
:param counter: Signature counter of the authenticator data.
|
||||
:param credential_data: Authenticated credential data (only if attested
|
||||
credential data flag is set).
|
||||
:param extensions: Authenticator extensions (only if ED flag is set).
|
||||
:return: The authenticator data.
|
||||
"""
|
||||
return cls(
|
||||
rp_id_hash
|
||||
+ struct.pack(">BI", flags, counter)
|
||||
+ credential_data
|
||||
+ (cbor.encode(extensions) if extensions is not None else b"")
|
||||
)
|
||||
|
||||
def is_user_present(self) -> bool:
|
||||
"""Return true if the User Present flag is set."""
|
||||
return bool(self.flags & AuthenticatorData.FLAG.UP)
|
||||
|
||||
def is_user_verified(self) -> bool:
|
||||
"""Return true if the User Verified flag is set."""
|
||||
return bool(self.flags & AuthenticatorData.FLAG.UV)
|
||||
|
||||
def is_backup_eligible(self) -> bool:
|
||||
"""Return true if the Backup Eligibility flag is set."""
|
||||
return bool(self.flags & AuthenticatorData.FLAG.BE)
|
||||
|
||||
def is_backed_up(self) -> bool:
|
||||
"""Return true if the Backup State flag is set."""
|
||||
return bool(self.flags & AuthenticatorData.FLAG.BS)
|
||||
|
||||
def is_attested(self) -> bool:
|
||||
"""Return true if the Attested credential data flag is set."""
|
||||
return bool(self.flags & AuthenticatorData.FLAG.AT)
|
||||
|
||||
def has_extension_data(self) -> bool:
|
||||
"""Return true if the Extenstion data flag is set."""
|
||||
return bool(self.flags & AuthenticatorData.FLAG.ED)
|
||||
|
||||
|
||||
@dataclass(init=False, frozen=True)
|
||||
class AttestationObject(bytes): # , Mapping[str, Any]):
|
||||
"""Binary CBOR encoded attestation object.
|
||||
|
||||
:param _: The binary representation of the attestation object.
|
||||
:ivar fmt: The type of attestation used.
|
||||
:ivar auth_data: The attested authenticator data.
|
||||
:ivar att_statement: The attestation statement.
|
||||
"""
|
||||
|
||||
fmt: str
|
||||
auth_data: AuthenticatorData
|
||||
att_stmt: Mapping[str, Any]
|
||||
|
||||
def __init__(self, _: bytes):
|
||||
super().__init__()
|
||||
|
||||
data = cast(Mapping[str, Any], cbor.decode(bytes(self)))
|
||||
object.__setattr__(self, "fmt", data["fmt"])
|
||||
object.__setattr__(self, "auth_data", AuthenticatorData(data["authData"]))
|
||||
object.__setattr__(self, "att_stmt", data["attStmt"])
|
||||
|
||||
def __str__(self): # Override default implementation from bytes.
|
||||
return repr(self)
|
||||
|
||||
@classmethod
|
||||
def create(
|
||||
cls, fmt: str, auth_data: AuthenticatorData, att_stmt: Mapping[str, Any]
|
||||
) -> AttestationObject:
|
||||
return cls(
|
||||
cbor.encode({"fmt": fmt, "authData": auth_data, "attStmt": att_stmt})
|
||||
)
|
||||
|
||||
@classmethod
|
||||
def from_ctap1(cls, app_param: bytes, registration) -> AttestationObject:
|
||||
"""Create an AttestationObject from a CTAP1 RegistrationData instance.
|
||||
|
||||
:param app_param: SHA256 hash of the RP ID used for the CTAP1 request.
|
||||
:type app_param: bytes
|
||||
:param registration: The CTAP1 registration data.
|
||||
:type registration: RegistrationData
|
||||
:return: The attestation object, using the "fido-u2f" format.
|
||||
:rtype: AttestationObject
|
||||
"""
|
||||
return cls.create(
|
||||
"fido-u2f",
|
||||
AuthenticatorData.create(
|
||||
app_param,
|
||||
AuthenticatorData.FLAG.AT | AuthenticatorData.FLAG.UP,
|
||||
0,
|
||||
AttestedCredentialData.from_ctap1(
|
||||
registration.key_handle, registration.public_key
|
||||
),
|
||||
),
|
||||
{"x5c": [registration.certificate], "sig": registration.signature},
|
||||
)
|
||||
|
||||
|
||||
@dataclass(init=False, frozen=True)
|
||||
class CollectedClientData(bytes):
|
||||
@unique
|
||||
class TYPE(str, Enum):
|
||||
CREATE = "webauthn.create"
|
||||
GET = "webauthn.get"
|
||||
|
||||
type: str
|
||||
challenge: bytes
|
||||
origin: str
|
||||
cross_origin: bool = False
|
||||
|
||||
def __init__(self, _: bytes):
|
||||
super().__init__()
|
||||
|
||||
data = json.loads(self.decode())
|
||||
object.__setattr__(self, "type", data["type"])
|
||||
object.__setattr__(self, "challenge", websafe_decode(data["challenge"]))
|
||||
object.__setattr__(self, "origin", data["origin"])
|
||||
object.__setattr__(self, "cross_origin", data.get("crossOrigin", False))
|
||||
|
||||
@classmethod
|
||||
def create(
|
||||
cls,
|
||||
type: str,
|
||||
challenge: Union[bytes, str],
|
||||
origin: str,
|
||||
cross_origin: bool = False,
|
||||
**kwargs,
|
||||
) -> CollectedClientData:
|
||||
if isinstance(challenge, bytes):
|
||||
encoded_challenge = websafe_encode(challenge)
|
||||
else:
|
||||
encoded_challenge = challenge
|
||||
return cls(
|
||||
json.dumps(
|
||||
{
|
||||
"type": type,
|
||||
"challenge": encoded_challenge,
|
||||
"origin": origin,
|
||||
"crossOrigin": cross_origin,
|
||||
**kwargs,
|
||||
},
|
||||
separators=(",", ":"),
|
||||
).encode()
|
||||
)
|
||||
|
||||
def __str__(self): # Override default implementation from bytes.
|
||||
return repr(self)
|
||||
|
||||
@property
|
||||
def b64(self) -> str:
|
||||
return websafe_encode(self)
|
||||
|
||||
@property
|
||||
def hash(self) -> bytes:
|
||||
return sha256(self)
|
||||
|
||||
|
||||
class _StringEnumMeta(EnumMeta):
|
||||
def _get_value(cls, value):
|
||||
return None
|
||||
|
||||
def __call__(cls, value, *args, **kwargs):
|
||||
try:
|
||||
return super().__call__(value, *args, **kwargs)
|
||||
except ValueError:
|
||||
return cls._get_value(value)
|
||||
|
||||
|
||||
class _StringEnum(str, Enum, metaclass=_StringEnumMeta):
|
||||
"""Enum of strings for WebAuthn types.
|
||||
|
||||
Unrecognized values are treated as missing.
|
||||
"""
|
||||
|
||||
|
||||
_b64_metadata = dict(
|
||||
serialize=lambda x: websafe_encode(x) if webauthn_json_mapping.enabled else x,
|
||||
deserialize=lambda x: websafe_decode(x) if webauthn_json_mapping.enabled else x,
|
||||
)
|
||||
|
||||
|
||||
@unique
|
||||
|
@ -52,6 +420,7 @@ class AttestationConveyancePreference(_StringEnum):
|
|||
NONE = "none"
|
||||
INDIRECT = "indirect"
|
||||
DIRECT = "direct"
|
||||
ENTERPRISE = "enterprise"
|
||||
|
||||
|
||||
@unique
|
||||
|
@ -61,6 +430,13 @@ class UserVerificationRequirement(_StringEnum):
|
|||
DISCOURAGED = "discouraged"
|
||||
|
||||
|
||||
@unique
|
||||
class ResidentKeyRequirement(_StringEnum):
|
||||
REQUIRED = "required"
|
||||
PREFERRED = "preferred"
|
||||
DISCOURAGED = "discouraged"
|
||||
|
||||
|
||||
@unique
|
||||
class AuthenticatorAttachment(_StringEnum):
|
||||
PLATFORM = "platform"
|
||||
|
@ -72,6 +448,7 @@ class AuthenticatorTransport(_StringEnum):
|
|||
USB = "usb"
|
||||
NFC = "nfc"
|
||||
BLE = "ble"
|
||||
HYBRID = "hybrid"
|
||||
INTERNAL = "internal"
|
||||
|
||||
|
||||
|
@ -80,186 +457,193 @@ class PublicKeyCredentialType(_StringEnum):
|
|||
PUBLIC_KEY = "public-key"
|
||||
|
||||
|
||||
def _snake2camel(name):
|
||||
parts = name.split("_")
|
||||
return parts[0] + "".join(p.title() for p in parts[1:])
|
||||
|
||||
|
||||
def _camel2snake(name):
|
||||
s1 = re.sub("(.)([A-Z][a-z]+)", r"\1_\2", name)
|
||||
return re.sub("([a-z0-9])([A-Z])", r"\1_\2", s1).lower()
|
||||
|
||||
|
||||
class _DataObject(dict):
|
||||
"""Base class for WebAuthn data types, acting both as dict and providing attribute
|
||||
access to values.
|
||||
"""
|
||||
|
||||
def __init__(self, **data):
|
||||
keys = {k: _snake2camel(k) for k in data.keys()}
|
||||
super(_DataObject, self).__init__(
|
||||
{keys[k]: v for k, v in data.items() if v is not None}
|
||||
)
|
||||
super(_DataObject, self).__setattr__("_keys", keys)
|
||||
|
||||
def __getattr__(self, name):
|
||||
if name in self._keys:
|
||||
return self.get(self._keys[name])
|
||||
raise AttributeError(
|
||||
"'{}' object has no attribute '{}'".format(type(self).__name__, name)
|
||||
)
|
||||
|
||||
def __setattr__(self, name, value):
|
||||
if name in self._keys:
|
||||
self[self._keys[name]] = value
|
||||
else:
|
||||
raise AttributeError(
|
||||
"'{}' object has no attribute '{}'".format(type(self).__name__, name)
|
||||
)
|
||||
|
||||
def __repr__(self):
|
||||
return "{}({!r})".format(self.__class__.__name__, dict(self))
|
||||
|
||||
@classmethod
|
||||
def _wrap(cls, data):
|
||||
if data is None:
|
||||
return None
|
||||
if isinstance(data, cls):
|
||||
return data
|
||||
return cls(**{_camel2snake(k): v for k, v in data.items()})
|
||||
|
||||
@classmethod
|
||||
def _wrap_list(cls, datas):
|
||||
return [cls._wrap(x) for x in datas] if datas is not None else None
|
||||
|
||||
|
||||
class PublicKeyCredentialRpEntity(_DataObject):
|
||||
def __init__(self, id, name, icon=None):
|
||||
super(PublicKeyCredentialRpEntity, self).__init__(id=id, name=name, icon=icon)
|
||||
@dataclass(eq=False, frozen=True)
|
||||
class PublicKeyCredentialRpEntity(_CamelCaseDataObject):
|
||||
name: str
|
||||
id: Optional[str] = None
|
||||
|
||||
@property
|
||||
def id_hash(self):
|
||||
def id_hash(self) -> Optional[bytes]:
|
||||
"""Return SHA256 hash of the identifier."""
|
||||
return sha256(self.id.encode("utf8"))
|
||||
return sha256(self.id.encode("utf8")) if self.id else None
|
||||
|
||||
|
||||
class PublicKeyCredentialUserEntity(_DataObject):
|
||||
def __init__(self, id, name, icon=None, display_name=None):
|
||||
super(PublicKeyCredentialUserEntity, self).__init__(
|
||||
id=id, name=name, icon=icon, display_name=display_name
|
||||
@dataclass(eq=False, frozen=True)
|
||||
class PublicKeyCredentialUserEntity(_CamelCaseDataObject):
|
||||
name: str
|
||||
id: bytes = field(metadata=_b64_metadata)
|
||||
display_name: Optional[str] = None
|
||||
|
||||
|
||||
@dataclass(eq=False, frozen=True)
|
||||
class PublicKeyCredentialParameters(_CamelCaseDataObject):
|
||||
type: PublicKeyCredentialType
|
||||
alg: int
|
||||
|
||||
@classmethod
|
||||
def _deserialize_list(cls, value):
|
||||
if value is None:
|
||||
return None
|
||||
items = [cls.from_dict(e) for e in value]
|
||||
return [e for e in items if e.type is not None]
|
||||
|
||||
|
||||
@dataclass(eq=False, frozen=True)
|
||||
class PublicKeyCredentialDescriptor(_CamelCaseDataObject):
|
||||
type: PublicKeyCredentialType
|
||||
id: bytes = field(metadata=_b64_metadata)
|
||||
transports: Optional[Sequence[AuthenticatorTransport]] = None
|
||||
|
||||
@classmethod
|
||||
def _deserialize_list(cls, value):
|
||||
if value is None:
|
||||
return None
|
||||
items = [cls.from_dict(e) for e in value]
|
||||
return [e for e in items if e.type is not None]
|
||||
|
||||
|
||||
@dataclass(eq=False, frozen=True)
|
||||
class AuthenticatorSelectionCriteria(_CamelCaseDataObject):
|
||||
authenticator_attachment: Optional[AuthenticatorAttachment] = None
|
||||
resident_key: Optional[ResidentKeyRequirement] = None
|
||||
user_verification: Optional[UserVerificationRequirement] = None
|
||||
require_resident_key: Optional[bool] = False
|
||||
|
||||
def __post_init__(self):
|
||||
super().__post_init__()
|
||||
|
||||
if self.resident_key is None:
|
||||
object.__setattr__(
|
||||
self,
|
||||
"resident_key",
|
||||
(
|
||||
ResidentKeyRequirement.REQUIRED
|
||||
if self.require_resident_key
|
||||
else ResidentKeyRequirement.DISCOURAGED
|
||||
),
|
||||
)
|
||||
object.__setattr__(
|
||||
self,
|
||||
"require_resident_key",
|
||||
self.resident_key == ResidentKeyRequirement.REQUIRED,
|
||||
)
|
||||
|
||||
|
||||
class PublicKeyCredentialParameters(_DataObject):
|
||||
def __init__(self, type, alg):
|
||||
super(PublicKeyCredentialParameters, self).__init__(
|
||||
type=PublicKeyCredentialType(type), alg=alg
|
||||
@dataclass(eq=False, frozen=True)
|
||||
class PublicKeyCredentialCreationOptions(_CamelCaseDataObject):
|
||||
rp: PublicKeyCredentialRpEntity
|
||||
user: PublicKeyCredentialUserEntity
|
||||
challenge: bytes = field(metadata=_b64_metadata)
|
||||
pub_key_cred_params: Sequence[PublicKeyCredentialParameters] = field(
|
||||
metadata=dict(deserialize=PublicKeyCredentialParameters._deserialize_list),
|
||||
)
|
||||
timeout: Optional[int] = None
|
||||
exclude_credentials: Optional[Sequence[PublicKeyCredentialDescriptor]] = field(
|
||||
default=None,
|
||||
metadata=dict(deserialize=PublicKeyCredentialDescriptor._deserialize_list),
|
||||
)
|
||||
authenticator_selection: Optional[AuthenticatorSelectionCriteria] = None
|
||||
attestation: Optional[AttestationConveyancePreference] = None
|
||||
extensions: Optional[Mapping[str, Any]] = None
|
||||
|
||||
|
||||
@dataclass(eq=False, frozen=True)
|
||||
class PublicKeyCredentialRequestOptions(_CamelCaseDataObject):
|
||||
challenge: bytes = field(metadata=_b64_metadata)
|
||||
timeout: Optional[int] = None
|
||||
rp_id: Optional[str] = None
|
||||
allow_credentials: Optional[Sequence[PublicKeyCredentialDescriptor]] = field(
|
||||
default=None,
|
||||
metadata=dict(deserialize=PublicKeyCredentialDescriptor._deserialize_list),
|
||||
)
|
||||
user_verification: Optional[UserVerificationRequirement] = None
|
||||
extensions: Optional[Mapping[str, Any]] = None
|
||||
|
||||
|
||||
@dataclass(eq=False, frozen=True)
|
||||
class AuthenticatorAttestationResponse(_CamelCaseDataObject):
|
||||
client_data: CollectedClientData = field(
|
||||
metadata=dict(
|
||||
_b64_metadata,
|
||||
name="clientDataJSON",
|
||||
)
|
||||
)
|
||||
attestation_object: AttestationObject = field(metadata=_b64_metadata)
|
||||
extension_results: Optional[Mapping[str, Any]] = None
|
||||
|
||||
def __getitem__(self, key):
|
||||
if key == "clientData" and not webauthn_json_mapping.enabled:
|
||||
return self.client_data
|
||||
return super().__getitem__(key)
|
||||
|
||||
@classmethod
|
||||
def from_dict(cls, data: Optional[Mapping[str, Any]]):
|
||||
if data is not None and not webauthn_json_mapping.enabled:
|
||||
value = dict(data)
|
||||
value["clientDataJSON"] = value.pop("clientData", None)
|
||||
data = value
|
||||
return super().from_dict(data)
|
||||
|
||||
|
||||
class PublicKeyCredentialDescriptor(_DataObject):
|
||||
def __init__(self, type, id, transports=None):
|
||||
super(PublicKeyCredentialDescriptor, self).__init__(
|
||||
type=PublicKeyCredentialType(type),
|
||||
id=id,
|
||||
transports=transports, # Note: Type is str as in current WebAuthn draft!
|
||||
@dataclass(eq=False, frozen=True)
|
||||
class AuthenticatorAssertionResponse(_CamelCaseDataObject):
|
||||
client_data: CollectedClientData = field(
|
||||
metadata=dict(
|
||||
_b64_metadata,
|
||||
name="clientDataJSON",
|
||||
)
|
||||
)
|
||||
authenticator_data: AuthenticatorData = field(metadata=_b64_metadata)
|
||||
signature: bytes = field(metadata=_b64_metadata)
|
||||
user_handle: Optional[bytes] = field(metadata=_b64_metadata, default=None)
|
||||
credential_id: Optional[bytes] = field(metadata=_b64_metadata, default=None)
|
||||
extension_results: Optional[Mapping[str, Any]] = None
|
||||
|
||||
def __getitem__(self, key):
|
||||
if key == "clientData" and not webauthn_json_mapping.enabled:
|
||||
return self.client_data
|
||||
return super().__getitem__(key)
|
||||
|
||||
@classmethod
|
||||
def from_dict(cls, data: Optional[Mapping[str, Any]]):
|
||||
if data is not None and not webauthn_json_mapping.enabled:
|
||||
value = dict(data)
|
||||
value["clientDataJSON"] = value.pop("clientData", None)
|
||||
data = value
|
||||
return super().from_dict(data)
|
||||
|
||||
|
||||
class AuthenticatorSelectionCriteria(_DataObject):
|
||||
def __init__(
|
||||
self,
|
||||
authenticator_attachment=None,
|
||||
require_resident_key=None,
|
||||
user_verification=None,
|
||||
):
|
||||
super(AuthenticatorSelectionCriteria, self).__init__(
|
||||
authenticator_attachment=AuthenticatorAttachment._wrap(
|
||||
authenticator_attachment
|
||||
),
|
||||
require_resident_key=require_resident_key,
|
||||
user_verification=UserVerificationRequirement._wrap(user_verification),
|
||||
)
|
||||
@dataclass(eq=False, frozen=True)
|
||||
class RegistrationResponse(_CamelCaseDataObject):
|
||||
id: bytes = field(metadata=_b64_metadata)
|
||||
response: AuthenticatorAttestationResponse
|
||||
authenticator_attachment: Optional[AuthenticatorAttachment] = None
|
||||
client_extension_results: Optional[Mapping] = None
|
||||
type: Optional[PublicKeyCredentialType] = None
|
||||
|
||||
def __post_init__(self):
|
||||
webauthn_json_mapping.require()
|
||||
super().__post_init__()
|
||||
|
||||
|
||||
class PublicKeyCredentialCreationOptions(_DataObject):
|
||||
def __init__(
|
||||
self,
|
||||
rp,
|
||||
user,
|
||||
challenge,
|
||||
pub_key_cred_params,
|
||||
timeout=None,
|
||||
exclude_credentials=None,
|
||||
authenticator_selection=None,
|
||||
attestation=None,
|
||||
extensions=None,
|
||||
):
|
||||
super(PublicKeyCredentialCreationOptions, self).__init__(
|
||||
rp=PublicKeyCredentialRpEntity._wrap(rp),
|
||||
user=PublicKeyCredentialUserEntity._wrap(user),
|
||||
challenge=challenge,
|
||||
pub_key_cred_params=PublicKeyCredentialParameters._wrap_list(
|
||||
pub_key_cred_params
|
||||
),
|
||||
timeout=timeout,
|
||||
exclude_credentials=PublicKeyCredentialDescriptor._wrap_list(
|
||||
exclude_credentials
|
||||
),
|
||||
authenticator_selection=AuthenticatorSelectionCriteria._wrap(
|
||||
authenticator_selection
|
||||
),
|
||||
attestation=AttestationConveyancePreference._wrap(attestation),
|
||||
extensions=extensions,
|
||||
)
|
||||
@dataclass(eq=False, frozen=True)
|
||||
class AuthenticationResponse(_CamelCaseDataObject):
|
||||
id: bytes = field(metadata=_b64_metadata)
|
||||
response: AuthenticatorAssertionResponse
|
||||
authenticator_attachment: Optional[AuthenticatorAttachment] = None
|
||||
client_extension_results: Optional[Mapping] = None
|
||||
type: Optional[PublicKeyCredentialType] = None
|
||||
|
||||
def __post_init__(self):
|
||||
webauthn_json_mapping.require()
|
||||
super().__post_init__()
|
||||
|
||||
|
||||
class PublicKeyCredentialRequestOptions(_DataObject):
|
||||
def __init__(
|
||||
self,
|
||||
challenge,
|
||||
timeout=None,
|
||||
rp_id=None,
|
||||
allow_credentials=None,
|
||||
user_verification=None,
|
||||
extensions=None,
|
||||
):
|
||||
super(PublicKeyCredentialRequestOptions, self).__init__(
|
||||
challenge=challenge,
|
||||
timeout=timeout,
|
||||
rp_id=rp_id,
|
||||
allow_credentials=PublicKeyCredentialDescriptor._wrap_list(
|
||||
allow_credentials
|
||||
),
|
||||
user_verification=UserVerificationRequirement._wrap(user_verification),
|
||||
extensions=extensions,
|
||||
)
|
||||
@dataclass(eq=False, frozen=True)
|
||||
class CredentialCreationOptions(_CamelCaseDataObject):
|
||||
public_key: PublicKeyCredentialCreationOptions
|
||||
|
||||
|
||||
class AuthenticatorAttestationResponse(_DataObject):
|
||||
def __init__(self, client_data, attestation_object, extension_results=None):
|
||||
super(AuthenticatorAttestationResponse, self).__init__(
|
||||
client_data=client_data,
|
||||
attestation_object=attestation_object,
|
||||
extension_results=extension_results,
|
||||
)
|
||||
|
||||
|
||||
class AuthenticatorAssertionResponse(_DataObject):
|
||||
def __init__(
|
||||
self,
|
||||
client_data,
|
||||
authenticator_data,
|
||||
signature,
|
||||
user_handle,
|
||||
credential_id,
|
||||
extension_results=None,
|
||||
):
|
||||
super(AuthenticatorAssertionResponse, self).__init__(
|
||||
client_data=client_data,
|
||||
authenticator_data=authenticator_data,
|
||||
signature=signature,
|
||||
user_handle=user_handle,
|
||||
credential_id=credential_id,
|
||||
extension_results=extension_results,
|
||||
)
|
||||
@dataclass(eq=False, frozen=True)
|
||||
class CredentialRequestOptions(_CamelCaseDataObject):
|
||||
public_key: PublicKeyCredentialRequestOptions
|
||||
|
|
|
@ -37,20 +37,26 @@ https://github.com/microsoft/webauthn
|
|||
#
|
||||
# pylint: disable=invalid-name, super-init-not-called, too-few-public-methods
|
||||
|
||||
from __future__ import absolute_import, unicode_literals
|
||||
from __future__ import annotations
|
||||
|
||||
from enum import IntEnum, unique
|
||||
from ctypes.wintypes import BOOL, DWORD, LONG, LPCWSTR, HWND
|
||||
from threading import Thread
|
||||
from typing import Mapping
|
||||
|
||||
import ctypes
|
||||
from ctypes import WinDLL # type: ignore
|
||||
from ctypes import LibraryLoader
|
||||
|
||||
|
||||
windll = LibraryLoader(WinDLL)
|
||||
|
||||
|
||||
PBYTE = ctypes.POINTER(ctypes.c_ubyte) # Different from wintypes.PBYTE, which is signed
|
||||
PCWSTR = ctypes.c_wchar_p
|
||||
|
||||
|
||||
class BytesProperty(object):
|
||||
class BytesProperty:
|
||||
"""Property for structs storing byte arrays as DWORD + PBYTE.
|
||||
|
||||
Allows for easy reading/writing to struct fields using Python bytes objects.
|
||||
|
@ -86,10 +92,10 @@ class GUID(ctypes.Structure):
|
|||
self.Data2,
|
||||
self.Data3,
|
||||
self.Data4[0] * 256 + self.Data4[1],
|
||||
self.Data4[2] * (256 ** 5)
|
||||
+ self.Data4[3] * (256 ** 4)
|
||||
+ self.Data4[4] * (256 ** 3)
|
||||
+ self.Data4[5] * (256 ** 2)
|
||||
self.Data4[2] * (256**5)
|
||||
+ self.Data4[3] * (256**4)
|
||||
+ self.Data4[4] * (256**3)
|
||||
+ self.Data4[5] * (256**2)
|
||||
+ self.Data4[6] * 256
|
||||
+ self.Data4[7],
|
||||
)
|
||||
|
@ -616,7 +622,8 @@ class WebAuthNLargeBlobOperation(_FromString, IntEnum):
|
|||
DELETE = 3
|
||||
|
||||
|
||||
WEBAUTHN = ctypes.windll.webauthn
|
||||
HRESULT = ctypes.HRESULT # type: ignore
|
||||
WEBAUTHN = windll.webauthn # type: ignore
|
||||
WEBAUTHN_API_VERSION = WEBAUTHN.WebAuthNGetApiVersionNumber()
|
||||
# The following is derived from
|
||||
# https://github.com/microsoft/webauthn/blob/master/webauthn.h#L37
|
||||
|
@ -624,7 +631,7 @@ WEBAUTHN_API_VERSION = WEBAUTHN.WebAuthNGetApiVersionNumber()
|
|||
WEBAUTHN.WebAuthNIsUserVerifyingPlatformAuthenticatorAvailable.argtypes = [
|
||||
ctypes.POINTER(ctypes.c_bool)
|
||||
]
|
||||
WEBAUTHN.WebAuthNIsUserVerifyingPlatformAuthenticatorAvailable.restype = ctypes.HRESULT
|
||||
WEBAUTHN.WebAuthNIsUserVerifyingPlatformAuthenticatorAvailable.restype = HRESULT
|
||||
|
||||
WEBAUTHN.WebAuthNAuthenticatorMakeCredential.argtypes = [
|
||||
HWND,
|
||||
|
@ -635,7 +642,7 @@ WEBAUTHN.WebAuthNAuthenticatorMakeCredential.argtypes = [
|
|||
ctypes.POINTER(WebAuthNMakeCredentialOptions),
|
||||
ctypes.POINTER(ctypes.POINTER(WebAuthNCredentialAttestation)),
|
||||
]
|
||||
WEBAUTHN.WebAuthNAuthenticatorMakeCredential.restype = ctypes.HRESULT
|
||||
WEBAUTHN.WebAuthNAuthenticatorMakeCredential.restype = HRESULT
|
||||
|
||||
WEBAUTHN.WebAuthNAuthenticatorGetAssertion.argtypes = [
|
||||
HWND,
|
||||
|
@ -644,7 +651,7 @@ WEBAUTHN.WebAuthNAuthenticatorGetAssertion.argtypes = [
|
|||
ctypes.POINTER(WebAuthNGetAssertionOptions),
|
||||
ctypes.POINTER(ctypes.POINTER(WebAuthNAssertion)),
|
||||
]
|
||||
WEBAUTHN.WebAuthNAuthenticatorGetAssertion.restype = ctypes.HRESULT
|
||||
WEBAUTHN.WebAuthNAuthenticatorGetAssertion.restype = HRESULT
|
||||
|
||||
WEBAUTHN.WebAuthNFreeCredentialAttestation.argtypes = [
|
||||
ctypes.POINTER(WebAuthNCredentialAttestation)
|
||||
|
@ -652,16 +659,16 @@ WEBAUTHN.WebAuthNFreeCredentialAttestation.argtypes = [
|
|||
WEBAUTHN.WebAuthNFreeAssertion.argtypes = [ctypes.POINTER(WebAuthNAssertion)]
|
||||
|
||||
WEBAUTHN.WebAuthNGetCancellationId.argtypes = [ctypes.POINTER(GUID)]
|
||||
WEBAUTHN.WebAuthNGetCancellationId.restype = ctypes.HRESULT
|
||||
WEBAUTHN.WebAuthNGetCancellationId.restype = HRESULT
|
||||
|
||||
WEBAUTHN.WebAuthNCancelCurrentOperation.argtypes = [ctypes.POINTER(GUID)]
|
||||
WEBAUTHN.WebAuthNCancelCurrentOperation.restype = ctypes.HRESULT
|
||||
WEBAUTHN.WebAuthNCancelCurrentOperation.restype = HRESULT
|
||||
|
||||
WEBAUTHN.WebAuthNGetErrorName.argtypes = [ctypes.HRESULT]
|
||||
WEBAUTHN.WebAuthNGetErrorName.argtypes = [HRESULT]
|
||||
WEBAUTHN.WebAuthNGetErrorName.restype = PCWSTR
|
||||
|
||||
|
||||
WEBAUTHN_STRUCT_VERSIONS = {
|
||||
WEBAUTHN_STRUCT_VERSIONS: Mapping[int, Mapping[str, int]] = {
|
||||
1: {
|
||||
"WebAuthNRpEntityInformation": 1,
|
||||
"WebAuthNUserEntityInformation": 1,
|
||||
|
@ -685,7 +692,7 @@ WEBAUTHN_STRUCT_VERSIONS = {
|
|||
}
|
||||
|
||||
|
||||
def get_version(class_name):
|
||||
def get_version(class_name: str) -> int:
|
||||
"""Get version of struct.
|
||||
|
||||
:param str class_name: Struct class name.
|
||||
|
@ -698,11 +705,12 @@ def get_version(class_name):
|
|||
and class_name in WEBAUTHN_STRUCT_VERSIONS[api_version]
|
||||
):
|
||||
return WEBAUTHN_STRUCT_VERSIONS[api_version][class_name]
|
||||
raise ValueError("Unknown class name")
|
||||
|
||||
|
||||
class CancelThread(Thread):
|
||||
def __init__(self, event):
|
||||
super(CancelThread, self).__init__()
|
||||
super().__init__()
|
||||
self.daemon = True
|
||||
self._completed = False
|
||||
self.event = event
|
||||
|
@ -720,7 +728,7 @@ class CancelThread(Thread):
|
|||
self.join()
|
||||
|
||||
|
||||
class WinAPI(object):
|
||||
class WinAPI:
|
||||
"""Implementation of Microsoft's WebAuthN APIs.
|
||||
|
||||
:param ctypes.HWND handle: Window handle to use for API calls.
|
||||
|
@ -729,7 +737,7 @@ class WinAPI(object):
|
|||
version = WEBAUTHN_API_VERSION
|
||||
|
||||
def __init__(self, handle=None):
|
||||
self.handle = handle or ctypes.windll.user32.GetForegroundWindow()
|
||||
self.handle = handle or windll.user32.GetForegroundWindow()
|
||||
|
||||
def get_error_name(self, winerror):
|
||||
"""Returns an error name given an error HRESULT value.
|
||||
|
|
|
@ -0,0 +1,6 @@
|
|||
[mypy]
|
||||
files = fido2/
|
||||
check_untyped_defs = True
|
||||
|
||||
[mypy-smartcard.*]
|
||||
ignore_missing_imports = True
|
|
@ -0,0 +1,49 @@
|
|||
[tool.poetry]
|
||||
name = "fido2"
|
||||
version = "1.1.4-dev.0"
|
||||
description = "FIDO2/WebAuthn library for implementing clients and servers."
|
||||
authors = ["Dain Nilsson <dain@yubico.com>"]
|
||||
homepage = "https://github.com/Yubico/python-fido2"
|
||||
repository = "https://github.com/Yubico/python-fido2"
|
||||
keywords = ["fido2", "webauthn", "ctap", "u2f"]
|
||||
classifiers = [
|
||||
"License :: OSI Approved :: BSD License",
|
||||
"License :: OSI Approved :: Apache Software License",
|
||||
"License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0)",
|
||||
"Operating System :: MacOS",
|
||||
"Operating System :: Microsoft :: Windows",
|
||||
"Operating System :: POSIX :: Linux",
|
||||
"Development Status :: 5 - Production/Stable",
|
||||
"Intended Audience :: Developers",
|
||||
"Intended Audience :: System Administrators",
|
||||
"Topic :: Internet",
|
||||
"Topic :: Security :: Cryptography",
|
||||
"Topic :: Software Development :: Libraries :: Python Modules"
|
||||
]
|
||||
include = [
|
||||
{ path = "COPYING", format = "sdist"},
|
||||
{ path = "COPYING.MPLv2", format = "sdist"},
|
||||
{ path = "COPYING.APLv2", format = "sdist"},
|
||||
{ path = "NEWS", format = "sdist"},
|
||||
{ path = "README.adoc", format = "sdist"},
|
||||
{ path= "tests/", format = "sdist"},
|
||||
{ path= "examples/", format = "sdist"},
|
||||
]
|
||||
|
||||
[tool.poetry.dependencies]
|
||||
python = "^3.8"
|
||||
cryptography = ">=2.6, !=35, <45"
|
||||
pyscard = {version = "^1.9 || ^2", optional = true}
|
||||
|
||||
[tool.poetry.extras]
|
||||
pcsc = ["pyscard"]
|
||||
|
||||
[tool.poetry.dev-dependencies]
|
||||
pytest = "^7.0"
|
||||
|
||||
[build-system]
|
||||
requires = ["poetry-core>=1.0.0"]
|
||||
build-backend = "poetry.core.masonry.api"
|
||||
|
||||
[tool.pytest.ini_options]
|
||||
testpaths = ["tests"]
|
79
setup.py
79
setup.py
|
@ -1,79 +0,0 @@
|
|||
# Copyright (c) 2018 Yubico AB
|
||||
# All rights reserved.
|
||||
#
|
||||
# Redistribution and use in source and binary forms, with or
|
||||
# without modification, are permitted provided that the following
|
||||
# conditions are met:
|
||||
#
|
||||
# 1. Redistributions of source code must retain the above copyright
|
||||
# notice, this list of conditions and the following disclaimer.
|
||||
# 2. Redistributions in binary form must reproduce the above
|
||||
# copyright notice, this list of conditions and the following
|
||||
# disclaimer in the documentation and/or other materials provided
|
||||
# with the distribution.
|
||||
#
|
||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
|
||||
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
|
||||
# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
|
||||
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
|
||||
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
|
||||
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
|
||||
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
|
||||
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
|
||||
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
||||
# POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
from distutils.version import LooseVersion
|
||||
from setuptools import setup, find_packages, __version__
|
||||
import re
|
||||
import sys
|
||||
|
||||
if LooseVersion(__version__) < LooseVersion("20.2"):
|
||||
sys.exit(
|
||||
"Your setuptools version does not support PEP 508.\n"
|
||||
"Please install setuptools 20.2 or later."
|
||||
)
|
||||
|
||||
|
||||
def get_version():
|
||||
with open("fido2/__init__.py", "r") as f:
|
||||
match = re.search(r"(?m)^__version__\s*=\s*['\"](.+)['\"]$", f.read())
|
||||
return match.group(1)
|
||||
|
||||
|
||||
setup(
|
||||
name="fido2",
|
||||
version=get_version(),
|
||||
packages=find_packages(exclude=["test", "test.*"]),
|
||||
include_package_data=True,
|
||||
author="Dain Nilsson",
|
||||
author_email="dain@yubico.com",
|
||||
description="Python based FIDO 2.0 library",
|
||||
url="https://github.com/Yubico/python-fido2",
|
||||
python_requires=">=2.7.6,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*",
|
||||
install_requires=["six", "cryptography>=1.5"],
|
||||
extras_require={':python_version < "3.4"': ["enum34"], "pcsc": ["pyscard"]},
|
||||
classifiers=[
|
||||
"License :: OSI Approved :: BSD License",
|
||||
"License :: OSI Approved :: Apache Software License",
|
||||
"License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0)",
|
||||
"Operating System :: MacOS",
|
||||
"Operating System :: Microsoft :: Windows",
|
||||
"Operating System :: POSIX :: Linux",
|
||||
"Programming Language :: Python",
|
||||
"Programming Language :: Python :: 2",
|
||||
"Programming Language :: Python :: 2.7",
|
||||
"Programming Language :: Python :: 3",
|
||||
"Programming Language :: Python :: 3.4",
|
||||
"Programming Language :: Python :: 3.5",
|
||||
"Programming Language :: Python :: 3.6",
|
||||
"Development Status :: 4 - Beta",
|
||||
"Intended Audience :: Developers",
|
||||
"Intended Audience :: System Administrators",
|
||||
"Topic :: Internet",
|
||||
"Topic :: Security :: Cryptography",
|
||||
"Topic :: Software Development :: Libraries :: Python Modules",
|
||||
],
|
||||
)
|
|
@ -1,453 +0,0 @@
|
|||
# coding=utf-8
|
||||
|
||||
# Copyright (c) 2013 Yubico AB
|
||||
# All rights reserved.
|
||||
#
|
||||
# Redistribution and use in source and binary forms, with or
|
||||
# without modification, are permitted provided that the following
|
||||
# conditions are met:
|
||||
#
|
||||
# 1. Redistributions of source code must retain the above copyright
|
||||
# notice, this list of conditions and the following disclaimer.
|
||||
# 2. Redistributions in binary form must reproduce the above
|
||||
# copyright notice, this list of conditions and the following
|
||||
# disclaimer in the documentation and/or other materials provided
|
||||
# with the distribution.
|
||||
#
|
||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
|
||||
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
|
||||
# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
|
||||
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
|
||||
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
|
||||
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
|
||||
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
|
||||
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
|
||||
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
||||
# POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
from __future__ import absolute_import, unicode_literals
|
||||
|
||||
import mock
|
||||
import unittest
|
||||
from threading import Event, Timer
|
||||
from binascii import a2b_hex
|
||||
from fido2.utils import sha256, websafe_decode
|
||||
from fido2.hid import CAPABILITY
|
||||
from fido2.ctap import CtapError
|
||||
from fido2.ctap1 import ApduError, APDU, RegistrationData, SignatureData
|
||||
from fido2.ctap2 import Info, AttestationObject
|
||||
from fido2.client import ClientData, U2fClient, ClientError, Fido2Client
|
||||
from fido2.webauthn import PublicKeyCredentialCreationOptions
|
||||
|
||||
|
||||
class TestClientData(unittest.TestCase):
|
||||
def test_client_data(self):
|
||||
client_data = ClientData(
|
||||
b'{"typ":"navigator.id.finishEnrollment","challenge":"vqrS6WXDe1JUs5_c3i4-LkKIHRr-3XVb3azuA5TifHo","cid_pubkey":{"kty":"EC","crv":"P-256","x":"HzQwlfXX7Q4S5MtCCnZUNBw3RMzPO9tOyWjBqRl4tJ8","y":"XVguGFLIZx1fXg3wNqfdbn75hi4-_7-BxhMljw42Ht4"},"origin":"http://example.com"}' # noqa E501
|
||||
)
|
||||
|
||||
self.assertEqual(
|
||||
client_data.hash,
|
||||
a2b_hex("4142d21c00d94ffb9d504ada8f99b721f4b191ae4e37ca0140f696b6983cfacb"),
|
||||
)
|
||||
self.assertEqual(client_data.get("origin"), "http://example.com")
|
||||
|
||||
self.assertEqual(client_data, ClientData.from_b64(client_data.b64))
|
||||
|
||||
self.assertEqual(
|
||||
client_data.data,
|
||||
{
|
||||
"typ": "navigator.id.finishEnrollment",
|
||||
"challenge": "vqrS6WXDe1JUs5_c3i4-LkKIHRr-3XVb3azuA5TifHo",
|
||||
"cid_pubkey": {
|
||||
"kty": "EC",
|
||||
"crv": "P-256",
|
||||
"x": "HzQwlfXX7Q4S5MtCCnZUNBw3RMzPO9tOyWjBqRl4tJ8",
|
||||
"y": "XVguGFLIZx1fXg3wNqfdbn75hi4-_7-BxhMljw42Ht4",
|
||||
},
|
||||
"origin": "http://example.com",
|
||||
},
|
||||
)
|
||||
|
||||
|
||||
APP_ID = "https://foo.example.com"
|
||||
REG_DATA = RegistrationData(
|
||||
a2b_hex(
|
||||
b"0504b174bc49c7ca254b70d2e5c207cee9cf174820ebd77ea3c65508c26da51b657c1cc6b952f8621697936482da0a6d3d3826a59095daf6cd7c03e2e60385d2f6d9402a552dfdb7477ed65fd84133f86196010b2215b57da75d315b7b9e8fe2e3925a6019551bab61d16591659cbaf00b4950f7abfe6660e2e006f76868b772d70c253082013c3081e4a003020102020a47901280001155957352300a06082a8648ce3d0403023017311530130603550403130c476e756262792050696c6f74301e170d3132303831343138323933325a170d3133303831343138323933325a3031312f302d0603550403132650696c6f74476e756262792d302e342e312d34373930313238303030313135353935373335323059301306072a8648ce3d020106082a8648ce3d030107034200048d617e65c9508e64bcc5673ac82a6799da3c1446682c258c463fffdf58dfd2fa3e6c378b53d795c4a4dffb4199edd7862f23abaf0203b4b8911ba0569994e101300a06082a8648ce3d0403020347003044022060cdb6061e9c22262d1aac1d96d8c70829b2366531dda268832cb836bcd30dfa0220631b1459f09e6330055722c8d89b7f48883b9089b88d60d1d9795902b30410df304502201471899bcc3987e62e8202c9b39c33c19033f7340352dba80fcab017db9230e402210082677d673d891933ade6f617e5dbde2e247e70423fd5ad7804a6d3d3961ef871" # noqa E501
|
||||
)
|
||||
)
|
||||
SIG_DATA = SignatureData(
|
||||
a2b_hex(
|
||||
b"0100000001304402204b5f0cd17534cedd8c34ee09570ef542a353df4436030ce43d406de870b847780220267bb998fac9b7266eb60e7cb0b5eabdfd5ba9614f53c7b22272ec10047a923f" # noqa E501
|
||||
)
|
||||
)
|
||||
|
||||
|
||||
class TestU2fClient(unittest.TestCase):
|
||||
def test_register_wrong_app_id(self):
|
||||
client = U2fClient(None, APP_ID)
|
||||
try:
|
||||
client.register(
|
||||
"https://bar.example.com",
|
||||
[{"version": "U2F_V2", "challenge": "foobar"}],
|
||||
[],
|
||||
)
|
||||
self.fail("register did not raise error")
|
||||
except ClientError as e:
|
||||
self.assertEqual(e.code, ClientError.ERR.BAD_REQUEST)
|
||||
|
||||
def test_register_unsupported_version(self):
|
||||
client = U2fClient(None, APP_ID)
|
||||
client.ctap = mock.MagicMock()
|
||||
client.ctap.get_version.return_value = "U2F_XXX"
|
||||
|
||||
try:
|
||||
client.register(APP_ID, [{"version": "U2F_V2", "challenge": "foobar"}], [])
|
||||
self.fail("register did not raise error")
|
||||
except ClientError as e:
|
||||
self.assertEqual(e.code, ClientError.ERR.DEVICE_INELIGIBLE)
|
||||
|
||||
client.ctap.get_version.assert_called_with()
|
||||
|
||||
def test_register_existing_key(self):
|
||||
client = U2fClient(None, APP_ID)
|
||||
client.ctap = mock.MagicMock()
|
||||
client.ctap.get_version.return_value = "U2F_V2"
|
||||
client.ctap.authenticate.side_effect = ApduError(APDU.USE_NOT_SATISFIED)
|
||||
|
||||
try:
|
||||
client.register(
|
||||
APP_ID,
|
||||
[{"version": "U2F_V2", "challenge": "foobar"}],
|
||||
[{"version": "U2F_V2", "keyHandle": "a2V5"}],
|
||||
)
|
||||
self.fail("register did not raise error")
|
||||
except ClientError as e:
|
||||
self.assertEqual(e.code, ClientError.ERR.DEVICE_INELIGIBLE)
|
||||
|
||||
client.ctap.get_version.assert_called_with()
|
||||
client.ctap.authenticate.assert_called_once()
|
||||
# Check keyHandle
|
||||
self.assertEqual(client.ctap.authenticate.call_args[0][2], b"key")
|
||||
# Ensure check-only was set
|
||||
self.assertTrue(client.ctap.authenticate.call_args[0][3])
|
||||
|
||||
def test_register(self):
|
||||
client = U2fClient(None, APP_ID)
|
||||
client.ctap = mock.MagicMock()
|
||||
client.ctap.get_version.return_value = "U2F_V2"
|
||||
client.ctap.authenticate.side_effect = ApduError(APDU.WRONG_DATA)
|
||||
client.ctap.register.return_value = REG_DATA
|
||||
|
||||
resp = client.register(
|
||||
APP_ID,
|
||||
[{"version": "U2F_V2", "challenge": "foobar"}],
|
||||
[{"version": "U2F_V2", "keyHandle": "a2V5"}],
|
||||
)
|
||||
|
||||
client.ctap.get_version.assert_called_with()
|
||||
client.ctap.authenticate.assert_called_once()
|
||||
client.ctap.register.assert_called_once()
|
||||
|
||||
client_param, app_param = client.ctap.register.call_args[0]
|
||||
self.assertEqual(sha256(websafe_decode(resp["clientData"])), client_param)
|
||||
self.assertEqual(websafe_decode(resp["registrationData"]), REG_DATA)
|
||||
self.assertEqual(sha256(APP_ID.encode()), app_param)
|
||||
|
||||
def test_register_await_timeout(self):
|
||||
client = U2fClient(None, APP_ID)
|
||||
client.ctap = mock.MagicMock()
|
||||
client.ctap.get_version.return_value = "U2F_V2"
|
||||
client.ctap.authenticate.side_effect = ApduError(APDU.WRONG_DATA)
|
||||
client.ctap.register.side_effect = ApduError(APDU.USE_NOT_SATISFIED)
|
||||
|
||||
client.poll_delay = 0.01
|
||||
event = Event()
|
||||
timer = Timer(0.1, event.set)
|
||||
timer.start()
|
||||
try:
|
||||
client.register(
|
||||
APP_ID,
|
||||
[{"version": "U2F_V2", "challenge": "foobar"}],
|
||||
[{"version": "U2F_V2", "keyHandle": "a2V5"}],
|
||||
event=event,
|
||||
)
|
||||
except ClientError as e:
|
||||
self.assertEqual(e.code, ClientError.ERR.TIMEOUT)
|
||||
|
||||
def test_register_await_touch(self):
|
||||
client = U2fClient(None, APP_ID)
|
||||
client.ctap = mock.MagicMock()
|
||||
client.ctap.get_version.return_value = "U2F_V2"
|
||||
client.ctap.authenticate.side_effect = ApduError(APDU.WRONG_DATA)
|
||||
client.ctap.register.side_effect = [
|
||||
ApduError(APDU.USE_NOT_SATISFIED),
|
||||
ApduError(APDU.USE_NOT_SATISFIED),
|
||||
ApduError(APDU.USE_NOT_SATISFIED),
|
||||
ApduError(APDU.USE_NOT_SATISFIED),
|
||||
REG_DATA,
|
||||
]
|
||||
|
||||
event = Event()
|
||||
event.wait = mock.MagicMock()
|
||||
resp = client.register(
|
||||
APP_ID,
|
||||
[{"version": "U2F_V2", "challenge": "foobar"}],
|
||||
[{"version": "U2F_V2", "keyHandle": "a2V5"}],
|
||||
event=event,
|
||||
)
|
||||
|
||||
event.wait.assert_called()
|
||||
|
||||
client.ctap.get_version.assert_called_with()
|
||||
client.ctap.authenticate.assert_called_once()
|
||||
client.ctap.register.assert_called()
|
||||
|
||||
client_param, app_param = client.ctap.register.call_args[0]
|
||||
self.assertEqual(sha256(websafe_decode(resp["clientData"])), client_param)
|
||||
self.assertEqual(websafe_decode(resp["registrationData"]), REG_DATA)
|
||||
self.assertEqual(sha256(APP_ID.encode()), app_param)
|
||||
|
||||
def test_sign_wrong_app_id(self):
|
||||
client = U2fClient(None, APP_ID)
|
||||
client.ctap = mock.MagicMock()
|
||||
client.ctap.get_version.return_value = "U2F_V2"
|
||||
|
||||
try:
|
||||
client.sign(
|
||||
"http://foo.example.com",
|
||||
"challenge",
|
||||
[{"version": "U2F_V2", "keyHandle": "a2V5"}],
|
||||
)
|
||||
self.fail("sign did not raise error")
|
||||
except ClientError as e:
|
||||
self.assertEqual(e.code, ClientError.ERR.BAD_REQUEST)
|
||||
|
||||
def test_sign_unsupported_version(self):
|
||||
client = U2fClient(None, APP_ID)
|
||||
client.ctap = mock.MagicMock()
|
||||
client.ctap.get_version.return_value = "U2F_XXX"
|
||||
|
||||
try:
|
||||
client.sign(
|
||||
APP_ID, "challenge", [{"version": "U2F_V2", "keyHandle": "a2V5"}]
|
||||
)
|
||||
self.fail("sign did not raise error")
|
||||
except ClientError as e:
|
||||
self.assertEqual(e.code, ClientError.ERR.DEVICE_INELIGIBLE)
|
||||
|
||||
client.ctap.get_version.assert_called_with()
|
||||
|
||||
def test_sign_missing_key(self):
|
||||
client = U2fClient(None, APP_ID)
|
||||
client.ctap = mock.MagicMock()
|
||||
client.ctap.get_version.return_value = "U2F_V2"
|
||||
client.ctap.authenticate.side_effect = ApduError(APDU.WRONG_DATA)
|
||||
|
||||
try:
|
||||
client.sign(
|
||||
APP_ID, "challenge", [{"version": "U2F_V2", "keyHandle": "a2V5"}]
|
||||
)
|
||||
self.fail("sign did not raise error")
|
||||
except ClientError as e:
|
||||
self.assertEqual(e.code, ClientError.ERR.DEVICE_INELIGIBLE)
|
||||
|
||||
client.ctap.get_version.assert_called_with()
|
||||
client.ctap.authenticate.assert_called_once()
|
||||
_, app_param, key_handle = client.ctap.authenticate.call_args[0]
|
||||
self.assertEqual(app_param, sha256(APP_ID.encode()))
|
||||
self.assertEqual(key_handle, b"key")
|
||||
|
||||
def test_sign(self):
|
||||
client = U2fClient(None, APP_ID)
|
||||
client.ctap = mock.MagicMock()
|
||||
client.ctap.get_version.return_value = "U2F_V2"
|
||||
client.ctap.authenticate.return_value = SIG_DATA
|
||||
|
||||
resp = client.sign(
|
||||
APP_ID, "challenge", [{"version": "U2F_V2", "keyHandle": "a2V5"}]
|
||||
)
|
||||
|
||||
client.ctap.get_version.assert_called_with()
|
||||
client.ctap.authenticate.assert_called_once()
|
||||
client_param, app_param, key_handle = client.ctap.authenticate.call_args[0]
|
||||
|
||||
self.assertEqual(client_param, sha256(websafe_decode(resp["clientData"])))
|
||||
self.assertEqual(app_param, sha256(APP_ID.encode()))
|
||||
self.assertEqual(key_handle, b"key")
|
||||
self.assertEqual(websafe_decode(resp["signatureData"]), SIG_DATA)
|
||||
|
||||
def test_sign_await_touch(self):
|
||||
client = U2fClient(None, APP_ID)
|
||||
client.ctap = mock.MagicMock()
|
||||
client.ctap.get_version.return_value = "U2F_V2"
|
||||
client.ctap.authenticate.side_effect = [
|
||||
ApduError(APDU.USE_NOT_SATISFIED),
|
||||
ApduError(APDU.USE_NOT_SATISFIED),
|
||||
ApduError(APDU.USE_NOT_SATISFIED),
|
||||
ApduError(APDU.USE_NOT_SATISFIED),
|
||||
SIG_DATA,
|
||||
]
|
||||
|
||||
event = Event()
|
||||
event.wait = mock.MagicMock()
|
||||
|
||||
resp = client.sign(
|
||||
APP_ID,
|
||||
"challenge",
|
||||
[{"version": "U2F_V2", "keyHandle": "a2V5"}],
|
||||
event=event,
|
||||
)
|
||||
|
||||
event.wait.assert_called()
|
||||
|
||||
client.ctap.get_version.assert_called_with()
|
||||
client.ctap.authenticate.assert_called()
|
||||
client_param, app_param, key_handle = client.ctap.authenticate.call_args[0]
|
||||
|
||||
self.assertEqual(client_param, sha256(websafe_decode(resp["clientData"])))
|
||||
self.assertEqual(app_param, sha256(APP_ID.encode()))
|
||||
self.assertEqual(key_handle, b"key")
|
||||
self.assertEqual(websafe_decode(resp["signatureData"]), SIG_DATA)
|
||||
|
||||
|
||||
rp = {"id": "example.com", "name": "Example RP"}
|
||||
user = {"id": b"user_id", "name": "A. User"}
|
||||
challenge = b"Y2hhbGxlbmdl"
|
||||
_INFO_NO_PIN = a2b_hex(
|
||||
"a60182665532465f5632684649444f5f325f3002826375766d6b686d61632d7365637265740350f8a011f38c0a4d15800617111f9edc7d04a462726bf5627570f564706c6174f469636c69656e7450696ef4051904b0068101" # noqa E501
|
||||
)
|
||||
_MC_RESP = a2b_hex(
|
||||
"a301667061636b6564025900c40021f5fc0b85cd22e60623bcd7d1ca48948909249b4776eb515154e57b66ae12410000001cf8a011f38c0a4d15800617111f9edc7d0040fe3aac036d14c1e1c65518b698dd1da8f596bc33e11072813466c6bf3845691509b80fb76d59309b8d39e0a93452688f6ca3a39a76f3fc52744fb73948b15783a5010203262001215820643566c206dd00227005fa5de69320616ca268043a38f08bde2e9dc45a5cafaf225820171353b2932434703726aae579fa6542432861fe591e481ea22d63997e1a529003a363616c67266373696758483046022100cc1ef43edf07de8f208c21619c78a565ddcf4150766ad58781193be8e0a742ed022100f1ed7c7243e45b7d8e5bda6b1abf10af7391789d1ef21b70bd69fed48dba4cb163783563815901973082019330820138a003020102020900859b726cb24b4c29300a06082a8648ce3d0403023047310b300906035504061302555331143012060355040a0c0b59756269636f205465737431223020060355040b0c1941757468656e74696361746f72204174746573746174696f6e301e170d3136313230343131353530305a170d3236313230323131353530305a3047310b300906035504061302555331143012060355040a0c0b59756269636f205465737431223020060355040b0c1941757468656e74696361746f72204174746573746174696f6e3059301306072a8648ce3d020106082a8648ce3d03010703420004ad11eb0e8852e53ad5dfed86b41e6134a18ec4e1af8f221a3c7d6e636c80ea13c3d504ff2e76211bb44525b196c44cb4849979cf6f896ecd2bb860de1bf4376ba30d300b30090603551d1304023000300a06082a8648ce3d0403020349003046022100e9a39f1b03197525f7373e10ce77e78021731b94d0c03f3fda1fd22db3d030e7022100c4faec3445a820cf43129cdb00aabefd9ae2d874f9c5d343cb2f113da23723f3" # noqa E501
|
||||
)
|
||||
|
||||
|
||||
class TestFido2Client(unittest.TestCase):
    """Tests for Fido2Client registration against mocked CTAP1/CTAP2 devices."""

    def test_ctap1_info(self):
        """A device without CBOR support reports synthetic U2F_V2 info."""
        dev = mock.Mock()
        dev.capabilities = 0  # no CAPABILITY.CBOR -> CTAP1-only path
        client = Fido2Client(dev, APP_ID)
        self.assertEqual(client.info.versions, ["U2F_V2"])
        self.assertEqual(client.info.pin_uv_protocols, [])

    @mock.patch("fido2.client.Ctap2")
    def test_make_credential_wrong_app_id(self, PatchedCtap2):
        """An RP ID not valid for the client origin fails with BAD_REQUEST."""
        dev = mock.Mock()
        dev.capabilities = CAPABILITY.CBOR
        ctap2 = mock.MagicMock()
        ctap2.get_info.return_value = Info(_INFO_NO_PIN)
        PatchedCtap2.return_value = ctap2
        client = Fido2Client(dev, APP_ID)
        # assertRaises is the idiomatic replacement for try/self.fail/except.
        with self.assertRaises(ClientError) as ctx:
            client.make_credential(
                PublicKeyCredentialCreationOptions(
                    {"id": "bar.example.com", "name": "Invalid RP"},
                    user,
                    challenge,
                    [{"type": "public-key", "alg": -7}],
                )
            )
        self.assertEqual(ctx.exception.code, ClientError.ERR.BAD_REQUEST)

    @mock.patch("fido2.client.Ctap2")
    def test_make_credential_existing_key(self, PatchedCtap2):
        """CREDENTIAL_EXCLUDED from the device maps to DEVICE_INELIGIBLE."""
        dev = mock.Mock()
        dev.capabilities = CAPABILITY.CBOR
        ctap2 = mock.MagicMock()
        ctap2.get_info.return_value = Info(_INFO_NO_PIN)
        ctap2.info = ctap2.get_info()
        ctap2.make_credential.side_effect = CtapError(CtapError.ERR.CREDENTIAL_EXCLUDED)
        PatchedCtap2.return_value = ctap2
        client = Fido2Client(dev, APP_ID)

        with self.assertRaises(ClientError) as ctx:
            client.make_credential(
                PublicKeyCredentialCreationOptions(
                    rp,
                    user,
                    challenge,
                    [{"type": "public-key", "alg": -7}],
                    authenticator_selection={"userVerification": "discouraged"},
                )
            )
        self.assertEqual(ctx.exception.code, ClientError.ERR.DEVICE_INELIGIBLE)

        # The error must come from the single device call, not a retry loop.
        ctap2.make_credential.assert_called_once()

    @mock.patch("fido2.client.Ctap2")
    def test_make_credential_ctap2(self, PatchedCtap2):
        """Successful CTAP2 registration returns attestation + client data."""
        dev = mock.Mock()
        dev.capabilities = CAPABILITY.CBOR
        ctap2 = mock.MagicMock()
        ctap2.get_info.return_value = Info(_INFO_NO_PIN)
        ctap2.info = ctap2.get_info()
        ctap2.make_credential.return_value = AttestationObject(_MC_RESP)
        PatchedCtap2.return_value = ctap2
        client = Fido2Client(dev, APP_ID)

        response = client.make_credential(
            PublicKeyCredentialCreationOptions(
                rp,
                user,
                challenge,
                [{"type": "public-key", "alg": -7}],
                timeout=1000,
                authenticator_selection={"userVerification": "discouraged"},
            )
        )

        self.assertIsInstance(response.attestation_object, AttestationObject)
        self.assertIsInstance(response.client_data, ClientData)

        # Positional arguments as forwarded to Ctap2.make_credential;
        # mock.ANY matches a client-internal argument (not pinned here).
        ctap2.make_credential.assert_called_with(
            response.client_data.hash,
            rp,
            user,
            [{"type": "public-key", "alg": -7}],
            None,
            None,
            None,
            None,
            None,
            mock.ANY,
            None,
        )

        self.assertEqual(response.client_data.get("origin"), APP_ID)
        self.assertEqual(response.client_data.get("type"), "webauthn.create")
        self.assertEqual(response.client_data.challenge, challenge)

    def test_make_credential_ctap1(self):
        """A CTAP1-only device registers via U2F and wraps the result."""
        dev = mock.Mock()
        dev.capabilities = 0  # No CTAP2
        client = Fido2Client(dev, APP_ID)

        client.ctap1 = mock.MagicMock()
        client.ctap1.get_version.return_value = "U2F_V2"
        client.ctap1.register.return_value = REG_DATA

        response = client.make_credential(
            PublicKeyCredentialCreationOptions(
                rp, user, challenge, [{"type": "public-key", "alg": -7}]
            )
        )

        self.assertIsInstance(response.attestation_object, AttestationObject)
        self.assertIsInstance(response.client_data, ClientData)
        client_data = response.client_data

        # U2F register is called with (client data hash, SHA-256 of the RP ID).
        client.ctap1.register.assert_called_with(
            client_data.hash, sha256(rp["id"].encode())
        )

        self.assertEqual(client_data.get("origin"), APP_ID)
        self.assertEqual(client_data.get("type"), "webauthn.create")
        self.assertEqual(client_data.challenge, challenge)

        # U2F-backed registrations are wrapped in "fido-u2f" attestation format.
        self.assertEqual(response.attestation_object.fmt, "fido-u2f")
|
|
@ -1,143 +0,0 @@
|
|||
# coding=utf-8
|
||||
|
||||
# Copyright (c) 2013 Yubico AB
|
||||
# All rights reserved.
|
||||
#
|
||||
# Redistribution and use in source and binary forms, with or
|
||||
# without modification, are permitted provided that the following
|
||||
# conditions are met:
|
||||
#
|
||||
# 1. Redistributions of source code must retain the above copyright
|
||||
# notice, this list of conditions and the following disclaimer.
|
||||
# 2. Redistributions in binary form must reproduce the above
|
||||
# copyright notice, this list of conditions and the following
|
||||
# disclaimer in the documentation and/or other materials provided
|
||||
# with the distribution.
|
||||
#
|
||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
|
||||
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
|
||||
# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
|
||||
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
|
||||
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
|
||||
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
|
||||
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
|
||||
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
|
||||
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
||||
# POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
from __future__ import absolute_import, unicode_literals
|
||||
|
||||
from fido2.rpid import verify_app_id, verify_rp_id
|
||||
import unittest
|
||||
|
||||
|
||||
class TestAppId(unittest.TestCase):
    """Tests for verify_app_id: U2F AppID vs. facet origin validation."""

    def test_valid_ids(self):
        """Origins on the AppID's host or its sub-domains are accepted."""
        accepted = [
            ("https://example.com", "https://register.example.com"),
            ("https://example.com", "https://fido.example.com"),
            ("https://example.com", "https://www.example.com:444"),
            (
                "https://companyA.hosting.example.com",
                "https://fido.companyA.hosting.example.com",
            ),
            (
                "https://companyA.hosting.example.com",
                "https://xyz.companyA.hosting.example.com",
            ),
        ]
        for app_id, facet in accepted:
            self.assertTrue(verify_app_id(app_id, facet))

    def test_valid_ids_mixed_type(self):
        """bytes and str arguments may be mixed freely."""
        self.assertTrue(
            verify_app_id(b"https://example.com", "https://register.example.com")
        )
        self.assertTrue(
            verify_app_id("https://example.com", b"https://fido.example.com")
        )
        self.assertTrue(
            verify_app_id(b"https://example.com", b"https://www.example.com:444")
        )

    def test_invalid_ids(self):
        """Non-HTTPS origins and unrelated hosts are rejected."""
        rejected = [
            ("https://example.com", "http://example.com"),
            ("https://example.com", "http://www.example.com"),
            ("https://example.com", "https://example-test.com"),
            ("https://companyA.hosting.example.com", "https://register.example.com"),
            (
                "https://companyA.hosting.example.com",
                "https://companyB.hosting.example.com",
            ),
        ]
        for app_id, facet in rejected:
            self.assertFalse(verify_app_id(app_id, facet))

    def test_invalid_ids_mixed_type(self):
        """Rejection also applies regardless of bytes/str mixing."""
        self.assertFalse(verify_app_id(b"https://example.com", "http://example.com"))
        self.assertFalse(
            verify_app_id("https://example.com", b"http://www.example.com")
        )
        self.assertFalse(
            verify_app_id(b"https://example.com", b"https://example-test.com")
        )

    def test_effective_tld_names(self):
        """Public-suffix entries cannot claim their sub-domains."""
        self.assertFalse(
            verify_app_id("https://appspot.com", "https://foo.appspot.com")
        )
        self.assertFalse(verify_app_id("https://co.uk", "https://example.co.uk"))
|
||||
|
||||
|
||||
class TestRpId(unittest.TestCase):
    """Tests for verify_rp_id: WebAuthn RP ID vs. request origin validation."""

    def test_valid_ids(self):
        """Origins whose host equals or is a sub-domain of the RP ID pass."""
        for origin in (
            "https://register.example.com",
            "https://fido.example.com",
            "https://www.example.com:444",
        ):
            self.assertTrue(verify_rp_id("example.com", origin))

    def test_valid_ids_mixed_type(self):
        """bytes and str arguments may be mixed freely."""
        self.assertTrue(verify_rp_id(b"example.com", "https://register.example.com"))
        self.assertTrue(verify_rp_id("example.com", b"https://fido.example.com"))
        self.assertTrue(verify_rp_id(b"example.com", b"https://www.example.com:444"))

    def test_invalid_ids(self):
        """Non-HTTPS origins and unrelated hosts are rejected."""
        for origin in (
            "http://example.com",
            "http://www.example.com",
            "https://example-test.com",
        ):
            self.assertFalse(verify_rp_id("example.com", origin))

        self.assertFalse(
            verify_rp_id("companyA.hosting.example.com", "https://register.example.com")
        )
        self.assertFalse(
            verify_rp_id(
                "companyA.hosting.example.com", "https://companyB.hosting.example.com"
            )
        )

    def test_invalid_ids_mixed_type(self):
        """Rejection also applies regardless of bytes/str mixing."""
        self.assertFalse(verify_rp_id(b"example.com", "http://example.com"))
        self.assertFalse(verify_rp_id("example.com", b"http://www.example.com"))
        self.assertFalse(verify_rp_id(b"example.com", b"https://example-test.com"))

    def test_suffix_list(self):
        """Public-suffix entries themselves are not usable as an RP ID."""
        self.assertFalse(verify_rp_id(b"co.uk", "https://foobar.co.uk"))
        self.assertTrue(verify_rp_id(b"foobar.co.uk", "https://site.foobar.co.uk"))
        self.assertFalse(verify_rp_id(b"appspot.com", "https://example.appspot.com"))
        self.assertTrue(
            verify_rp_id(b"example.appspot.com", "https://example.appspot.com")
        )
|
|
@ -1,219 +0,0 @@
|
|||
# Copyright (c) 2019 Yubico AB
|
||||
# All rights reserved.
|
||||
#
|
||||
# Redistribution and use in source and binary forms, with or
|
||||
# without modification, are permitted provided that the following
|
||||
# conditions are met:
|
||||
#
|
||||
# 1. Redistributions of source code must retain the above copyright
|
||||
# notice, this list of conditions and the following disclaimer.
|
||||
# 2. Redistributions in binary form must reproduce the above
|
||||
# copyright notice, this list of conditions and the following
|
||||
# disclaimer in the documentation and/or other materials provided
|
||||
# with the distribution.
|
||||
#
|
||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
|
||||
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
|
||||
# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
|
||||
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
|
||||
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
|
||||
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
|
||||
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
|
||||
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
|
||||
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
||||
# POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
from __future__ import absolute_import, unicode_literals
|
||||
|
||||
from fido2.webauthn import (
|
||||
AuthenticatorSelectionCriteria,
|
||||
PublicKeyCredentialRpEntity,
|
||||
PublicKeyCredentialUserEntity,
|
||||
PublicKeyCredentialParameters,
|
||||
PublicKeyCredentialDescriptor,
|
||||
PublicKeyCredentialCreationOptions,
|
||||
PublicKeyCredentialRequestOptions,
|
||||
)
|
||||
|
||||
import unittest
|
||||
|
||||
|
||||
class TestWebAuthnDataTypes(unittest.TestCase):
    """Tests for the dict-backed WebAuthn data classes in fido2.webauthn."""

    def test_authenticator_selection_criteria(self):
        """Fields round-trip via both dict keys and snake_case attributes."""
        o = AuthenticatorSelectionCriteria("platform", True, "required")
        self.assertEqual(
            o,
            {
                "authenticatorAttachment": "platform",
                "requireResidentKey": True,
                "userVerification": "required",
            },
        )
        self.assertEqual(o.authenticator_attachment, "platform")
        self.assertEqual(o.require_resident_key, True)
        self.assertEqual(o.user_verification, "required")

        # Enum-valued fields reject values outside the allowed set.
        with self.assertRaises(ValueError):
            AuthenticatorSelectionCriteria(authenticator_attachment="invalid")

        with self.assertRaises(ValueError):
            AuthenticatorSelectionCriteria(user_verification="invalid")

        # All fields are optional; an empty instance equals the empty dict.
        o = AuthenticatorSelectionCriteria()
        self.assertEqual(o, {})
        self.assertIsNone(o.authenticator_attachment)
        self.assertIsNone(o.require_resident_key)
        self.assertIsNone(o.user_verification)

    def test_rp_entity(self):
        """id and name are required for an RP entity; icon is optional."""
        o = PublicKeyCredentialRpEntity("example.com", "Example")
        self.assertEqual(o, {"id": "example.com", "name": "Example"})
        self.assertEqual(o.id, "example.com")
        self.assertEqual(o.name, "Example")
        self.assertIsNone(o.icon)

        with self.assertRaises(TypeError):
            PublicKeyCredentialRpEntity("example.com")

        with self.assertRaises(TypeError):
            PublicKeyCredentialRpEntity()

    def test_user_entity(self):
        """id and name are required for a user entity; displayName maps to display_name."""
        o = PublicKeyCredentialUserEntity(b"user", "Example", display_name="Display")
        self.assertEqual(
            o, {"id": b"user", "name": "Example", "displayName": "Display"}
        )
        self.assertEqual(o.id, b"user")
        self.assertEqual(o.name, "Example")
        self.assertEqual(o.display_name, "Display")
        self.assertIsNone(o.icon)

        with self.assertRaises(TypeError):
            PublicKeyCredentialUserEntity(b"user")

        with self.assertRaises(TypeError):
            PublicKeyCredentialUserEntity()

    def test_parameters(self):
        """type must be a known credential type; type and alg are required."""
        o = PublicKeyCredentialParameters("public-key", -7)
        self.assertEqual(o, {"type": "public-key", "alg": -7})
        self.assertEqual(o.type, "public-key")
        self.assertEqual(o.alg, -7)

        with self.assertRaises(ValueError):
            PublicKeyCredentialParameters("invalid-type", -7)

        with self.assertRaises(TypeError):
            PublicKeyCredentialParameters("public-key")

        with self.assertRaises(TypeError):
            PublicKeyCredentialParameters()

    def test_descriptor(self):
        """transports is optional; unknown transport strings are tolerated."""
        o = PublicKeyCredentialDescriptor("public-key", b"credential_id")
        self.assertEqual(o, {"type": "public-key", "id": b"credential_id"})
        self.assertEqual(o.type, "public-key")
        self.assertEqual(o.id, b"credential_id")
        self.assertIsNone(o.transports)

        o = PublicKeyCredentialDescriptor(
            "public-key", b"credential_id", ["usb", "nfc"]
        )
        self.assertEqual(
            o,
            {
                "type": "public-key",
                "id": b"credential_id",
                "transports": ["usb", "nfc"],
            },
        )
        self.assertEqual(o.transports, ["usb", "nfc"])

        # Unknown transport values must not raise (forward compatibility).
        PublicKeyCredentialDescriptor("public-key", b"credential_id", ["valid_value"])

        with self.assertRaises(ValueError):
            PublicKeyCredentialDescriptor("wrong-type", b"credential_id")

        with self.assertRaises(TypeError):
            PublicKeyCredentialDescriptor("wrong-type")

        with self.assertRaises(TypeError):
            PublicKeyCredentialDescriptor()

    def test_creation_options(self):
        """Optional creation fields default to None; attestation is validated."""
        o = PublicKeyCredentialCreationOptions(
            {"id": "example.com", "name": "Example"},
            {"id": b"user_id", "name": "A. User"},
            b"request_challenge",
            [{"type": "public-key", "alg": -7}],
            10000,
            [{"type": "public-key", "id": b"credential_id"}],
            {
                "authenticatorAttachment": "platform",
                "requireResidentKey": True,
                "userVerification": "required",
            },
            "direct",
        )
        self.assertEqual(o.rp, {"id": "example.com", "name": "Example"})
        self.assertEqual(o.user, {"id": b"user_id", "name": "A. User"})
        self.assertIsNone(o.extensions)

        o = PublicKeyCredentialCreationOptions(
            {"id": "example.com", "name": "Example"},
            {"id": b"user_id", "name": "A. User"},
            b"request_challenge",
            [{"type": "public-key", "alg": -7}],
        )
        self.assertIsNone(o.timeout)
        self.assertIsNone(o.authenticator_selection)
        self.assertIsNone(o.attestation)

        with self.assertRaises(ValueError):
            PublicKeyCredentialCreationOptions(
                {"id": "example.com", "name": "Example"},
                {"id": b"user_id", "name": "A. User"},
                b"request_challenge",
                [{"type": "public-key", "alg": -7}],
                attestation="invalid",
            )

    def test_request_options(self):
        """Only challenge is required; user_verification is validated."""
        o = PublicKeyCredentialRequestOptions(
            b"request_challenge",
            10000,
            "example.com",
            [{"type": "public-key", "id": b"credential_id"}],
            "discouraged",
        )
        self.assertEqual(o.challenge, b"request_challenge")
        self.assertEqual(o.rp_id, "example.com")
        self.assertEqual(o.timeout, 10000)
        self.assertIsNone(o.extensions)

        o = PublicKeyCredentialRequestOptions(b"request_challenge")
        self.assertIsNone(o.timeout)
        self.assertIsNone(o.rp_id)
        self.assertIsNone(o.allow_credentials)
        self.assertIsNone(o.user_verification)

        with self.assertRaises(ValueError):
            PublicKeyCredentialRequestOptions(
                b"request_challenge", user_verification="invalid"
            )

    def test_update_value(self):
        """Attribute writes and dict-key writes both update the mapping."""
        o = PublicKeyCredentialRpEntity("example.com", "Example")
        self.assertEqual(o, {"id": "example.com", "name": "Example"})
        self.assertEqual(o.id, "example.com")
        self.assertEqual(o.name, "Example")

        o.id = "new-id.com"
        self.assertEqual(o, {"id": "new-id.com", "name": "Example"})
        self.assertEqual(o.id, "new-id.com")

        o["name"] = "New Name"
        self.assertEqual(o, {"id": "new-id.com", "name": "New Name"})
        self.assertEqual(o.name, "New Name")
|
|
@ -0,0 +1,3 @@
|
|||
# Enable the JSON-friendly WebAuthn data mapping feature flag for the
# entire test run (applied at import time, before any tests execute).
import fido2.features

fido2.features.webauthn_json_mapping.enabled = True
|
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in New Issue