Compare commits

...

91 Commits

Author SHA1 Message Date
Oscar Krause
ffc9f91c2e Merge branch 'db' into 'main'
DB - store settings in database

See merge request oscar.krause/fastapi-dls!49
2025-04-16 19:18:17 +02:00
Oscar Krause
6b54d4794b Update .gitlab-ci.yml 2025-04-16 15:07:28 +02:00
Oscar Krause
5209ece1cd merge fixes 2025-04-16 14:56:26 +02:00
Oscar Krause
522b0a123e Merge branch 'main' into db
# Conflicts:
#	app/orm.py
2025-04-16 14:55:27 +02:00
Oscar Krause
9605ba3eee Merge branch 'dev' into 'main'
Dev

See merge request oscar.krause/fastapi-dls!51
2025-04-16 14:43:58 +02:00
Oscar Krause
cd5c2a6cb1 code styling 2025-04-16 14:18:04 +02:00
Oscar Krause
1d3255188e code styling 2025-04-16 14:12:40 +02:00
Oscar Krause
cea7a01b54 code styling 2025-04-16 14:12:15 +02:00
Oscar Krause
9f417b61a9 fixes 2025-04-16 14:07:18 +02:00
Oscar Krause
4f77200628 code styling 2025-04-16 14:07:18 +02:00
Oscar Krause
20b03446dc Merge branch 'main' into 'dev'
# Conflicts:
#   .gitlab-ci.yml
2025-04-16 13:57:51 +02:00
Oscar Krause
df506e8591 removed unsupported python versions 2025-04-16 13:56:36 +02:00
Oscar Krause
3fe3429986 added some python versions and added EOL 2025-04-16 12:54:24 +02:00
Oscar Krause
a996504c50 test python3.13 and 3.11 2025-04-16 12:44:58 +02:00
Oscar Krause
5d2bff88d8 fixes 2025-04-16 12:35:24 +02:00
Oscar Krause
67f2d18a95 requirements.txt updated 2025-04-16 12:25:37 +02:00
Oscar Krause
52cd34cb5c ci improvements 2025-04-16 12:24:19 +02:00
Oscar Krause
6fb03309a5 ci improvements 2025-04-16 12:23:21 +02:00
Oscar Krause
477e5abbca refactored test pipeline to test different python versions 2025-04-16 12:12:23 +02:00
Oscar Krause
a8c1cdf095 updated create_driver_matrix_json.py 2025-04-11 14:10:26 +02:00
Oscar Krause
a12d05281c updated default data 2025-04-08 14:30:59 +02:00
Oscar Krause
a31c80465a code styling 2025-04-08 14:05:54 +02:00
Oscar Krause
ddf5f12409 fixes 2025-04-08 14:00:15 +02:00
Oscar Krause
20cdaefa1c code refactorings after merge from main 2025-04-08 13:52:09 +02:00
Oscar Krause
f62f2a2701 Merge branch 'main' into db
# Conflicts:
#	app/main.py
#	app/orm.py
#	app/util.py
#	test/main.py
2025-04-08 10:56:25 +02:00
Oscar Krause
cd4674caad fixes 2024-06-21 19:35:42 +02:00
Oscar Krause
b0b627a3f0 Merge branch 'refs/heads/dev' into db
# Conflicts:
#	.gitlab-ci.yml
#	Dockerfile
#	README.md
#	app/main.py
#	app/orm.py
#	requirements.txt
2024-06-21 19:28:23 +02:00
Oscar Krause
16f80cd78b added "16.3" support 2024-03-04 21:19:59 +01:00
Oscar Krause
07aec53787 requirements.txt updated 2024-03-04 21:19:59 +01:00
Oscar Krause
3e87820f63 removed todo 2024-03-04 21:19:59 +01:00
Oscar Krause
a927e291b5 fixes 2024-03-04 21:19:59 +01:00
Oscar Krause
72054d30c4 make tests interruptible 2024-03-04 21:19:59 +01:00
Oscar Krause
00dc848083 only run test matrix when "app" or "test" changes 2024-03-04 21:19:59 +01:00
Oscar Krause
78b6fe52c7 fixed CI/CD path from "/builds" to "/tmp/builds" 2024-03-04 21:19:59 +01:00
Oscar Krause
f82d73bb01 run different jobs on "$CI_DEFAULT_BRANCH" 2024-03-04 21:19:59 +01:00
Oscar Krause
416df311b8 removed pylint 2024-03-04 21:19:59 +01:00
Oscar Krause
a6ea8241c2 disabled pylint 2024-03-04 21:19:59 +01:00
Oscar Krause
e70f70d806 disabled code_quality debug 2024-03-04 21:19:59 +01:00
Oscar Krause
77be5772c4 Update .codeclimate.yml 2024-03-04 21:19:59 +01:00
Oscar Krause
6c1b05c66a fixed test_coverage (fail on matrix) 2024-03-04 21:19:59 +01:00
Oscar Krause
a54411a957 added code_quality debug 2024-03-04 21:19:59 +01:00
Oscar Krause
90e0cb8e84 added code_quality "SOURCE_CODE" variable 2024-03-04 21:19:59 +01:00
Oscar Krause
eecb59e2e4 removed "cython" from "test" 2024-03-04 21:19:59 +01:00
Oscar Krause
4c0f65faec removed tests for "23.04"
> gcc -Wsign-compare -DNDEBUG -g -fwrapv -O3 -Wall -fPIC -I/tmp/pip-install-sazb8fvo/httptools_694f06fa2e354ed9ba9f5c167df7fce4/vendor/llhttp/include -I/tmp/pip-install-sazb8fvo/httptools_694f06fa2e354ed9ba9f5c167df7fce4/vendor/llhttp/src -I/usr/local/include/python3.11 -c httptools/parser/parser.c -o build/temp.linux-x86_64-cpython-311/httptools/parser/parser.o -O2
      httptools/parser/parser.c:212:12: fatal error: longintrepr.h: No such file or directory
2024-03-04 21:19:59 +01:00
Oscar Krause
e41084f5c5 added tests for Ubuntu "Mantic Minotaur" 2024-03-04 21:19:59 +01:00
Oscar Krause
36d5b83fb8 requirements.txt updated 2024-03-04 21:19:59 +01:00
Oscar Krause
11138c2191 updated debian bookworm 12 dependencies 2024-03-04 21:19:59 +01:00
Oscar Krause
ff9e85e32b updated test to debian bookworm 2024-03-04 21:19:59 +01:00
Oscar Krause
cb6a089678 fixed testing dependency 2024-03-04 21:19:59 +01:00
Oscar Krause
085186f82a added gcc as dependency 2024-03-04 21:19:59 +01:00
Oscar Krause
f77d3feee1 fixes 2024-03-04 21:19:59 +01:00
Oscar Krause
f2721c7663 fixed debian package versions 2024-03-04 21:19:59 +01:00
Oscar Krause
40cb5518cb fixed versions & added 16.2 as supported 2024-03-04 21:19:59 +01:00
Oscar Krause
021c0ac38d added os specific requirements.txt 2024-03-04 21:19:59 +01:00
Oscar Krause
9c22628b4e implemented python test matrix for different python dependencies on different os releases 2024-03-04 21:19:59 +01:00
Oscar Krause
966b421dad README.md updated 2024-03-04 21:19:59 +01:00
Oscar Krause
7f8752a93d updated ubuntu from 22.10 (EOL) to 23.04 2024-03-04 21:19:59 +01:00
Oscar Krause
30979fd18e requirements.txt updated 2024-03-04 21:19:59 +01:00
Oscar Krause
72965cc879 added 16.1 as supported nvidia driver release 2024-03-04 21:19:59 +01:00
Oscar Krause
1887cbc534 added macOS as supported host (using python-venv) 2024-03-04 21:19:59 +01:00
Oscar Krause
2e942f4553 added Docker supported system architectures 2024-03-04 21:19:59 +01:00
Oscar Krause
3dda920a52 added link to driver compatibility section 2024-03-04 21:19:59 +01:00
Oscar Krause
765a994d83 requirements.txt updated 2024-03-04 21:19:59 +01:00
Oscar Krause
23488f94d4 added support for 16.0 drivers to readme 2024-03-04 21:19:59 +01:00
Oscar Krause
f9341cdab4 fixed docker image name (gitlab registry) 2024-03-04 21:19:59 +01:00
Oscar Krause
cad81ad1d6 fixed deploy docker 2024-03-04 21:19:59 +01:00
Oscar Krause
b07b7da2f3 fixed new docker registry image path 2024-03-04 21:19:59 +01:00
Oscar Krause
1ef7dd82f6 toggle api endpoints 2024-03-04 21:19:25 +01:00
Oscar Krause
5a1b1a5950 typos 2024-03-04 21:19:25 +01:00
Oscar Krause
83f4b42f01 added information that IPv6 may need to be disabled 2024-03-04 21:19:25 +01:00
Oscar Krause
a3baaab26f removed mysql from included docker drivers 2024-03-04 21:19:25 +01:00
Oscar Krause
aa4ebfce73 added docker command to logging section
thanks to @libreshare (https://gitea.publichub.eu/oscar.krause/fastapi-dls/issues/2)
2024-03-04 21:19:25 +01:00
Oscar Krause
aa746feb13 improvements
thanks to @AbsolutelyFree (https://gitea.publichub.eu/oscar.krause/fastapi-dls/issues/1)
2024-03-04 21:19:25 +01:00
Oscar Krause
fce0eb6d74 fixed "deploy:pacman" 2024-03-04 21:19:25 +01:00
Oscar Krause
32806e5cca push multiarch image to docker-hub 2024-03-04 21:19:25 +01:00
Oscar Krause
50eddeecfc fixed mariadb-client installation
ref. https://github.com/PyMySQL/mysqlclient/discussions/624
2024-03-04 21:19:25 +01:00
Oscar Krause
092e6186ab added missing "pkg-config" for "mysqlclient==2.2.0"
ref. https://stackoverflow.com/questions/76533384/docker-alpine-build-fails-on-mysqlclient-installation-with-error-exception-can
2024-03-04 21:19:25 +01:00
Oscar Krause
acbe889fd9 fixed versions 2024-03-04 21:19:25 +01:00
Oscar Krause
05cad95c2a refactored docker-compose.yml to a very simple example, and moved proxy to "examples" directory 2024-03-04 21:19:25 +01:00
Oscar Krause
c9e36759e3 added 15.3 to supported drivers list 2024-03-04 21:19:25 +01:00
Oscar Krause
d116eec626 updated compatibility list 2024-03-04 21:19:25 +01:00
Oscar Krause
b1620154db docker-compose.yml - added note for TZ 2024-03-04 21:19:25 +01:00
Oscar Krause
4181095791 requirements.txt updated 2024-03-04 21:19:25 +01:00
Oscar Krause
248c70a862 Merge branch 'dev' into db 2023-06-12 15:19:28 +02:00
Oscar Krause
39a2408d8d migrated api to database-config (Site, Instance) 2023-06-12 15:19:06 +02:00
Oscar Krause
18807401e4 orm improvements & fixes 2023-06-12 15:14:12 +02:00
Oscar Krause
5e47ad7729 fastapi openapi url 2023-06-12 15:13:53 +02:00
Oscar Krause
20448bc587 improved relationships 2023-06-12 15:13:29 +02:00
Oscar Krause
5e945bc43a code styling 2023-06-12 14:47:32 +02:00
Oscar Krause
b4150fa527 implemented Site and Instance orm models including initialization 2023-06-12 12:42:13 +02:00
Oscar Krause
38e1a1725c code styling 2023-06-12 12:40:10 +02:00
8 changed files with 435 additions and 187 deletions
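
At a glance: configuration that 1.x read from environment variables (SITE_KEY_XID, INSTANCE_REF, key file paths, expiry deltas) now lives in Site and Instance database rows, with an RSA keypair generated on first start. As a rough orientation before the per-file diffs, a minimal sketch of the new lookup flow (names are taken from the diffs below; the sqlite URL is an illustrative placeholder):

    # Sketch of the settings-in-database flow introduced by this merge.
    # Assumes the Site/Instance helpers added in app/orm.py (see diff below);
    # the engine URL is illustrative, not project configuration.
    from sqlalchemy import create_engine
    from orm import Site, Instance, init as db_init, migrate

    db = create_engine('sqlite:///db.sqlite')
    db_init(db), migrate(db)  # create tables, seed a default Site/Instance on first run

    default_site = Site.get_default_site(db)              # replaces env SITE_KEY_XID
    default_instance = Instance.get_default_instance(db)  # replaces env INSTANCE_REF

    jwt_encode_key = default_instance.get_jwt_encode_key()    # built from the DB-stored private key
    lease_expire = default_instance.get_lease_expire_delta()  # replaces LEASE_EXPIRE_DELTA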

.gitlab-ci.yml

@@ -120,13 +120,12 @@ build:pacman:
     paths:
       - "*.pkg.tar.zst"
 
-test:
-  image: python:3.12-slim-bookworm
+test:python:
+  image: $IMAGE
   stage: test
   interruptible: true
   rules:
     - if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH
-    - if: $CI_COMMIT_TAG
     - if: $CI_PIPELINE_SOURCE == "merge_request_event"
     - if: $CI_COMMIT_BRANCH && $CI_COMMIT_BRANCH != $CI_DEFAULT_BRANCH
       changes:
@@ -136,17 +135,20 @@ test:
     DATABASE: sqlite:///../app/db.sqlite
   parallel:
     matrix:
-      - REQUIREMENTS:
-          - 'requirements.txt'
-          # - '.DEBIAN/requirements-bookworm-12.txt'
-          # - '.DEBIAN/requirements-ubuntu-24.04.txt'
-          # - '.DEBIAN/requirements-ubuntu-24.10.txt'
+      - IMAGE:
+          # https://devguide.python.org/versions/#supported-versions
+          # - python:3.14-rc-alpine  # EOL 2030-10 => uvicorn does not support 3.14 yet
+          - python:3.13-alpine  # EOL 2029-10
+          - python:3.12-alpine  # EOL 2028-10
+          - python:3.11-alpine  # EOL 2027-10
+          # - python:3.10-alpine  # EOL 2026-10 => ImportError: cannot import name 'UTC' from 'datetime'
+          # - python:3.9-alpine  # EOL 2025-10 => ImportError: cannot import name 'UTC' from 'datetime'
   before_script:
-    - apt-get update && apt-get install -y python3-dev python3-pip python3-venv gcc
+    - apk --no-cache add openssl
     - python3 -m venv venv
     - source venv/bin/activate
     - pip install --upgrade pip
-    - pip install -r $REQUIREMENTS
+    - pip install -r requirements.txt
     - pip install pytest pytest-cov pytest-custom_exit_code httpx
     - mkdir -p app/cert
     - openssl genrsa -out app/cert/instance.private.pem 2048
@@ -156,10 +158,10 @@ test:
     - python -m pytest main.py --junitxml=report.xml
   artifacts:
     reports:
-      dotenv: version.env
       junit: ['**/report.xml']
 
-.test:apt:
+test:apt:
+  image: $IMAGE
   stage: test
   rules:
     - if: $CI_COMMIT_BRANCH && $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH
@@ -167,7 +169,15 @@ test:
       changes:
         - app/**/*
         - .DEBIAN/**/*
-        - .gitlab-ci.yml
+    - if: $CI_PIPELINE_SOURCE == 'merge_request_event'
+  parallel:
+    matrix:
+      - IMAGE:
+          - debian:trixie-slim # EOL: t.b.a.
+          - debian:bookworm-slim # EOL: June 06, 2026
+          - debian:bookworm-slim # EOL: June 06, 2026
+          - ubuntu:24.04 # EOL: April 2036
+          - ubuntu:24.10
   needs:
     - job: build:apt
       artifacts: true
@@ -199,16 +209,6 @@ test:
     - apt-get purge -qq -y fastapi-dls
     - apt-get autoremove -qq -y && apt-get clean -qq
 
-test:apt:
-  extends: .test:apt
-  image: $IMAGE
-  parallel:
-    matrix:
-      - IMAGE:
-          - debian:bookworm-slim # EOL: June 06, 2026
-          - ubuntu:24.04 # EOL: April 2036
-          - ubuntu:24.10
-
 test:pacman:archlinux:
   image: archlinux:base
   rules:
@@ -292,15 +292,12 @@ gemnasium-python-dependency_scanning:
     - if: $CI_PIPELINE_SOURCE == "merge_request_event"
     - if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH
 
-.deploy:
-  rules:
-    - if: $CI_COMMIT_TAG
-
 deploy:docker:
-  extends: .deploy
   image: docker:dind
   stage: deploy
   tags: [ docker ]
+  rules:
+    - if: $CI_COMMIT_TAG
   before_script:
     - echo "Building docker image for commit $CI_COMMIT_SHA with version $CI_COMMIT_REF_NAME"
     - docker buildx inspect
@@ -319,9 +316,10 @@ deploy:docker:
 deploy:apt:
   # doc: https://git.collinwebdesigns.de/help/user/packages/debian_repository/index.md#install-a-package
-  extends: .deploy
   image: debian:bookworm-slim
   stage: deploy
+  rules:
+    - if: $CI_COMMIT_TAG
   needs:
     - job: build:apt
       artifacts: true
@@ -358,9 +356,10 @@ deploy:apt:
     - 'curl --header "JOB-TOKEN: $CI_JOB_TOKEN" --upload-file ${EXPORT_NAME} "${CI_API_V4_URL}/projects/${CI_PROJECT_ID}/packages/generic/${PACKAGE_NAME}/${PACKAGE_VERSION}/${EXPORT_NAME}"'
 
 deploy:pacman:
-  extends: .deploy
   image: archlinux:base-devel
   stage: deploy
+  rules:
+    - if: $CI_COMMIT_TAG
   needs:
     - job: build:pacman
       artifacts: true
@@ -381,7 +380,7 @@ deploy:pacman:
 release:
   image: registry.gitlab.com/gitlab-org/release-cli:latest
   stage: .post
-  needs: [ test ]
+  needs: [ deploy:docker, deploy:apt, deploy:pacman ]
   rules:
     - if: $CI_COMMIT_TAG
   script:

README.md

@@ -795,13 +795,13 @@ Thanks to vGPU community and all who uses this project and report bugs.
 Special thanks to:
 
-- @samicrusader who created build file for **ArchLinux**
-- @cyrus who wrote the section for **openSUSE**
-- @midi who wrote the section for **unRAID**
-- @polloloco who wrote the *[NVIDIA vGPU Guide](https://gitlab.com/polloloco/vgpu-proxmox)*
-- @DualCoder who creates the `vgpu_unlock` functionality [vgpu_unlock](https://github.com/DualCoder/vgpu_unlock)
-- Krutav Shah who wrote the [vGPU_Unlock Wiki](https://docs.google.com/document/d/1pzrWJ9h-zANCtyqRgS7Vzla0Y8Ea2-5z2HEi4X75d2Q/)
-- Wim van 't Hoog for the [Proxmox All-In-One Installer Script](https://wvthoog.nl/proxmox-vgpu-v3/)
-- @mrzenc who wrote [fastapi-dls-nixos](https://github.com/mrzenc/fastapi-dls-nixos)
+- `samicrusader` who created build file for **ArchLinux**
+- `cyrus` who wrote the section for **openSUSE**
+- `midi` who wrote the section for **unRAID**
+- `polloloco` who wrote the *[NVIDIA vGPU Guide](https://gitlab.com/polloloco/vgpu-proxmox)*
+- `DualCoder` who creates the `vgpu_unlock` functionality [vgpu_unlock](https://github.com/DualCoder/vgpu_unlock)
+- `Krutav Shah` who wrote the [vGPU_Unlock Wiki](https://docs.google.com/document/d/1pzrWJ9h-zANCtyqRgS7Vzla0Y8Ea2-5z2HEi4X75d2Q/)
+- `Wim van 't Hoog` for the [Proxmox All-In-One Installer Script](https://wvthoog.nl/proxmox-vgpu-v3/)
+- `mrzenc` who wrote [fastapi-dls-nixos](https://github.com/mrzenc/fastapi-dls-nixos)
 
 And thanks to all people who contributed to all these libraries!

app/main.py

@@ -1,8 +1,9 @@
 import logging
+import sys
 from base64 import b64encode as b64enc
 from calendar import timegm
 from contextlib import asynccontextmanager
-from datetime import datetime, timedelta, UTC
+from datetime import datetime, UTC
 from hashlib import sha256
 from json import loads as json_loads
 from os import getenv as env
@@ -13,15 +14,14 @@ from dateutil.relativedelta import relativedelta
 from dotenv import load_dotenv
 from fastapi import FastAPI
 from fastapi.requests import Request
-from jose import jws, jwk, jwt, JWTError
+from jose import jws, jwt, JWTError
 from jose.constants import ALGORITHMS
 from sqlalchemy import create_engine
 from sqlalchemy.orm import sessionmaker
 from starlette.middleware.cors import CORSMiddleware
 from starlette.responses import StreamingResponse, JSONResponse as JSONr, HTMLResponse as HTMLr, Response, RedirectResponse
 
-from orm import Origin, Lease, init as db_init, migrate
-from util import PrivateKey, PublicKey, load_file
+from orm import Origin, Lease, init as db_init, migrate, Instance, Site
 
 # Load variables
 load_dotenv('../version.env')
@@ -39,20 +39,9 @@ db_init(db), migrate(db)
 # Load DLS variables (all prefixed with "INSTANCE_*" is used as "SERVICE_INSTANCE_*" or "SI_*" in official dls service)
 DLS_URL = str(env('DLS_URL', 'localhost'))
 DLS_PORT = int(env('DLS_PORT', '443'))
-SITE_KEY_XID = str(env('SITE_KEY_XID', '00000000-0000-0000-0000-000000000000'))
-INSTANCE_REF = str(env('INSTANCE_REF', '10000000-0000-0000-0000-000000000001'))
-ALLOTMENT_REF = str(env('ALLOTMENT_REF', '20000000-0000-0000-0000-000000000001'))
-INSTANCE_KEY_RSA = PrivateKey.from_file(str(env('INSTANCE_KEY_RSA', join(dirname(__file__), 'cert/instance.private.pem'))))
-INSTANCE_KEY_PUB = PublicKey.from_file(str(env('INSTANCE_KEY_PUB', join(dirname(__file__), 'cert/instance.public.pem'))))
-TOKEN_EXPIRE_DELTA = relativedelta(days=int(env('TOKEN_EXPIRE_DAYS', 1)), hours=int(env('TOKEN_EXPIRE_HOURS', 0)))
-LEASE_EXPIRE_DELTA = relativedelta(days=int(env('LEASE_EXPIRE_DAYS', 90)), hours=int(env('LEASE_EXPIRE_HOURS', 0)))
-LEASE_RENEWAL_PERIOD = float(env('LEASE_RENEWAL_PERIOD', 0.15))
-LEASE_RENEWAL_DELTA = timedelta(days=int(env('LEASE_EXPIRE_DAYS', 90)), hours=int(env('LEASE_EXPIRE_HOURS', 0)))
-CLIENT_TOKEN_EXPIRE_DELTA = relativedelta(years=12)
 CORS_ORIGINS = str(env('CORS_ORIGINS', '')).split(',') if (env('CORS_ORIGINS')) else [f'https://{DLS_URL}']
-
-jwt_encode_key = jwk.construct(INSTANCE_KEY_RSA.pem(), algorithm=ALGORITHMS.RS256)
-jwt_decode_key = jwk.construct(INSTANCE_KEY_PUB.pem(), algorithm=ALGORITHMS.RS256)
+ALLOTMENT_REF = str(env('ALLOTMENT_REF', '20000000-0000-0000-0000-000000000001'))  # todo
 
 # Logging
 LOG_LEVEL = logging.DEBUG if DEBUG else logging.INFO
@@ -60,25 +49,33 @@ logging.basicConfig(format='[{levelname:^7}] [{module:^15}] {message}', style='{
 logger = logging.getLogger(__name__)
 logger.setLevel(LOG_LEVEL)
 logging.getLogger('util').setLevel(LOG_LEVEL)
-logging.getLogger('NV').setLevel(LOG_LEVEL)
+logging.getLogger('DriverMatrix').setLevel(LOG_LEVEL)
 
 
 # FastAPI
 @asynccontextmanager
 async def lifespan(_: FastAPI):
     # on startup
+    default_instance = Instance.get_default_instance(db)
+    lease_renewal_period = default_instance.lease_renewal_period
+    lease_renewal_delta = default_instance.get_lease_renewal_delta()
+    client_token_expire_delta = default_instance.get_client_token_expire_delta()
+
     logger.info(f'''
     Using timezone: {str(TZ)}. Make sure this is correct and match your clients!
-    Your clients renew their license every {str(Lease.calculate_renewal(LEASE_RENEWAL_PERIOD, LEASE_RENEWAL_DELTA))}.
-    If the renewal fails, the license is {str(LEASE_RENEWAL_DELTA)} valid.
-    Your client-token file (.tok) is valid for {str(CLIENT_TOKEN_EXPIRE_DELTA)}.
+    Your clients will renew their license every {str(Lease.calculate_renewal(lease_renewal_period, lease_renewal_delta))}.
+    If the renewal fails, the license is valid for {str(lease_renewal_delta)}.
+    Your client-token file (.tok) is valid for {str(client_token_expire_delta)}.
     ''')
 
     logger.info(f'Debug is {"enabled" if DEBUG else "disabled"}.')
 
+    validate_settings()
+
     yield
     # on shutdown
@@ -99,12 +96,24 @@ app.add_middleware(
 
 # Helper
-def __get_token(request: Request) -> dict:
+def __get_token(request: Request, jwt_decode_key: "jose.jwt") -> dict:
     authorization_header = request.headers.get('authorization')
     token = authorization_header.split(' ')[1]
     return jwt.decode(token=token, key=jwt_decode_key, algorithms=ALGORITHMS.RS256, options={'verify_aud': False})
 
 
+def validate_settings():
+    session = sessionmaker(bind=db)()
+
+    lease_expire_delta_min, lease_expire_delta_max = 86_400, 7_776_000
+    for instance in session.query(Instance).all():
+        lease_expire_delta = instance.lease_expire_delta
+        if lease_expire_delta < lease_expire_delta_min or lease_expire_delta > lease_expire_delta_max:
+            logging.warning(f'> [ instance ]: {instance.instance_ref}: "lease_expire_delta" should be between {lease_expire_delta_min} and {lease_expire_delta_max}')
+
+    session.close()
+
+
 # Endpoints
 
 @app.get('/', summary='Index')
@@ -124,18 +133,20 @@ async def _health():
 
 @app.get('/-/config', summary='* Config', description='returns environment variables.')
 async def _config():
+    default_site, default_instance = Site.get_default_site(db), Instance.get_default_instance(db)
+
     return JSONr({
         'VERSION': str(VERSION),
         'COMMIT': str(COMMIT),
         'DEBUG': str(DEBUG),
         'DLS_URL': str(DLS_URL),
         'DLS_PORT': str(DLS_PORT),
-        'SITE_KEY_XID': str(SITE_KEY_XID),
-        'INSTANCE_REF': str(INSTANCE_REF),
+        'SITE_KEY_XID': str(default_site.site_key),
+        'INSTANCE_REF': str(default_instance.instance_ref),
         'ALLOTMENT_REF': [str(ALLOTMENT_REF)],
-        'TOKEN_EXPIRE_DELTA': str(TOKEN_EXPIRE_DELTA),
-        'LEASE_EXPIRE_DELTA': str(LEASE_EXPIRE_DELTA),
-        'LEASE_RENEWAL_PERIOD': str(LEASE_RENEWAL_PERIOD),
+        'TOKEN_EXPIRE_DELTA': str(default_instance.get_token_expire_delta()),
+        'LEASE_EXPIRE_DELTA': str(default_instance.get_lease_expire_delta()),
+        'LEASE_RENEWAL_PERIOD': str(default_instance.lease_renewal_period),
        'CORS_ORIGINS': str(CORS_ORIGINS),
         'TZ': str(TZ),
     })
@@ -144,6 +155,7 @@ async def _config():
 
 @app.get('/-/readme', summary='* Readme')
 async def _readme():
     from markdown import markdown
+    from util import load_file
     content = load_file(join(dirname(__file__), '../README.md')).decode('utf-8')
     return HTMLr(markdown(text=content, extensions=['tables', 'fenced_code', 'md_in_html', 'nl2br', 'toc']))
@@ -193,8 +205,7 @@ async def _origins(request: Request, leases: bool = False):
     for origin in session.query(Origin).all():
         x = origin.serialize()
         if leases:
-            serialize = dict(renewal_period=LEASE_RENEWAL_PERIOD, renewal_delta=LEASE_RENEWAL_DELTA)
-            x['leases'] = list(map(lambda _: _.serialize(**serialize), Lease.find_by_origin_ref(db, origin.origin_ref)))
+            x['leases'] = list(map(lambda _: _.serialize(), Lease.find_by_origin_ref(db, origin.origin_ref)))
         response.append(x)
     session.close()
     return JSONr(response)
@@ -211,8 +222,7 @@ async def _leases(request: Request, origin: bool = False):
     session = sessionmaker(bind=db)()
     response = []
     for lease in session.query(Lease).all():
-        serialize = dict(renewal_period=LEASE_RENEWAL_PERIOD, renewal_delta=LEASE_RENEWAL_DELTA)
-        x = lease.serialize(**serialize)
+        x = lease.serialize()
         if origin:
             lease_origin = session.query(Origin).filter(Origin.origin_ref == lease.origin_ref).first()
             if lease_origin is not None:
@@ -239,7 +249,13 @@ async def _lease_delete(request: Request, lease_ref: str):
 
 @app.get('/-/client-token', summary='* Client-Token', description='creates a new messenger token for this service instance')
 async def _client_token():
     cur_time = datetime.now(UTC)
-    exp_time = cur_time + CLIENT_TOKEN_EXPIRE_DELTA
+
+    default_instance = Instance.get_default_instance(db)
+    public_key = default_instance.get_public_key()
+    # todo: implement request parameter to support different instances
+    jwt_encode_key = default_instance.get_jwt_encode_key()
+    exp_time = cur_time + default_instance.get_client_token_expire_delta()
 
     payload = {
         "jti": str(uuid4()),
@@ -252,7 +268,7 @@ async def _client_token():
         "scope_ref_list": [ALLOTMENT_REF],
         "fulfillment_class_ref_list": [],
         "service_instance_configuration": {
-            "nls_service_instance_ref": INSTANCE_REF,
+            "nls_service_instance_ref": default_instance.instance_ref,
             "svc_port_set_list": [
                 {
                     "idx": 0,
@@ -264,10 +280,10 @@ async def _client_token():
         },
         "service_instance_public_key_configuration": {
             "service_instance_public_key_me": {
-                "mod": hex(INSTANCE_KEY_PUB.raw().public_numbers().n)[2:],
-                "exp": int(INSTANCE_KEY_PUB.raw().public_numbers().e),
+                "mod": hex(public_key.raw().public_numbers().n)[2:],
+                "exp": int(public_key.raw().public_numbers().e),
             },
-            "service_instance_public_key_pem": INSTANCE_KEY_PUB.pem().decode('utf-8'),
+            "service_instance_public_key_pem": public_key.pem().decode('utf-8'),
             "key_retention_mode": "LATEST_ONLY"
         },
     }
@@ -349,13 +365,16 @@ async def auth_v1_code(request: Request):
     delta = relativedelta(minutes=15)
     expires = cur_time + delta
 
+    default_site = Site.get_default_site(db)
+    jwt_encode_key = Instance.get_default_instance(db).get_jwt_encode_key()
+
     payload = {
         'iat': timegm(cur_time.timetuple()),
         'exp': timegm(expires.timetuple()),
         'challenge': j.get('code_challenge'),
         'origin_ref': j.get('origin_ref'),
-        'key_ref': SITE_KEY_XID,
-        'kid': SITE_KEY_XID
+        'key_ref': default_site.site_key,
+        'kid': default_site.site_key,
     }
 
     auth_code = jws.sign(payload, key=jwt_encode_key, headers={'kid': payload.get('kid')}, algorithm=ALGORITHMS.RS256)
@@ -375,6 +394,9 @@ async def auth_v1_code(request: Request):
 async def auth_v1_token(request: Request):
     j, cur_time = json_loads((await request.body()).decode('utf-8')), datetime.now(UTC)
 
+    default_site, default_instance = Site.get_default_site(db), Instance.get_default_instance(db)
+    jwt_encode_key, jwt_decode_key = default_instance.get_jwt_encode_key(), default_instance.get_jwt_decode_key()
+
     try:
         payload = jwt.decode(token=j.get('auth_code'), key=jwt_decode_key, algorithms=ALGORITHMS.RS256)
     except JWTError as e:
@@ -388,7 +410,7 @@ async def auth_v1_token(request: Request):
     if payload.get('challenge') != challenge:
         return JSONr(status_code=401, content={'status': 401, 'detail': 'expected challenge did not match verifier'})
 
-    access_expires_on = cur_time + TOKEN_EXPIRE_DELTA
+    access_expires_on = cur_time + default_instance.get_token_expire_delta()
 
     new_payload = {
         'iat': timegm(cur_time.timetuple()),
@@ -397,8 +419,8 @@ async def auth_v1_token(request: Request):
         'aud': 'https://cls.nvidia.org',
         'exp': timegm(access_expires_on.timetuple()),
         'origin_ref': origin_ref,
-        'key_ref': SITE_KEY_XID,
-        'kid': SITE_KEY_XID,
+        'key_ref': default_site.site_key,
+        'kid': default_site.site_key,
     }
 
     auth_token = jwt.encode(new_payload, key=jwt_encode_key, headers={'kid': payload.get('kid')}, algorithm=ALGORITHMS.RS256)
@@ -415,10 +437,13 @@ async def auth_v1_token(request: Request):
 # venv/lib/python3.9/site-packages/nls_services_lease/test/test_lease_multi_controller.py
 @app.post('/leasing/v1/lessor', description='request multiple leases (borrow) for current origin')
 async def leasing_v1_lessor(request: Request):
-    j, token, cur_time = json_loads((await request.body()).decode('utf-8')), __get_token(request), datetime.now(UTC)
+    j, cur_time = json_loads((await request.body()).decode('utf-8')), datetime.now(UTC)
+
+    default_instance = Instance.get_default_instance(db)
+    jwt_decode_key = default_instance.get_jwt_decode_key()
 
     try:
-        token = __get_token(request)
+        token = __get_token(request, jwt_decode_key)
     except JWTError:
         return JSONr(status_code=401, content={'status': 401, 'detail': 'token is not valid'})
@@ -432,7 +457,7 @@ async def leasing_v1_lessor(request: Request):
         # return JSONr(status_code=500, detail=f'no service instances found for scopes: ["{scope_ref}"]')
 
         lease_ref = str(uuid4())
-        expires = cur_time + LEASE_EXPIRE_DELTA
+        expires = cur_time + default_instance.get_lease_expire_delta()
         lease_result_list.append({
             "ordinal": 0,
             # https://docs.nvidia.com/license-system/latest/nvidia-license-system-user-guide/index.html
@@ -440,13 +465,13 @@ async def leasing_v1_lessor(request: Request):
                 "ref": lease_ref,
                 "created": cur_time.isoformat(),
                 "expires": expires.isoformat(),
-                "recommended_lease_renewal": LEASE_RENEWAL_PERIOD,
+                "recommended_lease_renewal": default_instance.lease_renewal_period,
                 "offline_lease": "true",
                 "license_type": "CONCURRENT_COUNTED_SINGLE"
             }
         })
 
-        data = Lease(origin_ref=origin_ref, lease_ref=lease_ref, lease_created=cur_time, lease_expires=expires)
+        data = Lease(instance_ref=default_instance.instance_ref, origin_ref=origin_ref, lease_ref=lease_ref, lease_created=cur_time, lease_expires=expires)
         Lease.create_or_update(db, data)
 
     response = {
@@ -463,7 +488,14 @@ async def leasing_v1_lessor(request: Request):
 # venv/lib/python3.9/site-packages/nls_dal_service_instance_dls/schema/service_instance/V1_0_21__product_mapping.sql
 @app.get('/leasing/v1/lessor/leases', description='get active leases for current origin')
 async def leasing_v1_lessor_lease(request: Request):
-    token, cur_time = __get_token(request), datetime.now(UTC)
+    cur_time = datetime.now(UTC)
+
+    jwt_decode_key = Instance.get_default_instance(db).get_jwt_decode_key()
+
+    try:
+        token = __get_token(request, jwt_decode_key)
+    except JWTError:
+        return JSONr(status_code=401, content={'status': 401, 'detail': 'token is not valid'})
 
     origin_ref = token.get('origin_ref')
@@ -483,7 +515,15 @@ async def leasing_v1_lessor_lease(request: Request):
 # venv/lib/python3.9/site-packages/nls_core_lease/lease_single.py
 @app.put('/leasing/v1/lease/{lease_ref}', description='renew a lease')
 async def leasing_v1_lease_renew(request: Request, lease_ref: str):
-    token, cur_time = __get_token(request), datetime.now(UTC)
+    cur_time = datetime.now(UTC)
+
+    default_instance = Instance.get_default_instance(db)
+    jwt_decode_key = default_instance.get_jwt_decode_key()
+
+    try:
+        token = __get_token(request, jwt_decode_key)
+    except JWTError:
+        return JSONr(status_code=401, content={'status': 401, 'detail': 'token is not valid'})
 
     origin_ref = token.get('origin_ref')
     logger.info(f'> [ renew ]: {origin_ref}: renew {lease_ref}')
@@ -492,11 +532,11 @@ async def leasing_v1_lease_renew(request: Request, lease_ref: str):
     if entity is None:
         return JSONr(status_code=404, content={'status': 404, 'detail': 'requested lease not available'})
 
-    expires = cur_time + LEASE_EXPIRE_DELTA
+    expires = cur_time + default_instance.get_lease_expire_delta()
     response = {
         "lease_ref": lease_ref,
         "expires": expires.isoformat(),
-        "recommended_lease_renewal": LEASE_RENEWAL_PERIOD,
+        "recommended_lease_renewal": default_instance.lease_renewal_period,
         "offline_lease": True,
         "prompts": None,
         "sync_timestamp": cur_time.isoformat(),
@@ -510,7 +550,14 @@ async def leasing_v1_lease_renew(request: Request, lease_ref: str):
 # venv/lib/python3.9/site-packages/nls_services_lease/test/test_lease_single_controller.py
 @app.delete('/leasing/v1/lease/{lease_ref}', description='release (return) a lease')
 async def leasing_v1_lease_delete(request: Request, lease_ref: str):
-    token, cur_time = __get_token(request), datetime.now(UTC)
+    cur_time = datetime.now(UTC)
+
+    jwt_decode_key = Instance.get_default_instance(db).get_jwt_decode_key()
+
+    try:
+        token = __get_token(request, jwt_decode_key)
+    except JWTError:
+        return JSONr(status_code=401, content={'status': 401, 'detail': 'token is not valid'})
 
     origin_ref = token.get('origin_ref')
     logger.info(f'> [ return ]: {origin_ref}: return {lease_ref}')
@@ -536,7 +583,14 @@ async def leasing_v1_lease_delete(request: Request, lease_ref: str):
 # venv/lib/python3.9/site-packages/nls_services_lease/test/test_lease_multi_controller.py
 @app.delete('/leasing/v1/lessor/leases', description='release all leases')
 async def leasing_v1_lessor_lease_remove(request: Request):
-    token, cur_time = __get_token(request), datetime.now(UTC)
+    cur_time = datetime.now(UTC)
+
+    jwt_decode_key = Instance.get_default_instance(db).get_jwt_decode_key()
+
+    try:
+        token = __get_token(request, jwt_decode_key)
+    except JWTError:
+        return JSONr(status_code=401, content={'status': 401, 'detail': 'token is not valid'})
 
     origin_ref = token.get('origin_ref')
@@ -558,6 +612,8 @@ async def leasing_v1_lessor_lease_remove(request: Request):
 async def leasing_v1_lessor_shutdown(request: Request):
     j, cur_time = json_loads((await request.body()).decode('utf-8')), datetime.now(UTC)
 
+    jwt_decode_key = Instance.get_default_instance(db).get_jwt_decode_key()
+
     token = j.get('token')
     token = jwt.decode(token=token, key=jwt_decode_key, algorithms=ALGORITHMS.RS256, options={'verify_aud': False})
     origin_ref = token.get('origin_ref')
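
Every lease endpoint above now repeats the same guard: look up the default instance's decode key, then call __get_token(request, jwt_decode_key) inside try/except JWTError, answering 401 on failure. Condensed as a sketch (the helper name require_token is hypothetical; the individual calls are taken from the diff):

    # Hypothetical condensation of the recurring token-validation pattern above.
    from jose import jwt, JWTError
    from jose.constants import ALGORITHMS

    def require_token(request, jwt_decode_key) -> dict | None:
        try:
            authorization_header = request.headers.get('authorization')
            token = authorization_header.split(' ')[1]  # "Bearer <token>"
            return jwt.decode(token=token, key=jwt_decode_key, algorithms=ALGORITHMS.RS256,
                              options={'verify_aud': False})
        except JWTError:
            return None  # the endpoints above answer this case with a 401 JSON response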

app/orm.py

@@ -1,20 +1,143 @@
+import logging
 from datetime import datetime, timedelta, timezone, UTC
+from os import getenv as env
+from os.path import join, dirname, isfile
 
 from dateutil.relativedelta import relativedelta
-from sqlalchemy import Column, VARCHAR, CHAR, ForeignKey, DATETIME, update, and_, inspect, text
+from jose import jwk
+from jose.constants import ALGORITHMS
+from sqlalchemy import Column, VARCHAR, CHAR, ForeignKey, DATETIME, update, and_, inspect, text, BLOB, INT, FLOAT
 from sqlalchemy.engine import Engine
-from sqlalchemy.orm import sessionmaker, declarative_base
+from sqlalchemy.orm import sessionmaker, declarative_base, Session, relationship
+from sqlalchemy.schema import CreateTable
 
-from util import NV
+from util import DriverMatrix, PrivateKey, PublicKey
+
+logging.basicConfig()
+logger = logging.getLogger(__name__)
+logger.setLevel(logging.INFO)
 
 Base = declarative_base()
 
 
+class Site(Base):
+    __tablename__ = "site"
+
+    INITIAL_SITE_KEY_XID = '10000000-0000-0000-0000-000000000000'
+    INITIAL_SITE_NAME = 'default-site'
+
+    site_key = Column(CHAR(length=36), primary_key=True, unique=True, index=True)  # uuid4, SITE_KEY_XID
+    name = Column(VARCHAR(length=256), nullable=False)
+
+    def __str__(self):
+        return f'SITE_KEY_XID: {self.site_key}'
+
+    @staticmethod
+    def create_statement(engine: Engine):
+        return CreateTable(Site.__table__).compile(engine)
+
+    @staticmethod
+    def get_default_site(engine: Engine) -> "Site":
+        session = sessionmaker(bind=engine)()
+        entity = session.query(Site).filter(Site.site_key == Site.INITIAL_SITE_KEY_XID).first()
+        session.close()
+        return entity
+
+
+class Instance(Base):
+    __tablename__ = "instance"
+
+    DEFAULT_INSTANCE_REF = '10000000-0000-0000-0000-000000000001'
+    DEFAULT_TOKEN_EXPIRE_DELTA = 86_400  # 1 day
+    DEFAULT_LEASE_EXPIRE_DELTA = 7_776_000  # 90 days
+    DEFAULT_LEASE_RENEWAL_PERIOD = 0.15
+    DEFAULT_CLIENT_TOKEN_EXPIRE_DELTA = 378_432_000  # 12 years
+    # 1 day = 86400 (min. in production setup, max 90 days), 1 hour = 3600
+
+    instance_ref = Column(CHAR(length=36), primary_key=True, unique=True, index=True)  # uuid4, INSTANCE_REF
+    site_key = Column(CHAR(length=36), ForeignKey(Site.site_key, ondelete='CASCADE'), nullable=False, index=True)  # uuid4
+    private_key = Column(BLOB(length=2048), nullable=False)
+    public_key = Column(BLOB(length=512), nullable=False)
+    token_expire_delta = Column(INT(), nullable=False, default=DEFAULT_TOKEN_EXPIRE_DELTA, comment='in seconds')
+    lease_expire_delta = Column(INT(), nullable=False, default=DEFAULT_LEASE_EXPIRE_DELTA, comment='in seconds')
+    lease_renewal_period = Column(FLOAT(precision=2), nullable=False, default=DEFAULT_LEASE_RENEWAL_PERIOD)
+    client_token_expire_delta = Column(INT(), nullable=False, default=DEFAULT_CLIENT_TOKEN_EXPIRE_DELTA, comment='in seconds')
+
+    __origin = relationship(Site, foreign_keys=[site_key])
+
+    def __str__(self):
+        return f'INSTANCE_REF: {self.instance_ref} (SITE_KEY_XID: {self.site_key})'
+
+    @staticmethod
+    def create_statement(engine: Engine):
+        return CreateTable(Instance.__table__).compile(engine)
+
+    @staticmethod
+    def create_or_update(engine: Engine, instance: "Instance"):
+        session = sessionmaker(bind=engine)()
+        entity = session.query(Instance).filter(Instance.instance_ref == instance.instance_ref).first()
+        if entity is None:
+            session.add(instance)
+        else:
+            x = dict(
+                site_key=instance.site_key,
+                private_key=instance.private_key,
+                public_key=instance.public_key,
+                token_expire_delta=instance.token_expire_delta,
+                lease_expire_delta=instance.lease_expire_delta,
+                lease_renewal_period=instance.lease_renewal_period,
+                client_token_expire_delta=instance.client_token_expire_delta,
+            )
+            session.execute(update(Instance).where(Instance.instance_ref == instance.instance_ref).values(**x))
+        session.commit()
+        session.flush()
+        session.close()
+
+    # todo: validate on startup that "lease_expire_delta" is between 1 day and 90 days
+
+    @staticmethod
+    def get_default_instance(engine: Engine) -> "Instance":
+        session = sessionmaker(bind=engine)()
+        site = Site.get_default_site(engine)
+        entity = session.query(Instance).filter(Instance.site_key == site.site_key).first()
+        session.close()
+        return entity
+
+    def get_token_expire_delta(self) -> "dateutil.relativedelta.relativedelta":
+        return relativedelta(seconds=self.token_expire_delta)
+
+    def get_lease_expire_delta(self) -> "dateutil.relativedelta.relativedelta":
+        return relativedelta(seconds=self.lease_expire_delta)
+
+    def get_lease_renewal_delta(self) -> "datetime.timedelta":
+        return timedelta(seconds=self.lease_expire_delta)
+
+    def get_client_token_expire_delta(self) -> "dateutil.relativedelta.relativedelta":
+        return relativedelta(seconds=self.client_token_expire_delta)
+
+    def __get_private_key(self) -> "PrivateKey":
+        return PrivateKey(self.private_key)
+
+    def get_public_key(self) -> "PublicKey":
+        return PublicKey(self.public_key)
+
+    def get_jwt_encode_key(self) -> "jose.jwk":
+        return jwk.construct(self.__get_private_key().pem().decode('utf-8'), algorithm=ALGORITHMS.RS256)
+
+    def get_jwt_decode_key(self) -> "jose.jwk":
+        return jwk.construct(self.get_public_key().pem().decode('utf-8'), algorithm=ALGORITHMS.RS256)
+
+    def get_private_key_str(self, encoding: str = 'utf-8') -> str:
+        return self.private_key.decode(encoding)
+
+    def get_public_key_str(self, encoding: str = 'utf-8') -> str:
+        return self.public_key.decode(encoding)
+
+
 class Origin(Base):
     __tablename__ = "origin"
 
     origin_ref = Column(CHAR(length=36), primary_key=True, unique=True, index=True)  # uuid4
     # service_instance_xid = Column(CHAR(length=36), nullable=False, index=True)  # uuid4 # not necessary, we only support one service_instance_xid ('INSTANCE_REF')
     hostname = Column(VARCHAR(length=256), nullable=True)
     guest_driver_version = Column(VARCHAR(length=10), nullable=True)
@@ -25,7 +148,7 @@ class Origin(Base):
         return f'Origin(origin_ref={self.origin_ref}, hostname={self.hostname})'
 
     def serialize(self) -> dict:
-        _ = NV().find(self.guest_driver_version)
+        _ = DriverMatrix().find(self.guest_driver_version)
 
         return {
             'origin_ref': self.origin_ref,
@@ -39,7 +162,6 @@ class Origin(Base):
 
     @staticmethod
     def create_statement(engine: Engine):
-        from sqlalchemy.schema import CreateTable
         return CreateTable(Origin.__table__).compile(engine)
 
     @staticmethod
@@ -85,18 +207,24 @@ class Origin(Base):
 class Lease(Base):
     __tablename__ = "lease"
 
+    instance_ref = Column(CHAR(length=36), ForeignKey(Instance.instance_ref, ondelete='CASCADE'), nullable=False, index=True)  # uuid4
     lease_ref = Column(CHAR(length=36), primary_key=True, nullable=False, index=True)  # uuid4
     origin_ref = Column(CHAR(length=36), ForeignKey(Origin.origin_ref, ondelete='CASCADE'), nullable=False, index=True)  # uuid4
     # scope_ref = Column(CHAR(length=36), nullable=False, index=True)  # uuid4 # not necessary, we only support one scope_ref ('ALLOTMENT_REF')
     lease_created = Column(DATETIME(), nullable=False)
     lease_expires = Column(DATETIME(), nullable=False)
     lease_updated = Column(DATETIME(), nullable=False)
 
+    __instance = relationship(Instance, foreign_keys=[instance_ref])
+    __origin = relationship(Origin, foreign_keys=[origin_ref])
+
     def __repr__(self):
         return f'Lease(origin_ref={self.origin_ref}, lease_ref={self.lease_ref}, expires={self.lease_expires})'
 
-    def serialize(self, renewal_period: float, renewal_delta: timedelta) -> dict:
+    def serialize(self) -> dict:
+        renewal_period = self.__instance.lease_renewal_period
+        renewal_delta = self.__instance.get_lease_renewal_delta()
+
         lease_renewal = int(Lease.calculate_renewal(renewal_period, renewal_delta).total_seconds())
         lease_renewal = self.lease_updated + relativedelta(seconds=lease_renewal)
 
@@ -112,7 +240,6 @@ class Lease(Base):
 
     @staticmethod
     def create_statement(engine: Engine):
-        from sqlalchemy.schema import CreateTable
         return CreateTable(Lease.__table__).compile(engine)
 
     @staticmethod
@@ -206,38 +333,104 @@ class Lease(Base):
 
         return renew
 
 
+def init_default_site(session: Session):
+    private_key = PrivateKey.generate()
+    public_key = private_key.public_key()
+
+    site = Site(
+        site_key=Site.INITIAL_SITE_KEY_XID,
+        name=Site.INITIAL_SITE_NAME
+    )
+    session.add(site)
+    session.commit()
+
+    instance = Instance(
+        instance_ref=Instance.DEFAULT_INSTANCE_REF,
+        site_key=site.site_key,
+        private_key=private_key.pem(),
+        public_key=public_key.pem(),
+    )
+    session.add(instance)
+    session.commit()
+
+
 def init(engine: Engine):
-    tables = [Origin, Lease]
+    tables = [Site, Instance, Origin, Lease]
     db = inspect(engine)
     session = sessionmaker(bind=engine)()
     for table in tables:
-        if not db.dialect.has_table(engine.connect(), table.__tablename__):
+        exists = db.dialect.has_table(engine.connect(), table.__tablename__)
+        logger.info(f'> Table "{table.__tablename__:<16}" exists: {exists}')
+        if not exists:
             session.execute(text(str(table.create_statement(engine))))
             session.commit()
+
+    # create default site
+    cnt = session.query(Site).count()
+    if cnt == 0:
+        init_default_site(session)
+
+    session.flush()
     session.close()
 
 
 def migrate(engine: Engine):
     db = inspect(engine)
 
-    def upgrade_1_0_to_1_1():
-        x = db.dialect.get_columns(engine.connect(), Lease.__tablename__)
-        x = next(_ for _ in x if _['name'] == 'origin_ref')
-        if x['primary_key'] > 0:
-            print('Found old database schema with "origin_ref" as primary-key in "lease" table. Dropping table!')
-            print('  Your leases are recreated on next renewal!')
-            print('  If an error message appears on the client, you can ignore it.')
-            Lease.__table__.drop(bind=engine)
-            init(engine)
-
-    # def upgrade_1_2_to_1_3():
-    #     x = db.dialect.get_columns(engine.connect(), Lease.__tablename__)
-    #     x = next((_ for _ in x if _['name'] == 'scope_ref'), None)
-    #     if x is None:
-    #         Lease.scope_ref.compile()
-    #         column_name = Lease.scope_ref.name
-    #         column_type = Lease.scope_ref.type.compile(engine.dialect)
-    #         engine.execute(f'ALTER TABLE "{Lease.__tablename__}" ADD COLUMN "{column_name}" {column_type}')
-
-    upgrade_1_0_to_1_1()
-    # upgrade_1_2_to_1_3()
+    # todo: add update guide to use 1.LATEST to 2.0
+    def upgrade_1_x_to_2_0():
+        site = Site.get_default_site(engine)
+        logger.info(site)
+        instance = Instance.get_default_instance(engine)
+        logger.info(instance)
+
+        # SITE_KEY_XID
+        if (site_key := env('SITE_KEY_XID', None)) is not None:
+            site.site_key = str(site_key)
+
+        # INSTANCE_REF
+        if (instance_ref := env('INSTANCE_REF', None)) is not None:
+            instance.instance_ref = str(instance_ref)
+
+        # ALLOTMENT_REF
+        if (allotment_ref := env('ALLOTMENT_REF', None)) is not None:
+            pass  # todo
+
+        # INSTANCE_KEY_RSA, INSTANCE_KEY_PUB
+        default_instance_private_key_path = str(join(dirname(__file__), 'cert/instance.private.pem'))
+        instance_private_key = env('INSTANCE_KEY_RSA', None)
+        if instance_private_key is not None:
+            instance.private_key = PrivateKey(instance_private_key.encode('utf-8'))
+        elif isfile(default_instance_private_key_path):
+            instance.private_key = PrivateKey.from_file(default_instance_private_key_path)
+
+        default_instance_public_key_path = str(join(dirname(__file__), 'cert/instance.public.pem'))
+        instance_public_key = env('INSTANCE_KEY_PUB', None)
+        if instance_public_key is not None:
+            instance.public_key = PublicKey(instance_public_key.encode('utf-8'))
+        elif isfile(default_instance_public_key_path):
+            instance.public_key = PublicKey.from_file(default_instance_public_key_path)
+
+        # TOKEN_EXPIRE_DELTA
+        token_expire_delta = env('TOKEN_EXPIRE_DAYS', None)
+        if token_expire_delta not in (None, 0):
+            instance.token_expire_delta = int(token_expire_delta) * 86_400
+        token_expire_delta = env('TOKEN_EXPIRE_HOURS', None)
+        if token_expire_delta not in (None, 0):
+            instance.token_expire_delta = int(token_expire_delta) * 3_600
+
+        # LEASE_EXPIRE_DELTA, LEASE_RENEWAL_DELTA
+        lease_expire_delta = env('LEASE_EXPIRE_DAYS', None)
+        if lease_expire_delta not in (None, 0):
+            instance.lease_expire_delta = int(lease_expire_delta) * 86_400
+        lease_expire_delta = env('LEASE_EXPIRE_HOURS', None)
+        if lease_expire_delta not in (None, 0):
+            instance.lease_expire_delta = int(lease_expire_delta) * 3_600
+
+        # LEASE_RENEWAL_PERIOD
+        lease_renewal_period = env('LEASE_RENEWAL_PERIOD', None)
+        if lease_renewal_period is not None:
+            instance.lease_renewal_period = float(lease_renewal_period)
+
+        # todo: update site, instance
+
+    upgrade_1_x_to_2_0()
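
One subtlety in upgrade_1_x_to_2_0() above: ":=" binds more loosely than "is not None", so the parentheses around each walrus assignment are load-bearing — without them the name captures the boolean result of the comparison instead of the environment value. A minimal illustration (not project code):

    value = 'abc'  # stand-in for env('SITE_KEY_XID', None)

    if x := value is not None:
        print(x)   # True -- ':=' captured the comparison result, not the value

    if (y := value) is not None:
        print(y)   # 'abc' -- parentheses make ':=' capture the value itself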

app/util.py

@@ -1,4 +1,5 @@
 import logging
+from json import load as json_load
 
 from cryptography.hazmat.primitives import serialization
 from cryptography.hazmat.primitives.asymmetric.rsa import RSAPrivateKey, RSAPublicKey, generate_private_key
@@ -7,6 +8,14 @@ from cryptography.hazmat.primitives.serialization import load_pem_private_key, l
 logging.basicConfig()
 
 
+def load_file(filename: str) -> bytes:
+    log = logging.getLogger(f'{__name__}')
+    log.debug(f'Loading contents of file "{filename}"')
+    with open(filename, 'rb') as file:
+        content = file.read()
+    return content
+
+
 class PrivateKey:
 
     def __init__(self, data: bytes):
@@ -76,37 +85,32 @@ class PublicKey:
             format=serialization.PublicFormat.SubjectPublicKeyInfo
         )
 
 
-def load_file(filename: str) -> bytes:
-    log = logging.getLogger(f'{__name__}')
-    log.debug(f'Loading contents of file "{filename}"')
-    with open(filename, 'rb') as file:
-        content = file.read()
-    return content
-
-
-class NV:
+class DriverMatrix:
     __DRIVER_MATRIX_FILENAME = 'static/driver_matrix.json'
     __DRIVER_MATRIX: None | dict = None  # https://docs.nvidia.com/grid/ => "Driver Versions"
 
     def __init__(self):
         self.log = logging.getLogger(self.__class__.__name__)
 
-        if NV.__DRIVER_MATRIX is None:
-            from json import load as json_load
-            try:
-                file = open(NV.__DRIVER_MATRIX_FILENAME)
-                NV.__DRIVER_MATRIX = json_load(file)
-                file.close()
-                self.log.debug(f'Successfully loaded "{NV.__DRIVER_MATRIX_FILENAME}".')
-            except Exception as e:
-                NV.__DRIVER_MATRIX = {}  # init empty dict to not try open file every time, just when restarting app
-                # self.log.warning(f'Failed to load "{NV.__DRIVER_MATRIX_FILENAME}": {e}')
+        if DriverMatrix.__DRIVER_MATRIX is None:
+            self.__load()
+
+    def __load(self):
+        try:
+            file = open(DriverMatrix.__DRIVER_MATRIX_FILENAME)
+            DriverMatrix.__DRIVER_MATRIX = json_load(file)
+            file.close()
+            self.log.debug(f'Successfully loaded "{DriverMatrix.__DRIVER_MATRIX_FILENAME}".')
+        except Exception as e:
+            DriverMatrix.__DRIVER_MATRIX = {}  # init empty dict to not try open file every time, just when restarting app
+            # self.log.warning(f'Failed to load "{DriverMatrix.__DRIVER_MATRIX_FILENAME}": {e}')
 
     @staticmethod
     def find(version: str) -> dict | None:
-        if NV.__DRIVER_MATRIX is None:
+        if DriverMatrix.__DRIVER_MATRIX is None:
             return None
-        for idx, (key, branch) in enumerate(NV.__DRIVER_MATRIX.items()):
+        for idx, (key, branch) in enumerate(DriverMatrix.__DRIVER_MATRIX.items()):
             for release in branch.get('$releases'):
                 linux_driver = release.get('Linux Driver')
                 windows_driver = release.get('Windows Driver')
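
For orientation, a small usage sketch of the renamed DriverMatrix class (the version string is illustrative; find() scans the bundled static/driver_matrix.json and returns None when the matrix is missing or the version is unknown):

    from util import DriverMatrix

    info = DriverMatrix().find('550.54.14')  # illustrative guest driver version
    if info is None:
        print('version not found (or driver_matrix.json not loadable)')
    else:
        print(info)  # branch metadata assembled from driver_matrix.json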

requirements.txt

@@ -1,8 +1,8 @@
 fastapi==0.115.12
-uvicorn[standard]==0.34.0
+uvicorn[standard]==0.34.1
 python-jose[cryptography]==3.4.0
 cryptography==44.0.2
 python-dateutil==2.9.0
 sqlalchemy==2.0.40
-markdown==3.7
+markdown==3.8
 python-dotenv==1.1.0

create_driver_matrix_json.py

@@ -6,7 +6,7 @@ logger.setLevel(logging.INFO)
 URL = 'https://docs.nvidia.com/vgpu/index.html'

-BRANCH_STATUS_KEY, SOFTWARE_BRANCH_KEY, = 'vGPU Branch Status', 'vGPU Software Branch'
+BRANCH_STATUS_KEY = 'vGPU Branch Status'
 VGPU_KEY, GRID_KEY, DRIVER_BRANCH_KEY = 'vGPU Software', 'vGPU Software', 'Driver Branch'
 LINUX_VGPU_MANAGER_KEY, LINUX_DRIVER_KEY = 'Linux vGPU Manager', 'Linux Driver'
 WINDOWS_VGPU_MANAGER_KEY, WINDOWS_DRIVER_KEY = 'Windows vGPU Manager', 'Windows Driver'
@@ -26,12 +26,15 @@ def __driver_versions(html: 'BeautifulSoup'):
     # find wrapper for "DriverVersions" and find tables
     data = html.find('div', {'id': 'driver-versions'})
-    items = data.findAll('bsp-accordion', {'class': 'Accordion-items-item'})
+    items = data.find_all('bsp-accordion', {'class': 'Accordion-items-item'})
     for item in items:
         software_branch = item.find('div', {'class': 'Accordion-items-item-title'}).text.strip()
         software_branch = software_branch.replace(' Releases', '')
         matrix_key = software_branch.lower()

+        branch_status = item.find('a', href=True, string='Branch status')
+        branch_status = branch_status.next_sibling.replace(':', '').strip()
+
         # driver version info from table-heads (ths) and table-rows (trs)
         table = item.find('table')
         ths, trs = table.find_all('th'), table.find_all('tr')
@@ -42,48 +45,20 @@ def __driver_versions(html: 'BeautifulSoup'):
                 continue
             # create dict with table-heads as key and cell content as value
             x = {headers[i]: __strip(cell.text) for i, cell in enumerate(tds)}
+            x.setdefault(BRANCH_STATUS_KEY, branch_status)
             releases.append(x)

         # add to matrix
         MATRIX.update({matrix_key: {JSON_RELEASES_KEY: releases}})

-def __release_branches(html: 'BeautifulSoup'):
-    # find wrapper for "AllReleaseBranches" and find table
-    data = html.find('div', {'id': 'all-release-branches'})
-    table = data.find('table')
-
-    # branch releases info from table-heads (ths) and table-rows (trs)
-    ths, trs = table.find_all('th'), table.find_all('tr')
-    headers = [header.text.strip() for header in ths]
-    for trs in trs:
-        tds = trs.find_all('td')
-        if len(tds) == 0:  # skip empty
-            continue
-        # create dict with table-heads as key and cell content as value
-        x = {headers[i]: cell.text.strip() for i, cell in enumerate(tds)}
-        # get matrix_key
-        software_branch = x.get(SOFTWARE_BRANCH_KEY)
-        matrix_key = software_branch.lower()
-        # add to matrix
-        MATRIX.update({matrix_key: MATRIX.get(matrix_key) | x})

 def __debug():
     # print table head
-    s = f'{SOFTWARE_BRANCH_KEY:^21} | {BRANCH_STATUS_KEY:^21} | {VGPU_KEY:^13} | {LINUX_VGPU_MANAGER_KEY:^21} | {LINUX_DRIVER_KEY:^21} | {WINDOWS_VGPU_MANAGER_KEY:^21} | {WINDOWS_DRIVER_KEY:^21} | {RELEASE_DATE_KEY:>21} | {EOL_KEY:>21}'
+    s = f'{VGPU_KEY:^13} | {LINUX_VGPU_MANAGER_KEY:^21} | {LINUX_DRIVER_KEY:^21} | {WINDOWS_VGPU_MANAGER_KEY:^21} | {WINDOWS_DRIVER_KEY:^21} | {RELEASE_DATE_KEY:>21} | {BRANCH_STATUS_KEY:^21}'
     print(s)

     # iterate over dict & format some variables to not overload table
     for idx, (key, branch) in enumerate(MATRIX.items()):
-        branch_status = branch.get(BRANCH_STATUS_KEY)
-        branch_status = branch_status.replace('Branch ', '')
-        branch_status = branch_status.replace('Long-Term Support', 'LTS')
-        branch_status = branch_status.replace('Production', 'Prod.')
-        software_branch = branch.get(SOFTWARE_BRANCH_KEY).replace('NVIDIA ', '')
         for release in branch.get(JSON_RELEASES_KEY):
             version = release.get(VGPU_KEY, release.get(GRID_KEY, ''))
             linux_manager = release.get(LINUX_VGPU_MANAGER_KEY, release.get(ALT_VGPU_MANAGER_KEY, ''))
@@ -92,13 +67,25 @@ def __debug():
             windows_driver = release.get(WINDOWS_DRIVER_KEY)
             release_date = release.get(RELEASE_DATE_KEY)
             is_latest = release.get(VGPU_KEY) == branch.get(LATEST_KEY)
+            branch_status = __parse_branch_status(release.get(BRANCH_STATUS_KEY, ''))

             version = f'{version} *' if is_latest else version
-            eol = branch.get(EOL_KEY) if is_latest else ''
-            s = f'{software_branch:^21} | {branch_status:^21} | {version:<13} | {linux_manager:<21} | {linux_driver:<21} | {windows_manager:<21} | {windows_driver:<21} | {release_date:>21} | {eol:>21}'
+            s = f'{version:<13} | {linux_manager:<21} | {linux_driver:<21} | {windows_manager:<21} | {windows_driver:<21} | {release_date:>21} | {branch_status:^21}'
             print(s)

+def __parse_branch_status(string: str) -> str:
+    string = string.replace('Production Branch', 'Prod. -')
+    string = string.replace('Long-Term Support Branch', 'LTS -')
+    string = string.replace('supported until', '')
+    string = string.replace('EOL since', 'EOL - ')
+    string = string.replace('EOL from', 'EOL -')
+    return string

 def __dump(filename: str):
     import json
@@ -128,7 +115,6 @@ if __name__ == '__main__':
     # build matrix
     __driver_versions(soup)
-    __release_branches(soup)

     # debug output
     __debug()
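The scraper no longer reads branch status from the separate "all-release-branches" table; instead it takes the text that follows the "Branch status" link inside each accordion item and later shortens it with __parse_branch_status(). A self-contained sketch of that next_sibling extraction, using assumed markup modeled on the added lines:

# Assumed markup: the status text follows the "Branch status" anchor as a bare text node.
from bs4 import BeautifulSoup

html = '<div><a href="#">Branch status</a>: Long-Term Support Branch supported until June 2027</div>'
item = BeautifulSoup(html, 'html.parser')

branch_status = item.find('a', href=True, string='Branch status')
branch_status = branch_status.next_sibling.replace(':', '').strip()
print(branch_status)  # Long-Term Support Branch supported until June 2027

Run through the replacements in __parse_branch_status(), that string would come out roughly as 'LTS - June 2027' (modulo whitespace) for the debug table.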

test/main.py

@@ -3,12 +3,13 @@ from base64 import b64encode as b64enc
 from calendar import timegm
 from datetime import datetime, UTC
 from hashlib import sha256
-from os.path import dirname, join
+from os import getenv as env
 from uuid import uuid4, UUID

 from dateutil.relativedelta import relativedelta
-from jose import jwt, jwk
+from jose import jwt
 from jose.constants import ALGORITHMS
+from sqlalchemy import create_engine
 from starlette.testclient import TestClient

 # add relative path to use packages as they were in the app/ dir
@@ -16,20 +17,23 @@ sys.path.append('../')
 sys.path.append('../app')

 from app import main
-from util import PrivateKey, PublicKey
+from orm import init as db_init, migrate, Site, Instance

-client = TestClient(main.app)

 ORIGIN_REF, ALLOTMENT_REF, SECRET = str(uuid4()), '20000000-0000-0000-0000-000000000001', 'HelloWorld'

-# INSTANCE_KEY_RSA = generate_key()
-# INSTANCE_KEY_PUB = INSTANCE_KEY_RSA.public_key()
-INSTANCE_KEY_RSA = PrivateKey.from_file(str(join(dirname(__file__), '../app/cert/instance.private.pem')))
-INSTANCE_KEY_PUB = PublicKey.from_file(str(join(dirname(__file__), '../app/cert/instance.public.pem')))
-
-jwt_encode_key = jwk.construct(INSTANCE_KEY_RSA.pem(), algorithm=ALGORITHMS.RS256)
-jwt_decode_key = jwk.construct(INSTANCE_KEY_PUB.pem(), algorithm=ALGORITHMS.RS256)
+# fastapi setup
+client = TestClient(main.app)
+
+# database setup
+db = create_engine(str(env('DATABASE', 'sqlite:///db.sqlite')))
+db_init(db), migrate(db)
+
+# test vars
+DEFAULT_SITE, DEFAULT_INSTANCE = Site.get_default_site(db), Instance.get_default_instance(db)
+SITE_KEY = DEFAULT_SITE.site_key
+jwt_encode_key, jwt_decode_key = DEFAULT_INSTANCE.get_jwt_encode_key(), DEFAULT_INSTANCE.get_jwt_decode_key()

 def __bearer_token(origin_ref: str) -> str:
@@ -38,6 +42,12 @@ def __bearer_token(origin_ref: str) -> str:
     return token

+def test_initial_default_site_and_instance():
+    default_site, default_instance = Site.get_default_site(db), Instance.get_default_instance(db)
+
+    assert default_site.site_key == Site.INITIAL_SITE_KEY_XID
+    assert default_instance.instance_ref == Instance.DEFAULT_INSTANCE_REF

 def test_index():
     response = client.get('/')
     assert response.status_code == 200
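The test bootstrap no longer loads instance.private.pem/instance.public.pem from disk; it creates a SQLAlchemy engine from the DATABASE environment variable, runs the project's db_init/migrate, and takes the JWT encode/decode keys from the seeded default Instance. A minimal sketch of just the env-driven engine setup (the print is illustrative):

# Minimal sketch of the env-driven engine setup the tests now use.
from os import getenv as env
from sqlalchemy import create_engine

# Falls back to a local SQLite file when DATABASE is unset, as in the diff.
db = create_engine(str(env('DATABASE', 'sqlite:///db.sqlite')))

with db.connect() as connection:
    print(connection.engine.url)  # e.g. sqlite:///db.sqlite

This lets the suite point at any database by exporting DATABASE before running the tests, e.g. a throwaway sqlite:///test.sqlite.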