Compare commits

...

24 Commits

| Author | SHA1 | Message | Date |
|--------|------|---------|------|
| Oscar Krause | `f38378bbc8` | updated credits | 2025-04-16 15:05:45 +02:00 |
| Oscar Krause | `cd4c3d379a` | code styling | 2025-04-16 15:01:14 +02:00 |
| Oscar Krause | `59645d1daf` | Merge branch 'main' into v18.x-support (conflicts: `app/util.py`) | 2025-04-16 15:00:33 +02:00 |
| Oscar Krause | `9605ba3eee` | Merge branch 'dev' into 'main' (see merge request oscar.krause/fastapi-dls!51) | 2025-04-16 14:43:58 +02:00 |
| Oscar Krause | `cd5c2a6cb1` | code styling | 2025-04-16 14:18:04 +02:00 |
| Oscar Krause | `1d3255188e` | code styling | 2025-04-16 14:12:40 +02:00 |
| Oscar Krause | `cea7a01b54` | code styling | 2025-04-16 14:12:15 +02:00 |
| Oscar Krause | `ff2fbaf83f` | code styling | 2025-04-16 14:11:04 +02:00 |
| Oscar Krause | `9f417b61a9` | fixes | 2025-04-16 14:07:18 +02:00 |
| Oscar Krause | `4f77200628` | code styling | 2025-04-16 14:07:18 +02:00 |
| Oscar Krause | `20b03446dc` | Merge branch 'main' into 'dev' (conflicts: `.gitlab-ci.yml`) | 2025-04-16 13:57:51 +02:00 |
| Oscar Krause | `df506e8591` | removed unsupported python versions | 2025-04-16 13:56:36 +02:00 |
| Oscar Krause | `3fe3429986` | added some python versions and added EOL | 2025-04-16 12:54:24 +02:00 |
| Oscar Krause | `a996504c50` | test python3.13 and 3.11 | 2025-04-16 12:44:58 +02:00 |
| Oscar Krause | `5d2bff88d8` | fixes | 2025-04-16 12:35:24 +02:00 |
| Oscar Krause | `67f2d18a95` | requirements.txt updated | 2025-04-16 12:25:37 +02:00 |
| Oscar Krause | `52cd34cb5c` | ci improvements | 2025-04-16 12:24:19 +02:00 |
| Oscar Krause | `6fb03309a5` | ci improvements | 2025-04-16 12:23:21 +02:00 |
| Oscar Krause | `477e5abbca` | refactored test pipeline to test different python versions | 2025-04-16 12:12:23 +02:00 |
| Oscar Krause | `e00e7569eb` | removed todo | 2025-04-16 12:01:19 +02:00 |
| Oscar Krause | `4650e18821` | added todo | 2025-04-16 12:00:21 +02:00 |
| Oscar Krause | `383c7f8684` | updated compatibility | 2025-04-16 11:59:53 +02:00 |
| Oscar Krause | `e2c4e45764` | Update .gitlab-ci.yml ("Datei .gitlab-ci.yml aktualisieren") | 2025-04-14 20:45:12 +02:00 |
| Oscar Krause | `a8c1cdf095` | updated create_driver_matrix_json.py | 2025-04-11 14:10:26 +02:00 |
8 changed files with 97 additions and 103 deletions

View File

@@ -41,11 +41,10 @@ build:apt:
   interruptible: true
   stage: build
   rules:
-    - if: $CI_COMMIT_BRANCH && $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH
     - if: $CI_COMMIT_TAG
       variables:
         VERSION: $CI_COMMIT_REF_NAME
-    - if: $CI_PIPELINE_SOURCE == 'merge_request_event'
+    - if: ($CI_PIPELINE_SOURCE == 'merge_request_event') || ($CI_COMMIT_BRANCH && $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH)
       changes:
         - app/**/*
         - .DEBIAN/**/*
@@ -89,11 +88,10 @@ build:pacman:
   interruptible: true
   stage: build
   rules:
-    - if: $CI_COMMIT_BRANCH && $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH
     - if: $CI_COMMIT_TAG
       variables:
         VERSION: $CI_COMMIT_REF_NAME
-    - if: $CI_PIPELINE_SOURCE == 'merge_request_event'
+    - if: ($CI_PIPELINE_SOURCE == 'merge_request_event') || ($CI_COMMIT_BRANCH && $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH)
       changes:
         - app/**/*
         - .PKGBUILD/**/*
@@ -122,13 +120,12 @@ build:pacman:
     paths:
       - "*.pkg.tar.zst"
 
-test:
-  image: python:3.12-slim-bookworm
+test:python:
+  image: $IMAGE
   stage: test
   interruptible: true
   rules:
     - if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH
-    - if: $CI_COMMIT_TAG
     - if: $CI_PIPELINE_SOURCE == "merge_request_event"
     - if: $CI_COMMIT_BRANCH && $CI_COMMIT_BRANCH != $CI_DEFAULT_BRANCH
       changes:
@@ -138,17 +135,20 @@ test:
     DATABASE: sqlite:///../app/db.sqlite
   parallel:
     matrix:
-      - REQUIREMENTS:
-          - 'requirements.txt'
-          # - '.DEBIAN/requirements-bookworm-12.txt'
-          # - '.DEBIAN/requirements-ubuntu-24.04.txt'
-          # - '.DEBIAN/requirements-ubuntu-24.10.txt'
+      - IMAGE:
+          # https://devguide.python.org/versions/#supported-versions
+          # - python:3.14-rc-alpine  # EOL 2030-10 => uvicorn does not support 3.14 yet
+          - python:3.13-alpine  # EOL 2029-10
+          - python:3.12-alpine  # EOL 2028-10
+          - python:3.11-alpine  # EOL 2027-10
+          # - python:3.10-alpine  # EOL 2026-10 => ImportError: cannot import name 'UTC' from 'datetime'
+          # - python:3.9-alpine  # EOL 2025-10 => ImportError: cannot import name 'UTC' from 'datetime'
   before_script:
-    - apt-get update && apt-get install -y python3-dev python3-pip python3-venv gcc
+    - apk --no-cache add openssl
     - python3 -m venv venv
     - source venv/bin/activate
     - pip install --upgrade pip
-    - pip install -r $REQUIREMENTS
+    - pip install -r requirements.txt
     - pip install pytest pytest-cov pytest-custom_exit_code httpx
     - mkdir -p app/cert
     - cd test
@@ -156,10 +156,10 @@ test:
     - python -m pytest main.py --junitxml=report.xml
   artifacts:
     reports:
-      dotenv: version.env
       junit: ['**/report.xml']
 
-.test:apt:
+test:apt:
+  image: $IMAGE
   stage: test
   rules:
     - if: $CI_COMMIT_BRANCH && $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH
@@ -167,7 +167,15 @@ test:
       changes:
         - app/**/*
         - .DEBIAN/**/*
-        - .gitlab-ci.yml
+    - if: $CI_PIPELINE_SOURCE == 'merge_request_event'
+  parallel:
+    matrix:
+      - IMAGE:
+          - debian:trixie-slim  # EOL: t.b.a.
+          - debian:bookworm-slim  # EOL: June 06, 2026
+          - debian:bookworm-slim  # EOL: June 06, 2026
+          - ubuntu:24.04  # EOL: April 2036
+          - ubuntu:24.10
   needs:
     - job: build:apt
       artifacts: true
@@ -199,16 +207,6 @@ test:
     - apt-get purge -qq -y fastapi-dls
     - apt-get autoremove -qq -y && apt-get clean -qq
 
-test:apt:
-  extends: .test:apt
-  image: $IMAGE
-  parallel:
-    matrix:
-      - IMAGE:
-          - debian:bookworm-slim  # EOL: June 06, 2026
-          - ubuntu:24.04  # EOL: April 2036
-          - ubuntu:24.10
-
 test:pacman:archlinux:
   image: archlinux:base
   rules:
@@ -290,15 +288,12 @@ gemnasium-python-dependency_scanning:
     - if: $CI_PIPELINE_SOURCE == "merge_request_event"
     - if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH
 
-.deploy:
-  rules:
-    - if: $CI_COMMIT_TAG
-
 deploy:docker:
-  extends: .deploy
   image: docker:dind
   stage: deploy
   tags: [ docker ]
+  rules:
+    - if: $CI_COMMIT_TAG
   before_script:
     - echo "Building docker image for commit $CI_COMMIT_SHA with version $CI_COMMIT_REF_NAME"
     - docker buildx inspect
@@ -317,9 +312,10 @@ deploy:docker:
 
 deploy:apt:
   # doc: https://git.collinwebdesigns.de/help/user/packages/debian_repository/index.md#install-a-package
-  extends: .deploy
   image: debian:bookworm-slim
   stage: deploy
+  rules:
+    - if: $CI_COMMIT_TAG
   needs:
     - job: build:apt
       artifacts: true
@@ -356,9 +352,10 @@ deploy:apt:
     - 'curl --header "JOB-TOKEN: $CI_JOB_TOKEN" --upload-file ${EXPORT_NAME} "${CI_API_V4_URL}/projects/${CI_PROJECT_ID}/packages/generic/${PACKAGE_NAME}/${PACKAGE_VERSION}/${EXPORT_NAME}"'
 
 deploy:pacman:
-  extends: .deploy
   image: archlinux:base-devel
   stage: deploy
+  rules:
+    - if: $CI_COMMIT_TAG
   needs:
     - job: build:pacman
       artifacts: true
@@ -379,7 +376,7 @@ deploy:pacman:
 release:
   image: registry.gitlab.com/gitlab-org/release-cli:latest
   stage: .post
-  needs: [ test ]
+  needs: [ build:docker, build:apt, build:pacman ]
   rules:
     - if: $CI_COMMIT_TAG
   script:
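
The two disabled interpreter images in the new `test:python` matrix fail on `from datetime import UTC`; that constant only exists since Python 3.11, which is why the matrix stops at `python:3.11-alpine`. A minimal illustration of the pattern that breaks on 3.9/3.10 (illustrative only, not code taken from the app):

```python
from datetime import datetime, UTC  # `UTC` was added to the stdlib in Python 3.11

# On python:3.9 / python:3.10 the import above raises:
#   ImportError: cannot import name 'UTC' from 'datetime'
# which matches the comments on the commented-out matrix entries.
print(datetime.now(UTC).isoformat())
```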

View File

@@ -3,9 +3,10 @@
 Minimal Delegated License Service (DLS).
 
 > [!warning] Branch support \
-> Use FastAPI-DLS == 1.x until **17.x** releases. \
-> Use FastAPI-DLS == 2.x since **18.x** releases in combination
+> FastAPI-DLS Version 1.x supports up to **`17.x`** releases. \
+> FastAPI-DLS Version 2.x is backwards compatible to `17.x` and supports **`18.x`** releases in combination
 > with [gridd-unlock-patcher](https://git.collinwebdesigns.de/oscar.krause/gridd-unlock-patcher).
+> Other combinations of FastAPI-DLS and Driver-Branches may work but are not tested.
 
 > [!note] Compatibility
 > Compatibility tested with official NLS 2.0.1, 2.1.0, 3.1.0, 3.3.1, 3.4.0. For Driver compatibility
@@ -754,7 +755,7 @@ Successfully tested with this package versions.
 | FastAPI-DLS Version | vGPU Suftware | Driver Branch | Linux vGPU Manager | Linux Driver | Windows Driver | Release Date | EOL Date |
 |---------------------|:-------------:|:-------------:|--------------------|--------------|----------------|--------------:|--------------:|
 | `2.x`               | `18.0`        | **R570**      | `570.124.03`       | `570.124.06` | `572.60`       | March 2025    | March 2026 |
-| `1.x`               | `17.5`        |               | `550.144.02`       | `550.144.03` | `553.62`       | January 2025  | June 2025 |
+| `1.x` & `2.x`       | `17.5`        |               | `550.144.02`       | `550.144.03` | `553.62`       | January 2025  | June 2025 |
 |                     | `17.4`        |               | `550.127.06`       | `550.127.05` | `553.24`       | October 2024  | |
 |                     | `17.3`        |               | `550.90.05`        | `550.90.07`  | `552.74`       | July 2024     | |
 |                     | `17.2`        |               | `550.90.05`        | `550.90.07`  | `552.55`       | June 2024     | |
@@ -778,13 +779,14 @@ Thanks to vGPU community and all who uses this project and report bugs.
 
 Special thanks to:
 
-- @samicrusader who created build file for **ArchLinux**
-- @cyrus who wrote the section for **openSUSE**
-- @midi who wrote the section for **unRAID**
-- @polloloco who wrote the *[NVIDIA vGPU Guide](https://gitlab.com/polloloco/vgpu-proxmox)*
-- @DualCoder who creates the `vgpu_unlock` functionality [vgpu_unlock](https://github.com/DualCoder/vgpu_unlock)
-- Krutav Shah who wrote the [vGPU_Unlock Wiki](https://docs.google.com/document/d/1pzrWJ9h-zANCtyqRgS7Vzla0Y8Ea2-5z2HEi4X75d2Q/)
-- Wim van 't Hoog for the [Proxmox All-In-One Installer Script](https://wvthoog.nl/proxmox-vgpu-v3/)
-- @mrzenc who wrote [fastapi-dls-nixos](https://github.com/mrzenc/fastapi-dls-nixos)
+- `samicrusader` who created build file for **ArchLinux**
+- `cyrus` who wrote the section for **openSUSE**
+- `midi` who wrote the section for **unRAID**
+- `polloloco` who wrote the *[NVIDIA vGPU Guide](https://gitlab.com/polloloco/vgpu-proxmox)*
+- `DualCoder` who creates the `vgpu_unlock` functionality [vgpu_unlock](https://github.com/DualCoder/vgpu_unlock)
+- `Krutav Shah` who wrote the [vGPU_Unlock Wiki](https://docs.google.com/document/d/1pzrWJ9h-zANCtyqRgS7Vzla0Y8Ea2-5z2HEi4X75d2Q/)
+- `Wim van 't Hoog` for the [Proxmox All-In-One Installer Script](https://wvthoog.nl/proxmox-vgpu-v3/)
+- `mrzenc` who wrote [fastapi-dls-nixos](https://github.com/mrzenc/fastapi-dls-nixos)
+- `electricsheep49` who wrote [gridd-unlock-patcher](https://git.collinwebdesigns.de/oscar.krause/gridd-unlock-patcher)
 
 And thanks to all people who contributed to all these libraries!

View File

@@ -276,6 +276,7 @@ async def _client_token():
             {
                 "idx": 0,
                 "d_name": "DLS",
+                # todo: {"service": "quick_release", "port": 80} - see "shutdown for windows"
                 "svc_port_map": [{"service": "auth", "port": DLS_PORT}, {"service": "lease", "port": DLS_PORT}]
             }
         ],

View File

@@ -5,7 +5,7 @@ from sqlalchemy import Column, VARCHAR, CHAR, ForeignKey, DATETIME, update, and_
 from sqlalchemy.engine import Engine
 from sqlalchemy.orm import sessionmaker, declarative_base
 
-from util import NV
+from util import DriverMatrix
 
 Base = declarative_base()
 
@@ -25,7 +25,7 @@ class Origin(Base):
         return f'Origin(origin_ref={self.origin_ref}, hostname={self.hostname})'
 
     def serialize(self) -> dict:
-        _ = NV().find(self.guest_driver_version)
+        _ = DriverMatrix().find(self.guest_driver_version)
 
         return {
             'origin_ref': self.origin_ref,
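
The rename from `NV` to `DriverMatrix` only touches this lookup; per the type hint in the diff below (`def find(version: str) -> dict | None`), it returns a dict for a known driver version or `None`. A rough sketch of the call pattern, assuming it is run from the `app/` directory so `util` resolves the same way as in `orm.py`, with a sample version taken from the compatibility table above:

```python
# Sketch only: how the renamed helper is consumed by Origin.serialize().
from util import DriverMatrix

guest_driver_version = '570.124.06'  # sample value from the README compatibility table
info = DriverMatrix().find(guest_driver_version)

if info is None:
    # static/driver_matrix.json missing or version unknown
    print(f'No driver branch found for {guest_driver_version}')
else:
    print(info)
```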

View File

@@ -16,6 +16,14 @@ from cryptography.x509 import load_pem_x509_certificate, Certificate
 logging.basicConfig()
 
 
+def load_file(filename: str) -> bytes:
+    log = logging.getLogger(f'{__name__}')
+    log.debug(f'Loading contents of file "{filename}')
+    with open(filename, 'rb') as file:
+        content = file.read()
+    return content
+
+
 class CASetup:
     ###
     #
@@ -236,7 +244,7 @@ class PrivateKey:
         return PublicKey(data=data)
 
     def generate_signature(self, data: bytes) -> bytes:
-        return self.__key.sign(data, padding=PKCS1v15(), algorithm=SHA256())
+        return self.__key.sign(data=data, padding=PKCS1v15(), algorithm=SHA256())
 
     @staticmethod
     def generate(public_exponent: int = 65537, key_size: int = 2048) -> "PrivateKey":
@@ -281,8 +289,8 @@ class PublicKey:
     def exp(self):
         return int(self.__key.public_numbers().e)
 
-    def verify_signature(self, signature: bytes, data: bytes) -> bytes:
-        return self.__key.verify(signature, data, padding=PKCS1v15(), algorithm=SHA256())
+    def verify_signature(self, signature: bytes, data: bytes) -> None:
+        self.__key.verify(signature=signature, data=data, padding=PKCS1v15(), algorithm=SHA256())
 
 
 class Cert:
@@ -317,30 +325,30 @@ def load_file(filename: str) -> bytes:
         content = file.read()
     return content
 
 
-class NV:
+class DriverMatrix:
     __DRIVER_MATRIX_FILENAME = 'static/driver_matrix.json'
     __DRIVER_MATRIX: None | dict = None  # https://docs.nvidia.com/grid/ => "Driver Versions"
 
     def __init__(self):
         self.log = logging.getLogger(self.__class__.__name__)
 
-        if NV.__DRIVER_MATRIX is None:
-            from json import load as json_load
-            try:
-                file = open(NV.__DRIVER_MATRIX_FILENAME)
-                NV.__DRIVER_MATRIX = json_load(file)
-                file.close()
-                self.log.debug(f'Successfully loaded "{NV.__DRIVER_MATRIX_FILENAME}".')
-            except Exception as e:
-                NV.__DRIVER_MATRIX = {}  # init empty dict to not try open file everytime, just when restarting app
-                # self.log.warning(f'Failed to load "{NV.__DRIVER_MATRIX_FILENAME}": {e}')
+        if DriverMatrix.__DRIVER_MATRIX is None:
+            self.__load()
+
+    def __load(self):
+        try:
+            with open(DriverMatrix.__DRIVER_MATRIX_FILENAME, 'r') as f:
+                DriverMatrix.__DRIVER_MATRIX = json_loads(f.read())
+            self.log.debug(f'Successfully loaded "{DriverMatrix.__DRIVER_MATRIX_FILENAME}".')
+        except Exception as e:
+            DriverMatrix.__DRIVER_MATRIX = {}  # init empty dict to not try open file everytime, just when restarting app
+            # self.log.warning(f'Failed to load "{NV.__DRIVER_MATRIX_FILENAME}": {e}')
 
     @staticmethod
     def find(version: str) -> dict | None:
-        if NV.__DRIVER_MATRIX is None:
+        if DriverMatrix.__DRIVER_MATRIX is None:
             return None
-        for idx, (key, branch) in enumerate(NV.__DRIVER_MATRIX.items()):
+        for idx, (key, branch) in enumerate(DriverMatrix.__DRIVER_MATRIX.items()):
             for release in branch.get('$releases'):
                 linux_driver = release.get('Linux Driver')
                 windows_driver = release.get('Windows Driver')
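
The `generate_signature`/`verify_signature` changes switch to keyword arguments and correct the return type: `verify()` in the `cryptography` library returns `None` and raises on mismatch rather than returning a value. A short round-trip sketch using the underlying `cryptography` primitives that these wrappers delegate to (`self.__key.sign(...)` / `self.__key.verify(...)` in the diff):

```python
# Round-trip of the call pattern the diff standardises on.
from cryptography.exceptions import InvalidSignature
from cryptography.hazmat.primitives.asymmetric.rsa import generate_private_key
from cryptography.hazmat.primitives.asymmetric.padding import PKCS1v15
from cryptography.hazmat.primitives.hashes import SHA256

key = generate_private_key(public_exponent=65537, key_size=2048)
data = b'hello dls'

signature = key.sign(data=data, padding=PKCS1v15(), algorithm=SHA256())
try:
    # verify() returns None on success and raises InvalidSignature otherwise,
    # hence the wrapper's new "-> None" annotation.
    key.public_key().verify(signature=signature, data=data, padding=PKCS1v15(), algorithm=SHA256())
    print('signature valid')
except InvalidSignature:
    print('signature invalid')
```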

View File

@@ -1,8 +1,8 @@
 fastapi==0.115.12
-uvicorn[standard]==0.34.0
+uvicorn[standard]==0.34.1
 python-jose[cryptography]==3.4.0
 cryptography==44.0.2
 python-dateutil==2.9.0
 sqlalchemy==2.0.40
-markdown==3.7
+markdown==3.8
 python-dotenv==1.1.0

View File

@@ -6,7 +6,7 @@ logger.setLevel(logging.INFO)
 URL = 'https://docs.nvidia.com/vgpu/index.html'
 
-BRANCH_STATUS_KEY, SOFTWARE_BRANCH_KEY, = 'vGPU Branch Status', 'vGPU Software Branch'
+BRANCH_STATUS_KEY = 'vGPU Branch Status'
 VGPU_KEY, GRID_KEY, DRIVER_BRANCH_KEY = 'vGPU Software', 'vGPU Software', 'Driver Branch'
 LINUX_VGPU_MANAGER_KEY, LINUX_DRIVER_KEY = 'Linux vGPU Manager', 'Linux Driver'
 WINDOWS_VGPU_MANAGER_KEY, WINDOWS_DRIVER_KEY = 'Windows vGPU Manager', 'Windows Driver'
@@ -26,12 +26,15 @@ def __driver_versions(html: 'BeautifulSoup'):
     # find wrapper for "DriverVersions" and find tables
     data = html.find('div', {'id': 'driver-versions'})
-    items = data.findAll('bsp-accordion', {'class': 'Accordion-items-item'})
+    items = data.find_all('bsp-accordion', {'class': 'Accordion-items-item'})
     for item in items:
         software_branch = item.find('div', {'class': 'Accordion-items-item-title'}).text.strip()
         software_branch = software_branch.replace(' Releases', '')
         matrix_key = software_branch.lower()
 
+        branch_status = item.find('a', href=True, string='Branch status')
+        branch_status = branch_status.next_sibling.replace(':', '').strip()
+
         # driver version info from table-heads (ths) and table-rows (trs)
         table = item.find('table')
         ths, trs = table.find_all('th'), table.find_all('tr')
@@ -42,48 +45,20 @@ def __driver_versions(html: 'BeautifulSoup'):
                 continue
             # create dict with table-heads as key and cell content as value
             x = {headers[i]: __strip(cell.text) for i, cell in enumerate(tds)}
+            x.setdefault(BRANCH_STATUS_KEY, branch_status)
             releases.append(x)
 
         # add to matrix
         MATRIX.update({matrix_key: {JSON_RELEASES_KEY: releases}})
 
 
-def __release_branches(html: 'BeautifulSoup'):
-    # find wrapper for "AllReleaseBranches" and find table
-    data = html.find('div', {'id': 'all-release-branches'})
-    table = data.find('table')
-
-    # branch releases info from table-heads (ths) and table-rows (trs)
-    ths, trs = table.find_all('th'), table.find_all('tr')
-    headers = [header.text.strip() for header in ths]
-
-    for trs in trs:
-        tds = trs.find_all('td')
-        if len(tds) == 0:  # skip empty
-            continue
-        # create dict with table-heads as key and cell content as value
-        x = {headers[i]: cell.text.strip() for i, cell in enumerate(tds)}
-
-        # get matrix_key
-        software_branch = x.get(SOFTWARE_BRANCH_KEY)
-        matrix_key = software_branch.lower()
-
-        # add to matrix
-        MATRIX.update({matrix_key: MATRIX.get(matrix_key) | x})
-
-
 def __debug():
     # print table head
-    s = f'{SOFTWARE_BRANCH_KEY:^21} | {BRANCH_STATUS_KEY:^21} | {VGPU_KEY:^13} | {LINUX_VGPU_MANAGER_KEY:^21} | {LINUX_DRIVER_KEY:^21} | {WINDOWS_VGPU_MANAGER_KEY:^21} | {WINDOWS_DRIVER_KEY:^21} | {RELEASE_DATE_KEY:>21} | {EOL_KEY:>21}'
+    s = f'{VGPU_KEY:^13} | {LINUX_VGPU_MANAGER_KEY:^21} | {LINUX_DRIVER_KEY:^21} | {WINDOWS_VGPU_MANAGER_KEY:^21} | {WINDOWS_DRIVER_KEY:^21} | {RELEASE_DATE_KEY:>21} | {BRANCH_STATUS_KEY:^21}'
     print(s)
 
     # iterate over dict & format some variables to not overload table
     for idx, (key, branch) in enumerate(MATRIX.items()):
-        branch_status = branch.get(BRANCH_STATUS_KEY)
-        branch_status = branch_status.replace('Branch ', '')
-        branch_status = branch_status.replace('Long-Term Support', 'LTS')
-        branch_status = branch_status.replace('Production', 'Prod.')
-
-        software_branch = branch.get(SOFTWARE_BRANCH_KEY).replace('NVIDIA ', '')
         for release in branch.get(JSON_RELEASES_KEY):
             version = release.get(VGPU_KEY, release.get(GRID_KEY, ''))
             linux_manager = release.get(LINUX_VGPU_MANAGER_KEY, release.get(ALT_VGPU_MANAGER_KEY, ''))
@@ -92,13 +67,25 @@ def __debug():
             windows_driver = release.get(WINDOWS_DRIVER_KEY)
             release_date = release.get(RELEASE_DATE_KEY)
             is_latest = release.get(VGPU_KEY) == branch.get(LATEST_KEY)
+            branch_status = __parse_branch_status(release.get(BRANCH_STATUS_KEY, ''))
 
             version = f'{version} *' if is_latest else version
-            eol = branch.get(EOL_KEY) if is_latest else ''
-            s = f'{software_branch:^21} | {branch_status:^21} | {version:<13} | {linux_manager:<21} | {linux_driver:<21} | {windows_manager:<21} | {windows_driver:<21} | {release_date:>21} | {eol:>21}'
+            s = f'{version:<13} | {linux_manager:<21} | {linux_driver:<21} | {windows_manager:<21} | {windows_driver:<21} | {release_date:>21} | {branch_status:^21}'
             print(s)
 
 
+def __parse_branch_status(string: str) -> str:
+    string = string.replace('Production Branch', 'Prod. -')
+    string = string.replace('Long-Term Support Branch', 'LTS -')
+
+    string = string.replace('supported until', '')
+
+    string = string.replace('EOL since', 'EOL - ')
+    string = string.replace('EOL from', 'EOL -')
+
+    return string
+
+
 def __dump(filename: str):
     import json
 
@@ -128,7 +115,6 @@ if __name__ == '__main__':
 
     # build matrix
     __driver_versions(soup)
-    __release_branches(soup)
 
     # debug output
     __debug()
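
The new `__parse_branch_status()` only normalises the scraped status strings for the debug table. A quick standalone check of that replacement chain; the chain itself is copied from the diff above, while the sample inputs are made-up strings that mimic the phrasing on the NVIDIA "Driver Versions" page, not scraped data:

```python
# Standalone copy of the replacement chain for illustration.
def parse_branch_status(string: str) -> str:
    string = string.replace('Production Branch', 'Prod. -')
    string = string.replace('Long-Term Support Branch', 'LTS -')
    string = string.replace('supported until', '')
    string = string.replace('EOL since', 'EOL - ')
    string = string.replace('EOL from', 'EOL -')
    return string

print(parse_branch_status('Long-Term Support Branch supported until March 2026'))  # -> 'LTS -  March 2026'
print(parse_branch_status('Production Branch supported until September 2025'))     # -> 'Prod. -  September 2025'
print(parse_branch_status('EOL since December 2023'))                               # -> 'EOL -  December 2023'
```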

View File

@@ -96,7 +96,7 @@ def test_client_token():
     assert response.status_code == 200
 
 
-def test_config_token():  # todo: /leasing/v1/config-token
+def test_config_token():
     # https://git.collinwebdesigns.de/nvidia/nls/-/blob/main/src/test/test_config_token.py
     response = client.post('/leasing/v1/config-token', json={"service_instance_ref": INSTANCE_REF})
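
The diff drops the stale `# todo` marker from `test_config_token()`; the request shape it sends can also be reproduced against a running instance. A rough sketch with `httpx` (the base URL and the `service_instance_ref` value are placeholders, not values from the repo):

```python
# Standalone equivalent of the request the test sends; adjust URL and ref.
import httpx

DLS_URL = 'https://localhost:443'  # placeholder: wherever FastAPI-DLS is reachable
payload = {'service_instance_ref': '00000000-0000-0000-0000-000000000000'}  # placeholder ref

# verify=False because a default deployment typically uses a self-signed certificate
response = httpx.post(f'{DLS_URL}/leasing/v1/config-token', json=payload, verify=False)
print(response.status_code)
print(response.text[:200])
```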