Mirror of https://git.collinwebdesigns.de/oscar.krause/fastapi-dls.git
Compare commits
6 commits: d829c7ae3e ... 40b42c248d

- 40b42c248d
- 699d85acc4
- 584eee41ef
- 25658cb1fb
- 43fdf1170c
- 3f5fcbebb3
@@ -734,6 +734,9 @@ The error message can safely be ignored (since we have no license limitation :P)

**18.x Drivers are not supported on FastAPI-DLS Versions < 1.6.0**

<details>
<summary>Show Table</summary>

Successfully tested with these package versions.

| vGPU Software | Driver Branch | Linux vGPU Manager | Linux Driver | Windows Driver | Release Date | EOL Date |
@@ -757,6 +760,8 @@ Successfully tested with these package versions.
| `15.4` | R525 | `525.147.01` | `525.147.05` | `529.19` | June 2023 | December 2023 |
| `14.4` | R510 | `510.108.03` | `510.108.03` | `514.08` | December 2022 | February 2023 |

</details>

- https://docs.nvidia.com/grid/index.html
- https://docs.nvidia.com/grid/gpus-supported-by-vgpu.html
app/main.py (49 changed lines)
@@ -1,11 +1,12 @@
import logging
+import os.path
from base64 import b64encode as b64enc
from calendar import timegm
from contextlib import asynccontextmanager
from datetime import datetime, timedelta, UTC
from hashlib import sha256
from json import loads as json_loads
-from os import getenv as env
+from os import getenv as env, listdir
from os.path import join, dirname
from uuid import uuid4

@@ -13,6 +14,7 @@ from dateutil.relativedelta import relativedelta
from dotenv import load_dotenv
from fastapi import FastAPI
from fastapi.requests import Request
+from fastapi.staticfiles import StaticFiles
from jose import jws, jwk, jwt, JWTError
from jose.constants import ALGORITHMS
from sqlalchemy import create_engine
@@ -50,6 +52,7 @@ LEASE_RENEWAL_PERIOD = float(env('LEASE_RENEWAL_PERIOD', 0.15))
LEASE_RENEWAL_DELTA = timedelta(days=int(env('LEASE_EXPIRE_DAYS', 90)), hours=int(env('LEASE_EXPIRE_HOURS', 0)))
CLIENT_TOKEN_EXPIRE_DELTA = relativedelta(years=12)
CORS_ORIGINS = str(env('CORS_ORIGINS', '')).split(',') if (env('CORS_ORIGINS')) else [f'https://{DLS_URL}']
+DRIVERS_DIR = env('DRIVERS_DIR', None)

jwt_encode_key = jwk.construct(INSTANCE_KEY_RSA.export_key().decode('utf-8'), algorithm=ALGORITHMS.RS256)
jwt_decode_key = jwk.construct(INSTANCE_KEY_PUB.export_key().decode('utf-8'), algorithm=ALGORITHMS.RS256)
@@ -88,6 +91,9 @@ async def lifespan(_: FastAPI):
config = dict(openapi_url=None, docs_url=None, redoc_url=None)  # dict(openapi_url='/-/openapi.json', docs_url='/-/docs', redoc_url='/-/redoc')
app = FastAPI(title='FastAPI-DLS', description='Minimal Delegated License Service (DLS).', version=VERSION, lifespan=lifespan, **config)

+if DRIVERS_DIR is not None:
+    app.mount('/-/static-drivers', StaticFiles(directory=str(DRIVERS_DIR), html=False), name='drivers')
+
app.debug = DEBUG
app.add_middleware(
    CORSMiddleware,
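The static mount above is only registered when `DRIVERS_DIR` is set, so deployments without a drivers directory keep their current route table. Below is a minimal standalone sketch of the same pattern, not the project's code; the port, the example directory and the uvicorn invocation are assumptions.

```python
# Minimal standalone sketch of a conditional StaticFiles mount.
# DRIVERS_DIR value, port and the uvicorn call are assumptions for illustration.
from os import getenv

import uvicorn
from fastapi import FastAPI
from fastapi.staticfiles import StaticFiles

app = FastAPI()

DRIVERS_DIR = getenv('DRIVERS_DIR', None)  # e.g. DRIVERS_DIR=/opt/drivers

if DRIVERS_DIR is not None:
    # html=False serves plain files without generating directory index pages;
    # StaticFiles raises at startup if the directory does not exist
    app.mount('/-/static-drivers', StaticFiles(directory=str(DRIVERS_DIR), html=False), name='drivers')

if __name__ == '__main__':
    uvicorn.run(app, host='0.0.0.0', port=8000)
```

With `DRIVERS_DIR=/opt/drivers`, a file `/opt/drivers/vgpu/driver.run` would then be served at `/-/static-drivers/vgpu/driver.run`.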
@@ -186,6 +192,25 @@ async def _manage(request: Request):
    return HTMLr(response)


+@app.get('/-/drivers/{directory:path}', summary='* List drivers directory')
+async def _drivers(request: Request, directory: str | None):
+    if DRIVERS_DIR is None:
+        return Response(status_code=404, content=f'Variable "DRIVERS_DIR" not set.')
+
+    path = os.path.join(DRIVERS_DIR, directory)
+
+    if not os.path.exists(path) and not os.path.isfile(path):
+        return Response(status_code=404, content=f'Resource "{path}" not found!')
+
+    content = [{
+        "type": "file" if os.path.isfile(f'{path}/{_}') else "folder" if os.path.isdir(f'{path}/{_}') else "unknown",
+        "name": _,
+        "link": f'/-/static-drivers/{directory}{_}',
+    } for _ in listdir(path)]
+
+    return JSONr({"directory": path, "content": content})
+
+
@app.get('/-/origins', summary='* Origins')
async def _origins(request: Request, leases: bool = False):
    session = sessionmaker(bind=db)()
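The listing endpoint builds its `link` values from the `/-/static-drivers` mount, so a client can first list a directory and then fetch the files it references. A hedged client-side sketch follows; the base URL, the `vgpu/` subdirectory and unauthenticated access over a self-signed certificate are assumptions.

```python
# Hypothetical client for the new driver endpoints; base URL and the
# 'vgpu/' subdirectory are assumptions for illustration only.
import requests

BASE = 'https://dls.example.org'  # assumed FastAPI-DLS instance

# list <DRIVERS_DIR>/vgpu/ - the trailing slash matters, because the endpoint
# concatenates the directory string and the file name into the download link
listing = requests.get(f'{BASE}/-/drivers/vgpu/', verify=False).json()
print('directory:', listing['directory'])

for entry in listing['content']:
    print(entry['type'], entry['name'], entry['link'])

# download the first listed file via the static mount the links point to
files = [e for e in listing['content'] if e['type'] == 'file']
if files:
    r = requests.get(f"{BASE}{files[0]['link']}", verify=False)
    with open(files[0]['name'], 'wb') as fh:
        fh.write(r.content)
```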
@@ -287,7 +312,7 @@ async def auth_v1_origin(request: Request):
    j, cur_time = json_loads((await request.body()).decode('utf-8')), datetime.now(UTC)

    origin_ref = j.get('candidate_origin_ref')
-    logging.info(f'> [ origin ]: {origin_ref}: {j}')
+    logger.info(f'> [ origin ]: {origin_ref}: {j}')

    data = Origin(
        origin_ref=origin_ref,
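The hunk above and the ones that follow swap calls on the root `logging` module for a named `logger` object whose definition is not visible in these hunks. A typical module-level setup, shown here purely as an assumption, would look like this:

```python
import logging

# Assumed module-level logger; the actual name and configuration used in
# app/main.py are not part of the hunks shown above.
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)

logger.info('> [ origin ]: example message')
```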
@@ -317,7 +342,7 @@ async def auth_v1_origin_update(request: Request):
    j, cur_time = json_loads((await request.body()).decode('utf-8')), datetime.now(UTC)

    origin_ref = j.get('origin_ref')
-    logging.info(f'> [ update ]: {origin_ref}: {j}')
+    logger.info(f'> [ update ]: {origin_ref}: {j}')

    data = Origin(
        origin_ref=origin_ref,
@@ -344,7 +369,7 @@ async def auth_v1_code(request: Request):
    j, cur_time = json_loads((await request.body()).decode('utf-8')), datetime.now(UTC)

    origin_ref = j.get('origin_ref')
-    logging.info(f'> [ code ]: {origin_ref}: {j}')
+    logger.info(f'> [ code ]: {origin_ref}: {j}')

    delta = relativedelta(minutes=15)
    expires = cur_time + delta
@@ -381,7 +406,7 @@ async def auth_v1_token(request: Request):
        return JSONr(status_code=400, content={'status': 400, 'title': 'invalid token', 'detail': str(e)})

    origin_ref = payload.get('origin_ref')
-    logging.info(f'> [ auth ]: {origin_ref}: {j}')
+    logger.info(f'> [ auth ]: {origin_ref}: {j}')

    # validate the code challenge
    challenge = b64enc(sha256(j.get('code_verifier').encode('utf-8')).digest()).rstrip(b'=').decode('utf-8')
@@ -424,7 +449,7 @@ async def leasing_v1_lessor(request: Request):

    origin_ref = token.get('origin_ref')
    scope_ref_list = j.get('scope_ref_list')
-    logging.info(f'> [ create ]: {origin_ref}: create leases for scope_ref_list {scope_ref_list}')
+    logger.info(f'> [ create ]: {origin_ref}: create leases for scope_ref_list {scope_ref_list}')

    lease_result_list = []
    for scope_ref in scope_ref_list:
@@ -468,7 +493,7 @@ async def leasing_v1_lessor_lease(request: Request):
    origin_ref = token.get('origin_ref')

    active_lease_list = list(map(lambda x: x.lease_ref, Lease.find_by_origin_ref(db, origin_ref)))
-    logging.info(f'> [ leases ]: {origin_ref}: found {len(active_lease_list)} active leases')
+    logger.info(f'> [ leases ]: {origin_ref}: found {len(active_lease_list)} active leases')

    response = {
        "active_lease_list": active_lease_list,
@@ -486,7 +511,7 @@ async def leasing_v1_lease_renew(request: Request, lease_ref: str):
    token, cur_time = __get_token(request), datetime.now(UTC)

    origin_ref = token.get('origin_ref')
-    logging.info(f'> [ renew ]: {origin_ref}: renew {lease_ref}')
+    logger.info(f'> [ renew ]: {origin_ref}: renew {lease_ref}')

    entity = Lease.find_by_origin_ref_and_lease_ref(db, origin_ref, lease_ref)
    if entity is None:
@@ -513,7 +538,7 @@ async def leasing_v1_lease_delete(request: Request, lease_ref: str):
    token, cur_time = __get_token(request), datetime.now(UTC)

    origin_ref = token.get('origin_ref')
-    logging.info(f'> [ return ]: {origin_ref}: return {lease_ref}')
+    logger.info(f'> [ return ]: {origin_ref}: return {lease_ref}')

    entity = Lease.find_by_lease_ref(db, lease_ref)
    if entity.origin_ref != origin_ref:
@@ -542,7 +567,7 @@ async def leasing_v1_lessor_lease_remove(request: Request):

    released_lease_list = list(map(lambda x: x.lease_ref, Lease.find_by_origin_ref(db, origin_ref)))
    deletions = Lease.cleanup(db, origin_ref)
-    logging.info(f'> [ remove ]: {origin_ref}: removed {deletions} leases')
+    logger.info(f'> [ remove ]: {origin_ref}: removed {deletions} leases')

    response = {
        "released_lease_list": released_lease_list,
@@ -564,7 +589,7 @@ async def leasing_v1_lessor_shutdown(request: Request):

    released_lease_list = list(map(lambda x: x.lease_ref, Lease.find_by_origin_ref(db, origin_ref)))
    deletions = Lease.cleanup(db, origin_ref)
-    logging.info(f'> [ shutdown ]: {origin_ref}: removed {deletions} leases')
+    logger.info(f'> [ shutdown ]: {origin_ref}: removed {deletions} leases')

    response = {
        "released_lease_list": released_lease_list,
@@ -587,7 +612,7 @@ if __name__ == '__main__':
    #
    ###

-    logging.info(f'> Starting dev-server ...')
+    logger.info(f'> Starting dev-server ...')

    ssl_keyfile = join(dirname(__file__), 'cert/webserver.key')
    ssl_certfile = join(dirname(__file__), 'cert/webserver.crt')
@@ -29,7 +29,7 @@ nvidia-gridd[2986]: Acquiring license. (Info: license.nvidia.space; NVIDIA RTX V
nvidia-gridd[2986]: License acquired successfully. (Info: license.nvidia.space, NVIDIA RTX Virtual Workstation; Expiry: 2023-1-29 22:3:0 GMT)
```

-# DLS-Container File-System (Docker)
+# Docker DLS-Container File-System

- More about Docker Images https://git.collinwebdesigns.de/nvidia/nls
@@ -42,23 +42,6 @@ Files can be modified with `docker cp <container-id>:/venv/... /opt/localfile/...

Config-Variables are in `etc/dls/config/service_env.conf`.

-## Dive / Docker image inspector
-
-- `dive dls:appliance`
-
-The source code is stored in `/venv/lib/python3.9/site-packages/nls_*`.
-
-Image-Reference:
-
-```
-Tags: (unavailable)
-Id: d1c7976a5d2b3681ff6c5a30f8187e4015187a83f3f285ba4a37a45458bd6b98
-Digest: sha256:311223c5af7a298ec1104f5dc8c3019bfb0e1f77256dc3d995244ffb295a971f
-Command:
-#(nop) ADD file:c1900d3e3a29c29a743a8da86c437006ec5d2aa873fb24e48033b6bf492bb37b in /
-```
-

## Site Key Uri - `/etc/dls/config/site_key_uri.bin`
@@ -80,18 +63,43 @@ cat dls_db_password.bin | base64 -d > dls_db_password.bin.raw
openssl rsautl -decrypt -inkey /tmp/private-key.pem -in dls_db_password.bin.raw
```
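The two commands above base64-decode the stored database password and decrypt it with the instance's private key. A hedged Python equivalent is sketched below; it assumes the `cryptography` package and the PKCS#1 v1.5 padding that `openssl rsautl` uses by default.

```python
# Hypothetical Python equivalent of the base64/openssl steps above.
# Assumes PKCS#1 v1.5 padding (the `openssl rsautl` default) and the
# `cryptography` package; file names mirror the shell example.
from base64 import b64decode

from cryptography.hazmat.primitives import serialization
from cryptography.hazmat.primitives.asymmetric import padding

with open('/tmp/private-key.pem', 'rb') as fh:
    private_key = serialization.load_pem_private_key(fh.read(), password=None)

with open('dls_db_password.bin', 'rb') as fh:
    ciphertext = b64decode(fh.read())

print(private_key.decrypt(ciphertext, padding.PKCS1v15()).decode('utf-8'))
```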
-# Database
+# Docker Postgres-Container

- It's enough to manipulate the licenses in the database; not a single line of code needs to be changed to bypass the licensing validations.

## Inspect

Valid users are `dls_writer` and `postgres`.

```shell
docker exec -it <dls:pgsql> psql -h localhost -U postgres
```

-Or you can modify `docker-compose.yaml` to forward Postgres port.
+## External Access
+
+Or you can modify `docker-compose.yaml` to forward the Postgres port. To create a superuser for external access, use the `docker exec` command from above and run the following:

```sql
CREATE USER admin WITH LOGIN SUPERUSER PASSWORD 'admin';
```
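Once the port is forwarded and the superuser exists, the database is reachable from the host. A hedged connection sketch using `psycopg2` follows; host, port and database name are assumptions.

```python
# Hypothetical external connection using the admin user created above.
# Host, port and database name are assumptions; adjust to your compose setup.
import psycopg2

conn = psycopg2.connect(
    host='localhost',   # assumes the Postgres port is forwarded to the host
    port=5432,
    user='admin',
    password='admin',
    dbname='postgres',  # assumption: start with the default database
)

with conn, conn.cursor() as cur:
    cur.execute('SELECT datname FROM pg_database;')  # list available databases
    for (name,) in cur.fetchall():
        print(name)

conn.close()
```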
+# Dive / Docker image inspector
+
+- `dive dls:appliance`
+
+The source code is stored in `/venv/lib/python3.9/site-packages/nls_*`.
+
+Image-Reference:
+
+```
+Tags: (unavailable)
+Id: d1c7976a5d2b3681ff6c5a30f8187e4015187a83f3f285ba4a37a45458bd6b98
+Digest: sha256:311223c5af7a298ec1104f5dc8c3019bfb0e1f77256dc3d995244ffb295a971f
+Command:
+#(nop) ADD file:c1900d3e3a29c29a743a8da86c437006ec5d2aa873fb24e48033b6bf492bb37b in /
+```

# Logging / Stack Trace