Mirror of https://github.com/RROrg/rr.git
Synced 2025-06-21 05:51:05 +08:00
Fix the checksum lookup in getpats4mv so that the default value '0' * 32 is returned when the checksum field is missing
This commit is contained in:
parent 2ff44c4b1f
commit 2a4eb4996d
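Summary: every site that read the pat file's checksum by direct indexing now uses dict.get with an all-zero MD5 placeholder, so entries whose download metadata omits a checksum no longer raise KeyError. A minimal sketch of the new lookup semantics, using made-up stand-ins for the files[0] entries in Synology's findDownloadInfo response:

# Hypothetical stand-ins for files[0] entries; not taken from a real API response.
file_with_sum = {"url": "https://example.com/a.pat?x=1", "checksum": "9e107d9d372bb6826bd81d3542a419d6"}
file_without_sum = {"url": "https://example.com/b.pat?x=1"}

for f in (file_with_sum, file_without_sum):
    # Old code: f['checksum'] raises KeyError when the field is absent.
    # New code: '0' * 32 stands in for "no checksum published".
    print(f.get("checksum", "0" * 32))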
@@ -234,7 +234,7 @@ def getpats4mv(model, version):
     if V not in pats:
         pats[V] = {
             'url': data['info']['system']['detail'][0]['items'][0]['files'][0]['url'].split('?')[0],
-            'sum': data['info']['system']['detail'][0]['items'][0]['files'][0]['checksum']
+            'sum': data['info']['system']['detail'][0]['items'][0]['files'][0].get('checksum', '0' * 32)
         }

     from_ver = min(I['build'] for I in data['info']['pubVers'])
@@ -256,7 +256,7 @@ def getpats4mv(model, version):
             if V not in pats:
                 pats[V] = {
                     'url': dataTmp['info']['system']['detail'][0]['items'][0]['files'][0]['url'].split('?')[0],
-                    'sum': dataTmp['info']['system']['detail'][0]['items'][0]['files'][0]['checksum']
+                    'sum': dataTmp['info']['system']['detail'][0]['items'][0]['files'][0].get('checksum', '0' * 32)
                 }

         for J in I['versions']:
@@ -276,7 +276,7 @@ def getpats4mv(model, version):
                             continue
                         pats[V] = {
                             'url': S['files'][0]['url'].split('?')[0],
-                            'sum': S['files'][0]['checksum']
+                            'sum': S['files'][0].get('checksum', '0' * 32)
                         }
     except Exception as e:
         # click.echo(f"Error: {e}")
@@ -1401,7 +1401,7 @@ function extractDsmFiles() {
   fi

   printf "$(TEXT "Checking hash of %s:")" "${PAT_FILE}"
-  if [ "$(md5sum "${PAT_PATH}" | awk '{print $1}')" != "${PATSUM}" ]; then
+  if [ "00000000000000000000000000000000" != "${PATSUM}" ] && [ "$(md5sum "${PAT_PATH}" | awk '{print $1}')" != "${PATSUM}" ]; then
     rm -f "${PAT_PATH}"
     echo -e "$(TEXT "md5 hash of pat not match, Please reget pat data from the version menu and try again!")" >"${LOG_FILE}"
     return 1
scripts/func.py
@@ -1,290 +1,290 @@
 # -*- coding: utf-8 -*-
 #
 # Copyright (C) 2022 Ing <https://github.com/wjz304>
 #
 # This is free software, licensed under the MIT License.
 # See /LICENSE for more information.
 #

 import os, re, sys, glob, json, yaml, click, shutil, tarfile, kmodule, requests, urllib3
 from requests.adapters import HTTPAdapter
 from requests.packages.urllib3.util.retry import Retry  # type: ignore
 from openpyxl import Workbook


 @click.group()
 def cli():
     """
     The CLI is a commands to RR.
     """
     pass


 @cli.command()
 @click.option("-w", "--workpath", type=str, required=True, help="The workpath of RR.")
 @click.option("-j", "--jsonpath", type=str, required=True, help="The output path of jsonfile.")
 @click.option("-x", "--xlsxpath", type=str, required=False, help="The output path of xlsxfile.")
 def getmodels(workpath, jsonpath, xlsxpath):
     models = {}
     platforms_yml = os.path.join(workpath, "opt", "rr", "platforms.yml")
     with open(platforms_yml, "r") as f:
         P_data = yaml.safe_load(f)
     P_platforms = P_data.get("platforms", [])
     for P in P_platforms:
         productvers = {}
         for V in P_platforms[P]["productvers"]:
             kpre = P_platforms[P]["productvers"][V].get("kpre", "")
             kver = P_platforms[P]["productvers"][V].get("kver", "")
             productvers[V] = f"{kpre}-{kver}" if kpre else kver
         models[P] = {"productvers": productvers, "models": []}

     adapter = HTTPAdapter(max_retries=Retry(total=3, backoff_factor=1, status_forcelist=[500, 502, 503, 504]))
     session = requests.Session()
     session.mount("http://", adapter)
     session.mount("https://", adapter)
     urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)

     try:
         url = "http://update7.synology.com/autoupdate/genRSS.php?include_beta=1"
         #url = "https://update7.synology.com/autoupdate/genRSS.php?include_beta=1"

         req = session.get(url, timeout=10, verify=False)
         req.encoding = "utf-8"
         p = re.compile(r"<mUnique>(.*?)</mUnique>.*?<mLink>(.*?)</mLink>", re.MULTILINE | re.DOTALL)
         data = p.findall(req.text)
     except Exception as e:
         click.echo(f"Error: {e}")
         return

     for item in data:
         if not "DSM" in item[1]:
             continue
         arch = item[0].split("_")[1]
         name = item[1].split("/")[-1].split("_")[1].replace("%2B", "+")
         if arch not in models:
             continue
         if name in (A for B in models for A in models[B]["models"]):
             continue
         models[arch]["models"].append(name)

     if jsonpath:
         with open(jsonpath, "w") as f:
             json.dump(models, f, indent=4, ensure_ascii=False)
     if xlsxpath:
         wb = Workbook()
         ws = wb.active
         ws.append(["platform", "productvers", "Model"])
         for k, v in models.items():
             ws.append([k, str(v["productvers"]), str(v["models"])])
         wb.save(xlsxpath)


 @cli.command()
 @click.option("-w", "--workpath", type=str, required=True, help="The workpath of RR.")
 @click.option("-j", "--jsonpath", type=str, required=True, help="The output path of jsonfile.")
 @click.option("-x", "--xlsxpath", type=str, required=False, help="The output path of xlsxfile.")
 def getpats(workpath, jsonpath, xlsxpath):
     def __fullversion(ver):
         arr = ver.split('-')
         a, b, c = (arr[0].split('.') + ['0', '0', '0'])[:3]
         d = arr[1] if len(arr) > 1 else '00000'
         e = arr[2] if len(arr) > 2 else '0'
         return f'{a}.{b}.{c}-{d}-{e}'

     platforms_yml = os.path.join(workpath, "opt", "rr", "platforms.yml")
     with open(platforms_yml, "r") as f:
         data = yaml.safe_load(f)
     platforms = data.get("platforms", [])

     adapter = HTTPAdapter(max_retries=Retry(total=3, backoff_factor=1, status_forcelist=[500, 502, 503, 504]))
     session = requests.Session()
     session.mount("http://", adapter)
     session.mount("https://", adapter)
     urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)

     try:
         url = "http://update7.synology.com/autoupdate/genRSS.php?include_beta=1"
         #url = "https://update7.synology.com/autoupdate/genRSS.php?include_beta=1"

         req = session.get(url, timeout=10, verify=False)
         req.encoding = "utf-8"
         p = re.compile(r"<mUnique>(.*?)</mUnique>.*?<mLink>(.*?)</mLink>", re.MULTILINE | re.DOTALL)
         data = p.findall(req.text)
     except Exception as e:
         click.echo(f"Error: {e}")
         return

     models = []
     for item in data:
         if not "DSM" in item[1]:
             continue
         arch = item[0].split("_")[1]
         name = item[1].split("/")[-1].split("_")[1].replace("%2B", "+")
         if arch not in platforms:
             continue
         if name in models:
             continue
         models.append(name)

     pats = {}
     for M in models:
         pats[M] = {}
         version = '7'
         urlInfo = "https://www.synology.com/api/support/findDownloadInfo?lang=en-us"
         urlSteps = "https://www.synology.com/api/support/findUpgradeSteps?"
         #urlInfo = "https://www.synology.cn/api/support/findDownloadInfo?lang=zh-cn"
         #urlSteps = "https://www.synology.cn/api/support/findUpgradeSteps?"

         major = f"&major={version.split('.')[0]}" if len(version.split('.')) > 0 else ""
         minor = f"&minor={version.split('.')[1]}" if len(version.split('.')) > 1 else ""
         try:
             req = session.get(f"{urlInfo}&product={M.replace('+', '%2B')}{major}{minor}", timeout=10, verify=False)
             req.encoding = "utf-8"
             data = json.loads(req.text)
         except Exception as e:
             click.echo(f"Error: {e}")
             continue

         build_ver = data['info']['system']['detail'][0]['items'][0]['build_ver']
         build_num = data['info']['system']['detail'][0]['items'][0]['build_num']
         buildnano = data['info']['system']['detail'][0]['items'][0]['nano']
         V = __fullversion(f"{build_ver}-{build_num}-{buildnano}")
         if V not in pats[M]:
             pats[M][V] = {
                 'url': data['info']['system']['detail'][0]['items'][0]['files'][0]['url'].split('?')[0],
-                'sum': data['info']['system']['detail'][0]['items'][0]['files'][0]['checksum']
+                'sum': data['info']['system']['detail'][0]['items'][0]['files'][0].get('checksum', '0' * 32)
             }

         from_ver = min(I['build'] for I in data['info']['pubVers'])

         for I in data['info']['productVers']:
             if not I['version'].startswith(version):
                 continue
             if not major or not minor:
                 majorTmp = f"&major={I['version'].split('.')[0]}" if len(I['version'].split('.')) > 0 else ""
                 minorTmp = f"&minor={I['version'].split('.')[1]}" if len(I['version'].split('.')) > 1 else ""
                 try:
                     reqTmp = session.get(f"{urlInfo}&product={M.replace('+', '%2B')}{majorTmp}{minorTmp}", timeout=10, verify=False)
                     reqTmp.encoding = "utf-8"
                     dataTmp = json.loads(reqTmp.text)
                 except Exception as e:
                     click.echo(f"Error: {e}")
                     continue

                 build_ver = dataTmp['info']['system']['detail'][0]['items'][0]['build_ver']
                 build_num = dataTmp['info']['system']['detail'][0]['items'][0]['build_num']
                 buildnano = dataTmp['info']['system']['detail'][0]['items'][0]['nano']
                 V = __fullversion(f"{build_ver}-{build_num}-{buildnano}")
                 if V not in pats[M]:
                     pats[M][V] = {
                         'url': dataTmp['info']['system']['detail'][0]['items'][0]['files'][0]['url'].split('?')[0],
-                        'sum': dataTmp['info']['system']['detail'][0]['items'][0]['files'][0]['checksum']
+                        'sum': dataTmp['info']['system']['detail'][0]['items'][0]['files'][0].get('checksum', '0' * 32)
                     }

             for J in I['versions']:
                 to_ver = J['build']
                 try:
                     reqSteps = session.get(f"{urlSteps}&product={M.replace('+', '%2B')}&from_ver={from_ver}&to_ver={to_ver}", timeout=10, verify=False)
                     if reqSteps.status_code != 200:
                         continue
                     reqSteps.encoding = "utf-8"
                     dataSteps = json.loads(reqSteps.text)
                 except Exception as e:
                     click.echo(f"Error: {e}")
                     continue

                 for S in dataSteps['upgrade_steps']:
                     if not S.get('full_patch') or not S['build_ver'].startswith(version):
                         continue
                     V = __fullversion(f"{S['build_ver']}-{S['build_num']}-{S['nano']}")
                     if V not in pats[M]:
                         reqPat = session.head(S['files'][0]['url'].split('?')[0], timeout=10, verify=False)
                         if reqPat.status_code == 403:
                             continue
                         pats[M][V] = {
                             'url': S['files'][0]['url'].split('?')[0],
-                            'sum': S['files'][0]['checksum']
+                            'sum': S['files'][0].get('checksum', '0' * 32)
                         }

     if jsonpath:
         with open(jsonpath, "w") as f:
             json.dump(pats, f, indent=4, ensure_ascii=False)
     if xlsxpath:
         wb = Workbook()
         ws = wb.active
         ws.append(["Model", "version", "url", "sum"])
         for k1, v1 in pats.items():
             for k2, v2 in v1.items():
                 ws.append([k1, k2, v2["url"], v2["sum"]])
         wb.save(xlsxpath)


 @cli.command()
 @click.option("-w", "--workpath", type=str, required=True, help="The workpath of RR.")
 @click.option("-j", "--jsonpath", type=str, required=True, help="The output path of jsonfile.")
 @click.option("-x", "--xlsxpath", type=str, required=False, help="The output path of xlsxfile.")
 def getaddons(workpath, jsonpath, xlsxpath):
     AS = glob.glob(os.path.join(workpath, "mnt", "p3", "addons", "*", "manifest.yml"))
     AS.sort()
     addons = {}
     for A in AS:
         with open(A, "r") as file:
             A_data = yaml.safe_load(file)
         A_name = A_data.get("name", "")
         A_system = A_data.get("system", False)
         A_description = A_data.get("description", {"en_US": "Unknown", "zh_CN": "Unknown"})
         addons[A_name] = {"system": A_system, "description": A_description}
     if jsonpath:
         with open(jsonpath, "w") as f:
             json.dump(addons, f, indent=4, ensure_ascii=False)
     if xlsxpath:
         wb = Workbook()
         ws = wb.active
         ws.append(["Name", "system", "en_US", "zh_CN"])
         for k1, v1 in addons.items():
             ws.append([k1, v1.get("system", False), v1.get("description").get("en_US", ""), v1.get("description").get("zh_CN", "")])
         wb.save(xlsxpath)


 @cli.command()
 @click.option("-w", "--workpath", type=str, required=True, help="The workpath of RR.")
 @click.option("-j", "--jsonpath", type=str, required=True, help="The output path of jsonfile.")
 @click.option("-x", "--xlsxpath", type=str, required=False, help="The output path of xlsxfile.")
 def getmodules(workpath, jsonpath, xlsxpath):
     MS = glob.glob(os.path.join(workpath, "mnt", "p3", "modules", "*.tgz"))
     MS.sort()
     modules = {}
     TMP_PATH = "/tmp/modules"
     if os.path.exists(TMP_PATH):
         shutil.rmtree(TMP_PATH)
     for M in MS:
         M_name = os.path.splitext(os.path.basename(M))[0]
         M_modules = {}
         os.makedirs(TMP_PATH)
         with tarfile.open(M, "r") as tar:
             tar.extractall(TMP_PATH)
         KS = glob.glob(os.path.join(TMP_PATH, "*.ko"))
         KS.sort()
         for K in KS:
             K_name = os.path.splitext(os.path.basename(K))[0]
             K_info = kmodule.modinfo(K, basedir=os.path.dirname(K), kernel=None)[0]
             K_description = K_info.get("description", "")
             K_depends = K_info.get("depends", "")
             M_modules[K_name] = {"description": K_description, "depends": K_depends}
         modules[M_name] = M_modules
         if os.path.exists(TMP_PATH):
             shutil.rmtree(TMP_PATH)
     if jsonpath:
         with open(jsonpath, "w") as file:
             json.dump(modules, file, indent=4, ensure_ascii=False)
     if xlsxpath:
         wb = Workbook()
         ws = wb.active
         ws.append(["Name", "Arch", "description", "depends"])
         for k1, v1 in modules.items():
             for k2, v2 in v1.items():
                 ws.append([k2, k1, v2["description"], v2["depends"]])
         wb.save(xlsxpath)


 if __name__ == "__main__":
     cli()
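For reference, __fullversion (unchanged by this commit) pads a loose DSM version string into the fixed major.minor.micro-buildnum-nano form used as the pats dictionary key. A self-contained check of that normalization, restating the helper with illustrative inputs:

def __fullversion(ver):
    arr = ver.split('-')
    a, b, c = (arr[0].split('.') + ['0', '0', '0'])[:3]
    d = arr[1] if len(arr) > 1 else '00000'
    e = arr[2] if len(arr) > 2 else '0'
    return f'{a}.{b}.{c}-{d}-{e}'

assert __fullversion('7.2-64570') == '7.2.0-64570-0'      # missing micro and nano are zero-filled
assert __fullversion('7.1') == '7.1.0-00000-0'            # missing build number defaults to '00000'
assert __fullversion('7.2.1-69057-2') == '7.2.1-69057-2'  # already-full strings pass through

The commands themselves run as, e.g., python scripts/func.py getpats -w <workpath> -j pats.json [-x pats.xlsx], per the click options above.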