formatting

This commit is contained in:
Ing 2025-12-16 13:11:13 +08:00
parent f8c4589f84
commit d5dcb28318
11 changed files with 1806 additions and 1637 deletions

View File

@ -176,23 +176,6 @@ jobs:
echo "TAG=${TAG}" >> $GITHUB_ENV
case "${{ env.size }}" in
2GB)
echo "2GB"
;;
4GB)
echo "4GB"
resizeImg rr/rr.img +2048M
;;
8GB)
echo "8GB"
resizeImg rr/rr.img +6144M
;;
*)
echo "unknown size"
;;
esac
LOOPX=$(sudo losetup -f)
sudo losetup -P "${LOOPX}" rr/rr.img
@ -254,6 +237,26 @@ jobs:
echo "Of course, you can also modify the settings yourself."
} >README.txt
case "${{ env.size }}" in
2GB)
echo "2GB"
gzip -dc "files/initrd/opt/rr/grub.img.gz" > rr/rr_2GB.img
repackImg rr/rr.img rr/rr_2GB.img
rm -f rr/rr.img
mv -f rr/rr_2GB.img rr/rr.img
;;
4GB)
echo "4GB"
;;
8GB)
echo "8GB"
resizeImg rr/rr.img +4096M
;;
*)
echo "unknown size"
;;
esac
case "${{ env.format }}" in
ova)
echo "OVA"

View File

@ -32,14 +32,14 @@ WTITLE="$(printf "$(TEXT "Welcome to %s")" "${RR_TITLE}${RR_RELEASE:+(${RR_RELEA
DATE="$(date)"
printf "\033[1;44m%*s\n" "${COLUMNS}" ""
printf "\033[1;44m%*s\033[A\n" "${COLUMNS}" ""
printf "\033[1;31m%*s\033[0m\n" "$(((${#WTITLE} + ${COLUMNS}) / 2))" "${WTITLE}"
printf "\033[1;31m%*s\033[0m\n" "$(((${#WTITLE} + COLUMNS) / 2))" "${WTITLE}"
printf "\033[1;44m%*s\033[A\n" "${COLUMNS}" ""
printf "\033[1;32m%*s\033[0m\n" "${COLUMNS}" "${DATE}"
BTITLE="Boot Type:"
BTITLE+="$([ ${EFI} -eq 1 ] && echo " [UEFI]" || echo " [BIOS]")"
BTITLE+="$([ "${BUS}" = "usb" ] && echo " [${BUS^^} flashdisk]" || echo " [${BUS^^} DoM]")"
printf "\033[1;33m%*s\033[0m\n" $(((${#BTITLE} + ${COLUMNS}) / 2)) "${BTITLE}"
printf "\033[1;33m%*s\033[0m\n" $(((${#BTITLE} + COLUMNS) / 2)) "${BTITLE}"
if [ -f "${PART1_PATH}/.upgraded" ]; then
MODEL="$(readConfigKey "model" "${USER_CONFIG_FILE}")"
@ -378,7 +378,7 @@ else
COUNT=0
BOOTIPWAIT="$(readConfigKey "bootipwait" "${USER_CONFIG_FILE}")"
BOOTIPWAIT=${BOOTIPWAIT:-10}
while [ ${COUNT} -lt $((${BOOTIPWAIT} + 32)) ]; do
while [ ${COUNT} -lt $((BOOTIPWAIT + 32)) ]; do
MSG=""
for N in ${ETHX}; do
if [ "1" = "$(cat "/sys/class/net/${N}/carrier" 2>/dev/null)" ]; then

View File

@ -23,10 +23,10 @@ trap 'flock -u 911; rm -f "${TMP_PATH}/helper.lock"' EXIT INT TERM HUP
{
printf "$(TEXT "Closing this window or press 'ctrl + c' to exit the assistance.")\n"
printf "$(TEXT "Please give the following link to the helper. (Click to open and copy)")\n"
printf "$(TEXT "Please give the following link to the helper. (Click to open and copy)")\n"
printf " 👇\n"
sshx -q --name "RR-Helper" 2>&1
[ $? -ne 0 ] && while true; do sleep 1; done
[ $? -ne 0 ] && while true; do sleep 1; done
} | dialog --colors --aspect 50 --title "$(TEXT "Online Assistance")" --progressbox "$(TEXT "Notice: Please keep this window open.")" 20 100 2>&1
clear

View File

@ -48,7 +48,7 @@ function mergeConfigModules() {
XF=$(mktemp 2>/dev/null)
XF=${XF:-/tmp/tmp.XXXXXXXXXX}
echo -en "${ML}" | yq -p p -o y >"${XF}"
deleteConfigKey "modules.\"RRORG\"" "${XF}"
deleteConfigKey 'modules."RRORG"' "${XF}"
yq eval-all --inplace '. as $item ireduce ({}; . * $item)' --inplace "${2}" "${XF}" 2>/dev/null
rm -f "${XF}"
}

View File

@ -114,12 +114,12 @@ function generateSerial() {
SERIAL="${PREFIX:-"0000"}${MIDDLE:-"XXX"}"
case "${SUFFIX:-"alpha"}" in
numeric)
SERIAL+="$(random)"
;;
alpha)
SERIAL+="$(genRandomLetter)$(genRandomValue)$(genRandomValue)$(genRandomValue)$(genRandomValue)$(genRandomLetter)"
;;
numeric)
SERIAL+="$(random)"
;;
alpha)
SERIAL+="$(genRandomLetter)$(genRandomValue)$(genRandomValue)$(genRandomValue)$(genRandomValue)$(genRandomLetter)"
;;
esac
echo "${SERIAL}"
}
@ -167,16 +167,16 @@ function validateSerial() {
return 1
fi
case "${SUFFIX:-"alpha"}" in
numeric)
if ! echo "${S}" | grep -q "^[0-9]\{6\}$"; then
return 1
fi
;;
alpha)
if ! echo "${S}" | grep -q "^[A-Z][0-9][0-9][0-9][0-9][A-Z]$"; then
return 1
fi
;;
numeric)
if ! echo "${S}" | grep -q "^[0-9]\{6\}$"; then
return 1
fi
;;
alpha)
if ! echo "${S}" | grep -q "^[A-Z][0-9][0-9][0-9][0-9][A-Z]$"; then
return 1
fi
;;
esac
return 0
}

View File

@ -32,7 +32,7 @@ TITLE="$(printf "$(TEXT "Welcome to %s")" "${RR_TITLE}${RR_RELEASE:+(${RR_RELEAS
DATE="$(date)"
printf "\033[1;44m%*s\n" "${COLUMNS}" ""
printf "\033[1;44m%*s\033[A\n" "${COLUMNS}" ""
printf "\033[1;31m%*s\033[0m\n" "$(((${#TITLE} + ${COLUMNS}) / 2))" "${TITLE}"
printf "\033[1;31m%*s\033[0m\n" "$(((${#TITLE} + COLUMNS) / 2))" "${TITLE}"
printf "\033[1;44m%*s\033[A\n" "${COLUMNS}" ""
printf "\033[1;32m%*s\033[0m\n" "${COLUMNS}" "${DATE}"

File diff suppressed because it is too large Load Diff

View File

@ -59,8 +59,8 @@ mkdir -p "${RAMDISK_PATH}"
# Check if DSM buildnumber changed
. "${RAMDISK_PATH}/etc/VERSION"
if [ -n "${PRODUCTVER}" ] && [ -n "${BUILDNUM}" ] && [ -n "${SMALLNUM}" ] &&
([ ! "${PRODUCTVER}" = "${majorversion:-0}.${minorversion:-0}" ] || [ ! "${BUILDNUM}" = "${buildnumber:-0}" ] || [ ! "${SMALLNUM}" = "${smallfixnumber:-0}" ]); then
if [ -n "${PRODUCTVER}" ] && [ -n "${BUILDNUM}" ] && [ -n "${SMALLNUM}" ] \
&& ([ ! "${PRODUCTVER}" = "${majorversion:-0}.${minorversion:-0}" ] || [ ! "${BUILDNUM}" = "${buildnumber:-0}" ] || [ ! "${SMALLNUM}" = "${smallfixnumber:-0}" ]); then
OLDVER="${PRODUCTVER}(${BUILDNUM}$([ ${SMALLNUM:-0} -ne 0 ] && echo "u${SMALLNUM}"))"
NEWVER="${majorversion}.${minorversion}(${buildnumber}$([ ${smallfixnumber:-0} -ne 0 ] && echo "u${smallfixnumber}"))"
echo -e "\033[A\n\033[1;32mBuild number changed from \033[1;31m${OLDVER}\033[1;32m to \033[1;31m${NEWVER}\033[0m"
@ -144,7 +144,7 @@ echo "Create addons.sh" >"${LOG_FILE}"
{
echo "#!/bin/sh"
echo 'echo "addons.sh called with params ${@}"'
echo "export LOADERLABEL=\"RR\""
echo 'export LOADERLABEL="RR"'
echo "export LOADERRELEASE=\"${RR_RELEASE}\""
echo "export LOADERVERSION=\"${RR_VERSION}\""
echo "export PLATFORM=\"${PLATFORM}\""
@ -213,7 +213,7 @@ echo "Modify files" >"${LOG_FILE}"
# backup current loader configs
mkdir -p "${RAMDISK_PATH}/usr/rr"
{
echo "LOADERLABEL=\"RR\""
echo 'LOADERLABEL="RR"'
echo "LOADERRELEASE=\"${RR_RELEASE}\""
echo "LOADERVERSION=\"${RR_VERSION}\""
} >"${RAMDISK_PATH}/usr/rr/VERSION"

View File

@ -11,6 +11,7 @@ from requests.adapters import HTTPAdapter
from requests.packages.urllib3.util.retry import Retry # type: ignore
from openpyxl import Workbook
@click.group()
def cli():
"""
@ -21,8 +22,12 @@ def cli():
@cli.command()
@click.option("-w", "--workpath", type=str, required=True, help="The workpath of RR.")
@click.option("-j", "--jsonpath", type=str, required=True, help="The output path of jsonfile.")
@click.option("-x", "--xlsxpath", type=str, required=False, help="The output path of xlsxfile.")
@click.option(
"-j", "--jsonpath", type=str, required=True, help="The output path of jsonfile."
)
@click.option(
"-x", "--xlsxpath", type=str, required=False, help="The output path of xlsxfile."
)
def getmodels(workpath, jsonpath, xlsxpath):
models = {}
platforms_yml = os.path.join(workpath, "opt", "rr", "platforms.yml")
@ -37,19 +42,25 @@ def getmodels(workpath, jsonpath, xlsxpath):
productvers[V] = f"{kpre}-{kver}" if kpre else kver
models[P] = {"productvers": productvers, "models": []}
adapter = HTTPAdapter(max_retries=Retry(total=3, backoff_factor=1, status_forcelist=[500, 502, 503, 504]))
adapter = HTTPAdapter(
max_retries=Retry(
total=3, backoff_factor=1, status_forcelist=[500, 502, 503, 504]
)
)
session = requests.Session()
session.mount("http://", adapter)
session.mount("https://", adapter)
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
try:
url = "http://update7.synology.com/autoupdate/genRSS.php?include_beta=1"
#url = "https://update7.synology.com/autoupdate/genRSS.php?include_beta=1"
# url = "https://update7.synology.com/autoupdate/genRSS.php?include_beta=1"
req = session.get(url, timeout=10, verify=False)
req.encoding = "utf-8"
p = re.compile(r"<mUnique>(.*?)</mUnique>.*?<mLink>(.*?)</mLink>", re.MULTILINE | re.DOTALL)
p = re.compile(
r"<mUnique>(.*?)</mUnique>.*?<mLink>(.*?)</mLink>", re.MULTILINE | re.DOTALL
)
data = p.findall(req.text)
except Exception as e:
click.echo(f"Error: {e}")
@ -80,34 +91,44 @@ def getmodels(workpath, jsonpath, xlsxpath):
@cli.command()
@click.option("-w", "--workpath", type=str, required=True, help="The workpath of RR.")
@click.option("-j", "--jsonpath", type=str, required=True, help="The output path of jsonfile.")
@click.option("-x", "--xlsxpath", type=str, required=False, help="The output path of xlsxfile.")
@click.option(
"-j", "--jsonpath", type=str, required=True, help="The output path of jsonfile."
)
@click.option(
"-x", "--xlsxpath", type=str, required=False, help="The output path of xlsxfile."
)
def getpats(workpath, jsonpath, xlsxpath):
def __fullversion(ver):
arr = ver.split('-')
a, b, c = (arr[0].split('.') + ['0', '0', '0'])[:3]
d = arr[1] if len(arr) > 1 else '00000'
e = arr[2] if len(arr) > 2 else '0'
return f'{a}.{b}.{c}-{d}-{e}'
arr = ver.split("-")
a, b, c = (arr[0].split(".") + ["0", "0", "0"])[:3]
d = arr[1] if len(arr) > 1 else "00000"
e = arr[2] if len(arr) > 2 else "0"
return f"{a}.{b}.{c}-{d}-{e}"
platforms_yml = os.path.join(workpath, "opt", "rr", "platforms.yml")
with open(platforms_yml, "r") as f:
data = yaml.safe_load(f)
platforms = data.get("platforms", [])
adapter = HTTPAdapter(max_retries=Retry(total=3, backoff_factor=1, status_forcelist=[500, 502, 503, 504]))
adapter = HTTPAdapter(
max_retries=Retry(
total=3, backoff_factor=1, status_forcelist=[500, 502, 503, 504]
)
)
session = requests.Session()
session.mount("http://", adapter)
session.mount("https://", adapter)
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
try:
url = "http://update7.synology.com/autoupdate/genRSS.php?include_beta=1"
#url = "https://update7.synology.com/autoupdate/genRSS.php?include_beta=1"
# url = "https://update7.synology.com/autoupdate/genRSS.php?include_beta=1"
req = session.get(url, timeout=10, verify=False)
req.encoding = "utf-8"
p = re.compile(r"<mUnique>(.*?)</mUnique>.*?<mLink>(.*?)</mLink>", re.MULTILINE | re.DOTALL)
p = re.compile(
r"<mUnique>(.*?)</mUnique>.*?<mLink>(.*?)</mLink>", re.MULTILINE | re.DOTALL
)
data = p.findall(req.text)
except Exception as e:
click.echo(f"Error: {e}")
@ -128,62 +149,94 @@ def getpats(workpath, jsonpath, xlsxpath):
pats = {}
for M in models:
pats[M] = {}
version = '7'
version = "7"
urlInfo = "https://www.synology.com/api/support/findDownloadInfo?lang=en-us"
urlSteps = "https://www.synology.com/api/support/findUpgradeSteps?"
#urlInfo = "https://www.synology.cn/api/support/findDownloadInfo?lang=zh-cn"
#urlSteps = "https://www.synology.cn/api/support/findUpgradeSteps?"
# urlInfo = "https://www.synology.cn/api/support/findDownloadInfo?lang=zh-cn"
# urlSteps = "https://www.synology.cn/api/support/findUpgradeSteps?"
major = f"&major={version.split('.')[0]}" if len(version.split('.')) > 0 else ""
minor = f"&minor={version.split('.')[1]}" if len(version.split('.')) > 1 else ""
major = f"&major={version.split('.')[0]}" if len(version.split(".")) > 0 else ""
minor = f"&minor={version.split('.')[1]}" if len(version.split(".")) > 1 else ""
try:
req = session.get(f"{urlInfo}&product={M.replace('+', '%2B')}{major}{minor}", timeout=10, verify=False)
req = session.get(
f"{urlInfo}&product={M.replace('+', '%2B')}{major}{minor}",
timeout=10,
verify=False,
)
req.encoding = "utf-8"
data = json.loads(req.text)
except Exception as e:
click.echo(f"Error: {e}")
continue
build_ver = data['info']['system']['detail'][0]['items'][0]['build_ver']
build_num = data['info']['system']['detail'][0]['items'][0]['build_num']
buildnano = data['info']['system']['detail'][0]['items'][0]['nano']
build_ver = data["info"]["system"]["detail"][0]["items"][0]["build_ver"]
build_num = data["info"]["system"]["detail"][0]["items"][0]["build_num"]
buildnano = data["info"]["system"]["detail"][0]["items"][0]["nano"]
V = __fullversion(f"{build_ver}-{build_num}-{buildnano}")
if V not in pats[M]:
pats[M][V] = {
'url': data['info']['system']['detail'][0]['items'][0]['files'][0]['url'].split('?')[0],
'sum': data['info']['system']['detail'][0]['items'][0]['files'][0].get('checksum', '0' * 32)
"url": data["info"]["system"]["detail"][0]["items"][0]["files"][0][
"url"
].split("?")[0],
"sum": data["info"]["system"]["detail"][0]["items"][0]["files"][0].get(
"checksum", "0" * 32
),
}
from_ver = min(I['build'] for I in data['info']['pubVers'])
from_ver = min(I["build"] for I in data["info"]["pubVers"])
for I in data['info']['productVers']:
if not I['version'].startswith(version):
for I in data["info"]["productVers"]:
if not I["version"].startswith(version):
continue
if not major or not minor:
majorTmp = f"&major={I['version'].split('.')[0]}" if len(I['version'].split('.')) > 0 else ""
minorTmp = f"&minor={I['version'].split('.')[1]}" if len(I['version'].split('.')) > 1 else ""
majorTmp = (
f"&major={I['version'].split('.')[0]}"
if len(I["version"].split(".")) > 0
else ""
)
minorTmp = (
f"&minor={I['version'].split('.')[1]}"
if len(I["version"].split(".")) > 1
else ""
)
try:
reqTmp = session.get(f"{urlInfo}&product={M.replace('+', '%2B')}{majorTmp}{minorTmp}", timeout=10, verify=False)
reqTmp = session.get(
f"{urlInfo}&product={M.replace('+', '%2B')}{majorTmp}{minorTmp}",
timeout=10,
verify=False,
)
reqTmp.encoding = "utf-8"
dataTmp = json.loads(reqTmp.text)
except Exception as e:
click.echo(f"Error: {e}")
continue
build_ver = dataTmp['info']['system']['detail'][0]['items'][0]['build_ver']
build_num = dataTmp['info']['system']['detail'][0]['items'][0]['build_num']
buildnano = dataTmp['info']['system']['detail'][0]['items'][0]['nano']
build_ver = dataTmp["info"]["system"]["detail"][0]["items"][0][
"build_ver"
]
build_num = dataTmp["info"]["system"]["detail"][0]["items"][0][
"build_num"
]
buildnano = dataTmp["info"]["system"]["detail"][0]["items"][0]["nano"]
V = __fullversion(f"{build_ver}-{build_num}-{buildnano}")
if V not in pats[M]:
pats[M][V] = {
'url': dataTmp['info']['system']['detail'][0]['items'][0]['files'][0]['url'].split('?')[0],
'sum': dataTmp['info']['system']['detail'][0]['items'][0]['files'][0].get('checksum', '0' * 32)
"url": dataTmp["info"]["system"]["detail"][0]["items"][0][
"files"
][0]["url"].split("?")[0],
"sum": dataTmp["info"]["system"]["detail"][0]["items"][0][
"files"
][0].get("checksum", "0" * 32),
}
for J in I['versions']:
to_ver = J['build']
for J in I["versions"]:
to_ver = J["build"]
try:
reqSteps = session.get(f"{urlSteps}&product={M.replace('+', '%2B')}&from_ver={from_ver}&to_ver={to_ver}", timeout=10, verify=False)
reqSteps = session.get(
f"{urlSteps}&product={M.replace('+', '%2B')}&from_ver={from_ver}&to_ver={to_ver}",
timeout=10,
verify=False,
)
if reqSteps.status_code != 200:
continue
reqSteps.encoding = "utf-8"
@ -192,17 +245,28 @@ def getpats(workpath, jsonpath, xlsxpath):
click.echo(f"Error: {e}")
continue
for S in dataSteps['upgrade_steps']:
if not S.get('full_patch') or not S['build_ver'].startswith(version):
for S in dataSteps["upgrade_steps"]:
if not S.get("full_patch") or not S["build_ver"].startswith(
version
):
continue
V = __fullversion(f"{S['build_ver']}-{S['build_num']}-{S['nano']}")
if V not in pats[M]:
reqPat = session.head(S['files'][0]['url'].split('?')[0].replace("global.synologydownload.com", "global.download.synology.com"), timeout=10, verify=False)
reqPat = session.head(
S["files"][0]["url"]
.split("?")[0]
.replace(
"global.synologydownload.com",
"global.download.synology.com",
),
timeout=10,
verify=False,
)
if reqPat.status_code == 403:
continue
pats[M][V] = {
'url': S['files'][0]['url'].split('?')[0],
'sum': S['files'][0].get('checksum', '0' * 32)
"url": S["files"][0]["url"].split("?")[0],
"sum": S["files"][0].get("checksum", "0" * 32),
}
if jsonpath:
@ -220,8 +284,12 @@ def getpats(workpath, jsonpath, xlsxpath):
@cli.command()
@click.option("-w", "--workpath", type=str, required=True, help="The workpath of RR.")
@click.option("-j", "--jsonpath", type=str, required=True, help="The output path of jsonfile.")
@click.option("-x", "--xlsxpath", type=str, required=False, help="The output path of xlsxfile.")
@click.option(
"-j", "--jsonpath", type=str, required=True, help="The output path of jsonfile."
)
@click.option(
"-x", "--xlsxpath", type=str, required=False, help="The output path of xlsxfile."
)
def getaddons(workpath, jsonpath, xlsxpath):
AS = glob.glob(os.path.join(workpath, "mnt", "p3", "addons", "*", "manifest.yml"))
AS.sort()
@ -231,7 +299,9 @@ def getaddons(workpath, jsonpath, xlsxpath):
A_data = yaml.safe_load(file)
A_name = A_data.get("name", "")
A_system = A_data.get("system", False)
A_description = A_data.get("description", {"en_US": "Unknown", "zh_CN": "Unknown"})
A_description = A_data.get(
"description", {"en_US": "Unknown", "zh_CN": "Unknown"}
)
addons[A_name] = {"system": A_system, "description": A_description}
if jsonpath:
with open(jsonpath, "w") as f:
@ -241,14 +311,25 @@ def getaddons(workpath, jsonpath, xlsxpath):
ws = wb.active
ws.append(["Name", "system", "en_US", "zh_CN"])
for k1, v1 in addons.items():
ws.append([k1, v1.get("system", False), v1.get("description").get("en_US", ""), v1.get("description").get("zh_CN", "")])
ws.append(
[
k1,
v1.get("system", False),
v1.get("description").get("en_US", ""),
v1.get("description").get("zh_CN", ""),
]
)
wb.save(xlsxpath)
@cli.command()
@click.option("-w", "--workpath", type=str, required=True, help="The workpath of RR.")
@click.option("-j", "--jsonpath", type=str, required=True, help="The output path of jsonfile.")
@click.option("-x", "--xlsxpath", type=str, required=False, help="The output path of xlsxfile.")
@click.option(
"-j", "--jsonpath", type=str, required=True, help="The output path of jsonfile."
)
@click.option(
"-x", "--xlsxpath", type=str, required=False, help="The output path of xlsxfile."
)
def getmodules(workpath, jsonpath, xlsxpath):
MS = glob.glob(os.path.join(workpath, "mnt", "p3", "modules", "*.tgz"))
MS.sort()

View File

@ -259,14 +259,14 @@ function unpackInitrd() {
INITRD_FORMAT=$(file -b --mime-type "${INITRD_FILE}")
case "${INITRD_FORMAT}" in
*'x-cpio'*) (cd "${OUTPUT_PATH}" && sudo cpio -idm <"${INITRD_FILE}") >/dev/null 2>&1 ;;
*'x-xz'*) (cd "${OUTPUT_PATH}" && xz -dc "${INITRD_FILE}" | sudo cpio -idm) >/dev/null 2>&1 ;;
*'x-lz4'*) (cd "${OUTPUT_PATH}" && lz4 -dc "${INITRD_FILE}" | sudo cpio -idm) >/dev/null 2>&1 ;;
*'x-lzma'*) (cd "${OUTPUT_PATH}" && lzma -dc "${INITRD_FILE}" | sudo cpio -idm) >/dev/null 2>&1 ;;
*'x-bzip2'*) (cd "${OUTPUT_PATH}" && bzip2 -dc "${INITRD_FILE}" | sudo cpio -idm) >/dev/null 2>&1 ;;
*'gzip'*) (cd "${OUTPUT_PATH}" && gzip -dc "${INITRD_FILE}" | sudo cpio -idm) >/dev/null 2>&1 ;;
*'zstd'*) (cd "${OUTPUT_PATH}" && zstd -dc "${INITRD_FILE}" | sudo cpio -idm) >/dev/null 2>&1 ;;
*) ;;
*'x-cpio'*) (cd "${OUTPUT_PATH}" && sudo cpio -idm <"${INITRD_FILE}") >/dev/null 2>&1 ;;
*'x-xz'*) (cd "${OUTPUT_PATH}" && xz -dc "${INITRD_FILE}" | sudo cpio -idm) >/dev/null 2>&1 ;;
*'x-lz4'*) (cd "${OUTPUT_PATH}" && lz4 -dc "${INITRD_FILE}" | sudo cpio -idm) >/dev/null 2>&1 ;;
*'x-lzma'*) (cd "${OUTPUT_PATH}" && lzma -dc "${INITRD_FILE}" | sudo cpio -idm) >/dev/null 2>&1 ;;
*'x-bzip2'*) (cd "${OUTPUT_PATH}" && bzip2 -dc "${INITRD_FILE}" | sudo cpio -idm) >/dev/null 2>&1 ;;
*'gzip'*) (cd "${OUTPUT_PATH}" && gzip -dc "${INITRD_FILE}" | sudo cpio -idm) >/dev/null 2>&1 ;;
*'zstd'*) (cd "${OUTPUT_PATH}" && zstd -dc "${INITRD_FILE}" | sudo cpio -idm) >/dev/null 2>&1 ;;
*) ;;
esac
}
@ -292,32 +292,117 @@ function repackInitrd() {
INITRD_FORMAT=$(file -b --mime-type "${INITRD_FILE}")
case "${INITRD_FORMAT}" in
*'x-cpio'*) (cd "${RDXZ_PATH}" && sudo cpio -idm <"${INITRD_FILE}") >/dev/null 2>&1 ;;
*'x-xz'*) (cd "${RDXZ_PATH}" && xz -dc "${INITRD_FILE}" | sudo cpio -idm) >/dev/null 2>&1 ;;
*'x-lz4'*) (cd "${RDXZ_PATH}" && lz4 -dc "${INITRD_FILE}" | sudo cpio -idm) >/dev/null 2>&1 ;;
*'x-lzma'*) (cd "${RDXZ_PATH}" && lzma -dc "${INITRD_FILE}" | sudo cpio -idm) >/dev/null 2>&1 ;;
*'x-bzip2'*) (cd "${RDXZ_PATH}" && bzip2 -dc "${INITRD_FILE}" | sudo cpio -idm) >/dev/null 2>&1 ;;
*'gzip'*) (cd "${RDXZ_PATH}" && gzip -dc "${INITRD_FILE}" | sudo cpio -idm) >/dev/null 2>&1 ;;
*'zstd'*) (cd "${RDXZ_PATH}" && zstd -dc "${INITRD_FILE}" | sudo cpio -idm) >/dev/null 2>&1 ;;
*) ;;
*'x-cpio'*) (cd "${RDXZ_PATH}" && sudo cpio -idm <"${INITRD_FILE}") >/dev/null 2>&1 ;;
*'x-xz'*) (cd "${RDXZ_PATH}" && xz -dc "${INITRD_FILE}" | sudo cpio -idm) >/dev/null 2>&1 ;;
*'x-lz4'*) (cd "${RDXZ_PATH}" && lz4 -dc "${INITRD_FILE}" | sudo cpio -idm) >/dev/null 2>&1 ;;
*'x-lzma'*) (cd "${RDXZ_PATH}" && lzma -dc "${INITRD_FILE}" | sudo cpio -idm) >/dev/null 2>&1 ;;
*'x-bzip2'*) (cd "${RDXZ_PATH}" && bzip2 -dc "${INITRD_FILE}" | sudo cpio -idm) >/dev/null 2>&1 ;;
*'gzip'*) (cd "${RDXZ_PATH}" && gzip -dc "${INITRD_FILE}" | sudo cpio -idm) >/dev/null 2>&1 ;;
*'zstd'*) (cd "${RDXZ_PATH}" && zstd -dc "${INITRD_FILE}" | sudo cpio -idm) >/dev/null 2>&1 ;;
*) ;;
esac
sudo cp -rf "${PLUGIN_PATH}/"* "${RDXZ_PATH}/"
[ -f "${OUTPUT_PATH}" ] && rm -rf "${OUTPUT_PATH}"
# shellcheck disable=SC2024
case "${INITRD_FORMAT}" in
*'x-cpio'*) (cd "${RDXZ_PATH}" && sudo find . 2>/dev/null | sudo cpio -o -H newc -R root:root >"${OUTPUT_PATH}") >/dev/null 2>&1 ;;
*'x-xz'*) (cd "${RDXZ_PATH}" && sudo find . 2>/dev/null | sudo cpio -o -H newc -R root:root | xz -9 -C crc32 -c - >"${OUTPUT_PATH}") >/dev/null 2>&1 ;;
*'x-lz4'*) (cd "${RDXZ_PATH}" && sudo find . 2>/dev/null | sudo cpio -o -H newc -R root:root | lz4 -9 -l -c - >"${OUTPUT_PATH}") >/dev/null 2>&1 ;;
*'x-lzma'*) (cd "${RDXZ_PATH}" && sudo find . 2>/dev/null | sudo cpio -o -H newc -R root:root | lzma -9 -c - >"${OUTPUT_PATH}") >/dev/null 2>&1 ;;
*'x-bzip2'*) (cd "${RDXZ_PATH}" && sudo find . 2>/dev/null | sudo cpio -o -H newc -R root:root | bzip2 -9 -c - >"${OUTPUT_PATH}") >/dev/null 2>&1 ;;
*'gzip'*) (cd "${RDXZ_PATH}" && sudo find . 2>/dev/null | sudo cpio -o -H newc -R root:root | gzip -9 -c - >"${OUTPUT_PATH}") >/dev/null 2>&1 ;;
*'zstd'*) (cd "${RDXZ_PATH}" && sudo find . 2>/dev/null | sudo cpio -o -H newc -R root:root | zstd -19 -T0 -f -c - >"${OUTPUT_PATH}") >/dev/null 2>&1 ;;
*) ;;
*'x-cpio'*) (cd "${RDXZ_PATH}" && sudo find . 2>/dev/null | sudo cpio -o -H newc -R root:root >"${OUTPUT_PATH}") >/dev/null 2>&1 ;;
*'x-xz'*) (cd "${RDXZ_PATH}" && sudo find . 2>/dev/null | sudo cpio -o -H newc -R root:root | xz -9 -C crc32 -c - >"${OUTPUT_PATH}") >/dev/null 2>&1 ;;
*'x-lz4'*) (cd "${RDXZ_PATH}" && sudo find . 2>/dev/null | sudo cpio -o -H newc -R root:root | lz4 -9 -l -c - >"${OUTPUT_PATH}") >/dev/null 2>&1 ;;
*'x-lzma'*) (cd "${RDXZ_PATH}" && sudo find . 2>/dev/null | sudo cpio -o -H newc -R root:root | lzma -9 -c - >"${OUTPUT_PATH}") >/dev/null 2>&1 ;;
*'x-bzip2'*) (cd "${RDXZ_PATH}" && sudo find . 2>/dev/null | sudo cpio -o -H newc -R root:root | bzip2 -9 -c - >"${OUTPUT_PATH}") >/dev/null 2>&1 ;;
*'gzip'*) (cd "${RDXZ_PATH}" && sudo find . 2>/dev/null | sudo cpio -o -H newc -R root:root | gzip -9 -c - >"${OUTPUT_PATH}") >/dev/null 2>&1 ;;
*'zstd'*) (cd "${RDXZ_PATH}" && sudo find . 2>/dev/null | sudo cpio -o -H newc -R root:root | zstd -19 -T0 -f -c - >"${OUTPUT_PATH}") >/dev/null 2>&1 ;;
*) ;;
esac
sudo rm -rf "${RDXZ_PATH}"
}
# repackImg — copy loader content into an existing raw disk image.
#   $1 INPUT_FILE  : either a directory tree containing mnt/p1..p3 subdirs,
#                    or a raw image file with up to 3 partitions.
#   $2 OUTPUT_FILE : an existing raw image file whose partitions p1..p3
#                    will receive the copied content.
# NOTE(review): uses 'exit' (not 'return') on bad arguments, which terminates
# the whole calling script — presumably intentional for a CI helper; confirm.
function repackImg() {
local INPUT_FILE="${1}"
local OUTPUT_FILE="${2}"
[ -z "${INPUT_FILE}" ] && exit 1
# '||' and '&&' have equal precedence, left-associative, so this reads as
# "(empty OR not a regular file) -> exit 1".
[ -z "${OUTPUT_FILE}" ] || [ ! -f "${OUTPUT_FILE}" ] && exit 1
OUTPUT_FILE="$(realpath "${OUTPUT_FILE}")"
if [ -d "${INPUT_FILE}" ]; then
# Directory mode: INPUT_FILE/mnt/p{1..3} trees are copied onto the
# corresponding partitions of OUTPUT_FILE.
# Helper: unmount and remove any of our /tmp/mnt/p{1..3} mountpoints.
_umount() {
for i in {1..3}; do
sudo mount | grep -q "/tmp/mnt/p${i}" || continue
sudo umount "/tmp/mnt/p${i}"
rm -rf "/tmp/mnt/p${i}"
done
}
# Attach OUTPUT_FILE to a free loop device; -P scans the partition table
# so ${LOOPX}p1..p3 appear.
LOOPX=$(sudo losetup -f)
sudo losetup -P "${LOOPX}" "${OUTPUT_FILE}"
for i in {1..3}; do
# Skip partitions for which the input tree has no content.
[ ! -d "${INPUT_FILE}/mnt/p${i}" ] && continue
rm -rf "/tmp/mnt/p${i}" 2>/dev/null
mkdir -p "/tmp/mnt/p${i}"
sudo mount "${LOOPX}p${i}" "/tmp/mnt/p${i}" || {
echo "Mount failed"
_umount
break
}
# Trailing '/.' copies the directory's contents (including dotfiles)
# rather than the directory itself.
sudo cp -rf "${INPUT_FILE}/mnt/p${i}/". "/tmp/mnt/p${i}" || {
echo "Copy failed"
_umount
break
}
sudo sync
# Unmounted after every partition; _umount is a no-op for partitions
# that are not currently mounted.
_umount
done
sudo losetup --detach "${LOOPX}"
elif [ -f "${INPUT_FILE}" ]; then
# Image mode: both images are loop-mounted and content is copied
# partition-by-partition from INPUT_FILE (i = input) to OUTPUT_FILE (x).
# Helper: unmount and remove all input-side and output-side mountpoints.
_umount() {
for i in {1..3}; do
sudo mount | grep -q "/tmp/i/mnt/p${i}" || continue
sudo umount "/tmp/i/mnt/p${i}"
rm -rf "/tmp/i/mnt/p${i}"
done
for i in {1..3}; do
sudo mount | grep -q "/tmp/x/mnt/p${i}" || continue
sudo umount "/tmp/x/mnt/p${i}"
rm -rf "/tmp/x/mnt/p${i}"
done
}
LOOPI=$(sudo losetup -f)
sudo losetup -P "${LOOPI}" "${INPUT_FILE}"
LOOPX=$(sudo losetup -f)
sudo losetup -P "${LOOPX}" "${OUTPUT_FILE}"
for i in {1..3}; do
rm -rf "/tmp/i/mnt/p${i}" 2>/dev/null
mkdir -p "/tmp/i/mnt/p${i}"
rm -rf "/tmp/x/mnt/p${i}" 2>/dev/null
mkdir -p "/tmp/x/mnt/p${i}"
sudo mount "${LOOPI}p${i}" "/tmp/i/mnt/p${i}" || {
echo "Mount failed"
_umount
break
}
sudo mount "${LOOPX}p${i}" "/tmp/x/mnt/p${i}" || {
echo "Mount failed"
_umount
break
}
sudo cp -rf "/tmp/i/mnt/p${i}/". "/tmp/x/mnt/p${i}" || {
echo "Copy failed"
_umount
break
}
sudo sync
_umount
done
sudo losetup --detach "${LOOPX}"
sudo losetup --detach "${LOOPI}"
else
# INPUT_FILE is neither a directory nor a regular file.
exit 1
fi
}
# resizeImg
# $1 input file
# $2 change size in MB, e.g.: +50M -50M
@ -365,7 +450,7 @@ function createvmx() {
# Convert raw image to VMDK
rm -rf "VMX_${VMNAME}"
mkdir -p "VMX_${VMNAME}"
qemu-img convert -O vmdk -o 'adapter_type=lsilogic,subformat=monolithicSparse,compat6' "${BLIMAGE}" "VMX_${VMNAME}/${VMNAME}-disk1.vmdk" # 'adapter_type=lsilogic,subformat=streamOptimized,compat6'
qemu-img convert -O vmdk -o 'adapter_type=lsilogic,subformat=monolithicSparse,compat6' "${BLIMAGE}" "VMX_${VMNAME}/${VMNAME}-disk1.vmdk" # 'adapter_type=lsilogic,subformat=streamOptimized,compat6'
qemu-img create -f vmdk "VMX_${VMNAME}/${VMNAME}-disk2.vmdk" "32G"
# Create VM configuration

View File

@ -40,53 +40,53 @@ fi
eval set -- "$ARGS"
while true; do
case "$1" in
--onboot)
ONBOOT="$2"
echo "$ONBOOT" | grep -qvE '^(0|1)$' && ONBOOT=1
shift 2
;;
--efi)
EFI="$2"
echo "$EFI" | grep -qvE '^(0|1)$' && EFI=1
shift 2
;;
--bltype)
BLTYPE="$2"
echo "$BLTYPE" | grep -qvE '^(sata|usb|nvme)$' && BLTYPE="sata"
shift 2
;;
--storage)
STORAGE="$2"
[ -n "${STORAGE}" ] && pvesm status -content images | grep -qw "^${STORAGE}" || STORAGE=""
shift 2
;;
--v9ppath)
V9PPATH="$2"
[ -d "${V9PPATH}" ] && V9PPATH="$(realpath "${V9PPATH}")" || V9PPATH=""
shift 2
;;
--vfsdirid)
VFSDIRID="$2"
[ -n "${VFSDIRID}" ] && pvesh ls /cluster/mapping/dir | grep -qw "${VFSDIRID}" || VFSDIRID=""
shift 2
;;
--tag)
TAG="$(echo "$2" | sed 's/^[v|V]//g')"
shift 2
;;
--img)
IMG="$2"
[ -f "${IMG}" ] && IMG="$(realpath "${IMG}")" || IMG=""
shift 2
;;
--)
shift
break
;;
*)
usage
exit 1
;;
--onboot)
ONBOOT="$2"
echo "$ONBOOT" | grep -qvE '^(0|1)$' && ONBOOT=1
shift 2
;;
--efi)
EFI="$2"
echo "$EFI" | grep -qvE '^(0|1)$' && EFI=1
shift 2
;;
--bltype)
BLTYPE="$2"
echo "$BLTYPE" | grep -qvE '^(sata|usb|nvme)$' && BLTYPE="sata"
shift 2
;;
--storage)
STORAGE="$2"
[ -n "${STORAGE}" ] && pvesm status -content images | grep -qw "^${STORAGE}" || STORAGE=""
shift 2
;;
--v9ppath)
V9PPATH="$2"
[ -d "${V9PPATH}" ] && V9PPATH="$(realpath "${V9PPATH}")" || V9PPATH=""
shift 2
;;
--vfsdirid)
VFSDIRID="$2"
[ -n "${VFSDIRID}" ] && pvesh ls /cluster/mapping/dir | grep -qw "${VFSDIRID}" || VFSDIRID=""
shift 2
;;
--tag)
TAG="$(echo "$2" | sed 's/^[v|V]//g')"
shift 2
;;
--img)
IMG="$2"
[ -f "${IMG}" ] && IMG="$(realpath "${IMG}")" || IMG=""
shift 2
;;
--)
shift
break
;;
*)
usage
exit 1
;;
esac
done
@ -140,7 +140,7 @@ echo "Creating VM with RR ... "
last_vmid=$(qm list | awk 'NR>1{print$1}' | sort -n | tail -1 2>/dev/null)
if [ -z "$last_vmid" ]; then
# 如果 last_vmid 是空字符串说明没有VM设置一个起始ID
VMID=100
VMID=100
else
# 否则在最后一个ID的基础上加1
VMID=$((last_vmid + 1))
@ -182,20 +182,20 @@ if [ "${STATUS:-0}" -ne 0 ] || [ -z "${BLDISK}" ]; then
fi
[ -n "${IMG}" ] || rm -f "${IMG_PATH}"
case "${BLTYPE}" in
usb)
ARGS+="-device nec-usb-xhci,id=usb-bus0,multifunction=on -drive file=$(pvesm path ${BLDISK}),media=disk,format=raw,if=none,id=usb1 -device usb-storage,bus=usb-bus0.0,port=1,drive=usb1,bootindex=999,removable=on "
;;
nvme)
ARGS+="-drive file=$(pvesm path ${BLDISK}),media=disk,format=raw,if=none,id=nvme1 -device nvme,drive=nvme1,serial=nvme001 "
;;
sata)
qm set ${VMID} --sata$((SATAIDX++)) "${BLDISK}"
;;
*)
echo "Setting bootloader disk failed"
qm destroy ${VMID} --purge
exit 1
;;
usb)
ARGS+="-device nec-usb-xhci,id=usb-bus0,multifunction=on -drive file=$(pvesm path ${BLDISK}),media=disk,format=raw,if=none,id=usb1 -device usb-storage,bus=usb-bus0.0,port=1,drive=usb1,bootindex=999,removable=on "
;;
nvme)
ARGS+="-drive file=$(pvesm path ${BLDISK}),media=disk,format=raw,if=none,id=nvme1 -device nvme,drive=nvme1,serial=nvme001 "
;;
sata)
qm set ${VMID} --sata$((SATAIDX++)) "${BLDISK}"
;;
*)
echo "Setting bootloader disk failed"
qm destroy ${VMID} --purge
exit 1
;;
esac
X86_VENDOR=$(awk -F: '/vendor_id/ {gsub(/^[ \t]+/, "", $2); print $2; exit}' /proc/cpuinfo)