Mirror of https://github.com/RROrg/rr.git (synced 2025-06-21 05:51:05 +08:00)

Optimize build scripts.

This commit is contained in:
parent 4bb14ac9b0
commit e225133099

.github/workflows/main.yml (vendored): 104 changed lines
@@ -37,9 +37,9 @@ jobs:
           git config --global user.email "github-actions[bot]@users.noreply.github.com"
           git config --global user.name "github-actions[bot]"

-          sudo apt-get update
-          sudo apt-get install -y jq gettext libelf-dev qemu-utils
-          sudo cp -f files/board/arpl/overlayfs/usr/bin/yq /usr/bin/yq
+          sudo apt update
+          sudo apt install -y jq gettext libelf-dev qemu-utils
+          sudo snap install yq

       # Check dl cache
       - name: Cache downloads
@@ -115,6 +115,7 @@ jobs:
           echo "${VERSION}" > files/board/arpl/p1/ARPL-VERSION
           sed 's/^ARPL_VERSION=.*/ARPL_VERSION="'${VERSION}'"/' -i files/board/arpl/overlayfs/opt/arpl/include/consts.sh

+          git checkout main
           git pull
           status=$(git status -s | awk '{printf " %s", $2}')
           if [ -n "${status}" ]; then
@@ -127,94 +128,15 @@ jobs:
       # Convert po2mo, Get extractor, LKM, addons and Modules
       - name: Convert po2mo, Get extractor, LKM, addons and Modules
         run: |
-          # Convert po2mo
-          echo "Convert po2mo"
-          if [ -d files/board/arpl/overlayfs/opt/arpl/lang ]; then
-            for P in "`ls files/board/arpl/overlayfs/opt/arpl/lang/*.po`"
-            do
-              # Use msgfmt command to compile the .po file into a binary .mo file
-              msgfmt ${P} -o ${P/.po/.mo}
-            done
-          fi
-
-          # Get extractor
-          echo "Getting syno extractor"
-          TOOL_PATH="files/board/arpl/p3/extractor"
-          CACHE_DIR="/tmp/pat"
-
-          rm -rf "${TOOL_PATH}"
-          mkdir -p "${TOOL_PATH}"
-          rm -rf "${CACHE_DIR}"
-          mkdir -p "${CACHE_DIR}"
-
-          OLDPAT_URL="https://global.download.synology.com/download/DSM/release/7.0.1/42218/DSM_DS3622xs%2B_42218.pat"
-          OLDPAT_FILE="DSM_DS3622xs+_42218.pat"
-          STATUS=`curl -# -w "%{http_code}" -L "${OLDPAT_URL}" -o "${CACHE_DIR}/${OLDPAT_FILE}"`
-          if [ $? -ne 0 -o ${STATUS} -ne 200 ]; then
-            echo "[E] DSM_DS3622xs%2B_42218.pat download error!"
-            rm -rf ${CACHE_DIR}
-            exit 1
-          fi
-
-          mkdir -p "${CACHE_DIR}/ramdisk"
-          tar -C "${CACHE_DIR}/ramdisk/" -xf "${CACHE_DIR}/${OLDPAT_FILE}" rd.gz 2>&1
-          if [ $? -ne 0 ]; then
-            echo "[E] extractor rd.gz error!"
-            rm -rf ${CACHE_DIR}
-            exit 1
-          fi
-          (cd "${CACHE_DIR}/ramdisk"; xz -dc < rd.gz | cpio -idm) >/dev/null 2>&1 || true
-
-          # Copy only necessary files
-          for f in libcurl.so.4 libmbedcrypto.so.5 libmbedtls.so.13 libmbedx509.so.1 libmsgpackc.so.2 libsodium.so libsynocodesign-ng-virtual-junior-wins.so.7; do
-            cp "${CACHE_DIR}/ramdisk/usr/lib/${f}" "${TOOL_PATH}"
-          done
-          cp "${CACHE_DIR}/ramdisk/usr/syno/bin/scemd" "${TOOL_PATH}/syno_extract_system_patch"
-          rm -rf ${CACHE_DIR}
-
-          # Get latest LKMs
-          echo "Getting latest LKMs"
-          TAG=`curl -s https://api.github.com/repos/wjz304/redpill-lkm/releases/latest | grep "tag_name" | awk '{print substr($2, 2, length($2)-3)}'`
-          STATUS=`curl -w "%{http_code}" -L "https://github.com/wjz304/redpill-lkm/releases/download/${TAG}/rp-lkms.zip" -o /tmp/rp-lkms.zip`
-          echo "Status=${STATUS}"
-          [ ${STATUS} -ne 200 ] && exit 1
-          # Unzip LKMs
-          rm -rf files/board/arpl/p3/lkms
-          mkdir -p files/board/arpl/p3/lkms
-          unzip /tmp/rp-lkms.zip -d files/board/arpl/p3/lkms
-
-          # Get latest addons and install its
-          echo "Getting latest Addons"
-          TAG=`curl -s https://api.github.com/repos/wjz304/arpl-addons/releases/latest | grep "tag_name" | awk '{print substr($2, 2, length($2)-3)}'`
-          STATUS=`curl -w "%{http_code}" -L "https://github.com/wjz304/arpl-addons/releases/download/${TAG}/addons.zip" -o /tmp/addons.zip`
-          echo "Status=${STATUS}"
-          [ ${STATUS} -ne 200 ] && exit 1
-          # Install Addons
-          mkdir -p /tmp/addons
-          unzip /tmp/addons.zip -d /tmp/addons
-          DEST_PATH="files/board/arpl/p3/addons"
-          echo "Installing addons to ${DEST_PATH}"
-          for PKG in `ls /tmp/addons/*.addon`; do
-            ADDON=`basename "${PKG}" .addon`
-            mkdir -p "${DEST_PATH}/${ADDON}"
-            echo "Extracting ${PKG} to ${DEST_PATH}/${ADDON}"
-            tar xaf "${PKG}" -C "${DEST_PATH}/${ADDON}"
-          done
-
-          # Get latest modules
-          echo "Getting latest Modules"
-          MODULES_DIR="files/board/arpl/p3/modules"
-          TAG=`curl -s https://api.github.com/repos/wjz304/arpl-modules/releases/latest | grep "tag_name" | awk '{print substr($2, 2, length($2)-3)}'`
-          STATUS=`curl -w "%{http_code}" -L "https://github.com/wjz304/arpl-modules/releases/download/${TAG}/modules.zip" -o "/tmp/modules.zip"`
-          echo "Status=${STATUS}"
-          [ ${STATUS} -ne 200 ] && exit 1
-          # Unzip Modules
-          rm -rf "${MODULES_DIR}/"*
-          unzip /tmp/modules.zip -d "${MODULES_DIR}"
-
-          echo OK
+          . scripts/func.sh
+          convertpo2mo "files/board/arpl/overlayfs/opt/arpl/lang"
+          getExtractor "files/board/arpl/p3/extractor"
+          getLKMs "files/board/arpl/p3/lkms"
+          getAddons "files/board/arpl/p3/addons"
+          getModules "files/board/arpl/p3/modules"
+          echo "OK"

       # Build incremental from caches
       - name: Build image
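The five inline blocks deleted above now live in scripts/func.sh (added later in this commit), so the workflow step shrinks to a handful of helper calls. Because the helpers are plain bash functions, the same step can be reproduced outside CI. A minimal sketch, assuming the repository root as the working directory and the tools from the setup step (curl, unzip, gettext's msgfmt, plus tar/xz/cpio) are installed:

    #!/usr/bin/env bash
    # Reproduce the consolidated CI fetch step locally (sketch).
    set -e
    . scripts/func.sh
    convertpo2mo "files/board/arpl/overlayfs/opt/arpl/lang"
    getExtractor "files/board/arpl/p3/extractor"
    getLKMs "files/board/arpl/p3/lkms"
    getAddons "files/board/arpl/p3/addons"
    getModules "files/board/arpl/p3/modules"
    echo "OK"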
@@ -238,14 +160,14 @@ jobs:
           qemu-img convert -O vmdk arpl.img arpl-dyn.vmdk
           qemu-img convert -O vmdk -o adapter_type=lsilogic arpl.img -o subformat=monolithicFlat arpl.vmdk

+          zip -9 "arpl-i18n.zip" arpl.img arpl-dyn.vmdk arpl.vmdk arpl-flat.vmdk
       # Upload artifact
       - name: Upload
         uses: actions/upload-artifact@v3
         with:
           name: Images
           path: |
-            arpl.img
-            arpl*.vmdk
+            arpl-i18n.zip
           retention-days: 5

       # Zip image and generate checksum
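The upload step now ships a single arpl-i18n.zip instead of the raw image and VMDKs. A quick sanity check of the converted disks and the final archive, assuming qemu-utils from the setup step; arpl-flat.vmdk is the flat extent written alongside the monolithicFlat arpl.vmdk descriptor:

    # Inspect the converted images and the packaged artifact (sketch).
    qemu-img info arpl-dyn.vmdk
    qemu-img info arpl.vmdk
    unzip -l arpl-i18n.zip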
img-gen.sh: 88 changed lines

@@ -2,90 +2,20 @@

 set -e

+. scripts/func.sh

 if [ ! -d .buildroot ]; then
   echo "Downloading buildroot"
   git clone --single-branch -b 2022.02 https://github.com/buildroot/buildroot.git .buildroot
 fi

-echo "Convert po2mo"
-if [ -d files/board/arpl/overlayfs/opt/arpl/lang ]; then
-  for P in "`ls files/board/arpl/overlayfs/opt/arpl/lang/*.po`"
-  do
-    # Use msgfmt command to compile the .po file into a binary .mo file
-    msgfmt ${P} -o ${P/.po/.mo}
-  done
-fi
-
-# Get extractor
-echo "Getting syno extractor"
-TOOL_PATH="files/board/arpl/p3/extractor"
-CACHE_DIR="/tmp/pat"
-rm -rf "${TOOL_PATH}"
-mkdir -p "${TOOL_PATH}"
-rm -rf "${CACHE_DIR}"
-mkdir -p "${CACHE_DIR}"
-OLDPAT_URL="https://global.download.synology.com/download/DSM/release/7.0.1/42218/DSM_DS3622xs%2B_42218.pat"
-OLDPAT_FILE="DSM_DS3622xs+_42218.pat"
-STATUS=`curl -# -w "%{http_code}" -L "${OLDPAT_URL}" -o "${CACHE_DIR}/${OLDPAT_FILE}"`
-if [ $? -ne 0 -o ${STATUS} -ne 200 ]; then
-  echo "[E] DSM_DS3622xs%2B_42218.pat download error!"
-  rm -rf ${CACHE_DIR}
-  exit 1
-fi
-
-mkdir -p "${CACHE_DIR}/ramdisk"
-tar -C "${CACHE_DIR}/ramdisk/" -xf "${CACHE_DIR}/${OLDPAT_FILE}" rd.gz 2>&1
-if [ $? -ne 0 ]; then
-  echo "[E] extractor rd.gz error!"
-  rm -rf ${CACHE_DIR}
-  exit 1
-fi
-(cd "${CACHE_DIR}/ramdisk"; xz -dc < rd.gz | cpio -idm) >/dev/null 2>&1 || true
-
-# Copy only necessary files
-for f in libcurl.so.4 libmbedcrypto.so.5 libmbedtls.so.13 libmbedx509.so.1 libmsgpackc.so.2 libsodium.so libsynocodesign-ng-virtual-junior-wins.so.7; do
-  cp "${CACHE_DIR}/ramdisk/usr/lib/${f}" "${TOOL_PATH}"
-done
-cp "${CACHE_DIR}/ramdisk/usr/syno/bin/scemd" "${TOOL_PATH}/syno_extract_system_patch"
-rm -rf ${CACHE_DIR}
-
-# Get latest LKMs
-echo "Getting latest LKMs"
-echo " Downloading LKMs from github"
-TAG=`curl -s https://api.github.com/repos/wjz304/redpill-lkm/releases/latest | grep "tag_name" | awk '{print substr($2, 2, length($2)-3)}'`
-curl -L "https://github.com/wjz304/redpill-lkm/releases/download/${TAG}/rp-lkms.zip" -o /tmp/rp-lkms.zip
-rm -rf files/board/arpl/p3/lkms/*
-unzip /tmp/rp-lkms.zip -d files/board/arpl/p3/lkms
-
-# Get latest addons and install its
-echo "Getting latest Addons"
-rm -Rf /tmp/addons
-mkdir -p /tmp/addons
-echo " Downloading Addons from github"
-TAG=`curl -s https://api.github.com/repos/wjz304/arpl-addons/releases/latest | grep "tag_name" | awk '{print substr($2, 2, length($2)-3)}'`
-curl -L "https://github.com/wjz304/arpl-addons/releases/download/${TAG}/addons.zip" -o /tmp/addons.zip
-rm -rf /tmp/addons
-unzip /tmp/addons.zip -d /tmp/addons
-DEST_PATH="files/board/arpl/p3/addons"
-echo "Installing addons to ${DEST_PATH}"
-for PKG in `ls /tmp/addons/*.addon`; do
-  ADDON=`basename ${PKG} | sed 's|.addon||'`
-  mkdir -p "${DEST_PATH}/${ADDON}"
-  echo "Extracting ${PKG} to ${DEST_PATH}/${ADDON}"
-  tar xaf "${PKG}" -C "${DEST_PATH}/${ADDON}"
-done
-
-# Get latest modules
-echo "Getting latest modules"
-echo " Downloading Modules from github"
-MODULES_DIR="${PWD}/files/board/arpl/p3/modules"
-
-TAG=`curl -s https://api.github.com/repos/wjz304/arpl-modules/releases/latest | grep "tag_name" | awk '{print substr($2, 2, length($2)-3)}'`
-curl -L "https://github.com/wjz304/arpl-modules/releases/download/${TAG}/modules.zip" -o "/tmp/modules.zip"
-rm -rf "${MODULES_DIR}/"*
-unzip /tmp/modules.zip -d "${MODULES_DIR}"
+# Convert po2mo, Get extractor, LKM, addons and Modules
+convertpo2mo "files/board/arpl/overlayfs/opt/arpl/lang"
+getExtractor "files/board/arpl/p3/extractor"
+getLKMs "files/board/arpl/p3/lkms"
+getAddons "files/board/arpl/p3/addons"
+getModules "files/board/arpl/p3/modules"

 # Remove old files
 rm -rf ".buildroot/output/target/opt/arpl"
rssupdate.py: 218 lines (new file)
# -*- coding: utf-8 -*-
#
# Copyright (C) 2022 Ing <https://github.com/wjz304>
#
# This is free software, licensed under the MIT License.
# See /LICENSE for more information.
#

import os, re, subprocess, hashlib, requests, json, yaml
import xml.etree.ElementTree as ET
from urllib.parse import urlparse
from bs4 import BeautifulSoup

root = ''  # os.path.dirname(os.path.abspath(__file__))


def fullversion(ver):
    out = ver
    arr = ver.split('-')
    if len(arr) > 0:
        a = arr[0].split('.')[0] if len(arr[0].split('.')) > 0 else '0'
        b = arr[0].split('.')[1] if len(arr[0].split('.')) > 1 else '0'
        c = arr[0].split('.')[2] if len(arr[0].split('.')) > 2 else '0'
        d = arr[1] if len(arr) > 1 else '00000'
        e = arr[2] if len(arr) > 2 else '0'
        out = '{}.{}.{}-{}-{}'.format(a, b, c, d, e)
    return out


def sha256sum(file):
    sha256Obj = ''
    if os.path.isfile(file):
        with open(file, "rb") as f:
            sha256Obj = hashlib.sha256(f.read()).hexdigest()
    return sha256Obj


def md5sum(file):
    md5Obj = ''
    if os.path.isfile(file):
        with open(file, "rb") as f:
            md5Obj = hashlib.md5(f.read()).hexdigest()
    return md5Obj


def synoextractor(url):
    data = {'url': '', 'hash': '', 'md5-hash': '', 'ramdisk-hash': '', 'zimage-hash': '', 'unique': ''}

    filename = os.path.basename(url)
    filepath = os.path.splitext(filename)[0]

    commands = ['sudo', 'rm', '-rf', filename, filepath]
    result = subprocess.check_output(commands)

    req = requests.get(url.replace(urlparse(url).netloc, 'cndl.synology.cn'))
    # req = requests.get(url)
    with open(filename, "wb") as f:
        f.write(req.content)

    # Read the first two bytes of the file and keep the second one
    output = subprocess.check_output(["od", "-bcN2", filename])
    header = output.decode().splitlines()[0].split()[2]

    if header == '105':
        # Uncompressed tar
        isencrypted = False
    elif header == '213':
        # Compressed tar
        isencrypted = False
    elif header == '255':
        # Encrypted
        isencrypted = True
    else:
        # Unknown format
        return data

    os.mkdir(filepath)

    if isencrypted is True:
        toolpath = "extractor"
        commands = ['sudo', f'LD_LIBRARY_PATH={toolpath}', f'{toolpath}/syno_extract_system_patch', filename, filepath]
        result = subprocess.check_output(commands)
    else:
        commands = ['tar', '-xf', filename, '-C', filepath]
        result = subprocess.check_output(commands)

    if os.path.exists(filename):
        data['url'] = url
        data['md5-hash'] = md5sum(filename)
        data['hash'] = sha256sum(filename)
        if os.path.exists(os.path.join(filepath, "rd.gz")):
            data['ramdisk-hash'] = sha256sum(os.path.join(filepath, "rd.gz"))
        if os.path.exists(os.path.join(filepath, "zImage")):
            data['zimage-hash'] = sha256sum(os.path.join(filepath, "zImage"))
        if os.path.exists(os.path.join(filepath, "VERSION")):
            with open(os.path.join(filepath, "VERSION"), 'r') as f:
                for line in f.readlines():
                    if line.startswith('unique'):
                        data['unique'] = line.split('=')[1].replace('"', '').strip()

    commands = ['sudo', 'rm', '-rf', filename, filepath]
    result = subprocess.check_output(commands)

    return data
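The header test in synoextractor() above can be replayed from a shell, since the script itself shells out to od. A sketch, where DSM.pat is a placeholder for any downloaded pat file:

    # Classify a .pat by its second byte, as synoextractor() does.
    header=$(od -bcN2 "DSM.pat" | head -1 | awk '{print $3}')
    case "${header}" in
      105|213) echo "plain or compressed tar, no decryption needed" ;;
      255) echo "encrypted, needs syno_extract_system_patch" ;;
      *) echo "unknown format" ;;
    esac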
def main():  # if __name__ == '__main__':
    # Get models
    models = []

    configs = "files/board/arpl/overlayfs/opt/arpl/model-configs"

    for filename in os.listdir(os.path.join(root, configs)):
        if ".yml" in filename:  # filename.endswith(".yml"):
            models.append(filename.split(".yml")[0])

    print(models)

    # Get pats
    pats = {}
    req = requests.get('https://prerelease.synology.com/webapi/models?event=dsm72_beta')
    rels = json.loads(req.text)
    if "models" in rels and len(rels["models"]) > 0:
        for i in rels["models"]:
            if "name" not in i or "dsm" not in i:
                continue
            if i["name"] not in models:
                continue
            if i["name"] not in pats.keys():
                pats[i["name"]] = {}
            pats[i["name"]][fullversion(i["dsm"]["version"])] = i["dsm"]["url"].split('?')[0]

    req = requests.get('https://archive.synology.com/download/Os/DSM')
    req.encoding = 'utf-8'
    bs = BeautifulSoup(req.text, 'html.parser')
    p = re.compile(r"(.*?)-(.*?)", re.MULTILINE | re.DOTALL)
    l = bs.find_all('a', string=p)
    for i in l:
        ver = i.attrs['href'].split('/')[-1]
        if not any([ver.startswith('6.2.4'), ver.startswith('7')]):
            continue
        req = requests.get('https://archive.synology.cn{}'.format(i.attrs['href']))
        req.encoding = 'utf-8'
        bs = BeautifulSoup(req.text, 'html.parser')
        p = re.compile(r"^(.*?)_(.*?)_(.*?).pat$", re.MULTILINE | re.DOTALL)
        data = bs.find_all('a', string=p)
        for item in data:
            p = re.compile(r"DSM_(.*?)_(.*?).pat", re.MULTILINE | re.DOTALL)
            rels = p.search(item.attrs['href'])
            if rels is not None:
                info = p.search(item.attrs['href']).groups()
                model = info[0].replace('%2B', '+')
                if model not in models:
                    continue
                if model not in pats.keys():
                    pats[model] = {}
                pats[model][fullversion(ver)] = item.attrs['href']

    print(json.dumps(pats, indent=4))

    # Update configs, rss.xml, rss.json
    rssxml = ET.parse('rsshead.xml')

    rssjson = {}
    with open('rsshead.json', "r", encoding='utf-8') as f:
        rssjson = json.loads(f.read())

    for filename in os.listdir(os.path.join(root, configs)):
        if ".yml" not in filename:  # filename.endswith(".yml"):
            continue
        model = filename.split(".yml")[0]

        data = ''
        with open(os.path.join(root, configs, filename), "r", encoding='utf-8') as f:
            data = yaml.load(f, Loader=yaml.BaseLoader)
        try:
            isChange = False
            for ver in data["builds"].keys():
                tmp, url = '0.0.0-00000-0', ''
                for item in pats[model].keys():
                    if str(ver) not in item:
                        continue
                    if item > tmp:
                        tmp, url = item, pats[model][item]
                if url != '':
                    print("[I] {} synoextractor ...".format(url))
                    hashdata = synoextractor(url)
                    if not all(bool(value) for value in hashdata.values()):
                        print("[E] {} synoextractor error".format(url))
                        return
                    isChange = True
                    # config.yml
                    # data["builds"][ver]["pat"] = hashdata  # pyyaml would reformat the file
                    # yq -iy '.builds."25556".pat |= {url:"...", hash:"..."}' DS918+.yml  # yq would also reformat the file
                    pat = data["builds"][ver]["pat"]
                    commands = ['sed', '-i', 's|{}|{}|; s|{}|{}|; s|{}|{}|; s|{}|{}|; s|{}|{}|'.format(pat["url"], hashdata["url"], pat["hash"], hashdata["hash"], pat["ramdisk-hash"], hashdata["ramdisk-hash"], pat["zimage-hash"], hashdata["zimage-hash"], pat["md5-hash"], hashdata["md5-hash"]), os.path.join(root, configs, filename)]
                    result = subprocess.check_output(commands)

                    # rss.xml
                    for n in rssxml.findall('.//item'):
                        if n.find('.//BuildNum').text == str(ver):
                            n.append(ET.fromstring("<model>\n<mUnique>{}</mUnique>\n<mLink>{}</mLink>\n<mCheckSum>{}</mCheckSum>\n</model>\n".format(hashdata["unique"], hashdata["url"], hashdata["md5-hash"])))
                    # rss.json
                    for idx in range(len(rssjson["channel"]["item"])):
                        if rssjson["channel"]["item"][idx]["BuildNum"] == int(ver):
                            rssjson["channel"]["item"][idx]["model"].append({"mUnique": hashdata["unique"], "mLink": hashdata["url"], "mCheckSum": hashdata["md5-hash"]})
            # # pyyaml would reformat the file
            # if isChange is True:
            #     with open(os.path.join(root, configs, filename), "w", encoding='utf-8') as f:
            #         yaml.dump(data, f, Dumper=yaml.SafeDumper, sort_keys=False)  # double quotes: default_style='"',
        except:
            pass

    rssxml.write("rss.xml", xml_declaration=True)
    # The declaration ET writes gains an encoding attribute compared with the original rss
    commands = ['sed', '-i', 's|^<?xml .*\?>$|<?xml version="1.0"?>|', os.path.join(root, 'rss.xml')]
    result = subprocess.check_output(commands)
    # ET does not pretty-print the XML
    commands = ['xmllint', '--format', 'rss.xml', '-o', 'rss_new.xml']
    result = subprocess.check_output(commands)
    commands = ['mv', 'rss_new.xml', 'rss.xml']
    result = subprocess.check_output(commands)

    with open('rss.json', 'w', encoding='utf-8') as f:
        f.write(json.dumps(rssjson, indent=4))


if __name__ == '__main__':
    main()
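The script declares no dependencies; from its imports and subprocess calls it needs the requests, beautifulsoup4 and PyYAML packages, the od/sed/xmllint/sudo binaries, rsshead.xml and rsshead.json in the working directory, and, for encrypted pats, the extractor under ./extractor. A sketch of a local run under those assumptions:

    # Install the Python packages implied by the imports, then run the updater.
    pip install requests beautifulsoup4 pyyaml
    python3 rssupdate.py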
scripts/func.sh: 142 lines (new file)
#!/usr/bin/env bash
#
# Copyright (C) 2022 Ing <https://github.com/wjz304>
#
# This is free software, licensed under the MIT License.
# See /LICENSE for more information.
#


# Convert po2mo
# $1 path
function convertpo2mo() {
  echo "Convert po2mo begin"
  local DEST_PATH="${1:-lang}"
  if [ `find ${DEST_PATH}/ -name "*.po" | wc -l` -gt 0 ]; then
    for P in `ls ${DEST_PATH}/*.po`
    do
      # Use the msgfmt command to compile the .po file into a binary .mo file
      echo "msgfmt ${P} to ${P/.po/.mo}"
      msgfmt ${P} -o ${P/.po/.mo}
    done
  fi
  echo "Convert po2mo end"
}


# Get extractor
# $1 path
function getExtractor() {
  echo "Getting syno extractor begin"
  local DEST_PATH="${1:-extractor}"
  local CACHE_DIR="/tmp/pat"
  rm -rf "${CACHE_DIR}"; mkdir -p "${CACHE_DIR}"
  # Download pat file
  # global.synologydownload.com, global.download.synology.com, cndl.synology.cn
  local PAT_URL="https://global.download.synology.com/download/DSM/release/7.0.1/42218/DSM_DS3622xs%2B_42218.pat"
  local PAT_FILE="DSM_DS3622xs+_42218.pat"
  local STATUS
  STATUS=`curl -# -w "%{http_code}" -L "${PAT_URL}" -o "${CACHE_DIR}/${PAT_FILE}"`
  if [ $? -ne 0 -o ${STATUS} -ne 200 ]; then
    echo "[E] DSM_DS3622xs%2B_42218.pat download error!"
    rm -rf ${CACHE_DIR}
    exit 1
  fi

  mkdir -p "${CACHE_DIR}/ramdisk"
  tar -C "${CACHE_DIR}/ramdisk/" -xf "${CACHE_DIR}/${PAT_FILE}" rd.gz 2>&1
  if [ $? -ne 0 ]; then
    echo "[E] extractor rd.gz error!"
    rm -rf ${CACHE_DIR}
    exit 1
  fi
  (cd "${CACHE_DIR}/ramdisk"; xz -dc < rd.gz | cpio -idm) >/dev/null 2>&1 || true

  rm -rf "${DEST_PATH}"; mkdir -p "${DEST_PATH}"

  # Copy only necessary files
  for f in libcurl.so.4 libmbedcrypto.so.5 libmbedtls.so.13 libmbedx509.so.1 libmsgpackc.so.2 libsodium.so libsynocodesign-ng-virtual-junior-wins.so.7; do
    cp "${CACHE_DIR}/ramdisk/usr/lib/${f}" "${DEST_PATH}"
  done
  cp "${CACHE_DIR}/ramdisk/usr/syno/bin/scemd" "${DEST_PATH}/syno_extract_system_patch"

  # Clean up
  rm -rf ${CACHE_DIR}
  echo "Getting syno extractor end"
}
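getExtractor leaves the shared libraries and the scemd binary (renamed to syno_extract_system_patch) side by side, which matches how rssupdate.py later invokes it. A usage sketch, with encrypted.pat standing in for a real encrypted pat file:

    # Decrypt an encrypted .pat with the harvested extractor (sketch).
    TOOL_PATH="files/board/arpl/p3/extractor"
    mkdir -p /tmp/out
    sudo LD_LIBRARY_PATH="${TOOL_PATH}" "${TOOL_PATH}/syno_extract_system_patch" "encrypted.pat" /tmp/out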
# Get latest LKMs
# $1 path
function getLKMs() {
  echo "Getting LKMs begin"
  local DEST_PATH="${1:-lkms}"
  local CACHE_FILE="/tmp/rp-lkms.zip"
  rm -f "${CACHE_FILE}"
  TAG=`curl -s "https://api.github.com/repos/wjz304/redpill-lkm/releases/latest" | grep -oP '"tag_name": "\K(.*)(?=")'`
  STATUS=`curl -w "%{http_code}" -L "https://github.com/wjz304/redpill-lkm/releases/download/${TAG}/rp-lkms.zip" -o "${CACHE_FILE}"`
  echo "Status=${STATUS}"
  [ ${STATUS} -ne 200 ] && exit 1
  # Unzip LKMs
  rm -rf "${DEST_PATH}"; mkdir -p "${DEST_PATH}"
  unzip "${CACHE_FILE}" -d "${DEST_PATH}"
  rm -f "${CACHE_FILE}"
  echo "Getting LKMs end"
}


# Get latest addons and install them
# $1 path
function getAddons() {
  echo "Getting Addons begin"
  local DEST_PATH="${1:-addons}"
  local CACHE_DIR="/tmp/addons"
  local CACHE_FILE="/tmp/addons.zip"
  TAG=`curl -s https://api.github.com/repos/wjz304/arpl-addons/releases/latest | grep -oP '"tag_name": "\K(.*)(?=")'`
  STATUS=`curl -w "%{http_code}" -L "https://github.com/wjz304/arpl-addons/releases/download/${TAG}/addons.zip" -o "${CACHE_FILE}"`
  echo "Status=${STATUS}"
  [ ${STATUS} -ne 200 ] && exit 1
  # Install Addons
  rm -rf "${CACHE_DIR}"; mkdir -p "${CACHE_DIR}"
  unzip "${CACHE_FILE}" -d "${CACHE_DIR}"
  echo "Installing addons to ${DEST_PATH}"
  for PKG in `ls ${CACHE_DIR}/*.addon`; do
    ADDON=`basename "${PKG}" .addon`
    mkdir -p "${DEST_PATH}/${ADDON}"
    echo "Extracting ${PKG} to ${DEST_PATH}/${ADDON}"
    tar xaf "${PKG}" -C "${DEST_PATH}/${ADDON}"
  done
  echo "Getting Addons end"
}


# Get latest modules
# $1 path
function getModules() {
  echo "Getting Modules begin"
  local DEST_PATH="${1:-modules}"
  local CACHE_FILE="/tmp/modules.zip"
  rm -f "${CACHE_FILE}"
  TAG=`curl -s https://api.github.com/repos/wjz304/arpl-modules/releases/latest | grep -oP '"tag_name": "\K(.*)(?=")'`
  STATUS=`curl -w "%{http_code}" -L "https://github.com/wjz304/arpl-modules/releases/download/${TAG}/modules.zip" -o "${CACHE_FILE}"`
  echo "Status=${STATUS}"
  [ ${STATUS} -ne 200 ] && exit 1
  # Unzip Modules
  rm -rf "${DEST_PATH}"; mkdir -p "${DEST_PATH}"
  unzip "${CACHE_FILE}" -d "${DEST_PATH}"
  rm -f "${CACHE_FILE}"
  echo "Getting Modules end"
}
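All three get* helpers share the same release-lookup idiom, now based on grep -oP instead of the old awk substr parsing; it works just as well interactively:

    # Resolve the latest release tag, as getLKMs/getAddons/getModules do.
    TAG=$(curl -s "https://api.github.com/repos/wjz304/arpl-addons/releases/latest" \
      | grep -oP '"tag_name": "\K(.*)(?=")')
    echo "Latest arpl-addons tag: ${TAG}"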