diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml
index 9d707114..95f2d636 100644
--- a/.github/workflows/main.yml
+++ b/.github/workflows/main.yml
@@ -160,7 +160,8 @@ jobs:
qemu-img convert -O vmdk arpl.img arpl-dyn.vmdk
qemu-img convert -O vmdk -o adapter_type=lsilogic arpl.img -o subformat=monolithicFlat arpl.vmdk
- zip -9 "arpl-i18n.zip" arpl.img arpl-dyn.vmdk arpl.vmdk arpl-flat.vmdk
+ zip -9 "arpl-i18n.zip" arpl.img
+
# Upload artifact
- name: Upload
uses: actions/upload-artifact@v3
diff --git a/.github/workflows/update.yml b/.github/workflows/update.yml
index c77c487e..1be52158 100644
--- a/.github/workflows/update.yml
+++ b/.github/workflows/update.yml
@@ -9,7 +9,19 @@ name: Update
on:
workflow_dispatch:
-
+ inputs:
+ push:
+ description: 'push'
+ default: false
+ type: boolean
+ config:
+ description: 'update configs'
+ default: true
+ type: boolean
+ rss:
+ description: 'update rss'
+ default: true
+ type: boolean
jobs:
build:
runs-on: ubuntu-latest
@@ -34,9 +46,19 @@ jobs:
- name: update pats
run: |
- python rssupdate.py
+ python rssupdate.py ${{ inputs.config }} ${{ inputs.rss }}
+
+ - name: Upload artifact
+ uses: actions/upload-artifact@v3
+ with:
+ name: grub
+ path: |
+ files/board/arpl/overlayfs/opt/arpl/model-configs
+ rss.json
+ rss.xml
- name: Check and Push
+ if: success() && inputs.push == true
run: |
git pull
status=$(git status -s | grep -E 'model-configs|rss' | awk '{printf " %s", $2}')
diff --git a/rsshead.json b/rsshead.json
index 41e04d18..7c3eafd6 100644
--- a/rsshead.json
+++ b/rsshead.json
@@ -7,13 +7,13 @@
"copyright": "Copyright 2022 Synology Inc",
"item": [
{
- "title": "DSM 7.2-64216",
+ "title": "DSM 7.2-64551",
"MajorVer": 7,
"MinorVer": 2,
"NanoVer": 0,
- "BuildPhase": 0,
- "BuildNum": 64216,
- "BuildDate": "2023/03/08",
+ "BuildPhase": "RC",
+ "BuildNum": 64551,
+ "BuildDate": "2023/04/21",
"ReqMajorVer": 7,
"ReqMinorVer": 2,
"ReqBuildPhase": 0,
diff --git a/rsshead.xml b/rsshead.xml
index 602f3fe3..98c7d097 100644
--- a/rsshead.xml
+++ b/rsshead.xml
@@ -6,11 +6,11 @@
Sat Aug 6 0:11:41 CST 2022
Copyright 2022 Synology Inc
-
- DSM 7.2-64216
+ DSM 7.2-64551
7
2
0
- 64216
+ 64551
2023/03/08
7
0
diff --git a/rssupdate.py b/rssupdate.py
index c997b7f3..d78c8f36 100644
--- a/rssupdate.py
+++ b/rssupdate.py
@@ -6,7 +6,7 @@
# See /LICENSE for more information.
#
-import os, re, subprocess, hashlib, requests, json, yaml
+import os, re, sys, subprocess, hashlib, requests, json, yaml
import xml.etree.ElementTree as ET
from urllib.parse import urlparse
from bs4 import BeautifulSoup
@@ -104,7 +104,7 @@ def synoextractor(url):
return data
-def main(): # if __name__ == '__main__':
+def main(isUpdateConfigs = True, isUpdateRss = True):
# Get models
models=[]
@@ -182,26 +182,30 @@ def main(): # if __name__ == '__main__':
if not all(bool(key) for key in hashdata.keys()):
print("[E] {} synoextractor error".format(url))
return
- isChange = True
- # config.yml
- # data["builds"][ver]["pat"] = hashdata # pyyaml 会修改文件格式
- # yq -iy '.builds."25556".pat |= {url:"...", hash:"..."}' DS918+.yml # yq 也会修改文件格式
- pat = data["builds"][ver]["pat"]
- commands = ['sed', '-i', 's|{}|{}|; s|{}|{}|; s|{}|{}|; s|{}|{}|; s|{}|{}|'.format(pat["url"], hashdata["url"], pat["hash"], hashdata["hash"], pat["ramdisk-hash"], hashdata["ramdisk-hash"], pat["zimage-hash"], hashdata["zimage-hash"], pat["md5-hash"], hashdata["md5-hash"]), os.path.join(FILE_PATH, configs, filename)]
- result = subprocess.check_output(commands)
+
+ if isUpdateConfigs is True:
+ isChange = True
+ # config.yml
+ # data["builds"][ver]["pat"] = hashdata # pyyaml 会修改文件格式
+ # yq -iy '.builds."25556".pat |= {url:"...", hash:"..."}' DS918+.yml # yq 也会修改文件格式
+ pat = data["builds"][ver]["pat"]
+ commands = ['sed', '-i', 's|{}|{}|; s|{}|{}|; s|{}|{}|; s|{}|{}|; s|{}|{}|'.format(pat["url"], hashdata["url"], pat["hash"], hashdata["hash"], pat["ramdisk-hash"], hashdata["ramdisk-hash"], pat["zimage-hash"], hashdata["zimage-hash"], pat["md5-hash"], hashdata["md5-hash"]), os.path.join(FILE_PATH, configs, filename)]
+ result = subprocess.check_output(commands)
- # rss.xml
- for n in rssxml.findall('.//item'):
- if n.find('.//BuildNum').text == str(ver):
- n.append(ET.fromstring("\n{}\n{}\n{}\n\n".format(hashdata["unique"], hashdata["url"], hashdata["md5-hash"])))
- # rss.json
- for idx in range(len(rssjson["channel"]["item"])):
- if rssjson["channel"]["item"][idx]["BuildNum"] == int(ver):
- rssjson["channel"]["item"][idx]["model"].append({"mUnique": hashdata["unique"], "mLink": hashdata["url"], "mCheckSum": hashdata["md5-hash"]})
- # # pyyaml 会修改文件格式
- # if isChange is True:
- # with open(os.path.join(FILE_PATH, configs, filename), "w", encoding='utf-8') as f:
- # yaml.dump(data, f, Dumper=yaml.SafeDumper, sort_keys=False) # 双引号: default_style='"',
+ if isUpdateRss is True:
+ # rss.xml
+ for n in rssxml.findall('.//item'):
+ if n.find('.//BuildNum').text == str(ver):
+ n.append(ET.fromstring("\n{}\n{}\n{}\n\n".format(hashdata["unique"], hashdata["url"], hashdata["md5-hash"])))
+ # rss.json
+ for idx in range(len(rssjson["channel"]["item"])):
+ if rssjson["channel"]["item"][idx]["BuildNum"] == int(ver):
+ rssjson["channel"]["item"][idx]["model"].append({"mUnique": hashdata["unique"], "mLink": hashdata["url"], "mCheckSum": hashdata["md5-hash"]})
+ # if isUpdateConfigs is True:
+ # # pyyaml 会修改文件格式
+ # if isChange is True:
+ # with open(os.path.join(FILE_PATH, configs, filename), "w", encoding='utf-8') as f:
+ # yaml.dump(data, f, Dumper=yaml.SafeDumper, sort_keys=False) # 双引号: default_style='"',
except:
pass
@@ -220,4 +224,20 @@ def main(): # if __name__ == '__main__':
if __name__ == '__main__':
- main()
\ No newline at end of file
+
+ isUpdateConfigs = True
+ isUpdateRss = True
+
+ if len(sys.argv) > 1:
+ try:
+ isUpdateConfigs = bool(int(sys.argv[1]))
+ except ValueError:
+ isUpdateConfigs = sys.argv[1].strip().lower() in ('true', 'yes', 'y')
+
+ if len(sys.argv) > 2:
+ try:
+ isUpdateRss = bool(int(sys.argv[2]))
+ except ValueError:
+ isUpdateRss = sys.argv[2].strip().lower() in ('true', 'yes', 'y')
+
+ main(isUpdateConfigs, isUpdateRss)
\ No newline at end of file