diff --git a/PLATFORMS b/PLATFORMS
deleted file mode 100644
index 5173e00d..00000000
--- a/PLATFORMS
+++ /dev/null
@@ -1,8 +0,0 @@
-bromolow 3.10.108
-apollolake 4.4.180
-broadwell 4.4.180
-broadwellnk 4.4.180
-denverton 4.4.180
-geminilake 4.4.180
-v1000 4.4.180
-r1000 4.4.180
diff --git a/TODO b/TODO
index e23201ed..e69de29b 100644
--- a/TODO
+++ b/TODO
@@ -1,41 +0,0 @@
-To do
- - Check NVMe on the DS920+ and DS918+ models
- - Check the tg3 module in the loader itself (BCM57780 [14e4:1692]) (#31)
- - "Sensitive data" (#312)
- - Check drivers for MMC/SD/eMMC
- - Check drivers for Chelsio T420, T520 and T580
- - Improve the getAllModules() function
-
-Done:
- - Generalize the addons code
- - Implement conflict checking between addons *** Used the alternative of having a list of not-needed modules ***
- - Remove MAXDISKS from the model files and add a menu in synoinfo to configure the maximum number of HDs
- - Change the builtin modules in the model configs to not-needed modules
- - Change the word "extra" to "cmdline"
- - Implement display of the models' cmdline and synoinfo
- - Add a check in GRUB to show or hide the boot menu
- - Bug in boot.sh: if the user changes the netif_num variable, the script passes the macX values on without considering the new number of interfaces
-   *** REMOVED the requirement in the LKM to pass these parameters ***
- - Bug with MACs: with 2 NICs the MACs can end up swapped, check this
-   *** Solved by the previous solution ***
- - Check whether the platform will run on the machine by checking the CPU flags
- - Implement selection of the LKM version (dev or prod)
- - Using TTYD for web access
- - Check whether it makes sense to put mandatory addons such as qjs-dtb in the model configs *** Done another way ***
- - Implement maxdisks selection
- - Clear the addons when the user changes the model
- - Fix disk/maxdisks detection; the 918 has only 4 disks and that causes problems with Proxmox
- - Improve the HD options, maybe create a separate menu
- - Implement online update of bzImage and ramdisk
- - Change addons to put the full package on partition 3 and detect it dynamically during the junior boot
- - Set the DIRTY flag correctly when updating modules, etc.
- - Fix custom MAC
- - Change synoinfo to read from the model and fill in user_config, so the user can delete the model's entries
- - Validate netif_num and macX
- - Find out what the DS2422+ serial number looks like
- - Remove ttyd from memory when the DSM root boots
- - Add more binaries to the dbgutils addon (nano, strace, etc.)
- - Check whether the microcode can be updated via addon/modules/whatever... (NOP)
- - Study adding simple and advanced menu modes (NOP)
- - J4125, HP G7 N40L shutting down (#110 #160)
- - See whether it is worth adding an option for extra settings, such as choosing which Realtek module to use (R8168 or R8169)
diff --git a/doc/DSM boot.png b/doc/DSM boot.png
deleted file mode 100644
index 440c3fb7..00000000
Binary files a/doc/DSM boot.png and /dev/null differ
diff --git a/doc/buildnumber.png b/doc/buildnumber.png
deleted file mode 100644
index 326fb990..00000000
Binary files a/doc/buildnumber.png and /dev/null differ
diff --git a/doc/first-screen.png b/doc/first-screen.png
deleted file mode 100644
index 7d4530a6..00000000
Binary files a/doc/first-screen.png and /dev/null differ
diff --git a/doc/making.png b/doc/making.png
deleted file mode 100644
index 4bae687e..00000000
Binary files a/doc/making.png and /dev/null differ
diff --git a/doc/model.png b/doc/model.png
deleted file mode 100644
index 6a6c5a13..00000000
Binary files a/doc/model.png and /dev/null differ
diff --git a/doc/ttyd.png b/doc/ttyd.png
deleted file mode 100644
index 854f9273..00000000
Binary files a/doc/ttyd.png and /dev/null differ
diff --git a/docker/syno-compiler/Dockerfile.template b/docker/syno-compiler/Dockerfile.template
deleted file mode 100644
index 5eb4e105..00000000
--- a/docker/syno-compiler/Dockerfile.template
+++ /dev/null
@@ -1,50 +0,0 @@
-FROM alpine:3.14 AS stage
-ARG PLATFORMS="@@@PLATFORMS@@@"
-ARG TOOLKIT_VER="@@@TOOLKIT_VER@@@"
-ARG GCCLIB_VER="@@@GCCLIB_VER@@@"
-
-# Copy downloaded toolkits
-ADD cache /cache
-# Extract toolkits
-RUN for V in ${PLATFORMS}; do \
-      echo "${V}" | while IFS=':' read PLATFORM KVER; do \
-        echo -e "${PLATFORM}\t${KVER}" >> /opt/platforms && \
-        echo "Extracting ds.${PLATFORM}-${TOOLKIT_VER}.dev.txz" && \
-        mkdir "/opt/${PLATFORM}" && \
-        tar -xaf "/cache/ds.${PLATFORM}-${TOOLKIT_VER}.dev.txz" -C "/opt/${PLATFORM}" --strip-components=9 \
-          "usr/local/x86_64-pc-linux-gnu/x86_64-pc-linux-gnu/sys-root/usr/lib/modules/DSM-${TOOLKIT_VER}" && \
-        echo "Extracting ${PLATFORM}-${GCCLIB_VER}_x86_64-GPL.txz" && \
-        tar -xaf "/cache/${PLATFORM}-${GCCLIB_VER}_x86_64-GPL.txz" -C "/opt/${PLATFORM}" --strip-components=1; \
-        KVER_MAJOR="`echo ${KVER} | rev | cut -d. -f2- | rev`"; \
-        if [ ! -d "/opt/linux-${KVER_MAJOR}.x" -a -f "/cache/linux-${KVER_MAJOR}.x.txz" ]; then \
-d "/opt/linux-${KVER_MAJOR}.x" -a -f "/cache/linux-${KVER_MAJOR}.x.txz" ]; then \ - echo "Extracting linux-${KVER_MAJOR}.x.txz" && \ - tar -xaf "/cache/linux-${KVER_MAJOR}.x.txz" -C "/opt"; \ - rm -rf /opt/${PLATFORM}/source && \ - ln -s /opt/linux-${KVER_MAJOR}.x /opt/${PLATFORM}/source; \ - fi; \ - done; \ - done - -# Final image -FROM debian:9-slim -ENV SHELL=/bin/bash \ - ARCH=x86_64 - -RUN apt update --yes && \ - apt install --yes --no-install-recommends --no-install-suggests --allow-unauthenticated \ - ca-certificates nano curl bc kmod git gettext texinfo autopoint gawk sudo \ - build-essential make ncurses-dev libssl-dev autogen automake pkg-config libtool xsltproc gperf && \ - apt-get clean && \ - rm -rf /var/lib/apt/lists/* && \ - useradd --create-home --shell /bin/bash --uid 1000 --user-group arpl && \ - echo "arpl ALL=(ALL) NOPASSWD:ALL" > /etc/sudoers.d/arpl && \ - mkdir /output && chown 1000:1000 /output - -COPY --from=stage --chown=1000:1000 /opt /opt -COPY files/ / - -USER arpl -WORKDIR /input -VOLUME /input /output - -ENTRYPOINT ["/opt/do.sh"] diff --git a/docker/syno-compiler/build.sh b/docker/syno-compiler/build.sh deleted file mode 100755 index 7a5e73fa..00000000 --- a/docker/syno-compiler/build.sh +++ /dev/null @@ -1,118 +0,0 @@ -#!/usr/bin/env bash - -CACHE_DIR="cache" -#SERVER="https://archive.synology.com" -SERVER="https://global.download.synology.com" -#https://global.download.synology.com/download/ToolChain/Synology%20NAS%20GPL%20Source/ - -declare -A URIS -declare -A PLATFORMS - -URIS["apollolake"]="Intel%20x86%20Linux%20|%20%28Apollolake%29" -URIS["broadwell"]="Intel%20x86%20Linux%20|%20%28Broadwell%29" -URIS["broadwellnk"]="Intel%20x86%20Linux%20|%20%28Broadwellnk%29" -URIS["bromolow"]="Intel%20x86%20linux%20|%20%28Bromolow%29" -URIS["denverton"]="Intel%20x86%20Linux%20|%20%28Denverton%29" -URIS["geminilake"]="Intel%20x86%20Linux%20|%20%28GeminiLake%29" -URIS["v1000"]="Intel%20x86%20Linux%20|%20%28V1000%29" -URIS["r1000"]="AMD%20x86%20Linux%20|%20%28r1000%29" -URIS["epyc7002"]="AMD%20x86%20Linux%20Linux%20|%20%28epyc7002%29" -PLATFORMS["7.1"]="apollolake:4.4.180 broadwell:4.4.180 broadwellnk:4.4.180 bromolow:3.10.108 denverton:4.4.180 geminilake:4.4.180 v1000:4.4.180 r1000:4.4.180 epyc7002:5.10.55" -PLATFORMS["7.2"]="apollolake:4.4.302 broadwell:4.4.302 broadwellnk:4.4.302 bromolow:3.10.108 denverton:4.4.302 geminilake:4.4.302 v1000:4.4.302 r1000:4.4.302 epyc7002:5.10.55" - -mkdir -p ${CACHE_DIR} - -############################################################################### -function trap_cancel() { - echo "Press Control+C once more terminate the process (or wait 2s for it to restart)" - sleep 2 || exit 1 -} -trap trap_cancel SIGINT SIGTERM -cd `dirname $0` - -############################################################################### -function prepare() { - # Download toolkits - for P in ${PLATFORMS[${TOOLKIT_VER}]}; do - PLATFORM="`echo ${P} | cut -d':' -f1`" - KVER="`echo ${P} | cut -d':' -f2`" - # Dev - echo -n "Checking ${CACHE_DIR}/ds.${PLATFORM}-${TOOLKIT_VER}.dev.txz... " - if [ ! 
-f "${CACHE_DIR}/ds.${PLATFORM}-${TOOLKIT_VER}.dev.txz" ]; then - URL="${SERVER}/download/ToolChain/toolkit/${TOOLKIT_VER}/${PLATFORM}/ds.${PLATFORM}-${TOOLKIT_VER}.dev.txz" - echo -e "No\nDownloading ${URL}" - STATUS=`curl -w "%{http_code}" -L "${URL}" -o "${CACHE_DIR}/ds.${PLATFORM}-${TOOLKIT_VER}.dev.txz"` - if [ ${STATUS} -ne 200 ]; then - rm -f "${CACHE_DIR}/ds.${PLATFORM}-${TOOLKIT_VER}.dev.txz" - exit 1 - fi - else - echo "OK" - fi - # Toolchain - URI="`echo ${URIS[${PLATFORM}]} | sed "s/|/${KVER}/"`" - URL="${SERVER}/download/ToolChain/toolchain/${TOOLCHAIN_VER}/${URI}/${PLATFORM}-${GCCLIB_VER}_x86_64-GPL.txz" - FILENAME="${PLATFORM}-${GCCLIB_VER}_x86_64-GPL.txz" - echo -n "Checking ${CACHE_DIR}/${FILENAME}... " - if [ ! -f "${CACHE_DIR}/${FILENAME}" ]; then - echo -e "No\nDownloading ${URL}" - STATUS=`curl -w "%{http_code}" -L "${URL}" -o "${CACHE_DIR}/${FILENAME}"` - if [ ${STATUS} -ne 200 ]; then - rm -f "${CACHE_DIR}/${FILENAME}" - exit 1 - fi - else - echo "OK" - fi - done - - # for KERNEL in 3.10.x 4.4.x 5.10.x; do - # URL=${URLS["${KERNEL}"]} - # [ -z "${URL}" ] && continue - # echo -n "Checking ${CACHE_DIR}/linux-${KERNEL}.txz... " - # if [ ! -f "${CACHE_DIR}/linux-${KERNEL}.txz" ]; then - # echo -e "No\nDownloading ${URL}" - # STATUS=`curl -w "%{http_code}" -L "${URL}" -o "${CACHE_DIR}/linux-${KERNEL}.txz"` - # if [ ${STATUS} -ne 200 ]; then - # rm -f "${CACHE_DIR}/linux-${KERNEL}.txz" - # exit 1 - # fi - # else - # echo "OK" - # fi - # done - - # Generate Dockerfile - echo "Generating Dockerfile" - cp Dockerfile.template Dockerfile - sed -i "s|@@@PLATFORMS@@@|${PLATFORMS[${TOOLKIT_VER}]}|g" Dockerfile - sed -i "s|@@@TOOLKIT_VER@@@|${TOOLKIT_VER}|g" Dockerfile - sed -i "s|@@@GCCLIB_VER@@@|${GCCLIB_VER}|g" Dockerfile -} - -# 7.0 -#TOOLKIT_VER="7.0" -#TOOLCHAIN_VER="7.0-41890" -#GCCLIB_VER="gcc750_glibc226" -#prepare -#echo "Building ${TOOLKIT_VER}" -#docker image rm fbelavenuto/syno-compiler:${TOOLKIT_VER} >/dev/null 2>&1 -#docker buildx build . --load --tag fbelavenuto/syno-compiler:${TOOLKIT_VER} - -# 7.1 -# TOOLKIT_VER="7.1" -# TOOLCHAIN_VER="7.1-42661" -# GCCLIB_VER="gcc850_glibc226" -# prepare -# echo "Building ${TOOLKIT_VER}" -# docker image rm fbelavenuto/syno-compiler:${TOOLKIT_VER} >/dev/null 2>&1 -# docker buildx build . --load --tag fbelavenuto/syno-compiler:${TOOLKIT_VER} - -# 7.2 -TOOLKIT_VER="7.2" -TOOLCHAIN_VER="7.2-63134" -GCCLIB_VER="gcc1220_glibc236" -prepare -echo "Building ${TOOLKIT_VER}" -docker image rm fbelavenuto/syno-compiler:${TOOLKIT_VER} >/dev/null 2>&1 -docker buildx build . 
diff --git a/docker/syno-compiler/files/etc/profile.d/login.sh b/docker/syno-compiler/files/etc/profile.d/login.sh
deleted file mode 100644
index 76437f46..00000000
--- a/docker/syno-compiler/files/etc/profile.d/login.sh
+++ /dev/null
@@ -1,9 +0,0 @@
-[[ "$-" != *i* ]] && return
-export LS_OPTIONS='--color=auto'
-export SHELL='linux'
-eval "`dircolors`"
-alias ls='ls -F -h --color=always -v --author --time-style=long-iso'
-alias ll='ls -l'
-alias l='ls -l -a'
-alias h='history 25'
-alias j='jobs -l'
diff --git a/docker/syno-compiler/files/opt/do.sh b/docker/syno-compiler/files/opt/do.sh
deleted file mode 100755
index 4a0f36e9..00000000
--- a/docker/syno-compiler/files/opt/do.sh
+++ /dev/null
@@ -1,128 +0,0 @@
-#!/usr/bin/env bash
-
-set -eo pipefail
-
-###############################################################################
-function export-vars() {
-  # Validate
-  if [ -z "${1}" ]; then
-    echo "Use: export-vars "
-    exit 1
-  fi
-  export PLATFORM="${1}"
-  export KSRC="/opt/${1}/build"
-  export CROSS_COMPILE="/opt/${1}/bin/x86_64-pc-linux-gnu-"
-  export CFLAGS="-I/opt/${1}/include"
-  export LDFLAGS="-I/opt/${1}/lib"
-  export LD_LIBRARY_PATH="/opt/${1}/lib"
-  export ARCH=x86_64
-  export CC="x86_64-pc-linux-gnu-gcc"
-  export LD="x86_64-pc-linux-gnu-ld"
-  echo "export PATH=\"/opt/${1}/bin:${PATH}\"" | \
-    sudo tee /etc/profile.d/path.sh >/dev/null
-  sudo chmod +x /etc/profile.d/path.sh
-}
-
-###############################################################################
-function shell() {
-  cp -fv /opt/${2}/build/.config /opt/${2}/source/
-  cp -fv /opt/${2}/build/System.map /opt/${2}/source/
-  cp -fv /opt/${2}/build/Module.symvers /opt/${2}/source/
-  export-vars $2
-  shift 2
-  bash -l $@
-}
-
-###############################################################################
-function compile-module {
-  # Validate
-  if [ -z "${1}" ]; then
-    echo "Use: compile-module "
-    exit 1
-  fi
-  VALID=0
-  while read PLATFORM KVER; do
-    if [ "${PLATFORM}" = "${1}" ]; then
-      VALID=1
-      break
-    fi
-  done "
-    exit 1
-  fi
-  cp -R /input /tmp
-  export-vars ${PLATFORM}
-  export LINUX_SRC="/opt/${PLATFORM}/build"
-  make -C "/tmp/input" dev-v7
-  strip -g "/tmp/input/redpill.ko"
-  mv "/tmp/input/redpill.ko" "/output/redpill-dev.ko"
-  make -C "/tmp/input" clean
-  make -C "/tmp/input" prod-v7
-  strip -g "/tmp/input/redpill.ko"
-  mv "/tmp/input/redpill.ko" "/output/redpill-prod.ko"
-}
-
-###############################################################################
-# function compile-drivers {
-#   while read platform kver; do
-#     SRC_PATH="/opt/${platform}"
-#     echo "Compiling for ${platform}-${kver}"
-#     cd /opt/linux-${kver}/drivers
-#     while read dir; do
-#       if [ -f "${dir}/Makefile" ]; then
-#         echo "Driver `basename ${dir}`"
-#         grep "CONFIG_.*/.*" "${dir}/Makefile" | sed 's/.*\(CONFIG_[^)]*\).*/\1=n/g' > /tmp/env
-#         grep "CONFIG_.*\.o.*" "${dir}/Makefile" | sed 's/.*\(CONFIG_[^)]*\).*/\1=m/g' >> /tmp/env
-#         make -C "${SRC_PATH}" M=$(readlink -f "${dir}") clean
-#         cat /tmp/env | xargs -d '\n' make -C "${SRC_PATH}" M=$(readlink -f "${dir}") modules $@
-#       fi
-#     done < <(find -type d)
-#     DST_PATH="/output/compiled-mods/${platform}-${kver}"
-#     mkdir -p "${DST_PATH}"
-#     while read f; do
-#       strip -g "${f}"
-#       mv "${f}" "${DST_PATH}"
-#     done ()"
-  echo "Commands: bash | shell | compile-module | compile-lkm "
-  exit 1
-fi
-case $1 in
-  bash) shift && bash -l $@ ;;
-  shell) shell $@ ;;
-  compile-module) compile-module $2 ;;
-  compile-lkm) compile-lkm $2 ;;
-  # compile-drivers) compile-drivers ;;
-  *) echo "Command not recognized: $1" ;;
-esac
diff --git a/files/board/arpl/make-img.sh b/files/board/arpl/make-img.sh
index 58fcbc14..c832e4ab 100755
--- a/files/board/arpl/make-img.sh
+++ b/files/board/arpl/make-img.sh
@@ -25,19 +25,19 @@ echo -e "n\np\n\n\n+50M\na\nt\n\n0b\nn\np\n\n\n+50M\nn\np\n\n\n\nw" | fdisk "${I
 sudo umount "${BINARIES_DIR}/p1" 2>/dev/null || true
 sudo umount "${BINARIES_DIR}/p3" 2>/dev/null || true
 # Force unsetup of loop device
-sudo losetup -d "/dev/loop8" 2>/dev/null || true
-# Setup the loop8 loop device
-sudo losetup -P "/dev/loop8" "${IMAGE_FILE}"
+LOOPX=`sudo losetup -f`
+# Setup the ${LOOPX} loop device
+sudo losetup -P "${LOOPX}" "${IMAGE_FILE}"
 # Format partitions
-sudo mkdosfs -F32 -n ARPL1 "/dev/loop8p1" >/dev/null 2>&1
-sudo mkfs.ext2 -F -F -L ARPL2 "/dev/loop8p2" >/dev/null 2>&1
-sudo mkfs.ext4 -F -F -L ARPL3 "/dev/loop8p3" >/dev/null 2>&1
+sudo mkdosfs -F32 -n ARPL1 "${LOOPX}p1" >/dev/null 2>&1
+sudo mkfs.ext2 -F -F -L ARPL2 "${LOOPX}p2" >/dev/null 2>&1
+sudo mkfs.ext4 -F -F -L ARPL3 "${LOOPX}p3" >/dev/null 2>&1
 echo "Mounting image file"
 mkdir -p "${BINARIES_DIR}/p1"
 mkdir -p "${BINARIES_DIR}/p3"
-sudo mount /dev/loop8p1 "${BINARIES_DIR}/p1"
-sudo mount /dev/loop8p3 "${BINARIES_DIR}/p3"
+sudo mount ${LOOPX}p1 "${BINARIES_DIR}/p1"
+sudo mount ${LOOPX}p3 "${BINARIES_DIR}/p3"
 
 echo "Copying files"
 sudo cp "${BINARIES_DIR}/bzImage" "${BINARIES_DIR}/p3/bzImage-arpl"
@@ -52,4 +52,4 @@ sudo umount "${BINARIES_DIR}/p3"
 rmdir "${BINARIES_DIR}/p1"
 rmdir "${BINARIES_DIR}/p3"
 
-sudo losetup --detach /dev/loop8
+sudo losetup --detach ${LOOPX}
diff --git a/img-gen.sh b/img-gen.sh
index 912207a6..ae289272 100755
--- a/img-gen.sh
+++ b/img-gen.sh
@@ -6,38 +6,67 @@ if [ ! -d .buildroot ]; then
   echo "Downloading buildroot"
   git clone --single-branch -b 2022.02 https://github.com/buildroot/buildroot.git .buildroot
 fi
-# Remove old files
-rm -rf ".buildroot/output/target/opt/arpl"
-rm -rf ".buildroot/board/arpl/overlayfs"
-rm -rf ".buildroot/board/arpl/p1"
-rm -rf ".buildroot/board/arpl/p3"
+
+echo "Convert po2mo"
+if [ -d files/board/arpl/overlayfs/opt/arpl/lang ]; then
+  for P in "`ls files/board/arpl/overlayfs/opt/arpl/lang/*.po`"
+  do
+    # Use the msgfmt command to compile the .po file into a binary .mo file
+    msgfmt ${P} -o ${P/.po/.mo}
+  done
+fi
+
+# Get extractor
+echo "Getting syno extractor"
+TOOL_PATH="files/board/arpl/p3/extractor"
+CACHE_DIR="/tmp/pat"
+rm -rf "${TOOL_PATH}"
+mkdir -p "${TOOL_PATH}"
+rm -rf "${CACHE_DIR}"
+mkdir -p "${CACHE_DIR}"
+OLDPAT_URL="https://global.download.synology.com/download/DSM/release/7.0.1/42218/DSM_DS3622xs%2B_42218.pat"
+OLDPAT_FILE="DSM_DS3622xs+_42218.pat"
+STATUS=`curl -# -w "%{http_code}" -L "${OLDPAT_URL}" -o "${CACHE_DIR}/${OLDPAT_FILE}"`
+if [ $? -ne 0 -o ${STATUS} -ne 200 ]; then
+  echo "[E] DSM_DS3622xs%2B_42218.pat download error!"
+  rm -rf ${CACHE_DIR}
+  exit 1
+fi
+
+mkdir -p "${CACHE_DIR}/ramdisk"
+tar -C "${CACHE_DIR}/ramdisk/" -xf "${CACHE_DIR}/${OLDPAT_FILE}" rd.gz 2>&1
+if [ $? -ne 0 ]; then
+  echo "[E] extractor rd.gz error!"
+  rm -rf ${CACHE_DIR}
+  exit 1
+fi
+(cd "${CACHE_DIR}/ramdisk"; xz -dc < rd.gz | cpio -idm) >/dev/null 2>&1 || true
+
+# Copy only necessary files
+for f in libcurl.so.4 libmbedcrypto.so.5 libmbedtls.so.13 libmbedx509.so.1 libmsgpackc.so.2 libsodium.so libsynocodesign-ng-virtual-junior-wins.so.7; do
+  cp "${CACHE_DIR}/ramdisk/usr/lib/${f}" "${TOOL_PATH}"
+done
+cp "${CACHE_DIR}/ramdisk/usr/syno/bin/scemd" "${TOOL_PATH}/syno_extract_system_patch"
+rm -rf ${CACHE_DIR}
 
 # Get latest LKMs
 echo "Getting latest LKMs"
-if [ `ls ../redpill-lkm/output | wc -l` -eq 0 ]; then
-  echo " Downloading from github"
-  TAG=`curl -s https://api.github.com/repos/fbelavenuto/redpill-lkm/releases/latest | grep "tag_name" | awk '{print substr($2, 2, length($2)-3)}'`
-  curl -L "https://github.com/fbelavenuto/redpill-lkm/releases/download/${TAG}/rp-lkms.zip" -o /tmp/rp-lkms.zip
-  rm -rf files/board/arpl/p3/lkms/*
-  unzip /tmp/rp-lkms.zip -d files/board/arpl/p3/lkms
-else
-  echo " Copying from ../redpill-lkm/output"
-  rm -rf files/board/arpl/p3/lkms/*
-  cp -f ../redpill-lkm/output/* files/board/arpl/p3/lkms
-fi
+echo " Downloading LKMs from github"
+TAG=`curl -s https://api.github.com/repos/wjz304/redpill-lkm/releases/latest | grep "tag_name" | awk '{print substr($2, 2, length($2)-3)}'`
+curl -L "https://github.com/wjz304/redpill-lkm/releases/download/${TAG}/rp-lkms.zip" -o /tmp/rp-lkms.zip
+rm -rf files/board/arpl/p3/lkms/*
+unzip /tmp/rp-lkms.zip -d files/board/arpl/p3/lkms
+
 # Get latest addons and install them
 echo "Getting latest Addons"
 rm -Rf /tmp/addons
 mkdir -p /tmp/addons
-if [ -d ../arpl-addons ]; then
-  cp ../arpl-addons/*.addon /tmp/addons/
-else
-  TAG=`curl -s https://api.github.com/repos/fbelavenuto/arpl-addons/releases/latest | grep "tag_name" | awk '{print substr($2, 2, length($2)-3)}'`
-  curl -L "https://github.com/fbelavenuto/arpl-addons/releases/download/${TAG}/addons.zip" -o /tmp/addons.zip
-  rm -rf /tmp/addons
-  unzip /tmp/addons.zip -d /tmp/addons
-fi
+echo " Downloading Addons from github"
+TAG=`curl -s https://api.github.com/repos/wjz304/arpl-addons/releases/latest | grep "tag_name" | awk '{print substr($2, 2, length($2)-3)}'`
+curl -L "https://github.com/wjz304/arpl-addons/releases/download/${TAG}/addons.zip" -o /tmp/addons.zip
+rm -rf /tmp/addons
+unzip /tmp/addons.zip -d /tmp/addons
 DEST_PATH="files/board/arpl/p3/addons"
 echo "Installing addons to ${DEST_PATH}"
 for PKG in `ls /tmp/addons/*.addon`; do
@@ -49,23 +78,20 @@ done
 
 # Get latest modules
 echo "Getting latest modules"
+echo " Downloading Modules from github"
 MODULES_DIR="${PWD}/files/board/arpl/p3/modules"
-if [ -d ../arpl-modules ]; then
-  cd ../arpl-modules
-  for D in `ls -d *-*`; do
-    echo "${D}"
-    (cd ${D} && tar caf "${MODULES_DIR}/${D}.tgz" *.ko)
-  done
-  (cd firmware && tar caf "${MODULES_DIR}/firmware.tgz" *)
-  cd -
-else
-  TAG=`curl -s https://api.github.com/repos/fbelavenuto/arpl-modules/releases/latest | grep "tag_name" | awk '{print substr($2, 2, length($2)-3)}'`
-  while read PLATFORM KVER; do
-    FILE="${PLATFORM}-${KVER}"
-    curl -L "https://github.com/fbelavenuto/arpl-modules/releases/download/${TAG}/${FILE}.tgz" -o "${MODULES_DIR}/${FILE}.tgz"
-  done < PLATFORMS
-  curl -L "https://github.com/fbelavenuto/arpl-modules/releases/download/${TAG}/firmware.tgz" -o "${MODULES_DIR}/firmware.tgz"
-fi
+
+TAG=`curl -s https://api.github.com/repos/wjz304/arpl-modules/releases/latest | grep "tag_name" | awk '{print substr($2, 2, length($2)-3)}'`
+curl -L "https://github.com/wjz304/arpl-modules/releases/download/${TAG}/modules.zip" -o "/tmp/modules.zip"
"/tmp/modules.zip" +rm -rf "${MODULES_DIR}/"* +unzip /tmp/modules.zip -d "${MODULES_DIR}" + + +# Remove old files +rm -rf ".buildroot/output/target/opt/arpl" +rm -rf ".buildroot/board/arpl/overlayfs" +rm -rf ".buildroot/board/arpl/p1" +rm -rf ".buildroot/board/arpl/p3" # Copy files echo "Copying files" @@ -85,9 +111,9 @@ qemu-img convert -O vmdk arpl.img arpl-dyn.vmdk qemu-img convert -O vmdk -o adapter_type=lsilogic arpl.img -o subformat=monolithicFlat arpl.vmdk [ -x test.sh ] && ./test.sh rm -f *.zip -zip -9 "arpl-${VERSION}.img.zip" arpl.img -zip -9 "arpl-${VERSION}.vmdk-dyn.zip" arpl-dyn.vmdk -zip -9 "arpl-${VERSION}.vmdk-flat.zip" arpl.vmdk arpl-flat.vmdk +zip -9 "arpl-i18n-${VERSION}.img.zip" arpl.img +zip -9 "arpl-i18n-${VERSION}.vmdk-dyn.zip" arpl-dyn.vmdk +zip -9 "arpl-i18n-${VERSION}.vmdk-flat.zip" arpl.vmdk arpl-flat.vmdk sha256sum update-list.yml > sha256sum zip -9j update.zip update-list.yml while read F; do diff --git a/make_rsss.sh b/make_rsss.sh deleted file mode 100755 index 2a5821d9..00000000 --- a/make_rsss.sh +++ /dev/null @@ -1,91 +0,0 @@ -#!/usr/bin/env bash - -set -e - -MODEL_CONFIG_PATH="./files/board/arpl/overlayfs/opt/arpl/model-configs" - -RELEASE="7.0.1" -BUILDNUMBER="42218" -EXTRA="" - -function readConfigKey() { - RESULT=`yq eval '.'${1}' | explode(.)' "${2}"` - [ "${RESULT}" == "null" ] && echo "" || echo ${RESULT} -} -function readModelKey() { - readConfigKey "${2}" "${MODEL_CONFIG_PATH}/${1}.yml" -} - -# JSON -cat < - DSM ${RELEASE}-${BUILDNUMBER} - ${RELEASE:0:1} - ${RELEASE:2:1} - ${RELEASE:4:1} - ${BUILDNUMBER} - 2022/08/01 - 7 - 0 - 0 - 41890 - 2021/06/25 -EOF - -while read M; do - M="`basename ${M}`" - M="${M::-4}" - UNIQUE=`readModelKey "${M}" "unique"` - URL=`readModelKey "${M}" "builds.${BUILDNUMBER}.pat.url"` - HASH=`readModelKey "${M}" "builds.${BUILDNUMBER}.pat.md5-hash"` - cat < - ${UNIQUE} - ${URL} - ${HASH} - -EOF -done < <(find "${MODEL_CONFIG_PATH}" -maxdepth 1 -name \*.yml | sort) - -cat < -EOF diff --git a/new_bn.sh b/new_bn.sh deleted file mode 100755 index 85ea60b7..00000000 --- a/new_bn.sh +++ /dev/null @@ -1,77 +0,0 @@ -#!/usr/bin/env bash - -# Is necessary test the patches - -set -e - -declare -A KVERS -KVERS["DS918+"]="4.4.180" -KVERS["DS920+"]="4.4.180" -KVERS["DS923+"]="4.4.180" -KVERS["DS1520+"]="4.4.180" -KVERS["DS1621+"]="4.4.180" -KVERS["DS2422+"]="4.4.180" -KVERS["DS3615xs"]="3.10.108" -KVERS["DS3617xs"]="4.4.180" -KVERS["DS3622xs+"]="4.4.180" -KVERS["DVA1622"]="4.4.180" -KVERS["DVA3219"]="4.4.180" -KVERS["DVA3221"]="4.4.180" -KVERS["FS2500"]="4.4.180" -KVERS["RS4021xs+"]="4.4.180" -RELEASE="7.1.1" -BUILDNUMBER="42962" -EXTRA="" - -for MODEL in DS918+ DS920+ DS923+ DS1520+ DS1621+ DS2422+ DS3615xs DS3617xs DS3622xs+ DVA1622 DVA3221 DVA3219 FS2500 RS4021xs+; do - MODEL_CODED=`echo ${MODEL} | sed 's/+/%2B/g'` - URL="https://global.download.synology.com/download/DSM/release/${RELEASE}/${BUILDNUMBER}${EXTRA}/DSM_${MODEL_CODED}_${BUILDNUMBER}.pat" - #URL="https://archive.synology.com/download/Os/DSM/${RELEASE}-${BUILDNUMBER}/DSM_${MODEL_CODED}_${BUILDNUMBER}.pat" - FILENAME="${MODEL}-${BUILDNUMBER}.pat" - FILEPATH="/tmp/${FILENAME}" - echo -n "Checking ${MODEL}... " - if [ -f ${FILEPATH} ]; then - echo "cached" - else - echo "no cached, downloading..." 
-  fi
-  STATUS=`curl --progress-bar -o ${FILEPATH} -w "%{http_code}" -L "${URL}"`
-  if [ ${STATUS} -ne 200 ]; then
-    echo "error: HTTP status = ${STATUS}"
-    rm -f ${FILEPATH}
-    continue
-  fi
-  echo "Calculating md5:"
-  PAT_MD5=`md5sum ${FILEPATH} | awk '{print$1}'`
-  echo "Calculating sha256:"
-  sudo rm -rf /tmp/extracted
-  docker run --rm -it -v /tmp:/data syno-extractor /data/${FILENAME} /data/extracted
-  PAT_CS=`sha256sum ${FILEPATH} | awk '{print$1}'`
-  ZIMAGE_CS=`sha256sum /tmp/extracted/zImage | awk '{print$1}'`
-  RD_CS=`sha256sum /tmp/extracted/rd.gz | awk '{print$1}'`
-  sudo rm -rf /tmp/extracted
-  cat <
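
Note on the loop-device handling in make-img.sh above: the build no longer hard-codes /dev/loop8 but asks losetup for a free device before attaching the image. Below is a minimal sketch of the same idea, assuming util-linux losetup and a hypothetical arpl.img path; using --show lets losetup pick and report the device in a single step rather than querying -f first and attaching afterwards.

#!/usr/bin/env bash
# Sketch only (not project code): attach an image to a free loop device and capture its name.
set -e
IMAGE_FILE="arpl.img"                               # hypothetical image path
LOOPX=$(sudo losetup -P -f --show "${IMAGE_FILE}")  # prints e.g. /dev/loop3
echo "Image attached to ${LOOPX}"
# Partitions are then available as ${LOOPX}p1, ${LOOPX}p2, ${LOOPX}p3 for mkfs/mount.
sudo losetup --detach "${LOOPX}"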
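
Note on the po2mo step added to img-gen.sh above: every GNU gettext .po catalog under opt/arpl/lang is compiled to a binary .mo file with msgfmt before the image is assembled. A minimal sketch under the same assumption (msgfmt from gettext is installed), iterating with a glob so each catalog is handled on its own:

#!/usr/bin/env bash
# Sketch only (not project code): compile each .po catalog into its .mo counterpart.
LANG_DIR="files/board/arpl/overlayfs/opt/arpl/lang"
for P in "${LANG_DIR}"/*.po; do
  [ -e "${P}" ] || continue         # skip when the glob matches nothing
  msgfmt "${P}" -o "${P%.po}.mo"    # e.g. zh_CN.po -> zh_CN.mo
done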