|
@@ -1,4 +1,4 @@
|
|
|
-name: Timely assets update
|
|
|
+name: Scheduled assets update
|
|
|
|
|
|
# NOTE: This Github Actions is required by other actions, for preparing other packaging assets in a
|
|
|
# routine manner, for example: GeoIP/GeoSite.
|
|
@@ -8,19 +8,20 @@ name: Timely assets update
|
|
|
on:
|
|
|
workflow_dispatch:
|
|
|
schedule:
|
|
|
- # Update assets on every hour (xx:30)
|
|
|
- - cron: '30 * * * *'
|
|
|
+ # Update GeoData every day (22:30 UTC)
|
|
|
+ - cron: '30 22 * * *'
|
|
|
push:
|
|
|
# Prevent triggering update request storm
|
|
|
paths:
|
|
|
- - ".github/workflows/hourly-prepare.yml"
|
|
|
+ - ".github/workflows/scheduled-assets-update.yml"
|
|
|
pull_request:
|
|
|
# Prevent triggering update request storm
|
|
|
paths:
|
|
|
- - ".github/workflows/hourly-prepare.yml"
|
|
|
+ - ".github/workflows/scheduled-assets-update.yml"
|
|
|
|
|
|
jobs:
|
|
|
geodat:
|
|
|
+ if: github.event_name == 'workflow_dispatch' || github.event.schedule == '30 22 * * *' || github.event_name == 'push' || github.event_name == 'pull_request'
|
|
|
runs-on: ubuntu-latest
|
|
|
steps:
|
|
|
- name: Restore Geodat Cache
|
|
@@ -38,18 +39,18 @@ jobs:
|
|
|
max_attempts: 60
|
|
|
command: |
|
|
|
[ -d 'resources' ] || mkdir resources
|
|
|
- LIST=('v2ray-rules-dat geoip geoip' 'v2ray-rules-dat geosite geosite')
|
|
|
+ LIST=('Loyalsoldier v2ray-rules-dat geoip geoip' 'Loyalsoldier v2ray-rules-dat geosite geosite')
|
|
|
for i in "${LIST[@]}"
|
|
|
do
|
|
|
- INFO=($(echo $i | awk 'BEGIN{FS=" ";OFS=" "} {print $1,$2,$3}'))
|
|
|
- FILE_NAME="${INFO[2]}.dat"
|
|
|
+ INFO=($(echo $i | awk 'BEGIN{FS=" ";OFS=" "} {print $1,$2,$3,$4}'))
|
|
|
+ FILE_NAME="${INFO[3]}.dat"
|
|
|
echo -e "Verifying HASH key..."
|
|
|
- HASH="$(curl -sL "https://raw.githubusercontent.com/Loyalsoldier/${INFO[0]}/release/${INFO[1]}.dat.sha256sum" | awk -F ' ' '{print $1}')"
|
|
|
+ HASH="$(curl -sL "https://raw.githubusercontent.com/${INFO[0]}/${INFO[1]}/release/${INFO[2]}.dat.sha256sum" | awk -F ' ' '{print $1}')"
|
|
|
if [ -s "./resources/${FILE_NAME}" ] && [ "$(sha256sum "./resources/${FILE_NAME}" | awk -F ' ' '{print $1}')" == "${HASH}" ]; then
|
|
|
continue
|
|
|
else
|
|
|
- echo -e "Downloading https://raw.githubusercontent.com/Loyalsoldier/${INFO[0]}/release/${INFO[1]}.dat..."
|
|
|
- curl -L "https://raw.githubusercontent.com/Loyalsoldier/${INFO[0]}/release/${INFO[1]}.dat" -o ./resources/${FILE_NAME}
|
|
|
+ echo -e "Downloading https://raw.githubusercontent.com/${INFO[0]}/${INFO[1]}/release/${INFO[2]}.dat..."
|
|
|
+ curl -L "https://raw.githubusercontent.com/${INFO[0]}/${INFO[1]}/release/${INFO[2]}.dat" -o ./resources/${FILE_NAME}
|
|
|
echo -e "Verifying HASH key..."
|
|
|
[ "$(sha256sum "./resources/${FILE_NAME}" | awk -F ' ' '{print $1}')" == "${HASH}" ] || { echo -e "The HASH key of ${FILE_NAME} does not match cloud one."; exit 1; }
|
|
|
echo "unhit=true" >> $GITHUB_OUTPUT
|