-
Notifications
You must be signed in to change notification settings - Fork 316
184 lines (152 loc) · 6.9 KB
/
build_master.yml
File metadata and controls
184 lines (152 loc) · 6.9 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
# Workflow: builds the English mdBook for HackTricks Cloud on every push to
# master, publishes the (gzipped + XOR-obfuscated) search index to a separate
# repo, then syncs the rendered site and SEO assets to S3 / CloudFront.
name: Build Master
on:
  push:
    branches:
      - master
    paths-ignore:
      - '.gitignore'
      - 'book/**'
  workflow_dispatch:
# Serialize runs: only one master build/deploy at a time.
concurrency: build_master
permissions:
  packages: write
  id-token: write   # required for the AWS OIDC role assumption below
  contents: write
jobs:
  run-translation:
    runs-on: ubuntu-latest
    container:
      image: ghcr.io/hacktricks-wiki/hacktricks-cloud/translator-image:latest
    environment: prod
    steps:
      - name: Checkout code
        uses: actions/checkout@v4
        with:
          fetch-depth: 0 # Needed to download everything to be able to access the master & language branches
      # Build the mdBook
      - name: Build mdBook
        run: MDBOOK_BOOK__LANGUAGE=en mdbook build || (echo "Error logs" && cat hacktricks-preprocessor-error.log && echo "" && echo "" && echo "Debug logs" && (cat hacktricks-preprocessor.log | tail -n 20) && exit 1)
      - name: Post-process SEO artifacts
        run: |
          python3 scripts/seo_postprocess.py pages \
            --book-dir ./book \
            --site-url https://cloud.hacktricks.wiki \
            --lang en \
            --default-lang en \
            --site-name "HackTricks Cloud"
      - name: Push search index to hacktricks-searchindex repo
        shell: bash
        env:
          PAT_TOKEN: ${{ secrets.PAT_TOKEN }}
        run: |
          set -euo pipefail
          ASSET="book/searchindex.js"
          TARGET_REPO="HackTricks-wiki/hacktricks-searchindex"
          FILENAME="searchindex-cloud-en.js"
          if [ ! -f "$ASSET" ]; then
            echo "Expected $ASSET to exist after build" >&2
            exit 1
          fi
          TOKEN="${PAT_TOKEN}"
          if [ -z "$TOKEN" ]; then
            echo "No PAT_TOKEN available" >&2
            exit 1
          fi
          # First, compress the original file (in the build directory)
          cd "${GITHUB_WORKSPACE}"
          gzip -9 -k -f "$ASSET"
          # Show compression stats
          ORIGINAL_SIZE=$(wc -c < "$ASSET")
          COMPRESSED_SIZE=$(wc -c < "${ASSET}.gz")
          RATIO=$(awk "BEGIN {printf \"%.1f\", ($COMPRESSED_SIZE / $ORIGINAL_SIZE) * 100}")
          echo "Compression: ${ORIGINAL_SIZE} bytes -> ${COMPRESSED_SIZE} bytes (${RATIO}%)"
          # XOR encrypt the compressed file
          KEY='Prevent_Online_AVs_From_Flagging_HackTricks_Search_Gzip_As_Malicious_394h7gt8rf9u3rf9g'
          cat > /tmp/xor_encrypt.py << 'EOF'
          import sys
          key = sys.argv[1]
          input_file = sys.argv[2]
          output_file = sys.argv[3]
          with open(input_file, 'rb') as f:
              data = f.read()
          key_bytes = key.encode('utf-8')
          encrypted = bytearray(len(data))
          for i in range(len(data)):
              encrypted[i] = data[i] ^ key_bytes[i % len(key_bytes)]
          with open(output_file, 'wb') as f:
              f.write(encrypted)
          print(f"Encrypted: {len(data)} bytes")
          EOF
          python3 /tmp/xor_encrypt.py "$KEY" "${ASSET}.gz" "${ASSET}.gz.enc"
          # Rebuild and force-push with retries to handle concurrent updates.
          MAX_RETRIES=20
          RETRY_COUNT=0
          while [ $RETRY_COUNT -lt $MAX_RETRIES ]; do
            RETRY_COUNT=$((RETRY_COUNT + 1))
            echo "Push attempt ${RETRY_COUNT}/${MAX_RETRIES}"
            cd "${GITHUB_WORKSPACE}"
            rm -rf /tmp/searchindex-repo /tmp/searchindex-backup
            git clone https://x-access-token:${TOKEN}@github.com/${TARGET_REPO}.git /tmp/searchindex-repo
            cd /tmp/searchindex-repo
            git config user.name "GitHub Actions"
            git config user.email "github-actions@github.com"
            # Save all current files from master branch to temp directory.
            mkdir -p /tmp/searchindex-backup
            cp -r * /tmp/searchindex-backup/ 2>/dev/null || true
            # Create a fresh orphan branch (no history).
            git checkout --orphan new-main
            # Remove all files from git index (but keep working directory).
            git rm -rf . 2>/dev/null || true
            # Restore all files from backup (keeps all language files).
            cp -r /tmp/searchindex-backup/* . 2>/dev/null || true
            # Update English searchindex artifact.
            cp "${GITHUB_WORKSPACE}/${ASSET}.gz.enc" "${FILENAME}.gz"
            git add -A
            TIMESTAMP=$(date -u +"%Y-%m-%d %H:%M:%S UTC")
            git commit -m "Update searchindex files - ${TIMESTAMP}" --allow-empty
            if git push -f origin new-main:master 2>&1 | tee /tmp/push_output.txt; then
              echo "Successfully reset repository history and pushed all searchindex files"
              break
            fi
            if [ $RETRY_COUNT -lt $MAX_RETRIES ]; then
              if grep -q "cannot lock ref 'refs/heads/master'" /tmp/push_output.txt; then
                echo "Concurrent update detected on remote master. Retrying..."
              else
                echo "Force push failed. Retrying..."
              fi
              sleep 1
            else
              echo "Failed to push after ${MAX_RETRIES} attempts"
              exit 1
            fi
          done
      # Log in to AWS (OIDC role assumption; needs the id-token permission above)
      - name: Configure AWS credentials using OIDC
        uses: aws-actions/configure-aws-credentials@v3
        with:
          role-to-assume: ${{ secrets.AWS_ROLE_ARN }}
          aws-region: us-east-1
      # Sync the build to S3
      - name: Sync to S3
        run: aws s3 sync ./book s3://hacktricks-cloud/en --delete
      - name: Upload root sitemap index
        run: |
          LANGS=$(aws s3api list-objects-v2 --bucket hacktricks-cloud --delimiter / --query 'CommonPrefixes[].Prefix' --output text | tr '\t' '\n' | sed 's:/$::' | grep -E '^[a-z]{2}$' | sort | paste -sd, -)
          if [ -z "$LANGS" ]; then
            LANGS="en"
          fi
          python3 scripts/seo_postprocess.py index --site-url https://cloud.hacktricks.wiki --languages "$LANGS" --output ./sitemap.xml
          aws s3 cp ./sitemap.xml s3://hacktricks-cloud/sitemap.xml --content-type application/xml --cache-control max-age=300
      - name: Upload root ads.txt
        run: |
          aws s3 cp ./ads.txt s3://hacktricks-cloud/ads.txt --content-type text/plain --cache-control max-age=300
          aws s3 cp ./ads.txt s3://hacktricks-cloud/en/ads.txt --content-type text/plain --cache-control max-age=300
      - name: Upload root robots.txt
        run: |
          aws s3 cp ./src/robots.txt s3://hacktricks-cloud/robots.txt --content-type text/plain --cache-control max-age=300
          aws s3 cp ./src/robots.txt s3://hacktricks-cloud/en/robots.txt --content-type text/plain --cache-control max-age=300
      - name: Invalidate CloudFront HTML and SEO assets
        run: |
          aws cloudfront create-invalidation \
            --distribution-id "${{ secrets.CLOUDFRONT_DISTRIBUTION_ID }}" \
            --paths "/en/*" "/robots.txt" "/en/robots.txt" "/sitemap.xml" "/en/sitemap.xml"