This commit is contained in:
carlospolop
2025-10-04 01:12:52 +02:00
parent 9bb5984b1a
commit d4b5bd37da
3 changed files with 62 additions and 10 deletions

View File

@@ -60,14 +60,22 @@ jobs:
# Clone the searchindex repo # Clone the searchindex repo
git clone https://x-access-token:${TOKEN}@github.com/${TARGET_REPO}.git /tmp/searchindex-repo git clone https://x-access-token:${TOKEN}@github.com/${TARGET_REPO}.git /tmp/searchindex-repo
# Copy the searchindex file # Copy and compress the searchindex file
cp "$ASSET" "/tmp/searchindex-repo/${FILENAME}" cp "$ASSET" "/tmp/searchindex-repo/${FILENAME}"
gzip -9 -k -f "$ASSET"
cp "${ASSET}.gz" "/tmp/searchindex-repo/${FILENAME}.gz"
# Show compression stats
ORIGINAL_SIZE=$(wc -c < "$ASSET")
COMPRESSED_SIZE=$(wc -c < "${ASSET}.gz")
RATIO=$(awk "BEGIN {printf \"%.1f\", ($COMPRESSED_SIZE / $ORIGINAL_SIZE) * 100}")
echo "Compression: ${ORIGINAL_SIZE} bytes -> ${COMPRESSED_SIZE} bytes (${RATIO}%)"
# Commit and push # Commit and push
cd /tmp/searchindex-repo cd /tmp/searchindex-repo
git config user.name "GitHub Actions" git config user.name "GitHub Actions"
git config user.email "github-actions@github.com" git config user.email "github-actions@github.com"
git add "${FILENAME}" git add "${FILENAME}" "${FILENAME}.gz"
git commit -m "Update ${FILENAME} from hacktricks-cloud build" || echo "No changes to commit" git commit -m "Update ${FILENAME} from hacktricks-cloud build" || echo "No changes to commit"
git push || echo "No changes to push" git push || echo "No changes to push"

View File

@@ -169,14 +169,22 @@ jobs:
# Clone the searchindex repo # Clone the searchindex repo
git clone https://x-access-token:${TOKEN}@github.com/${TARGET_REPO}.git /tmp/searchindex-repo git clone https://x-access-token:${TOKEN}@github.com/${TARGET_REPO}.git /tmp/searchindex-repo
# Copy the searchindex file # Copy and compress the searchindex file
cp "$ASSET" "/tmp/searchindex-repo/${FILENAME}" cp "$ASSET" "/tmp/searchindex-repo/${FILENAME}"
gzip -9 -k -f "$ASSET"
cp "${ASSET}.gz" "/tmp/searchindex-repo/${FILENAME}.gz"
# Show compression stats
ORIGINAL_SIZE=$(wc -c < "$ASSET")
COMPRESSED_SIZE=$(wc -c < "${ASSET}.gz")
RATIO=$(awk "BEGIN {printf \"%.1f\", ($COMPRESSED_SIZE / $ORIGINAL_SIZE) * 100}")
echo "Compression: ${ORIGINAL_SIZE} bytes -> ${COMPRESSED_SIZE} bytes (${RATIO}%)"
# Commit and push # Commit and push
cd /tmp/searchindex-repo cd /tmp/searchindex-repo
git config user.name "GitHub Actions" git config user.name "GitHub Actions"
git config user.email "github-actions@github.com" git config user.email "github-actions@github.com"
git add "${FILENAME}" git add "${FILENAME}" "${FILENAME}.gz"
git commit -m "Update ${FILENAME} from hacktricks-cloud build" || echo "No changes to commit" git commit -m "Update ${FILENAME} from hacktricks-cloud build" || echo "No changes to commit"
git push || echo "No changes to push" git push || echo "No changes to push"

View File

@@ -21,16 +21,52 @@
try { importScripts('https://cdn.jsdelivr.net/npm/elasticlunr@0.9.5/elasticlunr.min.js'); } try { importScripts('https://cdn.jsdelivr.net/npm/elasticlunr@0.9.5/elasticlunr.min.js'); }
catch { importScripts(abs('/elasticlunr.min.js')); } catch { importScripts(abs('/elasticlunr.min.js')); }
/* 2 — load a single index (remote → local) */ /* 2 — decompress gzip data */
/**
 * Inflate a gzip payload into a UTF-8 string.
 * Prefers the browser-native DecompressionStream; otherwise lazily pulls
 * pako from the CDN (worker context, hence importScripts).
 * @param {ArrayBuffer} arrayBuffer - raw gzip bytes as fetched
 * @returns {Promise<string>} the decoded, decompressed text
 * @throws {Error} when neither DecompressionStream nor pako is available
 */
async function decompressGzip(arrayBuffer){
  if(typeof DecompressionStream === 'undefined'){
    /* Legacy path: no native API, so fall back to the pako library */
    if(typeof pako === 'undefined'){
      try { importScripts('https://cdn.jsdelivr.net/npm/pako@2.1.0/dist/pako.min.js'); }
      catch(e){ throw new Error('pako library required for decompression: '+e); }
    }
    return pako.ungzip(new Uint8Array(arrayBuffer), {to: 'string'});
  }
  /* Modern path: stream the buffer through the native gzip inflater */
  const inflated = new Response(arrayBuffer).body
    .pipeThrough(new DecompressionStream('gzip'));
  const bytes = await new Response(inflated).arrayBuffer();
  return new TextDecoder().decode(bytes);
}
/* 3 — load a single index (remote → local) */
async function loadIndex(remote, local, isCloud=false){ async function loadIndex(remote, local, isCloud=false){
let rawLoaded = false; let rawLoaded = false;
if(remote){ if(remote){
/* Try compressed version first */
try { try {
const r = await fetch(remote,{mode:'cors'}); const gzUrl = remote + '.gz';
if (!r.ok) throw new Error('HTTP '+r.status); const r = await fetch(gzUrl,{mode:'cors'});
importScripts(URL.createObjectURL(new Blob([await r.text()],{type:'application/javascript'}))); if (r.ok) {
rawLoaded = true; const compressed = await r.arrayBuffer();
} catch(e){ console.warn('remote',remote,'failed →',e); } const text = await decompressGzip(compressed);
importScripts(URL.createObjectURL(new Blob([text],{type:'application/javascript'})));
rawLoaded = true;
console.log('Loaded compressed',gzUrl);
}
} catch(e){ console.warn('compressed',remote+'.gz','failed →',e); }
/* Fall back to uncompressed if compressed failed */
if(!rawLoaded){
try {
const r = await fetch(remote,{mode:'cors'});
if (!r.ok) throw new Error('HTTP '+r.status);
importScripts(URL.createObjectURL(new Blob([await r.text()],{type:'application/javascript'})));
rawLoaded = true;
console.log('Loaded uncompressed',remote);
} catch(e){ console.warn('remote',remote,'failed →',e); }
}
} }
if(!rawLoaded && local){ if(!rawLoaded && local){
try { importScripts(abs(local)); rawLoaded = true; } try { importScripts(abs(local)); rawLoaded = true; }