This commit is contained in:
Nathan Rashleigh 2026-02-09 22:34:25 +11:00
parent 43883f3841
commit cf65959148
6 changed files with 610 additions and 2 deletions

2
.gitignore vendored
View File

@ -4,6 +4,8 @@
data/ data/
src/mapsdl/credentials
surf.sublime-workspace surf.sublime-workspace

View File

@ -14,4 +14,17 @@ cssds-run:
docker compose run -u root -it cssds bash docker compose run -u root -it cssds bash
cssds-exec: cssds-exec:
docker compose exec -u root -it cssds bash docker compose exec -u root -it cssds bash
# Map downloader commands
mapsdl-dry:
cd src/mapsdl && deno run --allow-net --allow-read --allow-write --allow-env --allow-sys mapsdl.ts --dry-run
mapsdl-dry-verbose:
cd src/mapsdl && deno run --allow-net --allow-read --allow-write --allow-env --allow-sys mapsdl.ts --dry-run --verbose
mapsdl:
cd src/mapsdl && deno run --allow-net --allow-read --allow-write --allow-env --allow-sys mapsdl.ts
mapsdl-verbose:
cd src/mapsdl && deno run --allow-net --allow-read --allow-write --allow-env --allow-sys mapsdl.ts --verbose

View File

@ -36,5 +36,31 @@ services:
volumes: volumes:
- ./etc/mariadb/docker-entrypoint-initdb.d:/docker-entrypoint-initdb.d - ./etc/mariadb/docker-entrypoint-initdb.d:/docker-entrypoint-initdb.d
mapsdl:
image: registry.ntwl.xyz/megastructure-surf-mapsdl
build:
context: ./src/mapsdl
dockerfile: Dockerfile
volumes:
# Mount credentials (read-only)
- ./src/mapsdl/credentials:/app/credentials:ro
# Mount maps directory (read-write)
- ./data/cssds/cstrike/maps:/maps
environment:
- MAPS_DIR=/maps
- TEMP_DIR=/tmp/mapsdl
- GOOGLE_APPLICATION_CREDENTIALS=/app/credentials/service-account.json
# Don't start automatically with docker-compose up
profiles:
- tools
# Run once and exit
restart: "no"
volumes: volumes:
tf2ds: tf2ds:

31
src/mapsdl/Dockerfile Normal file
View File

@ -0,0 +1,31 @@
FROM denoland/deno:debian-2.1.4
# Install archive extraction tools
RUN apt-get update && apt-get install -y \
unrar \
unzip \
bzip2 \
&& rm -rf /var/lib/apt/lists/*
# Create app directory
WORKDIR /app
# Copy the script
COPY mapsdl.ts .
# Create directories for credentials and data
RUN mkdir -p /app/credentials /maps /tmp/mapsdl
# Cache Deno dependencies
RUN deno cache --allow-scripts=npm:* mapsdl.ts
# Set default environment variables
ENV MAPS_DIR=/maps
ENV TEMP_DIR=/tmp/mapsdl
ENV GOOGLE_APPLICATION_CREDENTIALS=/app/credentials/service-account.json
# Run as non-root user
USER deno
# Default command runs dry-run mode
CMD ["run", "--allow-net", "--allow-read", "--allow-write", "--allow-env", "--allow-sys", "mapsdl.ts", "--dry-run"]

533
src/mapsdl/mapsdl.ts Normal file
View File

@ -0,0 +1,533 @@
// #!/usr/bin/env -S deno run --allow-net --allow-read --allow-write --allow-env --allow-sys
// this script should be run with deno
// use async stdlib fns where possible
// Polyfill Node.js globals for googleapis:
// googleapis expects a Node-style global `process`; Deno only exposes it
// via the node:process compat module, so install it globally if absent.
import process from "node:process";
if (!globalThis.process) {
  globalThis.process = process;
}
import { parse } from "https://deno.land/std@0.210.0/flags/mod.ts";
import { join } from "https://deno.land/std@0.210.0/path/mod.ts";
import { google } from "npm:googleapis@131";
import { GoogleAuth } from "npm:google-auth-library@9";
// Configuration (env vars override the defaults; see Dockerfile/compose)
const CONFIG = {
  // Upstream Shavit surf-timer zone dump; map names are harvested from its INSERTs
  sqlUrl: "https://raw.githubusercontent.com/bhopppp/Shavit-Surf-Timer/master/sql/surfzones.sql",
  // Destination directory for extracted .bsp files
  mapsDir: Deno.env.get("MAPS_DIR") || "/home/ntr/surf_megastructure/data/cssds/cstrike/maps",
  // Scratch space for downloaded archives and extraction
  tempDir: Deno.env.get("TEMP_DIR") || "/tmp/mapsdl",
  // Google service-account key used for read-only Drive access
  serviceAccountPath: Deno.env.get("GOOGLE_APPLICATION_CREDENTIALS") || "./credentials/service-account.json",
  // Drive folders holding the map archives, split alphabetically
  gdriveFolders: {
    aToK: "17QJ-Wzk9eMHKZqX227HkPCg9_Vmrf9h-",
    lToZ: "1f3Oe65BngrSxTPKHAt6MEwK0FTsDbUsO",
  },
  // Max simultaneous downloads per batch
  concurrency: 5,
};
/** Minimal subset of a Drive v3 file resource used by this script. */
interface GDriveFile {
  id: string;
  name: string;
  // Drive v3 reports size as a decimal string, not a number
  size: string;
}
/** Cross-reference of maps required by the SQL dump vs. local / Drive state. */
interface Manifest {
  total: number; // unique surf_* maps referenced by the SQL dump
  found: number; // maps already present locally
  missing: string[]; // required maps with no local .bsp
  notInGDrive: string[]; // missing maps absent from both Drive folders
}
// Step 1: Download and parse SQL file for map names
/**
 * Fetches the surfzones SQL dump from CONFIG.sqlUrl.
 *
 * @returns the raw SQL text
 * @throws Error when the HTTP response is not OK. The numeric status code is
 *         included because `statusText` is often an empty string over HTTP/2,
 *         which previously produced the unhelpful "Failed to download SQL: ".
 */
async function downloadSurfZonesSQL(): Promise<string> {
  console.log(`[DL] Downloading SQL file from ${CONFIG.sqlUrl}...`);
  const response = await fetch(CONFIG.sqlUrl);
  if (!response.ok) {
    throw new Error(`Failed to download SQL: ${response.status} ${response.statusText}`);
  }
  return await response.text();
}
/**
 * Pulls every distinct surf map name out of the surfzones SQL dump.
 *
 * Scans INSERT ... VALUES (...) statements (single- and multi-row forms)
 * for the `mapzones`/`zones` table and keeps every quoted string literal
 * that starts with "surf_".
 *
 * @param sqlContent raw SQL text
 * @returns set of unique map names
 */
function extractMapNamesFromSQL(sqlContent: string): Set<string> {
  console.log("[>>] Extracting map names from SQL...");
  const names = new Set<string>();
  // One match per INSERT statement; capture group 1 is the VALUES payload.
  const insertPattern = /INSERT INTO\s+`?(?:map)?zones`?\s+.*?VALUES\s*\((.*?)\);/gis;
  let statement: RegExpExecArray | null;
  while ((statement = insertPattern.exec(sqlContent)) !== null) {
    // Walk every single-quoted literal inside the VALUES section.
    const literalPattern = /'([^']+)'/g;
    let literal: RegExpExecArray | null;
    while ((literal = literalPattern.exec(statement[1])) !== null) {
      const value = literal[1];
      if (value.startsWith("surf_")) {
        names.add(value);
      }
    }
  }
  console.log(`[OK] Found ${names.size} unique surf maps in SQL`);
  return names;
}
// Step 2: Check which maps exist locally
/**
 * Scans CONFIG.mapsDir for *.bsp files and returns their base names
 * (extension stripped). Creates the directory when it does not exist yet.
 */
async function getLocalMaps(): Promise<Set<string>> {
  console.log(`[DIR] Scanning local maps directory: ${CONFIG.mapsDir}`);
  const localMaps = new Set<string>();
  try {
    for await (const dirEntry of Deno.readDir(CONFIG.mapsDir)) {
      if (!dirEntry.isFile || !dirEntry.name.endsWith(".bsp")) continue;
      // The set holds bare map names without the .bsp extension.
      localMaps.add(dirEntry.name.replace(/\.bsp$/, ""));
    }
  } catch (error) {
    // A missing directory is recoverable: create it and report zero maps.
    if (!(error instanceof Deno.errors.NotFound)) throw error;
    console.warn(`[!] Maps directory not found: ${CONFIG.mapsDir}`);
    await Deno.mkdir(CONFIG.mapsDir, { recursive: true });
  }
  console.log(`[OK] Found ${localMaps.size} local maps`);
  return localMaps;
}
// Step 3: Build manifest of missing maps
/**
 * Cross-references the required map set against locally present maps.
 *
 * Fix: `found` previously reported `localMaps.size`, i.e. every .bsp on disk
 * including maps the SQL never mentions, so the printed summary did not add
 * up. It now counts only required maps that are actually present, keeping
 * the invariant total === found + missing.length.
 *
 * @param requiredMaps map names referenced by the SQL dump
 * @param localMaps map names present in the maps directory
 * @returns manifest with notInGDrive left empty (populated later by the
 *          Drive cross-reference step)
 */
function buildManifest(
  requiredMaps: Set<string>,
  localMaps: Set<string>,
): Manifest {
  console.log("[STATS] Building manifest of missing maps...");
  const missing: string[] = [];
  for (const mapName of requiredMaps) {
    if (!localMaps.has(mapName)) {
      missing.push(mapName);
    }
  }
  return {
    total: requiredMaps.size,
    found: requiredMaps.size - missing.length, // required maps already on disk
    missing,
    notInGDrive: [], // Will be populated after checking GDrive
  };
}
// Step 4: Google Drive API client with service account
/**
 * Thin read-only wrapper around the Drive v3 API.
 * Call authenticate() before any other method.
 */
class GoogleDriveClient {
  // googleapis ships no Deno-friendly types for this shape; kept as `any`.
  private drive: any;

  /** Builds an authenticated Drive client from the service-account key file. */
  async authenticate(): Promise<void> {
    console.log("[AUTH] Authenticating with Google Drive API...");
    const auth = new GoogleAuth({
      keyFile: CONFIG.serviceAccountPath,
      scopes: ["https://www.googleapis.com/auth/drive.readonly"],
    });
    this.drive = google.drive({ version: "v3", auth });
    console.log("[OK] Authentication successful");
  }

  /** Lists every non-trashed file directly inside `folderId`, following pagination. */
  async listFiles(folderId: string): Promise<GDriveFile[]> {
    const files: GDriveFile[] = [];
    let pageToken: string | null = null;
    do {
      const response = await this.drive.files.list({
        q: `'${folderId}' in parents and trashed=false`,
        fields: "nextPageToken, files(id, name, size)",
        pageSize: 1000,
        pageToken: pageToken || undefined,
      });
      files.push(...(response.data.files || []));
      pageToken = response.data.nextPageToken || null;
    } while (pageToken);
    return files;
  }

  /**
   * Streams a Drive file's content to `destPath`.
   *
   * Fix: the destination is opened with `truncate: true`. Without it,
   * overwriting a pre-existing larger file (e.g. a retried download) leaves
   * stale trailing bytes behind, producing a corrupt archive.
   */
  async downloadFile(fileId: string, destPath: string): Promise<void> {
    const response = await this.drive.files.get(
      { fileId, alt: "media" },
      { responseType: "stream" }
    );
    const file = await Deno.open(destPath, { create: true, write: true, truncate: true });
    // Convert Node.js stream to web stream for Deno
    const webStream = new ReadableStream({
      async start(controller) {
        response.data.on("data", (chunk: Uint8Array) => {
          controller.enqueue(chunk);
        });
        response.data.on("end", () => {
          controller.close();
        });
        response.data.on("error", (err: Error) => {
          controller.error(err);
        });
      },
    });
    // pipeTo closes the writable (and the underlying file) on completion or error.
    await webStream.pipeTo(file.writable);
  }
}
// Step 5: Search Google Drive for missing maps
/**
 * Lists both Drive folders, normalizes archive file names down to bare map
 * names, and cross-references them against manifest.missing.
 *
 * Side effect: appends every missing map with no Drive counterpart to
 * manifest.notInGDrive (mutates the caller's manifest).
 *
 * @param client authenticated Drive client
 * @param manifest manifest built by buildManifest (missing list is read)
 * @returns map of required map name -> downloadable Drive file
 */
async function searchGDriveForMaps(
  client: GoogleDriveClient,
  manifest: Manifest,
): Promise<Map<string, GDriveFile>> {
  console.log("\n[CLOUD] Searching Google Drive folders...");
  // List files from both folders (in parallel; they are independent)
  const [filesAK, filesLZ] = await Promise.all([
    client.listFiles(CONFIG.gdriveFolders.aToK),
    client.listFiles(CONFIG.gdriveFolders.lToZ),
  ]);
  const allFiles = [...filesAK, ...filesLZ];
  console.log(`[OK] Found ${allFiles.length} files in Google Drive`);
  console.log(` - A-K folder: ${filesAK.length} files`);
  console.log(` - L-Z folder: ${filesLZ.length} files`);
  // Build map of normalized names to GDrive files
  const gdriveMap = new Map<string, GDriveFile>();
  for (const file of allFiles) {
    // Normalize: remove archive extensions (.rar, .bz2, .zip, .bsp).
    // ".bsp.bz2" is stripped first so "x.bsp.bz2" becomes "x", not "x.bsp".
    const normalized = file.name
      .replace(/\.bsp\.bz2$/i, "")
      .replace(/\.(rar|bz2|zip|bsp)$/i, "")
      .toLowerCase()
      .trim();
    // Only add if it looks like a map name (starts with surf_)
    if (normalized.startsWith("surf_")) {
      gdriveMap.set(normalized, file);
    }
  }
  // Debug: Show first 5 files from each source
  if (Deno.args.includes("--verbose")) {
    console.log("\n[>>] Debug - First 5 GDrive files (normalized):");
    Array.from(gdriveMap.keys()).slice(0, 5).forEach(name => {
      console.log(` - "${name}"`);
    });
    console.log("\n[>>] Debug - First 5 SQL maps:");
    manifest.missing.slice(0, 5).forEach(name => {
      console.log(` - "${name.toLowerCase().trim()}"`);
    });
  }
  // Check which missing maps are available in GDrive; the lookup is
  // case-insensitive because gdriveMap keys were normalized above.
  const availableMaps = new Map<string, GDriveFile>();
  for (const mapName of manifest.missing) {
    const normalized = mapName.toLowerCase().trim();
    if (gdriveMap.has(normalized)) {
      availableMaps.set(mapName, gdriveMap.get(normalized)!);
    } else {
      manifest.notInGDrive.push(mapName);
    }
  }
  console.log(`\n[STATS] Cross-reference results:`);
  console.log(` [OK] Available to download: ${availableMaps.size} maps`);
  console.log(` [X] Not in Google Drive: ${manifest.notInGDrive.length} maps`);
  if (manifest.notInGDrive.length > 0) {
    console.log(`\n[!] Maps not found in Google Drive (showing first 10):`);
    manifest.notInGDrive.slice(0, 10).forEach(map => {
      console.log(` - ${map}`);
    });
    if (manifest.notInGDrive.length > 10) {
      console.log(` ... and ${manifest.notInGDrive.length - 10} more`);
    }
  }
  return availableMaps;
}
// Step 6: Download and extract maps
/**
 * Downloads each archive, extracts/decompresses it into CONFIG.mapsDir and
 * verifies the resulting BSP. Processes CONFIG.concurrency maps per batch.
 *
 * Fixes:
 *  - catch variables are `unknown` under strict TS; the message is derived
 *    safely instead of assuming an Error instance.
 *  - the plain-.bsp branch used Deno.rename, which fails with EXDEV when
 *    the temp dir and the maps dir are on different filesystems (the Docker
 *    setup bind-mounts /maps while TEMP_DIR lives in /tmp); a copy+remove
 *    fallback handles that case.
 *
 * @returns per-map success/failure counts
 */
async function downloadAndExtractMaps(
  client: GoogleDriveClient,
  mapsToDownload: Map<string, GDriveFile>,
): Promise<{ success: number; failed: number }> {
  console.log(`\n[DL] Downloading ${mapsToDownload.size} maps...`);
  // Create temp directory
  await Deno.mkdir(CONFIG.tempDir, { recursive: true });
  const stats = { success: 0, failed: 0 };
  let completed = 0;
  // Process maps with concurrency control: fixed-size sequential batches.
  const entries = Array.from(mapsToDownload.entries());
  for (let i = 0; i < entries.length; i += CONFIG.concurrency) {
    const batch = entries.slice(i, i + CONFIG.concurrency);
    await Promise.all(
      batch.map(async ([mapName, gdriveFile]) => {
        try {
          completed++;
          console.log(`[${completed}/${mapsToDownload.size}] Downloading ${mapName}...`);
          const archivePath = join(CONFIG.tempDir, gdriveFile.name);
          const bspPath = join(CONFIG.mapsDir, `${mapName}.bsp`);
          // Download archive
          await client.downloadFile(gdriveFile.id, archivePath);
          // Extract based on file type
          if (gdriveFile.name.endsWith(".rar")) {
            await extractRAR(archivePath, mapName, CONFIG.mapsDir);
            await Deno.remove(archivePath); // Cleanup archive
          } else if (gdriveFile.name.endsWith(".bz2")) {
            await decompressBZ2(archivePath, bspPath);
            await Deno.remove(archivePath); // Cleanup archive
          } else if (gdriveFile.name.endsWith(".zip")) {
            await extractZIP(archivePath, mapName, CONFIG.mapsDir);
            await Deno.remove(archivePath); // Cleanup archive
          } else if (gdriveFile.name.endsWith(".bsp")) {
            // Already a BSP — move into place; fall back to copy+remove for
            // cross-device moves (Deno.rename raises EXDEV across mounts).
            try {
              await Deno.rename(archivePath, bspPath);
            } catch {
              await Deno.copyFile(archivePath, bspPath);
              await Deno.remove(archivePath);
            }
          } else {
            throw new Error(`Unknown archive format: ${gdriveFile.name}`);
          }
          // Verify it's a valid BSP file
          await verifyBSP(bspPath);
          stats.success++;
          console.log(` [OK] ${mapName} downloaded and extracted`);
        } catch (error) {
          stats.failed++;
          const message = error instanceof Error ? error.message : String(error);
          console.error(` [X] Failed to download ${mapName}: ${message}`);
        }
      })
    );
  }
  return stats;
}
/**
 * Extracts a .rar archive and moves the matching <mapName>.bsp into destDir.
 * Relies on the system `unrar` binary (installed in the Docker image).
 *
 * Fix: the final move falls back to copy when Deno.rename fails, because
 * rename raises EXDEV across filesystems (temp dir in /tmp vs. the
 * bind-mounted /maps in the container). The temp-dir cleanup removes the
 * copied-from file either way.
 *
 * @throws Error when unrar fails or no matching BSP is found
 */
async function extractRAR(archivePath: string, mapName: string, destDir: string): Promise<void> {
  const tempExtractDir = join(CONFIG.tempDir, `extract_${mapName}`);
  await Deno.mkdir(tempExtractDir, { recursive: true });
  // Use system unrar command to extract ("x" keeps paths, "-o+" overwrites)
  const command = new Deno.Command("unrar", {
    args: ["x", "-o+", archivePath, tempExtractDir],
    stdout: "piped",
    stderr: "piped",
  });
  const { code, stderr } = await command.output();
  if (code !== 0) {
    const errorMsg = new TextDecoder().decode(stderr);
    throw new Error(`unrar failed: ${errorMsg}`);
  }
  // Find the .bsp file in the extracted contents
  const bspFile = await findBSPFile(tempExtractDir, mapName);
  if (!bspFile) {
    throw new Error(`No BSP file found in archive for ${mapName}`);
  }
  // Move the BSP file to the maps directory (cross-device safe)
  const destPath = join(destDir, `${mapName}.bsp`);
  try {
    await Deno.rename(bspFile, destPath);
  } catch {
    await Deno.copyFile(bspFile, destPath);
  }
  // Cleanup temp extraction directory
  await Deno.remove(tempExtractDir, { recursive: true });
}
/**
 * Extracts a .zip archive and moves the matching <mapName>.bsp into destDir.
 * Relies on the system `unzip` binary (installed in the Docker image).
 *
 * Fix: the final move falls back to copy when Deno.rename fails, because
 * rename raises EXDEV across filesystems (temp dir in /tmp vs. the
 * bind-mounted /maps in the container). The temp-dir cleanup removes the
 * copied-from file either way.
 *
 * @throws Error when unzip fails or no matching BSP is found
 */
async function extractZIP(archivePath: string, mapName: string, destDir: string): Promise<void> {
  const tempExtractDir = join(CONFIG.tempDir, `extract_${mapName}`);
  await Deno.mkdir(tempExtractDir, { recursive: true });
  // Use system unzip command ("-o" overwrites without prompting)
  const command = new Deno.Command("unzip", {
    args: ["-o", archivePath, "-d", tempExtractDir],
    stdout: "piped",
    stderr: "piped",
  });
  const { code, stderr } = await command.output();
  if (code !== 0) {
    const errorMsg = new TextDecoder().decode(stderr);
    throw new Error(`unzip failed: ${errorMsg}`);
  }
  // Find the .bsp file
  const bspFile = await findBSPFile(tempExtractDir, mapName);
  if (!bspFile) {
    throw new Error(`No BSP file found in archive for ${mapName}`);
  }
  // Move the BSP file to the maps directory (cross-device safe)
  const destPath = join(destDir, `${mapName}.bsp`);
  try {
    await Deno.rename(bspFile, destPath);
  } catch {
    await Deno.copyFile(bspFile, destPath);
  }
  // Cleanup
  await Deno.remove(tempExtractDir, { recursive: true });
}
/**
 * Decompresses a .bz2 file to outputPath via the system `bunzip2` binary.
 * The decompressed payload is buffered in memory before writing — map files
 * are tens of MB, which is acceptable for this tool.
 *
 * Fix: stderr is now captured and included in the error message instead of
 * reporting only a bare exit code (and leaking bunzip2's stderr to the
 * parent's terminal).
 *
 * @throws Error when bunzip2 exits non-zero
 */
async function decompressBZ2(archivePath: string, outputPath: string): Promise<void> {
  const command = new Deno.Command("bunzip2", {
    args: ["-c", archivePath],
    stdout: "piped",
    stderr: "piped",
  });
  const { code, stdout, stderr } = await command.output();
  if (code !== 0) {
    const errorMsg = new TextDecoder().decode(stderr);
    throw new Error(`bunzip2 failed with exit code ${code}: ${errorMsg}`);
  }
  await Deno.writeFile(outputPath, stdout);
}
/**
 * Recursively searches `dir` for a file named <mapName>.bsp
 * (base name compared case-insensitively) and returns its full path,
 * or null when no match exists anywhere under `dir`.
 */
async function findBSPFile(dir: string, mapName: string): Promise<string | null> {
  const wanted = mapName.toLowerCase();
  for await (const entry of Deno.readDir(dir)) {
    const entryPath = join(dir, entry.name);
    if (entry.isDirectory) {
      // Depth-first descent; first match wins.
      const nested = await findBSPFile(entryPath, mapName);
      if (nested !== null) {
        return nested;
      }
    } else if (entry.isFile && entry.name.endsWith(".bsp")) {
      // Compare base names case-insensitively.
      const base = entry.name.replace(/\.bsp$/i, "").toLowerCase();
      if (base === wanted) {
        return entryPath;
      }
    }
  }
  return null;
}
/**
 * Sanity-checks a downloaded BSP: the Source-engine "VBSP" magic bytes plus
 * a minimum plausible file size.
 *
 * Fix: the result of FsFile.read was ignored. read() may return fewer than
 * 4 bytes (or null at EOF) — e.g. for a truncated/empty download — in which
 * case the old code decoded and compared uninitialized buffer contents.
 *
 * @throws Error when the file is not a plausible BSP
 */
async function verifyBSP(bspPath: string): Promise<void> {
  const file = await Deno.open(bspPath, { read: true });
  try {
    const buffer = new Uint8Array(4);
    const bytesRead = await file.read(buffer);
    if (bytesRead !== 4) {
      throw new Error("BSP file too short to contain a header");
    }
    const signature = new TextDecoder().decode(buffer);
    if (signature !== "VBSP") {
      throw new Error(`Invalid BSP signature: ${signature}`);
    }
    // Check file size (should be at least 1MB for a valid surf map)
    const stat = await Deno.stat(bspPath);
    if (stat.size < 1024 * 1024) {
      throw new Error("BSP file too small - possibly corrupted");
    }
  } finally {
    file.close();
  }
}
// Main execution
/**
 * Orchestrates the pipeline: fetch SQL → scan local maps → build manifest →
 * cross-reference Google Drive → download (unless --dry-run).
 * Exit codes: 0 on success or nothing-to-do, 1 on any failure.
 *
 * Fix: catch variables are `unknown` under strict TS, so the caught value
 * is narrowed to an Error before `.message` / `.stack` are read.
 */
async function main() {
  const args = parse(Deno.args);
  console.log("[*] Surf Map Downloader\n");
  console.log("━".repeat(60));
  try {
    // Step 1: Download and parse SQL
    const sqlContent = await downloadSurfZonesSQL();
    const requiredMaps = extractMapNamesFromSQL(sqlContent);
    // Step 2: Get local maps
    const localMaps = await getLocalMaps();
    // Step 3: Build manifest
    const manifest = buildManifest(requiredMaps, localMaps);
    console.log("\n[INFO] Summary:");
    console.log(` Total maps in SQL: ${manifest.total}`);
    console.log(` Already downloaded: ${manifest.found}`);
    console.log(` Missing: ${manifest.missing.length}`);
    if (manifest.missing.length === 0) {
      console.log("\n[*] All maps are already downloaded!");
      Deno.exit(0);
    }
    // Step 4 & 5: Search Google Drive
    const client = new GoogleDriveClient();
    await client.authenticate();
    const mapsToDownload = await searchGDriveForMaps(client, manifest);
    if (mapsToDownload.size === 0) {
      console.log("\n[!] No maps available to download from Google Drive");
      Deno.exit(0);
    }
    // Dry run check
    if (args["dry-run"]) {
      console.log("\n[END] Dry run complete. Use without --dry-run to download.");
      Deno.exit(0);
    }
    // Step 6: Download maps
    const stats = await downloadAndExtractMaps(client, mapsToDownload);
    // Final report
    console.log("\n" + "━".repeat(60));
    console.log("[OK] Download Complete!\n");
    console.log(`[DL] Successfully downloaded: ${stats.success} maps`);
    console.log(`[X] Failed: ${stats.failed} maps`);
    console.log(`[!] Not in Google Drive: ${manifest.notInGDrive.length} maps`);
    console.log("━".repeat(60));
    // Non-zero exit when any map failed, so cron/CI can detect it.
    Deno.exit(stats.failed > 0 ? 1 : 0);
  } catch (error) {
    // Narrow the unknown catch value before reading Error members.
    const err = error instanceof Error ? error : new Error(String(error));
    console.error(`\n[X] Fatal error: ${err.message}`);
    if (args.verbose) {
      console.error(err.stack);
    }
    Deno.exit(1);
  }
}
// Run if executed directly
if (import.meta.main) {
  main();
}

View File

@ -5,7 +5,10 @@
"path": ".", "path": ".",
}, },
{ {
"path": "C:\\Users\\ntr\\code\\ntwl\\surf" "path": "C:\\Users\\ntr\\code\\ntwl\\surf",
},
{
"path": "/home/ntr/ntwl/surf"
}, },
], ],
} }