Offsec Gauntlet: Echo Response (First Week) - ProtoVault Breach

Question 1: Determine if the leak could have come from the application. Review the database connection string to ensure it is secure. Submit the connection string here.

Solving Method: Read through the source code; the database connection string is defined in app.py.
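The writeup does not reproduce the string itself. As a purely hypothetical illustration of what to look for, a hard-coded connection string in a Flask app usually resembles the snippet below (every value is a placeholder, not the challenge answer):

# Hypothetical illustration only - the real values come from the challenge's app.py.
# A hard-coded database URI like this is what the question asks you to locate.
from flask import Flask
from flask_sqlalchemy import SQLAlchemy

app = Flask(__name__)
app.config["SQLALCHEMY_DATABASE_URI"] = (
    "postgresql://<db_user>:<db_password>@<db_host>:5432/<db_name>"  # placeholders
)
db = SQLAlchemy(app)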
Question 2: Review the other source files. Which one may have leaked the database? Provide the file name.

Solving Method: Use git to review the repository history and look for removed files:

git log --oneline --all
ccd589a (HEAD -> main) Release candidate
1cc71b0 Remove backup scripts
ef917bb Add item details and notes forms
2304052 Add data models
9544195 database migration scripts
503dba9 Add authentication
2719901 UI refactor
b400120 Initial commit: Flask app scaffold
6dcfe41 Release candidate
57b06cb Remove backup scripts
6a65249 Add item details and notes forms
5ad3c06 Add data models
d0aa089 database migration scripts
b461ed0 Add authentication
6106ded UI refactor
1cd3a17 Initial commit: Flask app scaffold
a230bbc Release candidate
8fe1a44 Remove backup scripts
9f257d2 Add item details and notes forms
f22ba8a Add data models
b32d77d database migration scripts
5fe83a8 Add authentication
3654766 UI refactor
444eb3a Initial commit: Flask app scaffold
The "Remove backup scripts" commits stand out, so inspect the most recent one:

git show 1cc71b0
commit 1cc71b0fcfcdbfefc7b59cecd898f13b532acc35
Author: Walter <walter.s@protoguard.local>
Date:   Thu Aug 14 09:32:00 2025 -0500

    Remove backup scripts

diff --git a/app/util/backup_db.py b/app/util/backup_db.py
deleted file mode 100644
index d1c22b6..0000000
--- a/app/util/backup_db.py
+++ /dev/null
@@ -1,71 +0,0 @@
-#!/usr/bin/env python3
-import paramiko
-import boto3
-import codecs
-import os
-from scp import SCPClient
-
-# ==== CONFIGURATION ====
-SSH_HOST = "pgsql_prod_db01.protoguard.local"
-SSH_PORT = 22
-SSH_USER = "dbadmin"
-SSH_KEY = "/home/walter/.ssh/pgsql_key"
-
-DB_NAME = "pgamgt"
-DB_USER = "assetdba"
-BACKUP_FILENAME = "db_backup.sql"
-LOCAL_BACKUP = f"/tmp/{BACKUP_FILENAME}"
-ENC_BACKUP = f"/tmp/{BACKUP_FILENAME}.xyz"
-
-S3_BUCKET = "protoguard-asset-management"
-S3_KEY = "db_backup.xyz"
-S3_REGION = "us-east-2"
-
-
-def create_ssh_client():
-    client = paramiko.SSHClient()
-    client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
-    client.connect(SSH_HOST, port=SSH_PORT, username=SSH_USER, key_filename=SSH_KEY)
-    return client
-
-
-def run_backup():
-    ssh = create_ssh_client()
-    dump_cmd = f"pg_dump -U {DB_USER} {DB_NAME} > /tmp/{BACKUP_FILENAME}"
-    stdin, stdout, stderr = ssh.exec_command(dump_cmd)
-    exit_code = stdout.channel.recv_exit_status()
-    if exit_code != 0:
-        print("[!] Error running pg_dump:", stderr.read().decode())
-        ssh.close()
-        return False
-
-    # Copy file to local
-    with SCPClient(ssh.get_transport()) as scp:
-        scp.get(f"/tmp/{BACKUP_FILENAME}", LOCAL_BACKUP)
-
-    # Remove remote dump
-    ssh.exec_command(f"rm /tmp/{BACKUP_FILENAME}")
-    ssh.close()
-    return True
-
-
-def encode():
-    with open(LOCAL_BACKUP, "r", encoding="utf-8", errors="ignore") as f:
-        data = f.read()
-    encoded = codecs.encode(data, "rot_13")
-    with open(ENC_BACKUP, "w", encoding="utf-8") as f:
-        f.write(encoded)
-
-
-def upload_to_s3():
-    s3 = boto3.client("s3", region_name=S3_REGION)
-    s3.upload_file(ENC_BACKUP, S3_BUCKET, S3_KEY)
-    print(f"[+] Uploaded {ENC_BACKUP} to s3://{S3_BUCKET}/{S3_KEY}")
-
-
-if __name__ == "__main__":
-    if run_backup():
-        print("[+] Backup retrieved successfully.")
-        encode()
-        print("[+] Backup encoded.")
-        upload_to_s3()
diff --git a/app/util/restore_db.py b/app/util/restore_db.py
deleted file mode 100644
index 4cb16ad..0000000
--- a/app/util/restore_db.py
+++ /dev/null
@@ -1,72 +0,0 @@
-#!/usr/bin/env python3
-import paramiko
-import boto3
-import codecs
-import os
-from scp import SCPClient
-
-# ==== CONFIGURATION ====
-SSH_HOST = "pgsql_prod_db01.protoguard.local"
-SSH_PORT = 22
-SSH_USER = "dbadmin"
-SSH_KEY = "/home/walter/.ssh/pgsql_key"
-
-DB_NAME = "pgamgt"
-DB_USER = "assetdba"
-BACKUP_FILENAME = "db_backup.sql"
-LOCAL_BACKUP = f"/tmp/{BACKUP_FILENAME}"
-ENC_BACKUP = f"/tmp/{BACKUP_FILENAME}.xyz"
-
-S3_BUCKET = "protoguard-asset-management"
-S3_KEY = "db_backup.xyz"
-S3_REGION = "us-east-2"
-
-
-def create_ssh_client():
-    client = paramiko.SSHClient()
-    client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
-    client.connect(SSH_HOST, port=SSH_PORT, username=SSH_USER, key_filename=SSH_KEY)
-    return client
-
-
-def download_from_s3():
-    s3 = boto3.client("s3", region_name=S3_REGION)
-    s3.download_file(S3_BUCKET, S3_KEY, LOCAL_BACKUP)
-    print(f"[+] Downloaded backup from s3://{S3_BUCKET}/{S3_KEY}")
-
-
-def decode():
-    with open(LOCAL_BACKUP, "r", encoding="utf-8") as f:
-        data = f.read()
-    decoded = codecs.decode(data, "rot_13")
-    with open(LOCAL_BACKUP, "w", encoding="utf-8") as f:
-        f.write(decoded)
-
-
-def restore_backup():
-    ssh = create_ssh_client()
-
-    # Copy file to remote
-    with SCPClient(ssh.get_transport()) as scp:
-        scp.put(LOCAL_BACKUP, f"/tmp/{BACKUP_FILENAME}")
-
-    # Restore using psql
-    restore_cmd = f"psql -U {DB_USER} -d {DB_NAME} -f /tmp/{BACKUP_FILENAME}"
-    stdin, stdout, stderr = ssh.exec_command(restore_cmd)
-    exit_code = stdout.channel.recv_exit_status()
-    if exit_code != 0:
-        print("[!] Error restoring database:", stderr.read().decode())
-    else:
-        print("[+] Database restored successfully.")
-
-    # Clean up
-    ssh.exec_command(f"rm /tmp/{BACKUP_FILENAME}")
-    ssh.close()
-
-
-if __name__ == "__main__":
-    download_from_s3()
-    decode()
-    print("[+] Backup decoded.")
-    restore_backup()
-    print("[+] DB restored.")
The commit deleted two scripts, app/util/backup_db.py and app/util/restore_db.py. The file that leaked the database is backup_db.py: it dumps the production database and uploads the ROT13-encoded backup to an S3 bucket.
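More generally, every file ever deleted from the repository can be enumerated straight from history. A small helper sketch, assuming Python 3 and git on PATH (the function name is mine, not part of the challenge):

import subprocess

def deleted_files(repo_path="."):
    """List files that were deleted at any point in the repository's history."""
    out = subprocess.run(
        ["git", "-C", repo_path, "log", "--all", "--diff-filter=D",
         "--name-only", "--pretty=format:"],
        capture_output=True, text=True, check=True,
    ).stdout
    # Deduplicate while preserving order
    return list(dict.fromkeys(line for line in out.splitlines() if line.strip()))

if __name__ == "__main__":
    for path in deleted_files():
        print(path)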
Question 3: Using the results of your analysis, discover the public address of the database leak. Verify the contents of the leak by submitting the password hash for Naomi Adler.

Answer: pbkdf2:sha256:600000$YQqIvcDipYLzzXPB$598fe450e5ac019cdd41b4b10c5c21515573ee63a8f4881f7d721fd74ee43d59

Solving Method:
From the backup_db.py file recovered in question 2, I found crucial information about where the backups are stored:
S3 Bucket: protoguard-asset-management
S3 Key: db_backup.xyz
S3 Region: us-east-2
Encoding: ROT13

First, I attempted to download the backup file from the S3 bucket using this information:
curl https://protoguard-asset-management.s3.us-east-2.amazonaws.com/db_backup.xyz -o "$env:TEMP\db_backup.xyz"
The file downloaded successfully without any credentials, which means the backup object is publicly readable. This is a major security vulnerability!
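The same unauthenticated download can be reproduced in Python; a short sketch assuming the requests package is installed, using the URL derived from the values in backup_db.py:

import requests

# Values recovered from backup_db.py in question 2
URL = "https://protoguard-asset-management.s3.us-east-2.amazonaws.com/db_backup.xyz"

# No AWS credentials are supplied; a 200 response confirms the object is public.
resp = requests.get(URL, timeout=30)
resp.raise_for_status()

with open("db_backup.xyz", "wb") as f:
    f.write(resp.content)
print(f"[+] Downloaded {len(resp.content)} bytes")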
Next, I needed to decode the backup file since it was encoded with ROT13. I used PowerShell to decode it:
$encodedContent = Get-Content "$env:TEMP\db_backup.xyz" -Raw
$decodedContent = -join ($encodedContent.ToCharArray() | ForEach-Object {
    if ($_ -match '[a-zA-Z]') {
        $base = if ($_ -cle 'Z') { [int][char]'A' } else { [int][char]'a' }
        [char]($base + (([int]$_ - $base + 13) % 26))
    } else { $_ }
})
$decodedContent | Out-File "$env:TEMP\db_decoded.sql" -Encoding utf8
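Since the backup script encoded the dump with Python's codecs module, the decode step can also simply mirror it in reverse. A brief sketch, with local file names assumed from the download step above:

import codecs

# Reverse of backup_db.py's encode() step: ROT13 is its own inverse family.
with open("db_backup.xyz", "r", encoding="utf-8", errors="ignore") as f:
    encoded = f.read()

decoded = codecs.decode(encoded, "rot_13")

with open("db_decoded.sql", "w", encoding="utf-8") as f:
    f.write(decoded)
print("[+] Backup decoded to db_decoded.sql")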
After decoding, I searched for Naomi Adler’s password hash:
Get-Content "$env:TEMP\db_decoded.sql" | Select-String -Pattern "Naomi" -Context 0,3
Output:
> 11 Naomi Adler Cognitive Systems Research naomi.adler pbkdf2:sha256:600000$YQqIvcDipYLzzXPB$598fe450e5ac019cdd41b4b10c5c21515573ee63a8f4881f7d721fd74ee43d59
  12 Thomas Sinclair Technical Documentation & Records thomas.sinclair pbkdf2:sha256:600000$jiHdWOQtTkpCxgNR$96eca6d861e91a79f3941c42000ef8eca3c28aeece8fb7a97cbc9e687ecf5afc
  13 Leila Haddad Chemical Catalysis Research leila.haddad pbkdf2:sha256:600000$6DDYtk7r2O18ELlz$c21419b605e4aa73169634144843115b44104621ee2c6f22bbad427b806c8c28
  14 Gabriel Montoya Drone Systems Development gabriel.montoya pbkdf2:sha256:600000$Ko7CFWeuJ5aUTTOt$4641aeb53f381592e7040f93116a8c4675ae44f916fa44b3be61fc02ae39e0fa
Successfully retrieved Naomi Adler’s password hash!
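The hash layout (pbkdf2:sha256:600000$salt$digest) is consistent with Werkzeug's generate_password_hash, which fits a Flask application. If you ever needed to test a candidate password offline against it, a hedged sketch would look like this (werkzeug installed; no actual password is known or implied here):

from werkzeug.security import check_password_hash

naomi_hash = (
    "pbkdf2:sha256:600000$YQqIvcDipYLzzXPB$"
    "598fe450e5ac019cdd41b4b10c5c21515573ee63a8f4881f7d721fd74ee43d59"
)

# Returns True only if the candidate password matches the stored hash.
print(check_password_hash(naomi_hash, "candidate-password-goes-here"))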
Question 4: Submit the public address of the database leak, including the name of the file.

Solving Method:
Based on the information extracted from the backup_db.py script in question 2, I constructed the public S3 URL using the following details:
S3_BUCKET: protoguard-asset-management
S3_KEY: db_backup.xyz
S3_REGION: us-east-2
The standard virtual-hosted-style format for a public S3 object URL is:

https://{bucket-name}.s3.{region}.amazonaws.com/{object-key}
Therefore, the complete public address is:
https://protoguard-asset-management.s3.us-east-2.amazonaws.com/db_backup.xyz
This URL was verified to be publicly accessible when I successfully downloaded the file in question 3. The fact that this sensitive database backup is publicly accessible without authentication is a critical security flaw that led to the data leak.
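To confirm exposure without re-downloading the full dump, an unauthenticated HEAD request against the constructed URL is enough; a 200 status proves public readability. A minimal sketch (requests assumed installed):

import requests

bucket, region, key = "protoguard-asset-management", "us-east-2", "db_backup.xyz"
url = f"https://{bucket}.s3.{region}.amazonaws.com/{key}"

# HEAD avoids transferring the object body; 403/404 would indicate it is not public.
resp = requests.head(url, timeout=15)
print(url)
print(resp.status_code, resp.headers.get("Content-Length"))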
Key Security Issues Identified:
Database backup script was committed to Git history (still recoverable even after deletion)
S3 bucket has public read access enabled
Sensitive database backup is stored in a publicly accessible location
ROT13 encoding provides no real security (it is trivially reversible)
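On the remediation side, the bucket owner can audit whether S3 Block Public Access is enforced. A boto3 sketch is below; it requires credentials for the owning AWS account and was not part of the challenge:

import boto3
from botocore.exceptions import ClientError

s3 = boto3.client("s3", region_name="us-east-2")
bucket = "protoguard-asset-management"

# All four Public Access Block flags should be True for a private bucket.
try:
    pab = s3.get_public_access_block(Bucket=bucket)
    print(pab["PublicAccessBlockConfiguration"])
except ClientError as exc:
    print("No public access block configured or access denied:", exc)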