From 0eed2fd84a366c65b46e42a7b4c57a93cb35541b Mon Sep 17 00:00:00 2001
From: devm0nk <33984629+devm0nk@users.noreply.github.com>
Date: Mon, 10 Feb 2025 11:39:27 +0200
Subject: [PATCH 1/2] Add JS map decoder to reverse maps into source code

---
 web/js-map-decoder/README.md            | 11 +++++
 web/js-map-decoder/decode-source-map.js | 63 +++++++++++++++++++++++++
 web/js-map-decoder/package.json         |  5 ++
 3 files changed, 79 insertions(+)
 create mode 100644 web/js-map-decoder/README.md
 create mode 100644 web/js-map-decoder/decode-source-map.js
 create mode 100644 web/js-map-decoder/package.json

diff --git a/web/js-map-decoder/README.md b/web/js-map-decoder/README.md
new file mode 100644
index 0000000..d8d387f
--- /dev/null
+++ b/web/js-map-decoder/README.md
@@ -0,0 +1,11 @@
+# JavaScript Map Decoder
+
+To use this script, put all the `.map` files in the `maps` folder. The script preserves the application's folder structure and creates any directories it needs. Decoded files are written to the `source-code` folder.
+
+To run the script:
+
+```bash
+git clone
+npm install
+node decode-source-map.js
+```
\ No newline at end of file
diff --git a/web/js-map-decoder/decode-source-map.js b/web/js-map-decoder/decode-source-map.js
new file mode 100644
index 0000000..f29c8fe
--- /dev/null
+++ b/web/js-map-decoder/decode-source-map.js
@@ -0,0 +1,63 @@
+const fs = require('fs');
+const path = require('path');
+const sourceMap = require('source-map');
+
+// Directory containing .map files
+const DIRECTORY_PATH = './maps';
+const OUTPUT_DIRECTORY = './source-code';
+
+// Ensure the directory for a given file path exists
+function ensureDirectoryExistence(filePath) {
+  const dirname = path.dirname(filePath);
+  if (!fs.existsSync(dirname)) {
+    fs.mkdirSync(dirname, { recursive: true });
+  }
+}
+
+// Process all .map files in the directory
+async function processSourceMaps() {
+  try {
+    const files = fs.readdirSync(DIRECTORY_PATH);
+
+    // Keep only .map files
+    const mapFiles = files.filter(file => file.endsWith('.map'));
+
+    if (mapFiles.length === 0) {
+      console.log("No .map files found in the directory.");
+      return;
+    }
+
+    for (const file of mapFiles) {
+      const filePath = path.join(DIRECTORY_PATH, file);
+      console.log(`\nProcessing: ${filePath}`);
+
+      const rawSourceMap = JSON.parse(fs.readFileSync(filePath, 'utf8'));
+      const consumer = await new sourceMap.SourceMapConsumer(rawSourceMap);
+
+      consumer.sources.forEach((source) => {
+        const originalCode = consumer.sourceContentFor(source, true); // true: return null instead of throwing when content is missing
+        if (originalCode) {
+          // Preserve the original folder structure
+          const outputFilePath = path.join(OUTPUT_DIRECTORY, source);
+
+          // Ensure the directory exists before writing the file
+          ensureDirectoryExistence(outputFilePath);
+
+          // Write the recovered source into the output directory
+          fs.writeFileSync(outputFilePath, originalCode);
+          console.log(`✅ Saved: ${outputFilePath}`);
+        } else {
+          console.log(`⚠️ No source content found for: ${source}`);
+        }
+      });
+
+      consumer.destroy();
+    }
+  } catch (error) {
+    console.error("❌ Error processing source maps:", error);
+  }
+}
+
+// Run the decoder
+processSourceMaps().catch(console.error);
+
diff --git a/web/js-map-decoder/package.json b/web/js-map-decoder/package.json
new file mode 100644
index 0000000..f3f20a5
--- /dev/null
+++ b/web/js-map-decoder/package.json
@@ -0,0 +1,5 @@
+{
+  "dependencies": {
+    "source-map": "^0.7.4"
+  }
+}
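The decoder works because bundlers such as webpack can embed each original file's text in the map's `sourcesContent` array, which runs parallel to `sources` in the Source Map v3 format; `sourceContentFor` simply looks that entry up. A minimal sketch of the structure using only the standard library (the `app.js.map` file name is a hypothetical example):

```python
import json

# Hypothetical map file; any webpack/terser-style .map with embedded sources works.
with open("app.js.map", encoding="utf-8") as f:
    smap = json.load(f)

# "sources" and "sourcesContent" are parallel arrays in Source Map v3;
# "sourcesContent" may be absent, or hold null for files that were not embedded.
for src, content in zip(smap.get("sources", []), smap.get("sourcesContent") or []):
    print(f"{src}: {len(content)} chars embedded" if content else f"{src}: not embedded")
```

When a map omits `sourcesContent`, there is nothing to recover and the decoder logs its "No source content found" warning instead of writing a file.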
From 5fd6ef409c1bae5af23ba41b0fbd6b59d8d9a631 Mon Sep 17 00:00:00 2001
From: devm0nk <33984629+devm0nk@users.noreply.github.com>
Date: Mon, 10 Feb 2025 11:40:22 +0200
Subject: [PATCH 2/2] Add script to check a list of URLs for unauthenticated
 access

---
 web/check-unauthenticated-access.py | 108 ++++++++++++++++++++++++++++
 1 file changed, 108 insertions(+)
 create mode 100644 web/check-unauthenticated-access.py

diff --git a/web/check-unauthenticated-access.py b/web/check-unauthenticated-access.py
new file mode 100644
index 0000000..24c9c6a
--- /dev/null
+++ b/web/check-unauthenticated-access.py
@@ -0,0 +1,108 @@
+import requests  # type: ignore
+from bs4 import BeautifulSoup  # type: ignore
+import urllib3  # type: ignore
+import time
+
+def check_access(url, proxy=None, cookies=None, user_agent=None):
+    try:
+        # Route both schemes through the proxy if one is provided
+        proxies = {"http": proxy, "https": proxy} if proxy else None
+
+        # Build headers from the optional cookies and User-Agent
+        headers = {}
+        if cookies:
+            headers["Cookie"] = cookies
+        if user_agent:
+            headers["User-Agent"] = user_agent
+
+        # Suppress SSL warnings, since verify=False is used below
+        urllib3.disable_warnings(category=urllib3.exceptions.InsecureRequestWarning)
+        response = requests.get(url, proxies=proxies, headers=headers, timeout=5, allow_redirects=False, verify=False)
+
+        if response.status_code == 200:
+            soup = BeautifulSoup(response.text, 'html.parser')
+            page_title = soup.title.string if soup.title else ''
+            if "Sign In" in str(page_title):
+                return f"Authorization enforced for {url} (Page title: {page_title})"
+            else:
+                return f"Unauthorized access to {url} (Status Code: {response.status_code})"
+        elif response.status_code == 401:
+            return f"Authorization enforced for {url} (Status Code: {response.status_code})"
+        elif response.status_code == 403:
+            return f"Access forbidden for {url} (Status Code: {response.status_code})"
+        elif response.status_code == 302 and response.headers.get('Location') == '/login.html':
+            return f"Authorization enforced for {url} (Redirect to login page)"
+        else:
+            return f"Other issue with {url} (Status Code: {response.status_code})"
+    except requests.exceptions.RequestException as e:
+        return f"Error accessing {url}: {str(e)}"
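+
+# Example call (all values hypothetical), e.g. routed through a local Burp proxy:
+#   check_access("https://target.example/admin", proxy="http://127.0.0.1:8080",
+#                cookies="session=abc123", user_agent="Mozilla/5.0")
+# check_access returns a verdict string instead of raising on request errors,
+# so it can be called safely in a loop over a large URL list.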
(e.g., results.txt): ") + delay = float(input("Enter the delay between requests (in seconds, e.g., 1): ")) + + # Ask if user wants to use a proxy + use_proxy = input("Do you want to use an HTTP proxy? (yes/no): ").strip().lower() + proxy = None + if use_proxy == 'yes': + proxy = input("Enter the proxy URL (e.g., http://proxy-server:port): ") + + # Ask if user wants to add cookies + use_cookies = input("Do you want to add cookies? (yes/no): ").strip().lower() + cookies = None + if use_cookies == 'yes': + cookies = input("Enter the cookies (e.g., cookie_name1=value1; cookie_name2=value2): ") + + # Ask if user wants to use a custom User-Agent + use_user_agent = input("Do you want to specify a custom User-Agent? (yes/no): ").strip().lower() + user_agent = None + if use_user_agent == 'yes': + user_agent = input("Enter the custom User-Agent string: ") + + check_urls_from_file(file_path, output_file, delay, proxy, cookies, user_agent) \ No newline at end of file