Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Adds new web scripts #21

Open
wants to merge 2 commits into
base: master
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
108 changes: 108 additions & 0 deletions web/check-unauthenticated-access.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,108 @@
import requests # type: ignore
from bs4 import BeautifulSoup # type: ignore
import urllib3 # type: ignore
import time

def check_access(url, proxy=None, cookies=None, user_agent=None):
    """Probe *url* without credentials and classify the authorization result.

    Args:
        url: The URL to request.
        proxy: Optional proxy URL, applied to both http and https traffic.
        cookies: Optional raw ``Cookie`` header string.
        user_agent: Optional ``User-Agent`` header value.

    Returns:
        A one-line, human-readable classification string. The marker phrases
        ("Unauthorized access", "Authorization enforced", "Access forbidden")
        are matched by check_urls_from_file() to bucket results.
    """
    try:
        # Route both schemes through the same proxy when one was supplied.
        proxies = {"http": proxy, "https": proxy} if proxy else None

        headers = {}
        if cookies:
            headers["Cookie"] = cookies
        if user_agent:
            headers["User-Agent"] = user_agent

        # Certificate validation is deliberately disabled (verify=False) so
        # self-signed/internal targets can be scanned; silence the matching
        # urllib3 warning to keep output readable.
        urllib3.disable_warnings(category=urllib3.exceptions.InsecureRequestWarning)
        response = requests.get(url, proxies=proxies, headers=headers,
                                timeout=5, allow_redirects=False, verify=False)

        if response.status_code == 200:
            # A 200 can still be a login page; inspect the title to tell.
            soup = BeautifulSoup(response.text, 'html.parser')
            page_title = soup.title.string if soup.title else ''
            if "Sign In" in str(page_title):
                return f"Authorization enforced for {url} (Page title: {page_title})"
            return f"Unauthorized access {url} (Status Code: {response.status_code})"
        if response.status_code == 401:
            return f"Authorization enforced for {url} (Status Code: {response.status_code})"
        if response.status_code == 403:
            return f"Access forbidden for {url} (Status Code: {response.status_code})"
        # Some servers redirect with an absolute URL; match by suffix instead
        # of requiring Location to be exactly '/login.html'.
        if response.status_code == 302 and response.headers.get('Location', '').endswith('/login.html'):
            return f"Authorization enforced for {url} (Redirect to login page)"
        return f"Other issue with {url} (Status Code: {response.status_code})"
    except requests.exceptions.RequestException as e:
        return f"Error accessing {url}: {str(e)}"

def check_urls_from_file(file_path, output_file, delay=0.3, proxy=None, cookies=None, user_agent=None):
    """Check every URL listed in *file_path* and write grouped results.

    Reads one URL per line (blank lines ignored), probes each with
    check_access(), prints the per-URL result, and writes the results to
    *output_file* grouped into four sections.

    Args:
        file_path: Path to a text file with one URL per line.
        output_file: Path where the grouped report is written.
        delay: Seconds to sleep between requests (throttling).
        proxy: Optional proxy URL forwarded to check_access().
        cookies: Optional raw Cookie header forwarded to check_access().
        user_agent: Optional User-Agent forwarded to check_access().
    """
    try:
        with open(file_path, 'r') as file:
            # One URL per line; skip blank lines.
            urls = [line.strip() for line in file if line.strip()]

        # Buckets keyed by the marker phrase check_access() embeds in results.
        unauthenticated = []
        requires_authentication = []
        access_forbidden = []
        other_issues = []

        for url in urls:
            result = check_access(url, proxy, cookies, user_agent)
            print(result)

            if "Unauthorized access" in result:
                unauthenticated.append(result)
            elif "Authorization enforced" in result:
                requires_authentication.append(result)
            elif "Access forbidden" in result:
                access_forbidden.append(result)
            else:
                other_issues.append(result)

            # Throttle so the target is not hammered.
            time.sleep(delay)

        # Table-driven report writing keeps all sections formatted identically
        # (the previous copy-pasted version terminated the last section
        # inconsistently).
        sections = [
            ("==== Unauthenticated Access (Access Granted) ====", unauthenticated),
            ("==== Requires Authentication ====", requires_authentication),
            ("==== Access Forbidden ====", access_forbidden),
            ("==== Other Issues ====", other_issues),
        ]
        with open(output_file, 'w') as out_file:
            for header, results in sections:
                out_file.write(header + "\n")
                out_file.write("\n".join(results) + "\n\n")

        print(f"Results saved to {output_file}")

    except FileNotFoundError:
        print(f"File {file_path} not found.")
    except Exception as e:
        # Top-level boundary for the scan run: report and keep the CLI usable.
        print(f"An error occurred: {str(e)}")

if __name__ == "__main__":
    # Interactive CLI: collect scan parameters, then run the URL checks.
    file_path = input("Enter the path to the file containing URLs: ")
    output_file = input("Enter the path to the output file (e.g., results.txt): ")

    # Validate the delay instead of crashing on bad/empty input; fall back to
    # the function's default of 0.3 seconds.
    delay_raw = input("Enter the delay between requests (in seconds, e.g., 1): ").strip()
    try:
        delay = float(delay_raw) if delay_raw else 0.3
    except ValueError:
        print("Invalid delay; using 0.3 seconds.")
        delay = 0.3

    # Optional HTTP proxy.
    use_proxy = input("Do you want to use an HTTP proxy? (yes/no): ").strip().lower()
    proxy = None
    if use_proxy == 'yes':
        proxy = input("Enter the proxy URL (e.g., http://proxy-server:port): ")

    # Optional raw Cookie header.
    use_cookies = input("Do you want to add cookies? (yes/no): ").strip().lower()
    cookies = None
    if use_cookies == 'yes':
        cookies = input("Enter the cookies (e.g., cookie_name1=value1; cookie_name2=value2): ")

    # Optional custom User-Agent.
    use_user_agent = input("Do you want to specify a custom User-Agent? (yes/no): ").strip().lower()
    user_agent = None
    if use_user_agent == 'yes':
        user_agent = input("Enter the custom User-Agent string: ")

    check_urls_from_file(file_path, output_file, delay, proxy, cookies, user_agent)
11 changes: 11 additions & 0 deletions web/js-map-decoder/README.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,11 @@
# Javascript Map Decoder

In order to use this script, put all the map files in the `maps` folder. The script respects the application's folder structure and will create all necessary folders. Decoded files will be placed in the `source-code` folder.

To install the dependencies and run the script:

```bash
git clone <repo>
npm install
node decode-source-map.js
```
63 changes: 63 additions & 0 deletions web/js-map-decoder/decode-source-map.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,63 @@
const fs = require('fs');
const path = require('path');
const sourceMap = require('source-map');

// Input: directory scanned for .map files.
const DIRECTORY_PATH = './maps';
// Output: root directory under which decoded source files are written.
const OUTPUT_DIRECTORY = './source-code';

// Function to ensure a directory exists
function ensureDirectoryExistence(filePath) {
const dirname = path.dirname(filePath);
if (!fs.existsSync(dirname)) {
fs.mkdirSync(dirname, { recursive: true });
}
}

// Process every .map file in DIRECTORY_PATH: extract each embedded original
// source and write it under OUTPUT_DIRECTORY, preserving folder structure.
async function processSourceMaps() {
  try {
    const files = fs.readdirSync(DIRECTORY_PATH);

    // Only consider source-map files.
    const mapFiles = files.filter(file => file.endsWith('.map'));

    if (mapFiles.length === 0) {
      console.log("No .map files found in the directory.");
      return;
    }

    for (const file of mapFiles) {
      const filePath = path.join(DIRECTORY_PATH, file);
      console.log(`\nProcessing: ${filePath}`);

      const rawSourceMap = JSON.parse(fs.readFileSync(filePath, 'utf8'));
      const consumer = await new sourceMap.SourceMapConsumer(rawSourceMap);

      consumer.sources.forEach((source) => {
        const originalCode = consumer.sourceContentFor(source);
        if (originalCode) {
          // Bundler source names often carry a scheme prefix (e.g.
          // "webpack://app/src/x.js"); strip it so path.join produces a
          // sane relative path.
          const cleaned = source.replace(/^[a-zA-Z][\w+.-]*:\/\//, '');
          const outputFilePath = path.join(OUTPUT_DIRECTORY, cleaned);

          // Security: a hostile map could use "../" components to write
          // outside OUTPUT_DIRECTORY; refuse any path that escapes it.
          const root = path.resolve(OUTPUT_DIRECTORY);
          const resolved = path.resolve(outputFilePath);
          if (!resolved.startsWith(root + path.sep)) {
            console.log(`⚠️ Skipping suspicious source path: ${source}`);
            return;
          }

          // Ensure directory exists before writing the file.
          ensureDirectoryExistence(outputFilePath);

          fs.writeFileSync(outputFilePath, originalCode);
          console.log(`✅ Saved: ${outputFilePath}`);
        } else {
          console.log(`⚠️ No source content found for: ${source}`);
        }
      });

      // Release the consumer's native/wasm resources.
      consumer.destroy();
    }
  } catch (error) {
    console.error("❌ Error processing source maps:", error);
  }
}

// Entry point: run the decoder; surface any unhandled rejection on stderr.
processSourceMaps().catch(console.error);

5 changes: 5 additions & 0 deletions web/js-map-decoder/package.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
{
"dependencies": {
"source-map": "^0.7.4"
}
}