import socket
import ssl

import requests
from bs4 import BeautifulSoup
from urllib.parse import urlparse, urljoin
import builtwith
import whois
from PyInquirer import prompt
from termcolor import colored
class GhostSecDeepScanToolSet:
    def __init__(self):
        self.visited_urls = set()

    def show_menu(self):
        questions = [
            {
                'type': 'list',
                'name': 'option',
                'message': colored('GhostSec Deep Scan ToolSet - Select an option:', 'cyan'),
                'choices': [
                    colored('Perform User-Specific Search', 'green'),
                    colored('Scan Multiple Sites', 'green'),
                    colored('Advanced Search Options', 'green'),
                    colored('Spider Function', 'green'),
                    colored('Deep Scan and Technology Analysis', 'green'),
                    colored('Security Protocols', 'green'),
                    colored('Error Handling Protocols', 'green'),
                    colored('Whois Lookup', 'green'),
                    colored('Save Data to File', 'green'),
                    colored('Content Analysis', 'green'),
                    colored('SSL Analysis', 'green'),
                    colored('Check robots.txt', 'green'),
                    colored('Check sitemap.xml', 'green'),
                    colored('DNS Lookup', 'green'),
                    colored('Perform CVE Scan', 'green'),
                    colored('Exit', 'red')
                ],
            }
        ]
        try:
            answers = prompt(questions)
            return answers['option']
        except KeyboardInterrupt:
            print("\nExiting GhostSec Deep Scan ToolSet.")
            exit()
    def execute_option(self, option):
        try:
            if option == colored('Perform User-Specific Search', 'green'):
                username = input(colored("Enter username: ", 'cyan'))
                user_query = input(colored("Enter search query: ", 'cyan'))
                self.harvest_data(username, user_query)
            elif option == colored('Scan Multiple Sites', 'green'):
                sites = input(colored("Enter sites separated by commas: ", 'cyan')).split(',')
                user_query = input(colored("Enter search query: ", 'cyan'))
                self.scan_multiple_sites(sites, user_query)
            elif option == colored('Advanced Search Options', 'green'):
                self.advanced_search_options()
            elif option == colored('Spider Function', 'green'):
                start_url = input(colored("Enter starting URL: ", 'cyan'))
                max_depth = int(input(colored("Enter maximum depth: ", 'cyan')))
                self.crawl(start_url, max_depth)
            elif option == colored('Deep Scan and Technology Analysis', 'green'):
                url = input(colored("Enter URL for deep scan: ", 'cyan'))
                self.deep_scan(url)
            elif option == colored('Security Protocols', 'green'):
                self.security_protocols()
            elif option == colored('Error Handling Protocols', 'green'):
                self.error_handling_protocols()
            elif option == colored('Whois Lookup', 'green'):
                url = input(colored("Enter the website URL for Whois lookup: ", 'cyan'))
                self.whois_lookup(url)
            elif option == colored('Save Data to File', 'green'):
                filename = input(colored("Enter the filename to save data: ", 'cyan'))
                content = input(colored("Enter the content to save: ", 'cyan'))
                self.save_to_file(filename, content)
            elif option == colored('Content Analysis', 'green'):
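                # Content analysis was a stub in the original; this is a minimal
                # sketch that assumes reporting the page title and an approximate
                # word count is enough for this menu entry.
                url = input(colored("Enter the website URL for content analysis: ", 'cyan'))
                response = requests.get(url)
                response.raise_for_status()
                soup = BeautifulSoup(response.text, 'html.parser')
                title = soup.title.get_text(strip=True) if soup.title else 'N/A'
                word_count = len(soup.get_text().split())
                print(colored(f"Title: {title}", 'cyan'))
                print(colored(f"Approximate word count: {word_count}", 'cyan'))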
            elif option == colored('SSL Analysis', 'green'):
                url = input(colored("Enter the website URL for SSL analysis: ", 'cyan'))
                self.ssl_analysis(url)
            elif option == colored('Check robots.txt', 'green'):
                url = input(colored("Enter the website URL to check robots.txt: ", 'cyan'))
                self.check_robots_txt(url)
            elif option == colored('Check sitemap.xml', 'green'):
                url = input(colored("Enter the website URL to check sitemap.xml: ", 'cyan'))
                self.check_sitemap_xml(url)
            elif option == colored('DNS Lookup', 'green'):
                domain = input(colored("Enter the domain for DNS lookup: ", 'cyan'))
                self.dns_lookup(domain)
            elif option == colored('Perform CVE Scan', 'green'):
                url = input(colored("Enter the website URL for CVE scan: ", 'cyan'))
                self.perform_cve_scan(url)
            elif option == colored('Exit', 'red'):
                print(colored("Exiting GhostSec Deep Scan ToolSet.", 'red'))
                exit()
        except KeyboardInterrupt:
            print("\nExiting GhostSec Deep Scan ToolSet.")
            exit()
        except Exception as e:
            print(colored(f"An error occurred: {e}", 'red'))
    def run(self):
        while True:
            option = self.show_menu()
            self.execute_option(option)
    def harvest_data(self, username, user_query):
        search_query = f"site:{username}.com {user_query}"
        search_results = self.perform_web_search(search_query)
        harvested_data = [self.extract_data(result) for result in search_results]
        self.display_data(harvested_data)

    def scan_multiple_sites(self, sites, user_query):
        for site in sites:
            self.harvest_data(site.strip(), user_query)
    def advanced_search_options(self):
        print(colored("Advanced Search Options:", 'cyan'))
        print(colored("1. Search by File Type", 'cyan'))
        print(colored("2. Search by Date Range", 'cyan'))
        print(colored("3. Search by Custom Criteria", 'cyan'))
        option = int(input(colored("Enter your choice (1, 2, or 3): ", 'cyan')))
        if option == 1:
            file_type = input(colored("Enter the file type (e.g., pdf): ", 'cyan'))
            self.file_type_search(file_type)
        elif option == 2:
            start_date = input(colored("Enter the start date (YYYY-MM-DD): ", 'cyan'))
            end_date = input(colored("Enter the end date (YYYY-MM-DD): ", 'cyan'))
            self.date_range_search(start_date, end_date)
        elif option == 3:
            custom_criteria = input(colored("Enter custom search criteria: ", 'cyan'))
            self.custom_criteria_search(custom_criteria)
        else:
            print(colored("Invalid option. Returning to the main menu.", 'red'))
    def file_type_search(self, file_type):
        # File-type search was a stub in the original; the sketch below assumes
        # the search engine supports the filetype: operator.
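        query = input(colored("Enter search query: ", 'cyan'))
        results = self.perform_web_search(f"{query} filetype:{file_type}")
        self.display_data([self.extract_data(result) for result in results])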
    def date_range_search(self, start_date, end_date):
        # Date-range search was a stub in the original; the sketch below assumes
        # the search engine honours the after:/before: operators.
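        query = input(colored("Enter search query: ", 'cyan'))
        results = self.perform_web_search(f"{query} after:{start_date} before:{end_date}")
        self.display_data([self.extract_data(result) for result in results])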
    def custom_criteria_search(self, custom_criteria):
        # Custom-criteria search was a stub in the original; this sketch simply
        # passes the user-supplied search operators straight through to the engine.
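        results = self.perform_web_search(custom_criteria)
        self.display_data([self.extract_data(result) for result in results])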
    def crawl(self, start_url, max_depth):
        self.visited_urls.clear()  # Clear visited URLs for each new crawl
        self.crawl_recursive(start_url, 0, max_depth)

    def crawl_recursive(self, url, depth, max_depth):
        if depth > max_depth or url in self.visited_urls or not self.is_valid_url(url):
            return
        self.visited_urls.add(url)
        print(colored(f"Exploring: {url}", 'cyan'))
        # Add logic to extract and process data from the page
        # For example, you can call a function to harvest data from the page
        # Extract links and continue crawling
        links = self.extract_links(url)
        for link in links:
            self.crawl_recursive(link, depth + 1, max_depth)
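    # crawl_recursive() calls is_valid_url(), but the original paste never
    # defines it; this is a minimal sketch that accepts absolute http(s) URLs.
    def is_valid_url(self, url):
        parsed = urlparse(url)
        return parsed.scheme in ('http', 'https') and bool(parsed.netloc)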
    def deep_scan(self, url):
        try:
            html_source, technologies = self.fetch_deep_scan_data(url)
            if html_source and technologies:
                print(colored("HTML Source Code:", 'cyan'))
                print(html_source)
                print(colored("Technologies Used:", 'cyan'))
                print(technologies)
            else:
                print(colored("Deep scan failed. Check the URL and try again.", 'red'))
        except Exception as e:
            print(colored(f"An error occurred during deep scan: {e}", 'red'))

    def fetch_deep_scan_data(self, url):
        try:
            response = requests.get(url)
            response.raise_for_status()  # Raise an exception for bad response status
            soup = BeautifulSoup(response.text, 'html.parser')
            # Parse technologies using builtwith library
            technologies = builtwith.builtwith(url)
            return soup.prettify(), technologies
        except requests.exceptions.RequestException as req_err:
            print(colored(f"Error: {req_err}", 'red'))
            return None, None
    def security_protocols(self):
        print(colored("Security Protocols:", 'cyan'))
        print(colored("1. Check for SSL/TLS", 'cyan'))
        print(colored("2. Check for HTTP Strict Transport Security (HSTS)", 'cyan'))
        option = int(input(colored("Enter your choice (1 or 2): ", 'cyan')))
        if option == 1:
            url = input(colored("Enter the website URL to check for SSL/TLS: ", 'cyan'))
            self.check_ssl_tls(url)
        elif option == 2:
            url = input(colored("Enter the website URL to check for HSTS: ", 'cyan'))
            self.check_hsts(url)
        else:
            print(colored("Invalid option. Returning to the main menu.", 'red'))

    def check_ssl_tls(self, url):
        try:
            # requests follows redirects, so result.url reflects the final scheme.
            result = requests.get(url)
            if result.url.startswith('https'):
                print(colored("The website is using SSL/TLS.", 'green'))
            else:
                print(colored("The website is not using SSL/TLS.", 'yellow'))
        except requests.exceptions.RequestException as req_err:
            print(colored(f"Error checking SSL/TLS: {req_err}", 'red'))

    def check_hsts(self, url):
        try:
            headers = requests.head(url).headers
            if 'Strict-Transport-Security' in headers:
                print(colored("The website has HTTP Strict Transport Security (HSTS) enabled.", 'green'))
            else:
                print(colored("The website does not have HSTS enabled.", 'yellow'))
        except requests.exceptions.RequestException as req_err:
            print(colored(f"Error checking HSTS: {req_err}", 'red'))
    def error_handling_protocols(self):
        print(colored("Error Handling Protocols:", 'cyan'))
        print(colored("1. Check for Broken Links", 'cyan'))
        option = int(input(colored("Enter your choice (1): ", 'cyan')))
        if option == 1:
            url = input(colored("Enter the website URL to check for broken links: ", 'cyan'))
            self.check_broken_links(url)
        else:
            print(colored("Invalid option. Returning to the main menu.", 'red'))

    def check_broken_links(self, url):
        # Note: this only verifies the page itself responds; a fuller check would
        # request every link returned by extract_links().
        try:
            response = requests.get(url)
            response.raise_for_status()
            print(colored("The page responded without errors.", 'green'))
        except requests.exceptions.HTTPError as http_err:
            print(colored(f"HTTP Error: {http_err}", 'yellow'))
        except requests.exceptions.RequestException as req_err:
            print(colored(f"Request Error: {req_err}", 'yellow'))
    def whois_lookup(self, url):
        try:
            # Accept either a bare domain or a full URL.
            domain = urlparse(url).hostname or url
            whois_info = whois.whois(domain)
            print(colored("Whois Information:", 'cyan'))
            print(whois_info)
        except Exception as whois_err:
            print(colored(f"Whois Lookup Error: {whois_err}", 'red'))

    def save_to_file(self, filename, content):
        try:
            with open(filename, 'w', encoding='utf-8') as file:
                file.write(content)
            print(colored(f"Information saved to {filename}", 'green'))
        except Exception as file_err:
            print(colored(f"Error saving to file: {file_err}", 'red'))
    def ssl_analysis(self, url):
        # SSL analysis was a stub in the original; the sketch below uses the
        # standard-library ssl module (rather than sslyze) and assumes that
        # inspecting the leaf certificate on port 443 is sufficient.
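        try:
            hostname = urlparse(url).hostname or url
            context = ssl.create_default_context()
            with socket.create_connection((hostname, 443), timeout=10) as sock:
                with context.wrap_socket(sock, server_hostname=hostname) as ssock:
                    cert = ssock.getpeercert()
                    print(colored(f"TLS version: {ssock.version()}", 'cyan'))
                    print(colored(f"Subject: {cert.get('subject')}", 'cyan'))
                    print(colored(f"Issuer: {cert.get('issuer')}", 'cyan'))
                    print(colored(f"Valid until: {cert.get('notAfter')}", 'cyan'))
        except (ssl.SSLError, OSError) as ssl_err:
            print(colored(f"SSL Analysis Error: {ssl_err}", 'red'))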
    def check_robots_txt(self, url):
        # robots.txt checking was a stub in the original; this sketch fetches
        # /robots.txt from the site root and prints it if present.
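        try:
            parsed = urlparse(url)
            base = f"{parsed.scheme or 'https'}://{parsed.netloc or url}"
            response = requests.get(urljoin(base, '/robots.txt'), timeout=10)
            if response.status_code == 200:
                print(colored("robots.txt found:", 'green'))
                print(response.text)
            else:
                print(colored(f"No robots.txt (HTTP {response.status_code}).", 'yellow'))
        except requests.exceptions.RequestException as req_err:
            print(colored(f"Error fetching robots.txt: {req_err}", 'red'))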
    def check_sitemap_xml(self, url):
        # sitemap.xml checking was a stub in the original; this sketch fetches
        # /sitemap.xml and counts the <loc> entries it lists.
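        try:
            parsed = urlparse(url)
            base = f"{parsed.scheme or 'https'}://{parsed.netloc or url}"
            response = requests.get(urljoin(base, '/sitemap.xml'), timeout=10)
            if response.status_code == 200:
                soup = BeautifulSoup(response.text, 'html.parser')
                locs = soup.find_all('loc')
                print(colored(f"sitemap.xml found with {len(locs)} <loc> entries.", 'green'))
            else:
                print(colored(f"No sitemap.xml (HTTP {response.status_code}).", 'yellow'))
        except requests.exceptions.RequestException as req_err:
            print(colored(f"Error fetching sitemap.xml: {req_err}", 'red'))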
    def dns_lookup(self, domain):
        # DNS lookup was a stub in the original; this sketch resolves A records
        # with the standard-library socket module.
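        try:
            _, _, addresses = socket.gethostbyname_ex(domain)
            for address in addresses:
                print(colored(f"{domain} -> {address}", 'green'))
        except socket.gaierror as dns_err:
            print(colored(f"DNS Lookup Error: {dns_err}", 'red'))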
    def perform_cve_scan(self, url):
        # CVE scanning was a stub in the original; the sketch below fingerprints
        # the site with builtwith and queries NVD's keyword search for the first
        # detected technology (assumptions: builtwith names make useful NVD
        # keywords, and unauthenticated NVD rate limits make one query prudent).
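        try:
            technologies = builtwith.builtwith(url)
            names = [name for values in technologies.values() for name in values]
            if not names:
                print(colored("No technologies detected to scan for CVEs.", 'yellow'))
                return
            keyword = names[0]
            response = requests.get("https://services.nvd.nist.gov/rest/json/cves/2.0",
                                    params={'keywordSearch': keyword}, timeout=30)
            response.raise_for_status()
            total = response.json().get('totalResults', 0)
            print(colored(f"NVD reports {total} CVEs matching '{keyword}'.", 'cyan'))
        except requests.exceptions.RequestException as req_err:
            print(colored(f"CVE Scan Error: {req_err}", 'red'))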
    def perform_web_search(self, query, search_engine="google"):
        # Note: scraping result pages is fragile; the CSS classes used below
        # match Google's markup at the time of writing and may change.
        search_url = f"https://www.{search_engine}.com/search"
        response = requests.get(search_url, params={'q': query},
                                headers={'User-Agent': 'Mozilla/5.0'})
        soup = BeautifulSoup(response.text, 'html.parser')
        return soup.find_all('div', class_='tF2Cxc')

    def extract_data(self, result):
        title = result.find('h3').text
        link = result.find('a')['href']
        content = result.find('div', class_='B6fmyf').text  # Adjust class based on actual structure
        return {'Title': title, 'Link': link, 'Content': content}

    def display_data(self, data_list):
        for data in data_list:
            print(f"Title: {data['Title']}\nLink: {data['Link']}\nContent: {data['Content']}\n")
    def extract_links(self, url):
        # Resolve every href on the page against the page URL; swallow network
        # errors so a single bad page does not abort a crawl.
        try:
            response = requests.get(url)
        except requests.exceptions.RequestException:
            return []
        soup = BeautifulSoup(response.text, 'html.parser')
        links = soup.find_all('a', href=True)
        return [urljoin(url, link['href']) for link in links]
if __name__ == "__main__":
    ghostsec_toolset = GhostSecDeepScanToolSet()
    ghostsec_toolset.run()