import tkinter as tk
from tkinter import messagebox, filedialog

import requests
from bs4 import BeautifulSoup
from urllib.parse import urljoin


class LinkScraperGUI:
    def __init__(self, master):
        self.master = master
        self.master.title("Najeeb Shah Khan Web Links Scraper and save Text")

        self.url_label = tk.Label(master, text="Enter URL:")
        self.url_label.pack()

        self.url_entry = tk.Entry(master, width=100)
        self.url_entry.pack()

        self.scrape_button = tk.Button(master, text="Scrape Links", command=self.scrape_links)
        self.scrape_button.pack()

    def scrape_links(self):
        url = self.url_entry.get()
        if not url:
            messagebox.showerror("Error", "Please enter a valid URL.")
            return

        # Fetch the page, reporting any network or HTTP error to the user
        try:
            response = requests.get(url)
            response.raise_for_status()
        except requests.exceptions.RequestException as e:
            messagebox.showerror("Error", f"Error fetching URL: {e}")
            return

        soup = BeautifulSoup(response.text, 'html.parser')

        # Extract all anchor tags that carry an href attribute
        links = soup.find_all('a', href=True)
        if not links:
            messagebox.showinfo("Info", "No links found on the given URL.")
            return

        # Resolve relative hrefs against the base URL so every link is absolute
        complete_links = [urljoin(url, link['href']) for link in links]

        # Prompt the user to choose a file to save the links
        file_path = filedialog.asksaveasfilename(defaultextension=".txt", filetypes=[("Text files", "*.txt")])
        if not file_path:
            return  # User canceled the save dialog

        # Write one complete URL per line
        with open(file_path, 'w', encoding='utf-8') as file:
            for link in complete_links:
                file.write(link + '\n')

        messagebox.showinfo("Success", f"Complete URLs saved to {file_path}")


if __name__ == "__main__":
    root = tk.Tk()
    app = LinkScraperGUI(root)
    root.mainloop()