IPTV-LINK-EXTRAC-ONLINE.pyw
Najeebsk, Jun 22nd, 2024
import tkinter as tk
from tkinter import ttk, messagebox
from tkinter.scrolledtext import ScrolledText
import requests
from bs4 import BeautifulSoup
from urllib.parse import urljoin
import subprocess

class LinkScraperGUI:
    def __init__(self, master):
        self.master = master
        self.master.title("Web Links Scraper and Save Text")
        self.master.geometry("800x600")

        # Style configuration
        self.style = ttk.Style()
        self.style.theme_use("clam")  # Use the 'clam' theme for a more modern look

        self.style.configure("TLabel", font=("Helvetica", 12))
        self.style.configure("TButton", font=("Helvetica", 12), background="#4CAF50", foreground="white")
        self.style.configure("TEntry", font=("Helvetica", 12), padding=5)
        self.style.configure("TScrollbar", gripcount=0, background="#f1f1f1")

        # URL entry frame
        self.url_frame = ttk.Frame(master)
        self.url_frame.pack(pady=10, padx=10, fill=tk.X)

        self.url_label = ttk.Label(self.url_frame, text="Enter URL:")
        self.url_label.pack(side=tk.LEFT, padx=(0, 10))

        self.url_entry = ttk.Entry(self.url_frame, width=70)
        self.url_entry.pack(side=tk.LEFT, fill=tk.X, expand=True)

        self.scrape_button = ttk.Button(self.url_frame, text="Scrape Links", command=self.scrape_links)
        self.scrape_button.pack(side=tk.LEFT, padx=(10, 0))

        # Editable text area that holds the scraped links
        self.links_frame = ttk.Frame(master)
        self.links_frame.pack(pady=10, padx=10, fill=tk.BOTH, expand=True)

        self.links_text = ScrolledText(self.links_frame, width=100, height=10, font=("Helvetica", 12))
        self.links_text.pack(side=tk.LEFT, fill=tk.BOTH, expand=True)

        self.listbox_scrollbar = ttk.Scrollbar(self.links_frame, orient=tk.VERTICAL, command=self.links_text.yview)
        self.listbox_scrollbar.pack(side=tk.LEFT, fill=tk.Y)
        self.links_text.config(yscrollcommand=self.listbox_scrollbar.set)

        # Double-clicking a line sends that line's URL to VLC
        self.links_text.bind("<Double-1>", self.open_in_vlc)

        # Scrape selected link button
        self.scrape_selected_button = ttk.Button(master, text="Scrape Selected Link", command=self.scrape_selected_link)
        self.scrape_selected_button.pack(pady=10)

        # Result text frame
        self.result_frame = ttk.Frame(master)
        self.result_frame.pack(pady=10, padx=10, fill=tk.BOTH, expand=True)

        self.result_text = ScrolledText(self.result_frame, width=100, height=10, font=("Helvetica", 12))
        self.result_text.pack(side=tk.LEFT, fill=tk.BOTH, expand=True)

        self.text_scrollbar = ttk.Scrollbar(self.result_frame, orient=tk.VERTICAL, command=self.result_text.yview)
        self.text_scrollbar.pack(side=tk.LEFT, fill=tk.Y)
        self.result_text.config(yscrollcommand=self.text_scrollbar.set)

        self.result_text.bind("<Double-1>", self.open_in_vlc)

        # Save results button
        self.save_button = ttk.Button(master, text="Save Results", command=self.save_results)
        self.save_button.pack(pady=10)

    def scrape_links(self):
        url = self.url_entry.get().strip()

        if not url:
            messagebox.showerror("Error", "Please enter a valid URL.")
            return

        try:
            # Time out so a dead host cannot hang the UI indefinitely
            response = requests.get(url, timeout=10)
            response.raise_for_status()
        except requests.exceptions.RequestException as e:
            messagebox.showerror("Error", f"Error fetching URL: {e}")
            return

        soup = BeautifulSoup(response.text, 'html.parser')

        # Extract all links from the webpage
        links = soup.find_all('a', href=True)

        if not links:
            messagebox.showinfo("Info", "No links found on the given URL.")
            return

        # Resolve relative hrefs against the page URL so every link is absolute
        complete_links = [urljoin(url, link['href']) for link in links]

        # Clear the ScrolledText before inserting new links
        self.links_text.delete(1.0, tk.END)

        # Insert links into the ScrolledText
        for link in complete_links:
            self.links_text.insert(tk.END, link + '\n')

    def scrape_selected_link(self):
        # Tk raises TclError when there is no selection, so guard the lookup
        try:
            selected_text = self.links_text.get(tk.SEL_FIRST, tk.SEL_LAST).strip()
        except tk.TclError:
            selected_text = ""

        if not selected_text:
            messagebox.showerror("Error", "Please select a link from the text.")
            return

        try:
            response = requests.get(selected_text, timeout=10)
            response.raise_for_status()
        except requests.exceptions.RequestException as e:
            messagebox.showerror("Error", f"Error fetching URL: {e}")
            return

        soup = BeautifulSoup(response.text, 'html.parser')

        # Extract all links from the selected webpage
        links = soup.find_all('a', href=True)

        if not links:
            messagebox.showinfo("Info", "No links found on the selected URL.")
            return

        # Resolve relative hrefs against the selected URL
        complete_links = [urljoin(selected_text, link['href']) for link in links]

        # Display results in the ScrolledText widget
        self.result_text.delete(1.0, tk.END)
        for link in complete_links:
            self.result_text.insert(tk.END, link + '\n')

    def save_results(self):
        file_path = "Files-Links.txt"

        with open(file_path, 'a', encoding='utf-8') as file:
            links = self.result_text.get(1.0, tk.END).strip()
            file.write(links + '\n\n')  # Add a blank line after each set of links

        messagebox.showinfo("Success", f"Links appended to {file_path}")

    def open_in_vlc(self, event):
        try:
            # Grab the full line under the double-click and treat it as a URL
            widget = event.widget
            index = widget.index("@%s,%s" % (event.x, event.y))
            line_start = f"{index.split('.')[0]}.0"
            line_end = f"{index.split('.')[0]}.end"
            selected_text = widget.get(line_start, line_end).strip()

            if not selected_text:
                messagebox.showerror("Error", "Please select a URL.")
                return

            # Default VLC install location on Windows; adjust if VLC lives elsewhere
            vlc_path = r"C:\Program Files\VideoLAN\VLC\vlc.exe"
            subprocess.Popen([vlc_path, selected_text])
        except Exception as e:
            messagebox.showerror("Error", f"Failed to open VLC: {e}")

if __name__ == "__main__":
    root = tk.Tk()
    app = LinkScraperGUI(root)
    root.mainloop()
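
The script relies on the third-party requests and beautifulsoup4 packages (tkinter ships with the standard CPython installers) and on a local VLC install for playback. The scraping pattern it uses, requests.get followed by BeautifulSoup's find_all('a', href=True) and urljoin, also works outside the GUI. Below is a minimal command-line sketch of that same pattern; the extract_links name and the optional .m3u/.m3u8/.ts suffix filter are illustrative additions, not part of the original script.

import sys
from urllib.parse import urljoin

import requests
from bs4 import BeautifulSoup


def extract_links(url, suffixes=None):
    """Return absolute hrefs found on `url`, optionally keeping only playlist-style suffixes."""
    response = requests.get(url, timeout=10)
    response.raise_for_status()
    soup = BeautifulSoup(response.text, "html.parser")
    links = [urljoin(url, a["href"]) for a in soup.find_all("a", href=True)]
    if suffixes:
        links = [link for link in links if link.lower().endswith(tuple(suffixes))]
    return links


if __name__ == "__main__":
    page = sys.argv[1] if len(sys.argv) > 1 else "https://example.com"
    for link in extract_links(page, suffixes=(".m3u", ".m3u8", ".ts")):
        print(link)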