Advertisement
Not a member of Pastebin yet?
Sign Up,
it unlocks many cool features!
# Backlink index checker (Google Colab script).
#
# Uploads a text file of backlink URLs, queries Google through the
# ScrapingRobot API with a `site:` search for each link, and records whether
# each backlink appears indexed into a CSV that is then downloaded.
#
# In a Colab cell, install the third-party dependency first if needed:
#   !pip install requests
import csv
import re

# SECURITY NOTE(review): hardcoded API credential — rotate this token and load
# it from an environment variable / Colab secret instead of embedding it.
API_TOKEN = "d6adcc0b-b8a4-47e0-a00c-23e9c33f0633"
OUTPUT_FILE = "backlink_index_status.csv"

# Google SERP snippets read e.g. "About 1,230 results"; allow thousands
# separators — the original `About \d+ results` pattern failed on them and
# the row was mislabeled "Error".
_RESULTS_RE = re.compile(r"About ([\d,]+) results")


def parse_index_status(serp_text):
    """Return "Indexed" or "Not Indexed" from a Google results-page snippet.

    A missing/empty snippet or an absent "About N results" phrase counts as
    "Not Indexed". (The original code passed a possibly-None API response
    straight into ``re.search``, raising TypeError and mislabeling the row
    "Error".)
    """
    if not serp_text:
        return "Not Indexed"
    match = _RESULTS_RE.search(serp_text)
    if match is None:
        return "Not Indexed"
    num_results = int(match.group(1).replace(",", ""))
    return "Indexed" if num_results >= 1 else "Not Indexed"


def check_backlinks(backlinks, api_token=API_TOKEN, output_file=OUTPUT_FILE):
    """Query the index status of each backlink and write a CSV report.

    The CSV is opened once for the whole batch (the original reopened it in
    append mode for every row). A network or parse failure for a single link
    is recorded as "Error" so the rest of the batch still completes.
    """
    from requests import get  # third-party; imported here, only needed at run time

    with open(output_file, "w", newline="") as csvfile:
        csvwriter = csv.writer(csvfile)
        csvwriter.writerow(["Backlink", "Index Status"])
        for backlink in backlinks:
            request_url = (
                f"https://api.scrapingrobot.com/?token={api_token}"
                f"&url=https://www.google.com/search?q=site:{backlink}"
            )
            try:
                serp_text = get(request_url).json().get("result")
                index_status = parse_index_status(serp_text)
                print(f"{backlink} Index Status: {index_status}")
            except Exception:
                # Best-effort per link: keep going, mark this row as an error.
                index_status = "Error"
            csvwriter.writerow([backlink, index_status])
    print(f"Index status has been saved to {output_file}")


def main():
    """Colab entry point: upload the backlink list, run the check, download the CSV."""
    # Colab-only module; imported locally so the helpers above remain
    # importable and testable outside Colab.
    from google.colab import files

    uploaded = files.upload()
    backlinks_file = list(uploaded.keys())[0]  # name of the uploaded file
    with open(backlinks_file, "r") as fh:
        backlinks = fh.read().splitlines()

    check_backlinks(backlinks)
    files.download(OUTPUT_FILE)


if __name__ == "__main__":
    main()
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement