"""Moz Bulk Site Metrics Checker.

Fetches site metrics for a list of domains or URLs using the Moz JSON-RPC API
(``data.site.metrics.fetch``) and writes the results to a CSV file.

Configuration:
    1. **Scope** -- defines the scope of each query. Acceptable values:
       - 'domain':    analyze the entire domain
       - 'subdomain': analyze a specific subdomain
       - 'subfolder': analyze a specific subfolder
       - 'url':       analyze a specific URL
       You are prompted for the scope at run time (blank input = 'domain').
    2. **Token** -- replace the placeholder in ``headers`` with your real
       Moz API token before running.

Output CSV columns:
    Query, Page, Title, Last Crawled, HTTP Code, Spam Score, Domain Authority,
    Page Authority, External Pages to Page, External Pages to Root Domain,
    Root Domains to Page, Root Domains to Root Domain
"""

import json
import uuid

import pandas as pd
import requests

# google.colab only exists inside a Colab runtime; degrade gracefully so the
# script still runs (and writes the CSV) in a normal Python environment.
try:
    from google.colab import files
except ImportError:
    files = None

# SECURITY NOTE: never commit a real API token to source control.
# Replace the placeholder below with your actual Moz token at run time.
headers = {
    "x-moz-token": "YOUR_MOZ_API_TOKEN",
    "Content-Type": "application/json",
}

# Seconds to wait for the Moz API before giving up (connect + read).
REQUEST_TIMEOUT = 30

# Scope values accepted by the Moz site_query object.
VALID_SCOPES = ("domain", "subdomain", "subfolder", "url")

# site_metrics field -> CSV column header, in output order.
_METRIC_COLUMNS = {
    "page": "Page",
    "title": "Title",
    "last_crawled": "Last Crawled",
    "http_code": "HTTP Code",
    "spam_score": "Spam Score",
    "domain_authority": "Domain Authority",
    "page_authority": "Page Authority",
    "external_pages_to_page": "External Pages to Page",
    "external_pages_to_root_domain": "External Pages to Root Domain",
    "root_domains_to_page": "Root Domains to Page",
    "root_domains_to_root_domain": "Root Domains to Root Domain",
}


def fetch_site_metrics(query, scope="domain"):
    """Fetch Moz site metrics for a single domain or URL.

    Args:
        query: The domain or URL to look up.
        scope: Query scope -- one of 'domain', 'subdomain', 'subfolder', 'url'.

    Returns:
        A dict mapping CSV column names to metric values, or ``None`` if the
        request failed (network error or non-200 HTTP status).
    """
    payload = {
        "jsonrpc": "2.0",
        # Fresh id per request so responses are individually correlatable.
        "id": str(uuid.uuid4()),
        "method": "data.site.metrics.fetch",
        "params": {
            "data": {
                "site_query": {
                    "query": query,
                    "scope": scope,
                }
            }
        },
    }

    try:
        # json= serializes the payload and sets Content-Type automatically;
        # timeout= prevents a hung connection from blocking forever.
        response = requests.post(
            "https://api.moz.com/jsonrpc",
            headers=headers,
            json=payload,
            timeout=REQUEST_TIMEOUT,
        )
    except requests.RequestException as exc:
        print(f"Request failed for {query}: {exc}")
        return None

    if response.status_code != 200:
        print(f"Failed to fetch data for {query}. "
              f"HTTP Status Code: {response.status_code}")
        print(f"Response: {response.text}")
        return None

    result = response.json().get("result", {})
    site_metrics = result.get("site_metrics", {})
    site_query = result.get("site_query", {})

    row = {"Query": site_query.get("query")}
    for field, column in _METRIC_COLUMNS.items():
        row[column] = site_metrics.get(field)
    return row


def main():
    """Prompt for queries and scope, fetch metrics, and save/download a CSV."""
    print("Enter domains or URLs (comma-separated):")
    raw = input().strip()
    # Drop empty entries produced by stray/trailing commas.
    queries = [q.strip() for q in raw.split(",") if q.strip()]

    # Scope is promised by the docs to be configurable; blank = 'domain'.
    scope = input(
        f"Enter scope {VALID_SCOPES} [domain]: "
    ).strip().lower() or "domain"
    if scope not in VALID_SCOPES:
        print(f"Unknown scope '{scope}', falling back to 'domain'.")
        scope = "domain"

    site_metrics_list = []
    for query in queries:
        print(f"Fetching metrics for: {query}")
        metrics = fetch_site_metrics(query, scope=scope)
        if metrics:
            site_metrics_list.append(metrics)

    if not site_metrics_list:
        print("No metrics data fetched.")
        return

    df = pd.DataFrame(site_metrics_list)
    output_file = "bulk_site_metrics.csv"
    df.to_csv(output_file, index=False)
    print(f"Metrics successfully saved to {output_file}")

    # Trigger a browser download only when running inside Google Colab.
    if files is not None:
        files.download(output_file)


if __name__ == "__main__":
    main()