import requests
import time
from urllib.parse import urlparse
class SessionCookieErrorDetector:
    """Probe URLs to detect responses that suggest an invalid/expired session.

    NOTE(review): no cookie is actually attached to the outgoing requests —
    the check is purely status-code based. Confirm whether callers rely on
    ambient cookies (e.g. a configured proxy/session) or whether a
    `requests.Session` should be threaded through here.
    """

    def __init__(self, max_retries=3, delay=5):
        # Maximum attempts per URL before recording it in failed_urls.
        self.max_retries = max_retries
        # Seconds to wait between retries and between rate-limited calls.
        self.delay = delay
        # URLs that kept raising request errors after exhausting all retries.
        self.failed_urls = []

    def check_session_cookie(self, url, max_retries=None, delay=None):
        """Return True if *url* answers HTTP 200, False otherwise.

        A 401 is reported as a likely session-cookie problem; any other
        non-200 status also returns False immediately. Network-level errors
        (DNS failure, timeout, connection refused) are retried with *delay*
        seconds between attempts; after the last failure the URL is appended
        to ``self.failed_urls``.

        Args:
            url: Target URL to probe.
            max_retries: Attempts before giving up. Defaults to the instance
                setting (previously this parameter hard-coded 3, silently
                shadowing the value configured in ``__init__``).
            delay: Seconds to sleep between retry attempts. Defaults to the
                instance setting (same shadowing bug as above).

        Returns:
            bool: True when the URL responded 200, False in every other case.
        """
        # Fall back to instance configuration instead of duplicating defaults.
        if max_retries is None:
            max_retries = self.max_retries
        if delay is None:
            delay = self.delay

        for attempt in range(max_retries):
            try:
                # Timeout keeps a hung connection from blocking forever.
                response = requests.get(url, timeout=10)
            except requests.exceptions.RequestException as e:
                print(f"Request error for {url}: {e}")
                if attempt < max_retries - 1:
                    time.sleep(delay)  # back off before the next attempt
                    continue
                self.failed_urls.append(url)  # exhausted retries: record it
                return False

            if response.status_code == 200:
                return True  # reachable and authorized: cookie looks valid
            if response.status_code == 401:
                # 401 often means auth/cookie trouble, but not always.
                print(f"Status code 401 detected for {url}. Potentially session cookie issue.")
                return False
            print(f"Error: {response.status_code} for {url}")
            return False  # any other status treated as a cookie problem

        return False  # defensive: loop exhausted without a decision

    def rate_limit(self):
        """Sleep for ``self.delay`` seconds to throttle outgoing requests."""
        # Announce before sleeping — the original printed this message after
        # the sleep completed, which misrepresented when the pause happened.
        print(f"Rate limiting: Sleeping for {self.delay} seconds.")
        time.sleep(self.delay)
if __name__ == '__main__':
    detector = SessionCookieErrorDetector()

    # Sample targets covering the interesting cases: two healthy sites,
    # a forced 401, a forced 500, and a host that will not resolve.
    urls_to_check = [
        "https://example.com",
        "https://www.google.com",
        "https://httpstat.us/401",  # forced 401 response
        "https://httpstat.us/500",  # forced 500 response
        "https://nonexistentdomain.example",  # unresolvable host
    ]

    for url in urls_to_check:
        cookie_ok = detector.check_session_cookie(url)
        verdict = (
            f"Session cookie valid for {url}"
            if cookie_ok
            else f"Session cookie invalid/problematic for {url}"
        )
        print(verdict)
        detector.rate_limit()  # throttle between consecutive checks
# NOTE(review): stray non-code text ("Add your comment") — a copy/paste
# artifact that made this file a SyntaxError; commented out, safe to delete.