Troubleshooting
Common issues, debugging tips, and solutions to frequent problems when using proxies.
On this page
Connection Issues
Common connection problems and their solutions.
Proxy Connection Refused
- •Connection refused errors
- •"Unable to connect to proxy server" messages
- •Immediate connection failures
1. Incorrect Proxy Endpoint
The most common issue is an incorrectly formatted proxy URL:
python
# Wrong - missing port
proxy = 'http://username:password@proxy-server'
# Correct - includes port
proxy = 'http://username:password@proxy-server:8080'
# BirdProxies format example
proxy = 'http://birduser:[email protected]:8080'

2. Firewall or Network Blocking
Test connectivity to verify the proxy server is reachable:
bash
# Test proxy connectivity
curl -x http://username:[email protected]:8080 http://httpbin.org/ip
# Test with verbose output for debugging
curl -v -x http://username:[email protected]:8080 http://httpbin.org/ip

3. Proxy Server Down
python
import socket
def test_proxy_connectivity(proxy_host, proxy_port, timeout=10):
try:
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.settimeout(timeout)
result = sock.connect_ex((proxy_host, proxy_port))
sock.close()
if result == 0:
print(f"Success: {proxy_host}:{proxy_port} is reachable")
return True
else:
print(f"Failed: {proxy_host}:{proxy_port} is not reachable")
return False
except Exception as e:
print(f"Error testing {proxy_host}:{proxy_port}: {e}")
return False
# Test your BirdProxies endpoint
test_proxy_connectivity('residential.birdproxies.com', 8080)

SSL/TLS Certificate Errors
- •SSL certificate verification failed
- •"certificate verify failed" errors
- •HTTPS requests failing while HTTP works
1. Disable SSL Verification (Not Recommended for Production)
Only use this for testing purposes:
python
import requests
import urllib3
# Disable SSL warnings
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
# Make request without SSL verification
response = requests.get(
'https://example.com',
proxies={'https': 'http://birduser:[email protected]:8080'},
verify=False # Disables SSL verification
)

2. Use Custom Certificate Bundle
For production environments, use proper certificates:
python
import requests
import certifi
response = requests.get(
'https://example.com',
proxies={'https': 'http://birduser:[email protected]:8080'},
verify=certifi.where() # Use certifi's certificate bundle
)

3. Debug SSL Issues
python
import ssl
import socket
def debug_ssl_connection(hostname, port=443):
    """Open a TLS connection to hostname:port and report handshake details.

    Returns True on a successful handshake, False otherwise.
    """
    ctx = ssl.create_default_context()
    try:
        with socket.create_connection((hostname, port), timeout=10) as raw_sock:
            with ctx.wrap_socket(raw_sock, server_hostname=hostname) as tls_sock:
                print(f"✅ SSL connection to {hostname} successful")
                print(f"Protocol: {tls_sock.version()}")
                print(f"Cipher: {tls_sock.cipher()}")
                peer_cert = tls_sock.getpeercert()
                print(f"Certificate subject: {peer_cert['subject']}")
                return True
    except Exception as e:
        print(f"❌ SSL connection failed: {e}")
        return False
# Test SSL connection to target site
debug_ssl_connection('httpbin.org')
# Test SSL connection to proxy server
debug_ssl_connection('residential.birdproxies.com')

Timeout Issues
- •Requests hanging indefinitely
- •Timeout exceptions
- •Slow response times
1. Set Appropriate Timeouts
Always set reasonable timeout values to prevent hanging requests:
python
import requests
from requests.exceptions import Timeout
def make_request_with_timeout(url, proxies, connect_timeout=10, read_timeout=30):
    """GET url through the given proxies, bounding both request phases.

    Returns the Response on success, or None when either the connect or
    the read phase times out.
    """
    # requests accepts a (connect, read) tuple for per-phase timeouts.
    phase_timeouts = (connect_timeout, read_timeout)
    try:
        return requests.get(url, proxies=proxies, timeout=phase_timeouts)
    except Timeout as e:
        print(f"Request timed out: {e}")
        return None
# Usage
response = make_request_with_timeout(
'https://example.com',
proxies={'https': 'http://birduser:[email protected]:8080'},
connect_timeout=5,
read_timeout=15
)

2. Implement Retry Logic with Backoff
python
import time
import random
from requests.exceptions import Timeout, ConnectionError
def retry_request(url, proxies, max_retries=3, base_delay=1):
    """GET url through proxies, retrying on timeouts and connection errors.

    Uses exponential backoff with jitter: base_delay * 2**attempt plus a
    random 0-1s component. Re-raises the last error once all retries are
    exhausted.
    """
    for attempt in range(max_retries):
        try:
            response = requests.get(
                url,
                proxies=proxies,
                timeout=(10, 30)  # (connect, read) seconds
            )
            return response
        except (Timeout, ConnectionError) as e:
            if attempt < max_retries - 1:
                delay = base_delay * (2 ** attempt) + random.uniform(0, 1)
                print(f"Attempt {attempt + 1} failed, retrying in {delay:.2f}s...")
                time.sleep(delay)
            else:
                print(f"All {max_retries} attempts failed")
                # Bare raise (not `raise e`) keeps the original traceback intact.
                raise
# Usage
try:
response = retry_request(
'https://example.com',
proxies={'https': 'http://birduser:[email protected]:8080'}
)
print("Request successful!")
except Exception as e:
print(f"Request failed after all retries: {e}")

Authentication Problems
Resolving proxy authentication issues.
Invalid Credentials
- •407 Proxy Authentication Required
- •"Proxy authentication failed" errors
- •Immediate rejection of requests
1. Verify Credentials Format
python
import base64
import requests
def test_proxy_auth(username, password, proxy_host, proxy_port):
    """Probe a proxy with Basic credentials and report whether auth succeeds.

    Builds the Proxy-Authorization header by hand to mirror what a client
    library would send; returns True on HTTP 200, False otherwise.
    """
    token = base64.b64encode(f"{username}:{password}".encode()).decode()
    proxy_endpoint = f'http://{proxy_host}:{proxy_port}'
    try:
        response = requests.get(
            'http://httpbin.org/ip',
            proxies={'http': proxy_endpoint, 'https': proxy_endpoint},
            headers={'Proxy-Authorization': f'Basic {token}'},
            timeout=10,
        )
    except Exception as e:
        print(f"❌ Request failed: {e}")
        return False
    if response.status_code == 200:
        print("✅ Authentication successful")
        return True
    print(f"❌ Authentication failed: {response.status_code}")
    return False
# Test your credentials
test_proxy_auth('your_username', 'your_password', 'proxy-server.com', 8080)

2. Check for Special Characters in Credentials
python
from urllib.parse import quote
def encode_proxy_credentials(username, password):
    """Percent-encode both credentials so special characters survive a proxy URL.

    safe='' forces even characters like '@', '/' and ':' to be escaped.
    """
    return quote(username, safe=''), quote(password, safe='')
# Example with special characters
username = 'user@domain'
password = 'pass#word!'
encoded_user, encoded_pass = encode_proxy_credentials(username, password)
proxy_url = f'http://{encoded_user}:{encoded_pass}@proxy-server.com:8080'
print(f"Original: {username}:{password}")
print(f"Encoded: {encoded_user}:{encoded_pass}")
print(f"Proxy URL: {proxy_url}")

IP Whitelisting Issues
- •Authentication works from some IPs but not others
- •"Source IP not allowed" errors
- •Intermittent authentication failures
1. Check Current IP Address
python
import requests
def get_current_ip():
    """Return this machine's public IP as seen without any proxy, or None on failure."""
    try:
        # Direct request — deliberately no proxies here.
        reply = requests.get('http://httpbin.org/ip', timeout=10)
        return reply.json()['origin']
    except Exception as e:
        print(f"Error getting IP: {e}")
        return None
current_ip = get_current_ip()
print(f"Your current IP: {current_ip}")
print("Make sure this IP is whitelisted in your proxy settings")

2. Test from Different Networks
python
def test_proxy_from_different_sources():
    """Compare the IP reported by several services, with and without the proxy."""
    endpoints = (
        'http://httpbin.org/ip',
        'http://ipinfo.io/json',
        'http://ip-api.com/json',
    )
    for endpoint in endpoints:
        try:
            # Baseline: direct connection.
            direct = requests.get(endpoint, timeout=5)
            print(f"Direct connection to {endpoint}: {direct.json()}")
            # Same service, but routed through the proxy.
            proxied = requests.get(
                endpoint,
                proxies={'http': 'http://user:pass@proxy:port'},
                timeout=10,
            )
            print(f"Proxy connection to {endpoint}: {proxied.json()}")
        except Exception as e:
            print(f"Error testing {endpoint}: {e}")
        print("---")
test_proxy_from_different_sources()

Session Expiration
- •Authentication works initially then fails
- •Need to re-authenticate periodically
- •"Session expired" errors
1. Implement Session Refresh
python
import time
from datetime import datetime, timedelta
class SessionManager:
    """Keeps a requests.Session fresh by recreating it after a fixed lifetime."""

    def __init__(self, proxy_config, session_duration_minutes=30):
        # proxy_config must provide 'http' and 'https' proxy URLs.
        self.proxy_config = proxy_config
        self.session_duration = timedelta(minutes=session_duration_minutes)
        self.session_start = None
        self.session = None

    def get_session(self):
        """Return the current session, refreshing it first when missing or expired."""
        needs_refresh = (
            self.session is None
            or self.session_start is None
            or datetime.now() - self.session_start > self.session_duration
        )
        if needs_refresh:
            self.refresh_session()
        return self.session

    def refresh_session(self):
        """Tear down the old session, build a new proxied one, and verify it."""
        print("Refreshing proxy session...")
        if self.session:
            self.session.close()
        fresh = requests.Session()
        fresh.proxies = {
            'http': self.proxy_config['http'],
            'https': self.proxy_config['https'],
        }
        self.session = fresh
        # Smoke-test the new session; only a 200 counts as a valid refresh.
        try:
            check = self.session.get('http://httpbin.org/ip', timeout=10)
            if check.status_code == 200:
                self.session_start = datetime.now()
                print("✅ Session refreshed successfully")
            else:
                print(f"❌ Session refresh failed: {check.status_code}")
        except Exception as e:
            print(f"❌ Session refresh error: {e}")
# Usage
proxy_config = {
'http': 'http://user:pass@proxy:port',
'https': 'http://user:pass@proxy:port'
}
session_manager = SessionManager(proxy_config, session_duration_minutes=15)
# Use the session
for i in range(10):
session = session_manager.get_session()
try:
response = session.get('http://httpbin.org/ip')
print(f"Request {i+1}: {response.json()['origin']}")
except Exception as e:
print(f"Request {i+1} failed: {e}")
time.sleep(60) # Wait 1 minute between requests

Performance Issues
Diagnosing and fixing slow proxy performance.
Slow Response Times
1. Measure Request Performance
python
import time
import statistics
from contextlib import contextmanager
@contextmanager
def measure_time():
    """Context manager that prints how long its body took (success path only)."""
    started = time.time()
    yield
    print(f"Request took {time.time() - started:.2f} seconds")
class PerformanceMonitor:
    """Collects per-request latency statistics for a proxy and prints a summary."""

    def __init__(self):
        self.response_times = []  # seconds, successful (HTTP 200) requests only
        self.error_count = 0
        self.success_count = 0

    def test_proxy_performance(self, proxy_url, test_url='http://httpbin.org/ip', num_tests=10):
        """Fire num_tests GET requests at test_url through proxy_url, recording timings."""
        print(f"Testing proxy performance with {num_tests} requests...")
        for i in range(num_tests):
            start_time = time.time()
            try:
                response = requests.get(
                    test_url,
                    proxies={'http': proxy_url, 'https': proxy_url},
                    timeout=30
                )
                response_time = time.time() - start_time
                if response.status_code == 200:
                    self.response_times.append(response_time)
                    self.success_count += 1
                    print(f"Request {i+1}: {response_time:.2f}s ✅")
                else:
                    self.error_count += 1
                    print(f"Request {i+1}: HTTP {response.status_code} ❌")
            except Exception as e:
                self.error_count += 1
                print(f"Request {i+1}: Error - {e} ❌")
        self.print_summary()

    def print_summary(self):
        """Print aggregate latency statistics for the recorded requests."""
        if not self.response_times:
            print("❌ No successful requests to analyze")
            return
        avg_time = statistics.mean(self.response_times)
        min_time = min(self.response_times)
        max_time = max(self.response_times)
        median_time = statistics.median(self.response_times)
        # The original snippet lost the "\n" escape here, splitting the
        # f-string across two physical lines and breaking the code.
        print("\n📊 Performance Summary:")
        print(f"  Successful requests: {self.success_count}")
        print(f"  Failed requests: {self.error_count}")
        print(f"  Average response time: {avg_time:.2f}s")
        print(f"  Minimum response time: {min_time:.2f}s")
        print(f"  Maximum response time: {max_time:.2f}s")
        print(f"  Median response time: {median_time:.2f}s")
        if len(self.response_times) > 1:
            std_dev = statistics.stdev(self.response_times)
            print(f"  Standard deviation: {std_dev:.2f}s")
# Usage
monitor = PerformanceMonitor()
monitor.test_proxy_performance('http://user:pass@proxy:port')

2. Compare Multiple Proxies
python
def compare_proxy_performance(proxy_list, test_url='http://httpbin.org/ip'):
    """Benchmark each proxy in proxy_list and print a ranking by average latency.

    Relies on PerformanceMonitor (defined above) for the per-proxy
    measurements; proxies with zero successful requests are excluded
    from the ranking.
    """
    results = {}
    for i, proxy in enumerate(proxy_list):
        # The "\n" escapes below were lost in the original snippet, which
        # split these f-strings across two physical lines and broke the code.
        print(f"\n🔍 Testing Proxy {i+1}: {proxy}")
        monitor = PerformanceMonitor()
        monitor.test_proxy_performance(proxy, test_url, num_tests=5)
        if monitor.response_times:
            results[f"Proxy {i+1}"] = {
                'proxy': proxy,
                'avg_time': statistics.mean(monitor.response_times),
                'success_rate': monitor.success_count / (monitor.success_count + monitor.error_count) * 100,
            }
    # Fastest average response time first.
    sorted_results = sorted(results.items(), key=lambda item: item[1]['avg_time'])
    print("\n🏆 Proxy Performance Ranking:")
    for rank, (name, data) in enumerate(sorted_results, 1):
        print(f"  {rank}. {name}: {data['avg_time']:.2f}s avg, {data['success_rate']:.1f}% success")
# Test multiple proxies
proxy_list = [
'http://user1:pass1@proxy1:port1',
'http://user2:pass2@proxy2:port2',
'http://user3:pass3@proxy3:port3',
]
compare_proxy_performance(proxy_list)

Network Latency Issues
python
import subprocess
import re
import os
def traceroute_to_proxy(proxy_host):
    """Run the platform traceroute tool against proxy_host and summarize hop times."""
    try:
        # 'tracert' on Windows, 'traceroute' everywhere else.
        cmd = ['traceroute', proxy_host] if os.name != 'nt' else ['tracert', proxy_host]
        result = subprocess.run(cmd, capture_output=True, text=True, timeout=60)
        if result.returncode == 0:
            print(f"Traceroute to {proxy_host}:")
            print(result.stdout)
            # Extract per-hop round-trip times like "12.3 ms". The original
            # pattern had its backslashes stripped (r'(d+(?:.d+)?)s*ms'),
            # so it never matched real traceroute output.
            hop_times = re.findall(r'(\d+(?:\.\d+)?)\s*ms', result.stdout)
            if hop_times:
                total_hops = len(hop_times)
                avg_hop_time = sum(float(t) for t in hop_times) / len(hop_times)
                # The "\n" escape was also lost here in the original.
                print(f"\nSummary: {total_hops} hops, average {avg_hop_time:.2f}ms per hop")
        else:
            print(f"Traceroute failed: {result.stderr}")
    except Exception as e:
        print(f"Error running traceroute: {e}")
# Test network path to proxy
traceroute_to_proxy('your-proxy-server.com')

python
import socket
import time
def test_dns_resolution(hostname):
print(f"Testing DNS resolution for {hostname}")
try:
start_time = time.time()
ip_address = socket.gethostbyname(hostname)
end_time = time.time()
resolution_time = (end_time - start_time) * 1000 # Convert to ms
print(f"✅ {hostname} resolves to {ip_address} in {resolution_time:.2f}ms")
return ip_address
except socket.gaierror as e:
print(f"❌ DNS resolution failed: {e}")
return None
def test_multiple_dns_servers(hostname):
    """Resolve hostname once per well-known public DNS server in the list.

    Note: Python's socket resolver always uses the system-configured DNS;
    this loop only labels each pass. Truly switching servers requires
    system-level changes.
    """
    dns_servers = [
        '8.8.8.8', # Google DNS
        '1.1.1.1', # Cloudflare DNS
        '208.67.222.222' # OpenDNS
    ]
    # getdefaulttimeout returns the process-wide socket timeout, not a DNS
    # setting — renamed from the original's misleading `original_dns`.
    original_timeout = socket.getdefaulttimeout()
    socket.setdefaulttimeout(5)
    try:
        for dns_server in dns_servers:
            # The "\n" escape was lost in the original snippet, splitting
            # this f-string across two physical lines.
            print(f"\nTesting with DNS server {dns_server}")
            # Note: This is a simplified test - actual DNS server switching requires system-level changes
            test_dns_resolution(hostname)
    finally:
        # Always restore the process-wide timeout, even if resolution raises.
        socket.setdefaulttimeout(original_timeout)
# Test DNS resolution
test_dns_resolution('your-proxy-server.com')
test_multiple_dns_servers('your-proxy-server.com')

Memory and Resource Issues
python
import psutil
import threading
import time
class ResourceMonitor:
    """Samples CPU, memory, and network counters once per loop in a background thread."""

    def __init__(self):
        self.monitoring = False
        self.stats = {
            'cpu_usage': [],
            'memory_usage': [],
            'network_io': []
        }

    def start_monitoring(self):
        """Spawn the background sampling thread."""
        self.monitoring = True
        self.monitor_thread = threading.Thread(target=self._monitor_loop)
        self.monitor_thread.start()

    def stop_monitoring(self):
        """Signal the sampler to stop and wait for the thread to finish."""
        self.monitoring = False
        if hasattr(self, 'monitor_thread'):
            self.monitor_thread.join()

    def _monitor_loop(self):
        # psutil.cpu_percent(interval=1) already blocks for one second;
        # the trailing sleep(1) matches the original sampling cadence.
        while self.monitoring:
            self.stats['cpu_usage'].append(psutil.cpu_percent(interval=1))
            self.stats['memory_usage'].append(psutil.virtual_memory().percent)
            counters = psutil.net_io_counters()
            self.stats['network_io'].append({
                'bytes_sent': counters.bytes_sent,
                'bytes_recv': counters.bytes_recv
            })
            time.sleep(1)

    def get_summary(self):
        """Return a formatted average/peak report, or a notice when nothing was sampled."""
        cpu_samples = self.stats['cpu_usage']
        if not cpu_samples:
            return "No monitoring data available"
        mem_samples = self.stats['memory_usage']
        avg_cpu = sum(cpu_samples) / len(cpu_samples)
        avg_memory = sum(mem_samples) / len(mem_samples)
        return f"""
Resource Usage Summary:
CPU: {avg_cpu:.1f}% average, {max(cpu_samples):.1f}% peak
Memory: {avg_memory:.1f}% average, {max(mem_samples):.1f}% peak
Monitoring duration: {len(cpu_samples)} seconds
"""
# Usage
monitor = ResourceMonitor()
monitor.start_monitoring()
try:
# Run your proxy requests here
for i in range(10):
response = requests.get(
'http://httpbin.org/ip',
proxies={'http': 'http://user:pass@proxy:port'}
)
print(f"Request {i+1} completed")
time.sleep(2)
finally:
monitor.stop_monitoring()
print(monitor.get_summary())

python
from requests.adapters import HTTPAdapter
from urllib3.util.retry import Retry
class OptimizedProxySession:
    """requests.Session wrapper with connection pooling, retries, and keep-alive
    for routing traffic through a single proxy."""

    def __init__(self, proxy_url, pool_connections=20, pool_maxsize=20):
        self.session = requests.Session()
        # Retry transient failures and rate limits with a short backoff.
        retry_policy = Retry(
            total=3,
            backoff_factor=0.3,
            status_forcelist=[429, 500, 502, 503, 504],
        )
        # One adapter shared by both schemes provides the reusable pool.
        pooled_adapter = HTTPAdapter(
            max_retries=retry_policy,
            pool_connections=pool_connections,
            pool_maxsize=pool_maxsize,
        )
        for scheme in ("http://", "https://"):
            self.session.mount(scheme, pooled_adapter)
        self.session.proxies = {
            'http': proxy_url,
            'https': proxy_url
        }
        # Ask servers to hold connections open so the pool can reuse them.
        self.session.headers.update({
            'Connection': 'keep-alive',
            'Keep-Alive': 'timeout=30, max=100'
        })

    def get(self, url, **kwargs):
        """Issue a GET through the pooled, proxied session."""
        return self.session.get(url, **kwargs)

    def close(self):
        """Release all pooled connections."""
        self.session.close()
# Usage
optimized_session = OptimizedProxySession('http://user:pass@proxy:port')
try:
# Multiple requests will reuse connections
for i in range(50):
response = optimized_session.get('http://httpbin.org/ip')
print(f"Request {i+1}: {response.status_code}")
finally:
optimized_session.close()

Found an issue? Let us know on Discord
Go to Dashboard