Error Handling
Proper error handling is crucial for building reliable Bet365 scraping applications. This guide covers common errors, how to handle them, and best practices for resilient scraping.

Common Error Types

Cloudflare Blocking (403 Errors)

Cloudflare protection may block requests with incorrect headers or suspicious IP addresses.
session = Bet365AndroidSession(
    api_url=config["api_url"],
    api_key=config["api_key"],
    proxy=config["proxy"],
    host="www.bet365.com"
)

try:
    session.go_homepage()
except AssertionError as e:
    if "Blocked by Cloudflare" in str(e):
        print("Error: Request blocked by Cloudflare")
        print("Possible causes:")
        print("  - Bad IP address or proxy")
        print("  - Outdated headers")
        print("  - Missing TLS fingerprint")
        # Handle accordingly (switch proxy, update headers, etc.)
    else:
        raise
The SDK automatically includes the correct TLS fingerprint, but Cloudflare may still block based on IP reputation.

Configuration Errors (500 Errors)

Configuration fetch failures typically indicate IP-related issues:
try:
    session.go_homepage()
except AssertionError as e:
    if "Blocked while getting configuration" in str(e):
        print("Error: Configuration fetch failed")
        print("This is usually caused by:")
        print("  - Blocked IP address")
        print("  - Network connectivity issues")
        print("  - Proxy problems")
        # Implement retry logic or switch to different proxy
    else:
        raise

API Key Authentication Failures

The X-Net header generation requires a valid API key:
try:
    response = session.protected_get(url, headers=headers, params=params)
except AssertionError as e:
    if "error occured while generating token" in str(e).lower():
        print("Error: API key authentication failed")
        print("Possible causes:")
        print("  - Invalid API key")
        print("  - Expired API key")
        print("  - API service unavailable")
        print(f"Error details: {e}")
        # Check your API key and service status
    else:
        raise

Error Handling Patterns

Homepage Navigation with Retry

Step 1: Implement Retry Logic

Add retry mechanism for transient failures:
import time
from typing import Optional

def go_homepage_with_retry(
    session: "Bet365AndroidSession",
    max_retries: int = 3,
    delay: int = 5
) -> bool:
    """Navigate to the bet365 homepage, retrying on known transient failures.

    Args:
        session: An initialized Bet365AndroidSession.
        max_retries: Maximum number of attempts before giving up.
        delay: Seconds to wait between attempts.

    Returns:
        True once the homepage loads, False when every attempt failed with
        a recognized (Cloudflare / configuration) error.

    Raises:
        AssertionError: Re-raised unchanged for any assertion failure that
            is not a Cloudflare block or a configuration fetch failure —
            unknown errors are not retryable.
    """
    # Advice printed when the final attempt of each known failure mode fails.
    exhausted_advice = {
        "cloudflare": (
            "Max retries reached. Consider:",
            "  - Changing proxy",
            "  - Updating headers",
            "  - Waiting before retrying",
        ),
        "configuration": (
            "Max retries reached. Check:",
            "  - IP address status",
            "  - Proxy configuration",
            "  - Network connectivity",
        ),
    }

    for attempt in range(max_retries):
        try:
            session.go_homepage()
            print("Successfully loaded homepage")
            return True
        except AssertionError as e:
            error_msg = str(e)

            # Classify the failure; anything unrecognized is surfaced as-is.
            if "Blocked by Cloudflare" in error_msg:
                kind, label = "cloudflare", "Blocked by Cloudflare"
            elif "configuration" in error_msg.lower():
                kind, label = "configuration", "Configuration fetch failed"
            else:
                raise

            print(f"Attempt {attempt + 1}: {label}")
            if attempt < max_retries - 1:
                print(f"Retrying in {delay} seconds...")
                time.sleep(delay)
            else:
                for line in exhausted_advice[kind]:
                    print(line)
                return False

    return False
Step 2: Use in Your Application

session = Bet365AndroidSession(
    config["api_url"],
    config["api_key"],
    proxy=config["proxy"],
    host="www.bet365.com"
)

if go_homepage_with_retry(session):
    # Proceed with scraping
    sports = session.extract_available_sports()
else:
    # Handle failure
    print("Failed to initialize session")
    exit(1)

Protected Request Error Handling

import requests
from typing import Optional

def safe_protected_get(
    session: "Bet365AndroidSession",
    url: str,
    headers: "Optional[dict]" = None,
    **kwargs
) -> "Optional[requests.Response]":
    """Make a protected GET request, returning None instead of raising.

    Args:
        session: An initialized Bet365AndroidSession.
        url: Target URL for the protected GET.
        headers: Optional extra request headers.
        **kwargs: Forwarded to ``session.protected_get`` (e.g. ``params``).

    Returns:
        The response on HTTP 200, otherwise None (after printing a
        diagnostic message describing the failure).
    """
    # Keep the try minimal: only the request itself raises the errors
    # we want to absorb here.
    try:
        response = session.protected_get(url, headers=headers, **kwargs)
    except AssertionError as e:
        print(f"Assertion error during request: {e}")
        return None
    except Exception as e:
        print(f"Unexpected error during request: {type(e).__name__}: {e}")
        return None

    if response.status_code == 200:
        return response

    if response.status_code == 403:
        print(f"Access denied (403) for {url}")
        print("The request was blocked. Check:")
        print("  - IP reputation")
        print("  - Request headers")
        print("  - Rate limiting")
    elif response.status_code == 500:
        print(f"Server error (500) for {url}")
        print("This may be temporary. Consider retrying.")
    else:
        print(f"Unexpected status code {response.status_code} for {url}")
    return None

Parsing Error Handling

from bet365.message_parser import get_parsers, read_table, fix_data
from typing import List, Dict, Any

def safe_extract_match_tables(
    response_text: str
) -> List[Dict[str, Any]]:
    """Parse *response_text* and collect every readable match table.

    Failures at any stage — top-level parsing, section discovery, or
    reading an individual table — are reported to stdout and skipped,
    so a partially malformed response still yields whatever tables
    could be recovered.
    """
    tables: List[Dict[str, Any]] = []

    try:
        message_parsers = get_parsers(response_text)
    except Exception as exc:
        print(f"Failed to parse response: {exc}")
        return tables

    for message_parser in message_parsers:
        try:
            # Walk the content sections, then each match group inside them.
            content_sections = message_parser.find_sections(
                "CL",
                PV=lambda k, v: v and v.startswith("podcontentcontentapi"),
                include_part_index=True
            )
            for _, _ in content_sections:
                group_indices = message_parser.find_sections(
                    "MG", include_part_index=True
                )
                for group_idx, _ in group_indices:
                    try:
                        raw_table = read_table(message_parser, group_idx)

                        # Skip tables missing the expected structure.
                        if not raw_table or "data" not in raw_table:
                            print(f"Invalid table structure at index {group_idx}")
                            continue

                        tables.append(fix_data(raw_table))

                    except AssertionError as exc:
                        print(f"Assertion failed reading table at {group_idx}: {exc}")
                        continue
                    except Exception as exc:
                        print(f"Error reading table at {group_idx}: {type(exc).__name__}: {exc}")
                        continue

        except Exception as exc:
            print(f"Error processing parser: {type(exc).__name__}: {exc}")
            continue

    return tables

Understanding Assert Statements

The SDK uses assert statements for validation. Here are the key assertions:

Homepage Navigation Assertions

# From android.py:167-171
assert homepage_response.status_code == 200, (
    "Blocked by Cloudflare, bad IP or headers should be updated"
    if homepage_response.status_code == 403
    else f"Unknown error while going to homepage: {homepage_response.status_code}"
)
Handling:
try:
    session.go_homepage()
except AssertionError as e:
    if "403" in str(e) or "Cloudflare" in str(e):
        # Handle Cloudflare block
        handle_cloudflare_block()
    else:
        # Handle other errors
        handle_other_error(e)

Configuration Fetch Assertions

# From android.py:193-197
assert configuration_response.status_code == 200, (
    "Blocked while getting configuration, probably bad IP"
    if configuration_response.status_code == 500
    else f"Unknown error while fetching configuration: {configuration_response.status_code}"
)
Handling:
try:
    session.go_homepage()
except AssertionError as e:
    if "configuration" in str(e).lower():
        # IP or network issue
        switch_proxy_and_retry()

API Token Generation Assertions

# From android.py:254-256
assert response.status_code == 200, (
    "An error occured while generating token: " + response.text
)
Handling:
try:
    response = session.protected_get(url, headers=headers)
except AssertionError as e:
    if "generating token" in str(e):
        # API key or service issue
        check_api_credentials()

Table Reading Assertions

# From message_parser.py:143
assert parser.sections[0][idx + 1].type == "MA"
Handling:
try:
    table = read_table(parser, idx)
except (AssertionError, IndexError) as e:
    print(f"Invalid table structure at index {idx}")
    # Skip this table and continue
    continue

Best Practices for Resilient Scraping

1. Always Use Try-Except Blocks

Wrap all SDK calls in try-except blocks:
try:
    session.go_homepage()
except AssertionError as e:
    # Handle known assertion errors
    handle_assertion_error(e)
except Exception as e:
    # Handle unexpected errors
    log_error(e)
2. Implement Exponential Backoff

Use exponential backoff for retries:
import time

def exponential_backoff_retry(func, max_retries=5):
    """Call *func*, retrying with exponential backoff on AssertionError.

    Waits ``2**i`` seconds after attempt ``i`` (1, 2, 4, 8, 16 seconds) and
    re-raises the last AssertionError once all attempts are exhausted.

    Args:
        func: Zero-argument callable to invoke.
        max_retries: Total number of attempts. Values below 1 still make a
            single attempt (previously this silently returned None without
            ever calling *func*).

    Returns:
        Whatever *func* returns on its first successful call.
    """
    attempts = max(1, max_retries)  # guard: always call func at least once
    for i in range(attempts):
        try:
            return func()
        except AssertionError:
            if i == attempts - 1:
                raise
            wait_time = 2 ** i  # 1, 2, 4, 8, 16 seconds
            print(f"Retry {i+1}/{max_retries} in {wait_time}s")
            time.sleep(wait_time)
3. Validate Data Before Processing

Always validate parsed data:
table = read_table(parser, idx)

# Validate structure
if not table or "data" not in table:
    continue

# Validate content
if not table["data"] or len(table["data"]) == 0:
    continue

# Process valid data
process_table(table)
4. Log Errors for Debugging

Maintain detailed logs:
import logging

logging.basicConfig(
    level=logging.INFO,
    format='%(asctime)s - %(levelname)s - %(message)s',
    handlers=[
        logging.FileHandler('scraper.log'),
        logging.StreamHandler()
    ]
)

try:
    session.go_homepage()
except AssertionError as e:
    logging.error(f"Homepage navigation failed: {e}")
5. Handle Rate Limiting

Implement delays between requests:
import time

sports = session.extract_available_sports()

for sport in sports:
    try:
        session.get_sport_homepage(sport)
        time.sleep(2)  # 2-second delay between requests
    except Exception as e:
        print(f"Error fetching {sport.name}: {e}")
        continue

Complete Error Handling Example

import json
import time
import logging
from bet365 import Bet365AndroidSession
from bet365.message_parser import get_parsers, read_table, fix_data

# Setup logging
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)

def _load_config(path="config.json"):
    """Read the JSON configuration file; return the dict, or None on failure."""
    try:
        with open(path) as fp:
            return json.load(fp)
    except FileNotFoundError:
        logger.error("config.json not found")
    except json.JSONDecodeError as e:
        logger.error(f"Invalid JSON in config.json: {e}")
    return None


def main():
    """Run a full scraping pass: config, session, homepage, sports, cleanup."""
    config = _load_config()
    if config is None:
        return

    session = Bet365AndroidSession(
        config["api_url"],
        config["api_key"],
        proxy=config.get("proxy"),
        host="www.bet365.com"
    )

    # Up to three homepage attempts, five seconds apart; give up after that.
    for attempt in range(3):
        try:
            session.go_homepage()
            logger.info("Successfully loaded homepage")
        except AssertionError as e:
            logger.error(f"Attempt {attempt + 1} failed: {e}")
            if attempt == 2:
                logger.error("Max retries reached, exiting")
                return
            time.sleep(5)
        else:
            break

    try:
        sports = session.extract_available_sports()
        logger.info(f"Found {len(sports)} sports")
    except Exception as e:
        logger.error(f"Failed to extract sports: {e}")
        return

    # Only the first five sports; pause between requests for rate limiting.
    for sport in sports[:5]:
        try:
            logger.info(f"Fetching {sport.name}...")
            session.get_sport_homepage(sport)
            time.sleep(2)
        except AssertionError as e:
            logger.error(f"Failed to fetch {sport.name}: {e}")
        except Exception as e:
            logger.error(f"Unexpected error for {sport.name}: {e}")

    # Let any in-flight background thread finish before exiting.
    if session.zap_thread:
        session.zap_thread.join()

    logger.info("Scraping completed")


if __name__ == "__main__":
    main()
Always save raw responses to files during development (like response.txt) to help debug parsing issues.

Next Steps

Build docs developers (and LLMs) love