Skip to main content
GET
/
api
/
query
/
insights
Insights
curl --request GET \
  --url https://mixpanel.com/api/query/insights
{
  "computed_at": "<string>",
  "date_range": {
    "from_date": "<string>",
    "to_date": "<string>"
  },
  "headers": [
    {}
  ],
  "series": {}
}

Query Insights

Retrieve data from your saved Insights reports programmatically.

Query Saved Report

project_id
integer
required
Your Mixpanel project ID
workspace_id
integer
The workspace ID containing the report
bookmark_id
integer
required
The ID of your Insights report. Can be found from the URL: https://mixpanel.com/project/<PROJECT_ID>/view/<WORKSPACE_ID>/app/boards#id=12345&editor-card-id="report-<BOOKMARK_ID>"

Example Request

curl "https://mixpanel.com/api/query/insights?project_id=123&bookmark_id=456" \
  -u SERVICE_ACCOUNT_USERNAME:SERVICE_ACCOUNT_SECRET

Response

computed_at
string
Timestamp when the query was computed
date_range
object
The date range of the queried data
headers
array
Array explaining the nested keys in the series object
series
object
Maps event names to date/value pairs. Each key is an event name, and the value is an object with dates as keys and event counts as values.
{
  "computed_at": "2020-09-21T16:35:41.252314+00:00",
  "date_range": {
    "from_date": "2020-08-31T00:00:00-07:00",
    "to_date": "2020-09-12T23:59:59.999000-07:00"
  },
  "headers": ["$event"],
  "series": {
    "Logged in": {
      "2020-08-31T00:00:00-07:00": 9852,
      "2020-09-07T00:00:00-07:00": 4325
    },
    "Viewed page": {
      "2020-08-31T00:00:00-07:00": 10246,
      "2020-09-07T00:00:00-07:00": 11432
    }
  }
}

Use Cases

Daily Metrics Dashboard

import requests
from requests.auth import HTTPBasicAuth
from datetime import datetime

def get_daily_metrics(project_id, bookmark_id):
    """Fetch daily event metrics from a saved Insights report.

    Args:
        project_id: Mixpanel project ID.
        bookmark_id: ID of the saved Insights report (from the report URL).

    Returns:
        Dict mapping each event name to
        {'total': <sum of all counts>, 'daily_breakdown': <date -> count dict>}.

    Raises:
        requests.HTTPError: if the API responds with a 4xx/5xx status
            (bad credentials, unknown bookmark, rate limit, ...).
    """
    response = requests.get(
        'https://mixpanel.com/api/query/insights',
        auth=HTTPBasicAuth('USERNAME', 'SECRET'),
        params={
            'project_id': project_id,
            'bookmark_id': bookmark_id
        },
        timeout=30  # don't hang indefinitely on a stalled connection
    )
    # Fail fast with a clear HTTP error instead of a confusing
    # KeyError on 'series' below when the request was rejected.
    response.raise_for_status()

    data = response.json()

    # Aggregate each event's date->count series into a total plus the raw breakdown.
    metrics = {}
    for event_name, date_values in data.get('series', {}).items():
        metrics[event_name] = {
            'total': sum(date_values.values()),
            'daily_breakdown': date_values
        }

    return metrics

# Usage
metrics = get_daily_metrics(123, 456)
print(f"Total Signups: {metrics['Signed Up']['total']}")

Trend Analysis

import requests
from requests.auth import HTTPBasicAuth

def analyze_trend(project_id, bookmark_id):
    """Calculate and print week-over-week growth for each event in an Insights report.

    Args:
        project_id: Mixpanel project ID.
        bookmark_id: ID of the saved Insights report.

    Raises:
        requests.HTTPError: if the API responds with a 4xx/5xx status.
    """
    response = requests.get(
        'https://mixpanel.com/api/query/insights',
        auth=HTTPBasicAuth('USERNAME', 'SECRET'),
        params={'project_id': project_id, 'bookmark_id': bookmark_id},
        timeout=30  # don't hang indefinitely on a stalled connection
    )
    response.raise_for_status()

    data = response.json()

    for event_name, date_values in data.get('series', {}).items():
        # Values ordered chronologically; date keys are ISO-8601 so a
        # lexicographic sort is also a chronological one.
        dates = sorted(date_values.keys())
        values = [date_values[d] for d in dates]

        if len(values) < 2:
            continue  # need at least two data points to compare periods

        # NOTE(review): assumes one data point per day, so the last 7
        # entries form "this week" -- confirm the report's bucketing.
        current_week = sum(values[-7:])
        previous_week = sum(values[-14:-7])

        if previous_week == 0:
            # Avoid ZeroDivisionError when the prior period has no events
            # (e.g. a newly instrumented event).
            print(f"{event_name}: no prior-period data for WoW comparison")
            continue

        growth = ((current_week - previous_week) / previous_week) * 100
        print(f"{event_name}: {growth:.1f}% WoW growth")

analyze_trend(123, 456)

Best Practices

Instead of querying general reports, create specific Insights reports optimized for API consumption with:
  • Relevant date ranges
  • Specific events needed
  • Appropriate filters
Insights data doesn’t change frequently, so cache results:
import time
import json

def get_cached_insights(project_id, bookmark_id, cache_duration=3600):
    """Return Insights data, serving from a local JSON file cache when fresh.

    Args:
        project_id: Mixpanel project ID (part of the cache-file name).
        bookmark_id: Saved report ID (part of the cache-file name).
        cache_duration: Seconds a cached result stays fresh (default 1 hour).

    Returns:
        The report data: from the cache file when it exists, parses, and is
        fresh; otherwise freshly fetched via fetch_insights() and re-cached.
    """
    cache_file = f'insights_{project_id}_{bookmark_id}.json'

    # Serve from cache when the file exists, parses, and is still fresh.
    # A corrupt/truncated cache file (JSONDecodeError) or an old-format one
    # missing 'timestamp' (KeyError) must fall through to a refetch,
    # not crash the caller.
    try:
        with open(cache_file, 'r') as f:
            cache = json.load(f)
        if time.time() - cache['timestamp'] < cache_duration:
            return cache['data']
    except (FileNotFoundError, json.JSONDecodeError, KeyError):
        pass

    # Cache miss or stale: fetch fresh data (fetch_insights defined elsewhere).
    data = fetch_insights(project_id, bookmark_id)

    # Save to cache for subsequent calls.
    with open(cache_file, 'w') as f:
        json.dump({
            'timestamp': time.time(),
            'data': data
        }, f)

    return data
An event or a specific date may be absent from the series; use `.get()` with a default rather than direct indexing to handle these gaps:
# Safely read one event's count for one date from the API response:
# chained .get() defaults avoid KeyError when the event or date is absent.
series = data['series'].get('Event Name', {})
value = series.get('2024-01-15', 0)  # Default to 0 if missing

Build docs developers (and LLMs) love