Alerts define what you want to monitor. Fetch all alerts for your account:
# Get all alerts
alerts_response = client.get_alerts(account_id="your-account-id")

# Access the alerts list
for alert in alerts_response.alerts:
    print(f"Alert: {alert.name}")
    print(f"ID: {alert.id}")
    print(f"Keywords: {alert.query.included_keywords}")
from datetime import datetime, timedelta

# Get mentions from the last 7 days
week_ago = datetime.now() - timedelta(days=7)

mentions = client.get_mentions(
    account_id="your-account-id",
    alert_id="your-alert-id",
    not_before_date=week_ago,
    tone="positive",   # Filter by sentiment
    source="twitter",  # Filter by source
    limit=100,
)

print(f"Found {len(mentions.mentions)} positive mentions")
For automatic resource cleanup, use the client as a context manager:
from mention import MentionClient

with MentionClient(access_token="your-token") as client:
    alerts = client.get_alerts("account-id")
    # Client is automatically closed when exiting the context
from mention import MentionClient, MentionConfig

# Load config from environment variables
config = MentionConfig.from_env()

# Create client from config
client = MentionClient.from_config(config)

# Now you can use the client
alerts = client.get_alerts(config.account_id)
The from_env() method automatically loads .env files from your current directory using python-dotenv.
import asyncio

from mention import AsyncMentionClient


async def main():
    async with AsyncMentionClient(access_token="your-token") as client:
        # All methods are async
        alerts = await client.get_alerts("account-id")
        if alerts.alerts:
            mentions = await client.get_mentions(
                account_id="account-id",
                alert_id=alerts.alerts[0].id,
                limit=10,
            )
            for mention in mentions.mentions:
                print(f"{mention.title} - {mention.tone}")


# Run the async function
asyncio.run(main())
Iterate through all mentions using automatic pagination:
# Process all mentions with automatic pagination
for mention in client.iter_mentions(
    account_id="account-id",
    alert_id="alert-id",
    limit=100,  # Fetch 100 per page
):
    print(f"{mention.title} - {mention.published_at}")
    # The iterator automatically fetches the next page
    # when needed, so you can process all mentions