Search and Trends
This guide covers how to search for tweets and users, discover trending topics, and explore what’s popular on Twitter.

Searching tweets

Search for tweets using keywords, hashtags, or advanced search operators.
tweets = await client.search_tweet(
    query='python programming',
    product='Top',  # Options: 'Top', 'Latest', 'Media'
    count=20
)

for tweet in tweets:
    print(f'@{tweet.user.screen_name}: {tweet.text}')
    print(f'Likes: {tweet.favorite_count}, Retweets: {tweet.retweet_count}')
    print('---')

Search products

Twikit supports three tweet search products — 'Top', 'Latest', and 'Media':
# Get most relevant and popular tweets
# (other product options: 'Latest', 'Media')
tweets = await client.search_tweet('AI', product='Top', count=20)

Pagination through search results

# Initial search
results = await client.search_tweet('machine learning', product='Latest', count=20)

for tweet in results:
    print(tweet.text)

# Get next page of results
more_results = await results.next()
for tweet in more_results:
    print(tweet.text)

# Get previous page
previous_results = await results.previous()

Advanced search queries

Use Twitter’s search operators for more precise results:
# Search with hashtags
tweets = await client.search_tweet('#python', product='Top')

# Search tweets sent by a specific user
tweets = await client.search_tweet('from:elonmusk', product='Latest')

# Search replies/mentions directed at an account
tweets = await client.search_tweet('to:twitter', product='Latest')

# Search with multiple keywords (AND)
tweets = await client.search_tweet('python AND machine learning', product='Top')

# Search with OR operator (matches either term)
tweets = await client.search_tweet('python OR javascript', product='Top')

# Exclude terms with a leading minus
tweets = await client.search_tweet('python -java', product='Top')

# Search for exact phrases by quoting them
tweets = await client.search_tweet('"artificial intelligence"', product='Top')

# Search tweets containing links
tweets = await client.search_tweet('python filter:links', product='Latest')

# Search tweets with media
tweets = await client.search_tweet('sunset filter:media', product='Latest')

# Search by language (ISO 639-1 code, e.g. fr = French)
tweets = await client.search_tweet('bonjour lang:fr', product='Latest')

# Search tweets with minimum engagement thresholds
tweets = await client.search_tweet('python min_faves:100', product='Top')  # at least 100 likes
tweets = await client.search_tweet('python min_retweets:50', product='Top')  # at least 50 retweets
You can combine multiple search operators for more specific queries:
# Operators compose freely: OR terms, an author restriction, an exclusion,
# a link filter, and a minimum-likes threshold in a single query string.
query = 'python OR javascript from:github -spam filter:links min_faves:10'
tweets = await client.search_tweet(query, product='Latest')

Searching users

Find users based on their profile information.
users = await client.search_user('python developer', count=20)

for user in users:
    print(f'@{user.screen_name} - {user.name}')
    print(f'Bio: {user.description}')
    print(f'Followers: {user.followers_count}')
    print('---')

Pagination through user search results

results = await client.search_user('data scientist', count=20)

# Get more results — user search results paginate like tweet searches
more_results = await results.next()

for user in more_results:
    print(f'@{user.screen_name}')

Filter and sort search results

results = await client.search_user('AI researcher', count=50)

# Filter users by follower count
popular_users = [u for u in results if u.followers_count > 1000]

# Sort by followers
sorted_users = sorted(results, key=lambda u: u.followers_count, reverse=True)

print('Top 10 users by followers:')
for user in sorted_users[:10]:
    print(f'@{user.screen_name}: {user.followers_count} followers')
Trending topics

Explore trending topics and hashtags on Twitter.
# Fetch trending topics; `category` selects which trends tab to read
trends = await client.get_trends(
    category='trending',  # Options: 'trending', 'for-you', 'news', 'sports', 'entertainment'
    count=20
)

for trend in trends:
    print(f'Trend: {trend.name}')
    # tweets_count may be falsy when Twitter publishes no count for a trend
    if trend.tweets_count:
        print(f'  Tweets: {trend.tweets_count}')
    print(f'  Domain: {trend.domain_context}')

Trend categories

Explore different types of trends:
# Get what's trending overall (category passed positionally)
trends = await client.get_trends('trending', count=20)
Once you find a trending topic, search for related tweets:
trends = await client.get_trends('trending', count=10)

# Feed each trend name back into tweet search to sample the conversation
for trend in trends:
    print(f'\nSearching for: {trend.name}')

    # Search tweets about this trend
    tweets = await client.search_tweet(trend.name, product='Latest', count=10)

    # Truncate tweet text to 50 characters for compact display
    for tweet in tweets:
        print(f'  @{tweet.user.screen_name}: {tweet.text[:50]}...')
Location-based trends

Get trends for specific geographic locations.

Get available locations

# List every location Twitter publishes trends for
locations = await client.get_available_locations()

for location in locations:
    print(f'{location.name}, {location.country} (WOEID: {location.woeid})')
# Get trends for a specific location using WOEID
# (Where On Earth ID — a legacy Yahoo location identifier)
# WOEID for New York: 2459115
# WOEID for London: 44418
# WOEID for Tokyo: 1118370

nyc_trends = await client.get_place_trends(2459115)

# NOTE(review): this snippet mixes dict-style access ('locations'/'trends')
# with attribute access on each trend (trend.name) — confirm against the
# installed twikit version's get_place_trends return shape.
print(f"Trending in {nyc_trends['locations'][0]['name']}:")
for trend in nyc_trends['trends']:
    print(f"  {trend.name}")
    # tweet_volume may be None when Twitter doesn't report a count
    if trend.tweet_volume:
        print(f"    Volume: {trend.tweet_volume} tweets")

Location object methods

locations = await client.get_available_locations()
# Pick one location by name; next() raises StopIteration if no match exists
nyc = next(loc for loc in locations if loc.name == 'New York')

print(f'Location: {nyc.name}')
print(f'Country: {nyc.country}')
print(f'WOEID: {nyc.woeid}')

# Get trends for this location — presumably equivalent to
# client.get_place_trends(nyc.woeid); verify against the twikit API docs
trends = await nyc.get_trends()

Similar tweets (Premium)

Find tweets similar to a specific tweet.
tweet = await client.get_tweet_by_id('1234567890')

# Get similar tweets (requires a Twitter Premium/Blue subscription)
similar_tweets = await tweet.get_similar_tweets()

print(f'Original tweet: {tweet.text}')
print(f'\nSimilar tweets:')
for similar in similar_tweets:
    print(f'  @{similar.user.screen_name}: {similar.text}')
The get_similar_tweets() method requires Twitter Premium/Blue subscription.

Practical examples

Monitor keywords in real-time

import asyncio

async def monitor_keyword(keyword):
    seen_tweet_ids = set()
    
    while True:
        try:
            tweets = await client.search_tweet(
                query=keyword,
                product='Latest',
                count=20
            )
            
            for tweet in tweets:
                if tweet.id not in seen_tweet_ids:
                    print(f'New tweet about {keyword}:')
                    print(f'  @{tweet.user.screen_name}: {tweet.text}')
                    seen_tweet_ids.add(tweet.id)
            
            # Check every 60 seconds
            await asyncio.sleep(60)
        except Exception as e:
            print(f'Error: {e}')
            await asyncio.sleep(60)

# Monitor a specific keyword
await monitor_keyword('Python')

Find influencers in a niche

users = await client.search_user('AI researcher', count=50)

# Filter and sort by engagement
influencers = sorted(
    [u for u in users if u.followers_count > 5000],
    key=lambda u: u.followers_count,
    reverse=True
)

print('Top AI researcher influencers:')
for i, user in enumerate(influencers[:10], 1):
    print(f'{i}. @{user.screen_name}')
    print(f'   Followers: {user.followers_count:,}')
    print(f'   Tweets: {user.statuses_count:,}')
    print(f'   Bio: {user.description[:100]}...')
    print()
import time

trending_hashtags = set()

# Get current trends
trends = await client.get_trends('trending', count=30)

for trend in trends:
    if trend.name.startswith('#'):
        trending_hashtags.add(trend.name)
        print(f'Trending: {trend.name}')
        
        # Search recent tweets with this hashtag
        tweets = await client.search_tweet(
            query=trend.name,
            product='Latest',
            count=5
        )
        
        print('  Recent tweets:')
        for tweet in tweets:
            print(f'    @{tweet.user.screen_name}: {tweet.text[:60]}...')
        print()

Build a sentiment analysis dataset

import json

keyword = 'python programming'
tweets_data = []

# Collect tweets
results = await client.search_tweet(keyword, product='Latest', count=100)
for tweet in results:
    tweets_data.append({
        'id': tweet.id,
        'text': tweet.text,
        'user': tweet.user.screen_name,
        'likes': tweet.favorite_count,
        'retweets': tweet.retweet_count,
        'created_at': tweet.created_at
    })

# Get more tweets
for _ in range(4):  # Get 5 pages total
    try:
        results = await results.next()
        for tweet in results:
            tweets_data.append({
                'id': tweet.id,
                'text': tweet.text,
                'user': tweet.user.screen_name,
                'likes': tweet.favorite_count,
                'retweets': tweet.retweet_count,
                'created_at': tweet.created_at
            })
    except:
        break

# Save to file
with open('tweets_dataset.json', 'w') as f:
    json.dump(tweets_data, f, indent=2)

print(f'Collected {len(tweets_data)} tweets about "{keyword}"')
categories = ['trending', 'news', 'sports', 'entertainment']
trends_by_category = {}

for category in categories:
    trends = await client.get_trends(category, count=10)
    trends_by_category[category] = [t.name for t in trends]
    print(f'\n{category.upper()} trends:')
    for trend in trends:
        print(f'  {trend.name}')

# Find common trends across categories
common_trends = set(trends_by_category['trending'])
for category in categories[1:]:
    common_trends &= set(trends_by_category[category])

if common_trends:
    print(f'\nTrends appearing in all categories: {common_trends}')

Trend attributes

Access detailed information about trends:
trends = await client.get_trends('trending', count=5)

for trend in trends:
    print(f'Name: {trend.name}')
    print(f'Tweets count: {trend.tweets_count}')
    print(f'Domain context: {trend.domain_context}')
    # grouped_trends: presumably related trend names bundled with this
    # trend — verify against the twikit Trend object reference
    print(f'Grouped trends: {trend.grouped_trends}')
    print('---')

Best practices

1. Use specific search queries — narrow your searches with operators to get more relevant results and reduce API calls.

2. Implement pagination wisely — don't fetch too many pages at once; process results in batches to stay within rate limits.

3. Cache trend results — trends don't change frequently, so cache results for a few minutes to avoid unnecessary API calls.

4. Handle search errors — some queries may return no results or raise errors; always handle exceptions gracefully.

Build docs developers (and LLMs) love