Skip to main content

Overview

Complete documentation of all public methods in the SocialAnalyzer class.

Execution Methods

run_as_object()

Main method for programmatic API usage. Executes username analysis with full parameter control.
run_as_object(
    cli=False,
    gui=False,
    logs_dir='',
    logs=False,
    extract=False,
    filter='good',
    headers={},
    list=False,
    metadata=False,
    method='all',
    mode='fast',
    options='',
    output='pretty',
    profiles='detected',
    type='all',
    ret=False,
    silent=False,
    timeout=0,
    trim=False,
    username='',
    websites='all',
    countries='all',
    top='0',
    screenshots=False,
    simplify=False
)
username
string
default:""
required
Username to search for
username="johndoe"
websites
string
default:"all"
Space-separated list of websites or "all"
websites="youtube twitter reddit"
websites="all"
mode
string
default:"fast"
Analysis mode: fast, slow, or special
output
string
default:"pretty"
Output format: json or pretty
method
string
default:"all"
Search method: find, get, or all
filter
string
default:"good"
Confidence filter: good, maybe, bad, a comma-separated combination (e.g., "good,maybe"), or all
profiles
string
default:"detected"
Profile status filter: detected, unknown, failed, a comma-separated combination (e.g., "detected,unknown"), or all
options
string
default:""
Fields to display: comma-separated list of link, rate, title, text
top
string
default:"0"
Select top N websites by popularity (e.g., "50")
type
string
default:"all"
Website category filter (e.g., "Music", "Adult")
countries
string
default:"all"
Space-separated country codes (e.g., "us br ru")
extract
boolean
default:"false"
Extract profiles, URLs, and patterns
metadata
boolean
default:"false"
Extract metadata using QeeqBox OSINT
trim
boolean
default:"false"
Trim long strings
logs
boolean
default:"false"
Enable file logging
logs_dir
string
default:""
Custom directory for log files
screenshots
boolean
default:"false"
Capture screenshots (requires logs=True)
simplify
boolean
default:"false"
Output only 100% confidence profile links
silent
boolean
default:"false"
Suppress console output
timeout
integer
default:"0"
Delay between requests in seconds (0 = random)
headers
dict
default:"{}"
Custom HTTP headers
headers={"User-Agent": "Custom/1.0"}
list
boolean
default:"false"
List all websites and exit
cli
boolean
default:"false"
CLI mode flag (deprecated)
gui
boolean
default:"false"
GUI mode flag (not implemented)
ret
boolean
default:"false"
Reserved parameter

Returns

return
dict
Dictionary with detected, unknown, and failed profiles (structure depends on filters)
{
    "detected": [{"link": "...", "rate": "100%", ...}],
    "unknown": [{"link": "..."}],
    "failed": [{"link": "..."}]
}

Example

from social_analyzer import SocialAnalyzer

sa = SocialAnalyzer()

results = sa.run_as_object(
    username="johndoe",
    websites="youtube twitter reddit",
    mode="fast",
    output="json",
    filter="good",
    profiles="detected",
    extract=True,
    metadata=True,
    silent=True
)

for profile in results.get('detected', []):
    print(f"{profile['link']} - {profile['rate']}")

run_as_cli()

Parse command-line arguments and execute analysis. Used internally by the CLI.
run_as_cli()

Returns

return
dict
Analysis results dictionary

Example

from social_analyzer import SocialAnalyzer
import sys

# Simulate CLI usage
sys.argv = ['social-analyzer', '--username', 'johndoe', '--output', 'json']

sa = SocialAnalyzer()
results = sa.run_as_cli()
print(results)

check_user_cli()

Core CLI execution logic. Processes parsed arguments and performs username search.
check_user_cli(argv)
argv
Namespace
required
Parsed argument namespace from argparse

Returns

return
dict
Analysis results with detected, unknown, and failed profiles

Example

from argparse import Namespace
from social_analyzer import SocialAnalyzer

sa = SocialAnalyzer()
sa.init_logic()

args = Namespace(
    username="johndoe",
    websites="youtube twitter",
    mode="fast",
    output="json",
    method="all",
    filter="good",
    profiles="detected",
    options="",
    extract=False,
    metadata=False,
    trim=False,
    countries="all",
    type="all",
    top="0",
    logs=False,
    screenshots=False,
    simplify=False,
    cli=False
)

results = sa.check_user_cli(args)
print(results)

Search Methods

find_username_normal()

Main username search logic using ThreadPoolExecutor for concurrent website checking.
find_username_normal(req)
req
dict
required
Request object containing search parameters
{
    "body": {
        "uuid": "unique-task-id",
        "string": "username or comma-separated usernames",
        "options": "FindUserProfilesFast,GetUserProfilesFast"
    }
}

Returns

return
list
List of profile dictionaries with detection results
[
    {"link": "...", "method": "all", "good": "true", "rate": "100%", ...},
    {"link": "...", "method": "find", "good": "true", ...},
    {"link": "...", "method": "failed", ...}
]

Example

from uuid import uuid4
from social_analyzer import SocialAnalyzer

sa = SocialAnalyzer(silent=True)
sa.init_logic()

# Select websites
for site in sa.websites_entries:
    if 'youtube' in site['url'] or 'twitter' in site['url']:
        site['selected'] = 'true'
    else:
        site['selected'] = 'false'

req = {
    "body": {
        "uuid": str(uuid4()),
        "string": "johndoe",
        "options": "FindUserProfilesFast,GetUserProfilesFast"
    }
}

results = sa.find_username_normal(req)

for profile in results:
    if profile and profile.get('good') == 'true':
        print(f"Found: {profile['link']}")

fetch_url()

Check a single website for a username. Called by find_username_normal() for each website.
fetch_url(site, username, options)
site
dict
required
Website entry from websites_entries
username
string
required
Username to check
options
string
required
Search options (e.g., "FindUserProfilesFast")

Returns

return
tuple
Tuple of (success: bool, site_url: string, profile_data: dict)
(True, "https://youtube.com/{username}", {"link": "...", "rate": "100%", ...})

Initialization Methods

init_logic()

Load detection data files and initialize website entries. Must be called before searching.
init_logic()

Example

from social_analyzer import SocialAnalyzer

sa = SocialAnalyzer()
sa.init_logic()  # Load sites.json and languages.json

print(f"Loaded {len(sa.websites_entries)} websites")

init_detections()

Initialize specific detection data from loaded sites.
init_detections(detections)
detections
string
required
Detection type: websites_entries, shared_detections, or generic_detection

Returns

return
list
List of detection entries

load_file()

Load a JSON file from local path or download if missing.
load_file(name, path_to_check, url_download)
name
string
required
Display name for the file
path_to_check
string
required
Local file path
url_download
string
required
Download URL used if the file doesn't exist locally

Returns

return
dict/None
Loaded JSON data or None if failed

Example

sa = SocialAnalyzer()
data = sa.load_file(
    "custom_sites",
    "./data/custom.json",
    "https://example.com/custom.json"
)

if data:
    print("Loaded custom sites")

Website Management Methods

list_all_websites()

Print all available website domains to console.
list_all_websites()

Example

sa = SocialAnalyzer()
sa.init_logic()
sa.list_all_websites()
# Output:
# youtube.com
# twitter.com
# reddit.com
# ...

get_website()

Extract clean domain name from website URL.
get_website(site)
site
string
required
Full website URL

Returns

return
string
Cleaned domain name
# Input: "https://www.youtube.com/{username}/videos"
# Output: "youtube.com"

Example

sa = SocialAnalyzer()
domain = sa.get_website("https://www.youtube.com/{username}")
print(domain)  # "youtube.com"

search_and_change()

Find and update a website entry in websites_entries.
search_and_change(site, _dict)
site
dict
required
Website entry to find
_dict
dict
required
Fields to update

Example

sa = SocialAnalyzer()
sa.init_logic()

# Mark YouTube as selected
for site in sa.websites_entries:
    if 'youtube' in site['url']:
        sa.search_and_change(site, {"selected": "true"})
        break

top_websites()

Select top N websites by global rank.
top_websites(top_number)
top_number
string
required
Number pattern (e.g., "top50", "top100")

Returns

return
boolean
True if successful, False otherwise

Example

sa = SocialAnalyzer()
sa.init_logic()

if sa.top_websites("top50"):
    print("Selected top 50 websites")
    selected = [s for s in sa.websites_entries if s.get('selected') == 'true']
    print(f"Total selected: {len(selected)}")

Utility Methods

delete_keys()

Remove specific keys from a dictionary.
delete_keys(in_object, keys)
in_object
dict
required
Dictionary to modify
keys
list
required
List of keys to remove

Returns

return
dict
Modified dictionary

Example

sa = SocialAnalyzer()
profile = {
    "link": "https://youtube.com/johndoe",
    "rate": "100%",
    "method": "all",
    "good": "true"
}

clean = sa.delete_keys(profile, ["method", "good"])
print(clean)  # {"link": "...", "rate": "100%"}

clean_up_item()

Filter profile object to only specified fields (user-controlled).
clean_up_item(in_object, keys_str)
in_object
dict
required
Profile dictionary
keys_str
string/list
required
Comma-separated string or list of fields to keep

Returns

return
dict
Filtered dictionary

Example

sa = SocialAnalyzer()
profile = {
    "link": "https://youtube.com/johndoe",
    "rate": "100%",
    "title": "John Doe - YouTube",
    "text": "Long description..."
}

# Keep only link and rate
filtered = sa.clean_up_item(profile, "link,rate")
print(filtered)  # {"link": "...", "rate": "100%"}

# Keep only link
filtered = sa.clean_up_item(profile, ["link"])
print(filtered)  # {"link": "..."}

get_language_by_guessing()

Detect language from text content using langdetect.
get_language_by_guessing(text)
text
string
required
Text to analyze (needs to be relatively long)

Returns

return
string
Language name with "(Maybe)" suffix or "unavailable"
"English (Maybe)"
"Spanish (Maybe)"
"unavailable"

Example

sa = SocialAnalyzer()
sa.init_logic()

text = "Hello, this is a sample text in English."
lang = sa.get_language_by_guessing(text)
print(lang)  # "English (Maybe)"

get_language_by_parsing()

Detect language from HTML source code meta tags.
get_language_by_parsing(source, encoding)
source
string
required
HTML source code
encoding
string
required
Character encoding

Returns

return
string
Detected language name or "unavailable"

check_errors()

Decorator for error handling in methods.
@check_errors(on_off=None)
def your_method(self):
    pass

Logging Methods

setup_logger()

Configure logging for the instance.
setup_logger(uuid=None, file=False, argv=None)
uuid
string
Unique identifier for log file
file
boolean
default:"false"
Enable file logging
argv
Namespace
Parsed arguments (for configuration)

Example

from uuid import uuid4
from social_analyzer import SocialAnalyzer

sa = SocialAnalyzer()
sa.logs_dir = "./logs"
sa.setup_logger(uuid=str(uuid4()), file=True)

sa.log.info("Custom log message")

Complete Usage Example

from social_analyzer import SocialAnalyzer
from uuid import uuid4
import json

# Create instance
sa = SocialAnalyzer(silent=False)

# Configure
sa.workers = 20
sa.timeout = 1
sa.headers = {
    "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36"
}

# Initialize
sa.init_logic()

# Run search
results = sa.run_as_object(
    username="johndoe",
    websites="youtube twitter reddit instagram",
    mode="fast",
    output="json",
    method="all",
    filter="good,maybe",
    profiles="detected,unknown",
    extract=True,
    metadata=True,
    trim=False,
    silent=True
)

# Process results
if 'detected' in results:
    print(f"\nFound {len(results['detected'])} profiles:\n")
    
    for profile in results['detected']:
        print(f"[{profile['rate']}] {profile['link']}")
        
        if 'title' in profile:
            print(f"  Title: {profile['title']}")
        
        if 'extracted' in profile:
            if profile['extracted'].get('profiles'):
                print(f"  Extracted profiles: {', '.join(profile['extracted']['profiles'])}")
        
        print()

# Save to file
with open('results.json', 'w') as f:
    json.dump(results, f, indent=2)

print("\nResults saved to results.json")

See Also

Python Class Overview

Class architecture and initialization

CLI Reference

Command-line interface documentation

Output Formats

Understanding result structures

Web Endpoints

Express web API reference

Build docs developers (and LLMs) love