Skip to main content

Module functions

Simplified profiling interface using a global profiler instance.

profile_function

Decorator to profile function memory usage.
def profile_function(
    func: Optional[F] = None,
    *,
    profiler: Optional[TFMemoryProfiler] = None,
    name: Optional[str] = None
) -> Union[Callable[[F], F], F]
func
Callable
Function to profile (when used without parentheses)
profiler
TFMemoryProfiler
Profiler instance. Uses global profiler if None
name
str
Custom name for profiling. Defaults to function name
Example:
from tfmemprof.context_profiler import profile_function

@profile_function
def train_model(model, data):
    return model.fit(data)

# With custom name
@profile_function(name="inference")
def predict(model, x):
    return model.predict(x)

profile_context

Context manager for profiling code blocks.
@contextmanager
def profile_context(
    name: str = "context",
    profiler: Optional[TFMemoryProfiler] = None
) -> Iterator[None]
name
str
default:"context"
Name for the profiling context
profiler
TFMemoryProfiler
Profiler instance. Uses global profiler if None
Example:
from tfmemprof.context_profiler import profile_context

with profile_context("data_loading"):
    dataset = load_data()
    
with profile_context("model_training"):
    model.fit(dataset)

get_global_profiler

Get or create the global profiler instance.
def get_global_profiler() -> TFMemoryProfiler
TFMemoryProfiler
TFMemoryProfiler
Global profiler instance

set_global_profiler

Set the global profiler instance.
def set_global_profiler(profiler: TFMemoryProfiler) -> None
profiler
TFMemoryProfiler
Profiler instance to use globally

clear_global_profiler

Clear global profiler state and reset.
def clear_global_profiler() -> None

clear_profiles

Reset profiling data without discarding the global profiler.
def clear_profiles() -> None

get_profile_summaries

Return aggregated profiling summaries for recent functions/contexts.
def get_profile_summaries(limit: Optional[int] = None) -> List[Dict[str, Any]]
limit
int
Maximum number of summaries to return. Returns all if None
List[Dict]
List[Dict[str, Any]]
name
str
Function or context name
calls
int
Number of times called
total_duration
float
Total execution time in seconds
total_memory_used
float
Total memory used in MB
peak_memory
float
Peak memory in MB

ProfiledLayer

Wrapper for TensorFlow layers with automatic profiling.

Constructor

ProfiledLayer(
    layer: Any,
    profiler: Optional[TFMemoryProfiler] = None,
    name: Optional[str] = None
)
layer
tf.keras.layers.Layer
TensorFlow layer to profile
profiler
TFMemoryProfiler
Profiler instance. Uses global profiler if None
name
str
Custom name for profiling. Defaults to layer name
Example:
from tfmemprof.context_profiler import ProfiledLayer
import tensorflow as tf

layer = tf.keras.layers.Dense(512, activation='relu')
profiled_layer = ProfiledLayer(layer, name="dense_512")

# Use in model
model = tf.keras.Sequential([
    profiled_layer,
    tf.keras.layers.Dense(10)
])

profile_model

Profile all layers in a TensorFlow model.
def profile_model(
    model: Any,
    profiler: Optional[TFMemoryProfiler] = None
) -> Any
model
tf.keras.Model
TensorFlow model to profile
profiler
TFMemoryProfiler
Profiler instance. Uses global profiler if None
model
tf.keras.Model
Model with profiled layers

TensorFlowProfiler

High-level TensorFlow profiling interface.

Constructor

TensorFlowProfiler(device: Optional[str] = None)
device
str
TensorFlow device name

Methods

profile_training

Profile model training.
def profile_training(
    self,
    model: Any,
    dataset: Any,
    epochs: int = 1,
    steps_per_epoch: Optional[int] = None
) -> None
model
tf.keras.Model
TensorFlow model
dataset
tf.data.Dataset
Training dataset
epochs
int
default:"1"
Number of epochs to profile
steps_per_epoch
int
Steps per epoch to profile

profile_inference

Profile model inference.
def profile_inference(
    self,
    model: Any,
    data: Any,
    batch_size: int = 32
) -> None
model
tf.keras.Model
TensorFlow model
data
Union[tf.data.Dataset, np.ndarray]
Input data
batch_size
int
default:"32"
Batch size for inference

get_results

Get profiling results.
def get_results(self) -> ProfileResult

reset

Reset profiler state.
def reset(self) -> None

profile_keras_training

Profile Keras model training with automatic dataset creation.
def profile_keras_training(
    model: Any,
    x_train: Any,
    y_train: Any,
    epochs: int = 1,
    batch_size: int = 32,
    validation_data: Optional[Any] = None,
    profiler: Optional[TFMemoryProfiler] = None
) -> None
model
tf.keras.Model
Keras model
x_train
np.ndarray
Training data
y_train
np.ndarray
Training labels
epochs
int
default:"1"
Number of epochs
batch_size
int
default:"32"
Batch size
validation_data
Tuple
Validation data tuple (x_val, y_val)
profiler
TFMemoryProfiler
Profiler instance. Uses global profiler if None

Example

from tfmemprof.context_profiler import (
    profile_function,
    profile_context,
    get_profile_summaries,
    TensorFlowProfiler
)
import tensorflow as tf

# Simple function profiling
@profile_function
def build_model():
    return tf.keras.Sequential([
        tf.keras.layers.Dense(512, activation='relu'),
        tf.keras.layers.Dense(10)
    ])

# Context profiling
with profile_context("data_preparation"):
    x_train, y_train = prepare_data()

model = build_model()

with profile_context("training"):
    model.compile(optimizer='adam', loss='mse')
    model.fit(x_train, y_train, epochs=5)

# Get summaries
summaries = get_profile_summaries(limit=5)
for summary in summaries:
    print(f"{summary['name']}: {summary['peak_memory']:.2f} MB")

# High-level profiler
profiler = TensorFlowProfiler(device='/GPU:0')
dataset = tf.data.Dataset.from_tensor_slices((x_train, y_train)).batch(32)
profiler.profile_training(model, dataset, epochs=3)

results = profiler.get_results()
print(f"Peak memory: {results.peak_memory_mb:.2f} MB")

Build docs developers (and LLMs) love