Skip to main content
The get_pipeline_context() function provides access to the pipeline configuration during pipeline definition (design time), not during pipeline execution.

Signature

def get_pipeline_context() -> PipelineContext

Returns

context
PipelineContext
A context object containing pipeline configuration information.

PipelineContext Properties

name
str
The name of the pipeline.
enable_cache
Optional[bool]
Whether caching is enabled for the pipeline.
enable_artifact_metadata
Optional[bool]
Whether artifact metadata extraction is enabled.
enable_artifact_visualization
Optional[bool]
Whether artifact visualization is enabled.
enable_step_logs
Optional[bool]
Whether step logs are enabled.
enable_pipeline_logs
Optional[bool]
Whether pipeline logs are enabled.
settings
Dict[str, SettingsOrDict]
Stack component settings for the pipeline.
extra
Dict[str, Any]
Extra configurations passed to the pipeline.
model
Optional[Model]
The model configuration for this pipeline.

Examples

Access Pipeline Name

from zenml import pipeline, step, get_pipeline_context


@step
def logging_step() -> None:
    """Placeholder step so the pipeline has at least one node."""
    pass


@pipeline(name="my_training_pipeline")
def training_pipeline():
    """Print the pipeline's configured name while the DAG is being defined."""
    ctx = get_pipeline_context()
    print(f"Pipeline name: {ctx.name}")
    logging_step()

Use Extra Configuration

from zenml import pipeline, step, get_pipeline_context


@step
def process_model(model_config: tuple) -> None:
    """Report which model class (module, class name) is being processed."""
    module, class_name = model_config
    print(f"Processing {class_name} from {module}")


@pipeline(
    extra={
        "models": [
            ("sklearn.tree", "DecisionTreeClassifier"),
            ("sklearn.ensemble", "RandomForestClassifier"),
            ("sklearn.svm", "SVC"),
        ]
    }
)
def model_search_pipeline():
    """Fan out one processing step per model listed in the extra config."""
    ctx = get_pipeline_context()
    # The number of steps in the DAG is driven by the pipeline's `extra` dict.
    for cfg in ctx.extra["models"]:
        process_model(model_config=cfg)

Conditional Step Execution

from zenml import pipeline, step, get_pipeline_context


@step
def train_model() -> None:
    print("Training model...")


@step
def evaluate_model() -> None:
    print("Evaluating model...")


@step
def use_cached_model() -> None:
    print("Using cached model...")


@pipeline(extra={"use_cache": True})
def conditional_pipeline():
    """Choose which steps enter the DAG based on the `use_cache` extra."""
    ctx = get_pipeline_context()

    # Branching here happens at definition time, not at run time.
    if not ctx.extra.get("use_cache"):
        train_model()
        evaluate_model()
    else:
        use_cached_model()

Dynamic Step Configuration

from zenml import pipeline, step, get_pipeline_context


@step
def data_processor(config: dict) -> None:
    """Run one preprocessing stage described by `config`."""
    print(f"Processing with config: {config}")


@pipeline(
    extra={
        "n_splits": 5,
        "preprocessing_steps": [
            {"name": "normalize", "params": {"method": "minmax"}},
            {"name": "encode", "params": {"method": "onehot"}},
            {"name": "impute", "params": {"strategy": "mean"}},
        ]
    }
)
def preprocessing_pipeline():
    """Create one `data_processor` invocation per configured preprocessing stage."""
    ctx = get_pipeline_context()
    for stage in ctx.extra["preprocessing_steps"]:
        data_processor(config=stage)

Access Model Configuration

from zenml import pipeline, step, get_pipeline_context, Model


@step
def train_step() -> None:
    pass


@pipeline(
    model=Model(
        name="iris_classifier",
        version="1.0.0",
        tags=["classification"]
    )
)
def model_training_pipeline():
    """Inspect the pipeline's attached model configuration at definition time."""
    ctx = get_pipeline_context()

    # `model` is only set when the @pipeline decorator was given one.
    if ctx.model:
        print(f"Training model: {ctx.model.name}")
        print(f"Version: {ctx.model.version}")

    train_step()

Check Pipeline Settings

from zenml import pipeline, step, get_pipeline_context


@step
def data_step() -> None:
    pass


@pipeline(enable_cache=False)
def no_cache_pipeline():
    """Report when caching has been explicitly disabled for this pipeline.

    `enable_cache` is Optional[bool]: None means "not configured" (the
    stack default applies), so test for an explicit False rather than
    general falsiness — `not None` would wrongly report "disabled" for
    a pipeline that simply left caching unconfigured.
    """
    context = get_pipeline_context()

    if context.enable_cache is False:
        print("Cache is disabled for this pipeline")

    data_step()

Important Notes

  • get_pipeline_context() is only available during pipeline definition (when the pipeline function is being called to define the DAG)
  • It is not available during pipeline execution (when steps are actually running)
  • For runtime information during step execution, use get_step_context() instead
  • Raises RuntimeError if called outside a pipeline definition or inside a running step

Common Errors

Called Outside Pipeline

# This will raise RuntimeError
from zenml import get_pipeline_context

context = get_pipeline_context()  # Error: No active pipeline found

Called Inside Running Step

from zenml import pipeline, step, get_pipeline_context


@step
def my_step() -> None:
    """The step body executes at run time, where no pipeline context exists."""
    # This will raise RuntimeError during execution
    ctx = get_pipeline_context()  # Error: Use get_step_context instead


@pipeline
def my_pipeline():
    my_step()

Related

  • @pipeline — Learn about creating pipelines
  • get_step_context — Access step runtime context
  • Model — Configure models

Build docs developers (and LLMs) love