Skip to main content
This example demonstrates comprehensive file operations including uploads, downloads, directory management, and file search.

What You’ll Learn

  • Creating directories in sandboxes
  • Uploading files from local paths and memory
  • Downloading files to local paths or memory
  • Searching and listing files
  • Replacing content in files
  • Managing file permissions

Complete Example

import json
import os
from datetime import datetime

from daytona import CreateSandboxFromSnapshotParams, Daytona, FileDownloadRequest, FileUpload


def main():
    """Demonstrate sandbox file operations: upload, download, search, replace.

    Creates a sandbox, exercises the filesystem API (directories, batch
    uploads/downloads, glob search, in-place content replacement), writes a
    summary report, and always cleans up local artifacts and the sandbox,
    even when an intermediate step fails.
    """
    daytona = Daytona()
    params = CreateSandboxFromSnapshotParams(
        language="python",
    )

    # First, create a sandbox
    sandbox = daytona.create(params)
    print(f"Created sandbox with ID: {sandbox.id}")

    local_file_path = "local-example.txt"
    try:
        # List files in the sandbox
        files = sandbox.fs.list_files(".")
        print("Initial files:", files)

        # Create a new directory in the sandbox (mode 755: rwxr-xr-x)
        new_dir = "project-files"
        sandbox.fs.create_folder(new_dir, "755")

        # Create a local file for the upload-from-path demonstration
        with open(local_file_path, "w", encoding="utf-8") as f:
            _ = f.write("This is a local file created for demonstration purposes")

        # Create a configuration file with JSON data (uploaded from memory below)
        config_data = json.dumps(
            {"name": "project-config", "version": "1.0.0", "settings": {"debug": True, "maxConnections": 10}}, indent=2
        )

        # Upload multiple files at once - both from local path and from bytes.
        # Sandbox paths are POSIX-style regardless of the host OS, so build
        # them with "/" rather than os.path.join (which yields "\" on Windows).
        sandbox.fs.upload_files(
            [
                FileUpload(source=local_file_path, destination=f"{new_dir}/example.txt"),
                FileUpload(source=config_data.encode("utf-8"), destination=f"{new_dir}/config.json"),
                FileUpload(
                    source=b'#!/bin/bash\necho "Hello from script!"\nexit 0', destination=f"{new_dir}/script.sh"
                ),
            ]
        )

        # Execute commands on the sandbox to verify files and make them executable
        print("Verifying uploaded files:")
        ls_result = sandbox.process.exec(f"ls -la {new_dir}")
        print(ls_result.result)

        # Make the script executable
        _ = sandbox.process.exec(f"chmod +x {new_dir}/script.sh")

        # Run the script
        print("Running script:")
        script_result = sandbox.process.exec(f"{new_dir}/script.sh")
        print(script_result.result)

        # Search for files in the project using a glob pattern
        matches = sandbox.fs.search_files(new_dir, "*.json")
        print("JSON files found:", matches)

        # Replace content in config file (json.dumps wrote the lowercase literal "true")
        _ = sandbox.fs.replace_in_files([f"{new_dir}/config.json"], '"debug": true', '"debug": false')

        # Download multiple files - mix of local file and memory download.
        # A request without a destination returns the content as bytes.
        print("Downloading multiple files:")
        download_results = sandbox.fs.download_files(
            [
                FileDownloadRequest(source=f"{new_dir}/config.json", destination="local-config.json"),
                FileDownloadRequest(source=f"{new_dir}/example.txt"),
                FileDownloadRequest(source=f"{new_dir}/script.sh", destination="local-script.sh"),
            ]
        )

        for result in download_results:
            if result.error:
                print(f"Error downloading {result.source}: {result.error}")
            elif isinstance(result.result, str):
                # A string result is the local path the file was written to
                print(f"Downloaded {result.source} to {result.result}")
            elif result.result:
                # A bytes result means the file was downloaded into memory
                print(f"Downloaded {result.source} to memory ({len(result.result)} bytes)")
            else:
                print(f"Downloaded {result.source} to None (unknown result type)")

        # Single file download example (returns the content as bytes)
        print("Single file download example:")
        config_content = sandbox.fs.download_file(f"{new_dir}/config.json")
        print("Config content:", config_content.decode("utf-8"))

        # Create a report of all operations
        report_data = f"""
        Project Files Report:
        ---------------------
        Time: {datetime.now().isoformat()}
        Files: {len(matches.files)} JSON files found
        Config: {'Production mode' if b'"debug": false' in config_content else 'Debug mode'}
        Script: {'Executed successfully' if script_result.exit_code == 0 else 'Failed'}
        """.strip()

        # Save the report back into the sandbox
        sandbox.fs.upload_file(report_data.encode("utf-8"), f"{new_dir}/report.txt")
    finally:
        # Clean up local artifacts; guard with exists() because an early
        # failure may mean some files were never created.
        for path in (local_file_path, "local-config.json", "local-script.sh"):
            if os.path.exists(path):
                os.remove(path)

        # Always delete the sandbox so failed runs do not leak resources
        daytona.delete(sandbox)


# Run the demo only when executed as a script, not when imported as a module.
if __name__ == "__main__":
    main()

Expected Output

Created sandbox with ID: sandbox-abc123
Initial files: [...]
Verifying uploaded files:
total 16
drwxr-xr-x 2 daytona daytona 4096 Feb 20 12:00 .
drwxr-xr-x 3 daytona daytona 4096 Feb 20 12:00 ..
-rw-r--r-- 1 daytona daytona  123 Feb 20 12:00 config.json
-rw-r--r-- 1 daytona daytona   56 Feb 20 12:00 example.txt
-rwxr-xr-x 1 daytona daytona   45 Feb 20 12:00 script.sh

Running script:
Hello from script!

JSON files found: {"files": ["project-files/config.json"], "total": 1}

Downloading multiple files:
Downloaded project-files/config.json to local-config.json
Downloaded project-files/example.txt to memory (56 bytes)
Downloaded project-files/script.sh to local-script.sh

Single file download example:
Config content: {
  "name": "project-config",
  "version": "1.0.0",
  "settings": {
    "debug": false,
    "maxConnections": 10
  }
}

Key Concepts

Upload Strategies

You can upload files from different sources:
# From local file path
FileUpload(source="./local.txt", destination="/remote.txt")

# From bytes in memory
FileUpload(source=b"content", destination="/remote.txt")

# From string
FileUpload(source="content".encode(), destination="/remote.txt")

Download Strategies

Download files to different destinations:
# To local file
FileDownloadRequest(source="/remote.txt", destination="./local.txt")

# To memory (returns bytes)
FileDownloadRequest(source="/remote.txt")

Searching Files

Use glob patterns to search for files:
# Find all JSON files
matches = sandbox.fs.search_files("/project", "*.json")

# Find all Python files in subdirectories
matches = sandbox.fs.search_files("/project", "**/*.py")

Replacing Content

Search and replace across multiple files:
sandbox.fs.replace_in_files(
    ["config.json", "settings.json"],
    '"debug": true',
    '"debug": false'
)

Best Practices

Batch uploads: Use upload_files() for multiple files instead of multiple upload_file() calls for better performance.
File permissions: When uploading scripts, remember to set execute permissions using chmod or specify permissions during upload.
Path handling: Always use absolute paths or paths relative to the sandbox’s working directory to avoid ambiguity.

Next Steps

Git Workflow

Clone repositories and work with Git

Code Execution

Run code in your sandboxes

Build docs developers (and LLMs) love