Overview
This example demonstrates the complete upload and download flow using streaming for efficient memory usage.
Source Code
Full example: utils/example-storage-e2e.js
Basic Upload & Download
import fs from 'fs'
import { Readable } from 'stream'
import { Synapse } from '@filoz/synapse-sdk'
import { http } from 'viem'
import { privateKeyToAccount } from 'viem/accounts'
import { calibration } from '@filoz/synapse-core/chains'
// Initialize the SDK against the Filecoin calibration testnet.
// NOTE(review): some SDK versions expose Synapse.create as async —
// confirm whether this call needs an `await`.
const account = privateKeyToAccount('0x...')
const synapse = Synapse.create({
chain: calibration,
transport: http(),
account,
})
// Upload with streaming: wrap the Node read stream in a WHATWG
// ReadableStream so the file is never fully buffered in memory.
const fileStream = Readable.toWeb(fs.createReadStream('photo.jpg'))
const result = await synapse.storage.upload(fileStream, {
callbacks: {
// Fired once a storage provider (SP) has been chosen for this upload.
onProviderSelected: (provider) => {
console.log(`Selected SP ${provider.id}`)
},
// Fired repeatedly as bytes stream to the provider.
onProgress: (bytesUploaded) => {
console.log(`Progress: ${bytesUploaded} bytes`)
},
// Fired when the piece has been fully stored on the provider.
onStored: (providerId, pieceCid) => {
console.log(`Stored: ${pieceCid}`)
},
},
})
console.log(`PieceCID: ${result.pieceCid}`)
// Download the piece back by its content identifier.
const downloaded = await synapse.storage.download({
pieceCid: result.pieceCid
})
fs.writeFileSync('downloaded.jpg', downloaded)
With Progress Tracking
// Throttled progress reporting: only log every CHUNK_SIZE bytes (and at
// completion) so large uploads do not flood the console.
const fileSize = fs.statSync('video.mp4').size
// Fix: define the stream being uploaded — the original snippet referenced
// an undefined `fileStream`, so it was not runnable on its own.
const fileStream = Readable.toWeb(fs.createReadStream('video.mp4'))
let lastReported = 0
const CHUNK_SIZE = 10 * 1024 * 1024 // Report every 10 MB
const result = await synapse.storage.upload(fileStream, {
callbacks: {
onProgress: (bytesUploaded) => {
// Report when another CHUNK_SIZE bytes have gone out, or on the last byte.
if (bytesUploaded - lastReported >= CHUNK_SIZE || bytesUploaded === fileSize) {
const pct = ((bytesUploaded / fileSize) * 100).toFixed(1)
console.log(`Progress: ${bytesUploaded} / ${fileSize} (${pct}%)`)
lastReported = bytesUploaded
}
},
},
})
Complete Lifecycle Callbacks
const result = await synapse.storage.upload(data, {
callbacks: {
// Provider selection: fired when a storage provider is chosen.
onProviderSelected: (provider) => {
console.log(`\nSelected Provider:`)
console.log(` ID: ${provider.id}`)
console.log(` Address: ${provider.serviceProvider}`)
console.log(` Service: ${provider.pdp.serviceURL}`)
},
// Dataset resolution: an existing data set may be reused instead of
// creating a new one (see info.isExisting).
onDataSetResolved: (info) => {
const verb = info.isExisting ? 'Reusing' : 'Creating'
console.log(`${verb} data set: ${info.dataSetId}`)
},
// Upload progress: fired repeatedly with the cumulative byte count.
onProgress: (bytesUploaded) => {
console.log(`Uploaded: ${bytesUploaded} bytes`)
},
// Primary storage: the first copy has landed on its provider.
onStored: (providerId, pieceCid) => {
console.log(`\nStored on primary SP ${providerId}`)
console.log(` PieceCID: ${pieceCid}`)
},
// Secondary replication: additional providers pull copies of the piece.
onPullProgress: (providerId, pieceCid, status) => {
console.log(`Replicating to SP ${providerId}: ${status}`)
},
onCopyComplete: (providerId, pieceCid) => {
console.log(`Copy complete on SP ${providerId}`)
},
onCopyFailed: (providerId, pieceCid, error) => {
console.error(`Copy failed on SP ${providerId}: ${error.message}`)
},
// On-chain commitment: pieces submitted in a transaction (txHash), then
// confirmed with their assigned piece ids.
onPiecesAdded: (txHash, providerId, pieces) => {
console.log(`\nCommitted on SP ${providerId}`)
console.log(` Transaction: ${txHash}`)
for (const { pieceCid } of pieces) {
console.log(` - ${pieceCid}`)
}
},
onPiecesConfirmed: (dataSetId, providerId, pieces) => {
console.log(`\nConfirmed on SP ${providerId}`)
console.log(` Data Set: ${dataSetId}`)
for (const { pieceCid, pieceId } of pieces) {
console.log(` - ${pieceCid} -> pieceId ${pieceId}`)
}
},
},
})
Verify Downloaded Data
import crypto from 'crypto'
// Hex-encoded SHA-256 digest of a byte buffer, used to compare the
// uploaded and downloaded copies of a file.
function sha256(data: Buffer): string {
  const hasher = crypto.createHash('sha256')
  hasher.update(data)
  return hasher.digest('hex')
}
// Upload the original file and record its hash for later comparison.
const originalData = await fs.promises.readFile('document.pdf')
const originalHash = sha256(originalData)
const result = await synapse.storage.upload(originalData)
// Download and verify: the downloaded bytes must hash to the same value.
const downloadedData = await synapse.storage.download({
pieceCid: result.pieceCid
})
// NOTE(review): assumes download() returns bytes Buffer.from can wrap
// (Uint8Array or similar) — confirm against the SDK's return type.
const downloadedHash = sha256(Buffer.from(downloadedData))
if (originalHash === downloadedHash) {
console.log('✓ Data integrity verified')
} else {
console.error('✗ Data corruption detected!')
}
Upload with Metadata
import path from 'path'
// Attach descriptive metadata to the piece at upload time.
const filePath = 'vacation-photos/beach.jpg'
const metadata = {
name: path.basename(filePath),
type: 'image/jpeg',
category: 'vacation',
uploadedBy: account.address,
}
const result = await synapse.storage.upload(data, { pieceMetadata: metadata })
Handle Upload Errors
import { StoreError, CommitError } from '@filoz/synapse-sdk'
try {
const result = await synapse.storage.upload(data)
// Check for partial failures: upload() resolves as long as the primary
// copy succeeded, but individual replica copies can still fail.
if (result.failures.length > 0) {
console.warn(`${result.failures.length} copies failed:`)
for (const failure of result.failures) {
console.warn(` SP ${failure.providerId}: ${failure.error}`)
}
}
console.log(`Success: ${result.copies.length} copies`)
} catch (error) {
// StoreError: the primary provider never stored the data.
if (error instanceof StoreError) {
console.error('Failed to store on primary:')
console.error(` Provider: ${error.providerId}`)
console.error(` Error: ${error.message}`)
} else if (error instanceof CommitError) {
// CommitError: data reached the provider but the on-chain commitment
// transaction failed — the piece exists but is not committed.
console.error('Failed to commit on-chain:')
console.error(` Provider: ${error.providerId}`)
console.error(` Note: Data is stored but not committed`)
} else {
console.error('Unexpected error:', error)
}
}
Download from Specific Provider
// Get upload result
const result = await synapse.storage.upload(data)
// Download specifically from the primary provider by creating a
// provider-scoped storage context.
const primaryCopy = result.copies.find(c => c.role === 'primary')
if (primaryCopy) {
const context = await synapse.storage.createContext({
providerId: primaryCopy.providerId,
})
// Fix: renamed from `data` to avoid shadowing the uploaded payload above.
const primaryData = await context.download({ pieceCid: result.pieceCid })
}
Batch Upload Files
import glob from 'glob'
// NOTE(review): glob v9+ no longer ships a default export — modern versions
// need `import { glob } from 'glob'`. Confirm the version this repo pins.
const files = glob.sync('documents/*.pdf')
console.log(`Uploading ${files.length} files...`)
// Uploads run sequentially (await inside the loop) — simple and avoids
// holding many file streams open at once; parallelize if throughput matters.
for (const file of files) {
const stream = Readable.toWeb(fs.createReadStream(file))
const result = await synapse.storage.upload(stream, {
pieceMetadata: {
name: path.basename(file),
},
callbacks: {
onStored: (providerId, pieceCid) => {
console.log(`${file} -> ${pieceCid}`)
},
},
})
}
console.log('All files uploaded')
Stream from URL
// Pipe a remote download directly into a streaming upload — the file is
// never buffered in full locally.
const response = await fetch('https://example.com/largefile.zip')
const body = response.body
if (!response.ok || body === null) {
throw new Error('Failed to fetch file')
}
const result = await synapse.storage.upload(body, {
callbacks: {
onProgress: (bytes) => {
console.log(`Streaming upload: ${bytes} bytes`)
},
},
})
Preflight Check Before Upload
import { formatUnits } from '@filoz/synapse-sdk'
const fileSize = fs.statSync('largefile.mp4').size
// Check costs and allowances before uploading, so a large transfer is not
// started only to fail at payment time.
const preflight = await synapse.storage.preflightUpload({
size: fileSize,
withCDN: true,
})
console.log('Estimated costs:')
// NOTE(review): this formatUnits comes from the SDK, not viem — confirm it
// defaults to the USDFC token's decimals.
console.log(` Per day: ${formatUnits(preflight.estimatedCost.perDay)} USDFC`)
console.log(` Per month: ${formatUnits(preflight.estimatedCost.perMonth)} USDFC`)
// Abort early when the on-chain allowances cannot cover this upload.
if (!preflight.allowanceCheck.sufficient) {
console.error('Insufficient allowances')
console.error(preflight.allowanceCheck.message)
process.exit(1)
}
// Proceed with upload
const result = await synapse.storage.upload(data, { withCDN: true })
Command-Line Usage
Run the E2E example:
# Single file
PRIVATE_KEY=0x... node utils/example-storage-e2e.js photo.jpg
# Multiple files (uses split operations)
PRIVATE_KEY=0x... node utils/example-storage-e2e.js file1.pdf file2.pdf file3.pdf
# Specify network
NETWORK=mainnet PRIVATE_KEY=0x... node utils/example-storage-e2e.js data.zip
# Use devnet
NETWORK=devnet node utils/example-storage-e2e.js test.txt
Best Practices
Stream Large Files
Use streaming to avoid loading entire files into memory
Check Preflight
Always verify costs and allowances before uploading
Verify Downloads
Hash downloaded data to ensure integrity
Handle Failures
Check result.failures for partial upload issues
Next Steps
Multi-Copy Storage
Upload to multiple providers for redundancy
Split Operations
Use split operations for batch uploads