Quickstart
Get up and running with Spooled Cloud in 5 minutes. This guide covers the basics of queuing jobs,
processing them with workers, and handling failures gracefully.
🎮 See it in action
Try our interactive demo! SpriteForge
shows real-time job processing, workflows, retries, and more — all powered by Spooled Cloud.
SDK Status
Node.js, Python, Go, and PHP SDKs are all production-ready!
See SDKs for full documentation and examples.
API Options
Spooled Cloud provides two APIs:
- REST API (port 8080) — HTTP/1.1 + JSON, best for web apps and simple integrations
- gRPC API — HTTP/2 + Protobuf, best for high-throughput workers with streaming

gRPC endpoints
Spooled Cloud (TLS) : grpc.spooled.cloud:443 Self-hosted / local : localhost:50051 (or GRPC_PORT)
This quickstart covers the REST API. See the gRPC documentation for
high-performance worker implementations using gRPC streaming.
Prerequisites
Before you begin, you'll need:
- A Spooled Cloud account — Sign up for free
- An API key — Available in your dashboard
- cURL, Node.js, Python, Go, or PHP — Any HTTP client will work

Step 1: Queue Your First Job
Jobs are the fundamental unit in Spooled. A job represents a task to be processed asynchronously,
like sending an email, processing an image, or delivering a webhook.
cURL Node.js Python Go PHP
Copy curl -X POST https://api.spooled.cloud/api/v1/jobs \
-H "Authorization: Bearer sp_live_YOUR_API_KEY" \
-H "Content-Type: application/json" \
-d '{
"queue_name": "my-queue",
"payload": {
"event": "user.created",
"user_id": "usr_123",
"email": "alice@example.com"
},
"idempotency_key": "user-created-usr_123"
}' import { SpooledClient } from '@spooled/sdk' ;
const client = new SpooledClient ({
apiKey: process.env. SPOOLED_API_KEY ! ,
});
const userId = 'usr_123' ;
// Create a job
const { id } = await client.jobs. create ({
queueName: 'email-notifications' ,
payload: {
to: 'user@example.com' ,
subject: 'Welcome!' ,
template: 'welcome' ,
},
idempotencyKey: `welcome-${ userId }` ,
maxRetries: 5 ,
});
console. log ( `Created job: ${ id }` ); from spooled import SpooledClient
import os
client = SpooledClient( api_key = os.environ[ "SPOOLED_API_KEY" ])
image_id = "img_123"
# Create a background job
result = client.jobs.create({
"queue_name" : "image-processing" ,
"payload" : {
"image_url" : "https://example.com/image.jpg" ,
"operations" : [ "resize" , "compress" ],
"output_format" : "webp"
},
"idempotency_key" : f "process-image- { image_id } " ,
"max_retries" : 3
})
print ( f "Created job: { result.id } " )
client.close() package main
import (
" context "
" fmt "
" os "
" github.com/spooled-cloud/spooled-sdk-go/spooled "
" github.com/spooled-cloud/spooled-sdk-go/spooled/resources "
)
func ptr [ T any ]( v T ) * T { return & v }
func main () {
client, err := spooled. NewClient (spooled. WithAPIKey (os. Getenv ( "SPOOLED_API_KEY" )))
if err != nil {
panic (err)
}
resp, err := client. Jobs (). Create (context. Background (), & resources . CreateJobRequest {
QueueName: "my-queue" ,
Payload: map [ string ] any { "key" : "value" },
IdempotencyKey: ptr ( "unique-key" ),
MaxRetries: ptr ( 3 ),
})
if err != nil {
panic (err)
}
fmt. Printf ( "Created job: %s\n " , resp.ID)
} <? php
use Spooled\SpooledClient ;
use Spooled\Config\ClientOptions ;
$client = new SpooledClient ( new ClientOptions (
apiKey : getenv ( 'SPOOLED_API_KEY' ),
));
$userId = 'usr_123' ;
// Create a job
$job = $client -> jobs -> create ([
'queue' => 'email-notifications' ,
'payload' => [
'to' => 'user@example.com' ,
'subject' => 'Welcome!' ,
'template' => 'welcome' ,
],
'idempotencyKey' => "welcome-{ $userId }" ,
'maxRetries' => 5 ,
]);
echo "Created job: { $job -> id } \n " ;
The response includes the job ID and status. The idempotency_key prevents duplicate
processing if you retry the request.
Step 2: Process Jobs with a Worker
Workers claim jobs from queues and process them. If processing fails, Spooled automatically
retries with exponential backoff.
Worker Pattern: Claim → Process → Complete/Fail
cURL Node.js Python Go PHP
Copy # Worker loop: claim → process → complete/fail
# 1. Claim jobs
curl -X POST https://api.spooled.cloud/api/v1/jobs/claim \
-H "Authorization: Bearer sp_live_..." \
-d '{"queue_name": "my-queue", "worker_id": "worker-1", "limit": 5}'
# 2. Complete a job
curl -X POST https://api.spooled.cloud/api/v1/jobs/job_xyz/complete \
-H "Authorization: Bearer sp_live_..." \
-d '{"worker_id": "worker-1"}'
# 3. Or fail it (will retry)
curl -X POST https://api.spooled.cloud/api/v1/jobs/job_xyz/fail \
-H "Authorization: Bearer sp_live_..." \
-d '{"worker_id": "worker-1", "error": "Processing failed"}' import { SpooledClient, SpooledWorker } from '@spooled/sdk' ;
const client = new SpooledClient ({
apiKey: process.env. SPOOLED_API_KEY ! ,
});
const worker = new SpooledWorker (client, {
queueName: 'email-notifications' ,
concurrency: 10 ,
});
worker. process ( async ( ctx ) => {
const { to , subject , body } = ctx.payload;
await sendEmail ({ to, subject, body });
console. log ( `Sent email to ${ to }` );
return { sent: true };
});
await worker. start (); from spooled import SpooledClient
from spooled.worker import SpooledWorker
import os
client = SpooledClient( api_key = os.environ[ "SPOOLED_API_KEY" ])
worker = SpooledWorker(client, queue_name = "email-notifications" , concurrency = 10 )
@worker.process
def handle_job (ctx):
to = ctx.payload[ "to" ]
subject = ctx.payload[ "subject" ]
# Process the job
send_email(to, subject)
return { "sent" : True }
worker.start() # Blocking package main
import (
" context "
" fmt "
" os "
" github.com/spooled-cloud/spooled-sdk-go/spooled "
" github.com/spooled-cloud/spooled-sdk-go/spooled/worker "
)
func main () {
client, err := spooled. NewClient (spooled. WithAPIKey (os. Getenv ( "SPOOLED_API_KEY" )))
if err != nil {
panic (err)
}
w := worker. NewWorker (client. Jobs (), client. Workers (), worker . Options {
QueueName: "email-notifications" ,
Concurrency: 10 ,
})
w. Process ( func ( ctx * worker . JobContext ) ( map [ string ] any , error ) {
fmt. Printf ( "Job %s payload= %v\n " , ctx.JobID, ctx.Payload)
return map [ string ] any { "ok" : true }, nil
})
if err := w. Start (context. Background ()); err != nil {
panic (err)
}
} <? php
use Spooled\SpooledClient ;
use Spooled\Config\ClientOptions ;
use Spooled\Worker\SpooledWorker ;
use Spooled\Worker\WorkerConfig ;
use Spooled\Worker\JobContext ;
$client = new SpooledClient ( new ClientOptions (
apiKey : getenv ( 'SPOOLED_API_KEY' ),
));
$worker = new SpooledWorker ($client, new WorkerConfig (
queueName : 'email-notifications' ,
concurrency : 10 ,
));
$worker -> process ( function ( JobContext $ctx) : array {
$to = $ctx -> get ( 'to' );
$subject = $ctx -> get ( 'subject' );
// Process the job
sendEmail ($to, $subject);
return [ 'sent' => true ];
});
$worker -> start ();

Common Operations

Scheduled Jobs (Run Later)
Schedule a job to run at a specific time:
cURL Node.js Python Go PHP
Copy # Schedule a job for later
curl -X POST https://api.spooled.cloud/api/v1/jobs \
-H "Authorization: Bearer sp_live_YOUR_API_KEY" \
-H "Content-Type: application/json" \
-d '{
"queue_name": "reminders",
"payload": {"type": "cart-abandoned", "user_id": "usr_123"},
"scheduled_at": "2024-12-10T09:00:00Z"
}' // Schedule a job to run in 24 hours
await client.jobs. create ({
queueName: 'reminders' ,
payload: { userId: 'usr_123' , type: 'cart-abandoned' },
scheduledAt: new Date (Date. now () + 24 * 60 * 60 * 1000 ),
idempotencyKey: `reminder-${ userId }-cart` ,
}); from datetime import datetime, timedelta
# Schedule a job to run in 24 hours
client.jobs.create({
"queue_name" : "reminders" ,
"payload" : { "user_id" : "usr_123" , "type" : "cart-abandoned" },
"scheduled_at" : (datetime.utcnow() + timedelta( hours = 24 )).isoformat() + "Z" ,
"idempotency_key" : f "reminder- { user_id } -cart"
}) import (
" context "
" time "
" github.com/spooled-cloud/spooled-sdk-go/spooled "
" github.com/spooled-cloud/spooled-sdk-go/spooled/resources "
)
client, _ := spooled. NewClient (spooled. WithAPIKey ( "sp_live_YOUR_API_KEY" ))
scheduledTime := time. Now (). Add ( 24 * time.Hour)
resp, err := client. Jobs (). Create (context. Background (), & resources . CreateJobRequest {
QueueName: "reminders" ,
Payload: map [ string ] interface {}{ "user_id" : "usr_123" , "type" : "cart-abandoned" },
ScheduledAt: & scheduledTime,
IdempotencyKey: stringPtr ( "reminder-usr_123-cart" ),
})
if err != nil {
panic (err)
}
fmt. Printf ( "Scheduled job: %s\n " , resp.ID) <? php
use DateTime ;
// Schedule a job to run in 24 hours
$scheduledAt = ( new DateTime ( '+24 hours' )) -> format ( DateTime :: ATOM );
$client -> jobs -> create ([
'queue' => 'reminders' ,
'payload' => [ 'userId' => 'usr_123' , 'type' => 'cart-abandoned' ],
'scheduledAt' => $scheduledAt,
'idempotencyKey' => "reminder-{ $userId }-cart" ,
]);

Cron Schedules (Recurring)
Create recurring jobs with cron expressions:
cURL Node.js Python Go PHP
Copy # Create a cron schedule
curl -X POST https://api.spooled.cloud/api/v1/schedules \
-H "Authorization: Bearer sp_live_YOUR_API_KEY" \
-H "Content-Type: application/json" \
-d '{
"name": "Daily Report",
"cron_expression": "0 0 9 * * *",
"timezone": "America/New_York",
"queue_name": "reports",
"payload_template": {"type": "daily_report"}
}' // Create a cron schedule
const schedule = await client.schedules. create ({
name: 'Daily Report' ,
cronExpression: '0 0 9 * * *' ,
timezone: 'America/New_York' ,
queueName: 'reports' ,
payloadTemplate: { type: 'daily_report' },
}); # Create a cron schedule
schedule = client.schedules.create({
"name" : "Daily Report" ,
"cron_expression" : "0 0 9 * * *" ,
"timezone" : "America/New_York" ,
"queue_name" : "reports" ,
"payload_template" : { "type" : "daily_report" }
}) import (
" context "
" github.com/spooled-cloud/spooled-sdk-go/spooled "
" github.com/spooled-cloud/spooled-sdk-go/spooled/resources "
)
func ptr [ T any ]( v T ) * T { return & v }
client, err := spooled. NewClient (spooled. WithAPIKey ( "sp_live_YOUR_API_KEY" ))
if err != nil {
panic (err)
}
schedule, err := client. Schedules (). Create (context. Background (), & resources . CreateScheduleRequest {
Name: "Daily Report" ,
CronExpression: "0 0 9 * * *" ,
Timezone: ptr ( "America/New_York" ),
QueueName: "reports" ,
PayloadTemplate: map [ string ] interface {}{ "type" : "daily_report" },
})
if err != nil {
panic (err)
}
fmt. Printf ( "Created schedule: %s\n " , schedule.ID) <? php
// Create a cron schedule
$schedule = $client -> schedules -> create ([
'name' => 'Daily Report' ,
'cronExpression' => '0 0 9 * * *' ,
'timezone' => 'America/New_York' ,
'queue' => 'reports' ,
'payloadTemplate' => [ 'type' => 'daily_report' ],
]);
echo "Created schedule: { $schedule -> id } \n " ;

Bulk Enqueue
Enqueue multiple jobs in a single request (up to 100 jobs):
cURL Node.js Python Go PHP
Copy # Bulk enqueue up to 100 jobs
curl -X POST https://api.spooled.cloud/api/v1/jobs/bulk \
-H "Authorization: Bearer sp_live_YOUR_API_KEY" \
-H "Content-Type: application/json" \
-d '{
"queue_name": "notifications",
"jobs": [
{"payload": {"type": "email", "to": "a@example.com"}},
{"payload": {"type": "sms", "to": "+1234567890"}},
{"payload": {"type": "push", "token": "abc123"}}
]
}' // Bulk enqueue multiple jobs (up to 100)
const result = await client.jobs. bulkEnqueue ({
queueName: 'notifications' ,
jobs: [
{ payload: { type: 'email' , to: 'a@example.com' } },
{ payload: { type: 'sms' , to: '+1234567890' } },
{ payload: { type: 'push' , token: 'abc123' } },
],
defaultMaxRetries: 5 ,
});
console. log ( `Enqueued ${ result . successCount } jobs` ); # Bulk enqueue multiple jobs (up to 100)
result = client.jobs.bulk_enqueue({
"queue_name" : "notifications" ,
"jobs" : [
{ "payload" : { "type" : "email" , "to" : "a@example.com" }},
{ "payload" : { "type" : "sms" , "to" : "+1234567890" }},
{ "payload" : { "type" : "push" , "token" : "abc123" }},
],
"default_max_retries" : 5
})
print ( f "Enqueued { result.success_count } jobs" ) import (
" context "
" github.com/spooled-cloud/spooled-sdk-go/spooled "
" github.com/spooled-cloud/spooled-sdk-go/spooled/resources "
)
client, _ := spooled. NewClient (spooled. WithAPIKey ( "sp_live_YOUR_API_KEY" ))
result, err := client. Jobs (). BulkEnqueue (context. Background (), & resources . BulkEnqueueRequest {
QueueName: "notifications" ,
Jobs: [] resources . BulkJobItem {
{Payload: map [ string ] interface {}{ "type" : "email" , "to" : "a@example.com" }},
{Payload: map [ string ] interface {}{ "type" : "sms" , "to" : "+1234567890" }},
{Payload: map [ string ] interface {}{ "type" : "push" , "token" : "abc123" }},
},
})
if err != nil {
panic (err)
}
fmt. Printf ( "Enqueued %d jobs \n " , result.Succeeded) <? php
// Bulk enqueue multiple jobs (up to 100)
$result = $client -> jobs -> bulkEnqueue ([
'queue' => 'notifications' ,
'jobs' => [
[ 'payload' => [ 'type' => 'email' , 'to' => 'a@example.com' ]],
[ 'payload' => [ 'type' => 'sms' , 'to' => '+1234567890' ]],
[ 'payload' => [ 'type' => 'push' , 'token' => 'abc123' ]],
],
'defaultMaxRetries' => 5 ,
]);
echo "Enqueued { $result -> successCount } jobs \n " ;

Queue Management

| Operation | Endpoint |
| --- | --- |
| Pause queue | POST /api/v1/queues/:name/pause |
| Resume queue | POST /api/v1/queues/:name/resume |
| Get queue stats | GET /api/v1/queues/:name/stats |
| Retry failed job | POST /api/v1/jobs/:id/retry |
| Cancel job | POST /api/v1/jobs/:id/cancel |
Real-time Updates
Get instant notifications when jobs complete or fail using WebSocket or Server-Sent Events (SSE).
WebSocket Connection
cURL Node.js Python Go PHP
Copy # WebSocket connections require a WebSocket client.
# Example using websocat (install: https://github.com/vi/websocat)
#
# 1) Exchange API key for a JWT access token (requires jq)
TOKEN = $( curl -s -X POST https://api.spooled.cloud/api/v1/auth/login \
-H "Content-Type: application/json" \
-d '{"api_key":"sp_live_YOUR_API_KEY"}' | jq -r .access_token )
# 2) Connect (token goes in the query string)
websocat "wss://api.spooled.cloud/api/v1/ws?token= $TOKEN "
# 3) Subscribe by sending JSON commands over the socket.
# (Tip: see the Node.js/Python SDK tabs for a ready-to-run subscription example.) import { SpooledClient } from '@spooled/sdk' ;
const client = new SpooledClient ({
apiKey: process.env. SPOOLED_API_KEY ! ,
});
// WebSocket realtime client (uses /api/v1/ws?token=... under the hood)
const realtime = await client. realtime ({ type: 'websocket' });
realtime. on ( 'job.created' , ( data ) => {
console. log ( 'job.created:' , data);
});
realtime. on ( 'job.completed' , ( data ) => {
console. log ( 'job.completed:' , data);
});
await realtime. connect ();
await realtime. subscribe ({ queueName: 'orders' }); from spooled import SpooledClient
from spooled.realtime import SubscriptionFilter
import os
client = SpooledClient( api_key = os.environ[ "SPOOLED_API_KEY" ])
# WebSocket realtime client (uses /api/v1/ws?token=... under the hood)
realtime = client.realtime( type = "websocket" )
@realtime.on ( "job.created" )
def on_job_created (data):
print ( "job.created:" , data)
@realtime.on ( "job.completed" )
def on_job_completed (data):
print ( "job.completed:" , data)
realtime.connect()
realtime.subscribe(SubscriptionFilter( queue = "orders" )) import (
" context "
" github.com/spooled-cloud/spooled-sdk-go/spooled "
" github.com/spooled-cloud/spooled-sdk-go/spooled/realtime "
)
client, _ := spooled. NewClient (spooled. WithAPIKey ( "sp_live_YOUR_API_KEY" ))
ws := client. Realtime (). WebSocket ()
ws. OnEvent ( "job.created" , func ( data realtime . EventData ) {
fmt. Println ( "job.created:" , data)
})
ws. OnEvent ( "job.completed" , func ( data realtime . EventData ) {
fmt. Println ( "job.completed:" , data)
})
ws. Connect (context. Background ())
ws. Subscribe ( realtime . Filter {QueueName: "orders" }) <? php
use Spooled\SpooledClient ;
use Spooled\Config\ClientOptions ;
$client = new SpooledClient ( new ClientOptions (
apiKey : getenv ( 'SPOOLED_API_KEY' ),
));
// WebSocket realtime client
$realtime = $client -> realtime ();
$realtime -> ws () -> onEvent ( 'job.created' , function ( array $data) : void {
echo "job.created: " . json_encode ($data) . " \n " ;
});
$realtime -> ws () -> onEvent ( 'job.completed' , function ( array $data) : void {
echo "job.completed: " . json_encode ($data) . " \n " ;
});
$realtime -> ws () -> connect ();
$realtime -> ws () -> subscribe ([ 'queueName' => 'orders' ]);

Server-Sent Events
cURL Node.js Python Go PHP
Copy # Stream events via Server-Sent Events
curl -N \
-H "Accept: text/event-stream" \
-H "Authorization: Bearer sp_live_YOUR_API_KEY" \
"https://api.spooled.cloud/api/v1/events"
# Stream events for a specific queue
curl -N \
-H "Accept: text/event-stream" \
-H "Authorization: Bearer sp_live_YOUR_API_KEY" \
"https://api.spooled.cloud/api/v1/events/queues/my-queue" import { SpooledClient } from '@spooled/sdk' ;
const client = new SpooledClient ({
apiKey: process.env. SPOOLED_API_KEY ! ,
});
// SSE realtime client (uses /api/v1/events with Authorization header under the hood)
const realtime = await client. realtime ({ type: 'sse' });
realtime. on ( 'job.created' , ( data ) => {
console. log ( 'job.created:' , data);
});
await realtime. connect ();
await realtime. subscribe ({ queueName: 'orders' }); from spooled import SpooledClient
from spooled.realtime import SubscriptionFilter
import os
client = SpooledClient( api_key = os.environ[ "SPOOLED_API_KEY" ])
realtime = client.realtime( type = "sse" )
@realtime.on ( "job.created" )
def on_job_created (data):
print ( "job.created:" , data)
realtime.connect()
realtime.subscribe(SubscriptionFilter( queue = "orders" )) import (
" context "
" github.com/spooled-cloud/spooled-sdk-go/spooled "
" github.com/spooled-cloud/spooled-sdk-go/spooled/realtime "
)
client, _ := spooled. NewClient (spooled. WithAPIKey ( "sp_live_YOUR_API_KEY" ))
sse := client. Realtime (). SSE ()
sse. OnEvent ( "job.created" , func ( data realtime . EventData ) {
fmt. Println ( "job.created:" , data)
})
sse. Connect (context. Background ())
sse. Subscribe ( realtime . Filter {QueueName: "orders" }) <? php
use Spooled\SpooledClient ;
use Spooled\Config\ClientOptions ;
$client = new SpooledClient ( new ClientOptions (
apiKey : getenv ( 'SPOOLED_API_KEY' ),
));
// SSE realtime client
$realtime = $client -> realtime ();
$realtime -> sse () -> subscribe ( function ( array $event) : void {
echo "Event: { $event ['type']} - Job: { $event ['data']['jobId']} \n " ;
});
$realtime -> sse () -> subscribeToQueue ( 'orders' , function ( array $event) : void {
echo "Queue event: { $event ['type']} \n " ;
});
$realtime -> sse () -> listen ();

Event Types

| Event | Description |
| --- | --- |
| job.created | New job enqueued |
| job.started | Job started by worker |
| job.completed | Job finished successfully |
| job.failed | Job failed (will retry if retries remaining) |
| job.progress | Progress updates while processing |
| job.status_changed | Job status transitions |
| queue.paused | Queue processing paused |
| queue.resumed | Queue processing resumed |
Key Concepts

Idempotency
Use idempotency_key to prevent
duplicate processing. If a job with the same key exists, the request returns the existing job.
Automatic Retries
Failed jobs retry automatically with exponential backoff (1s, 2s, 4s, 8s...). Configure
max_retries per job or queue.
Dead-Letter Queue
Jobs that exhaust retries move to the DLQ. Inspect failures and replay them with a single
API call or bulk retry all DLQ jobs.
Priority Queues
Set priority from 0-100.
Higher priority jobs are processed first. Default is 0.
Job Leases
Jobs are claimed with a 5-minute lease. If not completed before expiry, the job is
released for another worker. Renew leases for long-running jobs.
Real-time Updates
Stream job updates via WebSocket or SSE. Get instant notifications when jobs complete
or fail without polling.
Troubleshooting

Invalid API Key
📍 Account → API Keys
What to look for:
→ Key starts with sp_live_ or sp_test_ (or legacy sk_)
→ Key hasn't been revoked
→ Using the correct organization's key
Actions:
✓ Generate a new key if needed
✓ Check that environment variables are set correctly
Rate Limited
📍 Account → Usage
What to look for:
→ Current request rate vs limit
→ Retry-After header value
Actions:
✓ Implement exponential backoff
✓ Use bulk operations where possible
✓ Consider upgrading your plan
Job Stuck in Processing
📍 Dashboard → Jobs → Processing
What to look for:
→ Worker heartbeat status
→ Lease expiration time
→ Worker logs for errors
Actions:
✓ Jobs with expired leases auto-recover
✓ Force-fail stuck jobs via API
✓ Check worker health
Next Steps