Real-world examples
These examples are made for “normal” daily work. No buzzwords — just copy/paste setups you can run today.
Tip: If you’re new, start with Quickstart first,
then come back here for real-life patterns.
Example 0: A tiny worker that prints jobs
A worker is a small program that pulls jobs from a queue. This one does the simplest thing possible:
it prints the job and returns { ok: true }.
Worker: print jobs from a queue
// Worker: pull jobs from 'my-queue' and print them — the simplest possible handler.
import { SpooledClient, SpooledWorker } from '@spooled/sdk';

const client = new SpooledClient({ apiKey: process.env.SPOOLED_API_KEY! });

const worker = new SpooledWorker(client, {
  queueName: 'my-queue',
  concurrency: 1, // one job at a time — easiest to reason about while learning
});

// Returning a value from the handler completes the job successfully.
worker.process(async (ctx) => {
  console.log('Job ID:', ctx.jobId);
  console.log('Payload:', ctx.payload);
  return { ok: true };
});

await worker.start();
from spooled import SpooledClient
from spooled.worker import SpooledWorker
import os
client = SpooledClient( api_key = os.environ[ "SPOOLED_API_KEY" ])
worker = SpooledWorker(client, queue_name = "my-queue" , concurrency = 1 )
@worker.process
def handle_job (ctx):
print ( "Job ID:" , ctx.job_id)
print ( "Payload:" , ctx.payload)
return { "ok" : True }
worker.start() # Blocking package main
import (
" context "
" fmt "
" os "
" github.com/spooled-cloud/spooled-sdk-go/spooled "
" github.com/spooled-cloud/spooled-sdk-go/spooled/worker "
)
func main () {
client, err := spooled. NewClient (spooled. WithAPIKey (os. Getenv ( "SPOOLED_API_KEY" )))
if err != nil {
panic (err)
}
w := worker. NewWorker (client. Jobs (), client. Workers (), worker . Options {
QueueName: "my-queue" ,
Concurrency: 1 ,
})
w. Process ( func ( ctx * worker . JobContext ) ( map [ string ] any , error ) {
fmt. Println ( "Job ID:" , ctx.JobID)
fmt. Println ( "Payload:" , ctx.Payload)
return map [ string ] any { "ok" : true }, nil
})
if err := w. Start (context. Background ()); err != nil {
panic (err)
}
} <? php
use Spooled\SpooledClient;
use Spooled\Config\ClientOptions;
use Spooled\Worker\SpooledWorker;
use Spooled\Worker\WorkerConfig;
use Spooled\Worker\JobContext;

$client = new SpooledClient(new ClientOptions(
    apiKey: getenv('SPOOLED_API_KEY'),
));

$worker = new SpooledWorker($client, new WorkerConfig(
    queueName: 'my-queue',
    concurrency: 1,
));

// Print each job; returning an array completes the job successfully.
$worker->process(function (JobContext $ctx): array {
    echo "Job ID: {$ctx->jobId()}\n";
    print_r($ctx->payload());
    return ['ok' => true];
});

$worker->start();
How to run
- Set SPOOLED_API_KEY in your environment
- Pick a queue name (example: my-queue)
- Run the worker, then create jobs into that queue

Example 1: New user signup → enqueue a job
When a user signs up, your API should respond fast. So you enqueue a job and do slow work in the background
(send email, create CRM record, generate PDF, etc.).
Create a job (signup / background task)
cURL Node.js Python Go PHP
# Create a job; idempotency_key makes retried requests safe (no duplicates).
curl -X POST https://api.spooled.cloud/api/v1/jobs \
  -H "Authorization: Bearer sp_live_YOUR_API_KEY" \
  -H "Content-Type: application/json" \
  -d '{
    "queue_name": "my-queue",
    "payload": {
      "event": "user.created",
      "user_id": "usr_123",
      "email": "alice@example.com"
    },
    "idempotency_key": "user-created-usr_123"
  }'
import { SpooledClient } from '@spooled/sdk';
const client = new SpooledClient({
  apiKey: process.env.SPOOLED_API_KEY!,
});

const userId = 'usr_123';

// Create a job
const { id } = await client.jobs.create({
  queueName: 'email-notifications',
  payload: {
    to: 'user@example.com',
    subject: 'Welcome!',
    template: 'welcome',
  },
  idempotencyKey: `welcome-${userId}`, // retries won't create duplicates
  maxRetries: 5,
});

console.log(`Created job: ${id}`);
from spooled import SpooledClient
import os
client = SpooledClient( api_key = os.environ[ "SPOOLED_API_KEY" ])
image_id = "img_123"
# Create a background job
result = client.jobs.create({
"queue_name" : "image-processing" ,
"payload" : {
"image_url" : "https://example.com/image.jpg" ,
"operations" : [ "resize" , "compress" ],
"output_format" : "webp"
},
"idempotency_key" : f "process-image- { image_id } " ,
"max_retries" : 3
})
print ( f "Created job: { result.id } " )
client.close() package main
import (
" context "
" fmt "
" os "
" github.com/spooled-cloud/spooled-sdk-go/spooled "
" github.com/spooled-cloud/spooled-sdk-go/spooled/resources "
)
func ptr [ T any ]( v T ) * T { return & v }
func main () {
client, err := spooled. NewClient (spooled. WithAPIKey (os. Getenv ( "SPOOLED_API_KEY" )))
if err != nil {
panic (err)
}
resp, err := client. Jobs (). Create (context. Background (), & resources . CreateJobRequest {
QueueName: "my-queue" ,
Payload: map [ string ] any { "key" : "value" },
IdempotencyKey: ptr ( "unique-key" ),
MaxRetries: ptr ( 3 ),
})
if err != nil {
panic (err)
}
fmt. Printf ( "Created job: %s\n " , resp.ID)
} <? php
use Spooled\SpooledClient;
use Spooled\Config\ClientOptions;

$client = new SpooledClient(new ClientOptions(
    apiKey: getenv('SPOOLED_API_KEY'),
));

$userId = 'usr_123';

// Create a job
$job = $client->jobs->create([
    'queue' => 'email-notifications',
    'payload' => [
        'to' => 'user@example.com',
        'subject' => 'Welcome!',
        'template' => 'welcome',
    ],
    'idempotencyKey' => "welcome-{$userId}", // retries won't create duplicates
    'maxRetries' => 5,
]);

echo "Created job: {$job->id}\n";
Use idempotency_key (or idempotencyKey in SDKs) so retries don’t create duplicates.
Example 2: GitHub issue opened → enqueue a job (no servers needed)
If you don’t want to run a webhook server, GitHub Actions can send jobs to Spooled for you.
This is a very common “automation” pattern for small teams.
Step 1: Add repo secrets: SPOOLED_WEBHOOK_URL and SPOOLED_WEBHOOK_TOKEN
Step 2: Add this workflow:
name: Spooled - enqueue GitHub issues
on:
  issues:
    types: [opened, reopened]

jobs:
  enqueue:
    runs-on: ubuntu-latest
    steps:
      - name: Send issue to Spooled
        env:
          SPOOLED_WEBHOOK_URL: ${{ secrets.SPOOLED_WEBHOOK_URL }}
          SPOOLED_WEBHOOK_TOKEN: ${{ secrets.SPOOLED_WEBHOOK_TOKEN }}
        run: |
          python - << 'PY' > payload.json
          import json, os
          event = json.load(open(os.environ["GITHUB_EVENT_PATH"]))
          # The workflow also fires on "reopened", so derive the event type
          # from the action instead of hard-coding "opened".
          action = event.get("action", "opened")
          body = {
              "queue_name": "github-events",
              "event_type": f"github.issue.{action}",
              # NOTE: keyed on the issue ID only, so reopening an already-enqueued
              # issue is deduplicated; include the action if you want one job per event.
              "idempotency_key": f"github-issue-{event['issue']['id']}",
              "payload": {
                  "repo": os.environ.get("GITHUB_REPOSITORY"),
                  "number": event["issue"]["number"],
                  "title": event["issue"]["title"],
                  "url": event["issue"]["html_url"],
                  "author": event["issue"]["user"]["login"],
              },
          }
          print(json.dumps(body))
          PY
          curl -sS -X POST "$SPOOLED_WEBHOOK_URL" \
            -H "Content-Type: application/json" \
            -H "X-Webhook-Token: $SPOOLED_WEBHOOK_TOKEN" \
            --data-binary "@payload.json"
Step 3: Process the jobs
Start the worker from Example 0, but change the queue name to github-events.
Example 3: Run a job every day (cron schedules)
Need daily reports, cleanup, reminders, renewals? Use schedules. Spooled uses 6-field cron :
second minute hour day month weekday.
Create a cron schedule
cURL Node.js Python Go PHP
# Create a cron schedule (6-field cron: second minute hour day month weekday;
# "0 0 9 * * *" = 09:00:00 every day in the given timezone)
curl -X POST https://api.spooled.cloud/api/v1/schedules \
  -H "Authorization: Bearer sp_live_YOUR_API_KEY" \
  -H "Content-Type: application/json" \
  -d '{
    "name": "Daily Report",
    "cron_expression": "0 0 9 * * *",
    "timezone": "America/New_York",
    "queue_name": "reports",
    "payload_template": {"type": "daily_report"}
  }'
// Create a cron schedule
const schedule = await client.schedules.create({
  name: 'Daily Report',
  cronExpression: '0 0 9 * * *', // 6-field cron: 09:00:00 every day
  timezone: 'America/New_York',
  queueName: 'reports',
  payloadTemplate: { type: 'daily_report' },
});
# Create a cron schedule
schedule = client.schedules.create({
"name" : "Daily Report" ,
"cron_expression" : "0 0 9 * * *" ,
"timezone" : "America/New_York" ,
"queue_name" : "reports" ,
"payload_template" : { "type" : "daily_report" }
}) import (
" context "
" github.com/spooled-cloud/spooled-sdk-go/spooled "
" github.com/spooled-cloud/spooled-sdk-go/spooled/resources "
)
func ptr [ T any ]( v T ) * T { return & v }
client, err := spooled. NewClient (spooled. WithAPIKey ( "sp_live_YOUR_API_KEY" ))
if err != nil {
panic (err)
}
schedule, err := client. Schedules (). Create (context. Background (), & resources . CreateScheduleRequest {
Name: "Daily Report" ,
CronExpression: "0 0 9 * * *" ,
Timezone: ptr ( "America/New_York" ),
QueueName: "reports" ,
PayloadTemplate: map [ string ] interface {}{ "type" : "daily_report" },
})
if err != nil {
panic (err)
}
fmt. Printf ( "Created schedule: %s\n " , schedule.ID) <? php
// Create a cron schedule
$schedule = $client->schedules->create([
    'name' => 'Daily Report',
    'cronExpression' => '0 0 9 * * *', // 6-field cron: 09:00:00 every day
    'timezone' => 'America/New_York',
    'queue' => 'reports',
    'payloadTemplate' => ['type' => 'daily_report'],
]);

echo "Created schedule: {$schedule->id}\n";
Example 4: CSV import → one job per row
A super common real task: you have a CSV file with hundreds of rows and you want to process it reliably.
Spooled is perfect for this.
Read users.csv and enqueue one job per row
import { SpooledClient } from '@spooled/sdk';
import fs from 'node:fs/promises';

const client = new SpooledClient({ apiKey: process.env.SPOOLED_API_KEY! });

// NOTE: naive comma split — fine for simple files, but it does NOT handle
// quoted fields; use a real CSV parser for untrusted input.
const csv = await fs.readFile('users.csv', 'utf8');
const [headerLine, ...rows] = csv.trim().split(/\r?\n/);
const headers = headerLine.split(',').map((s) => s.trim());

for (const row of rows) {
  if (!row.trim()) continue; // skip blank lines
  const values = row.split(',').map((s) => s.trim());
  const payload: Record<string, string> = {};
  headers.forEach((h, i) => (payload[h] = values[i] ?? ''));
  await client.jobs.create({
    queueName: 'csv-import',
    payload,
    // Stable key per row so reruns don't double-enqueue.
    idempotencyKey: payload.email ? `csv-${payload.email}` : undefined,
  });
}

console.log('✅ Enqueued CSV jobs');
import csv
import os
from spooled import SpooledClient
client = SpooledClient( api_key = os.environ[ "SPOOLED_API_KEY" ])
with open ( "users.csv" , newline = "" ) as f:
reader = csv.DictReader(f)
for row in reader:
email = row.get( "email" ) or ""
client.jobs.create({
"queue_name" : "csv-import" ,
"payload" : row,
"idempotency_key" : f "csv- { email } " if email else None ,
})
print ( "✅ Enqueued CSV jobs" )
client.close() package main
import (
" context "
" encoding/csv "
" os "
" github.com/spooled-cloud/spooled-sdk-go/spooled "
" github.com/spooled-cloud/spooled-sdk-go/spooled/resources "
)
func ptr [ T any ]( v T ) * T { return & v }
func main () {
client, err := spooled. NewClient (spooled. WithAPIKey (os. Getenv ( "SPOOLED_API_KEY" )))
if err != nil {
panic (err)
}
f, err := os. Open ( "users.csv" )
if err != nil {
panic (err)
}
defer f. Close ()
r := csv. NewReader (f)
records, err := r. ReadAll ()
if err != nil {
panic (err)
}
if len (records) < 2 {
return
}
headers := records[ 0 ]
for _, rec := range records[ 1 :] {
payload := map [ string ] any {}
for i, h := range headers {
if i < len (rec) {
payload[h] = rec[i]
}
}
var idem * string
if email, ok := payload[ "email" ].( string ); ok && email != "" {
idem = ptr ( "csv-" + email)
}
_, err := client. Jobs (). Create (context. Background (), & resources . CreateJobRequest {
QueueName: "csv-import" ,
Payload: payload,
IdempotencyKey: idem,
})
if err != nil {
panic (err)
}
}
} <? php
use Spooled\SpooledClient;
use Spooled\Config\ClientOptions;

$client = new SpooledClient(new ClientOptions(
    apiKey: getenv('SPOOLED_API_KEY'),
));

$fh = fopen('users.csv', 'r');
if ($fh === false) {
    throw new RuntimeException('Cannot open users.csv');
}
$headers = fgetcsv($fh);

while (($row = fgetcsv($fh)) !== false) {
    // Skip blank lines (fgetcsv returns [null]) and rows whose column count
    // doesn't match the header — array_combine() fails on them.
    if ($row === [null] || count($row) !== count($headers)) {
        continue;
    }
    $payload = array_combine($headers, $row);
    $email = $payload['email'] ?? null;
    $client->jobs->create([
        'queue' => 'csv-import',
        'payload' => $payload,
        // Stable key per row so reruns don't double-enqueue.
        'idempotencyKey' => $email ? "csv-{$email}" : null,
    ]);
}
fclose($fh);

echo "✅ Enqueued CSV jobs\n";
In production, prefer idempotency_key based on a stable ID (email, user_id, order_id) so reruns don’t double-process.
Example 5: From “toy” to production
- Start with print: print payloads until you trust the pipeline
- Add idempotency: always for external events (Stripe IDs, GitHub IDs, etc.)
- Handle failures: throw an error to fail a job (Spooled retries)
- Use the dashboard: inspect jobs, errors, retries, DLQ