Resource APIs
Your stack defines resources like storage buckets, queues, and vaults. To use them in your application code, link them to your function and import the SDK.
1. Link resources to your function
const data = new alien.Storage("data").build()
const api = new alien.Function("api")
.link(data)
.build()
2. Access them in your code
import { storage } from "@alienplatform/sdk"
const store = await storage("data") // same name as in alien.ts
await store.put("reports/q1.json", JSON.stringify(report))
let ctx = AlienContext::init().await?;
let store = ctx.bindings().load_storage("data").await?; // same name as in alien.ts
store.put(&"reports/q1.json".into(), bytes).await?;
Credentials are injected automatically — IAM roles on AWS, Workload Identity on GCP, Managed Identity on Azure. No config files, no connection strings.
Storage
Object storage — S3 on AWS, Cloud Storage on GCP, Blob Storage on Azure.
import { storage } from "@alienplatform/sdk"
const store = await storage("files")
// Write
await store.put("reports/q1.json", JSON.stringify(report))
// Read
const { data } = await store.get("reports/q1.json")
const content = new TextDecoder().decode(data)
// List
for await (const entry of store.list("reports/")) {
console.log(entry.key, entry.size)
}
// Delete
await store.delete("reports/old.json")
// Generate a signed URL for direct browser uploads/downloads
const url = await store.signedUrl("reports/q1.json", { expiresIn: 3600 })
let ctx = AlienContext::init().await?;
let store = ctx.bindings().load_storage("files").await?;
// Write
store.put(&"reports/q1.json".into(), bytes).await?;
// Read
let result = store.get(&"reports/q1.json".into()).await?;
let content = result.bytes().await?;
// List
let mut stream = store.list(Some(&"reports/".into()));
while let Some(meta) = stream.next().await {
let meta = meta?;
println!("{} {}", meta.location, meta.size);
}
// Delete
store.delete(&"reports/old.json".into()).await?;
// Generate a signed URL
let url = store.presigned_get(
&"reports/q1.json".into(),
Duration::from_secs(3600),
).await?;
Full reference: Storage API | Behavior & limits
KV
Key-value store — DynamoDB on AWS, Firestore on GCP, Table Storage on Azure.
import { kv } from "@alienplatform/sdk"
const cache = await kv("cache")
// Write (with optional TTL)
await cache.set("user:123", JSON.stringify({ name: "Alice" }))
await cache.set("session:abc", token, { ttlMs: 3600_000 })
// Read
const value = await cache.get("user:123")
// Scan by prefix
for await (const entry of cache.scan("user:")) {
console.log(entry.key, new TextDecoder().decode(entry.value))
}
// Delete
await cache.delete("user:123")
let ctx = AlienContext::init().await?;
let cache = ctx.bindings().load_kv("cache").await?;
// Write (with optional TTL)
cache.put("user:123", value.into(), None).await?;
cache.put("session:abc", token.into(), Some(PutOptions {
ttl: Some(Duration::from_secs(3600)),
..Default::default()
})).await?;
// Read
if let Some(value) = cache.get("user:123").await? {
println!("{}", String::from_utf8_lossy(&value));
}
// Scan by prefix
let result = cache.scan_prefix("user:", Some(100), None).await?;
for (key, value) in result.items {
println!("{}: {}", key, String::from_utf8_lossy(&value));
}
// Delete
cache.delete("user:123").await?;
Full reference: KV API | Behavior & limits
Queue
Message queue — SQS on AWS, Pub/Sub on GCP, Service Bus on Azure.
import { queue } from "@alienplatform/sdk"
const q = await queue("tasks")
await q.send("job-source", JSON.stringify({ type: "process", id: "abc" }))
let ctx = AlienContext::init().await?;
let q = ctx.bindings().load_queue("tasks").await?;
q.send("tasks", MessagePayload::Json(serde_json::json!({
"type": "process", "id": "abc"
}))).await?;
To receive messages, use event handlers — see Responding to Events below.
Full reference: Queue API | Behavior & limits
Vault
Secret storage — SSM Parameter Store on AWS, Secret Manager on GCP, Key Vault on Azure.
import { vault } from "@alienplatform/sdk"
const secrets = await vault("credentials")
// Read a secret
const raw = await secrets.get("database")
const config = JSON.parse(raw)
// Connect to customer's database using their own credentials
const pool = new Pool({
host: config.host,
database: config.database,
password: config.password, // never leaves their cloud
})
let ctx = AlienContext::init().await?;
let secrets = ctx.bindings().load_vault("credentials").await?;
// Read a secret
let raw = secrets.get_secret("database").await?;
let config: DbConfig = serde_json::from_str(&raw)?;
// Connect to customer's database using their own credentials
let pool = PgPool::connect_with(
PgConnectOptions::new()
.host(&config.host)
.database(&config.database)
.password(&config.password) // never leaves their cloud
).await?;
Secrets are stored in the customer's cloud vault — you never see them in your logs or config files.
Full reference: Vault API | Behavior & limits
Responding to Events
Functions can react to events — queue messages, file uploads, and cron schedules. Register a handler, and Alien wires the trigger.
Queue Messages
import { kv, onQueueMessage } from "@alienplatform/sdk"
onQueueMessage("*", async (message) => {
const store = await kv("events")
await store.set(`queue:${message.id}`, JSON.stringify({
source: message.source,
payload: message.payload,
processedAt: new Date().toISOString(),
}))
})
ctx.on_queue_message("*", |message| async move {
let store = ctx.bindings().load_kv("events").await?;
store.put(
&format!("queue:{}", message.id),
serde_json::to_vec(&serde_json::json!({
"source": message.source,
"processedAt": chrono::Utc::now().to_rfc3339(),
}))?,
None,
).await?;
Ok(())
});
Use "*" to handle messages from any linked queue, or pass a specific queue name.
Storage Events
import { onStorageEvent } from "@alienplatform/sdk"
onStorageEvent("*", async (event) => {
console.log(event.eventType, event.objectKey, event.size)
// "created", "uploads/photo.jpg", 1048576
})
ctx.on_storage_event("*", |event| async move {
println!("{} {} {}", event.event_type, event.key, event.size);
Ok(())
});
Cron / Scheduled Events
import { onCronEvent } from "@alienplatform/sdk"
onCronEvent("*", async (event) => {
console.log(event.scheduleName, event.timestamp)
// run cleanup, generate reports, sync data...
})
ctx.on_cron_event("*", |event| async move {
println!("{} {}", event.schedule_name, event.scheduled_time);
// run cleanup, generate reports, sync data...
Ok(())
});
The schedule is defined in alien.ts via .trigger({ type: "schedule", cron: "0 * * * *" }). See Events & Triggers for trigger configuration.
Remote Commands
Define callable endpoints that your control plane can invoke remotely — no inbound networking, no open ports:
import { command, vault, kv } from "@alienplatform/sdk"
command("query", async ({ sql, useCache }) => {
const secrets = await vault("credentials")
const cache = await kv("cache")
if (useCache) {
const cached = await cache.get(`query:${hash(sql)}`)
if (cached) return { ...JSON.parse(cached), cached: true }
}
const config = JSON.parse(await secrets.get("database"))
const result = await runQuery(config, sql)
if (useCache) {
await cache.set(`query:${hash(sql)}`, JSON.stringify(result))
}
return { ...result, cached: false }
})
ctx.on_command("query", |params: QueryParams| async move {
let secrets = ctx.bindings().load_vault("credentials").await?;
let cache = ctx.bindings().load_kv("cache").await?;
if params.use_cache {
if let Some(cached) = cache.get(&format!("query:{}", hash(&params.sql))).await? {
let mut result: serde_json::Value = serde_json::from_slice(&cached)?;
result["cached"] = serde_json::json!(true);
return Ok(result);
}
}
let config: DbConfig = serde_json::from_str(
&secrets.get_secret("database").await?
)?;
let result = run_query(&config, &params.sql).await?;
if params.use_cache {
cache.put(
&format!("query:{}", hash(&params.sql)),
serde_json::to_vec(&result)?,
None,
).await?;
}
Ok(result)
});
See Remote Commands for the full guide.
Using Native Cloud SDKs
Every linked resource is also available as a JSON environment variable. Use any language, any SDK:
const binding = JSON.parse(process.env.ALIEN_DATA_BINDING!)
if (binding.service === "s3") {
const s3 = new S3Client({})
await s3.send(new GetObjectCommand({
Bucket: binding.bucketName,
Key: "reports/q1.json",
}))
}
import json, os, boto3
binding = json.loads(os.environ["ALIEN_DATA_BINDING"])
s3 = boto3.client("s3")
s3.get_object(Bucket=binding["bucketName"], Key="reports/q1.json")
The environment variable name follows the pattern ALIEN_{NAME}_BINDING — uppercased, hyphens become underscores. The JSON contains resource identifiers, never credentials. Cloud credentials are injected automatically (IAM roles, Workload Identity, Managed Identity).
Use native SDKs when you need platform-specific features like DynamoDB streams, S3 Select, or Pub/Sub ordering keys. They coexist with the Alien SDK in the same app.