Skip to content

Multi-Tenant App

This example shows how to build a multi-tenant application where each user gets their own githosted repo. Repo IDs are stored in your database so agents can pick up work without knowing slugs.

import json
import os
from githosted import Client
# Module-level client shared by the provisioning helpers below; the token is
# read from the GITHOSTED_TOKEN environment variable (None if unset).
client = Client(token=os.environ.get("GITHOSTED_TOKEN"))
# -- Server side: repo provisioning --
def provision_user_repo(user_id: str) -> str:
    """Create a dedicated repo for a user and return its stable ID.

    The stable repo ID (e.g. "rp_a1b2c3d4") is persisted in your database
    so that later lookups never depend on a mutable slug.

    Args:
        user_id: Your application's identifier for the user.

    Returns:
        The stable repo ID to store and use for all subsequent access.
    """
    repo = client.create_repo(f"user-{user_id}")
    repo_id = repo.id  # e.g. "rp_a1b2c3d4"

    # Write initial scaffolding as a single atomic commit.
    with repo.transaction("Initialize user workspace") as tx:
        tx.write(
            "config.json",
            json.dumps(
                {
                    "userId": user_id,
                    "model": "claude-3",
                    "maxTokens": 4096,
                },
                indent=2,
            ),
        )
        # .gitkeep placeholders so the empty directories exist in the repo.
        tx.write("conversations/.gitkeep", "")
        tx.write("artifacts/.gitkeep", "")

    # Store the stable repo ID in your database.
    db.users.update(user_id, repo_id=repo_id)
    return repo_id
def get_user_repo(repo_id: str):
    """Look up a user's repo by the stable ID stored in your database."""
    return client.repo(id=repo_id)

Agents receive a repo ID from a job queue and operate on it directly:

import json
from dataclasses import dataclass
from githosted import Client
@dataclass
class AgentJob:
    """A unit of agent work pulled from the job queue.

    Carries everything a stateless agent needs: the stable repo ID,
    the conversation to append to, and the new user message.
    """

    repo_id: str  # stable repo identifier, e.g. "rp_a1b2c3d4"
    conversation_id: str  # which conversation file to read/update
    user_message: str  # the new message from the user
def process_job(job: AgentJob):
    """Process one agent job: read state, call the model, commit results.

    Agents are stateless — all durable state lives in the user's repo,
    referenced by the stable ID carried on the job.
    """
    client = Client()
    # Reference the repo by stable ID — no slug lookup needed.
    repo = client.repo(id=job.repo_id)

    # Read the user's config.
    config_file = repo.read("config.json")
    config = json.loads(config_file.content)

    # Read conversation history (if it exists); a missing file just means
    # this is the first message in the conversation.
    # NOTE(review): the exception type repo.read raises for a missing path
    # isn't visible here — narrow this catch if the SDK documents one.
    history = []
    try:
        history_file = repo.read(
            f"conversations/{job.conversation_id}.json"
        )
        history = json.loads(history_file.content)
    except Exception:
        pass  # First message in this conversation

    # Run your AI model.
    response = call_model(
        model=config["model"],
        max_tokens=config["maxTokens"],
        messages=[*history, {"role": "user", "content": job.user_message}],
    )

    # Save the updated conversation and any artifacts atomically — a single
    # commit, so readers never observe a partial update.
    with repo.transaction(
        f"Agent response for {job.conversation_id}"
    ) as tx:
        updated_history = [
            *history,
            {"role": "user", "content": job.user_message},
            {"role": "assistant", "content": response.text},
        ]
        tx.write(
            f"conversations/{job.conversation_id}.json",
            json.dumps(updated_history, indent=2),
        )
        # If the agent produced artifacts, save them too.
        for artifact in response.artifacts:
            tx.write(f"artifacts/{artifact.filename}", artifact.content)

## Listing a user’s files from an API endpoint
from githosted import Client
def handle_list_files(user_id: str, path: str = ""):
    """API handler: list files in a user's repo.

    Args:
        user_id: Your application's identifier for the user.
        path: Directory within the repo to list; "" means the repo root.

    Raises:
        NotFoundError: if the user doesn't exist or has no workspace.
    """
    user = db.users.find_by_id(user_id)
    if not user or not user.repo_id:
        raise NotFoundError("No workspace found")
    client = Client()
    repo = client.repo(id=user.repo_id)
    return repo.ls(path)
def handle_get_history(user_id: str, path: str | None = None):
    """API handler: get commit history for a user's repo.

    Args:
        user_id: Your application's identifier for the user.
        path: Optional path to restrict the history to; None means the
            whole repo.

    Returns:
        Up to 20 commits as JSON-serializable dicts (hash, subject,
        author, ISO-8601 date).

    Raises:
        NotFoundError: if the user doesn't exist or has no workspace.
    """
    user = db.users.find_by_id(user_id)
    # Guard against missing users/workspaces, consistent with
    # handle_list_files; the original dereferenced user.repo_id unchecked.
    if not user or not user.repo_id:
        raise NotFoundError("No workspace found")
    # The original used an undefined module-level `client`; create one
    # per request, matching handle_list_files.
    client = Client()
    repo = client.repo(id=user.repo_id)
    commits = repo.log(path or "", limit=20)
    return [
        {
            "hash": c.hash,
            "subject": c.subject,
            "author": c.author_name,
            "date": c.committed_at.isoformat(),
        }
        for c in commits
    ]
  • Use client.repo(id="rp_xxx") for stable references. Slugs can change if the workspace or repo is renamed. Store the repo.id in your database after create_repo and use it for all subsequent access.
  • One repo per tenant gives you natural isolation, per-user commit history, and the ability to diff any user’s state over time.
  • Agents are stateless. They receive a repo ID from your job queue, do their work, commit results, and exit. The repo is the durable state.
  • Transactions keep multi-file writes atomic — a conversation update and its artifacts land in a single commit, so readers never see a partial state.