Complete Examples
Use these end-to-end snippets as starting points for real applications and automation workflows.
RAG Pipeline
from touai import TouAI
with TouAI() as client:
conn = client.context_hub.connections.create(
name="Analytics DB",
connector_type="postgresql",
credentials={"host": "db.example.com", "database": "analytics", "...": "..."},
auto_sync=True,
)
client.context_hub.sync.wait_until_ready(conn.connection_id, timeout=300)
results = client.context_hub.search(
"What were our top-performing products in Q1?",
connection_ids=[conn.connection_id],
top_k=10,
rerank=True,
)
for r in results.results:
        print(f"[{r.similarity:.2f}] {r.content[:150]}")

Document Processing Pipeline
from touai import TouAI

with TouAI() as client:
    # Submit the PDF for extraction, with chunking enabled for downstream use.
    job = client.unstructured.jobs.create(
        source={"source_type": "url", "url": "https://example.com/report.pdf"},
        options={"chunking": {"enabled": True}},
    )

    # Poll until the extraction job finishes, then report its final status.
    result = client.unstructured.jobs.wait_until_complete(job.job_id)
    print(f"Processed: {result.status}")

    # Ask Deep Research to summarize the freshly processed document.
    research = client.deep_research.research(
        "Summarize the key findings from the processed document",
        mode="pro",
    )
    print(research.content)

Web Intelligence Gathering
from touai import TouAI
with TouAI() as client:
crawl = client.data_search.deep_crawl_and_wait(
"https://docs.example.com",
max_depth=3,
max_pages=100,
timeout=300,
)
print(f"Crawled {len(crawl.pages)} pages")
for event in client.deep_research.research_stream(
"What are the main features documented on this site?",
mode="pro",
):
if event.type == "complete":
            print(event.data.get("content"))

These examples are intended to be copied and adapted. Start with the simplest one that matches your workflow, then add auth, retries, and persistence for your production environment.