Chat with the Global Knowledge Base
This guide shows how to create a chat session using the shared global knowledge base — no file uploads needed.
Prerequisites
- Credentials for a hosted SmartChat RAG API with a CHAT_USER role
- A pre-configured SmartChat RAG API server
1. Authenticate
import requests
import json

# Base URL of the SmartChat RAG API deployment.
base_url = "<BASE_URL>"

# Authenticate with a CHAT_USER account. The `json=` keyword serializes the
# payload and sets the Content-Type header for us.
response = requests.post(
    f"{base_url}/api/v1/auth/user",
    json={"username": "<USERNAME>", "password": "<PASSWORD>"},
)
response.raise_for_status()  # fail fast on bad credentials or a wrong URL

# Reuse these headers for every request below; the bearer token from the
# auth response authorizes the user.
headers = {"Authorization": f"Bearer {response.json()['access_token']}"}
2. Get Default Chat Configuration
# Fetch the chat configurations visible to this user.
response = requests.get(f"{base_url}/config-manager/api/v1/user/configs", headers=headers)
response.raise_for_status()  # surface auth/availability errors immediately
configs = response.json()

# Pick the configuration shared by the "default" user group. next() stops at
# the first match instead of materializing a list and indexing [0].
default_config = next(c for c in configs if c["userGroupId"] == "default")

# The first local-KB config carries the id needed to open a session and the
# list of LLMs this user may chat with.
local_kb_config = default_config["localKbConfigs"][0]
default_local_config_id = local_kb_config["id"]
allowed_llms = local_kb_config["allowed_llms"]
3. Create a Global Chat Session
# Create a chat session backed by the shared global knowledge base:
# globalContext=True means no user file uploads are required.
body = {
    "title": "Testing the SmartChat RAG API",
    "config": {
        "localConfigId": default_local_config_id,
        "globalContext": True,
        # Any model from allowed_llms works; use the first one here.
        "chatModel": allowed_llms[0]["name"],
    },
}
response = requests.post(f"{base_url}/chat-session-manager/api/v1/sessions/", headers=headers, json=body)
response.raise_for_status()  # don't index into the payload of a failed call

# The session id ties all subsequent chat turns together.
session_id = response.json()["sessionId"]
4. Chat
# Send a chat turn against the global knowledge base session created above.
body = {
    "sessionId": session_id,
    "userPrompt": "Can you summarize the context to me?",
}
response = requests.post(f"{base_url}/query-pipelines/api/v1/chat", headers=headers, json=body)
response.raise_for_status()  # raise a clear HTTPError instead of printing an error body
print(response.json())