Zum Inhalt springen

LangChain Integration

Dieser Inhalt ist für v1.0.0. Geh zur neuesten Version, um die aktuellste Dokumentation zu bekommen.

Dieser Inhalt ist noch nicht in deiner Sprache verfügbar.

Use AI Foundation Services with LangChain for RAG, chains, and agent workflows. Since AIFS is OpenAI-compatible, you can use the langchain-openai package directly.


Install the required packages in a terminal:
pip install langchain langchain-openai

import os
from langchain_openai import ChatOpenAI

# Point the standard LangChain OpenAI chat client at the AIFS
# OpenAI-compatible endpoint. Credentials and base URL come from the
# environment so no secrets are hard-coded in the example.
llm = ChatOpenAI(
    openai_api_key=os.getenv("OPENAI_API_KEY"),
    openai_api_base=os.getenv("OPENAI_BASE_URL"),
    model_name="Llama-3.3-70B-Instruct",
    streaming=True,  # yield tokens incrementally instead of one final message
)

# Test: print tokens to stdout as they arrive from the stream.
for chunk in llm.stream("Write me a short poem about cloud computing."):
    # Some chunks (e.g. role/metadata frames) carry no text content.
    if chunk.content:
        print(chunk.content, end="", flush=True)

from typing import List
from langchain_openai import OpenAIEmbeddings
import os
class AIFSEmbeddings(OpenAIEmbeddings):
    """``OpenAIEmbeddings`` subclass that calls the AIFS embeddings endpoint directly.

    Overrides the four embedding entry points to send the raw texts straight
    to the OpenAI-compatible ``embeddings.create`` API, bypassing the base
    class's request pre-processing.
    """

    def embed_documents(self, texts: List[str]) -> List[List[float]]:
        """Embed a batch of documents in a single API call.

        Returns one embedding vector per input text, in input order.
        """
        response = self.client.create(input=texts, model=self.model)
        return [item.embedding for item in response.data]

    async def aembed_documents(self, texts: List[str]) -> List[List[float]]:
        """Async variant of :meth:`embed_documents`."""
        response = await self.async_client.create(input=texts, model=self.model)
        return [item.embedding for item in response.data]

    def embed_query(self, text: str) -> List[float]:
        """Embed a single query string and return its vector."""
        return self.embed_documents([text])[0]

    async def aembed_query(self, text: str) -> List[float]:
        """Async variant of :meth:`embed_query`."""
        vectors = await self.aembed_documents([text])
        return vectors[0]
# Embedding client targeting the AIFS OpenAI-compatible endpoint;
# credentials and base URL are read from the environment.
embed_model = AIFSEmbeddings(
    openai_api_key=os.getenv("OPENAI_API_KEY"),
    openai_api_base=os.getenv("OPENAI_BASE_URL"),
    model="text-embedding-bge-m3",
)

# Test: embed two short strings and report the vector dimensionality.
embeddings = embed_model.embed_documents(["Hello!", "World!"])
print(f"Dimensions: {len(embeddings[0])}")