Chainlit provides a LlamaIndex callback handler that lets it display intermediate steps (retrieval, LLM calls) in the UI. Register it with LlamaIndex's `CallbackManager` when building the `ServiceContext`:
```python
from llama_index.callbacks.base import CallbackManager
from llama_index import (
    LLMPredictor,
    ServiceContext,
)
from langchain.chat_models import ChatOpenAI
import chainlit as cl


@cl.on_chat_start
async def factory():
    # Use a streaming chat model so tokens can be displayed as they arrive.
    llm_predictor = LLMPredictor(
        llm=ChatOpenAI(
            temperature=0,
            model_name="gpt-3.5-turbo",
            streaming=True,
        ),
    )
    # Registering cl.LlamaIndexCallbackHandler() here is what surfaces
    # LlamaIndex's intermediate steps in the Chainlit UI.
    service_context = ServiceContext.from_defaults(
        llm_predictor=llm_predictor,
        chunk_size=512,
        callback_manager=CallbackManager([cl.LlamaIndexCallbackHandler()]),
    )
```
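To see the callback handler in action, the `ServiceContext` needs to be used by an index and a query engine that answers incoming messages. The sketch below is one minimal way to complete the app; the `./data` directory and the `"query_engine"` session key are illustrative assumptions, not part of the integration itself.

```python
from llama_index import (
    LLMPredictor,
    ServiceContext,
    SimpleDirectoryReader,
    VectorStoreIndex,
)
from llama_index.callbacks.base import CallbackManager
from langchain.chat_models import ChatOpenAI
import chainlit as cl


@cl.on_chat_start
async def factory():
    service_context = ServiceContext.from_defaults(
        llm_predictor=LLMPredictor(
            llm=ChatOpenAI(temperature=0, model_name="gpt-3.5-turbo", streaming=True)
        ),
        chunk_size=512,
        callback_manager=CallbackManager([cl.LlamaIndexCallbackHandler()]),
    )
    # "./data" is an assumed local directory of documents to index.
    documents = SimpleDirectoryReader("./data").load_data()
    index = VectorStoreIndex.from_documents(documents, service_context=service_context)
    # Keep a per-session query engine so each chat has its own state.
    cl.user_session.set("query_engine", index.as_query_engine())


@cl.on_message
async def main(message: cl.Message):
    query_engine = cl.user_session.get("query_engine")
    # Run the synchronous query off the event loop to keep the UI responsive;
    # the callback handler renders each intermediate step as it happens.
    response = await cl.make_async(query_engine.query)(message.content)
    await cl.Message(content=str(response)).send()
```

Because the query engine is stored with `cl.user_session`, concurrent users do not share state; `cl.make_async` wraps the blocking `query` call so Chainlit's event loop stays free while LlamaIndex works.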