Код: Выделить всё
File "/backend/.venv/lib/python3.11/site-packages/langchain_openai/chat_models/base.py", line 3386, in _convert_responses_chunk_to_generation_chunk
| chunk.annotation.model_dump(exclude_none=True, mode="json")
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^
| AttributeError: 'dict' object has no attribute 'model_dump'
</code>
Мой код для генерации ответа:
# NOTE(review): this is the interior of an async generator method (it uses
# `self` and `yield`); the enclosing `async def` and its indentation were
# lost when the code was pasted. Code kept byte-identical below.
prompt = chat_prompt
# Hard-coded OpenAI vector store id — presumably should come from config; verify.
vectore_store_id = "vs_68341b563aac8191af2df101ef7066a6"
# Tool spec for the OpenAI Responses API `file_search` tool.
fileTool = {"type": "file_search", "vector_store_ids": [vectore_store_id]}
llm_with_tools = self.llm.bind_tools([fileTool])
# LCEL pipeline: prompt template -> tool-bound chat model.
chain = prompt | llm_with_tools
output = ""
async for chunk in chain.astream(
{
"query": query,
"question": question,
"context": query_context,
"history": log,
},
):
try:
print(chunk.type)
# Accumulate and re-emit the streamed text of each chunk.
output += chunk.text()
yield AIMessageChunk(content=chunk.text())
except Exception:
# NOTE(review): this bare `except Exception` silently swallows every
# per-chunk failure — including the AttributeError from the traceback
# above ('dict' object has no attribute 'model_dump', raised inside
# langchain_openai when converting file_search annotation chunks).
# Consider logging the exception (logger.exception) instead of "Err".
print("Err")
@property
def chat_prompt(self):
    """Build the chat prompt template for answering a user query.

    Returns:
        ChatPromptTemplate: a system message establishing the mortgage-expert
        persona followed by a human message that expects a ``{query}`` variable.
    """
    # NOTE(review): the original computed `current_date = datetime.now()...`
    # but never used it — presumably it was meant to be interpolated into the
    # system message. The unused computation has been removed; re-add it to
    # the template text if date awareness is actually wanted.
    messages = [
        SystemMessagePromptTemplate.from_template(
            "You are top mortgage subject matter expert working.\n"
        ),
        HumanMessagePromptTemplate.from_template("{query}"),
    ]
    prompt = ChatPromptTemplate(messages=messages)
    return prompt
</p>
Подробнее здесь: https://stackoverflow.com/questions/796 ... on-library