Code:
from vanna.ollama import Ollama
from vanna.chromadb import ChromaDB_VectorStore
class MyVanna(ChromaDB_VectorStore, Ollama):
    def __init__(self, config=None):
        ChromaDB_VectorStore.__init__(self, config=config)
        Ollama.__init__(self, config=config)
vn = MyVanna(config={'model': 'mistral'})
vn.connect_to_postgres(host='', dbname='', user='', password='', port='')
# The information schema query may need some tweaking depending on your database. This is a good starting point.
df_information_schema = vn.run_sql("SELECT * FROM INFORMATION_SCHEMA.COLUMNS")
# This will break up the information schema into bite-sized chunks that can be referenced by the LLM
plan = vn.get_training_plan_generic(df_information_schema)
print(plan)
# If the plan looks good, run the training step (left uncommented here)
print("Training starts")
vn.train(plan=plan)
print("Training ends")
Code:
...
Training starts
C:\Users\bodoque\.cache\chroma\onnx_models\all-MiniLM-L6-v2\onnx.tar.gz: 100%|██████████| 79.3M/79.3M [00:05
More details here: [url]https://stackoverflow.com/questions/78834030/vannaai-with-ollama-and-chromadb-sample-program-fails-at-training-model-step[/url]