Код: Выделить всё
def ask_ollama(messages):
    """Send the full conversation to the local Ollama chat API and return the reply text.

    Parameters
    ----------
    messages : list[dict]
        Conversation history as ``[{'role': ..., 'content': ...}, ...]``.

    Returns
    -------
    str
        The assistant message content produced by the model.

    Raises
    ------
    requests.HTTPError
        If the API responds with a non-2xx status (previously this surfaced
        as an opaque ``KeyError`` while digging into the error payload).
    """
    import requests  # local import keeps this paste-able snippet self-contained

    payload = {
        'model': 'llama3.2:1b',  # model ID served by the local Ollama daemon
        'messages': messages,    # pass the entire conversation history
        'stream': False,         # request one complete JSON response, not a stream
    }
    response = requests.post(
        "http://localhost:11434/api/chat",  # default Ollama API endpoint
        json=payload,
        timeout=300,  # LLM generation is slow, but don't hang forever
    )
    response.raise_for_status()  # fail loudly on HTTP errors
    # response.json() replaces the manual json.loads(response.text) round-trip.
    return response.json()['message']['content']
def _build_prompt(context, question):
    """Wrap *question* in the paragraph-scrutiny instructions, prefixed by *context*.

    Factored out because the identical template was duplicated for the
    initial question and every follow-up question.
    """
    return (
        f"{context} would typically consist of plenty of paragraphs. "
        f"Scrutinize each of the paragraphs, and try to answer {question}. "
        "Be patient, do not overlook any of those paragraphs. "
        "In some occasion, you need to group certain amounts of the paragraphs to answer the question, beware of it. "
        "If you have tried your best but still do not find any useful information from those paragraphs for the answer, "
        "then begin your response with 'it seems that there is no information that I am able to gain from the context' "
        "at the very first sentence and then elaborate what you know about the question according to your own knowledge."
    )


def main():
    """CLI entry point: answer an initial question about a text file, then loop on follow-ups.

    Usage: script.py FILE_PATH PRE_QUESTION — reads FILE_PATH as UTF-8 context,
    asks PRE_QUESTION, then reads follow-up questions from stdin until 'exit' or EOF.
    """
    import argparse

    parser = argparse.ArgumentParser()
    parser.add_argument('file_path', type=str)
    parser.add_argument('pre_question', type=str)
    args = parser.parse_args()

    # Load the whole document the model should reason over.
    with open(args.file_path, 'r', encoding="utf-8") as file:
        context = file.read()

    conversation_history = [
        {'role': 'user', 'content': _build_prompt(context, args.pre_question)}
    ]
    response = ask_ollama(conversation_history)
    # BUG FIX: record the assistant's reply. The original never appended it,
    # so on every follow-up turn the model saw only user messages and lost
    # all memory of its own previous answers.
    conversation_history.append({'role': 'assistant', 'content': response})
    # flush=True matters when stdout is a pipe (e.g. driven from another
    # process): without it the text sits in Python's block buffer unseen.
    print(response, flush=True)

    while True:
        try:
            pre_follow_up_question = input()
        except EOFError:
            # The driving process closed our stdin — exit cleanly instead of crashing.
            break
        if pre_follow_up_question.lower() == 'exit':
            break
        conversation_history.append(
            {'role': 'user', 'content': _build_prompt(context, pre_follow_up_question)}
        )
        response = ask_ollama(conversation_history)
        conversation_history.append({'role': 'assistant', 'content': response})
        print(response, flush=True)


if __name__ == "__main__":
    # Guarded so importing this module (e.g. for testing) has no side effects.
    main()
Код: Выделить всё
// Launch the Python Ollama client as a child process and talk to it over
// redirected stdin/stdout.
string filePath = @".\Uploads\Philosophy.txt";
string inputText = "What is philosophy of language?";
ProcessStartInfo startInfo = new ProcessStartInfo
{
FileName = "python",
// Positional args match the Python argparse setup: file_path, pre_question.
Arguments = $"Python_Ollama.py \"{filePath}\" \"{inputText}\"",
RedirectStandardInput = true,
RedirectStandardOutput = true,
RedirectStandardError = true,
UseShellExecute = false, // required for stream redirection
CreateNoWindow = true
};
using (Process process = Process.Start(startInfo))
{
StreamReader reader = process.StandardOutput;
StreamWriter writer = process.StandardInput;
Console.WriteLine("Python Output:");
string line;
// NOTE(review): ReadLine() returns null only at end-of-stream, i.e. when the
// child closes stdout. The Python script keeps stdout open while waiting for
// follow-up questions, so this loop blocks after the first answer and the
// follow-up loop below is never reached. The two programs need a framing
// protocol (e.g. Python prints a known sentinel line after each answer and
// this loop stops when it sees the sentinel).
// NOTE(review): if nothing appears at all, the likely cause is Python's
// block-buffered stdout when piped — the script must flush after each print.
while ((line = reader.ReadLine()) != null)
{
Console.WriteLine(line);
}
while (true)
{
Console.WriteLine("\nEnter a follow-up question (or type 'exit' to quit): ");
string followUpQuestion = Console.ReadLine();
if (followUpQuestion.ToLower() == "exit")
{
break;
}
writer.WriteLine(followUpQuestion);
writer.Flush();
// NOTE(review): same end-of-stream issue as above — this inner loop also
// reads until the child exits, not until the current answer ends.
while ((line = reader.ReadLine()) != null)
{
Console.WriteLine(line);
}
}
}
Прежде всего, я попробовал ReadToEnd() (или ReadToEndAsync()), и он ничего не показал.
И я использовал:
Код: Выделить всё
// NOTE(review): this fragment does not compile — ReadLineAsync() returns
// Task<string>, which cannot be assigned to the string variable 'line'.
// It must be awaited: while ((line = await reader.ReadLineAsync()) != null)
while ((line = reader.ReadLineAsync()) != null)
{
Console.WriteLine(line);
}
Код: Выделить всё
// NOTE(review): ReadToEndAsync() completes only when the child closes its
// stdout (i.e. at process exit). While the Python script sits waiting for
// input, this await never finishes — which is why it "showed nothing".
string output = await reader.ReadToEndAsync();
Console.WriteLine(output);
Подробнее здесь: https://stackoverflow.com/questions/792 ... of-c-sharp
Мобильная версия