Код: Выделить всё
public static AIManager Instance;
public string ollamaEndpoint = "https://artificialunintelligenceapi.tech/api/generate";
public string modelName = "llama3.2:latest";
[SerializeField] private string apiKey;
public void SendPrompt(string userPrompt, System.Action callback)
{
StartCoroutine(SendPromptCoroutine(userPrompt, callback));
}
private IEnumerator SendPromptCoroutine(string userPrompt, System.Action callback)
{
RequestBody body = new RequestBody
{
model = modelName,
prompt = userPrompt,
stream = false
};
string jsonBody = JsonUtility.ToJson(body);
Debug.Log("Sending JSON: " + jsonBody);
using (UnityWebRequest request = new UnityWebRequest(ollamaEndpoint, "POST"))
{
byte[] bodyRaw = Encoding.UTF8.GetBytes(jsonBody);
request.uploadHandler = new UploadHandlerRaw(bodyRaw);
request.downloadHandler = new DownloadHandlerBuffer();
request.SetRequestHeader("Content-Type", "application/json");
// Add your VPS API key here
if (!string.IsNullOrEmpty(apiKey))
{
request.SetRequestHeader("Authorization", "Bearer " + apiKey);
}
yield return request.SendWebRequest();
if (request.result == UnityWebRequest.Result.Success)
{
string rawResponse = request.downloadHandler.text;
string response = ExtractResponse(rawResponse);
Debug.Log("AI Response: " + response);
callback?.Invoke(response);
}
else
{
Debug.LogError("Ollama Error: " + request.error);
callback?.Invoke(null);
}
}
}
private string ExtractResponse(string json)
{
int startIndex = json.IndexOf("\"response\":\"");
if (startIndex == -1) return null;
startIndex += "\"response\":\"".Length;
int endIndex = json.IndexOf("\"", startIndex);
if (endIndex == -1) return null;
return json.Substring(startIndex, endIndex - startIndex);
}
[System.Serializable]
private class RequestBody
{
public string model;
public string system;
public string prompt;
public bool stream;
}
Код: Выделить всё
curl -X POST http://127.0.0.1:11434/api/generate \
-H "Content-Type: application/json" \
-d '{"model":"llama3.2:latest","prompt":"Say hi","stream":false}'
Код: Выделить всё
curl.exe -X POST "https://artificialunintelligenceapi.tech/api/generate" `
-H "Content-Type: application/json" `
-d "{""model"":""llama3.2:latest"",""prompt"":""Say hi"",""stream"":false}"
Код: Выделить всё
> 301 Moved Permanently 301 Moved Permanently nginx/1.24.0 (Ubuntu)
Код: Выделить всё
location /api/ {
proxy_pass http://127.0.0.1:11434/api/;
proxy_http_version 1.1;
proxy_set_header Connection "";
proxy_set_header Host $host;
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_buffering off;
}
Пытался подключить ollama на хостинге vps к моему проекту Unity, чтобы любой пользователь в приложении мог получить доступ к ответу AI. Вместо этого Unity отвечает ошибкой Ollama 403.
Подробнее здесь: https://stackoverflow.com/questions/797 ... ror-http-1
Мобильная версия