Это мои файлы:
main.py
Код: Выделить всё
import os
import uvicorn
from dotenv import load_dotenv
from fastapi import FastAPI
from fastapi.middleware.cors import CORSMiddleware
from mangum import Mangum
from app.api.api_v0.api import router as api_v0_router
load_dotenv()
root_path = os.getenv('ENV', default="")
app = FastAPI(root_path=f"/{root_path}")
app.add_middleware(
CORSMiddleware,
allow_origins=['*'],
allow_methods=["*"],
allow_headers=["*"],
)
app.include_router(api_v0_router, prefix="/api/v0")
handler = Mangum(app)
if __name__ == "__main__":
uvicorn.run(app, port=8000)
Код: Выделить всё
from fastapi import APIRouter
from app.api.api_v0.endpoints.process import router as process_router
from app.api.api_v0.endpoints.test import router as test_router
router = APIRouter()
router.include_router(process_router)
router.include_router(test_router)
Код: Выделить всё
import json
from typing import List
import numpy as np
from fastapi import APIRouter, UploadFile
from app.utils.utils import convert_file_to_image
from onnxruntime import InferenceSession
MODEL_PATH = "./model/last.onnx"
router = APIRouter(prefix="/process", tags=["Process"])
@router.on_event("startup")
def load_inference_session():
global session
session = InferenceSession(MODEL_PATH)
@router.post("")
async def root(
files: List[UploadFile]
):
file_read = await files[0].read()
np_image = convert_file_to_image(file_read)
np_image = np_image.astype(np.float32)
input_name = session.get_inputs()[0].name
output_name = session.get_outputs()[0].name
result = session.run([output_name], {input_name: np_image})
return json.dumps({"result": str(np.array(result).shape)})
Код: Выделить всё
FROM python:3.9
COPY requirements.txt .
RUN pip install --no-cache-dir Bottleneck==1.3.5
RUN pip install --no-cache-dir -r requirements.txt
COPY ./app /app
EXPOSE 8080
CMD ["uvicorn", "app.main:handler", "--host", "0.0.0.0", "--port", "8080"]
Код: Выделить всё
gcloud builds submit . --tag=-docker.pkg.dev///
--machine-type='n1-highcpu-32' --timeout=900s --verbosity=info
- Создаю модельный реестр в Vertex AI
< li>Развертывание модели
Код: Выделить всё
INFO: Started server process [1]
INFO: Waiting for application startup.
INFO: ASGI 'lifespan' protocol appears unsupported.
INFO: Application startup complete.
INFO: Uvicorn running on http://0.0.0.0:8080 (Press CTRL+C to quit)
ERROR: Exception in ASGI application
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/uvicorn/protocols/http/h11_impl.py", line 408, in run_asgi
result = await app( # type: ignore[func-returns-value]
File "/usr/local/lib/python3.9/site-packages/uvicorn/middleware/proxy_headers.py", line 84, in __call__
return await self.app(scope, receive, send)
File "/usr/local/lib/python3.9/site-packages/uvicorn/middleware/asgi2.py", line 16, in __call__
instance = self.app(scope)
TypeError: __call__() missing 1 required positional argument: 'context'
У меня вообще нет опыта работы с GCP, поэтому я не уверен, стоит ли мне это делать. я делаю что-то не так.
Подробнее здесь: https://stackoverflow.com/questions/789 ... t-error-wh