"(venv) D:\Gen AI projs\Code Analyzer>python main .py
Запуск анализатора кода AI...
INFO:root:Запуск приложения Streamlit...
Теперь вы можете просмотреть свое приложение Streamlit. в вашем браузере.
Локальный URL-адрес: http://localhost:8501
Сетевой URL-адрес: http:/ /192.168.1.10:8501
2024-12-25 15:26:00.496 Неперехваченное приложение выполнение
Traceback (последний последний вызов):
Файл "D:\Gen AI projs\Code Analyzer\venv\Lib\site->packages\streamlit\runtime\scriptrunner\exec_code.py", строка 88, в >exec_func_with_error_handling
result = func()
^^^^^^
Файл "D:\Gen AI projs\Code Analyzer\venv\Lib\site->packages\streamlit\runtime\scriptrunner\script_runner.py", строка 579, в code_to_exec
exec(code, Module.dict)
Файл «D:\Gen AI projs\Code Analyzer\ui\streamlit_ui.py», строка 4, в
из импорта app.model_loader load_model
ModuleNotFoundError: Нет модуля с именем «app»
Моя структура каталогов выглядит следующим образом:

и код в каждом файле следующий:< /p>
- main.py:
Код: Выделить всё
import os
import sys
import subprocess
import logging
# Add project root to PYTHONPATH
sys.path.append(os.path.dirname(os.path.abspath(__file__)))
logging.basicConfig(level=logging.INFO)
def start_streamlit():
"""Starts the Streamlit application."""
try:
logging.info("Starting Streamlit app...")
subprocess.run(["streamlit", "run", "ui/streamlit_ui.py"])
except Exception as e:
logging.error(f"Error starting Streamlit app: {e}")
if __name__ == "__main__":
print("Starting the AI Code Analyzer...")
start_streamlit()
Код: Выделить всё
import streamlit as st
import os
import sys
from app.model_loader import load_model
from app.analyzer import analyze_code
print("Current working directory:", os.getcwd())
# Dynamically add the root directory to PYTHONPATH
sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), "..")))
print("SRC Directory exists:", os.path.exists(os.path.join(os.path.dirname(__file__), "..", "src")))
# Load the model once when the app starts
st.title("AI Code Analyzer")
tokenizer, model = load_model()
# User inputs code for analysis
user_code = st.text_area("Enter your code:", height=300)
if st.button("Analyze Code"):
if not user_code.strip():
st.error("Please enter some code to analyze.")
else:
with st.spinner("Analyzing code..."):
try:
result = analyze_code(user_code, tokenizer, model)
st.success("Analysis Complete!")
st.text_area("Analysis Result:", value=result, height=300, disabled=True)
except Exception as e:
st.error(f"Analysis failed: {e}")
Код: Выделить всё
from transformers import AutoTokenizer, AutoModelForCausalLM
def load_model():
"""Loads the StarCoder model and tokenizer."""
model_name = "bigcode/starcoderbase"
try:
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForCausalLM.from_pretrained(model_name, device_map="auto", torch_dtype="float16")
except Exception as e:
raise RuntimeError(f"Failed to load model: {e}")
return tokenizer, model
Код: Выделить всё
import langdetect # You may need to install this package
def analyze_code(code: str, tokenizer, model, language: str = None) -> str:
"""
Analyze the code using the model for errors, security vulnerabilities, and optimizations.
The function generates a detailed analysis of the code in bullet points.
"""
# Detect language if not provided
if not language:
language = langdetect.detect(code)
# If a language is provided, include it in the prompt
language_info = f" in {language}" if language else ""
input_prompt = (
f"Analyze the following code{language_info}:\n\n"
f"{code}\n\n"
f"1. Detect syntax or logical errors.\n"
f"2. Identify security vulnerabilities.\n"
f"3. Suggest optimizations or best practices.\n"
f"Provide detailed feedback in bullet points."
)
try:
# Tokenizing the input prompt
inputs = tokenizer(input_prompt, return_tensors="pt")
# Generating output using the model
outputs = model.generate(inputs.input_ids, max_length=512, num_return_sequences=1)
except Exception as e:
raise RuntimeError(f"Error during analysis: {e}")
# Decoding the response
response = tokenizer.decode(outputs[0], skip_special_tokens=True)
return response
Код: Выделить всё
streamlit
langchain
transformers
torch
langdetect
Подробнее здесь: https://stackoverflow.com/questions/793 ... eamlit-app
Мобильная версия