feat: RAG chat — ask questions about your project with AI context

Yuriy 2026-02-12 09:55:30 +03:00
parent 6642562f73
commit 4f2c890c6b
5 changed files with 219 additions and 10 deletions

View File

@@ -0,0 +1,104 @@
use serde::{Deserialize, Serialize};
use std::fs;
use std::path::Path;
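// Size caps: total context bytes and per-file bytes, so the assembled context fits in one LLM prompt.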
const MAX_CONTEXT_BYTES: usize = 100_000;
const MAX_FILE_BYTES: u64 = 30_000;
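// File extensions treated as indexable source, config, or docs.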
const CODE_EXTENSIONS: &[&str] = &[
"js","jsx","ts","tsx","mjs","cjs","py","rs","go","rb","php","java","kt",
"sh","bash","yml","yaml","toml","json","md","txt","sql","graphql",
"css","scss","html","vue","svelte",
];
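// Directories skipped during traversal (dependencies, build output, VCS/venv metadata).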
const EXCLUDED_DIRS: &[&str] = &[
"node_modules",".git","target","dist","build",".next",
"__pycache__",".venv","venv","vendor",".cargo",
];
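// Manifests, configs, and docs that are read first so they always make it into the context.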
const PRIORITY_FILES: &[&str] = &[
"package.json","Cargo.toml","pyproject.toml","requirements.txt",
"README.md","readme.md","tsconfig.json",
"next.config.js","next.config.ts","vite.config.ts","vite.config.js",
"Dockerfile","docker-compose.yml",".env.example",".gitignore",
];
#[derive(Debug, Serialize, Deserialize)]
pub struct ProjectContextRequest { pub path: String }
#[derive(Debug, Serialize, Deserialize)]
pub struct FileContext { pub path: String, pub content: String, pub lines: u32 }
#[derive(Debug, Serialize, Deserialize)]
pub struct ProjectContextResponse {
pub ok: bool, pub files: Vec<FileContext>,
pub total_files: u32, pub total_bytes: u32,
pub truncated: bool, pub error: Option<String>,
}
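/// Tauri command: builds a bounded snapshot of a project's files for RAG chat.
/// Priority files are read first, then source files, until MAX_CONTEXT_BYTES is reached.
/// Invoked from the frontend as invoke('collect_project_context', { request: { path } }).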
#[tauri::command]
pub async fn collect_project_context(request: ProjectContextRequest) -> Result<ProjectContextResponse, String> {
let root = Path::new(&request.path);
if !root.exists() || !root.is_dir() {
return Ok(ProjectContextResponse { ok: false, files: vec![], total_files: 0, total_bytes: 0, truncated: false, error: Some(format!("Path does not exist: {}", request.path)) });
}
let mut files: Vec<FileContext> = Vec::new();
let mut total_bytes: usize = 0;
let mut truncated = false;
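// Pass 1: priority files from the project root (manifests, README, configs).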
for pf in PRIORITY_FILES {
let fp = root.join(pf);
if fp.exists() && fp.is_file() {
if let Some(fc) = read_file_ctx(root, &fp) { total_bytes += fc.content.len(); files.push(fc); }
}
}
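// Pass 2: walk the tree for the remaining source files.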
let mut all: Vec<std::path::PathBuf> = Vec::new();
collect_code_files(root, root, 0, &mut all);
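// Prefer files under "src/", then smaller files, so the byte budget covers more of the core code.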
all.sort_by(|a, b| {
let a_src = a.to_string_lossy().contains("src/");
let b_src = b.to_string_lossy().contains("src/");
match (a_src, b_src) {
(true, false) => std::cmp::Ordering::Less,
(false, true) => std::cmp::Ordering::Greater,
_ => a.metadata().map(|m| m.len()).unwrap_or(u64::MAX).cmp(&b.metadata().map(|m| m.len()).unwrap_or(u64::MAX)),
}
});
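// Fill the remaining budget, skipping files already added in the priority pass.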
for fp in &all {
if total_bytes >= MAX_CONTEXT_BYTES { truncated = true; break; }
let rel = fp.strip_prefix(root).unwrap_or(fp).to_string_lossy().to_string();
if files.iter().any(|f| f.path == rel) { continue; }
if let Some(fc) = read_file_ctx(root, fp) {
if total_bytes + fc.content.len() > MAX_CONTEXT_BYTES { truncated = true; break; }
total_bytes += fc.content.len();
files.push(fc);
}
}
Ok(ProjectContextResponse { ok: true, total_files: files.len() as u32, total_bytes: total_bytes as u32, truncated, files, error: None })
}
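/// Reads one file as UTF-8; returns None for oversized (> MAX_FILE_BYTES) or unreadable files.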
fn read_file_ctx(root: &Path, fp: &Path) -> Option<FileContext> {
let meta = fp.metadata().ok()?;
if meta.len() > MAX_FILE_BYTES { return None; }
let content = fs::read_to_string(fp).ok()?;
let rel = fp.strip_prefix(root).unwrap_or(fp).to_string_lossy().to_string();
Some(FileContext { path: rel, lines: content.lines().count() as u32, content })
}
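/// Recursively collects files with known code extensions, skipping excluded and hidden directories; bounded to depth 8 and roughly 300 files.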
fn collect_code_files(root: &Path, dir: &Path, depth: u32, out: &mut Vec<std::path::PathBuf>) {
if depth > 8 || out.len() > 300 { return; }
let entries = match fs::read_dir(dir) { Ok(e) => e, Err(_) => return };
for entry in entries.flatten() {
let path = entry.path();
let name = path.file_name().and_then(|n| n.to_str()).unwrap_or("");
if path.is_dir() {
if EXCLUDED_DIRS.contains(&name) || name.starts_with('.') { continue; }
collect_code_files(root, &path, depth + 1, out);
continue;
}
let ext = path.extension().and_then(|e| e.to_str()).unwrap_or("");
if CODE_EXTENSIONS.contains(&ext) { out.push(path); }
}
}

View File

@@ -13,3 +13,5 @@ pub use generate_ai_actions::generate_ai_actions;
pub use get_app_info::get_app_info;
pub use preview_actions::preview_actions;
pub use undo_last::undo_last;
mod collect_context;
pub use collect_context::collect_project_context;

View File

@@ -2,7 +2,7 @@ mod deep_analysis;
mod commands;
mod types;
use commands::{analyze_project, apply_actions, ask_llm, generate_ai_actions, get_app_info, preview_actions, undo_last};
use commands::{analyze_project, apply_actions, ask_llm, generate_ai_actions, collect_project_context, get_app_info, preview_actions, undo_last};
#[cfg_attr(mobile, tauri::mobile_entry_point)]
pub fn run() {
@@ -28,6 +28,7 @@ pub fn run() {
get_app_info,
ask_llm,
generate_ai_actions,
collect_project_context,
])
.run(tauri::generate_context!())
.expect("error while running tauri application");

View File

@@ -214,3 +214,58 @@ export async function generateAiActions(
},
});
}
// ---- RAG Chat ----
export interface FileContext {
path: string;
content: string;
lines: number;
}
export interface ProjectContextResponse {
ok: boolean;
files: FileContext[];
total_files: number;
total_bytes: number;
truncated: boolean;
error?: string | null;
}
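// Asks the Rust backend to index the project's files for chat context.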
export async function collectProjectContext(
path: string,
): Promise<ProjectContextResponse> {
return invoke<ProjectContextResponse>('collect_project_context', {
request: { path },
});
}
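// Sends a question to the LLM with the analysis context, indexed file contents,
// and recent chat history inlined into a single prompt.
// Usage sketch (names taken from this module and the Tasks page):
//   const ctx = await collectProjectContext(path);
//   const resp = await chatWithProject(settings, path, ctx, report.llm_context, 'What does this project do?', []);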
export async function chatWithProject(
settings: LlmSettings,
projectPath: string,
projectContext: ProjectContextResponse,
llmContext: LlmContext,
question: string,
chatHistory: { role: string; content: string }[],
): Promise<LlmResponse> {
// Build context from file contents
const filesSummary = projectContext.files
.map((f) => `--- ${f.path} (${f.lines} lines) ---\n${f.content}`)
.join('\n\n');
const contextStr = JSON.stringify(llmContext);
const fullPrompt = `Project context (${projectPath}):\n${contextStr}\n\nProject files (${projectContext.total_files} files, ${projectContext.total_bytes} bytes${projectContext.truncated ? ', truncated' : ''}):\n${filesSummary}\n\n${chatHistory.length > 0 ? 'Chat history:\n' + chatHistory.map((m) => `${m.role}: ${m.content}`).join('\n') + '\n\n' : ''}User question: ${question}`;
return invoke<LlmResponse>('ask_llm', {
request: {
provider: settings.provider,
model: settings.model,
api_key: settings.apiKey || null,
base_url: settings.baseUrl || null,
context: contextStr,
prompt: fullPrompt,
max_tokens: 2048,
},
});
}

View File

@@ -19,7 +19,7 @@ import {
X,
} from 'lucide-react';
import { invoke } from '@tauri-apps/api/core';
import { analyzeProject, askLlm, generateAiActions, type AnalyzeReport, type Action, type ApplyResult, type UndoResult, type PreviewResult, type DiffItem, type LlmSettings, DEFAULT_LLM_SETTINGS } from '../lib/analyze';
import { analyzeProject, askLlm, generateAiActions, collectProjectContext, chatWithProject, type AnalyzeReport, type Action, type ApplyResult, type UndoResult, type PreviewResult, type DiffItem, type LlmSettings, type ProjectContextResponse, DEFAULT_LLM_SETTINGS } from '../lib/analyze';
import { animateFadeInUp } from '../lib/anime-utils';
import { useAppStore } from '../store/app-store';
@@ -122,6 +122,7 @@ export function Tasks() {
};
const [isGeneratingActions, setIsGeneratingActions] = useState(false);
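// Cached project file index, built lazily on the first chat question and reset on re-analysis.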
const [projectContext, setProjectContext] = useState<ProjectContextResponse | null>(null);
const handleAiCodeGen = async (report: AnalyzeReport) => {
const settings = loadLlmSettings();
@@ -230,16 +231,61 @@
});
};
const handleSend = () => {
const handleSend = async () => {
if (!input.trim()) return;
setMessages((prev) => [...prev, { role: 'user', text: input.trim() }]);
const question = input.trim();
setMessages((prev) => [...prev, { role: 'user', text: question }]);
setInput('');
setTimeout(() => {
setMessages((prev) => [
...prev,
{ role: 'assistant', text: 'Ответ ИИ агента будет отображаться здесь. Результаты действий агента подключаются к backend.' },
]);
}, 500);
const settings = loadLlmSettings();
if (!settings.apiKey && settings.provider !== 'ollama') {
setMessages((prev) => [...prev, { role: 'system', text: '⚠️ Chat requires an API key. Open the LLM Settings (🧠).' }]);
return;
}
if (!lastReport || !lastPath) {
setMessages((prev) => [...prev, { role: 'system', text: '📂 Analyze a project first: pick a folder to analyze.' }]);
return;
}
// Collect project context if not yet loaded
let ctx = projectContext;
if (!ctx) {
setMessages((prev) => [...prev, { role: 'system', text: '📖 Indexing project files...' }]);
try {
ctx = await collectProjectContext(lastPath);
setProjectContext(ctx);
} catch (e) {
setMessages((prev) => [...prev, { role: 'system', text: `❌ Indexing error: ${e}` }]);
return;
}
}
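// Transient status message; removed again once the LLM reply (or an error) arrives.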
setMessages((prev) => [...prev, { role: 'system', text: '🤔 Thinking...' }]);
try {
// Build chat history from recent messages
const chatHistory = messages
.filter((m): m is { role: 'user'; text: string } | { role: 'assistant'; text: string } => 'text' in m && (m.role === 'user' || m.role === 'assistant'))
.slice(-6)
.map((m) => ({ role: m.role, content: m.text }));
const resp = await chatWithProject(settings, lastPath, ctx, lastReport.llm_context, question, chatHistory);
// Drop the transient "Thinking..." status message before appending the reply
setMessages((prev) => {
const filtered = prev.filter((m) => !('text' in m && m.text === '🤔 Thinking...'));
if (resp.ok) {
return [...filtered, { role: 'assistant' as const, text: resp.content }];
}
return [...filtered, { role: 'system' as const, text: `${resp.error}` }];
});
} catch (e) {
setMessages((prev) => {
const filtered = prev.filter((m) => !('text' in m && m.text === '🤔 Thinking...'));
return [...filtered, { role: 'system' as const, text: `❌ Error: ${e}` }];
});
}
};
const runAnalysis = async (pathStr: string) => {
@@ -253,6 +299,7 @@
try {
const report = await analyzeProject(pathStr);
setPreviousReport(lastReport);
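// Invalidate the cached file index so the next chat question re-reads the newly analyzed project.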
setProjectContext(null);
setLastReport(report);
setLastPath(pathStr);
storeSetLastReport(report, pathStr);