From 4f2c890c6b3dce70096f228042868a451fa0b83e Mon Sep 17 00:00:00 2001
From: Yuriy
Date: Thu, 12 Feb 2026 09:55:30 +0300
Subject: [PATCH] =?UTF-8?q?feat:=20RAG=20chat=20=E2=80=94=20ask=20question?=
 =?UTF-8?q?s=20about=20your=20project=20with=20AI=20context?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 .../src-tauri/src/commands/collect_context.rs | 104 ++++++++++++++++++
 desktop/src-tauri/src/commands/mod.rs         |   2 +
 desktop/src-tauri/src/lib.rs                  |   3 +-
 desktop/ui/src/lib/analyze.ts                 |  55 +++++++++
 desktop/ui/src/pages/Tasks.tsx                |  65 +++++++++-
 5 files changed, 219 insertions(+), 10 deletions(-)
 create mode 100644 desktop/src-tauri/src/commands/collect_context.rs

diff --git a/desktop/src-tauri/src/commands/collect_context.rs b/desktop/src-tauri/src/commands/collect_context.rs
new file mode 100644
index 0000000..64584ce
--- /dev/null
+++ b/desktop/src-tauri/src/commands/collect_context.rs
@@ -0,0 +1,104 @@
+use serde::{Deserialize, Serialize};
+use std::fs;
+use std::path::{Path, PathBuf};
+
+const MAX_CONTEXT_BYTES: usize = 100_000;
+const MAX_FILE_BYTES: u64 = 30_000;
+
+const CODE_EXTENSIONS: &[&str] = &[
+    "js","jsx","ts","tsx","mjs","cjs","py","rs","go","rb","php","java","kt",
+    "sh","bash","yml","yaml","toml","json","md","txt","sql","graphql",
+    "css","scss","html","vue","svelte",
+];
+
+const EXCLUDED_DIRS: &[&str] = &[
+    "node_modules",".git","target","dist","build",".next",
+    "__pycache__",".venv","venv","vendor",".cargo",
+];
+
+const PRIORITY_FILES: &[&str] = &[
+    "package.json","Cargo.toml","pyproject.toml","requirements.txt",
+    "README.md","readme.md","tsconfig.json",
+    "next.config.js","next.config.ts","vite.config.ts","vite.config.js",
+    "Dockerfile","docker-compose.yml",".env.example",".gitignore",
+];
+
+#[derive(Debug, Serialize, Deserialize)]
+pub struct ProjectContextRequest { pub path: String }
+
+#[derive(Debug, Serialize, Deserialize)]
+pub struct FileContext { pub path: String, pub content: String, pub lines: u32 }
+
+#[derive(Debug, Serialize, Deserialize)]
+pub struct ProjectContextResponse {
+    pub ok: bool, pub files: Vec<FileContext>,
+    pub total_files: u32, pub total_bytes: u32,
+    pub truncated: bool, pub error: Option<String>,
+}
+
+#[tauri::command]
+pub async fn collect_project_context(request: ProjectContextRequest) -> Result<ProjectContextResponse, String> {
+    let root = Path::new(&request.path);
+    if !root.exists() || !root.is_dir() {
+        return Ok(ProjectContextResponse { ok: false, files: vec![], total_files: 0, total_bytes: 0, truncated: false, error: Some(format!("Путь не существует: {}", request.path)) });
+    }
+    let mut files: Vec<FileContext> = Vec::new();
+    let mut total_bytes: usize = 0;
+    let mut truncated = false;
+
+    for pf in PRIORITY_FILES {
+        let fp = root.join(pf);
+        if fp.exists() && fp.is_file() {
+            if let Some(fc) = read_file_ctx(root, &fp) { total_bytes += fc.content.len(); files.push(fc); }
+        }
+    }
+
+    let mut all: Vec<PathBuf> = Vec::new();
+    collect_code_files(root, root, 0, &mut all);
+    all.sort_by(|a, b| {
+        let a_src = a.to_string_lossy().contains("src/");
+        let b_src = b.to_string_lossy().contains("src/");
+        match (a_src, b_src) {
+            (true, false) => std::cmp::Ordering::Less,
+            (false, true) => std::cmp::Ordering::Greater,
+            _ => a.metadata().map(|m| m.len()).unwrap_or(u64::MAX).cmp(&b.metadata().map(|m| m.len()).unwrap_or(u64::MAX)),
+        }
+    });
+
+    for fp in &all {
+        if total_bytes >= MAX_CONTEXT_BYTES { truncated = true; break; }
+        let rel = fp.strip_prefix(root).unwrap_or(fp).to_string_lossy().to_string();
+        if files.iter().any(|f| f.path == rel) { continue; }
+        if let Some(fc) = read_file_ctx(root, fp) {
+            if total_bytes + fc.content.len() > MAX_CONTEXT_BYTES { truncated = true; break; }
+            total_bytes += fc.content.len();
+            files.push(fc);
+        }
+    }
+
+    Ok(ProjectContextResponse { ok: true, total_files: files.len() as u32, total_bytes: total_bytes as u32, truncated, files, error: None })
+}
+
+fn read_file_ctx(root: &Path, fp: &Path) -> Option<FileContext> {
+    let meta = fp.metadata().ok()?;
+    if meta.len() > MAX_FILE_BYTES { return None; }
+    let content = fs::read_to_string(fp).ok()?;
+    let rel = fp.strip_prefix(root).unwrap_or(fp).to_string_lossy().to_string();
+    Some(FileContext { path: rel, lines: content.lines().count() as u32, content })
+}
+
+fn collect_code_files(root: &Path, dir: &Path, depth: u32, out: &mut Vec<PathBuf>) {
+    if depth > 8 || out.len() > 300 { return; }
+    let entries = match fs::read_dir(dir) { Ok(e) => e, Err(_) => return };
+    for entry in entries.flatten() {
+        let path = entry.path();
+        let name = path.file_name().and_then(|n| n.to_str()).unwrap_or("");
+        if path.is_dir() {
+            if EXCLUDED_DIRS.contains(&name) || name.starts_with('.') { continue; }
+            collect_code_files(root, &path, depth + 1, out);
+            continue;
+        }
+        let ext = path.extension().and_then(|e| e.to_str()).unwrap_or("");
+        if CODE_EXTENSIONS.contains(&ext) { out.push(path); }
+    }
+}
diff --git a/desktop/src-tauri/src/commands/mod.rs b/desktop/src-tauri/src/commands/mod.rs
index 97eae4f..3a117ce 100644
--- a/desktop/src-tauri/src/commands/mod.rs
+++ b/desktop/src-tauri/src/commands/mod.rs
@@ -13,3 +13,5 @@ pub use generate_ai_actions::generate_ai_actions;
 pub use get_app_info::get_app_info;
 pub use preview_actions::preview_actions;
 pub use undo_last::undo_last;
+mod collect_context;
+pub use collect_context::collect_project_context;
diff --git a/desktop/src-tauri/src/lib.rs b/desktop/src-tauri/src/lib.rs
index 7842e32..ccd5eef 100644
--- a/desktop/src-tauri/src/lib.rs
+++ b/desktop/src-tauri/src/lib.rs
@@ -2,7 +2,7 @@
 mod deep_analysis;
 mod commands;
 mod types;
-use commands::{analyze_project, apply_actions, ask_llm, generate_ai_actions, get_app_info, preview_actions, undo_last};
+use commands::{analyze_project, apply_actions, ask_llm, generate_ai_actions, collect_project_context, get_app_info, preview_actions, undo_last};
 
 #[cfg_attr(mobile, tauri::mobile_entry_point)]
 pub fn run() {
@@ -28,6 +28,7 @@ pub fn run() {
             get_app_info,
             ask_llm,
             generate_ai_actions,
+            collect_project_context,
         ])
         .run(tauri::generate_context!())
         .expect("error while running tauri application");
diff --git a/desktop/ui/src/lib/analyze.ts b/desktop/ui/src/lib/analyze.ts
index 31c2d93..af9edbd 100644
--- a/desktop/ui/src/lib/analyze.ts
+++ b/desktop/ui/src/lib/analyze.ts
@@ -214,3 +214,58 @@ export async function generateAiActions(
     },
   });
 }
+
+// ---- RAG Chat ----
+
+export interface FileContext {
+  path: string;
+  content: string;
+  lines: number;
+}
+
+export interface ProjectContextResponse {
+  ok: boolean;
+  files: FileContext[];
+  total_files: number;
+  total_bytes: number;
+  truncated: boolean;
+  error?: string | null;
+}
+
+export async function collectProjectContext(
+  path: string,
+): Promise<ProjectContextResponse> {
+  return invoke<ProjectContextResponse>('collect_project_context', {
+    request: { path },
+  });
+}
+
+export async function chatWithProject(
+  settings: LlmSettings,
+  projectPath: string,
+  projectContext: ProjectContextResponse,
+  llmContext: LlmContext,
+  question: string,
+  chatHistory: { role: string; content: string }[],
+): Promise<LlmResponse> {
+  // Build context from file contents
+  const filesSummary = projectContext.files
+    .map((f) => `--- ${f.path} (${f.lines} строк) ---\n${f.content}`)
+    .join('\n\n');
+
+  const contextStr = JSON.stringify(llmContext);
+
+  const fullPrompt = `Контекст проекта (${projectPath}):\n${contextStr}\n\nФайлы проекта (${projectContext.total_files} файлов, ${projectContext.total_bytes} байт${projectContext.truncated ? ', обрезано' : ''}):\n${filesSummary}\n\n${chatHistory.length > 0 ? 'История чата:\n' + chatHistory.map((m) => `${m.role}: ${m.content}`).join('\n') + '\n\n' : ''}Вопрос пользователя: ${question}`;

+  return invoke<LlmResponse>('ask_llm', {
+    request: {
+      provider: settings.provider,
+      model: settings.model,
+      api_key: settings.apiKey || null,
+      base_url: settings.baseUrl || null,
+      context: contextStr,
+      prompt: fullPrompt,
+      max_tokens: 2048,
+    },
+  });
+}
diff --git a/desktop/ui/src/pages/Tasks.tsx b/desktop/ui/src/pages/Tasks.tsx
index fbed860..dbead8e 100644
--- a/desktop/ui/src/pages/Tasks.tsx
+++ b/desktop/ui/src/pages/Tasks.tsx
@@ -19,7 +19,7 @@ import {
   X,
 } from 'lucide-react';
 import { invoke } from '@tauri-apps/api/core';
-import { analyzeProject, askLlm, generateAiActions, type AnalyzeReport, type Action, type ApplyResult, type UndoResult, type PreviewResult, type DiffItem, type LlmSettings, DEFAULT_LLM_SETTINGS } from '../lib/analyze';
+import { analyzeProject, askLlm, generateAiActions, collectProjectContext, chatWithProject, type AnalyzeReport, type Action, type ApplyResult, type UndoResult, type PreviewResult, type DiffItem, type LlmSettings, type ProjectContextResponse, DEFAULT_LLM_SETTINGS } from '../lib/analyze';
 import { animateFadeInUp } from '../lib/anime-utils';
 import { useAppStore } from '../store/app-store';
 
@@ -122,6 +122,7 @@ export function Tasks() {
   };
 
   const [isGeneratingActions, setIsGeneratingActions] = useState(false);
+  const [projectContext, setProjectContext] = useState<ProjectContextResponse | null>(null);
 
   const handleAiCodeGen = async (report: AnalyzeReport) => {
     const settings = loadLlmSettings();
@@ -230,16 +231,61 @@
     });
   };
 
-  const handleSend = () => {
+  const handleSend = async () => {
     if (!input.trim()) return;
-    setMessages((prev) => [...prev, { role: 'user', text: input.trim() }]);
+    const question = input.trim();
+    setMessages((prev) => [...prev, { role: 'user', text: question }]);
     setInput('');
-    setTimeout(() => {
-      setMessages((prev) => [
-        ...prev,
-        { role: 'assistant', text: 'Ответ ИИ агента будет отображаться здесь. Результаты действий агента подключаются к backend.' },
-      ]);
-    }, 500);
+
+    const settings = loadLlmSettings();
+    if (!settings.apiKey && settings.provider !== 'ollama') {
+      setMessages((prev) => [...prev, { role: 'system', text: '⚠️ Для чата нужен API-ключ. Перейдите в Настройки LLM (🧠).' }]);
+      return;
+    }
+
+    if (!lastReport || !lastPath) {
+      setMessages((prev) => [...prev, { role: 'system', text: '📂 Сначала проанализируйте проект — выберите папку для анализа.' }]);
+      return;
+    }
+
+    // Collect project context if not yet loaded
+    let ctx = projectContext;
+    if (!ctx) {
+      setMessages((prev) => [...prev, { role: 'system', text: '📖 Индексирую файлы проекта...' }]);
+      try {
+        ctx = await collectProjectContext(lastPath);
+        setProjectContext(ctx);
+      } catch (e) {
+        setMessages((prev) => [...prev, { role: 'system', text: `❌ Ошибка индексации: ${e}` }]);
+        return;
+      }
+    }
+
+    setMessages((prev) => [...prev, { role: 'system', text: '🤔 Думаю...' }]);
+
+    try {
+      // Build chat history from recent messages
+      const chatHistory = messages
+        .filter((m): m is { role: 'user'; text: string } | { role: 'assistant'; text: string } => 'text' in m && (m.role === 'user' || m.role === 'assistant'))
+        .slice(-6)
+        .map((m) => ({ role: m.role, content: m.text }));
+
+      const resp = await chatWithProject(settings, lastPath, ctx, lastReport.llm_context, question, chatHistory);
+
+      // Remove "Думаю..." message
+      setMessages((prev) => {
+        const filtered = prev.filter((m) => !('text' in m && m.text === '🤔 Думаю...'));
+        if (resp.ok) {
+          return [...filtered, { role: 'assistant' as const, text: resp.content }];
+        }
+        return [...filtered, { role: 'system' as const, text: `❌ ${resp.error}` }];
+      });
+    } catch (e) {
+      setMessages((prev) => {
+        const filtered = prev.filter((m) => !('text' in m && m.text === '🤔 Думаю...'));
+        return [...filtered, { role: 'system' as const, text: `❌ Ошибка: ${e}` }];
+      });
+    }
   };
 
   const runAnalysis = async (pathStr: string) => {
@@ -253,6 +299,7 @@
   try {
     const report = await analyzeProject(pathStr);
     setPreviousReport(lastReport);
+    setProjectContext(null);
     setLastReport(report);
     setLastPath(pathStr);
     storeSetLastReport(report, pathStr);