refactor(core): remove provider module, migrate to LLMProvider, add client mode handling, improve serialization error handling, update workspace edition, and clean up conditionals and imports
@@ -2,24 +2,23 @@
 
 mod cloud;
 
-use anyhow::{anyhow, Result};
+use anyhow::{Result, anyhow};
 use async_trait::async_trait;
 use clap::{Parser, Subcommand};
-use cloud::{load_runtime_credentials, CloudCommand};
+use cloud::{CloudCommand, load_runtime_credentials, set_env_var};
 use owlen_core::config as core_config;
 use owlen_core::{
+    ChatStream, Error, Provider,
     config::{Config, McpMode},
     mcp::remote_client::RemoteMcpClient,
     mode::Mode,
-    provider::ChatStream,
     providers::OllamaProvider,
     session::SessionController,
     storage::StorageManager,
     types::{ChatRequest, ChatResponse, Message, ModelInfo},
-    Error, Provider,
 };
 use owlen_tui::tui_controller::{TuiController, TuiRequest};
-use owlen_tui::{config, ui, AppState, ChatApp, Event, EventHandler, SessionEvent};
+use owlen_tui::{AppState, ChatApp, Event, EventHandler, SessionEvent, config, ui};
 use std::any::Any;
 use std::borrow::Cow;
 use std::io;
@@ -30,10 +29,10 @@ use tokio_util::sync::CancellationToken;
 use crossterm::{
     event::{DisableBracketedPaste, DisableMouseCapture, EnableBracketedPaste, EnableMouseCapture},
     execute,
-    terminal::{disable_raw_mode, enable_raw_mode, EnterAlternateScreen, LeaveAlternateScreen},
+    terminal::{EnterAlternateScreen, LeaveAlternateScreen, disable_raw_mode, enable_raw_mode},
 };
 use futures::stream;
-use ratatui::{prelude::CrosstermBackend, Terminal};
+use ratatui::{Terminal, prelude::CrosstermBackend};
 
 /// Owlen - Terminal UI for LLM chat
 #[derive(Parser, Debug)]
@@ -132,7 +131,9 @@ async fn run_command(command: OwlenCommand) -> Result<()> {
         OwlenCommand::Config(config_cmd) => run_config_command(config_cmd),
         OwlenCommand::Cloud(cloud_cmd) => cloud::run_cloud_command(cloud_cmd).await,
         OwlenCommand::Upgrade => {
-            println!("To update Owlen from source:\n git pull\n cargo install --path crates/owlen-cli --force");
+            println!(
+                "To update Owlen from source:\n git pull\n cargo install --path crates/owlen-cli --force"
+            );
             println!(
                 "If you installed from the AUR, use your package manager (e.g., yay -S owlen-git)."
             );
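For context on the hunk above: the OwlenCommand enum, the Cli parser, and the Config/Cloud handlers are defined elsewhere in the crate and are not part of this diff. The sketch below only illustrates how the Upgrade arm plausibly sits inside a clap subcommand dispatch; the variant payloads, struct shape, and handler names are assumptions.

use clap::{Parser, Subcommand};

/// Owlen CLI entry point (sketch; the real struct carries more flags).
#[derive(Parser, Debug)]
struct Cli {
    #[command(subcommand)]
    command: Option<OwlenCommand>,
}

#[derive(Subcommand, Debug)]
enum OwlenCommand {
    /// Print manual update instructions.
    Upgrade,
    // Config(..) and Cloud(..) variants exist in the real enum, but their
    // payloads are not visible in this diff, so they are omitted here.
}

async fn run_command(command: OwlenCommand) -> anyhow::Result<()> {
    match command {
        OwlenCommand::Upgrade => {
            // Multi-line println! formatting as introduced by the hunk.
            println!(
                "To update Owlen from source:\n git pull\n cargo install --path crates/owlen-cli --force"
            );
            println!(
                "If you installed from the AUR, use your package manager (e.g., yay -S owlen-git)."
            );
            Ok(())
        }
    }
}

#[tokio::main]
async fn main() -> anyhow::Result<()> {
    if let Some(command) = Cli::parse().command {
        run_command(command).await?;
    }
    Ok(())
}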
@@ -333,11 +334,11 @@ impl Provider for OfflineProvider {
         }])
     }
 
-    async fn chat(&self, request: ChatRequest) -> Result<ChatResponse, Error> {
+    async fn send_prompt(&self, request: ChatRequest) -> Result<ChatResponse, Error> {
         Ok(self.friendly_response(&request.model))
     }
 
-    async fn chat_stream(&self, request: ChatRequest) -> Result<ChatStream, Error> {
+    async fn stream_prompt(&self, request: ChatRequest) -> Result<ChatStream, Error> {
         let response = self.friendly_response(&request.model);
         Ok(Box::pin(stream::iter(vec![Ok(response)])))
     }
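The rename above (chat to send_prompt, chat_stream to stream_prompt) is easiest to read against a complete example. The sketch below is self-contained and hedged: ChatRequest, ChatResponse, Error, the ChatStream alias, and the trait itself are stand-ins for the owlen_core items, reconstructed only from what this hunk shows (a pinned, boxed stream of Result<ChatResponse, Error>), and the async-trait, futures, and tokio crates are assumed.

use async_trait::async_trait;
use futures::{Stream, StreamExt, stream};
use std::pin::Pin;

// Stand-ins for the owlen_core types (assumptions, not the real definitions).
#[derive(Debug)]
struct ChatRequest {
    model: String,
}
#[derive(Debug)]
struct ChatResponse {
    text: String,
}
#[derive(Debug)]
struct Error;

// The hunk builds the stream with Box::pin(stream::iter(..)), so a pinned,
// boxed stream of chat results is assumed for the ChatStream alias.
type ChatStream = Pin<Box<dyn Stream<Item = Result<ChatResponse, Error>> + Send>>;

#[async_trait]
trait Provider: Send + Sync {
    async fn send_prompt(&self, request: ChatRequest) -> Result<ChatResponse, Error>;
    async fn stream_prompt(&self, request: ChatRequest) -> Result<ChatStream, Error>;
}

struct OfflineProvider;

impl OfflineProvider {
    fn friendly_response(&self, model: &str) -> ChatResponse {
        ChatResponse {
            text: format!("Offline mode: no backend is reachable for model '{model}'."),
        }
    }
}

#[async_trait]
impl Provider for OfflineProvider {
    async fn send_prompt(&self, request: ChatRequest) -> Result<ChatResponse, Error> {
        Ok(self.friendly_response(&request.model))
    }

    async fn stream_prompt(&self, request: ChatRequest) -> Result<ChatStream, Error> {
        // Mirrors the hunk: the canned response becomes a single-item stream.
        let chunks: Vec<Result<ChatResponse, Error>> =
            vec![Ok(self.friendly_response(&request.model))];
        let stream: ChatStream = Box::pin(stream::iter(chunks));
        Ok(stream)
    }
}

#[tokio::main]
async fn main() -> Result<(), Error> {
    let provider = OfflineProvider;
    let mut chunks = provider
        .stream_prompt(ChatRequest { model: "llama3".into() })
        .await?;
    while let Some(chunk) = chunks.next().await {
        println!("{}", chunk?.text);
    }
    Ok(())
}

The single-item stream keeps the offline provider usable by callers that always consume a ChatStream, whether or not the backend actually streams.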
@@ -363,7 +364,7 @@ async fn main() -> Result<()> {
     let initial_mode = if code { Mode::Code } else { Mode::Chat };
 
     // Set auto-consent for TUI mode to prevent blocking stdin reads
-    std::env::set_var("OWLEN_AUTO_CONSENT", "1");
+    set_env_var("OWLEN_AUTO_CONSENT", "1");
 
     let color_support = detect_terminal_color_support();
     // Load configuration (or fall back to defaults) for the session controller.
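One note on the set_env_var change above: the helper is imported from the cloud module (see the first hunk), and its body is not part of this diff. A plausible motivation, given the "update workspace edition" part of the commit message, is that std::env::set_var is an unsafe fn in the Rust 2024 edition, so routing calls through one wrapper confines the unsafe block to a single place. A hypothetical sketch of such a wrapper:

/// Hypothetical wrapper; the real set_env_var in the cloud module may differ.
fn set_env_var(key: &str, value: &str) {
    // SAFETY: assumed to be called during single-threaded startup, before any
    // worker threads that might read the environment are spawned.
    unsafe {
        std::env::set_var(key, value);
    }
}

fn main() {
    set_env_var("OWLEN_AUTO_CONSENT", "1");
    assert_eq!(std::env::var("OWLEN_AUTO_CONSENT").unwrap(), "1");
}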