Skip to content

Commit

Permalink
feat: deprecate prompt, remove --prompt and .prompt (#176)
Browse files Browse the repository at this point in the history
  • Loading branch information
sigoden authored Oct 30, 2023
1 parent 44ea384 commit b3e6879
Show file tree
Hide file tree
Showing 7 changed files with 3 additions and 79 deletions.
32 changes: 2 additions & 30 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -29,8 +29,7 @@ Download it from [GitHub Releases](https://github.com/sigoden/aichat/releases),

- Supports multiple AIs, including OpenAI and LocalAI.
- Supports chat and command modes
- Predefine model [roles](#roles)
- Use GPT prompt easily
- Use [roles](#roles)
- Powerful [Chat REPL](#chat-repl)
- Context-aware conversation/session
- Syntax highlighting for markdown and 200 other languages
Expand Down Expand Up @@ -91,9 +90,7 @@ clients: # Setup AIs

### Roles

We can have ChatGPT play a particular role by giving it a `prompt`, so that it generates output closer to what we want.

We can predefine a batch of roles in `roles.yaml`.
We can define a set of roles in `roles.yaml`.

> We can get the location of `roles.yaml` through the REPL's `.info` command or the CLI's `--info` option.

Expand Down Expand Up @@ -167,7 +164,6 @@ AIChat also provides `.edit` command for multi-lines editing.
.info Print system-wide information
.set Modify the configuration temporarily
.model Choose a model
.prompt Add a GPT prompt
.role Select a role
.clear role Clear the currently selected role
.session Start a session
Expand Down Expand Up @@ -216,30 +212,6 @@ vi_keybindings true
> .model localai:gpt4all-j
```
### `.prompt` - Set GPT prompt
When you set up a prompt, every message sent later will carry the prompt.
```
〉{ .prompt
I want you to translate the sentences I write into emojis.
I will write the sentence, and you will express it with emojis.
I just want you to express it with emojis.
I want you to reply only with emojis.
}
Done

P〉You are a genius
👉🧠💡👨‍🎓

P〉I'm embarrassed
🙈😳
```
`.prompt` actually creates a temporary role internally, so **run `.clear role` to clear the prompt**.
When you are satisfied with the prompt, add it to `roles.yaml` for later use.
### `.role` - Let the AI play a role
Select a role:
Expand Down
3 changes: 0 additions & 3 deletions src/cli.rs
Original file line number Diff line number Diff line change
Expand Up @@ -15,9 +15,6 @@ pub struct Cli {
/// Select a role
#[clap(short, long)]
pub role: Option<String>,
/// Add a GPT prompt
#[clap(short, long)]
pub prompt: Option<String>,
/// List sessions
#[clap(long)]
pub list_sessions: bool,
Expand Down
15 changes: 0 additions & 15 deletions src/config/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -192,12 +192,6 @@ impl Config {
None => {
format!("# CHAT:[{timestamp}]\n{input}\n--------\n{output}\n--------\n\n",)
}
Some(v) if v.is_temp() => {
format!(
"# CHAT:[{timestamp}]\n{}\n{input}\n--------\n{output}\n--------\n\n",
v.prompt
)
}
Some(v) => {
format!(
"# CHAT:[{timestamp}] ({})\n{input}\n--------\n{output}\n--------\n\n",
Expand Down Expand Up @@ -262,15 +256,6 @@ impl Config {
Ok(())
}

pub fn add_prompt(&mut self, prompt: &str) -> Result<()> {
let role = Role::new(prompt, self.temperature);
if let Some(session) = self.session.as_mut() {
session.update_role(Some(role.clone()))?;
}
self.role = Some(role);
Ok(())
}

pub fn get_temperature(&self) -> Option<f64> {
self.role
.as_ref()
Expand Down
13 changes: 0 additions & 13 deletions src/config/role.rs
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,6 @@ use super::message::{Message, MessageRole};
use anyhow::{Context, Result};
use serde::{Deserialize, Serialize};

const TEMP_ROLE_NAME: &str = "temp";
const INPUT_PLACEHOLDER: &str = "__INPUT__";

#[derive(Debug, Clone, Deserialize, Serialize)]
Expand All @@ -17,18 +16,6 @@ pub struct Role {
}

impl Role {
pub fn new(prompt: &str, temperature: Option<f64>) -> Self {
Self {
name: TEMP_ROLE_NAME.into(),
prompt: prompt.into(),
temperature,
}
}

pub fn is_temp(&self) -> bool {
self.name == TEMP_ROLE_NAME
}

pub fn info(&self) -> Result<String> {
let output = serde_yaml::to_string(&self)
.with_context(|| format!("Unable to show info about role {}", &self.name))?;
Expand Down
3 changes: 0 additions & 3 deletions src/main.rs
Original file line number Diff line number Diff line change
Expand Up @@ -75,9 +75,6 @@ fn main() -> Result<()> {
if cli.no_highlight {
config.write().highlight = false;
}
if let Some(prompt) = &cli.prompt {
config.write().add_prompt(prompt)?;
}
if cli.info {
let info = if let Some(session) = &config.read().session {
session.info()?
Expand Down
5 changes: 0 additions & 5 deletions src/repl/handler.rs
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,6 @@ pub enum ReplCmd {
SetModel(String),
SetRole(String),
UpdateConfig(String),
Prompt(String),
ClearRole,
ViewInfo,
StartSession(Option<String>),
Expand Down Expand Up @@ -79,10 +78,6 @@ impl ReplCmdHandler {
self.config.write().clear_role()?;
print_now!("\n");
}
ReplCmd::Prompt(prompt) => {
self.config.write().add_prompt(&prompt)?;
print_now!("\n");
}
ReplCmd::ViewInfo => {
let output = self.config.read().info()?;
print_now!("{}\n\n", output.trim_end());
Expand Down
11 changes: 1 addition & 10 deletions src/repl/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -19,11 +19,10 @@ use lazy_static::lazy_static;
use reedline::Signal;
use std::rc::Rc;

pub const REPL_COMMANDS: [(&str, &str); 15] = [
pub const REPL_COMMANDS: [(&str, &str); 14] = [
(".info", "Print system-wide information"),
(".set", "Modify the configuration temporarily"),
(".model", "Choose a model"),
(".prompt", "Add a GPT prompt"),
(".role", "Select a role"),
(".clear role", "Clear the currently selected role"),
(".session", "Start a session"),
Expand Down Expand Up @@ -134,14 +133,6 @@ impl Repl {
handler.handle(ReplCmd::UpdateConfig(args.unwrap_or_default().to_string()))?;
self.prompt.sync_config();
}
".prompt" => {
let text = args.unwrap_or_default().to_string();
if text.is_empty() {
print_now!("Usage: .prompt <text>.\n\n");
} else {
handler.handle(ReplCmd::Prompt(text))?;
}
}
".session" => {
handler.handle(ReplCmd::StartSession(args.map(|v| v.to_string())))?;
}
Expand Down

0 comments on commit b3e6879

Please sign in to comment.