
Commit

Initial gitgnosis commit
dsteeley committed Jun 4, 2024
1 parent efae4c9 commit 07b5e34
Showing 4 changed files with 107 additions and 0 deletions.
6 changes: 6 additions & 0 deletions .gitignore
@@ -12,3 +12,9 @@ Cargo.lock

# MSVC Windows builds of rustc generate these, which store debugging information
*.pdb


# Added by cargo

/target
.env
13 changes: 13 additions & 0 deletions Cargo.toml
@@ -0,0 +1,13 @@
[package]
name = "gitgnosis"
version = "0.1.0"
edition = "2021"

# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

[dependencies]
anyhow = "1.0.86"
dotenvy = "0.15.7"
openai_api_rust = "0.1.9"
palm_api = {path = "../palm_api"}
# palm_api = { git = "https://github.com/dsteeley/palm_api.git"}
15 changes: 15 additions & 0 deletions README.md
@@ -0,0 +1,15 @@
# GitGnosis

A playground Rust library that uses AI-powered analysis to improve the quality of your repository.

The first planned feature is a tool that analyzes commits and suggests human-readable summaries for them.

## Developing

### Running the tests
Prerequisites:
- Create a `.env` file and provide an appropriate API key for the tests to run (see the example sketch below)
- `<MODEL>_API_KEY=your_api_key`

To test:
- `cargo test`
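
For illustration only (not part of this commit), a `.env` file for the tests might look like the sketch below. `PALM_API_KEY` is the variable `query_palm` reads explicitly; `OPENAI_API_KEY` is assumed here to be the variable that `openai_api_rust`'s `Auth::from_env` expects.

```
# Example .env sketch — keep out of version control (.env is already in .gitignore)
PALM_API_KEY=your_palm_api_key
# Assumed variable name for the OpenAI path (Auth::from_env):
OPENAI_API_KEY=your_openai_api_key
```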
73 changes: 73 additions & 0 deletions src/lib.rs
@@ -0,0 +1,73 @@
use openai_api_rust::*;
use openai_api_rust::chat::*;
use openai_api_rust::completions::*;
use anyhow::Result;
use dotenvy::dotenv;
// TODO make llm a trait and crate feature
use palm_api::palm::create_client;
use palm_api::palm::new_text_body;


// TODO: investigate other auth mechanisms
pub fn auth() -> Auth {
    // Optionally load credentials from a .env file first:
    // dotenv().expect(".env file not found");
    Auth::from_env().unwrap()
}

// TODO better return type
// TODO split into one object and have trait per model
pub fn query_openapi(content: String) -> Result<String> {
    let auth = auth();
    // TODO add support for other llms
    let openai = OpenAI::new(auth, "https://api.openai.com/v1/");
    // TODO other models
    let body = ChatBody {
        model: "gpt-3.5-turbo".to_string(),
        max_tokens: Some(20),
        temperature: Some(0_f32),
        top_p: Some(0_f32),
        n: Some(2),
        stream: Some(false),
        stop: None,
        presence_penalty: None,
        frequency_penalty: None,
        logit_bias: None,
        user: None,
        messages: vec![Message {
            role: Role::User,
            content,
        }],
    };
    let rs = openai.chat_completion_create(&body);
    let choice = rs.unwrap().choices;
    // TODO Error handling
    let message = &choice[0].message.as_ref().unwrap().content;
    Ok(message.to_string())
}

pub fn query_palm(content: String) -> Result<String> {
    dotenv().expect(".env file not found");
    let palm_api_key = std::env::var("PALM_API_KEY").unwrap();
    let client = create_client(palm_api_key.to_string());
    let mut textbody = new_text_body();
    textbody.set_text_prompt(content);
    let response = client
        .generate_text("text-bison-001".to_string(), textbody)
        .expect("An error has occurred.");
    Ok(response.candidates.unwrap()[0].output.clone())
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn it_works() {
        let content = "Hello".to_string();
        // Run the test on PaLM as that's currently free to use for some developers.
        let result = query_palm(content).unwrap();
        println!("{}", result);
        assert!(result.contains("Hello"));
    }
}
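
As a rough sketch of how the library might be driven once the commit-summary feature exists — this caller, its prompt wording, and the diff text are hypothetical and not part of this commit — one could pass a change description to `query_palm`:

```rust
// Hypothetical caller (not part of this commit): ask the PaLM model to summarise a change.
// Assumes a .env file containing PALM_API_KEY, as described in the README.
use anyhow::Result;
use gitgnosis::query_palm;

fn main() -> Result<()> {
    let diff = "Add /target and .env to .gitignore".to_string();
    let prompt = format!("Suggest a one-sentence, human-readable summary for this change: {diff}");
    let summary = query_palm(prompt)?;
    println!("Suggested summary: {summary}");
    Ok(())
}
```

The same call shape would apply to `query_openapi` once the per-model trait split sketched in the TODOs lands.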
