Crates.io | gpt-rs |
lib.rs | gpt-rs |
version | 0.1.1 |
source | src |
created_at | 2023-07-26 10:54:30.524747 |
updated_at | 2023-07-26 11:08:35.231495 |
description | This crate provides a simple way to interact with the OpenAI API |
homepage | https://github.com/Kobayashi-takumi/gpt-rs |
repository | https://github.com/Kobayashi-takumi/gpt-rs |
max_upload_size | |
id | 926357 |
size | 21,376 |
This crate provides a simple way to interact with the OpenAI API from Rust.
The examples below are asynchronous and use Tokio with some optional features enabled, so your Cargo.toml could look like this:
[dependencies]
gpt = { git="https://github.com/Kobayashi-takumi/gpt-rs" }
tokio = { version = "1", features = ["full"] }
And then the code:
#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
let client = Client::new(Config {
api_key:"<Your API_KEY>".to_string(),
organization: Some("<Your ORGANIZATION>"),
})?;
let res = Chat::builder()
.config(Default::default())
.request(vec!["hi".into()])
.build()?
.execute(&client)
.await?;
}
#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    // Credentials for the OpenAI API; both fields are owned Strings.
    let config = Config {
        api_key: "<Your API_KEY>".to_string(),
        organization: Some("<Your ORGANIZATION>".to_string()),
    };
    let client = Client::new(config)?;

    // Assemble a chat call: default settings, one "hi" message.
    let chat = Chat::builder()
        .config(Default::default())
        .request(vec!["hi".into()])
        .build()?;

    // Run the request against the client and propagate any error.
    let res = chat.execute(&client).await?;
    Ok(())
}
#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    // Build an API client from your OpenAI credentials.
    // `organization` takes an Option<String> (see the chat example),
    // so the placeholder must be converted with `.to_string()`.
    let client = Client::new(Config {
        api_key: "<Your API_KEY>".to_string(),
        organization: Some("<Your ORGANIZATION>".to_string()),
    })?;
    // Request an image generation for the prompt "doc".
    // Use `?` so a failed request is propagated instead of the
    // Result being silently dropped.
    let _res = CreateImage::builder()
        .request("doc".into())
        .build()?
        .execute(&client)
        .await?;
    Ok(())
}