-
Notifications
You must be signed in to change notification settings - Fork 94
/
conversational_retriever_chain_with_vector_store.rs
115 lines (99 loc) · 3.62 KB
/
conversational_retriever_chain_with_vector_store.rs
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
// To run this example execute: cargo run --example conversational_retriever_chain --features postgres
#[cfg(feature = "postgres")]
use futures_util::StreamExt;
#[cfg(feature = "postgres")]
use langchain_rust::{
add_documents,
chain::{Chain, ConversationalRetrieverChainBuilder},
embedding::openai::openai_embedder::OpenAiEmbedder,
llm::{OpenAI, OpenAIModel},
memory::SimpleMemory,
prompt_args,
schemas::Document,
vectorstore::{pgvector::StoreBuilder, Retriever, VectorStore},
};
#[cfg(feature = "postgres")]
#[tokio::main]
async fn main() {
    use langchain_rust::{
        fmt_message, fmt_template, message_formatter, prompt::HumanMessagePromptTemplate,
        schemas::Message, template_jinja2,
    };

    // Seed documents: small Q/A pairs the retriever will search over.
    let documents = vec![
        Document::new(format!(
            "\nQuestion: {}\nAnswer: {}\n",
            "Which is the favorite text editor of luis", "Nvim"
        )),
        Document::new(format!(
            "\nQuestion: {}\nAnswer: {}\n",
            "How old is Luis", "24"
        )),
        Document::new(format!(
            "\nQuestion: {}\nAnswer: {}\n",
            "Where do luis live", "Peru"
        )),
        Document::new(format!(
            "\nQuestion: {}\nAnswer: {}\n",
            "Whats his favorite food", "Pan con chicharron"
        )),
    ];

    // Build a pgvector-backed store. `pre_delete_collection(true)` wipes any
    // previous collection so the example always starts from a clean slate.
    // 1536 is the embedding dimension of OpenAI's default embedding model.
    let store = StoreBuilder::new()
        .embedder(OpenAiEmbedder::default())
        .pre_delete_collection(true)
        .connection_url("postgresql://postgres:postgres@localhost:5432/postgres")
        .vector_dimensions(1536)
        .build()
        .await
        .expect("Failed to build pgvector store; is Postgres with the pgvector extension running?");

    // Fail fast if the documents cannot be embedded/stored: continuing would
    // query an empty collection and silently produce useless answers.
    if let Err(e) = add_documents!(store, &documents).await {
        eprintln!("Error adding documents: {:?}", e);
        return;
    }

    let llm = OpenAI::default().with_model(OpenAIModel::Gpt35.to_string());

    // Custom prompt. The retriever injects the matched documents into
    // `{{context}}`; the user's (possibly rephrased) question fills `{{question}}`.
    let prompt = message_formatter![
        fmt_message!(Message::new_system_message("You are a helpful assistant")),
        fmt_template!(HumanMessagePromptTemplate::new(
            template_jinja2!("
Use the following pieces of context to answer the question at the end. If you don't know the answer, just say that you don't know, don't try to make up an answer.
{{context}}
Question:{{question}}
Helpful Answer:
",
            "context","question")))
    ];

    let chain = ConversationalRetrieverChainBuilder::new()
        .llm(llm)
        .rephrase_question(true)
        .memory(SimpleMemory::new().into())
        .retriever(Retriever::new(store, 5))
        //If you want to use the default prompt remove the .prompt()
        //Keep in mind if you want to change the prompt; this chain need the {{context}} variable
        .prompt(prompt)
        .build()
        .expect("Error building ConversationalChain");

    // One-shot invocation: the whole answer is returned at once.
    let input_variables = prompt_args! {
        "question" => "Hi",
    };
    match chain.invoke(input_variables).await {
        Ok(result) => println!("Result: {:?}", result),
        Err(e) => eprintln!("Error invoking chain: {:?}", e),
    }

    let input_variables = prompt_args! {
        "question" => "Which is luis Favorite Food",
    };
    //If you want to stream
    let mut stream = chain.stream(input_variables).await.unwrap();
    while let Some(result) = stream.next().await {
        match result {
            Ok(data) => data.to_stdout().unwrap(),
            Err(e) => {
                println!("Error: {:?}", e);
            }
        }
    }
}
#[cfg(not(feature = "postgres"))]
fn main() {
    // Fallback entry point when the `postgres` feature is disabled: instead of
    // failing to compile, tell the user how to run the real example.
    let usage_lines = [
        "This example requires the 'postgres' feature to be enabled.",
        "Please run the command as follows:",
        "cargo run --example conversational_retriever_chain --features postgres",
    ];
    for line in usage_lines {
        println!("{}", line);
    }
}