✨ add langfuse
baptiste-pasquier committed May 2, 2024
1 parent bef92dd commit f3ec8c6
Showing 8 changed files with 193 additions and 60 deletions.
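For reference, the pattern this commit applies in both notebooks looks roughly like the sketch below, a minimal example assembled from the hunks. It assumes the `LANGFUSE_SECRET_KEY`, `LANGFUSE_PUBLIC_KEY`, and `LANGFUSE_HOST` environment variables are set; the Hydra `config_path`/`config_name` values are placeholders, not the project's actual ones.

```python
import os

from hydra import compose, initialize
from langfuse.callback import CallbackHandler

from backend.rag_1.chain import get_chain  # rag_2 is wired identically

# Hydra config, as loaded in the notebook (path and name are placeholders here)
with initialize(version_base=None, config_path="."):
    config = compose(config_name="config")

# Langfuse callback handler built from environment variables (assumed to be set)
lf_handler = CallbackHandler(
    secret_key=os.getenv("LANGFUSE_SECRET_KEY"),
    public_key=os.getenv("LANGFUSE_PUBLIC_KEY"),
    host=os.getenv("LANGFUSE_HOST"),
    tags=["multimodal-1"],  # the rag_2 notebook uses "multimodal-2"
)

chain = get_chain(config)


def stream_question(question: str) -> None:
    """Stream the response to a question, tracing the run in Langfuse."""
    for chunk in chain.stream(
        question,
        config={"run_name": "RAG", "callbacks": [lf_handler]},
    ):
        print(chunk, end="", flush=True)


stream_question("Describe the Transformer architecture")
```

Passing the handler through the runnable's `config` means each `stream_question` call shows up as a traced run in Langfuse, tagged per notebook.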
55 changes: 40 additions & 15 deletions backend/rag_1/notebook.ipynb
@@ -69,6 +69,7 @@
"from pathlib import Path\n",
"\n",
"from hydra import compose, initialize\n",
"from langfuse.callback import CallbackHandler\n",
"\n",
"from backend.rag_1.chain import get_chain\n",
"from backend.rag_1.config import validate_config\n",
@@ -86,6 +87,15 @@
"\n",
"logging.basicConfig(format=\"[%(asctime)s] - %(name)s - %(levelname)s - %(message)s\")\n",
"logging.getLogger(\"backend\").setLevel(logging.INFO)\n",
"\n",
"# Initialize Langfuse handler\n",
"lf_handler = CallbackHandler(\n",
" secret_key=os.getenv(\"LANGFUSE_SECRET_KEY\"),\n",
" public_key=os.getenv(\"LANGFUSE_PUBLIC_KEY\"),\n",
" host=os.getenv(\"LANGFUSE_HOST\"),\n",
" tags=[\"multimodal-1\"],\n",
")\n",
"\n",
"t = time.time()"
]
},
@@ -378,6 +388,21 @@
"outputs": [],
"source": [
"chain = get_chain(config)\n",
"\n",
"\n",
"def stream_question(question: str) -> None:\n",
" \"\"\"Stream the response to a given question using the RAG chain.\n",
"\n",
" Args:\n",
" question (str): Question to be answered.\n",
" \"\"\"\n",
" for chunk in chain.stream(\n",
" question,\n",
" config={\"run_name\": \"RAG\", \"callbacks\": [lf_handler]},\n",
" ):\n",
" print(chunk, end=\"\", flush=True)\n",
"\n",
"\n",
"t_rag = time.time()"
]
},
@@ -388,7 +413,7 @@
"outputs": [],
"source": [
"# Page 3\n",
"chain.invoke(\"Describe the Transformer architecture\")"
"stream_question(\"Describe the Transformer architecture\")"
]
},
{
@@ -398,7 +423,7 @@
"outputs": [],
"source": [
"# Page 6\n",
"chain.invoke(\"What is the complexity of self-attention?\")"
"stream_question(\"What is the complexity of self-attention?\")"
]
},
{
@@ -408,7 +433,7 @@
"outputs": [],
"source": [
"# Page 6\n",
"chain.invoke(\"Explain the formula of positional encoding\")"
"stream_question(\"Explain the formula of positional encoding\")"
]
},
{
@@ -418,7 +443,7 @@
"outputs": [],
"source": [
"# Page 8\n",
"chain.invoke(\"What are the BLEU performance of Transformer?\")"
"stream_question(\"What are the BLEU performance of Transformer?\")"
]
},
{
@@ -428,7 +453,7 @@
"outputs": [],
"source": [
"# No mention\n",
"chain.invoke(\"What are the ROUGE performance of Transformer?\")"
"stream_question(\"What are the ROUGE performance of Transformer?\")"
]
},
{
@@ -438,7 +463,7 @@
"outputs": [],
"source": [
"# Page 9\n",
"chain.invoke(\"Describe the variations of Transformer\")"
"stream_question(\"Describe the variations of Transformer\")"
]
},
{
@@ -448,7 +473,7 @@
"outputs": [],
"source": [
"# Page 9\n",
"chain.invoke(\"Between variations A and B, which one is better on PPL?\")"
"stream_question(\"Between variations A and B, which one is better on PPL?\")"
]
},
{
@@ -458,7 +483,7 @@
"outputs": [],
"source": [
"# Page 10\n",
"chain.invoke(\"Is Transformer performing well on English constituency parsing?\")"
"stream_question(\"Is Transformer performing well on English constituency parsing?\")"
]
},
{
@@ -468,7 +493,7 @@
"outputs": [],
"source": [
"# Page 13\n",
"chain.invoke(\"What words are connected to `making` in the weight visualization?\")"
"stream_question(\"What words are connected to `making` in the weight visualization?\")"
]
},
{
@@ -478,7 +503,7 @@
"outputs": [],
"source": [
"# Page 13\n",
"chain.invoke(\"Is `governments` connected to `making` in the weight visualization?\")"
"stream_question(\"Is `governments` connected to `making` in the weight visualization?\")"
]
},
{
@@ -488,7 +513,7 @@
"outputs": [],
"source": [
"# Page 13\n",
"chain.invoke(\"Is `2009` connected to `making`?\")"
"stream_question(\"Is `2009` connected to `making`?\")"
]
},
{
@@ -498,7 +523,7 @@
"outputs": [],
"source": [
"# Page 14\n",
"chain.invoke(\"What words are associated with `its` in heads 5 and 6?\")"
"stream_question(\"What words are associated with `its` in heads 5 and 6?\")"
]
},
{
@@ -508,7 +533,7 @@
"outputs": [],
"source": [
"# Page 14\n",
"chain.invoke(\n",
"stream_question(\n",
" \"What word has connection with `its` on only one head (heads between 5 and 6)?\"\n",
")"
]
@@ -520,7 +545,7 @@
"outputs": [],
"source": [
"# Page 14\n",
"chain.invoke(\n",
"stream_question(\n",
" \"What word has connection with `its` on only one head (heads between 1 and 4)?\"\n",
")"
]
@@ -532,7 +557,7 @@
"outputs": [],
"source": [
"# Page 14\n",
"chain.invoke(\"What word has connection with `its` on 2 heads?\")"
"stream_question(\"What word has connection with `its` on 2 heads?\")"
]
},
{
58 changes: 43 additions & 15 deletions backend/rag_2/notebook.ipynb
@@ -70,6 +70,7 @@
"from pathlib import Path\n",
"\n",
"from hydra import compose, initialize\n",
"from langfuse.callback import CallbackHandler\n",
"\n",
"from backend.rag_2 import prompts\n",
"from backend.rag_2.chain import get_chain\n",
@@ -93,6 +94,15 @@
"\n",
"logging.basicConfig(format=\"[%(asctime)s] - %(name)s - %(levelname)s - %(message)s\")\n",
"logging.getLogger(\"backend\").setLevel(logging.INFO)\n",
"\n",
"# Initialize Langfuse handler\n",
"lf_handler = CallbackHandler(\n",
" secret_key=os.getenv(\"LANGFUSE_SECRET_KEY\"),\n",
" public_key=os.getenv(\"LANGFUSE_PUBLIC_KEY\"),\n",
" host=os.getenv(\"LANGFUSE_HOST\"),\n",
" tags=[\"multimodal-2\"],\n",
")\n",
"\n",
"t = time.time()"
]
},
@@ -296,6 +306,7 @@
" text_list=texts,\n",
" config=config,\n",
" prompt_template=prompts.TEXT_SUMMARIZATION_PROMPT,\n",
" chain_config={\"callbacks\": [lf_handler]},\n",
")\n",
"for text in texts[:N_DISPLAY]:\n",
" display(text)"
@@ -312,6 +323,7 @@
" table_list=tables,\n",
" config=config,\n",
" prompt_template=prompts.TABLE_SUMMARIZATION_PROMPT,\n",
" chain_config={\"callbacks\": [lf_handler]},\n",
")\n",
"for table in tables[:N_DISPLAY]:\n",
" display(table)"
@@ -328,6 +340,7 @@
" image_list=images,\n",
" config=config,\n",
" prompt_template=prompts.IMAGE_SUMMARIZATION_PROMPT,\n",
" chain_config={\"callbacks\": [lf_handler]},\n",
")\n",
"for image in images[:N_DISPLAY]:\n",
" display(image)"
@@ -446,6 +459,21 @@
"outputs": [],
"source": [
"chain = get_chain(config)\n",
"\n",
"\n",
"def stream_question(question: str) -> None:\n",
" \"\"\"Stream the response to a given question using the RAG chain.\n",
"\n",
" Args:\n",
" question (str): Question to be answered.\n",
" \"\"\"\n",
" for chunk in chain.stream(\n",
" question,\n",
" config={\"run_name\": \"RAG\", \"callbacks\": [lf_handler]},\n",
" ):\n",
" print(chunk, end=\"\", flush=True)\n",
"\n",
"\n",
"t_rag = time.time()"
]
},
@@ -456,7 +484,7 @@
"outputs": [],
"source": [
"# Page 3\n",
"chain.invoke(\"Describe the Transformer architecture\")"
"stream_question(\"Describe the Transformer architecture\")"
]
},
{
@@ -466,7 +494,7 @@
"outputs": [],
"source": [
"# Page 6\n",
"chain.invoke(\"What is the complexity of self-attention?\")"
"stream_question(\"What is the complexity of self-attention?\")"
]
},
{
@@ -476,7 +504,7 @@
"outputs": [],
"source": [
"# Page 6\n",
"chain.invoke(\"Explain the formula of positional encoding\")"
"stream_question(\"Explain the formula of positional encoding\")"
]
},
{
@@ -486,7 +514,7 @@
"outputs": [],
"source": [
"# Page 8\n",
"chain.invoke(\"What are the BLEU performance of Transformer?\")"
"stream_question(\"What are the BLEU performance of Transformer?\")"
]
},
{
@@ -496,7 +524,7 @@
"outputs": [],
"source": [
"# No mention\n",
"chain.invoke(\"What are the ROUGE performance of Transformer?\")"
"stream_question(\"What are the ROUGE performance of Transformer?\")"
]
},
{
@@ -506,7 +534,7 @@
"outputs": [],
"source": [
"# Page 9\n",
"chain.invoke(\"Describe the variations of Transformer\")"
"stream_question(\"Describe the variations of Transformer\")"
]
},
{
@@ -516,7 +544,7 @@
"outputs": [],
"source": [
"# Page 9\n",
"chain.invoke(\"Between variations A and B, which one is better on PPL?\")"
"stream_question(\"Between variations A and B, which one is better on PPL?\")"
]
},
{
@@ -526,7 +554,7 @@
"outputs": [],
"source": [
"# Page 10\n",
"chain.invoke(\"Is Transformer performing well on English constituency parsing?\")"
"stream_question(\"Is Transformer performing well on English constituency parsing?\")"
]
},
{
@@ -536,7 +564,7 @@
"outputs": [],
"source": [
"# Page 13\n",
"chain.invoke(\"What words are connected to `making` in the weight visualization?\")"
"stream_question(\"What words are connected to `making` in the weight visualization?\")"
]
},
{
@@ -546,7 +574,7 @@
"outputs": [],
"source": [
"# Page 13\n",
"chain.invoke(\"Is `governments` connected to `making` in the weight visualization?\")"
"stream_question(\"Is `governments` connected to `making` in the weight visualization?\")"
]
},
{
@@ -556,7 +584,7 @@
"outputs": [],
"source": [
"# Page 13\n",
"chain.invoke(\"Is `2009` connected to `making`?\")"
"stream_question(\"Is `2009` connected to `making`?\")"
]
},
{
@@ -566,7 +594,7 @@
"outputs": [],
"source": [
"# Page 14\n",
"chain.invoke(\"What words are associated with `its` in heads 5 and 6?\")"
"stream_question(\"What words are associated with `its` in heads 5 and 6?\")"
]
},
{
@@ -576,7 +604,7 @@
"outputs": [],
"source": [
"# Page 14\n",
"chain.invoke(\n",
"stream_question(\n",
" \"What word has connection with `its` on only one head (heads between 5 and 6)?\"\n",
")"
]
@@ -588,7 +616,7 @@
"outputs": [],
"source": [
"# Page 14\n",
"chain.invoke(\n",
"stream_question(\n",
" \"What word has connection with `its` on only one head (heads between 1 and 4)?\"\n",
")"
]
@@ -600,7 +628,7 @@
"outputs": [],
"source": [
"# Page 14\n",
"chain.invoke(\"What word has connection with `its` on 2 heads?\")"
"stream_question(\"What word has connection with `its` on 2 heads?\")"
]
},
{
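The rag_2 notebook additionally threads the same handler into its text, table, and image summarization helpers via a `chain_config` argument. The helpers' real signatures are not visible in the truncated hunks, but the mechanism presumably boils down to forwarding the callbacks into the summarization runnable's config, roughly as in the sketch below (a made-up `summarize_chain`; `ChatOpenAI` is a placeholder model, and `lf_handler` and `texts` come from the earlier setup).

```python
from langchain_core.output_parsers import StrOutputParser
from langchain_core.prompts import ChatPromptTemplate
from langchain_openai import ChatOpenAI  # placeholder; the project may configure another model

# Hypothetical summarization chain; the real ones live in backend.rag_2
prompt = ChatPromptTemplate.from_template("Summarize the following element:\n\n{element}")
summarize_chain = prompt | ChatOpenAI() | StrOutputParser()

# chain_config={"callbacks": [lf_handler]} from the notebook ends up here, so the
# batched summarization calls are traced in Langfuse alongside the RAG queries.
summaries = summarize_chain.batch(
    [{"element": text} for text in texts],
    config={"callbacks": [lf_handler]},
)
```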