✨ add tags
baptiste-pasquier committed May 2, 2024
1 parent bef92dd commit 64d7abf
Showing 5 changed files with 131 additions and 53 deletions.
45 changes: 30 additions & 15 deletions backend/rag_1/notebook.ipynb
@@ -378,6 +378,21 @@
"outputs": [],
"source": [
"chain = get_chain(config)\n",
+"\n",
+"\n",
+"def stream_question(question: str) -> None:\n",
+" \"\"\"Stream the response to a given question using the RAG chain.\n",
+"\n",
+" Args:\n",
+" question (str): Question to be answered.\n",
+" \"\"\"\n",
+" for chunk in chain.stream(\n",
+" question,\n",
+" config={\"run_name\": \"RAG\", \"tags\": [\"multimodal-1\"]},\n",
+" ):\n",
+" print(chunk, end=\"\", flush=True)\n",
+"\n",
+"\n",
"t_rag = time.time()"
]
},
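A note on the pattern introduced above: the helper simply wraps chain.stream and attaches a run name and a tag to the traced run via LangChain's RunnableConfig. A minimal standalone sketch of the same idea (the RunnableLambda toy chain below is only a stand-in for the notebook's get_chain(config), not the repository's actual RAG chain):

    from langchain_core.runnables import RunnableLambda

    # Stand-in chain; the notebook builds the real one with get_chain(config).
    chain = RunnableLambda(lambda question: f"(answer to) {question}")

    def stream_question(question: str) -> None:
        """Stream the response to a question, tagging the traced run."""
        for chunk in chain.stream(
            question,
            config={"run_name": "RAG", "tags": ["multimodal-1"]},
        ):
            print(chunk, end="", flush=True)

    stream_question("Describe the Transformer architecture")

The same tagging could also be bound once with chain.with_config({"run_name": "RAG", "tags": ["multimodal-1"]}); the per-call config dict used in this commit keeps the tag visible at each call site.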
@@ -388,7 +403,7 @@
"outputs": [],
"source": [
"# Page 3\n",
-"chain.invoke(\"Describe the Transformer architecture\")"
+"stream_question(\"Describe the Transformer architecture\")"
]
},
{
@@ -398,7 +413,7 @@
"outputs": [],
"source": [
"# Page 6\n",
-"chain.invoke(\"What is the complexity of self-attention?\")"
+"stream_question(\"What is the complexity of self-attention?\")"
]
},
{
@@ -408,7 +423,7 @@
"outputs": [],
"source": [
"# Page 6\n",
-"chain.invoke(\"Explain the formula of positional encoding\")"
+"stream_question(\"Explain the formula of positional encoding\")"
]
},
{
@@ -418,7 +433,7 @@
"outputs": [],
"source": [
"# Page 8\n",
-"chain.invoke(\"What are the BLEU performance of Transformer?\")"
+"stream_question(\"What are the BLEU performance of Transformer?\")"
]
},
{
@@ -428,7 +443,7 @@
"outputs": [],
"source": [
"# No mention\n",
-"chain.invoke(\"What are the ROUGE performance of Transformer?\")"
+"stream_question(\"What are the ROUGE performance of Transformer?\")"
]
},
{
@@ -438,7 +453,7 @@
"outputs": [],
"source": [
"# Page 9\n",
-"chain.invoke(\"Describe the variations of Transformer\")"
+"stream_question(\"Describe the variations of Transformer\")"
]
},
{
@@ -448,7 +463,7 @@
"outputs": [],
"source": [
"# Page 9\n",
-"chain.invoke(\"Between variations A and B, which one is better on PPL?\")"
+"stream_question(\"Between variations A and B, which one is better on PPL?\")"
]
},
{
@@ -458,7 +473,7 @@
"outputs": [],
"source": [
"# Page 10\n",
-"chain.invoke(\"Is Transformer performing well on English constituency parsing?\")"
+"stream_question(\"Is Transformer performing well on English constituency parsing?\")"
]
},
{
@@ -468,7 +483,7 @@
"outputs": [],
"source": [
"# Page 13\n",
-"chain.invoke(\"What words are connected to `making` in the weight visualization?\")"
+"stream_question(\"What words are connected to `making` in the weight visualization?\")"
]
},
{
@@ -478,7 +493,7 @@
"outputs": [],
"source": [
"# Page 13\n",
-"chain.invoke(\"Is `governments` connected to `making` in the weight visualization?\")"
+"stream_question(\"Is `governments` connected to `making` in the weight visualization?\")"
]
},
{
@@ -488,7 +503,7 @@
"outputs": [],
"source": [
"# Page 13\n",
-"chain.invoke(\"Is `2009` connected to `making`?\")"
+"stream_question(\"Is `2009` connected to `making`?\")"
]
},
{
@@ -498,7 +513,7 @@
"outputs": [],
"source": [
"# Page 14\n",
-"chain.invoke(\"What words are associated with `its` in heads 5 and 6?\")"
+"stream_question(\"What words are associated with `its` in heads 5 and 6?\")"
]
},
{
@@ -508,7 +523,7 @@
"outputs": [],
"source": [
"# Page 14\n",
-"chain.invoke(\n",
+"stream_question(\n",
" \"What word has connection with `its` on only one head (heads between 5 and 6)?\"\n",
")"
]
@@ -520,7 +535,7 @@
"outputs": [],
"source": [
"# Page 14\n",
-"chain.invoke(\n",
+"stream_question(\n",
" \"What word has connection with `its` on only one head (heads between 1 and 4)?\"\n",
")"
]
@@ -532,7 +547,7 @@
"outputs": [],
"source": [
"# Page 14\n",
-"chain.invoke(\"What word has connection with `its` on 2 heads?\")"
+"stream_question(\"What word has connection with `its` on 2 heads?\")"
]
},
{
48 changes: 33 additions & 15 deletions backend/rag_2/notebook.ipynb
@@ -296,6 +296,7 @@
" text_list=texts,\n",
" config=config,\n",
" prompt_template=prompts.TEXT_SUMMARIZATION_PROMPT,\n",
+" chain_config={\"tags\": [\"multimodal-2\"]},\n",
")\n",
"for text in texts[:N_DISPLAY]:\n",
" display(text)"
@@ -312,6 +313,7 @@
" table_list=tables,\n",
" config=config,\n",
" prompt_template=prompts.TABLE_SUMMARIZATION_PROMPT,\n",
+" chain_config={\"tags\": [\"multimodal-2\"]},\n",
")\n",
"for table in tables[:N_DISPLAY]:\n",
" display(table)"
@@ -328,6 +330,7 @@
" image_list=images,\n",
" config=config,\n",
" prompt_template=prompts.IMAGE_SUMMARIZATION_PROMPT,\n",
+" chain_config={\"tags\": [\"multimodal-2\"]},\n",
")\n",
"for image in images[:N_DISPLAY]:\n",
" display(image)"
@@ -446,6 +449,21 @@
"outputs": [],
"source": [
"chain = get_chain(config)\n",
+"\n",
+"\n",
+"def stream_question(question: str) -> None:\n",
+" \"\"\"Stream the response to a given question using the RAG chain.\n",
+"\n",
+" Args:\n",
+" question (str): Question to be answered.\n",
+" \"\"\"\n",
+" for chunk in chain.stream(\n",
+" question,\n",
+" config={\"run_name\": \"RAG\", \"tags\": [\"multimodal-2\"]},\n",
+" ):\n",
+" print(chunk, end=\"\", flush=True)\n",
+"\n",
+"\n",
"t_rag = time.time()"
]
},
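Stepping back to the chain_config argument added to the three summarization calls earlier in this file: it is presumably forwarded to the underlying LangChain runnable, so each text, table, and image summarization run carries the multimodal-2 tag. The internals of generate_image_summaries and its siblings are not shown in this diff; a rough sketch of that forwarding pattern, with a placeholder chain, might look like:

    from langchain_core.runnables import RunnableLambda

    # Placeholder summarization chain; the real one is built from the prompt template.
    summarize_chain = RunnableLambda(lambda item: f"summary of {item}")

    def generate_summaries(items: list, chain_config: dict | None = None) -> list:
        """Summarize items in a batch, forwarding tags and run metadata to the chain."""
        return summarize_chain.batch(items, config=chain_config)

    summaries = generate_summaries(
        ["image_0.png", "image_1.png"],
        chain_config={"tags": ["multimodal-2"]},
    )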
@@ -456,7 +474,7 @@
"outputs": [],
"source": [
"# Page 3\n",
-"chain.invoke(\"Describe the Transformer architecture\")"
+"stream_question(\"Describe the Transformer architecture\")"
]
},
{
@@ -466,7 +484,7 @@
"outputs": [],
"source": [
"# Page 6\n",
-"chain.invoke(\"What is the complexity of self-attention?\")"
+"stream_question(\"What is the complexity of self-attention?\")"
]
},
{
@@ -476,7 +494,7 @@
"outputs": [],
"source": [
"# Page 6\n",
-"chain.invoke(\"Explain the formula of positional encoding\")"
+"stream_question(\"Explain the formula of positional encoding\")"
]
},
{
@@ -486,7 +504,7 @@
"outputs": [],
"source": [
"# Page 8\n",
-"chain.invoke(\"What are the BLEU performance of Transformer?\")"
+"stream_question(\"What are the BLEU performance of Transformer?\")"
]
},
{
@@ -496,7 +514,7 @@
"outputs": [],
"source": [
"# No mention\n",
-"chain.invoke(\"What are the ROUGE performance of Transformer?\")"
+"stream_question(\"What are the ROUGE performance of Transformer?\")"
]
},
{
@@ -506,7 +524,7 @@
"outputs": [],
"source": [
"# Page 9\n",
-"chain.invoke(\"Describe the variations of Transformer\")"
+"stream_question(\"Describe the variations of Transformer\")"
]
},
{
@@ -516,7 +534,7 @@
"outputs": [],
"source": [
"# Page 9\n",
-"chain.invoke(\"Between variations A and B, which one is better on PPL?\")"
+"stream_question(\"Between variations A and B, which one is better on PPL?\")"
]
},
{
@@ -526,7 +544,7 @@
"outputs": [],
"source": [
"# Page 10\n",
-"chain.invoke(\"Is Transformer performing well on English constituency parsing?\")"
+"stream_question(\"Is Transformer performing well on English constituency parsing?\")"
]
},
{
@@ -536,7 +554,7 @@
"outputs": [],
"source": [
"# Page 13\n",
-"chain.invoke(\"What words are connected to `making` in the weight visualization?\")"
+"stream_question(\"What words are connected to `making` in the weight visualization?\")"
]
},
{
@@ -546,7 +564,7 @@
"outputs": [],
"source": [
"# Page 13\n",
-"chain.invoke(\"Is `governments` connected to `making` in the weight visualization?\")"
+"stream_question(\"Is `governments` connected to `making` in the weight visualization?\")"
]
},
{
@@ -556,7 +574,7 @@
"outputs": [],
"source": [
"# Page 13\n",
-"chain.invoke(\"Is `2009` connected to `making`?\")"
+"stream_question(\"Is `2009` connected to `making`?\")"
]
},
{
@@ -566,7 +584,7 @@
"outputs": [],
"source": [
"# Page 14\n",
-"chain.invoke(\"What words are associated with `its` in heads 5 and 6?\")"
+"stream_question(\"What words are associated with `its` in heads 5 and 6?\")"
]
},
{
@@ -576,7 +594,7 @@
"outputs": [],
"source": [
"# Page 14\n",
-"chain.invoke(\n",
+"stream_question(\n",
" \"What word has connection with `its` on only one head (heads between 5 and 6)?\"\n",
")"
]
@@ -588,7 +606,7 @@
"outputs": [],
"source": [
"# Page 14\n",
-"chain.invoke(\n",
+"stream_question(\n",
" \"What word has connection with `its` on only one head (heads between 1 and 4)?\"\n",
")"
]
@@ -600,7 +618,7 @@
"outputs": [],
"source": [
"# Page 14\n",
-"chain.invoke(\"What word has connection with `its` on 2 heads?\")"
+"stream_question(\"What word has connection with `its` on 2 heads?\")"
]
},
{
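Beyond organizing traces, the tags attached in this commit are also visible to any LangChain callback handler, which makes it possible to tell runs from the two RAG variants apart programmatically. A small illustrative sketch (the TagLogger handler below is hypothetical and not part of this commit; it assumes the standard callback signature where tags arrive as a keyword argument):

    from langchain_core.callbacks import BaseCallbackHandler
    from langchain_core.runnables import RunnableLambda

    class TagLogger(BaseCallbackHandler):
        """Print the tags attached to each chain run (illustration only)."""

        def on_chain_start(self, serialized, inputs, **kwargs):
            print("chain started with tags:", kwargs.get("tags"))

    chain = RunnableLambda(lambda q: f"(answer to) {q}")
    chain.invoke(
        "Describe the Transformer architecture",
        config={"run_name": "RAG", "tags": ["multimodal-1"], "callbacks": [TagLogger()]},
    )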
