add tracing support to langchain #741

Merged · 83 commits · Jan 27, 2023

Commits
3e818f9
add tracing interface
agola11 Dec 23, 2022
a2c84b5
add stdout impl and add calls
agola11 Dec 23, 2022
f3ce15a
add examples, start implementing nestedjson
agola11 Dec 23, 2022
54afb5d
add example
agola11 Dec 23, 2022
71560db
add nested json
agola11 Dec 23, 2022
1c0f2e4
add basic tracing
agola11 Dec 23, 2022
d2969bd
delete redundant file
agola11 Dec 23, 2022
5fbaa17
fix some lint stuff with singleton instantiation
agola11 Dec 24, 2022
d45992a
edit dataclasses
agola11 Dec 24, 2022
02ca69d
add langchain tracer
agola11 Dec 24, 2022
b2a63cc
format
agola11 Dec 24, 2022
9cf1a2d
multithreading support
agola11 Dec 25, 2022
fc52dac
refactoring json and langchain tracing
agola11 Dec 25, 2022
46b3162
Add BaseCallbackHandler and CallbackManager (#476)
agola11 Dec 29, 2022
3692231
allow for optional CallbackManager in LLM, Chain, and Agent (#482)
agola11 Dec 30, 2022
5d43246
WIP: stdout callback (#479)
hwchase17 Dec 30, 2022
7e36f28
Harrison/not verbose (#487)
hwchase17 Dec 30, 2022
52490e2
add explicit agent end method (#486)
hwchase17 Dec 30, 2022
e3edd74
switch up defaults (#485)
hwchase17 Dec 30, 2022
82380bc
restore files
agola11 Dec 30, 2022
a850a99
Merge remote-tracking branch 'upstream/ankush/callbackhandler' into a…
agola11 Dec 30, 2022
dc8b550
WIP refactoring
agola11 Dec 30, 2022
91c83c1
finish refactoring
agola11 Dec 30, 2022
1699f17
update script and notebook
agola11 Dec 30, 2022
164806a
quick comment fixes (#494)
agola11 Dec 30, 2022
b902bdd
fix verbosity (#496)
hwchase17 Dec 30, 2022
175a248
Harrison/get rid of prints (#490)
hwchase17 Dec 30, 2022
45d6de1
remove logger (#491)
hwchase17 Dec 30, 2022
a3d2a2e
Harrison/streamlit handler (#488)
hwchase17 Dec 30, 2022
f1419eb
toggle flag
agola11 Dec 31, 2022
a5beb32
merge
agola11 Dec 31, 2022
45dd60e
edit script
agola11 Dec 31, 2022
0bdd95e
refactor, add fakes for testing
agola11 Dec 31, 2022
675495f
unit tests
agola11 Dec 31, 2022
e5f38f4
multithreaded test
agola11 Dec 31, 2022
706c5a3
format
agola11 Dec 31, 2022
aae3609
fix ontext comment (#500)
agola11 Dec 31, 2022
6aa2b59
remove all runs before proceeding with test
agola11 Dec 31, 2022
113347d
Merge remote-tracking branch 'upstream/ankush/callbackhandler' into a…
agola11 Dec 31, 2022
4b64694
rm verbose flag
agola11 Dec 31, 2022
c2c2932
session management for tracing
agola11 Jan 2, 2023
92a8fc8
format
agola11 Jan 2, 2023
2460404
tests
agola11 Jan 2, 2023
8ff23fe
change LLMResult class to be json serializable and run notebook
agola11 Jan 2, 2023
eba3d89
fix session to not have child runs
agola11 Jan 3, 2023
8c355f2
update notebook
agola11 Jan 3, 2023
e12a761
update notebook
agola11 Jan 4, 2023
2ec351a
add examples for datasets and feedback
agola11 Jan 4, 2023
83dbb26
update notebook
agola11 Jan 4, 2023
bd9a3d3
add on_error handling
agola11 Jan 5, 2023
4875f02
merge attempt
agola11 Jan 5, 2023
c60c79f
get tracer updated
agola11 Jan 5, 2023
4a9caab
revert schema
agola11 Jan 5, 2023
72e1cfe
fix error handling
agola11 Jan 6, 2023
2fe8894
cr
hwchase17 Jan 7, 2023
2e836ca
cr
hwchase17 Jan 7, 2023
1113898
cr
hwchase17 Jan 7, 2023
c85aa93
cr
hwchase17 Jan 7, 2023
d8ef483
cr
hwchase17 Jan 7, 2023
498ff69
add docker compose pull, fix docker-compose file
agola11 Jan 7, 2023
10a154b
fix tracer tests
agola11 Jan 12, 2023
1405523
fix tests
agola11 Jan 12, 2023
4c76e6f
update tracing schemas (#4)
agola11 Jan 12, 2023
7694295
update notebook and docker compose, allow for auth headers in tracer
agola11 Jan 14, 2023
07187f9
more defensive handling of requests errors
agola11 Jan 17, 2023
ca870c8
cr
hwchase17 Jan 23, 2023
b79fbce
default session (#6)
hwchase17 Jan 24, 2023
e249468
get tests working
agola11 Jan 24, 2023
9f52fa6
refactor tests
agola11 Jan 24, 2023
8164c57
change notebook, fix loading stuff
agola11 Jan 24, 2023
caec5bd
fix lint and mypy
agola11 Jan 25, 2023
3725094
merge
agola11 Jan 25, 2023
27af32f
whoops
agola11 Jan 25, 2023
ca3109c
fix poetry lock file
agola11 Jan 25, 2023
770b6c8
fix lock
agola11 Jan 25, 2023
1d28c47
inlude on_error to include keyboard interrupt
agola11 Jan 25, 2023
410f6b1
mypy and black
agola11 Jan 25, 2023
f3ca2d8
flake8
agola11 Jan 26, 2023
c0b785f
add freezegun to test deps
agola11 Jan 26, 2023
fc51304
cr
agola11 Jan 26, 2023
6b1474e
cr
agola11 Jan 26, 2023
5599fbe
remove tracing notebook
agola11 Jan 26, 2023
fd0c466
update docker-compose
agola11 Jan 26, 2023
7 changes: 6 additions & 1 deletion langchain/__init__.py
@@ -4,7 +4,11 @@

from langchain.agents import MRKLChain, ReActChain, SelfAskWithSearchChain
from langchain.cache import BaseCache
from langchain.callbacks import set_default_callback_manager, set_handler
from langchain.callbacks import (
set_default_callback_manager,
set_handler,
set_tracing_callback_manager,
)
from langchain.chains import (
ConversationChain,
LLMBashChain,
@@ -68,4 +72,5 @@
"QAWithSourcesChain",
"PALChain",
"set_handler",
"set_tracing_callback_manager",
]
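
With this change, `set_tracing_callback_manager` is exported from the package root alongside `set_handler`. A minimal sketch of enabling tracing programmatically, assuming the langchain version in this PR; calling it with no arguments loads the tracer's default session:

```python
import langchain

# Route all subsequent LLM/chain/agent callbacks through the shared
# LangChain tracer plus stdout. Passing a session name instead raises
# ValueError if that session cannot be loaded.
langchain.set_tracing_callback_manager()
```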
2 changes: 1 addition & 1 deletion langchain/agents/agent.py
@@ -284,7 +284,7 @@ def _call(self, inputs: Dict[str, str]) -> Dict[str, Any]:
observation = tool.func(output.tool_input)
color = color_mapping[output.tool]
return_direct = tool.return_direct
except Exception as e:
except (KeyboardInterrupt, Exception) as e:
self.callback_manager.on_tool_error(e, verbose=self.verbose)
raise e
else:
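
One detail worth calling out in the hunk above: `KeyboardInterrupt` derives from `BaseException`, not `Exception`, so the old `except Exception` clause never fired `on_tool_error` when a run was cancelled with Ctrl-C. A standalone sketch of the pattern (the helper below is illustrative, not langchain API):

```python
# Illustrative helper, not part of langchain: shows why the except clause
# names both KeyboardInterrupt and Exception before re-raising.
def run_with_error_hook(fn, on_error):
    try:
        return fn()
    except (KeyboardInterrupt, Exception) as e:
        on_error(e)  # mirrors self.callback_manager.on_tool_error(e, ...)
        raise
```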
30 changes: 28 additions & 2 deletions langchain/callbacks/__init__.py
@@ -1,11 +1,13 @@
"""Callback handlers that allow listening to events in LangChain."""
import os
from contextlib import contextmanager
from typing import Generator
from typing import Generator, Optional

from langchain.callbacks.base import BaseCallbackHandler, BaseCallbackManager
from langchain.callbacks.openai_info import OpenAICallbackHandler
from langchain.callbacks.shared import SharedCallbackManager
from langchain.callbacks.stdout import StdOutCallbackHandler
from langchain.callbacks.tracers import SharedLangChainTracer


def get_callback_manager() -> BaseCallbackManager:
@@ -21,7 +23,31 @@ def set_handler(handler: BaseCallbackHandler) -> None:

def set_default_callback_manager() -> None:
"""Set default callback manager."""
set_handler(StdOutCallbackHandler())
default_handler = os.environ.get("LANGCHAIN_HANDLER", "stdout")
if default_handler == "stdout":
set_handler(StdOutCallbackHandler())
elif default_handler == "langchain":
session = os.environ.get("LANGCHAIN_SESSION")
set_tracing_callback_manager(session)
else:
raise ValueError(
f"LANGCHAIN_HANDLER should be one of `stdout` "
f"or `langchain`, got {default_handler}"
)


def set_tracing_callback_manager(session_name: Optional[str] = None) -> None:
"""Set tracing callback manager."""
handler = SharedLangChainTracer()
callback = get_callback_manager()
callback.set_handlers([handler, StdOutCallbackHandler()])
if session_name is None:
handler.load_default_session()
else:
try:
handler.load_session(session_name)
except Exception:
raise ValueError(f"session {session_name} not found")


@contextmanager
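
Taken together, `set_default_callback_manager` is now driven by environment variables. A hedged sketch of opting into tracing from a script; the session name below is illustrative, and any handler value other than the two shown raises ValueError per the code above:

```python
import os

# "stdout" (the default) keeps the old behaviour; "langchain" switches
# callbacks to the shared LangChain tracer.
os.environ["LANGCHAIN_HANDLER"] = "langchain"
os.environ["LANGCHAIN_SESSION"] = "my-session"  # optional; default session if unset

from langchain.callbacks import set_default_callback_manager

set_default_callback_manager()
```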
68 changes: 49 additions & 19 deletions langchain/callbacks/base.py
@@ -1,25 +1,34 @@
"""Base callback handler that can be used to handle callbacks from langchain."""

from abc import ABC, abstractmethod
from typing import Any, Dict, List

from pydantic import BaseModel
from typing import Any, Dict, List, Union

from langchain.schema import AgentAction, AgentFinish, LLMResult


class BaseCallbackHandler(BaseModel, ABC):
class BaseCallbackHandler(ABC):
"""Base callback handler that can be used to handle callbacks from langchain."""

ignore_llm: bool = False
ignore_chain: bool = False
ignore_agent: bool = False

@property
def always_verbose(self) -> bool:
"""Whether to call verbose callbacks even if verbose is False."""
return False

@property
def ignore_llm(self) -> bool:
"""Whether to ignore LLM callbacks."""
return False

@property
def ignore_chain(self) -> bool:
"""Whether to ignore chain callbacks."""
return False

@property
def ignore_agent(self) -> bool:
"""Whether to ignore agent callbacks."""
return False

@abstractmethod
def on_llm_start(
self, serialized: Dict[str, Any], prompts: List[str], **kwargs: Any
@@ -31,7 +40,9 @@ def on_llm_end(self, response: LLMResult, **kwargs: Any) -> None:
"""Run when LLM ends running."""

@abstractmethod
def on_llm_error(self, error: Exception, **kwargs: Any) -> None:
def on_llm_error(
self, error: Union[Exception, KeyboardInterrupt], **kwargs: Any
) -> None:
"""Run when LLM errors."""

@abstractmethod
@@ -45,7 +56,9 @@ def on_chain_end(self, outputs: Dict[str, Any], **kwargs: Any) -> None:
"""Run when chain ends running."""

@abstractmethod
def on_chain_error(self, error: Exception, **kwargs: Any) -> None:
def on_chain_error(
self, error: Union[Exception, KeyboardInterrupt], **kwargs: Any
) -> None:
"""Run when chain errors."""

@abstractmethod
@@ -59,7 +72,9 @@ def on_tool_end(self, output: str, **kwargs: Any) -> None:
"""Run when tool ends running."""

@abstractmethod
def on_tool_error(self, error: Exception, **kwargs: Any) -> None:
def on_tool_error(
self, error: Union[Exception, KeyboardInterrupt], **kwargs: Any
) -> None:
"""Run when tool errors."""

@abstractmethod
@@ -82,15 +97,21 @@ def add_handler(self, callback: BaseCallbackHandler) -> None:
def remove_handler(self, handler: BaseCallbackHandler) -> None:
"""Remove a handler from the callback manager."""

@abstractmethod
def set_handler(self, handler: BaseCallbackHandler) -> None:
"""Set handler as the only handler on the callback manager."""
self.set_handlers([handler])

@abstractmethod
def set_handlers(self, handlers: List[BaseCallbackHandler]) -> None:
"""Set handlers as the only handlers on the callback manager."""


class CallbackManager(BaseCallbackManager):
"""Callback manager that can be used to handle callbacks from langchain."""

handlers: List[BaseCallbackHandler]
def __init__(self, handlers: List[BaseCallbackHandler]) -> None:
"""Initialize callback manager."""
self.handlers: List[BaseCallbackHandler] = handlers

def on_llm_start(
self,
@@ -115,7 +136,10 @@ def on_llm_end(
handler.on_llm_end(response)

def on_llm_error(
self, error: Exception, verbose: bool = False, **kwargs: Any
self,
error: Union[Exception, KeyboardInterrupt],
verbose: bool = False,
**kwargs: Any
) -> None:
"""Run when LLM errors."""
for handler in self.handlers:
@@ -146,7 +170,10 @@ def on_chain_end(
handler.on_chain_end(outputs)

def on_chain_error(
self, error: Exception, verbose: bool = False, **kwargs: Any
self,
error: Union[Exception, KeyboardInterrupt],
verbose: bool = False,
**kwargs: Any
) -> None:
"""Run when chain errors."""
for handler in self.handlers:
@@ -175,7 +202,10 @@ def on_tool_end(self, output: str, verbose: bool = False, **kwargs: Any) -> None
handler.on_tool_end(output, **kwargs)

def on_tool_error(
self, error: Exception, verbose: bool = False, **kwargs: Any
self,
error: Union[Exception, KeyboardInterrupt],
verbose: bool = False,
**kwargs: Any
) -> None:
"""Run when tool errors."""
for handler in self.handlers:
@@ -206,6 +236,6 @@ def remove_handler(self, handler: BaseCallbackHandler) -> None:
"""Remove a handler from the callback manager."""
self.handlers.remove(handler)

def set_handler(self, handler: BaseCallbackHandler) -> None:
"""Set handler as the only handler on the callback manager."""
self.handlers = [handler]
def set_handlers(self, handlers: List[BaseCallbackHandler]) -> None:
"""Set handlers as the only handlers on the callback manager."""
self.handlers = handlers
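
The manager API shifts from a single `set_handler` to a list-based `set_handlers`, with `set_handler` kept as a thin wrapper. A minimal usage sketch against the classes shown above:

```python
from langchain.callbacks.base import CallbackManager
from langchain.callbacks.stdout import StdOutCallbackHandler

manager = CallbackManager([StdOutCallbackHandler()])  # handlers now passed to __init__
manager.set_handlers([StdOutCallbackHandler()])       # replace the whole handler list
manager.set_handler(StdOutCallbackHandler())          # convenience: set_handlers([handler])
```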
14 changes: 10 additions & 4 deletions langchain/callbacks/openai_info.py
@@ -1,5 +1,5 @@
"""Callback Handler that prints to std out."""
from typing import Any, Dict, List, Optional
from typing import Any, Dict, List, Optional, Union

from langchain.callbacks.base import BaseCallbackHandler
from langchain.schema import AgentAction, AgentFinish, LLMResult
@@ -29,7 +29,9 @@ def on_llm_end(self, response: LLMResult, **kwargs: Any) -> None:
if "total_tokens" in token_usage:
self.total_tokens += token_usage["total_tokens"]

def on_llm_error(self, error: Exception, **kwargs: Any) -> None:
def on_llm_error(
self, error: Union[Exception, KeyboardInterrupt], **kwargs: Any
) -> None:
"""Do nothing."""
pass

@@ -43,7 +45,9 @@ def on_chain_end(self, outputs: Dict[str, Any], **kwargs: Any) -> None:
"""Print out that we finished a chain."""
pass

def on_chain_error(self, error: Exception, **kwargs: Any) -> None:
def on_chain_error(
self, error: Union[Exception, KeyboardInterrupt], **kwargs: Any
) -> None:
"""Do nothing."""
pass

@@ -68,7 +72,9 @@ def on_tool_end(
"""If not the final action, print out observation."""
pass

def on_tool_error(self, error: Exception, **kwargs: Any) -> None:
def on_tool_error(
self, error: Union[Exception, KeyboardInterrupt], **kwargs: Any
) -> None:
"""Do nothing."""
pass

20 changes: 13 additions & 7 deletions langchain/callbacks/shared.py
@@ -1,7 +1,7 @@
"""A shared CallbackManager."""

import threading
from typing import Any, Dict, List
from typing import Any, Dict, List, Union

from langchain.callbacks.base import (
BaseCallbackHandler,
@@ -46,7 +46,9 @@ def on_llm_end(self, response: LLMResult, **kwargs: Any) -> None:
with self._lock:
self._callback_manager.on_llm_end(response, **kwargs)

def on_llm_error(self, error: Exception, **kwargs: Any) -> None:
def on_llm_error(
self, error: Union[Exception, KeyboardInterrupt], **kwargs: Any
) -> None:
"""Run when LLM errors."""
with self._lock:
self._callback_manager.on_llm_error(error, **kwargs)
@@ -63,7 +65,9 @@ def on_chain_end(self, outputs: Dict[str, Any], **kwargs: Any) -> None:
with self._lock:
self._callback_manager.on_chain_end(outputs, **kwargs)

def on_chain_error(self, error: Exception, **kwargs: Any) -> None:
def on_chain_error(
self, error: Union[Exception, KeyboardInterrupt], **kwargs: Any
) -> None:
"""Run when chain errors."""
with self._lock:
self._callback_manager.on_chain_error(error, **kwargs)
@@ -80,7 +84,9 @@ def on_tool_end(self, output: str, **kwargs: Any) -> None:
with self._lock:
self._callback_manager.on_tool_end(output, **kwargs)

def on_tool_error(self, error: Exception, **kwargs: Any) -> None:
def on_tool_error(
self, error: Union[Exception, KeyboardInterrupt], **kwargs: Any
) -> None:
"""Run when tool errors."""
with self._lock:
self._callback_manager.on_tool_error(error, **kwargs)
@@ -105,7 +111,7 @@ def remove_handler(self, callback: BaseCallbackHandler) -> None:
with self._lock:
self._callback_manager.remove_handler(callback)

def set_handler(self, handler: BaseCallbackHandler) -> None:
"""Set handler as the only handler on the callback manager."""
def set_handlers(self, handlers: List[BaseCallbackHandler]) -> None:
"""Set handlers as the only handlers on the callback manager."""
with self._lock:
self._callback_manager.handlers = [handler]
self._callback_manager.handlers = handlers
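
The shared singleton gets the same list-based setter, guarded by its lock. A sketch, assuming `SharedCallbackManager` keeps its singleton-style construction (unchanged in this diff):

```python
from langchain.callbacks.shared import SharedCallbackManager
from langchain.callbacks.stdout import StdOutCallbackHandler
from langchain.callbacks.tracers import SharedLangChainTracer

shared = SharedCallbackManager()  # process-wide shared instance
# Thread-safe swap of the full handler list under the internal lock.
shared.set_handlers([SharedLangChainTracer(), StdOutCallbackHandler()])
```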
14 changes: 10 additions & 4 deletions langchain/callbacks/stdout.py
@@ -1,5 +1,5 @@
"""Callback Handler that prints to std out."""
from typing import Any, Dict, List, Optional
from typing import Any, Dict, List, Optional, Union

from langchain.callbacks.base import BaseCallbackHandler
from langchain.input import print_text
@@ -19,7 +19,9 @@ def on_llm_end(self, response: LLMResult, **kwargs: Any) -> None:
"""Do nothing."""
pass

def on_llm_error(self, error: Exception, **kwargs: Any) -> None:
def on_llm_error(
self, error: Union[Exception, KeyboardInterrupt], **kwargs: Any
) -> None:
"""Do nothing."""
pass

@@ -34,7 +36,9 @@ def on_chain_end(self, outputs: Dict[str, Any], **kwargs: Any) -> None:
"""Print out that we finished a chain."""
print("\n\033[1m> Finished chain.\033[0m")

def on_chain_error(self, error: Exception, **kwargs: Any) -> None:
def on_chain_error(
self, error: Union[Exception, KeyboardInterrupt], **kwargs: Any
) -> None:
"""Do nothing."""
pass

@@ -61,7 +65,9 @@ def on_tool_end(
print_text(output, color=color)
print_text(f"\n{llm_prefix}")

def on_tool_error(self, error: Exception, **kwargs: Any) -> None:
def on_tool_error(
self, error: Union[Exception, KeyboardInterrupt], **kwargs: Any
) -> None:
"""Do nothing."""
pass

14 changes: 10 additions & 4 deletions langchain/callbacks/streamlit.py
@@ -1,5 +1,5 @@
"""Callback Handler that logs to streamlit."""
from typing import Any, Dict, List, Optional
from typing import Any, Dict, List, Optional, Union

import streamlit as st

@@ -22,7 +22,9 @@ def on_llm_end(self, response: LLMResult, **kwargs: Any) -> None:
"""Do nothing."""
pass

def on_llm_error(self, error: Exception, **kwargs: Any) -> None:
def on_llm_error(
self, error: Union[Exception, KeyboardInterrupt], **kwargs: Any
) -> None:
"""Do nothing."""
pass

@@ -37,7 +39,9 @@ def on_chain_end(self, outputs: Dict[str, Any], **kwargs: Any) -> None:
"""Print out that we finished a chain."""
st.write("Finished chain.")

def on_chain_error(self, error: Exception, **kwargs: Any) -> None:
def on_chain_error(
self, error: Union[Exception, KeyboardInterrupt], **kwargs: Any
) -> None:
"""Do nothing."""
pass

@@ -62,7 +66,9 @@ def on_tool_end(
st.write(f"{observation_prefix}{output}")
st.write(llm_prefix)

def on_tool_error(self, error: Exception, **kwargs: Any) -> None:
def on_tool_error(
self, error: Union[Exception, KeyboardInterrupt], **kwargs: Any
) -> None:
"""Do nothing."""
pass
