ref: Add ruff rules for pydocstyle (D) (#4120)
Add ruff rules for pydocstyle (D)
cbornet authored Oct 14, 2024
1 parent 3e181b9 commit b1a79c0
Showing 119 changed files with 634 additions and 1,245 deletions.
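Most of the changes below reshape docstrings so they satisfy the pydocstyle conventions that ruff's "D" rule set checks: the summary sentence moves onto the same line as the opening triple quotes, one-line docstrings close on that same line, and stray blank lines between a docstring and the function body are dropped. The configuration change that actually enables the "D" rules (presumably an addition to ruff's select list, e.g. in pyproject.toml) is among the 119 changed files but does not appear in this excerpt. The sketch below is a hypothetical before/after illustrating the pattern; the add() function is invented for illustration and is not part of the commit.

# Before: the summary starts on the second line and a blank line follows the
# docstring; with the "D" rules enabled, ruff flags this style
# (e.g. D200/D212/D202).
def add(a: int, b: int) -> int:
    """
    Add two numbers.
    """

    return a + b


# After: the summary sits on the opening line, the closing quotes stay on the
# same line, and the blank line after the docstring is removed, matching the
# edits throughout this diff.
def add(a: int, b: int) -> int:
    """Add two numbers."""
    return a + b

The same mechanical change explains why the commit deletes roughly twice as many lines as it adds: a three-line docstring header collapses into one.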
50 changes: 13 additions & 37 deletions src/backend/base/langflow/__main__.py
@@ -46,9 +46,7 @@ def get_number_of_workers(workers=None):


def display_results(results):
"""
Display the results of the migration.
"""
"""Display the results of the migration."""
for table_results in results:
table = Table(title=f"Migration {table_results.table_name}")
table.add_column("Name")
@@ -149,10 +147,7 @@ def run(
show_default=False,
),
):
"""
Run Langflow.
"""

"""Run Langflow."""
configure(log_level=log_level, log_file=log_file)
set_var_for_macos_issue()

@@ -225,9 +220,7 @@ def run(


def wait_for_server_ready(host, port):
"""
Wait for the server to become ready by polling the health endpoint.
"""
"""Wait for the server to become ready by polling the health endpoint."""
status_code = 0
while status_code != httpx.codes.OK:
try:
@@ -249,16 +242,13 @@ def run_on_mac_or_linux(host, port, log_level, options, app):


def run_on_windows(host, port, log_level, options, app):
"""
Run the Langflow server on Windows.
"""
"""Run the Langflow server on Windows."""
print_banner(host, port)
run_langflow(host, port, log_level, options, app)


def is_port_in_use(port, host="localhost"):
"""
Check if a port is in use.
"""Check if a port is in use.
Args:
port (int): The port number to check.
@@ -272,8 +262,7 @@ def is_port_in_use(port, host="localhost"):


def get_free_port(port):
"""
Given a used port, find a free port.
"""Given a used port, find a free port.
Args:
port (int): The port number to check.
@@ -287,9 +276,7 @@ def get_free_port(port):


def get_letter_from_version(version: str):
"""
Get the letter from a pre-release version.
"""
"""Get the letter from a pre-release version."""
if "a" in version:
return "a"
if "b" in version:
@@ -308,9 +295,7 @@ def build_version_notice(current_version: str, package_name: str) -> str:


def generate_pip_command(package_names, is_pre_release):
"""
Generate the pip install command based on the packages and whether it's a pre-release.
"""
"""Generate the pip install command based on the packages and whether it's a pre-release."""
base_command = "pip install"
if is_pre_release:
return f"{base_command} {' '.join(package_names)} -U --pre"
@@ -369,10 +354,7 @@ def print_banner(host: str, port: int):


def run_langflow(host, port, log_level, options, app):
"""
Run Langflow server on localhost
"""

"""Run Langflow server on localhost."""
if platform.system() == "Windows":
# Run using uvicorn on MacOS and Windows
# Windows doesn't support gunicorn
@@ -398,9 +380,7 @@ def superuser(
password: str = typer.Option(..., prompt=True, hide_input=True, help="Password for the superuser."),
log_level: str = typer.Option("error", help="Logging level.", envvar="LANGFLOW_LOG_LEVEL"),
):
"""
Create a superuser.
"""
"""Create a superuser."""
configure(log_level=log_level)
initialize_services()
db_service = get_db_service()
@@ -432,8 +412,7 @@ def superuser(
# because now the database is stored per installation
@app.command()
def copy_db():
"""
Copy the database files to the current directory.
"""Copy the database files to the current directory.
This function copies the 'langflow.db' and 'langflow-pre.db' files from the cache directory to the current
directory.
@@ -472,9 +451,7 @@ def migration(
help="Fix migrations. This is a destructive operation, and should only be used if you know what you are doing.",
),
):
"""
Run or test migrations.
"""
"""Run or test migrations."""
if fix and not typer.confirm(
"This will delete all data necessary to fix migrations. Are you sure you want to continue?"
):
@@ -492,8 +469,7 @@
def api_key(
log_level: str = typer.Option("error", help="Logging level."),
):
"""
Creates an API key for the default superuser if AUTO_LOGIN is enabled.
"""Creates an API key for the default superuser if AUTO_LOGIN is enabled.
Args:
log_level (str, optional): Logging level. Defaults to "error".
8 changes: 4 additions & 4 deletions src/backend/base/langflow/api/log_router.py
@@ -55,10 +55,10 @@ async def event_generator(request: Request):
async def stream_logs(
request: Request,
):
"""
HTTP/2 Server-Sent-Event (SSE) endpoint for streaming logs
it establishes a long-lived connection to the server and receives log messages in real-time
the client should use the head "Accept: text/event-stream"
"""HTTP/2 Server-Sent-Event (SSE) endpoint for streaming logs.
It establishes a long-lived connection to the server and receives log messages in real-time.
The client should use the header "Accept: text/event-stream".
"""
global log_buffer # noqa: PLW0602
if log_buffer.enabled() is False:
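The endpoint documented above holds the connection open and pushes log records as server-sent events, and the docstring notes that clients must send the "Accept: text/event-stream" header. Below is a minimal consumer sketch using httpx (which the codebase already uses elsewhere in this diff); the base URL, default port, and the /logs-stream path are assumptions, since the route decorator is not visible in this excerpt.

import httpx


def follow_logs(base_url: str = "http://localhost:7860") -> None:
    # Hypothetical client for the SSE log endpoint; adjust base_url and the
    # path to match the deployed server.
    headers = {"Accept": "text/event-stream"}
    # Stream rather than buffer the response: the server intentionally never
    # finishes it, so the read timeout is disabled.
    with httpx.stream("GET", f"{base_url}/logs-stream", headers=headers, timeout=None) as response:
        for line in response.iter_lines():
            if line:  # skip the blank lines that separate SSE events
                print(line)


if __name__ == "__main__":
    follow_logs()

Disabling the timeout is deliberate: a long-lived SSE stream would otherwise be cut off by httpx's default read timeout.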
4 changes: 1 addition & 3 deletions src/backend/base/langflow/api/utils.py
@@ -43,7 +43,6 @@ def remove_api_keys(flow: dict):

def build_input_keys_response(langchain_object, artifacts):
"""Build the input keys response."""

input_keys_response = {
"input_keys": dict.fromkeys(langchain_object.input_keys, ""),
"memory_keys": [],
@@ -201,8 +200,7 @@ def format_exception_message(exc: Exception) -> str:


def get_top_level_vertices(graph, vertices_ids):
"""
Retrieves the top-level vertices from the given graph based on the provided vertex IDs.
"""Retrieves the top-level vertices from the given graph based on the provided vertex IDs.
Args:
graph (Graph): The graph object containing the vertices.
7 changes: 5 additions & 2 deletions src/backend/base/langflow/api/v1/chat.py
@@ -78,16 +78,17 @@ async def retrieve_vertices_order(
session=Depends(get_session),
telemetry_service: TelemetryService = Depends(get_telemetry_service),
):
"""
Retrieve the vertices order for a given flow.
"""Retrieve the vertices order for a given flow.
Args:
flow_id (str): The ID of the flow.
background_tasks (BackgroundTasks): The background tasks.
data (Optional[FlowDataRequest], optional): The flow data. Defaults to None.
stop_component_id (str, optional): The ID of the stop component. Defaults to None.
start_component_id (str, optional): The ID of the start component. Defaults to None.
chat_service (ChatService, optional): The chat service dependency. Defaults to Depends(get_chat_service).
session (Session, optional): The session dependency. Defaults to Depends(get_session).
telemetry_service (TelemetryService, optional): The telemetry service.
Returns:
VerticesOrderResponse: The response containing the ordered vertex IDs and the run ID.
@@ -471,8 +472,10 @@ async def build_vertex(
vertex_id (str): The ID of the vertex to build.
background_tasks (BackgroundTasks): The background tasks dependency.
inputs (Optional[InputValueRequest], optional): The input values for the vertex. Defaults to None.
files (List[str], optional): The files to use. Defaults to None.
chat_service (ChatService, optional): The chat service dependency. Defaults to Depends(get_chat_service).
current_user (Any, optional): The current user dependency. Defaults to Depends(get_current_active_user).
telemetry_service (TelemetryService, optional): The telemetry service.
Returns:
VertexBuildResponse: The response containing the built vertex information.
31 changes: 13 additions & 18 deletions src/backend/base/langflow/api/v1/endpoints.py
@@ -156,9 +156,7 @@ async def simple_run_flow_task(
stream: bool = False,
api_key_user: User | None = None,
):
"""
Run a flow task as a BackgroundTask, therefore it should not throw exceptions.
"""
"""Run a flow task as a BackgroundTask, therefore it should not throw exceptions."""
try:
return await simple_run_flow(
flow=flow,
@@ -180,9 +178,10 @@ async def simplified_run_flow(
api_key_user: UserRead = Depends(api_key_security),
telemetry_service: TelemetryService = Depends(get_telemetry_service),
):
"""
Executes a specified flow by ID with input customization, performance enhancements through caching, and optional
data streaming.
"""Executes a specified flow by ID.
Executes a specified flow by ID with input customization, performance enhancements through caching,
and optional data streaming.
### Parameters:
- `db` (Session): Database session for executing queries.
@@ -301,15 +300,14 @@ async def webhook_run_flow(
background_tasks: BackgroundTasks,
telemetry_service: Annotated[TelemetryService, Depends(get_telemetry_service)],
):
"""
Run a flow using a webhook request.
"""Run a flow using a webhook request.
Args:
db (Session): The database session.
flow (Flow, optional): The flow to be executed. Defaults to Depends(get_flow_by_id).
user (User): The flow user.
request (Request): The incoming HTTP request.
background_tasks (BackgroundTasks): The background tasks manager.
session_service (SessionService, optional): The session service. Defaults to Depends(get_session_service).
flow (Flow, optional): The flow to be executed. Defaults to Depends(get_flow_by_id).
telemetry_service (TelemetryService): The telemetry service.
Returns:
dict: A dictionary containing the status of the task.
@@ -382,8 +380,8 @@ async def experimental_run_flow(
api_key_user: UserRead = Depends(api_key_security),
session_service: SessionService = Depends(get_session_service),
):
"""
Executes a specified flow by ID with optional input values, output selection, tweaks, and streaming capability.
"""Executes a specified flow by ID with optional input values, output selection, tweaks, and streaming capability.
This endpoint supports running flows with caching to enhance performance and efficiency.
### Parameters:
@@ -511,9 +509,7 @@ async def process(
sync: Annotated[bool, Body(embed=True)] = True,
session_service: SessionService = Depends(get_session_service),
):
"""
Endpoint to process an input with a given flow_id.
"""
"""Endpoint to process an input with a given flow_id."""
# Raise a depreciation warning
logger.warning(
"The /process endpoint is deprecated and will be removed in a future version. Please use /run instead."
@@ -598,8 +594,7 @@ async def custom_component_update(
code_request: UpdateCustomComponentRequest,
user: Annotated[User, Depends(get_current_active_user)],
):
"""
Update a custom component with the provided code request.
"""Update a custom component with the provided code request.
This endpoint generates the CustomComponentFrontendNode normally but then runs the `update_build_config` method
on the latest version of the template.
14 changes: 6 additions & 8 deletions src/backend/base/langflow/api/v1/flows.py
@@ -137,8 +137,7 @@ def read_flows(
params: Params = Depends(),
header_flows: bool = False,
):
"""
Retrieve a list of flows with pagination support.
"""Retrieve a list of flows with pagination support.
Args:
current_user (User): The current authenticated user.
@@ -149,10 +148,11 @@
folder_id (UUID, optional): The folder ID. Defaults to None.
params (Params): Pagination parameters.
remove_example_flows (bool, optional): Whether to remove example flows. Defaults to False.
header_flows (bool, optional): Whether to return only specific headers of the flows. Defaults to False.
Returns:
Union[list[FlowRead], Page[FlowRead]]: A list of flows or a paginated response containing the list of flows.
"""

try:
auth_settings = settings_service.auth_settings

@@ -364,12 +364,12 @@ async def delete_multiple_flows(
user: Annotated[User, Depends(get_current_active_user)],
db: Annotated[Session, Depends(get_session)],
):
"""
Delete multiple flows by their IDs.
"""Delete multiple flows by their IDs.
Args:
flow_ids (List[str]): The list of flow IDs to delete.
user (User, optional): The user making the request. Defaults to the current active user.
db (Session, optional): The database session.
Returns:
dict: A dictionary containing the number of flows deleted.
@@ -441,16 +441,14 @@ def read_basic_examples(
*,
session: Session = Depends(get_session),
):
"""
Retrieve a list of basic example flows.
"""Retrieve a list of basic example flows.
Args:
session (Session): The database session.
Returns:
list[FlowRead]: A list of basic example flows.
"""

try:
# Get the starter folder
starter_folder = session.exec(select(Folder).where(Folder.name == STARTER_FOLDER_NAME)).first()
(The remaining changed files in this commit are not shown here.)
