From d2afc20b02550d5b446c5990c44b490552c0ecac Mon Sep 17 00:00:00 2001
From: david
Date: Wed, 11 Dec 2024 19:25:10 +0800
Subject: [PATCH] enhancement: improve startup time and minor improvements.

---
 .../android/text_icon_localization.py | 16 +++++++++++-
 metagpt/repo_parser.py                |  3 ++-
 metagpt/utils/text.py                 | 25 ++++++++++---------
 3 files changed, 30 insertions(+), 14 deletions(-)

diff --git a/metagpt/environment/android/text_icon_localization.py b/metagpt/environment/android/text_icon_localization.py
index e8886b540a..b5e44c591a 100644
--- a/metagpt/environment/android/text_icon_localization.py
+++ b/metagpt/environment/android/text_icon_localization.py
@@ -8,7 +8,21 @@
 import cv2
 import groundingdino.datasets.transforms as T
 import numpy as np
-import torch
+class LazyTorch:
+    def __init__(self):
+        self._torch = None
+
+    def _import_torch(self):
+        if self._torch is None:
+            import torch
+            self._torch = torch
+
+    def __getattr__(self, item):
+        self._import_torch()
+        return getattr(self._torch, item)
+
+# Create the LazyTorch instance
+torch = LazyTorch()
 from groundingdino.models import build_model
 from groundingdino.util.slconfig import SLConfig
 from groundingdino.util.utils import clean_state_dict, get_phrases_from_posmap
diff --git a/metagpt/repo_parser.py b/metagpt/repo_parser.py
index bc3bae6624..c3f414b831 100644
--- a/metagpt/repo_parser.py
+++ b/metagpt/repo_parser.py
@@ -18,7 +18,6 @@
 from pathlib import Path
 from typing import Dict, List, Optional
 
-import pandas as pd
 from pydantic import BaseModel, Field, field_validator
 
 from metagpt.const import AGGREGATION, COMPOSITION, GENERALIZATION
@@ -508,6 +507,8 @@ def generate_dataframe_structure(self, output_path: Path):
         Args:
             output_path (Path): The path to the CSV file to be generated.
         """
+        import pandas as pd
+
         files_classes = [i.model_dump() for i in self.generate_symbols()]
         df = pd.DataFrame(files_classes)
         df.to_csv(output_path, index=False)
diff --git a/metagpt/utils/text.py b/metagpt/utils/text.py
index df0f5ac053..2998ef9c83 100644
--- a/metagpt/utils/text.py
+++ b/metagpt/utils/text.py
@@ -59,18 +59,19 @@ def generate_prompt_chunk(
     max_token = TOKEN_MAX.get(model_name, 2048) - reserved - 100
 
     while paragraphs:
-        paragraph = paragraphs.pop(0)
-        token = count_output_tokens(paragraph, model_name)
-        if current_token + token <= max_token:
-            current_lines.append(paragraph)
-            current_token += token
-        elif token > max_token:
-            paragraphs = split_paragraph(paragraph) + paragraphs
-            continue
-        else:
-            yield prompt_template.format("".join(current_lines))
-            current_lines = [paragraph]
-            current_token = token
+        paragraph = paragraphs.pop(0).strip()
+        if len(paragraph) != 0:
+            token = count_output_tokens(paragraph, model_name)
+            if current_token + token <= max_token:
+                current_lines.append(paragraph)
+                current_token += token
+            elif token > max_token:
+                paragraphs = split_paragraph(paragraph) + paragraphs
+                continue
+            else:
+                yield prompt_template.format("".join(current_lines))
+                current_lines = [paragraph]
+                current_token = token
 
     if current_lines:
         yield prompt_template.format("".join(current_lines))