Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Enhanced a few code files in the main branch. #1522

Open
wants to merge 3 commits into
base: main
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
21 changes: 9 additions & 12 deletions metagpt/logs.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,19 +8,16 @@

import sys
from datetime import datetime
from functools import partial
from typing import Callable, Optional

from loguru import logger as _logger

from metagpt.const import METAGPT_ROOT

_print_level = "INFO"


def define_log_level(print_level="INFO", logfile_level="DEBUG", name: str = None):
"""Adjust the log level to above level"""
global _print_level
_print_level = print_level

def define_log_level(print_level: str = "INFO", logfile_level: str = "DEBUG", name: Optional[str] = None) -> _logger:
"""Adjust the log level to the specified levels."""
current_date = datetime.now()
formatted_date = current_date.strftime("%Y%m%d")
log_name = f"{name}_{formatted_date}" if name else formatted_date # name a log with prefix name
Expand All @@ -34,15 +31,15 @@ def define_log_level(print_level="INFO", logfile_level="DEBUG", name: str = None
logger = define_log_level()


def log_llm_stream(msg):
def log_llm_stream(msg: str) -> None:
"""Log LLM stream messages."""
_llm_stream_log(msg)


def set_llm_stream_logfunc(func):
def set_llm_stream_logfunc(func: Callable[[str], None]) -> None:
"""Set the function to be used for logging LLM streams."""
global _llm_stream_log
_llm_stream_log = func


def _llm_stream_log(msg):
if _print_level in ["INFO"]:
Copy link
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

It looks like these changes have reverted the feature introduced in PR #1124, which allowed the define_log_level method to control stream logging via the print_level parameter. Could you please clarify the reason for this change?

print(msg, end="")
_llm_stream_log: Callable[[str], None] = partial(print, end="")
20 changes: 6 additions & 14 deletions metagpt/software_company.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,6 @@
import asyncio
from pathlib import Path

import agentops
import typer

from metagpt.const import CONFIG_ROOT
Expand Down Expand Up @@ -39,9 +38,6 @@ def generate_repo(
)
from metagpt.team import Team

if config.agentops_api_key != "":
Copy link
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Why was agentops removed?

agentops.init(config.agentops_api_key, tags=["software_company"])

config.update_via_cli(project_path, project_name, inc, reqa_file, max_auto_summarize_code)
ctx = Context(config=config)

Expand Down Expand Up @@ -72,9 +68,6 @@ def generate_repo(
company.run_project(idea)
asyncio.run(company.run(n_round=n_round))

if config.agentops_api_key != "":
agentops.end_session("Success")

return ctx.repo


Expand All @@ -87,7 +80,7 @@ def startup(
run_tests: bool = typer.Option(default=False, help="Whether to enable QA for adding & running tests."),
implement: bool = typer.Option(default=True, help="Enable or disable code implementation."),
project_name: str = typer.Option(default="", help="Unique project name, such as 'game_2048'."),
inc: bool = typer.Option(default=False, help="Incremental mode. Use it to coop with existing repo."),
inc: bool = typer.Option(default=False, help="Incremental mode. Use it to cope with existing repo."),
project_path: str = typer.Option(
default="",
help="Specify the directory path of the old version project to fulfill the incremental requirements.",
Expand Down Expand Up @@ -130,10 +123,9 @@ def startup(

DEFAULT_CONFIG = """# Full Example: https://github.com/geekan/MetaGPT/blob/main/config/config2.example.yaml
# Reflected Code: https://github.com/geekan/MetaGPT/blob/main/metagpt/config2.py
# Config Docs: https://docs.deepwisdom.ai/main/en/guide/get_started/configuration.html
llm:
api_type: "openai" # or azure / ollama / groq etc.
model: "gpt-4-turbo" # or gpt-3.5-turbo
api_type: "openai" # or azure / ollama / open_llm etc. Check LLMType for more options
model: "gpt-4-turbo-preview" # or gpt-3.5-turbo-1106 / gpt-4-1106-preview
base_url: "https://api.openai.com/v1" # or forward url / other llm url
api_key: "YOUR_API_KEY"
"""
Expand All @@ -143,16 +135,16 @@ def copy_config_to():
"""Initialize the configuration file for MetaGPT."""
target_path = CONFIG_ROOT / "config2.yaml"

# 创建目标目录(如果不存在)
# 创建目标目录(如果不存在)/ Create the target directory if it does not exist
target_path.parent.mkdir(parents=True, exist_ok=True)

# 如果目标文件已经存在,则重命名为 .bak
# 如果目标文件已经存在,则重命名为 .bak / If the target file already exists, rename it to .bak
if target_path.exists():
backup_path = target_path.with_suffix(".bak")
target_path.rename(backup_path)
print(f"Existing configuration file backed up at {backup_path}")

# 复制文件
# 复制文件 / Copy files
target_path.write_text(DEFAULT_CONFIG, encoding="utf-8")
print(f"Configuration file initialized at {target_path}")

Expand Down
7 changes: 6 additions & 1 deletion metagpt/team.py
Original file line number Diff line number Diff line change
Expand Up @@ -130,7 +130,12 @@ async def run(self, n_round=3, idea="", send_to="", auto_archive=True):
logger.debug("All roles are idle.")
break
n_round -= 1
self._check_balance()
try:
self._check_balance()
except NoMoneyException as e:
Copy link
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

already has a serialize_decorator, should we add this?

logger.error(f"Project stopped due to insufficient funds: {e}")
break

await self.env.run()

logger.debug(f"max {n_round=} left.")
Expand Down
Loading