Fix debug command and sidekick-agent
Resolves #68

In `cli.py`, the `debug` command now uses the full command output instead of just the error output for its analysis, which should provide more context for debugging. The `sidekick_agent` command now calls `setup_cli(verify_git_repo=True)`, so it verifies that it is being run inside a git repository; this prevents errors when the command is run outside of one.
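For illustration, a minimal sketch of what capturing the full command output might look like — the helper name and the exact way stdout and stderr are combined here are assumptions for this example, not the project's actual implementation:

```python
import subprocess


def run_and_capture(command_str):
    # Run the user's command and keep *all* of its output, not just stderr,
    # so the follow-up analysis has the full context.
    process = subprocess.run(command_str, shell=True, capture_output=True, text=True)
    output = process.stdout + process.stderr  # combined output (assumption)
    return process, output
```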

In `test_cli.py`, the `test_debug_success` function no longer uses the `@pytest.mark.vcr()` decorator. This simplifies the test setup and should not affect the test's behavior.

These changes aim to improve the clarity and robustness of the code. 🚀👍
TechNickAI committed Jul 26, 2023
1 parent 04f1890 commit 7316987
Showing 2 changed files with 4 additions and 6 deletions.
9 changes: 4 additions & 5 deletions aicodebot/cli.py
@@ -311,16 +311,14 @@ def debug(command, verbose):
         return

     # If the command failed, send its output to ChatGPT for analysis
-    error_output = process.stderr
-
     console.print(f"The command exited with status {process.returncode}.")

     # Load the prompt
     prompt = get_prompt("debug")
     logger.trace(f"Prompt: {prompt}")

     # Set up the language model
-    request_token_size = Coder.get_token_length(error_output) + Coder.get_token_length(prompt.template)
+    request_token_size = Coder.get_token_length(output) + Coder.get_token_length(prompt.template)
     model_name = Coder.get_llm_model_name(request_token_size + DEFAULT_MAX_TOKENS)
     if model_name is None:
         raise click.ClickException(f"The output is too large to debug ({request_token_size} tokens). 😢")
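The hunk above sizes the request before a model is chosen: the token count of the captured output plus the prompt template, plus headroom for the response, has to fit in the model's context window. A rough sketch of that pattern, using a hypothetical model table rather than the real `Coder` API:

```python
DEFAULT_MAX_TOKENS = 512  # headroom reserved for the response (assumed value)

# Hypothetical context-window table; not aicodebot's actual model list.
CONTEXT_WINDOWS = {"gpt-3.5-turbo": 4_096, "gpt-3.5-turbo-16k": 16_384}


def pick_model(request_token_size):
    # Return the smallest model whose context window fits the request plus
    # the reserved response tokens, or None if nothing is large enough.
    for name, window in sorted(CONTEXT_WINDOWS.items(), key=lambda kv: kv[1]):
        if request_token_size + DEFAULT_MAX_TOKENS <= window:
            return name
    return None
```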
@@ -335,7 +333,7 @@ def debug(command, verbose):

     # Set up the chain
     chain = LLMChain(llm=llm, prompt=prompt, verbose=verbose)
-    chain.run({"error_output": error_output, "languages": ["unix", "bash", "shell"]})
+    chain.run({"command_output": output, "languages": ["unix", "bash", "shell"]})

     sys.exit(process.returncode)

@@ -583,7 +581,7 @@ def sidekick_agent(learned_repos):
     """
     EXPREMENTAL: Coding help from your AI sidekick, made agentic with tools\n
     """
-    setup_config()
+    setup_cli(verify_git_repo=True)

     console.print("This is an experimental feature.", style=warning_style)

@@ -618,6 +616,7 @@ def sidekick_agent(learned_repos):

 # ---------------------------------------------------------------------------- #
 #                               Helper functions                                #
+# ---------------------------------------------------------------------------- #


 def setup_cli(verify_git_repo=False):
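A minimal sketch of what the `verify_git_repo=True` check above could amount to — using `git rev-parse` here is an assumption, not necessarily how `setup_cli` actually implements it:

```python
import subprocess

import click


def verify_git_repository():
    # Ask git whether the current directory is inside a working tree;
    # a non-zero exit status means we are not in a git repository.
    result = subprocess.run(
        ["git", "rev-parse", "--is-inside-work-tree"],
        capture_output=True,
        text=True,
    )
    if result.returncode != 0:
        raise click.ClickException("This command must be run from inside a git repository.")
```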
1 change: 0 additions & 1 deletion tests/test_cli.py
@@ -86,7 +86,6 @@ def test_configure(cli_runner, tmp_path, monkeypatch):
     assert config_data["personality"] == DEFAULT_PERSONALITY.name


-@pytest.mark.vcr()
 def test_debug_success(cli_runner):
     result = cli_runner.invoke(cli, ["debug", "echo", "Hello, world!"])
     assert result.exit_code == 0, f"Output: {result.output}"
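Because `echo "Hello, world!"` exits with status 0, the debug command returns before any LLM call is made (see the early `return` in the first hunk), which is presumably why the recorded-response decorator is no longer needed. One way to run just this test as a quick check:

```python
import pytest

# Equivalent to `pytest tests/test_cli.py::test_debug_success -v` on the command line.
pytest.main(["tests/test_cli.py::test_debug_success", "-v"])
```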
