diff --git a/internal/agent/logic.go b/internal/agent/logic.go
index 4241bc19..53b3f7f6 100644
--- a/internal/agent/logic.go
+++ b/internal/agent/logic.go
@@ -14,6 +14,7 @@ import (
 	"strings"
 
 	gcs "cloud.google.com/go/storage"
+	"github.com/go-git/go-billy/v5"
 	"github.com/go-git/go-billy/v5/memfs"
 	"github.com/go-git/go-git/v5"
 	"github.com/go-git/go-git/v5/plumbing"
@@ -339,6 +340,60 @@ func (a *defaultAgent) getTools() []*llm.FunctionDefinition {
 	}
 }
 
+// proposeInferenceWithAIAssist asks the model, grounded with Google Search,
+// for the package's source repository URL and retries inference with it.
+func (a *defaultAgent) proposeInferenceWithAIAssist(ctx context.Context, initialErr error, wt billy.Filesystem, str *memory.Storage) (*schema.StrategyOneOf, error) {
+	log.Println("Getting repository hint from AI...")
+	prompt := []string{
+		fmt.Sprintf("Based on the following inference failure error \"%v\" for package '%s', find the correct source code repository URL.", initialErr, a.t.Package),
+		"Just return the URL WITHOUT any additional text or formatting.",
+		"For example, for the package 'org.apache.camel:camel-support', return 'https://github.com/apache/camel' not 'https://github.com/apache/camel/tree/main/core/camel-support'.",
+		"Use the tools you have at your disposal to find the URL.",
+		"Finally, if you don't find the URL, just return an empty string.",
+	}
+	repoURL, err := llm.GenerateTextContent(ctx, a.deps.Client, llm.GeminiPro, &genai.GenerateContentConfig{
+		Temperature: genai.Ptr(float32(0.0)),
+		Tools: []*genai.Tool{
+			{GoogleSearch: &genai.GoogleSearch{}},
+		},
+	}, genai.NewPartFromText(strings.Join(prompt, "\n")))
+	if err != nil {
+		return nil, errors.Wrap(err, "getting AI repo hint")
+	}
+	if repoURL == "" {
+		return nil, errors.Wrap(initialErr, "AI could not find a repository hint")
+	}
+	log.Printf("AI suggested repo hint: %s", repoURL)
+	req := schema.InferenceRequest{
+		Ecosystem: a.t.Ecosystem,
+		Package:   a.t.Package,
+		Version:   a.t.Version,
+		Artifact:  a.t.Artifact,
+		StrategyHint: &schema.StrategyOneOf{
+			LocationHint: &rebuild.LocationHint{
+				Location: rebuild.Location{
+					Repo: repoURL,
+				},
+			},
+		},
+	}
+	s, err := inferenceservice.Infer(
+		ctx,
+		req,
+		&inferenceservice.InferDeps{
+			HTTPClient: http.DefaultClient,
+			GitCache:   nil,
+			RepoOptF: func() *gitx.RepositoryOptions {
+				return &gitx.RepositoryOptions{
+					Worktree: wt,
+					Storer:   str,
+				}
+			},
+		},
+	)
+	return s, errors.Wrap(err, "inferring with AI repo hint")
+}
+
 func (a *defaultAgent) proposeNormalInference(ctx context.Context) (*schema.StrategyOneOf, error) {
 	wt := memfs.New()
 	str := memory.NewStorage()
@@ -362,7 +417,16 @@ func (a *defaultAgent) proposeNormalInference(ctx context.Context) (*schema.Stra
 		},
 	)
 	if err != nil {
-		return nil, errors.Wrap(err, "inferring initial strategy")
+		log.Printf("Normal inference failed: %v", err)
+		// Reassign (not redeclare) so the git.Open call below sees the wt
+		// and str populated by the AI-assisted attempt.
+		wt = memfs.New()
+		str = memory.NewStorage()
+		s, err = a.proposeInferenceWithAIAssist(ctx, err, wt, str)
+		if err != nil {
+			return nil, errors.Wrap(err, "AI-assisted inference failed")
+		}
+		log.Println("AI-assisted inference succeeded.")
 	}
 	a.repo, err = git.Open(str, wt)
 	if err != nil {
diff --git a/internal/agent/session.go b/internal/agent/session.go
index d56a9374..ff75efe9 100644
--- a/internal/agent/session.go
+++ b/internal/agent/session.go
@@ -22,6 +22,7 @@ type AgentDeps struct {
 	MetadataBucket string
 	LogsBucket     string
 	MaxTurns       int
+	Client         *genai.Client
 }
 
 type Agent interface {
@@ -86,6 +87,7 @@ func doSession(ctx context.Context, req RunSessionReq, deps RunSessionDeps) *sch
 		MetadataBucket: deps.MetadataBucket,
 		LogsBucket:     deps.LogsBucket,
 		MaxTurns:       10,
+		Client:         deps.Client,
 	})
 	var err error
 	a.deps.Chat, err = llm.NewChat(ctx, deps.Client, llm.GeminiPro, config, &llm.ChatOpts{Tools: a.getTools()})
@@ -123,6 +125,7 @@ func doSession(ctx context.Context, req RunSessionReq, deps RunSessionDeps) *sch
 	if iteration != nil && iteration.Result != nil && !iteration.Result.BuildSuccess {
 		log.Printf("Build failed: %s", iteration.Result.ErrorMessage)
 	}
+	log.Printf("Iteration status: %s", iteration.Status)
 	switch iteration.Status {
 	case schema.AgentIterationStatusSuccess:
 		return &schema.AgentCompleteRequest{
diff --git a/internal/llm/llm.go b/internal/llm/llm.go
index 51088e51..eeea2680 100644
--- a/internal/llm/llm.go
+++ b/internal/llm/llm.go
@@ -6,6 +6,7 @@ package llm
 import (
 	"context"
 	"encoding/json"
+	"log"
 
 	"github.com/pkg/errors"
 	"google.golang.org/genai"
@@ -61,6 +62,7 @@ type ScriptResponse struct {
 
 func GenerateTextContent(ctx context.Context, client *genai.Client, model string, config *genai.GenerateContentConfig, prompt ...*genai.Part) (string, error) {
 	contents := []*genai.Content{{Parts: prompt, Role: "user"}}
+	log.Printf("%s\n\n", FormatContent(*contents[0]))
 	resp, err := client.Models.GenerateContent(ctx, model, contents, config)
 	if err != nil {
 		return "", errors.Wrap(err, "failed to generate content")
@@ -77,6 +79,7 @@ func GenerateTextContent(ctx context.Context, client *genai.Client, model string
 		return "", errors.New("empty response content")
 	case 1:
 		if candidate.Content.Parts[0].Text != "" {
+			log.Printf("%s\n\n", FormatContent(*candidate.Content))
 			return candidate.Content.Parts[0].Text, nil
 		}
 		return "", errors.New("part is not text")
diff --git a/targetversion.txt b/targetversion.txt
new file mode 100644
index 00000000..e69de29b
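---

A minimal standalone sketch of the grounded repo-hint call that proposeInferenceWithAIAssist makes, assuming API-key auth and a "gemini-2.5-pro" model ID; the patch itself routes this through llm.GenerateTextContent with llm.GeminiPro, so the client setup and model name here are illustrative assumptions only:

package main

import (
	"context"
	"fmt"
	"log"
	"os"
	"strings"

	"google.golang.org/genai"
)

func main() {
	ctx := context.Background()
	// Assumption: API-key auth against the Gemini API backend.
	client, err := genai.NewClient(ctx, &genai.ClientConfig{
		APIKey:  os.Getenv("GEMINI_API_KEY"),
		Backend: genai.BackendGeminiAPI,
	})
	if err != nil {
		log.Fatal(err)
	}
	// Mirror the prompt built in proposeInferenceWithAIAssist, with a
	// hypothetical failure message and example package substituted in.
	prompt := strings.Join([]string{
		"Based on the following inference failure error \"repo not found\" for package 'org.apache.camel:camel-support', find the correct source code repository URL.",
		"Just return the URL WITHOUT any additional text or formatting.",
		"Finally, if you don't find the URL, just return an empty string.",
	}, "\n")
	resp, err := client.Models.GenerateContent(ctx, "gemini-2.5-pro",
		[]*genai.Content{{Role: "user", Parts: []*genai.Part{genai.NewPartFromText(prompt)}}},
		&genai.GenerateContentConfig{
			Temperature: genai.Ptr(float32(0.0)),
			// Google Search grounding, as enabled in the patch's call.
			Tools: []*genai.Tool{{GoogleSearch: &genai.GoogleSearch{}}},
		})
	if err != nil {
		log.Fatal(err)
	}
	// Extract the first candidate's text, as the patched llm.go does.
	if len(resp.Candidates) > 0 && resp.Candidates[0].Content != nil && len(resp.Candidates[0].Content.Parts) > 0 {
		fmt.Println("repo hint:", strings.TrimSpace(resp.Candidates[0].Content.Parts[0].Text))
	}
}

Pinning Temperature to 0 keeps the hint deterministic enough to treat the reply as a bare URL for the LocationHint retry.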