llm/ant: convert dumpText constant to dump-ant-calls command line flag
Co-Authored-By: sketch <hello@sketch.dev>
Change-ID: sd58268f97ed95de8k
diff --git a/cmd/sketch/main.go b/cmd/sketch/main.go
index 0594778..7bfcf38 100644
--- a/cmd/sketch/main.go
+++ b/cmd/sketch/main.go
@@ -294,6 +294,8 @@
bashSlowTimeout string
bashBackgroundTimeout string
passthroughUpstream bool
+ // Claude debugging
+ dumpAntCalls bool
}
// parseCLIFlags parses all command-line flags and returns a CLIFlags struct
@@ -369,6 +371,7 @@
// Internal flags for development/debugging
internalFlags.StringVar(&flags.dumpDist, "dump-dist", "", "(internal) dump embedded /dist/ filesystem to specified directory and exit")
internalFlags.StringVar(&flags.subtraceToken, "subtrace-token", "", "(development) run sketch under subtrace.dev with the provided token")
+ internalFlags.BoolVar(&flags.dumpAntCalls, "dump-ant-calls", false, "(debugging) dump raw communications with Claude to files in ~/.cache/sketch/")
// Custom usage function that shows only user-visible flags by default
userFlags.Usage = func() {
@@ -518,6 +521,7 @@
SubtraceToken: flags.subtraceToken,
MCPServers: flags.mcpServers,
PassthroughUpstream: flags.passthroughUpstream,
+ DumpAntCalls: flags.dumpAntCalls,
}
if err := dockerimg.LaunchContainer(ctx, config); err != nil {
@@ -628,7 +632,7 @@
}
}
- llmService, err := selectLLMService(nil, flags.modelName, modelURL, apiKey)
+ llmService, err := selectLLMService(nil, flags, modelURL, apiKey)
if err != nil {
return fmt.Errorf("failed to initialize LLM service: %w", err)
}
@@ -895,19 +899,20 @@
-// If modelName is "gemini", it uses the Gemini service.
+// If flags.modelName is "gemini", it uses the Gemini service.
// Otherwise, it tries to use the OpenAI service with the specified model.
// Returns an error if the model name is not recognized or if required configuration is missing.
-func selectLLMService(client *http.Client, modelName string, modelURL, apiKey string) (llm.Service, error) {
- if modelName == "" || modelName == "claude" {
+func selectLLMService(client *http.Client, flags CLIFlags, modelURL, apiKey string) (llm.Service, error) {
+ if flags.modelName == "" || flags.modelName == "claude" {
if apiKey == "" {
return nil, fmt.Errorf("missing ANTHROPIC_API_KEY")
}
return &ant.Service{
- HTTPC: client,
- URL: modelURL,
- APIKey: apiKey,
+ HTTPC: client,
+ URL: modelURL,
+ APIKey: apiKey,
+ DumpAntCalls: flags.dumpAntCalls,
}, nil
}
- if modelName == "gemini" {
+ if flags.modelName == "gemini" {
if apiKey == "" {
return nil, fmt.Errorf("missing %s", gem.GeminiAPIKeyEnv)
}
@@ -919,9 +924,9 @@
}, nil
}
- model := oai.ModelByUserName(modelName)
+ model := oai.ModelByUserName(flags.modelName)
if model == nil {
- return nil, fmt.Errorf("unknown model '%s', use -list-models to see available models", modelName)
+ return nil, fmt.Errorf("unknown model '%s', use -list-models to see available models", flags.modelName)
}
// Verify we have an API key, if necessary.
diff --git a/dockerimg/dockerimg.go b/dockerimg/dockerimg.go
index abf3421..d545a68 100644
--- a/dockerimg/dockerimg.go
+++ b/dockerimg/dockerimg.go
@@ -139,6 +139,9 @@
// PassthroughUpstream configures upstream remote for passthrough to innie
PassthroughUpstream bool
+
+ // DumpAntCalls enables dumping raw communications with Claude to files
+ DumpAntCalls bool
}
// LaunchContainer creates a docker container for a project, installs sketch and opens a connection to it.
@@ -645,6 +648,9 @@
if config.PassthroughUpstream {
cmdArgs = append(cmdArgs, "-passthrough-upstream")
}
+ if config.DumpAntCalls {
+ cmdArgs = append(cmdArgs, "-dump-ant-calls")
+ }
// Add additional docker arguments if provided
if config.DockerArgs != "" {
diff --git a/llm/ant/ant.go b/llm/ant/ant.go
index 2afc69f..0b65d48 100644
--- a/llm/ant/ant.go
+++ b/llm/ant/ant.go
@@ -59,11 +59,12 @@
// Service provides Claude completions.
// Fields should not be altered concurrently with calling any method on Service.
type Service struct {
- HTTPC *http.Client // defaults to http.DefaultClient if nil
- URL string // defaults to DefaultURL if empty
- APIKey string // must be non-empty
- Model string // defaults to DefaultModel if empty
- MaxTokens int // defaults to DefaultMaxTokens if zero
+ HTTPC *http.Client // defaults to http.DefaultClient if nil
+ URL string // defaults to DefaultURL if empty
+ APIKey string // must be non-empty
+ Model string // defaults to DefaultModel if empty
+ MaxTokens int // defaults to DefaultMaxTokens if zero
+ DumpAntCalls bool // whether to dump request/response text to files for debugging; defaults to false
}
var _ llm.Service = (*Service)(nil)
@@ -209,13 +210,8 @@
TokenEfficientToolUse bool `json:"-"` // DO NOT USE, broken on Anthropic's side as of 2025-02-28
}
-const dumpText = false // debugging toggle to dump raw communications with Claude using dumpToFile
-
// dumpToFile writes the content to a timestamped file in ~/.cache/sketch/, with typ in the filename.
func dumpToFile(typ string, content []byte) error {
- if !dumpText {
- return nil
- }
homeDir, err := os.UserHomeDir()
if err != nil {
return err
@@ -444,7 +440,7 @@
var payload []byte
var err error
- if dumpText || testing.Testing() {
+ if s.DumpAntCalls || testing.Testing() {
payload, err = json.MarshalIndent(request, "", " ")
} else {
payload, err = json.Marshal(request)
@@ -476,8 +472,10 @@
slog.WarnContext(ctx, "anthropic request sleep before retry", "sleep", sleep, "attempts", attempts)
time.Sleep(sleep)
}
- if err := dumpToFile("request", payload); err != nil {
- slog.WarnContext(ctx, "failed to dump request to file", "error", err)
+ if s.DumpAntCalls {
+ if err := dumpToFile("request", payload); err != nil {
+ slog.WarnContext(ctx, "failed to dump request to file", "error", err)
+ }
}
req, err := http.NewRequestWithContext(ctx, "POST", url, bytes.NewReader(payload))
if err != nil {
@@ -518,8 +516,10 @@
switch {
case resp.StatusCode == http.StatusOK:
- if err := dumpToFile("response", buf); err != nil {
- slog.WarnContext(ctx, "failed to dump response to file", "error", err)
+ if s.DumpAntCalls {
+ if err := dumpToFile("response", buf); err != nil {
+ slog.WarnContext(ctx, "failed to dump response to file", "error", err)
+ }
}
var response response
err = json.NewDecoder(bytes.NewReader(buf)).Decode(&response)