blob: 10b474e1181fbd83aba9328304e6b03ee055c0e3 [file] [log] [blame]
Josh Bleecher Snyder4f84ab72025-04-22 16:40:54 -07001package ant
2
3import (
4 "bytes"
5 "cmp"
6 "context"
7 "encoding/json"
Josh Bleecher Snydera4500c92025-05-15 15:38:32 -07008 "errors"
Josh Bleecher Snyder4f84ab72025-04-22 16:40:54 -07009 "fmt"
10 "io"
11 "log/slog"
12 "math/rand/v2"
13 "net/http"
14 "strings"
15 "testing"
16 "time"
17
18 "sketch.dev/llm"
19)
20
// Service-level defaults used when the corresponding Service field is unset.
const (
	DefaultModel = Claude4Sonnet
	// See https://docs.anthropic.com/en/docs/about-claude/models/all-models for
	// current maximums. There's currently a flag to enable 128k output (output-128k-2025-02-19)
	DefaultMaxTokens = 8192
	// APIKeyEnv is the name of the environment variable conventionally
	// holding the Anthropic API key.
	APIKeyEnv  = "ANTHROPIC_API_KEY"
	DefaultURL = "https://api.anthropic.com/v1/messages"
)

// Known Claude model identifiers, as accepted by the Messages API.
const (
	Claude35Sonnet = "claude-3-5-sonnet-20241022"
	Claude35Haiku  = "claude-3-5-haiku-20241022"
	Claude37Sonnet = "claude-3-7-sonnet-20250219"
	Claude4Sonnet  = "claude-sonnet-4-20250514"
	Claude4Opus    = "claude-opus-4-20250514"
)
37
Philip Zeyligerb8a8f352025-06-02 07:39:37 -070038// TokenContextWindow returns the maximum token context window size for this service
39func (s *Service) TokenContextWindow() int {
40 model := s.Model
41 if model == "" {
42 model = DefaultModel
43 }
44
45 switch model {
46 case Claude35Sonnet, Claude37Sonnet:
47 return 200000
48 case Claude35Haiku:
49 return 200000
50 case Claude4Sonnet, Claude4Opus:
51 return 200000
52 default:
53 // Default for unknown models
54 return 200000
55 }
56}
57
// Service provides Claude completions.
// Fields should not be altered concurrently with calling any method on Service.
type Service struct {
	HTTPC     *http.Client // defaults to http.DefaultClient if nil
	URL       string       // defaults to DefaultURL if empty
	APIKey    string       // must be non-empty; sent as the X-API-Key header
	Model     string       // defaults to DefaultModel if empty
	MaxTokens int          // defaults to DefaultMaxTokens if zero
	DumpLLM   bool         // whether to dump request/response text to files for debugging; defaults to false
}

// Compile-time check that *Service satisfies llm.Service.
var _ llm.Service = (*Service)(nil)
70
// content is a single content block within a message, mirroring the
// Anthropic Messages API. One struct covers all block types (text, image,
// thinking, redacted_thinking, tool_use, tool_result); which fields are
// meaningful depends on Type.
type content struct {
	// https://docs.anthropic.com/en/api/messages
	ID   string `json:"id,omitempty"`
	Type string `json:"type,omitempty"`

	// Subtly, an empty string appears in tool results often, so we have
	// to distinguish between empty string and no string.
	// Underlying error looks like one of:
	// "messages.46.content.0.tool_result.content.0.text.text: Field required""
	// "messages.1.content.1.tool_use.text: Extra inputs are not permitted"
	//
	// I haven't found a super great source for the API, but
	// https://github.com/anthropics/anthropic-sdk-typescript/blob/main/src/resources/messages/messages.ts
	// is somewhat acceptable but hard to read.
	Text      *string         `json:"text,omitempty"`
	MediaType string          `json:"media_type,omitempty"` // for image
	Source    json.RawMessage `json:"source,omitempty"`     // for image

	// for thinking
	Thinking  string `json:"thinking,omitempty"`
	Data      string `json:"data,omitempty"`      // for redacted_thinking or image
	Signature string `json:"signature,omitempty"` // for thinking

	// for tool_use
	ToolName  string          `json:"name,omitempty"`
	ToolInput json.RawMessage `json:"input,omitempty"`

	// for tool_result
	ToolUseID string `json:"tool_use_id,omitempty"`
	ToolError bool   `json:"is_error,omitempty"`
	// note the recursive nature here; message looks like:
	// {
	//  "role": "user",
	//  "content": [
	//   {
	//    "type": "tool_result",
	//    "tool_use_id": "toolu_01A09q90qw90lq917835lq9",
	//    "content": [
	//     {"type": "text", "text": "15 degrees"},
	//     {
	//      "type": "image",
	//      "source": {
	//       "type": "base64",
	//       "media_type": "image/jpeg",
	//       "data": "/9j/4AAQSkZJRg...",
	//      }
	//     }
	//    ]
	//   }
	//  ]
	//}
	ToolResult []content `json:"content,omitempty"`

	// timing information for tool_result; not sent to Claude
	StartTime *time.Time `json:"-"`
	EndTime   *time.Time `json:"-"`

	// CacheControl marks this block as a prompt-caching breakpoint;
	// see fromLLMCache for the ephemeral marker written here.
	CacheControl json.RawMessage `json:"cache_control,omitempty"`
}
130
// message represents a message in the conversation.
type message struct {
	Role    string    `json:"role"` // "assistant" or "user"; see fromLLMRole
	Content []content `json:"content"`
	ToolUse *toolUse  `json:"tool_use,omitempty"` // use to control whether/which tool to use
}
137
// toolUse represents a tool use in the message content.
type toolUse struct {
	ID   string `json:"id"`
	Name string `json:"name"` // name of the tool being invoked
}
143
// tool represents a tool available to Claude.
type tool struct {
	Name string `json:"name"`
	// Type is used by the text editor tool; see
	// https://docs.anthropic.com/en/docs/build-with-claude/tool-use/text-editor-tool
	Type        string          `json:"type,omitempty"`
	Description string          `json:"description,omitempty"`
	InputSchema json.RawMessage `json:"input_schema,omitempty"` // JSON Schema for the tool's input
}
153
// usage represents the billing and rate-limit usage.
type usage struct {
	InputTokens              uint64  `json:"input_tokens"`
	CacheCreationInputTokens uint64  `json:"cache_creation_input_tokens"`
	CacheReadInputTokens     uint64  `json:"cache_read_input_tokens"`
	OutputTokens             uint64  `json:"output_tokens"`
	CostUSD                  float64 `json:"cost_usd"`
}

// Add accumulates delta's token counts and cost into u, field by field.
func (u *usage) Add(delta usage) {
	u.InputTokens += delta.InputTokens
	u.CacheCreationInputTokens += delta.CacheCreationInputTokens
	u.CacheReadInputTokens += delta.CacheReadInputTokens
	u.OutputTokens += delta.OutputTokens
	u.CostUSD += delta.CostUSD
}
170
// response represents the response from the message API.
type response struct {
	ID           string    `json:"id"`
	Type         string    `json:"type"`
	Role         string    `json:"role"`
	Model        string    `json:"model"`
	Content      []content `json:"content"`
	StopReason   string    `json:"stop_reason"` // e.g. "end_turn", "max_tokens", "tool_use"; see toLLMStopReason
	StopSequence *string   `json:"stop_sequence,omitempty"`
	Usage        usage     `json:"usage"`
}
182
// toolChoice controls whether/which tool Claude may use;
// see fromLLMToolChoiceType for the Type values sent.
type toolChoice struct {
	Type string `json:"type"`
	Name string `json:"name,omitempty"` // specific tool name, used with Type "tool"
}
187
// systemContent is one block of the system prompt.
// https://docs.anthropic.com/en/api/messages#body-system
type systemContent struct {
	Text         string          `json:"text,omitempty"`
	Type         string          `json:"type,omitempty"`
	CacheControl json.RawMessage `json:"cache_control,omitempty"` // see fromLLMCache
}
194
// request represents the request payload for creating a message.
type request struct {
	Model         string          `json:"model"`
	Messages      []message       `json:"messages"`
	ToolChoice    *toolChoice     `json:"tool_choice,omitempty"`
	MaxTokens     int             `json:"max_tokens"`
	Tools         []*tool         `json:"tools,omitempty"`
	Stream        bool            `json:"stream,omitempty"`
	System        []systemContent `json:"system,omitempty"`
	Temperature   float64         `json:"temperature,omitempty"`
	TopK          int             `json:"top_k,omitempty"`
	TopP          float64         `json:"top_p,omitempty"`
	StopSequences []string        `json:"stop_sequences,omitempty"`

	// TokenEfficientToolUse opts in to the token-efficient-tool-use beta
	// header; never serialized into the JSON body.
	TokenEfficientToolUse bool `json:"-"` // DO NOT USE, broken on Anthropic's side as of 2025-02-28
}
211
// mapped returns a new slice containing f applied to each element of s,
// preserving order. The result always has len(s) elements.
func mapped[Slice ~[]E, E, T any](s Slice, f func(E) T) []T {
	result := make([]T, 0, len(s))
	for _, elem := range s {
		result = append(result, f(elem))
	}
	return result
}
219
// inverted returns the value-to-key inverse of m. It panics if m is not
// injective (two keys sharing a value), since the inverse would silently
// drop an entry.
func inverted[K, V cmp.Ordered](m map[K]V) map[V]K {
	out := make(map[V]K, len(m))
	for key, val := range m {
		if _, dup := out[val]; dup {
			panic(fmt.Errorf("inverted map has multiple keys for value %v", val))
		}
		out[val] = key
	}
	return out
}
230
// Lookup tables translating between llm-package enums and the strings used
// on the Anthropic wire. The to* maps are derived via inverted so the two
// directions cannot drift apart.
var (
	fromLLMRole = map[llm.MessageRole]string{
		llm.MessageRoleAssistant: "assistant",
		llm.MessageRoleUser:      "user",
	}
	toLLMRole = inverted(fromLLMRole)

	fromLLMContentType = map[llm.ContentType]string{
		llm.ContentTypeText:             "text",
		llm.ContentTypeThinking:         "thinking",
		llm.ContentTypeRedactedThinking: "redacted_thinking",
		llm.ContentTypeToolUse:          "tool_use",
		llm.ContentTypeToolResult:       "tool_result",
	}
	toLLMContentType = inverted(fromLLMContentType)

	fromLLMToolChoiceType = map[llm.ToolChoiceType]string{
		llm.ToolChoiceTypeAuto: "auto",
		llm.ToolChoiceTypeAny:  "any",
		llm.ToolChoiceTypeNone: "none",
		llm.ToolChoiceTypeTool: "tool",
	}

	// toLLMStopReason has no from* counterpart: stop reasons only travel
	// from Anthropic to us.
	toLLMStopReason = map[string]llm.StopReason{
		"stop_sequence": llm.StopReasonStopSequence,
		"max_tokens":    llm.StopReasonMaxTokens,
		"end_turn":      llm.StopReasonEndTurn,
		"tool_use":      llm.StopReasonToolUse,
		"refusal":       llm.StopReasonRefusal,
	}
)
262
// fromLLMCache converts the llm-level cache flag into Anthropic's
// cache_control payload: an ephemeral marker when caching is requested,
// nil (field omitted) otherwise.
func fromLLMCache(c bool) json.RawMessage {
	if c {
		return json.RawMessage(`{"type":"ephemeral"}`)
	}
	return nil
}
269
270func fromLLMContent(c llm.Content) content {
Philip Zeyliger72252cb2025-05-10 17:00:08 -0700271 var toolResult []content
272 if len(c.ToolResult) > 0 {
273 toolResult = make([]content, len(c.ToolResult))
274 for i, tr := range c.ToolResult {
275 // For image content inside a tool_result, we need to map it to "image" type
276 if tr.MediaType != "" && tr.MediaType == "image/jpeg" || tr.MediaType == "image/png" {
277 // Format as an image for Claude
278 toolResult[i] = content{
279 Type: "image",
280 Source: json.RawMessage(fmt.Sprintf(`{"type":"base64","media_type":"%s","data":"%s"}`,
281 tr.MediaType, tr.Data)),
282 }
283 } else {
284 toolResult[i] = fromLLMContent(tr)
285 }
286 }
287 }
288
289 d := content{
Josh Bleecher Snyder4f84ab72025-04-22 16:40:54 -0700290 ID: c.ID,
291 Type: fromLLMContentType[c.Type],
Philip Zeyliger72252cb2025-05-10 17:00:08 -0700292 MediaType: c.MediaType,
Josh Bleecher Snyder4f84ab72025-04-22 16:40:54 -0700293 Thinking: c.Thinking,
294 Data: c.Data,
295 Signature: c.Signature,
296 ToolName: c.ToolName,
297 ToolInput: c.ToolInput,
298 ToolUseID: c.ToolUseID,
299 ToolError: c.ToolError,
Philip Zeyliger72252cb2025-05-10 17:00:08 -0700300 ToolResult: toolResult,
Josh Bleecher Snyder4f84ab72025-04-22 16:40:54 -0700301 CacheControl: fromLLMCache(c.Cache),
302 }
Philip Zeyliger72252cb2025-05-10 17:00:08 -0700303 // Anthropic API complains if Text is specified when it shouldn't be
304 // or not specified when it's the empty string.
305 if c.Type != llm.ContentTypeToolResult && c.Type != llm.ContentTypeToolUse {
306 d.Text = &c.Text
307 }
308 return d
Josh Bleecher Snyder4f84ab72025-04-22 16:40:54 -0700309}
310
311func fromLLMToolUse(tu *llm.ToolUse) *toolUse {
312 if tu == nil {
313 return nil
314 }
315 return &toolUse{
316 ID: tu.ID,
317 Name: tu.Name,
318 }
319}
320
321func fromLLMMessage(msg llm.Message) message {
322 return message{
323 Role: fromLLMRole[msg.Role],
324 Content: mapped(msg.Content, fromLLMContent),
325 ToolUse: fromLLMToolUse(msg.ToolUse),
326 }
327}
328
329func fromLLMToolChoice(tc *llm.ToolChoice) *toolChoice {
330 if tc == nil {
331 return nil
332 }
333 return &toolChoice{
334 Type: fromLLMToolChoiceType[tc.Type],
335 Name: tc.Name,
336 }
337}
338
339func fromLLMTool(t *llm.Tool) *tool {
340 return &tool{
341 Name: t.Name,
342 Type: t.Type,
343 Description: t.Description,
344 InputSchema: t.InputSchema,
345 }
346}
347
348func fromLLMSystem(s llm.SystemContent) systemContent {
349 return systemContent{
350 Text: s.Text,
351 Type: s.Type,
352 CacheControl: fromLLMCache(s.Cache),
353 }
354}
355
356func (s *Service) fromLLMRequest(r *llm.Request) *request {
357 return &request{
358 Model: cmp.Or(s.Model, DefaultModel),
359 Messages: mapped(r.Messages, fromLLMMessage),
360 MaxTokens: cmp.Or(s.MaxTokens, DefaultMaxTokens),
361 ToolChoice: fromLLMToolChoice(r.ToolChoice),
362 Tools: mapped(r.Tools, fromLLMTool),
363 System: mapped(r.System, fromLLMSystem),
364 }
365}
366
367func toLLMUsage(u usage) llm.Usage {
368 return llm.Usage{
369 InputTokens: u.InputTokens,
370 CacheCreationInputTokens: u.CacheCreationInputTokens,
371 CacheReadInputTokens: u.CacheReadInputTokens,
372 OutputTokens: u.OutputTokens,
373 CostUSD: u.CostUSD,
374 }
375}
376
377func toLLMContent(c content) llm.Content {
Philip Zeyliger72252cb2025-05-10 17:00:08 -0700378 // Convert toolResult from []content to []llm.Content
379 var toolResultContents []llm.Content
380 if len(c.ToolResult) > 0 {
381 toolResultContents = make([]llm.Content, len(c.ToolResult))
382 for i, tr := range c.ToolResult {
383 toolResultContents[i] = toLLMContent(tr)
384 }
385 }
386
387 ret := llm.Content{
Josh Bleecher Snyder4f84ab72025-04-22 16:40:54 -0700388 ID: c.ID,
389 Type: toLLMContentType[c.Type],
Philip Zeyliger72252cb2025-05-10 17:00:08 -0700390 MediaType: c.MediaType,
Josh Bleecher Snyder4f84ab72025-04-22 16:40:54 -0700391 Thinking: c.Thinking,
392 Data: c.Data,
393 Signature: c.Signature,
394 ToolName: c.ToolName,
395 ToolInput: c.ToolInput,
396 ToolUseID: c.ToolUseID,
397 ToolError: c.ToolError,
Philip Zeyliger72252cb2025-05-10 17:00:08 -0700398 ToolResult: toolResultContents,
Josh Bleecher Snyder4f84ab72025-04-22 16:40:54 -0700399 }
Philip Zeyliger72252cb2025-05-10 17:00:08 -0700400 if c.Text != nil {
401 ret.Text = *c.Text
402 }
403 return ret
Josh Bleecher Snyder4f84ab72025-04-22 16:40:54 -0700404}
405
406func toLLMResponse(r *response) *llm.Response {
407 return &llm.Response{
408 ID: r.ID,
409 Type: r.Type,
410 Role: toLLMRole[r.Role],
411 Model: r.Model,
412 Content: mapped(r.Content, toLLMContent),
413 StopReason: toLLMStopReason[r.StopReason],
414 StopSequence: r.StopSequence,
415 Usage: toLLMUsage(r.Usage),
416 }
417}
418
419// Do sends a request to Anthropic.
420func (s *Service) Do(ctx context.Context, ir *llm.Request) (*llm.Response, error) {
421 request := s.fromLLMRequest(ir)
422
423 var payload []byte
424 var err error
Josh Bleecher Snyder57afbca2025-07-23 13:29:59 -0700425 if s.DumpLLM || testing.Testing() {
Josh Bleecher Snyder4f84ab72025-04-22 16:40:54 -0700426 payload, err = json.MarshalIndent(request, "", " ")
427 } else {
428 payload, err = json.Marshal(request)
429 payload = append(payload, '\n')
430 }
431 if err != nil {
432 return nil, err
433 }
434
435 if false {
436 fmt.Printf("claude request payload:\n%s\n", payload)
437 }
438
439 backoff := []time.Duration{15 * time.Second, 30 * time.Second, time.Minute}
440 largerMaxTokens := false
441 var partialUsage usage
442
443 url := cmp.Or(s.URL, DefaultURL)
444 httpc := cmp.Or(s.HTTPC, http.DefaultClient)
445
446 // retry loop
Josh Bleecher Snydera4500c92025-05-15 15:38:32 -0700447 var errs error // accumulated errors across all attempts
Josh Bleecher Snyder4f84ab72025-04-22 16:40:54 -0700448 for attempts := 0; ; attempts++ {
Josh Bleecher Snydera4500c92025-05-15 15:38:32 -0700449 if attempts > 10 {
450 return nil, fmt.Errorf("anthropic request failed after %d attempts: %w", attempts, errs)
451 }
452 if attempts > 0 {
453 sleep := backoff[min(attempts, len(backoff)-1)] + time.Duration(rand.Int64N(int64(time.Second)))
454 slog.WarnContext(ctx, "anthropic request sleep before retry", "sleep", sleep, "attempts", attempts)
455 time.Sleep(sleep)
456 }
Josh Bleecher Snyder57afbca2025-07-23 13:29:59 -0700457 if s.DumpLLM {
458 if err := llm.DumpToFile("request", url, payload); err != nil {
Josh Bleecher Snydere75d0ea2025-07-21 23:50:44 +0000459 slog.WarnContext(ctx, "failed to dump request to file", "error", err)
460 }
Josh Bleecher Snyder4f84ab72025-04-22 16:40:54 -0700461 }
462 req, err := http.NewRequestWithContext(ctx, "POST", url, bytes.NewReader(payload))
463 if err != nil {
Josh Bleecher Snydera4500c92025-05-15 15:38:32 -0700464 return nil, errors.Join(errs, err)
Josh Bleecher Snyder4f84ab72025-04-22 16:40:54 -0700465 }
466
467 req.Header.Set("Content-Type", "application/json")
468 req.Header.Set("X-API-Key", s.APIKey)
469 req.Header.Set("Anthropic-Version", "2023-06-01")
470
471 var features []string
472 if request.TokenEfficientToolUse {
473 features = append(features, "token-efficient-tool-use-2025-02-19")
474 }
475 if largerMaxTokens {
476 features = append(features, "output-128k-2025-02-19")
477 request.MaxTokens = 128 * 1024
478 }
479 if len(features) > 0 {
480 req.Header.Set("anthropic-beta", strings.Join(features, ","))
481 }
482
483 resp, err := httpc.Do(req)
484 if err != nil {
Josh Bleecher Snyder3b5646f2025-05-23 16:47:53 +0000485 // Don't retry httprr cache misses
486 if strings.Contains(err.Error(), "cached HTTP response not found") {
487 return nil, err
488 }
Josh Bleecher Snydera4500c92025-05-15 15:38:32 -0700489 errs = errors.Join(errs, err)
490 continue
Josh Bleecher Snyder4f84ab72025-04-22 16:40:54 -0700491 }
Josh Bleecher Snydera4500c92025-05-15 15:38:32 -0700492 buf, err := io.ReadAll(resp.Body)
Josh Bleecher Snyder4f84ab72025-04-22 16:40:54 -0700493 resp.Body.Close()
Josh Bleecher Snydera4500c92025-05-15 15:38:32 -0700494 if err != nil {
495 errs = errors.Join(errs, err)
496 continue
497 }
Josh Bleecher Snyder4f84ab72025-04-22 16:40:54 -0700498
499 switch {
500 case resp.StatusCode == http.StatusOK:
Josh Bleecher Snyder57afbca2025-07-23 13:29:59 -0700501 if s.DumpLLM {
502 if err := llm.DumpToFile("response", "", buf); err != nil {
Josh Bleecher Snydere75d0ea2025-07-21 23:50:44 +0000503 slog.WarnContext(ctx, "failed to dump response to file", "error", err)
504 }
Josh Bleecher Snyder4f84ab72025-04-22 16:40:54 -0700505 }
506 var response response
507 err = json.NewDecoder(bytes.NewReader(buf)).Decode(&response)
508 if err != nil {
Josh Bleecher Snydera4500c92025-05-15 15:38:32 -0700509 return nil, errors.Join(errs, err)
Josh Bleecher Snyder4f84ab72025-04-22 16:40:54 -0700510 }
511 if response.StopReason == "max_tokens" && !largerMaxTokens {
Josh Bleecher Snyder29fea842025-05-06 01:51:09 +0000512 slog.InfoContext(ctx, "anthropic_retrying_with_larger_tokens", "message", "Retrying Anthropic API call with larger max tokens size")
Josh Bleecher Snyder4f84ab72025-04-22 16:40:54 -0700513 // Retry with more output tokens.
514 largerMaxTokens = true
Josh Bleecher Snyder59bb27d2025-06-05 07:32:10 -0700515 response.Usage.CostUSD = llm.CostUSDFromResponse(resp.Header)
Josh Bleecher Snyder4f84ab72025-04-22 16:40:54 -0700516 partialUsage = response.Usage
517 continue
518 }
519
520 // Calculate and set the cost_usd field
521 if largerMaxTokens {
522 response.Usage.Add(partialUsage)
523 }
Josh Bleecher Snyder59bb27d2025-06-05 07:32:10 -0700524 response.Usage.CostUSD = llm.CostUSDFromResponse(resp.Header)
Josh Bleecher Snyder4f84ab72025-04-22 16:40:54 -0700525
526 return toLLMResponse(&response), nil
527 case resp.StatusCode >= 500 && resp.StatusCode < 600:
Josh Bleecher Snydera4500c92025-05-15 15:38:32 -0700528 // server error, retry
529 slog.WarnContext(ctx, "anthropic_request_failed", "response", string(buf), "status_code", resp.StatusCode)
530 errs = errors.Join(errs, fmt.Errorf("status %v: %s", resp.Status, buf))
531 continue
Josh Bleecher Snyder4f84ab72025-04-22 16:40:54 -0700532 case resp.StatusCode == 429:
Josh Bleecher Snydera4500c92025-05-15 15:38:32 -0700533 // rate limited, retry
534 slog.WarnContext(ctx, "anthropic_request_rate_limited", "response", string(buf))
535 errs = errors.Join(errs, fmt.Errorf("status %v: %s", resp.Status, buf))
536 continue
537 case resp.StatusCode >= 400 && resp.StatusCode < 500:
538 // some other 400, probably unrecoverable
539 slog.WarnContext(ctx, "anthropic_request_failed", "response", string(buf), "status_code", resp.StatusCode)
540 return nil, errors.Join(errs, fmt.Errorf("status %v: %s", resp.Status, buf))
Josh Bleecher Snyder4f84ab72025-04-22 16:40:54 -0700541 default:
Josh Bleecher Snydera4500c92025-05-15 15:38:32 -0700542 // ...retry, I guess?
543 slog.WarnContext(ctx, "anthropic_request_failed", "response", string(buf), "status_code", resp.StatusCode)
544 errs = errors.Join(errs, fmt.Errorf("status %v: %s", resp.Status, buf))
545 continue
Josh Bleecher Snyder4f84ab72025-04-22 16:40:54 -0700546 }
547 }
548}