I just ran into this issue as well, using Anthropic; here is the JSON from the ERROR log entry (the messages, system, and tools fields are redacted for security and brevity):
{
  "error": {
    "name": "AI_APICallError",
    "url": "https://api.anthropic.com/v1/messages",
    "requestBodyValues": {
      "model": "claude-sonnet-4-6",
      "max_tokens": 32000,
      "stream": true
    },
    "statusCode": 400,
    "responseHeaders": {
      "anthropic-organization-id": "18aa8ab8-a2eb-4e1f-8865-87023f2a0f7f",
      "cf-cache-status": "DYNAMIC",
      "cf-ray": "9eded6e1cfd4dcfe-SJC",
      "connection": "keep-alive",
      "content-encoding": "gzip",
      "content-security-policy": "default-src 'none'; frame-ancestors 'none'",
      "content-type": "application/json",
      "date": "Fri, 17 Apr 2026 22:24:20 GMT",
      "request-id": "req_011CaA8K4i7kBdnE9ZcQcTgn",
      "server": "cloudflare",
      "server-timing": "x-originResponse;dur=43",
      "strict-transport-security": "max-age=31536000; includeSubDomains; preload",
      "transfer-encoding": "chunked",
      "vary": "Accept-Encoding",
      "x-envoy-upstream-service-time": "42",
      "x-robots-tag": "none",
      "x-should-retry": "false",
      "set-cookie": "_cfuvid=jfz1zQxCIrGW9VuquDDC14GJzvhlKp0c7CpgUl7LFT4-1776464660.7643719-1.0.1.1-khLHScW8Kj9zCTmZgOjBTe33xWmwSTD.Lwz.eftBiqk; HttpOnly; SameSite=None; Secure; Path=/; Domain=api.anthropic.com"
    },
    "responseBody": "{\"type\":\"error\",\"error\":{\"type\":\"invalid_request_error\",\"message\":\"This model does not support assistant message prefill. The conversation must end with a user message.\"},\"request_id\":\"req_011CaA8K4i7kBdnE9ZcQcTgn\"}",
    "isRetryable": false,
    "data": {
      "type": "error",
      "error": {
        "type": "invalid_request_error",
        "message": "This model does not support assistant message prefill. The conversation must end with a user message."
      }
    }
  }
}
ERROR 2026-04-17T22:24:20 +98ms service=llm providerID=anthropic modelID=claude-sonnet-4-6 sessionID=ses_262915ac3ffeb0h3F56QYx3GDA small=false agent=build mode=primary
Originally posted by @caleb-reyes-op in #470