Compare commits


5 Commits

Author           SHA1        Message                                            Date
Richard Feldman  1c8300b4d2  Add test for reasoning_opaque and reasoning_text  2025-11-25 11:43:22 -05:00
Richard Feldman  a53133a733  Revise ChatMessage matching logic                 2025-11-25 11:43:22 -05:00
Richard Feldman  589be2ce8e  Send ChatMessageContent::empty() when necessary   2025-11-25 11:43:22 -05:00
Richard Feldman  eb3b879de4  Add reasoning_opaque and reasoning_text           2025-11-25 11:43:22 -05:00
Richard Feldman  69cb08bfd9  Add some copilot debug logging                    2025-11-25 11:43:22 -05:00
3 changed files with 206 additions and 9 deletions

View File

@@ -294,6 +294,10 @@ pub enum ChatMessage {
        content: ChatMessageContent,
        #[serde(default, skip_serializing_if = "Vec::is_empty")]
        tool_calls: Vec<ToolCall>,
        #[serde(default, skip_serializing_if = "Option::is_none")]
        reasoning_opaque: Option<String>,
        #[serde(default, skip_serializing_if = "Option::is_none")]
        reasoning_text: Option<String>,
    },
    User {
        content: ChatMessageContent,
@@ -386,6 +390,8 @@ pub struct ResponseDelta {
    pub role: Option<Role>,
    #[serde(default)]
    pub tool_calls: Vec<ToolCallChunk>,
    pub reasoning_opaque: Option<String>,
    pub reasoning_text: Option<String>,
}

#[derive(Deserialize, Debug, Eq, PartialEq)]
pub struct ToolCallChunk {
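The serde attributes above keep the new reasoning fields invisible on the wire unless they are actually present. A minimal standalone sketch of that behavior, using a hypothetical AssistantMessageSketch struct rather than the real ChatMessage/ResponseDelta types:

// Standalone sketch (not the actual zed types): how optional reasoning fields
// behave with serde's `default` + `skip_serializing_if = "Option::is_none"`.
use serde::{Deserialize, Serialize};

#[derive(Serialize, Deserialize, Debug, PartialEq)]
struct AssistantMessageSketch {
    content: String,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    reasoning_opaque: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    reasoning_text: Option<String>,
}

fn main() -> serde_json::Result<()> {
    // With both fields None, they are omitted from the serialized JSON entirely.
    let plain = AssistantMessageSketch {
        content: "hi".into(),
        reasoning_opaque: None,
        reasoning_text: None,
    };
    assert_eq!(serde_json::to_string(&plain)?, r#"{"content":"hi"}"#);

    // A payload that never mentions the fields still deserializes:
    // missing optional fields fall back to None.
    let parsed: AssistantMessageSketch = serde_json::from_str(r#"{"content":"hi"}"#)?;
    assert_eq!(parsed, plain);
    Ok(())
}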
@@ -786,13 +792,13 @@ async fn stream_completion(
    is_user_initiated: bool,
) -> Result<BoxStream<'static, Result<ResponseEvent>>> {
    let is_vision_request = request.messages.iter().any(|message| match message {
        ChatMessage::User { content }
        | ChatMessage::Assistant { content, .. }
        | ChatMessage::Tool { content, .. } => {
            matches!(content, ChatMessageContent::Multipart(parts) if parts.iter().any(|part| matches!(part, ChatMessagePart::Image { .. })))
        }
        _ => false,
    });

    let request_initiator = if is_user_initiated { "user" } else { "agent" };
@@ -817,6 +823,10 @@ async fn stream_completion(
    let is_streaming = request.stream;

    let json = serde_json::to_string(&request)?;
    eprintln!(
        "Copilot chat completion request to {}: {}",
        completion_url, json
    );
    let request = request_builder.body(AsyncBody::from(json))?;
    let mut response = client.send(request).await?;
@@ -824,6 +834,11 @@ async fn stream_completion(
        let mut body = Vec::new();
        response.body_mut().read_to_end(&mut body).await?;
        let body_str = std::str::from_utf8(&body)?;
        eprintln!(
            "Copilot chat completion HTTP error: status={}, response_body={}",
            response.status(),
            body_str
        );
        anyhow::bail!(
            "Failed to connect to API: {} {}",
            response.status(),
@@ -831,6 +846,11 @@
        );
    }

    eprintln!(
        "Copilot chat completion response status: {}",
        response.status()
    );
    if is_streaming {
        let reader = BufReader::new(response.into_body());
        Ok(reader
@@ -838,6 +858,7 @@ async fn stream_completion(
            .filter_map(|line| async move {
                match line {
                    Ok(line) => {
                        eprintln!("Copilot chat completion stream line: {}", line);
                        let line = line.strip_prefix("data: ")?;
                        if line.starts_with("[DONE]") {
                            return None;
@@ -851,7 +872,14 @@ async fn stream_completion(
                                    Some(Ok(response))
                                }
                            }
                            Err(error) => Some(Err(anyhow!(error))),
                            Err(error) => {
                                eprintln!(
                                    "Failed to parse Copilot chat completion stream event: {}\nLine: {}",
                                    error,
                                    line
                                );
                                Some(Err(anyhow!(error)))
                            }
                        }
                    }
                    Err(error) => Some(Err(anyhow!(error))),
@@ -862,6 +890,10 @@ async fn stream_completion(
        let mut body = Vec::new();
        response.body_mut().read_to_end(&mut body).await?;
        let body_str = std::str::from_utf8(&body)?;
        eprintln!(
            "Copilot chat completion non-streaming response body: {}",
            body_str
        );
        let response: ResponseEvent = serde_json::from_str(body_str)?;
        Ok(futures::stream::once(async move { Ok(response) }).boxed())
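The streaming branch above consumes server-sent-event lines: each useful line arrives as `data: <json>` and `data: [DONE]` terminates the stream. An isolated sketch of that framing, with a stand-in DeltaSketch payload rather than the real ResponseEvent:

// Sketch of the SSE line handling shown in the hunks above.
use serde::Deserialize;

#[derive(Deserialize, Debug)]
struct DeltaSketch {
    reasoning_text: Option<String>,
}

fn parse_sse_line(line: &str) -> Option<serde_json::Result<DeltaSketch>> {
    let payload = line.strip_prefix("data: ")?;
    if payload.starts_with("[DONE]") {
        return None; // end-of-stream sentinel, not an event
    }
    Some(serde_json::from_str(payload))
}

fn main() {
    let lines = [
        r#"data: {"reasoning_text":"Let me check the directory"}"#,
        "data: [DONE]",
    ];
    for line in lines {
        match parse_sse_line(line) {
            Some(Ok(delta)) => println!("delta: {:?}", delta.reasoning_text),
            Some(Err(error)) => eprintln!("parse error: {error}"),
            None => println!("stream finished or non-data line"),
        }
    }
}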

View File

@@ -313,15 +313,24 @@ pub async fn stream_response(
    };
    let is_streaming = request.stream;

    let request = request_builder.body(AsyncBody::from(serde_json::to_string(&request)?))?;
    let json = serde_json::to_string(&request)?;
    eprintln!("Copilot responses request to {}: {}", api_url, json);
    let request = request_builder.body(AsyncBody::from(json))?;
    let mut response = client.send(request).await?;

    if !response.status().is_success() {
        let mut body = String::new();
        response.body_mut().read_to_string(&mut body).await?;
        eprintln!(
            "Copilot responses HTTP error: status={}, response_body={}",
            response.status(),
            body
        );
        anyhow::bail!("Failed to connect to API: {} {}", response.status(), body);
    }

    eprintln!("Copilot responses response status: {}", response.status());

    if is_streaming {
        let reader = BufReader::new(response.into_body());
        Ok(reader
@@ -329,6 +338,7 @@ pub async fn stream_response(
            .filter_map(|line| async move {
                match line {
                    Ok(line) => {
                        eprintln!("Copilot responses stream line: {}", line);
                        let line = line.strip_prefix("data: ")?;
                        if line.starts_with("[DONE]") || line.is_empty() {
                            return None;
@@ -355,6 +365,7 @@ pub async fn stream_response(
        // Removes the need of having a method to map StreamEvent and another to map Response to a LanguageCompletionEvent
        let mut body = String::new();
        response.body_mut().read_to_string(&mut body).await?;
        eprintln!("Copilot responses non-streaming response body: {}", body);
        match serde_json::from_str::<Response>(&body) {
            Ok(response) => {

View File

@@ -367,12 +367,16 @@ pub fn map_to_language_model_completion_events(
    struct State {
        events: Pin<Box<dyn Send + Stream<Item = Result<ResponseEvent>>>>,
        tool_calls_by_index: HashMap<usize, RawToolCall>,
        reasoning_opaque: Option<String>,
        reasoning_text: Option<String>,
    }

    futures::stream::unfold(
        State {
            events,
            tool_calls_by_index: HashMap::default(),
            reasoning_opaque: None,
            reasoning_text: None,
        },
        move |mut state| async move {
            if let Some(event) = state.events.next().await {
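The provider threads the buffered reasoning through the stream with futures::stream::unfold, carrying a State value from one delta to the next. A reduced sketch of that pattern, with simplified stand-ins for the real State and ResponseEvent types:

// Reduced sketch of the unfold pattern used above: the state rides along with
// the stream and accumulates reasoning text across deltas.
use futures::{stream, StreamExt};

struct StateSketch {
    events: std::vec::IntoIter<Option<String>>,
    reasoning_text: Option<String>,
}

fn main() {
    let outputs = futures::executor::block_on(async {
        let state = StateSketch {
            events: vec![Some("Let me check".to_string()), None].into_iter(),
            reasoning_text: None,
        };
        stream::unfold(state, |mut state| async move {
            let event = state.events.next()?;
            // Capture reasoning text when a delta carries it, as the real code does.
            if let Some(text) = event {
                state.reasoning_text = Some(text);
            }
            Some((state.reasoning_text.clone(), state))
        })
        .collect::<Vec<_>>()
        .await
    });
    assert_eq!(outputs.last(), Some(&Some("Let me check".to_string())));
}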
@@ -403,6 +407,14 @@ pub fn map_to_language_model_completion_events(
                        events.push(Ok(LanguageModelCompletionEvent::Text(content)));
                    }

                    // Capture reasoning data from the delta (e.g. for Gemini 3)
                    if let Some(opaque) = delta.reasoning_opaque.clone() {
                        state.reasoning_opaque = Some(opaque);
                    }
                    if let Some(text) = delta.reasoning_text.clone() {
                        state.reasoning_text = Some(text);
                    }

                    for (index, tool_call) in delta.tool_calls.iter().enumerate() {
                        let tool_index = tool_call.index.unwrap_or(index);
                        let entry = state.tool_calls_by_index.entry(tool_index).or_default();
@@ -445,6 +457,30 @@ pub fn map_to_language_model_completion_events(
                            )));
                        }
                        Some("tool_calls") => {
                            // Emit reasoning details if we have them (e.g. for Gemini 3)
                            if state.reasoning_opaque.is_some()
                                || state.reasoning_text.is_some()
                            {
                                let mut details = serde_json::Map::new();
                                if let Some(opaque) = state.reasoning_opaque.take() {
                                    details.insert(
                                        "reasoning_opaque".to_string(),
                                        serde_json::Value::String(opaque),
                                    );
                                }
                                if let Some(text) = state.reasoning_text.take() {
                                    details.insert(
                                        "reasoning_text".to_string(),
                                        serde_json::Value::String(text),
                                    );
                                }
                                events.push(Ok(
                                    LanguageModelCompletionEvent::ReasoningDetails(
                                        serde_json::Value::Object(details),
                                    ),
                                ));
                            }

                            events.extend(state.tool_calls_by_index.drain().map(
                                |(_, tool_call)| {
                                    // The model can output an empty string
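When the turn finishes with tool_calls, the reasoning buffered in State is emitted once as a single JSON object under the two keys shown above, and the next hunk reads those same keys back when replaying the assistant message. A sketch of building and reading back that shape with serde_json (only the two keys from the diff are assumed):

// Sketch of the ReasoningDetails object shape and the later lookup.
fn main() {
    let mut details = serde_json::Map::new();
    details.insert(
        "reasoning_opaque".to_string(),
        serde_json::Value::String("encrypted_reasoning_token_xyz".to_string()),
    );
    details.insert(
        "reasoning_text".to_string(),
        serde_json::Value::String("Let me check the directory".to_string()),
    );
    let details = serde_json::Value::Object(details);

    // The extraction side mirrors the lookup used when replaying the message:
    let opaque = details
        .get("reasoning_opaque")
        .and_then(|v| v.as_str())
        .map(|s| s.to_string());
    assert_eq!(opaque.as_deref(), Some("encrypted_reasoning_token_xyz"));
}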
@@ -807,6 +843,22 @@ fn into_copilot_chat(
                buffer
            };

            // Extract reasoning_opaque and reasoning_text from reasoning_details
            let (reasoning_opaque, reasoning_text) =
                if let Some(details) = &message.reasoning_details {
                    let opaque = details
                        .get("reasoning_opaque")
                        .and_then(|v| v.as_str())
                        .map(|s| s.to_string());
                    let text = details
                        .get("reasoning_text")
                        .and_then(|v| v.as_str())
                        .map(|s| s.to_string());
                    (opaque, text)
                } else {
                    (None, None)
                };

            messages.push(ChatMessage::Assistant {
                content: if text_content.is_empty() {
                    ChatMessageContent::empty()
@@ -814,6 +866,8 @@ fn into_copilot_chat(
                    text_content.into()
                },
                tool_calls,
                reasoning_opaque,
                reasoning_text,
            });
        }
        Role::System => messages.push(ChatMessage::System {
@@ -1317,6 +1371,106 @@ mod tests {
            other => panic!("expected HttpResponseError, got {:?}", other),
        }
    }

    #[test]
    fn chat_completions_stream_maps_reasoning_data() {
        use copilot::copilot_chat::ResponseEvent;

        let events = vec![
            ResponseEvent {
                choices: vec![copilot::copilot_chat::ResponseChoice {
                    index: Some(0),
                    finish_reason: None,
                    delta: Some(copilot::copilot_chat::ResponseDelta {
                        content: None,
                        role: Some(copilot::copilot_chat::Role::Assistant),
                        tool_calls: vec![copilot::copilot_chat::ToolCallChunk {
                            index: Some(0),
                            id: Some("call_abc123".to_string()),
                            function: Some(copilot::copilot_chat::FunctionChunk {
                                name: Some("list_directory".to_string()),
                                arguments: Some("{\"path\":\"test\"}".to_string()),
                                thought_signature: None,
                            }),
                        }],
                        reasoning_opaque: Some("encrypted_reasoning_token_xyz".to_string()),
                        reasoning_text: Some("Let me check the directory".to_string()),
                    }),
                    message: None,
                }],
                id: "chatcmpl-123".to_string(),
                usage: None,
            },
            ResponseEvent {
                choices: vec![copilot::copilot_chat::ResponseChoice {
                    index: Some(0),
                    finish_reason: Some("tool_calls".to_string()),
                    delta: Some(copilot::copilot_chat::ResponseDelta {
                        content: None,
                        role: None,
                        tool_calls: vec![],
                        reasoning_opaque: None,
                        reasoning_text: None,
                    }),
                    message: None,
                }],
                id: "chatcmpl-123".to_string(),
                usage: None,
            },
        ];

        let mapped = futures::executor::block_on(async {
            map_to_language_model_completion_events(
                Box::pin(futures::stream::iter(events.into_iter().map(Ok))),
                true,
            )
            .collect::<Vec<_>>()
            .await
        });

        let mut has_reasoning_details = false;
        let mut has_tool_use = false;
        let mut reasoning_opaque_value: Option<String> = None;
        let mut reasoning_text_value: Option<String> = None;

        for event_result in mapped {
            match event_result {
                Ok(LanguageModelCompletionEvent::ReasoningDetails(details)) => {
                    has_reasoning_details = true;
                    reasoning_opaque_value = details
                        .get("reasoning_opaque")
                        .and_then(|v| v.as_str())
                        .map(|s| s.to_string());
                    reasoning_text_value = details
                        .get("reasoning_text")
                        .and_then(|v| v.as_str())
                        .map(|s| s.to_string());
                }
                Ok(LanguageModelCompletionEvent::ToolUse(tool_use)) => {
                    has_tool_use = true;
                    assert_eq!(tool_use.id.to_string(), "call_abc123");
                    assert_eq!(tool_use.name.as_ref(), "list_directory");
                }
                _ => {}
            }
        }

        assert!(
            has_reasoning_details,
            "Should emit ReasoningDetails event for Gemini 3 reasoning"
        );
        assert!(has_tool_use, "Should emit ToolUse event");
        assert_eq!(
            reasoning_opaque_value,
            Some("encrypted_reasoning_token_xyz".to_string()),
            "Should capture reasoning_opaque"
        );
        assert_eq!(
            reasoning_text_value,
            Some("Let me check the directory".to_string()),
            "Should capture reasoning_text"
        );
    }
}
struct ConfigurationView {
copilot_status: Option<copilot::Status>,