Compare commits: fix-git-ht...copy-paste (26 commits)
| Author | SHA1 | Date |
|---|---|---|
| | bd6bb9204f | |
| | 4a90eba0da | |
| | 3c183c4698 | |
| | ab03680094 | |
| | 7e76aea435 | |
| | c7d2fa8b1b | |
| | 76966b33f7 | |
| | 909e31a48d | |
| | 34053860c9 | |
| | 42727dd74e | |
| | cac634960e | |
| | bd3d81a0e2 | |
| | e600b35dcd | |
| | 63837e5d43 | |
| | fbf71cf8f8 | |
| | 2d6e83d1d0 | |
| | 092610bc15 | |
| | 257e8aaacc | |
| | 5664f1f95d | |
| | d60a29bbe6 | |
| | e7e59aa10c | |
| | 61b204bedb | |
| | d9c42a817f | |
| | bbdefeab25 | |
| | 4667e2ec22 | |
| | 8c8d350b61 | |
Cargo.lock (generated, 2259 lines changed): diff suppressed because it is too large.
@@ -66,6 +66,8 @@ semantic_index.workspace = true
serde.workspace = true
serde_json.workspace = true
settings.workspace = true
similar.workspace = true
smallvec.workspace = true
smol.workspace = true
telemetry_events.workspace = true
terminal.workspace = true

@@ -33,12 +33,12 @@ use editor::{
|
||||
use editor::{display_map::CreaseId, FoldPlaceholder};
|
||||
use fs::Fs;
|
||||
use gpui::{
|
||||
canvas, div, percentage, point, Action, Animation, AnimationExt, AnyElement, AnyView,
|
||||
AppContext, AsyncWindowContext, ClipboardItem, Context as _, DismissEvent, Empty, Entity,
|
||||
EntityId, EventEmitter, FocusHandle, FocusableView, FontWeight, InteractiveElement,
|
||||
IntoElement, Model, ParentElement, Pixels, ReadGlobal, Render, SharedString,
|
||||
StatefulInteractiveElement, Styled, Subscription, Task, Transformation, UpdateGlobal, View,
|
||||
ViewContext, VisualContext, WeakView, WindowContext,
|
||||
canvas, div, img, percentage, point, size, Action, Animation, AnimationExt, AnyElement,
|
||||
AnyView, AppContext, AsyncWindowContext, ClipboardEntry, ClipboardItem, Context as _,
|
||||
DismissEvent, Empty, Entity, EntityId, EventEmitter, FocusHandle, FocusableView, FontWeight,
|
||||
InteractiveElement, IntoElement, Model, ParentElement, Pixels, ReadGlobal, Render, RenderImage,
|
||||
SharedString, Size, StatefulInteractiveElement, Styled, Subscription, Task, Transformation,
|
||||
UpdateGlobal, View, ViewContext, VisualContext, WeakView, WindowContext,
|
||||
};
|
||||
use indexed_docs::IndexedDocsStore;
|
||||
use language::{
|
||||
@@ -1609,6 +1609,7 @@ pub struct ContextEditor {
|
||||
lsp_adapter_delegate: Option<Arc<dyn LspAdapterDelegate>>,
|
||||
editor: View<Editor>,
|
||||
blocks: HashSet<CustomBlockId>,
|
||||
image_blocks: HashSet<CustomBlockId>,
|
||||
scroll_position: Option<ScrollPosition>,
|
||||
remote_id: Option<workspace::ViewId>,
|
||||
pending_slash_command_creases: HashMap<Range<language::Anchor>, CreaseId>,
|
||||
@@ -1665,6 +1666,7 @@ impl ContextEditor {
|
||||
editor,
|
||||
lsp_adapter_delegate,
|
||||
blocks: Default::default(),
|
||||
image_blocks: Default::default(),
|
||||
scroll_position: None,
|
||||
remote_id: None,
|
||||
fs,
|
||||
@@ -1679,6 +1681,7 @@ impl ContextEditor {
|
||||
error_message: None,
|
||||
};
|
||||
this.update_message_headers(cx);
|
||||
this.update_image_blocks(cx);
|
||||
this.insert_slash_command_output_sections(sections, cx);
|
||||
this
|
||||
}
|
||||
@@ -2041,6 +2044,7 @@ impl ContextEditor {
|
||||
match event {
|
||||
ContextEvent::MessagesEdited => {
|
||||
self.update_message_headers(cx);
|
||||
self.update_image_blocks(cx);
|
||||
self.context.update(cx, |context, cx| {
|
||||
context.save(Some(Duration::from_millis(500)), self.fs.clone(), cx);
|
||||
});
|
||||
@@ -3069,6 +3073,8 @@ impl ContextEditor {
let selection = editor.selections.newest::<usize>(cx);
let mut copied_text = String::new();
let mut spanned_messages = 0;
let mut clipboard_entries: Vec<ClipboardEntry> = Vec::new();

for message in context.messages(cx) {
if message.offset_range.start >= selection.range().end {
break;
@@ -3086,8 +3092,16 @@ impl ContextEditor {
}
}

for image_anchor in context.image_anchors(cx) {
if let Some((render_image, _)) = context.get_image(image_anchor.image_id) {
//
} else {
log::error!("Assistant panel context had an image id of {:?} but there was no associated images entry stored for that id. This should never happen!", image_anchor.image_id);
}
}

if spanned_messages > 1 {
cx.write_to_clipboard(ClipboardItem::new(copied_text));
cx.write_to_clipboard(ClipboardItem::new(clipboard_entries));
return;
}
}
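The copy path above now collects a `Vec<ClipboardEntry>` and hands it to `ClipboardItem::new` instead of writing a single string. A minimal sketch of what that produces, using the `ClipboardItem`/`ClipboardEntry` API added at the end of this diff and assuming `ClipboardString` is exported alongside it (the two entries here are made up):

```rust
use gpui::{ClipboardEntry, ClipboardItem, ClipboardString};

fn main() {
    // Two copied messages become two string entries in one clipboard item.
    let item = ClipboardItem::new(vec![
        ClipboardEntry::String(ClipboardString::new("fn main() {}\n".to_string())),
        ClipboardEntry::String(ClipboardString::new("// a second message\n".to_string())),
    ]);

    // `text()` concatenates every string entry and returns None only when
    // the item contains no string entries at all.
    assert_eq!(
        item.text(),
        Some("fn main() {}\n// a second message\n".to_string())
    );
}
```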
@@ -3095,6 +3109,104 @@ impl ContextEditor {
cx.propagate();
}

fn paste(&mut self, _: &editor::actions::Paste, cx: &mut ViewContext<Self>) {
let images = if let Some(item) = cx.read_from_clipboard() {
item.into_entries()
.filter_map(|entry| {
if let ClipboardEntry::Image(image) = entry {
Some(image)
} else {
None
}
})
.collect()
} else {
Vec::new()
};

if images.is_empty() {
// If we didn't find any valid image data to paste, propagate to let normal pasting happen.
cx.propagate();
} else {
let mut image_positions = Vec::new();
self.editor.update(cx, |editor, cx| {
editor.transact(cx, |editor, cx| {
let edits = editor
.selections
.all::<usize>(cx)
.into_iter()
.map(|selection| (selection.start..selection.end, "\n"));
editor.edit(edits, cx);

let snapshot = editor.buffer().read(cx).snapshot(cx);
for selection in editor.selections.all::<usize>(cx) {
image_positions.push(snapshot.anchor_before(selection.end));
}
});
});

self.context.update(cx, |context, cx| {
for image in images {
let image_id = image.id();
context.insert_image(image, cx);
for image_position in image_positions.iter() {
context.insert_image_anchor(image_id, image_position.text_anchor, cx);
}
}
});
}
}

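Both paste paths in this diff follow the same shape: pull the entries out of the clipboard item and dispatch on the entry kind. A generic version of that split, assuming only the `ClipboardItem`/`ClipboardEntry` API introduced later in this diff:

```rust
use gpui::{ClipboardEntry, ClipboardItem, Image};

/// Partition a clipboard item into its concatenated text and its images.
fn split_clipboard(item: ClipboardItem) -> (String, Vec<Image>) {
    // `text()` concatenates every string entry; None means there were none.
    let text = item.text().unwrap_or_default();
    let images = item
        .into_entries()
        .filter_map(|entry| match entry {
            ClipboardEntry::Image(image) => Some(image),
            ClipboardEntry::String(_) => None,
        })
        .collect();
    (text, images)
}
```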
fn update_image_blocks(&mut self, cx: &mut ViewContext<Self>) {
self.editor.update(cx, |editor, cx| {
let buffer = editor.buffer().read(cx).snapshot(cx);
let excerpt_id = *buffer.as_singleton().unwrap().0;
let old_blocks = std::mem::take(&mut self.image_blocks);
let new_blocks = self
.context
.read(cx)
.image_anchors(cx)
.filter_map(|image_anchor| {
const MAX_HEIGHT_IN_LINES: u32 = 8;
let anchor = buffer
.anchor_in_excerpt(excerpt_id, image_anchor.anchor)
.unwrap();
let image = image_anchor.render_image.clone();
anchor.is_valid(&buffer).then(|| BlockProperties {
position: anchor,
height: MAX_HEIGHT_IN_LINES,
style: BlockStyle::Sticky,
render: Box::new(move |cx| {
let image_size = size_for_image(
&image,
size(
cx.max_width - cx.gutter_dimensions.full_width(),
MAX_HEIGHT_IN_LINES as f32 * cx.line_height,
),
);
h_flex()
.pl(cx.gutter_dimensions.full_width())
.child(
img(image.clone())
.object_fit(gpui::ObjectFit::ScaleDown)
.w(image_size.width)
.h(image_size.height),
)
.into_any_element()
}),

disposition: BlockDisposition::Above,
priority: 0,
})
})
.collect::<Vec<_>>();

editor.remove_blocks(old_blocks, None, cx);
let ids = editor.insert_blocks(new_blocks, None, cx);
self.image_blocks = HashSet::from_iter(ids);
});
}

|
||||
self.context.update(cx, |context, cx| {
|
||||
let selections = self.editor.read(cx).selections.disjoint_anchors();
|
||||
@@ -3295,6 +3407,7 @@ impl Render for ContextEditor {
|
||||
.capture_action(cx.listener(ContextEditor::cancel))
|
||||
.capture_action(cx.listener(ContextEditor::save))
|
||||
.capture_action(cx.listener(ContextEditor::copy))
|
||||
.capture_action(cx.listener(ContextEditor::paste))
|
||||
.capture_action(cx.listener(ContextEditor::cycle_message_role))
|
||||
.capture_action(cx.listener(ContextEditor::confirm_command))
|
||||
.on_action(cx.listener(ContextEditor::assist))
|
||||
@@ -4308,6 +4421,30 @@ fn token_state(context: &Model<Context>, cx: &AppContext) -> Option<TokenState>
|
||||
Some(token_state)
|
||||
}
|
||||
|
||||
fn size_for_image(data: &RenderImage, max_size: Size<Pixels>) -> Size<Pixels> {
|
||||
let image_size = data
|
||||
.size(0)
|
||||
.map(|dimension| Pixels::from(u32::from(dimension)));
|
||||
let image_ratio = image_size.width / image_size.height;
|
||||
let bounds_ratio = max_size.width / max_size.height;
|
||||
|
||||
if image_size.width > max_size.width || image_size.height > max_size.height {
|
||||
if bounds_ratio > image_ratio {
|
||||
size(
|
||||
image_size.width * (max_size.height / image_size.height),
|
||||
max_size.height,
|
||||
)
|
||||
} else {
|
||||
size(
|
||||
max_size.width,
|
||||
image_size.height * (max_size.width / image_size.width),
|
||||
)
|
||||
}
|
||||
} else {
|
||||
size(image_size.width, image_size.height)
|
||||
}
|
||||
}
|
||||
|
||||
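To make the scaling rule in `size_for_image` concrete, here is a standalone sketch of the same aspect-ratio math on plain `f32` values (the numbers are made up): a 1000x500 image constrained to a 600x240 box is limited by its height, so both dimensions are scaled by 240/500.

```rust
fn size_for(image: (f32, f32), max: (f32, f32)) -> (f32, f32) {
    let (iw, ih) = image;
    let (mw, mh) = max;
    if iw <= mw && ih <= mh {
        return (iw, ih); // already fits: keep the original size
    }
    let image_ratio = iw / ih;
    let bounds_ratio = mw / mh;
    if bounds_ratio > image_ratio {
        // The bounds are relatively wider than the image, so height is the limit.
        (iw * (mh / ih), mh)
    } else {
        // Width is the limit.
        (mw, ih * (mw / iw))
    }
}

fn main() {
    assert_eq!(size_for((1000.0, 500.0), (600.0, 240.0)), (480.0, 240.0));
    assert_eq!(size_for((100.0, 80.0), (600.0, 240.0)), (100.0, 80.0));
}
```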
enum ConfigurationError {
NoProvider,
ProviderNotAuthenticated,

@@ -16,22 +16,23 @@ use futures::{
|
||||
FutureExt, StreamExt,
|
||||
};
|
||||
use gpui::{
|
||||
AppContext, Context as _, EventEmitter, Model, ModelContext, Subscription, Task, UpdateGlobal,
|
||||
View, WeakView,
|
||||
AppContext, Context as _, EventEmitter, Image, Model, ModelContext, RenderImage, Subscription,
|
||||
Task, UpdateGlobal, View, WeakView,
|
||||
};
|
||||
use language::{
|
||||
AnchorRangeExt, Bias, Buffer, BufferSnapshot, LanguageRegistry, OffsetRangeExt, ParseStatus,
|
||||
Point, ToOffset,
|
||||
};
|
||||
use language_model::{
|
||||
LanguageModelRegistry, LanguageModelRequest, LanguageModelRequestMessage, LanguageModelTool,
|
||||
Role,
|
||||
LanguageModelImage, LanguageModelRegistry, LanguageModelRequest, LanguageModelRequestMessage,
|
||||
LanguageModelTool, Role,
|
||||
};
|
||||
use open_ai::Model as OpenAiModel;
|
||||
use paths::contexts_dir;
|
||||
use project::Project;
|
||||
use schemars::JsonSchema;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use smallvec::SmallVec;
|
||||
use std::{
|
||||
cmp,
|
||||
fmt::Debug,
|
||||
@@ -319,8 +320,23 @@ pub struct MessageMetadata {
|
||||
timestamp: clock::Lamport,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, Eq)]
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct MessageImage {
|
||||
image_id: u64,
|
||||
image: Shared<Task<Option<LanguageModelImage>>>,
|
||||
}
|
||||
|
||||
impl PartialEq for MessageImage {
|
||||
fn eq(&self, other: &Self) -> bool {
|
||||
self.image_id == other.image_id
|
||||
}
|
||||
}
|
||||
|
||||
impl Eq for MessageImage {}
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct Message {
|
||||
pub image_offsets: SmallVec<[(usize, MessageImage); 1]>,
|
||||
pub offset_range: Range<usize>,
|
||||
pub index_range: Range<usize>,
|
||||
pub id: MessageId,
|
||||
@@ -331,13 +347,55 @@ pub struct Message {

impl Message {
fn to_request_message(&self, buffer: &Buffer) -> LanguageModelRequestMessage {
let mut content = Vec::new();

let mut range_start = self.offset_range.start;
for (image_offset, message_image) in self.image_offsets.iter() {
if *image_offset != range_start {
content.push(
buffer
.text_for_range(range_start..*image_offset)
.collect::<String>()
.into(),
)
}

if let Some(image) = message_image.image.clone().now_or_never().flatten() {
content.push(language_model::MessageContent::Image(image));
}

range_start = *image_offset;
}
if range_start != self.offset_range.end {
content.push(
buffer
.text_for_range(range_start..self.offset_range.end)
.collect::<String>()
.into(),
)
}

LanguageModelRequestMessage {
role: self.role,
content: buffer.text_for_range(self.offset_range.clone()).collect(),
content,
}
}
}

#[derive(Clone, Debug)]
pub struct ImageAnchor {
pub anchor: language::Anchor,
pub image_id: u64,
pub render_image: Arc<RenderImage>,
pub image: Shared<Task<Option<LanguageModelImage>>>,
}

impl PartialEq for ImageAnchor {
fn eq(&self, other: &Self) -> bool {
self.image_id == other.image_id
}
}

struct PendingCompletion {
id: usize,
_task: Task<()>,
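`to_request_message` walks the message text and splices an image in at each recorded offset. The same interleaving logic, sketched on plain strings with a stand-in enum in place of `language_model::MessageContent` (the real code additionally skips any image whose `LanguageModelImage` future has not resolved yet; the values below are illustrative only):

```rust
#[derive(Debug, PartialEq)]
enum Content {
    Text(String),
    Image(u64), // stand-in: the real code pushes a LanguageModelImage
}

/// Split `text` around the given (offset, image id) pairs, in offset order.
fn interleave(text: &str, image_offsets: &[(usize, u64)]) -> Vec<Content> {
    let mut content = Vec::new();
    let mut range_start = 0;
    for &(image_offset, image_id) in image_offsets {
        if image_offset != range_start {
            content.push(Content::Text(text[range_start..image_offset].to_string()));
        }
        content.push(Content::Image(image_id));
        range_start = image_offset;
    }
    if range_start != text.len() {
        content.push(Content::Text(text[range_start..].to_string()));
    }
    content
}

fn main() {
    let parts = interleave("Look at this:\nWhat is it?", &[(14, 1)]);
    assert_eq!(
        parts,
        vec![
            Content::Text("Look at this:\n".into()),
            Content::Image(1),
            Content::Text("What is it?".into()),
        ]
    );
}
```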
@@ -605,6 +663,8 @@ pub struct Context {
|
||||
finished_slash_commands: HashSet<SlashCommandId>,
|
||||
slash_command_output_sections: Vec<SlashCommandOutputSection<language::Anchor>>,
|
||||
message_anchors: Vec<MessageAnchor>,
|
||||
images: HashMap<u64, (Arc<RenderImage>, Shared<Task<Option<LanguageModelImage>>>)>,
|
||||
image_anchors: Vec<ImageAnchor>,
|
||||
messages_metadata: HashMap<MessageId, MessageMetadata>,
|
||||
summary: Option<ContextSummary>,
|
||||
pending_summary: Task<Option<()>>,
|
||||
@@ -674,6 +734,8 @@ impl Context {
|
||||
pending_ops: Vec::new(),
|
||||
operations: Vec::new(),
|
||||
message_anchors: Default::default(),
|
||||
image_anchors: Default::default(),
|
||||
images: Default::default(),
|
||||
messages_metadata: Default::default(),
|
||||
pending_slash_commands: Vec::new(),
|
||||
finished_slash_commands: HashSet::default(),
|
||||
@@ -1288,7 +1350,7 @@ impl Context {
|
||||
|
||||
request.messages.push(LanguageModelRequestMessage {
|
||||
role: Role::User,
|
||||
content: prompt,
|
||||
content: vec![prompt.into()],
|
||||
});
|
||||
|
||||
// Invoke the model to get its edit suggestions for this workflow step.
|
||||
@@ -1690,13 +1752,15 @@ impl Context {
|
||||
}
|
||||
|
||||
pub fn to_completion_request(&self, cx: &AppContext) -> LanguageModelRequest {
|
||||
let messages = self
|
||||
let buffer = self.buffer.read(cx);
|
||||
let request_messages = self
|
||||
.messages(cx)
|
||||
.filter(|message| matches!(message.status, MessageStatus::Done))
|
||||
.map(|message| message.to_request_message(self.buffer.read(cx)));
|
||||
.filter(|message| message.status == MessageStatus::Done)
|
||||
.map(|message| message.to_request_message(&buffer))
|
||||
.collect();
|
||||
|
||||
LanguageModelRequest {
|
||||
messages: messages.collect(),
|
||||
messages: request_messages,
|
||||
stop: vec![],
|
||||
temperature: 1.0,
|
||||
}
|
||||
@@ -1794,6 +1858,68 @@ impl Context {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn insert_image(&mut self, image: Image, cx: &mut ModelContext<Self>) -> Option<()> {
|
||||
if !self.images.contains_key(&image.id()) {
|
||||
self.images.insert(
|
||||
image.id(),
|
||||
(
|
||||
image.to_image_data(cx).log_err()?,
|
||||
LanguageModelImage::from_image(image, cx).shared(),
|
||||
),
|
||||
);
|
||||
}
|
||||
|
||||
Some(())
|
||||
}
|
||||
|
||||
pub fn insert_image_anchor(
|
||||
&mut self,
|
||||
image_id: u64,
|
||||
anchor: language::Anchor,
|
||||
cx: &mut ModelContext<Self>,
|
||||
) -> bool {
|
||||
cx.emit(ContextEvent::MessagesEdited);
|
||||
|
||||
let buffer = self.buffer.read(cx);
|
||||
let insertion_ix = match self
|
||||
.image_anchors
|
||||
.binary_search_by(|existing_anchor| anchor.cmp(&existing_anchor.anchor, buffer))
|
||||
{
|
||||
Ok(ix) => ix,
|
||||
Err(ix) => ix,
|
||||
};
|
||||
|
||||
if let Some((render_image, image)) = self.images.get(&image_id) {
|
||||
self.image_anchors.insert(
|
||||
insertion_ix,
|
||||
ImageAnchor {
|
||||
anchor,
|
||||
image_id,
|
||||
image: image.clone(),
|
||||
render_image: render_image.clone(),
|
||||
},
|
||||
);
|
||||
|
||||
true
|
||||
} else {
|
||||
false
|
||||
}
|
||||
}
|
||||
|
||||
pub fn image_anchors<'a>(
|
||||
&'a self,
|
||||
_cx: &'a AppContext,
|
||||
) -> impl 'a + Iterator<Item = ImageAnchor> {
|
||||
self.image_anchors.iter().cloned()
|
||||
}
|
||||
|
||||
pub fn get_image(
|
||||
&self,
|
||||
image_id: u64,
|
||||
) -> Option<&(Arc<RenderImage>, Shared<Task<Option<LanguageModelImage>>>)> {
|
||||
self.images.get(&image_id)
|
||||
}
|
||||
|
||||
pub fn split_message(
|
||||
&mut self,
|
||||
range: Range<usize>,
|
||||
@@ -1812,7 +1938,10 @@ impl Context {
|
||||
let mut edited_buffer = false;
|
||||
|
||||
let mut suffix_start = None;
|
||||
if range.start > message.offset_range.start && range.end < message.offset_range.end - 1
|
||||
|
||||
// TODO: why did this start panicking?
|
||||
if range.start > message.offset_range.start
|
||||
&& range.end < message.offset_range.end.saturating_sub(1)
|
||||
{
|
||||
if self.buffer.read(cx).chars_at(range.end).next() == Some('\n') {
|
||||
suffix_start = Some(range.end + 1);
|
||||
@@ -1954,7 +2083,9 @@ impl Context {
|
||||
.map(|message| message.to_request_message(self.buffer.read(cx)))
|
||||
.chain(Some(LanguageModelRequestMessage {
|
||||
role: Role::User,
|
||||
content: "Summarize the context into a short title without punctuation.".into(),
|
||||
content: vec![
|
||||
"Summarize the context into a short title without punctuation.".into(),
|
||||
],
|
||||
}));
|
||||
let request = LanguageModelRequest {
|
||||
messages: messages.collect(),
|
||||
@@ -2056,25 +2187,55 @@ impl Context {
|
||||
|
||||
pub fn messages<'a>(&'a self, cx: &'a AppContext) -> impl 'a + Iterator<Item = Message> {
|
||||
let buffer = self.buffer.read(cx);
|
||||
let mut message_anchors = self.message_anchors.iter().enumerate().peekable();
|
||||
let messages = self.message_anchors.iter().enumerate();
|
||||
let images = self.image_anchors.iter();
|
||||
|
||||
Self::messages_from_iters(buffer, &self.messages_metadata, messages, images)
|
||||
}
|
||||
|
||||
pub fn messages_from_iters<'a>(
|
||||
buffer: &'a Buffer,
|
||||
metadata: &'a HashMap<MessageId, MessageMetadata>,
|
||||
messages: impl Iterator<Item = (usize, &'a MessageAnchor)> + 'a,
|
||||
images: impl Iterator<Item = &'a ImageAnchor> + 'a,
|
||||
) -> impl 'a + Iterator<Item = Message> {
|
||||
let mut messages = messages.peekable();
|
||||
let mut images = images.peekable();
|
||||
|
||||
iter::from_fn(move || {
|
||||
if let Some((start_ix, message_anchor)) = message_anchors.next() {
|
||||
let metadata = self.messages_metadata.get(&message_anchor.id)?;
|
||||
if let Some((start_ix, message_anchor)) = messages.next() {
|
||||
let metadata = metadata.get(&message_anchor.id)?;
|
||||
|
||||
let message_start = message_anchor.start.to_offset(buffer);
|
||||
let mut message_end = None;
|
||||
let mut end_ix = start_ix;
|
||||
while let Some((_, next_message)) = message_anchors.peek() {
|
||||
while let Some((_, next_message)) = messages.peek() {
|
||||
if next_message.start.is_valid(buffer) {
|
||||
message_end = Some(next_message.start);
|
||||
break;
|
||||
} else {
|
||||
end_ix += 1;
|
||||
message_anchors.next();
|
||||
messages.next();
|
||||
}
|
||||
}
|
||||
let message_end_anchor = message_end.unwrap_or(language::Anchor::MAX);
|
||||
let message_end = message_anchor.start.to_offset(buffer);
|
||||
|
||||
let mut image_offsets = SmallVec::new();
|
||||
while let Some(image_anchor) = images.peek() {
|
||||
if image_anchor.anchor.cmp(&message_end_anchor, buffer).is_lt() {
|
||||
image_offsets.push((
|
||||
image_anchor.anchor.to_offset(buffer),
|
||||
MessageImage {
|
||||
image_id: image_anchor.image_id,
|
||||
image: image_anchor.image.clone(),
|
||||
},
|
||||
));
|
||||
images.next();
|
||||
} else {
|
||||
break;
|
||||
}
|
||||
}
|
||||
let message_end = message_end
|
||||
.unwrap_or(language::Anchor::MAX)
|
||||
.to_offset(buffer);
|
||||
|
||||
return Some(Message {
|
||||
index_range: start_ix..end_ix,
|
||||
@@ -2083,6 +2244,7 @@ impl Context {
|
||||
anchor: message_anchor.start,
|
||||
role: metadata.role,
|
||||
status: metadata.status.clone(),
|
||||
image_offsets,
|
||||
});
|
||||
}
|
||||
None
|
||||
|
||||
@@ -2302,7 +2302,7 @@ impl Codegen {
|
||||
|
||||
messages.push(LanguageModelRequestMessage {
|
||||
role: Role::User,
|
||||
content: prompt,
|
||||
content: vec![prompt.into()],
|
||||
});
|
||||
|
||||
Ok(LanguageModelRequest {
|
||||
|
||||
@@ -775,7 +775,7 @@ impl PromptLibrary {
|
||||
LanguageModelRequest {
|
||||
messages: vec![LanguageModelRequestMessage {
|
||||
role: Role::System,
|
||||
content: body.to_string(),
|
||||
content: vec![body.to_string().into()],
|
||||
}],
|
||||
stop: Vec::new(),
|
||||
temperature: 1.,
|
||||
|
||||
@@ -276,7 +276,7 @@ impl TerminalInlineAssistant {
|
||||
|
||||
messages.push(LanguageModelRequestMessage {
|
||||
role: Role::User,
|
||||
content: prompt,
|
||||
content: vec![prompt.into()],
|
||||
});
|
||||
|
||||
Ok(LanguageModelRequest {
|
||||
|
||||
@@ -280,7 +280,7 @@ impl ChannelView {
|
||||
};
|
||||
|
||||
let link = channel.notes_link(closest_heading.map(|heading| heading.text), cx);
|
||||
cx.write_to_clipboard(ClipboardItem::new(link));
|
||||
cx.write_to_clipboard(ClipboardItem::new_string(link));
|
||||
self.workspace
|
||||
.update(cx, |workspace, cx| {
|
||||
struct CopyLinkForPositionToast;
|
||||
|
||||
@@ -710,7 +710,7 @@ impl ChatPanel {
|
||||
active_chat.read(cx).find_loaded_message(message_id)
|
||||
}) {
|
||||
let text = message.body.clone();
|
||||
cx.write_to_clipboard(ClipboardItem::new(text))
|
||||
cx.write_to_clipboard(ClipboardItem::new_string(text))
|
||||
}
|
||||
}),
|
||||
)
|
||||
|
||||
@@ -2042,7 +2042,7 @@ impl CollabPanel {
|
||||
let Some(channel) = channel_store.channel_for_id(channel_id) else {
|
||||
return;
|
||||
};
|
||||
let item = ClipboardItem::new(channel.link(cx));
|
||||
let item = ClipboardItem::new_string(channel.link(cx));
|
||||
cx.write_to_clipboard(item)
|
||||
}
|
||||
|
||||
@@ -2261,7 +2261,7 @@ impl CollabPanel {
|
||||
.size(ButtonSize::None)
|
||||
.visible_on_hover("section-header")
|
||||
.on_click(move |_, cx| {
|
||||
let item = ClipboardItem::new(channel_link_copy.clone());
|
||||
let item = ClipboardItem::new_string(channel_link_copy.clone());
|
||||
cx.write_to_clipboard(item)
|
||||
})
|
||||
.tooltip(|cx| Tooltip::text("Copy channel link", cx))
|
||||
|
||||
@@ -175,7 +175,8 @@ impl Render for ChannelModal {
|
||||
.read(cx)
|
||||
.channel_for_id(channel_id)
|
||||
{
|
||||
let item = ClipboardItem::new(channel.link(cx));
|
||||
let item =
|
||||
ClipboardItem::new_string(channel.link(cx));
|
||||
cx.write_to_clipboard(item);
|
||||
}
|
||||
})),
|
||||
|
||||
@@ -55,7 +55,7 @@ impl CopilotCodeVerification {
|
||||
) -> impl IntoElement {
|
||||
let copied = cx
|
||||
.read_from_clipboard()
|
||||
.map(|item| item.text() == &data.user_code)
|
||||
.map(|item| item.text().as_ref() == Some(&data.user_code))
|
||||
.unwrap_or(false);
|
||||
h_flex()
|
||||
.w_full()
|
||||
@@ -68,7 +68,7 @@ impl CopilotCodeVerification {
|
||||
.on_mouse_down(gpui::MouseButton::Left, {
|
||||
let user_code = data.user_code.clone();
|
||||
move |_, cx| {
|
||||
cx.write_to_clipboard(ClipboardItem::new(user_code.clone()));
|
||||
cx.write_to_clipboard(ClipboardItem::new_string(user_code.clone()));
|
||||
cx.refresh();
|
||||
}
|
||||
})
|
||||
|
||||
@@ -2,8 +2,8 @@ use futures::Future;
|
||||
use git::blame::BlameEntry;
|
||||
use git::Oid;
|
||||
use gpui::{
|
||||
Asset, ClipboardItem, Element, ParentElement, Render, ScrollHandle, StatefulInteractiveElement,
|
||||
WeakView, WindowContext,
|
||||
AppContext, Asset, ClipboardItem, Element, ParentElement, Render, ScrollHandle,
|
||||
StatefulInteractiveElement, WeakView,
|
||||
};
|
||||
use settings::Settings;
|
||||
use std::hash::Hash;
|
||||
@@ -35,7 +35,7 @@ impl<'a> CommitAvatar<'a> {
|
||||
|
||||
let avatar_url = CommitAvatarAsset::new(remote.clone(), self.sha);
|
||||
|
||||
let element = match cx.use_cached_asset::<CommitAvatarAsset>(&avatar_url) {
|
||||
let element = match cx.use_asset::<CommitAvatarAsset>(&avatar_url) {
|
||||
// Loading or no avatar found
|
||||
None | Some(None) => Icon::new(IconName::Person)
|
||||
.color(Color::Muted)
|
||||
@@ -73,7 +73,7 @@ impl Asset for CommitAvatarAsset {
|
||||
|
||||
fn load(
|
||||
source: Self::Source,
|
||||
cx: &mut WindowContext,
|
||||
cx: &mut AppContext,
|
||||
) -> impl Future<Output = Self::Output> + Send + 'static {
|
||||
let client = cx.http_client();
|
||||
|
||||
@@ -242,9 +242,9 @@ impl Render for BlameEntryTooltip {
|
||||
.icon_color(Color::Muted)
|
||||
.on_click(move |_, cx| {
|
||||
cx.stop_propagation();
|
||||
cx.write_to_clipboard(ClipboardItem::new(
|
||||
full_sha.clone(),
|
||||
))
|
||||
cx.write_to_clipboard(
|
||||
ClipboardItem::new_string(full_sha.clone()),
|
||||
)
|
||||
}),
|
||||
),
|
||||
),
|
||||
|
||||
@@ -69,13 +69,13 @@ use git::blame::GitBlame;
|
||||
use git::diff_hunk_to_display;
|
||||
use gpui::{
|
||||
div, impl_actions, point, prelude::*, px, relative, size, uniform_list, Action, AnyElement,
|
||||
AppContext, AsyncWindowContext, AvailableSpace, BackgroundExecutor, Bounds, ClipboardItem,
|
||||
Context, DispatchPhase, ElementId, EntityId, EventEmitter, FocusHandle, FocusOutEvent,
|
||||
FocusableView, FontId, FontWeight, HighlightStyle, Hsla, InteractiveText, KeyContext,
|
||||
ListSizingBehavior, Model, MouseButton, PaintQuad, ParentElement, Pixels, Render, SharedString,
|
||||
Size, StrikethroughStyle, Styled, StyledText, Subscription, Task, TextStyle, UnderlineStyle,
|
||||
UniformListScrollHandle, View, ViewContext, ViewInputHandler, VisualContext, WeakFocusHandle,
|
||||
WeakView, WindowContext,
|
||||
AppContext, AsyncWindowContext, AvailableSpace, BackgroundExecutor, Bounds, ClipboardEntry,
|
||||
ClipboardItem, Context, DispatchPhase, ElementId, EntityId, EventEmitter, FocusHandle,
|
||||
FocusOutEvent, FocusableView, FontId, FontWeight, HighlightStyle, Hsla, InteractiveText,
|
||||
KeyContext, ListSizingBehavior, Model, MouseButton, PaintQuad, ParentElement, Pixels, Render,
|
||||
SharedString, Size, StrikethroughStyle, Styled, StyledText, Subscription, Task, TextStyle,
|
||||
UnderlineStyle, UniformListScrollHandle, View, ViewContext, ViewInputHandler, VisualContext,
|
||||
WeakFocusHandle, WeakView, WindowContext,
|
||||
};
|
||||
use highlight_matching_bracket::refresh_matching_bracket_highlights;
|
||||
use hover_popover::{hide_hover, HoverState};
|
||||
@@ -2281,7 +2281,7 @@ impl Editor {
|
||||
}
|
||||
|
||||
if !text.is_empty() {
|
||||
cx.write_to_primary(ClipboardItem::new(text));
|
||||
cx.write_to_primary(ClipboardItem::new_string(text));
|
||||
}
|
||||
}
|
||||
|
||||
@@ -6546,7 +6546,10 @@ impl Editor {
|
||||
s.select(selections);
|
||||
});
|
||||
this.insert("", cx);
|
||||
cx.write_to_clipboard(ClipboardItem::new(text).with_metadata(clipboard_selections));
|
||||
cx.write_to_clipboard(ClipboardItem::new_string_with_metadata(
|
||||
text,
|
||||
clipboard_selections,
|
||||
));
|
||||
});
|
||||
}
|
||||
|
||||
@@ -6585,7 +6588,10 @@ impl Editor {
|
||||
}
|
||||
}
|
||||
|
||||
cx.write_to_clipboard(ClipboardItem::new(text).with_metadata(clipboard_selections));
|
||||
cx.write_to_clipboard(ClipboardItem::new_string_with_metadata(
|
||||
text,
|
||||
clipboard_selections,
|
||||
));
|
||||
}
|
||||
|
||||
pub fn do_paste(
|
||||
@@ -6669,13 +6675,21 @@ impl Editor {
|
||||
|
||||
pub fn paste(&mut self, _: &Paste, cx: &mut ViewContext<Self>) {
|
||||
if let Some(item) = cx.read_from_clipboard() {
|
||||
self.do_paste(
|
||||
item.text(),
|
||||
item.metadata::<Vec<ClipboardSelection>>(),
|
||||
true,
|
||||
cx,
|
||||
)
|
||||
};
|
||||
let entries = item.entries();
|
||||
|
||||
match entries.first() {
|
||||
// For now, we only support applying metadata if there's one string. In the future, we can incorporate all the selections
|
||||
// of all the pasted entries.
|
||||
Some(ClipboardEntry::String(clipboard_string)) if entries.len() == 1 => self
|
||||
.do_paste(
|
||||
clipboard_string.text(),
|
||||
clipboard_string.metadata::<Vec<ClipboardSelection>>(),
|
||||
true,
|
||||
cx,
|
||||
),
|
||||
_ => self.do_paste(&item.text().unwrap_or_default(), None, true, cx),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn undo(&mut self, _: &Undo, cx: &mut ViewContext<Self>) {
|
||||
@@ -10496,7 +10510,7 @@ impl Editor {
|
||||
if let Some(buffer) = self.buffer().read(cx).as_singleton() {
|
||||
if let Some(file) = buffer.read(cx).file().and_then(|f| f.as_local()) {
|
||||
if let Some(path) = file.abs_path(cx).to_str() {
|
||||
cx.write_to_clipboard(ClipboardItem::new(path.to_string()));
|
||||
cx.write_to_clipboard(ClipboardItem::new_string(path.to_string()));
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -10506,7 +10520,7 @@ impl Editor {
|
||||
if let Some(buffer) = self.buffer().read(cx).as_singleton() {
|
||||
if let Some(file) = buffer.read(cx).file().and_then(|f| f.as_local()) {
|
||||
if let Some(path) = file.path().to_str() {
|
||||
cx.write_to_clipboard(ClipboardItem::new(path.to_string()));
|
||||
cx.write_to_clipboard(ClipboardItem::new_string(path.to_string()));
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -10696,7 +10710,7 @@ impl Editor {
|
||||
|
||||
match permalink {
|
||||
Ok(permalink) => {
|
||||
cx.write_to_clipboard(ClipboardItem::new(permalink.to_string()));
|
||||
cx.write_to_clipboard(ClipboardItem::new_string(permalink.to_string()));
|
||||
}
|
||||
Err(err) => {
|
||||
let message = format!("Failed to copy permalink: {err}");
|
||||
@@ -11632,7 +11646,7 @@ impl Editor {
|
||||
let Some(lines) = serde_json::to_string_pretty(&lines).log_err() else {
|
||||
return;
|
||||
};
|
||||
cx.write_to_clipboard(ClipboardItem::new(lines));
|
||||
cx.write_to_clipboard(ClipboardItem::new_string(lines));
|
||||
}
|
||||
|
||||
pub fn inlay_hint_cache(&self) -> &InlayHintCache {
|
||||
@@ -12899,7 +12913,9 @@ pub fn diagnostic_block_renderer(
|
||||
.visible_on_hover(group_id.clone())
|
||||
.on_click({
|
||||
let message = diagnostic.message.clone();
|
||||
move |_click, cx| cx.write_to_clipboard(ClipboardItem::new(message.clone()))
|
||||
move |_click, cx| {
|
||||
cx.write_to_clipboard(ClipboardItem::new_string(message.clone()))
|
||||
}
|
||||
})
|
||||
.tooltip(|cx| Tooltip::text("Copy diagnostic message", cx)),
|
||||
)
|
||||
|
||||
@@ -3956,8 +3956,9 @@ async fn test_clipboard(cx: &mut gpui::TestAppContext) {
|
||||
the lazy dog"});
|
||||
cx.update_editor(|e, cx| e.copy(&Copy, cx));
|
||||
assert_eq!(
|
||||
cx.read_from_clipboard().map(|item| item.text().to_owned()),
|
||||
Some("fox jumps over\n".to_owned())
|
||||
cx.read_from_clipboard()
|
||||
.and_then(|item| item.text().map(str::to_string)),
|
||||
Some("fox jumps over\n".to_string())
|
||||
);
|
||||
|
||||
// Paste with three selections, noticing how the copied full-line selection is inserted
|
||||
|
||||
@@ -4282,7 +4282,7 @@ fn deploy_blame_entry_context_menu(
|
||||
let sha = format!("{}", blame_entry.sha);
|
||||
menu.on_blur_subscription(Subscription::new(|| {}))
|
||||
.entry("Copy commit SHA", None, move |cx| {
|
||||
cx.write_to_clipboard(ClipboardItem::new(sha.clone()));
|
||||
cx.write_to_clipboard(ClipboardItem::new_string(sha.clone()));
|
||||
})
|
||||
.when_some(
|
||||
details.and_then(|details| details.permalink.clone()),
|
||||
|
||||
@@ -44,7 +44,7 @@ pub fn init(cx: &mut AppContext) {
|
||||
cx.spawn(|_, mut cx| async move {
|
||||
let specs = specs.await.to_string();
|
||||
|
||||
cx.update(|cx| cx.write_to_clipboard(ClipboardItem::new(specs.clone())))
|
||||
cx.update(|cx| cx.write_to_clipboard(ClipboardItem::new_string(specs.clone())))
|
||||
.log_err();
|
||||
|
||||
cx.prompt(
|
||||
|
||||
@@ -67,6 +67,7 @@ serde_json.workspace = true
|
||||
slotmap = "1.0.6"
|
||||
smallvec.workspace = true
|
||||
smol.workspace = true
|
||||
strum.workspace = true
|
||||
sum_tree.workspace = true
|
||||
taffy = "0.4.3"
|
||||
thiserror.workspace = true
|
||||
|
||||
@@ -11,9 +11,12 @@ use std::{
|
||||
|
||||
use anyhow::{anyhow, Result};
|
||||
use derive_more::{Deref, DerefMut};
|
||||
use futures::{channel::oneshot, future::LocalBoxFuture, Future};
|
||||
use futures::{
|
||||
channel::oneshot,
|
||||
future::{LocalBoxFuture, Shared},
|
||||
Future, FutureExt,
|
||||
};
|
||||
use slotmap::SlotMap;
|
||||
use smol::future::FutureExt;
|
||||
|
||||
pub use async_context::*;
|
||||
use collections::{FxHashMap, FxHashSet, VecDeque};
|
||||
@@ -25,8 +28,8 @@ pub use test_context::*;
|
||||
use util::ResultExt;
|
||||
|
||||
use crate::{
|
||||
current_platform, init_app_menus, Action, ActionRegistry, Any, AnyView, AnyWindowHandle,
|
||||
AssetCache, AssetSource, BackgroundExecutor, ClipboardItem, Context, DispatchPhase, DisplayId,
|
||||
current_platform, hash, init_app_menus, Action, ActionRegistry, Any, AnyView, AnyWindowHandle,
|
||||
Asset, AssetSource, BackgroundExecutor, ClipboardItem, Context, DispatchPhase, DisplayId,
|
||||
Entity, EventEmitter, ForegroundExecutor, Global, KeyBinding, Keymap, Keystroke, LayoutId,
|
||||
Menu, MenuItem, OwnedMenu, PathPromptOptions, Pixels, Platform, PlatformDisplay, Point,
|
||||
PromptBuilder, PromptHandle, PromptLevel, Render, RenderablePromptHandle, Reservation,
|
||||
@@ -220,7 +223,6 @@ pub struct AppContext {
|
||||
pub(crate) background_executor: BackgroundExecutor,
|
||||
pub(crate) foreground_executor: ForegroundExecutor,
|
||||
pub(crate) loading_assets: FxHashMap<(TypeId, u64), Box<dyn Any>>,
|
||||
pub(crate) asset_cache: AssetCache,
|
||||
asset_source: Arc<dyn AssetSource>,
|
||||
pub(crate) svg_renderer: SvgRenderer,
|
||||
http_client: Arc<dyn HttpClient>,
|
||||
@@ -276,7 +278,6 @@ impl AppContext {
|
||||
background_executor: executor,
|
||||
foreground_executor,
|
||||
svg_renderer: SvgRenderer::new(asset_source.clone()),
|
||||
asset_cache: AssetCache::new(),
|
||||
loading_assets: Default::default(),
|
||||
asset_source,
|
||||
http_client,
|
||||
@@ -1267,6 +1268,40 @@ impl AppContext {
) {
self.prompt_builder = Some(PromptBuilder::Custom(Box::new(renderer)))
}

/// Remove an asset from GPUI's cache
pub fn remove_cached_asset<A: Asset + 'static>(&mut self, source: &A::Source) {
let asset_id = (TypeId::of::<A>(), hash(source));
self.loading_assets.remove(&asset_id);
}

/// Asynchronously load an asset, if the asset hasn't finished loading this will return None.
///
/// Note that the multiple calls to this method will only result in one `Asset::load` call at a
/// time, and the results of this call will be cached
///
/// This asset will not be cached by default, see [Self::use_cached_asset]
pub fn fetch_asset<A: Asset + 'static>(
&mut self,
source: &A::Source,
) -> (Shared<Task<A::Output>>, bool) {
let asset_id = (TypeId::of::<A>(), hash(source));
let mut is_first = false;
let task = self
.loading_assets
.remove(&asset_id)
.map(|boxed_task| *boxed_task.downcast::<Shared<Task<A::Output>>>().unwrap())
.unwrap_or_else(|| {
is_first = true;
let future = A::load(source.clone(), self);
let task = self.background_executor().spawn(future).shared();
task
});

self.loading_assets.insert(asset_id, Box::new(task.clone()));

(task, is_first)
}
}
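A hedged sketch of how a caller might use the new `fetch_asset`: define an `Asset` whose source is cheap to clone and hash, then poll the shared task. `WordLength` and its source are invented for illustration; only `fetch_asset` and the `Asset` trait as shown in this diff are assumed, and any extra bounds the trait places on `Source`/`Output` are not visible in these hunks.

```rust
use futures::FutureExt as _;
use gpui::{AppContext, Asset, SharedString};
use std::future::Future;

enum WordLength {}

impl Asset for WordLength {
    type Source = SharedString;
    type Output = usize;

    fn load(
        source: Self::Source,
        _cx: &mut AppContext,
    ) -> impl Future<Output = Self::Output> + Send + 'static {
        async move { source.chars().count() }
    }
}

fn word_length(cx: &mut AppContext, word: &SharedString) -> Option<usize> {
    // `fetch_asset` dedupes concurrent loads behind a shared task;
    // `now_or_never` yields None while the load is still in flight.
    let (task, _is_first) = cx.fetch_asset::<WordLength>(word);
    task.now_or_never()
}
```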
|
||||
impl Context for AppContext {
|
||||
|
||||
@@ -1,17 +1,14 @@
use crate::{SharedString, SharedUri, WindowContext};
use collections::FxHashMap;
use crate::{AppContext, SharedString, SharedUri};
use futures::Future;
use parking_lot::Mutex;
use std::any::TypeId;
use std::hash::{Hash, Hasher};
use std::path::PathBuf;
use std::sync::Arc;
use std::{any::Any, path::PathBuf};

#[derive(Debug, PartialEq, Eq, Hash, Clone)]
pub(crate) enum UriOrPath {
Uri(SharedUri),
Path(Arc<PathBuf>),
Asset(SharedString),
Embedded(SharedString),
}

impl From<SharedUri> for UriOrPath {
@@ -37,7 +34,7 @@ pub trait Asset {
/// Load the asset asynchronously
fn load(
source: Self::Source,
cx: &mut WindowContext,
cx: &mut AppContext,
) -> impl Future<Output = Self::Output> + Send + 'static;
}

@@ -47,42 +44,3 @@ pub fn hash<T: Hash>(data: &T) -> u64 {
|
||||
data.hash(&mut hasher);
|
||||
hasher.finish()
|
||||
}
|
||||
|
||||
/// A cache for assets.
|
||||
#[derive(Clone)]
|
||||
pub struct AssetCache {
|
||||
assets: Arc<Mutex<FxHashMap<(TypeId, u64), Box<dyn Any + Send>>>>,
|
||||
}
|
||||
|
||||
impl AssetCache {
|
||||
pub(crate) fn new() -> Self {
|
||||
Self {
|
||||
assets: Default::default(),
|
||||
}
|
||||
}
|
||||
|
||||
/// Get the asset from the cache, if it exists.
|
||||
pub fn get<A: Asset + 'static>(&self, source: &A::Source) -> Option<A::Output> {
|
||||
self.assets
|
||||
.lock()
|
||||
.get(&(TypeId::of::<A>(), hash(&source)))
|
||||
.and_then(|task| task.downcast_ref::<A::Output>())
|
||||
.cloned()
|
||||
}
|
||||
|
||||
/// Insert the asset into the cache.
|
||||
pub fn insert<A: Asset + 'static>(&mut self, source: A::Source, output: A::Output) {
|
||||
self.assets
|
||||
.lock()
|
||||
.insert((TypeId::of::<A>(), hash(&source)), Box::new(output));
|
||||
}
|
||||
|
||||
/// Remove an entry from the asset cache
|
||||
pub fn remove<A: Asset + 'static>(&mut self, source: &A::Source) -> Option<A::Output> {
|
||||
self.assets
|
||||
.lock()
|
||||
.remove(&(TypeId::of::<A>(), hash(&source)))
|
||||
.and_then(|any| any.downcast::<A::Output>().ok())
|
||||
.map(|boxed| *boxed)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -38,14 +38,22 @@ pub(crate) struct RenderImageParams {
pub(crate) frame_index: usize,
}

/// A cached and processed image.
pub struct ImageData {
/// A cached and processed image, in BGRA format
pub struct RenderImage {
/// The ID associated with this image
pub id: ImageId,
data: SmallVec<[Frame; 1]>,
}

impl ImageData {
impl PartialEq for RenderImage {
fn eq(&self, other: &Self) -> bool {
self.id == other.id
}
}

impl Eq for RenderImage {}

impl RenderImage {
/// Create a new image from the given data.
pub fn new(data: impl Into<SmallVec<[Frame; 1]>>) -> Self {
static NEXT_ID: AtomicUsize = AtomicUsize::new(0);
@@ -57,8 +65,10 @@ impl ImageData {
}

/// Convert this image into a byte slice.
pub fn as_bytes(&self, frame_index: usize) -> &[u8] {
&self.data[frame_index].buffer()
pub fn as_bytes(&self, frame_index: usize) -> Option<&[u8]> {
self.data
.get(frame_index)
.map(|frame| frame.buffer().as_raw().as_slice())
}

/// Get the size of this image, in pixels.
@@ -78,7 +88,7 @@ impl ImageData {
}
}

impl fmt::Debug for ImageData {
impl fmt::Debug for RenderImage {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_struct("ImageData")
.field("id", &self.id)

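`as_bytes` now returns `None` for an out-of-range frame instead of indexing and panicking, so callers handle missing frames explicitly. A hedged sketch of reading the first frame of a `RenderImage` (the function name is invented):

```rust
use gpui::{DevicePixels, RenderImage, Size};

/// Returns the BGRA bytes and pixel size of frame 0, if the image has any frames.
fn first_frame(image: &RenderImage) -> Option<(&[u8], Size<DevicePixels>)> {
    let bytes = image.as_bytes(0)?;
    Some((bytes, image.size(0)))
}
```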
@@ -1,16 +1,14 @@
|
||||
use crate::{
|
||||
point, px, size, AbsoluteLength, Asset, Bounds, DefiniteLength, DevicePixels, Element,
|
||||
ElementId, GlobalElementId, Hitbox, ImageData, InteractiveElement, Interactivity, IntoElement,
|
||||
LayoutId, Length, Pixels, SharedString, SharedUri, Size, StyleRefinement, Styled, SvgSize,
|
||||
UriOrPath, WindowContext,
|
||||
px, AbsoluteLength, AppContext, Asset, Bounds, DefiniteLength, Element, ElementId,
|
||||
GlobalElementId, Hitbox, Image, InteractiveElement, Interactivity, IntoElement, LayoutId,
|
||||
Length, ObjectFit, Pixels, RenderImage, SharedString, SharedUri, Size, StyleRefinement, Styled,
|
||||
SvgSize, UriOrPath, WindowContext,
|
||||
};
|
||||
use futures::{AsyncReadExt, Future};
|
||||
use http_client;
|
||||
use image::{
|
||||
codecs::gif::GifDecoder, AnimationDecoder, Frame, ImageBuffer, ImageError, ImageFormat,
|
||||
};
|
||||
#[cfg(target_os = "macos")]
|
||||
use media::core_video::CVImageBuffer;
|
||||
use smallvec::SmallVec;
|
||||
use std::{
|
||||
fs,
|
||||
@@ -23,20 +21,18 @@ use thiserror::Error;
|
||||
use util::ResultExt;
|
||||
|
||||
/// A source of image content.
|
||||
#[derive(Clone, Debug)]
|
||||
#[derive(Clone, Debug, PartialEq, Eq)]
|
||||
pub enum ImageSource {
|
||||
/// Image content will be loaded from provided URI at render time.
|
||||
Uri(SharedUri),
|
||||
/// Image content will be loaded from the provided file at render time.
|
||||
File(Arc<PathBuf>),
|
||||
/// Cached image data
|
||||
Data(Arc<ImageData>),
|
||||
Render(Arc<RenderImage>),
|
||||
/// Cached image data
|
||||
Image(Arc<Image>),
|
||||
/// Image content will be loaded from Asset at render time.
|
||||
Asset(SharedString),
|
||||
// TODO: move surface definitions into mac platform module
|
||||
/// A CoreVideo image buffer
|
||||
#[cfg(target_os = "macos")]
|
||||
Surface(CVImageBuffer),
|
||||
Embedded(SharedString),
|
||||
}
|
||||
|
||||
fn is_uri(uri: &str) -> bool {
|
||||
@@ -54,7 +50,7 @@ impl From<&'static str> for ImageSource {
|
||||
if is_uri(&s) {
|
||||
Self::Uri(s.into())
|
||||
} else {
|
||||
Self::Asset(s.into())
|
||||
Self::Embedded(s.into())
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -64,7 +60,7 @@ impl From<String> for ImageSource {
|
||||
if is_uri(&s) {
|
||||
Self::Uri(s.into())
|
||||
} else {
|
||||
Self::Asset(s.into())
|
||||
Self::Embedded(s.into())
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -74,7 +70,7 @@ impl From<SharedString> for ImageSource {
|
||||
if is_uri(&s) {
|
||||
Self::Uri(s.into())
|
||||
} else {
|
||||
Self::Asset(s)
|
||||
Self::Embedded(s)
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -91,16 +87,9 @@ impl From<PathBuf> for ImageSource {
|
||||
}
|
||||
}
|
||||
|
||||
impl From<Arc<ImageData>> for ImageSource {
|
||||
fn from(value: Arc<ImageData>) -> Self {
|
||||
Self::Data(value)
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(target_os = "macos")]
|
||||
impl From<CVImageBuffer> for ImageSource {
|
||||
fn from(value: CVImageBuffer) -> Self {
|
||||
Self::Surface(value)
|
||||
impl From<Arc<RenderImage>> for ImageSource {
|
||||
fn from(value: Arc<RenderImage>) -> Self {
|
||||
Self::Render(value)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -122,121 +111,6 @@ pub fn img(source: impl Into<ImageSource>) -> Img {
|
||||
}
|
||||
}
|
||||
|
||||
/// How to fit the image into the bounds of the element.
|
||||
pub enum ObjectFit {
|
||||
/// The image will be stretched to fill the bounds of the element.
|
||||
Fill,
|
||||
/// The image will be scaled to fit within the bounds of the element.
|
||||
Contain,
|
||||
/// The image will be scaled to cover the bounds of the element.
|
||||
Cover,
|
||||
/// The image will be scaled down to fit within the bounds of the element.
|
||||
ScaleDown,
|
||||
/// The image will maintain its original size.
|
||||
None,
|
||||
}
|
||||
|
||||
impl ObjectFit {
|
||||
/// Get the bounds of the image within the given bounds.
|
||||
pub fn get_bounds(
|
||||
&self,
|
||||
bounds: Bounds<Pixels>,
|
||||
image_size: Size<DevicePixels>,
|
||||
) -> Bounds<Pixels> {
|
||||
let image_size = image_size.map(|dimension| Pixels::from(u32::from(dimension)));
|
||||
let image_ratio = image_size.width / image_size.height;
|
||||
let bounds_ratio = bounds.size.width / bounds.size.height;
|
||||
|
||||
let result_bounds = match self {
|
||||
ObjectFit::Fill => bounds,
|
||||
ObjectFit::Contain => {
|
||||
let new_size = if bounds_ratio > image_ratio {
|
||||
size(
|
||||
image_size.width * (bounds.size.height / image_size.height),
|
||||
bounds.size.height,
|
||||
)
|
||||
} else {
|
||||
size(
|
||||
bounds.size.width,
|
||||
image_size.height * (bounds.size.width / image_size.width),
|
||||
)
|
||||
};
|
||||
|
||||
Bounds {
|
||||
origin: point(
|
||||
bounds.origin.x + (bounds.size.width - new_size.width) / 2.0,
|
||||
bounds.origin.y + (bounds.size.height - new_size.height) / 2.0,
|
||||
),
|
||||
size: new_size,
|
||||
}
|
||||
}
|
||||
ObjectFit::ScaleDown => {
|
||||
// Check if the image is larger than the bounds in either dimension.
|
||||
if image_size.width > bounds.size.width || image_size.height > bounds.size.height {
|
||||
// If the image is larger, use the same logic as Contain to scale it down.
|
||||
let new_size = if bounds_ratio > image_ratio {
|
||||
size(
|
||||
image_size.width * (bounds.size.height / image_size.height),
|
||||
bounds.size.height,
|
||||
)
|
||||
} else {
|
||||
size(
|
||||
bounds.size.width,
|
||||
image_size.height * (bounds.size.width / image_size.width),
|
||||
)
|
||||
};
|
||||
|
||||
Bounds {
|
||||
origin: point(
|
||||
bounds.origin.x + (bounds.size.width - new_size.width) / 2.0,
|
||||
bounds.origin.y + (bounds.size.height - new_size.height) / 2.0,
|
||||
),
|
||||
size: new_size,
|
||||
}
|
||||
} else {
|
||||
// If the image is smaller than or equal to the container, display it at its original size,
|
||||
// centered within the container.
|
||||
let original_size = size(image_size.width, image_size.height);
|
||||
Bounds {
|
||||
origin: point(
|
||||
bounds.origin.x + (bounds.size.width - original_size.width) / 2.0,
|
||||
bounds.origin.y + (bounds.size.height - original_size.height) / 2.0,
|
||||
),
|
||||
size: original_size,
|
||||
}
|
||||
}
|
||||
}
|
||||
ObjectFit::Cover => {
|
||||
let new_size = if bounds_ratio > image_ratio {
|
||||
size(
|
||||
bounds.size.width,
|
||||
image_size.height * (bounds.size.width / image_size.width),
|
||||
)
|
||||
} else {
|
||||
size(
|
||||
image_size.width * (bounds.size.height / image_size.height),
|
||||
bounds.size.height,
|
||||
)
|
||||
};
|
||||
|
||||
Bounds {
|
||||
origin: point(
|
||||
bounds.origin.x + (bounds.size.width - new_size.width) / 2.0,
|
||||
bounds.origin.y + (bounds.size.height - new_size.height) / 2.0,
|
||||
),
|
||||
size: new_size,
|
||||
}
|
||||
}
|
||||
ObjectFit::None => Bounds {
|
||||
origin: bounds.origin,
|
||||
size: image_size,
|
||||
},
|
||||
};
|
||||
|
||||
result_bounds
|
||||
}
|
||||
}
|
||||
|
||||
impl Img {
|
||||
/// A list of all format extensions currently supported by this img element
|
||||
pub fn extensions() -> &'static [&'static str] {
|
||||
@@ -291,7 +165,7 @@ impl Element for Img {
|
||||
let layout_id = self
|
||||
.interactivity
|
||||
.request_layout(global_id, cx, |mut style, cx| {
|
||||
if let Some(data) = self.source.data(cx) {
|
||||
if let Some(data) = self.source.use_data(cx) {
|
||||
if let Some(state) = &mut state {
|
||||
let frame_count = data.frame_count();
|
||||
if frame_count > 1 {
|
||||
@@ -363,7 +237,7 @@ impl Element for Img {
|
||||
.paint(global_id, bounds, hitbox.as_ref(), cx, |style, cx| {
|
||||
let corner_radii = style.corner_radii.to_pixels(bounds.size, cx.rem_size());
|
||||
|
||||
if let Some(data) = source.data(cx) {
|
||||
if let Some(data) = source.use_data(cx) {
|
||||
let new_bounds = self.object_fit.get_bounds(bounds, data.size(*frame_index));
|
||||
cx.paint_image(
|
||||
new_bounds,
|
||||
@@ -374,17 +248,6 @@ impl Element for Img {
|
||||
)
|
||||
.log_err();
|
||||
}
|
||||
|
||||
match source {
|
||||
#[cfg(target_os = "macos")]
|
||||
ImageSource::Surface(surface) => {
|
||||
let size = size(surface.width().into(), surface.height().into());
|
||||
let new_bounds = self.object_fit.get_bounds(bounds, size);
|
||||
// TODO: Add support for corner_radii and grayscale.
|
||||
cx.paint_surface(new_bounds, surface);
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
@@ -410,39 +273,74 @@ impl InteractiveElement for Img {
|
||||
}
|
||||
|
||||
impl ImageSource {
|
||||
fn data(&self, cx: &mut WindowContext) -> Option<Arc<ImageData>> {
|
||||
pub(crate) fn use_data(&self, cx: &mut WindowContext) -> Option<Arc<RenderImage>> {
|
||||
match self {
|
||||
ImageSource::Uri(_) | ImageSource::Asset(_) | ImageSource::File(_) => {
|
||||
ImageSource::Uri(_) | ImageSource::Embedded(_) | ImageSource::File(_) => {
|
||||
let uri_or_path: UriOrPath = match self {
|
||||
ImageSource::Uri(uri) => uri.clone().into(),
|
||||
ImageSource::File(path) => path.clone().into(),
|
||||
ImageSource::Asset(path) => UriOrPath::Asset(path.clone()),
|
||||
ImageSource::Embedded(path) => UriOrPath::Embedded(path.clone()),
|
||||
_ => unreachable!(),
|
||||
};
|
||||
|
||||
cx.use_cached_asset::<Image>(&uri_or_path)?.log_err()
|
||||
cx.use_asset::<ImageAsset>(&uri_or_path)?.log_err()
|
||||
}
|
||||
|
||||
ImageSource::Data(data) => Some(data.to_owned()),
|
||||
#[cfg(target_os = "macos")]
|
||||
ImageSource::Surface(_) => None,
|
||||
ImageSource::Render(data) => Some(data.to_owned()),
|
||||
ImageSource::Image(data) => cx.use_asset::<ImageDecoder>(data)?.log_err(),
|
||||
}
|
||||
}
|
||||
|
||||
/// Fetch the data associated with this source, using GPUI's asset caching
|
||||
pub async fn data(&self, cx: &mut AppContext) -> Option<Arc<RenderImage>> {
|
||||
match self {
|
||||
ImageSource::Uri(_) | ImageSource::Embedded(_) | ImageSource::File(_) => {
|
||||
let uri_or_path: UriOrPath = match self {
|
||||
ImageSource::Uri(uri) => uri.clone().into(),
|
||||
ImageSource::File(path) => path.clone().into(),
|
||||
ImageSource::Embedded(path) => UriOrPath::Embedded(path.clone()),
|
||||
_ => unreachable!(),
|
||||
};
|
||||
|
||||
cx.fetch_asset::<ImageAsset>(&uri_or_path).0.await.log_err()
|
||||
}
|
||||
|
||||
ImageSource::Render(data) => Some(data.to_owned()),
|
||||
ImageSource::Image(data) => cx.fetch_asset::<ImageDecoder>(data).0.await.log_err(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
enum Image {}
|
||||
enum ImageDecoder {}
|
||||
|
||||
impl Asset for Image {
|
||||
type Source = UriOrPath;
|
||||
type Output = Result<Arc<ImageData>, ImageCacheError>;
|
||||
impl Asset for ImageDecoder {
|
||||
type Source = Arc<Image>;
|
||||
type Output = Result<Arc<RenderImage>, Arc<anyhow::Error>>;
|
||||
|
||||
fn load(
|
||||
source: Self::Source,
|
||||
cx: &mut WindowContext,
|
||||
cx: &mut AppContext,
|
||||
) -> impl Future<Output = Self::Output> + Send + 'static {
|
||||
let result = source.to_image_data(cx).map_err(Arc::new);
|
||||
async { result }
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
enum ImageAsset {}
|
||||
|
||||
impl Asset for ImageAsset {
|
||||
type Source = UriOrPath;
|
||||
type Output = Result<Arc<RenderImage>, ImageCacheError>;
|
||||
|
||||
fn load(
|
||||
source: Self::Source,
|
||||
cx: &mut AppContext,
|
||||
) -> impl Future<Output = Self::Output> + Send + 'static {
|
||||
let client = cx.http_client();
|
||||
let scale_factor = cx.scale_factor();
|
||||
// TODO: Can we make SVGs always rescale?
|
||||
// let scale_factor = cx.scale_factor();
|
||||
let svg_renderer = cx.svg_renderer();
|
||||
let asset_source = cx.asset_source().clone();
|
||||
async move {
|
||||
@@ -461,7 +359,7 @@ impl Asset for Image {
|
||||
}
|
||||
body
|
||||
}
|
||||
UriOrPath::Asset(path) => {
|
||||
UriOrPath::Embedded(path) => {
|
||||
let data = asset_source.load(&path).ok().flatten();
|
||||
if let Some(data) = data {
|
||||
data.to_vec()
|
||||
@@ -503,15 +401,16 @@ impl Asset for Image {
|
||||
}
|
||||
};
|
||||
|
||||
ImageData::new(data)
|
||||
RenderImage::new(data)
|
||||
} else {
|
||||
let pixmap =
|
||||
svg_renderer.render_pixmap(&bytes, SvgSize::ScaleFactor(scale_factor))?;
|
||||
// TODO: Can we make svgs always rescale?
|
||||
svg_renderer.render_pixmap(&bytes, SvgSize::ScaleFactor(1.0))?;
|
||||
|
||||
let buffer =
|
||||
ImageBuffer::from_raw(pixmap.width(), pixmap.height(), pixmap.take()).unwrap();
|
||||
|
||||
ImageData::new(SmallVec::from_elem(Frame::new(buffer), 1))
|
||||
RenderImage::new(SmallVec::from_elem(Frame::new(buffer), 1))
|
||||
};
|
||||
|
||||
Ok(Arc::new(data))
|
||||
|
||||
@@ -5,6 +5,7 @@ mod deferred;
|
||||
mod div;
|
||||
mod img;
|
||||
mod list;
|
||||
mod surface;
|
||||
mod svg;
|
||||
mod text;
|
||||
mod uniform_list;
|
||||
@@ -16,6 +17,7 @@ pub use deferred::*;
|
||||
pub use div::*;
|
||||
pub use img::*;
|
||||
pub use list::*;
|
||||
pub use surface::*;
|
||||
pub use svg::*;
|
||||
pub use text::*;
|
||||
pub use uniform_list::*;
|
||||
|
||||
crates/gpui/src/elements/surface.rs (new file, 111 lines)
@@ -0,0 +1,111 @@
|
||||
use crate::{
|
||||
size, Bounds, Element, ElementId, GlobalElementId, IntoElement, LayoutId, ObjectFit, Pixels,
|
||||
Style, StyleRefinement, Styled, WindowContext,
|
||||
};
|
||||
#[cfg(target_os = "macos")]
|
||||
use media::core_video::CVImageBuffer;
|
||||
use refineable::Refineable;
|
||||
|
||||
/// A source of a surface's content.
|
||||
#[derive(Clone, Debug, PartialEq, Eq)]
|
||||
pub enum SurfaceSource {
|
||||
/// A macOS image buffer from CoreVideo
|
||||
#[cfg(target_os = "macos")]
|
||||
Surface(CVImageBuffer),
|
||||
}
|
||||
|
||||
#[cfg(target_os = "macos")]
|
||||
impl From<CVImageBuffer> for SurfaceSource {
|
||||
fn from(value: CVImageBuffer) -> Self {
|
||||
SurfaceSource::Surface(value)
|
||||
}
|
||||
}
|
||||
|
||||
/// A surface element.
|
||||
pub struct Surface {
|
||||
source: SurfaceSource,
|
||||
object_fit: ObjectFit,
|
||||
style: StyleRefinement,
|
||||
}
|
||||
|
||||
/// Create a new surface element.
|
||||
pub fn surface(source: impl Into<SurfaceSource>) -> Surface {
|
||||
Surface {
|
||||
source: source.into(),
|
||||
object_fit: ObjectFit::Contain,
|
||||
style: Default::default(),
|
||||
}
|
||||
}
|
||||
|
||||
impl Surface {
|
||||
/// Set the object fit for the image.
|
||||
pub fn object_fit(mut self, object_fit: ObjectFit) -> Self {
|
||||
self.object_fit = object_fit;
|
||||
self
|
||||
}
|
||||
}
|
||||
|
||||
impl Element for Surface {
|
||||
type RequestLayoutState = ();
|
||||
type PrepaintState = ();
|
||||
|
||||
fn id(&self) -> Option<ElementId> {
|
||||
None
|
||||
}
|
||||
|
||||
fn request_layout(
|
||||
&mut self,
|
||||
_global_id: Option<&GlobalElementId>,
|
||||
cx: &mut WindowContext,
|
||||
) -> (LayoutId, Self::RequestLayoutState) {
|
||||
let mut style = Style::default();
|
||||
style.refine(&self.style);
|
||||
let layout_id = cx.request_layout(style, []);
|
||||
(layout_id, ())
|
||||
}
|
||||
|
||||
fn prepaint(
|
||||
&mut self,
|
||||
_global_id: Option<&GlobalElementId>,
|
||||
_bounds: Bounds<Pixels>,
|
||||
_request_layout: &mut Self::RequestLayoutState,
|
||||
_cx: &mut WindowContext,
|
||||
) -> Self::PrepaintState {
|
||||
()
|
||||
}
|
||||
|
||||
fn paint(
|
||||
&mut self,
|
||||
_global_id: Option<&GlobalElementId>,
|
||||
bounds: Bounds<Pixels>,
|
||||
_: &mut Self::RequestLayoutState,
|
||||
_: &mut Self::PrepaintState,
|
||||
cx: &mut WindowContext,
|
||||
) {
|
||||
match &self.source {
|
||||
#[cfg(target_os = "macos")]
|
||||
SurfaceSource::Surface(surface) => {
|
||||
let size = size(surface.width().into(), surface.height().into());
|
||||
let new_bounds = self.object_fit.get_bounds(bounds, size);
|
||||
// TODO: Add support for corner_radii
|
||||
cx.paint_surface(new_bounds, surface.clone());
|
||||
}
|
||||
#[allow(unreachable_patterns)]
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl IntoElement for Surface {
|
||||
type Element = Self;
|
||||
|
||||
fn into_element(self) -> Self::Element {
|
||||
self
|
||||
}
|
||||
}
|
||||
|
||||
impl Styled for Surface {
|
||||
fn style(&mut self) -> &mut StyleRefinement {
|
||||
&mut self.style
|
||||
}
|
||||
}
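With this element, surface rendering goes through `surface(...)` rather than the removed `ImageSource::Surface` variant. A minimal usage sketch on macOS, assuming `surface` and `ObjectFit` are re-exported from the gpui crate root and that you already have a `CVImageBuffer` from the media layer:

```rust
#[cfg(target_os = "macos")]
fn render_video_frame(frame: media::core_video::CVImageBuffer) -> impl gpui::IntoElement {
    // `surface` wraps the buffer; ObjectFit defaults to Contain and is overridden here.
    gpui::surface(frame).object_fit(gpui::ObjectFit::Fill)
}
```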
|
||||
@@ -2447,10 +2447,24 @@ impl From<usize> for Pixels {
|
||||
/// affected by the device's scale factor, `DevicePixels` always correspond to real pixels on the
|
||||
/// display.
|
||||
#[derive(
|
||||
Add, AddAssign, Clone, Copy, Default, Div, Eq, Hash, Ord, PartialEq, PartialOrd, Sub, SubAssign,
|
||||
Add,
|
||||
AddAssign,
|
||||
Clone,
|
||||
Copy,
|
||||
Default,
|
||||
Div,
|
||||
Eq,
|
||||
Hash,
|
||||
Ord,
|
||||
PartialEq,
|
||||
PartialOrd,
|
||||
Sub,
|
||||
SubAssign,
|
||||
Serialize,
|
||||
Deserialize,
|
||||
)]
|
||||
#[repr(transparent)]
|
||||
pub struct DevicePixels(pub(crate) i32);
|
||||
pub struct DevicePixels(pub i32);
|
||||
|
||||
impl DevicePixels {
|
||||
/// Converts the `DevicePixels` value to the number of bytes needed to represent it in memory.
|
||||
|
||||
@@ -20,21 +20,25 @@ mod test;
mod windows;

use crate::{
    point, Action, AnyWindowHandle, AsyncWindowContext, BackgroundExecutor, Bounds, DevicePixels,
    DispatchEventResult, Font, FontId, FontMetrics, FontRun, ForegroundExecutor, GPUSpecs, GlyphId,
    Keymap, LineLayout, Pixels, PlatformInput, Point, RenderGlyphParams, RenderImageParams,
    RenderSvgParams, Scene, SharedString, Size, Task, TaskLabel, WindowContext,
    DEFAULT_WINDOW_SIZE,
    point, Action, AnyWindowHandle, AppContext, AsyncWindowContext, BackgroundExecutor, Bounds,
    DevicePixels, DispatchEventResult, Font, FontId, FontMetrics, FontRun, ForegroundExecutor,
    GPUSpecs, GlyphId, ImageSource, Keymap, LineLayout, Pixels, PlatformInput, Point,
    RenderGlyphParams, RenderImage, RenderImageParams, RenderSvgParams, Scene, SharedString, Size,
    SvgSize, Task, TaskLabel, WindowContext, DEFAULT_WINDOW_SIZE,
};
use anyhow::Result;
use anyhow::{anyhow, Result};
use async_task::Runnable;
use futures::channel::oneshot;
use image::codecs::gif::GifDecoder;
use image::{AnimationDecoder as _, Frame};
use parking::Unparker;
use raw_window_handle::{HasDisplayHandle, HasWindowHandle};
use seahash::SeaHasher;
use serde::{Deserialize, Serialize};
use smallvec::SmallVec;
use std::borrow::Cow;
use std::hash::{Hash, Hasher};
use std::io::Cursor;
use std::time::{Duration, Instant};
use std::{
    fmt::{self, Debug},
@@ -43,6 +47,7 @@ use std::{
    rc::Rc,
    sync::Arc,
};
use strum::EnumIter;
use uuid::Uuid;

pub use app_menu::*;
@@ -969,12 +974,227 @@ impl Default for CursorStyle {
/// A clipboard item that should be copied to the clipboard
#[derive(Clone, Debug, Eq, PartialEq)]
pub struct ClipboardItem {
    entries: Vec<ClipboardEntry>,
}

/// Either a ClipboardString or a ClipboardImage
#[derive(Clone, Debug, Eq, PartialEq)]
pub enum ClipboardEntry {
    /// A string entry
    String(ClipboardString),
    /// An image entry
    Image(Image),
}

impl ClipboardItem {
    /// Create a new ClipboardItem with the given entries
    pub fn new(entries: Vec<ClipboardEntry>) -> Self {
        Self { entries }
    }

    /// Create a new ClipboardItem::String with no associated metadata
    pub fn new_string(text: String) -> Self {
        Self {
            entries: vec![ClipboardEntry::String(ClipboardString::new(text))],
        }
    }

    /// Create a new ClipboardItem::String with the given text and associated metadata
    pub fn new_string_with_metadata<T: Serialize>(text: String, metadata: T) -> Self {
        Self {
            entries: vec![ClipboardEntry::String(
                ClipboardString::new(text).with_metadata(metadata),
            )],
        }
    }

    /// Concatenates together all the ClipboardString entries in the item.
    /// Returns None if there were no ClipboardString entries.
    pub fn text(&self) -> Option<String> {
        let mut answer = String::new();
        let mut any_entries = false;

        for entry in self.entries.iter() {
            if let ClipboardEntry::String(ClipboardString { text, metadata: _ }) = entry {
                answer.push_str(text);
                any_entries = true;
            }
        }

        if any_entries {
            Some(answer)
        } else {
            None
        }
    }

    /// Get the item's entries
    pub fn entries(&self) -> &[ClipboardEntry] {
        &self.entries
    }

    /// Get owned versions of the item's entries
    pub fn into_entries(self) -> impl Iterator<Item = ClipboardEntry> {
        self.entries.into_iter()
    }
}
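An illustrative sketch of the new API (not part of the diff), assuming `ClipboardEntry`, `ClipboardString`, and `Image` are re-exported from the crate root as the other changes suggest:

```rust
use gpui::{ClipboardEntry, ClipboardItem, ClipboardString, Image};

// Build a mixed clipboard item and read back just its text.
fn caption_and_image(image: Image) -> ClipboardItem {
    let item = ClipboardItem::new(vec![
        ClipboardEntry::String(ClipboardString::new("figure 1: ".to_string())),
        ClipboardEntry::Image(image),
    ]);
    // `text()` concatenates only the string entries.
    assert_eq!(item.text().as_deref(), Some("figure 1: "));
    item
}
```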
/// One of the editor's supported image formats (e.g. PNG, JPEG) - used when dealing with images in the clipboard
#[derive(Clone, Copy, Debug, Eq, PartialEq, EnumIter, Hash)]
pub enum ImageFormat {
    // Sorted from most to least likely to be pasted into an editor,
    // which matters when we iterate through them trying to see if
    // clipboard content matches them.
    /// .png
    Png,
    /// .jpeg or .jpg
    Jpeg,
    /// .webp
    Webp,
    /// .gif
    Gif,
    /// .svg
    Svg,
    /// .bmp
    Bmp,
    /// .tif or .tiff
    Tiff,
}

/// An image, with a format and certain bytes
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct Image {
    /// The image format the bytes represent (e.g. PNG)
    format: ImageFormat,
    /// The raw image bytes
    bytes: Vec<u8>,
    id: u64,
}

impl Hash for Image {
    fn hash<H: Hasher>(&self, state: &mut H) {
        state.write_u64(self.id);
    }
}
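Because `ImageFormat` derives `EnumIter` and its variants are ordered by paste likelihood, the platform backends can probe clipboard types in that order and stop at the first match. A rough sketch of the pattern (the `has_type` predicate is a stand-in for the platform-specific check):

```rust
use strum::IntoEnumIterator;

use gpui::ImageFormat;

// Return the first format whose pasteboard type is present on the clipboard.
fn first_matching_format(has_type: impl Fn(ImageFormat) -> bool) -> Option<ImageFormat> {
    ImageFormat::iter().find(|format| has_type(*format))
}
```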
impl Image {
    /// Get this image's ID
    pub fn id(&self) -> u64 {
        self.id
    }

    /// Use the GPUI `use_asset` API to make this image renderable
    pub fn use_render_image(self: Arc<Self>, cx: &mut WindowContext) -> Option<Arc<RenderImage>> {
        ImageSource::Image(self).use_data(cx)
    }

    /// Convert a `RenderImage` to a clipboard image in PNG format.
    pub fn from_image_data(render_image: &RenderImage) -> Result<Self> {
        // hardcode frame index 0; we don't currently support copying animated GIFs
        if let Some(input_bytes) = render_image.as_bytes(0) {
            let rgba_bytes = {
                let mut buf = input_bytes.to_vec();

                // Convert from BGRA to RGBA.
                for pixel in buf.chunks_exact_mut(4) {
                    pixel.swap(0, 2);
                }

                buf
            };

            // Rebuild an RgbaImage from the raw frame bytes so it can be PNG-encoded.
            let frame_size = render_image.size(0);
            let image_buffer = image::RgbaImage::from_raw(
                frame_size.width.0 as u32,
                frame_size.height.0 as u32,
                rgba_bytes,
            )
            .ok_or_else(|| anyhow!("frame 0 bytes did not match its reported dimensions"))?;

            let mut cursor = Cursor::new(Vec::with_capacity(image_buffer.len()));
            image::DynamicImage::ImageRgba8(image_buffer)
                .write_to(&mut cursor, image::ImageOutputFormat::Png)?;

            Ok(Self {
                format: ImageFormat::Png,
                bytes: cursor.into_inner(),
                id: rand::random(),
            })
        } else {
            Err(anyhow!(
                "RenderImage did not have a frame 0, which should never happen."
            ))
        }
    }

    /// Convert the clipboard image to a `RenderImage`.
    pub fn to_image_data(&self, cx: &AppContext) -> Result<Arc<RenderImage>> {
        fn frames_for_image(
            bytes: &[u8],
            format: image::ImageFormat,
        ) -> Result<SmallVec<[Frame; 1]>> {
            let mut data = image::load_from_memory_with_format(bytes, format)?.into_rgba8();

            // Convert from RGBA to BGRA.
            for pixel in data.chunks_exact_mut(4) {
                pixel.swap(0, 2);
            }

            Ok(SmallVec::from_elem(Frame::new(data), 1))
        }

        let frames = match self.format {
            ImageFormat::Gif => {
                let decoder = GifDecoder::new(Cursor::new(&self.bytes))?;
                let mut frames = SmallVec::new();

                for frame in decoder.into_frames() {
                    let mut frame = frame?;
                    // Convert from RGBA to BGRA.
                    for pixel in frame.buffer_mut().chunks_exact_mut(4) {
                        pixel.swap(0, 2);
                    }
                    frames.push(frame);
                }

                frames
            }
            ImageFormat::Png => frames_for_image(&self.bytes, image::ImageFormat::Png)?,
            ImageFormat::Jpeg => frames_for_image(&self.bytes, image::ImageFormat::Jpeg)?,
            ImageFormat::Webp => frames_for_image(&self.bytes, image::ImageFormat::WebP)?,
            ImageFormat::Bmp => frames_for_image(&self.bytes, image::ImageFormat::Bmp)?,
            ImageFormat::Tiff => frames_for_image(&self.bytes, image::ImageFormat::Tiff)?,
            ImageFormat::Svg => {
                // TODO: Fix this
                let pixmap = cx
                    .svg_renderer()
                    .render_pixmap(&self.bytes, SvgSize::ScaleFactor(1.0))?;

                let buffer =
                    image::ImageBuffer::from_raw(pixmap.width(), pixmap.height(), pixmap.take())
                        .unwrap();

                SmallVec::from_elem(Frame::new(buffer), 1)
            }
        };

        Ok(Arc::new(RenderImage::new(frames)))
    }

    /// Get the format of the clipboard image
    pub fn format(&self) -> ImageFormat {
        self.format
    }

    /// Get the raw bytes of the clipboard image
    pub fn bytes(&self) -> &[u8] {
        self.bytes.as_slice()
    }
}
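A hedged sketch of how the two conversions above compose (not part of the diff): decode a pasted clipboard `Image` into a `RenderImage` for display, or snapshot a rendered image back into a PNG clipboard entry.

```rust
use std::sync::Arc;

use gpui::{AppContext, Image, RenderImage};

// Decode a pasted image for rendering (frames are produced in BGRA order).
fn decode_for_display(image: &Image, cx: &AppContext) -> anyhow::Result<Arc<RenderImage>> {
    image.to_image_data(cx)
}

// Snapshot frame 0 of a rendered image back into a PNG clipboard image.
fn snapshot_for_copy(render_image: &RenderImage) -> anyhow::Result<Image> {
    Image::from_image_data(render_image)
}
```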
/// A clipboard item that should be copied to the clipboard
#[derive(Clone, Debug, Eq, PartialEq)]
pub struct ClipboardString {
    pub(crate) text: String,
    pub(crate) metadata: Option<String>,
}

impl ClipboardItem {
    /// Create a new clipboard item with the given text
impl ClipboardString {
    /// Create a new clipboard string with the given text
    pub fn new(text: String) -> Self {
        Self {
            text,
@@ -982,18 +1202,23 @@ impl ClipboardItem {
        }
    }

    /// Create a new clipboard item with the given text and metadata
    /// Return a new clipboard item with the metadata replaced by the given metadata
    pub fn with_metadata<T: Serialize>(mut self, metadata: T) -> Self {
        self.metadata = Some(serde_json::to_string(&metadata).unwrap());
        self
    }

    /// Get the text of the clipboard item
    /// Get the text of the clipboard string
    pub fn text(&self) -> &String {
        &self.text
    }

    /// Get the metadata of the clipboard item
    /// Get the owned text of the clipboard string
    pub fn into_text(self) -> String {
        self.text
    }

    /// Get the metadata of the clipboard string
    pub fn metadata<T>(&self) -> Option<T>
    where
        T: for<'a> Deserialize<'a>,
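An illustrative round-trip of the metadata API above (the `LinkMetadata` type is hypothetical, introduced only for the example): metadata is serialized to JSON when attached and deserialized again on read.

```rust
use gpui::ClipboardString;
use serde::{Deserialize, Serialize};

#[derive(Serialize, Deserialize, Debug, PartialEq)]
struct LinkMetadata {
    url: String,
}

// Attach structured metadata to a clipboard string and verify it can be read back.
fn tag_with_link(text: String, url: String) -> ClipboardString {
    let string = ClipboardString::new(text).with_metadata(LinkMetadata { url });
    assert!(string.metadata::<LinkMetadata>().is_some());
    string
}
```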
@@ -145,7 +145,7 @@ impl Clipboard {

        match unsafe { read_fd(fd) } {
            Ok(v) => {
                self.cached_read = Some(ClipboardItem::new(v));
                self.cached_read = Some(ClipboardItem::new_string(v));
                self.cached_read.clone()
            }
            Err(err) => {
@@ -177,7 +177,7 @@ impl Clipboard {

        match unsafe { read_fd(fd) } {
            Ok(v) => {
                self.cached_primary_read = Some(ClipboardItem::new(v.clone()));
                self.cached_primary_read = Some(ClipboardItem::new_string(v.clone()));
                self.cached_primary_read.clone()
            }
            Err(err) => {
@@ -16,6 +16,7 @@ use metal_renderer as renderer;
#[cfg(feature = "macos-blade")]
use crate::platform::blade as renderer;

mod attributed_string;
mod open_type;
mod platform;
mod text_system;

121 crates/gpui/src/platform/mac/attributed_string.rs Normal file
@@ -0,0 +1,121 @@
|
||||
use cocoa::base::id;
|
||||
use cocoa::foundation::NSRange;
|
||||
use objc::{class, msg_send, sel, sel_impl};
|
||||
|
||||
/// The `cocoa` crate does not define NSAttributedString (and related Cocoa classes),
|
||||
/// which are needed for copying rich text (that is, text intermingled with images)
|
||||
/// to the clipboard. This adds access to those APIs.
|
||||
|
||||
#[allow(non_snake_case)]
|
||||
pub trait NSAttributedString: Sized {
|
||||
unsafe fn alloc(_: Self) -> id {
|
||||
msg_send![class!(NSAttributedString), alloc]
|
||||
}
|
||||
|
||||
unsafe fn init_attributed_string(self, string: id) -> id;
|
||||
unsafe fn appendAttributedString_(self, attr_string: id);
|
||||
unsafe fn RTFDFromRange_documentAttributes_(self, range: NSRange, attrs: id) -> id;
|
||||
unsafe fn RTFFromRange_documentAttributes_(self, range: NSRange, attrs: id) -> id;
|
||||
unsafe fn string(self) -> id;
|
||||
}
|
||||
|
||||
impl NSAttributedString for id {
|
||||
unsafe fn init_attributed_string(self, string: id) -> id {
|
||||
msg_send![self, initWithString: string]
|
||||
}
|
||||
|
||||
unsafe fn appendAttributedString_(self, attr_string: id) {
|
||||
let _: () = msg_send![self, appendAttributedString: attr_string];
|
||||
}
|
||||
|
||||
unsafe fn RTFDFromRange_documentAttributes_(self, range: NSRange, attrs: id) -> id {
|
||||
msg_send![self, RTFDFromRange: range documentAttributes: attrs]
|
||||
}
|
||||
|
||||
unsafe fn RTFFromRange_documentAttributes_(self, range: NSRange, attrs: id) -> id {
|
||||
msg_send![self, RTFFromRange: range documentAttributes: attrs]
|
||||
}
|
||||
|
||||
unsafe fn string(self) -> id {
|
||||
msg_send![self, string]
|
||||
}
|
||||
}
|
||||
|
||||
#[allow(non_snake_case)]
|
||||
pub trait NSTextAttachment: Sized {
|
||||
unsafe fn alloc(_: Self) -> id {
|
||||
msg_send![class!(NSTextAttachment), alloc]
|
||||
}
|
||||
}
|
||||
|
||||
impl NSTextAttachment for id {}
|
||||
|
||||
pub trait NSMutableAttributedString: NSAttributedString {
|
||||
unsafe fn alloc(_: Self) -> id {
|
||||
msg_send![class!(NSMutableAttributedString), alloc]
|
||||
}
|
||||
}
|
||||
|
||||
impl NSMutableAttributedString for id {}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use cocoa::appkit::NSImage;
|
||||
use cocoa::base::nil;
|
||||
use cocoa::foundation::NSString;
|
||||
|
||||
#[test]
|
||||
fn test_nsattributed_string() {
|
||||
unsafe {
|
||||
let image: id = msg_send![class!(NSImage), alloc];
|
||||
image.initWithContentsOfFile_(
|
||||
NSString::alloc(nil).init_str("/Users/rtfeldman/Downloads/test.jpeg"),
|
||||
);
|
||||
let size = image.size();
|
||||
|
||||
let string = NSString::alloc(nil).init_str("Test String");
|
||||
let attr_string = NSMutableAttributedString::alloc(nil).init_attributed_string(string);
|
||||
let hello_string = NSString::alloc(nil).init_str("Hello World");
|
||||
let hello_attr_string =
|
||||
NSAttributedString::alloc(nil).init_attributed_string(hello_string);
|
||||
attr_string.appendAttributedString_(hello_attr_string);
|
||||
|
||||
let attachment = NSTextAttachment::alloc(nil);
|
||||
let _: () = msg_send![attachment, setImage: image];
|
||||
let image_attr_string =
|
||||
msg_send![class!(NSAttributedString), attributedStringWithAttachment: attachment];
|
||||
attr_string.appendAttributedString_(image_attr_string);
|
||||
|
||||
let another_string = NSString::alloc(nil).init_str("Another String");
|
||||
let another_attr_string =
|
||||
NSAttributedString::alloc(nil).init_attributed_string(another_string);
|
||||
attr_string.appendAttributedString_(another_attr_string);
|
||||
|
||||
let len: cocoa::foundation::NSUInteger = msg_send![attr_string, length];
|
||||
|
||||
///////////////////////////////////////////////////
|
||||
// pasteboard.clearContents();
|
||||
|
||||
let rtfd_data = attr_string.RTFDFromRange_documentAttributes_(
|
||||
NSRange::new(0, msg_send![attr_string, length]),
|
||||
nil,
|
||||
);
|
||||
assert_ne!(rtfd_data, nil);
|
||||
// if rtfd_data != nil {
|
||||
// pasteboard.setData_forType(rtfd_data, NSPasteboardTypeRTFD);
|
||||
// }
|
||||
|
||||
// let rtf_data = attributed_string.RTFFromRange_documentAttributes_(
|
||||
// NSRange::new(0, attributed_string.length()),
|
||||
// nil,
|
||||
// );
|
||||
// if rtf_data != nil {
|
||||
// pasteboard.setData_forType(rtf_data, NSPasteboardTypeRTF);
|
||||
// }
|
||||
|
||||
// let plain_text = attributed_string.string();
|
||||
// pasteboard.setString_forType(plain_text, NSPasteboardTypeString);
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,8 +1,8 @@
|
||||
use super::metal_atlas::MetalAtlas;
|
||||
use crate::{
|
||||
point, size, AtlasTextureId, AtlasTextureKind, AtlasTile, Bounds, ContentMask, DevicePixels,
|
||||
Hsla, MonochromeSprite, Path, PathId, PathVertex, PolychromeSprite, PrimitiveBatch, Quad,
|
||||
ScaledPixels, Scene, Shadow, Size, Surface, Underline,
|
||||
Hsla, MonochromeSprite, PaintSurface, Path, PathId, PathVertex, PolychromeSprite,
|
||||
PrimitiveBatch, Quad, ScaledPixels, Scene, Shadow, Size, Surface, Underline,
|
||||
};
|
||||
use anyhow::{anyhow, Result};
|
||||
use block::ConcreteBlock;
|
||||
@@ -1020,7 +1020,7 @@ impl MetalRenderer {
|
||||
|
||||
fn draw_surfaces(
|
||||
&mut self,
|
||||
surfaces: &[Surface],
|
||||
surfaces: &[PaintSurface],
|
||||
instance_buffer: &mut InstanceBuffer,
|
||||
instance_offset: &mut usize,
|
||||
viewport_size: Size<DevicePixels>,
|
||||
|
||||
@@ -1,8 +1,13 @@
|
||||
use super::{events::key_to_native, BoolExt};
|
||||
use super::{
|
||||
attributed_string::{NSAttributedString, NSMutableAttributedString, NSTextAttachment},
|
||||
events::key_to_native,
|
||||
BoolExt,
|
||||
};
|
||||
use crate::{
|
||||
Action, AnyWindowHandle, BackgroundExecutor, ClipboardItem, CursorStyle, ForegroundExecutor,
|
||||
Keymap, MacDispatcher, MacDisplay, MacTextSystem, MacWindow, Menu, MenuItem, PathPromptOptions,
|
||||
Platform, PlatformDisplay, PlatformTextSystem, PlatformWindow, Result, SemanticVersion, Task,
|
||||
hash, Action, AnyWindowHandle, BackgroundExecutor, ClipboardEntry, ClipboardItem,
|
||||
ClipboardString, CursorStyle, ForegroundExecutor, Image, ImageFormat, Keymap, MacDispatcher,
|
||||
MacDisplay, MacTextSystem, MacWindow, Menu, MenuItem, PathPromptOptions, Platform,
|
||||
PlatformDisplay, PlatformTextSystem, PlatformWindow, Result, SemanticVersion, Task,
|
||||
WindowAppearance, WindowParams,
|
||||
};
|
||||
use anyhow::anyhow;
|
||||
@@ -10,17 +15,18 @@ use block::ConcreteBlock;
|
||||
use cocoa::{
|
||||
appkit::{
|
||||
NSApplication, NSApplicationActivationPolicy::NSApplicationActivationPolicyRegular,
|
||||
NSEventModifierFlags, NSMenu, NSMenuItem, NSModalResponse, NSOpenPanel, NSPasteboard,
|
||||
NSPasteboardTypeString, NSSavePanel, NSWindow,
|
||||
NSEventModifierFlags, NSImage, NSMenu, NSMenuItem, NSModalResponse, NSOpenPanel,
|
||||
NSPasteboard, NSPasteboardTypePNG, NSPasteboardTypeRTF, NSPasteboardTypeRTFD,
|
||||
NSPasteboardTypeString, NSPasteboardTypeTIFF, NSSavePanel, NSWindow,
|
||||
},
|
||||
base::{id, nil, selector, BOOL, YES},
|
||||
foundation::{
|
||||
NSArray, NSAutoreleasePool, NSBundle, NSData, NSInteger, NSProcessInfo, NSString,
|
||||
NSArray, NSAutoreleasePool, NSBundle, NSData, NSInteger, NSProcessInfo, NSRange, NSString,
|
||||
NSUInteger, NSURL,
|
||||
},
|
||||
};
|
||||
use core_foundation::{
|
||||
base::{CFRelease, CFType, CFTypeRef, OSStatus, TCFType as _},
|
||||
base::{CFRelease, CFType, CFTypeRef, OSStatus, TCFType},
|
||||
boolean::CFBoolean,
|
||||
data::CFData,
|
||||
dictionary::{CFDictionary, CFDictionaryRef, CFMutableDictionary},
|
||||
@@ -50,6 +56,7 @@ use std::{
|
||||
slice, str,
|
||||
sync::Arc,
|
||||
};
|
||||
use strum::IntoEnumIterator;
|
||||
|
||||
use super::renderer;
|
||||
|
||||
@@ -421,7 +428,7 @@ impl Platform for MacPlatform {
|
||||
pool.drain();
|
||||
|
||||
(*app).set_ivar(MAC_PLATFORM_IVAR, null_mut::<c_void>());
|
||||
(*app.delegate()).set_ivar(MAC_PLATFORM_IVAR, null_mut::<c_void>());
|
||||
(*NSWindow::delegate(app)).set_ivar(MAC_PLATFORM_IVAR, null_mut::<c_void>());
|
||||
}
|
||||
}
|
||||
|
||||
@@ -749,7 +756,7 @@ impl Platform for MacPlatform {
|
||||
let app: id = msg_send![APP_CLASS, sharedApplication];
|
||||
let mut state = self.0.lock();
|
||||
let actions = &mut state.menu_actions;
|
||||
app.setMainMenu_(self.create_menu_bar(menus, app.delegate(), actions, keymap));
|
||||
app.setMainMenu_(self.create_menu_bar(menus, NSWindow::delegate(app), actions, keymap));
|
||||
}
|
||||
}
|
||||
|
||||
@@ -758,7 +765,7 @@ impl Platform for MacPlatform {
|
||||
let app: id = msg_send![APP_CLASS, sharedApplication];
|
||||
let mut state = self.0.lock();
|
||||
let actions = &mut state.menu_actions;
|
||||
let new = self.create_dock_menu(menu, app.delegate(), actions, keymap);
|
||||
let new = self.create_dock_menu(menu, NSWindow::delegate(app), actions, keymap);
|
||||
if let Some(old) = state.dock_menu.replace(new) {
|
||||
CFRelease(old as _)
|
||||
}
|
||||
@@ -851,79 +858,212 @@ impl Platform for MacPlatform {
|
||||
}
|
||||
|
||||
fn write_to_clipboard(&self, item: ClipboardItem) {
|
||||
let state = self.0.lock();
|
||||
use crate::ClipboardEntry;
|
||||
|
||||
unsafe {
|
||||
state.pasteboard.clearContents();
|
||||
if item.entries.len() <= 1 {
|
||||
// With zero or one entries we can write the entry directly, without building an attributed string.
match item.entries.first() {
|
||||
Some(entry) => match entry {
|
||||
ClipboardEntry::String(string) => {
|
||||
self.write_plaintext_to_clipboard(string);
|
||||
}
|
||||
ClipboardEntry::Image(image) => {
|
||||
self.write_image_to_clipboard(image);
|
||||
}
|
||||
},
|
||||
None => {
|
||||
// Writing an empty list of entries just clears the clipboard.
let state = self.0.lock();
|
||||
state.pasteboard.clearContents();
|
||||
}
|
||||
}
|
||||
} else {
|
||||
let mut any_images = false;
|
||||
let attributed_string = {
|
||||
let mut buf = NSMutableAttributedString::alloc(nil)
|
||||
// TODO can we skip this? Or at least part of it?
|
||||
.init_attributed_string(NSString::alloc(nil).init_str(""));
|
||||
|
||||
let text_bytes = NSData::dataWithBytes_length_(
|
||||
nil,
|
||||
item.text.as_ptr() as *const c_void,
|
||||
item.text.len() as u64,
|
||||
);
|
||||
state
|
||||
.pasteboard
|
||||
.setData_forType(text_bytes, NSPasteboardTypeString);
|
||||
for entry in item.entries {
|
||||
let to_append;
|
||||
|
||||
if let Some(metadata) = item.metadata.as_ref() {
|
||||
let hash_bytes = ClipboardItem::text_hash(&item.text).to_be_bytes();
|
||||
let hash_bytes = NSData::dataWithBytes_length_(
|
||||
nil,
|
||||
hash_bytes.as_ptr() as *const c_void,
|
||||
hash_bytes.len() as u64,
|
||||
);
|
||||
match entry {
|
||||
ClipboardEntry::String(ClipboardString { text, metadata: _ }) => {
|
||||
to_append = NSAttributedString::alloc(nil)
|
||||
.init_attributed_string(NSString::alloc(nil).init_str(&text));
|
||||
}
|
||||
ClipboardEntry::Image(Image { format, bytes, id }) => {
|
||||
use cocoa::appkit::NSImage;
|
||||
|
||||
any_images = true;
|
||||
|
||||
// Create an attachment from the image
|
||||
let attachment = {
|
||||
// Initialize the NSImage
|
||||
let image = {
|
||||
let image: id = msg_send![class!(NSImage), alloc];
|
||||
|
||||
NSImage::initWithContentsOfFile_(
|
||||
image,
|
||||
NSString::alloc(nil)
|
||||
.init_str("/Users/rtfeldman/Downloads/test.jpeg"), // TODO read from clipboard bytes
|
||||
);
|
||||
};
|
||||
|
||||
let attachment = NSTextAttachment::alloc(nil);
|
||||
let _: () = msg_send![attachment, setImage: image];
|
||||
attachment
|
||||
};
|
||||
|
||||
// Make a NSAttributedString with the attachment
|
||||
to_append = msg_send![class!(NSAttributedString), attributedStringWithAttachment: attachment];
|
||||
}
|
||||
}
|
||||
|
||||
/*
|
||||
|
||||
|
||||
|
||||
|
||||
{
|
||||
// write some images and text to the clip
|
||||
// let image1 = ns_image(&bytes).unwrap();
|
||||
// let image2 = ns_image(&bytes).unwrap();
|
||||
let image1 = NSImage::initWithPasteboard_(nil, pasteboard);
|
||||
let image2 = NSImage::initWithPasteboard_(nil, pasteboard);
|
||||
// let attributed_string = text_and_images([image1, image2]);
|
||||
// text_and_images([NSImage::initWithPasteboard_(, pasteboard), ns_image(&bytes).unwrap()]);
|
||||
let attributed_string = {
|
||||
use cocoa::appkit::NSImage;
|
||||
|
||||
let image: id = msg_send![class!(NSImage), alloc];
|
||||
NSImage::initWithContentsOfFile_(
|
||||
image,
|
||||
NSString::alloc(nil).init_str("/Users/rtfeldman/Downloads/test.jpeg"),
|
||||
);
|
||||
let size = image.size();
|
||||
|
||||
let string = NSString::alloc(nil).init_str("Test String");
|
||||
let attr_string =
|
||||
NSMutableAttributedString::alloc(nil).init_attributed_string(string);
|
||||
let hello_string = NSString::alloc(nil).init_str("Hello World");
|
||||
let hello_attr_string =
|
||||
NSAttributedString::alloc(nil).init_attributed_string(hello_string);
|
||||
attr_string.appendAttributedString_(hello_attr_string);
|
||||
|
||||
let attachment = NSTextAttachment::alloc(nil);
|
||||
let _: () = msg_send![attachment, setImage: image];
|
||||
let image_attr_string = msg_send![class!(NSAttributedString), attributedStringWithAttachment: attachment];
|
||||
attr_string.appendAttributedString_(image_attr_string);
|
||||
|
||||
let another_string = NSString::alloc(nil).init_str("Another String");
|
||||
let another_attr_string =
|
||||
NSAttributedString::alloc(nil).init_attributed_string(another_string);
|
||||
attr_string.appendAttributedString_(another_attr_string);
|
||||
|
||||
attr_string
|
||||
};
|
||||
|
||||
pasteboard.clearContents();
|
||||
|
||||
let rtfd_data = attributed_string.RTFDFromRange_documentAttributes_(
|
||||
NSRange::new(0, msg_send![attributed_string, length]),
|
||||
nil,
|
||||
);
|
||||
if rtfd_data != nil {
|
||||
pasteboard.setData_forType(rtfd_data, NSPasteboardTypeRTFD);
|
||||
}
|
||||
|
||||
// let rtf_data = attributed_string.RTFFromRange_documentAttributes_(
|
||||
// NSRange::new(0, attributed_string.length()),
|
||||
// nil,
|
||||
// );
|
||||
// if rtf_data != nil {
|
||||
// pasteboard.setData_forType(rtf_data, NSPasteboardTypeRTF);
|
||||
// }
|
||||
|
||||
// let plain_text = attributed_string.string();
|
||||
// pasteboard.setString_forType(plain_text, NSPasteboardTypeString);
|
||||
}
|
||||
|
||||
*/
|
||||
|
||||
buf.appendAttributedString_(to_append);
|
||||
}
|
||||
|
||||
buf
|
||||
};
|
||||
|
||||
let state = self.0.lock();
|
||||
state.pasteboard.clearContents();
|
||||
|
||||
// Only set rich text clipboard types if we actually have 1+ images to include.
|
||||
if any_images {
|
||||
let rtfd_data = attributed_string.RTFDFromRange_documentAttributes_(
|
||||
NSRange::new(0, msg_send![attributed_string, length]),
|
||||
nil,
|
||||
);
|
||||
if rtfd_data != nil {
|
||||
state
|
||||
.pasteboard
|
||||
.setData_forType(rtfd_data, NSPasteboardTypeRTFD);
|
||||
}
|
||||
|
||||
let rtf_data = attributed_string.RTFFromRange_documentAttributes_(
|
||||
NSRange::new(0, attributed_string.length()),
|
||||
nil,
|
||||
);
|
||||
if rtf_data != nil {
|
||||
state
|
||||
.pasteboard
|
||||
.setData_forType(rtf_data, NSPasteboardTypeRTF);
|
||||
}
|
||||
}
|
||||
|
||||
let plain_text = attributed_string.string();
|
||||
state
|
||||
.pasteboard
|
||||
.setData_forType(hash_bytes, state.text_hash_pasteboard_type);
|
||||
|
||||
let metadata_bytes = NSData::dataWithBytes_length_(
|
||||
nil,
|
||||
metadata.as_ptr() as *const c_void,
|
||||
metadata.len() as u64,
|
||||
);
|
||||
state
|
||||
.pasteboard
|
||||
.setData_forType(metadata_bytes, state.metadata_pasteboard_type);
|
||||
.setString_forType(plain_text, NSPasteboardTypeString);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn read_from_clipboard(&self) -> Option<ClipboardItem> {
|
||||
let state = self.0.lock();
|
||||
unsafe {
|
||||
if let Some(text_bytes) =
|
||||
self.read_from_pasteboard(state.pasteboard, NSPasteboardTypeString)
|
||||
{
|
||||
let text = String::from_utf8_lossy(text_bytes).to_string();
|
||||
let hash_bytes = self
|
||||
.read_from_pasteboard(state.pasteboard, state.text_hash_pasteboard_type)
|
||||
.and_then(|bytes| bytes.try_into().ok())
|
||||
.map(u64::from_be_bytes);
|
||||
let metadata_bytes = self
|
||||
.read_from_pasteboard(state.pasteboard, state.metadata_pasteboard_type)
|
||||
.and_then(|bytes| String::from_utf8(bytes.to_vec()).ok());
|
||||
let pasteboard = state.pasteboard;
|
||||
|
||||
if let Some((hash, metadata)) = hash_bytes.zip(metadata_bytes) {
|
||||
if hash == ClipboardItem::text_hash(&text) {
|
||||
Some(ClipboardItem {
|
||||
text,
|
||||
metadata: Some(metadata),
|
||||
})
|
||||
} else {
|
||||
Some(ClipboardItem {
|
||||
text,
|
||||
metadata: None,
|
||||
})
|
||||
}
|
||||
// First, see if it's a string.
|
||||
unsafe {
|
||||
let types: id = pasteboard.types();
|
||||
let string_type: id = ns_string("public.utf8-plain-text");
|
||||
|
||||
if msg_send![types, containsObject: string_type] {
|
||||
let data = pasteboard.dataForType(string_type);
|
||||
if data == nil {
|
||||
return None;
|
||||
} else if data.bytes().is_null() {
|
||||
// https://developer.apple.com/documentation/foundation/nsdata/1410616-bytes?language=objc
|
||||
// "If the length of the NSData object is 0, this property returns nil."
|
||||
return Some(self.read_string_from_clipboard(&state, &[]));
|
||||
} else {
|
||||
Some(ClipboardItem {
|
||||
text,
|
||||
metadata: None,
|
||||
})
|
||||
let bytes =
|
||||
slice::from_raw_parts(data.bytes() as *mut u8, data.length() as usize);
|
||||
|
||||
return Some(self.read_string_from_clipboard(&state, bytes));
|
||||
}
|
||||
}
|
||||
|
||||
// If it wasn't a string, try the various supported image types.
|
||||
for format in ImageFormat::iter() {
|
||||
if let Some(item) = try_clipboard_image(pasteboard, format) {
|
||||
return Some(item);
|
||||
}
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
// If it wasn't a string or a supported image type, give up.
|
||||
None
|
||||
}
|
||||
|
||||
fn write_credentials(&self, url: &str, username: &str, password: &[u8]) -> Task<Result<()>> {
|
||||
@@ -1038,6 +1178,112 @@ impl Platform for MacPlatform {
|
||||
}
|
||||
}
|
||||
|
||||
impl MacPlatform {
|
||||
unsafe fn read_string_from_clipboard(
|
||||
&self,
|
||||
state: &MacPlatformState,
|
||||
text_bytes: &[u8],
|
||||
) -> ClipboardItem {
|
||||
let text = String::from_utf8_lossy(text_bytes).to_string();
|
||||
let hash_bytes = self
|
||||
.read_from_pasteboard(state.pasteboard, state.text_hash_pasteboard_type)
|
||||
.and_then(|bytes| bytes.try_into().ok())
|
||||
.map(u64::from_be_bytes);
|
||||
let metadata_bytes = self
|
||||
.read_from_pasteboard(state.pasteboard, state.metadata_pasteboard_type)
|
||||
.and_then(|bytes| String::from_utf8(bytes.to_vec()).ok());
|
||||
let opt_metadata;
|
||||
|
||||
if let Some((hash, metadata)) = hash_bytes.zip(metadata_bytes) {
|
||||
if hash == ClipboardString::text_hash(&text) {
|
||||
opt_metadata = Some(metadata);
|
||||
} else {
|
||||
opt_metadata = None;
|
||||
}
|
||||
} else {
|
||||
opt_metadata = None;
|
||||
}
|
||||
|
||||
ClipboardItem::new_string_with_metadata(text, opt_metadata)
|
||||
}
|
||||
|
||||
unsafe fn write_plaintext_to_clipboard(&self, string: &ClipboardString) {
|
||||
let state = self.0.lock();
|
||||
state.pasteboard.clearContents();
|
||||
|
||||
let text_bytes = NSData::dataWithBytes_length_(
|
||||
nil,
|
||||
string.text.as_ptr() as *const c_void,
|
||||
string.text.len() as u64,
|
||||
);
|
||||
state
|
||||
.pasteboard
|
||||
.setData_forType(text_bytes, NSPasteboardTypeString);
|
||||
|
||||
if let Some(metadata) = string.metadata.as_ref() {
|
||||
let hash_bytes = ClipboardString::text_hash(&string.text).to_be_bytes();
|
||||
let hash_bytes = NSData::dataWithBytes_length_(
|
||||
nil,
|
||||
hash_bytes.as_ptr() as *const c_void,
|
||||
hash_bytes.len() as u64,
|
||||
);
|
||||
state
|
||||
.pasteboard
|
||||
.setData_forType(hash_bytes, state.text_hash_pasteboard_type);
|
||||
|
||||
let metadata_bytes = NSData::dataWithBytes_length_(
|
||||
nil,
|
||||
metadata.as_ptr() as *const c_void,
|
||||
metadata.len() as u64,
|
||||
);
|
||||
state
|
||||
.pasteboard
|
||||
.setData_forType(metadata_bytes, state.metadata_pasteboard_type);
|
||||
}
|
||||
}
|
||||
|
||||
unsafe fn write_image_to_clipboard(&self, image: &Image) {
|
||||
let state = self.0.lock();
|
||||
state.pasteboard.clearContents();
|
||||
|
||||
let bytes = NSData::dataWithBytes_length_(
|
||||
nil,
|
||||
image.bytes.as_ptr() as *const c_void,
|
||||
image.bytes.len() as u64,
|
||||
);
|
||||
|
||||
state
|
||||
.pasteboard
|
||||
.setData_forType(bytes, Into::<UTType>::into(image.format).inner_mut());
|
||||
}
|
||||
}
|
||||
|
||||
fn try_clipboard_image(pasteboard: id, format: ImageFormat) -> Option<ClipboardItem> {
|
||||
let mut ut_type: UTType = format.into();
|
||||
|
||||
unsafe {
|
||||
let types: id = pasteboard.types();
|
||||
if msg_send![types, containsObject: ut_type.inner()] {
|
||||
let data = pasteboard.dataForType(ut_type.inner_mut());
|
||||
if data == nil {
|
||||
None
|
||||
} else {
|
||||
let bytes = Vec::from(slice::from_raw_parts(
|
||||
data.bytes() as *mut u8,
|
||||
data.length() as usize,
|
||||
));
|
||||
let id = hash(&bytes);
|
||||
|
||||
Some(ClipboardItem {
|
||||
entries: vec![ClipboardEntry::Image(Image { format, bytes, id })],
|
||||
})
|
||||
}
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
unsafe fn path_from_objc(path: id) -> PathBuf {
|
||||
let len = msg_send![path, lengthOfBytesUsingEncoding: NSUTF8StringEncoding];
|
||||
let bytes = path.UTF8String() as *const u8;
|
||||
@@ -1216,6 +1462,68 @@ mod security {
|
||||
pub const errSecItemNotFound: OSStatus = -25300;
|
||||
}
|
||||
|
||||
impl From<ImageFormat> for UTType {
|
||||
fn from(value: ImageFormat) -> Self {
|
||||
match value {
|
||||
ImageFormat::Png => Self::png(),
|
||||
ImageFormat::Jpeg => Self::jpeg(),
|
||||
ImageFormat::Tiff => Self::tiff(),
|
||||
ImageFormat::Webp => Self::webp(),
|
||||
ImageFormat::Gif => Self::gif(),
|
||||
ImageFormat::Bmp => Self::bmp(),
|
||||
ImageFormat::Svg => Self::svg(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// See https://developer.apple.com/documentation/uniformtypeidentifiers/uttype-swift.struct/
|
||||
struct UTType(id);
|
||||
|
||||
impl UTType {
|
||||
pub fn png() -> Self {
|
||||
// https://developer.apple.com/documentation/uniformtypeidentifiers/uttype-swift.struct/png
|
||||
Self(unsafe { NSPasteboardTypePNG }) // This is a rare case where there's a built-in NSPasteboardType
|
||||
}
|
||||
|
||||
pub fn jpeg() -> Self {
|
||||
// https://developer.apple.com/documentation/uniformtypeidentifiers/uttype-swift.struct/jpeg
|
||||
Self(unsafe { ns_string("public.jpeg") })
|
||||
}
|
||||
|
||||
pub fn gif() -> Self {
|
||||
// https://developer.apple.com/documentation/uniformtypeidentifiers/uttype-swift.struct/gif
|
||||
Self(unsafe { ns_string("com.compuserve.gif") })
|
||||
}
|
||||
|
||||
pub fn webp() -> Self {
|
||||
// https://developer.apple.com/documentation/uniformtypeidentifiers/uttype-swift.struct/webp
|
||||
Self(unsafe { ns_string("org.webmproject.webp") })
|
||||
}
|
||||
|
||||
pub fn bmp() -> Self {
|
||||
// https://developer.apple.com/documentation/uniformtypeidentifiers/uttype-swift.struct/bmp
|
||||
Self(unsafe { ns_string("com.microsoft.bmp") })
|
||||
}
|
||||
|
||||
pub fn svg() -> Self {
|
||||
// https://developer.apple.com/documentation/uniformtypeidentifiers/uttype-swift.struct/svg
|
||||
Self(unsafe { ns_string("public.svg-image") })
|
||||
}
|
||||
|
||||
pub fn tiff() -> Self {
|
||||
// https://developer.apple.com/documentation/uniformtypeidentifiers/uttype-swift.struct/tiff
|
||||
Self(unsafe { NSPasteboardTypeTIFF }) // This is a rare case where there's a built-in NSPasteboardType
|
||||
}
|
||||
|
||||
fn inner(&self) -> *const Object {
|
||||
self.0
|
||||
}
|
||||
|
||||
fn inner_mut(&mut self) -> *mut Object {
|
||||
self.0 as *mut _
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use crate::ClipboardItem;
|
||||
@@ -1227,11 +1535,15 @@ mod tests {
|
||||
let platform = build_platform();
|
||||
assert_eq!(platform.read_from_clipboard(), None);
|
||||
|
||||
let item = ClipboardItem::new("1".to_string());
|
||||
let item = ClipboardItem::new_string("1".to_string());
|
||||
platform.write_to_clipboard(item.clone());
|
||||
assert_eq!(platform.read_from_clipboard(), Some(item));
|
||||
|
||||
let item = ClipboardItem::new("2".to_string()).with_metadata(vec![3, 4]);
|
||||
let item = ClipboardItem {
|
||||
entries: vec![ClipboardEntry::String(
|
||||
ClipboardString::new("2".to_string()).with_metadata(vec![3, 4]),
|
||||
)],
|
||||
};
|
||||
platform.write_to_clipboard(item.clone());
|
||||
assert_eq!(platform.read_from_clipboard(), Some(item));
|
||||
|
||||
@@ -1250,7 +1562,7 @@ mod tests {
|
||||
}
|
||||
assert_eq!(
|
||||
platform.read_from_clipboard(),
|
||||
Some(ClipboardItem::new(text_from_other_app.to_string()))
|
||||
Some(ClipboardItem::new_string(text_from_other_app.to_string()))
|
||||
);
|
||||
}
|
||||
|
||||
|
||||
@@ -826,15 +826,15 @@ mod tests {
|
||||
#[test]
|
||||
fn test_clipboard() {
|
||||
let platform = WindowsPlatform::new();
|
||||
let item = ClipboardItem::new("你好".to_string());
|
||||
let item = ClipboardItem::new_string("你好".to_string());
|
||||
platform.write_to_clipboard(item.clone());
|
||||
assert_eq!(platform.read_from_clipboard(), Some(item));
|
||||
|
||||
let item = ClipboardItem::new("12345".to_string());
|
||||
let item = ClipboardItem::new_string("12345".to_string());
|
||||
platform.write_to_clipboard(item.clone());
|
||||
assert_eq!(platform.read_from_clipboard(), Some(item));
|
||||
|
||||
let item = ClipboardItem::new("abcdef".to_string()).with_metadata(vec![3, 4]);
|
||||
let item = ClipboardItem::new_string("abcdef".to_string()).with_metadata(vec![3, 4]);
|
||||
platform.write_to_clipboard(item.clone());
|
||||
assert_eq!(platform.read_from_clipboard(), Some(item));
|
||||
}
|
||||
|
||||
@@ -23,7 +23,7 @@ pub(crate) struct Scene {
|
||||
pub(crate) underlines: Vec<Underline>,
|
||||
pub(crate) monochrome_sprites: Vec<MonochromeSprite>,
|
||||
pub(crate) polychrome_sprites: Vec<PolychromeSprite>,
|
||||
pub(crate) surfaces: Vec<Surface>,
|
||||
pub(crate) surfaces: Vec<PaintSurface>,
|
||||
}
|
||||
|
||||
impl Scene {
|
||||
@@ -183,7 +183,7 @@ pub(crate) enum Primitive {
|
||||
Underline(Underline),
|
||||
MonochromeSprite(MonochromeSprite),
|
||||
PolychromeSprite(PolychromeSprite),
|
||||
Surface(Surface),
|
||||
Surface(PaintSurface),
|
||||
}
|
||||
|
||||
impl Primitive {
|
||||
@@ -231,9 +231,9 @@ struct BatchIterator<'a> {
|
||||
polychrome_sprites: &'a [PolychromeSprite],
|
||||
polychrome_sprites_start: usize,
|
||||
polychrome_sprites_iter: Peekable<slice::Iter<'a, PolychromeSprite>>,
|
||||
surfaces: &'a [Surface],
|
||||
surfaces: &'a [PaintSurface],
|
||||
surfaces_start: usize,
|
||||
surfaces_iter: Peekable<slice::Iter<'a, Surface>>,
|
||||
surfaces_iter: Peekable<slice::Iter<'a, PaintSurface>>,
|
||||
}
|
||||
|
||||
impl<'a> Iterator for BatchIterator<'a> {
|
||||
@@ -411,7 +411,7 @@ pub(crate) enum PrimitiveBatch<'a> {
|
||||
texture_id: AtlasTextureId,
|
||||
sprites: &'a [PolychromeSprite],
|
||||
},
|
||||
Surfaces(&'a [Surface]),
|
||||
Surfaces(&'a [PaintSurface]),
|
||||
}
|
||||
|
||||
#[derive(Default, Debug, Clone, Eq, PartialEq)]
|
||||
@@ -673,7 +673,7 @@ impl From<PolychromeSprite> for Primitive {
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Eq, PartialEq)]
|
||||
pub(crate) struct Surface {
|
||||
pub(crate) struct PaintSurface {
|
||||
pub order: DrawOrder,
|
||||
pub bounds: Bounds<ScaledPixels>,
|
||||
pub content_mask: ContentMask<ScaledPixels>,
|
||||
@@ -681,20 +681,20 @@ pub(crate) struct Surface {
|
||||
pub image_buffer: media::core_video::CVImageBuffer,
|
||||
}
|
||||
|
||||
impl Ord for Surface {
|
||||
impl Ord for PaintSurface {
|
||||
fn cmp(&self, other: &Self) -> std::cmp::Ordering {
|
||||
self.order.cmp(&other.order)
|
||||
}
|
||||
}
|
||||
|
||||
impl PartialOrd for Surface {
|
||||
impl PartialOrd for PaintSurface {
|
||||
fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
|
||||
Some(self.cmp(other))
|
||||
}
|
||||
}
|
||||
|
||||
impl From<Surface> for Primitive {
|
||||
fn from(surface: Surface) -> Self {
|
||||
impl From<PaintSurface> for Primitive {
|
||||
fn from(surface: PaintSurface) -> Self {
|
||||
Primitive::Surface(surface)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -5,10 +5,10 @@ use std::{
};

use crate::{
    black, phi, point, quad, rems, AbsoluteLength, Bounds, ContentMask, Corners, CornersRefinement,
    CursorStyle, DefiniteLength, Edges, EdgesRefinement, Font, FontFallbacks, FontFeatures,
    FontStyle, FontWeight, Hsla, Length, Pixels, Point, PointRefinement, Rgba, SharedString, Size,
    SizeRefinement, Styled, TextRun, WindowContext,
    black, phi, point, quad, rems, size, AbsoluteLength, Bounds, ContentMask, Corners,
    CornersRefinement, CursorStyle, DefiniteLength, DevicePixels, Edges, EdgesRefinement, Font,
    FontFallbacks, FontFeatures, FontStyle, FontWeight, Hsla, Length, Pixels, Point,
    PointRefinement, Rgba, SharedString, Size, SizeRefinement, Styled, TextRun, WindowContext,
};
use collections::HashSet;
use refineable::Refineable;

@@ -27,6 +27,121 @@ pub struct DebugBelow;
#[cfg(debug_assertions)]
impl crate::Global for DebugBelow {}

/// How to fit the image into the bounds of the element.
pub enum ObjectFit {
    /// The image will be stretched to fill the bounds of the element.
    Fill,
    /// The image will be scaled to fit within the bounds of the element, preserving its aspect ratio.
    Contain,
    /// The image will be scaled to cover the bounds of the element, preserving its aspect ratio and cropping if needed.
    Cover,
    /// Like `Contain`, but the image is only ever scaled down; images smaller than the bounds keep their original size.
    ScaleDown,
    /// The image will maintain its original size.
    None,
}

impl ObjectFit {
    /// Get the bounds of the image within the given bounds.
    pub fn get_bounds(
        &self,
        bounds: Bounds<Pixels>,
        image_size: Size<DevicePixels>,
    ) -> Bounds<Pixels> {
        let image_size = image_size.map(|dimension| Pixels::from(u32::from(dimension)));
        let image_ratio = image_size.width / image_size.height;
        let bounds_ratio = bounds.size.width / bounds.size.height;

        let result_bounds = match self {
            ObjectFit::Fill => bounds,
            ObjectFit::Contain => {
                let new_size = if bounds_ratio > image_ratio {
                    size(
                        image_size.width * (bounds.size.height / image_size.height),
                        bounds.size.height,
                    )
                } else {
                    size(
                        bounds.size.width,
                        image_size.height * (bounds.size.width / image_size.width),
                    )
                };

                Bounds {
                    origin: point(
                        bounds.origin.x + (bounds.size.width - new_size.width) / 2.0,
                        bounds.origin.y + (bounds.size.height - new_size.height) / 2.0,
                    ),
                    size: new_size,
                }
            }
            ObjectFit::ScaleDown => {
                // Check if the image is larger than the bounds in either dimension.
                if image_size.width > bounds.size.width || image_size.height > bounds.size.height {
                    // If the image is larger, use the same logic as Contain to scale it down.
                    let new_size = if bounds_ratio > image_ratio {
                        size(
                            image_size.width * (bounds.size.height / image_size.height),
                            bounds.size.height,
                        )
                    } else {
                        size(
                            bounds.size.width,
                            image_size.height * (bounds.size.width / image_size.width),
                        )
                    };

                    Bounds {
                        origin: point(
                            bounds.origin.x + (bounds.size.width - new_size.width) / 2.0,
                            bounds.origin.y + (bounds.size.height - new_size.height) / 2.0,
                        ),
                        size: new_size,
                    }
                } else {
                    // If the image is smaller than or equal to the container, display it at its
                    // original size, centered within the container.
                    let original_size = size(image_size.width, image_size.height);
                    Bounds {
                        origin: point(
                            bounds.origin.x + (bounds.size.width - original_size.width) / 2.0,
                            bounds.origin.y + (bounds.size.height - original_size.height) / 2.0,
                        ),
                        size: original_size,
                    }
                }
            }
            ObjectFit::Cover => {
                let new_size = if bounds_ratio > image_ratio {
                    size(
                        bounds.size.width,
                        image_size.height * (bounds.size.width / image_size.width),
                    )
                } else {
                    size(
                        image_size.width * (bounds.size.height / image_size.height),
                        bounds.size.height,
                    )
                };

                Bounds {
                    origin: point(
                        bounds.origin.x + (bounds.size.width - new_size.width) / 2.0,
                        bounds.origin.y + (bounds.size.height - new_size.height) / 2.0,
                    ),
                    size: new_size,
                }
            }
            ObjectFit::None => Bounds {
                origin: bounds.origin,
                size: image_size,
            },
        };

        result_bounds
    }
}
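A small worked check of the `Contain` math above (illustrative, not part of the change): a 200×100 image placed in a 100×100 box keeps its 2:1 ratio, so it is scaled to 100×50 and letterboxed vertically.

```rust
use gpui::{point, px, size, Bounds, DevicePixels, ObjectFit};

fn contain_example() {
    let target = Bounds {
        origin: point(px(0.), px(0.)),
        size: size(px(100.), px(100.)),
    };
    let image = size(DevicePixels(200), DevicePixels(100));

    let fitted = ObjectFit::Contain.get_bounds(target, image);
    // bounds_ratio (1.0) < image_ratio (2.0), so width is pinned and height shrinks.
    assert_eq!(fitted.size, size(px(100.), px(50.)));
    assert_eq!(fitted.origin, point(px(0.), px(25.))); // centered vertically
}
```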
/// The CSS styling that can be applied to an element via the `Styled` trait
|
||||
#[derive(Clone, Refineable, Debug)]
|
||||
#[refineable(Debug)]
|
||||
|
||||
@@ -1,26 +1,25 @@
|
||||
use crate::{
|
||||
hash, point, prelude::*, px, size, transparent_black, Action, AnyDrag, AnyElement, AnyTooltip,
|
||||
point, prelude::*, px, size, transparent_black, Action, AnyDrag, AnyElement, AnyTooltip,
|
||||
AnyView, AppContext, Arena, Asset, AsyncWindowContext, AvailableSpace, Bounds, BoxShadow,
|
||||
Context, Corners, CursorStyle, Decorations, DevicePixels, DispatchActionListener,
|
||||
DispatchNodeId, DispatchTree, DisplayId, Edges, Effect, Entity, EntityId, EventEmitter,
|
||||
FileDropEvent, Flatten, FontId, GPUSpecs, Global, GlobalElementId, GlyphId, Hsla, ImageData,
|
||||
InputHandler, IsZero, KeyBinding, KeyContext, KeyDownEvent, KeyEvent, Keystroke,
|
||||
KeystrokeEvent, LayoutId, LineLayoutIndex, Model, ModelContext, Modifiers,
|
||||
ModifiersChangedEvent, MonochromeSprite, MouseButton, MouseEvent, MouseMoveEvent, MouseUpEvent,
|
||||
Path, Pixels, PlatformAtlas, PlatformDisplay, PlatformInput, PlatformInputHandler,
|
||||
PlatformWindow, Point, PolychromeSprite, PromptLevel, Quad, Render, RenderGlyphParams,
|
||||
RenderImageParams, RenderSvgParams, Replay, ResizeEdge, ScaledPixels, Scene, Shadow,
|
||||
SharedString, Size, StrikethroughStyle, Style, SubscriberSet, Subscription, TaffyLayoutEngine,
|
||||
Task, TextStyle, TextStyleRefinement, TransformationMatrix, Underline, UnderlineStyle, View,
|
||||
VisualContext, WeakView, WindowAppearance, WindowBackgroundAppearance, WindowBounds,
|
||||
WindowControls, WindowDecorations, WindowOptions, WindowParams, WindowTextSystem,
|
||||
SUBPIXEL_VARIANTS,
|
||||
FileDropEvent, Flatten, FontId, GPUSpecs, Global, GlobalElementId, GlyphId, Hsla, InputHandler,
|
||||
IsZero, KeyBinding, KeyContext, KeyDownEvent, KeyEvent, Keystroke, KeystrokeEvent, LayoutId,
|
||||
LineLayoutIndex, Model, ModelContext, Modifiers, ModifiersChangedEvent, MonochromeSprite,
|
||||
MouseButton, MouseEvent, MouseMoveEvent, MouseUpEvent, Path, Pixels, PlatformAtlas,
|
||||
PlatformDisplay, PlatformInput, PlatformInputHandler, PlatformWindow, Point, PolychromeSprite,
|
||||
PromptLevel, Quad, Render, RenderGlyphParams, RenderImage, RenderImageParams, RenderSvgParams,
|
||||
Replay, ResizeEdge, ScaledPixels, Scene, Shadow, SharedString, Size, StrikethroughStyle, Style,
|
||||
SubscriberSet, Subscription, TaffyLayoutEngine, Task, TextStyle, TextStyleRefinement,
|
||||
TransformationMatrix, Underline, UnderlineStyle, View, VisualContext, WeakView,
|
||||
WindowAppearance, WindowBackgroundAppearance, WindowBounds, WindowControls, WindowDecorations,
|
||||
WindowOptions, WindowParams, WindowTextSystem, SUBPIXEL_VARIANTS,
|
||||
};
|
||||
use anyhow::{anyhow, Context as _, Result};
|
||||
use collections::{FxHashMap, FxHashSet};
|
||||
use derive_more::{Deref, DerefMut};
|
||||
use futures::channel::oneshot;
|
||||
use futures::{future::Shared, FutureExt};
|
||||
use futures::FutureExt;
|
||||
#[cfg(target_os = "macos")]
|
||||
use media::core_video::CVImageBuffer;
|
||||
use parking_lot::RwLock;
|
||||
@@ -1956,36 +1955,6 @@ impl<'a> WindowContext<'a> {
|
||||
self.window.requested_autoscroll.take()
|
||||
}
|
||||
|
||||
/// Remove an asset from GPUI's cache
|
||||
pub fn remove_cached_asset<A: Asset + 'static>(
|
||||
&mut self,
|
||||
source: &A::Source,
|
||||
) -> Option<A::Output> {
|
||||
self.asset_cache.remove::<A>(source)
|
||||
}
|
||||
|
||||
/// Asynchronously load an asset, if the asset hasn't finished loading this will return None.
|
||||
/// Your view will be re-drawn once the asset has finished loading.
|
||||
///
|
||||
/// Note that the multiple calls to this method will only result in one `Asset::load` call.
|
||||
/// The results of that call will be cached, and returned on subsequent uses of this API.
|
||||
///
|
||||
/// Use [Self::remove_cached_asset] to reload your asset.
|
||||
pub fn use_cached_asset<A: Asset + 'static>(
|
||||
&mut self,
|
||||
source: &A::Source,
|
||||
) -> Option<A::Output> {
|
||||
self.asset_cache.get::<A>(source).or_else(|| {
|
||||
if let Some(asset) = self.use_asset::<A>(source) {
|
||||
self.asset_cache
|
||||
.insert::<A>(source.to_owned(), asset.clone());
|
||||
Some(asset)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
/// Asynchronously load an asset, if the asset hasn't finished loading this will return None.
|
||||
/// Your view will be re-drawn once the asset has finished loading.
|
||||
///
|
||||
@@ -1994,19 +1963,7 @@ impl<'a> WindowContext<'a> {
|
||||
///
|
||||
/// This asset will not be cached by default, see [Self::use_cached_asset]
|
||||
pub fn use_asset<A: Asset + 'static>(&mut self, source: &A::Source) -> Option<A::Output> {
|
||||
let asset_id = (TypeId::of::<A>(), hash(source));
|
||||
let mut is_first = false;
|
||||
let task = self
|
||||
.loading_assets
|
||||
.remove(&asset_id)
|
||||
.map(|boxed_task| *boxed_task.downcast::<Shared<Task<A::Output>>>().unwrap())
|
||||
.unwrap_or_else(|| {
|
||||
is_first = true;
|
||||
let future = A::load(source.clone(), self);
|
||||
let task = self.background_executor().spawn(future).shared();
|
||||
task
|
||||
});
|
||||
|
||||
let (task, is_first) = self.fetch_asset::<A>(source);
|
||||
task.clone().now_or_never().or_else(|| {
|
||||
if is_first {
|
||||
let parent_id = self.parent_view_id();
|
||||
@@ -2027,12 +1984,9 @@ impl<'a> WindowContext<'a> {
|
||||
.detach();
|
||||
}
|
||||
|
||||
self.loading_assets.insert(asset_id, Box::new(task));
|
||||
|
||||
None
|
||||
})
|
||||
}
|
||||
|
||||
/// Obtain the current element offset. This method should only be called during the
|
||||
/// prepaint phase of element drawing.
|
||||
pub fn element_offset(&self) -> Point<Pixels> {
|
||||
@@ -2610,13 +2564,14 @@ impl<'a> WindowContext<'a> {
|
||||
}
|
||||
|
||||
/// Paint an image into the scene for the next frame at the current z-index.
|
||||
/// This method will panic if the frame_index is not valid
|
||||
///
|
||||
/// This method should only be called as part of the paint phase of element drawing.
|
||||
pub fn paint_image(
|
||||
&mut self,
|
||||
bounds: Bounds<Pixels>,
|
||||
corner_radii: Corners<Pixels>,
|
||||
data: Arc<ImageData>,
|
||||
data: Arc<RenderImage>,
|
||||
frame_index: usize,
|
||||
grayscale: bool,
|
||||
) -> Result<()> {
|
||||
@@ -2639,7 +2594,10 @@ impl<'a> WindowContext<'a> {
|
||||
.get_or_insert_with(¶ms.clone().into(), &mut || {
|
||||
Ok(Some((
|
||||
data.size(frame_index),
|
||||
Cow::Borrowed(data.as_bytes(frame_index)),
|
||||
Cow::Borrowed(
|
||||
data.as_bytes(frame_index)
|
||||
.expect("It's the caller's job to pass a valid frame index"),
|
||||
),
|
||||
)))
|
||||
})?
|
||||
.expect("Callback above only returns Some");
|
||||
@@ -2665,6 +2623,8 @@ impl<'a> WindowContext<'a> {
|
||||
/// This method should only be called as part of the paint phase of element drawing.
|
||||
#[cfg(target_os = "macos")]
|
||||
pub fn paint_surface(&mut self, bounds: Bounds<Pixels>, image_buffer: CVImageBuffer) {
|
||||
use crate::PaintSurface;
|
||||
|
||||
debug_assert_eq!(
|
||||
self.window.draw_phase,
|
||||
DrawPhase::Paint,
|
||||
@@ -2674,15 +2634,12 @@ impl<'a> WindowContext<'a> {
|
||||
let scale_factor = self.scale_factor();
|
||||
let bounds = bounds.scale(scale_factor);
|
||||
let content_mask = self.content_mask().scale(scale_factor);
|
||||
self.window
|
||||
.next_frame
|
||||
.scene
|
||||
.insert_primitive(crate::Surface {
|
||||
order: 0,
|
||||
bounds,
|
||||
content_mask,
|
||||
image_buffer,
|
||||
});
|
||||
self.window.next_frame.scene.insert_primitive(PaintSurface {
|
||||
order: 0,
|
||||
bounds,
|
||||
content_mask,
|
||||
image_buffer,
|
||||
});
|
||||
}
|
||||
|
||||
#[must_use]
|
||||
|
||||
@@ -50,6 +50,9 @@ theme.workspace = true
|
||||
tiktoken-rs.workspace = true
|
||||
ui.workspace = true
|
||||
util.workspace = true
|
||||
base64.workspace = true
|
||||
image.workspace = true
|
||||
|
||||
|
||||
[dev-dependencies]
|
||||
ctor.workspace = true
|
||||
|
||||
@@ -220,24 +220,44 @@ pub fn count_anthropic_tokens(
|
||||
) -> BoxFuture<'static, Result<usize>> {
|
||||
cx.background_executor()
|
||||
.spawn(async move {
|
||||
let messages = request
|
||||
.messages
|
||||
.into_iter()
|
||||
.map(|message| tiktoken_rs::ChatCompletionRequestMessage {
|
||||
role: match message.role {
|
||||
Role::User => "user".into(),
|
||||
Role::Assistant => "assistant".into(),
|
||||
Role::System => "system".into(),
|
||||
},
|
||||
content: Some(message.content),
|
||||
name: None,
|
||||
function_call: None,
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
let messages = request.messages;
|
||||
let mut tokens_from_images = 0;
|
||||
let mut string_messages = Vec::with_capacity(messages.len());
|
||||
|
||||
for message in messages {
|
||||
use crate::MessageContent;
|
||||
|
||||
let mut string_contents = String::new();
|
||||
|
||||
for content in message.content {
|
||||
match content {
|
||||
MessageContent::Text(string) => {
|
||||
string_contents.push_str(&string);
|
||||
}
|
||||
MessageContent::Image(image) => {
|
||||
tokens_from_images += image.estimate_tokens();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if !string_contents.is_empty() {
|
||||
string_messages.push(tiktoken_rs::ChatCompletionRequestMessage {
|
||||
role: match message.role {
|
||||
Role::User => "user".into(),
|
||||
Role::Assistant => "assistant".into(),
|
||||
Role::System => "system".into(),
|
||||
},
|
||||
content: Some(string_contents),
|
||||
name: None,
|
||||
function_call: None,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
// Tiktoken doesn't yet support these models, so we manually use the
|
||||
// same tokenizer as GPT-4.
|
||||
tiktoken_rs::num_tokens_from_messages("gpt-4", &messages)
|
||||
tiktoken_rs::num_tokens_from_messages("gpt-4", &string_messages)
|
||||
.map(|tokens| tokens + tokens_from_images)
|
||||
})
|
||||
.boxed()
|
||||
}
|
||||
|
||||
@@ -193,7 +193,7 @@ impl LanguageModel for CopilotChatLanguageModel {
|
||||
cx: &AsyncAppContext,
|
||||
) -> BoxFuture<'static, Result<BoxStream<'static, Result<String>>>> {
|
||||
if let Some(message) = request.messages.last() {
|
||||
if message.content.trim().is_empty() {
|
||||
if message.contents_empty() {
|
||||
const EMPTY_PROMPT_MSG: &str =
|
||||
"Empty prompts aren't allowed. Please provide a non-empty prompt.";
|
||||
return futures::future::ready(Err(anyhow::anyhow!(EMPTY_PROMPT_MSG))).boxed();
|
||||
@@ -270,7 +270,7 @@ impl CopilotChatLanguageModel {
|
||||
Role::Assistant => CopilotChatRole::Assistant,
|
||||
Role::System => CopilotChatRole::System,
|
||||
},
|
||||
content: msg.content,
|
||||
content: msg.string_contents(),
|
||||
})
|
||||
.collect(),
|
||||
)
|
||||
|
||||
@@ -182,14 +182,14 @@ impl OllamaLanguageModel {
|
||||
.into_iter()
|
||||
.map(|msg| match msg.role {
|
||||
Role::User => ChatMessage::User {
|
||||
content: msg.content,
|
||||
content: msg.string_contents(),
|
||||
},
|
||||
Role::Assistant => ChatMessage::Assistant {
|
||||
content: msg.content,
|
||||
content: msg.string_contents(),
|
||||
tool_calls: None,
|
||||
},
|
||||
Role::System => ChatMessage::System {
|
||||
content: msg.content,
|
||||
content: msg.string_contents(),
|
||||
},
|
||||
})
|
||||
.collect(),
|
||||
@@ -257,7 +257,7 @@ impl LanguageModel for OllamaLanguageModel {
|
||||
let token_count = request
|
||||
.messages
|
||||
.iter()
|
||||
.map(|msg| msg.content.chars().count())
|
||||
.map(|msg| msg.string_contents().chars().count())
|
||||
.sum::<usize>()
|
||||
/ 4;
|
||||
|
||||
|
||||
@@ -363,7 +363,7 @@ pub fn count_open_ai_tokens(
|
||||
Role::Assistant => "assistant".into(),
|
||||
Role::System => "system".into(),
|
||||
},
|
||||
content: Some(message.content),
|
||||
content: Some(message.string_contents()),
|
||||
name: None,
|
||||
function_call: None,
|
||||
})
|
||||
|
||||
@@ -1,10 +1,223 @@
use std::io::{Cursor, Write};

use crate::role::Role;
use base64::write::EncoderWriter;
use gpui::{point, size, AppContext, DevicePixels, Image, ObjectFit, RenderImage, Size, Task};
use image::{codecs::png::PngEncoder, imageops::resize, DynamicImage, ImageDecoder};
use serde::{Deserialize, Serialize};
use ui::{px, SharedString};
use util::ResultExt;

#[derive(Clone, PartialEq, Eq, Serialize, Deserialize, Debug, Hash)]
pub struct LanguageModelImage {
    // A base64 encoded PNG image
    source: SharedString,
    size: Size<DevicePixels>,
}

const ANTHROPIC_SIZE_LIMT: f32 = 1568.0; // Anthropic wants uploaded images to be smaller than this in both dimensions

impl LanguageModelImage {
    pub fn from_image(data: Image, cx: &mut AppContext) -> Task<Option<Self>> {
        cx.background_executor().spawn(async move {
            match data.format() {
                gpui::ImageFormat::Png
                | gpui::ImageFormat::Jpeg
                | gpui::ImageFormat::Webp
                | gpui::ImageFormat::Gif => {}
                _ => return None,
            };

            let image = image::codecs::png::PngDecoder::new(Cursor::new(data.bytes())).log_err()?;
            let (width, height) = image.dimensions();
            let image_size = size(DevicePixels(width as i32), DevicePixels(height as i32));

            let mut base64_image = Vec::new();

            {
                let mut base64_encoder = EncoderWriter::new(
                    Cursor::new(&mut base64_image),
                    &base64::engine::general_purpose::STANDARD,
                );

                if image_size.width.0 > ANTHROPIC_SIZE_LIMT as i32
                    || image_size.height.0 > ANTHROPIC_SIZE_LIMT as i32
                {
                    let new_bounds = ObjectFit::ScaleDown.get_bounds(
                        gpui::Bounds {
                            origin: point(px(0.0), px(0.0)),
                            size: size(px(ANTHROPIC_SIZE_LIMT), px(ANTHROPIC_SIZE_LIMT)),
                        },
                        image_size,
                    );
                    let image = DynamicImage::from_decoder(image).log_err()?.resize(
                        new_bounds.size.width.0 as u32,
                        new_bounds.size.height.0 as u32,
                        image::imageops::FilterType::Triangle,
                    );

                    let mut png = Vec::new();
                    image
                        .write_with_encoder(PngEncoder::new(&mut png))
                        .log_err()?;

                    base64_encoder.write_all(png.as_slice()).log_err()?;
                } else {
                    base64_encoder.write_all(data.bytes()).log_err()?;
                }
            }

            // SAFETY: The base64 encoder should not produce non-UTF8
            let source = unsafe { String::from_utf8_unchecked(base64_image) };

            Some(LanguageModelImage {
                size: image_size,
                source: source.into(),
            })
        })
    }

    /// Resolves image into an LLM-ready format (base64)
    pub fn from_render_image(data: &RenderImage) -> Option<Self> {
        let image_size = data.size(0);

        let mut bytes = data.as_bytes(0).unwrap_or(&[]).to_vec();
        // Convert from BGRA to RGBA.
        for pixel in bytes.chunks_exact_mut(4) {
            pixel.swap(2, 0);
        }
        let mut image = image::RgbaImage::from_vec(
            image_size.width.0 as u32,
            image_size.height.0 as u32,
            bytes,
        )
        .expect("We already know this works");

        // https://docs.anthropic.com/en/docs/build-with-claude/vision
        if image_size.width.0 > ANTHROPIC_SIZE_LIMT as i32
            || image_size.height.0 > ANTHROPIC_SIZE_LIMT as i32
        {
            let new_bounds = ObjectFit::ScaleDown.get_bounds(
                gpui::Bounds {
                    origin: point(px(0.0), px(0.0)),
                    size: size(px(ANTHROPIC_SIZE_LIMT), px(ANTHROPIC_SIZE_LIMT)),
                },
                image_size,
            );

            image = resize(
                &image,
                new_bounds.size.width.0 as u32,
                new_bounds.size.height.0 as u32,
                image::imageops::FilterType::Triangle,
            );
        }

        let mut png = Vec::new();

        image
            .write_with_encoder(PngEncoder::new(&mut png))
            .log_err()?;

        let mut base64_image = Vec::new();

        {
            let mut base64_encoder = EncoderWriter::new(
                Cursor::new(&mut base64_image),
                &base64::engine::general_purpose::STANDARD,
            );

            base64_encoder.write_all(png.as_slice()).log_err()?;
        }

        // SAFETY: The base64 encoder should not produce non-UTF8
        let source = unsafe { String::from_utf8_unchecked(base64_image) };

        Some(LanguageModelImage {
            size: image_size,
            source: source.into(),
        })
    }

    pub fn estimate_tokens(&self) -> usize {
        let width = self.size.width.0.abs() as usize;
        let height = self.size.height.0.abs() as usize;

        // From: https://docs.anthropic.com/en/docs/build-with-claude/vision#calculate-image-costs
        // Note that are a lot of conditions on anthropic's API, and OpenAI doesn't use this,
        // so this method is more of a rough guess
        (width * height) / 750
    }
}
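A quick sanity check of the heuristic above, as standalone code: a 1568×1568 image (right at `ANTHROPIC_SIZE_LIMT`) estimates to 3278 tokens, and a 1024×768 screenshot to 1048. The sample dimensions are illustrative, not taken from the diff:

```rust
// Standalone illustration of the (width * height) / 750 heuristic used by
// LanguageModelImage::estimate_tokens; the sample dimensions are made up.
fn estimate_image_tokens(width: usize, height: usize) -> usize {
    (width * height) / 750
}

fn main() {
    assert_eq!(estimate_image_tokens(1568, 1568), 3278); // at the Anthropic size limit
    assert_eq!(estimate_image_tokens(1024, 768), 1048); // a typical small screenshot
}
```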
#[derive(Clone, Serialize, Deserialize, Eq, PartialEq, Hash)]
pub enum MessageContent {
    Text(String),
    Image(LanguageModelImage),
}

impl std::fmt::Debug for MessageContent {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self {
            MessageContent::Text(t) => f.debug_struct("MessageContent").field("text", t).finish(),
            MessageContent::Image(i) => f
                .debug_struct("MessageContent")
                .field("image", &i.source.len())
                .finish(),
        }
    }
}

impl MessageContent {
    pub fn as_string(&self) -> &str {
        match self {
            MessageContent::Text(s) => s.as_str(),
            MessageContent::Image(_) => "",
        }
    }
}

impl From<String> for MessageContent {
    fn from(value: String) -> Self {
        MessageContent::Text(value)
    }
}

impl From<&str> for MessageContent {
    fn from(value: &str) -> Self {
        MessageContent::Text(value.to_string())
    }
}

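The two `From` impls keep call sites terse when only text is involved. A small usage sketch, assuming the types above are in scope:

```rust
// Usage sketch for the conversions above; an image variant would instead be built
// via LanguageModelImage::from_image or from_render_image.
let owned: MessageContent = String::from("hello").into();
let borrowed: MessageContent = "world".into();

assert_eq!(owned.as_string(), "hello");
assert_eq!(borrowed.as_string(), "world");
```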
#[derive(Clone, Serialize, Deserialize, Debug, PartialEq, Hash)]
pub struct LanguageModelRequestMessage {
    pub role: Role,
    pub content: String,
    pub content: Vec<MessageContent>,
}

impl LanguageModelRequestMessage {
    pub fn string_contents(&self) -> String {
        let mut string_buffer = String::new();
        for string in self.content.iter().filter_map(|content| match content {
            MessageContent::Text(s) => Some(s),
            MessageContent::Image(_) => None,
        }) {
            string_buffer.push_str(string.as_str())
        }
        string_buffer
    }

    pub fn contents_empty(&self) -> bool {
        self.content.is_empty()
            || self
                .content
                .get(0)
                .map(|content| match content {
                    MessageContent::Text(s) => s.is_empty(),
                    MessageContent::Image(_) => true,
                })
                .unwrap_or(false)
    }
}

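Two details worth noting: `string_contents()` silently drops image entries, and `contents_empty()` only inspects the first entry, so a message that starts with an image counts as empty. A behaviour sketch, assuming `Role` and the types above are in scope:

```rust
// Behaviour sketch for the helpers above (Role and the message types assumed in scope).
let msg = LanguageModelRequestMessage {
    role: Role::User,
    content: vec![
        MessageContent::Text("Describe ".into()),
        MessageContent::Text("this image: ".into()),
        // An Image entry here would be skipped by string_contents(); if it were the
        // *first* entry, contents_empty() would report true for the whole message.
    ],
};
assert_eq!(msg.string_contents(), "Describe this image: ");
assert!(!msg.contents_empty());

let empty = LanguageModelRequestMessage { role: Role::User, content: Vec::new() };
assert!(empty.contents_empty());
```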
#[derive(Clone, Debug, Default, Serialize, Deserialize, PartialEq)]
@@ -23,14 +236,14 @@ impl LanguageModelRequest {
            .into_iter()
            .map(|msg| match msg.role {
                Role::User => open_ai::RequestMessage::User {
                    content: msg.content,
                    content: msg.string_contents(),
                },
                Role::Assistant => open_ai::RequestMessage::Assistant {
                    content: Some(msg.content),
                    content: Some(msg.string_contents()),
                    tool_calls: Vec::new(),
                },
                Role::System => open_ai::RequestMessage::System {
                    content: msg.content,
                    content: msg.string_contents(),
                },
            })
            .collect(),
@@ -51,7 +264,7 @@ impl LanguageModelRequest {
            .into_iter()
            .map(|msg| google_ai::Content {
                parts: vec![google_ai::Part::TextPart(google_ai::TextPart {
                    text: msg.content,
                    text: msg.string_contents(),
                })],
                role: match msg.role {
                    Role::User => google_ai::Role::User,
@@ -77,7 +290,7 @@ impl LanguageModelRequest {
        let mut system_message = String::new();

        for message in self.messages {
            if message.content.is_empty() {
            if message.contents_empty() {
                continue;
            }

@@ -85,8 +298,11 @@ impl LanguageModelRequest {
                Role::User | Role::Assistant => {
                    if let Some(last_message) = new_messages.last_mut() {
                        if last_message.role == message.role {
                            last_message.content.push_str("\n\n");
                            last_message.content.push_str(&message.content);
                            // TODO: is this append done properly?
                            last_message.content.push(MessageContent::Text(format!(
                                "\n\n{}",
                                message.string_contents()
                            )));
                            continue;
                        }
                    }
@@ -97,7 +313,7 @@ impl LanguageModelRequest {
                    if !system_message.is_empty() {
                        system_message.push_str("\n\n");
                    }
                    system_message.push_str(&message.content);
                    system_message.push_str(&message.string_contents());
                }
            }
        }
@@ -113,9 +329,24 @@ impl LanguageModelRequest {
                        Role::Assistant => anthropic::Role::Assistant,
                        Role::System => return None,
                    },
                    content: vec![anthropic::Content::Text {
                        text: message.content,
                    }],
                    content: message
                        .content
                        .into_iter()
                        // TODO: filter out the empty messages in the message construction step
                        .filter_map(|content| match content {
                            MessageContent::Text(t) if !t.is_empty() => {
                                Some(anthropic::Content::Text { text: t })
                            }
                            MessageContent::Image(i) => Some(anthropic::Content::Image {
                                source: anthropic::ImageSource {
                                    source_type: "base64".to_string(),
                                    media_type: "image/png".to_string(),
                                    data: i.source.to_string(),
                                },
                            }),
                            _ => None,
                        })
                        .collect(),
                })
            })
            .collect(),

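The Anthropic conversion above is where image support actually lands: each remaining message fans out into text and image content blocks, with empty text filtered out. A sketch of what one mixed message becomes (the `screenshot` value is hypothetical; the block shapes are taken from the hunk):

```rust
// Sketch: what one user message with a caption and a screenshot becomes after the
// filter_map above. `screenshot` is a hypothetical LanguageModelImage built earlier.
let content = vec![
    MessageContent::Text("What does this error mean?".to_string()),
    MessageContent::Image(screenshot),
];

// After conversion, the Anthropic message carries two content blocks:
//   anthropic::Content::Text  { text: "What does this error mean?" }
//   anthropic::Content::Image { source: ImageSource {
//       source_type: "base64", media_type: "image/png",
//       data: <the base64-encoded PNG held by LanguageModelImage> } }
```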
@@ -3,7 +3,7 @@ use anyhow::{anyhow, Context, Result};
use async_trait::async_trait;
use collections::{btree_map::Entry as BTreeEntry, hash_map::Entry, BTreeMap, HashMap, HashSet};
use futures::Stream;
use gpui::{BackgroundExecutor, ImageSource};
use gpui::{BackgroundExecutor, SurfaceSource};
use live_kit_server::{proto, token};

use parking_lot::Mutex;
@@ -870,7 +870,7 @@ impl Frame {
        self.height
    }

    pub fn image(&self) -> ImageSource {
    pub fn image(&self) -> SurfaceSource {
        unimplemented!("you can't call this in test mode")
    }
}

@@ -150,7 +150,7 @@ impl Markdown {
            return;
        }
        let text = text.text_for_range(self.selection.start..self.selection.end);
        cx.write_to_clipboard(ClipboardItem::new(text));
        cx.write_to_clipboard(ClipboardItem::new_string(text));
    }

    fn parse(&mut self, cx: &mut ViewContext<Self>) {
@@ -480,7 +480,7 @@ impl MarkdownElement {
                    {
                        let text = rendered_text
                            .text_for_range(markdown.selection.start..markdown.selection.end);
                        cx.write_to_primary(ClipboardItem::new(text))
                        cx.write_to_primary(ClipboardItem::new_string(text))
                    }
                    cx.notify();
                }

@@ -1067,7 +1067,7 @@ impl OutlinePanel {
            .and_then(|entry| self.abs_path(&entry, cx))
            .map(|p| p.to_string_lossy().to_string())
        {
            cx.write_to_clipboard(ClipboardItem::new(clipboard_text));
            cx.write_to_clipboard(ClipboardItem::new_string(clipboard_text));
        }
    }

@@ -1082,7 +1082,7 @@ impl OutlinePanel {
            })
            .map(|p| p.to_string_lossy().to_string())
        {
            cx.write_to_clipboard(ClipboardItem::new(clipboard_text));
            cx.write_to_clipboard(ClipboardItem::new_string(clipboard_text));
        }
    }

@@ -1357,7 +1357,7 @@ impl ProjectPanel {

    fn copy_path(&mut self, _: &CopyPath, cx: &mut ViewContext<Self>) {
        if let Some((worktree, entry)) = self.selected_entry(cx) {
            cx.write_to_clipboard(ClipboardItem::new(
            cx.write_to_clipboard(ClipboardItem::new_string(
                worktree
                    .abs_path()
                    .join(&entry.path)
@@ -1369,7 +1369,9 @@ impl ProjectPanel {

    fn copy_relative_path(&mut self, _: &CopyRelativePath, cx: &mut ViewContext<Self>) {
        if let Some((_, entry)) = self.selected_entry(cx) {
            cx.write_to_clipboard(ClipboardItem::new(entry.path.to_string_lossy().to_string()));
            cx.write_to_clipboard(ClipboardItem::new_string(
                entry.path.to_string_lossy().to_string(),
            ));
        }
    }

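Every clipboard call site in this change follows the same mechanical rewrite: `ClipboardItem::new(...)` becomes `ClipboardItem::new_string(...)`, and reads go through an `Option` because an item may now hold images instead of text. A sketch of the pattern, with the shapes inferred from the hunks rather than from gpui's documentation:

```rust
use gpui::{AppContext, ClipboardItem};

// Sketch of the write/read pattern used throughout these hunks.
fn copy_then_paste(cx: &mut AppContext) {
    // Writing: plain strings now go through the explicit string constructor.
    cx.write_to_clipboard(ClipboardItem::new_string("hello".to_string()));

    // Reading: text() is optional now, since the clipboard may hold an image.
    if let Some(text) = cx.read_from_clipboard().and_then(|item| item.text()) {
        println!("clipboard text: {text}");
    } else {
        println!("clipboard was empty or held a non-text entry");
    }
}
```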
@@ -5,7 +5,7 @@ use crate::stdio::TerminalOutput;
use anyhow::Result;
use base64::prelude::*;
use gpui::{
    img, percentage, Animation, AnimationExt, AnyElement, FontWeight, ImageData, Render, Task,
    img, percentage, Animation, AnimationExt, AnyElement, FontWeight, Render, RenderImage, Task,
    TextRun, Transformation, View,
};
use runtimelib::datatable::TableSchema;
@@ -38,7 +38,7 @@ fn rank_mime_type(mimetype: &MimeType) -> usize {
pub struct ImageView {
    height: u32,
    width: u32,
    image: Arc<ImageData>,
    image: Arc<RenderImage>,
}

impl ImageView {
@@ -76,7 +76,7 @@ impl ImageView {
        let height = data.height();
        let width = data.width();

        let gpui_image_data = ImageData::new(vec![image::Frame::new(data)]);
        let gpui_image_data = RenderImage::new(vec![image::Frame::new(data)]);

        return Ok(ImageView {
            height,

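`ImageData` gives way to `RenderImage`, which wraps one or more `image::Frame`s. A minimal construction sketch based on the call above; the RGBA-to-BGRA note is an assumption carried over from `LanguageModelImage::from_render_image`, which reads the stored frames as BGRA:

```rust
// Minimal sketch mirroring `RenderImage::new(vec![image::Frame::new(data)])` above.
use gpui::RenderImage;
use image::{Frame, RgbaImage};

fn to_render_image(data: RgbaImage) -> RenderImage {
    // Assumption: a real caller converts RGBA -> BGRA first, since
    // LanguageModelImage::from_render_image reads the stored frames as BGRA.
    RenderImage::new(vec![Frame::new(data)])
}
```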
@@ -656,13 +656,17 @@ impl Terminal {
                cx.emit(Event::BreadcrumbsChanged);
            }
            AlacTermEvent::ClipboardStore(_, data) => {
                cx.write_to_clipboard(ClipboardItem::new(data.to_string()))
                cx.write_to_clipboard(ClipboardItem::new_string(data.to_string()))
            }
            AlacTermEvent::ClipboardLoad(_, format) => {
                self.write_to_pty(
                    match &cx.read_from_clipboard().and_then(|item| item.text()) {
                        // The terminal only supports pasting strings, not images.
                        Some(text) => format(text),
                        _ => format(""),
                    },
                )
            }
            AlacTermEvent::ClipboardLoad(_, format) => self.write_to_pty(format(
                &cx.read_from_clipboard()
                    .map(|ci| ci.text().to_string())
                    .unwrap_or_else(|| "".to_string()),
            )),
            AlacTermEvent::PtyWrite(out) => self.write_to_pty(out.clone()),
            AlacTermEvent::TextAreaSizeRequest(format) => {
                self.write_to_pty(format(self.last_content.size.into()))
@@ -767,7 +771,7 @@ impl Terminal {

            #[cfg(target_os = "linux")]
            if let Some(selection_text) = term.selection_to_string() {
                cx.write_to_primary(ClipboardItem::new(selection_text));
                cx.write_to_primary(ClipboardItem::new_string(selection_text));
            }

            if let Some((_, head)) = selection {
@@ -788,7 +792,7 @@ impl Terminal {

            #[cfg(target_os = "linux")]
            if let Some(selection_text) = term.selection_to_string() {
                cx.write_to_primary(ClipboardItem::new(selection_text));
                cx.write_to_primary(ClipboardItem::new_string(selection_text));
            }

            self.selection_head = Some(point);
@@ -798,7 +802,7 @@ impl Terminal {

            InternalEvent::Copy => {
                if let Some(txt) = term.selection_to_string() {
                    cx.write_to_clipboard(ClipboardItem::new(txt))
                    cx.write_to_clipboard(ClipboardItem::new_string(txt))
                }
            }
            InternalEvent::ScrollToAlacPoint(point) => {

@@ -488,9 +488,9 @@ impl TerminalView {

    ///Attempt to paste the clipboard into the terminal
    fn paste(&mut self, _: &Paste, cx: &mut ViewContext<Self>) {
        if let Some(item) = cx.read_from_clipboard() {
        if let Some(clipboard_string) = cx.read_from_clipboard().and_then(|item| item.text()) {
            self.terminal
                .update(cx, |terminal, _cx| terminal.paste(item.text()));
                .update(cx, |terminal, _cx| terminal.paste(&clipboard_string));
        }
    }

@@ -361,7 +361,8 @@ mod test {
            Mode::Normal,
        );
        assert_eq!(
            cx.read_from_clipboard().map(|item| item.text().clone()),
            cx.read_from_clipboard()
                .map(|item| item.text().map(ToOwned::to_owned).unwrap()),
            Some("jumps".into())
        );
        cx.simulate_keystrokes("d d p");
@@ -373,10 +374,11 @@ mod test {
            Mode::Normal,
        );
        assert_eq!(
            cx.read_from_clipboard().map(|item| item.text().clone()),
            cx.read_from_clipboard()
                .map(|item| item.text().map(ToOwned::to_owned).unwrap()),
            Some("jumps".into())
        );
        cx.write_to_clipboard(ClipboardItem::new("test-copy".to_string()));
        cx.write_to_clipboard(ClipboardItem::new_string("test-copy".to_string()));
        cx.simulate_keystrokes("shift-p");
        cx.assert_state(
            indoc! {"

@@ -5,7 +5,7 @@ use crate::surrounds::SurroundsType;
use crate::{motion::Motion, object::Object};
use collections::HashMap;
use editor::{Anchor, ClipboardSelection};
use gpui::{Action, ClipboardItem, KeyContext};
use gpui::{Action, ClipboardEntry, ClipboardItem, KeyContext};
use language::{CursorShape, Selection, TransactionId};
use serde::{Deserialize, Serialize};
use ui::SharedString;
@@ -129,20 +129,24 @@ pub struct Register {

impl From<Register> for ClipboardItem {
    fn from(register: Register) -> Self {
        let item = ClipboardItem::new(register.text.into());
        if let Some(clipboard_selections) = register.clipboard_selections {
            item.with_metadata(clipboard_selections)
            ClipboardItem::new_string_with_metadata(register.text.into(), clipboard_selections)
        } else {
            item
            ClipboardItem::new_string(register.text.into())
        }
    }
}

impl From<ClipboardItem> for Register {
    fn from(value: ClipboardItem) -> Self {
        Register {
            text: value.text().to_owned().into(),
            clipboard_selections: value.metadata::<Vec<ClipboardSelection>>(),
    fn from(item: ClipboardItem) -> Self {
        // For now, we don't store metadata for multiple entries.
        match item.entries().first() {
            Some(ClipboardEntry::String(value)) if item.entries().len() == 1 => Register {
                text: value.text().to_owned().into(),
                clipboard_selections: value.metadata::<Vec<ClipboardSelection>>(),
            },
            // For now, registers can't store images. This could change in the future.
            _ => Register::default(),
        }
    }
}

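The vim register round-trip is the one place that inspects `entries()` directly: the conversion back only succeeds when the clipboard holds exactly one string entry, otherwise it falls back to `Register::default()`. A usage sketch, assuming `Register`'s fields as they appear in the conversion above:

```rust
// Round-trip sketch: Register -> ClipboardItem -> Register (fields as shown above).
let register = Register {
    text: "yanked text".into(),
    clipboard_selections: None,
};

let item: ClipboardItem = register.into(); // ClipboardItem::new_string under the hood
let back = Register::from(item);
assert_eq!(back.text.to_string(), "yanked text");

// Had the clipboard held an image, or more than one entry, `back` would have been
// Register::default() instead.
```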
@@ -247,7 +247,12 @@ impl NeovimBackedTestContext {
            register: '"',
            state: self.shared_state().await,
            neovim: self.neovim.read_register('"').await,
            editor: self.read_from_clipboard().unwrap().text().clone(),
            editor: self
                .read_from_clipboard()
                .unwrap()
                .text()
                .unwrap()
                .to_owned(),
        }
    }

@@ -586,7 +586,7 @@ impl Vim {
        } else {
            self.workspace_state.last_yank = cx
                .read_from_clipboard()
                .map(|item| item.text().to_owned().into());
                .and_then(|item| item.text().map(|string| string.into()))
        }

        self.workspace_state.registers.insert('"', content.clone());
@@ -663,7 +663,7 @@ impl Vim {
    fn system_clipboard_is_newer(&self, cx: &mut AppContext) -> bool {
        cx.read_from_clipboard().is_some_and(|item| {
            if let Some(last_state) = &self.workspace_state.last_yank {
                last_state != item.text()
                Some(last_state.as_ref()) != item.text().as_deref()
            } else {
                true
            }

@@ -927,7 +927,7 @@ mod test {
            the lazy dog"});
        assert_eq!(
            cx.read_from_clipboard()
                .map(|item| item.text().clone())
                .map(|item| item.text().map(ToOwned::to_owned).unwrap().clone())
                .unwrap(),
            "The q"
        );

@@ -342,7 +342,7 @@ impl Render for LanguageServerPrompt {
                        .on_click({
                            let message = request.message.clone();
                            move |_, cx| {
                                cx.write_to_clipboard(ClipboardItem::new(
                                cx.write_to_clipboard(ClipboardItem::new_string(
                                    message.clone(),
                                ))
                            }

@@ -1609,7 +1609,7 @@ impl Pane {
            .and_then(|entry| entry.project_path(cx))
            .map(|p| p.path.to_string_lossy().to_string())
        {
            cx.write_to_clipboard(ClipboardItem::new(clipboard_text));
            cx.write_to_clipboard(ClipboardItem::new_string(clipboard_text));
        }
    }

@@ -1819,7 +1819,7 @@ impl Pane {
                        "Copy Path",
                        Some(Box::new(CopyPath)),
                        cx.handler_for(&pane, move |_, cx| {
                            cx.write_to_clipboard(ClipboardItem::new(
                            cx.write_to_clipboard(ClipboardItem::new_string(
                                abs_path.to_string_lossy().to_string(),
                            ));
                        }),

@@ -7,7 +7,7 @@ use call::participant::{Frame, RemoteVideoTrack};
use client::{proto::PeerId, User};
use futures::StreamExt;
use gpui::{
    div, img, AppContext, EventEmitter, FocusHandle, FocusableView, InteractiveElement,
    div, surface, AppContext, EventEmitter, FocusHandle, FocusableView, InteractiveElement,
    ParentElement, Render, SharedString, Styled, Task, View, ViewContext, VisualContext,
    WindowContext,
};
@@ -75,7 +75,7 @@ impl Render for SharedScreen {
            .children(
                self.frame
                    .as_ref()
                    .map(|frame| img(frame.image()).size_full()),
                    .map(|frame| surface(frame.image()).size_full()),
            )
    }
}