22 changes: 15 additions & 7 deletions desktop/src/app.rs
@@ -198,21 +198,29 @@ impl App {
};
self.send_or_queue_web_message(bytes);
}
DesktopFrontendMessage::OpenFileDialog { title, filters, context } => {
DesktopFrontendMessage::OpenFileDialog { title, filters, multiple, context } => {
let app_event_scheduler = self.app_event_scheduler.clone();
let _ = thread::spawn(move || {
let mut dialog = AsyncFileDialog::new().set_title(title);
for filter in filters {
dialog = dialog.add_filter(filter.name, &filter.extensions);
}

let show_dialog = async move { dialog.pick_file().await.map(|f| f.path().to_path_buf()) };
let handles = if multiple {
futures::executor::block_on(dialog.pick_files()).unwrap_or_default()
} else {
futures::executor::block_on(dialog.pick_file()).into_iter().collect()
};

if let Some(path) = futures::executor::block_on(show_dialog)
&& let Ok(content) = fs::read(&path)
{
let message = DesktopWrapperMessage::FileDialogResult { path, content, context };
app_event_scheduler.schedule(AppEvent::DesktopWrapperMessage(message));
for handle in handles {
let path = handle.path().to_path_buf();
match fs::read(&path) {
Ok(content) => {
let message = DesktopWrapperMessage::FileDialogResult { path, content, context };
app_event_scheduler.schedule(AppEvent::DesktopWrapperMessage(message));
}
Err(e) => tracing::error!("Failed to read file {}: {}", path.display(), e),
}
}
});
}
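For readers unfamiliar with `rfd`, the single-versus-multiple pick pattern above can be summarized in a standalone sketch. This is not part of the PR; it assumes the `rfd` and `futures` crates already used here, a native target (where `FileHandle::path()` is available), and a hypothetical `pick_paths` helper running off the UI thread, as the spawned thread above does.

```rust
use std::path::PathBuf;

// Hypothetical helper illustrating the dialog pattern from the match arm above.
fn pick_paths(title: &str, multiple: bool) -> Vec<PathBuf> {
    let dialog = rfd::AsyncFileDialog::new().set_title(title);

    // `pick_files()` resolves to `Option<Vec<FileHandle>>` and `pick_file()` to `Option<FileHandle>`;
    // both are normalized into a `Vec` so the caller can loop over zero or more selections.
    let handles = if multiple {
        futures::executor::block_on(dialog.pick_files()).unwrap_or_default()
    } else {
        futures::executor::block_on(dialog.pick_file()).into_iter().collect()
    };

    handles.iter().map(|handle| handle.path().to_path_buf()).collect()
}
```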
2 changes: 2 additions & 0 deletions desktop/wrapper/src/intercept_frontend_message.rs
@@ -14,13 +14,15 @@ pub(super) fn intercept_frontend_message(dispatcher: &mut DesktopWrapperMessageD
dispatcher.respond(DesktopFrontendMessage::OpenFileDialog {
title: "Open Document".to_string(),
filters: vec![],
multiple: true,
context: OpenFileDialogContext::Open,
});
}
FrontendMessage::TriggerImport => {
dispatcher.respond(DesktopFrontendMessage::OpenFileDialog {
title: "Import File".to_string(),
filters: vec![],
multiple: false,
context: OpenFileDialogContext::Import,
});
}
2 changes: 2 additions & 0 deletions desktop/wrapper/src/messages.rs
@@ -16,6 +16,7 @@ pub enum DesktopFrontendMessage {
OpenFileDialog {
title: String,
filters: Vec<FileFilter>,
multiple: bool,
context: OpenFileDialogContext,
},
SaveFileDialog {
@@ -102,6 +103,7 @@ pub struct FileFilter {
pub extensions: Vec<String>,
}

#[derive(Clone, Copy)]
pub enum OpenFileDialogContext {
Open,
Import,
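A brief note on the new `#[derive(Clone, Copy)]`: the dialog result loop in `desktop/src/app.rs` above now emits one `FileDialogResult` per selected file, each carrying the same `context`, so the enum has to be duplicable rather than moved on the first iteration. A minimal sketch of the constraint, not from the PR, with a hypothetical `handle_result` function:

```rust
#[derive(Clone, Copy)]
enum OpenFileDialogContext {
    Open,
    Import,
}

fn emit_results(paths: Vec<std::path::PathBuf>, context: OpenFileDialogContext) {
    for path in paths {
        // Without `Copy` (or an explicit `.clone()`), `context` would be moved into the
        // first call and later iterations would fail to compile.
        handle_result(path, context);
    }
}

fn handle_result(_path: std::path::PathBuf, _context: OpenFileDialogContext) {}
```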
@@ -6442,6 +6442,7 @@ pub struct InputPersistentMetadata {
/// A general datastore that can store key-value pairs of any type for any input.
/// Each instance of the input node needs to store its own data, since it can lose the reference to its
/// node definition if the node signature is modified by the user. For example, adding/removing/renaming an import/export of a network node.
#[serde(serialize_with = "graphene_std::vector::serialize_hashmap_as_sorted_object")]
pub input_data: HashMap<String, Value>,
// An input can override a widget, which would otherwise be automatically generated from the type
// The string is the identifier to the widget override function stored in INPUT_OVERRIDES
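For context on the new `serialize_with` attribute: the serializer it points to (added later in this PR) writes the map as a key-sorted object instead of relying on `HashMap` iteration order. A minimal sketch of the effect, not from the PR, assuming `serde_json` as the document format and a simplified stand-in struct, with the serializer reachable at the same path the attribute uses:

```rust
use std::collections::HashMap;
use serde::Serialize;
use serde_json::Value;

#[derive(Serialize)]
struct InputDataDemo {
    // Hypothetical stand-in for the `input_data` field above.
    #[serde(serialize_with = "graphene_std::vector::serialize_hashmap_as_sorted_object")]
    input_data: HashMap<String, Value>,
}

fn main() {
    let mut input_data = HashMap::new();
    input_data.insert("b".to_string(), Value::from(2));
    input_data.insert("a".to_string(), Value::from(1));

    // Keys come out sorted, so repeated saves of the same document serialize identically:
    // {"input_data":{"a":1,"b":2}}
    println!("{}", serde_json::to_string(&InputDataDemo { input_data }).unwrap());
}
```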
4 changes: 2 additions & 2 deletions frontend/src/stores/portfolio.ts
@@ -94,8 +94,8 @@ export function createPortfolioStore(subscriptions: SubscriptionsRouter, editor:
});

subscriptions.subscribeFrontendMessage("TriggerOpen", async () => {
const data = await upload(`image/*,.${editor.fileExtension()}`, "data");
editor.openFile(data.filename, data.content);
const files = await upload(`image/*,.${editor.fileExtension()}`, "data", true);
files.forEach((file) => editor.openFile(file.filename, file.content));
});

subscriptions.subscribeFrontendMessage("TriggerImport", async () => {
43 changes: 29 additions & 14 deletions frontend/src/utility-functions/files.ts
@@ -32,29 +32,44 @@ export function downloadFile(filename: string, content: Uint8Array) {
export async function upload(accept: string, textOrData: "text"): Promise<UploadResult<string>>;
export async function upload(accept: string, textOrData: "data"): Promise<UploadResult<Uint8Array>>;
export async function upload(accept: string, textOrData: "both"): Promise<UploadResult<{ text: string; data: Uint8Array }>>;
export async function upload(accept: string, textOrData: "text" | "data" | "both"): Promise<UploadResult<string | Uint8Array | { text: string; data: Uint8Array }>> {
export async function upload(accept: string, textOrData: "data", multiple: true): Promise<UploadResult<Uint8Array>[]>;
export async function upload(
accept: string,
textOrData: "text" | "data" | "both",
multiple = false,
): Promise<UploadResult<string | Uint8Array | { text: string; data: Uint8Array }> | UploadResult<Uint8Array>[]> {
return new Promise((resolve) => {
const element = document.createElement("input");
element.type = "file";
element.accept = accept;
element.multiple = multiple;

element.addEventListener(
"change",
async () => {
if (element.files?.length) {
const file = element.files[0];

const filename = file.name;
const type = file.type;
const content =
textOrData === "text"
? await file.text()
: textOrData === "data"
? new Uint8Array(await file.arrayBuffer())
: { text: await file.text(), data: new Uint8Array(await file.arrayBuffer()) };

resolve({ filename, type, content });
if (!element.files?.length) return;

// The `multiple: true` overload constrains `textOrData` to "data", so we know each file produces a Uint8Array
if (multiple) {
const results = await Promise.all(
Array.from(element.files).map(async (file) => ({
filename: file.name,
type: file.type,
content: new Uint8Array(await file.arrayBuffer()),
})),
);
resolve(results);
return;
}

const file = element.files[0];
const content =
textOrData === "text"
? await file.text()
: textOrData === "data"
? new Uint8Array(await file.arrayBuffer())
: { text: await file.text(), data: new Uint8Array(await file.arrayBuffer()) };
resolve({ filename: file.name, type: file.type, content });
},
{ capture: false, once: true },
);
@@ -5,14 +5,20 @@ use core_types::uuid::generate_uuid;
use dyn_any::DynAny;
use glam::DVec2;
use kurbo::{BezPath, PathEl, Point};
use serde::de::{SeqAccess, Visitor};
use serde::ser::SerializeSeq;
use serde::{Deserialize, Deserializer, Serialize, Serializer};
use std::collections::{HashMap, HashSet};
use std::fmt;
use std::hash::BuildHasher;
use std::hash::Hash;

/// Represents a procedural change to the [`PointDomain`] in [`Vector`].
#[derive(Clone, Debug, Default, PartialEq)]
#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
pub struct PointModification {
add: Vec<PointId>,
#[cfg_attr(feature = "serde", serde(serialize_with = "serialize_hashset"))]
remove: HashSet<PointId>,
#[cfg_attr(feature = "serde", serde(serialize_with = "serialize_hashmap", deserialize_with = "deserialize_hashmap"))]
delta: HashMap<PointId, DVec2>,
@@ -79,6 +85,7 @@ impl PointModification {
#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
pub struct SegmentModification {
add: Vec<SegmentId>,
#[cfg_attr(feature = "serde", serde(serialize_with = "serialize_hashset"))]
remove: HashSet<SegmentId>,
#[cfg_attr(feature = "serde", serde(serialize_with = "serialize_hashmap", deserialize_with = "deserialize_hashmap"))]
start_point: HashMap<SegmentId, PointId>,
@@ -250,6 +257,7 @@ impl SegmentModification {
#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
pub struct RegionModification {
add: Vec<RegionId>,
#[cfg_attr(feature = "serde", serde(serialize_with = "serialize_hashset"))]
remove: HashSet<RegionId>,
#[cfg_attr(feature = "serde", serde(serialize_with = "serialize_hashmap", deserialize_with = "deserialize_hashmap"))]
segment_range: HashMap<RegionId, std::ops::RangeInclusive<SegmentId>>,
@@ -297,7 +305,9 @@ pub struct VectorModification {
points: PointModification,
segments: SegmentModification,
regions: RegionModification,
#[cfg_attr(feature = "serde", serde(serialize_with = "serialize_hashset"))]
add_g1_continuous: HashSet<[HandleId; 2]>,
#[cfg_attr(feature = "serde", serde(serialize_with = "serialize_hashset"))]
remove_g1_continuous: HashSet<[HandleId; 2]>,
}

@@ -520,27 +530,65 @@ impl graphene_hash::CacheHash for VectorModification {
}
}

// Do we want to enforce that all serialized/deserialized hashmaps are a vec of tuples?
// TODO: Do we want to enforce that all serialized/deserialized hashmaps are a vec of tuples?
// TODO: Eventually remove this document upgrade code
use serde::de::{SeqAccess, Visitor};
use serde::ser::SerializeSeq;
use serde::{Deserialize, Deserializer, Serialize, Serializer};
use std::fmt;
use std::hash::Hash;
/// Serializes as sorted `[[key, value], ...]` (sequence of pairs)
pub fn serialize_hashmap<K, V, S, H>(hashmap: &HashMap<K, V, H>, serializer: S) -> Result<S::Ok, S::Error>
where
K: Serialize + Eq + Hash,
K: Serialize + Eq + Hash + Ord,
V: Serialize,
S: Serializer,
H: BuildHasher,
{
let mut seq = serializer.serialize_seq(Some(hashmap.len()))?;
for (key, value) in hashmap {
// Sort entries by key so the serialized output is deterministic across runs (HashMap iteration order is randomized).
// Removes a major source of churn in saved-document diffs without affecting load behavior.
let mut entries: Vec<_> = hashmap.iter().collect();
entries.sort_by(|a, b| a.0.cmp(b.0));

let mut seq = serializer.serialize_seq(Some(entries.len()))?;
for (key, value) in entries {
seq.serialize_element(&(key, value))?;
}
seq.end()
}

/// Serializes as sorted `{"key": value, ...}` (JSON object)
pub fn serialize_hashmap_as_sorted_object<K, V, S, H>(hashmap: &HashMap<K, V, H>, serializer: S) -> Result<S::Ok, S::Error>
where
K: Serialize + Eq + Hash + Ord,
V: Serialize,
S: Serializer,
H: BuildHasher,
{
use serde::ser::SerializeMap;

let mut entries: Vec<_> = hashmap.iter().collect();
entries.sort_by(|a, b| a.0.cmp(b.0));

let mut map = serializer.serialize_map(Some(entries.len()))?;
for (key, value) in entries {
map.serialize_entry(key, value)?;
}
map.end()
}

/// Serializes as sorted `[value, ...]` (JSON array)
pub fn serialize_hashset<T, S, H>(set: &HashSet<T, H>, serializer: S) -> Result<S::Ok, S::Error>
where
T: Serialize + Eq + Hash + Ord,
S: Serializer,
H: BuildHasher,
{
let mut entries: Vec<_> = set.iter().collect();
entries.sort();

let mut seq = serializer.serialize_seq(Some(entries.len()))?;
for value in entries {
seq.serialize_element(value)?;
}
seq.end()
}

pub fn deserialize_hashmap<'de, K, V, D, H>(deserializer: D) -> Result<HashMap<K, V, H>, D::Error>
where
K: Deserialize<'de> + Eq + Hash,
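To make the intent of the sorting concrete, here is a small demo, not part of the PR, assuming `serde_json` output, a hypothetical struct, and the helpers above in scope:

```rust
use std::collections::{HashMap, HashSet};
use serde::Serialize;

#[derive(Serialize)]
struct Demo {
    // Non-string keys serialize as a key-sorted sequence of `[key, value]` pairs.
    #[serde(serialize_with = "serialize_hashmap")]
    delta: HashMap<u64, f64>,
    // Sets serialize as a sorted array of values.
    #[serde(serialize_with = "serialize_hashset")]
    remove: HashSet<u64>,
}

fn main() {
    let demo = Demo {
        delta: HashMap::from([(7, 0.5), (3, 1.5)]),
        remove: HashSet::from([9, 4]),
    };

    // Identical contents now produce identical bytes on every save, regardless of
    // the randomized HashMap/HashSet iteration order:
    // {"delta":[[3,1.5],[7,0.5]],"remove":[4,9]}
    println!("{}", serde_json::to_string(&demo).unwrap());
}
```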
2 changes: 1 addition & 1 deletion node-graph/nodes/gstd/src/lib.rs
@@ -32,7 +32,7 @@ pub mod vector {
pub use vector_types::vector::click_target;
pub use vector_types::vector::misc::HandleId;
pub use vector_types::vector::{PointId, RegionId, SegmentId, StrokeId};
pub use vector_types::vector::{deserialize_hashmap, serialize_hashmap};
pub use vector_types::vector::{deserialize_hashmap, serialize_hashmap, serialize_hashmap_as_sorted_object};

// Re-export HandleExt trait and NoHashBuilder
pub use vector_types::vector::HandleExt;