Implement Syncback to support converting Roblox files to a Rojo project (#937)

This is a very large commit.
Consider checking the linked PR for more information.
This commit is contained in:
Micah
2025-11-19 09:21:33 -08:00
committed by GitHub
parent 071b6e7e23
commit 9b5a07191b
239 changed files with 5325 additions and 225 deletions

View File

@@ -1,16 +1,24 @@
use std::{collections::BTreeMap, path::Path};
use std::{
borrow::Cow,
collections::{BTreeMap, BTreeSet},
path::Path,
};
use anyhow::Context;
use memofs::Vfs;
use rbx_dom_weak::ustr;
use serde::Serialize;
use rbx_dom_weak::{types::Variant, ustr};
use serde::{Deserialize, Serialize};
use crate::{
snapshot::{InstanceContext, InstanceMetadata, InstanceSnapshot},
snapshot_middleware::meta_file::DirectoryMetadata,
syncback::{FsSnapshot, SyncbackReturn, SyncbackSnapshot},
};
use super::{dir::snapshot_dir_no_meta, meta_file::AdjacentMetadata};
use super::{
dir::{snapshot_dir_no_meta, syncback_dir_no_meta},
meta_file::{AdjacentMetadata, DirectoryMetadata},
PathExt as _,
};
pub fn snapshot_csv(
_context: &InstanceContext,
@@ -51,9 +59,10 @@ pub fn snapshot_csv_init(
context: &InstanceContext,
vfs: &Vfs,
init_path: &Path,
name: &str,
) -> anyhow::Result<Option<InstanceSnapshot>> {
let folder_path = init_path.parent().unwrap();
let dir_snapshot = snapshot_dir_no_meta(context, vfs, folder_path)?.unwrap();
let dir_snapshot = snapshot_dir_no_meta(context, vfs, folder_path, name)?.unwrap();
if dir_snapshot.class_name != "Folder" {
anyhow::bail!(
@@ -70,33 +79,111 @@ pub fn snapshot_csv_init(
init_snapshot.children = dir_snapshot.children;
init_snapshot.metadata = dir_snapshot.metadata;
// The directory snapshot middleware includes all possible init paths
// so we don't need to add it here.
DirectoryMetadata::read_and_apply_all(vfs, folder_path, &mut init_snapshot)?;
Ok(Some(init_snapshot))
}
/// Converts a `LocalizationTable` instance back into a standalone `.csv`
/// file, plus an adjacent `<name>.meta.json` file when extra metadata
/// remains after the table contents are extracted.
pub fn syncback_csv<'sync>(
    snapshot: &SyncbackSnapshot<'sync>,
) -> anyhow::Result<SyncbackReturn<'sync>> {
    let instance = snapshot.new_inst();
    let table_json = match instance.properties.get(&ustr("Contents")) {
        Some(Variant::String(json)) => json.as_str(),
        _ => anyhow::bail!("LocalizationTables must have a `Contents` property that is a String"),
    };

    let mut fs_snapshot = FsSnapshot::new();
    fs_snapshot.add_file(&snapshot.path, localization_to_csv(table_json)?);

    if let Some(mut meta) =
        AdjacentMetadata::from_syncback_snapshot(snapshot, snapshot.path.clone())?
    {
        // `Contents` is written out as the CSV file itself, so it must not
        // also appear in the metadata. LocalizationTables have relatively few
        // properties that we care about, so shifting is fine.
        meta.properties.shift_remove(&ustr("Contents"));

        if !meta.is_empty() {
            let dir = snapshot.path.parent_err()?;
            let serialized =
                serde_json::to_vec_pretty(&meta).context("cannot serialize metadata")?;
            fs_snapshot.add_file(dir.join(format!("{}.meta.json", instance.name)), serialized);
        }
    }

    Ok(SyncbackReturn {
        fs_snapshot,
        children: Vec::new(),
        removed_children: Vec::new(),
    })
}
/// Converts a `LocalizationTable` instance into a directory containing an
/// `init.csv` file, plus an `init.meta.json` file when extra metadata
/// remains after the table contents are extracted.
pub fn syncback_csv_init<'sync>(
    snapshot: &SyncbackSnapshot<'sync>,
) -> anyhow::Result<SyncbackReturn<'sync>> {
    let instance = snapshot.new_inst();
    let table_json = match instance.properties.get(&ustr("Contents")) {
        Some(Variant::String(json)) => json.as_str(),
        _ => anyhow::bail!("LocalizationTables must have a `Contents` property that is a String"),
    };

    // Reuse the directory middleware for the children, then layer the
    // init.csv (and optional init.meta.json) on top.
    let mut dir_syncback = syncback_dir_no_meta(snapshot)?;
    dir_syncback
        .fs_snapshot
        .add_file(snapshot.path.join("init.csv"), localization_to_csv(table_json)?);

    if let Some(mut meta) =
        DirectoryMetadata::from_syncback_snapshot(snapshot, snapshot.path.clone())?
    {
        // `Contents` becomes the CSV file itself, so drop it from the
        // metadata. LocalizationTables have relatively few properties that we
        // care about, so shifting is fine.
        meta.properties.shift_remove(&ustr("Contents"));

        if !meta.is_empty() {
            dir_syncback.fs_snapshot.add_file(
                snapshot.path.join("init.meta.json"),
                serde_json::to_vec_pretty(&meta)
                    .context("could not serialize new init.meta.json")?,
            );
        }
    }

    Ok(dir_syncback)
}
/// Struct that holds any valid row from a Roblox CSV translation table.
///
/// We manually deserialize into this table from CSV, but let serde_json handle
/// serialization.
#[derive(Debug, Default, Serialize)]
#[derive(Debug, Default, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
struct LocalizationEntry<'a> {
#[serde(skip_serializing_if = "Option::is_none")]
key: Option<&'a str>,
key: Option<Cow<'a, str>>,
#[serde(skip_serializing_if = "Option::is_none")]
context: Option<&'a str>,
context: Option<Cow<'a, str>>,
// Roblox writes `examples` for LocalizationTable's Content property, which
// causes it to not roundtrip correctly.
// This is reported here: https://devforum.roblox.com/t/2908720.
//
// To support their mistake, we support an alias named `examples`.
#[serde(skip_serializing_if = "Option::is_none", alias = "examples")]
example: Option<Cow<'a, str>>,
#[serde(skip_serializing_if = "Option::is_none")]
example: Option<&'a str>,
#[serde(skip_serializing_if = "Option::is_none")]
source: Option<&'a str>,
source: Option<Cow<'a, str>>,
// We use a BTreeMap here to get deterministic output order.
values: BTreeMap<&'a str, &'a str>,
values: BTreeMap<Cow<'a, str>, Cow<'a, str>>,
}
/// Normally, we'd be able to let the csv crate construct our struct for us.
@@ -130,12 +217,14 @@ fn convert_localization_csv(contents: &[u8]) -> Result<String, csv::Error> {
}
match header {
"Key" => entry.key = Some(value),
"Source" => entry.source = Some(value),
"Context" => entry.context = Some(value),
"Example" => entry.example = Some(value),
"Key" => entry.key = Some(Cow::Borrowed(value)),
"Source" => entry.source = Some(Cow::Borrowed(value)),
"Context" => entry.context = Some(Cow::Borrowed(value)),
"Example" => entry.example = Some(Cow::Borrowed(value)),
_ => {
entry.values.insert(header, value);
entry
.values
.insert(Cow::Borrowed(header), Cow::Borrowed(value));
}
}
}
@@ -153,6 +242,57 @@ fn convert_localization_csv(contents: &[u8]) -> Result<String, csv::Error> {
Ok(encoded)
}
/// Takes a localization table (as a JSON string — the `Contents` property of
/// a `LocalizationTable`) and converts it into a CSV file.
///
/// The CSV file is ordered, so it should be deterministic.
///
/// # Errors
/// Returns an error if `csv_contents` is not valid localization-table JSON
/// or if a CSV record cannot be written.
fn localization_to_csv(csv_contents: &str) -> anyhow::Result<Vec<u8>> {
    let mut out = Vec::new();
    let mut writer = csv::Writer::from_writer(&mut out);

    let mut csv: Vec<LocalizationEntry> =
        serde_json::from_str(csv_contents).context("cannot decode JSON from localization table")?;
    // Sort for deterministic output. `Option<Cow<str>>` is totally ordered,
    // so use `Ord::cmp` directly rather than `partial_cmp().unwrap()`.
    // TODO sort this better (e.g. fall back on the key for equal sources).
    csv.sort_by(|a, b| a.source.cmp(&b.source));

    let mut headers = vec!["Key", "Source", "Context", "Example"];
    // We want both order and a lack of duplicates, so we use a BTreeSet.
    let mut extra_headers = BTreeSet::new();
    for entry in &csv {
        for lang in entry.values.keys() {
            extra_headers.insert(lang.as_ref());
        }
    }
    headers.extend(extra_headers.iter());

    writer
        .write_record(&headers)
        .context("could not write headers for localization table")?;

    // One record buffer reused across rows to avoid reallocating per row.
    let mut record: Vec<&str> = Vec::with_capacity(headers.len());
    for entry in &csv {
        record.push(entry.key.as_deref().unwrap_or_default());
        record.push(entry.source.as_deref().unwrap_or_default());
        record.push(entry.context.as_deref().unwrap_or_default());
        record.push(entry.example.as_deref().unwrap_or_default());
        let values = &entry.values;
        for header in &extra_headers {
            // Missing translations serialize as empty cells.
            record.push(values.get(*header).map(AsRef::as_ref).unwrap_or_default());
        }
        writer
            .write_record(&record)
            .context("cannot write record for localization table")?;
        record.clear();
    }
    // We must drop `writer` here to regain access to `out`.
    drop(writer);

    Ok(out)
}
#[cfg(test)]
mod test {
use super::*;
@@ -240,6 +380,7 @@ Ack,Ack!,,An exclamation of despair,¡Ay!"#,
&InstanceContext::with_emit_legacy_scripts(Some(true)),
&vfs,
Path::new("/root/init.csv"),
"root",
)
.unwrap()
.unwrap();
@@ -277,6 +418,7 @@ Ack,Ack!,,An exclamation of despair,¡Ay!"#,
&InstanceContext::with_emit_legacy_scripts(Some(true)),
&vfs,
Path::new("/root/init.csv"),
"root",
)
.unwrap()
.unwrap();

View File

@@ -1,17 +1,27 @@
use std::path::Path;
use std::{
collections::{HashMap, HashSet},
path::Path,
};
use anyhow::Context;
use memofs::{DirEntry, Vfs};
use crate::snapshot::{InstanceContext, InstanceMetadata, InstanceSnapshot};
use crate::{
snapshot::{InstanceContext, InstanceMetadata, InstanceSnapshot, InstigatingSource},
syncback::{hash_instance, FsSnapshot, SyncbackReturn, SyncbackSnapshot},
};
use super::{meta_file::DirectoryMetadata, snapshot_from_vfs};
const EMPTY_DIR_KEEP_NAME: &str = ".gitkeep";
pub fn snapshot_dir(
context: &InstanceContext,
vfs: &Vfs,
path: &Path,
name: &str,
) -> anyhow::Result<Option<InstanceSnapshot>> {
let mut snapshot = match snapshot_dir_no_meta(context, vfs, path)? {
let mut snapshot = match snapshot_dir_no_meta(context, vfs, path, name)? {
Some(snapshot) => snapshot,
None => return Ok(None),
};
@@ -29,6 +39,7 @@ pub fn snapshot_dir_no_meta(
context: &InstanceContext,
vfs: &Vfs,
path: &Path,
name: &str,
) -> anyhow::Result<Option<InstanceSnapshot>> {
let passes_filter_rules = |child: &DirEntry| {
context
@@ -51,13 +62,6 @@ pub fn snapshot_dir_no_meta(
}
}
let instance_name = path
.file_name()
.expect("Could not extract file name")
.to_str()
.ok_or_else(|| anyhow::anyhow!("File name was not valid UTF-8: {}", path.display()))?
.to_string();
let relevant_paths = vec![
path.to_path_buf(),
// TODO: We shouldn't need to know about Lua existing in this
@@ -73,7 +77,7 @@ pub fn snapshot_dir_no_meta(
];
let snapshot = InstanceSnapshot::new()
.name(instance_name)
.name(name)
.class_name("Folder")
.children(snapshot_children)
.metadata(
@@ -86,6 +90,136 @@ pub fn snapshot_dir_no_meta(
Ok(Some(snapshot))
}
/// Converts an instance into a directory on the file system, writing an
/// `init.meta.json` for whatever the directory itself cannot express and a
/// `.gitkeep` placeholder when the directory would otherwise be empty.
pub fn syncback_dir<'sync>(
    snapshot: &SyncbackSnapshot<'sync>,
) -> anyhow::Result<SyncbackReturn<'sync>> {
    let instance = snapshot.new_inst();
    let mut ret = syncback_dir_no_meta(snapshot)?;

    let mut meta = DirectoryMetadata::from_syncback_snapshot(snapshot, snapshot.path.clone())?;
    if let Some(meta) = meta.as_mut() {
        // A plain directory implies Folder; any other class must be recorded
        // in the metadata file.
        if instance.class != "Folder" {
            meta.class_name = Some(instance.class);
        }

        if !meta.is_empty() {
            let serialized = serde_json::to_vec_pretty(&meta)
                .context("could not serialize new init.meta.json")?;
            ret.fs_snapshot
                .add_file(snapshot.path.join("init.meta.json"), serialized);
        }
    }

    // With no children and no meaningful metadata, the directory would be
    // empty on disk; drop a `.gitkeep` in it so it persists.
    let meta_is_empty = matches!(&meta, Some(meta) if meta.is_empty());
    if instance.children().is_empty() && meta_is_empty {
        ret.fs_snapshot
            .add_file(snapshot.path.join(EMPTY_DIR_KEEP_NAME), Vec::new())
    }

    Ok(ret)
}
/// Converts an instance into a directory on the file system *without*
/// writing an `init.meta.json` for it. This is shared by plain directories
/// and by `init`-style middleware (scripts, CSVs) that write their own meta.
///
/// Returns the file-system work for this directory, the children that still
/// need to be processed, and the old children that should be removed.
pub fn syncback_dir_no_meta<'sync>(
    snapshot: &SyncbackSnapshot<'sync>,
) -> anyhow::Result<SyncbackReturn<'sync>> {
    let new_inst = snapshot.new_inst();

    let mut children = Vec::new();
    let mut removed_children = Vec::new();

    // We have to enforce unique child names for the file system. Names are
    // compared lowercased, so `Foo` and `foo` count as duplicates ("case may
    // not exactly match" in the error below).
    let mut child_names = HashSet::with_capacity(new_inst.children().len());
    let mut duplicate_set = HashSet::new();

    for child_ref in new_inst.children() {
        let child = snapshot.get_new_instance(*child_ref).unwrap();
        if !child_names.insert(child.name.to_lowercase()) {
            duplicate_set.insert(child.name.as_str());
        }
    }

    if !duplicate_set.is_empty() {
        // Cap the error message so a pathological tree doesn't dump an
        // unbounded list of names at the user.
        if duplicate_set.len() <= 25 {
            anyhow::bail!(
                "Instance has children with duplicate name (case may not exactly match):\n {}",
                duplicate_set.into_iter().collect::<Vec<&str>>().join(", ")
            );
        }
        anyhow::bail!("Instance has more than 25 children with duplicate names");
    }

    if let Some(old_inst) = snapshot.old_inst() {
        // Pair new children with old children by name so files that already
        // exist are updated in place instead of recreated.
        let mut old_child_map = HashMap::with_capacity(old_inst.children().len());
        for child in old_inst.children() {
            let inst = snapshot.get_old_instance(*child).unwrap();
            old_child_map.insert(inst.name(), inst);
        }

        for new_child_ref in new_inst.children() {
            let new_child = snapshot.get_new_instance(*new_child_ref).unwrap();
            if let Some(old_child) = old_child_map.remove(new_child.name.as_str()) {
                if old_child.metadata().relevant_paths.is_empty() {
                    log::debug!(
                        "Skipping instance {} because it doesn't exist on the disk",
                        old_child.name()
                    );
                    continue;
                } else if matches!(
                    old_child.metadata().instigating_source,
                    Some(InstigatingSource::ProjectNode { .. })
                ) {
                    // Children defined by a project file are owned by that
                    // project file, not the file system, so syncback must
                    // not touch them.
                    log::debug!(
                        "Skipping instance {} because it originates in a project file",
                        old_child.name()
                    );
                    continue;
                }
                // This child exists in both doms. Pass it on.
                children.push(snapshot.with_joined_path(*new_child_ref, Some(old_child.id()))?);
            } else {
                // The child only exists in the new dom
                children.push(snapshot.with_joined_path(*new_child_ref, None)?);
            }
        }
        // Any children that are in the old dom but not the new one are removed.
        removed_children.extend(old_child_map.into_values());
    } else {
        // There is no old instance. Just add every child.
        for new_child_ref in new_inst.children() {
            children.push(snapshot.with_joined_path(*new_child_ref, None)?);
        }
    }

    let mut fs_snapshot = FsSnapshot::new();
    if let Some(old_ref) = snapshot.old {
        // Hash both trees so unchanged directories can be skipped entirely
        // rather than reserialized.
        let new_hash = hash_instance(snapshot.project(), snapshot.new_tree(), snapshot.new)
            .expect("new Instance should be hashable");
        let old_hash = hash_instance(snapshot.project(), snapshot.old_tree(), old_ref)
            .expect("old Instance should be hashable");

        if old_hash != new_hash {
            fs_snapshot.add_dir(&snapshot.path);
        } else {
            log::debug!(
                "Skipping reserializing directory {} because old and new tree hash the same",
                new_inst.name
            );
        }
    } else {
        fs_snapshot.add_dir(&snapshot.path);
    }

    Ok(SyncbackReturn {
        fs_snapshot,
        children,
        removed_children,
    })
}
#[cfg(test)]
mod test {
use super::*;
@@ -100,9 +234,10 @@ mod test {
let vfs = Vfs::new(imfs);
let instance_snapshot = snapshot_dir(&InstanceContext::default(), &vfs, Path::new("/foo"))
.unwrap()
.unwrap();
let instance_snapshot =
snapshot_dir(&InstanceContext::default(), &vfs, Path::new("/foo"), "foo")
.unwrap()
.unwrap();
insta::assert_yaml_snapshot!(instance_snapshot);
}
@@ -118,9 +253,10 @@ mod test {
let vfs = Vfs::new(imfs);
let instance_snapshot = snapshot_dir(&InstanceContext::default(), &vfs, Path::new("/foo"))
.unwrap()
.unwrap();
let instance_snapshot =
snapshot_dir(&InstanceContext::default(), &vfs, Path::new("/foo"), "foo")
.unwrap()
.unwrap();
insta::assert_yaml_snapshot!(instance_snapshot);
}

View File

@@ -1,17 +1,19 @@
use std::{borrow::Cow, collections::HashMap, path::Path, str};
use std::{borrow::Cow, path::Path, str};
use anyhow::Context;
use indexmap::IndexMap;
use memofs::Vfs;
use rbx_dom_weak::{
types::{Attributes, Ref},
types::{Attributes, Ref, Variant},
HashMapExt as _, Ustr, UstrMap,
};
use serde::Deserialize;
use serde::{Deserialize, Serialize};
use crate::{
json,
resolution::UnresolvedValue,
snapshot::{InstanceContext, InstanceSnapshot},
syncback::{filter_properties_preallocated, FsSnapshot, SyncbackReturn, SyncbackSnapshot},
RojoRef,
};
@@ -63,13 +65,86 @@ pub fn snapshot_json_model(
Ok(Some(snapshot))
}
#[derive(Debug, Deserialize)]
/// Converts an instance (and all of its descendants) back into a JSON model
/// file.
pub fn syncback_json_model<'sync>(
    snapshot: &SyncbackSnapshot<'sync>,
) -> anyhow::Result<SyncbackReturn<'sync>> {
    // Scratch buffer reused by property filtering for every node in the tree.
    let mut scratch = Vec::with_capacity(snapshot.new_inst().properties.len());
    let mut model = json_model_from_pair(snapshot, &mut scratch, snapshot.new);
    // We don't need the name on the root, but we do for children.
    model.name = None;

    let serialized =
        serde_json::to_vec_pretty(&model).context("failed to serialize new JSON Model")?;
    Ok(SyncbackReturn {
        fs_snapshot: FsSnapshot::new().with_added_file(&snapshot.path, serialized),
        children: Vec::new(),
        removed_children: Vec::new(),
    })
}
/// Builds a `JsonModel` for the given referent in the new tree, recursing
/// into its children.
///
/// `prop_buffer` is a scratch buffer used for property filtering; it is
/// fully drained before the recursive calls, so one allocation serves the
/// entire tree.
fn json_model_from_pair<'sync>(
    snapshot: &SyncbackSnapshot<'sync>,
    prop_buffer: &mut Vec<(Ustr, &'sync Variant)>,
    new: Ref,
) -> JsonModel {
    let new_inst = snapshot
        .get_new_instance(new)
        .expect("all new referents passed to json_model_from_pair should exist");

    filter_properties_preallocated(snapshot.project(), new_inst, prop_buffer);

    let mut properties = IndexMap::new();
    let mut attributes = IndexMap::new();
    for (name, value) in prop_buffer.drain(..) {
        match value {
            // The `Attributes` property is split out into its own map in the
            // JSON model rather than stored as a regular property.
            Variant::Attributes(attrs) => {
                for (attr_name, attr_value) in attrs.iter() {
                    // We (probably) don't want to preserve internal attributes,
                    // only user defined ones.
                    if attr_name.starts_with("RBX") {
                        continue;
                    }
                    attributes.insert(
                        attr_name.clone(),
                        UnresolvedValue::from_variant_unambiguous(attr_value.clone()),
                    );
                }
            }
            _ => {
                properties.insert(
                    name,
                    UnresolvedValue::from_variant(value.clone(), &new_inst.class, &name),
                );
            }
        }
    }

    // The buffer is empty again here (drained above), so the recursive calls
    // below can safely reuse it.
    let mut children = Vec::with_capacity(new_inst.children().len());
    for new_child_ref in new_inst.children() {
        children.push(json_model_from_pair(snapshot, prop_buffer, *new_child_ref))
    }

    JsonModel {
        name: Some(new_inst.name.clone()),
        class_name: new_inst.class,
        children,
        properties,
        attributes,
        id: None,
        schema: None,
    }
}
#[derive(Debug, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
struct JsonModel {
#[serde(rename = "$schema", skip_serializing_if = "Option::is_none")]
schema: Option<String>,
#[serde(alias = "Name")]
#[serde(alias = "Name", skip_serializing_if = "Option::is_none")]
name: Option<String>,
#[serde(alias = "ClassName")]
@@ -87,13 +162,13 @@ struct JsonModel {
#[serde(
alias = "Properties",
default = "UstrMap::new",
skip_serializing_if = "HashMap::is_empty"
default,
skip_serializing_if = "IndexMap::is_empty"
)]
properties: UstrMap<UnresolvedValue>,
properties: IndexMap<Ustr, UnresolvedValue>,
#[serde(default = "HashMap::new", skip_serializing_if = "HashMap::is_empty")]
attributes: HashMap<String, UnresolvedValue>,
#[serde(default = "IndexMap::new", skip_serializing_if = "IndexMap::is_empty")]
attributes: IndexMap<String, UnresolvedValue>,
}
impl JsonModel {

View File

@@ -1,11 +1,22 @@
use std::{path::Path, str};
use anyhow::Context as _;
use memofs::Vfs;
use rbx_dom_weak::{types::Enum, ustr, HashMapExt as _, UstrMap};
use rbx_dom_weak::{
types::{Enum, Variant},
ustr, HashMapExt as _, UstrMap,
};
use crate::snapshot::{InstanceContext, InstanceMetadata, InstanceSnapshot};
use crate::{
snapshot::{InstanceContext, InstanceMetadata, InstanceSnapshot},
syncback::{FsSnapshot, SyncbackReturn, SyncbackSnapshot},
};
use super::{dir::snapshot_dir_no_meta, meta_file::AdjacentMetadata, meta_file::DirectoryMetadata};
use super::{
dir::{snapshot_dir_no_meta, syncback_dir_no_meta},
meta_file::{AdjacentMetadata, DirectoryMetadata},
PathExt as _,
};
#[derive(Debug)]
pub enum ScriptType {
@@ -95,10 +106,11 @@ pub fn snapshot_lua_init(
context: &InstanceContext,
vfs: &Vfs,
init_path: &Path,
name: &str,
script_type: ScriptType,
) -> anyhow::Result<Option<InstanceSnapshot>> {
let folder_path = init_path.parent().unwrap();
let dir_snapshot = snapshot_dir_no_meta(context, vfs, folder_path)?.unwrap();
let dir_snapshot = snapshot_dir_no_meta(context, vfs, folder_path, name)?.unwrap();
if dir_snapshot.class_name != "Folder" {
anyhow::bail!(
@@ -117,12 +129,89 @@ pub fn snapshot_lua_init(
init_snapshot.children = dir_snapshot.children;
init_snapshot.metadata = dir_snapshot.metadata;
// The directory snapshot middleware includes all possible init paths
// so we don't need to add it here.
DirectoryMetadata::read_and_apply_all(vfs, folder_path, &mut init_snapshot)?;
Ok(Some(init_snapshot))
}
/// Converts a script instance back into a standalone script file, plus an
/// adjacent `<name>.meta.json` file when extra metadata remains after the
/// source is extracted.
pub fn syncback_lua<'sync>(
    snapshot: &SyncbackSnapshot<'sync>,
) -> anyhow::Result<SyncbackReturn<'sync>> {
    let instance = snapshot.new_inst();
    let source_bytes = match instance.properties.get(&ustr("Source")) {
        Some(Variant::String(source)) => source.as_bytes().to_vec(),
        _ => anyhow::bail!("Scripts must have a `Source` property that is a String"),
    };

    let mut fs_snapshot = FsSnapshot::new();
    fs_snapshot.add_file(&snapshot.path, source_bytes);

    if let Some(mut meta) =
        AdjacentMetadata::from_syncback_snapshot(snapshot, snapshot.path.clone())?
    {
        // `Source` is written out as the script file itself, so it must not
        // also appear in the metadata. Scripts have relatively few properties
        // that we care about, so shifting is fine.
        meta.properties.shift_remove(&ustr("Source"));

        if !meta.is_empty() {
            let dir = snapshot.path.parent_err()?;
            fs_snapshot.add_file(
                dir.join(format!("{}.meta.json", instance.name)),
                serde_json::to_vec_pretty(&meta).context("cannot serialize metadata")?,
            );
        }
    }

    Ok(SyncbackReturn {
        fs_snapshot,
        // Scripts don't have a child!
        children: Vec::new(),
        removed_children: Vec::new(),
    })
}
/// Converts a script instance into a directory containing an `init` script
/// (`init.luau`, `init.server.luau`, or `init.client.luau` depending on
/// `script_type`), reusing the directory middleware for its children.
pub fn syncback_lua_init<'sync>(
    script_type: ScriptType,
    snapshot: &SyncbackSnapshot<'sync>,
) -> anyhow::Result<SyncbackReturn<'sync>> {
    let new_inst = snapshot.new_inst();
    let path = snapshot.path.join(match script_type {
        ScriptType::Server => "init.server.luau",
        ScriptType::Client => "init.client.luau",
        ScriptType::Module => "init.luau",
        // Any other script type (if added later) is not handled yet.
        _ => anyhow::bail!("syncback is not yet implemented for {script_type:?}"),
    });
    let contents = if let Some(Variant::String(source)) = new_inst.properties.get(&ustr("Source")) {
        source.as_bytes().to_vec()
    } else {
        anyhow::bail!("Scripts must have a `Source` property that is a String")
    };

    let mut dir_syncback = syncback_dir_no_meta(snapshot)?;
    dir_syncback.fs_snapshot.add_file(&path, contents);

    // NOTE(review): this passes the init *file* path to
    // `DirectoryMetadata::from_syncback_snapshot`, while `syncback_csv_init`
    // passes the directory (`snapshot.path`). Confirm which is intended —
    // the choice affects the instigating-source comparison inside.
    let meta = DirectoryMetadata::from_syncback_snapshot(snapshot, path.clone())?;
    if let Some(mut meta) = meta {
        // `Source` is written as the init file itself, so it must not also
        // appear in the metadata. Scripts have relatively few properties that
        // we care about, so shifting is fine.
        meta.properties.shift_remove(&ustr("Source"));

        if !meta.is_empty() {
            dir_syncback.fs_snapshot.add_file(
                snapshot.path.join("init.meta.json"),
                serde_json::to_vec_pretty(&meta)
                    .context("could not serialize new init.meta.json")?,
            );
        }
    }

    Ok(dir_syncback)
}
#[cfg(test)]
mod test {
use super::*;
@@ -305,6 +394,7 @@ mod test {
&InstanceContext::with_emit_legacy_scripts(Some(true)),
&vfs,
Path::new("/root/init.lua"),
"root",
ScriptType::Module,
)
.unwrap()
@@ -336,6 +426,7 @@ mod test {
&InstanceContext::with_emit_legacy_scripts(Some(true)),
&vfs,
Path::new("/root/init.lua"),
"root",
ScriptType::Module,
)
.unwrap()

View File

@@ -1,14 +1,18 @@
use std::{
collections::HashMap,
path::{Path, PathBuf},
};
use std::path::{Path, PathBuf};
use anyhow::{format_err, Context};
use indexmap::IndexMap;
use memofs::{IoResultExt as _, Vfs};
use rbx_dom_weak::{types::Attributes, Ustr, UstrMap};
use rbx_dom_weak::{
types::{Attributes, Variant},
Ustr,
};
use serde::{Deserialize, Serialize};
use crate::{json, resolution::UnresolvedValue, snapshot::InstanceSnapshot, RojoRef};
use crate::{
json, resolution::UnresolvedValue, snapshot::InstanceSnapshot, syncback::SyncbackSnapshot,
RojoRef,
};
/// Represents metadata in a sibling file with the same basename.
///
@@ -26,11 +30,11 @@ pub struct AdjacentMetadata {
#[serde(skip_serializing_if = "Option::is_none")]
pub ignore_unknown_instances: Option<bool>,
#[serde(default, skip_serializing_if = "HashMap::is_empty")]
pub properties: UstrMap<UnresolvedValue>,
#[serde(default, skip_serializing_if = "IndexMap::is_empty")]
pub properties: IndexMap<Ustr, UnresolvedValue>,
#[serde(default, skip_serializing_if = "HashMap::is_empty")]
pub attributes: HashMap<String, UnresolvedValue>,
#[serde(default, skip_serializing_if = "IndexMap::is_empty")]
pub attributes: IndexMap<String, UnresolvedValue>,
#[serde(skip)]
pub path: PathBuf,
@@ -80,6 +84,76 @@ impl AdjacentMetadata {
Ok(meta)
}
/// Constructs an `AdjacentMetadata` from the provided snapshot, assuming it
/// will be at the provided path.
///
/// Returns `Ok(None)` when the old instance's instigating source is a
/// different file than `path` — that metadata belongs to another file.
pub fn from_syncback_snapshot(
    snapshot: &SyncbackSnapshot,
    path: PathBuf,
) -> anyhow::Result<Option<Self>> {
    let mut properties = IndexMap::new();
    let mut attributes = IndexMap::new();
    // TODO make this more granular.
    // I am breaking the cycle of bad TODOs. This is in reference to the fact
    // that right now, this will just not write any metadata at all for
    // project nodes, which is not always desirable. We should try to be
    // smarter about it.
    if let Some(old_inst) = snapshot.old_inst() {
        if let Some(source) = &old_inst.metadata().instigating_source {
            let source = source.path();
            if source != path {
                log::debug!(
                    "Instigating source for Instance is mismatched so its metadata is being skipped.\nPath: {}",
                    path.display()
                );
                return Ok(None);
            }
        }
    }
    // Carry the flag over from the old instance when one exists; brand-new
    // instances default to `false`.
    let ignore_unknown_instances = snapshot
        .old_inst()
        .map(|inst| inst.metadata().ignore_unknown_instances)
        .unwrap_or_default();

    let class = &snapshot.new_inst().class;
    for (name, value) in snapshot.get_path_filtered_properties(snapshot.new).unwrap() {
        match value {
            // The `Attributes` property is split out into its own map in the
            // meta file rather than stored as a regular property.
            Variant::Attributes(attrs) => {
                for (attr_name, attr_value) in attrs.iter() {
                    // We (probably) don't want to preserve internal
                    // attributes, only user defined ones.
                    if attr_name.starts_with("RBX") {
                        continue;
                    }
                    attributes.insert(
                        attr_name.clone(),
                        UnresolvedValue::from_variant_unambiguous(attr_value.clone()),
                    );
                }
            }
            _ => {
                properties.insert(
                    name,
                    UnresolvedValue::from_variant(value.clone(), class, &name),
                );
            }
        }
    }

    Ok(Some(Self {
        // `None` keeps the field out of the serialized meta file
        // (see `skip_serializing_if` on the struct), so only `true` is
        // ever written.
        ignore_unknown_instances: if ignore_unknown_instances {
            Some(true)
        } else {
            None
        },
        properties,
        attributes,
        path,
        id: None,
        schema: None,
    }))
}
pub fn apply_ignore_unknown_instances(&mut self, snapshot: &mut InstanceSnapshot) {
if let Some(ignore) = self.ignore_unknown_instances.take() {
snapshot.metadata.ignore_unknown_instances = ignore;
@@ -89,7 +163,10 @@ impl AdjacentMetadata {
pub fn apply_properties(&mut self, snapshot: &mut InstanceSnapshot) -> anyhow::Result<()> {
let path = &self.path;
for (key, unresolved) in self.properties.drain() {
// BTreeMaps don't have an equivalent to HashMap::drain, so the next
// best option is to take ownership of the entire map. Not free, but
// very cheap.
for (key, unresolved) in std::mem::take(&mut self.properties) {
let value = unresolved
.resolve(&snapshot.class_name, &key)
.with_context(|| format!("error applying meta file {}", path.display()))?;
@@ -100,7 +177,7 @@ impl AdjacentMetadata {
if !self.attributes.is_empty() {
let mut attributes = Attributes::new();
for (key, unresolved) in self.attributes.drain() {
for (key, unresolved) in std::mem::take(&mut self.attributes) {
let value = unresolved.resolve_unambiguous()?;
attributes.insert(key, value);
}
@@ -131,6 +208,18 @@ impl AdjacentMetadata {
Ok(())
}
/// Returns whether this metadata contains nothing worth persisting to disk.
///
/// That is the case when there are no properties, no attributes, and no
/// `ignore_unknown_instances` override.
#[inline]
pub fn is_empty(&self) -> bool {
    self.properties.is_empty()
        && self.attributes.is_empty()
        && self.ignore_unknown_instances.is_none()
}
// TODO: Add method to allow selectively applying parts of metadata and
// throwing errors if invalid parts are specified.
}
@@ -151,11 +240,11 @@ pub struct DirectoryMetadata {
#[serde(skip_serializing_if = "Option::is_none")]
pub ignore_unknown_instances: Option<bool>,
#[serde(default, skip_serializing_if = "HashMap::is_empty")]
pub properties: UstrMap<UnresolvedValue>,
#[serde(default, skip_serializing_if = "IndexMap::is_empty")]
pub properties: IndexMap<Ustr, UnresolvedValue>,
#[serde(default, skip_serializing_if = "HashMap::is_empty")]
pub attributes: HashMap<String, UnresolvedValue>,
#[serde(default, skip_serializing_if = "IndexMap::is_empty")]
pub attributes: IndexMap<String, UnresolvedValue>,
#[serde(skip_serializing_if = "Option::is_none")]
pub class_name: Option<Ustr>,
@@ -207,6 +296,80 @@ impl DirectoryMetadata {
Ok(meta)
}
/// Constructs a `DirectoryMetadata` from the provided snapshot, assuming it
/// will be at the provided path.
///
/// This function does not set `ClassName` manually as most uses won't
/// want it set.
///
/// Returns `Ok(None)` when the old instance's instigating source is a
/// different file than `path` — that metadata belongs to another file.
pub fn from_syncback_snapshot(
    snapshot: &SyncbackSnapshot,
    path: PathBuf,
) -> anyhow::Result<Option<Self>> {
    let mut properties = IndexMap::new();
    let mut attributes = IndexMap::new();
    // TODO make this more granular.
    // I am breaking the cycle of bad TODOs. This is in reference to the fact
    // that right now, this will just not write any metadata at all for
    // project nodes, which is not always desirable. We should try to be
    // smarter about it.
    if let Some(old_inst) = snapshot.old_inst() {
        if let Some(source) = &old_inst.metadata().instigating_source {
            let source = source.path();
            if source != path {
                log::debug!(
                    "Instigating source for Instance is mismatched so its metadata is being skipped.\nPath: {}",
                    path.display()
                );
                return Ok(None);
            }
        }
    }
    // Carry the flag over from the old instance when one exists; brand-new
    // instances default to `false`.
    let ignore_unknown_instances = snapshot
        .old_inst()
        .map(|inst| inst.metadata().ignore_unknown_instances)
        .unwrap_or_default();

    let class = &snapshot.new_inst().class;
    for (name, value) in snapshot.get_path_filtered_properties(snapshot.new).unwrap() {
        match value {
            // The `Attributes` property is split out into its own map in the
            // meta file rather than stored as a regular property. Note the
            // inner (name, value) shadows the outer property pair here.
            Variant::Attributes(attrs) => {
                for (name, value) in attrs.iter() {
                    // We (probably) don't want to preserve internal
                    // attributes, only user defined ones.
                    if name.starts_with("RBX") {
                        continue;
                    }
                    attributes.insert(
                        name.to_owned(),
                        UnresolvedValue::from_variant_unambiguous(value.clone()),
                    );
                }
            }
            _ => {
                properties.insert(
                    name,
                    UnresolvedValue::from_variant(value.clone(), class, &name),
                );
            }
        }
    }

    Ok(Some(Self {
        // `None` keeps the field out of the serialized meta file
        // (see `skip_serializing_if` on the struct), so only `true` is
        // ever written.
        ignore_unknown_instances: if ignore_unknown_instances {
            Some(true)
        } else {
            None
        },
        properties,
        attributes,
        class_name: None,
        path,
        id: None,
        schema: None,
    }))
}
pub fn apply_all(&mut self, snapshot: &mut InstanceSnapshot) -> anyhow::Result<()> {
self.apply_ignore_unknown_instances(snapshot);
self.apply_class_name(snapshot)?;
@@ -241,7 +404,7 @@ impl DirectoryMetadata {
fn apply_properties(&mut self, snapshot: &mut InstanceSnapshot) -> anyhow::Result<()> {
let path = &self.path;
for (key, unresolved) in self.properties.drain() {
for (key, unresolved) in std::mem::take(&mut self.properties) {
let value = unresolved
.resolve(&snapshot.class_name, &key)
.with_context(|| format!("error applying meta file {}", path.display()))?;
@@ -252,7 +415,7 @@ impl DirectoryMetadata {
if !self.attributes.is_empty() {
let mut attributes = Attributes::new();
for (key, unresolved) in self.attributes.drain() {
for (key, unresolved) in std::mem::take(&mut self.attributes) {
let value = unresolved.resolve_unambiguous()?;
attributes.insert(key, value);
}
@@ -275,6 +438,53 @@ impl DirectoryMetadata {
snapshot.metadata.specified_id = self.id.take().map(RojoRef::new);
Ok(())
}
/// Returns whether the metadata is 'empty', meaning it doesn't have anything
/// worth persisting in it. Specifically:
///
/// - The number of properties and attributes is 0
/// - `ignore_unknown_instances` is None
/// - `class_name` is either `None` or `Some("Folder")` — a plain directory
///   already implies Folder, so recording it adds nothing
#[inline]
pub fn is_empty(&self) -> bool {
    self.attributes.is_empty()
        && self.properties.is_empty()
        && self.ignore_unknown_instances.is_none()
        // An absent class name is as good as an explicit "Folder".
        && self.class_name.as_ref().map_or(true, |class| class == "Folder")
}
}
/// Reads the `init.meta.json` file for the provided directory, if one
/// exists.
pub fn dir_meta(vfs: &Vfs, path: &Path) -> anyhow::Result<Option<DirectoryMetadata>> {
    let meta_path = path.join("init.meta.json");
    match vfs.read(&meta_path).with_not_found()? {
        Some(contents) => {
            let meta = DirectoryMetadata::from_slice(&contents, meta_path)?;
            Ok(Some(meta))
        }
        None => Ok(None),
    }
}
/// Retrieves the meta file that should be applied for the provided file,
/// if it exists.
///
/// The `name` field should be the name the metadata should have.
pub fn file_meta(vfs: &Vfs, path: &Path, name: &str) -> anyhow::Result<Option<AdjacentMetadata>> {
    // Build `<name>.meta.json` by appending rather than via `set_extension`:
    // `set_extension` replaces everything after the last `.`, so an instance
    // name containing a dot (e.g. `foo.bar`) would become `foo.meta.json`,
    // which would not match the `format!("{}.meta.json", name)` paths written
    // by the syncback middleware in this module.
    let meta_path = path.with_file_name(format!("{}.meta.json", name));

    if let Some(meta_contents) = vfs.read(&meta_path).with_not_found()? {
        let metadata = AdjacentMetadata::from_slice(&meta_contents, meta_path)?;
        Ok(Some(metadata))
    } else {
        Ok(None)
    }
}
#[cfg(test)]

View File

@@ -28,24 +28,34 @@ use anyhow::Context;
use memofs::{IoResultExt, Vfs};
use serde::{Deserialize, Serialize};
use crate::snapshot::{InstanceContext, InstanceSnapshot, SyncRule};
use crate::{glob::Glob, project::DEFAULT_PROJECT_NAMES};
use crate::{
glob::Glob,
project::DEFAULT_PROJECT_NAMES,
syncback::{SyncbackReturn, SyncbackSnapshot},
};
use crate::{
snapshot::{InstanceContext, InstanceSnapshot, SyncRule},
syncback::validate_file_name,
};
use self::{
csv::{snapshot_csv, snapshot_csv_init},
dir::snapshot_dir,
csv::{snapshot_csv, snapshot_csv_init, syncback_csv, syncback_csv_init},
dir::{snapshot_dir, syncback_dir},
json::snapshot_json,
json_model::snapshot_json_model,
lua::{snapshot_lua, snapshot_lua_init, ScriptType},
project::snapshot_project,
rbxm::snapshot_rbxm,
rbxmx::snapshot_rbxmx,
json_model::{snapshot_json_model, syncback_json_model},
lua::{snapshot_lua, snapshot_lua_init, syncback_lua, syncback_lua_init},
project::{snapshot_project, syncback_project},
rbxm::{snapshot_rbxm, syncback_rbxm},
rbxmx::{snapshot_rbxmx, syncback_rbxmx},
toml::snapshot_toml,
txt::snapshot_txt,
txt::{snapshot_txt, syncback_txt},
yaml::snapshot_yaml,
};
pub use self::{project::snapshot_project_node, util::emit_legacy_scripts_default};
pub use self::{
lua::ScriptType, project::snapshot_project_node, util::emit_legacy_scripts_default,
util::PathExt,
};
/// Returns an `InstanceSnapshot` for the provided path.
/// This will inspect the path and find the appropriate middleware for it,
@@ -63,41 +73,14 @@ pub fn snapshot_from_vfs(
};
if meta.is_dir() {
if let Some(init_path) = get_init_path(vfs, path)? {
// TODO: support user-defined init paths
// If and when we do, make sure to go support it in
// `Project::set_file_name`, as right now it special-cases
// `default.project.json` as an `init` path.
for rule in default_sync_rules() {
if rule.matches(&init_path) {
return match rule.middleware {
Middleware::Project => {
let name = init_path
.parent()
.and_then(Path::file_name)
.and_then(|s| s.to_str()).expect("default.project.json should be inside a folder with a unicode name");
snapshot_project(context, vfs, &init_path, name)
}
Middleware::ModuleScript => {
snapshot_lua_init(context, vfs, &init_path, ScriptType::Module)
}
Middleware::ServerScript => {
snapshot_lua_init(context, vfs, &init_path, ScriptType::Server)
}
Middleware::ClientScript => {
snapshot_lua_init(context, vfs, &init_path, ScriptType::Client)
}
Middleware::Csv => snapshot_csv_init(context, vfs, &init_path),
_ => snapshot_dir(context, vfs, path),
};
}
}
snapshot_dir(context, vfs, path)
} else {
snapshot_dir(context, vfs, path)
let (middleware, dir_name, init_path) = get_dir_middleware(vfs, path)?;
// TODO: Support user defined init paths
// If and when we do, make sure to go support it in
// `Project::set_file_name`, as right now it special-cases
// `default.project.json` as an `init` path.
match middleware {
Middleware::Dir => middleware.snapshot(context, vfs, path, dir_name),
_ => middleware.snapshot(context, vfs, &init_path, dir_name),
}
} else {
let file_name = path
@@ -116,55 +99,50 @@ pub fn snapshot_from_vfs(
}
}
/// Gets an `init` path for the given directory.
/// This uses an intrinsic priority list and for compatibility,
/// it should not be changed.
fn get_init_path<P: AsRef<Path>>(vfs: &Vfs, dir: P) -> anyhow::Result<Option<PathBuf>> {
let path = dir.as_ref();
/// Gets the appropriate middleware for a directory by checking for `init`
/// files. This uses an intrinsic priority list and for compatibility,
/// that order should be left unchanged.
///
/// Returns the middleware, the name of the directory, and the path to
/// the init location.
fn get_dir_middleware<'path>(
vfs: &Vfs,
dir_path: &'path Path,
) -> anyhow::Result<(Middleware, &'path str, PathBuf)> {
let dir_name = dir_path
.file_name()
.expect("Could not extract directory name")
.to_str()
.ok_or_else(|| anyhow::anyhow!("File name was not valid UTF-8: {}", dir_path.display()))?;
static INIT_PATHS: OnceLock<Vec<(Middleware, &str)>> = OnceLock::new();
let order = INIT_PATHS.get_or_init(|| {
vec![
(Middleware::ModuleScriptDir, "init.luau"),
(Middleware::ModuleScriptDir, "init.lua"),
(Middleware::ServerScriptDir, "init.server.luau"),
(Middleware::ServerScriptDir, "init.server.lua"),
(Middleware::ClientScriptDir, "init.client.luau"),
(Middleware::ClientScriptDir, "init.client.lua"),
(Middleware::CsvDir, "init.csv"),
]
});
for default_project_name in DEFAULT_PROJECT_NAMES {
let project_path = path.join(default_project_name);
let project_path = dir_path.join(default_project_name);
if vfs.metadata(&project_path).with_not_found()?.is_some() {
return Ok(Some(project_path));
return Ok((Middleware::Project, dir_name, project_path));
}
}
let init_path = path.join("init.luau");
if vfs.metadata(&init_path).with_not_found()?.is_some() {
return Ok(Some(init_path));
for (middleware, name) in order {
let test_path = dir_path.join(name);
if vfs.metadata(&test_path).with_not_found()?.is_some() {
return Ok((*middleware, dir_name, test_path));
}
}
let init_path = path.join("init.lua");
if vfs.metadata(&init_path).with_not_found()?.is_some() {
return Ok(Some(init_path));
}
let init_path = path.join("init.server.luau");
if vfs.metadata(&init_path).with_not_found()?.is_some() {
return Ok(Some(init_path));
}
let init_path = path.join("init.server.lua");
if vfs.metadata(&init_path).with_not_found()?.is_some() {
return Ok(Some(init_path));
}
let init_path = path.join("init.client.luau");
if vfs.metadata(&init_path).with_not_found()?.is_some() {
return Ok(Some(init_path));
}
let init_path = path.join("init.client.lua");
if vfs.metadata(&init_path).with_not_found()?.is_some() {
return Ok(Some(init_path));
}
let init_path = path.join("init.csv");
if vfs.metadata(&init_path).with_not_found()?.is_some() {
return Ok(Some(init_path));
}
Ok(None)
Ok((Middleware::Dir, dir_name, dir_path.to_path_buf()))
}
/// Gets a snapshot for a path given an InstanceContext and Vfs, taking
@@ -194,9 +172,10 @@ fn snapshot_from_path(
}
/// Represents a possible 'transformer' used by Rojo to turn a file system
/// item into a Roblox Instance. Missing from this list are directories and
/// metadata. This is deliberate, as metadata is not a snapshot middleware
/// and directories do not make sense to turn into files.
/// item into a Roblox Instance. Missing from this list is metadata.
/// This is deliberate, as metadata is not a snapshot middleware.
///
/// Directories cannot be used for sync rules so they're ignored by Serde.
#[derive(Debug, Clone, Copy, PartialEq, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub enum Middleware {
@@ -218,6 +197,17 @@ pub enum Middleware {
Text,
Yaml,
Ignore,
#[serde(skip_deserializing)]
Dir,
#[serde(skip_deserializing)]
ServerScriptDir,
#[serde(skip_deserializing)]
ClientScriptDir,
#[serde(skip_deserializing)]
ModuleScriptDir,
#[serde(skip_deserializing)]
CsvDir,
}
impl Middleware {
@@ -230,7 +220,7 @@ impl Middleware {
path: &Path,
name: &str,
) -> anyhow::Result<Option<InstanceSnapshot>> {
match self {
let mut output = match self {
Self::Csv => snapshot_csv(context, vfs, path, name),
Self::JsonModel => snapshot_json_model(context, vfs, path, name),
Self::Json => snapshot_json(context, vfs, path, name),
@@ -257,6 +247,120 @@ impl Middleware {
Self::Text => snapshot_txt(context, vfs, path, name),
Self::Yaml => snapshot_yaml(context, vfs, path, name),
Self::Ignore => Ok(None),
Self::Dir => snapshot_dir(context, vfs, path, name),
Self::ServerScriptDir => {
snapshot_lua_init(context, vfs, path, name, ScriptType::Server)
}
Self::ClientScriptDir => {
snapshot_lua_init(context, vfs, path, name, ScriptType::Client)
}
Self::ModuleScriptDir => {
snapshot_lua_init(context, vfs, path, name, ScriptType::Module)
}
Self::CsvDir => snapshot_csv_init(context, vfs, path, name),
};
if let Ok(Some(ref mut snapshot)) = output {
snapshot.metadata.middleware = Some(*self);
}
output
}
/// Runs the syncback mechanism for the provided middleware given a
/// SyncbackSnapshot.
pub fn syncback<'sync>(
&self,
snapshot: &SyncbackSnapshot<'sync>,
) -> anyhow::Result<SyncbackReturn<'sync>> {
let file_name = snapshot.path.file_name().and_then(|s| s.to_str());
if let Some(file_name) = file_name {
validate_file_name(file_name).with_context(|| {
format!("cannot create a file or directory with name {file_name}")
})?;
}
match self {
Middleware::Csv => syncback_csv(snapshot),
Middleware::JsonModel => syncback_json_model(snapshot),
Middleware::Json => anyhow::bail!("cannot syncback Json middleware"),
// Projects are only generated from files that already exist on the
// file system, so we don't need to pass a file name.
Middleware::Project => syncback_project(snapshot),
Middleware::ServerScript => syncback_lua(snapshot),
Middleware::ClientScript => syncback_lua(snapshot),
Middleware::ModuleScript => syncback_lua(snapshot),
Middleware::Rbxm => syncback_rbxm(snapshot),
Middleware::Rbxmx => syncback_rbxmx(snapshot),
Middleware::Toml => anyhow::bail!("cannot syncback Toml middleware"),
Middleware::Text => syncback_txt(snapshot),
Middleware::Yaml => anyhow::bail!("cannot syncback Yaml middleware"),
Middleware::Ignore => anyhow::bail!("cannot syncback Ignore middleware"),
Middleware::Dir => syncback_dir(snapshot),
Middleware::ServerScriptDir => syncback_lua_init(ScriptType::Server, snapshot),
Middleware::ClientScriptDir => syncback_lua_init(ScriptType::Client, snapshot),
Middleware::ModuleScriptDir => syncback_lua_init(ScriptType::Module, snapshot),
Middleware::CsvDir => syncback_csv_init(snapshot),
Middleware::PluginScript
| Middleware::LegacyServerScript
| Middleware::LegacyClientScript
| Middleware::RunContextServerScript
| Middleware::RunContextClientScript => {
anyhow::bail!("syncback is not implemented for {self:?} yet")
}
}
}
    /// Returns whether this particular middleware would become a directory.
    ///
    /// This is true for `Dir` itself and for every `init`-style middleware,
    /// since those are represented on disk as a directory containing an
    /// `init` file.
    #[inline]
    pub fn is_dir(&self) -> bool {
        matches!(
            self,
            Middleware::Dir
                | Middleware::ServerScriptDir
                | Middleware::ClientScriptDir
                | Middleware::ModuleScriptDir
                | Middleware::CsvDir
        )
    }
    /// Returns whether this particular middleware sets its own properties.
    /// This applies to things like `JsonModel` and `Project`, since they
    /// set properties without needing a meta.json file.
    ///
    /// It does not cover middleware like `ServerScript` or `Csv` because they
    /// need a meta.json file to set properties that aren't their designated
    /// 'special' properties.
    ///
    /// Syncback uses this to decide whether properties must be written
    /// somewhere else (e.g. back into a project node) instead of being
    /// handled by the middleware's own file format.
    #[inline]
    pub fn handles_own_properties(&self) -> bool {
        matches!(
            self,
            Middleware::JsonModel | Middleware::Project | Middleware::Rbxm | Middleware::Rbxmx
        )
    }
/// Attempts to return a middleware that should be used for the given path.
///
/// Returns `Err` only if the Vfs cannot read information about the path.
pub fn middleware_for_path(
vfs: &Vfs,
sync_rules: &[SyncRule],
path: &Path,
) -> anyhow::Result<Option<Self>> {
let meta = match vfs.metadata(path).with_not_found()? {
Some(meta) => meta,
None => return Ok(None),
};
if meta.is_dir() {
let (middleware, _, _) = get_dir_middleware(vfs, path)?;
Ok(Some(middleware))
} else {
for rule in sync_rules.iter().chain(default_sync_rules()) {
if rule.matches(path) {
return Ok(Some(rule.middleware));
}
}
Ok(None)
}
}
}

View File

@@ -1,19 +1,27 @@
use std::{borrow::Cow, path::Path};
use std::{
borrow::Cow,
collections::{BTreeMap, HashMap, VecDeque},
path::Path,
};
use anyhow::{bail, Context};
use memofs::Vfs;
use rbx_dom_weak::{
types::{Attributes, Ref},
ustr, HashMapExt as _, Ustr, UstrMap,
types::{Attributes, Ref, Variant},
ustr, HashMapExt as _, Instance, Ustr, UstrMap,
};
use rbx_reflection::ClassTag;
use crate::{
project::{PathNode, Project, ProjectNode},
resolution::UnresolvedValue,
snapshot::{
InstanceContext, InstanceMetadata, InstanceSnapshot, InstigatingSource, PathIgnoreRule,
SyncRule,
InstanceContext, InstanceMetadata, InstanceSnapshot, InstanceWithMeta, InstigatingSource,
PathIgnoreRule, SyncRule,
},
snapshot_middleware::Middleware,
syncback::{filter_properties, FsSnapshot, SyncbackReturn, SyncbackSnapshot},
variant_eq::variant_eq,
RojoRef,
};
@@ -286,12 +294,12 @@ pub fn snapshot_project_node(
metadata.specified_id = Some(RojoRef::new(id.clone()))
}
metadata.instigating_source = Some(InstigatingSource::ProjectNode(
project_path.to_path_buf(),
instance_name.to_string(),
Box::new(node.clone()),
parent_class.map(|name| name.to_owned()),
));
metadata.instigating_source = Some(InstigatingSource::ProjectNode {
path: project_path.to_path_buf(),
name: instance_name.to_string(),
node: node.clone(),
parent_class: parent_class.map(|name| name.to_owned()),
});
Ok(Some(InstanceSnapshot {
snapshot_id: Ref::none(),
@@ -303,6 +311,318 @@ pub fn snapshot_project_node(
}))
}
/// Performs syncback for an Instance whose instigating source is a Rojo
/// project file.
///
/// The project file is reloaded from the VFS and its node tree is walked
/// breadth-first alongside the matching Instances in the old and new DOMs.
/// Nodes with a `$path` are queued for syncback via the middleware for that
/// path; nodes without one have their properties written back into the
/// project node itself. The project file on disk is only rewritten if at
/// least one node's properties or attributes actually changed.
///
/// Errors if the new tree changes the class of a node, or adds/removes
/// children that are defined by the project file — syncback cannot alter a
/// project's structure.
pub fn syncback_project<'sync>(
    snapshot: &SyncbackSnapshot<'sync>,
) -> anyhow::Result<SyncbackReturn<'sync>> {
    let old_inst = snapshot
        .old_inst()
        .expect("projects should always exist in both trees");
    // Generally, the path of a project is the first thing added to the relevant
    // paths. So, we take the last one.
    let project_path = old_inst
        .metadata()
        .relevant_paths
        .last()
        .expect("all projects should have a relevant path");
    let vfs = snapshot.vfs();
    log::debug!("Reloading project {} from vfs", project_path.display(),);
    let mut project = Project::load_exact(vfs, project_path, None)?;
    let base_path = project.folder_location().to_path_buf();
    // Sync rules for this project do not have their base rule set but it is
    // important when performing syncback on other projects.
    for rule in &mut project.sync_rules {
        rule.base_path.clone_from(&base_path)
    }
    // Snapshots of descendants that still need syncback, plus Instances that
    // were removed relative to the project.
    let mut descendant_snapshots = Vec::new();
    let mut removed_descendants = Vec::new();
    // Maps new-DOM referents to the file system path their node points at.
    let mut ref_to_path_map = HashMap::new();
    let mut old_child_map = HashMap::new();
    let mut new_child_map = HashMap::new();
    let mut node_changed_map = Vec::new();
    // Breadth-first queue of (project node, old Instance, new Instance).
    let mut node_queue = VecDeque::with_capacity(1);
    node_queue.push_back((&mut project.tree, old_inst, snapshot.new_inst()));
    while let Some((node, old_inst, new_inst)) = node_queue.pop_front() {
        log::debug!("Processing node {}", old_inst.name());
        if old_inst.class_name() != new_inst.class {
            anyhow::bail!(
                "Cannot change the class of {} in project file {}.\n\
                Current class is {}, it is a {} in the input file.",
                old_inst.name(),
                project_path.display(),
                old_inst.class_name(),
                new_inst.class
            );
        }
        // TODO handle meta.json files in this branch. Right now, we perform
        // syncback if a node has `$path` set but the Middleware aren't aware
        // that the Instances they're running on originate in a project.json.
        // As a result, the `meta.json` syncback code is hardcoded to not work
        // if the Instance originates from a project file. However, we should
        // ideally use a .meta.json over the project node if it exists already.
        if node.path.is_some() {
            // Since the node has a path, we have to run syncback on it.
            let node_path = node.path.as_ref().map(PathNode::path).expect(
                "Project nodes with a path must have a path \
                If you see this message, something went seriously wrong. Please report it.",
            );
            // `$path` values may be relative to the project's folder.
            let full_path = if node_path.is_absolute() {
                node_path.to_path_buf()
            } else {
                base_path.join(node_path)
            };
            let middleware = match Middleware::middleware_for_path(
                snapshot.vfs(),
                &project.sync_rules,
                &full_path,
            )? {
                Some(middleware) => middleware,
                // The only way this can happen at this point is if the path does
                // not exist on the file system or there's no middleware for it.
                None => anyhow::bail!(
                    "path does not exist or could not be turned into a file Rojo understands: {}",
                    full_path.display()
                ),
            };
            descendant_snapshots.push(
                snapshot
                    .with_new_path(full_path.clone(), new_inst.referent(), Some(old_inst.id()))
                    .middleware(middleware),
            );
            ref_to_path_map.insert(new_inst.referent(), full_path);
            // We only want to set properties if it needs it.
            if !middleware.handles_own_properties() {
                project_node_property_syncback_path(snapshot, new_inst, node);
            }
        } else {
            project_node_property_syncback_no_path(snapshot, new_inst, node);
        }
        // Index children of both DOMs by name; duplicate names are an error
        // because project nodes are keyed by name.
        for child_ref in new_inst.children() {
            let child = snapshot
                .get_new_instance(*child_ref)
                .expect("all children of Instances should be in new DOM");
            if new_child_map.insert(&child.name, child).is_some() {
                anyhow::bail!(
                    "Instances that are direct children of an Instance that is made by a project file \
                    must have a unique name.\nThe child '{}' of '{}' is duplicated in the place file.", child.name, old_inst.name()
                );
            }
        }
        for child_ref in old_inst.children() {
            let child = snapshot
                .get_old_instance(*child_ref)
                .expect("all children of Instances should be in old DOM");
            if old_child_map.insert(child.name(), child).is_some() {
                anyhow::bail!(
                    "Instances that are direct children of an Instance that is made by a project file \
                    must have a unique name.\nThe child '{}' of '{}' is duplicated on the file system.", child.name(), old_inst.name()
                );
            }
        }
        // This loop does basic matching of Instance children to the node's
        // children. It ensures that `new_child_map` and `old_child_map` will
        // only contain Instances that don't belong to the project after this.
        for (child_name, child_node) in &mut node.children {
            // If a node's path is optional, we want to skip it if the path
            // doesn't exist since it isn't in the current old DOM.
            if let Some(path) = &child_node.path {
                if path.is_optional() {
                    let real_path = if path.path().is_absolute() {
                        path.path().to_path_buf()
                    } else {
                        base_path.join(path.path())
                    };
                    if !real_path.exists() {
                        log::warn!(
                            "Skipping node '{child_name}' of project because it is optional and not present on the disk.\n\
                            If this is not deliberate, please create a file or directory at {}", real_path.display()
                        );
                        continue;
                    }
                }
            }
            let new_equivalent = new_child_map.remove(child_name);
            let old_equivalent = old_child_map.remove(child_name.as_str());
            match (new_equivalent, old_equivalent) {
                (Some(new), Some(old)) => node_queue.push_back((child_node, old, new)),
                (_, None) => anyhow::bail!(
                    "The child '{child_name}' of Instance '{}' would be removed.\n\
                    Syncback cannot add or remove Instances from project {}",
                    old_inst.name(),
                    project_path.display()
                ),
                (None, _) => anyhow::bail!(
                    "The child '{child_name}' of Instance '{}' is present only in a project file,\n\
                    and not the provided file. Syncback cannot add or remove Instances from project:\n{}.",
                    old_inst.name(), project_path.display(),
                )
            }
        }
        // All of the children in this loop are by their nature not in the
        // project, so we just need to run syncback on them.
        for (name, new_child) in new_child_map.drain() {
            let parent_path = match ref_to_path_map.get(&new_child.parent()) {
                Some(path) => path.clone(),
                None => {
                    log::debug!("Skipping child {name} of node because it has no parent_path");
                    continue;
                }
            };
            // If a child also exists in the old tree, it will be caught in the
            // syncback on the project node path above (or is itself a node).
            // So the only things we need to run separately is new children.
            if old_child_map.remove(name.as_str()).is_none() {
                let parent_middleware =
                    Middleware::middleware_for_path(vfs, &project.sync_rules, &parent_path)?
                        .expect("project nodes should have a middleware if they have children.");
                // If this node points directly to a project, it may still have
                // children but they'll be handled by syncback. This isn't a
                // concern with directories because they're singular things,
                // files that contain their own children.
                if parent_middleware != Middleware::Project {
                    descendant_snapshots.push(snapshot.with_base_path(
                        &parent_path,
                        new_child.referent(),
                        None,
                    )?);
                }
            }
        }
        // Whatever is left in the old child map has no counterpart in the new
        // tree, so it was removed.
        removed_descendants.extend(old_child_map.drain().map(|(_, v)| v));
        node_changed_map.push((&node.properties, &node.attributes, old_inst))
    }
    // Only rewrite the project file if some node actually drifted from the
    // corresponding Instance.
    let mut fs_snapshot = FsSnapshot::new();
    for (node_properties, node_attributes, old_inst) in node_changed_map {
        if project_node_should_reserialize(node_properties, node_attributes, old_inst)? {
            fs_snapshot.add_file(project_path, serde_json::to_vec_pretty(&project)?);
            break;
        }
    }
    Ok(SyncbackReturn {
        fs_snapshot,
        children: descendant_snapshots,
        removed_children: removed_descendants,
    })
}
/// Writes the given filtered properties into a project node, splitting the
/// special `Attributes` property out into the node's `$attributes` field.
fn project_node_property_syncback(
    _snapshot: &SyncbackSnapshot,
    filtered_properties: UstrMap<&Variant>,
    new_inst: &Instance,
    node: &mut ProjectNode,
) {
    let mut attributes = BTreeMap::new();
    for (name, value) in filtered_properties {
        if let Variant::Attributes(attrs) = value {
            for (attr_name, attr_value) in attrs.iter() {
                // We (probably) don't want to preserve internal attributes,
                // only user defined ones.
                if attr_name.starts_with("RBX") {
                    continue;
                }
                attributes.insert(
                    attr_name.clone(),
                    UnresolvedValue::from_variant_unambiguous(attr_value.clone()),
                );
            }
        } else {
            node.properties.insert(
                name,
                UnresolvedValue::from_variant(value.clone(), &new_inst.class, &name),
            );
        }
    }
    node.attributes = attributes;
}
/// Property syncback for a project node that has a `$path`: properties are
/// pulled through the snapshot's path-aware filter before being written.
fn project_node_property_syncback_path(
    snapshot: &SyncbackSnapshot,
    new_inst: &Instance,
    node: &mut ProjectNode,
) {
    let filtered = snapshot
        .get_path_filtered_properties(new_inst.referent())
        .unwrap();
    project_node_property_syncback(snapshot, filtered, new_inst, node)
}
/// Property syncback for a project node without a `$path`: properties come
/// straight from the new Instance, filtered via `filter_properties`.
fn project_node_property_syncback_no_path(
    snapshot: &SyncbackSnapshot,
    new_inst: &Instance,
    node: &mut ProjectNode,
) {
    let filtered = filter_properties(snapshot.project(), new_inst);
    project_node_property_syncback(snapshot, filtered, new_inst, node)
}
/// Returns whether a project node's stored properties/attributes disagree
/// with the given Instance, meaning the project file must be reserialized.
///
/// Returns `Err` only if a stored value fails to resolve to a `Variant`.
fn project_node_should_reserialize(
    node_properties: &BTreeMap<Ustr, UnresolvedValue>,
    node_attributes: &BTreeMap<String, UnresolvedValue>,
    instance: InstanceWithMeta,
) -> anyhow::Result<bool> {
    // Any node property that's missing from the Instance, or resolves to a
    // different value, forces a rewrite.
    for (prop_name, unresolved) in node_properties {
        match instance.properties().get(prop_name) {
            Some(inst_value) => {
                let node_value = unresolved.clone().resolve(&instance.class_name(), prop_name)?;
                if !variant_eq(inst_value, &node_value) {
                    return Ok(true);
                }
            }
            None => return Ok(true),
        }
    }

    match instance.properties().get(&ustr("Attributes")) {
        Some(Variant::Attributes(inst_attributes)) => {
            // A length mismatch (including one side being empty) means the
            // attribute sets cannot be equal.
            if node_attributes.len() != inst_attributes.len() {
                return Ok(true);
            }
            for (attr_name, unresolved) in node_attributes {
                match inst_attributes.get(attr_name.as_str()) {
                    Some(inst_value) => {
                        let node_value = unresolved.clone().resolve_unambiguous()?;
                        if !variant_eq(inst_value, &node_value) {
                            return Ok(true);
                        }
                    }
                    None => return Ok(true),
                }
            }
            Ok(false)
        }
        // The Instance has an `Attributes` property of some other type.
        Some(_) => Ok(true),
        // No attributes on the Instance: drift only if the node has some.
        None => Ok(!node_attributes.is_empty()),
    }
}
fn infer_class_name(name: &str, parent_class: Option<&str>) -> Option<Ustr> {
// If className wasn't defined from another source, we may be able
// to infer one.

View File

@@ -3,7 +3,10 @@ use std::path::Path;
use anyhow::Context;
use memofs::Vfs;
use crate::snapshot::{InstanceContext, InstanceMetadata, InstanceSnapshot};
use crate::{
snapshot::{InstanceContext, InstanceMetadata, InstanceSnapshot},
syncback::{FsSnapshot, SyncbackReturn, SyncbackSnapshot},
};
#[profiling::function]
pub fn snapshot_rbxm(
@@ -39,6 +42,24 @@ pub fn snapshot_rbxm(
}
}
/// Syncback for binary model files: serializes the new Instance's whole
/// subtree into a single `.rbxm` file at the snapshot's path.
pub fn syncback_rbxm<'sync>(
    snapshot: &SyncbackSnapshot<'sync>,
) -> anyhow::Result<SyncbackReturn<'sync>> {
    // Long-term, we probably want to have some logic for if this contains a
    // script. That's a future endeavor though.
    let mut contents = Vec::new();
    rbx_binary::to_writer(
        &mut contents,
        snapshot.new_tree(),
        &[snapshot.new_inst().referent()],
    )
    .context("failed to serialize new rbxm")?;

    Ok(SyncbackReturn {
        fs_snapshot: FsSnapshot::new().with_added_file(&snapshot.path, contents),
        children: Vec::new(),
        removed_children: Vec::new(),
    })
}
#[cfg(test)]
mod test {
use super::*;

View File

@@ -2,8 +2,12 @@ use std::path::Path;
use anyhow::Context;
use memofs::Vfs;
use rbx_xml::EncodeOptions;
use crate::snapshot::{InstanceContext, InstanceMetadata, InstanceSnapshot};
use crate::{
snapshot::{InstanceContext, InstanceMetadata, InstanceSnapshot},
syncback::{FsSnapshot, SyncbackReturn, SyncbackSnapshot},
};
pub fn snapshot_rbxmx(
context: &InstanceContext,
@@ -15,7 +19,7 @@ pub fn snapshot_rbxmx(
.property_behavior(rbx_xml::DecodePropertyBehavior::ReadUnknown);
let temp_tree = rbx_xml::from_reader(vfs.read(path)?.as_slice(), options)
.with_context(|| format!("Malformed rbxm file: {}", path.display()))?;
.with_context(|| format!("Malformed rbxmx file: {}", path.display()))?;
let root_instance = temp_tree.root();
let children = root_instance.children();
@@ -41,6 +45,32 @@ pub fn snapshot_rbxmx(
}
}
/// Syncback for XML model files: serializes the new Instance's whole subtree
/// into a single `.rbxmx` file at the snapshot's path.
pub fn syncback_rbxmx<'sync>(
    snapshot: &SyncbackSnapshot<'sync>,
) -> anyhow::Result<SyncbackReturn<'sync>> {
    // Unknown properties are written out rather than dropped so round-trips
    // lose as little data as possible.
    let options =
        EncodeOptions::new().property_behavior(rbx_xml::EncodePropertyBehavior::WriteUnknown);
    // Long-term, we probably want to have some logic for if this contains a
    // script. That's a future endeavor though.
    let mut contents = Vec::new();
    rbx_xml::to_writer(
        &mut contents,
        snapshot.new_tree(),
        &[snapshot.new_inst().referent()],
        options,
    )
    .context("failed to serialize new rbxmx")?;

    Ok(SyncbackReturn {
        fs_snapshot: FsSnapshot::new().with_added_file(&snapshot.path, contents),
        children: Vec::new(),
        removed_children: Vec::new(),
    })
}
#[cfg(test)]
mod test {
use super::*;

View File

@@ -14,6 +14,7 @@ metadata:
context:
emit_legacy_scripts: true
specified_id: ~
middleware: ~
name: foo
class_name: LocalizationTable
properties:

View File

@@ -21,6 +21,7 @@ metadata:
context:
emit_legacy_scripts: true
specified_id: ~
middleware: ~
name: root
class_name: LocalizationTable
properties:

View File

@@ -21,6 +21,7 @@ metadata:
context:
emit_legacy_scripts: true
specified_id: manually specified
middleware: ~
name: root
class_name: LocalizationTable
properties:

View File

@@ -14,6 +14,7 @@ metadata:
context:
emit_legacy_scripts: true
specified_id: ~
middleware: ~
name: foo
class_name: LocalizationTable
properties:

View File

@@ -21,6 +21,7 @@ metadata:
context:
emit_legacy_scripts: true
specified_id: ~
middleware: ~
name: foo
class_name: Folder
properties: {}

View File

@@ -21,6 +21,7 @@ metadata:
context:
emit_legacy_scripts: true
specified_id: ~
middleware: ~
name: foo
class_name: Folder
properties: {}
@@ -44,6 +45,7 @@ children:
context:
emit_legacy_scripts: true
specified_id: ~
middleware: dir
name: Child
class_name: Folder
properties: {}

View File

@@ -14,6 +14,7 @@ metadata:
context:
emit_legacy_scripts: true
specified_id: ~
middleware: ~
name: foo
class_name: ModuleScript
properties:

View File

@@ -14,6 +14,7 @@ metadata:
context:
emit_legacy_scripts: true
specified_id: manually specified
middleware: ~
name: foo
class_name: ModuleScript
properties:

View File

@@ -12,6 +12,7 @@ metadata:
context:
emit_legacy_scripts: true
specified_id: ~
middleware: ~
name: foo
class_name: IntValue
properties:
@@ -25,6 +26,7 @@ children:
context:
emit_legacy_scripts: true
specified_id: ~
middleware: ~
name: The Child
class_name: StringValue
properties: {}

View File

@@ -12,6 +12,7 @@ metadata:
context:
emit_legacy_scripts: true
specified_id: ~
middleware: ~
name: foo
class_name: IntValue
properties:
@@ -25,6 +26,7 @@ children:
context:
emit_legacy_scripts: true
specified_id: ~
middleware: ~
name: The Child
class_name: StringValue
properties: {}

View File

@@ -14,6 +14,7 @@ metadata:
context:
emit_legacy_scripts: true
specified_id: ~
middleware: ~
name: foo
class_name: LocalScript
properties:

View File

@@ -14,6 +14,7 @@ metadata:
context:
emit_legacy_scripts: true
specified_id: ~
middleware: ~
name: foo
class_name: ModuleScript
properties:

View File

@@ -14,6 +14,7 @@ metadata:
context:
emit_legacy_scripts: true
specified_id: ~
middleware: ~
name: foo
class_name: ModuleScript
properties:

View File

@@ -14,6 +14,7 @@ metadata:
context:
emit_legacy_scripts: true
specified_id: ~
middleware: ~
name: bar
class_name: Script
properties:

View File

@@ -14,6 +14,7 @@ metadata:
context:
emit_legacy_scripts: true
specified_id: ~
middleware: ~
name: foo
class_name: Script
properties:

View File

@@ -14,6 +14,7 @@ metadata:
context:
emit_legacy_scripts: true
specified_id: ~
middleware: ~
name: foo
class_name: Script
properties:

View File

@@ -21,6 +21,7 @@ metadata:
context:
emit_legacy_scripts: true
specified_id: ~
middleware: ~
name: root
class_name: ModuleScript
properties:

View File

@@ -21,6 +21,7 @@ metadata:
context:
emit_legacy_scripts: true
specified_id: manually specified
middleware: ~
name: root
class_name: ModuleScript
properties:

View File

@@ -14,6 +14,7 @@ metadata:
context:
emit_legacy_scripts: false
specified_id: ~
middleware: ~
name: foo
class_name: Script
properties:

View File

@@ -14,6 +14,7 @@ metadata:
context:
emit_legacy_scripts: false
specified_id: ~
middleware: ~
name: foo
class_name: Script
properties:

View File

@@ -14,6 +14,7 @@ metadata:
context:
emit_legacy_scripts: false
specified_id: ~
middleware: ~
name: foo
class_name: ModuleScript
properties:

View File

@@ -14,6 +14,7 @@ metadata:
context:
emit_legacy_scripts: false
specified_id: ~
middleware: ~
name: foo
class_name: ModuleScript
properties:

View File

@@ -14,6 +14,7 @@ metadata:
context:
emit_legacy_scripts: false
specified_id: ~
middleware: ~
name: bar
class_name: Script
properties:

View File

@@ -14,6 +14,7 @@ metadata:
context:
emit_legacy_scripts: false
specified_id: ~
middleware: ~
name: foo
class_name: Script
properties:

View File

@@ -14,6 +14,7 @@ metadata:
context:
emit_legacy_scripts: false
specified_id: ~
middleware: ~
name: foo
class_name: Script
properties:

View File

@@ -11,6 +11,7 @@ metadata:
context:
emit_legacy_scripts: true
specified_id: manually specified
middleware: ~
name: DEFAULT
class_name: DEFAULT
properties: {}

View File

@@ -11,6 +11,7 @@ metadata:
context:
emit_legacy_scripts: true
specified_id: manually specified
middleware: ~
name: DEFAULT
class_name: DEFAULT
properties: {}

View File

@@ -11,6 +11,7 @@ metadata:
context:
emit_legacy_scripts: true
specified_id: manually specified
middleware: ~
name: DEFAULT
class_name: DEFAULT
properties: {}

View File

@@ -11,6 +11,7 @@ metadata:
context:
emit_legacy_scripts: true
specified_id: manually specified
middleware: ~
name: DEFAULT
class_name: DEFAULT
properties: {}

View File

@@ -12,6 +12,7 @@ metadata:
context:
emit_legacy_scripts: true
specified_id: ~
middleware: ~
name: foo
class_name: Model
properties: {}

View File

@@ -12,6 +12,7 @@ metadata:
context:
emit_legacy_scripts: true
specified_id: ~
middleware: ~
name: direct-project
class_name: Model
properties: {}

View File

@@ -13,6 +13,7 @@ metadata:
context:
emit_legacy_scripts: true
specified_id: ~
middleware: project
name: path-property-override
class_name: StringValue
properties:

View File

@@ -12,6 +12,7 @@ metadata:
context:
emit_legacy_scripts: true
specified_id: ~
middleware: ~
name: children
class_name: Folder
properties: {}
@@ -21,14 +22,16 @@ children:
ignore_unknown_instances: true
instigating_source:
ProjectNode:
- /foo.project.json
- Child
- $className: Model
- Folder
path: /foo.project.json
name: Child
node:
$className: Model
parent_class: Folder
relevant_paths: []
context:
emit_legacy_scripts: true
specified_id: ~
middleware: ~
name: Child
class_name: Model
properties: {}

View File

@@ -13,6 +13,7 @@ metadata:
context:
emit_legacy_scripts: true
specified_id: ~
middleware: project
name: path-project
class_name: Model
properties: {}

View File

@@ -13,6 +13,7 @@ metadata:
context:
emit_legacy_scripts: true
specified_id: ~
middleware: project
name: path-child-project
class_name: Folder
properties: {}
@@ -22,14 +23,16 @@ children:
ignore_unknown_instances: true
instigating_source:
ProjectNode:
- /foo/other.project.json
- SomeChild
- $className: Model
- Folder
path: /foo/other.project.json
name: SomeChild
node:
$className: Model
parent_class: Folder
relevant_paths: []
context:
emit_legacy_scripts: true
specified_id: ~
middleware: ~
name: SomeChild
class_name: Model
properties: {}

View File

@@ -15,6 +15,7 @@ metadata:
context:
emit_legacy_scripts: true
specified_id: ~
middleware: text
name: path-project
class_name: StringValue
properties:

View File

@@ -12,6 +12,7 @@ metadata:
context:
emit_legacy_scripts: true
specified_id: ~
middleware: ~
name: resolved-properties
class_name: StringValue
properties:

View File

@@ -12,6 +12,7 @@ metadata:
context:
emit_legacy_scripts: true
specified_id: ~
middleware: ~
name: unresolved-properties
class_name: StringValue
properties:

View File

@@ -14,6 +14,7 @@ metadata:
context:
emit_legacy_scripts: true
specified_id: ~
middleware: ~
name: foo
class_name: ModuleScript
properties:

View File

@@ -14,6 +14,7 @@ metadata:
context:
emit_legacy_scripts: true
specified_id: manually specified
middleware: ~
name: foo
class_name: ModuleScript
properties:

View File

@@ -14,6 +14,7 @@ metadata:
context:
emit_legacy_scripts: true
specified_id: ~
middleware: ~
name: foo
class_name: StringValue
properties:

View File

@@ -14,6 +14,7 @@ metadata:
context:
emit_legacy_scripts: true
specified_id: manually specified
middleware: ~
name: foo
class_name: StringValue
properties:

View File

@@ -14,6 +14,7 @@ metadata:
context:
emit_legacy_scripts: true
specified_id: ~
middleware: ~
name: foo
class_name: ModuleScript
properties:

View File

@@ -14,6 +14,7 @@ metadata:
context:
emit_legacy_scripts: true
specified_id: manually specified
middleware: ~
name: foo
class_name: ModuleScript
properties:

View File

@@ -1,11 +1,16 @@
use std::{path::Path, str};
use anyhow::Context as _;
use memofs::Vfs;
use rbx_dom_weak::types::Variant;
use rbx_dom_weak::ustr;
use crate::snapshot::{InstanceContext, InstanceMetadata, InstanceSnapshot};
use crate::{
snapshot::{InstanceContext, InstanceMetadata, InstanceSnapshot},
syncback::{FsSnapshot, SyncbackReturn, SyncbackSnapshot},
};
use super::meta_file::AdjacentMetadata;
use super::{meta_file::AdjacentMetadata, PathExt as _};
pub fn snapshot_txt(
context: &InstanceContext,
@@ -32,6 +37,41 @@ pub fn snapshot_txt(
Ok(Some(snapshot))
}
/// Performs syncback for a `StringValue` instance that is represented on
/// disk as a plain text file.
///
/// The instance's `Value` property becomes the file's contents. Remaining
/// metadata (if any) is written to an adjacent `NAME.meta.json` file;
/// nothing is written when the metadata would be empty.
///
/// Returns an error if the instance has no `Value` property of type String,
/// or if the metadata cannot be serialized.
pub fn syncback_txt<'sync>(
    snapshot: &SyncbackSnapshot<'sync>,
) -> anyhow::Result<SyncbackReturn<'sync>> {
    let new_inst = snapshot.new_inst();

    // The file body is exactly the `Value` property; this middleware only
    // knows how to handle string-typed values.
    let contents = match new_inst.properties.get(&ustr("Value")) {
        Some(Variant::String(source)) => Vec::from(source.as_bytes()),
        _ => anyhow::bail!("StringValues must have a `Value` property that is a String"),
    };

    let mut fs_snapshot = FsSnapshot::new();
    fs_snapshot.add_file(&snapshot.path, contents);

    if let Some(mut meta) =
        AdjacentMetadata::from_syncback_snapshot(snapshot, snapshot.path.clone())?
    {
        // StringValues have relatively few properties that we care about, so
        // shifting is fine.
        meta.properties.shift_remove(&ustr("Value"));
        if !meta.is_empty() {
            let parent = snapshot.path.parent_err()?;
            let serialized =
                serde_json::to_vec_pretty(&meta).context("could not serialize metadata")?;
            fs_snapshot.add_file(parent.join(format!("{}.meta.json", new_inst.name)), serialized);
        }
    }

    Ok(SyncbackReturn {
        fs_snapshot,
        children: Vec::new(),
        removed_children: Vec::new(),
    })
}
#[cfg(test)]
mod test {
use super::*;

View File

@@ -16,6 +16,7 @@ pub fn match_trailing<'a>(input: &'a str, suffix: &str) -> Option<&'a str> {
/// Convenience extensions on `Path`-like types used by the snapshot
/// middleware.
pub trait PathExt {
    /// Whether this path's file name ends with `suffix`.
    fn file_name_ends_with(&self, suffix: &str) -> bool;
    /// Returns the file name with the trailing `suffix` stripped, or an
    /// error if the file name does not end in `suffix`.
    fn file_name_trim_end<'a>(&'a self, suffix: &str) -> anyhow::Result<&'a str>;
    /// Like `Path::parent`, but returns an error instead of `None` when the
    /// path has no parent.
    fn parent_err(&self) -> anyhow::Result<&Path>;
}
impl<P> PathExt for P
@@ -40,6 +41,12 @@ where
match_trailing(file_name, suffix)
.with_context(|| format!("Path did not end in {}: {}", suffix, path.display()))
}
/// Returns the path's parent directory, converting the `None` case of
/// `Path::parent` into a descriptive error.
fn parent_err(&self) -> anyhow::Result<&Path> {
    let this = self.as_ref();
    this.parent()
        .ok_or_else(|| anyhow::anyhow!("Path does not have a parent: {}", this.display()))
}
}
// TEMP function until rojo 8.0, when it can be replaced with bool::default (aka false)