Implement Syncback to support converting Roblox files to a Rojo project (#937)

This is a very large commit.
Consider checking the linked PR for more information.
This commit is contained in:
Micah
2025-11-19 09:21:33 -08:00
committed by GitHub
parent 071b6e7e23
commit 9b5a07191b
239 changed files with 5325 additions and 225 deletions

View File

@@ -183,7 +183,7 @@ impl JobThreadContext {
if let Some(instigating_source) = &instance.metadata().instigating_source {
match instigating_source {
InstigatingSource::Path(path) => fs::remove_file(path).unwrap(),
InstigatingSource::ProjectNode(_, _, _, _) => {
InstigatingSource::ProjectNode { .. } => {
log::warn!(
"Cannot remove instance {:?}, it's from a project file",
id
@@ -231,7 +231,7 @@ impl JobThreadContext {
log::warn!("Cannot change Source to non-string value.");
}
}
InstigatingSource::ProjectNode(_, _, _, _) => {
InstigatingSource::ProjectNode { .. } => {
log::warn!(
"Cannot remove instance {:?}, it's from a project file",
id
@@ -317,16 +317,21 @@ fn compute_and_apply_changes(tree: &mut RojoTree, vfs: &Vfs, id: Ref) -> Option<
}
},
InstigatingSource::ProjectNode(project_path, instance_name, project_node, parent_class) => {
InstigatingSource::ProjectNode {
path,
name,
node,
parent_class,
} => {
// This instance is the direct subject of a project node. Since
// there might be information associated with our instance from
// the project file, we snapshot the entire project node again.
let snapshot_result = snapshot_project_node(
&metadata.context,
project_path,
instance_name,
project_node,
path,
name,
node,
vfs,
parent_class.as_ref().map(|name| name.as_str()),
);

View File

@@ -7,6 +7,7 @@ mod init;
mod plugin;
mod serve;
mod sourcemap;
mod syncback;
mod upload;
use std::{borrow::Cow, env, path::Path, str::FromStr};
@@ -21,6 +22,7 @@ pub use self::init::{InitCommand, InitKind};
pub use self::plugin::{PluginCommand, PluginSubcommand};
pub use self::serve::ServeCommand;
pub use self::sourcemap::SourcemapCommand;
pub use self::syncback::SyncbackCommand;
pub use self::upload::UploadCommand;
/// Command line options that Rojo accepts, defined using the clap crate.
@@ -46,6 +48,7 @@ impl Options {
Subcommand::FmtProject(subcommand) => subcommand.run(),
Subcommand::Doc(subcommand) => subcommand.run(),
Subcommand::Plugin(subcommand) => subcommand.run(),
Subcommand::Syncback(subcommand) => subcommand.run(self.global),
}
}
}
@@ -119,6 +122,7 @@ pub enum Subcommand {
FmtProject(FmtProjectCommand),
Doc(DocCommand),
Plugin(PluginCommand),
Syncback(SyncbackCommand),
}
pub(super) fn resolve_path(path: &Path) -> Cow<'_, Path> {

282
src/cli/syncback.rs Normal file
View File

@@ -0,0 +1,282 @@
use std::{
io::{self, BufReader, Write as _},
mem::forget,
path::{Path, PathBuf},
time::Instant,
};
use anyhow::Context;
use clap::Parser;
use fs_err::File;
use memofs::Vfs;
use rbx_dom_weak::{InstanceBuilder, WeakDom};
use termcolor::{BufferWriter, Color, ColorChoice, ColorSpec, WriteColor};
use crate::{
path_serializer::display_absolute,
serve_session::ServeSession,
syncback::{syncback_loop, FsSnapshot},
};
use super::{resolve_path, GlobalOptions};
const UNKNOWN_INPUT_KIND_ERR: &str = "Could not detect what kind of file was inputted. \
Expected input file to end in .rbxl, .rbxlx, .rbxm, or .rbxmx.";
/// Performs 'syncback' for the provided project, using the `input` file
/// given.
///
/// Syncback exists to convert Roblox files into a Rojo project automatically.
/// It uses the provided project.json file to traverse the given Roblox file,
/// serializing Instances to the file system in a format that Rojo understands.
///
/// To ease programmatic use, this command pipes all normal output to stderr.
#[derive(Debug, Parser)]
pub struct SyncbackCommand {
    /// Path to the project to sync back to.
    #[clap(default_value = "")]
    pub project: PathBuf,

    /// Path to the Roblox file to pull Instances from.
    #[clap(long, short)]
    pub input: PathBuf,

    /// If provided, a list of all of the files and directories that will be
    /// added or removed is emitted into stdout.
    #[clap(long, short)]
    pub list: bool,

    /// If provided, syncback will not actually write anything to the file
    /// system. The command will otherwise run normally.
    #[clap(long)]
    pub dry_run: bool,

    /// If provided, the prompt for writing to the file system is skipped.
    #[clap(long, short = 'y')]
    pub non_interactive: bool,
}
impl SyncbackCommand {
    /// Runs syncback end-to-end: reads the input file, opens the project,
    /// computes the file system changes to make, and (after confirmation,
    /// unless `--non-interactive`) writes them to disk.
    ///
    /// Human-facing output goes to stderr; only `--list` output is written
    /// to stdout.
    pub fn run(&self, global: GlobalOptions) -> anyhow::Result<()> {
        let path_old = resolve_path(&self.project);
        let path_new = resolve_path(&self.input);
        // Bail early if the input extension isn't one we can deserialize.
        let input_kind = FileKind::from_path(&path_new).context(UNKNOWN_INPUT_KIND_ERR)?;
        let dom_start_timer = Instant::now();
        let dom_new = read_dom(&path_new, input_kind)?;
        log::debug!(
            "Finished opening file in {:0.02}s",
            dom_start_timer.elapsed().as_secs_f32()
        );

        // File watching is unnecessary for a one-shot command.
        let vfs = Vfs::new_default();
        vfs.set_watch_enabled(false);

        let project_start_timer = Instant::now();
        let session_old = ServeSession::new(vfs, path_old.clone())?;
        log::debug!(
            "Finished opening project in {:0.02}s",
            project_start_timer.elapsed().as_secs_f32()
        );

        let mut dom_old = session_old.tree();
        log::debug!("Old root: {}", dom_old.inner().root().class);
        log::debug!("New root: {}", dom_new.root().class);

        // Only walk both root's children when tracing is actually enabled.
        if log::log_enabled!(log::Level::Trace) {
            log::trace!("Children of old root:");
            for child in dom_old.inner().root().children() {
                let inst = dom_old.get_instance(*child).unwrap();
                log::trace!("{} (class: {})", inst.name(), inst.class_name());
            }
            log::trace!("Children of new root:");
            for child in dom_new.root().children() {
                let inst = dom_new.get_by_ref(*child).unwrap();
                log::trace!("{} (class: {})", inst.name, inst.class);
            }
        }

        let syncback_timer = Instant::now();
        eprintln!("Beginning syncback...");
        let snapshot = syncback_loop(
            session_old.vfs(),
            &mut dom_old,
            dom_new,
            session_old.root_project(),
        )?;
        log::debug!(
            "Syncback finished in {:.02}s!",
            syncback_timer.elapsed().as_secs_f32()
        );

        let base_path = session_old.root_project().folder_location();
        if self.list {
            list_files(&snapshot, global.color.into(), base_path)?;
        }

        if !self.dry_run {
            if !self.non_interactive {
                // Summarize the pending changes and prompt before touching
                // the disk. The prompt goes to stderr like all other output.
                eprintln!(
                    "Would write {} files/folders and remove {} files/folders.",
                    snapshot.added_paths().len(),
                    snapshot.removed_paths().len()
                );
                eprint!("Is this okay? (Y/N): ");
                io::stderr().flush()?;
                let mut line = String::with_capacity(1);
                io::stdin().read_line(&mut line)?;
                line = line.trim().to_lowercase();
                if line != "y" {
                    eprintln!("Aborting due to user input!");
                    return Ok(());
                }
            }
            eprintln!("Writing to the file system...");
            snapshot.write_to_vfs(base_path, session_old.vfs())?;
            eprintln!("Finished syncback.")
        } else {
            eprintln!(
                "Would write {} files/folders and remove {} files/folders.",
                snapshot.added_paths().len(),
                snapshot.removed_paths().len()
            );
            eprintln!("Aborting before writing to file system due to `--dry-run`");
        }

        // It is potentially prohibitively expensive to drop a ServeSession,
        // and the program is about to exit anyway so we're just going to forget
        // about it. `dom_old` came from the session, so it is dropped
        // explicitly before the session is forgotten.
        drop(dom_old);
        forget(session_old);

        Ok(())
    }
}
/// Reads a `WeakDom` from the file at `path`, deserializing it according to
/// `file_kind`.
///
/// Place files (`rbxl`/`rbxlx`) are returned as-is. Model files
/// (`rbxm`/`rbxmx`) are additionally run through `process_model_dom` so that
/// the model's single root Instance becomes the root of the returned dom.
///
/// # Errors
/// Returns an error if the file cannot be opened or deserialized, or if a
/// model file has more than one Instance at its root.
fn read_dom(path: &Path, file_kind: FileKind) -> anyhow::Result<WeakDom> {
    let content = BufReader::new(File::open(path)?);
    match file_kind {
        FileKind::Rbxl => rbx_binary::from_reader(content).with_context(|| {
            format!(
                "Could not deserialize binary place file at {}",
                path.display()
            )
        }),
        FileKind::Rbxlx => rbx_xml::from_reader(content, xml_decode_config())
            .with_context(|| format!("Could not deserialize XML place file at {}", path.display())),
        FileKind::Rbxm => {
            // Fixed: this branch previously reported "binary place file",
            // which misdescribed the input when an .rbxm failed to parse.
            let temp_tree = rbx_binary::from_reader(content).with_context(|| {
                format!(
                    "Could not deserialize binary model file at {}",
                    path.display()
                )
            })?;
            process_model_dom(temp_tree)
        }
        FileKind::Rbxmx => {
            let temp_tree =
                rbx_xml::from_reader(content, xml_decode_config()).with_context(|| {
                    format!("Could not deserialize XML model file at {}", path.display())
                })?;
            process_model_dom(temp_tree)
        }
    }
}
/// Rebuilds a model dom so that the model's single root Instance becomes the
/// root of the returned `WeakDom`.
///
/// Model files place their contents under a container root, so the Instance
/// we actually care about is that root's only child.
///
/// # Errors
/// Returns an error if the model has more than one Instance at its root.
fn process_model_dom(dom: WeakDom) -> anyhow::Result<WeakDom> {
    let root_children = dom.root().children();
    if root_children.len() != 1 {
        anyhow::bail!(
            "Rojo does not currently support models with more \
            than one Instance at the Root!"
        );
    }
    let source_root = dom.get_by_ref(root_children[0]).unwrap();

    // Build a fresh dom whose root mirrors the model's real root Instance,
    // copying over its class and every property.
    let mut rebuilt = WeakDom::new(InstanceBuilder::new(source_root.class));
    for (&prop_name, prop_value) in &source_root.properties {
        rebuilt
            .root_mut()
            .properties
            .insert(prop_name, prop_value.to_owned());
    }

    // Clone the real root's children into the new dom, then reparent them
    // directly under its root.
    let moved = dom.clone_multiple_into_external(source_root.children(), &mut rebuilt);
    for child_ref in moved {
        rebuilt.transfer_within(child_ref, rebuilt.root_ref());
    }
    Ok(rebuilt)
}
/// Returns the XML decode options Rojo uses while reading files for syncback.
///
/// Unknown properties are read anyway rather than skipped so that no data is
/// silently dropped from the input file.
fn xml_decode_config() -> rbx_xml::DecodeOptions<'static> {
    let options = rbx_xml::DecodeOptions::new();
    options.property_behavior(rbx_xml::DecodePropertyBehavior::ReadUnknown)
}
/// The different kinds of input that Rojo can syncback.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum FileKind {
    /// A binary place file (`.rbxl`).
    Rbxl,
    /// An XML place file (`.rbxlx`).
    Rbxlx,
    /// A binary model file (`.rbxm`).
    Rbxm,
    /// An XML model file (`.rbxmx`).
    Rbxmx,
}

impl FileKind {
    /// Infers the kind of Roblox file from a path's extension, returning
    /// `None` when the extension is missing, not valid UTF-8, or not one of
    /// the four recognized Roblox formats.
    fn from_path(output: &Path) -> Option<FileKind> {
        let kind = match output.extension()?.to_str()? {
            "rbxl" => FileKind::Rbxl,
            "rbxlx" => FileKind::Rbxlx,
            "rbxm" => FileKind::Rbxm,
            "rbxmx" => FileKind::Rbxmx,
            _ => return None,
        };
        Some(kind)
    }
}
/// Prints every file and folder the provided `FsSnapshot` would add or
/// remove to stdout, with additions in green and removals in red.
///
/// Paths are displayed relative to `base_path` when they fall inside it,
/// and as their full path otherwise.
fn list_files(snapshot: &FsSnapshot, color: ColorChoice, base_path: &Path) -> io::Result<()> {
    // Strip the project folder prefix when possible so the listing stays
    // short and portable.
    let display = |path: &Path| display_absolute(path.strip_prefix(base_path).unwrap_or(path));

    let mut green = ColorSpec::new();
    green.set_fg(Some(Color::Green));
    let mut red = ColorSpec::new();
    red.set_fg(Some(Color::Red));

    let writer = BufferWriter::stdout(color);
    let mut buffer = writer.buffer();

    let added = snapshot.added_paths();
    if !added.is_empty() {
        buffer.set_color(&green)?;
        for path in added {
            writeln!(&mut buffer, "Writing {}", display(path))?;
        }
    }

    let removed = snapshot.removed_paths();
    if !removed.is_empty() {
        buffer.set_color(&red)?;
        for path in removed {
            writeln!(&mut buffer, "Removing {}", display(path))?;
        }
    }

    // Reset to the default color before handing the terminal back.
    buffer.set_color(&ColorSpec::new())?;
    writer.print(&buffer)
}

View File

@@ -22,9 +22,19 @@ mod serve_session;
mod session_id;
mod snapshot;
mod snapshot_middleware;
mod syncback;
mod variant_eq;
mod web;
// TODO: Work out what we should expose publicly
pub use project::*;
pub use rojo_ref::*;
pub use session_id::SessionId;
pub use snapshot::{
InstanceContext, InstanceMetadata, InstanceSnapshot, InstanceWithMeta, InstanceWithMetaMut,
RojoDescendants, RojoTree,
};
pub use snapshot_middleware::{snapshot_from_vfs, Middleware, ScriptType};
pub use syncback::{syncback_loop, FsSnapshot, SyncbackData, SyncbackSnapshot};
pub use web::interface as web_api;

View File

@@ -5,24 +5,32 @@ use std::path::Path;
use serde::{ser::SerializeSeq, Serialize, Serializer};
pub fn serialize_absolute<S, T>(path: T, serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
T: AsRef<Path>,
{
/// Converts the provided value into a String with all directory separators
/// converted into `/`.
pub fn display_absolute<T: AsRef<Path>>(path: T) -> String {
let as_str = path
.as_ref()
.as_os_str()
.to_str()
.expect("Invalid Unicode in file path, cannot serialize");
let replaced = as_str.replace('\\', "/");
as_str.replace('\\', "/")
}
serializer.serialize_str(&replaced)
/// A serializer for serde that serialize a value with all directory separators
/// converted into `/`.
pub fn serialize_absolute<S, T>(path: T, serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
T: AsRef<Path>,
{
serializer.serialize_str(&display_absolute(path))
}
#[derive(Serialize)]
struct WithAbsolute<'a>(#[serde(serialize_with = "serialize_absolute")] &'a Path);
/// A serializer for serde that serialize a list of values with all directory
/// separators converted into `/`.
pub fn serialize_vec_absolute<S, T>(paths: &[T], serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,

View File

@@ -1,5 +1,5 @@
use std::{
collections::{BTreeMap, HashMap, HashSet},
collections::{BTreeMap, HashSet},
ffi::OsStr,
fs, io,
net::IpAddr,
@@ -7,11 +7,13 @@ use std::{
};
use memofs::Vfs;
use rbx_dom_weak::{Ustr, UstrMap};
use rbx_dom_weak::Ustr;
use serde::{Deserialize, Serialize};
use thiserror::Error;
use crate::{glob::Glob, json, resolution::UnresolvedValue, snapshot::SyncRule};
use crate::{
glob::Glob, json, resolution::UnresolvedValue, snapshot::SyncRule, syncback::SyncbackRules,
};
/// Represents 'default' project names that act as `init` files
pub static DEFAULT_PROJECT_NAMES: [&str; 2] = ["default.project.json", "default.project.jsonc"];
@@ -114,6 +116,10 @@ pub struct Project {
#[serde(default, skip_serializing_if = "Vec::is_empty")]
pub glob_ignore_paths: Vec<Glob>,
/// A list of rules for syncback with this project file.
#[serde(skip_serializing_if = "Option::is_none")]
pub syncback_rules: Option<SyncbackRules>,
/// A list of mappings of globs to syncing rules. If a file matches a glob,
/// it will be 'transformed' into an Instance following the rule provided.
/// Globs are relative to the folder the project file is in.
@@ -332,12 +338,21 @@ pub enum PathNode {
}
impl PathNode {
    /// Returns the path of the `PathNode`, without regard for whether it's
    /// optional or not.
    #[inline]
    pub fn path(&self) -> &Path {
        match self {
            PathNode::Required(pathbuf) => pathbuf,
            PathNode::Optional(OptionalPathNode { optional }) => optional,
        }
    }

    /// Returns whether this `PathNode` is optional or not.
    #[inline]
    pub fn is_optional(&self) -> bool {
        matches!(self, PathNode::Optional(_))
    }
}
/// Describes an instance and its descendants in a project.
@@ -367,16 +382,16 @@ pub struct ProjectNode {
#[serde(
rename = "$properties",
default,
skip_serializing_if = "HashMap::is_empty"
skip_serializing_if = "BTreeMap::is_empty"
)]
pub properties: UstrMap<UnresolvedValue>,
pub properties: BTreeMap<Ustr, UnresolvedValue>,
#[serde(
rename = "$attributes",
default,
skip_serializing_if = "HashMap::is_empty"
skip_serializing_if = "BTreeMap::is_empty"
)]
pub attributes: HashMap<String, UnresolvedValue>,
pub attributes: BTreeMap<String, UnresolvedValue>,
/// Defines the behavior when Rojo encounters unknown instances in Roblox
/// Studio during live sync. `$ignoreUnknownInstances` should be considered

View File

@@ -2,8 +2,8 @@ use std::borrow::Borrow;
use anyhow::{bail, format_err};
use rbx_dom_weak::types::{
Attributes, CFrame, Color3, Content, ContentId, Enum, Font, MaterialColors, Matrix3, Tags,
Variant, VariantType, Vector2, Vector3,
Attributes, CFrame, Color3, Content, ContentId, ContentType, Enum, Font, MaterialColors,
Matrix3, Tags, Variant, VariantType, Vector2, Vector3,
};
use rbx_reflection::{DataType, PropertyDescriptor};
use serde::{Deserialize, Serialize};
@@ -37,6 +37,98 @@ impl UnresolvedValue {
UnresolvedValue::Ambiguous(partial) => partial.resolve_unambiguous(),
}
}
    /// Creates an `UnresolvedValue` from a variant, using a class and property
    /// name to potentially allow for ambiguous Enum variants.
    ///
    /// When the property is known to the reflection database, values are
    /// emitted in the shorter "ambiguous" form where one exists; otherwise
    /// the variant is wrapped as-is in `FullyQualified` so no information
    /// is lost.
    pub fn from_variant(variant: Variant, class_name: &str, prop_name: &str) -> Self {
        let descriptor = find_descriptor(class_name, prop_name);
        if descriptor.is_some() {
            // We can only use an ambiguous syntax if the property is known
            // to the reflection database.
            Self::Ambiguous(match variant {
                Variant::Enum(rbx_enum) => {
                    if let Some(property) = descriptor {
                        if let DataType::Enum(enum_name) = &property.data_type {
                            let database = rbx_reflection_database::get().unwrap();
                            if let Some(enum_descriptor) = database.enums.get(enum_name) {
                                // Look the enum item up by numeric value so it
                                // can be written as its human-readable name.
                                for (variant_name, id) in &enum_descriptor.items {
                                    if *id == rbx_enum.to_u32() {
                                        return Self::Ambiguous(AmbiguousValue::String(
                                            variant_name.to_string(),
                                        ));
                                    }
                                }
                            }
                        }
                    }
                    // The enum value isn't in the database; keep it fully
                    // qualified rather than guessing a name.
                    return Self::FullyQualified(variant);
                }
                Variant::Bool(bool) => AmbiguousValue::Bool(bool),
                // All numeric types collapse into an f64-backed Number.
                Variant::Float32(n) => AmbiguousValue::Number(n as f64),
                Variant::Float64(n) => AmbiguousValue::Number(n),
                Variant::Int32(n) => AmbiguousValue::Number(n as f64),
                Variant::Int64(n) => AmbiguousValue::Number(n as f64),
                Variant::String(str) => AmbiguousValue::String(str),
                Variant::Tags(tags) => {
                    AmbiguousValue::StringArray(tags.iter().map(|s| s.to_string()).collect())
                }
                // Content can only be written as a string when it is empty or
                // a plain URI; other content types stay fully qualified.
                Variant::Content(ref content) => match content.value() {
                    ContentType::None => AmbiguousValue::String(String::new()),
                    ContentType::Uri(uri) => AmbiguousValue::String(uri.clone()),
                    _ => return Self::FullyQualified(variant),
                },
                Variant::ContentId(content) => AmbiguousValue::String(content.into_string()),
                Variant::Vector2(vector) => {
                    AmbiguousValue::Array2([vector.x as f64, vector.y as f64])
                }
                Variant::Vector3(vector) => {
                    AmbiguousValue::Array3([vector.x as f64, vector.y as f64, vector.z as f64])
                }
                Variant::Color3(color) => {
                    AmbiguousValue::Array3([color.r as f64, color.g as f64, color.b as f64])
                }
                // CFrames flatten to 12 numbers: position followed by the
                // three rotation matrix rows.
                Variant::CFrame(cf) => AmbiguousValue::Array12([
                    cf.position.x as f64,
                    cf.position.y as f64,
                    cf.position.z as f64,
                    cf.orientation.x.x as f64,
                    cf.orientation.x.y as f64,
                    cf.orientation.x.z as f64,
                    cf.orientation.y.x as f64,
                    cf.orientation.y.y as f64,
                    cf.orientation.y.z as f64,
                    cf.orientation.z.x as f64,
                    cf.orientation.z.y as f64,
                    cf.orientation.z.z as f64,
                ]),
                Variant::Attributes(attr) => AmbiguousValue::Attributes(attr),
                Variant::Font(font) => AmbiguousValue::Font(font),
                Variant::MaterialColors(colors) => AmbiguousValue::MaterialColors(colors),
                // Everything else has no ambiguous representation.
                _ => {
                    return Self::FullyQualified(variant);
                }
            })
        } else {
            Self::FullyQualified(variant)
        }
    }
/// Creates an `UnresolvedValue` from a variant, only returning ambiguous
/// values if they're able to be resolved in a context-free environment.
pub fn from_variant_unambiguous(variant: Variant) -> Self {
match variant {
Variant::String(str) => Self::Ambiguous(AmbiguousValue::String(str)),
Variant::Float64(number) => Self::Ambiguous(AmbiguousValue::Number(number)),
Variant::Bool(bool) => Self::Ambiguous(AmbiguousValue::Bool(bool)),
Variant::BinaryString(bstr) => match std::str::from_utf8(bstr.as_ref()) {
Ok(_) => Self::Ambiguous(AmbiguousValue::String(
String::from_utf8(bstr.into_vec()).unwrap(),
)),
Err(_) => Self::FullyQualified(Variant::BinaryString(bstr)),
},
_ => Self::FullyQualified(variant),
}
}
}
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]

View File

@@ -210,6 +210,10 @@ impl ServeSession {
pub fn root_dir(&self) -> &Path {
self.root_project.folder_location()
}
    /// Returns the root project this session was started from.
    pub fn root_project(&self) -> &Project {
        &self.root_project
    }
}
#[derive(Debug, Error)]

View File

@@ -62,6 +62,10 @@ pub struct InstanceMetadata {
/// Indicates the ID used for Ref properties pointing to this Instance.
pub specified_id: Option<RojoRef>,
/// The Middleware that was used to create this Instance. Should generally
/// not be `None` except if the snapshotting process is not completed.
pub middleware: Option<Middleware>,
}
impl InstanceMetadata {
@@ -72,6 +76,7 @@ impl InstanceMetadata {
relevant_paths: Vec::new(),
context: InstanceContext::default(),
specified_id: None,
middleware: None,
}
}
@@ -109,6 +114,13 @@ impl InstanceMetadata {
..self
}
}
    /// Returns a copy of this metadata with its `middleware` field set to
    /// the provided `Middleware`.
    pub fn middleware(self, middleware: Middleware) -> Self {
        Self {
            middleware: Some(middleware),
            ..self
        }
    }
}
impl Default for InstanceMetadata {
@@ -215,22 +227,40 @@ impl PathIgnoreRule {
}
}
/// Represents where a particular Instance or InstanceSnapshot came from.
#[derive(Clone, PartialEq, Serialize, Deserialize)]
pub enum InstigatingSource {
/// The path the Instance was made from.
Path(#[serde(serialize_with = "path_serializer::serialize_absolute")] PathBuf),
ProjectNode(
#[serde(serialize_with = "path_serializer::serialize_absolute")] PathBuf,
String,
Box<ProjectNode>,
Option<String>,
),
/// The node in a Project that the Instance was made from.
ProjectNode {
#[serde(serialize_with = "path_serializer::serialize_absolute")]
path: PathBuf,
name: String,
node: ProjectNode,
parent_class: Option<String>,
},
}
impl InstigatingSource {
    /// Returns the file system path this source refers to, whether it is a
    /// loose file or a node inside a project file.
    pub fn path(&self) -> &Path {
        match self {
            Self::Path(path) => path.as_path(),
            Self::ProjectNode { path, .. } => path.as_path(),
        }
    }
}
impl fmt::Debug for InstigatingSource {
fn fmt(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
InstigatingSource::Path(path) => write!(formatter, "Path({})", path.display()),
InstigatingSource::ProjectNode(path, name, node, parent_class) => write!(
InstigatingSource::ProjectNode {
name,
node,
path,
parent_class,
} => write!(
formatter,
"ProjectNode({}: {:?}) from path {} and parent class {:?}",
name,

View File

@@ -14,5 +14,6 @@ metadata:
context:
emit_legacy_scripts: true
specified_id: ~
middleware: ~
children: []

View File

@@ -12,5 +12,6 @@ metadata:
context:
emit_legacy_scripts: true
specified_id: ~
middleware: ~
children: []

View File

@@ -14,5 +14,6 @@ metadata:
context:
emit_legacy_scripts: true
specified_id: ~
middleware: ~
children: []

View File

@@ -12,5 +12,6 @@ metadata:
context:
emit_legacy_scripts: true
specified_id: ~
middleware: ~
children: []

View File

@@ -13,6 +13,7 @@ added_instances:
context:
emit_legacy_scripts: true
specified_id: ~
middleware: ~
name: New
class_name: Folder
properties: {}

View File

@@ -73,6 +73,13 @@ impl RojoTree {
self.inner.root_ref()
}
/// Returns the root Instance of this tree.
#[inline]
pub fn root(&self) -> InstanceWithMeta<'_> {
self.get_instance(self.get_root_id())
.expect("RojoTrees should have a root")
}
pub fn get_instance(&self, id: Ref) -> Option<InstanceWithMeta<'_>> {
if let Some(instance) = self.inner.get_by_ref(id) {
let metadata = self.metadata_map.get(&id).unwrap();
@@ -322,6 +329,10 @@ impl<'a> InstanceWithMeta<'a> {
pub fn metadata(&self) -> &'a InstanceMetadata {
self.metadata
}
pub fn inner(&self) -> &Instance {
self.instance
}
}
/// RojoTree's equivalent of `&'a mut Instance`.
@@ -371,6 +382,14 @@ impl InstanceWithMetaMut<'_> {
pub fn metadata(&self) -> &InstanceMetadata {
self.metadata
}
pub fn inner(&self) -> &Instance {
self.instance
}
pub fn inner_mut(&mut self) -> &mut Instance {
self.instance
}
}
#[cfg(test)]

View File

@@ -1,16 +1,24 @@
use std::{collections::BTreeMap, path::Path};
use std::{
borrow::Cow,
collections::{BTreeMap, BTreeSet},
path::Path,
};
use anyhow::Context;
use memofs::Vfs;
use rbx_dom_weak::ustr;
use serde::Serialize;
use rbx_dom_weak::{types::Variant, ustr};
use serde::{Deserialize, Serialize};
use crate::{
snapshot::{InstanceContext, InstanceMetadata, InstanceSnapshot},
snapshot_middleware::meta_file::DirectoryMetadata,
syncback::{FsSnapshot, SyncbackReturn, SyncbackSnapshot},
};
use super::{dir::snapshot_dir_no_meta, meta_file::AdjacentMetadata};
use super::{
dir::{snapshot_dir_no_meta, syncback_dir_no_meta},
meta_file::{AdjacentMetadata, DirectoryMetadata},
PathExt as _,
};
pub fn snapshot_csv(
_context: &InstanceContext,
@@ -51,9 +59,10 @@ pub fn snapshot_csv_init(
context: &InstanceContext,
vfs: &Vfs,
init_path: &Path,
name: &str,
) -> anyhow::Result<Option<InstanceSnapshot>> {
let folder_path = init_path.parent().unwrap();
let dir_snapshot = snapshot_dir_no_meta(context, vfs, folder_path)?.unwrap();
let dir_snapshot = snapshot_dir_no_meta(context, vfs, folder_path, name)?.unwrap();
if dir_snapshot.class_name != "Folder" {
anyhow::bail!(
@@ -70,33 +79,111 @@ pub fn snapshot_csv_init(
init_snapshot.children = dir_snapshot.children;
init_snapshot.metadata = dir_snapshot.metadata;
// The directory snapshot middleware includes all possible init paths
// so we don't need to add it here.
DirectoryMetadata::read_and_apply_all(vfs, folder_path, &mut init_snapshot)?;
Ok(Some(init_snapshot))
}
/// Performs syncback for a LocalizationTable stored as a loose `.csv` file.
///
/// The table's `Contents` property (a JSON string) is converted back into
/// CSV, and any remaining metadata is written to an adjacent `.meta.json`.
pub fn syncback_csv<'sync>(
    snapshot: &SyncbackSnapshot<'sync>,
) -> anyhow::Result<SyncbackReturn<'sync>> {
    let new_inst = snapshot.new_inst();
    let contents = match new_inst.properties.get(&ustr("Contents")) {
        Some(Variant::String(content)) => content.as_str(),
        _ => anyhow::bail!("LocalizationTables must have a `Contents` property that is a String"),
    };

    let mut fs_snapshot = FsSnapshot::new();
    fs_snapshot.add_file(&snapshot.path, localization_to_csv(contents)?);

    if let Some(mut meta) = AdjacentMetadata::from_syncback_snapshot(snapshot, snapshot.path.clone())? {
        // LocalizationTables have relatively few properties that we care
        // about, so shifting is fine.
        meta.properties.shift_remove(&ustr("Contents"));
        if !meta.is_empty() {
            let parent = snapshot.path.parent_err()?;
            fs_snapshot.add_file(
                parent.join(format!("{}.meta.json", new_inst.name)),
                serde_json::to_vec_pretty(&meta).context("cannot serialize metadata")?,
            )
        }
    }

    Ok(SyncbackReturn {
        fs_snapshot,
        children: Vec::new(),
        removed_children: Vec::new(),
    })
}
/// Performs syncback for a LocalizationTable represented by a directory with
/// an `init.csv` file.
///
/// The directory itself is synced back first; the table's `Contents`
/// property is then written as `init.csv`, with any remaining metadata
/// going into `init.meta.json`.
pub fn syncback_csv_init<'sync>(
    snapshot: &SyncbackSnapshot<'sync>,
) -> anyhow::Result<SyncbackReturn<'sync>> {
    let new_inst = snapshot.new_inst();
    let contents = match new_inst.properties.get(&ustr("Contents")) {
        Some(Variant::String(content)) => content.as_str(),
        _ => anyhow::bail!("LocalizationTables must have a `Contents` property that is a String"),
    };

    let mut dir_syncback = syncback_dir_no_meta(snapshot)?;
    dir_syncback.fs_snapshot.add_file(
        snapshot.path.join("init.csv"),
        localization_to_csv(contents)?,
    );

    if let Some(mut meta) = DirectoryMetadata::from_syncback_snapshot(snapshot, snapshot.path.clone())? {
        // LocalizationTables have relatively few properties that we care
        // about, so shifting is fine.
        meta.properties.shift_remove(&ustr("Contents"));
        if !meta.is_empty() {
            dir_syncback.fs_snapshot.add_file(
                snapshot.path.join("init.meta.json"),
                serde_json::to_vec_pretty(&meta)
                    .context("could not serialize new init.meta.json")?,
            );
        }
    }

    Ok(dir_syncback)
}
/// Struct that holds any valid row from a Roblox CSV translation table.
///
/// We manually deserialize into this table from CSV, but let serde_json handle
/// serialization.
#[derive(Debug, Default, Serialize)]
#[derive(Debug, Default, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
struct LocalizationEntry<'a> {
#[serde(skip_serializing_if = "Option::is_none")]
key: Option<&'a str>,
key: Option<Cow<'a, str>>,
#[serde(skip_serializing_if = "Option::is_none")]
context: Option<&'a str>,
context: Option<Cow<'a, str>>,
// Roblox writes `examples` for LocalizationTable's Content property, which
// causes it to not roundtrip correctly.
// This is reported here: https://devforum.roblox.com/t/2908720.
//
// To support their mistake, we support an alias named `examples`.
#[serde(skip_serializing_if = "Option::is_none", alias = "examples")]
example: Option<Cow<'a, str>>,
#[serde(skip_serializing_if = "Option::is_none")]
example: Option<&'a str>,
#[serde(skip_serializing_if = "Option::is_none")]
source: Option<&'a str>,
source: Option<Cow<'a, str>>,
// We use a BTreeMap here to get deterministic output order.
values: BTreeMap<&'a str, &'a str>,
values: BTreeMap<Cow<'a, str>, Cow<'a, str>>,
}
/// Normally, we'd be able to let the csv crate construct our struct for us.
@@ -130,12 +217,14 @@ fn convert_localization_csv(contents: &[u8]) -> Result<String, csv::Error> {
}
match header {
"Key" => entry.key = Some(value),
"Source" => entry.source = Some(value),
"Context" => entry.context = Some(value),
"Example" => entry.example = Some(value),
"Key" => entry.key = Some(Cow::Borrowed(value)),
"Source" => entry.source = Some(Cow::Borrowed(value)),
"Context" => entry.context = Some(Cow::Borrowed(value)),
"Example" => entry.example = Some(Cow::Borrowed(value)),
_ => {
entry.values.insert(header, value);
entry
.values
.insert(Cow::Borrowed(header), Cow::Borrowed(value));
}
}
}
@@ -153,6 +242,57 @@ fn convert_localization_csv(contents: &[u8]) -> Result<String, csv::Error> {
Ok(encoded)
}
/// Takes a localization table (as a JSON string) and converts it into a CSV
/// file, returned as raw bytes.
///
/// The CSV file is ordered, so it should be deterministic.
///
/// # Errors
/// Returns an error if the JSON cannot be decoded or a CSV record fails to
/// serialize.
fn localization_to_csv(csv_contents: &str) -> anyhow::Result<Vec<u8>> {
    let mut out = Vec::new();
    let mut writer = csv::Writer::from_writer(&mut out);

    let mut csv: Vec<LocalizationEntry> =
        serde_json::from_str(csv_contents).context("cannot decode JSON from localization table")?;
    // TODO sort this better
    // `Option<Cow<str>>` is totally ordered, so `cmp` is equivalent to the
    // previous `partial_cmp(..).unwrap()` without carrying a panic path.
    csv.sort_by(|a, b| a.source.cmp(&b.source));

    // Canonical columns come first, followed by one column per language seen
    // anywhere in the table.
    let mut headers = vec!["Key", "Source", "Context", "Example"];
    // We want both order and a lack of duplicates, so we use a BTreeSet.
    let mut extra_headers = BTreeSet::new();
    for entry in &csv {
        for lang in entry.values.keys() {
            extra_headers.insert(lang.as_ref());
        }
    }
    headers.extend(extra_headers.iter());
    writer
        .write_record(&headers)
        .context("could not write headers for localization table")?;

    // One record buffer is reused across rows to avoid per-row allocation.
    let mut record: Vec<&str> = Vec::with_capacity(headers.len());
    for entry in &csv {
        record.push(entry.key.as_deref().unwrap_or_default());
        record.push(entry.source.as_deref().unwrap_or_default());
        record.push(entry.context.as_deref().unwrap_or_default());
        record.push(entry.example.as_deref().unwrap_or_default());
        let values = &entry.values;
        for header in &extra_headers {
            record.push(values.get(*header).map(AsRef::as_ref).unwrap_or_default());
        }
        writer
            .write_record(&record)
            .context("cannot write record for localization table")?;
        record.clear();
    }

    // We must drop `writer` here to regain access to `out`.
    drop(writer);
    Ok(out)
}
#[cfg(test)]
mod test {
use super::*;
@@ -240,6 +380,7 @@ Ack,Ack!,,An exclamation of despair,¡Ay!"#,
&InstanceContext::with_emit_legacy_scripts(Some(true)),
&vfs,
Path::new("/root/init.csv"),
"root",
)
.unwrap()
.unwrap();
@@ -277,6 +418,7 @@ Ack,Ack!,,An exclamation of despair,¡Ay!"#,
&InstanceContext::with_emit_legacy_scripts(Some(true)),
&vfs,
Path::new("/root/init.csv"),
"root",
)
.unwrap()
.unwrap();

View File

@@ -1,17 +1,27 @@
use std::path::Path;
use std::{
collections::{HashMap, HashSet},
path::Path,
};
use anyhow::Context;
use memofs::{DirEntry, Vfs};
use crate::snapshot::{InstanceContext, InstanceMetadata, InstanceSnapshot};
use crate::{
snapshot::{InstanceContext, InstanceMetadata, InstanceSnapshot, InstigatingSource},
syncback::{hash_instance, FsSnapshot, SyncbackReturn, SyncbackSnapshot},
};
use super::{meta_file::DirectoryMetadata, snapshot_from_vfs};
const EMPTY_DIR_KEEP_NAME: &str = ".gitkeep";
pub fn snapshot_dir(
context: &InstanceContext,
vfs: &Vfs,
path: &Path,
name: &str,
) -> anyhow::Result<Option<InstanceSnapshot>> {
let mut snapshot = match snapshot_dir_no_meta(context, vfs, path)? {
let mut snapshot = match snapshot_dir_no_meta(context, vfs, path, name)? {
Some(snapshot) => snapshot,
None => return Ok(None),
};
@@ -29,6 +39,7 @@ pub fn snapshot_dir_no_meta(
context: &InstanceContext,
vfs: &Vfs,
path: &Path,
name: &str,
) -> anyhow::Result<Option<InstanceSnapshot>> {
let passes_filter_rules = |child: &DirEntry| {
context
@@ -51,13 +62,6 @@ pub fn snapshot_dir_no_meta(
}
}
let instance_name = path
.file_name()
.expect("Could not extract file name")
.to_str()
.ok_or_else(|| anyhow::anyhow!("File name was not valid UTF-8: {}", path.display()))?
.to_string();
let relevant_paths = vec![
path.to_path_buf(),
// TODO: We shouldn't need to know about Lua existing in this
@@ -73,7 +77,7 @@ pub fn snapshot_dir_no_meta(
];
let snapshot = InstanceSnapshot::new()
.name(instance_name)
.name(name)
.class_name("Folder")
.children(snapshot_children)
.metadata(
@@ -86,6 +90,136 @@ pub fn snapshot_dir_no_meta(
Ok(Some(snapshot))
}
/// Performs syncback for a directory, including writing its
/// `init.meta.json` when there is metadata to record.
///
/// When the directory would otherwise be completely empty (no children and
/// empty metadata), a `.gitkeep` placeholder is emitted so the folder is
/// preserved.
pub fn syncback_dir<'sync>(
    snapshot: &SyncbackSnapshot<'sync>,
) -> anyhow::Result<SyncbackReturn<'sync>> {
    let new_inst = snapshot.new_inst();
    let mut dir_syncback = syncback_dir_no_meta(snapshot)?;

    let mut meta = DirectoryMetadata::from_syncback_snapshot(snapshot, snapshot.path.clone())?;
    if let Some(meta) = &mut meta {
        // A non-Folder class can't be inferred from the directory alone, so
        // it has to be recorded in the metadata file.
        if new_inst.class != "Folder" {
            meta.class_name = Some(new_inst.class);
        }
        if !meta.is_empty() {
            dir_syncback.fs_snapshot.add_file(
                snapshot.path.join("init.meta.json"),
                serde_json::to_vec_pretty(&meta)
                    .context("could not serialize new init.meta.json")?,
            );
        }
    }

    let metadata_empty = matches!(&meta, Some(meta) if meta.is_empty());
    if new_inst.children().is_empty() && metadata_empty {
        dir_syncback
            .fs_snapshot
            .add_file(snapshot.path.join(EMPTY_DIR_KEEP_NAME), Vec::new())
    }

    Ok(dir_syncback)
}
/// Performs syncback for a directory, *excluding* metadata handling.
///
/// Pairs children between the new and old trees by name, decides which
/// children to recurse into and which to remove, and records the directory
/// in the returned `FsSnapshot` only when the subtree actually changed
/// (as determined by hashing both trees).
pub fn syncback_dir_no_meta<'sync>(
    snapshot: &SyncbackSnapshot<'sync>,
) -> anyhow::Result<SyncbackReturn<'sync>> {
    let new_inst = snapshot.new_inst();
    let mut children = Vec::new();
    let mut removed_children = Vec::new();
    // We have to enforce unique child names for the file system.
    // The comparison is case-insensitive because common file systems
    // (Windows, macOS) treat names differing only in case as the same file.
    let mut child_names = HashSet::with_capacity(new_inst.children().len());
    let mut duplicate_set = HashSet::new();
    for child_ref in new_inst.children() {
        let child = snapshot.get_new_instance(*child_ref).unwrap();
        if !child_names.insert(child.name.to_lowercase()) {
            duplicate_set.insert(child.name.as_str());
        }
    }
    if !duplicate_set.is_empty() {
        // Cap the listing so a pathological tree doesn't produce an
        // unreadable error message.
        if duplicate_set.len() <= 25 {
            anyhow::bail!(
                "Instance has children with duplicate name (case may not exactly match):\n {}",
                duplicate_set.into_iter().collect::<Vec<&str>>().join(", ")
            );
        }
        anyhow::bail!("Instance has more than 25 children with duplicate names");
    }
    if let Some(old_inst) = snapshot.old_inst() {
        // Index old children by name so new children can be matched up.
        let mut old_child_map = HashMap::with_capacity(old_inst.children().len());
        for child in old_inst.children() {
            let inst = snapshot.get_old_instance(*child).unwrap();
            old_child_map.insert(inst.name(), inst);
        }
        for new_child_ref in new_inst.children() {
            let new_child = snapshot.get_new_instance(*new_child_ref).unwrap();
            if let Some(old_child) = old_child_map.remove(new_child.name.as_str()) {
                // Skip children Rojo knows about but that have no on-disk
                // representation to write back to.
                if old_child.metadata().relevant_paths.is_empty() {
                    log::debug!(
                        "Skipping instance {} because it doesn't exist on the disk",
                        old_child.name()
                    );
                    continue;
                } else if matches!(
                    old_child.metadata().instigating_source,
                    Some(InstigatingSource::ProjectNode { .. })
                ) {
                    // Project-node children are owned by the project file,
                    // not this directory, so they must not be rewritten here.
                    log::debug!(
                        "Skipping instance {} because it originates in a project file",
                        old_child.name()
                    );
                    continue;
                }
                // This child exists in both doms. Pass it on.
                children.push(snapshot.with_joined_path(*new_child_ref, Some(old_child.id()))?);
            } else {
                // The child only exists in the new dom.
                children.push(snapshot.with_joined_path(*new_child_ref, None)?);
            }
        }
        // Any children that are in the old dom but not the new one are removed.
        removed_children.extend(old_child_map.into_values());
    } else {
        // There is no old instance. Just add every child.
        for new_child_ref in new_inst.children() {
            children.push(snapshot.with_joined_path(*new_child_ref, None)?);
        }
    }
    let mut fs_snapshot = FsSnapshot::new();
    if let Some(old_ref) = snapshot.old {
        // Only touch the directory on disk when the subtree actually changed.
        let new_hash = hash_instance(snapshot.project(), snapshot.new_tree(), snapshot.new)
            .expect("new Instance should be hashable");
        let old_hash = hash_instance(snapshot.project(), snapshot.old_tree(), old_ref)
            .expect("old Instance should be hashable");
        if old_hash != new_hash {
            fs_snapshot.add_dir(&snapshot.path);
        } else {
            log::debug!(
                "Skipping reserializing directory {} because old and new tree hash the same",
                new_inst.name
            );
        }
    } else {
        fs_snapshot.add_dir(&snapshot.path);
    }
    Ok(SyncbackReturn {
        fs_snapshot,
        children,
        removed_children,
    })
}
#[cfg(test)]
mod test {
use super::*;
@@ -100,9 +234,10 @@ mod test {
let vfs = Vfs::new(imfs);
let instance_snapshot = snapshot_dir(&InstanceContext::default(), &vfs, Path::new("/foo"))
.unwrap()
.unwrap();
let instance_snapshot =
snapshot_dir(&InstanceContext::default(), &vfs, Path::new("/foo"), "foo")
.unwrap()
.unwrap();
insta::assert_yaml_snapshot!(instance_snapshot);
}
@@ -118,9 +253,10 @@ mod test {
let vfs = Vfs::new(imfs);
let instance_snapshot = snapshot_dir(&InstanceContext::default(), &vfs, Path::new("/foo"))
.unwrap()
.unwrap();
let instance_snapshot =
snapshot_dir(&InstanceContext::default(), &vfs, Path::new("/foo"), "foo")
.unwrap()
.unwrap();
insta::assert_yaml_snapshot!(instance_snapshot);
}

View File

@@ -1,17 +1,19 @@
use std::{borrow::Cow, collections::HashMap, path::Path, str};
use std::{borrow::Cow, path::Path, str};
use anyhow::Context;
use indexmap::IndexMap;
use memofs::Vfs;
use rbx_dom_weak::{
types::{Attributes, Ref},
types::{Attributes, Ref, Variant},
HashMapExt as _, Ustr, UstrMap,
};
use serde::Deserialize;
use serde::{Deserialize, Serialize};
use crate::{
json,
resolution::UnresolvedValue,
snapshot::{InstanceContext, InstanceSnapshot},
syncback::{filter_properties_preallocated, FsSnapshot, SyncbackReturn, SyncbackSnapshot},
RojoRef,
};
@@ -63,13 +65,86 @@ pub fn snapshot_json_model(
Ok(Some(snapshot))
}
#[derive(Debug, Deserialize)]
/// Performs syncback for a JSON model file by serializing the new Instance
/// (and all of its descendants) into a single pretty-printed JSON document.
pub fn syncback_json_model<'sync>(
    snapshot: &SyncbackSnapshot<'sync>,
) -> anyhow::Result<SyncbackReturn<'sync>> {
    // Scratch buffer reused throughout the recursion, sized for the root.
    let mut scratch = Vec::with_capacity(snapshot.new_inst().properties.len());
    let mut root_model = json_model_from_pair(snapshot, &mut scratch, snapshot.new);
    // We don't need the name on the root, but we do for children.
    root_model.name = None;
    let serialized =
        serde_json::to_vec_pretty(&root_model).context("failed to serialize new JSON Model")?;
    let fs_snapshot = FsSnapshot::new().with_added_file(&snapshot.path, serialized);
    Ok(SyncbackReturn {
        fs_snapshot,
        children: Vec::new(),
        removed_children: Vec::new(),
    })
}
/// Recursively builds a [`JsonModel`] for the Instance `new` in the new tree.
///
/// `prop_buffer` is a scratch buffer shared across the entire recursion to
/// avoid reallocating per Instance; it is always fully drained before this
/// function returns.
fn json_model_from_pair<'sync>(
    snapshot: &SyncbackSnapshot<'sync>,
    prop_buffer: &mut Vec<(Ustr, &'sync Variant)>,
    new: Ref,
) -> JsonModel {
    let new_inst = snapshot
        .get_new_instance(new)
        .expect("all new referents passed to json_model_from_pair should exist");
    // Collect only the properties that should be persisted for this class.
    filter_properties_preallocated(snapshot.project(), new_inst, prop_buffer);
    let mut properties = IndexMap::new();
    let mut attributes = IndexMap::new();
    for (name, value) in prop_buffer.drain(..) {
        match value {
            // Attributes are flattened into their own map rather than
            // stored as a single `Attributes` property.
            Variant::Attributes(attrs) => {
                for (attr_name, attr_value) in attrs.iter() {
                    // We (probably) don't want to preserve internal attributes,
                    // only user defined ones.
                    if attr_name.starts_with("RBX") {
                        continue;
                    }
                    attributes.insert(
                        attr_name.clone(),
                        UnresolvedValue::from_variant_unambiguous(attr_value.clone()),
                    );
                }
            }
            _ => {
                properties.insert(
                    name,
                    UnresolvedValue::from_variant(value.clone(), &new_inst.class, &name),
                );
            }
        }
    }
    let mut children = Vec::with_capacity(new_inst.children().len());
    for new_child_ref in new_inst.children() {
        children.push(json_model_from_pair(snapshot, prop_buffer, *new_child_ref))
    }
    JsonModel {
        name: Some(new_inst.name.clone()),
        class_name: new_inst.class,
        children,
        properties,
        attributes,
        id: None,
        schema: None,
    }
}
#[derive(Debug, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
struct JsonModel {
#[serde(rename = "$schema", skip_serializing_if = "Option::is_none")]
schema: Option<String>,
#[serde(alias = "Name")]
#[serde(alias = "Name", skip_serializing_if = "Option::is_none")]
name: Option<String>,
#[serde(alias = "ClassName")]
@@ -87,13 +162,13 @@ struct JsonModel {
#[serde(
alias = "Properties",
default = "UstrMap::new",
skip_serializing_if = "HashMap::is_empty"
default,
skip_serializing_if = "IndexMap::is_empty"
)]
properties: UstrMap<UnresolvedValue>,
properties: IndexMap<Ustr, UnresolvedValue>,
#[serde(default = "HashMap::new", skip_serializing_if = "HashMap::is_empty")]
attributes: HashMap<String, UnresolvedValue>,
#[serde(default = "IndexMap::new", skip_serializing_if = "IndexMap::is_empty")]
attributes: IndexMap<String, UnresolvedValue>,
}
impl JsonModel {

View File

@@ -1,11 +1,22 @@
use std::{path::Path, str};
use anyhow::Context as _;
use memofs::Vfs;
use rbx_dom_weak::{types::Enum, ustr, HashMapExt as _, UstrMap};
use rbx_dom_weak::{
types::{Enum, Variant},
ustr, HashMapExt as _, UstrMap,
};
use crate::snapshot::{InstanceContext, InstanceMetadata, InstanceSnapshot};
use crate::{
snapshot::{InstanceContext, InstanceMetadata, InstanceSnapshot},
syncback::{FsSnapshot, SyncbackReturn, SyncbackSnapshot},
};
use super::{dir::snapshot_dir_no_meta, meta_file::AdjacentMetadata, meta_file::DirectoryMetadata};
use super::{
dir::{snapshot_dir_no_meta, syncback_dir_no_meta},
meta_file::{AdjacentMetadata, DirectoryMetadata},
PathExt as _,
};
#[derive(Debug)]
pub enum ScriptType {
@@ -95,10 +106,11 @@ pub fn snapshot_lua_init(
context: &InstanceContext,
vfs: &Vfs,
init_path: &Path,
name: &str,
script_type: ScriptType,
) -> anyhow::Result<Option<InstanceSnapshot>> {
let folder_path = init_path.parent().unwrap();
let dir_snapshot = snapshot_dir_no_meta(context, vfs, folder_path)?.unwrap();
let dir_snapshot = snapshot_dir_no_meta(context, vfs, folder_path, name)?.unwrap();
if dir_snapshot.class_name != "Folder" {
anyhow::bail!(
@@ -117,12 +129,89 @@ pub fn snapshot_lua_init(
init_snapshot.children = dir_snapshot.children;
init_snapshot.metadata = dir_snapshot.metadata;
// The directory snapshot middleware includes all possible init paths
// so we don't need to add it here.
DirectoryMetadata::read_and_apply_all(vfs, folder_path, &mut init_snapshot)?;
Ok(Some(init_snapshot))
}
/// Performs syncback for a single script file.
///
/// The script's `Source` property becomes the file contents. Any remaining
/// persistable properties are written to an adjacent `<name>.meta.json`.
pub fn syncback_lua<'sync>(
    snapshot: &SyncbackSnapshot<'sync>,
) -> anyhow::Result<SyncbackReturn<'sync>> {
    let new_inst = snapshot.new_inst();
    let contents = if let Some(Variant::String(source)) = new_inst.properties.get(&ustr("Source")) {
        source.as_bytes().to_vec()
    } else {
        anyhow::bail!("Scripts must have a `Source` property that is a String")
    };
    let mut fs_snapshot = FsSnapshot::new();
    fs_snapshot.add_file(&snapshot.path, contents);
    let meta = AdjacentMetadata::from_syncback_snapshot(snapshot, snapshot.path.clone())?;
    if let Some(mut meta) = meta {
        // `Source` is represented by the script file itself, so it must not
        // also appear in the meta file.
        // Scripts have relatively few properties that we care about, so shifting
        // is fine.
        meta.properties.shift_remove(&ustr("Source"));
        if !meta.is_empty() {
            let parent_location = snapshot.path.parent_err()?;
            fs_snapshot.add_file(
                parent_location.join(format!("{}.meta.json", new_inst.name)),
                serde_json::to_vec_pretty(&meta).context("cannot serialize metadata")?,
            );
        }
    }
    Ok(SyncbackReturn {
        fs_snapshot,
        // Scripts don't have a child!
        children: Vec::new(),
        removed_children: Vec::new(),
    })
}
/// Performs syncback for a script represented as a directory with an `init`
/// file (e.g. `init.luau`, `init.server.luau`).
///
/// Directory handling is delegated to [`syncback_dir_no_meta`]; this function
/// writes the `init` file from the script's `Source` property and emits an
/// `init.meta.json` for any remaining properties.
pub fn syncback_lua_init<'sync>(
    script_type: ScriptType,
    snapshot: &SyncbackSnapshot<'sync>,
) -> anyhow::Result<SyncbackReturn<'sync>> {
    let new_inst = snapshot.new_inst();
    let path = snapshot.path.join(match script_type {
        ScriptType::Server => "init.server.luau",
        ScriptType::Client => "init.client.luau",
        ScriptType::Module => "init.luau",
        _ => anyhow::bail!("syncback is not yet implemented for {script_type:?}"),
    });
    let contents = if let Some(Variant::String(source)) = new_inst.properties.get(&ustr("Source")) {
        source.as_bytes().to_vec()
    } else {
        anyhow::bail!("Scripts must have a `Source` property that is a String")
    };
    let mut dir_syncback = syncback_dir_no_meta(snapshot)?;
    dir_syncback.fs_snapshot.add_file(&path, contents);
    // Metadata is keyed to the init file's path so the instigating-source
    // check inside `from_syncback_snapshot` compares the right location.
    let meta = DirectoryMetadata::from_syncback_snapshot(snapshot, path.clone())?;
    if let Some(mut meta) = meta {
        // `Source` lives in the init file itself, so drop it from the meta.
        // Scripts have relatively few properties that we care about, so shifting
        // is fine.
        meta.properties.shift_remove(&ustr("Source"));
        if !meta.is_empty() {
            dir_syncback.fs_snapshot.add_file(
                snapshot.path.join("init.meta.json"),
                serde_json::to_vec_pretty(&meta)
                    .context("could not serialize new init.meta.json")?,
            );
        }
    }
    Ok(dir_syncback)
}
#[cfg(test)]
mod test {
use super::*;
@@ -305,6 +394,7 @@ mod test {
&InstanceContext::with_emit_legacy_scripts(Some(true)),
&vfs,
Path::new("/root/init.lua"),
"root",
ScriptType::Module,
)
.unwrap()
@@ -336,6 +426,7 @@ mod test {
&InstanceContext::with_emit_legacy_scripts(Some(true)),
&vfs,
Path::new("/root/init.lua"),
"root",
ScriptType::Module,
)
.unwrap()

View File

@@ -1,14 +1,18 @@
use std::{
collections::HashMap,
path::{Path, PathBuf},
};
use std::path::{Path, PathBuf};
use anyhow::{format_err, Context};
use indexmap::IndexMap;
use memofs::{IoResultExt as _, Vfs};
use rbx_dom_weak::{types::Attributes, Ustr, UstrMap};
use rbx_dom_weak::{
types::{Attributes, Variant},
Ustr,
};
use serde::{Deserialize, Serialize};
use crate::{json, resolution::UnresolvedValue, snapshot::InstanceSnapshot, RojoRef};
use crate::{
json, resolution::UnresolvedValue, snapshot::InstanceSnapshot, syncback::SyncbackSnapshot,
RojoRef,
};
/// Represents metadata in a sibling file with the same basename.
///
@@ -26,11 +30,11 @@ pub struct AdjacentMetadata {
#[serde(skip_serializing_if = "Option::is_none")]
pub ignore_unknown_instances: Option<bool>,
#[serde(default, skip_serializing_if = "HashMap::is_empty")]
pub properties: UstrMap<UnresolvedValue>,
#[serde(default, skip_serializing_if = "IndexMap::is_empty")]
pub properties: IndexMap<Ustr, UnresolvedValue>,
#[serde(default, skip_serializing_if = "HashMap::is_empty")]
pub attributes: HashMap<String, UnresolvedValue>,
#[serde(default, skip_serializing_if = "IndexMap::is_empty")]
pub attributes: IndexMap<String, UnresolvedValue>,
#[serde(skip)]
pub path: PathBuf,
@@ -80,6 +84,76 @@ impl AdjacentMetadata {
Ok(meta)
}
    /// Constructs an `AdjacentMetadata` from the provided snapshot, assuming it
    /// will be at the provided path.
    ///
    /// Returns `Ok(None)` when metadata should not be written at all — namely
    /// when the old Instance's instigating source is a different file than
    /// `path` (e.g. the Instance comes from a project node).
    pub fn from_syncback_snapshot(
        snapshot: &SyncbackSnapshot,
        path: PathBuf,
    ) -> anyhow::Result<Option<Self>> {
        let mut properties = IndexMap::new();
        let mut attributes = IndexMap::new();
        // TODO make this more granular.
        // I am breaking the cycle of bad TODOs. This is in reference to the fact
        // that right now, this will just not write any metadata at all for
        // project nodes, which is not always desirable. We should try to be
        // smarter about it.
        if let Some(old_inst) = snapshot.old_inst() {
            if let Some(source) = &old_inst.metadata().instigating_source {
                let source = source.path();
                if source != path {
                    log::debug!(
                        "Instigating source for Instance is mismatched so its metadata is being skipped.\nPath: {}",
                        path.display()
                    );
                    return Ok(None);
                }
            }
        }
        // `false` is the default, so only `true` is ever persisted below.
        let ignore_unknown_instances = snapshot
            .old_inst()
            .map(|inst| inst.metadata().ignore_unknown_instances)
            .unwrap_or_default();
        let class = &snapshot.new_inst().class;
        for (name, value) in snapshot.get_path_filtered_properties(snapshot.new).unwrap() {
            match value {
                // Attributes are split out into their own map rather than
                // stored as a single `Attributes` property.
                Variant::Attributes(attrs) => {
                    for (attr_name, attr_value) in attrs.iter() {
                        // We (probably) don't want to preserve internal
                        // attributes, only user defined ones.
                        if attr_name.starts_with("RBX") {
                            continue;
                        }
                        attributes.insert(
                            attr_name.clone(),
                            UnresolvedValue::from_variant_unambiguous(attr_value.clone()),
                        );
                    }
                }
                _ => {
                    properties.insert(
                        name,
                        UnresolvedValue::from_variant(value.clone(), class, &name),
                    );
                }
            }
        }
        Ok(Some(Self {
            ignore_unknown_instances: if ignore_unknown_instances {
                Some(true)
            } else {
                None
            },
            properties,
            attributes,
            path,
            id: None,
            schema: None,
        }))
    }
pub fn apply_ignore_unknown_instances(&mut self, snapshot: &mut InstanceSnapshot) {
if let Some(ignore) = self.ignore_unknown_instances.take() {
snapshot.metadata.ignore_unknown_instances = ignore;
@@ -89,7 +163,10 @@ impl AdjacentMetadata {
pub fn apply_properties(&mut self, snapshot: &mut InstanceSnapshot) -> anyhow::Result<()> {
let path = &self.path;
for (key, unresolved) in self.properties.drain() {
// `IndexMap` has no argument-free equivalent to `HashMap::drain`, so the
// next best option is to take ownership of the entire map. Not free, but
// very cheap.
for (key, unresolved) in std::mem::take(&mut self.properties) {
let value = unresolved
.resolve(&snapshot.class_name, &key)
.with_context(|| format!("error applying meta file {}", path.display()))?;
@@ -100,7 +177,7 @@ impl AdjacentMetadata {
if !self.attributes.is_empty() {
let mut attributes = Attributes::new();
for (key, unresolved) in self.attributes.drain() {
for (key, unresolved) in std::mem::take(&mut self.attributes) {
let value = unresolved.resolve_unambiguous()?;
attributes.insert(key, value);
}
@@ -131,6 +208,18 @@ impl AdjacentMetadata {
Ok(())
}
/// Returns whether the metadata is 'empty', meaning it doesn't have anything
/// worth persisting in it. Specifically:
///
/// - The number of properties and attributes is 0
/// - `ignore_unknown_instances` is None
#[inline]
pub fn is_empty(&self) -> bool {
self.attributes.is_empty()
&& self.properties.is_empty()
&& self.ignore_unknown_instances.is_none()
}
// TODO: Add method to allow selectively applying parts of metadata and
// throwing errors if invalid parts are specified.
}
@@ -151,11 +240,11 @@ pub struct DirectoryMetadata {
#[serde(skip_serializing_if = "Option::is_none")]
pub ignore_unknown_instances: Option<bool>,
#[serde(default, skip_serializing_if = "HashMap::is_empty")]
pub properties: UstrMap<UnresolvedValue>,
#[serde(default, skip_serializing_if = "IndexMap::is_empty")]
pub properties: IndexMap<Ustr, UnresolvedValue>,
#[serde(default, skip_serializing_if = "HashMap::is_empty")]
pub attributes: HashMap<String, UnresolvedValue>,
#[serde(default, skip_serializing_if = "IndexMap::is_empty")]
pub attributes: IndexMap<String, UnresolvedValue>,
#[serde(skip_serializing_if = "Option::is_none")]
pub class_name: Option<Ustr>,
@@ -207,6 +296,80 @@ impl DirectoryMetadata {
Ok(meta)
}
    /// Constructs a `DirectoryMetadata` from the provided snapshot, assuming it
    /// will be at the provided path.
    ///
    /// This function does not set `ClassName` manually as most uses won't
    /// want it set.
    ///
    /// Returns `Ok(None)` when the old Instance's instigating source is a
    /// different file than `path`, since writing metadata there would be
    /// incorrect (e.g. for project nodes).
    pub fn from_syncback_snapshot(
        snapshot: &SyncbackSnapshot,
        path: PathBuf,
    ) -> anyhow::Result<Option<Self>> {
        let mut properties = IndexMap::new();
        let mut attributes = IndexMap::new();
        // TODO make this more granular.
        // I am breaking the cycle of bad TODOs. This is in reference to the fact
        // that right now, this will just not write any metadata at all for
        // project nodes, which is not always desirable. We should try to be
        // smarter about it.
        if let Some(old_inst) = snapshot.old_inst() {
            if let Some(source) = &old_inst.metadata().instigating_source {
                let source = source.path();
                if source != path {
                    log::debug!(
                        "Instigating source for Instance is mismatched so its metadata is being skipped.\nPath: {}",
                        path.display()
                    );
                    return Ok(None);
                }
            }
        }
        // `false` is the default, so only `true` is ever persisted below.
        let ignore_unknown_instances = snapshot
            .old_inst()
            .map(|inst| inst.metadata().ignore_unknown_instances)
            .unwrap_or_default();
        let class = &snapshot.new_inst().class;
        for (name, value) in snapshot.get_path_filtered_properties(snapshot.new).unwrap() {
            match value {
                // Attributes get their own map instead of being stored as a
                // single `Attributes` property.
                Variant::Attributes(attrs) => {
                    for (name, value) in attrs.iter() {
                        // We (probably) don't want to preserve internal
                        // attributes, only user defined ones.
                        if name.starts_with("RBX") {
                            continue;
                        }
                        attributes.insert(
                            name.to_owned(),
                            UnresolvedValue::from_variant_unambiguous(value.clone()),
                        );
                    }
                }
                _ => {
                    properties.insert(
                        name,
                        UnresolvedValue::from_variant(value.clone(), class, &name),
                    );
                }
            }
        }
        Ok(Some(Self {
            ignore_unknown_instances: if ignore_unknown_instances {
                Some(true)
            } else {
                None
            },
            properties,
            attributes,
            class_name: None,
            path,
            id: None,
            schema: None,
        }))
    }
pub fn apply_all(&mut self, snapshot: &mut InstanceSnapshot) -> anyhow::Result<()> {
self.apply_ignore_unknown_instances(snapshot);
self.apply_class_name(snapshot)?;
@@ -241,7 +404,7 @@ impl DirectoryMetadata {
fn apply_properties(&mut self, snapshot: &mut InstanceSnapshot) -> anyhow::Result<()> {
let path = &self.path;
for (key, unresolved) in self.properties.drain() {
for (key, unresolved) in std::mem::take(&mut self.properties) {
let value = unresolved
.resolve(&snapshot.class_name, &key)
.with_context(|| format!("error applying meta file {}", path.display()))?;
@@ -252,7 +415,7 @@ impl DirectoryMetadata {
if !self.attributes.is_empty() {
let mut attributes = Attributes::new();
for (key, unresolved) in self.attributes.drain() {
for (key, unresolved) in std::mem::take(&mut self.attributes) {
let value = unresolved.resolve_unambiguous()?;
attributes.insert(key, value);
}
@@ -275,6 +438,53 @@ impl DirectoryMetadata {
snapshot.metadata.specified_id = self.id.take().map(RojoRef::new);
Ok(())
}
    /// Returns whether the metadata is 'empty', meaning it doesn't have anything
    /// worth persisting in it. Specifically:
    ///
    /// - The number of properties and attributes is 0
    /// - `ignore_unknown_instances` is None
    /// - `class_name` is either None or Some("Folder") — `Folder` is the
    ///   default class for a directory, so it doesn't need to be written out
    #[inline]
    pub fn is_empty(&self) -> bool {
        self.attributes.is_empty()
            && self.properties.is_empty()
            && self.ignore_unknown_instances.is_none()
            && if let Some(class) = &self.class_name {
                class == "Folder"
            } else {
                true
            }
    }
}
/// Reads the `init.meta.json` that applies to the provided directory,
/// returning `None` when no such file exists.
pub fn dir_meta(vfs: &Vfs, path: &Path) -> anyhow::Result<Option<DirectoryMetadata>> {
    let meta_path = path.join("init.meta.json");
    match vfs.read(&meta_path).with_not_found()? {
        Some(contents) => {
            let parsed = DirectoryMetadata::from_slice(&contents, meta_path)?;
            Ok(Some(parsed))
        }
        None => Ok(None),
    }
}
/// Retrieves the meta file that should be applied for the provided file,
/// if it exists.
///
/// The `name` field should be the name the metadata should have.
pub fn file_meta(vfs: &Vfs, path: &Path, name: &str) -> anyhow::Result<Option<AdjacentMetadata>> {
    // Build `<name>.meta.json` directly instead of going through
    // `set_extension`: `set_extension("meta.json")` replaces everything after
    // the last `.` in the file name, so an instance named `foo.bar` would
    // wrongly resolve to `foo.meta.json` instead of `foo.bar.meta.json`.
    let meta_path = path.with_file_name(format!("{}.meta.json", name));
    if let Some(meta_contents) = vfs.read(&meta_path).with_not_found()? {
        let metadata = AdjacentMetadata::from_slice(&meta_contents, meta_path)?;
        Ok(Some(metadata))
    } else {
        Ok(None)
    }
}
#[cfg(test)]

View File

@@ -28,24 +28,34 @@ use anyhow::Context;
use memofs::{IoResultExt, Vfs};
use serde::{Deserialize, Serialize};
use crate::snapshot::{InstanceContext, InstanceSnapshot, SyncRule};
use crate::{glob::Glob, project::DEFAULT_PROJECT_NAMES};
use crate::{
glob::Glob,
project::DEFAULT_PROJECT_NAMES,
syncback::{SyncbackReturn, SyncbackSnapshot},
};
use crate::{
snapshot::{InstanceContext, InstanceSnapshot, SyncRule},
syncback::validate_file_name,
};
use self::{
csv::{snapshot_csv, snapshot_csv_init},
dir::snapshot_dir,
csv::{snapshot_csv, snapshot_csv_init, syncback_csv, syncback_csv_init},
dir::{snapshot_dir, syncback_dir},
json::snapshot_json,
json_model::snapshot_json_model,
lua::{snapshot_lua, snapshot_lua_init, ScriptType},
project::snapshot_project,
rbxm::snapshot_rbxm,
rbxmx::snapshot_rbxmx,
json_model::{snapshot_json_model, syncback_json_model},
lua::{snapshot_lua, snapshot_lua_init, syncback_lua, syncback_lua_init},
project::{snapshot_project, syncback_project},
rbxm::{snapshot_rbxm, syncback_rbxm},
rbxmx::{snapshot_rbxmx, syncback_rbxmx},
toml::snapshot_toml,
txt::snapshot_txt,
txt::{snapshot_txt, syncback_txt},
yaml::snapshot_yaml,
};
pub use self::{project::snapshot_project_node, util::emit_legacy_scripts_default};
pub use self::{
lua::ScriptType, project::snapshot_project_node, util::emit_legacy_scripts_default,
util::PathExt,
};
/// Returns an `InstanceSnapshot` for the provided path.
/// This will inspect the path and find the appropriate middleware for it,
@@ -63,41 +73,14 @@ pub fn snapshot_from_vfs(
};
if meta.is_dir() {
if let Some(init_path) = get_init_path(vfs, path)? {
// TODO: support user-defined init paths
// If and when we do, make sure to go support it in
// `Project::set_file_name`, as right now it special-cases
// `default.project.json` as an `init` path.
for rule in default_sync_rules() {
if rule.matches(&init_path) {
return match rule.middleware {
Middleware::Project => {
let name = init_path
.parent()
.and_then(Path::file_name)
.and_then(|s| s.to_str()).expect("default.project.json should be inside a folder with a unicode name");
snapshot_project(context, vfs, &init_path, name)
}
Middleware::ModuleScript => {
snapshot_lua_init(context, vfs, &init_path, ScriptType::Module)
}
Middleware::ServerScript => {
snapshot_lua_init(context, vfs, &init_path, ScriptType::Server)
}
Middleware::ClientScript => {
snapshot_lua_init(context, vfs, &init_path, ScriptType::Client)
}
Middleware::Csv => snapshot_csv_init(context, vfs, &init_path),
_ => snapshot_dir(context, vfs, path),
};
}
}
snapshot_dir(context, vfs, path)
} else {
snapshot_dir(context, vfs, path)
let (middleware, dir_name, init_path) = get_dir_middleware(vfs, path)?;
// TODO: Support user defined init paths
// If and when we do, make sure to go support it in
// `Project::set_file_name`, as right now it special-cases
// `default.project.json` as an `init` path.
match middleware {
Middleware::Dir => middleware.snapshot(context, vfs, path, dir_name),
_ => middleware.snapshot(context, vfs, &init_path, dir_name),
}
} else {
let file_name = path
@@ -116,55 +99,50 @@ pub fn snapshot_from_vfs(
}
}
/// Gets an `init` path for the given directory.
/// This uses an intrinsic priority list and for compatibility,
/// it should not be changed.
fn get_init_path<P: AsRef<Path>>(vfs: &Vfs, dir: P) -> anyhow::Result<Option<PathBuf>> {
let path = dir.as_ref();
/// Gets the appropriate middleware for a directory by checking for `init`
/// files. This uses an intrinsic priority list and for compatibility,
/// that order should be left unchanged.
///
/// Returns the middleware, the name of the directory, and the path to
/// the init location.
fn get_dir_middleware<'path>(
vfs: &Vfs,
dir_path: &'path Path,
) -> anyhow::Result<(Middleware, &'path str, PathBuf)> {
let dir_name = dir_path
.file_name()
.expect("Could not extract directory name")
.to_str()
.ok_or_else(|| anyhow::anyhow!("File name was not valid UTF-8: {}", dir_path.display()))?;
static INIT_PATHS: OnceLock<Vec<(Middleware, &str)>> = OnceLock::new();
let order = INIT_PATHS.get_or_init(|| {
vec![
(Middleware::ModuleScriptDir, "init.luau"),
(Middleware::ModuleScriptDir, "init.lua"),
(Middleware::ServerScriptDir, "init.server.luau"),
(Middleware::ServerScriptDir, "init.server.lua"),
(Middleware::ClientScriptDir, "init.client.luau"),
(Middleware::ClientScriptDir, "init.client.lua"),
(Middleware::CsvDir, "init.csv"),
]
});
for default_project_name in DEFAULT_PROJECT_NAMES {
let project_path = path.join(default_project_name);
let project_path = dir_path.join(default_project_name);
if vfs.metadata(&project_path).with_not_found()?.is_some() {
return Ok(Some(project_path));
return Ok((Middleware::Project, dir_name, project_path));
}
}
let init_path = path.join("init.luau");
if vfs.metadata(&init_path).with_not_found()?.is_some() {
return Ok(Some(init_path));
for (middleware, name) in order {
let test_path = dir_path.join(name);
if vfs.metadata(&test_path).with_not_found()?.is_some() {
return Ok((*middleware, dir_name, test_path));
}
}
let init_path = path.join("init.lua");
if vfs.metadata(&init_path).with_not_found()?.is_some() {
return Ok(Some(init_path));
}
let init_path = path.join("init.server.luau");
if vfs.metadata(&init_path).with_not_found()?.is_some() {
return Ok(Some(init_path));
}
let init_path = path.join("init.server.lua");
if vfs.metadata(&init_path).with_not_found()?.is_some() {
return Ok(Some(init_path));
}
let init_path = path.join("init.client.luau");
if vfs.metadata(&init_path).with_not_found()?.is_some() {
return Ok(Some(init_path));
}
let init_path = path.join("init.client.lua");
if vfs.metadata(&init_path).with_not_found()?.is_some() {
return Ok(Some(init_path));
}
let init_path = path.join("init.csv");
if vfs.metadata(&init_path).with_not_found()?.is_some() {
return Ok(Some(init_path));
}
Ok(None)
Ok((Middleware::Dir, dir_name, dir_path.to_path_buf()))
}
/// Gets a snapshot for a path given an InstanceContext and Vfs, taking
@@ -194,9 +172,10 @@ fn snapshot_from_path(
}
/// Represents a possible 'transformer' used by Rojo to turn a file system
/// item into a Roblox Instance. Missing from this list are directories and
/// metadata. This is deliberate, as metadata is not a snapshot middleware
/// and directories do not make sense to turn into files.
/// item into a Roblox Instance. Missing from this list is metadata.
/// This is deliberate, as metadata is not a snapshot middleware.
///
/// Directories cannot be used for sync rules so they're ignored by Serde.
#[derive(Debug, Clone, Copy, PartialEq, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub enum Middleware {
@@ -218,6 +197,17 @@ pub enum Middleware {
Text,
Yaml,
Ignore,
#[serde(skip_deserializing)]
Dir,
#[serde(skip_deserializing)]
ServerScriptDir,
#[serde(skip_deserializing)]
ClientScriptDir,
#[serde(skip_deserializing)]
ModuleScriptDir,
#[serde(skip_deserializing)]
CsvDir,
}
impl Middleware {
@@ -230,7 +220,7 @@ impl Middleware {
path: &Path,
name: &str,
) -> anyhow::Result<Option<InstanceSnapshot>> {
match self {
let mut output = match self {
Self::Csv => snapshot_csv(context, vfs, path, name),
Self::JsonModel => snapshot_json_model(context, vfs, path, name),
Self::Json => snapshot_json(context, vfs, path, name),
@@ -257,6 +247,120 @@ impl Middleware {
Self::Text => snapshot_txt(context, vfs, path, name),
Self::Yaml => snapshot_yaml(context, vfs, path, name),
Self::Ignore => Ok(None),
Self::Dir => snapshot_dir(context, vfs, path, name),
Self::ServerScriptDir => {
snapshot_lua_init(context, vfs, path, name, ScriptType::Server)
}
Self::ClientScriptDir => {
snapshot_lua_init(context, vfs, path, name, ScriptType::Client)
}
Self::ModuleScriptDir => {
snapshot_lua_init(context, vfs, path, name, ScriptType::Module)
}
Self::CsvDir => snapshot_csv_init(context, vfs, path, name),
};
if let Ok(Some(ref mut snapshot)) = output {
snapshot.metadata.middleware = Some(*self);
}
output
}
    /// Runs the syncback mechanism for the provided middleware given a
    /// SyncbackSnapshot.
    ///
    /// Before dispatching, the target file (or directory) name is validated
    /// so we never attempt to create a path the file system would reject.
    ///
    /// Middleware that cannot be produced by syncback (`Json`, `Toml`,
    /// `Yaml`, `Ignore`) and middleware that is not implemented yet return
    /// an error.
    pub fn syncback<'sync>(
        &self,
        snapshot: &SyncbackSnapshot<'sync>,
    ) -> anyhow::Result<SyncbackReturn<'sync>> {
        let file_name = snapshot.path.file_name().and_then(|s| s.to_str());
        if let Some(file_name) = file_name {
            validate_file_name(file_name).with_context(|| {
                format!("cannot create a file or directory with name {file_name}")
            })?;
        }
        match self {
            Middleware::Csv => syncback_csv(snapshot),
            Middleware::JsonModel => syncback_json_model(snapshot),
            Middleware::Json => anyhow::bail!("cannot syncback Json middleware"),
            // Projects are only generated from files that already exist on the
            // file system, so we don't need to pass a file name.
            Middleware::Project => syncback_project(snapshot),
            Middleware::ServerScript => syncback_lua(snapshot),
            Middleware::ClientScript => syncback_lua(snapshot),
            Middleware::ModuleScript => syncback_lua(snapshot),
            Middleware::Rbxm => syncback_rbxm(snapshot),
            Middleware::Rbxmx => syncback_rbxmx(snapshot),
            Middleware::Toml => anyhow::bail!("cannot syncback Toml middleware"),
            Middleware::Text => syncback_txt(snapshot),
            Middleware::Yaml => anyhow::bail!("cannot syncback Yaml middleware"),
            Middleware::Ignore => anyhow::bail!("cannot syncback Ignore middleware"),
            Middleware::Dir => syncback_dir(snapshot),
            Middleware::ServerScriptDir => syncback_lua_init(ScriptType::Server, snapshot),
            Middleware::ClientScriptDir => syncback_lua_init(ScriptType::Client, snapshot),
            Middleware::ModuleScriptDir => syncback_lua_init(ScriptType::Module, snapshot),
            Middleware::CsvDir => syncback_csv_init(snapshot),
            Middleware::PluginScript
            | Middleware::LegacyServerScript
            | Middleware::LegacyClientScript
            | Middleware::RunContextServerScript
            | Middleware::RunContextClientScript => {
                anyhow::bail!("syncback is not implemented for {self:?} yet")
            }
        }
    }
/// Returns whether this particular middleware would become a directory.
#[inline]
pub fn is_dir(&self) -> bool {
matches!(
self,
Middleware::Dir
| Middleware::ServerScriptDir
| Middleware::ClientScriptDir
| Middleware::ModuleScriptDir
| Middleware::CsvDir
)
}
/// Returns whether this particular middleware sets its own properties.
/// This applies to things like `JsonModel` and `Project`, since they
/// set properties without needing a meta.json file.
///
/// It does not cover middleware like `ServerScript` or `Csv` because they
/// need a meta.json file to set properties that aren't their designated
/// 'special' properties.
#[inline]
pub fn handles_own_properties(&self) -> bool {
matches!(
self,
Middleware::JsonModel | Middleware::Project | Middleware::Rbxm | Middleware::Rbxmx
)
}
    /// Attempts to return a middleware that should be used for the given path.
    ///
    /// Directories are resolved through their `init` files; plain files are
    /// matched against the provided sync rules first, then the built-in
    /// defaults, with the first match winning.
    ///
    /// Returns `Err` only if the Vfs cannot read information about the path.
    pub fn middleware_for_path(
        vfs: &Vfs,
        sync_rules: &[SyncRule],
        path: &Path,
    ) -> anyhow::Result<Option<Self>> {
        let meta = match vfs.metadata(path).with_not_found()? {
            Some(meta) => meta,
            None => return Ok(None),
        };
        if meta.is_dir() {
            let (middleware, _, _) = get_dir_middleware(vfs, path)?;
            Ok(Some(middleware))
        } else {
            // User-provided rules take precedence over the defaults.
            for rule in sync_rules.iter().chain(default_sync_rules()) {
                if rule.matches(path) {
                    return Ok(Some(rule.middleware));
                }
            }
            Ok(None)
        }
    }
}

View File

@@ -1,19 +1,27 @@
use std::{borrow::Cow, path::Path};
use std::{
borrow::Cow,
collections::{BTreeMap, HashMap, VecDeque},
path::Path,
};
use anyhow::{bail, Context};
use memofs::Vfs;
use rbx_dom_weak::{
types::{Attributes, Ref},
ustr, HashMapExt as _, Ustr, UstrMap,
types::{Attributes, Ref, Variant},
ustr, HashMapExt as _, Instance, Ustr, UstrMap,
};
use rbx_reflection::ClassTag;
use crate::{
project::{PathNode, Project, ProjectNode},
resolution::UnresolvedValue,
snapshot::{
InstanceContext, InstanceMetadata, InstanceSnapshot, InstigatingSource, PathIgnoreRule,
SyncRule,
InstanceContext, InstanceMetadata, InstanceSnapshot, InstanceWithMeta, InstigatingSource,
PathIgnoreRule, SyncRule,
},
snapshot_middleware::Middleware,
syncback::{filter_properties, FsSnapshot, SyncbackReturn, SyncbackSnapshot},
variant_eq::variant_eq,
RojoRef,
};
@@ -286,12 +294,12 @@ pub fn snapshot_project_node(
metadata.specified_id = Some(RojoRef::new(id.clone()))
}
metadata.instigating_source = Some(InstigatingSource::ProjectNode(
project_path.to_path_buf(),
instance_name.to_string(),
Box::new(node.clone()),
parent_class.map(|name| name.to_owned()),
));
metadata.instigating_source = Some(InstigatingSource::ProjectNode {
path: project_path.to_path_buf(),
name: instance_name.to_string(),
node: node.clone(),
parent_class: parent_class.map(|name| name.to_owned()),
});
Ok(Some(InstanceSnapshot {
snapshot_id: Ref::none(),
@@ -303,6 +311,318 @@ pub fn snapshot_project_node(
}))
}
/// Performs syncback for an Instance that originates from a project file.
///
/// The project file is reloaded from the VFS and its node tree is walked in
/// parallel with the matching Instances from the old (file system) and new
/// (input file) DOMs. Nodes with a `$path` are forwarded to the appropriate
/// middleware as child snapshots; property/attribute changes are written back
/// into the in-memory project, which is reserialized only if some node
/// actually changed.
///
/// # Errors
/// Fails if a node's class changed, if children of a project-made Instance
/// have duplicate names, or if syncback would need to add or remove an
/// Instance that is declared by the project file itself.
pub fn syncback_project<'sync>(
    snapshot: &SyncbackSnapshot<'sync>,
) -> anyhow::Result<SyncbackReturn<'sync>> {
    let old_inst = snapshot
        .old_inst()
        .expect("projects should always exist in both trees");
    // Generally, the path of a project is the first thing added to the relevant
    // paths. So, we take the last one.
    let project_path = old_inst
        .metadata()
        .relevant_paths
        .last()
        .expect("all projects should have a relevant path");

    let vfs = snapshot.vfs();

    log::debug!("Reloading project {} from vfs", project_path.display(),);
    let mut project = Project::load_exact(vfs, project_path, None)?;
    let base_path = project.folder_location().to_path_buf();
    // Sync rules for this project do not have their base rule set but it is
    // important when performing syncback on other projects.
    for rule in &mut project.sync_rules {
        rule.base_path.clone_from(&base_path)
    }

    // Child snapshots that still need their own syncback pass.
    let mut descendant_snapshots = Vec::new();
    // Old-tree Instances that no longer exist in the new tree.
    let mut removed_descendants = Vec::new();
    // Maps new-tree referents to the file system path their node points at,
    // so non-node children can be placed under their parent's path.
    let mut ref_to_path_map = HashMap::new();
    // Per-node name -> Instance lookups; drained every iteration.
    let mut old_child_map = HashMap::new();
    let mut new_child_map = HashMap::new();
    // (node properties, node attributes, old Instance) triples checked later
    // to decide whether the project file must be rewritten.
    let mut node_changed_map = Vec::new();

    // Breadth-first traversal over (project node, old Instance, new Instance).
    let mut node_queue = VecDeque::with_capacity(1);
    node_queue.push_back((&mut project.tree, old_inst, snapshot.new_inst()));

    while let Some((node, old_inst, new_inst)) = node_queue.pop_front() {
        log::debug!("Processing node {}", old_inst.name());
        if old_inst.class_name() != new_inst.class {
            anyhow::bail!(
                "Cannot change the class of {} in project file {}.\n\
                Current class is {}, it is a {} in the input file.",
                old_inst.name(),
                project_path.display(),
                old_inst.class_name(),
                new_inst.class
            );
        }
        // TODO handle meta.json files in this branch. Right now, we perform
        // syncback if a node has `$path` set but the Middleware aren't aware
        // that the Instances they're running on originate in a project.json.
        // As a result, the `meta.json` syncback code is hardcoded to not work
        // if the Instance originates from a project file. However, we should
        // ideally use a .meta.json over the project node if it exists already.
        if node.path.is_some() {
            // Since the node has a path, we have to run syncback on it.
            let node_path = node.path.as_ref().map(PathNode::path).expect(
                "Project nodes with a path must have a path \
                If you see this message, something went seriously wrong. Please report it.",
            );
            // `$path` may be relative to the project's folder.
            let full_path = if node_path.is_absolute() {
                node_path.to_path_buf()
            } else {
                base_path.join(node_path)
            };

            let middleware = match Middleware::middleware_for_path(
                snapshot.vfs(),
                &project.sync_rules,
                &full_path,
            )? {
                Some(middleware) => middleware,
                // The only way this can happen at this point is if the path does
                // not exist on the file system or there's no middleware for it.
                None => anyhow::bail!(
                    "path does not exist or could not be turned into a file Rojo understands: {}",
                    full_path.display()
                ),
            };
            descendant_snapshots.push(
                snapshot
                    .with_new_path(full_path.clone(), new_inst.referent(), Some(old_inst.id()))
                    .middleware(middleware),
            );
            ref_to_path_map.insert(new_inst.referent(), full_path);

            // We only want to set properties if it needs it.
            if !middleware.handles_own_properties() {
                project_node_property_syncback_path(snapshot, new_inst, node);
            }
        } else {
            project_node_property_syncback_no_path(snapshot, new_inst, node);
        }

        // Index new-tree children by name, rejecting duplicates: project
        // nodes can only address children by name.
        for child_ref in new_inst.children() {
            let child = snapshot
                .get_new_instance(*child_ref)
                .expect("all children of Instances should be in new DOM");
            if new_child_map.insert(&child.name, child).is_some() {
                anyhow::bail!(
                    "Instances that are direct children of an Instance that is made by a project file \
                    must have a unique name.\nThe child '{}' of '{}' is duplicated in the place file.", child.name, old_inst.name()
                );
            }
        }
        // Same indexing for the old tree.
        for child_ref in old_inst.children() {
            let child = snapshot
                .get_old_instance(*child_ref)
                .expect("all children of Instances should be in old DOM");
            if old_child_map.insert(child.name(), child).is_some() {
                anyhow::bail!(
                    "Instances that are direct children of an Instance that is made by a project file \
                    must have a unique name.\nThe child '{}' of '{}' is duplicated on the file system.", child.name(), old_inst.name()
                );
            }
        }

        // This loop does basic matching of Instance children to the node's
        // children. It ensures that `new_child_map` and `old_child_map` will
        // only contain Instances that don't belong to the project after this.
        for (child_name, child_node) in &mut node.children {
            // If a node's path is optional, we want to skip it if the path
            // doesn't exist since it isn't in the current old DOM.
            if let Some(path) = &child_node.path {
                if path.is_optional() {
                    let real_path = if path.path().is_absolute() {
                        path.path().to_path_buf()
                    } else {
                        base_path.join(path.path())
                    };
                    if !real_path.exists() {
                        log::warn!(
                            "Skipping node '{child_name}' of project because it is optional and not present on the disk.\n\
                            If this is not deliberate, please create a file or directory at {}", real_path.display()
                        );
                        continue;
                    }
                }
            }
            let new_equivalent = new_child_map.remove(child_name);
            let old_equivalent = old_child_map.remove(child_name.as_str());
            match (new_equivalent, old_equivalent) {
                // Present on both sides: recurse into the node.
                (Some(new), Some(old)) => node_queue.push_back((child_node, old, new)),
                // Project nodes cannot be added or removed by syncback.
                (_, None) => anyhow::bail!(
                    "The child '{child_name}' of Instance '{}' would be removed.\n\
                    Syncback cannot add or remove Instances from project {}",
                    old_inst.name(),
                    project_path.display()
                ),
                (None, _) => anyhow::bail!(
                    "The child '{child_name}' of Instance '{}' is present only in a project file,\n\
                    and not the provided file. Syncback cannot add or remove Instances from project:\n{}.",
                    old_inst.name(), project_path.display(),
                )
            }
        }

        // All of the children in this loop are by their nature not in the
        // project, so we just need to run syncback on them.
        for (name, new_child) in new_child_map.drain() {
            let parent_path = match ref_to_path_map.get(&new_child.parent()) {
                Some(path) => path.clone(),
                None => {
                    log::debug!("Skipping child {name} of node because it has no parent_path");
                    continue;
                }
            };
            // If a child also exists in the old tree, it will be caught in the
            // syncback on the project node path above (or is itself a node).
            // So the only things we need to run seperately is new children.
            if old_child_map.remove(name.as_str()).is_none() {
                let parent_middleware =
                    Middleware::middleware_for_path(vfs, &project.sync_rules, &parent_path)?
                        .expect("project nodes should have a middleware if they have children.");
                // If this node points directly to a project, it may still have
                // children but they'll be handled by syncback. This isn't a
                // concern with directories because they're singular things,
                // files that contain their own children.
                if parent_middleware != Middleware::Project {
                    descendant_snapshots.push(snapshot.with_base_path(
                        &parent_path,
                        new_child.referent(),
                        None,
                    )?);
                }
            }
        }
        // Whatever remains in the old map has no counterpart in the new tree.
        removed_descendants.extend(old_child_map.drain().map(|(_, v)| v));

        node_changed_map.push((&node.properties, &node.attributes, old_inst))
    }

    // Rewrite the project file only if at least one node actually diverged
    // from its Instance; a single change forces a full reserialize.
    let mut fs_snapshot = FsSnapshot::new();
    for (node_properties, node_attributes, old_inst) in node_changed_map {
        if project_node_should_reserialize(node_properties, node_attributes, old_inst)? {
            fs_snapshot.add_file(project_path, serde_json::to_vec_pretty(&project)?);
            break;
        }
    }

    Ok(SyncbackReturn {
        fs_snapshot,
        children: descendant_snapshots,
        removed_children: removed_descendants,
    })
}
fn project_node_property_syncback(
_snapshot: &SyncbackSnapshot,
filtered_properties: UstrMap<&Variant>,
new_inst: &Instance,
node: &mut ProjectNode,
) {
let properties = &mut node.properties;
let mut attributes = BTreeMap::new();
for (name, value) in filtered_properties {
match value {
Variant::Attributes(attrs) => {
for (attr_name, attr_value) in attrs.iter() {
// We (probably) don't want to preserve internal attributes,
// only user defined ones.
if attr_name.starts_with("RBX") {
continue;
}
attributes.insert(
attr_name.clone(),
UnresolvedValue::from_variant_unambiguous(attr_value.clone()),
);
}
}
_ => {
properties.insert(
name,
UnresolvedValue::from_variant(value.clone(), &new_inst.class, &name),
);
}
}
}
node.attributes = attributes;
}
/// Property syncback for nodes that have a `$path`: properties are filtered
/// against what the path's middleware already stores before being written
/// into the node.
fn project_node_property_syncback_path(
    snapshot: &SyncbackSnapshot,
    new_inst: &Instance,
    node: &mut ProjectNode,
) {
    let filtered = snapshot
        .get_path_filtered_properties(new_inst.referent())
        .unwrap();
    project_node_property_syncback(snapshot, filtered, new_inst, node);
}
/// Property syncback for path-less nodes: the node itself is the only place
/// the properties can live, so they're filtered against the project's rules
/// and written straight into it.
fn project_node_property_syncback_no_path(
    snapshot: &SyncbackSnapshot,
    new_inst: &Instance,
    node: &mut ProjectNode,
) {
    let filtered = filter_properties(snapshot.project(), new_inst);
    project_node_property_syncback(snapshot, filtered, new_inst, node);
}
/// Determines whether a project node's `$properties`/`$attributes` have
/// diverged from the corresponding Instance, meaning the project file must
/// be rewritten.
///
/// Returns `Ok(true)` on the first mismatch found; `Ok(false)` when the node
/// and Instance agree. Returns `Err` if a node value cannot be resolved to a
/// concrete `Variant` for comparison.
fn project_node_should_reserialize(
    node_properties: &BTreeMap<Ustr, UnresolvedValue>,
    node_attributes: &BTreeMap<String, UnresolvedValue>,
    instance: InstanceWithMeta,
) -> anyhow::Result<bool> {
    for (prop_name, unresolved_node_value) in node_properties {
        match instance.properties().get(prop_name) {
            Some(inst_value) => {
                let node_value = unresolved_node_value
                    .clone()
                    .resolve(&instance.class_name(), prop_name)?;
                if !variant_eq(inst_value, &node_value) {
                    return Ok(true);
                }
            }
            // The node sets a property the Instance no longer carries.
            None => return Ok(true),
        }
    }

    match instance.properties().get(&ustr("Attributes")) {
        Some(Variant::Attributes(inst_attributes)) => {
            // This will also catch if one is empty but the other isn't
            if node_attributes.len() != inst_attributes.len() {
                return Ok(true);
            }
            for (attr_name, unresolved_node_value) in node_attributes {
                match inst_attributes.get(attr_name.as_str()) {
                    Some(inst_value) => {
                        let node_value = unresolved_node_value.clone().resolve_unambiguous()?;
                        if !variant_eq(inst_value, &node_value) {
                            return Ok(true);
                        }
                    }
                    None => return Ok(true),
                }
            }
            Ok(false)
        }
        // `Attributes` exists but isn't an Attributes value; reserialize to
        // be safe.
        Some(_) => Ok(true),
        // The Instance has no attributes, so only reserialize if the node
        // declares some.
        None => Ok(!node_attributes.is_empty()),
    }
}
fn infer_class_name(name: &str, parent_class: Option<&str>) -> Option<Ustr> {
// If className wasn't defined from another source, we may be able
// to infer one.

View File

@@ -3,7 +3,10 @@ use std::path::Path;
use anyhow::Context;
use memofs::Vfs;
use crate::snapshot::{InstanceContext, InstanceMetadata, InstanceSnapshot};
use crate::{
snapshot::{InstanceContext, InstanceMetadata, InstanceSnapshot},
syncback::{FsSnapshot, SyncbackReturn, SyncbackSnapshot},
};
#[profiling::function]
pub fn snapshot_rbxm(
@@ -39,6 +42,24 @@ pub fn snapshot_rbxm(
}
}
/// Performs syncback for an `.rbxm` file by serializing the new Instance and
/// its entire subtree into a single binary model file. Children are not
/// returned separately because the file contains them.
pub fn syncback_rbxm<'sync>(
    snapshot: &SyncbackSnapshot<'sync>,
) -> anyhow::Result<SyncbackReturn<'sync>> {
    // Long-term, we probably want to have some logic for if this contains a
    // script. That's a future endeavor though.
    let mut contents = Vec::new();
    rbx_binary::to_writer(
        &mut contents,
        snapshot.new_tree(),
        &[snapshot.new_inst().referent()],
    )
    .context("failed to serialize new rbxm")?;

    Ok(SyncbackReturn {
        fs_snapshot: FsSnapshot::new().with_added_file(&snapshot.path, contents),
        children: Vec::new(),
        removed_children: Vec::new(),
    })
}
#[cfg(test)]
mod test {
use super::*;

View File

@@ -2,8 +2,12 @@ use std::path::Path;
use anyhow::Context;
use memofs::Vfs;
use rbx_xml::EncodeOptions;
use crate::snapshot::{InstanceContext, InstanceMetadata, InstanceSnapshot};
use crate::{
snapshot::{InstanceContext, InstanceMetadata, InstanceSnapshot},
syncback::{FsSnapshot, SyncbackReturn, SyncbackSnapshot},
};
pub fn snapshot_rbxmx(
context: &InstanceContext,
@@ -15,7 +19,7 @@ pub fn snapshot_rbxmx(
.property_behavior(rbx_xml::DecodePropertyBehavior::ReadUnknown);
let temp_tree = rbx_xml::from_reader(vfs.read(path)?.as_slice(), options)
.with_context(|| format!("Malformed rbxm file: {}", path.display()))?;
.with_context(|| format!("Malformed rbxmx file: {}", path.display()))?;
let root_instance = temp_tree.root();
let children = root_instance.children();
@@ -41,6 +45,32 @@ pub fn snapshot_rbxmx(
}
}
/// Performs syncback for an `.rbxmx` file by serializing the new Instance and
/// its entire subtree into a single XML model file. Unknown properties are
/// written out rather than dropped.
pub fn syncback_rbxmx<'sync>(
    snapshot: &SyncbackSnapshot<'sync>,
) -> anyhow::Result<SyncbackReturn<'sync>> {
    // Long-term, we probably want to have some logic for if this contains a
    // script. That's a future endeavor though.
    let options =
        EncodeOptions::new().property_behavior(rbx_xml::EncodePropertyBehavior::WriteUnknown);
    let mut contents = Vec::new();
    rbx_xml::to_writer(
        &mut contents,
        snapshot.new_tree(),
        &[snapshot.new_inst().referent()],
        options,
    )
    .context("failed to serialize new rbxmx")?;

    Ok(SyncbackReturn {
        fs_snapshot: FsSnapshot::new().with_added_file(&snapshot.path, contents),
        children: Vec::new(),
        removed_children: Vec::new(),
    })
}
#[cfg(test)]
mod test {
use super::*;

View File

@@ -14,6 +14,7 @@ metadata:
context:
emit_legacy_scripts: true
specified_id: ~
middleware: ~
name: foo
class_name: LocalizationTable
properties:

View File

@@ -21,6 +21,7 @@ metadata:
context:
emit_legacy_scripts: true
specified_id: ~
middleware: ~
name: root
class_name: LocalizationTable
properties:

View File

@@ -21,6 +21,7 @@ metadata:
context:
emit_legacy_scripts: true
specified_id: manually specified
middleware: ~
name: root
class_name: LocalizationTable
properties:

View File

@@ -14,6 +14,7 @@ metadata:
context:
emit_legacy_scripts: true
specified_id: ~
middleware: ~
name: foo
class_name: LocalizationTable
properties:

View File

@@ -21,6 +21,7 @@ metadata:
context:
emit_legacy_scripts: true
specified_id: ~
middleware: ~
name: foo
class_name: Folder
properties: {}

View File

@@ -21,6 +21,7 @@ metadata:
context:
emit_legacy_scripts: true
specified_id: ~
middleware: ~
name: foo
class_name: Folder
properties: {}
@@ -44,6 +45,7 @@ children:
context:
emit_legacy_scripts: true
specified_id: ~
middleware: dir
name: Child
class_name: Folder
properties: {}

View File

@@ -14,6 +14,7 @@ metadata:
context:
emit_legacy_scripts: true
specified_id: ~
middleware: ~
name: foo
class_name: ModuleScript
properties:

View File

@@ -14,6 +14,7 @@ metadata:
context:
emit_legacy_scripts: true
specified_id: manually specified
middleware: ~
name: foo
class_name: ModuleScript
properties:

View File

@@ -12,6 +12,7 @@ metadata:
context:
emit_legacy_scripts: true
specified_id: ~
middleware: ~
name: foo
class_name: IntValue
properties:
@@ -25,6 +26,7 @@ children:
context:
emit_legacy_scripts: true
specified_id: ~
middleware: ~
name: The Child
class_name: StringValue
properties: {}

View File

@@ -12,6 +12,7 @@ metadata:
context:
emit_legacy_scripts: true
specified_id: ~
middleware: ~
name: foo
class_name: IntValue
properties:
@@ -25,6 +26,7 @@ children:
context:
emit_legacy_scripts: true
specified_id: ~
middleware: ~
name: The Child
class_name: StringValue
properties: {}

View File

@@ -14,6 +14,7 @@ metadata:
context:
emit_legacy_scripts: true
specified_id: ~
middleware: ~
name: foo
class_name: LocalScript
properties:

View File

@@ -14,6 +14,7 @@ metadata:
context:
emit_legacy_scripts: true
specified_id: ~
middleware: ~
name: foo
class_name: ModuleScript
properties:

View File

@@ -14,6 +14,7 @@ metadata:
context:
emit_legacy_scripts: true
specified_id: ~
middleware: ~
name: foo
class_name: ModuleScript
properties:

View File

@@ -14,6 +14,7 @@ metadata:
context:
emit_legacy_scripts: true
specified_id: ~
middleware: ~
name: bar
class_name: Script
properties:

View File

@@ -14,6 +14,7 @@ metadata:
context:
emit_legacy_scripts: true
specified_id: ~
middleware: ~
name: foo
class_name: Script
properties:

View File

@@ -14,6 +14,7 @@ metadata:
context:
emit_legacy_scripts: true
specified_id: ~
middleware: ~
name: foo
class_name: Script
properties:

View File

@@ -21,6 +21,7 @@ metadata:
context:
emit_legacy_scripts: true
specified_id: ~
middleware: ~
name: root
class_name: ModuleScript
properties:

View File

@@ -21,6 +21,7 @@ metadata:
context:
emit_legacy_scripts: true
specified_id: manually specified
middleware: ~
name: root
class_name: ModuleScript
properties:

View File

@@ -14,6 +14,7 @@ metadata:
context:
emit_legacy_scripts: false
specified_id: ~
middleware: ~
name: foo
class_name: Script
properties:

View File

@@ -14,6 +14,7 @@ metadata:
context:
emit_legacy_scripts: false
specified_id: ~
middleware: ~
name: foo
class_name: Script
properties:

View File

@@ -14,6 +14,7 @@ metadata:
context:
emit_legacy_scripts: false
specified_id: ~
middleware: ~
name: foo
class_name: ModuleScript
properties:

View File

@@ -14,6 +14,7 @@ metadata:
context:
emit_legacy_scripts: false
specified_id: ~
middleware: ~
name: foo
class_name: ModuleScript
properties:

View File

@@ -14,6 +14,7 @@ metadata:
context:
emit_legacy_scripts: false
specified_id: ~
middleware: ~
name: bar
class_name: Script
properties:

View File

@@ -14,6 +14,7 @@ metadata:
context:
emit_legacy_scripts: false
specified_id: ~
middleware: ~
name: foo
class_name: Script
properties:

View File

@@ -14,6 +14,7 @@ metadata:
context:
emit_legacy_scripts: false
specified_id: ~
middleware: ~
name: foo
class_name: Script
properties:

View File

@@ -11,6 +11,7 @@ metadata:
context:
emit_legacy_scripts: true
specified_id: manually specified
middleware: ~
name: DEFAULT
class_name: DEFAULT
properties: {}

View File

@@ -11,6 +11,7 @@ metadata:
context:
emit_legacy_scripts: true
specified_id: manually specified
middleware: ~
name: DEFAULT
class_name: DEFAULT
properties: {}

View File

@@ -11,6 +11,7 @@ metadata:
context:
emit_legacy_scripts: true
specified_id: manually specified
middleware: ~
name: DEFAULT
class_name: DEFAULT
properties: {}

View File

@@ -11,6 +11,7 @@ metadata:
context:
emit_legacy_scripts: true
specified_id: manually specified
middleware: ~
name: DEFAULT
class_name: DEFAULT
properties: {}

View File

@@ -12,6 +12,7 @@ metadata:
context:
emit_legacy_scripts: true
specified_id: ~
middleware: ~
name: foo
class_name: Model
properties: {}

View File

@@ -12,6 +12,7 @@ metadata:
context:
emit_legacy_scripts: true
specified_id: ~
middleware: ~
name: direct-project
class_name: Model
properties: {}

View File

@@ -13,6 +13,7 @@ metadata:
context:
emit_legacy_scripts: true
specified_id: ~
middleware: project
name: path-property-override
class_name: StringValue
properties:

View File

@@ -12,6 +12,7 @@ metadata:
context:
emit_legacy_scripts: true
specified_id: ~
middleware: ~
name: children
class_name: Folder
properties: {}
@@ -21,14 +22,16 @@ children:
ignore_unknown_instances: true
instigating_source:
ProjectNode:
- /foo.project.json
- Child
- $className: Model
- Folder
path: /foo.project.json
name: Child
node:
$className: Model
parent_class: Folder
relevant_paths: []
context:
emit_legacy_scripts: true
specified_id: ~
middleware: ~
name: Child
class_name: Model
properties: {}

View File

@@ -13,6 +13,7 @@ metadata:
context:
emit_legacy_scripts: true
specified_id: ~
middleware: project
name: path-project
class_name: Model
properties: {}

View File

@@ -13,6 +13,7 @@ metadata:
context:
emit_legacy_scripts: true
specified_id: ~
middleware: project
name: path-child-project
class_name: Folder
properties: {}
@@ -22,14 +23,16 @@ children:
ignore_unknown_instances: true
instigating_source:
ProjectNode:
- /foo/other.project.json
- SomeChild
- $className: Model
- Folder
path: /foo/other.project.json
name: SomeChild
node:
$className: Model
parent_class: Folder
relevant_paths: []
context:
emit_legacy_scripts: true
specified_id: ~
middleware: ~
name: SomeChild
class_name: Model
properties: {}

View File

@@ -15,6 +15,7 @@ metadata:
context:
emit_legacy_scripts: true
specified_id: ~
middleware: text
name: path-project
class_name: StringValue
properties:

View File

@@ -12,6 +12,7 @@ metadata:
context:
emit_legacy_scripts: true
specified_id: ~
middleware: ~
name: resolved-properties
class_name: StringValue
properties:

View File

@@ -12,6 +12,7 @@ metadata:
context:
emit_legacy_scripts: true
specified_id: ~
middleware: ~
name: unresolved-properties
class_name: StringValue
properties:

View File

@@ -14,6 +14,7 @@ metadata:
context:
emit_legacy_scripts: true
specified_id: ~
middleware: ~
name: foo
class_name: ModuleScript
properties:

View File

@@ -14,6 +14,7 @@ metadata:
context:
emit_legacy_scripts: true
specified_id: manually specified
middleware: ~
name: foo
class_name: ModuleScript
properties:

View File

@@ -14,6 +14,7 @@ metadata:
context:
emit_legacy_scripts: true
specified_id: ~
middleware: ~
name: foo
class_name: StringValue
properties:

View File

@@ -14,6 +14,7 @@ metadata:
context:
emit_legacy_scripts: true
specified_id: manually specified
middleware: ~
name: foo
class_name: StringValue
properties:

View File

@@ -14,6 +14,7 @@ metadata:
context:
emit_legacy_scripts: true
specified_id: ~
middleware: ~
name: foo
class_name: ModuleScript
properties:

View File

@@ -14,6 +14,7 @@ metadata:
context:
emit_legacy_scripts: true
specified_id: manually specified
middleware: ~
name: foo
class_name: ModuleScript
properties:

View File

@@ -1,11 +1,16 @@
use std::{path::Path, str};
use anyhow::Context as _;
use memofs::Vfs;
use rbx_dom_weak::types::Variant;
use rbx_dom_weak::ustr;
use crate::snapshot::{InstanceContext, InstanceMetadata, InstanceSnapshot};
use crate::{
snapshot::{InstanceContext, InstanceMetadata, InstanceSnapshot},
syncback::{FsSnapshot, SyncbackReturn, SyncbackSnapshot},
};
use super::meta_file::AdjacentMetadata;
use super::{meta_file::AdjacentMetadata, PathExt as _};
pub fn snapshot_txt(
context: &InstanceContext,
@@ -32,6 +37,41 @@ pub fn snapshot_txt(
Ok(Some(snapshot))
}
/// Performs syncback for a `.txt` file backed by a StringValue: the `Value`
/// property becomes the file's contents, and any remaining meaningful
/// properties are written to an adjacent `.meta.json` file.
pub fn syncback_txt<'sync>(
    snapshot: &SyncbackSnapshot<'sync>,
) -> anyhow::Result<SyncbackReturn<'sync>> {
    let new_inst = snapshot.new_inst();
    let contents = match new_inst.properties.get(&ustr("Value")) {
        Some(Variant::String(source)) => source.as_bytes().to_vec(),
        _ => anyhow::bail!("StringValues must have a `Value` property that is a String"),
    };
    let mut fs_snapshot = FsSnapshot::new();
    fs_snapshot.add_file(&snapshot.path, contents);

    if let Some(mut meta) =
        AdjacentMetadata::from_syncback_snapshot(snapshot, snapshot.path.clone())?
    {
        // StringValues have relatively few properties that we care about, so
        // shifting is fine.
        meta.properties.shift_remove(&ustr("Value"));
        // Only emit a meta file when something besides `Value` remains.
        if !meta.is_empty() {
            let meta_path = snapshot
                .path
                .parent_err()?
                .join(format!("{}.meta.json", new_inst.name));
            fs_snapshot.add_file(
                meta_path,
                serde_json::to_vec_pretty(&meta).context("could not serialize metadata")?,
            );
        }
    }

    Ok(SyncbackReturn {
        fs_snapshot,
        children: Vec::new(),
        removed_children: Vec::new(),
    })
}
#[cfg(test)]
mod test {
use super::*;

View File

@@ -16,6 +16,7 @@ pub fn match_trailing<'a>(input: &'a str, suffix: &str) -> Option<&'a str> {
pub trait PathExt {
fn file_name_ends_with(&self, suffix: &str) -> bool;
fn file_name_trim_end<'a>(&'a self, suffix: &str) -> anyhow::Result<&'a str>;
fn parent_err(&self) -> anyhow::Result<&Path>;
}
impl<P> PathExt for P
@@ -40,6 +41,12 @@ where
match_trailing(file_name, suffix)
.with_context(|| format!("Path did not end in {}: {}", suffix, path.display()))
}
/// Like [`Path::parent`] but returns a descriptive error instead of `None`
/// when the path has no parent.
fn parent_err(&self) -> anyhow::Result<&Path> {
    let this = self.as_ref();
    match this.parent() {
        Some(parent) => Ok(parent),
        None => anyhow::bail!("Path does not have a parent: {}", this.display()),
    }
}
}
// TEMP function until rojo 8.0, when it can be replaced with bool::default (aka false)

128
src/syncback/file_names.rs Normal file
View File

@@ -0,0 +1,128 @@
//! Contains logic for generating new file names for Instances based on their
//! middleware.
use std::borrow::Cow;
use anyhow::Context;
use rbx_dom_weak::Instance;
use crate::{snapshot::InstanceWithMeta, snapshot_middleware::Middleware};
/// Computes the file (or directory) name an Instance should be written to.
///
/// When an old-tree counterpart exists, its existing on-disk name is reused.
/// Otherwise, directory middleware use the Instance's name verbatim and file
/// middleware append the middleware's extension after validating the name is
/// legal on the file system.
pub fn name_for_inst<'old>(
    middleware: Middleware,
    new_inst: &Instance,
    old_inst: Option<InstanceWithMeta<'old>>,
) -> anyhow::Result<Cow<'old, str>> {
    match old_inst {
        Some(old_inst) => match old_inst.metadata().relevant_paths.first() {
            Some(source) => source
                .file_name()
                .and_then(|s| s.to_str())
                .map(Cow::Borrowed)
                .context("sources on the file system should be valid unicode and not be stubs"),
            // This is technically not /always/ true, but we want to avoid
            // running syncback on anything that has no instigating source
            // anyway.
            None => anyhow::bail!(
                "members of 'old' trees should have an instigating source. Somehow, {} did not.",
                old_inst.name(),
            ),
        },
        // Directory middleware (`is_dir` covers exactly Dir, CsvDir, and the
        // three script-dir variants): the Instance name is the folder name.
        None if middleware.is_dir() => Ok(Cow::Owned(new_inst.name.clone())),
        None => {
            let extension = extension_for_middleware(middleware);
            let name = &new_inst.name;
            validate_file_name(name).with_context(|| {
                format!("name '{name}' is not legal to write to the file system")
            })?;
            Ok(Cow::Owned(format!("{name}.{extension}")))
        }
    }
}
/// Returns the file extension a given piece of middleware writes with.
///
/// Panics (via `todo!`/`unimplemented!`) for middleware that syncback does
/// not support and for directory middleware, which has no extension.
pub fn extension_for_middleware(middleware: Middleware) -> &'static str {
    match middleware {
        // Data formats.
        Middleware::Csv => "csv",
        Middleware::Json => "json",
        Middleware::JsonModel => "model.json",
        Middleware::Project => "project.json",
        Middleware::Rbxm => "rbxm",
        Middleware::Rbxmx => "rbxmx",
        Middleware::Text => "txt",
        Middleware::Toml => "toml",
        Middleware::Yaml => "yml",
        // Script formats.
        Middleware::ClientScript => "client.luau",
        Middleware::ModuleScript => "luau",
        Middleware::PluginScript => "plugin.luau",
        Middleware::ServerScript => "server.luau",
        Middleware::LegacyServerScript
        | Middleware::LegacyClientScript
        | Middleware::RunContextServerScript
        | Middleware::RunContextClientScript => {
            todo!("syncback does not work on the middleware {middleware:?} yet")
        }
        // These are manually specified and not `_` to guard against future
        // middleware additions missing this function.
        Middleware::Ignore => unimplemented!("syncback does not work on Ignore middleware"),
        Middleware::Dir
        | Middleware::CsvDir
        | Middleware::ServerScriptDir
        | Middleware::ClientScriptDir
        | Middleware::ModuleScriptDir => {
            unimplemented!("directory middleware requires special treatment")
        }
    }
}
/// A list of file names that are not valid on Windows. Windows treats these
/// as reserved case-insensitively, and also reserves them when they appear as
/// the stem of a name with an extension (e.g. `CON.txt`).
const INVALID_WINDOWS_NAMES: [&str; 22] = [
    "CON", "PRN", "AUX", "NUL", "COM1", "COM2", "COM3", "COM4", "COM5", "COM6", "COM7", "COM8",
    "COM9", "LPT1", "LPT2", "LPT3", "LPT4", "LPT5", "LPT6", "LPT7", "LPT8", "LPT9",
];
/// A list of all characters that are outright forbidden to be included
/// in a file's name.
const FORBIDDEN_CHARS: [char; 9] = ['<', '>', ':', '"', '/', '|', '?', '*', '\\'];

/// Validates a provided file name to ensure it's allowed on the file system. An
/// error is returned if the name isn't allowed, indicating why.
/// This takes into account rules for Windows, MacOS, and Linux.
///
/// In practice however, these broadly overlap so the only unexpected behavior
/// is Windows, where there are 22 reserved names.
pub fn validate_file_name<S: AsRef<str>>(name: S) -> anyhow::Result<()> {
    let str = name.as_ref();

    // Windows strips trailing spaces and dots, so such names round-trip badly.
    if str.ends_with(' ') {
        anyhow::bail!("file names cannot end with a space")
    }
    if str.ends_with('.') {
        anyhow::bail!("file names cannot end with '.'")
    }

    for char in str.chars() {
        if FORBIDDEN_CHARS.contains(&char) {
            anyhow::bail!("file names cannot contain <, >, :, \", /, |, ?, *, or \\")
        } else if char.is_control() {
            anyhow::bail!("file names cannot contain control characters")
        }
    }

    // Windows reserves device names case-insensitively ("con" is just as
    // invalid as "CON") and regardless of extension ("CON.txt" is also
    // reserved), so compare against the stem, ignoring ASCII case.
    let stem = str.split('.').next().unwrap_or(str);
    for forbidden in INVALID_WINDOWS_NAMES {
        if stem.eq_ignore_ascii_case(forbidden) {
            anyhow::bail!("files cannot be named {str}")
        }
    }

    Ok(())
}

191
src/syncback/fs_snapshot.rs Normal file
View File

@@ -0,0 +1,191 @@
use std::{
collections::{HashMap, HashSet},
io,
path::{Path, PathBuf},
};
use memofs::Vfs;
/// A simple representation of a subsection of a file system.
#[derive(Default)]
pub struct FsSnapshot {
/// Paths representing new files mapped to their contents.
added_files: HashMap<PathBuf, Vec<u8>>,
/// Paths representing new directories.
added_dirs: HashSet<PathBuf>,
/// Paths representing removed files.
removed_files: HashSet<PathBuf>,
/// Paths representing removed directories.
removed_dirs: HashSet<PathBuf>,
}
impl FsSnapshot {
/// Creates a new `FsSnapshot`.
pub fn new() -> Self {
Self {
added_files: HashMap::new(),
added_dirs: HashSet::new(),
removed_files: HashSet::new(),
removed_dirs: HashSet::new(),
}
}
/// Adds the given path to the `FsSnapshot` as a file with the given
/// contents, then returns it.
pub fn with_added_file<P: AsRef<Path>>(mut self, path: P, data: Vec<u8>) -> Self {
self.added_files.insert(path.as_ref().to_path_buf(), data);
self
}
/// Adds the given path to the `FsSnapshot` as a file with the given
/// then returns it.
pub fn with_added_dir<P: AsRef<Path>>(mut self, path: P) -> Self {
self.added_dirs.insert(path.as_ref().to_path_buf());
self
}
/// Merges two `FsSnapshot`s together.
#[inline]
pub fn merge(&mut self, other: Self) {
self.added_files.extend(other.added_files);
self.added_dirs.extend(other.added_dirs);
self.removed_files.extend(other.removed_files);
self.removed_dirs.extend(other.removed_dirs);
}
/// Merges two `FsSnapshot`s together, with a filter applied to the paths.
#[inline]
pub fn merge_with_filter<F>(&mut self, other: Self, mut predicate: F)
where
F: FnMut(&Path) -> bool,
{
self.added_files
.extend(other.added_files.into_iter().filter(|(k, _)| predicate(k)));
self.added_dirs
.extend(other.added_dirs.into_iter().filter(|p| predicate(p)));
self.removed_files
.extend(other.removed_files.into_iter().filter(|p| predicate(p)));
self.removed_dirs
.extend(other.removed_dirs.into_iter().filter(|p| predicate(p)));
}
/// Adds the provided path as a file with the given contents.
pub fn add_file<P: AsRef<Path>>(&mut self, path: P, data: Vec<u8>) {
self.added_files.insert(path.as_ref().to_path_buf(), data);
}
/// Adds the provided path as a directory.
pub fn add_dir<P: AsRef<Path>>(&mut self, path: P) {
self.added_dirs.insert(path.as_ref().to_path_buf());
}
/// Removes the provided path, as a file.
pub fn remove_file<P: AsRef<Path>>(&mut self, path: P) {
self.removed_files.insert(path.as_ref().to_path_buf());
}
/// Removes the provided path, as a directory.
pub fn remove_dir<P: AsRef<Path>>(&mut self, path: P) {
self.removed_dirs.insert(path.as_ref().to_path_buf());
}
/// Writes the `FsSnapshot` to the provided VFS, using the provided `base`
/// as a root for the other paths in the `FsSnapshot`.
///
/// This includes removals, but makes no effort to minimize work done.
pub fn write_to_vfs<P: AsRef<Path>>(&self, base: P, vfs: &Vfs) -> io::Result<()> {
let mut lock = vfs.lock();
let base_path = base.as_ref();
for dir_path in &self.added_dirs {
match lock.create_dir_all(base_path.join(dir_path)) {
Ok(_) => (),
Err(err) if err.kind() == io::ErrorKind::AlreadyExists => (),
Err(err) => return Err(err),
};
}
for (path, contents) in &self.added_files {
lock.write(base_path.join(path), contents)?;
}
for dir_path in &self.removed_dirs {
lock.remove_dir_all(base_path.join(dir_path))?;
}
for path in &self.removed_files {
lock.remove_file(base_path.join(path))?;
}
drop(lock);
log::debug!(
"Wrote {} directories and {} files to the file system",
self.added_dirs.len(),
self.added_files.len()
);
log::debug!(
"Removed {} directories and {} files from the file system",
self.removed_dirs.len(),
self.removed_files.len()
);
Ok(())
}
/// Reports whether this `FsSnapshot` contains no additions or removals.
#[inline]
pub fn is_empty(&self) -> bool {
    let has_changes = !self.added_files.is_empty()
        || !self.added_dirs.is_empty()
        || !self.removed_files.is_empty()
        || !self.removed_dirs.is_empty();
    !has_changes
}
/// Returns every path this `FsSnapshot` would create: files first, then
/// directories, each group sorted.
#[inline]
pub fn added_paths(&self) -> Vec<&Path> {
    let files = self.added_files();
    let dirs = self.added_dirs();
    let mut paths = Vec::with_capacity(files.len() + dirs.len());
    paths.extend(files);
    paths.extend(dirs);
    paths
}
/// Returns every path this `FsSnapshot` would delete: files first, then
/// directories, each group sorted.
#[inline]
pub fn removed_paths(&self) -> Vec<&Path> {
    let files = self.removed_files();
    let dirs = self.removed_dirs();
    let mut paths = Vec::with_capacity(files.len() + dirs.len());
    paths.extend(files);
    paths.extend(dirs);
    paths
}
/// Returns a list of file paths that would be added by this `FsSnapshot`
#[inline]
pub fn added_files(&self) -> Vec<&Path> {
let mut added_files: Vec<_> = self.added_files.keys().map(PathBuf::as_path).collect();
added_files.sort_unstable();
added_files
}
/// Returns a list of directory paths that would be added by this `FsSnapshot`
#[inline]
pub fn added_dirs(&self) -> Vec<&Path> {
let mut added_dirs: Vec<_> = self.added_dirs.iter().map(PathBuf::as_path).collect();
added_dirs.sort_unstable();
added_dirs
}
/// Returns a list of file paths that would be removed by this `FsSnapshot`
#[inline]
pub fn removed_files(&self) -> Vec<&Path> {
let mut removed_files: Vec<_> = self.removed_files.iter().map(PathBuf::as_path).collect();
removed_files.sort_unstable();
removed_files
}
/// Returns a list of directory paths that would be removed by this `FsSnapshot`
#[inline]
pub fn removed_dirs(&self) -> Vec<&Path> {
let mut removed_dirs: Vec<_> = self.removed_dirs.iter().map(PathBuf::as_path).collect();
removed_dirs.sort_unstable();
removed_dirs
}
}

122
src/syncback/hash/mod.rs Normal file
View File

@@ -0,0 +1,122 @@
//! Hashing utilities for a WeakDom.
mod variant;
pub use variant::*;
use blake3::{Hash, Hasher};
use rbx_dom_weak::{
types::{Ref, Variant},
Instance, Ustr, WeakDom,
};
use std::collections::HashMap;
use crate::{variant_eq::variant_eq, Project};
use super::{descendants, filter_properties_preallocated};
/// Returns a map of every `Ref` in the `WeakDom` to a hashed version of the
/// `Instance` it points to, including the properties and descendants of the
/// `Instance`.
///
/// The hashes **do** include the descendants of the Instances in them,
/// so they should only be used for comparing subtrees directly.
pub fn hash_tree(project: &Project, dom: &WeakDom, root_ref: Ref) -> HashMap<Ref, Hash> {
    // `descendants` lists parents before their children, so popping from the
    // end visits children first — every child's hash is already in `map`
    // by the time its parent is hashed (see `add_children`).
    let mut order = descendants(dom, root_ref);
    let mut map: HashMap<Ref, Hash> = HashMap::with_capacity(order.len());
    // Scratch buffers reused across iterations to avoid reallocating.
    let mut prop_list = Vec::with_capacity(2);
    let mut child_hashes = Vec::new();
    while let Some(referent) = order.pop() {
        let inst = dom.get_by_ref(referent).unwrap();
        let mut hasher = hash_inst_filtered(project, inst, &mut prop_list);
        add_children(inst, &map, &mut child_hashes, &mut hasher);
        map.insert(referent, hasher.finalize());
    }
    map
}
/// Hashes a single Instance from the provided WeakDom, if it exists.
///
/// This function filters properties using user-provided syncing rules from
/// the passed project. Unlike `hash_tree`, the resulting hash does not
/// include the Instance's descendants.
#[inline]
pub fn hash_instance(project: &Project, dom: &WeakDom, referent: Ref) -> Option<Hash> {
    let inst = dom.get_by_ref(referent)?;
    let mut prop_list = Vec::with_capacity(2);
    let hasher = hash_inst_filtered(project, inst, &mut prop_list);
    Some(hasher.finalize())
}
/// Folds the hashes of `inst`'s children into `hasher`.
///
/// `child_hashes` is a scratch buffer; it is fully drained before this
/// returns. The hashes are sorted first so the result does not depend on
/// child order.
///
/// Panics if any child of `inst` is missing from `map`.
fn add_children(
    inst: &Instance,
    map: &HashMap<Ref, Hash>,
    child_hashes: &mut Vec<[u8; 32]>,
    hasher: &mut Hasher,
) {
    for child_ref in inst.children() {
        match map.get(child_ref) {
            Some(hash) => child_hashes.push(*hash.as_bytes()),
            None => panic!("Invariant violated: child not hashed before parent"),
        }
    }
    child_hashes.sort_unstable();
    for hash in child_hashes.drain(..) {
        hasher.update(&hash);
    }
}
/// Performs hashing on an Instance using a filtered property list.
/// Does not include the hashes of any children.
///
/// `prop_list` is a scratch buffer: it is filled with the filtered
/// properties here and drained again inside `hash_inst_prefilled`, so it
/// is empty when this returns.
fn hash_inst_filtered<'inst>(
    project: &Project,
    inst: &'inst Instance,
    prop_list: &mut Vec<(Ustr, &'inst Variant)>,
) -> Hasher {
    filter_properties_preallocated(project, inst, prop_list);
    hash_inst_prefilled(inst, prop_list)
}
/// Performs hashing on an Instance using a pre-filled list of properties.
/// It is assumed the property list is **not** sorted, so it is sorted in-line.
///
/// The hash covers the Instance's name, class, and each property name
/// followed by its value. `prop_list` is drained, so it is empty when this
/// returns. Properties equal to their class default are hashed by name only
/// (their value bytes are skipped).
fn hash_inst_prefilled<'inst>(
    inst: &'inst Instance,
    prop_list: &mut Vec<(Ustr, &'inst Variant)>,
) -> Hasher {
    let mut hasher = Hasher::new();
    hasher.update(inst.name.as_bytes());
    hasher.update(inst.class.as_bytes());
    // Sort so the hash is independent of property iteration order.
    prop_list.sort_unstable_by_key(|(name, _)| *name);
    let descriptor = rbx_reflection_database::get()
        .unwrap()
        .classes
        .get(inst.class.as_str());
    if let Some(descriptor) = descriptor {
        for (name, value) in prop_list.drain(..) {
            hasher.update(name.as_bytes());
            if let Some(default) = descriptor.default_properties.get(name.as_str()) {
                // Only hash the value when it differs from the class
                // default; otherwise the name alone suffices.
                if !variant_eq(default, value) {
                    hash_variant(&mut hasher, value)
                }
            } else {
                hash_variant(&mut hasher, value)
            }
        }
    } else {
        // Unknown class: no defaults available, hash every value.
        for (name, value) in prop_list.drain(..) {
            hasher.update(name.as_bytes());
            hash_variant(&mut hasher, value)
        }
    }
    hasher
}

View File

@@ -0,0 +1,212 @@
use blake3::Hasher;
use rbx_dom_weak::types::{ContentType, PhysicalProperties, Variant, Vector3};
/// Rounds a float expression to one decimal place, so that insignificant
/// floating-point noise does not change a hash.
macro_rules! round {
    ($value:expr) => {
        (($value * 10.0).round() / 10.0)
    };
}
/// Feeds one or more numbers into the hasher as their little-endian bytes.
macro_rules! n_hash {
    ($hash:ident, $($num:expr),*) => {
        {$(
            $hash.update(&($num).to_le_bytes());
        )*}
    };
}
/// Feeds a byte slice into the hasher.
macro_rules! hash {
    ($hash:ident, $value:expr) => {{
        $hash.update($value);
    }};
}
/// Places `value` into the provided hasher.
pub fn hash_variant(hasher: &mut Hasher, value: &Variant) {
// We need to round floats, though I'm not sure to what degree we can
// realistically do that.
match value {
Variant::Attributes(attrs) => {
let mut sorted: Vec<(&String, &Variant)> = attrs.iter().collect();
sorted.sort_unstable_by_key(|(name, _)| *name);
for (name, attribute) in sorted {
hasher.update(name.as_bytes());
hash_variant(hasher, attribute);
}
}
Variant::Axes(a) => hash!(hasher, &[a.bits()]),
Variant::BinaryString(bytes) => hash!(hasher, bytes.as_ref()),
Variant::Bool(bool) => hash!(hasher, &[*bool as u8]),
Variant::BrickColor(color) => n_hash!(hasher, *color as u16),
Variant::CFrame(cf) => {
vector_hash(hasher, cf.position);
vector_hash(hasher, cf.orientation.x);
vector_hash(hasher, cf.orientation.y);
vector_hash(hasher, cf.orientation.z);
}
Variant::Color3(color) => {
n_hash!(hasher, round!(color.r), round!(color.g), round!(color.b))
}
Variant::Color3uint8(color) => hash!(hasher, &[color.r, color.b, color.g]),
Variant::ColorSequence(seq) => {
let mut new = Vec::with_capacity(seq.keypoints.len());
for keypoint in &seq.keypoints {
new.push(keypoint);
}
new.sort_unstable_by(|a, b| round!(a.time).partial_cmp(&round!(b.time)).unwrap());
for keypoint in new {
n_hash!(
hasher,
round!(keypoint.time),
round!(keypoint.color.r),
round!(keypoint.color.g),
round!(keypoint.color.b)
)
}
}
Variant::Content(content) => match content.value() {
ContentType::None => {
hash!(hasher, &[0]);
}
ContentType::Uri(uri) => {
hash!(hasher, &[1]);
hash!(hasher, uri.as_bytes());
}
ContentType::Object(referent) => {
hash!(hasher, &[2]);
hash!(hasher, referent.to_string().as_bytes())
}
other => {
panic!("the ContentType {other:?} cannot be hashed as a Variant")
}
},
Variant::ContentId(content) => {
let s: &str = content.as_ref();
hash!(hasher, s.as_bytes())
}
Variant::Enum(e) => n_hash!(hasher, e.to_u32()),
Variant::Faces(f) => hash!(hasher, &[f.bits()]),
Variant::Float32(n) => n_hash!(hasher, round!(*n)),
Variant::Float64(n) => n_hash!(hasher, round!(n)),
Variant::Font(f) => {
n_hash!(hasher, f.weight as u16);
n_hash!(hasher, f.style as u8);
hash!(hasher, f.family.as_bytes());
if let Some(cache) = &f.cached_face_id {
hash!(hasher, &[0x01]);
hash!(hasher, cache.as_bytes());
} else {
hash!(hasher, &[0x00]);
}
}
Variant::Int32(n) => n_hash!(hasher, n),
Variant::Int64(n) => n_hash!(hasher, n),
Variant::MaterialColors(n) => hash!(hasher, n.encode().as_slice()),
Variant::NetAssetRef(net_asset) => hash!(hasher, net_asset.hash().as_bytes()),
Variant::NumberRange(nr) => n_hash!(hasher, round!(nr.max), round!(nr.min)),
Variant::NumberSequence(seq) => {
let mut new = Vec::with_capacity(seq.keypoints.len());
for keypoint in &seq.keypoints {
new.push(keypoint);
}
new.sort_unstable_by(|a, b| round!(a.time).partial_cmp(&round!(b.time)).unwrap());
for keypoint in new {
n_hash!(
hasher,
round!(keypoint.time),
round!(keypoint.value),
round!(keypoint.envelope)
)
}
}
Variant::OptionalCFrame(maybe_cf) => {
if let Some(cf) = maybe_cf {
hash!(hasher, &[0x01]);
vector_hash(hasher, cf.position);
vector_hash(hasher, cf.orientation.x);
vector_hash(hasher, cf.orientation.y);
vector_hash(hasher, cf.orientation.z);
} else {
hash!(hasher, &[0x00]);
}
}
Variant::PhysicalProperties(properties) => match properties {
PhysicalProperties::Default => hash!(hasher, &[0x00]),
PhysicalProperties::Custom(custom) => {
hash!(hasher, &[0x00]);
n_hash!(
hasher,
round!(custom.density()),
round!(custom.friction()),
round!(custom.elasticity()),
round!(custom.friction_weight()),
round!(custom.elasticity_weight()),
round!(custom.acoustic_absorption())
)
}
},
Variant::Ray(ray) => {
vector_hash(hasher, ray.origin);
vector_hash(hasher, ray.direction);
}
Variant::Rect(rect) => n_hash!(
hasher,
round!(rect.max.x),
round!(rect.max.y),
round!(rect.min.x),
round!(rect.min.y)
),
Variant::Ref(referent) => hash!(hasher, referent.to_string().as_bytes()),
Variant::Region3(region) => {
vector_hash(hasher, region.max);
vector_hash(hasher, region.min);
}
Variant::Region3int16(region) => {
n_hash!(
hasher,
region.max.x,
region.max.y,
region.max.z,
region.min.x,
region.min.y,
region.min.z
)
}
Variant::SecurityCapabilities(capabilities) => n_hash!(hasher, capabilities.bits()),
Variant::SharedString(sstr) => hash!(hasher, sstr.hash().as_bytes()),
Variant::String(str) => hash!(hasher, str.as_bytes()),
Variant::Tags(tags) => {
let mut dupe: Vec<&str> = tags.iter().collect();
dupe.sort_unstable();
for tag in dupe {
hash!(hasher, tag.as_bytes())
}
}
Variant::UDim(udim) => n_hash!(hasher, round!(udim.scale), udim.offset),
Variant::UDim2(udim) => n_hash!(
hasher,
round!(udim.y.scale),
udim.y.offset,
round!(udim.x.scale),
udim.x.offset
),
Variant::Vector2(v2) => n_hash!(hasher, round!(v2.x), round!(v2.y)),
Variant::Vector2int16(v2) => n_hash!(hasher, v2.x, v2.y),
Variant::Vector3(v3) => vector_hash(hasher, *v3),
Variant::Vector3int16(v3) => n_hash!(hasher, v3.x, v3.y, v3.z),
// Hashing UniqueId properties doesn't make sense
Variant::UniqueId(_) => (),
unknown => {
log::warn!(
"Encountered unknown Variant {:?} while hashing",
unknown.ty()
)
}
}
}
/// Hashes a `Vector3` componentwise, rounding each component to one decimal
/// place (see `round!`) before feeding its little-endian bytes to `hasher`.
fn vector_hash(hasher: &mut Hasher, vector: Vector3) {
    n_hash!(hasher, round!(vector.x), round!(vector.y), round!(vector.z))
}

534
src/syncback/mod.rs Normal file
View File

@@ -0,0 +1,534 @@
mod file_names;
mod fs_snapshot;
mod hash;
mod property_filter;
mod ref_properties;
mod snapshot;
use anyhow::Context;
use indexmap::IndexMap;
use memofs::Vfs;
use rbx_dom_weak::{
types::{Ref, Variant},
ustr, Instance, Ustr, UstrSet, WeakDom,
};
use serde::{Deserialize, Serialize};
use std::{
collections::{HashMap, HashSet, VecDeque},
env,
path::Path,
sync::OnceLock,
};
use crate::{
glob::Glob,
snapshot::{InstanceWithMeta, RojoTree},
snapshot_middleware::Middleware,
syncback::ref_properties::{collect_referents, link_referents},
Project,
};
pub use file_names::{extension_for_middleware, name_for_inst, validate_file_name};
pub use fs_snapshot::FsSnapshot;
pub use hash::*;
pub use property_filter::{filter_properties, filter_properties_preallocated};
pub use snapshot::{SyncbackData, SyncbackSnapshot};
/// The name of an environment variable to use to override the behavior of
/// syncback on model files.
/// By default, syncback will use `Rbxm` for model files.
/// If this is set to `1`, it will instead use `Rbxmx`. If it is set to `2`,
/// it will use `JsonModel`.
///
/// This will **not** override existing `Rbxm` middleware. It will only impact
/// new files.
const DEBUG_MODEL_FORMAT_VAR: &str = "ROJO_SYNCBACK_DEBUG";
/// A glob that can be used to tell if a path contains a `.git` folder.
static GIT_IGNORE_GLOB: OnceLock<Glob> = OnceLock::new();
/// Reconciles `new_tree` (a DOM typically read from a Roblox file) against
/// `old_tree` (the DOM built from the project on disk) and returns an
/// `FsSnapshot` describing the file additions and removals needed to make
/// the project's files match `new_tree`.
///
/// Subtrees whose hashes match in both DOMs are skipped, and the project's
/// `syncback_rules` (ignored trees, ignored paths, camera removal, referent
/// linking) are applied throughout. Nothing is written to disk here; the
/// caller decides what to do with the returned `FsSnapshot`.
pub fn syncback_loop(
    vfs: &Vfs,
    old_tree: &mut RojoTree,
    mut new_tree: WeakDom,
    project: &Project,
) -> anyhow::Result<FsSnapshot> {
    // Compile the user's ignorePaths globs once up front.
    let ignore_patterns = project
        .syncback_rules
        .as_ref()
        .map(|rules| rules.compile_globs())
        .transpose()?;
    // TODO: Add a better way to tell if the root of a project is a directory
    let skip_pruning = if let Some(path) = &project.tree.path {
        let middleware =
            Middleware::middleware_for_path(vfs, &project.sync_rules, path.path()).unwrap();
        if let Some(middleware) = middleware {
            middleware.is_dir()
        } else {
            false
        }
    } else {
        false
    };
    if !skip_pruning {
        // Strip out any objects from the new tree that aren't in the old tree. This
        // is necessary so that hashing the roots of each tree won't always result
        // in different hashes. Shout out to Roblox for serializing a bunch of
        // Services nobody cares about.
        log::debug!("Pruning new tree");
        strip_unknown_root_children(&mut new_tree, old_tree);
    }
    // Collect Ref properties now, before property filtering removes them.
    log::debug!("Collecting referents for new DOM...");
    let deferred_referents = collect_referents(&new_tree);
    // Remove any properties that are manually blocked from syncback via the
    // project file.
    log::debug!("Pre-filtering properties on DOMs");
    for referent in descendants(&new_tree, new_tree.root_ref()) {
        let new_inst = new_tree.get_by_ref_mut(referent).unwrap();
        if let Some(filter) = get_property_filter(project, new_inst) {
            for prop in filter {
                new_inst.properties.remove(&prop);
            }
        }
    }
    for referent in descendants(old_tree.inner(), old_tree.get_root_id()) {
        let mut old_inst_rojo = old_tree.get_instance_mut(referent).unwrap();
        let old_inst = old_inst_rojo.inner_mut();
        if let Some(filter) = get_property_filter(project, old_inst) {
            for prop in filter {
                old_inst.properties.remove(&prop);
            }
        }
    }
    // Handle removing the current camera.
    if let Some(syncback_rules) = &project.syncback_rules {
        if !syncback_rules.sync_current_camera.unwrap_or_default() {
            log::debug!("Removing CurrentCamera from new DOM");
            let mut camera_ref = None;
            for child_ref in new_tree.root().children() {
                let inst = new_tree.get_by_ref(*child_ref).unwrap();
                if inst.class == "Workspace" {
                    camera_ref = inst.properties.get(&ustr("CurrentCamera"));
                    break;
                }
            }
            if let Some(Variant::Ref(camera_ref)) = camera_ref {
                if new_tree.get_by_ref(*camera_ref).is_some() {
                    new_tree.destroy(*camera_ref);
                }
            }
        }
    }
    let ignore_referents = project
        .syncback_rules
        .as_ref()
        .and_then(|s| s.ignore_referents)
        .unwrap_or_default();
    if !ignore_referents {
        log::debug!("Linking referents for new DOM");
        link_referents(deferred_referents, &mut new_tree)?;
    } else {
        log::debug!("Skipping referent linking as per project syncback rules");
    }
    // As with pruning the children of the new root, we need to ensure the roots
    // for both DOMs have the same name otherwise their hashes will always be
    // different.
    new_tree.root_mut().name = old_tree.root().name().to_string();
    log::debug!("Hashing project DOM");
    let old_hashes = hash_tree(project, old_tree.inner(), old_tree.get_root_id());
    log::debug!("Hashing file DOM");
    let new_hashes = hash_tree(project, &new_tree, new_tree.root_ref());
    let project_path = project.folder_location();
    let syncback_data = SyncbackData {
        vfs,
        old_tree,
        new_tree: &new_tree,
        project,
    };
    // Depth-first work list, seeded with the paired roots of both DOMs.
    let mut snapshots = vec![SyncbackSnapshot {
        data: syncback_data,
        old: Some(old_tree.get_root_id()),
        new: new_tree.root_ref(),
        path: project.file_location.clone(),
        middleware: Some(Middleware::Project),
    }];
    let mut fs_snapshot = FsSnapshot::new();
    'syncback: while let Some(snapshot) = snapshots.pop() {
        let inst_path = snapshot.get_new_inst_path(snapshot.new);
        // We can quickly check that two subtrees are identical and if they are,
        // skip reconciling them.
        if let Some(old_ref) = snapshot.old {
            match (old_hashes.get(&old_ref), new_hashes.get(&snapshot.new)) {
                (Some(old), Some(new)) => {
                    if old == new {
                        log::trace!(
                            "Skipping {inst_path} due to it being identically hashed as {old:?}"
                        );
                        continue;
                    }
                }
                _ => unreachable!("All Instances in both DOMs should have hashes"),
            }
        }
        if !is_valid_path(&ignore_patterns, project_path, &snapshot.path) {
            log::debug!("Skipping {inst_path} because its path matches ignore pattern");
            continue;
        }
        if let Some(syncback_rules) = &project.syncback_rules {
            // Skip any trees the project blocks from syncback.
            for ignored in &syncback_rules.ignore_trees {
                if inst_path.starts_with(ignored.as_str()) {
                    log::debug!("Tree {inst_path} is blocked by project");
                    continue 'syncback;
                }
            }
        }
        let middleware = get_best_middleware(&snapshot);
        log::trace!(
            "Middleware for {inst_path} is {:?} (path is {})",
            middleware,
            snapshot.path.display()
        );
        if matches!(middleware, Middleware::Json | Middleware::Toml) {
            log::warn!("Cannot syncback {middleware:?} at {inst_path}, skipping");
            continue;
        }
        let syncback = match middleware.syncback(&snapshot) {
            Ok(syncback) => syncback,
            // A directory that can't be synced back as a directory is
            // retried as a model file instead.
            Err(err) if middleware == Middleware::Dir => {
                let new_middleware = match env::var(DEBUG_MODEL_FORMAT_VAR) {
                    Ok(value) if value == "1" => Middleware::Rbxmx,
                    Ok(value) if value == "2" => Middleware::JsonModel,
                    _ => Middleware::Rbxm,
                };
                let file_name = snapshot
                    .path
                    .file_name()
                    .and_then(|s| s.to_str())
                    .context("Directory middleware should have a name in its path")?;
                let mut path = snapshot.path.clone();
                path.set_file_name(format!(
                    "{file_name}.{}",
                    extension_for_middleware(new_middleware)
                ));
                let new_snapshot = snapshot.with_new_path(path, snapshot.new, snapshot.old);
                log::warn!(
                    "Could not syncback {inst_path} as a Directory because: {err}.\n\
                    It will instead be synced back as a {new_middleware:?}."
                );
                let new_syncback_result = new_middleware
                    .syncback(&new_snapshot)
                    .with_context(|| format!("Failed to syncback {inst_path}"));
                if new_syncback_result.is_ok() && snapshot.old_inst().is_some() {
                    // We need to remove the old FS representation if we're
                    // reserializing it as an rbxm.
                    fs_snapshot.remove_dir(&snapshot.path);
                }
                new_syncback_result?
            }
            Err(err) => anyhow::bail!("Failed to syncback {inst_path} because {err}"),
        };
        if !syncback.removed_children.is_empty() {
            log::debug!(
                "removed children for {inst_path}: {}",
                syncback.removed_children.len()
            );
            'remove: for inst in &syncback.removed_children {
                let path = inst.metadata().instigating_source.as_ref().unwrap().path();
                let inst_path = snapshot.get_old_inst_path(inst.id());
                if !is_valid_path(&ignore_patterns, project_path, path) {
                    log::debug!(
                        "Skipping removing {} because its matches an ignore pattern",
                        path.display()
                    );
                    continue;
                }
                if let Some(syncback_rules) = &project.syncback_rules {
                    for ignored in &syncback_rules.ignore_trees {
                        if inst_path.starts_with(ignored.as_str()) {
                            log::debug!("Skipping removing {inst_path} because its path is blocked by project");
                            continue 'remove;
                        }
                    }
                }
                if path.is_dir() {
                    fs_snapshot.remove_dir(path)
                } else {
                    fs_snapshot.remove_file(path)
                }
            }
        }
        // TODO provide replacement snapshots for e.g. two way sync
        fs_snapshot.merge_with_filter(syncback.fs_snapshot, |path| {
            is_valid_path(&ignore_patterns, project_path, path)
        });
        snapshots.extend(syncback.children);
    }
    Ok(fs_snapshot)
}
/// The result of one middleware's syncback pass over a single Instance.
pub struct SyncbackReturn<'sync> {
    // File additions and removals produced for this Instance.
    pub fs_snapshot: FsSnapshot,
    // Child snapshots still to be processed by `syncback_loop`.
    pub children: Vec<SyncbackSnapshot<'sync>>,
    // Instances from the old tree whose file representation should be
    // removed.
    pub removed_children: Vec<InstanceWithMeta<'sync>>,
}
/// Picks the middleware to use when writing the Instance referred to by
/// `snapshot`.
///
/// Precedence: an explicit override on the snapshot, then the middleware
/// recorded on the old Instance's metadata, then a class-based default.
/// Script-like and text-like middleware are promoted to their directory
/// forms when the Instance has children, and `Rbxm` can be swapped for
/// another model format via the `ROJO_SYNCBACK_DEBUG` environment variable.
pub fn get_best_middleware(snapshot: &SyncbackSnapshot) -> Middleware {
    // At some point, we're better off using an O(1) method for checking
    // equality for classes like this.
    static JSON_MODEL_CLASSES: OnceLock<HashSet<&str>> = OnceLock::new();
    let json_model_classes = JSON_MODEL_CLASSES.get_or_init(|| {
        [
            "Sound",
            "SoundGroup",
            "Sky",
            "Atmosphere",
            "BloomEffect",
            "BlurEffect",
            "ColorCorrectionEffect",
            "DepthOfFieldEffect",
            "SunRaysEffect",
            "ParticleEmitter",
            "TextChannel",
            "TextChatCommand",
            // TODO: Implement a way to use inheritance for this
            "ChatWindowConfiguration",
            "ChatInputBarConfiguration",
            "BubbleChatConfiguration",
            "ChannelTabsConfiguration",
        ]
        .into()
    });
    let old_middleware = snapshot
        .old_inst()
        .and_then(|inst| inst.metadata().middleware);
    let inst = snapshot.new_inst();
    let mut middleware;
    if let Some(override_middleware) = snapshot.middleware {
        return override_middleware;
    } else if let Some(old_middleware) = old_middleware {
        return old_middleware;
    } else if json_model_classes.contains(inst.class.as_str()) {
        middleware = Middleware::JsonModel;
    } else {
        middleware = match inst.class.as_str() {
            "Folder" | "Configuration" | "Tool" => Middleware::Dir,
            "StringValue" => Middleware::Text,
            "Script" => Middleware::ServerScript,
            "LocalScript" => Middleware::ClientScript,
            "ModuleScript" => Middleware::ModuleScript,
            "LocalizationTable" => Middleware::Csv,
            // This isn't the ideal way to handle this but it works.
            name if name.ends_with("Value") => Middleware::JsonModel,
            _ => Middleware::Rbxm,
        }
    }
    // Instances with children cannot live in a single flat file; promote
    // to the equivalent directory-based middleware.
    if !inst.children().is_empty() {
        middleware = match middleware {
            Middleware::ServerScript => Middleware::ServerScriptDir,
            Middleware::ClientScript => Middleware::ClientScriptDir,
            Middleware::ModuleScript => Middleware::ModuleScriptDir,
            Middleware::Csv => Middleware::CsvDir,
            Middleware::JsonModel | Middleware::Text => Middleware::Dir,
            _ => middleware,
        }
    }
    if middleware == Middleware::Rbxm {
        middleware = match env::var(DEBUG_MODEL_FORMAT_VAR) {
            Ok(value) if value == "1" => Middleware::Rbxmx,
            Ok(value) if value == "2" => Middleware::JsonModel,
            _ => Middleware::Rbxm,
        }
    }
    middleware
}
/// User-configurable rules controlling syncback behavior, deserialized from
/// the project file's `syncbackRules` field (camelCase keys).
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
#[serde(deny_unknown_fields, rename_all = "camelCase")]
pub struct SyncbackRules {
    /// A list of subtrees in a file that will be ignored by Syncback.
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    ignore_trees: Vec<String>,
    /// A list of patterns to check against the path an Instance would serialize
    /// to. If a path matches one of these, the Instance won't be syncbacked.
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    ignore_paths: Vec<String>,
    /// A map of classes to properties to ignore for that class when doing
    /// syncback.
    #[serde(default, skip_serializing_if = "IndexMap::is_empty")]
    ignore_properties: IndexMap<Ustr, Vec<Ustr>>,
    /// Whether or not the `CurrentCamera` of `Workspace` is included in the
    /// syncback or not. Defaults to `false`.
    #[serde(skip_serializing_if = "Option::is_none")]
    sync_current_camera: Option<bool>,
    /// Whether or not to sync properties that cannot be modified via scripts.
    /// Defaults to `true`.
    #[serde(skip_serializing_if = "Option::is_none")]
    sync_unscriptable: Option<bool>,
    /// Whether to skip serializing referent properties like `Model.PrimaryPart`
    /// during syncback. Defaults to `false`.
    #[serde(skip_serializing_if = "Option::is_none")]
    ignore_referents: Option<bool>,
    /// Whether the globs specified in `ignore_paths` should be modified to also
    /// match directories. Defaults to `true`.
    ///
    /// If this is `true`, it'll take ignore globs that end in `/**` and convert
    /// them to also handle the directory they're referring to. This is
    /// generally a better UX.
    #[serde(skip_serializing_if = "Option::is_none")]
    create_ignore_dir_paths: Option<bool>,
}
impl SyncbackRules {
    /// Compiles every pattern in `ignore_paths` into a `Glob`, failing if
    /// any pattern is invalid.
    ///
    /// When `create_ignore_dir_paths` is enabled (the default), a pattern
    /// ending in `/**` also produces a second glob matching the directory
    /// itself.
    pub fn compile_globs(&self) -> anyhow::Result<Vec<Glob>> {
        let dir_ignore_paths = self.create_ignore_dir_paths.unwrap_or(true);
        let mut globs = Vec::with_capacity(self.ignore_paths.len());
        for pattern in &self.ignore_paths {
            globs.push(
                Glob::new(pattern)
                    .with_context(|| format!("the pattern '{pattern}' is not a valid glob"))?,
            );
            if !dir_ignore_paths {
                continue;
            }
            let Some(dir_pattern) = pattern.strip_suffix("/**") else {
                continue;
            };
            // Invalid directory variants are silently skipped; the original
            // glob above already covers the pattern itself.
            if let Ok(glob) = Glob::new(dir_pattern) {
                globs.push(glob);
            }
        }
        Ok(globs)
    }
}
/// Returns `true` if `path` is allowed to be written during syncback.
///
/// The path is first made relative to `base_path` when possible; `.git`
/// contents are always rejected, followed by anything matching one of the
/// user's compiled ignore globs.
fn is_valid_path(globs: &Option<Vec<Glob>>, base_path: &Path, path: &Path) -> bool {
    let git_glob = GIT_IGNORE_GLOB.get_or_init(|| Glob::new(".git/**").unwrap());
    let test_path = path.strip_prefix(base_path).unwrap_or(path);
    if git_glob.is_match(test_path) {
        return false;
    }
    !globs
        .iter()
        .flatten()
        .any(|glob| glob.is_match(test_path))
}
/// Returns a set of properties that should not be written with syncback if
/// one exists. This list is read directly from the Project and takes
/// inheritance into effect.
///
/// It **does not** handle properties that should not serialize for other
/// reasons, such as being defaults or being marked as not serializing in the
/// ReflectionDatabase.
///
/// Returns `None` only when the project has no `syncback_rules` at all.
fn get_property_filter(project: &Project, new_inst: &Instance) -> Option<UstrSet> {
    let filter = &project.syncback_rules.as_ref()?.ignore_properties;
    let mut set = UstrSet::default();
    let database = rbx_reflection_database::get().unwrap();
    // Walk the class hierarchy upwards, collecting ignored properties for
    // each ancestor class.
    let mut current_class_name = new_inst.class.as_str();
    loop {
        if let Some(list) = filter.get(&ustr(current_class_name)) {
            set.extend(list)
        }
        // If the class isn't in the reflection database (e.g. an unknown or
        // newly added class), stop climbing but keep what we've collected.
        // Previously a `?` here returned `None`, silently discarding filter
        // entries already gathered for the class itself.
        let Some(class) = database.classes.get(current_class_name) else {
            break;
        };
        if let Some(super_class) = class.superclass.as_ref() {
            current_class_name = super_class;
        } else {
            break;
        }
    }
    Some(set)
}
/// Produces a breadth-first list of descendants in the WeakDom such that
/// every parent appears **before** its children. Callers that need to
/// process children before parents (e.g. `hash_tree`) pop elements from the
/// end of the returned `Vec`.
fn descendants(dom: &WeakDom, root_ref: Ref) -> Vec<Ref> {
    let mut queue = VecDeque::new();
    let mut ordered = Vec::new();
    queue.push_front(root_ref);
    while let Some(referent) = queue.pop_front() {
        let inst = dom
            .get_by_ref(referent)
            .expect("Invariant: WeakDom had a Ref that wasn't inside it");
        ordered.push(referent);
        for child in inst.children() {
            queue.push_back(*child)
        }
    }
    ordered
}
/// Removes the children of `new`'s root that are not also children of `old`'s
/// root.
///
/// This does not care about duplicates, and only filters based on names and
/// class names.
fn strip_unknown_root_children(new: &mut WeakDom, old: &RojoTree) {
    let old_root = old.root();
    // Index the old root's children by name. If two children share a name,
    // the later one wins — acceptable since only name+class are compared.
    let old_root_children: HashMap<&str, InstanceWithMeta> = old_root
        .children()
        .iter()
        .map(|referent| {
            let inst = old
                .get_instance(*referent)
                .expect("all children of a DOM's root should exist");
            (inst.name(), inst)
        })
        .collect();
    // Copy the child list so we can destroy children while iterating.
    let root_children = new.root().children().to_vec();
    for child_ref in root_children {
        let child = new
            .get_by_ref(child_ref)
            .expect("all children of the root should exist in the DOM");
        if let Some(old) = old_root_children.get(child.name.as_str()) {
            if old.class_name() == child.class {
                continue;
            }
        }
        log::trace!("Pruning root child {} of class {}", child.name, child.class);
        new.destroy(child_ref);
    }
}

View File

@@ -0,0 +1,111 @@
use rbx_dom_weak::{types::Variant, Instance, Ustr, UstrMap};
use rbx_reflection::{PropertyKind, PropertySerialization, Scriptability};
use crate::{variant_eq::variant_eq, Project};
/// Returns a map of properties from `inst` that are both allowed under the
/// user-provided settings, are not their default value, and serialize.
///
/// Convenience wrapper over `filter_properties_preallocated` that collects
/// the result into a `UstrMap`.
pub fn filter_properties<'inst>(
    project: &Project,
    inst: &'inst Instance,
) -> UstrMap<&'inst Variant> {
    let mut pairs = Vec::with_capacity(inst.properties.len());
    filter_properties_preallocated(project, inst, &mut pairs);
    pairs.into_iter().collect()
}
/// Fills `allocation` with the properties from `inst` that are allowed under
/// the user-provided settings, are not their default value, and serialize.
///
/// Skipped outright: `Ref` and `UniqueId` values, properties the reflection
/// database says do not serialize, and (when `sync_unscriptable` is false)
/// properties with `Scriptability::None`.
pub fn filter_properties_preallocated<'inst>(
    project: &Project,
    inst: &'inst Instance,
    allocation: &mut Vec<(Ustr, &'inst Variant)>,
) {
    let sync_unscriptable = project
        .syncback_rules
        .as_ref()
        .and_then(|s| s.sync_unscriptable)
        .unwrap_or(true);
    let class_data = rbx_reflection_database::get()
        .unwrap()
        .classes
        .get(inst.class.as_str());
    // Returns `true` when the property should be SKIPPED.
    let predicate = |prop_name: &Ustr, prop_value: &Variant| {
        // We don't want to serialize Ref or UniqueId properties in JSON files
        if matches!(prop_value, Variant::Ref(_) | Variant::UniqueId(_)) {
            return true;
        }
        if !should_property_serialize(&inst.class, prop_name) {
            return true;
        }
        if !sync_unscriptable {
            if let Some(data) = class_data {
                if let Some(prop_data) = data.properties.get(prop_name.as_str()) {
                    if matches!(prop_data.scriptability, Scriptability::None) {
                        return true;
                    }
                }
            }
        }
        false
    };
    if let Some(class_data) = class_data {
        // Known class: additionally drop properties equal to their default.
        let defaults = &class_data.default_properties;
        for (name, value) in &inst.properties {
            if predicate(name, value) {
                continue;
            }
            if let Some(default) = defaults.get(name.as_str()) {
                if !variant_eq(value, default) {
                    allocation.push((*name, value));
                }
            } else {
                allocation.push((*name, value));
            }
        }
    } else {
        // Unknown class: no default information, keep everything that
        // passes the predicate.
        for (name, value) in &inst.properties {
            if predicate(name, value) {
                continue;
            }
            allocation.push((*name, value));
        }
    }
}
/// Reports whether `class_name.prop_name` serializes according to the
/// reflection database, walking up the superclass chain until the property
/// is found. Unknown classes and unknown properties default to `true`.
fn should_property_serialize(class_name: &str, prop_name: &str) -> bool {
    let database = rbx_reflection_database::get().unwrap();
    let mut current_class_name = class_name;
    while let Some(class_data) = database.classes.get(current_class_name) {
        match class_data.properties.get(prop_name) {
            Some(data) => {
                log::trace!("found {class_name}.{prop_name} on {current_class_name}");
                return match &data.kind {
                    // It's not really clear if this can ever happen but I want to
                    // support it just in case!
                    PropertyKind::Alias { alias_for } => {
                        should_property_serialize(current_class_name, alias_for)
                    }
                    // Migrations and aliases are happily handled for us by parsers
                    // so we don't really need to handle them.
                    PropertyKind::Canonical { serialization } => {
                        !matches!(serialization, PropertySerialization::DoesNotSerialize)
                    }
                    kind => unimplemented!("unknown property kind {kind:?}"),
                };
            }
            None => match class_data.superclass.as_ref() {
                Some(super_class) => current_class_name = super_class,
                None => break,
            },
        }
    }
    true
}

View File

@@ -0,0 +1,192 @@
//! Implements iterating through an entire WeakDom and linking all Ref
//! properties using attributes.
use std::collections::{HashMap, HashSet, VecDeque};
use rbx_dom_weak::{
types::{Attributes, Ref, UniqueId, Variant},
ustr, Instance, Ustr, WeakDom,
};
use crate::{multimap::MultiMap, REF_ID_ATTRIBUTE_NAME, REF_POINTER_ATTRIBUTE_PREFIX};
/// The referent information gathered from a `WeakDom` by `collect_referents`,
/// later consumed by `link_referents`.
pub struct RefLinks {
    /// A map of referents to each of their Ref properties.
    prop_links: MultiMap<Ref, RefLink>,
    /// A set of referents that need their ID rewritten. This includes
    /// Instances that have no existing ID.
    need_rewrite: HashSet<Ref>,
}
/// One `Ref`-valued property: a property name paired with the referent it
/// points at.
#[derive(PartialEq, Eq)]
struct RefLink {
    /// The name of a property
    name: Ustr,
    /// The value of the property.
    value: Ref,
}
/// Iterates through a WeakDom and collects referent properties.
///
/// They can be linked to a dom later using `link_referents`.
///
/// Along the way this also determines which target Instances need a fresh
/// ID attribute: those with no existing ID, and those whose ID duplicates
/// one already seen on a different Instance.
pub fn collect_referents(dom: &WeakDom) -> RefLinks {
    // Maps each seen ID to the referent it was first observed on, so
    // duplicates can be detected.
    let mut ids = HashMap::new();
    let mut need_rewrite = HashSet::new();
    let mut links = MultiMap::new();
    // Note that this is back-in, front-out. This is important because
    // VecDeque::extend is the equivalent to using push_back.
    let mut queue = VecDeque::new();
    queue.push_back(dom.root_ref());
    while let Some(inst_ref) = queue.pop_front() {
        let pointer = dom.get_by_ref(inst_ref).unwrap();
        queue.extend(pointer.children().iter().copied());
        for (prop_name, prop_value) in &pointer.properties {
            // Only Ref-typed, non-null properties are of interest.
            let Variant::Ref(prop_value) = prop_value else {
                continue;
            };
            if prop_value.is_none() {
                continue;
            }
            links.insert(
                inst_ref,
                RefLink {
                    name: *prop_name,
                    value: *prop_value,
                },
            );
            let target = dom
                .get_by_ref(*prop_value)
                .expect("Refs in DOM should point to valid Instances");
            // 1. Check if target has an ID
            if let Some(id) = get_existing_id(target) {
                // If it does, we need to check whether that ID is a duplicate
                if let Some(id_ref) = ids.get(id) {
                    // If the same ID points to a new Instance, rewrite it.
                    if id_ref != prop_value {
                        if log::log_enabled!(log::Level::Trace) {
                            log::trace!(
                                "{} needs an id rewritten because it has the same id as {}",
                                target.name,
                                dom.get_by_ref(*id_ref).unwrap().name
                            );
                        }
                        need_rewrite.insert(*prop_value);
                    }
                }
                ids.insert(id, *prop_value);
            } else {
                log::trace!("{} needs an id rewritten because it has no id but is referred to by {}.{prop_name}", target.name, pointer.name);
                // If it does not, it needs one.
                need_rewrite.insert(*prop_value);
            }
        }
    }
    RefLinks {
        need_rewrite,
        prop_links: links,
    }
}
/// Writes ref-pointer attributes into `dom` for every link collected by
/// `collect_referents`, after first making sure every link target has an ID
/// attribute (see `write_id_attributes`).
///
/// Any pre-existing ref-pointer attributes on a linking Instance are dropped
/// and replaced, so removed links do not linger.
///
/// # Errors
/// Returns an error if an Instance's `Attributes` property exists but is not
/// of type `Attributes`.
pub fn link_referents(links: RefLinks, dom: &mut WeakDom) -> anyhow::Result<()> {
    write_id_attributes(&links, dom)?;
    // Reused across iterations to avoid reallocating. It must always be
    // emptied (drained or cleared) before the next instance is processed.
    let mut prop_list = Vec::new();
    for (inst_id, properties) in links.prop_links {
        // Resolve each Ref property to its target's ID attribute string.
        for ref_link in properties {
            let prop_inst = match dom.get_by_ref(ref_link.value) {
                Some(inst) => inst,
                None => continue,
            };
            let id = get_existing_id(prop_inst)
                .expect("all Instances that are pointed to should have an ID");
            prop_list.push((ref_link.name, Variant::String(id.to_owned())));
        }
        let inst = match dom.get_by_ref_mut(inst_id) {
            Some(inst) => inst,
            None => {
                // Discard the properties collected above; otherwise they
                // would leak into the next instance's attributes.
                prop_list.clear();
                continue;
            }
        };
        // Take the existing Attributes (if any), filtering out stale
        // ref-pointer attributes from earlier runs.
        let mut attributes: Attributes = match inst.properties.remove(&ustr("Attributes")) {
            Some(Variant::Attributes(attrs)) => attrs,
            None => Attributes::new(),
            Some(value) => {
                anyhow::bail!(
                    "expected Attributes to be of type 'Attributes' but it was of type '{:?}'",
                    value.ty()
                );
            }
        }
        .into_iter()
        .filter(|(name, _)| !name.starts_with(REF_POINTER_ATTRIBUTE_PREFIX))
        .collect();
        for (prop_name, prop_value) in prop_list.drain(..) {
            attributes.insert(
                format!("{REF_POINTER_ATTRIBUTE_PREFIX}{prop_name}"),
                prop_value,
            );
        }
        inst.properties
            .insert("Attributes".into(), attributes.into());
    }
    Ok(())
}
/// Ensures every Instance in `links.need_rewrite` has an ID attribute,
/// deriving the value from the Instance's `UniqueId` property when present
/// and minting a fresh `UniqueId` otherwise.
///
/// # Errors
/// Returns an error if an Instance's `Attributes` property exists but is not
/// of type `Attributes`.
fn write_id_attributes(links: &RefLinks, dom: &mut WeakDom) -> anyhow::Result<()> {
    for referent in &links.need_rewrite {
        let inst = match dom.get_by_ref_mut(*referent) {
            Some(inst) => inst,
            None => continue,
        };
        // Prefer the Instance's own UniqueId so the written ID is stable;
        // otherwise mint a brand new one.
        let unique_id = match inst.properties.get(&ustr("UniqueId")) {
            Some(Variant::UniqueId(id)) => *id,
            _ => UniqueId::now().unwrap(),
        };
        // The entry API creates the Attributes property when it's missing,
        // avoiding the insert-then-relookup dance (and its unreachable! arm).
        let attributes = match inst
            .properties
            .entry(ustr("Attributes"))
            .or_insert_with(|| Attributes::new().into())
        {
            Variant::Attributes(attrs) => attrs,
            value => {
                anyhow::bail!(
                    "expected Attributes to be of type 'Attributes' but it was of type '{:?}'",
                    value.ty()
                );
            }
        };
        attributes.insert(
            REF_ID_ATTRIBUTE_NAME.into(),
            Variant::String(unique_id.to_string()),
        );
    }
    Ok(())
}
/// Fetches the existing ID attribute for an Instance, if it has one.
///
/// Binary strings are accepted in place of strings as long as their
/// contents are valid UTF-8.
fn get_existing_id(inst: &Instance) -> Option<&str> {
    let attrs = match inst.properties.get(&ustr("Attributes"))? {
        Variant::Attributes(attrs) => attrs,
        _ => return None,
    };
    match attrs.get(REF_ID_ATTRIBUTE_NAME)? {
        Variant::String(str) => Some(str),
        Variant::BinaryString(bstr) => std::str::from_utf8(bstr.as_ref()).ok(),
        _ => None,
    }
}

259
src/syncback/snapshot.rs Normal file
View File

@@ -0,0 +1,259 @@
use indexmap::IndexMap;
use memofs::Vfs;
use std::path::{Path, PathBuf};
use crate::{
snapshot::{InstanceWithMeta, RojoTree},
snapshot_middleware::Middleware,
Project,
};
use rbx_dom_weak::{
types::{Ref, Variant},
Instance, Ustr, UstrMap, WeakDom,
};
use super::{get_best_middleware, name_for_inst, property_filter::filter_properties};
/// Shared, copyable context threaded through every `SyncbackSnapshot`:
/// the VFS, both trees, and the project being synced back.
#[derive(Clone, Copy)]
pub struct SyncbackData<'sync> {
    /// The file system interface used for syncback.
    pub(super) vfs: &'sync Vfs,
    /// The 'old' tree (Rojo's current `RojoTree`).
    pub(super) old_tree: &'sync RojoTree,
    /// The 'new' tree being synced back.
    pub(super) new_tree: &'sync WeakDom,
    /// The root project used for this syncback run.
    pub(super) project: &'sync Project,
}
/// A unit of syncback work: an Instance from the 'new' tree paired with its
/// optional counterpart in the 'old' tree and the file system path
/// associated with it.
pub struct SyncbackSnapshot<'sync> {
    /// Shared context for the whole syncback run.
    pub data: SyncbackData<'sync>,
    /// Referent of the matching Instance in the 'old' tree, if one exists.
    pub old: Option<Ref>,
    /// Referent of the Instance in the 'new' tree.
    pub new: Ref,
    /// The file system path associated with this snapshot.
    pub path: PathBuf,
    /// An optional middleware override; when `None`, the middleware is
    /// derived via `get_best_middleware`.
    pub middleware: Option<Middleware>,
}
impl<'sync> SyncbackSnapshot<'sync> {
    /// Constructs a SyncbackSnapshot from the provided refs
    /// while inheriting this snapshot's path and data. This should be used for
    /// directories.
    #[inline]
    pub fn with_joined_path(&self, new_ref: Ref, old_ref: Option<Ref>) -> anyhow::Result<Self> {
        // This is exactly `with_base_path` rooted at this snapshot's own
        // path, so delegate rather than duplicating the logic.
        self.with_base_path(&self.path, new_ref, old_ref)
    }

    /// Constructs a SyncbackSnapshot from the provided refs and a base path,
    /// while inheriting this snapshot's data.
    ///
    /// The actual path of the snapshot is made by getting a file name for the
    /// snapshot and then appending it to the provided base path.
    #[inline]
    pub fn with_base_path(
        &self,
        base_path: &Path,
        new_ref: Ref,
        old_ref: Option<Ref>,
    ) -> anyhow::Result<Self> {
        // The snapshot is built with a placeholder path first because
        // `get_best_middleware` and `name_for_inst` need a snapshot to
        // inspect; the real path is filled in afterwards.
        let mut snapshot = Self {
            data: self.data,
            old: old_ref,
            new: new_ref,
            path: PathBuf::new(),
            middleware: None,
        };
        let middleware = get_best_middleware(&snapshot);
        let name = name_for_inst(middleware, snapshot.new_inst(), snapshot.old_inst())?;
        snapshot.path = base_path.join(name.as_ref());

        Ok(snapshot)
    }

    /// Constructs a SyncbackSnapshot with the provided path and refs while
    /// inheriting the data of the this snapshot.
    #[inline]
    pub fn with_new_path(&self, path: PathBuf, new_ref: Ref, old_ref: Option<Ref>) -> Self {
        Self {
            data: self.data,
            old: old_ref,
            new: new_ref,
            path,
            middleware: None,
        }
    }

    /// Allows a middleware to be 'forced' onto a SyncbackSnapshot to override
    /// the attempts to derive it.
    #[inline]
    pub fn middleware(mut self, middleware: Middleware) -> Self {
        self.middleware = Some(middleware);
        self
    }

    /// Returns a map of properties for an Instance from the 'new' tree
    /// with filtering done to avoid noise. This method filters out properties
    /// that are not meant to be present in Instances that are represented
    /// specially by a path, like `LocalScript.Source` and `StringValue.Value`.
    ///
    /// This method is not necessary or desired for blobs like Rbxm or non-path
    /// middlewares like JsonModel.
    #[inline]
    #[must_use]
    pub fn get_path_filtered_properties(&self, new_ref: Ref) -> Option<UstrMap<&'sync Variant>> {
        let inst = self.get_new_instance(new_ref)?;
        // The only filtering we have to do is filter out properties that are
        // special-cased in some capacity.
        let properties = filter_properties(self.data.project, inst)
            .into_iter()
            .filter(|(name, _)| !filter_out_property(inst, name))
            .collect();

        Some(properties)
    }

    /// Returns a path to the provided Instance in the new DOM. This path is
    /// where you would look for the object in Roblox Studio.
    #[inline]
    pub fn get_new_inst_path(&self, referent: Ref) -> String {
        inst_path(self.new_tree(), referent)
    }

    /// Returns a path to the provided Instance in the old DOM. This path is
    /// where you would look for the object in Roblox Studio.
    #[inline]
    pub fn get_old_inst_path(&self, referent: Ref) -> String {
        inst_path(self.old_tree(), referent)
    }

    /// Returns an Instance from the old tree with the provided referent, if it
    /// exists.
    #[inline]
    pub fn get_old_instance(&self, referent: Ref) -> Option<InstanceWithMeta<'sync>> {
        self.data.old_tree.get_instance(referent)
    }

    /// Returns an Instance from the new tree with the provided referent, if it
    /// exists.
    #[inline]
    pub fn get_new_instance(&self, referent: Ref) -> Option<&'sync Instance> {
        self.data.new_tree.get_by_ref(referent)
    }

    /// The 'old' Instance this snapshot is for, if it exists.
    #[inline]
    pub fn old_inst(&self) -> Option<InstanceWithMeta<'sync>> {
        self.old
            .and_then(|old| self.data.old_tree.get_instance(old))
    }

    /// The 'new' Instance this snapshot is for.
    #[inline]
    pub fn new_inst(&self) -> &'sync Instance {
        self.data
            .new_tree
            .get_by_ref(self.new)
            .expect("SyncbackSnapshot should not contain invalid referents")
    }

    /// Returns the root Project that was used to make this snapshot.
    #[inline]
    pub fn project(&self) -> &'sync Project {
        self.data.project
    }

    /// Returns the underlying VFS being used for syncback.
    #[inline]
    pub fn vfs(&self) -> &'sync Vfs {
        self.data.vfs
    }

    /// Returns the WeakDom used for the 'new' tree.
    #[inline]
    pub fn new_tree(&self) -> &'sync WeakDom {
        self.data.new_tree
    }

    /// Returns the WeakDom used for the 'old' tree.
    #[inline]
    pub fn old_tree(&self) -> &'sync WeakDom {
        self.data.old_tree.inner()
    }

    /// Returns user-specified property ignore rules.
    #[inline]
    pub fn ignore_props(&self) -> Option<&IndexMap<Ustr, Vec<Ustr>>> {
        self.data
            .project
            .syncback_rules
            .as_ref()
            .map(|rules| &rules.ignore_properties)
    }

    /// Returns user-specified ignore tree.
    #[inline]
    pub fn ignore_tree(&self) -> Option<&[String]> {
        self.data
            .project
            .syncback_rules
            .as_ref()
            .map(|rules| rules.ignore_trees.as_slice())
    }
}
/// Returns whether the given property should be filtered out for an Instance
/// that is represented by a path on the file system.
pub fn filter_out_property(inst: &Instance, prop_name: &str) -> bool {
    // Script sources shouldn't be set by scripts that are created via
    // `$path` or via being on the file system; the same goes for the
    // special-cased contents of LocalizationTables and StringValues.
    matches!(
        (inst.class.as_str(), prop_name),
        (
            "Script" | "LocalScript" | "ModuleScript",
            "Source" | "ScriptGuid"
        ) | ("LocalizationTable", "Contents")
            | ("StringValue", "Value")
    )
}
pub fn inst_path(dom: &WeakDom, referent: Ref) -> String {
let mut path = Vec::new();
let mut inst = dom.get_by_ref(referent);
while let Some(instance) = inst {
path.push(instance.name.as_str());
inst = dom.get_by_ref(instance.parent());
}
// This is to avoid the root's name from appearing in the path. Not
// optimal, but should be fine.
path.pop();
path.reverse();
path.join("/")
}
#[cfg(test)]
mod test {
    use rbx_dom_weak::{InstanceBuilder, WeakDom};

    use super::inst_path as inst_path_outer;

    #[test]
    fn inst_path() {
        // Build ROOT -> Child1 -> Child2 -> Child3, then verify the path
        // produced at every depth (the root's name is never included).
        let mut dom = WeakDom::new(InstanceBuilder::new("ROOT"));
        let mut children = Vec::new();
        let mut parent = dom.root_ref();
        for name in ["Child1", "Child2", "Child3"] {
            parent = dom.insert(parent, InstanceBuilder::new(name));
            children.push(parent);
        }

        assert_eq!(inst_path_outer(&dom, dom.root_ref()), "");
        assert_eq!(inst_path_outer(&dom, children[0]), "Child1");
        assert_eq!(inst_path_outer(&dom, children[1]), "Child1/Child2");
        assert_eq!(inst_path_outer(&dom, children[2]), "Child1/Child2/Child3");
    }
}

191
src/variant_eq.rs Normal file
View File

@@ -0,0 +1,191 @@
use rbx_dom_weak::types::{PhysicalProperties, Variant, Vector3};
/// Accepts three arguments: a float type and two values to compare.
///
/// Returns a bool indicating whether they're equal. This accounts for NaN such
/// that `approx_eq!(f32, f32::NAN, f32::NAN)` is `true`.
macro_rules! approx_eq {
    ($Ty:ty, $a:expr, $b:expr) => {
        // `&&` binds tighter than `||`, so this reads as:
        // approximately equal, OR both values are NaN.
        float_cmp::approx_eq!($Ty, $a, $b) || $a.is_nan() && $b.is_nan()
    };
}
/// Compares two variants to determine if they're equal. This correctly takes
/// float comparisons into account.
///
/// # Panics
/// Panics if the two variants share a type that this function does not know
/// how to compare.
pub fn variant_eq(variant_a: &Variant, variant_b: &Variant) -> bool {
    if variant_a.ty() != variant_b.ty() {
        return false;
    }

    match (variant_a, variant_b) {
        (Variant::Attributes(a), Variant::Attributes(b)) => {
            // If they're not the same size, we can just abort
            if a.len() != b.len() {
                return false;
            }
            // Since Attributes are stored with a BTreeMap, the keys are sorted
            // and we can compare each map's keys in order.
            for ((a_name, a_value), (b_name, b_value)) in a.iter().zip(b.iter()) {
                if !(a_name == b_name && variant_eq(a_value, b_value)) {
                    return false;
                }
            }
            true
        }
        (Variant::Axes(a), Variant::Axes(b)) => a == b,
        (Variant::BinaryString(a), Variant::BinaryString(b)) => a == b,
        (Variant::Bool(a), Variant::Bool(b)) => a == b,
        (Variant::BrickColor(a), Variant::BrickColor(b)) => a == b,
        (Variant::CFrame(a), Variant::CFrame(b)) => {
            vector_eq(&a.position, &b.position)
                && vector_eq(&a.orientation.x, &b.orientation.x)
                && vector_eq(&a.orientation.y, &b.orientation.y)
                && vector_eq(&a.orientation.z, &b.orientation.z)
        }
        (Variant::Color3(a), Variant::Color3(b)) => {
            approx_eq!(f32, a.r, b.r) && approx_eq!(f32, a.g, b.g) && approx_eq!(f32, a.b, b.b)
        }
        (Variant::Color3uint8(a), Variant::Color3uint8(b)) => a == b,
        (Variant::ColorSequence(a), Variant::ColorSequence(b)) => {
            if a.keypoints.len() != b.keypoints.len() {
                return false;
            }
            // Keypoints are compared sorted by time so that orderings of the
            // same set of keypoints compare equal.
            let mut a_keypoints: Vec<_> = a.keypoints.iter().collect();
            let mut b_keypoints: Vec<_> = b.keypoints.iter().collect();
            a_keypoints.sort_unstable_by(|k1, k2| k1.time.partial_cmp(&k2.time).unwrap());
            b_keypoints.sort_unstable_by(|k1, k2| k1.time.partial_cmp(&k2.time).unwrap());
            for (a_kp, b_kp) in a_keypoints.iter().zip(b_keypoints) {
                if !(approx_eq!(f32, a_kp.time, b_kp.time)
                    && approx_eq!(f32, a_kp.color.r, b_kp.color.r)
                    && approx_eq!(f32, a_kp.color.g, b_kp.color.g)
                    && approx_eq!(f32, a_kp.color.b, b_kp.color.b))
                {
                    return false;
                }
            }
            true
        }
        (Variant::Content(a), Variant::Content(b)) => a == b,
        (Variant::ContentId(a), Variant::ContentId(b)) => a == b,
        (Variant::Enum(a), Variant::Enum(b)) => a == b,
        (Variant::Faces(a), Variant::Faces(b)) => a == b,
        (Variant::Float32(a), Variant::Float32(b)) => approx_eq!(f32, *a, *b),
        (Variant::Float64(a), Variant::Float64(b)) => approx_eq!(f64, *a, *b),
        (Variant::Font(a), Variant::Font(b)) => {
            a.weight == b.weight
                && a.style == b.style
                && a.family == b.family
                && a.cached_face_id == b.cached_face_id
        }
        (Variant::Int32(a), Variant::Int32(b)) => a == b,
        (Variant::Int64(a), Variant::Int64(b)) => a == b,
        (Variant::MaterialColors(a), Variant::MaterialColors(b)) => a.encode() == b.encode(),
        (Variant::NetAssetRef(a), Variant::NetAssetRef(b)) => a == b,
        (Variant::NumberRange(a), Variant::NumberRange(b)) => {
            approx_eq!(f32, a.max, b.max) && approx_eq!(f32, a.min, b.min)
        }
        (Variant::NumberSequence(a), Variant::NumberSequence(b)) => {
            if a.keypoints.len() != b.keypoints.len() {
                return false;
            }
            // Same sorted-by-time comparison as ColorSequence above.
            let mut a_keypoints: Vec<_> = a.keypoints.iter().collect();
            let mut b_keypoints: Vec<_> = b.keypoints.iter().collect();
            a_keypoints.sort_unstable_by(|k1, k2| k1.time.partial_cmp(&k2.time).unwrap());
            b_keypoints.sort_unstable_by(|k1, k2| k1.time.partial_cmp(&k2.time).unwrap());
            for (a_kp, b_kp) in a_keypoints.iter().zip(b_keypoints) {
                if !(approx_eq!(f32, a_kp.time, b_kp.time)
                    && approx_eq!(f32, a_kp.value, b_kp.value)
                    && approx_eq!(f32, a_kp.envelope, b_kp.envelope))
                {
                    return false;
                }
            }
            true
        }
        (Variant::OptionalCFrame(a), Variant::OptionalCFrame(b)) => match (a, b) {
            (Some(a), Some(b)) => {
                vector_eq(&a.position, &b.position)
                    && vector_eq(&a.orientation.x, &b.orientation.x)
                    && vector_eq(&a.orientation.y, &b.orientation.y)
                    && vector_eq(&a.orientation.z, &b.orientation.z)
            }
            (None, None) => true,
            _ => false,
        },
        (Variant::PhysicalProperties(a), Variant::PhysicalProperties(b)) => match (a, b) {
            (PhysicalProperties::Default, PhysicalProperties::Default) => true,
            (PhysicalProperties::Custom(a2), PhysicalProperties::Custom(b2)) => {
                approx_eq!(f32, a2.density(), b2.density())
                    && approx_eq!(f32, a2.elasticity(), b2.elasticity())
                    && approx_eq!(f32, a2.friction(), b2.friction())
                    && approx_eq!(f32, a2.elasticity_weight(), b2.elasticity_weight())
                    && approx_eq!(f32, a2.friction_weight(), b2.friction_weight())
                    && approx_eq!(f32, a2.acoustic_absorption(), b2.acoustic_absorption())
            }
            _ => false,
        },
        (Variant::Ray(a), Variant::Ray(b)) => {
            vector_eq(&a.direction, &b.direction) && vector_eq(&a.origin, &b.origin)
        }
        (Variant::Rect(a), Variant::Rect(b)) => {
            approx_eq!(f32, a.max.x, b.max.x)
                && approx_eq!(f32, a.max.y, b.max.y)
                && approx_eq!(f32, a.min.x, b.min.x)
                && approx_eq!(f32, a.min.y, b.min.y)
        }
        (Variant::Ref(a), Variant::Ref(b)) => a == b,
        (Variant::Region3(a), Variant::Region3(b)) => {
            vector_eq(&a.max, &b.max) && vector_eq(&a.min, &b.min)
        }
        (Variant::Region3int16(a), Variant::Region3int16(b)) => a == b,
        (Variant::SecurityCapabilities(a), Variant::SecurityCapabilities(b)) => a == b,
        (Variant::SharedString(a), Variant::SharedString(b)) => a == b,
        (Variant::Tags(a), Variant::Tags(b)) => {
            // Tags are compared order-insensitively by sorting both sides.
            let mut a_sorted: Vec<&str> = a.iter().collect();
            let mut b_sorted: Vec<&str> = b.iter().collect();
            if a_sorted.len() == b_sorted.len() {
                a_sorted.sort_unstable();
                b_sorted.sort_unstable();
                for (a_tag, b_tag) in a_sorted.into_iter().zip(b_sorted) {
                    if a_tag != b_tag {
                        return false;
                    }
                }
                true
            } else {
                false
            }
        }
        (Variant::UDim(a), Variant::UDim(b)) => {
            approx_eq!(f32, a.scale, b.scale) && a.offset == b.offset
        }
        (Variant::UDim2(a), Variant::UDim2(b)) => {
            approx_eq!(f32, a.x.scale, b.x.scale)
                && a.x.offset == b.x.offset
                && approx_eq!(f32, a.y.scale, b.y.scale)
                && a.y.offset == b.y.offset
        }
        (Variant::UniqueId(a), Variant::UniqueId(b)) => a == b,
        (Variant::String(a), Variant::String(b)) => a == b,
        (Variant::Vector2(a), Variant::Vector2(b)) => {
            approx_eq!(f32, a.x, b.x) && approx_eq!(f32, a.y, b.y)
        }
        (Variant::Vector2int16(a), Variant::Vector2int16(b)) => a == b,
        (Variant::Vector3(a), Variant::Vector3(b)) => vector_eq(a, b),
        (Variant::Vector3int16(a), Variant::Vector3int16(b)) => a == b,
        // Fixed typo: "unsupport" -> "unsupported".
        (a, b) => panic!(
            "unsupported variant comparison: {:?} and {:?}",
            a.ty(),
            b.ty()
        ),
    }
}
/// Componentwise approximate equality for two `Vector3` values.
#[inline(always)]
fn vector_eq(a: &Vector3, b: &Vector3) -> bool {
    [(a.x, b.x), (a.y, b.y), (a.z, b.z)]
        .into_iter()
        .all(|(lhs, rhs)| approx_eq!(f32, lhs, rhs))
}

View File

@@ -165,6 +165,7 @@ impl UiService {
<div>"specified_id: " { format!("{:?}", metadata.specified_id) } </div>
<div>"ignore_unknown_instances: " { metadata.ignore_unknown_instances.to_string() }</div>
<div>"instigating source: " { format!("{:?}", metadata.instigating_source) }</div>
<div>"middleware: " { format!("{:?}", metadata.middleware) }</div>
{ relevant_paths }
</>
};