// Forked from rojo-rbx/rojo.
mod file_names;
|
|
mod fs_snapshot;
|
|
mod hash;
|
|
mod property_filter;
|
|
mod ref_properties;
|
|
mod snapshot;
|
|
|
|
use anyhow::Context;
|
|
use indexmap::IndexMap;
|
|
use memofs::Vfs;
|
|
use rbx_dom_weak::{
|
|
types::{Ref, Variant},
|
|
ustr, Instance, Ustr, UstrSet, WeakDom,
|
|
};
|
|
use serde::{Deserialize, Serialize};
|
|
use std::{
|
|
collections::{HashMap, HashSet, VecDeque},
|
|
env,
|
|
path::Path,
|
|
sync::OnceLock,
|
|
};
|
|
|
|
use crate::{
|
|
glob::Glob,
|
|
snapshot::{InstanceWithMeta, RojoTree},
|
|
snapshot_middleware::Middleware,
|
|
syncback::ref_properties::{collect_referents, link_referents},
|
|
Project,
|
|
};
|
|
|
|
pub use file_names::{extension_for_middleware, name_for_inst, slugify_name, validate_file_name};
|
|
pub use fs_snapshot::FsSnapshot;
|
|
pub use hash::*;
|
|
pub use property_filter::{filter_properties, filter_properties_preallocated};
|
|
pub use snapshot::{SyncbackData, SyncbackSnapshot};
|
|
|
|
/// The name of an environment variable to use to override the behavior of
/// syncback on model files.
/// By default, syncback will use `Rbxm` for model files.
/// If this is set to `1`, it will instead use `Rbxmx`. If it is set to `2`,
/// it will use `JsonModel`.
///
/// This will **not** override existing `Rbxm` middleware. It will only impact
/// new files.
const DEBUG_MODEL_FORMAT_VAR: &str = "ROJO_SYNCBACK_DEBUG";
|
|
|
|
/// A glob that can be used to tell if a path contains a `.git` folder.
///
/// Lazily initialized on first use inside `is_valid_path`.
static GIT_IGNORE_GLOB: OnceLock<Glob> = OnceLock::new();
|
|
|
|
/// Runs the syncback reconciliation loop: compares `new_tree` (a DOM read
/// from a built place/model file) against `old_tree` (the DOM Rojo built
/// from the project on disk) and returns an [`FsSnapshot`] describing the
/// file writes and removals needed to bring the project in line with
/// `new_tree`.
///
/// Nothing is written to disk here; the caller is responsible for applying
/// the returned snapshot.
///
/// # Errors
/// Returns an error if the project's ignore globs fail to compile, referent
/// linking fails, or any middleware fails to sync an Instance back.
pub fn syncback_loop(
    vfs: &Vfs,
    old_tree: &mut RojoTree,
    mut new_tree: WeakDom,
    project: &Project,
    force_json: bool,
) -> anyhow::Result<FsSnapshot> {
    // Compile `ignore_paths` globs once up front; `None` when the project
    // declares no syncback rules.
    let ignore_patterns = project
        .syncback_rules
        .as_ref()
        .map(|rules| rules.compile_globs())
        .transpose()?;

    // TODO: Add a better way to tell if the root of a project is a directory
    let skip_pruning = if let Some(path) = &project.tree.path {
        let middleware =
            Middleware::middleware_for_path(vfs, &project.sync_rules, path.path()).unwrap();
        if let Some(middleware) = middleware {
            middleware.is_dir()
        } else {
            false
        }
    } else {
        false
    };
    if !skip_pruning {
        // Strip out any objects from the new tree that aren't in the old tree. This
        // is necessary so that hashing the roots of each tree won't always result
        // in different hashes. Shout out to Roblox for serializing a bunch of
        // Services nobody cares about.
        log::debug!("Pruning new tree");
        strip_unknown_root_children(&mut new_tree, old_tree);
    }

    // Referent properties are collected before property filtering so they
    // can be re-linked after the DOM is mutated below.
    log::debug!("Collecting referents for new DOM...");
    let deferred_referents = collect_referents(&new_tree);

    // Remove any properties that are manually blocked from syncback via the
    // project file.
    log::debug!("Pre-filtering properties on DOMs");
    for referent in descendants(&new_tree, new_tree.root_ref()) {
        let new_inst = new_tree.get_by_ref_mut(referent).unwrap();
        if let Some(filter) = get_property_filter(project, new_inst) {
            for prop in filter {
                new_inst.properties.remove(&prop);
            }
        }
    }
    for referent in descendants(old_tree.inner(), old_tree.get_root_id()) {
        let mut old_inst_rojo = old_tree.get_instance_mut(referent).unwrap();
        let old_inst = old_inst_rojo.inner_mut();
        if let Some(filter) = get_property_filter(project, old_inst) {
            for prop in filter {
                old_inst.properties.remove(&prop);
            }
        }
    }

    // Handle removing the current camera.
    if let Some(syncback_rules) = &project.syncback_rules {
        if !syncback_rules.sync_current_camera.unwrap_or_default() {
            log::debug!("Removing CurrentCamera from new DOM");
            let mut camera_ref = None;
            // Find Workspace among the root's children and read its
            // CurrentCamera referent property.
            for child_ref in new_tree.root().children() {
                let inst = new_tree.get_by_ref(*child_ref).unwrap();
                if inst.class == "Workspace" {
                    camera_ref = inst.properties.get(&ustr("CurrentCamera"));
                    break;
                }
            }
            // Only destroy the camera if the referent actually resolves to
            // an Instance in this DOM.
            if let Some(Variant::Ref(camera_ref)) = camera_ref {
                if new_tree.get_by_ref(*camera_ref).is_some() {
                    new_tree.destroy(*camera_ref);
                }
            }
        }
    }

    let ignore_referents = project
        .syncback_rules
        .as_ref()
        .and_then(|s| s.ignore_referents)
        .unwrap_or_default();
    if !ignore_referents {
        log::debug!("Linking referents for new DOM");
        link_referents(deferred_referents, &mut new_tree)?;
    } else {
        log::debug!("Skipping referent linking as per project syncback rules");
    }

    // As with pruning the children of the new root, we need to ensure the roots
    // for both DOMs have the same name otherwise their hashes will always be
    // different.
    new_tree.root_mut().name = old_tree.root().name().to_string();

    log::debug!("Hashing project DOM");
    let old_hashes = hash_tree(project, old_tree.inner(), old_tree.get_root_id());
    log::debug!("Hashing file DOM");
    let new_hashes = hash_tree(project, &new_tree, new_tree.root_ref());

    let project_path = project.folder_location();

    let syncback_data = SyncbackData {
        vfs,
        old_tree,
        new_tree: &new_tree,
        project,
        force_json,
    };

    // Seed the work queue with the project root; the loop below pushes
    // children produced by each middleware's syncback.
    let mut snapshots = vec![SyncbackSnapshot {
        data: syncback_data,
        old: Some(old_tree.get_root_id()),
        new: new_tree.root_ref(),
        path: project.file_location.clone(),
        middleware: Some(Middleware::Project),
    }];

    let mut fs_snapshot = FsSnapshot::new();

    'syncback: while let Some(snapshot) = snapshots.pop() {
        let inst_path = snapshot.get_new_inst_path(snapshot.new);
        // We can quickly check that two subtrees are identical and if they are,
        // skip reconciling them.
        if let Some(old_ref) = snapshot.old {
            match (old_hashes.get(&old_ref), new_hashes.get(&snapshot.new)) {
                (Some(old), Some(new)) => {
                    if old == new {
                        log::trace!(
                            "Skipping {inst_path} due to it being identically hashed as {old:?}"
                        );
                        continue;
                    }
                }
                _ => unreachable!("All Instances in both DOMs should have hashes"),
            }
        }

        if !is_valid_path(&ignore_patterns, project_path, &snapshot.path) {
            log::debug!("Skipping {inst_path} because its path matches ignore pattern");
            continue;
        }
        if let Some(syncback_rules) = &project.syncback_rules {
            // Ignore trees;
            for ignored in &syncback_rules.ignore_trees {
                if inst_path.starts_with(ignored.as_str()) {
                    log::debug!("Tree {inst_path} is blocked by project");
                    continue 'syncback;
                }
            }
        }

        let middleware = get_best_middleware(&snapshot, force_json);

        log::trace!(
            "Middleware for {inst_path} is {:?} (path is {})",
            middleware,
            snapshot.path.display()
        );

        if matches!(middleware, Middleware::Json | Middleware::Toml) {
            log::warn!("Cannot syncback {middleware:?} at {inst_path}, skipping");
            continue;
        }

        let syncback = match middleware.syncback(&snapshot) {
            Ok(syncback) => syncback,
            // A failed Dir syncback is retried as a single model file
            // (format chosen by `force_json` / the debug env var).
            Err(err) if middleware == Middleware::Dir => {
                let new_middleware = if force_json {
                    Middleware::JsonModel
                } else {
                    match env::var(DEBUG_MODEL_FORMAT_VAR) {
                        Ok(value) if value == "1" => Middleware::Rbxmx,
                        Ok(value) if value == "2" => Middleware::JsonModel,
                        _ => Middleware::Rbxm,
                    }
                };
                let file_name = snapshot
                    .path
                    .file_name()
                    .and_then(|s| s.to_str())
                    .context("Directory middleware should have a name in its path")?;
                let mut path = snapshot.path.clone();
                path.set_file_name(format!(
                    "{file_name}.{}",
                    extension_for_middleware(new_middleware)
                ));
                let new_snapshot = snapshot.with_new_path(path, snapshot.new, snapshot.old);
                log::warn!(
                    "Could not syncback {inst_path} as a Directory because: {err}.\n\
                    It will instead be synced back as a {new_middleware:?}."
                );
                let new_syncback_result = new_middleware
                    .syncback(&new_snapshot)
                    .with_context(|| format!("Failed to syncback {inst_path}"));
                if new_syncback_result.is_ok() && snapshot.old_inst().is_some() {
                    // We need to remove the old FS representation if we're
                    // reserializing it as an rbxm.
                    fs_snapshot.remove_dir(&snapshot.path);
                }
                new_syncback_result?
            }
            Err(err) => anyhow::bail!("Failed to syncback {inst_path} because {err}"),
        };

        // Queue file-system removals for Instances that vanished from the
        // new DOM, honoring the same ignore rules as additions.
        if !syncback.removed_children.is_empty() {
            log::debug!(
                "removed children for {inst_path}: {}",
                syncback.removed_children.len()
            );
            'remove: for inst in &syncback.removed_children {
                let path = inst.metadata().instigating_source.as_ref().unwrap().path();
                let inst_path = snapshot.get_old_inst_path(inst.id());
                if !is_valid_path(&ignore_patterns, project_path, path) {
                    log::debug!(
                        "Skipping removing {} because its matches an ignore pattern",
                        path.display()
                    );
                    continue;
                }
                if let Some(syncback_rules) = &project.syncback_rules {
                    for ignored in &syncback_rules.ignore_trees {
                        if inst_path.starts_with(ignored.as_str()) {
                            log::debug!("Skipping removing {inst_path} because its path is blocked by project");
                            continue 'remove;
                        }
                    }
                }
                if path.is_dir() {
                    fs_snapshot.remove_dir(path)
                } else {
                    fs_snapshot.remove_file(path)
                }
            }
        }

        // TODO provide replacement snapshots for e.g. two way sync

        fs_snapshot.merge_with_filter(syncback.fs_snapshot, |path| {
            is_valid_path(&ignore_patterns, project_path, path)
        });

        snapshots.extend(syncback.children);
    }

    Ok(fs_snapshot)
}
|
|
|
|
/// The result of syncing back a single Instance with a middleware.
pub struct SyncbackReturn<'sync> {
    /// File-system writes produced for this Instance; merged into the
    /// overall snapshot by `syncback_loop`.
    pub fs_snapshot: FsSnapshot,
    /// Child snapshots still in need of reconciliation; `syncback_loop`
    /// pushes these onto its work queue.
    pub children: Vec<SyncbackSnapshot<'sync>>,
    /// Instances present in the old DOM but missing from the new one, whose
    /// on-disk representations should be removed.
    pub removed_children: Vec<InstanceWithMeta<'sync>>,
}
|
|
|
|
pub fn get_best_middleware(snapshot: &SyncbackSnapshot, force_json: bool) -> Middleware {
|
|
// At some point, we're better off using an O(1) method for checking
|
|
// equality for classes like this.
|
|
static JSON_MODEL_CLASSES: OnceLock<HashSet<&str>> = OnceLock::new();
|
|
let json_model_classes = JSON_MODEL_CLASSES.get_or_init(|| {
|
|
[
|
|
"Actor",
|
|
"Sound",
|
|
"SoundGroup",
|
|
"Sky",
|
|
"Atmosphere",
|
|
"BloomEffect",
|
|
"BlurEffect",
|
|
"ColorCorrectionEffect",
|
|
"DepthOfFieldEffect",
|
|
"SunRaysEffect",
|
|
"ParticleEmitter",
|
|
"TextChannel",
|
|
"TextChatCommand",
|
|
// TODO: Implement a way to use inheritance for this
|
|
"ChatWindowConfiguration",
|
|
"ChatInputBarConfiguration",
|
|
"BubbleChatConfiguration",
|
|
"ChannelTabsConfiguration",
|
|
"RemoteEvent",
|
|
"UnreliableRemoteEvent",
|
|
"RemoteFunction",
|
|
"BindableEvent",
|
|
"BindableFunction",
|
|
]
|
|
.into()
|
|
});
|
|
|
|
let old_middleware = snapshot
|
|
.old_inst()
|
|
.and_then(|inst| inst.metadata().middleware);
|
|
let inst = snapshot.new_inst();
|
|
|
|
let mut middleware;
|
|
|
|
if let Some(override_middleware) = snapshot.middleware {
|
|
return override_middleware;
|
|
} else if let Some(old_middleware) = old_middleware {
|
|
return old_middleware;
|
|
} else if json_model_classes.contains(inst.class.as_str()) {
|
|
middleware = Middleware::JsonModel;
|
|
} else {
|
|
middleware = match inst.class.as_str() {
|
|
"Folder" | "Configuration" | "Tool" => Middleware::Dir,
|
|
"StringValue" => Middleware::Text,
|
|
"Script" => Middleware::ServerScript,
|
|
"LocalScript" => Middleware::ClientScript,
|
|
"ModuleScript" => Middleware::ModuleScript,
|
|
"LocalizationTable" => Middleware::Csv,
|
|
// This isn't the ideal way to handle this but it works.
|
|
name if name.ends_with("Value") => Middleware::JsonModel,
|
|
_ => Middleware::Rbxm,
|
|
}
|
|
}
|
|
|
|
if !inst.children().is_empty() {
|
|
middleware = match middleware {
|
|
Middleware::ServerScript => Middleware::ServerScriptDir,
|
|
Middleware::ClientScript => Middleware::ClientScriptDir,
|
|
Middleware::ModuleScript => Middleware::ModuleScriptDir,
|
|
Middleware::Csv => Middleware::CsvDir,
|
|
Middleware::JsonModel | Middleware::Text => Middleware::Dir,
|
|
_ => middleware,
|
|
}
|
|
}
|
|
|
|
if middleware == Middleware::Rbxm {
|
|
middleware = if force_json {
|
|
if !inst.children().is_empty() {
|
|
Middleware::Dir
|
|
} else {
|
|
Middleware::JsonModel
|
|
}
|
|
} else {
|
|
match env::var(DEBUG_MODEL_FORMAT_VAR) {
|
|
Ok(value) if value == "1" => Middleware::Rbxmx,
|
|
Ok(value) if value == "2" => Middleware::JsonModel,
|
|
_ => Middleware::Rbxm,
|
|
}
|
|
}
|
|
}
|
|
|
|
middleware
|
|
}
|
|
|
|
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
#[serde(deny_unknown_fields, rename_all = "camelCase")]
pub struct SyncbackRules {
    /// A list of subtrees in a file that will be ignored by Syncback.
    /// Matching is a prefix check against the Instance path.
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    ignore_trees: Vec<String>,
    /// A list of patterns to check against the path an Instance would serialize
    /// to. If a path matches one of these, the Instance won't be syncbacked.
    /// Compiled into globs by [`SyncbackRules::compile_globs`].
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    ignore_paths: Vec<String>,
    /// A map of classes to properties to ignore for that class when doing
    /// syncback. Applied with class inheritance taken into account.
    #[serde(default, skip_serializing_if = "IndexMap::is_empty")]
    ignore_properties: IndexMap<Ustr, Vec<Ustr>>,
    /// Whether or not the `CurrentCamera` of `Workspace` is included in the
    /// syncback or not. Defaults to `false`.
    #[serde(skip_serializing_if = "Option::is_none")]
    sync_current_camera: Option<bool>,
    /// Whether or not to sync properties that cannot be modified via scripts.
    /// Defaults to `true`.
    #[serde(skip_serializing_if = "Option::is_none")]
    sync_unscriptable: Option<bool>,
    /// Whether to skip serializing referent properties like `Model.PrimaryPart`
    /// during syncback. Defaults to `false`.
    #[serde(skip_serializing_if = "Option::is_none")]
    ignore_referents: Option<bool>,
    /// Whether the globs specified in `ignore_paths` should be modified to also
    /// match directories. Defaults to `true`.
    ///
    /// If this is `true`, it'll take ignore globs that end in `/**` and convert
    /// them to also handle the directory they're referring to. This is
    /// generally a better UX.
    #[serde(skip_serializing_if = "Option::is_none")]
    create_ignore_dir_paths: Option<bool>,
}
|
|
|
|
impl SyncbackRules {
|
|
pub fn compile_globs(&self) -> anyhow::Result<Vec<Glob>> {
|
|
let mut globs = Vec::with_capacity(self.ignore_paths.len());
|
|
let dir_ignore_paths = self.create_ignore_dir_paths.unwrap_or(true);
|
|
|
|
for pattern in &self.ignore_paths {
|
|
let glob = Glob::new(pattern)
|
|
.with_context(|| format!("the pattern '{pattern}' is not a valid glob"))?;
|
|
globs.push(glob);
|
|
|
|
if dir_ignore_paths {
|
|
if let Some(dir_pattern) = pattern.strip_suffix("/**") {
|
|
if let Ok(glob) = Glob::new(dir_pattern) {
|
|
globs.push(glob)
|
|
}
|
|
}
|
|
}
|
|
}
|
|
|
|
Ok(globs)
|
|
}
|
|
}
|
|
|
|
fn is_valid_path(globs: &Option<Vec<Glob>>, base_path: &Path, path: &Path) -> bool {
|
|
let git_glob = GIT_IGNORE_GLOB.get_or_init(|| Glob::new(".git/**").unwrap());
|
|
let test_path = match path.strip_prefix(base_path) {
|
|
Ok(suffix) => suffix,
|
|
Err(_) => path,
|
|
};
|
|
if git_glob.is_match(test_path) {
|
|
return false;
|
|
}
|
|
if let Some(ref ignore_paths) = globs {
|
|
for glob in ignore_paths {
|
|
if glob.is_match(test_path) {
|
|
return false;
|
|
}
|
|
}
|
|
}
|
|
true
|
|
}
|
|
|
|
/// Returns a set of properties that should not be written with syncback if
|
|
/// one exists. This list is read directly from the Project and takes
|
|
/// inheritance into effect.
|
|
///
|
|
/// It **does not** handle properties that should not serialize for other
|
|
/// reasons, such as being defaults or being marked as not serializing in the
|
|
/// ReflectionDatabase.
|
|
fn get_property_filter(project: &Project, new_inst: &Instance) -> Option<UstrSet> {
|
|
let filter = &project.syncback_rules.as_ref()?.ignore_properties;
|
|
let mut set = UstrSet::default();
|
|
|
|
let database = rbx_reflection_database::get().unwrap();
|
|
let mut current_class_name = new_inst.class.as_str();
|
|
|
|
loop {
|
|
if let Some(list) = filter.get(&ustr(current_class_name)) {
|
|
set.extend(list)
|
|
}
|
|
|
|
let class = database.classes.get(current_class_name)?;
|
|
if let Some(super_class) = class.superclass.as_ref() {
|
|
current_class_name = super_class;
|
|
} else {
|
|
break;
|
|
}
|
|
}
|
|
|
|
Some(set)
|
|
}
|
|
|
|
/// Produces a list of descendants in the WeakDom such that all children come
|
|
/// before their parents.
|
|
fn descendants(dom: &WeakDom, root_ref: Ref) -> Vec<Ref> {
|
|
let mut queue = VecDeque::new();
|
|
let mut ordered = Vec::new();
|
|
queue.push_front(root_ref);
|
|
|
|
while let Some(referent) = queue.pop_front() {
|
|
let inst = dom
|
|
.get_by_ref(referent)
|
|
.expect("Invariant: WeakDom had a Ref that wasn't inside it");
|
|
ordered.push(referent);
|
|
for child in inst.children() {
|
|
queue.push_back(*child)
|
|
}
|
|
}
|
|
|
|
ordered
|
|
}
|
|
|
|
/// Removes the children of `new`'s root that are not also children of `old`'s
|
|
/// root.
|
|
///
|
|
/// This does not care about duplicates, and only filters based on names and
|
|
/// class names.
|
|
fn strip_unknown_root_children(new: &mut WeakDom, old: &RojoTree) {
|
|
let old_root = old.root();
|
|
let old_root_children: HashMap<&str, InstanceWithMeta> = old_root
|
|
.children()
|
|
.iter()
|
|
.map(|referent| {
|
|
let inst = old
|
|
.get_instance(*referent)
|
|
.expect("all children of a DOM's root should exist");
|
|
(inst.name(), inst)
|
|
})
|
|
.collect();
|
|
|
|
let root_children = new.root().children().to_vec();
|
|
|
|
for child_ref in root_children {
|
|
let child = new
|
|
.get_by_ref(child_ref)
|
|
.expect("all children of the root should exist in the DOM");
|
|
if let Some(old) = old_root_children.get(child.name.as_str()) {
|
|
if old.class_name() == child.class {
|
|
continue;
|
|
}
|
|
}
|
|
log::trace!("Pruning root child {} of class {}", child.name, child.class);
|
|
new.destroy(child_ref);
|
|
}
|
|
}
|