Compare commits

...

3 Commits

Author SHA1 Message Date
8053909bd0 Add --git-since option to rojo serve
- Add new GitFilter struct for tracking files changed since a Git reference
- Only sync changed (added/deleted/modified) files to Roblox Studio
- Files remain acknowledged once synced, even if content is reverted
- Add enhanced logging for debugging sync issues
- Force acknowledge project structure to prevent 'Cannot sync a model as a place' errors
2026-01-19 22:02:59 +01:00
Micah
d08780fc14 Ensure that pruned Instances aren't treated as existing in syncback (#1179)
Closes #1178.
2025-11-29 21:21:48 -08:00
Micah
b89cc7f398 Release memofs v0.3.1 (#1175) 2025-11-27 12:32:57 -08:00
28 changed files with 662 additions and 31 deletions

View File

@@ -30,6 +30,9 @@ Making a new release? Simply add the new header with the version and date undern
--> -->
## Unreleased ## Unreleased
* Fixed a bug caused by having reference properties (such as `ObjectValue.Value`) that point to an Instance not included in syncback. ([#1179])
[#1179]: https://github.com/rojo-rbx/rojo/pull/1179
## [7.7.0-rc.1] (November 27th, 2025) ## [7.7.0-rc.1] (November 27th, 2025)

2
Cargo.lock generated
View File

@@ -1313,7 +1313,7 @@ checksum = "f52b00d39961fc5b2736ea853c9cc86238e165017a493d1d5c8eac6bdc4cc273"
[[package]] [[package]]
name = "memofs" name = "memofs"
version = "0.3.0" version = "0.3.1"
dependencies = [ dependencies = [
"crossbeam-channel", "crossbeam-channel",
"fs-err", "fs-err",

View File

@@ -46,7 +46,7 @@ name = "build"
harness = false harness = false
[dependencies] [dependencies]
memofs = { version = "0.3.0", path = "crates/memofs" } memofs = { version = "0.3.1", path = "crates/memofs" }
# These dependencies can be uncommented when working on rbx-dom simultaneously # These dependencies can be uncommented when working on rbx-dom simultaneously
# rbx_binary = { path = "../rbx-dom/rbx_binary", features = [ # rbx_binary = { path = "../rbx-dom/rbx_binary", features = [

View File

@@ -1,6 +1,8 @@
# memofs Changelog # memofs Changelog
## Unreleased Changes ## Unreleased Changes
## 0.3.1 (2025-11-27)
* Added `Vfs::exists`. [#1169] * Added `Vfs::exists`. [#1169]
* Added `create_dir` and `create_dir_all` to allow creating directories. [#937] * Added `create_dir` and `create_dir_all` to allow creating directories. [#937]

View File

@@ -1,7 +1,7 @@
[package] [package]
name = "memofs" name = "memofs"
description = "Virtual filesystem with configurable backends." description = "Virtual filesystem with configurable backends."
version = "0.3.0" version = "0.3.1"
authors = [ authors = [
"Lucien Greathouse <me@lpghatguy.com>", "Lucien Greathouse <me@lpghatguy.com>",
"Micah Reid <git@dekkonot.com>", "Micah Reid <git@dekkonot.com>",

View File

@@ -31,17 +31,11 @@ impl VfsBackend for NoopBackend {
} }
fn create_dir(&mut self, _path: &Path) -> io::Result<()> { fn create_dir(&mut self, _path: &Path) -> io::Result<()> {
Err(io::Error::new( Err(io::Error::other("NoopBackend doesn't do anything"))
io::ErrorKind::Other,
"NoopBackend doesn't do anything",
))
} }
fn create_dir_all(&mut self, _path: &Path) -> io::Result<()> { fn create_dir_all(&mut self, _path: &Path) -> io::Result<()> {
Err(io::Error::new( Err(io::Error::other("NoopBackend doesn't do anything"))
io::ErrorKind::Other,
"NoopBackend doesn't do anything",
))
} }
fn remove_file(&mut self, _path: &Path) -> io::Result<()> { fn remove_file(&mut self, _path: &Path) -> io::Result<()> {

View File

@@ -0,0 +1,7 @@
---
source: tests/rojo_test/syncback_util.rs
expression: "String::from_utf8_lossy(&output.stdout)"
---
Writing src/Pointer1.model.json
Writing src/Pointer2.model.json
Writing src/Pointer3.model.json

View File

@@ -0,0 +1,7 @@
---
source: tests/tests/syncback.rs
expression: src/Pointer1.model.json
---
{
"className": "ObjectValue"
}

View File

@@ -0,0 +1,7 @@
---
source: tests/tests/syncback.rs
expression: src/Pointer2.model.json
---
{
"className": "ObjectValue"
}

View File

@@ -0,0 +1,7 @@
---
source: tests/tests/syncback.rs
expression: src/Pointer3.model.json
---
{
"className": "ObjectValue"
}

View File

@@ -0,0 +1,9 @@
{
"name": "ref_properties_pruned",
"tree": {
"$className": "DataModel",
"ServerScriptService": {
"$path": "src"
}
}
}

View File

@@ -9,6 +9,7 @@ use memofs::{IoResultExt, Vfs, VfsEvent};
use rbx_dom_weak::types::{Ref, Variant}; use rbx_dom_weak::types::{Ref, Variant};
use crate::{ use crate::{
git::SharedGitFilter,
message_queue::MessageQueue, message_queue::MessageQueue,
snapshot::{ snapshot::{
apply_patch_set, compute_patch_set, AppliedPatchSet, InstigatingSource, PatchSet, RojoTree, apply_patch_set, compute_patch_set, AppliedPatchSet, InstigatingSource, PatchSet, RojoTree,
@@ -46,11 +47,15 @@ pub struct ChangeProcessor {
impl ChangeProcessor { impl ChangeProcessor {
/// Spin up the ChangeProcessor, connecting it to the given tree, VFS, and /// Spin up the ChangeProcessor, connecting it to the given tree, VFS, and
/// outbound message queue. /// outbound message queue.
///
/// If `git_filter` is provided, it will be refreshed on every VFS event
/// to ensure newly changed files are acknowledged.
pub fn start( pub fn start(
tree: Arc<Mutex<RojoTree>>, tree: Arc<Mutex<RojoTree>>,
vfs: Arc<Vfs>, vfs: Arc<Vfs>,
message_queue: Arc<MessageQueue<AppliedPatchSet>>, message_queue: Arc<MessageQueue<AppliedPatchSet>>,
tree_mutation_receiver: Receiver<PatchSet>, tree_mutation_receiver: Receiver<PatchSet>,
git_filter: Option<SharedGitFilter>,
) -> Self { ) -> Self {
let (shutdown_sender, shutdown_receiver) = crossbeam_channel::bounded(1); let (shutdown_sender, shutdown_receiver) = crossbeam_channel::bounded(1);
let vfs_receiver = vfs.event_receiver(); let vfs_receiver = vfs.event_receiver();
@@ -58,6 +63,7 @@ impl ChangeProcessor {
tree, tree,
vfs, vfs,
message_queue, message_queue,
git_filter,
}; };
let job_thread = jod_thread::Builder::new() let job_thread = jod_thread::Builder::new()
@@ -111,12 +117,24 @@ struct JobThreadContext {
/// Whenever changes are applied to the DOM, we should push those changes /// Whenever changes are applied to the DOM, we should push those changes
/// into this message queue to inform any connected clients. /// into this message queue to inform any connected clients.
message_queue: Arc<MessageQueue<AppliedPatchSet>>, message_queue: Arc<MessageQueue<AppliedPatchSet>>,
/// Optional Git filter for --git-since mode. When set, will be refreshed
/// on every VFS event to ensure newly changed files are acknowledged.
git_filter: Option<SharedGitFilter>,
} }
impl JobThreadContext { impl JobThreadContext {
fn handle_vfs_event(&self, event: VfsEvent) { fn handle_vfs_event(&self, event: VfsEvent) {
log::trace!("Vfs event: {:?}", event); log::trace!("Vfs event: {:?}", event);
// If we have a git filter, refresh it to pick up any new changes.
// This ensures that files modified during the session will be acknowledged.
if let Some(ref git_filter) = self.git_filter {
if let Err(err) = git_filter.refresh() {
log::warn!("Failed to refresh git filter: {:?}", err);
}
}
// Update the VFS immediately with the event. // Update the VFS immediately with the event.
self.vfs self.vfs
.commit_event(&event) .commit_event(&event)
@@ -151,6 +169,19 @@ impl JobThreadContext {
} }
}; };
if affected_ids.is_empty() {
log::debug!(
"No instances found for path {} or any of its ancestors",
path.display()
);
} else {
log::debug!(
"Found {} affected instances for path {}",
affected_ids.len(),
path.display()
);
}
for id in affected_ids { for id in affected_ids {
if let Some(patch) = compute_and_apply_changes(&mut tree, &self.vfs, id) { if let Some(patch) = compute_and_apply_changes(&mut tree, &self.vfs, id) {
if !patch.is_empty() { if !patch.is_empty() {

View File

@@ -81,7 +81,7 @@ impl BuildCommand {
let vfs = Vfs::new_default(); let vfs = Vfs::new_default();
vfs.set_watch_enabled(self.watch); vfs.set_watch_enabled(self.watch);
let session = ServeSession::new(vfs, project_path)?; let session = ServeSession::new(vfs, project_path, None)?;
let mut cursor = session.message_queue().cursor(); let mut cursor = session.message_queue().cursor();
write_model(&session, &output_path, output_kind)?; write_model(&session, &output_path, output_kind)?;

View File

@@ -54,7 +54,7 @@ fn initialize_plugin() -> anyhow::Result<ServeSession> {
in_memory_fs.load_snapshot("/plugin", plugin_snapshot)?; in_memory_fs.load_snapshot("/plugin", plugin_snapshot)?;
let vfs = Vfs::new(in_memory_fs); let vfs = Vfs::new(in_memory_fs);
Ok(ServeSession::new(vfs, "/plugin")?) Ok(ServeSession::new(vfs, "/plugin", None)?)
} }
fn install_plugin() -> anyhow::Result<()> { fn install_plugin() -> anyhow::Result<()> {

View File

@@ -9,7 +9,7 @@ use clap::Parser;
use memofs::Vfs; use memofs::Vfs;
use termcolor::{BufferWriter, Color, ColorChoice, ColorSpec, WriteColor}; use termcolor::{BufferWriter, Color, ColorChoice, ColorSpec, WriteColor};
use crate::{serve_session::ServeSession, web::LiveServer}; use crate::{git::GitFilter, serve_session::ServeSession, web::LiveServer};
use super::{resolve_path, GlobalOptions}; use super::{resolve_path, GlobalOptions};
@@ -31,6 +31,19 @@ pub struct ServeCommand {
/// it has none. /// it has none.
#[clap(long)] #[clap(long)]
pub port: Option<u16>, pub port: Option<u16>,
/// Only sync files that have changed since the given Git reference.
///
/// When this option is set, Rojo will only include files that have been
/// modified, added, or are untracked since the specified Git reference
/// (e.g., "HEAD", "main", a commit hash). This is useful for working with
/// large projects where you only want to sync your local changes.
///
/// Scripts that have not changed will still be acknowledged if modified
/// during the session, and all synced instances will have
/// ignoreUnknownInstances set to true to preserve descendants in Studio.
#[clap(long, value_name = "REF")]
pub git_since: Option<String>,
} }
impl ServeCommand { impl ServeCommand {
@@ -39,7 +52,19 @@ impl ServeCommand {
let vfs = Vfs::new_default(); let vfs = Vfs::new_default();
let session = Arc::new(ServeSession::new(vfs, project_path)?); // Set up Git filter if --git-since was specified
let git_filter = if let Some(ref base_ref) = self.git_since {
let repo_root = GitFilter::find_repo_root(&project_path)?;
log::info!(
"Git filter enabled: only syncing files changed since '{}'",
base_ref
);
Some(Arc::new(GitFilter::new(repo_root, base_ref.clone(), &project_path)?))
} else {
None
};
let session = Arc::new(ServeSession::new(vfs, project_path, git_filter)?);
let ip = self let ip = self
.address .address
@@ -53,17 +78,25 @@ impl ServeCommand {
let server = LiveServer::new(session); let server = LiveServer::new(session);
let _ = show_start_message(ip, port, global.color.into()); let _ = show_start_message(ip, port, self.git_since.as_deref(), global.color.into());
server.start((ip, port).into()); server.start((ip, port).into());
Ok(()) Ok(())
} }
} }
fn show_start_message(bind_address: IpAddr, port: u16, color: ColorChoice) -> io::Result<()> { fn show_start_message(
bind_address: IpAddr,
port: u16,
git_since: Option<&str>,
color: ColorChoice,
) -> io::Result<()> {
let mut green = ColorSpec::new(); let mut green = ColorSpec::new();
green.set_fg(Some(Color::Green)).set_bold(true); green.set_fg(Some(Color::Green)).set_bold(true);
let mut yellow = ColorSpec::new();
yellow.set_fg(Some(Color::Yellow)).set_bold(true);
let writer = BufferWriter::stdout(color); let writer = BufferWriter::stdout(color);
let mut buffer = writer.buffer(); let mut buffer = writer.buffer();
@@ -84,6 +117,13 @@ fn show_start_message(bind_address: IpAddr, port: u16, color: ColorChoice) -> io
buffer.set_color(&green)?; buffer.set_color(&green)?;
writeln!(&mut buffer, "{}", port)?; writeln!(&mut buffer, "{}", port)?;
if let Some(base_ref) = git_since {
buffer.set_color(&ColorSpec::new())?;
write!(&mut buffer, " Mode: ")?;
buffer.set_color(&yellow)?;
writeln!(&mut buffer, "git-since ({})", base_ref)?;
}
writeln!(&mut buffer)?; writeln!(&mut buffer)?;
buffer.set_color(&ColorSpec::new())?; buffer.set_color(&ColorSpec::new())?;

View File

@@ -76,7 +76,7 @@ impl SourcemapCommand {
let vfs = Vfs::new_default(); let vfs = Vfs::new_default();
vfs.set_watch_enabled(self.watch); vfs.set_watch_enabled(self.watch);
let session = ServeSession::new(vfs, project_path)?; let session = ServeSession::new(vfs, project_path, None)?;
let mut cursor = session.message_queue().cursor(); let mut cursor = session.message_queue().cursor();
let filter = if self.include_non_scripts { let filter = if self.include_non_scripts {

View File

@@ -73,7 +73,7 @@ impl SyncbackCommand {
vfs.set_watch_enabled(false); vfs.set_watch_enabled(false);
let project_start_timer = Instant::now(); let project_start_timer = Instant::now();
let session_old = ServeSession::new(vfs, path_old.clone())?; let session_old = ServeSession::new(vfs, path_old.clone(), None)?;
log::debug!( log::debug!(
"Finished opening project in {:0.02}s", "Finished opening project in {:0.02}s",
project_start_timer.elapsed().as_secs_f32() project_start_timer.elapsed().as_secs_f32()

View File

@@ -42,7 +42,7 @@ impl UploadCommand {
let vfs = Vfs::new_default(); let vfs = Vfs::new_default();
let session = ServeSession::new(vfs, project_path)?; let session = ServeSession::new(vfs, project_path, None)?;
let tree = session.tree(); let tree = session.tree();
let inner_tree = tree.inner(); let inner_tree = tree.inner();

380
src/git.rs Normal file
View File

@@ -0,0 +1,380 @@
//! Git integration for filtering files based on changes since a reference.
use std::{
collections::HashSet,
path::{Path, PathBuf},
process::Command,
sync::{Arc, RwLock},
};
use anyhow::{bail, Context};
/// A filter that tracks which files have been changed since a Git reference.
///
/// When active, only files that have been modified, added, or deleted according
/// to Git will be "acknowledged" and synced to Studio. This allows users to
/// work with large projects where they only want to sync their local changes.
///
/// Once a file is acknowledged (either initially or during the session), it
/// stays acknowledged for the entire session. This prevents files from being
/// deleted in Studio if their content is reverted to match the git reference.
#[derive(Debug)]
pub struct GitFilter {
    /// The Git repository root directory.
    repo_root: PathBuf,
    /// The Git reference to compare against (e.g., "HEAD", "main", a commit hash).
    base_ref: String,
    /// Cache of paths that are currently different from the base ref according to git.
    /// This is refreshed on every VFS event.
    ///
    /// NOTE(review): in this file the cache is only ever written (by
    /// `refresh`); `is_acknowledged` consults `session_acknowledged_paths`
    /// instead. Confirm whether this field is still needed.
    git_changed_paths: RwLock<HashSet<PathBuf>>,
    /// Paths that have been acknowledged at any point during this session.
    /// Once a path is added here, it stays acknowledged forever (for this session).
    /// This prevents files from being deleted if their content is reverted.
    session_acknowledged_paths: RwLock<HashSet<PathBuf>>,
}
impl GitFilter {
/// Creates a new GitFilter for the given repository root and base reference.
///
/// The `repo_root` should be the root of the Git repository (where .git is located).
/// The `base_ref` is the Git reference to compare against (e.g., "HEAD", "main").
/// The `project_path` is the path to the project being served - it will always be
/// acknowledged regardless of git status to ensure the project structure exists.
pub fn new(repo_root: PathBuf, base_ref: String, project_path: &Path) -> anyhow::Result<Self> {
let filter = Self {
repo_root,
base_ref,
git_changed_paths: RwLock::new(HashSet::new()),
session_acknowledged_paths: RwLock::new(HashSet::new()),
};
// Always acknowledge the project path and its directory so the project
// structure exists even when there are no git changes
filter.acknowledge_project_path(project_path);
// Initial refresh to populate the cache with git changes
filter.refresh()?;
Ok(filter)
}
/// Acknowledges the project path and its containing directory.
/// This ensures the project structure always exists regardless of git status.
fn acknowledge_project_path(&self, project_path: &Path) {
let mut session = self.session_acknowledged_paths.write().unwrap();
// Acknowledge the project path itself (might be a directory or .project.json file)
let canonical = project_path.canonicalize().unwrap_or_else(|_| project_path.to_path_buf());
session.insert(canonical.clone());
// Acknowledge all ancestor directories
let mut current = canonical.parent();
while let Some(parent) = current {
session.insert(parent.to_path_buf());
current = parent.parent();
}
// If it's a directory, also acknowledge default.project.json inside it
if project_path.is_dir() {
for name in &["default.project.json", "default.project.jsonc"] {
let project_file = project_path.join(name);
if let Ok(canonical_file) = project_file.canonicalize() {
session.insert(canonical_file);
} else {
session.insert(project_file);
}
}
}
// If it's a .project.json file, also acknowledge its parent directory
if let Some(parent) = project_path.parent() {
let parent_canonical = parent.canonicalize().unwrap_or_else(|_| parent.to_path_buf());
session.insert(parent_canonical);
}
log::debug!(
"GitFilter: acknowledged project path {} ({} paths total)",
project_path.display(),
session.len()
);
}
/// Finds the Git repository root for the given path.
pub fn find_repo_root(path: &Path) -> anyhow::Result<PathBuf> {
let output = Command::new("git")
.args(["rev-parse", "--show-toplevel"])
.current_dir(path)
.output()
.context("Failed to execute git rev-parse")?;
if !output.status.success() {
let stderr = String::from_utf8_lossy(&output.stderr);
bail!("Failed to find Git repository root: {}", stderr.trim());
}
let root = String::from_utf8_lossy(&output.stdout)
.trim()
.to_string();
Ok(PathBuf::from(root))
}
/// Refreshes the cache of acknowledged paths by querying Git.
///
/// This should be called when files change to ensure newly modified files
/// are properly acknowledged. Once a path is acknowledged, it stays
/// acknowledged for the entire session (even if the file is reverted).
pub fn refresh(&self) -> anyhow::Result<()> {
let mut git_changed = HashSet::new();
// Get files changed since the base ref (modified, added, deleted)
let diff_output = Command::new("git")
.args(["diff", "--name-only", &self.base_ref])
.current_dir(&self.repo_root)
.output()
.context("Failed to execute git diff")?;
if !diff_output.status.success() {
let stderr = String::from_utf8_lossy(&diff_output.stderr);
bail!("git diff failed: {}", stderr.trim());
}
let diff_files = String::from_utf8_lossy(&diff_output.stdout);
let diff_count = diff_files.lines().filter(|l| !l.is_empty()).count();
if diff_count > 0 {
log::debug!("git diff found {} changed files", diff_count);
}
for line in diff_files.lines() {
if !line.is_empty() {
let path = self.repo_root.join(line);
log::trace!("git diff: acknowledging {}", path.display());
self.acknowledge_path(&path, &mut git_changed);
}
}
// Get untracked files (new files not yet committed)
let untracked_output = Command::new("git")
.args(["ls-files", "--others", "--exclude-standard"])
.current_dir(&self.repo_root)
.output()
.context("Failed to execute git ls-files")?;
if !untracked_output.status.success() {
let stderr = String::from_utf8_lossy(&untracked_output.stderr);
bail!("git ls-files failed: {}", stderr.trim());
}
let untracked_files = String::from_utf8_lossy(&untracked_output.stdout);
for line in untracked_files.lines() {
if !line.is_empty() {
let path = self.repo_root.join(line);
self.acknowledge_path(&path, &mut git_changed);
}
}
// Get staged files (files added to index but not yet committed)
let staged_output = Command::new("git")
.args(["diff", "--name-only", "--cached", &self.base_ref])
.current_dir(&self.repo_root)
.output()
.context("Failed to execute git diff --cached")?;
if staged_output.status.success() {
let staged_files = String::from_utf8_lossy(&staged_output.stdout);
for line in staged_files.lines() {
if !line.is_empty() {
let path = self.repo_root.join(line);
self.acknowledge_path(&path, &mut git_changed);
}
}
}
// Update the git changed paths cache
{
let mut cache = self.git_changed_paths.write().unwrap();
*cache = git_changed.clone();
}
// Merge newly changed paths into session acknowledged paths
// Once acknowledged, a path stays acknowledged for the entire session
{
let mut session = self.session_acknowledged_paths.write().unwrap();
for path in git_changed {
session.insert(path);
}
log::debug!(
"GitFilter refreshed: {} paths acknowledged in session",
session.len()
);
}
Ok(())
}
/// Acknowledges a path and all its ancestors, plus associated meta files.
fn acknowledge_path(&self, path: &Path, acknowledged: &mut HashSet<PathBuf>) {
// Canonicalize the path if possible, otherwise use as-is
let path = path.canonicalize().unwrap_or_else(|_| path.to_path_buf());
// Add the path itself
acknowledged.insert(path.clone());
// Add all ancestor directories
let mut current = path.parent();
while let Some(parent) = current {
acknowledged.insert(parent.to_path_buf());
current = parent.parent();
}
// Add associated meta files
self.acknowledge_meta_files(&path, acknowledged);
}
/// Acknowledges associated meta files for a given path.
fn acknowledge_meta_files(&self, path: &Path, acknowledged: &mut HashSet<PathBuf>) {
if let Some(file_name) = path.file_name().and_then(|n| n.to_str()) {
if let Some(parent) = path.parent() {
// For a file like "foo.lua", also acknowledge "foo.meta.json"
// Strip known extensions to get the base name
let base_name = strip_lua_extension(file_name);
let meta_path = parent.join(format!("{}.meta.json", base_name));
if let Ok(canonical) = meta_path.canonicalize() {
acknowledged.insert(canonical);
} else {
acknowledged.insert(meta_path);
}
// For init files, also acknowledge "init.meta.json" in the same directory
if file_name.starts_with("init.") {
let init_meta = parent.join("init.meta.json");
if let Ok(canonical) = init_meta.canonicalize() {
acknowledged.insert(canonical);
} else {
acknowledged.insert(init_meta);
}
}
}
}
}
/// Checks if a path is acknowledged (should be synced).
///
/// Returns `true` if the path or any of its descendants have been changed
/// at any point during this session. Once a file is acknowledged, it stays
/// acknowledged even if its content is reverted to match the git reference.
pub fn is_acknowledged(&self, path: &Path) -> bool {
let session = self.session_acknowledged_paths.read().unwrap();
// Try to canonicalize the path
let canonical = path.canonicalize().unwrap_or_else(|_| path.to_path_buf());
// Check if this exact path is acknowledged
if session.contains(&canonical) {
log::trace!("Path {} is directly acknowledged", path.display());
return true;
}
// Also check without canonicalization in case of path differences
if session.contains(path) {
log::trace!("Path {} is acknowledged (non-canonical)", path.display());
return true;
}
// For directories, check if any descendant is acknowledged
// This is done by checking if any acknowledged path starts with this path
for acknowledged in session.iter() {
if acknowledged.starts_with(&canonical) {
log::trace!(
"Path {} has acknowledged descendant {}",
path.display(),
acknowledged.display()
);
return true;
}
// Also check non-canonical
if acknowledged.starts_with(path) {
log::trace!(
"Path {} has acknowledged descendant {} (non-canonical)",
path.display(),
acknowledged.display()
);
return true;
}
}
log::trace!(
"Path {} is NOT acknowledged (canonical: {})",
path.display(),
canonical.display()
);
false
}
/// Returns the base reference being compared against.
pub fn base_ref(&self) -> &str {
&self.base_ref
}
/// Returns the repository root path.
pub fn repo_root(&self) -> &Path {
&self.repo_root
}
/// Explicitly acknowledges a path and all its ancestors.
/// This is useful for ensuring certain paths are always synced regardless of git status.
pub fn force_acknowledge(&self, path: &Path) {
let mut acknowledged = HashSet::new();
self.acknowledge_path(path, &mut acknowledged);
let mut session = self.session_acknowledged_paths.write().unwrap();
for p in acknowledged {
session.insert(p);
}
}
}
/// Strips Lua-related extensions from a file name to get the base name.
///
/// If the name has no Lua extension, the regular (final) extension is
/// stripped instead. Names with no extension at all — including dotfiles
/// like ".gitignore", whose only '.' is the leading one — are returned
/// unchanged, so the result is never empty for a non-empty input.
fn strip_lua_extension(file_name: &str) -> &str {
    const EXTENSIONS: &[&str] = &[
        ".server.luau",
        ".server.lua",
        ".client.luau",
        ".client.lua",
        ".luau",
        ".lua",
    ];

    for ext in EXTENSIONS {
        if let Some(base) = file_name.strip_suffix(ext) {
            // Guard against a file named exactly like an extension (".lua")
            // producing an empty base name.
            if !base.is_empty() {
                return base;
            }
        }
    }

    // No Lua extension: strip the final extension, but never return an empty
    // base (which would previously happen for dotfiles like ".gitignore" and
    // would yield a bogus ".meta.json" path).
    match file_name.rsplit_once('.') {
        Some((base, _)) if !base.is_empty() => base,
        _ => file_name,
    }
}
/// A reference-counted `GitFilter` that can be cheaply cloned and shared
/// across threads (e.g., between the serve session and the change processor).
pub type SharedGitFilter = Arc<GitFilter>;
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_strip_lua_extension() {
        // Table of (input file name, expected base name) pairs covering Lua
        // extensions, plain extensions, and extensionless names.
        let cases = [
            ("foo.server.lua", "foo"),
            ("foo.client.luau", "foo"),
            ("foo.lua", "foo"),
            ("init.server.lua", "init"),
            ("bar.txt", "bar"),
            ("noextension", "noextension"),
        ];

        for &(input, expected) in cases.iter() {
            assert_eq!(strip_lua_extension(input), expected);
        }
    }
}

View File

@@ -9,6 +9,7 @@ mod tree_view;
mod auth_cookie; mod auth_cookie;
mod change_processor; mod change_processor;
mod git;
mod glob; mod glob;
mod json; mod json;
mod lua_ast; mod lua_ast;
@@ -28,6 +29,7 @@ mod web;
// TODO: Work out what we should expose publicly // TODO: Work out what we should expose publicly
pub use git::{GitFilter, SharedGitFilter};
pub use project::*; pub use project::*;
pub use rojo_ref::*; pub use rojo_ref::*;
pub use session_id::SessionId; pub use session_id::SessionId;

View File

@@ -13,6 +13,7 @@ use thiserror::Error;
use crate::{ use crate::{
change_processor::ChangeProcessor, change_processor::ChangeProcessor,
git::SharedGitFilter,
message_queue::MessageQueue, message_queue::MessageQueue,
project::{Project, ProjectError}, project::{Project, ProjectError},
session_id::SessionId, session_id::SessionId,
@@ -94,7 +95,14 @@ impl ServeSession {
/// The project file is expected to be loaded out-of-band since it's /// The project file is expected to be loaded out-of-band since it's
/// currently loaded from the filesystem directly instead of through the /// currently loaded from the filesystem directly instead of through the
/// in-memory filesystem layer. /// in-memory filesystem layer.
pub fn new<P: AsRef<Path>>(vfs: Vfs, start_path: P) -> Result<Self, ServeSessionError> { ///
/// If `git_filter` is provided, only files that have changed since the
/// specified Git reference will be synced.
pub fn new<P: AsRef<Path>>(
vfs: Vfs,
start_path: P,
git_filter: Option<SharedGitFilter>,
) -> Result<Self, ServeSessionError> {
let start_path = start_path.as_ref(); let start_path = start_path.as_ref();
let start_time = Instant::now(); let start_time = Instant::now();
@@ -102,12 +110,28 @@ impl ServeSession {
let root_project = Project::load_initial_project(&vfs, start_path)?; let root_project = Project::load_initial_project(&vfs, start_path)?;
// If git filter is active, ensure the project file location is acknowledged
// This is necessary so the project structure exists even with no git changes
if let Some(ref filter) = git_filter {
filter.force_acknowledge(start_path);
filter.force_acknowledge(&root_project.file_location);
filter.force_acknowledge(root_project.folder_location());
log::debug!(
"Force acknowledged project at {}",
root_project.file_location.display()
);
}
let mut tree = RojoTree::new(InstanceSnapshot::new()); let mut tree = RojoTree::new(InstanceSnapshot::new());
let root_id = tree.get_root_id(); let root_id = tree.get_root_id();
let instance_context = let instance_context = match &git_filter {
InstanceContext::with_emit_legacy_scripts(root_project.emit_legacy_scripts); Some(filter) => {
InstanceContext::with_git_filter(root_project.emit_legacy_scripts, Arc::clone(filter))
}
None => InstanceContext::with_emit_legacy_scripts(root_project.emit_legacy_scripts),
};
log::trace!("Generating snapshot of instances from VFS"); log::trace!("Generating snapshot of instances from VFS");
let snapshot = snapshot_from_vfs(&instance_context, &vfs, start_path)?; let snapshot = snapshot_from_vfs(&instance_context, &vfs, start_path)?;
@@ -133,6 +157,7 @@ impl ServeSession {
Arc::clone(&vfs), Arc::clone(&vfs),
Arc::clone(&message_queue), Arc::clone(&message_queue),
tree_mutation_receiver, tree_mutation_receiver,
git_filter,
); );
Ok(Self { Ok(Self {

View File

@@ -8,6 +8,7 @@ use anyhow::Context;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use crate::{ use crate::{
git::SharedGitFilter,
glob::Glob, glob::Glob,
path_serializer, path_serializer,
project::ProjectNode, project::ProjectNode,
@@ -138,13 +139,27 @@ impl Default for InstanceMetadata {
} }
} }
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] #[derive(Debug, Clone, Serialize, Deserialize)]
pub struct InstanceContext { pub struct InstanceContext {
#[serde(skip_serializing_if = "Vec::is_empty")] #[serde(skip_serializing_if = "Vec::is_empty")]
pub path_ignore_rules: Arc<Vec<PathIgnoreRule>>, pub path_ignore_rules: Arc<Vec<PathIgnoreRule>>,
pub emit_legacy_scripts: bool, pub emit_legacy_scripts: bool,
#[serde(skip_serializing_if = "Vec::is_empty")] #[serde(skip_serializing_if = "Vec::is_empty")]
pub sync_rules: Vec<SyncRule>, pub sync_rules: Vec<SyncRule>,
/// Optional Git filter for --git-since mode. When set, only files that have
/// changed since the specified Git reference will be synced.
#[serde(skip)]
pub git_filter: Option<SharedGitFilter>,
}
impl PartialEq for InstanceContext {
fn eq(&self, other: &Self) -> bool {
// Note: git_filter is intentionally excluded from comparison
// since it's runtime state, not configuration
self.path_ignore_rules == other.path_ignore_rules
&& self.emit_legacy_scripts == other.emit_legacy_scripts
&& self.sync_rules == other.sync_rules
}
} }
impl InstanceContext { impl InstanceContext {
@@ -153,6 +168,7 @@ impl InstanceContext {
path_ignore_rules: Arc::new(Vec::new()), path_ignore_rules: Arc::new(Vec::new()),
emit_legacy_scripts: emit_legacy_scripts_default().unwrap(), emit_legacy_scripts: emit_legacy_scripts_default().unwrap(),
sync_rules: Vec::new(), sync_rules: Vec::new(),
git_filter: None,
} }
} }
@@ -165,6 +181,36 @@ impl InstanceContext {
} }
} }
/// Creates a new InstanceContext with a Git filter for --git-since mode.
pub fn with_git_filter(
emit_legacy_scripts: Option<bool>,
git_filter: SharedGitFilter,
) -> Self {
Self {
git_filter: Some(git_filter),
..Self::with_emit_legacy_scripts(emit_legacy_scripts)
}
}
/// Sets the Git filter for this context.
pub fn set_git_filter(&mut self, git_filter: Option<SharedGitFilter>) {
self.git_filter = git_filter;
}
/// Returns true if the given path should be acknowledged (synced).
/// If no git filter is set, all paths are acknowledged.
pub fn is_path_acknowledged(&self, path: &Path) -> bool {
match &self.git_filter {
Some(filter) => filter.is_acknowledged(path),
None => true,
}
}
/// Returns true if a git filter is active.
pub fn has_git_filter(&self) -> bool {
self.git_filter.is_some()
}
/// Extend the list of ignore rules in the context with the given new rules. /// Extend the list of ignore rules in the context with the given new rules.
pub fn add_path_ignore_rules<I>(&mut self, new_rules: I) pub fn add_path_ignore_rules<I>(&mut self, new_rules: I)
where where

View File

@@ -61,6 +61,10 @@ pub use self::{
/// This will inspect the path and find the appropriate middleware for it, /// This will inspect the path and find the appropriate middleware for it,
/// taking user-written rules into account. Then, it will attempt to convert /// taking user-written rules into account. Then, it will attempt to convert
/// the path into an InstanceSnapshot using that middleware. /// the path into an InstanceSnapshot using that middleware.
///
/// If a git filter is active in the context and the path is not acknowledged
/// (i.e., the file hasn't changed since the base git reference), this function
/// returns `Ok(None)` to skip syncing that file.
#[profiling::function] #[profiling::function]
pub fn snapshot_from_vfs( pub fn snapshot_from_vfs(
context: &InstanceContext, context: &InstanceContext,
@@ -72,6 +76,16 @@ pub fn snapshot_from_vfs(
None => return Ok(None), None => return Ok(None),
}; };
// Check if this path is acknowledged by the git filter.
// If not, skip this path entirely.
if !context.is_path_acknowledged(path) {
log::trace!(
"Skipping path {} (not acknowledged by git filter)",
path.display()
);
return Ok(None);
}
if meta.is_dir() { if meta.is_dir() {
let (middleware, dir_name, init_path) = get_dir_middleware(vfs, path)?; let (middleware, dir_name, init_path) = get_dir_middleware(vfs, path)?;
// TODO: Support user defined init paths // TODO: Support user defined init paths
@@ -213,6 +227,10 @@ pub enum Middleware {
impl Middleware { impl Middleware {
/// Creates a snapshot for the given path from the Middleware with /// Creates a snapshot for the given path from the Middleware with
/// the provided name. /// the provided name.
///
/// When a git filter is active in the context, `ignore_unknown_instances`
/// will be set to `true` on all generated snapshots to preserve descendants
/// in Studio that are not tracked by Rojo.
fn snapshot( fn snapshot(
&self, &self,
context: &InstanceContext, context: &InstanceContext,
@@ -262,6 +280,14 @@ impl Middleware {
}; };
if let Ok(Some(ref mut snapshot)) = output { if let Ok(Some(ref mut snapshot)) = output {
snapshot.metadata.middleware = Some(*self); snapshot.metadata.middleware = Some(*self);
// When git filter is active, force ignore_unknown_instances to true
// so that we don't delete children in Studio that aren't tracked.
if context.has_git_filter() {
snapshot.metadata.ignore_unknown_instances = true;
// Also apply this recursively to all children
set_ignore_unknown_instances_recursive(&mut snapshot.children);
}
} }
output output
} }
@@ -365,6 +391,16 @@ impl Middleware {
} }
} }
/// Recursively sets `ignore_unknown_instances` to `true` on all children.
///
/// Applied when a git filter is active so that descendants in Studio
/// that aren't tracked by Rojo are never deleted by a partial sync.
fn set_ignore_unknown_instances_recursive(children: &mut [InstanceSnapshot]) {
    children.iter_mut().for_each(|child| {
        child.metadata.ignore_unknown_instances = true;
        set_ignore_unknown_instances_recursive(&mut child.children);
    });
}
/// A helper for easily defining a SyncRule. Arguments are passed literally /// A helper for easily defining a SyncRule. Arguments are passed literally
/// to this macro in the order `include`, `middleware`, `suffix`, /// to this macro in the order `include`, `middleware`, `suffix`,
/// and `exclude`. Both `suffix` and `exclude` are optional. /// and `exclude`. Both `suffix` and `exclude` are optional.

View File

@@ -192,6 +192,17 @@ pub fn snapshot_project_node(
} }
(_, None, _, Some(PathNode::Required(path))) => { (_, None, _, Some(PathNode::Required(path))) => {
// If git filter is active and the path was filtered out, treat it
// as if the path was optional and skip this node.
if context.has_git_filter() {
log::trace!(
"Skipping project node '{}' because its path was filtered by git filter: {}",
instance_name,
path.display()
);
return Ok(None);
}
anyhow::bail!( anyhow::bail!(
"Rojo project referred to a file using $path that could not be turned into a Roblox Instance by Rojo.\n\ "Rojo project referred to a file using $path that could not be turned into a Roblox Instance by Rojo.\n\
Check that the file exists and is a file type known by Rojo.\n\ Check that the file exists and is a file type known by Rojo.\n\
@@ -282,7 +293,12 @@ pub fn snapshot_project_node(
// If the user didn't specify it AND $path was not specified (meaning // If the user didn't specify it AND $path was not specified (meaning
// there's no existing value we'd be stepping on from a project file or meta // there's no existing value we'd be stepping on from a project file or meta
// file), set it to true. // file), set it to true.
if let Some(ignore) = node.ignore_unknown_instances { //
// When git filter is active, always set to true to preserve descendants
// in Studio that are not tracked by Rojo.
if context.has_git_filter() {
metadata.ignore_unknown_instances = true;
} else if let Some(ignore) = node.ignore_unknown_instances {
metadata.ignore_unknown_instances = ignore; metadata.ignore_unknown_instances = ignore;
} else if node.path.is_none() { } else if node.path.is_none() {
// TODO: Introduce a strict mode where $ignoreUnknownInstances is never // TODO: Introduce a strict mode where $ignoreUnknownInstances is never

View File

@@ -8,7 +8,10 @@ use rbx_dom_weak::{
ustr, Instance, Ustr, WeakDom, ustr, Instance, Ustr, WeakDom,
}; };
use crate::{multimap::MultiMap, REF_ID_ATTRIBUTE_NAME, REF_POINTER_ATTRIBUTE_PREFIX}; use crate::{
multimap::MultiMap, syncback::snapshot::inst_path, REF_ID_ATTRIBUTE_NAME,
REF_POINTER_ATTRIBUTE_PREFIX,
};
pub struct RefLinks { pub struct RefLinks {
/// A map of referents to each of their Ref properties. /// A map of referents to each of their Ref properties.
@@ -50,6 +53,22 @@ pub fn collect_referents(dom: &WeakDom) -> RefLinks {
continue; continue;
} }
let target = match dom.get_by_ref(*prop_value) {
Some(inst) => inst,
None => {
// Properties that are Some but point to nothing may as
// well be `nil`. Roblox and us never produce these values
// but syncback prunes trees without adjusting ref
// properties for performance reasons.
log::warn!(
"Property {}.{} will be `nil` on the disk because the actual value is not being included in syncback",
inst_path(dom, inst_ref),
prop_name
);
continue;
}
};
links.insert( links.insert(
inst_ref, inst_ref,
RefLink { RefLink {
@@ -58,10 +77,6 @@ pub fn collect_referents(dom: &WeakDom) -> RefLinks {
}, },
); );
let target = dom
.get_by_ref(*prop_value)
.expect("Refs in DOM should point to valid Instances");
// 1. Check if target has an ID // 1. Check if target has an ID
if let Some(id) = get_existing_id(target) { if let Some(id) = get_existing_id(target) {
// If it does, we need to check whether that ID is a duplicate // If it does, we need to check whether that ID is a duplicate

View File

@@ -71,8 +71,13 @@ syncback_tests! {
ref_properties_conflict => ["src/Pointer_2.model.json", "src/Target_2.model.json"], ref_properties_conflict => ["src/Pointer_2.model.json", "src/Target_2.model.json"],
// Ensures that having multiple pointers that are aimed at the same target doesn't trigger ref rewrites. // Ensures that having multiple pointers that are aimed at the same target doesn't trigger ref rewrites.
ref_properties_duplicate => [], ref_properties_duplicate => [],
// Ensures that ref properties that point to nothing after the prune both
// do not leave any trace of themselves
ref_properties_pruned => ["src/Pointer1.model.json", "src/Pointer2.model.json", "src/Pointer3.model.json"],
// Ensures that the old middleware is respected during syncback // Ensures that the old middleware is respected during syncback
respect_old_middleware => ["default.project.json", "src/model_json.model.json", "src/rbxm.rbxm", "src/rbxmx.rbxmx"], respect_old_middleware => ["default.project.json", "src/model_json.model.json", "src/rbxm.rbxm", "src/rbxmx.rbxmx"],
// Ensures that the `$schema` field roundtrips with syncback
schema_roundtrip => ["default.project.json", "src/model.model.json", "src/init/init.meta.json", "src/adjacent.meta.json"],
// Ensures that StringValues inside project files are written to the // Ensures that StringValues inside project files are written to the
// project file, but only if they don't have `$path` set // project file, but only if they don't have `$path` set
string_value_project => ["default.project.json"], string_value_project => ["default.project.json"],
@@ -81,5 +86,4 @@ syncback_tests! {
sync_rules => ["src/module.modulescript", "src/text.text"], sync_rules => ["src/module.modulescript", "src/text.text"],
// Ensures that the `syncUnscriptable` setting works // Ensures that the `syncUnscriptable` setting works
unscriptable_properties => ["default.project.json"], unscriptable_properties => ["default.project.json"],
schema_roundtrip => ["default.project.json", "src/model.model.json", "src/init/init.meta.json", "src/adjacent.meta.json"]
} }